Example usage for java.lang ThreadLocal withInitial

Introduction

This page lists example usages of java.lang.ThreadLocal.withInitial, collected from open-source projects.

Prototype

public static <S> ThreadLocal<S> withInitial(Supplier<? extends S> supplier) 

Document

Creates a thread-local variable whose initial value is determined by invoking the get method on the given Supplier the first time the variable is accessed on each thread.

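A minimal sketch of the pattern itself (the class and method names here are hypothetical, not drawn from the projects below): each thread that calls get() receives its own instance, created lazily by the supplier.

import java.text.SimpleDateFormat;
import java.util.Date;

public class DateFormatHolder {
    // SimpleDateFormat is not thread-safe, so each thread lazily gets its own copy.
    private static final ThreadLocal<SimpleDateFormat> FORMAT = ThreadLocal
            .withInitial(() -> new SimpleDateFormat("yyyy-MM-dd"));

    public static String format(Date date) {
        return FORMAT.get().format(date);
    }
}
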
Usage

From source file:com.dulion.astatium.mesh.shredder.XmlShredder.java

public XmlShredder(MetaGraph metaGraph) {
    this.manager = (ContextManager) metaGraph;
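    // StAX factories are not guaranteed to be thread-safe, so each thread builds its own.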
    this.factory = ThreadLocal.withInitial(() -> {
        XMLInputFactory factory = XMLInputFactory.newInstance();
        factory.setProperty(XMLInputFactory.IS_NAMESPACE_AWARE, Boolean.TRUE);
        factory.setProperty(XMLInputFactory.IS_COALESCING, Boolean.TRUE);
        factory.setProperty(XMLInputFactory.IS_REPLACING_ENTITY_REFERENCES, Boolean.TRUE);
        return factory;
    });
}

From source file:de.tudarmstadt.lt.lm.service.LtSegProvider.java

public LtSegProvider() {
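    // Each thread reflectively instantiates its own splitter and tokenizer.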
    _sentenceSplitter = ThreadLocal.withInitial(() -> {
        try {
            return Properties.sentenceSplitter().newInstance();
        } catch (InstantiationException | IllegalAccessException e) {
            throw new IllegalArgumentException(e);
        }
    });
    _tokenizer = ThreadLocal.withInitial(() -> {
        try {
            return Properties.tokenizer().newInstance();
        } catch (InstantiationException | IllegalAccessException e) {
            throw new IllegalArgumentException(e);
        }
    });

}

From source file:com.adobe.ags.curly.controller.BatchRunner.java

public BatchRunner(AuthHandler auth, int concurrency, List<Action> actions, List<Map<String, String>> batchData,
        Map<String, StringProperty> defaultValues, Set<String> displayColumns) {
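    // Each worker thread lazily obtains its own authenticated HTTP client.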
    clientThread = ThreadLocal.withInitial(auth::getAuthenticatedClient);
    result = new BatchRunnerResult();
    tasks = new ArrayBlockingQueue<>(batchData.size());
    this.concurrency = concurrency;
    defaultValues.put("server", new ReadOnlyStringWrapper(auth.getUrlBase()));
    buildWorkerPool = () -> buildTasks(actions, batchData, defaultValues, displayColumns);
}

From source file:com.adobe.ags.curly.ConnectionManager.java

private void createNewConnectionManager() {
    try {
        SSLContextBuilder builder = new SSLContextBuilder();
        builder.loadTrustMaterial(new TrustSelfSignedStrategy());

        SSLConnectionSocketFactory sslsf = new SSLConnectionSocketFactory(builder.build(),
                NoopHostnameVerifier.INSTANCE);
        Registry<ConnectionSocketFactory> r = RegistryBuilder.<ConnectionSocketFactory>create()
                .register("http", new PlainConnectionSocketFactory()).register("https", sslsf).build();
        connectionManager = new PoolingHttpClientConnectionManager(r);
        connectionManager.setValidateAfterInactivity(500);
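        // HttpClientContext is not thread-safe, so each thread keeps its own.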
        sharedContext = ThreadLocal.withInitial(HttpClientContext::new);
    } catch (NoSuchAlgorithmException | KeyStoreException | KeyManagementException ex) {
        Logger.getLogger(ConnectionManager.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:com.openkappa.rst.Classifier.java

private Classifier(PatriciaTrie<Container>[] criteria, T[] outcomes, T unmatchedValue) {
    this.criteria = criteria;
    this.outcomes = outcomes;
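        // Per-thread scratch objects avoid sharing mutable state across concurrent callers.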
    this.existence = ThreadLocal.withInitial(() -> new ArrayContainer(0, outcomes.length));
    this.unmatchedValue = unmatchedValue;
    this.searcher = ThreadLocal.withInitial(StringBuilder::new);
}

From source file:de.tudarmstadt.lt.seg.app.Segmenter.java

private void run_parallel() throws Exception {

    InputStream in = System.in;
    if (!"-".equals(_filename_in))
        in = new FileInputStream(_filename_in);
    Stream<String> liter = new BufferedReader(new InputStreamReader(in, Charset.defaultCharset())).lines();

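    // Build one splitter and one tokenizer per worker thread; construction can fail, hence the wrapped exceptions.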
    ThreadLocal<ISentenceSplitter> sentenceSplitter = ThreadLocal.withInitial(() -> {
        try {
            return newSentenceSplitter();
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
            throw new RuntimeException(e);
        }
    });
    ThreadLocal<ITokenizer> tokenizer = ThreadLocal.withInitial(() -> {
        try {
            return newTokenizer();
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
            throw new RuntimeException(e);
        }
    });

    final PrintWriter[] w = new PrintWriter[_parallelism];
    // init writers
    for (int i = 0; i < _parallelism; i++) {
        OutputStream out = System.out;
        if (!"-".equals(_filename_out)) {
            out = new FileOutputStream(String.format("%s_%d", _filename_out, i));
        }
        w[i] = new PrintWriter(new OutputStreamWriter(out, Charset.defaultCharset()));
    }

    BlockingQueue<Runnable> queue = new ArrayBlockingQueue<Runnable>(_parallelism * 2, true);
    ExecutorService es = new ThreadPoolExecutor(_parallelism, _parallelism, 0L, TimeUnit.MILLISECONDS, queue);

    AtomicLong lc = new AtomicLong(0);
    liter.forEach((line) -> {
        // don't try to submit new threads, wait until the thread queue has some capacity again
        while (queue.remainingCapacity() == 0)
            try {
                Thread.sleep(10);
            } catch (InterruptedException e) {
                // ignore the interruption and keep polling
            }
        es.submit(() -> {
            final long docid = lc.incrementAndGet();
            if (docid % 1000 == 0)
                System.err.format("Processing line %d ('%s')%n", docid, _filename_in);
            final int w_i = (int) (docid % _parallelism);
            split_and_tokenize(new StringReader(line.trim()), String.format("%s:%d", _filename_in, docid),
                    sentenceSplitter.get(), tokenizer.get(), _level_filter, _level_normalize, _merge_types,
                    _merge_tokens, _separator_sentence, _separator_token, _separator_desc, w[w_i]);

        });
    });
    es.shutdown();
    es.awaitTermination(Integer.MAX_VALUE, TimeUnit.DAYS);

    // TODO: the stream parallelism version does not work because it submits too many threads at once
    //      AtomicLong lc = new AtomicLong(0);
    //      ForkJoinPool forkJoinPool = new ForkJoinPool(_parallelism);
    //      forkJoinPool.submit(() -> 
    //         liter.parallel().forEach((line) -> {
    //            final long docid = lc.incrementAndGet();
    //            if(docid % 1000 == 0)
    //               System.err.format("Processing line %d ('%s')%n", docid, _filename_in);
    //   
    //            String l = line.replace("\\t", "\t").replace("\\n", "\n");
    //            split_and_tokenize(
    //                  new StringReader(l),
    //                  String.format("%s:%d", _filename_in, docid),
    //                  sentenceSplitter.get(), 
    //                  tokenizer.get(), 
    //                  _level_filter,
    //                  _level_normalize,
    //                  _merge_types,
    //                  _merge_tokens,
    //                  _separator_sentence,
    //                  _separator_token,
    //                  _separator_desc,
    //                  w);
    //      })).get();

}

From source file:org.apache.metron.dataloads.nonbulk.flatfile.importer.LocalSummarizer.java

@Override
protected ThreadLocal<SummarizationState> createState(EnumMap<SummarizeOptions, Optional<Object>> config,
        Configuration hadoopConfig, ExtractorHandler handler) {
    final StatefulExtractor extractor = (StatefulExtractor) handler.getExtractor();
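        // Each thread gets its own SummarizationState; every instance is also recorded in stateList.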
    return ThreadLocal.withInitial(() -> {
        Object initState = extractor.initializeState(handler.getConfig());
        SummarizationState ret = new SummarizationState(extractor, initState);
        stateList.add(ret);
        return ret;
    });
}

From source file:org.apache.metron.performance.load.LoadGenerator.java

public static void main(String[] args) throws Exception {
    CommandLine cli = LoadOptions.parse(new PosixParser(), args);
    EnumMap<LoadOptions, Optional<Object>> evaluatedArgs = LoadOptions.createConfig(cli);
    Map<String, Object> kafkaConfig = new HashMap<>();
    kafkaConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    kafkaConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    kafkaConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    kafkaConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    if (LoadOptions.ZK.has(cli)) {
        String zkQuorum = (String) evaluatedArgs.get(LoadOptions.ZK).get();
        kafkaConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
                Joiner.on(",").join(KafkaUtils.INSTANCE.getBrokersFromZookeeper(zkQuorum)));
    }
    String groupId = evaluatedArgs.get(LoadOptions.CONSUMER_GROUP).get().toString();
    System.out.println("Consumer Group: " + groupId);
    kafkaConfig.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    if (LoadOptions.KAFKA_CONFIG.has(cli)) {
        kafkaConfig.putAll((Map<String, Object>) evaluatedArgs.get(LoadOptions.KAFKA_CONFIG).get());
    }
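    // One producer per sender thread, created on first use.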
    kafkaProducer = ThreadLocal.withInitial(() -> new KafkaProducer<>(kafkaConfig));
    int numThreads = (int) evaluatedArgs.get(LoadOptions.NUM_THREADS).get();
    System.out.println("Thread pool size: " + numThreads);
    pool = Executors.newFixedThreadPool(numThreads);
    Optional<Object> eps = evaluatedArgs.get(LoadOptions.EPS);

    Optional<Object> outputTopic = evaluatedArgs.get(LoadOptions.OUTPUT_TOPIC);
    Optional<Object> monitorTopic = evaluatedArgs.get(LoadOptions.MONITOR_TOPIC);
    long sendDelta = (long) evaluatedArgs.get(LoadOptions.SEND_DELTA).get();
    long monitorDelta = (long) evaluatedArgs.get(LoadOptions.MONITOR_DELTA).get();
    if ((eps.isPresent() && outputTopic.isPresent()) || monitorTopic.isPresent()) {
        Timer timer = new Timer(false);
        long startTimeMs = System.currentTimeMillis();
        if (outputTopic.isPresent() && eps.isPresent()) {
            List<String> templates = (List<String>) evaluatedArgs.get(LoadOptions.TEMPLATE).get();
            if (templates.isEmpty()) {
                System.out.println("Empty templates, so nothing to do.");
                return;
            }
            Optional<Object> biases = evaluatedArgs.get(LoadOptions.BIASED_SAMPLE);
            Sampler sampler = new UnbiasedSampler();
            if (biases.isPresent()) {
                sampler = new BiasedSampler((List<Map.Entry<Integer, Integer>>) biases.get(), templates.size());
            }
            MessageGenerator generator = new MessageGenerator(templates, sampler);
            Long targetLoad = (Long) eps.get();
            int periodsPerSecond = (int) (1000 / sendDelta);
            long messagesPerPeriod = targetLoad / periodsPerSecond;
            String outputTopicStr = (String) outputTopic.get();
            System.out.println(
                    "Generating data to " + outputTopicStr + " at " + targetLoad + " events per second");
            System.out.println("Sending " + messagesPerPeriod + " messages to " + outputTopicStr + " every "
                    + sendDelta + "ms");
            timer.scheduleAtFixedRate(new SendToKafka(outputTopicStr, messagesPerPeriod, numThreads, generator,
                    pool, numSent, kafkaProducer), 0, sendDelta);
        }
        List<AbstractMonitor> monitors = new ArrayList<>();
        if (outputTopic.isPresent() && monitorTopic.isPresent()) {
            System.out.println("Monitoring " + monitorTopic.get() + " every " + monitorDelta + " ms");
            monitors.add(new EPSGeneratedMonitor(outputTopic, numSent));
            monitors.add(new EPSThroughputWrittenMonitor(monitorTopic, kafkaConfig));
        } else if (outputTopic.isPresent() && !monitorTopic.isPresent()) {
            System.out.println("Monitoring " + outputTopic.get() + " every " + monitorDelta + " ms");
            monitors.add(new EPSGeneratedMonitor(outputTopic, numSent));
            monitors.add(new EPSThroughputWrittenMonitor(outputTopic, kafkaConfig));
        } else if (!outputTopic.isPresent() && monitorTopic.isPresent()) {
            System.out.println("Monitoring " + monitorTopic.get() + " every " + monitorDelta + " ms");
            monitors.add(new EPSThroughputWrittenMonitor(monitorTopic, kafkaConfig));
        } else if (!outputTopic.isPresent() && !monitorTopic.isPresent()) {
            System.out.println(
                    "You have not specified an output topic or a monitoring topic, so I have nothing to do here.");
        }
        int lookback = (int) evaluatedArgs.get(LoadOptions.SUMMARY_LOOKBACK).get();
        if (lookback > 0) {
            System.out.println("Summarizing over the last " + lookback + " monitoring periods ("
                    + lookback * monitorDelta + "ms)");
        } else {
            System.out.println("Turning off summarization.");
        }
        final CSVWriter csvWriter = new CSVWriter((File) evaluatedArgs.get(LoadOptions.CSV).orElse(null));
        Writer writer = new Writer(monitors, lookback, new ArrayList<Consumer<Writable>>() {
            {
                add(new ConsoleWriter());
                add(csvWriter);
            }
        });
        timer.scheduleAtFixedRate(new MonitorTask(writer), 0, monitorDelta);
        Optional<Object> timeLimit = evaluatedArgs.get(LoadOptions.TIME_LIMIT);
        if (timeLimit.isPresent()) {
            System.out.println("Ending in " + timeLimit.get() + " ms.");
            timer.schedule(new TimerTask() {
                @Override
                public void run() {
                    timer.cancel();
                    long durationS = (System.currentTimeMillis() - startTimeMs) / 1000;
                    System.out.println("\nGenerated " + numSent.get() + " in " + durationS + " seconds.");
                    csvWriter.close();
                    System.exit(0);
                }
            }, (Long) timeLimit.get());
        }
    }
}

From source file:spimedb.SpimeDB.java

private SpimeDB(File file, Directory dir) {

    this.file = file;
    this.dir = dir;
    this.analyzer = new StandardAnalyzer();

    this.facetsConfig.setHierarchical(NObject.ID, true);
    this.facetsConfig.setMultiValued(NObject.ID, false);

    this.facetsConfig.setHierarchical(NObject.TAG, false);
    this.facetsConfig.setMultiValued(NObject.TAG, true);

    final String[] defaultFindFields = new String[] { NObject.NAME, NObject.DESC, NObject.TAG, NObject.ID };

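    // Lucene query parsers are not thread-safe, so each thread constructs its own.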
    this.defaultFindQueryParser = ThreadLocal.withInitial(() -> new MultiFieldQueryParser(defaultFindFields,
            analyzer,
            Maps.mutable.with(NObject.NAME, 1f, NObject.ID, 1f, NObject.DESC, 0.25f, NObject.TAG, 0.5f)));

    writerConf = new IndexWriterConfig(analyzer);
    writerConf.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
    writerConf.setCommitOnClose(true);
    try {
        writer = new IndexWriter(dir, writerConf);
        readerMgr = new ReaderManager(writer, true, true);
        searcherMgr = new SearcherManager(writer, true, true, new SearcherFactory());
    } catch (IOException e) {
        e.printStackTrace();
    }

}