Example usage for java.util.concurrent TimeUnit DAYS

Introduction

On this page you can find usage examples for the java.util.concurrent TimeUnit DAYS constant.

Prototype

TimeUnit DAYS

Document

Time unit representing twenty four hours.
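
The Javadoc description above is terse, so here is a minimal, self-contained sketch (not drawn from any of the project sources below, and the class name is made up for illustration) of what TimeUnit.DAYS offers: conversion to and from other time units, and its common role as an effectively unbounded timeout for blocking calls such as ExecutorService.awaitTermination.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class TimeUnitDaysDemo {
    public static void main(String[] args) throws InterruptedException {
        // One day expressed in other units.
        System.out.println(TimeUnit.DAYS.toHours(1));   // 24
        System.out.println(TimeUnit.DAYS.toMillis(1));  // 86400000

        // Converting in the other direction: 48 hours -> 2 days.
        System.out.println(TimeUnit.DAYS.convert(48, TimeUnit.HOURS));

        // DAYS is often used as a "practically forever" bound when waiting
        // for an executor to finish, as in several of the examples below.
        ExecutorService exec = Executors.newSingleThreadExecutor();
        exec.submit(() -> System.out.println("task done"));
        exec.shutdown();
        exec.awaitTermination(1, TimeUnit.DAYS);
    }
}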

Usage

From source file:com.linkedin.pinot.query.aggregation.AggregationGroupByWithDictionaryAndTrieTreeOperatorMultiValueTest.java

private void setupSegmentList(int numberOfSegments) throws Exception {
    final String filePath = TestUtils
            .getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));
    _indexSegmentList.clear();
    if (INDEXES_DIR.exists()) {
        FileUtils.deleteQuietly(INDEXES_DIR);
    }
    INDEXES_DIR.mkdir();

    for (int i = 0; i < numberOfSegments; ++i) {
        final File segmentDir = new File(INDEXES_DIR, "segment_" + i);

        final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
                new File(filePath), segmentDir, "daysSinceEpoch", TimeUnit.DAYS, "test");

        final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
        driver.init(config);
        driver.build();

        LOGGER.debug("built at : {}", segmentDir.getAbsolutePath());
        _indexSegmentList
                .add(ColumnarSegmentLoader.load(new File(segmentDir, driver.getSegmentName()), ReadMode.heap));
    }
}

From source file:org.deeplearning4j.models.word2vec.Word2Vec.java

/**
 * Train the model
 */
public void fit() throws IOException {
    boolean loaded = buildVocab();
    //save vocab after building
    if (!loaded && saveVocab)
        vocab().saveVocab();
    if (stopWords == null)
        readStopWords();

    log.info("Training word2vec multithreaded");

    if (sentenceIter != null)
        sentenceIter.reset();
    if (docIter != null)
        docIter.reset();

    int[] docs = vectorizer.index().allDocs();

    if (docs.length < 1) {
        vectorizer.fit();
    }

    docs = vectorizer.index().allDocs();
    if (docs.length < 1) {
        throw new IllegalStateException("No documents found");
    }

    totalWords = vectorizer.numWordsEncountered();
    if (totalWords < 1)
        throw new IllegalStateException("Unable to train, total words less than 1");

    totalWords *= numIterations;

    log.info("Processing sentences...");

    AtomicLong numWordsSoFar = new AtomicLong(0);
    final AtomicLong nextRandom = new AtomicLong(5);
    ExecutorService exec = new ThreadPoolExecutor(Runtime.getRuntime().availableProcessors(),
            Runtime.getRuntime().availableProcessors(), 0L, TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<Runnable>(), new RejectedExecutionHandler() {
                @Override
                public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
                    try {
                        Thread.sleep(1000);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                    executor.submit(r);
                }
            });

    final Queue<List<VocabWord>> batch2 = new ConcurrentLinkedDeque<>();
    vectorizer.index().eachDoc(new Function<List<VocabWord>, Void>() {
        @Override
        public Void apply(List<VocabWord> input) {
            List<VocabWord> batch = new ArrayList<>();
            addWords(input, nextRandom, batch);
            if (!batch.isEmpty()) {
                batch2.add(batch);
            }

            return null;
        }
    }, exec);

    exec.shutdown();
    try {
        exec.awaitTermination(1, TimeUnit.DAYS);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }

    ActorSystem actorSystem = ActorSystem.create();

    for (int i = 0; i < numIterations; i++)
        doIteration(batch2, numWordsSoFar, nextRandom, actorSystem);
    actorSystem.shutdown();

}

From source file:com.linkedin.pinot.query.selection.SelectionOnlyQueriesForMultiValueColumnTest.java

private void setupSegmentList(int numberOfSegments) throws Exception {
    final String filePath = TestUtils
            .getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));

    if (INDEXES_DIR.exists()) {
        FileUtils.deleteQuietly(INDEXES_DIR);
    }
    INDEXES_DIR.mkdir();

    for (int i = 0; i < numberOfSegments; ++i) {
        final File segmentDir = new File(INDEXES_DIR, "segment_" + i);

        final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
                new File(filePath), segmentDir, "daysSinceEpoch", TimeUnit.DAYS, "test");

        final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
        driver.init(config);
        driver.build();

        System.out.println("built at : " + segmentDir.getAbsolutePath());
        _indexSegmentList
                .add(ColumnarSegmentLoader.load(new File(segmentDir, driver.getSegmentName()), ReadMode.heap));
    }
}

From source file:com.linkedin.pinot.query.aggregation.AggregationGroupByOperatorForMultiValueTest.java

private void setupSegmentList(int numberOfSegments) throws Exception {
    final String filePath = TestUtils
            .getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));
    _indexSegmentList.clear();
    if (INDEXES_DIR.exists()) {
        FileUtils.deleteQuietly(INDEXES_DIR);
    }
    INDEXES_DIR.mkdir();

    for (int i = 0; i < numberOfSegments; ++i) {
        final File segmentDir = new File(INDEXES_DIR, "segment_" + i);

        final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
                new File(filePath), segmentDir, "daysSinceEpoch", TimeUnit.DAYS, "test");

        final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
        driver.init(config);
        driver.build();

        LOGGER.info("built at : {}", segmentDir.getAbsolutePath());
        _indexSegmentList.add(new OfflineSegmentDataManager(
                ColumnarSegmentLoader.load(new File(segmentDir, driver.getSegmentName()), ReadMode.heap)));
    }
}

From source file:com.linkedin.pinot.query.aggregation.AggregationGroupByWithDictionaryOperatorTest.java

private void setupSegmentList(int numberOfSegments) throws Exception {
    final String filePath = TestUtils
            .getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));

    _indexSegmentList.clear();
    if (INDEXES_DIR.exists()) {
        FileUtils.deleteQuietly(INDEXES_DIR);
    }
    INDEXES_DIR.mkdir();

    for (int i = 0; i < numberOfSegments; ++i) {
        final File segmentDir = new File(INDEXES_DIR, "segment_" + i);

        final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
                new File(filePath), segmentDir, "time_day", TimeUnit.DAYS, "test");

        final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
        driver.init(config);
        driver.build();

        LOGGER.debug("built at : {}", segmentDir.getAbsolutePath());
        _indexSegmentList.add(new OfflineSegmentDataManager(
                ColumnarSegmentLoader.load(new File(segmentDir, driver.getSegmentName()), ReadMode.heap)));
    }
}

From source file:com.linkedin.pinot.query.selection.SelectionQueriesForMultiValueColumnTest.java

private void setupSegmentList(int numberOfSegments) throws Exception {
    final String filePath = TestUtils
            .getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));

    if (INDEXES_DIR.exists()) {
        FileUtils.deleteQuietly(INDEXES_DIR);
    }
    INDEXES_DIR.mkdir();

    for (int i = 0; i < numberOfSegments; ++i) {
        final File segmentDir = new File(INDEXES_DIR, "segment_" + i);

        final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
                new File(filePath), segmentDir, "daysSinceEpoch", TimeUnit.DAYS, "test");

        final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
        driver.init(config);
        driver.build();

        System.out.println("built at : " + segmentDir.getAbsolutePath());
        _indexSegmentList.add(new OfflineSegmentDataManager(
                ColumnarSegmentLoader.load(new File(segmentDir, driver.getSegmentName()), ReadMode.heap)));
    }
}

From source file:com.linkedin.pinot.query.aggregation.AggregationGroupByWithDictionaryOperatorForMultiValueTest.java

private void setupSegmentList(int numberOfSegments) throws Exception {
    final String filePath = TestUtils
            .getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));
    _indexSegmentList.clear();
    if (INDEXES_DIR.exists()) {
        FileUtils.deleteQuietly(INDEXES_DIR);
    }
    INDEXES_DIR.mkdir();

    for (int i = 0; i < numberOfSegments; ++i) {
        final File segmentDir = new File(INDEXES_DIR, "segment_" + i);

        final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
                new File(filePath), segmentDir, "daysSinceEpoch", TimeUnit.DAYS, "test");

        final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
        driver.init(config);
        driver.build();

        LOGGER.debug("built at : {}", segmentDir.getAbsolutePath());
        _indexSegmentList.add(new OfflineSegmentDataManager(
                ColumnarSegmentLoader.load(new File(segmentDir, driver.getSegmentName()), ReadMode.heap)));
    }
}

From source file:org.hibernate.search.test.performance.scenario.TestExecutor.java

private void scheduleTasksAndStart(TestScenarioContext ctx, long cyclesCount) {
    ExecutorService executor = newAutoStoppingErrorReportingThreadPool(ctx);
    for (int i = 0; i < cyclesCount; i++) {
        for (Runnable task : ctx.tasks) {
            executor.execute(task);
        }
    }

    try {
        ctx.executionStopWatch.start();
        ctx.startSignal.countDown();
        executor.shutdown();
        executor.awaitTermination(1, TimeUnit.DAYS);
        ctx.executionStopWatch.stop();
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }
}

From source file:com.codebullets.sagalib.timeout.InMemoryTimeoutManagerTest.java

/**
 * <pre>
 * Given => Multiple timeout are added
 * When  => timeout is canceled by saga id
 * Then  => timeout is removed from schedule
 * </pre>
 */
@Test
public void cancelTimeouts_multipleTimeoutHaveBeenAdded_timeoutRemovedFromSchedule() {
    // given
    String sagaId = "sagaId_" + RandomStringUtils.randomAlphanumeric(5);
    ScheduledFuture future = mock(ScheduledFuture.class);
    when(executor.schedule(any(Runnable.class), anyLong(), any(TimeUnit.class))).thenReturn(future);
    sut.requestTimeout(null, sagaId, 1, TimeUnit.DAYS, null, null);

    // when
    sut.cancelTimeouts(sagaId);

    // then
    verify(future).cancel(false);
}

From source file:com.linkedin.pinot.query.aggregation.AggregationQueriesTest.java

private void setupSegmentList(int numberOfSegments) throws Exception {
    final String filePath = TestUtils
            .getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));

    if (INDEXES_DIR.exists()) {
        FileUtils.deleteQuietly(INDEXES_DIR);
    }
    INDEXES_DIR.mkdir();

    for (int i = 0; i < numberOfSegments; ++i) {
        final File segmentDir = new File(INDEXES_DIR, "segment_" + i);

        final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
                new File(filePath), segmentDir, "time_day", TimeUnit.DAYS, "test");

        final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
        driver.init(config);
        driver.build();

        LOGGER.debug("built at : {}", segmentDir.getAbsolutePath());
        _indexSegmentList.add(new OfflineSegmentDataManager(
                ColumnarSegmentLoader.load(new File(segmentDir, driver.getSegmentName()), ReadMode.heap)));
    }
}