Example usage for java.util.concurrent TimeUnit HOURS

List of usage examples for java.util.concurrent TimeUnit HOURS

Introduction

On this page you can find example usages of java.util.concurrent TimeUnit HOURS.

Prototype

TimeUnit HOURS

To view the source code for java.util.concurrent TimeUnit HOURS, click the Source link.

Document

Time unit representing sixty minutes.
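
Below is a minimal, self-contained sketch (not taken from any of the projects listed under Usage) showing the conversion methods typically used with TimeUnit.HOURS.

import java.util.concurrent.TimeUnit;

public class TimeUnitHoursDemo {
    public static void main(String[] args) {
        // Convert 2 hours into smaller units.
        long millis = TimeUnit.HOURS.toMillis(2);   // 7200000
        long minutes = TimeUnit.HOURS.toMinutes(2); // 120

        // Convert the other way: whole hours contained in 90 minutes (truncates).
        long hours = TimeUnit.HOURS.convert(90, TimeUnit.MINUTES); // 1

        System.out.println(millis + " ms, " + minutes + " min, " + hours + " h");
    }
}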

Usage

From source file:com.linkedin.pinot.core.data.manager.realtime.LLRealtimeSegmentDataManager.java

private boolean endCriteriaReached() {
    Preconditions.checkState(_state.shouldConsume(), "Incorrect state %s", _state);
    long now = now();
    switch (_state) {
    case INITIAL_CONSUMING:
        // The segment has been created, and we have not posted a segmentConsumed() message on the controller yet.
        // We need to consume as much data as available, until we have either reached the max number of rows or
        // the max time we are allowed to consume.
        if (now >= _consumeEndTime) {
            if (_realtimeSegment.getRawDocumentCount() == 0) {
                segmentLogger.info("No events came in, extending time by {} hours",
                        TIME_EXTENSION_ON_EMPTY_SEGMENT_HOURS);
                _consumeEndTime += TimeUnit.HOURS.toMillis(TIME_EXTENSION_ON_EMPTY_SEGMENT_HOURS);
                return false;
            }
            segmentLogger.info("Stopping consumption due to time limit start={} now={} numRows={}",
                    _startTimeMs, now, _numRowsConsumed);
            return true;
        } else if (_numRowsConsumed >= _segmentMaxRowCount) {
            segmentLogger.info("Stopping consumption due to row limit nRows={} maxNRows={}", _numRowsConsumed,
                    _segmentMaxRowCount);
            return true;
        }
        return false;

    case CATCHING_UP:
        // We have posted segmentConsumed() at least once, and the controller is asking us to catch up to a certain offset.
        // There is no time limit here, so just check to see that we are still within the offset we need to reach.
        // Going past the offset is an exception.
        if (_currentOffset == _finalOffset) {
            segmentLogger.info("Caught up to offset={}, state={}", _finalOffset, _state.toString());
            return true;
        }
        if (_currentOffset > _finalOffset) {
            segmentLogger.error("Offset higher in state={}, current={}, final={}", _state.toString(),
                    _currentOffset, _finalOffset);
            throw new RuntimeException("Past max offset");
        }
        return false;

    case CONSUMING_TO_ONLINE:
        // We are attempting to go from CONSUMING to ONLINE state. We are making a last attempt to catch up to the
        // target offset. We have a time constraint, and need to stop consuming if we cannot get to the target offset
        // within that time.
        if (_currentOffset == _finalOffset) {
            segmentLogger.info("Caught up to offset={}, state={}", _finalOffset, _state.toString());
            return true;
        } else if (now >= _consumeEndTime) {
            segmentLogger.info("Past max time budget: offset={}, state={}", _currentOffset, _state.toString());
            return true;
        }
        if (_currentOffset > _finalOffset) {
            segmentLogger.error("Offset higher in state={}, current={}, final={}", _state.toString(),
                    _currentOffset, _finalOffset);
            throw new RuntimeException("Past max offset");
        }
        return false;
    default:
        segmentLogger.error("Illegal state {}" + _state.toString());
        throw new RuntimeException("Illegal state to consume");
    }
}

From source file:org.eclipse.skalli.core.rest.admin.StatisticsQueryTest.java

@Test
public void testFromToNow() throws Exception {
    Calendar cal = Calendar.getInstance();
    long now = cal.getTimeInMillis();

    StatisticsQuery query = new StatisticsQuery(getParams(null, "now", null), now);
    Assert.assertEquals(0, query.getFrom());
    Assert.assertEquals(now, query.getTo());

    query = new StatisticsQuery(getParams("-1h", "now", null), now);
    Assert.assertEquals(now - TimeUnit.MILLISECONDS.convert(1, TimeUnit.HOURS), query.getFrom());
    Assert.assertEquals(now, query.getTo());

    long fromMillis = now - TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);
    cal.setTimeInMillis(fromMillis);
    String fromStr = DatatypeConverter.printDateTime(cal);
    query = new StatisticsQuery(getParams(fromStr, "now", null), now);
    Assert.assertEquals(fromMillis, query.getFrom());
    Assert.assertEquals(now, query.getTo());

    query = new StatisticsQuery(getParams("now", null, null), now);
    Assert.assertEquals(now, query.getFrom());
    Assert.assertEquals(now, query.getTo());

    query = new StatisticsQuery(getParams("now", "now", null), now);
    Assert.assertEquals(now, query.getFrom());
    Assert.assertEquals(now, query.getTo());

    // period is ignored, if from and to are specified
    query = new StatisticsQuery(getParams("now", "now", "1d"), now);
    Assert.assertEquals(now, query.getFrom());
    Assert.assertEquals(now, query.getTo());
}

From source file:yrun.YarnRunner.java

public void execute() throws IOException, YarnException, InterruptedException {
    LOG.info("Using application path [" + _installPath + "]");
    Path jarPath = installThisJar(_installPath, _appJarFile);
    LOG.info("Driver installed [" + jarPath + "]");
    List<Path> installedArchivePathList = install(_installPath, _archivePathList);
    for (Path p : installedArchivePathList) {
        LOG.info("Archive installed [" + p + "]");
    }

    YarnRunnerArgs yarnRunnerArgs = new YarnRunnerArgs();
    yarnRunnerArgs.setCommand(_command);

    Path argsPath = installThisArgs(_installPath, yarnRunnerArgs);

    final YarnClient client = YarnClient.createYarnClient();
    _configuration.setInt("yarn.nodemanager.delete.debug-delay-sec", (int) TimeUnit.HOURS.toSeconds(1));
    client.init(_configuration);
    client.start();

    YarnClientApplication app = client.createApplication();
    ContainerLaunchContext amContainer = Records.newRecord(ContainerLaunchContext.class);

    Map<String, String> appMasterEnv = new HashMap<String, String>();
    setupAppMasterEnv(appMasterEnv, _appJarFile);

    Map<String, LocalResource> localResources = new HashMap<String, LocalResource>();
    {
        LocalResource appMasterJar = Records.newRecord(LocalResource.class);
        setupAppMasterJar(jarPath, appMasterJar);
        localResources.put(jarPath.getName(), appMasterJar);
    }
    {
        LocalResource appMasterArgs = Records.newRecord(LocalResource.class);
        setupAppMasterArgs(argsPath, appMasterArgs);
        localResources.put(MASTER_JSON, appMasterArgs);
    }

    List<String> vargs = new ArrayList<String>();
    vargs.add(Environment.JAVA_HOME.$() + "/bin/java");
    vargs.add("-Xmx256m");
    vargs.add("-Djava.net.preferIPv4Stack=true");
    vargs.add(YarnRunnerApplicationMaster.class.getName());

    String strCommand = "(echo ENV && set && echo CURRENT_DIR_LISTING && ls -la && echo PWD && pwd && ("
            + StringUtils.join(" ", vargs) + "))";
    strCommand += " 1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout";
    strCommand += " 2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr";
    LOG.debug("Application Master command [" + strCommand + "]");

    amContainer.setCommands(Collections.singletonList(strCommand));
    amContainer.setLocalResources(localResources);
    amContainer.setEnvironment(appMasterEnv);

    Resource capability = Records.newRecord(Resource.class);
    capability.setMemory(256);
    capability.setVirtualCores(1);

    ApplicationSubmissionContext appContext = app.getApplicationSubmissionContext();
    appContext.setApplicationName(_yarnName);
    appContext.setAMContainerSpec(amContainer);
    appContext.setResource(capability);
    if (_queue != null) {
        appContext.setQueue(_queue);
    }
    appContext.setApplicationType("yrun");

    ApplicationId appId = appContext.getApplicationId();
    AtomicBoolean shutdown = new AtomicBoolean();
    if (!_isDaemon) {
        addShutdownHook(client, appId, shutdown);
    }

    LOG.info("Submitting application with id [" + appId + "]");
    client.submitApplication(appContext);
    ApplicationReport report;
    YarnApplicationState state;
    do {
        report = client.getApplicationReport(appId);
        state = report.getYarnApplicationState();
        if (state == YarnApplicationState.RUNNING) {
            if (_isDaemon) {
                LOG.info("Application is running.  This is a daemon application driver program exiting.");
                return;
            }
        }
        Thread.sleep(100);
    } while (isNoLongerRunning(state));
    shutdown.set(true);
    LOG.info("Application has finished with state [" + state + "]");
}

From source file:cn.keke.travelmix.publictransport.type.EfaConnectionResponseHandler.java

/**
 * Approximate difference in minutes between the times of two locations.
 *
 * @param loc1 the first (earlier) location
 * @param loc2 the second (later) location
 * @return the approximate difference in minutes, or 0 if either location has no time
 */
private int getLocationMinutesDiff(LocationPoint loc1, LocationPoint loc2) {
    if (loc1.hasTime() && loc2.hasTime()) {
        return (int) (TimeUnit.DAYS.toMinutes((loc2.getYearInt() - loc1.getYearInt()) * 365)
                + TimeUnit.DAYS.toMinutes((loc2.getMonthInt() - loc1.getMonthInt()) * 30)
                + TimeUnit.DAYS.toMinutes(loc2.getDayInt() - loc1.getDayInt())
                + TimeUnit.HOURS.toMinutes(loc2.getHourInt() - loc1.getHourInt()) + loc2.getMinuteInt()
                - loc1.getMinuteInt());

    }
    return 0;
}

From source file:org.ednovo.goorusearchwidget.HomeScreenActivity.java

@Override
public void onBackPressed() {

    // Take end time for Flurry
    timeInMill_endAppSession = System.currentTimeMillis();

    long time_spentInAppSession = timeInMill_endAppSession - timeInMill_startAppSession;

    String time_hms = String.format("%02d:%02d:%02d", TimeUnit.MILLISECONDS.toHours(time_spentInAppSession),
            TimeUnit.MILLISECONDS.toMinutes(time_spentInAppSession)
                    - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(time_spentInAppSession)),
            TimeUnit.MILLISECONDS.toSeconds(time_spentInAppSession)
                    - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(time_spentInAppSession)));

    Log.i("Flurry", "onStop");
    Log.i("TimeSpentInApp :", time_hms);

    Map<String, String> articleParams = new HashMap<String, String>();
    articleParams.put("SessionTime", time_hms); // Capture author info

    // Flurry : Session Time
    FlurryAgent.logEvent("AppSessionStart", articleParams);

    FlurryAgent.onEndSession(this);

    finish();

}
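
A side note on the formatting idiom above: the same HH:MM:SS breakdown can be written with modulo arithmetic, which some find easier to read. The following standalone sketch (not part of the source file above) shows the equivalent calculation.

import java.util.concurrent.TimeUnit;

public class ElapsedTimeFormat {

    static String format(long elapsedMillis) {
        long hours = TimeUnit.MILLISECONDS.toHours(elapsedMillis);
        long minutes = TimeUnit.MILLISECONDS.toMinutes(elapsedMillis) % TimeUnit.HOURS.toMinutes(1);
        long seconds = TimeUnit.MILLISECONDS.toSeconds(elapsedMillis) % TimeUnit.MINUTES.toSeconds(1);
        return String.format("%02d:%02d:%02d", hours, minutes, seconds);
    }

    public static void main(String[] args) {
        System.out.println(format(3725000L)); // 01:02:05
    }
}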

From source file:org.dcache.util.histograms.HistogramModelTest.java

@Test
public void rebuiltTimeframeHistogramShouldBeTheSameAsOriginal() throws NoSuchMethodException,
        InstantiationException, IllegalAccessException, InvocationTargetException {
    givenTimeframeHistogram();
    givenQueueCountValuesFor(48);
    givenBinUnitOf((double) TimeUnit.HOURS.toMillis(1));
    givenBinCountOf(48);
    givenBinLabelOf(TimeUnit.HOURS.name());
    givenDataLabelOf("COUNT");
    givenHistogramTypeOf("Queued Movers");
    givenHighestBinOf(getHoursInThePastFromNow(0));
    whenConfigureIsCalled();
    whenHistogramIsStored();
    givenTimeframeHistogram();
    whenConfigureIsCalled();
    assertThatOriginalHistogramEqualsStored();
}

From source file:uk.ac.cam.eng.extraction.hadoop.features.lexical.TTableServer.java

private void setup(Configuration conf, String direction, boolean source2Target)
        throws IOException, InterruptedException {
    int serverPort;
    if (source2Target) {
        serverPort = Integer.parseInt(conf.get(TTABLE_S2T_SERVER_PORT));
    } else {
        serverPort = Integer.parseInt(conf.get(TTABLE_T2S_SERVER_PORT));
    }
    minLexProb = Double.parseDouble(conf.get("min_lex_prob"));
    serverSocket = new ServerSocket(serverPort);
    String lexTemplate = conf.get(LEX_TABLE_TEMPLATE);
    String allString = lexTemplate.replace(GENRE, "ALL").replace(DIRECTION, direction);
    System.out.println("Loading " + allString);
    String[] provenances = conf.getStrings(ProvenanceCountMap.PROV);
    ExecutorService loaderThreadPool = Executors.newFixedThreadPool(4);
    model.put((byte) 0, new HashMap<Integer, Map<Integer, Double>>());
    loaderThreadPool.execute(new LoadTask(allString, (byte) 0));
    for (int i = 0; i < provenances.length; ++i) {
        String provString = lexTemplate.replace(GENRE, provenances[i]).replace(DIRECTION, direction);
        System.out.println("Loading " + provString);
        byte prov = (byte) (i + 1);
        model.put(prov, new HashMap<Integer, Map<Integer, Double>>());
        loaderThreadPool.execute(new LoadTask(provString, prov));
    }
    loaderThreadPool.shutdown();
    loaderThreadPool.awaitTermination(3, TimeUnit.HOURS);
    System.gc();
}

From source file:org.apache.synapse.transport.amqp.pollingtask.AMQPTransportPollingTaskFactory.java

private static TimeUnit getTimeUnit(String timeUnit) {

    if ("days".equals(timeUnit)) {
        return TimeUnit.DAYS;
    } else if ("hours".equals(timeUnit)) {
        return TimeUnit.HOURS;
    } else if ("minutes".equals(timeUnit)) {
        return TimeUnit.MINUTES;
    } else if ("seconds".equals(timeUnit)) {
        return TimeUnit.SECONDS;
    } else if ("milliseconds".equals(timeUnit)) {
        return TimeUnit.MILLISECONDS;
    } else {
        return TimeUnit.MICROSECONDS;
    }
}
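
A note on the design above: the explicit mapping accepts lowercase configuration values and silently falls back to MICROSECONDS for anything unrecognized. If the input is expected to match the enum constant names, a shorter alternative is TimeUnit.valueOf; the helper below is a hypothetical sketch, not part of the Synapse source, and throws IllegalArgumentException on unknown values instead of defaulting.

import java.util.Locale;
import java.util.concurrent.TimeUnit;

public final class TimeUnitParser {

    // Hypothetical helper: parses "hours", "seconds", etc. by delegating to the
    // enum itself; unknown values throw IllegalArgumentException rather than
    // silently defaulting to MICROSECONDS.
    public static TimeUnit parse(String timeUnit) {
        return TimeUnit.valueOf(timeUnit.trim().toUpperCase(Locale.ROOT));
    }
}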

From source file:com.linkedin.pinot.common.metadata.SegmentZKMetadataTest.java

private RealtimeSegmentZKMetadata getTestInProgressRealtimeSegmentMetadata() {
    RealtimeSegmentZKMetadata realtimeSegmentMetadata = new RealtimeSegmentZKMetadata();
    realtimeSegmentMetadata.setSegmentName("testTable_R_1000_groupId0_part0");
    realtimeSegmentMetadata.setTableName("testTable");
    realtimeSegmentMetadata.setSegmentType(SegmentType.REALTIME);
    realtimeSegmentMetadata.setIndexVersion("v1");
    realtimeSegmentMetadata.setStartTime(1000);
    realtimeSegmentMetadata.setEndTime(-1);
    realtimeSegmentMetadata.setTimeUnit(TimeUnit.HOURS);
    realtimeSegmentMetadata.setStatus(Status.IN_PROGRESS);
    realtimeSegmentMetadata.setTotalRawDocs(-1);
    realtimeSegmentMetadata.setCrc(-1);
    realtimeSegmentMetadata.setCreationTime(1000);
    realtimeSegmentMetadata.setSizeThresholdToFlushSegment(1234);
    return realtimeSegmentMetadata;
}