Example usage for com.google.common.base Stopwatch Stopwatch

Introduction

On this page you can find example usage for the com.google.common.base Stopwatch constructor, Stopwatch().

Prototype

Stopwatch() 
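
Before the project examples below, here is a minimal sketch of this constructor's typical lifecycle: create the stopwatch, start it, do the work to be measured, stop it, and read the elapsed time. The sketch assumes an older Guava release in which the no-arg constructor is still public; in Guava 15.0 and later it is replaced by the Stopwatch.createStarted() and Stopwatch.createUnstarted() factory methods.

import com.google.common.base.Stopwatch;

import java.util.concurrent.TimeUnit;

public class StopwatchConstructorExample {
    public static void main(String[] args) throws InterruptedException {
        // Older Guava releases expose the no-arg constructor directly.
        Stopwatch watch = new Stopwatch();
        watch.start();

        // Simulated work to be measured.
        TimeUnit.MILLISECONDS.sleep(100);

        watch.stop();
        // elapsed(TimeUnit) reads the total; some of the examples below use the
        // older elapsedTime(TimeUnit)/elapsedMillis() variants instead.
        System.out.println("Took " + watch.elapsed(TimeUnit.MILLISECONDS) + " ms");
    }
}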

Usage

From source file:com.metamx.druid.utils.DruidMasterBalancerProfiler.java
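Here the stopwatch brackets a single balancer run: it is constructed at the top of the method, started immediately before tester.run(params), and the stopped watch is printed (its toString() reports the elapsed time).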

public void profileRun() {
    Stopwatch watch = new Stopwatch();
    LoadQueuePeonTester fromPeon = new LoadQueuePeonTester();
    LoadQueuePeonTester toPeon = new LoadQueuePeonTester();

    EasyMock.expect(druidServer1.getName()).andReturn("from").atLeastOnce();
    EasyMock.expect(druidServer1.getCurrSize()).andReturn(30L).atLeastOnce();
    EasyMock.expect(druidServer1.getMaxSize()).andReturn(100L).atLeastOnce();
    EasyMock.expect(druidServer1.getSegments()).andReturn(segments).anyTimes();
    EasyMock.expect(druidServer1.getSegment(EasyMock.<String>anyObject())).andReturn(null).anyTimes();
    EasyMock.replay(druidServer1);

    EasyMock.expect(druidServer2.getName()).andReturn("to").atLeastOnce();
    EasyMock.expect(druidServer2.getTier()).andReturn("normal").anyTimes();
    EasyMock.expect(druidServer2.getCurrSize()).andReturn(0L).atLeastOnce();
    EasyMock.expect(druidServer2.getMaxSize()).andReturn(100L).atLeastOnce();
    EasyMock.expect(druidServer2.getSegments()).andReturn(new HashMap<String, DataSegment>()).anyTimes();
    EasyMock.expect(druidServer2.getSegment(EasyMock.<String>anyObject())).andReturn(null).anyTimes();
    EasyMock.replay(druidServer2);

    master.moveSegment(EasyMock.<String>anyObject(), EasyMock.<String>anyObject(), EasyMock.<String>anyObject(),
            EasyMock.<LoadPeonCallback>anyObject());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.replay(master);

    DruidMasterRuntimeParams params = DruidMasterRuntimeParams.newBuilder()
            .withDruidCluster(
                    new DruidCluster(ImmutableMap.<String, MinMaxPriorityQueue<ServerHolder>>of("normal",
                            MinMaxPriorityQueue.orderedBy(DruidMasterBalancerTester.percentUsedComparator)
                                    .create(Arrays.asList(new ServerHolder(druidServer1, fromPeon),
                                            new ServerHolder(druidServer2, toPeon))))))
            .withLoadManagementPeons(ImmutableMap.<String, LoadQueuePeon>of("from", fromPeon, "to", toPeon))
            .withAvailableSegments(segments.values())
            .withMasterSegmentSettings(
                    new MasterSegmentSettings.Builder().withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE).build())
            .withBalancerReferenceTimestamp(new DateTime("2013-01-01")).build();
    DruidMasterBalancerTester tester = new DruidMasterBalancerTester(master);
    watch.start();
    DruidMasterRuntimeParams balanceParams = tester.run(params);
    System.out.println(watch.stop());
}

From source file:org.apache.drill.exec.server.Drillbit.java
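This constructor times its own initialization: the stopwatch is created and started in a single chained call, and the elapsed milliseconds are logged once construction of the Drillbit completes.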

public Drillbit(final DrillConfig config, final RemoteServiceSet serviceSet) throws Exception {
    final Stopwatch w = new Stopwatch().start();
    logger.debug("Construction started.");
    final boolean allowPortHunting = serviceSet != null;
    final boolean enableHttp = config.getBoolean(ExecConstants.HTTP_ENABLE);
    context = new BootStrapContext(config);
    manager = new WorkManager(context);
    engine = new ServiceEngine(manager.getControlMessageHandler(), manager.getUserWorker(), context,
            manager.getWorkBus(), manager.getDataHandler(), allowPortHunting);

    if (enableHttp) {
        embeddedJetty = new Server(config.getInt(ExecConstants.HTTP_PORT));
    } else {
        embeddedJetty = null;
    }

    if (serviceSet != null) {
        coord = serviceSet.getCoordinator();
        storeProvider = new CachingStoreProvider(new LocalPStoreProvider(config));
    } else {
        coord = new ZKClusterCoordinator(config);
        storeProvider = new PStoreRegistry(this.coord, config).newPStoreProvider();
    }
    logger.info("Construction completed ({} ms).", w.elapsed(TimeUnit.MILLISECONDS));
}

From source file:org.rhq.server.metrics.MetricsServer.java
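The stopwatch here is started up front and stopped in a finally block, so the duration of the summary-aggregate calculation is logged (at debug level) regardless of which branch returns.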

public AggregateNumericMetric getSummaryAggregate(int scheduleId, long beginTime, long endTime) {
    Stopwatch stopwatch = new Stopwatch().start();
    try {
        DateTime begin = new DateTime(beginTime);

        if (dateTimeService.isInRawDataRange(begin)) {
            Iterable<RawNumericMetric> metrics = dao.findRawMetrics(scheduleId, beginTime, endTime);
            return calculateAggregatedRaw(metrics, beginTime);
        }

        Bucket bucket = getBucket(begin);
        List<AggregateNumericMetric> metrics = dao.findAggregateMetrics(scheduleId, bucket, beginTime, endTime);
        return calculateAggregate(metrics, beginTime, bucket);
    } finally {
        stopwatch.stop();
        if (log.isDebugEnabled()) {
            log.debug("Finished calculating resource summary aggregate for [scheduleId: " + scheduleId
                    + ", beginTime: " + beginTime + ", endTime: " + endTime + "] in "
                    + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");
        }
    }
}

From source file:cosmos.store.PersistedStores.java
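This example passes a fresh, unstarted Stopwatch to CloseableIterable.transform() alongside a tracer and a description, presumably so the wrapper can time iteration over the scanned stores.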

public static CloseableIterable<Store> list(final Connector c, Authorizations auths, String metadataTable)
        throws TableNotFoundException {
    checkNotNull(c);
    checkNotNull(auths);
    checkNotNull(metadataTable);

    BatchScanner bs = c.createBatchScanner(metadataTable, auths, 10);
    bs.setRanges(Collections.singleton(new Range()));
    bs.fetchColumnFamily(SERIALIZED_STORE_COLFAM);

    return CloseableIterable.transform(bs, new Function<Entry<Key, Value>, Store>() {

        @Override
        public Store apply(Entry<Key, Value> input) {
            try {
                return deserialize(c, input.getValue());
            } catch (InvalidProtocolBufferException e) {
                throw new RuntimeException(e);
            }
        }

    }, NullTracer.instance(), "List Stores", new Stopwatch());
}

From source file:org.apache.twill.yarn.YarnTwillController.java
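During shutdown, a started stopwatch bounds the wait for the YARN application's final status: the polling loop sleeps one second per iteration and exits once the elapsed time exceeds the configured maximum stop time.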

@Override
protected synchronized void doShutDown() {
    if (processController == null) {
        LOG.warn("No process controller for application that is not submitted.");
        return;
    }

    // Stop polling if it is running.
    stopPollStatus();

    // Wait for the stop message being processed
    try {
        Uninterruptibles.getUninterruptibly(getStopMessageFuture(), Constants.APPLICATION_MAX_STOP_SECONDS,
                TimeUnit.SECONDS);
    } catch (Exception e) {
        LOG.error("Failed to wait for stop message being processed.", e);
        // Kill the application through yarn
        kill();
    }

    FinalApplicationStatus finalStatus;
    // Poll application status from yarn
    try (ProcessController<YarnApplicationReport> processController = this.processController) {
        Stopwatch stopWatch = new Stopwatch().start();
        long maxTime = TimeUnit.MILLISECONDS.convert(Constants.APPLICATION_MAX_STOP_SECONDS, TimeUnit.SECONDS);

        YarnApplicationReport report = processController.getReport();
        finalStatus = report.getFinalApplicationStatus();
        ApplicationId appId = report.getApplicationId();
        while (finalStatus == FinalApplicationStatus.UNDEFINED
                && stopWatch.elapsedTime(TimeUnit.MILLISECONDS) < maxTime) {
            LOG.debug("Yarn application final status for {} {}: {}", appName, appId, finalStatus);
            TimeUnit.SECONDS.sleep(1);
            finalStatus = processController.getReport().getFinalApplicationStatus();
        }

        // Application not finished after max stop time, kill the application
        if (finalStatus == FinalApplicationStatus.UNDEFINED) {
            kill();
            finalStatus = FinalApplicationStatus.KILLED;
        }
    } catch (Exception e) {
        LOG.warn("Exception while waiting for application report: {}", e.getMessage(), e);
        kill();
        finalStatus = FinalApplicationStatus.KILLED;
    }

    super.doShutDown();

    if (finalStatus == FinalApplicationStatus.FAILED) {
        // If we know the app status is failed, throw an exception to make this controller go into the error state.
        // All other final statuses are not treated as failures, as we can't be sure.
        setTerminationStatus(TerminationStatus.FAILED);
        throw new RuntimeException(
                String.format("Yarn application completed with failure %s, %s.", appName, getRunId()));
    }
    setTerminationStatus(finalStatus == FinalApplicationStatus.SUCCEEDED ? TerminationStatus.SUCCEEDED
            : TerminationStatus.KILLED);
}

From source file:processing.ActCalculator.java
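Two stopwatches are used in sequence: the first measures construction of the ActCalculator (the training phase), and a fresh one measures the loop over the test set; both elapsed times are appended to the timing report.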

private static List<Map<Integer, Double>> startActCreation(BookmarkReader reader, int sampleSize,
        boolean sorting, boolean userBased, boolean resBased, int dVal, int beta) {
    timeString = "";
    int size = reader.getUserLines().size();
    int trainSize = size - sampleSize;

    Stopwatch timer = new Stopwatch();
    timer.start();
    ActCalculator calculator = new ActCalculator(reader, trainSize, dVal, beta, userBased, resBased);
    timer.stop();
    long trainingTime = timer.elapsed(TimeUnit.MILLISECONDS);
    List<Map<Integer, Double>> results = new ArrayList<Map<Integer, Double>>();
    if (trainSize == size) {
        trainSize = 0;
    }

    timer = new Stopwatch();
    timer.start();
    for (int i = trainSize; i < size; i++) { // the test-set
        UserData data = reader.getUserLines().get(i);
        Map<Integer, Double> map = calculator.getRankedTagList(data.getUserID(), data.getWikiID(), sorting);
        results.add(map);
    }
    timer.stop();
    long testTime = timer.elapsed(TimeUnit.MILLISECONDS);
    timeString += ("Full training time: " + trainingTime + "\n");
    timeString += ("Full test time: " + testTime + "\n");
    timeString += ("Average test time: " + testTime / (double) sampleSize) + "\n";
    timeString += ("Total time: " + (trainingTime + testTime) + "\n");
    return results;
}

From source file:org.apache.hadoop.hbase.ScanPerformanceEvaluation.java
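Three stopwatches time separate phases of a snapshot scan: clearing the restore directory, opening the TableSnapshotScanner, and the scan loop itself; the scan timer is then used to derive bytes-, rows-, and cells-per-second throughput.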

public void testSnapshotScan() throws IOException {
    Stopwatch snapshotRestoreTimer = new Stopwatch();
    Stopwatch scanOpenTimer = new Stopwatch();
    Stopwatch scanTimer = new Stopwatch();

    Path restoreDir = new Path(this.restoreDir);

    snapshotRestoreTimer.start();
    restoreDir.getFileSystem(conf).delete(restoreDir, true);
    snapshotRestoreTimer.stop();

    Scan scan = getScan();
    scanOpenTimer.start();
    TableSnapshotScanner scanner = new TableSnapshotScanner(conf, restoreDir, snapshotName, scan);
    scanOpenTimer.stop();

    long numRows = 0;
    long numCells = 0;
    scanTimer.start();
    while (true) {
        Result result = scanner.next();
        if (result == null) {
            break;
        }
        numRows++;

        numCells += result.rawCells().length;
    }
    scanTimer.stop();
    scanner.close();

    ScanMetrics metrics = scanner.getScanMetrics();
    long totalBytes = metrics.countOfBytesInResults.get();
    double throughput = (double) totalBytes / scanTimer.elapsedTime(TimeUnit.SECONDS);
    double throughputRows = (double) numRows / scanTimer.elapsedTime(TimeUnit.SECONDS);
    double throughputCells = (double) numCells / scanTimer.elapsedTime(TimeUnit.SECONDS);

    System.out.println("HBase scan snapshot: ");
    System.out.println("total time to restore snapshot: " + snapshotRestoreTimer.elapsedMillis() + " ms");
    System.out.println("total time to open scanner: " + scanOpenTimer.elapsedMillis() + " ms");
    System.out.println("total time to scan: " + scanTimer.elapsedMillis() + " ms");

    System.out.println("Scan metrics:\n" + metrics.getMetricsMap());

    System.out.println(
            "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
    System.out.println("total rows  : " + numRows);
    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
    System.out.println("total cells : " + numCells);
    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");

}

From source file:com.Grande.GSM.BACCWS_WAR.WS.REST.EOS.BACCAdminEndpoint.java
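In this JAX-RS endpoint the stopwatch measures the whole request: it is started before the EJB call and stopped in the finally block, where the elapsed time is recorded in the query response before it is serialized to JSON.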

@Path("/SystemProperties")
@GET
public String fetchSystemProperties() {

    // <editor-fold defaultstate="collapsed" desc="****** Method vars ******">
    final Stopwatch timer = new Stopwatch();
    final QueryResponse qRes = new QueryResponse();
    String strResponse = null;
    Map mapResult = null;
    // start the execution timer
    timer.start();
    // </editor-fold>

    try {

        qRes.vSetNode(java.net.InetAddress.getLocalHost().getHostName());
        mapResult = this.bacEJB.lstFetchSystemProperties();
        qRes.vSetSuccessFlag(true);
        qRes.vAddResult(mapResult);

    } catch (Exception e) {

        // <editor-fold defaultstate="collapsed" desc="****** Handle failures ******">
        qRes.vSetSuccessFlag(false);
        // handle NPE differently since getMessage() is null
        if (e instanceof NullPointerException) {
            qRes.vSetMessage("NPE occured when serializing result to JSON! " + "File: "
                    + e.getStackTrace()[0].getFileName() + ", " + "Method: "
                    + e.getStackTrace()[0].getMethodName() + ", " + "Line: "
                    + e.getStackTrace()[0].getLineNumber());
        } else {
            qRes.vSetMessage(e.getMessage());
        }
        SimpleLogging.vLogException(this.strThreadId, e);
        // </editor-fold>

    } finally {

        // <editor-fold defaultstate="collapsed" desc="****** Stop timer, convert response to JSON ******">
        timer.stop();
        qRes.vSetRoundTrip(String.valueOf(timer.elapsedTime(TimeUnit.SECONDS)) + "."
                + String.valueOf(timer.elapsedTime(TimeUnit.MILLISECONDS)));
        strResponse = this.trnBN.strQueryResponseToJSON(qRes);
        SimpleLogging.vLogEvent(this.strThreadId + "|" + qRes.strGetRoundTripInSeconds() + "s",
                "retrieved " + qRes.intGetDataCount() + " records");
        // </editor-fold>

    }
    return strResponse;
}

From source file:org.apache.drill.exec.store.schedule.AssignmentCreator.java
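This snippet only starts the stopwatch before building the sorted work list; the elapsed time is not read within the method shown here.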

/**
 * Builds the list of WorkEndpointListPairs, which pair a work unit with a list of endpoints sorted by affinity
 * @return the list of WorkEndpointListPairs
 */
private LinkedList<WorkEndpointListPair<T>> getWorkList() {
    Stopwatch watch = new Stopwatch();
    watch.start();
    LinkedList<WorkEndpointListPair<T>> workList = Lists.newLinkedList();
    for (T work : units) {
        List<Map.Entry<DrillbitEndpoint, Long>> entries = Lists.newArrayList();
        for (ObjectLongCursor<DrillbitEndpoint> cursor : work.getByteMap()) {
            final DrillbitEndpoint ep = cursor.key;
            final Long val = cursor.value;
            Map.Entry<DrillbitEndpoint, Long> entry = new Entry() {

                @Override
                public Object getKey() {
                    return ep;
                }

                @Override
                public Object getValue() {
                    return val;
                }

                @Override
                public Object setValue(Object value) {
                    throw new UnsupportedOperationException();
                }
            };
            entries.add(entry);
        }
        Collections.sort(entries, comparator);
        List<DrillbitEndpoint> sortedEndpoints = Lists.newArrayList();
        for (Entry<DrillbitEndpoint, Long> entry : entries) {
            sortedEndpoints.add(entry.getKey());
        }
        workList.add(new WorkEndpointListPair<T>(work, sortedEndpoints));
    }
    return workList;
}

From source file:co.cask.cdap.gateway.router.handlers.SecurityAuthenticationHttpHandler.java
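The stopwatch here acts as a timeout: the loop keeps polling the discovered authentication endpoints, sleeping 200 ms between attempts, until a URL is found or two seconds have elapsed.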

/**
 *
 * @param externalAuthenticationURIs the list that should be populated with the discovered
 *                                   external authentication server URIs
 * @throws Exception
 */
private void stopWatchWait(JsonArray externalAuthenticationURIs) throws Exception {
    boolean done = false;
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.start();
    String protocol;
    int port;
    if (configuration.getBoolean(Constants.Security.SSL_ENABLED)) {
        protocol = "https";
        port = configuration.getInt(Constants.Security.AuthenticationServer.SSL_PORT);
    } else {
        protocol = "http";
        port = configuration.getInt(Constants.Security.AUTH_SERVER_BIND_PORT);
    }

    do {
        for (Discoverable d : discoverables) {
            String url = String.format("%s://%s:%d/%s", protocol, d.getSocketAddress().getHostName(), port,
                    GrantAccessToken.Paths.GET_TOKEN);
            externalAuthenticationURIs.add(new JsonPrimitive(url));
            done = true;
        }
        if (!done) {
            TimeUnit.MILLISECONDS.sleep(200);
        }
    } while (!done && stopwatch.elapsedTime(TimeUnit.SECONDS) < 2L);
}