Example usage for com.google.common.base Stopwatch Stopwatch

Introduction

On this page you can find example usage of the com.google.common.base Stopwatch constructor, Stopwatch().

Prototype

Stopwatch() 
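
The no-argument constructor creates the stopwatch in an unstarted state, which is why every example below pairs it with start(). Later Guava releases deprecated and then removed this constructor and elapsedMillis(), in favor of the static factories Stopwatch.createStarted()/Stopwatch.createUnstarted() and elapsed(TimeUnit). Below is a minimal sketch of the pattern the examples share; the first half assumes an older Guava release, and doWork() is a hypothetical placeholder for whatever is being timed.

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

class StopwatchSketch {
    static void timeWork() {
        // Pattern used by the examples on this page (older Guava releases):
        Stopwatch stopwatch = new Stopwatch(); // created unstarted
        stopwatch.start();
        doWork();                              // hypothetical workload
        stopwatch.stop();
        long millis = stopwatch.elapsedMillis(); // removed in later releases

        // Equivalent with the current Guava API; stop() returns the
        // stopwatch, so the calls can be chained:
        Stopwatch sw = Stopwatch.createStarted();
        doWork();
        long ms = sw.stop().elapsed(TimeUnit.MILLISECONDS);
    }

    static void doWork() { /* hypothetical workload being timed */ }
}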

Usage

From source file:com.google.android.marvin.utils.TraceAspect.java
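
An AspectJ @Around advice that times every matched method or constructor: the stopwatch is started before joinPoint.proceed() and the elapsed milliseconds are logged once the join point returns, with callLevel tracking nesting depth.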

@Around("methodAnnotatedWithDebugTrace() " + "|| talkbackAllMethods()  "
        + "||  constructorAnnotatedDebugTrace()")
public Object weaveJoinPoint(ProceedingJoinPoint joinPoint) throws Throwable {
    final Stopwatch stopWatch = new Stopwatch();
    stopWatch.start();
    callLevel++;
    Object result = joinPoint.proceed();
    stopWatch.stop();
    log(joinPoint, stopWatch.elapsedMillis());
    callLevel--;
    return result;
}

From source file:org.jclouds.compute.functions.PollNodeRunning.java
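
A jclouds polling helper that waits for a node to reach the running status, using the stopwatch to report how many seconds were spent waiting when the poll fails.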

/**
 * @param node will be updated with the node which is running
 * @throws IllegalStateException if this cannot be achieved within
 *         the timeout.
 */
@Override
public AtomicReference<NodeMetadata> apply(AtomicReference<NodeMetadata> node) throws IllegalStateException {
    String originalId = node.get().getId();
    NodeMetadata originalNode = node.get();
    try {
        Stopwatch stopwatch = new Stopwatch().start();
        if (!nodeRunning.apply(node)) {
            long timeWaited = stopwatch.elapsedMillis();
            if (node.get() == null) {
                node.set(originalNode);
                throw new IllegalStateException(format("api response for node(%s) was null", originalId));
            } else {
                throw new IllegalStateException(format(
                        "node(%s) didn't achieve the status running; aborting after %d seconds with final status: %s",
                        originalId, timeWaited / 1000, formatStatus(node.get())));
            }
        }
    } catch (IllegalStateException e) {
        if (node.get().getStatus() == Status.TERMINATED) {
            throw new IllegalStateException(format("node(%s) terminated", originalId));
        } else {
            throw propagate(e);
        }
    }
    return node;
}

From source file:org.geoserver.jdbcconfig.JDBCGeoServerLoader.java
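
Times the JDBC-backed catalog load in GeoServer, falling back to the default loader when the JDBC configuration is disabled.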

@Override
protected void loadCatalog(Catalog catalog, XStreamPersister xp) throws Exception {
    if (!config.isEnabled()) {
        super.loadCatalog(catalog, xp);
        return;
    }

    Stopwatch sw = new Stopwatch().start();
    loadCatalogInternal(catalog, xp);
    sw.stop();
    //System.err.println("Loaded catalog in " + sw.toString());
}

From source file:org.jetbrains.android.inspections.lint.DomPsiConverter.java
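
Converts an IntelliJ PSI XmlFile into a W3C DOM tree, starting a stopwatch only when the BENCHMARK flag is set so the conversion time can be printed.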

/**
 * Convert the given {@link XmlFile} to a DOM tree
 *
 * @param xmlFile the file to be converted
 * @return a corresponding W3C DOM tree
 */
@Nullable
public static Document convert(@NotNull XmlFile xmlFile) {
    try {
        XmlDocument xmlDocument = xmlFile.getDocument();
        if (xmlDocument == null) {
            return null;
        }

        @SuppressWarnings("UnusedAssignment")
        Stopwatch timer;
        if (BENCHMARK) {
            timer = new Stopwatch();
            timer.start();
        }

        Document document = convert(xmlDocument);

        if (BENCHMARK) {
            timer.stop();
            //noinspection UseOfSystemOutOrSystemErr
            System.out.println("Creating PSI for " + xmlFile.getName() + " took " + timer.elapsedMillis()
                    + "ms (" + timer.toString() + ")");
        }

        return document;
    } catch (Exception e) {
        String path = xmlFile.getName();
        VirtualFile virtualFile = xmlFile.getVirtualFile();
        if (virtualFile != null) {
            path = virtualFile.getPath();
        }
        throw new RuntimeException("Could not convert file " + path, e);
    }
}

From source file:org.apache.hadoop.mapreduce.lib.input.CSVFileInputFormat.java
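
A Hadoop InputFormat whose getSplits() aligns split boundaries to CSV rows; the stopwatch measures the whole split computation, which is logged at debug level.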

/**
 * Very similar to the FileInputFormat implementation but makes sure to split on a CSV row boundary. Since we have
 * to parse the entire file, expect this function to be slow for huge files
 *
 * @param job the job context
 * @return the file splits
 * @throws IOException
 */
@Override
public List<InputSplit> getSplits(JobContext job) throws IOException {
    Stopwatch sw = new Stopwatch().start();
    long minSize = Math.max(getFormatMinSplitSize(), getMinSplitSize(job));
    long maxSize = getMaxSplitSize(job);

    // generate splits
    List<InputSplit> splits = new ArrayList<InputSplit>();
    List<FileStatus> files = listStatus(job);
    Text row = new Text();
    for (FileStatus file : files) {
        Path path = file.getPath();
        long length = file.getLen();
        if (length != 0) {
            BlockLocation[] blkLocations;
            FileSystem fs = path.getFileSystem(job.getConfiguration());
            if (file instanceof LocatedFileStatus) {
                blkLocations = ((LocatedFileStatus) file).getBlockLocations();
            } else {
                blkLocations = fs.getFileBlockLocations(file, 0, length);
            }
            if (isSplitable(job, path)) {
                // NOTE: using the CSVRawLineRecordReader instead of CSVLineRecordReader saw performance
                // gains of ~25%
                CSVRawLineRecordReader reader = new CSVRawLineRecordReader(
                        new FSDataInputStream(fs.open(file.getPath())), job.getConfiguration());
                long blockSize = file.getBlockSize();
                long splitSize = computeSplitSize(blockSize, minSize, maxSize);

                long bytesRemaining = length, startPos = 0;
                while (((double) bytesRemaining) / splitSize > SPLIT_SLOP) {
                    long bytesRead = 0, bytesInLine;
                    // read CSV lines until we are at least split size large
                    while (bytesRead < splitSize) {
                        bytesInLine = reader.readLine(row);
                        // if we read 0 bytes, we have hit EOF
                        if (bytesInLine <= 0) {
                            // NOTE: I don't think we should ever reach EOF actually; because of SPLIT_SLOP
                            //       it won't break anything and I would rather program defensively
                            LOG.debug("Reached EOF while splitting; this is unexpected");
                            break;
                        }
                        bytesRead += bytesInLine;
                    }
                    // if we read 0 bytes, we have hit EOF
                    if (bytesRead <= 0) {
                        // NOTE: I don't think we should ever reach EOF actually; because of SPLIT_SLOP
                        //       it won't break anything and I would rather program defensively
                        LOG.debug("Reached EOF while splitting; this is unexpected");
                        break;
                    }
                    int blkIndex = getBlockIndex(blkLocations, startPos);
                    splits.add(makeSplit(path, startPos, bytesRead, blkLocations[blkIndex].getHosts(),
                            blkLocations[blkIndex].getCachedHosts()));
                    // increment start position by the number of bytes we have read
                    startPos += bytesRead;
                    bytesRemaining -= bytesRead;
                }
                if (bytesRemaining != 0) {
                    int blkIndex = getBlockIndex(blkLocations, length - bytesRemaining);
                    splits.add(makeSplit(path, length - bytesRemaining, bytesRemaining,
                            blkLocations[blkIndex].getHosts(), blkLocations[blkIndex].getCachedHosts()));
                }
                reader.close();
            } else { // not splitable
                splits.add(makeSplit(path, 0, length, blkLocations[0].getHosts(),
                        blkLocations[0].getCachedHosts()));
            }
        } else {
            // create empty hosts array for zero length files
            splits.add(makeSplit(path, 0, length, new String[0]));
        }
    }

    // Save the number of input files for metrics/loadgen
    job.getConfiguration().setLong(NUM_INPUT_FILES, files.size());
    sw.stop();
    if (LOG.isDebugEnabled()) {
        LOG.debug("Total # of splits generated by getSplits: " + splits.size() + ", TimeTaken: "
                + sw.elapsed(MILLISECONDS));
    }

    return splits;
}

From source file:com.flipkart.foxtrot.core.datastore.impl.hbase.HBaseDataStore.java
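
Stores a document in HBase, timing the put so its duration can be logged per table.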

@Override
public void save(final Table table, Document document) throws DataStoreException {
    if (document == null || document.getData() == null || document.getId() == null) {
        throw new DataStoreException(DataStoreException.ErrorCode.STORE_INVALID_REQUEST, "Invalid Document");
    }
    HTableInterface hTable = null;
    try {
        hTable = tableWrapper.getTable(table);
        Stopwatch stopwatch = new Stopwatch();
        stopwatch.start();
        hTable.put(getPutForDocument(table, document));
        logger.error(String.format("HBASE put took : %d table : %s", stopwatch.elapsedMillis(), table));
    } catch (JsonProcessingException e) {
        throw new DataStoreException(DataStoreException.ErrorCode.STORE_INVALID_REQUEST, e.getMessage(), e);
    } catch (IOException e) {
        throw new DataStoreException(DataStoreException.ErrorCode.STORE_SINGLE_SAVE, e.getMessage(), e);
    } catch (Exception e) {
        throw new DataStoreException(DataStoreException.ErrorCode.STORE_SINGLE_SAVE, e.getMessage(), e);
    } finally {
        if (null != hTable) {
            try {
                hTable.close();
            } catch (IOException e) {
                logger.error("Error closing table: ", e);
            }
        }
    }
}

From source file:demos.AsynchronousRead.java
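
Issues NUM_METRICS asynchronous Cassandra queries, waits for all callbacks on a latch, and uses the stopwatch to report the total read time.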

@Override
public void run() {
    try {
        logger.info("Preparing to read data points");

        Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
        Session session = cluster.connect("demo");
        PreparedStatement query = session.prepare(
                "SELECT metric_id, time, value FROM metric_data WHERE metric_id = ? AND time >= ? AND time <= ?");
        DateTime end = DateTime.now();
        DateTime start = end.minusYears(1);
        final CountDownLatch latch = new CountDownLatch(NUM_METRICS);
        final Set<DataPoint> dataPoints = new ConcurrentSkipListSet<>(
                comparing(DataPoint::getTimestamp).thenComparing(DataPoint::getValue));

        Stopwatch stopwatch = new Stopwatch().start();
        for (int i = 0; i < NUM_METRICS; ++i) {
            ResultSetFuture queryFuture = session
                    .executeAsync(query.bind("metric-" + i, start.toDate(), end.toDate()));
            ListenableFuture<List<DataPoint>> dataFuture = Futures.transform(queryFuture,
                    (ResultSet resultSet) -> StreamSupport.stream(resultSet.spliterator(), false)
                            .map(row -> new DataPoint(row.getString(0), row.getDate(1), row.getDouble(2)))
                            .collect(toList()));
            Futures.addCallback(dataFuture, new FutureCallback<List<DataPoint>>() {
                @Override
                public void onSuccess(List<DataPoint> results) {
                    dataPoints.addAll(results);
                    latch.countDown();
                }

                @Override
                public void onFailure(Throwable t) {
                    logger.warn("There was an error reading data", t);
                    latch.countDown();
                }
            });
        }
        latch.await();
        stopwatch.stop();

        logger.info("Retrieved {} data points in {} ms", dataPoints.size(),
                stopwatch.elapsed(TimeUnit.MILLISECONDS));
    } catch (InterruptedException e) {
        logger.info("There was an interrupt while waiting for inserts to complete");
    }
}

From source file:demos.AsynchronousInsert.java
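
The companion write demo: fires NUM_INSERTS asynchronous Cassandra inserts, waits on a latch, and reports the total insert time from the stopwatch.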

@Override
public void run() {
    try {
        logger.info("Preparing to insert metric data points");

        Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
        Session session = cluster.connect("demo");
        PreparedStatement insert = session
                .prepare("insert into metric_data (metric_id, time, value) values (?, ?, ?)");
        Random random = new Random();
        DateTime time = DateTime.now().minusYears(1);
        final CountDownLatch latch = new CountDownLatch(NUM_INSERTS);

        FutureCallback<ResultSet> callback = new FutureCallback<ResultSet>() {
            @Override
            public void onSuccess(ResultSet result) {
                latch.countDown();
            }

            @Override
            public void onFailure(Throwable t) {
                logger.warn("There was an error inserting data", t);
                latch.countDown();
            }
        };

        Stopwatch stopwatch = new Stopwatch().start();
        for (int i = 0; i < NUM_INSERTS; ++i) {
            String metricId = "metric-" + Math.abs(random.nextInt() % NUM_METRICS);
            double value = random.nextDouble();
            ResultSetFuture future = session.executeAsync(insert.bind(metricId, time.toDate(), value));
            time = time.plusSeconds(10);
            Futures.addCallback(future, callback);
        }
        latch.await();
        stopwatch.stop();

        logger.info("Finished inserting {} data points in {} ms", NUM_INSERTS,
                stopwatch.elapsed(TimeUnit.MILLISECONDS));
    } catch (InterruptedException e) {
        logger.info("There was an interrupt while waiting for inserts to complete");
    }
}

From source file:org.openqa.selenium.internal.selenesedriver.ScriptExecutor.java
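
Times a Selenium getEval() call so the elapsed milliseconds can be passed to the response error handler.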

@SuppressWarnings({ "unchecked" })
private <T> T evaluateScript(String script) {
    Stopwatch stopWatch = new Stopwatch();
    stopWatch.start();
    String result = selenium.getEval(script);
    stopWatch.stop();

    Response response = new JsonToBeanConverter().convert(Response.class, result);
    new ErrorHandler().throwIfResponseFailed(response, stopWatch.elapsed(MILLISECONDS));
    return (T) response.getValue();
}

From source file:org.apache.drill.common.util.PathScanner.java
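
Scans the classpath for concrete implementations of a base class in Apache Drill, logging the scan duration from a finally block so it is reported even on failure.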

/**
 * @param  scanPackages  note:  not currently used
 */
public static <T> Set<Class<? extends T>> scanForImplementations(final Class<T> baseClass,
        final List<String> scanPackages) {
    final Stopwatch w = new Stopwatch().start();
    try {
        synchronized (SYNC) {
            final Set<Class<? extends T>> classes = getReflections().getSubTypesOf(baseClass);
            for (Iterator<Class<? extends T>> i = classes.iterator(); i.hasNext();) {
                final Class<? extends T> c = i.next();
                assert baseClass.isAssignableFrom(c);
                if (Modifier.isAbstract(c.getModifiers())) {
                    i.remove();
                }
            }
            return classes;
        }
    } finally {
        logger.debug("Implementations scanning took {} ms for {}.", w.elapsed(TimeUnit.MILLISECONDS),
                baseClass); // no .getName(), so it has "class "/"interface "
    }
}