List of usage examples for com.google.common.base.Stopwatch.stop()
public Stopwatch stop()
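Before the project examples below, here is a minimal sketch of the stop() lifecycle (the sleep stands in for real work): a stopwatch is created running with Stopwatch.createStarted(), stopped with stop() (which returns the same instance, so calls can be chained), and read with elapsed(TimeUnit). Calling stop() on a stopwatch that is not running throws IllegalStateException.

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

public class StopwatchStopDemo {
    public static void main(String[] args) throws InterruptedException {
        Stopwatch stopwatch = Stopwatch.createStarted();

        Thread.sleep(100); // placeholder for the work being timed

        // stop() returns this, so stopping and reading can be chained
        long millis = stopwatch.stop().elapsed(TimeUnit.MILLISECONDS);
        System.out.println("took " + millis + " ms");

        // the elapsed time is frozen once stopped; calling stop() again
        // would throw IllegalStateException
        System.out.println(stopwatch); // toString() prints a readable duration, e.g. "100.3 ms"
    }
}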
From source file:qa.qcri.nadeef.core.pipeline.ViolationDetector.java
/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public Collection<Violation> execute(Optional emptyInput) throws Exception {
    detectCount = 0;
    totalThreadCount = 0;
    finishedThreadCount = 0;

    Rule rule = getCurrentContext().getRule();
    IteratorBlockingQueue iteratorBlockingQueue = new IteratorBlockingQueue();
    resultCollection.clear();

    List<Object> tupleList;
    Stopwatch stopwatch = Stopwatch.createStarted();
    List<ListenableFuture<Integer>> futures = Lists.newArrayList();
    while (true) {
        tupleList = iteratorBlockingQueue.poll();
        if (tupleList.size() == 0) {
            break;
        }
        totalThreadCount++;
        ListenableFuture<Integer> future = service.submit(new Detector(tupleList, rule));
        futures.add(future);
        Futures.addCallback(future, new DetectorCallback());
    }

    // wait until all the futures finished
    for (ListenableFuture<Integer> future : futures) {
        future.get();
    }

    PerfReport.appendMetric(PerfReport.Metric.DetectTimeOnly, stopwatch.elapsed(TimeUnit.MILLISECONDS));
    PerfReport.appendMetric(PerfReport.Metric.DetectCount, detectCount);
    PerfReport.appendMetric(PerfReport.Metric.DetectThreadCount, totalThreadCount);
    stopwatch.stop();
    return resultCollection;
}
From source file:cosmos.impl.CosmosImpl.java
@Override
public CloseableIterable<Entry<RecordValue<?>, Long>> groupResults(Store id, Column column)
        throws TableNotFoundException, UnexpectedStateException, UnindexedColumnException {
    checkNotNull(id);

    Stopwatch sw = new Stopwatch().start();
    final String description = "Cosmos:groupResults";
    try {
        State s = PersistedStores.getState(id);

        if (!State.LOADING.equals(s) && !State.LOADED.equals(s)) {
            sw.stop();
            throw unexpectedState(id, new State[] { State.LOADING, State.LOADED }, s);
        }

        checkNotNull(column);

        Text colf = new Text(column.name());

        BatchScanner bs = id.connector().createBatchScanner(id.dataTable(), id.auths(), id.readThreads());
        bs.setRanges(Collections.singleton(Range.prefix(id.uuid())));
        bs.fetchColumnFamily(colf);

        // Filter on cq-prefix to only look at the ordering we want
        IteratorSetting filter = new IteratorSetting(50, "cqFilter", OrderFilter.class);
        filter.addOption(OrderFilter.PREFIX, Order.FORWARD);
        bs.addScanIterator(filter);

        IteratorSetting cfg = new IteratorSetting(60, GroupByRowSuffixIterator.class);
        bs.addScanIterator(cfg);

        return CloseableIterable.transform(bs, new GroupByFunction(), id.tracer(), description, sw);
    } catch (TableNotFoundException e) {
        // In the exceptional case, stop the timer
        sw.stop();
        id.tracer().addTiming(description, sw.elapsed(TimeUnit.MILLISECONDS));
        throw e;
    } catch (UnexpectedStateException e) {
        // In the exceptional case, stop the timer
        sw.stop();
        id.tracer().addTiming(description, sw.elapsed(TimeUnit.MILLISECONDS));
        throw e;
    } catch (RuntimeException e) {
        // In the exceptional case, stop the timer
        sw.stop();
        id.tracer().addTiming(description, sw.elapsed(TimeUnit.MILLISECONDS));
        throw e;
    }
    // no finally as the trace is stopped by the CloseableIterable
}
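Note that this example uses new Stopwatch().start(), a public constructor that only older Guava releases expose; newer releases hide the constructors in favor of the static factories. A minimal sketch of the modern equivalent:

// Equivalent of "new Stopwatch().start()" on newer Guava versions,
// where the public constructors are no longer available:
Stopwatch sw = Stopwatch.createStarted();        // created already running
Stopwatch manual = Stopwatch.createUnstarted();  // created stopped, to be started later
manual.start();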
From source file:com.facebook.buck.parser.cache.impl.LocalCacheStorage.java
@Override
public void storeBuildFileManifest(HashCode weakFingerprint, HashCode strongFingerprint,
        byte[] serializedBuildFileManifest) throws IOException {
    Stopwatch timer = null;

    if (LOG.isVerboseEnabled()) {
        timer = Stopwatch.createStarted();
    }

    try {
        if (!isWriteAllowed()) {
            return;
        }

        Path weakFingerprintCachePath = getOrCreateWeakFingerprintFolder(weakFingerprint);

        Path cachedBuildFileManifestPath = weakFingerprintCachePath.resolve(strongFingerprint.toString());

        Path relativePathToRoot = cachedBuildFileManifestPath.isAbsolute()
                ? filesystem.getRootPath().relativize(cachedBuildFileManifestPath)
                : cachedBuildFileManifestPath;

        try (OutputStream fw = filesystem.newFileOutputStream(relativePathToRoot)) {
            fw.write(serializedBuildFileManifest);
        }
    } finally {
        if (timer != null) {
            LOG.verbose("Time to complete storeBuildFileManifest: %d ns.",
                    timer.stop().elapsed(TimeUnit.NANOSECONDS));
        }
    }
}
From source file:com.google.devtools.build.lib.buildtool.ExecutionTool.java
/**
 * Computes the result of the build. Sets the list of successful (up-to-date)
 * targets in the request object.
 *
 * @param configuredTargets The configured targets whose artifacts are to be built.
 * @param timer A timer that was started when the execution phase started.
 */
private Collection<ConfiguredTarget> determineSuccessfulTargets(
        Collection<ConfiguredTarget> configuredTargets, Set<ConfiguredTarget> builtTargets,
        Stopwatch timer) {
    // Maintain the ordering by copying builtTargets into a LinkedHashSet in the same iteration
    // order as configuredTargets.
    Collection<ConfiguredTarget> successfulTargets = new LinkedHashSet<>();
    for (ConfiguredTarget target : configuredTargets) {
        if (builtTargets.contains(target)) {
            successfulTargets.add(target);
        }
    }
    env.getEventBus().post(new ExecutionPhaseCompleteEvent(timer.stop().elapsed(MILLISECONDS)));
    return successfulTargets;
}
From source file:com.google.api.ads.adwords.awreporting.processors.onfile.ReportProcessorOnFile.java
/**
 * Generate all the mapped reports to the given account IDs.
 *
 * @param dateRangeType the date range type.
 * @param dateStart the starting date.
 * @param dateEnd the ending date.
 * @param accountIdsSet the account IDs.
 * @param properties the properties file
 * @throws Exception error reaching the API.
 */
@Override
public void generateReportsForMCC(String mccAccountId, ReportDefinitionDateRangeType dateRangeType,
        String dateStart, String dateEnd, Set<Long> accountIdsSet, Properties properties,
        ReportDefinitionReportType onDemandReportType, List<String> reportFieldsToInclude) throws Exception {

    LOGGER.info("*** Retrieving account IDs ***");

    if (accountIdsSet == null || accountIdsSet.size() == 0) {
        accountIdsSet = this.retrieveAccountIds(mccAccountId);
    } else {
        LOGGER.info("Accounts loaded from file.");
    }

    AdWordsSessionBuilderSynchronizer sessionBuilder = new AdWordsSessionBuilderSynchronizer(
            authenticator.authenticate(mccAccountId, false), getIncludeZeroImpressions(properties));

    LOGGER.info("*** Generating Reports for " + accountIdsSet.size() + " accounts ***");

    Stopwatch stopwatch = Stopwatch.createStarted();

    Set<ReportDefinitionReportType> reports = this.csvReportEntitiesMapping.getDefinedReports();

    // reports
    Set<Object> propertiesKeys = properties.keySet();
    for (Object key : propertiesKeys) {

        String reportDefinitionKey = key.toString();
        ReportDefinitionReportType reportType = this.extractReportTypeFromKey(reportDefinitionKey);
        if (reportType != null && reports.contains(reportType)) {
            this.downloadAndProcess(mccAccountId, sessionBuilder, reportType, dateRangeType, dateStart,
                    dateEnd, accountIdsSet, reportDefinitionKey, properties);
        }
    }

    this.multipleClientReportDownloader.finalizeExecutorService();

    stopwatch.stop();
    LOGGER.info("*** Finished processing all reports in "
            + (stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000) + " seconds ***\n");
}
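This example converts milliseconds to seconds by hand; elapsed() can report in any TimeUnit directly, with the same truncating semantics as the integer division. A sketch of the equivalent log line:

// Same report without the manual division; elapsed() truncates to whole units
LOGGER.info("*** Finished processing all reports in "
        + stopwatch.elapsed(TimeUnit.SECONDS) + " seconds ***\n");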
From source file:fr.ens.transcriptome.aozan.RunDataGenerator.java
/**
 * Collect data and return a RunData object.
 *
 * @return a RunData object with all data about the run
 * @throws AozanException if an error occurs while collecting data
 */
public RunData collect() throws AozanException {
    final RunData data = new RunData();

    if (this.properties.containsKey(COLLECT_DONE)) {
        throw new AozanException("Collect has been already done.");
    }

    if (!this.properties.containsKey(QC.RTA_OUTPUT_DIR)) {
        throw new AozanException("RTA output directory is not set.");
    }

    if (!this.properties.containsKey(QC.CASAVA_DESIGN_PATH)) {
        throw new AozanException("Casava design file path is not set.");
    }

    if (!this.properties.containsKey(QC.CASAVA_OUTPUT_DIR)) {
        throw new AozanException("Casava output directory is not set.");
    }

    if (!this.properties.containsKey(QC.QC_OUTPUT_DIR)) {
        throw new AozanException("QC output directory is not set.");
    }

    if (!this.properties.containsKey(QC.TMP_DIR)) {
        throw new AozanException("Temporary directory is not set.");
    }

    // Timer
    final Stopwatch timerGlobal = Stopwatch.createStarted();

    LOGGER.info("Step collector start");

    // For all collectors
    for (final Collector collector : this.collectors) {

        final Stopwatch timerCollector = Stopwatch.createStarted();
        LOGGER.info(collector.getName().toUpperCase() + " start");

        // Configure
        collector.configure(new Properties(this.properties));

        // And collect data
        collector.collect(data);

        LOGGER.info(collector.getName().toUpperCase() + " end in "
                + toTimeHumanReadable(timerCollector.elapsed(TimeUnit.MILLISECONDS)));
    }

    for (final Collector collector : this.collectors) {
        collector.clear();
    }

    LOGGER.info("Step collector end in "
            + toTimeHumanReadable(timerGlobal.elapsed(TimeUnit.MILLISECONDS)));
    timerGlobal.stop();

    this.properties.setProperty(COLLECT_DONE, "true");

    return data;
}
From source file:org.apache.eagle.alert.coordinator.trigger.CoordinatorTrigger.java
@Override
public void run() {
    if (Coordinator.isPeriodicallyForceBuildEnable()) {
        LOG.info("CoordinatorTrigger started ... ");

        Stopwatch watch = Stopwatch.createStarted();
        ZKConfig zkConfig = ZKConfigBuilder.getZKConfig(config);
        try (ExclusiveExecutor executor = new ExclusiveExecutor(zkConfig)) {
            executor.execute(Coordinator.GREEDY_SCHEDULER_ZK_PATH, () -> {
                // schedule
                IScheduleContext context = new ScheduleContextBuilder(config, client).buildContext();
                TopologyMgmtService mgmtService = new TopologyMgmtService();
                IPolicyScheduler scheduler = PolicySchedulerFactory.createScheduler();

                scheduler.init(context, mgmtService);
                ScheduleState state = scheduler.schedule(new ScheduleOption());

                // use try-with-resources so the AutoCloseable producer is closed automatically
                try (ConfigBusProducer producer = new ConfigBusProducer(ZKConfigBuilder.getZKConfig(config))) {
                    Coordinator.postSchedule(client, state, producer);
                }

                watch.stop();
                LOG.info("CoordinatorTrigger ended, used time {} ms.", watch.elapsed(TimeUnit.MILLISECONDS));
            });
        } catch (Exception e) {
            LOG.error("trigger schedule failed!", e);
        }
    } else {
        LOG.info("CoordinatorTrigger found isPeriodicallyForceBuildEnable = false, skipped build");
    }
}
From source file:qa.qcri.nadeef.core.utils.CSVTools.java
/**
 * Dumps CSV file content into a specified database. It replaces the table if the table
 * already exists.
 *
 * @param dbConfig JDBC connection config.
 * @param file CSV file.
 * @param dialectManager SQL dialect manager.
 * @param tableName newly created table name.
 * @param overwrite overwrites the existing table if it exists.
 *
 * @return newly created table name.
 */
public static String dump(final DBConfig dbConfig, final SQLDialectBase dialectManager, final File file,
        final String tableName, final String schema, final boolean overwrite) throws SQLException {
    Preconditions.checkNotNull(dbConfig);
    Preconditions.checkNotNull(dialectManager);
    Preconditions.checkArgument(!Strings.isNullOrEmpty(schema));

    Stopwatch stopwatch = Stopwatch.createStarted();
    String fullTableName = null;
    String sql;

    try (BufferedReader reader = new BufferedReader(new FileReader(file))) {
        // overwrites existing tables if necessary
        fullTableName = "TB_" + tableName;
        boolean hasTableExist = DBMetaDataTool.isTableExist(dbConfig, fullTableName);

        // Create table
        if (hasTableExist && !overwrite) {
            logger.info("Found table " + fullTableName + " exists and choose not to overwrite.");
            return fullTableName;
        } else {
            Statement stat = null;
            try (Connection conn = DBConnectionPool.createConnection(dbConfig, true)) {
                stat = conn.createStatement();
                if (hasTableExist && overwrite) {
                    sql = dialectManager.dropTable(fullTableName);
                    logger.fine(sql);
                    stat.execute(sql);
                }
                sql = dialectManager.createTableFromCSV(fullTableName, schema);
                logger.fine(sql);
                stat.execute(sql);
                logger.info("Successfully created table " + fullTableName);
            } finally {
                if (stat != null) {
                    stat.close();
                }
            }

            // load the data
            int size = 0;
            if (dialectManager.supportBulkLoad()) {
                size = dialectManager.bulkLoad(dbConfig, fullTableName, file.toPath(), true);
            } else {
                size = dialectManager.fallbackLoad(dbConfig, fullTableName, file, true);
            }
            logger.info("Dumped " + size + " bytes in " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms.");
            stopwatch.stop();
        }
    } catch (Exception ex) {
        logger.error("Cannot load file " + file.getName(), ex);
    }
    return fullTableName;
}
From source file:org.locationtech.geogig.repository.WorkingTree.java
/**
 * Inserts the given {@code features} into the working tree, using the {@code treePathResolver}
 * function to determine to which tree each feature is added.
 *
 * @param treePathResolver a function that determines the path of the tree where each feature
 *        node is stored
 * @param features the features to insert, possibly of different schemas and targeted to
 *        different tree paths
 * @param listener a progress listener
 * @param insertedTarget if provided, all nodes created will be added to this list. Beware of
 *        possible memory implications when inserting a lot of features.
 * @param collectionSize if given, used to determine progress and notify the {@code listener}
 */
public void insert(final Function<Feature, String> treePathResolver,
        Iterator<? extends Feature> features, final ProgressListener listener,
        @Nullable final List<Node> insertedTarget, @Nullable final Integer collectionSize) {

    checkArgument(collectionSize == null || collectionSize.intValue() > -1);

    final int nTreeThreads = Math.max(2, Runtime.getRuntime().availableProcessors() / 2);
    final ExecutorService treeBuildingService = Executors.newFixedThreadPool(nTreeThreads,
            new ThreadFactoryBuilder().setNameFormat("WorkingTree-tree-builder-%d").build());

    final WorkingTreeInsertHelper insertHelper;

    insertHelper = new WorkingTreeInsertHelper(context, getTree(), treePathResolver, treeBuildingService);

    UnmodifiableIterator<? extends Feature> filtered = Iterators.filter(features, new Predicate<Feature>() {
        @Override
        public boolean apply(Feature feature) {
            if (listener.isCanceled()) {
                return false;
            }
            if (feature instanceof FeatureToDelete) {
                insertHelper.remove((FeatureToDelete) feature);
                return false;
            } else {
                return true;
            }
        }
    });

    Iterator<RevObject> objects = Iterators.transform(filtered, new Function<Feature, RevObject>() {

        private int count;

        @Override
        public RevFeature apply(Feature feature) {
            final RevFeature revFeature = RevFeatureBuilder.build(feature);
            ObjectId id = revFeature.getId();
            final Node node = insertHelper.put(id, feature);

            if (insertedTarget != null) {
                insertedTarget.add(node);
            }

            count++;
            if (collectionSize == null) {
                listener.setProgress(count);
            } else {
                listener.setProgress((float) (count * 100) / collectionSize.intValue());
            }
            return revFeature;
        }
    });

    try {
        listener.started();

        indexDatabase.putAll(objects);
        if (listener.isCanceled()) {
            return;
        }
        listener.setDescription("Building trees for " + new TreeSet<String>(insertHelper.getTreeNames()));
        Stopwatch sw = Stopwatch.createStarted();

        Map<NodeRef, RevTree> trees = insertHelper.buildTrees();

        listener.setDescription(String.format("Trees built in %s", sw.stop()));

        for (Map.Entry<NodeRef, RevTree> treeEntry : trees.entrySet()) {
            if (!listener.isCanceled()) {
                NodeRef treeRef = treeEntry.getKey();
                RevTree newFeatureTree = treeEntry.getValue();

                String treePath = treeRef.path();

                ObjectId newRootTree = context.command(WriteBack.class).setAncestor(getTreeSupplier())
                        .setChildPath(treePath).setMetadataId(treeRef.getMetadataId())
                        .setTree(newFeatureTree).call();
                updateWorkHead(newRootTree);
            }
        }
        listener.complete();
    } finally {
        treeBuildingService.shutdownNow();
    }
}
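This example passes the stopped stopwatch straight into String.format("%s", sw.stop()), relying on Stopwatch.toString(), which renders the elapsed time in an automatically chosen human-readable unit. A minimal sketch of that pattern (buildTrees() stands in for the timed work):

Stopwatch sw = Stopwatch.createStarted();
buildTrees(); // placeholder for the work being timed
// toString() picks a readable unit automatically, e.g. "1.325 s" or "38.33 ms"
System.out.println(String.format("Trees built in %s", sw.stop()));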
From source file:org.geogig.osm.cli.commands.OSMHistoryImport.java
/**
 * @param primitive
 * @param thisChangePointCache
 * @return
 */
private Geometry parseGeometry(Context context, Primitive primitive,
        Map<Long, Coordinate> thisChangePointCache) {

    if (primitive instanceof Relation) {
        return null;
    }

    if (primitive instanceof Node) {
        Optional<Point> location = ((Node) primitive).getLocation();
        return location.orNull();
    }

    final Way way = (Way) primitive;
    final ImmutableList<Long> nodes = way.getNodes();

    List<Coordinate> coordinates = Lists.newArrayListWithCapacity(nodes.size());
    FindTreeChild findTreeChild = context.command(FindTreeChild.class);
    Optional<ObjectId> nodesTreeId = context.command(ResolveTreeish.class)
            .setTreeish(Ref.STAGE_HEAD + ":" + NODE_TYPE_NAME).call();
    if (nodesTreeId.isPresent()) {
        RevTree headTree = context.objectDatabase().getTree(nodesTreeId.get());
        findTreeChild.setParent(headTree);
    }
    int findTreeChildCalls = 0;
    Stopwatch findTreeChildSW = Stopwatch.createUnstarted();
    ObjectStore objectDatabase = context.objectDatabase();
    for (Long nodeId : nodes) {
        Coordinate coord = thisChangePointCache.get(nodeId);
        if (coord == null) {
            findTreeChildCalls++;
            String fid = String.valueOf(nodeId);
            findTreeChildSW.start();
            Optional<NodeRef> nodeRef = findTreeChild.setChildPath(fid).call();
            findTreeChildSW.stop();
            Optional<org.locationtech.geogig.model.Node> ref = Optional.absent();
            if (nodeRef.isPresent()) {
                ref = Optional.of(nodeRef.get().getNode());
            }

            if (ref.isPresent()) {
                final int locationAttIndex = 6;
                ObjectId objectId = ref.get().getObjectId();
                RevFeature revFeature = objectDatabase.getFeature(objectId);
                Point p = (Point) revFeature.get(locationAttIndex, GEOMF).orNull();
                if (p != null) {
                    coord = p.getCoordinate();
                    thisChangePointCache.put(Long.valueOf(nodeId), coord);
                }
            }
        }
        if (coord != null) {
            coordinates.add(coord);
        }
    }
    if (findTreeChildCalls > 0) {
        // System.err.printf("%,d findTreeChild calls (%s)\n", findTreeChildCalls, findTreeChildSW);
    }
    if (coordinates.size() < 2) {
        return null;
    }
    return GEOMF.createLineString(coordinates.toArray(new Coordinate[coordinates.size()]));
}
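This example illustrates the accumulating behavior of start()/stop(): findTreeChildSW is created unstarted and restarted around each lookup, so elapsed() reports the total across all start/stop cycles, not just the last one. A minimal sketch of that pattern (lookup() is a placeholder for each timed call):

Stopwatch total = Stopwatch.createUnstarted();
for (int i = 0; i < 3; i++) {
    total.start();
    lookup(i); // placeholder for each timed lookup
    total.stop();
}
// elapsed() is cumulative across all start/stop cycles
System.out.println("total lookup time: " + total.elapsed(TimeUnit.MICROSECONDS) + " us");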