List of usage examples for com.google.common.base Stopwatch createStarted
@CheckReturnValue public static Stopwatch createStarted()
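Before the project examples, here is a minimal, self-contained sketch of the basic pattern (not taken from any project below; the class name and sleep are illustrative stand-ins for real work):

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

public class StopwatchDemo {
    public static void main(String[] args) throws InterruptedException {
        // createStarted() returns a Stopwatch that is already running,
        // so no separate call to start() is needed.
        Stopwatch stopwatch = Stopwatch.createStarted();
        Thread.sleep(50); // stand-in for the work being timed
        stopwatch.stop();
        // elapsed(TimeUnit) converts the measured duration to the requested unit.
        System.out.println("took " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");
    }
}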
From source file: io.takari.maven.plugins.compile.ProjectClasspathDigester.java

private boolean digest(String key, List<File> dependencies) {
    Stopwatch stopwatch = Stopwatch.createStarted();

    Map<File, ArtifactFile> previousArtifacts = getPreviousDependencies(key);
    LinkedHashMap<File, ArtifactFile> digest = new LinkedHashMap<>();

    if (dependencies != null) {
        for (final File dependency : dependencies) {
            File normalized = normalize(dependency);
            ArtifactFile previousArtifact = previousArtifacts.get(normalized);
            ArtifactFile artifact = CACHE.get(normalized);
            if (artifact == null) {
                if (normalized.isFile()) {
                    artifact = newFileArtifact(normalized, previousArtifact);
                } else if (normalized.isDirectory()) {
                    artifact = newDirectoryArtifact(normalized, previousArtifact);
                } else {
                    // happens with reactor dependencies with empty source folders
                    continue;
                }
                CACHE.put(normalized, artifact);
            }
            digest.put(normalized, artifact);
            if (!equals(artifact, previousArtifact)) {
                log.debug("New or changed classpath entry {}", normalized);
            }
        }
    }

    for (File previousDependency : previousArtifacts.keySet()) {
        if (!digest.containsKey(previousDependency)) {
            log.debug("Removed classpath entry {}", previousDependency);
        }
    }

    boolean changed = !equals(digest.values(), previousArtifacts.values());
    context.setAttribute(key, new ArrayList<>(digest.values()));
    log.debug("Analyzed {} classpath dependencies ({} ms)",
            dependencies == null ? 0 : dependencies.size(),
            stopwatch.elapsed(TimeUnit.MILLISECONDS));
    return changed;
}
From source file: edu.anu.spice.SpiceScorer.java

public void scoreBatch(SpiceArguments args) throws IOException, ScriptException {
    Stopwatch timer = Stopwatch.createStarted();
    SpiceParser parser = new SpiceParser(args.cache, args.numThreads, args.synsets);

    // Build filters for tuple categories
    Map<String, TupleFilter> filters = new HashMap<String, TupleFilter>();
    if (args.tupleSubsets) {
        filters.put("Object", TupleFilter.objectFilter);
        filters.put("Attribute", TupleFilter.attributeFilter);
        filters.put("Relation", TupleFilter.relationFilter);
        filters.put("Cardinality", TupleFilter.cardinalityFilter);
        filters.put("Color", TupleFilter.colorFilter);
        filters.put("Size", TupleFilter.sizeFilter);
    }

    // Parse test and refs from input file
    ArrayList<Object> image_ids = new ArrayList<Object>();
    ArrayList<String> testCaptions = new ArrayList<String>();
    ArrayList<String> refCaptions = new ArrayList<String>();
    ArrayList<Integer> refChunks = new ArrayList<Integer>();
    JSONParser json = new JSONParser();
    JSONArray input;
    try {
        input = (JSONArray) json.parse(new FileReader(args.inputPath));
        for (Object o : input) {
            JSONObject item = (JSONObject) o;
            image_ids.add(item.get("image_id"));
            testCaptions.add((String) item.get("test"));
            JSONArray refs = (JSONArray) item.get("refs");
            refChunks.add(refs.size());
            for (Object ref : refs) {
                refCaptions.add((String) ref);
            }
        }
    } catch (ParseException e) {
        System.err.println("Could not read input: " + args.inputPath);
        System.err.println(e.toString());
        e.printStackTrace();
    }

    System.err.println("Parsing reference captions");
    List<SceneGraph> refSgs = parser.parseCaptions(refCaptions, refChunks);
    System.err.println("Parsing test captions");
    List<SceneGraph> testSgs = parser.parseCaptions(testCaptions);

    this.stats = new SpiceStats(filters, args.detailed);
    for (int i = 0; i < testSgs.size(); ++i) {
        this.stats.score(image_ids.get(i), testSgs.get(i), refSgs.get(i), args.synsets);
    }
    if (!args.silent) {
        System.out.println(this.stats.toString());
    }

    if (args.outputPath != null) {
        BufferedWriter outputWriter = new BufferedWriter(new FileWriter(args.outputPath));
        // Pretty print output using javascript
        String jsonStringNoWhitespace = this.stats.toJSONString();
        ScriptEngineManager manager = new ScriptEngineManager();
        ScriptEngine scriptEngine = manager.getEngineByName("JavaScript");
        scriptEngine.put("jsonString", jsonStringNoWhitespace);
        scriptEngine.eval("result = JSON.stringify(JSON.parse(jsonString), null, 2)");
        String prettyPrintedJson = (String) scriptEngine.get("result");
        outputWriter.write(prettyPrintedJson);
        outputWriter.close();
    }
    System.out.println("SPICE evaluation took: " + timer.stop());
}
From source file: ch.icclab.cyclops.health.HealthStatus.java

private HealthStatus() {
    this.healthy = true;
    this.reason = null;
    this.watch = Stopwatch.createStarted();
}
From source file: org.smartdeveloperhub.harvesters.it.frontend.publisher.PublisherTask.java

@Override
public final Boolean call() {
    LOGGER.info("Starting {} task...", this.taskName);
    final Stopwatch watch = Stopwatch.createStarted();
    try {
        doPublish();
        LOGGER.info("{} completed.", this.taskName);
        return true;
    } catch (final Exception e) {
        LOGGER.warn("{} failed. Full stacktrace follows.", this.taskName, e);
        return false;
    } finally {
        watch.stop();
        LOGGER.info("{} task finished. Elapsed time (ms): {}", this.taskName,
                watch.elapsed(TimeUnit.MILLISECONDS));
    }
}
From source file: com.facebook.buck.cli.DistBuildLogsCommand.java

@Override
public ExitCode runWithoutHelp(CommandRunnerParams params) throws Exception {
    Console console = params.getConsole();
    PrintStream stdout = console.getStdOut();
    StampedeId stampedeId = getStampedeId();

    try (DistBuildService service = DistBuildFactory.newDistBuildService(params)) {
        stdout.println(String.format("Fetching build information for StampedeId=[%s].", stampedeId.getId()));
        Stopwatch stopwatch = Stopwatch.createStarted();
        BuildJob buildJob = service.getCurrentBuildJobState(getStampedeId());
        stdout.println(String.format("Successfully downloaded build information in [%d millis].",
                stopwatch.elapsed(TimeUnit.MILLISECONDS)));

        stopwatch.reset().start();
        List<BuildSlaveRunId> buildSlaves = buildJob.getBuildSlaves().stream()
                .map(x -> x.getBuildSlaveRunId())
                .collect(Collectors.toList());
        stdout.println(String.format("Materializing logs for [%d] BuildSlaves. (%s)",
                buildSlaves.size(), Joiner.on(", ").join(buildSlaves)));

        Path logDir = params.getInvocationInfo().get().getLogDirectoryPath();
        ProjectFilesystem filesystem = params.getCell().getFilesystem();
        BuildSlaveLogsMaterializer materializer = new BuildSlaveLogsMaterializer(service, filesystem, logDir);
        List<BuildSlaveRunId> notMaterialized =
                materializer.fetchAndMaterializeAvailableLogs(buildJob.getStampedeId(), buildSlaves);

        if (notMaterialized.isEmpty()) {
            console.printSuccess(String.format("Successfully materialized all logs into [%s] in [%d millis].",
                    logDir.toAbsolutePath().toString(), stopwatch.elapsed(TimeUnit.MILLISECONDS)));
            return ExitCode.SUCCESS;
        } else if (notMaterialized.size() == buildSlaves.size()) {
            console.printErrorText(String.format("Failed to materialize all logs. Duration=[%d millis].",
                    stopwatch.elapsed(TimeUnit.MILLISECONDS)));
            // TODO: buck(team) properly disambiguate between user errors and fatals
            return ExitCode.BUILD_ERROR;
        } else {
            stdout.println(console.getAnsi()
                    .asWarningText(String.format("Materialized [%d] out of [%d] logs into [%s] in [%d millis].",
                            buildSlaves.size() - notMaterialized.size(), buildSlaves.size(),
                            logDir.toAbsolutePath().toString(), stopwatch.elapsed(TimeUnit.MILLISECONDS))));
            return ExitCode.BUILD_ERROR;
        }
    }
}
From source file: dk.dma.nogoservice.controller.ApiController.java

@PostMapping(value = "/area/wkt")
@ApiOperation(value = "Get NoGo area as WKT",
        notes = "Returns a single MultiPolygon with all the nogo areas. If time is included the tidal information will be included in the NoGo calculation.")
public MultiPolygon getNoGoAreasAsWKT(@Valid @RequestBody NoGoRequest request) {
    Stopwatch timer = Stopwatch.createStarted();
    NoGoResponse nogo = noGoService.getNoGoAreas(request);
    log.info("NoGo (wkt) request processed in {} ms", timer.stop().elapsed(TimeUnit.MILLISECONDS));
    return nogo.toMultiPolygon();
}
From source file: org.openqa.selenium.javascript.ClosureTestStatement.java

@Override
public void evaluate() throws Throwable {
    URL testUrl = filePathToUrlFn.apply(testPath);
    LOG.info("Running: " + testUrl);

    Stopwatch stopwatch = Stopwatch.createStarted();
    WebDriver driver = driverSupplier.get();

    // Attempt to make the window as big as possible.
    try {
        driver.manage().window().maximize();
    } catch (RuntimeException ignored) {
        // We tried.
    }

    JavascriptExecutor executor = (JavascriptExecutor) driver;
    // Avoid Safari JS leak between tests.
    executor.executeScript("if (window && window.top) window.top.G_testRunner = null");

    try {
        driver.get(testUrl.toString());
    } catch (WebDriverException e) {
        fail("Test failed to load: " + e.getMessage());
    }

    while (!getBoolean(executor, Query.IS_FINISHED)) {
        long elapsedTime = stopwatch.elapsed(TimeUnit.SECONDS);
        if (timeoutSeconds > 0 && elapsedTime > timeoutSeconds) {
            throw new JavaScriptAssertionError("Tests timed out after " + elapsedTime + " s");
        }
        TimeUnit.MILLISECONDS.sleep(100);
    }

    if (!getBoolean(executor, Query.IS_SUCCESS)) {
        String report = getString(executor, Query.GET_REPORT);
        throw new JavaScriptAssertionError(report);
    }
}
From source file: io.ecarf.core.cloud.task.processor.analyze.ExtractAndCountTermsTask.java

@Override
public void run() throws IOException {
    Stopwatch stopwatch = Stopwatch.createStarted();
    log.info("START: ExtractAndCountTermsTask task" + ", memory usage: " + Utils.getMemoryUsageInGB() + "GB");

    EcarfGoogleCloudService cloudService = (EcarfGoogleCloudService) this.getCloudService();

    TermCounter counter = null;
    if (StringUtils.isNoneBlank(this.schemaTermsFile)) {
        log.info("Downloading schema terms file: " + schemaTermsFile);
        Set<String> schemaTerms = cloudService.getSetFromCloudStorageFile(schemaTermsFile, bucket);
        counter = new TermCounter();
        counter.setTermsToCount(schemaTerms);
    }

    Set<String> filesSet = ObjectUtils.csvToSet(files);
    log.info("Processing files: " + filesSet);

    for (final String file : filesSet) {
        Stopwatch stopwatch1 = Stopwatch.createStarted();
        log.info("Downloading file: " + file + ", timer: 0s");

        String localFile = Utils.TEMP_FOLDER + file;
        cloudService.downloadObjectFromCloudStorage(file, localFile, sourceBucket);
        log.info("Processing file: " + localFile + ", timer: " + stopwatch1);

        NxGzipProcessor processor = new NxGzipProcessor(localFile);
        ExtractTermsCallback callback = new ExtractTermsCallback();
        callback.setCounter(counter);
        processor.read(callback);

        Set<String> terms = callback.getResources();
        terms.addAll(callback.getBlankNodes());

        log.info("Finished processing file: " + localFile + ", memory usage: " + Utils.getMemoryUsageInGB() + "GB" + ", timer: " + stopwatch1);
        log.info("Number of unique URIs: " + callback.getResources().size());
        log.info("Number of blank nodes: " + callback.getBlankNodes().size());
        log.info("Number of literals: " + callback.getLiteralCount());

        String termsFile = Utils.TEMP_FOLDER + file + Constants.DOT_SER + Constants.GZIP_EXT;
        Utils.objectToFile(termsFile, terms, true);
        log.info("Serialized terms file: " + termsFile + ", memory usage: " + Utils.getMemoryUsageInGB() + "GB" + ", timer: " + stopwatch1);

        cloudService.uploadFileToCloudStorage(termsFile, bucket);
        log.info("Uploaded terms file: " + termsFile + ", memory usage: " + Utils.getMemoryUsageInGB() + "GB" + ", timer: " + stopwatch1);
    }

    // write term stats to file and upload
    if (counter != null) {
        log.info("Saving terms stats");
        String countStatsFile = Utils.TEMP_FOLDER + cloudService.getInstanceId() + Constants.DOT_JSON;
        FileUtils.objectToJsonFile(countStatsFile, counter.getCount());
        cloudService.uploadFileToCloudStorage(countStatsFile, bucket);
    }

    log.info("TIMER# All files are processed successfully, elapsed time: " + stopwatch);
}
From source file: org.apache.eagle.alert.coordinator.trigger.DynamicPolicyLoader.java

/**
 * When run for the first time, cachedPolicies is empty, so all existing policies
 * are reported as addedPolicies.
 */
@SuppressWarnings("unchecked")
@Override
public void run() {
    // we should catch every exception to avoid a zombie thread
    try {
        final Stopwatch watch = Stopwatch.createStarted();
        LOG.info("Starting to load policies");
        List<PolicyDefinition> current = client.listPolicies();
        Map<String, PolicyDefinition> currPolicies = new HashMap<>();
        current.forEach(pe -> currPolicies.put(pe.getName(), pe));

        Collection<String> addedPolicies = CollectionUtils.subtract(currPolicies.keySet(), cachedPolicies.keySet());
        Collection<String> removedPolicies = CollectionUtils.subtract(cachedPolicies.keySet(), currPolicies.keySet());
        Collection<String> potentiallyModifiedPolicies = CollectionUtils.intersection(currPolicies.keySet(), cachedPolicies.keySet());

        List<String> reallyModifiedPolicies = new ArrayList<>();
        for (String updatedPolicy : potentiallyModifiedPolicies) {
            if (currPolicies.get(updatedPolicy) != null
                    && !currPolicies.get(updatedPolicy).equals(cachedPolicies.get(updatedPolicy))) {
                reallyModifiedPolicies.add(updatedPolicy);
            }
        }

        boolean policyChanged = false;
        if (addedPolicies.size() != 0 || removedPolicies.size() != 0 || reallyModifiedPolicies.size() != 0) {
            policyChanged = true;
        }

        if (!policyChanged) {
            LOG.info("No policy (totally {}) changed since last round", current.size());
            return;
        }

        synchronized (this) {
            for (PolicyChangeListener listener : listeners) {
                listener.onPolicyChange(current, addedPolicies, removedPolicies, reallyModifiedPolicies);
            }
        }

        watch.stop();
        LOG.info("Finished loading {} policies, added: {}, removed: {}, modified: {}, taken: {} ms",
                current.size(), addedPolicies.size(), removedPolicies.size(),
                reallyModifiedPolicies.size(), watch.elapsed(TimeUnit.MILLISECONDS));

        // reset cached policies
        cachedPolicies = currPolicies;
    } catch (Throwable t) {
        LOG.warn("Error loading policy, but continue to run", t);
    }
}
From source file: com.heliosphere.demeter.base.runner.processor.AbstractProcessor.java

/**
 * Creates a new abstract processor given a context.
 * <hr>
 * @param context Context to process.
 */
public AbstractProcessor(final IContext context) {
    super(context.getEntity().getName());
    this.context = context;
    setName(context.getEntity().getName());

    result = new ExecutionResult(getName());
    result.setParameters(context.getParameters());

    watch = Stopwatch.createStarted();
}