Example usage for com.google.common.base Stopwatch toString

List of usage examples for com.google.common.base Stopwatch toString

Introduction

This page lists example usages of com.google.common.base.Stopwatch#toString(), collected from open-source projects.

Prototype

@GwtIncompatible("String.format()")
@Override
public String toString() 

Document

Returns a string representation of the current elapsed time.
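
Guava's Stopwatch#toString() picks a readable time unit automatically and prints the elapsed time with (typically) four significant digits, e.g. "38.12 ms" or "1.243 s". A minimal, self-contained sketch; the Thread.sleep is just a stand-in for the work being timed:

import com.google.common.base.Stopwatch;

public class StopwatchToStringDemo {
    public static void main(String[] args) throws InterruptedException {
        // createStarted() returns a stopwatch that is already running (Guava 15+)
        Stopwatch sw = Stopwatch.createStarted();

        Thread.sleep(100); // stand-in for the work being measured

        sw.stop();
        // toString() chooses the unit itself, printing e.g. "101.3 ms"
        System.out.println("Elapsed: " + sw.toString());
    }
}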

Usage

From source file:mil.nga.giat.geowave.adapter.vector.delete.CQLDelete.java

@Override
public void execute(OperationParams params) throws ParseException {
    if (debug) {
        org.apache.log4j.Logger.getRootLogger().setLevel(org.apache.log4j.Level.DEBUG);
    }

    final Stopwatch stopWatch = new Stopwatch();

    // Ensure we have all the required arguments
    if (parameters.size() != 1) {
        throw new ParameterException("Requires arguments: <storename>");
    }

    String storeName = parameters.get(0);

    // Config file
    File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);

    // Attempt to load store.
    StoreLoader storeOptions = new StoreLoader(storeName);
    if (!storeOptions.loadFromConfig(configFile)) {
        throw new ParameterException("Cannot find store name: " + storeOptions.getStoreName());
    }

    DataStore dataStore;
    AdapterStore adapterStore;
    try {
        dataStore = storeOptions.createDataStore();
        adapterStore = storeOptions.createAdapterStore();

        final GeotoolsFeatureDataAdapter adapter;
        if (adapterId != null) {
            adapter = (GeotoolsFeatureDataAdapter) adapterStore.getAdapter(adapterId);
        } else {
            final CloseableIterator<DataAdapter<?>> it = adapterStore.getAdapters();
            adapter = (GeotoolsFeatureDataAdapter) it.next();
            it.close();
        }

        if (debug && (adapter != null)) {
            LOGGER.debug(adapter.toString());
        }

        stopWatch.start();
        final long results = delete(adapter, adapterId, indexId, dataStore, debug);
        stopWatch.stop();

        if (debug) {
            LOGGER.debug(results + " results remaining after delete; time = " + stopWatch.toString());
        }
    } catch (IOException e) {
        LOGGER.warn("Unable to read adapter", e);
    }
}
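
This example targets a pre-15 Guava, where the Stopwatch constructor was still public; the constructor was deprecated in Guava 15 in favor of the createStarted()/createUnstarted() factory methods (the same applies to the cosmos.mapred.MediawikiQueries and org.n52.lod.csw.CSWLoDEnabler examples below). A minimal sketch of the same start/delete/stop/log pattern against a current Guava, with timedDelete() as a hypothetical stand-in for the delete(...) call:

import com.google.common.base.Stopwatch;

public class DeleteTimingSketch {
    public static void main(String[] args) {
        // Factory method replaces the deprecated new Stopwatch() used above
        final Stopwatch stopWatch = Stopwatch.createUnstarted();

        stopWatch.start();
        final long results = timedDelete();
        stopWatch.stop();

        // String concatenation invokes toString() implicitly
        System.out.println(results + " results remaining after delete; time = " + stopWatch);
    }

    // Hypothetical placeholder for CQLDelete#delete(adapter, adapterId, indexId, dataStore, debug)
    private static long timedDelete() {
        return 0L;
    }
}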

From source file:com.edduarte.vokter.keyword.KeywordBuilder.java

public Keyword build(ParserPool parserPool) {
    Stopwatch sw = Stopwatch.createStarted();

    // step 3) Takes a parser from the parser-pool.
    Parser parser;
    try {
        parser = parserPool.take();
    } catch (InterruptedException ex) {
        logger.error(ex.getMessage(), ex);
        return null;
    }

    KeywordPipeline pipeline = new KeywordPipeline(

            // the language detection model
            langDetector,

            // the textual input of the keyword
            keywordInput,

            // the parser that will be used for query parsing and term
            // detection
            parser,

            // the set of stopwords that will be filtered during tokenization
            isStoppingEnabled,

            // the stemmer class that will be used to stem the detected tokens
            isStemmingEnabled,

            // flag that forces every found token to be
            // lower case, matching, for example, the words
            // 'be' and 'Be' as the same token
            ignoreCase);

    // step 5) Process the document asynchronously.
    Keyword aux;
    try {
        aux = pipeline.call();
    } catch (Exception ex) {
        logger.error(ex.getMessage(), ex);
        return null;
    }
    final Keyword keyword = aux;

    // step 6) Place the parser back in the parser-pool.
    try {
        parserPool.place(parser);
    } catch (InterruptedException ex) {
        logger.error(ex.getMessage(), ex);
        return null;
    }

    logger.info("Completed building keywords '{}' in {}", keywordInput, sw.toString());
    return keyword;
}

From source file:org.n52.lod.csw.CSWLoDEnabler.java

private void async(final int startPos, final long recordCount, final Stopwatch overallTimer,
        final Stopwatch retrievingTimer, final Stopwatch mappingTimer, final TripleSink serverSink,
        final TripleSink fileSink) {
    // processing queue
    final ConcurrentLinkedQueue<Map<String, GetRecordByIdResponseDocument>> queue = Queues
            .newConcurrentLinkedQueue();

    // main loop download - producer
    ExecutorService downloadExecutor = Executors.newSingleThreadExecutor();
    downloadExecutor.submit(new Runnable() {

        private final Logger logger = LoggerFactory.getLogger("Download Runnable");

        @Override
        public void run() {
            int i = startPos;
            while (i < recordCount) {
                retrievingTimer.start();
                // Map<String, GetRecordByIdResponseDocument> records =
                // retrieveRecords(i, NUMBER_OF_RECORDS_PER_ITERATION,
                // recordCount);
                Map<String, GetRecordByIdResponseDocument> records = retrieveRecordsThreaded(i,
                        NUMBER_OF_RECORDS_PER_ITERATION, recordCount);
                queue.add(records);
                retrievingTimer.stop();

                i = i + NUMBER_OF_RECORDS_PER_ITERATION;
                logger.debug("Finished intermediate download run at {}", overallTimer.toString());
                logger.info("Retrieved {} records, queue size is now {}", records.size(), queue.size());
            } // end of main loop

            logger.trace("Done - adding the poison pill!");
            queue.add(POISON_PILL);
        }
    });

    // consumer
    ExecutorService mapExecutor = Executors.newSingleThreadExecutor();
    mapExecutor.submit(new Runnable() {

        private final Logger logger = LoggerFactory.getLogger("Map Runnable");

        private boolean isRunning = true;

        @Override
        public void run() {
            while (isRunning) {
                try {
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    logger.error("Error sleeping in mapping runnable", e);
                }

                try {
                    Map<String, GetRecordByIdResponseDocument> records = queue.poll();

                    if (records == null)
                        continue;

                    if (records == POISON_PILL) {
                        queue.add(POISON_PILL); // notify other threads to
                                                // stop
                        isRunning = false;
                        logger.trace("Got the poison pill!");
                        return;
                    }

                    // process queueElement
                    mappingTimer.start();
                    if (addToServer && serverSink != null)
                        serverSink.addRecords(records, report);
                    if (saveToFile && fileSink != null)
                        fileSink.addRecords(records, report);
                    mappingTimer.stop();

                    logger.debug("Finished intermediate run at {}", overallTimer.toString());

                } catch (RuntimeException e) {
                    logger.error("Error in mapping runnable", e);
                }
            } // end of main loop
        }
    });

    downloadExecutor.shutdown();
    try {
        downloadExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    } catch (InterruptedException e) {
        log.error("during shut down of download executor", e);
    }
    mapExecutor.shutdown();
    try {
        mapExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    } catch (InterruptedException e) {
        log.error("during shut down of map executor", e);
    }
}

From source file:nextmethod.web.razor.RazorEditorParser.java

/**
 * Determines if a change will cause a structural change to the document and if not, applies it to the existing tree.
 * If a structural change would occur, a reparse is started automatically.
 * <p>
 * NOTE: The initial incremental parsing check and the actual incremental parsing (if possible) occur
 * on the caller's thread. However, if a full reparse is needed, it occurs on a background thread.
 * </p>
 *
 * @param change The change to apply to the parse tree
 *
 * @return a PartialParseResult value indicating the result of the incremental parse
 */
public EnumSet<PartialParseResult> checkForStructureChanges(@Nonnull final TextChange change) {
    // Validate the change
    Stopwatch sw = null;
    if (Debug.isDebugArgPresent(DebugArgs.EditorTracing)) {
        sw = Stopwatch.createStarted();
    }
    RazorEditorTrace
            .traceLine(RazorResources().traceEditorReceivedChange(getFileName(fileName), change.toString()));
    if (change.getNewBuffer() == null) {
        throw new IllegalArgumentException(
                RazorResources().structureMemberCannotBeNull("Buffer", "TextChange"));
    }

    EnumSet<PartialParseResult> result = PartialParseResult.setOfRejected();

    // If there isn't already a parse underway, try partial-parsing
    String changeString = Strings.Empty;
    try (IDisposable ignored = parser.synchronizeMainThreadState()) {
        // Capture the string value of the change while we're synchronized
        changeString = change.toString();

        // Check if we can partial-parse
        if (getCurrentParseTree() != null && parser.isIdle()) {
            result = tryPartialParse(change);
        }
    }

    // If partial parsing failed or there were outstanding parser tasks, start a full reparse
    if (result.contains(PartialParseResult.Rejected)) {
        parser.queueChange(change);
    }

    // Otherwise, remember if this was provisionally accepted for next partial parse
    lastResultProvisional = result.contains(PartialParseResult.Provisional);
    verifyFlagsAreValid(result);

    if (sw != null) {
        sw.stop();
    }

    RazorEditorTrace.traceLine(RazorResources().traceEditorProcessedChange(getFileName(fileName), changeString,
            sw != null ? sw.toString() : "?", enumSetToString(result)));

    return result;
}

From source file:com.edduarte.vokter.diff.DifferenceMatcher.java

@Override
public Set<DifferenceMatcher.Result> call() {
    Stopwatch sw = Stopwatch.createStarted();

    Set<Result> matchedDiffs = new ConcurrentHashSet<>();

    DifferenceEvent lastAction = DifferenceEvent.nothing;
    BloomFilter<String> lastBloomFilter = null;
    for (Difference r : differences) {
        if (lastAction == DifferenceEvent.nothing || r.getAction() != lastAction) {
            // reset the bloom filter being used
            lastBloomFilter = BloomFilter.create((from, into) -> into.putUnencodedChars(from), 10);
            lastAction = r.getAction();
        }
        BloomFilter<String> bloomFilter = lastBloomFilter;
        bloomFilter.put(r.getOccurrenceText());

        // check if AT LEAST ONE of the keywords has ALL of its words
        // contained in the diff text
        keywords.parallelStream().unordered().filter(kw -> kw.textStream().allMatch(bloomFilter::mightContain))
                .map(kw -> new Pair<>(r, kw))
                .filter((pair) -> pair.b().textStream().anyMatch(pair.a().getOccurrenceText()::equals))
                .map((pair) -> {
                    Difference diff = pair.a();
                    Keyword keyword = pair.b();
                    DifferenceEvent i = diff.getAction();
                    if (i == DifferenceEvent.inserted && !ignoreAdded) {
                        return new Result(diff.getAction(), keyword, diff.getSnippet());

                    } else if (i == DifferenceEvent.deleted && !ignoreRemoved) {
                        return new Result(diff.getAction(), keyword, diff.getSnippet());
                    }
                    return null;
                }).filter(diff -> diff != null).forEach(matchedDiffs::add);
    }

    sw.stop();
    logger.info("Completed difference matching for keywords '{}' in {}", keywords.toString(), sw.toString());
    return matchedDiffs;
}

From source file:com.edduarte.argus.diff.DifferenceMatcher.java

@Override
public Set<DifferenceMatcher.Result> call() {
    Stopwatch sw = Stopwatch.createStarted();

    Set<Result> matchedDiffs = new ConcurrentHashSet<>();

    DifferenceAction lastAction = DifferenceAction.nothing;
    BloomFilter<String> lastBloomFilter = null;
    for (Difference r : differences) {
        if (lastAction == DifferenceAction.nothing || r.getAction() != lastAction) {
            // reset the bloom filter being used
            lastBloomFilter = BloomFilter.create((from, into) -> into.putUnencodedChars(from), 10);
            lastAction = r.getAction();
        }
        BloomFilter<String> bloomFilter = lastBloomFilter;
        bloomFilter.put(r.getOccurrenceText());

        // check if AT LEAST ONE of the keywords has ALL of its words
        // contained in the diff text
        keywords.parallelStream().unordered().filter(kw -> kw.textStream().allMatch(bloomFilter::mightContain))
                .map(kw -> new Pair<>(r, kw))
                .filter((pair) -> pair.b().textStream().anyMatch(pair.a().getOccurrenceText()::equals))
                .map((pair) -> {
                    Difference diff = pair.a();
                    Keyword keyword = pair.b();
                    DifferenceAction i = diff.getAction();
                    if (i == DifferenceAction.inserted && !ignoreAdded) {
                        return new Result(diff.getAction(), keyword, diff.getSnippet());

                    } else if (i == DifferenceAction.deleted && !ignoreRemoved) {
                        return new Result(diff.getAction(), keyword, diff.getSnippet());
                    }
                    return null;
                }).filter(diff -> diff != null).forEach(matchedDiffs::add);
    }

    sw.stop();
    logger.info("Completed difference matching for keywords '{}' in {}", keywords.toString(), sw.toString());
    return matchedDiffs;
}

From source file:cosmos.mapred.MediawikiQueries.java

public long columnFetch(Store id, Column colToFetch, Map<Column, Long> counts, long totalResults)
        throws Exception {
    Stopwatch sw = new Stopwatch();
    String prev = null;
    String lastDocId = null;
    long resultCount = 0L;

    sw.start();
    final CloseableIterable<MultimapRecord> results = this.sorts.fetch(id, Index.define(colToFetch));
    Iterator<MultimapRecord> resultsIter = results.iterator();

    for (; resultsIter.hasNext();) {
        MultimapRecord r = resultsIter.next();

        sw.stop();
        resultCount++;

        Collection<RecordValue<?>> values = r.get(colToFetch);

        TreeSet<RecordValue<?>> sortedValues = Sets.newTreeSet(values);

        if (null == prev) {
            prev = sortedValues.first().value().toString();
        } else {
            boolean plausible = false;
            Iterator<RecordValue<?>> iter = sortedValues.iterator();
            for (; !plausible && iter.hasNext();) {
                String val = iter.next().value().toString();
                if (prev.compareTo(val) <= 0) {
                    plausible = true;
                }
            }

            if (!plausible) {
                System.out.println(Thread.currentThread().getName() + ": " + colToFetch + " - " + lastDocId
                        + " shouldn't have come before " + r.docId());
                System.out.println(prev + " compared to " + sortedValues);
                results.close();
                System.exit(1);
            }
        }

        lastDocId = r.docId();

        sw.start();
    }

    sw.stop();

    System.out.println(Thread.currentThread().getName() + ": " + colToFetch + " - Took " + sw.toString()
            + " to fetch results");
    logTiming(totalResults, sw.elapsed(TimeUnit.MILLISECONDS), "fetch:" + colToFetch);

    results.close();

    long expected = counts.containsKey(colToFetch) ? counts.get(colToFetch) : -1;

    if (resultCount != expected) {
        System.out.println(Thread.currentThread().getName() + " " + colToFetch + ": Expected to get " + expected
                + " records but got " + resultCount);
        System.exit(1);
    }

    return resultCount;
}

From source file:cuchaz.enigma.Deobfuscator.java

public void writeSources(File dirOut, ProgressListener progress) {
    // get the classes to decompile
    Set<ClassEntry> classEntries = Sets.newHashSet();
    for (ClassEntry obfClassEntry : this.jarIndex.getObfClassEntries()) {
        // skip inner classes
        if (obfClassEntry.isInnerClass()) {
            continue;
        }

        classEntries.add(obfClassEntry);
    }

    if (progress != null) {
        progress.init(classEntries.size(), "Decompiling classes...");
    }

    //create a common instance outside the loop as mappings shouldn't be changing while this is happening
    //synchronized to make sure the parallelStream doesn't CME with the cache
    ITranslatingTypeLoader typeLoader = new SynchronizedTypeLoader(createTypeLoader());

    MetadataSystem metadataSystem = new NoRetryMetadataSystem(typeLoader);
    metadataSystem.setEagerMethodLoadingEnabled(true);//ensures methods are loaded on classload and prevents race conditions

    // DEOBFUSCATE ALL THE THINGS!! @_@
    Stopwatch stopwatch = Stopwatch.createStarted();
    AtomicInteger count = new AtomicInteger();
    classEntries.parallelStream().forEach(obfClassEntry -> {
        ClassEntry deobfClassEntry = deobfuscateEntry(new ClassEntry(obfClassEntry));
        if (progress != null) {
            progress.onProgress(count.getAndIncrement(), deobfClassEntry.toString());
        }

        try {
            // get the source
            CompilationUnit sourceTree = getSourceTree(obfClassEntry.getName(), typeLoader, metadataSystem);

            // write the file
            File file = new File(dirOut, deobfClassEntry.getName().replace('.', '/') + ".java");
            file.getParentFile().mkdirs();
            try (Writer writer = new BufferedWriter(new FileWriter(file))) {
                sourceTree.acceptVisitor(new InsertParenthesesVisitor(), null);
                sourceTree.acceptVisitor(new JavaOutputVisitor(new PlainTextOutput(writer), settings), null);
            }
        } catch (Throwable t) {
            // don't crash the whole world here, just log the error and keep going
            // TODO: set up logback via log4j
            System.err.println("Unable to deobfuscate class " + deobfClassEntry + " (" + obfClassEntry + ")");
            t.printStackTrace(System.err);
        }
    });
    stopwatch.stop();
    System.out.println("writeSources Done in : " + stopwatch.toString());
    if (progress != null) {
        progress.onProgress(count.get(), "Done:");
    }
}

From source file:org.n52.lod.csw.CSWLoDEnabler.java

/**
 * executes the program: 1.) retrieves the record descriptions from the CSW
 * 2.) transforms the descriptions to RDF 3.) inserts the produced RDF into
 * the triplestore.
 * 
 * @param startPos
 * 
 * @throws IOException
 */
public void runStartingFrom(int startPos) throws IOException {
    log.info("STARTING CSW to LOD..");

    if (!(addToServer || saveToFile)) {
        log.warn("Neither triple store nor file output are activated.");
        return;
    }

    final Stopwatch overallTimer = new Stopwatch();
    overallTimer.start();

    final Stopwatch retrievingTimer = new Stopwatch();
    final Stopwatch mappingTimer = new Stopwatch();
    final Stopwatch otherTimer = new Stopwatch();

    otherTimer.start();
    XmlToRdfMapper mapper = new GluesMapper(config);

    TripleSink serverSink = null;
    if (addToServer) {
        try {
            serverSink = new VirtuosoServer(config, mapper);
        } catch (RuntimeException e) {
            log.error("Could not connect to graph", e);
        }
    }

    TripleSink fileSink = null;
    if (saveToFile) {
        fileSink = new FileTripleSink(mapper);
    }

    long recordsInTotal = FALLBACK_RECORDS_TOTAL;
    try {
        recordsInTotal = csw.getNumberOfRecords();
        log.debug("Retrieved number of records from server: {}", recordsInTotal);
    } catch (IllegalStateException | HttpClientException | XmlException e) {
        log.error("Could not retrieve number of records from catalog {}, falling back to {}", csw,
                FALLBACK_RECORDS_TOTAL, e);
    }
    report.startIndex = startPos;
    report.recordNumber = recordsInTotal;
    otherTimer.stop();

    // main loop
    while (startPos < recordsInTotal) {
        retrievingTimer.start();
        Map<String, GetRecordByIdResponseDocument> records = retrieveRecords(startPos,
                NUMBER_OF_RECORDS_PER_ITERATION, recordsInTotal);
        retrievingTimer.stop();

        mappingTimer.start();
        if (addToServer && serverSink != null)
            serverSink.addRecords(records, report);
        if (saveToFile && fileSink != null)
            fileSink.addRecords(records, report);
        mappingTimer.stop();

        startPos = startPos + NUMBER_OF_RECORDS_PER_ITERATION;

        log.debug("Finished intermediate run at {}", overallTimer.toString());
    } // end of main loop

    otherTimer.start();
    if (fileSink != null)
        try {
            fileSink.close();
        } catch (Exception e) {
            log.error("Could not close file sink {}", fileSink, e);
        }

    if (serverSink != null)
        try {
            serverSink.close();
        } catch (Exception e) {
            log.error("Could not close server sink {}", serverSink, e);
        }

    if (!report.issues.isEmpty())
        log.error(report.extendedToString());

    overallTimer.stop();
    otherTimer.stop();

    log.info("DONE with CSW to LOD.. duration = {} (retrieving: {}, mapping = {}, other = {})", overallTimer,
            retrievingTimer, mappingTimer, otherTimer);
    log.info("Results: {}", report);
    log.info("Sinks: server = {}, file = {}", addToServer, saveToFile);
    log.info("Server: {} | File: {}", serverSink, fileSink);
}
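
Note that, unlike the earlier examples, the closing log statements here pass the Stopwatch instances directly as {} arguments; SLF4J renders each argument via its toString(), so an explicit toString() call is unnecessary in parameterized logging.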

From source file:eu.project.ttc.tools.cli.TermSuiteAlignerCLI.java

public void run(String[] args, PrintStream out) {
    File logDir = new File("logs");
    if (!logDir.exists())
        logDir.mkdir();
    String logPath = Paths
            .get("logs",
                    "termsuite-aligner-" + new SimpleDateFormat("yyyyMMdd-HHmmss").format(new Date()) + ".log")
            .toAbsolutePath().toString();
    TermSuiteCLIUtils.logToFile(logPath);
    Stopwatch sw = Stopwatch.createStarted();
    LOGGER.info("Logging to {}", logPath);
    try {

        // usage
        // java -DconfigFile=myPropertiesFileName -Xms1g  -Xmx2g -cp ttc-term-suite-1.3.jar eu.project.ttc.tools.cli.TermSuiteSpotterCLI
        // if the option -DconfigFile is missing preferencesFileName is set to TermSuiteCLIUtils.USER_HOME+PREFERENCES_FILE_NAME
        // create the command line parser
        PosixParser parser = new PosixParser();

        // create the Options
        Options options = declareOptions();

        try {
            // Parse and set CL options
            CommandLine line = parser.parse(options, args, false);
            readArguments(line, out);
            TermSuiteCLIUtils.setGlobalLogLevel("info");
            TermSuiteCLIUtils.logCommandLineOptions(line);

            BilingualAligner aligner = TermSuiteAlignerBuilder.start().setSourceTerminology(sourceTermino.get())
                    .setTargetTerminology(targetTermino.get()).setDicoPath(dicoPath).setDistance(distance)
                    .create();

            for (String term : terms) {
                Term sourceTerm = readSourceTerm(term);
                if (sourceTerm == null) {
                    LOGGER.error("Cannot find term \"{}\" in {}", term, line.getOptionValue(SOURCE_TERMINO));
                } else {
                    if (terms.size() > 1) {
                        out.println("---");
                        out.println(sourceTerm);
                        out.println("-");
                    }
                    for (TranslationCandidate candidate : aligner.align(sourceTerm, n, 1)) {
                        if (showExplanation)
                            out.format("%s\t%.3f\t%s\t%s\n", candidate.getTerm(), candidate.getScore(),
                                    candidate.getMethod(), candidate.getExplanation().getText());
                        else
                            out.format("%s\t%.3f\t%s\n", candidate.getTerm(), candidate.getScore(),
                                    candidate.getMethod());
                    }
                }
            }

            LOGGER.info("Script executed in " + sw.toString());

        } catch (ParseException e) {
            TermSuiteCLIUtils.printUsage(e, USAGE, options);
        }

    } catch (Exception e) {
        e.printStackTrace(System.err);
        LOGGER.error(e.getMessage());
    }
}