Example usage for com.google.common.base Stopwatch start

List of usage examples for com.google.common.base Stopwatch start

Introduction

On this page you can find example usages of com.google.common.base.Stopwatch.start().

Prototype

public Stopwatch start() 

Document

Starts the stopwatch. Returns this Stopwatch instance; throws IllegalStateException if the stopwatch is already running.
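
Several of the examples below still construct the stopwatch with the legacy public constructor (new Stopwatch()) or read the time with elapsedMillis(); both were deprecated and later removed from Guava in favor of the createStarted()/createUnstarted() factory methods and elapsed(TimeUnit). A minimal, self-contained sketch against a current Guava version (the timed work is just a placeholder sleep):

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

public class StopwatchStartExample {
    public static void main(String[] args) throws InterruptedException {
        // Create an unstarted stopwatch, then start it explicitly.
        Stopwatch stopwatch = Stopwatch.createUnstarted();
        stopwatch.start();

        Thread.sleep(100); // placeholder for the work being timed

        stopwatch.stop();
        System.out.println("Elapsed: " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");
        System.out.println(stopwatch); // toString() renders a human-readable duration
    }
}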

Usage

From source file:org.apache.drill.exec.store.AffinityCalculator.java

/**
 * Builds a mapping of drillbit endpoints to hostnames
 */
private void buildEndpointMap() {
    Stopwatch watch = new Stopwatch();
    watch.start();
    endPointMap = new HashMap<String, DrillbitEndpoint>();
    for (DrillbitEndpoint d : endpoints) {
        String hostName = d.getAddress();
        endPointMap.put(hostName, d);
    }
    watch.stop();
    logger.debug("Took {} ms to build endpoint map", watch.elapsed(TimeUnit.MILLISECONDS));
}
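
The Drill example above pairs the legacy constructor with an immediate start() call. Under a current Guava version, the createStarted() factory method collapses those two steps; a small stand-alone sketch of the same timing pattern (the DrillbitEndpoint values are replaced with plain strings for illustration):

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

public class EndpointMapTimingSketch {
    public static void main(String[] args) {
        String[] hostNames = {"node-a", "node-b", "node-c"}; // placeholder hostnames

        Stopwatch watch = Stopwatch.createStarted(); // replaces: new Stopwatch(); watch.start();
        Map<String, String> endPointMap = new HashMap<>();
        for (String hostName : hostNames) {
            endPointMap.put(hostName, hostName); // stand-in for hostname -> DrillbitEndpoint
        }
        watch.stop();

        System.out.println("Took " + watch.elapsed(TimeUnit.MILLISECONDS) + " ms to build endpoint map");
    }
}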

From source file:com.pedra.storefront.filters.RequestLoggerFilter.java

@Override
public void doFilterInternal(final HttpServletRequest request, final HttpServletResponse response,
        final FilterChain filterChain) throws IOException, ServletException {
    if (LOG.isDebugEnabled()) {
        final String requestDetails = buildRequestDetails(request);

        if (LOG.isDebugEnabled()) {
            LOG.debug(requestDetails + "Begin");
        }

        logCookies(request);

        final ResponseWrapper wrappedResponse = new ResponseWrapper(response);

        final Stopwatch stopwatch = new Stopwatch();
        stopwatch.start();
        try {
            filterChain.doFilter(request, wrappedResponse);
        } finally {
            stopwatch.stop();
            final int status = wrappedResponse.getStatus();

            if (status != 0) {
                LOG.debug(requestDetails + stopwatch.toString() + " (" + status + ")");
            } else {
                LOG.debug(requestDetails + stopwatch.toString());
            }
        }

        return;
    }

    filterChain.doFilter(request, response);
}

From source file:jobs.LuceneStartifiedIndexing.java

@Override
public void doJob() throws Exception {
    Logger.info("Stratified indexing started...");
    Stopwatch stopwatch = Stopwatch.createUnstarted();
    stopwatch.start();

    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_47);
    ShingleAnalyzerWrapper shingleAnalyzer = new ShingleAnalyzerWrapper(analyzer, 2, 5);
    int now = Integer.parseInt((String) play.Play.configuration.get("analysis.year"));

    //TODO could possibly do less indexes
    //Just focus on the previous year for simplicity sake
    for (int t = now; t >= now - 10; t--) {
        //Create a folder for the index
        VirtualFile.fromRelativePath("/indexes/index-" + t).getRealFile().mkdir();
        Directory directory = FSDirectory
                .open(VirtualFile.fromRelativePath("/indexes/index-" + t).getRealFile());
        IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_47, shingleAnalyzer);
        IndexWriter iwriter = new IndexWriter(directory, config);

        //Retrieve the citations given a year t
        SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy");
        Date start = sdf.parse("01/01/" + t);
        Date end = sdf.parse("31/12/" + t);

        //Get all the citations given a year
        List<Citation> citations = Citation.find("created between ? and ?", start, end).fetch();
        int total = citations.size();
        int counter = 0;

        //Iterate over the citations and create the index
        for (Citation citation : citations) {
            counter++;
            Logger.info("i (" + t + "): " + counter + "/" + total);
            Document doc = new Document();
            String contents = "";

            if (citation.abstractText != null) {
                contents += citation.abstractText;
            }

            if (citation.title != null) {
                contents += citation.title;
            }

            if (!contents.equals("")) {
                doc.add(new Field("contents", contents, TextField.TYPE_STORED));
            }

            iwriter.addDocument(doc);

        }

        iwriter.close();

    }
    Logger.info("Job done!");
    stopwatch.stop();
    Utils.emailAdmin("Stratified index built",
            "Job finished in " + stopwatch.elapsed(TimeUnit.MINUTES) + " minutes.");

}

From source file:com.vmm.storefront.filters.RequestLoggerFilter.java

@Override
public void doFilterInternal(final HttpServletRequest request, final HttpServletResponse response,
        final FilterChain filterChain) throws IOException, ServletException {
    if (LOG.isDebugEnabled()) {
        final String requestDetails = buildRequestDetails(request);
        writeDebugLog(requestDetails, "Begin");
        logCookies(request);

        final ResponseWrapper wrappedResponse = new ResponseWrapper(response);

        final Stopwatch stopwatch = Stopwatch.createUnstarted();
        stopwatch.start();
        try {
            filterChain.doFilter(request, wrappedResponse);
        } finally {
            stopwatch.stop();
            final int status = wrappedResponse.getStatus();

            if (status != 0) {
                writeDebugLog(requestDetails, stopwatch.toString(), " (", String.valueOf(status), ")");
            } else {
                writeDebugLog(requestDetails, stopwatch.toString());
            }
        }

        return;
    }

    filterChain.doFilter(request, response);
}

From source file:jobs.ComputeStratifiedFrequencies2.java

@Override
public void doJob() throws Exception {

    Logger.info("trends computation started...");
    Stopwatch stopwatch = Stopwatch.createUnstarted();
    stopwatch.start();

    int now = Integer.parseInt((String) play.Play.configuration.get("analysis.year"));
    int y5 = now - 5;

    //iterate over all the years and save the values
    Logger.info("Reading index...");
    Directory directory = FSDirectory.open(VirtualFile.fromRelativePath("/indexes/index-" + y5).getRealFile());
    DirectoryReader ireader = DirectoryReader.open(directory);

    Terms terms = SlowCompositeReaderWrapper.wrap(ireader).terms("contents");
    TermsEnum iterator = terms.iterator(null);
    BytesRef byteRef;

    Map<Long, Double> frequencies = new HashMap<Long, Double>();

    while ((byteRef = iterator.next()) != null) {
        String term = new String(byteRef.bytes, byteRef.offset, byteRef.length);
        if (!term.contains("_")) {
            Logger.info("Term: " + term);
            Stopwatch time = Stopwatch.createUnstarted();
            time.start();

            Phrase phrase = Phrase.find("byValue", term).first();
            if (phrase != null) {
                Logger.info("Term: " + phrase.value + " (" + term + ")");
                int frequency = iterator.docFreq();
                frequencies.put(phrase.id, (double) frequency);
            }
            time.stop();
            Logger.info("- Query time: " + time.elapsed(TimeUnit.MILLISECONDS));
        }
    }

    ireader.close();
    directory.close();

    Phrase.em().flush();
    Phrase.em().clear();
    int counter = 0;
    for (Long id : frequencies.keySet()) {
        Phrase phrase = Phrase.findById(id);
        phrase.frequency5y = frequencies.get(id);
        phrase.save();
        counter++;
        Logger.info("Counter: " + counter);

        if (counter % 1000 == 0) {
            Phrase.em().flush();
            Phrase.em().clear();
        }
    }

    stopwatch.stop();
    Utils.emailAdmin("Yearly frequency calculated. ",
            "Job finished in " + stopwatch.elapsed(TimeUnit.MINUTES) + " minutes.");

    Logger.info("Job done.");

}

From source file:org.apache.drill.exec.store.couchbase.CouchbaseRecordReader.java

@Override
public int next() {
    Stopwatch watch = new Stopwatch();
    watch.start();

    keyVector.clear();
    keyVector.allocateNew();
    valueVector.clear();
    valueVector.allocateNew();
    int rowCount = 0;
    done: for (; rowCount < TARGET_RECORD_COUNT && tapClient.hasMoreMessages();) {
        ResponseMessage message = null;
        if (leftOver != null) {
            message = leftOver;
            leftOver = null;
        } else {
            if ((message = tapClient.getNextMessage()) == null) {
                continue;
            }
        }

        if (!keyVector.getMutator().setSafe(rowCount, message.getKey().getBytes())) {
            setOutputRowCount(rowCount);
            leftOver = message;
            break done;
        }

        if (!valueVector.getMutator().setSafe(rowCount, message.getValue())) {
            setOutputRowCount(rowCount);
            leftOver = message;
            break done;
        }

        rowCount++;
    }

    setOutputRowCount(rowCount);
    logger.debug("Took {} ms to get {} records", watch.elapsed(TimeUnit.MILLISECONDS), rowCount);
    return rowCount;
}

From source file:hu.vodafone.storefront.filters.RequestLoggerFilter.java

@Override
public void doFilterInternal(final HttpServletRequest request, final HttpServletResponse response,
        final FilterChain filterChain) throws IOException, ServletException {
    if (LOG.isDebugEnabled()) {
        final String requestDetails = buildRequestDetails(request);

        if (LOG.isDebugEnabled()) {
            LOG.debug(requestDetails + "Begin");
        }

        logCookies(request);

        final ResponseWrapper wrappedResponse = new ResponseWrapper(response);

        final Stopwatch stopwatch = Stopwatch.createUnstarted();
        stopwatch.start();
        try {
            filterChain.doFilter(request, wrappedResponse);
        } finally {
            stopwatch.stop();
            final int status = wrappedResponse.getStatus();

            if (status != 0) {
                LOG.debug(requestDetails + stopwatch.toString() + " (" + status + ")");
            } else {
                LOG.debug(requestDetails + stopwatch.toString());
            }
        }

        return;
    }

    filterChain.doFilter(request, response);
}

From source file:com.stevpet.sonar.plugins.dotnet.mscover.sensor.VSTestCoverageSaver.java

/**
 * Parse the coverage file, with loading the block coverage, sourcefilenames observers
 * @param fileBlocksRegistry - block coverage
 * @param sourceFileNamesRegistry - sourcefilenames
 * @throws XMLStreamException
 * @throws IOException 
 */
private void invokeParserSubject(VsTestCoverageRegistry registry, File coverageFile) throws XMLStreamException {
    VsTestParserFactory parserFactory = new ConcreteVsTestParserFactory();
    XmlParserSubject parserSubject = parserFactory.createCoverageParser(registry, artifactNames);

    Stopwatch sw = new Stopwatch();
    sw.start();
    parserSubject.parseFile(coverageFile);
    LOG.info("----------------------Parsing took {}ms -------------------", sw.elapsedMillis());
}

From source file:jobs.ComputeOpenIF.java

@Override
public void doJob() throws Exception {

    Logger.info("Job open IF started...");
    Stopwatch stopwatch = Stopwatch.createUnstarted();
    stopwatch.start();

    //IF(2013) = articles published in 2011 and 2012
    //A = # of times articles published in journal in 2011 and 2012 are cited - ideally calculated in very beginning 2014, when all citations that happened
    //in 2013 have been considered.
    //B = # of articles published by the journal in 2011 and 2012.
    //IF(2013) = A/B
    int now = Integer.parseInt((String) play.Play.configuration.get("analysis.year"));
    int y2 = now - 2;

    SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy");
    Date start = sdf.parse("01/01/" + now);
    Date end = sdf.parse("31/12/" + y2);

    Logger.info("Getting citations...");
    List<Citation> citations = Citation.find("created between ? and ?", end, start).fetch();

    int counter = 0;
    int total = citations.size();

    //Map holding the journal's journalAbbreviation and their citation counts.
    HashMap<String, List<Integer>> citationMap = new HashMap<String, List<Integer>>();

    for (Citation citation : citations) {
        counter++;
        Logger.info(counter + "/" + total);
        if (citationMap.containsKey(citation.journalAbbreviation)) {
            List<Integer> citationCounts = citationMap.get(citation.journalAbbreviation);
            citationCounts.add(citation.citationCount);
        } else {
            List<Integer> citationCounts = new ArrayList<Integer>();
            citationCounts.add(citation.citationCount);
            citationMap.put(citation.journalAbbreviation, citationCounts);
        }
        Logger.info(citation.journalAbbreviation + ": " + citationMap.get(citation.journalAbbreviation));
    }

    //Save the jazz

    //            double openIF = 0.0;
    //            double deviationIF = 0.0;
    //Compute the IF = mean citations per article published in the two previous years.
    //            if (citations.size() > 0) {
    //                openIF = (double) sum(citationCounts) / citations.size();
    //            }
    //            Logger.info("- IF: " + openIF);
    //Compute the standard deviation of the sample (population)
    //            double squaredDiff = 0.0;
    //            for (Integer citationCount : citationCounts) {
    //                double diff = Math.pow(citationCount - openIF, 2);
    //                squaredDiff += diff;
    //            }
    //
    //            if (citations.size() > 0) {
    //                deviationIF = Math.sqrt(squaredDiff / citations.size());
    //            }
    //            Logger.info("- Deviation: " + deviationIF);
    //Save the modifications
    //            journal.openImpactFactor = openIF;
    //            journal.deviationIF = deviationIF;
    //            journal.save();
    stopwatch.stop();
    Utils.emailAdmin("Stratified index built",
            "Job finished in " + stopwatch.elapsed(TimeUnit.MINUTES) + " minutes.");
}
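
The commented-out block above describes the open impact factor as the mean citation count per article over the two prior years, with a population standard deviation around that mean. A tiny self-contained sketch of just that arithmetic, using made-up citation counts:

import java.util.Arrays;
import java.util.List;

public class OpenImpactFactorSketch {
    public static void main(String[] args) {
        // Hypothetical citation counts for articles published in the two prior years.
        List<Integer> citationCounts = Arrays.asList(3, 0, 7, 1, 4);

        // IF = total citations / number of articles (the mean citation count).
        double sum = 0;
        for (int c : citationCounts) {
            sum += c;
        }
        double openIF = citationCounts.isEmpty() ? 0.0 : sum / citationCounts.size();

        // Population standard deviation around that mean, as in the commented-out code.
        double squaredDiff = 0.0;
        for (int c : citationCounts) {
            squaredDiff += Math.pow(c - openIF, 2);
        }
        double deviationIF = citationCounts.isEmpty() ? 0.0 : Math.sqrt(squaredDiff / citationCounts.size());

        System.out.println("IF = " + openIF + ", deviation = " + deviationIF);
    }
}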

From source file:org.jnbis.imageio.WSQImageReader.java

private void processInput(final int imageIndex) {
    try {
        if (imageIndex != 0) {
            throw new IndexOutOfBoundsException("imageIndex " + imageIndex);
        }

        /* Already processed */
        if (image != null) {
            return;
        }

        final Object input = getInput();
        if (input == null) {
            this.image = null;
            return;
        }
        if (!(input instanceof ImageInputStream)) {
            throw new IllegalArgumentException("bad input: " + input.getClass().getCanonicalName());
        }
        final Stopwatch stopwatch = new Stopwatch();
        stopwatch.start();
        log.debug("Input:{}", getInput());
        final BitmapWithMetadata bitmap = WSQDecoder.decode((ImageInputStream) getInput());
        stopwatch.stop();
        //log.debug("Decode took: {}",stopwatch.elapsed(TimeUnit.MILLISECONDS));

        metadata = new WSQMetadata();

        for (final Map.Entry<String, String> entry : bitmap.getMetadata().entrySet()) {
            //System.out.println(entry.getKey() + ": " + entry.getValue());
            metadata.setProperty(entry.getKey(), entry.getValue());
        }
        for (final String s : bitmap.getComments()) {
            //System.out.println("//"+s);
            metadata.addComment(s);
        }

        image = new BufferedImage(bitmap.getWidth(), bitmap.getHeight(), BufferedImage.TYPE_BYTE_GRAY);
        final byte[] imageData = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
        System.arraycopy(bitmap.getPixels(), 0, imageData, 0, bitmap.getLength());
    } catch (final IOException ioe) {
        ioe.printStackTrace();
        this.image = null;
    }
}