Example usage for com.google.common.base Stopwatch start

List of usage examples for com.google.common.base Stopwatch start

Introduction

On this page you can find example usages of com.google.common.base Stopwatch start.

Prototype

public Stopwatch start() 

Source Link

Document

Starts the stopwatch.

Usage

From source file:com.diskoverorta.legal.LegalManager.java

public String tagLegalTextAnalyticsComponents(String sDoc, Map<String, String> apiConfig) {
    logger.info("tagging legal text analytics components");
    // allTimer spans the whole call; entitiesTimer and ontologyTimer accumulate
    // only the per-sentence NLP work (started/stopped once per loop pass).
    Stopwatch allTimer = Stopwatch.createUnstarted();
    Stopwatch entitiesTimer = Stopwatch.createUnstarted();
    Stopwatch ontologyTimer = Stopwatch.createUnstarted();
    allTimer.start();
    Set<String> personEntities = new HashSet<String>();
    Set<String> orgEntities = new HashSet<String>();

    List<String> sentList = m_snlp.splitSentencesINDocument(sDoc);
    String chunkSize = null;
    logger.info("getting chunk size");
    // Optional "chunksize" setting regroups the sentence list into larger chunks.
    if ((apiConfig != null) && apiConfig.containsKey("chunksize"))
        chunkSize = apiConfig.get("chunksize");
    logger.info("Chunking sentences");
    if (chunkSize != null)
        sentList = chunkSentences(sentList, chunkSize);

    String jsonOutput = "";
    Gson gson = new GsonBuilder().setPrettyPrinting().create();

    Map<String, Set<String>> ontologyTemp = null;

    // Per sentence: ontology terms (Events/Topics) and named entities, while
    // collecting the global person/org entity sets for coreference below.
    List<LegalObject> legalcomponents = new ArrayList<LegalObject>();
    for (String temp : sentList) {
        LegalObject legalcomponent = new LegalObject();
        ontologyTimer.start();
        ontologyTemp = m_oManager.getOntologyForSelectedTerms(temp, m_config.ontologyConfig);
        ontologyTimer.stop();
        legalcomponent.sentence = temp;
        entitiesTimer.start();
        legalcomponent.entities = m_eManager.getSelectedEntitiesForSentence(temp, m_config.entityConfig);
        entitiesTimer.stop();
        logger.info("Inserting person entities");
        insertEntity(personEntities, legalcomponent.entities.person);
        logger.info("Inserting OrganiZation entities");
        insertEntity(orgEntities, legalcomponent.entities.organization);
        legalcomponent.events = ontologyTemp.get("Events");
        legalcomponent.topics = ontologyTemp.get("Topics");

        legalcomponents.add(legalcomponent);
    }
    logger.info("getting coref for selected entities and store it in a map");
    // NOTE(review): coref_out is never read afterwards — the call may have side
    // effects inside m_coref, so it is kept; confirm before removing.
    Map<String, Map<String, Set<String>>> coref_out = m_coref.getCorefForSelectedEntites(sDoc, personEntities,
            orgEntities, m_config.corefConfig);
    logger.info("getting coref Inverse Map for person entity");

    Map<String, Set<String>> coref_person = Duke.getCoref(personEntities);
    Map<String, Set<String>> coref_org = Duke.getCoref(orgEntities);

    Map<String, String> gpersonCoref = getCorefInvMap(coref_person);
    logger.info("getting coref Inverse Map for Organization entity");
    Map<String, String> gorgCoref = getCorefInvMap(coref_org);

    // Attach per-sentence alias maps resolved from the global coref results.
    for (LegalObject temp : legalcomponents) {
        temp.personAlias = getMatchedCoref(gpersonCoref, temp.entities.person);
        temp.orgAlias = getMatchedCoref(gorgCoref, temp.entities.organization);
    }

    jsonOutput = gson.toJson(legalcomponents);
    // Stop the overall timer BEFORE logging it, so the logged value is the
    // final elapsed time (previously it was logged while still running).
    allTimer.stop();
    logger.info("Person Organization took" + entitiesTimer);
    logger.info("Topics and Events took" + ontologyTimer);
    logger.info("Total time taken" + allTimer);

    return jsonOutput;
}

From source file:com.Grande.GSM.BACCWS_WAR.WS.REST.EOS.FirmwareDefinitionsEndpoint.java

@GET
public String fetchFirmwareDefinitionsByMakeModel(@QueryParam("make") final String strMake,
        @QueryParam("model") final String strModel) {

    // <editor-fold defaultstate="collapsed" desc="****** Method vars ******">
    final Stopwatch timer = new Stopwatch();
    final QueryResponse qRes = new QueryResponse();
    String strResponse = null;
    FirmwareDefinition fdThis = null;
    // start the execution timer
    timer.start();
    // </editor-fold>

    try {

        qRes.vSetNode(java.net.InetAddress.getLocalHost().getHostName());

        // Look up the firmware definition; absence is reported as a failure
        // response rather than an HTTP error.
        fdThis = this.trnBN.pGetFirmwareDefinitionByMakeAndModel(strMake, strModel)
                .orElseThrow(() -> new Exception(
                        "Make/model \"" + strMake + "/" + strModel + "\" not found in the firmware table"));
        qRes.vSetSuccessFlag(true);
        qRes.vAddResult(fdThis);

    } catch (Exception e) {

        // <editor-fold defaultstate="collapsed" desc="****** Handle failures ******">
        qRes.vSetSuccessFlag(false);
        // handle NPE differently since getMessage() is null
        if (e instanceof NullPointerException) {
            qRes.vSetMessage("NPE occured when serializing result to JSON! " + "File: "
                    + e.getStackTrace()[0].getFileName() + ", " + "Method: "
                    + e.getStackTrace()[0].getMethodName() + ", " + "Line: "
                    + e.getStackTrace()[0].getLineNumber());
        } else {
            qRes.vSetMessage(e.getMessage());
        }
        SimpleLogging.vLogException(this.strThreadId, e);
        // </editor-fold>

    } finally {

        // <editor-fold defaultstate="collapsed" desc="****** Stop timer, convert response to JSON ******">
        timer.stop();
        // Round trip is "seconds.millis". The previous code appended the TOTAL
        // elapsed milliseconds after the dot (e.g. 1500 ms -> "1.1500"); use
        // only the sub-second remainder, zero-padded to three digits.
        final long lngElapsedMs = timer.elapsedTime(TimeUnit.MILLISECONDS);
        qRes.vSetRoundTrip(String.format("%d.%03d", lngElapsedMs / 1000, lngElapsedMs % 1000));
        strResponse = this.trnBN.strQueryResponseToJSON(qRes);
        SimpleLogging.vLogEvent(this.strThreadId + "|" + qRes.strGetRoundTripInSeconds() + "s",
                "retrieved " + qRes.intGetDataCount() + " records");
        // </editor-fold>

    }
    return strResponse;
}

From source file:org.apache.hadoop.hbase.ScanPerformanceEvaluation.java

/**
 * Reads the given HDFS file from start to finish with plain stream reads and
 * prints open time, read time, byte count, and throughput.
 */
protected void testHdfsStreaming(Path filename) throws IOException {
    byte[] buf = new byte[1024];
    FileSystem fs = filename.getFileSystem(getConf());

    // read the file from start to finish
    Stopwatch fileOpenTimer = new Stopwatch();
    Stopwatch streamTimer = new Stopwatch();

    fileOpenTimer.start();
    FSDataInputStream in = fs.open(filename);
    fileOpenTimer.stop();

    long totalBytes = 0;
    streamTimer.start();
    try {
        while (true) {
            int read = in.read(buf);
            if (read < 0) {
                break;
            }
            totalBytes += read;
        }
    } finally {
        // the stream was previously never closed (resource leak)
        in.close();
    }
    streamTimer.stop();

    // Compute throughput from milliseconds to avoid division by zero (and a
    // meaningless result) when the whole read takes under one second.
    long elapsedMs = streamTimer.elapsedMillis();
    double throughput = elapsedMs == 0 ? 0.0 : (double) totalBytes * 1000.0 / elapsedMs;

    System.out.println("HDFS streaming: ");
    System.out.println("total time to open: " + fileOpenTimer.elapsedMillis() + " ms");
    System.out.println("total time to read: " + streamTimer.elapsedMillis() + " ms");
    System.out.println(
            "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
}

From source file:com.springer.omelet.mail.Email.java

/***
 * Return List of Message filter by Subject,From_ADD,To_ADDR
 * /*from   w w w .j  av  a2  s .co m*/
 * @param emailFilter
 * @param filterText
 *            :text present in Subject of email
 * @return
 */
public List<Message> getMessages(EMAIL_FILTER emailFilter, String filterText) {
    Stopwatch sw = new Stopwatch();
    sw.start();

    List<Message> returnMessage = new ArrayList<Message>();
    int loopCount;
    try {
        folder.open(Folder.READ_ONLY);
        Message[] msgs = folder.getMessages();
        int inboMessageCount = folder.getMessageCount();
        LOGGER.info("Message count is:" + inboMessageCount);
        if (inboMessageCount < maxcountEMailCheck) {
            loopCount = 0;
        } else {
            loopCount = inboMessageCount - maxcountEMailCheck;
        }
        for (int i = inboMessageCount - 1; i >= loopCount; i--) {
            switch (emailFilter) {
            case SUBJECT:
                if (msgs[i].getSubject().toString().equalsIgnoreCase(filterText)) {
                    returnMessage.add(msgs[i]);
                }
                break;
            case FROM_ADD:
                // Assumption is from address is only one
                if (msgs[i].getFrom()[0].toString().contains(filterText)) {
                    returnMessage.add(msgs[i]);
                }
                break;
            case TO_ADDR:
                for (Address addr : msgs[i].getRecipients(RecipientType.TO)) {
                    LOGGER.info("Sno:" + i + "To Email Add is" + addr.toString());
                    if (addr.toString().contains(filterText)) {
                        returnMessage.add(msgs[i]);
                    }
                }
                break;
            default:
                break;
            }
        }
        // CLose the folder
        folder.close(true);
    } catch (MessagingException e) {
        LOGGER.error(e);
    }
    sw.stop();
    LOGGER.info("Time Taken by getMessage is" + sw.elapsedTime(TimeUnit.SECONDS));
    return returnMessage;
}

From source file:annis.gui.exporter.GeneralTextExporter.java

@Override
public boolean convertText(String queryAnnisQL, int contextLeft, int contextRight, Set<String> corpora,
        List<String> keys, String argsAsString, WebResource annisResource, Writer out, EventBus eventBus) {
    try {
        // int count = service.getCount(corpusIdList, queryAnnisQL);

        if (keys == null || keys.isEmpty()) {
            // No explicit annotation keys requested: default to "tok" plus the
            // (namespace-stripped) name of every annotation advertised by the
            // selected corpora.
            keys = new LinkedList<>();
            keys.add("tok");
            List<AnnisAttribute> attributes = new LinkedList<>();

            for (String corpus : corpora) {
                attributes.addAll(annisResource.path("corpora").path(urlPathEscape.escape(corpus))
                        .path("annotations").queryParam("fetchvalues", "false")
                        .queryParam("onlymostfrequentvalues", "false").get(new AnnisAttributeListType()));
            }

            for (AnnisAttribute a : attributes) {
                if (a.getName() != null) {
                    // Keep only the local part of "namespace:name".
                    String[] namespaceAndName = a.getName().split(":", 2);
                    if (namespaceAndName.length > 1) {
                        keys.add(namespaceAndName[1]);
                    } else {
                        keys.add(namespaceAndName[0]);
                    }
                }
            }
        }

        // Parse "key=val" pairs separated by '&' or ';' into an args map;
        // a bare "key" maps to the empty string.
        Map<String, String> args = new HashMap<>();
        for (String s : argsAsString.split("&|;")) {
            String[] splitted = s.split("=", 2);
            String key = splitted[0];
            String val = "";
            if (splitted.length > 1) {
                val = splitted[1];
            }
            args.put(key, val);
        }

        // Number of matches fetched per subgraph request; grown adaptively below.
        int stepSize = 10;

        // 1. Get all the matches as Salt ID
        InputStream matchStream = annisResource.path("search/find/")
                .queryParam("q", Helper.encodeJersey(queryAnnisQL))
                .queryParam("corpora", StringUtils.join(corpora, ",")).accept(MediaType.TEXT_PLAIN_TYPE)
                .get(InputStream.class);

        try (BufferedReader inReader = new BufferedReader(new InputStreamReader(matchStream, "UTF-8"))) {
            WebResource subgraphRes = annisResource.path("search/subgraph");
            MatchGroup currentMatches = new MatchGroup();
            String currentLine;
            int offset = 0;
            // 2. iterate over all matches and get the sub-graph for a group of matches
            while (!Thread.currentThread().isInterrupted() && (currentLine = inReader.readLine()) != null) {
                Match match = Match.parseFromString(currentLine);

                currentMatches.getMatches().add(match);

                // Batch full: fetch and convert this group of matches.
                if (currentMatches.getMatches().size() >= stepSize) {
                    WebResource res = subgraphRes.queryParam("left", "" + contextLeft).queryParam("right",
                            "" + contextRight);

                    if (args.containsKey("segmentation")) {
                        res = res.queryParam("segmentation", args.get("segmentation"));
                    }

                    SubgraphFilter filter = getSubgraphFilter();
                    if (filter != null) {
                        res = res.queryParam("filter", filter.name());
                    }

                    Stopwatch stopwatch = new Stopwatch();
                    stopwatch.start();
                    SaltProject p = res.post(SaltProject.class, currentMatches);
                    stopwatch.stop();

                    // dynamically adjust the number of items to fetch if single subgraph
                    // export was fast enough
                    if (stopwatch.elapsed(TimeUnit.MILLISECONDS) < 500 && stepSize < 50) {
                        stepSize += 10;
                    }

                    // The offset passed down is the index of the FIRST match in
                    // this batch (offset currently points at the last one).
                    convertText(LegacyGraphConverter.convertToResultSet(p), keys, args, out,
                            offset - currentMatches.getMatches().size());

                    currentMatches.getMatches().clear();

                    if (eventBus != null) {
                        // Progress notification: number of matches processed so far.
                        eventBus.post(offset + 1);
                    }
                }
                offset++;
            } // end for each line

            // NOTE: Thread.interrupted() also CLEARS the interrupt flag; here the
            // loop above has already stopped, so this only reports the abort.
            if (Thread.interrupted()) {
                // return from loop and abort export
                log.info("Exporter job was interrupted");
                return false;
            }

            // query the left over matches (final partial batch)
            if (!currentMatches.getMatches().isEmpty()) {
                WebResource res = subgraphRes.queryParam("left", "" + contextLeft).queryParam("right",
                        "" + contextRight);
                if (args.containsKey("segmentation")) {
                    res = res.queryParam("segmentation", args.get("segmentation"));
                }

                SubgraphFilter filter = getSubgraphFilter();
                if (filter != null) {
                    res = res.queryParam("filter", filter.name());
                }

                SaltProject p = res.post(SaltProject.class, currentMatches);
                convertText(LegacyGraphConverter.convertToResultSet(p), keys, args, out,
                        offset - currentMatches.getMatches().size() - 1);
            }
            offset = 0;

        }

        // Trailer expected by the UI to mark a completed export.
        out.append("\n");
        out.append("\n");
        out.append("finished");

        return true;

    } catch (AnnisQLSemanticsException | AnnisQLSyntaxException | AnnisCorpusAccessException
            | RemoteException ex) {
        log.error(null, ex);
    } catch (IOException ex) {
        log.error(null, ex);
    }
    return false;
}

From source file:org.zanata.service.impl.MergeTranslationsServiceImpl.java

/**
 * Merges translated targets from the source version into the target version for
 * one batch of context-matched text flows, firing state-change and doc-stats
 * events for everything that changed.
 *
 * @return number of processed (textflow x locale) combinations for this batch
 */
private Integer mergeTranslations(final Long sourceVersionId, final Long targetVersionId, final int batchStart,
        final int batchLength, final boolean useNewerTranslation, final List<HLocale> supportedLocales)
        throws Exception {
    // createStarted() replaces the previous createUnstarted()+start() pair.
    final Stopwatch stopwatch = Stopwatch.createStarted();
    List<HTextFlow[]> matches = textFlowDAO.getSourceByMatchedContext(sourceVersionId, targetVersionId,
            batchStart, batchLength);
    Multimap<DocumentLocaleKey, TextFlowTargetStateChange> eventMap = HashMultimap.create();
    Map<DocumentLocaleKey, Map<ContentState, Long>> docStatsMap = Maps.newHashMap();
    Map<DocumentLocaleKey, Long> lastUpdatedTargetId = Maps.newHashMap();
    for (HTextFlow[] results : matches) {
        HTextFlow sourceTf = results[0];
        HTextFlow targetTf = results[1];
        boolean foundChange = false;
        // Remembers the PREVIOUS content state per locale for event reporting.
        Map<Long, ContentState> localeContentStateMap = Maps.newHashMap();
        for (HLocale hLocale : supportedLocales) {
            HTextFlowTarget sourceTft = sourceTf.getTargets().get(hLocale.getId());
            // only process translated state
            if (sourceTft == null || !sourceTft.getState().isTranslated()) {
                continue;
            }
            HTextFlowTarget targetTft = targetTf.getTargets().get(hLocale.getId());
            if (targetTft == null) {
                targetTft = new HTextFlowTarget(targetTf, hLocale);
                targetTft.setVersionNum(0);
                targetTf.getTargets().put(hLocale.getId(), targetTft);
            }
            if (MergeTranslationsServiceImpl.shouldMerge(sourceTft, targetTft, useNewerTranslation)) {
                foundChange = true;
                ContentState oldState = targetTft.getState();
                localeContentStateMap.put(hLocale.getId(), oldState);
                mergeTextFlowTarget(sourceTft, targetTft);
            }
        }
        if (foundChange) {
            translationStateCacheImpl.clearDocumentStatistics(targetTf.getDocument().getId());
            textFlowDAO.makePersistent(targetTf);
            textFlowDAO.flush();
            for (Map.Entry<Long, ContentState> entry : localeContentStateMap.entrySet()) {
                HTextFlowTarget updatedTarget = targetTf.getTargets().get(entry.getKey());
                DocumentLocaleKey key = new DocumentLocaleKey(targetTf.getDocument().getId(),
                        updatedTarget.getLocale().getLocaleId());
                eventMap.put(key, new TextFlowTargetStateEvent.TextFlowTargetStateChange(targetTf.getId(),
                        updatedTarget.getId(), updatedTarget.getState(), entry.getValue()));
                lastUpdatedTargetId.put(key, updatedTarget.getId());
                // Single lookup instead of the previous double get(key) ternary.
                Map<ContentState, Long> contentStateDeltas = docStatsMap.get(key);
                if (contentStateDeltas == null) {
                    contentStateDeltas = Maps.newHashMap();
                }
                DocStatsEvent.updateContentStateDeltas(contentStateDeltas, updatedTarget.getState(),
                        entry.getValue(), targetTf.getWordCount());
                docStatsMap.put(key, contentStateDeltas);
            }
        }
    }
    Long actorId = authenticatedAccount.getPerson().getId();
    // Fire one aggregated state-change event per document/locale pair.
    for (Map.Entry<DocumentLocaleKey, Collection<TextFlowTargetStateChange>> entry : eventMap.asMap()
            .entrySet()) {
        TextFlowTargetStateEvent tftUpdatedEvent = new TextFlowTargetStateEvent(entry.getKey(), targetVersionId,
                actorId, ImmutableList.copyOf(entry.getValue()));
        textFlowTargetStateEvent.fire(tftUpdatedEvent);
    }
    // Fire one statistics-delta event per document/locale pair.
    for (Map.Entry<DocumentLocaleKey, Map<ContentState, Long>> entry : docStatsMap.entrySet()) {
        DocStatsEvent docEvent = new DocStatsEvent(entry.getKey(), targetVersionId, entry.getValue(),
                lastUpdatedTargetId.get(entry.getKey()));
        docStatsEvent.fire(docEvent);
    }
    stopwatch.stop();
    log.info("Complete merge translations of {} in {}", matches.size() * supportedLocales.size(), stopwatch);
    return matches.size() * supportedLocales.size();
}

From source file:org.eclipse.osee.orcs.core.internal.types.impl.OrcsTypesIndexer.java

/**
 * Loads an OseeDsl model from the given resource and builds the full set of
 * type indexes (artifact, attribute, enum, relation) from it.
 */
public OrcsTypesIndex index(IResource source) throws Exception {
    // Times both phases (model load, then indexing) for trace logging.
    Stopwatch timer = new Stopwatch();
    timer.start();

    OseeDslResource dslResource = null;
    InputStream stream = null;
    try {
        stream = source.getContent();
        stream = upConvertTo17(stream);
        dslResource = OseeDslResourceUtil.loadModel(source.getLocation().toASCIIString(), stream);
    } finally {
        Lib.close(stream);
    }
    logger.trace("Converted OrcsTypes to model in [%s]", Lib.getElapseString(timer.elapsedMillis()));

    Conditions.checkNotNull(dslResource, "osee dsl model", "Error reading osee dsl resource");
    OseeDsl model = dslResource.getModel();

    ArtifactTypeIndex artifactTypeIndex = new ArtifactTypeIndex(hierarchyProvider);
    AttributeTypeIndex attributeTypeIndex = new AttributeTypeIndex();
    EnumTypeIndex enumTypeIndex = new EnumTypeIndex();
    RelationTypeIndex relationTypeIndex = new RelationTypeIndex(artifactTypeIndex);
    OrcsIndeces index = new OrcsIndeces(source, artifactTypeIndex, attributeTypeIndex, enumTypeIndex,
            relationTypeIndex);

    try {
        // Overrides are applied before any type tokens are created.
        for (XOseeArtifactTypeOverride artTypeOverride : model.getArtifactTypeOverrides()) {
            applyArtifactTypeOverrides(artTypeOverride);
        }
        for (XOseeEnumOverride enumOverride : model.getEnumOverrides()) {
            applyEnumOverrides(enumOverride);
        }

        for (XAttributeType attrType : model.getAttributeTypes()) {
            getOrCreateToken(attributeTypeIndex, attrType);
        }

        // Artifact types also record their super-type hierarchy and attributes.
        for (XArtifactType artType : model.getArtifactTypes()) {
            IArtifactType token = getOrCreateToken(artifactTypeIndex, artType);
            indexSuperTypes(artifactTypeIndex, token, artType);
            indexAttributes(artifactTypeIndex, attributeTypeIndex, artType);
        }

        for (XRelationType relType : model.getRelationTypes()) {
            getOrCreateToken(relationTypeIndex, relType);
        }

        for (XOseeEnumType enumType : model.getEnumTypes()) {
            getOrCreateEnumType(enumTypeIndex, enumType);
        }
    } finally {
        logger.trace("Indexed OseeDsl model in [%s]", Lib.getElapseString(timer.elapsedMillis()));
        timer.stop();
    }
    return index;
}

From source file:com.Grande.GSM.BACCWS_WAR.WS.REST.EOS.BACCAdminEndpoint.java

@Path("/SystemProperties")
@GET
public String fetchSystemProperties() {

    // <editor-fold defaultstate="collapsed" desc="****** Method vars ******">
    final Stopwatch timer = new Stopwatch();
    final QueryResponse qRes = new QueryResponse();
    String strResponse = null;
    Map mapResult = null;
    // start the execution timer
    timer.start();
    // </editor-fold>

    try {

        qRes.vSetNode(java.net.InetAddress.getLocalHost().getHostName());
        mapResult = this.bacEJB.lstFetchSystemProperties();
        qRes.vSetSuccessFlag(true);
        qRes.vAddResult(mapResult);

    } catch (Exception e) {

        // <editor-fold defaultstate="collapsed" desc="****** Handle failures ******">
        qRes.vSetSuccessFlag(false);
        // handle NPE differently since getMessage() is null
        if (e instanceof NullPointerException) {
            qRes.vSetMessage("NPE occured when serializing result to JSON! " + "File: "
                    + e.getStackTrace()[0].getFileName() + ", " + "Method: "
                    + e.getStackTrace()[0].getMethodName() + ", " + "Line: "
                    + e.getStackTrace()[0].getLineNumber());
        } else {
            qRes.vSetMessage(e.getMessage());
        }
        SimpleLogging.vLogException(this.strThreadId, e);
        // </editor-fold>

    } finally {

        // <editor-fold defaultstate="collapsed" desc="****** Stop timer, convert response to JSON ******">
        timer.stop();
        // Round trip is "seconds.millis". The previous code appended the TOTAL
        // elapsed milliseconds after the dot (e.g. 1500 ms -> "1.1500"); use
        // only the sub-second remainder, zero-padded to three digits.
        final long lngElapsedMs = timer.elapsedTime(TimeUnit.MILLISECONDS);
        qRes.vSetRoundTrip(String.format("%d.%03d", lngElapsedMs / 1000, lngElapsedMs % 1000));
        strResponse = this.trnBN.strQueryResponseToJSON(qRes);
        SimpleLogging.vLogEvent(this.strThreadId + "|" + qRes.strGetRoundTripInSeconds() + "s",
                "retrieved " + qRes.intGetDataCount() + " records");
        // </editor-fold>

    }
    return strResponse;
}

From source file:com.Grande.GSM.BACCWS_WAR.WS.REST.EOS.BACCAdminEndpoint.java

@Path("/AllowedProperties")
@GET
public String fetchAllowedProperties() {

    // <editor-fold defaultstate="collapsed" desc="****** Method vars ******">
    final Stopwatch timer = new Stopwatch();
    final QueryResponse qRes = new QueryResponse();
    String strResponse = null;
    List lstResult = null;
    // start the execution timer
    timer.start();
    // </editor-fold>

    try {

        qRes.vSetNode(java.net.InetAddress.getLocalHost().getHostName());
        lstResult = this.bacEJB.lstFetchAllowedProperties();
        qRes.vSetSuccessFlag(true);
        qRes.vSquashResult(lstResult);

    } catch (Exception e) {

        // <editor-fold defaultstate="collapsed" desc="****** Handle failures ******">
        qRes.vSetSuccessFlag(false);
        // handle NPE differently since getMessage() is null
        if (e instanceof NullPointerException) {
            qRes.vSetMessage("NPE occured when serializing result to JSON! " + "File: "
                    + e.getStackTrace()[0].getFileName() + ", " + "Method: "
                    + e.getStackTrace()[0].getMethodName() + ", " + "Line: "
                    + e.getStackTrace()[0].getLineNumber());
        } else {
            qRes.vSetMessage(e.getMessage());
        }
        SimpleLogging.vLogException(this.strThreadId, e);
        // </editor-fold>

    } finally {

        // <editor-fold defaultstate="collapsed" desc="****** Stop timer, convert response to JSON ******">
        timer.stop();
        // Round trip is "seconds.millis". The previous code appended the TOTAL
        // elapsed milliseconds after the dot (e.g. 1500 ms -> "1.1500"); use
        // only the sub-second remainder, zero-padded to three digits.
        final long lngElapsedMs = timer.elapsedTime(TimeUnit.MILLISECONDS);
        qRes.vSetRoundTrip(String.format("%d.%03d", lngElapsedMs / 1000, lngElapsedMs % 1000));
        strResponse = this.trnBN.strQueryResponseToJSON(qRes);
        SimpleLogging.vLogEvent(this.strThreadId + "|" + qRes.strGetRoundTripInSeconds() + "s",
                "retrieved " + qRes.intGetDataCount() + " records");
        // </editor-fold>

    }
    return strResponse;
}

From source file:com.Grande.GSM.BACCWS_WAR.WS.REST.EOS.BACCAdminEndpoint.java

@Path("/AllowedDHCPCriterias")
@GET
public String fetchAllowedDHCPCriterias() {

    // <editor-fold defaultstate="collapsed" desc="****** Method vars ******">
    final Stopwatch timer = new Stopwatch();
    final QueryResponse qRes = new QueryResponse();
    String strResponse = null;
    List lstResult = null;
    // start the execution timer
    timer.start();
    // </editor-fold>

    try {

        qRes.vSetNode(java.net.InetAddress.getLocalHost().getHostName());
        lstResult = this.bacEJB.lstFetchAllowedDHCPCriterias();
        qRes.vSetSuccessFlag(true);
        qRes.vSquashResult(lstResult);

    } catch (Exception e) {

        // <editor-fold defaultstate="collapsed" desc="****** Handle failures ******">
        qRes.vSetSuccessFlag(false);
        // handle NPE differently since getMessage() is null
        if (e instanceof NullPointerException) {
            qRes.vSetMessage("NPE occured when serializing result to JSON! " + "File: "
                    + e.getStackTrace()[0].getFileName() + ", " + "Method: "
                    + e.getStackTrace()[0].getMethodName() + ", " + "Line: "
                    + e.getStackTrace()[0].getLineNumber());
        } else {
            qRes.vSetMessage(e.getMessage());
        }
        SimpleLogging.vLogException(this.strThreadId, e);
        // </editor-fold>

    } finally {

        // <editor-fold defaultstate="collapsed" desc="****** Stop timer, convert response to JSON ******">
        timer.stop();
        // Round trip is "seconds.millis". The previous code appended the TOTAL
        // elapsed milliseconds after the dot (e.g. 1500 ms -> "1.1500"); use
        // only the sub-second remainder, zero-padded to three digits.
        final long lngElapsedMs = timer.elapsedTime(TimeUnit.MILLISECONDS);
        qRes.vSetRoundTrip(String.format("%d.%03d", lngElapsedMs / 1000, lngElapsedMs % 1000));
        strResponse = this.trnBN.strQueryResponseToJSON(qRes);
        SimpleLogging.vLogEvent(this.strThreadId + "|" + qRes.strGetRoundTripInSeconds() + "s",
                "retrieved " + qRes.intGetDataCount() + " records");
        // </editor-fold>

    }
    return strResponse;
}