Example usage for org.apache.commons.lang.time StopWatch stop

List of usage examples for org.apache.commons.lang.time StopWatch stop

Introduction

On this page you can find example usage for org.apache.commons.lang.time StopWatch stop.

Prototype

public void stop() 

Source Link

Document

Stop the stopwatch.

This method ends the current timing session, allowing the elapsed time to be retrieved.
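All of the examples below follow the same pattern: start() before the work being timed, stop() afterwards (often in a finally block so the stopwatch is stopped even when an exception is thrown), and then getTime() or toString() to read the elapsed time. A minimal sketch of that pattern, assuming commons-lang 2.x on the classpath (the class name and the sleep call are illustrative only):

import org.apache.commons.lang.time.StopWatch;

public class StopWatchStopDemo {
    public static void main(String[] args) throws InterruptedException {
        StopWatch watch = new StopWatch();
        watch.start();
        try {
            Thread.sleep(250); // stand-in for the work being timed
        } finally {
            // stop() ends the timing session; getTime() then reports the
            // elapsed milliseconds. Calling stop() on a stopwatch that is
            // not running throws IllegalStateException.
            watch.stop();
        }
        System.out.println("Took " + watch.getTime() + " ms");
    }
}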

Usage

From source file:eagle.service.generic.ListQueryResource.java

/**
 * TODO refactor the code structure,  now it's messy
 * @param query
 * @param startTime
 * @param endTime
 * @param pageSize
 * @param startRowkey
 * @param treeAgg
 * @param timeSeries
 * @param intervalmin
 * @return
 */
@GET
@Produces({ MediaType.APPLICATION_JSON })
public ListQueryAPIResponseEntity listQuery(@QueryParam("query") String query,
        @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime,
        @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey,
        @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries,
        @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top,
        @QueryParam("filterIfMissing") boolean filterIfMissing, @QueryParam("parallel") int parallel,
        @QueryParam("metricName") String metricName, @QueryParam("verbose") Boolean verbose) {
    if (!EagleConfigFactory.load().isCoprocessorEnabled())
        return listQueryWithoutCoprocessor(query, startTime, endTime, pageSize, startRowkey, treeAgg,
                timeSeries, intervalmin, top, filterIfMissing, parallel, metricName, verbose);

    StopWatch watch = new StopWatch();
    watch.start();
    ListQueryAPIResponseEntity result = new ListQueryAPIResponseEntity();
    try {
        validateQueryParameters(startRowkey, pageSize);

        // 1. Compile query to parse parameters and HBase Filter
        ListQueryCompiler comp = new ListQueryCompiler(query, filterIfMissing);
        String serviceName = comp.serviceName();

        SearchCondition condition = new SearchCondition();
        condition.setOutputVerbose(verbose == null || verbose);
        condition.setOutputAlias(comp.getOutputAlias());
        condition.setFilter(comp.filter());
        condition.setQueryExpression(comp.getQueryExpression());
        if (comp.sortOptions() == null && top > 0) {
            LOG.warn(
                    "Parameter \"top\" is only used for sort query! Ignore top parameter this time since it's not a sort query");
        }

        // 2. Initialize partition values if set
        // TODO: For now we don't support one query to query multiple partitions. In future 
        // if partition is defined for the entity, internally We need to spawn multiple
        // queries and send one query for each search condition for each partition
        final List<String[]> partitionValues = comp.getQueryPartitionValues();
        if (partitionValues != null) {
            condition.setPartitionValues(Arrays.asList(partitionValues.get(0)));
        }

        // 3. Set time range if it's timeseries service
        EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName(serviceName);
        if (ed.isTimeSeries()) {
            // TODO check timestamp exists for timeseries or topology data
            condition.setStartTime(startTime);
            condition.setEndTime(endTime);
        }

        // 4. Set HBase start scanning rowkey if given
        condition.setStartRowkey(startRowkey);

        // 5. Set page size
        condition.setPageSize(pageSize);

        // 6. Generate output,group-by,aggregated fields
        List<String> outputFields = comp.outputFields();
        List<String> groupbyFields = comp.groupbyFields();
        List<String> aggregateFields = comp.aggregateFields();
        Set<String> filterFields = comp.getFilterFields();

        // Start to generate output fields list {
        condition.setOutputAll(comp.isOutputAll());
        if (outputFields == null)
            outputFields = new ArrayList<String>();
        if (comp.hasAgg()) {
            if (groupbyFields != null)
                outputFields.addAll(groupbyFields);
            if (aggregateFields != null)
                outputFields.addAll(aggregateFields);
            if (GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName)
                    && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)) {
                outputFields.add(GenericMetricEntity.VALUE_FIELD);
            }
        }
        if (filterFields != null)
            outputFields.addAll(filterFields);
        condition.setOutputFields(outputFields);
        if (comp.isOutputAll()) {
            LOG.info("Output fields: ALL");
        } else {
            LOG.info("Output fields: " + StringUtils.join(outputFields, ","));
        }
        // } END

        // 7. Build GenericQuery
        GenericQuery reader = GenericQueryBuilder.select(outputFields).from(serviceName, metricName)
                .where(condition)
                .groupBy(comp.hasAgg(), groupbyFields, comp.aggregateFunctionTypes(), aggregateFields)
                .timeSeries(timeSeries, intervalmin).treeAgg(treeAgg)
                .orderBy(comp.sortOptions(), comp.sortFunctions(), comp.sortFields()).top(top)
                .parallel(parallel).build();

        // 8. Fill response object
        List entities = reader.result();
        result.setObj(entities);
        result.setTotalResults(entities.size());
        result.setSuccess(true);
        result.setLastTimestamp(reader.getLastTimestamp());
        result.setFirstTimestamp(reader.getFirstTimeStamp());
    } catch (Exception ex) {
        LOG.error("Fail executing list query", ex);
        result.setException(EagleExceptionWrapper.wrap(ex));
        result.setSuccess(false);
        return result;
    } finally {
        watch.stop();
        result.setElapsedms(watch.getTime());
    }
    LOG.info("Query done " + watch.getTime() + " ms");
    return result;
}

From source file:com.mothsoft.alexis.dao.DocumentDaoImpl.java

@SuppressWarnings("unchecked")
public Graph getRelatedTerms(final String queryString, final Long userId, final int howMany) {
    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    final FullTextQuery fullTextQuery = this.buildFullTextQuery(queryString, userId, NO_DATE, NO_DATE, false,
            DocumentState.MATCHED_TO_TOPICS, FullTextQuery.ID);

    // find the specified number of terms from the most recent 100 documents
    // that match the query
    final Sort sort = new Sort(new SortField("creationDate", SortField.LONG, true));
    fullTextQuery.setSort(sort);
    fullTextQuery.setFirstResult(0);
    fullTextQuery.setMaxResults(100);

    final List<Long> documentIds = new ArrayList<Long>(100);
    final List<Long> termIds = new ArrayList<Long>(100);

    final List<Object[]> results = fullTextQuery.list();

    for (final Object[] ith : results) {
        final Long id = (Long) ith[0];
        documentIds.add(id);
    }

    final Map<String, Node> nodes = new LinkedHashMap<String, Node>();
    final Node root = new Node(queryString, Boolean.TRUE);
    nodes.put(queryString, root);

    final Map<String, Edge> edges = new HashMap<String, Edge>();

    if (!documentIds.isEmpty()) {
        final Session session = (Session) this.em.getDelegate();
        final org.hibernate.SQLQuery termsQuery = session.createSQLQuery("SELECT term.id "
                + "        FROM document_term dt INNER JOIN term on term.id = dt.term_id "
                + "        WHERE dt.document_id IN (:documentIds) GROUP BY term.id ORDER BY SUM(dt.tf_idf) DESC");
        termsQuery.setParameterList("documentIds", documentIds);
        termsQuery.setMaxResults(100);
        termIds.addAll((List<Long>) termsQuery.list());
    }

    if (!documentIds.isEmpty() && !termIds.isEmpty()) {

        final Session session = (Session) this.em.getDelegate();
        final org.hibernate.SQLQuery associationsQuery = session.createSQLQuery(
                "SELECT CONCAT(a.term_value) term_a_value, CONCAT(b.term_value) term_b_value, SUM(da.association_weight) sum_weight "
                        + "      FROM document_association da "
                        + "      INNER JOIN term a ON da.term_a_id = a.id "
                        + "        AND a.part_of_speech NOT IN (1, 3, 18, 19, 25, 39, 40) "
                        + "        AND length(a.term_value) > 2 "
                        + "      INNER JOIN term b ON da.term_b_id = b.id "
                        + "        AND b.part_of_speech NOT IN (1, 3, 18, 19, 25, 39, 40) "
                        + "        AND length(b.term_value) > 2 "
                        + "      WHERE da.document_id IN (:documentIds) AND (da.term_a_id IN (:termIds) OR da.term_b_id IN (:termIds)) "
                        + "      GROUP BY a.id, b.id ORDER BY sum_weight DESC");
        associationsQuery.setParameterList("documentIds", documentIds);
        associationsQuery.setParameterList("termIds", termIds);
        associationsQuery.setMaxResults(howMany);

        final List<Object[]> relatedTermsResults = associationsQuery.list();

        final Set<String> aNodeKeys = new HashSet<String>();
        final Set<String> bNodeKeys = new HashSet<String>();

        for (final Object[] ith : relatedTermsResults) {
            final String a = (String) ith[0];
            final String b = (String) ith[1];

            if (!nodes.containsKey(a)) {
                final Node node = new Node(a);
                nodes.put(a, node);
            }

            if (!nodes.containsKey(b)) {
                final Node node = new Node(b);
                nodes.put(b, node);
            }

            if (a.equals(b)) {
                continue;
            }

            final String edgeKey = a + "||" + b;
            final String edgeKeyInverse = b + "||" + a;
            if (!edges.containsKey(edgeKey) && !edges.containsKey(edgeKeyInverse)) {
                final Node nodeA = nodes.get(a);
                final Node nodeB = nodes.get(b);

                aNodeKeys.add(a);
                bNodeKeys.add(b);

                final Edge edge = new Edge(nodeA, nodeB);
                edges.put(edgeKey, edge);
            }
        }

        // "orphan" handling, any b that is not also an a needs an edge from
        // root
        final Set<String> orphanKeys = new HashSet<String>();
        orphanKeys.addAll(bNodeKeys);
        orphanKeys.removeAll(aNodeKeys);

        for (final String orphanKey : orphanKeys) {
            final Node orphan = nodes.get(orphanKey);
            final Edge orphanToParent = new Edge(root, orphan);
            edges.put(root.getName() + "||" + orphan.getName(), orphanToParent);
        }
    }

    final List<Node> nodeList = new ArrayList<Node>(nodes.size());
    // keep root as first element
    nodes.remove(root.getName());
    nodeList.add(root);
    nodeList.addAll(nodes.values());

    final Graph graph = new Graph(nodeList, new ArrayList<Edge>(edges.values()));

    stopWatch.stop();
    logger.info("Related terms search took: " + stopWatch.toString());

    return graph;
}

From source file:eionet.meta.service.VocabularyServiceImpl.java

/**
 * {@inheritDoc}
 */
@Override
@Transactional(rollbackFor = ServiceException.class)
public int checkInVocabularyFolder(int vocabularyFolderId, String userName) throws ServiceException {
    if (StringUtils.isBlank(userName)) {
        throw new IllegalArgumentException("User name must not be blank!");
    }

    try {
        StopWatch timer = new StopWatch();
        timer.start();
        VocabularyFolder vocabularyFolder = vocabularyFolderDAO.getVocabularyFolder(vocabularyFolderId);

        if (!vocabularyFolder.isWorkingCopy()) {
            throw new ServiceException("Vocabulary is not a working copy.");
        }

        if (!StringUtils.equals(userName, vocabularyFolder.getWorkingUser())) {
            throw new ServiceException("Check-in user is not the current working user.");
        }

        int originalVocabularyFolderId = vocabularyFolder.getCheckedOutCopyId();

        if (!vocabularyFolder.isSiteCodeType()) {

            List<VocabularyConcept> concepts = vocabularyConceptDAO
                    .getVocabularyConcepts(originalVocabularyFolderId);

            // TODO all elements can be queried in once with getVocabularyConceptsDataElementValues
            for (VocabularyConcept concept : concepts) {

                int conceptId = concept.getId();
                Map<Integer, List<List<DataElement>>> vocabularyConceptsDataElementValues = dataElementDAO
                        .getVocabularyConceptsDataElementValues(originalVocabularyFolderId,
                                new int[] { conceptId }, true);
                List<List<DataElement>> elems = vocabularyConceptsDataElementValues.get(conceptId);
                for (List<DataElement> elemMeta : elems) {
                    if (!elemMeta.isEmpty() && elemMeta.get(0).getDatatype().equals("reference")) {
                        dataElementDAO.deleteReferringInverseElems(concept.getId(), elemMeta);
                    }
                }
            }

            // referenced attribute values in this vocabulary must get new id's
            vocabularyConceptDAO.updateReferringReferenceConcepts(originalVocabularyFolderId);

            // Remove old vocabulary concepts
            vocabularyConceptDAO.deleteVocabularyConcepts(originalVocabularyFolderId);

            // Remove old data element relations
            dataElementDAO.deleteVocabularyDataElements(originalVocabularyFolderId);
            // update ch3 element reference
            dataElementDAO.moveVocabularySources(originalVocabularyFolderId, vocabularyFolderId);

        }

        // Update original vocabulary folder
        vocabularyFolder.setCheckedOutCopyId(0);
        vocabularyFolder.setId(originalVocabularyFolderId);
        vocabularyFolder.setUserModified(userName);
        vocabularyFolder.setDateModified(new Date());
        vocabularyFolder.setWorkingCopy(false);
        vocabularyFolder.setWorkingUser(null);
        vocabularyFolderDAO.updateVocabularyFolder(vocabularyFolder);

        if (!vocabularyFolder.isSiteCodeType()) {

            // Move new vocabulary concepts to folder
            vocabularyConceptDAO.moveVocabularyConcepts(vocabularyFolderId, originalVocabularyFolderId);

            // Move bound data elements to new vocabulary
            dataElementDAO.moveVocabularyDataElements(vocabularyFolderId, originalVocabularyFolderId);

            List<VocabularyConcept> concepts = vocabularyConceptDAO
                    .getVocabularyConcepts(originalVocabularyFolderId);
            for (VocabularyConcept concept : concepts) {
                int conceptId = concept.getId();
                Map<Integer, List<List<DataElement>>> vocabularyConceptsDataElementValues = dataElementDAO
                        .getVocabularyConceptsDataElementValues(originalVocabularyFolderId,
                                new int[] { conceptId }, true);
                List<List<DataElement>> elems = vocabularyConceptsDataElementValues.get(conceptId);
                concept.setElementAttributes(elems);
            }
            fixRelatedReferenceElements(vocabularyFolderId, concepts);

        }

        // Delete old attributes first and then change the parent ID of the new ones
        attributeDAO.deleteAttributes(Collections.singletonList(originalVocabularyFolderId),
                DElemAttribute.ParentType.VOCABULARY_FOLDER.toString());
        attributeDAO.replaceParentId(vocabularyFolderId, originalVocabularyFolderId,
                DElemAttribute.ParentType.VOCABULARY_FOLDER);

        // Delete checked out version
        vocabularyFolderDAO.deleteVocabularyFolders(Collections.singletonList(vocabularyFolderId), false);

        timer.stop();
        LOGGER.debug("Check-in lasted: " + timer.toString());
        return originalVocabularyFolderId;
    } catch (Exception e) {
        throw new ServiceException("Failed to check-in vocabulary folder: " + e.getMessage(), e);
    }
}

From source file:com.mothsoft.alexis.engine.predictive.OpenNLPMaxentModelExecutorTask.java

private boolean doExecute(final Model model) {
    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    boolean result = false;

    try {
        logger.info(String.format("Executing model %d", model.getId()));

        // load model file
        final File userDirectory = new File(baseDirectory, "" + model.getUserId());
        final File modelFile = new File(userDirectory, model.getId() + BIN_GZ_EXT);
        final AbstractModel maxentModel = new SuffixSensitiveGISModelReader(modelFile).getModel();

        final Date now = new Date();
        final TimeUnits timeUnits = model.getTimeUnits();
        final Timestamp topOfPeriod = new Timestamp(TimeUnits.floor(now, timeUnits).getTime());
        final Timestamp endOfPeriod = new Timestamp(topOfPeriod.getTime() + timeUnits.getDuration() - 1);

        // first position: sum of changes predicted, second position: number
        // of samples--will calculate a boring old mean...
        final double[][] changeByPeriod = new double[model.getLookahead()][2];

        // initialize
        for (int i = 0; i < changeByPeriod.length; i++) {
            changeByPeriod[i][0] = 0.0d;
            changeByPeriod[i][1] = 0.0d;
        }

        // find the most recent point value
        // FIXME - some sparse data sets may require executing the model on
        // all documents since that point or applying some sort of
        // dead-reckoning logic for smoothing
        final DataSetPoint initial = this.dataSetPointDao.findLastPointBefore(model.getTrainingDataSet(),
                endOfPeriod);

        // let's get the corner cases out of the way
        if (initial == null) {
            logger.warn("Insufficient data to execute model!");
            return false;
        }

        // happy path
        // build consolidated context of events in this period
        // find current value of training data set for this period
        final double[] probs = eval(model, topOfPeriod, endOfPeriod, maxentModel);

        // predict from the last available point, adjusted for time
        // remaining in period
        final double y0 = initial.getY();

        // map outcomes to periods in the future (at least no earlier than
        // this period)
        for (int i = 0; i < probs.length; i++) {
            // in the form +nU:+/-x, where n is the number of periods, U is
            // the unit type for the period, +/- is the direction, and x is
            // a discrete value from Model.OUTCOME_ARRAY
            final String outcome = maxentModel.getOutcome(i);

            final Matcher matcher = OUTCOME_PATTERN.matcher(outcome);

            if (!matcher.matches()) {
                logger.warn("Can't process outcome: " + outcome + "; skipping");
                continue;
            }

            final int period = Integer.valueOf(matcher.group(1));
            final String units = matcher.group(2);
            final double percentChange = Double.valueOf(matcher.group(3));

            // record the observation and the count of observations
            changeByPeriod[period][0] += percentChange;
            changeByPeriod[period][1] += 1.0d;

            if (logger.isDebugEnabled()) {
                final double yi = y0 * (1 + percentChange);
                logger.debug(String.format("Outcome: %s, %s: +%d, change: %f, new value: %f, probability: %f",
                        outcome, units, period, percentChange, yi, probs[i]));
            }
        }

        // build points for predictive data set
        double yn = y0;

        // we need to track the points and remove any that were not
        // predicted by this execution of the model
        final Timestamp endOfPredictionRange = new Timestamp(
                topOfPeriod.getTime() + (changeByPeriod.length * timeUnits.getDuration()));
        final List<DataSetPoint> existingPoints = this.dataSetPointDao
                .findByTimeRange(model.getPredictionDataSet(), topOfPeriod, endOfPredictionRange);

        for (int period = 0; period < changeByPeriod.length; period++) {
            final double totalPercentChange = changeByPeriod[period][0];
            final double sampleCount = changeByPeriod[period][1];
            double percentChange;

            if (totalPercentChange == 0.0d || sampleCount == 0.0d) {
                percentChange = 0.0d;
            } else {
                percentChange = totalPercentChange / sampleCount;
            }

            // apply adjustments only if the initial point is within the
            // time period, and only for the first time period
            boolean applyAdjustment = period == 0 && topOfPeriod.before(initial.getX());

            if (applyAdjustment) {
                final double adjustmentFactor = findAdjustmentFactor(initial.getX(), timeUnits);
                percentChange = (totalPercentChange / sampleCount) * adjustmentFactor;
            }

            // figure out the next value and coerce to a sane number of
            // decimal places (2);
            final double newValue = (double) Math.round(yn * (1.0d + percentChange) * 100) / 100;

            final Timestamp timestamp = new Timestamp(
                    topOfPeriod.getTime() + (period * timeUnits.getDuration()));

            if (logger.isDebugEnabled()) {
                logger.debug(String.format("Model %d for data set %d predicted point: (%s, %f)", model.getId(),
                        model.getTrainingDataSet().getId(), DateFormat.getInstance().format(timestamp),
                        newValue));
            }

            DataSetPoint ithPoint = this.dataSetPointDao.findByTimestamp(model.getPredictionDataSet(),
                    timestamp);

            // conditionally create
            if (ithPoint == null) {
                ithPoint = new DataSetPoint(model.getPredictionDataSet(), timestamp, newValue);
                this.dataSetPointDao.add(ithPoint);
            } else {
                // or update
                ithPoint.setY(newValue);

                // updated points retained, other existing removed
                existingPoints.remove(ithPoint);
            }

            // store current and use as starting point for next iteration
            yn = newValue;
        }

        // remove stale points from an old model execution
        for (final DataSetPoint toRemove : existingPoints) {
            this.dataSetPointDao.remove(toRemove);
        }

        result = true;

    } catch (final Exception e) {
        logger.warn("Model " + model.getId() + " failed with: " + e, e);
        result = false;
    } finally {
        stopWatch.stop();
        logger.info(String.format("Executing model %d took %s", model.getId(), stopWatch.toString()));
    }

    return result;
}

From source file:eagle.service.generic.ListQueryResource.java

/**
 * <b>TODO</b> remove the legacy deprecated implementation of listQueryWithoutCoprocessor
 *
 * @see #listQuery(String, String, String, int, String, boolean, boolean, long, int, boolean, int, String, Boolean)
 *
 * @param query
 * @param startTime
 * @param endTime
 * @param pageSize
 * @param startRowkey
 * @param treeAgg
 * @param timeSeries
 * @param intervalmin
 * @return
 */
@GET
@Path("/legacy")
@Produces({ MediaType.APPLICATION_JSON })
@Deprecated
public ListQueryAPIResponseEntity listQueryWithoutCoprocessor(@QueryParam("query") String query,
        @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime,
        @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey,
        @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries,
        @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top,
        @QueryParam("filterIfMissing") boolean filterIfMissing, @QueryParam("parallel") int parallel,
        @QueryParam("metricName") String metricName, @QueryParam("verbose") Boolean verbose) {
    StopWatch watch = new StopWatch();
    watch.start();
    ListQueryAPIResponseEntity result = new ListQueryAPIResponseEntity();
    try {
        validateQueryParameters(startRowkey, pageSize);
        ListQueryCompiler comp = new ListQueryCompiler(query, filterIfMissing);
        String serviceName = comp.serviceName();

        SearchCondition condition = new SearchCondition();
        condition.setFilter(comp.filter());
        condition.setQueryExpression(comp.getQueryExpression());
        if (comp.sortOptions() == null && top > 0) {
            LOG.warn(
                    "Parameter \"top\" is only used for sort query! Ignore top parameter this time since it's not a sort query");
        }

        // TODO: For now we don't support one query to query multiple partitions. In future
        // if partition is defined for the entity, internally We need to spawn multiple
        // queries and send one query for each search condition for each partition
        final List<String[]> partitionValues = comp.getQueryPartitionValues();
        if (partitionValues != null) {
            condition.setPartitionValues(Arrays.asList(partitionValues.get(0)));
        }
        EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName(serviceName);
        if (ed.isTimeSeries()) {
            // TODO check timestamp exists for timeseries or topology data
            condition.setStartTime(startTime);
            condition.setEndTime(endTime);
        }
        condition.setOutputVerbose(verbose == null || verbose);
        condition.setOutputAlias(comp.getOutputAlias());
        condition.setOutputAll(comp.isOutputAll());
        condition.setStartRowkey(startRowkey);
        condition.setPageSize(pageSize);

        List<String> outputFields = comp.outputFields();
        if (outputFields == null)
            outputFields = new ArrayList<String>();

        /**
         * TODO ugly logic, waiting for refactoring
         */
        if (!comp.hasAgg() && !serviceName.equals(GenericMetricEntity.GENERIC_METRIC_SERVICE)) { // pure list query
            //            List<String> outputFields = comp.outputFields();
            Set<String> filterFields = comp.getFilterFields();
            if (filterFields != null)
                outputFields.addAll(filterFields);
            condition.setOutputFields(outputFields);
            if (condition.isOutputAll()) {
                LOG.info("Output: ALL");
            } else {
                LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", "));
            }
            GenericEntityBatchReader reader = new GenericEntityBatchReader(serviceName, condition);
            List<? extends TaggedLogAPIEntity> entityList = reader.read();
            result.setObj(entityList);
            result.setTotalResults(entityList.size());
            result.setSuccess(true);
            result.setLastTimestamp(reader.getLastTimestamp());
            result.setFirstTimestamp(reader.getFirstTimestamp());
        } else if (!comp.hasAgg() && serviceName.equals(GenericMetricEntity.GENERIC_METRIC_SERVICE)) {
            // validate metric name
            if (metricName == null || metricName.isEmpty()) {
                throw new IllegalArgumentException("metricName should not be empty for metric list query");
            }
            //            List<String> outputFields = comp.outputFields();
            Set<String> filterFields = comp.getFilterFields();
            if (filterFields != null)
                outputFields.addAll(filterFields);
            condition.setOutputFields(outputFields);
            if (condition.isOutputAll()) {
                LOG.info("Output: ALL");
            } else {
                LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", "));
            }
            GenericMetricEntityBatchReader reader = new GenericMetricEntityBatchReader(metricName, condition);
            List<? extends TaggedLogAPIEntity> entityList = reader.read();
            result.setObj(entityList);
            result.setTotalResults(entityList.size());
            result.setSuccess(true);
            result.setLastTimestamp(reader.getLastTimestamp());
            result.setFirstTimestamp(reader.getFirstTimestamp());
        } else if (!treeAgg && !timeSeries && parallel <= 0) { // non time-series based aggregate query, not hierarchical
            List<String> groupbyFields = comp.groupbyFields();
            List<String> aggregateFields = comp.aggregateFields();
            Set<String> filterFields = comp.getFilterFields();
            //            List<String> outputFields = new ArrayList<String>();
            if (groupbyFields != null)
                outputFields.addAll(groupbyFields);
            if (filterFields != null)
                outputFields.addAll(filterFields);
            outputFields.addAll(aggregateFields);

            if (GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName)
                    && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)) {
                outputFields.add(GenericMetricEntity.VALUE_FIELD);
            }

            FlatAggregator agg = new FlatAggregator(groupbyFields, comp.aggregateFunctionTypes(),
                    comp.aggregateFields());
            StreamReader reader = null;
            if (ed.getMetricDefinition() == null) {
                reader = new GenericEntityStreamReader(serviceName, condition);
            } else { // metric aggregation need metric reader
                reader = new GenericMetricEntityDecompactionStreamReader(metricName, condition);
            }
            condition.setOutputFields(outputFields);
            if (condition.isOutputAll()) {
                LOG.info("Output: ALL");
            } else {
                LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", "));
            }
            reader.register(agg);
            reader.readAsStream();
            ArrayList<Map.Entry<List<String>, List<Double>>> obj = new ArrayList<Map.Entry<List<String>, List<Double>>>();
            obj.addAll(agg.result().entrySet());
            if (comp.sortOptions() == null) {
                result.setObj(obj);
            } else { // has sort options
                result.setObj(PostFlatAggregateSort.sort(agg.result(), comp.sortOptions(), top));
            }
            result.setTotalResults(0);
            result.setSuccess(true);
            result.setLastTimestamp(reader.getLastTimestamp());
            result.setFirstTimestamp(reader.getFirstTimestamp());
        } else if (!treeAgg && !timeSeries && parallel > 0) { // TODO ugly branch, let us refactor
            List<String> groupbyFields = comp.groupbyFields();
            List<String> aggregateFields = comp.aggregateFields();
            Set<String> filterFields = comp.getFilterFields();
            //            List<String> outputFields = new ArrayList<String>();
            if (groupbyFields != null)
                outputFields.addAll(groupbyFields);
            if (filterFields != null)
                outputFields.addAll(filterFields);
            outputFields.addAll(aggregateFields);
            if (GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName)
                    && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)) {
                outputFields.add(GenericMetricEntity.VALUE_FIELD);
            }
            condition.setOutputFields(outputFields);
            if (condition.isOutputAll()) {
                LOG.info("Output: ALL");
            } else {
                LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", "));
            }
            FlatAggregator agg = new FlatAggregator(groupbyFields, comp.aggregateFunctionTypes(),
                    comp.aggregateFields());
            EntityCreationListener listener = EntityCreationListenerFactory
                    .synchronizedEntityCreationListener(agg);
            StreamReader reader = new GenericEntityStreamReaderMT(serviceName, condition, parallel);
            reader.register(listener);
            reader.readAsStream();
            ArrayList<Map.Entry<List<String>, List<Double>>> obj = new ArrayList<Map.Entry<List<String>, List<Double>>>();
            obj.addAll(agg.result().entrySet());
            if (comp.sortOptions() == null) {
                result.setObj(obj);
            } else { // has sort options
                result.setObj(PostFlatAggregateSort.sort(agg.result(), comp.sortOptions(), top));
            }
            result.setTotalResults(0);
            result.setSuccess(true);
            result.setLastTimestamp(reader.getLastTimestamp());
            result.setFirstTimestamp(reader.getFirstTimestamp());
        } else if (!treeAgg && timeSeries) { // time-series based aggregate query, not hierarchical
            List<String> groupbyFields = comp.groupbyFields();
            List<String> sortFields = comp.sortFields();
            List<String> aggregateFields = comp.aggregateFields();
            Set<String> filterFields = comp.getFilterFields();
            //            List<String> outputFields = new ArrayList<String>();
            if (groupbyFields != null)
                outputFields.addAll(groupbyFields);
            if (filterFields != null)
                outputFields.addAll(filterFields);
            if (sortFields != null)
                outputFields.addAll(sortFields);
            outputFields.addAll(aggregateFields);
            if (GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName)
                    && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)) {
                outputFields.add(GenericMetricEntity.VALUE_FIELD);
            }
            StreamReader reader = null;
            if (ed.getMetricDefinition() == null) {
                if (parallel <= 0) { // TODO ugly quick win
                    reader = new GenericEntityStreamReader(serviceName, condition);
                } else {
                    reader = new GenericEntityStreamReaderMT(serviceName, condition, parallel);
                }
            } else { // metric aggregation need metric reader
                reader = new GenericMetricEntityDecompactionStreamReader(metricName, condition);
                if (!outputFields.contains(GenericMetricEntity.VALUE_FIELD)) {
                    outputFields.add(GenericMetricEntity.VALUE_FIELD);
                }
            }
            condition.setOutputFields(outputFields);
            if (condition.isOutputAll()) {
                LOG.info("Output: ALL");
            } else {
                LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", "));
            }
            TimeSeriesAggregator tsAgg = new TimeSeriesAggregator(groupbyFields, comp.aggregateFunctionTypes(),
                    aggregateFields, DateTimeUtil.humanDateToDate(condition.getStartTime()).getTime(),
                    DateTimeUtil.humanDateToDate(condition.getEndTime()).getTime(), intervalmin * 60 * 1000);
            if (parallel <= 0) {
                reader.register(tsAgg);
            } else {
                EntityCreationListener listener = EntityCreationListenerFactory
                        .synchronizedEntityCreationListener(tsAgg);
                reader.register(listener);
            }
            // for sorting
            FlatAggregator sortAgg = null;
            if (comp.sortOptions() != null) {
                sortAgg = new FlatAggregator(groupbyFields, comp.sortFunctions(), comp.sortFields());
                if (parallel <= 0) {
                    reader.register(sortAgg);
                } else {
                    EntityCreationListener listener = EntityCreationListenerFactory
                            .synchronizedEntityCreationListener(sortAgg);
                    reader.register(listener);
                }
            }
            reader.readAsStream();
            ArrayList<Map.Entry<List<String>, List<double[]>>> obj = new ArrayList<Map.Entry<List<String>, List<double[]>>>();
            obj.addAll(tsAgg.getMetric().entrySet());
            if (comp.sortOptions() == null) {
                result.setObj(obj);
            } else { // has sort options
                result.setObj(TimeSeriesPostFlatAggregateSort.sort(sortAgg.result(), tsAgg.getMetric(),
                        comp.sortOptions(), top));
            }
            result.setTotalResults(0);
            result.setSuccess(true);
            result.setLastTimestamp(reader.getLastTimestamp());
            result.setFirstTimestamp(reader.getFirstTimestamp());
        } else { // use hierarchical aggregate mode
            List<String> groupbyFields = comp.groupbyFields();
            List<String> aggregateFields = comp.aggregateFields();
            Set<String> filterFields = comp.getFilterFields();
            //            List<String> outputFields = new ArrayList<String>();
            if (groupbyFields != null)
                outputFields.addAll(groupbyFields);
            if (filterFields != null)
                outputFields.addAll(filterFields);
            outputFields.addAll(aggregateFields);
            if (GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName)
                    && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)) {
                outputFields.add(GenericMetricEntity.VALUE_FIELD);
            }
            condition.setOutputFields(outputFields);
            if (condition.isOutputAll()) {
                LOG.info("Output: ALL");
            } else {
                LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", "));
            }
            GenericEntityStreamReader reader = new GenericEntityStreamReader(serviceName, condition);
            HierarchicalAggregator agg = new HierarchicalAggregator(groupbyFields,
                    comp.aggregateFunctionTypes(), comp.aggregateFields());
            reader.register(agg);
            reader.readAsStream();
            if (comp.sortOptions() == null) {
                result.setObj(agg.result());
            } else { // has sort options
                result.setObj(PostHierarchicalAggregateSort.sort(agg.result(), comp.sortOptions()));
            }
            result.setTotalResults(0);
            result.setSuccess(true);
            result.setLastTimestamp(reader.getLastTimestamp());
            result.setFirstTimestamp(reader.getFirstTimestamp());
        }
    } catch (Exception ex) {
        LOG.error("Fail executing list query: " + query, ex);
        result.setException(EagleExceptionWrapper.wrap(ex));
        result.setSuccess(false);
        return result;
    } finally {
        watch.stop();
        result.setElapsedms(watch.getTime());
    }
    LOG.info("Query done " + watch.getTime() + " ms");
    return result;
}

From source file:net.sourceforge.squirrel_sql.plugins.sqlscript.table_script.CreateFileOfCurrentSQLCommand.java

/**
 * Do the work.
 * @param owner
 */
private void doCreateFileOfCurrentSQL(JFrame owner) {
    try {

        ISQLConnection unmanagedConnection = null;
        try {
            unmanagedConnection = createUnmanagedConnection();

            // TODO maybe, we should use a SQLExecutorTask for taking advantage of some ExecutionListeners like the parameter replacement. But how to get the right Listeners?
            if (unmanagedConnection != null) {

                stmt = createStatementForStreamingResults(unmanagedConnection.getConnection());
            } else {
                stmt = createStatementForStreamingResults(getSession().getSQLConnection().getConnection());
            }

            ProgressAbortFactoryCallback progressFactory = new ProgressAbortFactoryCallback() {
                @Override
                public ProgressAbortCallback create() {
                    createProgressAbortDialog();
                    return progressDialog;
                }
            };

            StopWatch stopWatch = new StopWatch();
            stopWatch.start();

            DialectType dialectType = DialectFactory.getDialectType(getSession().getMetaData());
            resultSetExportCommand = new ResultSetExportCommand(stmt, currentSQL, dialectType, progressFactory);
            resultSetExportCommand.execute(owner);

            stopWatch.stop();

            if (isAborted()) {
                return;
            } else if (resultSetExportCommand.getWrittenRows() >= 0) {
                NumberFormat nf = NumberFormat.getIntegerInstance();

                String rows = nf.format(resultSetExportCommand.getWrittenRows());
                File targetFile = resultSetExportCommand.getTargetFile();
                String seconds = nf.format(stopWatch.getTime() / 1000);
                String msg = s_stringMgr.getString("CreateFileOfCurrentSQLCommand.progress.sucessMessage", rows,
                        targetFile, seconds);
                getSession().showMessage(msg);
            }
        } finally {
            SQLUtilities.closeStatement(stmt);
            if (unmanagedConnection != null) {
                unmanagedConnection.close();
            }
        }
    } catch (Exception e) {
        if (e.getCause() != null) {
            getSession().showErrorMessage(e.getCause());
        }
        getSession().showErrorMessage(e.getMessage());
    } finally {
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                hideProgressMonitor();
            }
        });
    }
}

From source file:net.ymate.module.webproxy.WebProxy.java

@SuppressWarnings("unchecked")
public void transmission(HttpServletRequest request, HttpServletResponse response, String url,
        Type.HttpMethod method) throws Exception {
    StopWatch _consumeTime = null;
    long _threadId = 0;
    if (_LOG.isDebugEnabled()) {
        _consumeTime = new StopWatch();
        _consumeTime.start();
        _threadId = Thread.currentThread().getId();
        _LOG.debug("-------------------------------------------------");
        _LOG.debug("--> [" + _threadId + "] URL: " + url);
    }
    //
    HttpURLConnection _conn = null;
    try {
        if (__moduleCfg.isUseProxy()) {
            _conn = (HttpURLConnection) new URL(url).openConnection(__moduleCfg.getProxy());
        } else {
            _conn = (HttpURLConnection) new URL(url).openConnection();
        }
        _conn.setUseCaches(__moduleCfg.isUseCaches());
        _conn.setInstanceFollowRedirects(__moduleCfg.isInstanceFollowRedirects());
        //
        boolean _postFlag = Type.HttpMethod.POST.equals(method);
        boolean _multipartFlag = _postFlag && StringUtils.contains(request.getContentType(), "multipart/");
        if (_postFlag) {
            _conn.setDoOutput(true);
            _conn.setDoInput(true);
            _conn.setRequestMethod(method.name());
        }
        if (__moduleCfg.getConnectTimeout() > 0) {
            _conn.setConnectTimeout(__moduleCfg.getConnectTimeout());
        }
        if (__moduleCfg.getReadTimeout() > 0) {
            _conn.setReadTimeout(__moduleCfg.getReadTimeout());
        }
        //
        if (_LOG.isDebugEnabled()) {
            _LOG.debug("--> [" + _threadId + "] Method: " + method.name());
            _LOG.debug("--> [" + _threadId + "] Request Headers: ");
        }
        //
        Enumeration _header = request.getHeaderNames();
        while (_header.hasMoreElements()) {
            String _name = (String) _header.nextElement();
            String _value = request.getHeader(_name);
            boolean _flag = false;
            if (_postFlag && StringUtils.equalsIgnoreCase(_name, "content-type")
                    || __moduleCfg.isTransferHeaderEnabled()
                            && (!__moduleCfg.getTransferHeaderBlackList().isEmpty()
                                    && !__moduleCfg.getTransferHeaderBlackList().contains(_name)
                                    || !__moduleCfg.getTransferHeaderWhiteList().isEmpty()
                                            && __moduleCfg.getTransferHeaderWhiteList().contains(_name))) {
                _conn.setRequestProperty(_name, _value);
                _flag = true;
            }
            //
            if (_LOG.isDebugEnabled()) {
                _LOG.debug("--> [" + _threadId + "] \t " + (_flag ? " - " : " > ") + _name + ": " + _value);
            }
        }
        _conn.connect();
        //
        if (_postFlag) {
            DataOutputStream _output = new DataOutputStream(_conn.getOutputStream());
            try {
                if (_multipartFlag) {
                    if (_LOG.isDebugEnabled()) {
                        _LOG.debug("--> [" + _threadId + "] Multipart: TRUE");
                    }
                    IOUtils.copyLarge(request.getInputStream(), _output);
                } else {
                    String _charset = request.getCharacterEncoding();
                    String _queryStr = ParamUtils.buildQueryParamStr(request.getParameterMap(), true, _charset);
                    IOUtils.write(_queryStr, _output, _charset);
                    //
                    if (_LOG.isDebugEnabled()) {
                        _LOG.debug("--> [" + _threadId + "] Request Parameters: ");
                        Map<String, String> _paramsMap = ParamUtils.parseQueryParamStr(_queryStr, true,
                                _charset);
                        for (Map.Entry<String, String> _param : _paramsMap.entrySet()) {
                            _LOG.debug("--> [" + _threadId + "] \t - " + _param.getKey() + ": "
                                    + _param.getValue());
                        }
                    }
                }
                _output.flush();
            } finally {
                IOUtils.closeQuietly(_output);
            }
        }
        //
        int _code = _conn.getResponseCode();
        //
        if (_LOG.isDebugEnabled()) {
            _LOG.debug("--> [" + _threadId + "] Response Code: " + _code);
            _LOG.debug("--> [" + _threadId + "] Response Headers: ");
        }
        //
        Map<String, List<String>> _headers = _conn.getHeaderFields();
        for (Map.Entry<String, List<String>> _entry : _headers.entrySet()) {
            if (_entry.getKey() != null) {
                boolean _flag = false;
                String _values = StringUtils.join(_entry.getValue(), ",");
                if (StringUtils.equalsIgnoreCase(_entry.getKey(), "content-type")
                        || __moduleCfg.isTransferHeaderEnabled()
                                && !__moduleCfg.getResponseHeaderWhileList().isEmpty()
                                && __moduleCfg.getResponseHeaderWhileList().contains(_entry.getKey())) {
                    response.setHeader(_entry.getKey(), _values);
                    _flag = true;
                }
                if (_LOG.isDebugEnabled()) {
                    _LOG.debug("--> [" + _threadId + "] \t " + (_flag ? " - " : " > ") + _entry.getKey() + ": "
                            + _values);
                }
            }
        }
        if (HttpURLConnection.HTTP_BAD_REQUEST <= _conn.getResponseCode()) {
            response.sendError(_code);
        } else {
            if (HttpURLConnection.HTTP_OK == _code) {
                InputStream _inputStream = _conn.getInputStream();
                if (_inputStream != null) {
                    if (!_multipartFlag) {
                        byte[] _content = IOUtils.toByteArray(_inputStream);
                        IOUtils.write(_content, response.getOutputStream());
                        //
                        if (_LOG.isDebugEnabled()) {
                            _LOG.debug("--> [" + _threadId + "] Response Content: " + __doParseContentBody(
                                    _conn, _content, WebMVC.get().getModuleCfg().getDefaultCharsetEncoding()));
                        }
                    } else {
                        IOUtils.copyLarge(_conn.getInputStream(), response.getOutputStream());
                        //
                        if (_LOG.isDebugEnabled()) {
                            _LOG.debug("--> [" + _threadId + "] Response Content: MultipartBody");
                        }
                    }
                } else if (_LOG.isDebugEnabled()) {
                    _LOG.debug("--> [" + _threadId + "] Response Content: NULL");
                }
                response.flushBuffer();
            } else {
                InputStream _inputStream = _conn.getInputStream();
                if (_inputStream != null) {
                    byte[] _content = IOUtils.toByteArray(_inputStream);
                    IOUtils.write(_content, response.getOutputStream());
                    //
                    if (_LOG.isDebugEnabled()) {
                        _LOG.debug("--> [" + _threadId + "] Response Content: " + __doParseContentBody(_conn,
                                _content, WebMVC.get().getModuleCfg().getDefaultCharsetEncoding()));
                    }
                } else if (_LOG.isDebugEnabled()) {
                    _LOG.debug("--> [" + _threadId + "] Response Content: NULL");
                }
                response.setStatus(_code);
                response.flushBuffer();
            }
        }
    } catch (Throwable e) {
        _LOG.warn("An exception occurred while processing request mapping '" + url + "': ",
                RuntimeUtils.unwrapThrow(e));
    } finally {
        IOUtils.close(_conn);
        //
        if (_LOG.isDebugEnabled()) {
            if (_consumeTime != null) {
                _consumeTime.stop();
                _LOG.debug("--> [" + _threadId + "] Total execution time: " + _consumeTime.getTime() + "ms");
            }
            _LOG.debug("-------------------------------------------------");
        }
    }
}

From source file:net.ymate.platform.base.YMP.java

/**
 * Initialize the YMP platform.
 */
public static void initialize() {
    if (!IS_INITED) {
        System.out.println(I18N.formatMessage(__LSTRING_FILE, null, null, "ymp.base.platform_version_show",
                VERSION, BUILD_DATE));
        //
        Properties _configs = new Properties();
        InputStream _in = null;
        if (RuntimeUtils.isWindows()) {
            _in = YMP.class.getClassLoader().getResourceAsStream("ymp-conf_WIN.properties");
        } else if (RuntimeUtils.isUnixOrLinux()) {
            _in = YMP.class.getClassLoader().getResourceAsStream("ymp-conf_UNIX.properties");
        }
        if (_in == null) {
            _in = YMP.class.getClassLoader().getResourceAsStream("ymp-conf.properties");
        }
        if (_in != null) {
            try {
                _configs.load(_in);
                IS_DEV_MODEL = new BlurObject(_configs.getProperty("ymp.dev_model")).toBooleanValue();
                __MODULE_LOADER = (IModuleLoader) Class
                        .forName(_configs.getProperty("ymp.module_loader_impl_class")).newInstance();
            } catch (Exception e) {
                __MODULE_LOADER = new DefaultModuleLoader();
            }
        } else {
            System.err.println(I18N.formatMessage(__LSTRING_FILE, null, null, "ymp.base.error_load_conf_file"));
        }
        StopWatch _stopWatch = new StopWatch();
        _stopWatch.start();
        try {
            __MODULE_LOADER.initialize(_configs);
            IS_INITED = true;
        } catch (Throwable e) {
            e.printStackTrace(System.err);
        } finally {
            _stopWatch.stop();
            if (IS_INITED) {
                System.out.println(I18N.formatMessage(__LSTRING_FILE, null, null,
                        "ymp.base.platform_init_successed", _stopWatch.getTime()));
            } else {
                System.err.println(
                        I18N.formatMessage(__LSTRING_FILE, null, null, "ymp.base.platform_init_failed"));
            }
        }
    }
}

From source file:net.ymate.platform.core.YMP.java

/**
 * Initialize YMP.
 *
 * @return this YMP instance
 * @throws Exception if initialization fails
 */
public YMP init() throws Exception {
    if (!__inited) {
        //
        _LOG.info("\n__   ____  __ ____          ____  \n" + "\\ \\ / /  \\/  |  _ \\  __   _|___ \\ \n"
                + " \\ V /| |\\/| | |_) | \\ \\ / / __) |\n" + "  | | | |  | |  __/   \\ V / / __/ \n"
                + "  |_| |_|  |_|_|       \\_/ |_____|  Website: http://www.ymate.net/");
        //
        StopWatch _watch = new StopWatch();
        _watch.start();
        //
        _LOG.info("Initializing ymate-platform-core-" + VERSION + " - debug:" + __config.isDevelopMode());

        // Initialize I18N
        I18N.initialize(__config.getDefaultLocale(), __config.getI18NEventHandlerClass());
        // Initialize the event manager and register framework and module events
        __events = Events.create(new DefaultEventConfig(__config.getEventConfigs()));
        __events.registerEvent(ApplicationEvent.class);
        __events.registerEvent(ModuleEvent.class);
        // Create the bean factory
        __beanFactory = new DefaultBeanFactory();
        __beanFactory.setLoader(new DefaultBeanLoader(__config.getExcudedFiles()));
        __beanFactory.registerHandler(Bean.class);
        // Create the module instance map
        __modules = new HashMap<Class<? extends IModule>, IModule>();
        // Create the module bean factory
        __moduleFactory = new BeanFactory(this);
        __moduleFactory.setLoader(new DefaultBeanLoader(__config.getExcudedFiles()));
        __moduleFactory.registerHandler(Module.class, new ModuleHandler(this));
        __moduleFactory.registerHandler(Proxy.class, new ProxyHandler(this));
        __moduleFactory.registerHandler(EventRegister.class, new EventRegisterHandler(this));
        // Register packages to be scanned
        __registerScanPackages(__moduleFactory);
        __registerScanPackages(__beanFactory);
        // Create the proxy factory and register the intercept proxy
        __proxyFactory = new DefaultProxyFactory(this).registerProxy(new InterceptProxy());
        // Initialize modules
        __moduleFactory.init();
        for (IModule _module : __modules.values()) {
            if (!_module.isInited()) {
                _module.init(this);
                // Fire the module-initialized event
                __events.fireEvent(Events.MODE.NORMAL,
                        new ModuleEvent(_module, ModuleEvent.EVENT.MODULE_INITED));
            }
        }
        // Initialize the bean factory
        __beanFactory.init();
        // Initialize bean proxies
        __beanFactory.initProxy(__proxyFactory);
        // Perform IoC dependency injection
        __beanFactory.initIoC();
        //
        __inited = true;
        //
        _watch.stop();
        _LOG.info("Initialization completed, Total time: " + _watch.getTime() + "ms");
        // Fire the application-initialized event
        __events.fireEvent(Events.MODE.NORMAL,
                new ApplicationEvent(this, ApplicationEvent.EVENT.APPLICATION_INITED));
    }
    return this;
}

From source file:net.ymate.platform.persistence.jdbc.base.AbstractOperator.java

public void execute() throws Exception {
    if (!this.executed) {
        StopWatch _time = new StopWatch();
        _time.start();
        int _effectCounts = 0;
        try {
            _effectCounts = __doExecute();
            // Mark as executed so the operator does not run twice
            this.executed = true;
        } finally {
            _time.stop();
            this.expenseTime = _time.getTime();
            //
            if (this.connectionHolder.getDataSourceCfgMeta().isShowSQL()) {
                _LOG.info(ExpressionUtils.bind("[${sql}]${param}[${count}][${time}]")
                        .set("sql", StringUtils.defaultIfBlank(this.sql, "@NULL"))
                        .set("param", __doSerializeParameters()).set("count", _effectCounts + "")
                        .set("time", this.expenseTime + "ms").getResult());
            }
        }
    }
}