Example usage for org.apache.commons.lang.time StopWatch toString

Introduction

On this page you can find example usage for org.apache.commons.lang.time.StopWatch.toString().

Prototype

public String toString() 

Document

Gets a summary of the time that the stopwatch recorded as a string.

The format used is ISO 8601-like: hours:minutes:seconds.milliseconds.
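
A minimal, self-contained sketch of the call (the class name and sleep duration below are illustrative, assuming Commons Lang 2.x on the classpath):

import org.apache.commons.lang.time.StopWatch;

public class StopWatchToStringDemo {
    public static void main(final String[] args) throws InterruptedException {
        final StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        Thread.sleep(1234); // stand-in for the work being timed
        stopWatch.stop();
        // prints something like "0:00:01.234" (hours:minutes:seconds.milliseconds)
        System.out.println(stopWatch.toString());
    }
}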

Usage

From source file: com.mothsoft.alexis.dao.DocumentDaoImpl.java

public DataRange<Document> listDocumentsInTopicsByOwner(final Long userId, final int first, final int count) {
    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    final SortOrder sortOrder = SortOrder.DATE_DESC;
    final DataRange<DocumentScore> scoredRange = this.searchWithAllOptions(userId, true,
            DocumentState.MATCHED_TO_TOPICS, null, sortOrder, null, null, first, count);

    final List<Document> range = new ArrayList<Document>(scoredRange.getRange().size());

    for (final DocumentScore scoredDoc : scoredRange.getRange()) {
        range.add(scoredDoc.getDocument());
    }

    final DataRange<Document> dataRange = new DataRange<Document>(range, scoredRange.getFirstRow(),
            scoredRange.getTotalRowsAvailable());

    stopWatch.stop();
    logger.debug(stopWatch.toString());

    return dataRange;
}
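
This first example shows the pattern repeated throughout DocumentDaoImpl: start the StopWatch at the top of the method, stop it once the result is assembled, and pass stopWatch.toString() straight to the logger, letting StopWatch handle the elapsed-time formatting.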

From source file: com.mothsoft.alexis.dao.DocumentDaoImpl.java

public DataRange<Document> listDocumentsByOwner(final Long userId, final int first, final int count) {
    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    final SortOrder sortOrder = SortOrder.DATE_DESC;
    final DataRange<DocumentScore> scoredRange = this.searchWithAllOptions(userId, false, null, null, sortOrder,
            null /* ignore start date */, null /* ignore end date */, first, count);

    final List<Document> range = new ArrayList<Document>(scoredRange.getRange().size());

    for (final DocumentScore scoredDoc : scoredRange.getRange()) {
        range.add(scoredDoc.getDocument());
    }

    final DataRange<Document> dataRange = new DataRange<Document>(range, scoredRange.getFirstRow(),
            scoredRange.getTotalRowsAvailable());

    stopWatch.stop();
    logger.debug(stopWatch.toString());

    return dataRange;
}

From source file: com.mothsoft.alexis.dao.DocumentDaoImpl.java

public List<Document> listTopDocuments(Long userId, Date startDate, Date endDate, int count) {
    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    final Query query = this.em
            .createQuery("select d from Topic topic join topic.topicDocuments td join td.document d "
                    + "   where topic.userId = :userId "
                    + "     and td.creationDate > :startDate and td.creationDate < :endDate "
                    + "     and td.score > 0.2                                            "
                    + "     order by td.score desc");
    query.setParameter("userId", userId);
    query.setParameter("startDate", startDate);
    query.setParameter("endDate", endDate);
    query.setFirstResult(0);
    query.setMaxResults(count);

    query.setLockMode(LockModeType.NONE);

    @SuppressWarnings("unchecked")
    final List<Document> range = query.getResultList();

    stopWatch.stop();
    logger.debug(stopWatch.toString());

    return range;
}

From source file: com.mothsoft.alexis.dao.DocumentDaoImpl.java

@Override
public ScrollableResults scrollableSearch(Long userId, DocumentState state, String queryString,
        SortOrder sortOrder, Date startDate, Date endDate) {
    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    final FullTextQuery fullTextQuery = this.buildFullTextQuery(queryString, userId, startDate, endDate, false,
            state, FullTextQuery.THIS, FullTextQuery.SCORE);

    final Sort sort;
    switch (sortOrder) {
    case DATE_ASC:
        sort = new Sort(new SortField("id", SortField.LONG));
        break;
    case DATE_DESC:
        sort = new Sort(new SortField("id", SortField.LONG, true));
        break;
    case RELEVANCE:
        sort = new Sort(SortField.FIELD_SCORE, new SortField("id", SortField.LONG, true));
        break;
    default:
        throw new IllegalArgumentException("Unexpected SortOrder: " + sortOrder.name());
    }
    fullTextQuery.setSort(sort);

    fullTextQuery.setFetchSize(50);
    fullTextQuery.setReadOnly(true);
    fullTextQuery.setCacheable(false);
    fullTextQuery.setCacheMode(CacheMode.IGNORE);

    final ScrollableResults result = fullTextQuery.scroll(ScrollMode.FORWARD_ONLY);

    stopWatch.stop();
    logger.debug(stopWatch.toString());

    return result;
}

From source file: ch.systemsx.cisd.openbis.generic.server.business.bo.samplelister.SampleListingWorker.java

/**
 * Load the samples defined by the criteria given to the constructor. The samples will be
 * enriched with sample properties, and dependencies on parents and containers will be resolved.
 */
public List<Sample> load() {
    final StopWatch watch = new StopWatch();
    watch.start();
    final Experiment expOrNull = tryLoadExperiment();
    final boolean oneGroupPerSample = isOneGroupPerSamples();
    final Group groupOrNull;
    if (oneGroupPerSample) {
        groupOrNull = null;
        final Group[] groups = referencedEntityDAO.getAllGroups(databaseInstanceId);
        for (Group group : groups) {
            group.setInstance(databaseInstance);
            groupMap.put(group.getId(), group);
        }
    } else {
        groupOrNull = tryLoadGroup(expOrNull);
        if (groupOrNull != null) {
            // For use by dependent samples.
            groupMap.put(groupOrNull.getId(), groupOrNull);
        }
    }
    loadSampleTypes();
    retrievePrimaryBasicSamples(tryGetIteratorForSamplesByIds(), groupOrNull, oneGroupPerSample);
    retrievePrimaryBasicSamples(tryGetIteratorForGroupSamples(), groupOrNull, oneGroupPerSample);
    retrievePrimaryBasicSamples(tryGetIteratorForSharedSamples(), groupOrNull, oneGroupPerSample);
    retrievePrimaryBasicSamples(tryGetIteratorForExperimentSamples(), groupOrNull, oneGroupPerSample);
    retrievePrimaryBasicSamples(tryGetIteratorForContainedSamples(), groupOrNull, oneGroupPerSample);
    if (operationLog.isDebugEnabled()) {
        watch.stop();
        operationLog.debug(
                String.format("Basic retrieval of %d samples took %s s", sampleList.size(), watch.toString()));
        watch.reset();
        watch.start();
    }

    // Only enrich the "primary" samples (matching the criteria) with properties, not
    // dependent samples.
    if (samplePropertiesEnricherOrNull != null) {
        samplePropertiesEnricherOrNull.enrich(sampleMap.keySet(), new IEntityPropertiesHolderResolver() {
            public Sample get(long id) {
                return sampleMap.get(id);
            }
        });
        if (operationLog.isDebugEnabled()) {
            watch.stop();
            operationLog.debug(String.format("Enrichment with properties took %s s", watch.toString()));
        }
    }

    retrieveDependentSamplesRecursively();
    resolveParents();
    resolveContainers();

    return sampleList;
}
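
Unlike the single-measurement examples, this loader times two phases with one StopWatch by calling stop(), reset(), and start() between the basic retrieval and the property enrichment, and it guards the stop/log/reset sequence behind operationLog.isDebugEnabled() so the formatting cost is only paid when debug logging is enabled.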

From source file: com.mothsoft.alexis.dao.DocumentDaoImpl.java

private DataRange<DocumentScore> searchWithAllOptions(final Long userId, final boolean requireTopicsForUser,
        final DocumentState state, final String queryString, final SortOrder sortOrder, final Date startDate,
        final Date endDate, final int first, final int count) {
    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    final FullTextQuery fullTextQuery = this.buildFullTextQuery(queryString, userId, startDate, endDate,
            requireTopicsForUser, state, FullTextQuery.THIS, FullTextQuery.SCORE);

    fullTextQuery.setFirstResult(first);
    fullTextQuery.setMaxResults(count);

    // optional sort order
    if (sortOrder == null || sortOrder == SortOrder.RELEVANCE) {
        final Sort defaultSort = new Sort(SortField.FIELD_SCORE, new SortField("id", SortField.LONG, true));
        fullTextQuery.setSort(defaultSort);
    } else if (sortOrder == SortOrder.DATE_DESC) {
        final Sort sort = new Sort(new SortField("creationDate", SortField.LONG, true));
        fullTextQuery.setSort(sort);
    } else if (sortOrder == SortOrder.DATE_ASC) {
        final Sort sort = new Sort(new SortField("creationDate", SortField.LONG));
        fullTextQuery.setSort(sort);
    }

    @SuppressWarnings("unchecked")
    final List<Object[]> results = fullTextQuery.list();
    final List<DocumentScore> range = new ArrayList<DocumentScore>(results.size());

    // copy to DocumentScore holder objects
    for (final Object[] ith : results) {
        final Document ithDoc = (Document) ith[0];
        final Float ithScore = (Float) ith[1];
        range.add(new DocumentScore(ithDoc, ithScore));
    }

    final int totalRows = fullTextQuery.getResultSize();
    final DataRange<DocumentScore> result = new DataRange<DocumentScore>(range, first, totalRows);

    stopWatch.stop();
    logger.debug(stopWatch.toString());
    return result;
}

From source file: eionet.meta.service.VocabularyServiceImpl.java

/**
 * {@inheritDoc}
 */
@Override
@Transactional(rollbackFor = ServiceException.class)
public int checkOutVocabularyFolder(int vocabularyFolderId, String userName) throws ServiceException {
    if (StringUtils.isBlank(userName)) {
        throw new IllegalArgumentException("User name must not be blank!");
    }

    try {
        StopWatch timer = new StopWatch();
        timer.start();
        VocabularyFolder vocabularyFolder = vocabularyFolderDAO.getVocabularyFolder(vocabularyFolderId);

        if (vocabularyFolder.isWorkingCopy()) {
            throw new ServiceException("Cannot check out a working copy!");
        }

        if (StringUtils.isNotBlank(vocabularyFolder.getWorkingUser())) {
            throw new ServiceException("Cannot check out an already checked-out vocabulary folder!");
        }

        // Update existing working user
        vocabularyFolder.setWorkingUser(userName);
        vocabularyFolderDAO.updateVocabularyFolder(vocabularyFolder);

        // Make new copy of vocabulary folder
        vocabularyFolder.setCheckedOutCopyId(vocabularyFolderId);
        vocabularyFolder.setWorkingCopy(true);
        int newVocabularyFolderId = vocabularyFolderDAO.createVocabularyFolder(vocabularyFolder);

        // Copy simple attributes.
        attributeDAO.copySimpleAttributes(vocabularyFolderId,
                DElemAttribute.ParentType.VOCABULARY_FOLDER.toString(), newVocabularyFolderId);

        // Copy the vocabulary concepts under new vocabulary folder (except of site code type)
        if (!vocabularyFolder.isSiteCodeType()) {
            vocabularyConceptDAO.copyVocabularyConcepts(vocabularyFolderId, newVocabularyFolderId);

            dataElementDAO.checkoutVocabularyConceptDataElementValues(newVocabularyFolderId);
            // dataElementDAO.updateRelatedConceptIds(newVocabularyFolderId);
        }

        // Copy data element relations
        dataElementDAO.copyVocabularyDataElements(vocabularyFolderId, newVocabularyFolderId);

        timer.stop();
        LOGGER.debug("Check-out lasted: " + timer.toString());
        return newVocabularyFolderId;
    } catch (Exception e) {
        throw new ServiceException("Failed to check-out vocabulary folder: " + e.getMessage(), e);
    }
}
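
The explicit toString() call in the log statement is redundant here: "Check-out lasted: " + timer would log the same text, because string concatenation invokes toString() implicitly.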

From source file: com.mothsoft.alexis.dao.DocumentDaoImpl.java

@SuppressWarnings("unchecked")
public Graph getRelatedTerms(final String queryString, final Long userId, final int howMany) {
    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    final FullTextQuery fullTextQuery = this.buildFullTextQuery(queryString, userId, NO_DATE, NO_DATE, false,
            DocumentState.MATCHED_TO_TOPICS, FullTextQuery.ID);

    // find the specified number of terms from the most recent 100 documents
    // that match the query
    final Sort sort = new Sort(new SortField("creationDate", SortField.LONG, true));
    fullTextQuery.setSort(sort);
    fullTextQuery.setFirstResult(0);
    fullTextQuery.setMaxResults(100);

    final List<Long> documentIds = new ArrayList<Long>(100);
    final List<Long> termIds = new ArrayList<Long>(100);

    final List<Object[]> results = fullTextQuery.list();

    for (final Object[] ith : results) {
        final Long id = (Long) ith[0];
        documentIds.add(id);
    }

    final Map<String, Node> nodes = new LinkedHashMap<String, Node>();
    final Node root = new Node(queryString, Boolean.TRUE);
    nodes.put(queryString, root);

    final Map<String, Edge> edges = new HashMap<String, Edge>();

    if (!documentIds.isEmpty()) {
        final Session session = (Session) this.em.getDelegate();
        final org.hibernate.SQLQuery termsQuery = session.createSQLQuery("SELECT term.id "
                + "        FROM document_term dt INNER JOIN term on term.id = dt.term_id "
                + "        WHERE dt.document_id IN (:documentIds) GROUP BY term.id ORDER BY SUM(dt.tf_idf) DESC");
        termsQuery.setParameterList("documentIds", documentIds);
        termsQuery.setMaxResults(100);
        termIds.addAll((List<Long>) termsQuery.list());
    }

    if (!documentIds.isEmpty() && !termIds.isEmpty()) {

        final Session session = (Session) this.em.getDelegate();
        final org.hibernate.SQLQuery associationsQuery = session.createSQLQuery(
                "SELECT CONCAT(a.term_value) term_a_value, CONCAT(b.term_value) term_b_value, SUM(da.association_weight) sum_weight "
                        + "      FROM document_association da "
                        + "      INNER JOIN term a ON da.term_a_id = a.id "
                        + "        AND a.part_of_speech NOT IN (1, 3, 18, 19, 25, 39, 40) "
                        + "        AND length(a.term_value) > 2 "
                        + "      INNER JOIN term b ON da.term_b_id = b.id "
                        + "        AND b.part_of_speech NOT IN (1, 3, 18, 19, 25, 39, 40) "
                        + "        AND length(b.term_value) > 2 "
                        + "      WHERE da.document_id IN (:documentIds) AND (da.term_a_id IN (:termIds) OR da.term_b_id IN (:termIds)) "
                        + "      GROUP BY a.id, b.id ORDER BY sum_weight DESC");
        associationsQuery.setParameterList("documentIds", documentIds);
        associationsQuery.setParameterList("termIds", termIds);
        associationsQuery.setMaxResults(howMany);

        final List<Object[]> relatedTermsResults = associationsQuery.list();

        final Set<String> aNodeKeys = new HashSet<String>();
        final Set<String> bNodeKeys = new HashSet<String>();

        for (final Object[] ith : relatedTermsResults) {
            final String a = (String) ith[0];
            final String b = (String) ith[1];

            if (!nodes.containsKey(a)) {
                final Node node = new Node(a);
                nodes.put(a, node);
            }

            if (!nodes.containsKey(b)) {
                final Node node = new Node(b);
                nodes.put(b, node);
            }

            if (a.equals(b)) {
                continue;
            }

            final String edgeKey = a + "||" + b;
            final String edgeKeyInverse = b + "||" + a;
            if (!edges.containsKey(edgeKey) && !edges.containsKey(edgeKeyInverse)) {
                final Node nodeA = nodes.get(a);
                final Node nodeB = nodes.get(b);

                aNodeKeys.add(a);
                bNodeKeys.add(b);

                final Edge edge = new Edge(nodeA, nodeB);
                edges.put(edgeKey, edge);
            }
        }

        // "orphan" handling, any b that is not also an a needs an edge from
        // root
        final Set<String> orphanKeys = new HashSet<String>();
        orphanKeys.addAll(bNodeKeys);
        orphanKeys.removeAll(aNodeKeys);

        for (final String orphanKey : orphanKeys) {
            final Node orphan = nodes.get(orphanKey);
            final Edge orphanToParent = new Edge(root, orphan);
            edges.put(root.getName() + "||" + orphan.getName(), orphanToParent);
        }
    }

    final List<Node> nodeList = new ArrayList<Node>(nodes.size());
    // keep root as first element
    nodes.remove(root.getName());
    nodeList.add(root);
    nodeList.addAll(nodes.values());

    final Graph graph = new Graph(nodeList, new ArrayList<Edge>(edges.values()));

    stopWatch.stop();
    logger.info("Related terms search took: " + stopWatch.toString());

    return graph;
}

From source file: eionet.meta.service.VocabularyServiceImpl.java

/**
 * {@inheritDoc}
 */
@Override
@Transactional(rollbackFor = ServiceException.class)
public int checkInVocabularyFolder(int vocabularyFolderId, String userName) throws ServiceException {
    if (StringUtils.isBlank(userName)) {
        throw new IllegalArgumentException("User name must not be blank!");
    }

    try {
        StopWatch timer = new StopWatch();
        timer.start();
        VocabularyFolder vocabularyFolder = vocabularyFolderDAO.getVocabularyFolder(vocabularyFolderId);

        if (!vocabularyFolder.isWorkingCopy()) {
            throw new ServiceException("Vocabulary is not a working copy.");
        }

        if (!StringUtils.equals(userName, vocabularyFolder.getWorkingUser())) {
            throw new ServiceException("Check-in user is not the current working user.");
        }

        int originalVocabularyFolderId = vocabularyFolder.getCheckedOutCopyId();

        if (!vocabularyFolder.isSiteCodeType()) {

            List<VocabularyConcept> concepts = vocabularyConceptDAO
                    .getVocabularyConcepts(originalVocabularyFolderId);

            // TODO all elements can be queried at once with getVocabularyConceptsDataElementValues
            for (VocabularyConcept concept : concepts) {

                int conceptId = concept.getId();
                Map<Integer, List<List<DataElement>>> vocabularyConceptsDataElementValues = dataElementDAO
                        .getVocabularyConceptsDataElementValues(originalVocabularyFolderId,
                                new int[] { conceptId }, true);
                List<List<DataElement>> elems = vocabularyConceptsDataElementValues.get(conceptId);
                for (List<DataElement> elemMeta : elems) {
                    if (!elemMeta.isEmpty() && elemMeta.get(0).getDatatype().equals("reference")) {
                        dataElementDAO.deleteReferringInverseElems(concept.getId(), elemMeta);
                    }
                }
            }

            // referenced attribute values in this vocabulary must get new id's
            vocabularyConceptDAO.updateReferringReferenceConcepts(originalVocabularyFolderId);

            // Remove old vocabulary concepts
            vocabularyConceptDAO.deleteVocabularyConcepts(originalVocabularyFolderId);

            // Remove old data element relations
            dataElementDAO.deleteVocabularyDataElements(originalVocabularyFolderId);
            // update ch3 element reference
            dataElementDAO.moveVocabularySources(originalVocabularyFolderId, vocabularyFolderId);

        }

        // Update original vocabulary folder
        vocabularyFolder.setCheckedOutCopyId(0);
        vocabularyFolder.setId(originalVocabularyFolderId);
        vocabularyFolder.setUserModified(userName);
        vocabularyFolder.setDateModified(new Date());
        vocabularyFolder.setWorkingCopy(false);
        vocabularyFolder.setWorkingUser(null);
        vocabularyFolderDAO.updateVocabularyFolder(vocabularyFolder);

        if (!vocabularyFolder.isSiteCodeType()) {

            // Move new vocabulary concepts to folder
            vocabularyConceptDAO.moveVocabularyConcepts(vocabularyFolderId, originalVocabularyFolderId);

            // Move bound data elements to new vocabulary
            dataElementDAO.moveVocabularyDataElements(vocabularyFolderId, originalVocabularyFolderId);

            List<VocabularyConcept> concepts = vocabularyConceptDAO
                    .getVocabularyConcepts(originalVocabularyFolderId);
            for (VocabularyConcept concept : concepts) {
                int conceptId = concept.getId();
                Map<Integer, List<List<DataElement>>> vocabularyConceptsDataElementValues = dataElementDAO
                        .getVocabularyConceptsDataElementValues(originalVocabularyFolderId,
                                new int[] { conceptId }, true);
                List<List<DataElement>> elems = vocabularyConceptsDataElementValues.get(conceptId);
                concept.setElementAttributes(elems);
            }
            fixRelatedReferenceElements(vocabularyFolderId, concepts);

        }

        // Delete old attributes first and then change the parent ID of the new ones
        attributeDAO.deleteAttributes(Collections.singletonList(originalVocabularyFolderId),
                DElemAttribute.ParentType.VOCABULARY_FOLDER.toString());
        attributeDAO.replaceParentId(vocabularyFolderId, originalVocabularyFolderId,
                DElemAttribute.ParentType.VOCABULARY_FOLDER);

        // Delete checked out version
        vocabularyFolderDAO.deleteVocabularyFolders(Collections.singletonList(vocabularyFolderId), false);

        timer.stop();
        LOGGER.debug("Check-in lasted: " + timer.toString());
        return originalVocabularyFolderId;
    } catch (Exception e) {
        throw new ServiceException("Failed to check-in vocabulary folder: " + e.getMessage(), e);
    }
}

From source file: com.mothsoft.alexis.engine.predictive.OpenNLPMaxentModelExecutorTask.java

private boolean doExecute(final Model model) {
    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    boolean result = false;

    try {
        logger.info(String.format("Executing model %d", model.getId()));

        // load model file
        final File userDirectory = new File(baseDirectory, "" + model.getUserId());
        final File modelFile = new File(userDirectory, model.getId() + BIN_GZ_EXT);
        final AbstractModel maxentModel = new SuffixSensitiveGISModelReader(modelFile).getModel();

        final Date now = new Date();
        final TimeUnits timeUnits = model.getTimeUnits();
        final Timestamp topOfPeriod = new Timestamp(TimeUnits.floor(now, timeUnits).getTime());
        final Timestamp endOfPeriod = new Timestamp(topOfPeriod.getTime() + timeUnits.getDuration() - 1);

        // first position: sum of changes predicted, second position: number
        // of samples--will calculate a boring old mean...
        final double[][] changeByPeriod = new double[model.getLookahead()][2];

        // initialize
        for (int i = 0; i < changeByPeriod.length; i++) {
            changeByPeriod[i][0] = 0.0d;
            changeByPeriod[i][1] = 0.0d;
        }

        // find the most recent point value
        // FIXME - some sparse data sets may require executing the model on
        // all documents since that point or applying some sort of
        // dead-reckoning logic for smoothing
        final DataSetPoint initial = this.dataSetPointDao.findLastPointBefore(model.getTrainingDataSet(),
                endOfPeriod);

        // let's get the corner cases out of the way
        if (initial == null) {
            logger.warn("Insufficient data to execute model!");
            return false;
        }

        // happy path
        // build consolidated context of events in this period
        // find current value of training data set for this period
        final double[] probs = eval(model, topOfPeriod, endOfPeriod, maxentModel);

        // predict from the last available point, adjusted for time
        // remaining in period
        final double y0 = initial.getY();

        // map outcomes to periods in the future (at least no earlier than
        // this period)
        for (int i = 0; i < probs.length; i++) {
            // in the form +nU:+/-x, where n is the number of periods, U is
            // the unit type for the period, +/- is the direction, and x is
            // a discrete value from Model.OUTCOME_ARRAY
            final String outcome = maxentModel.getOutcome(i);

            final Matcher matcher = OUTCOME_PATTERN.matcher(outcome);

            if (!matcher.matches()) {
                logger.warn("Can't process outcome: " + outcome + "; skipping");
                continue;
            }

            final int period = Integer.valueOf(matcher.group(1));
            final String units = matcher.group(2);
            final double percentChange = Double.valueOf(matcher.group(3));

            // record the observation and the count of observations
            changeByPeriod[period][0] += percentChange;
            changeByPeriod[period][1] += 1.0d;

            if (logger.isDebugEnabled()) {
                final double yi = y0 * (1 + percentChange);
                logger.debug(String.format("Outcome: %s, %s: +%d, change: %f, new value: %f, probability: %f",
                        outcome, units, period, percentChange, yi, probs[i]));
            }
        }

        // build points for predictive data set
        double yn = y0;

        // we need to track the points and remove any that were not
        // predicted by this execution of the model
        final Timestamp endOfPredictionRange = new Timestamp(
                topOfPeriod.getTime() + (changeByPeriod.length * timeUnits.getDuration()));
        final List<DataSetPoint> existingPoints = this.dataSetPointDao
                .findByTimeRange(model.getPredictionDataSet(), topOfPeriod, endOfPredictionRange);

        for (int period = 0; period < changeByPeriod.length; period++) {
            final double totalPercentChange = changeByPeriod[period][0];
            final double sampleCount = changeByPeriod[period][1];
            double percentChange;

            if (totalPercentChange == 0.0d || sampleCount == 0.0d) {
                percentChange = 0.0d;
            } else {
                percentChange = totalPercentChange / sampleCount;
            }

            // apply adjustments only if the initial point is within the
            // time period, and only for the first time period
            boolean applyAdjustment = period == 0 && topOfPeriod.before(initial.getX());

            if (applyAdjustment) {
                final double adjustmentFactor = findAdjustmentFactor(initial.getX(), timeUnits);
                percentChange = (totalPercentChange / sampleCount) * adjustmentFactor;
            }

            // figure out the next value and coerce to a sane number of
            // decimal places (2);
            final double newValue = (double) Math.round(yn * (1.0d + percentChange) * 100) / 100;

            final Timestamp timestamp = new Timestamp(
                    topOfPeriod.getTime() + (period * timeUnits.getDuration()));

            if (logger.isDebugEnabled()) {
                logger.debug(String.format("Model %d for data set %d predicted point: (%s, %f)", model.getId(),
                        model.getTrainingDataSet().getId(), DateFormat.getInstance().format(timestamp),
                        newValue));
            }

            DataSetPoint ithPoint = this.dataSetPointDao.findByTimestamp(model.getPredictionDataSet(),
                    timestamp);

            // conditionally create
            if (ithPoint == null) {
                ithPoint = new DataSetPoint(model.getPredictionDataSet(), timestamp, newValue);
                this.dataSetPointDao.add(ithPoint);
            } else {
                // or update
                ithPoint.setY(newValue);

                // updated points retained, other existing removed
                existingPoints.remove(ithPoint);
            }

            // store current and use as starting point for next iteration
            yn = newValue;
        }

        // remove stale points from an old model execution
        for (final DataSetPoint toRemove : existingPoints) {
            this.dataSetPointDao.remove(toRemove);
        }

        result = true;

    } catch (final Exception e) {
        logger.warn("Model " + model.getId() + " failed with: " + e, e);
        result = false;
    } finally {
        stopWatch.stop();
        logger.info(String.format("Executing model %d took %s", model.getId(), stopWatch.toString()));
    }

    return result;
}
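
This is the only example that stops the watch and logs the elapsed time in a finally block, which guarantees a timing entry even when the model execution throws; the earlier examples skip their timing log if an exception escapes before stop() is reached.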