Example usage for org.apache.commons.lang.time StopWatch start

List of usage examples for org.apache.commons.lang.time StopWatch start

Introduction

In this page you can find the example usage for org.apache.commons.lang.time StopWatch start.

Prototype

public void start() 

Source Link

Document

Start the stopwatch.

This method starts a new timing session, clearing any previous values.

Usage

From source file: org.openmrs.module.reporting.data.patient.service.PatientDataServiceImpl.java

/**
 * Evaluates the given patient data definition, splitting the evaluation into batches of
 * patients when a base cohort and a positive batch size are configured. Batching bounds
 * memory usage by flushing and clearing the session between batches.
 *
 * @param evaluator the evaluator to delegate each (batch) evaluation to
 * @param definition the patient data definition being evaluated
 * @param context the evaluation context; its base cohort (if any) drives batching
 * @return the evaluated patient data, accumulated across all batches
 * @throws EvaluationException if any batch evaluation fails
 */
@Override
protected Evaluated<PatientDataDefinition> executeEvaluator(
        DefinitionEvaluator<PatientDataDefinition> evaluator, PatientDataDefinition definition,
        EvaluationContext context) throws EvaluationException {

    EvaluatedPatientData ret = new EvaluatedPatientData(definition, context);
    int batchSize = ReportingConstants.GLOBAL_PROPERTY_DATA_EVALUATION_BATCH_SIZE();

    // Do not evaluate in batches if no base cohort is supplied, or no batch size is specified,
    // or the cohort already fits within a single batch
    if (context.getBaseCohort() == null || batchSize <= 0 || context.getBaseCohort().size() <= batchSize) {
        return super.executeEvaluator(evaluator, definition, context);
    }

    if (context.getBaseCohort().size() > 0) {

        // Partition the base cohort's member ids into sub-cohorts of at most batchSize each
        List<Cohort> batches = new ArrayList<Cohort>();
        List<Integer> ids = new ArrayList<Integer>(context.getBaseCohort().getMemberIds());
        for (int i = 0; i < ids.size(); i += batchSize) {
            batches.add(new Cohort(ids.subList(i, i + Math.min(batchSize, ids.size() - i))));
        }
        log.info("Number of batches to execute: " + batches.size());

        // Evaluate each batch against a shallow copy of the context, accumulating results into ret
        for (Cohort batchCohort : batches) {
            EvaluationContext batchContext = context.shallowCopy();
            batchContext.setBaseCohort(batchCohort);
            batchContext.clearCache(); // Setting base cohort should do this, but just to be sure

            StopWatch timer = new StopWatch();
            timer.start();

            EvaluatedPatientData batchData = (EvaluatedPatientData) super.executeEvaluator(evaluator,
                    definition, batchContext);
            ret.getData().putAll(batchData.getData());

            timer.stop();
            log.debug("Evaluated batch: " + timer.toString());
            log.debug("Number of running data evaluated: " + ret.getData().size());

            // Flush and clear the persistence session between batches to keep memory bounded
            Context.flushSession();
            Context.clearSession();
        }
    }
    return ret;
}

From source file: org.openmrs.module.reporting.dataset.definition.evaluator.EncounterDataSetEvaluator.java

/**
 * Builds a row-per-encounter data set: resolves the configured row filters into a single
 * encounter id set, then evaluates each column definition against those encounters and
 * copies the (converted) values into the resulting data set.
 *
 * @see DataSetEvaluator#evaluate(DataSetDefinition, EvaluationContext)
 */
@SuppressWarnings("unchecked")
public DataSet evaluate(DataSetDefinition dataSetDefinition, EvaluationContext context)
        throws EvaluationException {

    EncounterDataSetDefinition definition = (EncounterDataSetDefinition) dataSetDefinition;
    context = ObjectUtil.nvl(context, new EvaluationContext());

    SimpleDataSet dataSet = new SimpleDataSet(definition, context);
    dataSet.setSortCriteria(definition.getSortCriteria());

    // Intersect all row filters to determine which encounters make up the rows;
    // with no filters (or none that apply), fall back to all encounters
    EncounterIdSet rowIds = null;
    if (definition.getRowFilters() != null) {
        for (Mapped<? extends EncounterQuery> rowFilter : definition.getRowFilters()) {
            EncounterIdSet filtered = Context.getService(EncounterQueryService.class).evaluate(rowFilter,
                    context);
            rowIds = QueryUtil.intersectNonNull(rowIds, filtered);
        }
    }
    if (rowIds == null) {
        rowIds = Context.getService(EncounterQueryService.class).evaluate(new AllEncounterQuery(), context);
    }
    EncounterEvaluationContext encounterContext = new EncounterEvaluationContext(context, rowIds);
    encounterContext.setBaseCohort(null); // We can do this because the encounterIdSet is already limited by these

    // Evaluate each configured column definition for the included encounters
    for (RowPerObjectColumnDefinition columnDefinition : definition.getColumnDefinitions()) {

        if (log.isDebugEnabled()) {
            log.debug("Evaluating column: " + columnDefinition.getName());
            log.debug("With Data Definition: "
                    + DefinitionUtil.format(columnDefinition.getDataDefinition().getParameterizable()));
            log.debug("With Mappings: " + columnDefinition.getDataDefinition().getParameterMappings());
            log.debug("With Parameters: " + encounterContext.getParameterValues());
        }

        StopWatch columnTimer = new StopWatch();
        columnTimer.start();

        MappedData<? extends EncounterDataDefinition> mappedDataDef = (MappedData<? extends EncounterDataDefinition>) columnDefinition
                .getDataDefinition();
        EvaluatedEncounterData evaluatedData = Context.getService(EncounterDataService.class)
                .evaluate(mappedDataDef, encounterContext);

        DataSetColumn column = new DataSetColumn(columnDefinition.getName(), columnDefinition.getName(),
                mappedDataDef.getParameterizable().getDataType()); // TODO: Support One-Many column definition to column

        for (Integer memberId : rowIds.getMemberIds()) {
            Object value = evaluatedData.getData().get(memberId);
            value = DataUtil.convertData(value, mappedDataDef.getConverters());
            dataSet.addColumnValue(memberId, column, value);
        }

        columnTimer.stop();
        if (log.isDebugEnabled()) {
            log.debug("Added encounter column: " + columnTimer.toString());
        }

    }

    return dataSet;
}

From source file: org.openmrs.module.reporting.dataset.definition.evaluator.PatientDataSetEvaluator.java

/**
 * Builds a row-per-patient data set: resolves the configured row filters into a single
 * cohort, then evaluates each column definition against that cohort and copies the
 * (converted) values into the resulting data set.
 *
 * @see DataSetEvaluator#evaluate(DataSetDefinition, EvaluationContext)
 */
@SuppressWarnings("unchecked")
public DataSet evaluate(DataSetDefinition dataSetDefinition, EvaluationContext context)
        throws EvaluationException {

    PatientDataSetDefinition dsd = (PatientDataSetDefinition) dataSetDefinition;
    context = ObjectUtil.nvl(context, new EvaluationContext());

    SimpleDataSet dataSet = new SimpleDataSet(dsd, context);
    dataSet.setSortCriteria(dsd.getSortCriteria());

    // Construct a new EvaluationContext based on the passed filters;
    // with no filters (or none that apply), fall back to all patients
    Cohort c = context.getBaseCohort();
    if (dsd.getRowFilters() != null) {
        for (Mapped<? extends CohortDefinition> q : dsd.getRowFilters()) {
            Cohort s = Context.getService(CohortDefinitionService.class).evaluate(q, context);
            c = CohortUtil.intersectNonNull(c, s);
        }
    }
    if (c == null) {
        c = Context.getService(CohortDefinitionService.class).evaluate(new AllPatientsCohortDefinition(),
                context);
    }

    // Only replace the base cohort (which invalidates context caches) when it actually changed
    EvaluationContext ec = context.shallowCopy();
    if (!CohortUtil.areEqual(ec.getBaseCohort(), c)) {
        ec.setBaseCohort(c);
    }

    // Evaluate each specified ColumnDefinition for all of the included rows and add these to the dataset
    for (RowPerObjectColumnDefinition cd : dsd.getColumnDefinitions()) {

        if (log.isDebugEnabled()) {
            log.debug("Evaluating column: " + cd.getName());
            log.debug("With Data Definition: "
                    + DefinitionUtil.format(cd.getDataDefinition().getParameterizable()));
            log.debug("With Mappings: " + cd.getDataDefinition().getParameterMappings());
            log.debug("With Parameters: " + ec.getParameterValues());
        }
        StopWatch sw = new StopWatch();
        sw.start();

        MappedData<? extends PatientDataDefinition> dataDef = (MappedData<? extends PatientDataDefinition>) cd
                .getDataDefinition();
        EvaluatedPatientData data = Context.getService(PatientDataService.class).evaluate(dataDef, ec);

        for (Integer id : c.getMemberIds()) {
            // The converted value does not depend on the column, so compute it once per
            // patient instead of once per (patient, column) pair
            Object val = DataUtil.convertData(data.getData().get(id), dataDef.getConverters());
            for (DataSetColumn column : cd.getDataSetColumns()) {
                dataSet.addColumnValue(id, column, val);
            }
        }

        sw.stop();
        if (log.isDebugEnabled()) {
            log.debug("Evaluated column. Duration: " + sw.toString());
        }
    }

    return dataSet;
}

From source file: org.openmrs.module.reporting.dataset.definition.evaluator.VisitDataSetEvaluator.java

/**
 * Builds a row-per-visit data set: resolves the configured row filters into a single
 * visit id set, then evaluates each column definition against those visits and copies
 * the (converted) values into the resulting data set.
 *
 * @see DataSetEvaluator#evaluate(org.openmrs.module.reporting.dataset.definition.DataSetDefinition, org.openmrs.module.reporting.evaluation.EvaluationContext)
 */
@SuppressWarnings("unchecked")
public DataSet evaluate(DataSetDefinition dataSetDefinition, EvaluationContext context)
        throws EvaluationException {

    VisitDataSetDefinition dsd = (VisitDataSetDefinition) dataSetDefinition;
    context = ObjectUtil.nvl(context, new EvaluationContext());

    SimpleDataSet dataSet = new SimpleDataSet(dsd, context);
    dataSet.setSortCriteria(dsd.getSortCriteria());

    // Construct a VisitEvaluationContext based on the visit filter;
    // with no filters (or none that apply), fall back to all visits
    VisitIdSet r = null;
    if (dsd.getRowFilters() != null) {
        for (Mapped<? extends VisitQuery> q : dsd.getRowFilters()) {
            VisitIdSet s = Context.getService(VisitQueryService.class).evaluate(q, context);
            r = QueryUtil.intersectNonNull(r, s);
        }
    }
    if (r == null) {
        r = Context.getService(VisitQueryService.class).evaluate(new AllVisitQuery(), context);
    }
    VisitEvaluationContext vec = new VisitEvaluationContext(context, r);
    vec.setBaseCohort(null); // We can do this because the visitIdSet is already limited by these

    // Evaluate each specified ColumnDefinition for all of the included rows and add these to the dataset
    for (RowPerObjectColumnDefinition cd : dsd.getColumnDefinitions()) {

        if (log.isDebugEnabled()) {
            log.debug("Evaluating column: " + cd.getName());
            log.debug("With Data Definition: "
                    + DefinitionUtil.format(cd.getDataDefinition().getParameterizable()));
            log.debug("With Mappings: " + cd.getDataDefinition().getParameterMappings());
            log.debug("With Parameters: " + vec.getParameterValues());
        }

        StopWatch sw = new StopWatch();
        sw.start();

        MappedData<? extends VisitDataDefinition> dataDef = (MappedData<? extends VisitDataDefinition>) cd
                .getDataDefinition();
        EvaluatedVisitData data = Context.getService(VisitDataService.class).evaluate(dataDef, vec);

        DataSetColumn column = new DataSetColumn(cd.getName(), cd.getName(),
                dataDef.getParameterizable().getDataType()); // TODO: Support One-Many column definition to column

        for (Integer id : r.getMemberIds()) {
            Object val = data.getData().get(id);
            val = DataUtil.convertData(val, dataDef.getConverters());
            dataSet.addColumnValue(id, column, val);
        }

        sw.stop();
        if (log.isDebugEnabled()) {
            // Fixed copy-paste from EncounterDataSetEvaluator: this evaluator adds visit columns
            log.debug("Added visit column: " + sw.toString());
        }

    }

    return dataSet;
}

From source file: org.openscore.lang.cli.SlangCLI.java

/**
 * CLI entry point for the "run" command: compiles the given flow file (with optional
 * classpath and system-property files), then triggers it either synchronously (timing
 * the execution) or asynchronously, depending on the {@code triggerAsync} flag.
 */
@CliCommand(value = "run", help = "triggers a slang flow")
public String run(@CliOption(key = { "", "f",
        "file" }, mandatory = true, help = "Path to filename. e.g. slang run --f C:\\Slang\\flow.yaml") final File file,
        @CliOption(key = { "cp",
                "classpath" }, mandatory = false, help = "Classpath , a directory comma separated list to flow dependencies, by default it will take flow file dir") final List<String> classPath,
        @CliOption(key = { "i",
                "inputs" }, mandatory = false, help = "inputs in a key=value comma separated list") final Map<String, Serializable> inputs,
        @CliOption(key = { "spf",
                "system-property-file" }, mandatory = false, help = "comma separated list of system property file locations") final List<String> systemPropertyFiles)
        throws IOException {

    CompilationArtifact compilationArtifact = compilerHelper.compile(file.getAbsolutePath(), null, classPath);
    Map<String, ? extends Serializable> systemProperties = compilerHelper
            .loadSystemProperties(systemPropertyFiles);

    if (triggerAsync) {
        // Fire-and-forget: report the execution id and flow name immediately
        Long executionId = scoreServices.trigger(compilationArtifact, inputs, systemProperties);
        return triggerAsyncMsg(executionId, compilationArtifact.getExecutionPlan().getName());
    }

    // Synchronous run: time the execution and include the duration in the result message
    StopWatch timer = new StopWatch();
    timer.start();
    Long executionId = scoreServices.triggerSync(compilationArtifact, inputs, systemProperties);
    timer.stop();
    return triggerSyncMsg(executionId, timer.toString());
}

From source file: org.opentestsystem.delivery.testreg.rest.StudentPackageController.java

/**
 * Writes the student package XML for the student identified by either ssid or externalId
 * (scoped to the given state) to the HTTP response.
 *
 * Responds 409 (conflict) if both identifiers are supplied, 204 (no content) if no
 * student is found, and records the elapsed time as a performance metric either way.
 *
 * @param studentId the internal student id (ssid); mutually exclusive with externalSsid
 * @param externalSsid the external student id; mutually exclusive with studentId
 * @param stateAbbreviation the state the student lookup is scoped to
 * @param response the servlet response the XML payload is written to
 * @throws IOException if writing the response body fails
 */
@ResponseStatus(HttpStatus.OK)
@RequestMapping(value = "/studentpackage", method = RequestMethod.GET, produces = MediaType.APPLICATION_XML_VALUE)
@Secured({ "ROLE_Entity Read" })
@ResponseBody
public void extractStudentPackage(@RequestParam(value = "ssid", required = false) final String studentId,
        @RequestParam(value = "externalId", required = false) final String externalSsid,
        @RequestParam("stateabbreviation") final String stateAbbreviation, final HttpServletResponse response)
        throws IOException {
    StopWatch sw = new StopWatch();
    sw.start();
    Student student = null;

    // Exactly one of ssid / externalId may be provided; both at once is a conflict
    if (hasText(studentId) && hasText(externalSsid)) {
        response.setStatus(HttpServletResponse.SC_CONFLICT);
    } else if (hasText(studentId)) {
        student = studentService.findByStudentIdAndStateAbbreviation(studentId, stateAbbreviation);
    } else if (hasText(externalSsid)) {
        student = studentService.findByExternalSsidAndStateAbbreviation(externalSsid, stateAbbreviation);
    }

    if (student != null) {
        String studentPackage = studentPackageService.exportStudentPackage(student);
        response.setContentType(MediaType.APPLICATION_XML_VALUE);
        ServletOutputStream out = response.getOutputStream();
        // Encode explicitly as UTF-8: the bare getBytes() used the platform default
        // charset, which may not match what the XML payload declares
        IOUtils.copy(new ByteArrayInputStream(studentPackage.getBytes(java.nio.charset.StandardCharsets.UTF_8)),
                out);
        out.flush();
    } else {
        response.setStatus(HttpServletResponse.SC_NO_CONTENT);
    }
    sw.stop();
    this.metricClient.sendPerformanceMetricToMna("StudentPackage for " + externalSsid + " (ms) ", sw.getTime());
}

From source file: org.pdfsam.guiclient.gui.frames.JMainFrame.java

/**
 * Builds the main application frame: shows the splash screen, runs the full UI
 * initialization, and logs the total startup duration when done.
 */
public JMainFrame() {
    // Time the whole startup sequence so it can be reported in the log below
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    log.info("Starting " + GuiClient.getApplicationName() + " Ver. " + GuiClient.getVersion());
    runSplash();
    // Keep tooltips visible for up to 5 minutes (300000 ms) instead of the Swing default
    ToolTipManager.sharedInstance().setDismissDelay(300000);
    initialize();
    closeSplash();
    stopWatch.stop();
    // Startup duration is reported in human-readable words (localized "started in" label)
    log.info(GuiClient.getApplicationName() + " Ver. " + GuiClient.getVersion() + " "
            + GettextResource.gettext(Configuration.getInstance().getI18nResourceBundle(), "started in ")
            + DurationFormatUtils.formatDurationWords(stopWatch.getTime(), true, true));
}

From source file: org.pentaho.platform.web.http.api.resources.WorkerNodeActionInvokerAuditor.java

/**
 * Delegates to the wrapped action invoker while writing start/end audit records
 * around the invocation and recording its elapsed time (in whole seconds).
 */
@Override
public IActionInvokeStatus invokeAction(IAction action, String user, Map<String, Serializable> params)
        throws Exception {
    StopWatch timer = new StopWatch();
    timer.start();
    // Snapshot the parameters up front: invokeAction may change the map's keys, and
    // both audit records must describe the same inputs
    Map<String, Serializable> auditParams = new HashMap<>(params);
    String actionName = action.getClass().getName();
    makeAuditRecord(0, MessageTypes.INSTANCE_START, auditParams, actionName);
    try {
        return actionInvoker.invokeAction(action, user, params);
    } finally {
        // Always write the end record, even when the invocation throws
        makeAuditRecord(timer.getTime() / 1000, MessageTypes.INSTANCE_END, auditParams, actionName);
    }
}

From source file: org.rhq.server.metrics.migrator.DataSourceTest.java

/**
 * Ad-hoc benchmark driver: pages through the RHQ_MEASUREMENT_DATA_NUM_1D table via a
 * bulk export source and prints the total elapsed time of initialization plus paging.
 */
public static void main(String[] args) throws Exception {
    // Basic console logging, verbose for the org.rhq packages
    BasicConfigurator.configure();
    Logger.getRootLogger().setLevel(Level.INFO);
    Logger.getLogger("org.rhq").setLevel(Level.DEBUG);
    EntityManagerFactory entityManagerFactory = null;
    EntityManager entityManager = null;
    ExistingDataBulkExportSource source = null;
    try {
        entityManagerFactory = createEntityManager();
        entityManager = entityManagerFactory.createEntityManager();
        source = new ExistingPostgresDataBulkExportSource(entityManager,
                "SELECT  schedule_id, time_stamp, value, minvalue, maxvalue FROM RHQ_MEASUREMENT_DATA_NUM_1D");
        StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        source.initialize();
        int rowIndex = 0;
        int maxResults = 30000;
        // Page through the results; a short page signals the end of the data
        for (;;) {
            List<Object[]> existingData = source.getData(rowIndex, maxResults);
            if (existingData.size() < maxResults) {
                break;
            } else {
                rowIndex += maxResults;
            }
        }
        stopWatch.stop();
        System.out.println("Execution: " + stopWatch);
    } finally {
        // Close resources in reverse order of acquisition; each may be null if setup failed early
        if (source != null) {
            source.close();
        }
        if (entityManager != null) {
            entityManager.close();
        }
        if (entityManagerFactory != null) {
            entityManagerFactory.close();
        }
    }
}

From source file: org.rhq.server.metrics.migrator.DataSourceTest.java

/**
 * Ad-hoc benchmark driver (variant of {@code main}): pages through the
 * RHQ_MEASUREMENT_DATA_NUM_1D table via a bulk export source and prints the total
 * elapsed time of initialization plus paging.
 */
public static void main2(String[] args) throws Exception {
    // Basic console logging, verbose for the org.rhq packages
    BasicConfigurator.configure();
    Logger.getRootLogger().setLevel(Level.INFO);
    Logger.getLogger("org.rhq").setLevel(Level.DEBUG);

    EntityManagerFactory entityManagerFactory = null;
    EntityManager entityManager = null;
    ExistingDataBulkExportSource exportSource = null;
    try {
        entityManagerFactory = createEntityManager();
        entityManager = entityManagerFactory.createEntityManager();
        exportSource = new ExistingPostgresDataBulkExportSource(entityManager,
                "SELECT  schedule_id, time_stamp, value, minvalue, maxvalue FROM RHQ_MEASUREMENT_DATA_NUM_1D");

        StopWatch timer = new StopWatch();
        timer.start();
        exportSource.initialize();

        // Page through the results; a short page signals the end of the data
        int pageSize = 30000;
        int offset = 0;
        while (true) {
            List<Object[]> page = exportSource.getData(offset, pageSize);
            if (page.size() < pageSize) {
                break;
            }
            offset += pageSize;
        }
        timer.stop();
        System.out.println("Execution: " + timer);
    } finally {
        // Close resources in reverse order of acquisition; each may be null if setup failed early
        if (exportSource != null) {
            exportSource.close();
        }
        if (entityManager != null) {
            entityManager.close();
        }
        if (entityManagerFactory != null) {
            entityManagerFactory.close();
        }
    }
}