Example usage for com.google.common.base Stopwatch start

Introduction

This page collects example usages of the com.google.common.base.Stopwatch.start() method from open-source projects.

Prototype

public Stopwatch start() 

Document

Starts the stopwatch. Returns this Stopwatch instance; throws an IllegalStateException if the stopwatch is already running.
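
A minimal, self-contained sketch of the typical lifecycle (the sleep is only a stand-in for the work being measured):

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

public class StopwatchStartDemo {
    public static void main(String[] args) throws InterruptedException {
        Stopwatch watch = Stopwatch.createUnstarted();
        watch.start();                 // begin timing
        Thread.sleep(50);              // stand-in for the work being measured
        watch.stop();                  // stop accumulating elapsed time
        System.out.println(watch.elapsed(TimeUnit.MILLISECONDS) + " ms");
        watch.reset().start();         // reset() zeroes the watch and returns this, so calls chain
    }
}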

Usage

From source file: org.apache.drill.exec.store.parquet.Metadata.java

/**
 * Get the parquet metadata for the parquet files in a directory
 *
 * @param path the path of the directory
 * @return the parquet metadata for the files in the directory
 * @throws IOException
 */
private ParquetTableMetadata_v2 getParquetTableMetadata(String path) throws IOException {
    Path p = new Path(path);
    FileStatus fileStatus = fs.getFileStatus(p);
    final Stopwatch watch = Stopwatch.createStarted();
    List<FileStatus> fileStatuses = getFileStatuses(fileStatus);
    logger.info("Took {} ms to get file statuses", watch.elapsed(TimeUnit.MILLISECONDS));
    watch.reset();
    watch.start();
    ParquetTableMetadata_v2 metadata_v1 = getParquetTableMetadata(fileStatuses);
    logger.info("Took {} ms to read file metadata", watch.elapsed(TimeUnit.MILLISECONDS));
    return metadata_v1;
}
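
Since Stopwatch's lifecycle methods return this, the reset-then-start pair above can be collapsed into one chained call:

watch.reset().start(); // equivalent to watch.reset(); watch.start();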

From source file: com.thinkbiganalytics.feedmgr.nifi.CreateFeedBuilder.java

/**
 * Build the NiFi flow instance
 *
 * @return an object indicating if the feed flow was successfully built or not
 */
public NifiProcessGroup build() throws FeedCreationException {
    try {
        log.info("Creating the feed {}.{} ", category, feedName);
        newProcessGroup = null;
        Stopwatch totalTime = Stopwatch.createStarted();
        Stopwatch eventTime = Stopwatch.createStarted();

        TemplateDTO template = getTemplate();
        log.debug("Time to get Template {}.  ElapsedTime: {} ms", template.getName(), eventTime(eventTime));
        if (template != null) {

            //create the encompassing process group
            eventTime.start();
            ProcessGroupDTO feedProcessGroup = createProcessGroupForFeed();
            log.debug("Time to create process group.  ElapsedTime: {} ms", eventTime(eventTime));
            if (feedProcessGroup != null) {
                String processGroupId = feedProcessGroup.getId();
                //snapshot the existing controller services
                eventTime.start();
                templateCreationHelper.snapshotControllerServiceReferences();
                log.debug("Time to snapshotControllerServices.  ElapsedTime: {} ms", eventTime(eventTime));

                //create the flow from the template
                eventTime.start();
                TemplateInstance instance = templateCreationHelper.instantiateFlowFromTemplate(processGroupId,
                        templateId);
                FlowSnippetDTO feedInstance = instance.getFlowSnippetDTO();
                feedProcessGroup.setContents(feedInstance);
                log.debug("Time to instantiateFlowFromTemplate.  ElapsedTime: {} ms", eventTime(eventTime));

                eventTime.start();
                String feedCategoryId = feedProcessGroup.getParentGroupId();
                ProcessGroupDTO categoryGroup = this.categoryGroup;
                if (categoryGroup == null) {
                    categoryGroup = this.categoryGroup = restClient.getProcessGroup(feedCategoryId, false,
                            false);
                }
                //update the group with this template?
                updatePortConnectionsForProcessGroup(feedProcessGroup, categoryGroup);
                log.debug("Time to updatePortConnectionsForProcessGroup.  ElapsedTime: {} ms",
                        eventTime(eventTime));

                eventTime.start();
                //mark the new services that were created as a result of creating the new flow from the template
                templateCreationHelper.identifyNewlyCreatedControllerServiceReferences(instance);
                log.debug("Time to identifyNewlyCreatedControllerServiceReferences.  ElapsedTime: {} ms",
                        eventTime(eventTime));

                eventTime.start();
                //match the properties incoming to the defined properties
                updateProcessGroupProperties(processGroupId, feedProcessGroup.getName());
                log.debug("Time to updateProcessGroupProperties.  ElapsedTime: {} ms", eventTime(eventTime));

                eventTime.start();
                //Fetch the Feed Group now that it has the flow in it
                ProcessGroupDTO entity = restClient.getProcessGroup(processGroupId, true, true);
                log.debug("Time to getProcessGroup.  ElapsedTime: {} ms", eventTime(eventTime));

                eventTime.start();
                ProcessorDTO input = fetchInputProcessorForProcessGroup(entity);
                ProcessorDTO cleanupProcessor = NifiProcessUtil.findFirstProcessorsByType(
                        NifiProcessUtil.getInputProcessors(entity),
                        "com.thinkbiganalytics.nifi.v2.metadata.TriggerCleanup");
                List<ProcessorDTO> nonInputProcessors = NifiProcessUtil.getNonInputProcessors(entity);
                log.debug("Time to fetchInputProcessorForProcessGroup.  ElapsedTime: {} ms",
                        eventTime(eventTime));

                eventTime.start();
                List<NifiProperty> updatedControllerServiceProperties = new ArrayList<>();
                //update any references to the controller services and try to assign the value to an enabled service if it is not already
                if (input != null) {
                    updatedControllerServiceProperties.addAll(templateCreationHelper
                            .updateControllerServiceReferences(Lists.newArrayList(input), instance));
                }
                if (cleanupProcessor != null) {
                    updatedControllerServiceProperties
                            .addAll(templateCreationHelper.updateControllerServiceReferences(
                                    Collections.singletonList(cleanupProcessor), instance));
                }
                updatedControllerServiceProperties.addAll(
                        templateCreationHelper.updateControllerServiceReferences(nonInputProcessors, instance));
                log.debug("Time to updatedControllerServiceProperties.  ElapsedTime: {} ms",
                        eventTime(eventTime));

                eventTime.start();
                //refetch processors for updated errors
                entity = restClient.getProcessGroup(processGroupId, true, true);
                input = fetchInputProcessorForProcessGroup(entity);
                nonInputProcessors = NifiProcessUtil.getNonInputProcessors(entity);

                newProcessGroup = new NifiProcessGroup(entity, input, nonInputProcessors);
                log.debug("Time to re-fetchInputProcessorForProcessGroup.  ElapsedTime: {} ms",
                        eventTime(eventTime));
                //Validate and if invalid Delete the process group
                if (newProcessGroup.hasFatalErrors()) {
                    eventTime.start();
                    removeProcessGroup(entity);
                    // cleanupControllerServices();
                    newProcessGroup.setSuccess(false);
                    log.debug("Time to removeProcessGroup. Errors found.  ElapsedTime: {} ms",
                            eventTime(eventTime));
                } else {
                    eventTime.start();
                    //update the input schedule
                    updateFeedSchedule(newProcessGroup, input);
                    log.debug("Time to update feed schedule.  ElapsedTime: {} ms", eventTime(eventTime));
                    eventTime.start();

                    //just need to update for this processgroup
                    Collection<ProcessorDTO> processors = NifiProcessUtil.getProcessors(entity);
                    Collection<ConnectionDTO> connections = NifiConnectionUtil.getAllConnections(entity);
                    nifiFlowCache.updateFlowForFeed(feedMetadata, entity.getId(), processors, connections);
                    log.debug(
                            "Time to build flow graph with {} processors and {} connections.  ElapsedTime: {} ms",
                            processors.size(), connections.size(), eventTime(eventTime));
                    /*
                            
                    //Cache the processorIds to the respective flowIds for availability in the ProvenanceReportingTask
                    NifiVisitableProcessGroup group = nifiFlowCache.getFlowOrder(newProcessGroup.getProcessGroupEntity(), true);
                    log.debug("Time to get the flow order.  ElapsedTime: {} ms", eventTime(eventTime));
                            
                    eventTime.start();
                    NifiFlowProcessGroup
                    flow =
                    new NifiFlowBuilder().build(
                        group);
                    log.debug("Time to build flow graph with {} processors.  ElapsedTime: {} ms", flow.getProcessorMap().size(), eventTime(eventTime));
                            
                    eventTime.start();
                    nifiFlowCache.updateFlow(feedMetadata, flow);
                    log.debug("Time to update NiFiFlowCache with {} processors.  ElapsedTime: {} ms", flow.getProcessorMap().size(), eventTime(eventTime));
                    */
                    eventTime.start();
                    //disable all inputs
                    restClient.disableInputProcessors(newProcessGroup.getProcessGroupEntity().getId());
                    log.debug("Time to disableInputProcessors.  ElapsedTime: {} ms", eventTime(eventTime));

                    eventTime.start();
                    //mark everything else as running
                    templateCreationHelper.markProcessorsAsRunning(newProcessGroup);
                    log.debug("Time to markNonInputsAsRunning.  ElapsedTime: {} ms", eventTime(eventTime));

                    //if desired start the input processor
                    if (input != null) {
                        eventTime.start();
                        if (enabled) {
                            markInputAsRunning(newProcessGroup, input);
                            // mark the input/output ports in the category group as running
                            if (hasConnectionPorts()) {
                                templateCreationHelper.markConnectionPortsAsRunning(entity);
                            }
                        } else {
                            // mark the input/output ports in the category group as running
                            if (hasConnectionPorts()) {
                                templateCreationHelper.markConnectionPortsAsRunning(entity);
                            }
                            markInputAsStopped(newProcessGroup, input);
                        }
                        log.debug("Time to mark input as {}.  ElapsedTime: {} ms",
                                (enabled ? "Running" : "Stopped"), eventTime(eventTime));
                    }

                    if (newProcessGroup.hasFatalErrors()) {
                        eventTime.start();
                        rollback();
                        newProcessGroup.setRolledBack(true);
                        //  cleanupControllerServices();
                        newProcessGroup.setSuccess(false);
                        log.debug("Time to rollback on Fatal Errors.  ElapsedTime: {} ms",
                                eventTime(eventTime));
                    }
                    List<NifiError> templateCreationErrors = templateCreationHelper.getErrors();
                    if (templateCreationErrors != null) {
                        errors.addAll(templateCreationErrors);
                    }

                    //add any global errors to the object
                    if (errors != null && !errors.isEmpty()) {
                        for (NifiError error : errors) {
                            newProcessGroup.addError(error);
                            if (error.isFatal()) {
                                newProcessGroup.setSuccess(false);
                                if (!newProcessGroup.isRolledBack()) {
                                    rollback();
                                    newProcessGroup.setRolledBack(true);
                                }
                            }
                        }
                    }
                }

                eventTime.start();
                templateCreationHelper.cleanupControllerServices();
                //fix the feed metadata controller service references
                updateFeedMetadataControllerServiceReferences(updatedControllerServiceProperties);
                log.debug("Time cleanup controller services.  ElapsedTime: {} ms", eventTime(eventTime));

                //align items
                if (this.autoAlign) {
                    eventTime.start();
                    log.info("Aligning Feed flows in NiFi ");
                    AlignProcessGroupComponents alignProcessGroupComponents = new AlignProcessGroupComponents(
                            restClient.getNiFiRestClient(), entity.getParentGroupId());
                    alignProcessGroupComponents.autoLayout();
                    //if this is a new feedProcessGroup (i.e. new category), align the root level items also
                    //fetch the parent to get that id to align
                    if (newCategory) {
                        log.info(
                                "This is the first feed created in the category {}.  Aligning the categories. ",
                                feedMetadata.getCategory().getSystemName());
                        new AlignProcessGroupComponents(restClient.getNiFiRestClient(),
                                this.categoryGroup.getParentGroupId()).autoLayout();
                    }
                    log.info("Time align feed process groups.  ElapsedTime: {} ms", eventTime(eventTime));

                } else {
                    log.info(
                            "Skipping auto alignment in NiFi. You can always manually align this category and all of its feeds by using the rest api: /v1/feedmgr/nifi/auto-align/{}",
                            entity.getParentGroupId());
                    if (newCategory) {
                        log.info("To re align the categories: /v1/feedmgr/nifi/auto-align/{}",
                                this.categoryGroup.getParentGroupId());
                    }
                }

            }
        }
        log.info("Time save Feed flow in NiFi.  ElapsedTime: {} ms", eventTime(totalTime));
        return newProcessGroup;
    } catch (NifiClientRuntimeException e) {
        throw new FeedCreationException("Unable to create the feed [" + feedName + "]. " + e.getMessage(), e);
    }
}
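
The private eventTime(Stopwatch) helper used throughout this method is not shown in the snippet. For the repeated eventTime.start() calls to be legal (start() throws IllegalStateException on a running stopwatch), the helper must stop and reset the watch after reading it. A plausible reconstruction under that assumption:

// Hypothetical reconstruction of the eventTime(Stopwatch) helper; not part of the quoted source.
// Reads the elapsed milliseconds, then stops and resets the watch so the next start() call is legal.
private long eventTime(Stopwatch eventTime) {
    eventTime.stop();
    long elapsedMs = eventTime.elapsed(TimeUnit.MILLISECONDS);
    eventTime.reset();
    return elapsedMs;
}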

From source file: mil.nga.giat.geowave.adapter.vector.delete.CQLDelete.java

@Override
public void execute(OperationParams params) throws ParseException {
    if (debug) {
        org.apache.log4j.Logger.getRootLogger().setLevel(org.apache.log4j.Level.DEBUG);
    }

    final Stopwatch stopWatch = new Stopwatch();

    // Ensure we have all the required arguments
    if (parameters.size() != 1) {
        throw new ParameterException("Requires arguments: <storename>");
    }

    String storeName = parameters.get(0);

    // Config file
    File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT);

    // Attempt to load store.
    StoreLoader storeOptions = new StoreLoader(storeName);
    if (!storeOptions.loadFromConfig(configFile)) {
        throw new ParameterException("Cannot find store name: " + storeOptions.getStoreName());
    }

    DataStore dataStore;
    AdapterStore adapterStore;
    try {
        dataStore = storeOptions.createDataStore();
        adapterStore = storeOptions.createAdapterStore();

        final GeotoolsFeatureDataAdapter adapter;
        if (adapterId != null) {
            adapter = (GeotoolsFeatureDataAdapter) adapterStore.getAdapter(adapterId);
        } else {
            final CloseableIterator<DataAdapter<?>> it = adapterStore.getAdapters();
            adapter = (GeotoolsFeatureDataAdapter) it.next();
            it.close();
        }

        if (debug && (adapter != null)) {
            LOGGER.debug(adapter.toString());
        }

        stopWatch.start();
        final long results = delete(adapter, adapterId, indexId, dataStore, debug);
        stopWatch.stop();

        if (debug) {
            LOGGER.debug(results + " results remaining after delete; time = " + stopWatch.toString());
        }
    } catch (IOException e) {
        LOGGER.warn("Unable to read adapter", e);
    }
}
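
This example uses the no-argument Stopwatch constructor, which Guava deprecated in release 15.0 in favor of static factory methods (the constructor is no longer public in current releases). On current Guava the equivalent setup would be:

final Stopwatch stopWatch = Stopwatch.createUnstarted(); // modern replacement for new Stopwatch()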

From source file: com.thinkbiganalytics.feedmgr.nifi.CreateFeedBuilder.java

private void connectFeedToReusableTemplatexx(String feedGroupId, String feedCategoryId)
        throws NifiComponentNotFoundException {
    Stopwatch stopwatch = Stopwatch.createStarted();
    ProcessGroupDTO reusableTemplateCategory = niFiObjectCache.getReusableTemplateCategoryProcessGroup();

    if (reusableTemplateCategory == null) {
        throw new NifiClientRuntimeException(
                "Unable to find the Reusable Template Group. Please ensure NiFi has the 'reusable_templates' processgroup and appropriate reusable flow for this feed."
                        + " You may need to import the base reusable template for this feed.");
    }
    String reusableTemplateCategoryGroupId = reusableTemplateCategory.getId();
    for (InputOutputPort port : inputOutputPorts) {
        stopwatch.reset().start(); // reset first: the watch was created started, and each port should be timed separately
        restClient.connectFeedToGlobalTemplate(feedGroupId, port.getOutputPortName(), feedCategoryId,
                reusableTemplateCategoryGroupId, port.getInputPortName());
        stopwatch.stop();
        log.debug("Time to connect feed to {} port. ElapsedTime: {} ", port.getInputPortName(),
                stopwatch.elapsed(TimeUnit.MILLISECONDS));
    }
}

From source file: put.ci.cevo.framework.algorithms.ApacheCMAES.java

/**
 * {@inheritDoc}
 */
@Override
protected PointValuePair doOptimize() {
    // -------------------- Initialization --------------------------------

    isMinimize = getGoalType().equals(GoalType.MINIMIZE);
    final double[] guess = getStartPoint();
    // number of objective variables/problem dimension
    dimension = guess.length;
    initializeCMA(guess);
    iterations = 0;
    double bestValue = (isMinimize ? Double.MAX_VALUE : Double.MIN_VALUE);
    push(fitnessHistory, bestValue);
    PointValuePair optimum = new PointValuePair(getStartPoint(), isMinimize ? bestValue : -bestValue);
    PointValuePair lastResult = null;

    // -------------------- Generation Loop --------------------------------
    EvaluatedPopulation<double[]> evaluatedPopulation = null;

    Stopwatch stopwatch = Stopwatch.createUnstarted();
    generationLoop: for (iterations = 1; iterations <= maxIterations; iterations++) {
        stopwatch.reset();
        stopwatch.start();
        incrementIterationCount();

        // Generate and evaluate lambda offspring
        final RealMatrix arz = randn1(dimension, lambda);
        final RealMatrix arx = zeros(dimension, lambda);
        final double[] fitness = new double[lambda];
        // generate random offspring
        for (int k = 0; k < lambda; k++) {
            RealMatrix arxk = null;
            for (int i = 0; i < checkFeasableCount + 1; i++) {
                if (diagonalOnly <= 0) {
                    arxk = xmean.add(BD.multiply(arz.getColumnMatrix(k)).scalarMultiply(sigma)); // m + sig * Normal(0,C)
                } else {
                    arxk = xmean.add(times(diagD, arz.getColumnMatrix(k)).scalarMultiply(sigma));
                }
                //if (i >= checkFeasableCount ||
                //      fitfun.isFeasible(arxk.getColumn(0))) {
                //   break;
                //}
                // regenerate random arguments for row
                arz.setColumn(k, randn(dimension));
            }
            copyColumn(arxk, 0, arx, k);
            //try {
            //   valuePenaltyPairs[k] = fitfun.value(arx.getColumn(k)); // compute fitness
            //} catch (TooManyEvaluationsException e) {
            //   break generationLoop;
            //}
        }

        double newPopTime = stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000.0;
        stopwatch.reset();
        stopwatch.start();
        ArrayList<double[]> population = new ArrayList<>(lambda);
        // This is mine. I ignore constraints.
        for (int k = 0; k < lambda; ++k) {
            population.add(arx.getColumn(k));
        }

        evaluatedPopulation = populationEvaluator.evaluate(population, iterations - 1, random);
        final ValuePenaltyPair[] valuePenaltyPairs = new ValuePenaltyPair[lambda];
        for (int k = 0; k < lambda; ++k) {
            valuePenaltyPairs[k] = new ValuePenaltyPair(evaluatedPopulation.getPopulation().get(k).getFitness(),
                    0.0);
        }

        // Compute fitnesses by adding value and penalty after scaling by value range.
        double valueRange = valueRange(valuePenaltyPairs);
        for (int iValue = 0; iValue < valuePenaltyPairs.length; iValue++) {
            fitness[iValue] = valuePenaltyPairs[iValue].value + valuePenaltyPairs[iValue].penalty * valueRange;
            if (!isMinimize)
                fitness[iValue] = -fitness[iValue];
        }
        double evalTime = stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000.0;
        stopwatch.reset();
        stopwatch.start();

        // Sort by fitness and compute weighted mean into xmean
        final int[] arindex = sortedIndices(fitness);
        // Calculate new xmean, this is selection and recombination
        final RealMatrix xold = xmean; // for speed up of Eq. (2) and (3)
        final RealMatrix bestArx = selectColumns(arx, MathArrays.copyOf(arindex, mu));
        xmean = bestArx.multiply(weights);
        final RealMatrix bestArz = selectColumns(arz, MathArrays.copyOf(arindex, mu));
        final RealMatrix zmean = bestArz.multiply(weights);
        final boolean hsig = updateEvolutionPaths(zmean, xold);
        if (diagonalOnly <= 0) {
            updateCovariance(hsig, bestArx, arz, arindex, xold);
        } else {
            updateCovarianceDiagonalOnly(hsig, bestArz);
        }
        // Adapt step size sigma - Eq. (5)
        sigma *= FastMath.exp(FastMath.min(1, (normps / chiN - 1) * cs / damps));
        final double bestFitness = fitness[arindex[0]];
        final double worstFitness = fitness[arindex[arindex.length - 1]];
        if (bestValue > bestFitness) {
            bestValue = bestFitness;
            lastResult = optimum;
            optimum = new PointValuePair(bestArx.getColumn(0), isMinimize ? bestFitness : -bestFitness);
            if (getConvergenceChecker() != null && lastResult != null
                    && getConvergenceChecker().converged(iterations, optimum, lastResult)) {
                break generationLoop;
            }
        }
        // handle termination criteria
        // Break, if fitness is good enough
        if (stopFitness != 0 && bestFitness < (isMinimize ? stopFitness : -stopFitness)) {
            break generationLoop;
        }
        final double[] sqrtDiagC = sqrt(diagC).getColumn(0);
        final double[] pcCol = pc.getColumn(0);
        for (int i = 0; i < dimension; i++) {
            if (sigma * FastMath.max(FastMath.abs(pcCol[i]), sqrtDiagC[i]) > stopTolX) {
                break;
            }
            if (i >= dimension - 1) {
                break generationLoop;
            }
        }
        for (int i = 0; i < dimension; i++) {
            if (sigma * sqrtDiagC[i] > stopTolUpX) {
                break generationLoop;
            }
        }
        final double historyBest = min(fitnessHistory);
        final double historyWorst = max(fitnessHistory);
        if (iterations > 2 && FastMath.max(historyWorst, worstFitness)
                - FastMath.min(historyBest, bestFitness) < stopTolFun) {
            break generationLoop;
        }
        if (iterations > fitnessHistory.length && historyWorst - historyBest < stopTolHistFun) {
            break generationLoop;
        }
        // condition number of the covariance matrix exceeds 1e14
        if (max(diagD) / min(diagD) > 1e7) {
            break generationLoop;
        }
        // user defined termination
        if (getConvergenceChecker() != null) {
            final PointValuePair current = new PointValuePair(bestArx.getColumn(0),
                    isMinimize ? bestFitness : -bestFitness);
            if (lastResult != null && getConvergenceChecker().converged(iterations, current, lastResult)) {
                break generationLoop;
            }
            lastResult = current;
        }
        // Adjust step size in case of equal function values (flat fitness)
        if (bestValue == fitness[arindex[(int) (0.1 + lambda / 4.)]]) {
            sigma *= FastMath.exp(0.2 + cs / damps);
        }
        if (iterations > 2
                && FastMath.max(historyWorst, bestFitness) - FastMath.min(historyBest, bestFitness) == 0) {
            sigma *= FastMath.exp(0.2 + cs / damps);
        }
        // store best in history
        push(fitnessHistory, bestFitness);
        if (generateStatistics) {
            statisticsSigmaHistory.add(sigma);
            statisticsFitnessHistory.add(bestFitness);
            statisticsMeanHistory.add(xmean.transpose());
            statisticsDHistory.add(diagD.transpose().scalarMultiply(1E5));
        }

        double cmaesTime = stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000.0;
        stopwatch.reset();
        stopwatch.start();
        listener.onNextIteraction(evaluatedPopulation);
        double listenerTime = stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000.0;
        logger.info(String.format("NewPop: %.2f, Eval: %.2f, CMAES: %.2f, Listener: %.2f", newPopTime,
                evalTime, cmaesTime, listenerTime));
    }
    listener.onLastIteraction(evaluatedPopulation);

    return optimum;
}
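
One detail worth keeping from this example: dividing elapsed(TimeUnit.MILLISECONDS) by 1000.0 preserves fractional seconds, whereas asking the stopwatch for seconds directly truncates toward zero:

double seconds = stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000.0; // e.g. 0.25
long truncated = stopwatch.elapsed(TimeUnit.SECONDS);               // 0 for the same quarter second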

From source file: com.arpnetworking.tsdaggregator.perf.FilePerfTestBase.java

/**
 * Runs a test.
 *
 * @param pipelineConfigurationFile Pipeline configuration file.
 * @param duration Timeout period.
 */
protected void benchmark(final File pipelineConfigurationFile, final Duration duration) {
    LOGGER.debug(String.format("Launching pipeline; configuration=%s", pipelineConfigurationFile));

    // Create custom "canary" sink
    final CountDownLatch latch = new CountDownLatch(1);
    final Stopwatch timer = Stopwatch.createUnstarted();
    final ListeningSink sink = new ListeningSink(new Function<Collection<AggregatedData>, Void>() {
        @Nullable
        @Override
        public Void apply(@Nullable final Collection<AggregatedData> input) {
            if (input != null) {
                final AggregatedData datum = Iterables.getFirst(input, null);
                if (datum != null && TestFileGenerator.CANARY.equals(datum.getFQDSN().getMetric())
                        && timer.isRunning()) {
                    timer.stop();
                    latch.countDown();
                }
            }
            return null;
        }
    });

    // Load the specified stock configuration
    final PipelineConfiguration stockPipelineConfiguration = new StaticConfiguration.Builder()
            .addSource(new JsonNodeFileSource.Builder().setFile(pipelineConfigurationFile).build())
            .setObjectMapper(PipelineConfiguration.createObjectMapper(_injector)).build()
            .getRequiredAs(PipelineConfiguration.class);

    // Add the custom "canary" sink
    final List<Sink> benchmarkSinks = Lists.newArrayList(stockPipelineConfiguration.getSinks());
    benchmarkSinks.add(sink);

    // Create the custom configuration
    final PipelineConfiguration benchmarkPipelineConfiguration = OvalBuilder.<PipelineConfiguration, PipelineConfiguration.Builder>clone(
            stockPipelineConfiguration).setSinks(benchmarkSinks).build();

    // Instantiate the pipeline
    final Pipeline pipeline = new Pipeline(benchmarkPipelineConfiguration);

    // Execute the pipeline until the canary flies the coop
    try {
        timer.start();
        pipeline.launch();

        if (!latch.await(duration.getMillis(), TimeUnit.MILLISECONDS)) {
            LOGGER.error("Test timed out");
            throw new RuntimeException("Test timed out");
        }
    } catch (final InterruptedException e) {
        Thread.interrupted();
        throw new RuntimeException("Test interrupted");
    } finally {
        pipeline.shutdown();
    }
}
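
The timer.isRunning() guard inside the canary sink is what makes the callback safe to invoke more than once: stop() throws IllegalStateException on an already-stopped stopwatch, so a second canary datum arriving after the first would otherwise fail inside the sink. The guard pattern in isolation:

if (timer.isRunning()) {
    timer.stop(); // only stop once; a second stop() would throw IllegalStateException
}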

From source file: com.twitter.hraven.datasource.JobHistoryService.java

/**
 * Returns a list of {@link Flow} instances generated from the given results.
 * For the moment, this assumes that the given scanner provides results
 * ordered first by flow ID.
 * 
 * @param scan
 *          the Scan instance setup for retrieval
 * @return a list of {@link Flow} instances assembled from the scan results
 */
private List<Flow> createFromResults(Scan scan, boolean populateTasks, int maxCount) throws IOException {
    List<Flow> flows = new ArrayList<Flow>();
    ResultScanner scanner = null;
    try {
        Stopwatch timer = new Stopwatch().start();
        Stopwatch timerJob = new Stopwatch();
        int rowCount = 0;
        long colCount = 0;
        long resultSize = 0;
        int jobCount = 0;
        scanner = historyTable.getScanner(scan);
        Flow currentFlow = null;
        for (Result result : scanner) {
            if (result != null && !result.isEmpty()) {
                rowCount++;
                colCount += result.size();
                resultSize += result.getWritableSize();
                JobKey currentKey = jobKeyConv.fromBytes(result.getRow());
                // empty runId is special cased -- we need to treat each job as its own flow
                if (currentFlow == null || !currentFlow.contains(currentKey) || currentKey.getRunId() == 0) {
                    // return if we've already hit the limit
                    if (flows.size() >= maxCount) {
                        break;
                    }
                    currentFlow = new Flow(new FlowKey(currentKey));
                    flows.add(currentFlow);
                }
                timerJob.start();
                JobDetails job = new JobDetails(currentKey);
                job.populate(result);
                currentFlow.addJob(job);
                jobCount++;
                timerJob.stop();
            }
        }
        timer.stop();
        LOG.info("Fetched from hbase " + rowCount + " rows, " + colCount + " columns, " + flows.size()
                + " flows and " + jobCount + " jobs taking up " + resultSize + " bytes ( "
                + (double) resultSize / (1024.0 * 1024.0) + " atomic double: "
                + new AtomicDouble(resultSize / (1024.0 * 1024.0)) + ") MB, in total time of " + timer
                + " with  " + timerJob + " spent inJobDetails & Flow population");

        // export the size of data fetched from hbase as a metric
        HravenResponseMetrics.FLOW_HBASE_RESULT_SIZE_VALUE.set((double) (resultSize / (1024.0 * 1024.0)));
    } finally {
        if (scanner != null) {
            scanner.close();
        }
    }

    if (populateTasks) {
        populateTasks(flows);
    }

    return flows;
}
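
A Guava Stopwatch accumulates elapsed time across start()/stop() cycles until reset() is called, which is how timerJob above ends up holding only the total time spent in JobDetails and Flow population over the whole scan. A minimal sketch of that accumulation pattern, written against the current factory-method API (processResult is a hypothetical stand-in for the timed work):

Stopwatch inner = Stopwatch.createUnstarted();
for (Result result : results) {
    inner.start();          // resume accumulating
    processResult(result);  // only this section is timed
    inner.stop();           // pause; elapsed() now holds the running total
}
LOG.info("Total time in processResult: " + inner.elapsed(TimeUnit.MILLISECONDS) + " ms");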

From source file: com.thinkbiganalytics.feedmgr.nifi.CreateFeedBuilder.java

private ProcessGroupDTO createProcessGroupForFeed() throws FeedCreationException {
    Stopwatch stopwatch = Stopwatch.createStarted();
    //create Category Process group
    this.categoryGroup = niFiObjectCache.getCategoryProcessGroup(category);
    if (categoryGroup == null) {
        try {
            ProcessGroupDTO group = restClient.createProcessGroup(category);
            this.categoryGroup = group;
            this.newCategory = true;
            if (this.categoryGroup != null) {
                niFiObjectCache.addCategoryProcessGroup(this.categoryGroup);
            }
        } catch (Exception e) {
            //Swallow exception... it will be handled later
        }
    }
    if (this.categoryGroup == null) {
        throw new FeedCreationException("Unable to get or create the Process group for the Category " + category
                + ". Error occurred while creating instance of template " + templateId + " for Feed "
                + feedName);
    }
    stopwatch.stop();
    log.debug("Time to get/create Category Process Group:{} was: {} ms", category,
            stopwatch.elapsed(TimeUnit.MILLISECONDS));
    stopwatch.reset();

    stopwatch.start();
    //1 create the processGroup
    //check to see if the feed exists... if so version off the old group and create a new group with this feed
    ProcessGroupDTO feedGroup = restClient.getProcessGroupByName(this.categoryGroup.getId(), feedName);
    stopwatch.stop();
    log.debug("Time to find feed Process Group: {} was: {} ms", feedName,
            stopwatch.elapsed(TimeUnit.MILLISECONDS));
    stopwatch.reset();
    if (feedGroup != null) {
        try {
            previousFeedProcessGroup = feedGroup;
            templateCreationHelper.versionProcessGroup(feedGroup);
        } catch (Exception e) {
            throw new FeedCreationException("Previous version of the feed " + feedName
                    + " was found.  Error in attempting to version the previous feed.  Please go into Nifi and address any issues with the Feeds Process Group",
                    e);
        }
    }

    ProcessGroupDTO group = restClient.createProcessGroup(this.categoryGroup.getId(), feedName);

    return group;
}

From source file: fr.inria.eventcloud.overlay.can.StaticLoadBalancingTestBuilder.java

public Test build() {

    return new Test() {

        private static final String CENTROID_SHORT_RDF_TERM_PREFIX = "http://aaa";

        private static final String CENTROID_LONG_RDF_TERM_PREFIX = "http://zzz";

        @Override
        protected void _execute() throws EventCloudIdNotManaged, NetworkAlreadyJoinedException,
                FileNotFoundException, PeerNotActivatedException {

            if (StaticLoadBalancingTestBuilder.this.enableLoadBalancing) {
                EventCloudProperties.STATIC_LOAD_BALANCING.setValue(true);
            }

            EventCloudProperties.RECORD_STATS_MISC_DATASTORE.setValue(true);

            if (StaticLoadBalancingTestBuilder.this.statsRecorderClass != null) {
                EventCloudProperties.STATS_RECORDER_CLASS
                        .setValue(StaticLoadBalancingTestBuilder.this.statsRecorderClass);
            }

            this.eventCloudId = this.deployer.newEventCloud(1, 1);

            SemanticPeer firstPeer = this.deployer.getRandomSemanticPeer(this.eventCloudId);

            final PutGetApi putgetProxy = ProxyFactory.newPutGetProxy(this.deployer.getEventCloudsRegistryUrl(),
                    this.eventCloudId);

            final Stopwatch stopwatch = Stopwatch.createUnstarted();

            Node graph = null;

            if (StaticLoadBalancingTestBuilder.this.trigResource == null) {
                if (this.simulateCompoundEvents()) {
                    graph = NodeGenerator.randomUri(StaticLoadBalancingTestBuilder.this.rdfTermSize);
                }

                int tmpNbQuadsToInsert = StaticLoadBalancingTestBuilder.this.nbQuadsToInsert;
                if (this.isCentroidStatsRecorderUsed()
                        && StaticLoadBalancingTestBuilder.this.nbPeersToInject > 0) {
                    tmpNbQuadsToInsert = StaticLoadBalancingTestBuilder.this.nbQuadsToInsert / 3 * 2;
                }

                for (int i = 0; i < tmpNbQuadsToInsert; i++) {
                    Quadruple quad = null;

                    if (this.simulateCompoundEvents()
                            && i % StaticLoadBalancingTestBuilder.this.nbQuadsPerCompoundEvent == 0) {
                        if (this.isCentroidStatsRecorderUsed()
                                && StaticLoadBalancingTestBuilder.this.nbPeersToInject > 1) {
                            graph = NodeGenerator.randomUri(CENTROID_SHORT_RDF_TERM_PREFIX,
                                    StaticLoadBalancingTestBuilder.this.rdfTermSize);
                        } else {
                            graph = NodeGenerator.randomUri(StaticLoadBalancingTestBuilder.this.rdfTermSize);
                        }
                    }

                    quad = this.buildQuadruple(graph, StaticLoadBalancingTestBuilder.this.rdfTermSize);

                    stopwatch.start();
                    putgetProxy.add(quad);
                    stopwatch.stop();
                }
            } else {
                List<Quadruple> quads = StaticLoadBalancingTestBuilder.this
                        .loadEvents(StaticLoadBalancingTestBuilder.this.trigResource);
                StaticLoadBalancingTestBuilder.this.nbQuadsToInsert = quads.size();

                LOG.info("{} quadruples loaded from {}", quads.size(),
                        StaticLoadBalancingTestBuilder.this.trigResource);

                for (Quadruple q : quads) {
                    stopwatch.start();
                    putgetProxy.add(q);
                    stopwatch.stop();
                }
            }

            if (StaticLoadBalancingTestBuilder.this.insertSkewedData && this.isCentroidStatsRecorderUsed()
                    && StaticLoadBalancingTestBuilder.this.nbPeersToInject > 0) {
                // add 1/3 of the data which are 10 times longer
                int longRdfTermSize = StaticLoadBalancingTestBuilder.this.rdfTermSize * 10;

                if (this.simulateCompoundEvents()) {
                    graph = NodeGenerator.randomUri(CENTROID_LONG_RDF_TERM_PREFIX, longRdfTermSize);
                }

                for (int i = 0; i < StaticLoadBalancingTestBuilder.this.nbQuadsToInsert / 3; i++) {
                    Quadruple quad = null;

                    if (this.simulateCompoundEvents()
                            && i % StaticLoadBalancingTestBuilder.this.nbQuadsPerCompoundEvent == 0) {
                        graph = NodeGenerator.randomUri(CENTROID_LONG_RDF_TERM_PREFIX + longRdfTermSize);
                    }

                    quad = this.buildQuadruple(graph, longRdfTermSize);

                    stopwatch.start();
                    putgetProxy.add(quad);
                    stopwatch.stop();
                }
            }

            LOG.info("It took {} to insert {} quadruples", stopwatch.toString(),
                    StaticLoadBalancingTestBuilder.this.nbQuadsToInsert);

            this.executionTime = stopwatch.elapsed(TimeUnit.MILLISECONDS);

            if (StaticLoadBalancingTestBuilder.this.nbPeersToInject > 0) {
                LOG.info("Before join, first peer dump:\n" + firstPeer.dump());

                for (int i = 0; i < StaticLoadBalancingTestBuilder.this.nbPeersToInject; i++) {
                    long maxNumQuads = -1;
                    Peer electedPeer = null;
                    List<Peer> peers = this.deployer.getRandomSemanticTracker(this.eventCloudId).getPeers();

                    // we select the peer which has the higher number of
                    // quadruples in the misc datastore in order to
                    // perform the next split
                    for (Peer p : peers) {
                        GetStatsRecordeResponseOperation response = (GetStatsRecordeResponseOperation) PAFuture
                                .getFutureValue(p.receive(new GetStatsRecorderOperation()));
                        if (response.getStatsRecorder().getNbQuadruples() > maxNumQuads) {
                            maxNumQuads = response.getStatsRecorder().getNbQuadruples();
                            electedPeer = p;
                        }
                    }

                    Peer newPeer = SemanticFactory.newSemanticPeer(new SemanticOverlayProvider(true));

                    newPeer.join(electedPeer);

                    this.deployer.getRandomSemanticTracker(this.eventCloudId).storePeer(newPeer);

                    LOG.info("Join operation " + (i + 1));
                }

                LOG.info("After injections, other peers dump:\n");
                for (Peer p : this.deployer.getRandomSemanticTracker(this.eventCloudId).getPeers()) {
                    LOG.info(p.dump());
                }

                if (StaticLoadBalancingTestBuilder.this.nbLookupsAfterJoinOperations > 0) {
                    for (int i = 0; i < StaticLoadBalancingTestBuilder.this.nbLookupsAfterJoinOperations; i++) {
                        // long size =
                        putgetProxy.find(QuadruplePattern.ANY).size();

                        // Assert.assertEquals(
                        // StaticLoadBalancingTestBuilder.this.nbQuadsToInsert,
                        // size);
                    }
                }
            } else {
                LOG.info("Peer dump:\n" + firstPeer.dump());
            }

            ComponentUtils.terminateComponent(putgetProxy);
        }

        private Quadruple buildQuadruple(Node graph, int rdfTermSize) {
            if (this.simulateCompoundEvents()) {
                if (this.isCentroidStatsRecorderUsed()
                        && StaticLoadBalancingTestBuilder.this.nbPeersToInject > 1) {
                    if (rdfTermSize > StaticLoadBalancingTestBuilder.this.rdfTermSize) {
                        return QuadrupleGenerator.randomWithoutLiteral(graph, CENTROID_LONG_RDF_TERM_PREFIX,
                                rdfTermSize);
                    } else {
                        return QuadrupleGenerator.randomWithoutLiteral(graph, CENTROID_SHORT_RDF_TERM_PREFIX,
                                rdfTermSize);
                    }
                } else {
                    if (graph == null) {
                        return QuadrupleGenerator.randomWithoutLiteral(rdfTermSize);
                    } else {
                        return QuadrupleGenerator.randomWithoutLiteral(graph, rdfTermSize);
                    }
                }
            } else {
                if (this.isCentroidStatsRecorderUsed()
                        && StaticLoadBalancingTestBuilder.this.nbPeersToInject > 1) {
                    if (rdfTermSize > StaticLoadBalancingTestBuilder.this.rdfTermSize) {
                        return QuadrupleGenerator.randomWithoutLiteral(CENTROID_LONG_RDF_TERM_PREFIX,
                                rdfTermSize);
                    } else {
                        return QuadrupleGenerator.randomWithoutLiteral(CENTROID_SHORT_RDF_TERM_PREFIX,
                                rdfTermSize);
                    }
                } else {
                    if (graph == null) {
                        return QuadrupleGenerator.randomWithoutLiteral(rdfTermSize);
                    } else {
                        return QuadrupleGenerator.randomWithoutLiteral(graph, rdfTermSize);
                    }
                }
            }
        }

        private boolean isCentroidStatsRecorderUsed() {
            return (StaticLoadBalancingTestBuilder.this.statsRecorderClass != null)
                    && (StaticLoadBalancingTestBuilder.this.statsRecorderClass
                            .isAssignableFrom(CentroidStatsRecorder.class));
        }

        private boolean simulateCompoundEvents() {
            return StaticLoadBalancingTestBuilder.this.nbQuadsPerCompoundEvent != -1;
        }

    };

}

From source file: com.cinchapi.concourse.importer.cli.ImportCli.java

@Override
protected void doTask() {
    final ImportOptions opts = (ImportOptions) options;
    final Set<Long> records;
    final Constructor<? extends Importer> constructor = getConstructor(opts.type);
    if (opts.data == null) { // Import data from stdin
        Importer importer = Reflection.newInstance(constructor, concourse);
        if (!opts.dynamic.isEmpty()) {
            importer.setParams(options.dynamic);
        }
        if (importer instanceof Headered && !opts.header.isEmpty()) {
            ((Headered) importer).parseHeader(opts.header);
        }
        try {
            ConsoleReader reader = new ConsoleReader();
            String line;
            records = Sets.newLinkedHashSet();
            Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {

                // Interactive import is ended when user presses CTRL + C,
                // so we need this shutdown hook to ensure that they get
                // feedback about the import before the JVM dies.

                @Override
                public void run() {
                    if (options.verbose) {
                        System.out.println(records);
                    }
                    System.out.println(Strings.format("Imported data into {} records", records.size()));
                }

            }));
            try {
                final AtomicBoolean lock = new AtomicBoolean(false);
                new Thread(new Runnable() { // If there is no input in
                                            // 100ms, assume that the
                                            // session is interactive (i.e.
                                            // not piped) and display a
                                            // prompt

                    @Override
                    public void run() {
                        try {
                            Thread.sleep(100);
                            if (lock.compareAndSet(false, true)) {
                                System.out.println("Importing from stdin. Press " + "CTRL + C when finished");
                            }
                        } catch (InterruptedException e) {
                        }
                    }

                }).start();
                while ((line = reader.readLine()) != null) {
                    try {
                        lock.set(true);
                        records.addAll(importer.importString(line));
                    } catch (Exception e) {
                        System.err.println(e);
                    }
                }
            } catch (IOException e) {
                throw Throwables.propagate(e);
            }
        } catch (IOException e) {
            throw Throwables.propagate(e);
        } finally {
            try {
                TerminalFactory.get().restore();
            } catch (Exception e) {
                throw Throwables.propagate(e);
            }
        }
    } else {
        String path = FileOps.expandPath(opts.data, getLaunchDirectory());
        Collection<String> files = FileOps.isDirectory(path) ? scan(Paths.get(path)) : ImmutableList.of(path);
        Stopwatch watch = Stopwatch.createUnstarted();
        if (files.size() > 1) {
            records = Sets.newConcurrentHashSet();
            final Queue<String> filesQueue = (Queue<String>) files;
            List<Runnable> runnables = Lists.newArrayListWithCapacity(opts.numThreads);
            // Create just enough Runnables with instantiated Importers in
            // advance. Each of those Runnables will work until #filesQueue
            // is exhausted.
            opts.numThreads = Math.min(opts.numThreads, files.size());
            for (int i = 0; i < opts.numThreads; ++i) {
                final Importer importer0 = Reflection.newInstance(constructor,
                        i == 0 ? concourse
                                : Concourse.connect(opts.host, opts.port, opts.username, opts.password,
                                        opts.environment));
                if (!opts.dynamic.isEmpty()) {
                    importer0.setParams(opts.dynamic);
                }
                if (importer0 instanceof Headered && !opts.header.isEmpty()) {
                    ((Headered) importer0).parseHeader(opts.header);
                }
                runnables.add(new Runnable() {

                    private final Importer importer = importer0;

                    @Override
                    public void run() {
                        String file;
                        while ((file = filesQueue.poll()) != null) {
                            records.addAll(importer.importFile(file));
                        }
                    }

                });
            }
            ExecutorService executor = Executors.newFixedThreadPool(runnables.size());
            System.out.println("Starting import...");
            watch.start();
            for (Runnable runnable : runnables) {
                executor.execute(runnable);
            }
            executor.shutdown();
            try {
                if (!executor.awaitTermination(1, TimeUnit.MINUTES)) {
                    while (!executor.isTerminated()) {
                        System.out.print('.'); // block until all tasks are
                                               // completed and provide some
                                               // feedback to the user
                    }
                }
            } catch (InterruptedException e) {
                throw Throwables.propagate(e);
            }
        } else {
            Importer importer = Reflection.newInstance(constructor, concourse);
            if (!opts.dynamic.isEmpty()) {
                importer.setParams(opts.dynamic);
            }
            if (importer instanceof Headered && !opts.header.isEmpty()) {
                ((Headered) importer).parseHeader(opts.header);
            }
            System.out.println("Starting import...");
            watch.start();
            records = importer.importFile(files.iterator().next());
        }
        watch.stop();
        long elapsed = watch.elapsed(TimeUnit.MILLISECONDS);
        double seconds = elapsed / 1000.0;
        if (options.verbose) {
            System.out.println(records);
        }
        System.out.println(MessageFormat.format("Imported data " + "into {0} records in {1} seconds",
                records.size(), seconds));
    }
}