Example usage for org.apache.commons.math3.stat.descriptive SummaryStatistics SummaryStatistics

Introduction

On this page you can find example usage of the no-argument constructor SummaryStatistics() of org.apache.commons.math3.stat.descriptive.SummaryStatistics.

Prototype

public SummaryStatistics() 

Document

Construct a SummaryStatistics instance
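
A minimal, self-contained sketch of the constructor in typical use before the project examples below (the class name and sample values here are illustrative only, not taken from the projects):

import org.apache.commons.math3.stat.descriptive.SummaryStatistics;

public class SummaryStatisticsBasics {
    public static void main(String[] args) {
        // construct an empty SummaryStatistics instance
        SummaryStatistics stats = new SummaryStatistics();

        // add values one at a time; the individual values are not stored,
        // only running moments and counts are kept
        for (double v : new double[] { 1.0, 2.0, 3.0, 4.0 }) {
            stats.addValue(v);
        }

        // query the accumulated statistics
        System.out.println("n    = " + stats.getN());    // 4
        System.out.println("mean = " + stats.getMean()); // 2.5
        System.out.println("sd   = " + stats.getStandardDeviation());
        System.out.println("min  = " + stats.getMin());
        System.out.println("max  = " + stats.getMax());
    }
}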

Usage

From source file:gov.llnl.lc.infiniband.opensm.plugin.data.OSM_FabricDeltaAnalyzer.java

public double getNodeUtilization(IB_Guid guid, PFM_Port.PortCounterName pcn) {
    // find all the ports for this node, and average up their utilization numbers
    IB_Vertex v = getIB_Vertex(guid);
    if (v == null)
        return 0.0;

    SummaryStatistics nodeStats = new SummaryStatistics();

    // loop through all the ports in this vertex
    int num_ports = v.getNode().sbnNode.num_ports;
    for (int pn = 1; pn <= num_ports; pn++) {
        nodeStats.addValue(getPortUtilization(guid, pn, pcn));
    }
    return nodeStats.getMean();
}

From source file:model.scenario.CompetitiveScenarioTest.java

@Test
public void rightPriceAndQuantityTestAsMarginalNoPIDAlreadyLearnedFlows() {

    for (int competitors = 4; competitors <= 7; competitors++) {
        //  System.out.println("FORCED COMPETITIVE FIRMS: " + (competitors+1));

        for (int i = 0; i < 5; i++) {

            final MacroII macroII = new MacroII(System.currentTimeMillis());
            final TripolistScenario scenario1 = new TripolistScenario(macroII);
            scenario1.setSalesDepartmentType(SalesDepartmentOneAtATime.class);
            scenario1.setAskPricingStrategy(InventoryBufferSalesControl.class);
            scenario1.setControlType(
                    MonopolistScenario.MonopolistScenarioIntegratedControlEnum.MARGINAL_PLANT_CONTROL);
            scenario1.setAdditionalCompetitors(competitors);
            scenario1.setWorkersToBeRehiredEveryDay(true);
            scenario1.setDemandIntercept(102);

            scenario1.setSalesPricePreditorStrategy(FixedDecreaseSalesPredictor.class);

            //assign scenario
            macroII.setScenario(scenario1);

            macroII.start();

            macroII.schedule.step(macroII);
            for (Firm firm : scenario1.getCompetitors()) {
                for (HumanResources hr : firm.getHRs())
                    hr.setPredictor(new FixedIncreasePurchasesPredictor(0));

                firm.getSalesDepartment(UndifferentiatedGoodType.GENERIC)
                        .setPredictorStrategy(new FixedDecreaseSalesPredictor(0));
            }

            while (macroII.schedule.getTime() < 5000) {
                macroII.schedule.step(macroII);

            }

            SummaryStatistics prices = new SummaryStatistics();
            SummaryStatistics quantities = new SummaryStatistics();
            SummaryStatistics target = new SummaryStatistics();
            for (int j = 0; j < 500; j++) {
                macroII.schedule.step(macroII);
                assert !Float.isNaN(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayAveragePrice());
                prices.addValue(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayAveragePrice());
                quantities.addValue(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayVolume());

                for (EconomicAgent agent : macroII.getMarket(UndifferentiatedGoodType.GENERIC).getSellers()) {
                    // note: 'department' is looked up but not used here; the target series
                    // simply records today's market volume once per seller
                    SalesDepartment department = ((Firm) agent)
                            .getSalesDepartment(UndifferentiatedGoodType.GENERIC);
                    target.addValue(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayVolume());
                }

            }

            System.out.println(prices.getMean() + " - " + quantities.getMean() + "/" + target.getMean() + "----"
                    + macroII.seed() + " | "
                    + macroII.getMarket(UndifferentiatedGoodType.GENERIC).getLastDaysAveragePrice());
            System.out.println("standard deviations: price : " + prices.getStandardDeviation() + " , quantity: "
                    + quantities.getStandardDeviation());

            assertEquals(prices.getMean(), 58, 5);
            assertTrue(prices.getStandardDeviation() < 5.5);
            assertEquals(quantities.getMean(), 44, 5);
            assertTrue(quantities.getStandardDeviation() < 5.5);
        }

    }

}

From source file:fr.gael.dhus.server.http.valve.processings.ProcessingValve.java

protected void checkAccess(UserKey user, Cache window) throws ProcessingQuotaException {
    ProcessingInformation pi = user.getPi();
    if (isWhiteListed(pi) || isInternal(pi)) {
        return;
    }

    SummaryStatistics cpu_time_stats = new SummaryStatistics();
    SummaryStatistics user_time_stats = new SummaryStatistics();
    SummaryStatistics memory_stats = new SummaryStatistics();
    long now = System.nanoTime();

    for (Object o : window.getKeysWithExpiryCheck()) {
        Long date_ns = (Long) o;
        if ((now - date_ns) > sToNs(getTimeWindow())) {
            // Cache returned an element outside the expected time window
            // (the delta is now - date_ns, in ns). This can happen when the
            // cache settings are modified on the fly via JMX.
            continue;
        }
        ProcessingInformation proc = (ProcessingInformation) window.get(o).getObjectValue();

        if (proc.getCpuTimeNs() != null) {
            cpu_time_stats.addValue(proc.getCpuTimeNs().doubleValue());
        }
        if (proc.getUserTimeNs() != null) {
            user_time_stats.addValue(proc.getUserTimeNs().doubleValue());
        }
        if (proc.getMemoryUsage() != null) {
            memory_stats.addValue(proc.getMemoryUsage().doubleValue());
        }
    }
    String username = pi.getUsername();
    // Checks the system CPU time used in the time frame
    if (checkParameter(getMaxElapsedTimePerUserPerWindow())
            && cpu_time_stats.getSum() > sToNs(getMaxElapsedTimePerUserPerWindow())) {
        StringBuilder sb = new StringBuilder();
        sb.append("CPU usage quota exceeded (").append(formatMn(sToMn(getMaxElapsedTimePerUserPerWindow())))
                .append("mn per period of ").append(formatMn(sToMn(getTimeWindow())))
                .append("mn) - please wait and retry.");

        LOGGER.warn("[{}]  CPU usage quota exceeded: {} (max={})", username,
                formatInterval((long) cpu_time_stats.getSum()),
                formatInterval((long) getMaxElapsedTimePerUserPerWindow() * 1000000000));

        throw new ProcessingQuotaException(sb.toString());
    }

    // Checks the user CPU time used in the time frame
    if (checkParameter(getMaxElapsedTimePerUserPerWindow())
            && user_time_stats.getSum() >= sToNs(getMaxElapsedTimePerUserPerWindow())) {
        StringBuilder sb = new StringBuilder();
        sb.append("User CPU usage quota exceeded (")
                .append(formatMn(sToMn(getMaxElapsedTimePerUserPerWindow()))).append("mn per period of ")
                .append(formatMn(sToMn(getTimeWindow()))).append("mn) - please wait and retry.");

        LOGGER.warn("[{}] User CPU usage quota exceeded: {} (max={})", username,
                formatInterval((long) user_time_stats.getSum()),
                formatInterval((long) getMaxElapsedTimePerUserPerWindow() * 1000000000));

        throw new ProcessingQuotaException(sb.toString());
    }
    // Checks the total memory used in the time frame
    if (checkParameter(getMaxUsedMemoryPerUserPerWindow())
            && memory_stats.getSum() >= getMaxUsedMemoryPerUserPerWindow()) {
        StringBuilder sb = new StringBuilder();
        sb.append("Memory quota exceeded (").append(formatSize(getMaxUsedMemoryPerUserPerWindow()))
                .append(" used in a period of ").append(formatMn(sToMn(getTimeWindow())))
                .append("mn) - please wait and retry.");

        LOGGER.warn("[{}] Memory quota exceeded: {} (max={})", username,
                formatSize((long) memory_stats.getSum()),
                formatSize((long) getMaxUsedMemoryPerUserPerWindow()));

        throw new ProcessingQuotaException(sb.toString());
    }
    // Checks the number of requests in the time frame
    if (checkParameter(getMaxRequestNumberPerUserPerWindow())
            && user_time_stats.getN() >= getMaxRequestNumberPerUserPerWindow()) {
        StringBuilder sb = new StringBuilder();
        sb.append("Maximum number of request exceeded (").append(getMaxRequestNumberPerUserPerWindow())
                .append("max calls in a period of ").append(formatMn(sToMn(getTimeWindow())))
                .append("mn) - please wait and retry.");

        LOGGER.warn("[{}] Maximum number of request exceeded: {} (max={})", username, user_time_stats.getN(),
                getMaxRequestNumberPerUserPerWindow());

        throw new ProcessingQuotaException(sb.toString());
    }

    LOGGER.info("Time Window cumuls for user {}:{} cpu_time={}mn,user_time={}mn,memory={}", username,
            user_time_stats.getN(), formatMn(nsToMn(cpu_time_stats.getSum())),
            formatMn(nsToMn(user_time_stats.getSum())), formatSize((long) memory_stats.getSum()));
}

From source file:com.civprod.writerstoolbox.OpenNLP.training.WordSplitingTokenizerTrainer.java

private void cmdTrainActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cmdTrainActionPerformed
    final WordSplitingTokenizerTrainer tempThis = this;
    final Charset utf8 = Charset.forName("UTF-8");
    new Thread(() -> {
        textTestResults.setText("");
        //create TokenizerFactory part of the training context
        WordSplittingTokenizerFactory myTokenizerFactory = new WordSplittingTokenizerFactory("EN",
                mAbbreviationDictionary, false, null, mSpellingDictionary,
                (TimeComplexity) comboTimeComplexity.getSelectedItem());

        Tokenizer stdTokenizer = null;
        try {
            stdTokenizer = OpenNLPUtils.createTokenizer();
        } catch (IOException ex) {
            Logger.getLogger(WordSplitingTokenizerTrainer.class.getName()).log(Level.SEVERE, null, ex);
        }
        Tokenizer myNonSplitingTokenizer = null;
        try {
            myNonSplitingTokenizer = OpenNLPUtils.createTokenizer(OpenNLPUtils.readTokenizerModel(
                    OpenNLPUtils.buildModelFileStream(".\\data\\OpenNLP\\en-fiction-token.bin")));
        } catch (IOException ex) {
            Logger.getLogger(WordSplitingTokenizerTrainer.class.getName()).log(Level.SEVERE, null, ex);
        }
        List<FileSplit> FileSplits = FileSplit.generateFileSplitsLOO(mFileCollectionListModel);
        File trainingFile = new File("en-token.train");
        File testFile = new File("en-token.test");
        SummaryStatistics curFStats = new SummaryStatistics();
        SummaryStatistics curRecallStats = new SummaryStatistics();
        SummaryStatistics curPrecisionStats = new SummaryStatistics();
        SummaryStatistics stdFStats = new SummaryStatistics();
        SummaryStatistics stdRecallStats = new SummaryStatistics();
        SummaryStatistics stdPrecisionStats = new SummaryStatistics();
        SummaryStatistics myNonSplitFStats = new SummaryStatistics();
        SummaryStatistics myNonSplitRecallStats = new SummaryStatistics();
        SummaryStatistics myNonSplitPrecisionStats = new SummaryStatistics();
        java.io.BufferedWriter trainingFileWriter = null;
        for (FileSplit curFileSplit : FileSplits) {
            try {
                //create training file
                trainingFileWriter = new java.io.BufferedWriter(
                        new java.io.OutputStreamWriter(new java.io.FileOutputStream(trainingFile), utf8));
                for (File curTrainingFile : curFileSplit.getTrainingFiles()) {
                    java.io.BufferedReader curTrainingFileReader = null;
                    try {
                        Charset fileCharset = FileUtils.determineCharset(curTrainingFile);
                        if (fileCharset == null) {
                            fileCharset = utf8;
                        }
                        curTrainingFileReader = new java.io.BufferedReader(new java.io.InputStreamReader(
                                new java.io.FileInputStream(curTrainingFile), fileCharset));
                        while (curTrainingFileReader.ready()) {
                            String curLine = curTrainingFileReader.readLine();
                            trainingFileWriter.append(curLine).append("\n");
                        }
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    } finally {
                        if (curTrainingFileReader != null) {
                            curTrainingFileReader.close();
                        }
                    }
                }
                trainingFileWriter.write('\n');
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (trainingFileWriter != null) {
                    try {
                        trainingFileWriter.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
            //create test file
            java.io.BufferedWriter testFileWriter = null;
            try {
                //create test file
                testFileWriter = new java.io.BufferedWriter(
                        new java.io.OutputStreamWriter(new java.io.FileOutputStream(testFile), utf8));
                for (File curTrainingFile : curFileSplit.getTestFiles()) {
                    String testingFileName = curTrainingFile.getCanonicalPath();
                    textTestResults
                            .setText(textTestResults.getText() + "testing with " + testingFileName + "\n");
                    java.io.BufferedReader curTrainingFileReader = null;
                    try {
                        Charset fileCharset = FileUtils.determineCharset(curTrainingFile);
                        if (fileCharset == null) {
                            fileCharset = utf8;
                        }
                        curTrainingFileReader = new java.io.BufferedReader(new java.io.InputStreamReader(
                                new java.io.FileInputStream(curTrainingFile), fileCharset));
                        while (curTrainingFileReader.ready()) {
                            String curLine = curTrainingFileReader.readLine();
                            testFileWriter.append(curLine).append("\n");
                        }
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    } finally {
                        if (curTrainingFileReader != null) {
                            curTrainingFileReader.close();
                        }
                    }
                }
                testFileWriter.write('\n');
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (testFileWriter != null) {
                    try {
                        testFileWriter.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
            //create and train model
            ObjectStream<String> trainingLineStream = null;
            TokenizerModel train = null;
            try {
                trainingLineStream = new PlainTextByLineStream(new FileInputStream(trainingFile), utf8);
                ObjectStream<TokenSample> sampleStream = null;
                try {
                    sampleStream = new TokenSampleStream(trainingLineStream);
                    train = TokenizerME.train(sampleStream, myTokenizerFactory,
                            TrainingParameters.defaultParams());
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                } finally {
                    if (sampleStream != null) {
                        try {
                            sampleStream.close();
                        } catch (IOException ex) {
                            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null,
                                    ex);
                        }
                    }
                }
            } catch (FileNotFoundException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (trainingLineStream != null) {
                    try {
                        trainingLineStream.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
            if (train != null) {
                ObjectStream<String> testingLineStream = null;
                try {
                    testingLineStream = new PlainTextByLineStream(new FileInputStream(testFile), utf8);
                    ObjectStream<TokenSample> sampleStream = null;
                    try {
                        sampleStream = new TokenSampleStream(testingLineStream);
                        TokenizerME testDetector = new TokenizerME(train);
                        TokenizerEvaluator evaluator = new TokenizerEvaluator(testDetector);
                        evaluator.evaluate(sampleStream);
                        FMeasure testFMeasure = evaluator.getFMeasure();
                        curFStats.addValue(testFMeasure.getFMeasure());
                        curRecallStats.addValue(testFMeasure.getRecallScore());
                        curPrecisionStats.addValue(testFMeasure.getPrecisionScore());
                        textTestResults.setText(textTestResults.getText() + testFMeasure.getFMeasure() + " "
                                + testFMeasure.getPrecisionScore() + " " + testFMeasure.getRecallScore()
                                + "\n");
                        if (stdTokenizer != null) {
                            testingLineStream = new PlainTextByLineStream(new FileInputStream(testFile), utf8);
                            sampleStream = new TokenSampleStream(testingLineStream);
                            TokenizerEvaluator stdEvaluator = new TokenizerEvaluator(stdTokenizer);
                            stdEvaluator.evaluate(sampleStream);
                            FMeasure stdFMeasure = stdEvaluator.getFMeasure();
                            stdFStats.addValue(stdFMeasure.getFMeasure());
                            stdRecallStats.addValue(stdFMeasure.getRecallScore());
                            stdPrecisionStats.addValue(stdFMeasure.getPrecisionScore());
                            textTestResults.setText(textTestResults.getText() + " " + stdFMeasure.getFMeasure()
                                    + " " + stdFMeasure.getPrecisionScore() + " " + stdFMeasure.getRecallScore()
                                    + "\n");
                        }
                        if (myNonSplitingTokenizer != null) {
                            testingLineStream = new PlainTextByLineStream(new FileInputStream(testFile), utf8);
                            sampleStream = new TokenSampleStream(testingLineStream);
                            TokenizerEvaluator myNonSplitingEvaluator = new TokenizerEvaluator(
                                    myNonSplitingTokenizer);
                            myNonSplitingEvaluator.evaluate(sampleStream);
                            FMeasure myNonSplitFMeasure = myNonSplitingEvaluator.getFMeasure();
                            myNonSplitFStats.addValue(myNonSplitFMeasure.getFMeasure());
                            myNonSplitRecallStats.addValue(myNonSplitFMeasure.getRecallScore());
                            myNonSplitPrecisionStats.addValue(myNonSplitFMeasure.getPrecisionScore());
                            textTestResults
                                    .setText(textTestResults.getText() + " " + myNonSplitFMeasure.getFMeasure()
                                            + " " + myNonSplitFMeasure.getPrecisionScore() + " "
                                            + myNonSplitFMeasure.getRecallScore() + "\n");
                        }
                        textTestResults.setText(textTestResults.getText() + "\n");
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    } finally {
                        if (sampleStream != null) {
                            try {
                                sampleStream.close();
                            } catch (IOException ex) {
                                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE,
                                        null, ex);
                            }
                        }
                    }
                } catch (FileNotFoundException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                } finally {
                    if (testingLineStream != null) {
                        try {
                            testingLineStream.close();
                        } catch (IOException ex) {
                            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null,
                                    ex);
                        }
                    }
                }
            }
        }
        textTestResults.setText(textTestResults.getText() + "\n");
        textTestResults.setText(textTestResults.getText() + "test model\n");
        textTestResults.setText(textTestResults.getText() + "f score mean " + curFStats.getMean() + " stdDev "
                + curFStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "recall mean " + curRecallStats.getMean()
                + " stdDev " + curRecallStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "precision score mean "
                + curPrecisionStats.getMean() + " stdDev " + curPrecisionStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "std model\n");
        textTestResults.setText(textTestResults.getText() + "f score mean " + stdFStats.getMean() + " stdDev "
                + stdFStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "recall mean " + stdRecallStats.getMean()
                + " stdDev " + stdRecallStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "precision score mean "
                + stdPrecisionStats.getMean() + " stdDev " + stdPrecisionStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "my non spliting model\n");
        textTestResults.setText(textTestResults.getText() + "f score mean " + myNonSplitFStats.getMean()
                + " stdDev " + myNonSplitFStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "recall mean " + myNonSplitRecallStats.getMean()
                + " stdDev " + myNonSplitRecallStats.getStandardDeviation() + "\n");
        textTestResults.setText(
                textTestResults.getText() + "precision score mean " + myNonSplitPrecisionStats.getMean()
                        + " stdDev " + myNonSplitPrecisionStats.getStandardDeviation() + "\n");
        //create combined training file
        trainingFileWriter = null;
        try {
            trainingFileWriter = new java.io.BufferedWriter(
                    new java.io.OutputStreamWriter(new java.io.FileOutputStream(trainingFile), utf8));
            for (File curTrainingFile : mFileCollectionListModel) {
                java.io.BufferedReader curTrainingFileReader = null;
                try {
                    Charset fileCharset = FileUtils.determineCharset(curTrainingFile);
                    if (fileCharset == null) {
                        fileCharset = utf8;
                    }
                    curTrainingFileReader = new java.io.BufferedReader(new java.io.InputStreamReader(
                            new java.io.FileInputStream(curTrainingFile), fileCharset));
                    while (curTrainingFileReader.ready()) {
                        String curLine = curTrainingFileReader.readLine();
                        trainingFileWriter.append(curLine).append("\n");
                    }
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                } finally {
                    if (curTrainingFileReader != null) {
                        curTrainingFileReader.close();
                    }
                }
            }
            trainingFileWriter.write('\n');
        } catch (IOException ex) {
            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            if (trainingFileWriter != null) {
                try {
                    trainingFileWriter.close();
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
        //create and train model
        ObjectStream<String> lineStream = null;
        this.createdObject = null;
        try {
            lineStream = new PlainTextByLineStream(new FileInputStream(trainingFile), utf8);
            ObjectStream<TokenSample> sampleStream = null;
            try {
                sampleStream = new TokenSampleStream(lineStream);
                this.createdObject = TokenizerME.train(sampleStream, myTokenizerFactory,
                        TrainingParameters.defaultParams());
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (sampleStream != null) {
                    try {
                        sampleStream.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
        } catch (FileNotFoundException ex) {
            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            if (lineStream != null) {
                try {
                    lineStream.close();
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
        if (createdObject != null) {
            OutputStream modelOut = null;
            File modelFile = new File("en-fiction-token.bin");
            try {
                modelOut = new BufferedOutputStream(new FileOutputStream(modelFile));
                createdObject.serialize(modelOut);
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (modelOut != null) {
                    try {
                        modelOut.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
        }
        textTestResults.setText(textTestResults.getText() + "done");
    }).start();
}

From source file:edu.uchc.octane.OctaneWindowControl.java

/**
 * Show mean square displacement.
 */
public void showMSD(int maxSteps) {
    int[] selected = frame_.getTrajsTable().getSelectedTrajectoriesOrAll();
    ArrayList<SummaryStatistics> stat = new ArrayList<SummaryStatistics>();

    for (int i = 0; i < selected.length; i++) {

        Trajectory t = dataset_.getTrajectoryByIndex(selected[i]);

        for (int j = 0; j < t.size() - 1; j++) {

            int frame = t.get(j).frame;
            for (int k = j + 1; k < t.size(); k++) {

                int deltaframe = t.get(k).frame - frame;
                if (deltaframe <= maxSteps) {

                    while (deltaframe > stat.size()) {

                        stat.add(new SummaryStatistics());
                    }

                    stat.get(deltaframe - 1).addValue(t.get(j).distance2(t.get(k)));

                }
            }
        }

        IJ.showProgress(i, selected.length);
    }

    double[] x = new double[stat.size()];
    double[] y = new double[stat.size()];
    double[] e = new double[stat.size()];

    if (stat.size() > 0) {

        double ps = dataset_.getPixelSize() * dataset_.getPixelSize();

        for (int i = 0; i < stat.size(); i++) {

            x[i] = 1.0 + i;
            if (stat.get(i).getN() > 1) {

                y[i] = stat.get(i).getMean() * ps;
                e[i] = stat.get(i).getStandardDeviation() / Math.sqrt(stat.get(i).getN()) * ps;

            }
        }

        Plot plotWin = new Plot("MSD Plot", "T/T-frame", "MSD (nm^2)", x, y);
        plotWin.addPoints(x, y, Plot.BOX);
        plotWin.addErrorBars(e);
        plotWin.show();

    }
}

From source file:gdsc.smlm.ij.plugins.PSFEstimator.java

private boolean checkAngleSignificance() {
    boolean tryAgain = false;
    if (ignore[ANGLE])
        return tryAgain;

    // The angle is relative to the major axis (X). 
    // It could be close to 0, 90 or 180 to allow it to be ignored in favour of a free circular function.

    final double[] angles = sampleNew[ANGLE].getValues();

    for (double testAngle : new double[] { 90, 0, 180 }) {
        // The angle will be in the 0-180 domain.
        // We need to compute the Statistical summary around the testAngle.
        StatisticalSummary sampleStats;
        if (testAngle == 0 || testAngle == 180) {
            SummaryStatistics stats = new SummaryStatistics();
            boolean zeroAngle = (testAngle == 0);
            for (double a : angles) {
                if (zeroAngle) {
                    // Convert to -90-90 domain
                    if (a > 90)
                        a -= 180;
                } else {
                    // Convert to 90-270 domain
                    if (a < 90)
                        a += 180;
                }
                stats.addValue(a);
            }
            sampleStats = stats;
        } else {
            // Already in the 0-180 domain around the angle 90
            sampleStats = sampleNew[ANGLE];
        }

        final double p = TestUtils.tTest(testAngle, sampleStats);
        if (p > settings.pValue) {
            log("NOTE: Angle is not significant: %g ~ %g (p=%g) => Re-run with fixed zero angle",
                    sampleStats.getMean(), testAngle, p);
            ignore[ANGLE] = true;
            config.getFitConfiguration().setFitFunction(FitFunction.FREE_CIRCULAR);
            tryAgain = true;
            break;
        } else
            debug("  NOTE: Angle is significant: %g !~ %g (p=%g)", sampleNew[ANGLE].getMean(), testAngle, p);
    }
    return tryAgain;
}

From source file:model.scenario.OneLinkSupplyChainScenarioRegressionTest.java

private OneLinkSupplyChainResult testFoodMonopolistWithFixedProductionRun(int random,
        final boolean competitorsLearned, int speed, float divideGainsByThis, File csvFileToWrite) {
    final MacroII macroII = new MacroII(random);
    final OneLinkSupplyChainScenarioCheatingBuyPriceAndForcedMonopolist scenario1 = new OneLinkSupplyChainScenarioCheatingBuyPriceAndForcedMonopolist(
            macroII, OneLinkSupplyChainScenario.OUTPUT_GOOD) {
        @Override
        protected void buildBeefSalesPredictor(SalesDepartment dept) {
            if (competitorsLearned)
                dept.setPredictorStrategy(new FixedDecreaseSalesPredictor(0));
        }
    };
    scenario1.setControlType(MarginalMaximizer.class);
    scenario1.setSalesDepartmentType(SalesDepartmentOneAtATime.class);
    //use standard PID parameters
    scenario1.setDivideProportionalGainByThis(divideGainsByThis);
    scenario1.setDivideIntegrativeGainByThis(divideGainsByThis);
    //100 days delay
    scenario1.setBeefPricingSpeed(speed);
    //no need for filter with the cheating price
    scenario1.setBeefPriceFilterer(null);
    scenario1.setBeefTargetInventory(10);

    //add csv writer if needed
    if (csvFileToWrite != null)
        DailyStatCollector.addDailyStatCollectorToModel(csvFileToWrite, macroII);

    macroII.setScenario(scenario1);
    macroII.start();

    while (macroII.schedule.getTime() < 9000) {
        macroII.schedule.step(macroII);
    }

    //I used to assert this:
    //Assert.assertEquals(macroII.getMarket(OneLinkSupplyChainScenario.OUTPUT_GOOD).getLatestObservation(MarketDataType.AVERAGE_CLOSING_PRICE),85,6 );
    //but that's too hard because while on average the price hovers there, competition is noisy. Sometimes a lot.
    //so what I did was to attach a daily stat collector and then check the average of the last 10 prices
    SummaryStatistics averageFoodPrice = new SummaryStatistics();
    SummaryStatistics averageBeefProduced = new SummaryStatistics();
    SummaryStatistics averageBeefPrice = new SummaryStatistics();
    for (int j = 0; j < 1000; j++) {
        //make the model run one more day:
        macroII.schedule.step(macroII);
        averageFoodPrice.addValue(macroII.getMarket(OneLinkSupplyChainScenario.OUTPUT_GOOD)
                .getLatestObservation(MarketDataType.AVERAGE_CLOSING_PRICE));
        averageBeefProduced
                .addValue(macroII.getMarket(OneLinkSupplyChainScenario.INPUT_GOOD).getYesterdayVolume());
        averageBeefPrice.addValue(macroII.getMarket(OneLinkSupplyChainScenario.INPUT_GOOD)
                .getLatestObservation(MarketDataType.AVERAGE_CLOSING_PRICE));
    }

    System.out.println("beef price: " + averageBeefPrice.getMean());
    System.out.println("food price: " + averageFoodPrice.getMean());
    System.out.println("produced: " + averageBeefProduced.getMean());
    System.out.println();

    return new OneLinkSupplyChainResult(averageBeefPrice.getMean(), averageFoodPrice.getMean(),
            averageBeefProduced.getMean(), macroII);

}

From source file:edu.uchc.octane.OctaneWindowControl.java

/**
 * Test ergodicity.
 */
public void showErgodicityTest(int maxSteps) {
    int[] selected = frame_.getTrajsTable().getSelectedTrajectoriesOrAll();
    ArrayList<SummaryStatistics> stat = new ArrayList<SummaryStatistics>();

    for (int i = 0; i < maxSteps; i++) {
        stat.add(new SummaryStatistics());
    }

    for (int i = 0; i < selected.length; i++) {

        Trajectory t = dataset_.getTrajectoryByIndex(selected[i]);
        int firstFrame = t.get(0).frame;

        for (int j = 0; j < t.size() - 1; j++) {

            int frame = t.get(j).frame;
            if (frame - firstFrame < maxSteps && t.get(j + 1).frame - frame == 1) {
                stat.get(frame - firstFrame).addValue(t.get(j + 1).distance2(t.get(j)));
            }
        }
        IJ.showProgress(i, selected.length);
    }

    double[] x = new double[stat.size()];
    double[] y = new double[stat.size()];
    double[] e = new double[stat.size()];

    if (stat.size() > 0) {
        double ps = dataset_.getPixelSize() * dataset_.getPixelSize();

        for (int i = 0; i < stat.size(); i++) {
            x[i] = 1.0 + i;
            if (stat.get(i).getN() > 1) {
                y[i] = stat.get(i).getMean() * ps;
                e[i] = stat.get(i).getStandardDeviation() / Math.sqrt(stat.get(i).getN() - 1) * ps;
            }
        }

        Plot plotWin = new Plot("Ergodicity Test", "dt (frame)", "D^2 (nm^2)", x, y);
        plotWin.addPoints(x, y, Plot.BOX);
        plotWin.addErrorBars(e);
        plotWin.show();
    }
}

From source file:model.scenario.OneLinkSupplyChainResult.java

public static OneLinkSupplyChainResult foodMonopolistOneRun(long random, float divideMonopolistGainsByThis,
        int beefSpeed, final boolean beefLearned, final boolean foodLearned, File csvFileToWrite,
        Path regressionCSV) {
    final MacroII macroII = new MacroII(random);
    final OneLinkSupplyChainScenarioWithCheatingBuyingPrice scenario1 = new OneLinkSupplyChainScenarioWithCheatingBuyingPrice(
            macroII) {

        @Override
        protected void buildBeefSalesPredictor(SalesDepartment dept) {
            if (beefLearned) {
                FixedDecreaseSalesPredictor predictor = SalesPredictor.Factory
                        .newSalesPredictor(FixedDecreaseSalesPredictor.class, dept);
                predictor.setDecrementDelta(0);
                dept.setPredictorStrategy(predictor);
            } else {

            }
        }

        @Override
        public void buildFoodPurchasesPredictor(PurchasesDepartment department) {
            if (foodLearned)
                department.setPredictor(new FixedIncreasePurchasesPredictor(1));
            else {
                final ErrorCorrectingPurchasePredictor predictor = new ErrorCorrectingPurchasePredictor(macroII,
                        department);
                try {
                    if (regressionCSV != null)
                        predictor.setDebugWriter(regressionCSV);
                } catch (IOException e) {
                    e.printStackTrace();
                }
                department.setPredictor(predictor);

            }

        }

        @Override
        protected SalesDepartment createSalesDepartment(Firm firm, Market goodmarket) {
            SalesDepartment department = super.createSalesDepartment(firm, goodmarket);
            if (goodmarket.getGoodType().equals(OneLinkSupplyChainScenario.OUTPUT_GOOD)) {
                if (foodLearned)
                    department.setPredictorStrategy(new FixedDecreaseSalesPredictor(1));
            }
            return department;
        }

        @Override
        protected HumanResources createPlant(Blueprint blueprint, Firm firm, Market laborMarket) {
            HumanResources hr = super.createPlant(blueprint, firm, laborMarket);
            if (blueprint.getOutputs().containsKey(OneLinkSupplyChainScenario.INPUT_GOOD)) {
                if (beefLearned) {
                    hr.setPredictor(new FixedIncreasePurchasesPredictor(0));
                }
            }
            if (blueprint.getOutputs().containsKey(OneLinkSupplyChainScenario.OUTPUT_GOOD)) {
                if (foodLearned)
                    hr.setPredictor(new FixedIncreasePurchasesPredictor(1));

            }
            return hr;
        }
    };
    scenario1.setControlType(MarginalMaximizer.class);
    scenario1.setSalesDepartmentType(SalesDepartmentOneAtATime.class);
    scenario1.setBeefPriceFilterer(null);

    //competition!
    scenario1.setNumberOfBeefProducers(5);
    scenario1.setBeefTargetInventory(100);
    scenario1.setNumberOfFoodProducers(1);
    scenario1.setFoodTargetInventory(100);

    scenario1.setDivideProportionalGainByThis(divideMonopolistGainsByThis);
    scenario1.setDivideIntegrativeGainByThis(divideMonopolistGainsByThis);
    //no delay
    scenario1.setBeefPricingSpeed(beefSpeed);

    //add csv writer if needed
    if (csvFileToWrite != null)
        DailyStatCollector.addDailyStatCollectorToModel(csvFileToWrite, macroII);

    macroII.setScenario(scenario1);
    macroII.start();

    while (macroII.schedule.getTime() < 14000) {
        macroII.schedule.step(macroII);

    }

    SummaryStatistics averageFoodPrice = new SummaryStatistics();
    SummaryStatistics averageBeefProduced = new SummaryStatistics();
    SummaryStatistics averageBeefPrice = new SummaryStatistics();
    for (int j = 0; j < 1000; j++) {
        //make the model run one more day:
        macroII.schedule.step(macroII);

        averageFoodPrice.addValue(macroII.getMarket(OneLinkSupplyChainScenario.OUTPUT_GOOD)
                .getLatestObservation(MarketDataType.AVERAGE_CLOSING_PRICE));
        averageBeefProduced
                .addValue(macroII.getMarket(OneLinkSupplyChainScenario.INPUT_GOOD).getYesterdayVolume());
        averageBeefPrice.addValue(macroII.getMarket(OneLinkSupplyChainScenario.INPUT_GOOD)
                .getLatestObservation(MarketDataType.AVERAGE_CLOSING_PRICE));
    }

    System.out.println("seed: " + random);
    System.out.println("beef price: " + averageBeefPrice.getMean());
    System.out.println("food price: " + averageFoodPrice.getMean());
    System.out.println("produced: " + averageBeefProduced.getMean());
    extractAndPrintSlopesOfBeefSellers(macroII);

    System.out.println();

    ((Firm) (macroII.getMarket(OneLinkSupplyChainScenario.OUTPUT_GOOD).getSellers().iterator().next()))
            .getPurchaseDepartment(OneLinkSupplyChainScenario.INPUT_GOOD).getData()
            .writeToCSVFile(Paths.get("runs", "purchases.csv").toFile());
    macroII.finish();

    return new OneLinkSupplyChainResult(averageBeefPrice.getMean(), averageFoodPrice.getMean(),
            averageBeefProduced.getMean(), macroII);

}

From source file:model.experiments.stickyprices.StickyPricesCSVPrinter.java

private static void competitiveSweepRun(int additionalCompetitors) throws IOException {

    CSVWriter writer = new CSVWriter(new FileWriter(
            Paths.get("runs", "rawdata", "competitivePeriodSweep" + additionalCompetitors + ".csv").toFile()));
    writer.writeNext(new String[] { "speed", "distance", "finaldistance", "variance" });

    for (int speed = 1; speed < 30; speed++) {
        SummaryStatistics distance = new SummaryStatistics();
        SummaryStatistics finalDistance = new SummaryStatistics();
        SummaryStatistics variance = new SummaryStatistics();
        for (int seed = 0; seed < 50; seed++) {

            final double[] result = competitiveSweepRun(seed, 101, 1, 1, 14, .1f, .1f, speed, 58, null,
                    additionalCompetitors);
            distance.addValue(result[0]);
            finalDistance.addValue(result[1]);
            variance.addValue(result[2]);

        }

        final String[] nextLine = { String.valueOf(speed), String.valueOf(distance.getMean()),
                String.valueOf(finalDistance.getMean()), String.valueOf(variance.getMean()) };
        System.out.println(Arrays.toString(nextLine));
        writer.writeNext(nextLine);
        writer.flush();
    }

    writer.close();

}