Example usage for org.apache.commons.math3.stat.descriptive.SummaryStatistics SummaryStatistics()

Introduction

This page collects example usage of org.apache.commons.math3.stat.descriptive.SummaryStatistics, specifically its no-argument constructor SummaryStatistics().

Prototype

public SummaryStatistics() 

Document

Construct a SummaryStatistics instance
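
A minimal, self-contained sketch of the constructor in use (the class name SummaryStatisticsExample and the sample values below are illustrative only; addValue, getN, getMean, getStandardDeviation, getMin and getMax are standard Commons Math 3 API calls):

import org.apache.commons.math3.stat.descriptive.SummaryStatistics;

public class SummaryStatisticsExample {
    public static void main(String[] args) {
        // Construct an empty SummaryStatistics and stream values into it one at a time.
        SummaryStatistics stats = new SummaryStatistics();
        double[] samples = { 2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0 };
        for (double v : samples) {
            stats.addValue(v);
        }
        // Summary measures are available at any point, without storing the raw values.
        System.out.println("n       = " + stats.getN());
        System.out.println("mean    = " + stats.getMean());
        System.out.println("std dev = " + stats.getStandardDeviation());
        System.out.println("min/max = " + stats.getMin() + " / " + stats.getMax());
    }
}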

Usage

From source file:ijfx.ui.plugin.overlay.OverlayPanel.java

protected XYChart.Series<Double, Double> getOverlayHistogram(Overlay overlay) {

    Timer timer = timerService.getTimer(this.getClass());
    timer.start();
    Double[] valueList = statsService.getValueListFromImageDisplay(currentDisplay(), overlay);
    timer.elapsed("Getting the stats");
    SummaryStatistics sumup = new SummaryStatistics();
    for (Double v : valueList) {
        sumup.addValue(v);
    }
    timer.elapsed("Building the sumup");

    double min = sumup.getMin();
    double max = sumup.getMax();
    double range = max - min;
    int bins = 100;//new Double(max - min).intValue();

    EmpiricalDistribution distribution = new EmpiricalDistribution(bins);

    double[] values = ArrayUtils.toPrimitive(valueList);
    Arrays.parallelSort(values);
    distribution.load(values);

    timer.elapsed("Sort and distribution repartition up");

    XYChart.Series<Double, Double> serie = new XYChart.Series<>();
    ArrayList<Data<Double, Double>> data = new ArrayList<>(bins);
    double k = min;
    for (SummaryStatistics st : distribution.getBinStats()) {
        data.add(new Data<Double, Double>(k, new Double(st.getN())));
        k += range / bins;
    }

    serie.getData().clear();
    serie.getData().addAll(data);
    timer.elapsed("Creating charts");
    return serie;
}

From source file:com.github.rinde.rinsim.scenario.measure.Metrics.java

static StatisticalSummary toStatisticalSummary(Iterable<? extends Number> values) {
    final SummaryStatistics ss = new SummaryStatistics();
    for (final Number n : values) {
        ss.addValue(n.doubleValue());
    }
    return ss.getSummary();
}

From source file:model.experiments.stickyprices.StickyPricesCSVPrinter.java

private static void simpleDelaySweep(int maxDivider, int maxSpeed, int demandDelay, int experimentsPerSetup)
        throws IOException {

    CSVWriter writer = new CSVWriter(new FileWriter(Paths.get("runs", "rawdata", "delaySweep.csv").toFile()));
    writer.writeNext(new String[] { "speed", "divider", "distance", "variance", "success" });

    for (int divider = 1; divider < maxDivider; divider++) {
        //for all speeds
        for (int speed = 0; speed < maxSpeed; speed++) {

            SummaryStatistics averageSquaredDistance = new SummaryStatistics();
            SummaryStatistics averageVariance = new SummaryStatistics();
            int successes = 0;

            for (int runNumber = 0; runNumber < experimentsPerSetup; runNumber++) {
                float totalDistance = 0;
                SummaryStatistics prices = new SummaryStatistics();

                //runNumber!
                final SimpleSellerScenario run = runWithDelay(demandDelay, speed, divider, false, true,
                        runNumber);

                final double[] pricesInRun = run.getDepartments().get(0).getData()
                        .getObservationsRecordedTheseDays(SalesDataType.LAST_ASKED_PRICE, 0, 14999);
                for (double price : pricesInRun) {
                    totalDistance += Math.pow(price - 51, 2);
                    prices.addValue(price);
                }

                averageSquaredDistance.addValue(Math.sqrt(totalDistance));
                averageVariance.addValue(prices.getVariance());
                if (pricesInRun[pricesInRun.length - 1] == 51)
                    successes++;
            }

            String[] csvLine = new String[5];
            csvLine[0] = String.valueOf(speed);
            csvLine[1] = String.valueOf(divider);
            csvLine[2] = String.valueOf(averageSquaredDistance.getMean());
            csvLine[3] = String.valueOf(averageVariance.getMean());
            csvLine[4] = String.valueOf(successes);
            writer.writeNext(csvLine);
            writer.flush();
            System.out.println(Arrays.toString(csvLine));
        }

    }

}

From source file:model.scenario.CompetitiveScenarioTest.java

@Test
public void rightPriceAndQuantityTestAsMarginalInventoryTargetAlreadyLearned() {

    List<Integer> competitors = new LinkedList<>();
    for (int competitor = 0; competitor <= 7; competitor++)
        competitors.add(competitor);
    competitors.add(25);

    for (Integer competitor : competitors) {
        System.out.println("FORCED COMPETITIVE FIRMS: " + (competitor + 1));
        float averageResultingPrice = 0;
        float averageResultingQuantity = 0;
        for (int i = 0; i < 5; i++) {

            final MacroII macroII = new MacroII(System.currentTimeMillis()); //1387582416533
            final TripolistScenario scenario1 = new TripolistScenario(macroII);

            scenario1.setSalesDepartmentType(SalesDepartmentOneAtATime.class);
            scenario1.setAskPricingStrategy(SalesControlWithFixedInventoryAndPID.class);
            scenario1.setControlType(
                    MonopolistScenario.MonopolistScenarioIntegratedControlEnum.MARGINAL_PLANT_CONTROL);
            scenario1.setAdditionalCompetitors(competitor);
            scenario1.setWorkersToBeRehiredEveryDay(true);
            scenario1.setDemandIntercept(102);

            scenario1.setSalesPricePreditorStrategy(FixedDecreaseSalesPredictor.class);

            //assign scenario
            macroII.setScenario(scenario1);

            macroII.start();

            macroII.schedule.step(macroII);
            for (Firm firm : scenario1.getCompetitors()) {
                for (HumanResources hr : firm.getHRs())
                    hr.setPredictor(new FixedIncreasePurchasesPredictor(0));
                for (SalesDepartment dept : firm.getSalesDepartments().values())
                    dept.setPredictorStrategy(new FixedDecreaseSalesPredictor(0));
            }

            while (macroII.schedule.getTime() < 10000) {
                macroII.schedule.step(macroII);

            }

            SummaryStatistics prices = new SummaryStatistics();
            SummaryStatistics quantities = new SummaryStatistics();
            SummaryStatistics target = new SummaryStatistics();
            for (int j = 0; j < 500; j++) {
                macroII.schedule.step(macroII);
                //                    assert !Float.isNaN(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayAveragePrice());
                prices.addValue(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayAveragePrice());
                quantities.addValue(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayVolume());

                for (EconomicAgent agent : macroII.getMarket(UndifferentiatedGoodType.GENERIC).getSellers()) {
                    SalesDepartment department = ((Firm) agent)
                            .getSalesDepartment(UndifferentiatedGoodType.GENERIC);
                    target.addValue(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayVolume());
                }

            }

            System.out.println(prices.getMean() + " - " + quantities.getMean() + "/" + target.getMean() + "----"
                    + macroII.seed() + " | "
                    + macroII.getMarket(UndifferentiatedGoodType.GENERIC).getLastDaysAveragePrice());
            System.out.println("standard deviations: price : " + prices.getStandardDeviation() + " , quantity: "
                    + quantities.getStandardDeviation());

            averageResultingPrice += prices.getMean();
            averageResultingQuantity += quantities.getMean();

            assertEquals(prices.getMean(), 58, 5);
            //                assertTrue(String.valueOf(prices.getStandardDeviation()),prices.getStandardDeviation() < 5.5);
            assertEquals(quantities.getMean(), 44, 5);
            //                assertTrue(String.valueOf(prices.getStandardDeviation()),quantities.getStandardDeviation() < 5.5);

        }

        System.out.println(averageResultingPrice / 5f + " --- " + averageResultingQuantity / 5f);
    }

}

From source file:com.civprod.writerstoolbox.OpenNLP.training.SentenceDetectorTrainer.java

private void cmdTrainSentenceDetectorActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cmdTrainSentenceDetectorActionPerformed
    final SentenceDetectorTrainer tempThis = this;
    new Thread(() -> {
        textTestResults.setText("");
        Charset charset = Charset.forName("UTF-8");
        //read other models
        SentenceDetector stdDetector = null;
        try {
            stdDetector = OpenNLPUtils.createSentenceDetector();

        } catch (IOException ex) {
        }

        List<FileSplit> FileSplits = FileSplit.generateFileSplitsLOO(mFileCollectionListModel);
        File trainingFile = new File("en-sent.train");
        File testFile = new File("en-sent.test");
        SummaryStatistics curFStats = new SummaryStatistics();
        SummaryStatistics curRecallStats = new SummaryStatistics();
        SummaryStatistics curPrecisionStats = new SummaryStatistics();
        SummaryStatistics stdFStats = new SummaryStatistics();
        SummaryStatistics stdRecallStats = new SummaryStatistics();
        SummaryStatistics stdPrecisionStats = new SummaryStatistics();
        java.io.BufferedOutputStream trainingFileWriter = null;
        for (FileSplit curFileSplit : FileSplits) {
            try {
                //create training file
                trainingFileWriter = new java.io.BufferedOutputStream(
                        new java.io.FileOutputStream(trainingFile));
                for (File curTrainingFile : curFileSplit.getTrainingFiles()) {
                    java.io.BufferedInputStream curTrainingFileReader = null;
                    try {
                        curTrainingFileReader = new java.io.BufferedInputStream(
                                new java.io.FileInputStream(curTrainingFile));
                        while (curTrainingFileReader.available() > 0) {
                            trainingFileWriter.write(curTrainingFileReader.read());
                        }
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    } finally {
                        if (curTrainingFileReader != null) {
                            curTrainingFileReader.close();
                        }
                    }
                }
                trainingFileWriter.write('\n');
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (trainingFileWriter != null) {
                    try {
                        trainingFileWriter.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
            //create test file
            java.io.BufferedOutputStream testFileWriter = null;
            try {
                //create training file
                testFileWriter = new java.io.BufferedOutputStream(new java.io.FileOutputStream(testFile));
                for (File curTrainingFile : curFileSplit.getTestFiles()) {
                    String testingFileName = curTrainingFile.getCanonicalPath();
                    textTestResults
                            .setText(textTestResults.getText() + "testing with " + testingFileName + "\n");
                    java.io.BufferedInputStream curTrainingFileReader = null;
                    try {
                        curTrainingFileReader = new java.io.BufferedInputStream(
                                new java.io.FileInputStream(curTrainingFile));
                        while (curTrainingFileReader.available() > 0) {
                            int read = curTrainingFileReader.read();
                            testFileWriter.write(read);
                        }
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    } finally {
                        if (curTrainingFileReader != null) {
                            curTrainingFileReader.close();
                        }
                    }
                }
                testFileWriter.write('\n');
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (testFileWriter != null) {
                    try {
                        testFileWriter.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
            //create SentenceDetectorFactory part of the training context
            SentenceDetectorFactory mySentenceDetectorFactory = new SentenceDetectorFactory("EN",
                    cbUseTokenEnd.isSelected(), mAbbreviationDictionary, txtEosChars.getText().toCharArray());

            ObjectStream<String> trainingLineStream = null;
            SentenceModel train = null;
            try {
                trainingLineStream = new PlainTextByLineStream(new FileInputStream(trainingFile), charset);
                ObjectStream<SentenceSample> sampleStream = null;
                try {
                    sampleStream = new SentenceSampleStream(trainingLineStream);
                    train = SentenceDetectorME.train("EN", sampleStream, mySentenceDetectorFactory,
                            TrainingParameters.defaultParams());
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                } finally {
                    if (sampleStream != null) {
                        try {
                            sampleStream.close();
                        } catch (IOException ex) {
                            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null,
                                    ex);
                        }
                    }
                }
            } catch (FileNotFoundException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (trainingLineStream != null) {
                    try {
                        trainingLineStream.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
            trainingLineStream = null;
            if (train != null) {
                ObjectStream<String> testingLineStream = null;
                try {
                    testingLineStream = new PlainTextByLineStream(new FileInputStream(testFile), charset);
                    ObjectStream<SentenceSample> sampleStream = null;
                    try {
                        sampleStream = new SentenceSampleStream(testingLineStream);
                        SentenceDetectorME testDetector = new SentenceDetectorME(train);
                        SentenceDetectorEvaluator evaluator = new SentenceDetectorEvaluator(testDetector);
                        evaluator.evaluate(sampleStream);
                        FMeasure testFMeasure = evaluator.getFMeasure();
                        curFStats.addValue(testFMeasure.getFMeasure());
                        curRecallStats.addValue(testFMeasure.getRecallScore());
                        curPrecisionStats.addValue(testFMeasure.getPrecisionScore());
                        textTestResults.setText(textTestResults.getText() + testFMeasure.getFMeasure() + " "
                                + testFMeasure.getPrecisionScore() + " " + testFMeasure.getRecallScore()
                                + "\n");
                        if (stdDetector != null) {
                            testingLineStream = new PlainTextByLineStream(new FileInputStream(testFile),
                                    charset);
                            sampleStream = new SentenceSampleStream(testingLineStream);
                            SentenceDetectorEvaluator stdEvaluator = new SentenceDetectorEvaluator(stdDetector);
                            stdEvaluator.evaluate(sampleStream);
                            FMeasure stdFMeasure = stdEvaluator.getFMeasure();
                            stdFStats.addValue(stdFMeasure.getFMeasure());
                            stdRecallStats.addValue(stdFMeasure.getRecallScore());
                            stdPrecisionStats.addValue(stdFMeasure.getPrecisionScore());
                            textTestResults.setText(textTestResults.getText() + " " + stdFMeasure.getFMeasure()
                                    + " " + stdFMeasure.getPrecisionScore() + " " + stdFMeasure.getRecallScore()
                                    + "\n");
                        }
                        textTestResults.setText(textTestResults.getText() + "\n");
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    } finally {
                        if (sampleStream != null) {
                            try {
                                sampleStream.close();
                            } catch (IOException ex) {
                                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE,
                                        null, ex);
                            }
                        }
                    }
                } catch (FileNotFoundException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                } finally {
                    if (testingLineStream != null) {
                        try {
                            testingLineStream.close();
                        } catch (IOException ex) {
                            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null,
                                    ex);
                        }
                    }
                }
            }
        }
        textTestResults.setText(textTestResults.getText() + "\n");
        textTestResults.setText(textTestResults.getText() + "test model\n");
        textTestResults.setText(textTestResults.getText() + "f score mean " + curFStats.getMean() + " stdDev "
                + curFStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "recall mean " + curRecallStats.getMean()
                + " stdDev " + curRecallStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "precision score mean "
                + curPrecisionStats.getMean() + " stdDev " + curPrecisionStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "std model\n");
        textTestResults.setText(textTestResults.getText() + "f score mean " + stdFStats.getMean() + " stdDev "
                + stdFStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "recall mean " + stdRecallStats.getMean()
                + " stdDev " + stdRecallStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "precision score mean "
                + stdPrecisionStats.getMean() + " stdDev " + stdPrecisionStats.getStandardDeviation() + "\n");
        //create combined training file
        trainingFileWriter = null;
        try {
            trainingFileWriter = new java.io.BufferedOutputStream(new java.io.FileOutputStream(trainingFile));
            for (File curTrainingFile : mFileCollectionListModel) {
                java.io.BufferedInputStream curTrainingFileReader = null;
                try {
                    curTrainingFileReader = new java.io.BufferedInputStream(
                            new java.io.FileInputStream(curTrainingFile));
                    while (curTrainingFileReader.available() > 0) {
                        trainingFileWriter.write(curTrainingFileReader.read());
                    }
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                } finally {
                    if (curTrainingFileReader != null) {
                        curTrainingFileReader.close();
                    }
                }
            }
            trainingFileWriter.write('\n');
        } catch (IOException ex) {
            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            if (trainingFileWriter != null) {
                try {
                    trainingFileWriter.close();
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
        //create SentenceDetectorFactory part of the training context
        SentenceDetectorFactory mySentenceDetectorFactory = new SentenceDetectorFactory("EN",
                cbUseTokenEnd.isSelected(), mAbbreviationDictionary, txtEosChars.getText().toCharArray());
        //create and train model
        ObjectStream<String> lineStream = null;
        this.createdObject = null;
        try {
            lineStream = new PlainTextByLineStream(new FileInputStream(trainingFile), charset);
            ObjectStream<SentenceSample> sampleStream = null;
            try {
                sampleStream = new SentenceSampleStream(lineStream);
                this.createdObject = SentenceDetectorME.train("EN", sampleStream, mySentenceDetectorFactory,
                        TrainingParameters.defaultParams());
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (sampleStream != null) {
                    try {
                        sampleStream.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
        } catch (FileNotFoundException ex) {
            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            if (lineStream != null) {
                try {
                    lineStream.close();
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
        if (createdObject != null) {
            OutputStream modelOut = null;
            File modelFile = new File("en-fiction-sent.bin");
            try {
                modelOut = new BufferedOutputStream(new FileOutputStream(modelFile));
                createdObject.serialize(modelOut);
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (modelOut != null) {
                    try {
                        modelOut.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
        }
        textTestResults.setText(textTestResults.getText() + "done");
    }).start();
}

From source file:com.civprod.writerstoolbox.OpenNLP.training.TokenizerTrainer.java

private void cmdTrainActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cmdTrainActionPerformed
    final TokenizerTrainer tempThis = this;
    new Thread(() -> {
        textTestResults.setText("");
        Charset charset = Charset.forName("UTF-8");
        //create TokenizerFactory part of the training context
        String alphaNumericRegex = txtAlphaNumericPattern.getText();
        alphaNumericRegex = alphaNumericRegex.trim();
        if (alphaNumericRegex.isEmpty()) {
            alphaNumericRegex = "^[A-Za-z0-9]+$";
        }
        Pattern alphaNumericPattern = Pattern.compile(alphaNumericRegex);
        TokenizerFactory myTokenizerFactory = new TokenizerFactory("EN", mAbbreviationDictionary,
                this.cbUseAlphaNumericOptimization.isSelected(), alphaNumericPattern);

        Tokenizer stdTokenizer = null;
        try {
            stdTokenizer = OpenNLPUtils.createTokenizer();
        } catch (IOException ex) {
            Logger.getLogger(TokenizerTrainer.class.getName()).log(Level.SEVERE, null, ex);
        }
        List<FileSplit> FileSplits = FileSplit.generateFileSplitsLOO(mFileCollectionListModel);
        File trainingFile = new File("en-token.train");
        File testFile = new File("en-token.test");
        SummaryStatistics curFStats = new SummaryStatistics();
        SummaryStatistics curRecallStats = new SummaryStatistics();
        SummaryStatistics curPrecisionStats = new SummaryStatistics();
        SummaryStatistics stdFStats = new SummaryStatistics();
        SummaryStatistics stdRecallStats = new SummaryStatistics();
        SummaryStatistics stdPrecisionStats = new SummaryStatistics();
        java.io.BufferedOutputStream trainingFileWriter = null;
        for (FileSplit curFileSplit : FileSplits) {
            try {
                //create training file
                trainingFileWriter = new java.io.BufferedOutputStream(
                        new java.io.FileOutputStream(trainingFile));
                for (File curTrainingFile : curFileSplit.getTrainingFiles()) {
                    java.io.BufferedInputStream curTrainingFileReader = null;
                    try {
                        curTrainingFileReader = new java.io.BufferedInputStream(
                                new java.io.FileInputStream(curTrainingFile));
                        while (curTrainingFileReader.available() > 0) {
                            trainingFileWriter.write(curTrainingFileReader.read());
                        }
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    } finally {
                        if (curTrainingFileReader != null) {
                            curTrainingFileReader.close();
                        }
                    }
                }
                trainingFileWriter.write('\n');
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (trainingFileWriter != null) {
                    try {
                        trainingFileWriter.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
            //create test file
            java.io.BufferedOutputStream testFileWriter = null;
            try {
                //create training file
                testFileWriter = new java.io.BufferedOutputStream(new java.io.FileOutputStream(testFile));
                for (File curTrainingFile : curFileSplit.getTestFiles()) {
                    String testingFileName = curTrainingFile.getCanonicalPath();
                    textTestResults
                            .setText(textTestResults.getText() + "testing with " + testingFileName + "\n");
                    java.io.BufferedInputStream curTrainingFileReader = null;
                    try {
                        curTrainingFileReader = new java.io.BufferedInputStream(
                                new java.io.FileInputStream(curTrainingFile));
                        while (curTrainingFileReader.available() > 0) {
                            int read = curTrainingFileReader.read();
                            testFileWriter.write(read);
                        }
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    } finally {
                        if (curTrainingFileReader != null) {
                            curTrainingFileReader.close();
                        }
                    }
                }
                testFileWriter.write('\n');
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (testFileWriter != null) {
                    try {
                        testFileWriter.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
            //create and train model
            ObjectStream<String> trainingLineStream = null;
            TokenizerModel train = null;
            try {
                trainingLineStream = new PlainTextByLineStream(new FileInputStream(trainingFile), charset);
                ObjectStream<TokenSample> sampleStream = null;
                try {
                    sampleStream = new TokenSampleStream(trainingLineStream);
                    train = TokenizerME.train(sampleStream, myTokenizerFactory,
                            TrainingParameters.defaultParams());
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                } finally {
                    if (sampleStream != null) {
                        try {
                            sampleStream.close();
                        } catch (IOException ex) {
                            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null,
                                    ex);
                        }
                    }
                }
            } catch (FileNotFoundException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (trainingLineStream != null) {
                    try {
                        trainingLineStream.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
            if (train != null) {
                ObjectStream<String> testingLineStream = null;
                try {
                    testingLineStream = new PlainTextByLineStream(new FileInputStream(testFile), charset);
                    ObjectStream<TokenSample> sampleStream = null;
                    try {
                        sampleStream = new TokenSampleStream(testingLineStream);
                        TokenizerME testDetector = new TokenizerME(train);
                        TokenizerEvaluator evaluator = new TokenizerEvaluator(testDetector);
                        evaluator.evaluate(sampleStream);
                        FMeasure testFMeasure = evaluator.getFMeasure();
                        curFStats.addValue(testFMeasure.getFMeasure());
                        curRecallStats.addValue(testFMeasure.getRecallScore());
                        curPrecisionStats.addValue(testFMeasure.getPrecisionScore());
                        textTestResults.setText(textTestResults.getText() + testFMeasure.getFMeasure() + " "
                                + testFMeasure.getPrecisionScore() + " " + testFMeasure.getRecallScore()
                                + "\n");
                        if (stdTokenizer != null) {
                            testingLineStream = new PlainTextByLineStream(new FileInputStream(testFile),
                                    charset);
                            sampleStream = new TokenSampleStream(testingLineStream);
                            TokenizerEvaluator stdEvaluator = new TokenizerEvaluator(stdTokenizer);
                            stdEvaluator.evaluate(sampleStream);
                            FMeasure stdFMeasure = stdEvaluator.getFMeasure();
                            stdFStats.addValue(stdFMeasure.getFMeasure());
                            stdRecallStats.addValue(stdFMeasure.getRecallScore());
                            stdPrecisionStats.addValue(stdFMeasure.getPrecisionScore());
                            textTestResults.setText(textTestResults.getText() + " " + stdFMeasure.getFMeasure()
                                    + " " + stdFMeasure.getPrecisionScore() + " " + stdFMeasure.getRecallScore()
                                    + "\n");
                        }
                        textTestResults.setText(textTestResults.getText() + "\n");
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    } finally {
                        if (sampleStream != null) {
                            try {
                                sampleStream.close();
                            } catch (IOException ex) {
                                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE,
                                        null, ex);
                            }
                        }
                    }
                } catch (FileNotFoundException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                } finally {
                    if (testingLineStream != null) {
                        try {
                            testingLineStream.close();
                        } catch (IOException ex) {
                            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null,
                                    ex);
                        }
                    }
                }
            }
        }
        textTestResults.setText(textTestResults.getText() + "\n");
        textTestResults.setText(textTestResults.getText() + "test model\n");
        textTestResults.setText(textTestResults.getText() + "f score mean " + curFStats.getMean() + " stdDev "
                + curFStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "recall mean " + curRecallStats.getMean()
                + " stdDev " + curRecallStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "precision score mean "
                + curPrecisionStats.getMean() + " stdDev " + curPrecisionStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "std model\n");
        textTestResults.setText(textTestResults.getText() + "f score mean " + stdFStats.getMean() + " stdDev "
                + stdFStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "recall mean " + stdRecallStats.getMean()
                + " stdDev " + stdRecallStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "precision score mean "
                + stdPrecisionStats.getMean() + " stdDev " + stdPrecisionStats.getStandardDeviation() + "\n");
        //create combined training file
        trainingFileWriter = null;
        try {
            trainingFileWriter = new java.io.BufferedOutputStream(new java.io.FileOutputStream(trainingFile));
            for (File curTrainingFile : mFileCollectionListModel) {
                java.io.BufferedInputStream curTrainingFileReader = null;
                try {
                    curTrainingFileReader = new java.io.BufferedInputStream(
                            new java.io.FileInputStream(curTrainingFile));
                    while (curTrainingFileReader.available() > 0) {
                        trainingFileWriter.write(curTrainingFileReader.read());
                    }
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                } finally {
                    if (curTrainingFileReader != null) {
                        curTrainingFileReader.close();
                    }
                }
            }
            trainingFileWriter.write('\n');
        } catch (IOException ex) {
            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            if (trainingFileWriter != null) {
                try {
                    trainingFileWriter.close();
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
        //create and train model
        ObjectStream<String> lineStream = null;
        this.createdObject = null;
        try {
            lineStream = new PlainTextByLineStream(new FileInputStream(trainingFile), charset);
            ObjectStream<TokenSample> sampleStream = null;
            try {
                sampleStream = new TokenSampleStream(lineStream);
                this.createdObject = TokenizerME.train(sampleStream, myTokenizerFactory,
                        TrainingParameters.defaultParams());
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (sampleStream != null) {
                    try {
                        sampleStream.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
        } catch (FileNotFoundException ex) {
            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            if (lineStream != null) {
                try {
                    lineStream.close();
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
        if (createdObject != null) {
            OutputStream modelOut = null;
            File modelFile = new File("en-fiction-token.bin");
            try {
                modelOut = new BufferedOutputStream(new FileOutputStream(modelFile));
                createdObject.serialize(modelOut);
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (modelOut != null) {
                    try {
                        modelOut.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
        }
        textTestResults.setText(textTestResults.getText() + "done");
    }).start();
}

From source file:gr.aueb.cs.nlp.wordtagger.data.structure.features.FeatureBuilder.java

/**
 * Normalize the feature vectors of words using mean and standard deviation,
 * provided the vector values are higher than 1.
 * @param words
 * @param indeces
 */
private static void normalize(List<Word> words, List<Integer> indeces) {
    Map<Integer, Double> means = new WeakHashMap<>();
    Map<Integer, Double> stds = new WeakHashMap<>();
    for (Integer i : indeces) {
        SummaryStatistics smt = new SummaryStatistics();
        for (Word w : words) {
            smt.addValue(w.getFeatureVec().getValues()[i]);
        }
        means.put(i, smt.getMean());
        stds.put(i, smt.getStandardDeviation());
    }
    for (Integer i : indeces) {
        for (Word w : words) {
            double value = w.getFeatureVec().getValues()[i];
            w.getFeatureVec().getValues()[i] = (value - means.get(i)) / stds.get(i);
        }
    }
}

From source file:gdsc.smlm.ij.plugins.DensityImage.java

/**
 * Output a log message of the results including the average density for localisations and the expected average.
 * @param results
 * @param density
 * @param radius
 * @param filtered
 * @return
 */
private SummaryStatistics logDensityResults(MemoryPeakResults results, int[] density, float radius,
        int filtered) {
    float region = (float) (radius * radius * ((useSquareApproximation) ? 4 : Math.PI));

    Rectangle bounds = results.getBounds();
    float area = bounds.width * bounds.height;
    float expected = results.size() * region / area;
    SummaryStatistics summary = new SummaryStatistics();

    for (int i = 0; i < results.size(); i++) {
        summary.addValue(density[i]);
    }

    DensityManager dm = new DensityManager(results);

    // Compute this using the input density scores since the radius is the same.
    final double l = (useSquareApproximation) ? dm.ripleysLFunction(radius)
            : dm.ripleysLFunction(density, radius);

    String msg = String.format("Density %s : N=%d, %.0fpx : Radius=%s : L(r) - r = %s : E = %s, Obs = %s (%sx)",
            results.getName(), summary.getN(), area, rounded(radius), rounded(l - radius), rounded(expected),
            rounded(summary.getMean()), rounded(summary.getMean() / expected));
    if (filterLocalisations)
        msg += String.format(" : Filtered=%d (%s%%)", filtered, rounded(filtered * 100.0 / density.length));
    IJ.log(msg);

    return summary;
}

From source file:model.experiments.stickyprices.StickyPricesCSVPrinter.java

private static void woodMonopolistSweep(final BigDecimal minimumP, final BigDecimal maximumP,
        final BigDecimal minimumI, final BigDecimal maximumI, final BigDecimal increment,
        final int runsPerParameterCombination) throws IOException {

    CSVWriter writer = new CSVWriter(new FileWriter(Paths.get("runs", "rawdata", "monoSweep.csv").toFile()));
    writer.writeNext(new String[] { "P", "I", "distance", "variance", "success" });

    BigDecimal currentP = minimumP;
    while (currentP.compareTo(maximumP) <= 0) {
        BigDecimal currentI = minimumI;

        while (currentI.compareTo(maximumI) <= 0) {

            SummaryStatistics averageSquaredDistance = new SummaryStatistics();
            SummaryStatistics averageVariance = new SummaryStatistics();
            int successes = 0;

            for (int run = 0; run < runsPerParameterCombination; run++) {

                //create the run
                MacroII macroII = new MacroII(run);
                MonopolistScenario scenario = new MonopolistScenario(macroII);
                macroII.setScenario(scenario);
                //set the demand
                scenario.setDemandIntercept(102);
                scenario.setDemandSlope(2);
                scenario.setDailyWageSlope(1);
                scenario.setDailyWageIntercept(0);
                scenario.setAskPricingStrategy(SimpleFlowSellerPID.class);
                scenario.setWorkersToBeRehiredEveryDay(true);
                scenario.setControlType(
                        MonopolistScenario.MonopolistScenarioIntegratedControlEnum.MARGINAL_PLANT_CONTROL);
                scenario.setBuyerDelay(0);

                //start it and have one step
                macroII.start();
                macroII.schedule.step(macroII);

                //now set the right parameters
                final SalesDepartment salesDepartment = scenario.getMonopolist()
                        .getSalesDepartment(UndifferentiatedGoodType.GENERIC);
                final SimpleFlowSellerPID strategy = new SimpleFlowSellerPID(salesDepartment,
                        currentP.floatValue(), currentI.floatValue(), 0f, 0, salesDepartment.getMarket(),
                        salesDepartment.getRandom().nextInt(100), salesDepartment.getFirm().getModel());
                //  strategy.setInitialPrice(102);
                //start them all at the same price, otherwise you advantage the slow by being so slow initially that they end up being right later

                salesDepartment.setAskPricingStrategy(strategy);

                //and make it learned!
                salesDepartment.setPredictorStrategy(new FixedDecreaseSalesPredictor(2));
                final HumanResources hr = scenario.getMonopolist().getHRs().iterator().next();
                hr.setPredictor(new FixedIncreasePurchasesPredictor(1));

                float totalDistance = 0;
                SummaryStatistics prices = new SummaryStatistics();
                //run the model
                double price = 0;
                double quantity = 0;
                for (int i = 0; i < 1000; i++) {
                    macroII.schedule.step(macroII);
                    price = strategy.getTargetPrice();
                    quantity = salesDepartment.getTodayInflow();
                    totalDistance += Math
                            .pow(Math.min(price - (102 - 2 * quantity), price - (102 - 2 * quantity - 1)), 2);
                    prices.addValue(price);
                }

                //Model over, now compute statistics

                averageSquaredDistance.addValue(Math.sqrt(totalDistance));
                averageVariance.addValue(prices.getVariance());
                if (price <= 68 && price >= 67)
                    successes++;

                //            System.out.println(salesDepartment.getLatestObservation(SalesDataType.LAST_ASKED_PRICE));
                macroII.finish();

            }

            String[] csvLine = new String[5];
            csvLine[0] = currentP.toString();
            csvLine[1] = currentI.toString();
            csvLine[2] = String.valueOf(averageSquaredDistance.getMean());
            csvLine[3] = String.valueOf(averageVariance.getMean());
            csvLine[4] = String.valueOf(successes);
            writer.writeNext(csvLine);
            writer.flush();
            System.out.println(Arrays.toString(csvLine));

            currentI = currentI.add(increment).setScale(2);
            System.out.println();

        }

        currentP = currentP.add(increment).setScale(2);

    }

}

From source file:model.scenario.OneLinkSupplyChainResult.java

public static OneLinkSupplyChainResult everybodyLearnedCompetitivePIDRun(long random,
        final float dividePIByThis, final int beefPricingSpeed, File csvFileToWrite) {
    final MacroII macroII = new MacroII(random);
    final OneLinkSupplyChainScenarioWithCheatingBuyingPrice scenario1 = new OneLinkSupplyChainScenarioWithCheatingBuyingPrice(
            macroII) {

        @Override
        protected void buildBeefSalesPredictor(SalesDepartment dept) {
            FixedDecreaseSalesPredictor predictor = SalesPredictor.Factory
                    .newSalesPredictor(FixedDecreaseSalesPredictor.class, dept);
            predictor.setDecrementDelta(0);
            dept.setPredictorStrategy(predictor);
        }

        @Override
        public void buildFoodPurchasesPredictor(PurchasesDepartment department) {
            department.setPredictor(new FixedIncreasePurchasesPredictor(0));

        }

        @Override
        protected SalesDepartment createSalesDepartment(Firm firm, Market goodmarket) {
            SalesDepartment department = super.createSalesDepartment(firm, goodmarket);
            if (goodmarket.getGoodType().equals(OneLinkSupplyChainScenario.OUTPUT_GOOD))
                department.setPredictorStrategy(new FixedDecreaseSalesPredictor(0));
            return department;
        }

        @Override
        protected HumanResources createPlant(Blueprint blueprint, Firm firm, Market laborMarket) {
            HumanResources hr = super.createPlant(blueprint, firm, laborMarket);
            hr.setPredictor(new FixedIncreasePurchasesPredictor(0));
            return hr;
        }
    };

    scenario1.setControlType(MarginalMaximizer.class);
    scenario1.setSalesDepartmentType(SalesDepartmentOneAtATime.class);
    scenario1.setBeefPriceFilterer(null);

    //competition!
    scenario1.setNumberOfBeefProducers(5);
    scenario1.setNumberOfFoodProducers(5);

    scenario1.setDivideProportionalGainByThis(dividePIByThis);
    scenario1.setDivideIntegrativeGainByThis(dividePIByThis);
    //no delay
    scenario1.setBeefPricingSpeed(beefPricingSpeed);

    //add csv writer if needed
    if (csvFileToWrite != null)
        DailyStatCollector.addDailyStatCollectorToModel(csvFileToWrite, macroII);

    macroII.setScenario(scenario1);
    macroII.start();

    while (macroII.schedule.getTime() < 14000) {
        macroII.schedule.step(macroII);

    }

    //I used to assert this:
    //Assert.assertEquals(macroII.getMarket(OneLinkSupplyChainScenario.OUTPUT_GOOD).getLatestObservation(MarketDataType.AVERAGE_CLOSING_PRICE),85l,6l );
    //but that's too hard because while on average the price hovers there, competition is noisy. Sometimes a lot.
    //so what I did was to attach a daily stat collector and then check the average of the last 10 prices
    SummaryStatistics averageFoodPrice = new SummaryStatistics();
    SummaryStatistics averageBeefProduced = new SummaryStatistics();
    SummaryStatistics averageBeefPrice = new SummaryStatistics();
    for (int j = 0; j < 1000; j++) {
        //make the model run one more day:
        macroII.schedule.step(macroII);
        averageFoodPrice.addValue(macroII.getMarket(OneLinkSupplyChainScenario.OUTPUT_GOOD)
                .getLatestObservation(MarketDataType.AVERAGE_CLOSING_PRICE));
        averageBeefProduced.addValue(macroII.getMarket(OneLinkSupplyChainScenario.INPUT_GOOD)
                .countTodayProductionByRegisteredSellers());
        averageBeefPrice.addValue(macroII.getMarket(OneLinkSupplyChainScenario.INPUT_GOOD)
                .getLatestObservation(MarketDataType.AVERAGE_CLOSING_PRICE));
    }

    System.out.println("beef price: " + averageBeefPrice.getMean());
    System.out.println("food price: " + averageFoodPrice.getMean());
    System.out.println("produced: " + averageBeefProduced.getMean());
    extractAndPrintSlopesOfBeefSellers(macroII);
    System.out.println();
    macroII.finish();

    return new OneLinkSupplyChainResult(averageBeefPrice.getMean(), averageFoodPrice.getMean(),
            averageBeefProduced.getMean(), macroII);

}