List of usage examples for org.apache.commons.math.stat.descriptive DescriptiveStatistics getSkewness
public double getSkewness()
From source file: org.datacleaner.beans.NumberAnalyzer.java
@Override public NumberAnalyzerResult getResult() { CrosstabDimension measureDimension = new CrosstabDimension(DIMENSION_MEASURE); measureDimension.addCategory(MEASURE_ROW_COUNT); measureDimension.addCategory(MEASURE_NULL_COUNT); measureDimension.addCategory(MEASURE_HIGHEST_VALUE); measureDimension.addCategory(MEASURE_LOWEST_VALUE); measureDimension.addCategory(MEASURE_SUM); measureDimension.addCategory(MEASURE_MEAN); measureDimension.addCategory(MEASURE_GEOMETRIC_MEAN); measureDimension.addCategory(MEASURE_STANDARD_DEVIATION); measureDimension.addCategory(MEASURE_VARIANCE); measureDimension.addCategory(MEASURE_SECOND_MOMENT); measureDimension.addCategory(MEASURE_SUM_OF_SQUARES); if (descriptiveStatistics) { measureDimension.addCategory(MEASURE_MEDIAN); measureDimension.addCategory(MEASURE_PERCENTILE25); measureDimension.addCategory(MEASURE_PERCENTILE75); measureDimension.addCategory(MEASURE_SKEWNESS); measureDimension.addCategory(MEASURE_KURTOSIS); }//from w ww. j ava 2s . c om CrosstabDimension columnDimension = new CrosstabDimension(DIMENSION_COLUMN); for (InputColumn<? extends Number> column : _columns) { columnDimension.addCategory(column.getName()); } Crosstab<Number> crosstab = new Crosstab<Number>(Number.class, columnDimension, measureDimension); for (InputColumn<? 
extends Number> column : _columns) { CrosstabNavigator<Number> nav = crosstab.navigate().where(columnDimension, column.getName()); NumberAnalyzerColumnDelegate delegate = _columnDelegates.get(column); StatisticalSummary s = delegate.getStatistics(); int nullCount = delegate.getNullCount(); nav.where(measureDimension, MEASURE_NULL_COUNT).put(nullCount); if (nullCount > 0) { addAttachment(nav, delegate.getNullAnnotation(), column); } int numRows = delegate.getNumRows(); nav.where(measureDimension, MEASURE_ROW_COUNT).put(numRows); long nonNullCount = s.getN(); if (nonNullCount > 0) { final double highestValue = s.getMax(); final double lowestValue = s.getMin(); final double sum = s.getSum(); final double mean = s.getMean(); final double standardDeviation = s.getStandardDeviation(); final double variance = s.getVariance(); final double geometricMean; final double secondMoment; final double sumOfSquares; if (descriptiveStatistics) { final DescriptiveStatistics descriptiveStats = (DescriptiveStatistics) s; geometricMean = descriptiveStats.getGeometricMean(); sumOfSquares = descriptiveStats.getSumsq(); secondMoment = new SecondMoment().evaluate(descriptiveStats.getValues()); } else { final SummaryStatistics summaryStats = (SummaryStatistics) s; geometricMean = summaryStats.getGeometricMean(); secondMoment = summaryStats.getSecondMoment(); sumOfSquares = summaryStats.getSumsq(); } nav.where(measureDimension, MEASURE_HIGHEST_VALUE).put(highestValue); addAttachment(nav, delegate.getMaxAnnotation(), column); nav.where(measureDimension, MEASURE_LOWEST_VALUE).put(lowestValue); addAttachment(nav, delegate.getMinAnnotation(), column); nav.where(measureDimension, MEASURE_SUM).put(sum); nav.where(measureDimension, MEASURE_MEAN).put(mean); nav.where(measureDimension, MEASURE_GEOMETRIC_MEAN).put(geometricMean); nav.where(measureDimension, MEASURE_STANDARD_DEVIATION).put(standardDeviation); nav.where(measureDimension, MEASURE_VARIANCE).put(variance); nav.where(measureDimension, 
MEASURE_SUM_OF_SQUARES).put(sumOfSquares); nav.where(measureDimension, MEASURE_SECOND_MOMENT).put(secondMoment); if (descriptiveStatistics) { final DescriptiveStatistics descriptiveStatistics = (DescriptiveStatistics) s; final double kurtosis = descriptiveStatistics.getKurtosis(); final double skewness = descriptiveStatistics.getSkewness(); final double median = descriptiveStatistics.getPercentile(50.0); final double percentile25 = descriptiveStatistics.getPercentile(25.0); final double percentile75 = descriptiveStatistics.getPercentile(75.0); nav.where(measureDimension, MEASURE_MEDIAN).put(median); nav.where(measureDimension, MEASURE_PERCENTILE25).put(percentile25); nav.where(measureDimension, MEASURE_PERCENTILE75).put(percentile75); nav.where(measureDimension, MEASURE_SKEWNESS).put(skewness); nav.where(measureDimension, MEASURE_KURTOSIS).put(kurtosis); } } } return new NumberAnalyzerResult(_columns, crosstab); }
From source file: org.fhaes.analysis.FHInterval.java
/** * Actually perform the analysis.//from ww w. j a v a 2 s . c om */ @SuppressWarnings("deprecation") private void doAnalysis() { log.debug("INPUT PARAMETERS"); log.debug("inputFileArray = " + inputFileArray); log.debug("analyissType = " + analysisType); log.debug("startYear = " + startYear); log.debug("endYear = " + endYear); log.debug("fireFilterType = " + fireFilterType); log.debug("filterValue = " + filterValue); log.debug("includeIncomplete = " + includeIncomplete); log.debug("alphaLevel = " + alphaLevel); boolean highway = true; ArrayList<FHX2FileReader> myReader = new ArrayList<FHX2FileReader>(); Integer minFirstYear = new Integer(9999); Integer maxLastYear = new Integer(0); String savePath = new String(); savePath = inputFileArray[0].getAbsolutePath(); for (int i = 0; i < inputFileArray.length; i++) { myReader.add(new FHX2FileReader(inputFileArray[i])); /* * set the beginning year accounting for the filter */ if (eventTypeToProcess.equals(EventTypeToProcess.FIRE_EVENT)) { // myReader.get(i).makeClimate2d(); if (startYear == 0 && highway) { if (myReader.get(i).getFirstFireYear() < minFirstYear) { minFirstYear = myReader.get(i).getFirstFireYear(); } } else if (startYear != 0 && highway) { if (myReader.get(i).getFirstYear() < minFirstYear) { minFirstYear = myReader.get(i).getFirstYear(); } } if (startYear != 0) { minFirstYear = startYear; // minFirstYear = minFirstYear+1; } } else if (eventTypeToProcess.equals(EventTypeToProcess.INJURY_EVENT)) { // myReader.get(i).makeClimate2dII(); if (startYear == 0 && highway) { if (myReader.get(i).getFirstInjuryYear() < minFirstYear) { minFirstYear = myReader.get(i).getFirstInjuryYear(); } } else if (startYear != 0 && highway) { if (myReader.get(i).getFirstYear() < minFirstYear) { minFirstYear = myReader.get(i).getFirstYear(); } } if (startYear != 0) { minFirstYear = startYear; // minFirstYear = minFirstYear+1; } } else if (eventTypeToProcess.equals(EventTypeToProcess.FIRE_AND_INJURY_EVENT)) { // 
myReader.get(i).makeClimate2dII(); if (startYear == 0 && highway) { if (myReader.get(i).getFirstIndicatorYear() < minFirstYear) { minFirstYear = myReader.get(i).getFirstIndicatorYear(); } } else if (startYear != 0 && highway) { if (myReader.get(i).getFirstYear() < minFirstYear) { minFirstYear = myReader.get(i).getFirstYear(); } } if (startYear != 0) { minFirstYear = startYear; // minFirstYear = minFirstYear+1; } } else { log.error("Unsupported event type caught"); } /* * Set the last year accounting for the filter */ if (myReader.get(i).getLastYear() > maxLastYear) { maxLastYear = myReader.get(i).getLastYear(); } if (endYear != 0) { maxLastYear = endYear; } } // end of i loop log.debug("the input filelength is " + inputFileArray.length); log.debug("The FIRST FIRE YEAR is " + minFirstYear); log.debug("The LAST YEAR is " + maxLastYear); log.debug("Minimum and Maximum years are " + minFirstYear + " " + maxLastYear); /* * set the format for the output of the numbers to 2 decimal formats */ DecimalFormat twoPlace = new DecimalFormat("0.00"); DecimalFormat threePlace = new DecimalFormat("0.000"); /* * Calculate the listYears the common years where the files will be analyzed */ ArrayList<Integer> listYears = new ArrayList<Integer>(); for (int i = 0; i < maxLastYear - minFirstYear + 1; i++) { listYears.add(minFirstYear + i); } /* * create arraylist need for the Interval Analysis */ ArrayList<ArrayList<Integer>> climateMatrixSite = new ArrayList<ArrayList<Integer>>(); ArrayList<ArrayList<Double>> filterMatrix = new ArrayList<ArrayList<Double>>(); ArrayList<Integer> climateVector = new ArrayList<Integer>(); ArrayList<ArrayList<Double>> climateVectorFilter2 = new ArrayList<ArrayList<Double>>(); // ArrayList<Double> fireintervalspersite = new ArrayList<Double>(); ArrayList<Integer> climateVectorActualSite = null; ArrayList<Double> filterVectorActual = null; ArrayList<Integer> climateYear = new ArrayList<Integer>(); ArrayList<Integer> minSampleFilter = null; ArrayList<Double> 
percentOfRecordingfilter = null; Double[] Dfireintervalspersite; double[] dfireintervalspersite; String[] statsparam = new String[22]; if (eventTypeToProcess.equals(EventTypeToProcess.FIRE_EVENT)) { statsparam[0] = "Total intervals"; statsparam[1] = "Mean fire interval"; statsparam[2] = "Median fire interval"; statsparam[3] = "Standard deviation"; statsparam[4] = "Fire frequency"; statsparam[5] = "Coefficient of variation"; statsparam[6] = "Skewness"; statsparam[7] = "Kurtosis"; statsparam[8] = "Minimum fire interval"; statsparam[9] = "Maximum fire interval"; statsparam[10] = "Weibull scale parameter"; statsparam[11] = "Weibull shape parameter"; statsparam[12] = "Weibull mean"; statsparam[13] = "Weibull median"; statsparam[14] = "Weibull mode"; statsparam[15] = "Weibull standard deviation"; statsparam[16] = "Weibull fire frequency"; statsparam[17] = "Weibull skewness"; statsparam[18] = "Lower exceedance interval"; statsparam[19] = "Upper exceedance interval"; statsparam[20] = "Significantly short interval upper bound"; statsparam[21] = "Significantly long interval lower bound"; } else if (eventTypeToProcess.equals(EventTypeToProcess.INJURY_EVENT)) { statsparam[0] = "Total intervals"; statsparam[1] = "Mean indicator interval"; statsparam[2] = "Median indicator interval"; statsparam[3] = "Standard deviation"; statsparam[4] = "Indicator frequency"; statsparam[5] = "Coefficient of variation"; statsparam[6] = "Skewness"; statsparam[7] = "Kurtosis"; statsparam[8] = "Minimum fire interval"; statsparam[9] = "Maximum indicator interval"; statsparam[10] = "Weibull scale parameter"; statsparam[11] = "Weibull shape parameter"; statsparam[12] = "Weibull mean"; statsparam[13] = "Weibull median"; statsparam[14] = "Weibull mode"; statsparam[15] = "Weibull standard deviation"; statsparam[16] = "Weibull indicator frequency"; statsparam[17] = "Weibull skewness"; statsparam[18] = "Lower exceedance interval"; statsparam[19] = "Upper exceedance interval"; statsparam[20] = "Significantly 
short interval upper bound"; statsparam[21] = "Significantly long interval lower bound"; } else if (eventTypeToProcess.equals(EventTypeToProcess.FIRE_AND_INJURY_EVENT)) { statsparam[0] = "Total intervals"; statsparam[1] = "Mean fire and other indicator interval"; statsparam[2] = "Median fire and other indicator interval"; statsparam[3] = "Standard deviation"; statsparam[4] = "Fire and other indicator frequency"; statsparam[5] = "Coefficient of variation"; statsparam[6] = "Skewness"; statsparam[7] = "Kurtosis"; statsparam[8] = "Minimum fire and other indicator interval"; statsparam[9] = "Maximum fire interval"; statsparam[10] = "Weibull scale parameter"; statsparam[11] = "Weibull shape parameter"; statsparam[12] = "Weibull mean"; statsparam[13] = "Weibull median"; statsparam[14] = "Weibull mode"; statsparam[15] = "Weibull standard deviation"; statsparam[16] = "Weibull indicator frequency"; statsparam[17] = "Weibull skewness"; statsparam[18] = "Lower exceedance interval"; statsparam[19] = "Upper exceedance interval"; statsparam[20] = "Significantly short interval upper bound"; statsparam[21] = "Significantly long interval lower bound"; } else { log.error("Unsupported event type caught"); } double[] fixvalt = { 0.999, 0.99, 0.975, 0.95, 0.9, 0.875, 0.8, 0.75, 0.7, 0.667, 0.5, 0.333, 0.3, 0.25, 0.2, 0.125, 0.1, 0.05, 0.025, 0.01, 0.001 }; double[][] ExceeProbcomp = new double[fixvalt.length][myReader.size()]; double[][] ExceeProbsample = new double[fixvalt.length][myReader.size()]; // log.debug("the size of statsparam is " + // statsparam.length); double[][] summaryComp = new double[statsparam.length][myReader.size()]; double[] numberOfintervalscomp = new double[myReader.size()]; // ArrayList<ArrayList<Integer>>(); // ArrayList<ArrayList<Integer>> FIyearperSample = new // ArrayList<ArrayList<Integer>>(); ArrayList<Integer> FyearperSampletemp; ArrayList<Integer> FIyearperSampletemp; // ArrayList<Double> fireintervalspersample = new double[] numberOfintervalssamp = new 
double[myReader.size()]; double[][] summarySample = new double[statsparam.length][myReader.size()]; Double[] Dfireintervalspersample; double[] dfireintervalspersample; /* * Set up either of the two filters two create the binary matrix on the case of composite analysis there are two possible filters: * Number of fires and percentage of scarred trees. */ Integer firesFilter1 = new Integer(0); Double firesFilter2 = new Double(0); if (fireFilterType.equals(FireFilterType.NUMBER_OF_EVENTS)) { if (filterValue != 1) firesFilter1 = filterValue.intValue(); // log.debug("number of fires is selected is: "+ // firesFilter1); } else if (fireFilterType.equals(FireFilterType.PERCENTAGE_OF_ALL_TREES)) { if (filterValue != 1) firesFilter2 = filterValue / 100.0; // log.debug("percentage of fires is selected is: "+ // firesFilter2); } else if (fireFilterType.equals(FireFilterType.PERCENTAGE_OF_RECORDING)) { if (filterValue != 1) firesFilter2 = filterValue / 100.0; // TODO ELENA TO CHECK } else { log.error("Unknown FireFilterType"); return; } boolean[] enoughIntComp = new boolean[myReader.size()]; boolean[] enoughIntSamp = new boolean[myReader.size()]; // NEW FOR ELENA log.debug("Sample depth filter type = " + sampleDepthFilterType); log.debug("Sample depth value = " + sampleDepthFilterValue); // if (sampleDepthFilterType.equals(SampleDepthFilterType.MIN_NUM_SAMPLES)) // { // // TODO ELENA // } // else if (sampleDepthFilterType.equals(SampleDepthFilterType.MIN_NUM_RECORDER_SAMPLES)) // { // // TODO ELENA // } /* * start processing each file individually: The analysis can be done by either tree (by sample/non-binary) or by site * (composite/binary). by tree the box selected is: jCheckTree. by site the box selected is: */ for (int i = 0; i < myReader.size(); i++) { log.debug(" Starting to Process file : " + myReader.get(i).getName()); /* * get the vector Year containing the vector of year of a given fhx file load it into the array list climateYear. 
*/ climateYear = myReader.get(i).getYearArray(); // new stuff // Create filter based on min number of samples/recorder samples int[] depths = null; if (sampleDepthFilterType.equals(SampleDepthFilterType.MIN_NUM_SAMPLES)) { depths = myReader.get(i).getSampleDepths(); log.debug("MIN_NUM_SAMPLES "); } else if (sampleDepthFilterType.equals(SampleDepthFilterType.MIN_NUM_RECORDER_SAMPLES)) { depths = myReader.get(i).getRecordingDepths(eventTypeToProcess); log.debug(" MIN_NUM_RECORDER_SAMPLES"); } else { log.error("Unknown sample depth filter type."); return; } minSampleFilter = new ArrayList<Integer>(); for (int ij = 0; ij < listYears.size(); ij++) { if (climateYear.indexOf(listYears.get(ij)) == -1) { minSampleFilter.add(-1); } else { // log.debug("the sample depth is " // + myReader.get(i).getSampleDepths()[climateYear.indexOf(listYearsComp.get(ij))]); minSampleFilter.add(new Integer(depths[climateYear.indexOf(listYears.get(ij))])); } // log.debug(" " + minSampleFilter.get(ij)); } // end new stuff /* * get filter matrix for each file. * * filters2d matrix composed of the 3 filters number of fires (total capital letter per row) total number of tree (total lower * case letter plus bars counting only after a fire) percent of scared trees total fires/total trees */ // climateVectorFilter2 = myReader.get(i).getFilterArrays(eventTypeToProcess); /* * More new stuff */ if (filterValue != 1) { /* * get both matrices: * * 2. filters2d matrix composed of the 3 filters number of fires (total capital letter per row) total number of tree (total * lower case letter plus bars counting only after a fire) percent of scared trees total fires/total trees */ climateVectorFilter2 = myReader.get(i).getFilterArrays(eventTypeToProcess); /* * if by tree analysis is selected create two matrices (array list) 1. filterMatrix containing the three filter vectors only * in between common years (so using the listYearComp array list subset of the years vector) 2. 
climateMatrix 2 dimensional * array list containing binary matrices restricted to the listYear list. */ if (fireFilterType.equals(FireFilterType.PERCENTAGE_OF_RECORDING)) { percentOfRecordingfilter = new ArrayList<Double>(); for (int ij = 0; ij < listYears.size(); ij++) { if (climateYear.indexOf(listYears.get(ij)) == -1) { percentOfRecordingfilter.add(-1.0); } else { if (myReader.get(i).getRecordingDepths(eventTypeToProcess)[climateYear .indexOf(listYears.get(ij))] != 0) { percentOfRecordingfilter.add(new Double( climateVectorFilter2.get(0).get(climateYear.indexOf(listYears.get(ij))) / myReader.get(i).getRecordingDepths(eventTypeToProcess)[climateYear .indexOf(listYears.get(ij))])); } else { percentOfRecordingfilter.add(-99.0); } } log.debug("PERCENTAGE_OF_RECORDING is: " + percentOfRecordingfilter.get(ij)); } } else { for (int ik = 0; ik < 3; ik++) { log.debug("filter number is: " + ik); filterVectorActual = new ArrayList<Double>(); for (int ij = 0; ij < listYears.size(); ij++) { // log.debug(" climateYear.indexOf(listYearsComp.get(j))" + // climateYear.indexOf(listYearsComp.get(ij))); // if(ik==0){log.debug("number of fires // "+climateVectorFilter2.get(0).get(climateYear.indexOf(listYears.get(ij)))+" year // "+listYearsComp.get(ij));} if (climateYear.indexOf(listYears.get(ij)) == -1) { filterVectorActual.add(-1.0); } else { filterVectorActual.add(new Double( climateVectorFilter2.get(ik).get(climateYear.indexOf(listYears.get(ij))))); } if (ik == 2) { log.debug("filteperc " + filterVectorActual.get(ij)); } } // log.debug("size of filterVectorActual is : "+filterVectorActual.size()); filterMatrix.add(filterVectorActual); // if(ik==0){log.debug("filters is: "+filter); } } // end of if-else percentageofrecording // log.debug("size of the FilterMatrix is" + filterMatrix.size()); } // end of if filters not equal to 1 /* * end of more new stuff */ /* * * 1. 
Create the filterMatrix containing the tree filter vectors only in between common years (so using the listYearComp array * list subset of the years vector) */ // for (int ik = 0; ik < 3; ik++) // { // filterVectorActual = new ArrayList<Double>(); // for (int ij = 0; ij < listYears.size(); ij++) // { // if (climateYear.indexOf(listYears.get(ij)) == -1) // { // filterVectorActual.add(-1.0); // } // else // { // filterVectorActual.add(new Double(climateVectorFilter2.get(ik).get(climateYear.indexOf(listYears.get(ij))))); // } // } /* * ArrayList filterMatrix containes the filter matrix for each of the files */ // filterMatrix.add(filterVectorActual); // }//end of creating the filter matrix. /* * get matrix climate binary matrix by site (binary analysis) if Composite is selected. */ // if ((doComposite)&&(!jTextOfFires.getText().equals("1"))){ if (analysisType.equals(AnalysisType.COMPOSITE)) { log.debug("inside the comp"); // System.out.println("inside the comp " + " working on file "+ myReader.get(i).getName() ); if (eventTypeToProcess.equals(EventTypeToProcess.FIRE_EVENT)) { climateVector = myReader.get(i).getFireEventsArray(); } else if (eventTypeToProcess.equals(EventTypeToProcess.INJURY_EVENT)) { climateVector = myReader.get(i).getOtherInjuriesArray(); } else if (eventTypeToProcess.equals(EventTypeToProcess.FIRE_AND_INJURY_EVENT)) { climateVector = myReader.get(i).getFiresAndInjuriesArray(); } else { log.error("Unsupported event type caught"); } climateVectorActualSite = new ArrayList<Integer>(); for (int j = 0; j < listYears.size(); j++) { if (climateYear.indexOf(listYears.get(j)) == -1) { climateVectorActualSite.add(-1); } else { if (minSampleFilter.get(j).intValue() >= sampleDepthFilterValue.intValue()) { if (filterValue != 1) { if (fireFilterType.equals(FireFilterType.NUMBER_OF_EVENTS)) { // log.debug("fire filter: "+firesFilter1+" year is: "+listYears.get(j) // +" fires: "+filterMatrix.get(3*i).get(j)+" climatevector: // 
"+climateVector.get(climateYear.indexOf(listYears.get(j)))); if ((filterMatrix.get(3 * i).get(j) < firesFilter1) && (climateVector.get(climateYear.indexOf(listYears.get(j)))) != -1.0) { climateVectorActualSite.add(0); } else { climateVectorActualSite .add(climateVector.get(climateYear.indexOf(listYears.get(j)))); } } else if (fireFilterType.equals(FireFilterType.PERCENTAGE_OF_ALL_TREES)) { // log.debug("percent of fires is selected is: "+ // firesFilter2+" "+climateVector.get(climateYear.indexOf(listYearsComp.get(j)))); // log.debug("the filter percent of fires is"+filterMatrix.get((3*i+2)).get(j)); if ((filterMatrix.get(3 * i + 2).get(j) == -99)) { climateVectorActualSite.add(-1); } else { if ((filterMatrix.get(3 * i + 2).get(j) < firesFilter2) && ((climateVector .get(climateYear.indexOf(listYears.get(j)))) != -1.0)) { climateVectorActualSite.add(0); } else { climateVectorActualSite .add(climateVector.get(climateYear.indexOf(listYears.get(j)))); } } } else if (fireFilterType.equals(FireFilterType.PERCENTAGE_OF_RECORDING)) { // TODO // ELENA TO IMPLEMENT if (percentOfRecordingfilter.get(j) == -99) { climateVectorActualSite.add(-1); } else { if ((percentOfRecordingfilter.get(j) < firesFilter2) && ((climateVector .get(climateYear.indexOf(listYears.get(j)))) != -1.0)) { climateVectorActualSite.add(0); } else { climateVectorActualSite .add(climateVector.get(climateYear.indexOf(listYears.get(j)))); } } } else { log.error("Unknown FireFilterType"); return; } } // end of if filter not equal to 1 else { climateVectorActualSite .add(climateVector.get(climateYear.indexOf(listYears.get(j)))); } // end of else of if filter not equal to 1 } // end of if the filter minsampedepth else { // log.debug("j is " + j + "minSampleFilter is " + minSampleFilter.get(j)); climateVectorActualSite.add(-1); } } // end else 645 to 721 } // end of j loop listyears (420-459) /* * climateMatrixSite has the composite information taking in consideration both the filters and common years */ 
climateMatrixSite.add(climateVectorActualSite); /* * Must get the years with Fires from the climateMatrixSite which has been filter already */ ArrayList<Double> fireintervalspersite = new ArrayList<Double>(); ArrayList<Integer> yearsWithFires = new ArrayList<Integer>(); for (int ij = 0; ij < listYears.size(); ij++) { if (climateMatrixSite.get(i).get(ij) == 1) { yearsWithFires.add(listYears.get(ij)); log.debug("year with fires " + listYears.get(ij)); } } /* * check that the number of years with fires is bigger of equal than 3 if so make the fire intervals else the test can not * be run and report NA */ // new swich if (yearsWithFires.size() != 0) { if (includeIncomplete) { if (maxLastYear.compareTo(yearsWithFires.get(yearsWithFires.size() - 1)) != 0) { log.debug("here"); numberOfintervalscomp[i] = yearsWithFires.size(); } else { numberOfintervalscomp[i] = yearsWithFires.size() - 1; } } else { numberOfintervalscomp[i] = yearsWithFires.size() - 1; } } log.debug("number of invervlas in comp is: " + numberOfintervalscomp[i]); // end of new switch if (numberOfintervalscomp[i] >= 3) { enoughIntComp[i] = true; ArrayList<Integer> fireIntervals = generateFireIntervals(yearsWithFires); for (int ij = 0; ij < fireIntervals.size(); ij++) { // log.debug("fire intervals are: "+ // test1.getFireIntervals().get(ij)); fireintervalspersite.add(fireIntervals.get(ij) * 1.0); } /* * Add extra interval if include incomplete is selected. the interval goes from the last fire scar year to the last year * of the fire history. 
*/ if (includeIncomplete) { double includeinterval = maxLastYear - yearsWithFires.get(yearsWithFires.size() - 1); if (includeinterval > 0) { fireintervalspersite.add(includeinterval); System.out.println("the last year is " + maxLastYear + "the last year with fire is " + yearsWithFires.get(yearsWithFires.size() - 1)); log.debug("the included interval is " + includeinterval); } } log.debug("FireintervalsPerSite =" + fireintervalspersite); /* * Get the normal statistics for the fire intervals add the values to the stats and then call them for the stats */ DescriptiveStatistics stats = new DescriptiveStatistics(); Dfireintervalspersite = new Double[fireintervalspersite.size()]; Dfireintervalspersite = fireintervalspersite.toArray(Dfireintervalspersite); dfireintervalspersite = new double[fireintervalspersite.size()]; // summaryComp = new // double[statsparam.length][myReader.size()]; for (int ik = 0; ik < fireintervalspersite.size(); ik++) { stats.addValue(Dfireintervalspersite[ik].doubleValue()); dfireintervalspersite[ik] = Dfireintervalspersite[ik].doubleValue(); } /* * load the Summary Analysis for the Composite fire intervals */ summaryComp[0][i] = fireintervalspersite.size(); // double mean = stats.getMean(); summaryComp[1][i] = stats.getMean(); // double median = // StatUtils.percentile(dfireintervalspersite, 50); summaryComp[2][i] = StatUtils.percentile(dfireintervalspersite, 50); // double std = stats.getStandardDeviation(); summaryComp[3][i] = stats.getStandardDeviation(); // double skew = stats.getSkewness(); summaryComp[4][i] = 1.0 / summaryComp[1][i]; summaryComp[5][i] = summaryComp[3][i] / summaryComp[1][i]; summaryComp[6][i] = stats.getSkewness(); // double kurt = stats.getKurtosis(); if (numberOfintervalscomp[i] == 3) { summaryComp[7][i] = -99; } else { summaryComp[7][i] = stats.getKurtosis(); } // log.debug("nomean \t\t nostd \t\t nokurt \t noskew \t\t nomedian"); // 
log.debug(twoPlace.format(mean)+"\t\t"+twoPlace.format(std)+"\t\t"+twoPlace.format(kurt)+"\t\t"+twoPlace.format(skew)+"\t\t"+twoPlace.format(median)); Weibull weibull = new Weibull(fireintervalspersite); ArrayList<Double> weibullProb = weibull.getWeibullProbability(fireintervalspersite); ArrayList<Double> siglonglowbound = new ArrayList<Double>(); ArrayList<Double> sigshortupbound = new ArrayList<Double>(); log.debug("the weibull probability of first element is " + weibullProb.get(0)); log.debug("the index the size of the interval is " + weibullProb.indexOf(weibullProb.get(0))); for (int ij = 0; ij < weibullProb.size() - 1; ij++) { if (weibullProb.get(ij) <= alphaLevel) { siglonglowbound.add(fireintervalspersite.get(ij)); } if (weibullProb.get(ij) >= (1 - alphaLevel)) { sigshortupbound.add(fireintervalspersite.get(ij)); } } summaryComp[10][i] = weibull.getScale(); summaryComp[11][i] = weibull.getShape(); summaryComp[12][i] = weibull.getMean(); summaryComp[13][i] = weibull.getMedian(); summaryComp[14][i] = weibull.getMode(); summaryComp[15][i] = weibull.getSigma(); summaryComp[16][i] = 1.0 / summaryComp[13][i]; summaryComp[17][i] = weibull.getSkew(); summaryComp[18][i] = weibull.getExceedenceProbability2()[0]; summaryComp[19][i] = weibull.getExceedenceProbability2()[1]; Collections.sort(sigshortupbound); log.debug("siglonglowbound is " + siglonglowbound); try { summaryComp[20][i] = sigshortupbound.get(sigshortupbound.size() - 1); } catch (Exception e) { summaryComp[20][i] = Double.NaN; } Collections.sort(siglonglowbound); try { summaryComp[21][i] = siglonglowbound.get(0); } catch (Exception e) { summaryComp[21][i] = Double.NaN; } log.debug("sigshortupbound is " + sigshortupbound); Collections.sort(fireintervalspersite); summaryComp[8][i] = fireintervalspersite.get(0); summaryComp[9][i] = fireintervalspersite.get(fireintervalspersite.size() - 1); // log.debug("shape \t\t scale \t\t median "); // 
log.debug(twoPlace.format(test1.Weibull_Parameters(fireintervalspersite)[0])+"\t\t"+twoPlace.format(test1.Weibull_Parameters(fireintervalspersite)[1])+"\t\t"+twoPlace.format(test1.weibull_median(test1.Weibull_Parameters(fireintervalspersite)))); // log.debug("mean \t\t sigma \t\t mode \t\t skewness"); // log.debug(twoPlace.format(test1.weibull_mean(test1.Weibull_Parameters(fireintervalspersite)))+"\t\t"+twoPlace.format(test1.weibull_sigma(test1.Weibull_Parameters(fireintervalspersite)))+"\t\t"+twoPlace.format(test1.weibull_mode(test1.Weibull_Parameters(fireintervalspersite)))+"\t\t"+twoPlace.format(test1.weibull_skew(test1.Weibull_Parameters(fireintervalspersite)))); // log.debug("maxhazard \t\t lei \t\t uei "); // log.debug(twoPlace.format(test1.maxhazard_int(test1.Weibull_Parameters(fireintervalspersite)))+"\t\t"+twoPlace.format(test1.weibull_lowuppexcint(test1.Weibull_Parameters(fireintervalspersite))[0])+"\t\t"+twoPlace.format(test1.weibull_lowuppexcint(test1.Weibull_Parameters(fireintervalspersite))[1])); // log.debug("the size of YearWith Fires is "+YearsWithFires.size()); System.out.println("the size of the prb exdc is " + weibull.getExceedenceProbability().length); for (int kk = 0; kk < weibull.getExceedenceProbability().length; kk++) { ExceeProbcomp[kk][i] = weibull.getExceedenceProbability()[kk]; // log.debug("file "+i+"Exce probability "+ // ExceeProbcomp[kk][i]); } } // end of if enoughIntComp else { enoughIntComp[i] = false; } } // end the if composite is selected /* * starting the process for the sample mode. 
*/ if (analysisType.equals(AnalysisType.SAMPLE)) { log.debug("I am in sample "); ArrayList<Double> fireintervalspersample = new ArrayList<Double>(); FIyearperSampletemp = new ArrayList<Integer>(); // FyearperSampletemp = new ArrayList<Integer>(); for (int k = 0; k < myReader.get(i).getNumberOfSeries(); k++) { log.debug("Parsing file index " + i + ", series number " + (k + 1)); FyearperSampletemp = new ArrayList<Integer>(); // log.debug("the size of the years of the file is:"+ // myReader.get(i).getYear().size()); // log.debug("years with fires in sample "+k + // "years are "); // for (int j = 0; j < myReader.get(i).getYearArray().size(); j++) for (int j = 0; j < listYears.size(); j++) { // log.debug("the size climate2d is "+myReader.get(i).getClimate2d().get(k).get(j)); if (eventTypeToProcess.equals(EventTypeToProcess.FIRE_EVENT)) { if (climateYear.indexOf(listYears.get(j)) != -1) { if (myReader.get(i).getClimate2d().get(k) .get(climateYear.indexOf(listYears.get(j))) == 1) { // FyearperSampletemp.add((j + myReader.get(i).getFirstYear())); FyearperSampletemp.add(listYears.get(j)); } } } // { // if ((myReader.get(i).getClimate2d().get(k).get(j) == 1)) // { // / log.debug("I here inside ==1 "+ // / j+" "+myReader.get(i).getFirstYear()); // / int temp=j+myReader.get(i).getFirstYear(); // / log.debug((j+myReader.get(i).getFirstYear())); // /// FyearperSampletemp.add((j + myReader.get(i).getFirstYear())); // } // } else if (eventTypeToProcess.equals(EventTypeToProcess.INJURY_EVENT)) { if (climateYear.indexOf(listYears.get(j)) != -1) { if (myReader.get(i).getClimate2dII().get(k) .get(climateYear.indexOf(listYears.get(j))) == 1) { FyearperSampletemp.add(listYears.get(j)); } } // if ((myReader.get(i).getClimate2dII().get(k).get(j) == 1)) // { // FyearperSampletemp.add((j + myReader.get(i).getFirstYear())); // } } else if (eventTypeToProcess.equals(EventTypeToProcess.FIRE_AND_INJURY_EVENT)) { if (climateYear.indexOf(listYears.get(j)) != -1) { if 
(myReader.get(i).getClimate2dIII().get(k) .get(climateYear.indexOf(listYears.get(j))) == 1) { FyearperSampletemp.add(listYears.get(j)); } } // if ((myReader.get(i).getClimate2dIII().get(k).get(j) == 1)) // { // FyearperSampletemp.add((j + myReader.get(i).getFirstYear())); // } } else { log.error("Unsupported event type caught"); } } // / end of the loop for listYears in common (finish loading the fire year per sample log.debug( "series number " + (k + 1) + " FyearperSampletemp.size() " + FyearperSampletemp.size()); if (FyearperSampletemp.size() != 0) { if (includeIncomplete) { if (maxLastYear.compareTo(FyearperSampletemp.get(FyearperSampletemp.size() - 1)) != 0) { numberOfintervalssamp[i] = numberOfintervalssamp[i] + FyearperSampletemp.size(); } } else { numberOfintervalssamp[i] = numberOfintervalssamp[i] + (FyearperSampletemp.size() - 1); } } log.debug("series number: " + (k + 1) + " number of intervals " + numberOfintervalssamp[i]); // new if ((FyearperSampletemp.size() == 1) && (includeIncomplete)) { log.debug("last index per sample is " + myReader.get(i).getLastYearIndexPerSample()[k]); log.debug("first year per sample is " + myReader.get(i).getFirstYear()); log.debug("maxLastyear is " + maxLastYear); // if (maxLastYear != FyearperSampletemp.get(FyearperSampletemp.size() - 1)) if (maxLastYear.compareTo(FyearperSampletemp.get(FyearperSampletemp.size() - 1)) != 0) { log.debug("I am in not equal "); log.debug( "last year in the sample is " + (myReader.get(i).getLastYearIndexPerSample()[k] + myReader.get(i).getFirstYear())); log.debug("maxLastyear is " + maxLastYear); log.debug("the last fire year in the sample " + FyearperSampletemp.get(FyearperSampletemp.size() - 1)); if (maxLastYear <= (myReader.get(i).getLastYearIndexPerSample()[k] + myReader.get(i).getFirstYear())) { Integer temp = ((maxLastYear) - FyearperSampletemp.get(FyearperSampletemp.size() - 1)); // int temp1 = maxLastYear.intValue() - FyearperSampletemp.get(FyearperSampletemp.size() - 1).intValue(); 
log.debug("in less than or equal to "); // temp = (maxLastYear) - FyearperSampletemp.get(FyearperSampletemp.size() - 1); log.debug("the resta temp is " + temp); // FIyearperSampletemp.add(((maxLastYear) - FyearperSampletemp.get(FyearperSampletemp.size() - 1))); if ((maxLastYear) - FyearperSampletemp.get(FyearperSampletemp.size() - 1) > 0) { FIyearperSampletemp.add( (maxLastYear) - FyearperSampletemp.get(FyearperSampletemp.size() - 1)); log.debug("the fire intervals for sample " + k + " is " + FIyearperSampletemp.get(0)); } // FIyearperSampletemp.add(temp); // log.debug("the fire intervals for sample " + k + " is " + FIyearperSampletemp.get(0)); } else { log.debug("in else "); FIyearperSampletemp.add((myReader.get(i).getLastYearIndexPerSample()[k] + myReader.get(i).getFirstYear()) - FyearperSampletemp.get(FyearperSampletemp.size() - 1)); log.debug("fire intervals for sample " + k + " is " + FIyearperSampletemp.get(0)); } // FIyearperSampletemp.add((myReader.get(i).getFirstYear() + myReader.get(i).getLastYearIndexPerSample()[k]) // - FyearperSampletemp.get(FyearperSampletemp.size() - 1)); } // log.debug("fire intervals for sample " + k + " is " + FIyearperSampletemp.get(0)); } // end of if one fire year and includelastyear so we have at least one interval in a given series. 
// endofnew if ((FyearperSampletemp.size() >= 2)) { log.debug("Series number is " + (k + 1)); for (int jk = 0; jk < FyearperSampletemp.size() - 1; jk++) { // FIyearperSampletemp.add(FyearperSample.get(k).get(jk+1) // - FyearperSample.get(k).get(jk)); log.debug("FyearperSampletemp is " + FyearperSampletemp.get(jk)); if ((FyearperSampletemp.get(jk + 1) - FyearperSampletemp.get(jk)) > 0) { FIyearperSampletemp .add(FyearperSampletemp.get(jk + 1) - FyearperSampletemp.get(jk)); } // FIyearperSampletemp.add(FyearperSampletemp.get(jk+1) // - FyearperSampletemp.get(jk)); log.debug("fire intervals for sample " + k + " is " + FIyearperSampletemp.get(jk)); // fisumtemp= fisumtemp + // FIyearperSampletemp.get(jk).intValue(); } if (includeIncomplete) { // if (maxLastYear != FyearperSampletemp.get(FyearperSampletemp.size() - 1)) if (maxLastYear.compareTo(FyearperSampletemp.get(FyearperSampletemp.size() - 1)) != 0) // if ((myReader.get(i).getLastYearIndexPerSample()[k] + myReader.get(i).getFirstYear()) != FyearperSampletemp // .get(FyearperSampletemp.size() - 1)) { if (maxLastYear <= (myReader.get(i).getLastYearIndexPerSample()[k] + myReader.get(i).getFirstYear())) { if (((maxLastYear) - FyearperSampletemp.get(FyearperSampletemp.size() - 1)) > 0) { FIyearperSampletemp.add(((maxLastYear) - FyearperSampletemp.get(FyearperSampletemp.size() - 1))); } } else { FIyearperSampletemp.add((myReader.get(i).getLastYearIndexPerSample()[k] + myReader.get(i).getFirstYear()) - FyearperSampletemp.get(FyearperSampletemp.size() - 1)); } // log.debug("the sample number is "+k+ // " the size of the fyearpersampletemp is "+ // FyearperSampletemp.size() ); // log.debug("the last year per sample is " // + (myReader.get(i).getLastYearIndexPerSample()[k] + myReader.get(i).getFirstYear())); // log.debug(" the last fire year per sample " + FyearperSampletemp.get(FyearperSampletemp.size() - 1)); // FIyearperSampletemp.add((maxLastYear) - FyearperSampletemp.get(FyearperSampletemp.size() - 1)); // 
log.debug("the last intrval in included is on is " // + FIyearperSampletemp.get(FIyearperSampletemp.size() - 1)); } } } // end of if at least 2 fier years so we have at least one interval in a given series. log.debug("size of FIyearperSampletemp " + FIyearperSampletemp.size() + " at series is :" + (k + 1)); // FIyearperSampletemp.size()+ // " X "+FIyearperSampletemp.get(0).size()); } // end of the loop for number of series. // log.debug("size of FIyearperSample "+ // FIyearperSampletemp.size()); for (int j = 0; j < FIyearperSampletemp.size(); j++) { fireintervalspersample.add(FIyearperSampletemp.get(j) * 1.0); } /* * Get the normal statistics for the fire intervals add the values to the stats and then call them for the stats */ if (fireintervalspersample.size() >= 3) { enoughIntSamp[i] = true; DescriptiveStatistics stasample = new DescriptiveStatistics(); Dfireintervalspersample = new Double[fireintervalspersample.size()]; Dfireintervalspersample = fireintervalspersample.toArray(Dfireintervalspersample); dfireintervalspersample = new double[fireintervalspersample.size()]; // summarySample = new // double[statsparam.length][myReader.size()]; for (int ik = 0; ik < fireintervalspersample.size(); ik++) { stasample.addValue(Dfireintervalspersample[ik].doubleValue()); dfireintervalspersample[ik] = Dfireintervalspersample[ik].doubleValue(); log.debug("the " + ik + " fire interval is " + dfireintervalspersample[ik]); } log.debug("the size for dfireintervalspersample is " + dfireintervalspersample.length); // ADDED BY PETE if (dfireintervalspersample.length == 0) continue; /* * load the Summary Analysis for the Sample fire intervals */ summarySample[0][i] = fireintervalspersample.size(); // double mean = stats.getMean(); summarySample[1][i] = stasample.getMean(); log.debug("mean sample is " + stasample.getMean()); // double median = // StatUtils.percentile(dfireintervalspersite, 50); summarySample[2][i] = StatUtils.percentile(dfireintervalspersample, 50); 
log.debug("summarySample[2][] " + i + " " + summarySample[2][i]); // double std = stats.getStandardDeviation(); summarySample[3][i] = stasample.getStandardDeviation(); log.debug("summarySample[3][] " + i + " " + summarySample[3][i]); // double skew = stats.getSkewness(); summarySample[4][i] = 1.0 / summarySample[1][i]; log.debug("summarySample[4][] " + i + " " + summarySample[4][i]); summarySample[5][i] = summarySample[3][i] / summarySample[1][i]; log.debug("summarySample[5][] " + i + " " + summarySample[5][i]); summarySample[6][i] = stasample.getSkewness(); log.debug("summarySample[6][] " + i + " " + summarySample[6][i]); // double kurt = stats.getKurtosis(); if (numberOfintervalssamp[i] == 3) { summarySample[7][i] = -99; } else { summarySample[7][i] = stasample.getKurtosis(); } // summarySample[7][i] = stasample.getKurtosis(); log.debug("summarySample[7][] " + i + " " + summarySample[7][i]); // log.debug("nomean \t\t nostd \t\t nokurt \t noskew \t\t nomedian"); // log.debug(twoPlace.format(mean)+"\t\t"+twoPlace.format(std)+"\t\t"+twoPlace.format(kurt)+"\t\t"+twoPlace.format(skew)+"\t\t"+twoPlace.format(median)); Weibull weibull = new Weibull(fireintervalspersample); // ArrayList<Double> weibullProb = weibull.getWeibullProbability(fireintervalspersample); ArrayList<Double> siglonglowbound = new ArrayList<Double>(); ArrayList<Double> sigshortupbound = new ArrayList<Double>(); log.debug("the weibull probability of first element is " + weibullProb.get(0)); log.debug("the index the size of the interval is " + weibullProb.indexOf(weibullProb.get(0))); for (int ij = 0; ij < weibullProb.size() - 1; ij++) { if (weibullProb.get(ij) <= alphaLevel) { siglonglowbound.add(fireintervalspersample.get(ij)); } if (weibullProb.get(ij) >= (1 - alphaLevel)) { sigshortupbound.add(fireintervalspersample.get(ij)); } } // summarySample[10][i] = weibull.getScale(); log.debug("summarySample[10][] " + i + " " + summarySample[10][i]); summarySample[11][i] = weibull.getShape(); 
log.debug("summarySample[11][] " + i + " " + summarySample[11][i]); summarySample[12][i] = weibull.getMean(); summarySample[13][i] = weibull.getMedian(); summarySample[14][i] = weibull.getMode(); summarySample[15][i] = weibull.getSigma(); summarySample[16][i] = 1.0 / summarySample[13][i]; summarySample[17][i] = weibull.getSkew(); summarySample[18][i] = weibull.getExceedenceProbability2()[0]; summarySample[19][i] = weibull.getExceedenceProbability2()[1]; Collections.sort(sigshortupbound); log.debug("siglonglowbound is " + siglonglowbound); try { summarySample[20][i] = sigshortupbound.get(sigshortupbound.size() - 1); } catch (Exception e) { summarySample[20][i] = Double.NaN; } Collections.sort(siglonglowbound); try { summarySample[21][i] = siglonglowbound.get(0); } catch (Exception e) { summarySample[21][i] = Double.NaN; } log.debug("sigshortupbound is " + sigshortupbound); Collections.sort(fireintervalspersample); try { summarySample[8][i] = fireintervalspersample.get(0); } catch (Exception ex) { log.error("Index out of bounds exception caught: "); log.error(" summarySample[8][i] = fireintervalspersample.get(0)"); ex.printStackTrace(); } summarySample[9][i] = fireintervalspersample.get(fireintervalspersample.size() - 1); // log.debug("shape \t\t scale \t\t median "); // log.debug(twoPlace.format(test2.Weibull_Parameters(fireintervalspersample)[0])+"\t\t"+twoPlace.format(test2.Weibull_Parameters(fireintervalspersample)[1])+"\t\t"+twoPlace.format(test2.weibull_median(test1.Weibull_Parameters(fireintervalspersample)))); // log.debug("mean \t\t sigma \t\t mode \t\t skewness"); // log.debug(twoPlace.format(test1.weibull_mean(test2.Weibull_Parameters(fireintervalspersample)))+"\t\t"+twoPlace.format(test1.weibull_sigma(test2.Weibull_Parameters(fireintervalspersample)))+"\t\t"+twoPlace.format(test2.weibull_mode(test1.Weibull_Parameters(fireintervalspersample)))+"\t\t"+twoPlace.format(test1.weibull_skew(test2.Weibull_Parameters(fireintervalspersample)))); // 
log.debug("maxhazard \t\t lei \t\t uei "); // log.debug(twoPlace.format(test2.maxhazard_int(test2.Weibull_Parameters(fireintervalspersample)))+"\t\t"+twoPlace.format(test2.weibull_lowuppexcint(test2.Weibull_Parameters(fireintervalspersample))[0])+"\t\t"+twoPlace.format(test2.weibull_lowuppexcint(test2.Weibull_Parameters(fireintervalspersample))[1])); // log.debug("the size of YearWith Fires is "+YearsWithFires.size()); // log.debug("the size of the prb exdc is // "+test2.weibull_Exprob(test2.Weibull_Parameters(fireintervalspersample)).length); System.out.println( "the size of the prb exdc sample is " + weibull.getExceedenceProbability().length); for (int kk = 0; kk < weibull.getExceedenceProbability().length; kk++) { ExceeProbsample[kk][i] = weibull.getExceedenceProbability()[kk]; log.debug("file " + i + " Exce probability " + ExceeProbsample[kk][i]); // log.debug("the size is "+ExceeProbsample.length); } } // end of if at least 4 fireintervals else { enoughIntSamp[i] = false; } } // end of if jRadioSample selected. 
// log.debug("the size of exceeprobsample is "ExceeProbsample.length+" X "+ExceeProbsample[0].length); } // end of i readering each file loop do loop (354-1185) /* * */ // log.debug("size of the climateMatrixSite is "+climateMatrixSite.size()+" X "+climateMatrixSite.get(0).size()); // for (int j = 0; j < listYears.size(); j++){ // log.debug(climateMatrixSite.get(0).get(j) + " " + // listYears.get(j)); // } // setCursor(Cursor.getDefaultCursor()); /* * create JFileChooser object to generate a browsing capabilities */ JFileChooser fileBrowse = new JFileChooser(); fileBrowse = new JFileChooser(savePath.substring(0, savePath.lastIndexOf(File.separator))); /* * set multiselect on (even though we don't need it) */ fileBrowse.setMultiSelectionEnabled(true); /* * set file and folder directive */ fileBrowse.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES); /* * set file type: coma delimited file csv */ // FileFilter filter1 = new CSVFileFilter(); fileBrowse.setFileFilter(filter1); /* * set dialog text: select the name and location of the matrix files */ fileBrowse.setDialogTitle("Select the name and location of the Stats Summary file:"); /* * create the writer object for each of the files to be created */ Writer wr; Writer wrWDE; Writer wrSample; Writer wrWDESample; /* * set delimiter in this case we are using comas "," */ String delim = ","; /* * Start writing information into the files */ try { if (analysisType.equals(AnalysisType.COMPOSITE)) { wr = new BufferedWriter(new FileWriter(summaryFile)); wrWDE = new BufferedWriter(new FileWriter(exceedenceFile)); /* * write the heading to the files */ String buffer = ""; buffer = buffer + "Composite Parameters" + delim; for (int i = 0; i < inputFileArray.length; i++) { buffer = buffer + inputFileArray[i].getLabel() + delim; } ; wr.write(buffer.substring(0, buffer.length() - 1) + System.getProperty("line.separator")); buffer = ""; for (int j = 0; j < statsparam.length; j++) { buffer = buffer + statsparam[j] + delim; for 
(int k = 0; k < inputFileArray.length; k++) { if (j == 0) { if (numberOfintervalscomp[k] < 3) { buffer = buffer + twoPlace.format(numberOfintervalscomp[k]) + delim; } else { buffer = buffer + twoPlace.format(summaryComp[0][k]) + delim; } } else { if (enoughIntComp[k]) { if (summaryComp[j][k] == -99) { buffer = buffer + "" + delim; } else { buffer = buffer + twoPlace.format(summaryComp[j][k]) + delim; } } else { buffer = buffer + "" + delim; } } } // end of k loop filearray wr.write(buffer.substring(0, buffer.length() - 1) + System.getProperty("line.separator")); buffer = ""; } // end of j loop Stats // wr.close(); // // // wrWDE = new BufferedWriter(new // FileWriter(outputWDExceeTable)); /* * write the heading to the files */ buffer = ""; wrWDE.write("Exceedence Prob" + delim); for (int i = 0; i < inputFileArray.length; i++) { buffer = buffer + inputFileArray[i].getLabel() + delim; } wrWDE.write(buffer.substring(0, buffer.length() - 1) + System.getProperty("line.separator")); buffer = ""; for (int j = 0; j < fixvalt.length; j++) { buffer = buffer + threePlace.format(fixvalt[j]) + delim; for (int k = 0; k < inputFileArray.length; k++) { if (enoughIntComp[k]) { buffer = buffer + twoPlace.format(ExceeProbcomp[j][k]) + delim; } else { buffer = buffer + "" + delim; } } wrWDE.write(buffer.substring(0, buffer.length() - 1) + System.getProperty("line.separator")); buffer = ""; } wr.close(); wrWDE.close(); } // end of if jRadioComp is selecte if (analysisType.equals(AnalysisType.SAMPLE)) { wrSample = new BufferedWriter(new FileWriter(summaryFile)); wrWDESample = new BufferedWriter(new FileWriter(exceedenceFile)); /* * write the heading to the files */ wrSample.write("Sample Parameters" + delim); for (int i = 0; i < inputFileArray.length; i++) { wrSample.write(inputFileArray[i].getLabel() + delim); } wrSample.write(System.getProperty("line.separator")); for (int j = 0; j < statsparam.length; j++) { wrSample.write(statsparam[j] + delim); for (int k = 0; k < 
inputFileArray.length; k++) { if (j == 0) { if (numberOfintervalssamp[k] < 3) { wrSample.write(twoPlace.format(numberOfintervalssamp[k]) + delim); } else { wrSample.write(twoPlace.format(summarySample[0][k]) + delim); } } else { if (enoughIntSamp[k]) { if (summarySample[j][k] == -99) { wrSample.write("" + delim); } else { wrSample.write(twoPlace.format(summarySample[j][k]) + delim); } } else { wrSample.write("" + delim); } } } // end of k loop file array wrSample.write(System.getProperty("line.separator")); } // end of loop j loop stats // wrSample.close(); // // // log.debug("the size is "+fixvalt.length+" X "+inputFile.length); // wrWDESample = new BufferedWriter(new // FileWriter(outputWDExceeTablesample)); /* * write the heading to the files */ wrWDESample.write("Exceedence Prob" + delim); for (int i = 0; i < inputFileArray.length; i++) { wrWDESample.write(inputFileArray[i].getLabel() + delim); } wrWDESample.write(System.getProperty("line.separator")); for (int j = 0; j < fixvalt.length; j++) { wrWDESample.write(threePlace.format(fixvalt[j]) + delim); for (int k = 0; k < inputFileArray.length; k++) { // System.out.print(ExceeProbcomp[j][k]+delim); if (enoughIntSamp[k]) { wrWDESample.write(twoPlace.format(ExceeProbsample[j][k]) + delim); } else { wrWDESample.write("" + delim); } } // System.out.print(System.getProperty("line.separator")); wrWDESample.write(System.getProperty("line.separator")); } wrSample.close(); wrWDESample.close(); } // end of jradiosample } // end of Try catch (IOException ex) { ex.printStackTrace(); } finally { } }
From source file: org.fhaes.jsea.JSEAStatsFunctions.java
/** * TODO//from w ww. j a va2 s .com * * @param titleForRun * @param outputFilePrefix * @param seedNumber * @param yearsPriorToEvent * @param yearsAfterTheEvent * @param numberOfSimulations * @param firstYearOfProcess * @param lastYearOfProcess * @param includeIncompleteEpochs * @param randomSampling * @param chronologyYears * @param chronologyActual * @param events * @param growth * @param save * @param usingSegmentation * @param segmentTable * @param chronologyFile * @param alphaLevel95 * @param alphaLevel99 * @param alphaLevel999 */ public JSEAStatsFunctions(String titleForRun, String outputFilePrefix, Integer seedNumber, Integer yearsPriorToEvent, Integer yearsAfterTheEvent, Integer numberOfSimulations, Integer firstYearOfProcess, Integer lastYearOfProcess, boolean includeIncompleteEpochs, boolean randomSampling, ArrayList<Integer> chronologyYears, ArrayList<Double> chronologyActual, ArrayList<Integer> events, boolean growth, boolean save, boolean usingSegmentation, SegmentTable segmentTable, String chronologyFile, boolean alphaLevel95, boolean alphaLevel99, boolean alphaLevel999, boolean doZScore) { long begintime = System.currentTimeMillis(); this.titleForRun = titleForRun; this.outputFilePrefix = outputFilePrefix; this.yearsPriorToEvent = yearsPriorToEvent; this.yearsAfterTheEvent = yearsAfterTheEvent; this.randomSampling = randomSampling; this.numberOfSimulations = numberOfSimulations; this.seedNumber = seedNumber; this.firstYearOfProcess = firstYearOfProcess; this.lastYearOfProcess = lastYearOfProcess; // this.excludeIncompleteEpochs = excludeIncompleteEpochs; this.includeIncompleteEpochs = includeIncompleteEpochs; this.chronologyYears = chronologyYears; this.chronologyActual = chronologyActual; this.events = events; this.isFirstIteration = true; this.save = save; this.growth = growth; this.usingSegmentation = usingSegmentation; this.segmentTable = segmentTable; this.chronologyFile = chronologyFile; this.alphaLevel95 = alphaLevel95; this.alphaLevel99 = 
alphaLevel99; this.alphaLevel999 = alphaLevel999; this.doZScore = doZScore; log.debug("this.titleForRun = " + titleForRun); log.debug("this.outputFilePrefix = " + outputFilePrefix); log.debug("this.yearsPriorToEvent = " + yearsPriorToEvent); log.debug("this.yearsAfterTheEvent = " + yearsAfterTheEvent); log.debug("this.randomSampling = " + randomSampling); log.debug("this.numberOfSimulations = " + numberOfSimulations); log.debug("this.seedNumber = " + seedNumber); log.debug("this.firstYearOfProcess = " + firstYearOfProcess); log.debug("this.lastYearOfProcess = " + lastYearOfProcess); // log.debug("this.excludeIncompleteEpochs = "+excludeIncompleteEpochs); log.debug("this.includeIncompleteEpochs = " + includeIncompleteEpochs); log.debug("this.chronologyYears = " + chronologyYears); log.debug("this.chronologyActual = " + chronologyActual); log.debug("this.events = " + events); log.debug("this.save = " + save); log.debug("this.growth = " + growth); log.debug("this.usingSegmentation = " + usingSegmentation); // log.debug("this.segmentTable = earliestYear " + segmentTable.getEarliestYear() + ", latestYear " + segmentTable.getLatestYear()); log.debug("this.chronologyFile = " + chronologyFile); log.debug("this.alphaLevel95 = " + alphaLevel95); log.debug("this.alphaLevel99 = " + alphaLevel99); log.debug("this.alphaLevel999 = " + alphaLevel999); /* * Setting the three decimal format */ DecimalFormat threePlacess = new DecimalFormat("0.000"); /* * Creating the date of the run of the program */ Date now = new Date(); /* * Creating the files necessary (two txt files) */ // File outputFile = new File(outputFilePrefix + ".out"); // Writer wr; // String bigbuffer = ""; report = new String(""); actualTable = new String(""); simulationTable = new String(""); cdbuffer = new String(""); pdfbufferA = new String(""); pdfbufferB = new String(""); pdfbufferpar1 = new String(""); pdfbufferpar2 = new String(""); /* * Converting Arraylists into arrays chronologyActual into chronoActual 
chronologyYears into yearsActual events into keyEvents */ chronoActual = new Double[chronologyActual.size()]; chronoActual = chronologyActual.toArray(chronoActual); yearsActual = new Integer[chronologyYears.size()]; yearsActual = chronologyYears.toArray(yearsActual); Collections.sort(events); /* * Setting default values for first yearofprocess, lastyearofprocess recall the firstYearchrono is set as the default on the * firtYearOfProcess. also firstYearchrono is set as the default for firstYearsegment lastYearchrono is set as the default of the * lastYearOfProcess */ if (firstYearOfProcess == 0) { firstYearOfProcess = yearsActual[0]; } if (lastYearOfProcess == 0) { lastYearOfProcess = yearsActual[yearsActual.length]; } if (numberOfSimulations == 0) { System.out.println("the number of simulations need to be set"); } /* * 1. statistical Analysis of the whole time series chronology 2. statistical Analysis of the adjusted time series chronologyAdj 3. * statistical Analysis of the whole Event list events 4. print using the method printReport */ // Statistical Analysis for the whole Climate Series DescriptiveStatistics stats = new DescriptiveStatistics(); dchronoActual = new double[chronologyActual.size()]; // Add the data from the array for (int i = 0; i < chronoActual.length; i++) { stats.addValue(chronoActual[i].doubleValue()); dchronoActual[i] = chronoActual[i].doubleValue(); } // Obtain the mean sensitivity meanSensitivity = 0; for (int i = 1; i < chronoActual.length; i++) { double senDenominator = Math.abs(dchronoActual[i]) + Math.abs(dchronoActual[i - 1]); if (senDenominator != 0) { meanSensitivity = meanSensitivity + Math.abs(2 * (dchronoActual[i] - dchronoActual[i - 1])) / senDenominator; } } meanSensitivity = meanSensitivity / (dchronoActual.length - 1); /* * Obtain and display the general statistical information on the whole climate series. 
*/ mean = stats.getMean(); std = stats.getStandardDeviation(); median = StatUtils.percentile(dchronoActual, 50); kurt = stats.getKurtosis(); skew = stats.getSkewness(); /* * is segmentlength is different than 0 find the beginning and end year for each segment */ firstYearsArray = new ArrayList<Integer>(); lastYearsArray = new ArrayList<Integer>(); // NO SEGMENTATION IS USED if (!usingSegmentation) { firstYearsArray.add(firstYearOfProcess); lastYearsArray.add(lastYearOfProcess); } // SEGMENTATION IS USED AND HAS BEEN DEFINED if (usingSegmentation) { for (int i = 0; i < segmentTable.tableModel.getSegments().size(); i++) { firstYearsArray.add(segmentTable.tableModel.getSegment(i).getFirstYear()); lastYearsArray.add(segmentTable.tableModel.getSegment(i).getLastYear()); } } /* * set up the loop for the typed of segmentation */ /* * set the adjusted time series 1. set up the loop for the typed of segmentation 3.find the index of the first event in the actual * array. 2. adjust the series by yearsActual[indexofthefirstevent]-yearsPriortToEvent 3. adjust the series by * yearsActual[indexofthelasteventinseries]+yearsAfterTheEvent */ for (int segmentIndex = 0; segmentIndex < firstYearsArray.size(); segmentIndex++) { beginingYearAdj = chronologyYears.get(0).intValue(); lastYearAdj = chronologyYears.get(chronologyYears.size() - 1).intValue(); firstYearOfProcess = firstYearsArray.get(segmentIndex); lastYearOfProcess = lastYearsArray.get(segmentIndex); if (firstYearOfProcess.intValue() > beginingYearAdj) { beginingYearAdj = firstYearOfProcess.intValue(); } if (lastYearOfProcess.intValue() < lastYearAdj) { lastYearAdj = lastYearOfProcess.intValue(); } /* * Obtain and display information on the Events actual Time span same as the adjusted. number of events. Events.size() and total * number of Events used. Mean years between events minimun differece between event years. 
* */ keventsinadj = new ArrayList<Integer>(); keventsinadjyeprior = new ArrayList<Integer>(); keventsinadjyeafter = new ArrayList<Integer>(); kevents = new ArrayList<Integer>(); numberOfEventsinAdj = 0; for (int i = 0; i < events.size(); i++) { if (chronologyYears.contains(events.get(i))) { // System.out.println("the chronologyYears contains event " + i + "\t" // + beginingYearAdj + "\t" + lastYearAdj); if ((beginingYearAdj <= events.get(i).intValue()) && (events.get(i).intValue() <= lastYearAdj)) { kevents.add(events.get(i)); } } if ((chronologyYears.contains(events.get(i))) && (!includeIncompleteEpochs)) { if (((events.get(i).intValue() - beginingYearAdj) >= yearsPriorToEvent.intValue()) && ((lastYearAdj - events.get(i).intValue()) >= yearsAfterTheEvent.intValue())) { numberOfEventsinAdj = numberOfEventsinAdj + 1; keventsinadj.add(events.get(i)); } ; } ;// end of exclude incomplete epochs if ((chronologyYears.contains(events.get(i))) && (includeIncompleteEpochs)) { if ((beginingYearAdj <= events.get(i).intValue()) && (events.get(i).intValue() <= lastYearAdj)) { numberOfEventsinAdj = numberOfEventsinAdj + 1; keventsinadj.add(events.get(i)); // if ((events.get(i).intValue() - beginingYearAdj) < yearsPriorToEvent.intValue()) { keventsinadjyeprior.add(events.get(i).intValue() - beginingYearAdj); } else { keventsinadjyeprior.add(yearsPriorToEvent); } if ((lastYearAdj - events.get(i).intValue()) < yearsAfterTheEvent.intValue()) { keventsinadjyeafter.add(lastYearAdj - events.get(i).intValue()); } else { keventsinadjyeafter.add(yearsAfterTheEvent.intValue()); } // } ; } ; // end of include incomplete } ;// end of the loop for all events /* * set up if statement so that if we have two or less key events in the chronology we do not do anything */ // System.out.println("size of kevents is " + kevents.size()); if (kevents.size() >= 2) { keyEvents = new int[kevents.size()]; for (int i = 0; i < kevents.size(); i++) { keyEvents[i] = kevents.get(i).intValue(); } ; /* * Sorting 
keyEvents */ Arrays.sort(keyEvents); if (keventsinadj.size() >= 2) { keyEventsAdj = new int[numberOfEventsinAdj]; keyEventsAdjBeYear = new int[numberOfEventsinAdj]; keyEventsAdjLaYear = new int[numberOfEventsinAdj]; for (int i = 0; i < keventsinadj.size(); i++) { keyEventsAdj[i] = keventsinadj.get(i).intValue(); keyEventsAdjBeYear[i] = keyEventsAdj[i] - yearsPriorToEvent.intValue(); keyEventsAdjLaYear[i] = keyEventsAdj[i] + yearsAfterTheEvent.intValue(); } ; Arrays.sort(keyEventsAdj); // Calculate the difference between events load in array diffBetweenEvents = new double[keyEvents.length - 1]; sumOfDiff = 0; for (int i = 1; i < keyEvents.length; i++) { diffBetweenEvents[i - 1] = keyEvents[i] - keyEvents[i - 1]; sumOfDiff = sumOfDiff + diffBetweenEvents[i - 1]; } ; // Calculate the mean difference between events = // sum(y(i)-y(i-1))/total number of differences meanDiffBetweenEvents = sumOfDiff / diffBetweenEvents.length; // adjusting the beginning year that that it account for the events // years // and the beginning year of the process etc beginingYearAdj = Math.max(beginingYearAdj, (keyEvents[0] - yearsPriorToEvent)); lastYearAdj = Math.min(lastYearAdj, (keyEvents[keyEvents.length - 1] + yearsAfterTheEvent)); DescriptiveStatistics statsAdj = new DescriptiveStatistics(); chronoAdj = new double[lastYearAdj - beginingYearAdj + 1]; // Add data from the array for (int i = beginingYearAdj; i < lastYearAdj + 1; i++) { statsAdj.addValue(chronoActual[chronologyYears.indexOf(i)].doubleValue()); chronoAdj[i - beginingYearAdj] = chronoActual[chronologyYears.indexOf(i)].doubleValue(); } ; // Obtain the mean sensativity meanSensitivityAdj = 0; for (int i = 1; i < chronoAdj.length; i++) { double senDenominatorAdj = Math.abs(chronoAdj[i]) + Math.abs(chronoAdj[i - 1]); if (senDenominatorAdj != 0) { meanSensitivityAdj = meanSensitivityAdj + Math.abs(2 * (chronoAdj[i] - chronoAdj[i - 1])) / senDenominatorAdj; } } meanSensitivityAdj = meanSensitivityAdj / (chronoAdj.length - 1); /* 
* Obtain and display the general statistical information on the whole time series data. */ meanAdj = statsAdj.getMean(); stdAdj = statsAdj.getStandardDeviation(); medianAdj = StatUtils.percentile(chronoAdj, 50); kurtAdj = statsAdj.getKurtosis(); skewAdj = statsAdj.getSkewness(); // new PearsonsCorrelation().correlation(chronoAdj, chronoAdj); double autoNumSum = 0.0; double autoDemSum = 0.0; System.out.println("the length of chronoAdj is " + chronoAdj.length); for (int j = 0; j < (chronoAdj.length - 1); j++) { // System.out.println("j is: "+j + "mean is "+ meanAdj + "chronoadj is "+chronoAdj[j] ); autoNumSum = autoNumSum + (chronoAdj[j] - meanAdj) * (chronoAdj[j + 1] - meanAdj); } for (int j = 0; j < chronoAdj.length; j++) { autoDemSum = autoDemSum + (chronoAdj[j] - meanAdj) * (chronoAdj[j] - meanAdj); } autocorrelationAdj = autoNumSum / autoDemSum; // autocorrelationAdj=new PearsonsCorrelation().correlation(chronoAdj, chronoAdj); System.out.println("the autocorrelation of the adjustchonology is: " + autocorrelationAdj); /* * Calculate the statistical information per window of the Actual Events. load the values of the choronoActual per * window in window into a two dimensional array calculate the mean per row calculate the standard deviation per row * calculate end values of the confidence interval for 95%,99%.99.9% per row */ // Definition of the length of the window of interest. 
lengthOfWindow = yearsPriorToEvent + yearsAfterTheEvent + 1; // define the two dimensional array for the calculations of the Actual // Event windows stats meanByWindow = new double[lengthOfWindow]; varianceByWindow = new double[lengthOfWindow]; standardDevByWindow = new double[lengthOfWindow]; maximunByWindow = new double[lengthOfWindow]; minimunByWindow = new double[lengthOfWindow]; eventWindowsAct = new double[lengthOfWindow][]; eventWindowPattern = new int[lengthOfWindow][]; Simnumdates = new int[lengthOfWindow]; test = new ArrayList<Double>(); for (int k = 0; k < lengthOfWindow; k++) { eventWindowPattern[k] = new int[keventsinadj.size()]; int kWindow = k - yearsPriorToEvent.intValue(); for (int i = 0; i < keventsinadj.size(); i++) { if ((beginingYearAdj <= (keventsinadj.get(i).intValue() + kWindow)) && ((keventsinadj.get(i).intValue() + kWindow) <= lastYearAdj)) { test.add(chronologyActual .get(chronologyYears.indexOf(keventsinadj.get(i).intValue() + kWindow))); eventWindowPattern[k][i] = 1; } else { eventWindowPattern[k][i] = 0; } } Simnumdates[k] = test.size(); eventWindowsAct[k] = new double[test.size()]; // new line for (int ij = 0; ij < test.size(); ij++) { eventWindowsAct[k][ij] = test.get(ij).doubleValue(); } test.clear(); meanByWindow[k] = StatUtils.mean(eventWindowsAct[k]); varianceByWindow[k] = StatUtils.variance(eventWindowsAct[k]); standardDevByWindow[k] = Math.sqrt(varianceByWindow[k]); maximunByWindow[k] = StatUtils.max(eventWindowsAct[k]); minimunByWindow[k] = StatUtils.min(eventWindowsAct[k]); } // end k loop Arrays.sort(Simnumdates); temp = Simnumdates[0]; leftEndPoint = new double[lengthOfWindow][3]; rightEndPoint = new double[lengthOfWindow][3]; for (int i = 0; i < lengthOfWindow; i++) { for (int j = 0; j < 3; j++) { leftEndPoint[i][j] = meanByWindow[i] - stdDevMultiplier[j] * standardDevByWindow[i]; rightEndPoint[i][j] = meanByWindow[i] + stdDevMultiplier[j] * standardDevByWindow[i]; } } /* * calculate the percentile Marks for simulation 
table */ percentileMark = new int[4]; percentileMark[1] = (int) Math.max(Math.round(this.numberOfSimulations / 40.0), 1) - 1; percentileMark[3] = (int) Math.max(Math.round(this.numberOfSimulations / 200.0), 1) - 1; percentileMark[0] = this.numberOfSimulations - percentileMark[1] - 1; percentileMark[2] = this.numberOfSimulations - percentileMark[3] - 1; // System.out.println("percentailmarks "+percentileMark[0]+" , " // +percentileMark[1]+" , " + percentileMark[2]+" , " + // percentileMark[3]); // start the simulations: by selecting events.size() number of random // years Random myrand = new Random(); myrand.setSeed(seedNumber); double[][] meanByWindowSim = new double[lengthOfWindow][this.numberOfSimulations]; int[] eventYearSimulation = new int[keventsinadj.size()];// changed // keventsinadj.size() // by temp double[][] eventWindowsSims = new double[lengthOfWindow][]; simulationtest = new ArrayList<Double>(); /* * Simulation Start */ System.out .println("Before Simulation Time " + (System.currentTimeMillis() - begintime) / 1000F); for (int ii = 0; ii < this.numberOfSimulations; ii++) { for (int i = 0; i < keventsinadj.size(); i++) { // Here add the two if statement for include and exclude so the // range of the selection of years if (includeIncompleteEpochs) { eventYearSimulation[i] = (beginingYearAdj + keventsinadjyeprior.get(i).intValue()) + myrand.nextInt((lastYearAdj - keventsinadjyeafter.get(i).intValue()) - (beginingYearAdj + keventsinadjyeprior.get(i).intValue()) + 1); } if (!includeIncompleteEpochs) { eventYearSimulation[i] = (beginingYearAdj + 6) + myrand.nextInt((lastYearAdj - 4) - (beginingYearAdj + 6) + 1); } } // end i loop Arrays.sort(eventYearSimulation); // System.out.println("after selection of key events in sim " + ii + " time " + (System.currentTimeMillis() - // start) / 1000F); /* * Once the events have been simulated build the two sised matrix (lengthOfWindow) by events.size() */ for (int k = 0; k < lengthOfWindow; k++) { eventWindowsSims[k] = 
new double[keventsinadj.size()];// new line int kWindow = k - yearsPriorToEvent.intValue(); for (int i = 0; i < keventsinadj.size(); i++) { if (eventWindowPattern[k][i] == 1) { simulationtest.add(chronologyActual .get(chronologyYears.indexOf(eventYearSimulation[i] + kWindow))); } } // i loop eventWindowsSims[k] = new double[simulationtest.size()]; // new // line for (int ij = 0; ij < simulationtest.size(); ij++) { eventWindowsSims[k][ij] = simulationtest.get(ij).doubleValue(); } // edn ij loop simulationtest.clear(); meanByWindowSim[k][ii] = StatUtils.mean(eventWindowsSims[k]); } // end k loop numberofsimulation loop } // end simulatrion loop System.out.println("I am done with simulation"); // calculate the mean of the means double sum = 0.0; meanMeanByWindow = new double[lengthOfWindow]; varianceMeanByWindow = new double[lengthOfWindow]; standardDevMeanByWindow = new double[lengthOfWindow]; maxMeanByWindow = new double[lengthOfWindow]; minMeanByWindow = new double[lengthOfWindow]; double[] tempMeanMean = new double[this.numberOfSimulations]; leftEndPointPer = new double[lengthOfWindow][2]; rightEndPointPer = new double[lengthOfWindow][2]; for (int i = 0; i < lengthOfWindow; i++) { // int kWindow = i - yearsPriorToEvent.intValue(); for (int k = 0; k < this.numberOfSimulations; k++) { // for(int k=0;k < (Integer)numberOfSimulations.intValue();k++){ if (k < 1) { // /eSystem.out.println("on the " +i+","+k+" the value is " + // meanByWindowSim[i][k]); } ; tempMeanMean[k] = meanByWindowSim[i][k]; sum = sum + tempMeanMean[k]; // System.out.println("tempMeanMean is " + tempMeanMean[k]); } meanMeanByWindow[i] = StatUtils.mean(tempMeanMean); varianceMeanByWindow[i] = StatUtils.variance(tempMeanMean); standardDevMeanByWindow[i] = Math.sqrt(varianceMeanByWindow[i]); Arrays.sort(tempMeanMean); maxMeanByWindow[i] = StatUtils.max(tempMeanMean); minMeanByWindow[i] = StatUtils.min(tempMeanMean); leftEndPointPer[i][0] = tempMeanMean[percentileMark[1]]; rightEndPointPer[i][0] = 
tempMeanMean[percentileMark[0]]; leftEndPointPer[i][1] = tempMeanMean[percentileMark[3]]; rightEndPointPer[i][1] = tempMeanMean[percentileMark[2]]; // System.out.println("[ "+ // Math.round(leftEndPoint[i][j]*1000.0)/1000.0 + " , " + // Math.round(rightEndPoint[i][j]*1000.0)/1000.0+"]"); // System.out.println("meanMeanByWindow is " + meanMeanByWindow[i]); if (i < 1) { // /eSystem.out.println("the window "+i+" has mean: " + // Math.round(meanMeanByWindow[i]*1000.0)/1000.0); } ; // System.out.println("the window "+i+" has variance: " + // Math.round(varianceMeanByWindow[i]*1000.0)/1000.0); // System.out.println("the window "+i+" has standard dev: " + // Math.round(standardDevMeanByWindow[i]*1000.0)/1000.0); } ;// end of i loop // }//end of ikj loop // Calculate the confidence interval for 95%,99%,99.9% leftEndPointSim = new double[lengthOfWindow][3]; rightEndPointSim = new double[lengthOfWindow][3]; for (int i = 0; i < lengthOfWindow; i++) { for (int j = 0; j < 3; j++) { leftEndPointSim[i][j] = meanMeanByWindow[i] - stdDevMultiplier[j] * standardDevMeanByWindow[i]; rightEndPointSim[i][j] = meanMeanByWindow[i] + stdDevMultiplier[j] * standardDevMeanByWindow[i]; // System.out.println("[ "+ // Math.round(leftEndPoint[i][j]*1000.0)/1000.0 + " , " + // Math.round(rightEndPoint[i][j]*1000.0)/1000.0+"]"); } } // }//end of ikj loop /* * detecting which p-level was selected in gui */ if (alphaLevel95) { alphaLevel = 0; } else if (alphaLevel99) { alphaLevel = 1; } else { alphaLevel = 2; } /* * adding the chart and the creation on the buffer here */ // BarChartParametersModel m = new BarChartParametersModel(titleForRun, meanByWindow, lengthOfWindow, yearsPriorToEvent, // yearsAfterTheEvent, leftEndPointSim, rightEndPointSim, outputFilePrefix, alphaLevel, segmentIndex, // firstYearsArray.size(), firstYearsArray.get(segmentIndex), lastYearsArray.get(segmentIndex)); BarChartParametersModel m = new BarChartParametersModel(titleForRun, meanByWindow, lengthOfWindow, 
yearsPriorToEvent, yearsAfterTheEvent, leftEndPointSim, rightEndPointSim, outputFilePrefix, alphaLevel, segmentIndex, firstYearsArray.size(), beginingYearAdj, lastYearAdj); // m.setChart(new JSEABarChart(m).getChart()); this.chartList.add(m); /* * try { // ChartUtilities.saveChartAsJPEG(new File(outputFilePrefix+"chart"+ikj+ ".jpg"), chart, 500, 300); * ChartUtilities.saveChartAsJPEG(new File(outputFilePrefix+"chart.jpg"), chart, 500, 300); } catch (IOException ex) { * System.err.println(ex.getLocalizedMessage()); } */ // Date now = new Date(); // System.out.println("the date today is: " + now); // adding the cdbuffer stuff log.debug("the value of the beginingyear of the adj crono is " + beginingYearAdj); String delim = ","; cdbuffer = cdbuffer + "Range:" + "\n"; cdbuffer = cdbuffer + beginingYearAdj + delim + lastYearAdj + "\n"; cdbuffer = cdbuffer + "Lags" + delim + "Events Mean" + delim + "95% CONF INT" + delim + "95% CONF INT" + delim + "99% CONF INT" + delim + "99% CONF INT" + delim + "99.9% CONF INT" + delim + "99.9% CONF INT" + delim + "\n"; for (int i = 0; i < lengthOfWindow; i++) { cdbuffer = cdbuffer + (i - yearsPriorToEvent.intValue()) + delim + threePlacess.format(meanByWindow[i]) + delim + threePlacess.format(leftEndPointSim[i][0]) + delim + threePlacess.format(rightEndPointSim[i][0]) + delim + threePlacess.format(leftEndPointSim[i][1]) + "," + threePlacess.format(rightEndPointSim[i][1]) + delim + threePlacess.format(leftEndPointSim[i][2]) + delim + threePlacess.format(rightEndPointSim[i][2]) + "\n"; } // adding the bigbuffer and pdfbufferpar1 stuff // Paragraph pdfbufferpar11 = new Paragraph( ); report = report + "\n"; report = report + "SUPERPOSED EPOCH ANALYSIS RESULTS" + "\n"; report = report + "Date: " + now + "\n"; report = report + "Name of the time series file: " + chronologyFile; pdfbufferpar1 = pdfbufferpar1 + "\n"; pdfbufferpar1 = pdfbufferpar1 + "SUPERPOSED EPOCH ANALYSIS RESULTS" + "\n"; pdfbufferpar1 = pdfbufferpar1 + "Date: " + now + 
"\n"; pdfbufferpar1 = pdfbufferpar1 + "Name of the time series file: " + chronologyFile; if (firstYearOfProcess.intValue() > chronologyYears.get(0).intValue()) { report = report + "\n" + "First Year= " + firstYearOfProcess; pdfbufferpar1 = pdfbufferpar1 + "\n" + "First Year= " + firstYearOfProcess; } else { report = report + "\n" + "First Year= " + chronologyYears.get(0); pdfbufferpar1 = pdfbufferpar1 + "\n" + "First Year= " + chronologyYears.get(0); } if (lastYearOfProcess.intValue() < chronologyYears.get(chronologyYears.size() - 1).intValue()) { report = report + "\n" + "Last Year= " + lastYearOfProcess; pdfbufferpar1 = pdfbufferpar1 + "\n" + "Last Year= " + lastYearOfProcess; } else { report = report + "\n" + "Last Year= " + chronologyYears.get(chronologyYears.size() - 1); pdfbufferpar1 = pdfbufferpar1 + "\n" + "Last Year= " + chronologyYears.get(chronologyYears.size() - 1); } /* * Display the general statistical information on the Adjusted time series data. */ report = report + "\n" + "DESCRIPTIVE STATISTICS INFORMATION ABOUT THE ADJUSTED CONTINUOUS TIME SERIES: " + "\n" + "\n"; report = report + "\t" + "The adjusted time series RANGES from " + beginingYearAdj + " to " + lastYearAdj + "\n"; report = report + "\t" + "The NUMBER OF YEARS in the adjusted time series is " + chronoAdj.length + "\n"; report = report + "\t" + "MEAN of the adjusted time series is " + threePlacess.format(meanAdj) + "\n"; report = report + "\t" + "MEDIAN of the adjusted time series is " + threePlacess.format(medianAdj) + "\n"; report = report + "\t" + "MEAN SENSITIVITY for the adjusted time series is " + threePlacess.format(meanSensitivityAdj) + "\n"; report = report + "\t" + "STANDARD DEVIATION of the adjusted time series is " + threePlacess.format(stdAdj) + "\n"; report = report + "\t" + "SKEWNESS of the adjusted time series is " + threePlacess.format(skewAdj) + "\n"; report = report + "\t" + "KURTOSIS of the adjusted time series is " + threePlacess.format(kurtAdj) + "\n"; report = 
report + "\t" + "First Order AUTOCORRELATION Index of the adjusted time series is " + threePlacess.format(autocorrelationAdj) + "\n"; /* * save the general statistical information on the Adjusted time series data in pdf fie. */ pdfbufferpar1 = pdfbufferpar1 + "\n" + "DESCRIPTIVE STATISTICS INFORMATION ABOUT THE ADJUSTED CONTINUOUS TIME SERIES: " + "\n" + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "The adjusted time series RANGES from " + beginingYearAdj + " to " + lastYearAdj + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "The NUMBER OF YEARS in the adjusted time series is " + chronoAdj.length + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "MEAN of the adjusted time series is " + threePlacess.format(meanAdj) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "MEDIAN of the adjusted time series is " + threePlacess.format(medianAdj) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "MEAN SENSITIVITY for the adjusted time series is " + threePlacess.format(meanSensitivityAdj) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "STANDARD DEVIATION of the adjusted time series is " + threePlacess.format(stdAdj) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "SKEWNESS of the adjusted time series is " + threePlacess.format(skewAdj) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "KURTOSIS of the adjusted time series is " + threePlacess.format(kurtAdj) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "First Order AUTOCORRELATION Index of the adjusted time series is " + threePlacess.format(autocorrelationAdj) + "\n"; /* * Display the general information on the Actual Event list. 
*/ report = report + "\n" + "THE INFORMATION ON THE ACTUAL KEY EVENTS IS" + "\n" + "\n"; report = report + "\t" + "Number of key events: " + keyEvents.length + "\n"; report = report + "\t" + "Number of key events used in analysis: " + numberOfEventsinAdj + "\n"; report = report + "\t" + "Mean years between events is " + threePlacess.format(meanDiffBetweenEvents) + "\n"; report = report + "\t" + "Minimum difference is " + StatUtils.min(diffBetweenEvents) + "\n"; /* * write the general information on the Actual Event list to pdf file. */ pdfbufferpar1 = pdfbufferpar1 + "\n" + "THE INFORMATION ON THE ACTUAL KEY EVENTS IS" + "\n" + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "Number of key events: " + keyEvents.length + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "Number of key events used in analysis: " + numberOfEventsinAdj + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "Mean years between events is " + threePlacess.format(meanDiffBetweenEvents) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "Minimum difference is " + StatUtils.min(diffBetweenEvents) + "\n"; pdfbufferpar11.add(pdfbufferpar1); para1.add(pdfbufferpar11); printTableActFlag.add(true); /* * Write out everything that goes into the actualTable. 
*/ PdfPTable tableAct = new PdfPTable(7); if (isFirstIteration) { String tempStrA = ""; if (alphaLevel95) { tempStrA = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", " ADJ SEG ", " LAGS ", " MEAN ", "STA DEV", " 95% CONF INT ", " MIN ", " MAX "); } else if (alphaLevel99) { tempStrA = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", " ADJ SEG ", " LAGS ", " MEAN ", "STA DEV", " 99% CONF INT ", " MIN ", " MAX "); } else if (alphaLevel999) { tempStrA = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", " ADJ SEG ", " LAGS ", " MEAN ", "STA DEV", " 99.9% CONF INT ", " MIN ", " MAX "); } report = report + tempStrA + "\n"; actualTable = actualTable + tempStrA.substring(1) + "\n"; PdfPCell cell00A = new PdfPCell(new Paragraph(" ADJ SEG ")); tableAct.addCell(cell00A); PdfPCell cell01A = new PdfPCell(new Paragraph(" LAGS ")); tableAct.addCell(cell01A); PdfPCell cell02A = new PdfPCell(new Paragraph(" MEAN ")); tableAct.addCell(cell02A); PdfPCell cell03A = new PdfPCell(new Paragraph(" STA DEV ")); tableAct.addCell(cell03A); if (alphaLevel95) { PdfPCell cell04A = new PdfPCell(new Paragraph(" 95% CONF INT ")); tableAct.addCell(cell04A); } else if (alphaLevel99) { PdfPCell cell04A = new PdfPCell(new Paragraph(" 99% CONF INT ")); tableAct.addCell(cell04A); } else if (alphaLevel999) { PdfPCell cell04A = new PdfPCell(new Paragraph(" 99.9% CONF INT ")); tableAct.addCell(cell04A); } PdfPCell cell05A = new PdfPCell(new Paragraph(" MIN ")); tableAct.addCell(cell05A); PdfPCell cell06A = new PdfPCell(new Paragraph(" MAX ")); tableAct.addCell(cell06A); } for (int i = 0; i < lengthOfWindow; i++) { if (alphaLevel95) { pdfbufferA = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", // (firstYearsArray.get(segmentIndex) + " - " + lastYearsArray.get(segmentIndex)), beginingYearAdj + " - " + 
lastYearAdj, (i - yearsPriorToEvent.intValue()), threePlacess.format(meanByWindow[i]), threePlacess.format(standardDevByWindow[i]), "[" + threePlacess.format(leftEndPoint[i][0]) + "," + threePlacess.format(rightEndPoint[i][0]) + "]", threePlacess.format(minimunByWindow[i]), threePlacess.format(maximunByWindow[i])); } else if (alphaLevel99) { pdfbufferA = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", // (firstYearsArray.get(segmentIndex) + " - " + lastYearsArray.get(segmentIndex)), beginingYearAdj + " - " + lastYearAdj, (i - yearsPriorToEvent.intValue()), threePlacess.format(meanByWindow[i]), threePlacess.format(standardDevByWindow[i]), "[" + threePlacess.format(leftEndPoint[i][1]) + "," + threePlacess.format(rightEndPoint[i][1]) + "]", threePlacess.format(minimunByWindow[i]), threePlacess.format(maximunByWindow[i])); } else if (alphaLevel999) { pdfbufferA = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", // (firstYearsArray.get(segmentIndex) + " - " + lastYearsArray.get(segmentIndex)), beginingYearAdj + " - " + lastYearAdj, (i - yearsPriorToEvent.intValue()), threePlacess.format(meanByWindow[i]), threePlacess.format(standardDevByWindow[i]), "[" + threePlacess.format(leftEndPoint[i][2]) + "," + threePlacess.format(rightEndPoint[i][2]) + "]", threePlacess.format(minimunByWindow[i]), threePlacess.format(maximunByWindow[i])); } report = report + pdfbufferA + "\n"; actualTable = actualTable + pdfbufferA.substring(1) + "\n"; PdfPCell cell00A = new PdfPCell(new Paragraph( firstYearsArray.get(segmentIndex) + " - " + lastYearsArray.get(segmentIndex))); tableAct.addCell(cell00A); PdfPCell cell01A = new PdfPCell(new Paragraph((i - yearsPriorToEvent.intValue()))); tableAct.addCell(cell01A); PdfPCell cell02A = new PdfPCell(new Paragraph(threePlacess.format(meanByWindow[i]))); tableAct.addCell(cell02A); PdfPCell cell03A = new PdfPCell(new 
Paragraph(threePlacess.format(standardDevByWindow[i]))); tableAct.addCell(cell03A); if (alphaLevel95) { PdfPCell cell04A = new PdfPCell( new Paragraph("[" + threePlacess.format(leftEndPoint[i][0]) + "," + threePlacess.format(rightEndPoint[i][0]) + "]")); tableAct.addCell(cell04A); } else if (alphaLevel99) { PdfPCell cell04A = new PdfPCell( new Paragraph("[" + threePlacess.format(leftEndPoint[i][1]) + "," + threePlacess.format(rightEndPoint[i][1]) + "]")); tableAct.addCell(cell04A); } else if (alphaLevel999) { PdfPCell cell04A = new PdfPCell( new Paragraph("[" + threePlacess.format(leftEndPoint[i][2]) + "," + threePlacess.format(rightEndPoint[i][2]) + "]")); tableAct.addCell(cell04A); } PdfPCell cell05A = new PdfPCell(new Paragraph(threePlacess.format(minimunByWindow[i]))); tableAct.addCell(cell05A); PdfPCell cell06A = new PdfPCell(new Paragraph(threePlacess.format(maximunByWindow[i]))); tableAct.addCell(cell06A); } printTableAct.add(tableAct); /* * Display the general information on the Simulations. (Normality is assumed) */ report = report + "\n" + "SIMULATIONS RESULTS: " + "\n" + "\n"; report = report + "\t" + "NUMBER OF SIMULATIONS is: " + this.numberOfSimulations + "\n"; report = report + "\t" + "RANDOM SEED: " + seedNumber + "\n"; /* * Save the general information on the Simulations. (Normality is assumed) for the pdf file */ pdfbufferpar2 = pdfbufferpar2 + "\n" + "SIMULATIONS RESULTS: " + "\n" + "\n"; pdfbufferpar2 = pdfbufferpar2 + "\t" + "NUMBER OF SIMULATIONS is: " + numberOfSimulations + "\n"; pdfbufferpar2 = pdfbufferpar2 + "\t" + "RANDOM SEED: " + seedNumber + "\n"; pdfbufferpar12.add(pdfbufferpar2); para2.add(pdfbufferpar12); /* * Write out everything that goes into the simulationTable. 
*/ PdfPTable tableSim = new PdfPTable(7); if (isFirstIteration) { String tempStrB = ""; if (alphaLevel95) { tempStrB = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", " ADJ SEG ", " LAGS ", " MEAN ", "STA DEV", " 95% CONF INT ", " MIN ", " MAX "); } else if (alphaLevel99) { tempStrB = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", " ADJ SEG ", " LAGS ", " MEAN ", "STA DEV", " 99% CONF INT ", " MIN ", " MAX "); } else if (alphaLevel999) { tempStrB = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", " ADJ SEG ", " LAGS ", " MEAN ", "STA DEV", " 99.9% CONF INT ", " MIN ", " MAX "); } report = report + tempStrB + "\n"; simulationTable = simulationTable + tempStrB.substring(1) + "\n"; PdfPCell cell00B = new PdfPCell(new Paragraph(" ADJ SEG ")); tableSim.addCell(cell00B); PdfPCell cell01B = new PdfPCell(new Paragraph(" LAGS ")); tableSim.addCell(cell01B); PdfPCell cell02B = new PdfPCell(new Paragraph(" MEAN ")); tableSim.addCell(cell02B); PdfPCell cell03B = new PdfPCell(new Paragraph(" STA DEV ")); tableSim.addCell(cell03B); if (alphaLevel95) { PdfPCell cell04B = new PdfPCell(new Paragraph(" 95% CONF INT ")); tableSim.addCell(cell04B); } else if (alphaLevel99) { PdfPCell cell04B = new PdfPCell(new Paragraph(" 99% CONF INT ")); tableSim.addCell(cell04B); } else if (alphaLevel999) { PdfPCell cell04B = new PdfPCell(new Paragraph(" 99.9% CONF INT ")); tableSim.addCell(cell04B); } PdfPCell cell05B = new PdfPCell(new Paragraph(" MIN ")); tableSim.addCell(cell05B); PdfPCell cell06B = new PdfPCell(new Paragraph(" MAX ")); tableSim.addCell(cell06B); isFirstIteration = false; } for (int i = 0; i < lengthOfWindow; i++) { if (alphaLevel95) { pdfbufferB = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", // (firstYearsArray.get(segmentIndex) + " - " + 
lastYearsArray.get(segmentIndex)), beginingYearAdj + " - " + lastYearAdj, (i - yearsPriorToEvent.intValue()), threePlacess.format(meanMeanByWindow[i]), threePlacess.format(standardDevMeanByWindow[i]), "[" + threePlacess.format(leftEndPointSim[i][0]) + "," + threePlacess.format(rightEndPointSim[i][0]) + "]", threePlacess.format(minMeanByWindow[i]), threePlacess.format(maxMeanByWindow[i])); } else if (alphaLevel99) { pdfbufferB = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", // (firstYearsArray.get(segmentIndex) + " - " + lastYearsArray.get(segmentIndex)), beginingYearAdj + " - " + lastYearAdj, (i - yearsPriorToEvent.intValue()), threePlacess.format(meanMeanByWindow[i]), threePlacess.format(standardDevMeanByWindow[i]), "[" + threePlacess.format(leftEndPointSim[i][1]) + "," + threePlacess.format(rightEndPointSim[i][1]) + "]", threePlacess.format(minMeanByWindow[i]), threePlacess.format(maxMeanByWindow[i])); } else if (alphaLevel999) { pdfbufferB = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", // (firstYearsArray.get(segmentIndex) + " - " + lastYearsArray.get(segmentIndex)), beginingYearAdj + " - " + lastYearAdj, (i - yearsPriorToEvent.intValue()), threePlacess.format(meanMeanByWindow[i]), threePlacess.format(standardDevMeanByWindow[i]), "[" + threePlacess.format(leftEndPointSim[i][2]) + "," + threePlacess.format(rightEndPointSim[i][2]) + "]", threePlacess.format(minMeanByWindow[i]), threePlacess.format(maxMeanByWindow[i])); } report = report + pdfbufferB + "\n"; simulationTable = simulationTable + pdfbufferB.substring(1) + "\n"; PdfPCell cell00B = new PdfPCell(new Paragraph( firstYearsArray.get(segmentIndex) + " - " + lastYearsArray.get(segmentIndex))); tableSim.addCell(cell00B); PdfPCell cell01B = new PdfPCell(new Paragraph((i - yearsPriorToEvent.intValue()))); tableSim.addCell(cell01B); PdfPCell cell02B = new PdfPCell(new 
Paragraph(threePlacess.format(meanMeanByWindow[i]))); tableSim.addCell(cell02B); PdfPCell cell03B = new PdfPCell( new Paragraph(threePlacess.format(standardDevMeanByWindow[i]))); tableSim.addCell(cell03B); if (alphaLevel95) { PdfPCell cell04B = new PdfPCell( new Paragraph("[" + threePlacess.format(leftEndPointSim[i][0]) + "," + threePlacess.format(rightEndPointSim[i][0]) + "]")); tableSim.addCell(cell04B); // PdfPCell cell05B = new PdfPCell(new Paragraph("[" + threePlacess.format(leftEndPointPer[i][0]) + "," // + threePlacess.format(rightEndPointPer[i][0]) + "]")); // tableSim.addCell(cell05B); } else if (alphaLevel99) { PdfPCell cell04B = new PdfPCell( new Paragraph("[" + threePlacess.format(leftEndPointSim[i][1]) + "," + threePlacess.format(rightEndPointSim[i][1]) + "]")); tableSim.addCell(cell04B); // PdfPCell cell05B = new PdfPCell(new Paragraph("[" + threePlacess.format(leftEndPointPer[i][0]) + "," // + threePlacess.format(rightEndPointPer[i][0]) + "]")); // tableSim.addCell(cell05B); } else if (alphaLevel999) { PdfPCell cell04B = new PdfPCell( new Paragraph("[" + threePlacess.format(leftEndPointSim[i][2]) + "," + threePlacess.format(rightEndPointSim[i][2]) + "]")); tableSim.addCell(cell04B); // PdfPCell cell05B = new PdfPCell(new Paragraph("[" + threePlacess.format(leftEndPointPer[i][0]) + "," // + threePlacess.format(rightEndPointPer[i][0]) + "]")); // tableSim.addCell(cell05B); } PdfPCell cell06B = new PdfPCell(new Paragraph(threePlacess.format(minMeanByWindow[i]))); tableSim.addCell(cell06B); PdfPCell cell07B = new PdfPCell(new Paragraph(threePlacess.format(maxMeanByWindow[i]))); tableSim.addCell(cell07B); } printTableSim.add(tableSim); } // end of if keventsinadj >=2 else { cdbuffer = cdbuffer + "Range:" + "\n"; cdbuffer = cdbuffer + beginingYearAdj + "," + lastYearAdj + "\n"; cdbuffer = cdbuffer + "Segment: " + (segmentIndex + 1) + "has not enough events to run the analysis" + "\n"; // ADDED SO THAT BAD SEGMENTS CANNOT BE SELECTED FOR DISPLAY ON THE 
CHART segmentTable.tableModel.getSegment(segmentIndex).setBadSegmentFlag(true); printTableActFlag.add(false); pdfbufferpar1 = pdfbufferpar1 + "\n"; pdfbufferpar1 = pdfbufferpar1 + "SUPERPOSED EPOCH ANALYSIS RESULTS" + "\n"; pdfbufferpar1 = pdfbufferpar1 + "Date: " + now + "\n"; pdfbufferpar1 = pdfbufferpar1 + "Name of the time series file: " + chronologyFile + "\n"; if (firstYearOfProcess.intValue() > chronologyYears.get(0).intValue()) { report = report + "\n" + "The First year processed: " + firstYearOfProcess + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The First year processed: " + firstYearOfProcess + "\n"; } else { report = report + "\n" + "The First year processed " + chronologyYears.get(0) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The First year processed " + chronologyYears.get(0) + "\n"; } if (lastYearOfProcess.intValue() < chronologyYears.get(chronologyYears.size() - 1).intValue()) { report = report + "\n" + "The last year of the process is " + lastYearOfProcess + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The last year of the process is " + lastYearOfProcess + "\n"; } else { report = report + "\n" + "The last year of the process is " + chronologyYears.get(chronologyYears.size() - 1) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The last year of the process is " + chronologyYears.get(chronologyYears.size() - 1) + "\n"; } report = report + "Not enough events within the window in the time series (or segment of the time series) to proceed with the analysis " + keventsinadj.size() + "\n"; pdfbufferpar1 = pdfbufferpar1 + "Not enough events within the window in the time series (or segment of the time series) to proceed with the analysis " + keventsinadj.size() + "\n"; } ;// end of else for if keventsinadd >=2 } // end of if kevents >=2 else { cdbuffer = cdbuffer + "Range:" + "\n"; cdbuffer = cdbuffer + beginingYearAdj + "," + lastYearAdj + "\n"; cdbuffer = cdbuffer + "Segement: " + (segmentIndex + 1) + "has not enough events to run the analysis" + 
"\n"; // ADDED SO THAT BAD SEGMENTS CANNOT BE SELECTED FOR DISPLAY ON THE CHART segmentTable.tableModel.getSegment(segmentIndex).setBadSegmentFlag(true); printTableActFlag.add(false); pdfbufferpar1 = pdfbufferpar1 + "\n"; pdfbufferpar1 = pdfbufferpar1 + "SUPERPOSED EPOCH ANALYSIS RESULTS" + "\n"; pdfbufferpar1 = pdfbufferpar1 + "Date: " + now + "\n"; pdfbufferpar1 = pdfbufferpar1 + "Name of the time series file: " + chronologyFile + "\n"; if (firstYearOfProcess.intValue() > chronologyYears.get(0).intValue()) { report = report + "\n" + "The First year processed: " + firstYearOfProcess + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The First year processed: " + firstYearOfProcess + "\n"; } else { report = report + "\n" + "The First year processed " + chronologyYears.get(0) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The First year processed " + chronologyYears.get(0) + "\n"; } if (lastYearOfProcess.intValue() < chronologyYears.get(chronologyYears.size() - 1).intValue()) { report = report + "\n" + "The last year of the process is " + lastYearOfProcess + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The last year of the process is " + lastYearOfProcess + "\n"; } else { report = report + "\n" + "The last year of the process is " + chronologyYears.get(chronologyYears.size() - 1) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The last year of the process is " + chronologyYears.get(chronologyYears.size() - 1) + "\n"; } report = report + "Not enough events in the time series (or segment of the time series) to proceed with the analysis " + kevents.size() + "\n"; pdfbufferpar1 = pdfbufferpar1 + "Not enough events in the time series (or segment of the time series) to proceed with the analysis " + kevents.size() + "\n"; } pdfbufferpar1 = ""; pdfbufferpar2 = ""; } ; // ending the huge loop ikj // ending of additions }
From source file:org.fusesource.eca.processor.StatisticsCalculator.java
protected void process(StatisticsType type, Number value, ObjectNode statsNode) throws Exception { EventCache<Number> cache = this.eventCache; if (value != null && cache != null) { cache.add(value);//from w ww . j av a 2 s .co m if (type.equals(StatisticsType.RATE)) { calculateRate(statsNode); } else { List<Number> list = this.eventCache.getWindow(); DescriptiveStatistics descriptiveStatistics = new DescriptiveStatistics(); if (list != null && !list.isEmpty()) { for (Number number : list) { descriptiveStatistics.addValue(number.doubleValue()); } switch (type) { case MEAN: statsNode.put("mean", descriptiveStatistics.getMean()); break; case GEOMETRIC_MEAN: statsNode.put("gemetric mean", descriptiveStatistics.getGeometricMean()); break; case STDDEV: statsNode.put("std-dev", descriptiveStatistics.getStandardDeviation()); break; case MIN: statsNode.put("minimum", descriptiveStatistics.getMin()); break; case MAX: statsNode.put("maximum", descriptiveStatistics.getMax()); break; case SKEWNESS: statsNode.put("skewness", descriptiveStatistics.getSkewness()); break; case KUTOSIS: statsNode.put("kurtosis", descriptiveStatistics.getKurtosis()); break; case VARIANCE: statsNode.put("variance", descriptiveStatistics.getVariance()); break; case COUNT: statsNode.put("count", list.size()); default: statsNode.put("number", descriptiveStatistics.getN()); statsNode.put("mean", descriptiveStatistics.getMean()); statsNode.put("gemetric mean", descriptiveStatistics.getGeometricMean()); statsNode.put("minimum", descriptiveStatistics.getMin()); statsNode.put("maximum", descriptiveStatistics.getMax()); statsNode.put("std-dev", descriptiveStatistics.getStandardDeviation()); statsNode.put("median", descriptiveStatistics.getPercentile(50)); statsNode.put("skewness", descriptiveStatistics.getSkewness()); statsNode.put("kurtosis", descriptiveStatistics.getKurtosis()); statsNode.put("variance", descriptiveStatistics.getVariance()); calculateRate(statsNode); statsNode.put("count", list.size()); } } 
} } }
From source file:org.jgap.gp.function.statistics.Skewness.java
@Override public double execute_double(ProgramChromosome c, int n, Object[] args) { int size = size(); DescriptiveStatistics stats = new DescriptiveStatistics(); for (int i = 0; i < size; i++) { stats.addValue(c.execute_double(n, i, args)); }/*from w w w. j ava2 s . co m*/ return stats.getSkewness(); }
From source file:org.jgap.gp.function.statistics.Skewness.java
@Override public float execute_float(ProgramChromosome c, int n, Object[] args) { int size = size(); DescriptiveStatistics stats = new DescriptiveStatistics(); for (int i = 0; i < size; i++) { stats.addValue(c.execute_float(n, i, args)); }//from w w w . ja v a2 s. c om return (float) stats.getSkewness(); }
From source file:playground.johannes.snowball2.GraphStatistic.java
protected TObjectDoubleHashMap<String> getStatisticsMap(DescriptiveStatistics stats) { TObjectDoubleHashMap<String> statsMap = new TObjectDoubleHashMap<String>(); statsMap.put(MIN_KEY, stats.getMin()); statsMap.put(MAX_KEY, stats.getMax()); statsMap.put(MEAN_KEY, stats.getMean()); statsMap.put(VARIANCE_KEY, stats.getVariance()); statsMap.put(SKEWNESS_KEY, stats.getSkewness()); statsMap.put(KURTOSIS_KEY, stats.getKurtosis()); return statsMap; }