List of usage examples for org.apache.commons.math.stat.descriptive DescriptiveStatistics getValues
public double[] getValues() — returns the current set of values in an array of double primitives; the order of addition is preserved, and the returned array is a fresh copy of the underlying data.
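Before the project-specific examples below, a minimal self-contained sketch of the call (plain Commons Math only; class and variable names here are illustrative):

import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;

public class GetValuesExample {
    public static void main(String[] args) {
        DescriptiveStatistics stats = new DescriptiveStatistics();
        stats.addValue(1.0);
        stats.addValue(2.5);
        stats.addValue(4.0);

        // getValues() returns a copy of the stored values in order of
        // addition, so the array can be sorted or modified freely without
        // affecting the statistics instance.
        double[] values = stats.getValues();
        System.out.println("n = " + values.length + ", mean = " + stats.getMean());
    }
}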
From source file:playground.johannes.gsv.synPop.analysis.AnalyzerTask.java
protected void writeHistograms(DescriptiveStatistics stats, String name, int bins, int minsize)
        throws IOException {
    double[] values = stats.getValues();
    if (values.length > 0) {
        if (overwrite) {
            logger.warn("Overwriting stratification!");
            bins = overwriteBins;
            minsize = overwriteMinsize;
        }
        TDoubleDoubleHashMap hist = Histogram.createHistogram(stats,
                FixedSampleSizeDiscretizer.create(values, minsize, bins), true);
        Histogram.normalize(hist);
        TXTWriter.writeMap(hist, name, "p", String.format("%1$s/%2$s.strat.txt", getOutputDirectory(), name));
    } else {
        logger.debug(String.format("Cannot create histogram: No samples for %s.", name));
    }
}
From source file:playground.johannes.gsv.synPop.sim3.HamiltonianLogger.java
@Override
public void afterStep(Collection<ProxyPerson> population, Collection<ProxyPerson> mutations, boolean accepted) {
    if (iter.get() % logInterval == 0) {
        long iterNow = iter.get();

        double[] values = new double[population.size()];
        int i = 0;
        for (ProxyPerson person : population) {
            values[i] = h.evaluate(person);
            i++;
        }

        DescriptiveStatistics stats = new DescriptiveStatistics(values);
        double sum = stats.getSum();
        double avr = stats.getMean();
        // double med = stats.getPercentile(50);
        double max = stats.getMax();
        double min = stats.getMin();

        StringBuilder builder = new StringBuilder();
        builder.append("Statistics for ");
        builder.append(h.getClass().getSimpleName());
        builder.append(String.format(Locale.US, ": Sum = %.4f, ", sum));
        builder.append(String.format(Locale.US, ": Avr = %.4f, ", avr));
        // builder.append(String.format(Locale.US, ": Med = %.4f, ", med));
        builder.append(String.format(Locale.US, ": Max = %.4f, ", max));
        builder.append(String.format(Locale.US, ": Min = %.4f", min));
        logger.info(builder.toString());

        if (writer != null) {
            try {
                writer.write(String.valueOf(iterNow));
                writer.write(TAB);
                writer.write(String.valueOf(sum));
                writer.write(TAB);
                writer.write(String.valueOf(avr));
                writer.write(TAB);
                // writer.write(String.valueOf(med));
                // writer.write(TAB);
                writer.write(String.valueOf(min));
                writer.write(TAB);
                writer.write(String.valueOf(max));
                writer.newLine();
                writer.flush();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        TDoubleDoubleHashMap hist = Histogram.createHistogram(stats,
                FixedSampleSizeDiscretizer.create(stats.getValues(), 1, 100), true);
        Histogram.normalize(hist);
        // String file = String.format("%s/%s.%s.txt", outdir, h.getClass().getSimpleName(), iterNow);
        String file = String.format("%s/%s", outdir, h.getClass().getSimpleName());
        File afile = new File(file);
        afile.mkdirs();
        try {
            TXTWriter.writeMap(hist, "value", "frequency", String.format("%s/%s.txt", file, iterNow));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    iter.incrementAndGet();
}
From source file:playground.johannes.sna.graph.analysis.AnalyzerTask.java
protected TDoubleDoubleHashMap writeHistograms(DescriptiveStatistics stats, String name, int bins, int minsize)
        throws IOException {
    double[] values = stats.getValues();
    if (values.length > 0) {
        TDoubleDoubleHashMap hist = Histogram.createHistogram(stats,
                FixedSampleSizeDiscretizer.create(values, minsize, bins), true);
        TXTWriter.writeMap(hist, name, "p",
                String.format("%1$s/%2$s.n%3$s.nonorm.txt", getOutputDirectory(), name, values.length / bins));
        Histogram.normalize(hist);
        TXTWriter.writeMap(hist, name, "p",
                String.format("%1$s/%2$s.n%3$s.txt", getOutputDirectory(), name, values.length / bins));
        return hist;
    } else {
        logger.warn("Cannot create histogram. No samples.");
        return null;
    }
}
From source file:playground.johannes.sna.graph.analysis.AnalyzerTask.java
protected TDoubleDoubleHashMap writeCumulativeHistograms(DescriptiveStatistics stats, String name, int bins,
        int minsize) throws IOException {
    double[] values = stats.getValues();
    if (values.length > 0) {
        TDoubleDoubleHashMap hist = Histogram.createHistogram(stats,
                FixedSampleSizeDiscretizer.create(values, minsize, bins), false, false);
        hist = Histogram.createCumulativeHistogram(hist);
        Histogram.normalizeCumulative(hist);
        Histogram.complementary(hist);
        TXTWriter.writeMap(hist, name, "P",
                String.format("%1$s/%2$s.n%3$s.cum.txt", getOutputDirectory(), name, values.length / bins));
        return hist;
    } else {
        logger.warn("Cannot create histogram. No samples.");
        return null;
    }
}
From source file:playground.johannes.sna.math.Histogram.java
public static TDoubleDoubleHashMap createHistogram(DescriptiveStatistics stats, Discretizer discretizer,
        boolean reweight, boolean reverse) {
    if (stats instanceof DescriptivePiStatistics)
        return createHistogram((DescriptivePiStatistics) stats, discretizer, reweight, reverse);
    else
        return createHistogram(stats.getValues(), discretizer, reweight, reverse);
}
From source file:playground.johannes.snowball2.Clustering.java
@SuppressWarnings("unchecked")
@Override
public DescriptiveStatistics calculate(Graph g, int iteration, DescriptiveStatistics reference) {
    Map<Vertex, Double> values = GraphStatistics.clusteringCoefficients(g);
    DescriptiveStatistics stats = new DescriptiveStatistics();
    TIntDoubleHashMap degreeClustering = new TIntDoubleHashMap();
    TIntIntHashMap numDegree = new TIntIntHashMap();
    double sum = 0;
    double wsum = 0;

    if (g instanceof SampledGraph) {
        for (Vertex v : values.keySet()) {
            int k = v.degree();
            if (!((SampledVertex) v).isAnonymous()) {
                double cc = degreeClustering.get(k);
                if (v.degree() == 1) {
                    stats.addValue(0.0);
                    // sum += (cc / ((SampledVertex) v).getSampleProbability());
                } else {
                    double C = values.get(v);
                    stats.addValue(C);
                    cc += C;
                    sum += (C / ((SampledVertex) v).getSampleProbability());
                }
                degreeClustering.put(k, cc);
                numDegree.put(k, numDegree.get(k) + 1);
                wsum += (1 / ((SampledVertex) v).getSampleProbability());
            }
        }
    } else {
        for (Vertex v : values.keySet()) {
            int k = v.degree();
            double cc = degreeClustering.get(k);
            wsum++;
            if (v.degree() == 1)
                stats.addValue(0.0);
            else {
                double C = values.get(v);
                stats.addValue(C);
                cc += C;
                sum += C;
            }
            degreeClustering.put(k, cc);
            numDegree.put(k, numDegree.get(k) + 1);
        }
    }

    wMean = sum / wsum;

    try {
        BufferedWriter writer = IOUtils
                .getBufferedWriter(String.format("%1$s/%2$s.degreeDependency.txt", outputDir, iteration));
        int[] keys = numDegree.keys();
        Arrays.sort(keys);
        for (int k : keys) {
            double bc = degreeClustering.get(k);
            int numV = numDegree.get(k);
            writer.write(String.valueOf(k));
            writer.write("\t");
            writer.write(String.valueOf(bc / (double) numV));
            writer.newLine();
        }
        writer.close();
    } catch (Exception e) {
        e.printStackTrace();
    }

    dumpStatistics(getStatisticsMap(stats), iteration);

    if (reference != null) {
        Histogram hist = new Histogram(100, reference.getMin(), reference.getMax());
        plotHistogram(stats.getValues(), hist, iteration);
    } else {
        plotHistogram(stats.getValues(), new Histogram(100), iteration);
    }

    return stats;
}
From source file:playground.johannes.socialnets.NetworkGenerator2.java
private static void dumpStats(SocialNetwork socialNet, String histfile, int it)
        throws FileNotFoundException, IOException {
    int numEdges = socialNet.getEdges().size();
    int numVertices = socialNet.getVertices().size();
    logger.info(String.format("%1$s vertices, %2$s edges.", numVertices, numEdges));

    int isolates = 0;
    for (Vertex v : socialNet.getVertices())
        if (v.getEdges().size() == 0)
            isolates++;
    logger.info(String.format("%1$s isolates.", isolates));

    DescriptiveStatistics stats = GraphStatistics.getDegreeStatistics(socialNet);
    double meanDegree = stats.getMean();
    logger.info(String.format("Mean degree is %1$s.", meanDegree));
    WeightedStatistics wstats = new WeightedStatistics();
    wstats.addAll(stats.getValues());
    WeightedStatistics.writeHistogram(wstats.absoluteDistribution(),
            "/Users/fearonni/vsp-work/socialnets/data-analysis/socialnetgenerator/" + it + ".degreehist.txt");

    double clustering = GraphStatistics.getClusteringStatistics(socialNet).getMean();
    logger.info(String.format("Mean clustering coefficient is %1$s.", clustering));
    double mutuality = GraphStatistics.getMutuality(socialNet);
    logger.info(String.format("Mutuality is %1$s.", mutuality));
    double dcorrelation = GraphStatistics.getDegreeCorrelation(socialNet);
    logger.info(String.format("Degree correlation is %1$s.", dcorrelation));
    // logger.info(String.format("Closeness is %1$s.", GraphStatistics.getCentrality(socialNet).getGraphCloseness()));

    logger.info("Calculating distance distribution...");
    WeightedStatistics stats3 = new WeightedStatistics();
    // HashMap<Ego, TDoubleDoubleHashMap> egoHist = new HashMap<Ego, TDoubleDoubleHashMap>();
    double binsize = 1000;
    for (Ego ego : socialNet.getVertices()) {
        TDoubleDoubleHashMap hist = new TDoubleDoubleHashMap();
        Coord c1 = ego.getPerson().getSelectedPlan().getFirstActivity().getCoord();
        for (Ego p2 : socialNet.getVertices()) {
            Coord c2 = p2.getPerson().getSelectedPlan().getFirstActivity().getCoord();
            double d = c1.calcDistance(c2);
            double bin = Math.floor(d / binsize);
            double val = hist.get(bin);
            val++;
            hist.put(bin, val);
        }
        // egoHist.put(ego, hist);
        for (Vertex n : ego.getNeighbours()) {
            Coord c2 = ((Ego) n).getPerson().getSelectedPlan().getFirstActivity().getCoord();
            double dist = c1.calcDistance(c2);
            stats3.add(dist, 1 / hist.get(Math.floor(dist / binsize)));
        }
    }
    // WeightedStatistics stats3 = new WeightedStatistics();
    // for (Object o : socialNet.getEdges()) {
    //     Edge e = (Edge) o;
    //     Ego e1 = (Ego) e.getVertices().getFirst();
    //     Ego e2 = (Ego) e.getVertices().getSecond();
    //     Coord c1 = e1.getPerson().getSelectedPlan().getFirstActivity().getCoord();
    //     Coord c2 = e2.getPerson().getSelectedPlan().getFirstActivity().getCoord();
    //     double dist = c1.calcDistance(c2);
    //     stats3.add(dist);
    // }
    WeightedStatistics.writeHistogram(stats3.absoluteDistribution(1000),
            "/Users/fearonni/vsp-work/socialnets/data-analysis/socialnetgenerator/" + it + ".edgelength.txt");

    PajekVisWriter pWriter = new PajekVisWriter();
    pWriter.write(socialNet,
            "/Users/fearonni/vsp-work/socialnets/data-analysis/socialnetgenerator/" + it + ".socialnet.net");

    WeightedStatistics stats4 = new WeightedStatistics();
    for (Object o : socialNet.getEdges()) {
        Edge e = (Edge) o;
        Ego e1 = (Ego) e.getVertices().getFirst();
        Ego e2 = (Ego) e.getVertices().getSecond();
        int age1 = e1.getPerson().getAge();
        int age2 = e2.getPerson().getAge();
        double dAge = 0;
        if (age1 > age2)
            dAge = age1 / (double) age2;
        else
            dAge = age2 / (double) age1;
        stats4.add(dAge);
    }
    WeightedStatistics.writeHistogram(stats4.absoluteDistribution(0.05),
            "/Users/fearonni/vsp-work/socialnets/data-analysis/socialnetgenerator/" + it + ".agedist.txt");

    logger.info("Random link selection = " + rndLinkCount);
    logger.info("Number of components = " + GraphStatistics.getComponents(socialNet).size());
}
From source file:playground.johannes.socialnets.SpatialNetworkGenerator.java
private static void dumpStats(SocialNetwork socialNet, String histfile, int it)
        throws FileNotFoundException, IOException {
    int numEdges = socialNet.getEdges().size();
    int numVertices = socialNet.getVertices().size();
    logger.info(String.format("%1$s vertices, %2$s edges.", numVertices, numEdges));

    int isolates = 0;
    for (Vertex v : socialNet.getVertices())
        if (v.getEdges().size() == 0)
            isolates++;
    logger.info(String.format("%1$s isolates.", isolates));

    DescriptiveStatistics stats = GraphStatistics.getDegreeStatistics(socialNet);
    double meanDegree = stats.getMean();
    logger.info(String.format("Mean degree is %1$s.", meanDegree));
    WeightedStatistics wstats = new WeightedStatistics();
    wstats.addAll(stats.getValues());
    WeightedStatistics.writeHistogram(wstats.absoluteDistribution(),
            "/Users/fearonni/vsp-work/socialnets/data-analysis/socialnetgenerator/" + it + ".degreehist.txt");

    double clustering = GraphStatistics.getClusteringStatistics(socialNet).getMean();
    logger.info(String.format("Mean clustering coefficient is %1$s.", clustering));
    double mutuality = GraphStatistics.getMutuality(socialNet);
    logger.info(String.format("Mutuality is %1$s.", mutuality));
    double dcorrelation = GraphStatistics.getDegreeCorrelation(socialNet);
    logger.info(String.format("Degree correlation is %1$s.", dcorrelation));
    // logger.info(String.format("Closeness is %1$s.", GraphStatistics.getCentrality(socialNet).getGraphCloseness()));

    WeightedStatistics stats3 = new WeightedStatistics();
    double binsize = 1000;
    for (Ego ego : socialNet.getVertices()) {
        TDoubleDoubleHashMap hist = new TDoubleDoubleHashMap();
        Coord c1 = ego.getPerson().getSelectedPlan().getFirstActivity().getCoord();
        for (Ego p2 : socialNet.getVertices()) {
            Coord c2 = p2.getPerson().getSelectedPlan().getFirstActivity().getCoord();
            double d = c1.calcDistance(c2);
            double bin = Math.floor(d / binsize);
            double val = hist.get(bin);
            val++;
            hist.put(bin, val);
        }
        // egoHist.put(ego, hist);
        for (Vertex n : ego.getNeighbours()) {
            Coord c2 = ((Ego) n).getPerson().getSelectedPlan().getFirstActivity().getCoord();
            double dist = c1.calcDistance(c2);
            stats3.add(dist, 1 / hist.get(Math.floor(dist / binsize)));
        }
    }
    // for (Object o : socialNet.getEdges()) {
    //     Edge e = (Edge) o;
    //     Ego e1 = (Ego) e.getVertices().getFirst();
    //     Ego e2 = (Ego) e.getVertices().getSecond();
    //     Coord c1 = e1.getPerson().getSelectedPlan().getFirstActivity().getCoord();
    //     Coord c2 = e2.getPerson().getSelectedPlan().getFirstActivity().getCoord();
    //     double dist = c1.calcDistance(c2);
    //     stats3.add(dist);
    // }
    WeightedStatistics.writeHistogram(stats3.absoluteDistribution(1000),
            "/Users/fearonni/vsp-work/socialnets/data-analysis/socialnetgenerator/" + it + ".edgelength.txt");

    PajekVisWriter pWriter = new PajekVisWriter();
    pWriter.write(socialNet,
            "/Users/fearonni/vsp-work/socialnets/data-analysis/socialnetgenerator/" + it + ".socialnet.net");

    WeightedStatistics stats4 = new WeightedStatistics();
    for (Object o : socialNet.getEdges()) {
        Edge e = (Edge) o;
        Ego e1 = (Ego) e.getVertices().getFirst();
        Ego e2 = (Ego) e.getVertices().getSecond();
        int age1 = e1.getPerson().getAge();
        int age2 = e2.getPerson().getAge();
        double dAge = 0;
        if (age1 > age2)
            dAge = age1 / (double) age2;
        else
            dAge = age2 / (double) age1;
        stats4.add(dAge);
    }
    WeightedStatistics.writeHistogram(stats4.absoluteDistribution(0.05),
            "/Users/fearonni/vsp-work/socialnets/data-analysis/socialnetgenerator/" + it + ".agedist.txt");
}
From source file:playground.johannes.socialnetworks.graph.spatial.analysis.AcceptanceProbaDegreeTask.java
@Override
public void analyze(Graph graph, Map<String, DescriptiveStatistics> results) {
    TObjectDoubleHashMap<Vertex> normValues = ObservedDegree.getInstance().values(graph.getVertices());
    AttributePartition partitioner = new AttributePartition(
            FixedSampleSizeDiscretizer.create(normValues.getValues(), 1, 2));
    TDoubleObjectHashMap<?> partitions = partitioner.partition(normValues);
    TDoubleObjectIterator<?> it = partitions.iterator();

    AcceptanceProbability proba = new ObservedAcceptanceProbability();

    Map<String, TDoubleDoubleHashMap> histograms = new HashMap<String, TDoubleDoubleHashMap>();
    Map<String, DescriptiveStatistics> distributions = new HashMap<String, DescriptiveStatistics>();
    double sum = 0;

    for (int i = 0; i < partitions.size(); i++) {
        it.advance();
        double key = it.key();
        Set<SpatialVertex> partition = (Set<SpatialVertex>) it.value();
        System.out.println("Partition size = " + partition.size() + "; key = " + key);

        DescriptiveStatistics distr = proba.distribution(partition, destinations);
        double[] values = distr.getValues();
        System.out.println("Num samples = " + values.length);
        if (values.length > 0) {
            TDoubleDoubleHashMap hist = Histogram.createHistogram(distr,
                    FixedSampleSizeDiscretizer.create(values, 1, 50), true);
            sum += Histogram.sum(hist);
            histograms.put(String.format("p_accept-k%1$.4f", key), hist);
            distributions.put(String.format("p_accept-k%1$.4f", key), distr);
        }
    }

    for (Entry<String, TDoubleDoubleHashMap> entry : histograms.entrySet()) {
        String key = entry.getKey();
        TDoubleDoubleHashMap histogram = entry.getValue();
        Histogram.normalize(histogram, sum);
        try {
            TXTWriter.writeMap(histogram, "d", "p", String.format("%1$s/%2$s.txt", getOutputDirectory(), key));
        } catch (IOException e) {
            e.printStackTrace();
        }

        histogram = Histogram.createCumulativeHistogram(histogram);
        Histogram.complementary(histogram);
        try {
            TXTWriter.writeMap(histogram, "d", "p",
                    String.format("%1$s/%2$s.cum.txt", getOutputDirectory(), key));
        } catch (IOException e) {
            e.printStackTrace();
        }

        DescriptiveStatistics stats = distributions.get(key);
        writeRawData(stats, key);
    }
}
From source file:playground.johannes.socialnetworks.graph.spatial.analysis.AcceptancePropaCategoryTask.java
@Override
public void analyze(Graph graph, Map<String, DescriptiveStatistics> statsMap) {
    Accessibility access = module;
    // access.setTargets(destinations);

    // destinations = new HashSet<Point>();
    Set<Vertex> vertices = new HashSet<Vertex>();
    for (Vertex v : graph.getVertices()) {
        Point p = ((SpatialVertex) v).getPoint();
        if (p != null) {
            // if (boundary.contains(p)) {
            vertices.add(v);
            // destinations.add(((SpatialVertex) v).getPoint());
            // }
        }
    }
    // access.setTargets(destinations);

    TObjectDoubleHashMap<Vertex> normValues = access.values(vertices);
    // TObjectDoubleHashMap<Vertex> normValues = ObservedDegree.getInstance().values(vertices);
    // TObjectDoubleHashMap<Vertex> normValues = ObservedAge.getInstance().values(vertices);

    // AttributePartition partitioner = new AttributePartition(new FixedBordersDiscretizer(normValues.getValues(), new double[] { 21.0 }));
    AttributePartition partitioner = new AttributePartition(
            FixedSampleSizeDiscretizer.create(normValues.getValues(), 1, 2));
    TDoubleObjectHashMap<?> partitions = partitioner.partition(normValues);
    TDoubleObjectIterator<?> it = partitions.iterator();

    // AcceptanceProbability propa = new ObservedAcceptanceProbability();
    AcceptanceProbability propa = new AcceptanceProbability();

    Map<String, TDoubleDoubleHashMap> histograms = new HashMap<String, TDoubleDoubleHashMap>();
    Map<String, DescriptiveStatistics> distributions = new HashMap<String, DescriptiveStatistics>();
    double sum = 0;

    for (int i = 0; i < partitions.size(); i++) {
        it.advance();
        double key = it.key();
        Set<SpatialVertex> partition = (Set<SpatialVertex>) it.value();
        System.out.println("Partition size = " + partition.size() + "; key = " + key);

        DescriptiveStatistics distr = propa.distribution(partition, destinations);
        try {
            double[] values = distr.getValues();
            System.out.println("Num samples = " + values.length);
            if (values.length > 0) {
                TDoubleDoubleHashMap hist = Histogram.createHistogram(distr,
                        FixedSampleSizeDiscretizer.create(values, 1, 50), true);
                sum += Histogram.sum(hist);
                histograms.put(String.format("p_accept-cat%1$.4f", key), hist);
                distributions.put(String.format("p_accept-cat%1$.4f", key), distr);
            }
            writeHistograms(distr, new LinLogDiscretizer(1000.0, 2),
                    String.format("p_accept-cat%1$.4f.log", key), true);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    for (Entry<String, TDoubleDoubleHashMap> entry : histograms.entrySet()) {
        String key = entry.getKey();
        TDoubleDoubleHashMap histogram = entry.getValue();
        Histogram.normalize(histogram, sum);
        try {
            TXTWriter.writeMap(histogram, "d", "p", String.format("%1$s/%2$s.txt", getOutputDirectory(), key));
        } catch (IOException e) {
            e.printStackTrace();
        }

        histogram = Histogram.createCumulativeHistogram(histogram);
        Histogram.complementary(histogram);
        try {
            TXTWriter.writeMap(histogram, "d", "p",
                    String.format("%1$s/%2$s.cum.txt", getOutputDirectory(), key));
        } catch (IOException e) {
            e.printStackTrace();
        }

        DescriptiveStatistics stats = distributions.get(key);
        writeRawData(stats, key);
    }
}