Usage examples for org.apache.commons.math.stat.descriptive.DescriptiveStatistics#getValues()
public double[] getValues()
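getValues() returns the values currently stored in the DescriptiveStatistics instance as a fresh copy, so the returned array can be sorted or otherwise modified without affecting the accumulated statistics. As a minimal sketch of the call (the class name GetValuesDemo and the sample values are illustrative, not taken from the projects below):

    import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;

    public class GetValuesDemo {
        public static void main(String[] args) {
            DescriptiveStatistics stats = new DescriptiveStatistics();
            stats.addValue(1.0);
            stats.addValue(2.5);
            stats.addValue(4.0);

            // getValues() returns a copy of the stored values;
            // mutating the array does not affect the statistics.
            double[] values = stats.getValues();
            System.out.println("Num samples = " + values.length); // 3
            System.out.println("Mean = " + stats.getMean());      // 2.5
        }
    }

The real-world examples below follow the same pattern: accumulate samples with addValue(...), then pass the array from getValues() to histogram, discretizer, or percentile utilities.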
From source file: playground.johannes.socialnetworks.graph.spatial.analysis.EdgeLengthCategoryTask.java
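In this example, getValues() supplies the raw edge-length samples of each partition (distr.getValues()), both to check the sample count before building a histogram and to construct a FixedSampleSizeDiscretizer over those samples.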
@Override
public void analyze(Graph graph, Map<String, DescriptiveStatistics> statsMap) {
    Accessibility access = module;
    // access.setTargets(destinations);
    // destinations = new HashSet<Point>();
    Set<Vertex> vertices = new HashSet<Vertex>();
    for (Vertex v : graph.getVertices()) {
        Point p = ((SpatialVertex) v).getPoint();
        if (p != null) {
            // if (boundary.contains(p)) {
            vertices.add(v);
            // destinations.add(((SpatialVertex) v).getPoint());
            // }
        }
    }
    // access.setTargets(destinations);

    TObjectDoubleHashMap<Vertex> normValues = access.values(vertices);
    // TObjectDoubleHashMap<Vertex> normValues = ObservedDegree.getInstance().values(vertices);
    // TObjectDoubleHashMap<Vertex> normValues = ObservedAge.getInstance().values(vertices);

    // AttributePartition partitioner = new AttributePartition(new FixedBordersDiscretizer(normValues.getValues(), new double[]{21.0}));
    AttributePartition partitioner = new AttributePartition(
            FixedSampleSizeDiscretizer.create(normValues.getValues(), 1, 2));
    TDoubleObjectHashMap<?> partitions = partitioner.partition(normValues);
    TDoubleObjectIterator<?> it = partitions.iterator();

    // AcceptanceProbability propa = new ObservedAcceptanceProbability();
    // AcceptanceProbability propa = new AcceptanceProbability();
    EdgeLength propa = new EdgeLength();

    Map<String, TDoubleDoubleHashMap> histograms = new HashMap<String, TDoubleDoubleHashMap>();
    Map<String, DescriptiveStatistics> distributions = new HashMap<String, DescriptiveStatistics>();
    double sum = 0;

    for (int i = 0; i < partitions.size(); i++) {
        it.advance();
        double key = it.key();
        Set<SpatialVertex> partition = (Set<SpatialVertex>) it.value();
        System.out.println("Partition size = " + partition.size() + "; key = " + key);

        // DescriptiveStatistics distr = propa.distribution(partition, destinations);
        Set<SpatialEdge> edges = new HashSet<SpatialEdge>();
        for (SpatialVertex v : partition) {
            edges.addAll(v.getEdges());
        }
        DescriptiveStatistics distr = propa.statistics(edges);

        try {
            double[] values = distr.getValues();
            System.out.println("Num samples = " + values.length);
            if (values.length > 0) {
                TDoubleDoubleHashMap hist = Histogram.createHistogram(distr,
                        FixedSampleSizeDiscretizer.create(values, 1, 50), true);
                sum += Histogram.sum(hist);
                histograms.put(String.format("d-cat%1$.4f", key), hist);
                distributions.put(String.format("d-cat%1$.4f", key), distr);
            }
            writeHistograms(distr, new LinLogDiscretizer(1000.0, 2),
                    String.format("d-cat%1$.4f.log", key), true);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    for (Entry<String, TDoubleDoubleHashMap> entry : histograms.entrySet()) {
        String key = entry.getKey();
        TDoubleDoubleHashMap histogram = entry.getValue();
        Histogram.normalize(histogram, sum);
        try {
            TXTWriter.writeMap(histogram, "d", "p",
                    String.format("%1$s/%2$s.txt", getOutputDirectory(), key));
        } catch (IOException e) {
            e.printStackTrace();
        }

        histogram = Histogram.createCumulativeHistogram(histogram);
        Histogram.complementary(histogram);
        try {
            TXTWriter.writeMap(histogram, "d", "p",
                    String.format("%1$s/%2$s.cum.txt", getOutputDirectory(), key));
        } catch (IOException e) {
            e.printStackTrace();
        }

        DescriptiveStatistics stats = distributions.get(key);
        writeRawData(stats, key);
    }
}
From source file: playground.johannes.socialnetworks.survey.ivt2009.analysis.AcceptPropConst.java
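Here getValues() feeds the per-partition distance samples (stats.getValues()) into FixedSampleSizeDiscretizer.create(...), so that each histogram bin receives a comparable number of samples.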
@Override
public TObjectDoubleHashMap<Vertex> values(Set<? extends Vertex> vertices) {
    TObjectDoubleHashMap<Vertex> c_i = new TObjectDoubleHashMap<Vertex>();

    logger.info("Creating partitions...");
    AttributePartition partitioner = new AttributePartition(
            FixedSampleSizeDiscretizer.create(partitionAttributes.getValues(), 20, 100));
    // AttributePartition partitioner = new AttributePartition(new LinearDiscretizer(values.getValues(), 20));
    TDoubleObjectHashMap<Set<Vertex>> partitions = partitioner.partition(partitionAttributes);
    logger.info(String.format("Created %1$s partitions.", partitions.size()));

    // Discretizer discretizer = new LinearDiscretizer(1000.0);
    DistanceCalculator distanceCalculator = new CartesianDistanceCalculator();

    logger.info("Calculating prop const...");
    TDoubleObjectIterator<?> it = partitions.iterator();
    for (int i = 0; i < partitions.size(); i++) {
        it.advance();
        Set<? extends SpatialVertex> partition = (Set<? extends SpatialVertex>) it.value();

        DescriptiveStatistics stats = Distance.getInstance().statistics(partition);
        Discretizer discretizer = FixedSampleSizeDiscretizer.create(stats.getValues(), 20, 100);
        TDoubleDoubleHashMap m_d = Histogram.createHistogram(stats, discretizer, true);

        /*
         * count number of destinations at d
         */
        TDoubleIntHashMap M_d = new TDoubleIntHashMap();
        for (SpatialVertex vertex : partition) {
            Point p1 = vertex.getPoint();
            if (p1 != null) {
                for (Point p2 : destinations) {
                    if (p2 != null) {
                        double d = distanceCalculator.distance(p1, p2);
                        d = discretizer.discretize(d);
                        M_d.adjustOrPutValue(d, 1, 1);
                    }
                }
            }
        }

        double c_sum = 0;
        int cnt = 0;
        TDoubleDoubleIterator mdIt = m_d.iterator();
        for (int k = 0; k < m_d.size(); k++) {
            mdIt.advance();
            double d = it.key();
            // d = discretizer.discretize(d);
            d = Math.max(d, 1.0);
            int M = M_d.get(discretizer.discretize(d));
            if (M > 0) {
                c_sum += mdIt.value() / (Math.pow(d, gamma) * M);
                System.err.println(String.valueOf(mdIt.value() / (Math.pow(d, gamma) * M)));
                cnt++;
            }
        }

        double c_mean = c_sum / (double) cnt;
        System.out.println(it.key() + "\t" + c_mean);

        for (SpatialVertex vertex : partition) {
            c_i.put(vertex, c_mean);
        }
    }

    return c_i;
}
From source file: playground.johannes.socialnetworks.survey.ivt2009.analysis.deprecated.DistanceSocioAttribute.java
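This task groups vertices by sex, income, and civil status; getValues() is used to report the sample size of each degree distribution (kDistr.getValues().length).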
@Override
public void analyze(Graph g, Map<String, DescriptiveStatistics> stats) {
    try {
        SocialGraph graph = (SocialGraph) g;

        Distance dist = new ObservedDistance();
        AcceptanceProbability acc = new AcceptanceProbability();
        Degree degree = new ObservedDegree();
        Discretizer discretizer = new LinearDiscretizer(1.0);

        /*
         * male
         */
        Set<SocialVertex> vertices = new HashSet<SocialVertex>();
        for (SocialVertex vertex : graph.getVertices()) {
            if ("m".equalsIgnoreCase(vertex.getPerson().getPerson().getSex()))
                vertices.add(vertex);
        }

        DescriptiveStatistics ds = new DescriptiveStatistics();
        ds.addValue(ResponseRate.responseRate((Set) vertices));
        stats.put("alpha_male", ds);

        Distribution distr = dist.distribution(vertices);
        Distribution.writeHistogram(distr.absoluteDistributionLog2(1000), getOutputDirectory() + "/d_male.txt");
        ds = new DescriptiveStatistics();
        ds.addValue(distr.mean());
        stats.put("d_mean_male", ds);

        DescriptiveStatistics dStats = acc.distribution(vertices, choiceSet);
        TDoubleDoubleHashMap hist = Histogram.createHistogram(dStats, new LinearDiscretizer(1000.0), false);
        TXTWriter.writeMap(hist, "d", "p", getOutputDirectory() + "/p_acc_male.txt");
        // Distribution.writeHistogram(dStats.absoluteDistributionLog2(1000), getOutputDirectory() + "/p_acc_male.txt");

        DescriptiveStatistics kDistr = degree.statistics(vertices);
        stats.put("k_male", kDistr);
        Distribution.writeHistogram(Histogram.createHistogram(kDistr, new LinearDiscretizer(1.0), false),
                getOutputDirectory() + "/k_male.txt");
        System.out.println("Male: " + kDistr.getValues().length);

        /*
         * female
         */
        vertices = new HashSet<SocialVertex>();
        for (SocialVertex vertex : graph.getVertices()) {
            if ("f".equalsIgnoreCase(vertex.getPerson().getPerson().getSex()))
                vertices.add(vertex);
        }

        ds = new DescriptiveStatistics();
        ds.addValue(ResponseRate.responseRate((Set) vertices));
        stats.put("alpha_female", ds);

        distr = dist.distribution(vertices);
        Distribution.writeHistogram(distr.absoluteDistributionLog2(1000), getOutputDirectory() + "/d_female.txt");
        ds = new DescriptiveStatistics();
        ds.addValue(distr.mean());
        stats.put("d_mean_female", ds);

        dStats = acc.distribution(vertices, choiceSet);
        hist = Histogram.createHistogram(dStats, new LinearDiscretizer(1000.0), false);
        TXTWriter.writeMap(hist, "d", "p", getOutputDirectory() + "/p_acc_female.txt");
        // Distribution.writeHistogram(acc.distribution(vertices, choiceSet).absoluteDistributionLog2(1000), getOutputDirectory() + "/p_acc_female.txt");

        kDistr = degree.statistics(vertices);
        stats.put("k_female", kDistr);
        Distribution.writeHistogram(Histogram.createHistogram(kDistr, new LinearDiscretizer(1.0), false),
                getOutputDirectory() + "/k_female.txt");
        System.out.println("Female: " + kDistr.getValues().length);

        /*
         * edu
         */
        // Education edu = new Education();
        // edu.socioMatrix(graph);
        // for (String att : edu.getAttributes()) {
        //     vertices = new HashSet<SocialVertex>();
        //     for (SocialVertex vertex : graph.getVertices()) {
        //         if (att.equalsIgnoreCase(vertex.getPerson().getEducation()))
        //             vertices.add(vertex);
        //     }
        //
        //     stats.put("alpha_edu" + att, ResponseRate.responseRate((Set) vertices));
        //
        //     distr = dist.distribution(vertices);
        //     Distribution.writeHistogram(distr.absoluteDistributionLog2(1000), getOutputDirectory() + "/d_edu" + att + ".txt");
        //     stats.put("d_mean_edu" + att, distr.mean());
        //
        //     dStats = acc.distribution(vertices, choiceSet);
        //     hist = Histogram.createHistogram(dStats, new LinearDiscretizer(1000.0), false);
        //     TXTWriter.writeMap(hist, "d", "p", getOutputDirectory() + "/p_acc_edu" + att + ".txt");
        //
        //     kDistr = degree.distribution(vertices);
        //     stats.put("k_mean_edu" + att, kDistr.getMean());
        //     Distribution.writeHistogram(Histogram.createHistogram(kDistr, new LinearDiscretizer(1.0), false), getOutputDirectory() + "/k_edu" + att + ".txt");
        //
        //     System.out.println("Edu" + att + ": " + kDistr.getValues().length);
        // }

        /*
         * income
         */
        Map<Integer, Set<SocialVertex>> incomes = new HashMap<Integer, Set<SocialVertex>>();
        for (SocialVertex vertex : graph.getVertices()) {
            Set<SocialVertex> set = incomes.get(vertex.getPerson().getIncome());
            if (set == null) {
                set = new HashSet<SocialVertex>();
                incomes.put(vertex.getPerson().getIncome(), set);
            }
            set.add(vertex);
        }

        for (Entry<Integer, Set<SocialVertex>> entry : incomes.entrySet()) {
            vertices = entry.getValue();
            distr = dist.distribution(vertices);
            String att = String.valueOf(entry.getKey());
            Distribution.writeHistogram(distr.absoluteDistributionLog2(1000),
                    getOutputDirectory() + "/d_income" + att + ".txt");
            ds = new DescriptiveStatistics();
            ds.addValue(distr.mean());
            stats.put("d_mean_income" + att, ds);

            dStats = acc.distribution(vertices, choiceSet);
            hist = Histogram.createHistogram(dStats, new LinearDiscretizer(1000.0), false);
            TXTWriter.writeMap(hist, "d", "p", getOutputDirectory() + "/p_acc_income" + att + ".txt");

            kDistr = degree.statistics(vertices);
            stats.put("k_income" + att, kDistr);
            Distribution.writeHistogram(Histogram.createHistogram(kDistr, discretizer, false),
                    getOutputDirectory() + "/k_income" + att + ".txt");
            System.out.println("Income" + att + ": " + kDistr.getValues().length);
        }

        /*
         * civil status
         */
        Map<String, Set<SocialVertex>> civilstatus = new HashMap<String, Set<SocialVertex>>();
        for (SocialVertex vertex : graph.getVertices()) {
            Set<SocialVertex> set = civilstatus.get(vertex.getPerson().getCiviStatus());
            if (set == null) {
                set = new HashSet<SocialVertex>();
                civilstatus.put(vertex.getPerson().getCiviStatus(), set);
            }
            set.add(vertex);
        }

        for (Entry<String, Set<SocialVertex>> entry : civilstatus.entrySet()) {
            vertices = entry.getValue();
            distr = dist.distribution(vertices);
            String att = entry.getKey();
            Distribution.writeHistogram(distr.absoluteDistributionLog2(1000),
                    getOutputDirectory() + "/d_cstatus" + att + ".txt");
            ds = new DescriptiveStatistics();
            ds.addValue(distr.mean());
            stats.put("d_mean_cstatus" + att, ds);

            dStats = acc.distribution(vertices, choiceSet);
            hist = Histogram.createHistogram(dStats, new LinearDiscretizer(1000.0), false);
            TXTWriter.writeMap(hist, "d", "p", getOutputDirectory() + "/p_acc_cstatus" + att + ".txt");

            kDistr = degree.statistics(vertices);
            stats.put("k_cstatus" + att, kDistr);
            Distribution.writeHistogram(Histogram.createHistogram(kDistr, discretizer, false),
                    getOutputDirectory() + "/k_cstatus" + att + ".txt");
            System.out.println("CivStatus" + att + ": " + kDistr.getValues().length);
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file: playground.johannes.socialnetworks.survey.ivt2009.analysis.FrequencyTask.java
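The edge-frequency statistics are converted into a Distribution by passing the raw samples from getValues() to the Distribution constructor.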
@Override
public void analyze(Graph graph, Map<String, DescriptiveStatistics> statsMap) {
    DescriptiveStatistics distr = new Frequency().statistics((Set<SocialEdge>) graph.getEdges());

    if (getOutputDirectory() != null) {
        Distribution distr2 = new Distribution(distr.getValues());
        try {
            writeHistograms(distr2, 1.0, false, "freq.txt");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
From source file: playground.johannes.socialnetworks.survey.ivt2009.graph.io.GraphBuilder.java
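Besides histogram construction, this example passes getValues() to StatUtils.percentile(...) to report the median clique count and clique size.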
private void loadSociogramData(Collection<VertexRecord> records, SQLDumpReader sqlData) {
    logger.info("Loading sociogram data...");
    Map<String, VertexRecord> map = sqlData.getFullAlterKeyMappping(records);

    TObjectIntHashMap<Vertex> rawDegrees = new TObjectIntHashMap<Vertex>();
    for (Vertex v : proj.getVertices()) {
        rawDegrees.put(v, v.getNeighbours().size());
    }

    int edgecnt = 0;
    int doublecnt = 0;
    int egoEdge = 0;

    Set<Vertex> notOkVertices = new HashSet<Vertex>();
    Set<Vertex> okVertices = new HashSet<Vertex>();

    DescriptiveStatistics notOkStats = new DescriptiveStatistics();
    DescriptiveStatistics okStats = new DescriptiveStatistics();

    DescriptiveStatistics numDistr = new DescriptiveStatistics();
    DescriptiveStatistics numDistrNoZero = new DescriptiveStatistics();
    DescriptiveStatistics sizeDistr = new DescriptiveStatistics();

    TDoubleArrayList sizeValues = new TDoubleArrayList();
    TDoubleArrayList kSizeValues = new TDoubleArrayList();
    TDoubleArrayList numValues = new TDoubleArrayList();
    TDoubleArrayList numValues2 = new TDoubleArrayList();
    TDoubleArrayList kNumValues = new TDoubleArrayList();

    for (VertexRecord record : records) {
        if (record.isEgo) {
            List<Set<String>> cliques = sqlData.getCliques(record);
            numDistr.addValue(cliques.size());

            Vertex v = idMap.get(record.id);
            numValues.add(cliques.size());
            kNumValues.add(v.getNeighbours().size());

            if (!cliques.isEmpty())
                numDistrNoZero.addValue(cliques.size());

            for (Set<String> clique : cliques) {
                sizeDistr.addValue(clique.size());
                sizeValues.add(clique.size());
                kSizeValues.add(rawDegrees.get(projMap.get(v)));
                numValues2.add(cliques.size());

                List<SocialSparseVertex> vertices = new ArrayList<SocialSparseVertex>(clique.size());
                for (String alter : clique) {
                    VertexRecord r = map.get(record.egoSQLId + alter);
                    if (r != null) {
                        SocialSparseVertex vertex = idMap.get(r.id);
                        if (vertex != null) {
                            vertices.add(vertex);
                        } else {
                            logger.warn("Vertex not found.");
                        }
                    } else {
                        logger.warn("Record not found.");
                    }
                }

                for (int i = 0; i < vertices.size(); i++) {
                    for (int j = i + 1; j < vertices.size(); j++) {
                        SampledVertexDecorator<SocialSparseVertex> vProj1 = projMap.get(vertices.get(i));
                        SampledVertexDecorator<SocialSparseVertex> vProj2 = projMap.get(vertices.get(j));
                        if (!vProj1.isSampled() && !vProj2.isSampled()) {
                            if (Math.random() < 0.62) {
                                SocialSparseEdge socialEdge = builder.addEdge(graph, vertices.get(i), vertices.get(j));
                                if (socialEdge != null) {
                                    projBuilder.addEdge(proj, vProj1, vProj2, socialEdge);
                                    edgecnt++;

                                    if (vProj1.isSampled() || vProj2.isSampled()) {
                                        egoEdge++;
                                        if (vProj1.isSampled())
                                            notOkVertices.add(vProj1);
                                        else
                                            notOkVertices.add(vProj2);
                                    }
                                } else {
                                    doublecnt++;
                                    if (vProj1.isSampled())
                                        okVertices.add(vProj1);
                                    else if (vProj2.isSampled())
                                        okVertices.add(vProj2);
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    for (Vertex v : okVertices)
        okStats.addValue(rawDegrees.get(v));
    for (Vertex v : notOkVertices)
        notOkStats.addValue(rawDegrees.get(v));

    try {
        TDoubleDoubleHashMap hist = Histogram.createHistogram(okStats, new LinearDiscretizer(1), false);
        TXTWriter.writeMap(hist, "k", "n",
                "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/k_ok.txt");

        TDoubleDoubleHashMap hist2 = Histogram.createHistogram(notOkStats, new LinearDiscretizer(1), false);
        TXTWriter.writeMap(hist2, "k", "n",
                "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/k_notok.txt");

        TDoubleDoubleHashMap ratio = new TDoubleDoubleHashMap();
        double[] keys = hist.keys();
        for (double k : keys) {
            double val1 = hist2.get(k);
            double val2 = hist.get(k);
            ratio.put(k, val1 / (val2 + val1));
        }
        TXTWriter.writeMap(ratio, "k", "p",
                "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/k_ratio.txt");

        logger.info("Mean num of cliques: " + numDistrNoZero.getMean());
        logger.info("Mean size: " + sizeDistr.getMean());
        logger.info("Median num of cliques: " + StatUtils.percentile(numDistrNoZero.getValues(), 50));
        logger.info("Median size: " + StatUtils.percentile(sizeDistr.getValues(), 50));

        TDoubleDoubleHashMap histNum = Histogram.createHistogram(numDistrNoZero,
                FixedSampleSizeDiscretizer.create(numDistrNoZero.getValues(), 2, 20), true);
        Histogram.normalize(histNum);
        TXTWriter.writeMap(histNum, "num", "freq",
                "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/numCliques.txt");

        TDoubleDoubleHashMap histSize = Histogram.createHistogram(sizeDistr,
                FixedSampleSizeDiscretizer.create(sizeDistr.getValues(), 2, 20), true);
        Histogram.normalize(histSize);
        TXTWriter.writeMap(histSize, "size", "freq",
                "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/numPersons.txt");

        Discretizer discretizer = FixedSampleSizeDiscretizer.create(kSizeValues.toNativeArray(), 20, 20);
        TDoubleArrayList valuesX = new TDoubleArrayList();
        for (int i = 0; i < kSizeValues.size(); i++) {
            valuesX.add(discretizer.discretize(kSizeValues.get(i)));
        }
        Correlations.writeToFile(Correlations.mean(valuesX.toNativeArray(), sizeValues.toNativeArray()),
                "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/size_k.txt", "k", "size");

        discretizer = FixedSampleSizeDiscretizer.create(kNumValues.toNativeArray(), 20, 20);
        valuesX = new TDoubleArrayList();
        for (int i = 0; i < kNumValues.size(); i++) {
            valuesX.add(discretizer.discretize(kNumValues.get(i)));
        }
        Correlations.writeToFile(Correlations.mean(valuesX.toNativeArray(), numValues.toNativeArray()),
                "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/num_k.txt", "k", "n");

        Correlations.writeToFile(Correlations.mean(numValues2.toNativeArray(), sizeValues.toNativeArray()),
                "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/size_num.txt", "num", "size");
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }

    logger.info(String.format("Inserted %1$s edges, %2$s edges already present.", edgecnt, doublecnt));
    logger.info(String.format("Inserted %1$s edges between at least one ego.", egoEdge));
}
From source file: playground.johannes.studies.coopsim.TimeSamplerTest.java
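The sampled activity durations and arrival times are retrieved with getValues() both to build a duration histogram and to compute mean duration conditional on arrival time via Correlations.mean(...).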
/**
 * @param args
 * @throws FunctionEvaluationException
 * @throws IOException
 */
public static void main(String[] args) throws FunctionEvaluationException, IOException {
    Random random = new Random();

    AdditiveDistribution arrTimePDF = new AdditiveDistribution();
    arrTimePDF.addComponent(new GaussDistribution(2151, 43030, 267));
    arrTimePDF.addComponent(new GaussDistribution(14227, 58036, 1382));

    TimeSampler arrivalSampler = new TimeSampler(arrTimePDF, 86400, random);

    AdditiveDistribution arrDurPDF = new AdditiveDistribution();
    arrDurPDF.addComponent(new GaussDistribution(7210, 41513, 133759479));
    arrDurPDF.addComponent(new GaussDistribution(15660, 73033, 277912890));

    // TimeSampler arrDurSampler = new TimeSampler(arrDurPDF, 86400, random);
    //
    // LogNormalDistribution durPDF = new LogNormalDistribution(0.6883, 8.4954, 522.9869);
    // TimeSampler durSampler = new TimeSampler(durPDF, 86400, random);

    DescriptiveStatistics durations = new DescriptiveStatistics();
    DescriptiveStatistics arrivals = new DescriptiveStatistics();

    ProgressLogger.init(10000, 1, 5);

    double sigma = 0.6883;
    for (int i = 0; i < 10000; i++) {
        int t_arr = arrivalSampler.nextSample();
        int dur_mean = (int) arrDurPDF.value(t_arr);
        if (dur_mean > 0) {
            double mu = Math.log(dur_mean) - Math.pow(sigma, 2) / 2.0;
            TimeSampler sampler = new TimeSampler(new LogNormalDistribution(sigma, mu, 522), 86400, random);
            double dur = sampler.nextSample();

            durations.addValue(dur);
            arrivals.addValue(t_arr);

            ProgressLogger.step();
        }
    }

    TDoubleDoubleHashMap hist = Histogram.createHistogram(durations,
            FixedSampleSizeDiscretizer.create(durations.getValues(), 1, 30), true);
    Histogram.normalize(hist);
    TXTWriter.writeMap(hist, "t", "n",
            "/Users/jillenberger/Work/socialnets/locationChoice/output/durations.txt");

    TDoubleDoubleHashMap correl = Correlations.mean(arrivals.getValues(), durations.getValues(),
            FixedSampleSizeDiscretizer.create(arrivals.getValues(), 1, 24));
    TXTWriter.writeMap(correl, "arr", "dur",
            "/Users/jillenberger/Work/socialnets/locationChoice/output/dur_arr.txt");
}
From source file: playground.johannes.studies.ivt.FracDimOpportunities.java
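All pairwise ego-to-target distances are accumulated in a single DescriptiveStatistics object; getValues() then supplies the samples for a FixedSampleSizeDiscretizer when the distance histogram is created.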
/**
 * @param args
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
    String targetsFile = "/Users/jillenberger/Work/socialnets/data/schweiz/complete/plans/plans.0.001.xml";
    String chborderFile = "/Users/jillenberger/Work/socialnets/data/schweiz/complete/zones/G1L08.shp";
    String graphFile = "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/graph/graph.graphml";
    String outFile = "/Users/jillenberger/Work/phd/doc/tex/ch3/fig/data/fdim.txt";

    SpatialSparseGraph targetGraph = new Population2SpatialGraph(CRSUtils.getCRS(21781)).read(targetsFile);
    List<Point> targetPoints = new ArrayList<Point>(targetGraph.getVertices().size());
    for (SpatialVertex v : targetGraph.getVertices()) {
        targetPoints.add(v.getPoint());
    }

    SimpleFeature feature = FeatureSHP.readFeatures(chborderFile).iterator().next();
    Geometry geometry = (Geometry) feature.getDefaultGeometry();
    geometry.setSRID(21781);

    SocialSampledGraphProjection<SocialSparseGraph, SocialSparseVertex, SocialSparseEdge> graph = GraphReaderFacade
            .read(graphFile);
    graph.getDelegate().transformToCRS(CRSUtils.getCRS(21781));
    Set<? extends SpatialVertex> egos = SnowballPartitions.createSampledPartition(graph.getVertices());

    List<Point> startPoints = new ArrayList<Point>(egos.size());
    for (SpatialVertex v : egos) {
        if (v.getPoint() != null) {
            if (geometry.contains(v.getPoint()))
                startPoints.add(v.getPoint());
        }
    }

    DescriptiveStatistics stats = new DescriptiveStatistics();
    DistanceCalculator dCalc = new CartesianDistanceCalculator();

    int N = (int) (startPoints.size() * targetPoints.size());
    ProgressLogger.init(N, 1, 5);

    for (int i = 0; i < startPoints.size(); i++) {
        Point p1 = startPoints.get(i);
        for (int j = 0; j < targetPoints.size(); j++) {
            Point p2 = targetPoints.get(j);
            double d = dCalc.distance(p1, p2);
            if (d > 0)
                stats.addValue(d);
            ProgressLogger.step();
        }
    }

    System.out.println("Creating histograms...");
    TDoubleDoubleHashMap hist = Histogram.createHistogram(stats,
            FixedSampleSizeDiscretizer.create(stats.getValues(), 100, 500), true);
    Histogram.normalize(hist);
    TXTWriter.writeMap(hist, "d", "p", outFile);
}