List of usage examples for the constructor of org.apache.commons.math.distribution.GammaDistributionImpl
public GammaDistributionImpl(double alpha, double beta)
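In Commons Math 2.x, alpha is the shape parameter and beta the scale parameter, so the mean of the distribution is alpha * beta. A minimal sketch of constructing the distribution and evaluating its CDF and quantile function; the parameter values (2.0, 3.0) and the class name are illustrative only:

import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.GammaDistribution;
import org.apache.commons.math.distribution.GammaDistributionImpl;

public class GammaQuickStart {
    public static void main(String[] args) throws MathException {
        // shape (alpha) = 2.0, scale (beta) = 3.0 -- arbitrary values for illustration
        GammaDistribution gamma = new GammaDistributionImpl(2.0, 3.0);

        // P(X <= 5.0)
        double p = gamma.cumulativeProbability(5.0);

        // median: the value x with P(X <= x) = 0.5
        double median = gamma.inverseCumulativeProbability(0.5);

        System.out.println("P(X <= 5) = " + p + ", median = " + median);
    }
}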
From source file:com.cloudera.science.pig.EBCI.java
public double eval(int n, double e) {
    GammaDistribution g1 = new GammaDistributionImpl(alpha1 + n, beta1 + e);
    GammaDistribution g2 = new GammaDistributionImpl(alpha2 + n, beta2 + e);
    PiFunction pi = new PiFunction(q.eval(n, e), g1, g2);
    PiFunctionIntegral ipi = new PiFunctionIntegral(pi, target);
    try {
        return (new BrentSolver()).solve(ipi, 0.0, 10.0, 0.01);
    } catch (ConvergenceException e1) {
        e1.printStackTrace();
    } catch (FunctionEvaluationException e1) {
        e1.printStackTrace();
    } catch (RuntimeException e1) {
        // MathRuntimeException: function values at endpoints do not have different signs
        e1.printStackTrace();
    }
    return -1.0;
}
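The EBCI example roots a custom function (PiFunctionIntegral) with BrentSolver. A sketch of the same solver pattern, using the gamma CDF itself as the function and solving for the 90th percentile; the bracket [0, 50], the target 0.9 and the class name are illustrative assumptions, not values from the original source. (For a plain CDF this is of course what inverseCumulativeProbability already computes; the point is only to show the root-finding pattern.)

import org.apache.commons.math.FunctionEvaluationException;
import org.apache.commons.math.MathException;
import org.apache.commons.math.analysis.UnivariateRealFunction;
import org.apache.commons.math.analysis.solvers.BrentSolver;
import org.apache.commons.math.distribution.GammaDistribution;
import org.apache.commons.math.distribution.GammaDistributionImpl;

public class GammaRootFinding {
    public static void main(String[] args) throws Exception {
        final GammaDistribution gamma = new GammaDistributionImpl(2.0, 3.0);
        final double target = 0.9;

        // f(x) = CDF(x) - target; Brent's method finds the x where f(x) = 0
        UnivariateRealFunction f = new UnivariateRealFunction() {
            public double value(double x) throws FunctionEvaluationException {
                try {
                    return gamma.cumulativeProbability(x) - target;
                } catch (MathException e) {
                    throw new FunctionEvaluationException(x);
                }
            }
        };

        // the bracket [0, 50] must contain the root (illustrative choice)
        double x = new BrentSolver().solve(f, 0.0, 50.0);
        System.out.println("90th percentile ~ " + x);
    }
}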
From source file:dr.inference.distribution.GammaDistributionModel.java
public double quantile(double y) {
    try {
        return (new GammaDistributionImpl(getShape(), getScale())).inverseCumulativeProbability(y) + offset;
    } catch (MathException e) {
        return Double.NaN;
    }
}
From source file:adams.gui.visualization.stats.paintlet.Gamma.java
/**
 * For calculating the dimensions of the plot area.
 */
@Override
public void calculateDimensions() {
    double median;
    GammaDistributionImpl gam = new GammaDistributionImpl(m_Shape, m_Scale);
    m_Sorted = SpreadSheetUtils.getNumericColumn(m_Data, m_Index);
    m_TransformedY = new double[m_Sorted.length];
    Arrays.sort(m_Sorted);
    for (int i = 0; i < m_Sorted.length; i++) {
        median = ((i + 1) - 0.3) / (m_Sorted.length + 0.4);
        try {
            m_TransformedY[i] = gam.inverseCumulativeProbability(median);
        } catch (MathException e) {
            e.printStackTrace();
        }
    }
    // If axis can handle the data
    if (m_AxisBottom.getType().canHandle(m_Sorted[0], m_Sorted[m_Sorted.length - 1])) {
        m_AxisBottom.setMinimum(m_Sorted[0]);
        m_AxisBottom.setMaximum(m_Sorted[m_Sorted.length - 1]);
    } else {
        getLogger().severe("errors in plotting");
    }
    if (m_AxisLeft.getType().canHandle(m_TransformedY[0], m_TransformedY[m_TransformedY.length - 1])) {
        m_AxisLeft.setMinimum(m_TransformedY[0]);
        m_AxisLeft.setMaximum(m_TransformedY[m_TransformedY.length - 1]);
    } else {
        getLogger().severe("errors in plotting");
    }
    m_AxisBottom.setAxisName(m_Data.getColumnName(m_Index) + ")");
    m_AxisLeft.setAxisName("Inverse Gamma");
}
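The plotting position ((i + 1) - 0.3) / (n + 0.4) used above is a standard choice for gamma Q-Q plots: each sorted observation is matched to the theoretical gamma quantile at that probability. A self-contained sketch of the same transformation outside the paintlet; the class and method names are illustrative, not part of the adams API:

import java.util.Arrays;
import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.GammaDistributionImpl;

public class GammaQQQuantiles {
    /** Returns the theoretical gamma quantile for each (sorted) observation. */
    public static double[] theoreticalQuantiles(double[] data, double shape, double scale)
            throws MathException {
        double[] sorted = data.clone();
        Arrays.sort(sorted);
        GammaDistributionImpl gamma = new GammaDistributionImpl(shape, scale);
        double[] result = new double[sorted.length];
        for (int i = 0; i < sorted.length; i++) {
            // plotting position, as in the paintlet above
            double p = ((i + 1) - 0.3) / (sorted.length + 0.4);
            result[i] = gamma.inverseCumulativeProbability(p);
        }
        return result;
    }
}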
From source file:edu.utexas.cs.tactex.servercustomers.factoredcustomer.ProbabilityDistribution.java
ProbabilityDistribution(FactoredCustomerService service, Element xml) {
    if (null == randomSeedRepo)
        randomSeedRepo = (RandomSeedRepo) SpringApplicationContext.getBean("randomSeedRepo");
    type = Enum.valueOf(DistType.class, xml.getAttribute("distribution"));
    switch (type) {
    case POINTMASS:
    case DEGENERATE:
        param1 = Double.parseDouble(xml.getAttribute("value"));
        sampler = new DegenerateSampler(param1);
        break;
    case UNIFORM:
        param1 = Double.parseDouble(xml.getAttribute("low"));
        param2 = Double.parseDouble(xml.getAttribute("high"));
        sampler = new UniformSampler(param1, param2);
        break;
    case INTERVAL:
        param1 = Double.parseDouble(xml.getAttribute("mean"));
        param2 = Double.parseDouble(xml.getAttribute("stdDev"));
        param3 = Double.parseDouble(xml.getAttribute("low"));
        param4 = Double.parseDouble(xml.getAttribute("high"));
        sampler = new IntervalSampler(param1, param2, param3, param4);
        break;
    case NORMAL:
    case GAUSSIAN:
        param1 = Double.parseDouble(xml.getAttribute("mean"));
        param2 = Double.parseDouble(xml.getAttribute("stdDev"));
        sampler = new ContinuousSampler(new NormalDistributionImpl(param1, param2));
        break;
    case STDNORMAL:
        param1 = 0;
        param2 = 1;
        sampler = new ContinuousSampler(new NormalDistributionImpl(param1, param2));
        break;
    case LOGNORMAL:
        param1 = Double.parseDouble(xml.getAttribute("expMean"));
        param2 = Double.parseDouble(xml.getAttribute("expStdDev"));
        sampler = new LogNormalSampler(param1, param2);
        break;
    case CAUCHY:
        param1 = Double.parseDouble(xml.getAttribute("median"));
        param2 = Double.parseDouble(xml.getAttribute("scale"));
        sampler = new ContinuousSampler(new CauchyDistributionImpl(param1, param2));
        break;
    case BETA:
        param1 = Double.parseDouble(xml.getAttribute("alpha"));
        param2 = Double.parseDouble(xml.getAttribute("beta"));
        sampler = new ContinuousSampler(new BetaDistributionImpl(param1, param2));
        break;
    case BINOMIAL:
        param1 = Double.parseDouble(xml.getAttribute("trials"));
        param2 = Double.parseDouble(xml.getAttribute("success"));
        sampler = new DiscreteSampler(new BinomialDistributionImpl((int) param1, param2));
        break;
    case POISSON:
        param1 = Double.parseDouble(xml.getAttribute("lambda"));
        sampler = new DiscreteSampler(new PoissonDistributionImpl(param1));
        break;
    case CHISQUARED:
        param1 = Double.parseDouble(xml.getAttribute("dof"));
        sampler = new ContinuousSampler(new ChiSquaredDistributionImpl(param1));
        break;
    case EXPONENTIAL:
        param1 = Double.parseDouble(xml.getAttribute("mean"));
        sampler = new ContinuousSampler(new ExponentialDistributionImpl(param1));
        break;
    case GAMMA:
        param1 = Double.parseDouble(xml.getAttribute("alpha"));
        param2 = Double.parseDouble(xml.getAttribute("beta"));
        sampler = new ContinuousSampler(new GammaDistributionImpl(param1, param2));
        break;
    case WEIBULL:
        param1 = Double.parseDouble(xml.getAttribute("alpha"));
        param2 = Double.parseDouble(xml.getAttribute("beta"));
        sampler = new ContinuousSampler(new WeibullDistributionImpl(param1, param2));
        break;
    case STUDENT:
        param1 = Double.parseDouble(xml.getAttribute("dof"));
        sampler = new ContinuousSampler(new TDistributionImpl(param1));
        break;
    case SNEDECOR:
        param1 = Double.parseDouble(xml.getAttribute("d1"));
        param2 = Double.parseDouble(xml.getAttribute("d2"));
        sampler = new ContinuousSampler(new FDistributionImpl(param1, param2));
        break;
    default:
        throw new Error("Invalid probability distribution type!");
    }
    sampler.reseedRandomGenerator(service.getRandomSeedRepo()
            .getRandomSeed("factoredcustomer.ProbabilityDistribution", SeedIdGenerator.getId(), "Sampler")
            .getValue());
}
From source file:geogebra.kernel.statistics.AlgoDistribution.java
GammaDistribution getGammaDistribution(double param, double param2) {
    if (gamma == null)
        gamma = new GammaDistributionImpl(param, param2);
    else {
        gamma.setAlpha(param);
        gamma.setBeta(param2);
    }
    return gamma;
}
From source file:dr.math.distributions.GammaDistribution.java
public static void main(String[] args) {
    testQuantile(1e-10, 0.878328435043444, 0.0013696236839573005);
    testQuantile(0.5, 0.878328435043444, 0.0013696236839573005);
    testQuantile(1.0 - 1e-10, 0.878328435043444, 0.0013696236839573005);

    testQuantileCM(1e-10, 0.878328435043444, 0.0013696236839573005);
    testQuantileCM(0.5, 0.878328435043444, 0.0013696236839573005);
    testQuantileCM(1.0 - 1e-10, 0.878328435043444, 0.0013696236839573005);

    for (double i = 0.0125; i < 1.0; i += 0.025) {
        System.out.print(i + ": ");
        try {
            System.out.println(new GammaDistributionImpl(0.878328435043444, 0.0013696236839573005)
                    .inverseCumulativeProbability(i));
        } catch (MathException e) {
            System.out.println(e.getMessage());
        }
    }

    GammaDistribution gamma = new GammaDistribution(0.01, 100.0);
    double[] samples = new double[100000];
    double sum = 0.0;
    for (int i = 0; i < samples.length; i++) {
        samples[i] = gamma.nextGamma();
        sum += samples[i];
    }
    double mean = sum / (double) samples.length;
    System.out.println("Mean = " + mean);

    double variance = 0.0;
    for (int i = 0; i < samples.length; i++) {
        variance += Math.pow((samples[i] - mean), 2.0);
    }
    variance = variance / (double) samples.length;
    System.out.println("Variance = " + variance);

    // System.out.println("K-S critical values: 1.22(10%), 1.36(5%), 1.63(1%)\n");
    //
    // int iters = 30000;
    // testExpGamma(1.0, 0.01, 7, iters);
    // testExpGamma(1.0, 0.01, 5, iters);
    // testExpGamma(2.0, 0.01, 10000, iters);
    // testExpGamma(1.0, 0.01, 10000, iters);
    // testExpGamma(0.1, 0.01, 10000, iters);
    // testExpGamma(0.01, 0.01, 10000, iters);
    //
    // testExpGamma(2.0, 0.01, 10, iters);
    // testExpGamma(1.5, 0.01, 10, iters);
    // testExpGamma(1.0, 0.01, 10, iters);
    // testExpGamma(0.9, 0.01, 10, iters);
    // testExpGamma(0.5, 0.01, 10, iters);
    // testExpGamma(0.4, 0.01, 10, iters);
    // testExpGamma(0.3, 0.01, 10, iters);
    // testExpGamma(0.2, 0.01, 10, iters);
    // testExpGamma(0.1, 0.01, 10, iters);
    //
    // // test distributions with severe bias, where rejection sampling doesn't
    // // work anymore
    // testExpGamma2(2.0, 0.01, 1, iters, 0.112946);
    // testExpGamma2(2.0, 0.01, 0.1, iters, 0.328874);
    // testExpGamma2(2.0, 0.01, 0.01, iters, 1.01255);
    // testExpGamma2(1.0, 0.01, 0.0003, iters, 5.781);
    // testExpGamma2(4.0, 0.01, 0.0003, iters, 5.79604);
    // testExpGamma2(20.0, 0.01, 0.0003, iters, 5.87687);
    // testExpGamma2(10.0, 0.01, 0.01, iters, 1.05374);
    // testExpGamma2(1.0, 0.01, 0.05, iters, 0.454734);
    //
    // // test the basic Gamma distribution
    // test(1.0, 1.0, iters);
    // test(2.0, 1.0, iters);
    // test(3.0, 1.0, iters);
    // test(4.0, 1.0, iters);
    // test(100.0, 1.0, iters);
    // testAddition(0.5, 1.0, 2, iters);
    // testAddition(0.25, 1.0, 4, iters);
    // testAddition(0.1, 1.0, 10, iters);
    // testAddition(10, 1.0, 10, iters);
    // testAddition(20, 1.0, 10, iters);
    // test(0.001, 1.0, iters);
    // test(1.0, 2.0, iters);
    // test(10.0, 1.0, iters);
    // test(16.0, 1.0, iters);
    // test(16.0, 0.1, iters);
    // test(100.0, 1.0, iters);
    // test(0.5, 1.0, iters);
    // test(0.5, 0.1, iters);
    // test(0.1, 1.0, iters);
    // test(0.9, 1.0, iters);
    //
    // // test distributions with milder biases, and compare with results from
    // // simple rejection sampling
    // testExpGamma(2.0, 0.000001, 1000000, iters);
    // testExpGamma(2.0, 0.000001, 100000, iters);
    // testExpGamma(2.0, 0.000001, 70000, iters);
    // testExpGamma(10.0, 0.01, 7, iters);
    // testExpGamma(10.0, 0.01, 5, iters);
    // testExpGamma(1.0, 0.01, 100, iters);
    // testExpGamma(1.0, 0.01, 10, iters);
    // testExpGamma(1.0, 0.01, 7, iters / 3);
    // testExpGamma(1.0, 0.01, 5, iters / 3);
    // testExpGamma(1.0, 0.00001, 1000000, iters);
    // testExpGamma(1.0, 0.00001, 100000, iters);
    // testExpGamma(1.0, 0.00001, 10000, iters);
    // testExpGamma(1.0, 0.00001, 5000, iters / 3); // this one takes some time
    // testExpGamma(2.0, 1.0, 0.5, iters);
    // testExpGamma(2.0, 1.0, 1.0, iters);
    // testExpGamma(2.0, 1.0, 2.0, iters);
    // testExpGamma(3.0, 3.0, 2.0, iters);
    // testExpGamma(10.0, 3.0, 5.0, iters);
    // testExpGamma(1.0, 3.0, 5.0, iters);
    // testExpGamma(1.0, 10.0, 5.0, iters);
    // testExpGamma(2.0, 10.0, 5.0, iters);
    //
    // // test the basic Gamma distribution
    // test(1.0, 1.0, iters);
    // test(2.0, 1.0, iters);
    // test(3.0, 1.0, iters);
    // test(4.0, 1.0, iters);
    // test(100.0, 1.0, iters);
    // testAddition(0.5, 1.0, 2, iters);
    // testAddition(0.25, 1.0, 4, iters);
    // testAddition(0.1, 1.0, 10, iters);
    // testAddition(10, 1.0, 10, iters);
    // testAddition(20, 1.0, 10, iters);
    // test(0.001, 1.0, iters);
    // test(1.0, 2.0, iters);
    // test(10.0, 1.0, iters);
    // test(16.0, 1.0, iters);
    // test(16.0, 0.1, iters);
    // test(100.0, 1.0, iters);
    // test(0.5, 1.0, iters);
    // test(0.5, 0.1, iters);
    // test(0.1, 1.0, iters);
    // test(0.9, 1.0, iters);
}
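The sample-moment check at the top of main() can also be run against Commons Math directly: for shape alpha and scale beta the gamma mean is alpha * beta and the variance is alpha * beta^2. A small sketch, assuming nothing beyond the 2.x API used elsewhere on this page (samples are drawn by pushing uniform variates through the inverse CDF, so only methods shown above are needed; parameter values are illustrative):

import java.util.Random;
import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.GammaDistribution;
import org.apache.commons.math.distribution.GammaDistributionImpl;

public class GammaMomentCheck {
    public static void main(String[] args) throws MathException {
        double alpha = 2.0; // shape
        double beta = 3.0;  // scale
        GammaDistribution gamma = new GammaDistributionImpl(alpha, beta);

        Random rng = new Random(42);
        int n = 100000;
        double sum = 0.0, sumSq = 0.0;
        for (int i = 0; i < n; i++) {
            // inverse-CDF sampling: the quantile of a uniform draw is gamma-distributed
            double x = gamma.inverseCumulativeProbability(rng.nextDouble());
            sum += x;
            sumSq += x * x;
        }
        double mean = sum / n;
        double variance = sumSq / n - mean * mean;

        System.out.println("empirical mean = " + mean + ", expected " + (alpha * beta));
        System.out.println("empirical variance = " + variance + ", expected " + (alpha * beta * beta));
    }
}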
From source file:adams.gui.visualization.stats.paintlet.Gamma.java
/**
 * The paint routine of the paintlet.
 *
 * @param g      the graphics context to use for painting
 * @param moment what {@link PaintMoment} is currently being painted
 */
@Override
protected void doPerformPaint(Graphics g, PaintMoment moment) {
    if ((m_Data != null) && (m_Sorted != null) && m_Shape != -1.0) {
        GUIHelper.configureAntiAliasing(g, m_AntiAliasingEnabled);
        for (int i = 0; i < m_Sorted.length; i++) {
            Graphics2D g2d = (Graphics2D) g;
            // If data points are to be filled
            if (m_Fill) {
                g2d.setColor(m_FillColor);
                g2d.setStroke(new BasicStroke(0));
                g2d.fillOval(m_AxisBottom.valueToPos(m_Sorted[i]) - m_Size / 2,
                        m_AxisLeft.valueToPos(m_TransformedY[i]) - m_Size / 2, m_Size, m_Size);
            }
            // outline of data point
            g2d.setStroke(new BasicStroke(m_StrokeThickness));
            g2d.setColor(m_Color);
            g2d.drawOval(m_AxisBottom.valueToPos(m_Sorted[i]) - m_Size / 2,
                    m_AxisLeft.valueToPos(m_TransformedY[i]) - m_Size / 2, m_Size, m_Size);
        }
        // If drawing regression fit diagonal
        if (m_RegressionLine) {
            g.setColor(Color.BLACK);
            double[] newData = new double[m_Sorted.length];
            for (int i = 0; i < m_Sorted.length; i++) {
                newData[i] = Math.log(m_Sorted[i]);
            }
            GammaDistributionImpl gd = new GammaDistributionImpl(m_Shape, m_Scale);
            // draw the expected diagonal line using the gamma distribution
            for (int i = 0; i < m_Sorted.length - 1; i++) {
                double p1;
                try {
                    p1 = gd.cumulativeProbability(newData[i]);
                } catch (MathException e) {
                    p1 = 0;
                }
                double p2;
                try {
                    p2 = gd.cumulativeProbability(newData[i + 1]);
                } catch (MathException e) {
                    p2 = 0;
                }
                g.drawLine(m_AxisBottom.valueToPos(m_Sorted[i]), m_AxisLeft.valueToPos(p1),
                        m_AxisBottom.valueToPos(m_Sorted[i + 1]), m_AxisLeft.valueToPos(p2));
            }
        }
    }
}
From source file:geogebra.common.kernel.statistics.AlgoDistribution.java
/**
 * @param param  parameter alpha (shape)
 * @param param2 parameter beta (scale)
 * @return gamma distribution
 */
protected GammaDistribution getGammaDistribution(double param, double param2) {
    if (gamma == null || gamma.getBeta() != param2 || gamma.getAlpha() != param)
        gamma = new GammaDistributionImpl(param, param2);
    return gamma;
}
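Both GeoGebra variants cache a single GammaDistributionImpl: the first (geogebra.kernel) mutates the cached object via setAlpha/setBeta, while this one rebuilds it whenever the parameters change. A minimal sketch of the rebuild-on-change pattern in isolation (class and field names are illustrative); rebuilding sidesteps the parameter setters, which later 2.x releases mark as deprecated:

import org.apache.commons.math.distribution.GammaDistribution;
import org.apache.commons.math.distribution.GammaDistributionImpl;

public class CachedGamma {
    private GammaDistribution gamma; // reused across calls

    /** Returns a gamma distribution for (alpha, beta), rebuilding only when the parameters change. */
    public GammaDistribution get(double alpha, double beta) {
        if (gamma == null || gamma.getAlpha() != alpha || gamma.getBeta() != beta) {
            gamma = new GammaDistributionImpl(alpha, beta);
        }
        return gamma;
    }
}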
From source file:beast.evolution.sitemodel.SiteModel.java
/**
 * discretisation of gamma distribution with equal proportions in each
 * category
 *
 * @param node
 */
protected void calculateCategoryRates(final Node node) {
    double propVariable = 1.0;
    int cat = 0;
    if (/*invarParameter != null && */invarParameter.getValue() > 0) {
        if (hasPropInvariantCategory) {
            categoryRates[0] = 0.0;
            categoryProportions[0] = invarParameter.getValue();
        }
        propVariable = 1.0 - invarParameter.getValue();
        if (hasPropInvariantCategory) {
            cat = 1;
        }
    }

    if (shapeParameter != null) {
        final double a = shapeParameter.getValue();
        double mean = 0.0;
        final int gammaCatCount = categoryCount - cat;
        final GammaDistribution g = new GammaDistributionImpl(a, 1.0 / a);
        for (int i = 0; i < gammaCatCount; i++) {
            try {
                // RRB: alternative implementation that seems equally good in
                // the first 5 significant digits, but uses a standard distribution object
                if (useBeast1StyleGamma) {
                    categoryRates[i + cat] = GammaDistributionQuantile((2.0 * i + 1.0) / (2.0 * gammaCatCount), a, 1.0 / a);
                } else {
                    categoryRates[i + cat] = g.inverseCumulativeProbability((2.0 * i + 1.0) / (2.0 * gammaCatCount));
                }
            } catch (Exception e) {
                e.printStackTrace();
                Log.err.println("Something went wrong with the gamma distribution calculation");
                System.exit(-1);
            }
            mean += categoryRates[i + cat];
            categoryProportions[i + cat] = propVariable / gammaCatCount;
        }
        mean = (propVariable * mean) / gammaCatCount;
        for (int i = 0; i < gammaCatCount; i++) {
            categoryRates[i + cat] /= mean;
        }
    } else {
        categoryRates[cat] = 1.0 / propVariable;
        categoryProportions[cat] = propVariable;
    }
    ratesKnown = true;
}
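The discretisation above evaluates the gamma quantile at the midpoint of each of K equal-probability bins, (2i + 1) / (2K), then rescales so the category rates average to one. A standalone sketch of just that computation, without BEAST's invariant-sites bookkeeping (class and method names are illustrative):

import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.GammaDistribution;
import org.apache.commons.math.distribution.GammaDistributionImpl;

public class DiscretisedGammaRates {
    /** K category rates from a gamma with shape a and mean 1 (scale = 1/a), normalised to mean 1. */
    public static double[] categoryRates(double a, int k) throws MathException {
        GammaDistribution g = new GammaDistributionImpl(a, 1.0 / a);
        double[] rates = new double[k];
        double sum = 0.0;
        for (int i = 0; i < k; i++) {
            // quantile at the midpoint of the i-th equal-probability bin
            rates[i] = g.inverseCumulativeProbability((2.0 * i + 1.0) / (2.0 * k));
            sum += rates[i];
        }
        double mean = sum / k;
        for (int i = 0; i < k; i++) {
            rates[i] /= mean; // rescale so the rates average to exactly 1
        }
        return rates;
    }
}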
From source file:de.tum.bgu.msm.syntheticPopulationGenerator.kagawa.SyntheticPopJP.java
private void generateHouseholdsPersonsDwellings() {
    //Generate the synthetic population using Monte Carlo (select the households according to the weight)
    //Once the household is selected, all the characteristics of the household will be copied (including the household members)
    logger.info("   Starting to generate households and persons.");

    //List of households of the micro data
    int previousHouseholds = 0;
    int previousPersons = 0;

    //Define income distribution
    double incomeShape = ResourceUtil.getDoubleProperty(rb, PROPERTIES_INCOME_GAMMA_SHAPE);
    double incomeRate = ResourceUtil.getDoubleProperty(rb, PROPERTIES_INCOME_GAMMA_RATE);
    double[] incomeProbability = ResourceUtil.getDoubleArray(rb, PROPERTIES_INCOME_GAMMA_PROBABILITY);
    GammaDistributionImpl gammaDist = new GammaDistributionImpl(incomeShape, 1 / incomeRate);

    //Create a map to store the household IDs by municipality
    HashMap<Integer, HashMap<Integer, Integer>> householdByMunicipality = new HashMap<>();

    generateCountersForValidation();

    RealEstateDataManager realEstate = dataContainer.getRealEstateData();
    HouseholdDataManager householdDataManager = dataContainer.getHouseholdData();
    HouseholdFactory householdFactory = HouseholdUtil.getFactory();

    regionsforFrequencyMatrix = SiloUtil.readCSVfile(rb.getString(PROPERTIES_ATRIBUTES_ZONAL_DATA));
    regionsforFrequencyMatrix.buildIndex(regionsforFrequencyMatrix.getColumnPosition("V1"));

    householdsForFrequencyMatrix = new HashMap<>();
    for (int i = 1; i <= microDataDwelling.getRowCount(); i++) {
        int v2Zone = (int) microDataDwelling.getValueAt(i, "PtResCode");
        int ddID = (int) microDataDwelling.getValueAt(i, "id");
        if (householdsForFrequencyMatrix.containsKey(v2Zone)) {
            householdsForFrequencyMatrix.get(v2Zone).put(ddID, 1);
        } else {
            HashMap<Integer, Integer> map = new HashMap<>();
            map.put(ddID, 1);
            householdsForFrequencyMatrix.put(v2Zone, map);
        }
    }

    //Selection of households, persons, jobs and dwellings per municipality
    for (int municipality = 0; municipality < cityID.length; municipality++) {
        logger.info("   Municipality " + cityID[municipality] + ". Starting to generate households.");

        //-----------***** Data preparation *****-------------------------------------------------------------------
        //Create local variables to avoid accessing to the same variable on the parallel processing
        int municipalityID = cityID[municipality];
        int v2zone = (int) regionsforFrequencyMatrix.getIndexedValueAt(municipalityID, "V2");
        if (householdsForFrequencyMatrix.containsKey(v2zone)) {
            String[] attributesHouseholdIPU = attributesMunicipality;
            TableDataSet rasterCellsMatrix = cellsMatrix;
            TableDataSet microHouseholds = microDataHousehold;
            TableDataSet microPersons = microDataPerson;
            TableDataSet microDwellings = microDataDwelling;
            microHouseholds.buildIndex(microHouseholds.getColumnPosition("id"));
            microDwellings.buildIndex(microDwellings.getColumnPosition("id"));
            int totalHouseholds = (int) marginalsMunicipality.getIndexedValueAt(municipalityID, "hhTotal");
            int[] agePerson = ageBracketsPerson;
            int[] levelEdu = new int[4];
            double[] probEdu = new double[4];
            for (int i = 0; i < levelEdu.length; i++) {
                probEdu[i] = marginalsMunicipality.getIndexedValueAt(municipalityID, "Ed_" + i);
            }
            //Probability of floor size for vacant dwellings
            double[] sizeDistribution = new double[sizeBracketsDwelling.length];
            for (int row = 0; row < sizeBracketsDwelling.length; row++) {
                String name = "HA_LT_" + sizeBracketsDwelling[row] + "sqm";
                sizeDistribution[row] = marginalsMunicipality.getIndexedValueAt(municipalityID, name);
            }
            //Probability for year and building size for vacant dwellings
            double[] yearDistribution = new double[yearBracketsDwelling.length];
            for (int row = 0; row < yearBracketsDwelling.length; row++) {
                String name = "HY_" + yearBracketsDwelling[row];
                yearDistribution[row] = marginalsMunicipality.getIndexedValueAt(municipalityID, name) / totalHouseholds;
            }
            //Average price per sqm of the zone according to building type
            float[] averagePriceDistribution = new float[typeBracketsDwelling.length];
            for (int row = 0; row < typeBracketsDwelling.length; row++) {
                String name = "HPrice_" + typeBracketsDwelling[row];
                averagePriceDistribution[row] = marginalsMunicipality.getIndexedValueAt(municipalityID, name);
            }
            HashMap<Integer, Integer> hhs = householdsForFrequencyMatrix.get(v2zone);
            int[] hhFromV2 = hhs.keySet().stream().mapToInt(Integer::intValue).toArray();
            HashMap<Integer, Integer> generatedHouseholds = new HashMap<>();

            //obtain the raster cells of the municipality and their weight within the municipality
            int[] tazInCity = cityTAZ.get(municipalityID);
            double[] probTaz = new double[tazInCity.length];
            double tazRemaining = 0;
            for (int i = 0; i < tazInCity.length; i++) {
                probTaz[i] = rasterCellsMatrix.getIndexedValueAt(tazInCity[i], "Population");
                tazRemaining = tazRemaining + probTaz[i];
            }

            double hhRemaining = 0;
            HashMap<Integer, Double> prob = new HashMap<>();
            for (int row = 0; row < hhFromV2.length; row++) {
                double value = weightsTable.getIndexedValueAt(hhFromV2[row], Integer.toString(municipalityID));
                hhRemaining = hhRemaining + value;
                prob.put(hhFromV2[row], value);
            }

            //marginals for the municipality
            int hhPersons = 0;
            int hhTotal = 0;
            int quartersTotal = 0;
            int id = 0;

            //for all the households that are inside the municipality (we will match perfectly the number of households. The total population will vary compared to the marginals.)
            for (int row = 0; row < totalHouseholds; row++) {
                //select the household to copy from the micro data (with replacement)
                double[] probability = prob.values().stream().mapToDouble(Double::doubleValue).toArray();
                int[] hhIds = prob.keySet().stream().mapToInt(Integer::intValue).toArray();
                int selectedHh = select(probability, hhIds, hhRemaining)[0];
                if (prob.get(selectedHh) > 1) {
                    prob.put(selectedHh, prob.get(selectedHh) - 1);
                    hhRemaining = hhRemaining - 1;
                } else {
                    hhRemaining = hhRemaining - prob.get(selectedHh);
                    prob.remove(selectedHh);
                }

                //Select the taz to allocate the household (without replacement)
                int[] recordsCell = select(probTaz, tazInCity, tazRemaining);
                int selectedTAZ = recordsCell[0];

                //copy the private household characteristics
                int householdSize = (int) microHouseholds.getIndexedValueAt(selectedHh, "HHsize");
                int householdCars = Math.min((int) microHouseholds.getIndexedValueAt(selectedHh, "N_Car"), 3);
                id = householdDataManager.getNextHouseholdId();
                int newDdId = RealEstateDataManager.getNextDwellingId();
                Household household = householdFactory.createHousehold(id, newDdId, householdCars); //(int id, int dwellingID, int homeZone, int hhSize, int autos)
                householdDataManager.addHousehold(household);
                hhTotal++;

                //copy the household members characteristics
                PersonFactory factory = PersonUtils.getFactory();
                for (int rowPerson = 0; rowPerson < householdSize; rowPerson++) {
                    int idPerson = householdDataManager.getNextPersonId();
                    int personCounter = (int) microHouseholds.getIndexedValueAt(selectedHh, "firstPerson") + rowPerson;
                    int age = (int) microPersons.getValueAt(personCounter, "age");
                    Gender gender = Gender.valueOf((int) microDataPerson.getValueAt(personCounter, "gender"));
                    Occupation occupation = Occupation.UNEMPLOYED;
                    int jobType = 1;
                    if ((int) microDataPerson.getValueAt(personCounter, "occupation") == 1) {
                        occupation = Occupation.EMPLOYED;
                        if ((int) microDataPerson.getValueAt(personCounter, "jobType") == 1) {
                            jobType = 1;
                        } else if ((int) microDataPerson.getValueAt(personCounter, "jobType") == 2) {
                            jobType = 2;
                        } else {
                            jobType = 3;
                        }
                    }
                    int income = 0;
                    int education = 0;
                    if (age > 15) {
                        education = SiloUtil.select(probEdu, levelEdu);
                        try {
                            income = (int) translateIncome((int) (Math.random() * 10), incomeProbability, gammaDist) * 12; //convert monthly income to yearly income
                        } catch (MathException e) {
                            e.printStackTrace();
                        }
                    }
                    Person pers = factory.createPerson(idPerson, age, gender, Race.white, occupation, null, 0, income); //(int id, int hhid, int age, int gender, Race race, int occupation, int workplace, int income)
                    householdDataManager.addPerson(pers);
                    householdDataManager.addPersonToHousehold(pers, household);
                    jobTypeByWorker.put(pers, jobType);
                    PersonRole role = PersonRole.CHILD; //default value = child
                    if ((int) microPersons.getValueAt(personCounter, "personRole") == 1) { //the person is single
                        role = PersonRole.SINGLE;
                    } else if ((int) microPersons.getValueAt(personCounter, "personRole") == 2) { //the person is married
                        role = PersonRole.MARRIED;
                    }
                    pers.setRole(role);
                    pers.setNationality(Nationality.GERMAN);
                    boolean license = false;
                    if (microPersons.getValueAt(personCounter, "DrivLicense") == 1) {
                        license = true;
                    }
                    pers.setDriverLicense(license);
                    pers.setSchoolType((int) microPersons.getValueAt(personCounter, "school"));
                    hhPersons++;
                    //counterMunicipality = updateCountersPerson(pers, counterMunicipality, municipality, ageBracketsPerson);
                }
                //counterMunicipality = updateCountersHousehold(household, counterMunicipality, municipality);

                //Copy the dwelling of that household
                int bedRooms = 1; //Not on the micro data
                int year = select(yearDistribution, yearBracketsDwelling)[0]; //the category
                int floorSpace = select(sizeDistribution, sizeBracketsDwelling)[0];
                int usage = (int) microDwellings.getIndexedValueAt(selectedHh, "H_");
                int buildingSize = (int) microDwellings.getIndexedValueAt(selectedHh, "ddT_");
                DefaultDwellingTypeImpl ddType = translateDwellingType(buildingSize);
                int quality = 1; //depend on year built and type of heating
                year = selectDwellingYear(year); //convert from year class to actual 4-digit year
                int price = estimatePrice(ddType, floorSpace);
                Dwelling dwell = DwellingUtils.getFactory().createDwelling(newDdId, selectedTAZ, null, id, ddType, bedRooms, quality, price, 0, year);
                realEstate.addDwelling(dwell);
                dwell.setFloorSpace(floorSpace);
                dwell.setUsage(DwellingUsage.valueOf(usage));
                dwell.setBuildingSize(buildingSize);
                generatedHouseholds.put(dwell.getId(), 1);
            }

            int households = householdDataManager.getHighestHouseholdIdInUse() - previousHouseholds;
            int persons = householdDataManager.getHighestPersonIdInUse() - previousPersons;
            previousHouseholds = householdDataManager.getHighestHouseholdIdInUse();
            previousPersons = householdDataManager.getHighestPersonIdInUse();

            //Consider if I need to add also the errors from other attributes. They must be at the marginals file, or one extra file
            //For county level they should be calculated on a next step, outside this loop.
            float averageError = 0f;
            /*for (int attribute = 1; attribute < attributesHouseholdIPU.length; attribute++){
                float error = Math.abs((counterMunicipality.getIndexedValueAt(municipalityID, attributesHouseholdIPU[attribute]) -
                        marginalsMunicipality.getIndexedValueAt(municipalityID, attributesHouseholdIPU[attribute])) /
                        marginalsMunicipality.getIndexedValueAt(municipalityID, attributesHouseholdIPU[attribute]));
                errorMunicipality.setIndexedValueAt(municipalityID, attributesHouseholdIPU[attribute], error);
                averageError = averageError + error;
            }
            averageError = averageError / (1 + attributesHouseholdIPU.length) * 100;*/

            householdByMunicipality.put(municipalityID, generatedHouseholds);

            logger.info("   Municipality " + municipalityID + ". Generated " + hhPersons + " persons in " + hhTotal
                    + " households. Average error of " + averageError + " %.");
        } else {
            logger.info("   Municipality " + municipalityID + " has no TAZ assigned.");
        }
    }

    int households = householdDataManager.getHighestHouseholdIdInUse();
    int persons = householdDataManager.getHighestPersonIdInUse();
    logger.info("   Finished generating households and persons. A population of " + persons + " persons in "
            + households + " households was generated.");

    //Vacant dwellings--------------------------------------------
    //They have similar characteristics to the dwellings that are occupied (assume that there is no difference between the occupied and vacant dwellings in terms of quality)
    int vacantCounter = 0;
    for (int municipality = 0; municipality < cityID.length; municipality++) {
        logger.info("   Municipality " + cityID[municipality] + ". Starting to generate vacant dwellings.");
        int municipalityID = cityID[municipality];
        int vacantDwellings = (int) marginalsMunicipality.getIndexedValueAt(cityID[municipality], "dd_Vacant");
        TableDataSet rasterCellsMatrix = cellsMatrix;
        int[] occupiedDwellings = householdByMunicipality.get(municipalityID).keySet().stream().mapToInt(Integer::intValue).toArray();

        //obtain the raster cells of the municipality and their weight within the municipality
        int[] tazInCity = cityTAZ.get(municipalityID);
        double[] probTaz = new double[tazInCity.length];
        double sumProbTaz = 0;
        for (int i = 0; i < tazInCity.length; i++) {
            probTaz[i] = rasterCellsMatrix.getIndexedValueAt(tazInCity[i], "Population");
            sumProbTaz = sumProbTaz + probTaz[i];
        }

        //Select the vacant dwelling and copy characteristics
        for (int row = 0; row < vacantDwellings; row++) {
            //Allocation
            int ddTAZ = select(probTaz, tazInCity, sumProbTaz)[0]; // I allocate vacant dwellings using the same proportion as occupied dwellings.
            //Select one occupied dwelling to copy
            int dd = selectEqualProbability(occupiedDwellings)[0];
            //Copy characteristics
            int newDdId = realEstate.getNextDwellingId();
            Dwelling ddToCopy = realEstate.getDwelling(dd);
            int bedRooms = ddToCopy.getBedrooms();
            int price = ddToCopy.getPrice();
            int quality = ddToCopy.getQuality();
            int year = ddToCopy.getYearBuilt();
            DwellingType type = ddToCopy.getType(); //using always type MF234
            int floorSpaceDwelling = ddToCopy.getFloorSpace();
            Dwelling dwell = DwellingUtils.getFactory().createDwelling(newDdId, ddTAZ, null, -1,
                    DefaultDwellingTypeImpl.MF234, bedRooms, quality, price, 0, year);
            dwell.setUsage(DwellingUsage.VACANT); //vacant dwelling = 3; and hhID is equal to -1
            dwell.setFloorSpace(floorSpaceDwelling);
            vacantCounter++;
        }
        logger.info("   The number of vacant dwellings is: " + vacantCounter);
    }

    //Write the files for all municipalities
    String name = ("microData/interimFiles/totalsSynPop.csv");
    SiloUtil.writeTableDataSet(counterMunicipality, name);
    String name1 = ("microData/interimFiles/errorsSynPop.csv");
    SiloUtil.writeTableDataSet(errorMunicipality, name1);
}
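In this generator the gamma distribution models monthly income: gammaDist is built with the configured shape and scale 1/rate, and translateIncome (not shown on this page) maps a random draw to an income value with it. The sketch below shows the typical inverse-CDF pattern such a helper might use; the class name, shape and rate values are illustrative assumptions, not the SILO implementation.

import java.util.Random;
import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.GammaDistributionImpl;

public class IncomeDraw {
    public static void main(String[] args) throws MathException {
        // illustrative values standing in for the shape and rate read from the properties file
        double incomeShape = 1.5;
        double incomeRate = 0.001;
        GammaDistributionImpl gammaDist = new GammaDistributionImpl(incomeShape, 1 / incomeRate);

        Random rng = new Random();
        // draw a uniform probability and map it through the gamma quantile function
        double p = rng.nextDouble();
        double monthlyIncome = gammaDist.inverseCumulativeProbability(p);
        int yearlyIncome = (int) monthlyIncome * 12; // as in the generator: monthly income times 12

        System.out.println("monthly = " + monthlyIncome + ", yearly = " + yearlyIncome);
    }
}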