List of usage examples for org.apache.commons.math.util MathUtils log
public static double log(double base, double x)
From source file:edu.umn.msi.tropix.proteomics.itraqquantitation.impl.Variance.java
static double[][] createVarianceMatrix(final List<ITraqLabel> labels, final Map<ITraqLabel, double[]> intensities, final double[] ds, final int binSize) { final int n = intensities.values().iterator().next().length; final double[] reference = new double[n]; for (int i = 0; i < n; i++) { double intensitiesProduct = 1.0d; for (final ITraqLabel label : labels) { final double intensity = intensities.get(label)[i]; intensitiesProduct *= intensity; }//from w w w .j ava 2s . com reference[i] = MathUtils.log(2.0, intensitiesProduct); } // final double[] sortedReference = Arrays.copyOf(reference, reference.length); final double[] sortedReference = new double[reference.length]; for (int i = 0; i < reference.length; i++) { sortedReference[i] = reference[i]; } Arrays.sort(sortedReference); final List<ITraqRatio> ratios = ITraqLabels.buildRatios(labels); final int numRatios = ratios.size(); final int numRows = numRatios * n; // Precompute ratios final double[] actualRatios = new double[numRatios]; final double[][] numerators = new double[numRatios][]; final double[][] denominators = new double[numRatios][]; for (int ratioIndex = 0; ratioIndex < numRatios; ratioIndex++) { final ITraqRatio ratio = ratios.get(ratioIndex); final int numeratorIndex = labels.indexOf(ratio.getNumerator()); final int denominatorIndex = labels.indexOf(ratio.getDenominator()); numerators[ratioIndex] = intensities.get(ratio.getNumerator()); denominators[ratioIndex] = intensities.get(ratio.getDenominator()); actualRatios[ratioIndex] = MathUtils.log(2.0, ds[numeratorIndex] / ds[denominatorIndex]); } final double[][] samplePoints = new double[numRows][2]; for (int sortedReferenceIndex = 0; sortedReferenceIndex < n; sortedReferenceIndex++) { final double referenceValue = sortedReference[sortedReferenceIndex]; final int referenceIndex = indexOf(referenceValue, reference); final int rowOffset = sortedReferenceIndex * numRatios; for (int ratioIndex = 0; ratioIndex < numRatios; ratioIndex++) { final double 
actualRatio = actualRatios[ratioIndex]; final double estimatedRatio = MathUtils.log(2.0, numerators[ratioIndex][referenceIndex] / denominators[ratioIndex][referenceIndex]); final double diff = (estimatedRatio - actualRatio); samplePoints[rowOffset + ratioIndex][0] = referenceValue; samplePoints[rowOffset + ratioIndex][1] = diff * diff; } } final int numBins = numRows / binSize; final double[][] matrixXV = new double[numBins][2]; double maxV = Double.MIN_VALUE; double sumX = 0.0, sumV = 0.0; int curBin = 0; for (int i = 0; i < numRows; i++) { sumX += samplePoints[i][0]; sumV += samplePoints[i][1]; if ((i + 1) % binSize == 0) { final double x = sumX / binSize; final double v = sumV / binSize; final double binWeight = 1 / v; if (!Double.isInfinite(binWeight)) { maxV = Math.max(binWeight, maxV); } matrixXV[curBin][0] = x; matrixXV[curBin][1] = binWeight; curBin++; sumX = 0.0; sumV = 0.0; } } for (int i = 0; i < numBins; i++) { if (Double.isInfinite(matrixXV[i][1])) { matrixXV[i][1] = maxV; } } for (int i = 0; i < numBins - 1; i++) { matrixXV[i][0] = (matrixXV[i][0] + matrixXV[i + 1][0]) / 2.0; } return matrixXV; }
From source file:edu.umn.msi.tropix.proteomics.itraqquantitation.impl.GroupSummary.java
/**
 * Summarizes a group of iTRAQ matches: caches the matches, a per-label array of
 * intensities, and a per-entry weight equal to log2 of the product of all label
 * intensities for that entry's scan.
 *
 * @param inputDataEntries the matches contributing to this group
 * @param labels           the iTRAQ labels whose intensities are collected
 */
public GroupSummary(final Iterable<ITraqMatch> inputDataEntries, final Iterable<ITraqLabel> labels) {
    this.iTraqMatchs = Lists.newArrayList(inputDataEntries);
    this.numEntries = iTraqMatchs.size();
    final Map<ITraqLabel, double[]> labelIntensities = Maps.newHashMap();
    for (final ITraqLabel label : labels) {
        labelIntensities.put(label, new double[numEntries]);
    }
    weights = new double[numEntries];
    for (int i = 0; i < numEntries; i++) {
        final ITraqMatch entry = iTraqMatchs.get(i);
        final ITraqScanSummary scan = entry.getScan();
        double intensitiesProduct = 1.0;
        for (final ITraqLabel label : labels) {
            // Fetch the intensity once and reuse it for both the cached array and
            // the running product (the original queried the scan twice per label).
            final double intensity = scan.getIntensity(label);
            labelIntensities.get(label)[i] = intensity;
            intensitiesProduct *= intensity;
        }
        weights[i] = MathUtils.log(2.0, intensitiesProduct);
    }
    this.labelIntensities = ImmutableMap.copyOf(labelIntensities);
}
From source file:afest.datastructures.tree.decision.erts.informationfunctions.GeneralizedNormalizedShannonEntropy.java
@Override public <T extends ITrainingPoint<R, C>> double getScore(Collection<T> set, ISplit<R> split) { HashMap<Boolean, ArrayList<T>> splitSeparation = InformationFunctionsUtils.performSplit(set, split); HashMap<Boolean, Integer> countSeparation = new HashMap<Boolean, Integer>(); for (Boolean key : splitSeparation.keySet()) { ArrayList<T> elements = splitSeparation.get(key); countSeparation.put(key, elements.size()); }/*w w w . ja v a 2 s. c o m*/ HashMap<C, Integer> countContent = groupElementsByContent(set); HashMap<C, Integer> countContentTrue = groupElementsByContent(splitSeparation.get(true)); HashMap<C, Integer> countContentFalse = groupElementsByContent(splitSeparation.get(false)); double ht = getEntropy(countSeparation, set.size()); double hc = getEntropy(countContent, set.size()); double dSize = (double) set.size(); double pTrue = countSeparation.get(true) / dSize; double hct = 0; for (Integer count : countContentTrue.values()) { double prob1 = count / dSize; double prob2 = prob1 / pTrue; hct -= prob1 * MathUtils.log(2, prob2); } for (Integer count : countContentFalse.values()) { double prob1 = count / dSize; double prob2 = 1 - (prob1 / pTrue); // pFalse hct -= prob1 * MathUtils.log(2, prob2); } // Mutual Information double itc = hc - hct; // Normalization double ctc = 2 * itc / (hc + ht); return ctc; }
From source file:afest.datastructures.tree.decision.erts.informationfunctions.GeneralizedNormalizedShannonEntropy.java
/** * Return the entropy of a given set of classes. * @param counts number of elements in each class. * @param size total number of elements. (denominator of the probability) * @return the entropy of a given set of classes. */// www . ja v a 2s . com protected double getEntropy(HashMap<?, Integer> counts, int size) { double dSize = (double) size; double h = 0; for (Integer count : counts.values()) { double prob = count / dSize; h -= prob * MathUtils.log(2, prob); } return h; }
From source file:org.apache.mahout.freqtermsets.PFPGrowth.java
// NOTE(review): this snippet was mangled when the original source was collapsed
// onto a single line. The mid-line "// TODO save log(2)..." comment now swallows
// all of the code that follows it on the same line, and a chunk of source around
// "repeating: 32) {" appears to have been lost outright -- an orphaned condition
// tail ("32) {") survives with no matching opening code, leaving the braces
// unbalanced. Do not treat the line below as compilable Java; recover the
// original file before reusing it.
// Surviving intent (from the visible tokens only): map an attribute hash onto one
// of numGroups groups by masking/folding low-order bytes of the hash under a mask
// of (int) log2(numGroups - 1) + 1 bits; the returned group number is 1-based
// ("return result + 1; // group numbers are not zero based").
public static int getGroupFromHash(int attrHash, int numGroups) { // TODO save log(2) as a constant and devide by it.. this function is SLOW int maskLen = (int) MathUtils.log(2, numGroups - 1) + 1; int mask = (int) Math.pow(2, maskLen) - 1; int result = 0; int attrLSBs = 0; int byteMask = 255; int numBytes = (maskLen / 8) + 1; int leftShifts = 0; while (leftShifts < maskLen) { // No folding.. the murmer hash seems to be a repeating: 32) { for (int i = 0; i < numBytes; ++i) { attrLSBs += attrHash & byteMask; byteMask <<= 8;//from www. j a v a 2 s . c o m } result ^= attrLSBs & mask; leftShifts += maskLen; attrHash >>>= maskLen; } return result + 1; // group numbers are not zero based }
From source file:org.renjin.MathExt.java
public static double log(double x, double base) { //Method cannot be called directly as R and Apache Commons Math argument order // are reversed return MathUtils.log(base, x); }
From source file:org.renjin.MathExt.java
/**
 * Computes the base-2 logarithm of {@code d} by delegating to Commons Math.
 *
 * @param d the value whose base-2 logarithm is taken
 * @return log2 of {@code d}
 */
public static double log2(double d) {
    final double base = 2;
    return MathUtils.log(base, d);
}
From source file:org.renjin.primitives.MathExt.java
@Deferrable @Builtin// ww w . j a v a 2 s .co m @DataParallel public static double log(double x, double base) { //Method cannot be called directly as R and Apache Commons Math argument order // are reversed return MathUtils.log(base, x); }
From source file:org.renjin.primitives.MathExt.java
/**
 * Computes the base-2 logarithm of {@code d} by delegating to Commons Math.
 *
 * @param d the value whose base-2 logarithm is taken
 * @return log2 of {@code d}
 */
@Deferrable
@Builtin
@DataParallel
public static double log2(double d) {
    return MathUtils.log(2.0, d);
}
From source file:org.renjin.primitives.MathGroup.java
/**
 * Computes the logarithm of {@code x} in the given {@code base} via Commons
 * Math, which takes its arguments in (base, x) order.
 *
 * @param x    the value whose logarithm is taken
 * @param base the logarithm base
 * @return the base-{@code base} logarithm of {@code x}
 */
@Deferrable
@Builtin
@DataParallel(value = PreserveAttributeStyle.ALL, passNA = true)
public static double log(double x, double base) {
    final double result = MathUtils.log(base, x);
    return result;
}