Example usage for weka.core.matrix Matrix Matrix

List of usage examples for weka.core.matrix Matrix Matrix

Introduction

On this page you can find an example usage for weka.core.matrix Matrix Matrix.

Prototype

public Matrix(Reader r) throws Exception 

Source Link

Document

Reads a matrix from a reader.

Usage

From source file:cyber009.ann.ANN.java

/**
 * Fits the weight vector in closed form via the normal equations:
 * W = (X^T X)^-1 X^T Y (ordinary least squares), then copies the
 * solution into v.WEIGHT.
 */
public void weightFindMatrix() {
    Matrix X = new Matrix(v.X);
    Matrix Y = new Matrix(v.D, 1);

    // Copy the D training targets into the D x 1 column vector Y.
    for (int d = 0; d < v.D; d++) {
        Y.set(d, 0, v.TARGET[d]);
    }

    // Solve the normal equations. X must have full column rank, otherwise
    // Matrix.inverse() fails; there is no regularization here.
    Matrix W = X.transpose().times(X).inverse().times(X.transpose()).times(Y);

    // W has v.N rows (indices 0..v.N-1), so the bound must be strict:
    // the original "n <= v.N" read one row past the end of W and threw
    // ArrayIndexOutOfBoundsException. The former zero-initialization of W
    // was dead code (W was immediately overwritten) and has been removed.
    for (int n = 0; n < v.N; n++) {
        v.WEIGHT[n] = W.get(n, 0);
    }
}

From source file:cyber009.main.UDAL.java

/**
 * Driver for the uncertainty-driven active learning (UDAL) experiment:
 * trains the ANN by gradient descent, computes per-class statistics, scores
 * every unlabeled sample by posterior-weighted conditional entropy, and
 * prints the lowest-scoring candidates plus the elapsed training time.
 */
public static void main(String[] args) {
    // The unused "double min" local from the original has been removed.
    UDAL udal = new UDAL(0.014013, 100);
    Statistics statis = new Statistics(udal.v);
    long timeStart = 0, timeEnd = 0;
    // NOTE(review): these bounds make the loop run exactly once (f == 2);
    // presumably the range was edited by hand between experiments.
    for (int f = 2; f <= 2; f++) {
        udal.initUDAL(4, 400);
        udal.activeLearning(0, 100);
        udal.ann.weightReset();
        timeStart = System.currentTimeMillis();
        udal.ann.gradientDescent(10000L, 3, 100);
        // Per-class multivariate mean/covariance statistics.
        for (Double target : udal.v.CLASSES) {
            statis.calMVMuSigma(target);
            System.out.println(udal.v.N_DATA_IN_CLASS.get(target));
            System.out.println(statis.mu.get(target));
            System.out.println(statis.sigma.get(target));
        }
        double pp = 0.0D;
        // Score every unlabeled sample.
        for (int d = 0; d < udal.v.D; d++) {
            if (udal.v.LABEL[d] == false) {
                // Feature vector excluding index 0 (presumably the bias
                // column, given the 1-based copy below — TODO confirm).
                double[][] val = new double[udal.v.N - 1][1];
                for (int n = 1; n < udal.v.N; n++) {
                    val[n - 1][0] = udal.v.X[d][n];
                }
                Matrix mVal = new Matrix(val);
                pp = 0.0D;
                // Expected conditional entropy, weighted by class posterior.
                for (Double target : udal.v.CLASSES) {
                    pp += (statis.posteriorDistribution(target, mVal)
                            * statis.conditionalEntropy(target, mVal, d));
                }
                udal.v.X_FL[d] = pp;
                udal.addToMinQueue(d);
            }
        }
        System.out.println("min values");
        for (int q = 0; q < udal.MINQUEUESIZE; q++)
            System.out.println(udal.MinQueue[q] + " = " + udal.v.X_FL[udal.MinQueue[q]]);
        System.out.println("-----------------------");
        timeEnd = System.currentTimeMillis();
        System.out.println("feature #:" + udal.v.N + " time:(" + (timeEnd - timeStart) + ")");
        udal.v.showResult();

    }
}

From source file:edu.stanford.rsl.apps.gui.roi.ComputeIndependentComponents.java

License:Open Source License

@Override
public Object evaluate() {
    // Hoist loop invariants: roi.getBounds() and the channel-grid lookup
    // were re-evaluated for every pixel in the original code.
    final int width = roi.getBounds().width;
    final int height = roi.getBounds().height;
    final int x0 = roi.getBounds().x;
    final int y0 = roi.getBounds().y;
    final MultiChannelGrid2D channels = (MultiChannelGrid2D) multiGrid.getSubGrid(currentImage);

    // Build the signal matrix: one row per channel, one column per pixel
    // (all ROI pixels, or only masked pixels if a mask is set).
    double[][] signals = null;
    if (roi.getMask() == null) {
        signals = new double[numChannels][height * width];
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {
                int x = x0 + i;
                int y = y0 + j;
                for (int k = 0; k < numChannels; k++) {
                    signals[k][(j * width) + i] = channels.getChannel(k).getPixelValue(x, y);
                }
            }
        }
    } else {
        // Count pixels in mask so the signal columns can be sized exactly.
        int count = 0;
        ByteProcessor mask = (ByteProcessor) roi.getMask();
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {
                if (mask.getPixel(i, j) == 255) {
                    count++;
                }
            }
        }
        signals = new double[numChannels][count];
        int index = 0;
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {
                int x = x0 + i;
                int y = y0 + j;
                if (mask.getPixel(i, j) == 255) {
                    for (int k = 0; k < numChannels; k++) {
                        signals[k][index] = channels.getChannel(k).getPixelValue(x, y);
                    }
                    index++;
                }
            }
        }
    }
    try {
        // Decompose the signals with the selected operation (ICA, PCA, SVD).
        double[][] vectors = null;
        if (operation.equals(ICA)) {
            FastICA ica = new FastICA(signals, numChannels);
            vectors = ica.getICVectors();
        }
        if (operation.equals(PCA)) {
            org.fastica.PCA pca = new org.fastica.PCA(signals);
            vectors = org.fastica.math.Matrix.mult(pca.getEigenVectors(), pca.getVectorsZeroMean());
            for (int k = 0; k < numChannels; k++) {
                System.out.println("Eigen Value " + k + " " + pca.getEigenValues()[k]);
            }
        }
        if (operation.equals(SVD)) {
            weka.core.matrix.SingularValueDecomposition svd = new weka.core.matrix.SingularValueDecomposition(
                    new Matrix(signals).transpose());
            vectors = svd.getU().transpose().getArray();
            for (int k = 0; k < numChannels; k++) {
                System.out.println("Singular Value " + k + " " + svd.getSingularValues()[k]);
            }
        }

        // Write the component vectors back into an image grid, mirroring
        // the exact pixel ordering used when building signals above.
        ByteProcessor mask = (ByteProcessor) roi.getMask();
        MultiChannelGrid2D out = new MultiChannelGrid2D(width, height, numChannels);

        if (roi.getMask() == null) {
            for (int j = 0; j < height; j++) {
                for (int i = 0; i < width; i++) {
                    for (int k = 0; k < numChannels; k++) {
                        out.getChannel(k).putPixelValue(i, j, vectors[k][(j * width) + i]);
                    }
                }
            }
        } else {
            int index = 0;
            for (int j = 0; j < height; j++) {
                for (int i = 0; i < width; i++) {
                    if (mask.getPixel(i, j) == 255) {
                        for (int k = 0; k < numChannels; k++) {
                            out.getChannel(k).putPixelValue(i, j, vectors[k][index]);
                        }
                        index++;
                    }
                }
            }
        }
        // NOTE(review): title string lacks a space after "using" in the
        // original; kept byte-for-byte to preserve behavior.
        out.show("Components using" + operation);
    } catch (FastICAException e) {
        // Decomposition failed; report and fall through to the null return.
        e.printStackTrace();
    }
    return null;
}

From source file:faster_pca.faster_pca.java

License:Open Source License

/**
* Initializes the filter with the given input data.
* 
* @param instances the data to process/*  w  w  w . ja va  2 s . c o  m*/
* @throws Exception in case the processing goes wrong
* @see #batchFinished()
*/
protected void setup(Instances instances) throws Exception {
    int i;
    int j;
    Vector<Integer> deleteCols;
    int[] todelete;
    double[][] v;
    Matrix corr;
    EigenvalueDecomposition eig;
    Matrix V;

    m_TrainInstances = new Instances(instances);

    // make a copy of the training data so that we can get the class
    // column to append to the transformed data (if necessary)
    m_TrainCopy = new Instances(m_TrainInstances, 0);

    /*m_ReplaceMissingFilter = new ReplaceMissingValues();
    m_ReplaceMissingFilter.setInputFormat(m_TrainInstances);
    m_TrainInstances = Filter.useFilter(m_TrainInstances,
      m_ReplaceMissingFilter);*/

    m_NominalToBinaryFilter = new NominalToBinary();
    m_NominalToBinaryFilter.setInputFormat(m_TrainInstances);
    m_TrainInstances = Filter.useFilter(m_TrainInstances, m_NominalToBinaryFilter);

    // delete any attributes with only one distinct value or are all missing
    deleteCols = new Vector<Integer>();
    /*for (i = 0; i < m_TrainInstances.numAttributes(); i++) {
      if (m_TrainInstances.numDistinctValues(i) <= 1) {
        deleteCols.addElement(i);
      }
    }*/

    if (m_TrainInstances.classIndex() >= 0) {
        // get rid of the class column
        m_HasClass = true;
        m_ClassIndex = m_TrainInstances.classIndex();
        deleteCols.addElement(new Integer(m_ClassIndex));
    }

    // remove columns from the data if necessary
    if (deleteCols.size() > 0) {
        m_AttributeFilter = new Remove();
        todelete = new int[deleteCols.size()];
        for (i = 0; i < deleteCols.size(); i++) {
            todelete[i] = (deleteCols.elementAt(i)).intValue();
        }
        m_AttributeFilter.setAttributeIndicesArray(todelete);
        m_AttributeFilter.setInvertSelection(false);
        m_AttributeFilter.setInputFormat(m_TrainInstances);
        m_TrainInstances = Filter.useFilter(m_TrainInstances, m_AttributeFilter);
    }

    // can evaluator handle the processed data ? e.g., enough attributes?
    getCapabilities().testWithFail(m_TrainInstances);

    m_NumInstances = m_TrainInstances.numInstances();
    m_NumAttribs = m_TrainInstances.numAttributes();

    // fillCorrelation();
    fillCovariance();

    // get eigen vectors/values
    corr = new Matrix(m_Correlation);
    eig = corr.eig();
    V = eig.getV();
    v = new double[m_NumAttribs][m_NumAttribs];
    for (i = 0; i < v.length; i++) {
        for (j = 0; j < v[0].length; j++) {
            v[i][j] = V.get(i, j);
        }
    }
    m_Eigenvectors = v.clone();
    m_Eigenvalues = eig.getRealEigenvalues().clone();

    // any eigenvalues less than 0 are not worth anything --- change to 0
    for (i = 0; i < m_Eigenvalues.length; i++) {
        if (m_Eigenvalues[i] < 0) {
            m_Eigenvalues[i] = 0.0;
        }
    }
    m_SortedEigens = Utils.sort(m_Eigenvalues);
    m_SumOfEigenValues = Utils.sum(m_Eigenvalues);

    m_TransformedFormat = determineOutputFormat(m_TrainInstances);
    setOutputFormat(m_TransformedFormat);

    m_TrainInstances = null;
}

From source file:meka.classifiers.multilabel.PLST.java

License:Open Source License

/**
 * The method to transform the labels into another set of latent labels,
 * typically a compression method is used, e.g., Boolean matrix decomposition
 * in the case of MLC-BMaD, or matrix multiplication based on SVD for PLST.
 *
 * @param D the instances to transform into new instances with transformed labels. The
 * Instances consist of features and original labels.
 * @return The resulting instances. Instances consist of features and transformed labels.
 */
@Override
public Instances transformLabels(Instances D) throws Exception {
    Instances features = this.extractPart(D, false);
    Instances labels = this.extractPart(D, true);

    Matrix labelMatrix = MatrixUtils.instancesToMatrix(labels);

    // Preprocessing as in the original implementation: recode 0/1 labels
    // to -1/+1 in labelMatrix and record each label's column mean.
    double[] averages = new double[labels.numAttributes()];

    for (int i = 0; i < labels.numAttributes(); i++) {
        double[] column = labels.attributeToDoubleArray(i);
        double sum = 0.0;
        for (int j = 0; j < column.length; j++) {
            if (column[j] == 1.0) {
                sum += 1.0;
            } else {
                sum += -1;
                // The algorithm needs 1/-1 coding, so change the matrix here.
                labelMatrix.set(j, i, -1.0);
            }
        }
        averages[i] = sum / column.length;
    }

    double[][] shiftMatrix = new double[1][labels.numAttributes()];
    shiftMatrix[0] = averages;

    // remember shift for prediction
    this.m_Shift = new Matrix(shiftMatrix);

    // One row of column means per training instance. The rows alias the
    // same array, which is safe because Matrix only reads it here.
    double[][] shiftTrainMatrix = new double[labels.numInstances()][labels.numAttributes()];
    for (int i = 0; i < labels.numInstances(); i++) {
        shiftTrainMatrix[i] = averages;
    }

    Matrix trainShift = new Matrix(shiftTrainMatrix);

    SingularValueDecomposition svd = new SingularValueDecomposition(labelMatrix.minus(trainShift));

    // The paper uses U here, but the implementation by the authors uses V, so
    // we used V here too.
    m_v = svd.getV();

    // Keep only the first getSize() columns of V. Fetch the backing array
    // once instead of calling getArray() for every single element as the
    // original did.
    double[][] vArr = m_v.getArray();
    double[][] newArr = new double[m_v.getRowDimension()][this.getSize()];
    for (int i = 0; i < newArr.length; i++) {
        for (int j = 0; j < newArr[i].length; j++) {
            newArr[i][j] = vArr[i][j];
        }
    }

    m_v = new Matrix(newArr);

    // now the multiplication (last step of the algorithm)
    Matrix compressed = MatrixUtils.instancesToMatrix(labels).times(this.m_v);

    // and transform it to Instances
    ArrayList<Attribute> attinfos = new ArrayList<Attribute>();

    for (int i = 0; i < compressed.getColumnDimension(); i++) {
        attinfos.add(new Attribute("att" + i));
    }

    // create pattern instances (also used in prediction) note: this is a regression
    // problem now, labels are not binary
    this.m_PatternInstances = new Instances("compressedlabels", attinfos, compressed.getRowDimension());

    // fill result Instances
    Instances result = Instances.mergeInstances(MatrixUtils.matrixToInstances(compressed, m_PatternInstances),
            features);

    result.setClassIndex(this.getSize());
    return result;
}

From source file:meka.classifiers.multilabel.PLST.java

License:Open Source License

/**
 * Transforms the predictions of the internal classifier back to the original labels.
 *
 * @param y The predictions that should be transformed back. The array consists only of
 * the predictions as they are returned from the internal classifier.
 * @return The transformed predictions./*  w ww. j a v a 2 s  . co m*/
 */
@Override
public double[] transformPredictionsBack(double[] y) {
    // y consists of predictions and maxindex, we need only predictions
    double[] predictions = new double[y.length / 2];

    for (int i = 0; i < predictions.length; i++) {
        predictions[i] = y[predictions.length + i];
    }

    double[][] dataArray = new double[1][predictions.length];

    dataArray[0] = predictions;

    Matrix yMat = new Matrix(dataArray);

    Matrix multiplied = yMat.times(this.m_v.transpose()).plus(m_Shift);

    double[] res = new double[multiplied.getColumnDimension()];

    // change back from -1/1 coding to 0/1
    for (int i = 0; i < res.length; i++) {
        res[i] = multiplied.getArray()[0][i] < 0.0 ? 0.0 : 1.0;
    }

    return res;
}

From source file:meka.core.MatrixUtils.java

License:Open Source License

/**
 * Helper method that transforms an Instances object into a Matrix object.
 *
 * @param inst The Instances to transform.
 * @return The resulting Matrix object (rows = instances, columns = attributes).
 */
public static Matrix instancesToMatrix(Instances inst) {
    double[][] darr = new double[inst.numInstances()][inst.numAttributes()];
    for (int i = 0; i < inst.numAttributes(); i++) {
        // Fetch the column once: attributeToDoubleArray builds a fresh
        // array on every call, so the original inner-loop calls did
        // O(rows) work (and one allocation) per element.
        double[] column = inst.attributeToDoubleArray(i);
        for (int j = 0; j < column.length; j++) {
            darr[j][i] = column[j];
        }
    }
    return new Matrix(darr);
}

From source file:mulan.transformations.ColumnSubsetSelection.java

License:Open Source License

/**
 * Applies Column Subset Selection (CSSP) to reduce the label space: samples
 * kappa label columns with probabilities derived from the leading
 * right-singular vectors of the label indicator matrix, removes the
 * unselected label columns from the dataset, and stores the projection
 * matrices needed to map predictions back to the full label space.
 *
 * @param data  the multi-label dataset to transform
 * @param kappa the number of label columns to keep (must be less than the
 *              total number of labels)
 * @param seed  seed for the random column-sampling loop
 * @return the transformed dataset, or null if any exception occurs
 *         (exceptions are swallowed silently — see the catch block)
 */
public MultiLabelInstances transform(MultiLabelInstances data, int kappa, long seed) {
    try {

        if (kappa >= data.getNumLabels()) {
            throw new MulanRuntimeException(
                    "Dimensionality reduction parameter should not exceed or be equal to the total count of labels!");
        }

        // integer indices of physical label assignments
        int[] labelIndices = data.getLabelIndices();
        int[] indices = new int[labelIndices.length];

        System.arraycopy(labelIndices, 0, indices, 0, labelIndices.length);

        // load label indicator matrix in a Matrix object
        double[][] datmatrix = new double[data.getDataSet().numInstances()][labelIndices.length];
        Matrix mat = new Matrix(datmatrix);

        // Fill the matrix by parsing each label value from its string form.
        for (int i = 0; i < data.getDataSet().numInstances(); i++) {
            Instance instance = data.getDataSet().instance(i);
            for (int j = 0; j < labelIndices.length; j++) {
                mat.set(i, j, Double.parseDouble(instance.toString(labelIndices[j])));
                //DEBUG: System.out.print("" + Double.parseDouble(instance.toString(labelIndices[j])) + ",");
            }
        }

        // make private copy of the label matrix
        this.Y = mat;

        // compute eigenvalue analysis of label indicator matrix
        SingularValueDecomposition svd = new SingularValueDecomposition(mat);

        //DEBUG: System.out.println("rows = " + svd.getV().getRowDimension() + ", cols = " + svd.getV().getColumnDimension());

        assert (svd.getV().getRowDimension() == svd.getV().getColumnDimension());

        Matrix rVec = svd.getV();
        Matrix Vk = new Matrix(new double[svd.getV().getRowDimension()][kappa]);

        // snippet (2)
        // NOTE(review): this copies the i-th ROW of V into the i-th COLUMN
        // of Vk (a transpose). The usual CSSP formulation takes the leading
        // columns of V directly — confirm this is intended.
        for (int i = 0; i < kappa; i++) {
            for (int j = 0; j < svd.getV().getColumnDimension(); j++) {
                Vk.set(j, i, rVec.get(i, j));
            }
        }

        // compute column selection probabilitites
        // (leverage-score style: row norms of Vk, later normalized to sum 1)
        double[] selectionProbabilities = new double[Vk.getRowDimension()];
        double[] selectionProbabilitiesCDF = new double[Vk.getRowDimension()];

        for (int i = 0; i < Vk.getRowDimension(); i++) {
            selectionProbabilities[i] = 0.0;
            for (int j = 0; j < kappa; j++) {
                selectionProbabilities[i] += Math.pow(Vk.get(i, j), 2);
            }
            selectionProbabilities[i] = Math.sqrt(selectionProbabilities[i]);
        }

        // normalize probabilities
        double psum = 0.0;
        for (int i = 0; i < Vk.getRowDimension(); i++) {
            psum += selectionProbabilities[i];
            //System.out.println("psum = " + psum);
        }
        //System.out.println("psum = " + psum);
        //assert (psum != 0 && psum == 1.0); // must be non-zero and unitary

        for (int i = 0; i < Vk.getRowDimension(); i++) {
            selectionProbabilities[i] /= psum;
        }

        // Build the cumulative distribution for roulette-wheel sampling.
        psum = 0.0;
        for (int i = 0; i < Vk.getRowDimension(); i++) {
            psum += selectionProbabilities[i];
            selectionProbabilitiesCDF[i] = psum;
        }

        // add selected columns on a linked list
        sampledIndiceSet = new java.util.HashSet();

        // run column-sampling loop
        int sampling_count = 0;

        // Roulette-wheel sampling without replacement: draw until kappa
        // distinct columns have been selected. Terminates because every
        // column has non-zero probability mass in the CDF.
        Random generator = new Random(seed);
        while (sampledIndiceSet.size() < kappa) // ...loop until knapsack gets filled...
        {
            // pick a random number

            //DEBUG:
            //double roulette = generator.nextDouble() * 0.5;
            double roulette = generator.nextDouble();

            // seek closest match according to sampling probabilities
            int closest_match = -1;

            // iterate label cols
            for (int i = 0; i < Vk.getRowDimension(); i++) {
                if (roulette < selectionProbabilitiesCDF[i]) // ...spot a possible match...
                {
                    // ...if so, select and quit scope...
                    closest_match = i; // BEWARE! "i" is an index over the label enumeration, not an ordering index!
                    break;
                }
            }

            // if we stepped on the flag, something serious is going on!
            assert (closest_match != -1);

            // see if column was selected; if not, add it
            if (!sampledIndiceSet.contains((Object) closest_match)) {
                sampledIndiceSet.add((Object) closest_match);
                //System.out.println("DEBUG(CSSP): Added column " + closest_match + " to the sampled column set!");
            }

            sampling_count += 1;
        }

        System.out.println("Sampling loop completed in " + sampling_count + " runs.");

        // compute indices-to-remove array
        indicesToRemove = new int[labelIndices.length - sampledIndiceSet.size()];

        // compute all **PHYSICAL** (not VIRTUAL) indices of label columns for CSSP to remove
        int idx = 0;
        for (int i = 0; i < labelIndices.length; i++) {
            if (!sampledIndiceSet.contains((Object) i)) {
                indicesToRemove[idx] = indices[i];
                idx += 1;
            }
        }

        // apply CSSP: select columns to remove
        // NOTE(review): the clone followed by an element-wise int copy is
        // redundant (both arrays are already int[]); kept as-is.
        int[] selectedIndicesObj = indicesToRemove.clone();
        selectedIndicesInt = new int[selectedIndicesObj.length];
        for (int i = 0; i < selectedIndicesObj.length; i++) {
            selectedIndicesInt[i] = (int) selectedIndicesObj[i];
        }

        // compute Moore-Penrose pseudo-inverse matrix of the column-reduced label indicator matrix
        double[][] datmatrix2 = new double[data.getDataSet().numInstances()][labelIndices.length
                - selectedIndicesInt.length];
        Matrix matC = new Matrix(datmatrix2);

        //DEBUG:
        //System.out.println("Selecting only " + matC.getColumnDimension() + " columns; removing " + selectedIndicesInt.length + " columns out of an original total of " + data.getLabelIndices().length + " labels!");

        // compute indices to keep
        java.util.LinkedList<Integer> indicesToKeep = new java.util.LinkedList();
        for (int i = 0; i < labelIndices.length; i++) {
            boolean keep = true;

            // see if this col has to be removed
            for (int k = 0; k < selectedIndicesInt.length; k++) {
                if (selectedIndicesInt[k] == labelIndices[i]) {
                    keep = false;
                    break;
                }
            }

            // add if we actually should keep this...
            if (keep) {
                indicesToKeep.add(labelIndices[i]);
            }
        }

        assert (indicesToKeep.size() == matC.getColumnDimension());

        // Re-read the kept label columns from the dataset into matC.
        for (int i = 0; i < matC.getRowDimension(); i++) {
            // get data instance
            Instance instance = data.getDataSet().instance(i);

            // replicate data from ALL columns that WOULD not be removed by CSSP           
            for (int j = 0; j < matC.getColumnDimension(); j++) {
                // get label indice
                int corrIdx = (int) indicesToKeep.get(j);

                // update matC
                matC.set(i, j, Double.parseDouble(instance.toString(corrIdx)));
            }
        }

        //DEBUG: System.out.println("matC rows = " + matC.getRowDimension() + ", cols = " + matC.getColumnDimension() + "\n data original label cols # = " + data.getLabelIndices().length);

        // make private copy of projection matrices

        // Moore-Penrose pseudo-inverse of the label matrix matC
        // see http://robotics.caltech.edu/~jwb/courses/ME115/handouts/pseudo.pdf for an SVD-based workaround for MP-inverse

        // Moore-Penrose pseudoinverse computation based on Singular Value Decomposition (SVD)
        /*
         SingularValueDecomposition decomp = Vk.svd();
                
         Matrix S = decomp.getS();
         Matrix Scross = new Matrix(selectedIndicesInt.length,selectedIndicesInt.length);
         for(int i = 0; i < selectedIndicesInt.length; i++) {
         for(int j = 0; j < selectedIndicesInt.length; j++) {
         if(i == j) {
         if(S.get(i, j) == 0) {
         Scross.set(i, j, 0.0);
         } else {
         Scross.set(i, j, 1 / S.get(i, j));
         }
         } else {
         Scross.set(i, j, 0.0);
         }
         }
         }
                
         this.Yc = decomp.getV().times(Scross).times(decomp.getU().transpose());
         */

        // Traditional pseudoinverse: left inverse (A^T A)^-1 A^T for tall
        // matrices, right inverse A^T (A A^T)^-1 for wide ones. Requires
        // the respective Gram matrix to be invertible (full rank).
        if (matC.getRowDimension() >= matC.getColumnDimension()) {
            this.Yc = ((matC.transpose().times(matC)).inverse()).times(matC.transpose());
        } else {
            this.Yc = matC.transpose().times((matC.times(matC.transpose()).inverse()));
        }

        //System.out.println("Yc rows: " + Yc.getRowDimension() + "\nYc cols: " + Yc.getColumnDimension() + "\n Y rows: " + Y.getRowDimension() + "\nY cols: " + Y.getColumnDimension());

        this.ProjectionMatrix = Yc.times(Y); // compute projection matrix

        // add sampled indices to Remove object
        remove = new Remove();
        remove.setAttributeIndicesArray(selectedIndicesInt);
        remove.setInvertSelection(false);
        remove.setInputFormat(data.getDataSet());

        // apply remove filter on the labels
        transformed = Filter.useFilter(data.getDataSet(), remove);

        this.sampledIndicesObj = indicesToKeep.toArray();

        return data.reintegrateModifiedDataSet(transformed);

    } catch (Exception ex) {
        // NOTE(review): all exceptions (including the kappa check above)
        // are swallowed and null is returned — callers must null-check.
        //Logger.getLogger(BinaryRelevanceTransformation.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    }
}

From source file:org.mitre.ccv.CompleteCompositionVectorMain.java

License:Open Source License

/**
 * Performs Affinity Propagation Clustering
 * /*from www.j  ava 2 s  . c  om*/
 * @param dm
 * @param filename
 */
public AffinityPropagation cluster(DistanceMatrix dm, int type) {
    double[][] mVals = dm.getClonedDistances();
    Matrix m = new Matrix(mVals);
    if (distCalc == 1 || distCalc == 3) {
        m = m.times(-1.0);
    } else if (distCalc == 2) {
        Matrix o = new Matrix(m.getRowDimension(), m.getColumnDimension(), 1.0);
        m = o.minus(m);
    }

    int total = m.getColumnDimension();
    double values[] = new double[(total * total - total) / 2];
    int count = 0;
    for (int i = 1; i < total; i++) {
        for (int j = i + 1; j < total - 1; j++) {
            values[count] = m.get(i, j);
            count++;
        }

    }

    double preference = getPreference(values, type);

    AffinityPropagation ap = new AffinityPropagation(m, 5000, 300, 0.9, preference);

    return ap;
}

From source file:org.mitre.ccv.CompleteCompositionVectorMain.java

License:Open Source License

/**
 * Write out the distance matrix to the given BufferedWriter.
 *
 * @param dm the DistanceMatrix.
 * @param bw the BufferedWriter to write distance to; NOTE: it is closed by
 *           this method on success (callers must not reuse it).
 * @throws IOException if writing the matrix fails
 */
public void writeDistanceMatrix(DistanceMatrix dm, BufferedWriter bw) throws IOException {

    double[][] mVals = dm.getClonedDistances();
    Matrix m = new Matrix(mVals);
    try {
        // Delegate serialization to weka's Matrix.write.
        m.write(bw);
        bw.close();
    } catch (Exception ex) {
        // Preserve the original exception as the cause; the previous
        // "new IOException(ex.getMessage())" discarded the stack trace
        // (and produced a null message for message-less exceptions).
        throw new IOException(ex);
    }
}