Example usage for weka.core.matrix Matrix Matrix

List of usage examples for weka.core.matrix Matrix Matrix

Introduction

On this page you can find example usage for weka.core.matrix Matrix Matrix.

Prototype

public Matrix(double vals[], int m) 

Source Link

Document

Construct a matrix from a one-dimensional packed array

Usage

From source file:adams.core.discovery.genetic.WekaGeneticHelper.java

License:Open Source License

/**
 * Convert bit string into weka Matrix/*from   w w w.  j  ava 2s .  c om*/
 * @param bits
 * @param min
 * @param max
 * @param numBits
 * @param splits
 * @param rows
 * @param columns
 * @return
 */
public static Matrix bitsToMatrix(String bits, double min, double max, int numBits, int splits, int rows,
        int columns) {

    Matrix m = new Matrix(rows, columns);

    for (int row = 0; row < rows; row++) {
        for (int column = 0; column < columns; column++) {
            int start = (row * columns * numBits) + (column * numBits);
            double j = 0;
            for (int i = start; i < start + numBits; i++) {
                if (bits.charAt(i) == '1') {
                    j = j + Math.pow(2, start + numBits - i - 1);
                }
            }
            j = Math.min(j, splits);
            double val = (min + j * ((max - min) / (double) (splits - 1)));
            m.set(row, column, val);
        }
    }

    return m;
}

From source file:adams.data.instancesanalysis.pls.PLS1.java

License:Open Source License

/**
 * Performs predictions on the data using the previously trained PLS1
 * matrices (m_W, m_P, m_b_hat).
 *
 * @param data   the input data
 * @return      the predicted data
 */
protected Instances predict(Instances data) {
    Instances result;
    Instances tmpInst;
    int i;
    int j;
    Matrix x;    // current (deflated) input row vector
    Matrix X;    // per-component x vectors (1 x numComponents)
    Matrix T;    // scores t_j (1 x numComponents)
    Matrix t;    // score for component j (1x1 matrix)

    result = new Instances(getOutputFormat());

    // process one instance at a time
    for (i = 0; i < data.numInstances(); i++) {
        // work on each instance
        tmpInst = new Instances(data, 0);
        tmpInst.add((Instance) data.instance(i).copy());
        x = MatrixHelper.getX(tmpInst);
        X = new Matrix(1, getNumComponents());
        T = new Matrix(1, getNumComponents());

        for (j = 0; j < getNumComponents(); j++) {
            MatrixHelper.setVector(x, X, j);
            // 1. step: tj = xj * wj
            t = x.times(MatrixHelper.getVector(m_W, j));
            MatrixHelper.setVector(t, T, j);
            // 2. step: deflate -- xj+1 = xj - tj*pj^T (tj is 1x1 matrix!)
            x = x.minus(MatrixHelper.getVector(m_P, j).transpose().times(t.get(0, 0)));
        }

        // choose which class values end up in the output
        switch (m_PredictionType) {
        case ALL:
            // predicted class: y_hat = T * b_hat
            tmpInst = MatrixHelper.toInstances(getOutputFormat(), T, T.times(m_b_hat));
            break;
        case NONE:
        case EXCEPT_CLASS:
            // keep the original class values
            tmpInst = MatrixHelper.toInstances(getOutputFormat(), T, MatrixHelper.getY(tmpInst));
            break;
        default:
            throw new IllegalStateException("Unhandled prediction type: " + m_PredictionType);
        }

        result.add(tmpInst.instance(0));

    }

    return result;
}

From source file:adams.data.instancesanalysis.pls.PLS1.java

License:Open Source License

/**
 * Transforms the data using PLS1; on the first call the algorithm is
 * trained (the W, P and b_hat matrices are computed and stored),
 * afterwards the stored matrices are used for prediction only.
 *
 * @param data   the data to use
 * @param params additional parameters (unused in this method)
 * @return      the transformed data
 * @throws Exception   if the transformation fails
 */
protected Instances doTransform(Instances data, Map<String, Object> params) throws Exception {
    Matrix X, X_trans;
    Matrix y;
    Matrix W, w;          // weight vectors, one column per component
    Matrix T, t, t_trans; // score vectors
    Matrix P, p, p_trans; // loading vectors
    double b;
    Matrix b_hat;         // per-component regression coefficients
    int j;
    Matrix tmp;
    Instances result;

    // initialization (training): only performed on the first call
    if (!isInitialized()) {
        // split up data
        X = MatrixHelper.getX(data);
        y = MatrixHelper.getY(data);
        X_trans = X.transpose();

        // init
        W = new Matrix(data.numAttributes() - 1, getNumComponents());
        P = new Matrix(data.numAttributes() - 1, getNumComponents());
        T = new Matrix(data.numInstances(), getNumComponents());
        b_hat = new Matrix(getNumComponents(), 1);

        for (j = 0; j < getNumComponents(); j++) {
            // 1. step: wj
            w = X_trans.times(y);
            MatrixHelper.normalizeVector(w);
            MatrixHelper.setVector(w, W, j);

            // 2. step: tj
            t = X.times(w);
            t_trans = t.transpose();
            MatrixHelper.setVector(t, T, j);

            // 3. step: ^bj
            b = t_trans.times(y).get(0, 0) / t_trans.times(t).get(0, 0);
            b_hat.set(j, 0, b);

            // 4. step: pj
            p = X_trans.times(t).times(1 / t_trans.times(t).get(0, 0));
            p_trans = p.transpose();
            MatrixHelper.setVector(p, P, j);

            // 5. step: Xj+1 -- deflate X and y before the next component
            X = X.minus(t.times(p_trans));
            y = y.minus(t.times(b));
        }

        // W*(P^T*W)^-1
        tmp = W.times(((P.transpose()).times(W)).inverse());

        // factor = W*(P^T*W)^-1 * b_hat
        m_r_hat = tmp.times(b_hat);

        // save matrices for subsequent prediction-only calls
        m_P = P;
        m_W = W;
        m_b_hat = b_hat;

        result = predict(data);
    }
    // prediction
    else {
        result = predict(data);
    }

    return result;
}

From source file:adams.data.instancesanalysis.pls.SIMPLS.java

License:Open Source License

/**
 * Transforms the data using SIMPLS; on the first call the algorithm is
 * trained (the W and B matrices are computed and stored), afterwards the
 * stored matrices are used for transformation/prediction only.
 *
 * @param data   the data to use
 * @param params additional parameters (unused in this method)
 * @return      the transformed data
 * @throws Exception   if the transformation fails
 */
protected Instances doTransform(Instances data, Map<String, Object> params) throws Exception {
    Matrix A, A_trans;
    Matrix M;
    Matrix X, X_trans;
    Matrix X_new;
    Matrix Y, y;
    Matrix C, c;
    Matrix Q, q;
    Matrix W, w;
    Matrix P, p, p_trans;
    Matrix v, v_trans;
    Matrix T;
    Instances result;
    int h;

    // training: only performed on the first call
    if (!isInitialized()) {
        // init
        X = MatrixHelper.getX(data);
        X_trans = X.transpose();
        Y = MatrixHelper.getY(data);
        A = X_trans.times(Y);
        M = X_trans.times(X);
        C = Matrix.identity(data.numAttributes() - 1, data.numAttributes() - 1);
        W = new Matrix(data.numAttributes() - 1, getNumComponents());
        P = new Matrix(data.numAttributes() - 1, getNumComponents());
        Q = new Matrix(1, getNumComponents());

        for (h = 0; h < getNumComponents(); h++) {
            // 1. qh as dominant EigenVector of Ah'*Ah
            A_trans = A.transpose();
            q = MatrixHelper.getDominantEigenVector(A_trans.times(A));

            // 2. wh=Ah*qh, ch=wh'*Mh*wh, wh=wh/sqrt(ch), store wh in W as column
            w = A.times(q);
            c = w.transpose().times(M).times(w);
            w = w.times(1.0 / StrictMath.sqrt(c.get(0, 0)));
            MatrixHelper.setVector(w, W, h);

            // 3. ph=Mh*wh, store ph in P as column
            p = M.times(w);
            p_trans = p.transpose();
            MatrixHelper.setVector(p, P, h);

            // 4. qh=Ah'*wh, store qh in Q as column
            q = A_trans.times(w);
            MatrixHelper.setVector(q, Q, h);

            // 5. vh=Ch*ph, vh=vh/||vh||
            v = C.times(p);
            MatrixHelper.normalizeVector(v);
            v_trans = v.transpose();

            // 6. Ch+1=Ch-vh*vh', Mh+1=Mh-ph*ph' (deflation)
            C = C.minus(v.times(v_trans));
            M = M.minus(p.times(p_trans));

            // 7. Ah+1=ChAh (actually Ch+1)
            A = C.times(A);
        }

        // finish: optionally sparsify W, then store the model matrices
        if (getNumCoefficients() > 0)
            slim(W);
        m_W = W;
        T = X.times(m_W);
        X_new = T;
        m_B = W.times(Q.transpose());

        switch (m_PredictionType) {
        case ALL:
            // predicted class values: y = T*P'*B
            y = T.times(P.transpose()).times(m_B);
            break;
        case NONE:
        case EXCEPT_CLASS:
            // keep the original class values
            y = MatrixHelper.getY(data);
            break;
        default:
            throw new IllegalStateException("Unhandled prediction type: " + m_PredictionType);
        }

        result = MatrixHelper.toInstances(getOutputFormat(), X_new, y);
    } else {
        // already trained: project onto the stored weights
        X = MatrixHelper.getX(data);
        X_new = X.times(m_W);

        switch (m_PredictionType) {
        case ALL:
            y = X.times(m_B);
            break;
        case NONE:
        case EXCEPT_CLASS:
            y = MatrixHelper.getY(data);
            break;
        default:
            throw new IllegalStateException("Unhandled prediction type: " + m_PredictionType);
        }

        result = MatrixHelper.toInstances(getOutputFormat(), X_new, y);
    }

    return result;
}

From source file:Classifier.supervised.LinearRegression.java

License:Open Source License

/**
 * Calculate a linear regression using the selected attributes
 *
 * @param selectedAttributes an array of booleans where each element
 * is true if the corresponding attribute should be included in the
 * regression.
 * @return an array of coefficients for the linear regression model;
 * the last element is the intercept.
 * @throws Exception if an error occurred during the regression.
 */
protected double[] doRegression(boolean[] selectedAttributes) throws Exception {

    if (m_Debug) {
        System.out.print("doRegression(");
        for (int i = 0; i < selectedAttributes.length; i++) {
            System.out.print(" " + selectedAttributes[i]);
        }
        System.out.println(" )");
    }
    // count the attributes taking part in the regression
    int numAttributes = 0;
    for (int i = 0; i < selectedAttributes.length; i++) {
        if (selectedAttributes[i]) {
            numAttributes++;
        }
    }

    // Check whether there are still attributes left
    Matrix independent = null, dependent = null;
    if (numAttributes > 0) {
        // build the design matrix and response vector; each row is scaled
        // by sqrt(weight) so the regression becomes weighted least squares
        independent = new Matrix(m_TransformedData.numInstances(), numAttributes);
        dependent = new Matrix(m_TransformedData.numInstances(), 1);
        for (int i = 0; i < m_TransformedData.numInstances(); i++) {
            Instance inst = m_TransformedData.instance(i);
            double sqrt_weight = Math.sqrt(inst.weight());
            int column = 0;
            for (int j = 0; j < m_TransformedData.numAttributes(); j++) {
                if (j == m_ClassIndex) {
                    dependent.set(i, 0, inst.classValue() * sqrt_weight);
                } else {
                    if (selectedAttributes[j]) {
                        // center the input value by the attribute mean
                        double value = inst.value(j) - m_Means[j];

                        // We only need to do this if we want to
                        // scale the input
                        if (!m_checksTurnedOff) {
                            value /= m_StdDevs[j];
                        }
                        independent.set(i, column, value * sqrt_weight);
                        column++;
                    }
                }
            }
        }
    }

    // Compute coefficients (note that we have to treat the
    // intercept separately so that it doesn't get affected
    // by the ridge constant.)
    double[] coefficients = new double[numAttributes + 1];
    if (numAttributes > 0) {
        double[] coeffsWithoutIntercept = independent.regression(dependent, m_Ridge).getCoefficients();
        System.arraycopy(coeffsWithoutIntercept, 0, coefficients, 0, numAttributes);
    }
    coefficients[numAttributes] = m_ClassMean;

    // Convert coefficients into original scale
    int column = 0;
    for (int i = 0; i < m_TransformedData.numAttributes(); i++) {
        if ((i != m_TransformedData.classIndex()) && (selectedAttributes[i])) {

            // We only need to do this if we have scaled the
            // input.
            if (!m_checksTurnedOff) {
                coefficients[column] /= m_StdDevs[i];
            }

            // We have centred the input, so adjust the intercept accordingly
            coefficients[coefficients.length - 1] -= coefficients[column] * m_Means[i];
            column++;
        }
    }

    return coefficients;
}

From source file:mulan.classifier.meta.MLCSSP.java

License:Open Source License

/**
 * Makes a prediction for the given instance by classifying in the
 * compressed label space and projecting the classifier response back
 * to the full label space.
 *
 * @param instance the instance to make a prediction for
 * @return the multi-label output, or {@code null} if prediction failed
 */
@Override
protected MultiLabelOutput makePredictionInternal(Instance instance) {
    try {
        // classify in the compressed (kappa-dimensional) label space
        Instance transformed = css.transformInstance(instance);
        MultiLabelOutput out = baseLearner.makePrediction(transformed);

        double[] confidences = out.getConfidences();

        // make response matrix (kappa x 1 column vector of confidences)
        Matrix conf = new Matrix(kappa, 1);
        for (int i = 0; i < kappa; i++) {
            conf.set(i, 0, confidences[i]);
        }

        // compute projected classifier response (1 x numLabels)
        Matrix projectedResponse = conf.transpose().times(css.getProjectionMatrix());

        boolean[] projected_bipartition = new boolean[projectedResponse.getColumnDimension()];
        double[] projected_confidences = new double[projectedResponse.getColumnDimension()];

        for (int i = 0; i < projectedResponse.getColumnDimension(); i++) {
            projected_confidences[i] = projectedResponse.get(0, i);
            // a label is relevant if its projected confidence rounds up to exactly 1
            projected_bipartition[i] = Math.ceil(projected_confidences[i]) == 1;
        }

        return new MultiLabelOutput(projected_bipartition, projected_confidences);

    } catch (Exception ex) {
        // all failure modes (invalid data, model initialization, anything else)
        // were handled identically before: log and report "no prediction"
        Logger.getLogger(MLCSSP.class.getName()).log(Level.SEVERE, null, ex);
    }

    return null;
}

From source file:net.sf.jclal.activelearning.singlelabel.querystrategy.VarianceReductionQueryStrategy.java

License:Open Source License

/**
 * Analyzes how informative is an instance.
 *
 * Implements a variance-reduction criterion: on the first call for the
 * current unlabelled set it builds a regularized Fisher information matrix,
 * eigen-decomposes it, and iteratively optimizes a sampling distribution q
 * over the unlabelled instances until convergence (controlled by epsilon).
 * The resulting per-instance weights are cached in tempValues and simply
 * looked up on subsequent calls while the unlabelled set keeps its size.
 *
 * @param instance The instance to query.
 * @return The utility of the instance.
 */
@Override
public double utilityInstance(Instance instance) {

    Instances unlabeled = getUnlabelledData().getDataset();

    // only recompute the cached weights when the unlabelled set changed size
    if (unlabelledSize != unlabeled.numInstances()) {
        unlabelledSize = unlabeled.numInstances();

        // initialize q_sub_i
        int n = unlabeled.numInstances();
        double[] q = new double[n];
        // 1. q_sub_i = 1/n, i = 1, 2, ..., n
        // Arrays.fill(q, 1.0 / n);
        // (deliberately not filled here; copiaQ is filled below instead)

        // 2. pi_sub_i: per-instance probabilities
        double[] piSubI = getPiSubI(unlabeled);

        // create the Fisher information matrix (features x features)
        int dimensionMatrix = unlabeled.numAttributes() - 1;
        int classIndex = unlabeled.classIndex();

        Matrix matrixFisher = null;
        try {
            matrixFisher = new Matrix(dimensionMatrix, dimensionMatrix);
        } catch (Exception ex) {
            Logger.getLogger(VarianceReductionQueryStrategy.class.getName()).log(Level.SEVERE, null, ex);
        }

        // accumulate pi*(1-pi) * x*x^T over all unlabelled instances
        for (int i = 0; i < piSubI.length; i++) {
            double mult = piSubI[i] * (1 - piSubI[i]);

            // the attribute values of the instance
            double[] atributos = unlabeled.instance(i).toDoubleArray();

            // the class attribute is removed, only the features are left
            double[] vectorX = DatasetUtils.copyFeatures(atributos, classIndex);

            Matrix current = null;
            try {
                current = new Matrix(vectorX.length, vectorX.length);
            } catch (Exception ex) {
                Logger.getLogger(VarianceReductionQueryStrategy.class.getName()).log(Level.SEVERE, null, ex);
            }

            // current = x * x^T (outer product)
            productVector(current, vectorX);

            // current *= mult
            current.timesEquals(mult);

            // matrixFisher += current
            // (plusEquals stores the result in matrixFisher)
            matrixFisher.plusEquals(current);

        }

        double factorRegularizationValue = getFactorRegularization();

        // regularize: matrixFisher += factor * I
        Matrix identity = Matrix.identity(dimensionMatrix, dimensionMatrix);

        identity.timesEquals(factorRegularizationValue);

        matrixFisher.plusEquals(identity);

        // eigen decomposition of the regularized Fisher matrix
        EigenvalueDecomposition eigen = matrixFisher.eig();

        // v holds the eigenvectors (as columns);
        // eigen must stay alive while v is in use
        Matrix v = eigen.getV();

        double[] landa = eigen.getRealEigenvalues();

        double epsilonValue = getEpsilon();

        // working copy of q used to detect convergence between iterations
        double[] copiaQ = new double[q.length];
        Arrays.fill(copiaQ, 1.0 / n);

        // keep iterating while q still changes by more than epsilon
        currentEpsilonIteration = 0;
        do {
            ++currentEpsilonIteration;
            // update q from the previous iteration's copy
            // (on the first iteration it holds 1.0/n)
            System.arraycopy(copiaQ, 0, q, 0, q.length);

            // compute f_sub_j for every eigenvector j
            double[] f = new double[landa.length];
            for (int j = 0; j < f.length; j++) {
                f[j] = 0;

                for (int i = 0; i < n; i++) {
                    double mult = q[i] * piSubI[i] * (1 - piSubI[i]);

                    // the attribute values of the instance
                    double[] atributos = unlabeled.instance(i).toDoubleArray();

                    // the class attribute is removed, only the features are left
                    double[] vectorX = DatasetUtils.copyFeatures(atributos, classIndex);

                    // dot product of vector_x (1 x n) with column j of V (n x 1)
                    // result: a scalar
                    double multVectores = 0;
                    for (int k = 0; k < vectorX.length; k++) {
                        multVectores += vectorX[k] * v.get(k, j);
                    }

                    // square the result
                    multVectores *= multVectores;

                    // accumulate into f[j]
                    f[j] += mult * multVectores;
                }
            }

            // first step of finding q for the current iteration
            for (int i = 0; i < n; i++) {
                double mult = copiaQ[i] * copiaQ[i] * piSubI[i] * (1 - piSubI[i]);

                // the attribute values of the instance
                double[] atributos = unlabeled.instance(i).toDoubleArray();

                // the class attribute is removed, only the features are left
                double[] vectorX = DatasetUtils.copyFeatures(atributos, classIndex);

                // sum over all eigen-directions
                double sumatoria = 0;
                for (int j = 0; j < landa.length; j++) {

                    // dot product of vector_x (1 x n) with column j of V (n x 1)
                    // result: a scalar
                    double multVectores = 0;
                    for (int k = 0; k < vectorX.length; k++) {
                        multVectores += vectorX[k] * v.get(k, j);
                    }

                    // multiply by the eigenvalue landa[j]
                    multVectores *= landa[j];

                    // square it
                    multVectores *= multVectores;

                    // divide by the square of f[j]
                    multVectores /= f[j] * f[j];

                    // accumulate the sum
                    sumatoria += multVectores;
                }

                // the value of copia_q[i] is: mult * sumatoria
                copiaQ[i] = mult * sumatoria;
            }

            // second step: normalize q so it sums to 1

            /* the sum must be computed outside the normalization loop;
             * summing copia_q incrementally inside it would give priority
             * to the last instance, which would then always be chosen
             */
            double suma = 0;
            for (int j = 0; j < n; j++) {
                suma += copiaQ[j];
            }

            for (int i = 0; i < n; i++) {
                copiaQ[i] = copiaQ[i] / suma;
            }

        } while (change(q, copiaQ, epsilonValue));

        // cache the converged weights
        tempValues = new double[copiaQ.length];

        System.arraycopy(copiaQ, 0, tempValues, 0, copiaQ.length);

    }

    int indice = unlabeled.indexOf(instance);

    return tempValues[indice];
}

From source file:org.knime.knip.suise.node.boundarymodel.BoundaryModel.java

License:Open Source License

/**
 * Computes the (local) minimum description length score for the current
 * contour data at the given noise level.
 *
 * @param epsilon the assumed noise level used to scale the covariance estimate
 * @return the LMDL score
 */
private double calcLMDL(double epsilon) {

    // create sample matrix V: one row per feature, one column per vector
    Matrix V = new Matrix(m_contourData.numFeatures(), m_contourData.numVectors());

    double[] vec;
    for (int i = 0; i < m_contourData.numVectors(); i++) {
        vec = m_contourData.getVector(i);
        for (int j = 0; j < vec.length; j++) {
            V.set(j, i, vec[j]);
        }
    }

    // estimate of the covariance matrix
    Matrix W = V.times(V.transpose());

    // scale the covariance estimate in place; the original code called
    // W.times(...) and discarded the returned matrix (times returns a new
    // matrix), so the scaling was silently lost -- timesEquals applies it
    W.timesEquals(m_contourData.numFeatures() / (epsilon * epsilon * m_contourData.numVectors()));

    W = Matrix.identity(m_contourData.numFeatures(), m_contourData.numFeatures()).plus(W);

    // 0.5 * log2(det(I + scaled covariance)) * (#features + #vectors)
    return Utils.log2(W.det()) * (m_contourData.numFeatures() + m_contourData.numVectors()) / 2;

}

From source file:org.knime.knip.suise.node.boundarymodel.contourdata.ContourCluster.java

License:Open Source License

/**
 * Computes the (local) minimum description length score for the samples
 * currently assigned to this cluster, using a fixed noise level.
 *
 * @return the LMDL score
 */
private double calcLMDL() {

    // create sample matrix V: one row per feature, one column per sample
    Matrix V = new Matrix(m_cdata.numFeatures(), m_samples.size());

    double[] vec;
    for (int i = 0; i < m_samples.size(); i++) {
        vec = m_cdata.get(m_samples.get(i)[0], m_samples.get(i)[1]);
        for (int j = 0; j < vec.length; j++) {
            V.set(j, i, vec[j]);
        }
    }

    // assumed noise level
    double epsilon = 5;

    // estimate of the covariance matrix
    Matrix W = V.times(V.transpose());

    // scale the covariance estimate in place; the original code called
    // W.times(...) and discarded the returned matrix (times returns a new
    // matrix), so the scaling was silently lost -- timesEquals applies it
    W.timesEquals(m_cdata.numFeatures() / (epsilon * epsilon * m_samples.size()));

    W = Matrix.identity(m_cdata.numFeatures(), m_cdata.numFeatures()).plus(W);

    // 0.5 * log2(det(I + scaled covariance)) * (#features + #samples)
    return Utils.log2(W.det()) * (m_cdata.numFeatures() + m_samples.size()) / 2;

}

From source file:org.mitre.clustering.AffinityPropagation.java

License:Open Source License

/**
 * Builds an indicator column vector marking which rows of the first column
 * of {@code m} are exactly equal to {@code v}.
 *
 * @param m the matrix whose first column is compared
 * @param v the value to compare against
 * @return a count x 1 matrix containing 1.0 where m(c, 0) == v, 0.0 elsewhere
 */
private Matrix matrixIsEqual(Matrix m, double v) {
    Matrix indicator = new Matrix(count, 1);

    int row = 0;
    while (row < count) {
        if (m.get(row, 0) == v) {
            indicator.set(row, 0, 1.0);
        }
        row++;
    }

    return indicator;
}