Example usage for org.apache.commons.math3.linear BlockRealMatrix BlockRealMatrix

Introduction

On this page you can find example usage for the org.apache.commons.math3.linear.BlockRealMatrix constructor BlockRealMatrix(int, int).

Prototype

public BlockRealMatrix(final int rows, final int columns) throws NotStrictlyPositiveException 

Document

Create a new matrix with the supplied row and column dimensions.
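
As a quick orientation before the real-world usages below, here is a minimal standalone sketch (illustrative only, not taken from the examples): it creates a 3x2 block matrix, sets one entry, and shows that a non-positive dimension is rejected with NotStrictlyPositiveException.

import org.apache.commons.math3.exception.NotStrictlyPositiveException;
import org.apache.commons.math3.linear.BlockRealMatrix;

public class BlockRealMatrixConstructorSketch {
    public static void main(String[] args) {
        // all entries start at 0.0; data is stored internally in cache-friendly blocks
        BlockRealMatrix m = new BlockRealMatrix(3, 2);
        m.setEntry(0, 1, 4.2);
        System.out.println(m.getRowDimension() + "x" + m.getColumnDimension()
                + ", entry(0,1) = " + m.getEntry(0, 1));

        try {
            new BlockRealMatrix(0, 2); // row and column counts must be strictly positive
        } catch (NotStrictlyPositiveException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}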

Usage

From source file:edu.cmu.tetrad.util.TetradMatrix1.java

public TetradMatrix1(int m, int n) {
    if (m == 0 || n == 0) {
        this.apacheData = new Array2DRowRealMatrix();
    } else {
        //            this.apacheData = new OpenMapRealMatrix(m, n);
        this.apacheData = new BlockRealMatrix(m, n);
    }

    this.m = m;
    this.n = n;
}

From source file:com.datumbox.common.dataobjects.MatrixDataset.java

/**
 * Parses a dataset and converts it to MatrixDataset by using an already
 * existing mapping between feature names and column ids. Typically used
 * to parse the testing or validation dataset.
 *
 * @param newDataset
 * @param featureIdsReference
 * @return 
 */
public static MatrixDataset parseDataset(Dataset newDataset, Map<Object, Integer> featureIdsReference) {
    if (featureIdsReference.isEmpty()) {
        throw new RuntimeException("The featureIdsReference map should not be empty.");
    }

    int n = newDataset.size();
    int d = featureIdsReference.size();

    MatrixDataset m = new MatrixDataset(new ArrayRealVector(n), new BlockRealMatrix(n, d), featureIdsReference);

    if (newDataset.isEmpty()) {
        return m;
    }

    boolean extractY = (Dataset
            .value2ColumnType(newDataset.iterator().next().getY()) == Dataset.ColumnType.NUMERICAL);

    boolean addConstantColumn = m.feature2ColumnId.containsKey(Dataset.constantColumnName);

    for (Record r : newDataset) {
        int row = r.getId();

        if (extractY) {
            m.Y.setEntry(row, Dataset.toDouble(r.getY()));
        }

        if (addConstantColumn) {
            m.X.setEntry(row, 0, 1.0); //add the constant column
        }
        for (Map.Entry<Object, Object> entry : r.getX().entrySet()) {
            Object feature = entry.getKey();
            Double value = Dataset.toDouble(entry.getValue());
            if (value != null) {
                Integer featureId = m.feature2ColumnId.get(feature);
                if (featureId != null) {//if the feature exists in our database
                    m.X.setEntry(row, featureId, value);
                }
            } else {
                //else the X matrix maintains the 0.0 default value
            }
        }
    }

    return m;
}

From source file:mase.spec.SilhouetteDistanceCalculator.java

private RealMatrix computeDistanceMatrixParallel(List<BehaviourResult> brs) {
    RealMatrix mat = new BlockRealMatrix(brs.size(), brs.size());
    Collection<Callable<Object>> div = new ArrayList<>();
    for (int i = 0; i < brs.size(); i++) {
        div.add(new DistanceMatrixCalculator(mat, brs, i, i));
    }
    try {
        executor.invokeAll(div);
    } catch (InterruptedException ex) {
        ex.printStackTrace();
    }
    return mat;
}
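
The DistanceMatrixCalculator and BehaviourResult types used above are project-specific and not shown here, so the following is a self-contained sketch of the same pattern under assumed names: each Callable fills the upper-triangular part of one row of a shared BlockRealMatrix and mirrors it, and invokeAll blocks until every task has finished. The Euclidean metric and the class name are illustrative, not the mase.spec implementation.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.commons.math3.linear.BlockRealMatrix;
import org.apache.commons.math3.linear.RealMatrix;

public class ParallelDistanceMatrixSketch {

    public static RealMatrix distanceMatrix(double[][] points) throws InterruptedException {
        int n = points.length;
        RealMatrix mat = new BlockRealMatrix(n, n);
        ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        List<Callable<Object>> tasks = new ArrayList<>();
        for (int i = 0; i < n; i++) {
            final int row = i;
            tasks.add(() -> {
                // distinct tasks write distinct entries, so writes never overlap
                for (int j = row + 1; j < n; j++) {
                    double d = euclidean(points[row], points[j]);
                    mat.setEntry(row, j, d);
                    mat.setEntry(j, row, d);
                }
                return null;
            });
        }
        executor.invokeAll(tasks); // waits for all tasks to complete
        executor.shutdown();
        return mat;
    }

    private static double euclidean(double[] a, double[] b) {
        double sum = 0;
        for (int k = 0; k < a.length; k++) {
            sum += (a[k] - b[k]) * (a[k] - b[k]);
        }
        return Math.sqrt(sum);
    }
}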

From source file:ffnn.FFNNTubesAI.java

@Override
public void buildClassifier(Instances i) throws Exception {
    Instance temp_instance = null;
    RealMatrix error_output;
    RealMatrix error_hidden;
    RealMatrix input_matrix;
    RealMatrix hidden_matrix;
    RealMatrix output_matrix;
    Instances temp_instances;
    int r = 0;
    Scanner scan = new Scanner(System.in);

    output_layer = i.numDistinctValues(i.classIndex()); //3
    temp_instances = filterNominalNumeric(i);

    if (output_layer == 2) {
        Add filter = new Add();
        filter.setAttributeIndex("last");
        filter.setAttributeName("dummy");
        filter.setInputFormat(temp_instances);
        temp_instances = Filter.useFilter(temp_instances, filter);
        //            System.out.println(temp_instances);
        for (int j = 0; j < temp_instances.numInstances(); j++) {
            if (temp_instances.instance(j).value(temp_instances.numAttributes() - 2) == 0) {
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 1);
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 0);
            } else {
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 0);
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 1);
            }
        }
    }

    //temp_instances.randomize(temp_instances.getRandomNumberGenerator(1));
    //System.out.println(temp_instances);
    input_layer = temp_instances.numAttributes() - output_layer; //4
    hidden_layer = 0;
    while (hidden_layer < 1) {
        System.out.print("Hidden layer : ");
        hidden_layer = scan.nextInt();
    }
    int init_hidden = hidden_layer;
    error_hidden = new BlockRealMatrix(1, hidden_layer);
    error_output = new BlockRealMatrix(1, output_layer);
    input_matrix = new BlockRealMatrix(1, input_layer + 1); // extra column for the bias input

    buildWeight(input_layer, hidden_layer, output_layer);

    long last_time = System.nanoTime();
    double last_error_rate = 1;
    double best_error_rate = 1;

    double last_update = System.nanoTime();

    // number of training iterations
    //        for( long itr = 0; last_error_rate > 0.001; ++ itr ){
    for (long itr = 0; itr < 50000; ++itr) {
        if (r == 10) {
            break;
        }
        long time = System.nanoTime();
        if (time - last_time > 2000000000) {
            Evaluation eval = new Evaluation(i);
            eval.evaluateModel(this, i);

            double accry = eval.correct() / eval.numInstances();
            if (eval.errorRate() < last_error_rate) {
                last_update = System.nanoTime();
                if (eval.errorRate() < best_error_rate)
                    SerializationHelper.write(accry + "-" + time + ".model", this);
            }

            if (accry > 0)
                last_error_rate = eval.errorRate();

            // restart if there has been no improvement for 30 seconds
            if (time - last_update > 30000000000L) {
                last_update = System.nanoTime();
                learning_rate = random() * 0.05;
                hidden_layer = (int) (10 + floor(random() * 15));
                hidden_layer = (int) floor((hidden_layer / 25) * init_hidden);
                if (hidden_layer == 0) {
                    hidden_layer = 1;
                }
                itr = 0;
                System.out.println("RESTART " + learning_rate + " " + hidden_layer);
                buildWeight(input_layer, hidden_layer, output_layer);
                r++;
            }

            System.out.println(accry + " " + itr);
            last_time = time;
        }

        for (int j = 0; j < temp_instances.numInstances(); j++) {
            // forward pass
            temp_instance = temp_instances.instance(j);

            for (int k = 0; k < input_layer; k++) {
                input_matrix.setEntry(0, k, temp_instance.value(k));
            }
            input_matrix.setEntry(0, input_layer, 1.0); // bias

            hidden_matrix = input_matrix.multiply(weight1);
            for (int y = 0; y < hidden_layer; ++y) {
                hidden_matrix.setEntry(0, y, sig(hidden_matrix.getEntry(0, y)));
            }

            output_matrix = hidden_matrix.multiply(weight2).add(bias2);
            for (int y = 0; y < output_layer; ++y) {
                output_matrix.setEntry(0, y, sig(output_matrix.getEntry(0, y)));
            }

            // backward pass

            // error layer 2
            double total_err = 0;
            for (int k = 0; k < output_layer; k++) {
                double o = output_matrix.getEntry(0, k);
                double t = temp_instance.value(input_layer + k);
                double err = o * (1 - o) * (t - o);
                total_err += err * err;
                error_output.setEntry(0, k, err);
            }

            // back propagation layer 2
            for (int y = 0; y < hidden_layer; y++) {
                for (int x = 0; x < output_layer; ++x) {
                    double wold = weight2.getEntry(y, x);
                    double correction = learning_rate * error_output.getEntry(0, x)
                            * hidden_matrix.getEntry(0, y);
                    weight2.setEntry(y, x, wold + correction);
                }
            }

            for (int x = 0; x < output_layer; ++x) {
                double correction = learning_rate * error_output.getEntry(0, x); // bias input treated as 1
                bias2.setEntry(0, x, bias2.getEntry(0, x) + correction);
            }

            // error layer 1
            for (int k = 0; k < hidden_layer; ++k) {
                double o = hidden_matrix.getEntry(0, k);
                double t = 0;
                for (int x = 0; x < output_layer; ++x) {
                    t += error_output.getEntry(0, x) * weight2.getEntry(k, x);
                }
                double err = o * (1 - o) * t;
                error_hidden.setEntry(0, k, err);
            }

            // back propagation layer 1
            for (int y = 0; y < input_layer + 1; ++y) {
                for (int x = 0; x < hidden_layer; ++x) {
                    double wold = weight1.getEntry(y, x);
                    double correction = learning_rate * error_hidden.getEntry(0, x)
                            * input_matrix.getEntry(0, y);
                    weight1.setEntry(y, x, wold + correction);
                }
            }
        }
    }
}

From source file:edu.cmu.tetrad.util.TetradMatrix1.java

public TetradMatrix1 sqrt() {
    SingularValueDecomposition svd = new SingularValueDecomposition(getRealMatrix());
    RealMatrix U = svd.getU();
    RealMatrix V = svd.getV();
    double[] s = svd.getSingularValues();
    for (int i = 0; i < s.length; i++)
        s[i] = 1.0 / s[i];
    RealMatrix S = new BlockRealMatrix(s.length, s.length);
    for (int i = 0; i < s.length; i++)
        S.setEntry(i, i, s[i]);
    RealMatrix sqrt = U.multiply(S).multiply(V);
    return new TetradMatrix1(sqrt);
}
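
The pattern used here, decompose with an SVD, transform the singular values, and rebuild through a diagonal BlockRealMatrix, generalises to other spectral functions. Below is a self-contained sketch (illustrative only, not Tetrad code) that applies it to the square root of a symmetric positive semi-definite matrix, computed as U * diag(sqrt(s)) * V^T.

import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.BlockRealMatrix;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.SingularValueDecomposition;

public class SpectralSqrtSketch {

    /** Square root of a symmetric positive semi-definite matrix via its SVD. */
    public static RealMatrix sqrt(RealMatrix m) {
        SingularValueDecomposition svd = new SingularValueDecomposition(m);
        double[] s = svd.getSingularValues();
        RealMatrix S = new BlockRealMatrix(s.length, s.length);
        for (int i = 0; i < s.length; i++) {
            S.setEntry(i, i, Math.sqrt(s[i])); // transform the spectrum
        }
        return svd.getU().multiply(S).multiply(svd.getVT());
    }

    public static void main(String[] args) {
        RealMatrix m = new Array2DRowRealMatrix(new double[][] { { 4, 0 }, { 0, 9 } });
        RealMatrix root = sqrt(m);
        System.out.println(root.multiply(root)); // approximately equal to m
    }
}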

From source file:gamlss.algorithm.Gamlss.java

/** This is to emulate the Gamlss algorithm where 
 * the user specifies response variable vector 
 * and design matrix.
 * 
 * @param y - vector of response variable values 
 * @param designMatrices - design matrices for each 
 * of the distribution parameters
 * @param smoothMatrices - smoother matrices for each 
 * of the distribution parameters
 */
public Gamlss(final ArrayRealVector y, Hashtable<Integer, BlockRealMatrix> designMatrices,
        final HashMap<Integer, BlockRealMatrix> smoothMatrices) {

    GAMLSSFamilyDistribution distr = null;
    switch (DistributionSettings.DISTR) {
    case DistributionSettings.NO:
        distr = new NO();
        distr.initialiseDistributionParameters(y);
        break;
    case DistributionSettings.TF:
        distr = new TF();
        distr.initialiseDistributionParameters(y);
        break;
    case DistributionSettings.GA:
        distr = new GA();
        distr.initialiseDistributionParameters(y);
        break;
    case DistributionSettings.GT:
        distr = new GT();
        distr.initialiseDistributionParameters(y);
        break;
    case DistributionSettings.ST3:
        distr = new ST3();
        distr.initialiseDistributionParameters(y);
        break;
    case DistributionSettings.ST4:
        distr = new ST4();
        distr.initialiseDistributionParameters(y);
        break;
    case DistributionSettings.JSUo:
        distr = new JSUo();
        distr.initialiseDistributionParameters(y);
        break;
    case DistributionSettings.TF2:
        distr = new TF2();
        distr.initialiseDistributionParameters(y);
        break;
    case DistributionSettings.SST:
        distr = new SST();
        distr.initialiseDistributionParameters(y);
        break;
    case DistributionSettings.BCPE:
        distr = new BCPE();
        distr.initialiseDistributionParameters(y);
        break;
    case DistributionSettings.ST1:
        distr = new ST1();
        distr.initialiseDistributionParameters(y);
        break;
    case DistributionSettings.PE:
        distr = new PE();
        distr.initialiseDistributionParameters(y);
        break;
    default:
        System.err.println("The specific distribution " + "has not been implemented yet in Gamlss!");
    }

    if (smoothMatrices != null) {
        Controls.SMOOTHING = true;
    }

    if (designMatrices == null) {
        designMatrices = new Hashtable<Integer, BlockRealMatrix>();
        for (int i = 1; i < distr.getNumberOfDistribtionParameters() + 1; i++) {
            designMatrices.put(i, MatrixFunctions.buildInterceptMatrix(y.getDimension()));
            Controls.NO_INTERCEPT[i - 1] = true;
        }
    }

    ArrayRealVector w = new ArrayRealVector(y.getDimension());
    for (int i = 0; i < w.getDimension(); i++) {
        w.setEntry(i, Controls.DEFAULT_WEIGHT);
    }

    switch (DistributionSettings.FITTING_ALG) {
    case DistributionSettings.RS:
        rs = new RSAlgorithm(distr, y, designMatrices, smoothMatrices, w);
        rs.functionRS(distr, y, w);
        break;
    case DistributionSettings.RS20CG20:
        rs = new RSAlgorithm(distr, y, designMatrices, smoothMatrices, w);
        rs.functionRS(distr, y, w);
        rs = null;
        cg = new CGAlgorithm(y.getDimension());
        cg.setnCyc(Controls.GAMLSS_NUM_CYCLES);
        cg.CGfunction(distr, y, designMatrices, w);
        break;
    case DistributionSettings.CG:
        cg = new CGAlgorithm(y.getDimension());
        cg.CGfunction(distr, y, designMatrices, w);
        break;
    case DistributionSettings.GO:
        break;
    default:
        System.err.println(" Cannot recognise the " + "fitting algorithm");
    }

    int parNum = distr.getNumberOfDistribtionParameters();
    resultsMatrix = new BlockRealMatrix(y.getDimension(), parNum);
    for (int i = 0; i < y.getDimension(); i++) {
        for (int j = 0; j < parNum; j++) {
            resultsMatrix.setEntry(i, j, distr.getDistributionParameter(j + 1).getEntry(i));
        }
    }

}

From source file:edu.cmu.tetrad.util.ApacheTetradMatrix.java

public ApacheTetradMatrix appendRows(ApacheTetradMatrix rows) {
    RealMatrix m1 = new BlockRealMatrix(apacheData.getRowDimension() + rows.apacheData.getRowDimension(),
            apacheData.getColumnDimension());

    for (int i = 0; i < apacheData.getRowDimension(); i++) {
        for (int j = 0; j < apacheData.getColumnDimension(); j++) {
            m1.setEntry(i, j, apacheData.getEntry(i, j));
        }
    }

    for (int i = 0; i < rows.apacheData.getRowDimension(); i++) {
        for (int j = 0; j < rows.apacheData.getColumnDimension(); j++) {
            m1.setEntry(apacheData.getRowDimension() + i, j, rows.apacheData.getEntry(i, j));
        }
    }

    return new ApacheTetradMatrix(m1);
}

From source file:gamlss.utilities.MatrixFunctions.java

/**
 * Append each row of the second matrix to the corresponding row of the
 * first (column-wise concatenation).
 * @param m1 - first matrix
 * @param m2 - second matrix
 * @return m1.append.m2
 */
public static BlockRealMatrix appendMatricesRows(final BlockRealMatrix m1, final BlockRealMatrix m2) {
    BlockRealMatrix out = new BlockRealMatrix(m1.getRowDimension(),
            m1.getColumnDimension() + m2.getColumnDimension());
    for (int i = 0; i < m1.getRowDimension(); i++) {
        out.setRowVector(i, m1.getRowVector(i).append(m2.getRowVector(i)));
    }
    return out;
}
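
A hypothetical usage sketch of this helper (the matrices and values are made up for illustration): appending a 2x3 matrix to a 2x2 matrix yields a 2x5 result.

import gamlss.utilities.MatrixFunctions;
import org.apache.commons.math3.linear.BlockRealMatrix;

public class AppendMatricesRowsSketch {
    public static void main(String[] args) {
        BlockRealMatrix left = new BlockRealMatrix(2, 2);
        BlockRealMatrix right = new BlockRealMatrix(2, 3);
        left.setEntry(0, 0, 1.0);
        right.setEntry(0, 2, 5.0);

        // each row of 'right' is appended to the matching row of 'left'
        BlockRealMatrix combined = MatrixFunctions.appendMatricesRows(left, right);
        System.out.println(combined.getRowDimension() + "x" + combined.getColumnDimension()); // 2x5
    }
}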

From source file:gamlss.utilities.MatrixFunctions.java

/**
 * Create a matrix with length of y number of rows
 *  and one column, all entries of the matrix are 1.
 * @param y - vector
 * @return matrix
 */
public static BlockRealMatrix setInterceptMatrix(final ArrayRealVector y) {
    BlockRealMatrix designMatrix = new BlockRealMatrix(y.getDimension(), 1);
    for (int i = 0; i < y.getDimension(); i++) {
        designMatrix.setEntry(i, 0, 1.0);
    }
    return designMatrix;
}

From source file:ffnn.FFNNTubesAI.java

/**
 *
 * @param input_layer
 * @param hidden_layer
 * @param output_layer
 */
public void buildWeight(int input_layer, int hidden_layer, int output_layer) {
    weight1 = new BlockRealMatrix(input_layer + 1, hidden_layer);
    weight2 = new BlockRealMatrix(hidden_layer, output_layer);
    bias2 = new BlockRealMatrix(1, output_layer);
    for (int y = 0; y < input_layer + 1; ++y)
        for (int x = 0; x < hidden_layer; ++x)
            weight1.setEntry(y, x, random());
    for (int y = 0; y < hidden_layer; ++y)
        for (int x = 0; x < output_layer; ++x)
            weight2.setEntry(y, x, random());
    for (int x = 0; x < output_layer; ++x)
        bias2.setEntry(0, x, random());
}
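
Note that weight1 is allocated with input_layer + 1 rows: the extra row multiplies the constant 1.0 that buildClassifier appends to the input vector, so it plays the role of the hidden layer's bias, while the output layer keeps its bias in the separate bias2 matrix.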