Example usage for org.apache.commons.math3.linear Array2DRowRealMatrix getRow


Introduction

On this page you can find example usage for org.apache.commons.math3.linear Array2DRowRealMatrix getRow.

Prototype

public double[] getRow(final int row) throws OutOfRangeException 
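
Before the collected examples, here is a minimal, self-contained sketch of the call itself. The matrix values and the class name GetRowSketch are made up for illustration; the only assumption is commons-math3 on the classpath. getRow returns the entries of the requested row as a new array (a copy, not a live view of the matrix), and an index outside the valid row range raises OutOfRangeException, as the prototype above indicates.

import org.apache.commons.math3.linear.Array2DRowRealMatrix;

public class GetRowSketch {
    public static void main(String[] args) {
        // A small 3 x 2 matrix with arbitrary illustrative values
        Array2DRowRealMatrix m = new Array2DRowRealMatrix(
                new double[][] { { 1.0, 2.0 }, { 3.0, 4.0 }, { 5.0, 6.0 } });

        // Fetch the second row (row indices start at 0); the result is a copy
        double[] row = m.getRow(1); // { 3.0, 4.0 }

        // Editing the returned array does not write back into the matrix
        row[0] = 99.0;
        System.out.println(m.getEntry(1, 0)); // prints 3.0

        // m.getRow(3) would throw
        // org.apache.commons.math3.exception.OutOfRangeException
        System.out.println(java.util.Arrays.toString(m.getRow(2))); // [5.0, 6.0]
    }
}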

Usage

From source file:com.clust4j.algo.MeanShiftTests.java

@Test
public void testSeededIrisFunctional() {
    Array2DRowRealMatrix iris = data_;

    new MeanShift(iris, new MeanShiftParameters().setVerbose(true)
            .setSeeds(new double[][] { iris.getRow(3), iris.getRow(90), iris.getRow(120) })).fit();
    System.out.println();
}

From source file:com.clust4j.algo.MeanShiftTests.java

@Test
public void testAutoEstimationWithScale() {
    Array2DRowRealMatrix iris = (Array2DRowRealMatrix) new StandardScaler().fit(data_).transform(data_);
    final double[][] X = iris.getData();

    // MS estimates bw at 1.6041295821313855
    final double bandwidth = 1.6041295821313855;

    assertTrue(Precision.equals(
            MeanShift.autoEstimateBW(iris, 0.3, Distance.EUCLIDEAN, GlobalState.DEFAULT_RANDOM_STATE, false),
            bandwidth, 1e-9));

    assertTrue(Precision.equals(
            MeanShift.autoEstimateBW(iris, 0.3, Distance.EUCLIDEAN, GlobalState.DEFAULT_RANDOM_STATE, true),
            bandwidth, 1e-9));

    // Asserting fit works without breaking things...
    RadiusNeighbors r = new RadiusNeighbors(iris, new RadiusNeighborsParameters(bandwidth)).fit();

    TreeSet<MeanShiftSeed> centers = new TreeSet<>();
    for (double[] seed : X)
        centers.add(MeanShift.singleSeed(seed, r, X, 300));

    assertTrue(centers.size() == 4);

    double[][] expected_dists = new double[][] {
            new double[] { 0.50161528154395962, -0.31685274298813487, 0.65388162422893481,
                    0.65270450741975761 },
            new double[] { 0.52001211065400177, -0.29561728795619946, 0.67106269515983397,
                    0.67390853215763813 },
            new double[] { 0.54861244890482475, -0.25718786696105495, 0.68964559485632182,
                    0.69326664641211422 },
            new double[] { -1.0595457115461515, 0.74408909010240054, -1.2995708885010491,
                    -1.2545442961404225 } };

    int[] expected_centers = new int[] { 82, 80, 77, 45 };

    int idx = 0;
    for (MeanShiftSeed seed : centers) {
        assertTrue(VecUtils.equalsWithTolerance(seed.dists, expected_dists[idx], 1e-1));
        assertTrue(seed.count == expected_centers[idx]);
        idx++;
    }

    ArrayList<EntryPair<double[], Integer>> center_intensity = new ArrayList<>();
    for (MeanShiftSeed seed : centers) {
        if (null != seed) {
            center_intensity.add(seed.getPair());
        }
    }

    final ArrayList<EntryPair<double[], Integer>> sorted_by_intensity = center_intensity;

    // test getting the unique vals
    idx = 0;
    final int m_prime = sorted_by_intensity.size();
    final Array2DRowRealMatrix sorted_centers = new Array2DRowRealMatrix(m_prime, iris.getColumnDimension());
    for (Map.Entry<double[], Integer> e : sorted_by_intensity)
        sorted_centers.setRow(idx++, e.getKey());

    // Create a boolean mask, init true
    final boolean[] unique = new boolean[m_prime];
    for (int i = 0; i < unique.length; i++)
        unique[i] = true;

    // Fit the new neighbors model
    RadiusNeighbors nbrs = new RadiusNeighbors(sorted_centers,
            new RadiusNeighborsParameters(bandwidth).setVerbose(false)).fit();

    // Iterate over sorted centers and query radii
    int[] indcs;
    double[] center;
    for (int i = 0; i < m_prime; i++) {
        if (unique[i]) {
            center = sorted_centers.getRow(i);
            indcs = nbrs.getNeighbors(new double[][] { center }, bandwidth, false).getIndices()[0];

            for (int id : indcs) {
                unique[id] = false;
            }

            unique[i] = true; // Keep this as true
        }
    }

    // Now assign the centroids...
    int redundant_ct = 0;
    final ArrayList<double[]> centroids = new ArrayList<>();
    for (int i = 0; i < unique.length; i++) {
        if (unique[i]) {
            centroids.add(sorted_centers.getRow(i));
        }
    }

    redundant_ct = unique.length - centroids.size();

    assertTrue(redundant_ct == 2);
    assertTrue(centroids.size() == 2);
    assertTrue(VecUtils.equalsWithTolerance(centroids.get(0),
            new double[] { 0.4999404345258691, -0.3157948009929614, 0.6516983739795399, 0.6505251874544873 },
            1e-6));

    assertTrue(VecUtils.equalsExactly(centroids.get(1),
            new double[] { -1.0560079864392702, 0.7416046454700266, -1.295231741534238, -1.2503554887998656 }));

    // also put the centroids into a matrix. We have to
    // wait to perform this op, because we have to know
    // the size of centroids first...
    Array2DRowRealMatrix clust_centers = new Array2DRowRealMatrix(centroids.size(), iris.getColumnDimension());
    for (int i = 0; i < clust_centers.getRowDimension(); i++)
        clust_centers.setRow(i, centroids.get(i));

    // The final nearest neighbors model -- if this works, we are in the clear...
    new NearestNeighbors(clust_centers, new NearestNeighborsParameters(1)).fit();
}

From source file:com.clust4j.algo.MeanShift.java

@Override
protected MeanShift fit() {
    synchronized (fitLock) {

        if (null != labels) // Already fit this model
            return this;

        // Put the results into a Map (hash because tree imposes comparable casting)
        final LogTimer timer = new LogTimer();
        centroids = new ArrayList<double[]>();

        /*
         * Get the neighborhoods and center intensity object. Will iterate until
         * either the centers are found, or the max try count is exceeded. For each
         * iteration, will increase bandwidth.
         */
        RadiusNeighbors nbrs = new RadiusNeighbors(this, bandwidth).fit();

        // Compute the seeds and center intensity
        // If parallelism is permitted, try it. 
        CenterIntensity intensity = null;
        if (parallel) {
            try {
                intensity = new ParallelCenterIntensity(nbrs);
            } catch (RejectedExecutionException e) {
                // Shouldn't happen...
                warn("parallel search failed; falling back to serial");
            }
        }

        // Gets here if serial or if parallel failed...
        if (null == intensity)
            intensity = new SerialCenterIntensity(nbrs);

        // Check for points all too far from seeds
        if (intensity.isEmpty()) {
            error(new IllegalClusterStateException("No point " + "was within bandwidth=" + bandwidth
                    + " of any seed; try increasing bandwidth"));
        } else {
            converged = true;
            itersElapsed = intensity.getIters(); // max iters elapsed
        }

        // Extract the centroids
        int idx = 0, m_prime = intensity.size();
        final Array2DRowRealMatrix sorted_centers = new Array2DRowRealMatrix(m_prime, n);

        for (MeanShiftSeed entry : intensity)
            sorted_centers.setRow(idx++, entry.getPair().getKey());

        // Fit the new neighbors model
        nbrs = new RadiusNeighbors(sorted_centers, new RadiusNeighborsParameters(bandwidth)
                .setSeed(this.random_state).setMetric(this.dist_metric).setForceParallel(parallel), true).fit();

        // Post-processing. Remove near duplicate seeds
        // If dist btwn two kernels is less than bandwidth, remove one w fewer pts
        // Create a boolean mask, init true
        final boolean[] unique = new boolean[m_prime];
        for (int i = 0; i < unique.length; i++)
            unique[i] = true;

        // Pre-filtered summaries...
        ArrayList<SummaryLite> allSummary = intensity.getSummaries();

        // Iterate over sorted centers and query radii
        int redundant_ct = 0;
        int[] indcs;
        double[] center;
        for (int i = 0; i < m_prime; i++) {
            if (unique[i]) {
                center = sorted_centers.getRow(i);
                indcs = nbrs.getNeighbors(new double[][] { center }, bandwidth, false).getIndices()[0];

                for (int id : indcs)
                    unique[id] = false;

                unique[i] = true; // Keep this as true
            }
        }

        // Now assign the centroids...
        SummaryLite summ;
        for (int i = 0; i < unique.length; i++) {
            summ = allSummary.get(i);

            if (unique[i]) {
                summ.retained = true;
                centroids.add(sorted_centers.getRow(i));
            }

            fitSummary.add(summ.toArray());
        }

        // calc redundant ct
        redundant_ct = unique.length - centroids.size();

        // also put the centroids into a matrix. We have to
        // wait to perform this op, because we have to know
        // the size of centroids first...
        Array2DRowRealMatrix centers = new Array2DRowRealMatrix(centroids.size(), n);
        for (int i = 0; i < centroids.size(); i++)
            centers.setRow(i, centroids.get(i));

        // Build yet another neighbors model...
        NearestNeighbors nn = new NearestNeighbors(centers, new NearestNeighborsParameters(1)
                .setSeed(this.random_state).setMetric(this.dist_metric).setForceParallel(false), true).fit();

        info((numClusters = centroids.size()) + " optimal kernel" + (numClusters != 1 ? "s" : "")
                + " identified");
        info(redundant_ct + " nearly-identical kernel" + (redundant_ct != 1 ? "s" : "") + " removed");

        // Get the nearest...
        final LogTimer clustTimer = new LogTimer();
        Neighborhood knrst = nn.getNeighbors(data.getDataRef());
        labels = MatUtils.flatten(knrst.getIndices());

        // order the labels..
        /* 
         * Reduce labels to a sorted, gapless, list
         * sklearn line: cluster_centers_indices = np.unique(labels)
         */
        ArrayList<Integer> centroidIndices = new ArrayList<Integer>(numClusters);
        for (Integer i : labels) // force autobox
            if (!centroidIndices.contains(i)) // Not race condition because synchronized
                centroidIndices.add(i);

        /*
         * final label assignment...
         * sklearn line: labels = np.searchsorted(cluster_centers_indices, labels)
         */
        for (int i = 0; i < labels.length; i++)
            labels[i] = centroidIndices.indexOf(labels[i]);

        // Wrap up...
        // Count missing
        numNoisey = 0;
        for (int lab : labels)
            if (lab == NOISE_CLASS)
                numNoisey++;
        info(numNoisey + " record" + (numNoisey != 1 ? "s" : "") + " classified noise");

        info("completed cluster labeling in " + clustTimer.toString());

        sayBye(timer);
        return this;
    }

}

From source file:org.interpss.opf.dc.impl.EqIneqMatrixBuilder.java

private Array2DRowRealMatrix getReducedBusAdmittance() {
    int numOfBus = opfNet.getNoActiveBus();

    // Form the reduced bus admittance matrix by omitting the row
    // corresponding to the SWING bus; the B1 matrix formed by InterPSS
    // itself is under consideration to be reused here.

    Array2DRowRealMatrix tempBusAdm = new Array2DRowRealMatrix(numOfBus, numOfBus);

    for (Bus b : opfNet.getBusList()) {
        DclfOpfBus busi = (DclfOpfBus) b;
        int i = busi.getSortNumber();
        double Bii = 0;
        for (Branch bra : busi.getBranchList()) {
            //if (bra.isAclfBranch()) {
            DclfOpfBranch aclfBranch = (DclfOpfBranch) bra;
            Bus busj = bra.getToBus().getId().equals(busi.getId()) ? bra.getFromBus() : bra.getToBus();
            int j = busj.getSortNumber();
            double Bij = 1.0 / aclfBranch.getZ().getImaginary();// aclfBranch.b1ft();
            tempBusAdm.setEntry(i, j, -Bij);
            Bii += Bij;
            //}
        }
        tempBusAdm.setEntry(i, i, Bii);
    }

    Array2DRowRealMatrix busAdmReduced = new Array2DRowRealMatrix(numOfBus - 1, numOfBus); // reduced bus admittance matrix
    int[] selectedRows = this.getNonSwingBusRows();
    for (int index = 0; index < selectedRows.length; index++) {
        busAdmReduced.setRow(index, tempBusAdm.getRow(selectedRows[index]));
    }
    return busAdmReduced;
}

From source file:outlineDescriptor.FieldAnalyzer.java

private CellReference[] identifyMaxima(CellReference[] store, Array2DRowRealMatrix kernel) {

    ArrayList<CellReference> out = new ArrayList<CellReference>();
    int maxCount = 0;

    for (CellReference pt : store) {

        // This point has already been processed, so it is not a separate maximum
        if ((flags[getListOffset(pt.x, pt.y)] & (DISCARDED | PROCESSED)) != 0) {
            continue;
        }

        //boolean equalInRange = (flags[getListOffset(pt.x, pt.y)] & EQUAL) != 0;

        //checking the neighboring area
        for (int i = 0; i < kernel.getRowDimension(); i++) {

            double[] bounds = kernel.getRow(i);

            int upperLimit = (int) Math.max(bounds[1], bounds[2]);
            int bottom = (int) Math.min(bounds[1], bounds[2]);

            while (bottom <= upperLimit) {

                int yPos = pt.y + bottom;
                int xPos = (int) bounds[0] + pt.x;

                if (xPos >= 0 && xPos < coherenceKernel.length && yPos >= 0
                        && yPos < coherenceKernel[0].length) {

                    if ((flags[getListOffset(xPos, yPos)] & CANDIDATE) == 0) {
                        // This point isn't being considered for a maximum; continue
                        flags[getListOffset(xPos, yPos)] |= PROCESSED;

                    }
                    //checking for equality within tolerance
                    else if (pt.coh - FPERROR <= coherenceKernel[xPos][yPos]) {
                        flags[getListOffset(xPos, yPos)] |= (EQUAL);
                    } else {

                        flags[getListOffset(xPos, yPos)] |= (PROCESSED | DISCARDED);
                    }
                }
                flags[getListOffset(pt.x, pt.y)] |= MAX;
                bottom++;
            }
        }

        out.add(pt);
        maxCount++;

        if (maxCount >= count) {
            break;
        }
    }
    out.trimToSize();
    return out.toArray(new CellReference[out.size()]);

}