Example usage for org.apache.commons.lang3 ArrayUtils toPrimitive

Introduction

This page collects example usages of org.apache.commons.lang3 ArrayUtils.toPrimitive, drawn from the source files listed below.

Prototype

public static boolean[] toPrimitive(final Boolean[] array) 
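
The examples on this page also call the matching overloads for the other wrapper types, which follow the same contract (a null input array yields null):

public static int[] toPrimitive(final Integer[] array)
public static long[] toPrimitive(final Long[] array)
public static float[] toPrimitive(final Float[] array)
public static double[] toPrimitive(final Double[] array)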

Document

Converts an array of object Booleans to primitives.

This method returns null for a null input array.
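
As a minimal, self-contained sketch of the call and its null handling (the class and variable names here are illustrative, not part of the library):

import org.apache.commons.lang3.ArrayUtils;

public class ToPrimitiveExample {
    public static void main(String[] args) {
        // Unbox a whole Boolean[] in one call
        Boolean[] boxed = { Boolean.TRUE, Boolean.FALSE, Boolean.TRUE };
        boolean[] flags = ArrayUtils.toPrimitive(boxed); // {true, false, true}

        // A null input array is returned as null rather than throwing
        boolean[] none = ArrayUtils.toPrimitive((Boolean[]) null);

        // The Integer overload is the one most examples below use,
        // typically to turn a List<Integer> into an int[]
        java.util.List<Integer> indices = java.util.Arrays.asList(3, 1, 4);
        int[] primitives = ArrayUtils.toPrimitive(indices.toArray(new Integer[0]));

        System.out.println(flags.length + " " + (none == null) + " " + primitives.length);
    }
}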

Usage

From source file:org.carbondata.scan.executor.util.QueryUtil.java

/**
 * Returns the dimension block indexes in the file for the given query dimensions.
 *
 * @param queryDimensions                query dimensions
 * @param dimensionOrdinalToBlockMapping mapping from dimension ordinal to block index in the file
 * @param customAggregationDimension     custom aggregation dimensions
 * @return block indexes in the file
 */
public static int[] getDimensionsBlockIndexes(List<QueryDimension> queryDimensions,
        Map<Integer, Integer> dimensionOrdinalToBlockMapping,
        List<CarbonDimension> customAggregationDimension) {
    // use a Set because columns in the same row group point to the same block
    Set<Integer> dimensionBlockIndex = new HashSet<Integer>();
    for (int i = 0; i < queryDimensions.size(); i++) {
        dimensionBlockIndex
                .add(dimensionOrdinalToBlockMapping.get(queryDimensions.get(i).getDimension().getOrdinal()));
    }
    for (int i = 0; i < customAggregationDimension.size(); i++) {
        dimensionBlockIndex
                .add(dimensionOrdinalToBlockMapping.get(customAggregationDimension.get(i).getOrdinal()));
    }
    return ArrayUtils.toPrimitive(dimensionBlockIndex.toArray(new Integer[dimensionBlockIndex.size()]));
}

From source file:org.dbgl.gui.ProfilesList.java

public int[] getSelectionIndices() {
    if (type == ProfilesListType.TABLE)
        return table.getSelectionIndices();
    else {
        GalleryItem[] items = gallery.getSelection();
        if (items.length == 0)
            return new int[0];
        List<Integer> indices = new ArrayList<Integer>();
        for (GalleryItem i : items) {
            int index = group.indexOf(i);
            if (index != -1)
                indices.add(index);
        }
        return ArrayUtils.toPrimitive(indices.toArray(new Integer[0]));
    }
}

From source file:org.dbgl.gui.SettingsDialog.java

private void updateColumnSettings() {
    int[] sort = conf.getIntValues("gui", "sortcolumn");
    boolean[] ascs = conf.getBooleanValues("gui", "sortascending");
    java.util.List<Integer> sortColumnIDs = new ArrayList<Integer>(sort.length);
    java.util.List<Boolean> sortColumnAscs = new ArrayList<Boolean>(sort.length);

    for (int i = 0; i < sort.length; i++) {
        if (visibleColumns[allColumnIDs.indexOf(sort[i])].getChecked()) {
            sortColumnIDs.add(sort[i]);
            sortColumnAscs.add(ascs[i]);
        }
    }
    if (sortColumnIDs.isEmpty()) {
        sortColumnIDs.add(allColumnIDs.get(getFirstVisibleColumn()));
        sortColumnAscs.add(true);
    }

    conf.setIntValues("gui", "sortcolumn", ArrayUtils.toPrimitive(sortColumnIDs.toArray(new Integer[0])));
    conf.setBooleanValues("gui", "sortascending",
            ArrayUtils.toPrimitive(sortColumnAscs.toArray(new Boolean[0])));

    java.util.List<Integer> visColumns = new ArrayList<Integer>();
    for (int i = 0; i < MainWindow.columnNames.length; i++)
        if (visibleColumns[i].getChecked())
            visColumns.add(allColumnIDs.get(i));

    java.util.List<Integer> orderedVisColumns = new ArrayList<Integer>(visColumns);
    Collections.sort(orderedVisColumns);

    java.util.List<Integer> colOrder = new ArrayList<Integer>();
    for (int id : visColumns)
        colOrder.add(orderedVisColumns.indexOf(id));

    conf.setValue("gui", "columnorder", StringUtils.join(colOrder, ' '));
}

From source file:org.dbgl.util.FileUtils.java

public static int[] findRelatedEntryIds(IInArchive zArchive, File dirToBeExtracted) {
    List<Integer> result = new ArrayList<Integer>();
    for (int i = 0; i < zArchive.size(); i++)
        if (areRelated(dirToBeExtracted, new File(zArchive.getEntry(i).getName())))
            result.add(i);
    return ArrayUtils.toPrimitive(result.toArray(new Integer[0]));
}

From source file:org.dbgl.util.ImportThread.java

public void preFinish() throws IOException {
    if (customFields != null) {
        for (int i = 0; i < Constants.EDIT_COLUMN_NAMES; i++) {
            if (!customFields[i].equalsIgnoreCase("Custom" + (i + 1))) {
                settings.getSettings().setValue("gui", "custom" + (i + 1), customFields[i]);
            }
        }
    }
    if (sevenzip) {
        entryIdsToBeExtracted = ArrayUtils.toPrimitive(sevenzipDstFileMap.keySet().toArray(new Integer[0]));
        SevenzipExtractFilesCallback extractCallback = new SevenzipExtractFilesCallback(this, zArchive,
                sevenzipDstFileMap);
        zArchive.Extract(entryIdsToBeExtracted, entryIdsToBeExtracted.length,
                IInArchive.NExtract_NAskMode_kExtract, extractCallback);
        for (int i = 0; i < entryIdsToBeExtracted.length; i++) {
            int id = entryIdsToBeExtracted[i];
            FileUtils.fileSetLastModified(sevenzipDstFileMap.get(id), zArchive.getEntry(id).getTime());
        }
        zArchive.close();
    }
}

From source file:org.deeplearning4j.clustering.cluster.ClusterUtils.java

public static void deriveClusterInfoDistanceStatistics(ClusterInfo info) {
    int pointCount = info.getPointDistancesFromCenter().size();
    if (pointCount == 0)
        return;

    double[] distances = ArrayUtils
            .toPrimitive(info.getPointDistancesFromCenter().values().toArray(new Double[] {}));
    double max = MathUtils.max(distances);
    double total = MathUtils.sum(distances);

    info.setMaxPointDistanceFromCenter(max);
    info.setTotalPointDistanceFromCenter(total);
    info.setAveragePointDistanceFromCenter(total / pointCount);
    info.setPointDistanceFromCenterVariance(MathUtils.variance(distances));
}

From source file:org.deeplearning4j.examples.recurrent.encdec.CorpusIterator.java

@Override
public MultiDataSet next(int num) {
    int i = currentBatch * batchSize;
    int currentBatchSize = Math.min(batchSize, corpus.size() - i - 1);
    INDArray input = Nd4j.zeros(currentBatchSize, 1, rowSize);
    INDArray prediction = Nd4j.zeros(currentBatchSize, dictSize, rowSize);
    INDArray decode = Nd4j.zeros(currentBatchSize, dictSize, rowSize);
    INDArray inputMask = Nd4j.zeros(currentBatchSize, rowSize);
    // this mask is also used for the decoder input; the length is the same
    INDArray predictionMask = Nd4j.zeros(currentBatchSize, rowSize);
    for (int j = 0; j < currentBatchSize; j++) {
        List<Double> rowIn = new ArrayList<>(corpus.get(i));
        Collections.reverse(rowIn);
        List<Double> rowPred = new ArrayList<>(corpus.get(i + 1));
        rowPred.add(1.0); // add <eos> token
        // replace the entire row in "input" using NDArrayIndex, it's faster than putScalar(); input is NOT made of one-hot vectors
        // because of the embedding layer that accepts token indexes directly
        input.put(
                new INDArrayIndex[] { NDArrayIndex.point(j), NDArrayIndex.point(0),
                        NDArrayIndex.interval(0, rowIn.size()) },
                Nd4j.create(ArrayUtils.toPrimitive(rowIn.toArray(new Double[0]))));
        inputMask.put(new INDArrayIndex[] { NDArrayIndex.point(j), NDArrayIndex.interval(0, rowIn.size()) },
                Nd4j.ones(rowIn.size()));
        predictionMask.put(
                new INDArrayIndex[] { NDArrayIndex.point(j), NDArrayIndex.interval(0, rowPred.size()) },
                Nd4j.ones(rowPred.size()));
        // prediction (output) and decode ARE one-hots though, I couldn't add an embedding layer on top of the decoder and I'm not sure
        // it's a good idea either
        double[][] predOneHot = new double[dictSize][rowPred.size()];
        double[][] decodeOneHot = new double[dictSize][rowPred.size()];
        decodeOneHot[2][0] = 1; // <go> token
        int predIdx = 0;
        for (Double pred : rowPred) {
            predOneHot[pred.intValue()][predIdx] = 1;
            if (predIdx < rowPred.size() - 1) { // put the same vals to decode with +1 offset except the last token that is <eos>
                decodeOneHot[pred.intValue()][predIdx + 1] = 1;
            }
            ++predIdx;
        }
        prediction.put(new INDArrayIndex[] { NDArrayIndex.point(j), NDArrayIndex.interval(0, dictSize),
                NDArrayIndex.interval(0, rowPred.size()) }, Nd4j.create(predOneHot));
        decode.put(new INDArrayIndex[] { NDArrayIndex.point(j), NDArrayIndex.interval(0, dictSize),
                NDArrayIndex.interval(0, rowPred.size()) }, Nd4j.create(decodeOneHot));
        ++i;
    }
    ++currentBatch;
    return new org.nd4j.linalg.dataset.MultiDataSet(new INDArray[] { input, decode },
            new INDArray[] { prediction }, new INDArray[] { inputMask, predictionMask },
            new INDArray[] { predictionMask });
}

From source file:org.deeplearning4j.examples.recurrent.encdec.EncoderDecoderLSTM.java

private void output(List<Double> rowIn, boolean printUnknowns) {
    net.rnnClearPreviousState();
    Collections.reverse(rowIn);
    INDArray in = Nd4j.create(ArrayUtils.toPrimitive(rowIn.toArray(new Double[0])),
            new int[] { 1, 1, rowIn.size() });
    double[] decodeArr = new double[dict.size()];
    decodeArr[2] = 1;
    INDArray decode = Nd4j.create(decodeArr, new int[] { 1, dict.size(), 1 });
    net.feedForward(new INDArray[] { in, decode }, false, false);
    org.deeplearning4j.nn.layers.recurrent.LSTM decoder = (org.deeplearning4j.nn.layers.recurrent.LSTM) net
            .getLayer("decoder");
    Layer output = net.getLayer("output");
    GraphVertex mergeVertex = net.getVertex("merge");
    INDArray thoughtVector = mergeVertex.getInputs()[1];
    LayerWorkspaceMgr mgr = LayerWorkspaceMgr.noWorkspaces();
    for (int row = 0; row < ROW_SIZE; ++row) {
        mergeVertex.setInputs(decode, thoughtVector);
        INDArray merged = mergeVertex.doForward(false, mgr);
        INDArray activateDec = decoder.rnnTimeStep(merged, mgr);
        INDArray out = output.activate(activateDec, false, mgr);
        double d = rnd.nextDouble();
        double sum = 0.0;
        int idx = -1;
        for (int s = 0; s < out.size(1); s++) {
            sum += out.getDouble(0, s, 0);
            if (d <= sum) {
                idx = s;
                if (printUnknowns || s != 0) {
                    System.out.print(revDict.get((double) s) + " ");
                }
                break;
            }
        }
        if (idx == 1) {
            break;
        }
        double[] newDecodeArr = new double[dict.size()];
        newDecodeArr[idx] = 1;
        decode = Nd4j.create(newDecodeArr, new int[] { 1, dict.size(), 1 });
    }
    System.out.println();
}

From source file:org.dmlc.xgboost4j.demo.util.DataLoader.java

public static DenseData loadCSVFile(String filePath)
        throws FileNotFoundException, UnsupportedEncodingException, IOException {
    DenseData denseData = new DenseData();

    File f = new File(filePath);
    FileInputStream in = new FileInputStream(f);
    BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));

    denseData.nrow = 0;
    denseData.ncol = -1;
    String line;
    List<Float> tlabels = new ArrayList<>();
    List<Float> tdata = new ArrayList<>();

    while ((line = reader.readLine()) != null) {
        String[] items = line.trim().split(",");
        if (items.length == 0) {
            continue;
        }
        denseData.nrow++;
        if (denseData.ncol == -1) {
            denseData.ncol = items.length - 1;
        }

        tlabels.add(Float.valueOf(items[items.length - 1]));
        for (int i = 0; i < items.length - 1; i++) {
            tdata.add(Float.valueOf(items[i]));
        }
    }

    reader.close();
    in.close();

    denseData.labels = ArrayUtils.toPrimitive(tlabels.toArray(new Float[tlabels.size()]));
    denseData.data = ArrayUtils.toPrimitive(tdata.toArray(new Float[tdata.size()]));

    return denseData;
}

From source file:org.dmlc.xgboost4j.demo.util.DataLoader.java

public static CSRSparseData loadSVMFile(String filePath)
        throws FileNotFoundException, UnsupportedEncodingException, IOException {
    CSRSparseData spData = new CSRSparseData();

    List<Float> tlabels = new ArrayList<>();
    List<Float> tdata = new ArrayList<>();
    List<Long> theaders = new ArrayList<>();
    List<Integer> tindex = new ArrayList<>();

    File f = new File(filePath);
    FileInputStream in = new FileInputStream(f);
    BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));

    String line;
    long rowheader = 0;
    theaders.add(rowheader);
    while ((line = reader.readLine()) != null) {
        String[] items = line.trim().split(" ");
        if (items.length == 0) {
            continue;
        }

        rowheader += items.length - 1;
        theaders.add(rowheader);
        tlabels.add(Float.valueOf(items[0]));

        for (int i = 1; i < items.length; i++) {
            String[] tup = items[i].split(":");
            assert tup.length == 2;

            tdata.add(Float.valueOf(tup[1]));
            tindex.add(Integer.valueOf(tup[0]));
        }
    }

    spData.labels = ArrayUtils.toPrimitive(tlabels.toArray(new Float[tlabels.size()]));
    spData.data = ArrayUtils.toPrimitive(tdata.toArray(new Float[tdata.size()]));
    spData.colIndex = ArrayUtils.toPrimitive(tindex.toArray(new Integer[tindex.size()]));
    spData.rowHeaders = ArrayUtils.toPrimitive(theaders.toArray(new Long[theaders.size()]));

    return spData;
}