Example usage for org.apache.commons.lang ArrayUtils toPrimitive

List of usage examples for org.apache.commons.lang ArrayUtils toPrimitive

Introduction

On this page you can find example usages for org.apache.commons.lang ArrayUtils.toPrimitive, drawn from open-source projects.

Prototype

public static boolean[] toPrimitive(Boolean[] array) 

Source Link

Document

Converts an array of object Booleans to primitives.
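
The Boolean[] overload shown in the prototype is one of a family: ArrayUtils declares a matching toPrimitive for each wrapper type (Byte[], Character[], Short[], Integer[], Long[], Float[], Double[]), plus two-argument variants that substitute a default value for null elements. Below is a minimal standalone sketch of the basic behavior; the class and variable names are illustrative.

import java.util.Arrays;

import org.apache.commons.lang.ArrayUtils;

public class ToPrimitiveDemo {
    public static void main(String[] args) {
        // Boolean[] -> boolean[], matching the prototype above
        Boolean[] flags = { Boolean.TRUE, Boolean.FALSE };
        boolean[] primitiveFlags = ArrayUtils.toPrimitive(flags);
        System.out.println(Arrays.toString(primitiveFlags)); // [true, false]

        // The same pattern works for the numeric wrappers
        Integer[] boxed = { 1, 2, 3 };
        int[] unboxed = ArrayUtils.toPrimitive(boxed);
        System.out.println(Arrays.toString(unboxed)); // [1, 2, 3]

        // A null element throws NullPointerException unless the
        // two-argument overload supplies a substitute value
        Integer[] withNull = { 1, null, 3 };
        int[] safe = ArrayUtils.toPrimitive(withNull, -1);
        System.out.println(Arrays.toString(safe)); // [1, -1, 3]
    }
}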

Usage

From source file:org.apache.hadoop.mapred.NetCDFInputFormatPrunerByFileIndex.java

private NetCDFInfo getNetCDFInfo(Path file, FileSystem fs, JobConf job) {
    //traverse header and return chunk start and size arrays
    NetCDFInfo result = new NetCDFInfo();//library call

    NetcdfFile ncFile;
    Variable v;
    Variable time;
    Variable lat;
    Variable lon;
    ncFile = null;
    try {
        ncFile = NetcdfDataset.openFile(file.toString(), null);
        List<Variable> vs = ncFile.getVariables();
        for (int i = 0; i < vs.size(); i++) {
            System.out.println("[SAMAN][NetCDFInputFormatPrunerByFileIndex][getNetCDFInfo] variable is: "
                    + vs.get(i).getName());
        }

        v = ncFile.findVariable("rsut");
        time = ncFile.findVariable("time");
        lat = ncFile.findVariable("lat");
        lon = ncFile.findVariable("lon");

        //List<Variable> vs = ncFile.getVariables();
        //v = vs.get(vs.size()-1);

        //LOG.info("Variable is "+ v.getFullName());
        result.fileSize = ncFile.vfileSize;
        result.recStart = ncFile.vrecStart;
        // unbox the Long[] record metadata into the long[] chunkStarts field
        Long[] metaArray = v.reallyReadMeta().toArray(new Long[(int) (ncFile.vnumRecs)]);
        result.chunkStarts = ArrayUtils.toPrimitive(metaArray);
        //result.chunkSizes = nc.chunkSizes;
        result.numRecs = ncFile.vnumRecs;
        result.recSize = ncFile.vrecSize;
        result.smallRecSize = ncFile.vsmallRecSize;
        result.timeLength = (int) (time.getSize());
        result.latLength = (int) (lat.getSize());
        result.lonLength = (int) (lon.getSize());
        //result.shape = v.shape;

    } catch (Exception e) {
        LOG.info("Bad... " + e);
        e.printStackTrace();
    }
    try {
        if (ncFile != null)
            ncFile.close();
    } catch (Exception e) {
        LOG.info("Bad2... " + e);
        System.out.println("Bad2... " + e);
    }

    return result;
}
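
One detail worth flagging in the snippet above: list.toArray(new Long[(int) ncFile.vnumRecs]) pads any slots beyond the list's actual size with null, and the single-argument toPrimitive throws a NullPointerException on the first null element. A small sketch of that failure mode and of the two-argument overload that guards against it; the -1L sentinel is illustrative.

import java.util.Arrays;
import java.util.List;

import org.apache.commons.lang.ArrayUtils;

public class SizedToArrayPitfall {
    public static void main(String[] args) {
        List<Long> chunkStarts = Arrays.asList(10L, 20L);

        // Asking for a larger array than the list leaves trailing nulls
        Long[] padded = chunkStarts.toArray(new Long[4]); // {10, 20, null, null}

        // ArrayUtils.toPrimitive(padded) would throw NullPointerException here;
        // the two-argument overload substitutes a default for null elements
        long[] starts = ArrayUtils.toPrimitive(padded, -1L);
        System.out.println(Arrays.toString(starts)); // [10, 20, -1, -1]
    }
}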

From source file:org.apache.hadoop.mapred.NetCDFInputFormatPrunerByFileIndexMultiFileTwoDimensions.java

private NetCDFInfo getNetCDFInfo(Path file, FileSystem fs, JobConf job) {
    //traverse header and return chunk start and size arrays
    NetCDFInfo result = new NetCDFInfo();//library call

    NetcdfFile ncFile;
    Variable v;
    Variable time;
    Variable lat;
    Variable lon;
    ncFile = null;
    try {
        ncFile = NetcdfDataset.openFile(file.toString(), null);

        v = ncFile.findVariable("rsut");
        time = ncFile.findVariable("time");
        lat = ncFile.findVariable("lat");
        lon = ncFile.findVariable("lon");

        //List<Variable> vs = ncFile.getVariables();
        //v = vs.get(vs.size()-1);

        //LOG.info("Variable is "+ v.getFullName());
        result.fileSize = ncFile.vfileSize;
        result.recStart = ncFile.vrecStart;
        Long[] metaArray = v.reallyReadMeta().toArray(new Long[(int) (ncFile.vnumRecs)]);
        result.chunkStarts = ArrayUtils.toPrimitive(metaArray);
        //result.chunkSizes = nc.chunkSizes;
        result.numRecs = ncFile.vnumRecs;
        result.recSize = ncFile.vrecSize;
        result.smallRecSize = ncFile.vsmallRecSize;
        result.timeLength = (int) (time.getSize());
        result.latLength = (int) (lat.getSize());
        result.lonLength = (int) (lon.getSize());
        //result.shape = v.shape;

    } catch (Exception e) {
        LOG.info("Bad... " + e);
        System.out.println("Bad... " + e);
    }
    try {
        if (ncFile != null)
            ncFile.close();
    } catch (Exception e) {
        LOG.info("Bad2... " + e);
        System.out.println("Bad2... " + e);
    }

    return result;
}

From source file:org.apache.solr.handler.AnalysisRequestHandlerTestBase.java

protected void assertToken(NamedList token, TokenInfo info) {
    assertEquals(info.getText(), token.get("text"));
    if (info.getRawText() != null) {
        assertEquals(info.getRawText(), token.get("raw_text"));
    }
    assertEquals(info.getType(), token.get("type"));
    assertEquals(Integer.valueOf(info.getStart()), token.get("start"));
    assertEquals(Integer.valueOf(info.getEnd()), token.get("end"));
    assertEquals(Integer.valueOf(info.getPosition()), token.get("position"));
    assertArrayEquals(info.getPositionHistory(),
            ArrayUtils.toPrimitive((Integer[]) token.get("positionHistory")));
    if (info.isMatch()) {
        assertEquals(Boolean.TRUE, token.get("match"));
    }
    if (info.getPayload() != null) {
        assertEquals(info.getPayload(), token.get("payload"));
    }
}
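
Here toPrimitive bridges a test-framework gap: the token's positionHistory comes back from the NamedList as a boxed Integer[], while assertArrayEquals compares primitive int[] arrays. A minimal sketch of the same pattern in isolation; the values and test name are illustrative.

import static org.junit.Assert.assertArrayEquals;

import org.apache.commons.lang.ArrayUtils;
import org.junit.Test;

public class UnboxBeforeAssertTest {
    @Test
    public void comparesPrimitiveArrays() {
        int[] expected = { 1, 2, 3 };
        Integer[] actualBoxed = { 1, 2, 3 }; // e.g. pulled out of a NamedList
        // unbox so both sides of the assertion are int[]
        assertArrayEquals(expected, ArrayUtils.toPrimitive(actualBoxed));
    }
}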

From source file:org.apache.sysml.runtime.transform.BinAgent.java

public BinAgent(JSONObject parsedSpec, String[] colnames, int clen, boolean colsOnly)
        throws JSONException, IOException {
    super(null, clen);
    if (!parsedSpec.containsKey(TfUtils.TXMETHOD_BIN))
        return;

    if (colsOnly) {
        List<Integer> collist = TfMetaUtils.parseBinningColIDs(parsedSpec, colnames);
        initColList(ArrayUtils.toPrimitive(collist.toArray(new Integer[0])));
    } else {
        JSONObject obj = (JSONObject) parsedSpec.get(TfUtils.TXMETHOD_BIN);
        JSONArray attrs = (JSONArray) obj.get(TfUtils.JSON_ATTRS);
        JSONArray nbins = (JSONArray) obj.get(TfUtils.JSON_NBINS);
        initColList(attrs);

        _numBins = new int[attrs.size()];
        for (int i = 0; i < _numBins.length; i++)
            _numBins[i] = UtilFunctions.toInt(nbins.get(i));

        // initialize internal transformation metadata
        _min = new double[_colList.length];
        Arrays.fill(_min, Double.MAX_VALUE);
        _max = new double[_colList.length];
        Arrays.fill(_max, -Double.MAX_VALUE);

        _binWidths = new double[_colList.length];
    }
}
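
The collist.toArray(new Integer[0]) call followed by toPrimitive is the stock pre-Java-8 idiom for turning a List<Integer> into an int[]; the zero-length argument array only fixes the runtime component type, so there is no null padding to worry about. A minimal sketch, with illustrative column IDs:

import java.util.Arrays;
import java.util.List;

import org.apache.commons.lang.ArrayUtils;

public class ColListConversion {
    public static void main(String[] args) {
        List<Integer> collist = Arrays.asList(2, 5, 7);
        // List<Integer> -> Integer[] -> int[]
        int[] colIDs = ArrayUtils.toPrimitive(collist.toArray(new Integer[0]));
        System.out.println(Arrays.toString(colIDs)); // [2, 5, 7]
    }
}

On Java 8 and later the same conversion can be written without Commons Lang as collist.stream().mapToInt(Integer::intValue).toArray().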

From source file:org.apache.sysml.runtime.transform.decode.DecoderFactory.java

@SuppressWarnings("unchecked")
public static Decoder createDecoder(String spec, String[] colnames, ValueType[] schema, FrameBlock meta)
        throws DMLRuntimeException {
    Decoder decoder = null;

    try {
        //parse transform specification
        JSONObject jSpec = new JSONObject(spec);
        List<Decoder> ldecoders = new ArrayList<Decoder>();

        //create decoders 'recode', 'dummy' and 'pass-through'
        List<Integer> rcIDs = Arrays.asList(
                ArrayUtils.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfUtils.TXMETHOD_RECODE)));
        List<Integer> dcIDs = Arrays.asList(
                ArrayUtils.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfUtils.TXMETHOD_DUMMYCODE)));
        rcIDs = new ArrayList<Integer>(CollectionUtils.union(rcIDs, dcIDs));
        List<Integer> ptIDs = new ArrayList<Integer>(
                CollectionUtils.subtract(UtilFunctions.getSequenceList(1, meta.getNumColumns(), 1), rcIDs));

        //create default schema if unspecified (with double columns for pass-through)
        if (schema == null) {
            schema = UtilFunctions.nCopies(meta.getNumColumns(), ValueType.STRING);
            for (Integer col : ptIDs)
                schema[col - 1] = ValueType.DOUBLE;
        }

        if (!dcIDs.isEmpty()) {
            ldecoders.add(new DecoderDummycode(schema, ArrayUtils.toPrimitive(dcIDs.toArray(new Integer[0]))));
        }
        if (!rcIDs.isEmpty()) {
            ldecoders.add(new DecoderRecode(schema, !dcIDs.isEmpty(),
                    ArrayUtils.toPrimitive(rcIDs.toArray(new Integer[0]))));
        }
        if (!ptIDs.isEmpty()) {
            ldecoders.add(new DecoderPassThrough(schema, ArrayUtils.toPrimitive(ptIDs.toArray(new Integer[0])),
                    ArrayUtils.toPrimitive(dcIDs.toArray(new Integer[0]))));
        }

        //create composite decoder of all created decoders
        //and initialize with given meta data (recode, dummy, bin)
        decoder = new DecoderComposite(schema, ldecoders);
        if (meta != null)
            decoder.initMetaData(meta);
    } catch (Exception ex) {
        throw new DMLRuntimeException(ex);
    }

    return decoder;
}
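
This factory leans on the round trip between toObject and toPrimitive: the primitive int[] column IDs are boxed with toObject so they can flow through java.util.List and the CollectionUtils set operations, then the surviving IDs are unboxed back into the primitive arrays the decoder constructors expect. A self-contained sketch of that round trip; the ID values are illustrative, and CollectionUtils.union does not guarantee element order.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.ArrayUtils;

public class BoxUnboxRoundTrip {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        int[] recodeIDs = { 1, 3 };
        int[] dummycodeIDs = { 3, 4 };

        // box so the IDs can participate in collection set operations
        List<Integer> rcIDs = Arrays.asList(ArrayUtils.toObject(recodeIDs));
        List<Integer> dcIDs = Arrays.asList(ArrayUtils.toObject(dummycodeIDs));
        List<Integer> merged = new ArrayList<Integer>(CollectionUtils.union(rcIDs, dcIDs));

        // unbox back into the primitive array a downstream API expects
        int[] ids = ArrayUtils.toPrimitive(merged.toArray(new Integer[0]));
        System.out.println(Arrays.toString(ids)); // e.g. [1, 3, 4]
    }
}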

From source file:org.apache.sysml.runtime.transform.encode.EncoderBin.java

public EncoderBin(JSONObject parsedSpec, String[] colnames, int clen, boolean colsOnly)
        throws JSONException, IOException {
    super(null, clen);
    if (!parsedSpec.containsKey(TfUtils.TXMETHOD_BIN))
        return;

    if (colsOnly) {
        List<Integer> collist = TfMetaUtils.parseBinningColIDs(parsedSpec, colnames);
        initColList(ArrayUtils.toPrimitive(collist.toArray(new Integer[0])));
    } else {
        JSONObject obj = (JSONObject) parsedSpec.get(TfUtils.TXMETHOD_BIN);
        JSONArray attrs = (JSONArray) obj.get(TfUtils.JSON_ATTRS);
        JSONArray nbins = (JSONArray) obj.get(TfUtils.JSON_NBINS);
        initColList(attrs);

        _numBins = new int[attrs.size()];
        for (int i = 0; i < _numBins.length; i++)
            _numBins[i] = UtilFunctions.toInt(nbins.get(i));

        // initialize internal transformation metadata
        _min = new double[_colList.length];
        Arrays.fill(_min, Double.POSITIVE_INFINITY);
        _max = new double[_colList.length];
        Arrays.fill(_max, Double.NEGATIVE_INFINITY);
    }
}

From source file:org.apache.sysml.runtime.transform.encode.EncoderFactory.java

@SuppressWarnings("unchecked")
public static Encoder createEncoder(String spec, String[] colnames, ValueType[] schema, FrameBlock meta)
        throws DMLRuntimeException {
    Encoder encoder = null;
    int clen = schema.length;

    try {
        //parse transform specification
        JSONObject jSpec = new JSONObject(spec);
        List<Encoder> lencoders = new ArrayList<Encoder>();

        //prepare basic id lists (recode, dummycode, pass-through)
        //note: any dummycode column requires recode as preparation
        List<Integer> rcIDs = Arrays.asList(
                ArrayUtils.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfUtils.TXMETHOD_RECODE)));
        List<Integer> dcIDs = Arrays.asList(
                ArrayUtils.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfUtils.TXMETHOD_DUMMYCODE)));
        rcIDs = new ArrayList<Integer>(CollectionUtils.union(rcIDs, dcIDs));
        List<Integer> binIDs = TfMetaUtils.parseBinningColIDs(jSpec, colnames);
        List<Integer> ptIDs = new ArrayList<Integer>(CollectionUtils
                .subtract(CollectionUtils.subtract(UtilFunctions.getSequenceList(1, clen, 1), rcIDs), binIDs));
        List<Integer> oIDs = Arrays.asList(
                ArrayUtils.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfUtils.TXMETHOD_OMIT)));
        List<Integer> mvIDs = Arrays.asList(ArrayUtils
                .toObject(TfMetaUtils.parseJsonObjectIDList(jSpec, colnames, TfUtils.TXMETHOD_IMPUTE)));

        //create individual encoders
        if (!rcIDs.isEmpty()) {
            RecodeAgent ra = new RecodeAgent(jSpec, colnames, clen);
            ra.setColList(ArrayUtils.toPrimitive(rcIDs.toArray(new Integer[0])));
            lencoders.add(ra);
        }
        if (!ptIDs.isEmpty())
            lencoders.add(new EncoderPassThrough(ArrayUtils.toPrimitive(ptIDs.toArray(new Integer[0])), clen));
        if (!dcIDs.isEmpty())
            lencoders.add(new DummycodeAgent(jSpec, colnames, schema.length));
        if (!binIDs.isEmpty())
            lencoders.add(new BinAgent(jSpec, colnames, schema.length, true));
        if (!oIDs.isEmpty())
            lencoders.add(new OmitAgent(jSpec, colnames, schema.length));
        if (!mvIDs.isEmpty()) {
            MVImputeAgent ma = new MVImputeAgent(jSpec, colnames, schema.length);
            ma.initRecodeIDList(rcIDs);
            lencoders.add(ma);
        }

        //create composite encoder of all created encoders
        //and initialize meta data (recode, dummy, bin, mv)
        encoder = new EncoderComposite(lencoders);
        if (meta != null)
            encoder.initMetaData(meta);
    } catch (Exception ex) {
        throw new DMLRuntimeException(ex);
    }

    return encoder;
}

From source file:org.broad.igv.data.GenomeSummaryDataTest.java

/**
 * Build GenomeSummaryData from a set of features sorted by chromosome and start position.
 * @param genome
 * @param trackId
 * @param features
 * @param scale
 * @return
 */
private GenomeSummaryData buildGenomeSummaryData(Genome genome, String trackId, Iterable<BasicFeature> features,
        double scale) {
    GenomeSummaryData genomeSummaryData = new GenomeSummaryData(genome, new String[] { trackId });
    if (scale > 0) {
        genomeSummaryData.setScale(scale);
    }

    List<Integer> startLocations = new ArrayList<Integer>();
    List<Float> data = new ArrayList<Float>();
    String lastChr = null;

    for (BasicFeature feature : features) {
        String chr = feature.getChr();
        //Finish off last chromosome
        if (lastChr != null && !chr.equals(lastChr)) {
            Map<String, float[]> dMap = new HashMap<String, float[]>();
            dMap.put(trackId, ArrayUtils.toPrimitive(data.toArray(new Float[data.size()])));
            genomeSummaryData.addData(lastChr,
                    ArrayUtils.toPrimitive(startLocations.toArray(new Integer[startLocations.size()])), dMap);
            startLocations.clear();
            data.clear();
        }

        startLocations.add(feature.getStart());
        data.add(feature.getScore());

        lastChr = chr;
    }

    Map<String, float[]> dMap = new HashMap<String, float[]>();
    dMap.put(trackId, ArrayUtils.toPrimitive(data.toArray(new Float[data.size()])));
    genomeSummaryData.addData(lastChr,
            ArrayUtils.toPrimitive(startLocations.toArray(new Integer[startLocations.size()])), dMap);

    return genomeSummaryData;
}
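
The same conversion pattern covers the floating-point overloads: toPrimitive(Float[]) produces the float[] track values and toPrimitive(Integer[]) the int[] start positions that addData consumes. Passing toArray an exactly sized array, as new Float[data.size()] does here, is functionally equivalent to passing new Float[0]. A minimal sketch of the Float overload, with illustrative values:

import java.util.Arrays;
import java.util.List;

import org.apache.commons.lang.ArrayUtils;

public class FloatConversion {
    public static void main(String[] args) {
        List<Float> data = Arrays.asList(0.5f, 1.25f);
        float[] scores = ArrayUtils.toPrimitive(data.toArray(new Float[data.size()]));
        System.out.println(Arrays.toString(scores)); // [0.5, 1.25]
    }
}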

From source file:org.broadinstitute.gatk.tools.walkers.genotyper.afcalc.AFCalcTestBuilder.java

public VariantContext makeACTest(final List<Integer> ACs, final int nNonInformative, final int nonTypePL) {
    return makeACTest(ArrayUtils.toPrimitive(ACs.toArray(new Integer[] {})), nNonInformative, nonTypePL);
}

From source file:org.broadinstitute.gatk.tools.walkers.genotyper.afcalc.AFCalculationUnitTest.java

private List<Genotype> toGenotypes(final List<List<Integer>> PLsPerSample) {
    final List<Allele> nocall = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL);
    final List<Genotype> genotypes = new ArrayList<Genotype>(PLsPerSample.size());

    for (final List<Integer> PLs : PLsPerSample) {
        final int[] pls = ArrayUtils.toPrimitive(PLs.toArray(new Integer[3]));
        final int min = MathUtils.arrayMin(pls);
        for (int i = 0; i < pls.length; i++)
            pls[i] -= min; // normalize so the most likely genotype has PL == 0
        genotypes.add(makePL(nocall, pls));
    }

    return genotypes;
}
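
Note the pre-sized toArray(new Integer[3]) in this last snippet: it assumes each per-sample list holds exactly three PL values, which matches a diploid, biallelic genotype. If a list were shorter, the unused slots would be null-padded and toPrimitive would throw a NullPointerException, so the neutral new Integer[0] form used elsewhere on this page is the safer default.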