Example usage for org.apache.commons.lang ArrayUtils toPrimitive

Introduction

This page lists example usages of org.apache.commons.lang ArrayUtils.toPrimitive, drawn from open-source projects.

Prototype

public static boolean[] toPrimitive(Boolean[] array, boolean valueForNull) 

Document

Converts an array of object Booleans to an array of primitive booleans, replacing any null element with valueForNull.
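
The prototype above is one of several parallel overloads; the examples below also use the analogous Long[], Integer[], and Double[] overloads. A minimal, self-contained sketch of the Boolean[] overload (class and variable names are illustrative):

import org.apache.commons.lang.ArrayUtils;

public class ToPrimitiveSketch {
    public static void main(String[] args) {
        Boolean[] boxed = { Boolean.TRUE, null, Boolean.FALSE };
        // Null elements are replaced by the valueForNull argument (false here)
        boolean[] primitives = ArrayUtils.toPrimitive(boxed, false);
        // primitives == { true, false, false }
    }
}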

Usage

From source file:com.ikanow.aleph2.analytics.hadoop.assets.BeFileInputFormat.java

@Override
public List<InputSplit> getSplits(JobContext context) throws IOException {
    logger.debug("BeFileInputFormat.getSplits");

    super.setMaxSplitSize(MAX_SPLIT_SIZE);

    try {
        final List<InputSplit> splits = Lambdas.get(Lambdas.wrap_u(() -> {
            final List<InputSplit> tmp = super.getSplits(context);

            String debug_max_str = context.getConfiguration().get(BatchEnrichmentJob.BE_DEBUG_MAX_SIZE);
            if (null != debug_max_str) {
                final int requested_records = Integer.parseInt(debug_max_str);

                // Combine up to 5x the requested number of records into one mega-split
                // to strike a balance between limiting the data and making sure
                // that tests still generate enough records

                final CombineFileSplit combined = new CombineFileSplit(
                        tmp.stream().map(split -> (CombineFileSplit) split)
                                .flatMap(split -> Arrays.stream(split.getPaths())).limit(5L * requested_records)
                                .<Path>toArray(size -> new Path[size]),
                        ArrayUtils.toPrimitive(
                                tmp.stream().map(split -> (CombineFileSplit) split)
                                        .flatMap(split -> Arrays.stream(split.getStartOffsets()).boxed())
                                        .limit(5L * requested_records).<Long>toArray(size -> new Long[size]),
                                0L),
                        ArrayUtils.toPrimitive(
                                tmp.stream().map(split -> (CombineFileSplit) split)
                                        .flatMap(split -> Arrays.stream(split.getLengths()).boxed())
                                        .limit(5L * requested_records).<Long>toArray(size -> new Long[size]),
                                0L),
                        tmp.stream().map(split -> (CombineFileSplit) split)
                                .flatMap(Lambdas.wrap_u(split -> Arrays.stream(split.getLocations())))
                                .limit(5L * requested_records).<String>toArray(size -> new String[size]));
                return Arrays.<InputSplit>asList(combined);
            } else
                return tmp;
        }));

        logger.debug("BeFileInputFormat.getSplits: " + ((splits != null) ? splits.size() : "null"));
        return splits;

    } catch (Throwable t) {
        logger.error(t);
        throw new IOException(t);
    }
}
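
Both toPrimitive calls in this example use the analogous Long[] overload, long[] toPrimitive(Long[] array, long valueForNull), to unbox the collected start offsets and lengths, substituting 0L for any null element. A minimal sketch of that overload in isolation (values are illustrative):

Long[] offsets = { 0L, 1024L, null };
long[] raw = ArrayUtils.toPrimitive(offsets, 0L); // yields { 0L, 1024L, 0L }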

From source file:com.ikanow.aleph2.analytics.spark.assets.BeFileInputFormat_Pure.java

@Override
public List<InputSplit> getSplits(JobContext context) throws IOException {
    logger.debug("BeFileInputFormat.getSplits");

    super.setMaxSplitSize(MAX_SPLIT_SIZE);

    try {
        final List<InputSplit> splits = Lambdas.get(Lambdas.wrap_u(() -> {
            final List<InputSplit> tmp = super.getSplits(context);

            String debug_max_str = context.getConfiguration().get(HadoopBatchEnrichmentUtils.BE_DEBUG_MAX_SIZE);
            if (null != debug_max_str) {
                final int requested_records = Integer.parseInt(debug_max_str);

                // Combine up to 5x the requested number of records into one mega-split
                // to strike a balance between limiting the data and making sure
                // that tests still generate enough records

                final CombineFileSplit combined = new CombineFileSplit(
                        tmp.stream().map(split -> (CombineFileSplit) split)
                                .flatMap(split -> Arrays.stream(split.getPaths())).limit(5L * requested_records)
                                .<Path>toArray(size -> new Path[size]),
                        ArrayUtils.toPrimitive(
                                tmp.stream().map(split -> (CombineFileSplit) split)
                                        .flatMap(split -> Arrays.stream(split.getStartOffsets()).boxed())
                                        .limit(5L * requested_records).<Long>toArray(size -> new Long[size]),
                                0L),
                        ArrayUtils.toPrimitive(
                                tmp.stream().map(split -> (CombineFileSplit) split)
                                        .flatMap(split -> Arrays.stream(split.getLengths()).boxed())
                                        .limit(5L * requested_records).<Long>toArray(size -> new Long[size]),
                                0L),
                        tmp.stream().map(split -> (CombineFileSplit) split)
                                .flatMap(Lambdas.wrap_u(split -> Arrays.stream(split.getLocations())))
                                .limit(5L * requested_records).<String>toArray(size -> new String[size]));
                return Arrays.<InputSplit>asList(combined);
            } else
                return tmp;
        }));

        logger.debug("BeFileInputFormat.getSplits: " + ((splits != null) ? splits.size() : "null"));
        return splits;

    } catch (Throwable t) {
        logger.error(ErrorUtils.getLongForm("Error getting splits, error = {0}", t));

        return Collections.emptyList();
    }
}

From source file:gda.rcp.views.scan.AbstractCachedScanPlotView.java

@Override
protected IPlotData getX(IScanDataPoint... points) {

    if (cachedX == null)
        cachedX = new ArrayList<Double>(89);

    Double[] values = cachedX.toArray(new Double[] {});
    double[] primitiveValues = ArrayUtils.toPrimitive(values, values.length);
    Dataset xValues = new DoubleDataset(primitiveValues, primitiveValues.length);
    xValues.setName(getXAxisName());
    return new DataSetPlotData(getXAxisName(), xValues);
}
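
This example uses the Double[] overload, double[] toPrimitive(Double[] array, double valueForNull). Note that it passes values.length as the valueForNull substitute, so a null element would be replaced by the array's length; a fixed sentinel is the more common choice. A minimal sketch (values are illustrative):

Double[] boxed = { 1.5, null, 3.0 };
double[] raw = ArrayUtils.toPrimitive(boxed, Double.NaN); // yields { 1.5, NaN, 3.0 }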

From source file:ru.catssoftware.gameserver.skills.conditions.ConditionPlayerHasClanHall.java

public ConditionPlayerHasClanHall(List<Integer> clanHall) {
    _clanHall = ArrayUtils.toPrimitive(clanHall.toArray(new Integer[clanHall.size()]), 0);

    Arrays.sort(_clanHall);
}

From source file:ru.catssoftware.gameserver.skills.conditions.ConditionTargetClassIdRestriction.java

public ConditionTargetClassIdRestriction(List<Integer> classId) {
    _classIds = ArrayUtils.toPrimitive(classId.toArray(new Integer[classId.size()]), 0);

    Arrays.sort(_classIds);
}

From source file:ru.catssoftware.gameserver.skills.conditions.ConditionTargetRaceId.java

public ConditionTargetRaceId(List<Integer> raceId) {
    _raceIds = ArrayUtils.toPrimitive(raceId.toArray(new Integer[raceId.size()]), 0);

    Arrays.sort(_raceIds);
}
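
The three constructors above follow the same pattern: copy a List<Integer> into an Integer[] and unbox it with the Integer[] overload, int[] toPrimitive(Integer[] array, int valueForNull), substituting 0 for any null element before sorting. A minimal sketch (list contents are illustrative):

List<Integer> ids = Arrays.asList(3, 1, 2);
int[] sorted = ArrayUtils.toPrimitive(ids.toArray(new Integer[ids.size()]), 0);
Arrays.sort(sorted); // sorted == { 1, 2, 3 }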