Example usage for org.apache.commons.lang3 ArrayUtils toPrimitive

List of usage examples for org.apache.commons.lang3 ArrayUtils toPrimitive

Introduction

On this page you can find example usages of org.apache.commons.lang3 ArrayUtils.toPrimitive.

Prototype

public static boolean[] toPrimitive(final Boolean[] array) 

Document

Converts an array of object Booleans to primitives.

This method returns null for a null input array.
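
The same conversion exists for the other boxed wrapper types (Integer, Long, Short, Byte, Character, Float, Double). A minimal sketch of the Boolean and Integer overloads, assuming only that commons-lang3 is on the classpath (class and variable names below are illustrative):

import org.apache.commons.lang3.ArrayUtils;

public class ToPrimitiveSketch {
    public static void main(String[] args) {
        // Boolean[] -> boolean[]
        Boolean[] boxedFlags = { Boolean.TRUE, Boolean.FALSE };
        boolean[] flags = ArrayUtils.toPrimitive(boxedFlags);

        // Integer[] -> int[], the overload used by most of the examples below
        Integer[] boxedInts = { 1, 2, 3 };
        int[] ints = ArrayUtils.toPrimitive(boxedInts);

        // a null input array is returned as null
        boolean[] nothing = ArrayUtils.toPrimitive((Boolean[]) null);

        System.out.println(flags.length + " " + ints.length + " " + (nothing == null));
    }
}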

Usage

From source file:org.apache.carbondata.scan.executor.util.QueryUtil.java

/**
 * Below method will be used to get the measure block indexes to be read
 * from the file
 *
 * @param queryMeasures              query measure
 * @param expressionMeasure          measure present in the expression
 * @param ordinalToBlockIndexMapping measure ordinal to block mapping
 * @param filterMeasures             measures present in the filter
 * @return block indexes
 */
public static int[] getMeasureBlockIndexes(List<QueryMeasure> queryMeasures,
        List<CarbonMeasure> expressionMeasure, Map<Integer, Integer> ordinalToBlockIndexMapping,
        Set<CarbonMeasure> filterMeasures) {
    Set<Integer> measureBlockIndex = new HashSet<Integer>();
    Set<Integer> filterMeasureOrdinal = getFilterMeasureOrdinal(filterMeasures);
    for (int i = 0; i < queryMeasures.size(); i++) {
        if (!filterMeasureOrdinal.contains(queryMeasures.get(i).getMeasure().getOrdinal())) {
            measureBlockIndex
                    .add(ordinalToBlockIndexMapping.get(queryMeasures.get(i).getMeasure().getOrdinal()));
        }
    }
    for (int i = 0; i < expressionMeasure.size(); i++) {
        measureBlockIndex.add(ordinalToBlockIndexMapping.get(expressionMeasure.get(i).getOrdinal()));
    }
    int[] measureIndexes = ArrayUtils
            .toPrimitive(measureBlockIndex.toArray(new Integer[measureBlockIndex.size()]));
    Arrays.sort(measureIndexes);
    return measureIndexes;
}

From source file:org.apache.carbondata.scan.executor.util.QueryUtil.java

/**
 * Below method will be used to get the mapping of block index and its
 * restructuring info
 *
 * @param queryDimensions   query dimension from query model
 * @param segmentProperties segment properties
 * @return map of block index to its restructuring info
 * @throws KeyGenException if problem while key generation
 */
public static Map<Integer, KeyStructureInfo> getColumnGroupKeyStructureInfo(
        List<QueryDimension> queryDimensions, SegmentProperties segmentProperties) throws KeyGenException {
    Map<Integer, KeyStructureInfo> rowGroupToItsRSInfo = new HashMap<Integer, KeyStructureInfo>();
    // get column group id and its ordinal mapping of column group
    Map<Integer, List<Integer>> columnGroupAndItsOrdinalMappingForQuery = getColumnGroupAndItsOrdinalMapping(
            queryDimensions);
    Map<Integer, KeyGenerator> columnGroupAndItsKeygenartor = segmentProperties
            .getColumnGroupAndItsKeygenartor();

    Iterator<Entry<Integer, List<Integer>>> iterator = columnGroupAndItsOrdinalMappingForQuery.entrySet()
            .iterator();
    KeyStructureInfo restructureInfos = null;
    while (iterator.hasNext()) {
        Entry<Integer, List<Integer>> next = iterator.next();
        KeyGenerator keyGenerator = columnGroupAndItsKeygenartor.get(next.getKey());
        restructureInfos = new KeyStructureInfo();
        // sort the ordinal
        List<Integer> ordinal = next.getValue();
        List<Integer> mdKeyOrdinal = new ArrayList<Integer>();
        // unsorted mdkey ordinals, kept in query order
        List<Integer> mdKeyOrdinalForQuery = new ArrayList<Integer>();
        for (Integer ord : ordinal) {
            mdKeyOrdinal.add(segmentProperties.getColumnGroupMdKeyOrdinal(next.getKey(), ord));
            mdKeyOrdinalForQuery.add(segmentProperties.getColumnGroupMdKeyOrdinal(next.getKey(), ord));
        }
        Collections.sort(mdKeyOrdinal);
        // get the masked byte range for column group
        int[] maskByteRanges = getMaskedByteRangeBasedOrdinal(mdKeyOrdinal, keyGenerator);
        // max key for column group
        byte[] maxKey = getMaxKeyBasedOnOrinal(mdKeyOrdinal, keyGenerator);
        // get masked key for column group
        int[] maksedByte = getMaskedByte(keyGenerator.getKeySizeInBytes(), maskByteRanges);
        restructureInfos.setKeyGenerator(keyGenerator);
        restructureInfos.setMaskByteRanges(maskByteRanges);
        restructureInfos.setMaxKey(maxKey);
        restructureInfos.setMaskedBytes(maksedByte);
        restructureInfos.setMdkeyQueryDimensionOrdinal(
                ArrayUtils.toPrimitive(mdKeyOrdinalForQuery.toArray(new Integer[mdKeyOrdinalForQuery.size()])));
        rowGroupToItsRSInfo.put(segmentProperties.getDimensionOrdinalToBlockMapping().get(ordinal.get(0)),
                restructureInfos);
    }
    return rowGroupToItsRSInfo;
}

From source file:org.apache.carbondata.scan.executor.util.QueryUtil.java

/**
 * Below method will be used to get the indexes of the number type aggregators
 *
 * @param aggType aggregator types
 * @return indexes of the sum and average aggregators
 */
public static int[] getNumberTypeIndex(List<String> aggType) {
    List<Integer> indexList = new ArrayList<Integer>();
    for (int i = 0; i < aggType.size(); i++) {
        if (CarbonCommonConstants.SUM.equals(aggType.get(i))
                || CarbonCommonConstants.AVERAGE.equals(aggType.get(i))) {
            indexList.add(i);
        }
    }
    return ArrayUtils.toPrimitive(indexList.toArray(new Integer[indexList.size()]));
}

From source file:org.apache.carbondata.spark.merger.CarbonCompactionUtil.java

/**
 * This method will return the updated cardinality according to the master schema
 *
 * @param columnCardinalityMap    cardinality of each column, keyed by column id
 * @param carbonTable             table holding the master schema
 * @param updatedColumnSchemaList output list populated with the updated column schemas
 * @return updated cardinality
 */
public static int[] updateColumnSchemaAndGetCardinality(Map<String, Integer> columnCardinalityMap,
        CarbonTable carbonTable, List<ColumnSchema> updatedColumnSchemaList) {
    List<CarbonDimension> masterDimensions = carbonTable
            .getDimensionByTableName(carbonTable.getFactTableName());
    List<Integer> updatedCardinalityList = new ArrayList<>(columnCardinalityMap.size());
    for (CarbonDimension dimension : masterDimensions) {
        Integer value = columnCardinalityMap.get(dimension.getColumnId());
        if (null == value) {
            updatedCardinalityList.add(getDimensionDefaultCardinality(dimension));
        } else {
            updatedCardinalityList.add(value);
        }
        updatedColumnSchemaList.add(dimension.getColumnSchema());
    }
    // add measures to the column schema list
    List<CarbonMeasure> masterSchemaMeasures = carbonTable
            .getMeasureByTableName(carbonTable.getFactTableName());
    for (CarbonMeasure measure : masterSchemaMeasures) {
        updatedColumnSchemaList.add(measure.getColumnSchema());
    }
    int[] updatedCardinality = ArrayUtils
            .toPrimitive(updatedCardinalityList.toArray(new Integer[updatedCardinalityList.size()]));
    return updatedCardinality;
}

From source file:org.apache.flink.table.runtime.functions.SqlLikeChainChecker.java

public SqlLikeChainChecker(String pattern) {
    final StringTokenizer tokens = new StringTokenizer(pattern, "%");
    final boolean leftAnchor = !pattern.startsWith("%");
    final boolean rightAnchor = !pattern.endsWith("%");
    int len = 0;
    // at least 2 checkers always
    BinaryString leftPattern = null;
    BinaryString rightPattern = null;
    int leftLen = 0; // not -1
    int rightLen = 0; // not -1
    final List<BinaryString> middleCheckers = new ArrayList<>(2);
    final List<Integer> lengths = new ArrayList<>(2);

    for (int i = 0; tokens.hasMoreTokens(); i++) {
        String chunk = tokens.nextToken();
        if (chunk.length() == 0) {
            // %% is folded in the .*?.*? regex usually into .*?
            continue;
        }
        len += utf8Length(chunk);
        if (leftAnchor && i == 0) {
            // first item
            leftPattern = fromString(chunk);
            leftLen = utf8Length(chunk);
        } else if (rightAnchor && !tokens.hasMoreTokens()) {
            // last item
            rightPattern = fromString(chunk);
            rightLen = utf8Length(chunk);
        } else {
            // middle items in order
            middleCheckers.add(fromString(chunk));
            lengths.add(utf8Length(chunk));
        }
    }
    midLens = ArrayUtils.toPrimitive(lengths.toArray(ArrayUtils.EMPTY_INTEGER_OBJECT_ARRAY));
    middlePatterns = middleCheckers.toArray(new BinaryString[0]);
    minLen = len;
    beginPattern = leftPattern;
    endPattern = rightPattern;
    beginLen = leftLen;
    endLen = rightLen;
}

From source file:org.apache.flink.table.runtime.join.NullAwareJoinHelper.java

public static int[] getNullFilterKeys(boolean[] filterNulls) {
    checkArgument(filterNulls.length > 0);
    List<Integer> nullFilterKeyList = new ArrayList<>();
    for (int i = 0; i < filterNulls.length; i++) {
        if (filterNulls[i]) {
            nullFilterKeyList.add(i);
        }
    }
    return ArrayUtils.toPrimitive(nullFilterKeyList.toArray(new Integer[0]));
}
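
Both forms of the list-to-primitive idiom appear in these examples: the CarbonData methods above presize the target array with new Integer[list.size()], while this example and the SnappyCompDe methods below pass new Integer[0] and let toArray allocate a correctly sized array. A small self-contained sketch of the zero-length-array variant (the list contents are made up for illustration):

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.ArrayUtils;

public class BoxedListToPrimitiveArray {
    public static void main(String[] args) {
        List<Integer> keys = new ArrayList<>();
        keys.add(3);
        keys.add(7);

        // toArray(new Integer[0]) allocates an Integer[2] internally,
        // which toPrimitive then unboxes into an int[2]
        int[] primitiveKeys = ArrayUtils.toPrimitive(keys.toArray(new Integer[0]));

        System.out.println(primitiveKeys.length); // prints 2
    }
}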

From source file:org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java

/**
 * Write compressed data to the output ByteBuffer and update the position of the buffer.
 *
 * @param boxedVals A list of boxed Java primitives.
 * @param output    The buffer the compressed bytes are written to.
 * @return The number of bytes written.
 * @throws IOException if writing the primitives fails
 */
private int writeBoxedBytes(List<Byte> boxedVals, ByteBuffer output) throws IOException {
    return writePrimitives(ArrayUtils.toPrimitive(boxedVals.toArray(new Byte[0])), output);
}

From source file:org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java

private int writeBoxedShorts(List<Short> boxedVals, ByteBuffer output) throws IOException {
    return writePrimitives(ArrayUtils.toPrimitive(boxedVals.toArray(new Short[0])), output);
}

From source file:org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java

private int writeBoxedIntegers(List<Integer> boxedVals, ByteBuffer output) throws IOException {
    return writePrimitives(ArrayUtils.toPrimitive(boxedVals.toArray(new Integer[0])), output);
}

From source file:org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java

private int writeBoxedLongs(List<Long> boxedVals, ByteBuffer output) throws IOException {
    return writePrimitives(ArrayUtils.toPrimitive(boxedVals.toArray(new Long[0])), output);
}