Example usage for org.apache.commons.lang ArrayUtils toPrimitive

Introduction

This page collects example usages of org.apache.commons.lang.ArrayUtils.toPrimitive, drawn from open-source projects.

Prototype

public static boolean[] toPrimitive(Boolean[] array) 

Document

Converts an array of object Booleans to primitives. It returns null for a null input array, and the single-argument overload throws NullPointerException if any element is null. Overloads exist for the other wrapper types as well, which is why the examples below also convert Integer[] and Long[] arrays.
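
Below is a minimal sketch of the idiom every example on this page uses: collect wrapper values into a List, convert with toArray, then unwrap with toPrimitive. The class name ToPrimitiveExample is illustrative, not from any of the quoted projects; the sketch assumes commons-lang 2.x on the classpath.

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang.ArrayUtils;

public class ToPrimitiveExample {
    public static void main(String[] args) {
        // Collect wrapper values, as the usage examples below do.
        List<Boolean> flags = new ArrayList<Boolean>();
        flags.add(Boolean.TRUE);
        flags.add(Boolean.FALSE);

        // List<Boolean> -> Boolean[] -> boolean[]; toPrimitive copies into a new array.
        boolean[] primitives = ArrayUtils.toPrimitive(flags.toArray(new Boolean[flags.size()]));
        System.out.println(primitives.length); // 2

        // A null element would make the one-argument overload throw NullPointerException;
        // the two-argument overload substitutes a default value instead.
        Boolean[] withNull = new Boolean[] { Boolean.TRUE, null };
        boolean[] safe = ArrayUtils.toPrimitive(withNull, false);
        System.out.println(safe[1]); // false
    }
}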

Usage

From source file:org.apache.carbondata.core.util.CarbonUtil.java

/**
 * Below method will be used to get the dimension
 *
 * @param tableDimensionList table dimension list
 * @return boolean array specifying true if dimension is dictionary
 * and false if dimension is not a dictionary column
 */
public static boolean[] identifyDimensionType(List<CarbonDimension> tableDimensionList) {
    List<Boolean> isDictionaryDimensions = new ArrayList<Boolean>();
    Set<Integer> processedColumnGroup = new HashSet<Integer>();
    for (CarbonDimension carbonDimension : tableDimensionList) {
        List<CarbonDimension> childs = carbonDimension.getListOfChildDimensions();
        // assuming complex dimensions are always last in the list
        if (null != childs && childs.size() > 0) {
            break;
        }
        if (carbonDimension.isColumnar() && hasEncoding(carbonDimension.getEncoder(), Encoding.DICTIONARY)) {
            isDictionaryDimensions.add(true);
        } else if (!carbonDimension.isColumnar()) {
            if (processedColumnGroup.add(carbonDimension.columnGroupId())) {
                isDictionaryDimensions.add(true);
            }
        } else {
            isDictionaryDimensions.add(false);
        }
    }
    return ArrayUtils.toPrimitive(isDictionaryDimensions.toArray(new Boolean[isDictionaryDimensions.size()]));
}

From source file:org.apache.carbondata.core.util.CarbonUtil.java

/**
 * @param dictionaryColumnCardinality cardinality of the dictionary-encoded columns
 * @param wrapperColumnSchemaList     column schemas to format the cardinality for
 * @return the formatted cardinality, with -1 inserted for no-dictionary dimension columns
 */
public static int[] getFormattedCardinality(int[] dictionaryColumnCardinality,
        List<ColumnSchema> wrapperColumnSchemaList) {
    List<Integer> cardinality = new ArrayList<>();
    int counter = 0;
    for (int i = 0; i < wrapperColumnSchemaList.size(); i++) {
        if (CarbonUtil.hasEncoding(wrapperColumnSchemaList.get(i).getEncodingList(),
                org.apache.carbondata.core.metadata.encoder.Encoding.DICTIONARY)) {
            cardinality.add(dictionaryColumnCardinality[counter]);
            counter++;
        } else if (!wrapperColumnSchemaList.get(i).isDimensionColumn()) {
            continue;
        } else {
            cardinality.add(-1);
        }
    }
    return ArrayUtils.toPrimitive(cardinality.toArray(new Integer[cardinality.size()]));
}

From source file:org.apache.cloudstack.kvm.ha.KVMHostActivityChecker.java

public long[] getNeighbors(Host agent) {
    List<Long> neighbors = new ArrayList<Long>();
    List<HostVO> cluster_hosts = resourceManager.listHostsInClusterByStatus(agent.getClusterId(), Status.Up);
    for (HostVO host : cluster_hosts) {
        if (host.getId() == agent.getId() || (host.getHypervisorType() != Hypervisor.HypervisorType.KVM
                && host.getHypervisorType() != Hypervisor.HypervisorType.LXC)) {
            continue;
        }
        neighbors.add(host.getId());
    }
    return ArrayUtils.toPrimitive(neighbors.toArray(new Long[neighbors.size()]));
}

From source file:org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorDateExpressions.java

private int[] getAllBoundaries() {
    List<Integer> boundaries = new ArrayList<Integer>(1);
    Calendar c = Calendar.getInstance();
    c.setTimeInMillis(0); // c.set doesn't reset millis
    for (int year = 1902; year <= 2038; year++) {
        c.set(year, Calendar.JANUARY, 1, 0, 0, 0);
        int exactly = (int) (c.getTimeInMillis() / (24 * 60 * 60 * 1000));
        int before = exactly - 1;
        int after = exactly + 1;
        boundaries.add(Integer.valueOf(before));
        boundaries.add(Integer.valueOf(exactly));
        boundaries.add(Integer.valueOf(after));
    }
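    // toArray(new Integer[1]) still yields a correctly sized array: when the list is
    // larger than the argument, toArray allocates a fresh array of the list's size.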
    Integer[] indices = boundaries.toArray(new Integer[1]);
    return ArrayUtils.toPrimitive(indices);
}

From source file:org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestConfig.java

public static MapJoinDesc createMapJoinDesc(MapJoinTestDescription testDesc, boolean isFullOuterIntersect) {

    MapJoinDesc mapJoinDesc = new MapJoinDesc();

    mapJoinDesc.setPosBigTable(0);

    List<ExprNodeDesc> bigTableKeyExpr = new ArrayList<ExprNodeDesc>();
    for (int i = 0; i < testDesc.bigTableKeyColumnNums.length; i++) {
        bigTableKeyExpr.add(new ExprNodeColumnDesc(testDesc.bigTableKeyTypeInfos[i],
                testDesc.bigTableKeyColumnNames[i], "B", false));
    }

    Map<Byte, List<ExprNodeDesc>> keyMap = new HashMap<Byte, List<ExprNodeDesc>>();
    keyMap.put((byte) 0, bigTableKeyExpr);

    // Big Table expression includes all columns -- keys and extra (value) columns.
    // UNDONE: Assumes all values retained...
    List<ExprNodeDesc> bigTableExpr = new ArrayList<ExprNodeDesc>();
    for (int i = 0; i < testDesc.bigTableColumnNames.length; i++) {
        bigTableExpr.add(new ExprNodeColumnDesc(testDesc.bigTableTypeInfos[i], testDesc.bigTableColumnNames[i],
                "B", false));
    }

    Map<Byte, List<ExprNodeDesc>> exprMap = new HashMap<Byte, List<ExprNodeDesc>>();
    exprMap.put((byte) 0, bigTableExpr);

    List<ExprNodeDesc> smallTableKeyExpr = new ArrayList<ExprNodeDesc>();

    for (int i = 0; i < testDesc.smallTableKeyTypeInfos.length; i++) {
        ExprNodeColumnDesc exprNodeColumnDesc = new ExprNodeColumnDesc(testDesc.smallTableKeyTypeInfos[i],
                testDesc.smallTableKeyColumnNames[i], "S", false);
        smallTableKeyExpr.add(exprNodeColumnDesc);
    }

    // Retained Small Table keys and values.
    List<ExprNodeDesc> smallTableExpr = new ArrayList<ExprNodeDesc>();
    final int smallTableRetainKeySize = testDesc.smallTableRetainKeyColumnNums.length;
    for (int i = 0; i < smallTableRetainKeySize; i++) {
        int smallTableKeyColumnNum = testDesc.smallTableRetainKeyColumnNums[i];
        smallTableExpr.add(new ExprNodeColumnDesc(testDesc.smallTableTypeInfos[smallTableKeyColumnNum],
                testDesc.smallTableColumnNames[smallTableKeyColumnNum], "S", false));
    }

    final int smallTableRetainValueSize = testDesc.smallTableRetainValueColumnNums.length;
    for (int i = 0; i < smallTableRetainValueSize; i++) {
        int smallTableValueColumnNum = smallTableRetainKeySize + testDesc.smallTableRetainValueColumnNums[i];
        smallTableExpr.add(new ExprNodeColumnDesc(testDesc.smallTableTypeInfos[smallTableValueColumnNum],
                testDesc.smallTableColumnNames[smallTableValueColumnNum], "S", false));
    }

    keyMap.put((byte) 1, smallTableKeyExpr);
    exprMap.put((byte) 1, smallTableExpr);

    mapJoinDesc.setKeys(keyMap);
    mapJoinDesc.setExprs(exprMap);

    Byte[] order = new Byte[] { (byte) 0, (byte) 1 };
    mapJoinDesc.setTagOrder(order);
    mapJoinDesc.setNoOuterJoin(testDesc.vectorMapJoinVariation != VectorMapJoinVariation.OUTER
            && testDesc.vectorMapJoinVariation != VectorMapJoinVariation.FULL_OUTER);

    Map<Byte, List<ExprNodeDesc>> filterMap = new HashMap<Byte, List<ExprNodeDesc>>();
    filterMap.put((byte) 0, new ArrayList<ExprNodeDesc>()); // None.
    mapJoinDesc.setFilters(filterMap);

    List<Integer> bigTableRetainColumnNumsList = intArrayToList(testDesc.bigTableRetainColumnNums);
    Map<Byte, List<Integer>> retainListMap = new HashMap<Byte, List<Integer>>();
    retainListMap.put((byte) 0, bigTableRetainColumnNumsList);

    // For now, just small table keys/values...
    if (testDesc.smallTableRetainKeyColumnNums.length == 0) {

        // Just the value columns numbers with retain.
        List<Integer> smallTableValueRetainColumnNumsList = intArrayToList(
                testDesc.smallTableRetainValueColumnNums);

        retainListMap.put((byte) 1, smallTableValueRetainColumnNumsList);
    } else {

        // Both the key/value columns numbers.

        // Zero and above numbers indicate a big table key is needed for
        // small table result "area".

        // Negative numbers indicate a column to be (deserialize) read from the small table's
        // LazyBinary value row.

        ArrayList<Integer> smallTableValueIndicesNumsList = new ArrayList<Integer>();
        for (int i = 0; i < testDesc.smallTableRetainKeyColumnNums.length; i++) {
            smallTableValueIndicesNumsList.add(testDesc.smallTableRetainKeyColumnNums[i]);
        }
        for (int i = 0; i < testDesc.smallTableRetainValueColumnNums.length; i++) {
            smallTableValueIndicesNumsList.add(-testDesc.smallTableRetainValueColumnNums[i] - 1);
        }
        int[] smallTableValueIndicesNums = ArrayUtils
                .toPrimitive(smallTableValueIndicesNumsList.toArray(new Integer[0]));

        Map<Byte, int[]> valueIndicesMap = new HashMap<Byte, int[]>();
        valueIndicesMap.put((byte) 1, smallTableValueIndicesNums);
        mapJoinDesc.setValueIndices(valueIndicesMap);
    }
    mapJoinDesc.setRetainList(retainListMap);

    switch (testDesc.mapJoinPlanVariation) {
    case DYNAMIC_PARTITION_HASH_JOIN:
        // FULL OUTER behaves differently for dynamic partition hash join.
        mapJoinDesc.setDynamicPartitionHashJoin(true);
        break;
    default:
        throw new RuntimeException("Unexpected map join plan variation " + testDesc.mapJoinPlanVariation);
    }

    int joinDescType;
    switch (testDesc.vectorMapJoinVariation) {
    case INNER:
    case INNER_BIG_ONLY:
        joinDescType = JoinDesc.INNER_JOIN;
        break;
    case LEFT_SEMI:
        joinDescType = JoinDesc.LEFT_SEMI_JOIN;
        break;
    case OUTER:
        joinDescType = JoinDesc.LEFT_OUTER_JOIN;
        break;
    case FULL_OUTER:
        joinDescType = JoinDesc.FULL_OUTER_JOIN;
        break;
    default:
        throw new RuntimeException("unknown operator variation " + testDesc.vectorMapJoinVariation);
    }
    JoinCondDesc[] conds = new JoinCondDesc[1];
    conds[0] = new JoinCondDesc(0, 1, joinDescType);
    mapJoinDesc.setConds(conds);

    TableDesc keyTableDesc = PlanUtils.getMapJoinKeyTableDesc(testDesc.hiveConf,
            PlanUtils.getFieldSchemasFromColumnList(smallTableKeyExpr, ""));
    mapJoinDesc.setKeyTblDesc(keyTableDesc);

    // Small Table expression value columns.
    List<ExprNodeDesc> smallTableValueExpr = new ArrayList<ExprNodeDesc>();

    // All Small Table keys and values.
    for (int i = 0; i < testDesc.smallTableValueColumnNames.length; i++) {
        smallTableValueExpr.add(new ExprNodeColumnDesc(testDesc.smallTableValueTypeInfos[i],
                testDesc.smallTableValueColumnNames[i], "S", false));
    }

    TableDesc valueTableDesc = PlanUtils
            .getMapJoinValueTableDesc(PlanUtils.getFieldSchemasFromColumnList(smallTableValueExpr, ""));
    ArrayList<TableDesc> valueTableDescsList = new ArrayList<TableDesc>();

    // Big Table entry, then Small Table entry.
    valueTableDescsList.add(null);
    valueTableDescsList.add(valueTableDesc);
    mapJoinDesc.setValueTblDescs(valueTableDescsList);
    mapJoinDesc.setValueFilteredTblDescs(valueTableDescsList);

    mapJoinDesc.setOutputColumnNames(Arrays.asList(testDesc.outputColumnNames));

    return mapJoinDesc;
}

From source file:org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestConfig.java

private static Operator<SelectDesc> vectorizeInterceptSelectOperator(MapJoinOperator mapJoinOperator,
        int bigTableKeySize, int bigTableRetainSize, Operator<SelectDesc> selectOperator) throws HiveException {

    MapJoinDesc mapJoinDesc = (MapJoinDesc) mapJoinOperator.getConf();

    VectorizationContext vOutContext = ((VectorizationContextRegion) mapJoinOperator)
            .getOutputVectorizationContext();

    SelectDesc selectDesc = (SelectDesc) selectOperator.getConf();
    List<ExprNodeDesc> selectExprs = selectDesc.getColList();

    VectorExpression[] selectVectorExpr = new VectorExpression[bigTableRetainSize];
    for (int i = 0; i < bigTableRetainSize; i++) {

        TypeInfo typeInfo = selectExprs.get(i).getTypeInfo();
        if (i < bigTableKeySize) {

            // Big Table key.
            selectVectorExpr[i] = vOutContext.getVectorExpression(selectExprs.get(i));
        } else {

            // In vector mode, for test purposes, we substitute a NO-OP (we don't want to
            // modify the batch).

            // FULL OUTER INTERCEPT does not look at non-key columns.

            NoOpExpression noOpExpression = new NoOpExpression(i);

            noOpExpression.setInputTypeInfos(typeInfo);
            noOpExpression.setInputDataTypePhysicalVariations(DataTypePhysicalVariation.NONE);

            noOpExpression.setOutputTypeInfo(typeInfo);
            noOpExpression.setOutputDataTypePhysicalVariation(DataTypePhysicalVariation.NONE);

            selectVectorExpr[i] = noOpExpression;
        }
    }

    System.out
            .println("*BENCHMARK* VectorSelectOperator selectVectorExpr " + Arrays.toString(selectVectorExpr));

    int[] projectedColumns = ArrayUtils.toPrimitive(
            vOutContext.getProjectedColumns().subList(0, bigTableRetainSize).toArray(new Integer[0]));
    System.out
            .println("*BENCHMARK* VectorSelectOperator projectedColumns " + Arrays.toString(projectedColumns));

    VectorSelectDesc vectorSelectDesc = new VectorSelectDesc();
    vectorSelectDesc.setSelectExpressions(selectVectorExpr);
    vectorSelectDesc.setProjectedOutputColumns(projectedColumns);

    Operator<SelectDesc> vectorSelectOperator = OperatorFactory.getVectorOperator(
            selectOperator.getCompilationOpContext(), selectDesc, vOutContext, vectorSelectDesc);

    return vectorSelectOperator;
}

From source file:org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestConfig.java

public static CountCollectorTestOperator addFullOuterIntercept(MapJoinTestImplementation mapJoinImplementation,
        MapJoinTestDescription testDesc, RowTestObjectsMultiSet outputTestRowMultiSet, MapJoinTestData testData,
        MapJoinOperator mapJoinOperator, MapJoinTableContainer mapJoinTableContainer,
        MapJoinTableContainerSerDe mapJoinTableContainerSerDe)
        throws SerDeException, IOException, HiveException {

    MapJoinDesc mapJoinDesc = (MapJoinDesc) mapJoinOperator.getConf();

    // For FULL OUTER MapJoin, we require all Big Keys to be present in the output result.
    // The first N output columns are the Big Table key columns.
    Map<Byte, List<ExprNodeDesc>> keyMap = mapJoinDesc.getKeys();
    List<ExprNodeDesc> bigTableKeyExprs = keyMap.get((byte) 0);
    final int bigTableKeySize = bigTableKeyExprs.size();

    Map<Byte, List<Integer>> retainMap = mapJoinDesc.getRetainList();
    List<Integer> bigTableRetainList = retainMap.get((byte) 0);
    final int bigTableRetainSize = bigTableRetainList.size();

    List<String> outputColumnNameList = mapJoinDesc.getOutputColumnNames();
    String[] mapJoinOutputColumnNames = outputColumnNameList.toArray(new String[0]);

    // Use a utility method to get the MapJoin output TypeInfo.
    TypeInfo[] mapJoinOutputTypeInfos = VectorMapJoinBaseOperator.getOutputTypeInfos(mapJoinDesc);

    final boolean isVectorOutput = MapJoinTestConfig.isVectorOutput(mapJoinImplementation);

    /*
     * Always create a row-mode SelectOperator.  If we are vector-mode, next we will use its
     * expressions and replace it with a VectorSelectOperator.
     */
    Operator<SelectDesc> selectOperator = makeInterceptSelectOperator(mapJoinOperator, bigTableKeySize,
            bigTableRetainSize, mapJoinOutputColumnNames, mapJoinOutputTypeInfos);

    List<String> selectOutputColumnNameList = ((SelectDesc) selectOperator.getConf()).getOutputColumnNames();
    String[] selectOutputColumnNames = selectOutputColumnNameList.toArray(new String[0]);

    if (isVectorOutput) {
        selectOperator = vectorizeInterceptSelectOperator(mapJoinOperator, bigTableKeySize, bigTableRetainSize,
                selectOperator);
    }

    /*
     * Create a test description just for the FULL OUTER INTERCEPT, using the Select
     * operator's output columns and types.
     */
    MapJoinTestDescription interceptTestDesc = new MapJoinTestDescription(testDesc.hiveConf,
            testDesc.vectorMapJoinVariation, selectOutputColumnNames,
            Arrays.copyOf(mapJoinOutputTypeInfos, bigTableRetainSize), testDesc.bigTableKeyColumnNums,
            testDesc.smallTableValueTypeInfos, testDesc.smallTableRetainKeyColumnNums,
            testDesc.smallTableGenerationParameters, testDesc.mapJoinPlanVariation);

    MapJoinDesc intersectMapJoinDesc = createMapJoinDesc(interceptTestDesc, /* isFullOuterIntersect */ true);

    /*
     * Create FULL OUTER INTERSECT MapJoin operator.
     */
    CreateMapJoinResult interceptCreateMapJoinResult = createMapJoinImplementation(mapJoinImplementation,
            interceptTestDesc, testData, intersectMapJoinDesc);
    MapJoinOperator intersectMapJoinOperator = interceptCreateMapJoinResult.mapJoinOperator;
    MapJoinTableContainer intersectMapJoinTableContainer = interceptCreateMapJoinResult.mapJoinTableContainer;
    MapJoinTableContainerSerDe interceptMapJoinTableContainerSerDe = interceptCreateMapJoinResult.mapJoinTableContainerSerDe;

    connectOperators(mapJoinOperator, selectOperator);

    connectOperators(selectOperator, intersectMapJoinOperator);

    CountCollectorTestOperator interceptTestCollectorOperator;
    if (!isVectorOutput) {
        interceptTestCollectorOperator = new TestMultiSetCollectorOperator(
                interceptTestDesc.outputObjectInspectors, outputTestRowMultiSet);
    } else {
        VectorizationContext vContext = ((VectorizationContextRegion) intersectMapJoinOperator)
                .getOutputVectorizationContext();
        int[] intersectProjectionColumns = ArrayUtils
                .toPrimitive(vContext.getProjectedColumns().toArray(new Integer[0]));
        interceptTestCollectorOperator = new TestMultiSetVectorCollectorOperator(intersectProjectionColumns,
                interceptTestDesc.outputTypeInfos, interceptTestDesc.outputObjectInspectors,
                outputTestRowMultiSet);
    }

    connectOperators(intersectMapJoinOperator, interceptTestCollectorOperator);

    // Setup the FULL OUTER INTERSECT MapJoin's inputObjInspector to include the Small Table, etc.
    intersectMapJoinOperator.setInputObjInspectors(interceptTestDesc.inputObjectInspectors);

    // Now, invoke initializeOp methods from the root MapJoin operator.
    mapJoinOperator.initialize(testDesc.hiveConf, testDesc.inputObjectInspectors);

    // Fixup the mapJoinTables container references to our test data.
    mapJoinOperator.setTestMapJoinTableContainer(1, mapJoinTableContainer, mapJoinTableContainerSerDe);
    intersectMapJoinOperator.setTestMapJoinTableContainer(1, intersectMapJoinTableContainer,
            interceptMapJoinTableContainerSerDe);

    return interceptTestCollectorOperator;
}

From source file:org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinCommonOperator.java

/**
 * Determine from a mapping which columns are BytesColumnVector columns.
 */
private int[] getByteColumnVectorColumns(VectorColumnMapping mapping) {
    // Search mapping for any strings and return their output columns.
    ArrayList<Integer> list = new ArrayList<Integer>();
    int count = mapping.getCount();
    int[] outputColumns = mapping.getOutputColumns();
    TypeInfo[] typeInfos = mapping.getTypeInfos();
    for (int i = 0; i < count; i++) {
        int outputColumn = outputColumns[i];
        String typeName = typeInfos[i].getTypeName();
        if (VectorizationContext.isStringFamily(typeName)) {
            list.add(outputColumn);
        }
    }
    return ArrayUtils.toPrimitive(list.toArray(new Integer[0]));
}

From source file:org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinGenerateResultOperator.java

private void setupSpillSerDe(VectorizedRowBatch batch) throws HiveException {

    TypeInfo[] inputObjInspectorsTypeInfos = VectorizedBatchUtil
            .typeInfosFromStructObjectInspector((StructObjectInspector) inputObjInspectors[posBigTable]);

    List<Integer> projectedColumns = vContext.getProjectedColumns();
    int projectionSize = vContext.getProjectedColumns().size();

    List<TypeInfo> typeInfoList = new ArrayList<TypeInfo>();
    List<Integer> noNullsProjectionList = new ArrayList<Integer>();
    for (int i = 0; i < projectionSize; i++) {
        int projectedColumn = projectedColumns.get(i);
        if (batch.cols[projectedColumn] != null
                && inputObjInspectorsTypeInfos[i].getCategory() == Category.PRIMITIVE) {
            // Only columns present in the batch and non-complex types.
            typeInfoList.add(inputObjInspectorsTypeInfos[i]);
            noNullsProjectionList.add(projectedColumn);
        }
    }

    int[] noNullsProjection = ArrayUtils.toPrimitive(noNullsProjectionList.toArray(new Integer[0]));
    int noNullsProjectionSize = noNullsProjection.length;
    bigTableTypeInfos = typeInfoList.toArray(new TypeInfo[0]);

    bigTableVectorSerializeRow = new VectorSerializeRow<LazyBinarySerializeWrite>(
            new LazyBinarySerializeWrite(noNullsProjectionSize));

    bigTableVectorSerializeRow.init(bigTableTypeInfos, noNullsProjection);

    bigTableVectorDeserializeRow = new VectorDeserializeRow<LazyBinaryDeserializeRead>(
            new LazyBinaryDeserializeRead(bigTableTypeInfos, /* useExternalBuffer */ true));

    bigTableVectorDeserializeRow.init(noNullsProjection);
}

From source file:org.apache.hadoop.hive.ql.exec.vector.VectorColumnOrderedMap.java

public Mapping getMapping() {
    ArrayList<Integer> orderedColumns = new ArrayList<Integer>();
    ArrayList<Integer> valueColumns = new ArrayList<Integer>();
    ArrayList<TypeInfo> typeInfos = new ArrayList<TypeInfo>();
    for (Map.Entry<Integer, Value> entry : orderedTreeMap.entrySet()) {
        orderedColumns.add(entry.getKey());
        Value value = entry.getValue();
        valueColumns.add(value.valueColumn);
        typeInfos.add(value.typeInfo);
    }
    return new Mapping(ArrayUtils.toPrimitive(orderedColumns.toArray(new Integer[0])),
            ArrayUtils.toPrimitive(valueColumns.toArray(new Integer[0])), typeInfos.toArray(new TypeInfo[0]));
}