Example usage for org.apache.commons.lang3 ArrayUtils toPrimitive

Introduction

On this page you can find example usages of org.apache.commons.lang3 ArrayUtils.toPrimitive, drawn from open-source projects.

Prototype

public static boolean[] toPrimitive(final Boolean[] array) 

Document

Converts an array of object Booleans to primitives.

This method returns null for a null input array. ArrayUtils also provides toPrimitive overloads for the other wrapper types (Byte, Character, Short, Integer, Long, Float, Double), plus variants that take a default value to substitute for null elements; the usage examples below exercise several of these overloads.
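
A minimal, runnable sketch of this contract (the class name is illustrative). Note that the single-argument wrapper overloads throw a NullPointerException when an array element, as opposed to the array itself, is null:

import org.apache.commons.lang3.ArrayUtils;

public class ToPrimitiveDemo {
    public static void main(String[] args) {
        Boolean[] boxed = { Boolean.TRUE, Boolean.FALSE };
        boolean[] primitives = ArrayUtils.toPrimitive(boxed);
        System.out.println(primitives.length);                  // 2

        // a null input array maps to null rather than throwing
        boolean[] fromNull = ArrayUtils.toPrimitive((Boolean[]) null);
        System.out.println(fromNull == null);                   // true
    }
}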

Usage

From source file:org.asqatasun.entity.audit.RelatedBinaryContentImpl.java

@Override
public byte[] getContent() {
    return ArrayUtils.toPrimitive(binaryContent);
}
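
Here binaryContent is presumably a Byte[] field; the Byte[] overload of toPrimitive unboxes it to byte[], passing a null field through as null. A minimal sketch of the same pattern, with a hypothetical field value:

import org.apache.commons.lang3.ArrayUtils;

public class ByteContentSketch {
    // hypothetical stand-in for the Byte[] field used above
    private Byte[] binaryContent = { 0x41, 0x42 };

    public byte[] getContent() {
        // unboxes Byte[] -> byte[]; returns null when the field is null
        return ArrayUtils.toPrimitive(binaryContent);
    }

    public static void main(String[] args) {
        System.out.println(new ByteContentSketch().getContent().length); // 2
    }
}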

From source file:org.carbondata.core.carbon.datastore.block.SegmentProperties.java

/**
 * Below method will fill in the key generator details for both types of key
 * generator. This will be required during both query execution and data
 * loading.
 */
private void fillKeyGeneratorDetails() {
    // create a dimension partitioner list
    // this list holds information about how each dimension value is
    // stored: in a column group or individually
    List<Integer> dimensionPartitionList = new ArrayList<Integer>(
            CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    List<Boolean> isDictionaryColumn = new ArrayList<Boolean>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    int prvcolumnGroupId = -1;
    int counter = 0;
    while (counter < dimensions.size()) {
        CarbonDimension carbonDimension = dimensions.get(counter);
        // if the dimension is not part of the mdkey then no need to add it
        if (!carbonDimension.getEncoder().contains(Encoding.DICTIONARY)) {
            isDictionaryColumn.add(false);
            counter++;
            continue;
        }
        // columnar column is stored individually
        // so add one
        if (carbonDimension.isColumnar()) {
            dimensionPartitionList.add(1);
            isDictionaryColumn.add(true);
        }
        // if the dimension is in a column group, record how many columns of
        // the group are selected
        if (!carbonDimension.isColumnar() && carbonDimension.columnGroupId() == prvcolumnGroupId) {
            // increment the previous entry, as this column is in the same column group
            dimensionPartitionList.set(dimensionPartitionList.size() - 1,
                    dimensionPartitionList.get(dimensionPartitionList.size() - 1) + 1);
        } else if (!carbonDimension.isColumnar()) {
            dimensionPartitionList.add(1);
            isDictionaryColumn.add(true);
        }
        prvcolumnGroupId = carbonDimension.columnGroupId();
        counter++;
    }
    // get the partitioner
    int[] dimensionPartitions = ArrayUtils
            .toPrimitive(dimensionPartitionList.toArray(new Integer[dimensionPartitionList.size()]));
    // get the bit length of each column
    int[] bitLength = CarbonUtil.getDimensionBitLength(dimColumnsCardinality, dimensionPartitions);
    // create a key generator
    this.dimensionKeyGenerator = new MultiDimKeyVarLengthGenerator(bitLength);
    this.fixedLengthKeySplitter = new MultiDimKeyVarLengthVariableSplitGenerator(bitLength,
            dimensionPartitions);
    // get the size of each value in file block
    int[] dictionaryDimColumnValueSize = fixedLengthKeySplitter.getBlockKeySize();
    int index = -1;
    this.eachDimColumnValueSize = new int[isDictionaryColumn.size()];
    for (int i = 0; i < eachDimColumnValueSize.length; i++) {
        if (!isDictionaryColumn.get(i)) {
            eachDimColumnValueSize[i] = -1;
            continue;
        }
        eachDimColumnValueSize[i] = dictionaryDimColumnValueSize[++index];
    }
    if (complexDimensions.size() > 0) {
        int[] complexDimensionPartition = new int[complexDimensions.size()];
        // complex dimensions are stored in columnar format, so each gets one partition
        Arrays.fill(complexDimensionPartition, 1);
        int[] complexDimensionBitLength = new int[complexDimensionPartition.length];
        // number of bits will be 64
        Arrays.fill(complexDimensionBitLength, 64);
        this.complexDimensionKeyGenerator = new MultiDimKeyVarLengthGenerator(complexDimensionBitLength);
        ColumnarSplitter keySplitter = new MultiDimKeyVarLengthVariableSplitGenerator(complexDimensionBitLength,
                complexDimensionPartition);
        eachComplexDimColumnValueSize = keySplitter.getBlockKeySize();
    } else {
        eachComplexDimColumnValueSize = new int[0];
    }
}
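
The snippet above also shows the most common toPrimitive idiom on this page: copy a List<Integer> into an Integer[] with List.toArray, then unbox it with the Integer[] overload. A minimal standalone sketch of the idiom (names and values are illustrative):

import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang3.ArrayUtils;

public class ListToIntArray {
    public static void main(String[] args) {
        List<Integer> partitionList = Arrays.asList(1, 3, 1);
        // List<Integer> -> Integer[] -> int[]; a null element here would
        // make the single-argument toPrimitive throw a NullPointerException
        int[] partitions = ArrayUtils.toPrimitive(
                partitionList.toArray(new Integer[partitionList.size()]));
        System.out.println(Arrays.toString(partitions)); // [1, 3, 1]
    }
}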

From source file:org.carbondata.processing.restructure.SchemaRestructurer.java

public boolean restructureSchema(List<CubeDimension> newDimensions, List<Measure> newMeasures,
        Map<String, String> defaultValues, CarbonDef.Schema origUnModifiedSchema, CarbonDef.Schema schema,
        List<String> validDropDimList, List<String> validDropMsrList) {
    String prevRSFolderPathPrefix = pathTillRSFolderParent + File.separator
            + CarbonCommonConstants.RESTRUCTRE_FOLDER;
    String sliceMetaDatapath = prevRSFolderPathPrefix + currentRestructFolderNumber + File.separator
            + factTableName;

    SliceMetaData currentSliceMetaData = CarbonUtil.readSliceMetaDataFile(sliceMetaDatapath,
            currentRestructFolderNumber);
    if (null == currentSliceMetaData) {
        LOGGER.error("Failed to read current sliceMetaData from:" + sliceMetaDatapath);
        LOGGER.error("May be dataloading is not done even once:" + sliceMetaDatapath);
        return true;
    }

    CarbonDef.Cube origUnModifiedCube = CarbonSchemaParser.getMondrianCube(origUnModifiedSchema, cubeName);

    if (!processDroppedDimsMsrs(prevRSFolderPathPrefix, currentRestructFolderNumber, validDropDimList,
            validDropMsrList, origUnModifiedCube)) {
        LOGGER.error("Failed to drop the dimension/measure");
        return false;
    }

    if (newDimensions.isEmpty() && newMeasures.isEmpty()) {
        return true;
    }

    List<String> dimensions = new ArrayList<String>(Arrays.asList(currentSliceMetaData.getDimensions()));
    List<String> dimsToAddToOldSliceMetaData = new ArrayList<String>();
    List<String> measures = new ArrayList<String>(Arrays.asList(currentSliceMetaData.getMeasures()));
    List<String> measureAggregators = new ArrayList<String>(
            Arrays.asList(currentSliceMetaData.getMeasuresAggregator()));
    Map<String, String> defValuesWithFactTableNames = new HashMap<String, String>();

    for (Measure aMeasure : newMeasures) {
        measures.add(aMeasure.column);
        measureAggregators.add(aMeasure.aggregator);
    }

    String tmpsliceMetaDataPath = prevRSFolderPathPrefix + currentRestructFolderNumber + File.separator
            + factTableName;
    int curLoadCounter = CarbonUtil.checkAndReturnCurrentLoadFolderNumber(tmpsliceMetaDataPath);

    int newLoadCounter = curLoadCounter + 1;

    String newLevelFolderPath = newSliceMetaDataPath + File.separator + CarbonCommonConstants.LOAD_FOLDER
            + newLoadCounter + File.separator;

    if (!createLoadFolder(newLevelFolderPath)) {
        LOGGER.error("Failed to create load folder:" + newLevelFolderPath);
        return false;
    }

    CarbonDef.Cube cube = CarbonSchemaParser.getMondrianCube(schema, cubeName);
    if (!createAggregateTableAfterRestructure(newLoadCounter, cube)) {
        return false;
    }

    int[] currDimCardinality = null;

    try {
        currDimCardinality = readcurrentLevelCardinalityFile(
                tmpsliceMetaDataPath + File.separator + CarbonCommonConstants.LOAD_FOLDER + curLoadCounter,
                factTableName);
        if (null == currDimCardinality) {
            LOGGER.error("Level cardinality file is missing.Was empty load folder created to maintain load "
                    + "folder count in sync?");
        }
    } catch (CarbonUtilException e) {
        LOGGER.error(e.getMessage());
        return false;
    }

    List<Integer> dimLens = (null != currDimCardinality)
            ? new ArrayList<Integer>(Arrays.asList(ArrayUtils.toObject(currDimCardinality)))
            : new ArrayList<Integer>();

    String defaultVal = null;
    String levelColName;
    for (CubeDimension aDimension : newDimensions) {
        try {
            levelColName = ((CarbonDef.Dimension) aDimension).hierarchies[0].levels[0].column;

            RelationOrJoin relation = ((CarbonDef.Dimension) aDimension).hierarchies[0].relation;

            String tableName = relation == null ? factTableName
                    : ((Table) ((CarbonDef.Dimension) aDimension).hierarchies[0].relation).name;

            dimensions.add(tableName + '_' + levelColName);
            dimsToAddToOldSliceMetaData.add(tableName + '_' + levelColName);
            defaultVal = defaultValues.get(aDimension.name);
            if (aDimension.noDictionary) {
                continue;
            }
            if (null != defaultVal) {
                defValuesWithFactTableNames.put(tableName + '_' + levelColName, defaultVal);
                dimLens.add(2);
            } else {
                dimLens.add(1);
            }
            levelFilePrefix = tableName + '_';
            createLevelFiles(newLevelFolderPath,
                    levelFilePrefix + ((CarbonDef.Dimension) aDimension).hierarchies[0].levels[0].column
                            + CarbonCommonConstants.LEVEL_FILE_EXTENSION,
                    defaultVal);
            LevelSortIndexWriterThread levelFileUpdater = new LevelSortIndexWriterThread(
                    newLevelFolderPath + levelFilePrefix
                            + ((CarbonDef.Dimension) aDimension).hierarchies[0].levels[0].column
                            + CarbonCommonConstants.LEVEL_FILE_EXTENSION,
                    ((CarbonDef.Dimension) aDimension).hierarchies[0].levels[0].type);
            levelFileUpdater.call();
        } catch (Exception e) {
            return false;
        }
    }

    SliceMetaData newSliceMetaData = new SliceMetaData();

    newSliceMetaData.setDimensions(dimensions.toArray(new String[dimensions.size()]));
    newSliceMetaData.setActualDimensions(dimensions.toArray(new String[dimensions.size()]));
    newSliceMetaData.setMeasures(measures.toArray(new String[measures.size()]));
    newSliceMetaData.setTableNamesToLoadMandatory(null);
    newSliceMetaData.setMeasuresAggregator(measureAggregators.toArray(new String[measureAggregators.size()]));

    int[] updatedCardinality = ArrayUtils.toPrimitive(dimLens.toArray(new Integer[dimLens.size()]));
    try {
        writeLevelCardinalityFile(newLevelFolderPath, factTableName, updatedCardinality);
    } catch (KettleException e) {
        LOGGER.error(e.getMessage());
        return false;
    }

    newSliceMetaData.setDimLens(updatedCardinality);
    newSliceMetaData.setActualDimLens(updatedCardinality);
    newSliceMetaData.setKeyGenerator(KeyGeneratorFactory.getKeyGenerator(newSliceMetaData.getDimLens()));

    CarbonUtil.writeSliceMetaDataFile(newSliceMetaDataPath, newSliceMetaData, nextRestructFolder);

    SliceMetaData readSliceMetaDataFile = null;

    for (int folderNumber = currentRestructFolderNumber; folderNumber >= 0; folderNumber--) {
        sliceMetaDatapath = prevRSFolderPathPrefix + folderNumber + File.separator + factTableName;
        readSliceMetaDataFile = CarbonUtil.readSliceMetaDataFile(sliceMetaDatapath,
                currentRestructFolderNumber);
        if (null == readSliceMetaDataFile) {
            continue;
        }

        updateSliceMetadata(dimsToAddToOldSliceMetaData, newMeasures, defValuesWithFactTableNames,
                defaultValues, readSliceMetaDataFile, sliceMetaDatapath, newSliceMetaDataFileExtn);
        addNewSliceMetaDataForAggTables(folderNumber);
    }

    return true;
}

From source file:org.carbondata.processing.restructure.SchemaRestructurer.java

private void updateSliceMetadata(List<String> newDimensions, List<Measure> newMeasures,
        Map<String, String> dimDefaultValues, Map<String, String> defaultValues, SliceMetaData oldSliceMetaData,
        String oldSliceMetaDatapath, String newSliceMetaDataFileExtn) {
    List<String> existingNewDimensions = (null != oldSliceMetaData.getNewDimensions())
            ? new ArrayList<String>(Arrays.asList(oldSliceMetaData.getNewDimensions()))
            : new ArrayList<String>();

    List<Integer> existingNewDimLens = (null != oldSliceMetaData.getNewDimLens())
            ? new ArrayList<Integer>(Arrays.asList(ArrayUtils.toObject(oldSliceMetaData.getNewDimLens())))
            : new ArrayList<Integer>();

    List<Integer> existingNewDimsSurrogateKeys = (null != oldSliceMetaData.getNewDimsSurrogateKeys())
            ? new ArrayList<Integer>(
                    Arrays.asList(ArrayUtils.toObject(oldSliceMetaData.getNewDimsSurrogateKeys())))
            : new ArrayList<Integer>();

    List<String> existingNewDimsDefVals = (null != oldSliceMetaData.getNewDimsDefVals())
            ? new ArrayList<String>(Arrays.asList(oldSliceMetaData.getNewDimsDefVals()))
            : new ArrayList<String>();

    List<String> existingNewMeasures = (null != oldSliceMetaData.getNewMeasures())
            ? new ArrayList<String>(Arrays.asList(oldSliceMetaData.getNewMeasures()))
            : new ArrayList<String>();

    List<Double> existingNewMeasureDftVals = (null != oldSliceMetaData.getNewMsrDfts())
            ? new ArrayList<Double>(Arrays.asList(ArrayUtils.toObject(oldSliceMetaData.getNewMsrDfts())))
            : new ArrayList<Double>();

    List<String> existingNewMeasureAggregators = (null != oldSliceMetaData.getNewMeasuresAggregator())
            ? new ArrayList<String>(Arrays.asList(oldSliceMetaData.getNewMeasuresAggregator()))
            : new ArrayList<String>();

    existingNewDimensions.addAll(newDimensions);

    String dimDefVal;
    for (int i = 0; i < newDimensions.size(); i++) {
        dimDefVal = dimDefaultValues.get(newDimensions.get(i));
        if (null == dimDefVal) {
            existingNewDimsDefVals.add(CarbonCommonConstants.MEMBER_DEFAULT_VAL);
            existingNewDimsSurrogateKeys.add(DEF_SURROGATE_KEY);
            existingNewDimLens.add(1);
        } else {
            existingNewDimsDefVals.add(dimDefVal);
            existingNewDimsSurrogateKeys.add(DEF_SURROGATE_KEY + 1);
            existingNewDimLens.add(2);
        }
    }

    oldSliceMetaData.setNewDimLens(
            ArrayUtils.toPrimitive(existingNewDimLens.toArray(new Integer[existingNewDimLens.size()])));
    oldSliceMetaData.setNewActualDimLens(
            ArrayUtils.toPrimitive(existingNewDimLens.toArray(new Integer[existingNewDimLens.size()])));
    oldSliceMetaData.setNewDimensions(existingNewDimensions.toArray(new String[existingNewDimensions.size()]));
    oldSliceMetaData
            .setNewActualDimensions(existingNewDimensions.toArray(new String[existingNewDimensions.size()]));
    oldSliceMetaData
            .setNewDimsDefVals(existingNewDimsDefVals.toArray(new String[existingNewDimsDefVals.size()]));
    oldSliceMetaData.setNewDimsSurrogateKeys(ArrayUtils.toPrimitive(
            existingNewDimsSurrogateKeys.toArray(new Integer[existingNewDimsSurrogateKeys.size()])));

    String doubleVal;
    Double val;

    for (Measure aMeasure : newMeasures) {
        existingNewMeasures.add(aMeasure.column);
        doubleVal = defaultValues.get(aMeasure.name);
        if (null != doubleVal && 0 != doubleVal.trim().length()) {
            try {
                val = Double.parseDouble(doubleVal);
                existingNewMeasureDftVals.add(val);
                existingNewMeasureAggregators.add(aMeasure.aggregator);
            } catch (NumberFormatException e) {
                existingNewMeasureDftVals.add(0.0);
                existingNewMeasureAggregators.add(aMeasure.aggregator);
            }
        } else {
            existingNewMeasureDftVals.add(0.0);
            existingNewMeasureAggregators.add(aMeasure.aggregator);
        }
    }

    oldSliceMetaData.setNewMeasures(existingNewMeasures.toArray(new String[existingNewMeasures.size()]));

    oldSliceMetaData.setNewMsrDfts(ArrayUtils
            .toPrimitive(existingNewMeasureDftVals.toArray(new Double[existingNewMeasureDftVals.size()])));
    oldSliceMetaData.setNewMeasuresAggregator(
            existingNewMeasureAggregators.toArray(new String[existingNewMeasureAggregators.size()]));

    CarbonUtil.writeSliceMetaDataFile(oldSliceMetaDatapath, oldSliceMetaData, nextRestructFolder);
}

From source file:org.carbondata.processing.store.writer.AbstractFactDataWriter.java

public AbstractFactDataWriter(String storeLocation, int measureCount, int mdKeyLength, String tableName,
        boolean isNodeHolder, IFileManagerComposite fileManager, int[] keyBlockSize, boolean isUpdateFact,
        CarbonDataFileAttributes carbonDataFileAttributes, List<ColumnSchema> columnSchema) {

    // measure count
    this.measureCount = measureCount;
    // table name
    this.tableName = tableName;

    this.storeLocation = storeLocation;
    this.blockletInfoList = new ArrayList<BlockletInfoColumnar>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
    // get max file size
    CarbonProperties propInstance = CarbonProperties.getInstance();
    this.fileSizeInBytes = Long
            .parseLong(propInstance.getProperty(CarbonCommonConstants.MAX_FILE_SIZE,
                    CarbonCommonConstants.MAX_FILE_SIZE_DEFAULT_VAL))
            * CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR
            * CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR * 1L;
    this.spaceReservedForBlockMetaSize = Integer
            .parseInt(propInstance.getProperty(CarbonCommonConstants.CARBON_BLOCK_META_RESERVED_SPACE,
                    CarbonCommonConstants.CARBON_BLOCK_META_RESERVED_SPACE_DEFAULT));
    this.dataBlockSize = fileSizeInBytes - (fileSizeInBytes * spaceReservedForBlockMetaSize) / 100;
    LOGGER.info("Total file size: " + fileSizeInBytes + " and dataBlock Size: " + dataBlockSize);
    this.isNodeHolderRequired = Boolean
            .valueOf(CarbonCommonConstants.WRITE_ALL_NODE_IN_SINGLE_TIME_DEFAULT_VALUE);
    this.fileManager = fileManager;

    /**
     * keyBlockSize
     */
    this.keyBlockSize = keyBlockSize;
    // mdkey size
    this.mdkeySize = mdKeyLength;

    this.isNodeHolderRequired = this.isNodeHolderRequired && isNodeHolder;
    if (this.isNodeHolderRequired) {
        this.nodeHolderList = new CopyOnWriteArrayList<NodeHolder>();

        this.executorService = Executors.newFixedThreadPool(5);
    }

    //TODO: We should delete the levelmetadata file after reading here.
    this.localCardinality = CarbonMergerUtil.getCardinalityFromLevelMetadata(storeLocation, tableName);
    this.carbonDataFileAttributes = carbonDataFileAttributes;
    CarbonTableIdentifier tableIdentifier = new CarbonTableIdentifier(databaseName, tableName);
    carbonTablePath = CarbonStorePath.getCarbonTablePath(storeLocation, tableIdentifier);
    List<Integer> cardinalityList = new ArrayList<Integer>();
    thriftColumnSchemaList = getColumnSchemaListAndCardinality(cardinalityList, localCardinality, columnSchema);
    localCardinality = ArrayUtils.toPrimitive(cardinalityList.toArray(new Integer[cardinalityList.size()]));
}

From source file:org.carbondata.query.carbon.executor.impl.AbstractQueryExecutor.java

/**
 * Below method will be used to get the block execution info which is
 * required to execute any block based on the query model.
 *
 * @param queryModel query model from user query
 * @param blockIndex block index
 * @return block execution info
 * @throws QueryExecutionException any failure during block info creation
 */
protected BlockExecutionInfo getBlockExecutionInfoForBlock(QueryModel queryModel, AbstractIndex blockIndex)
        throws QueryExecutionException {
    BlockExecutionInfo blockExecutionInfo = new BlockExecutionInfo();
    SegmentProperties segmentProperties = blockIndex.getSegmentProperties();
    List<CarbonDimension> tableBlockDimensions = segmentProperties.getDimensions();
    KeyGenerator blockKeyGenerator = segmentProperties.getDimensionKeyGenerator();

    // get only those query dimensions that are present in the table block
    List<QueryDimension> updatedQueryDimension = RestructureUtil
            .getUpdatedQueryDimension(queryModel.getQueryDimension(), tableBlockDimensions);
    // TODO add complex dimension children
    int[] maskByteRangesForBlock = QueryUtil.getMaskedByteRange(updatedQueryDimension, blockKeyGenerator);
    int[] maskedByte = QueryUtil.getMaskedByte(blockKeyGenerator.getKeySizeInBytes(), maskByteRangesForBlock);
    blockExecutionInfo.setDataBlock(blockIndex);
    blockExecutionInfo.setBlockKeyGenerator(blockKeyGenerator);
    // adding aggregation info for query
    blockExecutionInfo.setAggregatorInfo(getAggregatorInfoForBlock(queryModel, blockIndex));
    // adding custom aggregate expression of query
    blockExecutionInfo.setCustomAggregateExpressions(queryModel.getExpressions());

    // setting the limit
    blockExecutionInfo.setLimit(queryModel.getLimit());
    // setting whether detail query or not
    blockExecutionInfo.setDetailQuery(queryModel.isDetailQuery());
    // set the masked bytes of the block, which will be used
    // to unpack the older block keys
    blockExecutionInfo.setMaskedByteForBlock(maskedByte);
    // total number of dimension blocks
    blockExecutionInfo
            .setTotalNumberDimensionBlock(segmentProperties.getDimensionOrdinalToBlockMapping().size());
    blockExecutionInfo
            .setTotalNumberOfMeasureBlock(segmentProperties.getMeasuresOrdinalToBlockMapping().size());
    // to check whether older block key update is required or not
    blockExecutionInfo.setFixedKeyUpdateRequired(
            blockKeyGenerator.equals(queryProperties.keyStructureInfo.getKeyGenerator()));
    IndexKey startIndexKey = null;
    IndexKey endIndexKey = null;
    if (null != queryModel.getFilterExpressionResolverTree()) {
        // loading the filter executer tree for filter evaluation
        blockExecutionInfo.setFilterExecuterTree(FilterUtil
                .getFilterExecuterTree(queryModel.getFilterExpressionResolverTree(), blockKeyGenerator));
        startIndexKey = queryModel.getFilterExpressionResolverTree()
                .getstartKey(blockIndex.getSegmentProperties());
        endIndexKey = queryModel.getFilterExpressionResolverTree().getEndKey(blockIndex.getSegmentProperties(),
                queryModel.getAbsoluteTableIdentifier());
        if (null == startIndexKey && null == endIndexKey) {
            try {
                startIndexKey = FilterUtil.prepareDefaultStartIndexKey(segmentProperties);
                endIndexKey = FilterUtil.prepareDefaultEndIndexKey(segmentProperties);
            } catch (KeyGenException e) {
                throw new QueryExecutionException(e);
            }

        }
    } else {
        try {
            startIndexKey = FilterUtil.prepareDefaultStartIndexKey(segmentProperties);
            endIndexKey = FilterUtil.prepareDefaultEndIndexKey(segmentProperties);
        } catch (KeyGenException e) {
            throw new QueryExecutionException(e);
        }
    }
    blockExecutionInfo
            .setFileType(FileFactory.getFileType(queryModel.getAbsoluteTableIdentifier().getStorePath()));
    //setting the start index key of the block node
    blockExecutionInfo.setStartKey(startIndexKey);
    //setting the end index key of the block node
    blockExecutionInfo.setEndKey(endIndexKey);
    // expression dimensions
    List<CarbonDimension> expressionDimensions = new ArrayList<CarbonDimension>(
            CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    // expression measure
    List<CarbonMeasure> expressionMeasures = new ArrayList<CarbonMeasure>(
            CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    // collect all the dimensions and measures required to determine the
    // chunk indexes to be read from file
    QueryUtil.extractDimensionsAndMeasuresFromExpression(queryModel.getExpressions(), expressionDimensions,
            expressionMeasures);
    // setting all the dimension chunk indexes to be read from file
    blockExecutionInfo.setAllSelectedDimensionBlocksIndexes(QueryUtil.getDimensionsBlockIndexes(
            updatedQueryDimension, segmentProperties.getDimensionOrdinalToBlockMapping(),
            queryModel.getDimAggregationInfo(), expressionDimensions));
    // setting all the measure chunk indexes to be read from file
    blockExecutionInfo
            .setAllSelectedMeasureBlocksIndexes(QueryUtil.getMeasureBlockIndexes(queryModel.getQueryMeasures(),
                    expressionMeasures, segmentProperties.getMeasuresOrdinalToBlockMapping()));
    // setting the key structure info which will be required
    // to update the older block key with new key generator
    blockExecutionInfo.setKeyStructureInfo(queryProperties.keyStructureInfo);
    // setting the size of fixed key column (dictionary column)
    blockExecutionInfo.setFixedLengthKeySize(getKeySize(updatedQueryDimension, segmentProperties));
    List<Integer> dictionaryColumnBlockIndex = new ArrayList<Integer>();
    List<Integer> noDictionaryColumnBlockIndex = new ArrayList<Integer>();
    // get the block index to be read from file for query dimension
    // for both dictionary columns and no dictionary columns
    QueryUtil.fillQueryDimensionsBlockIndexes(updatedQueryDimension,
            segmentProperties.getDimensionOrdinalToBlockMapping(), dictionaryColumnBlockIndex,
            noDictionaryColumnBlockIndex);
    int[] queryDictionaryColumnBlockIndexes = ArrayUtils
            .toPrimitive(dictionaryColumnBlockIndex.toArray(new Integer[dictionaryColumnBlockIndex.size()]));
    // the dictionary column block indexes must be sorted, since for all
    // dimension columns the key is filled based on key order
    Arrays.sort(queryDictionaryColumnBlockIndexes);
    blockExecutionInfo.setDictionaryColumnBlockIndex(queryDictionaryColumnBlockIndexes);
    // setting the no dictionary column block indexes
    blockExecutionInfo.setNoDictionaryBlockIndexes(ArrayUtils.toPrimitive(
            noDictionaryColumnBlockIndex.toArray(new Integer[noDictionaryColumnBlockIndex.size()])));
    // setting column id to dictionary mapping
    blockExecutionInfo.setColumnIdToDcitionaryMapping(queryProperties.columnToDictionayMapping);
    // setting each column value size
    blockExecutionInfo.setEachColumnValueSize(segmentProperties.getEachDimColumnValueSize());
    blockExecutionInfo.setDimensionAggregator(QueryUtil.getDimensionDataAggregatorList1(
            queryModel.getDimAggregationInfo(), segmentProperties.getDimensionOrdinalToBlockMapping(),
            segmentProperties.getColumnGroupAndItsKeygenartor(), queryProperties.columnToDictionayMapping));
    try {
        // set the column group and its key structure info, which will be
        // used to get the column group column data in case of the final row
        // and in case of dimension aggregation
        blockExecutionInfo.setColumnGroupToKeyStructureInfo(
                QueryUtil.getColumnGroupKeyStructureInfo(updatedQueryDimension, segmentProperties));
    } catch (KeyGenException e) {
        throw new QueryExecutionException(e);
    }
    return blockExecutionInfo;
}

From source file:org.carbondata.query.carbon.executor.util.QueryUtil.java

/**
 * Below method will be used to get the dimension block indexes in the file
 * based on the query dimensions.
 *
 * @param queryDimensions                query dimensions
 * @param dimensionOrdinalToBlockMapping mapping of dimension block in file to query dimension
 * @param dimAggInfo                     dimension aggregation info
 * @param customAggregationDimension     custom aggregation dimensions
 * @return block indexes of the file
 */
public static int[] getDimensionsBlockIndexes(List<QueryDimension> queryDimensions,
        Map<Integer, Integer> dimensionOrdinalToBlockMapping, List<DimensionAggregatorInfo> dimAggInfo,
        List<CarbonDimension> customAggregationDimension) {
    // use a set, since columns in the same row group point to the same block
    Set<Integer> dimensionBlockIndex = new HashSet<Integer>();
    for (int i = 0; i < queryDimensions.size(); i++) {
        dimensionBlockIndex
                .add(dimensionOrdinalToBlockMapping.get(queryDimensions.get(i).getDimension().getOrdinal()));
    }
    for (int i = 0; i < dimAggInfo.size(); i++) {
        dimensionBlockIndex.add(dimensionOrdinalToBlockMapping.get(dimAggInfo.get(i).getDim().getOrdinal()));
    }
    for (int i = 0; i < customAggregationDimension.size(); i++) {
        dimensionBlockIndex
                .add(dimensionOrdinalToBlockMapping.get(customAggregationDimension.get(i).getOrdinal()));
    }
    return ArrayUtils.toPrimitive(dimensionBlockIndex.toArray(new Integer[dimensionBlockIndex.size()]));
}
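
Because dimensionBlockIndex is a HashSet, the returned int[] is in no particular order; callers that need a deterministic order, such as getBlockExecutionInfoForBlock above, sort the array afterwards. A minimal sketch of that pattern, with illustrative values:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang3.ArrayUtils;

public class BlockIndexSketch {
    public static void main(String[] args) {
        Set<Integer> blockIndexes = new HashSet<Integer>(Arrays.asList(7, 2, 5));
        // HashSet iteration order is unspecified, so the primitive array
        // comes back unordered
        int[] indexes = ArrayUtils.toPrimitive(
                blockIndexes.toArray(new Integer[blockIndexes.size()]));
        // sort to restore a deterministic order before use
        Arrays.sort(indexes);
        System.out.println(Arrays.toString(indexes)); // [2, 5, 7]
    }
}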

From source file:org.carbondata.query.carbon.executor.util.QueryUtil.java

/**
 * Below method will be used to get the measure block indexes to be read
 * from the file.
 *
 * @param queryMeasures              query measures
 * @param expressionMeasure          measures present in the expression
 * @param ordinalToBlockIndexMapping measure ordinal to block mapping
 * @return block indexes
 */
public static int[] getMeasureBlockIndexes(List<QueryMeasure> queryMeasures,
        List<CarbonMeasure> expressionMeasure, Map<Integer, Integer> ordinalToBlockIndexMapping) {
    Set<Integer> measureBlockIndex = new HashSet<Integer>();
    for (int i = 0; i < queryMeasures.size(); i++) {
        measureBlockIndex.add(ordinalToBlockIndexMapping.get(queryMeasures.get(i).getMeasure().getOrdinal()));
    }
    for (int i = 0; i < expressionMeasure.size(); i++) {
        measureBlockIndex.add(ordinalToBlockIndexMapping.get(expressionMeasure.get(i).getOrdinal()));
    }
    return ArrayUtils.toPrimitive(measureBlockIndex.toArray(new Integer[measureBlockIndex.size()]));
}

From source file:org.carbondata.query.executer.impl.RestructureUtil.java

/**
 * Below method will be used to check whether any new dimension was added
 * after restructure.
 *
 * @param queryDims
 * @param sliceMataData
 * @param currentDimList
 * @param holder
 * @param executerProperties
 * @return the current dimensions
 */
public static Dimension[] updateRestructureHolder(Dimension[] queryDims, SliceMetaData sliceMataData,
        List<Dimension> currentDimList, RestructureHolder holder, QueryExecuterProperties executerProperties) {
    boolean found;
    int len = 0;
    String[] sMetaDims = sliceMataData.getDimensions();
    List<Boolean> isNoDictionaryDims = new ArrayList<Boolean>(executerProperties.dimTables.length);
    boolean[] isNoDictionaryDimsArray = new boolean[executerProperties.dimTables.length];
    List<Dimension> crntDims = new ArrayList<Dimension>();
    for (int i = 0; i < executerProperties.dimTables.length; i++) {
        found = false;
        for (int j = 0; j < sMetaDims.length; j++) {
            if (sMetaDims[j].equals(executerProperties.dimTables[i].getActualTableName() + '_'
                    + executerProperties.dimTables[i].getColName())) {
                crntDims.add(executerProperties.dimTables[i]);
                len++;
                found = true;
                break;
            }
        }

        if (!found) {
            holder.updateRequired = true;
            if (executerProperties.dimTables[i].isNoDictionaryDim()) {
                isNoDictionaryDims.add(true);
            }
        }

    }
    len = 0;
    for (int i = 0; i < queryDims.length; i++) {
        if (queryDims[i].isNoDictionaryDim()) {
            currentDimList.add(queryDims[i]);
            continue;
        }
        for (int j = 0; j < sMetaDims.length; j++) {
            if (sMetaDims[j].equals(queryDims[i].getActualTableName() + '_' + queryDims[i].getColName())) {
                currentDimList.add(queryDims[i]);
                break;

            }
        }
        len++;
    }
    if (holder.updateRequired) {
        isNoDictionaryDimsArray = ArrayUtils
                .toPrimitive(isNoDictionaryDims.toArray(new Boolean[isNoDictionaryDims.size()]));
        holder.setIsNoDictionaryNewDims(isNoDictionaryDimsArray);
    }
    return crntDims.toArray(new Dimension[crntDims.size()]);
}
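
Of the usages on this page, this is the one that exercises the Boolean[] overload shown in the prototype. Since only non-null values are ever added to isNoDictionaryDims, the NullPointerException that the single-argument wrapper overloads throw for null elements cannot arise here.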

From source file:org.carbondata.scan.executor.impl.AbstractQueryExecutor.java

/**
 * Below method will be used to get the block execution info which is
 * required to execute any block based on the query model.
 *
 * @param queryModel query model from user query
 * @param blockIndex block index
 * @return block execution info
 * @throws QueryExecutionException any failure during block info creation
 */
protected BlockExecutionInfo getBlockExecutionInfoForBlock(QueryModel queryModel, AbstractIndex blockIndex)
        throws QueryExecutionException {
    BlockExecutionInfo blockExecutionInfo = new BlockExecutionInfo();
    SegmentProperties segmentProperties = blockIndex.getSegmentProperties();
    List<CarbonDimension> tableBlockDimensions = segmentProperties.getDimensions();
    KeyGenerator blockKeyGenerator = segmentProperties.getDimensionKeyGenerator();

    // get only those query dimensions that are present in the table block
    List<QueryDimension> updatedQueryDimension = RestructureUtil
            .getUpdatedQueryDimension(queryModel.getQueryDimension(), tableBlockDimensions);
    // TODO add complex dimension children
    int[] maskByteRangesForBlock = QueryUtil.getMaskedByteRange(updatedQueryDimension, blockKeyGenerator);
    int[] maskedByte = QueryUtil.getMaskedByte(blockKeyGenerator.getKeySizeInBytes(), maskByteRangesForBlock);
    blockExecutionInfo.setDimensionsExistInQuery(updatedQueryDimension.size() > 0);
    blockExecutionInfo.setDataBlock(blockIndex);
    blockExecutionInfo.setBlockKeyGenerator(blockKeyGenerator);
    // adding aggregation info for query
    blockExecutionInfo.setAggregatorInfo(getAggregatorInfoForBlock(queryModel, blockIndex));

    // setting the limit
    blockExecutionInfo.setLimit(queryModel.getLimit());
    // setting whether detail query or not
    blockExecutionInfo.setDetailQuery(queryModel.isDetailQuery());
    // setting whether raw record query or not
    blockExecutionInfo.setRawRecordDetailQuery(queryModel.isForcedDetailRawQuery());
    // set the masked bytes of the block, which will be used
    // to unpack the older block keys
    blockExecutionInfo.setMaskedByteForBlock(maskedByte);
    // total number of dimension blocks
    blockExecutionInfo
            .setTotalNumberDimensionBlock(segmentProperties.getDimensionOrdinalToBlockMapping().size());
    blockExecutionInfo
            .setTotalNumberOfMeasureBlock(segmentProperties.getMeasuresOrdinalToBlockMapping().size());
    // to check whether older block key update is required or not
    blockExecutionInfo.setFixedKeyUpdateRequired(
            blockKeyGenerator.equals(queryProperties.keyStructureInfo.getKeyGenerator()));
    IndexKey startIndexKey = null;
    IndexKey endIndexKey = null;
    if (null != queryModel.getFilterExpressionResolverTree()) {
        // loading the filter executer tree for filter evaluation
        blockExecutionInfo.setFilterExecuterTree(FilterUtil
                .getFilterExecuterTree(queryModel.getFilterExpressionResolverTree(), segmentProperties));
        List<IndexKey> listOfStartEndKeys = new ArrayList<IndexKey>(2);
        FilterUtil.traverseResolverTreeAndGetStartAndEndKey(segmentProperties,
                queryModel.getAbsoluteTableIdentifier(), queryModel.getFilterExpressionResolverTree(),
                listOfStartEndKeys);
        startIndexKey = listOfStartEndKeys.get(0);
        endIndexKey = listOfStartEndKeys.get(1);
    } else {
        try {
            startIndexKey = FilterUtil.prepareDefaultStartIndexKey(segmentProperties);
            endIndexKey = FilterUtil.prepareDefaultEndIndexKey(segmentProperties);
        } catch (KeyGenException e) {
            throw new QueryExecutionException(e);
        }
    }
    blockExecutionInfo
            .setFileType(FileFactory.getFileType(queryModel.getAbsoluteTableIdentifier().getStorePath()));
    //setting the start index key of the block node
    blockExecutionInfo.setStartKey(startIndexKey);
    //setting the end index key of the block node
    blockExecutionInfo.setEndKey(endIndexKey);
    // expression dimensions
    List<CarbonDimension> expressionDimensions = new ArrayList<CarbonDimension>(
            CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    // expression measure
    List<CarbonMeasure> expressionMeasures = new ArrayList<CarbonMeasure>(
            CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    // setting all the dimension chunk indexes to be read from file
    blockExecutionInfo
            .setAllSelectedDimensionBlocksIndexes(QueryUtil.getDimensionsBlockIndexes(updatedQueryDimension,
                    segmentProperties.getDimensionOrdinalToBlockMapping(), expressionDimensions));
    // setting all the measure chunk indexes to be read from file
    blockExecutionInfo
            .setAllSelectedMeasureBlocksIndexes(QueryUtil.getMeasureBlockIndexes(queryModel.getQueryMeasures(),
                    expressionMeasures, segmentProperties.getMeasuresOrdinalToBlockMapping()));
    // setting the key structure info which will be required
    // to update the older block key with new key generator
    blockExecutionInfo.setKeyStructureInfo(queryProperties.keyStructureInfo);
    // setting the size of fixed key column (dictionary column)
    blockExecutionInfo.setFixedLengthKeySize(getKeySize(updatedQueryDimension, segmentProperties));
    Set<Integer> dictionaryColumnBlockIndex = new HashSet<Integer>();
    List<Integer> noDictionaryColumnBlockIndex = new ArrayList<Integer>();
    // get the block index to be read from file for query dimension
    // for both dictionary columns and no dictionary columns
    QueryUtil.fillQueryDimensionsBlockIndexes(updatedQueryDimension,
            segmentProperties.getDimensionOrdinalToBlockMapping(), dictionaryColumnBlockIndex,
            noDictionaryColumnBlockIndex);
    int[] queryDictionaryColumnBlockIndexes = ArrayUtils
            .toPrimitive(dictionaryColumnBlockIndex.toArray(new Integer[dictionaryColumnBlockIndex.size()]));
    // the dictionary column block indexes must be sorted, since for all
    // dimension columns the key is filled based on key order
    Arrays.sort(queryDictionaryColumnBlockIndexes);
    blockExecutionInfo.setDictionaryColumnBlockIndex(queryDictionaryColumnBlockIndexes);
    // setting the no dictionary column block indexes
    blockExecutionInfo.setNoDictionaryBlockIndexes(ArrayUtils.toPrimitive(
            noDictionaryColumnBlockIndex.toArray(new Integer[noDictionaryColumnBlockIndex.size()])));
    // setting column id to dictionary mapping
    blockExecutionInfo.setColumnIdToDcitionaryMapping(queryProperties.columnToDictionayMapping);
    // setting each column value size
    blockExecutionInfo.setEachColumnValueSize(segmentProperties.getEachDimColumnValueSize());
    try {
        // set the column group and its key structure info, which will be
        // used to get the column group column data in case of the final row
        // and in case of dimension aggregation
        blockExecutionInfo.setColumnGroupToKeyStructureInfo(
                QueryUtil.getColumnGroupKeyStructureInfo(updatedQueryDimension, segmentProperties));
    } catch (KeyGenException e) {
        throw new QueryExecutionException(e);
    }
    return blockExecutionInfo;
}