Example usage for org.apache.commons.lang ArrayUtils reverse

Introduction

This page lists example usages of the org.apache.commons.lang ArrayUtils.reverse method, drawn from open source projects.

Prototype

public static void reverse(boolean[] array) 

Document

Reverses the order of the given array in place. The method is overloaded for Object[] and every primitive array type (boolean[], byte[], char[], short[], int[], long[], float[], double[]); all overloads do nothing when passed null.
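
A minimal sketch of the basic behavior (the array name and values are illustrative):

import org.apache.commons.lang.ArrayUtils;

public class ReverseDemo {
    public static void main(String[] args) {
        int[] numbers = { 1, 2, 3, 4 };
        ArrayUtils.reverse(numbers); // numbers is now { 4, 3, 2, 1 }; the reversal is in place
        ArrayUtils.reverse((int[]) null); // a null array is tolerated: the call is a no-op
    }
}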

Usage

From source file:org.apache.hadoop.hive.serde2.teradata.TeradataBinaryDataInputStream.java

/**
 * Read DECIMAL(P, S).
 * In the Teradata binary format the number of bytes to read is decided
 * solely by the precision (P); the HiveDecimal is constructed from the
 * byte array and the scale. A null DECIMAL is padded with 0x00.
 *
 * @param scale the scale
 * @param byteNum the byte num
 * @return the hive decimal
 * @throws IOException the io exception
 */
public HiveDecimal readDecimal(int scale, int byteNum) throws IOException {
    byte[] decimalContent = new byte[byteNum];
    int numOfBytesRead = in.read(decimalContent);
    if (decimalContent.length != 0 && numOfBytesRead != byteNum) {
        throw new EOFException(
                format("Failed to read the decimal: expected %d bytes, got %d bytes", byteNum, numOfBytesRead));
    }
    // Teradata stores these bytes little-endian; BigInteger(byte[]) expects big-endian
    ArrayUtils.reverse(decimalContent);
    return HiveDecimal.create(new BigInteger(decimalContent), scale);
}
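
The reverse is the key step: Teradata delivers the unscaled bytes least-significant first, while BigInteger(byte[]) reads them most-significant first. A standalone sketch of the same decoding, using java.math.BigDecimal in place of HiveDecimal (the sample bytes are illustrative):

import java.math.BigDecimal;
import java.math.BigInteger;
import org.apache.commons.lang.ArrayUtils;

public class DecodeDecimalSketch {
    public static void main(String[] args) {
        // 12345 stored little-endian in two bytes: low byte 0x39, high byte 0x30
        byte[] littleEndian = { 0x39, 0x30 };
        ArrayUtils.reverse(littleEndian); // now big-endian: { 0x30, 0x39 }
        System.out.println(new BigDecimal(new BigInteger(littleEndian), 2)); // prints 123.45
    }
}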

From source file:org.apache.hadoop.hive.serde2.teradata.TeradataBinaryDataOutputStream.java

/**
 * Write DECIMAL(P, S).
 * In the Teradata binary format the number of bytes to write is decided
 * solely by the precision (P); the HiveDecimal is constructed from the
 * byte array and the scale. The remaining bytes are padded with 0x00
 * (positive) or 0xFF (negative); a null DECIMAL is padded with 0x00.
 *
 * @param writable the writable
 * @param byteNum the byte num
 * @throws IOException the io exception
 */
public void writeDecimal(HiveDecimalWritable writable, int byteNum, int scale) throws IOException {
    if (writable == null) {
        byte[] pad = new byte[byteNum];
        write(pad);
        return;
    }
    // HiveDecimal automatically trims the scale to save space,
    // so we scale the unscaled value back up or the output bytes would be wrong
    int hiveScale = writable.getHiveDecimal().scale();
    BigInteger bigInteger = writable.getHiveDecimal().unscaledValue();
    if (hiveScale < scale) {
        BigInteger multiplicand = new BigInteger("1" + join("", Collections.nCopies(scale - hiveScale, "0")));
        bigInteger = bigInteger.multiply(multiplicand);
    }
    byte[] content = bigInteger.toByteArray(); // big-endian two's complement
    int signBit = content[0] >> 7 & 1; // sign bit of the most significant byte
    ArrayUtils.reverse(content); // Teradata expects little-endian
    write(content);
    if (byteNum > content.length) {
        // sign-extend: pad with 0x00 for non-negative values, 0xFF for negative ones
        byte[] pad = new byte[byteNum - content.length];
        if (signBit != 0) {
            Arrays.fill(pad, (byte) 0xFF);
        }
        write(pad);
    }
}
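
The write path mirrors the read path. A standalone sketch of the little-endian encoding and sign extension (the value and byte width are illustrative):

import java.math.BigInteger;
import java.util.Arrays;
import org.apache.commons.lang.ArrayUtils;

public class EncodeDecimalSketch {
    public static void main(String[] args) {
        BigInteger unscaled = BigInteger.valueOf(-12345); // unscaled value of -123.45 at scale 2
        byte[] content = unscaled.toByteArray(); // big-endian: { (byte) 0xCF, (byte) 0xC7 }
        int signBit = content[0] >> 7 & 1; // 1: the value is negative
        ArrayUtils.reverse(content); // little-endian: { (byte) 0xC7, (byte) 0xCF }
        byte[] pad = new byte[4 - content.length]; // assume a 4-byte DECIMAL width
        if (signBit != 0) {
            Arrays.fill(pad, (byte) 0xFF); // sign-extend negative values
        }
        // writing content then pad yields C7 CF FF FF, i.e. -12345 little-endian
    }
}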

From source file:org.apache.jackrabbit.core.query.lucene.JahiaLuceneQueryFactoryImpl.java

private boolean checkIndexedAcl(Map<String, Boolean> checkedAcls, IndexedNodeInfo infos)
        throws RepositoryException {
    boolean canRead = true;

    String[] acls = infos.getAclUuid() != null ? Patterns.SPACE.split(infos.getAclUuid())
            : ArrayUtils.EMPTY_STRING_ARRAY;
    ArrayUtils.reverse(acls);

    for (String acl : acls) {
        if (acl.contains("/")) {
            // ACL indexed contains a single user ACE, get the username
            String singleUser = StringUtils.substringAfter(acl, "/");
            acl = StringUtils.substringBefore(acl, "/");
            if (singleUser.contains("/")) {
                // Granted roles are specified in the indexed entry
                String roles = StringUtils.substringBeforeLast(singleUser, "/");
                singleUser = StringUtils.substringAfterLast(singleUser, "/");
                if (!singleUser.equals(session.getUserID())) {
                    // If user does not match, skip this ACL
                    continue;
                } else {
                    // If the user matches, check whether one of the roles grants read permission
                    for (String role : StringUtils.split(roles, '/')) {
                        if (((JahiaAccessManager) session.getAccessControlManager()).matchPermission(
                                Sets.newHashSet(Privilege.JCR_READ + "_" + session.getWorkspace().getName()),
                                role)) {
                            // User and role matches, read is granted
                            return true;
                        }
                    }
                }
            } else {
                if (!singleUser.equals(session.getUserID())) {
                    // If user does not match, skip this ACL
                    continue;
                }
                // Otherwise, do normal ACL check.
            }
        }
        // Verify first if this acl has already been checked
        Boolean aclChecked = checkedAcls.get(acl);
        if (aclChecked == null) {
            try {
                canRead = session.getAccessManager().canRead(null, new NodeId(acl));
                checkedAcls.put(acl, canRead);
            } catch (RepositoryException e) {
                // ignored: keep the previous value of canRead and skip caching this ACL
            }
        } else {
            canRead = aclChecked;
        }
        break;
    }
    return canRead;
}
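
Here the reversal flips the whitespace-separated ACL UUID list so that iteration starts from the last entry; since the loop breaks after the first applicable ACL, the reversal decides which end of the list takes precedence. A toy illustration (the UUID values are made up):

import org.apache.commons.lang.ArrayUtils;

public class AclOrderSketch {
    public static void main(String[] args) {
        String[] acls = "rootAcl midAcl leafAcl".split(" ");
        ArrayUtils.reverse(acls); // { "leafAcl", "midAcl", "rootAcl" }
        // the entry indexed last is now checked first, so it wins the early break
    }
}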

From source file:org.apache.parquet.filter2.compat.RowGroupFilter.java

public static List<BlockMetaData> filterRowGroupsByCBFM(Filter filter, List<BlockMetaData> blocks,
        MessageType schema) {
    if (blocks.isEmpty())
        return blocks;
    if (!(CBFM.ON || FullBitmapIndex.ON || MDBF.ON || CMDBF.ON))
        return blocks;
    // Only applying filters on indexed table
    if (CBFM.ON && blocks.get(0).getIndexTableStr() == null)
        return blocks;
    if (FullBitmapIndex.ON && (blocks.get(0).index == null))
        return blocks;
    if (MDBF.ON && (blocks.get(0).mdbfIndex == null))
        return blocks;
    if (CMDBF.ON && (blocks.get(0).cmdbfIndex == null))
        return blocks;
    List<BlockMetaData> candidateBlocks = new ArrayList<>();
    if (filter instanceof FilterCompat.FilterPredicateCompat) {
        // only deal with FilterPredicateCompat
        FilterCompat.FilterPredicateCompat filterPredicateCompat = (FilterCompat.FilterPredicateCompat) filter;
        FilterPredicate filterPredicate = filterPredicateCompat.getFilterPredicate();
        List<Operators.Eq> eqFilters = new ArrayList<>();
        extractEqFilter(filterPredicate, eqFilters);

        String[] indexedColumns = null;
        if (CBFM.ON)
            indexedColumns = CBFM.indexedColumns;
        else if (FullBitmapIndex.ON)
            indexedColumns = FullBitmapIndex.dimensions;
        else if (MDBF.ON)
            indexedColumns = MDBF.dimensions;
        else if (CMDBF.ON)
            indexedColumns = CMDBF.dimensions;

        String[] currentComb = new String[eqFilters.size()];
        byte[][] indexedColumnBytes = new byte[eqFilters.size()][];
        for (int j = 0; j < eqFilters.size(); ++j) {
            Operators.Eq eqFilter = eqFilters.get(j);

            String[] columnPath = eqFilter.getColumn().getColumnPath().toArray();
            String columnName = columnPath[columnPath.length - 1];
            currentComb[j] = columnName;

            for (int i = 0; i < indexedColumns.length; ++i) {
                if (indexedColumns[i].equals(columnName)) {
                    Comparable value = eqFilter.getValue();
                    if (value instanceof Binary) {
                        indexedColumnBytes[j] = ((Binary) value).getBytes();
                    } else if (value instanceof Integer) {
                        indexedColumnBytes[j] = ByteBuffer.allocate(4).putInt((Integer) value).array();
                        ArrayUtils.reverse(indexedColumnBytes[j]);
                    } else if (value instanceof Long) {
                        indexedColumnBytes[j] = ByteBuffer.allocate(8).putLong((Long) value).array();
                        ArrayUtils.reverse(indexedColumnBytes[j]);
                    } else if (value instanceof Float) {
                        indexedColumnBytes[j] = ByteBuffer.allocate(4).putFloat((Float) value).array();
                        ArrayUtils.reverse(indexedColumnBytes[j]);
                    } else if (value instanceof Double) {
                        indexedColumnBytes[j] = ByteBuffer.allocate(8).putDouble((Double) value).array();
                        ArrayUtils.reverse(indexedColumnBytes[j]);
                    }
                }
            }
        }
        int blockHitCount = 0;
        long rowScanned = 0;
        long rowSkipped = 0;
        for (BlockMetaData block : blocks) {
            if (CBFM.ON) {
                try {
                    Path cbfmFile = new Path(block.getIndexTableStr());
                    FileSystem fs = cbfmFile.getFileSystem(new Configuration());
                    // TODO better way? Or is this right? escape the temp folder
                    cbfmFile = new Path(cbfmFile.getParent().getParent().getParent().getParent().getParent(),
                            cbfmFile.getName());
                    FSDataInputStream in = fs.open(cbfmFile);
                    BufferedReader br = new BufferedReader(new InputStreamReader(in));
                    String indexTableStr = br.readLine();
                    br.close();
                    in.close();
                    CBFM cbfm = new CBFM(indexTableStr);
                    ArrayList<Long> searchIndex = cbfm.calculateIdxsForSearch(indexedColumnBytes);
                    if (cbfm.contains(searchIndex)) {
                        blockHitCount++;
                        candidateBlocks.add(block);
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            } else if (FullBitmapIndex.ON) {
                FullBitmapIndex index = block.index;
                if (index.contains(currentComb, indexedColumnBytes)) {
                    blockHitCount++;
                    candidateBlocks.add(block);
                    rowScanned += block.getRowCount();
                } else {
                    rowSkipped += block.getRowCount();
                }
            } else if (MDBF.ON) {
                MDBF index = block.mdbfIndex;
                if (index.contains(currentComb, indexedColumnBytes)) {
                    blockHitCount++;
                    candidateBlocks.add(block);
                    rowScanned += block.getRowCount();
                } else {
                    rowSkipped += block.getRowCount();
                }
            } else if (CMDBF.ON) {
                CMDBF index = block.cmdbfIndex;
                if (index.contains(currentComb, indexedColumnBytes)) {
                    blockHitCount++;
                    candidateBlocks.add(block);
                    rowScanned += block.getRowCount();
                } else {
                    rowSkipped += block.getRowCount();
                }
            }
        }
        int skippedCount = blocks.size() - blockHitCount;
        if (checkIndexed(currentComb)) {
            writeSkipResults(skippedCount, blocks.size(), rowScanned, rowSkipped);
        }
    }
    return candidateBlocks;
}
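
Note the pattern on the numeric branches: ByteBuffer writes big-endian by default, so the reverse converts each value to little-endian before probing the index. An equivalent, purely illustrative sketch using ByteOrder directly:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import org.apache.commons.lang.ArrayUtils;

public class EndiannessSketch {
    public static void main(String[] args) {
        byte[] reversed = ByteBuffer.allocate(4).putInt(1).array();
        ArrayUtils.reverse(reversed); // { 1, 0, 0, 0 }

        byte[] direct = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN).putInt(1).array();
        // also { 1, 0, 0, 0 }; both approaches yield the same bytes
    }
}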

From source file:org.apache.phoenix.end2end.DescColumnSortOrderTest.java

private static Object[][] reverse(Object[][] rows) {
    Object[][] reversedArray = new Object[rows.length][];
    System.arraycopy(rows, 0, reversedArray, 0, rows.length);
    ArrayUtils.reverse(reversedArray);
    return reversedArray;
}
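
System.arraycopy makes a shallow copy first, so the in-place ArrayUtils.reverse mutates only the copy and the caller's rows array keeps its order. An equivalent sketch using clone() instead:

private static Object[][] reverse(Object[][] rows) {
    Object[][] reversedArray = rows.clone(); // shallow copy: same row references, new outer array
    ArrayUtils.reverse(reversedArray);
    return reversedArray;
}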

From source file:org.apache.stratos.integration.common.ServerLogClient.java

/**
 * Return all log lines from server startup until now
 *
 * @return Array of LogEvent instances corresponding to the logs
 * @throws AutomationUtilException
 */
public LogEvent[] getAllLogLines() throws AutomationUtilException {

    LogEvent[] allLogs = null;

    try {
        allLogs = logViewerClient.getAllRemoteSystemLogs();
        // logViewerClient.getAllRemoteSystemLogs() returns most recent logs first, need to reverse
        ArrayUtils.reverse(allLogs);
        return allLogs;

    } catch (RemoteException e) {
        String errorMsg = "Error getting remote system logs";
        log.error(errorMsg, e);
        throw new AutomationUtilException(errorMsg, e);

    } catch (LogViewerLogViewerException e) {
        String errorMsg = "Error getting remote system logs";
        log.error(errorMsg, e);
        throw new AutomationUtilException(errorMsg, e);
    }
}
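
getAllRemoteSystemLogs() returns the newest entry first, so reversing puts the events in chronological order. A toy illustration with strings standing in for LogEvent instances:

import org.apache.commons.lang.ArrayUtils;

public class LogOrderSketch {
    public static void main(String[] args) {
        String[] newestFirst = { "12:02 stopped", "12:01 started", "12:00 booting" };
        ArrayUtils.reverse(newestFirst);
        // now chronological: { "12:00 booting", "12:01 started", "12:02 stopped" }
    }
}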

From source file:org.apache.stratos.integration.common.ServerLogClient.java

/**
 * Return all log lines up to now, starting from the previous location. The previous location
 * is the last log line returned, if this method was called earlier on the same
 * ServerLogClient instance.
 *
 * @return all log lines, starting from the last log line returned
 * @throws AutomationUtilException
 */
public String[] getLogLines() throws AutomationUtilException {

    LogEvent[] allLogs = null;
    try {
        allLogs = logViewerClient.getAllRemoteSystemLogs();

    } catch (RemoteException e) {
        String errorMsg = "Error getting remote system logs";
        log.error(errorMsg, e);
        throw new AutomationUtilException(errorMsg, e);

    } catch (LogViewerLogViewerException e) {
        String errorMsg = "Error getting remote system logs";
        log.error(errorMsg, e);
        throw new AutomationUtilException(errorMsg, e);
    }

    if (allLogs.length == 0) {
        allLogs = new LogEvent[0];
    }

    // logViewerClient.getAllRemoteSystemLogs() returns most recent logs first, need to reverse
    ArrayUtils.reverse(allLogs);

    if (logCount > allLogs.length) {
        // cannot happen, return
        return getLogsAsStrings(allLogs);
    }

    log.info("Total no. of log lines: " + Integer.toString(allLogs.length));
    log.info("Previously returned count : " + Integer.toString(logCount));
    log.info("Current count : " + Integer.toString(allLogs.length - logCount));

    LogEvent[] selectedLogs = Arrays.copyOfRange(allLogs, logCount, allLogs.length);
    logCount += (allLogs.length - logCount);

    return getLogsAsStrings(selectedLogs);
}

From source file:org.apache.sysml.runtime.compress.cocode.ColumnGroupPartitionerBinPacking.java

@Override
public List<List<Integer>> partitionColumns(List<Integer> groupCols,
        HashMap<Integer, GroupableColInfo> groupColsInfo) {
    //obtain column weights
    int[] items = new int[groupCols.size()];
    double[] itemWeights = new double[groupCols.size()];
    for (int i = 0; i < groupCols.size(); i++) {
        int col = groupCols.get(i);
        items[i] = col;
        itemWeights[i] = groupColsInfo.get(col).cardRatio;
    }

    //sort items (first fit decreasing)
    if (FIRST_FIT_DEC) {
        SortUtils.sortByValue(0, items.length, itemWeights, items);
        ArrayUtils.reverse(items);
        ArrayUtils.reverse(itemWeights);
    }

    //partition columns via bin packing
    return packFirstFit(items, itemWeights);
}
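
SortUtils.sortByValue sorts ascending only, so the two reverse calls produce the descending order that first-fit-decreasing needs. A self-contained sketch of the same idea (SortUtils is SystemML-internal, so this version sorts boxed indices with the JDK instead):

import java.util.Arrays;
import java.util.Comparator;
import org.apache.commons.lang.ArrayUtils;

public class DescendingOrderSketch {
    public static void main(String[] args) {
        int[] items = { 10, 11, 12 };
        double[] weights = { 0.2, 0.9, 0.5 };

        Integer[] order = { 0, 1, 2 };
        Arrays.sort(order, Comparator.comparingDouble(i -> weights[i])); // ascending by weight
        ArrayUtils.reverse(order); // descending: { 1, 2, 0 }
        // visiting items[order[k]] now yields 11, 12, 10, i.e. heaviest first
    }
}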

From source file:org.apache.sysml.runtime.compress.UncompressedBitmap.java

public void sortValuesByFrequency() {
    int numVals = getNumValues();
    int numCols = getNumColumns();

    double[] freq = new double[numVals];
    int[] pos = new int[numVals];

    //populate the temporary arrays
    for (int i = 0; i < numVals; i++) {
        freq[i] = getNumOffsets(i);
        pos[i] = i;
    }

    //sort ascending and reverse (descending)
    SortUtils.sortByValue(0, numVals, freq, pos);
    ArrayUtils.reverse(pos);

    //create new value and offset list arrays
    double[] lvalues = new double[numVals * numCols];
    IntArrayList[] loffsets = new IntArrayList[numVals];
    for (int i = 0; i < numVals; i++) {
        System.arraycopy(_values, pos[i] * numCols, lvalues, i * numCols, numCols);
        loffsets[i] = _offsetsLists[pos[i]];
    }
    _values = lvalues;
    _offsetsLists = loffsets;
}

From source file:org.apache.tajo.util.history.HistoryReader.java

private synchronized List<QueryInfo> findQueryInfoInStorage(int page, int size, @Nullable QueryId queryId)
        throws IOException {
    List<QueryInfo> result = Lists.newLinkedList();

    FileSystem fs = HistoryWriter.getNonCrcFileSystem(historyParentPath, tajoConf);
    try {
        if (!fs.exists(historyParentPath)) {
            return result;
        }
    } catch (Throwable e) {
        return result;
    }

    FileStatus[] files = fs.listStatus(historyParentPath);
    if (files == null || files.length == 0) {
        return result;
    }

    Set<QueryInfo> queryInfos = Sets.newTreeSet(Collections.reverseOrder());
    int startIndex = page < 1 ? page : ((page - 1) * size) + 1;
    int currentIndex = 0;
    int skipSize = 0;

    ArrayUtils.reverse(files);
    for (FileStatus eachDateFile : files) {
        Path queryListPath = new Path(eachDateFile.getPath(), HistoryWriter.QUERY_LIST);
        if (eachDateFile.isFile() || !fs.exists(queryListPath)) {
            continue;
        }

        FileStatus[] dateFiles = fs.listStatus(queryListPath);
        if (dateFiles == null || dateFiles.length == 0) {
            continue;
        }

        ArrayUtils.reverse(dateFiles);
        for (FileStatus eachFile : dateFiles) {
            Path path = eachFile.getPath();
            if (eachFile.isDirectory() || !path.getName().endsWith(HistoryWriter.HISTORY_FILE_POSTFIX)) {
                continue;
            }

            FSDataInputStream in = null;

            List<String> jsonList = Lists.newArrayList();
            try {
                in = fs.open(path);

                //If the history file was not closed, FileStatus.getLen() is not updated,
                //so this block reads until EOFException instead of relying on the length
                while (true) {
                    int length = in.readInt();

                    byte[] buf = new byte[length];
                    in.readFully(buf, 0, length);

                    jsonList.add(new String(buf, 0, length, Bytes.UTF8_CHARSET));
                    currentIndex++;
                }
            } catch (EOFException eof) {
                // expected: reached the end of the (possibly still-open) history file
            } catch (Throwable e) {
                LOG.warn("Reading error:" + path + ", " + e.getMessage());
            } finally {
                IOUtils.cleanup(LOG, in);
            }

            //skip previous page
            if (startIndex > currentIndex) {
                skipSize += jsonList.size();
            } else {
                for (String json : jsonList) {
                    QueryInfo queryInfo = QueryInfo.fromJson(json);
                    if (queryId != null) {
                        if (queryInfo.getQueryId().equals(queryId)) {
                            result.add(queryInfo);
                            return result;
                        }
                    } else {
                        queryInfos.add(queryInfo);
                    }
                }
            }

            if (currentIndex - (startIndex - 1) >= size) {
                result.addAll(queryInfos);
                int fromIndex = (startIndex - 1) - skipSize;
                return result.subList(fromIndex, fromIndex + size);
            }
        }
    }

    result.addAll(queryInfos);
    return result;
}
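
Both reverse calls assume that FileSystem.listStatus returns paths in ascending name order (HDFS lists entries lexicographically), so reversing the date-named directories and history files visits the newest ones first. A toy illustration (directory names are made up):

import org.apache.commons.lang.ArrayUtils;

public class NewestFirstSketch {
    public static void main(String[] args) {
        String[] dateDirs = { "20150101", "20150102", "20150103" }; // oldest first, as listed
        ArrayUtils.reverse(dateDirs);
        // { "20150103", "20150102", "20150101" }: newest history is scanned first
    }
}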