Example usage for org.apache.commons.lang ArrayUtils toObject

List of usage examples for org.apache.commons.lang ArrayUtils toObject

Introduction

On this page you can find example usage for org.apache.commons.lang ArrayUtils toObject.

Prototype

public static Boolean[] toObject(boolean[] array) 

Document

Converts an array of primitive booleans to objects (Boolean[]). Overloads exist for all other primitive array types (byte[], char[], short[], int[], long[], float[], double[]), which is why the examples below also box int[], long[], and other arrays.
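
A minimal, self-contained sketch of the boxing behavior (assuming Commons Lang 2.x is on the classpath; the class name ToObjectExample and the sample values are purely illustrative):

import java.util.Arrays;
import java.util.List;

import org.apache.commons.lang.ArrayUtils;

public class ToObjectExample {
    public static void main(String[] args) {
        // Box a primitive boolean[] into a Boolean[] (the overload shown in the prototype above).
        boolean[] flags = { true, false, true };
        Boolean[] boxedFlags = ArrayUtils.toObject(flags);

        // The same pattern applies to the other primitive overloads, e.g. int[] -> Integer[],
        // which makes it easy to wrap the result as a List via Arrays.asList.
        int[] counts = { 4, 8, 15 };
        List<Integer> boxedCounts = Arrays.asList(ArrayUtils.toObject(counts));

        System.out.println(Arrays.toString(boxedFlags)); // [true, false, true]
        System.out.println(boxedCounts);                 // [4, 8, 15]
    }
}

As the usage examples below show, this boxing step is what allows primitive arrays to be used with collection APIs such as Collections.min/max, Arrays.asList, and CollectionUtils.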

Usage

From source file:org.apache.hadoop.mapreduce.lib.partition.TestRehashPartitioner.java
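Here toObject(int[]) boxes the per-partition counters so Collections.min and Collections.max can operate on the resulting List<Integer> when checking the bucket distribution.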

/** test partitioner for patterns */
@Test
public void testPatterns() {
    int results[] = new int[PARTITIONS];
    RehashPartitioner<IntWritable, NullWritable> p = new RehashPartitioner<IntWritable, NullWritable>();
    /* test sequence 4, 8, 12, ... 128 */
    for (int i = 0; i < END; i += STEP) {
        results[p.getPartition(new IntWritable(i), null, PARTITIONS)]++;
    }
    int badbuckets = 0;
    Integer min = Collections.min(Arrays.asList(ArrayUtils.toObject(results)));
    Integer max = Collections.max(Arrays.asList(ArrayUtils.toObject(results)));
    Integer avg = (int) Math.round((max + min) / 2.0);
    System.out.println("Dumping buckets distribution: min=" + min + " avg=" + avg + " max=" + max);
    for (int i = 0; i < PARTITIONS; i++) {
        double var = (results[i] - avg) / (double) (avg);
        System.out.println("bucket " + i + " " + results[i] + " items, variance " + var);
        if (Math.abs(var) > MAX_ERROR)
            badbuckets++;
    }
    System.out.println(badbuckets + " of " + PARTITIONS + " are too small or large buckets");
    assertTrue("too many overflow buckets", badbuckets < PARTITIONS * MAX_BADBUCKETS);
}

From source file:org.apache.parquet.filter2.dictionarylevel.DictionaryFilterTest.java
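The boxed copy of intValues produced by toObject(int[]) feeds ImmutableSet.copyOf, building the complete set of dictionary values for the all-matching user-defined predicate.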

@Test
public void testInverseUdp() throws Exception {
    InInt32UDP droppable = new InInt32UDP(ImmutableSet.of(42));
    InInt32UDP undroppable = new InInt32UDP(ImmutableSet.of(205));
    Set<Integer> allValues = ImmutableSet.copyOf(Arrays.asList(ArrayUtils.toObject(intValues)));
    InInt32UDP completeMatch = new InInt32UDP(allValues);

    FilterPredicate inverse = LogicalInverseRewriter
            .rewrite(not(userDefined(intColumn("int32_field"), droppable)));
    FilterPredicate inverse1 = LogicalInverseRewriter
            .rewrite(not(userDefined(intColumn("int32_field"), undroppable)));
    FilterPredicate inverse2 = LogicalInverseRewriter
            .rewrite(not(userDefined(intColumn("int32_field"), completeMatch)));

    assertFalse("Should not drop block for inverse of non-matching UDP", canDrop(inverse, ccmd, dictionaries));

    assertFalse("Should not drop block for inverse of UDP with some matches",
            canDrop(inverse1, ccmd, dictionaries));

    assertTrue("Should drop block for inverse of UDP with all matches", canDrop(inverse2, ccmd, dictionaries));
}

From source file:org.apache.sling.jcr.resource.internal.helper.JcrPropertyMapCacheEntry.java
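This helper dispatches on the runtime type of a primitive array and uses the matching toObject overload to box it into an Object[].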

/**
 * Convert the object to an array
 * @param value The array
 * @return an object array
 */
private Object[] convertToObjectArray(final Object value) {
    final Object[] values;
    if (value instanceof long[]) {
        values = ArrayUtils.toObject((long[]) value);
    } else if (value instanceof int[]) {
        values = ArrayUtils.toObject((int[]) value);
    } else if (value instanceof double[]) {
        values = ArrayUtils.toObject((double[]) value);
    } else if (value instanceof byte[]) {
        values = ArrayUtils.toObject((byte[]) value);
    } else if (value instanceof float[]) {
        values = ArrayUtils.toObject((float[]) value);
    } else if (value instanceof short[]) {
        values = ArrayUtils.toObject((short[]) value);
    } else if (value instanceof boolean[]) {
        values = ArrayUtils.toObject((boolean[]) value);
    } else if (value instanceof char[]) {
        values = ArrayUtils.toObject((char[]) value);
    } else {
        values = (Object[]) value;
    }
    return values;
}

From source file:org.apache.sling.models.impl.model.AbstractInjectableElement.java
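When the injected field is an array of wrapper types (Integer[], Long[], Boolean[], ...), the corresponding toObject overload boxes the primitive default values declared in the @Default annotation.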

private static Object getDefaultValue(AnnotatedElement element, Type type,
        InjectAnnotationProcessor2 annotationProcessor) {
    if (annotationProcessor != null && annotationProcessor.hasDefault()) {
        return annotationProcessor.getDefault();
    }

    Default defaultAnnotation = element.getAnnotation(Default.class);
    if (defaultAnnotation == null) {
        return null;
    }

    Object value = null;

    if (type instanceof Class) {
        Class<?> injectedClass = (Class<?>) type;
        if (injectedClass.isArray()) {
            Class<?> componentType = injectedClass.getComponentType();
            if (componentType == String.class) {
                value = defaultAnnotation.values();
            } else if (componentType == Integer.TYPE) {
                value = defaultAnnotation.intValues();
            } else if (componentType == Integer.class) {
                value = ArrayUtils.toObject(defaultAnnotation.intValues());
            } else if (componentType == Long.TYPE) {
                value = defaultAnnotation.longValues();
            } else if (componentType == Long.class) {
                value = ArrayUtils.toObject(defaultAnnotation.longValues());
            } else if (componentType == Boolean.TYPE) {
                value = defaultAnnotation.booleanValues();
            } else if (componentType == Boolean.class) {
                value = ArrayUtils.toObject(defaultAnnotation.booleanValues());
            } else if (componentType == Short.TYPE) {
                value = defaultAnnotation.shortValues();
            } else if (componentType == Short.class) {
                value = ArrayUtils.toObject(defaultAnnotation.shortValues());
            } else if (componentType == Float.TYPE) {
                value = defaultAnnotation.floatValues();
            } else if (componentType == Float.class) {
                value = ArrayUtils.toObject(defaultAnnotation.floatValues());
            } else if (componentType == Double.TYPE) {
                value = defaultAnnotation.doubleValues();
            } else if (componentType == Double.class) {
                value = ArrayUtils.toObject(defaultAnnotation.doubleValues());
            } else {
                log.warn("Default values for {} are not supported", componentType);
            }
        } else {
            if (injectedClass == String.class) {
                value = defaultAnnotation.values().length == 0 ? "" : defaultAnnotation.values()[0];
            } else if (injectedClass == Integer.class) {
                value = defaultAnnotation.intValues().length == 0 ? 0 : defaultAnnotation.intValues()[0];
            } else if (injectedClass == Long.class) {
                value = defaultAnnotation.longValues().length == 0 ? 0l : defaultAnnotation.longValues()[0];
            } else if (injectedClass == Boolean.class) {
                value = defaultAnnotation.booleanValues().length == 0 ? false
                        : defaultAnnotation.booleanValues()[0];
            } else if (injectedClass == Short.class) {
                value = defaultAnnotation.shortValues().length == 0 ? ((short) 0)
                        : defaultAnnotation.shortValues()[0];
            } else if (injectedClass == Float.class) {
                value = defaultAnnotation.floatValues().length == 0 ? 0f : defaultAnnotation.floatValues()[0];
            } else if (injectedClass == Double.class) {
                value = defaultAnnotation.doubleValues().length == 0 ? 0d : defaultAnnotation.doubleValues()[0];
            } else {
                log.warn("Default values for {} are not supported", injectedClass);
            }
        }
    } else {
        log.warn("Cannot provide default for {}", type);
    }
    return value;
}

From source file:org.apache.sqoop.repository.derby.DerbyUpgradeGenericJdbcConnectorConfigAndInputNames.java
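toObject(int[]) boxes the JDBC batch update counts so StringUtils.join can render them in the debug log message.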

private void renameConfigInputs(long configId, Map<String, String> inputNameMap) {
    PreparedStatement statement = null;

    try {
        statement = connection.prepareStatement(DerbySchemaUpgradeQuery.QUERY_UPDATE_TABLE_SQ_INPUT_SQI_NAME);

        for (String inputName : inputNameMap.keySet()) {
            statement.setString(1, inputNameMap.get(inputName));
            statement.setString(2, inputName);
            statement.setLong(3, configId);
            statement.addBatch();

            LOG.debug("QUERY(" + DerbySchemaUpgradeQuery.QUERY_UPDATE_TABLE_SQ_INPUT_SQI_NAME + ") args ["
                    + inputNameMap.get(inputName) + "," + inputName + "," + configId + "]");
        }

        int[] updateCounts = statement.executeBatch();
        LOG.debug("QUERY(" + DerbySchemaUpgradeQuery.QUERY_UPDATE_TABLE_SQ_INPUT_SQI_NAME + ") update count: "
                + StringUtils.join(ArrayUtils.toObject(updateCounts), ","));
    } catch (SQLException e) {
        throw new SqoopException(DerbyRepoError.DERBYREPO_0002, e);
    } finally {
        handler.closeStatements(statement);
    }
}

From source file:org.apache.sysml.runtime.transform.decode.DecoderFactory.java
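The column id arrays returned by TfMetaUtils.parseJsonIDList are boxed with toObject so they can be wrapped as List<Integer> and combined with CollectionUtils set operations before building the individual decoders.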

@SuppressWarnings("unchecked")
public static Decoder createDecoder(String spec, String[] colnames, ValueType[] schema, FrameBlock meta)
        throws DMLRuntimeException {
    Decoder decoder = null;

    try {
        //parse transform specification
        JSONObject jSpec = new JSONObject(spec);
        List<Decoder> ldecoders = new ArrayList<Decoder>();

        //create decoders 'recode', 'dummy' and 'pass-through'
        List<Integer> rcIDs = Arrays.asList(
                ArrayUtils.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfUtils.TXMETHOD_RECODE)));
        List<Integer> dcIDs = Arrays.asList(
                ArrayUtils.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfUtils.TXMETHOD_DUMMYCODE)));
        rcIDs = new ArrayList<Integer>(CollectionUtils.union(rcIDs, dcIDs));
        List<Integer> ptIDs = new ArrayList<Integer>(
                CollectionUtils.subtract(UtilFunctions.getSequenceList(1, meta.getNumColumns(), 1), rcIDs));

        //create default schema if unspecified (with double columns for pass-through)
        if (schema == null) {
            schema = UtilFunctions.nCopies(meta.getNumColumns(), ValueType.STRING);
            for (Integer col : ptIDs)
                schema[col - 1] = ValueType.DOUBLE;
        }

        if (!dcIDs.isEmpty()) {
            ldecoders.add(new DecoderDummycode(schema, ArrayUtils.toPrimitive(dcIDs.toArray(new Integer[0]))));
        }
        if (!rcIDs.isEmpty()) {
            ldecoders.add(new DecoderRecode(schema, !dcIDs.isEmpty(),
                    ArrayUtils.toPrimitive(rcIDs.toArray(new Integer[0]))));
        }
        if (!ptIDs.isEmpty()) {
            ldecoders.add(new DecoderPassThrough(schema, ArrayUtils.toPrimitive(ptIDs.toArray(new Integer[0])),
                    ArrayUtils.toPrimitive(dcIDs.toArray(new Integer[0]))));
        }

        //create composite decoder of all created decoders
        //and initialize with given meta data (recode, dummy, bin)
        decoder = new DecoderComposite(schema, ldecoders);
        if (meta != null)
            decoder.initMetaData(meta);
    } catch (Exception ex) {
        throw new DMLRuntimeException(ex);
    }

    return decoder;
}

From source file:org.apache.sysml.runtime.transform.encode.EncoderFactory.java
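As in the decoder factory, toObject turns the parsed column id arrays into Integer[] so they can be handled as lists and later converted back to primitives for the individual encoders.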

@SuppressWarnings("unchecked")
public static Encoder createEncoder(String spec, String[] colnames, ValueType[] schema, FrameBlock meta)
        throws DMLRuntimeException {
    Encoder encoder = null;
    int clen = schema.length;

    try {
        //parse transform specification
        JSONObject jSpec = new JSONObject(spec);
        List<Encoder> lencoders = new ArrayList<Encoder>();

        //prepare basic id lists (recode, dummycode, pass-through)
        //note: any dummycode column requires recode as preparation
        List<Integer> rcIDs = Arrays.asList(
                ArrayUtils.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfUtils.TXMETHOD_RECODE)));
        List<Integer> dcIDs = Arrays.asList(
                ArrayUtils.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfUtils.TXMETHOD_DUMMYCODE)));
        rcIDs = new ArrayList<Integer>(CollectionUtils.union(rcIDs, dcIDs));
        List<Integer> binIDs = TfMetaUtils.parseBinningColIDs(jSpec, colnames);
        List<Integer> ptIDs = new ArrayList<Integer>(CollectionUtils
                .subtract(CollectionUtils.subtract(UtilFunctions.getSequenceList(1, clen, 1), rcIDs), binIDs));
        List<Integer> oIDs = Arrays.asList(
                ArrayUtils.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfUtils.TXMETHOD_OMIT)));
        List<Integer> mvIDs = Arrays.asList(ArrayUtils
                .toObject(TfMetaUtils.parseJsonObjectIDList(jSpec, colnames, TfUtils.TXMETHOD_IMPUTE)));

        //create individual encoders
        if (!rcIDs.isEmpty()) {
            RecodeAgent ra = new RecodeAgent(jSpec, colnames, clen);
            ra.setColList(ArrayUtils.toPrimitive(rcIDs.toArray(new Integer[0])));
            lencoders.add(ra);
        }
        if (!ptIDs.isEmpty())
            lencoders.add(new EncoderPassThrough(ArrayUtils.toPrimitive(ptIDs.toArray(new Integer[0])), clen));
        if (!dcIDs.isEmpty())
            lencoders.add(new DummycodeAgent(jSpec, colnames, schema.length));
        if (!binIDs.isEmpty())
            lencoders.add(new BinAgent(jSpec, colnames, schema.length, true));
        if (!oIDs.isEmpty())
            lencoders.add(new OmitAgent(jSpec, colnames, schema.length));
        if (!mvIDs.isEmpty()) {
            MVImputeAgent ma = new MVImputeAgent(jSpec, colnames, schema.length);
            ma.initRecodeIDList(rcIDs);
            lencoders.add(ma);
        }

        //create composite decoder of all created encoders
        //and initialize meta data (recode, dummy, bin, mv)
        encoder = new EncoderComposite(lencoders);
        if (meta != null)
            encoder.initMetaData(meta);
    } catch (Exception ex) {
        throw new DMLRuntimeException(ex);
    }

    return encoder;
}

From source file:org.apache.sysml.runtime.transform.meta.TfMetaUtils.java
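toObject boxes the recode and dummycode column ids so their union can be computed with CollectionUtils.union.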

/**
 * Parses the given json specification and extracts a list of column ids
 * that are subject to recoding.
 * 
 * @param spec transform specification as json string
 * @param colnames column names
 * @return list of column ids
 * @throws IOException if IOException occurs
 */
@SuppressWarnings("unchecked")
private static List<Integer> parseRecodeColIDs(String spec, String[] colnames) throws IOException {
    if (spec == null)
        throw new IOException("Missing transform specification.");

    List<Integer> specRecodeIDs = null;

    try {
        //parse json transform specification for recode col ids
        JSONObject jSpec = new JSONObject(spec);
        List<Integer> rcIDs = Arrays.asList(
                ArrayUtils.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfUtils.TXMETHOD_RECODE)));
        List<Integer> dcIDs = Arrays.asList(
                ArrayUtils.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfUtils.TXMETHOD_DUMMYCODE)));
        specRecodeIDs = new ArrayList<Integer>(CollectionUtils.union(rcIDs, dcIDs));
    } catch (Exception ex) {
        throw new IOException(ex);
    }

    return specRecodeIDs;
}

From source file:org.apache.sysml.runtime.transform.meta.TfMetaUtils.java
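Both branches box the parsed binning column ids with toObject before wrapping them in a fixed-size list via Arrays.asList.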

public static List<Integer> parseBinningColIDs(JSONObject jSpec, String[] colnames) throws IOException {
    try {
        if (jSpec.containsKey(TfUtils.TXMETHOD_BIN) && jSpec.get(TfUtils.TXMETHOD_BIN) instanceof JSONArray) {
            return Arrays.asList(ArrayUtils
                    .toObject(TfMetaUtils.parseJsonObjectIDList(jSpec, colnames, TfUtils.TXMETHOD_BIN)));
        } else { //internally generates
            return Arrays.asList(
                    ArrayUtils.toObject(TfMetaUtils.parseJsonIDList(jSpec, colnames, TfUtils.TXMETHOD_BIN)));
        }
    } catch (JSONException ex) {
        throw new IOException(ex);
    }
}

From source file:org.apache.sysml.test.integration.functions.jmlc.FrameReadMetaTest.java
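The recode column ids are boxed with toObject so that collist.contains(j + 1) can be used while iterating over the rows of the meta data frame.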

/**
 * Extracts per-column recode maps (token to code id) from the given transform meta data frame
 * for all columns marked for recoding in the transform specification.
 *
 * @param spec transform specification as json string
 * @param M meta data frame block containing the recode meta data
 * @return array of per-column recode maps
 * @throws DMLRuntimeException 
 */
@SuppressWarnings("unchecked")
private HashMap<String, Long>[] getRecodeMaps(String spec, FrameBlock M) throws DMLRuntimeException {
    List<Integer> collist = Arrays.asList(ArrayUtils
            .toObject(TfMetaUtils.parseJsonIDList(spec, M.getColumnNames(), TfUtils.TXMETHOD_RECODE)));
    HashMap<String, Long>[] ret = new HashMap[M.getNumColumns()];
    Iterator<Object[]> iter = M.getObjectRowIterator();
    while (iter.hasNext()) {
        Object[] tmp = iter.next();
        for (int j = 0; j < tmp.length; j++)
            if (collist.contains(j + 1) && tmp[j] != null) {
                if (ret[j] == null)
                    ret[j] = new HashMap<String, Long>();
                String[] parts = IOUtilFunctions.splitCSV(tmp[j].toString(), Lop.DATATYPE_PREFIX);
                ret[j].put(parts[0], Long.parseLong(parts[1]));
            }
    }

    return ret;
}