Example usage for org.apache.commons.lang SerializationUtils serialize

Introduction

This page collects example usages of org.apache.commons.lang.SerializationUtils#serialize(Serializable) from open-source projects.

Prototype

public static byte[] serialize(Serializable obj) 

Document

Serializes an Object to a byte array for storage/serialization.
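
A minimal round trip with the companion method SerializationUtils.deserialize(byte[]), shown as a standalone sketch rather than code from the projects below:

import java.util.ArrayList;
import java.util.Arrays;
import org.apache.commons.lang.SerializationUtils;

public class RoundTripExample {
    public static void main(String[] args) {
        // Serialize a Serializable to bytes, then restore an equal copy.
        ArrayList<String> original = new ArrayList<>(Arrays.asList("a", "b"));
        byte[] bytes = SerializationUtils.serialize(original);
        @SuppressWarnings("unchecked")
        ArrayList<String> copy = (ArrayList<String>) SerializationUtils.deserialize(bytes);
        System.out.println(copy.equals(original)); // prints true
    }
}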

Usage

From source file:io.pravega.segmentstore.server.host.ZKSegmentContainerMonitorTest.java

private void initializeHostContainerMapping(CuratorFramework zkClient) throws Exception {
    HashMap<Host, Set<Integer>> mapping = new HashMap<>();
    zkClient.create().creatingParentsIfNeeded().forPath(PATH, SerializationUtils.serialize(mapping));
}
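
Reading the mapping back pairs Curator's getData with deserialize; a hypothetical counterpart, not part of the test above:

private HashMap<Host, Set<Integer>> readHostContainerMapping(CuratorFramework zkClient) throws Exception {
    // Fetch the bytes written above and turn them back into the map.
    byte[] raw = zkClient.getData().forPath(PATH);
    @SuppressWarnings("unchecked")
    HashMap<Host, Set<Integer>> mapping = (HashMap<Host, Set<Integer>>) SerializationUtils.deserialize(raw);
    return mapping;
}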

From source file:com.janrain.backplane2.server.dao.redis.RedisBackplaneMessageDAO.java

@Override
public void persist(BackplaneMessage obj) throws BackplaneServerException {
    // the messages will not be immediately available for reading until they
    // are inserted by the message processing thread.
    Redis.getInstance().rpush(V2_MESSAGE_QUEUE.getBytes(), SerializationUtils.serialize(obj));
}
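
A consumer would pop the same queue and reverse the call; a sketch against the plain Jedis binary API (the project's own Redis wrapper may expose this differently):

// Hypothetical consumer: pop raw bytes off the queue and restore the message.
byte[] raw = jedis.lpop(V2_MESSAGE_QUEUE.getBytes());
if (raw != null) {
    BackplaneMessage message = (BackplaneMessage) SerializationUtils.deserialize(raw);
}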

From source file:fr.inria.oak.paxquery.translation.Logical2Pact.java

private static final Operator<Record>[] translate(GroupBy gb) {
    final boolean withAggregation = gb instanceof GroupByWithAggregation;

    Operator<Record>[] childPlan = translate(gb.getChild());

    // create ReduceOperator for grouping
    ReduceOperator.Builder groupByBuilder;
    if (withAggregation)
        groupByBuilder = ReduceOperator.builder(GroupByWithAggregationOperator.class).input(childPlan)
                .name("GroupByAgg");
    else
        groupByBuilder = ReduceOperator.builder(GroupByOperator.class).input(childPlan).name("GroupBy");
    for (int column : gb.getReduceByColumns())
        KeyFactoryOperations.addKey(groupByBuilder,
                MetadataTypesMapping.getKeyClass(gb.getChild().getNRSMD().getType(column)), column);
    ReduceOperator groupBy = groupByBuilder.build();

    // groupBy configuration
    final String encodedNRSMD = DatatypeConverter
            .printBase64Binary(SerializationUtils.serialize(gb.getNRSMD()));
    groupBy.setParameter(PACTOperatorsConfiguration.NRSMD1_BINARY.toString(), encodedNRSMD);
    final String encodedGroupByColumns = DatatypeConverter
            .printBase64Binary(SerializationUtils.serialize(gb.getGroupByColumns()));
    groupBy.setParameter(PACTOperatorsConfiguration.GROUP_BY_COLUMNS_BINARY.toString(), encodedGroupByColumns);
    final String encodedNestColumns = DatatypeConverter
            .printBase64Binary(SerializationUtils.serialize(gb.getNestColumns()));
    groupBy.setParameter(PACTOperatorsConfiguration.NEST_COLUMNS_BINARY.toString(), encodedNestColumns);
    if (withAggregation) {
        GroupByWithAggregation gba = (GroupByWithAggregation) gb;

        groupBy.setParameter(PACTOperatorsConfiguration.AGGREGATION_COLUMN_INT.toString(),
                gba.getAggregationColumn());

        final String encodedAggregationType = DatatypeConverter
                .printBase64Binary(SerializationUtils.serialize(gba.getAggregationType()));
        groupBy.setParameter(PACTOperatorsConfiguration.AGGREGATION_TYPE_BINARY.toString(),
                encodedAggregationType);

        groupBy.setParameter(PACTOperatorsConfiguration.EXCLUDE_NESTED_FIELD_BOOLEAN.toString(),
                gba.isExcludeNestedField());
    }

    return new Operator[] { groupBy };
}
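
Inside the operator, each parameter is recovered by reversing the two steps; a generic sketch of the decode side, with the configuration accessor assumed:

// Hypothetical decode in the operator's configure(Configuration parameters):
String encoded = parameters.getString(PACTOperatorsConfiguration.NRSMD1_BINARY.toString(), null);
NestedMetadata nrsmd = (NestedMetadata) SerializationUtils
        .deserialize(DatatypeConverter.parseBase64Binary(encoded));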

From source file:mazewar.Mazewar.java

@Subscribe
public void keyEvent(ClientAction action) throws Exception {
    /*System.out.println("Send action = " + action);*/

    /* Send action to server */
    MazePacket actionPacket = new MazePacket();
    actionPacket.type = PacketType.ACTION;
    actionPacket.clientId = Optional.of(clientId);
    actionPacket.action = Optional.of(action);
    actionPacket.sequenceNumber = getSequenceNumber();

    /*
    if(action == ClientAction.FIRE) {
        Thread.sleep(5000);
    }
    */

    publisher.send(SerializationUtils.serialize(actionPacket), 0);
}
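
The subscribing side reverses the call; a hypothetical receive step, with the socket variable assumed:

// Hypothetical receiver: block for the next frame and restore the packet.
byte[] raw = subscriber.recv(0);
MazePacket packet = (MazePacket) SerializationUtils.deserialize(raw);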

From source file:com.github.seqware.queryengine.plugins.runners.hbasemr.MRHBasePluginRunner.java

public static String[] serializeParametersToString(Object[] parameters, PluginInterface mapReducePlugin,
        byte[][] sSet, byte[] dSet) {
    int num_guaranteed_parameters = 6;
    String[] str_params = new String[num_guaranteed_parameters];
    byte[] ext_serials = SerializationUtils.serialize(parameters);
    byte[] int_serials = SerializationUtils.serialize(mapReducePlugin.getInternalParameters());
    str_params[EXTERNAL_PARAMETERS] = Base64.encodeBase64String(ext_serials);
    str_params[INTERNAL_PARAMETERS] = Base64.encodeBase64String(int_serials);
    ByteBuffer bBuffer = ByteBuffer.allocate(1024 * 1024); // one MB should be enough for now
    bBuffer.putInt(sSet.length);
    for (byte[] arr : sSet) {
        bBuffer.putInt(arr.length);
        bBuffer.put(arr);
    }
    str_params[NUM_AND_SOURCE_FEATURE_SETS] = Base64.encodeBase64String(bBuffer.array());
    str_params[DESTINATION_FEATURE_SET] = Base64.encodeBase64String(dSet);
    str_params[SETTINGS_MAP] = Base64
            .encodeBase64String(SerializationUtils.serialize(new Object[] { Constants.getSETTINGS_MAP() }));
    str_params[PLUGIN_CLASS] = Base64
            .encodeBase64String(SerializationUtils.serialize(mapReducePlugin.getClass()));

    return str_params;
}
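
The length-prefixed framing packed into NUM_AND_SOURCE_FEATURE_SETS unpacks symmetrically; a sketch reusing the index constants above:

// Hypothetical decode: read the set count, then one (length, bytes) pair per set.
byte[] decoded = Base64.decodeBase64(str_params[NUM_AND_SOURCE_FEATURE_SETS]);
ByteBuffer buffer = ByteBuffer.wrap(decoded);
int count = buffer.getInt();
byte[][] sourceSets = new byte[count][];
for (int i = 0; i < count; i++) {
    sourceSets[i] = new byte[buffer.getInt()];
    buffer.get(sourceSets[i]);
}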

From source file:com.feedzai.fos.impl.weka.WekaManager.java

@Override
public Model trainFile(ModelConfig config, String path) throws FOSException {
    checkNotNull(config, "Config must be supplied");
    checkNotNull(path, "Path must be supplied");

    long time = System.currentTimeMillis();
    WekaModelConfig wekaModelConfig = new WekaModelConfig(config, wekaManagerConfig);
    Classifier classifier = WekaClassifierFactory.create(config);
    List<Attribute> attributeList = config.getAttributes();
    FastVector attributes = WekaUtils.instanceFields2Attributes(wekaModelConfig.getClassIndex(),
            config.getAttributes());
    InstanceSetter[] instanceSetters = WekaUtils.instanceFields2ValueSetters(config.getAttributes(),
            InstanceType.TRAINING);

    List<Instance> instances = new ArrayList<>();

    String[] line;
    try (CSVReader csvReader = new CSVReader(new FileReader(path))) {
        while ((line = csvReader.readNext()) != null) {
            // parsing is done by the InstanceSetters
            instances.add(WekaUtils.objectArray2Instance(line, instanceSetters, attributes));
        }

    } catch (Exception e) {
        throw new FOSException(e.getMessage(), e);
    }

    Instances wekaInstances = new Instances(config.getProperty(WekaModelConfig.CLASSIFIER_IMPL), attributes,
            instances.size());

    for (Instance instance : instances) {
        wekaInstances.add(instance);
    }

    trainClassifier(wekaModelConfig.getClassIndex(), classifier, wekaInstances);

    final byte[] bytes = SerializationUtils.serialize(classifier);
    logger.debug("Trained model with {} instances in {}ms", instances.size(),
            (System.currentTimeMillis() - time));

    return new ModelBinary(bytes);
}
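
Restoring the trained classifier is the symmetric call; a one-line sketch:

// Hypothetical load path: rebuild the Weka classifier from the stored bytes.
Classifier restored = (Classifier) SerializationUtils.deserialize(bytes);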

From source file:fr.inria.oak.paxquery.translation.Logical2Pact.java

private static final Operator<Record>[] translate(DuplicateElimination dupElim) {
    Operator<Record>[] childPlan = translate(dupElim.getChild());

    // create ReduceOperator for removing records
    ReduceOperator.Builder duplicateEliminationBuilder = ReduceOperator
            .builder(DuplicateEliminationOperator.class).input(childPlan).name("DupElim");
    for (int column : dupElim.getColumns())
        KeyFactoryOperations.addKey(duplicateEliminationBuilder,
                MetadataTypesMapping.getKeyClass(dupElim.getChild().getNRSMD().getType(column)), column);
    ReduceOperator duplicateElimination = duplicateEliminationBuilder.build();

    // duplicate elimination configuration
    final String encodedNRSMD = DatatypeConverter
            .printBase64Binary(SerializationUtils.serialize(dupElim.getNRSMD()));
    duplicateElimination.setParameter(PACTOperatorsConfiguration.NRSMD1_BINARY.toString(), encodedNRSMD);
    final String encodedDuplicateEliminationColumns = DatatypeConverter
            .printBase64Binary(SerializationUtils.serialize(dupElim.getColumns()));
    duplicateElimination.setParameter(PACTOperatorsConfiguration.DUP_ELIM_COLUMNS_BINARY.toString(),
            encodedDuplicateEliminationColumns);

    return new Operator[] { duplicateElimination };
}

From source file:com.splicemachine.db.iapi.types.UserType.java

/**
 *
 * Write to a Project Tungsten Format.  Serializes the objects as byte[]
 *
 * @see UnsafeRowWriter#write(int, byte[])
 *
 * @param unsafeRowWriter
 * @param ordinal
 * @throws StandardException
 */
@Override
public void write(UnsafeRowWriter unsafeRowWriter, int ordinal) throws StandardException {
    if (isNull())
        unsafeRowWriter.setNullAt(ordinal);
    else
        unsafeRowWriter.write(ordinal, SerializationUtils.serialize((Serializable) value));
}
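
A read path would deserialize the stored bytes back into the value; a hypothetical sketch assuming Spark's UnsafeRow.getBinary accessor, with null handling elided:

// Hypothetical symmetric read:
byte[] raw = unsafeRow.getBinary(ordinal);
Serializable restored = (Serializable) SerializationUtils.deserialize(raw);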

From source file:io.pravega.controller.store.stream.ZKStream.java

@Override
public CompletableFuture<Void> setConfigurationData(final StreamConfiguration configuration) {
    return store.setData(configurationPath, new Data<>(SerializationUtils.serialize(configuration), null))
            .whenComplete((r, e) -> cache.invalidateCache(configurationPath));
}
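
The matching getter reverses the call; a hypothetical sketch along the lines of the store API used above:

// Hypothetical read path: fetch the node and deserialize the configuration.
return store.getData(configurationPath)
        .thenApply(data -> (StreamConfiguration) SerializationUtils.deserialize(data.getData()));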

From source file:fr.inria.oak.paxquery.translation.Logical2Pact.java

private static final Operator<Record>[] translate(Aggregation aggr) {
    Operator<Record>[] childPlan = translate(aggr.getChild());

    Operator<Record> aggregation;
    if (aggr.getAggregationPath().length > 1) {
        // create MapOperator for aggregating
        aggregation = MapOperator.builder(NestedAggregationOperator.class).input(childPlan).name("Aggr")
                .build();

        // aggregation configuration
        final String encodedNRSMD = DatatypeConverter
                .printBase64Binary(SerializationUtils.serialize(aggr.getNRSMD()));
        aggregation.setParameter(PACTOperatorsConfiguration.NRSMD1_BINARY.toString(), encodedNRSMD);
        final String aggregationPath = DatatypeConverter
                .printBase64Binary(SerializationUtils.serialize(aggr.getAggregationPath()));
        aggregation.setParameter(PACTOperatorsConfiguration.AGGREGATION_PATH_BINARY.toString(),
                aggregationPath);
        final String aggregationType = DatatypeConverter
                .printBase64Binary(SerializationUtils.serialize(aggr.getAggregationType()));
        aggregation.setParameter(PACTOperatorsConfiguration.AGGREGATION_TYPE_BINARY.toString(),
                aggregationType);
    } else {
        //First, we create the NRSMD of the (pre) groupBy
        NestedMetadata groupByNRSMD = aggr.getNRSMD();
        MetadataTypes[] attScanMeta = new MetadataTypes[1];
        attScanMeta[0] = MetadataTypes.INTEGER_TYPE;
        final NestedMetadata auxColumnNRSMD = new NestedMetadata(1, attScanMeta);
        groupByNRSMD = NestedMetadataUtils.appendNRSMD(groupByNRSMD, auxColumnNRSMD);

        //Then, we create ReduceOperator for grouping using the document ID column
        ReduceOperator.Builder groupByBuilder = ReduceOperator.builder(GroupByWithAggregationOperator.class)
                .input(childPlan).name("GroupByAgg");
        KeyFactoryOperations.addKey(groupByBuilder, StringValue.class, aggr.getDocumentIDColumn());
        ReduceOperator groupBy = groupByBuilder.build();

        // groupBy configuration
        final String encodedNRSMDGroupBy = DatatypeConverter
                .printBase64Binary(SerializationUtils.serialize(groupByNRSMD));
        groupBy.setParameter(PACTOperatorsConfiguration.NRSMD1_BINARY.toString(), encodedNRSMDGroupBy);

        final String encodedGroupByColumns = DatatypeConverter
                .printBase64Binary(SerializationUtils.serialize(new int[] {}));
        groupBy.setParameter(PACTOperatorsConfiguration.GROUP_BY_COLUMNS_BINARY.toString(),
                encodedGroupByColumns);

        final NestedMetadata childNRSMD = aggr.getChild().getNRSMD();
        int[] nestColumns = new int[childNRSMD.getColNo()];
        for (int i = 0; i < childNRSMD.getColNo(); i++)
            nestColumns[i] = i;
        final String encodedNestColumns = DatatypeConverter
                .printBase64Binary(SerializationUtils.serialize(nestColumns));
        groupBy.setParameter(PACTOperatorsConfiguration.NEST_COLUMNS_BINARY.toString(), encodedNestColumns);

        groupBy.setParameter(PACTOperatorsConfiguration.AGGREGATION_COLUMN_INT.toString(),
                aggr.getAggregationPath()[0]);

        final String encodedAggregationType = DatatypeConverter
                .printBase64Binary(SerializationUtils.serialize(aggr.getAggregationType()));
        groupBy.setParameter(PACTOperatorsConfiguration.AGGREGATION_TYPE_BINARY.toString(),
                encodedAggregationType);

        groupBy.setParameter(PACTOperatorsConfiguration.EXCLUDE_NESTED_FIELD_BOOLEAN.toString(),
                aggr.isExcludeNestedField());

        groupBy.setParameter(PACTOperatorsConfiguration.ATTACH_DUMMY_COLUMN_BOOLEAN.toString(), true);

        // create ReduceOperator for aggregating
        ReduceOperator.Builder aggregationBuilder = ReduceOperator.builder(PostAggregationOperator.class)
                .input(groupBy).name("PostAggr");
        KeyFactoryOperations.addKey(aggregationBuilder, IntValue.class, groupByNRSMD.colNo - 1);
        aggregation = aggregationBuilder.build();

        //Post-aggregation configuration
        final String encodedNRSMDPostAggregation = DatatypeConverter
                .printBase64Binary(SerializationUtils.serialize(aggr.getNRSMD()));
        aggregation.setParameter(PACTOperatorsConfiguration.NRSMD1_BINARY.toString(),
                encodedNRSMDPostAggregation);
        if (aggr.isExcludeNestedField())
            aggregation.setParameter(PACTOperatorsConfiguration.POST_AGGREGATION_COLUMN_INT.toString(), 0);
        else {
            aggregation.setParameter(PACTOperatorsConfiguration.NESTED_RECORDS_COLUMN_INT.toString(), 0);
            aggregation.setParameter(PACTOperatorsConfiguration.POST_AGGREGATION_COLUMN_INT.toString(), 1);
        }

        aggregation.setParameter(PACTOperatorsConfiguration.AGGREGATION_TYPE_BINARY.toString(),
                encodedAggregationType);

        aggregation.setParameter(PACTOperatorsConfiguration.EXCLUDE_NESTED_FIELD_BOOLEAN.toString(),
                aggr.isExcludeNestedField());
    }

    return new Operator[] { aggregation };
}