Example usage for org.apache.commons.lang SerializationUtils serialize

Introduction

This page collects example usages of the org.apache.commons.lang.SerializationUtils.serialize(Serializable) method from open-source projects.

Prototype

public static byte[] serialize(Serializable obj) 

Document

Serializes an Object to a byte array for storage/serialization.
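
Before the project examples below, here is a minimal round-trip sketch. It assumes only commons-lang on the classpath; the Person class is a hypothetical stand-in for any Serializable type:

import java.io.Serializable;

import org.apache.commons.lang.SerializationUtils;

public class SerializeExample {

    // Hypothetical payload type; any class implementing Serializable works.
    static class Person implements Serializable {
        private static final long serialVersionUID = 1L;
        final String name;

        Person(String name) {
            this.name = name;
        }
    }

    public static void main(String[] args) {
        // Serialize the object graph to a byte array...
        final byte[] bytes = SerializationUtils.serialize(new Person("Alice"));

        // ...and restore an equivalent copy from those bytes.
        final Person copy = (Person) SerializationUtils.deserialize(bytes);
        System.out.println(copy.name); // prints "Alice"
    }
}

Note that deserialization requires a compatible class (matching serialVersionUID) to be present on the reading side.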

Usage

From source file: org.datacleaner.actions.PublishResultToMonitorActionListener.java

private byte[] getBytes() {
    if (_bytes == null) {
        final AnalysisResult analysisResult = _resultRef.get();
        _bytes = SerializationUtils.serialize(new SimpleAnalysisResult(analysisResult.getResultMap()));
    }
    return _bytes;
}

From source file: org.datacleaner.api.AnalyzerResultFutureTest.java

public void testSerializationAndDeserialization() throws Exception {
    final NumberResult result1 = new NumberResult(42);

    final AnalyzerResultFuture<NumberResult> future = new AnalyzerResultFutureImpl<>("foo",
            new ImmutableRef<NumberResult>(result1));

    future.addListener(new Listener<NumberResult>() {
        @Override
        public void onSuccess(NumberResult result) {
            // do nothing - this is just a non-serializable listener
        }

        @Override
        public void onError(RuntimeException error) {
            // do nothing - this is just a non-serializable listener
        }
    });

    final byte[] bytes = SerializationUtils.serialize(future);

    final AnalyzerResultFuture<?> copy = (AnalyzerResultFuture<?>) SerializationUtils.deserialize(bytes);

    assertEquals("foo", copy.getName());
    assertEquals("42", copy.get().toString());
}

From source file: org.datacleaner.components.machinelearning.MLClassificationTrainingAnalyzer.java

@Override
public MLClassificationAnalyzerResult getResult() {
    final List<MLFeatureModifier> featureModifiers = featureModifierBuilders.stream()
            .map(MLFeatureModifierBuilder::build).collect(Collectors.toList());
    final List<String> columnNames = CollectionUtils.map(featureColumns, new HasNameMapper());
    final MLTrainingOptions options = new MLTrainingOptions(classification.getDataType(), columnNames,
            featureModifiers);

    final MLClassificationTrainer trainer = createTrainer(options);
    log("Training model starting. Records=" + trainingRecords.size() + ", Columns=" + columnNames.size()
            + ", Features=" + MLFeatureUtils.getFeatureCount(featureModifiers) + ".");
    final MLClassifier classifier = trainer.train(trainingRecords, featureModifiers, new MLTrainerCallback() {
        @Override
        public void epochDone(int epochNo, int expectedEpochs) {
            if (expectedEpochs > 1) {
                log("Training progress: Epoch " + epochNo + " of " + expectedEpochs + " done.");
            }
        }
    });

    if (saveModelToFile != null) {
        logger.info("Saving model to file: {}", saveModelToFile);
        try {
            final byte[] bytes = SerializationUtils.serialize(classifier);
            Files.write(bytes, saveModelToFile);
        } catch (IOException e) {
            throw new UncheckedIOException("Failed to save model to file: " + saveModelToFile, e);
        }
    }

    log("Trained model. Creating evaluation matrices.");

    final Crosstab<Integer> trainedRecordsConfusionMatrix = createConfusionMatrixCrosstab(classifier,
            trainingRecords);
    final Crosstab<Integer> crossValidationConfusionMatrix = createConfusionMatrixCrosstab(classifier,
            crossValidationRecords);

    return new MLClassificationAnalyzerResult(classifier, trainedRecordsConfusionMatrix,
            crossValidationConfusionMatrix);
}
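
The save branch above (the regression analyzer below repeats the same pattern) writes the serialized model with Guava's Files.write. Loading such a file back is the mirror image; a minimal sketch, assuming Guava's Files.toByteArray for reading, since the loading side is not shown in this analyzer:

import java.io.File;
import java.io.IOException;

import com.google.common.io.Files;
import org.apache.commons.lang.SerializationUtils;

public class ModelLoadSketch {

    // Reads the serialized model file and deserializes it back into an object.
    static Object loadModel(File modelFile) throws IOException {
        final byte[] bytes = Files.toByteArray(modelFile);
        return SerializationUtils.deserialize(bytes);
    }
}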

From source file: org.datacleaner.components.machinelearning.MLRegressionTrainingAnalyzer.java

@Override
public MLRegressionAnalyzerResult getResult() {
    final List<MLFeatureModifier> featureModifiers = featureModifierBuilders.stream()
            .map(MLFeatureModifierBuilder::build).collect(Collectors.toList());
    final List<String> columnNames = CollectionUtils.map(featureColumns, new HasNameMapper());
    final MLTrainingOptions options = new MLTrainingOptions(Double.class, columnNames, featureModifiers);

    final MLRegressorTrainer trainer = createTrainer(options);
    log("Training model starting. Records=" + trainingRecords.size() + ", Columns=" + columnNames.size()
            + ", Features=" + MLFeatureUtils.getFeatureCount(featureModifiers) + ".");
    final MLRegressor regressor = trainer.train(trainingRecords, featureModifiers, new MLTrainerCallback() {
        @Override
        public void epochDone(int epochNo, int expectedEpochs) {
            if (expectedEpochs > 1) {
                log("Training progress: Epoch " + epochNo + " of " + expectedEpochs + " done.");
            }
        }
    });

    if (saveModelToFile != null) {
        logger.info("Saving model to file: {}", saveModelToFile);
        try {
            final byte[] bytes = SerializationUtils.serialize(regressor);
            Files.write(bytes, saveModelToFile);
        } catch (IOException e) {
            throw new UncheckedIOException("Failed to save model to file: " + saveModelToFile, e);
        }
    }

    log("Trained model. Creating evaluation matrices.");

    return new MLRegressionAnalyzerResult(regressor);
}

From source file: org.datacleaner.result.AnnotatedRowResultTest.java

public void testSerializeAndDeserialize() throws Exception {
    RowAnnotationFactory annotationFactory = RowAnnotations.getDefaultFactory();
    RowAnnotation annotation = annotationFactory.createAnnotation();
    InputColumn<String> col1 = new MockInputColumn<String>("foo", String.class);
    InputColumn<String> col2 = new MockInputColumn<String>("bar", String.class);

    annotationFactory.annotate(new MockInputRow().put(col1, "1").put(col2, "2"), 1, annotation);
    annotationFactory.annotate(new MockInputRow().put(col1, "3").put(col2, "4"), 1, annotation);

    AnnotatedRowsResult result1 = new AnnotatedRowsResult(annotation, annotationFactory, col1);
    performAssertions(result1);

    AnnotatedRowsResult result2 = (AnnotatedRowsResult) SerializationUtils
            .deserialize(SerializationUtils.serialize(result1));
    performAssertions(result2);
}

From source file: org.datacleaner.storage.RowAnnotationImplTest.java

public void testSerializeAndDeserializeCurrentVersion() throws Exception {
    RowAnnotationImpl annotation1 = new RowAnnotationImpl();
    annotation1.incrementRowCount(20);

    byte[] bytes = SerializationUtils.serialize(annotation1);
    RowAnnotationImpl annotation2 = (RowAnnotationImpl) SerializationUtils.deserialize(bytes);

    assertEquals(20, annotation2.getRowCount());
}

From source file: org.datacleaner.test.full.scenarios.AnalyzeDateGapsCompareSchemasAndSerializeResultsTest.java

@SuppressWarnings("unchecked")
public void testScenario() throws Throwable {
    final DataCleanerConfiguration configuration;
    {
        // create configuration
        SimpleDescriptorProvider descriptorProvider = new SimpleDescriptorProvider();
        descriptorProvider.addAnalyzerBeanDescriptor(Descriptors.ofAnalyzer(DateGapAnalyzer.class));
        descriptorProvider.addFilterBeanDescriptor(Descriptors.ofFilter(MaxRowsFilter.class));
        descriptorProvider
                .addTransformerBeanDescriptor(Descriptors.ofTransformer(ConvertToStringTransformer.class));
        Datastore datastore = TestHelper.createSampleDatabaseDatastore("orderdb");
        configuration = new DataCleanerConfigurationImpl().withDatastores(datastore)
                .withEnvironment(new DataCleanerEnvironmentImpl().withDescriptorProvider(descriptorProvider));
    }

    AnalysisJob job;
    {
        // create job
        AnalysisJobBuilder analysisJobBuilder = new AnalysisJobBuilder(configuration);
        Datastore datastore = configuration.getDatastoreCatalog().getDatastore("orderdb");
        analysisJobBuilder.setDatastore(datastore);
        analysisJobBuilder.addSourceColumns("PUBLIC.ORDERS.ORDERDATE", "PUBLIC.ORDERS.SHIPPEDDATE",
                "PUBLIC.ORDERS.CUSTOMERNUMBER");
        assertEquals(3, analysisJobBuilder.getSourceColumns().size());

        FilterComponentBuilder<MaxRowsFilter, MaxRowsFilter.Category> maxRows = analysisJobBuilder
                .addFilter(MaxRowsFilter.class);
        maxRows.getComponentInstance().setMaxRows(5);
        analysisJobBuilder.setDefaultRequirement(maxRows.getFilterOutcome(MaxRowsFilter.Category.VALID));

        TransformerComponentBuilder<ConvertToStringTransformer> convertToNumber = analysisJobBuilder
                .addTransformer(ConvertToStringTransformer.class);
        convertToNumber.addInputColumn(analysisJobBuilder.getSourceColumnByName("customernumber"));
        InputColumn<String> customer_no = (InputColumn<String>) convertToNumber.getOutputColumns().get(0);

        AnalyzerComponentBuilder<DateGapAnalyzer> dateGap = analysisJobBuilder
                .addAnalyzer(DateGapAnalyzer.class);
        dateGap.setName("date gap job");
        dateGap.getComponentInstance().setSingleDateOverlaps(true);
        dateGap.getComponentInstance()
                .setFromColumn((InputColumn<Date>) analysisJobBuilder.getSourceColumnByName("orderdate"));
        dateGap.getComponentInstance()
                .setToColumn((InputColumn<Date>) analysisJobBuilder.getSourceColumnByName("shippeddate"));
        dateGap.getComponentInstance().setGroupColumn(customer_no);

        job = analysisJobBuilder.toAnalysisJob();
        analysisJobBuilder.close();
    }

    AnalysisResultFuture future = new AnalysisRunnerImpl(configuration).run(job);
    if (future.isErrornous()) {
        throw future.getErrors().get(0);
    }
    assertTrue(future.isSuccessful());

    SimpleAnalysisResult result1 = new SimpleAnalysisResult(future.getResultMap());
    byte[] bytes = SerializationUtils.serialize(result1);
    SimpleAnalysisResult result2 = (SimpleAnalysisResult) SerializationUtils.deserialize(bytes);

    performResultAssertions(job, future);
    performResultAssertions(job, result1);
    performResultAssertions(job, result2);
}

From source file: org.datacleaner.test.full.scenarios.JobWithOutputDataStreamsTest.java

@Test(timeout = 30 * 1000)
public void testSimpleBuildAndExecuteScenario() throws Throwable {
    final AnalysisJob job;
    try (final AnalysisJobBuilder ajb = new AnalysisJobBuilder(configuration)) {
        ajb.setDatastore(datastore);

        ajb.addSourceColumns("customers.contactfirstname");
        ajb.addSourceColumns("customers.contactlastname");
        ajb.addSourceColumns("customers.city");

        final AnalyzerComponentBuilder<MockOutputDataStreamAnalyzer> analyzer1 = ajb
                .addAnalyzer(MockOutputDataStreamAnalyzer.class);

        // analyzer is still unconfigured
        assertEquals(0, analyzer1.getOutputDataStreams().size());

        // now configure it
        final List<MetaModelInputColumn> sourceColumns = ajb.getSourceColumns();
        analyzer1.setName("analyzer1");
        analyzer1.addInputColumn(sourceColumns.get(0));
        assertTrue(analyzer1.isConfigured());

        final List<OutputDataStream> dataStreams = analyzer1.getOutputDataStreams();

        assertEquals(2, dataStreams.size());
        assertEquals("foo bar records", dataStreams.get(0).getName());
        assertEquals("counter records", dataStreams.get(1).getName());

        final OutputDataStream dataStream = analyzer1.getOutputDataStream("foo bar records");
        // assert that the same instance is reused when re-referred to
        assertSame(dataStreams.get(0), dataStream);

        // the stream is still not "consumed" yet
        assertFalse(analyzer1.isOutputDataStreamConsumed(dataStream));

        final AnalysisJobBuilder outputDataStreamJobBuilder = analyzer1
                .getOutputDataStreamJobBuilder(dataStream);
        final List<MetaModelInputColumn> outputDataStreamColumns = outputDataStreamJobBuilder
                .getSourceColumns();
        assertEquals(2, outputDataStreamColumns.size());
        assertEquals("MetaModelInputColumn[foo bar records.foo]", outputDataStreamColumns.get(0).toString());
        assertEquals("MetaModelInputColumn[foo bar records.bar]", outputDataStreamColumns.get(1).toString());

        // the stream is still not "consumed" because no components exist in
        // the output stream
        assertFalse(analyzer1.isOutputDataStreamConsumed(dataStream));

        final AnalyzerComponentBuilder<MockAnalyzer> analyzer2 = outputDataStreamJobBuilder
                .addAnalyzer(MockAnalyzer.class);
        analyzer2.addInputColumns(outputDataStreamColumns);
        analyzer2.setName("analyzer2");
        assertTrue(analyzer2.isConfigured());

        // now the stream is consumed
        assertTrue(analyzer1.isOutputDataStreamConsumed(dataStream));

        job = ajb.toAnalysisJob();
    }

    // do some assertions on the built job to check that the data stream is
    // represented there also
    assertEquals(1, job.getAnalyzerJobs().size());
    final AnalyzerJob analyzerJob1 = job.getAnalyzerJobs().get(0);
    assertEquals("analyzer1", analyzerJob1.getName());
    final OutputDataStreamJob[] outputDataStreamJobs = analyzerJob1.getOutputDataStreamJobs();
    assertEquals(1, outputDataStreamJobs.length);

    final OutputDataStreamJob outputDataStreamJob = outputDataStreamJobs[0];
    assertEquals("foo bar records", outputDataStreamJob.getOutputDataStream().getName());
    final AnalysisJob job2 = outputDataStreamJob.getJob();
    assertEquals(2, job2.getSourceColumns().size());
    assertEquals("foo", job2.getSourceColumns().get(0).getName());
    assertEquals("bar", job2.getSourceColumns().get(1).getName());
    assertEquals(1, job2.getAnalyzerJobs().size());
    final AnalyzerJob analyzerJob2 = job2.getAnalyzerJobs().get(0);
    assertEquals("analyzer2", analyzerJob2.getName());

    // now run the job(s)
    final AnalysisRunnerImpl runner = new AnalysisRunnerImpl(configuration);
    final AnalysisResultFuture resultFuture = runner.run(job);
    resultFuture.await();

    if (resultFuture.isErrornous()) {
        throw resultFuture.getErrors().get(0);
    }

    assertEquals(2, resultFuture.getResults().size());

    final byte[] serialized = SerializationUtils
            .serialize(new SimpleAnalysisResult(resultFuture.getResultMap()));

    final SimpleAnalysisResult deSerializedResult = (SimpleAnalysisResult) SerializationUtils
            .deserialize(serialized);

    // the first result should be trivial - it was also there before issue
    // #224
    final ListResult<?> result1 = (ListResult<?>) deSerializedResult.getResult(analyzerJob1);
    assertNotNull(result1);
    assertEquals(40, result1.getValues().size());

    // this result is the "new part" of issue #224
    final ListResult<?> result2 = (ListResult<?>) deSerializedResult.getResult(analyzerJob2);
    assertNotNull(result2);
    assertEquals(83, result2.getValues().size());
    final Object lastElement = result2.getValues().get(result2.getValues().size() - 1);
    assertEquals("MetaModelInputRow[Row[values=[baz, null]]]", lastElement.toString());
}

From source file: org.datacleaner.util.convert.StandardTypeConverter.java

@Override
public String toString(Object o) {
    if (o instanceof Calendar) {
        // will now be picked up by the date conversion
        o = ((Calendar) o).getTime();
    }

    final String result;
    if (o instanceof Boolean || o instanceof Number || o instanceof String || o instanceof Character) {
        result = o.toString();
    } else if (o instanceof File) {
        final File file = (File) o;
        final FileResolver fileResolver = new FileResolver(_configuration);
        return fileResolver.toPath(file);
    } else if (o instanceof Date) {
        if (o instanceof ExpressionDate) {
            // preserve the expression if it is an ExpressionDate
            result = ((ExpressionDate) o).getExpression();
        } else {
            result = new SimpleDateFormat(dateFormatString).format((Date) o);
        }
    } else if (o instanceof Pattern) {
        result = o.toString();
    } else if (o instanceof Enum) {
        return ((Enum<?>) o).name();
    } else if (o instanceof Class) {
        result = ((Class<?>) o).getName();
    } else if (o instanceof EnumerationValue) {
        result = ((EnumerationValue) o).getValue();
    } else if (o instanceof Serializable) {
        logger.info("toString(...): No built-in handling of type: {}, using serialization.",
                o.getClass().getName());
        byte[] bytes = SerializationUtils.serialize((Serializable) o);
        result = _parentConverter.toString(bytes);
    } else {
        logger.warn("toString(...): Could not convert type: {}", o.getClass().getName());
        result = o.toString();
    }
    return result;
}
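
The final Serializable branch above falls back to serialize(...) and hands the bytes to a parent converter for the byte-to-string step, whose implementation is not shown. A minimal sketch of the same idea, assuming commons-codec's Base64 as the string encoding:

import java.io.Serializable;

import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.SerializationUtils;

public class SerializableToStringSketch {

    // Encodes any Serializable as a printable string.
    static String toString(Serializable value) {
        final byte[] bytes = SerializationUtils.serialize(value);
        return Base64.encodeBase64String(bytes);
    }

    // Reverses the encoding back into an object.
    static Object fromString(String encoded) {
        final byte[] bytes = Base64.decodeBase64(encoded);
        return SerializationUtils.deserialize(bytes);
    }

    public static void main(String[] args) {
        final String encoded = toString("hello");
        System.out.println(fromString(encoded)); // prints "hello"
    }
}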

From source file: org.eclipse.jetty.nosql.kvs.KeyValueStoreSessionIdManager.java

/**
 * Is the session id known to memcached, and is it valid?
 */
public boolean idInUse(final String idInCluster) {
    // reserve the id with a dummy session
    boolean exists = !addKey(idInCluster, SerializationUtils.serialize(new SerializableSession()));

    // do not check the validity of the session since
    // we do not save invalidated sessions anymore.

    _cache.getIfPresent(idInCluster);
    return exists;
}
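
The reservation trick above hinges on an atomic add-if-absent: if storing a serialized dummy session under the id fails, another node already holds it. A minimal sketch of the same pattern, with a ConcurrentHashMap standing in for the real memcached client and DummySession for the SerializableSession used above:

import java.io.Serializable;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.commons.lang.SerializationUtils;

public class IdReservationSketch {

    // In-memory stand-in for the key-value store.
    private final ConcurrentHashMap<String, byte[]> store = new ConcurrentHashMap<>();

    // Hypothetical placeholder session, mirroring the dummy session used above.
    static class DummySession implements Serializable {
        private static final long serialVersionUID = 1L;
    }

    // Returns true if the key was newly added, false if it already existed.
    private boolean addKey(String key, byte[] value) {
        return store.putIfAbsent(key, value) == null;
    }

    public boolean idInUse(String id) {
        // If the reservation fails, another owner already holds the id.
        return !addKey(id, SerializationUtils.serialize(new DummySession()));
    }
}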