Example usage for org.apache.commons.lang3 SerializationUtils clone

Introduction

This page collects example usages of org.apache.commons.lang3.SerializationUtils#clone, drawn from the source files listed below.

Prototype

public static <T extends Serializable> T clone(final T object) 

Document

Deep clone an Object using serialization.

This is many times slower than writing clone methods by hand on all objects in your object graph.
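The Javadoc note above comes from Commons Lang itself. As a quick orientation before the project examples below, here is a minimal, self-contained sketch (the Settings class and its field are illustrative, not taken from any of the sources on this page) showing that the clone is a deep copy: mutating the copy does not affect the original.

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.SerializationUtils;

public class CloneDemo {

    // Every class in the object graph must implement Serializable,
    // otherwise clone() throws org.apache.commons.lang3.SerializationException.
    static class Settings implements Serializable {
        private static final long serialVersionUID = 1L;
        List<String> hosts = new ArrayList<>();
    }

    public static void main(String[] args) {
        Settings original = new Settings();
        original.hosts.add("localhost");

        // Deep clone: the copy gets its own list instance, not a shared reference.
        Settings copy = SerializationUtils.clone(original);
        copy.hosts.add("example.org");

        System.out.println(original.hosts); // [localhost]
        System.out.println(copy.hosts);     // [localhost, example.org]
    }
}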

Usage

From source file:com.mirth.connect.client.ui.ChannelSetup.java

public void cloneDestination() {
    if (parent.changesHaveBeenMade()) {
        if (!parent.alertOption(this.parent,
                "You must save your channel before cloning.  Would you like to save your channel now?")
                || !saveChanges()) {
            return;
        }
    }
    List<Connector> destinationConnectors = currentChannel.getDestinationConnectors();

    Connector destination = null;
    try {
        destination = (Connector) SerializationUtils
                .clone(destinationConnectors.get(destinationTable.getSelectedModelIndex()));
    } catch (SerializationException e) {
        parent.alertThrowable(this.parent, e);
        return;
    }

    destination.setName(getNewDestinationName(destinationConnectors.size() + 1));
    currentChannel.addDestination(destination);
    // After adding the destination to the channel, make sure to update the cached resource ID map too
    resourceIds.put(destination.getMetaDataId(),
            ((DestinationConnectorPropertiesInterface) destination.getProperties())
                    .getDestinationConnectorProperties().getResourceIds());
    makeDestinationTable(false);
    parent.setSaveEnabled(true);
}

From source file:com.mirth.connect.client.ui.ChannelPanel.java

public void doCloneChannel() {
    if (isSaveEnabled() && !promptSave(true)) {
        return;
    }

    if (isGroupSelected()) {
        JOptionPane.showMessageDialog(parent, "This operation can only be performed on channels.");
        return;
    }

    List<Channel> selectedChannels = getSelectedChannels();
    if (selectedChannels.size() > 1) {
        JOptionPane.showMessageDialog(parent, "This operation can only be performed on a single channel.");
        return;
    }

    Channel channel = selectedChannels.get(0);

    if (channel instanceof InvalidChannel) {
        InvalidChannel invalidChannel = (InvalidChannel) channel;
        Throwable cause = invalidChannel.getCause();
        parent.alertThrowable(parent, cause,
                "Channel \"" + channel.getName() + "\" is invalid and cannot be cloned. "
                        + getMissingExtensions(invalidChannel) + "Original cause:\n" + cause.getMessage());
        return;
    }

    try {
        channel = (Channel) SerializationUtils.clone(channel);
    } catch (SerializationException e) {
        parent.alertThrowable(parent, e);
        return;
    }

    try {
        channel.setRevision(0);
        channel.setId(parent.mirthClient.getGuid());
    } catch (ClientException e) {
        parent.alertThrowable(parent, e);
    }

    String channelName = channel.getName();
    do {
        channelName = JOptionPane.showInputDialog(this, "Please enter a new name for the channel.",
                channelName);
        if (channelName == null) {
            return;
        }
    } while (!parent.checkChannelName(channelName, channel.getId()));

    channel.setName(channelName);
    channelStatuses.put(channel.getId(), new ChannelStatus(channel));
    parent.updateChannelTags(false);

    parent.editChannel(channel);
    parent.setSaveEnabled(true);
}

From source file:com.mirth.connect.client.ui.ChannelPanel.java

public void doEditChannel() {
    if (isSaveEnabled() && !confirmLeave()) {
        return;
    }

    if (parent.isEditingChannel) {
        return;
    } else {
        parent.isEditingChannel = true;
    }

    if (isGroupSelected()) {
        JOptionPane.showMessageDialog(parent, "This operation can only be performed on channels.");
        return;
    }

    List<Channel> selectedChannels = getSelectedChannels();
    if (selectedChannels.size() > 1) {
        JOptionPane.showMessageDialog(parent, "This operation can only be performed on a single channel.");
    } else if (selectedChannels.size() == 0) {
        JOptionPane.showMessageDialog(parent, "Channel no longer exists.");
    } else {
        try {
            Channel channel = selectedChannels.get(0);

            if (channel instanceof InvalidChannel) {
                InvalidChannel invalidChannel = (InvalidChannel) channel;
                Throwable cause = invalidChannel.getCause();
                parent.alertThrowable(parent, cause,
                        "Channel \"" + channel.getName() + "\" is invalid and cannot be edited. "
                                + getMissingExtensions(invalidChannel) + "Original cause:\n"
                                + cause.getMessage());
            } else {
                parent.editChannel((Channel) SerializationUtils.clone(channel));
            }
        } catch (SerializationException e) {
            parent.alertThrowable(parent, e);
        }
    }
    parent.isEditingChannel = false;
}

From source file:objective.taskboard.data.Issue.java

public Issue copy() {
    Issue copy = SerializationUtils.clone(this);
    restoreServicesToIssue(copy);

    if (this.parentCard != null) {
        Issue parentCopy = this.parentCard.copy();
        copy.setParentCard(parentCopy);
    }

    return copy;
}

From source file:org.apache.apex.engine.plugin.AbstractApexPluginDispatcher.java

@Override
public void dispatch(Event event) {
    if (event.getType() == ApexPluginDispatcher.DAG_CHANGE) {
        clonedDAG = SerializationUtils.clone(((DAGChangeEvent) event).dag);
    } else if (!plugins.isEmpty() && (event instanceof DAGExecutionEvent)) {
        dispatchExecutionEvent((DAGExecutionEvent) event);
    }
}

From source file:org.apache.beam.runners.flink.streaming.ExecutableStageDoFnOperatorTest.java

@Test
public void testSerialization() {
    WindowedValue.ValueOnlyWindowedValueCoder<Integer> coder = WindowedValue
            .getValueOnlyCoder(VarIntCoder.of());

    TupleTag<Integer> mainOutput = new TupleTag<>("main-output");
    TupleTag<Integer> additionalOutput = new TupleTag<>("additional-output");
    ImmutableMap<TupleTag<?>, OutputTag<?>> tagsToOutputTags = ImmutableMap.<TupleTag<?>, OutputTag<?>>builder()
            .put(additionalOutput, new OutputTag<>(additionalOutput.getId(), TypeInformation.of(Integer.class)))
            .build();
    ImmutableMap<TupleTag<?>, Coder<WindowedValue<?>>> tagsToCoders = ImmutableMap
            .<TupleTag<?>, Coder<WindowedValue<?>>>builder().put(mainOutput, (Coder) coder)
            .put(additionalOutput, coder).build();
    ImmutableMap<TupleTag<?>, Integer> tagsToIds = ImmutableMap.<TupleTag<?>, Integer>builder()
            .put(mainOutput, 0).put(additionalOutput, 1).build();

    DoFnOperator.MultiOutputOutputManagerFactory<Integer> outputManagerFactory = new DoFnOperator.MultiOutputOutputManagerFactory(
            mainOutput, tagsToOutputTags, tagsToCoders, tagsToIds);

    FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);

    ExecutableStageDoFnOperator<Integer, Integer> operator = new ExecutableStageDoFnOperator<>("transform",
            null, null, Collections.emptyMap(), mainOutput, ImmutableList.of(additionalOutput),
            outputManagerFactory, Collections.emptyMap() /* sideInputTagMapping */,
            Collections.emptyList() /* sideInputs */, Collections.emptyMap() /* sideInputId mapping */, options,
            stagePayload, jobInfo, FlinkExecutableStageContext.factory(options),
            createOutputMap(mainOutput, ImmutableList.of(additionalOutput)), WindowingStrategy.globalDefault(),
            null, null);

    ExecutableStageDoFnOperator<Integer, Integer> clone = SerializationUtils.clone(operator);
    assertNotNull(clone);
    assertNotEquals(operator, clone);
}

From source file:org.apache.flink.api.common.typeutils.SerializerTestBase.java

@Test
public void testSerializabilityAndEquals() {
    try {
        TypeSerializer<T> ser1 = getSerializer();
        TypeSerializer<T> ser2;
        try {
            ser2 = SerializationUtils.clone(ser1);
        } catch (SerializationException e) {
            fail("The serializer is not serializable: " + e);
            return;
        }

        assertEquals("The copy of the serializer is not equal to the original one.", ser1, ser2);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        fail("Exception in test: " + e.getMessage());
    }
}

From source file:org.apache.flink.api.java.record.CoGroupWrappingFunctionTest.java

@SuppressWarnings("unchecked")
@Test
public void testWrappedCoGroupObject() {
    try {
        AtomicInteger methodCounter = new AtomicInteger();

        CoGroupOperator coGroupOp = CoGroupOperator
                .builder(new TestCoGroupFunction(methodCounter), LongValue.class, 1, 2).build();

        RichFunction cogrouper = (RichFunction) coGroupOp.getUserCodeWrapper().getUserCodeObject();

        // test the method invocations
        cogrouper.close();
        cogrouper.open(new Configuration());
        assertEquals(2, methodCounter.get());

        // prepare the coGroup
        final List<Record> target = new ArrayList<Record>();
        Collector<Record> collector = new Collector<Record>() {
            @Override
            public void collect(Record record) {
                target.add(record);
            }

            @Override
            public void close() {
            }
        };

        List<Record> source1 = new ArrayList<Record>();
        source1.add(new Record(new IntValue(42)));
        source1.add(new Record(new IntValue(13)));

        List<Record> source2 = new ArrayList<Record>();
        source2.add(new Record(new LongValue(11)));
        source2.add(new Record(new LongValue(17)));

        // test coGroup
        ((org.apache.flink.api.common.functions.CoGroupFunction<Record, Record, Record>) cogrouper)
                .coGroup(source1, source2, collector);
        assertEquals(4, target.size());
        assertEquals(new IntValue(42), target.get(0).getField(0, IntValue.class));
        assertEquals(new IntValue(13), target.get(1).getField(0, IntValue.class));
        assertEquals(new LongValue(11), target.get(2).getField(0, LongValue.class));
        assertEquals(new LongValue(17), target.get(3).getField(0, LongValue.class));
        target.clear();

        // test the serialization
        SerializationUtils.clone((java.io.Serializable) cogrouper);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}

From source file:org.apache.flink.api.java.record.CoGroupWrappingFunctionTest.java

@Test
public void testWrappedCoGroupClass() {
    try {
        CoGroupOperator coGroupOp = CoGroupOperator.builder(TestCoGroupFunction.class, LongValue.class, 1, 2)
                .build();

        UserCodeWrapper<org.apache.flink.api.common.functions.CoGroupFunction<Record, Record, Record>> udf = coGroupOp
                .getUserCodeWrapper();
        UserCodeWrapper<org.apache.flink.api.common.functions.CoGroupFunction<Record, Record, Record>> copy = SerializationUtils
                .clone(udf);
        org.apache.flink.api.common.functions.CoGroupFunction<Record, Record, Record> cogrouper = copy
                .getUserCodeObject();

        // prepare the coGroup
        final List<Record> target = new ArrayList<Record>();
        Collector<Record> collector = new Collector<Record>() {
            @Override
            public void collect(Record record) {
                target.add(record);
            }

            @Override
            public void close() {
            }
        };

        List<Record> source1 = new ArrayList<Record>();
        source1.add(new Record(new IntValue(42)));
        source1.add(new Record(new IntValue(13)));

        List<Record> source2 = new ArrayList<Record>();
        source2.add(new Record(new LongValue(11)));
        source2.add(new Record(new LongValue(17)));

        // test coGroup
        cogrouper.coGroup(source1, source2, collector);
        assertEquals(4, target.size());
        assertEquals(new IntValue(42), target.get(0).getField(0, IntValue.class));
        assertEquals(new IntValue(13), target.get(1).getField(0, IntValue.class));
        assertEquals(new LongValue(11), target.get(2).getField(0, LongValue.class));
        assertEquals(new LongValue(17), target.get(3).getField(0, LongValue.class));
        target.clear();
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}

From source file:org.apache.flink.api.java.record.ReduceWrappingFunctionTest.java

@SuppressWarnings("unchecked")
@Test
public void testWrappedReduceObject() {
    try {
        AtomicInteger methodCounter = new AtomicInteger();

        ReduceOperator reduceOp = ReduceOperator.builder(new TestReduceFunction(methodCounter)).build();

        RichFunction reducer = (RichFunction) reduceOp.getUserCodeWrapper().getUserCodeObject();

        // test the method invocations
        reducer.close();
        reducer.open(new Configuration());
        assertEquals(2, methodCounter.get());

        // prepare the reduce / combine tests
        final List<Record> target = new ArrayList<Record>();
        Collector<Record> collector = new Collector<Record>() {
            @Override
            public void collect(Record record) {
                target.add(record);
            }

            @Override
            public void close() {
            }
        };

        List<Record> source = new ArrayList<Record>();
        source.add(new Record(new IntValue(42), new LongValue(11)));
        source.add(new Record(new IntValue(13), new LongValue(17)));

        // test reduce
        ((GroupReduceFunction<Record, Record>) reducer).reduce(source, collector);
        assertEquals(2, target.size());
        assertEquals(new IntValue(42), target.get(0).getField(0, IntValue.class));
        assertEquals(new LongValue(11), target.get(0).getField(1, LongValue.class));
        assertEquals(new IntValue(13), target.get(1).getField(0, IntValue.class));
        assertEquals(new LongValue(17), target.get(1).getField(1, LongValue.class));
        target.clear();

        // test combine
        ((GroupCombineFunction<Record, Record>) reducer).combine(source, collector);
        assertEquals(2, target.size());
        assertEquals(new IntValue(42), target.get(0).getField(0, IntValue.class));
        assertEquals(new LongValue(11), target.get(0).getField(1, LongValue.class));
        assertEquals(new IntValue(13), target.get(1).getField(0, IntValue.class));
        assertEquals(new LongValue(17), target.get(1).getField(1, LongValue.class));
        target.clear();

        // test the serialization
        SerializationUtils.clone((java.io.Serializable) reducer);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}