Example usage for org.apache.commons.configuration Configuration setProperty

List of usage examples for org.apache.commons.configuration Configuration setProperty

Introduction

On this page you can find example usage for org.apache.commons.configuration Configuration setProperty.

Prototype

void setProperty(String key, Object value);

Document

Set a property; this will replace any previously set values.
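
The replace behaviour is the key difference from addProperty: addProperty appends to a key that already holds values, while setProperty discards everything previously stored under that key. Below is a minimal sketch of that behaviour, assuming a plain BaseConfiguration; the key name and values are illustrative only.

import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;

public class SetPropertyExample {
    public static void main(final String[] args) {
        final Configuration conf = new BaseConfiguration();
        conf.addProperty("hosts", "alpha");                // "hosts" -> [alpha]
        conf.addProperty("hosts", "beta");                 // addProperty appends: "hosts" -> [alpha, beta]
        conf.setProperty("hosts", "gamma");                // setProperty replaces all previous values: "hosts" -> [gamma]
        System.out.println(conf.getString("hosts"));       // gamma
        System.out.println(conf.getList("hosts").size());  // 1
    }
}

Most of the examples below use setProperty in this straightforward single-valued way: populating a fresh Configuration (or one returned by a test helper) before handing it to a factory such as GraphFactory.open.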

Usage

From source file:org.apache.tinkerpop.gremlin.spark.structure.io.PersistedInputOutputRDDTest.java

@Test
public void testBulkLoaderVertexProgramChain() throws Exception {
    Spark.create("local[4]");
    final String rddName = TestHelper.makeTestDataDirectory(PersistedInputOutputRDDTest.class,
            UUID.randomUUID().toString());
    final Configuration readConfiguration = super.getBaseConfiguration();
    readConfiguration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
            GryoInputFormat.class.getCanonicalName());
    readConfiguration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    readConfiguration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            PersistedOutputRDD.class.getCanonicalName());
    readConfiguration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, rddName);
    readConfiguration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);
    Graph pageRankGraph = GraphFactory.open(readConfiguration);
    ///////////////
    final Configuration writeConfiguration = new BaseConfiguration();
    writeConfiguration.setProperty(Graph.GRAPH, TinkerGraph.class.getCanonicalName());
    writeConfiguration.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_FORMAT, "gryo");
    writeConfiguration.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_LOCATION,
            TestHelper.makeTestDataDirectory(PersistedInputOutputRDDTest.class)
                    + "testBulkLoaderVertexProgramChain.kryo");
    final Graph bulkLoaderGraph = pageRankGraph.compute(SparkGraphComputer.class)
            .persist(GraphComputer.Persist.VERTEX_PROPERTIES)
            .program(PageRankVertexProgram.build().create(pageRankGraph)).submit().get().graph();
    bulkLoaderGraph.compute(SparkGraphComputer.class).persist(GraphComputer.Persist.NOTHING).workers(1)
            .configure(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD, PersistedInputRDD.class.getCanonicalName())
            .configure(Constants.GREMLIN_HADOOP_INPUT_LOCATION, rddName)
            .configure(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD, null)
            .configure(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, null).program(BulkLoaderVertexProgram.build()
                    .userSuppliedIds(true).writeGraph(writeConfiguration).create(bulkLoaderGraph))
            .submit().get();
    ////
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertEquals(1, Spark.getContext().getPersistentRDDs().size());
    ////
    final Graph graph = TinkerGraph.open();
    final GraphTraversalSource g = graph.traversal();
    graph.io(IoCore.gryo()).readGraph(TestHelper.makeTestDataDirectory(PersistedInputOutputRDDTest.class)
            + "testBulkLoaderVertexProgramChain.kryo");
    assertEquals(6l, g.V().count().next().longValue());
    assertEquals(0l, g.E().count().next().longValue());
    assertEquals("marko", g.V().has("name", "marko").values("name").next());
    assertEquals(6l, g.V().values(PageRankVertexProgram.PAGE_RANK).count().next().longValue());
    ////
    Spark.close();
}

From source file:org.apache.tinkerpop.gremlin.spark.structure.io.PersistedInputOutputRDDTest.java

@Test
public void testBulkLoaderVertexProgramChainWithInputOutputHelperMapping() throws Exception {
    Spark.create("local[4]");

    final String rddName = TestHelper.makeTestDataDirectory(PersistedInputOutputRDDTest.class,
            UUID.randomUUID().toString());
    final Configuration readConfiguration = super.getBaseConfiguration();
    readConfiguration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
            GryoInputFormat.class.getCanonicalName());
    readConfiguration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    readConfiguration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            PersistedOutputRDD.class.getCanonicalName());
    readConfiguration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, rddName);
    readConfiguration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);
    Graph pageRankGraph = GraphFactory.open(readConfiguration);
    ///////////////
    final Configuration writeConfiguration = new BaseConfiguration();
    writeConfiguration.setProperty(Graph.GRAPH, TinkerGraph.class.getCanonicalName());
    writeConfiguration.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_FORMAT, "gryo");
    writeConfiguration.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_LOCATION,
            TestHelper.makeTestDataDirectory(PersistedInputOutputRDDTest.class)
                    + "testBulkLoaderVertexProgramChainWithInputOutputHelperMapping.kryo");
    final Graph bulkLoaderGraph = pageRankGraph.compute(SparkGraphComputer.class)
            .persist(GraphComputer.Persist.EDGES).program(PageRankVertexProgram.build().create(pageRankGraph))
            .submit().get().graph();
    bulkLoaderGraph.compute(SparkGraphComputer.class).persist(GraphComputer.Persist.NOTHING).workers(1)
            .program(BulkLoaderVertexProgram.build().userSuppliedIds(true).writeGraph(writeConfiguration)
                    .create(bulkLoaderGraph))
            .submit().get();
    ////
    Spark.create(readConfiguration);
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertEquals(1, Spark.getContext().getPersistentRDDs().size());
    ////
    final Graph graph = TinkerGraph.open();
    final GraphTraversalSource g = graph.traversal();
    graph.io(IoCore.gryo()).readGraph(TestHelper.makeTestDataDirectory(PersistedInputOutputRDDTest.class)
            + "testBulkLoaderVertexProgramChainWithInputOutputHelperMapping.kryo");
    assertEquals(6l, g.V().count().next().longValue());
    assertEquals(6l, g.E().count().next().longValue());
    assertEquals("marko", g.V().has("name", "marko").values("name").next());
    assertEquals(6l, g.V().values(PageRankVertexProgram.PAGE_RANK).count().next().longValue());
    ////
    Spark.close();
}

From source file:org.apache.tinkerpop.gremlin.spark.structure.io.PersistedInputOutputRDDTest.java

@Test
public void testComplexChain() throws Exception {
    Spark.create("local[4]");

    final String rddName = TestHelper.makeTestDataDirectory(PersistedInputOutputRDDTest.class,
            "testComplexChain", "graphRDD");
    final String rddName2 = TestHelper.makeTestDataDirectory(PersistedInputOutputRDDTest.class,
            "testComplexChain", "graphRDD2");
    final Configuration configuration = super.getBaseConfiguration();
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
            GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            PersistedOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, rddName);
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);

    assertFalse(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertEquals(0, Spark.getContext().getPersistentRDDs().size());
    Graph graph = GraphFactory.open(configuration);
    graph = graph.compute(SparkGraphComputer.class).persist(GraphComputer.Persist.EDGES)
            .program(PageRankVertexProgram.build().iterations(2).create(graph)).submit().get().graph();
    GraphTraversalSource g = graph.traversal();
    assertEquals(6l, g.V().count().next().longValue());
    assertEquals(6l, g.E().count().next().longValue());
    assertEquals(6l, g.V().values(PageRankVertexProgram.PAGE_RANK).count().next().longValue());
    ////
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertEquals(1, Spark.getContext().getPersistentRDDs().size());
    ////
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD,
            PersistedInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION, rddName);
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            PersistedOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, rddName2);
    ////
    graph = GraphFactory.open(configuration);
    graph = graph.compute(SparkGraphComputer.class).persist(GraphComputer.Persist.EDGES)
            .mapReduce(PageRankMapReduce.build().create())
            .program(PageRankVertexProgram.build().iterations(2).create(graph)).submit().get().graph();
    g = graph.traversal();
    assertEquals(6l, g.V().count().next().longValue());
    assertEquals(6l, g.E().count().next().longValue());
    assertEquals(6l, g.V().values(PageRankVertexProgram.PAGE_RANK).count().next().longValue());
    ////
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName2)));
    assertTrue(Spark.hasRDD(Constants.getMemoryLocation(rddName2, PageRankMapReduce.DEFAULT_MEMORY_KEY)));
    assertEquals(3, Spark.getContext().getPersistentRDDs().size());
    ////
    graph = GraphFactory.open(configuration);
    graph = graph.compute(SparkGraphComputer.class).persist(GraphComputer.Persist.VERTEX_PROPERTIES)
            .program(PageRankVertexProgram.build().iterations(2).create(graph)).submit().get().graph();
    g = graph.traversal();
    assertEquals(6l, g.V().count().next().longValue());
    assertEquals(0l, g.E().count().next().longValue());
    assertEquals(6l, g.V().values(PageRankVertexProgram.PAGE_RANK).count().next().longValue());
    ////
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName2)));
    assertFalse(Spark.hasRDD(Constants.getMemoryLocation(rddName2, PageRankMapReduce.DEFAULT_MEMORY_KEY)));
    assertEquals(2, Spark.getContext().getPersistentRDDs().size());
    ////
    graph = GraphFactory.open(configuration);
    graph.compute(SparkGraphComputer.class).persist(GraphComputer.Persist.NOTHING)
            .program(PageRankVertexProgram.build().iterations(2).create(graph)).submit().get().graph();
    assertFalse(Spark.hasRDD(Constants.getGraphLocation(rddName2)));
    g = graph.traversal();
    assertEquals(0l, g.V().count().next().longValue());
    assertEquals(0l, g.E().count().next().longValue());
    assertEquals(0l, g.V().values(PageRankVertexProgram.PAGE_RANK).count().next().longValue());
    ////
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertFalse(Spark.hasRDD(Constants.getGraphLocation(rddName2)));
    assertFalse(Spark.hasRDD(Constants.getMemoryLocation(rddName2, PageRankMapReduce.DEFAULT_MEMORY_KEY)));
    assertEquals(1, Spark.getContext().getPersistentRDDs().size());
    Spark.close();
}

From source file:org.apache.tinkerpop.gremlin.spark.structure.io.SparkContextStorage.java

@Override
public Iterator<Vertex> head(final String location, final Class parserClass, final int totalLines) {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION, location);
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD, parserClass.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT, parserClass.getCanonicalName());
    try {
        if (InputRDD.class.isAssignableFrom(parserClass)) {
            return IteratorUtils.map(((InputRDD) parserClass.getConstructor().newInstance())
                    .readGraphRDD(configuration, new JavaSparkContext(Spark.getContext())).take(totalLines)
                    .iterator(), tuple -> tuple._2().get());
        } else if (InputFormat.class.isAssignableFrom(parserClass)) {
            return IteratorUtils.map(
                    new InputFormatRDD().readGraphRDD(configuration, new JavaSparkContext(Spark.getContext()))
                            .take(totalLines).iterator(),
                    tuple -> tuple._2().get());
        }
    } catch (final Exception e) {
        throw new IllegalArgumentException(e.getMessage(), e);
    }
    throw new IllegalArgumentException(
            "The provided parserClass must be an " + InputFormat.class.getCanonicalName() + " or an "
                    + InputRDD.class.getCanonicalName() + ": " + parserClass.getCanonicalName());
}

From source file:org.apache.tinkerpop.gremlin.spark.structure.io.SparkContextStorage.java

@Override
public <K, V> Iterator<KeyValue<K, V>> head(final String location, final String memoryKey,
        final Class parserClass, final int totalLines) {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION, location);
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD, parserClass.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT, parserClass.getCanonicalName());
    try {
        if (InputRDD.class.isAssignableFrom(parserClass)) {
            return IteratorUtils.map(((InputRDD) parserClass.getConstructor().newInstance())
                    .readMemoryRDD(configuration, memoryKey, new JavaSparkContext(Spark.getContext()))
                    .take(totalLines).iterator(), tuple -> new KeyValue(tuple._1(), tuple._2()));
        } else if (InputFormat.class.isAssignableFrom(parserClass)) {
            return IteratorUtils.map(new InputFormatRDD()
                    .readMemoryRDD(configuration, memoryKey, new JavaSparkContext(Spark.getContext()))
                    .take(totalLines).iterator(), tuple -> new KeyValue(tuple._1(), tuple._2()));
        }
    } catch (final Exception e) {
        throw new IllegalArgumentException(e.getMessage(), e);
    }
    throw new IllegalArgumentException(
            "The provided parserClass must be an " + InputFormat.class.getCanonicalName() + " or an "
                    + InputRDD.class.getCanonicalName() + ": " + parserClass.getCanonicalName());
}

From source file:org.apache.tinkerpop.gremlin.spark.structure.SparkTest.java

@Test
public void testSparkRDDPersistence() throws Exception {
    final String root = TestHelper.makeTestDataDirectory(SparkTest.class, "testSparkRDDPersistence");
    final String prefix = root + File.separator + "graphRDD-";
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    Spark.create(configuration);

    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
            GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            PersistedOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);

    for (int i = 0; i < 10; i++) {
        final String graphRDDName = Constants.getGraphLocation(prefix + i);
        assertEquals(i, Spark.getRDDs().size());
        configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, prefix + i);
        Graph graph = GraphFactory.open(configuration);
        graph.compute(SparkGraphComputer.class).persist(GraphComputer.Persist.VERTEX_PROPERTIES)
                .program(PageRankVertexProgram.build().iterations(1).create(graph)).submit().get();
        assertNotNull(Spark.getRDD(graphRDDName));
        assertEquals(i + 1, Spark.getRDDs().size());
    }

    for (int i = 9; i >= 0; i--) {
        final String graphRDDName = Constants.getGraphLocation(prefix + i);
        assertEquals(i + 1, getPersistedRDDSize());
        assertEquals(i + 1, Spark.getRDDs().size());
        assertTrue(hasPersistedRDD(graphRDDName));
        Spark.removeRDD(graphRDDName);
        assertFalse(hasPersistedRDD(graphRDDName));
    }

    assertEquals(0, getPersistedRDDSize());
    assertEquals(0, Spark.getRDDs().size());
    Spark.close();
}

From source file:org.apache.tinkerpop.gremlin.structure.io.gryo.GryoPoolTest.java

@Test
public void shouldConfigPoolOnConstructionWithCustomIoRegistryConstructor() throws Exception {
    final Configuration conf = new BaseConfiguration();
    conf.setProperty(GryoPool.CONFIG_IO_REGISTRY, IoXIoRegistry.ConstructorBased.class.getName());
    final GryoPool pool = GryoPool.build()
            .ioRegistries(conf.getList(GryoPool.CONFIG_IO_REGISTRY, Collections.emptyList())).create();
    assertReaderWriter(pool.takeWriter(), pool.takeReader(), new IoX("test"), IoX.class);
}

From source file:org.apache.tinkerpop.gremlin.structure.io.gryo.GryoPoolTest.java

@Test
public void shouldConfigPoolOnConstructionWithCustomIoRegistryInstance() throws Exception {
    final Configuration conf = new BaseConfiguration();
    conf.setProperty(GryoPool.CONFIG_IO_REGISTRY, IoXIoRegistry.InstanceBased.class.getName());
    final GryoPool pool = GryoPool.build()
            .ioRegistries(conf.getList(GryoPool.CONFIG_IO_REGISTRY, Collections.emptyList())).create();
    assertReaderWriter(pool.takeWriter(), pool.takeReader(), new IoX("test"), IoX.class);
}

From source file:org.apache.tinkerpop.gremlin.structure.io.gryo.GryoPoolTest.java

@Test
public void shouldConfigPoolOnConstructionWithMultipleCustomIoRegistries() throws Exception {
    final Configuration conf = new BaseConfiguration();
    conf.setProperty(GryoPool.CONFIG_IO_REGISTRY,
            IoXIoRegistry.InstanceBased.class.getName() + "," + IoYIoRegistry.InstanceBased.class.getName());
    final GryoPool pool = GryoPool.build()
            .ioRegistries(conf.getList(GryoPool.CONFIG_IO_REGISTRY, Collections.emptyList())).create();
    assertReaderWriter(pool.takeWriter(), pool.takeReader(), new IoX("test"), IoX.class);
    assertReaderWriter(pool.takeWriter(), pool.takeReader(), new IoY(100, 200), IoY.class);
}

From source file:org.apache.tinkerpop.gremlin.structure.io.gryo.GryoPoolTest.java

@Test(expected = IllegalStateException.class)
public void shouldConfigPoolOnConstructionWithoutBadIoRegistryAndFail() throws Exception {
    final Configuration conf = new BaseConfiguration();
    conf.setProperty(GryoPool.CONFIG_IO_REGISTRY, "some.class.that.does.not.exist");
    GryoPool.build().ioRegistries(conf.getList(GryoPool.CONFIG_IO_REGISTRY, Collections.emptyList())).create();
}