Example usage for org.apache.commons.configuration BaseConfiguration BaseConfiguration

Introduction

This page collects example usages of the org.apache.commons.configuration BaseConfiguration constructor, drawn from the Apache TinkerPop code base.

Prototype

public BaseConfiguration()
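
Before the TinkerPop examples, here is a minimal, self-contained sketch of the constructor in isolation. The property names ("spark.master", "workers") are illustrative only, not taken from the examples below:

import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;

public class BaseConfigurationExample {
    public static void main(final String[] args) {
        // The no-argument constructor creates an empty, in-memory configuration.
        final Configuration configuration = new BaseConfiguration();
        configuration.setProperty("spark.master", "local[4]");
        configuration.setProperty("workers", 4);
        // Typed getters convert stored values on read.
        System.out.println(configuration.getString("spark.master")); // local[4]
        System.out.println(configuration.getInt("workers"));         // 4
    }
}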

Usage

From source file:org.apache.tinkerpop.gremlin.spark.process.computer.io.InputRDDTest.java
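
This test points a HadoopGraph at a custom ExampleInputRDD via a fresh BaseConfiguration, then asserts traversal results (an age sum and a vertex count) computed with SparkGraphComputer.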

@Test
public void shouldReadFromArbitraryRDD() {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD,
            ExampleInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_OUTPUT_FORMAT,
            GryoOutputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, "target/test-output");
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    ////////
    Graph graph = GraphFactory.open(configuration);
    assertEquals(Double.valueOf(123.0d), graph
            .traversal(GraphTraversalSource.computer(SparkGraphComputer.class)).V().values("age").sum().next());
    assertEquals(Long.valueOf(4L),
            graph.traversal(GraphTraversalSource.computer(SparkGraphComputer.class)).V().count().next());
}

From source file:org.apache.tinkerpop.gremlin.spark.process.computer.io.OutputRDDTest.java
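
Here the BaseConfiguration pairs a GryoInputFormat source with a custom ExampleOutputRDD sink, and a TraversalVertexProgram is submitted through SparkGraphComputer to exercise the write path.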

@Test
public void shouldWriteToArbitraryRDD() throws Exception {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
            GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            ExampleOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, "target/test-output");
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    ////////
    Graph graph = GraphFactory.open(configuration);
    graph.compute(SparkGraphComputer.class).result(GraphComputer.ResultGraph.NEW)
            .persist(
                    GraphComputer.Persist.EDGES)
            .program(TraversalVertexProgram.build()
                    .traversal(
                            GraphTraversalSource.build()
                                    .engine(ComputerTraversalEngine.build().computer(SparkGraphComputer.class)),
                            "gremlin-groovy", "g.V()")
                    .create(graph))
            .submit().get();
}

From source file:org.apache.tinkerpop.gremlin.spark.process.computer.LocalPropertyTest.java
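
This test persists an RDD across two Spark jobs: the BaseConfiguration first writes through PersistedOutputRDD with the Spark context kept alive, then is rewritten to read the persisted RDD back, with spark.jobGroup.id set per run so the status tracker can verify each job group.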

@Test
public void shouldSetThreadLocalProperties() throws Exception {
    final String testName = "ThreadLocalProperties";
    final String rddName = TestHelper.makeTestDataDirectory(LocalPropertyTest.class)
            + UUID.randomUUID().toString();
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
            GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            PersistedOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, rddName);
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);
    configuration.setProperty("spark.jobGroup.id", "22");
    Graph graph = GraphFactory.open(configuration);
    graph.compute(SparkGraphComputer.class).result(GraphComputer.ResultGraph.NEW)
            .persist(GraphComputer.Persist.EDGES)
            .program(TraversalVertexProgram.build()
                    .traversal(graph.traversal().withComputer(g -> g.compute(SparkGraphComputer.class)),
                            "gremlin-groovy", "g.V()")
                    .create(graph))
            .submit().get();
    ////////
    SparkConf sparkConfiguration = new SparkConf();
    sparkConfiguration.setAppName(testName);
    ConfUtil.makeHadoopConfiguration(configuration)
            .forEach(entry -> sparkConfiguration.set(entry.getKey(), entry.getValue()));
    JavaSparkContext sparkContext = new JavaSparkContext(SparkContext.getOrCreate(sparkConfiguration));
    JavaSparkStatusTracker statusTracker = sparkContext.statusTracker();
    assertTrue(statusTracker.getJobIdsForGroup("22").length >= 1);
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    ///////
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD,
            PersistedInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION, rddName);
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD, null);
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, null);
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, false);
    configuration.setProperty("spark.jobGroup.id", "44");
    graph = GraphFactory.open(configuration);
    graph.compute(SparkGraphComputer.class).result(GraphComputer.ResultGraph.NEW)
            .persist(GraphComputer.Persist.NOTHING)
            .program(TraversalVertexProgram.build()
                    .traversal(graph.traversal().withComputer(g -> g.compute(SparkGraphComputer.class)),
                            "gremlin-groovy", "g.V()")
                    .create(graph))
            .submit().get();
    ///////
    assertTrue(statusTracker.getJobIdsForGroup("44").length >= 1);
}

From source file:org.apache.tinkerpop.gremlin.spark.structure.io.gryo.GryoSerializer.java
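
This helper copies every entry of a SparkConf into a new BaseConfiguration; delimiter parsing is disabled first so that comma-containing Spark values are stored as single strings.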

private static Configuration makeApacheConfiguration(final SparkConf sparkConfiguration) {
    final BaseConfiguration apacheConfiguration = new BaseConfiguration();
    apacheConfiguration.setDelimiterParsingDisabled(true);
    for (final Tuple2<String, String> tuple : sparkConfiguration.getAll()) {
        apacheConfiguration.setProperty(tuple._1(), tuple._2());
    }
    return apacheConfiguration;
}

From source file:org.apache.tinkerpop.gremlin.spark.structure.io.InputOutputHelper.java
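
This helper derives the configuration of a result graph: it copies the Hadoop-level output configuration into a fresh BaseConfiguration and, when a Spark OutputRDD was used, rewires the input side to read that RDD back.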

public static HadoopGraph getOutputGraph(final Configuration configuration,
        final GraphComputer.ResultGraph resultGraph, final GraphComputer.Persist persist) {
    try {
        final HadoopConfiguration hadoopConfiguration = new HadoopConfiguration(configuration);
        final BaseConfiguration newConfiguration = new BaseConfiguration();
        newConfiguration.copy(org.apache.tinkerpop.gremlin.hadoop.structure.io.InputOutputHelper
                .getOutputGraph(configuration, resultGraph, persist).configuration());
        if (resultGraph.equals(GraphComputer.ResultGraph.NEW)
                && hadoopConfiguration.containsKey(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD)) {
            newConfiguration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
                    InputRDDFormat.class.getCanonicalName());
            //newConfiguration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_OUTPUT_FORMAT, OutputRDDFormat.class.getCanonicalName());
            newConfiguration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD,
                    InputOutputHelper
                            .getInputFormat((Class) Class.forName(
                                    hadoopConfiguration.getString(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD)))
                            .getCanonicalName());
        }
        return HadoopGraph.open(newConfiguration);
    } catch (final ClassNotFoundException e) {
        throw new IllegalArgumentException(e.getMessage(), e);
    }
}

From source file:org.apache.tinkerpop.gremlin.spark.structure.io.InputOutputRDDTest.java
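
This test combines both custom endpoints in one BaseConfiguration: ExampleInputRDD supplies the vertices and ExampleOutputRDD receives them when the TraversalVertexProgram runs on SparkGraphComputer.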

@Test
public void shouldReadFromWriteToArbitraryRDD() throws Exception {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD,
            ExampleInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            ExampleOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION,
            TestHelper.makeTestDataDirectory(this.getClass(), "shouldReadFromWriteToArbitraryRDD"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    ////////
    Graph graph = GraphFactory.open(configuration);
    graph.compute(SparkGraphComputer.class).result(GraphComputer.ResultGraph.NEW)
            .persist(GraphComputer.Persist.EDGES)
            .program(TraversalVertexProgram.build()
                    .traversal(graph.traversal().withComputer(g -> g.compute(SparkGraphComputer.class)),
                            "gremlin-groovy", "g.V()")
                    .create(graph))
            .submit().get();
}

From source file:org.apache.tinkerpop.gremlin.spark.structure.io.InputRDDTest.java
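
A variant of the earlier read test that uses the GryoSerializer and asserts the same age sum and vertex count through withComputer traversal sources.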

@Test
public void shouldReadFromArbitraryRDD() {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD,
            ExampleInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_OUTPUT_FORMAT,
            GryoOutputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION,
            TestHelper.makeTestDataDirectory(this.getClass(), "shouldReadFromArbitraryRDD"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    ////////
    Graph graph = GraphFactory.open(configuration);
    assertEquals(123L, graph.traversal().withComputer(g -> g.compute(SparkGraphComputer.class)).V()
            .values("age").sum().next());
    assertEquals(Long.valueOf(4L), graph.traversal().withComputer(SparkGraphComputer.class).V().count().next());
}

From source file:org.apache.tinkerpop.gremlin.spark.structure.io.InputRDDTest.java
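
This test shows that a graph backed by an InputRDD (wrapped in InputRDDFormat) can also be queried OLTP-style, directly through graph.traversal() without a GraphComputer.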

@Test
public void shouldSupportHadoopGraphOLTP() {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD,
            ExampleInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
            InputRDDFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_OUTPUT_FORMAT,
            GryoOutputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION,
            TestHelper.makeTestDataDirectory(this.getClass(), "shouldSupportHadoopGraphOLTP"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    ////////
    Graph graph = GraphFactory.open(configuration);
    GraphTraversalSource g = graph.traversal(); // OLTP
    assertEquals("person", g.V().has("age", 29).next().label());
    assertEquals(Long.valueOf(4), g.V().count().next());
    assertEquals(Long.valueOf(0), g.E().count().next());
    assertEquals(Long.valueOf(2), g.V().has("age", P.gt(30)).count().next());
}

From source file:org.apache.tinkerpop.gremlin.spark.structure.io.OutputRDDTest.java
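
A second write-path test, differing from the earlier OutputRDDTest mainly in its use of GryoSerializer and a TestHelper-generated output directory.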

@Test
public void shouldWriteToArbitraryRDD() throws Exception {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
            GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            ExampleOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION,
            TestHelper.makeTestDataDirectory(this.getClass(), "shouldWriteToArbitraryRDD"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    ////////
    Graph graph = GraphFactory.open(configuration);
    graph.compute(SparkGraphComputer.class).result(GraphComputer.ResultGraph.NEW)
            .persist(GraphComputer.Persist.EDGES)
            .program(TraversalVertexProgram.build()
                    .traversal(graph.traversal().withComputer(g -> g.compute(SparkGraphComputer.class)),
                            "gremlin-groovy", "g.V()")
                    .create(graph))
            .submit().get();
}

From source file:org.apache.tinkerpop.gremlin.spark.structure.io.PersistedInputOutputRDDIntegrateTest.java
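
An integration test chaining two vertex programs: PageRank results are persisted via PersistedOutputRDD, read back through PersistedInputRDD, and bulk-loaded into a TinkerGraph described by a second BaseConfiguration.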

@Test
public void testBulkLoaderVertexProgramChain() throws Exception {
    Spark.create("local[4]");
    final String rddName = TestHelper.makeTestDataDirectory(PersistedInputOutputRDDIntegrateTest.class,
            UUID.randomUUID().toString());
    final Configuration readConfiguration = super.getBaseConfiguration();
    readConfiguration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_READER,
            GryoInputFormat.class.getCanonicalName());
    readConfiguration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    readConfiguration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_WRITER,
            PersistedOutputRDD.class.getCanonicalName());
    readConfiguration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, rddName);
    readConfiguration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);
    Graph pageRankGraph = GraphFactory.open(readConfiguration);
    ///////////////
    final Configuration writeConfiguration = new BaseConfiguration();
    writeConfiguration.setProperty(Graph.GRAPH, TinkerGraph.class.getCanonicalName());
    writeConfiguration.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_FORMAT, "gryo");
    writeConfiguration.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_LOCATION,
            TestHelper.makeTestDataDirectory(PersistedInputOutputRDDIntegrateTest.class)
                    + "testBulkLoaderVertexProgramChain.kryo");
    final Graph bulkLoaderGraph = pageRankGraph.compute(SparkGraphComputer.class)
            .persist(GraphComputer.Persist.VERTEX_PROPERTIES)
            .program(PageRankVertexProgram.build().create(pageRankGraph)).submit().get().graph();
    bulkLoaderGraph.compute(SparkGraphComputer.class).persist(GraphComputer.Persist.NOTHING).workers(1)
            .configure(Constants.GREMLIN_HADOOP_GRAPH_READER, PersistedInputRDD.class.getCanonicalName())
            .configure(Constants.GREMLIN_HADOOP_INPUT_LOCATION, rddName)
            .configure(Constants.GREMLIN_HADOOP_GRAPH_WRITER, null)
            .configure(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, null)
            .program(BulkLoaderVertexProgram.build()
                    .userSuppliedIds(true).writeGraph(writeConfiguration).create(bulkLoaderGraph))
            .submit().get();
    ////
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertEquals(1, Spark.getContext().getPersistentRDDs().size());
    ////
    final Graph graph = TinkerGraph.open();
    final GraphTraversalSource g = graph.traversal();
    graph.io(IoCore.gryo())
            .readGraph(TestHelper.makeTestDataDirectory(PersistedInputOutputRDDIntegrateTest.class)
                    + "testBulkLoaderVertexProgramChain.kryo");
    assertEquals(6L, g.V().count().next().longValue());
    assertEquals(0L, g.E().count().next().longValue());
    assertEquals("marko", g.V().has("name", "marko").values("name").next());
    assertEquals(6L, g.V().values(PageRankVertexProgram.PAGE_RANK).count().next().longValue());
    ////
    Spark.close();
}