Example usage for org.apache.commons.configuration BaseConfiguration BaseConfiguration

Introduction

This page collects example usages of the no-argument constructor of org.apache.commons.configuration.BaseConfiguration.

Prototype

BaseConfiguration()

Usage
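
A minimal, self-contained sketch of the constructor in use (the property keys and values below are made up for illustration):

import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;

public class BaseConfigurationExample {
    public static void main(final String[] args) {
        // Create an empty, in-memory configuration.
        final Configuration configuration = new BaseConfiguration();

        // Populate it with arbitrary key/value pairs.
        configuration.setProperty("gremlin.graph", "org.example.SomeGraph");
        configuration.setProperty("retry.count", 3);

        // Read the values back, with a fallback for missing keys.
        System.out.println(configuration.getString("gremlin.graph"));
        System.out.println(configuration.getInt("retry.count"));
        System.out.println(configuration.getString("missing.key", "default"));
    }
}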

From source file:org.apache.tinkerpop.gremlin.hadoop.structure.io.InputOutputHelper.java

public static HadoopGraph getOutputGraph(final Configuration configuration,
        final GraphComputer.ResultGraph resultGraph, final GraphComputer.Persist persist) {
    final HadoopConfiguration hadoopConfiguration = new HadoopConfiguration(configuration);
    final BaseConfiguration newConfiguration = new BaseConfiguration();
    newConfiguration.copy(hadoopConfiguration);
    if (resultGraph.equals(GraphComputer.ResultGraph.NEW)) {
        newConfiguration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
                hadoopConfiguration.getOutputLocation());
        if (hadoopConfiguration.containsKey(Constants.GREMLIN_HADOOP_GRAPH_OUTPUT_FORMAT))
            newConfiguration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT, InputOutputHelper
                    .getInputFormat(hadoopConfiguration.getGraphOutputFormat()).getCanonicalName());
        newConfiguration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT_HAS_EDGES,
                persist.equals(GraphComputer.Persist.EDGES));
    }
    newConfiguration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION,
            hadoopConfiguration.getOutputLocation() + "_");
    return HadoopGraph.open(newConfiguration);
}

From source file:org.apache.tinkerpop.gremlin.hadoop.structure.util.ConfUtil.java

public static org.apache.commons.configuration.Configuration makeApacheConfiguration(
        final Configuration hadoopConfiguration) {
    final BaseConfiguration apacheConfiguration = new BaseConfiguration();
    // Disable list-delimiter parsing so values containing commas are copied verbatim.
    apacheConfiguration.setDelimiterParsingDisabled(true);
    hadoopConfiguration.iterator()
            .forEachRemaining(e -> apacheConfiguration.setProperty(e.getKey(), e.getValue()));
    return apacheConfiguration;
}
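
A possible call site, sketched on the assumption that a Hadoop Configuration is already at hand (the property value is only illustrative):

final org.apache.hadoop.conf.Configuration hadoopConf = new org.apache.hadoop.conf.Configuration();
hadoopConf.set("gremlin.graph", "org.apache.tinkerpop.gremlin.hadoop.structure.HadoopGraph");
final org.apache.commons.configuration.Configuration apacheConf = ConfUtil.makeApacheConfiguration(hadoopConf);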

From source file:org.apache.tinkerpop.gremlin.hadoop.structure.util.HadoopHelper.java

public static HadoopGraph getOutputGraph(final HadoopGraph hadoopGraph,
        final GraphComputer.ResultGraph resultGraph, final GraphComputer.Persist persist) {
    final BaseConfiguration newConfiguration = new BaseConfiguration();
    newConfiguration.copy(hadoopGraph.configuration());
    if (resultGraph.equals(GraphComputer.ResultGraph.NEW)) {
        newConfiguration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
                hadoopGraph.configuration().getOutputLocation() + "/" + Constants.HIDDEN_G);
        if (hadoopGraph.configuration().containsKey(Constants.GREMLIN_HADOOP_GRAPH_OUTPUT_FORMAT))
            newConfiguration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT, InputOutputHelper
                    .getInputFormat(hadoopGraph.configuration().getGraphOutputFormat()).getCanonicalName());
        newConfiguration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION,
                hadoopGraph.configuration().getOutputLocation() + "_");
        newConfiguration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT_HAS_EDGES,
                persist.equals(GraphComputer.Persist.EDGES));
    } else {
        newConfiguration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION,
                hadoopGraph.configuration().getOutputLocation() + "_");
    }
    return HadoopGraph.open(newConfiguration);
}

From source file:org.apache.tinkerpop.gremlin.neo4j.structure.Neo4jGraph.java

/**
 * Construct a Neo4jGraph instance by specifying the directory in which to create the database.
 */
public static Neo4jGraph open(final String directory) {
    final Configuration config = new BaseConfiguration();
    config.setProperty(CONFIG_DIRECTORY, directory);
    return open(config);
}
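
A call to this factory method might look like the following; the directory path is only a placeholder:

final Neo4jGraph graph = Neo4jGraph.open("/tmp/neo4j-example");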

From source file:org.apache.tinkerpop.gremlin.process.computer.bulkloading.BulkLoaderVertexProgram.java

@Override
public void loadState(final Graph graph, final Configuration config) {
    configuration = new BaseConfiguration();
    if (config != null) {
        ConfigurationUtils.copy(config, configuration);
    }
    intermediateBatchSize = configuration.getLong(INTERMEDIATE_BATCH_SIZE_CFG_KEY, 0L);
    elementComputeKeys.add(VertexComputeKey.of(DEFAULT_BULK_LOADER_VERTEX_ID, true));
    bulkLoader = createBulkLoader();
}

From source file:org.apache.tinkerpop.gremlin.process.computer.bulkloading.BulkLoaderVertexProgramTest.java

private Configuration getWriteGraphConfiguration() {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty(Graph.GRAPH, "org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerGraph");
    configuration.setProperty("gremlin.tinkergraph.graphLocation", TINKERGRAPH_LOCATION);
    configuration.setProperty("gremlin.tinkergraph.graphFormat", "gryo");
    return configuration;
}

From source file:org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategy.java

/**
 * Get the configuration representation of this strategy.
 * This is useful for converting a strategy into a serialized form.
 *
 * @return the configuration used to create this strategy
 */
public default Configuration getConfiguration() {
    return new BaseConfiguration();
}
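
The default implementation above returns an empty configuration; a concrete strategy would typically override it to expose its own settings. The class name and property key below are hypothetical and only illustrate the pattern, not any actual TinkerPop strategy:

import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;

// Hypothetical strategy-like class whose single setting is exposed via getConfiguration().
public class MaxDepthStrategy {

    private final int maxDepth;

    public MaxDepthStrategy(final int maxDepth) {
        this.maxDepth = maxDepth;
    }

    // Serialize the strategy's state into a fresh BaseConfiguration.
    public Configuration getConfiguration() {
        final Configuration configuration = new BaseConfiguration();
        configuration.setProperty("maxDepth", this.maxDepth);
        return configuration;
    }
}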

From source file:org.apache.tinkerpop.gremlin.spark.AbstractSparkTest.java

protected Configuration getBaseConfiguration() {
    final BaseConfiguration configuration = new BaseConfiguration();
    configuration.setDelimiterParsingDisabled(true);
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty(Constants.SPARK_SERIALIZER, GryoSerializer.class.getCanonicalName());
    configuration.setProperty("spark.kryo.registrationRequired", true);
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    return configuration;
}

From source file:org.apache.tinkerpop.gremlin.spark.process.computer.groovy.plugin.SparkGremlinPluginTest.java

@Test
public void shouldSupportBasicRDDOperations() throws Exception {
    final String root = TestHelper.makeTestDataDirectory(SparkGremlinPluginTest.class,
            "shouldSupportBasicRDDOperations");
    final String rddName1 = TestHelper.makeTestDataDirectory(SparkGremlinPluginTest.class,
            "shouldSupportBasicRDDOperations", "graph-1");
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
            GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            PersistedOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, rddName1);
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);
    Graph graph = GraphFactory.open(configuration);

    Spark.create("local[4]");

    assertEquals(0, ((List<String>) this.console.eval("spark.ls()")).size());

    this.console.addBinding("graph", graph);
    this.console.eval(
            "graph.compute(SparkGraphComputer).program(PageRankVertexProgram.build().iterations(1).create()).submit().get()");
    assertEquals(1, ((List<String>) this.console.eval("spark.ls()")).size());
    assertEquals(rddName1 + " [Memory Deserialized 1x Replicated]",
            ((List<String>) this.console.eval("spark.ls()")).get(0));

    final String rddName2 = TestHelper.makeTestDataDirectory(SparkGremlinPluginTest.class,
            "shouldSupportBasicRDDOperations", "graph-2");
    this.console.eval("graph.configuration().setProperty('" + Constants.GREMLIN_HADOOP_OUTPUT_LOCATION + "','"
            + rddName2 + "')");
    this.console.eval(
            "graph.compute(SparkGraphComputer).program(PageRankVertexProgram.build().iterations(1).create()).submit().get()");
    assertEquals(2, ((List<String>) this.console.eval("spark.ls()")).size());
    assertTrue(((List<String>) this.console.eval("spark.ls()"))
            .contains(rddName2 + " [Memory Deserialized 1x Replicated]"));

    this.console.eval("spark.rm('" + rddName2 + "')");
    assertEquals(1, ((List<String>) this.console.eval("spark.ls()")).size());
    assertTrue(((List<String>) this.console.eval("spark.ls()"))
            .contains(rddName1 + " [Memory Deserialized 1x Replicated]"));

    assertEquals(6, ((List<Object>) this.console.eval("spark.head('" + rddName1 + "')")).size());

    this.console.eval("spark.rm('" + root + "graph-*')");
    assertEquals(0, ((List<String>) this.console.eval("spark.ls()")).size());

    this.console.eval("graph.configuration().setProperty('" + Constants.GREMLIN_HADOOP_OUTPUT_LOCATION + "','"
            + rddName1 + "')");
    this.console.eval(
            "graph.compute(SparkGraphComputer).program(PageRankVertexProgram.build().iterations(1).create()).submit().get()");

    this.console.eval("graph.configuration().setProperty('" + Constants.GREMLIN_HADOOP_OUTPUT_LOCATION + "','"
            + rddName2 + "')");
    this.console.eval(
            "graph.compute(SparkGraphComputer).program(PageRankVertexProgram.build().iterations(1).create()).submit().get()");

    final String rddName3 = TestHelper.makeTestDataDirectory(SparkGremlinPluginTest.class,
            "shouldSupportBasicRDDOperations", "x");
    this.console.eval("graph.configuration().setProperty('" + Constants.GREMLIN_HADOOP_OUTPUT_LOCATION + "','"
            + rddName3 + "')");
    this.console.eval(
            "graph.compute(SparkGraphComputer).program(PageRankVertexProgram.build().iterations(1).create()).submit().get()");

    assertEquals(3, ((List<String>) this.console.eval("spark.ls()")).size());
    this.console.eval("spark.rm('" + root + "graph-*')");
    assertEquals(1, ((List<String>) this.console.eval("spark.ls()")).size());
    this.console.eval("spark.rm('*')");
    assertEquals(0, ((List<String>) this.console.eval("spark.ls()")).size());

}

From source file:org.apache.tinkerpop.gremlin.spark.process.computer.io.InputOutputRDDTest.java

@Test
public void shouldReadFromWriteToArbitraryRDD() throws Exception {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD,
            ExampleInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            ExampleOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, "target/test-output");
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    Graph graph = GraphFactory.open(configuration);
    graph.compute(SparkGraphComputer.class).result(GraphComputer.ResultGraph.NEW)
            .persist(
                    GraphComputer.Persist.EDGES)
            .program(TraversalVertexProgram.build()
                    .traversal(
                            GraphTraversalSource.build()
                                    .engine(ComputerTraversalEngine.build().computer(SparkGraphComputer.class)),
                            "gremlin-groovy", "g.V()")
                    .create(graph))
            .submit().get();
}