Example usage for org.apache.commons.configuration Configuration setProperty

Introduction

This page collects example usages of org.apache.commons.configuration Configuration setProperty, drawn from real-world test code.

Prototype

void setProperty(String key, Object value);

Document

Set a property; this will replace any previously set values.
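
To make the replace semantics concrete, the following minimal sketch contrasts setProperty with addProperty (which appends). It uses BaseConfiguration, the in-memory implementation that appears throughout the examples below; the class name SetPropertyDemo is illustrative only.

import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;

public class SetPropertyDemo {
    public static void main(String[] args) {
        final Configuration configuration = new BaseConfiguration();

        // addProperty appends: adding under the same key twice yields a list.
        configuration.addProperty("spark.master", "local[2]");
        configuration.addProperty("spark.master", "local[4]");
        System.out.println(configuration.getList("spark.master")); // [local[2], local[4]]

        // setProperty replaces whatever was previously stored under the key.
        configuration.setProperty("spark.master", "local[4]");
        System.out.println(configuration.getString("spark.master")); // local[4]
    }
}

This replace-on-write behavior is what lets the tests below overwrite keys such as Constants.GREMLIN_HADOOP_INPUT_LOCATION on the same Configuration instance and reopen the graph with GraphFactory.open(configuration) without stale values accumulating.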

Usage

From source file: org.apache.tinkerpop.gremlin.spark.process.computer.LocalPropertyTest.java

@Test
public void shouldSetThreadLocalProperties() throws Exception {
    final String testName = "ThreadLocalProperties";
    final String rddName = TestHelper.makeTestDataDirectory(LocalPropertyTest.class)
            + UUID.randomUUID().toString();
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
            GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            PersistedOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, rddName);
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);
    configuration.setProperty("spark.jobGroup.id", "22");
    Graph graph = GraphFactory.open(configuration);
    graph.compute(SparkGraphComputer.class).result(GraphComputer.ResultGraph.NEW)
            .persist(GraphComputer.Persist.EDGES)
            .program(TraversalVertexProgram.build()
                    .traversal(graph.traversal().withComputer(g -> g.compute(SparkGraphComputer.class)),
                            "gremlin-groovy", "g.V()")
                    .create(graph))
            .submit().get();
    ////////
    SparkConf sparkConfiguration = new SparkConf();
    sparkConfiguration.setAppName(testName);
    ConfUtil.makeHadoopConfiguration(configuration)
            .forEach(entry -> sparkConfiguration.set(entry.getKey(), entry.getValue()));
    JavaSparkContext sparkContext = new JavaSparkContext(SparkContext.getOrCreate(sparkConfiguration));
    JavaSparkStatusTracker statusTracker = sparkContext.statusTracker();
    assertTrue(statusTracker.getJobIdsForGroup("22").length >= 1);
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    ///////
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD,
            PersistedInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION, rddName);
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD, null);
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, null);
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, false);
    configuration.setProperty("spark.jobGroup.id", "44");
    graph = GraphFactory.open(configuration);
    graph.compute(SparkGraphComputer.class).result(GraphComputer.ResultGraph.NEW)
            .persist(GraphComputer.Persist.NOTHING)
            .program(TraversalVertexProgram.build()
                    .traversal(graph.traversal().withComputer(g -> g.compute(SparkGraphComputer.class)),
                            "gremlin-groovy", "g.V()")
                    .create(graph))
            .submit().get();
    ///////
    assertTrue(statusTracker.getJobIdsForGroup("44").length >= 1);
}

From source file: org.apache.tinkerpop.gremlin.spark.process.computer.traversal.strategy.optimization.SparkInterceptorStrategyTest.java

@Test
public void shouldHandleSideEffectsCorrectly() throws Exception {
    final Configuration configuration = getBaseConfiguration();
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_READER, GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_WRITER,
            PersistedOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, TestHelper
            .makeTestDataDirectory(SparkSingleIterationStrategyTest.class, UUID.randomUUID().toString()));
    configuration.setProperty(Constants.GREMLIN_HADOOP_DEFAULT_GRAPH_COMPUTER,
            SparkGraphComputer.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);
    Graph graph = GraphFactory.open(configuration);
    GraphTraversalSource g = graph.traversal().withComputer()
            .withoutStrategies(SparkSingleIterationStrategy.class);
    assertFalse(g.getStrategies().toList().contains(SparkSingleIterationStrategy.instance()));
    assertFalse(
            g.V().count().explain().toString().contains(SparkSingleIterationStrategy.class.getSimpleName()));
    assertTrue(g.getStrategies().toList().contains(SparkInterceptorStrategy.instance()));
    assertTrue(g.V().count().explain().toString().contains(SparkInterceptorStrategy.class.getSimpleName()));
    /// groupCount(m)-test
    Traversal.Admin<Vertex, Long> traversal = g.V().groupCount("m").by(T.label).count().asAdmin();
    test(SparkStarBarrierInterceptor.class, 6l, traversal);
    assertEquals(1, traversal.getSideEffects().keys().size());
    assertTrue(traversal.getSideEffects().exists("m"));
    assertTrue(traversal.getSideEffects().keys().contains("m"));
    final Map<String, Long> map = traversal.getSideEffects().get("m");
    assertEquals(2, map.size());
    assertEquals(2, map.get("software").intValue());
    assertEquals(4, map.get("person").intValue());
}

From source file: org.apache.tinkerpop.gremlin.spark.process.computer.traversal.strategy.optimization.SparkInterceptorStrategyTest.java

@Test
public void shouldSuccessfullyEvaluateInterceptedTraversals() throws Exception {
    final Configuration configuration = getBaseConfiguration();
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_READER, GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_WRITER,
            PersistedOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, TestHelper
            .makeTestDataDirectory(SparkSingleIterationStrategyTest.class, UUID.randomUUID().toString()));
    configuration.setProperty(Constants.GREMLIN_HADOOP_DEFAULT_GRAPH_COMPUTER,
            SparkGraphComputer.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);
    Graph graph = GraphFactory.open(configuration);
    GraphTraversalSource g = graph.traversal().withComputer()
            .withoutStrategies(SparkSingleIterationStrategy.class);
    assertFalse(g.getStrategies().toList().contains(SparkSingleIterationStrategy.instance()));
    assertFalse(
            g.V().count().explain().toString().contains(SparkSingleIterationStrategy.class.getSimpleName()));
    assertTrue(g.getStrategies().toList().contains(SparkInterceptorStrategy.instance()));
    assertTrue(g.V().count().explain().toString().contains(SparkInterceptorStrategy.class.getSimpleName()));
    /// SparkCountInterceptor matches
    test(SparkStarBarrierInterceptor.class, 6l, g.V().count());
    test(SparkStarBarrierInterceptor.class, 2l, g.V().hasLabel("software").count());
    test(SparkStarBarrierInterceptor.class, 2l, g.V().hasLabel("person").has("age", P.gt(30)).count());
    test(SparkStarBarrierInterceptor.class, 2l,
            g.V().hasLabel("person").has("age", P.gt(30)).values("name").count());
    test(SparkStarBarrierInterceptor.class, 2l,
            g.V().hasLabel("person").has("age", P.gt(30)).properties("name").count());
    test(SparkStarBarrierInterceptor.class, 4l,
            g.V().hasLabel("person").has("age", P.gt(30)).properties("name", "age").count());
    test(SparkStarBarrierInterceptor.class, 3l, g.V().hasLabel("person").has("age", P.gt(30)).out().count());
    test(SparkStarBarrierInterceptor.class, 0l,
            g.V().hasLabel("person").has("age", P.gt(30)).out("knows").count());
    test(SparkStarBarrierInterceptor.class, 3l,
            g.V().has(T.label, P.not(P.within("robot", "android")).and(P.within("person", "software")))
                    .hasLabel("person").has("age", P.gt(30)).out("created").count());
    test(SparkStarBarrierInterceptor.class, 3l, g.V(1).out().count());
    test(SparkStarBarrierInterceptor.class, 2l, g.V(1).out("knows").count());
    test(SparkStarBarrierInterceptor.class, 3l, g.V(1).out("knows", "created").count());
    test(SparkStarBarrierInterceptor.class, 5l, g.V(1, 4).out("knows", "created").count());
    test(SparkStarBarrierInterceptor.class, 1l, g.V(2).in("knows").count());
    test(SparkStarBarrierInterceptor.class, 0l, g.V(6).has("name", "peter").in().count());
    test(SparkStarBarrierInterceptor.class, 6l, g.V().as("a").values("name").as("b").count());
    test(SparkStarBarrierInterceptor.class, 6l, g.V().as("a").count());
    test(SparkStarBarrierInterceptor.class, 1l,
            g.V().has("name", "marko").as("a").values("name").as("b").count());
    test(SparkStarBarrierInterceptor.class, 4l,
            g.V().has(T.label, P.not(P.within("robot", "android")).and(P.within("person", "software")))
                    .hasLabel("person").has("age").out("created").count());
    test(SparkStarBarrierInterceptor.class, 123l, g.V().has("age").values("age").sum());
    test(SparkStarBarrierInterceptor.class, 67l, g.V().has("age").has("age", P.gt(30)).values("age").sum());
    test(SparkStarBarrierInterceptor.class, 27, g.V().hasLabel("person").values("age").min());
    test(SparkStarBarrierInterceptor.class, 35, g.V().hasLabel("person").values("age").max());
    test(SparkStarBarrierInterceptor.class, new HashMap<String, Long>() {
        {
            put("software", 2l);
            put("person", 4l);
        }
    }, g.V().<String>groupCount().by(T.label));
    test(SparkStarBarrierInterceptor.class, Collections.singletonMap("person", 2l),
            g.V().has("person", "age", P.lt(30)).<String>groupCount().by(T.label));
    test(SparkStarBarrierInterceptor.class, new HashMap<String, Long>() {
        {
            put("software", 2l);
            put("person", 4l);
        }
    }, g.V().<String, Long>group().by(T.label).by(__.count()));
    test(SparkStarBarrierInterceptor.class, 123l,
            g.V().hasLabel("person").values("age").fold(0l, Operator.sum));
    /// No interceptor matches
    test(2l, g.V().out().out().count());
    test(6l, g.E().count());
    test(2l, g.V().out().out().count());
    test(6l, g.V().out().values("name").count());
    test(2l, g.V().out("knows").values("name").count());
    test(3l, g.V().in().has("name", "marko").count());
    test(0l, g.V().repeat(__.dedup()).times(2).count());
    test(6l, g.V().dedup().count());
    test(4l, g.V().hasLabel("person").order().by("age").count());
    test(1l, g.V().count().count());
    test(2l, g.V().limit(2).count());
    test(3l, g.V().tail(3).count());
}

From source file: org.apache.tinkerpop.gremlin.spark.process.computer.traversal.strategy.optimization.SparkSingleIterationStrategyTest.java

@Test
public void shouldSuccessfullyEvaluateSingleIterationTraversals() throws Exception {
    final String outputLocation = TestHelper.makeTestDataDirectory(SparkSingleIterationStrategyTest.class,
            UUID.randomUUID().toString());
    Configuration configuration = getBaseConfiguration();
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_READER, GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_WRITER,
            PersistedOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, outputLocation);
    configuration.setProperty(Constants.GREMLIN_HADOOP_DEFAULT_GRAPH_COMPUTER,
            SparkGraphComputer.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);

    /////////// WITHOUT SINGLE-ITERATION STRATEGY LESS SINGLE-PASS OPTIONS ARE AVAILABLE

    Graph graph = GraphFactory.open(configuration);
    GraphTraversalSource g = graph.traversal().withComputer().withoutStrategies(SparkInterceptorStrategy.class,
            MessagePassingReductionStrategy.class);
    assertFalse(g.getStrategies().toList().contains(SparkInterceptorStrategy.instance()));
    assertFalse(g.V().count().explain().getStrategyTraversals().stream()
            .filter(pair -> pair.getValue0() instanceof SparkInterceptorStrategy).findAny().isPresent());
    assertFalse(g.getStrategies().toList().contains(MessagePassingReductionStrategy.instance()));
    assertFalse(g.V().count().explain().getStrategyTraversals().stream()
            .filter(pair -> pair.getValue0() instanceof MessagePassingReductionStrategy).findAny().isPresent());
    assertTrue(g.getStrategies().toList().contains(SparkSingleIterationStrategy.instance()));
    assertTrue(g.V().count().explain().getStrategyTraversals().stream()
            .filter(pair -> pair.getValue0() instanceof SparkSingleIterationStrategy).findAny().isPresent());

    test(true, g.V().limit(10));
    test(true, g.V().values("age").groupCount());
    test(true, g.V().groupCount().by(__.out().count()));
    test(true, g.V().outE());
    test(true, 6L, g.V().count());
    test(true, 6L, g.V().out().count());
    test(true, 6L, g.V().outE().inV().count());
    ////
    test(false, 6L, g.V().local(__.inE()).count());
    test(false, g.V().outE().inV());
    test(false, g.V().both());
    test(false, 12L, g.V().both().count());
    test(false, g.V().out().id());
    test(false, 2L, g.V().out().out().count());
    test(false, 6L, g.V().in().count());
    test(false, 6L, g.V().inE().count());

    /////////// WITH SINGLE-ITERATION STRATEGY MORE SINGLE-PASS OPTIONS ARE AVAILABLE

    graph = GraphFactory.open(configuration);
    g = graph.traversal().withComputer().withoutStrategies(SparkInterceptorStrategy.class)
            .withStrategies(MessagePassingReductionStrategy.instance());
    assertFalse(g.getStrategies().toList().contains(SparkInterceptorStrategy.instance()));
    assertFalse(g.V().count().explain().getStrategyTraversals().stream()
            .filter(pair -> pair.getValue0() instanceof SparkInterceptorStrategy).findAny().isPresent());
    assertTrue(g.getStrategies().toList().contains(MessagePassingReductionStrategy.instance()));
    assertTrue(g.V().count().explain().getStrategyTraversals().stream()
            .filter(pair -> pair.getValue0() instanceof MessagePassingReductionStrategy).findAny().isPresent());
    assertTrue(g.getStrategies().toList().contains(SparkSingleIterationStrategy.instance()));
    assertTrue(g.V().count().explain().getStrategyTraversals().stream()
            .filter(pair -> pair.getValue0() instanceof SparkSingleIterationStrategy).findAny().isPresent());

    test(true, g.V().limit(10));
    test(true, g.V().values("age").groupCount());
    test(true, g.V().groupCount().by(__.out().count()));
    test(true, g.V().outE());
    test(true, 6L, g.V().outE().values("weight").count());
    test(true, 6L, g.V().inE().values("weight").count());
    test(true, 12L, g.V().bothE().values("weight").count());
    test(true, g.V().bothE().values("weight"));
    test(true, g.V().bothE().values("weight").limit(2));
    test(true, 6L, g.V().count());
    test(true, 6L, g.V().id().count());
    test(true, 6L, g.V().identity().outE().identity().count());
    test(true, 6L, g.V().identity().outE().has("weight").count());
    test(true, 6L, g.V().out().count());
    test(true, 6L, g.V().outE().inV().count());
    test(true, 6L, g.V().outE().inV().id().count());
    test(true, 2L, g.V().outE().inV().id().groupCount().select(Column.values).unfold().dedup().count());
    test(true, g.V().out().id());
    test(true, 6L, g.V().outE().valueMap().count());
    test(true, g.V().outE().valueMap());
    test(true, 6L, g.V().inE().valueMap().count());
    test(true, g.V().inE().valueMap());
    test(true, 12L, g.V().bothE().valueMap().count());
    test(true, g.V().bothE().valueMap());
    test(true, 6L, g.V().inE().id().count());
    test(true, 6L, g.V().outE().count());
    test(true, 4L, g.V().outE().inV().id().dedup().count());
    test(true, 4L, g.V().filter(__.in()).count());
    test(true, 6L, g.V().sideEffect(__.in()).count());
    test(true, 6L, g.V().map(__.constant("hello")).count());
    test(true, g.V().groupCount());
    test(true, g.V().groupCount("x"));
    test(true, g.V().groupCount("x").cap("x"));
    test(true, g.V().id().groupCount("x").cap("x"));
    test(true, g.V().outE().groupCount());
    test(true, g.V().outE().groupCount().by("weight"));
    test(true, g.V().inE().id().groupCount());
    test(true, g.V().inE().values("weight").groupCount());
    test(true, 6L, g.V().outE().outV().count());
    test(true, g.V().out().id().groupCount("x"));
    test(true, g.V().inE().values("weight").groupCount("x"));
    test(true, 6L, g.V().in().count());
    test(true, 12L, g.V().both().count());
    test(true, 6L, g.V().flatMap(__.in()).count());
    test(true, 4L, g.V().map(__.in()).count());
    test(true, 6L, g.V().inE().count());
    test(true, 4L, g.V().outE().inV().dedup().count());
    /////
    test(false, 6L, g.V().as("a").outE().inV().as("b").id().dedup("a", "b").by(T.id).count());
    test(false, 6L, g.V().local(__.inE()).count());
    test(false, 4L, g.V().outE().inV().dedup().by("name").count());
    test(false, 6L, g.V().local(__.in()).count());
    test(false, g.V().outE().inV());
    test(false, g.V().both());
    test(false, g.V().outE().inV().dedup());
    test(false, 2L, g.V().out().out().count());
    test(false, 6L, g.V().as("a").map(__.both()).select("a").count());
    test(false, g.V().out().values("name"));
    test(false, g.V().out().properties("name"));
    test(false, g.V().out().valueMap());
    test(false, 6L, g.V().as("a").outE().inV().values("name").as("b").dedup("a", "b").count());
    test(false, 2L, g.V().outE().inV().groupCount().select(Column.values).unfold().dedup().count());
    test(false, g.V().out().groupCount("x"));
    test(false, g.V().out().groupCount("x").cap("x"));
    test(false, 6L, g.V().both().groupCount("x").cap("x").select(Column.keys).unfold().count());
    test(false, g.V().outE().inV().groupCount());
    test(false, g.V().outE().unfold().inV().groupCount());
    test(false, g.V().outE().inV().groupCount().by("name"));
    test(false, g.V().outE().inV().tree());
    test(false, g.V().outE().inV().id().tree());
    test(false, g.V().inE().groupCount());
    test(false, g.V().inE().groupCount().by("weight"));
    test(false, g.V().in().values("name").groupCount());
    test(false, g.V().out().groupCount("x"));
    test(false, g.V().in().groupCount("x"));
    test(false, g.V().both().groupCount("x").cap("x"));
}

From source file: org.apache.tinkerpop.gremlin.spark.structure.io.gryo.GryoSerializerIntegrateTest.java

@Test
public void shouldHaveAllRegisteredGryoSerializerClasses() throws Exception {
    // this is a stress test ensuring that when data spills to disk, is persisted to an RDD, etc., the correct classes are registered with GryoSerializer.
    final TinkerGraph randomGraph = TinkerGraph.open();
    int totalVertices = 200000;
    TestHelper.createRandomGraph(randomGraph, totalVertices, 100);
    final String inputLocation = TestHelper.makeTestDataDirectory(GryoSerializerIntegrateTest.class,
            UUID.randomUUID().toString()) + "/random-graph.kryo";
    randomGraph.io(IoCore.gryo()).writeGraph(inputLocation);
    randomGraph.clear();
    randomGraph.close();

    final String outputLocation = TestHelper.makeTestDataDirectory(GryoSerializerIntegrateTest.class,
            UUID.randomUUID().toString());
    Configuration configuration = getBaseConfiguration();
    configuration.clearProperty(Constants.SPARK_SERIALIZER); // ensure proper default to GryoSerializer
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION, inputLocation);
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
            GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_OUTPUT_FORMAT,
            GryoOutputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, outputLocation);
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, false);
    Graph graph = GraphFactory.open(configuration);
    final GraphTraversal.Admin<Vertex, Map<Vertex, Collection<Vertex>>> traversal = graph.traversal()
            .withComputer(SparkGraphComputer.class).V().group("m").<Map<Vertex, Collection<Vertex>>>cap("m")
            .asAdmin();
    assertTrue(traversal.hasNext());
    assertTrue(traversal.next() == traversal.getSideEffects().get("m"));
    assertFalse(traversal.hasNext());
    assertTrue(traversal.getSideEffects().exists("m"));
    assertTrue(traversal.getSideEffects().get("m") instanceof Map);
    assertEquals(totalVertices, traversal.getSideEffects().<Map>get("m").size());

    configuration = getBaseConfiguration();
    configuration.clearProperty(Constants.SPARK_SERIALIZER); // ensure proper default to GryoSerializer
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION, inputLocation);
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
            GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            PersistedOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_STORAGE_LEVEL, "DISK_ONLY");
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, "persisted-rdd");
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);
    graph = GraphFactory.open(configuration);
    assertEquals(totalVertices,
            graph.compute(SparkGraphComputer.class)
                    .program(PageRankVertexProgram.build().iterations(2).create(graph)).submit().get().graph()
                    .traversal().V().count().next().longValue());

    configuration = getBaseConfiguration();
    configuration.clearProperty(Constants.SPARK_SERIALIZER); // ensure proper default to GryoSerializer
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION, "persisted-rdd");
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD,
            PersistedInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_OUTPUT_FORMAT,
            GryoOutputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, outputLocation);
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);
    graph = GraphFactory.open(configuration);
    assertEquals(totalVertices,
            graph.traversal().withComputer(SparkGraphComputer.class).V().count().next().longValue());

    configuration = getBaseConfiguration();
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION, "persisted-rdd");
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD,
            PersistedInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            PersistedOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, outputLocation);
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_STORAGE_LEVEL, "MEMORY_ONLY"); // this should be ignored as you can't change the persistence level once created
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_STORAGE_LEVEL, "MEMORY_AND_DISK");
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);
    graph = GraphFactory.open(configuration);
    assertEquals(totalVertices,
            graph.traversal().withComputer(SparkGraphComputer.class).V().count().next().longValue());
}

From source file: org.apache.tinkerpop.gremlin.spark.structure.io.InputOutputRDDTest.java

@Test
public void shouldReadFromWriteToArbitraryRDD() throws Exception {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD,
            ExampleInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            ExampleOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION,
            TestHelper.makeTestDataDirectory(this.getClass(), "shouldReadFromWriteToArbitraryRDD"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    ////////
    Graph graph = GraphFactory.open(configuration);
    graph.compute(SparkGraphComputer.class).result(GraphComputer.ResultGraph.NEW)
            .persist(GraphComputer.Persist.EDGES)
            .program(TraversalVertexProgram.build()
                    .traversal(graph.traversal().withComputer(g -> g.compute(SparkGraphComputer.class)),
                            "gremlin-groovy", "g.V()")
                    .create(graph))
            .submit().get();
}

From source file: org.apache.tinkerpop.gremlin.spark.structure.io.InputRDDTest.java

@Test
public void shouldReadFromArbitraryRDD() {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD,
            ExampleInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_OUTPUT_FORMAT,
            GryoOutputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION,
            TestHelper.makeTestDataDirectory(this.getClass(), "shouldReadFromArbitraryRDD"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    ////////
    Graph graph = GraphFactory.open(configuration);
    assertEquals(123l, graph.traversal().withComputer(g -> g.compute(SparkGraphComputer.class)).V()
            .values("age").sum().next());
    assertEquals(Long.valueOf(4l), graph.traversal().withComputer(SparkGraphComputer.class).V().count().next());
}

From source file: org.apache.tinkerpop.gremlin.spark.structure.io.InputRDDTest.java

@Test
public void shouldSupportHadoopGraphOLTP() {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD,
            ExampleInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
            InputRDDFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_OUTPUT_FORMAT,
            GryoOutputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION,
            TestHelper.makeTestDataDirectory(this.getClass(), "shouldSupportHadoopGraphOLTP"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    ////////
    Graph graph = GraphFactory.open(configuration);
    GraphTraversalSource g = graph.traversal(); // OLTP
    assertEquals("person", g.V().has("age", 29).next().label());
    assertEquals(Long.valueOf(4), g.V().count().next());
    assertEquals(Long.valueOf(0), g.E().count().next());
    assertEquals(Long.valueOf(2), g.V().has("age", P.gt(30)).count().next());
}

From source file: org.apache.tinkerpop.gremlin.spark.structure.io.OutputRDDTest.java

@Test
public void shouldWriteToArbitraryRDD() throws Exception {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT,
            GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD,
            ExampleOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION,
            TestHelper.makeTestDataDirectory(this.getClass(), "shouldWriteToArbitraryRDD"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    ////////
    Graph graph = GraphFactory.open(configuration);
    graph.compute(SparkGraphComputer.class).result(GraphComputer.ResultGraph.NEW)
            .persist(GraphComputer.Persist.EDGES)
            .program(TraversalVertexProgram.build()
                    .traversal(graph.traversal().withComputer(g -> g.compute(SparkGraphComputer.class)),
                            "gremlin-groovy", "g.V()")
                    .create(graph))
            .submit().get();
}

From source file: org.apache.tinkerpop.gremlin.spark.structure.io.PersistedInputOutputRDDIntegrateTest.java

@Test
public void shouldNotHaveDanglingPersistedComputeRDDs() throws Exception {
    Spark.create("local[4]");
    final String rddName = TestHelper.makeTestDataDirectory(PersistedInputOutputRDDIntegrateTest.class,
            UUID.randomUUID().toString());
    final Configuration configuration = super.getBaseConfiguration();
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_READER, GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_WRITER, GryoOutputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, rddName);
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);
    Graph graph = GraphFactory.open(configuration);
    ////
    assertEquals(6, graph.traversal().withComputer(Computer.compute(SparkGraphComputer.class)).V().out().count()
            .next().longValue());
    assertFalse(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertEquals(0, Spark.getContext().getPersistentRDDs().size());
    //
    assertEquals(2, graph.traversal().withComputer(Computer.compute(SparkGraphComputer.class)).V().out().out()
            .count().next().longValue());
    assertFalse(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertEquals(0, Spark.getContext().getPersistentRDDs().size());
    ///////
    Spark.close();
}