Example usage for org.apache.spark.launcher.SparkLauncher.SPARK_MASTER


Introduction

This page lists example usages of org.apache.spark.launcher.SparkLauncher.SPARK_MASTER.

Prototype

public static final String SPARK_MASTER


Document

The Spark master.
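
The constant holds the Spark configuration key for the master URL ("spark.master"). Below is a minimal sketch of setting it through SparkLauncher's own builder API; the jar path and main class are placeholders, not taken from the examples on this page.

import org.apache.spark.launcher.SparkLauncher;

public class LaunchWithMaster {
    public static void main(final String[] args) throws Exception {
        final Process spark = new SparkLauncher()
                .setAppResource("/path/to/app.jar")              // assumed jar location
                .setMainClass("com.example.MyApp")               // assumed main class
                .setConf(SparkLauncher.SPARK_MASTER, "local[2]") // run locally with two threads
                .launch();
        spark.waitFor();
    }
}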

Usage

From source file: ai.grakn.graph.internal.computer.GraknSparkComputer.java

License: Open Source License

@Override
public GraphComputer workers(final int workers) {
    super.workers(workers);
    if (this.sparkConfiguration.containsKey(SparkLauncher.SPARK_MASTER)
            && this.sparkConfiguration.getString(SparkLauncher.SPARK_MASTER).startsWith("local")) {
        this.sparkConfiguration.setProperty(SparkLauncher.SPARK_MASTER, "local[" + this.workers + "]");
    }
    this.workersSet = true;
    return this;
}
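
The example above rewrites a local master URL so its thread count matches the requested worker count. The following standalone sketch shows the same pattern against a plain Commons Configuration; the BaseConfiguration and the worker count of 3 are only for illustration.

import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;
import org.apache.spark.launcher.SparkLauncher;

public class LocalMasterRewrite {
    public static void main(final String[] args) {
        final Configuration conf = new BaseConfiguration();
        conf.setProperty(SparkLauncher.SPARK_MASTER, "local");
        final int workers = 3;
        // Only rewrite local masters; a cluster master such as "spark://..." or "yarn" is left untouched.
        if (conf.containsKey(SparkLauncher.SPARK_MASTER)
                && conf.getString(SparkLauncher.SPARK_MASTER).startsWith("local")) {
            conf.setProperty(SparkLauncher.SPARK_MASTER, "local[" + workers + "]");
        }
        System.out.println(conf.getString(SparkLauncher.SPARK_MASTER)); // prints local[3]
    }
}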

From source file: org.apache.tinkerpop.gremlin.spark.process.computer.SparkHadoopGraphProvider.java

License: Apache License

@Override
public Map<String, Object> getBaseConfiguration(final String graphName, final Class<?> test,
        final String testMethodName, final LoadGraphWith.GraphData loadGraphWith) {
    if (this.getClass().equals(SparkHadoopGraphProvider.class) && !SparkHadoopGraphProvider.class
            .getCanonicalName().equals(System.getProperty(PREVIOUS_SPARK_PROVIDER, null))) {
        Spark.close();
        HadoopPools.close();
        KryoShimServiceLoader.close();
        System.setProperty(PREVIOUS_SPARK_PROVIDER, SparkHadoopGraphProvider.class.getCanonicalName());
    }

    final Map<String, Object> config = super.getBaseConfiguration(graphName, test, testMethodName,
            loadGraphWith);
    config.put(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true); // this makes the test suite go really fast

    // toy graph inputRDD does not have corresponding outputRDD so where jobs chain, it fails (failing makes sense)
    if (null != loadGraphWith && !test.equals(ProgramTest.Traversals.class)
            && !test.equals(GroovyProgramTest.Traversals.class) && !test.equals(PageRankTest.Traversals.class)
            && !test.equals(GroovyPageRankTest.Traversals.class)
            && !test.equals(PeerPressureTest.Traversals.class)
            && !test.equals(GroovyPeerPressureTest.Traversals.class)
            && !test.equals(FileSystemStorageCheck.class) && !testMethodName.equals("shouldSupportJobChaining")
            && // GraphComputerTest.shouldSupportJobChaining
            RANDOM.nextBoolean()) {
        config.put(RANDOM.nextBoolean() ? Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD
                : Constants.GREMLIN_HADOOP_GRAPH_READER, ToyGraphInputRDD.class.getCanonicalName());
    }

    // tests persisted RDDs
    if (test.equals(SparkContextStorageCheck.class)) {
        config.put(RANDOM.nextBoolean() ? Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD
                : Constants.GREMLIN_HADOOP_GRAPH_READER, ToyGraphInputRDD.class.getCanonicalName());
        config.put(RANDOM.nextBoolean() ? Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD
                : Constants.GREMLIN_HADOOP_GRAPH_WRITER, PersistedOutputRDD.class.getCanonicalName());
    }

    // sugar plugin causes meta-method issues with a persisted context
    if (test.equals(HadoopGremlinPluginCheck.class)) {
        Spark.close();
        HadoopPools.close();
        KryoShimServiceLoader.close();
        SugarTestHelper.clearRegistry(this);
    }

    config.put(Constants.GREMLIN_HADOOP_DEFAULT_GRAPH_COMPUTER, SparkGraphComputer.class.getCanonicalName());
    config.put(SparkLauncher.SPARK_MASTER, "local[4]");
    config.put(Constants.SPARK_SERIALIZER, KryoSerializer.class.getCanonicalName());
    config.put(Constants.SPARK_KRYO_REGISTRATOR, GryoRegistrator.class.getCanonicalName());
    config.put(Constants.SPARK_KRYO_REGISTRATION_REQUIRED, true);
    return config;
}
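
Because SparkLauncher.SPARK_MASTER is simply the "spark.master" key, the value the test provider puts in its map ("local[4]", a local master with four threads) is the same value a SparkConf would accept. A minimal sketch, with an arbitrary application name chosen for illustration:

import org.apache.spark.SparkConf;
import org.apache.spark.launcher.SparkLauncher;

public class MasterInSparkConf {
    public static void main(final String[] args) {
        final SparkConf sparkConf = new SparkConf()
                .setAppName("example")                        // assumed application name
                .set(SparkLauncher.SPARK_MASTER, "local[4]"); // same key/value as the test configuration
        System.out.println(sparkConf.get(SparkLauncher.SPARK_MASTER));
    }
}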