Example usage for org.apache.hadoop.conf.Configuration.setBoolean


Introduction

On this page you can find usage examples for org.apache.hadoop.conf.Configuration.setBoolean.

Prototype

public void setBoolean(String name, boolean value) 

Document

Set the value of the name property to a boolean.
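
Before the full examples, here is a minimal, self-contained sketch (not taken from any of the source files below) that pairs setBoolean with its companion getBoolean; the property name "mapreduce.map.speculative" is just an illustrative choice:

import org.apache.hadoop.conf.Configuration;

public class SetBooleanExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Set the value of the "mapreduce.map.speculative" property to a boolean.
        conf.setBoolean("mapreduce.map.speculative", false);
        // getBoolean reads it back; the second argument is the default returned
        // when the property is unset.
        boolean speculative = conf.getBoolean("mapreduce.map.speculative", true);
        System.out.println("mapreduce.map.speculative = " + speculative); // prints false
    }
}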

Usage

From source file:RunPageRankSchimmy.java

License:Apache License

private float phase1(String path, int i, int j, int n, boolean useCombiner, boolean useInmapCombiner,
        boolean useRange) throws Exception {
    Configuration conf = getConf();

    String in = path + "/iter" + FORMAT.format(i);
    String out = path + "/iter" + FORMAT.format(j) + "t";
    String outm = out + "-mass";

    FileSystem fs = FileSystem.get(conf);

    // We need to actually count the number of part files to get the number
    // of partitions (because the directory might contain _log).
    int numPartitions = 0;
    for (FileStatus s : fs.listStatus(new Path(in))) {
        if (s.getPath().getName().contains("part-")) {
            numPartitions++;
        }
    }

    conf.setInt("NodeCount", n);

    Partitioner<IntWritable, Writable> p = null;

    if (useRange) {
        p = new RangePartitioner();
        ((Configurable) p).setConf(conf);
    } else {
        p = new HashPartitioner<IntWritable, Writable>();
    }

    // This is really annoying: the mapping between the partition numbers on
    // disk (i.e., part-XXXX) and what partition the file contains (i.e.,
    // key.hash % #reducer) is arbitrary... so this means that we need to
    // open up each partition, peek inside to find out.
    IntWritable key = new IntWritable();
    PageRankNode value = new PageRankNode();
    FileStatus[] status = fs.listStatus(new Path(in));

    StringBuilder sb = new StringBuilder();

    for (FileStatus f : status) {
        if (!f.getPath().getName().contains("part-")) {
            continue;
        }

        SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(f.getPath()));

        reader.next(key, value);
        int np = p.getPartition(key, value, numPartitions);
        reader.close();

        LOG.info(f.getPath() + "\t" + np);
        sb.append(np + "=" + f.getPath() + ";");
    }

    LOG.info(sb.toString().trim());

    LOG.info("PageRankSchimmy: iteration " + j + ": Phase1");
    LOG.info(" - input: " + in);
    LOG.info(" - output: " + out);
    LOG.info(" - nodeCnt: " + n);
    LOG.info(" - useCombiner: " + useCombiner);
    LOG.info(" - useInmapCombiner: " + useInmapCombiner);
    LOG.info(" - numPartitions: " + numPartitions);
    LOG.info(" - useRange: " + useRange);
    LOG.info("computed number of partitions: " + numPartitions);

    int numReduceTasks = numPartitions;

    conf.setInt("mapred.min.split.size", 1024 * 1024 * 1024);
    //conf.set("mapred.child.java.opts", "-Xmx2048m");

    conf.set("PageRankMassPath", outm);
    conf.set("BasePath", in);
    conf.set("PartitionMapping", sb.toString().trim());

    conf.setBoolean("mapred.map.tasks.speculative.execution", false);
    conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);

    Job job = Job.getInstance(conf);
    job.setJobName("PageRankSchimmy:iteration" + j + ":Phase1");
    job.setJarByClass(RunPageRankSchimmy.class);

    job.setNumReduceTasks(numReduceTasks);

    FileInputFormat.setInputPaths(job, new Path(in));
    FileOutputFormat.setOutputPath(job, new Path(out));

    job.setInputFormatClass(SequenceFileInputFormat.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);

    job.setMapOutputKeyClass(IntWritable.class);
    job.setMapOutputValueClass(FloatWritable.class);

    job.setOutputKeyClass(IntWritable.class);
    job.setOutputValueClass(PageRankNode.class);

    if (useInmapCombiner) {
        job.setMapperClass(MapWithInMapperCombiningClass.class);
    } else {
        job.setMapperClass(MapClass.class);
    }

    if (useCombiner) {
        job.setCombinerClass(CombineClass.class);
    }

    if (useRange) {
        job.setPartitionerClass(RangePartitioner.class);
    }

    job.setReducerClass(ReduceClass.class);

    fs.delete(new Path(out), true);
    fs.delete(new Path(outm), true);

    long startTime = System.currentTimeMillis();
    job.waitForCompletion(true);
    System.out.println("Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");

    float mass = Float.NEGATIVE_INFINITY;
    for (FileStatus f : fs.listStatus(new Path(outm))) {
        FSDataInputStream fin = fs.open(f.getPath());
        mass = sumLogProbs(mass, fin.readFloat());
        fin.close();
    }

    return mass;
}

From source file:TestFuseDFS.java

License:Apache License

static public void startStuff() {
    try {
        Configuration conf = new Configuration();
        conf.setBoolean("dfs.permissions", false);
        cluster = new MiniDFSCluster(conf, 1, true, null);
        fileSys = (DistributedFileSystem) cluster.getFileSystem();
        assertTrue(fileSys.getFileStatus(new Path("/")).isDir());
        mount(mpoint, fileSys.getUri());
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:RyaDirectExample.java

License:Apache License

public static void main(final String[] args) throws Exception {
    final Configuration conf = getConf();
    conf.set(PrecomputedJoinIndexerConfig.PCJ_STORAGE_TYPE, PrecomputedJoinStorageType.ACCUMULO.name());
    conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);

    log.info("Creating the tables as root.");

    SailRepository repository = null;
    SailRepositoryConnection conn = null;

    try {
        log.info("Connecting to Indexing Sail Repository.");
        final Sail extSail = RyaSailFactory.getInstance(conf);
        repository = new SailRepository(extSail);
        conn = repository.getConnection();

        createPCJ(conf);

        final long start = System.currentTimeMillis();
        log.info("Running SPARQL Example: Add and Delete");
        testAddAndDelete(conn);
        log.info("Running SAIL/SPARQL Example: PCJ Search");
        testPCJSearch(conn);
        log.info("Running SAIL/SPARQL Example: Add and Temporal Search");
        testAddAndTemporalSearchWithPCJ(conn);
        log.info("Running SAIL/SPARQL Example: Add and Free Text Search with PCJ");
        testAddAndFreeTextSearchWithPCJ(conn);
        //         log.info("Running SPARQL Example: Add Point and Geo Search with PCJ");
        ////         testAddPointAndWithinSearchWithPCJ(conn);
        //         log.info("Running SPARQL Example: Temporal, Freetext, and Geo Search");
        //         testTemporalFreeGeoSearch(conn);
        //         log.info("Running SPARQL Example: Geo, Freetext, and PCJ Search");
        //         testGeoFreetextWithPCJSearch(conn);
        log.info("Running SPARQL Example: Delete Temporal Data");
        testDeleteTemporalData(conn);
        log.info("Running SPARQL Example: Delete Free Text Data");
        testDeleteFreeTextData(conn);
        //         log.info("Running SPARQL Example: Delete Geo Data");
        //         testDeleteGeoData(conn);

        log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
    } finally {
        log.info("Shutting down");
        closeQuietly(conn);
        closeQuietly(repository);
    }
}

From source file:WordCountSplitTest.java

License:Apache License

private final static void test(boolean use_shards, boolean use_chunks, Boolean slaveok) throws Exception {
    did_start = false;
    final Configuration conf = new Configuration();
    MongoConfigUtil.setInputURI(conf, "mongodb://localhost:30000/test.lines");
    conf.setBoolean(MongoConfigUtil.SPLITS_USE_SHARDS, use_shards);
    conf.setBoolean(MongoConfigUtil.SPLITS_USE_CHUNKS, use_chunks);
    String output_table = null;
    if (use_chunks) {
        if (use_shards)
            output_table = "with_shards_and_chunks";
        else
            output_table = "with_chunks";
    } else {
        if (use_shards)
            output_table = "with_shards";
        else
            output_table = "no_splits";
    }
    if (slaveok != null) {
        output_table += "_" + slaveok;
    }
    MongoConfigUtil.setOutputURI(conf, "mongodb://localhost:30000/test." + output_table);
    System.out.println("Conf: " + conf);

    final Job job = new Job(conf, "word count " + output_table);

    job.setJarByClass(WordCountSplitTest.class);

    job.setMapperClass(TokenizerMapper.class);

    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    job.setInputFormatClass(MongoInputFormat.class);
    job.setOutputFormatClass(MongoOutputFormat.class);

    final long start = System.currentTimeMillis();
    System.out.println(" ----------------------- running test " + output_table + " --------------------");
    try {
        boolean result = job.waitForCompletion(true);
        System.out.println("job.waitForCompletion( true ) returned " + result);
    } catch (Exception e) {
        System.out.println("job.waitForCompletion( true ) threw Exception");
        e.printStackTrace();
    }
    final long end = System.currentTimeMillis();
    final float seconds = ((float) (end - start)) / 1000;
    java.text.NumberFormat nf = java.text.NumberFormat.getInstance();
    nf.setMaximumFractionDigits(3);
    System.out.println("finished run in " + nf.format(seconds) + " seconds");

    com.mongodb.Mongo m = new com.mongodb.Mongo(
            new com.mongodb.MongoURI("mongodb://localhost:30000/?slaveok=true"));
    com.mongodb.DB db = m.getDB("test");
    com.mongodb.DBCollection coll = db.getCollection(output_table);
    com.mongodb.BasicDBObject query = new com.mongodb.BasicDBObject();
    query.put("_id", "the");
    com.mongodb.DBCursor cur = coll.find(query);
    if (!cur.hasNext())
        System.out.println("FAILURE: could not find count of \'the\'");
    else
        System.out.println("'the' count: " + cur.next());

    //        if (! result)
    //           System.exit(  1 );
}

From source file:InferenceExamples.java

License:Apache License

public static void main(final String[] args) throws Exception {
    if (IS_DETAILED_LOGGING_ENABLED) {
        setupLogging();
    }
    final Configuration conf = getConf();
    conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);

    SailRepository repository = null;
    SailRepositoryConnection conn = null;
    try {
        log.info("Connecting to Indexing Sail Repository.");
        final Sail sail = RyaSailFactory.getInstance(conf);
        repository = new SailRepository(sail);
        conn = repository.getConnection();

        final long start = System.currentTimeMillis();

        testInfer(conn, sail);
        testPropertyChainInference(conn, sail);
        testPropertyChainInferenceAltRepresentation(conn, sail);
        testSomeValuesFromInference(conn, sail);
        testAllValuesFromInference(conn, sail);
        testIntersectionOfInference(conn, sail);
        testOneOfInference(conn, sail);

        log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
    } finally {
        log.info("Shutting down");
        closeQuietly(conn);
        closeQuietly(repository);
        MongoConnectorFactory.closeMongoClient();
    }
}

From source file:EntityDirectExample.java

License:Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = getConf();
    conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);

    log.info("Creating the tables as root.");
    SailRepository repository = null;
    SailRepositoryConnection conn = null;

    try {
        log.info("Connecting to Indexing Sail Repository.");

        Sail extSail = RyaSailFactory.getInstance(conf);
        repository = new SailRepository(extSail);
        repository.initialize();
        conn = repository.getConnection();

        log.info("Running SPARQL Example: Add and Delete");
        testAddAndDelete(conn);
        log.info("Running SAIL/SPARQL Example: Add and Temporal Search");
        testAddAndTemporalSearchWithPCJ(conn);

    } finally {
        log.info("Shutting down");
        closeQuietly(conn);
        closeQuietly(repository);
    }
}

From source file:MongoRyaDirectExample.java

License:Apache License

public static void main(final String[] args) throws Exception {
    if (IS_DETAILED_LOGGING_ENABLED) {
        setupLogging();
    }
    final Configuration conf = getConf();
    conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);

    SailRepository repository = null;
    SailRepositoryConnection conn = null;
    try {
        log.info("Connecting to Indexing Sail Repository.");
        final Sail sail = RyaSailFactory.getInstance(conf);
        repository = new SailRepository(sail);
        conn = repository.getConnection();

        final long start = System.currentTimeMillis();
        log.info("Running SPARQL Example: Add and Delete");
        //            testAddPointAndWithinSearch(conn);
        //  to test out inference, set inference to true in the conf

        log.info("Running Jena Sesame Reasoning with Rules Example");
        testJenaSesameReasoningWithRules(conn);

        testPelletExamples(null);

        log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
    } catch (final Exception e) {
        log.error("Encountered error running MongoDB example", e);
    } finally {
        log.info("Shutting down");
        closeQuietly(conn);
        closeQuietly(repository);
    }
}

From source file:at.illecker.hama.hybrid.examples.hellohybrid.HelloHybridBSP.java

License:Apache License

public static void main(String[] args) throws InterruptedException, IOException, ClassNotFoundException {

    Configuration conf = new HamaConfiguration();

    if (args.length > 0) {
        if (args.length == 1) {
            conf.setInt("bsp.peers.num", Integer.parseInt(args[0]));
        } else {
            System.out.println("Wrong argument size!");
            System.out.println("    Argument1=numBspTask");
            return;
        }
    } else {
        // BSPJobClient jobClient = new BSPJobClient(conf);
        // ClusterStatus cluster = jobClient.getClusterStatus(true);
        // job.setNumBspTask(cluster.getMaxTasks());

        conf.setInt("bsp.peers.num", 2); // 1 CPU and 1 GPU
    }
    // Enable one GPU task
    conf.setInt("bsp.peers.gpu.num", 1);
    conf.setBoolean("hama.pipes.logging", true);

    LOG.info("NumBspTask: " + conf.getInt("bsp.peers.num", 0));
    LOG.info("NumBspGpuTask: " + conf.getInt("bsp.peers.gpu.num", 0));
    LOG.info("bsp.tasks.maximum: " + conf.get("bsp.tasks.maximum"));
    LOG.info("inputPath: " + CONF_INPUT_DIR);
    LOG.info("outputPath: " + CONF_OUTPUT_DIR);

    Path example = new Path(CONF_INPUT_DIR.getParent(), "example.seq");
    conf.set(CONF_EXAMPLE_PATH, example.toString());
    LOG.info("exampleFile: " + example.toString());

    prepareInput(conf, CONF_INPUT_DIR, example, CONF_N);

    BSPJob job = createHelloHybridBSPConf(conf, CONF_INPUT_DIR, CONF_OUTPUT_DIR);

    long startTime = System.currentTimeMillis();
    if (job.waitForCompletion(true)) {
        LOG.info("Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");

        // Print input files
        // printOutput(job, CONF_INPUT_DIR);
        // printOutput(job, example);

        // Print output
        printOutput(job, FileOutputFormat.getOutputPath(job));
    }
}

From source file:at.illecker.hama.hybrid.examples.kmeans.KMeansHybridBSP.java

License:Apache License

public static void main(String[] args) throws Exception {

    // Defaults
    int numBspTask = 1;
    int numGpuBspTask = 1;
    int blockSize = BLOCK_SIZE;
    int gridSize = GRID_SIZE;
    long n = 10; // input vectors
    int k = 3; // start vectors
    int vectorDimension = 2;
    int maxIteration = 10;
    boolean useTestExampleInput = false;
    boolean isDebugging = false;
    boolean timeMeasurement = false;
    int GPUPercentage = 80;

    Configuration conf = new HamaConfiguration();
    FileSystem fs = FileSystem.get(conf);

    // Set numBspTask to maxTasks
    // BSPJobClient jobClient = new BSPJobClient(conf);
    // ClusterStatus cluster = jobClient.getClusterStatus(true);
    // numBspTask = cluster.getMaxTasks();

    if (args.length > 0) {
        if (args.length == 12) {
            numBspTask = Integer.parseInt(args[0]);
            numGpuBspTask = Integer.parseInt(args[1]);
            blockSize = Integer.parseInt(args[2]);
            gridSize = Integer.parseInt(args[3]);
            n = Long.parseLong(args[4]);
            k = Integer.parseInt(args[5]);
            vectorDimension = Integer.parseInt(args[6]);
            maxIteration = Integer.parseInt(args[7]);
            useTestExampleInput = Boolean.parseBoolean(args[8]);
            GPUPercentage = Integer.parseInt(args[9]);
            isDebugging = Boolean.parseBoolean(args[10]);
            timeMeasurement = Boolean.parseBoolean(args[11]);

        } else {
            System.out.println("Wrong argument size!");
            System.out.println("    Argument1=numBspTask");
            System.out.println("    Argument2=numGpuBspTask");
            System.out.println("    Argument3=blockSize");
            System.out.println("    Argument4=gridSize");
            System.out.println("    Argument5=n | Number of input vectors (" + n + ")");
            System.out.println("    Argument6=k | Number of start vectors (" + k + ")");
            System.out.println(
                    "    Argument7=vectorDimension | Dimension of each vector (" + vectorDimension + ")");
            System.out.println(
                    "    Argument8=maxIterations | Number of maximal iterations (" + maxIteration + ")");
            System.out.println("    Argument9=testExample | Use testExample input (true|false=default)");
            System.out.println("    Argument10=GPUPercentage (percentage of input)");
            System.out.println("    Argument11=isDebugging (true|false=defaul)");
            System.out.println("    Argument12=timeMeasurement (true|false=defaul)");
            return;
        }
    }

    // Set config variables
    conf.setBoolean(CONF_DEBUG, isDebugging);
    conf.setBoolean("hama.pipes.logging", false);
    conf.setBoolean(CONF_TIME, timeMeasurement);

    // Set CPU tasks
    conf.setInt("bsp.peers.num", numBspTask);
    // Set GPU tasks
    conf.setInt("bsp.peers.gpu.num", numGpuBspTask);
    // Set GPU blockSize and gridSize
    conf.set(CONF_BLOCKSIZE, "" + blockSize);
    conf.set(CONF_GRIDSIZE, "" + gridSize);
    // Set maxIterations for KMeans
    conf.setInt(CONF_MAX_ITERATIONS, maxIteration);
    // Set n for KMeans
    conf.setLong(CONF_N, n);
    // Set GPU workload
    conf.setInt(CONF_GPU_PERCENTAGE, GPUPercentage);

    LOG.info("NumBspTask: " + conf.getInt("bsp.peers.num", 0));
    LOG.info("NumGpuBspTask: " + conf.getInt("bsp.peers.gpu.num", 0));
    LOG.info("bsp.tasks.maximum: " + conf.get("bsp.tasks.maximum"));
    LOG.info("GPUPercentage: " + conf.get(CONF_GPU_PERCENTAGE));
    LOG.info("BlockSize: " + conf.get(CONF_BLOCKSIZE));
    LOG.info("GridSize: " + conf.get(CONF_GRIDSIZE));
    LOG.info("isDebugging: " + conf.get(CONF_DEBUG));
    LOG.info("timeMeasurement: " + conf.get(CONF_TIME));
    LOG.info("useTestExampleInput: " + useTestExampleInput);
    LOG.info("inputPath: " + CONF_INPUT_DIR);
    LOG.info("centersPath: " + CONF_CENTER_DIR);
    LOG.info("outputPath: " + CONF_OUTPUT_DIR);
    LOG.info("n: " + n);
    LOG.info("k: " + k);
    LOG.info("vectorDimension: " + vectorDimension);
    LOG.info("maxIteration: " + maxIteration);

    Path centerIn = new Path(CONF_CENTER_DIR, "center_in.seq");
    Path centerOut = new Path(CONF_CENTER_DIR, "center_out.seq");
    conf.set(CONF_CENTER_IN_PATH, centerIn.toString());
    conf.set(CONF_CENTER_OUT_PATH, centerOut.toString());

    // prepare Input
    if (useTestExampleInput) {
        // prepareTestInput(conf, fs, input, centerIn);
        prepareInputData(conf, fs, CONF_INPUT_DIR, centerIn, numBspTask, numGpuBspTask, n, k, vectorDimension,
                null, GPUPercentage);
    } else {
        prepareInputData(conf, fs, CONF_INPUT_DIR, centerIn, numBspTask, numGpuBspTask, n, k, vectorDimension,
                new Random(3337L), GPUPercentage);
    }

    BSPJob job = createKMeansHybridBSPConf(conf, CONF_INPUT_DIR, CONF_OUTPUT_DIR);

    long startTime = System.currentTimeMillis();
    if (job.waitForCompletion(true)) {
        LOG.info("Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");

        if (isDebugging) {
            printFile(conf, fs, centerOut, new PipesVectorWritable(), NullWritable.get());
            printOutput(conf, fs, ".log", new IntWritable(), new PipesVectorWritable());
        }

        if (k < 50) {
            printFile(conf, fs, centerOut, new PipesVectorWritable(), NullWritable.get());
        }
    }
}

From source file:at.illecker.hama.hybrid.examples.matrixmultiplication.MatrixMultiplicationHybridBSP.java

License:Apache License

public static void main(String[] args) throws Exception {

    // Defaults
    int numRowsA = 4;// 1024;
    int numColsA = 4;// 1024;
    int numRowsB = 4;// 1024;
    int numColsB = 4;// 1024;
    boolean isDebugging = true;

    Configuration conf = new HamaConfiguration();
    BSPJobClient jobClient = new BSPJobClient(conf);
    ClusterStatus cluster = jobClient.getClusterStatus(true);

    if (args.length > 0) {
        if (args.length == 6) {
            conf.setInt("bsp.peers.num", Integer.parseInt(args[0]));
            numRowsA = Integer.parseInt(args[1]);
            numColsA = Integer.parseInt(args[2]);
            numRowsB = Integer.parseInt(args[3]);
            numColsB = Integer.parseInt(args[4]);
            isDebugging = Boolean.parseBoolean(args[5]);

        } else {
            System.out.println("Wrong argument size!");
            System.out.println("    Argument1=numBspTask");
            System.out.println("    Argument2=numRowsA | Number of rows of the first input matrix");
            System.out.println("    Argument3=numColsA | Number of columns of the first input matrix");
            System.out.println("    Argument4=numRowsB | Number of rows of the second input matrix");
            System.out.println("    Argument5=numColsB | Number of columns of the second input matrix");
            System.out.println("    Argument6=debug | Enable debugging (true|false)");
            return;
        }
    } else {
        conf.setInt("bsp.peers.num", 1); // cluster.getMaxTasks());
        // Enable one GPU task
        conf.setInt("bsp.peers.gpu.num", 1);
    }

    conf.setBoolean("hama.pipes.logging", isDebugging);
    conf.setBoolean(CONF_DEBUG, isDebugging);
    conf.set(CONF_BLOCKSIZE, "" + BLOCK_SIZE);
    conf.set(CONF_GRIDSIZE, "" + GRID_SIZE);

    LOG.info("NumBspTask: " + conf.getInt("bsp.peers.num", 0));
    LOG.info("NumGpuBspTask: " + conf.getInt("bsp.peers.gpu.num", 0));
    LOG.info("numRowsA: " + numRowsA);
    LOG.info("numColsA: " + numColsA);
    LOG.info("numRowsB: " + numRowsB);
    LOG.info("numColsB: " + numColsB);
    LOG.info("isDebugging: " + isDebugging);
    LOG.info("outputPath: " + OUTPUT_DIR);

    if (numColsA != numRowsB) {
        throw new Exception("Cols of MatrixA != rows of MatrixB! (" + numColsA + "!=" + numRowsB + ")");
    }

    // Create random DistributedRowMatrix
    // use constant seeds to get reproducible results

    // Matrix A
    DistributedRowMatrix.createRandomDistributedRowMatrix(conf, numRowsA, numColsA, new Random(42L),
            MATRIX_A_PATH, false);
    // Matrix B is stored transposed
    DistributedRowMatrix.createRandomDistributedRowMatrix(conf, numRowsB, numColsB, new Random(1337L),
            MATRIX_B_PATH, true);

    // Load DistributedRowMatrix a and b
    DistributedRowMatrix a = new DistributedRowMatrix(MATRIX_A_PATH, OUTPUT_DIR, numRowsA, numColsA);
    a.setConf(conf);

    DistributedRowMatrix b = new DistributedRowMatrix(MATRIX_B_PATH, OUTPUT_DIR, numRowsB, numColsB);
    b.setConf(conf);

    // MatrixMultiplication
    long startTime = System.currentTimeMillis();
    DistributedRowMatrix c = a.multiplyBSP(b, MATRIX_C_PATH);

    LOG.info("MatrixMultiplicationHybrid using Hama finished in "
            + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");

    // Verification

    // Overwrite matrix B, NOT transposed for verification
    DistributedRowMatrix.createRandomDistributedRowMatrix(conf, numRowsB, numColsB, new Random(1337L),
            MATRIX_B_PATH, false);
    b = new DistributedRowMatrix(MATRIX_B_PATH, OUTPUT_DIR, numRowsB, numColsB);
    b.setConf(conf);

    DistributedRowMatrix d = a.multiplyJava(b, MATRIX_D_PATH);

    if (c.verify(d)) {
        System.out.println("Verify PASSED!");
    } else {
        System.out.println("Verify FAILED!");
    }

    if (isDebugging) {
        System.out.println("Matrix A:");
        a.printDistributedRowMatrix();
        System.out.println("Matrix B:");
        b.printDistributedRowMatrix();
        System.out.println("Matrix C:");
        c.printDistributedRowMatrix();
        System.out.println("Matrix D:");
        d.printDistributedRowMatrix();
        printOutput(conf);
    }
}