Example usage for org.apache.hadoop.util Tool setConf

Introduction

This page lists example usages of org.apache.hadoop.util.Tool#setConf, collected from open-source projects.

Prototype

void setConf(Configuration conf);

Document

Set the configuration to be used by this object.
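
A minimal, self-contained sketch of that contract may help before the examples (the class name EchoTool and the printed property are illustrative, not taken from the sources below): a Tool typically extends Configured, which stores the Configuration passed to setConf so that getConf can return it inside run.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;

public class EchoTool extends Configured implements Tool {

    @Override
    public int run(String[] args) throws Exception {
        // getConf() returns whatever Configuration was handed to setConf(...)
        Configuration conf = getConf();
        System.out.println("fs.defaultFS = " + conf.get("fs.defaultFS"));
        return 0;
    }

    public static void main(String[] args) throws Exception {
        Tool tool = new EchoTool();
        tool.setConf(new Configuration()); // must be set before run(...)
        System.exit(tool.run(args));
    }
}

Calling setConf before run is exactly what ToolRunner.run does on the caller's behalf, as several of the examples below illustrate.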

Usage

From source file: com.asakusafw.operation.tools.hadoop.fs.Clean.java

License: Apache License

/**
 * Program entry.
 * @param args arguments
 * @throws Exception if failed to execute command
 */
public static void main(String... args) throws Exception {
    LOG.info("[OT-CLEAN-I00000] Start Hadoop FS cleaning tool");
    long start = System.currentTimeMillis();
    Tool tool = new Clean();
    tool.setConf(new Configuration());
    int exit = tool.run(args); // no generic options
    long end = System.currentTimeMillis();
    LOG.info(MessageFormat.format(
            "[OT-CLEAN-I00999] Finish Hadoop FS cleaning tool (exit-code={0}, elapsed={1}ms)", exit,
            end - start));
    if (exit != 0) {
        System.exit(exit);
    }
}

From source file: com.ery.server.util.ToolRunner.java

License: Apache License

/**
 * Runs the given <code>Tool</code> by {@link Tool#run(String[])}, using the
 * given <code>Configuration</code>, or building one if null. Unlike Hadoop's
 * own <code>ToolRunner</code>, this simplified variant does not parse
 * generic Hadoop options.
 *
 * Sets the <code>Tool</code>'s configuration before invoking it.
 * 
 * @param conf
 *            <code>Configuration</code> for the <code>Tool</code>.
 * @param tool
 *            <code>Tool</code> to run.
 * @param args
 *            command-line arguments to the tool.
 * @return exit code of the {@link Tool#run(String[])} method.
 */
public static int run(Configuration conf, Tool tool, String[] args) throws Exception {
    if (conf == null) {
        conf = new Configuration();
    }
    // set the configuration back, so that Tool can configure itself
    tool.setConf(conf);

    // generic Hadoop options are not parsed here; args are passed through unchanged
    return tool.run(args);
}

From source file: de.tuberlin.dima.aim.exercises.two.BookAndAuthorJoinTest.java

License: Open Source License

void testJoin(Tool bookAndAuthorJoin) throws Exception {
    File authorsFile = getTestTempFile("authors.tsv");
    File booksFile = getTestTempFile("books.tsv");
    File outputDir = getTestTempDir("output");
    outputDir.delete();

    writeLines(authorsFile, readLines("/two/authors.tsv"));
    writeLines(booksFile, readLines("/two/books.tsv"));

    Configuration conf = new Configuration();

    bookAndAuthorJoin.setConf(conf);
    bookAndAuthorJoin.run(new String[] { "--authors", authorsFile.getAbsolutePath(), "--books",
            booksFile.getAbsolutePath(), "--output", outputDir.getAbsolutePath() });

    Multimap<String, Book> booksByAuthors = readBooksByAuthors(new File(outputDir, "part-r-00000"));

    assertTrue(booksByAuthors.containsKey("Charles Bukowski"));
    assertTrue(booksByAuthors.get("Charles Bukowski")
            .contains(new Book("Confessions of a Man Insane Enough to Live with Beasts", 1965)));
    assertTrue(booksByAuthors.get("Charles Bukowski").contains(new Book("Hot Water Music", 1983)));

    assertTrue(booksByAuthors.containsKey("Fyodor Dostoyevsky"));
    assertTrue(booksByAuthors.get("Fyodor Dostoyevsky").contains(new Book("Crime and Punishment", 1866)));
    assertTrue(booksByAuthors.get("Fyodor Dostoyevsky").contains(new Book("The Brothers Karamazov", 1880)));

}

From source file: de.tuberlin.dima.aim3.assignment1.BookAndAuthorJoinTest.java

License: Open Source License

void testJoin(Tool bookAndAuthorJoin, boolean mapOnly) throws Exception {
    File authorsFile = getTestTempFile("authors.tsv");
    File booksFile = getTestTempFile("books.tsv");
    File outputDir = getTestTempDir("output");
    outputDir.delete();

    writeLines(authorsFile, readLines("/assignment1/authors.tsv"));
    writeLines(booksFile, readLines("/assignment1/books.tsv"));

    Configuration conf = new Configuration();

    bookAndAuthorJoin.setConf(conf);
    bookAndAuthorJoin.run(new String[] { "--authors", authorsFile.getAbsolutePath(), "--books",
            booksFile.getAbsolutePath(), "--output", outputDir.getAbsolutePath() });

    String outputFilename = mapOnly ? "part-m-00000" : "part-r-00000";

    Multimap<String, Book> booksByAuthors = readBooksByAuthors(new File(outputDir, outputFilename));

    assertTrue(booksByAuthors.containsKey("Charles Bukowski"));
    assertTrue(booksByAuthors.get("Charles Bukowski")
            .contains(new Book("Confessions of a Man Insane Enough to Live with Beasts", 1965)));
    assertTrue(booksByAuthors.get("Charles Bukowski").contains(new Book("Hot Water Music", 1983)));

    assertTrue(booksByAuthors.containsKey("Fyodor Dostoyevsky"));
    assertTrue(booksByAuthors.get("Fyodor Dostoyevsky").contains(new Book("Crime and Punishment", 1866)));
    assertTrue(booksByAuthors.get("Fyodor Dostoyevsky").contains(new Book("The Brothers Karamazov", 1880)));

}

From source file: eu.scape_project.tb.wc.archd.mapreduce.FileCharacterisation.java

License: Apache License

public static void main(String[] args) throws Exception {
    System.out.println(name);
    long startTime = System.currentTimeMillis();

    Tool tool = new FileCharacterisation();
    tool.setConf(new Configuration(true));
    tool.getConf().set("mapreduce.job.user.classpath.first", "true");
    tool.getConf().set("mapreduce.user.classpath.first", "true");

    for (int i = 0; i < args.length; i++) {
        System.out.println("Arg" + i + ": " + args[i]);
    }

    int res = ToolRunner.run(tool.getConf(), tool, args);

    long elapsedTime = System.currentTimeMillis() - startTime;
    System.out.println("Processing time (sec): " + elapsedTime / 1000F);

    System.exit(res);
}

From source file: eu.scape_project.tb.wc.archd.mapreduce.TikaCharacterisation.java

License: Apache License

public static void main(String[] args) throws Exception {
    System.out.println(name);
    long startTime = System.currentTimeMillis();

    Tool tool = new TikaCharacterisation();
    tool.setConf(new Configuration(true));
    tool.getConf().addResource("jobConfig.xml");

    for (int i = 0; i < args.length; i++) {
        System.out.println("Arg" + i + ": " + args[i]);
    }

    int res = ToolRunner.run(tool.getConf(), tool, args);

    long elapsedTime = System.currentTimeMillis() - startTime;
    System.out.println("Processing time (sec): " + elapsedTime / 1000F);

    System.exit(res);
}

From source file: io.fabric8.hadoop.commands.HadoopCommand.java

License: Apache License

protected void doExecute(List<Object> objects) throws Exception {
    org.apache.hadoop.conf.Configuration conf = getConfiguration();
    Tool run;
    try {
        // prefer a no-arg constructor and inject the configuration via setConf
        Constructor<?> cns = tool.getDeclaredConstructor();
        cns.setAccessible(true);
        run = (Tool) cns.newInstance();
        run.setConf(conf);
    } catch (NoSuchMethodException e) {
        // fall back to a constructor that accepts the Configuration directly
        Constructor<?> cns = tool.getDeclaredConstructor(org.apache.hadoop.conf.Configuration.class);
        cns.setAccessible(true);
        run = (Tool) cns.newInstance(conf);
    }
    String[] args = new String[objects.size()];
    for (int i = 0; i < args.length; i++) {
        args[i] = objects.get(i) != null ? objects.get(i).toString() : null;
    }
    run.run(args);
}

From source file: it.crs4.seal.common.SealToolRunner.java

License: Open Source License

public int run(Tool tool, String[] args) throws Exception {
    Configuration conf = new Configuration();
    tool.setConf(conf);

    return tool.run(args);
}

From source file: org.apache.kylin.engine.mr.MRUtil.java

License: Apache License

public static int runMRJob(Tool tool, String[] args) throws Exception {
    Configuration conf = tool.getConf();
    if (conf == null) {
        conf = new Configuration();
    }

    GenericOptionsParser parser = getParser(conf, args);
    //set the configuration back, so that Tool can configure itself
    tool.setConf(conf);

    //get the args w/o generic hadoop args
    String[] toolArgs = parser.getRemainingArgs();
    return tool.run(toolArgs);
}
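
For context, a hedged, self-contained sketch of what the generic-option parsing above achieves (the class GenericOptionsDemo and the queue value are illustrative, not from the Kylin source): GenericOptionsParser applies generic arguments such as -D key=value to the Configuration, and getRemainingArgs() yields only the tool-specific arguments.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

public class GenericOptionsDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] argv = { "-D", "mapreduce.job.queuename=prod", "--input", "/data" };
        GenericOptionsParser parser = new GenericOptionsParser(conf, argv);
        // the -D pair has been applied to conf; only tool-specific args remain
        System.out.println(conf.get("mapreduce.job.queuename"));         // prod
        System.out.println(String.join(" ", parser.getRemainingArgs())); // --input /data
    }
}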