Example usage for org.apache.hadoop.util.ProgramDriver ProgramDriver()

Introduction

This page collects usage examples of the org.apache.hadoop.util.ProgramDriver no-argument constructor, ProgramDriver().

Prototype

public ProgramDriver() 
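
The constructor takes no arguments; job classes are registered afterwards with addClass(name, class, description) and dispatched by the name passed in args[0]. Below is a minimal, self-contained sketch of that pattern; the MyTool class is illustrative, and run(...) is the Hadoop 2.x entry point (older releases expose only driver(...)).

import org.apache.hadoop.util.ProgramDriver;

public class ExampleRunner {

    // A trivial job class; ProgramDriver looks up and invokes its static main(String[]).
    public static class MyTool {
        public static void main(String[] args) {
            System.out.println("MyTool invoked with " + args.length + " argument(s)");
        }
    }

    public static void main(String[] args) {
        int exitCode = -1;
        ProgramDriver pgd = new ProgramDriver();
        try {
            // Register each job under a short command name with a description.
            pgd.addClass("mytool", MyTool.class, "Runs the illustrative MyTool job");
            // Dispatch to the job named by args[0]; run returns -1 for a missing or unknown name.
            exitCode = pgd.run(args);
        } catch (Throwable e) {
            e.printStackTrace();
        }
        System.exit(exitCode);
    }
}

Invoked with mytool as its first argument, this dispatches to MyTool.main; invoked with no arguments it prints the list of registered programs and exits with -1.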

Usage

From source file: $.Driver.java

License: Apache License

public static void main(String[] args) {
        int exitCode = -1;
        ProgramDriver pgd = new ProgramDriver();
        try {
            pgd.addClass("yarnjob", YarnJob.class, "Yarn Job");
            pgd.addClass("wc", YarnWordCount.class, "Word Count");
            exitCode = pgd.run(args);
        } catch (Throwable e) {
            e.printStackTrace();
        }
        System.exit(exitCode);
    }

From source file: boostingPL.driver.BoostingPLDriver.java

License: Open Source License

public static void main(String[] args) throws Throwable {
    ProgramDriver programDriver = new ProgramDriver();

    addClass(programDriver, "boostingPL.driver.AdaBoostPLDriver",
            "AdaBoostPL:Parallel boosting for binary classifier problem");
    addClass(programDriver, "boostingPL.driver.SAMMEPLDriver",
            "SAMMEPL:Parallel boosting for multiclass classifier problem");

    programDriver.driver(args);
}

From source file: ca.dealsaccess.scout.driver.ScoutDriver.java

License: Apache License

public static void main(String[] args) throws Throwable {

    ProgramDriver programDriver = new ProgramDriver();

    Properties mainClasses = loadProperties("driver.classes.props");
    if (mainClasses == null) {
        mainClasses = loadProperties("driver.classes.default.props");
    }
    if (mainClasses == null) {
        throw new IOException("Can't load any properties file?");
    }

    boolean foundShortName = false;
    for (Object key : mainClasses.keySet()) {
        String keyString = (String) key;
        if (args.length > 0 && shortName(mainClasses.getProperty(keyString)).equals(args[0])) {
            foundShortName = true;
        }
        if (args.length > 0 && keyString.equalsIgnoreCase(args[0]) && isDeprecated(mainClasses, keyString)) {
            log.error(desc(mainClasses.getProperty(keyString)));
            return;
        }
        if (isDeprecated(mainClasses, keyString)) {
            continue;
        }
        addClass(programDriver, keyString, mainClasses.getProperty(keyString));
    }

    if (args.length < 1 || args[0] == null || "-h".equals(args[0]) || "--help".equals(args[0])) {
        programDriver.driver(args);
        // Print usage and stop; without this return, args[0] below would be read even when no program name was given.
        return;
    }

    String progName = args[0];
    if (!foundShortName) {
        addClass(programDriver, progName, progName);
    }
    shift(args);

    Properties mainProps = loadProperties(progName + ".props");
    if (mainProps == null) {
        log.warn("No {}.props found on classpath, will use command-line arguments only", progName);
        mainProps = new Properties();
    }

    Map<String, String[]> argMap = Maps.newHashMap();
    int i = 0;
    while (i < args.length && args[i] != null) {
        List<String> argValues = Lists.newArrayList();
        String arg = args[i];
        i++;
        if (arg.startsWith("-D")) { // '-Dkey=value' or '-Dkey=value1,value2,etc' case
            String[] argSplit = arg.split("=");
            arg = argSplit[0];
            if (argSplit.length == 2) {
                argValues.add(argSplit[1]);
            }
        } else { // '-key [values]' or '--key [values]' case.
            while (i < args.length && args[i] != null) {
                if (args[i].startsWith("-")) {
                    break;
                }
                argValues.add(args[i]);
                i++;
            }
        }
        argMap.put(arg, argValues.toArray(new String[argValues.size()]));
    }

    // Add properties from the .props file that are not overridden on the command line
    for (String key : mainProps.stringPropertyNames()) {
        String[] argNamePair = key.split("\\|");
        String shortArg = '-' + argNamePair[0].trim();
        String longArg = argNamePair.length < 2 ? null : "--" + argNamePair[1].trim();
        if (!argMap.containsKey(shortArg) && (longArg == null || !argMap.containsKey(longArg))) {
            argMap.put(longArg, new String[] { mainProps.getProperty(key) });
        }
    }

    // Now add command-line args
    List<String> argsList = Lists.newArrayList();
    argsList.add(progName);
    for (Map.Entry<String, String[]> entry : argMap.entrySet()) {
        String arg = entry.getKey();
        if (arg.startsWith("-D")) { // arg is -Dkey - if value for this !isEmpty(), then arg -> -Dkey + "=" + value
            String[] argValues = entry.getValue();
            if (argValues.length > 0 && !argValues[0].trim().isEmpty()) {
                arg += '=' + argValues[0].trim();
            }
            argsList.add(1, arg);
        } else {
            argsList.add(arg);
            for (String argValue : Arrays.asList(argMap.get(arg))) {
                if (!argValue.isEmpty()) {
                    argsList.add(argValue);
                }
            }
        }
    }

    long start = System.currentTimeMillis();

    programDriver.driver(argsList.toArray(new String[argsList.size()]));

    if (log.isInfoEnabled()) {
        log.info("Program took {} ms (Minutes: {})", System.currentTimeMillis() - start,
                (System.currentTimeMillis() - start) / 60000.0);
    }
}
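
This driver mirrors Mahout's MahoutDriver pattern: program registrations come from a driver.classes.props file on the classpath, and, judging from the shortName(...) and desc(...) helpers, each entry appears to map a fully qualified driver class name to a "shortName : description" value (a hypothetical entry: ca.dealsaccess.scout.example.ExampleJob = example : Runs the example Scout job). Per-program defaults can then be supplied in a <programName>.props file, which the code above merges with the command-line arguments before delegating to programDriver.driver(...).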

From source file: ca.etsmtl.lasi.hbasewikipedialoader.Driver.java

License: Apache License

/**
 * @param args
 * @throws Throwable 
 */
public static void main(String[] args) throws Throwable {
    ProgramDriver pgd = new ProgramDriver();
    pgd.addClass(HBaseWikipediaLoader.NAME, HBaseWikipediaLoader.class,
            "Load the Wikipedia articles dump in a HBase table");
    pgd.driver(args);
}

From source file: ca.uwaterloo.iss4e.hadoop.Driver.java

License: Open Source License

public static void main(String argv[]) {
    ProgramDriver pgd = new ProgramDriver();
    try {
        pgd.addClass("ThreelMain", ThreelMain.class, "Threeline program");
        pgd.driver(argv);
    } catch (Throwable e) {
        e.printStackTrace();
    }
}

From source file: cn.clickwise.bigdata.Main.java

License: Apache License

public static void main(String argv[]) {
    int exitCode = -1;
    ProgramDriver pgd = new ProgramDriver();
    try {
        pgd.addClass("preprocess", Preprocess.class,
                "A map/reduce program that preprocess all the input data sources.");

        pgd.driver(argv);

        // Success
        exitCode = 0;
    } catch (Throwable e) {
        e.printStackTrace();
    }

    System.exit(exitCode);
}

From source file: cn.edu.buaa.practice.util.KPIJobsBootstrap.java

License: Apache License

public static void main(String argv[]) {
    int exitCode = -1;
    ProgramDriver pgd = new ProgramDriver();
    try {
        pgd.addClass("pvcount", PVCountJob.class, "MR job to count pv.");
        pgd.addClass("uvcount", IPCountJob.class, "MR job to count uv.");
        pgd.addClass("timecount", HourPvCountJob.class, "MR job to count by datetime/hour.");
        pgd.addClass("browsercount", BrowserCountJob.class, "MR job to count browser.");
        pgd.addClass("devicecount", DeviceTypeCountJob.class, "MR job to count by device type.");
        pgd.addClass("sourcecount", SourceCountJob.class, "MR job to count by source/referer.");
        exitCode = pgd.run(argv);
    } catch (Throwable e) {
        e.printStackTrace();
    }

    System.exit(exitCode);
}

From source file: com.ailk.oci.ocnosql.tools.Driver.java

License: Apache License

/**
 * @param args
 * @throws Throwable
 */
public static void main(String[] args) throws Throwable {
    ProgramDriver pgd = new ProgramDriver();
    pgd.addClass(SingleColumnImportTsv.NAME, SingleColumnImportTsv.class,
            "Import data as single column in TSV format.");
    pgd.addClass(MutipleColumnImportTsv.NAME, MutipleColumnImportTsv.class,
            "Import data as mutiple column in TSV format.");
    pgd.addClass(Export.NAME, Export.class, "Export data from a Htable by mapreduce");
    pgd.addClass(CsvBulkLoadTool.NAME, CsvBulkLoadTool.class, "phoenix csvBulkLoad");
    // Reflectively invokes pgd.driver(args); equivalent to calling the method directly.
    ProgramDriver.class.getMethod("driver", new Class[] { String[].class }).invoke(pgd, new Object[] { args });
}

From source file: com.benchmark.mapred.ExampleDriver.java

License: Apache License

public static void main(String argv[]) {
    int exitCode = -1;

    ProgramDriver pgd = new ProgramDriver();
    try {
        pgd.addClass("wordcount", WordCount.class,
                "A map/reduce program that counts the words in the input files.");
        pgd.addClass("aggregatewordcount", AggregateWordCount.class,
                "An Aggregate based map/reduce program that counts the words in the input files.");
        pgd.addClass("aggregatewordhist", AggregateWordHistogram.class,
                "An Aggregate based map/reduce program that computes the histogram of the words in the input files.");
        pgd.addClass("grep", Grep.class,
                "A map/reduce program that counts the matches of a regex in the input.");
        pgd.addClass("randomwriter", RandomWriter.class,
                "A map/reduce program that writes 10GB of random data per node.");
        pgd.addClass("randomtextwriter", RandomTextWriter.class,
                "A map/reduce program that writes 10GB of random textual data per node.");
        pgd.addClass("sort", Sort.class,
                "A map/reduce program that sorts the data written by the random writer.");
        pgd.addClass("pi", PiEstimator.class,
                "A map/reduce program that estimates Pi using monte-carlo method.");
        pgd.addClass("pentomino", DistributedPentomino.class,
                "A map/reduce tile laying program to find solutions to pentomino problems.");
        pgd.addClass("secondarysort", SecondarySort.class,
                "An example defining a secondary sort to the reduce.");
        pgd.addClass("sudoku", Sudoku.class, "A sudoku solver.");
        pgd.addClass("sleep", SleepJob.class, "A job that sleeps at each map and reduce task.");
        pgd.addClass("join", Join.class, "A job that effects a join over sorted, equally partitioned datasets");
        pgd.addClass("multifilewc", MultiFileWordCount.class, "A job that counts words from several files.");
        pgd.addClass("dbcount", DBCountPageView.class,
                "An example job that count the pageview counts from a database.");
        pgd.addClass("teragen", TeraGen.class, "Generate data for the terasort");
        pgd.addClass("terasort", TeraSort.class, "Run the terasort");
        pgd.addClass("teravalidate", TeraValidate.class, "Checking results of terasort");
        pgd.addClass("kmeans", Kmeans.class, "Kmeans on movies data");
        pgd.addClass("classification", Classification.class, "Classify movies into clusters");
        pgd.addClass("histogram_movies", HistogramMovies.class,
                "A map/reduce program that gives a histogram of movies based on ratings.");
        pgd.addClass("histogram_ratings", HistogramRatings.class,
                "A map/reduce program that gives a histogram of users ratings on movies.");
        pgd.addClass("selfjoin", SelfJoin.class,
                "A map/reduce program that creates k+1 associations given set of k-field associations");
        pgd.addClass("invertedindex", InvertedIndex.class,
                "A map/reduce program that creates an inverted index of documents.");
        pgd.addClass("adjlist", AdjList.class,
                "A map/reduce program that finds adjacency list of graph nodes.");
        pgd.addClass("termvectorperhost", TermVectorPerHost.class,
                "A map/reduce program that creates the term-vectors (frequency of words) per document.");
        pgd.addClass("sequencecount", SequenceCount.class,
                "A map/reduce program that counts the occurrence of consecutive words in the input files.");
        pgd.addClass("rankedinvertedindex", RankedInvertedIndex.class,
                "A map/reduce program that creates the top k document lists per word");

        pgd.driver(argv);

        // Success
        exitCode = 0;
    } catch (Throwable e) {
        e.printStackTrace();
    }

    System.exit(exitCode);
}

From source file: com.checkup.tez.test.DriverTest.java

@Test
public void hello() {
    String[] argv = new String[] {};
    int exitCode = -1;
    ProgramDriver pgd = new ProgramDriver();
    try {
        pgd.addClass("broadcastloadgen", BroadcastLoadGen.class,
                "Run a DAG to generate load for Broadcast Shuffle");
        pgd.addClass("rpcloadgen", RPCLoadGen.class, "Run a DAG to generate load for the task to AM RPC");
        pgd.addClass("wordcount", MapredWordCount.class,
                "A map/reduce program that counts the words in the input files.");
        pgd.addClass("mapredwordcount", MapredWordCount.class,
                "A map/reduce program that counts the words in the input files" + " using the mapred apis.");
        pgd.addClass("randomwriter", RandomWriter.class,
                "A map/reduce program that writes 10GB of random data per node.");
        pgd.addClass("randomtextwriter", RandomTextWriter.class,
                "A map/reduce program that writes 10GB of random textual data per node.");
        pgd.addClass("sort", Sort.class,
                "A map/reduce program that sorts the data written by the random" + " writer.");
        pgd.addClass("secondarysort", SecondarySort.class,
                "An example defining a secondary sort to the reduce.");
        pgd.addClass("join", Join.class,
                "A job that effects a join over sorted, equally partitioned" + " datasets");
        pgd.addClass("groupbyorderbymrrtest", GroupByOrderByMRRTest.class,
                "A map-reduce-reduce program that does groupby-order by. Takes input"
                        + " containing employee_name department name per line of input"
                        + " and generates count of employees per department and" + " sorted on employee count");
        pgd.addClass("mrrsleep", MRRSleepJob.class, "MRR Sleep Job");
        pgd.addClass("testorderedwordcount", TestOrderedWordCount.class,
                "Word Count with words sorted on frequency");
        pgd.addClass("unionexample", UnionExample.class, "Union example");
        pgd.addClass("broadcastAndOneToOneExample", BroadcastAndOneToOneExample.class,
                "BroadcastAndOneToOneExample example");
        pgd.addClass("filterLinesByWord", FilterLinesByWord.class,
                "Filters lines by the specified word using broadcast edge");
        pgd.addClass("filterLinesByWordOneToOne", FilterLinesByWordOneToOne.class,
                "Filters lines by the specified word using OneToOne edge");
        exitCode = pgd.run(argv);
    } catch (Throwable e) {
        e.printStackTrace();
    }

    //System.exit(exitCode);

}