Example usage for org.apache.hadoop.mapreduce Job waitForCompletion


Introduction

This page collects example usages of org.apache.hadoop.mapreduce.Job#waitForCompletion from open-source projects.

Prototype

public boolean waitForCompletion(boolean verbose)
        throws IOException, InterruptedException, ClassNotFoundException 

Document

Submit the job to the cluster and wait for it to finish. If verbose is true, progress is printed to the user while the job runs.
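
For orientation, here is a minimal, self-contained driver built around this method. The class names and paths (WordCountDriver, TokenMapper, SumReducer, args[0]/args[1]) are illustrative placeholders, not taken from the examples below:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCountDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "word count"); // non-deprecated factory method
        job.setJarByClass(WordCountDriver.class);
        job.setMapperClass(TokenMapper.class);   // hypothetical Mapper implementation
        job.setReducerClass(SumReducer.class);   // hypothetical Reducer implementation
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        // Submit to the cluster and block until the job finishes;
        // "true" streams progress to the client as the job runs.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}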

Usage

From source file:boostingPL.driver.SAMMEPLDriver.java

License:Open Source License

@Override
public int run(String[] args) throws Exception {
    int status = commandAnalysis(args);
    if (status != 0) {
        return status;
    }

    @SuppressWarnings("deprecation")
    Job job = new Job(getConf());
    job.setJobName("SAMMEPL:" + runModel + " " + dataPath.toString() + " " + modelPath.toString() + " "
            + numLinesPerMap + " " + numIterations);
    job.setJarByClass(SAMMEPLDriver.class);

    job.setInputFormatClass(NLineInputFormat.class);
    NLineInputFormat.addInputPath(job, dataPath);
    NLineInputFormat.setNumLinesPerSplit(job, numLinesPerMap);
    FileSystem fs = modelPath.getFileSystem(getConf());
    if (fs.exists(modelPath)) {
        fs.delete(modelPath, true);
    }
    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    SequenceFileOutputFormat.setOutputPath(job, modelPath);

    if (runModel.equals("train")) {
        job.setMapperClass(AdaBoostPLMapper.class);

        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(ClassifierWritable.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(ClassifierWritable.class);
    } else {
        job.setMapperClass(AdaBoostPLTestMapper.class);
        job.setReducerClass(AdaBoostPLTestReducer.class);
        job.setOutputFormatClass(NullOutputFormat.class);

        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(NullWritable.class);
    }

    Configuration conf = job.getConfiguration();
    conf.set("BoostingPL.boostingName", "SAMME");
    conf.set("BoostingPL.numIterations", String.valueOf(numIterations));
    conf.set("BoostingPL.modelPath", modelPath.toString());
    if (metadataPath == null) {
        conf.set("BoostingPL.metadata", dataPath.toString() + ".metadata");
    } else {
        conf.set("BoostingPL.metadata", metadataPath.toString());
    }
    if (outputFolder != null) {
        conf.set("BoostingPL.outputFolder", outputFolder.toString());
    }

    LOG.info(StringUtils.arrayToString(args));
    return job.waitForCompletion(true) ? 0 : -1;
}

From source file:br.com.lassal.nqueens.grid.job.GenerateSolutions.java

public int run(String[] args) throws Exception {
    // Configuration processed by ToolRunner
    Configuration conf = getConf();

    // Create a JobConf using the processed conf
    Job job = new Job(conf, "nqueens-gensolutions");
    job.setJarByClass(GenerateSolutions.class);

    // this job has no reduce tasks
    job.setNumReduceTasks(0);

    int queensNumber = Integer.parseInt(args[0]);

    this.setWorkingFolder(queensNumber, job);

    job.setMapperClass(br.com.lassal.nqueens.grid.mapreduce.NQueenPartialShotMapper.class);

    // Submit the job, then poll for progress until the job is complete
    boolean result = job.waitForCompletion(true);
    return result ? 0 : 1;

}

From source file:br.com.lassal.nqueens.grid.job.NQueenCounter.java

/**
 * Invocation format:
 * <> {number of queens} {root directory} -F
 *
 * @param args
 * @return
 * @throws Exception
 */
public int run(String[] args) throws Exception {
    // Configuration processed by ToolRunner
    Configuration conf = getConf();

    // Create a JobConf using the processed conf
    Job job = new Job(conf, "nqueens-counter");
    job.setJarByClass(NQueenCounter.class);

    int queensNumber = Integer.parseInt(args[0]);
    String workingFolder = args.length >= 2 ? args[1] : null;
    boolean isFinal = args.length >= 3 && "-F".equals(args[2]);

    Path sourcePath = this.setWorkingFolder(queensNumber, workingFolder, isFinal, job);
    job.setOutputKeyClass(org.apache.hadoop.io.Text.class);
    job.setOutputValueClass(org.apache.hadoop.io.Text.class);

    if (isFinal) {
        job.setMapperClass(br.com.lassal.nqueens.grid.mapreduce.NQueenIncrementalCounterResultMapper.class);
        job.setReducerClass(br.com.lassal.nqueens.grid.mapreduce.NQueenIncrementalCounterResultReducer.class);
    } else {
        job.setMapperClass(br.com.lassal.nqueens.grid.mapreduce.NQueenIncrementalCounterMapper.class);
        job.setReducerClass(br.com.lassal.nqueens.grid.mapreduce.NQueenIncrementalCounterReducer.class);
    }

    // Submit the job, then poll for progress until the job is complete
    boolean result = job.waitForCompletion(true);

    if (sourcePath != null) {
        FileSystem fs = FileSystem.get(conf);
        fs.delete(sourcePath, true);
    }

    return result ? 0 : 1;

}

From source file:br.ufpr.inf.hpath.HPath.java

License:Apache License

/**
 * Execute the XPath query as a Hadoop job
 * @param xpath_query XPath query submitted by the user via cli.
 * @param inputFile XML file which has all data.
 * @param outputFile Query's result is stored in this file. 
 * @throws Exception
 */
public static void main(String[] args) throws Exception {

    if (args.length < 3) {
        System.out.println("USAGE: hpath [input_file] [output_dir] [xpath_query]");
        System.exit(-1);
    }

    System.out.println("***************");
    System.out.println(" Query  -> " + args[2]);
    System.out.println(" Input  -> " + args[0]);
    System.out.println(" Output -> " + args[1]);
    System.out.println("***************");

    String xpath_query = args[2];
    String inputFile = args[0];
    String outputFile = args[1];
    String tag = getLastQueryTag(xpath_query);
    Configuration conf = new Configuration();
    conf.set("xmlinput.start", "<" + tag);
    conf.set("xmlinput.end", "</" + tag + ">");
    conf.set("xpath.query", xpath_query);

    @SuppressWarnings("deprecation")
    Job job = new Job(conf, "HPath");
    FileSystem fs = FileSystem.get(conf);
    Path inFile = new Path(inputFile);
    Path outFile = new Path(outputFile);

    if (!fs.exists(inFile)) {
        System.out.println("error: Input file not found.");
        System.exit(-1);
    }
    if (!fs.isFile(inFile)) {
        System.out.println("error: Input should be a file.");
        System.exit(-1);
    }
    if (fs.exists(outFile)) {
        System.out.println("error: Output already exists.");
        System.exit(-1);
    }

    job.setJarByClass(HPath.class);

    job.setMapperClass(Map.class);
    job.setReducerClass(Reduce.class);

    job.setInputFormatClass(XmlItemInputFormat.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    FileInputFormat.addInputPath(job, inFile);
    FileOutputFormat.setOutputPath(job, outFile);
    job.waitForCompletion(true);
}

From source file:BU.MET.CS755.SpeciesIterDriver2.java

static boolean MRGraphBuilder(String args[], int iterCnt) {
    Job theJob = null;

    conf = new JobConf(SpeciesIterDriver2.class);
    conf.setJobName("Species Graph Builder");
    conf.setNumReduceTasks(5);
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);
    conf.setMapperClass(SpeciesGraphBuilderMapper.class);
    conf.setReducerClass(SpeciesGraphBuilderReducer.class);

    // Reading in XML.
    conf.setInputFormat(StreamInputFormat.class);
    conf.set("stream.recordreader.class", "org.apache.hadoop.streaming.StreamXmlRecordReader");

    // Look for the <page> record in the XML.
    conf.set("stream.recordreader.begin", "<page>");
    conf.set("stream.recordreader.end", "</page>");

    inputpath = args[0];
    outputpath = args[1] + iterCnt;

    FileInputFormat.setInputPaths(conf, new Path(inputpath));
    FileOutputFormat.setOutputPath(conf, new Path(outputpath));

    try {
        theJob = new Job(conf, "SpeciesIter");
        theJob.submit();
    } catch (Exception e) {
        e.printStackTrace();
    }

    try {
        if (theJob != null) {
            theJob.waitForCompletion(true);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

    return true;
}
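
Two points about the example above are worth noting. First, theJob.submit() already starts the job asynchronously; the later waitForCompletion(true) call does not resubmit it, but only blocks until the running job finishes (waitForCompletion submits only when the job has not yet been submitted). Second, if you want progress reporting without blocking inside waitForCompletion, you can poll the job yourself. The following is a minimal sketch of that pattern, not code from the original source, and assumes theJob is a fully configured Job:

// Illustrative only: poll an already-submitted job instead of blocking.
theJob.submit();
while (!theJob.isComplete()) {
    System.out.printf("map %.0f%% reduce %.0f%%%n",
            theJob.mapProgress() * 100, theJob.reduceProgress() * 100);
    Thread.sleep(5000); // polling interval is arbitrary
}
System.out.println(theJob.isSuccessful() ? "done" : "failed");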

From source file:BU.MET.CS755.SpeciesIterDriver2.java

static boolean MRSpeciesRank(String args[], int iterCnt) {
    long newCounterVal = 0;
    long totalLinks = 1; // Initialize to 1 to prevent divide by zero
    long totalIterations = 0;
    Job theJob = null;

    conf = new JobConf(SpeciesIterDriver2.class);
    conf.setJobName("Species Iter");
    conf.setNumReduceTasks(5);
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);
    conf.setMapperClass(SpeciesIterMapper2.class);
    conf.setReducerClass(SpeciesIterReducer2.class);

    boolean nextIterationNeeded = true;

    while (nextIterationNeeded || numExtraIterations != 0) {
        long iterationNumber = 0;

        if ((iterCnt == 0) || (iterCnt == 1)) {
            inputpath = args[1] + "0";
        } else {
            inputpath = args[1] + iterCnt;
        }

        iterCnt++;

        conf.set("iterationNumber", Integer.toString(iterCnt));
        conf.set("totalLinks", Long.toString(totalLinks));

        outputpath = args[1] + iterCnt;

        FileInputFormat.setInputPaths(conf, new Path(inputpath));
        FileOutputFormat.setOutputPath(conf, new Path(outputpath));

        try {
            theJob = new Job(conf, "SpeciesIter");
        } catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (theJob != null) {
                theJob.waitForCompletion(true);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (theJob.isComplete()) {
                Counters jobCtrs = theJob.getCounters();

                if (jobCtrs != null) {
                    newCounterVal = jobCtrs.findCounter(ITERATION_COUNTER.ITERATIONS_NEEDED).getValue();
                }

                // If reducer recorded change in species rank, repeat iteration.
                if ((newCounterVal > 0) || (iterCnt == 1)) {
                    nextIterationNeeded = true;
                } else {
                    nextIterationNeeded = false;
                    numExtraIterations--; // Do one extra iteration
                }

                totalLinks = jobCtrs.findCounter(BU.MET.CS755.SpeciesIterDriver2.ITERATION_COUNTER.TOTAL_LINKS)
                        .getValue();
            }

            totalIterations += 1;

            if (totalIterations > 200) {
                System.out.println("too many iterations!!");
                break;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    System.out.println("Total iterations = " + totalIterations);

    return true;
}
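
Stripped of the application specifics, the driver above follows a common counter-driven iteration pattern: run a pass, read a user-defined counter from the finished job, and loop until the counter reports no further change. Below is a generic sketch of that skeleton; MyCounters, configureJob, and MAX_ITERATIONS are hypothetical names, not part of the original source:

// Hypothetical skeleton of counter-driven iteration.
enum MyCounters { CHANGES }

int iteration = 0;
boolean converged = false;
while (!converged && iteration < MAX_ITERATIONS) {
    Job job = Job.getInstance(getConf(), "iteration-" + iteration);
    configureJob(job, iteration); // set mapper/reducer and per-pass input/output paths
    if (!job.waitForCompletion(true)) {
        throw new IOException("iteration " + iteration + " failed");
    }
    // Reducers increment CHANGES whenever a value moved during this pass.
    long changes = job.getCounters().findCounter(MyCounters.CHANGES).getValue();
    converged = (changes == 0);
    iteration++;
}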

From source file:BU.MET.CS755.SpeciesIterDriver2.java

static boolean MRSpeciesView(String input, String args[]) {
    Job theJob = null;

    JobConf conf = new JobConf(SpeciesIterDriver2.class);
    conf.setJobName("Species Viewer");

    conf.setOutputKeyClass(FloatWritable.class);
    conf.setOutputValueClass(Text.class);

    inputpath = input;
    outputpath = args[1] + "FinalRanks";

    FileInputFormat.setInputPaths(conf, new Path(inputpath));
    FileOutputFormat.setOutputPath(conf, new Path(outputpath));

    conf.setMapperClass(SpeciesViewerMapper.class);
    conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);

    try {
        theJob = new Job(conf, "SpeciesIter");
        theJob.waitForCompletion(true);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return true;
}

From source file:bulkload.ImportTsv.java

License:Apache License

@Override
public int run(String[] args) throws Exception {
    if (args.length < 2) {
        usage("Wrong number of arguments: " + args.length);
        return -1;
    }
    setConf(HBaseConfiguration.create(getConf()));
    Configuration conf = getConf();
    // Make sure columns are specified
    String columns[] = conf.getStrings(COLUMNS_CONF_KEY);
    if (columns == null) {
        usage("No columns specified. Please specify with -D" + COLUMNS_CONF_KEY + "=...");
        return -1;
    }
    // Make sure rowkey is specified
    String rowkey = conf.get(ROWKEY_CONF_KEY);
    if (StringUtil.isEmpty(rowkey)) {
        usage("No rowkey specified or rowkey is empty. Please specify with -D" + ROWKEY_CONF_KEY + "=...");
        return -1;
    }
    // Make sure rowkey handler is specified
    String rowKeyGenerator = conf.get(ROWKEY_GENERATOR_CONF_KEY);
    if (StringUtil.isEmpty(rowKeyGenerator)) {
        usage("No rowkey_handler specified or rowkey generator is empty. Please specify with -D"
                + ROWKEY_GENERATOR_CONF_KEY + "=...");
        return -1;
    }
    // Make sure they specify exactly one column as the row key
    int rowkeysFound = 0;
    for (String col : columns) {
        String[] parts = col.split(":", 3);
        if (parts.length > 1 && rowkey.equals(parts[1])) {
            rowkeysFound++;
        }
    }
    if (rowkeysFound != 1) {
        usage("Must specify exactly one column as " + rowkey);
        return -1;
    }
    // Make sure at least one column is specified in addition to the row key
    if (columns.length < 1) {
        usage("One or more columns in addition to the row key are required");
        return -1;
    }

    Job job = createSubmittableJob(conf, args);
    return job.waitForCompletion(true) ? 0 : 1;
}

From source file:Business.MapReduceOne.java

@Override
public int run(String[] args) throws Exception {

    Configuration conf = getConf();
    Job job = new Job(conf, "FirstJob");
    job.setJarByClass(MapReduceOne.class);

    final File f = new File(MapReduceOne.class.getProtectionDomain().getCodeSource().getLocation().getPath());
    String inFiles = f.getAbsolutePath().replace("/build/classes", "") + "/src/inFiles/";
    String outFiles = f.getAbsolutePath().replace("/build/classes", "") + "/src/outFiles/OutputOne";
    //use the arguments instead if provided.
    if (args.length > 1) {
        inFiles = args[1];
        outFiles = args[2];
    }
    Path in = new Path(inFiles);
    Path out = new Path(outFiles);
    FileInputFormat.setInputPaths(job, in);
    FileOutputFormat.setOutputPath(job, out);

    job.setMapperClass(Mapper1.class);
    job.setCombinerClass(Reducer1.class);
    job.setReducerClass(Reducer1.class);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    return job.waitForCompletion(true) ? 0 : 1;
}

From source file:byte_import.HexastoreBulkImport.java

License:Open Source License

public int run(String[] args) throws Exception {

    Job job = createSubmittableJob(args);
    if (!job.waitForCompletion(true)) {
        return 1;
    }
    loadHFiles();
    return 0;
}