Example usage for org.apache.hadoop.mapreduce Job isSuccessful

List of usage examples for org.apache.hadoop.mapreduce Job isSuccessful

Introduction

On this page you can find example usages of org.apache.hadoop.mapreduce Job.isSuccessful().

Prototype

public boolean isSuccessful() throws IOException 

Document

Check if the job completed successfully.
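
The method is typically paired with waitForCompletion: submit the job, block until it finishes, then map the success flag to a process exit code. Note that waitForCompletion(true) itself returns the same flag, so an explicit isSuccessful() call is mostly useful when submission and the status check happen in different places. The sketch below is a minimal, self-contained driver showing the pattern; the class name ExampleDriver, the identity map-only setup, and the path arguments are illustrative assumptions, not taken from the projects quoted under Usage.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class ExampleDriver {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "isSuccessful-example");
        job.setJarByClass(ExampleDriver.class);

        // Identity map-only job: the default Mapper passes records through.
        job.setNumReduceTasks(0);
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // Block until the job finishes, then query its final status.
        job.waitForCompletion(true);
        System.exit(job.isSuccessful() ? 0 : 1);
    }
}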

Usage

From source file:com.bah.applefox.main.plugins.fulltextindex.FTLoader.java

License:Apache License

/**
 * run takes the command-line args as arguments (in this case from a
 * configuration file), creates a new job, configures it, initiates it,
 * waits for completion, and returns 0 if it is successful (1 if it is not)
 * 
 * @param args
 *            the command-line arguments (in this case from a configuration
 *            file)
 * 
 * @return 0 if the job ran successfully and 1 if it did not
 */
public int run(String[] args) throws Exception {
    try {
        // Initialize variables
        FTLoader.articleFile = args[8];
        FTLoader.maxNGrams = Integer.parseInt(args[9]);
        FTLoader.stopWords = getStopWords();
        FTLoader.dTable = args[10];
        FTLoader.urlCheckedTable = args[11];
        FTLoader.divsFile = args[20];
        FTLoader.exDivs = getExDivs();

        // Give the job a name
        String jobName = this.getClass().getSimpleName() + "_" + System.currentTimeMillis();

        // Create job and set the jar
        Job job = new Job(getConf(), jobName);
        job.setJarByClass(this.getClass());

        String urlTable = args[5];

        job.setInputFormatClass(AccumuloInputFormat.class);
        InputFormatBase.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
        InputFormatBase.setInputInfo(job.getConfiguration(), args[2], args[3].getBytes(), urlTable,
                new Authorizations());

        job.setMapperClass(MapperClass.class);
        job.setMapOutputKeyClass(Key.class);
        job.setMapOutputValueClass(Value.class);

        job.setReducerClass(ReducerClass.class);
        job.setNumReduceTasks(Integer.parseInt(args[4]));

        job.setOutputFormatClass(AccumuloOutputFormat.class);
        job.setOutputKeyClass(Key.class);
        job.setOutputValueClass(Value.class);

        AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
        AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), args[2], args[3].getBytes(), true, urlTable);

        job.waitForCompletion(true);

        return job.isSuccessful() ? 0 : 1;
    } catch (IOException | InterruptedException | ClassNotFoundException e) {
        // Log the message when present; otherwise log the exception itself
        // so the full stack trace is preserved.
        if (e.getMessage() != null) {
            log.error(e.getMessage());
        } else {
            log.error("Job failed", e);
        }
    }
    return 1;
}

From source file:com.bah.applefox.main.plugins.imageindex.ImageLoader.java

License:Apache License

/**
 * run takes the command-line args as arguments (in this case from a
 * configuration file), creates a new job, configures it, initiates it,
 * waits for completion, and returns 0 if it is successful (1 if it is not)
 * 
 * @param args
 *            the command-line arguments (in this case from a configuration
 *            file)
 * 
 * @return 0 if the job ran successfully and 1 if it did not
 */
public int run(String[] args) throws Exception {

    checkedImages = args[18];
    hashTable = args[17];
    tagTable = args[19];
    divsFile = args[20];
    UserAgent = args[6];

    // Create the table
    AccumuloUtils.setSplitSize(args[23]);
    AccumuloUtils.connectBatchWrite(checkedImages).close();

    // Give the job a name
    String jobName = this.getClass().getSimpleName() + "_" + System.currentTimeMillis();

    // Create the job and set its jar
    Job job = new Job(getConf(), jobName);
    job.setJarByClass(this.getClass());

    // Set the url table to read from
    String urlTable = args[5];

    job.setInputFormatClass(AccumuloInputFormat.class);
    InputFormatBase.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
    InputFormatBase.setInputInfo(job.getConfiguration(), args[2], args[3].getBytes(), urlTable,
            new Authorizations());

    job.setMapperClass(MapperClass.class);
    job.setMapOutputKeyClass(Key.class);
    job.setMapOutputValueClass(Value.class);

    job.setNumReduceTasks(Integer.parseInt(args[4]));

    job.setReducerClass(ReducerClass.class);

    job.setOutputFormatClass(AccumuloOutputFormat.class);
    job.setOutputKeyClass(Key.class);
    job.setOutputValueClass(Value.class);
    AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
    AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), args[2], args[3].getBytes(), true, urlTable);

    AccumuloUtils.setSplitSize(args[22]);

    job.waitForCompletion(true);

    return job.isSuccessful() ? 0 : 1;
}

From source file:com.bah.applefox.main.plugins.pageranking.utilities.CountURLs.java

License:Apache License

public int run(String[] args) throws Exception {

    String jobName = this.getClass().getSimpleName() + "_" + System.currentTimeMillis();

    Job job = new Job(getConf(), jobName);
    job.setJarByClass(this.getClass());

    mappedInput = args[12] + "From";

    job.setInputFormatClass(AccumuloInputFormat.class);
    InputFormatBase.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
    InputFormatBase.setInputInfo(job.getConfiguration(), args[2], args[3].getBytes(), mappedInput,
            new Authorizations());

    job.setMapperClass(MapperClass.class);
    job.setMapOutputKeyClass(Key.class);
    job.setMapOutputValueClass(Value.class);

    job.setReducerClass(ReducerClass.class);
    job.setOutputFormatClass(AccumuloOutputFormat.class);
    job.setOutputKeyClass(Key.class);
    job.setOutputValueClass(Value.class);
    AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
    AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), args[2], args[3].getBytes(), true, args[15]);

    job.waitForCompletion(true);

    return job.isSuccessful() ? 0 : 1;
}

From source file:com.bah.applefox.main.plugins.pageranking.utilities.DampenTable.java

License:Apache License

public int run(String[] args) throws Exception {

    String jobName = this.getClass().getSimpleName() + "_" + System.currentTimeMillis();

    Job job = new Job(getConf(), jobName);
    job.setJarByClass(this.getClass());

    tablePrefix = args[13];
    dampeningFactor = Double.parseDouble(args[14]);

    job.setInputFormatClass(AccumuloInputFormat.class);
    AccumuloInputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
    AccumuloInputFormat.setInputInfo(job.getConfiguration(), args[2], args[3].getBytes(), tablePrefix + "New",
            new Authorizations());

    job.setMapperClass(MapperClass.class);
    job.setMapOutputKeyClass(Key.class);
    job.setMapOutputValueClass(Value.class);

    job.setReducerClass(ReducerClass.class);
    job.setOutputFormatClass(AccumuloOutputFormat.class);
    job.setOutputKeyClass(Key.class);
    job.setOutputValueClass(Value.class);
    AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
    AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), args[2], args[3].getBytes(), true,
            tablePrefix + "New");

    job.waitForCompletion(true);

    return job.isSuccessful() ? 0 : 1;
}

From source file:com.bah.applefox.main.plugins.pageranking.utilities.InitializePRTables.java

License:Apache License

public int run(String[] args) throws Exception {

    tablePrefix = args[13];

    String jobName = this.getClass().getSimpleName() + "_" + System.currentTimeMillis();

    Job job = new Job(getConf(), jobName);
    job.setJarByClass(this.getClass());

    job.setInputFormatClass(AccumuloInputFormat.class);
    InputFormatBase.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
    InputFormatBase.setInputInfo(job.getConfiguration(), args[2], args[3].getBytes(), args[12] + "To",
            new Authorizations());

    job.setMapperClass(MapperClass.class);
    job.setMapOutputKeyClass(Key.class);
    job.setMapOutputValueClass(Value.class);

    job.setReducerClass(ReducerClass.class);
    job.setOutputFormatClass(AccumuloOutputFormat.class);
    job.setOutputKeyClass(Key.class);
    job.setOutputValueClass(Value.class);
    AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
    AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), args[2], args[3].getBytes(), true,
            tablePrefix + "Old");

    AccumuloUtils.connectBatchWrite(tablePrefix + "New");

    job.waitForCompletion(true);

    return job.isSuccessful() ? 0 : 1;
}

From source file:com.bah.applefox.main.plugins.pageranking.utilities.MRPageRanking.java

License:Apache License

public int run(String[] args) throws Exception {

    String jobName = this.getClass().getSimpleName() + "_" + System.currentTimeMillis();

    Job job = new Job(getConf(), jobName);
    job.setJarByClass(this.getClass());

    tablePrefix = args[13];
    outboundLinks = args[15];

    job.setInputFormatClass(AccumuloInputFormat.class);
    AccumuloInputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);

    AccumuloInputFormat.setInputInfo(job.getConfiguration(), args[2], args[3].getBytes(), args[12] + "To",
            new Authorizations());

    job.setMapperClass(MapperClass.class);
    job.setMapOutputKeyClass(Key.class);
    job.setMapOutputValueClass(Value.class);

    job.setReducerClass(ReducerClass.class);
    job.setOutputFormatClass(AccumuloOutputFormat.class);
    job.setOutputKeyClass(Key.class);
    job.setOutputValueClass(Value.class);
    AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
    AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), args[2], args[3].getBytes(), true,
            tablePrefix + "New");

    job.waitForCompletion(true);

    return job.isSuccessful() ? 0 : 1;
}

From source file:com.bah.applefox.main.plugins.webcrawler.WebCrawler.java

License:Apache License

/**
 * run takes the command-line args as arguments (in this case from a
 * configuration file), creates a new job, configures it, initiates it,
 * waits for completion, and returns 0 if it is successful (1 if it is not)
 * 
 * @param args
 *            the command-line arguments (in this case from a configuration
 *            file)
 * 
 * @return 0 if the job ran successfully and 1 if it did not
 */
public int run(String[] args) throws Exception {

    userAgent = args[6];

    String jobName = this.getClass().getSimpleName() + "_" + System.currentTimeMillis();

    Job job = new Job(getConf(), jobName);
    job.setJarByClass(this.getClass());

    String clone = args[5];
    String clone2 = args[12];
    table = clone;

    AccumuloUtils.setSplitSize(args[24]);
    table2 = clone2 + "From";
    table3 = clone2 + "To";

    job.setInputFormatClass(AccumuloInputFormat.class);
    InputFormatBase.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
    InputFormatBase.setInputInfo(job.getConfiguration(), args[2], args[3].getBytes(), clone,
            new Authorizations());

    job.setMapperClass(MapperClass.class);
    job.setMapOutputKeyClass(Key.class);
    job.setMapOutputValueClass(Value.class);

    job.setNumReduceTasks(0);
    job.setOutputFormatClass(NullOutputFormat.class);
    job.setOutputKeyClass(Key.class);
    job.setOutputValueClass(Value.class);
    AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
    AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), args[2], args[3].getBytes(), true, clone);

    job.waitForCompletion(true);

    return job.isSuccessful() ? 0 : 1;
}

From source file:com.cloudera.accumulo.upgrade.compatibility.DataCompatibilityLoad.java

License:Open Source License

@Override
public int run(String[] args) throws Exception {
    final String jobName = this.getClass().getName();
    options.parseArgs(jobName, args);
    final Job job = new Job(getConf(), jobName);

    if (-1 == options.test.numRows) {
        options.test.numRows = job.getConfiguration().getInt("mapred.map.tasks",
                DataCompatibilityTestCli.DEFAULT_NUM_ROWS);
    }

    job.setJarByClass(this.getClass());

    job.setInputFormatClass(DataLoadInputFormat.class);
    DataLoadInputFormat.setTabletServers(job,
            options.connection.getConnector().instanceOperations().getTabletServers());
    DataLoadInputFormat.setNumRows(job, options.test.numRows);
    DataLoadInputFormat.setNumQualifiersPerFamily(job, options.test.qualifiers);

    job.getConfiguration().set(VISIBILITY, new String(options.visibility.visibility.getExpression(), "UTF-8"));

    final TableOperations ops = options.connection.getConnector().tableOperations();

    final List<String> names = options.test.getTableNamesAndConfigureThem(ops);
    for (String name : names) {
        final int numSplits = ops.getSplits(name, options.test.numRows).size();
        if (options.test.numRows > numSplits) {
            log.info("adding splits to table '" + name + "', to bring it from " + numSplits + " to "
                    + options.test.numRows + ".");
            final SortedSet<Text> splits = new TreeSet<Text>();
            // for cases where we're adding way more splits than there are currently possible servers to handle them, do a pre-pre-split
            //   N.B. If we've just created this table, there will be 0 splits because we'll just have the initial tablet.
            if (0 == numSplits || options.test.numRows / numSplits > 10) {
                log.info("splitting in two waves due to the number of splits we need to add.");
                // TODO turtles all the way down.
                final int prepre = options.test.numRows / (0 == numSplits ? 10 : numSplits * 10);
                for (int i = 0; i < prepre; i++) {
                    splits.add(new Text(new StringBuilder(Long.toString(i)).reverse().toString()));
                }
                ops.addSplits(name, splits);
                log.debug("delay 30s for splits to get assigned off host.");
                try {
                    Thread.sleep(30 * 1000);
                } catch (InterruptedException exception) {
                    log.warn("interrupted from sleep early.");
                }
                splits.clear();
            }
            for (int i = 0; i < options.test.numRows; i++) {
                splits.add(new Text(new StringBuilder(Long.toString(i)).reverse().toString()));
            }
            ops.addSplits(name, splits);
        }
    }
    log.debug("delay 30s for splits to get assigned off host.");
    try {
        Thread.sleep(30 * 1000);
    } catch (InterruptedException exception) {
        log.warn("interrupted from sleep early.");
    }

    job.getConfiguration().setStrings(OUTPUT_TABLE_NAMES, names.toArray(new String[0]));

    job.setMapperClass(DataLoadMapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Mutation.class);

    job.setNumReduceTasks(0);

    log.info("launching map-only job to insert " + options.test.numRows + " rows of "
            + (FAMILIES.length * options.test.qualifiers) + " cells each into each of the tables " + names);
    options.output.useAccumuloOutputFormat(job);

    job.waitForCompletion(true);
    return job.isSuccessful() ? 0 : 1;
}

From source file:com.datasalt.pangool.solr.TupleSolrOutputFormatExample.java

License:Apache License

public int run(String input, String output, Configuration conf) throws Exception {
    // Define the intermediate schema: It must match SOLR's schema.xml!
    final Schema schema = new Schema("iSchema", Fields.parse("user_id:string, message:string"));

    TupleMRBuilder job = new TupleMRBuilder(conf);
    job.addIntermediateSchema(schema);
    job.setGroupByFields("user_id");
    // Define the input and its associated mapper.
    // We'll just have a Mapper, reducer will be Identity
    job.addInput(new Path(input), new HadoopInputFormat(TextInputFormat.class),
            new TupleMapper<LongWritable, Text>() {

                Tuple tuple = new Tuple(schema);

                @Override
                public void map(LongWritable key, Text value, TupleMRContext context, Collector collector)
                        throws IOException, InterruptedException {
                    String[] fields = value.toString().split("\t");
                    String language = fields[1];
                    tuple.set("user_id", fields[0]);
                    tuple.set("message", fields[2]);
                    if (language.equals("en")) {
                        // English -> write to main output
                        collector.write(tuple);
                    } else if (language.equals("fr")) {
                        // French -> write to french index
                        collector.getNamedOutput("FR").write(tuple, NullWritable.get());
                    } else if (language.equals("es")) {
                        // Spanish -> write to spanish index
                        collector.getNamedOutput("ES").write(tuple, NullWritable.get());
                    }
                }
            });
    // Add multi-output: French index
    job.addNamedOutput("FR", new TupleSolrOutputFormat(new File("src/test/resources/solr-fr"), conf),
            ITuple.class, NullWritable.class);
    // Add multi-output: Spanish index
    job.addNamedOutput("ES", new TupleSolrOutputFormat(new File("src/test/resources/solr-es"), conf),
            ITuple.class, NullWritable.class);
    job.setTupleReducer(new IdentityTupleReducer());
    // Add multi-output: English index
    job.setOutput(new Path(output), new TupleSolrOutputFormat(new File("src/test/resources/solr-en"), conf),
            ITuple.class, NullWritable.class);
    Job hadoopJob = job.createJob();
    try {
        hadoopJob.waitForCompletion(true);
        if (!hadoopJob.isSuccessful()) {
            throw new PangoolRuntimeException("Job was not successful");
        }
    } finally {
        job.cleanUpInstanceFiles();
    }
    return 0;
}

From source file:com.datasalt.pangool.utils.test.AbstractHadoopTestLibrary.java

License:Apache License

public void assertRun(Job job) throws IOException, InterruptedException, ClassNotFoundException {
    FileSystem fs = FileSystem.get(job.getConfiguration());
    HadoopUtils.deleteIfExists(fs, FileOutputFormat.getOutputPath(job));
    // Close input writers first
    for (Map.Entry<String, Object> entry : inputs.entrySet()) {
        Object in = entry.getValue();
        if (in instanceof SequenceFile.Writer) {
            ((SequenceFile.Writer) in).close();
        } else if (in instanceof TupleFile.Writer) {
            ((TupleFile.Writer) in).close();
        }
    }
    job.waitForCompletion(true);
    Assert.assertTrue(job.isSuccessful());

}