Example usage for org.apache.hadoop.mapreduce Job subclass-usage

List of usage examples for org.apache.hadoop.mapreduce Job subclass-usage

Introduction

In this page you can find the example usage for org.apache.hadoop.mapreduce Job subclass-usage.

Usage

From source file com.linkedin.mr_kluj.StagedOutputJob.java

/**
 *
 */
public class StagedOutputJob extends Job {
    private final String stagingPrefix;
    private final Logger log;

From source file com.linkedin.whiteelephant.mapreduce.lib.job.StagedOutputJob.java

/**
 *
 */
public class StagedOutputJob extends Job implements Callable<Boolean> {
    private final String stagingPrefix;
    private final Logger log;

From source file com.scaleoutsoftware.soss.hserver.HServerJob.java

/**
 * This class should be used to direct the map reduce application to
 * use ScaleOut hServer infrastructure instead of a job tracker/task trackers.
 * It should be used in place of a {@link Job} class to make a Hadoop application
 * run on ScaleOut hServer.
 */

From source file com.splicemachine.mrio.api.mapreduce.SpliceJob.java

/**
 * SpliceJob controls submission of a MapReduce Job.
 * - Notice: You have to call commit() after the SpliceJob finishes successfully,
 *           and you have to call rollback() after the SpliceJob fails.
 *
 * @author Yanan Jian

From source file datafu.hourglass.jobs.StagedOutputJob.java

/**
 * A derivation of {@link Job} that stages its output in another location and only
 * moves it to the final destination if the job completes successfully.
 * It also outputs a counters file to the file system that contains counters fetched from Hadoop
 * and other task statistics.
 */

From source file edu.uci.ics.pregelix.api.job.PregelixJob.java

/**
 * This class represents a Pregelix job.
 */
public class PregelixJob extends Job {
    /** Vertex class - required */
    public static final String VERTEX_CLASS = "pregelix.vertexClass";

From source file org.apache.mahout.cf.taste.hadoop.RecommenderJob.java

/**
 * <p>This class configures and runs a {@link RecommenderMapper} using Hadoop.</p>
 *
 * <p>Command line arguments are:</p> <ol> <li>Fully-qualified class name of {@link Recommender} to use to make
 * recommendations. Note that it must have a constructor which takes a {@link org.apache.mahout.cf.taste.model.DataModel}
 * argument.</li> <li>Number of recommendations to compute per user</li> <li>Location of a text file containing user IDs

From source file org.apache.mahout.cf.taste.hadoop.SlopeOneDiffsToAveragesJob.java

public final class SlopeOneDiffsToAveragesJob extends Job {

    private SlopeOneDiffsToAveragesJob(Configuration jobConf) throws IOException {
        super(jobConf);
    }

From source file org.apache.mahout.cf.taste.hadoop.SlopeOnePrefsToDiffsJob.java

public final class SlopeOnePrefsToDiffsJob extends Job {

    private SlopeOnePrefsToDiffsJob(Configuration jobConf) throws IOException {
        super(jobConf);
    }

From source file org.apache.nutch.util.NutchJob.java

/** A {@link Job} for Nutch jobs. */
public class NutchJob extends Job {

    public NutchJob(Configuration conf) throws IOException {
        super(conf);
        setJarByClass(this.getClass());