Example usage for org.apache.hadoop.mapred JobConf getClass

Introduction

This page collects example usages of the getClass method of org.apache.hadoop.mapred.JobConf.

Prototype

public <U> Class<? extends U> getClass(String name, Class<? extends U> defaultValue, Class<U> xface) 

Document

Get the value of the name property as a Class implementing the interface specified by xface. If no such property is specified, the defaultValue is returned; an exception is thrown if the resolved class does not implement the named interface.
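
Before the real-world usages below, here is a minimal, self-contained sketch of the call. The property name my.codec.class is a hypothetical key invented for this illustration; the codec classes are standard Hadoop types.

import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.ReflectionUtils;

public class GetClassExample {
    public static void main(String[] args) {
        JobConf conf = new JobConf();
        // "my.codec.class" is a made-up property name for illustration only.
        // If it is unset, DefaultCodec.class is returned; a configured class
        // that does not implement CompressionCodec would cause a RuntimeException.
        Class<? extends CompressionCodec> codecClass = conf.getClass("my.codec.class", DefaultCodec.class,
                CompressionCodec.class);
        // Instantiate the resolved class, injecting the configuration.
        CompressionCodec codec = ReflectionUtils.newInstance(codecClass, conf);
        System.out.println("Using codec: " + codec.getClass().getName());
    }
}

As the examples below show, pairing getClass with ReflectionUtils.newInstance is the usual pattern for turning a configured class name into a configured instance.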

Usage

From source file: cascading.flow.hadoop.HadoopFlowStep.java

License: Open Source License

protected void initFromSink(FlowProcess<JobConf> flowProcess, JobConf conf) {
    // init sink first so tempSink can take precedence
    if (getSink() != null)
        getSink().sinkConfInit(flowProcess, conf);

    Class<? extends OutputFormat> outputFormat = conf.getClass("mapred.output.format.class", null,
            OutputFormat.class);
    boolean isFileOutputFormat = false;

    if (outputFormat != null)
        isFileOutputFormat = FileOutputFormat.class.isAssignableFrom(outputFormat);

    Path outputPath = FileOutputFormat.getOutputPath(conf);

    // if no output path is set, we need to substitute an alternative if the OutputFormat is file based
    // PartitionTap won't set the output, but will set an OutputFormat
    // MultiSinkTap won't set the output or set the OutputFormat
    // Non file based OutputFormats don't have an output path, but do have an OutputFormat set (JDBCTap..)
    if (outputPath == null && (isFileOutputFormat || outputFormat == null))
        tempSink = new TempHfs(conf, "tmp:/" + new Path(getSink().getIdentifier()).toUri().getPath(), true);

    // tempSink exists because sink is writeDirect
    if (tempSink != null)
        tempSink.sinkConfInit(flowProcess, conf);
}

From source file: colossal.pipe.ColHadoopCombiner.java

License: Apache License

@Override
@SuppressWarnings("unchecked")
protected ColReducer<V, V> getReducer(JobConf conf) {
    return ReflectionUtils.newInstance(conf.getClass(ColPhase.COMBINER, BaseReducer.class, ColReducer.class),
            conf);
}

From source file: colossal.pipe.ColHadoopMapper.java

License: Apache License

@SuppressWarnings("unchecked")
public void configure(JobConf conf) {
    this.mapper = ReflectionUtils.newInstance(conf.getClass(ColPhase.MAPPER, BaseMapper.class, ColMapper.class),
            conf);
    this.isMapOnly = conf.getNumReduceTasks() == 0;
    try {
        this.out = (OUT) ReflectionUtils
                .newInstance(conf.getClass(ColPhase.MAP_OUT_CLASS, Object.class, Object.class), conf);
        this.schema = ColPhase.getSchema(this.out);
        this.groupBy = conf.get(ColPhase.GROUP_BY);
        this.sortBy = conf.get(ColPhase.SORT_BY);
        if (conf.getInputFormat() instanceof TextInputFormat) {
            Class<?> inClass = conf.getClass(ColPhase.MAP_IN_CLASS, Object.class, Object.class);
            if (inClass == String.class) {
                isStringInput = true;
            } else if (inClass == Text.class) {
                isTextInput = true;
            } else {
                isJsonInput = true;
                inSchema = ColPhase.getSchema((IN) ReflectionUtils.newInstance(inClass, conf));
            }
        }
    } catch (Exception e) {
        if (e instanceof RuntimeException)
            throw (RuntimeException) e;
        throw new RuntimeException(e);
    }

    mapper.setConf(conf);
}

From source file: colossal.pipe.ColHadoopReducer.java

License: Apache License

@Override
@SuppressWarnings("unchecked")
protected ColReducer<V, OUT> getReducer(JobConf conf) {
    return ReflectionUtils.newInstance(conf.getClass(ColPhase.REDUCER, BaseReducer.class, ColReducer.class),
            conf);
}

From source file: com.benchmark.mapred.dancing.DistributedPentomino.java

License: Apache License

public int run(String[] args) throws Exception {
    JobConf conf;
    int depth = 5;
    int width = 9;
    int height = 10;
    Class<? extends Pentomino> pentClass;
    if (args.length == 0) {
        System.out.println("pentomino <output>");
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    conf = new JobConf(getConf());
    width = conf.getInt("pent.width", width);
    height = conf.getInt("pent.height", height);
    depth = conf.getInt("pent.depth", depth);
    pentClass = conf.getClass("pent.class", OneSidedPentomino.class, Pentomino.class);

    Path output = new Path(args[0]);
    Path input = new Path(output + "_input");
    FileSystem fileSys = FileSystem.get(conf);
    try {
        FileInputFormat.setInputPaths(conf, input);
        FileOutputFormat.setOutputPath(conf, output);
        conf.setJarByClass(PentMap.class);

        conf.setJobName("dancingElephant");
        Pentomino pent = ReflectionUtils.newInstance(pentClass, conf);
        pent.initialize(width, height);
        createInputDirectory(fileSys, input, pent, depth);

        // the keys are the prefix strings
        conf.setOutputKeyClass(Text.class);
        // the values are puzzle solutions
        conf.setOutputValueClass(Text.class);

        conf.setMapperClass(PentMap.class);
        conf.setReducerClass(IdentityReducer.class);

        conf.setNumMapTasks(2000);
        conf.setNumReduceTasks(1);

        JobClient.runJob(conf);
    } finally {
        fileSys.delete(input, true);
    }
    return 0;
}

From source file: com.bianfeng.bfas.hive.io.RealtimeInputFormat2.java

License: Apache License

/**
 * Get a PathFilter instance of the filter set for the input paths.
 *
 * @return the PathFilter instance set for the job, or null if none has been set.
 */
public static PathFilter getInputPathFilter(JobConf conf) {
    Class<? extends PathFilter> filterClass = conf.getClass("mapred.input.pathFilter.class", null,
            PathFilter.class);
    return (filterClass != null) ? ReflectionUtils.newInstance(filterClass, conf) : null;
}
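
A note on the pattern above: passing null as the default lets the caller distinguish "no filter configured" from a configured one. Below is a minimal sketch of how such a filter might be registered and then retrieved, reusing the mapred.input.pathFilter.class property read by the snippet above; the filter class itself is invented for this illustration.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.ReflectionUtils;

public class PathFilterExample {
    /** A hypothetical filter that skips files whose names begin with an underscore. */
    public static class SkipUnderscoreFilter implements PathFilter {
        @Override
        public boolean accept(Path path) {
            return !path.getName().startsWith("_");
        }
    }

    public static void main(String[] args) {
        JobConf conf = new JobConf();
        // Register the filter class under the property read by getInputPathFilter.
        conf.setClass("mapred.input.pathFilter.class", SkipUnderscoreFilter.class, PathFilter.class);

        // Retrieve it the same way the snippet above does: a null default means
        // "no filter configured" when the property is absent.
        Class<? extends PathFilter> filterClass = conf.getClass("mapred.input.pathFilter.class", null,
                PathFilter.class);
        PathFilter filter = (filterClass != null) ? ReflectionUtils.newInstance(filterClass, conf) : null;
        System.out.println(filter != null && filter.accept(new Path("/data/_logs"))); // prints false
    }
}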

From source file: com.github.gaoyangthu.demo.mapred.dancing.DistributedPentomino.java

License: Apache License

public int run(String[] args) throws Exception {
    JobConf conf;
    int depth = 5;
    int width = 9;
    int height = 10;
    Class<? extends Pentomino> pentClass;
    if (args.length == 0) {
        System.out.println("Usage: pentomino <output> [-depth #] [-height #] [-width #]");
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    conf = new JobConf(getConf());

    // Pick up any parameters the user may already have set in the configuration
    width = conf.getInt("pent.width", width);
    height = conf.getInt("pent.height", height);
    depth = conf.getInt("pent.depth", depth);
    pentClass = conf.getClass("pent.class", OneSidedPentomino.class, Pentomino.class);

    for (int i = 0; i < args.length; i++) {
        if (args[i].equalsIgnoreCase("-depth")) {
            depth = Integer.parseInt(args[++i].trim());
        } else if (args[i].equalsIgnoreCase("-height")) {
            height = Integer.parseInt(args[++i].trim());
        } else if (args[i].equalsIgnoreCase("-width")) {
            width = Integer.parseInt(args[++i].trim());
        }
    }

    // Write the parameters back so MR tasks pick them up, whether the user
    // set them explicitly or the defaults apply
    conf.setInt("pent.width", width);
    conf.setInt("pent.height", height);
    conf.setInt("pent.depth", depth);

    Path output = new Path(args[0]);
    Path input = new Path(output + "_input");
    FileSystem fileSys = FileSystem.get(conf);
    try {
        FileInputFormat.setInputPaths(conf, input);
        FileOutputFormat.setOutputPath(conf, output);
        conf.setJarByClass(PentMap.class);

        conf.setJobName("dancingElephant");
        Pentomino pent = ReflectionUtils.newInstance(pentClass, conf);
        pent.initialize(width, height);
        createInputDirectory(fileSys, input, pent, depth);

        // the keys are the prefix strings
        conf.setOutputKeyClass(Text.class);
        // the values are puzzle solutions
        conf.setOutputValueClass(Text.class);

        conf.setMapperClass(PentMap.class);
        conf.setReducerClass(IdentityReducer.class);

        conf.setNumMapTasks(2000);
        conf.setNumReduceTasks(1);

        JobClient.runJob(conf);
    } finally {
        fileSys.delete(input, true);
    }
    return 0;
}

From source file: com.hadoopilluminated.examples.dancing.DistributedPentomino.java

License: Apache License

public int run(String[] args) throws Exception {
    JobConf conf;
    int depth = 5;
    int width = 9;
    int height = 10;
    Class<? extends Pentomino> pentClass;
    if (args.length == 0) {
        System.out.println("pentomino <output>");
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    conf = new JobConf(getConf());
    width = conf.getInt("pent.width", width);
    height = conf.getInt("pent.height", height);
    depth = conf.getInt("pent.depth", depth);
    pentClass = conf.getClass("pent.class", OneSidedPentomino.class, Pentomino.class);

    Path output = new Path(args[0]);
    Path input = new Path(output + "_input");
    FileSystem fileSys = FileSystem.get(conf);
    try {
        FileInputFormat.setInputPaths(conf, input);
        FileOutputFormat.setOutputPath(conf, output);
        conf.setJarByClass(PentMap.class);

        conf.setJobName("dancingElephant");
        Pentomino pent = ReflectionUtils.newInstance(pentClass, conf);
        pent.initialize(width, height);
        createInputDirectory(fileSys, input, pent, depth);

        // the keys are the prefix strings
        conf.setOutputKeyClass(Text.class);
        // the values are puzzle solutions
        conf.setOutputValueClass(Text.class);

        conf.setMapperClass(PentMap.class);
        conf.setReducerClass(IdentityReducer.class);

        conf.setNumMapTasks(2000);
        conf.setNumReduceTasks(1);

        JobClient.runJob(conf);
    } finally {
        fileSys.delete(input, true);
    }
    return 0;
}

From source file: com.ibm.jaql.io.hadoop.Db2InputFormat.java

License: Apache License

protected void init(JobConf conf) throws IOException, SQLException {
    String url = conf.get(URL_KEY);
    String jsonRec = conf.get(PROPERTIES_KEY);
    Class<? extends Driver> driverClass = conf.getClass(DRIVER_KEY, null, Driver.class);
    if (driverClass == null) {
        throw new RuntimeException("jdbc driver class not found: " + conf.get(DRIVER_KEY));
    }

    Driver driver;
    try {
        driver = driverClass.newInstance();
    } catch (Exception e) {
        throw new UndeclaredThrowableException(e);// IOException("Error constructing jdbc driver", e);
    }

    Properties props = new Properties();

    if (jsonRec != null && !"".equals(jsonRec)) {
        try {
            JsonParser parser = new JsonParser(new StringReader(jsonRec));
            JsonRecord jrec = (JsonRecord) parser.JsonVal();
            for (Entry<JsonString, JsonValue> f : jrec) {
                JsonString key = f.getKey();
                JsonValue value = f.getValue();
                props.setProperty(key.toString(), value == null ? null : value.toString());
            }
        } catch (ParseException pe) {
            throw new UndeclaredThrowableException(pe); // IOException("couldn't parse "+PROPERTIES_KEY+" = "+jsonRec, pe);
        }
    }

    // conn = DriverManager.getConnection(url, props);
    conn = driver.connect(url, props);
}

From source file: com.ibm.jaql.io.hadoop.SelectSplitInputFormat.java

License: Apache License

@Override
public void configure(JobConf conf) {
    Class<? extends InputFormat> inputFormatCls = conf.getClass(INPUT_FORMAT, null, InputFormat.class);
    iFormat = ReflectionUtils.newInstance(inputFormatCls, conf);
    Class<? extends InputSplit> splitCls = conf.getClass(SPLIT_CLASS, null, InputSplit.class);
    split = ReflectionUtils.newInstance(splitCls, conf);
    byte[] bytes = ConfUtil.readBinary(conf, SPLIT);
    DataInputBuffer buffer = new DataInputBuffer();
    buffer.reset(bytes, bytes.length);
    try {
        split.readFields(buffer);
    } catch (IOException e) {
        throw new UndeclaredThrowableException(e);
    }
}