Example usage for org.apache.hadoop.mapred JobConf set

Introduction

This page collects example usages of the org.apache.hadoop.mapred JobConf.set method, drawn from open-source projects.

Prototype

public void set(String name, String value) 

Document

Set the value of the name property.
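
Below is a minimal, self-contained sketch of the call itself; the property name demo.greeting and its value are invented for illustration:

import org.apache.hadoop.mapred.JobConf;

public class JobConfSetExample {
    public static void main(String[] args) {
        JobConf conf = new JobConf();
        // Store an arbitrary string-valued property in the job configuration.
        conf.set("demo.greeting", "hello");
        // get(String) retrieves the value previously stored with set(String, String).
        System.out.println(conf.get("demo.greeting")); // prints "hello"
    }
}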

Usage

From source file: edu.stolaf.cs.wmrserver.streaming.StreamJob.java

License: Apache License

public static void setStreamReducer(JobConf conf, String reduceCommand) {
    conf.setReducerClass(PipeReducer.class);
    try {
        conf.set("stream.reduce.streamprocessor", URLEncoder.encode(reduceCommand, "UTF-8"));
    } catch (java.io.UnsupportedEncodingException ex) {
        throw new RuntimeException("The sky is falling! Java doesn't support UTF-8.");
    }
}
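
A note on the catch block above: the Java platform specification requires every JVM to support UTF-8, so the UnsupportedEncodingException declared by URLEncoder.encode can never actually occur here; the rethrow exists only because the checked exception must be handled, hence the tongue-in-cheek message.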

From source file: edu.stolaf.cs.wmrserver.streaming.StreamJob.java

License: Apache License

protected static void setTaskEnvironment(JobConf conf, String vars) {
    conf.set("stream.addenvironment", vars);
}

From source file: edu.ub.ahstfg.hadoop.ParamSet.java

License: Open Source License

/**
 * Transfers the parameters to a Hadoop job.
 * @param job Job where parameters will be transferred.
 */
public void toJobConf(JobConf job) {
    for (String key : strings.keySet()) {
        job.set(key, strings.get(key));
    }
    for (String key : ints.keySet()) {
        job.setInt(key, ints.get(key));
    }
    for (String key : floats.keySet()) {
        job.setFloat(key, floats.get(key));
    }
}
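
The typed setters used above (setInt, setFloat) are convenience wrappers that store the value as a string under the same property, so a value written with setInt can be read back with getInt or with plain get. A small sketch, with the property name demo.count invented for illustration:

import org.apache.hadoop.mapred.JobConf;

public class TypedSettersExample {
    public static void main(String[] args) {
        JobConf conf = new JobConf();
        conf.setInt("demo.count", 42);                     // stored as the string "42"
        System.out.println(conf.get("demo.count"));        // prints "42"
        System.out.println(conf.getInt("demo.count", 0));  // prints 42
    }
}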

From source file: edu.ubc.mirrors.holographs.mapreduce.Driver.java

License: Open Source License

public int run(String[] args) throws Exception {
    JobConf job = new JobConf(getConf());
    job.setClassLoader(Driver.class.getClassLoader());
    job.setInputFormat(SnapshotObjectsOfTypeInputFormat.class);
    job.setMapperClass(InvokeMethodMapper.class);
    job.setCombinerClass(TextCountSumReducer.class);
    job.setReducerClass(TextCountSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    job.set("snapshotPath", args[0]);
    job.set("targetClassName", "org.eclipse.cdt.internal.core.dom.parser.cpp.CPPASTName");
    job.setInt("splitSize", 10000);
    job.setInt("maxNumObjects", 100000);

    FileInputFormat.addInputPath(job, new Path(args[0]));

    String outputPath = args[1];
    int suffix = 2;
    while (new File(outputPath).exists()) {
        outputPath = args[1] + suffix++;
    }
    FileOutputFormat.setOutputPath(job, new Path(outputPath));

    JobClient.runJob(job);
    return 0;
}
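
One caveat worth noting in this example: the existence check uses java.io.File, which consults the local filesystem. If the job's output path lives on HDFS, the check would instead have to go through Hadoop's FileSystem API (for example FileSystem.get(job).exists(new Path(outputPath))).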

From source file: edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory.java

License: Apache License

public static JobConf configureJobConf(Map<String, String> configuration) throws Exception {
    JobConf conf = new JobConf();
    String formatClassName = (String) formatClassNames
            .get(((String) configuration.get(KEY_INPUT_FORMAT)).trim());
    if (formatClassName == null) {
        formatClassName = ((String) configuration.get(KEY_INPUT_FORMAT)).trim();
    }
    conf.set("fs.default.name", ((String) configuration.get(KEY_HDFS_URL)).trim());
    conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    conf.setClassLoader(HDFSAdapter.class.getClassLoader());
    conf.set("mapred.input.dir", ((String) configuration.get(KEY_PATH)).trim());
    conf.set("mapred.input.format.class", formatClassName);
    return conf;
}
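
Note that fs.default.name and mapred.input.dir (used here and in the next example) are legacy, pre-Hadoop-2.x property names; newer code would use fs.defaultFS and FileInputFormat.setInputPaths, although the old names remain usable through Hadoop's deprecation mapping.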

From source file: edu.uci.ics.asterix.test.runtime.HDFSCluster.java

License: Apache License

private static JobConf configureJobConf() throws Exception {
    JobConf conf = new JobConf();
    String hdfsUrl = "hdfs://127.0.0.1:31888";
    String hdfsPath = "/asterix/extrasmalltweets.txt";
    conf.set("fs.default.name", hdfsUrl);
    conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    conf.setClassLoader(HDFSAdapter.class.getClassLoader());
    conf.set("mapred.input.dir", hdfsPath);
    conf.set("mapred.input.format.class", "org.apache.hadoop.mapred.TextInputFormat");
    return conf;
}

From source file: edu.uci.ics.hyracks.dataflow.hadoop.AbstractHadoopOperatorDescriptor.java

License: Apache License

public void populateCache(JobConf jobConf) {
    try {
        String cache = jobConf.get(MAPRED_CACHE_FILES);
        System.out.println("cache:" + cache);
        if (cache == null) {
            return;
        }
        String localCache = jobConf.get(MAPRED_CACHE_LOCALFILES);
        System.out.println("localCache:" + localCache);
        if (localCache != null) {
            return;
        }
        localCache = "";
        StringTokenizer cacheTokenizer = new StringTokenizer(cache, ",");
        while (cacheTokenizer.hasMoreTokens()) {
            if (!"".equals(localCache)) {
                localCache += ",";
            }
            try {
                localCache += DCacheClient.get().get(cacheTokenizer.nextToken());
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
        jobConf.set(MAPRED_CACHE_LOCALFILES, localCache);
        System.out.println("localCache:" + localCache);
    } catch (Exception e) {
        // Any failure while populating the cache is silently swallowed.
    }
}

From source file: edu.uci.ics.hyracks.dataflow.hadoop.util.DatatypeHelper.java

License: Apache License

public static JobConf map2JobConf(Map<String, String> jobConfMap) {
    JobConf jobConf;
    synchronized (Configuration.class) {
        jobConf = new JobConf();
        for (Entry<String, String> entry : jobConfMap.entrySet()) {
            jobConf.set(entry.getKey(), entry.getValue());
        }
    }
    return jobConf;
}
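
The synchronized (Configuration.class) block above presumably guards JobConf construction because Configuration's static default-resource loading has not always been thread-safe; serializing construction this way is a defensive measure by the authors rather than anything required by the JobConf API.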

From source file: edu.uci.ics.hyracks.hadoop.compat.driver.CompatibilityLayer.java

License: Apache License

private JobConf constructHadoopJobConf(String jobFile) {
    Properties jobProperties = Utilities.getProperties(jobFile, '=');
    JobConf conf = new JobConf(hadoopAdapter.getConf());
    for (Entry entry : jobProperties.entrySet()) {
        conf.set((String) entry.getKey(), (String) entry.getValue());
        System.out.println((String) entry.getKey() + " : " + (String) entry.getValue());
    }
    return conf;
}

From source file: edu.uci.ics.pregelix.core.util.DataGenerator.java

License: Apache License

public static void main(String[] args) throws IOException {

    JobConf job = new JobConf(DataGenerator.class);
    FileSystem dfs = FileSystem.get(job);
    String maxFile = "/maxtemp";
    dfs.delete(new Path(maxFile), true);

    job.setJobName(DataGenerator.class.getSimpleName() + "max ID");
    job.setMapperClass(MapMaxId.class);
    job.setCombinerClass(CombineMaxId.class);
    job.setReducerClass(ReduceMaxId.class);
    job.setMapOutputKeyClass(NullWritable.class);
    job.setMapOutputValueClass(VLongWritable.class);

    job.setInputFormat(TextInputFormat.class);
    FileInputFormat.setInputPaths(job, args[0]);
    FileOutputFormat.setOutputPath(job, new Path(maxFile));
    job.setNumReduceTasks(1);
    JobClient.runJob(job);

    job = new JobConf(DataGenerator.class);
    job.set("hyracks.maxid.file", maxFile);
    job.setInt("hyracks.x", Integer.parseInt(args[2]));
    dfs.delete(new Path(args[1]), true);

    job.setJobName(DataGenerator.class.getSimpleName());
    job.setMapperClass(MapRecordGen.class);
    job.setReducerClass(ReduceRecordGen.class);
    job.setMapOutputKeyClass(LongWritable.class);
    job.setMapOutputValueClass(Text.class);

    job.setInputFormat(TextInputFormat.class);
    FileInputFormat.setInputPaths(job, args[0]);
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    job.setNumReduceTasks(Integer.parseInt(args[3]));

    if (args.length > 4) {
        if (args[4].startsWith("bzip"))
            FileOutputFormat.setOutputCompressorClass(job, BZip2Codec.class);
        if (args[4].startsWith("gz"))
            FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
    }
    JobClient.runJob(job);
}