Example usage for org.apache.hadoop.mapred JobClient setConf

List of usage examples for org.apache.hadoop.mapred JobClient setConf

Introduction

On this page you can find example usage for org.apache.hadoop.mapred JobClient setConf.

Prototype

@Override
public void setConf(Configuration conf)
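Before the individual listings, here is a minimal, self-contained sketch of the pattern they all share: build a JobConf, attach it to a JobClient with setConf, then submit it with JobClient.runJob. This sketch is illustrative only and is not taken from any of the source files below; it uses the stock IdentityMapper and IdentityReducer purely so it compiles on its own.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;

public class SetConfSketch {
    public static void main(String[] args) throws Exception {
        JobClient client = new JobClient();
        JobConf conf = new JobConf(SetConfSketch.class);
        conf.setJobName("setConf-sketch");

        // Pass-through mapper and reducer keep the sketch self-contained.
        conf.setMapperClass(IdentityMapper.class);
        conf.setReducerClass(IdentityReducer.class);

        // TextInputFormat produces <LongWritable, Text> records, which the
        // identity classes pass straight through to the output.
        conf.setOutputKeyClass(LongWritable.class);
        conf.setOutputValueClass(Text.class);

        conf.setInputFormat(TextInputFormat.class);
        conf.setOutputFormat(TextOutputFormat.class);

        FileInputFormat.setInputPaths(conf, new Path(args[0]));
        FileOutputFormat.setOutputPath(conf, new Path(args[1]));

        // setConf stores the configuration on the client object; the job
        // itself is still submitted via the static JobClient.runJob call.
        client.setConf(conf);
        JobClient.runJob(conf);
    }
}

Note that setConf only attaches the configuration to the JobClient instance; in all of the examples below the job is actually submitted through the static JobClient.runJob(conf) call, which uses the JobConf passed to it directly.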

Usage

From source file:CountHistogram.java

License: Open Source License

@Override
public int run(String[] args) throws Exception {
    try {
        JobClient client = new JobClient();
        JobConf job = new JobConf(getConf(), CountHistogram.class);
        job.setJobName("CountHistogram");

        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(IntWritable.class);

        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);

        job.setInputFormat(TextInputFormat.class);
        job.setOutputFormat(TextOutputFormat.class);

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        client.setConf(job);
        JobClient.runJob(job);
    } catch (Exception e) {
        e.printStackTrace();
        throw e;
    }
    return 0;
}

From source file:average.AverageDriver.java

public static void main(String[] args) {
    JobClient client = new JobClient();
    // Configurations for Job set in this variable
    JobConf conf = new JobConf(average.AverageDriver.class);

    // Name of the Job
    conf.setJobName("BookCrossing1.0");

    // Data type of Output Key and Value
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);

    // Setting the Mapper and Reducer Class
    conf.setMapperClass(average.AverageMapper.class);
    conf.setReducerClass(average.AverageReducer.class);

    // Formats of the Data Type of Input and output
    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    // Specify input and output DIRECTORIES (not files)
    FileInputFormat.setInputPaths(conf, new Path(args[1]));
    FileOutputFormat.setOutputPath(conf, new Path(args[2]));

    client.setConf(conf);
    try {
        // Running the job with Configurations set in the conf.
        JobClient.runJob(conf);
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:com.datatorrent.demos.mroperator.LineIndexer.java

License: Open Source License

/**
 * The actual main() method for our program; this is the
 * "driver" for the MapReduce job.
 */
public static void main(String[] args) {
    JobClient client = new JobClient();
    JobConf conf = new JobConf(LineIndexer.class);

    conf.setJobName("LineIndexer");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);

    FileInputFormat.addInputPath(conf, new Path("input"));
    FileOutputFormat.setOutputPath(conf, new Path("output"));

    conf.setMapperClass(LineIndexMapper.class);
    conf.setReducerClass(LineIndexReducer.class);

    client.setConf(conf);

    try {
        JobClient.runJob(conf);
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:com.ikanow.infinit.e.processing.custom.CustomProcessingController.java

License: Open Source License

public boolean killRunningJob(CustomMapReduceJobPojo jobToKillInfo) {
    try {
        Configuration conf = new Configuration();
        JobClient jc = new JobClient(InfiniteHadoopUtils.getJobClientConnection(prop_custom), conf);
        jc.setConf(conf); // (doesn't seem to be set by the above call)

        RunningJob jobToKill = jc.getJob(new JobID(jobToKillInfo.jobidS, jobToKillInfo.jobidN));
        if (null == jobToKill) {
            _logger.error("Couldn't find this job: " + jobToKillInfo.jobidS + "_" + jobToKillInfo.jobidN + " / "
                    + new JobID(jobToKillInfo.jobidS, jobToKillInfo.jobidN).toString());
            return false;
        }
        jobToKill.killJob();

        int nRuns = 0;
        while (!checkRunningJobs(jobToKillInfo)) {
            try {
                Thread.sleep(5000);
            } catch (Exception e) {
            }
            if (++nRuns > 24) { // bail out after 2 minutes 
                _logger.error("Killed job: " + jobToKillInfo.jobidS + "_" + jobToKillInfo.jobidN
                        + ", but job failed to stop within time allowed");
                return false;
            }
        }
        if (null != jobToKillInfo.derivedFromSourceKey) { // Update the derived source, if one exists
            BasicDBObject query = new BasicDBObject(SourcePojo.key_, jobToKillInfo.derivedFromSourceKey);
            BasicDBObject setUpdate = new BasicDBObject(SourceHarvestStatusPojo.sourceQuery_harvest_status_,
                    HarvestEnum.error.toString());
            setUpdate.put(SourceHarvestStatusPojo.sourceQuery_harvest_message_, "Manually stopped");
            BasicDBObject srcUpdate = new BasicDBObject(DbManager.set_, setUpdate);
            DbManager.getIngest().getSource().update(query, srcUpdate, false, false);
        } //TESTED (usually a bit pointless because it is then overwritten by the source publish)
        return true;
    } catch (Exception e) {
        _logger.error("Failed to kill job: " + jobToKillInfo.jobidS + "_" + jobToKillInfo.jobidN + " / "
                + e.getMessage(), e);
        return false;
    }
}

From source file:com.impetus.ankush2.hadoop.monitor.JobStatusProvider.java

License: Open Source License

/**
 * Gets the job client.
 * 
 * @return JobClient object
 */
private JobClient getJobClient(String host, String port) {
    // JobClient is the primary interface for the user-job to interact with
    // the JobTracker.
    JobClient jobClient = null;
    if (host == null) {
        host = "localhost";
    }
    LOG.info("Requesting JobClient...");
    try {
        // Provides access to configuration parameters.
        Configuration conf = new Configuration();
        LOG.info("JobClient : " + host + " & port : " + port);
        // Build a job client, connect to the indicated job tracker.
        jobClient = new JobClient(new InetSocketAddress(host, Integer.parseInt(port)), new JobConf(conf));
        // Set the configuration to be used by this object.
        jobClient.setConf(conf);
    } catch (Exception e) {
        LOG.error(e.getMessage(), e);
    }

    return jobClient;
}

From source file:combiner.CombinerDriver.java

public static void main(String[] args) {
    JobClient client = new JobClient();
    // Configurations for Job set in this variable
    JobConf conf = new JobConf(combiner.CombinerDriver.class);

    // Name of the Job
    conf.setJobName("BookCrossing1.0");

    // Data type of Output Key and Value
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    // Setting the Mapper and Reducer Class
    conf.setMapperClass(combiner.CombinerMapper.class);
    conf.setCombinerClass(combiner.CombinerReducer.class);
    conf.setReducerClass(combiner.CombinerReducer.class);

    // Formats of the Data Type of Input and output
    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    // Specify input and output DIRECTORIES (not files)
    FileInputFormat.setInputPaths(conf, new Path(args[1]));
    FileOutputFormat.setOutputPath(conf, new Path(args[2]));

    client.setConf(conf);
    try {
        // Running the job with Configurations set in the conf.
        JobClient.runJob(conf);
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:dinocode.SpeciesGraphBuilder.java

public static void main(String[] args) throws Exception {
    JobClient client = new JobClient();
    JobConf conf = new JobConf(SpeciesDriver.class);
    conf.setJobName("Page-rank Species Graph Builder");
    final File f = new File(SpeciesDriver.class.getProtectionDomain().getCodeSource().getLocation().getPath());
    String inFiles = f.getAbsolutePath().replace("/build/classes", "") + "/src/InputFiles/species_medium.txt";
    String outFiles = f.getAbsolutePath().replace("/build/classes", "") + "/src/outputFiles/Result";
    FileInputFormat.setInputPaths(conf, new Path(inFiles));
    FileOutputFormat.setOutputPath(conf, new Path(outFiles));

    //conf.setOutputKeyClass(Text.class); 
    //conf.setOutputValueClass(Text.class); 
    conf.setMapperClass(SpeciesGraphBuilderMapperd.class);
    conf.setMapOutputKeyClass(Text.class);
    conf.setMapOutputValueClass(Text.class);

    //conf.setInputFormat(org.apache.hadoop.mapred.TextInputFormat.class); 
    //conf.setOutputFormat(org.apache.hadoop.mapred.SequenceFileOutputFormat.class); 
    conf.setReducerClass(SpeciesGraphBuilderReducerd.class);
    //conf.setCombinerClass(SpeciesGraphBuilderReducer.class); 

    //conf.setInputPath(new Path("graph1")); 
    //conf.setOutputPath(new Path("graph2")); 

    client.setConf(conf);
    try {
        JobClient.runJob(conf);
    } catch (Exception e) {
        e.printStackTrace();
    }

    inFiles = f.getAbsolutePath().replace("/build/classes", "") + "/src/outputFiles/Result/part-00000";
    for (int i = 0; i < 500; i++) {
        client = new JobClient();
        conf = new JobConf(SpeciesDriver.class);
        conf.setJobName("Species Iter");

        int count = i + 1;
        outFiles = f.getAbsolutePath().replace("/build/classes", "") + "/src/outputFiles/Result" + count;
        conf.setNumReduceTasks(5);

        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(Text.class);

        FileInputFormat.setInputPaths(conf, new Path(inFiles));
        FileOutputFormat.setOutputPath(conf, new Path(outFiles));

        conf.setMapperClass(SpeciesIterMapper2d.class);
        conf.setReducerClass(SpeciesIterReducer2d.class);
        conf.setCombinerClass(SpeciesIterReducer2d.class);

        client.setConf(conf);
        try {
            JobClient.runJob(conf);
        } catch (Exception e) {
            e.printStackTrace();
        }
        inFiles = outFiles;

    }

    //Viewer
    client = new JobClient();
    conf = new JobConf(SpeciesDriver.class);
    conf.setJobName("Species Viewer");

    conf.setOutputKeyClass(FloatWritable.class);
    conf.setOutputValueClass(Text.class);

    inFiles = f.getAbsolutePath().replace("/build/classes", "") + "/src/outputFiles/Result500/part-00000";
    outFiles = f.getAbsolutePath().replace("/build/classes", "") + "/src/outputFiles/ResultFinal";

    FileInputFormat.setInputPaths(conf, new Path(inFiles));
    FileOutputFormat.setOutputPath(conf, new Path(outFiles));

    conf.setMapperClass(SpeciesViewerMapperd.class);
    conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);

    client.setConf(conf);
    try {
        JobClient.runJob(conf);
    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:gr.forth.ics.isl.grouprecsmr.multiuser.MultiUserMain.java

public static void main(String[] args) {
    //input handling (validate the arguments before using them)
    if (args.length < 2 || args.length > 3) {
        System.err.println(
                "Incorrect input. Example usage: hadoop jar ~/GroupRecs/MultiUser.jar inputPath outputPath [numReduceTasks]");
        return;
    }

    //paths
    Path inputRatingsPath = new Path(args[0]); //movieid, userid, rating (text files)
    Path job1OutputPath = new Path("/user/hduser/partialResults");
    Path partialDistancesPath = new Path("/user/hduser/partialResults/part-*"); //member_nonMember \t partialDistance (sequence files)
    Path candidateMoviesPath = new Path("/user/hduser/partialResults/candidateMovies-*"); //candidateMovieId, nonMemberUserId_rating (text files)
    Path userSimilaritiesPath = new Path("/user/hduser/userSimilarities"); //similarity of each group member to his friends (text files)
    Path finalScoresPath = new Path(args[1]); //movieId \t outputScore

    int numReduceTasks = 56; //default value
    if (args.length == 3) {
        numReduceTasks = Integer.parseInt(args[2]);
    }

    final float friendsSimThresh = 0.8f;

    String groupFilePath = "/user/hduser/group.txt"; //one-line csv file with user ids (text file)

    //JOB 1//
    JobClient client = new JobClient();
    JobConf conf = new JobConf(gr.forth.ics.isl.grouprecsmr.multiuser.MultiUserMain.class);

    try {
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(job1OutputPath)) {
            fs.delete(job1OutputPath, true);
        }
        if (fs.exists(userSimilaritiesPath)) {
            fs.delete(userSimilaritiesPath, true);
        }
        if (fs.exists(finalScoresPath)) {
            fs.delete(finalScoresPath, true);
        }
    } catch (IOException ex) {
        Logger.getLogger(MultiUserMain.class.getName()).log(Level.SEVERE, null, ex);
    }

    conf.setJobName("Multi-user approach - Job 1");
    System.out.println("Starting Job 1 (Multi-user approach)...");

    conf.setMapOutputKeyClass(VIntWritable.class);
    conf.setMapOutputValueClass(Text.class);

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(ByteWritable.class);

    conf.setInputFormat(TextInputFormat.class);
    //conf.setOutputFormat(TextOutputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    SequenceFileOutputFormat.setOutputCompressionType(conf, SequenceFile.CompressionType.BLOCK);

    FileInputFormat.setInputPaths(conf, inputRatingsPath); //user ratings
    FileOutputFormat.setOutputPath(conf, job1OutputPath); //partial distances

    MultipleOutputs.addNamedOutput(conf, "candidateMovies", SequenceFileOutputFormat.class, VIntWritable.class,
            Text.class); //movieId, userId_rating

    conf.setMapperClass(gr.forth.ics.isl.grouprecsmr.job1.Job1Mapper.class);
    conf.setReducerClass(gr.forth.ics.isl.grouprecsmr.job1.Job1Reducer.class);

    conf.setNumReduceTasks(numReduceTasks);

    try {
        DistributedCache.addCacheFile(new URI(groupFilePath), conf); // group   
    } catch (URISyntaxException e1) {
        System.err.println(e1.toString());
    }

    conf.setInt("mapred.task.timeout", 6000000);

    client.setConf(conf);
    RunningJob job;
    try {
        job = JobClient.runJob(conf);
        job.waitForCompletion();
    } catch (Exception e) {
        System.err.println(e);
    }

    //JOB 2//
    System.out.println("Starting Job 2 (Multi-user approach)...");
    JobClient client2 = new JobClient();
    JobConf conf2 = new JobConf(gr.forth.ics.isl.grouprecsmr.multiuser.MultiUserMain.class);

    conf2.setJobName("Multi-user approach - Job 2");

    conf2.setMapOutputKeyClass(Text.class); //user pair (member_nonMember), where nonMember is in friends
    conf2.setMapOutputValueClass(ByteWritable.class);//similarity part unsquared

    conf2.setOutputKeyClass(Text.class); //user pair (member_nonMember), where nonMember is in friends
    conf2.setOutputValueClass(DoubleWritable.class);//similarity

    conf2.setInputFormat(SequenceFileInputFormat.class);
    //conf2.setInputFormat(TextInputFormat.class);
    conf2.setOutputFormat(TextOutputFormat.class);
    //conf2.setOutputFormat(SequenceFileOutputFormat.class);
    //SequenceFileOutputFormat.setOutputCompressionType(conf2, SequenceFile.CompressionType.BLOCK);

    FileInputFormat.setInputPaths(conf2, partialDistancesPath); //Job 1 output
    FileOutputFormat.setOutputPath(conf2, userSimilaritiesPath); //Job 2 output (similarity of each group member to his friends)

    conf2.setMapperClass(IdentityMapper.class);
    conf2.setReducerClass(gr.forth.ics.isl.grouprecsmr.job2.Job2ReducerMulti.class);

    int numSimilaritiesPartitions = numReduceTasks;
    conf2.setNumReduceTasks(numSimilaritiesPartitions);

    conf2.setFloat("friendsSimThreshold", friendsSimThresh);

    conf2.setInt("mapred.task.timeout", 6000000);
    conf2.set("io.sort.mb", "500");

    client2.setConf(conf2);
    RunningJob job2;
    try {
        job2 = JobClient.runJob(conf2);
        job2.waitForCompletion();
    } catch (Exception e) {
        System.err.println(e);
    }

    //JOB 3//
    System.out.println("Starting Job 3 (Multi-user approach)...");
    JobClient client3 = new JobClient();
    JobConf conf3 = new JobConf(gr.forth.ics.isl.grouprecsmr.multiuser.MultiUserMain.class);

    conf3.setJobName("Multi-user approach - Job 3");

    conf3.setMapOutputKeyClass(VIntWritable.class);
    conf3.setMapOutputValueClass(Text.class);

    conf3.setOutputKeyClass(VIntWritable.class);
    conf3.setOutputValueClass(DoubleWritable.class);

    conf3.setInputFormat(SequenceFileInputFormat.class);
    //conf3.setInputFormat(TextInputFormat.class);
    conf3.setOutputFormat(TextOutputFormat.class);
    //conf3.setOutputFormat(SequenceFileOutputFormat.class);
    //SequenceFileOutputFormat.setOutputCompressionType(conf3,SequenceFile.CompressionType.BLOCK);

    try {
        DistributedCache.addCacheFile(new URI(groupFilePath), conf3);
    } catch (URISyntaxException ex) {
        System.err.println("Could not add group file to distributed cache. " + ex);
    }
    for (int i = 0; i < numSimilaritiesPartitions; i++) {
        String reduceId = String.format("%05d", i); //5-digit id with leading zeros
        try {
            DistributedCache.addCacheFile(new URI(userSimilaritiesPath.toString() + "/part-" + reduceId),
                    conf3);
        } catch (URISyntaxException ex) {
            System.err.println("Could not add similarities files to distributed cache. " + ex);
        }

    }

    FileInputFormat.setInputPaths(conf3, candidateMoviesPath); //Job 1 output (candidate movies)
    FileOutputFormat.setOutputPath(conf3, finalScoresPath); //Job 3 output (movie \t outputScore)

    //        conf3.setMapperClass(IdentityMapper.class);      
    conf3.setMapperClass(gr.forth.ics.isl.grouprecsmr.job3.Job3MapperMulti.class); //filtering out ratings from non-Friends
    conf3.setReducerClass(gr.forth.ics.isl.grouprecsmr.job3.Job3ReducerMulti.class);

    conf3.setInt("mapred.task.timeout", 6000000);
    conf3.set("io.sort.mb", "500");

    conf3.setNumReduceTasks(numReduceTasks);

    client3.setConf(conf3);
    RunningJob job3;
    try {
        job3 = JobClient.runJob(conf3);
        job3.waitForCompletion();
    } catch (Exception e) {
        System.err.println(e);
    }
}

From source file:infinidb.hadoop.example.InfiniDBOutputDriver.java

License: Apache License

public int run(String[] args) throws Exception {
    Configuration conf = new Configuration();
    JobConf jobconf = new JobConf(conf, InfiniDoopDriver.class);
    DBConfiguration.configureDB(jobconf, "com.mysql.jdbc.Driver", "jdbc:mysql://srvswint4/tpch1", "root", "");
    String[] fields = { "n_nationkey", "n_name" };
    String[] outFields = { "id", "name" };
    jobconf.setInputFormat(IDBFileInputFormat.class);
    jobconf.setOutputFormat(InfiniDBOutputFormat.class);
    jobconf.setOutputKeyClass(NullWritable.class);
    jobconf.setOutputValueClass(Text.class);
    InfiniDBOutputFormat.setOutput(jobconf, "db", outFields);
    InfiniDBConfiguration idbconf = new InfiniDBConfiguration(jobconf);
    idbconf.setInputPath("input");
    idbconf.setOutputPath("output");
    idbconf.setInfiniDBHome("/usr/local/Calpont");

    jobconf.setMapperClass(InfiniDoopMapper.class);
    jobconf.setNumMapTasks(1);
    jobconf.setNumReduceTasks(2);
    JobClient client = new JobClient();
    client.setConf(jobconf);
    try {
        JobClient.runJob(jobconf);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return 0;
}

From source file:infinidb.hadoop.example.InfiniDoopDriver.java

License: Apache License

public int run(String[] args) throws Exception {
    Configuration conf = new Configuration();
    JobConf jobconf = new JobConf(conf, InfiniDoopDriver.class);
    DBConfiguration.configureDB(jobconf, "com.mysql.jdbc.Driver", "jdbc:mysql://srvswint4/tpch1", "root", "");
    String[] fields = { "n_nationkey", "n_name" };
    jobconf.setInputFormat(InfiniDBInputFormat.class);

    jobconf.setOutputKeyClass(LongWritable.class);
    jobconf.setOutputValueClass(Text.class);

    InfiniDBInputFormat.setInput(jobconf, InfiniDoopRecord.class, "nation", null, "n_nationkey", fields);

    InfiniDBConfiguration idbconf = new InfiniDBConfiguration(jobconf);
    idbconf.setOutputPath("output2");
    jobconf.setMapperClass(InfiniDoopInputMapper.class);
    jobconf.setNumMapTasks(4);
    jobconf.setNumReduceTasks(1);
    jobconf.set("mapred.textoutputformat.separator", "|");
    JobClient client = new JobClient();

    client.setConf(jobconf);
    try {
        JobClient.runJob(jobconf);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return 0;
}