Example usage for org.apache.hadoop.mapreduce.lib.chain ChainMapper addMapper

List of usage examples for org.apache.hadoop.mapreduce.lib.chain ChainMapper addMapper

Introduction

On this page you can find example usage of org.apache.hadoop.mapreduce.lib.chain ChainMapper addMapper.

Prototype

public static void addMapper(Job job, Class<? extends Mapper> klass, Class<?> inputKeyClass,
        Class<?> inputValueClass, Class<?> outputKeyClass, Class<?> outputValueClass, Configuration mapperConf)
        throws IOException 

Document

Adds a Mapper class to the chain mapper.
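The examples below all follow the same pattern: register one or more Mapper classes with ChainMapper.addMapper, close the chain with ChainReducer.setReducer, and make sure each stage's output key/value classes match the next stage's input classes. As a quick orientation, here is a minimal, self-contained sketch of that pattern; the TokenMapper, LowerCaseMapper, and SumReducer classes and the ChainSketch driver are illustrative assumptions, not taken from any of the collected sources below.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.chain.ChainMapper;
import org.apache.hadoop.mapreduce.lib.chain.ChainReducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class ChainSketch {

    // First mapper in the chain: split each input line into (word, 1) pairs.
    public static class TokenMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        private static final IntWritable ONE = new IntWritable(1);

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            for (String token : value.toString().split("\\s+")) {
                if (!token.isEmpty()) {
                    context.write(new Text(token), ONE);
                }
            }
        }
    }

    // Second mapper: consumes the first mapper's (Text, IntWritable) output and lower-cases the key.
    public static class LowerCaseMapper extends Mapper<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void map(Text key, IntWritable value, Context context)
                throws IOException, InterruptedException {
            context.write(new Text(key.toString().toLowerCase()), value);
        }
    }

    // Reducer closing the chain: sums the counts per word.
    public static class SumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable v : values) {
                sum += v.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }

    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "chain sketch");
        job.setJarByClass(ChainSketch.class);

        // Each addMapper call declares the stage's input and output key/value classes;
        // the output classes of one stage must equal the input classes of the next.
        ChainMapper.addMapper(job, TokenMapper.class, LongWritable.class, Text.class,
                Text.class, IntWritable.class, new Configuration(false));
        ChainMapper.addMapper(job, LowerCaseMapper.class, Text.class, IntWritable.class,
                Text.class, IntWritable.class, new Configuration(false));
        ChainReducer.setReducer(job, SumReducer.class, Text.class, IntWritable.class,
                Text.class, IntWritable.class, new Configuration(false));

        // Final output types produced by the reducer at the end of the chain.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

Passing a fresh Configuration(false) (or JobConf(false)) to each stage keeps per-mapper settings isolated from the job-wide configuration, which is why most of the examples below create a new empty configuration for every addMapper and setReducer call.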

Usage

From source file: MapReduce3.java

public static void main(String[] args) throws Exception {

    String dst = "hdfs://localhost:9000/data/2006a.csv";

    // Output paths
    //  String dstOut = "hdfs://localhost:9000/mapreduce/result3/1";
    String dstOut = "/Users/wendyzhuo/NetBeansProjects/final_Hadoop/src/output3/1";
    String outFiles = "/Users/wendyzhuo/NetBeansProjects/final_Hadoop/src/output3/2";
    Configuration hadoopConfig = new Configuration();

    hadoopConfig.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());

    hadoopConfig.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());

    Job job = new Job(hadoopConfig);
    Job job2 = new Job(hadoopConfig);

    FileInputFormat.addInputPath(job, new Path(dst));
    FileOutputFormat.setOutputPath(job, new Path(dstOut));
    FileInputFormat.addInputPath(job2, new Path(dstOut));
    FileOutputFormat.setOutputPath(job2, new Path(outFiles));

    JobConf map1Conf = new JobConf(false);
    ChainMapper.addMapper(job, TempMapper.class, LongWritable.class, Text.class, CompositeKey_wd.class,
            IntWritable.class, map1Conf);
    JobConf reduceConf = new JobConf(false);
    ChainReducer.setReducer(job, TempReducer.class, CompositeKey_wd.class, IntWritable.class,
            CompositeKey_wd.class, IntWritable.class, reduceConf);

    JobConf map2Conf = new JobConf(false);
    ChainMapper.addMapper(job2, TempMapper2.class, LongWritable.class, Text.class, IntWritable.class,
            CompositeKey_wd.class, map2Conf);
    JobConf map3Conf = new JobConf(false);
    ChainReducer.setReducer(job2, TempReduce2.class, IntWritable.class, CompositeKey_wd.class, Text.class,
            IntWritable.class, map3Conf);
    //  JobClient.runJob(job);

    // Mapper/Reducer classes (not needed; the chain above configures them)
    //        job.setMapperClass(TempMapper.class);
    //
    //        job.setReducerClass(TempReducer.class);

    // Output key/value types
    job.setOutputKeyClass(CompositeKey_wd.class);

    job.setOutputValueClass(IntWritable.class);

    job2.setMapOutputKeyClass(IntWritable.class);
    job2.setMapOutputValueClass(CompositeKey_wd.class);

    //  job2.setSortComparatorClass(LongWritable.DecreasingComparator.class);

    // Run the jobs
    job.waitForCompletion(true);
    System.out.println("Finished1");
    job2.waitForCompletion(true);
    System.out.println("Finished2");

}

From source file: Top20AZRestaurants.java

@Override
public int run(String[] args) throws Exception {
    Job job1 = new Job(getConf());
    job1.setSortComparatorClass(MyDecreasingDoubleComparator.class);
    job1.setJobName("Top20 AZ Restaurants ChainJob");
    job1.setJarByClass(Top20AZRestaurants.class);

    JobConf map1Conf = new JobConf(false);
    ChainMapper.addMapper(job1, Top20Mapper.class, LongWritable.class, Text.class, Text.class, Text.class,
            map1Conf);
    JobConf map2Conf = new JobConf(false);
    ChainMapper.addMapper(job1, Top20MapperRedo.class, Text.class, Text.class, DoubleWritable.class, Text.class,
            map2Conf);
    JobConf reduceConf = new JobConf(false);
    ChainReducer.setReducer(job1, Top20ReducerRedo.class, DoubleWritable.class, Text.class, Text.class,
            DoubleWritable.class, reduceConf);
    FileInputFormat.setInputPaths(job1, new Path(args[0]));
    FileOutputFormat.setOutputPath(job1, new Path(args[1]));

    boolean success = job1.waitForCompletion(true);
    return success ? 0 : 1;
}

From source file: cienciaCelularMR.Main.java

@Override
public int run(String[] args) throws Exception {

    for (int i = 0; i < args.length; i++) {
        System.out.println("Hadoop - arg[" + i + "] is: " + args[i]);
    }
    // YARN memory configuration
    Configuration conf = new Configuration();
    conf.set("mapreduce.map.memory.mb", "1400");
    conf.set("mapreduce.reduce.memory.mb", "2800");
    conf.set("mapreduce.map.java.opts", "-Xmx1120m");
    conf.set("mapreduce.reduce.java.opts", "-Xmx2240m");
    conf.set("yarn.app.mapreduce.am.resource.mb", "2800");
    conf.set("yarn.app.mapreduce.am.command-opts", "-Xmx2240m");
    conf.set("yarn.nodemanager.resource.memory-mb", "5040");
    conf.set("yarn.scheduler.minimum-allocation-mb", "1400");
    conf.set("yarn.scheduler.maximum-allocation-mb", "5040");
    conf.set("mapreduce.task.timeout", "18000000");//5 horas

    // Create the Job
    Job job = Job.getInstance(conf);
    job.setInputFormatClass(WholeFileInputFormat.class);
    FileInputFormat.setInputPaths(job, new Path(args[5]));
    FileOutputFormat.setOutputPath(job, new Path(args[6]));

    // Alternative Mapper outputs used to report information
    MultipleOutputs.addNamedOutput(job, "controloutput", TextOutputFormat.class, KeyMcell.class, Text.class);
    MultipleOutputs.addNamedOutput(job, "errormcell", TextOutputFormat.class, KeyMcell.class, Text.class);

    // Files copied to the nodes' distributed cache
    job.addCacheFile(new Path("wasb:///mcell.exe").toUri());
    job.addCacheFile(new Path("wasb:///fernet.exe").toUri());
    job.addCacheFile(new Path("wasb:///fernet.cfg").toUri());
    job.addCacheFile(new Path("wasb:///libconfig_d.dll").toUri());
    job.addCacheFile(new Path("wasb:///libtiff3.dll").toUri());
    job.addCacheFile(new Path("wasb:///jpeg62.dll").toUri());
    job.addCacheFile(new Path("wasb:///zlib1.dll").toUri());
    job.addCacheFile(new Path("wasb:///msvcr100d.dll").toUri());

    job.setJarByClass(Main.class);

    Configuration mapAConf = new Configuration(false);
    ChainMapper.addMapper(job, McellMapper.class, KeyMcell.class, BytesWritable.class, KeyMcell.class,
            Text.class, mapAConf);

    Configuration mapBConf = new Configuration(false);
    ChainMapper.addMapper(job, FernetMapper.class, KeyMcell.class, Text.class, KeyMcell.class,
            FernetOutput.class, mapBConf);

    job.setReducerClass(ResultReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(BytesWritable.class);

    job.submit();
    return 0;
}

From source file: com.jbw.jobcontrol.Patent.java

@Override
public int run(String[] strings) throws Exception {
    Configuration conf = getConf();
    Job job1 = Job.getInstance(conf);
    job1.setJobName("test");
    job1.setJarByClass(Patent.class);

    ChainMapper.addMapper(job1, InverseMapper.class, LongWritable.class, Text.class, Text.class, Text.class,
            conf);
    ChainMapper.addMapper(job1, CountMapper.class, Text.class, Text.class, Text.class, IntWritable.class, conf);

    job1.setReducerClass(IntSumReducer.class);

    Job job2 = Job.getInstance();
    ControlledJob cjob1 = new ControlledJob(job1.getConfiguration());
    ControlledJob cjob2 = new ControlledJob(job2.getConfiguration());
    cjob2.addDependingJob(cjob1);
    JobControl jc = new JobControl("process job");
    jc.addJob(cjob1);
    jc.addJob(cjob2);
    Thread t = new Thread(jc);
    t.start();
    while (true) {
        for (ControlledJob j : jc.getRunningJobList()) {
            break;
        }
        break;
    }
    return 0;
}

From source file: com.jhkt.playgroundArena.hadoop.tasks.jobs.ChainJob.java

License: Apache License

@Override
public int run(String[] args) throws Exception {

    Configuration conf = getConf();
    Job job = new Job(conf, ChainJob.class.getSimpleName());
    job.setJobName("Sample Chain Job");
    job.setJarByClass(ChainJob.class);

    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    Path in = new Path(args[0]);
    Path out = new Path(args[1]);

    FileInputFormat.setInputPaths(job, in);
    FileOutputFormat.setOutputPath(job, out);

    ChainMapper.addMapper(job, ReverseMapper.class, Text.class, Text.class, Text.class, Text.class,
            new Configuration(false));
    ChainMapper.addMapper(job, AverageMapper.class, Text.class, Text.class, Text.class, AverageWritable.class,
            new Configuration(false));
    ChainReducer.setReducer(job, AverageReducer.class, Text.class, AverageWritable.class, Text.class,
            DoubleWritable.class, new Configuration(false));

    System.exit(job.waitForCompletion(true) ? 0 : 1);

    return 0;
}

From source file: com.zinnia.nectar.regression.hadoop.primitive.jobs.MeanJob.java

License: Apache License

public Double call() throws NectarException {
    double value = 0;
    JobControl jobControl = new JobControl("mean job");
    try {
        job = new Job();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    job.setJarByClass(MeanJob.class);
    log.info("Mean Job initialized");
    log.warn("Mean job: Processing...Do not terminate/close");
    log.debug("Mean job: Mapping process started");

    try {
        ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, DoubleWritable.class, Text.class,
                NullWritable.class, Text.class, job.getConfiguration());
        ChainMapper.addMapper(job, MeanMapper.class, NullWritable.class, Text.class, Text.class,
                DoubleWritable.class, job.getConfiguration());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    job.getConfiguration().set("fields.spec", "" + column);
    job.getConfiguration().setInt("n", n);

    job.setReducerClass(DoubleSumReducer.class);
    try {
        FileInputFormat.addInputPath(job, new Path(inputFilePath));
        fs = FileSystem.get(job.getConfiguration());
        if (!fs.exists(new Path(inputFilePath))) {
            throw new NectarException("Exception occured:File " + inputFilePath + " not found ");
        }
    } catch (Exception e) {
        // TODO Auto-generated catch block
        String trace = new String();
        log.error(e.toString());
        for (StackTraceElement s : e.getStackTrace()) {
            trace += "\n\t at " + s.toString();
        }
        log.debug(trace);
        log.debug("Mean Job terminated abruptly\n");
        throw new NectarException();
    }
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DoubleWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    log.debug("Mean job: Mapping process completed");

    log.debug("Mean job: Reducing process started");
    try {
        controlledJob = new ControlledJob(job.getConfiguration());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
    jobControl.stop();
    try {
        FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
        String valueLine = bufferedReader.readLine();
        String[] fields = valueLine.split("\t");
        value = Double.parseDouble(fields[1]);
        bufferedReader.close();
        in.close();
    } catch (IOException e) {
        log.error("Exception occured: Output file cannot be read.");
        log.debug(e.getMessage());
        log.debug("Mean Job terminated abruptly\n");
        throw new NectarException();
    }
    log.debug("Mean job: Reducing process completed");
    log.info("Mean Job completed\n");
    return value;
}

From source file: com.zinnia.nectar.regression.hadoop.primitive.jobs.SigmaJob.java

License: Apache License

public Double call() throws NectarException {
    double value = 0;
    JobControl jobControl = new JobControl("sigmajob");
    try {
        job = new Job();
    } catch (IOException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    job.setJarByClass(SigmaJob.class);
    log.info("Sigma Job initialized");
    log.warn("Sigma job: Processing...Do not terminate/close");
    log.debug("Sigma job: Mapping process started");
    try {
        ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, LongWritable.class, Text.class,
                NullWritable.class, Text.class, job.getConfiguration());
        ChainMapper.addMapper(job, SigmaMapper.class, NullWritable.class, Text.class, Text.class,
                DoubleWritable.class, job.getConfiguration());
    } catch (IOException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    job.getConfiguration().set("fields.spec", "" + column);

    job.setReducerClass(DoubleSumReducer.class);
    try {
        FileInputFormat.addInputPath(job, new Path(inputFilePath));
        fs = FileSystem.get(job.getConfiguration());
        if (!fs.exists(new Path(inputFilePath))) {
            throw new NectarException("Exception occured:File " + inputFilePath + " not found ");
        }
    } catch (Exception e2) {
        // TODO Auto-generated catch block
        String trace = new String();
        log.error(e2.toString());
        for (StackTraceElement s : e2.getStackTrace()) {
            trace += "\n\t at " + s.toString();
        }
        log.debug(trace);
        log.debug("Sigma Job terminated abruptly\n");
        throw new NectarException();
    }
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setMapOutputValueClass(DoubleWritable.class);
    job.setMapOutputKeyClass(Text.class);
    job.setInputFormatClass(TextInputFormat.class);
    log.debug("Sigma job: Mapping process completed");

    log.debug("Sigma job: Reducing process started");
    try {
        controlledJob = new ControlledJob(job.getConfiguration());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
    try {
        FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
        String valueLine = bufferedReader.readLine();
        String[] fields = valueLine.split("\t");
        value = Double.parseDouble(fields[1]);
        bufferedReader.close();
        in.close();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        log.error("Exception occured: Output file cannot be read.");
        log.debug(e.getMessage());
        log.debug("Sigma Job terminated abruptly\n");
        throw new NectarException();
    }
    log.debug("Sigma job: Reducing process completed");
    log.info("Sigma Job completed\n");
    return value;
}

From source file: com.zinnia.nectar.regression.hadoop.primitive.jobs.SigmaSqJob.java

License: Apache License

public Double call() throws NectarException {
    // TODO Auto-generated method stub
    double value = 0;
    JobControl jobControl = new JobControl("sigmajob");
    try {
        job = new Job();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    job.setJarByClass(SigmaSqJob.class);
    log.info("Sigma square Job initialized");
    log.warn("Sigma square job: Processing...Do not terminate/close");
    log.debug("Sigma square job: Mapping process started");

    try {
        ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, DoubleWritable.class, Text.class,
                NullWritable.class, Text.class, job.getConfiguration());
        ChainMapper.addMapper(job, SigmaSqMapper.class, NullWritable.class, Text.class, Text.class,
                DoubleWritable.class, job.getConfiguration());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    job.getConfiguration().set("fields.spec", "" + column);
    job.setReducerClass(DoubleSumReducer.class);
    try {
        FileInputFormat.addInputPath(job, new Path(inputFilePath));
        fs = FileSystem.get(job.getConfiguration());
        if (!fs.exists(new Path(inputFilePath))) {
            throw new NectarException("Exception occured:File " + inputFilePath + " not found ");
        }
    } catch (Exception e) {
        // TODO Auto-generated catch block
        String trace = new String();
        log.error(e.toString());
        for (StackTraceElement s : e.getStackTrace()) {
            trace += "\n\t at " + s.toString();
        }
        log.debug(trace);
        log.debug("Sigma square Job terminated abruptly\n");
        throw new NectarException();
    }
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DoubleWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    log.debug("Sigma square job: Mapping process completed");

    log.debug("Sigma square job: Reducing process started");
    try {
        controlledJob = new ControlledJob(job.getConfiguration());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
    jobControl.stop();
    try {
        fs = FileSystem.get(job.getConfiguration());
        FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
        String valueLine = bufferedReader.readLine();
        String[] fields = valueLine.split("\t");
        value = Double.parseDouble(fields[1]);
        bufferedReader.close();
        in.close();
    } catch (IOException e) {
        log.error("Exception occured: Output file cannot be read.");
        log.debug(e.getMessage());
        log.debug("Sigma square Job terminated abruptly\n");
        throw new NectarException();
    }
    log.debug("Sigma square job: Reducing process completed");
    log.info("Sigma square Job completed\n");
    return value;
}

From source file: com.zinnia.nectar.regression.hadoop.primitive.jobs.SigmaXYJob.java

License: Apache License

public Double call() throws NectarException {
    double value = 0;
    JobControl jobControl = new JobControl("sigmajob");
    try {
        job = new Job();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    job.setJarByClass(SigmaXYJob.class);
    log.info("SigmaXY Job initialized");
    log.warn("SigmaXY job: Processing...Do not terminate/close");
    log.debug("SigmaXY job: Mapping process started");

    try {
        ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, LongWritable.class, Text.class,
                NullWritable.class, Text.class, job.getConfiguration());
        ChainMapper.addMapper(job, SigmaXYMapper.class, NullWritable.class, Text.class, Text.class,
                DoubleWritable.class, job.getConfiguration());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    job.getConfiguration().set("fields.spec", x + "," + y);

    job.setReducerClass(DoubleSumReducer.class);
    try {
        FileInputFormat.addInputPath(job, new Path(inputFilePath));
        fs = FileSystem.get(job.getConfiguration());
        if (!fs.exists(new Path(inputFilePath))) {
            throw new NectarException("Exception occured:File " + inputFilePath + " not found ");
        }
    } catch (Exception e) {
        // TODO Auto-generated catch block
        String trace = new String();
        log.error(e.toString());
        for (StackTraceElement s : e.getStackTrace()) {
            trace += "\n\t at " + s.toString();
        }
        log.debug(trace);
        log.debug("SigmaXY Job terminated abruptly\n");
        throw new NectarException();
    }
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DoubleWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    log.debug("SigmaXY job: Mapping process completed");

    log.debug("SigmaXY job: Reducing process started");
    try {
        controlledJob = new ControlledJob(job.getConfiguration());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
    jobControl.stop();
    FileSystem fs;
    try {
        fs = FileSystem.get(job.getConfiguration());
        FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
        String valueLine = bufferedReader.readLine();
        String[] fields = valueLine.split("\t");
        value = Double.parseDouble(fields[1]);
        bufferedReader.close();
        in.close();
    } catch (IOException e) {
        log.error("Exception occured: Output file cannot be read.");
        log.debug(e.getMessage());
        log.debug("SigmaXY Job terminated abruptly\n");
        throw new NectarException();
    }
    log.debug("SigmaXY job: Reducing process completed");
    log.info("SigmaXY Job completed\n");
    return value;
}

From source file: com.zinnia.nectar.regression.hadoop.primitive.jobs.SortJob.java

License: Apache License

public Double[] call() throws NectarException {
    // TODO Auto-generated method stub
    JobControl jobControl = new JobControl("Sortjob");
    try {
        job = new Job();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    job.setJarByClass(SortJob.class);
    log.info("Sorting Job initialized");
    log.warn("Sorting job: Processing...Do not terminate/close");
    log.debug("Sorting job: Mapping process started");

    try {
        ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, LongWritable.class, Text.class,
                NullWritable.class, Text.class, job.getConfiguration());
        ChainMapper.addMapper(job, SortMapper.class, NullWritable.class, Text.class, DoubleWritable.class,
                DoubleWritable.class, job.getConfiguration());
    } catch (IOException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    job.getConfiguration().set("fields.spec", "" + column);
    job.setReducerClass(Reducer.class);

    try {
        FileInputFormat.addInputPath(job, new Path(inputFilePath));
        fs = FileSystem.get(job.getConfiguration());
        if (!fs.exists(new Path(inputFilePath))) {
            throw new NectarException("Exception occured:File " + inputFilePath + " not found ");
        }
    } catch (Exception e2) {
        // TODO Auto-generated catch block
        String trace = new String();
        log.error(e2.toString());
        for (StackTraceElement s : e2.getStackTrace()) {
            trace += "\n\t at " + s.toString();
        }
        log.debug(trace);
        log.debug("Sorting Job terminated abruptly\n");
        throw new NectarException();
    }
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setMapOutputValueClass(DoubleWritable.class);
    job.setMapOutputKeyClass(DoubleWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    log.debug("Sorting job: Mapping process completed");

    log.debug("Sorting job: Reducing process started");
    try {
        controlledJob = new ControlledJob(job.getConfiguration());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    try {
        FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
        String valueLine;
        while ((valueLine = bufferedReader.readLine()) != null) {
            String[] fields = valueLine.split("\t");
            value.add(Double.parseDouble(fields[1]));
        }
        bufferedReader.close();
        in.close();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        log.error("Exception occured: Output file cannot be read.");
        log.debug(e.getMessage());
        log.debug("Sorting Job terminated abruptly\n");
        throw new NectarException();
    }
    log.debug("Sorting job: Reducing process completed");
    log.info("Sorting Job completed\n");
    return value.toArray(new Double[value.size()]);
}