Example usage for org.apache.hadoop.mapred JobConf setNumMapTasks

List of usage examples for org.apache.hadoop.mapred JobConf setNumMapTasks

Introduction

On this page you can find example usage of org.apache.hadoop.mapred JobConf.setNumMapTasks.

Prototype

public void setNumMapTasks(int n) 

Source Link

Document

Set the number of map tasks for this job.

Usage

From source file:org.sf.xrime.algorithms.statistics.ViewAdjSetVertexWithLabelDegree.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf job = new JobConf(conf, ViewAdjSetVertexWithLabelDegree.class);
    job.setJobName("ViewAdjSetVertexWithLabelDegree");

    // Map-side only: neither a combiner nor a reducer class is configured.
    job.setMapperClass(MapClass.class);

    // Keys are vertex identifiers (strings); values are adjacent
    // vertexes with labels, emitted as Text.
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    // Sequence files keep the data machine-readable end to end.
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);

    // Gzip-compress intermediate map output.
    job.setMapOutputCompressorClass(GzipCodec.class);
    job.setCompressMapOutput(true);

    FileInputFormat.setInputPaths(job, srcPath);
    FileOutputFormat.setOutputPath(job, destPath);
    job.setNumMapTasks(mapperNum);
    job.setNumReduceTasks(reducerNum);

    try {
        this.runningJob = JobClient.runJob(job);
    } catch (IOException e) {
        // Surface job-submission/execution failures to the framework.
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.statistics.ViewLabeledSetWithLabelDegree.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf job = new JobConf(conf, ViewLabeledSetWithLabelDegree.class);
    job.setJobName("ViewLabeledSetWithLabelDegree");

    // Map-side only: neither a combiner nor a reducer class is configured.
    job.setMapperClass(MapClass.class);

    // Keys are vertex identifiers (strings); values are adjacent
    // vertexes with labels, emitted as Text.
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    // Sequence files keep the data machine-readable end to end.
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);

    // Gzip-compress intermediate map output.
    job.setMapOutputCompressorClass(GzipCodec.class);
    job.setCompressMapOutput(true);

    FileInputFormat.setInputPaths(job, srcPath);
    FileOutputFormat.setOutputPath(job, destPath);
    job.setNumMapTasks(mapperNum);
    // A single reduce task gathers the whole view into one output file.
    job.setNumReduceTasks(1);

    try {
        this.runningJob = JobClient.runJob(job);
    } catch (IOException e) {
        // Surface job-submission/execution failures to the framework.
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.transform.vertex.AdjSetWithLabel2AdjSetVertexTransformer.java

License:Apache License

/**
       * Continue to transform the outgoing adjacent vertexes list to
       * undirected ones, and set an appropriate label for each vertex.
       *
       * @author xue
       * @author juwei
       */
/*
 * public static class ReduceClass extends MapReduceBase implements Reducer<Text,
 * AdjSetVertex, Text, AdjSetVertex> {
 * 
 * @Override public void reduce(Text key, Iterator<AdjSetVertex> values,
 * OutputCollector<Text, AdjSetVertex> output, Reporter reporter) throws
 * IOException { // TODO } }
 */

@Override
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(conf, AdjSetWithLabel2AdjSetVertexTransformer.class);
    jobConf.setJobName("AdjSetWithLabel2AdjSetVertexTransformer");

    // the keys are vertex identifiers (strings)
    jobConf.setOutputKeyClass(Text.class);
    // the values are adjacent vertexes with labels (Writable)
    jobConf.setOutputValueClass(AdjSetVertex.class);

    jobConf.setMapperClass(MapClass.class);
    // no combiner is needed.
    // no reduce is needed.
    // jobConf.setReducerClass(ReduceClass.class);

    // makes the file format suitable for machine processing.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    // Enable compression.
    jobConf.setCompressMapOutput(true);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);

    FileInputFormat.setInputPaths(jobConf, srcPath);
    FileOutputFormat.setOutputPath(jobConf, destPath);

    jobConf.setNumMapTasks(mapperNum);
    // FIX: no reducer class is configured (see comments above), so scheduling
    // reducerNum reduce tasks would only run identity reducers and force a
    // needless shuffle/sort. Use zero reduce tasks to make this a true
    // map-only job, matching LabeledSetWithLabel2AdjSetVertexTransformer.
    jobConf.setNumReduceTasks(0);

    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        // Surface job-submission/execution failures to the framework.
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.transform.vertex.AdjVertex2VertexSetTransformer.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf job = new JobConf(conf, AdjVertex2VertexSetTransformer.class);
    job.setJobName("AdjVertex2VertexSetTransformer");

    // Keys are vertex identifiers (strings); values are vertex sets (Writable).
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(VertexSet.class);

    // The reducer doubles as the combiner.
    job.setMapperClass(MapClass.class);
    job.setCombinerClass(ReduceClass.class);
    job.setReducerClass(ReduceClass.class);

    // Sequence files keep the data machine-readable end to end.
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);

    FileInputFormat.setInputPaths(job, srcPath);
    FileOutputFormat.setOutputPath(job, destPath);
    job.setNumMapTasks(mapperNum);
    job.setNumReduceTasks(reducerNum);

    // Gzip-compress intermediate map output.
    job.setCompressMapOutput(true);
    job.setMapOutputCompressorClass(GzipCodec.class);

    try {
        this.runningJob = JobClient.runJob(job);
    } catch (IOException e) {
        // Surface job-submission/execution failures to the framework.
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.transform.vertex.InAdjVertex2AdjBiSetVertexTransformer.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf job = new JobConf(conf, InAdjVertex2AdjBiSetVertexTransformer.class);
    job.setJobName("InAdjVertex2AdjBiSetVertexTransformer");

    // Keys are author names (strings); values are adjacent vertexes (Writable).
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(AdjBiSetVertex.class);

    // No combiner is permitted for this transformation.
    job.setMapperClass(MapClass.class);
    job.setReducerClass(ReduceClass.class);

    // Sequence files keep the data machine-readable end to end.
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);

    FileInputFormat.setInputPaths(job, srcPath);
    FileOutputFormat.setOutputPath(job, destPath);
    job.setNumMapTasks(mapperNum);
    job.setNumReduceTasks(reducerNum);

    // Gzip-compress intermediate map output.
    job.setMapOutputCompressorClass(GzipCodec.class);
    job.setCompressMapOutput(true);

    try {
        this.runningJob = JobClient.runJob(job);
    } catch (IOException e) {
        // Surface job-submission/execution failures to the framework.
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.transform.vertex.LabeledSetWithLabel2AdjSetVertexTransformer.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf job = new JobConf(conf, LabeledSetWithLabel2AdjSetVertexTransformer.class);
    job.setJobName("LabeledSetWithLabel2AdjSetVertexTransformer");

    // Keys are vertex identifiers (strings); values are adjacent
    // vertexes with labels (Writable).
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(AdjSetVertex.class);

    // Map-only job: no combiner and no reducer.
    job.setMapperClass(MapClass.class);

    // Sequence files keep the data machine-readable end to end.
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);

    // Gzip-compress intermediate map output.
    job.setCompressMapOutput(true);
    job.setMapOutputCompressorClass(GzipCodec.class);

    FileInputFormat.setInputPaths(job, srcPath);
    FileOutputFormat.setOutputPath(job, destPath);

    job.setNumMapTasks(mapperNum);
    // Zero reduce tasks: mappers write the final output directly.
    job.setNumReduceTasks(0);

    try {
        this.runningJob = JobClient.runJob(job);
    } catch (IOException e) {
        // Surface job-submission/execution failures to the framework.
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.transform.vertex.OutAdjVertex2AdjBiSetVertexTransformer.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf job = new JobConf(conf, OutAdjVertex2AdjBiSetVertexTransformer.class);
    job.setJobName("OutAdjVertex2AdjBiSetVertexTransformer");

    // Keys are author names (strings); values are adjacent vertexes (Writable).
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(AdjBiSetVertex.class);

    // No combiner is permitted for this transformation.
    job.setMapperClass(MapClass.class);
    job.setReducerClass(ReduceClass.class);

    // Sequence files keep the data machine-readable end to end.
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);

    FileInputFormat.setInputPaths(job, srcPath);
    FileOutputFormat.setOutputPath(job, destPath);
    job.setNumMapTasks(mapperNum);
    job.setNumReduceTasks(reducerNum);

    // Gzip-compress intermediate map output.
    job.setMapOutputCompressorClass(GzipCodec.class);
    job.setCompressMapOutput(true);

    try {
        this.runningJob = JobClient.runJob(job);
    } catch (IOException e) {
        // Surface job-submission/execution failures to the framework.
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.transform.vertex.OutAdjVertex2AdjSetVertexTransformer.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf job = new JobConf(conf, OutAdjVertex2AdjSetVertexTransformer.class);
    job.setJobName("OutAdjVertex2AdjSetVertexTransformer");

    // Keys are vertex identifiers (strings); values are adjacent
    // vertexes with labels (Writable).
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(AdjSetVertex.class);

    // No combiner is configured for this transformation.
    job.setMapperClass(MapClass.class);
    job.setReducerClass(ReduceClass.class);

    // Sequence files keep the data machine-readable end to end.
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);

    // Gzip-compress intermediate map output.
    job.setCompressMapOutput(true);
    job.setMapOutputCompressorClass(GzipCodec.class);

    FileInputFormat.setInputPaths(job, srcPath);
    FileOutputFormat.setOutputPath(job, destPath);
    job.setNumMapTasks(mapperNum);
    job.setNumReduceTasks(reducerNum);

    try {
        this.runningJob = JobClient.runJob(job);
    } catch (IOException e) {
        // Surface job-submission/execution failures to the framework.
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.transform.vertex.OutAdjVertex2LabeledSetWithLabelTransformer.java

License:Apache License

@SuppressWarnings("deprecation")
@Override
public void execute() throws ProcessorExecutionException {
    // Start from a JobConf with the processor's default settings.
    JobConf job = new JobConf(conf, OutAdjVertex2LabeledSetWithLabelTransformer.class);
    job.setJobName("OutAdjVertex2LabeledSetWithLabelTransformer");

    // Keys are vertex identifiers (strings); values are adjacent
    // vertexes with labels (Writable).
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LabeledAdjSetVertexWithTwoHopLabel.class);

    // The reducer doubles as the combiner.
    job.setMapperClass(MapClass.class);
    job.setCombinerClass(ReduceClass.class);
    job.setReducerClass(ReduceClass.class);

    // Sequence files keep the data machine-readable end to end.
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);

    // Gzip-compress intermediate map output.
    job.setCompressMapOutput(true);
    job.setMapOutputCompressorClass(GzipCodec.class);

    FileInputFormat.setInputPaths(job, srcPath);
    FileOutputFormat.setOutputPath(job, destPath);

    job.setNumMapTasks(mapperNum);
    job.setNumReduceTasks(reducerNum);

    try {
        this.runningJob = JobClient.runJob(job);
        // Report the vertex-count counter collected by the job.
        System.out.println("Output Node Num. =" + this.runningJob.getCounters().getCounter(Counter.VertexNum));
    } catch (IOException e) {
        // Surface job-submission/execution failures to the framework.
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.transform.vertex.OutAdjVertex2SetWithLabelTransformer.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    // Start from a JobConf with the processor's default settings.
    JobConf job = new JobConf(conf, OutAdjVertex2SetWithLabelTransformer.class);
    job.setJobName("OutAdjVertex2AdjSetVertexWithLabelTransformer");

    // Keys are vertex identifiers (strings); values are adjacent
    // vertexes with labels (Writable).
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(AdjSetVertexWithTwoHopLabel.class);

    // The reducer doubles as the combiner.
    job.setMapperClass(MapClass.class);
    job.setCombinerClass(ReduceClass.class);
    job.setReducerClass(ReduceClass.class);

    // Sequence files keep the data machine-readable end to end.
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);

    // Gzip-compress intermediate map output.
    job.setCompressMapOutput(true);
    job.setMapOutputCompressorClass(GzipCodec.class);

    FileInputFormat.setInputPaths(job, srcPath);
    FileOutputFormat.setOutputPath(job, destPath);

    job.setNumMapTasks(mapperNum);
    job.setNumReduceTasks(reducerNum);

    try {
        this.runningJob = JobClient.runJob(job);
    } catch (IOException e) {
        // Surface job-submission/execution failures to the framework.
        throw new ProcessorExecutionException(e);
    }
}