List of usage examples for org.apache.hadoop.mapred.JobConf.setNumMapTasks
public void setNumMapTasks(int n)
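Note: in the classic mapred API this call is only a hint. It sets mapred.map.tasks, but the actual number of map tasks equals the number of input splits produced by the InputFormat (setNumReduceTasks, by contrast, is binding). All of the XRime examples below share one job-setup pattern; the following is a minimal self-contained sketch of that pattern, where the class name MyJob, the paths, and the task counts are placeholders rather than anything taken from the sources below.

import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;

// Minimal sketch of the pattern shared by the examples below; the class name,
// paths, and task counts are placeholders, not taken from the XRime sources.
public class MyJob {
    public static void main(String[] args) throws IOException {
        JobConf conf = new JobConf(MyJob.class);
        conf.setJobName("MyJob");
        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(Text.class);
        conf.setInputFormat(SequenceFileInputFormat.class);
        conf.setOutputFormat(SequenceFileOutputFormat.class);
        FileInputFormat.setInputPaths(conf, new Path(args[0]));
        FileOutputFormat.setOutputPath(conf, new Path(args[1]));
        conf.setNumMapTasks(8);    // a hint: the real map count follows the input splits
        conf.setNumReduceTasks(2); // binding, unlike the map-task hint
        JobClient.runJob(conf);    // blocks until the job completes
    }
}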
From source file:org.sf.xrime.algorithms.partitions.connected.strongly.ForwardTrimPartA.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, ForwardTrimPartA.class);
    conf.setJobName("ForwardTrimPartA");
    // the keys are vertex identifiers (strings)
    conf.setOutputKeyClass(Text.class);
    // the values are vertexes (Writable)
    conf.setOutputValueClass(LabeledAdjBiSetVertex.class);
    conf.setMapperClass(MapClass.class);
    // No combiners, no reducers.
    // makes the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(0);
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.strongly.ForwardTrimPartB.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, ForwardTrimPartB.class);
    conf.setJobName("ForwardTrimPartB");
    // the keys are vertex identifiers (strings)
    conf.setOutputKeyClass(Text.class);
    // the values are vertexes (Writable)
    conf.setOutputValueClass(LabeledAdjBiSetVertex.class);
    conf.setMapperClass(MapClass.class);
    // No combiner is permitted.
    conf.setReducerClass(ReduceClass.class);
    // makes the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    conf.setCompressMapOutput(true);
    conf.setMapOutputCompressorClass(GzipCodec.class);
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.strongly.LabelPropagation.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, LabelPropagation.class);
    conf.setJobName("LabelPropagation");
    // the keys are vertex identifiers (strings)
    conf.setOutputKeyClass(Text.class);
    // the values are adjacent vertexes with labels (Writable)
    conf.setOutputValueClass(LabeledAdjBiSetVertex.class);
    conf.setMapperClass(MapClass.class);
    conf.setCombinerClass(ReduceClass.class);
    conf.setReducerClass(ReduceClass.class);
    // makes the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    conf.setCompressMapOutput(true);
    conf.setMapOutputCompressorClass(GzipCodec.class);
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.strongly.PivotChoose.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, PivotChoose.class);
    conf.setJobName("PivotChoose");
    // This is necessary.
    conf.setMapOutputKeyClass(Text.class);
    conf.setMapOutputValueClass(LabeledAdjBiSetVertex.class);
    // the key is a pseudo one
    conf.setOutputKeyClass(Text.class);
    // the value is the chosen vertex id
    conf.setOutputValueClass(Text.class);
    conf.setMapperClass(MapClass.class);
    // Since (k2, v2) differs from (k3, v3), no combiner is permitted.
    conf.setReducerClass(ReduceClass.class);
    // The format of the input data is generated with WritableSerialization.
    conf.setInputFormat(SequenceFileInputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    // Only one reducer is permitted, or the largest value will be wrong.
    conf.setNumReduceTasks(1);
    conf.setCompressMapOutput(true);
    conf.setMapOutputCompressorClass(GzipCodec.class);
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
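A side note on the single-reducer constraint above: each reducer sees only its own partition of the keys, so any maximum it computes is local to that partition. Funneling everything to one reducer, often under a single pseudo key as PivotChoose does, is the standard way to make a global choice in classic MapReduce. A generic illustration with IntWritable values (the types here are an assumption for the sketch, not the XRime types):

import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

// Generic single-reducer maximum; with more than one reducer, each would
// report only the maximum of its own key partition.
public class MaxReduce extends MapReduceBase
        implements Reducer<Text, IntWritable, Text, IntWritable> {
    public void reduce(Text key, Iterator<IntWritable> values,
            OutputCollector<Text, IntWritable> output, Reporter reporter)
            throws IOException {
        int max = Integer.MIN_VALUE;
        while (values.hasNext()) {
            max = Math.max(max, values.next().get());
        }
        output.collect(key, new IntWritable(max));
    }
}

In this generic form the same class could also serve as a combiner, since max is associative and the input and output types match; PivotChoose cannot do that precisely because its map output value type differs from its reduce output value type.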
From source file:org.sf.xrime.algorithms.partitions.connected.strongly.PropagationConvergenceTest.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, PropagationConvergenceTest.class);
    conf.setJobName("PropagationConvergenceTest");
    // the keys are vertex identifiers (strings)
    conf.setOutputKeyClass(Text.class);
    // the values are integers (Writable)
    conf.setOutputValueClass(IntWritable.class);
    conf.setMapperClass(MapClass.class);
    // makes the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(0);
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.weakly.alg_1.AdjVertex4Component.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, AdjVertex4Component.class);
    conf.setJobName("AdjVertexesList4Component");
    // the keys are vertex identifiers (strings)
    conf.setOutputKeyClass(Text.class);
    // the values are vertex sets (Writable)
    conf.setOutputValueClass(AdjVertex.class);
    conf.setMapperClass(MapClass.class);
    // No combiner is permitted.
    conf.setReducerClass(ReduceClass.class);
    // makes the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    try {
        // The two input directories hold the original adjacent vertexes lists and the
        // partitions of the graph (output of VertexSetExpand and VertexSetMinorExpand).
        Path[] input_paths = new Path[2];
        FileInputFormat.setInputPaths(conf, getSource().getPaths().toArray(input_paths));
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    // Task numbers.
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.weakly.alg_1.ExtractPartitions.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, ExtractPartitions.class);
    conf.setJobName("ExtractPartitions");
    // the keys are vertex labels (strings)
    conf.setOutputKeyClass(Text.class);
    // the values are vertex sets (Writable)
    conf.setOutputValueClass(Text.class);
    conf.setMapperClass(MapClass.class);
    // makes the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(0);
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.weakly.alg_1.VertexSetExpand.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, VertexSetExpand.class);
    conf.setJobName("VertexSetExpand");
    // the keys are vertex identifiers (strings)
    conf.setOutputKeyClass(Text.class);
    // the values are ultimate labels of vertexes (Writable)
    conf.setOutputValueClass(VertexSet.class);
    conf.setMapperClass(MapClass.class);
    // No combiner is permitted, or the labeling may go wrong.
    conf.setReducerClass(ReduceClass.class);
    // makes the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    // Two input paths (linking to the lower and upper layers), one output path.
    try {
        Path[] input_paths = new Path[2];
        FileInputFormat.setInputPaths(conf, getSource().getPaths().toArray(input_paths));
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.weakly.alg_1.VertexSetJoin.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, VertexSetJoin.class);
    conf.setJobName("VertexSetJoin");
    // the keys are vertex identifiers (strings)
    conf.setOutputKeyClass(Text.class);
    // the values are vertex sets (Writable)
    conf.setOutputValueClass(VertexSet.class);
    conf.setMapperClass(MapClass.class);
    // Since this is a join operation, combiner is not permitted here.
    conf.setReducerClass(ReduceClass.class);
    // makes the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.weakly.alg_1.VertexSetMinorExpand.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, VertexSetMinorExpand.class);
    conf.setJobName("VertexSetMinorExpand");
    // the keys are vertex identifiers (strings)
    conf.setOutputKeyClass(Text.class);
    // the values are ultimate vertex set identifiers/labels (Writable)
    conf.setOutputValueClass(VertexSet.class);
    conf.setMapperClass(MapClass.class);
    conf.setCombinerClass(ReduceClass.class);
    conf.setReducerClass(ReduceClass.class);
    // makes the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
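A closing caveat that applies to every example above: conf.setNumMapTasks(getMapperNum()) only records the mapred.map.tasks hint, and the framework still creates one map task per input split. To genuinely lower the map count you raise the minimum split size. A sketch, assuming the classic pre-0.21 configuration key mapred.min.split.size (renamed in later Hadoop versions):

// Sketch: the map-task hint vs. what actually controls the map count.
JobConf conf = new JobConf(VertexSetMinorExpand.class);
conf.setNumMapTasks(8); // hint only: the split count wins
// Raising the minimum split size yields fewer, larger splits and thus fewer maps.
conf.setLong("mapred.min.split.size", 256L * 1024 * 1024);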