Example usage for org.apache.hadoop.mapred JobConf setCompressMapOutput

List of usage examples for org.apache.hadoop.mapred JobConf setCompressMapOutput

Introduction

On this page you can find example usages of org.apache.hadoop.mapred.JobConf.setCompressMapOutput.

Prototype

public void setCompressMapOutput(boolean compress) 

Document

Should the map outputs be compressed before transfer?
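
Before the project examples, here is a minimal, self-contained sketch (not taken from the sources below) of how setCompressMapOutput is typically paired with setMapOutputCompressorClass on an old-API (org.apache.hadoop.mapred) job. The class name, input/output paths, and the identity mapper/reducer are placeholders for illustration only.

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.KeyValueTextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;

// Placeholder job class: reads tab-separated text, passes it through unchanged,
// and compresses the intermediate map outputs with gzip.
public class CompressMapOutputExample {
    public static void main(String[] args) throws IOException {
        JobConf jobConf = new JobConf(CompressMapOutputExample.class);
        jobConf.setJobName("CompressMapOutputExample");

        // Keys and values are Text; each input line is split on the first tab.
        jobConf.setInputFormat(KeyValueTextInputFormat.class);
        jobConf.setOutputFormat(TextOutputFormat.class);
        jobConf.setOutputKeyClass(Text.class);
        jobConf.setOutputValueClass(Text.class);

        jobConf.setMapperClass(IdentityMapper.class);
        jobConf.setReducerClass(IdentityReducer.class);

        // Compress the intermediate map outputs before they are shuffled
        // to the reducers, and pick gzip as the codec.
        jobConf.setCompressMapOutput(true);
        jobConf.setMapOutputCompressorClass(GzipCodec.class);

        FileInputFormat.setInputPaths(jobConf, new Path(args[0]));
        FileOutputFormat.setOutputPath(jobConf, new Path(args[1]));

        JobClient.runJob(jobConf);
    }
}

The examples that follow show the same two calls in context, always applied to a JobConf just before the job is submitted with JobClient.runJob.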

Usage

From source file: org.sf.xrime.algorithms.transform.vertex.AdjSetWithLabel2AdjSetVertexTransformer.java

License: Apache License

/**
 * Continue to transform the outgoing adjacent vertexes list to
 * undirected ones, and set an appropriate label for each vertex.
 *
 * @author xue
 * @author juwei
 */
/*
 * public static class ReduceClass extends MapReduceBase implements Reducer<Text,
 * AdjSetVertex, Text, AdjSetVertex> {
 * 
 * @Override public void reduce(Text key, Iterator<AdjSetVertex> values,
 * OutputCollector<Text, AdjSetVertex> output, Reporter reporter) throws
 * IOException { // TODO } }
 */

@Override
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(conf, AdjSetWithLabel2AdjSetVertexTransformer.class);
    jobConf.setJobName("AdjSetWithLabel2AdjSetVertexTransformer");

    // the keys are vertex identifiers (strings)
    jobConf.setOutputKeyClass(Text.class);
    // the values are adjacent vertexes with labels (Writable)
    jobConf.setOutputValueClass(AdjSetVertex.class);

    jobConf.setMapperClass(MapClass.class);
    // no combiner is needed.
    // no reduce is needed.
    // jobConf.setReducerClass(ReduceClass.class);

    // makes the file format suitable for machine processing.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    // Enable compression.
    jobConf.setCompressMapOutput(true);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);

    FileInputFormat.setInputPaths(jobConf, srcPath);
    FileOutputFormat.setOutputPath(jobConf, destPath);

    jobConf.setNumMapTasks(mapperNum);
    jobConf.setNumReduceTasks(reducerNum);

    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file: org.sf.xrime.algorithms.transform.vertex.AdjVertex2VertexSetTransformer.java

License: Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(conf, AdjVertex2VertexSetTransformer.class);
    jobConf.setJobName("AdjVertex2VertexSetTransformer");

    // the keys are vertex identifiers (strings)
    jobConf.setOutputKeyClass(Text.class);
    // the values are vertex sets (Writable)
    jobConf.setOutputValueClass(VertexSet.class);
    jobConf.setMapperClass(MapClass.class);
    jobConf.setCombinerClass(ReduceClass.class);
    jobConf.setReducerClass(ReduceClass.class);
    // makes the file format suitable for machine processing.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);
    FileInputFormat.setInputPaths(jobConf, srcPath);
    FileOutputFormat.setOutputPath(jobConf, destPath);
    jobConf.setNumMapTasks(mapperNum);
    jobConf.setNumReduceTasks(reducerNum);
    jobConf.setCompressMapOutput(true);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);

    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file: org.sf.xrime.algorithms.transform.vertex.InAdjVertex2AdjBiSetVertexTransformer.java

License: Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(conf, InAdjVertex2AdjBiSetVertexTransformer.class);
    jobConf.setJobName("InAdjVertex2AdjBiSetVertexTransformer");

    // the keys are author names (strings)
    jobConf.setOutputKeyClass(Text.class);
    // the values are adjacent vertexes (Writable)
    jobConf.setOutputValueClass(AdjBiSetVertex.class);
    jobConf.setMapperClass(MapClass.class);
    // No combiner is permitted.
    jobConf.setReducerClass(ReduceClass.class);
    // makes the file format suitable for machine processing.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);
    FileInputFormat.setInputPaths(jobConf, srcPath);
    FileOutputFormat.setOutputPath(jobConf, destPath);
    jobConf.setNumMapTasks(mapperNum);
    jobConf.setNumReduceTasks(reducerNum);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);
    jobConf.setCompressMapOutput(true);

    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file: org.sf.xrime.algorithms.transform.vertex.LabeledSetWithLabel2AdjSetVertexTransformer.java

License: Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(conf, LabeledSetWithLabel2AdjSetVertexTransformer.class);
    jobConf.setJobName("LabeledSetWithLabel2AdjSetVertexTransformer");

    // the keys are vertex identifiers (strings)
    jobConf.setOutputKeyClass(Text.class);
    // the values are adjacent vertexes with labels (Writable)
    jobConf.setOutputValueClass(AdjSetVertex.class);

    jobConf.setMapperClass(MapClass.class);
    // no combiner is needed.
    // no reduce is needed.
    // jobConf.setReducerClass(ReduceClass.class);

    // makes the file format suitable for machine processing.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    // Enable compression.
    jobConf.setCompressMapOutput(true);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);

    FileInputFormat.setInputPaths(jobConf, srcPath);
    FileOutputFormat.setOutputPath(jobConf, destPath);

    jobConf.setNumMapTasks(mapperNum);
    jobConf.setNumReduceTasks(0);

    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file: org.sf.xrime.algorithms.transform.vertex.OutAdjVertex2AdjBiSetVertexTransformer.java

License: Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(conf, OutAdjVertex2AdjBiSetVertexTransformer.class);
    jobConf.setJobName("OutAdjVertex2AdjBiSetVertexTransformer");

    // the keys are author names (strings)
    jobConf.setOutputKeyClass(Text.class);
    // the values are adjacent vertexes (Writable)
    jobConf.setOutputValueClass(AdjBiSetVertex.class);
    jobConf.setMapperClass(MapClass.class);
    // No combiner is permitted.
    jobConf.setReducerClass(ReduceClass.class);
    // makes the file format suitable for machine processing.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);
    FileInputFormat.setInputPaths(jobConf, srcPath);
    FileOutputFormat.setOutputPath(jobConf, destPath);
    jobConf.setNumMapTasks(mapperNum);
    jobConf.setNumReduceTasks(reducerNum);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);
    jobConf.setCompressMapOutput(true);

    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file: org.sf.xrime.algorithms.transform.vertex.OutAdjVertex2AdjSetVertexTransformer.java

License: Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(conf, OutAdjVertex2AdjSetVertexTransformer.class);
    jobConf.setJobName("OutAdjVertex2AdjSetVertexTransformer");

    // the keys are vertex identifiers (strings)
    jobConf.setOutputKeyClass(Text.class);
    // the values are adjacent vertexes with labels (Writable)
    jobConf.setOutputValueClass(AdjSetVertex.class);
    jobConf.setMapperClass(MapClass.class);
    // no combiner is needed.
    jobConf.setReducerClass(ReduceClass.class);
    // makes the file format suitable for machine processing.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);
    // Enable compression.
    jobConf.setCompressMapOutput(true);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);
    FileInputFormat.setInputPaths(jobConf, srcPath);
    FileOutputFormat.setOutputPath(jobConf, destPath);
    jobConf.setNumMapTasks(mapperNum);
    jobConf.setNumReduceTasks(reducerNum);

    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file: org.sf.xrime.algorithms.transform.vertex.OutAdjVertex2LabeledSetWithLabelTransformer.java

License: Apache License

@SuppressWarnings("deprecation")
@Override
public void execute() throws ProcessorExecutionException {
    // Create a JobConf with default settings.
    JobConf jobConf = new JobConf(conf, OutAdjVertex2LabeledSetWithLabelTransformer.class);
    jobConf.setJobName("OutAdjVertex2LabeledSetWithLabelTransformer");

    // the keys are vertex identifiers (strings)
    jobConf.setOutputKeyClass(Text.class);
    // the values are adjacent vertexes with labels (Writable)
    jobConf.setOutputValueClass(LabeledAdjSetVertexWithTwoHopLabel.class);

    jobConf.setMapperClass(MapClass.class);
    jobConf.setCombinerClass(ReduceClass.class);
    jobConf.setReducerClass(ReduceClass.class);

    // makes the file format suitable for machine processing.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    // Enable compression.
    jobConf.setCompressMapOutput(true);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);

    FileInputFormat.setInputPaths(jobConf, srcPath);
    FileOutputFormat.setOutputPath(jobConf, destPath);

    jobConf.setNumMapTasks(mapperNum);
    jobConf.setNumReduceTasks(reducerNum);

    try {
        this.runningJob = JobClient.runJob(jobConf);
        System.out.println("Output Node Num. =" + this.runningJob.getCounters().getCounter(Counter.VertexNum));
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file: org.sf.xrime.algorithms.transform.vertex.OutAdjVertex2SetWithLabelTransformer.java

License: Apache License

@Override
public void execute() throws ProcessorExecutionException {
    // Create a JobConf with default settings.
    JobConf jobConf = new JobConf(conf, OutAdjVertex2SetWithLabelTransformer.class);
    jobConf.setJobName("OutAdjVertex2AdjSetVertexWithLabelTransformer");

    // the keys are vertex identifiers (strings)
    jobConf.setOutputKeyClass(Text.class);
    // the values are adjacent vertexes with labels (Writable)
    jobConf.setOutputValueClass(AdjSetVertexWithTwoHopLabel.class);

    jobConf.setMapperClass(MapClass.class);
    jobConf.setCombinerClass(ReduceClass.class);
    jobConf.setReducerClass(ReduceClass.class);

    // makes the file format suitable for machine processing.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    // Enable compression.
    jobConf.setCompressMapOutput(true);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);

    FileInputFormat.setInputPaths(jobConf, srcPath);
    FileOutputFormat.setOutputPath(jobConf, destPath);

    jobConf.setNumMapTasks(mapperNum);
    jobConf.setNumReduceTasks(reducerNum);

    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file: org.sf.xrime.algorithms.transform.vertex.OutAdjVertex2StrongLabeledSWLTransformer.java

License: Apache License

@Override
public void execute() throws ProcessorExecutionException {
    // Create a JobConf with default settings.
    JobConf jobConf = new JobConf(conf, OutAdjVertex2StrongLabeledSWLTransformer.class);
    jobConf.setJobName("OutAdjVertex2StrongLabeledSWLTransformer");

    jobConf.setMapOutputKeyClass(Text.class);
    jobConf.setMapOutputValueClass(LabeledAdjSetVertex.class);

    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(LabeledAdjSetVertexWithTwoHopLabel.class);

    jobConf.setMapperClass(MapClass.class);
    jobConf.setReducerClass(ReduceClass.class);

    // makes the file format suitable for machine processing.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    // Enable compression.
    jobConf.setCompressMapOutput(true);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);

    FileInputFormat.setInputPaths(jobConf, srcPath);
    FileOutputFormat.setOutputPath(jobConf, destPath);

    jobConf.setNumMapTasks(mapperNum);
    jobConf.setNumReduceTasks(reducerNum);

    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file: org.sf.xrime.algorithms.transform.vertex.OutAdjVertex2StrongSetWithLabelTransformer.java

License: Apache License

@Override
public void execute() throws ProcessorExecutionException {
    // Create a JobConf with default settings.
    JobConf jobConf = new JobConf(conf, OutAdjVertex2StrongSetWithLabelTransformer.class);
    jobConf.setJobName("OutAdjVertex2StrongSetWithLabelTransformer");

    jobConf.setMapOutputKeyClass(Text.class);
    jobConf.setMapOutputValueClass(LabeledAdjSetVertex.class);

    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(AdjSetVertexWithTwoHopLabel.class);

    jobConf.setMapperClass(MapClass.class);
    jobConf.setReducerClass(ReduceClass.class);

    // makes the file format suitable for machine processing.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    // Enable compression.
    jobConf.setCompressMapOutput(true);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);

    FileInputFormat.setInputPaths(jobConf, srcPath);
    FileOutputFormat.setOutputPath(jobConf, destPath);

    jobConf.setNumMapTasks(mapperNum);
    jobConf.setNumReduceTasks(reducerNum);

    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}