Example usage for org.apache.hadoop.mapred JobConf setMapOutputKeyClass

List of usage examples for org.apache.hadoop.mapred JobConf setMapOutputKeyClass

Introduction

This page lists example usages of org.apache.hadoop.mapred.JobConf#setMapOutputKeyClass.

Prototype

public void setMapOutputKeyClass(Class<?> theClass) 

Source Link

Document

Set the key class for the map output data.

Usage

From source file:org.sf.xrime.algorithms.partitions.connected.bi.Tree2EdgeSet.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    // Build the map-reduce job that converts trees into edge sets.
    JobConf jobConf = new JobConf(context, Tree2EdgeSet.class);
    jobConf.setJobName("Tree2EdgeSet");

    // Mapper emits (Text, PathAsVertexesList); reducer emits (Text, EdgeSet).
    jobConf.setMapperClass(MapClass.class);
    // No combiner is permitted, since the logic of the reducer depends on the
    // completeness of information.
    jobConf.setReducerClass(ReduceClass.class);
    jobConf.setMapOutputKeyClass(Text.class);
    jobConf.setMapOutputValueClass(PathAsVertexesList.class);
    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(EdgeSet.class);

    // Sequence files keep the data suitable for machine processing.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    try {
        FileInputFormat.setInputPaths(jobConf, getSource().getPath());
        FileOutputFormat.setOutputPath(jobConf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }

    jobConf.setNumMapTasks(getMapperNum());
    jobConf.setNumReduceTasks(getReducerNum());

    // Compress intermediate map output to reduce shuffle traffic.
    jobConf.setCompressMapOutput(true);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);

    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.partitions.connected.strongly.PivotChoose.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    // Build the job that chooses a pivot vertex for SCC computation.
    JobConf jobConf = new JobConf(context, PivotChoose.class);
    jobConf.setJobName("PivotChoose");

    jobConf.setMapperClass(MapClass.class);
    // Since k2,v2 is different from k3,v3, no combiner is permitted.
    jobConf.setReducerClass(ReduceClass.class);

    // This is necessary: intermediate pairs are (Text, LabeledAdjBiSetVertex).
    jobConf.setMapOutputKeyClass(Text.class);
    jobConf.setMapOutputValueClass(LabeledAdjBiSetVertex.class);
    // Output keys are a pseudo one; output values are the chosen vertex id.
    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(Text.class);

    // The format of input data is generated with WritableSerialization.
    jobConf.setInputFormat(SequenceFileInputFormat.class);

    try {
        FileInputFormat.setInputPaths(jobConf, getSource().getPath());
        FileOutputFormat.setOutputPath(jobConf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }

    jobConf.setNumMapTasks(getMapperNum());
    // Only one reducer is permitted, or the largest value will be wrong.
    jobConf.setNumReduceTasks(1);

    // Compress intermediate map output to reduce shuffle traffic.
    jobConf.setCompressMapOutput(true);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);

    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.transform.vertex.AdjVertex2AdjSetVertexTransformer.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    // Build the job that turns AdjVertex records into AdjSetVertex records.
    // NOTE: the original constructed the JobConf with Vertex2LabeledTransformer.class
    // and named the job "Vertex2Labelled" — copy-paste residue from a sibling
    // transformer. Use this transformer's own class and name, consistent with
    // every other transformer in this package.
    JobConf jobConf = new JobConf(conf, AdjVertex2AdjSetVertexTransformer.class);
    jobConf.setJobName("AdjVertex2AdjSetVertexTransformer");

    jobConf.setMapperClass(AdjVertex2AdjSetVertexMapper.class);
    jobConf.setReducerClass(AdjVertex2AdjSetVertexReducer.class);

    // Intermediate pairs are (Text, ObjectWritable); final pairs are
    // (Text, AdjSetVertex).
    jobConf.setMapOutputKeyClass(Text.class);
    jobConf.setMapOutputValueClass(ObjectWritable.class);
    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(AdjSetVertex.class);

    jobConf.setNumReduceTasks(reducerNum);

    // Sequence files keep the data suitable for machine processing.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    // Register the edge filter implementation used by the mapper.
    jobConf.setClass(edgeFilterKey, edgeFilter, EdgeFilter.class);

    FileInputFormat.setInputPaths(jobConf, srcPath);
    FileOutputFormat.setOutputPath(jobConf, destPath);

    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.transform.vertex.OutAdjVertex2StrongLabeledSWLTransformer.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    // Build a JobConf with default settings for this transformer.
    JobConf job = new JobConf(conf, OutAdjVertex2StrongLabeledSWLTransformer.class);
    job.setJobName("OutAdjVertex2StrongLabeledSWLTransformer");

    job.setMapperClass(MapClass.class);
    job.setReducerClass(ReduceClass.class);

    // Intermediate pairs are (Text, LabeledAdjSetVertex); final pairs are
    // (Text, LabeledAdjSetVertexWithTwoHopLabel).
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(LabeledAdjSetVertex.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LabeledAdjSetVertexWithTwoHopLabel.class);

    // Sequence files keep the data suitable for machine processing.
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);

    // Enable compression of intermediate map output.
    job.setCompressMapOutput(true);
    job.setMapOutputCompressorClass(GzipCodec.class);

    FileInputFormat.setInputPaths(job, srcPath);
    FileOutputFormat.setOutputPath(job, destPath);

    job.setNumMapTasks(mapperNum);
    job.setNumReduceTasks(reducerNum);

    try {
        this.runningJob = JobClient.runJob(job);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.transform.vertex.OutAdjVertex2StrongSetWithLabelTransformer.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    // Build a JobConf with default settings for this transformer.
    JobConf job = new JobConf(conf, OutAdjVertex2StrongSetWithLabelTransformer.class);
    job.setJobName("OutAdjVertex2StrongSetWithLabelTransformer");

    job.setMapperClass(MapClass.class);
    job.setReducerClass(ReduceClass.class);

    // Intermediate pairs are (Text, LabeledAdjSetVertex); final pairs are
    // (Text, AdjSetVertexWithTwoHopLabel).
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(LabeledAdjSetVertex.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(AdjSetVertexWithTwoHopLabel.class);

    // Sequence files keep the data suitable for machine processing.
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);

    // Enable compression of intermediate map output.
    job.setCompressMapOutput(true);
    job.setMapOutputCompressorClass(GzipCodec.class);

    FileInputFormat.setInputPaths(job, srcPath);
    FileOutputFormat.setOutputPath(job, destPath);

    job.setNumMapTasks(mapperNum);
    job.setNumReduceTasks(reducerNum);

    try {
        this.runningJob = JobClient.runJob(job);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.transform.vertex.Vertex2LabeledTransformer.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    // Fail fast: the output value class is mandatory. (The original checked
    // this mid-configuration; the locally built JobConf is discarded on throw,
    // so validating first is observably identical.)
    if (this.theClass == null) {
        throw new ProcessorExecutionException("Need to specify the output value class.");
    }

    JobConf job = new JobConf(conf, Vertex2LabeledTransformer.class);
    job.setJobName("Vertex2Labelled");

    // Map-only job: the mapper attaches labels, no reduce phase is needed.
    job.setMapperClass(Vertex2LabeledMapper.class);
    job.setNumReduceTasks(0);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Vertex.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(this.theClass);

    // Sequence files keep the data suitable for machine processing.
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);

    // Optionally register a custom label factory for the mapper.
    if (theLabelAdderClass != null) {
        job.setClass(labelFactoryKey, theLabelAdderClass, LabelAdder.class);
    }

    FileInputFormat.setInputPaths(job, srcPath);
    FileOutputFormat.setOutputPath(job, destPath);

    try {
        this.runningJob = JobClient.runJob(job);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.smartfrog.services.hadoop.benchmark.citerank.CheckConvergence.java

License:Open Source License

@Override
public int run(String[] args) throws Exception {
    // Expect exactly an input path and an output path.
    if (args.length != 2) {
        return usage(IN_AND_OUT);
    }

    JobConf jobConf = createInputOutputConfiguration(args);

    jobConf.setMapperClass(CheckConvergenceMapper.class);
    // The reducer is associative/commutative, so it doubles as the combiner.
    jobConf.setCombinerClass(CheckConvergenceReducer.class);
    jobConf.setReducerClass(CheckConvergenceReducer.class);

    // Both intermediate and final pairs are (Text, DoubleWritable).
    jobConf.setMapOutputKeyClass(Text.class);
    jobConf.setMapOutputValueClass(DoubleWritable.class);
    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(DoubleWritable.class);

    jobConf.setNumMapTasks(CiteRankTool.NUM_MAP_TASKS);
    // A single reducer produces one global convergence value.
    jobConf.setNumReduceTasks(1);

    return runJob(jobConf);
}

From source file:org.smartfrog.services.hadoop.benchmark.citerank.CheckingData.java

License:Open Source License

@Override
public int run(String[] args) throws Exception {
    // Expect exactly an input path and an output path.
    if (args.length != 2) {
        return usage(IN_AND_OUT);
    }

    JobConf jobConf = createInputOutputConfiguration(args);

    jobConf.setMapperClass(CheckingDataMapper.class);
    jobConf.setReducerClass(CheckingDataReducer.class);

    // Both intermediate and final pairs are (Text, Text).
    jobConf.setMapOutputKeyClass(Text.class);
    jobConf.setMapOutputValueClass(Text.class);
    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(Text.class);

    jobConf.setNumMapTasks(CiteRankTool.NUM_MAP_TASKS);
    jobConf.setNumReduceTasks(CiteRank.NUM_REDUCE_TASKS);

    return runJob(jobConf);
}

From source file:org.smartfrog.services.hadoop.benchmark.citerank.CountPages.java

License:Open Source License

@Override
public int run(String[] args) throws Exception {
    // Expect exactly an input path and an output path.
    if (args.length != 2) {
        return usage(IN_AND_OUT);
    }

    JobConf jobConf = createInputOutputConfiguration(args);

    jobConf.setMapperClass(CountPagesMapper.class);
    // Counting is associative/commutative, so the reducer doubles as combiner.
    jobConf.setCombinerClass(CountPagesReducer.class);
    jobConf.setReducerClass(CountPagesReducer.class);

    // Both intermediate and final pairs are (Text, LongWritable).
    jobConf.setMapOutputKeyClass(Text.class);
    jobConf.setMapOutputValueClass(LongWritable.class);
    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(LongWritable.class);

    jobConf.setNumMapTasks(CiteRankTool.NUM_MAP_TASKS);
    // A single reducer produces one global page count.
    jobConf.setNumReduceTasks(1);

    return runJob(jobConf);
}

From source file:org.smartfrog.services.hadoop.benchmark.citerank.DanglingPages.java

License:Open Source License

@Override
public int run(String[] args) throws Exception {
    // Expect exactly an input path and an output path.
    if (args.length != 2) {
        return usage(IN_AND_OUT);
    }

    JobConf jobConf = createInputOutputConfiguration(args);

    jobConf.setMapperClass(DanglingPagesMapper.class);
    // Summation is associative/commutative, so the reducer doubles as combiner.
    jobConf.setCombinerClass(DanglingPagesReducer.class);
    jobConf.setReducerClass(DanglingPagesReducer.class);

    // Both intermediate and final pairs are (Text, DoubleWritable).
    jobConf.setMapOutputKeyClass(Text.class);
    jobConf.setMapOutputValueClass(DoubleWritable.class);
    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(DoubleWritable.class);

    jobConf.setNumMapTasks(CiteRankTool.NUM_MAP_TASKS);
    // A single reducer aggregates the dangling-page mass.
    jobConf.setNumReduceTasks(1);

    return runJob(jobConf);
}