Example usage for org.apache.hadoop.mapred JobConf setFloat

List of usage examples for org.apache.hadoop.mapred JobConf setFloat

Introduction

On this page you can find example usage for org.apache.hadoop.mapred JobConf setFloat.

Prototype

public void setFloat(String name, float value) 

Document

Set the value of the name property to a float.
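
Before the project examples, here is a minimal self-contained sketch of the round trip (the property name and values are illustrative, not from any project below): setFloat stores the value under the given property name, and getFloat reads it back, returning its second argument as the default when the property is absent.

import org.apache.hadoop.mapred.JobConf;

public class SetFloatExample {
    public static void main(String[] args) {
        JobConf conf = new JobConf();

        // Store a float under a property name of our choosing.
        conf.setFloat("example.sample.ratio", 0.25f);

        // Read it back; 1.0f would be returned if the property were unset.
        float ratio = conf.getFloat("example.sample.ratio", 1.0f);
        System.out.println("ratio = " + ratio); // prints ratio = 0.25
    }
}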

Usage

From source file:edu.umd.cloud9.pagerank.RunPageRankSchimmy.java

License:Apache License

private void phase2(String path, int i, int j, int n, float missing) throws IOException {
    JobConf conf = new JobConf(RunPageRankBasic.class);

    sLogger.info("missing PageRank mass: " + missing);
    sLogger.info("number of nodes: " + n);

    String in = path + "/iter" + sFormat.format(j) + "t";
    String out = path + "/iter" + sFormat.format(j);

    sLogger.info("PageRankSchimmy: iteration " + j + ": Phase2");
    sLogger.info(" - input: " + in);
    sLogger.info(" - output: " + out);

    int numMapTasks = FileSystem.get(conf).listStatus(new Path(in)).length;
    int numReduceTasks = 0;

    conf.setJobName("PageRankSchimmy:iteration" + j + ":Phase2");
    conf.setInt("mapred.min.split.size", 1024 * 1024 * 1024);

    conf.setNumMapTasks(numMapTasks);
    conf.setNumReduceTasks(numReduceTasks);

    FileInputFormat.setInputPaths(conf, new Path(in));
    FileOutputFormat.setOutputPath(conf, new Path(out));

    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);

    conf.setMapOutputKeyClass(IntWritable.class);
    conf.setMapOutputValueClass(PageRankNode.class);

    conf.setOutputKeyClass(IntWritable.class);
    conf.setOutputValueClass(PageRankNode.class);

    conf.setMapperClass(MapPageRankMassDistributionClass.class);
    conf.setCombinerClass(IdentityReducer.class);
    conf.setReducerClass(IdentityReducer.class);

    conf.setFloat("MissingMass", (float) missing);
    conf.setInt("NodeCount", n);

    FileSystem.get(conf).delete(new Path(out), true);

    JobClient.runJob(conf);
}
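
On the task side, a value stored with setFloat is normally read back in configure(JobConf) under the old mapred API. Below is a minimal sketch with generic writable types; this mapper is illustrative only and stands in for the project's actual MapPageRankMassDistributionClass.

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

public class MassAwareMapper extends MapReduceBase
        implements Mapper<IntWritable, Text, IntWritable, Text> {
    private float missingMass;

    @Override
    public void configure(JobConf job) {
        // Counterpart of conf.setFloat("MissingMass", missing) in the driver;
        // 0.0f is returned if the property was never set.
        missingMass = job.getFloat("MissingMass", 0.0f);
    }

    @Override
    public void map(IntWritable key, Text value, OutputCollector<IntWritable, Text> output,
            Reporter reporter) throws IOException {
        // ... use missingMass when emitting records; pass-through shown here ...
        output.collect(key, value);
    }
}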

From source file:edu.umn.cs.spatialHadoop.operations.PyramidPlot.java

License:Apache License

/**
 * Plot a file to a set of images at different zoom levels using a MapReduce
 * program.
 * @param <S> type of shapes stored in the file
 * @param inFile - Path to the input file(s)
 * @param outFile - Path to the output file (image)
 * @param shape - A sample object to be used for parsing the input file
 * @param tileWidth - Width of each tile
 * @param tileHeight - Height of each tile
 * @param vflip - Set to <code>true</code> to flip the whole image vertically
 * @param color - Color used to draw single shapes
 * @param numLevels - Number of zoom levels to plot
 * @throws IOException
 */
private static <S extends Shape> RunningJob plotMapReduce(Path inFile, Path outFile, OperationsParams params)
        throws IOException {
    Color color = params.getColor("color", Color.BLACK);

    String hdfDataset = (String) params.get("dataset");
    Shape shape = hdfDataset != null ? new NASARectangle() : params.getShape("shape");
    Shape plotRange = params.getShape("rect");

    boolean background = params.is("background");

    JobConf job = new JobConf(params, PyramidPlot.class);
    job.setJobName("PlotPyramid");

    String partition = job.get("partition", "space").toLowerCase();
    if (partition.equals("space")) {
        job.setMapperClass(SpacePartitionMap.class);
        job.setReducerClass(SpacePartitionReduce.class);
        job.setMapOutputKeyClass(TileIndex.class);
        job.setMapOutputValueClass(shape.getClass());
        job.setInputFormat(ShapeInputFormat.class);
    } else {
        job.setMapperClass(DataPartitionMap.class);
        job.setReducerClass(DataPartitionReduce.class);
        job.setMapOutputKeyClass(TileIndex.class);
        job.setMapOutputValueClass(ImageWritable.class);
        job.setInputFormat(ShapeArrayInputFormat.class);
    }

    job.setInt("color", color.getRGB());
    ClusterStatus clusterStatus = new JobClient(job).getClusterStatus();
    job.setNumMapTasks(clusterStatus.getMaxMapTasks() * 5);
    job.setNumReduceTasks(Math.max(1, clusterStatus.getMaxReduceTasks()));

    if (shape instanceof Point && job.getBoolean("sample", false)) {
        // Enable adaptive sampling
        int imageWidthRoot = job.getInt("tilewidth", 256);
        int imageHeightRoot = job.getInt("tileheight", 256);
        long recordCount = FileMBR.fileMBR(inFile, params).recordCount;
        float sampleRatio = params.getFloat(GeometricPlot.AdaptiveSampleFactor, 1.0f) * imageWidthRoot
                * imageHeightRoot / recordCount;
        job.setFloat(GeometricPlot.AdaptiveSampleRatio, sampleRatio);
    }

    Rectangle fileMBR;
    if (hdfDataset != null) {
        // Input is HDF
        job.set(HDFRecordReader.DatasetName, hdfDataset);
        job.setBoolean(HDFRecordReader.SkipFillValue, true);
        job.setClass("shape", NASARectangle.class, Shape.class);
        // Determine the range of values by opening one of the HDF files
        Aggregate.MinMax minMax = Aggregate.aggregate(new Path[] { inFile }, params);
        job.setInt(MinValue, minMax.minValue);
        job.setInt(MaxValue, minMax.maxValue);
        //fileMBR = new Rectangle(-180, -90, 180, 90);
        fileMBR = plotRange != null ? plotRange.getMBR() : new Rectangle(-180, -140, 180, 169);
        //      job.setClass(HDFRecordReader.ProjectorClass, MercatorProjector.class,
        //          GeoProjector.class);
    } else {
        fileMBR = FileMBR.fileMBR(inFile, params);
    }

    boolean keepAspectRatio = params.is("keep-ratio", true);
    if (keepAspectRatio) {
        // Expand input file to a rectangle for compatibility with the pyramid
        // structure
        if (fileMBR.getWidth() > fileMBR.getHeight()) {
            fileMBR.y1 -= (fileMBR.getWidth() - fileMBR.getHeight()) / 2;
            fileMBR.y2 = fileMBR.y1 + fileMBR.getWidth();
        } else {
            fileMBR.x1 -= (fileMBR.getHeight() - fileMBR.getWidth()) / 2; // center horizontally, mirroring the vertical case above
            fileMBR.x2 = fileMBR.x1 + fileMBR.getHeight();
        }
    }

    SpatialSite.setRectangle(job, InputMBR, fileMBR);

    // Set input and output
    ShapeInputFormat.addInputPath(job, inFile);
    if (plotRange != null) {
        job.setClass(SpatialSite.FilterClass, RangeFilter.class, BlockFilter.class);
    }

    job.setOutputFormat(PyramidOutputFormat.class);
    TextOutputFormat.setOutputPath(job, outFile);
    job.setOutputCommitter(PlotPyramidOutputCommitter.class);

    if (background) {
        JobClient jc = new JobClient(job);
        return lastSubmittedJob = jc.submitJob(job);
    } else {
        return lastSubmittedJob = JobClient.runJob(job);
    }

}

From source file:eu.stratosphere.myriad.driver.hadoop.MyriadInputFormat.java

License:Apache License

public static void setScalingFactor(JobConf conf, float scalingFactor) {
    conf.setFloat("mapred.myriad.dgen.scaling.factor", scalingFactor);
}
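
The natural counterpart (a sketch; this getter is not shown in the snippet, and the 1.0f default is an assumption) reads the property back wherever the job's configuration is available:

public static float getScalingFactor(JobConf conf) {
    // Reads the value stored by setScalingFactor above; the 1.0f default
    // is an assumed fallback for when the property was never set.
    return conf.getFloat("mapred.myriad.dgen.scaling.factor", 1.0f);
}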

From source file:gr.forth.ics.isl.grouprecsmr.multiuser.MultiUserMain.java

public static void main(String[] args) {
    //input handling: validate the argument count before touching args[0]/args[1]
    if (args.length < 2 || args.length > 3) {
        System.err.println(
                "Incorrect input. Example usage: hadoop jar ~/GroupRecs/MultiUser.jar inputPath outputPath [numReduceTasks]");
        return;
    }

    //paths
    Path inputRatingsPath = new Path(args[0]); //movieid, userid, rating (text files)
    Path job1OutputPath = new Path("/user/hduser/partialResults");
    Path partialDistancesPath = new Path("/user/hduser/partialResults/part-*"); //member_nonMember \t partialDistance (sequence files)
    Path candidateMoviesPath = new Path("/user/hduser/partialResults/candidateMovies-*"); //candidateMovieId, nonMemberUserId_rating (text files)
    Path userSimilaritiesPath = new Path("/user/hduser/userSimilarities"); //similarity of each group member to his friends (text files)
    Path finalScoresPath = new Path(args[1]); //movieId \t outputScore

    int numReduceTasks = 56; //default value
    if (args.length == 3) {
        numReduceTasks = Integer.parseInt(args[2]);
    }

    final float friendsSimThresh = 0.8f;

    String groupFilePath = "/user/hduser/group.txt"; //one-line csv file with user ids (text file)

    //JOB 1//
    JobClient client = new JobClient();
    JobConf conf = new JobConf(gr.forth.ics.isl.grouprecsmr.multiuser.MultiUserMain.class);

    try {
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(job1OutputPath)) {
            fs.delete(job1OutputPath, true);
        }
        if (fs.exists(userSimilaritiesPath)) {
            fs.delete(userSimilaritiesPath, true);
        }
        if (fs.exists(finalScoresPath)) {
            fs.delete(finalScoresPath, true);
        }
    } catch (IOException ex) {
        Logger.getLogger(MultiUserMain.class.getName()).log(Level.SEVERE, null, ex);
    }

    conf.setJobName("Multi-user approach - Job 1");
    System.out.println("Starting Job 1 (Multi-user approach)...");

    conf.setMapOutputKeyClass(VIntWritable.class);
    conf.setMapOutputValueClass(Text.class);

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(ByteWritable.class);

    conf.setInputFormat(TextInputFormat.class);
    //conf.setOutputFormat(TextOutputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    SequenceFileOutputFormat.setOutputCompressionType(conf, SequenceFile.CompressionType.BLOCK);

    FileInputFormat.setInputPaths(conf, inputRatingsPath); //user ratings
    FileOutputFormat.setOutputPath(conf, job1OutputPath); //partial distances

    MultipleOutputs.addNamedOutput(conf, "candidateMovies", SequenceFileOutputFormat.class, VIntWritable.class,
            Text.class); //movieId, userId_rating

    conf.setMapperClass(gr.forth.ics.isl.grouprecsmr.job1.Job1Mapper.class);
    conf.setReducerClass(gr.forth.ics.isl.grouprecsmr.job1.Job1Reducer.class);

    conf.setNumReduceTasks(numReduceTasks);

    try {
        DistributedCache.addCacheFile(new URI(groupFilePath), conf); // group   
    } catch (URISyntaxException e1) {
        System.err.println(e1.toString());
    }

    conf.setInt("mapred.task.timeout", 6000000);

    client.setConf(conf);
    RunningJob job;
    try {
        job = JobClient.runJob(conf);
        job.waitForCompletion();
    } catch (Exception e) {
        System.err.println(e);
    }

    //JOB 2//
    System.out.println("Starting Job 2 (Multi-user approach)...");
    JobClient client2 = new JobClient();
    JobConf conf2 = new JobConf(gr.forth.ics.isl.grouprecsmr.multiuser.MultiUserMain.class);

    conf2.setJobName("Multi-user approach - Job 2");

    conf2.setMapOutputKeyClass(Text.class); //user pair (member_nonMember), where nonMember is in friends
    conf2.setMapOutputValueClass(ByteWritable.class);//similarity part unsquared

    conf2.setOutputKeyClass(Text.class); //user pair (member_nonMember), where nonMember is in friends
    conf2.setOutputValueClass(DoubleWritable.class);//similarity

    conf2.setInputFormat(SequenceFileInputFormat.class);
    //conf2.setInputFormat(TextInputFormat.class);
    conf2.setOutputFormat(TextOutputFormat.class);
    //conf2.setOutputFormat(SequenceFileOutputFormat.class);
    //SequenceFileOutputFormat.setOutputCompressionType(conf2, SequenceFile.CompressionType.BLOCK);

    FileInputFormat.setInputPaths(conf2, partialDistancesPath); //Job 1 output
    FileOutputFormat.setOutputPath(conf2, userSimilaritiesPath); //Job 2 output (similarity of each group member to his friends)

    conf2.setMapperClass(IdentityMapper.class);
    conf2.setReducerClass(gr.forth.ics.isl.grouprecsmr.job2.Job2ReducerMulti.class);

    int numSimilaritiesPartitions = numReduceTasks;
    conf2.setNumReduceTasks(numSimilaritiesPartitions);

    conf2.setFloat("friendsSimThreshold", friendsSimThresh);

    conf2.setInt("mapred.task.timeout", 6000000);
    conf2.set("io.sort.mb", "500");

    client2.setConf(conf2);
    RunningJob job2;
    try {
        job2 = JobClient.runJob(conf2);
        job2.waitForCompletion();
    } catch (Exception e) {
        System.err.println(e);
    }

    //JOB 3//
    System.out.println("Starting Job 3 (Multi-user approach)...");
    JobClient client3 = new JobClient();
    JobConf conf3 = new JobConf(gr.forth.ics.isl.grouprecsmr.multiuser.MultiUserMain.class);

    conf3.setJobName("Multi-user approach - Job 3");

    conf3.setMapOutputKeyClass(VIntWritable.class);
    conf3.setMapOutputValueClass(Text.class);

    conf3.setOutputKeyClass(VIntWritable.class);
    conf3.setOutputValueClass(DoubleWritable.class);

    conf3.setInputFormat(SequenceFileInputFormat.class);
    //conf3.setInputFormat(TextInputFormat.class);
    conf3.setOutputFormat(TextOutputFormat.class);
    //conf3.setOutputFormat(SequenceFileOutputFormat.class);
    //SequenceFileOutputFormat.setOutputCompressionType(conf3,SequenceFile.CompressionType.BLOCK);

    try {
        DistributedCache.addCacheFile(new URI(groupFilePath), conf3);
    } catch (URISyntaxException ex) {
        System.err.println("Could not add group file to distributed cache. " + ex);
    }
    for (int i = 0; i < numSimilaritiesPartitions; i++) {
        String reduceId = String.format("%05d", i); //5-digit id with leading zeros
        try {
            DistributedCache.addCacheFile(new URI(userSimilaritiesPath.toString() + "/part-" + reduceId),
                    conf3);
        } catch (URISyntaxException ex) {
            System.err.println("Could not add similarities files to distributed cache. " + ex);
        }

    }

    FileInputFormat.setInputPaths(conf3, candidateMoviesPath); //Job 1 output (candidate movies)
    FileOutputFormat.setOutputPath(conf3, finalScoresPath); //Job 3 output (movie \t outputScore)

    //        conf3.setMapperClass(IdentityMapper.class);      
    conf3.setMapperClass(gr.forth.ics.isl.grouprecsmr.job3.Job3MapperMulti.class); //filtering out ratings from non-Friends
    conf3.setReducerClass(gr.forth.ics.isl.grouprecsmr.job3.Job3ReducerMulti.class);

    conf3.setInt("mapred.task.timeout", 6000000);
    conf3.set("io.sort.mb", "500");

    conf3.setNumReduceTasks(numReduceTasks);

    client3.setConf(conf3);
    RunningJob job3;
    try {
        job3 = JobClient.runJob(conf3);
        job3.waitForCompletion();
    } catch (Exception e) {
        System.err.println(e);
    }
}

From source file:hibench.ZipfRandom.java

License:Apache License

public void setJobConf(JobConf job) {
    job.setInt("agents", agents);
    job.setLong("elems", elems);
    job.setLong("agentelems", agentelems);
    job.setFloat("exponent", (float) exponent);
    job.setFloat("scale", (float) scale);
}

From source file:nl.tudelft.graphalytics.mapreducev2.evo.DirectedForestFireModelJob.java

License:Apache License

@Override
protected void setConfigurationParameters(JobConf jobConfiguration) {
    super.setConfigurationParameters(jobConfiguration);
    jobConfiguration.setLong(ForestFireModelUtils.MAX_ID, getParameters().getMaxId() + 1);
    jobConfiguration.setFloat(ForestFireModelUtils.P_RATIO, getParameters().getPRatio());
    jobConfiguration.setFloat(ForestFireModelUtils.R_RATIO, getParameters().getRRatio());
    jobConfiguration.set(ForestFireModelUtils.CURRENT_AMBASSADORS,
            ForestFireModelUtils.verticesIDsMap2String(burnedEdges));

    if (getIteration() == 1) {
        if (getNumMappers() > 0) {
            jobConfiguration.setInt(ForestFireModelUtils.NEW_VERTICES_NR,
                    getParameters().getNumNewVertices() / getNumMappers());
            jobConfiguration.setInt(ForestFireModelUtils.ID_SHIFT, getNumMappers());
        } else {
            jobConfiguration.setInt(ForestFireModelUtils.NEW_VERTICES_NR, getParameters().getNumNewVertices());
            jobConfiguration.setInt(ForestFireModelUtils.ID_SHIFT, 1024 * 1024);
        }
        jobConfiguration.setBoolean(ForestFireModelUtils.IS_INIT, true);
    }
}

From source file:nl.tudelft.graphalytics.mapreducev2.evo.UndirectedForestFireModelJob.java

License:Apache License

@Override
protected void setConfigurationParameters(JobConf jobConfiguration) {
    super.setConfigurationParameters(jobConfiguration);
    jobConfiguration.setLong(ForestFireModelUtils.MAX_ID, getParameters().getMaxId() + 1);
    jobConfiguration.setFloat(ForestFireModelUtils.P_RATIO, getParameters().getPRatio());
    jobConfiguration.setFloat(ForestFireModelUtils.R_RATIO, getParameters().getRRatio());
    jobConfiguration.set(ForestFireModelUtils.CURRENT_AMBASSADORS,
            ForestFireModelUtils.verticesIDsMap2String(burnedEdges));

    if (getIteration() == 1) {
        if (getNumMappers() > 0) {
            jobConfiguration.setInt(ForestFireModelUtils.NEW_VERTICES_NR,
                    getParameters().getNumNewVertices() / getNumMappers());
            jobConfiguration.setInt(ForestFireModelUtils.ID_SHIFT, getNumMappers());
        } else {
            jobConfiguration.setInt(ForestFireModelUtils.NEW_VERTICES_NR, getParameters().getNumNewVertices());
            jobConfiguration.setInt(ForestFireModelUtils.ID_SHIFT, 1024 * 1024);
        }
        jobConfiguration.setBoolean(ForestFireModelUtils.IS_INIT, true);
    } else if (getIteration() == getParameters().getMaxIterations() + 1) {
        jobConfiguration.setBoolean(ForestFireModelUtils.IS_FINAL, true);
    }
}

From source file:org.apache.ambari.servicemonitor.unit.FileUsingJobTest.java

License:Apache License

/**
 * Test that a file-using job works.
 * @throws Throwable
 */
@Test
public void testFileOpJob() throws Throwable {
    createDFSCluster();
    createMRCluster();

    float Pmap = 1.0f;
    float Preduce = 1.0f;
    JobConf jobConf = mrCluster.createJobConf();
    int maps = 1;
    jobConf.setInt(JobKeys.RANGEINPUTFORMAT_ROWS, maps);
    jobConf.setFloat(FileUsingMapper.NAME + ProbableFileOperation.PROBABILITY, Pmap);
    jobConf.setInt(FileUsingMapper.NAME + ProbableFileOperation.SLEEPTIME, 10);

    jobConf.setFloat(FileUsingReducer.NAME + ProbableFileOperation.PROBABILITY, Preduce);
    jobConf.setInt(FileUsingReducer.NAME + ProbableFileOperation.SLEEPTIME, 10);
    bondDataOutputDir(this, "testFileOpJob", jobConf);
    int r = FileUsingJobRunner.exec(jobConf);
    assertEquals("Wrong return code from job", 0, r);

}

From source file:org.apache.tez.mapreduce.hadoop.TestDeprecatedKeys.java

License:Apache License

@Test(timeout = 5000)
public void verifyReduceKeyTranslation() {
    JobConf jobConf = new JobConf();

    jobConf.setFloat(MRJobConfig.SHUFFLE_INPUT_BUFFER_PERCENT, 0.4f);
    jobConf.setLong(MRJobConfig.REDUCE_MEMORY_TOTAL_BYTES, 20000l);
    jobConf.setInt(MRJobConfig.IO_SORT_FACTOR, 2000);
    jobConf.setFloat(MRJobConfig.SHUFFLE_MEMORY_LIMIT_PERCENT, 0.55f);
    jobConf.setFloat(MRJobConfig.REDUCE_MEMTOMEM_THRESHOLD, 0.60f);
    jobConf.setFloat(MRJobConfig.SHUFFLE_MERGE_PERCENT, 0.22f);
    jobConf.setBoolean(MRJobConfig.REDUCE_MEMTOMEM_ENABLED, true);
    jobConf.setFloat(MRJobConfig.REDUCE_INPUT_BUFFER_PERCENT, 0.33f);

    MRHelpers.translateMRConfToTez(jobConf);

    assertEquals(0.4f, jobConf.getFloat(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_BUFFER_PERCENT, 0f),
            0.01f);
    assertEquals(20000l, jobConf.getLong(Constants.TEZ_RUNTIME_TASK_MEMORY, 0));
    assertEquals(2000, jobConf.getInt(TezRuntimeConfiguration.TEZ_RUNTIME_IO_SORT_FACTOR, 0));
    assertEquals(0.55f, jobConf.getFloat(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT, 0),
            0.01f);
    assertEquals(0.60f, jobConf.getFloat(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MEMTOMEM_SEGMENTS, 0),
            0.01f);
    assertEquals(0.22f, jobConf.getFloat(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MERGE_PERCENT, 0), 0.01f);
    assertEquals(true, jobConf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_ENABLE_MEMTOMEM, false));
    assertEquals(0.33f,
            jobConf.getFloat(TezRuntimeConfiguration.TEZ_RUNTIME_INPUT_POST_MERGE_BUFFER_PERCENT, 0), 0.01f);
}

From source file:org.apache.tez.mapreduce.hadoop.TestDeprecatedKeys.java

License:Apache License

@Test(timeout = 5000)
/**
 * Set of keys that can be overridden at Tez runtime.
 */
public void verifyTezOverridenKeys() {
    JobConf jobConf = new JobConf();
    jobConf.setInt(MRJobConfig.IO_SORT_FACTOR, 2000);
    jobConf.setInt(MRJobConfig.IO_SORT_MB, 100);
    jobConf.setInt(MRJobConfig.COUNTERS_MAX_KEY, 100);

    jobConf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_IO_SORT_FACTOR, 1000);
    jobConf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_IO_SORT_MB, 200);
    jobConf.setBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_IFILE_READAHEAD, true);
    jobConf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_IFILE_READAHEAD_BYTES, 20);
    jobConf.setFloat(TezRuntimeConfiguration.TEZ_RUNTIME_SORT_SPILL_PERCENT, 0.2f);
    jobConf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_INDEX_CACHE_MEMORY_LIMIT_BYTES, 10);
    jobConf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_COMBINE_MIN_SPILLS, 20);
    jobConf.setInt(Constants.TEZ_RUNTIME_TASK_MEMORY, 10);
    jobConf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_PARALLEL_COPIES, 10);
    jobConf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_FAILURES_LIMIT, 10);
    jobConf.setBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_NOTIFY_READERROR, true);
    jobConf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_CONNECT_TIMEOUT, 10);
    jobConf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_READ_TIMEOUT, 10);
    jobConf.setBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_ENABLE_SSL, true);
    jobConf.setFloat(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_BUFFER_PERCENT, 10.0f);
    jobConf.setFloat(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT, 10.0f);
    jobConf.setFloat(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MERGE_PERCENT, 10.0f);
    jobConf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MEMTOMEM_SEGMENTS, 10);
    jobConf.setBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_ENABLE_MEMTOMEM, true);
    jobConf.setFloat(TezRuntimeConfiguration.TEZ_RUNTIME_INPUT_POST_MERGE_BUFFER_PERCENT, 10.0f);
    jobConf.set(TezRuntimeConfiguration.TEZ_RUNTIME_INTERNAL_SORTER_CLASS, "DefaultSorter");
    jobConf.set(TezRuntimeConfiguration.TEZ_RUNTIME_GROUP_COMPARATOR_CLASS, "groupComparator");
    jobConf.set(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_SECONDARY_COMPARATOR_CLASS, "SecondaryComparator");

    jobConf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, false);
    jobConf.setBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_COMPRESS, true);

    MRHelpers.translateMRConfToTez(jobConf);

    assertEquals(1000, jobConf.getInt(TezRuntimeConfiguration.TEZ_RUNTIME_IO_SORT_FACTOR, 0));
    assertEquals(200, jobConf.getInt(TezRuntimeConfiguration.TEZ_RUNTIME_IO_SORT_MB, 100));
    assertEquals(true, jobConf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_IFILE_READAHEAD, false));
    assertEquals(20, jobConf.getInt(TezRuntimeConfiguration.TEZ_RUNTIME_IFILE_READAHEAD_BYTES, 0));
    assertEquals(10, jobConf.getInt(TezRuntimeConfiguration.TEZ_RUNTIME_INDEX_CACHE_MEMORY_LIMIT_BYTES, 0));
    assertEquals(20, jobConf.getInt(TezRuntimeConfiguration.TEZ_RUNTIME_COMBINE_MIN_SPILLS, 0));
    assertEquals(10, jobConf.getInt(Constants.TEZ_RUNTIME_TASK_MEMORY, 0));
    assertEquals(10, jobConf.getInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_PARALLEL_COPIES, 0));
    assertEquals(10, jobConf.getInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_FAILURES_LIMIT, 0));
    assertEquals(true, jobConf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_NOTIFY_READERROR, false));
    assertEquals(10, jobConf.getInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_CONNECT_TIMEOUT, 0));
    assertEquals(10, jobConf.getInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_READ_TIMEOUT, 0));
    assertEquals(true, jobConf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_ENABLE_SSL, false));
    assertEquals(10.0f,
            jobConf.getFloat(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_BUFFER_PERCENT, 0.0f), 0.0f);
    assertEquals(10.0f,
            jobConf.getFloat(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT, 0.0f), 0.0f);
    assertEquals(10.0f, jobConf.getFloat(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MERGE_PERCENT, 0.0f),
            0.0f);
    assertEquals(10, jobConf.getInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MEMTOMEM_SEGMENTS, 0));
    assertEquals(true, jobConf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_ENABLE_MEMTOMEM, false));
    assertEquals(10.0f,
            jobConf.getFloat(TezRuntimeConfiguration.TEZ_RUNTIME_INPUT_POST_MERGE_BUFFER_PERCENT, 0.0f), 0.0f);
    assertEquals("DefaultSorter", jobConf.get(TezRuntimeConfiguration.TEZ_RUNTIME_INTERNAL_SORTER_CLASS, ""));
    assertEquals("groupComparator",
            jobConf.get(TezRuntimeConfiguration.TEZ_RUNTIME_GROUP_COMPARATOR_CLASS, ""));
    assertEquals("SecondaryComparator",
            jobConf.get(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_SECONDARY_COMPARATOR_CLASS, ""));
    assertEquals("DefaultSorter", jobConf.get(TezRuntimeConfiguration.TEZ_RUNTIME_INTERNAL_SORTER_CLASS, ""));
    assertTrue(jobConf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_COMPRESS, false));

    assertNull(jobConf.get(MRConfig.MAPRED_IFILE_READAHEAD));
    assertNull(jobConf.get(MRConfig.MAPRED_IFILE_READAHEAD_BYTES));
    assertNull(jobConf.get(MRJobConfig.RECORDS_BEFORE_PROGRESS));
    assertNull(jobConf.get(MRJobConfig.IO_SORT_FACTOR));
    assertNull(jobConf.get(MRJobConfig.IO_SORT_MB));
    assertNull(jobConf.get(MRJobConfig.SHUFFLE_READ_TIMEOUT));
    assertNull(jobConf.get(MRJobConfig.INDEX_CACHE_MEMORY_LIMIT));
    assertNull(jobConf.get(MRJobConfig.MAP_COMBINE_MIN_SPILLS));
    assertNull(jobConf.get(MRJobConfig.REDUCE_MEMORY_TOTAL_BYTES));
    assertNull(jobConf.get(MRJobConfig.SHUFFLE_PARALLEL_COPIES));
    assertNull(jobConf.get(MRJobConfig.SHUFFLE_FETCH_FAILURES));
    assertNull(jobConf.get(MRJobConfig.SHUFFLE_NOTIFY_READERROR));
    assertNull(jobConf.get(MRJobConfig.SHUFFLE_CONNECT_TIMEOUT));
    assertNull(jobConf.get(MRJobConfig.SHUFFLE_READ_TIMEOUT));
    assertNull(jobConf.get(MRConfig.SHUFFLE_SSL_ENABLED_KEY));
    assertNull(jobConf.get(MRJobConfig.SHUFFLE_INPUT_BUFFER_PERCENT));
    assertNull(jobConf.get(MRJobConfig.SHUFFLE_MEMORY_LIMIT_PERCENT));
    assertNull(jobConf.get(MRJobConfig.REDUCE_MEMTOMEM_THRESHOLD));
    assertNull(jobConf.get(MRJobConfig.REDUCE_MEMTOMEM_ENABLED));
    assertNull(jobConf.get(MRJobConfig.REDUCE_INPUT_BUFFER_PERCENT));
    assertNull(jobConf.get(MRJobConfig.GROUP_COMPARATOR_CLASS));
    assertNull(jobConf.get(MRJobConfig.GROUP_COMPARATOR_CLASS));
    assertNull(jobConf.get("map.sort.class"));
}