Example usage for org.apache.hadoop.mapreduce JobID JobID

List of usage examples for org.apache.hadoop.mapreduce JobID JobID

Introduction

This page collects usage examples for the org.apache.hadoop.mapreduce.JobID constructor JobID(String, int).

Prototype

public JobID(String jtIdentifier, int id) 

Document

Constructs a JobID object from a jobTracker identifier string and a job number.
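
The jtIdentifier argument is a free-form string (historically the JobTracker start time) and id is the job number. A minimal, self-contained sketch of the constructor and the canonical string form it produces (the identifier value below is an illustrative placeholder):

import org.apache.hadoop.mapreduce.JobID;

public class JobIDExample {
    public static void main(String[] args) {
        // "202401011200" is an arbitrary placeholder identifier.
        JobID jobId = new JobID("202401011200", 1);

        // toString() yields the canonical form "job_<jtIdentifier>_<id>",
        // here "job_202401011200_0001" (the id is zero-padded to four digits).
        System.out.println(jobId);

        // The two constructor arguments can be read back individually.
        System.out.println(jobId.getJtIdentifier()); // 202401011200
        System.out.println(jobId.getId());           // 1
    }
}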

Usage

From source file:com.google.appengine.tools.mapreduce.RangeInputFormatTest.java

License:Apache License

private List<InputSplit> getSplitsForConfiguration() throws Exception {
    JobContext context = new JobContext(conf, new JobID("Foo", 1));
    return format.getSplits(context);
}
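
Note that this example targets the older Hadoop API in which JobContext was a concrete class. On Hadoop 2.x and later, JobContext is an interface, and test code typically builds a synthetic context with JobContextImpl instead, as the later examples on this page do. A minimal sketch, assuming Hadoop 2.x:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.task.JobContextImpl;

public class SyntheticJobContextExample {
    // Builds a throwaway context so InputFormat.getSplits() can be called outside a real job.
    static JobContext newContext(Configuration conf) {
        return new JobContextImpl(conf, new JobID("Foo", 1));
    }
}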

From source file:com.inmobi.conduit.distcp.tools.mapred.TestCopyOutputFormat.java

License:Apache License

@Test
public void testCheckOutputSpecs() {
    try {
        OutputFormat outputFormat = new CopyOutputFormat();
        Configuration conf = new Configuration();
        Job job = new Job(conf);
        JobID jobID = new JobID("200707121733", 1);

        try {
            JobContext context = Mockito.mock(JobContext.class);
            Mockito.when(context.getConfiguration()).thenReturn(job.getConfiguration());
            Mockito.when(context.getJobID()).thenReturn(jobID);
            outputFormat.checkOutputSpecs(context);
            Assert.fail("No checking for invalid work/commit path");
        } catch (IllegalStateException ignore) {
        }

        CopyOutputFormat.setWorkingDirectory(job, new Path("/tmp/work"));
        try {
            JobContext context = Mockito.mock(JobContext.class);
            Mockito.when(context.getConfiguration()).thenReturn(job.getConfiguration());
            Mockito.when(context.getJobID()).thenReturn(jobID);
            outputFormat.checkOutputSpecs(context);
            Assert.fail("No checking for invalid commit path");
        } catch (IllegalStateException ignore) {
        }

        job.getConfiguration().set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH, "");
        CopyOutputFormat.setCommitDirectory(job, new Path("/tmp/commit"));
        try {
            JobContext context = Mockito.mock(JobContext.class);
            Mockito.when(context.getConfiguration()).thenReturn(job.getConfiguration());
            Mockito.when(context.getJobID()).thenReturn(jobID);
            outputFormat.checkOutputSpecs(context);
            Assert.fail("No checking for invalid work path");
        } catch (IllegalStateException ignore) {
        }

        CopyOutputFormat.setWorkingDirectory(job, new Path("/tmp/work"));
        CopyOutputFormat.setCommitDirectory(job, new Path("/tmp/commit"));
        try {
            JobContext context = Mockito.mock(JobContext.class);
            Mockito.when(context.getConfiguration()).thenReturn(job.getConfiguration());
            Mockito.when(context.getJobID()).thenReturn(jobID);
            outputFormat.checkOutputSpecs(context);
        } catch (IllegalStateException ignore) {
            ignore.printStackTrace();
            Assert.fail("Output spec check failed.");
        }

    } catch (IOException e) {
        LOG.error("Exception encountered while testing checkoutput specs", e);
        Assert.fail("Checkoutput Spec failure");
    } catch (InterruptedException e) {
        LOG.error("Exception encountered while testing checkoutput specs", e);
        Assert.fail("Checkoutput Spec failure");
    }
}

From source file:com.scaleoutsoftware.soss.hserver.HServerJob.java

License:Apache License

/**
 * Constructs the ScaleOut hServer job object.
 *
 * @param conf        configuration
 * @param jobName     job name
 * @param sortEnabled <code>true</code> if the reducer input key should be
 *                    sorted within each partition, or <code>false</code>
 *                    if sorting is not necessary.
 * @param grid        {@link InvocationGrid} to be used for running the job
 * @throws IOException if the job cannot be constructed
 */
public HServerJob(Configuration conf, String jobName, boolean sortEnabled, InvocationGrid grid)
        throws IOException {
    super(conf, jobName);
    this.sortEnabled = sortEnabled;
    this.grid = grid;
    // We do not have a way to keep track of the number of jobs performed, so make the
    // jobtracker id parameter unique instead by appending a UUID.
    jobID = new JobID("HServer_" + jobName + "_" + UUID.randomUUID(), 0);
    this.getConfiguration().setBoolean(HServerParameters.IS_HSERVER_JOB, true);
    this.getConfiguration().setInt(HServerParameters.INVOCATION_ID, getAppId());
}
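
Because jtIdentifier is an arbitrary string, appending a UUID is enough to keep these synthetic IDs unique without any JobTracker involvement. One caveat, not from the source: the string form of such an ID contains extra underscores, so it will likely not round-trip through JobID.forName(), which expects the plain job_<identifier>_<number> layout. A short sketch (the job name is illustrative):

JobID syntheticId = new JobID("HServer_wordcount_" + UUID.randomUUID(), 0);
System.out.println(syntheticId);           // e.g. job_HServer_wordcount_<uuid>_0000
// JobID.forName(syntheticId.toString());  // would likely fail: extra '_' in the identifier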

From source file:com.splicemachine.derby.impl.io.WholeTextInputFormatTest.java

License:Apache License

@Test
public void testGetsStreamForDirectory() throws Exception {
    /*
     * This test failed before changes to WholeTextInputFormat (hooray for test-driven development!),
     * so this constitutes an effective regression test for SPLICE-739. Of course, we'll be certain
     * about it by ALSO writing an IT, but this is a nice little Unit test of the same thing.
     */
    Configuration configuration = HConfiguration.unwrapDelegate();
    String dirPath = SpliceUnitTest.getResourceDirectory() + "multiLineDirectory";
    configuration.set("mapred.input.dir", dirPath);

    WholeTextInputFormat wtif = new WholeTextInputFormat();
    wtif.setConf(configuration);

    JobContext ctx = new JobContextImpl(configuration, new JobID("test", 1));
    List<InputSplit> splits = wtif.getSplits(ctx);

    int i = 0;
    Set<String> files = readFileNames(dirPath);

    Assert.assertEquals("We didn't get a split per file", files.size(), splits.size());

    Set<String> readFiles = new HashSet<>();
    long totalRecords = 0;

    for (InputSplit is : splits) {
        TaskAttemptContext tac = new TaskAttemptContextImpl(configuration,
                new TaskAttemptID("test", 1, true, i, 1));
        RecordReader<String, InputStream> recordReader = wtif.createRecordReader(is, tac);
        CombineFileSplit cfs = (CombineFileSplit) is;
        System.out.println(cfs);

        totalRecords += collectRecords(readFiles, recordReader);
        i++;
    }
    Assert.assertEquals("did not read all data!", 28, totalRecords);

    Assert.assertEquals("Did not read all files!", files.size(), readFiles.size());
    for (String expectedFile : files) {
        Assert.assertTrue("Did not read file <" + expectedFile + "> read =" + readFiles + " exp",
                readFiles.contains(expectedFile));
    }
}

From source file:com.stratio.deep.commons.extractor.impl.GenericHadoopExtractor.java

License:Apache License

@Override
public Partition[] getPartitions(S config) {

    int id = config.getRddId();

    jobId = new JobID(jobTrackerId, id);

    Configuration conf = getHadoopConfig(config);

    JobContext jobContext = DeepSparkHadoopMapReduceUtil.newJobContext(conf, jobId);

    try {
        List<InputSplit> splits = inputFormat.getSplits(jobContext);

        Partition[] partitions = new Partition[(splits.size())];
        for (int i = 0; i < splits.size(); i++) {
            partitions[i] = new NewHadoopPartition(id, i, splits.get(i));
        }

        return partitions;

    } catch (IOException | InterruptedException | RuntimeException e) {
        LOG.error("Impossible to calculate partitions " + e.getMessage());
        throw new DeepGenericException("Impossible to calculate partitions ", e);
    }

}

From source file:cz.seznam.euphoria.hadoop.HadoopUtils.java

License:Apache License

public static JobContext createJobContext(Configuration conf) {
    // TODO jobId uses some default hard-coded value
    return new JobContextImpl(conf, new JobID("", 0));
}

From source file:edu.uci.ics.hyracks.hdfs.ContextFactory.java

License:Apache License

public JobContext createJobContext(Configuration conf) {
    return new JobContextImpl(conf, new JobID("0", 0));
}
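
Both this helper and the previous one construct a placeholder JobID purely to satisfy the JobContextImpl constructor. For reference, a minimal sketch of the string form such a placeholder takes and how it can be parsed back:

JobID placeholder = new JobID("0", 0);
System.out.println(placeholder);                // job_0_0000
JobID parsed = JobID.forName("job_0_0000");     // reconstructs an equal JobID
System.out.println(placeholder.equals(parsed)); // true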

From source file:it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License:Apache License

/**
 * test PipesMapRunner: tests the transfer of data from the reader
 *
 * @throws Exception
 */
@Test
public void testRunner() throws Exception {
    // clean old password files
    File[] psw = cleanTokenPasswordFile();
    try {
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

        Job job = new Job(new Configuration());
        job.setJobID(jobId);
        Configuration conf = job.getConfiguration();
        conf.set(Submitter.IS_JAVA_RR, "true");
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptid.toString());
        job.setInputFormatClass(DummyInputFormat.class);
        FileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf);

        DummyInputFormat input_format = new DummyInputFormat();
        List<InputSplit> isplits = input_format.getSplits(job);

        InputSplit isplit = isplits.get(0);

        TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

        RecordReader<FloatWritable, NullWritable> rReader = input_format.createRecordReader(isplit, tcontext);

        TestMapContext context = new TestMapContext(conf, taskAttemptid, rReader, null, null, null, isplit);
        // stub for client
        File fCommand = getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeApplicationRunnableStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        // token for authorization
        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(),
                "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, job.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
        PipesMapper<FloatWritable, NullWritable, IntWritable, Text> mapper = new PipesMapper<FloatWritable, NullWritable, IntWritable, Text>(
                context);

        initStdOut(conf);
        mapper.run(context);
        String stdOut = readStdOut(conf);

        // test part of the translated data via the client's stdout
        // (a file shared by the client and the test)
        // check version
        assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
        // check key and value classes
        assertTrue(stdOut.contains("Key class:org.apache.hadoop.io.FloatWritable"));
        assertTrue(stdOut.contains("Value class:org.apache.hadoop.io.NullWritable"));
        // test that all data was sent from the reader
        assertTrue(stdOut.contains("value:0.0"));
        assertTrue(stdOut.contains("value:9.0"));

    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}
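
This test and the two that follow from the same file all build the same ID hierarchy: a JobID, a TaskID derived from it, and a TaskAttemptID derived from that. In isolation, the chain and the canonical string forms look roughly like this (a minimal sketch; the identifier value is the placeholder used by the tests):

JobID jobId = new JobID("201408272347", 0);              // job_201408272347_0000
TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);      // task_201408272347_0000_m_000000
TaskAttemptID attemptId = new TaskAttemptID(taskId, 0);  // attempt_201408272347_0000_m_000000_0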

From source file:it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License:Apache License

/**
 * test org.apache.hadoop.mapreduce.pipes.Application
 * test internal functions:
 *     MessageType.REGISTER_COUNTER,  INCREMENT_COUNTER, STATUS, PROGRESS...
 *
 * @throws Throwable
 */

@Test
public void testApplication() throws Throwable {

    System.err.println("testApplication");

    File[] psw = cleanTokenPasswordFile();
    try {
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

        Job job = new Job(new Configuration());
        job.setJobID(jobId);
        Configuration conf = job.getConfiguration();
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptid.toString());
        FileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf);

        File fCommand = getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeApplicationStub");
        //getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeApplicationRunnableStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        System.err.println("fCommand" + fCommand.getAbsolutePath());

        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(),
                "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, job.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);

        TestReporter reporter = new TestReporter();
        DummyInputFormat input_format = new DummyInputFormat();
        List<InputSplit> isplits = input_format.getSplits(job);
        InputSplit isplit = isplits.get(0);
        TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

        DummyRecordReader reader = (DummyRecordReader) input_format.createRecordReader(isplit, tcontext);

        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(Text.class);

        RecordWriter<IntWritable, Text> writer = new TestRecordWriter(
                new FileOutputStream(workSpace.getAbsolutePath() + File.separator + "outfile"));

        MapContextImpl<IntWritable, Text, IntWritable, Text> context = new MapContextImpl<IntWritable, Text, IntWritable, Text>(
                conf, taskAttemptid, null, writer, null, reporter, null);

        System.err.println("ready to launch application");
        Application<IntWritable, Text, IntWritable, Text> application = new Application<IntWritable, Text, IntWritable, Text>(
                context, reader);
        System.err.println("done");

        application.getDownlink().flush();
        application.getDownlink().mapItem(new IntWritable(3), new Text("txt"));
        application.getDownlink().flush();
        application.waitForFinish();

        // test getDownlink().mapItem();
        String stdOut = readStdOut(conf);
        assertTrue(stdOut.contains("key:3"));
        assertTrue(stdOut.contains("value:txt"));

        assertEquals(0.0, context.getProgress(), 0.01);
        assertNotNull(context.getCounter("group", "name"));

        // test status MessageType.STATUS
        assertEquals(context.getStatus(), "PROGRESS");
        // check MessageType.PROGRESS
        assertEquals(0.55f, reader.getProgress(), 0.001);
        application.getDownlink().close();
        // test MessageType.OUTPUT
        stdOut = readFile(new File(workSpace.getAbsolutePath() + File.separator + "outfile"));
        assertTrue(stdOut.contains("key:123"));
        assertTrue(stdOut.contains("value:value"));
        try {
            // try to abort
            application.abort(new Throwable());
            fail();
        } catch (IOException e) {
            // abort works ?
            assertEquals("pipe child exception", e.getMessage());
        }
    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}

From source file:it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License:Apache License

/**
 * test org.apache.hadoop.mapreduce.pipes.PipesReducer
 * test the transfer of data: key and value
 *
 * @throws Exception
 */
@Test
public void testPipesReducer() throws Exception {
    System.err.println("testPipesReducer");

    File[] psw = cleanTokenPasswordFile();
    try {
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

        Job job = new Job(new Configuration());
        job.setJobID(jobId);
        Configuration conf = job.getConfiguration();
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptid.toString());
        FileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf);

        File fCommand = getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeReducerStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        System.err.println("fCommand" + fCommand.getAbsolutePath());

        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(),
                "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, job.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);

        TestReporter reporter = new TestReporter();
        DummyInputFormat input_format = new DummyInputFormat();
        List<InputSplit> isplits = input_format.getSplits(job);
        InputSplit isplit = isplits.get(0);
        TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

        RecordWriter<IntWritable, Text> writer = new TestRecordWriter(
                new FileOutputStream(workSpace.getAbsolutePath() + File.separator + "outfile"));

        BooleanWritable bw = new BooleanWritable(true);
        List<Text> texts = new ArrayList<Text>();
        texts.add(new Text("first"));
        texts.add(new Text("second"));
        texts.add(new Text("third"));

        DummyRawKeyValueIterator kvit = new DummyRawKeyValueIterator();

        ReduceContextImpl<BooleanWritable, Text, IntWritable, Text> context = new ReduceContextImpl<BooleanWritable, Text, IntWritable, Text>(
                conf, taskAttemptid, kvit, null, null, writer, null, null, null, BooleanWritable.class,
                Text.class);

        PipesReducer<BooleanWritable, Text, IntWritable, Text> reducer = new PipesReducer<BooleanWritable, Text, IntWritable, Text>();
        reducer.setup(context);

        initStdOut(conf);
        reducer.reduce(bw, texts, context);
        reducer.cleanup(context);
        String stdOut = readStdOut(conf);

        // test data: key
        assertTrue(stdOut.contains("reducer key :true"));
        // and values
        assertTrue(stdOut.contains("reduce value  :first"));
        assertTrue(stdOut.contains("reduce value  :second"));
        assertTrue(stdOut.contains("reduce value  :third"));

    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }

}