Example usage for org.apache.hadoop.mapreduce TaskType MAP

List of usage examples for org.apache.hadoop.mapreduce TaskType MAP

Introduction

On this page you can find example usage for org.apache.hadoop.mapreduce TaskType MAP.

Prototype

TaskType MAP

To view the source code for org.apache.hadoop.mapreduce TaskType MAP, click the Source Link.
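
The usage examples below share two recurring patterns: TaskType.MAP is passed to the TaskID and TaskAttemptID constructors to identify a map task attempt, and to Job.getTaskReports to fetch reports for map tasks only. The following minimal sketch illustrates both patterns; the class name, job identifier, and task numbers are illustrative placeholders, not values taken from any of the projects listed below.

import java.io.IOException;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskReport;
import org.apache.hadoop.mapreduce.TaskType;

public class TaskTypeMapSketch {

    // Build the id of the first attempt of the first map task of a job.
    // The job identifier "201408272347" is only a placeholder.
    static TaskAttemptID firstMapAttempt() {
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        return new TaskAttemptID(taskId, 0); // e.g. attempt_201408272347_0000_m_000000_0
    }

    // Fetch the reports of the map tasks of an already submitted job.
    static TaskReport[] mapTaskReports(Job submittedJob) throws IOException, InterruptedException {
        return submittedJob.getTaskReports(TaskType.MAP);
    }
}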

Usage

From source file:eu.stratosphere.addons.hbase.GenericTableOutputFormat.java

License:Apache License

@Override
public void open(int taskNumber, int numTasks) throws IOException {
    this.hadoopConfig = getHadoopConfig(this.config);

    /*
     * PLEASE NOTE:
     * If you are an Eclipse+Maven Integration user and you have two (or more) warnings here, please
     * close the pact-hbase project OR set the maven profile to hadoop_yarn
     * 
     * pact-hbase requires hadoop_yarn, but Eclipse is not able to parse maven profiles properly. Therefore,
     * it imports the pact-hbase project even if it is not included in the standard profile (hadoop_v1)
     */
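    // Build a map-type attempt id for this subtask; the Hadoop output format needs a TaskAttemptContext.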
    final TaskAttemptID attemptId = new TaskAttemptID(this.jtID, this.jobId, TaskType.MAP, taskNumber - 1, 0);

    this.context = new org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl(this.hadoopConfig, attemptId);
    final HFileOutputFormat outFormat = new HFileOutputFormat();
    try {
        this.writer = outFormat.getRecordWriter(this.context);
    } catch (InterruptedException iex) {
        throw new IOException("Opening the writer was interrupted.", iex);
    }
}

From source file:gobblin.runtime.mapreduce.GobblinWorkUnitsInputFormatTest.java

License:Apache License

@Test
public void testRecordReader() throws Exception {

    List<String> paths = Lists.newArrayList("/path1", "/path2");
    GobblinWorkUnitsInputFormat.GobblinSplit split = new GobblinWorkUnitsInputFormat.GobblinSplit(paths);

    GobblinWorkUnitsInputFormat inputFormat = new GobblinWorkUnitsInputFormat();
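    // A synthetic map-task attempt id is enough to build the TaskAttemptContext for the record reader.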
    RecordReader<LongWritable, Text> recordReader = inputFormat.createRecordReader(split,
            new TaskAttemptContextImpl(new Configuration(), new TaskAttemptID("a", 1, TaskType.MAP, 1, 1)));

    recordReader.nextKeyValue();
    Assert.assertEquals(recordReader.getCurrentKey().get(), 0);
    Assert.assertEquals(recordReader.getCurrentValue().toString(), "/path1");

    recordReader.nextKeyValue();
    Assert.assertEquals(recordReader.getCurrentKey().get(), 1);
    Assert.assertEquals(recordReader.getCurrentValue().toString(), "/path2");

    Assert.assertFalse(recordReader.nextKeyValue());

}

From source file:io.druid.indexer.updater.HadoopConverterJob.java

License:Apache License

public List<DataSegment> run() throws IOException {
    final JobConf jobConf = new JobConf();
    jobConf.setKeepFailedTaskFiles(false);
    for (Map.Entry<String, String> entry : converterConfig.getHadoopProperties().entrySet()) {
        jobConf.set(entry.getKey(), entry.getValue(), "converterConfig.getHadoopProperties()");
    }
    final List<DataSegment> segments = converterConfig.getSegments();
    if (segments.isEmpty()) {
        throw new IAE("No segments found for datasource [%s]", converterConfig.getDataSource());
    }
    converterConfigIntoConfiguration(converterConfig, segments, jobConf);

    jobConf.setNumReduceTasks(0);// Map only. Number of map tasks determined by input format
    jobConf.setWorkingDirectory(new Path(converterConfig.getDistributedSuccessCache()));

    setJobName(jobConf, segments);

    if (converterConfig.getJobPriority() != null) {
        jobConf.setJobPriority(JobPriority.valueOf(converterConfig.getJobPriority()));
    }

    final Job job = Job.getInstance(jobConf);

    job.setInputFormatClass(ConfigInputFormat.class);
    job.setMapperClass(ConvertingMapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);
    job.setMapSpeculativeExecution(false);
    job.setOutputFormatClass(ConvertingOutputFormat.class);

    JobHelper.setupClasspath(JobHelper.distributedClassPath(jobConf.getWorkingDirectory()),
            JobHelper.distributedClassPath(getJobClassPathDir(job.getJobName(), jobConf.getWorkingDirectory())),
            job);

    Throwable throwable = null;
    try {
        job.submit();
        log.info("Job %s submitted, status available at %s", job.getJobName(), job.getTrackingURL());
        final boolean success = job.waitForCompletion(true);
        if (!success) {
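            // Map-only job (zero reduce tasks), so failure diagnostics come from the map task reports.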
            final TaskReport[] reports = job.getTaskReports(TaskType.MAP);
            if (reports != null) {
                for (final TaskReport report : reports) {
                    log.error("Error in task [%s] : %s", report.getTaskId(),
                            Arrays.toString(report.getDiagnostics()));
                }
            }
            return null;
        }
        try {
            loadedBytes = job.getCounters().findCounter(COUNTER_GROUP, COUNTER_LOADED).getValue();
            writtenBytes = job.getCounters().findCounter(COUNTER_GROUP, COUNTER_WRITTEN).getValue();
        } catch (IOException ex) {
            log.error(ex, "Could not fetch counters");
        }
        final JobID jobID = job.getJobID();

        final Path jobDir = getJobPath(jobID, job.getWorkingDirectory());
        final FileSystem fs = jobDir.getFileSystem(job.getConfiguration());
        final RemoteIterator<LocatedFileStatus> it = fs.listFiles(jobDir, true);
        final List<Path> goodPaths = new ArrayList<>();
        while (it.hasNext()) {
            final LocatedFileStatus locatedFileStatus = it.next();
            if (locatedFileStatus.isFile()) {
                final Path myPath = locatedFileStatus.getPath();
                if (ConvertingOutputFormat.DATA_SUCCESS_KEY.equals(myPath.getName())) {
                    goodPaths.add(new Path(myPath.getParent(), ConvertingOutputFormat.DATA_FILE_KEY));
                }
            }
        }
        if (goodPaths.isEmpty()) {
            log.warn("No good data found at [%s]", jobDir);
            return null;
        }
        final List<DataSegment> returnList = ImmutableList
                .copyOf(Lists.transform(goodPaths, new Function<Path, DataSegment>() {
                    @Nullable
                    @Override
                    public DataSegment apply(final Path input) {
                        try {
                            if (!fs.exists(input)) {
                                throw new ISE("Somehow [%s] was found but [%s] is missing at [%s]",
                                        ConvertingOutputFormat.DATA_SUCCESS_KEY,
                                        ConvertingOutputFormat.DATA_FILE_KEY, jobDir);
                            }
                        } catch (final IOException e) {
                            throw Throwables.propagate(e);
                        }
                        try (final InputStream stream = fs.open(input)) {
                            return HadoopDruidConverterConfig.jsonMapper.readValue(stream, DataSegment.class);
                        } catch (final IOException e) {
                            throw Throwables.propagate(e);
                        }
                    }
                }));
        if (returnList.size() == segments.size()) {
            return returnList;
        } else {
            throw new ISE(
                    "Tasks reported success but result length did not match! Expected %d found %d at path [%s]",
                    segments.size(), returnList.size(), jobDir);
        }
    } catch (InterruptedException | ClassNotFoundException e) {
        RuntimeException exception = Throwables.propagate(e);
        throwable = exception;
        throw exception;
    } catch (Throwable t) {
        throwable = t;
        throw t;
    } finally {
        try {
            cleanup(job);
        } catch (IOException e) {
            if (throwable != null) {
                throwable.addSuppressed(e);
            } else {
                log.error(e, "Could not clean up job [%s]", job.getJobID());
            }
        }
    }
}

From source file:it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License:Apache License

/**
 * test PipesMapRunner: tests the transfer of data from the reader
 *
 * @throws Exception
 */
@Test
public void testRunner() throws Exception {
    // clean old password files
    File[] psw = cleanTokenPasswordFile();
    try {
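        // Identify the first attempt of the first map task of a synthetic job.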
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

        Job job = new Job(new Configuration());
        job.setJobID(jobId);
        Configuration conf = job.getConfiguration();
        conf.set(Submitter.IS_JAVA_RR, "true");
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptid.toString());
        job.setInputFormatClass(DummyInputFormat.class);
        FileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf);

        DummyInputFormat input_format = new DummyInputFormat();
        List<InputSplit> isplits = input_format.getSplits(job);

        InputSplit isplit = isplits.get(0);

        TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

        RecordReader<FloatWritable, NullWritable> rReader = input_format.createRecordReader(isplit, tcontext);

        TestMapContext context = new TestMapContext(conf, taskAttemptid, rReader, null, null, null, isplit);
        // stub for client
        File fCommand = getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeApplicationRunnableStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        // token for authorization
        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(),
                "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, job.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
        PipesMapper<FloatWritable, NullWritable, IntWritable, Text> mapper = new PipesMapper<FloatWritable, NullWritable, IntWritable, Text>(
                context);

        initStdOut(conf);
        mapper.run(context);
        String stdOut = readStdOut(conf);

        // test part of translated data. As common file for client and test -
        // clients stdOut
        // check version
        assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
        // check key and value classes
        assertTrue(stdOut.contains("Key class:org.apache.hadoop.io.FloatWritable"));
        assertTrue(stdOut.contains("Value class:org.apache.hadoop.io.NullWritable"));
        // test have sent all data from reader
        assertTrue(stdOut.contains("value:0.0"));
        assertTrue(stdOut.contains("value:9.0"));

    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}

From source file:it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License:Apache License

/**
 * test org.apache.hadoop.mapreduce.pipes.Application
 * test internal functions:
 *     MessageType.REGISTER_COUNTER,  INCREMENT_COUNTER, STATUS, PROGRESS...
 *
 * @throws Throwable
 */

@Test
public void testApplication() throws Throwable {

    System.err.println("testApplication");

    File[] psw = cleanTokenPasswordFile();
    try {
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

        Job job = new Job(new Configuration());
        job.setJobID(jobId);
        Configuration conf = job.getConfiguration();
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptid.toString());
        FileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf);

        File fCommand = getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeApplicationStub");
        //getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeApplicationRunnableStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        System.err.println("fCommand" + fCommand.getAbsolutePath());

        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(),
                "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, job.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);

        TestReporter reporter = new TestReporter();
        DummyInputFormat input_format = new DummyInputFormat();
        List<InputSplit> isplits = input_format.getSplits(job);
        InputSplit isplit = isplits.get(0);
        TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

        DummyRecordReader reader = (DummyRecordReader) input_format.createRecordReader(isplit, tcontext);

        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(Text.class);

        RecordWriter<IntWritable, Text> writer = new TestRecordWriter(
                new FileOutputStream(workSpace.getAbsolutePath() + File.separator + "outfile"));

        MapContextImpl<IntWritable, Text, IntWritable, Text> context = new MapContextImpl<IntWritable, Text, IntWritable, Text>(
                conf, taskAttemptid, null, writer, null, reporter, null);

        System.err.println("ready to launch application");
        Application<IntWritable, Text, IntWritable, Text> application = new Application<IntWritable, Text, IntWritable, Text>(
                context, reader);
        System.err.println("done");

        application.getDownlink().flush();
        application.getDownlink().mapItem(new IntWritable(3), new Text("txt"));
        application.getDownlink().flush();
        application.waitForFinish();

        // test getDownlink().mapItem();
        String stdOut = readStdOut(conf);
        assertTrue(stdOut.contains("key:3"));
        assertTrue(stdOut.contains("value:txt"));

        assertEquals(0.0, context.getProgress(), 0.01);
        assertNotNull(context.getCounter("group", "name"));

        // test status MessageType.STATUS
        assertEquals(context.getStatus(), "PROGRESS");
        // check MessageType.PROGRESS
        assertEquals(0.55f, reader.getProgress(), 0.001);
        application.getDownlink().close();
        // test MessageType.OUTPUT
        stdOut = readFile(new File(workSpace.getAbsolutePath() + File.separator + "outfile"));
        assertTrue(stdOut.contains("key:123"));
        assertTrue(stdOut.contains("value:value"));
        try {
            // try to abort
            application.abort(new Throwable());
            fail();
        } catch (IOException e) {
            // abort works ?
            assertEquals("pipe child exception", e.getMessage());
        }
    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}

From source file:it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License:Apache License

/**
 * test org.apache.hadoop.mapreduce.pipes.PipesReducer
 * test the transfer of data: key and value
 *
 * @throws Exception
 */
@Test
public void testPipesReducer() throws Exception {
    System.err.println("testPipesReducer");

    File[] psw = cleanTokenPasswordFile();
    try {
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

        Job job = new Job(new Configuration());
        job.setJobID(jobId);
        Configuration conf = job.getConfiguration();
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptid.toString());
        FileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf);

        File fCommand = getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeReducerStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        System.err.println("fCommand" + fCommand.getAbsolutePath());

        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(),
                "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, job.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);

        TestReporter reporter = new TestReporter();
        DummyInputFormat input_format = new DummyInputFormat();
        List<InputSplit> isplits = input_format.getSplits(job);
        InputSplit isplit = isplits.get(0);
        TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

        RecordWriter<IntWritable, Text> writer = new TestRecordWriter(
                new FileOutputStream(workSpace.getAbsolutePath() + File.separator + "outfile"));

        BooleanWritable bw = new BooleanWritable(true);
        List<Text> texts = new ArrayList<Text>();
        texts.add(new Text("first"));
        texts.add(new Text("second"));
        texts.add(new Text("third"));

        DummyRawKeyValueIterator kvit = new DummyRawKeyValueIterator();

        ReduceContextImpl<BooleanWritable, Text, IntWritable, Text> context = new ReduceContextImpl<BooleanWritable, Text, IntWritable, Text>(
                conf, taskAttemptid, kvit, null, null, writer, null, null, null, BooleanWritable.class,
                Text.class);

        PipesReducer<BooleanWritable, Text, IntWritable, Text> reducer = new PipesReducer<BooleanWritable, Text, IntWritable, Text>();
        reducer.setup(context);

        initStdOut(conf);
        reducer.reduce(bw, texts, context);
        reducer.cleanup(context);
        String stdOut = readStdOut(conf);

        // test data: key
        assertTrue(stdOut.contains("reducer key :true"));
        // and values
        assertTrue(stdOut.contains("reduce value  :first"));
        assertTrue(stdOut.contains("reduce value  :second"));
        assertTrue(stdOut.contains("reduce value  :third"));

    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }

}

From source file:it.crs4.pydoop.mapreduce.pipes.TestPipesNonJavaInputFormat.java

License:Apache License

/**
 * test PipesNonJavaInputFormat
 */

@Test
public void testFormat() throws IOException, InterruptedException {
    JobID jobId = new JobID("201408272347", 0);
    TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
    TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

    Job job = new Job(new Configuration());
    job.setJobID(jobId);
    Configuration conf = job.getConfiguration();

    TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

    PipesNonJavaInputFormat input_format = new PipesNonJavaInputFormat();

    DummyRecordReader reader = (DummyRecordReader) input_format.createRecordReader(new FileSplit(), tcontext);
    assertEquals(0.0f, reader.getProgress(), 0.001);

    // input and output files
    File input1 = new File(workSpace + File.separator + "input1");
    if (!input1.getParentFile().exists()) {
        Assert.assertTrue(input1.getParentFile().mkdirs());
    }

    if (!input1.exists()) {
        Assert.assertTrue(input1.createNewFile());
    }

    File input2 = new File(workSpace + File.separator + "input2");
    if (!input2.exists()) {
        Assert.assertTrue(input2.createNewFile());
    }

    // This will fail without HDFS support.
    // // set data for splits
    // conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,
    //          StringUtils.escapeString(input1.getAbsolutePath()) + ","
    //          + StringUtils.escapeString(input2.getAbsolutePath()));
    // List<InputSplit> splits = input_format.getSplits(job);
    // assertTrue(splits.size() >= 2);

    PipesNonJavaInputFormat.PipesDummyRecordReader dummyRecordReader = new PipesNonJavaInputFormat.PipesDummyRecordReader(
            new FileSplit(), tcontext);
    // empty dummyRecordReader
    assertEquals(0.0, dummyRecordReader.getProgress(), 0.001);
    // test method next
    assertTrue(dummyRecordReader.next(new FloatWritable(2.0f), NullWritable.get()));
    assertEquals(2.0, dummyRecordReader.getProgress(), 0.001);
    dummyRecordReader.close();
}

From source file:org.apache.accumulo.examples.wikisearch.logic.TestQueryLogic.java

License:Apache License

@Before
public void setup() throws Exception {

    Logger.getLogger(AbstractQueryLogic.class).setLevel(Level.DEBUG);
    Logger.getLogger(QueryLogic.class).setLevel(Level.DEBUG);
    Logger.getLogger(RangeCalculator.class).setLevel(Level.DEBUG);

    conf.set(AggregatingRecordReader.START_TOKEN, "<page>");
    conf.set(AggregatingRecordReader.END_TOKEN, "</page>");
    conf.set(WikipediaConfiguration.TABLE_NAME, TABLE_NAME);
    conf.set(WikipediaConfiguration.NUM_PARTITIONS, "1");
    conf.set(WikipediaConfiguration.NUM_GROUPS, "1");

    MockInstance i = new MockInstance();
    c = i.getConnector("root", new PasswordToken(""));
    WikipediaIngester.createTables(c.tableOperations(), TABLE_NAME, false);
    for (String table : TABLE_NAMES) {
        writerMap.put(new Text(table), c.createBatchWriter(table, 1000L, 1000L, 1));
    }

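    // Synthetic map-task attempt id used to drive the mapper locally, outside a real MapReduce job.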
    TaskAttemptID id = new TaskAttemptID("fake", 1, TaskType.MAP, 1, 1);
    TaskAttemptContext context = new TaskAttemptContextImpl(conf, id);

    RawLocalFileSystem fs = new RawLocalFileSystem();
    fs.setConf(conf);

    URL url = ClassLoader.getSystemResource("enwiki-20110901-001.xml");
    Assert.assertNotNull(url);
    File data = new File(url.toURI());
    Path tmpFile = new Path(data.getAbsolutePath());

    // Setup the Mapper
    WikipediaInputSplit split = new WikipediaInputSplit(
            new FileSplit(tmpFile, 0, fs.pathToFile(tmpFile).length(), null), 0);
    AggregatingRecordReader rr = new AggregatingRecordReader();
    Path ocPath = new Path(tmpFile, "oc");
    OutputCommitter oc = new FileOutputCommitter(ocPath, context);
    fs.deleteOnExit(ocPath);
    StandaloneStatusReporter sr = new StandaloneStatusReporter();
    rr.initialize(split, context);
    MockAccumuloRecordWriter rw = new MockAccumuloRecordWriter();
    WikipediaMapper mapper = new WikipediaMapper();

    // there are times I wonder, "Why do Java people think this is good?" then I drink more whiskey
    final MapContextImpl<LongWritable, Text, Text, Mutation> mapContext = new MapContextImpl<LongWritable, Text, Text, Mutation>(
            conf, id, rr, rw, oc, sr, split);
    // Load data into Mock Accumulo
    Mapper<LongWritable, Text, Text, Mutation>.Context con = mapper.new Context() {
        /**
         * Get the input split for this map.
         */
        public InputSplit getInputSplit() {
            return mapContext.getInputSplit();
        }

        @Override
        public LongWritable getCurrentKey() throws IOException, InterruptedException {
            return mapContext.getCurrentKey();
        }

        @Override
        public Text getCurrentValue() throws IOException, InterruptedException {
            return mapContext.getCurrentValue();
        }

        @Override
        public boolean nextKeyValue() throws IOException, InterruptedException {
            return mapContext.nextKeyValue();
        }

        @Override
        public Counter getCounter(Enum<?> counterName) {
            return mapContext.getCounter(counterName);
        }

        @Override
        public Counter getCounter(String groupName, String counterName) {
            return mapContext.getCounter(groupName, counterName);
        }

        @Override
        public OutputCommitter getOutputCommitter() {
            return mapContext.getOutputCommitter();
        }

        @Override
        public void write(Text key, Mutation value) throws IOException, InterruptedException {
            mapContext.write(key, value);
        }

        @Override
        public String getStatus() {
            return mapContext.getStatus();
        }

        @Override
        public TaskAttemptID getTaskAttemptID() {
            return mapContext.getTaskAttemptID();
        }

        @Override
        public void setStatus(String msg) {
            mapContext.setStatus(msg);
        }

        @Override
        public Path[] getArchiveClassPaths() {
            return mapContext.getArchiveClassPaths();
        }

        @Override
        public String[] getArchiveTimestamps() {
            return mapContext.getArchiveTimestamps();
        }

        @Override
        public URI[] getCacheArchives() throws IOException {
            return mapContext.getCacheArchives();
        }

        @Override
        public URI[] getCacheFiles() throws IOException {
            return mapContext.getCacheFiles();
        }

        @Override
        public Class<? extends Reducer<?, ?, ?, ?>> getCombinerClass() throws ClassNotFoundException {
            return mapContext.getCombinerClass();
        }

        @Override
        public Configuration getConfiguration() {
            return mapContext.getConfiguration();
        }

        @Override
        public Path[] getFileClassPaths() {
            return mapContext.getFileClassPaths();
        }

        @Override
        public String[] getFileTimestamps() {
            return mapContext.getFileTimestamps();
        }

        @Override
        public RawComparator<?> getGroupingComparator() {
            return mapContext.getGroupingComparator();
        }

        @Override
        public Class<? extends InputFormat<?, ?>> getInputFormatClass() throws ClassNotFoundException {
            return mapContext.getInputFormatClass();
        }

        @Override
        public String getJar() {
            return mapContext.getJar();
        }

        @Override
        public JobID getJobID() {
            return mapContext.getJobID();
        }

        @Override
        public String getJobName() {
            return mapContext.getJobName();
        }

        /*@Override
        public boolean userClassesTakesPrecedence() {
          return mapContext.userClassesTakesPrecedence();
        }*/

        @Override
        public boolean getJobSetupCleanupNeeded() {
            return mapContext.getJobSetupCleanupNeeded();
        }

        @Override
        public boolean getTaskCleanupNeeded() {
            return mapContext.getTaskCleanupNeeded();
        }

        @Override
        public Path[] getLocalCacheArchives() throws IOException {
            return mapContext.getLocalCacheArchives();
        }

        @Override
        public Path[] getLocalCacheFiles() throws IOException {
            return mapContext.getLocalCacheFiles();
        }

        @Override
        public Class<?> getMapOutputKeyClass() {
            return mapContext.getMapOutputKeyClass();
        }

        @Override
        public Class<?> getMapOutputValueClass() {
            return mapContext.getMapOutputValueClass();
        }

        @Override
        public Class<? extends Mapper<?, ?, ?, ?>> getMapperClass() throws ClassNotFoundException {
            return mapContext.getMapperClass();
        }

        @Override
        public int getMaxMapAttempts() {
            return mapContext.getMaxMapAttempts();
        }

        @Override
        public int getMaxReduceAttempts() {
            return mapContext.getMaxReduceAttempts();
        }

        @Override
        public int getNumReduceTasks() {
            return mapContext.getNumReduceTasks();
        }

        @Override
        public Class<? extends OutputFormat<?, ?>> getOutputFormatClass() throws ClassNotFoundException {
            return mapContext.getOutputFormatClass();
        }

        @Override
        public Class<?> getOutputKeyClass() {
            return mapContext.getOutputKeyClass();
        }

        @Override
        public Class<?> getOutputValueClass() {
            return mapContext.getOutputValueClass();
        }

        @Override
        public Class<? extends Partitioner<?, ?>> getPartitionerClass() throws ClassNotFoundException {
            return mapContext.getPartitionerClass();
        }

        @Override
        public Class<? extends Reducer<?, ?, ?, ?>> getReducerClass() throws ClassNotFoundException {
            return mapContext.getReducerClass();
        }

        @Override
        public RawComparator<?> getSortComparator() {
            return mapContext.getSortComparator();
        }

        @Override
        public boolean getSymlink() {
            return mapContext.getSymlink();
        }

        @Override
        public Path getWorkingDirectory() throws IOException {
            return mapContext.getWorkingDirectory();
        }

        @Override
        public void progress() {
            mapContext.progress();
        }

        @Override
        public boolean getProfileEnabled() {
            return mapContext.getProfileEnabled();
        }

        @Override
        public String getProfileParams() {
            return mapContext.getProfileParams();
        }

        @Override
        public IntegerRanges getProfileTaskRange(boolean isMap) {
            return mapContext.getProfileTaskRange(isMap);
        }

        @Override
        public String getUser() {
            return mapContext.getUser();
        }

        @Override
        public Credentials getCredentials() {
            return mapContext.getCredentials();
        }

        @Override
        public float getProgress() {
            return mapContext.getProgress();
        }
    };

    mapper.run(con);

    // Flush and close record writers.
    rw.close(context);

    table = new QueryLogic();
    table.setMetadataTableName(METADATA_TABLE_NAME);
    table.setTableName(TABLE_NAME);
    table.setIndexTableName(INDEX_TABLE_NAME);
    table.setReverseIndexTableName(RINDEX_TABLE_NAME);
    table.setUseReadAheadIterator(false);
    table.setUnevaluatedFields(Collections.singletonList("TEXT"));
}

From source file:org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.MapReduceJobHistoryUpdater.java

License:Apache License

private void processTaskAttemptFinishedEvent(PreparedStatement entityPS, LoggingEvent logEvent,
        TaskAttemptFinishedEvent historyEvent) {

    if (historyEvent.getTaskType() == TaskType.MAP || historyEvent.getTaskType() == TaskType.REDUCE) {
        LOG.debug("Ignoring TaskAttemptFinishedEvent for " + historyEvent.getTaskType());
        return;
    }

    try {
        entityPS.setString(1, historyEvent.getTaskId().getJobID().toString());
        entityPS.setString(2, historyEvent.getTaskId().toString());
        entityPS.setString(3, historyEvent.getTaskType().toString());
        entityPS.setLong(4, historyEvent.getFinishTime());
        entityPS.setString(5, historyEvent.getTaskStatus());
        entityPS.setString(6, historyEvent.getHostname());
        entityPS.setString(7, historyEvent.getAttemptId().toString());
        entityPS.executeUpdate();
    } catch (SQLException sqle) {
        LOG.info("Failed to store " + historyEvent.getEventType() + " for taskAttempt "
                + historyEvent.getAttemptId() + " into " + TASKATTEMPT_TABLE, sqle);
    }
}

From source file:org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.MapReduceJobHistoryUpdater.java

License:Apache License

private void processMapAttemptFinishedEvent(PreparedStatement entityPS, LoggingEvent logEvent,
        MapAttemptFinishedEvent historyEvent) {

    if (historyEvent.getTaskType() != TaskType.MAP) {
        LOG.debug("Ignoring MapAttemptFinishedEvent for " + historyEvent.getTaskType());
        return;
    }

    long[] ioBytes = getInputOutputBytes(historyEvent.getCounters());

    try {
        entityPS.setString(1, historyEvent.getTaskId().getJobID().toString());
        entityPS.setString(2, historyEvent.getTaskId().toString());
        entityPS.setString(3, historyEvent.getTaskType().toString());
        entityPS.setLong(4, historyEvent.getMapFinishTime());
        entityPS.setLong(5, historyEvent.getFinishTime());
        entityPS.setLong(6, ioBytes[0]);
        entityPS.setLong(7, ioBytes[1]);
        entityPS.setString(8, historyEvent.getTaskStatus());
        entityPS.setString(9, historyEvent.getHostname());
        entityPS.setString(10, historyEvent.getAttemptId().toString());
        entityPS.executeUpdate();
    } catch (SQLException sqle) {
        LOG.info("Failed to store " + historyEvent.getEventType() + " for taskAttempt "
                + historyEvent.getAttemptId() + " into " + TASKATTEMPT_TABLE, sqle);
    }
}