List of usage examples for org.apache.hadoop.mapreduce TaskAttemptContext getConfiguration
public Configuration getConfiguration();
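Before the per-project examples, here is a minimal sketch of the typical call site: a custom RecordReader pulls the job Configuration from the TaskAttemptContext inside initialize() and reads per-job settings from it. The class name DelimitedRecordReader and the property name example.delimiter are illustrative placeholders, not taken from any of the projects listed below.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class DelimitedRecordReader extends RecordReader<LongWritable, Text> {

    private String delimiter;

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context)
            throws IOException, InterruptedException {
        // The job Configuration travels with the task attempt; read
        // per-job settings from it during initialization.
        Configuration conf = context.getConfiguration();
        delimiter = conf.get("example.delimiter", "\n"); // hypothetical property name
    }

    // Remaining RecordReader methods stubbed out for brevity.
    @Override public boolean nextKeyValue() { return false; }
    @Override public LongWritable getCurrentKey() { return null; }
    @Override public Text getCurrentValue() { return null; }
    @Override public float getProgress() { return 0f; }
    @Override public void close() { }
}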
From source file:com.knewton.mapreduce.SSTableRecordReaderTest.java
License:Apache License
/**
 * Test to make sure that initialization fails when an invalid partitioner is set.
 */
@Test(expected = IllegalArgumentException.class)
public void testInitializeInvalidPartitioner() throws Exception {
    TaskAttemptContext context = getTaskAttemptContext(true, true, true);
    context.getConfiguration().set(PropertyConstants.PARTITIONER.txt, "invalidPartitioner");
    ssTableColumnRecordReader.initialize(inputSplit, context);
}
From source file:com.knewton.mapreduce.SSTableRecordReaderTest.java
License:Apache License
/**
 * Test to make sure that initialization fails when an invalid comparator is set.
 */
@Test(expected = IllegalArgumentException.class)
public void testInitializeInvalidComparator() throws Exception {
    TaskAttemptContext context = getTaskAttemptContext(true, true, true);
    context.getConfiguration().set(PropertyConstants.COLUMN_COMPARATOR.txt, "invalidComparator");
    ssTableColumnRecordReader.initialize(inputSplit, context);
}
From source file:com.knewton.mapreduce.SSTableRecordReaderTest.java
License:Apache License
/**
 * Test to make sure that initialization fails when an invalid subcomparator is set.
 */
@Test(expected = IllegalArgumentException.class)
public void testInitializeInvalidSubComparator() throws Exception {
    TaskAttemptContext context = getTaskAttemptContext(true, true, true);
    context.getConfiguration().set(PropertyConstants.COLUMN_SUBCOMPARATOR.txt, "invalidSubComparator");
    ssTableColumnRecordReader.initialize(inputSplit, context);
}
From source file:com.knewton.mrtool.io.JsonRecordReader.java
License:Apache License
/**
 * Should be called before any key or value is read to set up any initialization actions. If you
 * want to inject the name of the file into the json record, set the
 * {@link JsonRecordReader.APPEND_FILENAME_TO_JSON} property to true.
 */
@Override
public void initialize(InputSplit split, TaskAttemptContext context)
        throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    FileSplit fileSplit = (FileSplit) split;
    start = fileSplit.getStart();
    end = start + fileSplit.getLength();
    compressionCodecs = new CompressionCodecFactory(conf);
    in = initLineReader(fileSplit, conf);
    pos = start;
}
From source file:com.lightboxtechnologies.spectrum.FsEntryHBaseOutputFormat.java
License:Apache License
public static HTable getHTable(TaskAttemptContext ctx, byte[] colFam) throws IOException {
    final Configuration conf = HBaseConfiguration.create(ctx.getConfiguration());
    LOG.info("hbase.zookeeper.quorum:" + conf.get("hbase.zookeeper.quorum"));
    final String tblName = conf.get(HBaseTables.ENTRIES_TBL_VAR, HBaseTables.ENTRIES_TBL);
    return HBaseTables.summon(conf, tblName.getBytes(), HBaseTables.ENTRIES_COLFAM_B);
}
From source file:com.linkedin.camus.etl.kafka.common.StringKafkaRecordWriterProvider.java
@Override
public RecordWriter<IEtlKey, CamusWrapper> getDataRecordWriter(final TaskAttemptContext context,
        final String fileName, CamusWrapper data, FileOutputCommitter committer)
        throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    Path file = committer.getWorkPath();
    file = new Path(file, EtlMultiOutputFormat.getUniqueFile(context, fileName, getFilenameExtension()));

    CompressionCodec codec = null;
    SequenceFile.CompressionType compressionType = SequenceFile.CompressionType.NONE;

    final SequenceFile.Writer out = SequenceFile.createWriter(conf, SequenceFile.Writer.file(file),
            SequenceFile.Writer.keyClass(Text.class), SequenceFile.Writer.valueClass(Text.class),
            SequenceFile.Writer.compression(compressionType, codec),
            SequenceFile.Writer.progressable(context));

    return new RecordWriter<IEtlKey, CamusWrapper>() {
        @Override
        public void write(IEtlKey iEtlKey, CamusWrapper camusWrapper) throws IOException {
            String record = (String) camusWrapper.getRecord() + recordDelimiter;
            out.append(new Text(String.valueOf(iEtlKey.getOffset())), new Text(record.getBytes()));
        }

        @Override
        public void close(TaskAttemptContext taskAttemptContext) throws IOException {
            out.close();
        }
    };
}
From source file:com.linkedin.cubert.io.avro.PigAvroInputFormatAdaptor.java
License:Open Source License
@Override
public RecordReader<NullWritable, Writable> createRecordReader(InputSplit split, TaskAttemptContext context)
        throws IOException, InterruptedException {
    return getDelegate(context.getConfiguration()).createRecordReader(split, context);
}
From source file:com.linkedin.cubert.io.avro.PigAvroOutputFormatAdaptor.java
License:Open Source License
@Override
public RecordWriter<NullWritable, Object> getRecordWriter(TaskAttemptContext context)
        throws IOException, InterruptedException {
    return getDelegate(context.getConfiguration()).getRecordWriter(context);
}
From source file:com.linkedin.cubert.io.CubertInputFormat.java
License:Open Source License
@Override
public RecordReader<K, V> createRecordReader(InputSplit split, TaskAttemptContext context)
        throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    ConfigurationDiff confDiff = new ConfigurationDiff(conf);

    MultiMapperSplit mmSplit = (MultiMapperSplit) split;
    int multiMapperIndex = mmSplit.getMultiMapperIndex();

    confDiff.applyDiff(multiMapperIndex); // reset the conf to multiMapperIndex

    InputSplit actualSplit = mmSplit.getActualSplit();

    // get the actual input format class
    InputFormat<K, V> actualInputFormat = getActualInputFormat(context);

    RecordReader<K, V> reader = null;
    if (actualSplit instanceof CombineFileSplit) {
        reader = new CombinedFileRecordReader<K, V>(actualInputFormat, (CombineFileSplit) actualSplit, context);
    } else {
        reader = actualInputFormat.createRecordReader(actualSplit, context);
    }

    // confDiff.undoDiff(multiMapperIndex);

    return new MultiMapperRecordReader<K, V>(reader);
}
From source file:com.linkedin.cubert.io.MultiMapperInputFormat.java
License:Open Source License
@Override
public RecordReader<K, V> createRecordReader(InputSplit split, TaskAttemptContext context)
        throws IOException, InterruptedException {
    MultiMapperSplit mmSplit = (MultiMapperSplit) split;
    int multiMapperIndex = mmSplit.getMultiMapperIndex();

    return getDelegate(context.getConfiguration(), multiMapperIndex)
            .createRecordReader(mmSplit.getActualSplit(), context);
}