List of usage examples for org.apache.hadoop.io.NullWritable.get()
public static NullWritable get()
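NullWritable.get() returns the shared singleton NullWritable instance. It is commonly used as a placeholder key or value when only one half of a MapReduce key/value pair carries data, as the examples below show. A minimal sketch of that pattern follows; the class name LineOnlyMapper is illustrative and not taken from any of the source files listed here.

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical sketch: emit each input line as the key with NullWritable as the
// value, so the job output contains only the line text and no extra payload.
public class LineOnlyMapper extends Mapper<LongWritable, Text, Text, NullWritable> {
    @Override
    protected void map(LongWritable offset, Text line, Context context)
            throws IOException, InterruptedException {
        context.write(line, NullWritable.get());
    }
}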
From source file:com.ibm.jaql.io.hadoop.converter.ToJsonTextConverter.java
License:Apache License
@Override
public WritableComparable<?> createKeyTarget() {
    return NullWritable.get();
}
From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.TestJsonNodeWritableUtils.java
License:Apache License
@Test
public void test_transform() {
    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());
    new JsonNodeWritableUtils(); // coverage!
    assertEquals(NullNode.instance, JsonNodeWritableUtils.transform("banana", JsonNodeFactory.instance));
    assertEquals(null, JsonNodeWritableUtils.transform(null, JsonNodeFactory.instance));
    assertEquals(NullNode.instance,
            JsonNodeWritableUtils.transform(NullWritable.get(), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(true, JsonNode.class),
            JsonNodeWritableUtils.transform(new BooleanWritable(true), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue("test", JsonNode.class),
            JsonNodeWritableUtils.transform(new Text("test"), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(new byte[] { (byte) 0xFF }, JsonNode.class),
            JsonNodeWritableUtils.transform(new ByteWritable((byte) 0xFF), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(4, JsonNode.class),
            JsonNodeWritableUtils.transform(new IntWritable(4), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(4, JsonNode.class),
            JsonNodeWritableUtils.transform(new VIntWritable(4), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(4L, JsonNode.class),
            JsonNodeWritableUtils.transform(new LongWritable(4), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(4L, JsonNode.class),
            JsonNodeWritableUtils.transform(new VLongWritable(4), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(new byte[] { (byte) 0xFF, (byte) 0xFE }, JsonNode.class),
            JsonNodeWritableUtils.transform(new BytesWritable(new byte[] { (byte) 0xFF, (byte) 0xFE }),
                    JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(4.0, JsonNode.class),
            JsonNodeWritableUtils.transform(new DoubleWritable(4), JsonNodeFactory.instance));
    // (had real trouble creating a float node!)
    assertEquals(JsonNodeFactory.instance.numberNode(Float.valueOf((float) 4.0)),
            JsonNodeWritableUtils.transform(new FloatWritable(4), JsonNodeFactory.instance));
    // will test object writable and array writable below
}
From source file:com.inmobi.conduit.distcp.tools.mapred.CopyMapper.java
License:Apache License
/**
 * Implementation of the Mapper::map(). Does the copy.
 * @param relPath The target relative path.
 * @param sourceFileStatus The source file status.
 * @throws IOException
 */
@Override
public void map(Text relPath, FileStatus sourceFileStatus, Context context)
        throws IOException, InterruptedException {
    Path sourcePath = sourceFileStatus.getPath();
    Map<Long, Long> received = null;
    if (context.getConfiguration().getBoolean(ConduitConstants.AUDIT_ENABLED_KEY, true)) {
        received = new HashMap<Long, Long>();
    }
    if (LOG.isDebugEnabled())
        LOG.debug("DistCpMapper::map(): Received " + sourcePath + ", " + relPath);

    Path target = new Path(targetWorkPath.makeQualified(targetFS) + relPath.toString());

    EnumSet<DistCpOptions.FileAttribute> fileAttributes = getFileAttributeSettings(context);

    final String description = "Copying " + sourcePath + " to " + target;
    context.setStatus(description);
    LOG.info(description);

    try {
        FileStatus sourceCurrStatus;
        FileSystem sourceFS;
        try {
            sourceFS = sourcePath.getFileSystem(conf);
            sourceCurrStatus = sourceFS.getFileStatus(sourcePath);
        } catch (FileNotFoundException e) {
            throw new IOException(new RetriableFileCopyCommand.CopyReadException(e));
        }

        FileStatus targetStatus = null;
        try {
            targetStatus = targetFS.getFileStatus(target);
        } catch (FileNotFoundException ignore) {
        }

        if (targetStatus != null && (targetStatus.isDir() != sourceCurrStatus.isDir())) {
            throw new IOException("Can't replace " + target + ". Target is " + getFileType(targetStatus)
                    + ", Source is " + getFileType(sourceCurrStatus));
        }

        if (sourceCurrStatus.isDir()) {
            createTargetDirsWithRetry(description, target, context);
            return;
        }

        if (skipFile(sourceFS, sourceCurrStatus, target)) {
            LOG.info("Skipping copy of " + sourceCurrStatus.getPath() + " to " + target);
            updateSkipCounters(context, sourceCurrStatus);
        } else {
            String streamName = null;
            if (!relPath.toString().isEmpty()) {
                Path relativePath = new Path(relPath.toString());
                if (relativePath.depth() > 2) {
                    // path is for mirror service and is of format
                    // /conduit/streams/<streamName>/2013/09/12
                    Path tmpPath = relativePath;
                    while (tmpPath.getParent() != null
                            && !tmpPath.getParent().getName().equals("streams")) {
                        tmpPath = tmpPath.getParent();
                    }
                    streamName = tmpPath.getName();
                } else {
                    // path is for merge service and of form /<stream name>/filename.gz
                    streamName = relativePath.getParent().getName();
                }
            }
            copyFileWithRetry(description, sourceCurrStatus, target, context, fileAttributes, received);
            // generate audit counters
            if (received != null) {
                for (Entry<Long, Long> entry : received.entrySet()) {
                    String counterNameValue = getCounterNameValue(streamName, sourcePath.getName(),
                            entry.getKey(), entry.getValue());
                    context.write(NullWritable.get(), new Text(counterNameValue));
                }
            }
        }
        DistCpUtils.preserve(target.getFileSystem(conf), target, sourceCurrStatus, fileAttributes);
    } catch (IOException exception) {
        handleFailures(exception, sourceFileStatus, target, context);
    }
}
From source file:com.inmobi.conduit.local.CopyMapper.java
License:Apache License
@Override
public void map(Text key, FileStatus value, Context context) throws IOException, InterruptedException {
    Path src = value.getPath();
    String dest = key.toString();
    String collector = src.getParent().getName();
    String category = src.getParent().getParent().getName();

    Map<Long, Long> received = null;
    if (context.getConfiguration().getBoolean(ConduitConstants.AUDIT_ENABLED_KEY, true)) {
        received = new HashMap<Long, Long>();
    }
    Configuration srcConf = new Configuration();
    srcConf.set(FS_DEFAULT_NAME_KEY, context.getConfiguration().get(SRC_FS_DEFAULT_NAME_KEY));

    FileSystem fs = FileSystem.get(srcConf);
    Path target = getTempPath(context, src, category, collector);
    if (FileUtil.gzip(src, target, srcConf, received)) {
        LOG.info("File " + src + " is empty hence returning without compressing");
        return;
    }
    // move to final destination
    fs.mkdirs(new Path(dest).makeQualified(fs));
    String destnFilename = collector + "-" + src.getName() + ".gz";
    Path destPath = new Path(dest + File.separator + destnFilename);
    LOG.info("Renaming file " + target + " to " + destPath);
    fs.rename(target, destPath);
    if (received != null) {
        for (Entry<Long, Long> entry : received.entrySet()) {
            String counterNameValue = getCounterNameValue(category, destnFilename, entry.getKey(),
                    entry.getValue());
            context.write(NullWritable.get(), new Text(counterNameValue));
        }
    }
}
From source file:com.intropro.prairie.format.seq.SequenceFormatWriter.java
License:Apache License
@Override
public void write(String line) throws IOException {
    if (writer == null) {
        tmpFile = File.createTempFile("seq-", ".dat");
        writer = SequenceFile.createWriter(new Configuration(), Writer.file(new Path(tmpFile.toURI())),
                Writer.keyClass(NullWritable.class), Writer.valueClass(Text.class));
    }
    text.set(line);
    writer.append(NullWritable.get(), text);
}
From source file:com.jfolson.hive.serde.RTypedBytesWritableInput.java
License:Apache License
public Writable read(RType type, Writable w) throws IOException {
    // can't use switch because not final??
    if (type.code == BYTES.code) {
        return readBytes((BytesWritable) w);
    } else if (type.code == BYTE.code) {
        return readByte((ByteWritable) w);
    } else if (type.code == BOOL.code) {
        return readBoolean((BooleanWritable) w);
    } else if (type.code == INT.code) {
        return readInt((IntWritable) w);
    } else if (type.code == SHORT.code) {
        return readShort((ShortWritable) w);
    } else if (type.code == LONG.code) {
        return readLong((LongWritable) w);
    } else if (type.code == FLOAT.code) {
        return readFloat((FloatWritable) w);
    } else if (type.code == DOUBLE.code) {
        return readDouble((DoubleWritable) w);
    } else if (type.code == STRING.code) {
        return readText((Text) w);
    } else if (type.code == VECTOR.code) {
        return readVector((ArrayWritable) w);
    } else if (type.code == MAP.code) {
        return readMap((MapWritable) w);
    } else if (type.code == WRITABLE.code) {
        return readWritable(w);
    } else if (type.code == ENDOFRECORD.code) {
        return null;
    } else if (type.code == NULL.code) {
        return NullWritable.get();
    } else if (type.code >= 50 && type.code <= 200) {
        byte[] bytes = this.in.readRaw(type.code);
        bytes[0] = (byte) type.code;
        ((TypedBytesWritable) w).set(bytes, 0, bytes.length);
        return w;
    } else {
        throw new RuntimeException("unknown type");
    }
}
From source file:com.justgiving.raven.kissmetrics.jsonenricher.KissmetricsJsonToEnrichedJsonReducer.java
License:Open Source License
public void reduce(Text key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
    //int total = 0;
    for (Text value : values) {
        // total += value.get();
        context.write(value, NullWritable.get());
    }
}
From source file:com.justgiving.raven.kissmetrics.jsonenricher.KissmetricsJsonToEnrichedJsonReducerTest.java
License:Open Source License
@Test
public void reducer_keyAndjson_json() throws IOException {
    List<Text> values = new ArrayList<Text>();
    values.add(new Text("{\"_n\":\"viewed signup\",\"_p\":\"bob@bob.com\",\"_t\":1397577453}"));
    //values.add(new IntWritable(1));
    //mapDriver.withInput(new LongWritable(1), new Text("{\"_n\":\"viewed signup\",\"_p\":\"bob@bob.com\",\"_t\":1397577453}"));
    reduceDriver.withInput(new Text("1397577453"), values);
    NullWritable nullKey = NullWritable.get();
    reduceDriver.withOutput(new Text("{\"_n\":\"viewed signup\",\"_p\":\"bob@bob.com\",\"_t\":1397577453}"),
            nullKey);
    reduceDriver.runTest(true);
}
From source file:com.kylinolap.job.hadoop.invertedindex.RandomKeyDistributionMapper.java
License:Apache License
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
    RandomSampler<KEY> sampler = new RandomSampler<KEY>();
    List<KEY> sampleResult = sampler.sample(allKeys, sampleNumber);
    for (KEY k : sampleResult) {
        context.write(k, NullWritable.get());
    }
}
From source file:com.kylinolap.job.hadoop.invertedindex.RandomKeyDistributionReducer.java
License:Apache License
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
    int stepLength = allSplits.size() / regionNumber;
    for (int i = stepLength; i < allSplits.size(); i += stepLength) {
        context.write(allSplits.get(i), NullWritable.get());
    }
}