List of usage examples for the org.apache.hadoop.io.MapWritable constructor
public MapWritable()
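Before the collected examples, a minimal sketch of the constructor in use. MapWritable is Hadoop's Writable map whose keys and values are themselves Writables; the no-arg constructor creates an empty map. The class and methods below are the real Hadoop API, but the key and value names are illustrative only.

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class MapWritableExample {
    public static void main(String[] args) {
        MapWritable map = new MapWritable();              // empty map
        map.put(new Text("count"), new IntWritable(42));  // Writable key and value
        Writable value = map.get(new Text("count"));
        System.out.println(value);                        // prints 42
    }
}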
From source file: org.apache.sqoop.mapreduce.odps.HdfsOdpsImportJob.java
License: Apache License
private void configureGenericRecordExportInputFormat(Job job, String tableName) throws IOException {
    if (options.getOdpsTable() != null) {
        // Build a column-name -> Java-type map from the ODPS column types.
        MapWritable columnTypes = new MapWritable();
        Map<String, OdpsType> colTypeMap = getColTypeMap();
        for (Map.Entry<String, OdpsType> e : colTypeMap.entrySet()) {
            String column = e.getKey();
            if (column != null) {
                Text columnName = new Text(column);
                Text columnType = new Text(toJavaType(e.getValue()));
                columnTypes.put(columnName, columnType);
            }
        }
        DefaultStringifier.store(job.getConfiguration(), columnTypes,
                AvroExportMapper.AVRO_COLUMN_TYPES_MAP);
        return;
    }
    // Otherwise derive the column types from the database via the ConnManager.
    ConnManager connManager = context.getConnManager();
    Map<String, Integer> columnTypeInts;
    if (options.getCall() == null) {
        columnTypeInts = connManager.getColumnTypes(tableName, options.getSqlQuery());
    } else {
        columnTypeInts = connManager.getColumnTypesForProcedure(options.getCall());
    }
    String[] specifiedColumns = options.getColumns();
    MapWritable columnTypes = new MapWritable();
    for (Map.Entry<String, Integer> e : columnTypeInts.entrySet()) {
        String column = e.getKey();
        column = (specifiedColumns == null) ? column : options.getColumnNameCaseInsensitive(column);
        if (column != null) {
            Text columnName = new Text(column);
            Text columnType = new Text(connManager.toJavaType(tableName, column, e.getValue()));
            columnTypes.put(columnName, columnType);
        }
    }
    DefaultStringifier.store(job.getConfiguration(), columnTypes,
            AvroExportMapper.AVRO_COLUMN_TYPES_MAP);
}
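For context, a map stored with DefaultStringifier.store as above is typically read back with DefaultStringifier.load. A hedged sketch of that counterpart: DefaultStringifier.load is the standard Hadoop API, while the constant name simply mirrors the one used above and the surrounding mapper setup is assumed.

// Hypothetical read-back of the column-types map inside a mapper.
MapWritable columnTypes = DefaultStringifier.load(context.getConfiguration(),
        AvroExportMapper.AVRO_COLUMN_TYPES_MAP, MapWritable.class);
for (Map.Entry<Writable, Writable> e : columnTypes.entrySet()) {
    String column = e.getKey().toString();     // column name
    String javaType = e.getValue().toString(); // Java type name stored above
    // ... use the type to interpret the corresponding record field
}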
From source file: org.apache.taverna.platform.execution.impl.hadoop.TavernaRecordReader.java
License: Apache License
@Override
public MapWritable getCurrentValue() throws IOException, InterruptedException {
    MapWritable mapWritable = new MapWritable();
    mapWritable.put(new Text("tag"), new Text(datalinks.get(recordName)));
    mapWritable.put(new Text("record"), new Text(files[index].getPath().toString()));
    return mapWritable;
}
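A value produced by this record reader would be consumed in a mapper by looking up the known keys. A minimal sketch under the assumption that the job wires this reader to a standard Mapper; the mapper class, its key type, and its output are illustrative, not taken from the Taverna source.

// Hypothetical consumer: "tag" and "record" match the keys put into the map above.
public static class TavernaRecordMapper extends Mapper<Text, MapWritable, Text, Text> {
    @Override
    protected void map(Text key, MapWritable value, Context context)
            throws IOException, InterruptedException {
        Text tag = (Text) value.get(new Text("tag"));
        Text record = (Text) value.get(new Text("record"));
        context.write(tag, record);
    }
}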
From source file: org.commoncrawl.util.CrawlDatum.java
License: Apache License
public CrawlDatum() {
    metaData = new MapWritable();
}
From source file: org.elasticsearch.hadoop.serialization.FieldExtractorTests.java
License: Apache License
@Test
public void testMapWritableFieldExtractorTopLevel() throws Exception {
    ConstantFieldExtractor cfe = new MapWritableFieldExtractor();
    Map<Writable, Writable> m = new MapWritable();
    m.put(new Text("key"), new Text("value"));
    assertEquals(new Text("value"), extract(cfe, "key", m));
}
From source file: org.elasticsearch.hadoop.serialization.FieldExtractorTests.java
License: Apache License
@Test
public void testMapWritableFieldExtractorNestedNotFound() throws Exception {
    ConstantFieldExtractor cfe = new MapWritableFieldExtractor();
    Map<Writable, Writable> m = new MapWritable();
    assertEquals(FieldExtractor.NOT_FOUND, extract(cfe, "key", m));
}
From source file: org.elasticsearch.hadoop.serialization.FieldExtractorTests.java
License: Apache License
@Test
public void testMapFieldExtractorNested() throws Exception {
    ConstantFieldExtractor cfe = new MapWritableFieldExtractor();
    Map<Writable, Writable> m = new MapWritable();
    MapWritable nested = new MapWritable();
    nested.put(new Text("bar"), new Text("found"));
    m.put(new Text("foo"), nested);
    assertEquals(new Text("found"), extract(cfe, "foo.bar", m));
}
From source file: org.elasticsearch.hadoop.serialization.handler.write.impl.HiveSerializationEventConverterTest.java
License: Apache License
@Test
public void generateEventHiveRecordLimited() throws Exception {
    Map<Writable, Writable> map = new MapWritable();
    map.put(new Text("one"), new IntWritable(1));
    map.put(new Text("two"), new IntWritable(2));
    map.put(new Text("three"), new IntWritable(3));

    HiveType tuple = new HiveType(map, TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
            TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo)));

    SerializationEventConverter eventConverter = new SerializationEventConverter();
    SerializationFailure iaeFailure = new SerializationFailure(new IllegalArgumentException("garbage"),
            tuple, new ArrayList<String>());

    String rawEvent = eventConverter.getRawEvent(iaeFailure);
    assertThat(rawEvent, startsWith("HiveType{object=org.apache.hadoop.io.MapWritable@"));
    String timestamp = eventConverter.getTimestamp(iaeFailure);
    assertTrue(StringUtils.hasText(timestamp));
    assertTrue(DateUtils.parseDate(timestamp).getTime().getTime() > 1L);
    String exceptionType = eventConverter.renderExceptionType(iaeFailure);
    assertEquals("illegal_argument_exception", exceptionType);
    String exceptionMessage = eventConverter.renderExceptionMessage(iaeFailure);
    assertEquals("garbage", exceptionMessage);
    String eventMessage = eventConverter.renderEventMessage(iaeFailure);
    assertEquals("Could not construct bulk entry from record", eventMessage);
}
From source file: org.elasticsearch.hadoop.serialization.handler.write.impl.SerializationEventConverterTest.java
License: Apache License
@Test
public void generateEventWritable() throws Exception {
    MapWritable document = new MapWritable();
    document.put(new Text("field"), new Text("value"));

    SerializationEventConverter eventConverter = new SerializationEventConverter();
    SerializationFailure iaeFailure = new SerializationFailure(new IllegalArgumentException("garbage"),
            document, new ArrayList<String>());

    String rawEvent = eventConverter.getRawEvent(iaeFailure);
    assertThat(rawEvent, Matchers.startsWith("org.apache.hadoop.io.MapWritable@"));
    String timestamp = eventConverter.getTimestamp(iaeFailure);
    assertTrue(StringUtils.hasText(timestamp));
    assertTrue(DateUtils.parseDate(timestamp).getTime().getTime() > 1L);
    String exceptionType = eventConverter.renderExceptionType(iaeFailure);
    assertEquals("illegal_argument_exception", exceptionType);
    String exceptionMessage = eventConverter.renderExceptionMessage(iaeFailure);
    assertEquals("garbage", exceptionMessage);
    String eventMessage = eventConverter.renderEventMessage(iaeFailure);
    assertEquals("Could not construct bulk entry from record", eventMessage);
}
From source file: org.elasticsearch.hadoop.util.WritableUtils.java
License: Apache License
@SuppressWarnings({ "unchecked", "rawtypes" })
public static Writable toWritable(Object object) {
    if (object instanceof Writable) {
        return (Writable) object;
    }
    if (object == null) {
        return NullWritable.get();
    }
    if (object instanceof String) {
        return new Text((String) object);
    }
    if (object instanceof Long) {
        return new VLongWritable((Long) object);
    }
    if (object instanceof Integer) {
        return new VIntWritable((Integer) object);
    }
    if (object instanceof Byte) {
        return new ByteWritable((Byte) object);
    }
    if (object instanceof Short) {
        return WritableCompatUtil.availableShortWritable((Short) object);
    }
    if (object instanceof Double) {
        return new DoubleWritable((Double) object);
    }
    if (object instanceof Float) {
        return new FloatWritable((Float) object);
    }
    if (object instanceof Boolean) {
        return new BooleanWritable((Boolean) object);
    }
    if (object instanceof byte[]) {
        return new BytesWritable((byte[]) object);
    }
    if (object instanceof List) {
        List<Object> list = (List<Object>) object;
        if (!list.isEmpty()) {
            // Element type of the ArrayWritable is taken from the first element.
            Object first = list.get(0);
            Writable[] content = new Writable[list.size()];
            for (int i = 0; i < content.length; i++) {
                content[i] = toWritable(list.get(i));
            }
            return new ArrayWritable(toWritable(first).getClass(), content);
        }
        return new ArrayWritable(NullWritable.class, new Writable[0]);
    }
    if (object instanceof SortedSet) {
        // Sorted sets become SortedMapWritable keys with NullWritable values.
        SortedMapWritable smap = new SortedMapWritable();
        SortedSet<Object> set = (SortedSet) object;
        for (Object obj : set) {
            smap.put((WritableComparable) toWritable(obj), NullWritable.get());
        }
        return smap;
    }
    if (object instanceof Set) {
        // Plain sets become MapWritable keys with NullWritable values.
        MapWritable map = new MapWritable();
        Set<Object> set = (Set) object;
        for (Object obj : set) {
            map.put(toWritable(obj), NullWritable.get());
        }
        return map;
    }
    if (object instanceof SortedMap) {
        SortedMapWritable smap = new SortedMapWritable();
        Map<Object, Object> map = (Map) object;
        for (Map.Entry<?, ?> entry : map.entrySet()) {
            smap.put((WritableComparable) toWritable(entry.getKey()), toWritable(entry.getValue()));
        }
        return smap;
    }
    if (object instanceof Map) {
        MapWritable result = new MapWritable();
        Map<Object, Object> map = (Map) object;
        for (Map.Entry<?, ?> entry : map.entrySet()) {
            result.put(toWritable(entry.getKey()), toWritable(entry.getValue()));
        }
        return result;
    }
    // Fall back to a byte array of the object's string form.
    return new BytesWritable(object.toString().getBytes(StringUtils.UTF_8));
}
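As a quick illustration of the conversion above, a hedged sketch of a round trip for a nested Java map. Note this WritableUtils is the es-hadoop helper shown above, not Hadoop's own org.apache.hadoop.io.WritableUtils; the map contents are illustrative.

// Nested java.util.Map -> nested MapWritable, per the branches above.
Map<Object, Object> inner = new HashMap<Object, Object>();
inner.put("hits", 10);                       // Integer value -> VIntWritable
Map<Object, Object> outer = new HashMap<Object, Object>();
outer.put("stats", inner);                   // String key -> Text

MapWritable mw = (MapWritable) WritableUtils.toWritable(outer);
MapWritable nested = (MapWritable) mw.get(new Text("stats"));
System.out.println(nested.get(new Text("hits"))); // prints 10 (a VIntWritable)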
From source file: org.freeeed.main.FileProcessor.java
License: Apache License
/**
 * Create a map.
 *
 * @param metadata Hadoop metadata to insert into map
 * @param discoveryFile discovery file whose native content is added to the map
 * @return Created map
 * @throws IOException
 */
private MapWritable createMapWritable(Metadata metadata, DiscoveryFile discoveryFile) throws IOException {
    String fileName = discoveryFile.getPath().getPath();
    MapWritable mapWritable = new MapWritable();
    String[] names = metadata.names();
    for (String name : names) {
        mapWritable.put(new Text(name), new Text(metadata.get(name)));
    }
    // Attach the native file content as bytes.
    byte[] bytes = Util.getFileContent(fileName);
    mapWritable.put(new Text(ParameterProcessing.NATIVE), new BytesWritable(bytes));
    if (isPdf()) {
        // If a PDF rendering exists alongside the native file, attach it too.
        String pdfFileName = fileName + ".pdf";
        if (new File(pdfFileName).exists()) {
            byte[] pdfBytes = Util.getFileContent(pdfFileName);
            mapWritable.put(new Text(ParameterProcessing.NATIVE_AS_PDF), new BytesWritable(pdfBytes));
        }
    }
    createMapWritableForHtml(mapWritable);
    return mapWritable;
}