List of usage examples for org.apache.hadoop.io MapWritable get
@Override
public Writable get(Object key)
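MapWritable stores Writable keys and values and compares keys by equals()/hashCode(), so get works with any freshly constructed key of equal contents. Below is a minimal sketch of the put/get round trip for context (the class name and values are illustrative, not taken from the examples that follow):

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class MapWritableGetSketch {
    public static void main(String[] args) {
        MapWritable map = new MapWritable();
        map.put(new Text("colour"), new Text("red"));
        map.put(new IntWritable(1), new Text("one"));

        // Keys are compared by value, so a new Writable with equal
        // contents retrieves the stored entry.
        Writable byText = map.get(new Text("colour"));      // Text("red")
        Writable byInt = map.get(new IntWritable(1));       // Text("one")

        // get returns Writable; callers cast to the concrete type.
        System.out.println((Text) byText);                  // prints: red
        System.out.println(map.get(new Text("missing")));   // prints: null
    }
}

The real-world examples below all follow this pattern: call get, then downcast the returned Writable to the expected concrete type.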
From source file:com.shmsoft.dmass.main.Reduce.java
License:Apache License
private Metadata getAllMetadata(MapWritable map) {
    Metadata metadata = new Metadata();
    Set<Writable> set = map.keySet();
    Iterator<Writable> iter = set.iterator();
    while (iter.hasNext()) {
        String name = iter.next().toString();
        if (!ParameterProcessing.NATIVE.equals(name) && !ParameterProcessing.NATIVE_AS_PDF.equals(name)) {
            // all metadata but native - which is bytes!
            Text value = (Text) map.get(new Text(name));
            metadata.set(name, value.toString());
        }
    }
    return metadata;
}
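Note the lookup pattern: because keys are compared by value, building a fresh Text from the key's string round-trips to the stored entry, and the result is downcast from Writable to Text.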
From source file:crunch.MaxTemperature.java
License:Apache License
@Test
public void mapWritable() throws IOException {
    // vv MapWritableTest
    MapWritable src = new MapWritable();
    src.put(new IntWritable(1), new Text("cat"));
    src.put(new VIntWritable(2), new LongWritable(163));

    MapWritable dest = new MapWritable();
    WritableUtils.cloneInto(dest, src);
    assertThat((Text) dest.get(new IntWritable(1)), is(new Text("cat")));
    assertThat((LongWritable) dest.get(new VIntWritable(2)), is(new LongWritable(163)));
    // ^^ MapWritableTest
}
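WritableUtils.cloneInto copies src into dest by serializing and deserializing it; the assertions then confirm that get finds the copied entries under newly constructed, equal keys. IntWritable(1) and VIntWritable(2) stay distinct keys because MapWritable records each key's concrete class.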
From source file:edu.ub.ahstfg.io.WritableConverter.java
License:Open Source License
/**
 * Converts a MapWritable to a HashMap<String, LinkedList<Short>>.
 *
 * @param input MapWritable to convert.
 * @return Converted HashMap.
 */
public static HashMap<String, LinkedList<Short>> mapWritable2HashMapStringLinkedListShort(MapWritable input) {
    HashMap<String, LinkedList<Short>> ret = new HashMap<String, LinkedList<Short>>();
    Text t;
    ArrayWritable aw;
    LinkedList<Short> al;
    for (Writable w : input.keySet()) {
        t = (Text) w;
        aw = (ArrayWritable) input.get(t);
        al = arrayWritable2LinkedListShort(aw);
        ret.put(t.toString(), al);
    }
    return ret;
}
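Since get returns a plain Writable, the converter downcasts each value to ArrayWritable before translating it into a Java collection with the helper arrayWritable2LinkedListShort.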
From source file:full_MapReduce.AttributeInfoReducer.java
License:Open Source License
public void reduce(Text key, Iterable<AttributeCounterWritable> values, Context context)
        throws IOException, InterruptedException {
    MapWritable res = new MapWritable();
    Text value;
    Text classification;
    IntWritable count;
    for (AttributeCounterWritable cur_attribute_counter : values) {
        value = cur_attribute_counter.getValue();
        classification = cur_attribute_counter.getClassification();
        count = cur_attribute_counter.getCount();
        if (!res.containsKey(value)) {
            res.put(new Text(value), new MapWritable());
        }
        MapWritable cur_map = (MapWritable) res.get(value);
        if (!cur_map.containsKey(classification)) {
            cur_map.put(new Text(classification), new IntWritable(0));
        }
        ((IntWritable) cur_map.get(classification))
                .set(((IntWritable) cur_map.get(classification)).get() + count.get());
    }
    context.write(key, res);
}
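This is a get-or-create pattern on a nested MapWritable: the outer map is keyed by attribute value, the inner map by classification, and the IntWritable retrieved with get is mutated in place to accumulate the running count.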
From source file:full_MapReduce.FindBestAttributeMapper.java
License:Open Source License
private TextArrayWritable getValues(MapWritable value) {
    TextArrayWritable res = new TextArrayWritable();
    Text[] tmp_res = new Text[value.keySet().size()];
    int index = 0;
    for (Writable w1 : value.keySet()) {
        MapWritable mw = (MapWritable) value.get(w1);
        int nb_class = mw.size();
        Text prefered_class = new Text();
        IntWritable best_count = new IntWritable(Integer.MIN_VALUE);
        for (Writable w2 : mw.keySet()) {
            if (((IntWritable) mw.get(w2)).compareTo(best_count) > 0) {
                best_count = (IntWritable) mw.get(w2);
                prefered_class.set((Text) w2);
            }
        }
        tmp_res[index++] = new Text(((Text) w1).toString() + " " + nb_class + " " + prefered_class.toString());
    }
    res.set(tmp_res);
    return res;
}
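Each outer entry fetched with get is itself a MapWritable of classification counts; the inner loop scans it to find the majority class for that attribute value.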
From source file:full_MapReduce.FindBestAttributeMapper.java
License:Open Source License
private Map<Text, Integer> getTuplePerSplit(MapWritable data) {
    Map<Text, Integer> res = new HashMap<Text, Integer>();
    Text my_text_key;
    int nb_tuple;
    for (Writable my_key : data.keySet()) {
        my_text_key = (Text) my_key;
        nb_tuple = 0;
        for (Writable my_value : ((MapWritable) data.get(my_key)).values()) {
            nb_tuple += ((IntWritable) my_value).get();
        }
        res.put(new Text(my_text_key), nb_tuple);
    }
    return res;
}
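Here the inner MapWritable returned by get holds per-class counts, which are summed to give the number of tuples in each split.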
From source file:full_MapReduce.FindBestAttributeMapper.java
License:Open Source License
private double global_entropy(MapWritable data, int tot_tuple) {
    double res = 0.0;
    Map<Text, Integer> count_per_class = new HashMap<Text, Integer>();
    for (Writable tmp_cur_map : data.values()) {
        MapWritable cur_map = (MapWritable) tmp_cur_map;
        for (Writable cur_key : cur_map.keySet()) {
            Text cur_key_text = (Text) cur_key;
            if (!count_per_class.containsKey(cur_key_text)) {
                count_per_class.put(new Text(cur_key_text), 0);
            }
            count_per_class.put(cur_key_text,
                    ((IntWritable) cur_map.get(cur_key)).get() + count_per_class.get(cur_key_text));
        }
    }
    double p;
    for (Integer i : count_per_class.values()) {
        p = (i * 1.0) / tot_tuple;
        res -= p * Math.log(p) / Math.log(2);
    }
    return res;
}
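This computes the Shannon entropy of the overall class distribution, -sum(p * log2 p), after accumulating class counts across every inner map.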
From source file:full_MapReduce.FindBestAttributeMapper.java
License:Open Source License
private double gain(double global_entropy, Map<Text, Integer> tuple_per_split, MapWritable data, int tot_tuple) {
    double sum_partial_entropy = 0;
    Text my_text_key;
    int nb_tuple;
    double uniform_ratio;
    double p;
    for (Writable my_key : data.keySet()) {
        my_text_key = (Text) my_key;
        nb_tuple = tuple_per_split.get(my_text_key);
        uniform_ratio = (nb_tuple * 1.0) / tot_tuple;
        for (Writable my_count : ((MapWritable) data.get(my_key)).values()) {
            p = (((IntWritable) my_count).get() * 1.0) / nb_tuple;
            sum_partial_entropy -= uniform_ratio * p * Math.log(p) / Math.log(2);
        }
    }
    return global_entropy - sum_partial_entropy;
}
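Together with getTuplePerSplit and global_entropy above, this is the classic ID3-style information gain: the global entropy minus the size-weighted entropy of each split.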
From source file:gaffer.accumulostore.AccumuloStoreBackedGraphFactory.java
License:Apache License
/**
 * Creates a new {@link gaffer.accumulostore.AccumuloStore} from a
 * properties file only, provided the table name specified in that
 * properties file already exists.
 *
 * @param propertiesFileLocation the properties file location
 * @return A new Instance of the AccumuloStore
 * @throws StoreException if any issues occur creating a Graph backed by an Accumulo Store.
 */
public static Graph getGraph(final Path propertiesFileLocation) throws StoreException {
    final AccumuloProperties props = new AccumuloProperties(propertiesFileLocation);
    final MapWritable map = TableUtils.getStoreConstructorInfo(props);
    final DataSchema dataSchema = DataSchema
            .fromJson(((BytesWritable) map.get(AccumuloStoreConstants.DATA_SCHEMA_KEY)).getBytes());
    final StoreSchema storeSchema = StoreSchema
            .fromJson(((BytesWritable) map.get(AccumuloStoreConstants.STORE_SCHEMA_KEY)).getBytes());
    final String keyPackageClass;
    try {
        keyPackageClass = new String(
                ((BytesWritable) map.get(AccumuloStoreConstants.KEY_PACKAGE_KEY)).getBytes(),
                AccumuloStoreConstants.UTF_8_CHARSET);
    } catch (final UnsupportedEncodingException e) {
        throw new StoreException(e.getMessage(), e);
    }
    if (!props.getKeyPackageClass().equals(keyPackageClass)) {
        LOGGER.warn("Key package class " + props.getKeyPackageClass()
                + " will be overridden by cached class " + keyPackageClass);
        props.setKeyPackageClass(keyPackageClass);
    }
    return new Graph(dataSchema, storeSchema, props);
}
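The cached store constructor info arrives as a MapWritable of BytesWritable values keyed by well-known constants, so each get result is cast to BytesWritable before the schemas and key package name are decoded from its bytes.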
From source file:gaffer.accumulostore.key.core.AbstractCoreKeyAccumuloElementConverter.java
License:Apache License
@Override
public Properties getPropertiesFromValue(final String group, final Value value)
        throws AccumuloElementConversionException {
    final Properties properties = new Properties();
    if (value == null || value.getSize() == 0) {
        return properties;
    }
    final MapWritable map = new MapWritable();
    try (final InputStream inStream = new ByteArrayInputStream(value.get());
            final DataInputStream dataStream = new DataInputStream(inStream)) {
        map.readFields(dataStream);
    } catch (final IOException e) {
        throw new AccumuloElementConversionException("Failed to read map writable from value", e);
    }
    final StoreElementDefinition elementDefinition = storeSchema.getElement(group);
    if (null == elementDefinition) {
        throw new AccumuloElementConversionException("No StoreElementDefinition found for group " + group
                + " is this group in your Store Schema or do your table iterators need updating?");
    }
    for (final Writable writeableKey : map.keySet()) {
        final String propertyName = writeableKey.toString();
        final BytesWritable propertyValueBytes = (BytesWritable) map.get(writeableKey);
        try {
            properties.put(propertyName, elementDefinition.getProperty(propertyName).getSerialiser()
                    .deserialise(propertyValueBytes.getBytes()));
        } catch (final SerialisationException e) {
            throw new AccumuloElementConversionException("Failed to deserialise property " + propertyName, e);
        }
    }
    return properties;
}
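The whole property map travels as a single serialized MapWritable inside the Accumulo Value; after readFields repopulates it, get retrieves each property's raw bytes for the schema's deserialiser.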