List of usage examples for org.apache.hadoop.io.MapWritable.size()
@Override public int size()
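For orientation before the project excerpts, here is a minimal, self-contained sketch (illustrative only, not taken from any of the projects below) of what size() reports: the number of key/value entries currently in the map.

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;

public class MapWritableSizeDemo {
    public static void main(String[] args) {
        MapWritable map = new MapWritable();
        map.put(new Text("alpha"), new IntWritable(1));
        map.put(new Text("beta"), new IntWritable(2));
        System.out.println(map.size());  // 2: one entry per distinct key
        map.put(new Text("alpha"), new IntWritable(3));  // overwrites, adds no entry
        System.out.println(map.size());  // still 2
    }
}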
From source file: com.jfolson.hive.serde.RTypedBytesWritableOutput.java
License: Apache License
public void writeMap(MapWritable mw) throws IOException {
    out.writeMapHeader(mw.size());
    for (Map.Entry<Writable, Writable> entry : mw.entrySet()) {
        write(entry.getKey());
        write(entry.getValue());
    }
}
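The writeMapHeader(mw.size()) call above records the entry count so a reader knows how many key/value pairs follow. The same count-prefixed layout is used by MapWritable's own Writable serialization; the round trip below (a sketch using the standard Hadoop API, not the RTypedBytes classes above) shows the size surviving serialization.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;

public class MapWritableRoundTrip {
    public static void main(String[] args) throws IOException {
        MapWritable original = new MapWritable();
        original.put(new Text("k"), new IntWritable(42));

        // write() emits class metadata and the entry count, then each entry
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));

        // Deserialize into a fresh instance
        MapWritable copy = new MapWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy.size());  // 1, same as original.size()
    }
}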
From source file: full_MapReduce.FindBestAttributeMapper.java
License: Open Source License
private TextArrayWritable getValues(MapWritable value) {
    TextArrayWritable res = new TextArrayWritable();
    Text[] tmp_res = new Text[value.keySet().size()];
    int index = 0;
    for (Writable w1 : value.keySet()) {
        MapWritable mw = (MapWritable) value.get(w1);
        int nb_class = mw.size();
        Text prefered_class = new Text();
        IntWritable best_count = new IntWritable(Integer.MIN_VALUE);
        for (Writable w2 : mw.keySet()) {
            if (((IntWritable) mw.get(w2)).compareTo(best_count) > 0) {
                best_count = (IntWritable) mw.get(w2);
                prefered_class.set((Text) w2);
            }
        }
        tmp_res[index++] = new Text(((Text) w1).toString() + " " + nb_class + " " + prefered_class.toString());
    }
    res.set(tmp_res);
    return res;
}
From source file: org.apache.mahout.classifier.sequencelearning.baumwelchmapreduce.BaumWelchMapper.java
License: Apache License
@Override
public void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    Configuration config = context.getConfiguration();
    nrOfHiddenStates = Integer.parseInt(config.get(BaumWelchConfigKeys.NUMBER_OF_HIDDEN_STATES_KEY));
    nrOfEmittedStates = Integer.parseInt(config.get(BaumWelchConfigKeys.NUMBER_OF_EMITTED_STATES_KEY));
    MapWritable hiddenStatesWritableMap = MapWritableCache.load(config,
            new Path(config.get(BaumWelchConfigKeys.HIDDEN_STATES_MAP_PATH)));
    log.info("Mapper Setup hiddenStatesWritableMap loaded. Number of entries = {}",
            hiddenStatesWritableMap.size());
    MapWritable emittedStatesWritableMap = MapWritableCache.load(config,
            new Path(config.get(BaumWelchConfigKeys.EMITTED_STATES_MAP_PATH)));
    log.info("Mapper Setup emittedStatesWritableMap loaded. Number of entries = {}",
            emittedStatesWritableMap.size());
    String[] hiddenStatesArray = new String[hiddenStatesWritableMap.size()];
    String[] emittedStatesArray = new String[emittedStatesWritableMap.size()];
    int k = 0;
    int l = 0;
    for (MapWritable.Entry<Writable, Writable> entry : hiddenStatesWritableMap.entrySet()) {
        log.info("Mapper Setup hiddenStateMap adding pair ({} ,{})",
                ((Text) (entry.getKey())).toString(), ((IntWritable) (entry.getValue())).get());
        hiddenStatesArray[k++] = ((Text) (entry.getKey())).toString();
    }
    for (MapWritable.Entry<Writable, Writable> entry : emittedStatesWritableMap.entrySet()) {
        log.info("Mapper Setup emittedStateMap adding pair ({} ,{})",
                ((Text) (entry.getKey())).toString(), ((IntWritable) (entry.getValue())).get());
        emittedStatesArray[l++] = ((Text) (entry.getKey())).toString();
    }
    modelPath = new Path(config.get(BaumWelchConfigKeys.MODEL_PATH_KEY));
    Model = BaumWelchUtils.CreateHmmModel(nrOfHiddenStates, nrOfEmittedStates, modelPath, config);
    Model.registerHiddenStateNames(hiddenStatesArray);
    Model.registerOutputStateNames(emittedStatesArray);
    HmmUtils.validate(Model);
    log.info("Mapper Setup Hmm Model Created. Hidden States = {} Emitted States = {}",
            Model.getNrOfHiddenStates(), Model.getNrOfOutputStates());
    Vector initialPr = Model.getInitialProbabilities();
    Matrix transitionPr = Model.getTransitionMatrix();
    Matrix emissionPr = Model.getEmissionMatrix();
    for (int i = 0; i < Model.getNrOfHiddenStates(); i++) {
        log.info("Mapper Setup Hmm Model Initial Prob Vector. State {} = {}", i, initialPr.get(i));
    }
    for (int i = 0; i < Model.getNrOfHiddenStates(); i++) {
        for (int j = 0; j < Model.getNrOfHiddenStates(); j++) {
            log.info("Mapper Setup Hmm Model Transition Prob Matrix ({}, {}) = {} ",
                    new Object[] { i, j, transitionPr.get(i, j) });
        }
    }
    for (int i = 0; i < Model.getNrOfHiddenStates(); i++) {
        for (int j = 0; j < Model.getNrOfOutputStates(); j++) {
            log.info("Mapper Setup Hmm Model Emission Prob Matrix. ({}, {}) = {}",
                    new Object[] { i, j, emissionPr.get(i, j) });
        }
    }
}
From source file: org.apache.mahout.classifier.sequencelearning.baumwelchmapreduce.BaumWelchMapper.java
License: Apache License
@Override
public void map(LongWritable seqID, IntArrayWritable seq, Context context)
        throws IOException, InterruptedException {
    MapWritable initialDistributionStripe = new MapWritable();
    MapWritable transitionDistributionStripe = new MapWritable();
    MapWritable emissionDistributionStripe = new MapWritable();
    int[] sequence = new int[seq.get().length];
    int n = 0;
    for (Writable val : seq.get()) {
        sequence[n] = ((IntWritable) val).get();
        n++;
    }
    for (int k = 0; k < sequence.length; k++) {
        log.info("Sequence Array {}", Integer.toString(sequence[k]));
    }
    Matrix alphaFactors = HmmAlgorithms.forwardAlgorithm(Model, sequence, false);
    for (int i = 0; i < alphaFactors.numRows(); i++) {
        for (int j = 0; j < alphaFactors.numCols(); j++) {
            log.info("Alpha Factors Matrix entry ({}, {}) = {}",
                    new Object[] { i, j, alphaFactors.get(i, j) });
        }
    }
    Matrix betaFactors = HmmAlgorithms.backwardAlgorithm(Model, sequence, false);
    // Note: in the original source this logging loop is not closed before the
    // code below, so the stripe computation and the context.write calls all run
    // once per row of betaFactors; the brace structure is reproduced faithfully.
    for (int i = 0; i < betaFactors.numRows(); i++) {
        for (int j = 0; j < betaFactors.numCols(); j++) {
            log.info("Beta Factors Matrix entry ({}, {}) = {}",
                    new Object[] { i, j, betaFactors.get(i, j) });
        }

        // Initial Distribution
        for (int q = 0; q < nrOfHiddenStates; q++) {
            double alpha_1_q = alphaFactors.get(1, q);
            double beta_1_q = betaFactors.get(1, q);
            initialDistributionStripe.put(new IntWritable(q), new DoubleWritable(alpha_1_q * beta_1_q));
        }

        // Emission Distribution
        double[][] emissionMatrix = new double[nrOfHiddenStates][nrOfEmittedStates];
        for (int q = 0; q < nrOfHiddenStates; q++) {
            for (int x = 0; x < nrOfEmittedStates; x++) {
                emissionMatrix[q][x] = 0.0;
            }
        }
        for (int t = 0; t < sequence.length; t++) {
            for (int q = 0; q < nrOfHiddenStates; q++) {
                double alpha_t_q = alphaFactors.get(t, q);
                double beta_t_q = betaFactors.get(t, q);
                emissionMatrix[q][sequence[t]] += alpha_t_q * beta_t_q;
            }
        }
        for (int q = 0; q < nrOfHiddenStates; q++) {
            Map innerEmissionMap = new MapWritable();
            for (int xt = 0; xt < sequence.length; xt++) {
                innerEmissionMap.put(new IntWritable(sequence[xt]),
                        new DoubleWritable(emissionMatrix[q][sequence[xt]]));
            }
            emissionDistributionStripe.put(new IntWritable(q), (MapWritable) innerEmissionMap);
        }

        // Transition Distribution
        double[][] transitionMatrix = new double[nrOfHiddenStates][nrOfHiddenStates];
        for (int q = 0; q < nrOfHiddenStates; q++) {
            for (int x = 0; x < nrOfHiddenStates; x++) {
                transitionMatrix[q][x] = 0.0;
            }
        }
        for (int t = 0; t < sequence.length - 1; t++) {
            for (int q = 0; q < nrOfHiddenStates; q++) {
                for (int r = 0; r < nrOfHiddenStates; r++) {
                    double alpha_t_q = alphaFactors.get(t, q);
                    double A_q_r = Model.getTransitionMatrix().get(q, r);
                    double B_r_xtplus1 = Model.getEmissionMatrix().get(r, sequence[t + 1]);
                    double beta_tplus1_r = betaFactors.get(t + 1, r);
                    double transitionProb = alpha_t_q * A_q_r * B_r_xtplus1 * beta_tplus1_r;
                    log.info("Putting into Inner Map of Transition Distribution. Key = {}, Value = {}",
                            q, transitionProb);
                    transitionMatrix[q][r] += transitionProb;
                }
            }
        }
        for (int q = 0; q < nrOfHiddenStates; q++) {
            Map innerTransitionMap = new MapWritable();
            for (int r = 0; r < nrOfHiddenStates; r++) {
                innerTransitionMap.put(new IntWritable(r), new DoubleWritable(transitionMatrix[q][r]));
            }
            transitionDistributionStripe.put(new IntWritable(q), (MapWritable) innerTransitionMap);
        }

        context.write(new Text("INITIAL"), initialDistributionStripe);
        log.info("Context Writing from Mapper the Initial Distribution Stripe. Size = {} Entries = {}",
                Integer.toString(initialDistributionStripe.size()),
                Integer.toString(initialDistributionStripe.entrySet().size()));
        for (int q = 0; q < nrOfHiddenStates; q++) {
            context.write(new Text("EMIT_" + Integer.toString(q)),
                    (MapWritable) emissionDistributionStripe.get(new IntWritable(q)));
            log.info("Context Writing from Mapper the Emission Distribution Stripe. State = {} Entries = {}",
                    Integer.toString(q),
                    Integer.toString(((MapWritable) emissionDistributionStripe.get(new IntWritable(q))).size()));
            for (MapWritable.Entry<Writable, Writable> entry : ((MapWritable) emissionDistributionStripe
                    .get(new IntWritable(q))).entrySet()) {
                log.info("Emission Distribution Stripe Details. Key = {} Value = {} ",
                        Integer.toString(((IntWritable) entry.getKey()).get()),
                        Double.toString(((DoubleWritable) entry.getValue()).get()));
            }
            context.write(new Text("TRANSIT_" + Integer.toString(q)),
                    (MapWritable) transitionDistributionStripe.get(new IntWritable(q)));
            log.info("Context Writing from Mapper the Transition Distribution Stripe. State = {} Entries = {}",
                    Integer.toString(q),
                    Integer.toString(((MapWritable) transitionDistributionStripe.get(new IntWritable(q))).size()));
            for (MapWritable.Entry<Writable, Writable> entry : ((MapWritable) transitionDistributionStripe
                    .get(new IntWritable(q))).entrySet()) {
                log.info("Transition Distribution Stripe Details. Key = {} Value = {} ",
                        Integer.toString(((IntWritable) entry.getKey()).get()),
                        Double.toString(((DoubleWritable) entry.getValue()).get()));
            }
        }
    }
}
From source file: org.apache.mahout.classifier.sequencelearning.baumwelchmapreduce.BaumWelchReducer.java
License: Apache License
@Override
protected void reduce(Text key, Iterable<MapWritable> stripes, Context context)
        throws IOException, InterruptedException {
    log.info("Entering Reducer. Key = {}", key.toString());
    MapWritable sumOfStripes = new MapWritable();
    MapWritable finalStripe = new MapWritable();
    boolean isInitial = false;
    boolean isTransit = false;
    boolean isEmit = false;
    int stateID = -1;
    if (key.charAt(0) == 'I') {
        isInitial = true;
    } else if (key.charAt(0) == 'E') {
        isEmit = true;
        stateID = Character.getNumericValue(key.charAt(5));
    } else if (key.charAt(0) == 'T') {
        isTransit = true;
        stateID = Character.getNumericValue(key.charAt(8));
    } else {
        throw new IllegalStateException("Baum Welch Reducer Error Determining the Key Type");
    }
    if (isInitial) {
        Double[] val = new Double[nrOfHiddenStates];
        for (int i = 0; i < nrOfHiddenStates; i++) {
            val[i] = 0.0;
        }
        for (MapWritable stripe : stripes) {
            log.info("Reducer Processing Initial Distribution Stripe.");
            for (MapWritable.Entry<Writable, Writable> stripeEntry : stripe.entrySet()) {
                log.info("Reducer Getting Initial Distribution Stripe Entry. Key = {} Value = {} ",
                        Integer.toString(((IntWritable) stripeEntry.getKey()).get()),
                        Double.toString(((DoubleWritable) stripeEntry.getValue()).get()));
                val[((IntWritable) stripeEntry.getKey()).get()] += ((DoubleWritable) stripeEntry.getValue()).get();
            }
        }
        for (int i = 0; i < nrOfHiddenStates; i++) {
            log.info("Reducer adding to sumOfStripes for Initial. Key = {} Value ={}",
                    Integer.toString(i), Double.toString(val[i]));
            sumOfStripes.put(new IntWritable(i), new DoubleWritable(val[i]));
        }
    } else if (isEmit) {
        // Note: in Hadoop the values iterable is typically single-pass, so this
        // next() call may consume the first stripe before the loop below sees it.
        Iterator<MapWritable> it = stripes.iterator();
        int seqlength = it.next().size();
        Double[] val = new Double[nrOfEmittedStates];
        for (int i = 0; i < nrOfEmittedStates; i++) {
            val[i] = 0.0;
        }
        for (MapWritable stripe : stripes) {
            log.info("Reducer Processing Emission Distribution Stripe.");
            for (MapWritable.Entry<Writable, Writable> stripeEntry : stripe.entrySet()) {
                log.info("Reducer Getting Emission Distribution Stripe Entry. Key = {} Value = {} ",
                        Integer.toString(((IntWritable) stripeEntry.getKey()).get()),
                        Double.toString(((DoubleWritable) stripeEntry.getValue()).get()));
                val[((IntWritable) stripeEntry.getKey()).get()] += ((DoubleWritable) stripeEntry.getValue()).get();
            }
        }
        for (int i = 0; i < nrOfEmittedStates; i++) {
            log.info("Reducer adding to sumOfStripes for Emission. Key = {} Value ={}",
                    Integer.toString(i), Double.toString(val[i]));
            sumOfStripes.put(new IntWritable(i), new DoubleWritable(val[i]));
        }
    } else if (isTransit) {
        Double[] val = new Double[nrOfHiddenStates];
        for (int i = 0; i < nrOfHiddenStates; i++) {
            val[i] = 0.0;
        }
        for (MapWritable stripe : stripes) {
            log.info("Reducer Processing Transition Distribution Stripe.");
            for (MapWritable.Entry<Writable, Writable> stripeEntry : stripe.entrySet()) {
                log.info("Reducer Getting Transition Distribution Stripe Entry. Key = {} Value = {} ",
                        Integer.toString(((IntWritable) stripeEntry.getKey()).get()),
                        Double.toString(((DoubleWritable) stripeEntry.getValue()).get()));
                val[((IntWritable) stripeEntry.getKey()).get()] += ((DoubleWritable) stripeEntry.getValue()).get();
            }
        }
        for (int i = 0; i < nrOfHiddenStates; i++) {
            log.info("Reducer adding to sumOfStripes for Transition. Key = {} Value ={}",
                    Integer.toString(i), Double.toString(val[i]));
            sumOfStripes.put(new IntWritable(i), new DoubleWritable(val[i]));
        }
    } else {
        throw new IllegalStateException("Baum Welch Reducer Error: Unable to aggregate distribution stripes.");
    }
    double sum = 0.0;
    for (MapWritable.Entry<Writable, Writable> sumEntry : sumOfStripes.entrySet()) {
        sum += ((DoubleWritable) sumEntry.getValue()).get();
    }
    MapWritable distributionStripe = new MapWritable();
    for (MapWritable.Entry<Writable, Writable> sumEntry : sumOfStripes.entrySet()) {
        IntWritable state = (IntWritable) sumEntry.getKey();
        double innerValue = ((DoubleWritable) sumEntry.getValue()).get();
        double normalizedSum = innerValue / sum;
        distributionStripe.put(state, new DoubleWritable(normalizedSum));
    }
    // Note: finalStripe is never populated, so this log always reports size 0 and
    // the detail loop below never runs; the normalized values live in
    // distributionStripe, which is what gets written.
    log.info("Reducer Writing: Key = {} Value (Stripe) Size = {}", key.toString(), finalStripe.size());
    for (MapWritable.Entry<Writable, Writable> entry : finalStripe.entrySet()) {
        log.info("Distribution Stripe Detail Key = {}, Value ={}",
                ((IntWritable) entry.getKey()).get(), ((DoubleWritable) entry.getValue()).get());
    }
    context.write(key, distributionStripe);
}
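The final loop above divides each accumulated value by the stripe total so each distribution sums to 1. A standalone sketch (illustrative, not part of Mahout) of that normalization over a MapWritable stripe:

import java.util.Map.Entry;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Writable;

public class StripeNormalizeDemo {
    public static void main(String[] args) {
        MapWritable stripe = new MapWritable();
        stripe.put(new IntWritable(0), new DoubleWritable(2.0));
        stripe.put(new IntWritable(1), new DoubleWritable(6.0));
        // Sum all values, then divide each by the total
        double sum = 0.0;
        for (Entry<Writable, Writable> e : stripe.entrySet()) {
            sum += ((DoubleWritable) e.getValue()).get();
        }
        MapWritable normalized = new MapWritable();
        for (Entry<Writable, Writable> e : stripe.entrySet()) {
            normalized.put(e.getKey(), new DoubleWritable(((DoubleWritable) e.getValue()).get() / sum));
        }
        System.out.println(normalized.size());  // 2, same key set as the input stripe
        // values are now 0.25 and 0.75, summing to 1.0
    }
}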
From source file: org.apache.mahout.classifier.sequencelearning.hmm.hadoop.BaumWelchMapper.java
License: Apache License
@Override
public void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    Configuration config = context.getConfiguration();
    String scalingMethod = config.get(BaumWelchConfigKeys.SCALING_OPTION_KEY);
    if (scalingMethod.equals("rescaling")) {
        scaling = HmmAlgorithms.ScalingMethod.RESCALING;
    } else if (scalingMethod.equals("logscaling")) {
        scaling = HmmAlgorithms.ScalingMethod.LOGSCALING;
    }
    nrOfHiddenStates = Integer.parseInt(config.get(BaumWelchConfigKeys.NUMBER_OF_HIDDEN_STATES_KEY));
    nrOfEmittedStates = Integer.parseInt(config.get(BaumWelchConfigKeys.NUMBER_OF_EMITTED_STATES_KEY));
    MapWritable hiddenStatesWritableMap = MapWritableCache.load(config,
            new Path(config.get(BaumWelchConfigKeys.HIDDEN_STATES_MAP_PATH)));
    MapWritable emittedStatesWritableMap = MapWritableCache.load(config,
            new Path(config.get(BaumWelchConfigKeys.EMITTED_STATES_MAP_PATH)));
    String[] hiddenStatesArray = new String[hiddenStatesWritableMap.size()];
    String[] emittedStatesArray = new String[emittedStatesWritableMap.size()];
    int k = 0;
    int l = 0;
    for (MapWritable.Entry<Writable, Writable> entry : hiddenStatesWritableMap.entrySet()) {
        hiddenStatesArray[k++] = entry.getKey().toString();
    }
    for (MapWritable.Entry<Writable, Writable> entry : emittedStatesWritableMap.entrySet()) {
        emittedStatesArray[l++] = entry.getKey().toString();
    }
    modelPath = new Path(config.get(BaumWelchConfigKeys.MODEL_PATH_KEY));
    Model = BaumWelchUtils.createHmmModel(nrOfHiddenStates, nrOfEmittedStates, modelPath, config);
    Model.registerHiddenStateNames(hiddenStatesArray);
    Model.registerOutputStateNames(emittedStatesArray);
    HmmUtils.normalizeModel(Model);
    HmmUtils.validate(Model);
    log.info("Mapper Setup Hmm Model Created. Hidden States = {} Emitted States = {}",
            Model.getNrOfHiddenStates(), Model.getNrOfOutputStates());
}
From source file: org.apache.nutch.crawl.CrawlDatum.java
License: Apache License
private boolean metadataEquals(org.apache.hadoop.io.MapWritable otherMetaData) {
    if (metaData == null || metaData.size() == 0) {
        return otherMetaData == null || otherMetaData.size() == 0;
    }
    if (otherMetaData == null) {
        // we already know that the current object is not null or empty
        return false;
    }
    HashSet<Entry<Writable, Writable>> set1 = new HashSet<Entry<Writable, Writable>>(metaData.entrySet());
    HashSet<Entry<Writable, Writable>> set2 = new HashSet<Entry<Writable, Writable>>(otherMetaData.entrySet());
    return set1.equals(set2);
}
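metadataEquals uses size() to short-circuit the empty/null cases, then compares entry sets, which is order-independent. A small sketch (illustrative, not part of Nutch) of the same comparison on two standalone MapWritables:

import java.util.HashSet;
import java.util.Map.Entry;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class MapWritableCompareDemo {
    public static void main(String[] args) {
        MapWritable a = new MapWritable();
        MapWritable b = new MapWritable();
        a.put(new Text("depth"), new IntWritable(3));
        b.put(new Text("depth"), new IntWritable(3));
        // Same technique as metadataEquals: copy entries into sets and compare
        HashSet<Entry<Writable, Writable>> s1 = new HashSet<Entry<Writable, Writable>>(a.entrySet());
        HashSet<Entry<Writable, Writable>> s2 = new HashSet<Entry<Writable, Writable>>(b.entrySet());
        System.out.println(a.size() == b.size());  // true
        System.out.println(s1.equals(s2));         // true
    }
}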
From source file: org.huahinframework.core.util.ObjectUtil.java
License: Apache License
/**
 * Convert the PrimitiveObject from Hadoop {@link Writable}.
 * @param object
 * @return PrimitiveObject
 */
public static PrimitiveObject hadoop2Primitive(Writable object) {
    if (object instanceof NullWritable) {
        return new PrimitiveObject(NULL, null);
    }
    if (object instanceof ByteWritable) {
        return new PrimitiveObject(BYTE, ((ByteWritable) object).get());
    } else if (object instanceof IntWritable) {
        return new PrimitiveObject(INTEGER, ((IntWritable) object).get());
    } else if (object instanceof LongWritable) {
        return new PrimitiveObject(LONG, ((LongWritable) object).get());
    } else if (object instanceof DoubleWritable) {
        return new PrimitiveObject(DOUBLE, ((DoubleWritable) object).get());
    } else if (object instanceof FloatWritable) {
        return new PrimitiveObject(FLOAT, ((FloatWritable) object).get());
    } else if (object instanceof BooleanWritable) {
        return new PrimitiveObject(BOOLEAN, ((BooleanWritable) object).get());
    } else if (object instanceof Text) {
        return new PrimitiveObject(STRING, ((Text) object).toString());
    } else if (object instanceof ArrayWritable) {
        ArrayWritable aw = (ArrayWritable) object;
        if (aw.get().length == 0) {
            return new PrimitiveObject(ARRAY, true, STRING, new ArrayList<String>());
        }
        int type = NULL;
        List<Object> l = new ArrayList<Object>();
        for (Writable w : aw.get()) {
            PrimitiveObject no = hadoop2Primitive(w);
            type = no.getType();
            l.add(no.getObject());
        }
        return new PrimitiveObject(ARRAY, true, type, l);
    } else if (object instanceof MapWritable) {
        MapWritable mw = (MapWritable) object;
        if (mw.size() == 0) {
            // (the source passes ARRAY here, even though this branch handles an empty MapWritable)
            return new PrimitiveObject(ARRAY, true, STRING, STRING, new HashMap<String, String>());
        }
        int keyType = NULL;
        int valueType = NULL;
        Map<Object, Object> m = new HashMap<Object, Object>();
        for (Entry<Writable, Writable> entry : mw.entrySet()) {
            PrimitiveObject keyNo = hadoop2Primitive(entry.getKey());
            PrimitiveObject valueNo = hadoop2Primitive(entry.getValue());
            keyType = keyNo.getType();
            valueType = valueNo.getType();
            m.put(keyNo.getObject(), valueNo.getObject());
        }
        return new PrimitiveObject(MAP, true, keyType, valueType, m);
    }
    throw new ClassCastException("cast object not found");
}
From source file: org.huahinframework.core.util.ObjectUtilTest.java
License: Apache License
@Test
public void testPrimitive2HadoopIOMap() {
    Map<String, Integer> o = new HashMap<String, Integer>();
    MapWritable m = new MapWritable();
    o.put("0", 0);
    m.put(new Text("0"), new IntWritable(0));
    o.put("1", 1);
    m.put(new Text("1"), new IntWritable(1));
    HadoopObject ho = ObjectUtil.primitive2Hadoop(o);
    assertEquals(ObjectUtil.MAP, ho.getType());
    assertEquals(MapWritable.class, ho.getObject().getClass());
    MapWritable mw = (MapWritable) ho.getObject();
    if (mw.size() != m.size()) {
        fail("map not equals size: " + mw.size() + " != " + m.size());
    }
    for (Entry<Writable, Writable> entry : m.entrySet()) {
        if (mw.get(entry.getKey()) == null) {
            fail("map key not found");
        }
        assertEquals(mw.get(entry.getKey()), entry.getValue());
    }
}
From source file: org.huahinframework.core.util.ObjectUtilTest.java
License: Apache License
@SuppressWarnings("unchecked")
@Test
public void testHadoopIO2PrimitiveMap() {
    Map<String, Integer> o = new HashMap<String, Integer>();
    MapWritable mw = new MapWritable();
    o.put("0", 0);
    mw.put(new Text("0"), new IntWritable(0));
    o.put("1", 1);
    mw.put(new Text("1"), new IntWritable(1));
    PrimitiveObject no = ObjectUtil.hadoop2Primitive(mw);
    assertEquals(ObjectUtil.MAP, no.getType());
    assertEquals(ObjectUtil.STRING, no.getMapKeyType());
    assertEquals(ObjectUtil.INTEGER, no.getMapValueType());
    if (!(no.getObject() instanceof Map<?, ?>)) {
        fail("object not map");
    }
    Map<String, Integer> m = (Map<String, Integer>) no.getObject();
    if (mw.size() != o.size()) {
        fail("map not equals size: " + mw.size() + " != " + o.size());
    }
    for (Entry<String, Integer> entry : o.entrySet()) {
        if (m.get(entry.getKey()) == null) {
            fail("map key not found");
        }
        assertEquals(m.get(entry.getKey()), entry.getValue());
    }
}