Example usage for org.apache.hadoop.io MapWritable entrySet

List of usage examples for org.apache.hadoop.io MapWritable entrySet

Introduction

On this page you can find example usage for org.apache.hadoop.io MapWritable entrySet.

Prototype

@Override
public Set<Map.Entry<Writable, Writable>> entrySet()
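
A minimal, self-contained sketch of iterating a MapWritable via entrySet(). The Text/IntWritable key and value types here are illustrative choices only, not taken from any of the examples below.

import java.util.Map;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class MapWritableEntrySetSketch {
    public static void main(String[] args) {
        MapWritable map = new MapWritable();
        map.put(new Text("count"), new IntWritable(42));

        // entrySet() exposes the map as java.util.Map entries of Writable keys and values;
        // callers typically cast each entry back to the concrete Writable types they stored.
        for (Map.Entry<Writable, Writable> entry : map.entrySet()) {
            String key = ((Text) entry.getKey()).toString();
            int value = ((IntWritable) entry.getValue()).get();
            System.out.println(key + " = " + value);
        }
    }
}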


Usage

From source file:org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.java

License:Apache License

/**
 * Returns all {@link InputTableConfig} objects associated with this job.
 *
 * @param implementingClass
 *          the class whose name will be used as a prefix for the property configuration key
 * @param conf
 *          the Hadoop configuration object to configure
 * @return all of the table query configs for the job
 * @since 1.6.0
 */
public static Map<String, InputTableConfig> getInputTableConfigs(Class<?> implementingClass,
        Configuration conf) {
    Map<String, InputTableConfig> configs = new HashMap<>();
    Map.Entry<String, InputTableConfig> defaultConfig = getDefaultInputTableConfig(implementingClass, conf);
    if (defaultConfig != null)
        configs.put(defaultConfig.getKey(), defaultConfig.getValue());
    String configString = conf.get(enumToConfKey(implementingClass, ScanOpts.TABLE_CONFIGS));
    MapWritable mapWritable = new MapWritable();
    if (configString != null) {
        try {
            byte[] bytes = Base64.getDecoder().decode(configString);
            ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
            mapWritable.readFields(new DataInputStream(bais));
            bais.close();
        } catch (IOException e) {
            throw new IllegalStateException(
                    "The table query configurations could not be deserialized from the given configuration");
        }
    }
    for (Map.Entry<Writable, Writable> entry : mapWritable.entrySet())
        configs.put(((Text) entry.getKey()).toString(), (InputTableConfig) entry.getValue());

    return configs;
}
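
The method above decodes a Base64 string from the Configuration and feeds it to MapWritable.readFields(). For context, here is a sketch of the matching write side; it is not part of the InputConfigurator code quoted here, and the class name and property key are hypothetical placeholders.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Base64;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.MapWritable;

public class MapWritableConfigSketch {
    /** Serializes a MapWritable into a Base64 string that readFields() can later decode. */
    public static void storeInConf(Configuration conf, String key, MapWritable mapWritable) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // MapWritable implements Writable, so it can write itself to any DataOutput
        mapWritable.write(new DataOutputStream(baos));
        conf.set(key, Base64.getEncoder().encodeToString(baos.toByteArray()));
    }
}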

From source file:org.apache.accumulo.core.clientImpl.mapreduce.lib.InputConfigurator.java

License:Apache License

/**
 * Returns all InputTableConfig objects associated with this job.
 *
 * @param implementingClass
 *          the class whose name will be used as a prefix for the property configuration key
 * @param conf
 *          the Hadoop configuration object to configure
 * @return all of the table query configs for the job
 * @since 1.6.0
 */
public static Map<String, org.apache.accumulo.core.client.mapreduce.InputTableConfig> getInputTableConfigs(
        Class<?> implementingClass, Configuration conf) {
    Map<String, org.apache.accumulo.core.client.mapreduce.InputTableConfig> configs = new HashMap<>();
    Map.Entry<String, org.apache.accumulo.core.client.mapreduce.InputTableConfig> defaultConfig = getDefaultInputTableConfig(
            implementingClass, conf);
    if (defaultConfig != null)
        configs.put(defaultConfig.getKey(), defaultConfig.getValue());
    String configString = conf.get(enumToConfKey(implementingClass, ScanOpts.TABLE_CONFIGS));
    MapWritable mapWritable = new MapWritable();
    if (configString != null) {
        try {
            byte[] bytes = Base64.getDecoder().decode(configString);
            ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
            mapWritable.readFields(new DataInputStream(bais));
            bais.close();
        } catch (IOException e) {
            throw new IllegalStateException("The table query configurations could not be deserialized"
                    + " from the given configuration");
        }
    }
    for (Map.Entry<Writable, Writable> entry : mapWritable.entrySet())
        configs.put(entry.getKey().toString(),
                (org.apache.accumulo.core.client.mapreduce.InputTableConfig) entry.getValue());

    return configs;
}

From source file:org.apache.accumulo.hadoopImpl.mapreduce.lib.InputConfigurator.java

License:Apache License

/**
 * Returns all {@link InputTableConfig} objects associated with this job.
 *
 * @param implementingClass
 *          the class whose name will be used as a prefix for the property configuration key
 * @param conf
 *          the Hadoop configuration object to configure
 * @param tableName
 *          the table name for which to retrieve the configuration
 * @return all of the table query configs for the job
 * @since 1.6.0
 */
private static Map<String, InputTableConfig> getInputTableConfigs(Class<?> implementingClass,
        Configuration conf, String tableName) {
    Map<String, InputTableConfig> configs = new HashMap<>();
    Map.Entry<String, InputTableConfig> defaultConfig = getDefaultInputTableConfig(implementingClass, conf,
            tableName);
    if (defaultConfig != null)
        configs.put(defaultConfig.getKey(), defaultConfig.getValue());
    String configString = conf.get(enumToConfKey(implementingClass, ScanOpts.TABLE_CONFIGS));
    MapWritable mapWritable = new MapWritable();
    if (configString != null) {
        try {
            byte[] bytes = Base64.getDecoder().decode(configString);
            ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
            mapWritable.readFields(new DataInputStream(bais));
            bais.close();
        } catch (IOException e) {
            throw new IllegalStateException("The table query configurations could not be deserialized"
                    + " from the given configuration");
        }
    }
    for (Map.Entry<Writable, Writable> entry : mapWritable.entrySet())
        configs.put(entry.getKey().toString(), (InputTableConfig) entry.getValue());

    return configs;
}

From source file:org.apache.flume.channel.file.FlumeEvent.java

License:Apache License

private Map<String, String> fromMapWritable(MapWritable map) {
    Map<String, String> result = Maps.newHashMap();
    if (map != null) {
        for (Map.Entry<Writable, Writable> entry : map.entrySet()) {
            result.put(entry.getKey().toString(), entry.getValue().toString());
        }
    }
    return result;
}

From source file:org.apache.gora.util.WritableUtils.java

License:Apache License

public static final Properties readProperties(DataInput in) throws IOException {
    Properties props = new Properties();
    MapWritable propsWritable = new MapWritable();
    propsWritable.readFields(in);
    for (Entry<Writable, Writable> prop : propsWritable.entrySet()) {
        String key = prop.getKey().toString();
        String value = prop.getValue().toString();
        props.put(key, value);
    }
    return props;
}
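
readProperties() above rebuilds a Properties object from a deserialized MapWritable. A possible write-side counterpart is sketched here under the assumption that keys and values are stored as Text; the class and method names are illustrative, not necessarily those in WritableUtils.

import java.io.DataOutput;
import java.io.IOException;
import java.util.Properties;

import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;

public class PropertiesWritableSketch {
    /** Packs Properties into a MapWritable with Text keys/values and serializes it. */
    public static void writeProperties(DataOutput out, Properties props) throws IOException {
        MapWritable propsWritable = new MapWritable();
        for (String name : props.stringPropertyNames()) {
            propsWritable.put(new Text(name), new Text(props.getProperty(name)));
        }
        propsWritable.write(out);
    }
}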

From source file:org.apache.hama.bsp.PartitioningRunner.java

License:Apache License

@Override
@SuppressWarnings({ "rawtypes" })
public void bsp(BSPPeer<Writable, Writable, Writable, Writable, MapWritable> peer)
        throws IOException, SyncException, InterruptedException {

    Partitioner partitioner = getPartitioner();
    KeyValuePair<Writable, Writable> rawRecord = null;
    KeyValuePair<Writable, Writable> convertedRecord = null;

    Class rawKeyClass = null;
    Class rawValueClass = null;
    MapWritable raw = null;

    while ((rawRecord = peer.readNext()) != null) {
        if (rawKeyClass == null && rawValueClass == null) {
            rawKeyClass = rawRecord.getKey().getClass();
            rawValueClass = rawRecord.getValue().getClass();
        }
        convertedRecord = converter.convertRecord(rawRecord, conf);

        if (convertedRecord == null) {
            throw new IOException("The converted record can't be null.");
        }

        int index = converter.getPartitionId(convertedRecord, partitioner, conf, peer, peer.getNumPeers());

        raw = new MapWritable();
        raw.put(rawRecord.getKey(), rawRecord.getValue());

        peer.send(peer.getPeerName(index), raw);
    }

    peer.sync();

    MapWritable record;

    while ((record = peer.getCurrentMessage()) != null) {
        for (Map.Entry<Writable, Writable> e : record.entrySet()) {
            peer.write(e.getKey(), e.getValue());
        }
    }

}

From source file:org.apache.hama.examples.ClassSerializePrinting.java

License:Apache License

@Override
public void bsp(BSPPeer<NullWritable, NullWritable, IntWritable, Text, MapWritable> bspPeer)
        throws IOException, SyncException, InterruptedException {

    for (int i = 0; i < NUM_SUPERSTEPS; i++) {
        for (String otherPeer : bspPeer.getAllPeerNames()) {
            MapWritable map = new MapWritable();
            map.put(new Text(bspPeer.getPeerName()), new IntWritable(i));

            bspPeer.send(otherPeer, map);
        }

        // Test superstep counter
        if (i != bspPeer.getSuperstepCount()) {
            throw new IOException();
        }

        bspPeer.sync();

        MapWritable msg = null;
        while ((msg = bspPeer.getCurrentMessage()) != null) {
            for (Entry<Writable, Writable> e : msg.entrySet()) {
                bspPeer.write((IntWritable) e.getValue(), (Text) e.getKey());
            }
        }
    }
}

From source file:org.apache.mahout.classifier.sequencelearning.baumwelchmapreduce.BaumWelchCombiner.java

License:Apache License

@Override
protected void reduce(Text key, Iterable<MapWritable> stripes, Context context)
        throws IOException, InterruptedException {

    log.info("Entering Reducer. Key = {}", key.toString());
    MapWritable sumOfStripes = new MapWritable();
    MapWritable finalStripe = new MapWritable();
    boolean isInitial = false;
    boolean isTransit = false;
    boolean isEmit = false;

    if (key.charAt(0) == 'I') {
        isInitial = true;
    } else if (key.charAt(0) == 'E') {
        isEmit = true;
    } else if (key.charAt(0) == 'T') {
        isTransit = true;
    } else {
        throw new IllegalStateException("Baum Welch Reducer Error Determining the Key Type");
    }

    if (isInitial) {
        Double[] val = new Double[nrOfHiddenStates];
        for (int i = 0; i < nrOfHiddenStates; i++) {
            val[i] = 0.0;
        }
        for (MapWritable stripe : stripes) {
            log.info("Reducer Processing Initial Distribution Stripe.");
            for (MapWritable.Entry<Writable, Writable> stripeEntry : stripe.entrySet()) {
                log.info("Reducer Getting Initial Distribution Stripe Entry. Key = {}  Value = {} ",
                        Integer.toString(((IntWritable) stripeEntry.getKey()).get()),
                        Double.toString(((DoubleWritable) stripeEntry.getValue()).get()));
                val[((IntWritable) stripeEntry.getKey()).get()] += ((DoubleWritable) stripeEntry.getValue())
                        .get();
            }
        }
        for (int i = 0; i < nrOfHiddenStates; i++) {
            log.info("Reducer adding to sumOfStripes for Initial. Key = {}  Value ={}", Integer.toString(i),
                    Double.toString(val[i]));
            sumOfStripes.put(new IntWritable(i), new DoubleWritable(val[i]));
        }
    } else if (isEmit) {
        Iterator<MapWritable> it = stripes.iterator();
        int seqlength = it.next().size();
        Double[] val = new Double[nrOfEmittedStates];
        for (int i = 0; i < nrOfEmittedStates; i++) {
            val[i] = 0.0;
        }
        for (MapWritable stripe : stripes) {
            log.info("Reducer Processing Emission Distribution Stripe.");
            for (MapWritable.Entry<Writable, Writable> stripeEntry : stripe.entrySet()) {
                log.info("Reducer Getting Emission Distribution Stripe Entry. Key = {}  Value = {} ",
                        Integer.toString(((IntWritable) stripeEntry.getKey()).get()),
                        Double.toString(((DoubleWritable) stripeEntry.getValue()).get()));
                val[((IntWritable) stripeEntry.getKey()).get()] += ((DoubleWritable) stripeEntry.getValue())
                        .get();
            }
        }
        for (int i = 0; i < nrOfEmittedStates; i++) {
            log.info("Reducer adding to sumOfStripes for Emission. Key = {}  Value ={}", Integer.toString(i),
                    Double.toString(val[i]));
            sumOfStripes.put(new IntWritable(i), new DoubleWritable(val[i]));
        }
    } else if (isTransit) {
        Double[] val = new Double[nrOfHiddenStates];
        for (int i = 0; i < nrOfHiddenStates; i++) {
            val[i] = 0.0;
        }
        for (MapWritable stripe : stripes) {
            log.info("Reducer Processing Transition Distribution Stripe.");
            for (MapWritable.Entry<Writable, Writable> stripeEntry : stripe.entrySet()) {
                log.info("Reducer Getting Transition Distribution Stripe Entry. Key = {}  Value = {} ",
                        Integer.toString(((IntWritable) stripeEntry.getKey()).get()),
                        Double.toString(((DoubleWritable) stripeEntry.getValue()).get()));
                val[((IntWritable) stripeEntry.getKey()).get()] += ((DoubleWritable) stripeEntry.getValue())
                        .get();
            }
        }
        for (int i = 0; i < nrOfHiddenStates; i++) {
            log.info("Reducer adding to sumOfStripes for Transition. Key = {}  Value ={}", Integer.toString(i),
                    Double.toString(val[i]));
            sumOfStripes.put(new IntWritable(i), new DoubleWritable(val[i]));
        }
    } else {
        throw new IllegalStateException("Baum Welch Reducer Error: Unable to aggregate distribution stripes.");
    }

    context.write(key, sumOfStripes);

}

From source file:org.apache.mahout.classifier.sequencelearning.baumwelchmapreduce.BaumWelchMapper.java

License:Apache License

@Override
public void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    Configuration config = context.getConfiguration();

    nrOfHiddenStates = Integer.parseInt(config.get(BaumWelchConfigKeys.NUMBER_OF_HIDDEN_STATES_KEY));
    nrOfEmittedStates = Integer.parseInt(config.get(BaumWelchConfigKeys.NUMBER_OF_EMITTED_STATES_KEY));
    MapWritable hiddenStatesWritableMap = MapWritableCache.load(config,
            new Path(config.get(BaumWelchConfigKeys.HIDDEN_STATES_MAP_PATH)));
    log.info("Mapper Setup hiddenStatesWritableMap loaded. Number of entries = {}",
            hiddenStatesWritableMap.size());
    MapWritable emittedStatesWritableMap = MapWritableCache.load(config,
            new Path(config.get(BaumWelchConfigKeys.EMITTED_STATES_MAP_PATH)));
    log.info("Mapper Setup emittedStatesWritableMap loaded. Number of entries = {}",
            emittedStatesWritableMap.size());

    //HashMap hiddenStatesMap = new HashMap();
    //HashMap emittedStatesMap = new HashMap();

    String[] hiddenStatesArray = new String[hiddenStatesWritableMap.size()];
    String[] emittedStatesArray = new String[emittedStatesWritableMap.size()];

    int k = 0;
    int l = 0;

    for (MapWritable.Entry<Writable, Writable> entry : hiddenStatesWritableMap.entrySet()) {
        log.info("Mapper Setup hiddenStateMap adding pair ({} ,{})", ((Text) (entry.getKey())).toString(),
                ((IntWritable) (entry.getValue())).get());
        //hiddenStatesMap.put( ((Text)(entry.getKey())).toString(), ((IntWritable)(entry.getValue())).get() );
        hiddenStatesArray[k++] = ((Text) (entry.getKey())).toString();
    }

    for (MapWritable.Entry<Writable, Writable> entry : emittedStatesWritableMap.entrySet()) {
        log.info("Mapper Setup emittedStateMap adding pair ({} ,{})", ((Text) (entry.getKey())).toString(),
                ((IntWritable) (entry.getValue())).get());
        //emittedStatesMap.put( ((Text)(entry.getKey())).toString(), ((IntWritable)(entry.getValue())).get() );
        emittedStatesArray[l++] = ((Text) (entry.getKey())).toString();
    }

    modelPath = new Path(config.get(BaumWelchConfigKeys.MODEL_PATH_KEY));
    Model = BaumWelchUtils.CreateHmmModel(nrOfHiddenStates, nrOfEmittedStates, modelPath, config);
    Model.registerHiddenStateNames(hiddenStatesArray);
    Model.registerOutputStateNames(emittedStatesArray);
    HmmUtils.validate(Model);

    log.info("Mapper Setup Hmm Model Created. Hidden States = {} Emitted States = {}",
            Model.getNrOfHiddenStates(), Model.getNrOfOutputStates());
    Vector initialPr = Model.getInitialProbabilities();
    Matrix transitionPr = Model.getTransitionMatrix();
    Matrix emissionPr = Model.getEmissionMatrix();

    for (int i = 0; i < Model.getNrOfHiddenStates(); i++) {
        log.info("Mapper Setup Hmm Model Initial Prob Vector. State {} = {}", i, initialPr.get(i));
    }

    for (int i = 0; i < Model.getNrOfHiddenStates(); i++) {
        for (int j = 0; j < Model.getNrOfHiddenStates(); j++) {
            log.info("Mapper Setup Hmm Model Transition Prob Matrix ({}, {}) = {} ",
                    new Object[] { i, j, transitionPr.get(i, j) });
        }
    }

    for (int i = 0; i < Model.getNrOfHiddenStates(); i++) {
        for (int j = 0; j < Model.getNrOfOutputStates(); j++) {
            log.info("Mapper Setup Hmm Model Emission Prob Matrix. ({}, {}) = {}",
                    new Object[] { i, j, emissionPr.get(i, j) });
        }
    }
}

From source file:org.apache.mahout.classifier.sequencelearning.baumwelchmapreduce.BaumWelchMapper.java

License:Apache License

@Override
public void map(LongWritable seqID, IntArrayWritable seq, Context context)
        throws IOException, InterruptedException {

    MapWritable initialDistributionStripe = new MapWritable();
    MapWritable transitionDistributionStripe = new MapWritable();
    MapWritable emissionDistributionStripe = new MapWritable();

    //IntArrayWritable[] writableSequence = (IntArrayWritable[])seq.get();
    //int[] sequence = new int[seq.get().length];
    int[] sequence = new int[seq.get().length];

    int n = 0;
    for (Writable val : seq.get()) {
        sequence[n] = ((IntWritable) val).get();
        n++;
    }

    for (int k = 0; k < sequence.length; k++) {
        log.info("Sequence Array {}", Integer.toString(sequence[k]));
    }

    Matrix alphaFactors = HmmAlgorithms.forwardAlgorithm(Model, sequence, false);
    for (int i = 0; i < alphaFactors.numRows(); i++) {
        for (int j = 0; j < alphaFactors.numCols(); j++) {
            log.info("Alpha Factors Matrix entry ({}, {}) = {}", new Object[] { i, j, alphaFactors.get(i, j) });
        }
    }

    Matrix betaFactors = HmmAlgorithms.backwardAlgorithm(Model, sequence, false);
    for (int i = 0; i < betaFactors.numRows(); i++) {
        for (int j = 0; j < betaFactors.numCols(); j++) {
            log.info("Beta Factors Matrix entry ({}, {}) = {}", new Object[] { i, j, betaFactors.get(i, j) });
        }

        //Initial Distribution
        for (int q = 0; q < nrOfHiddenStates; q++) {
            double alpha_1_q = alphaFactors.get(1, q);
            double beta_1_q = betaFactors.get(1, q);
            initialDistributionStripe.put(new IntWritable(q), new DoubleWritable(alpha_1_q * beta_1_q));
        }

        //Emission Distribution
        /*
        Matrix emissionMatrix = new DenseMatrix(nrOfHiddenStates, sequence.length);
        for (int t = 0; t < sequence.length; t++) {
        HashMap<Integer, Double> innerMap = new HashMap<Integer, Double>();
        for (int q = 0; q < nrOfHiddenStates; q++) {
          double alpha_t_q = alphaFactors.get(t, q);
          double beta_t_q  = betaFactors.get(t, q);
          //innerMap.put(q, alpha_t_q * beta_t_q);
          emissionMatrix.set(q, t, alpha_t_q * beta_t_q);
          }
        }
        for (int q = 0; q < nrOfHiddenStates; q++) {
        Map innerEmissionMap = new MapWritable();
        for (int xt = 0; xt < sequence.length; xt++) {
          innerEmissionMap.put(new IntWritable(xt), new DoubleWritable(emissionMatrix.get(q, xt)));
        }
        emissionDistributionStripe.put(new IntWritable(q), (MapWritable)innerEmissionMap);
        }
        */

        double[][] emissionMatrix = new double[nrOfHiddenStates][nrOfEmittedStates];

        for (int q = 0; q < nrOfHiddenStates; q++) {
            for (int x = 0; x < nrOfEmittedStates; x++) {
                emissionMatrix[q][x] = 0.0;
            }
        }

        for (int t = 0; t < sequence.length; t++) {
            //HashMap<Integer, Double> innerMap = new HashMap<Integer, Double>();
            for (int q = 0; q < nrOfHiddenStates; q++) {
                double alpha_t_q = alphaFactors.get(t, q);
                double beta_t_q = betaFactors.get(t, q);
                //innerMap.put(q, alpha_t_q * beta_t_q);
                //emissionMatrix.set(q, t, alpha_t_q * beta_t_q);
                emissionMatrix[q][sequence[t]] += alpha_t_q * beta_t_q;
            }
        }
        for (int q = 0; q < nrOfHiddenStates; q++) {
            Map innerEmissionMap = new MapWritable();
            for (int xt = 0; xt < sequence.length; xt++) {
                innerEmissionMap.put(new IntWritable(sequence[xt]),
                        new DoubleWritable(emissionMatrix[q][sequence[xt]]));
            }
            emissionDistributionStripe.put(new IntWritable(q), (MapWritable) innerEmissionMap);
        }

        //Transition Distribution
        double[][] transitionMatrix = new double[nrOfHiddenStates][nrOfHiddenStates];
        for (int q = 0; q < nrOfHiddenStates; q++) {
            for (int x = 0; x < nrOfHiddenStates; x++) {
                transitionMatrix[q][x] = 0.0;
            }
        }

        for (int t = 0; t < sequence.length - 1; t++) {
            for (int q = 0; q < nrOfHiddenStates; q++) {
                for (int r = 0; r < nrOfHiddenStates; r++) {
                    double alpha_t_q = alphaFactors.get(t, q);
                    double A_q_r = Model.getTransitionMatrix().get(q, r);
                    double B_r_xtplus1 = Model.getEmissionMatrix().get(r, sequence[t + 1]);
                    double beta_tplus1_r = betaFactors.get(t + 1, r);
                    double transitionProb = alpha_t_q * A_q_r * B_r_xtplus1 * beta_tplus1_r;
                    log.info("Putting into Inner Map of Transition Distribution. Key = {}, Value = {}", q,
                            transitionProb);
                    transitionMatrix[q][r] += transitionProb;
                }
            }
        }
        for (int q = 0; q < nrOfHiddenStates; q++) {
            Map innerTransitionMap = new MapWritable();
            for (int r = 0; r < nrOfHiddenStates; r++) {
                innerTransitionMap.put(new IntWritable(r), new DoubleWritable(transitionMatrix[q][r]));
            }
            transitionDistributionStripe.put(new IntWritable(q), (MapWritable) innerTransitionMap);
        }

        context.write(new Text("INITIAL"), initialDistributionStripe);
        log.info("Context Writing from Mapper the Initial Distribution Stripe. Size = {}  Entries = {}",
                Integer.toString(initialDistributionStripe.size()),
                Integer.toString(initialDistributionStripe.entrySet().size()));
        for (int q = 0; q < nrOfHiddenStates; q++) {
            context.write(new Text("EMIT_" + Integer.toString(q)),
                    (MapWritable) emissionDistributionStripe.get(new IntWritable(q)));
            log.info("Context Writing from Mapper the Emission Distribution Stripe. State = {}  Entries = {}",
                    Integer.toString(q), Integer.toString(
                            ((MapWritable) emissionDistributionStripe.get(new IntWritable(q))).size()));
            for (MapWritable.Entry<Writable, Writable> entry : ((MapWritable) emissionDistributionStripe
                    .get(new IntWritable(q))).entrySet()) {
                log.info("Emission Distribution Stripe Details. Key = {}  Value = {} ",
                        Integer.toString(((IntWritable) entry.getKey()).get()),
                        Double.toString(((DoubleWritable) entry.getValue()).get()));
            }
            context.write(new Text("TRANSIT_" + Integer.toString(q)),
                    (MapWritable) transitionDistributionStripe.get(new IntWritable(q)));
            log.info("Context Writing from Mapper the Transition Distribution Stripe. State = {}  Entries = {}",
                    Integer.toString(q), Integer.toString(
                            ((MapWritable) transitionDistributionStripe.get(new IntWritable(q))).size()));
            for (MapWritable.Entry<Writable, Writable> entry : ((MapWritable) transitionDistributionStripe
                    .get(new IntWritable(q))).entrySet()) {
                log.info("Transition Distribution Stripe Details. Key = {}  Value = {} ",
                        Integer.toString(((IntWritable) entry.getKey()).get()),
                        Double.toString(((DoubleWritable) entry.getValue()).get()));
            }
        }

    }
}