Example usage for org.apache.hadoop.io NullWritable get

List of usage examples for org.apache.hadoop.io NullWritable get

Introduction

On this page you can find an example of the usage of org.apache.hadoop.io NullWritable get.

Prototype

public static NullWritable get() 

Source Link

Document

Returns the single instance of this class.

Usage

From source file:org.ankus.mapreduce.algorithms.clustering.kmeans.KMeansClusterUpdateReducer.java

License:Apache License

/**
 * Rebuilds one k-means cluster from all of its assigned records.
 *
 * <p>The reduce key is the cluster id; each value is one delimited data row.
 * Every attribute column that is selected by {@code mIndexArr} and not excluded
 * by {@code mExceptionIndexArr} is accumulated into a {@code KMeansClusterInfoMgr},
 * as a nominal attribute when listed in {@code mNominalIndexArr} and as a numeric
 * attribute otherwise. After all rows are consumed the cluster centroid is
 * finalized over the row count and emitted as a single delimited line under a
 * {@code NullWritable} key.
 *
 * @param key     cluster id this group of records belongs to
 * @param values  delimited data rows assigned to the cluster
 * @param context Hadoop reduce context used for output and configuration
 */
protected void reduce(IntWritable key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
    Iterator<Text> iterator = values.iterator();

    KMeansClusterInfoMgr cluster = new KMeansClusterInfoMgr();
    cluster.setClusterID(key.get()); // cluster id comes directly from the reduce key
    int dataCnt = 0;
    while (iterator.hasNext()) {
        dataCnt++;
        String tokens[] = iterator.next().toString().split(mDelimiter);

        for (int i = 0; i < tokens.length; i++) {
            // Only columns selected for clustering and not explicitly excluded.
            if (CommonMethods.isContainIndex(mIndexArr, i, true)
                    && !CommonMethods.isContainIndex(mExceptionIndexArr, i, false)) {
                if (CommonMethods.isContainIndex(mNominalIndexArr, i, false)) {
                    cluster.addAttributeValue(i, tokens[i], ConfigurationVariable.NOMINAL_ATTRIBUTE);
                } else
                    cluster.addAttributeValue(i, tokens[i], ConfigurationVariable.NUMERIC_ATTRIBUTE);
            }
        }
    }
    // NOTE(review): presumably averages numeric attributes / picks modal nominal
    // values over dataCnt rows — semantics live in KMeansClusterInfoMgr.
    cluster.finalCompute(dataCnt);

    String writeStr = cluster.getClusterInfoString(mDelimiter,
            context.getConfiguration().get("subDelimiter", "@@"));
    context.write(NullWritable.get(), new Text(writeStr));
}

From source file:org.ankus.mapreduce.algorithms.preprocessing.normalize.NormalizeMapper.java

License:Apache License

@Override
/**
 * Min-max normalizes the selected columns of one delimited input line.
 *
 * <p>For each column chosen by {@code mIndexArr} and not excluded by
 * {@code mExceptionIndexArr}, emits {@code (x - min) / (max - min)} using the
 * per-column "min,max" pair stored in the job configuration under
 * {@code ConfigurationVariable.MINMAX_VALUE + "_" + i} (default {@code "0,0"}).
 * When {@code max == min} the value is emitted as {@code "0"} to avoid a
 * division by zero. Excluded or unselected columns are passed through verbatim
 * only when {@code mRemainFields} is set. Output is a single delimited line
 * under a {@code NullWritable} key.
 *
 * @param key     input offset (unused)
 * @param value   one delimited data row
 * @param context Hadoop map context used for output and configuration
 */
protected void map(Object key, Text value, Context context) throws IOException, InterruptedException {
    String[] columns = value.toString().split(mDelimiter);

    // StringBuilder avoids O(n^2) string concatenation across columns.
    StringBuilder writeVal = new StringBuilder();
    for (int i = 0; i < columns.length; i++) {
        if (CommonMethods.isContainIndex(mIndexArr, i, true)) {
            if (!CommonMethods.isContainIndex(mExceptionIndexArr, i, false)) {
                // NOTE(review): delimiter is prepended for every column with i > 0,
                // even when earlier columns were skipped — preserved as-is; the
                // trailing trim() masks only leading/trailing whitespace.
                if (i > 0)
                    writeVal.append(mDelimiter);

                String minMax[] = context.getConfiguration()
                        .get(ConfigurationVariable.MINMAX_VALUE + "_" + i, "0,0").split(",");

                double val1 = Double.parseDouble(columns[i]) - Double.parseDouble(minMax[0]);
                double val2 = Double.parseDouble(minMax[1]) - Double.parseDouble(minMax[0]);

                // Fixed: original tested "(val2 == 0) || (val2 == 0)" — the same
                // condition twice. A single zero check is equivalent and clear.
                if (val2 == 0)
                    writeVal.append("0");
                else
                    writeVal.append(val1 / val2);
            } else if (mRemainFields) {
                if (i > 0)
                    writeVal.append(mDelimiter);
                writeVal.append(columns[i]);
            }
        } else if (mRemainFields) {
            if (i > 0)
                writeVal.append(mDelimiter);
            writeVal.append(columns[i]);
        }
    }
    context.write(NullWritable.get(), new Text(writeVal.toString().trim()));
}

From source file:org.ankus.mapreduce.algorithms.recommendation.recommender.itemlist.ItemListMapper.java

License:Apache License

@Override
/**
 * Extracts the item identifier (second delimited column) from one rating line
 * and emits it as the map key with an empty {@code NullWritable} value, so the
 * reduce phase sees each item exactly once per occurrence.
 *
 * @param key     input offset (unused)
 * @param value   one delimited rating record; column 1 must hold the item id
 * @param context Hadoop map context used for output
 */
protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

    // Column layout: [user, item, ...]; only the item id is forwarded.
    String[] fields = value.toString().split(delimiter);
    context.write(new Text(fields[1]), NullWritable.get());
}

From source file:org.ankus.mapreduce.algorithms.recommendation.recommender.neighborhood.aggregate.AggregateMapper.java

License:Apache License

@Override
/**
 * Identity-style mapper: forwards every input line unchanged under a single
 * {@code NullWritable} key, funneling all records into one reduce group for
 * aggregation.
 *
 * @param key     input offset (unused)
 * @param values  the raw input line, passed through verbatim
 * @param context Hadoop map context used for output
 */
protected void map(LongWritable key, Text values, Context context) throws IOException, InterruptedException {
    context.write(NullWritable.get(), values);
}

From source file:org.ankus.mapreduce.algorithms.statistics.certaintyfactorsum.CFSumReducer.java

License:Apache License

@Override
/**
 * Combines all certainty-factor values for one key using the CF combination
 * rule {@code cf' = cf + v - cf * v / sumMax}, then emits
 * {@code key<delimiter>combined} under a {@code NullWritable} key.
 *
 * @param key     group key whose certainty factors are combined
 * @param values  textual certainty-factor values, parsed as doubles
 * @param context Hadoop reduce context used for output
 */
protected void reduce(Text key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
    double combined = 0;
    for (Text value : values) {
        double v = Double.parseDouble(value.toString());
        // Certainty-factor combination, scaled by sumMax.
        combined = combined + v - (combined * v / sumMax);
    }
    context.write(NullWritable.get(), new Text(key.toString() + delimiter + combined));
}

From source file:org.ankus.mapreduce.algorithms.statistics.nominalstats.NominalStatsFrequencyReducer.java

License:Apache License

@Override
/**
 * Sums the partial counts for one nominal value and emits
 * {@code value<delimiter>totalCount} under a {@code NullWritable} key.
 *
 * @param key     the nominal value being counted
 * @param values  partial occurrence counts from the map phase
 * @param context Hadoop reduce context used for output
 */
protected void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
    // long accumulator: the total may exceed the int range of the partials.
    long total = 0;
    for (IntWritable partial : values) {
        total += partial.get();
    }
    context.write(NullWritable.get(), new Text(key.toString() + delimiter + total));
}

From source file:org.ankus.mapreduce.algorithms.statistics.nominalstats.NominalStatsRatioMapper.java

License:Apache License

@Override
/**
 * Appends the relative frequency to one {@code "<value>,<count>"} frequency
 * line: the count after the first comma is divided by {@code totalMapRecords}
 * and the ratio is appended to the original line with {@code delimiter}.
 *
 * @param key     input offset (unused)
 * @param value   frequency line of the form {@code value,count}
 * @param context Hadoop map context used for output
 */
protected void map(Object key, Text value, Context context) throws IOException, InterruptedException {
    String line = value.toString();
    // Everything after the first ',' is the absolute count.
    long frequency = Long.parseLong(line.substring(line.indexOf(",") + 1));
    double ratio = frequency / (double) totalMapRecords;

    context.write(NullWritable.get(), new Text(line + delimiter + ratio));
}

From source file:org.ankus.mapreduce.algorithms.statistics.numericstats.NumericStats1MRReducer.java

License:Apache License

@Override
/**
 * Computes basic numeric statistics for one key in a single pass: sum,
 * arithmetic/harmonic/geometric means, variance, standard deviation, max, min
 * and mid-range, emitted as one delimited line under a {@code NullWritable} key.
 *
 * <p>Harmonic and geometric means require strictly positive data; as soon as a
 * value {@literal <=} 0 is seen, both are reported as 0. Squares are
 * accumulated scaled down by 10000 and rescaled before the variance is taken.
 *
 * @param key     group key the statistics are computed for
 * @param values  textual numeric values, parsed as doubles
 * @param context Hadoop reduce context used for output
 */
protected void reduce(IntWritable key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
    int count = 0;
    double sum = 0;
    double harmonicSum = 0;  // sum of 1/x, meaningful only while allPositive holds
    double geometricSum = 0; // sum of log10(x), meaningful only while allPositive holds
    double squareSum = 0;    // sum of x^2 / 10000 (scaled to limit magnitude)
    double max = 0;
    double min = 0;
    boolean allPositive = true;

    for (Text text : values) {
        double v = Double.parseDouble(text.toString());
        count++;

        if (count == 1) {
            // First value initializes both extremes.
            max = v;
            min = v;
        } else {
            if (max < v)
                max = v;
            if (min > v)
                min = v;
        }

        if (v <= 0)
            allPositive = false;
        sum += v;
        if (allPositive) {
            harmonicSum += 1 / v;
            geometricSum += Math.log10(v);
        }
        squareSum += Math.pow(v, 2) / 10000;
    }

    double avg = sum / (double) count;
    // Harmonic/geometric means are undefined for non-positive data; report 0.
    double avgHarmonic = 0;
    double avgGeometric = 0;
    if (allPositive) {
        avgHarmonic = (double) count / harmonicSum;
        avgGeometric = Math.pow(10, geometricSum / (double) count);
    }

    // Population variance via E[x^2] - (E[x])^2, undoing the 10000 scaling.
    double variance = (squareSum * 10000 / (double) count) - Math.pow(avg, 2);
    double stdDeviation = Math.sqrt(variance);
    double midRange = (max + min) / 2;

    String writeVal = sum + delimiter + avg + delimiter + avgHarmonic + delimiter + avgGeometric + delimiter
            + variance + delimiter + stdDeviation + delimiter + max + delimiter + min + delimiter
            + midRange;
    context.write(NullWritable.get(), new Text(key.toString() + delimiter + writeVal));
}

From source file:org.ankus.mapreduce.algorithms.statistics.numericstats.NumericStats2MRMergeReducer.java

License:Apache License

/**
 * Merges per-partition partial statistics into the final statistics line for
 * one key. Each value is a delimited record of the form
 * {@code cnt, max, min, sum, harmonicSum, geometricSum, squareSum, positiveFlag}
 * where the flag is {@code "F"} when the partition contained a non-positive
 * value. Emits sum, means, variance, standard deviation, max, min and
 * mid-range under a {@code NullWritable} key.
 *
 * @param key     group key the merged statistics belong to
 * @param values  delimited partial-statistics records
 * @param context Hadoop reduce context used for output
 */
protected void reduce(Text key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
    int cnt = 0;
    double max = 0;
    double min = 0;
    double sum = 0;
    double harmonicSum = 0;  // valid only while every partition was all-positive
    double geometricSum = 0; // valid only while every partition was all-positive
    double squareSum = 0;    // accumulated pre-scaled by 1/10000 upstream
    boolean allPositive = true;

    for (Text partial : values) {
        String[] tokens = partial.toString().split(delimiter);

        int partCnt = Integer.parseInt(tokens[0]);
        double partMax = Double.parseDouble(tokens[1]);
        double partMin = Double.parseDouble(tokens[2]);
        cnt += partCnt;
        if (cnt == partCnt) {
            // First partial record initializes the extremes.
            max = partMax;
            min = partMin;
        } else {
            if (max < partMax)
                max = partMax;
            if (min > partMin)
                min = partMin;
        }

        if (tokens[7].equals("F"))
            allPositive = false;
        sum += Double.parseDouble(tokens[3]);
        if (allPositive) {
            harmonicSum += Double.parseDouble(tokens[4]);
            geometricSum += Double.parseDouble(tokens[5]);
        }
        squareSum += Double.parseDouble(tokens[6]);
    }

    double avg = sum / (double) cnt;
    // Harmonic/geometric means are undefined for non-positive data; report 0.
    double avgHarmonic = 0;
    double avgGeometric = 0;
    if (allPositive) {
        avgHarmonic = (double) cnt / harmonicSum;
        avgGeometric = Math.pow(10, geometricSum / (double) cnt);
    }

    // Population variance via E[x^2] - (E[x])^2, undoing the 10000 scaling.
    double variance = (squareSum * 10000 / (double) cnt) - Math.pow(avg, 2);
    double stdDeviation = Math.sqrt(variance);
    double midRange = (max + min) / 2;

    String writeVal = sum + delimiter + avg + delimiter + avgHarmonic + delimiter + avgGeometric + delimiter
            + variance + delimiter + stdDeviation + delimiter + max + delimiter + min + delimiter
            + midRange;
    context.write(NullWritable.get(), new Text(key.toString() + delimiter + writeVal));
}

From source file:org.apache.avro.hadoop.io.TestAvroDatumConverterFactory.java

License:Apache License

/**
 * Verifies that the converter produced for {@code NullWritable} maps the
 * {@code NullWritable} singleton to a {@code null} Avro datum.
 */
@Test
public void testConvertNullWritable() {
    AvroDatumConverter<NullWritable, Object> converter = mFactory.create(NullWritable.class);
    assertNull(converter.convert(NullWritable.get()));
}