Example usage for org.apache.hadoop.io IntWritable set

List of usage examples for org.apache.hadoop.io IntWritable set

Introduction

In this page you can find the example usage for org.apache.hadoop.io IntWritable set.

Prototype

public void set(int value) 

Source Link

Document

Set the value of this IntWritable.

Usage

From source file:hivemall.mf.BPRMatrixFactorizationUDTF.java

License:Apache License

/**
 * Finalizes BPR training and forwards the learned factorization model,
 * one row per index: (idx, Pu, Qi[, Bi]).
 *
 * @throws HiveException if iterative training or forwarding a row fails
 */
@Override
public void close() throws HiveException {
    if (model != null) {
        if (count == 0) {
            this.model = null; // help GC
            return;
        }
        if (iterations > 1) {
            runIterativeTraining(iterations);
        }

        // Reused output writables. NOTE(review): this relies on forward() (or
        // downstream operators) copying the row before the next mutation, since
        // the same objects are rewritten on every iteration -- confirm against
        // the UDTF forward() contract.
        final IntWritable idx = new IntWritable();
        final FloatWritable[] Pu = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable[] Qi = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable Bi = useBiasClause ? new FloatWritable() : null;
        final Object[] forwardObj = new Object[] { idx, Pu, Qi, Bi };

        int numForwarded = 0;
        for (int i = model.getMinIndex(), maxIdx = model.getMaxIndex(); i <= maxIdx; i++) {
            idx.set(i);
            // Slot 1 aliases the shared Pu buffer, or null when the index has
            // no user vector; slot 2 likewise for the item vector Qi.
            Rating[] userRatings = model.getUserVector(i);
            if (userRatings == null) {
                forwardObj[1] = null;
            } else {
                forwardObj[1] = Pu;
                copyTo(userRatings, Pu);
            }
            Rating[] itemRatings = model.getItemVector(i);
            if (itemRatings == null) {
                forwardObj[2] = null;
            } else {
                forwardObj[2] = Qi;
                copyTo(itemRatings, Qi);
            }
            if (useBiasClause) {
                Bi.set(model.getItemBias(i));
            }
            forward(forwardObj);
            numForwarded++;
        }
        this.model = null; // help GC
        LOG.info("Forwarded the prediction model of " + numForwarded + " rows. [lastLosses="
                + cvState.getCumulativeLoss() + ", #trainingExamples=" + count + "]");
    }
}

From source file:hivemall.mf.BPRMatrixFactorizationUDTFTest.java

License:Apache License

/**
 * Parses one space-delimited "user posItem negItem" line into the three
 * supplied writables, asserting the line has exactly three columns.
 */
private static void parseLine(@Nonnull String line, @Nonnull IntWritable user, @Nonnull IntWritable posItem,
        @Nonnull IntWritable negItem) {
    final String[] tokens = StringUtils.split(line, ' ');
    Assert.assertEquals(3, tokens.length);
    final int userId = Integer.parseInt(tokens[0]);
    final int positiveItemId = Integer.parseInt(tokens[1]);
    final int negativeItemId = Integer.parseInt(tokens[2]);
    user.set(userId);
    posItem.set(positiveItemId);
    negItem.set(negativeItemId);
}

From source file:hivemall.mf.OnlineMatrixFactorizationUDTF.java

License:Open Source License

/**
 * Finalizes online MF training and forwards the learned model, one row per
 * index: (idx, Pu, Qi, Bu, Bi[, mu]) where mu is the global mean rating.
 *
 * @throws HiveException if iterative training or forwarding a row fails
 */
@Override
public void close() throws HiveException {
    if (model != null) {
        if (count == 0) {
            this.model = null; // help GC
            return;
        }
        if (iterations > 1) {
            runIterativeTraining(iterations);
        }
        // Reused output writables. NOTE(review): relies on forward() (or
        // downstream operators) copying the row before the next mutation --
        // confirm against the UDTF forward() contract.
        final IntWritable idx = new IntWritable();
        final FloatWritable[] Pu = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable[] Qi = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable Bu = new FloatWritable();
        final FloatWritable Bi = new FloatWritable();
        final Object[] forwardObj;
        if (updateMeanRating) {
            // The mean rating is constant across rows, so it is materialized once.
            float meanRating = model.getMeanRating();
            FloatWritable mu = new FloatWritable(meanRating);
            forwardObj = new Object[] { idx, Pu, Qi, Bu, Bi, mu };
        } else {
            forwardObj = new Object[] { idx, Pu, Qi, Bu, Bi };
        }
        int numForwarded = 0;
        for (int i = model.getMinIndex(), maxIdx = model.getMaxIndex(); i <= maxIdx; i++) {
            idx.set(i);
            // Slot 1 aliases the shared Pu buffer, or null when the index has
            // no user vector; slot 2 likewise for the item vector Qi.
            Rating[] userRatings = model.getUserVector(i);
            if (userRatings == null) {
                forwardObj[1] = null;
            } else {
                forwardObj[1] = Pu;
                copyTo(userRatings, Pu);
            }
            Rating[] itemRatings = model.getItemVector(i);
            if (itemRatings == null) {
                forwardObj[2] = null;
            } else {
                forwardObj[2] = Qi;
                copyTo(itemRatings, Qi);
            }
            Bu.set(model.getUserBias(i));
            Bi.set(model.getItemBias(i));
            forward(forwardObj);
            numForwarded++;
        }
        this.model = null; // help GC
        logger.info("Forwarded the prediction model of " + numForwarded + " rows. [totalErrors=" + totalErrors
                + ", lastLosses=" + currLosses + ", #trainingExamples=" + count + "]");
    }
}

From source file:hivemall.recommend.SlimUDTF.java

License:Apache License

/**
 * Forwards every non-zero cell of the learned weight matrix as an
 * (i, nn, w) row.
 *
 * @throws HiveException rethrown if forwarding any row failed during traversal
 */
private void forwardModel() throws HiveException {
    final IntWritable itemIdx = new IntWritable(); // i
    final IntWritable neighborIdx = new IntWritable(); // nn
    final FloatWritable weight = new FloatWritable(); // w
    final Object[] row = new Object[] { itemIdx, neighborIdx, weight };

    // The visitor callback cannot throw a checked exception, so the first
    // failure is stashed here and rethrown after the traversal finishes.
    final MutableObject<HiveException> firstFailure = new MutableObject<>();
    _weightMatrix.eachNonZeroCell(new VectorProcedure() {
        @Override
        public void apply(int i, int j, float value) {
            if (value == 0.f) {
                return; // skip explicitly stored zeros
            }
            itemIdx.set(i);
            neighborIdx.set(j);
            weight.set(value);
            try {
                forward(row);
            } catch (HiveException e) {
                firstFailure.setIfAbsent(e);
            }
        }
    });
    HiveException failure = firstFailure.get();
    if (failure != null) {
        throw failure;
    }
    logger.info("Forwarded SLIM's weights matrix");
}

From source file:hivemall.tools.array.ArraySliceUDFTest.java

License:Apache License

@Test
public void testNonNullReturn() throws IOException, HiveException {
    ArraySliceUDF udf = new ArraySliceUDF();

    udf.initialize(new ObjectInspector[] {
            ObjectInspectorFactory
                    .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector),
            PrimitiveObjectInspectorFactory.writableIntObjectInspector,
            PrimitiveObjectInspectorFactory.writableIntObjectInspector });

    IntWritable offset = new IntWritable();
    IntWritable length = new IntWritable();
    DeferredObject arg1 = new GenericUDF.DeferredJavaObject(offset);
    DeferredObject arg2 = new GenericUDF.DeferredJavaObject(length);
    DeferredObject nullarg = new GenericUDF.DeferredJavaObject(null);

    List<String> input = Arrays.asList("zero", "one", "two", "three", "four", "five", "six", "seven",
            "eight", "nine", "ten");
    DeferredObject[] args =
            new DeferredObject[] { new GenericUDF.DeferredJavaObject(input), arg1, arg2 };

    // Plain positive offset and length.
    offset.set(0);
    length.set(3);
    List<Object> actual = udf.evaluate(args);
    Assert.assertEquals(Arrays.asList("zero", "one", "two"), actual);

    // Negative length counts back from the end of the array.
    offset.set(1);
    length.set(-2);
    actual = udf.evaluate(args);
    Assert.assertEquals(Arrays.asList("one", "two", "three", "four", "five", "six", "seven", "eight"), actual);

    // Zero length yields an empty slice.
    offset.set(1);
    length.set(0);
    actual = udf.evaluate(args);
    Assert.assertEquals(Collections.emptyList(), actual);

    // Negative offset with zero length also yields an empty slice.
    offset.set(-1);
    length.set(0);
    actual = udf.evaluate(args);
    Assert.assertEquals(Collections.emptyList(), actual);

    // Omitted (null) length slices through to the end of the array.
    offset.set(6);
    args[2] = nullarg;
    actual = udf.evaluate(args);
    Assert.assertEquals(Arrays.asList("six", "seven", "eight", "nine", "ten"), actual);

    udf.close();
}

From source file:hivemall.tools.array.ArraySliceUDFTest.java

License:Apache License

@Test
public void testNullReturn() throws IOException, HiveException {
    ArraySliceUDF udf = new ArraySliceUDF();

    udf.initialize(new ObjectInspector[] {
            ObjectInspectorFactory
                    .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector),
            PrimitiveObjectInspectorFactory.writableIntObjectInspector,
            PrimitiveObjectInspectorFactory.writableIntObjectInspector });

    IntWritable offset = new IntWritable();
    IntWritable length = new IntWritable();
    DeferredObject arg1 = new GenericUDF.DeferredJavaObject(offset);
    DeferredObject arg2 = new GenericUDF.DeferredJavaObject(length);

    List<String> input = Arrays.asList("zero", "one", "two", "three", "four", "five", "six", "seven",
            "eight", "nine", "ten");
    DeferredObject[] args =
            new DeferredObject[] { new GenericUDF.DeferredJavaObject(input), arg1, arg2 };

    // An offset pointing before the start of the array produces a null result.
    offset.set(-12);
    length.set(0);
    List<Object> actual = udf.evaluate(args);
    Assert.assertNull(actual);

    udf.close();
}

From source file:hivemall.tools.EachTopKUDTF.java

License:Apache License

/**
 * Drains the top-k queue, assigning dense ranks (tied keys share a rank) and
 * forwarding rows from best key to worst.
 *
 * @throws HiveException if forwarding a row fails
 */
private void drainQueue() throws HiveException {
    final int queueSize = _queue.size();
    if (queueSize > 0) {
        // Poll everything into an array first; the array is then walked
        // backwards so rows are emitted in reverse poll order.
        final TupleWithKey[] tuples = new TupleWithKey[queueSize];
        for (int i = 0; i < queueSize; i++) {
            TupleWithKey tuple = _queue.poll();
            if (tuple == null) {
                throw new IllegalStateException("Found null element in the queue");
            }
            tuples[i] = tuple;
        }
        // Reused output writables. NOTE(review): assumes forward() copies the
        // row before the next mutation -- confirm against the UDTF contract.
        final IntWritable rankProbe = new IntWritable(-1);
        final DoubleWritable keyProbe = new DoubleWritable(Double.NaN);
        int rank = 0;
        // NaN sentinel: (key != NaN) is true for every double, including NaN
        // itself, so the first tuple always triggers the rank-1 branch.
        double lastKey = Double.NaN;
        for (int i = queueSize - 1; i >= 0; i--) {
            TupleWithKey tuple = tuples[i];
            tuples[i] = null; // help GC
            double key = tuple.getKey();
            if (key != lastKey) {
                ++rank; // dense ranking: only advance when the key changes
                rankProbe.set(rank);
                keyProbe.set(key);
                lastKey = key;
            }
            Object[] row = tuple.getRow();
            row[0] = rankProbe;
            row[1] = keyProbe;
            forward(row);
        }
        _queue.clear();
    }
}

From source file:hivemall.topicmodel.ProbabilisticTopicModelBaseUDTF.java

License:Apache License

/**
 * Forwards the learned topic model as (topicIdx, word, score) rows, one row
 * per word of every topic.
 *
 * @throws HiveException if forwarding a row fails
 */
protected void forwardModel() throws HiveException {
    // Reused output writables, mutated and re-forwarded for each row.
    final IntWritable topic = new IntWritable();
    final Text term = new Text();
    final FloatWritable prob = new FloatWritable();
    final Object[] row = new Object[] { topic, term, prob };

    for (int k = 0; k < topics; k++) {
        topic.set(k);
        // Words are grouped by score; emit one row per word in each group.
        for (Map.Entry<Float, List<String>> entry : model.getTopicWords(k).entrySet()) {
            prob.set(entry.getKey().floatValue());
            for (String word : entry.getValue()) {
                term.set(word);
                forward(row);
            }
        }
    }

    logger.info("Forwarded topic words each of " + topics + " topics");
}

From source file:hk.newsRecommender.MatrixAndCluster.java

License:Open Source License

/**
 * Reads a SequenceFile of 0/1 token rows and rewrites it as
 * (IntWritable lineNum, VectorWritable sparseVector) pairs, setting vector
 * position i to 1 wherever the i-th token of the row equals 1.
 *
 * NOTE(review): the writer targets the same {@code path} that is being read;
 * verify this overwrite-in-place is intended for the FileSystem in use.
 *
 * @param conf Hadoop configuration used for the FileSystem and reflection
 * @param path SequenceFile to read and rewrite
 * @throws IOException if reading or writing the SequenceFile fails
 */
public static void matrix2Vector(Configuration conf, Path path) throws IOException {
    FileSystem fs = FileSystem.get(conf);

    SequenceFile.Reader reader = null;
    Writer writer = null;
    try {
        reader = new SequenceFile.Reader(fs, path, conf);
        // Instantiate reusable key/value holders matching the file's declared types.
        Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
        Writable val = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
        writer = SequenceFile.createWriter(fs, conf, path, IntWritable.class, VectorWritable.class,
                CompressionType.BLOCK);
        final IntWritable key1 = new IntWritable();
        final VectorWritable value = new VectorWritable();
        int lineNum = 0;
        while (reader.next(key, val)) {
            int index = 0;
            StringTokenizer st = new StringTokenizer(val.toString());
            // Wrap in a NamedVector so the line number travels with the vector.
            Vector vector = new NamedVector(new SequentialAccessSparseVector(Cardinality), lineNum + "");
            while (st.hasMoreTokens()) {
                if (Integer.parseInt(st.nextToken()) == 1) {
                    vector.set(index, 1);
                }
                index++;
            }
            key1.set(lineNum++);
            value.set(vector);
            // BUG FIX: append the IntWritable line-number key (key1), not the
            // reader's raw key -- the writer was declared with IntWritable.class
            // as its key class, and key1 was previously set but never used.
            writer.append(key1, value);
        }
    } finally {
        // Guard against NPE when construction of either resource failed.
        if (writer != null) {
            writer.close();
        }
        if (reader != null) {
            reader.close();
        }
    }
}

From source file:info.halo9pan.word2vec.hadoop.mr.ReadWordsReducer.java

License:Open Source License

/**
 * Sums the partial counts emitted for a word and writes the word with its total.
 */
public void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
    int total = 0;
    for (IntWritable partial : values) {
        total += partial.get();
    }
    IntWritable result = new IntWritable();
    result.set(total);
    context.write(key, result);
}