Example usage for org.apache.hadoop.io IntWritable set

List of usage examples for org.apache.hadoop.io IntWritable set

Introduction

On this page you can find example usage for org.apache.hadoop.io IntWritable set.

Prototype

public void set(int value) 

Source Link

Document

Set the value of this IntWritable.
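
As a minimal sketch (the variable names and values here are hypothetical, not taken from the examples below), set replaces the wrapped int in place, which lets a single IntWritable instance be reused across records instead of allocating a new object each time:

import org.apache.hadoop.io.IntWritable;

public class IntWritableSetSketch {
    public static void main(String[] args) {
        // A single reusable writable; the no-arg constructor wraps 0.
        IntWritable count = new IntWritable();
        count.set(42);                       // replace the wrapped value in place
        System.out.println(count.get());     // prints 42
        count.set(count.get() + 1);          // typical read-modify-write reuse
        System.out.println(count.get());     // prints 43
    }
}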

Usage

From source file:co.nubetech.hiho.testdata.SequenceFileWriteDemo.java

License:Apache License

public static void main(String[] args) throws IOException {
    String uri = "input2.seq";
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(uri), conf);
    Path path = new Path(uri);
    IntWritable key = new IntWritable();
    Text value = new Text();
    SequenceFile.Writer writer = null;
    try {
        writer = SequenceFile.createWriter(fs, conf, path, key.getClass(), value.getClass());
        for (int i = 0; i < 2; i++) {
            key.set(2 - i);
            value.set(DATA[i % DATA.length]);
            System.out.printf("[%s]\t%s\t%s\n", writer.getLength(), key, value);
            writer.append(key, value);
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        IOUtils.closeStream(writer);
    }
}

From source file:com.caseystella.analytics.distribution.RotationTest.java

License:Apache License

@Test
public void rotationTest() throws Exception {
    OutlierConfig config = JSONUtil.INSTANCE.load(amountConfig, OutlierConfig.class);
    final IntWritable numChunksAdded = new IntWritable(0);
    final IntWritable numRotations = new IntWritable(0);
    Distribution.Context context = new Distribution.Context(0, 0) {
        @Override
        protected void addChunk(Distribution d) {
            super.addChunk(d);
            numChunksAdded.set(numChunksAdded.get() + 1);
        }

        @Override
        protected void rotate() {
            super.rotate();
            numRotations.set(numRotations.get() + 1);
        }
    };
    GlobalStatistics globalStats = new GlobalStatistics();
    Random r = new Random(0);
    List<DataPoint> points = new ArrayList<>();
    DescriptiveStatistics stats = new DescriptiveStatistics();
    LongWritable ts = new LongWritable(0L);
    Assert.assertEquals(context.getAmount(), 0);
    context.addDataPoint(nextDataPoint(r, ts, 1, points), config.getRotationPolicy(),
            config.getChunkingPolicy(), config.getScalingFunction(), globalStats);
    Assert.assertEquals(context.getAmount(), 1);
    Assert.assertEquals(context.getChunks().size(), 1);
    Assert.assertEquals(numChunksAdded.get(), 1);
    Assert.assertEquals(numRotations.get(), 0);
    for (int i = 1; i < 10; ++i) {
        context.addDataPoint(nextDataPoint(r, ts, 1, points), config.getRotationPolicy(),
                config.getChunkingPolicy(), config.getScalingFunction(), globalStats);
        Assert.assertEquals(context.getChunks().size(), 1);
    }
    //at the 11th point, we should create a new chunk
    context.addDataPoint(nextDataPoint(r, ts, 1, points), config.getRotationPolicy(),
            config.getChunkingPolicy(), config.getScalingFunction(), globalStats);
    Assert.assertEquals(context.getChunks().size(), 2);
    Assert.assertEquals(numChunksAdded.get(), 2);
    Assert.assertEquals(context.getAmount(), 11);
    Assert.assertEquals(numRotations.get(), 0);
    for (int i = 12; i <= 110; ++i) {
        context.addDataPoint(nextDataPoint(r, ts, 1, points), config.getRotationPolicy(),
                config.getChunkingPolicy(), config.getScalingFunction(), globalStats);
    }
    Assert.assertEquals(11, numChunksAdded.get());
    Assert.assertEquals(0, numRotations.get());
    //at the 111th point, we should create a rotation
    context.addDataPoint(nextDataPoint(r, ts, 1, points), config.getRotationPolicy(),
            config.getChunkingPolicy(), config.getScalingFunction(), globalStats);
    Assert.assertEquals(12, numChunksAdded.get());
    Assert.assertEquals(11, context.getChunks().size());
    Assert.assertEquals(1, numRotations.get());
    //rotates just past the rotation cutoff (ensuring that we keep at least the last 100 entries in there)
    Assert.assertEquals(context.getAmount(), 101);
    for (int i = 111; i <= 150; ++i) {
        context.addDataPoint(nextDataPoint(r, ts, 1, points), config.getRotationPolicy(),
                config.getChunkingPolicy(), config.getScalingFunction(), globalStats);
    }
    //no matter how far we go in the stream, we always stay at 11 chunks and a total number of values in the distribution of <= 110 (i.e. between the cutoff and cutoff + a chunk)
    Assert.assertEquals(11, context.getChunks().size());
    Assert.assertTrue(context.getAmount() <= 110);
}
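
In the test above, the two IntWritable counters exist so that the anonymous Distribution.Context subclass can mutate state captured from the enclosing method: the local reference stays effectively final while set changes the wrapped value. A minimal sketch of the same pattern, using a hypothetical Runnable rather than the test's Distribution.Context:

final IntWritable counter = new IntWritable(0);
Runnable increment = new Runnable() {
    @Override
    public void run() {
        // Mutate through the captured final reference.
        counter.set(counter.get() + 1);
    }
};
increment.run();
increment.run();
System.out.println(counter.get());   // prints 2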

From source file:com.chimpler.example.eigenface.Helper.java

License:Apache License

public static void writeMatrixSequenceFile(String matrixSeqFileName, double[][] covarianceMatrix)
        throws Exception {
    int rowCount = covarianceMatrix.length;
    int columnCount = covarianceMatrix[0].length;

    Configuration configuration = new Configuration();
    FileSystem fs = FileSystem.get(configuration);
    Writer matrixWriter = new SequenceFile.Writer(fs, configuration, new Path(matrixSeqFileName),
            IntWritable.class, VectorWritable.class);

    IntWritable key = new IntWritable();
    VectorWritable value = new VectorWritable();

    double[] doubleValues = new double[columnCount];
    for (int i = 0; i < rowCount; i++) {
        key.set(i);
        for (int j = 0; j < columnCount; j++) {
            doubleValues[j] = covarianceMatrix[i][j];
        }
        Vector vector = new DenseVector(doubleValues);
        value.set(vector);

        matrixWriter.append(key, value);
    }
    matrixWriter.close();
}

From source file:com.chinamobile.bcbsp.bspcontroller.Counters.java

License:Apache License

/**
 * Extracts a block (data enclosed within delimiters), ignoring escape
 * sequences. Throws ParseException if an incomplete block is found,
 * otherwise returns the block contents, or null if no block starts.
 * @param str
 *        the string to scan
 * @param open
 *        the opening delimiter character
 * @param close
 *        the closing delimiter character
 * @param index
 *        in/out cursor: scanning starts at index.get(); when a block is
 *        found, index is advanced past the closing delimiter
 * @return the contents of the block, or null if no opening delimiter is found
 */
private static String getBlock(String str, char open, char close, IntWritable index) throws ParseException {
    StringBuilder split = new StringBuilder();
    int next = StringUtils.findNext(str, open, StringUtils.ESCAPE_CHAR, index.get(), split);
    split.setLength(0); // clear the buffer
    if (next >= 0) {
        ++next; // move over '('
        next = StringUtils.findNext(str, close, StringUtils.ESCAPE_CHAR, next, split);
        if (next >= 0) {
            ++next; // move over ')'
            index.set(next);
            return split.toString(); // found a block
        } else {
            throw new ParseException("Unexpected end of block", next);
        }
    }
    return null; // found nothing
}
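
Here the IntWritable parameter serves as a mutable in/out cursor: scanning starts at index.get() and, when a block is found, index is advanced past the closing delimiter so the caller can continue from there. A hypothetical call (not part of the source file), assuming Hadoop's StringUtils.findNext copies the characters up to the separator into the supplied buffer:

IntWritable cursor = new IntWritable(0);
String first = getBlock("(alpha)(beta)", '(', ')', cursor);   // "alpha"; cursor now sits just past the first ')'
String second = getBlock("(alpha)(beta)", '(', ')', cursor);  // "beta"; scanning resumed at the cursor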

From source file:com.cloudera.dataflow.spark.HadoopFileFormatPipelineTest.java

License:Open Source License

private void populateFile() throws IOException {
    IntWritable key = new IntWritable();
    Text value = new Text();
    try (Writer writer = SequenceFile.createWriter(new Configuration(), Writer.keyClass(IntWritable.class),
            Writer.valueClass(Text.class), Writer.file(new Path(this.inputFile.toURI())))) {
        for (int i = 0; i < 5; i++) {
            key.set(i);
            value.set("value-" + i);
            writer.append(key, value);
        }
    }
}

From source file:com.csiro.hadoop.WritableTest.java

public static void main(String[] args) {
    System.out.println("*** Primitive Writable ***");

    BooleanWritable bool1 = new BooleanWritable(true);
    ByteWritable byte1 = new ByteWritable((byte) 3);
    System.out.printf("Boolean:%s Byte:%d\n", bool1, byte1.get());

    IntWritable int1 = new IntWritable(5);
    IntWritable int2 = new IntWritable(17);
    System.out.printf("I1:%d I2:%d\n", int1.get(), int2.get());

    int1.set(int2.get());
    System.out.printf("I1:%d I2:%d\n", int1.get(), int2.get());

    Integer int3 = new Integer(23);
    int1.set(int3);
    System.out.printf("I1:%d I2:%d\n", int1.get(), int2.get());

    System.out.println("*** Array Writable ***");

    ArrayWritable a = new ArrayWritable(IntWritable.class);
    a.set(new IntWritable[] { new IntWritable(1), new IntWritable(3), new IntWritable(5) });

    IntWritable[] values = (IntWritable[]) a.get();
    for (IntWritable i : values) {
        System.out.println(i);
    }

    IntArrayWritable ia = new IntArrayWritable();
    ia.set(new IntWritable[] { new IntWritable(1), new IntWritable(3), new IntWritable(5) });

    IntWritable[] ivalues = (IntWritable[]) ia.get();

    ia.set((new LongWritable[] { new LongWritable(10001) }));

    System.out.println("*** Map Writables ***");

    MapWritable m = new MapWritable();
    IntWritable key1 = new IntWritable(5);
    NullWritable value1 = NullWritable.get();

    m.put(key1, value1);
    System.out.println(m.containsKey(key1));
    System.out.println(m.get(key1));
    m.put(new LongWritable(100000000), key1);
    Set<Writable> keys = m.keySet();

    for (Writable k : keys)
        System.out.println(k.getClass());

}

From source file:com.digitalpebble.behemoth.BehemothDocument.java

License:Apache License

protected void writeAnnotation(Annotation annot, DataOutput out, List<String> atypes) throws IOException {
    int typePos = atypes.indexOf(annot.getType());
    IntWritable intStringPool = new IntWritable(typePos);
    intStringPool.write(out);
    WritableUtils.writeVLong(out, annot.getStart());
    WritableUtils.writeVLong(out, annot.getEnd());
    out.writeInt(annot.getFeatureNum());

    if (annot.getFeatures() != null) {
        Iterator<String> featNameIter = annot.getFeatures().keySet().iterator();
        while (featNameIter.hasNext()) {
            String fname = featNameIter.next();
            int fnamePos = atypes.indexOf(fname);
            intStringPool.set(fnamePos);
            intStringPool.write(out);
            WritableUtils.writeString(out, annot.getFeatures().get(fname));
        }
    }
}

From source file:com.facebook.hive.orc.lazy.LazyIntDictionaryTreeReader.java

License:Open Source License

IntWritable createWritable(Object previous, int v) throws IOException {
    IntWritable result = null;
    if (previous == null) {
        result = new IntWritable();
    } else {
        result = (IntWritable) previous;
    }
    result.set(v);
    return result;
}

From source file:com.github.ygf.pagerank.InLinksTopNReducer.java

License:Apache License

@Override
protected void cleanup(Context context) throws IOException, InterruptedException {

    Configuration conf = context.getConfiguration();
    Path titlesDir = new Path(conf.get("inlinks.titles_dir"));

    MapFile.Reader[] readers = MapFileOutputFormat.getReaders(titlesDir, conf);
    Partitioner<IntWritable, Text> partitioner = new HashPartitioner<IntWritable, Text>();
    IntWritable page = new IntWritable();
    Text title = new Text();

    int[] inLinks = new int[topN.size()];
    String[] titles = new String[topN.size()];

    for (int i = inLinks.length - 1; i >= 0; i--) {
        Map.Entry<Integer, Integer> entry = topN.poll();
        page.set(entry.getValue());
        MapFileOutputFormat.getEntry(readers, partitioner, page, title);
        inLinks[i] = entry.getKey();
        titles[i] = title.toString();
    }

    for (MapFile.Reader reader : readers) {
        reader.close();
    }

    for (int i = 0; i < inLinks.length; i++) {
        context.write(new IntWritable(inLinks[i]), new Text(titles[i]));
    }
}

From source file:com.github.ygf.pagerank.PageRankTopNReducer.java

License:Apache License

@Override
protected void cleanup(Context context) throws IOException, InterruptedException {

    Configuration conf = context.getConfiguration();
    Path titlesDir = new Path(conf.get("pagerank.titles_dir"));

    MapFile.Reader[] readers = MapFileOutputFormat.getReaders(titlesDir, conf);
    Partitioner<IntWritable, Text> partitioner = new HashPartitioner<IntWritable, Text>();
    IntWritable page = new IntWritable();
    Text title = new Text();

    float[] pageRanks = new float[topN.size()];
    String[] titles = new String[topN.size()];

    // The order of the entries is reversed. The priority queue is in
    // non-decreasing order and we want the highest PageRank first.
    for (int i = pageRanks.length - 1; i >= 0; i--) {
        Map.Entry<Float, Integer> entry = topN.poll();
        // Get the title of the page from the title index.
        page.set(entry.getValue());
        MapFileOutputFormat.getEntry(readers, partitioner, page, title);
        pageRanks[i] = entry.getKey();
        titles[i] = title.toString();
    }

    for (MapFile.Reader reader : readers) {
        reader.close();
    }

    for (int i = 0; i < pageRanks.length; i++) {
        context.write(new FloatWritable(pageRanks[i]), new Text(titles[i]));
    }
}