Example usage for org.apache.hadoop.io LongWritable LongWritable

List of usage examples for org.apache.hadoop.io LongWritable LongWritable

Introduction

On this page you can find example usage of the org.apache.hadoop.io LongWritable(long) constructor.

Prototype

public LongWritable(long value) 
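
For orientation, the sketch below is not taken from any of the source files listed under Usage (the class name LongWritableDemo is made up for illustration); it shows what the constructor does on its own: it wraps a primitive long in a mutable, serializable, comparable Hadoop type.

import org.apache.hadoop.io.LongWritable;

public class LongWritableDemo {
    public static void main(String[] args) {
        // Wrap a primitive long so it can serve as a MapReduce key or value
        LongWritable offset = new LongWritable(42L);
        System.out.println(offset.get()); // prints 42

        // The wrapped value is mutable and can be replaced in place
        offset.set(100L);

        // LongWritable is also comparable, which lets it act as a sort key
        System.out.println(offset.compareTo(new LongWritable(100L))); // prints 0
    }
}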

Usage

From source file:com.placeiq.piqconnect.BlocksBuilderTest.java

License:Apache License

@Test
public void simpleMatrix() throws IOException {
    mrDriver.getConfiguration().setInt(Constants.PROP_BLOCK_SIZE, 3);
    mrDriver.getConfiguration().setBoolean("isVector", false);

    mrDriver.addInput(new LongWritable(0), new Text("1\t2"));
    mrDriver.addInput(new LongWritable(0), new Text("1\t3"));
    mrDriver.addInput(new LongWritable(0), new Text("3\t4"));

    BlockIndexWritable b1 = new BlockIndexWritable();
    BlockIndexWritable b2 = new BlockIndexWritable();
    BlockIndexWritable b3 = new BlockIndexWritable();
    BlockIndexWritable b4 = new BlockIndexWritable();

    BlockWritable d1 = new BlockWritable(3, BlockWritable.TYPE.MATRIX);
    BlockWritable d2 = new BlockWritable(3, BlockWritable.TYPE.MATRIX);
    BlockWritable d3 = new BlockWritable(3, BlockWritable.TYPE.MATRIX);
    BlockWritable d4 = new BlockWritable(3, BlockWritable.TYPE.MATRIX);

    b1.setMatrixIndex(0, 0);
    d1.addMatrixElem(1, 2);
    d1.addMatrixElem(2, 1);

    b2.setMatrixIndex(0, 1);
    d2.addMatrixElem(1, 0);

    b3.setMatrixIndex(1, 0);
    d3.addMatrixElem(0, 1);

    b4.setMatrixIndex(1, 1);
    d4.addMatrixElem(0, 1);
    d4.addMatrixElem(1, 0);

    mrDriver.addOutput(b1, d1);
    mrDriver.addOutput(b2, d2);
    mrDriver.addOutput(b3, d3);
    mrDriver.addOutput(b4, d4);

    mrDriver.runTest();
}

From source file:com.placeiq.piqconnect.BlocksBuilderTest.java

License:Apache License

@Test
public void simpleVector() throws IOException {
    mrDriver.getConfiguration().setInt(Constants.PROP_BLOCK_SIZE, 3);
    mrDriver.getConfiguration().setBoolean("isVector", true);

    mrDriver.addInput(new LongWritable(0), new Text("1\t1"));
    mrDriver.addInput(new LongWritable(0), new Text("2\t2"));
    mrDriver.addInput(new LongWritable(0), new Text("3\t3"));
    mrDriver.addInput(new LongWritable(0), new Text("4\t4"));
    mrDriver.addInput(new LongWritable(0), new Text("5\t5"));
    mrDriver.addInput(new LongWritable(0), new Text("6\t6"));

    BlockIndexWritable b1 = new BlockIndexWritable();
    BlockIndexWritable b2 = new BlockIndexWritable();
    BlockIndexWritable b3 = new BlockIndexWritable();

    BlockWritable d1 = new BlockWritable(3, BlockWritable.TYPE.VECTOR_INITIAL);
    BlockWritable d2 = new BlockWritable(3, BlockWritable.TYPE.VECTOR_INITIAL);
    BlockWritable d3 = new BlockWritable(3, BlockWritable.TYPE.VECTOR_INITIAL);

    b1.setVectorIndex(0);
    d1.setVectorElem(0, -1);
    d1.setVectorElem(1, 1);
    d1.setVectorElem(2, 2);

    b2.setVectorIndex(1);
    d2.setVectorElem(0, 3);
    d2.setVectorElem(1, 4);
    d2.setVectorElem(2, 5);

    b3.setVectorIndex(2);
    d3.setVectorElem(0, 6);
    d3.setVectorElem(1, -1);
    d3.setVectorElem(2, -1);

    mrDriver.addOutput(b1, d1);
    mrDriver.addOutput(b2, d2);
    mrDriver.addOutput(b3, d3);

    mrDriver.runTest();
}

From source file:com.renaissance.mrunit.hbase.tests.SampleUseTest.java

License:Open Source License

@Test
public void oneHaiku_splitAndAddedToHbase() throws IOException {
    HBaseExpectedColumn oldPond = new HBaseExpectedColumn(TITLE_COLUMNFAMILY, "old pond");
    driver.withInput(new LongWritable(0L),
            new Text("Basho\nold pond\nold pond...\na frog leaps in\nwater's sound"))
            .withOutput(new ImmutableBytesWritable(Bytes.toBytes("Basho")),
                    oldPond.Value(new Text("old pond...\na frog leaps in\nwater's sound")))
            .runTest();
}

From source file:com.ricemap.spateDB.core.RTree.java

License:Apache License

/**
 * Searches the RTree starting from the given start position. This is either
 * a node number or the offset of an element. If it is a node number, the
 * search is performed in the subtree rooted at that node. If it is an offset,
 * only the object found there is searched. It is assumed that openQuery()
 * has been called before this function and that endQuery() will be called
 * afterwards.
 * 
 * @param query_shape
 * @param output
 * @param start
 *            - where to start searching
 * @param end
 *            - where to end searching. Only used when start is an offset of
 *            an object.
 * @param field
 *            - name of the field whose values are collected for matching records
 * @return the number of records that matched the query
 * @throws IOException
 */
protected int searchColumnar(Shape query_shape, ResultCollector<Writable> output, int start, int end,
        String field) throws IOException {
    if (output == null) {
        throw new RuntimeException("Output is NULL");
    }
    //build search field
    int fieldOffset = 0;
    int fieldSize = -1;
    FIELD_TYPE fieldType = FIELD_TYPE.NULL;
    //get fields
    Field[] fields = stockObject.getClass().getDeclaredFields();

    for (int i = 0; i < fields.length; i++) {
        if (fields[i].getName().equals(field)) {
            if (fields[i].getType().equals(Integer.TYPE)) {
                fieldSize = 4;
                fieldType = FIELD_TYPE.Integer;

            } else if (fields[i].getType().equals(Long.TYPE)) {
                fieldSize = 8;
                fieldType = FIELD_TYPE.Long;
            } else if (fields[i].getType().equals(Double.TYPE)) {
                fieldSize = 8;
                fieldType = FIELD_TYPE.Double;
            } else {
                //throw new RuntimeException("Unsupported type: " + fields[i].getType());
            }
            break;
        } else {
            if (fields[i].getType().equals(Integer.TYPE)) {
                fieldOffset += elementCount * 4;
            } else if (fields[i].getType().equals(Long.TYPE) || fields[i].getType().equals(Double.TYPE)) {
                fieldOffset += elementCount * 8;
            } else {
                //throw new RuntimeException("Unsupported type: " + fields[i].getType());
            }
        }
    }

    Prism query_mbr = query_shape.getMBR();
    int resultSize = 0;
    // Special case for an empty tree
    if (height == 0)
        return 0;

    Stack<Integer> toBeSearched = new Stack<Integer>();
    // Start from the given node
    toBeSearched.push(start);
    if (start >= nodeCount) {
        toBeSearched.push(end);
    }

    Prism node_mbr = new Prism();

    // Holds one data line from tree data
    Text line = new Text2();

    while (!toBeSearched.isEmpty()) {
        int searchNumber = toBeSearched.pop();
        int mbrsToTest = searchNumber == 0 ? 1 : degree;

        if (searchNumber < nodeCount) {
            long nodeOffset = NodeSize * searchNumber;
            structure.seek(nodeOffset);
            int dataOffset = structure.readInt();

            for (int i = 0; i < mbrsToTest; i++) {
                node_mbr.readFields(structure);
                int lastOffset = (searchNumber + i) == nodeCount - 1 ? elementCount - 1 : structure.readInt();
                if (query_mbr.contains(node_mbr)) {
                    // The node is full contained in the query range.
                    // Save the time and do full scan for this node

                    // Checks if this node is the last node in its level
                    // This can be easily detected because the next node in
                    // the level
                    // order traversal will be the first node in the next
                    // level
                    // which means it will have an offset less than this
                    // node
                    if (lastOffset <= dataOffset)
                        lastOffset = elementCount;

                    data.seek(treeStartOffset + TreeHeaderSize + nodeCount * NodeSize
                            + elementCount * IndexUnitSize + fieldOffset + dataOffset * fieldSize);
                    for (int j = 0; j < lastOffset - dataOffset; j++) {
                        switch (fieldType) {
                        case Integer:
                            output.collect(new IntWritable(data.readInt()));
                            break;
                        case Long:
                            output.collect(new LongWritable(data.readLong()));
                            break;
                        case Double:
                            output.collect(new DoubleWritable(data.readDouble()));
                            break;
                        default:
                            output.collect(
                                    new Point3d(data.readDouble(), data.readDouble(), data.readDouble()));
                            break;
                        }
                        resultSize++;
                    }

                } else if (query_mbr.isIntersected(node_mbr)) {
                    // Node partially overlaps with query. Go deep under
                    // this node
                    if (searchNumber < nonLeafNodeCount) {
                        // Search child nodes
                        toBeSearched.push((searchNumber + i) * degree + 1);
                    } else {
                        // Search all elements in this node
                        //toBeSearched.push(dataOffset);
                        // Checks if this node is the last node in its level
                        // This can be easily detected because the next node
                        // in the level
                        // order traversal will be the first node in the
                        // next level
                        // which means it will have an offset less than this
                        // node
                        if (lastOffset <= dataOffset)
                            lastOffset = elementCount;
                        //toBeSearched.push(lastOffset);
                        data.seek(treeStartOffset + TreeHeaderSize + nodeCount * NodeSize
                                + dataOffset * IndexUnitSize);
                        boolean[] report = new boolean[lastOffset - dataOffset];
                        Point3d point = new Point3d();
                        for (int j = 0; j < lastOffset - dataOffset; j++) {
                            point.t = data.readDouble();
                            point.x = data.readDouble();
                            point.y = data.readDouble();
                            report[j] = point.isIntersected(query_shape);
                        }
                        data.seek(treeStartOffset + TreeHeaderSize + nodeCount * NodeSize
                                + elementCount * IndexUnitSize + fieldOffset + dataOffset * fieldSize);
                        for (int j = 0; j < lastOffset - dataOffset; j++) {
                            if (report[j]) {
                                switch (fieldType) {
                                case Integer:
                                    output.collect(new IntWritable(data.readInt()));
                                    break;
                                case Long:
                                    output.collect(new LongWritable(data.readLong()));
                                    break;
                                case Double:
                                    output.collect(new DoubleWritable(data.readDouble()));
                                    break;
                                default:
                                    output.collect(new Point3d(data.readDouble(), data.readDouble(),
                                            data.readDouble()));
                                    break;
                                }
                                resultSize++;
                            }
                        }
                    }
                }
                dataOffset = lastOffset;
            }
        } else {
            LOG.error("searchNumber > nodeCount, something is wrong");
            int firstOffset, lastOffset;
            // Search for data items (records)
            lastOffset = searchNumber;
            firstOffset = toBeSearched.pop();

            data.seek(firstOffset + treeStartOffset);
            LineReader lineReader = new LineReader(data);
            while (firstOffset < lastOffset) {
                firstOffset += lineReader.readLine(line);
                stockObject.fromText(line);
                if (stockObject.isIntersected(query_shape)) {
                    resultSize++;
                    if (output != null)
                        output.collect(stockObject);
                }
            }
        }
    }
    return resultSize;
}

From source file:com.sequenceiq.yarntest.mr.QuasiMonteCarlo.java

License:Apache License

/**
 * Run a map/reduce job for estimating Pi.
 *
 * @return the estimated value of Pi
 */
public static JobID submitPiEstimationMRApp(String jobName, int numMaps, long numPoints, Path tmpDir,
        Configuration conf) throws IOException, ClassNotFoundException, InterruptedException {
    Job job = new Job(conf);
    //setup job conf
    job.setJobName(jobName);
    job.setJarByClass(QuasiMonteCarlo.class);

    job.setInputFormatClass(SequenceFileInputFormat.class);

    job.setOutputKeyClass(BooleanWritable.class);
    job.setOutputValueClass(LongWritable.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);

    job.setMapperClass(QmcMapper.class);

    job.setReducerClass(QmcReducer.class);
    job.setNumReduceTasks(1);

    // turn off speculative execution, because DFS doesn't handle
    // multiple writers to the same file.
    job.setSpeculativeExecution(false);

    //setup input/output directories
    final Path inDir = new Path(tmpDir, "in");
    final Path outDir = new Path(tmpDir, "out");
    FileInputFormat.setInputPaths(job, inDir);
    FileOutputFormat.setOutputPath(job, outDir);

    final FileSystem fs = FileSystem.get(conf);
    if (fs.exists(tmpDir)) {
        fs.delete(tmpDir, true);
        //      throw new IOException("Tmp directory " + fs.makeQualified(tmpDir)
        //          + " already exists.  Please remove it first.");
    }
    if (!fs.mkdirs(inDir)) {
        throw new IOException("Cannot create input directory " + inDir);
    }

    //  try {
    //generate an input file for each map task
    for (int i = 0; i < numMaps; ++i) {
        final Path file = new Path(inDir, "part" + i);
        final LongWritable offset = new LongWritable(i * numPoints);
        final LongWritable size = new LongWritable(numPoints);
        final SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, file, LongWritable.class,
                LongWritable.class, CompressionType.NONE);
        try {
            writer.append(offset, size);
        } finally {
            writer.close();
        }
        System.out.println("Wrote input for Map #" + i);
    }

    //start a map/reduce job
    System.out.println("Starting Job");
    final long startTime = System.currentTimeMillis();
    job.submit();
    //      final double duration = (System.currentTimeMillis() - startTime)/1000.0;
    //      System.out.println("Job Finished in " + duration + " seconds");
    return job.getJobID();

    //    } finally {
    //      fs.delete(tmpDir, true);
    //    }
}

From source file:com.soteradefense.dga.io.formats.DGALongEdgeValueInputFormatTest.java

License:Apache License

@Test
public void testInputParserWithDefaultWeight() throws IOException, InterruptedException {
    String input = "1,2";
    when(rr.getCurrentValue()).thenReturn(new Text(input));
    EdgeReader ter = createEdgeReader(rr);
    ter.setConf(conf);
    ter.initialize(null, tac);
    assertEquals(ter.getCurrentSourceId(), new Text("1"));
    assertEquals(ter.getCurrentEdge().getTargetVertexId(), new Text("2"));
    assertEquals(ter.getCurrentEdge().getValue(), new LongWritable(1L));

}

From source file:com.soteradefense.dga.io.formats.DGALongEdgeValueInputFormatTest.java

License:Apache License

@Test
public void testInputParserWithDefaultWeightAndOverriddenSeparator() throws IOException, InterruptedException {
    String input = "1\t2";
    when(rr.getCurrentValue()).thenReturn(new Text(input));
    EdgeReader ter = createEdgeReader(rr);
    conf.set(LINE_TOKENIZE_VALUE, "\t");
    ter.setConf(conf);
    ter.initialize(null, tac);
    assertEquals(ter.getCurrentSourceId(), new Text("1"));
    assertEquals(ter.getCurrentEdge().getTargetVertexId(), new Text("2"));
    assertEquals(ter.getCurrentEdge().getValue(), new LongWritable(1L));

}

From source file:com.soteradefense.dga.io.formats.DGALongEdgeValueInputFormatTest.java

License:Apache License

@Test
public void testInputParserWithCustomWeight() throws IOException, InterruptedException {
    String input = "1,2,10";
    when(rr.getCurrentValue()).thenReturn(new Text(input));
    EdgeReader ter = createEdgeReader(rr);
    ter.setConf(conf);
    ter.initialize(null, tac);
    assertEquals(ter.getCurrentSourceId(), new Text("1"));
    assertEquals(ter.getCurrentEdge().getTargetVertexId(), new Text("2"));
    assertEquals(ter.getCurrentEdge().getValue(), new LongWritable(10L));

}

From source file:com.soteradefense.dga.io.formats.DGALongEdgeValueInputFormatTest.java

License:Apache License

@Test
public void testInputParserWithCustomWeightAndOverriddenSeparator() throws IOException, InterruptedException {
    String input = "1\t2\t10";
    when(rr.getCurrentValue()).thenReturn(new Text(input));
    EdgeReader ter = createEdgeReader(rr);
    conf.set(LINE_TOKENIZE_VALUE, "\t");
    ter.setConf(conf);
    ter.initialize(null, tac);
    assertEquals(ter.getCurrentSourceId(), new Text("1"));
    assertEquals(ter.getCurrentEdge().getTargetVertexId(), new Text("2"));
    assertEquals(ter.getCurrentEdge().getValue(), new LongWritable(10L));

}

From source file:com.soteradefense.dga.io.formats.DGALongEdgeValueInputFormatTest.java

License:Apache License

@Test
public void testInputParserWithDelimiterInData() throws IOException, InterruptedException {
    String input = "te\\tst@test.com\tanother@test.com\t10";
    when(rr.getCurrentValue()).thenReturn(new Text(input));
    EdgeReader ter = createEdgeReader(rr);
    conf.set(LINE_TOKENIZE_VALUE, "\t");
    ter.setConf(conf);
    ter.initialize(null, tac);
    assertEquals(ter.getCurrentSourceId(), new Text("te\\tst@test.com"));
    assertEquals(ter.getCurrentEdge().getTargetVertexId(), new Text("another@test.com"));
    assertEquals(ter.getCurrentEdge().getValue(), new LongWritable(10L));

}