Example usage for org.apache.hadoop.fs FSDataOutputStream getPos

List of usage examples for org.apache.hadoop.fs FSDataOutputStream getPos

Introduction

On this page you can find example usages of org.apache.hadoop.fs FSDataOutputStream getPos.

Prototype

public long getPos() 

Document

Get the current position in the output stream.
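
Before the project examples below, here is a minimal, self-contained sketch of the typical getPos() pattern: record the position before writing a record, then use the difference to get the record's length. The path used here is a placeholder.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class GetPosExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/tmp/getpos-example"); // placeholder path

        FSDataOutputStream out = fs.create(path);
        try {
            long start = out.getPos(); // position before this record (0 for a new file)
            out.writeUTF("first record");
            long recordLength = out.getPos() - start; // bytes just written
            System.out.println("record starts at " + start + ", length " + recordLength);
        } finally {
            out.close();
        }
    }
}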

Usage

From source file:FormatStorageBasicTest.java

License:Open Source License

public void testPersistentSegment() {
    try {
        IndexInfo info = new IndexInfo();
        info.offset = 0;

        Head head = new Head();
        head.setVar((byte) 1);
        Configuration conf = new Configuration();
        FormatDataFile fd = new FormatDataFile(conf);
        fd.create(prefix + "testPersistentSegment_tmp", head);

        String fileName = prefix + "testPersistentSegment";
        Path path = new Path(fileName);
        FileSystem fs = FileSystem.get(new Configuration());
        FSDataOutputStream out = fs.create(path);

        fd.setOut(out);
        Segment segment = new Segment(info, fd);

        int unitSize = 100;
        for (int i = 0; i < unitSize; i++) {
            IndexInfo indexInfo = new IndexInfo();
            indexInfo.offset = i * 100;
            indexInfo.len = 77;
            indexInfo.beginLine = (i + 1) * 100;
            indexInfo.endLine = (i + 2) * 100;
            indexInfo.idx = i;

            Unit unit = new Unit(indexInfo, segment);
            addRecord2Unit(unit, 100);
            unit.beginLine = (i + 1) * 100;
            unit.endLine = (i + 2) * 100;
            segment.addUnit(unit);
            if (unit.len() != 100 * full7chunkLen + 100 * 8 + ConstVar.DataChunkMetaOffset) {
                fail("error unit.len:" + unit.len());
            }
        }

        segment.recordNum = 234;
        segment.setBeginLine(1);
        segment.setEndLine(235);

        segment.persistent(out);

        if (out.getPos() != fd.confSegmentSize()) {
            System.out.println("seg.len:" + segment.len() + "seg.remain:" + segment.remain() + "index.len"
                    + segment.unitIndex().len());
            fail("error pos:" + out.getPos());
        }
        out.close();

        int unitlen = full7chunkLen * 100 + 8 * 100 + ConstVar.DataChunkMetaOffset;
        FSDataInputStream in = fs.open(path);

        in.seek(segment.lineIndexOffset());

        info.offset = 0;
        info.len = segment.len();
        fd.setWorkStatus(ConstVar.WS_Read);
        Segment segment2 = new Segment(info, fd);
        segment2.unpersistentUnitIndex(in);
        if (segment2.recordNum() != 234) {
            fail("error recordnum:" + segment2.recordNum());
        }
        if (segment2.unitNum() != unitSize) {
            fail("error unitNum:" + segment2.unitNum());
        }
        if (segment2.keyIndexOffset() != -1) {
            fail("error key index offset:" + segment2.keyIndexOffset());
        }
        if (segment2.lineIndexOffset() != unitlen * unitSize) {
            fail("error line index offset:" + segment2.lineIndexOffset());
        }
        if (segment2.units().size() != unitSize) {
            fail("error units.size:" + segment2.units().size());
        }

        UnitIndex index = segment2.unitIndex();
        if (index.lineIndexInfos().size() != unitSize) {
            fail("error line unit index size:" + index.lineIndexInfos().size());
        }
        if (index.keyIndexInfos().size() != 0) {
            fail("error key unit index size:" + index.keyIndexInfos().size());
        }

        for (int i = 0; i < unitSize; i++) {
            IndexInfo ii = index.lineIndexInfos().get(i);
            if (ii.beginLine() != (1 + i) * 100) {
                fail("error beginline:" + ii.beginLine() + " i:" + i);
            }
            if (ii.endLine() != (2 + i) * 100) {
                fail("error end line:" + ii.endLine() + " i:" + i);
            }
            if (ii.offset() != i * 100) {
                fail("error offset:" + ii.offset() + " i:" + i);
            }
            if (ii.len() != unitlen) {
                fail("error len:" + ii.len() + " i:" + i);
            }
            if (ii.idx() != i) {
                fail("error idx:" + ii.idx() + " i:" + i);
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
        fail("get IOException:" + e.getMessage());
    } catch (Exception e) {
        e.printStackTrace();
        fail("get Exception:" + e.getMessage());
    }
}

From source file:cn.uway.util.apache.parquet.hadoop.ParquetFileWriter.java

License:Apache License

private static void serializeFooter(ParquetMetadata footer, FSDataOutputStream out) throws IOException {
    long footerIndex = out.getPos();
    parquet.format.FileMetaData parquetMetadata = metadataConverter.toParquetMetadata(CURRENT_VERSION, footer);
    writeFileMetaData(parquetMetadata, out);
    if (DEBUG)
        LOG.debug(out.getPos() + ": footer length = " + (out.getPos() - footerIndex));
    BytesUtils.writeIntLittleEndian(out, (int) (out.getPos() - footerIndex));
    out.write(MAGIC);
}

From source file:com.bah.lucene.hdfs.HdfsDirectory.java

License:Apache License

@Override
public IndexOutput createOutput(String name, IOContext context) throws IOException {
    LOG.debug(MessageFormat.format("createOutput [{0}] [{1}] [{2}]", name, context, _path));
    if (fileExists(name)) {
        throw new IOException("File [" + name + "] already exists found.");
    }/*from  www . j ava  2s . com*/
    final FSDataOutputStream outputStream = openForOutput(name);
    return new BufferedIndexOutput() {

        @Override
        public long length() throws IOException {
            return outputStream.getPos();
        }

        @Override
        protected void flushBuffer(byte[] b, int offset, int len) throws IOException {
            outputStream.write(b, offset, len);
        }

        @Override
        public void close() throws IOException {
            super.close();
            outputStream.close();
        }

        @Override
        public void seek(long pos) throws IOException {
            throw new IOException("seeks not allowed on IndexOutputs.");
        }
    };
}

From source file:com.cloudera.hadoop.hdfs.nfs.nfs4.attrs.SizeHandler.java

License:Apache License

public boolean set(NFS4Handler server, Session session, FileSystem fs, FileStatus fileStatus, StateID stateID,
        Size size) throws NFS4Exception, IOException {
    // we only support truncating files to zero length
    if (size.getSize() != 0) {
        throw new UnsupportedOperationException("Setting size to a non-zero value is not supported; only truncation to zero.");
    }
    synchronized (mProcessedRequests) {
        if (mProcessedRequests.containsKey(session.getXID())) {
            return true;
        }
        mProcessedRequests.put(session.getXID(), size);
        // open the file, overwriting if needed. Creation of an empty file with
        // overwrite on is the only way we can support truncating files
        FSDataOutputStream out = server.forWrite(stateID, fs, session.getCurrentFileHandle(), true);
        if (out.getPos() != 0) {
            stateID = server.close(session.getSessionID(), stateID, stateID.getSeqID(),
                    session.getCurrentFileHandle());
            out = server.forWrite(stateID, fs, session.getCurrentFileHandle(), true);
        }
        out.sync();
        return true;

    }
}

From source file:com.cloudera.hadoop.hdfs.nfs.nfs4.NFS4Handler.java

License:Apache License

/**
 * Files open for write will have an unreliable length according to the name
 * node. As such, this call intercepts calls for open files and returns the
 * length of the file as reported by the output stream.
 *
 * @param status the file status to check
 * @return the current file length including data written to the output
 * stream
 * @throws NFS4Exception if the getPos() call of the output stream throws
 * IOException
 */
public long getFileSize(FileStatus status) throws NFS4Exception {
    FileHolder fileHolder = mPathMap.get(realPath(status.getPath()));
    if (fileHolder != null) {
        OpenFile<FSDataOutputStream> file = fileHolder.getFSDataOutputStream();
        if (file != null) {
            try {
                FSDataOutputStream out = file.get();
                return out.getPos();
            } catch (IOException e) {
                throw new NFS4Exception(NFS4ERR_SERVERFAULT, e);
            }
        }
    }
    return status.getLen();
}

From source file:com.datatorrent.contrib.hdht.hfile.HFileImpl.java

License:Open Source License

/**
 * Creates and returns a new HFile writer.
 * @param bucketKey
 * @param fileName
 * @return The file writer.
 * @throws IOException
 */
@Override
public HDSFileWriter getWriter(long bucketKey, String fileName) throws IOException {
    final FSDataOutputStream fsdos = getOutputStream(bucketKey, fileName);
    final CacheConfig cacheConf = getCacheConfig();
    final KeyValue.KVComparator comparator = getKVComparator();
    final HFileContext context = getContext();
    final Configuration conf = getConfiguration();
    final HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf).withOutputStream(fsdos)
            .withComparator(comparator).withFileContext(context).create();
    ComparatorAdaptor.COMPARATOR.set(this.comparator);

    return new HDSFileWriter() {

        private long bytesAppendedCounter = 0;

        @Override
        public void append(byte[] key, byte[] value) throws IOException {
            bytesAppendedCounter += (key.length + value.length);
            writer.append(key, value);
        }

        @Override
        public long getBytesWritten() throws IOException {
            // Not accurate below HFile block size resolution due to flushing (and compression)
            // HFile block size is available via writer.getFileContext().getBlocksize()
            // bytesAppendedCounter is used to produce non-zero counts until first flush
            return (fsdos.getPos() <= 0) ? bytesAppendedCounter : fsdos.getPos();
        }

        @Override
        public void close() throws IOException {
            writer.close();
            fsdos.close();
            ComparatorAdaptor.COMPARATOR.remove();
        }
    };

}

From source file:com.datatorrent.lib.bucket.HdfsBucketStore.java

License:Open Source License

/**
 * {@inheritDoc}
 */
@Override
public void storeBucketData(long window, long timestamp, Map<Integer, Map<Object, T>> data) throws IOException {
    Path dataFilePath = new Path(bucketRoot + PATH_SEPARATOR + window);
    FileSystem fs = FileSystem.newInstance(dataFilePath.toUri(), configuration);
    FSDataOutputStream dataStream = fs.create(dataFilePath);

    Output output = new Output(dataStream);
    try {
        long offset = 0;
        for (int bucketIdx : data.keySet()) {
            Map<Object, T> bucketData = data.get(bucketIdx);

            if (eventKeyClass == null) {
                Map.Entry<Object, T> eventEntry = bucketData.entrySet().iterator().next();
                eventKeyClass = eventEntry.getKey().getClass();
                if (!writeEventKeysOnly) {
                    @SuppressWarnings("unchecked")
                    Class<T> lEventClass = (Class<T>) eventEntry.getValue().getClass();
                    eventClass = lEventClass;
                }
            }
            // Write the size of the data and then the data itself
            dataStream.writeInt(bucketData.size());
            for (Map.Entry<Object, T> entry : bucketData.entrySet()) {
                writeSerde.writeObject(output, entry.getKey());

                if (!writeEventKeysOnly) {
                    int posLength = output.position();
                    output.writeInt(0); //temporary place holder
                    writeSerde.writeObject(output, entry.getValue());
                    int posValue = output.position();
                    int valueLength = posValue - posLength - 4;
                    output.setPosition(posLength);
                    output.writeInt(valueLength);
                    output.setPosition(posValue);
                }
            }
            output.flush();
            if (bucketPositions[bucketIdx] == null) {
                bucketPositions[bucketIdx] = Maps.newHashMap();
            }
            windowToBuckets.put(window, bucketIdx);
            windowToTimestamp.put(window, timestamp);
            synchronized (bucketPositions[bucketIdx]) {
                bucketPositions[bucketIdx].put(window, offset);
            }
            offset = dataStream.getPos();
        }
    } finally {
        output.close();
        dataStream.close();
        fs.close();
    }
}

From source file:com.ricemap.spateDB.core.RTree.java

License:Apache License

/**
 * Builds the RTree given a serialized list of elements. It uses the given
 * stockObject to deserialize these elements and build the tree. Also writes
 * the created tree to the disk directly.
 *
 * @param elements
 *            - serialization of elements to be written
 * @param offset
 *            - index of the first element to use in the elements array
 * @param len
 *            - number of bytes to use from the elements array
 * @param bytesAvailable
 *            - size available (in bytes) to store the tree structures
 * @param dataOut
 *            - an output to use for writing the tree to
 * @param fast_sort
 *            - setting this to <code>true</code> allows the method to run
 *            faster by materializing the offset of each element in the list
 *            which speeds up the comparison. However, this requires an
 *            additional 16 bytes per element. So, for each 1M elements, the
 *            method will require an additional 16 M bytes (approximately).
 */
public void bulkLoadWrite(final byte[] element_bytes, final int offset, final int len, final int degree,
        DataOutput dataOut, final boolean fast_sort, final boolean columnarStorage) {
    try {
        columnar = columnarStorage;
        //TODO: the order of fields should be stable under Oracle JVM, but not guaranteed
        Field[] fields = stockObject.getClass().getDeclaredFields();

        // Count number of elements in the given text
        int i_start = offset;
        final Text line = new Text();
        while (i_start < offset + len) {
            int i_end = skipToEOL(element_bytes, i_start);
            // Extract the line without end of line character
            line.set(element_bytes, i_start, i_end - i_start - 1);
            stockObject.fromText(line);

            elementCount++;
            i_start = i_end;
        }
        LOG.info("Bulk loading an RTree with " + elementCount + " elements");

        // It turns out findBestDegree returns the best degree when the whole
        // tree is loaded into memory while processed. However, as current
        // algorithms process the tree while it is on disk, a higher degree
        // should be selected such that a node fits one file block (assumed
        // to be 4K).
        // final int degree = findBestDegree(bytesAvailable, elementCount);
        LOG.info("Writing an RTree with degree " + degree);

        int height = Math.max(1, (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
        int leafNodeCount = (int) Math.pow(degree, height - 1);
        if (elementCount < 2 * leafNodeCount && height > 1) {
            height--;
            leafNodeCount = (int) Math.pow(degree, height - 1);
        }
        int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
        int nonLeafNodeCount = nodeCount - leafNodeCount;

        // Keep track of the offset of each element in the text
        final int[] offsets = new int[elementCount];
        final int[] ids = new int[elementCount];
        final double[] ts = fast_sort ? new double[elementCount] : null;
        final double[] xs = fast_sort ? new double[elementCount] : null;
        final double[] ys = fast_sort ? new double[elementCount] : null;

        //initialize columnar data output
        ByteArrayOutputStream index_bos = new ByteArrayOutputStream();
        DataOutputStream index_dos = new DataOutputStream(index_bos);
        ByteArrayOutputStream[] bos = new ByteArrayOutputStream[fields.length];
        DataOutputStream[] dos = new DataOutputStream[fields.length];
        for (int i = 0; i < bos.length; i++) {
            bos[i] = new ByteArrayOutputStream();
            dos[i] = new DataOutputStream(bos[i]);
        }

        i_start = offset;
        line.clear();
        for (int i = 0; i < elementCount; i++) {
            offsets[i] = i_start;
            ids[i] = i;
            int i_end = skipToEOL(element_bytes, i_start);
            if (xs != null) {
                // Extract the line without the end of line character
                line.set(element_bytes, i_start, i_end - i_start - 1);
                stockObject.fromText(line);
                // Sample center of the shape
                ts[i] = (stockObject.getMBR().t1 + stockObject.getMBR().t2) / 2;
                xs[i] = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                ys[i] = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;

                //build columnar storage
                if (stockObject instanceof Point3d) {
                    index_dos.writeDouble(ts[i]);
                    index_dos.writeDouble(xs[i]);
                    index_dos.writeDouble(ys[i]);
                } else {
                    throw new RuntimeException("Indexing non-point shape with RTREE is not supported yet");
                }

                for (int j = 0; j < fields.length; j++) {
                    if (fields[j].getType().equals(Integer.TYPE)) {
                        dos[j].writeInt(fields[j].getInt(stockObject));
                    } else if (fields[j].getType().equals(Double.TYPE)) {
                        dos[j].writeDouble(fields[j].getDouble(stockObject));
                    } else if (fields[j].getType().equals(Long.TYPE)) {
                        dos[j].writeLong(fields[j].getLong(stockObject));
                    } else {
                        continue;
                        //throw new RuntimeException("Field type is not supported yet");
                    }
                }
            }
            i_start = i_end;
        }
        index_dos.close();
        for (int i = 0; i < dos.length; i++) {
            dos[i].close();
        }

        /** A struct to store information about a split */
        class SplitStruct extends Prism {
            /** Start and end index for this split */
            int index1, index2;
            /** Direction of this split */
            byte direction;
            /** Index of first element on disk */
            int offsetOfFirstElement;

            static final byte DIRECTION_T = 0;
            static final byte DIRECTION_X = 1;
            static final byte DIRECTION_Y = 2;

            SplitStruct(int index1, int index2, byte direction) {
                this.index1 = index1;
                this.index2 = index2;
                this.direction = direction;
            }

            @Override
            public void write(DataOutput out) throws IOException {
                if (columnarStorage)
                    out.writeInt(index1);
                else
                    out.writeInt(offsetOfFirstElement);
                super.write(out);
            }

            void partition(Queue<SplitStruct> toBePartitioned) {
                IndexedSortable sortableT;
                IndexedSortable sortableX;
                IndexedSortable sortableY;

                if (fast_sort) {
                    // Use the materialized ts[], xs[] and ys[] arrays to do the comparisons
                    sortableT = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ts
                            double tempt = ts[i];
                            ts[i] = ts[j];
                            ts[j] = tempt;
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (ts[i] < ts[j])
                                return -1;
                            if (ts[i] > ts[j])
                                return 1;
                            return 0;
                        }
                    };
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ts
                            double tempt = ts[i];
                            ts[i] = ts[j];
                            ts[j] = tempt;
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (xs[i] < xs[j])
                                return -1;
                            if (xs[i] > xs[j])
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ts
                            double tempt = ts[i];
                            ts[i] = ts[j];
                            ts[j] = tempt;
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (ys[i] < ys[j])
                                return -1;
                            if (ys[i] > ys[j])
                                return 1;
                            return 0;
                        }
                    };
                } else {
                    // No materialized xs and ys; always deserialize objects to compare
                    sortableT = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            // Get end of line
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double ti = (stockObject.getMBR().t1 + stockObject.getMBR().t2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double tj = (stockObject.getMBR().t1 + stockObject.getMBR().t2) / 2;
                            if (ti < tj)
                                return -1;
                            if (ti > tj)
                                return 1;
                            return 0;
                        }
                    };
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            // Get end of line
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double xi = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double xj = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                            if (xi < xj)
                                return -1;
                            if (xi > xj)
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double yi = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double yj = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
                            if (yi < yj)
                                return -1;
                            if (yi > yj)
                                return 1;
                            return 0;
                        }
                    };
                }

                final IndexedSorter sorter = new QuickSort();

                final IndexedSortable[] sortables = new IndexedSortable[3];
                sortables[SplitStruct.DIRECTION_T] = sortableT;
                sortables[SplitStruct.DIRECTION_X] = sortableX;
                sortables[SplitStruct.DIRECTION_Y] = sortableY;

                sorter.sort(sortables[direction], index1, index2);

                // Partition into degree partitions (equally) and
                // create a SplitStruct for each partition
                int i1 = index1;
                for (int iSplit = 0; iSplit < degree; iSplit++) {
                    int i2 = index1 + (index2 - index1) * (iSplit + 1) / degree;
                    SplitStruct newSplit;
                    if (direction == 0) {
                        newSplit = new SplitStruct(i1, i2, (byte) 1);
                    } else if (direction == 1) {
                        newSplit = new SplitStruct(i1, i2, (byte) 2);
                    } else {
                        newSplit = new SplitStruct(i1, i2, (byte) 0);
                    }
                    toBePartitioned.add(newSplit);
                    i1 = i2;
                }
            }
        }

        // All nodes stored in level-order traversal
        Vector<SplitStruct> nodes = new Vector<SplitStruct>();
        final Queue<SplitStruct> toBePartitioned = new LinkedList<SplitStruct>();
        toBePartitioned.add(new SplitStruct(0, elementCount, SplitStruct.DIRECTION_X));

        while (!toBePartitioned.isEmpty()) {
            SplitStruct split = toBePartitioned.poll();
            if (nodes.size() < nonLeafNodeCount) {
                // This is a non-leaf
                split.partition(toBePartitioned);
            }
            nodes.add(split);
        }

        if (nodes.size() != nodeCount) {
            throw new RuntimeException(
                    "Expected node count: " + nodeCount + ". Real node count: " + nodes.size());
        }

        // Now we have our data sorted in the required order. Start building
        // the tree.
        // Store the offset of each leaf node in the tree
        FSDataOutputStream fakeOut = new FSDataOutputStream(new java.io.OutputStream() {
            // Null output stream
            @Override
            public void write(int b) throws IOException {
                // Do nothing
            }

            @Override
            public void write(byte[] b, int off, int len) throws IOException {
                // Do nothing
            }

            @Override
            public void write(byte[] b) throws IOException {
                // Do nothing
            }
        }, null, TreeHeaderSize + nodes.size() * NodeSize);
        for (int i_leaf = nonLeafNodeCount, i = 0; i_leaf < nodes.size(); i_leaf++) {
            nodes.elementAt(i_leaf).offsetOfFirstElement = (int) fakeOut.getPos();
            if (i != nodes.elementAt(i_leaf).index1)
                throw new RuntimeException();
            double t1, x1, y1, t2, x2, y2;

            // Initialize MBR to first object
            int eol = skipToEOL(element_bytes, offsets[i]);
            fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
            stockObject.fromText(line);
            Prism mbr = stockObject.getMBR();
            t1 = mbr.t1;
            x1 = mbr.x1;
            y1 = mbr.y1;
            t2 = mbr.t2;
            x2 = mbr.x2;
            y2 = mbr.y2;
            i++;

            while (i < nodes.elementAt(i_leaf).index2) {
                eol = skipToEOL(element_bytes, offsets[i]);
                fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
                line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                stockObject.fromText(line);
                mbr = stockObject.getMBR();
                if (mbr.t1 < t1)
                    t1 = mbr.t1;
                if (mbr.x1 < x1)
                    x1 = mbr.x1;
                if (mbr.y1 < y1)
                    y1 = mbr.y1;
                if (mbr.t2 > t2)
                    t2 = mbr.t2;
                if (mbr.x2 > x2)
                    x2 = mbr.x2;
                if (mbr.y2 > y2)
                    y2 = mbr.y2;
                i++;
            }
            nodes.elementAt(i_leaf).set(t1, x1, y1, t2, x2, y2);
        }
        fakeOut.close();
        fakeOut = null;

        // Calculate MBR and offsetOfFirstElement for non-leaves
        for (int i_node = nonLeafNodeCount - 1; i_node >= 0; i_node--) {
            int i_first_child = i_node * degree + 1;
            nodes.elementAt(i_node).offsetOfFirstElement = nodes.elementAt(i_first_child).offsetOfFirstElement;
            int i_child = 0;
            Prism mbr;
            mbr = nodes.elementAt(i_first_child + i_child);
            double t1 = mbr.t1;
            double x1 = mbr.x1;
            double y1 = mbr.y1;
            double t2 = mbr.t2;
            double x2 = mbr.x2;
            double y2 = mbr.y2;
            i_child++;

            while (i_child < degree) {
                mbr = nodes.elementAt(i_first_child + i_child);
                if (mbr.t1 < t1)
                    t1 = mbr.t1;
                if (mbr.x1 < x1)
                    x1 = mbr.x1;
                if (mbr.y1 < y1)
                    y1 = mbr.y1;
                if (mbr.t2 > t2)
                    t2 = mbr.t2;
                if (mbr.x2 > x2)
                    x2 = mbr.x2;
                if (mbr.y2 > y2)
                    y2 = mbr.y2;
                i_child++;
            }
            nodes.elementAt(i_node).set(t1, x1, y1, t2, x2, y2);
        }

        // Start writing the tree
        // write tree header (including size)
        // Total tree size (== total bytes written - 8 bytes for the size itself)
        dataOut.writeInt(TreeHeaderSize + NodeSize * nodeCount + len);
        // Tree height
        dataOut.writeInt(height);
        // Degree
        dataOut.writeInt(degree);
        dataOut.writeInt(elementCount);

        //isColumnar
        dataOut.writeInt(columnarStorage ? 1 : 0);

        // write nodes
        for (SplitStruct node : nodes) {
            node.write(dataOut);
        }
        // write elements
        if (columnarStorage) {
            byte[] index_bs = index_bos.toByteArray();
            byte[][] bss = new byte[bos.length][];
            for (int i = 0; i < bss.length; i++) {
                bss[i] = bos[i].toByteArray();
            }
            for (int element_i = 0; element_i < elementCount; element_i++) {
                //int eol = skipToEOL(element_bytes, offsets[element_i]);
                //dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
                dataOut.write(index_bs, ids[element_i] * IndexUnitSize, IndexUnitSize);
            }

            for (int i = 0; i < fields.length; i++) {
                int fieldSize = 0;
                if (fields[i].getType().equals(Integer.TYPE)) {
                    fieldSize = 4;
                } else if (fields[i].getType().equals(Long.TYPE)) {
                    fieldSize = 8;
                } else if (fields[i].getType().equals(Double.TYPE)) {
                    fieldSize = 8;
                } else {
                    //throw new RuntimeException("Unsupported field type: " + fields[i].getType().getName());
                    continue;
                }
                for (int element_i = 0; element_i < elementCount; element_i++) {
                    //int eol = skipToEOL(element_bytes, offsets[element_i]);
                    //dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
                    dataOut.write(bss[i], ids[element_i] * fieldSize, fieldSize);
                }
            }
        } else {
            for (int element_i = 0; element_i < elementCount; element_i++) {
                int eol = skipToEOL(element_bytes, offsets[element_i]);
                dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
            }
        }

    } catch (IOException e) {
        e.printStackTrace();
    } catch (IllegalArgumentException e) {
        e.printStackTrace();
    } catch (IllegalAccessException e) {
        e.printStackTrace();
    }
}

From source file:com.rockstor.core.io.ChunkWriter.java

License:Apache License

private static void align_write(FSDataOutputStream out) throws IOException {
    // pad the stream so the next write starts on an 8-byte boundary
    long cur_offset = out.getPos();
    int padding_bytes = (int) (cur_offset & ALIGN_MASK);
    if (padding_bytes != 0) {
        out.write(ALGIN_BUF, 0, ALIGN_BYTES - padding_bytes);
    }
}
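
For example, assuming ALIGN_BYTES is 8 and ALIGN_MASK is 7 (which the 8-byte comment implies), a getPos() of 13 gives 13 & 7 = 5, so 8 - 5 = 3 padding bytes are written and the stream lands on the boundary at offset 16.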

From source file:com.rockstor.core.io.ChunkWriter.java

License:Apache License

public static void writeHeader(Chunk chunk, FSDataOutputStream output)
        throws IllegalArgumentException, IOException {
    if (!chunk.valid()) {
        throw new IllegalArgumentException(chunk.toString() + ", some fields of the chunk are invalid!");
    }

    align_write(output);

    chunk.setOffset(output.getPos());
    output.write(chunk.getRockID());
    output.write(chunk.getChunkPrefix());
    output.writeLong(chunk.getTimestamp());
    output.writeLong(chunk.getSize());
    output.writeShort(chunk.getPartID());
    output.writeShort(chunk.getSeqID());
}