Example usage for java.nio IntBuffer wrap

Introduction

On this page you can find example usage for java.nio IntBuffer wrap.

Prototype

public static IntBuffer wrap(int[] array) 

Document

Creates a new int buffer by wrapping the given int array.
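
Because the returned buffer is backed by the given array, writes through the buffer are visible in the array (and vice versa); the new buffer's position is zero and its capacity and limit equal the array's length. A minimal sketch illustrating this behavior (the class name WrapDemo is just for illustration):

import java.nio.IntBuffer;
import java.util.Arrays;

public class WrapDemo {
    public static void main(String[] args) {
        int[] data = { 10, 20, 30 };
        IntBuffer buf = IntBuffer.wrap(data);       // position 0, limit 3, backed by data

        buf.put(0, 99);                             // absolute put writes through to the backing array
        System.out.println(Arrays.toString(data));  // [99, 20, 30]
        System.out.println(buf.hasArray());         // true: the buffer exposes its backing array
    }
}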

Usage

From source file:Main.java

public static void main(String[] args) {
    IntBuffer bb = IntBuffer.wrap(new int[] { 0, 1, 2, 3, 4, 5, 6 });
    bb.put(100); // relative put at position 0 overwrites the first element
    System.out.println(Arrays.toString(bb.array())); // prints [100, 1, 2, 3, 4, 5, 6]
}

From source file:Main.java

private static IntBuffer getImageAsARGBIntBuffer(BufferedImage image) {
    DataBuffer buffer = image.getRaster().getDataBuffer();

    if (buffer instanceof DataBufferInt) {
        return IntBuffer.wrap(((DataBufferInt) buffer).getData());
    } else if (buffer instanceof DataBufferByte) {
        return ByteBuffer.wrap(((DataBufferByte) buffer).getData()).order(ByteOrder.BIG_ENDIAN).asIntBuffer();
    } else {
        int width = image.getWidth();
        int height = image.getHeight();
        int[] pixels = new int[width * height];
        image.getRGB(0, 0, width, height, pixels, 0, width);
        return IntBuffer.wrap(pixels);
    }
}
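
A hedged usage sketch of the helper above, assuming the image is loaded with javax.imageio.ImageIO (the file name is illustrative and IOException handling is omitted):

BufferedImage image = ImageIO.read(new File("input.png"));
IntBuffer pixels = getImageAsARGBIntBuffer(image);
// Inspect the first pixel through the wrapped buffer
System.out.printf("%d x %d, first pixel 0x%08X%n", image.getWidth(), image.getHeight(), pixels.get(0));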

From source file:Main.java

public static void saveRgb2Bitmap(IntBuffer buf, String filePath, int width, int height) {
    final int[] pixelMirroredArray = new int[width * height];
    Log.d("TryOpenGL", "Creating " + filePath);
    BufferedOutputStream bos = null;
    try {
        int[] pixelArray = buf.array();
        // flip vertically (mirror about the x axis) because the source rows are stored bottom-up
        for (int i = 0; i < height; i++) {
            for (int j = 0; j < width; j++) {
                pixelMirroredArray[(height - i - 1) * width + j] = pixelArray[i * width + j];
            }
        }
        bos = new BufferedOutputStream(new FileOutputStream(filePath));
        Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bmp.copyPixelsFromBuffer(IntBuffer.wrap(pixelMirroredArray));
        bmp.compress(Bitmap.CompressFormat.JPEG, 90, bos);
        bmp.recycle();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (bos != null) {
            try {
                bos.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
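
A hedged sketch of how the IntBuffer passed to this helper is typically produced, assuming an Android GL context and the android.opengl.GLES20 bindings (viewport size and output path are illustrative):

int width = 1280, height = 720;           // viewport size (illustrative)
int[] raw = new int[width * height];
IntBuffer pixels = IntBuffer.wrap(raw);   // array-backed buffer at position 0
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
saveRgb2Bitmap(pixels, "/sdcard/capture.jpg", width, height);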

From source file:org.jcodec.codecs.mjpeg.JpegDecoder.java

public DecodedImage decode(CodedImage coded, DecodedImage decoded) throws IOException {
    IntBuffer result = IntBuffer.wrap(decoded.getPixels());

    JPEGBitStream jbs = new JPEGBitStream(coded);
    int blockH = coded.frame.getHmax() << 3;
    int blockV = coded.frame.getVmax() << 3;
    int width = coded.getWidth();
    int height = coded.getHeight();
    int alignedWidth = align(width, blockH);
    int alignedHeight = align(height, blockV);
    int xBlocks = alignedWidth / blockH;
    int xxBlocks = width / blockH;
    int yBlocks = alignedHeight / blockV;
    int yyBlocks = height / blockV;

    int dWidth = alignedWidth - width;
    int dHeight = alignedHeight - height;

    MCU block = MCU.create(coded.frame);
    for (int by = 0; by < yyBlocks; by++) {
        for (int bx = 0; bx < xxBlocks; bx++) {
            readAndDecode(coded, jbs, block);
            putBlock(result, width, block, bx, by);
        }

        for (int bx = xxBlocks; bx < xBlocks; bx++) {
            readAndDecode(coded, jbs, block);
            putBlock(result, width, block, bx, by, blockH - dWidth, blockV);
        }
    }

    for (int by = yyBlocks; by < yBlocks; by++) {
        for (int bx = 0; bx < xxBlocks; bx++) {
            readAndDecode(coded, jbs, block);
            putBlock(result, width, block, bx, by, blockH, blockV - dHeight);
        }

        for (int bx = xxBlocks; bx < xBlocks; bx++) {
            readAndDecode(coded, jbs, block);
            putBlock(result, width, block, bx, by, blockH - dWidth, blockV - dHeight);
        }
    }

    return decoded;
}

From source file:org.jcodec.codecs.mjpeg.JpegDecoder.java

private static void decodeBlock(MCU block, QuantTable qLum, QuantTable qChrom) {

    zigzagDecodeAll(block.lum.data);
    zigzagDecodeAll(block.cb.data);
    zigzagDecodeAll(block.cr.data);

    dequantAll(block.lum.data, qLum);
    dequantAll(block.cb.data, qChrom);
    dequantAll(block.cr.data, qChrom);

    dct.decodeAll(block.lum.data);
    dct.decodeAll(block.cb.data);
    dct.decodeAll(block.cr.data);

    IntBuffer rgb = block.getRgb24();
    IntBuffer Cb = IntBuffer.wrap(block.cb.data[0]);
    IntBuffer Cr = IntBuffer.wrap(block.cr.data[0]);
    if (block.is420()) {
        IntBuffer y00 = IntBuffer.wrap(block.lum.data[0]);
        IntBuffer y01 = IntBuffer.wrap(block.lum.data[1]);
        IntBuffer y10 = IntBuffer.wrap(block.lum.data[2]);
        IntBuffer y11 = IntBuffer.wrap(block.lum.data[3]);

        for (int j = 0; j < 8; j++) {
            Cb.position((j & ~1) << 2);
            Cr.position((j & ~1) << 2);
            lineToRgb(y00, Cb, Cr, rgb);
            lineToRgb(y01, Cb, Cr, rgb);
        }
        for (int j = 8; j < 16; j++) {
            Cb.position((j & ~1) << 2);
            Cr.position((j & ~1) << 2);
            lineToRgb(y10, Cb, Cr, rgb);
            lineToRgb(y11, Cb, Cr, rgb);
        }
    } else if (block.is422()) {
        IntBuffer y00 = IntBuffer.wrap(block.lum.data[0]);
        IntBuffer y01 = IntBuffer.wrap(block.lum.data[1]);
        for (int j = 0; j < 8; j++) {
            Cb.position(j << 3);
            Cr.position(j << 3);
            lineToRgb(y00, Cb, Cr, rgb);
            lineToRgb(y01, Cb, Cr, rgb);
        }
    } else if (block.is444()) {
        IntBuffer Y = IntBuffer.wrap(block.lum.data[0]);
        while (rgb.hasRemaining()) {
            rgb.put(ImageConvert.ycbcr_to_rgb24(Y.get(), Cb.get(), Cr.get()));
        }
    } else {
        throw new IllegalStateException("unsupported MCU");
    }

}

From source file:org.goko.viewer.jogl.utils.render.internal.AbstractVboJoglRenderer.java

/** (inheritDoc)
 * @see org.goko.viewer.jogl.service.ICoreJoglRenderer#performDestroy(javax.media.opengl.GL3)
 */
@Override
public void performDestroy(GL3 gl) throws GkException {
    if (!isInitialized()) {
        return;
    }
    List<Integer> lstBuffers = new ArrayList<Integer>();

    if (useVerticesBuffer) {
        lstBuffers.add(verticesBufferObject);
    }
    if (useColorsBuffer) {
        lstBuffers.add(colorsBufferObject);
    }
    if (useUvsBuffer) {
        lstBuffers.add(uvsBufferObject);
    }

    IntBuffer buffers = IntBuffer.allocate(lstBuffers.size());
    for (Integer integer : lstBuffers) {
        buffers.put(integer);
    }
    buffers.rewind();
    gl.glDeleteBuffers(lstBuffers.size(), buffers);
    IntBuffer intBuffer = IntBuffer.wrap(new int[] { vertexArrayObject });
    gl.glDeleteVertexArrays(1, intBuffer);
}

From source file:io.druid.segment.IndexMergerV9.java

private void mergeIndexesAndWriteColumns(final List<IndexableAdapter> adapters,
        final ProgressIndicator progress, final Iterable<Rowboat> theRows,
        final LongColumnSerializer timeWriter, final ArrayList<GenericColumnSerializer> metWriters,
        final List<IntBuffer> rowNumConversions, final List<DimensionMerger> mergers) throws IOException {
    final String section = "walk through and merge rows";
    progress.startSection(section);
    long startTime = System.currentTimeMillis();

    int rowCount = 0;
    for (IndexableAdapter adapter : adapters) {
        int[] arr = new int[adapter.getNumRows()];
        Arrays.fill(arr, INVALID_ROW);
        rowNumConversions.add(IntBuffer.wrap(arr));
    }

    long time = System.currentTimeMillis();
    for (Rowboat theRow : theRows) {
        progress.progress();
        timeWriter.serialize(theRow.getTimestamp());

        final Object[] metrics = theRow.getMetrics();
        for (int i = 0; i < metrics.length; ++i) {
            metWriters.get(i).serialize(metrics[i]);
        }

        Object[] dims = theRow.getDims();
        for (int i = 0; i < dims.length; ++i) {
            DimensionMerger merger = mergers.get(i);
            if (merger.canSkip()) {
                continue;
            }
            merger.processMergedRow(dims[i]);
        }

        for (Map.Entry<Integer, TreeSet<Integer>> comprisedRow : theRow.getComprisedRows().entrySet()) {
            final IntBuffer conversionBuffer = rowNumConversions.get(comprisedRow.getKey());

            for (Integer rowNum : comprisedRow.getValue()) {
                while (conversionBuffer.position() < rowNum) {
                    conversionBuffer.put(INVALID_ROW);
                }
                conversionBuffer.put(rowCount);
            }
        }
        if ((++rowCount % 500000) == 0) {
            log.info("walked 500,000/%d rows in %,d millis.", rowCount, System.currentTimeMillis() - time);
            time = System.currentTimeMillis();
        }
    }
    for (IntBuffer rowNumConversion : rowNumConversions) {
        rowNumConversion.rewind();
    }
    log.info("completed walk through of %,d rows in %,d millis.", rowCount,
            System.currentTimeMillis() - startTime);
    progress.stopSection(section);
}

From source file:org.apache.druid.segment.IndexMergerV9.java

/**
 * Returns rowNumConversions if the fillRowNumConversions argument is true.
 */
@Nullable
private List<IntBuffer> mergeIndexesAndWriteColumns(final List<IndexableAdapter> adapters,
        final ProgressIndicator progress, final TimeAndDimsIterator timeAndDimsIterator,
        final GenericColumnSerializer timeWriter, final ArrayList<GenericColumnSerializer> metricWriters,
        final List<DimensionMergerV9> mergers, final boolean fillRowNumConversions) throws IOException {
    final String section = "walk through and merge rows";
    progress.startSection(section);
    long startTime = System.currentTimeMillis();

    List<IntBuffer> rowNumConversions = null;
    int rowCount = 0;
    if (fillRowNumConversions) {
        rowNumConversions = new ArrayList<>(adapters.size());
        for (IndexableAdapter adapter : adapters) {
            int[] arr = new int[adapter.getNumRows()];
            Arrays.fill(arr, INVALID_ROW);
            rowNumConversions.add(IntBuffer.wrap(arr));
        }
    }

    long time = System.currentTimeMillis();
    while (timeAndDimsIterator.moveToNext()) {
        progress.progress();
        TimeAndDimsPointer timeAndDims = timeAndDimsIterator.getPointer();
        timeWriter.serialize(timeAndDims.timestampSelector);

        for (int metricIndex = 0; metricIndex < timeAndDims.getNumMetrics(); metricIndex++) {
            metricWriters.get(metricIndex).serialize(timeAndDims.getMetricSelector(metricIndex));
        }

        for (int dimIndex = 0; dimIndex < timeAndDims.getNumDimensions(); dimIndex++) {
            DimensionMerger merger = mergers.get(dimIndex);
            if (merger.canSkip()) {
                continue;
            }
            merger.processMergedRow(timeAndDims.getDimensionSelector(dimIndex));
        }

        if (timeAndDimsIterator instanceof RowCombiningTimeAndDimsIterator) {
            RowCombiningTimeAndDimsIterator comprisedRows = (RowCombiningTimeAndDimsIterator) timeAndDimsIterator;

            for (int originalIteratorIndex = comprisedRows.nextCurrentlyCombinedOriginalIteratorIndex(
                    0); originalIteratorIndex >= 0; originalIteratorIndex = comprisedRows
                            .nextCurrentlyCombinedOriginalIteratorIndex(originalIteratorIndex + 1)) {

                IntBuffer conversionBuffer = rowNumConversions.get(originalIteratorIndex);
                int minRowNum = comprisedRows
                        .getMinCurrentlyCombinedRowNumByOriginalIteratorIndex(originalIteratorIndex);
                int maxRowNum = comprisedRows
                        .getMaxCurrentlyCombinedRowNumByOriginalIteratorIndex(originalIteratorIndex);

                for (int rowNum = minRowNum; rowNum <= maxRowNum; rowNum++) {
                    while (conversionBuffer.position() < rowNum) {
                        conversionBuffer.put(INVALID_ROW);
                    }
                    conversionBuffer.put(rowCount);
                }

            }

        } else if (timeAndDimsIterator instanceof MergingRowIterator) {
            RowPointer rowPointer = (RowPointer) timeAndDims;
            IntBuffer conversionBuffer = rowNumConversions.get(rowPointer.getIndexNum());
            int rowNum = rowPointer.getRowNum();
            while (conversionBuffer.position() < rowNum) {
                conversionBuffer.put(INVALID_ROW);
            }
            conversionBuffer.put(rowCount);
        } else {
            if (fillRowNumConversions) {
                throw new IllegalStateException(
                        "Filling row num conversions is supported only with RowCombining and Merging iterators");
            }
        }

        if ((++rowCount % 500000) == 0) {
            log.info("walked 500,000/%d rows in %,d millis.", rowCount, System.currentTimeMillis() - time);
            time = System.currentTimeMillis();
        }
    }
    if (rowNumConversions != null) {
        for (IntBuffer rowNumConversion : rowNumConversions) {
            rowNumConversion.rewind();
        }
    }
    log.info("completed walk through of %,d rows in %,d millis.", rowCount,
            System.currentTimeMillis() - startTime);
    progress.stopSection(section);
    return rowNumConversions;
}

From source file:com.metamx.druid.index.v1.IndexMerger.java

private static File makeIndexFiles(final List<IndexableAdapter> indexes, final File outDir,
        final ProgressIndicator progress, final List<String> mergedDimensions, final List<String> mergedMetrics,
        final Function<ArrayList<Iterable<Rowboat>>, Iterable<Rowboat>> rowMergerFn) throws IOException {
    Map<String, String> metricTypes = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
    for (IndexableAdapter adapter : indexes) {
        for (String metric : adapter.getAvailableMetrics()) {
            metricTypes.put(metric, adapter.getMetricType(metric));
        }
    }
    final Interval dataInterval;
    File v8OutDir = new File(outDir, "v8-tmp");
    v8OutDir.mkdirs();

    /*************  Main index.drd file **************/
    progress.progress();
    long startTime = System.currentTimeMillis();
    File indexFile = new File(v8OutDir, "index.drd");

    FileOutputStream fileOutputStream = null;
    FileChannel channel = null;
    try {
        fileOutputStream = new FileOutputStream(indexFile);
        channel = fileOutputStream.getChannel();
        channel.write(ByteBuffer.wrap(new byte[] { IndexIO.V8_VERSION }));

        GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.stringStrategy).writeToChannel(channel);
        GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.stringStrategy).writeToChannel(channel);

        DateTime minTime = new DateTime(Long.MAX_VALUE);
        DateTime maxTime = new DateTime(0l);

        for (IndexableAdapter index : indexes) {
            minTime = JodaUtils.minDateTime(minTime, index.getDataInterval().getStart());
            maxTime = JodaUtils.maxDateTime(maxTime, index.getDataInterval().getEnd());
        }

        dataInterval = new Interval(minTime, maxTime);
        serializerUtils.writeString(channel, String.format("%s/%s", minTime, maxTime));
    } finally {
        Closeables.closeQuietly(channel);
        channel = null;
        Closeables.closeQuietly(fileOutputStream);
        fileOutputStream = null;
    }
    IndexIO.checkFileSize(indexFile);
    log.info("outDir[%s] completed index.drd in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);

    /************* Setup Dim Conversions **************/
    progress.progress();
    startTime = System.currentTimeMillis();

    IOPeon ioPeon = new TmpFileIOPeon();
    ArrayList<FileOutputSupplier> dimOuts = Lists.newArrayListWithCapacity(mergedDimensions.size());
    Map<String, Integer> dimensionCardinalities = Maps.newHashMap();
    ArrayList<Map<String, IntBuffer>> dimConversions = Lists.newArrayListWithCapacity(indexes.size());

    for (IndexableAdapter index : indexes) {
        dimConversions.add(Maps.<String, IntBuffer>newHashMap());
    }

    for (String dimension : mergedDimensions) {
        final GenericIndexedWriter<String> writer = new GenericIndexedWriter<String>(ioPeon, dimension,
                GenericIndexed.stringStrategy);
        writer.open();

        List<Indexed<String>> dimValueLookups = Lists.newArrayListWithCapacity(indexes.size());
        DimValueConverter[] converters = new DimValueConverter[indexes.size()];
        for (int i = 0; i < indexes.size(); i++) {
            Indexed<String> dimValues = indexes.get(i).getDimValueLookup(dimension);
            if (dimValues != null) {
                dimValueLookups.add(dimValues);
                converters[i] = new DimValueConverter(dimValues);
            }
        }

        Iterable<String> dimensionValues = CombiningIterable.createSplatted(
                Iterables.transform(dimValueLookups, new Function<Indexed<String>, Iterable<String>>() {
                    @Override
                    public Iterable<String> apply(@Nullable Indexed<String> indexed) {
                        return Iterables.transform(indexed, new Function<String, String>() {
                            @Override
                            public String apply(@Nullable String input) {
                                return (input == null) ? "" : input;
                            }
                        });
                    }
                }), Ordering.<String>natural().nullsFirst());

        int count = 0;
        for (String value : dimensionValues) {
            value = value == null ? "" : value;
            writer.write(value);

            for (int i = 0; i < indexes.size(); i++) {
                DimValueConverter converter = converters[i];
                if (converter != null) {
                    converter.convert(value, count);
                }
            }

            ++count;
        }
        dimensionCardinalities.put(dimension, count);

        FileOutputSupplier dimOut = new FileOutputSupplier(IndexIO.makeDimFile(v8OutDir, dimension), true);
        dimOuts.add(dimOut);

        writer.close();
        serializerUtils.writeString(dimOut, dimension);
        ByteStreams.copy(writer.combineStreams(), dimOut);
        for (int i = 0; i < indexes.size(); ++i) {
            DimValueConverter converter = converters[i];
            if (converter != null) {
                dimConversions.get(i).put(dimension, converters[i].getConversionBuffer());
            }
        }

        ioPeon.cleanup();
    }
    log.info("outDir[%s] completed dim conversions in %,d millis.", v8OutDir,
            System.currentTimeMillis() - startTime);

    /************* Walk through data sets and merge them *************/
    progress.progress();
    startTime = System.currentTimeMillis();

    ArrayList<Iterable<Rowboat>> boats = Lists.newArrayListWithCapacity(indexes.size());

    for (int i = 0; i < indexes.size(); ++i) {
        final IndexableAdapter adapter = indexes.get(i);

        final int[] dimLookup = new int[mergedDimensions.size()];
        int count = 0;
        for (String dim : adapter.getAvailableDimensions()) {
            dimLookup[count] = mergedDimensions.indexOf(dim.toLowerCase());
            count++;
        }

        final int[] metricLookup = new int[mergedMetrics.size()];
        count = 0;
        for (String metric : adapter.getAvailableMetrics()) {
            metricLookup[count] = mergedMetrics.indexOf(metric);
            count++;
        }

        boats.add(new MMappedIndexRowIterable(
                Iterables.transform(indexes.get(i).getRows(), new Function<Rowboat, Rowboat>() {
                    @Override
                    public Rowboat apply(@Nullable Rowboat input) {
                        int[][] newDims = new int[mergedDimensions.size()][];
                        int j = 0;
                        for (int[] dim : input.getDims()) {
                            newDims[dimLookup[j]] = dim;
                            j++;
                        }

                        Object[] newMetrics = new Object[mergedMetrics.size()];
                        j = 0;
                        for (Object met : input.getMetrics()) {
                            newMetrics[metricLookup[j]] = met;
                            j++;
                        }

                        return new Rowboat(input.getTimestamp(), newDims, newMetrics, input.getRowNum(),
                                input.getDescriptions());
                    }
                }), mergedDimensions, dimConversions.get(i), i));
    }

    Iterable<Rowboat> theRows = rowMergerFn.apply(boats);

    CompressedLongsSupplierSerializer timeWriter = CompressedLongsSupplierSerializer.create(ioPeon,
            "little_end_time", IndexIO.BYTE_ORDER);

    timeWriter.open();

    ArrayList<VSizeIndexedWriter> forwardDimWriters = Lists.newArrayListWithCapacity(mergedDimensions.size());
    for (String dimension : mergedDimensions) {
        VSizeIndexedWriter writer = new VSizeIndexedWriter(ioPeon, dimension,
                dimensionCardinalities.get(dimension));
        writer.open();
        forwardDimWriters.add(writer);
    }

    ArrayList<MetricColumnSerializer> metWriters = Lists.newArrayListWithCapacity(mergedMetrics.size());
    for (Map.Entry<String, String> entry : metricTypes.entrySet()) {
        String metric = entry.getKey();
        String typeName = entry.getValue();
        if ("float".equals(typeName)) {
            metWriters.add(new FloatMetricColumnSerializer(metric, v8OutDir, ioPeon));
        } else {
            ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName);

            if (serde == null) {
                throw new ISE("Unknown type[%s]", typeName);
            }

            metWriters.add(new ComplexMetricColumnSerializer(metric, v8OutDir, ioPeon, serde));
        }
    }
    for (MetricColumnSerializer metWriter : metWriters) {
        metWriter.open();
    }

    int rowCount = 0;
    long time = System.currentTimeMillis();
    List<IntBuffer> rowNumConversions = Lists.newArrayListWithCapacity(indexes.size());
    for (IndexableAdapter index : indexes) {
        int[] arr = new int[index.getNumRows()];
        Arrays.fill(arr, INVALID_ROW);
        rowNumConversions.add(IntBuffer.wrap(arr));
    }

    final Map<String, String> descriptions = Maps.newHashMap();
    for (Rowboat theRow : theRows) {
        progress.progress();
        timeWriter.add(theRow.getTimestamp());

        final Object[] metrics = theRow.getMetrics();
        for (int i = 0; i < metrics.length; ++i) {
            metWriters.get(i).serialize(metrics[i]);
        }

        int[][] dims = theRow.getDims();
        for (int i = 0; i < dims.length; ++i) {
            List<Integer> listToWrite = (i >= dims.length || dims[i] == null) ? null : Ints.asList(dims[i]);
            forwardDimWriters.get(i).write(listToWrite);
        }

        for (Map.Entry<Integer, TreeSet<Integer>> comprisedRow : theRow.getComprisedRows().entrySet()) {
            final IntBuffer conversionBuffer = rowNumConversions.get(comprisedRow.getKey());

            for (Integer rowNum : comprisedRow.getValue()) {
                while (conversionBuffer.position() < rowNum) {
                    conversionBuffer.put(INVALID_ROW);
                }
                conversionBuffer.put(rowCount);
            }
        }

        if ((++rowCount % 500000) == 0) {
            log.info("outDir[%s] walked 500,000/%,d rows in %,d millis.", v8OutDir, rowCount,
                    System.currentTimeMillis() - time);
            time = System.currentTimeMillis();
        }

        descriptions.putAll(theRow.getDescriptions());
    }

    for (IntBuffer rowNumConversion : rowNumConversions) {
        rowNumConversion.rewind();
    }

    final File timeFile = IndexIO.makeTimeFile(v8OutDir, IndexIO.BYTE_ORDER);
    timeFile.delete();
    OutputSupplier<FileOutputStream> out = Files.newOutputStreamSupplier(timeFile, true);
    timeWriter.closeAndConsolidate(out);
    IndexIO.checkFileSize(timeFile);

    for (int i = 0; i < mergedDimensions.size(); ++i) {
        forwardDimWriters.get(i).close();
        ByteStreams.copy(forwardDimWriters.get(i).combineStreams(), dimOuts.get(i));
    }

    for (MetricColumnSerializer metWriter : metWriters) {
        metWriter.close();
    }

    ioPeon.cleanup();
    log.info("outDir[%s] completed walk through of %,d rows in %,d millis.", v8OutDir, rowCount,
            System.currentTimeMillis() - startTime);

    /************ Create Inverted Indexes *************/
    startTime = System.currentTimeMillis();

    final File invertedFile = new File(v8OutDir, "inverted.drd");
    Files.touch(invertedFile);
    out = Files.newOutputStreamSupplier(invertedFile, true);

    final File geoFile = new File(v8OutDir, "spatial.drd");
    Files.touch(geoFile);
    OutputSupplier<FileOutputStream> spatialOut = Files.newOutputStreamSupplier(geoFile, true);

    for (int i = 0; i < mergedDimensions.size(); ++i) {
        long dimStartTime = System.currentTimeMillis();
        String dimension = mergedDimensions.get(i);

        File dimOutFile = dimOuts.get(i).getFile();
        final MappedByteBuffer dimValsMapped = Files.map(dimOutFile);

        if (!dimension.equals(serializerUtils.readString(dimValsMapped))) {
            throw new ISE("dimensions[%s] didn't equate!?  This is a major WTF moment.", dimension);
        }
        Indexed<String> dimVals = GenericIndexed.read(dimValsMapped, GenericIndexed.stringStrategy);
        log.info("Starting dimension[%s] with cardinality[%,d]", dimension, dimVals.size());

        GenericIndexedWriter<ImmutableConciseSet> writer = new GenericIndexedWriter<ImmutableConciseSet>(ioPeon,
                dimension, ConciseCompressedIndexedInts.objectStrategy);
        writer.open();

        boolean isSpatialDim = "spatial".equals(descriptions.get(dimension));
        ByteBufferWriter<ImmutableRTree> spatialWriter = null;
        RTree tree = null;
        IOPeon spatialIoPeon = new TmpFileIOPeon();
        if (isSpatialDim) {
            spatialWriter = new ByteBufferWriter<ImmutableRTree>(spatialIoPeon, dimension,
                    IndexedRTree.objectStrategy);
            spatialWriter.open();
            tree = new RTree(2, new LinearGutmanSplitStrategy(0, 50));
        }

        for (String dimVal : IndexedIterable.create(dimVals)) {
            progress.progress();
            List<Iterable<Integer>> convertedInverteds = Lists.newArrayListWithCapacity(indexes.size());
            for (int j = 0; j < indexes.size(); ++j) {
                convertedInverteds.add(new ConvertingIndexedInts(indexes.get(j).getInverteds(dimension, dimVal),
                        rowNumConversions.get(j)));
            }

            ConciseSet bitset = new ConciseSet();
            for (Integer row : CombiningIterable.createSplatted(convertedInverteds,
                    Ordering.<Integer>natural().nullsFirst())) {
                if (row != INVALID_ROW) {
                    bitset.add(row);
                }
            }

            writer.write(ImmutableConciseSet.newImmutableFromMutable(bitset));

            if (isSpatialDim && dimVal != null) {
                List<String> stringCoords = Lists.newArrayList(SPLITTER.split(dimVal));
                float[] coords = new float[stringCoords.size()];
                for (int j = 0; j < coords.length; j++) {
                    coords[j] = Float.valueOf(stringCoords.get(j));
                }
                tree.insert(coords, bitset);
            }
        }
        writer.close();

        serializerUtils.writeString(out, dimension);
        ByteStreams.copy(writer.combineStreams(), out);
        ioPeon.cleanup();

        log.info("Completed dimension[%s] in %,d millis.", dimension,
                System.currentTimeMillis() - dimStartTime);

        if (isSpatialDim) {
            spatialWriter.write(ImmutableRTree.newImmutableFromMutable(tree));
            spatialWriter.close();

            serializerUtils.writeString(spatialOut, dimension);
            ByteStreams.copy(spatialWriter.combineStreams(), spatialOut);
            spatialIoPeon.cleanup();
        }

    }

    log.info("outDir[%s] completed inverted.drd in %,d millis.", v8OutDir,
            System.currentTimeMillis() - startTime);

    final ArrayList<String> expectedFiles = Lists.newArrayList(Iterables.concat(
            Arrays.asList("index.drd", "inverted.drd", "spatial.drd",
                    String.format("time_%s.drd", IndexIO.BYTE_ORDER)),
            Iterables.transform(mergedDimensions, GuavaUtils.formatFunction("dim_%s.drd")),
            Iterables.transform(mergedMetrics,
                    GuavaUtils.formatFunction(String.format("met_%%s_%s.drd", IndexIO.BYTE_ORDER)))));

    Map<String, File> files = Maps.newLinkedHashMap();
    for (String fileName : expectedFiles) {
        files.put(fileName, new File(v8OutDir, fileName));
    }

    File smooshDir = new File(v8OutDir, "smoosher");
    smooshDir.mkdir();

    for (Map.Entry<String, File> entry : Smoosh.smoosh(v8OutDir, smooshDir, files).entrySet()) {
        entry.getValue().delete();
    }

    for (File file : smooshDir.listFiles()) {
        Files.move(file, new File(v8OutDir, file.getName()));
    }

    if (!smooshDir.delete()) {
        log.info("Unable to delete temporary dir[%s], contains[%s]", smooshDir,
                Arrays.asList(smooshDir.listFiles()));
        throw new IOException(String.format("Unable to delete temporary dir[%s]", smooshDir));
    }

    createIndexDrdFile(IndexIO.V8_VERSION, v8OutDir,
            GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.stringStrategy),
            GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.stringStrategy), dataInterval);

    IndexIO.DefaultIndexIOHandler.convertV8toV9(v8OutDir, outDir);
    FileUtils.deleteDirectory(v8OutDir);

    return outDir;
}

From source file:io.druid.segment.IndexMaker.java

private static int convertDims(final List<IndexableAdapter> adapters, final ProgressIndicator progress,
        final Iterable<Rowboat> theRows, final List<IntBuffer> rowNumConversions) throws IOException {
    final String section = "convert dims";
    progress.startSection(section);

    for (IndexableAdapter index : adapters) {
        int[] arr = new int[index.getNumRows()];
        Arrays.fill(arr, INVALID_ROW);
        rowNumConversions.add(IntBuffer.wrap(arr));
    }

    int rowCount = 0;
    for (Rowboat theRow : theRows) {
        for (Map.Entry<Integer, TreeSet<Integer>> comprisedRow : theRow.getComprisedRows().entrySet()) {
            final IntBuffer conversionBuffer = rowNumConversions.get(comprisedRow.getKey());

            for (Integer rowNum : comprisedRow.getValue()) {
                while (conversionBuffer.position() < rowNum) {
                    conversionBuffer.put(INVALID_ROW);
                }
                conversionBuffer.put(rowCount);
            }
        }

        if ((++rowCount % 500000) == 0) {
            progress.progressSection(section, String.format("Walked 500,000/%,d rows", rowCount));
        }
    }

    for (IntBuffer rowNumConversion : rowNumConversions) {
        rowNumConversion.rewind();
    }

    progress.stopSection(section);

    return rowCount;
}