Example usage for java.io DataOutputStream size

List of usage examples for java.io DataOutputStream size

Introduction

On this page you can find usage examples for java.io DataOutputStream.size().

Prototype

public final int size() 

Document

Returns the current value of the counter written, i.e. the number of bytes written to this data output stream so far. If the counter overflows, it wraps to Integer.MAX_VALUE.
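Before the real-world examples below, a minimal, self-contained sketch (not taken from any of the source files on this page) shows how size() grows by the encoded width of each write:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class SizeDemo {
    public static void main(String[] args) throws IOException {
        DataOutputStream dos = new DataOutputStream(new ByteArrayOutputStream());
        dos.write(0xFF);                 // a single byte
        System.out.println(dos.size());  // prints 1
        dos.writeInt(42);                // four bytes, big-endian
        System.out.println(dos.size());  // prints 5
        dos.writeUTF("hi");              // 2-byte length prefix + 2 bytes of modified UTF-8
        System.out.println(dos.size());  // prints 9
        dos.close();
    }
}

Note that size() only counts bytes written through the DataOutputStream wrapper; bytes written directly to the underlying stream are not reflected in the counter.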

Usage

From source file:Main.java

public static void main(String[] args) throws Exception {
    FileOutputStream fos = new FileOutputStream("C:/Bytes.txt");
    DataOutputStream dos = new DataOutputStream(fos);

    dos.writeBytes("this is a test");
    int bytesWritten = dos.size();
    System.out.println("Total " + bytesWritten + " bytes are written to stream.");
    dos.close();
}

From source file:Main.java

public static void main(String[] args) throws IOException {

    byte[] buf = { 12, 11, 22, 33, 44 };

    FileOutputStream fos = new FileOutputStream("c:/test.txt");
    DataOutputStream dos = new DataOutputStream(fos);

    int size = 0;

    for (byte b : buf) {
        dos.write(b); // one byte per call; size() reflects the running total
        size = dos.size();
        System.out.print("Size: " + size + "; ");
    }
    dos.close();
}

From source file:hyperloglog.tools.HyperLogLogCLI.java

public static void main(String[] args) {
    Options options = new Options();
    addOptions(options);

    CommandLineParser parser = new BasicParser();
    CommandLine cli = null;
    long n = 0;
    long seed = 123;
    EncodingType enc = EncodingType.SPARSE;
    int p = 14;
    int hb = 64;
    boolean bitPack = true;
    boolean noBias = true;
    int unique = -1;
    String filePath = null;
    BufferedReader br = null;
    String outFile = null;
    String inFile = null;
    FileOutputStream fos = null;
    DataOutputStream out = null;
    FileInputStream fis = null;
    DataInputStream in = null;
    try {
        cli = parser.parse(options, args);

        if (!(cli.hasOption('n') || cli.hasOption('f') || cli.hasOption('d'))) {
            System.out.println("Example usage: hll -n 1000 " + "<OR> hll -f /tmp/input.txt "
                    + "<OR> hll -d -i /tmp/out.hll");
            usage(options);
            return;
        }

        if (cli.hasOption('n')) {
            n = Long.parseLong(cli.getOptionValue('n'));
        }

        if (cli.hasOption('e')) {
            String value = cli.getOptionValue('e');
            if (value.equals(EncodingType.DENSE.name())) {
                enc = EncodingType.DENSE;
            }
        }

        if (cli.hasOption('p')) {
            p = Integer.parseInt(cli.getOptionValue('p'));
        if (p < 4 || p > 16) {
            System.out.println("Warning! Out-of-range value specified for p. Using p=14.");
                p = 14;
            }
        }

        if (cli.hasOption('h')) {
            hb = Integer.parseInt(cli.getOptionValue('h'));
        }

        if (cli.hasOption('c')) {
            noBias = Boolean.parseBoolean(cli.getOptionValue('c'));
        }

        if (cli.hasOption('b')) {
            bitPack = Boolean.parseBoolean(cli.getOptionValue('b'));
        }

        if (cli.hasOption('f')) {
            filePath = cli.getOptionValue('f');
            br = new BufferedReader(new FileReader(new File(filePath)));
        }

        if (filePath != null && cli.hasOption('n')) {
            System.out.println("'-f' (input file) specified. Ignoring -n.");
        }

        if (cli.hasOption('s')) {
            if (cli.hasOption('o')) {
                outFile = cli.getOptionValue('o');
                fos = new FileOutputStream(new File(outFile));
                out = new DataOutputStream(fos);
            } else {
                System.err.println("Specify output file. Example usage: hll -s -o /tmp/out.hll");
                usage(options);
                return;
            }
        }

        if (cli.hasOption('d')) {
            if (cli.hasOption('i')) {
                inFile = cli.getOptionValue('i');
                fis = new FileInputStream(new File(inFile));
                in = new DataInputStream(fis);
            } else {
                System.err.println("Specify input file. Example usage: hll -d -i /tmp/in.hll");
                usage(options);
                return;
            }
        }

        // return after deserialization
        if (fis != null && in != null) {
            long start = System.currentTimeMillis();
            HyperLogLog deserializedHLL = HyperLogLogUtils.deserializeHLL(in);
            long end = System.currentTimeMillis();
            System.out.println(deserializedHLL.toString());
            System.out.println("Count after deserialization: " + deserializedHLL.count());
            System.out.println("Deserialization time: " + (end - start) + " ms");
            return;
        }

        // construct hll and serialize it if required
        HyperLogLog hll = HyperLogLog.builder().enableBitPacking(bitPack).enableNoBias(noBias).setEncoding(enc)
                .setNumHashBits(hb).setNumRegisterIndexBits(p).build();

        if (br != null) {
            Set<String> hashset = new HashSet<String>();
            String line;
            while ((line = br.readLine()) != null) {
                hll.addString(line);
                hashset.add(line);
            }
            n = hashset.size();
        } else {
            Random rand = new Random(seed);
            for (int i = 0; i < n; i++) {
                if (unique < 0) {
                    hll.addLong(rand.nextLong());
                } else {
                    int val = rand.nextInt(unique);
                    hll.addLong(val);
                }
            }
        }

        long estCount = hll.count();
        System.out.println("Actual count: " + n);
        System.out.println(hll.toString());
        System.out.println("Relative error: " + HyperLogLogUtils.getRelativeError(n, estCount) + "%");
        if (fos != null && out != null) {
            long start = System.currentTimeMillis();
            HyperLogLogUtils.serializeHLL(out, hll);
            long end = System.currentTimeMillis();
            System.out.println("Serialized hyperloglog to " + outFile);
            System.out.println("Serialized size: " + out.size() + " bytes");
            System.out.println("Serialization time: " + (end - start) + " ms");
            out.close();
        }
    } catch (ParseException e) {
        System.err.println("Invalid parameter.");
        usage(options);
    } catch (NumberFormatException e) {
        System.err.println("Invalid type for parameter.");
        usage(options);
    } catch (FileNotFoundException e) {
        System.err.println("Specified file not found.");
        usage(options);
    } catch (IOException e) {
        System.err.println("Exception occured while reading file.");
        usage(options);
    }
}

From source file:cn.ac.ncic.mastiff.io.coding.DeltaBinaryPackingStringReader.java

@Override
public byte[] ensureDecompressed() throws IOException {
    System.out.println("280    inBuf.length   " + inBuf.getLength());
    FlexibleEncoding.Parquet.DeltaByteArrayReader reader = new FlexibleEncoding.Parquet.DeltaByteArrayReader();
    DataOutputBuffer transfer = new DataOutputBuffer();
    transfer.write(inBuf.getData(), 12, inBuf.getLength() - 12);
    byte[] data = transfer.getData();
    System.out.println("286   byte [] data  " + data.length + "  numPairs  " + numPairs);
    inBuf.close();
    Binary[] bin = new Utils().readData(reader, data, numPairs);
    System.out.println("2998   Binary[] bin   " + bin.length);
    ByteArrayOutputStream bos1 = new ByteArrayOutputStream();
    DataOutputStream dos1 = new DataOutputStream(bos1);
    ByteArrayOutputStream bos2 = new ByteArrayOutputStream();
    DataOutputStream dos2 = new DataOutputStream(bos2);
    //    DataOutputBuffer   decoding = new DataOutputBuffer();
    //    DataOutputBuffer   offset = new DataOutputBuffer();
    dos1.writeInt(decompressedSize);
    dos1.writeInt(numPairs);
    dos1.writeInt(startPos);
    int dataoffset = 12;
    String str;
    for (int i = 0; i < numPairs; i++) {
        str = bin[i].toStringUsingUTF8();
        dos1.writeUTF(str);
        dataoffset = dos1.size();
        dos2.writeInt(dataoffset);
    }
    System.out.println("315  offset.size() " + bos2.size() + "  decoding.szie   " + bos2.toByteArray().length);
    System.out.println("316  dataoffet   " + dataoffset);
    dos1.write(bos2.toByteArray(), 0, bos2.size());
    inBuf.close();
    System.out.println("316   bos1  " + bos1.toByteArray().length + "    " + bos1.size());
    byte[] bytes = bos1.toByteArray();
    dos2.close();
    bos2.close();
    bos1.close();
    dos1.close();
    return bytes;
}

From source file:com.jivesoftware.os.amza.service.AmzaService.java

@Override
public void availableRowsStream(boolean system, ChunkWriteable writeable, RingMember remoteRingMember,
        TimestampedRingHost remoteTimestampedRingHost, long takeSessionId, long sharedKey,
        long heartbeatIntervalMillis) throws Exception {

    ringStoreWriter.register(remoteRingMember, remoteTimestampedRingHost.ringHost,
            remoteTimestampedRingHost.timestampId, false);

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(new BufferedOutputStream(new SnappyOutputStream(out), 8192));

    takeCoordinator.availableRowsStream(system, ringStoreReader, partitionStripeProvider, remoteRingMember,
            takeSessionId, sharedKey, heartbeatIntervalMillis, (partitionName, txId) -> {
                dos.write(1);
                byte[] bytes = partitionName.toBytes();
                dos.writeInt(bytes.length);
                dos.write(bytes);
                dos.writeLong(txId);
            }, () -> {
                if (dos.size() > 0) {
                    dos.flush();
                    byte[] chunk = out.toByteArray();
                    writeable.write(chunk);
                    /*LOG.info("Offered rows for {} length={}", remoteRingMember, chunk.length);*/
                    out.reset();
                }
                return null;
            }, () -> {
                dos.write(1);
                dos.writeInt(0);
                dos.flush();
                writeable.write(out.toByteArray());
                out.reset();
                return null;
            });
    dos.write(0);
    dos.flush();
    writeable.write(out.toByteArray());
}

From source file:org.apache.hadoop.hbase.io.encoding.EncodedDataBlock.java

/**
 * Find the size of compressed data assuming that the buffer will be compressed
 * using the given algorithm.
 * @param algo compression algorithm
 * @param compressor compressor already requested from codec
 * @param inputBuffer Array to be compressed.
 * @param offset Offset to beginning of the data.
 * @param length Length to be compressed.
 * @return Size of compressed data in bytes.
 * @throws IOException
 */
public static int getCompressedSize(Algorithm algo, Compressor compressor, byte[] inputBuffer, int offset,
        int length) throws IOException {
    DataOutputStream compressedStream = new DataOutputStream(new IOUtils.NullOutputStream());
    if (compressor != null) {
        compressor.reset();
    }
    OutputStream compressingStream = null;

    try {
        compressingStream = algo.createCompressionStream(compressedStream, compressor, 0);

        compressingStream.write(inputBuffer, offset, length);
        compressingStream.flush();

        return compressedStream.size();
    } finally {
        if (compressingStream != null)
            compressingStream.close();
    }
}
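A hypothetical call site for the method above might look like the following sketch. The Compression.Algorithm constant and the getCompressor()/returnCompressor() pooling calls are assumptions based on HBase's Compression API (whose package location varies across HBase versions) and are not part of this source file:

import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.EncodedDataBlock;
import org.apache.hadoop.io.compress.Compressor;

public class CompressedSizeDemo {
    public static void main(String[] args) throws Exception {
        byte[] data = "some repetitive payload, some repetitive payload".getBytes("UTF-8");
        Compression.Algorithm algo = Compression.Algorithm.GZ; // assumed constant
        Compressor compressor = algo.getCompressor();          // assumed pooled-compressor API
        try {
            int compressed = EncodedDataBlock.getCompressedSize(algo, compressor, data, 0, data.length);
            System.out.println("GZ would compress " + data.length + " bytes to " + compressed);
        } finally {
            algo.returnCompressor(compressor);                 // return the compressor to the pool
        }
    }
}

Internally, getCompressedSize relies on the same idiom as several examples on this page: it writes the compressed bytes to a DataOutputStream over a null sink and reads the total from size().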

From source file:org.apache.hadoop.hbase.io.hfile.FixedFileTrailer.java

private static int[] computeTrailerSizeByVersion() {
    int[] versionToSize = new int[HFile.MAX_FORMAT_VERSION + 1];
    for (int version = MIN_FORMAT_VERSION; version <= MAX_FORMAT_VERSION; ++version) {
        FixedFileTrailer fft = new FixedFileTrailer(version, HFileBlock.MINOR_VERSION_NO_CHECKSUM);
        DataOutputStream dos = new DataOutputStream(new NullOutputStream());
        try {
            fft.serialize(dos);
        } catch (IOException ex) {
            // The above has no reason to fail.
            throw new RuntimeException(ex);
        }
        versionToSize[version] = dos.size();
    }
    return versionToSize;
}

From source file:org.apache.hadoop.hbase.io.hfile.HFileWriterV1.java

/**
 * Release the block compressor and compressing stream obtained from a call to
 * {@link #getCompressingStream}.
 *
 * @param dos
 *
 * @return How much was written on this stream since it was taken out.
 *
 * @see #getCompressingStream()
 *
 * @throws IOException
 */
private int releaseCompressingStream(final DataOutputStream dos) throws IOException {
    dos.flush();
    this.compressAlgo.returnCompressor(this.compressor);
    this.compressor = null;
    return dos.size();
}

From source file:org.apache.hadoop.hbase.io.hfile.TestFixedFileTrailer.java

@Test
public void testTrailer() throws IOException {
    FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderV2.PBUF_TRAILER_MINOR_VERSION);
    t.setDataIndexCount(3);
    t.setEntryCount(((long) Integer.MAX_VALUE) + 1);

    t.setLastDataBlockOffset(291);
    t.setNumDataIndexLevels(3);
    t.setComparatorClass(KeyValue.COMPARATOR.getClass());
    t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
    t.setUncompressedDataIndexSize(827398717L); // Something random.

    t.setLoadOnOpenOffset(128);
    t.setMetaIndexCount(7);

    t.setTotalUncompressedBytes(129731987);

    {
        DataOutputStream dos = new DataOutputStream(baos); // Limited scope.
        t.serialize(dos);
        dos.flush();
        assertEquals(dos.size(), FixedFileTrailer.getTrailerSize(version));
    }

    byte[] bytes = baos.toByteArray();
    baos.reset();

    assertEquals(bytes.length, FixedFileTrailer.getTrailerSize(version));

    ByteArrayInputStream bais = new ByteArrayInputStream(bytes);

    // Finished writing, trying to read.
    {
        DataInputStream dis = new DataInputStream(bais);
        FixedFileTrailer t2 = new FixedFileTrailer(version, HFileReaderV2.PBUF_TRAILER_MINOR_VERSION);
        t2.deserialize(dis);
        assertEquals(-1, bais.read()); // Ensure we have read everything.
        checkLoadedTrailer(version, t, t2);
    }

    // Now check what happens if the trailer is corrupted.
    Path trailerPath = new Path(util.getDataTestDir(), "trailer_" + version);

    {
        for (byte invalidVersion : new byte[] { HFile.MIN_FORMAT_VERSION - 1, HFile.MAX_FORMAT_VERSION + 1 }) {
            bytes[bytes.length - 1] = invalidVersion;
            writeTrailer(trailerPath, null, bytes);
            try {
                readTrailer(trailerPath);
                fail("Exception expected");
            } catch (IllegalArgumentException ex) {
                // Make it easy to debug this.
                String msg = ex.getMessage();
                String cleanMsg = msg.replaceAll("^(java(\\.[a-zA-Z]+)+:\\s+)?|\\s+\\(.*\\)\\s*$", "");
                assertEquals("Actual exception message is \"" + msg + "\".\n" + "Cleaned-up message", // will be followed by " expected: ..."
                        "Invalid HFile version: " + invalidVersion, cleanMsg);
                LOG.info("Got an expected exception: " + msg);
            }
        }

    }

    // Now write the trailer into a file and auto-detect the version.
    writeTrailer(trailerPath, t, null);

    FixedFileTrailer t4 = readTrailer(trailerPath);

    checkLoadedTrailer(version, t, t4);

    String trailerStr = t.toString();
    assertEquals("Invalid number of fields in the string representation " + "of the trailer: " + trailerStr,
            NUM_FIELDS_BY_VERSION[version - 2], trailerStr.split(", ").length);
    assertEquals(trailerStr, t4.toString());
}

From source file:org.apache.hadoop.hbase.io.hfile.TestFixedFileTrailer.java

@Test
public void testTrailerForV2NonPBCompatibility() throws Exception {
    if (version == 2) {
        FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderV2.MINOR_VERSION_NO_CHECKSUM);
        t.setDataIndexCount(3);
        t.setEntryCount(((long) Integer.MAX_VALUE) + 1);
        t.setLastDataBlockOffset(291);
        t.setNumDataIndexLevels(3);
        t.setComparatorClass(KeyValue.COMPARATOR.getClass());
        t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
        t.setUncompressedDataIndexSize(827398717L); // Something random.
        t.setLoadOnOpenOffset(128);
        t.setMetaIndexCount(7);
        t.setTotalUncompressedBytes(129731987);

        {
            DataOutputStream dos = new DataOutputStream(baos); // Limited scope.
            serializeAsWritable(dos, t);
            dos.flush();
            assertEquals(FixedFileTrailer.getTrailerSize(version), dos.size());
        }

        byte[] bytes = baos.toByteArray();
        baos.reset();
        assertEquals(bytes.length, FixedFileTrailer.getTrailerSize(version));

        ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
        {
            DataInputStream dis = new DataInputStream(bais);
            FixedFileTrailer t2 = new FixedFileTrailer(version, HFileReaderV2.MINOR_VERSION_NO_CHECKSUM);
            t2.deserialize(dis);
            assertEquals(-1, bais.read()); // Ensure we have read everything.
            checkLoadedTrailer(version, t, t2);
        }
    }
}