Example usage for org.apache.commons.io.output ByteArrayOutputStream reset

List of usage examples for org.apache.commons.io.output ByteArrayOutputStream reset

Introduction

On this page you can find example usages of org.apache.commons.io.output ByteArrayOutputStream.reset.

Prototype

public synchronized void reset() 

Source Link

Usage

From source file:com.streamsets.pipeline.lib.sdcipc.SdcIpcRequestFragmenter.java

static byte[] extract(InputStream is, ByteArrayOutputStream overflowBuffer, int limit) throws IOException {
    // The input stream has already been stripped of the magic byte on the first call.
    byte[] message;
    boolean fitsWithinLimit = copy(is, overflowBuffer, limit - overflowBuffer.size());
    if (fitsWithinLimit) {
        // The whole remaining payload fits under the maximum message size.
        if (overflowBuffer.size() == 0) {
            // No payload left to emit.
            message = null;
        } else {
            // Prefix the buffered payload with the magic byte and emit it all.
            byte[] buffered = overflowBuffer.toByteArray();
            message = new byte[buffered.length + 1];
            message[0] = JSON1_MAGIC_NUMBER;
            System.arraycopy(buffered, 0, message, 1, buffered.length);
            overflowBuffer.reset();
        }
    } else {
        // Partial payload: the maximum message size was exceeded.
        byte[] buffered = overflowBuffer.toByteArray();
        // Cut at the end of the last complete line that fits within the limit.
        int lastEOL = findEndOfLastLineBeforeLimit(buffered, limit);
        if (lastEOL == -1) {
            throw new IOException(Utils.format("Maximum message size '{}' exceeded", limit));
        }
        // Emit everything up to the last EOL, prefixed with the magic byte.
        message = new byte[lastEOL + 1];
        message[0] = JSON1_MAGIC_NUMBER;
        System.arraycopy(buffered, 0, message, 1, lastEOL);

        // Keep the unsent tail buffered so the next fragment picks it up.
        overflowBuffer.reset();
        overflowBuffer.write(buffered, lastEOL, buffered.length - lastEOL);
    }
    return message;
}

From source file:inti.core.codec.direct.DirectByteCodec.java

/**
 * Reads all remaining bytes from {@code input} into a reusable per-thread
 * buffer and returns them as a new byte array.
 *
 * @param input the stream to drain; read until end-of-stream
 * @return the bytes read, possibly empty
 * @throws Exception if reading fails
 */
@Override
public byte[] decode(InputStream input) throws Exception {
    ByteArrayOutputStream output = outputs.get();
    byte[] transferBuffer = transferBuffers.get();
    int read;

    output.reset();
    // FIX: drive the loop solely by read() == -1. The previous
    // "input.available() > 0" guard could silently truncate the result,
    // because available() may legitimately return 0 while bytes are still
    // pending (e.g. on socket or filter streams).
    while ((read = input.read(transferBuffer)) != -1) {
        output.write(transferBuffer, 0, read);
    }

    return output.toByteArray();
}

From source file:de.dal33t.powerfolder.util.ByteSerializer.java

/**
 * Serialize an object. This method is non-static an re-uses the internal
 * byteoutputstream/*  w ww .ja  va 2  s  .  co  m*/
 *
 * @param target
 *            The object to be serialized
 * @param compress
 *            true if serialization should compress.
 * @param padToSize
 *            the size to pad the output buffer to. number below 0 means no
 *            padding.
 * @return The serialized object
 * @throws IOException
 *             In case the object cannot be serialized
 */
public byte[] serialize(Serializable target, boolean compress, int padToSize) throws IOException {
    long start = System.currentTimeMillis();
    ByteArrayOutputStream byteOut;
    // Reset buffer
    if (outBufferRef != null && outBufferRef.get() != null) {
        // Reuse old buffer
        byteOut = outBufferRef.get();
        byteOut.reset();
    } else {
        // logFiner("Creating send buffer (512bytes)");
        // Create new bytearray output, 512b buffer
        byteOut = new ByteArrayOutputStream(512);
        if (CACHE_OUT_BUFFER) {
            // Chache outgoing buffer
            outBufferRef = new SoftReference<ByteArrayOutputStream>(byteOut);
        }
    }

    OutputStream targetOut;
    // Serialize....
    if (compress) {
        PFZIPOutputStream zipOut = new PFZIPOutputStream(byteOut);
        targetOut = zipOut;
    } else {
        targetOut = byteOut;
    }
    ObjectOutputStream objOut = new ObjectOutputStream(targetOut);

    // Write
    try {
        objOut.writeUnshared(target);
    } catch (StreamCorruptedException e) {
        LOG.log(Level.WARNING, "Problem while serializing: " + e, e);
        throw e;
    } catch (InvalidClassException e) {
        LOG.log(Level.WARNING, "Problem while serializing: " + target + ": " + e, e);
        throw e;
    }

    objOut.close();

    if (padToSize > 0) {
        int modulo = byteOut.size() % padToSize;
        if (modulo != 0) {
            int additionalBytesRequired = padToSize - (modulo);
            // LOG.warn("Buffersize: " + byteOut.size()
            // + ", Additonal bytes required: " + additionalBytesRequired);
            for (int i = 0; i < additionalBytesRequired; i++) {
                byteOut.write(0);
            }
        }
    }
    byteOut.flush();
    byteOut.close();

    if (byteOut.size() >= 256 * 1024) {
        logWarning("Send buffer exceeds 256KB! " + Format.formatBytes(byteOut.size()) + ". Message: " + target);
    }

    byte[] buf = byteOut.toByteArray();
    if (BENCHMARK) {
        totalObjects++;
        totalTime += System.currentTimeMillis() - start;
        int count = 0;
        if (CLASS_STATS.containsKey(target.getClass())) {
            count = CLASS_STATS.get(target.getClass());
        }
        count++;
        CLASS_STATS.put(target.getClass(), count);
    }
    return buf;
}

From source file:gsn.wrappers.general.HttpGetWrapper.java

/**
 * Polls the configured URL every {@code rate} milliseconds and posts each
 * response body as a stream element, until {@code isActive()} returns false.
 */
public void run() {
    ByteArrayOutputStream arrayOutputStream = new ByteArrayOutputStream(1024 * 20);
    byte[] buffer = new byte[16 * 1024];
    BufferedInputStream content = null;
    while (isActive()) {
        try {
            Thread.sleep(rate);
            httpURLConnection = (HttpURLConnection) url.openConnection();
            httpURLConnection.connect();
            // HTTP 202 (Accepted): no data to consume this round, poll again.
            // NOTE(review): presumably the remote signals "no new data" with
            // 202 — confirm against the data source.
            if (httpURLConnection.getResponseCode() == HttpURLConnection.HTTP_ACCEPTED)
                continue;
            content = new BufferedInputStream(httpURLConnection.getInputStream(), 4096);
            arrayOutputStream.reset();
            int readIndex;
            while ((readIndex = content.read(buffer)) != -1)
                arrayOutputStream.write(buffer, 0, readIndex);
            postStreamElement(arrayOutputStream.toByteArray());
        } catch (InterruptedException e) {
            logger.error(e.getMessage(), e);
            // NOTE(review): interrupt status is intentionally not restored here
            // (matching the original); shutdown is driven by isActive().
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        } finally {
            // FIX: close the response stream. Previously it was never closed,
            // leaking a stream (and its underlying connection resources) on
            // every successful iteration.
            if (content != null) {
                try {
                    content.close();
                } catch (IOException e) {
                    logger.error(e.getMessage(), e);
                }
                content = null;
            }
        }
    }
}

From source file:io.anserini.embeddings.IndexW2V.java

/**
 * Reads word embeddings from {@code args.input} — one "term\tv1 v2 ..." line
 * per term after the first line — and indexes each term with its vector
 * serialized as consecutive 4-byte big-endian floats in a stored field.
 *
 * @throws IOException if reading the input or writing the index fails
 * @throws InterruptedException declared for caller compatibility
 */
public void indexEmbeddings() throws IOException, InterruptedException {
    LOG.info("Starting indexer...");
    long startTime = System.currentTimeMillis();
    final WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer();
    final IndexWriterConfig config = new IndexWriterConfig(analyzer);
    final IndexWriter writer = new IndexWriter(directory, config);

    Document document = new Document();
    ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
    int cnt = 0;

    // FIX: try-with-resources — the reader was previously never closed,
    // leaking a file handle (unconditionally, and especially on exceptions).
    // NOTE(review): FileReader uses the platform default charset — confirm
    // the embedding files are always produced in that charset.
    try (BufferedReader bRdr = new BufferedReader(new FileReader(args.input))) {
        String line;
        bRdr.readLine(); // skip the first line — presumably a header; TODO confirm

        while ((line = bRdr.readLine()) != null) {
            String[] termEmbedding = line.trim().split("\t");
            document.add(new StringField(LuceneDocumentGenerator.FIELD_ID, termEmbedding[0], Field.Store.NO));
            String[] parts = termEmbedding[1].split(" ");

            // Serialize the vector as consecutive 4-byte big-endian floats.
            for (int i = 0; i < parts.length; ++i) {
                byteStream.write(ByteBuffer.allocate(4).putFloat(Float.parseFloat(parts[i])).array());
            }
            document.add(new StoredField(FIELD_BODY, byteStream.toByteArray()));

            // (the previous flush() was a no-op on ByteArrayOutputStream)
            byteStream.reset();
            writer.addDocument(document);
            document.clear();
            cnt++;

            if (cnt % 100000 == 0) {
                LOG.info(cnt + " terms indexed");
            }
        }
    }

    LOG.info(String.format("Total of %s terms added", cnt));

    try {
        writer.commit();
        writer.forceMerge(1);
    } finally {
        try {
            writer.close();
        } catch (IOException e) {
            LOG.error(e);
        }
    }

    LOG.info("Total elapsed time: " + (System.currentTimeMillis() - startTime) + "ms");
}

From source file:com.amalto.core.servlet.FileChunkLoaderTest.java

/**
 * Exercises FileChunkLoader against a fixture log file: sequential 2-line
 * chunks, a zero-length request, a whole-file request, and two tail reads.
 */
@Test
public void test() throws Exception {
    File test = getFile("com/amalto/core/servlet/test.log");

    FileChunkLoader loader = new FileChunkLoader(test);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    long position = 0;
    FileChunkInfo chunkInfo = null;

    // Walk the file in eleven 2-line chunks, checking the first chunk.
    for (int i = 0; i < 11; i++) {
        chunkInfo = loader.loadChunkTo(baos, position, 2);
        if (position == 0) {
            String firstChunk = baos.toString();
            assertTrue(firstChunk.endsWith("Service (JTA version) - JBoss Inc.\r\n"));
            assertEquals(238, chunkInfo.nextPosition);
        }
        position = chunkInfo.nextPosition;
    }
    assertEquals(2406, chunkInfo.nextPosition);
    assertEquals(1, chunkInfo.lines); // last line does not contain '\n'
    String result = baos.toString();
    assertTrue(result.endsWith("startup in 34 ms"));

    // Zero-length chunk request reads nothing.
    baos.reset();
    chunkInfo = loader.loadChunkTo(baos, 0, 0);
    assertEquals(0, chunkInfo.nextPosition);
    assertEquals(0, chunkInfo.lines);

    // Whole file in one oversized chunk.
    baos.reset();
    chunkInfo = loader.loadChunkTo(baos, 0, 100);
    assertEquals(2406, chunkInfo.nextPosition);
    assertEquals(21, chunkInfo.lines);

    // tail
    baos.reset();
    chunkInfo = loader.loadChunkTo(baos, -1, 10);
    assertEquals(2406, chunkInfo.nextPosition);
    assertEquals(6, chunkInfo.lines);
    // FIX: previously this asserted on the stale 'result' string captured
    // before the tail read (a vacuous re-check); verify the tail's own output.
    assertTrue(baos.toString().endsWith("startup in 34 ms"));

    // tail
    baos.reset();
    chunkInfo = loader.loadChunkTo(baos, -1, 100);
    assertEquals(2406, chunkInfo.nextPosition);
    assertEquals(21, chunkInfo.lines);
}

From source file:org.kiji.mapreduce.JobHistoryKijiTable.java

/**
 * Writes a job into the JobHistoryKijiTable.
 *
 * @param job The job to save.
 * @param startTime The time the job began, in milliseconds.
 * @param endTime The time the job ended, in milliseconds
 * @throws IOException If there is an error writing to the table.
 */
public void recordJob(Job job, long startTime, long endTime) throws IOException {
    ByteArrayOutputStream serialized = new ByteArrayOutputStream();
    DataOutputStream dataOut = new DataOutputStream(serialized);
    String jobId = job.getJobID().toString();
    EntityId jobEntity = mKijiTable.getEntityId(jobId);
    KijiTableWriter writer = mKijiTable.openTableWriter();
    try {
        // Basic job metadata, all stamped with the job's start time.
        writer.put(jobEntity, "info", "jobId", startTime, jobId);
        writer.put(jobEntity, "info", "jobName", startTime, job.getJobName());
        writer.put(jobEntity, "info", "startTime", startTime, startTime);
        writer.put(jobEntity, "info", "endTime", startTime, endTime);
        // Counters and configuration are stored in their Writable form,
        // reusing the same byte buffer between the two writes.
        job.getCounters().write(dataOut);
        writer.put(jobEntity, "info", "counters", startTime, serialized.toByteArray());
        serialized.reset();
        job.getConfiguration().write(dataOut);
        writer.put(jobEntity, "info", "configuration", startTime, serialized.toByteArray());
    } finally {
        IOUtils.closeQuietly(writer);
    }
}