Example usage for org.apache.commons.io.output CountingOutputStream flush

List of usage examples for org.apache.commons.io.output CountingOutputStream flush

Introduction

On this page you can find example usages of org.apache.commons.io.output CountingOutputStream flush.

Prototype

public void flush() throws IOException 

Source Link

Document

Invokes the delegate's flush() method.

Usage

From source file:com.joyent.manta.client.crypto.EncryptingEntity.java

/**
 * Copies the entity content to the specified output stream and validates
 * that the number of bytes copied is the same as specified when in the
 * original content-length./*w ww  .j ava2  s. c  o  m*/
 *
 * @param out stream to copy to
 * @throws IOException throw when there is a problem writing to the streams
 */
private void copyContentToOutputStream(final OutputStream out) throws IOException {
    final long bytesCopied;

    /* Only the EmbeddedHttpContent class requires us to actually call
     * write out on the wrapped object. In its particular case it is doing
     * a wrapping operation between an InputStream and an OutputStream in
     * order to provide an OutputStream interface to MantaClient. */
    if (this.wrapped.getClass().equals(EmbeddedHttpContent.class)) {
        CountingOutputStream cout = new CountingOutputStream(out);
        this.wrapped.writeTo(cout);
        cout.flush();
        bytesCopied = cout.getByteCount();
    } else {
        /* We choose a small buffer because large buffer don't result in
         * better performance when writing to a CipherOutputStream. You
         * can try this yourself by fiddling with this value and running
         * EncryptingEntityBenchmark. */
        final int bufferSize = 128;

        InputStream contentStream = getContent();
        bytesCopied = IOUtils.copy(contentStream, out, bufferSize);
        out.flush();
        try {
            contentStream.close();
        } catch (IOException e) {
            LOGGER.error("Failed to close content stream in EncryptingEntity.", e);
        }
    }

    /* If we don't know the length of the underlying content stream, we
     * count the number of bytes written, so that it is available. */
    if (originalLength == UNKNOWN_LENGTH) {
        originalLength = bytesCopied;
    } else if (originalLength != bytesCopied) {
        MantaIOException e = new MantaIOException(
                "Bytes copied doesn't equal the " + "specified content length");
        e.setContextValue("specifiedContentLength", originalLength);
        e.setContextValue("actualContentLength", bytesCopied);
        throw e;
    }
}

From source file:hudson.model.LargeText.java

/**
 * Writes the tail portion of the file to the {@link Writer}.
 *
 * <p>/*from   ww w.  j a  va  2s .  c  o  m*/
 * The text file is assumed to be in the system default encoding.
 *
 * @param start
 *      The byte offset in the input file where the write operation starts.
 *
 * @return
 *      if the file is still being written, this method writes the file
 *      until the last newline character and returns the offset to start
 *      the next write operation.
 */
public long writeLogTo(long start, Writer w) throws IOException {
    CountingOutputStream os = new CountingOutputStream(new WriterOutputStream(w));

    Session f = source.open();
    f.skip(start);

    if (completed) {
        // write everything till EOF
        byte[] buf = new byte[1024];
        int sz;
        while ((sz = f.read(buf)) >= 0)
            os.write(buf, 0, sz);
    } else {
        ByteBuf buf = new ByteBuf(null, f);
        HeadMark head = new HeadMark(buf);
        TailMark tail = new TailMark(buf);

        while (tail.moveToNextLine(f)) {
            head.moveTo(tail, os);
        }
        head.finish(os);
    }

    f.close();
    os.flush();

    return os.getCount() + start;
}

From source file:org.apache.flex.compiler.clients.ASC.java

/**
 * When {@code -swf} option is set, ASC compiles the source files into a SWF
 * file.
 *
 * @param applicationProject application project
 * @param compilationUnits compilation unit(s) for the source file(s)
 * @param sourceFilename source file name
 * @param startTime time the build was started in nanoseconds
 * @return true if success
 * @throws InterruptedException error from compilation threads
 */
private boolean generateSWF(String outputDirectoryName, String outputBaseName,
        final ASCProject applicationProject, final Set<ICompilationUnit> compilationUnits,
        final String sourceFilename, ProblemQuery problemQuery, long startTime) throws InterruptedException {
    boolean success = true;
    final ArrayList<ICompilerProblem> problemsBuildingSWF = new ArrayList<ICompilerProblem>();
    final ISWFTarget target = new AppSWFTarget(applicationProject, new ASCTargetSettings(sourceFilename), null,
            compilationUnits);
    final ISWF swf = target.build(problemsBuildingSWF);

    if (swf != null) {
        swf.setTopLevelClass(getSymbolClass());

        final ISWFWriter writer = new SWFWriter(swf, Header.Compression.NONE);
        final String outputFileNameWithExt = outputBaseName + ".swf";
        final File outputFile = new File(outputDirectoryName + outputFileNameWithExt);
        // try-with-resources ensures the file stream is closed even when
        // writeTo() throws; the original leaked the stream in that case.
        try (CountingOutputStream output = new CountingOutputStream(
                new BufferedOutputStream(new FileOutputStream(outputFile)))) {
            writer.writeTo(output);
            output.flush();
            writer.close();

            out.format("%s, %d bytes written in %5.3f seconds\n", outputFile.toString(), output.getByteCount(),
                    (System.nanoTime() - startTime) / 1e9);
        } catch (IOException e) {
            problemQuery.add(new FileWriteProblem(e));
            success = false;
        }
    } else {
        err.println("Unable to build SWF.");
        success = false;
    }

    problemQuery.addAll(problemsBuildingSWF);

    return success;
}

From source file:org.apache.flex.swf.io.SWFWriter.java

@Override
public int writeTo(File outputFile) throws FileNotFoundException, IOException {
    // Ensure that the directory for the SWF exists.
    final File outputDirectory = new File(outputFile.getAbsoluteFile().getParent());
    outputDirectory.mkdirs();

    // Write out the SWF, counting how many bytes were written.
    // try-with-resources closes the file stream even when writeTo() throws;
    // the original leaked the stream (and its OS file handle) in that case.
    final int swfSize;
    try (CountingOutputStream output = new CountingOutputStream(
            new BufferedOutputStream(new FileOutputStream(outputFile)))) {
        writeTo(output);
        output.flush();
        swfSize = output.getCount();
    }
    close();

    return swfSize;
}

From source file:org.exist.mongodb.xquery.gridfs.Store.java

/**
 * Stores the content compressed (GZIP) into the GridFS file, recording
 * original size, compressed size, and an MD5 checksum of the raw data
 * as file metadata.
 *
 * @param gfsFile   target GridFS file
 * @param stopWatch timer used to measure the serialization phase
 * @param content   the item to serialize and store
 * @param dataType  XQuery data type code of the content
 * @throws NoSuchAlgorithmException when the MD5 digest is unavailable
 * @throws IOException              on write failure
 * @throws XPathException           on serialization failure
 */
void writeCompressed(GridFSInputFile gfsFile, StopWatch stopWatch, Item content, int dataType)
        throws NoSuchAlgorithmException, IOException, XPathException {
    // Store data compressed, add statistics
    try (OutputStream stream = gfsFile.getOutputStream()) {
        MessageDigest md = MessageDigest.getInstance("MD5");
        // Stream chain (outermost to innermost):
        //   cosRaw (raw byte count) -> dos (MD5) -> gos (GZIP) -> cosGZ (compressed byte count) -> stream
        CountingOutputStream cosGZ = new CountingOutputStream(stream);
        GZIPOutputStream gos = new GZIPOutputStream(cosGZ);
        DigestOutputStream dos = new DigestOutputStream(gos, md);
        CountingOutputStream cosRaw = new CountingOutputStream(dos);

        stopWatch.start();
        try {
            ContentSerializer.serialize(content, context, cosRaw);
            cosRaw.flush();
        } finally {
            // Closing in a finally block finishes the GZIP trailer and avoids
            // leaking the chain when serialize() throws; the outer
            // try-with-resources close of `stream` afterwards is harmless.
            cosRaw.close();
        }
        stopWatch.stop();

        long nrBytesRaw = cosRaw.getByteCount();
        long nrBytesGZ = cosGZ.getByteCount();
        String checksum = Hex.encodeHexString(dos.getMessageDigest().digest());

        BasicDBObject info = new BasicDBObject();
        info.put(Constants.EXIST_COMPRESSION, GZIP);
        info.put(Constants.EXIST_ORIGINAL_SIZE, nrBytesRaw);
        info.put(Constants.EXIST_ORIGINAL_MD5, checksum);
        info.put(Constants.EXIST_DATATYPE, dataType);
        info.put(Constants.EXIST_DATATYPE_TEXT, Type.getTypeName(dataType));

        gfsFile.setMetaData(info);

        LOG.info("original_md5:" + checksum);
        LOG.info("compression ratio:" + ((100l * nrBytesGZ) / nrBytesRaw));

    }
}

From source file:org.jenkinsci.plugins.fabric8.support.hack.LargeText.java

/**
 * Writes the tail portion of the file to the {@link OutputStream}.
 *
 * @param start/*from w  ww.  ja  v  a  2s . co m*/
 *      The byte offset in the input file where the write operation starts.
 *
 * @return
 *      if the file is still being written, this method writes the file
 *      until the last newline character and returns the offset to start
 *      the next write operation.
 */
public long writeLogTo(long start, OutputStream out) throws IOException {
    CountingOutputStream os = new CountingOutputStream(out);

    Session f = source.open();
    f.skip(start);

    if (completed) {
        // write everything till EOF
        byte[] buf = new byte[1024];
        int sz;
        while ((sz = f.read(buf)) >= 0)
            os.write(buf, 0, sz);
    } else {
        ByteBuf buf = new ByteBuf(null, f);
        HeadMark head = new HeadMark(buf);
        TailMark tail = new TailMark(buf);
        buf = null;

        int readLines = 0;
        while (tail.moveToNextLine(f) && readLines++ < MAX_LINES_READ) {
            head.moveTo(tail, os);
        }
        head.finish(os);
    }

    f.close();
    os.flush();

    return os.getCount() + start;
}

From source file:org.jenkinsci.plugins.fabric8.support.hack.LargeText.java

/**
 * Writes the section of the file {@link OutputStream}.
 *
 * @param start/* w ww . j  av a 2 s .co m*/
 *      The byte offset in the input file where the write operation starts.
 *
 * @return
 *      if the file is still being written, this method writes the file
 *      until the last newline character and returns the offset to start
 *      the next write operation.
 */
public long writeLogTo(long start, int size, OutputStream out) throws IOException {
    if (size <= 0) {
        return 0;
    }

    CountingOutputStream os = new CountingOutputStream(out);

    Session f = source.open();
    f.skip(start);

    long end = start + size;

    byte[] buf = new byte[size];
    int sz;
    if ((sz = f.read(buf)) >= 0) {
        os.write(buf, 0, sz);
    }
    /*
            if(completed) {
            } else {
    ByteBuf buf = new ByteBuf(null,f, size);
    HeadMark head = new HeadMark(buf);
    TailMark tail = new TailMark(buf);
            
    int readLines = 0;
    while(tail.moveToNextLine(f) && readLines++ < MAX_LINES_READ) {
        head.moveTo(tail, os);
        if (buf.isFull() || os.getCount() >= end) {
            break;
        }
    }
    head.finish(os);
            }
    */

    f.close();
    os.flush();

    return os.getCount() + start;
}