Example usage for com.google.common.io.CountingOutputStream#CountingOutputStream(OutputStream)

Introduction

On this page you can find example usages of the com.google.common.io.CountingOutputStream(OutputStream) constructor, collected from open-source projects.

Prototype

public CountingOutputStream(OutputStream out) 

Description

Wraps another output stream, counting the number of bytes written.
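
A minimal, self-contained sketch of the constructor before the project-level examples below (the in-memory sink and the sample string are illustrative):

import com.google.common.io.CountingOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class CountingOutputStreamDemo {
    public static void main(String[] args) throws IOException {
        // Wrap any OutputStream; every write passes through and is counted.
        try (CountingOutputStream out = new CountingOutputStream(new ByteArrayOutputStream())) {
            out.write("hello".getBytes(StandardCharsets.UTF_8));
            System.out.println(out.getCount()); // prints 5
        }
    }
}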

Usage

From source file:org.apache.beam.sdk.util.PackageUtil.java

/**
 * Compute and cache the attributes of a classpath element that we will need to stage it.
 *
 * @param classpathElement the file or directory to be staged.
 * @param stagingPath The base location for staged classpath elements.
 * @param overridePackageName If non-null, use the given value as the package name
 *                            instead of generating one automatically.
 * @return a {@link PackageAttributes} containing metadata about the object to be staged.
 */
static PackageAttributes createPackageAttributes(File classpathElement, String stagingPath,
        String overridePackageName) {
    try {
        boolean directory = classpathElement.isDirectory();

        // Compute size and hash in one pass over file or directory.
        Hasher hasher = Hashing.md5().newHasher();
        OutputStream hashStream = Funnels.asOutputStream(hasher);
        CountingOutputStream countingOutputStream = new CountingOutputStream(hashStream);

        if (!directory) {
            // Files are staged as-is.
            Files.asByteSource(classpathElement).copyTo(countingOutputStream);
        } else {
            // Directories are recursively zipped.
            ZipFiles.zipDirectory(classpathElement, countingOutputStream);
        }

        long size = countingOutputStream.getCount();
        String hash = Base64Variants.MODIFIED_FOR_URL.encode(hasher.hash().asBytes());

        // Create the DataflowPackage with staging name and location.
        String uniqueName = getUniqueContentName(classpathElement, hash);
        String resourcePath = IOChannelUtils.resolve(stagingPath, uniqueName);
        DataflowPackage target = new DataflowPackage();
        target.setName(overridePackageName != null ? overridePackageName : uniqueName);
        target.setLocation(resourcePath);

        return new PackageAttributes(size, hash, directory, target);
    } catch (IOException e) {
        throw new RuntimeException("Package setup failure for " + classpathElement, e);
    }
}
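
The idiom above, computing a size and an MD5 hash in a single pass by writing through a CountingOutputStream into a hashing sink, reduces to the following sketch (the method name is illustrative and error handling is omitted):

import com.google.common.hash.Funnels;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import com.google.common.io.CountingOutputStream;
import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;

static void printSizeAndHash(File file) throws IOException {
    Hasher hasher = Hashing.md5().newHasher();
    // Chain: file bytes -> CountingOutputStream (counts) -> hasher sink (hashes).
    CountingOutputStream counter = new CountingOutputStream(Funnels.asOutputStream(hasher));
    Files.asByteSource(file).copyTo(counter);
    long size = counter.getCount(); // number of bytes copied
    HashCode hash = hasher.hash();  // MD5 over the same bytes, from the same pass
    System.out.println(size + " bytes, MD5 " + hash);
}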

From source file:org.apache.beam.runners.dataflow.util.PackageUtil.java

/**
 * Compute and cache the attributes of a classpath element that we will need to stage it.
 *
 * @param source the file or directory to be staged.
 * @param stagingPath The base location for staged classpath elements.
 * @param overridePackageName If non-null, use the given value as the package name
 *                            instead of generating one automatically.
 * @return a {@link PackageAttributes} containing metadata about the object to be staged.
 */
static PackageAttributes createPackageAttributes(File source, String stagingPath,
        @Nullable String overridePackageName) {
    boolean directory = source.isDirectory();

    // Compute size and hash in one pass over file or directory.
    Hasher hasher = Hashing.md5().newHasher();
    OutputStream hashStream = Funnels.asOutputStream(hasher);
    try (CountingOutputStream countingOutputStream = new CountingOutputStream(hashStream)) {
        if (!directory) {
            // Files are staged as-is.
            Files.asByteSource(source).copyTo(countingOutputStream);
        } else {
            // Directories are recursively zipped.
            ZipFiles.zipDirectory(source, countingOutputStream);
        }
        countingOutputStream.flush();

        long size = countingOutputStream.getCount();
        String hash = Base64Variants.MODIFIED_FOR_URL.encode(hasher.hash().asBytes());

        // Create the DataflowPackage with staging name and location.
        String uniqueName = getUniqueContentName(source, hash);
        String resourcePath = FileSystems.matchNewResource(stagingPath, true)
                .resolve(uniqueName, StandardResolveOptions.RESOLVE_FILE).toString();
        DataflowPackage target = new DataflowPackage();
        target.setName(overridePackageName != null ? overridePackageName : uniqueName);
        target.setLocation(resourcePath);

        return new PackageAttributes(size, hash, directory, target, source.getPath());
    } catch (IOException e) {
        throw new RuntimeException("Package setup failure for " + source, e);
    }
}

From source file:io.druid.segment.data.EntireLayoutDoubleSupplierSerializer.java

@Override
public void close() throws IOException {
    valuesOut.close();
    try (CountingOutputStream metaOut = new CountingOutputStream(ioPeon.makeOutputStream(metaFile))) {
        metaOut.write(CompressedDoublesIndexedSupplier.version);
        metaOut.write(Ints.toByteArray(numInserted));
        metaOut.write(Ints.toByteArray(0));
        metaOut.write(CompressedObjectStrategy.CompressionStrategy.NONE.getId());
        metaOut.close();
        metaCount = metaOut.getCount();
    }
}
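
The Druid serializers here and below all follow one pattern: stream a few header fields through a CountingOutputStream, then record the header's byte length. Reduced to its essentials (an in-memory sink stands in for the source's ioPeon stream):

import com.google.common.io.CountingOutputStream;
import com.google.common.primitives.Ints;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

static long writeMetaHeader(byte version, int numInserted) throws IOException {
    try (CountingOutputStream metaOut = new CountingOutputStream(new ByteArrayOutputStream())) {
        metaOut.write(version);                       // 1 byte
        metaOut.write(Ints.toByteArray(numInserted)); // 4 bytes, big-endian
        return metaOut.getCount();                    // 5
    }
}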

From source file:io.druid.segment.data.BlockLayoutFloatSupplierSerializer.java

@Override
public void close() throws IOException {
    endBuffer.limit(endBuffer.position());
    endBuffer.rewind();
    flattener.write(StupidResourceHolder.create(endBuffer));
    endBuffer = null;
    flattener.close();

    try (CountingOutputStream metaOut = new CountingOutputStream(ioPeon.makeOutputStream(metaFile))) {
        metaOut.write(CompressedFloatsIndexedSupplier.version);
        metaOut.write(Ints.toByteArray(numInserted));
        metaOut.write(Ints.toByteArray(sizePer));
        metaOut.write(compression.getId());
        metaOut.close();
        metaCount = metaOut.getCount();
    }
}

From source file:com.google.enterprise.adaptor.sharepoint.HtmlResponseWriter.java

public HtmlResponseWriter(OutputStream os, Charset charset, DocIdEncoder docIdEncoder, Locale locale,
        long thresholdBytes, DocIdPusher pusher, Executor executor) {
    if (os == null) {
        throw new NullPointerException();
    }
    if (charset == null) {
        throw new NullPointerException();
    }
    if (docIdEncoder == null) {
        throw new NullPointerException();
    }
    if (locale == null) {
        throw new NullPointerException();
    }
    if (pusher == null) {
        throw new NullPointerException();
    }
    if (executor == null) {
        throw new NullPointerException();
    }
    countingOutputStream = new CountingOutputStream(os);
    this.writer = new OutputStreamWriter(countingOutputStream, charset);
    this.docIdEncoder = docIdEncoder;
    this.locale = locale;
    this.thresholdBytes = thresholdBytes;
    this.pusher = pusher;
    this.executor = executor;
}
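
Because the CountingOutputStream is wrapped before the OutputStreamWriter, getCount() reports encoded bytes rather than characters, which is what a byte threshold needs. A small sketch of that layering (the sink and sample text are illustrative):

import com.google.common.io.CountingOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

static void countEncodedBytes() throws IOException {
    CountingOutputStream counting = new CountingOutputStream(new ByteArrayOutputStream());
    Writer writer = new OutputStreamWriter(counting, StandardCharsets.UTF_8);
    writer.write("héllo"); // five characters
    writer.flush();        // push buffered characters through the encoder
    System.out.println(counting.getCount()); // 6: 'é' encodes as two bytes in UTF-8
}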

From source file:io.druid.segment.data.BlockLayoutLongSupplierSerializer.java

@Override
public void close() throws IOException {
    if (endBuffer != null) {
        writer.flush();
        endBuffer.limit(endBuffer.position());
        endBuffer.rewind();
        flattener.write(StupidResourceHolder.create(endBuffer));
    }
    endBuffer = null;
    flattener.close();

    try (CountingOutputStream metaOut = new CountingOutputStream(ioPeon.makeOutputStream(metaFile))) {
        metaOut.write(CompressedLongsIndexedSupplier.version);
        metaOut.write(Ints.toByteArray(numInserted));
        metaOut.write(Ints.toByteArray(sizePer));
        writer.putMeta(metaOut, compression);
        metaOut.close();
        metaCount = metaOut.getCount();
    }
}

From source file:org.locationtech.geogig.remote.http.BinaryPackedChanges.java

/**
 * Writes the set of changes to the provided output stream.
 *
 * @param out the stream to write to
 * @param changes the changes to write
 * @throws IOException
 * @return the number of objects written
 */
public long write(OutputStream out, Iterator<DiffEntry> changes) throws IOException {
    final ObjectStore objectDatabase = repository.objectDatabase();
    out = new CountingOutputStream(out);

    // avoids sending the same metadata object multiple times
    Set<ObjectId> writtenMetadataIds = new HashSet<ObjectId>();

    // buffer to avoid ObjectId cloning its internal state for each object
    byte[] oidbuffer = new byte[ObjectId.NUM_BYTES];

    long objectCount = 0;

    while (changes.hasNext()) {
        DiffEntry diff = changes.next();

        if (diff.isDelete()) {
            out.write(CHUNK_TYPE.DIFF_ENTRY.value());
        } else {
            // it's a change or an addition; the new object is guaranteed to be present
            NodeRef newObject = diff.getNewObject();
            ObjectId metadataId = newObject.getMetadataId();
            if (writtenMetadataIds.contains(metadataId)) {
                out.write(CHUNK_TYPE.OBJECT_AND_DIFF_ENTRY.value());
            } else {
                out.write(CHUNK_TYPE.METADATA_OBJECT_AND_DIFF_ENTRY.value());
                RevObject metadata = objectDatabase.get(metadataId);
                writeObjectId(metadataId, out, oidbuffer);
                serializer.write(metadata, out);
                writtenMetadataIds.add(metadataId);
                objectCount++;
            }

            ObjectId objectId = newObject.getObjectId();
            writeObjectId(objectId, out, oidbuffer);
            RevObject object = objectDatabase.get(objectId);
            serializer.write(object, out);
            objectCount++;
        }
        DataOutputStream dataOut = new DataOutputStream(out);
        FormatCommonV1.writeDiff(diff, dataOut);
        dataOut.flush();
    }
    // signal the end of changes
    out.write(CHUNK_TYPE.FILTER_FLAG.value());
    final boolean filtersApplied = changes instanceof FilteredDiffIterator
            && ((FilteredDiffIterator) changes).wasFiltered();
    out.write(filtersApplied ? 1 : 0);

    LOGGER.info(String.format("Written %,d bytes to remote accounting for %,d objects.",
            ((CountingOutputStream) out).getCount(), objectCount));
    return objectCount;
}

From source file:org.locationtech.geogig.remote.BinaryPackedChanges.java

/**
 * Writes the set of changes to the provided output stream.
 *
 * @param out the stream to write to
 * @param changes the changes to write
 * @throws IOException
 * @return the number of objects written
 */
public long write(OutputStream out, Iterator<DiffEntry> changes) throws IOException {
    final ObjectDatabase objectDatabase = repository.objectDatabase();
    out = new CountingOutputStream(out);

    // avoids sending the same metadata object multiple times
    Set<ObjectId> writtenMetadataIds = new HashSet<ObjectId>();

    // buffer to avoid ObjectId cloning its internal state for each object
    byte[] oidbuffer = new byte[ObjectId.NUM_BYTES];

    long objectCount = 0;

    while (changes.hasNext()) {
        DiffEntry diff = changes.next();

        if (diff.isDelete()) {
            out.write(CHUNK_TYPE.DIFF_ENTRY.value());
        } else {
            // it's a change or an addition; the new object is guaranteed to be present
            NodeRef newObject = diff.getNewObject();
            ObjectId metadataId = newObject.getMetadataId();
            if (writtenMetadataIds.contains(metadataId)) {
                out.write(CHUNK_TYPE.OBJECT_AND_DIFF_ENTRY.value());
            } else {
                out.write(CHUNK_TYPE.METADATA_OBJECT_AND_DIFF_ENTRY.value());
                RevObject metadata = objectDatabase.get(metadataId);
                writeObjectId(metadataId, out, oidbuffer);
                serializer.write(metadata, out);
                writtenMetadataIds.add(metadataId);
                objectCount++;
            }

            ObjectId objectId = newObject.objectId();
            writeObjectId(objectId, out, oidbuffer);
            RevObject object = objectDatabase.get(objectId);
            serializer.write(object, out);
            objectCount++;
        }
        DataOutput dataOut = new DataOutputStream(out);
        FormatCommonV1.writeDiff(diff, dataOut);
    }
    // signal the end of changes
    out.write(CHUNK_TYPE.FILTER_FLAG.value());
    final boolean filtersApplied = changes instanceof FilteredDiffIterator
            && ((FilteredDiffIterator) changes).wasFiltered();
    out.write(filtersApplied ? 1 : 0);

    LOGGER.info(String.format("Written %,d bytes to remote accounting for %,d objects.",
            ((CountingOutputStream) out).getCount(), objectCount));
    return objectCount;
}
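
Both geogig variants reassign the wrapped stream back to the OutputStream parameter and later cast it to CountingOutputStream to read the count; keeping a typed local reference avoids that cast. A sketch of the alternative (method and parameter names are illustrative):

import com.google.common.io.CountingOutputStream;
import java.io.IOException;
import java.io.OutputStream;

static long writeAndCount(OutputStream rawOut, byte[] payload) throws IOException {
    CountingOutputStream counting = new CountingOutputStream(rawOut);
    counting.write(payload);
    counting.flush();
    return counting.getCount(); // no cast needed to recover the byte count
}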

From source file:io.druid.segment.data.BlockLayoutDoubleSupplierSerializer.java

@Override
public void close() throws IOException {
    endBuffer.limit(endBuffer.position());
    endBuffer.rewind();
    flattener.write(StupidResourceHolder.create(endBuffer));
    endBuffer = null;
    flattener.close();

    try (CountingOutputStream metaOut = new CountingOutputStream(ioPeon.makeOutputStream(metaFile))) {
        metaOut.write(CompressedDoublesIndexedSupplier.version);
        metaOut.write(Ints.toByteArray(numInserted));
        metaOut.write(Ints.toByteArray(sizePer));
        metaOut.write(compression.getId());
        metaOut.close();
        metaCount = metaOut.getCount();
    }
}

From source file:com.google.cloud.dataflow.sdk.coders.StringUtf8Coder.java

/**
 * {@inheritDoc}
 *
 * @return the byte size of the UTF-8 encoding of a string or, in a nested context,
 * the byte size of the encoding plus the encoded length prefix.
 */
@Override
protected long getEncodedElementByteSize(String value, Context context) throws Exception {
    if (value == null) {
        throw new CoderException("cannot encode a null String");
    }
    if (context.isWholeStream) {
        return Utf8.encodedLength(value);
    } else {
        CountingOutputStream countingStream = new CountingOutputStream(ByteStreams.nullOutputStream());
        DataOutputStream stream = new DataOutputStream(countingStream);
        writeString(value, stream);
        return countingStream.getCount();
    }
}
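
The idiom in this last example, pairing CountingOutputStream with ByteStreams.nullOutputStream(), measures an encoding's size without buffering any bytes. A sketch under the assumption that DataOutputStream.writeUTF stands in for the coder's private writeString helper:

import com.google.common.io.ByteStreams;
import com.google.common.io.CountingOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

static long encodedSize(String value) throws IOException {
    // The null sink discards every byte; only the count survives.
    CountingOutputStream counting = new CountingOutputStream(ByteStreams.nullOutputStream());
    try (DataOutputStream out = new DataOutputStream(counting)) {
        out.writeUTF(value); // stand-in for writeString: 2-byte length prefix plus modified UTF-8
    }
    return counting.getCount();
}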