Usage examples for the com.google.common.io CountingOutputStream constructor:
public CountingOutputStream(OutputStream out)
From source file:com.metamx.druid.kv.GenericIndexedWriter.java
/**
 * Opens this writer by allocating the two output files it writes to.
 *
 * <p>Both streams are wrapped in {@link CountingOutputStream} so the number
 * of bytes written to the header and values sections can be read back later.
 *
 * @throws IOException if the IO peon cannot create either underlying stream
 */
public void open() throws IOException {
    final OutputStream rawHeader = ioPeon.makeOutputStream(makeFilename("header"));
    headerOut = new CountingOutputStream(rawHeader);
    final OutputStream rawValues = ioPeon.makeOutputStream(makeFilename("values"));
    valuesOut = new CountingOutputStream(rawValues);
}
From source file:io.druid.segment.data.IntermediateLongSupplierSerializer.java
/**
 * Opens this serializer by creating the temporary output file.
 *
 * <p>The stream is wrapped in a {@link CountingOutputStream} so the number of
 * bytes written can be queried afterwards.
 *
 * @throws IOException if the IO peon cannot create the underlying stream
 */
public void open() throws IOException {
    final OutputStream raw = ioPeon.makeOutputStream(tempFile);
    tempOut = new CountingOutputStream(raw);
}
From source file:org.locationtech.geogig.spring.dto.BatchObjects.java
/**
 * Streams the packed objects to {@code out}.
 *
 * <p>Writes nothing unless both {@code packer} and {@code deduplicator} have
 * been set. The output is wrapped in a {@link CountingOutputStream} and the
 * objects are funneled through the V1 serialization format. Any failure is
 * rethrown as a {@link RuntimeException} with the original cause preserved;
 * the deduplicator is always released.
 *
 * @param out destination stream for the encoded batch
 */
@Override
protected void encode(OutputStream out) {
    // Nothing to encode unless both collaborators were configured.
    if (packer == null || deduplicator == null) {
        return;
    }
    CountingOutputStream counting = new CountingOutputStream(out);
    try {
        ObjectFunnel funnel = ObjectFunnels.newFunnel(counting, DataStreamSerializationFactoryV1.INSTANCE);
        try {
            packer.write(funnel, want, have, false, deduplicator);
            counting.flush();
        } finally {
            // FIX: previously the funnel was closed only on the success path,
            // leaking it (and any buffered output) when packer.write threw.
            funnel.close();
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        deduplicator.release();
    }
}
From source file:org.apache.beam.sdk.io.gcp.bigquery.TableRowWriter.java
TableRowWriter(String basename) throws Exception { String uId = UUID.randomUUID().toString(); resourceId = FileSystems.matchNewResource(basename + uId, false); LOG.info("Opening TableRowWriter to {}.", resourceId); channel = FileSystems.create(resourceId, MimeTypes.TEXT); out = new CountingOutputStream(Channels.newOutputStream(channel)); }
From source file:com.pinterest.secor.io.impl.DelimitedTextFileReaderWriter.java
/**
 * Opens the given log file for either reading or writing.
 *
 * <p>In {@code Reader} mode the input is buffered and, when a codec is
 * supplied, decompressed; the writer-side fields stay {@code null}. In
 * {@code Writer} mode the output is counted, optionally compressed, and
 * buffered; the reader-side field stays {@code null}.
 *
 * @param path  log file location
 * @param codec optional compression codec, or {@code null} for plain text
 * @param type  whether to open a reader or a writer
 * @throws FileNotFoundException if the file cannot be opened for reading
 * @throws IOException if the filesystem interaction fails
 * @throws IllegalArgumentException if {@code type} is neither Reader nor Writer
 */
public DelimitedTextFileReaderWriter(LogFilePath path, CompressionCodec codec, FileReaderWriter.Type type)
        throws FileNotFoundException, IOException {
    final Path fsPath = new Path(path.getLogFilePath());
    final FileSystem fs = FileUtil.getFileSystem(path.getLogFilePath());
    if (type == FileReaderWriter.Type.Reader) {
        InputStream raw = fs.open(fsPath);
        InputStream maybeDecompressed = (codec == null) ? raw : codec.createInputStream(raw);
        this.mReader = new BufferedInputStream(maybeDecompressed);
        this.mOffset = path.getOffset();
        this.mCountingStream = null;
        this.mWriter = null;
    } else if (type == FileReaderWriter.Type.Writer) {
        // Count raw (compressed) bytes, i.e. below the compression layer.
        this.mCountingStream = new CountingOutputStream(fs.create(fsPath));
        OutputStream maybeCompressed = (codec == null)
                ? this.mCountingStream
                : codec.createOutputStream(this.mCountingStream);
        this.mWriter = new BufferedOutputStream(maybeCompressed);
        this.mReader = null;
    } else {
        throw new IllegalArgumentException("Undefined File Type: " + type);
    }
}
From source file:io.druid.segment.data.EntireLayoutLongSupplierSerializer.java
@Override public void close() throws IOException { writer.flush();// w w w . j a v a 2 s . com valuesOut.close(); try (CountingOutputStream metaOut = new CountingOutputStream(ioPeon.makeOutputStream(metaFile))) { metaOut.write(CompressedLongsIndexedSupplier.version); metaOut.write(Ints.toByteArray(numInserted)); metaOut.write(Ints.toByteArray(0)); writer.putMeta(metaOut, CompressedObjectStrategy.CompressionStrategy.NONE); metaOut.close(); metaCount = metaOut.getCount(); } }
From source file:io.druid.segment.data.EntireLayoutFloatSupplierSerializer.java
@Override public void close() throws IOException { valuesOut.close();//w w w . java 2s . c om try (CountingOutputStream metaOut = new CountingOutputStream(ioPeon.makeOutputStream(metaFile))) { metaOut.write(CompressedFloatsIndexedSupplier.version); metaOut.write(Ints.toByteArray(numInserted)); metaOut.write(Ints.toByteArray(0)); metaOut.write(CompressedObjectStrategy.CompressionStrategy.NONE.getId()); metaOut.close(); metaCount = metaOut.getCount(); } }
From source file:io.prestosql.plugin.hive.RcFileFileWriter.java
public RcFileFileWriter(OutputStream outputStream, Callable<Void> rollbackAction, RcFileEncoding rcFileEncoding, List<Type> fileColumnTypes, Optional<String> codecName, int[] fileInputColumnIndexes, Map<String, String> metadata, Optional<Supplier<RcFileDataSource>> validationInputFactory) throws IOException { this.outputStream = new CountingOutputStream(outputStream); rcFileWriter = new RcFileWriter(new OutputStreamSliceOutput(this.outputStream), fileColumnTypes, rcFileEncoding, codecName,//from w w w . ja v a 2 s.co m new AircompressorCodecFactory(new HadoopCodecFactory(getClass().getClassLoader())), metadata, validationInputFactory.isPresent()); this.rollbackAction = requireNonNull(rollbackAction, "rollbackAction is null"); this.fileInputColumnIndexes = requireNonNull(fileInputColumnIndexes, "outputColumnInputIndexes is null"); ImmutableList.Builder<Block> nullBlocks = ImmutableList.builder(); for (Type fileColumnType : fileColumnTypes) { BlockBuilder blockBuilder = fileColumnType.createBlockBuilder(null, 1, 0); blockBuilder.appendNull(); nullBlocks.add(blockBuilder.build()); } this.nullBlocks = nullBlocks.build(); this.validationInputFactory = validationInputFactory; }
From source file:org.eclipse.packagedrone.utils.rpm.build.PayloadRecorder.java
public PayloadRecorder(final boolean autoFinish) throws IOException { this.autoFinish = autoFinish; this.tempFile = Files.createTempFile("rpm-", null); try {// w w w .j ava 2s. c om this.fileStream = new BufferedOutputStream(Files.newOutputStream(this.tempFile, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING)); this.payloadCounter = new CountingOutputStream(this.fileStream); final GZIPOutputStream payloadStream = new GZIPOutputStream(this.payloadCounter); this.archiveCounter = new CountingOutputStream(payloadStream); // setup archive stream this.archiveStream = new CpioArchiveOutputStream(this.archiveCounter, CpioConstants.FORMAT_NEW, 4, "UTF-8"); } catch (final IOException e) { Files.deleteIfExists(this.tempFile); throw e; } }
From source file:ch.ledcom.tomcat.valves.SessionSizeValve.java
/**
 * Measures the Java-serialized size of {@code attribute} in bytes by
 * serializing it into a counting stream backed by a null sink.
 *
 * @param attribute object to serialize; must be {@link java.io.Serializable}
 * @return number of bytes the serialized form occupies
 * @throws IOException if serialization fails
 */
private long measureSerializedSize(final Object attribute) throws IOException {
    final Closer closer = Closer.create();
    try {
        final CountingOutputStream countingStream = closer
                .register(new CountingOutputStream(ByteStreams.nullOutputStream()));
        final ObjectOutputStream out = closer.register(new ObjectOutputStream(countingStream));
        out.writeObject(attribute);
        // FIX: flush before reading the count — ObjectOutputStream buffers block
        // data internally, so without a flush the trailing buffered bytes were
        // not yet written to countingStream and the size was undercounted.
        out.flush();
        return countingStream.getCount();
    } finally {
        closer.close();
    }
}