Example usage for com.google.common.io ByteSink ByteSink

List of usage examples for com.google.common.io ByteSink ByteSink

Introduction

On this page you can find example usage for com.google.common.io ByteSink ByteSink.

Prototype

protected ByteSink() 

Source Link

Document

Constructor for use by subclasses.

Usage

From source file:io.druid.benchmark.FloatCompressionBenchmarkFileGenerator.java

/**
 * Generates benchmark data files and compressed variants for float-column compression benchmarks.
 *
 * <p>For each named value-distribution schema, writes a plain-text data file (one float per line)
 * into {@code dirPath}, then re-encodes each data file once per configured
 * {@link CompressedObjectStrategy.CompressionStrategy}, printing the compressed size in KiB.
 *
 * @param args optional; {@code args[0]} overrides the default output directory {@code dirPath}
 * @throws IOException        if reading or writing any of the benchmark files fails
 * @throws URISyntaxException declared for interface compatibility with callers/scripts
 */
public static void main(String[] args) throws IOException, URISyntaxException {
    if (args.length >= 1) {
        dirPath = args[0];
    }

    // Value distributions to benchmark: enumerated, low/high-skew Zipf, sequential, and uniform.
    BenchmarkColumnSchema enumeratedSchema = BenchmarkColumnSchema.makeEnumerated("", ValueType.FLOAT, true, 1,
            0d, ImmutableList.<Object>of(0f, 1.1f, 2.2f, 3.3f, 4.4f),
            ImmutableList.of(0.95, 0.001, 0.0189, 0.03, 0.0001));
    BenchmarkColumnSchema zipfLowSchema = BenchmarkColumnSchema.makeZipf("", ValueType.FLOAT, true, 1, 0d, -1,
            1000, 1d);
    BenchmarkColumnSchema zipfHighSchema = BenchmarkColumnSchema.makeZipf("", ValueType.FLOAT, true, 1, 0d, -1,
            1000, 3d);
    BenchmarkColumnSchema sequentialSchema = BenchmarkColumnSchema.makeSequential("", ValueType.FLOAT, true, 1,
            0d, 1470187671, 2000000000);
    BenchmarkColumnSchema uniformSchema = BenchmarkColumnSchema.makeContinuousUniform("", ValueType.FLOAT, true,
            1, 0d, 0, 1000);

    Map<String, BenchmarkColumnValueGenerator> generators = new HashMap<>();
    generators.put("enumerate", new BenchmarkColumnValueGenerator(enumeratedSchema, 1));
    generators.put("zipfLow", new BenchmarkColumnValueGenerator(zipfLowSchema, 1));
    generators.put("zipfHigh", new BenchmarkColumnValueGenerator(zipfHighSchema, 1));
    generators.put("sequential", new BenchmarkColumnValueGenerator(sequentialSchema, 1));
    generators.put("uniform", new BenchmarkColumnValueGenerator(uniformSchema, 1));

    File dir = new File(dirPath);
    dir.mkdir(); // best effort: the directory may already exist

    // Create data files using BenchmarkColumnValueGenerator, one float value per line.
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        final File dataFile = new File(dir, entry.getKey());
        dataFile.delete(); // ignore result: absent file is fine, we overwrite anyway
        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(dataFile)))) {
            for (int i = 0; i < ROW_NUM; i++) {
                writer.write((Float) entry.getValue().generateRowValue() + "\n");
            }
        }
    }

    // Create compressed files using all combinations of CompressionStrategy provided.
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        for (CompressedObjectStrategy.CompressionStrategy compression : compressions) {
            String name = entry.getKey() + "-" + compression.toString();
            System.out.print(name + ": ");
            File compFile = new File(dir, name);
            compFile.delete(); // must not exist: FileChannel is opened with CREATE_NEW
            File dataFile = new File(dir, entry.getKey());

            TmpFileIOPeon iopeon = new TmpFileIOPeon(true);
            FloatSupplierSerializer writer = CompressionFactory.getFloatSerializer(iopeon, "float",
                    ByteOrder.nativeOrder(), compression);

            // Both resources are managed by try-with-resources so an open failure on the
            // output channel cannot leak the reader (previously it was created outside the try).
            try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(dataFile)));
                    FileChannel output = FileChannel.open(compFile.toPath(), StandardOpenOption.CREATE_NEW,
                            StandardOpenOption.WRITE)) {
                writer.open();
                String line;
                while ((line = br.readLine()) != null) {
                    writer.add(Float.parseFloat(line));
                }
                // The serializer consolidates into an in-memory sink; the bytes are then
                // written to the compressed file in one shot.
                final ByteArrayOutputStream baos = new ByteArrayOutputStream();
                writer.closeAndConsolidate(new ByteSink() {
                    @Override
                    public OutputStream openStream() throws IOException {
                        return baos;
                    }
                });
                output.write(ByteBuffer.wrap(baos.toByteArray()));
            } finally {
                iopeon.cleanup(); // always remove the serializer's temp files
            }
            System.out.print(compFile.length() / 1024 + "\n");
        }
    }
}

From source file:io.druid.benchmark.LongCompressionBenchmarkFileGenerator.java

/**
 * Generates benchmark data files and compressed variants for long-column compression benchmarks.
 *
 * <p>For each named value-distribution schema, writes a plain-text data file (one long per line)
 * into {@code dirPath}, then re-encodes each data file once per combination of
 * {@link CompressedObjectStrategy.CompressionStrategy} and
 * {@link CompressionFactory.LongEncodingStrategy}, printing the compressed size in KiB.
 *
 * @param args optional; {@code args[0]} overrides the default output directory {@code dirPath}
 * @throws IOException        if reading or writing any of the benchmark files fails
 * @throws URISyntaxException declared for interface compatibility with callers/scripts
 */
public static void main(String[] args) throws IOException, URISyntaxException {
    if (args.length >= 1) {
        dirPath = args[0];
    }

    // Value distributions to benchmark: enumerated, low/high-skew Zipf, sequential, and uniform.
    BenchmarkColumnSchema enumeratedSchema = BenchmarkColumnSchema.makeEnumerated("", ValueType.LONG, true, 1,
            0d, ImmutableList.<Object>of(0, 1, 2, 3, 4), ImmutableList.of(0.95, 0.001, 0.0189, 0.03, 0.0001));
    BenchmarkColumnSchema zipfLowSchema = BenchmarkColumnSchema.makeZipf("", ValueType.LONG, true, 1, 0d, -1,
            1000, 1d);
    BenchmarkColumnSchema zipfHighSchema = BenchmarkColumnSchema.makeZipf("", ValueType.LONG, true, 1, 0d, -1,
            1000, 3d);
    BenchmarkColumnSchema sequentialSchema = BenchmarkColumnSchema.makeSequential("", ValueType.LONG, true, 1,
            0d, 1470187671, 2000000000);
    BenchmarkColumnSchema uniformSchema = BenchmarkColumnSchema.makeDiscreteUniform("", ValueType.LONG, true, 1,
            0d, 0, 1000);

    Map<String, BenchmarkColumnValueGenerator> generators = new HashMap<>();
    generators.put("enumerate", new BenchmarkColumnValueGenerator(enumeratedSchema, 1));
    generators.put("zipfLow", new BenchmarkColumnValueGenerator(zipfLowSchema, 1));
    generators.put("zipfHigh", new BenchmarkColumnValueGenerator(zipfHighSchema, 1));
    generators.put("sequential", new BenchmarkColumnValueGenerator(sequentialSchema, 1));
    generators.put("uniform", new BenchmarkColumnValueGenerator(uniformSchema, 1));

    File dir = new File(dirPath);
    dir.mkdir(); // best effort: the directory may already exist

    // Create data files using BenchmarkColumnValueGenerator, one long value per line.
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        final File dataFile = new File(dir, entry.getKey());
        dataFile.delete(); // ignore result: absent file is fine, we overwrite anyway
        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(dataFile)))) {
            for (int i = 0; i < ROW_NUM; i++) {
                writer.write((long) entry.getValue().generateRowValue() + "\n");
            }
        }
    }

    // Create compressed files using all combinations of CompressionStrategy and LongEncoding provided.
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        for (CompressedObjectStrategy.CompressionStrategy compression : compressions) {
            for (CompressionFactory.LongEncodingStrategy encoding : encodings) {
                String name = entry.getKey() + "-" + compression.toString() + "-" + encoding.toString();
                System.out.print(name + ": ");
                File compFile = new File(dir, name);
                compFile.delete(); // must not exist: FileChannel is opened with CREATE_NEW
                File dataFile = new File(dir, entry.getKey());

                TmpFileIOPeon iopeon = new TmpFileIOPeon(true);
                LongSupplierSerializer writer = CompressionFactory.getLongSerializer(iopeon, "long",
                        ByteOrder.nativeOrder(), encoding, compression);

                // Both resources are managed by try-with-resources so an open failure on the
                // output channel cannot leak the reader (previously it was created outside the try).
                try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(dataFile)));
                        FileChannel output = FileChannel.open(compFile.toPath(), StandardOpenOption.CREATE_NEW,
                                StandardOpenOption.WRITE)) {
                    writer.open();
                    String line;
                    while ((line = br.readLine()) != null) {
                        writer.add(Long.parseLong(line));
                    }
                    // The serializer consolidates into an in-memory sink; the bytes are then
                    // written to the compressed file in one shot.
                    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    writer.closeAndConsolidate(new ByteSink() {
                        @Override
                        public OutputStream openStream() throws IOException {
                            return baos;
                        }
                    });
                    output.write(ByteBuffer.wrap(baos.toByteArray()));
                } finally {
                    iopeon.cleanup(); // always remove the serializer's temp files
                }
                System.out.print(compFile.length() / 1024 + "\n");
            }
        }
    }
}

From source file:org.haiku.haikudepotserver.support.AbstractExternalToolService.java

/**
 * Runs the external tool over {@code input} and returns the tool's output as a byte array.
 *
 * <p>Delegates to {@link #execute(Object, ByteSource, ByteSink)}, wrapping the input bytes
 * as a {@link ByteSource} and collecting the output in an in-memory sink.
 *
 * @param context tool-specific execution context
 * @param input   the bytes to feed to the tool; must be non-null and non-empty
 * @return the bytes produced by the tool
 * @throws IOException if the underlying execution fails
 */
protected byte[] execute(T context, byte[] input) throws IOException {
    Preconditions.checkArgument(null != input && 0 != input.length, "the input is not specified");

    final ByteArrayOutputStream capturedOutput = new ByteArrayOutputStream();
    final ByteSink memorySink = new ByteSink() {
        @Override
        public OutputStream openStream() {
            return capturedOutput;
        }
    };

    execute(context, ByteSource.wrap(input), memorySink);
    return capturedOutput.toByteArray();
}

From source file:com.b2international.snowowl.datastore.internal.file.DefaultFileRegistry.java

/**
 * Streams the stored attachment identified by {@code id} into {@code out}.
 *
 * <p>Copies the backing file's bytes to the supplied stream via a Guava {@link ByteSink}
 * adapter. Any {@link IOException} during the copy is rethrown as a
 * {@link SnowowlRuntimeException}.
 */
@Override
public void download(UUID id, OutputStream out) {
    final File attachmentFile = getFile(id);

    // Adapt the caller-supplied stream to Guava's sink abstraction for the copy.
    final ByteSink targetSink = new ByteSink() {
        @Override
        public OutputStream openStream() throws IOException {
            return out;
        }
    };

    try {
        Files.asByteSource(attachmentFile).copyTo(targetSink);
    } catch (IOException e) {
        throw new SnowowlRuntimeException("Failed to download attachment of " + id, e);
    }
}

From source file:com.metamx.common.CompressionUtils.java

/**
 * Copies {@code in} to {@code out} through a GZIP compressor, retrying the whole copy
 * when {@code shouldRetry} accepts the failure.
 *
 * @param in          source of the uncompressed bytes
 * @param out         destination sink; each attempt opens a fresh stream wrapped in GZIP
 * @param shouldRetry decides whether a thrown failure warrants another attempt
 * @return the number of bytes copied, as reported by {@link StreamUtils#retryCopy}
 * @throws IOException if the copy ultimately fails
 */
public static long gzip(final ByteSource in, final ByteSink out, Predicate<Throwable> shouldRetry)
        throws IOException {
    // Wrap the destination so every (re)opened stream compresses on the fly.
    final ByteSink gzippingSink = new ByteSink() {
        @Override
        public OutputStream openStream() throws IOException {
            return new GZIPOutputStream(out.openStream());
        }
    };
    return StreamUtils.retryCopy(in, gzippingSink, shouldRetry, DEFAULT_RETRY_COUNT);
}

From source file:com.google.devtools.build.lib.vfs.FileSystemUtils.java

/**
 * Returns a {@link ByteSink} view of {@code path}.
 *
 * @param path   the file to write to when the sink's stream is opened
 * @param append whether opened streams append to the file rather than truncate it
 * @return a sink whose {@code openStream()} delegates to {@code path.getOutputStream(append)}
 */
public static ByteSink asByteSink(final Path path, final boolean append) {
    ByteSink sink = new ByteSink() {
        @Override
        public OutputStream openStream() throws IOException {
            // Defer to the vfs Path so the sink honors the requested append mode.
            return path.getOutputStream(append);
        }
    };
    return sink;
}

From source file:org.apache.druid.java.util.common.CompressionUtils.java

/**
 * Copies {@code in} to {@code out} through a GZIP compressor, retrying the whole copy
 * when {@code shouldRetry} accepts the failure.
 *
 * @param in          source of the uncompressed bytes
 * @param out         destination sink; each attempt opens a fresh stream wrapped in GZIP
 * @param shouldRetry decides whether a thrown failure warrants another attempt
 * @return the number of bytes copied, as reported by {@link StreamUtils#retryCopy}
 */
public static long gzip(final ByteSource in, final ByteSink out, Predicate<Throwable> shouldRetry) {
    // Wrap the destination so every (re)opened stream compresses on the fly.
    final ByteSink compressingSink = new ByteSink() {
        @Override
        public OutputStream openStream() throws IOException {
            return new GZIPOutputStream(out.openStream());
        }
    };
    return StreamUtils.retryCopy(in, compressingSink, shouldRetry, DEFAULT_RETRY_COUNT);
}