Example usage for com.google.common.io CharSource openBufferedStream

List of usage examples for com.google.common.io CharSource openBufferedStream

Introduction

On this page you can find example usage of com.google.common.io CharSource openBufferedStream.

Prototype

public BufferedReader openBufferedStream() throws IOException 

Source Link

Document

Opens a new BufferedReader for reading from this source.

Usage

From source file:teetime.util.BucketTimingsReader.java

/**
 * Reads a semicolon-separated timings file, averages every 10000 timings into a
 * bucket, discards the first half of the buckets as warm-up, and logs quantile
 * and confidence-width statistics for the remainder.
 *
 * @param args args[0] is the path of the timings file to read
 * @throws IOException if the file cannot be opened or read
 */
public static void main(final String[] args) throws IOException {
    final String fileName = args[0];

    // Fixed-size window: every 10000 timings are collapsed into one bucket average.
    final Long[] currentTimings = new Long[10000];
    int processedLines = 0;
    final List<Long> buckets = new LinkedList<Long>();

    LOGGER.trace("Reading " + fileName);
    final CharSource charSource = Files.asCharSource(new File(fileName), Charsets.UTF_8);
    // try-with-resources: the original never closed the reader (resource leak).
    try (BufferedReader bufferedStream = charSource.openBufferedStream()) {
        String line;
        while (null != (line = bufferedStream.readLine())) {
            // Each line is expected to be "<label>;<timing>"; only field 1 is used.
            final String[] strings = line.split(";");
            // Long.valueOf instead of the deprecated new Long(String) constructor.
            final Long timing = Long.valueOf(strings[1]);
            currentTimings[processedLines] = timing;
            processedLines++;
            if (currentTimings.length == processedLines) {
                final Long aggregatedTimings = StatisticsUtil.calculateAverage(Arrays.asList(currentTimings));
                buckets.add(aggregatedTimings);
                processedLines = 0;
            }
        }
    }
    // NOTE(review): a trailing partial bucket (fewer than 10000 lines) is silently
    // discarded — this matches the original behavior.

    LOGGER.trace("#buckets: " + buckets.size());

    // Drop the first half of the buckets as warm-up; keep the steady-state half.
    final List<Long> durationsInNs = buckets.subList(buckets.size() / 2, buckets.size());

    LOGGER.trace("Calculating quantiles...");
    final Map<Double, Long> quintiles = StatisticsUtil.calculateQuintiles(durationsInNs);
    LOGGER.info(StatisticsUtil.getQuantilesString(quintiles));

    final long confidenceWidth = StatisticsUtil.calculateConfidenceWidth(durationsInNs);
    LOGGER.info("Confidence width: " + confidenceWidth);
}

From source file:com.opengamma.strata.collect.io.CsvIterator.java

/**
 * Parses the specified source as a CSV file where the separator is specified and might not be a comma.
 * <p>/*  w  w w  . j a v  a2 s .com*/
 * This overload allows the separator to be controlled.
 * For example, a tab-separated file is very similar to a CSV file, the only difference is the separator.
 * <p>
 * This method opens the CSV file for reading.
 * The caller is responsible for closing it by calling {@link #close()}.
 * 
 * @param source  the file resource
 * @param headerRow  whether the source has a header row, an empty source must still contain the header
 * @param separator  the separator used to separate each field, typically a comma, but a tab is sometimes used
 * @return the CSV file
 * @throws UncheckedIOException if an IO exception occurs
 * @throws IllegalArgumentException if the file cannot be parsed
 */
public static CsvIterator of(CharSource source, boolean headerRow, char separator) {
    ArgChecker.notNull(source, "source");
    @SuppressWarnings("resource")
    BufferedReader reader = Unchecked.wrap(() -> source.openBufferedStream());
    return create(reader, headerRow, separator);
}

From source file:com.blackducksoftware.bdio.io.BdioReader.java

/**
 * Reads the entire BOM into memory. This will normalize the data so that each node is "complete",
 * however this requires a significant amount of up front resources: both in terms of memory and CPU.
 *
 * @param context the linked-data context used to expand/frame/compact the input
 * @param source the character source holding the raw JSON-LD document
 * @return an observable emitting one {@code Node} per map element of the compacted graph
 */
public static Observable<Node> readFully(final LinkedDataContext context, final CharSource source) {
    // Single: the whole document is materialized once, then fanned out as an Observable.
    return Single.create(new Single.OnSubscribe<List<Object>>() {
        @Override
        public void call(SingleSubscriber<? super List<Object>> t) {
            // try-with-resources closes the reader (and underlying stream) on all paths.
            try (BdioReader reader = new BdioReader(context, source.openBufferedStream())) {
                // Parse the whole input
                List<?> input = reader.jp.readValueAs(List.class);
                String specVersion = scanForSpecVersion(input);

                // THIS IS THE EXPENSIVE BIT...

                // Expand the input and the frame; frame the results and compact it back down
                JsonLdOptions opts = new JsonLdOptions();
                opts.setExpandContext(context.newContextForReading(specVersion).serialize());
                List<Object> expandedInput = JsonLdProcessor.expand(input, opts);
                List<Object> expandedFrame = JsonLdProcessor.expand(context.newImportFrame(), opts);
                List<Object> framed = new JsonLdApi(opts).frame(expandedInput, expandedFrame);
                // Unchecked cast: the compact() result's "@graph" entry is taken to be a list.
                List<Object> compacted = (List<Object>) JsonLdProcessor
                        .compact(framed, context.serialize(), opts).get("@graph");
                // TODO How do we eliminate the blank node identifiers introduced during expansion?

                // We only emit a single element: an observable over the raw objects in the graph
                t.onSuccess(compacted);
            } catch (IOException | JsonLdError e) {
                t.onError(e);
            }
        }
    }).flatMapObservable(new Func1<List<Object>, Observable<Object>>() {
        @Override
        public Observable<Object> call(List<Object> graph) {
            // Wrap the "graph" (list of nodes) in an observable
            return Observable.from(graph);
        }
    }).flatMap(new Func1<Object, Observable<Node>>() {
        @Override
        public Observable<Node> call(Object nodeMap) {
            // Convert the raw JSON to Node instances, but only if each element is actually a Map
            // (e.g. scalars in the graph are safely ignored)
            if (nodeMap instanceof Map<?, ?>) {
                return Observable.just(context.expandToNode((Map<?, ?>) nodeMap));
            } else {
                return Observable.empty();
            }
        }
    });
}

From source file:com.blackducksoftware.bdio.io.BdioReader.java

/**
 * Returns an observable from a character source.
 * <p>
 * Unlike {@code readFully}, nodes are streamed one at a time as they are read,
 * so the whole document is never held in memory at once.
 *
 * @param context the linked-data context used while reading
 * @param source the character source to open; opened lazily per subscription
 * @return an observable that emits each {@code Node} read from the source
 */
public static Observable<Node> open(final LinkedDataContext context, final CharSource source) {
    checkNotNull(context);
    checkNotNull(source);
    // Use CheckedFuture as a wrapper for either a BdioReader or an IOException
    return Observable.create(new SyncOnSubscribe<CheckedFuture<BdioReader, IOException>, Node>() {
        @Override
        protected CheckedFuture<BdioReader, IOException> generateState() {
            // Open the reader per subscriber; a failed open is deferred as a failed
            // future so the IOException surfaces via onError in next(), not here.
            try {
                return Futures.immediateCheckedFuture(new BdioReader(context, source.openBufferedStream()));
            } catch (IOException e) {
                return Futures.immediateFailedCheckedFuture(e);
            }
        }

        @Override
        protected CheckedFuture<BdioReader, IOException> next(CheckedFuture<BdioReader, IOException> s,
                Observer<? super Node> t) {
            // Iterate over the nodes in the file as we see them
            try {
                Node node = s.checkedGet().read();
                if (node != null) {
                    t.onNext(node);
                } else {
                    // A null read signals end-of-input.
                    t.onCompleted();
                }
            } catch (IOException e) {
                t.onError(e);
            }
            return s;
        }

        @Override
        protected void onUnsubscribe(CheckedFuture<BdioReader, IOException> s) {
            // Best-effort close of the underlying reader; a failure to close is ignored
            // deliberately since the subscriber is already gone.
            try {
                s.checkedGet().close();
            } catch (IOException e) {
                return;
            }
        }
    });
}

From source file:com.basistech.tclre.Grep.java

/**
 * Opens the given file as UTF-8 text and feeds a buffered reader to
 * {@code processReader}.
 *
 * @param input the file to grep
 * @throws IOException if the file cannot be opened or read
 * @throws RegexException if pattern processing fails downstream
 */
private void processFile(File input) throws IOException, RegexException {
    // NOTE(review): the reader is handed off to processReader, which is presumably
    // responsible for closing it — confirm against that method.
    processReader(Files.asCharSource(input, Charsets.UTF_8).openBufferedStream());
}

From source file:org.anarres.dblx.core.model.ModelLoader.java

@Nonnull
public Model load() throws IOException {
    // Builds a Model by reading two tab-separated classpath resources:
    // nodes.csv (named 3D points) and bars.csv (edges between named nodes).
    Model model = new Model(modelName);

    // Tags columns are whitespace-separated lists within a single field.
    Splitter splitter = Splitter.on(CharMatcher.BREAKING_WHITESPACE);

    NODES: {
        URL url = Resources.getResource("models/" + modelName + "/nodes.csv");
        CharSource source = Resources.asCharSource(url, StandardCharsets.UTF_8);
        try (Reader in = source.openBufferedStream()) {
            // Tab-separated, skipping 1 header row.
            CSVReader reader = newSVReader(in, '\t', 1);
            for (String[] line : reader) {
                // Columns: name, x, y, z (inches, converted to millimetres), tags.
                String name = line[0];
                long x = (long) LengthUnit.INCH.toMillimetres(Double.parseDouble(line[1]));
                long y = (long) LengthUnit.INCH.toMillimetres(Double.parseDouble(line[2]));
                long z = (long) LengthUnit.INCH.toMillimetres(Double.parseDouble(line[3]));
                List<String> tags = splitter.splitToList(line[4]);
                model.addNode(new Node(name, x, y, z, tags));
            }
        }
    }

    BARS: {
        URL url = Resources.getResource("models/" + modelName + "/bars.csv");
        CharSource source = Resources.asCharSource(url, StandardCharsets.UTF_8);
        try (Reader in = source.openBufferedStream()) {
            // Tab-separated, skipping 1 header row.
            CSVReader reader = newSVReader(in, '\t', 1);
            for (String[] line : reader) {
                // Columns: from-node, to-node, tags.
                List<String> tags = splitter.splitToList(line[2]);
                Bar bar = new Bar(line[0], line[1], tags);
                model.addEdge(bar);
            }
        }
    }

    return model;
}

From source file:org.locationtech.geogig.spring.service.LegacyConsoleService.java

/**
 * Reads the buffered console output as UTF-8 text, stopping once roughly
 * {@code limit} characters (newlines excluded from the count) have been
 * collected, and appends a truncation notice when the soft limit is hit.
 *
 * @param out the backing store holding the raw console output bytes
 * @param limit the soft character limit for the returned output
 * @return the (possibly truncated) output, one '\n' appended per line
 * @throws IOException if the backing store cannot be read
 */
private StringBuilder getLimitedOutput(FileBackedOutputStream out, final int limit) throws IOException {

    CharSource charSource = out.asByteSource().asCharSource(Charsets.UTF_8);
    final StringBuilder output = new StringBuilder();
    // try-with-resources: the original leaked the reader on every call.
    try (BufferedReader reader = charSource.openBufferedStream()) {
        int count = 0;
        String line;
        while ((line = reader.readLine()) != null) {
            output.append(line).append('\n');
            count += line.length();
            if (count >= limit) {
                output.append("\nNote: output limited to ").append(count)
                        .append(" characters. Run config web.console.limit <newlimit> to change the current ")
                        .append(limit).append(" soft limit.");
                break;
            }
        }
    }
    return output;
}

From source file:com.rocana.configuration.ConfigurationParser.java

/**
 * Opens the source as a buffered reader and delegates to the reader-based
 * {@code parse} overload, translating any I/O failure into a
 * {@code ConfigurationException}.
 *
 * @param source the configuration data to read
 * @param targetType the type to bind the configuration to
 * @return the parsed configuration object
 * @throws ConfigurationException if the source cannot be read
 */
public <T> T parse(CharSource source, Class<T> targetType) {
    // try-with-resources guarantees the reader is closed on every path.
    try (Reader in = source.openBufferedStream()) {
        return parse(in, targetType);
    } catch (IOException cause) {
        throw new ConfigurationException("Unable to read configuration data from source", cause);
    }
}

From source file:org.openqa.selenium.remote.NewSessionPayload.java

/**
 * Scans the buffered JSON payload for a top-level "desiredCapabilities" entry.
 *
 * @return the capabilities map, or {@code null} if the key is absent
 * @throws IOException if the backing store cannot be read
 */
private Map<String, Object> getOss() throws IOException {
    CharSource charSource = backingStore.asByteSource().asCharSource(UTF_8);
    try (Reader reader = charSource.openBufferedStream(); JsonInput input = json.newInput(reader)) {
        input.beginObject();
        while (input.hasNext()) {
            String name = input.nextName();
            // Guard clause: skip everything except the key we are after.
            if (!"desiredCapabilities".equals(name)) {
                input.skipValue();
                continue;
            }
            return input.read(MAP_TYPE);
        }
    }
    return null;
}

From source file:org.openqa.selenium.remote.NewSessionPayload.java

/**
 * Copies every top-level entry of the buffered JSON payload to {@code out},
 * omitting the three capability keys.
 *
 * @param out the JSON writer to copy the metadata into
 * @throws IOException if the backing store cannot be read
 */
private void writeMetaData(JsonOutput out) throws IOException {
    CharSource charSource = backingStore.asByteSource().asCharSource(UTF_8);
    try (Reader reader = charSource.openBufferedStream(); JsonInput input = json.newInput(reader)) {
        input.beginObject();
        while (input.hasNext()) {
            String name = input.nextName();
            // Capability entries are written elsewhere; drop them here.
            boolean isCapabilityKey = "capabilities".equals(name)
                    || "desiredCapabilities".equals(name)
                    || "requiredCapabilities".equals(name);
            if (isCapabilityKey) {
                input.skipValue();
            } else {
                out.name(name);
                out.write(input.<Object>read(Object.class), Object.class);
            }
        }
    }
}