Example usage for com.google.common.io LineReader readLine

List of usage examples for com.google.common.io LineReader readLine

Introduction

In this page you can find the example usage for com.google.common.io LineReader readLine.

Prototype

public String readLine() throws IOException 

Source Link

Document

Reads a line of text.

Usage

From source file:com.ibm.watson.retrieveandrank.app.rest.RetrieveAndRankProxyResource.java

/**
 * Gets a list of queries from the resource file on disk. The queries are provided by the Cranfield dataset.. This
 * API randomly selects n queries (by default 8) and returns those to the client.
 *
 * @param numQueries/*from  w w  w .jav a2  s . co  m*/
 *            the number of queries the client wishes to retrieve, by default 8
 * @return a list of n queries. The returned query contains a query string and and query ID for ground truth lookup.
 */
@Path("/sampleQueries")
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response getSampleQueries(@QueryParam("numQueries") @DefaultValue("-1") int numQueries) {
    ServerErrorPayload error = null;
    final InputStream samples = RetrieveAndRankProxyResource.class.getResourceAsStream("/SampleQueries.json");
    if (samples != null) {
        try (InputStreamReader streamReader = new InputStreamReader(samples)) {
            final LineReader reader = new LineReader(streamReader);
            String line = reader.readLine();
            String json = null;
            while (line != null) {
                if (json != null) {
                    json += "\n" + line;
                } else {
                    json = line;
                }
                line = reader.readLine();
            }
            final JsonObject contents = new JsonParser().parse(json).getAsJsonObject();
            final JsonArray queries = contents.get("queries").getAsJsonArray();
            final int max = queries.size() - 1;
            if (numQueries == -1) {
                numQueries = max;
            } else {
                numQueries = Math.min(numQueries, max);
            }
            final Random randomizer = new Random();
            final SampleQueriesPayload payload = new SampleQueriesPayload();
            for (int i = 0; i < numQueries; i++) {
                final int index = randomizer.nextInt(max - 1);
                final JsonObject query = queries.get(index).getAsJsonObject();
                final SampleQueryPayload sqp = new SampleQueryPayload();
                sqp.setId(i);
                sqp.setQueryId(query.get("id").getAsInt());
                sqp.setQuery(query.get("query").getAsString());
                payload.addQuery(sqp);
            }
            return Response.ok(payload).type(MediaType.APPLICATION_JSON).build();
        } catch (final IOException e) {
            final String message = Messages.getString("RetrieveAndRankProxyResource.RNR_SAMPLE_QUERIES_ERROR");//$NON-NLS-1$
            error = new ServerErrorPayload(message);
            UtilityFunctions.logger.error(message, e);
        }
    }
    return Response.serverError().entity(error).type(MediaType.APPLICATION_JSON).build();
}

From source file:org.apache.brooklyn.util.core.osgi.BundleMaker.java

/**
 * Treats {@code itemFound} as a directory listing (one child entry name per line) and
 * recursively adds each listed child of {@code item} to the zip stream.
 *
 * @return true if at least one child was added as a file (i.e. this really was a directory),
 *         false if the very first child could not be read (not a directory after all)
 * @throws IllegalStateException if some children were added but a later one could not be read,
 *         since the zip stream is then in an inconsistent state
 */
private boolean addUrlDirToZipRecursively(ZipOutputStream zout, String root, String item, InputStream itemFound,
        Predicate<? super String> filter) throws IOException {
    final LineReader listing = new LineReader(new InputStreamReader(itemFound));
    boolean anyEntryAdded = false;
    String entry;
    while ((entry = listing.readLine()) != null) {
        if (addUrlItemRecursively(zout, root, item + "/" + entry, filter)) {
            anyEntryAdded = true;
        } else if (anyEntryAdded) {
            // previous entry suggested it was a folder, but this one didn't work! -- was a false positive
            // but zip will be in inconsistent state, so throw
            throw new IllegalStateException("Failed to read entry " + entry + " in " + item
                    + " but previous entry implied it was a directory");
        } else {
            // first entry already unreadable: not a folder
            return false;
        }
    }
    // end of listing: true iff we managed to recurse into at least one child
    return anyEntryAdded;
}

From source file:google.registry.tools.CreateLrpTokensCommand.java

/**
 * Generates LRP tokens for each assignee — taken from the single {@code assignee} flag or,
 * one per line, from {@code assigneesFile} — and persists them in batches of
 * {@code BATCH_SIZE}, retrying each save on transient {@link RemoteApiException}s.
 *
 * @throws Exception if argument validation fails or saving tokens ultimately fails
 */
@Override
public void run() throws Exception {
    // Exactly one input source may be given, and metadata flags are only valid
    // with the source they apply to.
    checkArgument((assignee == null) == (assigneesFile != null),
            "Exactly one of either assignee or filename must be specified.");
    checkArgument((assigneesFile == null) || (metadata == null),
            "Metadata cannot be specified along with a filename.");
    checkArgument((assignee == null) || (metadataColumns == null),
            "Metadata columns cannot be specified along with an assignee.");
    final Set<String> validTlds = ImmutableSet.copyOf(Splitter.on(',').split(tlds));
    for (String tld : validTlds) {
        assertTldExists(tld);
    }

    // Bug fix: close the underlying reader when done — the file handle opened by
    // Files.newReader was previously leaked. The fully qualified java.io.Reader
    // keeps the import list unchanged.
    try (java.io.Reader assigneeSource = (assigneesFile != null)
            ? Files.newReader(assigneesFile.toFile(), UTF_8)
            : new StringReader(assignee)) {
        LineReader reader = new LineReader(assigneeSource);

        String line = null;
        do {
            ImmutableSet.Builder<LrpTokenEntity> tokensToSaveBuilder = new ImmutableSet.Builder<>();
            for (String token : generateTokens(BATCH_SIZE)) {
                line = reader.readLine();
                if (!isNullOrEmpty(line)) {
                    // Split on commas that are not inside double quotes; strip the quotes
                    // from the resulting values.
                    ImmutableList<String> values = ImmutableList
                            .copyOf(Splitter.onPattern(COMMA_EXCEPT_WHEN_QUOTED_REGEX)
                                    // Results should not be surrounded in double quotes.
                                    .trimResults(CharMatcher.is('\"')).split(line));
                    LrpTokenEntity.Builder tokenBuilder = new LrpTokenEntity.Builder().setAssignee(values.get(0))
                            .setToken(token).setValidTlds(validTlds);
                    if (metadata != null) {
                        tokenBuilder.setMetadata(metadata);
                    } else if (metadataColumns != null) {
                        ImmutableMap.Builder<String, String> metadataBuilder = ImmutableMap.builder();
                        for (ImmutableMap.Entry<String, Integer> entry : metadataColumns.entrySet()) {
                            checkArgument(values.size() > entry.getValue(),
                                    "Entry for %s does not have a value for %s (index %s)", values.get(0),
                                    entry.getKey(), entry.getValue());
                            metadataBuilder.put(entry.getKey(), values.get(entry.getValue()));
                        }
                        tokenBuilder.setMetadata(metadataBuilder.build());
                    }
                    tokensToSaveBuilder.add(tokenBuilder.build());
                }
            }
            final ImmutableSet<LrpTokenEntity> tokensToSave = tokensToSaveBuilder.build();
            // Wrap in a retrier to deal with transient 404 errors (thrown as RemoteApiExceptions).
            retrier.callWithRetry(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    saveTokens(tokensToSave);
                    return null;
                }
            }, RemoteApiException.class);
        } while (line != null);
    }
}

From source file:org.opencastproject.videosegmenter.ffmpeg.VideoSegmenterServiceImpl.java

/**
 * Starts segmentation on the video track identified by
 * <code>mediapackageId</code> and <code>elementId</code> and returns a
 * receipt containing the final result in the form of an Mpeg7Catalog.
 *
 * @param job
 *            the job under which this segmentation runs; its id names the stored catalog file
 * @param track
 *            the element to analyze
 * @return a receipt containing the resulting mpeg-7 catalog
 * @throws VideoSegmenterException
 *             if the element is not a video track, the media file cannot be fetched,
 *             or the catalog cannot be stored
 * @throws MediaPackageException
 *             if the track has no duration
 */
protected Catalog segment(Job job, Track track) throws VideoSegmenterException, MediaPackageException {

    // Make sure the element can be analyzed using this analysis
    // implementation
    if (!track.hasVideo()) {
        logger.warn("Element {} is not a video track", track);
        throw new VideoSegmenterException("Element is not a video track");
    }

    try {
        Mpeg7Catalog mpeg7 = mpeg7CatalogService.newInstance();

        // Fetch the media file from the workspace so ffmpeg can read it locally.
        File mediaFile = null;
        URL mediaUrl = null;
        try {
            mediaFile = workspace.get(track.getURI());
            mediaUrl = mediaFile.toURI().toURL();
        } catch (NotFoundException e) {
            throw new VideoSegmenterException("Error finding the video file in the workspace", e);
        } catch (IOException e) {
            throw new VideoSegmenterException("Error reading the video file in the workspace", e);
        }

        if (track.getDuration() == null)
            throw new MediaPackageException("Track " + track + " does not have a duration");
        logger.info("Track {} loaded, duration is {} s", mediaUrl, track.getDuration() / 1000);

        MediaTime contentTime = new MediaRelTimeImpl(0, track.getDuration());
        MediaLocator contentLocator = new MediaLocatorImpl(track.getURI());
        Video videoContent = mpeg7.addVideoContent("videosegment", contentTime, contentLocator);

        // Run ffmpeg with a scene-change select filter: frames whose scene score
        // exceeds changesThreshold are passed to showinfo, which logs one line per frame.
        logger.info("Starting video segmentation of {}", mediaUrl);
        String[] command = new String[] { binary, "-nostats", "-i",
                mediaFile.getAbsolutePath().replaceAll(" ", "\\ "), "-filter:v",
                "select=gt(scene\\," + changesThreshold + "),showinfo", "-f", "null", "-" };
        String commandline = StringUtils.join(command, " ");

        logger.info("Running {}", commandline);

        ProcessBuilder pbuilder = new ProcessBuilder(command);
        List<String> segmentsStrings = new LinkedList<String>();
        Process process = pbuilder.start();
        // ffmpeg writes filter output to stderr; collect only the showinfo lines.
        // NOTE(review): the process is never waitFor()'d or destroy()'d — this relies
        // on stderr reaching EOF when ffmpeg exits; confirm processes cannot leak.
        BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
        try {
            LineReader lr = new LineReader(reader);
            String line = lr.readLine();
            while (null != line) {
                if (line.startsWith("[Parsed_showinfo")) {
                    segmentsStrings.add(line);
                }
                line = lr.readLine();
            }
        } catch (IOException e) {
            logger.error("Error executing ffmpeg: {}", e.getMessage());
        } finally {
            reader.close();
        }

        // Sample showinfo output line:
        // [Parsed_showinfo_1 @ 0x157fb40] n:0 pts:12 pts_time:12 pos:227495
        // fmt:rgb24 sar:0/1 s:320x240 i:P iskey:1 type:I checksum:8DF39EA9
        // plane_checksum:[8DF39EA9]

        int segmentcount = 1;
        List<Segment> segments = new LinkedList<Segment>();
        if (segmentsStrings.size() == 0) {
            // No scene changes detected: the whole track becomes a single segment.
            Segment s = videoContent.getTemporalDecomposition().createSegment("segement-" + segmentcount);
            s.setMediaTime(new MediaRelTimeImpl(0, track.getDuration()));
            segments.add(s);
        } else {
            long starttime = 0;
            long endtime = 0;
            // Extract the pts_time value (whole seconds) from each showinfo line.
            // NOTE(review): the pattern matches only the integer part, so fractional
            // timestamps such as pts_time:12.5 lose their fraction — confirm intended.
            Pattern pattern = Pattern.compile("pts_time\\:\\d+");
            for (String seginfo : segmentsStrings) {
                Matcher matcher = pattern.matcher(seginfo);
                String time = "0";
                while (matcher.find()) {
                    time = matcher.group().substring(9);
                }
                endtime = Long.parseLong(time) * 1000;
                long segmentLength = endtime - starttime;
                // Only emit a segment once it is at least stabilityThreshold seconds
                // long; shorter scene changes get merged into the following segment.
                if (1000 * stabilityThreshold < segmentLength) {
                    Segment segement = videoContent.getTemporalDecomposition()
                            .createSegment("segement-" + segmentcount);
                    segement.setMediaTime(new MediaRelTimeImpl(starttime, endtime - starttime));
                    segments.add(segement);
                    segmentcount++;
                    starttime = endtime;
                }
            }
            // Add last segment, covering the remainder of the track.
            Segment s = videoContent.getTemporalDecomposition().createSegment("segement-" + segmentcount);
            s.setMediaTime(new MediaRelTimeImpl(endtime, track.getDuration() - endtime));
            segments.add(s);
        }

        logger.info("Segmentation of {} yields {} segments", mediaUrl, segments.size());

        // Serialize the mpeg-7 catalog into the workspace and wrap the stored file
        // as a media package catalog element.
        Catalog mpeg7Catalog = (Catalog) MediaPackageElementBuilderFactory.newInstance().newElementBuilder()
                .newElement(Catalog.TYPE, MediaPackageElements.SEGMENTS);
        URI uri;
        try {
            uri = workspace.putInCollection(COLLECTION_ID, job.getId() + ".xml",
                    mpeg7CatalogService.serialize(mpeg7));
        } catch (IOException e) {
            throw new VideoSegmenterException("Unable to put the mpeg7 catalog into the workspace", e);
        }
        mpeg7Catalog.setURI(uri);

        logger.info("Finished video segmentation of {}", mediaUrl);
        return mpeg7Catalog;
    } catch (Exception e) {
        logger.warn("Error segmenting " + track, e);
        // Re-throw domain exceptions as-is; wrap anything else.
        if (e instanceof VideoSegmenterException) {
            throw (VideoSegmenterException) e;
        } else {
            throw new VideoSegmenterException(e);
        }
    }
}

From source file:com.android.manifmerger.Actions.java

/**
 * Renders the pretty-printed merged document, appending after each line the
 * recorded merger actions that produced it (prefixed with the 1-based line
 * number and "-->").
 */
public String blame(XmlDocument xmlDocument) throws IOException, SAXException, ParserConfigurationException {

    ImmutableMultimap<Integer, Record> sourceMapping = getResultingSourceMapping(xmlDocument);
    LineReader prettyPrinted = new LineReader(new StringReader(xmlDocument.prettyPrint()));

    StringBuilder blame = new StringBuilder();
    int lineIndex = 0;
    for (String line = prettyPrinted.readLine(); line != null; line = prettyPrinted.readLine()) {
        int displayedNumber = lineIndex + 1;
        blame.append(displayedNumber).append(line).append("\n");
        // Multimap.get returns an empty collection for absent keys, so no
        // containsKey guard is needed.
        for (Record record : sourceMapping.get(lineIndex)) {
            blame.append(displayedNumber).append("-->").append(record.getActionLocation().toString())
                    .append("\n");
        }
        lineIndex++;
    }
    return blame.toString();
}

From source file:co.jirm.core.sql.SqlPartialParser.java

/**
 * Parses partial-SQL text into a {@link FileDeclarationSql}: the full file
 * content plus any hash declarations and references found in tagged lines.
 *
 * <p>Tag grammar as implemented below: a tag starting with "#" opens a hash
 * declaration, "/" (followed by the same name) closes it, ">" opens a
 * reference, and a bare "&lt;" closes the reference. References may appear
 * inside hashes, but hashes cannot nest and references cannot nest.
 *
 * <p>NOTE(review): {@code tokenPattern} is defined elsewhere in this file;
 * group(1) presumably captures the tag text of a marker line — confirm.
 *
 * @param path logical path of the file, used in error messages and hash names
 * @param sql the raw file contents
 * @return the parsed declaration (content, references, hashes)
 * @throws IOException if reading the in-memory string fails (should not happen)
 */
private static FileDeclarationSql _processFile(String path, String sql) throws IOException {

    LineReader lr = new LineReader(new StringReader(sql));
    String line;
    ImmutableList.Builder<ReferenceSql> references = ImmutableList.builder();
    ImmutableMap.Builder<String, HashDeclarationSql> hashes = ImmutableMap.builder();
    ImmutableList.Builder<ReferenceSql> hashReferences = ImmutableList.builder();
    // Mutable view of hashes, used to detect duplicate hash names early.
    Map<String, HashDeclarationSql> nameToHash = newHashMap();

    ImmutableList.Builder<String> referenceContent = ImmutableList.builder();
    ImmutableList.Builder<String> hashContent = ImmutableList.builder();
    ImmutableList.Builder<String> fileContent = ImmutableList.builder();

    // NOTE(review): 'first' is set but never read after being cleared — vestigial.
    boolean first = true;
    // Line-oriented state machine: OTHER (top level), HASH (inside #...),
    // REFERENCE (inside >...<). previousState remembers whether a reference
    // was opened inside a hash so hash content keeps accumulating.
    PSTATE state = PSTATE.OTHER;
    PSTATE previousState = PSTATE.OTHER;

    String currentHash = null;
    String currentReference = null;
    Map<String, List<String>> currentReferenceParameters = ImmutableMap.of();
    int hashStartIndex = 0;
    int referenceStartIndex = 0;
    int lineIndex = 0;

    // Common prefix for error messages; '{}' placeholders are filled by 'check'.
    String PE = "For path: '{}', ";

    while ((line = lr.readLine()) != null) {
        if (first)
            first = false;
        Matcher m = tokenPattern.matcher(line);
        String tag;
        if (m.matches() && (tag = m.group(1)) != null && !(tag = tag.trim()).isEmpty()) {
            if (tag != null && tag.startsWith("#")) {
                // Open a hash declaration; nesting is forbidden.
                check.state(state != PSTATE.HASH, PE + "Cannot hash within hash at line: {}.", path, lineIndex);
                state = PSTATE.HASH;
                hashContent = ImmutableList.builder();
                hashReferences = ImmutableList.builder();
                currentHash = tag.substring(1).trim();
                HashDeclarationSql existing = nameToHash.get(currentHash);
                if (existing != null) {
                    throw check.stateInvalid(
                            PE + "Hash: '#{}' already defined at line: {}, new definition at line: {}", path,
                            currentHash, existing.getStartIndex(), lineIndex);
                }
                hashContent.add(line);
                hashStartIndex = lineIndex;
            } else if (tag != null && tag.startsWith(">")) {
                // Open a reference; may occur at top level or inside a hash.
                check.state(state != PSTATE.REFERENCE, PE + "Cannot reference within reference at line: {}.",
                        path, lineIndex);
                previousState = state;
                state = PSTATE.REFERENCE;
                referenceContent = ImmutableList.builder();
                ReferenceHeader h = ReferenceHeader.parse(tag);
                currentReference = h.getPath().getFullPath();
                currentReferenceParameters = h.getParameters();

                check.state(!currentReference.isEmpty(), PE + "No reference defined", path);
                referenceStartIndex = lineIndex;
                referenceContent.add(line);
                if (previousState == PSTATE.HASH) {
                    hashContent.add(line);
                }
            } else if (tag != null && tag.equals("<")) {
                // Close the current reference and record it; restore the outer state.
                check.state(state == PSTATE.REFERENCE, PE + "Invalid close of reference line: {}", path,
                        lineIndex);
                state = previousState;
                int length = lineIndex - referenceStartIndex + 1;
                referenceContent.add(line);
                // NOTE(review): the '> -1' check is subsumed by the '>= 0' check below.
                check.state(length > -1, "length should be greater than -1");
                check.state(length >= 0, PE + "Hash Line index incorrect. Index: {}, Reference start: {}", path,
                        lineIndex, referenceStartIndex);
                ReferenceSql rsql = new ReferenceSql(currentReference, path, referenceContent.build(),
                        referenceStartIndex, length, currentReferenceParameters);
                references.add(rsql);
                if (PSTATE.HASH == previousState) {
                    hashReferences.add(rsql);
                    hashContent.add(line);
                }
            } else if (tag != null && tag.startsWith("/")) {
                // Close the current hash; its name must match the open one.
                check.state(state == PSTATE.HASH, PE + "Hash not started or reference not finished line: {}",
                        path, lineIndex);
                String t = tag.substring(1).trim();
                check.state(!t.isEmpty(), PE + "No close hash is defined at line: {}", path, lineIndex);
                check.state(t.equals(currentHash), PE + "Should be current hash tag: {} at line: {}", path,
                        currentHash, lineIndex);
                state = PSTATE.OTHER;
                int length = lineIndex - hashStartIndex + 1;
                hashContent.add(line);
                check.state(length >= 0, PE + "Hash Line index incorrect. Index: {}, Hash start: {}", path,
                        lineIndex, hashStartIndex);
                HashDeclarationSql hash = new HashDeclarationSql(path + "#" + currentHash, hashContent.build(),
                        hashReferences.build(), hashStartIndex, length);
                nameToHash.put(currentHash, hash);
                hashes.put(currentHash, hash);
            } else {
                throw check.stateInvalid(PE + "Malformed hash or reference: {} at line: {}", path, tag,
                        lineIndex);
            }
        } else {
            // Plain line: accumulate into whichever declarations are currently open.
            if (PSTATE.HASH == state || PSTATE.HASH == previousState) {
                hashContent.add(line);
            }
            if (PSTATE.REFERENCE == state) {
                referenceContent.add(line);
            }
        }
        // Every line, tagged or not, belongs to the overall file content.
        fileContent.add(line);

        lineIndex++;
    }

    // A dangling hash or reference at EOF is an error.
    check.state(PSTATE.OTHER == state, "Reference or hash not closed");
    FileDeclarationSql f = new FileDeclarationSql(path, fileContent.build(), references.build(),
            hashes.build());
    return f;

}