Example usage for com.google.common.io LineReader LineReader

List of usage examples for com.google.common.io LineReader LineReader

Introduction

In this page you can find the example usage for com.google.common.io LineReader LineReader.

Prototype

public LineReader(Readable readable) 

Source Link

Document

Creates a new instance that will read lines from the given Readable object.

Usage

From source file:org.opencastproject.videosegmenter.ffmpeg.VideoSegmenterServiceImpl.java

/**
 * Starts segmentation on the video track identified by
 * <code>mediapackageId</code> and <code>elementId</code> and returns a
 * receipt containing the final result in the form of anMpeg7Catalog.
 *
 * @param track/*  ww  w  .j  av  a 2s.co m*/
 *            the element to analyze
 * @return a receipt containing the resulting mpeg-7 catalog
 * @throws VideoSegmenterException
 */
protected Catalog segment(Job job, Track track) throws VideoSegmenterException, MediaPackageException {

    // Make sure the element can be analyzed using this analysis
    // implementation
    if (!track.hasVideo()) {
        logger.warn("Element {} is not a video track", track);
        throw new VideoSegmenterException("Element is not a video track");
    }

    try {
        Mpeg7Catalog mpeg7 = mpeg7CatalogService.newInstance();

        File mediaFile = null;
        URL mediaUrl = null;
        try {
            mediaFile = workspace.get(track.getURI());
            mediaUrl = mediaFile.toURI().toURL();
        } catch (NotFoundException e) {
            throw new VideoSegmenterException("Error finding the video file in the workspace", e);
        } catch (IOException e) {
            throw new VideoSegmenterException("Error reading the video file in the workspace", e);
        }

        if (track.getDuration() == null)
            throw new MediaPackageException("Track " + track + " does not have a duration");
        logger.info("Track {} loaded, duration is {} s", mediaUrl, track.getDuration() / 1000);

        MediaTime contentTime = new MediaRelTimeImpl(0, track.getDuration());
        MediaLocator contentLocator = new MediaLocatorImpl(track.getURI());
        Video videoContent = mpeg7.addVideoContent("videosegment", contentTime, contentLocator);

        logger.info("Starting video segmentation of {}", mediaUrl);
        String[] command = new String[] { binary, "-nostats", "-i",
                mediaFile.getAbsolutePath().replaceAll(" ", "\\ "), "-filter:v",
                "select=gt(scene\\," + changesThreshold + "),showinfo", "-f", "null", "-" };
        String commandline = StringUtils.join(command, " ");

        logger.info("Running {}", commandline);

        ProcessBuilder pbuilder = new ProcessBuilder(command);
        List<String> segmentsStrings = new LinkedList<String>();
        Process process = pbuilder.start();
        BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
        try {
            LineReader lr = new LineReader(reader);
            String line = lr.readLine();
            while (null != line) {
                if (line.startsWith("[Parsed_showinfo")) {
                    segmentsStrings.add(line);
                }
                line = lr.readLine();
            }
        } catch (IOException e) {
            logger.error("Error executing ffmpeg: {}", e.getMessage());
        } finally {
            reader.close();
        }

        // [Parsed_showinfo_1 @ 0x157fb40] n:0 pts:12 pts_time:12 pos:227495
        // fmt:rgb24 sar:0/1 s:320x240 i:P iskey:1 type:I checksum:8DF39EA9
        // plane_checksum:[8DF39EA9]

        int segmentcount = 1;
        List<Segment> segments = new LinkedList<Segment>();
        if (segmentsStrings.size() == 0) {
            Segment s = videoContent.getTemporalDecomposition().createSegment("segement-" + segmentcount);
            s.setMediaTime(new MediaRelTimeImpl(0, track.getDuration()));
            segments.add(s);
        } else {
            long starttime = 0;
            long endtime = 0;
            Pattern pattern = Pattern.compile("pts_time\\:\\d+");
            for (String seginfo : segmentsStrings) {
                Matcher matcher = pattern.matcher(seginfo);
                String time = "0";
                while (matcher.find()) {
                    time = matcher.group().substring(9);
                }
                endtime = Long.parseLong(time) * 1000;
                long segmentLength = endtime - starttime;
                if (1000 * stabilityThreshold < segmentLength) {
                    Segment segement = videoContent.getTemporalDecomposition()
                            .createSegment("segement-" + segmentcount);
                    segement.setMediaTime(new MediaRelTimeImpl(starttime, endtime - starttime));
                    segments.add(segement);
                    segmentcount++;
                    starttime = endtime;
                }
            }
            // Add last segment
            Segment s = videoContent.getTemporalDecomposition().createSegment("segement-" + segmentcount);
            s.setMediaTime(new MediaRelTimeImpl(endtime, track.getDuration() - endtime));
            segments.add(s);
        }

        logger.info("Segmentation of {} yields {} segments", mediaUrl, segments.size());

        Catalog mpeg7Catalog = (Catalog) MediaPackageElementBuilderFactory.newInstance().newElementBuilder()
                .newElement(Catalog.TYPE, MediaPackageElements.SEGMENTS);
        URI uri;
        try {
            uri = workspace.putInCollection(COLLECTION_ID, job.getId() + ".xml",
                    mpeg7CatalogService.serialize(mpeg7));
        } catch (IOException e) {
            throw new VideoSegmenterException("Unable to put the mpeg7 catalog into the workspace", e);
        }
        mpeg7Catalog.setURI(uri);

        logger.info("Finished video segmentation of {}", mediaUrl);
        return mpeg7Catalog;
    } catch (Exception e) {
        logger.warn("Error segmenting " + track, e);
        if (e instanceof VideoSegmenterException) {
            throw (VideoSegmenterException) e;
        } else {
            throw new VideoSegmenterException(e);
        }
    }
}

From source file:com.android.manifmerger.Actions.java

/**
 * Produces an annotated, line-numbered rendering of the merged document: each
 * line of the pretty-printed XML is followed by "--&gt;" entries pointing at the
 * action locations recorded for that line.
 *
 * @param xmlDocument the merged document to annotate
 * @return the blame listing as a single string
 * @throws IOException if the document content cannot be read
 * @throws SAXException if the document cannot be parsed
 * @throws ParserConfigurationException if no XML parser can be configured
 */
public String blame(XmlDocument xmlDocument) throws IOException, SAXException, ParserConfigurationException {

    ImmutableMultimap<Integer, Record> mappings = getResultingSourceMapping(xmlDocument);
    LineReader contentReader = new LineReader(new StringReader(xmlDocument.prettyPrint()));

    StringBuilder annotated = new StringBuilder();
    int lineNumber = 0;
    for (String current = contentReader.readLine(); current != null; current = contentReader.readLine()) {
        annotated.append(lineNumber + 1).append(current).append("\n");
        // Multimap.get yields an empty collection for unmapped lines, so no
        // containsKey guard is needed.
        for (Record record : mappings.get(lineNumber)) {
            annotated.append(lineNumber + 1).append("-->").append(record.getActionLocation().toString())
                    .append("\n");
        }
        lineNumber++;
    }
    return annotated.toString();
}

From source file:org.apache.brooklyn.util.core.osgi.BundleMaker.java

/**
 * Treats {@code itemFound} as a newline-separated directory listing and recursively
 * adds every listed entry beneath {@code item} to the zip stream.
 *
 * @param zout the zip stream entries are written to
 * @param root the root path entries are made relative to
 * @param item the path of the directory whose listing is being read
 * @param itemFound stream containing one child entry name per line
 * @param filter predicate selecting which entry paths are included
 * @return true if at least one listed entry was successfully recursed into,
 *         false if the stream does not look like a directory listing at all
 * @throws IOException if reading the listing or writing the zip fails
 */
private boolean addUrlDirToZipRecursively(ZipOutputStream zout, String root, String item, InputStream itemFound,
        Predicate<? super String> filter) throws IOException {
    // Decode entry names with an explicit charset; the previous no-charset
    // InputStreamReader used the platform default, making results host-dependent.
    LineReader lr = new LineReader(
            new InputStreamReader(itemFound, java.nio.charset.StandardCharsets.UTF_8));
    boolean readSubdirFile = false;
    while (true) {
        String line = lr.readLine();
        if (line == null) {
            // at end of file return true if we were able to recurse, else false
            return readSubdirFile;
        }
        boolean isFile = addUrlItemRecursively(zout, root, item + "/" + line, filter);
        if (isFile) {
            readSubdirFile = true;
        } else {
            if (!readSubdirFile) {
                // very first entry failed: not a folder listing after all
                return false;
            } else {
                // previous entry suggested it was a folder, but this one didn't work! -- was a false positive
                // but zip will be in inconsistent state, so throw
                throw new IllegalStateException("Failed to read entry " + line + " in " + item
                        + " but previous entry implied it was a directory");
            }
        }
    }
}

From source file:co.jirm.core.sql.SqlPartialParser.java

/**
 * Parses one partial-SQL file into a {@code FileDeclarationSql}, collecting
 * hash blocks (opened by a "#name" marker line, closed by "/name") and
 * references to other partials (opened by "&gt; path", closed by "&lt;").
 * Marker lines are recognized via {@code tokenPattern}; all other lines are
 * plain content attributed to whatever block is currently open.
 *
 * @param path logical path of the file being parsed; used in error messages and
 *             as the prefix of hash identifiers
 * @param sql  the raw file contents
 * @return the parsed declaration holding the full content, all references and all hashes
 * @throws IOException if reading the content fails (unlikely for an in-memory StringReader)
 */
private static FileDeclarationSql _processFile(String path, String sql) throws IOException {

    LineReader lr = new LineReader(new StringReader(sql));
    String line;
    // Accumulators for the declarations of the whole file.
    ImmutableList.Builder<ReferenceSql> references = ImmutableList.builder();
    ImmutableMap.Builder<String, HashDeclarationSql> hashes = ImmutableMap.builder();
    // References encountered while inside the currently-open hash block.
    ImmutableList.Builder<ReferenceSql> hashReferences = ImmutableList.builder();
    // Closed hashes by name, used to reject duplicate hash definitions.
    Map<String, HashDeclarationSql> nameToHash = newHashMap();

    // Line buffers for the open reference, the open hash, and the file itself.
    // The reference/hash builders are re-created each time a block opens.
    ImmutableList.Builder<String> referenceContent = ImmutableList.builder();
    ImmutableList.Builder<String> hashContent = ImmutableList.builder();
    ImmutableList.Builder<String> fileContent = ImmutableList.builder();

    // NOTE(review): 'first' is cleared on the first iteration but never read
    // afterwards -- appears to be dead state.
    boolean first = true;
    // Parser state machine: OTHER (top level), HASH (inside #...), REFERENCE (inside >...).
    PSTATE state = PSTATE.OTHER;
    // State to restore when a reference closes (a reference may sit inside a hash).
    PSTATE previousState = PSTATE.OTHER;

    String currentHash = null;
    String currentReference = null;
    Map<String, List<String>> currentReferenceParameters = ImmutableMap.of();
    int hashStartIndex = 0;
    int referenceStartIndex = 0;
    int lineIndex = 0;

    // Common message prefix so every failed check names the offending file.
    String PE = "For path: '{}', ";

    while ((line = lr.readLine()) != null) {
        if (first)
            first = false;
        Matcher m = tokenPattern.matcher(line);
        String tag;
        // A line is a marker only if the pattern matches AND its captured tag is non-blank.
        if (m.matches() && (tag = m.group(1)) != null && !(tag = tag.trim()).isEmpty()) {
            if (tag != null && tag.startsWith("#")) {
                // "#name": open a new hash block; hashes may not nest.
                check.state(state != PSTATE.HASH, PE + "Cannot hash within hash at line: {}.", path, lineIndex);
                state = PSTATE.HASH;
                hashContent = ImmutableList.builder();
                hashReferences = ImmutableList.builder();
                currentHash = tag.substring(1).trim();
                HashDeclarationSql existing = nameToHash.get(currentHash);
                if (existing != null) {
                    // Duplicate hash names are a hard error, reported with both locations.
                    throw check.stateInvalid(
                            PE + "Hash: '#{}' already defined at line: {}, new definition at line: {}", path,
                            currentHash, existing.getStartIndex(), lineIndex);
                }
                hashContent.add(line);
                hashStartIndex = lineIndex;
            } else if (tag != null && tag.startsWith(">")) {
                // "> path": open a reference; references may not nest inside references.
                check.state(state != PSTATE.REFERENCE, PE + "Cannot reference within reference at line: {}.",
                        path, lineIndex);
                previousState = state;
                state = PSTATE.REFERENCE;
                referenceContent = ImmutableList.builder();
                ReferenceHeader h = ReferenceHeader.parse(tag);
                currentReference = h.getPath().getFullPath();
                currentReferenceParameters = h.getParameters();

                check.state(!currentReference.isEmpty(), PE + "No reference defined", path);
                referenceStartIndex = lineIndex;
                referenceContent.add(line);
                // A reference opened inside a hash also counts as hash content.
                if (previousState == PSTATE.HASH) {
                    hashContent.add(line);
                }
            } else if (tag != null && tag.equals("<")) {
                // "<": close the open reference, record it, and restore the prior state.
                check.state(state == PSTATE.REFERENCE, PE + "Invalid close of reference line: {}", path,
                        lineIndex);
                state = previousState;
                int length = lineIndex - referenceStartIndex + 1;
                referenceContent.add(line);
                // NOTE(review): the next two checks are redundant with each other
                // (length >= 0 implies length > -1).
                check.state(length > -1, "length should be greater than -1");
                check.state(length >= 0, PE + "Hash Line index incorrect. Index: {}, Reference start: {}", path,
                        lineIndex, referenceStartIndex);
                ReferenceSql rsql = new ReferenceSql(currentReference, path, referenceContent.build(),
                        referenceStartIndex, length, currentReferenceParameters);
                references.add(rsql);
                // If the reference lived inside a hash, attribute it to that hash too.
                if (PSTATE.HASH == previousState) {
                    hashReferences.add(rsql);
                    hashContent.add(line);
                }
            } else if (tag != null && tag.startsWith("/")) {
                // "/name": close the open hash; the name must match the one that opened it.
                check.state(state == PSTATE.HASH, PE + "Hash not started or reference not finished line: {}",
                        path, lineIndex);
                String t = tag.substring(1).trim();
                check.state(!t.isEmpty(), PE + "No close hash is defined at line: {}", path, lineIndex);
                check.state(t.equals(currentHash), PE + "Should be current hash tag: {} at line: {}", path,
                        currentHash, lineIndex);
                state = PSTATE.OTHER;
                int length = lineIndex - hashStartIndex + 1;
                hashContent.add(line);
                check.state(length >= 0, PE + "Hash Line index incorrect. Index: {}, Hash start: {}", path,
                        lineIndex, hashStartIndex);
                HashDeclarationSql hash = new HashDeclarationSql(path + "#" + currentHash, hashContent.build(),
                        hashReferences.build(), hashStartIndex, length);
                nameToHash.put(currentHash, hash);
                hashes.put(currentHash, hash);
            } else {
                // Marker line whose tag is none of "#", ">", "<", "/".
                throw check.stateInvalid(PE + "Malformed hash or reference: {} at line: {}", path, tag,
                        lineIndex);
            }
        } else {
            // Plain content: attribute it to whichever block(s) are currently open.
            // (previousState == HASH covers content inside a reference that is itself
            // inside a hash.)
            if (PSTATE.HASH == state || PSTATE.HASH == previousState) {
                hashContent.add(line);
            }
            if (PSTATE.REFERENCE == state) {
                referenceContent.add(line);
            }
        }
        // Every line, marker or not, belongs to the file's own content.
        fileContent.add(line);

        lineIndex++;
    }

    // Any block left open at end-of-file is a structural error.
    check.state(PSTATE.OTHER == state, "Reference or hash not closed");
    FileDeclarationSql f = new FileDeclarationSql(path, fileContent.build(), references.build(),
            hashes.build());
    return f;

}