Example usage for org.apache.commons.io LineIterator next

List of usage examples for org.apache.commons.io LineIterator next

Introduction

On this page you can find the example usage for org.apache.commons.io LineIterator next.

Prototype

public Object next() 

Source Link

Document

Returns the next line in the wrapped Reader.

Usage

From source file:de.cebitec.guava.dockertest.Docker.java

/**
 * Runs the configured command in a Docker container and returns its exit code.
 * Pulls the image if missing, starts the container (with volume binds when
 * configured), waits for completion, dumps stderr and throws on non-zero exit,
 * optionally writes stdout to {@code outputFile}, then removes the container.
 *
 * @return the container's exit code (always 0 when this returns normally)
 * @throws Exception if no image repo is configured or the command exits non-zero
 */
public int run() throws Exception {

    if (repo.equals("")) {
        throw new Exception("No container defined.");
    }
    if (!isImageAvailable(repo)) {
        pullImage(repo);
    }

    String[] command = cmd.toArray(new String[cmd.size()]);
    ContainerCreateResponse containerResp = dockerClient.createContainerCmd(repo).withCmd(command).exec();

    if (binds.isEmpty()) {
        dockerClient.startContainerCmd(containerResp.getId()).exec();
    } else {
        dockerClient.startContainerCmd(containerResp.getId()).withBinds(binds.toArray(new Bind[binds.size()]))
                .exec();
    }

    int exit = dockerClient.waitContainerCmd(containerResp.getId()).exec();

    if (exit != 0) {
        // Dump the container's stderr before failing.
        ClientResponse resp = dockerClient.logContainerCmd(containerResp.getId()).withStdErr().exec();
        try {
            LineIterator itr = IOUtils.lineIterator(resp.getEntityInputStream(), "US-ASCII");
            while (itr.hasNext()) {
                String line = itr.next();
                // ugly hack because of java-docker bug: strip non-printable characters
                System.out.println(line.replaceAll("[^\\x20-\\x7e]", ""));
                System.out.println(itr.hasNext() ? "\n" : "");
            }
        } finally {
            // BUGFIX: the original leaked the log stream on this error path.
            IOUtils.closeQuietly(resp.getEntityInputStream());
        }

        throw new Exception("Command Exited with code " + exit);
    }
    if (!this.outputFile.isEmpty()) {
        new File(new File(this.outputFile).getParent()).mkdirs();
        ClientResponse resp = dockerClient.logContainerCmd(containerResp.getId()).withStdOut().exec();

        // try-with-resources: the original leaked the writer if logContainerCmd threw
        try (PrintWriter writer = new PrintWriter(this.outputFile, "UTF-8")) {
            LineIterator itr = IOUtils.lineIterator(resp.getEntityInputStream(), "UTF-8");
            while (itr.hasNext()) {
                String out = itr.next();
                // ugly hack because of java-docker bug: drop control chars except tab
                out = out.replaceAll("[\\p{Cc}&&[^\t]]", "");
                writer.write(out);
                writer.write(itr.hasNext() ? "\n" : "");
            }
        } finally {
            IOUtils.closeQuietly(resp.getEntityInputStream());
        }
    }
    dockerClient.removeContainerCmd(containerResp.getId()).exec();

    return exit;
}

From source file:de.tudarmstadt.ukp.clarin.webanno.tcf.WebAnnoSemanticGraphReader.java

/**
 * Reads an entailment-pair file (one pair per line, premise and hypothesis
 * separated by a tab-delimited "&gt;" or "X" marker) into the CAS: a Sentence
 * per line, a Token per whitespace-separated unit, and a CoreferenceChain
 * whose first link covers the premise ("text") and whose second link covers
 * the hypothesis.
 *
 * @param aJCas the CAS to populate; its document text is set at the end
 * @param aIs the input stream to read
 * @param aEncoding the character encoding of the stream
 * @throws IOException if the stream cannot be read
 */
public void convertToCas(JCas aJCas, InputStream aIs, String aEncoding) throws IOException

{
    StringBuilder text = new StringBuilder();
    LineIterator lineIterator = IOUtils.lineIterator(aIs, aEncoding);
    int tokenBeginPosition = 0;
    while (lineIterator.hasNext()) {
        String line = lineIterator.next();
        // contents[0] = premise, contents[1] = hypothesis (split on the marker column)
        String[] contents = line.split("\t>\t|\tX\t");
        int sentenceBegin = tokenBeginPosition;
        int chainBegin = tokenBeginPosition;
        int chainEnd = 0;
        // Tokenize the premise; offsets track the text being built.
        StringTokenizer st = new StringTokenizer(contents[0]);
        while (st.hasMoreTokens()) {
            String content = st.nextToken();
            Token outToken = new Token(aJCas, tokenBeginPosition, tokenBeginPosition + content.length());
            outToken.addToIndexes();
            tokenBeginPosition = outToken.getEnd() + 1;
            chainEnd = tokenBeginPosition;
            text.append(content + " ");
        }

        CoreferenceChain chain = new CoreferenceChain(aJCas);
        CoreferenceLink link = new CoreferenceLink(aJCas, chainBegin, chainEnd - 1);
        link.setReferenceType("text");
        link.addToIndexes();
        chain.setFirst(link);

        // Emit the one-character marker token: "> " for entailment, "X " otherwise.
        if (line.contains("\t>\t")) {
            link.setReferenceRelation("entails");
            Token outToken = new Token(aJCas, tokenBeginPosition, tokenBeginPosition + 1);
            outToken.addToIndexes();
            tokenBeginPosition = outToken.getEnd() + 1;
            text.append("> ");
        } else {
            link.setReferenceRelation("do not entails");
            Token outToken = new Token(aJCas, tokenBeginPosition, tokenBeginPosition + 1);
            outToken.addToIndexes();
            tokenBeginPosition = outToken.getEnd() + 1;
            text.append("X ");
        }

        chainBegin = tokenBeginPosition;
        // BUGFIX: tokenize the hypothesis (contents[1]); the original re-tokenized
        // the premise (contents[0]), duplicating it in the document text. Guarded
        // in case a line carries no marker and the split yields a single field.
        st = new StringTokenizer(contents.length > 1 ? contents[1] : "");
        while (st.hasMoreTokens()) {
            String content = st.nextToken();
            Token outToken = new Token(aJCas, tokenBeginPosition, tokenBeginPosition + content.length());
            outToken.addToIndexes();
            tokenBeginPosition = outToken.getEnd() + 1;
            chainEnd = tokenBeginPosition;
            text.append(content + " ");

        }
        CoreferenceLink nextLink = new CoreferenceLink(aJCas, chainBegin, chainEnd - 1);
        nextLink.setReferenceType("hypothesis");
        nextLink.addToIndexes();
        link.setNext(nextLink);
        chain.addToIndexes();
        text.append("\n");

        Sentence outSentence = new Sentence(aJCas);
        outSentence.setBegin(sentenceBegin);
        outSentence.setEnd(tokenBeginPosition);
        outSentence.addToIndexes();
        // Account for the newline just appended before the next line starts.
        tokenBeginPosition = tokenBeginPosition + 1;
    }
    aJCas.setDocumentText(text.toString());
}

From source file:io.apiman.test.integration.runner.PolicyLoader.java

/**
 * Loads policy beans from {@code /policies/<file>.pcfg} on the classpath.
 * Each entry is a definition id line, a "---" separator, and a JSON-ish
 * config block terminated by an empty line (or end of file). Lines starting
 * with "@" include another .pcfg file recursively.
 *
 * @param file base name of the .pcfg resource (without extension)
 * @param policies accumulator that loaded beans are appended to
 * @return the same {@code policies} list, for chaining
 * @throws IOException if the resource cannot be read
 */
private List<NewPolicyBean> load(String file, List<NewPolicyBean> policies) throws IOException {
    try (InputStream is = PolicyLoader.class.getResourceAsStream("/policies/" + file + ".pcfg")) {
        LineIterator iterator = IOUtils.lineIterator(is, "utf8");

        StringBuilder cfgBuffer = new StringBuilder();
        String policyDefId = null;
        boolean readingConfig = false;

        while (iterator.hasNext()) {
            String current = iterator.next();

            if (current.startsWith("---")) {
                // separator: the policy config begins on the following line
                readingConfig = true;
                continue;
            }

            if (current.startsWith("@")) {
                // directive: recursively include another .pcfg file
                String included = current.substring(current.indexOf("@") + 1);
                load(included.trim(), policies);
                continue;
            }

            if (readingConfig) {
                // accumulate the config body
                cfgBuffer.append(current);
            } else {
                // before the separator this line is the definition id
                policyDefId = current.trim();
            }

            boolean configEnded = current.isEmpty() && readingConfig;
            if (configEnded || !iterator.hasNext()) {
                // blank line (or EOF) terminates the current entry
                storeBean(policyDefId, cfgBuffer.toString(), policies);
                cfgBuffer = new StringBuilder();
                readingConfig = false;
            }
        }
    }
    return policies;
}

From source file:csv.to.sql.parser.mainMenu.java

/**
 * Handler for the Parse button: converts the selected CSV file into a SQL
 * INSERT script written next to it. The CSV header becomes the column list;
 * every remaining line becomes one VALUES row. Shows a success/error dialog
 * and resets the file selection either way.
 *
 * @param evt the button event (unused)
 */
private void btnParseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnParseActionPerformed
    String filePath = this.selectedFile.getPath();
    filePath = filePath.replace(".csv", " ");
    File resultFile = new File(this.validFilePath(filePath + "csvTo.sql"));

    try {
        if (resultFile.createNewFile()) {
            // BUGFIX: the original closed the streams with `br != null & bw != null`
            // (bitwise &, and neither stream closed when only one was open);
            // try-with-resources closes both reliably.
            try (BufferedReader br = new BufferedReader(new FileReader(this.selectedFile));
                    BufferedWriter bw = new BufferedWriter(new FileWriter(resultFile))) {

                // Header line: table name from the file name, columns from the first CSV row.
                bw.write("INSERT INTO " + this.selectedFile.getName().replace(".csv", "") + "  "
                        + this.formatFields(br.readLine()).replace('"', '`') + " VALUES\n");

                LineIterator it = new LineIterator(br);
                boolean lineStatus = it.hasNext();
                while (lineStatus) {
                    String currLine = it.next();
                    // comma-continue every row except the last, which gets the ";"
                    bw.write(this.formatFields(currLine) + ((lineStatus = it.hasNext()) ? ",\n" : ";"));
                }
                this.resultOk = true;
            }
        }
    } catch (IOException ex) {
        this.resultOk = false;
        System.out.println("Error al crear el archivo: " + ex.getMessage());
    } finally {
        JOptionPane.showMessageDialog(this, "Parse " + (this.resultOk ? "Successful!" : "Error!"));

        this.selectedFile = null;
        this.lblFile.setText("No File Selected!");
        this.btnOpenFile.setEnabled(true);
        this.btnParse.setEnabled(false);
    }

}

From source file:com.shopzilla.hadoop.testing.hdfs.DFSCluster.java

/**
 * Applies {@code lineProcessor} to every line of every file under
 * {@code path} (non-recursive; see {@link #processDataRecursive}).
 *
 * @param path HDFS path whose files are read line by line
 * @param lineProcessor callback invoked once per line
 * @throws IOException if path traversal fails
 */
public void processData(final Path path, final Function<String, Void> lineProcessor) throws IOException {
    final Function<Path, Void> pathProcessor = new Function<Path, Void>() {
        @Override
        public Void apply(Path path) {
            LineIterator lineIterator = null;
            try {
                final FSDataInputStream in = miniDFSCluster.getFileSystem().open(path);
                // NOTE(review): uses the platform default charset — confirm files are ASCII/UTF-8
                lineIterator = new LineIterator(new InputStreamReader(in));
                while (lineIterator.hasNext()) {
                    lineProcessor.apply(lineIterator.next());
                }
                return null;
            } catch (final Exception ex) {
                throw new RuntimeException(ex);
            } finally {
                // BUGFIX: close in finally — the original leaked the stream
                // when lineProcessor threw.
                LineIterator.closeQuietly(lineIterator);
            }
        }
    };
    processPaths(path, new Function<Path, Void>() {
        @Override
        public Void apply(Path input) {
            pathProcessor.apply(input);
            return null;
        }
    });
}

From source file:com.shopzilla.hadoop.testing.hdfs.DFSCluster.java

/**
 * Applies {@code lineProcessor} to every line of every file under
 * {@code path}, descending into subdirectories.
 *
 * @param path HDFS path whose files are read line by line
 * @param lineProcessor callback invoked once per line
 * @throws IOException if path traversal fails
 */
public void processDataRecursive(final Path path, final Function<String, Void> lineProcessor)
        throws IOException {
    final Function<Path, Void> pathProcessor = new Function<Path, Void>() {
        @Override
        public Void apply(Path path) {
            LineIterator lineIterator = null;
            try {
                final FSDataInputStream in = miniDFSCluster.getFileSystem().open(path);
                // NOTE(review): uses the platform default charset — confirm files are ASCII/UTF-8
                lineIterator = new LineIterator(new InputStreamReader(in));
                while (lineIterator.hasNext()) {
                    lineProcessor.apply(lineIterator.next());
                }
                return null;
            } catch (final Exception ex) {
                throw new RuntimeException(ex);
            } finally {
                // BUGFIX: close in finally — the original leaked the stream
                // when lineProcessor threw.
                LineIterator.closeQuietly(lineIterator);
            }
        }
    };
    processPathsRecursive(path, new Function<Path, Void>() {
        @Override
        public Void apply(Path input) {
            pathProcessor.apply(input);
            return null;
        }
    });
}

From source file:de.tudarmstadt.lt.seg.sentence.SentenceSplitterTest.java

/** Feeds TEST_TEXT line by line through the rule-based splitter and prints the result. */
@Test
public void ruleSplitterLineTest() {
    ISentenceSplitter splitter = new RuleSplitter();
    ITokenizer tokenizer = new EmptySpaceTokenizer();
    StringWriter buffer = new StringWriter();
    PrintWriter out = new PrintWriter(buffer);

    LineIterator lines = new LineIterator(new BufferedReader(new StringReader(TEST_TEXT)));
    long lineNo = 0;
    while (lines.hasNext()) {
        lineNo++;
        // progress heartbeat for very large inputs
        if (lineNo % 1000 == 0) {
            System.err.format("Processing line %d %n", lineNo);
        }
        Segmenter.split_and_tokenize(new StringReader(lines.next()), String.format("%s:%d", "TEST_TEXT", lineNo),
                splitter, tokenizer, 2, 0, false, false, "\n", "\n", "\n", out);
    }
    System.out.println(buffer.toString());
}

From source file:com.cognifide.cq.cqsm.core.scripts.ScriptManagerImpl.java

/**
 * Scans the script line by line and builds an {@link ActionDescriptor} for
 * every action line. Actions may contribute definitions for later lines
 * ({@code DefinitionProvider}) or pull in further scripts ({@code ScriptProvider}).
 *
 * @param script the script whose data is parsed
 * @param definitions variable definitions, extended in place by providers
 * @param includes accumulator for included scripts
 * @param resolver resolver handed to include resolution
 * @return the descriptors in script order
 * @throws ExecutionException if a command cannot be evaluated
 */
private List<ActionDescriptor> parseIncludeDescriptors(Script script, Map<String, String> definitions,
        List<Script> includes, ResourceResolver resolver) throws ExecutionException {
    final List<ActionDescriptor> descriptors = new LinkedList<>();
    LineIterator it = IOUtils.lineIterator(new StringReader(script.getData()));

    while (it.hasNext()) {
        final String line = it.next();
        if (!ScriptUtils.isAction(line)) {
            continue; // skip comments/blank lines
        }
        final String command = ScriptUtils.parseCommand(line, definitions);
        final ActionDescriptor descriptor = actionFactory.evaluate(command);
        descriptors.add(descriptor);

        final Action action = descriptor.getAction();
        if (action instanceof DefinitionProvider) {
            // action contributes definitions visible to subsequent lines
            definitions.putAll(((DefinitionProvider) action).provideDefinitions(definitions));
        } else if (action instanceof ScriptProvider) {
            // action references further scripts to include
            getIncludes(definitions, includes, resolver, descriptors, (ScriptProvider) action);
        }
    }
    return descriptors;
}

From source file:$.LogEventParsingIterator.java

/**
     * Advances to the position where the next line is available,
     * regardless of whether it passes filter requirements or not.
     * On the way to this position any lines that are not log events
     * will be appended to the previous log event.
     * <p/>
     * If this call is successful, either {@link ${symbol_pound}parsedEvent} will be a new event,
     * or {@link ${symbol_pound}preParsedEvent} will not be null; or both.
     *
     * @return true, if there might be more events; false otherwise
     * @throws IOException if there's a problem opening a new file
     */
    private boolean seekToNextEvent() throws IOException {
        // fresh event the parser can fill in while scanning lines
        LogEvent nextEvent = config.createLogEvent();

        LineIterator iterator;
        // outer loop moves across files; getLineIterator() returns null when exhausted
        while ((iterator = getLineIterator()) != null) {
            while (iterator.hasNext()) {
                // process the line:
                LogEvent event = parser.parseLine(iterator.next(), nextEvent, preParsedEvent, config);
                // check what to do next:
                if (preParsedEvent != null && event != preParsedEvent) {
                    // there is pre-parsed event, but the line was NOT appended to it
                    // => pre-parsed event will not get any more lines, it can be considered fully parsed
                    parsedEvent = preParsedEvent; // graduate pre-parsed event - it'll be the next event
                    preParsedEvent = event; // next event that was found (if any) is now pre-parsed
                    return true; // found full pre-parsed event => success
                } else if (event == nextEvent) {
                    // nextEvent is now pre-parsed, but previous pre-parsed event is null, so no event graduated
                    preParsedEvent = nextEvent;
                    return true;
                }
                // otherwise line was ignored or added to the pre-parsed event, nothing really changed
            }
        }
        return false; // failure
    }

From source file:com.shopzilla.hadoop.mapreduce.MiniMRClusterContext.java

/**
 * Applies {@code lineProcessor} to every line of every file under
 * {@code path} in the mini-cluster's file system.
 *
 * @param path HDFS path whose files are read line by line
 * @param lineProcessor callback invoked once per line
 * @throws IOException if path traversal fails
 */
public void processData(final Path path, final Function<String, Void> lineProcessor) throws IOException {
    final Function<Path, Void> pathProcessor = new Function<Path, Void>() {
        @Override
        public Void apply(Path path) {
            LineIterator lineIterator = null;
            try {
                FSDataInputStream in = miniDFSCluster.getFileSystem().open(path);
                // NOTE(review): uses the platform default charset — confirm files are ASCII/UTF-8
                lineIterator = new LineIterator(new InputStreamReader(in));
                while (lineIterator.hasNext()) {
                    lineProcessor.apply(lineIterator.next());
                }
            } catch (Exception ex) {
                throw new RuntimeException(ex);
            } finally {
                // BUGFIX: close in finally — the original leaked the stream
                // when lineProcessor threw.
                LineIterator.closeQuietly(lineIterator);
            }
            return null;
        }
    };
    processPaths(path, new Function<Path, Void>() {
        @Override
        public Void apply(Path input) {
            pathProcessor.apply(input);
            return null;
        }
    });
}