Example usage for edu.stanford.nlp.io IOUtils closeIgnoringExceptions

List of usage examples for edu.stanford.nlp.io IOUtils closeIgnoringExceptions

Introduction

On this page you can find an example usage for edu.stanford.nlp.io IOUtils closeIgnoringExceptions.

Prototype

public static void closeIgnoringExceptions(Closeable c) 

Source Link

Document

Provides an implementation of closing a file for use in a finally block so you can correctly close a file without even more exception handling stuff.

Usage

From source file: ilcc.ccgparser.nnparser.IncNNParser.java

/**
 * Loads word embeddings from a whitespace-separated text file.
 *
 * <p>Each line is expected to contain a word followed by its embedding values,
 * e.g. {@code the 0.12 -0.34 ...}. The embedding dimension is inferred from the
 * first line and must equal {@code config.embeddingSize}.
 *
 * @param embedFile path to the embedding file; if {@code null}, nothing is read
 * @param embedID   out-parameter mapping each word to its row index in the result
 * @return a {@code [nWords][dim]} matrix of embeddings, or {@code null} if
 *         {@code embedFile} was {@code null}
 * @throws IllegalArgumentException if the file is empty, a line has too few
 *         values, or the dimension does not match {@code config.embeddingSize}
 * @throws RuntimeIOException if an I/O error occurs while reading
 */
private double[][] readEmbedFile(String embedFile, Map<String, Integer> embedID) {

    double[][] embeddings = null;
    if (embedFile != null) {
        BufferedReader input = null;
        try {
            input = IOUtils.readerFromString(embedFile);
            List<String> lines = new ArrayList<String>();
            for (String s; (s = input.readLine()) != null;) {
                lines.add(s);
            }

            // Fail fast with a clear message instead of an IndexOutOfBoundsException below.
            if (lines.isEmpty())
                throw new IllegalArgumentException("Embedding file " + embedFile + " is empty");

            int nWords = lines.size();
            // Dimension is inferred from the first line: one word token plus dim values.
            String[] splits = lines.get(0).split("\\s+");

            int dim = splits.length - 1;
            embeddings = new double[nWords][dim];
            System.err.println("Embedding File " + embedFile + ": #Words = " + nWords + ", dim = " + dim);

            if (dim != config.embeddingSize)
                throw new IllegalArgumentException(
                        "The dimension of embedding file does not match config.embeddingSize");

            for (int i = 0; i < nWords; ++i) {
                splits = lines.get(i).split("\\s+");
                // Descriptive error instead of an ArrayIndexOutOfBoundsException on short lines.
                if (splits.length < dim + 1)
                    throw new IllegalArgumentException("Embedding file " + embedFile + " line " + (i + 1)
                            + " has " + (splits.length - 1) + " values; expected " + dim);
                embedID.put(splits[0], i);
                for (int j = 0; j < dim; ++j)
                    embeddings[i][j] = Double.parseDouble(splits[j + 1]);
            }
        } catch (IOException e) {
            throw new RuntimeIOException(e);
        } finally {
            // Close in a finally block without further exception handling.
            IOUtils.closeIgnoringExceptions(input);
        }
    }
    return embeddings;
}

From source file: process.PTBTokenizer.java

License:Open Source License

/**
 * Tokenizes each input file (or stdin when the input list is empty) and writes
 * the tokens to the corresponding output file (or stdout when no output list is
 * given), then reports the tokenization throughput on stderr.
 *
 * @param inputFileList   files to tokenize; empty means read from stdin
 * @param outputFileList  parallel list of output files, or {@code null} for stdout
 * @param charset         character encoding for both reading and writing
 * @param parseInsidePattern only tokenize text inside elements matching this pattern
 * @param options         tokenizer option string passed through to {@code tokReader}
 * @param preserveLines   keep the original line breaks in the output
 * @param dump            dump verbose token information
 * @param lowerCase       lowercase all tokens
 * @throws IOException if opening an input or output file fails
 */
private static void tok(List<String> inputFileList, List<String> outputFileList, String charset,
        Pattern parseInsidePattern, String options, boolean preserveLines, boolean dump, boolean lowerCase)
        throws IOException {
    final long start = System.nanoTime();
    long numTokens = 0;
    int numFiles = inputFileList.size();
    if (numFiles == 0) {
        // No input files: tokenize stdin to stdout.
        Reader stdin = IOUtils.readerFromStdin(charset);
        BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(System.out, charset));
        try {
            numTokens += tokReader(stdin, writer, parseInsidePattern, options, preserveLines, dump, lowerCase);
        } finally {
            // Close (flushing buffered output) even if tokReader throws; the
            // original only closed the writer on the success path.
            IOUtils.closeIgnoringExceptions(writer);
        }

    } else {
        for (int j = 0; j < numFiles; j++) {
            Reader r = IOUtils.readerFromString(inputFileList.get(j), charset);
            BufferedWriter out = (outputFileList == null)
                    ? new BufferedWriter(new OutputStreamWriter(System.out, charset))
                    : new BufferedWriter(
                            new OutputStreamWriter(new FileOutputStream(outputFileList.get(j)), charset));
            try {
                numTokens += tokReader(r, out, parseInsidePattern, options, preserveLines, dump, lowerCase);
            } finally {
                // Close both streams even if tokReader throws; the original
                // leaked the reader and writer on failure (r.close() was not
                // in a finally block).
                IOUtils.closeIgnoringExceptions(r);
                IOUtils.closeIgnoringExceptions(out);
            }
        } // end for j going through inputFileList
    }

    final long duration = System.nanoTime() - start;
    final double wordsPerSec = (double) numTokens / ((double) duration / 1000000000.0);
    System.err.printf("PTBTokenizer tokenized %d tokens at %.2f tokens per second.%n", numTokens, wordsPerSec);
}