Example usage for java.io FileReader close

List of usage examples for java.io FileReader close

Introduction

On this page you can find example usage for java.io FileReader close.

Prototype

public void close() throws IOException 
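
The close() method releases the system resources associated with the reader; once it returns, further read() calls throw an IOException. Before the source examples below, here is a minimal, hypothetical sketch (the file name example.txt is made up) showing close() handled via try-with-resources, which closes the reader automatically even if reading fails:

import java.io.FileReader;
import java.io.IOException;

public class FileReaderCloseExample {
    public static void main(String[] args) {
        // try-with-resources calls reader.close() automatically,
        // whether the loop completes or an exception is thrown
        try (FileReader reader = new FileReader("example.txt")) {
            int c;
            while ((c = reader.read()) != -1) {
                System.out.print((char) c);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}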

Usage

From source file:org.kuali.kfs.gl.document.service.impl.CorrectionDocumentServiceImpl.java

public String createOutputFileForProcessing(String docId, java.util.Date today) {
    File outputFile = new File(glcpDirectoryName + File.separator + docId + OUTPUT_ORIGIN_ENTRIES_FILE_SUFFIX);
    String newFileName = batchFileDirectoryName + File.separator + GLCP_OUTPUT_PREFIX + "." + docId
            + buildFileExtensionWithDate(today);
    File newFile = new File(newFileName);
    FileReader inputFileReader;
    FileWriter newFileWriter;

    try {
        // copy output file and put in OriginEntryInformation directory
        inputFileReader = new FileReader(outputFile);
        newFileWriter = new FileWriter(newFile);
        int c;
        while ((c = inputFileReader.read()) != -1) {
            newFileWriter.write(c);
        }

        inputFileReader.close();
        newFileWriter.close();

        // create done file, after successfully copying output file
        String doneFileName = newFileName.replace(GeneralLedgerConstants.BatchFileSystem.EXTENSION,
                GeneralLedgerConstants.BatchFileSystem.DONE_FILE_EXTENSION);
        File doneFile = new File(doneFileName);
        if (!doneFile.exists()) {
            doneFile.createNewFile();
        }

    } catch (IOException e) {
        throw new RuntimeException(e);
    }

    return newFileName;
}
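
In the example above the two close() calls are reached only if the copy loop finishes without an exception; an IOException mid-copy would leave both streams open. As an alternative sketch (names are illustrative, not from the source), the same character-by-character copy can be written with try-with-resources so both streams are closed on every exit path:

import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;

class FileCopySketch {
    // hypothetical helper: copies source to target one character at a time
    static void copy(File source, File target) throws IOException {
        // both resources are closed automatically, in reverse order of creation
        try (FileReader in = new FileReader(source);
             FileWriter out = new FileWriter(target)) {
            int c;
            while ((c = in.read()) != -1) {
                out.write(c);
            }
        }
    }
}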

From source file:org.corpus_tools.pepper.connectors.impl.MavenAccessor.java

/**
 * This method tries to read the blacklist file if it already exists.
 */
private void init() {
    /* init Maven utils */
    /* read/write dependency blacklist */
    File blacklistFile = new File(BLACKLIST_PATH);
    if (blacklistFile.exists()) {
        try {
            FileReader fR = new FileReader(blacklistFile);
            BufferedReader reader = new BufferedReader(fR);
            String line = reader.readLine();
            while (line != null) {
                forbiddenFruits.add(line);
                line = reader.readLine();
            }
            reader.close();
            fR.close();
        } catch (IOException e) {
            logger.debug("Could not read blacklist file.", e);
        }
    }
}

From source file:com.spike.tg4w.htmlunit.XmlTestResultImpl.java

private String outputFileAsHtml(String filename) {
    FileReader fr = null;
    try {
        fr = new FileReader(filename);
        BufferedReader reader = new BufferedReader(fr);
        String line;
        int lineNum = 0;
        String outfile = (new File(filename)).getName() + "_rec.html";
        Writer writer = new FileWriter(this.writeDir + File.separator + outfile);
        writer.write(
                "<html><head><style>.highlight {background-color:lightblue; }</style> <script> function highlight() { var line = location.search.substring(6); if (line) document.getElementById(line).setAttribute(\"class\", \"highlight\"); } </script> </head><body onload='javascript:highlight();' style='font-size:90%;font-family:courier'><table cellpadding='0' cellspacing='0'>");
        while ((line = reader.readLine()) != null) {
            lineNum++;
            line = line.replace(" ", "&nbsp;").replace("\t", "&nbsp;").replace(">", "&gt;").replace("<",
                    "&lt;");
            writer.write("<tr><td id='" + lineNum + "'>");
            writer.write("<a name='" + lineNum + "'/> &nbsp;" + lineNum + "&nbsp;&nbsp;&nbsp;&nbsp;" + line
                    + "<br/>\n");
            writer.write("</td></tr>");
        }
        writer.write("</table></body></html>");
        writer.close();
        fr.close();
        reader.close();
        return outfile;
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    } finally {
        try {
            if (fr != null) {
                fr.close();
            }
        } catch (IOException ex) {
            // ignore
        }
    }
}

From source file:com.apifest.doclet.integration.tests.DocletTest.java

@Test
public void check_what_doclet_will_generate_correct_stream_result_parameters() throws Exception {
    // GIVEN
    String parserFilePath = "./all-mappings-docs.json";
    Map<String, ResultParamDocumentation> resNameToTypeMap = new HashMap<String, ResultParamDocumentation>();
    addResultParamToMap("tw_id", "integer", "The ** tw_id ** description", true, resNameToTypeMap);
    addResultParamToMap("in_reply_to_screen_name", "string", "The ** in_reply_to_screen_name ** description",
            true, resNameToTypeMap);
    addResultParamToMap("request_handle", "string", "The ** request_handle ** description", true,
            resNameToTypeMap);
    addResultParamToMap("in_reply_to_status_id", "string", "The ** in_reply_to_status_id ** description", true,
            resNameToTypeMap);
    addResultParamToMap("channel", "string", "The ** channel ** description", true, resNameToTypeMap);
    // WHEN
    runDoclet();
    // THEN
    JSONParser parser = new JSONParser();
    FileReader fileReader = null;
    try {
        fileReader = new FileReader(parserFilePath);
        JSONObject json = (JSONObject) parser.parse(fileReader);
        JSONArray arr = (JSONArray) json.get("endpoints");
        JSONObject obj = (JSONObject) arr.get(0);

        JSONArray resParam = (JSONArray) obj.get("resultParams");
        for (int i = 0; i < resParam.size(); i++) {
            JSONObject currentParam = (JSONObject) resParam.get(i);
            String currentName = (String) currentParam.get("name");
            ResultParamDocumentation correctCurrentParam = resNameToTypeMap.get(currentName);
            Assert.assertEquals((String) currentParam.get("type"), correctCurrentParam.getType());
            Assert.assertEquals((String) currentParam.get("name"), correctCurrentParam.getName());
            Assert.assertEquals((String) currentParam.get("description"), correctCurrentParam.getDescription());
            Assert.assertEquals(currentParam.get("required"), correctCurrentParam.isRequired());
        }
    } finally {
        if (fileReader != null) {
            fileReader.close();
        }
        deleteJsonFile(parserFilePath);
    }
}

From source file:org.fhcrc.cpl.viewer.ms2.commandline.PostProcessPepXMLCLM.java

/**
 *
 * @param featureFile
 * @param outputFile
 * @throws CommandLineModuleExecutionException
 */
protected void handleFeatureFile(File featureFile, File outputFile) throws CommandLineModuleExecutionException {
    ApplicationContext.infoMessage("Handling file " + featureFile.getAbsolutePath() + "...");

    if (maxFDR < Float.MAX_VALUE) {
        minPeptideProphet = (float) calcMinPeptideProphetForMaxFDR(featureFile);
        ApplicationContext.infoMessage("Minimum PeptideProphet for FDR " + maxFDR + ": " + minPeptideProphet);
    }

    try {
        Iterator<FeatureSet> featureSetIterator = new PepXMLFeatureFileHandler.PepXMLFeatureSetIterator(
                featureFile);

        List<File> tempFeatureFiles = new ArrayList<File>();
        int numSetsProcessed = 0;
        while (featureSetIterator.hasNext()) {
            FeatureSet featureSet = featureSetIterator.next();

            ApplicationContext.infoMessage("\tProcessing fraction " + (numSetsProcessed + 1) + "...");

            processFeatureSet(featureSet);
            String baseName = MS2ExtraInfoDef.getFeatureSetBaseName(featureSet);
            if (baseName == null) {
                baseName = featureFile.getName();
                if (numSetsProcessed > 0 || featureSetIterator.hasNext())
                    baseName = baseName + "_" + numSetsProcessed;
            }
            //if PeptideProphet was run from a directory below the directory containing the
            //mzXML files, we may have ../ in the baseName, which causes trouble in saving
            //the temporary files
            //                while (baseName.contains(".." + File.separator))
            //                    baseName.replaceFirst(".." + File.separator, "");
            if (baseName.contains(File.separator))
                baseName = baseName.substring(baseName.lastIndexOf(File.separator) + 1);

            File thisFractionFile = TempFileManager.createTempFile(baseName + ".pep.xml", this);

            featureSet.savePepXml(thisFractionFile);

            _log.debug("Saved fraction file as " + thisFractionFile.getAbsolutePath());
            tempFeatureFiles.add(thisFractionFile);

            numSetsProcessed++;
        }

        ApplicationContext.infoMessage("Saving output file " + outputFile.getAbsolutePath() + " ...");

        if (numSetsProcessed == 1) {
            FileReader in = new FileReader(tempFeatureFiles.get(0));
            FileWriter out = new FileWriter(outputFile);
            int c;

            while ((c = in.read()) != -1)
                out.write(c);

            in.close();
            out.close();
        } else {
            ApplicationContext.infoMessage(
                    "\tCombining individual fraction files... " + outputFile.getAbsolutePath() + "...");
            new PepXMLFeatureFileHandler().combinePepXmlFiles(tempFeatureFiles, outputFile);
        }
        ApplicationContext.infoMessage("Done.");
    } catch (IOException e) {
        throw new CommandLineModuleExecutionException(
                "Failed to process features from file " + featureFile.getAbsolutePath(), e);
    } finally {
        TempFileManager.deleteTempFiles(this);
    }
}

From source file:com.apifest.doclet.integration.tests.DocletTest.java

@Test
public void check_what_doclet_will_generate_correct_stream_request_parameters() throws Exception {
    // GIVEN
    String parserFilePath = "./all-mappings-docs.json";
    Map<String, RequestParamDocumentation> correctNameToTypeMap = new HashMap<String, RequestParamDocumentation>();
    addRequestParamToMap("ids", "string", "** user ids goes here **", true, correctNameToTypeMap);
    addRequestParamToMap("fields", "list", "** The keys from result json can be added as filter**", false,
            correctNameToTypeMap);
    addRequestParamToMap("since", "integer", "** since is optional parameter**", false, correctNameToTypeMap);
    addRequestParamToMap("until", "integer", "** until is optional parameter**", false, correctNameToTypeMap);
    // WHEN
    runDoclet();
    // THEN
    JSONParser parser = new JSONParser();
    FileReader fileReader = null;
    try {
        fileReader = new FileReader(parserFilePath);
        JSONObject json = (JSONObject) parser.parse(fileReader);
        JSONArray arr = (JSONArray) json.get("endpoints");
        JSONObject obj = (JSONObject) arr.get(0);

        JSONArray reqParam = (JSONArray) obj.get("requestParams");

        for (int i = 0; i < reqParam.size(); i++) {
            JSONObject currentParam = (JSONObject) reqParam.get(i);
            String currentName = (String) currentParam.get("name");
            RequestParamDocumentation correctCurrentParam = correctNameToTypeMap.get(currentName);
            Assert.assertEquals((String) currentParam.get("type"), correctCurrentParam.getType());
            Assert.assertEquals((String) currentParam.get("name"), correctCurrentParam.getName());
            Assert.assertEquals((String) currentParam.get("description"), correctCurrentParam.getDescription());
            Assert.assertEquals(currentParam.get("required"), correctCurrentParam.isRequired());
        }
    } finally {
        if (fileReader != null) {
            fileReader.close();
        }
        deleteJsonFile(parserFilePath);
    }
}

From source file:com.apifest.doclet.integration.tests.DocletTest.java

@Test
public void check_what_doclet_will_generate_correct_metrics_result_parameters() throws Exception {
    // GIVEN
    String parserFilePath = "./all-mappings-docs.json";
    Map<String, ResultParamDocumentation> resNameToTypeMap = new HashMap<String, ResultParamDocumentation>();
    addResultParamToMap("channel", "string", "The **channel** description", true, resNameToTypeMap);
    addResultParamToMap("updated_time", "string", "The **updated_time** description", true, resNameToTypeMap);
    addResultParamToMap("request_handle", "string", "The **request_handle** description", true,
            resNameToTypeMap);
    addResultParamToMap("sentiment.score", "string", "The **sentiment_score** description", true,
            resNameToTypeMap);
    addResultParamToMap("sentiment.positive", "string", "The **sentiment_positive** description", true,
            resNameToTypeMap);
    addResultParamToMap("sentiment.neutral", "string", "The **sentiment_neutral** description", true,
            resNameToTypeMap);
    addResultParamToMap("sentiment.negative", "string", "The **sentiment_negative** description", true,
            resNameToTypeMap);
    addResultParamToMap("engagement.replies", "integer", "The **engagement_replies** description", true,
            resNameToTypeMap);
    addResultParamToMap("engagement.tweets", "integer", "The **engagement_tweets** description", true,
            resNameToTypeMap);
    // WHEN
    runDoclet();
    // THEN
    JSONParser parser = new JSONParser();
    FileReader fileReader = null;
    try {
        fileReader = new FileReader(parserFilePath);
        JSONObject json = (JSONObject) parser.parse(fileReader);
        JSONArray arr = (JSONArray) json.get("endpoints");
        JSONObject obj = (JSONObject) arr.get(1);

        JSONArray resParam = (JSONArray) obj.get("resultParams");
        for (int i = 0; i < resParam.size(); i++) {
            JSONObject currentParam = (JSONObject) resParam.get(i);
            String currentName = (String) currentParam.get("name");
            ResultParamDocumentation correctCurrentParam = resNameToTypeMap.get(currentName);
            Assert.assertEquals((String) currentParam.get("type"), correctCurrentParam.getType());
            Assert.assertEquals((String) currentParam.get("name"), correctCurrentParam.getName());
            Assert.assertEquals((String) currentParam.get("description"), correctCurrentParam.getDescription());
            Assert.assertEquals(currentParam.get("required"), correctCurrentParam.isRequired());

        }
    } finally {
        if (fileReader != null) {
            fileReader.close();
        }
        deleteJsonFile(parserFilePath);
    }
}

From source file:com.apifest.doclet.integration.tests.DocletTest.java

@Test
public void check_what_doclet_will_generate_correct_metric_exceptions() throws Exception {
    // GIVEN
    String parserFilePath = "./all-mappings-docs.json";
    Map<String, ExceptionDocumentation> exsNameToDescriptionMap = new HashMap<String, ExceptionDocumentation>();
    addException("invalid_parameter", "Please add valid parameter", 400, "The parameter is invalid",
            exsNameToDescriptionMap);
    // WHEN
    runDoclet();
    // THEN
    JSONParser parser = new JSONParser();
    FileReader fileReader = null;
    try {
        fileReader = new FileReader(parserFilePath);
        JSONObject json = (JSONObject) parser.parse(fileReader);
        JSONArray arr = (JSONArray) json.get("endpoints");
        JSONObject obj = (JSONObject) arr.get(1);

        JSONArray exsParam = (JSONArray) obj.get("exceptions");
        for (int i = 0; i < exsParam.size(); i++) {
            JSONObject currentParam = (JSONObject) exsParam.get(i);
            String currentName = (String) currentParam.get("name");
            ExceptionDocumentation correctCurrentParam = exsNameToDescriptionMap.get(currentName);
            Assert.assertEquals((String) currentParam.get("name"), correctCurrentParam.getName());
            Assert.assertEquals((String) currentParam.get("condition"), correctCurrentParam.getCondition());
            Assert.assertEquals((String) currentParam.get("description"), correctCurrentParam.getDescription());
            Assert.assertEquals(currentParam.get("code"), new Long(correctCurrentParam.getCode()));
        }
    } finally {
        if (fileReader != null) {
            fileReader.close();
        }
        deleteJsonFile(parserFilePath);
    }
}

From source file:com.apifest.doclet.integration.tests.DocletTest.java

@Test
public void check_what_doclet_will_generate_correct_stream_exceptions() throws Exception {
    // GIVEN
    String parserFilePath = "./all-mappings-docs.json";
    Map<String, ExceptionDocumentation> exsNameToDescriptionMap = new HashMap<String, ExceptionDocumentation>();
    addException("invalid_since_until", "Since/until parameter must be within the last 30 days", 400,
            "The period is invalid", exsNameToDescriptionMap);
    // WHEN
    runDoclet();
    // THEN
    JSONParser parser = new JSONParser();
    FileReader fileReader = null;
    try {
        fileReader = new FileReader(parserFilePath);
        JSONObject json = (JSONObject) parser.parse(fileReader);
        JSONArray arr = (JSONArray) json.get("endpoints");
        JSONObject obj = (JSONObject) arr.get(0);

        JSONArray exsParam = (JSONArray) obj.get("exceptions");
        for (int i = 0; i < exsParam.size(); i++) {
            JSONObject currentParam = (JSONObject) exsParam.get(i);
            String currentName = (String) currentParam.get("name");
            ExceptionDocumentation correctCurrentParam = exsNameToDescriptionMap.get(currentName);
            Assert.assertEquals((String) currentParam.get("name"), correctCurrentParam.getName());
            Assert.assertEquals((String) currentParam.get("condition"), correctCurrentParam.getCondition());
            Assert.assertEquals((String) currentParam.get("description"), correctCurrentParam.getDescription());
            Assert.assertEquals(currentParam.get("code"), new Long(correctCurrentParam.getCode()));
        }
    } finally {
        if (fileReader != null) {
            fileReader.close();
        }
        deleteJsonFile(parserFilePath);
    }
}

From source file:com.datatorrent.contrib.hdht.HDHTWriter.java

/**
 * Flush changes from write cache to disk. New data files will be written and meta data replaced atomically. The flush
 * frequency determines availability of changes to external readers.
 *
 * @throws IOException
 */
private void writeDataFiles(Bucket bucket) throws IOException {
    BucketIOStats ioStats = getOrCretaStats(bucket.bucketKey);
    LOG.debug("Writing data files in bucket {}", bucket.bucketKey);
    // copy meta data on write
    BucketMeta bucketMetaCopy = kryo.copy(getMeta(bucket.bucketKey));

    /** Process purge requests before flushing data from cache to maintain
     * the order of purge and put operations. This makes sure that purged data
     * is removed from the files before new data is added to them. */
    HashSet<String> filesToDelete = Sets.newHashSet();
    bucketMetaCopy = processPurge(bucket, bucketMetaCopy, filesToDelete);

    // bucket keys by file
    TreeMap<Slice, BucketFileMeta> bucketSeqStarts = bucketMetaCopy.files;
    Map<BucketFileMeta, Map<Slice, Slice>> modifiedFiles = Maps.newHashMap();

    for (Map.Entry<Slice, byte[]> entry : bucket.frozenWriteCache.entrySet()) {
        // find file for key
        Map.Entry<Slice, BucketFileMeta> floorEntry = bucketSeqStarts.floorEntry(entry.getKey());
        BucketFileMeta floorFile;
        if (floorEntry != null) {
            floorFile = floorEntry.getValue();
        } else {
            floorEntry = bucketSeqStarts.firstEntry();
            if (floorEntry == null || floorEntry.getValue().name != null) {
                // no existing file or file with higher key
                floorFile = new BucketFileMeta();
            } else {
                // placeholder for new keys, move start key
                floorFile = floorEntry.getValue();
                bucketSeqStarts.remove(floorEntry.getKey());
            }
            floorFile.startKey = entry.getKey();
            if (floorFile.startKey.length != floorFile.startKey.buffer.length) {
                // normalize key for serialization
                floorFile.startKey = new Slice(floorFile.startKey.toByteArray());
            }
            bucketSeqStarts.put(floorFile.startKey, floorFile);
        }

        Map<Slice, Slice> fileUpdates = modifiedFiles.get(floorFile);
        if (fileUpdates == null) {
            modifiedFiles.put(floorFile, fileUpdates = Maps.newHashMap());
        }
        fileUpdates.put(entry.getKey(), new Slice(entry.getValue()));
    }

    // write modified files
    for (Map.Entry<BucketFileMeta, Map<Slice, Slice>> fileEntry : modifiedFiles.entrySet()) {
        BucketFileMeta fileMeta = fileEntry.getKey();
        TreeMap<Slice, Slice> fileData = new TreeMap<Slice, Slice>(getKeyComparator());

        if (fileMeta.name != null) {
            // load existing file
            long start = System.currentTimeMillis();
            FileReader reader = store.getReader(bucket.bucketKey, fileMeta.name);
            reader.readFully(fileData);
            ioStats.dataBytesRead += store.getFileSize(bucket.bucketKey, fileMeta.name);
            ioStats.dataReadTime += System.currentTimeMillis() - start;
            /* these keys are re-written */
            ioStats.dataKeysRewritten += fileData.size();
            ioStats.filesReadInCurrentWriteCycle++;
            ioStats.dataFilesRead++;
            reader.close();
            filesToDelete.add(fileMeta.name);
        }

        // apply updates
        fileData.putAll(fileEntry.getValue());
        // new file
        writeFile(bucket, bucketMetaCopy, fileData);
    }

    LOG.debug("Files written {} files read {}", ioStats.filesWroteInCurrentWriteCycle,
            ioStats.filesReadInCurrentWriteCycle);
    // flush meta data for new files
    try {
        LOG.debug("Writing {} with {} file entries", FNAME_META, bucketMetaCopy.files.size());
        OutputStream os = store.getOutputStream(bucket.bucketKey, FNAME_META + ".new");
        Output output = new Output(os);
        bucketMetaCopy.committedWid = bucket.committedLSN;
        bucketMetaCopy.recoveryStartWalPosition = bucket.recoveryStartWalPosition;
        kryo.writeClassAndObject(output, bucketMetaCopy);
        output.close();
        os.close();
        store.rename(bucket.bucketKey, FNAME_META + ".new", FNAME_META);
    } catch (IOException e) {
        throw new RuntimeException("Failed to write bucket meta data " + bucket.bucketKey, e);
    }

    // clear pending changes
    ioStats.dataKeysWritten += bucket.frozenWriteCache.size();
    // switch to new version
    this.metaCache.put(bucket.bucketKey, bucketMetaCopy);

    // delete old files
    for (String fileName : filesToDelete) {
        store.delete(bucket.bucketKey, fileName);
    }
    invalidateReader(bucket.bucketKey, filesToDelete);
    // clearing cache after invalidating readers
    bucket.frozenWriteCache.clear();

    // cleanup WAL files which are not needed anymore.
    minimumRecoveryWalPosition = bucketMetaCopy.recoveryStartWalPosition;
    for (Long bucketId : this.bucketKeys) {
        BucketMeta meta = getMeta(bucketId);
        if (meta.recoveryStartWalPosition.fileId < minimumRecoveryWalPosition.fileId
                || (meta.recoveryStartWalPosition.fileId == minimumRecoveryWalPosition.fileId
                        && meta.recoveryStartWalPosition.offset < minimumRecoveryWalPosition.offset)) {
            minimumRecoveryWalPosition = meta.recoveryStartWalPosition;
        }
    }
    this.wal.cleanup(minimumRecoveryWalPosition.fileId);
    ioStats.filesReadInCurrentWriteCycle = 0;
    ioStats.filesWroteInCurrentWriteCycle = 0;
}