Example usage for java.io FileReader close

List of usage examples for java.io FileReader close

Introduction

In this page you can find the example usage for java.io FileReader close.

Prototype

public void close() throws IOException 

Source Link

Usage

From source file:org.eclipse.kura.core.deployment.install.InstallImpl.java

/**
 * Loads the deployment-package installation persistence properties from the
 * given file.
 *
 * @param installedDpPersistance the properties file to read.
 * @return the loaded properties; empty if the file could not be read
 *         (the error is logged, never propagated).
 */
private Properties loadInstallPersistance(File installedDpPersistance) {
    Properties downloadProperies = new Properties();
    // try-with-resources guarantees the reader is closed even when load() throws,
    // replacing the manual null-checked finally block of the original.
    try (FileReader fr = new FileReader(installedDpPersistance)) {
        downloadProperies.load(fr);
    } catch (IOException e) {
        s_logger.error("Exception loading install configuration file", e);
    }
    return downloadProperies;
}

From source file:org.apache.pig.piggybank.squeal.backend.storm.Main.java

/**
 * Builds the Storm {@code Config} from the Pig properties (optionally merging
 * an extra YAML configuration file), registers serializers, and submits the
 * topology under the given name.
 *
 * @param topology_name name under which the topology is submitted.
 * @throws AlreadyAliveException if a topology with this name is already running.
 * @throws InvalidTopologyException if the built topology is invalid.
 */
public void submitTopology(String topology_name) throws AlreadyAliveException, InvalidTopologyException {
    Config conf = new Config();

    String extraConf = pc.getProperties().getProperty(EXTRA_CONF_KEY, null);
    if (extraConf != null) {
        System.out.println("Loading additional configuration properties from: " + extraConf);
        // Load the configuration file. try-with-resources closes the reader even
        // when yaml.load() or putAll() throws; the original only closed it on the
        // success path and leaked the descriptor on failure.
        Yaml yaml = new Yaml();
        try (FileReader fr = new FileReader(extraConf)) {
            @SuppressWarnings("unchecked")
            Map<String, Object> m = (Map<String, Object>) yaml.load(fr);
            conf.putAll(m);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    // Worker/acker parallelism comes from Pig properties, with defaults of 4 and 1.
    int workers = Integer.parseInt(pc.getProperties().getProperty(WORKERS_COUNT_KEY, "4"));
    conf.setNumWorkers(workers);
    int ackers = Integer.parseInt(pc.getProperties().getProperty(ACKERS_COUNT_KEY, "1"));
    conf.setNumAckers(ackers);

    // Register a Serializer for any Writable.
    registerSerializer(conf);

    passPigContextProperties(conf);

    StormSubmitter submitter = new StormSubmitter();

    submitter.submitTopology(topology_name, conf, t.build());
}

From source file:io.sugo.grok.api.Grok.java

/**
 * Add patterns to {@code Grok} from the given file.
 *
 * @param file : Path of the grok pattern
 * @throws GrokException runtime expt/*from   www . j  ava  2 s .  c o m*/
 */
public void addPatternFromFile(String file) throws GrokException {

    File f = new File(file);
    if (!f.exists()) {
        throw new GrokException("Pattern not found");
    }

    if (!f.canRead()) {
        throw new GrokException("Pattern cannot be read");
    }

    FileReader r = null;
    try {
        r = new FileReader(f);
        addPatternFromReader(r);
    } catch (FileNotFoundException e) {
        throw new GrokException(e.getMessage());
    } catch (@SuppressWarnings("hiding") IOException e) {
        throw new GrokException(e.getMessage());
    } finally {
        try {
            if (r != null) {
                r.close();
            }
        } catch (IOException io) {
            // TODO(anthony) : log the error
        }
    }
}

From source file:madkitgroupextension.export.Export.java

/**
 * Writes the jar index list to {@code dst}. When MadKit is included, the
 * existing MadKit index file is copied first; otherwise a fresh JarIndex
 * header is written. The paths of all files under the exported
 * madkitgroupextension tree are then appended, relative to the jar directory.
 *
 * @param dst the index file to write.
 * @param madkitindexfile the MadKit index file to copy when m_include_madkit is set.
 * @throws IOException on any read or write failure.
 */
public void saveIndexList(File dst, File madkitindexfile) throws IOException {
    // try-with-resources closes every stream even on failure; the original
    // leaked all four streams if any read/write threw.
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(dst))) {
        if (m_include_madkit) {
            try (BufferedReader bf = new BufferedReader(new FileReader(madkitindexfile))) {
                String line;
                while ((line = bf.readLine()) != null) {
                    bw.write(line + "\n");
                }
            }
        } else {
            bw.write("JarIndex-Version: 1.0\n\n");
        }
        // Strip the temporary export prefix so entries are jar-relative.
        int l = (ExportPathTmp + "jardir/").length();
        for (File f : FileTools.getTree(ExportPathTmp + "jardir/madkitgroupextension/")) {
            bw.write(f.getPath().substring(l) + "\n");
        }
        bw.flush();
    }
}

From source file:org.deri.iris.queryrewriting.QueryRewritingTest.java

/**
 * End-to-end test driver: for every test ontology listed in test-cases.txt,
 * parses the program, rewrites each query under the configured strategy, and
 * dumps per-query rewriting files plus four CSV summary reports
 * (size / time / cache / memory).
 *
 * NOTE(review): the FileReader, FileWriter and CSVWriter resources in this
 * method are closed only on the success path (no finally / try-with-resources),
 * so an exception mid-run leaks them — documented here, code left unchanged.
 *
 * @throws Exception on any parsing, rewriting or I/O failure.
 */
public void testFORewriting() throws Exception {

    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;

    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy " + ncCheckStrategy.name());

    // Read the test-cases file

    final File testSuiteFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases.txt");

    // NOTE(review): this FileReader is never closed.
    final List<String> tests = IOUtils.readLines(new FileReader(testSuiteFile));

    final String creationDate = dateFormat.format(new Date());

    // Summary reporting
    final String summaryPrefix = StringUtils.join(creationDate, "-", decomposition.name(), "-", rewLang.name(),
            "-", subchkStrategy.name(), "-", ncCheckStrategy.name());

    final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
    final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');

    final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
    final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');

    final File cacheSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "cache-summary.csv"));
    final CSVWriter cacheSummaryWriter = new CSVWriter(new FileWriter(cacheSummaryFile), ',');

    final File memorySummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "memory-summary.csv"));
    final CSVWriter memorySummaryWriter = new CSVWriter(new FileWriter(memorySummaryFile), ',');

    // Each summary CSV starts with its header row.
    sizeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingSizeReportHeader());
    timeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingTimeReportHeader());
    cacheSummaryWriter.writeNext(ReportingUtils.getSummaryCachingReportHeader());
    memorySummaryWriter.writeNext(ReportingUtils.getSummaryMemoryReportHeader());

    // Compute the rewriting for each test ontology.
    for (final String testName : tests) {

        // Read the next test case on the list
        final File testFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), testName + ".dtg");

        // Create the Directory where to store the test results
        final File outTestDir = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH), testName);
        if (!outTestDir.exists()) {
            if (outTestDir.mkdir()) {
                LOGGER.info("Created output directory: " + testName);
            } else {
                LOGGER.fatal("Error creating output directory");
            }
        }

        LOGGER.info("Processing file: " + testName);

        // Read the content of the current program
        // NOTE(review): char-by-char read; the reader leaks if read() throws.
        final FileReader fr = new FileReader(testFile);
        final StringBuilder sb = new StringBuilder();
        int ch = -1;
        while ((ch = fr.read()) >= 0) {
            sb.append((char) ch);
        }
        final String program = sb.toString();
        fr.close();

        // Parse the program
        final Parser parser = new Parser();
        parser.parse(program);

        // Get the rules
        final List<IRule> rules = parser.getRules();

        // Get the queries
        final List<IQuery> queryHeads = parser.getQueries();

        // Get the TGDs from the set of rules
        final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);

        // Convert the query bodies in rules
        final List<IRule> bodies = new LinkedList<IRule>(rules);
        bodies.removeAll(tgds);

        final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads);

        // get the constraints from the set of rules
        final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);

        final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
        LOGGER.info("Expressivity: " + exprs.toString());

        if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY))
            throw new EvaluationException("Only Linear and Sticky TGDs are supported for rewriting.");

        // compute the dependency graph

        LOGGER.debug("Computing position dependencies.");
        // long depGraphMem = MonitoringUtils.getHeapUsage();
        long posDepTime = System.currentTimeMillis();
        Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils
                .computePositionDependencyGraph(tgds);
        posDepTime = System.currentTimeMillis() - posDepTime;

        // Setup caching
        CacheManager.setupCaching();

        // if linear TGDs, compute the atom coverage graph.
        LOGGER.debug("Computing atom coverage graph.");
        long atomCoverGraphTime = System.currentTimeMillis();
        if (exprs.contains(Expressivity.LINEAR)) {
            deps = DepGraphUtils.computeAtomCoverageGraph(deps);
        }
        atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;
        // depGraphMem = MonitoringUtils.getHeapUsage() - depGraphMem;

        // rewriting constraints
        // long ncRewMem = MonitoringUtils.getHeapUsage();
        final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
                RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
        long ncRewTime = System.currentTimeMillis();
        final Set<IRule> rewrittenConstraints = Sets.newHashSet();
        if (!ncCheckStrategy.equals(NCCheck.NONE)) {
            for (final IRule c : constraints) {
                rewrittenConstraints
                        .addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
            }
        }
        ncRewTime = System.currentTimeMillis() - ncRewTime;
        // ncRewMem = ncRewMem - MonitoringUtils.getHeapUsage();
        LOGGER.debug("Finished rewriting constraints.");

        // dump the rewritten constraints:
        File outFile = FileUtils.getFile(outTestDir, testName.concat("_cns.dtg"));
        final FileWriter cnsFW = new FileWriter(outFile);
        IOUtils.writeLines(rewrittenConstraints, IOUtils.LINE_SEPARATOR, cnsFW);
        cnsFW.close();

        // Compute the Rewriting
        final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy,
                ncCheckStrategy);
        for (final IRule q : queries) {

            // Setup caching
            CacheManager.setupCaching();

            final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                    .getPredicateSymbol();

            // Setup reporting
            final Reporter rep = Reporter.getInstance(true);
            Reporter.setupReporting();
            Reporter.setQuery(queryPredicate);
            Reporter.setOntology(testName);
            rep.setValue(RewMetric.DEPGRAPH_TIME, posDepTime);

            LOGGER.info("Processing query: ".concat(q.toString()));
            // final long rewMem = MonitoringUtils.getHeapUsage();
            final long overallTime = System.currentTimeMillis();
            final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps, exprs);
            rep.setValue(RewMetric.OVERALL_TIME, System.currentTimeMillis() - overallTime);
            // rep.setValue(RewMetric.REW_MEM, MonitoringUtils.getHeapUsage() - rewMem);
            // rep.setValue(RewMetric.DEPGRAPH_MEM, depGraphMem);
            rep.setValue(RewMetric.REW_SIZE, (long) rewriting.size());
            rep.setValue(RewMetric.JOIN_COUNT, RewritingUtils.joinCount(rewriting));
            rep.setValue(RewMetric.ATOM_COUNT, RewritingUtils.atomsCount(rewriting));
            rep.setValue(RewMetric.REW_CNS_COUNT, (long) rewrittenConstraints.size());
            rep.setValue(RewMetric.REW_CNS_TIME, ncRewTime);
            // rep.setValue(RewMetric.REW_CNS_MEM, ncRewMem);

            // Other metrics
            rep.setValue(RewMetric.OVERHEAD_TIME,
                    rep.getValue(RewMetric.OVERALL_TIME) - rep.getValue(RewMetric.REW_TIME));

            // Caching size metrics
            rep.setValue(RewMetric.MAX_COVERING_CACHE_SIZE, CoveringCache.getCache().size(CacheType.COVERING));
            rep.setValue(RewMetric.MAX_NON_COVERING_CACHE_SIZE,
                    CoveringCache.getCache().size(CacheType.NOT_COVERING));
            rep.setValue(RewMetric.MAX_MAPSTO_CACHE_SIZE, MapsToCache.size(MapsToCache.CacheType.MAPSTO));
            rep.setValue(RewMetric.MAX_NOT_MAPSTO_CACHE_SIZE,
                    MapsToCache.size(MapsToCache.CacheType.NOT_MAPSTO));
            rep.setValue(RewMetric.MAX_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_NON_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_RENAMING_CACHE_SIZE, RenamingCache.size());
            rep.setValue(RewMetric.MAX_MGU_CACHE_SIZE, MGUCache.size());

            // Create a file to store the rewriting results.

            outFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_rew.dtg"));
            final FileWriter rewFW = new FileWriter(outFile);

            rewFW.write("/// Query: " + q + "///\n");
            // NOTE(review): the next line has no trailing "\n", so the creation-date
            // header ends up on the same output line — confirm this is intended.
            rewFW.write("/// Ontology: " + testName + "///");
            rewFW.write("/// Created on: " + creationDate + " ///\n");
            rewFW.write("/// Rules in the program: " + rules.size() + " ///\n");
            rewFW.write("/// TGDs in the program: " + tgds.size() + " ///\n");
            rewFW.write("/// Constraints in the program: " + constraints.size() + " ///\n");
            rewFW.write("/// Theory expressivity: " + exprs.toString() + " ///\n");
            rewFW.write("/// Decomposition: " + decomposition.name() + " ///\n");
            rewFW.write("/// Subsumption Check Strategy: " + subchkStrategy.name() + " ///\n");
            rewFW.write("/// Negative Constraints Check Strategy: " + ncCheckStrategy.name() + " ///\n");
            rewFW.write(IOUtils.LINE_SEPARATOR);

            LOGGER.info("Writing the output at: " + outFile.getAbsolutePath());

            // dump metrics for individual queries.
            rewFW.write(rep.getReport());

            rewFW.write(IOUtils.LINE_SEPARATOR);
            rewFW.write(IOUtils.LINE_SEPARATOR);

            rewFW.write("/// Rewritten Program ///\n");
            final Set<ILiteral> newHeads = new HashSet<ILiteral>();
            for (final IRule qr : rewriting) {
                newHeads.add(qr.getHead().iterator().next());
                rewFW.write(qr + "\n");
            }
            rewFW.write("\n");
            for (final ILiteral h : newHeads) {
                rewFW.write("?- " + h + ".\n");
            }
            rewFW.write("\n");
            rewFW.flush();
            rewFW.close();

            // dump summary metrics.
            sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
            timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
            cacheSummaryWriter.writeNext(rep.getSummaryCacheMetrics());
            memorySummaryWriter.writeNext(rep.getSummaryMemoryMetrics());
            sizeSummaryWriter.flush();
            timeSummaryWriter.flush();
            cacheSummaryWriter.flush();
            memorySummaryWriter.flush();
        }
    }
    sizeSummaryWriter.close();
    timeSummaryWriter.close();
    cacheSummaryWriter.close();
    memorySummaryWriter.close();
}

From source file:model.settings.ReadSettings.java

/**
 * Replaces the value of the given option in the program settings file.
 * Every line that immediately follows a line equal to {@code _operation}
 * is replaced by {@code _newValue}; all other lines are kept unchanged.
 * The file is read fully, then rewritten in place.
 *
 * @param _operation the identifier line whose following line is replaced.
 * @param _newValue the new value written in place of the following line.
 *
 * @throws IOException is thrown in case of error.
 */
public static void changeOption(final String _operation, final String _newValue) throws IOException {

    // Accumulate the rewritten file content (StringBuilder avoids the O(n^2)
    // cost of repeated String concatenation in the original).
    final StringBuilder sumLine = new StringBuilder();

    // try-with-resources closes the reader even on failure; the original
    // leaked it if readLine() threw.
    try (BufferedReader br = new BufferedReader(new FileReader(PROGRAM_SETTINGS_LOCATION))) {
        boolean replaceNext = false;
        String currentLine = br.readLine();
        while (currentLine != null) {

            if (replaceNext) {
                // The line after the identifier is substituted with the new value.
                replaceNext = false;
                sumLine.append(_newValue).append("\n");
            } else {
                sumLine.append(currentLine).append("\n");
            }

            // if the current line is the identifier of the current
            // operation that has to be changed.
            if (currentLine.equals(_operation)) {
                replaceNext = true;
            }
            currentLine = br.readLine();
        }
    }

    // Rewrite the settings file with the updated content.
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(PROGRAM_SETTINGS_LOCATION))) {
        bw.write(sumLine.toString());
        bw.flush();
    }
}

From source file:loternik.MyService.java

/**
 * Scans kandydaci.csv and returns an HTML fragment with one paragraph per CSV
 * record whose "teryt" and "Okreg" columns match the given values.
 *
 * @param teryt the teryt code to match.
 * @param okreg the district to match.
 * @return the concatenated HTML paragraphs (possibly empty), or null if the
 *         file could not be read (the error is logged).
 */
public String getHelloMessage(String teryt, String okreg) {
    FileReader file = null;
    try {
        // StringBuilder avoids O(n^2) string concatenation in the loop.
        StringBuilder output = new StringBuilder();

        file = new FileReader("kandydaci.csv");
        CSVParser parser = new CSVParser(file, CSVFormat.EXCEL.withHeader().withDelimiter(';'));
        for (CSVRecord csvRecord : parser) {
            if (teryt.equals(csvRecord.get("teryt")) && okreg.equals(csvRecord.get("Okreg"))) {
                output.append("<p>").append(csvRecord.toString()).append("</p>");
            }
        }
        return output.toString();
    } catch (FileNotFoundException ex) {
        Logger.getLogger(MyService.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(MyService.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        // Guard fixes the NPE the original threw here when the FileReader
        // constructor failed and 'file' was still null.
        if (file != null) {
            try {
                file.close();
            } catch (IOException ex) {
                Logger.getLogger(MyService.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }
    return null;
}

From source file:org.globus.gsi.SigningPolicyParser.java

/**
 * Parses the file to extract signing policy defined for CA with the
 * specified DN. If the policy file does not exist, a SigningPolicy object
 * with only CA DN is created. If policy path exists, but no relevant policy
 * exists, SigningPolicy object with CA DN and file path is created.
 *
 * @param fileName Name of the signing policy file
 * @return SigningPolicy object that contains the information. If no policy
 *         is found, SigningPolicy object with only the CA DN is returned.
 * @throws org.globus.gsi.SigningPolicyException
 *                               Any errors with parsing the signing policy file.
 * @throws FileNotFoundException If the signing policy file does not exist.
 */
public Map<X500Principal, SigningPolicy> parse(String fileName)
        throws FileNotFoundException, SigningPolicyException {

    if ((fileName == null) || (fileName.trim().isEmpty())) {
        throw new IllegalArgumentException();
    }

    logger.debug("Signing policy file name " + fileName);

    FileReader fileReader = null;

    try {
        fileReader = new FileReader(fileName);
        return parse(fileReader);
    } catch (FileNotFoundException e) {
        // Propagate as declared: the original's broad catch (Exception)
        // wrapped this into SigningPolicyException, so the documented
        // FileNotFoundException was never actually thrown to callers.
        throw e;
    } catch (Exception e) {
        throw new SigningPolicyException(e);
    } finally {
        if (fileReader != null) {
            try {
                fileReader.close();
            } catch (Exception exp) {
                // Best-effort close; a failure here must not mask the result.
                logger.debug("Error closing file reader", exp);
            }
        }
    }

}

From source file:org.apache.struts.scripting.ScriptAction.java

/**
 *  Loads the script from cache if possible. Reloads if the script has been
 *  recently modified.
 *
 *@param  name     The name of the script
 *@param  context  The servlet context
 *@return          The script object
 */
protected Script loadScript(String name, ServletContext context) {

    // Cache lookup; on a miss, build a fresh descriptor pointing at the file
    // under the webapp's real path and guess its BSF language from the name.
    // NOTE(review): the new Script is never put back into the 'scripts' map in
    // this method — presumably registration happens elsewhere; confirm,
    // otherwise every cache miss re-reads the file.
    Script script = (Script) scripts.get(name);
    if (script == null) {
        script = new Script();
        script.file = new File(context.getRealPath(name));
        try {
            script.lang = BSFManager.getLangFromFilename(script.file.getName());
        } catch (BSFException ex) {
            LOG.warn(ex, ex);
        }
    }

    // Reload when the on-disk file is newer than the last load.
    boolean reloadScript = false;
    long scriptLastModified = script.file.lastModified();
    if (scriptLastModified > script.timeLastLoaded) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Loading updated or new script: " + script.file.getName());
        }
        reloadScript = true;
    }

    if (reloadScript || script.string == null) {
        // Synchronize so concurrent requests don't read/assign the script
        // source at the same time; timestamp is taken before the read.
        synchronized (this) {
            script.timeLastLoaded = System.currentTimeMillis();
            FileReader reader = null;
            try {
                reader = new FileReader(script.file);
                script.string = IOUtils.getStringFromReader(reader);
            } catch (IOException ex) {
                // On failure, keep whatever source was loaded previously.
                LOG.error("Unable to load script: " + script.file, ex);
            } finally {
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (IOException ex) {
                        LOG.debug(ex, ex);
                    }
                }
            }
        }
    }

    return script;
}

From source file:org.hyperic.util.PropertyUtil.java

/**
 * Saves the provided map of keys and values into the properties file specified by <code>propFilePath</code>.
 * Values of properties that already exist in the file are overwritten. New values are placed near the end of the
 * file.
 *
 * NOTE(review): this relies on processLine() consuming entries from
 * {@code props} as they are matched, so the trailing loop only appends the
 * leftovers — confirm against processLine's contract.
 *
 * @param propFilePath the path (absolute/relative) to the properties file to edit.
 * @param props the properties to add/update.
 */
private static void _storeProperties(String propFilePath, Map<String, String> props)
        throws PropertyUtilException {
    // If the provided properties map is null or empty then exit the method.
    if (props == null || props.size() < 1) {
        return;
    }

    // The list of lines that will be written to disk.
    final List<String> newLines = new ArrayList<String>();

    try {
        // Read the file line by line, replacing values as we go.
        // try-with-resources closes the reader (and its buffer) even on
        // failure, replacing the manual finally block of the original.
        try (BufferedReader bufferedReader = new BufferedReader(new FileReader(propFilePath))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                newLines.add(processLine(line, props));
            }
        } // EO read.

        // Iterate values that are left in the provided map and add them to the file.
        for (Map.Entry<String, String> entry : props.entrySet()) {
            newLines.add(entry.getKey() + " = " + entry.getValue());
        }

        // TODO: change to UTF-8 and add support on property loader side. This might help:
        // http://stackoverflow.com/questions/863838/problem-with-java-properties-utf8-encoding-in-eclipse
        try (PrintWriter writer = new PrintWriter(
                new OutputStreamWriter(new FileOutputStream(propFilePath), "ISO-8859-1"))) {
            for (String aLineData : newLines) {
                writer.println(aLineData);
            }
            writer.flush();
        }
    } catch (IOException exc) {
        String message = "Failed to store properties into the file: " + propFilePath;
        LOG.error(message, exc);
        throw new PropertyUtilException(exc);
    } // EO try-catch
}