Example usage for java.io FileReader read

List of usage examples for java.io FileReader read

Introduction

On this page you can find usage examples for java.io FileReader read.

Prototype

public int read() throws IOException 

Document

Reads a single character.
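
The call returns the character read as an integer in the range 0 to 65535, or -1 once the end of the stream has been reached, so callers loop until -1. A minimal sketch of that idiom (the file name here is only a placeholder):

import java.io.FileReader;
import java.io.IOException;

public class FileReaderReadDemo {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes the reader even if read() throws
        try (FileReader reader = new FileReader("example.txt")) {
            int ch;
            // read() returns -1 at end of stream
            while ((ch = reader.read()) != -1) {
                System.out.print((char) ch);
            }
        }
    }
}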

Usage

From source file:com.ephesoft.dcma.util.FileUtils.java

/**
 * To copy all XML files.
 * @param fromLoc {@link String}
 * @param toLoc {@link String}
 */
public static void copyAllXMLFiles(String fromLoc, String toLoc) {
    File inputFolder = new File(fromLoc);
    File outputFolder = new File(toLoc);
    File[] inputFiles = inputFolder.listFiles();
    if (inputFiles == null) {
        // fromLoc is not a directory or could not be read
        return;
    }
    for (int index = 0; index < inputFiles.length; index++) {
        if (inputFiles[index].getName().endsWith(EXTENSION_XML)) {
            // try-with-resources closes both streams even if the copy fails midway
            try (FileReader input = new FileReader(inputFiles[index]);
                    FileWriter out = new FileWriter(new File(outputFolder, inputFiles[index].getName()))) {
                int character = input.read();
                while (character != -1) {
                    out.write(character);
                    character = input.read();
                }
            } catch (FileNotFoundException e) {
                LOGGER.error("Exception while reading files:" + e);
            } catch (IOException e) {
                LOGGER.error("Exception while copying files:" + e);
            }
        }
    }

}
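
Note that the loop above reads straight from the FileReader; the Java documentation recommends wrapping a FileReader in a BufferedReader for efficiency. A sketch of the same copy with buffering added (a variation on the example above, not part of the original source):

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;

public final class BufferedCopy {
    // copy source to target one character at a time, through buffered streams
    static void copy(File source, File target) throws IOException {
        try (BufferedReader in = new BufferedReader(new FileReader(source));
                BufferedWriter out = new BufferedWriter(new FileWriter(target))) {
            int character;
            while ((character = in.read()) != -1) {
                out.write(character);
            }
        }
    }
}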

From source file:org.deri.iris.performance.IRISPerformanceTest.java

/**
 * Loads the content of a file as a string.
 * @param filename the file name.
 * @return a string representing the file content.
 */
private String loadFile(final String filename) {
    final StringBuilder builder = new StringBuilder();
    FileReader r = null;
    try {
        r = new FileReader(filename);
        int ch;
        while ((ch = r.read()) >= 0) {
            builder.append((char) ch);
        }
    } catch (final IOException e) {
        // FileNotFoundException is a subclass of IOException, so one catch suffices
        e.printStackTrace();
    } finally {
        if (r != null) {
            try {
                r.close();
            } catch (final IOException e) {
                e.printStackTrace();
            }
        }
    }

    return builder.toString();
}

From source file:cs.ox.ac.uk.gsors.GroupPreferencesTest1.java

public String getStringFile(String input) throws IOException {
    // Read the content of the current program
    final FileReader fr = new FileReader(input);
    final StringBuilder sb = new StringBuilder();
    try {
        int ch;
        while ((ch = fr.read()) >= 0) {
            sb.append((char) ch);
        }
    } finally {
        // close the reader even if read() throws
        fr.close();
    }
    return sb.toString();
}

From source file:org.webguitoolkit.tools.document.impl.file.FileProxy.java

public boolean copyTo(String absoluteUri, boolean deep) throws Exception {
    FileReader in = null;
    FileWriter out = null;
    try {
        File outputFile = new File(absoluteUri);

        in = new FileReader(me);
        out = new FileWriter(outputFile);
        int c;

        while ((c = in.read()) != -1)
            out.write(c);
        return true;
    } catch (Exception e) {
        return false;
    } finally {
        // close both streams; if closing one fails, still close the other
        try {
            if (in != null)
                in.close();
        } finally {
            if (out != null)
                out.close();
        }
    }
}

From source file:org.apache.hadoop.mapred.TestTaskLogsMonitor.java

/**
 * Test the truncation of log-file when JVM-reuse is enabled.
 *
 * @throws IOException
 */
@Test
public void testLogTruncationOnFinishingWithJVMReuse() throws IOException {
    TaskTracker taskTracker = new TaskTracker();
    TaskLogsMonitor logsMonitor = new TaskLogsMonitor(150L, 150L);
    taskTracker.setTaskLogsMonitor(logsMonitor);

    TaskID baseTaskID = new TaskID();
    int attemptsCount = 0;

    // Assuming the job's retain size is 150
    TaskAttemptID attempt1 = new TaskAttemptID(baseTaskID, attemptsCount++);
    Task task1 = new MapTask(null, attempt1, 0, null, null, 0, null);

    // Let the tasks write logs more than retain-size
    writeRealBytes(attempt1, attempt1, LogName.SYSLOG, 200, 'A');

    logsMonitor.monitorTaskLogs();

    File attemptDir = TaskLog.getBaseDir(attempt1.toString());
    assertTrue(attemptDir + " doesn't exist!", attemptDir.exists());

    // Start another attempt in the same JVM
    TaskAttemptID attempt2 = new TaskAttemptID(baseTaskID, attemptsCount++);
    Task task2 = new MapTask(null, attempt2, 0, null, null, 0, null);
    logsMonitor.monitorTaskLogs();

    // Let attempt2 also write some logs
    writeRealBytes(attempt1, attempt2, LogName.SYSLOG, 100, 'B');
    logsMonitor.monitorTaskLogs();

    // Start yet another attempt in the same JVM
    TaskAttemptID attempt3 = new TaskAttemptID(baseTaskID, attemptsCount++);
    Task task3 = new MapTask(null, attempt3, 0, null, null, 0, null);
    logsMonitor.monitorTaskLogs();

    // Let attempt3 also write some logs
    writeRealBytes(attempt1, attempt3, LogName.SYSLOG, 225, 'C');
    logsMonitor.monitorTaskLogs();

    // Finish the JVM.
    logsMonitor.addProcessForLogTruncation(attempt1, Arrays.asList((new Task[] { task1, task2, task3 })));

    // The log-file should now be truncated.
    logsMonitor.monitorTaskLogs();
    assertTrue(attemptDir.exists());
    File logFile = TaskLog.getTaskLogFile(attempt1, LogName.SYSLOG);
    assertEquals(400, logFile.length());
    // The index files should also be proper.
    assertEquals(150, getAllLogsFileLengths(attempt1, false).get(LogName.SYSLOG).longValue());
    assertEquals(100, getAllLogsFileLengths(attempt2, false).get(LogName.SYSLOG).longValue());
    assertEquals(150, getAllLogsFileLengths(attempt3, false).get(LogName.SYSLOG).longValue());

    // assert the data.
    FileReader reader = new FileReader(TaskLog.getTaskLogFile(attempt1, LogName.SYSLOG));
    int ch, bytesRead = 0;
    boolean dataValid = true;
    while ((ch = reader.read()) != -1) {
        bytesRead++;
        if (bytesRead <= 150) {
            if ((char) ch != 'A') {
                LOG.warn("Truncation didn't happen properly. At " + (bytesRead + 1)
                        + "th byte, expected 'A' but found " + (char) ch);
                dataValid = false;
            }
        } else if (bytesRead <= 250) {
            if ((char) ch != 'B') {
                LOG.warn("Truncation didn't happen properly. At " + (bytesRead + 1)
                        + "th byte, expected 'B' but found " + (char) ch);
                dataValid = false;
            }
        } else if ((char) ch != 'C') {
            LOG.warn("Truncation didn't happen properly. At " + (bytesRead + 1)
                    + "th byte, expected 'C' but found " + (char) ch);
            dataValid = false;
        }
    }
    assertTrue("Log-truncation didn't happen properly!", dataValid);

    logsMonitor.monitorTaskLogs();
    assertEquals(400, logFile.length());
}

From source file:org.deri.iris.queryrewriting.QueryRewritingTest.java

public void testFORewriting() throws Exception {

    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;

    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy " + ncCheckStrategy.name());

    // Read the test-cases file

    final File testSuiteFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases.txt");

    final List<String> tests = IOUtils.readLines(new FileReader(testSuiteFile));

    final String creationDate = dateFormat.format(new Date());

    // Summary reporting
    final String summaryPrefix = StringUtils.join(creationDate, "-", decomposition.name(), "-", rewLang.name(),
            "-", subchkStrategy.name(), "-", ncCheckStrategy.name());

    final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
    final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');

    final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
    final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');

    final File cacheSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "cache-summary.csv"));
    final CSVWriter cacheSummaryWriter = new CSVWriter(new FileWriter(cacheSummaryFile), ',');

    final File memorySummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "memory-summary.csv"));
    final CSVWriter memorySummaryWriter = new CSVWriter(new FileWriter(memorySummaryFile), ',');

    sizeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingSizeReportHeader());
    timeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingTimeReportHeader());
    cacheSummaryWriter.writeNext(ReportingUtils.getSummaryCachingReportHeader());
    memorySummaryWriter.writeNext(ReportingUtils.getSummaryMemoryReportHeader());

    // Compute the rewriting for each test ontology.
    for (final String testName : tests) {

        // Read the next test case on the list
        final File testFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), testName + ".dtg");

        // Create the Directory where to store the test results
        final File outTestDir = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH), testName);
        if (!outTestDir.exists()) {
            if (outTestDir.mkdir()) {
                LOGGER.info("Created output directory: " + testName);
            } else {
                LOGGER.fatal("Error creating output directory");
            }
        }

        LOGGER.info("Processing file: " + testName);

        // Read the content of the current program
        final FileReader fr = new FileReader(testFile);
        final StringBuilder sb = new StringBuilder();
        try {
            int ch;
            while ((ch = fr.read()) >= 0) {
                sb.append((char) ch);
            }
        } finally {
            // close the reader even if read() throws
            fr.close();
        }
        final String program = sb.toString();

        // Parse the program
        final Parser parser = new Parser();
        parser.parse(program);

        // Get the rules
        final List<IRule> rules = parser.getRules();

        // Get the queries
        final List<IQuery> queryHeads = parser.getQueries();

        // Get the TGDs from the set of rules
        final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);

        // Convert the query bodies in rules
        final List<IRule> bodies = new LinkedList<IRule>(rules);
        bodies.removeAll(tgds);

        final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads);

        // get the constraints from the set of rules
        final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);

        final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
        LOGGER.info("Expressivity: " + exprs.toString());

        if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY))
            throw new EvaluationException("Only Linear and Sticky TGDs are supported for rewriting.");

        // compute the dependency graph

        LOGGER.debug("Computing position dependencies.");
        // long depGraphMem = MonitoringUtils.getHeapUsage();
        long posDepTime = System.currentTimeMillis();
        Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils
                .computePositionDependencyGraph(tgds);
        posDepTime = System.currentTimeMillis() - posDepTime;

        // Setup caching
        CacheManager.setupCaching();

        // if linear TGDs, compute the atom coverage graph.
        LOGGER.debug("Computing atom coverage graph.");
        long atomCoverGraphTime = System.currentTimeMillis();
        if (exprs.contains(Expressivity.LINEAR)) {
            deps = DepGraphUtils.computeAtomCoverageGraph(deps);
        }
        atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;
        // depGraphMem = MonitoringUtils.getHeapUsage() - depGraphMem;

        // rewriting constraints
        // long ncRewMem = MonitoringUtils.getHeapUsage();
        final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
                RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
        long ncRewTime = System.currentTimeMillis();
        final Set<IRule> rewrittenConstraints = Sets.newHashSet();
        if (!ncCheckStrategy.equals(NCCheck.NONE)) {
            for (final IRule c : constraints) {
                rewrittenConstraints
                        .addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
            }
        }
        ncRewTime = System.currentTimeMillis() - ncRewTime;
        // ncRewMem = ncRewMem - MonitoringUtils.getHeapUsage();
        LOGGER.debug("Finished rewriting constraints.");

        // dump the rewritten constraints:
        File outFile = FileUtils.getFile(outTestDir, testName.concat("_cns.dtg"));
        final FileWriter cnsFW = new FileWriter(outFile);
        IOUtils.writeLines(rewrittenConstraints, IOUtils.LINE_SEPARATOR, cnsFW);
        cnsFW.close();

        // Compute the Rewriting
        final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy,
                ncCheckStrategy);
        for (final IRule q : queries) {

            // Setup caching
            CacheManager.setupCaching();

            final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                    .getPredicateSymbol();

            // Setup reporting
            final Reporter rep = Reporter.getInstance(true);
            Reporter.setupReporting();
            Reporter.setQuery(queryPredicate);
            Reporter.setOntology(testName);
            rep.setValue(RewMetric.DEPGRAPH_TIME, posDepTime);

            LOGGER.info("Processing query: ".concat(q.toString()));
            // final long rewMem = MonitoringUtils.getHeapUsage();
            final long overallTime = System.currentTimeMillis();
            final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps, exprs);
            rep.setValue(RewMetric.OVERALL_TIME, System.currentTimeMillis() - overallTime);
            // rep.setValue(RewMetric.REW_MEM, MonitoringUtils.getHeapUsage() - rewMem);
            // rep.setValue(RewMetric.DEPGRAPH_MEM, depGraphMem);
            rep.setValue(RewMetric.REW_SIZE, (long) rewriting.size());
            rep.setValue(RewMetric.JOIN_COUNT, RewritingUtils.joinCount(rewriting));
            rep.setValue(RewMetric.ATOM_COUNT, RewritingUtils.atomsCount(rewriting));
            rep.setValue(RewMetric.REW_CNS_COUNT, (long) rewrittenConstraints.size());
            rep.setValue(RewMetric.REW_CNS_TIME, ncRewTime);
            // rep.setValue(RewMetric.REW_CNS_MEM, ncRewMem);

            // Other metrics
            rep.setValue(RewMetric.OVERHEAD_TIME,
                    rep.getValue(RewMetric.OVERALL_TIME) - rep.getValue(RewMetric.REW_TIME));

            // Caching size metrics
            rep.setValue(RewMetric.MAX_COVERING_CACHE_SIZE, CoveringCache.getCache().size(CacheType.COVERING));
            rep.setValue(RewMetric.MAX_NON_COVERING_CACHE_SIZE,
                    CoveringCache.getCache().size(CacheType.NOT_COVERING));
            rep.setValue(RewMetric.MAX_MAPSTO_CACHE_SIZE, MapsToCache.size(MapsToCache.CacheType.MAPSTO));
            rep.setValue(RewMetric.MAX_NOT_MAPSTO_CACHE_SIZE,
                    MapsToCache.size(MapsToCache.CacheType.NOT_MAPSTO));
            rep.setValue(RewMetric.MAX_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_NON_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_RENAMING_CACHE_SIZE, RenamingCache.size());
            rep.setValue(RewMetric.MAX_MGU_CACHE_SIZE, MGUCache.size());

            // Create a file to store the rewriting results.

            outFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_rew.dtg"));
            final FileWriter rewFW = new FileWriter(outFile);

            rewFW.write("/// Query: " + q + "///\n");
            rewFW.write("/// Ontology: " + testName + "///");
            rewFW.write("/// Created on: " + creationDate + " ///\n");
            rewFW.write("/// Rules in the program: " + rules.size() + " ///\n");
            rewFW.write("/// TGDs in the program: " + tgds.size() + " ///\n");
            rewFW.write("/// Constraints in the program: " + constraints.size() + " ///\n");
            rewFW.write("/// Theory expressivity: " + exprs.toString() + " ///\n");
            rewFW.write("/// Decomposition: " + decomposition.name() + " ///\n");
            rewFW.write("/// Subsumption Check Strategy: " + subchkStrategy.name() + " ///\n");
            rewFW.write("/// Negative Constraints Check Strategy: " + ncCheckStrategy.name() + " ///\n");
            rewFW.write(IOUtils.LINE_SEPARATOR);

            LOGGER.info("Writing the output at: " + outFile.getAbsolutePath());

            // dump metrics for individual queries.
            rewFW.write(rep.getReport());

            rewFW.write(IOUtils.LINE_SEPARATOR);
            rewFW.write(IOUtils.LINE_SEPARATOR);

            rewFW.write("/// Rewritten Program ///\n");
            final Set<ILiteral> newHeads = new HashSet<ILiteral>();
            for (final IRule qr : rewriting) {
                newHeads.add(qr.getHead().iterator().next());
                rewFW.write(qr + "\n");
            }
            rewFW.write("\n");
            for (final ILiteral h : newHeads) {
                rewFW.write("?- " + h + ".\n");
            }
            rewFW.write("\n");
            rewFW.flush();
            rewFW.close();

            // dump summary metrics.
            sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
            timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
            cacheSummaryWriter.writeNext(rep.getSummaryCacheMetrics());
            memorySummaryWriter.writeNext(rep.getSummaryMemoryMetrics());
            sizeSummaryWriter.flush();
            timeSummaryWriter.flush();
            cacheSummaryWriter.flush();
            memorySummaryWriter.flush();
        }
    }
    sizeSummaryWriter.close();
    timeSummaryWriter.close();
    cacheSummaryWriter.close();
    memorySummaryWriter.close();
}

From source file:org.deri.iris.queryrewriting.SQLRewritingTest.java

public void testSQLRewriting() throws Exception {

    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;

    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy " + ncCheckStrategy.name());

    // Read the test-cases file

    final File testSuiteFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases.txt");

    final List<String> tests = IOUtils.readLines(new FileReader(testSuiteFile));

    final String creationDate = dateFormat.format(new Date());

    // Summary reporting
    final String summaryPrefix = StringUtils.join(creationDate, "-", decomposition.name(), "-", rewLang.name(),
            "-", subchkStrategy.name(), "-", ncCheckStrategy.name());

    final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
    final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');

    final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
    final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');

    final File cacheSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "cache-summary.csv"));
    final CSVWriter cacheSummaryWriter = new CSVWriter(new FileWriter(cacheSummaryFile), ',');

    final File memorySummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "memory-summary.csv"));
    final CSVWriter memorySummaryWriter = new CSVWriter(new FileWriter(memorySummaryFile), ',');

    sizeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingSizeReportHeader());
    timeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingTimeReportHeader());
    cacheSummaryWriter.writeNext(ReportingUtils.getSummaryCachingReportHeader());
    memorySummaryWriter.writeNext(ReportingUtils.getSummaryMemoryReportHeader());

    // Compute the rewriting for each test ontology.
    for (final String testName : tests) {

        // Read the next test case on the list
        final File testFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), testName + ".dtg");

        // Create the Directory where to store the test results
        final File outTestDir = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH), testName);
        if (!outTestDir.exists()) {
            if (outTestDir.mkdir()) {
                LOGGER.info("Created output directory: " + testName);
            } else {
                LOGGER.fatal("Error creating output directory");
            }
        }

        LOGGER.info("Processing file: " + testName);

        // Read the content of the current program
        final FileReader fr = new FileReader(testFile);
        final StringBuilder sb = new StringBuilder();
        try {
            int ch;
            while ((ch = fr.read()) >= 0) {
                sb.append((char) ch);
            }
        } finally {
            // close the reader even if read() throws
            fr.close();
        }
        final String program = sb.toString();

        // Parse the program
        final Parser parser = new Parser();
        parser.parse(program);

        // Get the rules
        final List<IRule> rules = parser.getRules();

        // Get the queries
        final List<IQuery> queryHeads = parser.getQueries();

        // Get the TGDs from the set of rules
        final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);

        // Convert the query bodies in rules
        final List<IRule> bodies = new LinkedList<IRule>(rules);
        bodies.removeAll(tgds);

        final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads);

        // Get the configuration
        final Map<IPredicate, IRelation> conf = parser.getDirectives();
        if (conf.containsKey(BasicFactory.getInstance().createPredicate("DBConnection", 8))) {
            StorageManager.getInstance();
            StorageManager.configure(conf);
        } else {
            LOGGER.error("Missing DB connection parameters.");
            throw new ConfigurationException("Missing DB connection parameters.");

        }

        // Get the SBox rules from the set of rules
        final List<IRule> sbox = RewritingUtils.getSBoxRules(rules, queryHeads);

        // get the constraints from the set of rules
        final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);

        final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
        LOGGER.info("Expressivity: " + exprs.toString());

        if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY))
            throw new EvaluationException("Only Linear and Sticky TGDs are supported for rewriting.");

        // compute the dependency graph

        LOGGER.debug("Computing position dependencies.");
        // long depGraphMem = MonitoringUtils.getHeapUsage();
        long posDepTime = System.currentTimeMillis();
        Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils
                .computePositionDependencyGraph(tgds);
        posDepTime = System.currentTimeMillis() - posDepTime;

        // Setup caching
        CacheManager.setupCaching();

        // if linear TGDs, compute the atom coverage graph.
        LOGGER.debug("Computing atom coverage graph.");
        long atomCoverGraphTime = System.currentTimeMillis();
        if (exprs.contains(Expressivity.LINEAR)) {
            deps = DepGraphUtils.computeAtomCoverageGraph(deps);
        }
        atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;
        // depGraphMem = MonitoringUtils.getHeapUsage() - depGraphMem;

        // rewriting constraints
        // long ncRewMem = MonitoringUtils.getHeapUsage();
        final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
                RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
        long ncRewTime = System.currentTimeMillis();
        final Set<IRule> rewrittenConstraints = Sets.newHashSet();
        if (!ncCheckStrategy.equals(NCCheck.NONE)) {
            for (final IRule c : constraints) {
                rewrittenConstraints
                        .addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
            }
        }
        ncRewTime = System.currentTimeMillis() - ncRewTime;
        // ncRewMem = ncRewMem - MonitoringUtils.getHeapUsage();
        LOGGER.debug("Finished rewriting constraints.");

        // dump the rewritten constraints:
        File outFile = FileUtils.getFile(outTestDir, testName.concat("_cns.dtg"));
        final FileWriter cnsFW = new FileWriter(outFile);
        IOUtils.writeLines(rewrittenConstraints, IOUtils.LINE_SEPARATOR, cnsFW);
        cnsFW.close();

        // Compute the Rewriting
        final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy,
                ncCheckStrategy);
        for (final IRule q : queries) {

            // Setup caching
            CacheManager.setupCaching();

            final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                    .getPredicateSymbol();

            // Setup reporting
            final Reporter rep = Reporter.getInstance(true);
            Reporter.setupReporting();
            Reporter.setQuery(queryPredicate);
            Reporter.setOntology(testName);
            rep.setValue(RewMetric.DEPGRAPH_TIME, posDepTime);

            LOGGER.info("Processing query: ".concat(q.toString()));
            // final long rewMem = MonitoringUtils.getHeapUsage();
            final long overallTime = System.currentTimeMillis();
            final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps, exprs);
            rep.setValue(RewMetric.OVERALL_TIME, System.currentTimeMillis() - overallTime);

            // rep.setValue(RewMetric.REW_MEM, MonitoringUtils.getHeapUsage() - rewMem);
            // rep.setValue(RewMetric.DEPGRAPH_MEM, depGraphMem);
            rep.setValue(RewMetric.REW_SIZE, (long) rewriting.size());
            rep.setValue(RewMetric.JOIN_COUNT, RewritingUtils.joinCount(rewriting));
            rep.setValue(RewMetric.ATOM_COUNT, RewritingUtils.atomsCount(rewriting));
            rep.setValue(RewMetric.REW_CNS_COUNT, (long) rewrittenConstraints.size());
            rep.setValue(RewMetric.REW_CNS_TIME, ncRewTime);
            // rep.setValue(RewMetric.REW_CNS_MEM, ncRewMem);

            // Other metrics
            rep.setValue(RewMetric.OVERHEAD_TIME,
                    rep.getValue(RewMetric.OVERALL_TIME) - rep.getValue(RewMetric.REW_TIME));

            // Caching size metrics
            rep.setValue(RewMetric.MAX_COVERING_CACHE_SIZE, CoveringCache.getCache().size(CacheType.COVERING));
            rep.setValue(RewMetric.MAX_NON_COVERING_CACHE_SIZE,
                    CoveringCache.getCache().size(CacheType.NOT_COVERING));
            rep.setValue(RewMetric.MAX_MAPSTO_CACHE_SIZE, MapsToCache.size(MapsToCache.CacheType.MAPSTO));
            rep.setValue(RewMetric.MAX_NOT_MAPSTO_CACHE_SIZE,
                    MapsToCache.size(MapsToCache.CacheType.NOT_MAPSTO));
            rep.setValue(RewMetric.MAX_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_NON_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_RENAMING_CACHE_SIZE, RenamingCache.size());
            rep.setValue(RewMetric.MAX_MGU_CACHE_SIZE, MGUCache.size());

            // Create a file to store the rewriting results.

            outFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_rew.dtg"));
            final FileWriter rewFW = new FileWriter(outFile);

            rewFW.write("/// Query: " + q + "///\n");
            rewFW.write("/// Ontology: " + testName + "///");
            rewFW.write("/// Created on: " + creationDate + " ///\n");
            rewFW.write("/// Rules in the program: " + rules.size() + " ///\n");
            rewFW.write("/// TGDs in the program: " + tgds.size() + " ///\n");
            rewFW.write("/// Constraints in the program: " + constraints.size() + " ///\n");
            rewFW.write("/// Theory expressivity: " + exprs.toString() + " ///\n");
            rewFW.write("/// Decomposition: " + decomposition.name() + " ///\n");
            rewFW.write("/// Subsumption Check Strategy: " + subchkStrategy.name() + " ///\n");
            rewFW.write("/// Negative Constraints Check Strategy: " + ncCheckStrategy.name() + " ///\n");
            rewFW.write(IOUtils.LINE_SEPARATOR);

            LOGGER.info("Writing the output at: " + outFile.getAbsolutePath());

            // dump metrics for individual queries.
            rewFW.write(rep.getReport());

            rewFW.write(IOUtils.LINE_SEPARATOR);
            rewFW.write(IOUtils.LINE_SEPARATOR);

            rewFW.write("/// Rewritten Program ///\n");
            final Set<ILiteral> newHeads = new HashSet<ILiteral>();
            for (final IRule qr : rewriting) {
                newHeads.add(qr.getHead().iterator().next());
                rewFW.write(qr + "\n");
            }
            rewFW.write("\n");
            for (final ILiteral h : newHeads) {
                rewFW.write("?- " + h + ".\n");
            }
            rewFW.write("\n");
            rewFW.flush();
            rewFW.close();

            // dump summary metrics.
            sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
            timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
            cacheSummaryWriter.writeNext(rep.getSummaryCacheMetrics());
            memorySummaryWriter.writeNext(rep.getSummaryMemoryMetrics());
            sizeSummaryWriter.flush();
            timeSummaryWriter.flush();
            cacheSummaryWriter.flush();
            memorySummaryWriter.flush();

            if (sbox.size() > 0) {

                // Produce the rewriting according to the Storage Box
                final IQueryRewriter ndmRewriter = new NDMRewriter(sbox);
                // final Set<ILiteral> newHeads = new HashSet<ILiteral>();
                final Set<IRule> sboxRew = new LinkedHashSet<IRule>();
                for (final IRule r : rewriting) {
                    // Create a file to store the rewriting results as Datalog Rules
                    LOGGER.debug("-- Processing rewriting: " + r);
                    sboxRew.addAll(ndmRewriter.getRewriting(r));
                }

                // dump the rewritten sbox rewriting:
                final File sboxFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_sbox_rew.dtg"));
                final FileWriter sboxFW = new FileWriter(sboxFile);
                IOUtils.writeLines(sboxRew, IOUtils.LINE_SEPARATOR, sboxFW);
                sboxFW.close();

                // Produce a SQL rewriting
                final SQLRewriter sqlRewriter = new SQLRewriter(sboxRew);
                final String sqlRew = sqlRewriter.getUCQSQLRewriting("", 1000, 0);
                final File sqlFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_rew.sql"));
                final FileWriter sqlFW = new FileWriter(sqlFile);
                IOUtils.write(sqlRew, sqlFW);
                sqlFW.close();

                // Execute the SQL rewriting
                LOGGER.info("Executing SQL Rewriting");

                long duration = System.nanoTime();
                final IRelation result = StorageManager.executeQuery(sqlRew);
                duration = (System.nanoTime() - duration) / 1000000;
                LOGGER.info(result.size() + " tuples in " + duration + " [ms]\n");
            }
        }
    }
    sizeSummaryWriter.close();
    timeSummaryWriter.close();
    cacheSummaryWriter.close();
    memorySummaryWriter.close();

}

From source file:TextureTest.java

protected TextureGeometryInfo createTextureCoordinates(String szFile) {
    // create a simple wrapper class to package our
    // return values
    TextureGeometryInfo texInfo = new TextureGeometryInfo();

    // allocate a temporary buffer to store the input file
    StringBuffer szBufferData = new StringBuffer();

    float sizeGeometryX = 0;
    float factorY = 1;
    int nNumPoints = 0;
    Point2f boundsPoint = new Point2f();

    try {
        // attach a reader to the input file
        FileReader fileIn = new FileReader(szFile);

        // read the entire file into the StringBuffer
        int nChar;
        while ((nChar = fileIn.read()) != -1) {
            szBufferData.append((char) nChar);
        }
        fileIn.close();

        // create a tokenizer to tokenize the input file at whitespace
        java.util.StringTokenizer tokenizer = new java.util.StringTokenizer(szBufferData.toString());

        // read the name of the texture image
        texInfo.m_szImage = tokenizer.nextToken();

        // read the size of the generated geometry in the X dimension
        sizeGeometryX = Float.parseFloat(tokenizer.nextToken());

        // read the Y scale factor
        factorY = Float.parseFloat(tokenizer.nextToken());

        // read the number of texture coordinates
        nNumPoints = Integer.parseInt(tokenizer.nextToken());

        // read each texture coordinate
        texInfo.m_TexCoordArray = new Point2f[nNumPoints];
        Point2f texPoint2f = null;

        for (int n = 0; n < nNumPoints; n++) {
            // JAVA 3D 1.2 change - the Y coordinates
            // have been flipped, so we have to subtract the Y coordinate
            // from 1
            texPoint2f = new Point2f(Float.parseFloat(tokenizer.nextToken()),
                    1.0f - Float.parseFloat(tokenizer.nextToken()));

            texInfo.m_TexCoordArray[n] = texPoint2f;

            // keep an eye on the extents of the texture coordinates
            // so we can automatically center the geometry
            if (n == 0 || texPoint2f.x > boundsPoint.x)
                boundsPoint.x = texPoint2f.x;

            if (n == 0 || texPoint2f.y > boundsPoint.y)
                boundsPoint.y = texPoint2f.y;
        }
    } catch (Exception e) {
        System.err.println(e.toString());
        return null;
    }

    // build the array of coordinates
    texInfo.m_CoordArray = new Point3f[nNumPoints];

    for (int n = 0; n < nNumPoints; n++) {
        // scale and center the geometry based on the texture coordinates
        texInfo.m_CoordArray[n] = new Point3f(
                sizeGeometryX * (texInfo.m_TexCoordArray[n].x - boundsPoint.x / 2),
                factorY * sizeGeometryX * (texInfo.m_TexCoordArray[n].y - boundsPoint.y / 2), 0);
    }

    return texInfo;
}

From source file:CSVWriter.java

public void testNestedQuotes() {
        String[] data = new String[] { "\"\"", "test" };
        String oracle = "\"\"\"\"\"\",\"test\"\n";

        CSVWriter writer = null;
        File tempFile = null;
        FileWriter fwriter = null;

        try {
            tempFile = File.createTempFile("csvWriterTest", ".csv");
            tempFile.deleteOnExit();
            fwriter = new FileWriter(tempFile);
            writer = new CSVWriter(fwriter);
        } catch (IOException e) {
            fail();
        }

        // write the test data:
        writer.writeNext(data);

        try {
            writer.close();
        } catch (IOException e) {
            fail();
        }

        try {
            // assert that the writer was also closed.
            fwriter.flush();
            fail();
        } catch (IOException e) {
            // expected: writer.close() should have closed the underlying FileWriter
        }

        // read the data and compare.
        FileReader in = null;
        try {
            in = new FileReader(tempFile);
        } catch (FileNotFoundException e) {
            fail();
        }

        StringBuffer fileContents = new StringBuffer();
        try {
            int ch;
            while ((ch = in.read()) != -1) {
                fileContents.append((char) ch);
            }
            in.close();
        } catch (IOException e) {
            fail();
        }

        assertTrue(oracle.equals(fileContents.toString()));
    }

From source file:test.org.osuosl.srw.sort.SortToolTests.java

protected void setUp() throws Exception {

    //read the datafiles

    String[] urls = { "test/org/osuosl/srw/sort/TestData0.xml", "test/org/osuosl/srw/sort/TestData1.xml",
            "test/org/osuosl/srw/sort/TestData2.xml", "test/org/osuosl/srw/sort/TestData3.xml",
            "test/org/osuosl/srw/sort/TestData4.xml", "test/org/osuosl/srw/sort/TestData5.xml" };

    unsortedRecords = new String[urls.length];

    for (int i = 0; i < urls.length; i++) {
        FileReader in = null;
        try {
            String urlString = urls[i];
            //  log.info("finding url for ["+urlString+"]");
            URL url = SortToolTests.class.getClassLoader().getResource(urlString);
            //  log.info("Getting file ["+url+"]");
            StringBuffer temp = new StringBuffer();
            File file = new File(url.toURI());
            in = new FileReader(file);
            int c;
            while ((c = in.read()) != -1) {
                temp.append((char) c);
            }
            unsortedRecords[i] = temp.toString();

        } finally {
            if (in != null) {
                in.close();
            }
        }
    }

    super.setUp();
}