List of usage examples for org.apache.commons.io IOUtils readLines
public static List&lt;String&gt; readLines(Reader input) throws IOException
Gets the contents of a Reader as a list of Strings, one entry per line.
From source file:com.taobao.diamond.server.service.task.processor.RemoveConfigInfoTaskProcessor.java
/**
 * Rebuilds {@code oldContent} with every line that contains {@code content} removed.
 * Kept lines are joined with {@code Constants.DIAMOND_LINE_SEPARATOR}; a trailing
 * separator (which appears when the last original line was removed, or when the
 * last kept line itself ends with the separator text) is stripped once.
 *
 * @param oldContent the original multi-line configuration text
 * @param content    substring marking lines to drop
 * @return the filtered text, never {@code null}
 * @throws Exception if reading the in-memory content fails
 */
@SuppressWarnings("unchecked")
private String generateNewContent(String oldContent, String content) throws Exception {
    StringBuilder filtered = new StringBuilder();
    StringReader reader = new StringReader(oldContent);
    try {
        List<String> allLines = IOUtils.readLines(reader);
        int lastIndex = allLines.size() - 1;
        for (int idx = 0; idx <= lastIndex; idx++) {
            String candidate = allLines.get(idx);
            if (candidate.contains(content)) {
                // drop this line entirely (no separator is emitted for it)
                continue;
            }
            filtered.append(candidate);
            if (idx != lastIndex) {
                filtered.append(Constants.DIAMOND_LINE_SEPARATOR);
            }
        }
        String joined = filtered.toString();
        // strip a single trailing separator left behind by a removed final line
        if (joined.endsWith(Constants.DIAMOND_LINE_SEPARATOR)) {
            joined = joined.substring(0, joined.lastIndexOf(Constants.DIAMOND_LINE_SEPARATOR));
        }
        return joined;
    } finally {
        reader.close();
    }
}
From source file:com.alibaba.jstorm.utils.LinuxResource.java
/**
 * Reads the amount of free physical memory from {@code /proc/meminfo}.
 *
 * @return the free-memory figure (second whitespace-separated token of the
 *         second line of the file — assumes the "MemFree: &lt;kB&gt; kB" line is
 *         at index 1; TODO confirm against target kernels), or {@code 0L} on
 *         non-Linux platforms or any read/parse failure.
 */
public static Long getFreePhysicalMem() {
    if (!OSInfo.isLinux()) {
        return 0L;
    }
    // try-with-resources: the original leaked the FileInputStream on every call
    try (FileInputStream in = new FileInputStream(PROCFS_MEMINFO)) {
        List<String> lines = IOUtils.readLines(in);
        String free = lines.get(1).split("\\s+")[1];
        return Long.valueOf(free);
    } catch (Exception ignored) {
        // deliberate best-effort: fall through to the 0L default below
        LOG.warn("failed to get total free memory.");
    }
    return 0L;
}
From source file:cs.ox.ac.uk.gsors.GroupPreferencesTest1.java
/**
 * Benchmarks FO-query rewriting plus group-preference top-k aggregation over the
 * "test-cases1.txt" suite, for one city (Tempe), k in {1,2,3}, and three
 * aggregation strategies, writing per-run size and time metrics to CSV summaries.
 *
 * NOTE(review): atomCoverGraphTime and duration are measured but never reported;
 * dead commented-out cache/memory reporting and file-dump code has been pruned.
 */
public void testFORewriting() throws Exception {
    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;
    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy " + ncCheckStrategy.name());
    // Load the list of test-case names, one per line.
    final File testSuiteFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases1.txt");
    final List<String> tests = IOUtils.readLines(new FileReader(testSuiteFile));
    final String creationDate = dateFormat.format(new Date());
    // Parse the program
    final Parser parser = new Parser();
    parser.parse(getStringFile(_DEFAULT_INPUT_PATH + "prefDB-ontology.dtg"));
    // Get the rules
    final List<IRule> rules = parser.getRules();
    // Get the queries
    final List<IQuery> queryHeads = parser.getQueries();
    final Map<IPredicate, IRelation> conf = parser.getDirectives();
    if (!conf.isEmpty()) {
        StorageManager.getInstance();
        StorageManager.configure(conf);
    }
    // Get the TGDs from the set of rules
    final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);
    final List<IRule> mSBox = RewritingUtils.getSBoxRules(rules, queryHeads);
    final IRuleSafetyProcessor ruleProc = new StandardRuleSafetyProcessor();
    ruleProc.process(mSBox);
    final IQueryRewriter ndmRewriter = new NDMRewriter(mSBox);
    final IRelationFactory rf = new RelationFactory();
    // Convert the query bodies in rules
    final List<IRule> bodies = new LinkedList<IRule>(rules);
    bodies.removeAll(tgds);
    final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads);
    // get the constraints from the set of rules
    final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);
    final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
    LOGGER.info("Expressivity: " + exprs.toString());
    // bail out (via extracted()) when the TGDs are neither linear nor sticky
    if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY)) {
        extracted();
    }
    // compute the dependency graph
    LOGGER.debug("Computing position dependencies.");
    long posDepTime = System.currentTimeMillis();
    Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils.computePositionDependencyGraph(tgds);
    posDepTime = System.currentTimeMillis() - posDepTime;
    // Setup caching
    CacheManager.setupCaching();
    // if linear TGDs, compute the atom coverage graph.
    LOGGER.debug("Computing atom coverage graph.");
    long atomCoverGraphTime = System.currentTimeMillis(); // NOTE(review): measured but never reported
    if (exprs.contains(Expressivity.LINEAR)) {
        deps = DepGraphUtils.computeAtomCoverageGraph(deps);
    }
    atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;
    // rewriting constraints
    final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
            RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
    long ncRewTime = System.currentTimeMillis();
    final Set<IRule> rewrittenConstraints = Sets.newHashSet();
    if (!ncCheckStrategy.equals(NCCheck.NONE)) {
        for (final IRule c : constraints) {
            rewrittenConstraints.addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
        }
    }
    ncRewTime = System.currentTimeMillis() - ncRewTime;
    LOGGER.debug("Finished rewriting constraints.");
    // Compute the Rewriting
    final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy,
            ncCheckStrategy);
    // city -> dataset size; only Tempe is enabled in this variant
    Map<String, Integer> cities = new HashMap<String, Integer>();
    cities.put("Tempe", 465);
    List<Integer> ks = new ArrayList<Integer>();
    ks.add(1);
    ks.add(2);
    ks.add(3);
    List<AggregateStrategy> str = new ArrayList<AggregateStrategy>();
    str.add(AggregateStrategy.CSU);
    str.add(AggregateStrategy.Plurality);
    str.add(AggregateStrategy.PluralityMisery);
    for (AggregateStrategy strategyQA : str) {
        // one pair of summary CSVs per aggregation strategy
        final String summaryPrefix = StringUtils.join(creationDate, "-", strategyQA.toString());
        final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
        final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');
        final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
        final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');
        sizeSummaryWriter.writeNext(GReportingUtils.getSummaryRewritingSizeReportHeader());
        timeSummaryWriter.writeNext(GReportingUtils.getSummaryRewritingTimeReportHeader());
        for (Integer k : ks) {
            for (String city : cities.keySet()) {
                for (int con = 0; con < 1; con++) {
                    LOGGER.info("con-city-k: " + con + "-" + city + "-" + k + "-" + strategyQA.toString());
                    // top k for each preferences
                    for (final String testName : tests) {
                        // Create a buffer for the output
                        final IRelation result = rf.createRelation();
                        GPrefParameters parameters = new GPrefParameters(testName, k, city, cities.get(city));
                        LOGGER.info("Processing file: " + testName);
                        // pick the query matching the test scenario (food / cuisine / place)
                        IRule q = null;
                        if (parameters.getScenario() == Scenario.BREAKFAST_FOOD
                                || parameters.getScenario() == Scenario.LUNCH_FOOD
                                || parameters.getScenario() == Scenario.DINNER_FOOD) {
                            q = queries.get(0);
                        }
                        if (parameters.getScenario() == Scenario.BREAKFAST_CUSINE
                                || parameters.getScenario() == Scenario.LUNCH_CUSINE
                                || parameters.getScenario() == Scenario.DINNER_CUSINE) {
                            q = queries.get(1);
                        }
                        if (parameters.getScenario() == Scenario.BREAKFAST_PLACE
                                || parameters.getScenario() == Scenario.LUNCH_PLACE
                                || parameters.getScenario() == Scenario.DINNER_PLACE) {
                            q = queries.get(2);
                        }
                        CacheManager.setupCaching();
                        final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                                .getPredicateSymbol();
                        // Setup reporting
                        final ToitReporter rep = ToitReporter.getInstance(true);
                        ToitReporter.setupReporting();
                        ToitReporter.setQuery(queryPredicate);
                        ToitReporter.setTest(testName);
                        ToitReporter.setK(parameters.getK());
                        ToitReporter.setCity(parameters.getCity());
                        ToitReporter.setGroupID(parameters.getGroupId());
                        ToitReporter.setNbUsers(parameters.getMaxNbUsers());
                        ToitReporter.setNbBuss(parameters.getBs());
                        ToitReporter.setScenario(parameters.getScenario());
                        rep.setValue(GRewMetric.DEPGRAPH_TIME, posDepTime);
                        LOGGER.info("Processing query: ".concat(q.toString()));
                        final long overallTime = System.currentTimeMillis();
                        final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps,
                                exprs);
                        rep.setValue(GRewMetric.REW_TIME, System.currentTimeMillis() - overallTime);
                        rep.setValue(GRewMetric.REW_SIZE, (long) rewriting.size());
                        rep.setValue(GRewMetric.REW_CNS_TIME, ncRewTime);
                        // (dumping the rewritten program to disk is disabled in this variant)
                        final Set<ILiteral> newHeads = new HashSet<ILiteral>(); // NOTE(review): populated but never read
                        Map<IPredicate, IRelation> results = new HashMap<IPredicate, IRelation>();
                        for (final IRule qr : rewriting) {
                            newHeads.add(qr.getHead().iterator().next());
                            final Set<IRule> sboxRewriting = new LinkedHashSet<IRule>();
                            Set<IRule> rrules = ndmRewriter.getRewriting(qr);
                            sboxRewriting.addAll(rrules);
                            // Produce the SQL rewriting for each query in the program
                            final SQLRewriter sqlRewriter = new SQLRewriter(sboxRewriting);
                            try {
                                // Get the SQL rewriting as Union of Conjunctive Queries
                                long duration = -System.nanoTime(); // NOTE(review): measured but never reported
                                final List<String> ucqSQLRewriting = sqlRewriter.getSQLRewritings(
                                        parameters.getConstraintsSqlQuery(), parameters.getNbNodes(),
                                        parameters.getStartFromRes());
                                duration = ((duration + System.nanoTime()) / 1000000);
                                IRelation resultAux = rf.createRelation();
                                for (final String qu : ucqSQLRewriting) {
                                    IRelation r = StorageManager.executeQuery(qu);
                                    resultAux.addAll(r);
                                }
                                // every body predicate of qr maps to the same answer relation
                                for (IPredicate predicate : qr.getBodyPredicates()) {
                                    results.put(predicate, resultAux);
                                }
                                result.addAll(resultAux);
                            } catch (final SQLException e) {
                                e.printStackTrace();
                            }
                        }
                        // construct the graph
                        Map<User, List<user.models.Pair<IPredicate, IPredicate>>> prefs = JsonHelper
                                .getGPreferences(parameters.getPrefs(), tgds);
                        final cs.ox.ac.uk.gsors2.GPreferencesGraph prefGraph = Factory.GPGRAPH
                                .createPreferencesGraph();
                        long constPrefGraphTime = System.currentTimeMillis();
                        // add one preference edge per (preferred, dominated) tuple pair per user
                        for (User user : prefs.keySet()) {
                            for (user.models.Pair<IPredicate, IPredicate> pairPreference : prefs.get(user)) {
                                IRelation morePrefs = results.get(pairPreference.getElement0());
                                IRelation lessPrefs = results.get(pairPreference.getElement1());
                                for (int j = 0; j < morePrefs.size(); j++) {
                                    ITuple el1 = morePrefs.get(j);
                                    if (!lessPrefs.contains(el1)) {
                                        for (int i = 0; i < lessPrefs.size(); i++) {
                                            ITuple el2 = lessPrefs.get(i);
                                            GPreferenceEdge edge = new GPreferenceEdge(el1, el2, user);
                                            prefGraph.addPreference(edge);
                                        }
                                    }
                                }
                            }
                        }
                        for (int i = 0; i < result.size(); i++) {
                            ITuple v = result.get(i);
                            prefGraph.addVertex(v);
                        }
                        constPrefGraphTime = System.currentTimeMillis() - constPrefGraphTime;
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_TIME, constPrefGraphTime);
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_SIZE_E, (long) prefGraph.getEdgesSize());
                        // probabilistic-model merge is disabled; the timer brackets an empty section
                        long mergeOperatorTime = System.currentTimeMillis();
                        mergeOperatorTime = System.currentTimeMillis() - mergeOperatorTime;
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_SIZE_E, (long) prefGraph.getEdgesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_TIME, (long) mergeOperatorTime);
                        long topKTime = System.currentTimeMillis();
                        IRelation r = GTopKAlgorithms.getTopK(prefGraph, parameters.getK(), strategyQA);
                        topKTime = System.currentTimeMillis() - topKTime;
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_TIME, topKTime);
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_SIZE_E, (long) prefGraph.getEdgesSize());
                        rep.setValue(GRewMetric.ANSWER_SIZE, (long) r.size());
                        // dump summary metrics.
                        sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
                        timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
                        sizeSummaryWriter.flush();
                        timeSummaryWriter.flush();
                    }
                }
            }
        }
        sizeSummaryWriter.close();
        timeSummaryWriter.close();
    }
}
From source file:cs.ox.ac.uk.gsors.GroupPreferencesTestAux.java
/**
 * Variant of the group-preferences rewriting benchmark: four city datasets
 * (Peoria, Gilbert, Glendale, Chandler), 10 repetitions per configuration, and
 * cache/memory summary reporting enabled. The top-k computation itself is
 * commented out here, so the TOPK timer brackets an empty section.
 *
 * NOTE(review): duration is measured but never reported; dead commented-out
 * code has been pruned.
 */
public void testFORewriting() throws Exception {
    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;
    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy " + ncCheckStrategy.name());
    // Load the list of test-case names, one per line.
    final File testSuiteFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases1.txt");
    final List<String> tests = IOUtils.readLines(new FileReader(testSuiteFile));
    final String creationDate = dateFormat.format(new Date());
    // Parse the program
    final Parser parser = new Parser();
    parser.parse(getStringFile(_DEFAULT_INPUT_PATH + "prefDB-ontology.dtg"));
    // Get the rules
    final List<IRule> rules = parser.getRules();
    // Get the queries
    final List<IQuery> queryHeads = parser.getQueries();
    final Map<IPredicate, IRelation> conf = parser.getDirectives();
    if (!conf.isEmpty()) {
        StorageManager.getInstance();
        StorageManager.configure(conf);
    }
    // Get the TGDs from the set of rules
    final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);
    final List<IRule> mSBox = RewritingUtils.getSBoxRules(rules, queryHeads);
    final IRuleSafetyProcessor ruleProc = new StandardRuleSafetyProcessor();
    ruleProc.process(mSBox);
    final IQueryRewriter ndmRewriter = new NDMRewriter(mSBox);
    final IRelationFactory rf = new RelationFactory();
    // Convert the query bodies in rules
    final List<IRule> bodies = new LinkedList<IRule>(rules);
    bodies.removeAll(tgds);
    final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads);
    // get the constraints from the set of rules
    final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);
    final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
    LOGGER.info("Expressivity: " + exprs.toString());
    // bail out (via extracted()) when the TGDs are neither linear nor sticky
    if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY)) {
        extracted();
    }
    // compute the dependency graph
    LOGGER.debug("Computing position dependencies.");
    long posDepTime = System.currentTimeMillis();
    Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils.computePositionDependencyGraph(tgds);
    posDepTime = System.currentTimeMillis() - posDepTime;
    // Setup caching
    CacheManager.setupCaching();
    // if linear TGDs, compute the atom coverage graph.
    LOGGER.debug("Computing atom coverage graph.");
    long atomCoverGraphTime = System.currentTimeMillis(); // NOTE(review): measured but never reported
    if (exprs.contains(Expressivity.LINEAR)) {
        deps = DepGraphUtils.computeAtomCoverageGraph(deps);
    }
    atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;
    // rewriting constraints
    final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
            RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
    long ncRewTime = System.currentTimeMillis();
    final Set<IRule> rewrittenConstraints = Sets.newHashSet();
    if (!ncCheckStrategy.equals(NCCheck.NONE)) {
        for (final IRule c : constraints) {
            rewrittenConstraints.addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
        }
    }
    ncRewTime = System.currentTimeMillis() - ncRewTime;
    LOGGER.debug("Finished rewriting constraints.");
    // Compute the Rewriting
    final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy,
            ncCheckStrategy);
    // city -> dataset size; Tempe and Phoenix are disabled in this variant
    Map<String, Integer> cities = new HashMap<String, Integer>();
    cities.put("Peoria", 142);
    cities.put("Gilbert", 216);
    cities.put("Glendale", 314);
    cities.put("Chandler", 466);
    List<Integer> ks = new ArrayList<Integer>();
    ks.add(1);
    ks.add(2);
    ks.add(3);
    List<AggregateStrategy> str = new ArrayList<AggregateStrategy>();
    str.add(AggregateStrategy.CSU);
    str.add(AggregateStrategy.Plurality);
    str.add(AggregateStrategy.PluralityMisery);
    for (AggregateStrategy strategyQA : str) {
        // four summary CSVs per aggregation strategy: size, time, cache, memory
        final String summaryPrefix = StringUtils.join(creationDate, "-", strategyQA.toString());
        final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
        final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');
        final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
        final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');
        final File cacheSummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "cache-summary.csv"));
        final CSVWriter cacheSummaryWriter = new CSVWriter(new FileWriter(cacheSummaryFile), ',');
        final File memorySummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "memory-summary.csv"));
        final CSVWriter memorySummaryWriter = new CSVWriter(new FileWriter(memorySummaryFile), ',');
        sizeSummaryWriter.writeNext(GReportingUtils.getSummaryRewritingSizeReportHeader());
        timeSummaryWriter.writeNext(GReportingUtils.getSummaryRewritingTimeReportHeader());
        cacheSummaryWriter.writeNext(GReportingUtils.getSummaryCachingReportHeader());
        memorySummaryWriter.writeNext(GReportingUtils.getSummaryMemoryReportHeader());
        for (Integer k : ks) {
            for (String city : cities.keySet()) {
                for (int con = 0; con < 10; con++) {
                    LOGGER.info("con-city-k: " + con + "-" + city + "-" + k + "-" + strategyQA.toString());
                    // top k for each preferences
                    for (final String testName : tests) {
                        // Create a buffer for the output
                        final IRelation result = rf.createRelation();
                        GPrefParameters parameters = new GPrefParameters(testName, k, city, cities.get(city));
                        LOGGER.info("Processing file: " + testName);
                        // pick the query matching the test scenario (food / cuisine / place)
                        IRule q = null;
                        if (parameters.getScenario() == Scenario.BREAKFAST_FOOD
                                || parameters.getScenario() == Scenario.LUNCH_FOOD
                                || parameters.getScenario() == Scenario.DINNER_FOOD) {
                            q = queries.get(0);
                        }
                        if (parameters.getScenario() == Scenario.BREAKFAST_CUSINE
                                || parameters.getScenario() == Scenario.LUNCH_CUSINE
                                || parameters.getScenario() == Scenario.DINNER_CUSINE) {
                            q = queries.get(1);
                        }
                        if (parameters.getScenario() == Scenario.BREAKFAST_PLACE
                                || parameters.getScenario() == Scenario.LUNCH_PLACE
                                || parameters.getScenario() == Scenario.DINNER_PLACE) {
                            q = queries.get(2);
                        }
                        CacheManager.setupCaching();
                        final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                                .getPredicateSymbol();
                        // Setup reporting
                        final ToitReporter rep = ToitReporter.getInstance(true);
                        ToitReporter.setupReporting();
                        ToitReporter.setQuery(queryPredicate);
                        ToitReporter.setTest(testName);
                        ToitReporter.setK(parameters.getK());
                        ToitReporter.setCity(parameters.getCity());
                        ToitReporter.setGroupID(parameters.getGroupId());
                        ToitReporter.setNbUsers(parameters.getMaxNbUsers());
                        ToitReporter.setNbBuss(parameters.getBs());
                        ToitReporter.setScenario(parameters.getScenario());
                        rep.setValue(GRewMetric.DEPGRAPH_TIME, posDepTime);
                        LOGGER.info("Processing query: ".concat(q.toString()));
                        final long overallTime = System.currentTimeMillis();
                        final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps,
                                exprs);
                        rep.setValue(GRewMetric.REW_TIME, System.currentTimeMillis() - overallTime);
                        rep.setValue(GRewMetric.REW_SIZE, (long) rewriting.size());
                        rep.setValue(GRewMetric.REW_CNS_TIME, ncRewTime);
                        final Set<ILiteral> newHeads = new HashSet<ILiteral>(); // NOTE(review): populated but never read
                        Map<IPredicate, IRelation> results = new HashMap<IPredicate, IRelation>();
                        for (final IRule qr : rewriting) {
                            newHeads.add(qr.getHead().iterator().next());
                            final Set<IRule> sboxRewriting = new LinkedHashSet<IRule>();
                            Set<IRule> rrules = ndmRewriter.getRewriting(qr);
                            sboxRewriting.addAll(rrules);
                            // Produce the SQL rewriting for each query in the program
                            final SQLRewriter sqlRewriter = new SQLRewriter(sboxRewriting);
                            try {
                                // Get the SQL rewriting as Union of Conjunctive Queries
                                long duration = -System.nanoTime(); // NOTE(review): measured but never reported
                                final List<String> ucqSQLRewriting = sqlRewriter.getSQLRewritings(
                                        parameters.getConstraintsSqlQuery(), parameters.getNbNodes(),
                                        parameters.getStartFromRes());
                                duration = ((duration + System.nanoTime()) / 1000000);
                                IRelation resultAux = rf.createRelation();
                                for (final String qu : ucqSQLRewriting) {
                                    IRelation r = StorageManager.executeQuery(qu);
                                    resultAux.addAll(r);
                                }
                                // every body predicate of qr maps to the same answer relation
                                for (IPredicate predicate : qr.getBodyPredicates()) {
                                    results.put(predicate, resultAux);
                                }
                                result.addAll(resultAux);
                            } catch (final SQLException e) {
                                e.printStackTrace();
                            }
                        }
                        // construct the graph
                        Map<User, List<user.models.Pair<IPredicate, IPredicate>>> prefs = JsonHelper
                                .getGPreferences(parameters.getPrefs(), tgds);
                        final cs.ox.ac.uk.gsors2.GPreferencesGraph prefGraph = Factory.GPGRAPH
                                .createPreferencesGraph();
                        long constPrefGraphTime = System.currentTimeMillis();
                        // add one preference edge per (preferred, dominated) tuple pair per user
                        for (User user : prefs.keySet()) {
                            for (user.models.Pair<IPredicate, IPredicate> pairPreference : prefs.get(user)) {
                                IRelation morePrefs = results.get(pairPreference.getElement0());
                                IRelation lessPrefs = results.get(pairPreference.getElement1());
                                for (int j = 0; j < morePrefs.size(); j++) {
                                    ITuple el1 = morePrefs.get(j);
                                    if (!lessPrefs.contains(el1)) {
                                        for (int i = 0; i < lessPrefs.size(); i++) {
                                            ITuple el2 = lessPrefs.get(i);
                                            GPreferenceEdge edge = new GPreferenceEdge(el1, el2, user);
                                            prefGraph.addPreference(edge);
                                        }
                                    }
                                }
                            }
                        }
                        for (int i = 0; i < result.size(); i++) {
                            ITuple v = result.get(i);
                            prefGraph.addVertex(v);
                        }
                        constPrefGraphTime = System.currentTimeMillis() - constPrefGraphTime;
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_TIME, constPrefGraphTime);
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_SIZE_E, (long) prefGraph.getEdgesSize());
                        // probabilistic-model merge is disabled; the timer brackets an empty section
                        long mergeOperatorTime = System.currentTimeMillis();
                        mergeOperatorTime = System.currentTimeMillis() - mergeOperatorTime;
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_TIME, mergeOperatorTime);
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_SIZE_E, (long) prefGraph.getEdgesSize());
                        // top-k computation is disabled; the timer brackets an empty section
                        long topKTime = System.currentTimeMillis();
                        topKTime = System.currentTimeMillis() - topKTime;
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_TIME, topKTime);
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_SIZE_E, (long) prefGraph.getEdgesSize());
                        // dump summary metrics.
                        sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
                        timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
                        cacheSummaryWriter.writeNext(rep.getSummaryCacheMetrics());
                        memorySummaryWriter.writeNext(rep.getSummaryMemoryMetrics());
                        sizeSummaryWriter.flush();
                        timeSummaryWriter.flush();
                        cacheSummaryWriter.flush();
                        memorySummaryWriter.flush();
                    }
                }
            }
        }
        sizeSummaryWriter.close();
        timeSummaryWriter.close();
        cacheSummaryWriter.close();
        memorySummaryWriter.close();
    }
}
From source file:eu.delving.sip.frames.AllFrames.java
/**
 * Copies the default frame arrangement definitions bundled on the classpath
 * ("/frame-arrangements.xml") to the given file, normalising line endings to "\n".
 *
 * @param file destination file (overwritten if present)
 * @throws IOException if the resource or the destination cannot be accessed
 */
private void createDefaultFrameArrangements(File file) throws IOException {
    // try-with-resources: the original leaked both the resource stream
    // and the FileOutputStream
    try (java.io.InputStream in = getClass().getResource("/frame-arrangements.xml").openStream();
            FileOutputStream out = new FileOutputStream(file)) {
        List<String> lines = IOUtils.readLines(in);
        IOUtils.writeLines(lines, "\n", out);
    }
}
From source file:com.nridge.connector.common.con_com.crawl.CrawlIgnore.java
/**
 * Parses a file identified by the path/file name parameter
 * and loads it into an internally managed ignore URI list.
 * Lines starting with '#' are treated as comments and skipped;
 * every other line is compiled as a regular expression.
 *
 * @param aPathFileName Absolute file name (e.g. 'crawl_ignore.txt').
 *
 * @throws IOException I/O related exception.
 */
public void load(String aPathFileName) throws IOException {
    Logger appLogger = mAppMgr.getLogger(this, "load");
    appLogger.trace(mAppMgr.LOGMSG_TRACE_ENTER);

    List<String> patternLines;
    try (FileReader patternReader = new FileReader(aPathFileName)) {
        patternLines = IOUtils.readLines(patternReader);
    }

    for (String rawPattern : patternLines) {
        if (StringUtils.startsWith(rawPattern, "#")) {
            continue; // comment line — not a pattern
        }
        mPatternList.add(Pattern.compile(rawPattern));
    }

    appLogger.trace(mAppMgr.LOGMSG_TRACE_DEPART);
}
From source file:com.intuit.tank.project.DataFileBrowser.java
/** * @return/*w w w. j a v a 2 s . c om*/ */ @SuppressWarnings("unchecked") private List<String> getCurrentEntries() { if (currentEntries == null) { currentEntries = new ArrayList<String>(); if (viewDatafile != null) { FileData fd = DataFileUtil.getFileData(viewDatafile); try { FileStorage fileStorage = FileStorageFactory .getFileStorage(new TankConfig().getDataFileStorageDir(), false); currentEntries = IOUtils.readLines(fileStorage.readFileData(fd)); } catch (IOException e) { LOG.error("Error reading file " + fd.toString() + ": " + e, e); currentEntries.add("Error reading dataFile: " + e.toString()); } } else { currentEntries.add("current Data File Not set."); } } return currentEntries; }
From source file:com.streamsets.pipeline.stage.executor.s3.TestAmazonS3Executor.java
/**
 * Verifies that the COPY_OBJECT task copies an object's content to a new key
 * while leaving the source object in place, and emits exactly one event
 * referencing the new key.
 */
@Test
public void testCopyObject() throws Exception {
    String targetName = UUID.randomUUID().toString();

    AmazonS3ExecutorConfig config = getConfig();
    config.taskConfig.taskType = TaskType.COPY_OBJECT;
    config.taskConfig.copyTargetLocation = targetName;

    AmazonS3Executor executor = new AmazonS3Executor(config);
    TargetRunner runner = new TargetRunner.Builder(AmazonS3DExecutor.class, executor).build();
    runner.runInit();
    try {
        // seed the source object, then trigger the copy via the executor
        s3client.putObject(
                new PutObjectRequest(BUCKET_NAME, objectName, IOUtils.toInputStream("content"), new ObjectMetadata()));
        runner.runWrite(ImmutableList.of(getTestRecord()));

        // the copy must carry the exact content
        S3Object copied = s3client.getObject(BUCKET_NAME, targetName);
        S3ObjectInputStream copiedContent = copied.getObjectContent();
        List<String> copiedLines = IOUtils.readLines(copiedContent);
        Assert.assertEquals(1, copiedLines.size());
        Assert.assertEquals("content", copiedLines.get(0));

        // source object must still exist after the copy
        Assert.assertTrue(s3client.doesObjectExist(BUCKET_NAME, objectName));

        Assert.assertEquals(1, runner.getEventRecords().size());
        assertEvent(runner.getEventRecords().get(0), targetName);
    } finally {
        runner.runDestroy();
    }
}
From source file:de.tudarmstadt.ukp.dkpro.tc.svmhmm.writer.SVMHMMDataWriterTest.java
@Test public void testDoubleFeatures() throws Exception { featureStore = new SparseFeatureStore(); featureStore.addInstance(new Instance(Arrays.asList(new Feature("doubleFeature", 0.123456789)))); SVMHMMDataWriter svmhmmDataWriter = new SVMHMMDataWriter(); System.out.println(featureStore.getNumberOfInstances()); svmhmmDataWriter.write(temporaryFolder.getRoot(), featureStore, false, null, false); List<String> lines = IOUtils .readLines(new FileInputStream(new File(temporaryFolder.getRoot(), "feature-vectors.txt"))); System.out.println(lines);/*from w ww. j a va2 s. c om*/ // each instance must be on one line! assertEquals(1, lines.size()); assertTrue(lines.get(0).contains(" 1:0.12345679 ")); }
From source file:com.adaptris.core.services.splitter.LineCountSplitterTest.java
/**
 * Verifies that splitting one line at a time with one kept header line yields
 * 50 two-line messages: the header followed by a single content line.
 */
public void testSplitMessageWithHeader1() throws Exception {
    LineCountSplitter s = new LineCountSplitter();
    s.setKeepHeaderLines(1);
    s.setSplitOnLine(1);
    s.setIgnoreBlankLines(true);

    final String HEADER_TEXT = "HEADER LINE 1";
    List<AdaptrisMessage> result = toList(
            s.splitMessage(createLineCountMessageInputWithHeader(new String[] { HEADER_TEXT })));
    assertEquals("50 split messages", 50, result.size());

    for (AdaptrisMessage m : result) {
        List<String> lines;
        // try-with-resources: the original never closed any of the 50 readers
        try (java.io.Reader reader = m.getReader()) {
            lines = IOUtils.readLines(reader);
        }
        assertEquals("2 lines per message", 2, lines.size());
        assertEquals("Must be header line", HEADER_TEXT, lines.get(0));
        assertEquals("Must be regular line", LINE, lines.get(1));
    }
}