List of usage examples for org.apache.commons.io FilenameUtils separatorsToSystem
public static String separatorsToSystem(String path)
From source file:com.pieframework.runtime.core.ResourceLoader.java
/**
 * Resolves a resource URL to a system-specific file path inside the current
 * model's "resources" directory.
 *
 * @param url resource path fragment appended verbatim after "resources"
 *            (presumably carries its own leading separator — TODO confirm against callers)
 * @return the combined path with every separator normalized to the platform's
 */
public static String locate(String url) {
    final String resourcesRoot = ModelStore.getCurrentModel().getModelDirectory()
            + File.separatorChar + "resources";
    return FilenameUtils.separatorsToSystem(resourcesRoot + url);
}
From source file:au.org.ala.delta.io.OutputFileManager.java
/**
 * Records the directory into which output files will be written, normalizing
 * any path separators to the current platform's convention first.
 *
 * @param directory output directory path; separators in either style are accepted
 * @throws Exception declared by the original contract; this implementation does not throw
 */
public void setOutputDirectory(String directory) throws Exception {
    final String normalizedDirectory = FilenameUtils.separatorsToSystem(directory);
    _outputDirectory = normalizedDirectory;
}
From source file:au.org.ala.delta.io.OutputFileManager.java
/**
 * Resolves an output file name against the configured output directory.
 * <p>
 * A blank name yields the empty string. A bare file name (one containing no
 * path separator after normalization) is placed inside {@code _outputDirectory}
 * when one is configured; a name that already carries a directory component is
 * returned unchanged apart from separator normalization.
 *
 * @param fileName the file name to resolve; may be null or empty
 * @return the resolved, separator-normalized path, or "" for a blank input
 */
protected String prependOutputDirectory(String fileName) {
    if (StringUtils.isEmpty(fileName)) {
        return "";
    }
    // Normalize separators first so the File.separator check below is reliable.
    String outputFileName = FilenameUtils.separatorsToSystem(fileName);
    if (!outputFileName.contains(File.separator) && (_outputDirectory != null)) {
        // Fix: concat the normalized name. The original passed the raw fileName,
        // which was inconsistent with the normalization performed above (harmless
        // in practice only because a separator-free name is unchanged by it).
        outputFileName = FilenameUtils.concat(_outputDirectory, outputFileName);
    }
    return outputFileName;
}
From source file:cs.ox.ac.uk.gsors.GroupPreferencesTest1.java
/**
 * End-to-end rewriting benchmark: parses the prefDB ontology, rewrites each
 * test query under the configured strategies, executes the SQL rewritings,
 * builds a group-preference graph, runs top-k aggregation, and appends size
 * and time metrics to per-strategy CSV summary files.
 * <p>
 * NOTE(review): the FileReader and the CSV writers are not closed on the
 * exception path (no try-with-resources / finally) — resource leak if a test
 * iteration throws. Left as-is in this documentation-only pass.
 * (Commented-out memory-profiling, cache/memory summary writers and per-test
 * output-directory code from the original were elided for readability.)
 */
public void testFORewriting() throws Exception {
    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;
    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy " + ncCheckStrategy.name());
    final File testSuiteFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases1.txt");
    final List<String> tests = IOUtils.readLines(new FileReader(testSuiteFile));
    final String creationDate = dateFormat.format(new Date());
    // Parse the program.
    final Parser parser = new Parser();
    parser.parse(getStringFile(_DEFAULT_INPUT_PATH + "prefDB-ontology.dtg"));
    // Get the rules.
    final List<IRule> rules = parser.getRules();
    // Get the queries.
    final List<IQuery> queryHeads = parser.getQueries();
    final Map<IPredicate, IRelation> conf = parser.getDirectives();
    if (!conf.isEmpty()) {
        StorageManager.getInstance();
        StorageManager.configure(conf);
    }
    // Get the TGDs from the set of rules.
    final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);
    final List<IRule> mSBox = RewritingUtils.getSBoxRules(rules, queryHeads);
    final IRuleSafetyProcessor ruleProc = new StandardRuleSafetyProcessor();
    ruleProc.process(mSBox);
    final IQueryRewriter ndmRewriter = new NDMRewriter(mSBox);
    final IRelationFactory rf = new RelationFactory();
    // Convert the query bodies into rules.
    final List<IRule> bodies = new LinkedList<IRule>(rules);
    bodies.removeAll(tgds);
    final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads);
    // Get the constraints from the set of rules.
    final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);
    final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
    LOGGER.info("Expressivity: " + exprs.toString());
    if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY)) {
        extracted();
    }
    // Compute the dependency graph.
    LOGGER.debug("Computing position dependencies.");
    long posDepTime = System.currentTimeMillis();
    Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils.computePositionDependencyGraph(tgds);
    posDepTime = System.currentTimeMillis() - posDepTime;
    // Setup caching.
    CacheManager.setupCaching();
    // If linear TGDs, compute the atom coverage graph.
    LOGGER.debug("Computing atom coverage graph.");
    long atomCoverGraphTime = System.currentTimeMillis();
    if (exprs.contains(Expressivity.LINEAR)) {
        deps = DepGraphUtils.computeAtomCoverageGraph(deps);
    }
    atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;
    // Rewrite the negative constraints (skipped when ncCheckStrategy == NONE).
    final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
            RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
    long ncRewTime = System.currentTimeMillis();
    final Set<IRule> rewrittenConstraints = Sets.newHashSet();
    if (!ncCheckStrategy.equals(NCCheck.NONE)) {
        for (final IRule c : constraints) {
            rewrittenConstraints.addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
        }
    }
    ncRewTime = System.currentTimeMillis() - ncRewTime;
    LOGGER.debug("Finished rewriting constraints.");
    // Compute the rewriting.
    final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy, ncCheckStrategy);
    // City -> business count; only Tempe is enabled in this variant
    // (Peoria/Gilbert/Glendale/Chandler/Scottsdale/Phoenix were commented out).
    Map<String, Integer> cities = new HashMap<String, Integer>();
    cities.put("Tempe", 465);
    List<Integer> ks = new ArrayList<Integer>();
    ks.add(1);
    ks.add(2);
    ks.add(3);
    List<AggregateStrategy> str = new ArrayList<AggregateStrategy>();
    str.add(AggregateStrategy.CSU);
    str.add(AggregateStrategy.Plurality);
    str.add(AggregateStrategy.PluralityMisery);
    for (AggregateStrategy strategyQA : str) {
        final String summaryPrefix = StringUtils.join(creationDate, "-", strategyQA.toString());
        final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
        final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');
        final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
        final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');
        sizeSummaryWriter.writeNext(GReportingUtils.getSummaryRewritingSizeReportHeader());
        timeSummaryWriter.writeNext(GReportingUtils.getSummaryRewritingTimeReportHeader());
        for (Integer k : ks) {
            for (String city : cities.keySet()) {
                // Single configuration pass in this variant (con < 1).
                for (int con = 0; con < 1; con++) {
                    LOGGER.info("con-city-k: " + con + "-" + city + "-" + k + "-" + strategyQA.toString());
                    // Top k for each preferences.
                    for (final String testName : tests) {
                        // Create a buffer for the output.
                        final IRelation result = rf.createRelation();
                        GPrefParameters parameters = new GPrefParameters(testName, k, city, cities.get(city));
                        LOGGER.info("Processing file: " + testName);
                        // Select the query matching the scenario's meal/aspect
                        // (0 = food, 1 = cuisine, 2 = place).
                        IRule q = null;
                        if (parameters.getScenario() == Scenario.BREAKFAST_FOOD
                                || parameters.getScenario() == Scenario.LUNCH_FOOD
                                || parameters.getScenario() == Scenario.DINNER_FOOD) {
                            q = queries.get(0);
                        }
                        if (parameters.getScenario() == Scenario.BREAKFAST_CUSINE
                                || parameters.getScenario() == Scenario.LUNCH_CUSINE
                                || parameters.getScenario() == Scenario.DINNER_CUSINE) {
                            q = queries.get(1);
                        }
                        if (parameters.getScenario() == Scenario.BREAKFAST_PLACE
                                || parameters.getScenario() == Scenario.LUNCH_PLACE
                                || parameters.getScenario() == Scenario.DINNER_PLACE) {
                            q = queries.get(2);
                        }
                        // NOTE(review): q stays null (NPE below) if the scenario
                        // matches none of the three groups — TODO confirm intent.
                        CacheManager.setupCaching();
                        final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                                .getPredicateSymbol();
                        // Setup reporting.
                        final ToitReporter rep = ToitReporter.getInstance(true);
                        ToitReporter.setupReporting();
                        ToitReporter.setQuery(queryPredicate);
                        ToitReporter.setTest(testName);
                        ToitReporter.setK(parameters.getK());
                        ToitReporter.setCity(parameters.getCity());
                        ToitReporter.setGroupID(parameters.getGroupId());
                        ToitReporter.setNbUsers(parameters.getMaxNbUsers());
                        ToitReporter.setNbBuss(parameters.getBs());
                        ToitReporter.setScenario(parameters.getScenario());
                        rep.setValue(GRewMetric.DEPGRAPH_TIME, posDepTime);
                        LOGGER.info("Processing query: ".concat(q.toString()));
                        final long overallTime = System.currentTimeMillis();
                        final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps, exprs);
                        rep.setValue(GRewMetric.REW_TIME, System.currentTimeMillis() - overallTime);
                        rep.setValue(GRewMetric.REW_SIZE, (long) rewriting.size());
                        rep.setValue(GRewMetric.REW_CNS_TIME, ncRewTime);
                        final Set<ILiteral> newHeads = new HashSet<ILiteral>();
                        Map<IPredicate, IRelation> results = new HashMap<IPredicate, IRelation>();
                        for (final IRule qr : rewriting) {
                            newHeads.add(qr.getHead().iterator().next());
                            final Set<IRule> sboxRewriting = new LinkedHashSet<IRule>();
                            Set<IRule> rrules = ndmRewriter.getRewriting(qr);
                            sboxRewriting.addAll(rrules);
                            // Produce the SQL rewriting for each query in the program.
                            final SQLRewriter sqlRewriter = new SQLRewriter(sboxRewriting);
                            try {
                                // Get the SQL rewriting as Union of Conjunctive Queries.
                                long duration = -System.nanoTime();
                                final List<String> ucqSQLRewriting = sqlRewriter.getSQLRewritings(
                                        parameters.getConstraintsSqlQuery(), parameters.getNbNodes(),
                                        parameters.getStartFromRes());
                                duration = ((duration + System.nanoTime()) / 1000000);
                                IRelation resultAux = rf.createRelation();
                                for (final String qu : ucqSQLRewriting) {
                                    IRelation r = StorageManager.executeQuery(qu);
                                    resultAux.addAll(r);
                                }
                                for (IPredicate predicate : qr.getBodyPredicates()) {
                                    results.put(predicate, resultAux);
                                }
                                result.addAll(resultAux);
                            } catch (final SQLException e) {
                                // NOTE(review): failures are only printed, not reported.
                                e.printStackTrace();
                            }
                        }
                        // Construct the preference graph: one edge per (preferred,
                        // less-preferred) tuple pair for each user.
                        Map<User, List<user.models.Pair<IPredicate, IPredicate>>> prefs = JsonHelper
                                .getGPreferences(parameters.getPrefs(), tgds);
                        final cs.ox.ac.uk.gsors2.GPreferencesGraph prefGraph = Factory.GPGRAPH
                                .createPreferencesGraph();
                        long constPrefGraphTime = System.currentTimeMillis();
                        for (User user : prefs.keySet()) {
                            for (user.models.Pair<IPredicate, IPredicate> pairPreference : prefs.get(user)) {
                                IRelation morePrefs = results.get(pairPreference.getElement0());
                                IRelation lessPrefs = results.get(pairPreference.getElement1());
                                for (int j = 0; j < morePrefs.size(); j++) {
                                    ITuple el1 = morePrefs.get(j);
                                    if (!lessPrefs.contains(el1)) {
                                        for (int i = 0; i < lessPrefs.size(); i++) {
                                            ITuple el2 = lessPrefs.get(i);
                                            GPreferenceEdge edge = new GPreferenceEdge(el1, el2, user);
                                            prefGraph.addPreference(edge);
                                        }
                                    }
                                }
                            }
                        }
                        for (int i = 0; i < result.size(); i++) {
                            ITuple v = result.get(i);
                            prefGraph.addVertex(v);
                        }
                        constPrefGraphTime = System.currentTimeMillis() - constPrefGraphTime;
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_TIME, constPrefGraphTime);
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_SIZE_E, (long) prefGraph.getEdgesSize());
                        // Merge step is disabled (mergeProbabilisticModel commented
                        // out in the original), so this timing is effectively zero.
                        long mergeOperatorTime = System.currentTimeMillis();
                        mergeOperatorTime = System.currentTimeMillis() - mergeOperatorTime;
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_SIZE_E, (long) prefGraph.getEdgesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_TIME, (long) mergeOperatorTime);
                        // Top-k aggregation over the preference graph.
                        long topKTime = System.currentTimeMillis();
                        IRelation r = GTopKAlgorithms.getTopK(prefGraph, parameters.getK(), strategyQA);
                        topKTime = System.currentTimeMillis() - topKTime;
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_TIME, topKTime);
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_SIZE_E, (long) prefGraph.getEdgesSize());
                        rep.setValue(GRewMetric.ANSWER_SIZE, (long) r.size());
                        // Dump summary metrics.
                        sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
                        timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
                        sizeSummaryWriter.flush();
                        timeSummaryWriter.flush();
                    }
                }
            }
        }
        sizeSummaryWriter.close();
        timeSummaryWriter.close();
    }
}
From source file:cs.ox.ac.uk.gsors.GroupPreferencesTestAux.java
/**
 * Variant of the rewriting benchmark (Aux): same pipeline as
 * GroupPreferencesTest1 but with four cities enabled, ten configuration
 * passes per city, all four summary CSVs (size/time/cache/memory) active,
 * and the merge and top-k computations disabled (commented out), so only
 * graph-construction metrics are meaningful.
 * <p>
 * NOTE(review): the FileReader and the CSV writers are not closed on the
 * exception path (no try-with-resources / finally) — resource leak if a test
 * iteration throws. Left as-is in this documentation-only pass.
 * (Commented-out memory-profiling and per-test output-directory code from the
 * original were elided for readability.)
 */
public void testFORewriting() throws Exception {
    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;
    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy " + ncCheckStrategy.name());
    final File testSuiteFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases1.txt");
    final List<String> tests = IOUtils.readLines(new FileReader(testSuiteFile));
    final String creationDate = dateFormat.format(new Date());
    // Parse the program.
    final Parser parser = new Parser();
    parser.parse(getStringFile(_DEFAULT_INPUT_PATH + "prefDB-ontology.dtg"));
    // Get the rules.
    final List<IRule> rules = parser.getRules();
    // Get the queries.
    final List<IQuery> queryHeads = parser.getQueries();
    final Map<IPredicate, IRelation> conf = parser.getDirectives();
    if (!conf.isEmpty()) {
        StorageManager.getInstance();
        StorageManager.configure(conf);
    }
    // Get the TGDs from the set of rules.
    final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);
    final List<IRule> mSBox = RewritingUtils.getSBoxRules(rules, queryHeads);
    final IRuleSafetyProcessor ruleProc = new StandardRuleSafetyProcessor();
    ruleProc.process(mSBox);
    final IQueryRewriter ndmRewriter = new NDMRewriter(mSBox);
    final IRelationFactory rf = new RelationFactory();
    // Convert the query bodies into rules.
    final List<IRule> bodies = new LinkedList<IRule>(rules);
    bodies.removeAll(tgds);
    final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads);
    // Get the constraints from the set of rules.
    final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);
    final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
    LOGGER.info("Expressivity: " + exprs.toString());
    if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY)) {
        extracted();
    }
    // Compute the dependency graph.
    LOGGER.debug("Computing position dependencies.");
    long posDepTime = System.currentTimeMillis();
    Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils.computePositionDependencyGraph(tgds);
    posDepTime = System.currentTimeMillis() - posDepTime;
    // Setup caching.
    CacheManager.setupCaching();
    // If linear TGDs, compute the atom coverage graph.
    LOGGER.debug("Computing atom coverage graph.");
    long atomCoverGraphTime = System.currentTimeMillis();
    if (exprs.contains(Expressivity.LINEAR)) {
        deps = DepGraphUtils.computeAtomCoverageGraph(deps);
    }
    atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;
    // Rewrite the negative constraints (skipped when ncCheckStrategy == NONE).
    final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
            RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
    long ncRewTime = System.currentTimeMillis();
    final Set<IRule> rewrittenConstraints = Sets.newHashSet();
    if (!ncCheckStrategy.equals(NCCheck.NONE)) {
        for (final IRule c : constraints) {
            rewrittenConstraints.addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
        }
    }
    ncRewTime = System.currentTimeMillis() - ncRewTime;
    LOGGER.debug("Finished rewriting constraints.");
    // Compute the rewriting.
    final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy, ncCheckStrategy);
    // City -> business count; Tempe and Phoenix are disabled in this variant.
    Map<String, Integer> cities = new HashMap<String, Integer>();
    cities.put("Peoria", 142);
    cities.put("Gilbert", 216);
    cities.put("Glendale", 314);
    cities.put("Chandler", 466);
    List<Integer> ks = new ArrayList<Integer>();
    ks.add(1);
    ks.add(2);
    ks.add(3);
    List<AggregateStrategy> str = new ArrayList<AggregateStrategy>();
    str.add(AggregateStrategy.CSU);
    str.add(AggregateStrategy.Plurality);
    str.add(AggregateStrategy.PluralityMisery);
    for (AggregateStrategy strategyQA : str) {
        final String summaryPrefix = StringUtils.join(creationDate, "-", strategyQA.toString());
        final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
        final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');
        final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
        final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');
        final File cacheSummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "cache-summary.csv"));
        final CSVWriter cacheSummaryWriter = new CSVWriter(new FileWriter(cacheSummaryFile), ',');
        final File memorySummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "memory-summary.csv"));
        final CSVWriter memorySummaryWriter = new CSVWriter(new FileWriter(memorySummaryFile), ',');
        sizeSummaryWriter.writeNext(GReportingUtils.getSummaryRewritingSizeReportHeader());
        timeSummaryWriter.writeNext(GReportingUtils.getSummaryRewritingTimeReportHeader());
        cacheSummaryWriter.writeNext(GReportingUtils.getSummaryCachingReportHeader());
        memorySummaryWriter.writeNext(GReportingUtils.getSummaryMemoryReportHeader());
        for (Integer k : ks) {
            for (String city : cities.keySet()) {
                // Ten configuration passes per city in this variant.
                for (int con = 0; con < 10; con++) {
                    LOGGER.info("con-city-k: " + con + "-" + city + "-" + k + "-" + strategyQA.toString());
                    // Top k for each preferences.
                    for (final String testName : tests) {
                        // Create a buffer for the output.
                        final IRelation result = rf.createRelation();
                        GPrefParameters parameters = new GPrefParameters(testName, k, city, cities.get(city));
                        LOGGER.info("Processing file: " + testName);
                        // Select the query matching the scenario's meal/aspect
                        // (0 = food, 1 = cuisine, 2 = place).
                        IRule q = null;
                        if (parameters.getScenario() == Scenario.BREAKFAST_FOOD
                                || parameters.getScenario() == Scenario.LUNCH_FOOD
                                || parameters.getScenario() == Scenario.DINNER_FOOD) {
                            q = queries.get(0);
                        }
                        if (parameters.getScenario() == Scenario.BREAKFAST_CUSINE
                                || parameters.getScenario() == Scenario.LUNCH_CUSINE
                                || parameters.getScenario() == Scenario.DINNER_CUSINE) {
                            q = queries.get(1);
                        }
                        if (parameters.getScenario() == Scenario.BREAKFAST_PLACE
                                || parameters.getScenario() == Scenario.LUNCH_PLACE
                                || parameters.getScenario() == Scenario.DINNER_PLACE) {
                            q = queries.get(2);
                        }
                        // NOTE(review): q stays null (NPE below) if the scenario
                        // matches none of the three groups — TODO confirm intent.
                        CacheManager.setupCaching();
                        final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                                .getPredicateSymbol();
                        // Setup reporting.
                        final ToitReporter rep = ToitReporter.getInstance(true);
                        ToitReporter.setupReporting();
                        ToitReporter.setQuery(queryPredicate);
                        ToitReporter.setTest(testName);
                        ToitReporter.setK(parameters.getK());
                        ToitReporter.setCity(parameters.getCity());
                        ToitReporter.setGroupID(parameters.getGroupId());
                        ToitReporter.setNbUsers(parameters.getMaxNbUsers());
                        ToitReporter.setNbBuss(parameters.getBs());
                        ToitReporter.setScenario(parameters.getScenario());
                        rep.setValue(GRewMetric.DEPGRAPH_TIME, posDepTime);
                        LOGGER.info("Processing query: ".concat(q.toString()));
                        final long overallTime = System.currentTimeMillis();
                        final Set<IRule> rewriting = rewriter.getRewriting(q, tgds,
                                rewrittenConstraints, deps, exprs);
                        rep.setValue(GRewMetric.REW_TIME, System.currentTimeMillis() - overallTime);
                        rep.setValue(GRewMetric.REW_SIZE, (long) rewriting.size());
                        rep.setValue(GRewMetric.REW_CNS_TIME, ncRewTime);
                        final Set<ILiteral> newHeads = new HashSet<ILiteral>();
                        Map<IPredicate, IRelation> results = new HashMap<IPredicate, IRelation>();
                        for (final IRule qr : rewriting) {
                            newHeads.add(qr.getHead().iterator().next());
                            final Set<IRule> sboxRewriting = new LinkedHashSet<IRule>();
                            Set<IRule> rrules = ndmRewriter.getRewriting(qr);
                            sboxRewriting.addAll(rrules);
                            // Produce the SQL rewriting for each query in the program.
                            final SQLRewriter sqlRewriter = new SQLRewriter(sboxRewriting);
                            try {
                                // Get the SQL rewriting as Union of Conjunctive Queries.
                                long duration = -System.nanoTime();
                                final List<String> ucqSQLRewriting = sqlRewriter.getSQLRewritings(
                                        parameters.getConstraintsSqlQuery(), parameters.getNbNodes(),
                                        parameters.getStartFromRes());
                                duration = ((duration + System.nanoTime()) / 1000000);
                                IRelation resultAux = rf.createRelation();
                                for (final String qu : ucqSQLRewriting) {
                                    IRelation r = StorageManager.executeQuery(qu);
                                    resultAux.addAll(r);
                                }
                                for (IPredicate predicate : qr.getBodyPredicates()) {
                                    results.put(predicate, resultAux);
                                }
                                result.addAll(resultAux);
                            } catch (final SQLException e) {
                                // NOTE(review): failures are only printed, not reported.
                                e.printStackTrace();
                            }
                        }
                        // Construct the preference graph: one edge per (preferred,
                        // less-preferred) tuple pair for each user.
                        Map<User, List<user.models.Pair<IPredicate, IPredicate>>> prefs = JsonHelper
                                .getGPreferences(parameters.getPrefs(), tgds);
                        final cs.ox.ac.uk.gsors2.GPreferencesGraph prefGraph = Factory.GPGRAPH
                                .createPreferencesGraph();
                        long constPrefGraphTime = System.currentTimeMillis();
                        for (User user : prefs.keySet()) {
                            for (user.models.Pair<IPredicate, IPredicate> pairPreference : prefs.get(user)) {
                                IRelation morePrefs = results.get(pairPreference.getElement0());
                                IRelation lessPrefs = results.get(pairPreference.getElement1());
                                for (int j = 0; j < morePrefs.size(); j++) {
                                    ITuple el1 = morePrefs.get(j);
                                    if (!lessPrefs.contains(el1)) {
                                        for (int i = 0; i < lessPrefs.size(); i++) {
                                            ITuple el2 = lessPrefs.get(i);
                                            GPreferenceEdge edge = new GPreferenceEdge(el1, el2, user);
                                            prefGraph.addPreference(edge);
                                        }
                                    }
                                }
                            }
                        }
                        for (int i = 0; i < result.size(); i++) {
                            ITuple v = result.get(i);
                            prefGraph.addVertex(v);
                        }
                        constPrefGraphTime = System.currentTimeMillis() - constPrefGraphTime;
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_TIME, constPrefGraphTime);
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_SIZE_E, (long) prefGraph.getEdgesSize());
                        // Merge and top-k steps are disabled in this variant
                        // (calls commented out in the original), so these timings
                        // are effectively zero.
                        long mergeOperatorTime = System.currentTimeMillis();
                        mergeOperatorTime = System.currentTimeMillis() - mergeOperatorTime;
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_TIME, mergeOperatorTime);
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_SIZE_E, (long) prefGraph.getEdgesSize());
                        long topKTime = System.currentTimeMillis();
                        topKTime = System.currentTimeMillis() - topKTime;
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_TIME, topKTime);
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_SIZE_E, (long) prefGraph.getEdgesSize());
                        // Dump summary metrics.
                        sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
                        timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
                        cacheSummaryWriter.writeNext(rep.getSummaryCacheMetrics());
                        memorySummaryWriter.writeNext(rep.getSummaryMemoryMetrics());
                        sizeSummaryWriter.flush();
                        timeSummaryWriter.flush();
                        cacheSummaryWriter.flush();
                        memorySummaryWriter.flush();
                    }
                }
            }
        }
        sizeSummaryWriter.close();
        timeSummaryWriter.close();
        cacheSummaryWriter.close();
        memorySummaryWriter.close();
    }
}
From source file:edu.ur.file.db.FileDatabaseTest.java
/** * Test creating an emtpy file in the file system. * @throws LocationAlreadyExistsException * @throws IllegalFileSystemNameException *//*ww w . j a v a 2 s . c o m*/ public void createEmptyFileTest() throws LocationAlreadyExistsException, IllegalFileSystemNameException { DefaultFileServer fs = new DefaultFileServer(); String databasePath = FilenameUtils.separatorsToSystem(properties.getProperty("FileDatabaseTest.db_path")); assert databasePath != null : "Path should not be null"; DefaultFileDatabase fileDatabaseImpl = fs.createFileDatabase("displayName", "dbName_1", databasePath, "dbDescription"); fileDatabaseImpl.setId(44l); TreeFolderInfo folder = fileDatabaseImpl.createRootFolder("rootFolder", "folderName"); TreeFolderInfo child = folder.createChild("newDbFolder", "newDbFolder"); // set the default file storage assert fileDatabaseImpl.setCurrentFileStore(child.getName()) : "the new default folder should be set"; // create the first file to store in the temporary folder String tempDirectory = properties.getProperty("file_db_temp_directory"); File directory = new File(tempDirectory); // helper to create the file FileUtil testUtil = new FileUtil(); testUtil.createDirectory(directory); FileInfo info = fileDatabaseImpl.addFile("anEmptyFileUniqueFileName"); assert fileDatabaseImpl.getFile(info.getName()) != null : "File should be found"; fs.deleteFileServer(); }
From source file:com.orient.lib.xbmc.addons.Addon.java
/** * Takes the ID of the addon and loads all properties from the corresponding * file structure./* w w w .j a va2 s .co m*/ * * @param id * @return */ private boolean loadPropsByAddonId(String id) { if (!Addon.exists(id)) return false; String path = Addon.getAddonXmlPath(id); if (path != null && !loadXMLDocument(path)) return false; AddonProps props = new AddonProps(); // Addon element // Settings settings = Settings.getInstance(); // File addonDir = settings.getAddonDir(); Element addonEl = document.getDocumentElement(); if (addonEl == null || !addonEl.getNodeName().equals("addon")) return false; props.path = FilenameUtils.separatorsToSystem(Settings.getInstance().getAddonDirPath() + "\\" + id); props.id = XMLUtils.getAttribute(addonEl, "id"); props.name = XMLUtils.getAttribute(addonEl, "name"); props.version = XMLUtils.getAttribute(addonEl, "version"); props.author = XMLUtils.getAttribute(addonEl, "provider-name"); // extension element Element extensionEl = XMLUtils.getFirstChildElement(addonEl, "extension"); while (extensionEl != null) { String point = XMLUtils.getAttribute(extensionEl, "point"); if (point.equals("xbmc.addon.metadata")) { Element licenseEl = XMLUtils.getFirstChildElement(extensionEl, "license"); if (licenseEl != null) props.license = licenseEl.getNodeValue(); // TODO add other meta data } else { props.libname = XMLUtils.getAttribute(extensionEl, "library"); props.type = getTranslateType(point); } extensionEl = XMLUtils.getNextSiblingElement(extensionEl, "extension"); } // dependencies element Element requiresEl = XMLUtils.getFirstChildElement(addonEl, "requires"); Element importEl = XMLUtils.getFirstChildElement(requiresEl, "import"); while (importEl != null) { String addon = XMLUtils.getAttribute(importEl, "addon"); if (addon != null) props.dependencies.add(addon); importEl = XMLUtils.getNextSiblingElement(importEl, "import"); } this.props = props; return true; }
From source file:com.thoughtworks.go.server.service.BackupServiceIntegrationTest.java
@Test public void shouldPerformConfigBackupForAllConfigFiles() throws Exception { try {//from ww w .j a v a 2 s . co m createConfigFile("foo", "foo_foo"); createConfigFile("bar", "bar_bar"); createConfigFile("baz", "hazar_bar"); createConfigFile("hello/world/file", "hello world!"); createConfigFile("some_dir/cruise-config.xml", "some-other-cruise-config"); createConfigFile("some_dir/cipher", "some-cipher"); ServerBackup backup = backupService.startBackup(admin); assertThat(backup.isSuccessful(), is(true)); assertThat(backup.getMessage(), is("Backup was generated successfully.")); File configZip = backedUpFile("config-dir.zip"); assertThat(fileContents(configZip, "foo"), is("foo_foo")); assertThat(fileContents(configZip, "bar"), is("bar_bar")); assertThat(fileContents(configZip, "baz"), is("hazar_bar")); assertThat(fileContents(configZip, FilenameUtils.separatorsToSystem("hello/world/file")), is("hello world!")); assertThat(fileContents(configZip, FilenameUtils.separatorsToSystem("some_dir/cruise-config.xml")), is("some-other-cruise-config")); assertThat(fileContents(configZip, FilenameUtils.separatorsToSystem("some_dir/cipher")), is("some-cipher")); assertThat(fileContents(configZip, "cruise-config.xml"), is(goConfigService.xml())); byte[] realDesCipher = new DESCipherProvider(systemEnvironment).getKey(); byte[] realAESCipher = new AESCipherProvider(systemEnvironment).getKey(); assertThat(fileContents(configZip, "cipher"), is(encodeHexString(realDesCipher))); assertThat(fileContents(configZip, "cipher.aes"), is(encodeHexString(realAESCipher))); } finally { deleteConfigFileIfExists("foo", "bar", "baz", "hello", "some_dir"); } }
From source file:beans.DeployManagerImpl.java
public WidgetInstance fork(ServerNode server, Widget widget) { File unzippedDir = Utils.downloadAndUnzip(widget.getRecipeURL(), widget.getApiKey()); File recipeDir = unzippedDir; if (widget.getRecipeRootPath() != null) { recipeDir = new File(unzippedDir, widget.getRecipeRootPath()); }//w ww . j a v a 2 s .co m logger.info("Deploying an instance for recipe at : [{}] ", recipeDir); Recipe.Type recipeType = new Recipe(recipeDir).getRecipeType(); if (alreadyInstalled(server, widget, recipeType)) { logger.info("[{}] [{}] is already installed", recipeType, widget.toInstallName()); WidgetInstance widgetInstance = widget.addWidgetInstance(server, recipeDir); String publicIp = getServicePublicIp(widgetInstance); if (!StringUtils.isEmpty(publicIp)) { logger.info("found service ip at [{}]", publicIp); widgetInstance.setServicePublicIp(publicIp); widgetInstance.save(); } server.createEvent(null, ServerNodeEvent.Type.DONE).save(); return widgetInstance; } else { logger.info("Deploying: [ServerIP={}] [recipe={}] [type={}]", new Object[] { server.getPublicIP(), recipeDir, recipeType.name() }); String recipePath = FilenameUtils.separatorsToSystem(recipeDir.getPath()); CommandLine cmdLine = new CommandLine(conf.cloudify.deployScript); cmdLine.addArgument(server.getPublicIP()); cmdLine.addArgument(recipePath.replaceAll("\\\\", "/")); // support for windows. cmdLine.addArgument(recipeType.commandParam); cmdLine.addArgument(widget.toInstallName()); execute(cmdLine, server); return widget.addWidgetInstance(server, recipeDir); } }
From source file:com.pieframework.repositories.PerforceArtifactRepository.java
public Map<String, String> downloadFileSpec(String artifactAddress, String localPath, Status status, Boolean forceUpdate) {/*from w w w .jav a 2 s.c om*/ Map<String, String> localFileList = new HashMap<String, String>(); this.connect(); try { // Create a fileSpec with the Path. List<IFileSpec> fsList = FileSpecBuilder.makeFileSpecList(artifactAddress); // Get the full list from the Server Depot List<IFileSpec> fileList = this.getServer().getDepotFiles(fsList, false); List<IFileSpec> validSyncList = FileSpecBuilder.getValidFileSpecs(fileList); List<IFileSpec> invalidSyncList = FileSpecBuilder.getInvalidFileSpecs(fileList); List<IFileSpec> syncList = this.getServer().getCurrentClient().sync(validSyncList, forceUpdate, false, false, false); if (invalidSyncList != null) { String errors = ""; for (IFileSpec fs : invalidSyncList) { errors += "error: " + artifactAddress + " cannot be synced beacuse it is in state:" + fs.getOpStatus() + ".Verify the file exists in p4."; } if (!errors.equalsIgnoreCase("")) { // status.addMessage("error",errors); } } if (syncList != null) { // status.addMessage("debug","p4 syncing "+artifactAddress); System.out.println("p4 syncing " + artifactAddress); for (IFileSpec fileSpec : syncList) { if (fileSpec != null) { if (fileSpec.getOpStatus() == FileSpecOpStatus.VALID) { localFileList.put(FilenameUtils.separatorsToSystem(fileSpec.getDepotPathString()), fileSpec.getClientPathString()); System.out.println("info: sync completed for " + fileSpec.getDepotPathString()); } else { CharSequence cs = "up-to-date."; if (!fileSpec.getStatusMessage().contains(cs) && status != null) { status.addMessage("error", "" // System.out.println("" + fileSpec.getOpStatus() + " sync failed for " + artifactAddress + " with error message " + fileSpec.getStatusMessage()); } else { String pathFromMessage = getPathFromStatusMessage(fileSpec.getStatusMessage()); localFileList.put(pathFromMessage, this.getServer().getCurrentClient().getRoot() + pathFromMessage); } } } } // 
status.addMessage("info",localFileList.size()+ // " files synced from p4 to local client."); } } catch (ConnectionException e1) { throw new RuntimeException(e1.getMessage(), e1); } catch (RequestException e1) { throw new RuntimeException(e1.getMessage(), e1); } catch (AccessException e1) { throw new RuntimeException(e1.getMessage(), e1); } this.disconnect(); return localFileList; }