Usage examples for org.apache.commons.configuration Configuration.getInt
int getInt(String key);
int getInt(String key, int defaultValue);

Returns the int value associated with the given key. The single-argument form throws NoSuchElementException when the key is absent (and ConversionException when the value cannot be parsed as an int); the two-argument form returns defaultValue for a missing key instead, as several of the examples below rely on (e.g. getInt("[@inParking]", -1)).
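Before the project examples, a minimal self-contained sketch of both overloads. It assumes Commons Configuration 1.x on the classpath; the key names ("pool.size", "pool.timeout") are made up for illustration.

import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;

public class GetIntExample {
    public static void main(String[] args) {
        Configuration config = new BaseConfiguration();
        config.setProperty("pool.size", "8"); // values are stored as strings and parsed on read

        int size = config.getInt("pool.size");           // parses "8" -> 8
        int timeout = config.getInt("pool.timeout", 30); // key missing -> default 30

        System.out.println(size + " " + timeout);        // prints: 8 30
    }
}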
From source file:org.zaproxy.zap.extension.authorization.ExtensionAuthorization.java
@Override
public void importContextData(Context ctx, Configuration config) throws ConfigurationException {
    int type = config.getInt(AuthorizationDetectionMethod.CONTEXT_CONFIG_AUTH_TYPE);
    switch (type) {
    case BasicAuthorizationDetectionMethod.METHOD_UNIQUE_ID:
        ctx.setAuthorizationDetectionMethod(new BasicAuthorizationDetectionMethod(config));
        break;
    }
}
From source file:org.zaproxy.zap.extension.forceduser.ExtensionForcedUser.java
@Override
public void importContextData(Context ctx, Configuration config) {
    int id = config.getInt("context.forceduser");
    if (id >= 0) {
        this.setForcedUser(ctx.getIndex(), id);
    }
}
From source file:org.zaproxy.zap.extension.sessions.ExtensionSessionManagement.java
@Override
public void importContextData(Context ctx, Configuration config) throws ConfigurationException {
    SessionManagementMethodType t = getSessionManagementMethodTypeForIdentifier(
            config.getInt(CONTEXT_CONFIG_SESSION_TYPE));
    if (t != null) {
        SessionManagementMethod method = t.createSessionManagementMethod(ctx.getIndex());
        t.importData(config, method);
        ctx.setSessionManagementMethod(method);
    }
}
From source file:playground.michalm.jtrrouter.JTRRouter.java
protected void initTurn(HierarchicalConfiguration nodeCfg) {
    int id = nodeCfg.getInt("[@id]");
    int prev = nodeCfg.getInt("[@prev]");

    int length = nodeCfg.getMaxIndex("next") + 1;
    int[] nodes = new int[length];
    double[] probs = new double[length];

    for (int i = 0; i < length; i++) {
        Configuration nextCfg = nodeCfg.subset("next(" + i + ')');
        nodes[i] = nextCfg.getInt("[@node]");
        probs[i] = nextCfg.getDouble("[@probability]");
    }

    turns[prev][id] = new Turn(id, prev, nodes, probs);
}
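The "[@id]" keys above use HierarchicalConfiguration's attribute syntax: "[@attr]" reads an XML attribute of the current element, and "elem(i)" indexes repeated child elements. A minimal sketch of the same pattern, with a hypothetical file name (turns.xml) and element layout invented for illustration:

import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.HierarchicalConfiguration;
import org.apache.commons.configuration.XMLConfiguration;

public class AttributeKeyExample {
    public static void main(String[] args) throws ConfigurationException {
        // turns.xml (hypothetical):
        // <turns>
        //   <node id="3" prev="1">
        //     <next node="4" probability="0.7"/>
        //   </node>
        // </turns>
        XMLConfiguration cfg = new XMLConfiguration("turns.xml");

        // configurationAt() scopes key lookups to the first <node> element
        HierarchicalConfiguration nodeCfg = cfg.configurationAt("node");
        int id = nodeCfg.getInt("[@id]");                   // attribute of the scoped element -> 3
        int next = nodeCfg.getInt("next[@node]");           // attribute of a child element -> 4
        double p = nodeCfg.getDouble("next[@probability]"); // -> 0.7

        System.out.println(id + " " + next + " " + p);
    }
}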
From source file:playground.michalm.jtrrouter.transims.TransimsJTRRouter.java
protected void initFlow(HierarchicalConfiguration flowCfg) {
    int node = flowCfg.getInt("[@node]");
    int next = flowCfg.getInt("[@next]");
    int in = flowCfg.getInt("[@inParking]", -1);
    int out = flowCfg.getInt("[@outParking]", -1);

    int length = flowCfg.getMaxIndex("vehicle") + 1;
    int[] types = new int[length];
    int[] subTypes = new int[length];
    int[] nos = new int[length];

    for (int i = 0; i < length; i++) {
        Configuration vehCfg = flowCfg.subset("vehicle(" + i + ')');
        types[i] = vehCfg.getInt("[@type]");
        subTypes[i] = vehCfg.getInt("[@subtype]");
        nos[i] = vehCfg.getInt("[@no]");
    }

    flows[node] = new TransimsFlow(node, in, out, next, types, subTypes, nos);
}
From source file:se.liu.imt.SCT.SCTPostCoordTest.java
/**
 * @param args
 * @throws ClassNotFoundException
 * @throws IllegalAccessException
 * @throws InstantiationException
 * @throws ConfigurationException
 * @throws IOException
 * @throws OWLException
 */
public static void main(String[] args) throws InstantiationException, IllegalAccessException,
        ClassNotFoundException, IOException, ConfigurationException, OWLException {
    PropertyConfigurator.configure("log4j.properties");

    Configuration config = null;
    try {
        String configFileName = args[0];
        config = new XMLConfiguration(configFileName);
    } catch (ArrayIndexOutOfBoundsException e) {
        System.err.println("Requires configuration file argument");
        System.exit(-1);
    }

    final long maxTime = 1000 * 60 * 60 * config.getInt("test_parameters.max_time");

    log.debug("Starting test SNOMED CT post-coordination...");

    // creating ontology manager and loading of SNOMED CT stated form OWL file
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    OWLDataFactory dataFactory = manager.getOWLDataFactory();
    final String snomedFileName = config.getString("snomed.OWL_file");
    log.debug("Loading ontology file: " + snomedFileName);
    OWLOntology ontology = manager.loadOntologyFromOntologyDocument(new File(snomedFileName));
    log.debug("Loaded " + ontology.getOntologyID());

    // create the reasoner
    final String classifierName = config.getString("classifier.name");
    log.debug("Classifier name: " + classifierName);
    OWLReasonerFactory reasonerFactory = null;
    OWLReasoner reasoner = null;
    final String reasonerFactoryClassName = config.getString("classifier.reasoner_factory");
    log.debug("Reasoner factory class: " + reasonerFactoryClassName);
    if (classifierName.equalsIgnoreCase("hermit")) {
        reasonerFactory = new org.semanticweb.HermiT.Reasoner.ReasonerFactory();
    } else
        reasonerFactory = (OWLReasonerFactory) Class.forName(reasonerFactoryClassName).newInstance();
    reasoner = reasonerFactory.createReasoner(ontology);
    log.debug("Created reasoner");

    // SNOMED CT IRI string
    final String SNOMED_IRI = "http://snomed.info/id/";

    // create input file reader
    BufferedReader in = null;
    FileWriter out = null;
    try {
        final String inputFileName = config.getString("test_parameters.input");
        log.debug("Input file name: " + inputFileName);
        in = new BufferedReader(new FileReader(inputFileName));
        // read past header line
        in.readLine();
        // create output file
        final String outputFileName = config.getString("output.file_name_tag");
        out = new FileWriter(outputFileName + "_" + classifierName + "_" + (new Date()).toString());
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }

    // get current size in terms of class axioms
    int currentSize = ontology.getAxiomCount(AxiomType.SUBCLASS_OF)
            + ontology.getAxiomCount(AxiomType.EQUIVALENT_CLASSES);

    final int iterations = config.getInt("test_parameters.iterations");
    final int jumpSize = config.getInt("test_parameters.jump_size");
    final int tries = config.getInt("test_parameters.tries");
    log.debug("Iterations: " + iterations + ", Jump size: " + jumpSize + ", Tries: " + tries);

    long startTime = System.currentTimeMillis();

    // outer loop for iterations
    for (int i = 0; i <= iterations; i++) {
        // break if 24 hours has passed
        if (System.currentTimeMillis() - startTime > maxTime) {
            log.debug("Ending because time limit has been reached");
            break;
        }

        log.info("Current size: " + currentSize);

        long minTime = Long.MAX_VALUE;
        for (int j = 0; j < tries; j++) {
            long t1 = System.currentTimeMillis();

            // Ask the reasoner to do all the necessary work now
            log.debug("Start classifying...");
            reasoner.precomputeInferences(org.semanticweb.owlapi.reasoner.InferenceType.CLASS_HIERARCHY);
            reasoner.flush();

            // Do special things for special reasoners
            if (classifierName.equalsIgnoreCase("elk")) {
                // nothing special
            } else if (classifierName.equalsIgnoreCase("snorocket")) {
                //SnorocketOWLReasoner r = (SnorocketOWLReasoner) reasoner;
                //r.synchronizeSnorocket();
            } else if (classifierName.equalsIgnoreCase("fact++")) {
                // nothing special
            } else if (classifierName.equalsIgnoreCase("hermit")) {
                // nothing special
            }
            log.debug("Finished classifying");

            long time = System.currentTimeMillis() - t1;
            if (time < minTime)
                minTime = time;
            log.debug("Finished try: " + (j + 1) + ", Time: " + time);
        }
        log.debug("Finished classifying, Time: " + minTime);

        out.write("" + currentSize + "\t" + minTime + "\n");
        out.flush();

        log.debug("Adding stuff...");
        if (i < iterations) // add post-coordinated expressions
            for (int j = 0; j < jumpSize; j++) {
                String line = in.readLine();
                if (line == null)
                    break;
                String[] comp = line.split("\t");

                OWLClass new_pc_concept = dataFactory.getOWLClass(IRI.create("exp" + (i * jumpSize) + j));
                String baseConcept = comp[0];
                String bodyStructure = comp[1];
                String morphology = comp[2];

                OWLClass baseConceptClass = dataFactory.getOWLClass(IRI.create(SNOMED_IRI + baseConcept));
                OWLClass bodyStructureClass = dataFactory.getOWLClass(IRI.create(SNOMED_IRI + bodyStructure));
                OWLClass morphologyClass = dataFactory.getOWLClass(IRI.create(SNOMED_IRI + morphology));
                OWLObjectProperty roleGroupProp = dataFactory
                        .getOWLObjectProperty(IRI.create(SNOMED_IRI + "609096000"));
                OWLObjectProperty findingSiteProp = dataFactory
                        .getOWLObjectProperty(IRI.create(SNOMED_IRI + "363698007"));
                OWLObjectProperty morphologyProp = dataFactory
                        .getOWLObjectProperty(IRI.create(SNOMED_IRI + "116676008"));

                Set<OWLClassExpression> conceptSet = new HashSet<OWLClassExpression>();
                conceptSet.add(dataFactory.getOWLObjectSomeValuesFrom(findingSiteProp, bodyStructureClass));
                conceptSet.add(dataFactory.getOWLObjectSomeValuesFrom(morphologyProp, morphologyClass));

                OWLClassExpression expr = dataFactory.getOWLObjectIntersectionOf(baseConceptClass,
                        dataFactory.getOWLObjectSomeValuesFrom(roleGroupProp,
                                dataFactory.getOWLObjectIntersectionOf(conceptSet)));

                manager.applyChange(
                        new AddAxiom(ontology, dataFactory.getOWLEquivalentClassesAxiom(new_pc_concept, expr)));
            }
        currentSize += jumpSize;
    }

    out.close();
    log.debug("Finished test hopefully successfully");
}
From source file:uk.ac.ebi.fg.jobs.JobController.java
/**
 * Initializes data map for jobs and other objects
 *
 * @param similarityComponent
 * @param saxonEngine
 * @param configuration
 * @param experiments
 * @param xp
 * @param jobsController
 * @param lowPriorityOntologyURIs
 * @throws Exception
 */
public void init(ISimilarityComponent similarityComponent, IXPathEngine saxonEngine,
        Configuration configuration, List experiments, XPath xp, IJobsController jobsController,
        SortedSet<String> lowPriorityOntologyURIs) throws Exception {
    StaticSimilarityComponent.setComponent(similarityComponent);
    StaticJobController.setJobController(jobsController);
    this.jobsController = jobsController;

    Configuration properties = loadProperties(configuration);
    OntologyDistanceCalculator distanceCalculator = getOntologyDistanceCalculator(EFO.getEfo(),
            properties.getInt("max_ontology_distance"),
            properties.getString("persistence-location-distances"));

    dataMap.put("experiments", experiments);
    dataMap.put("experimentXPath", xp);
    dataMap.put("ontologyResults", ontologyResults);
    dataMap.put("properties", properties);
    dataMap.put("saxonEngine", saxonEngine);
    dataMap.put("pubMedRetriever", new PubMedRetriever(saxonEngine));
    dataMap.put("distanceCalculator", distanceCalculator);
    dataMap.put("expToPubMedIdMap", expToPubMedIdMap);
    dataMap.put("pubMedIdRelationMap", pubMedIdRelationMap);
    dataMap.put("pubMedResults", pubMedResults);
    dataMap.put("expToURIMap", expToURIMap);
    dataMap.put("lowPriorityOntologyURIs", lowPriorityOntologyURIs);
    dataMap.put("jobsController", jobsController);
    dataMap.put("pubMedNewIds", new TreeSet<String>());
}
From source file:uk.ac.ebi.fg.jobs.OntologySimilarityJob.java
public void doExecute(JobExecutionContext jobExecutionContext) throws JobExecutionException, InterruptedException {
    JobDataMap dataMap = jobExecutionContext.getJobDetail().getJobDataMap();
    Map<ExperimentId, SortedSet<EfoTerm>> smallMap = (Map<ExperimentId, SortedSet<EfoTerm>>) dataMap
            .get("smallMap");
    OntologyDistanceCalculator distanceCalculator = (OntologyDistanceCalculator) dataMap
            .get("distanceCalculator");
    Map<String, SortedSet<ExperimentId>> uriToExpMap = (ConcurrentHashMap<String, SortedSet<ExperimentId>>) dataMap
            .get("uriToExpMap");
    Map<ExperimentId, SortedSet<EfoTerm>> expToURIMap = (ConcurrentHashMap<ExperimentId, SortedSet<EfoTerm>>) dataMap
            .get("expToURIMap");
    Map<ExperimentId, SortedSet<ExperimentId>> ontologyResults = (ConcurrentHashMap<ExperimentId, SortedSet<ExperimentId>>) dataMap
            .get("ontologyResults");
    lowPriorityURIs = (SortedSet<String>) dataMap.get("lowPriorityOntologyURIs");
    int counter = (Integer) dataMap.get("counter");

    Configuration properties = (Configuration) dataMap.get("properties");
    final int maxOWLSimilarityCount = properties.getInt("max_displayed_OWL_similarities");
    final int smallExpAssayCountLimit = properties.getInt("small_experiment_assay_count_limit");
    final float minCalculatedOntologyDistance = properties.getFloat("minimal_calculated_ontology_distance");

    logger.info("Started " + (counter - smallMap.size()) + " - " + counter + " ontology similarity jobs");

    for (Map.Entry<ExperimentId, SortedSet<EfoTerm>> entry : smallMap.entrySet()) {
        ExperimentId experiment = entry.getKey();
        SortedSet<ExperimentId> resultExpSimilaritySet = new TreeSet<ExperimentId>();

        for (EfoTerm efoTerm : entry.getValue()) {
            Set<OntologySimilarityResult> similars = distanceCalculator.getSimilarNodes(efoTerm.getUri());
            if (null != similars) {
                for (OntologySimilarityResult ontologySimilarityResult : similars) {
                    int distance = ontologySimilarityResult.getDistance();
                    SortedSet<ExperimentId> similarExperiments = uriToExpMap
                            .get(ontologySimilarityResult.getURI());
                    if (similarExperiments != null) {
                        for (ExperimentId exp : similarExperiments) {
                            if (experiment.getSpecies().equals(exp.getSpecies()) && !experiment.equals(exp)) {
                                if (resultExpSimilaritySet.contains(exp)) {
                                    ExperimentId expClone = resultExpSimilaritySet.tailSet(exp).first().clone();
                                    resultExpSimilaritySet.remove(exp);
                                    resultExpSimilaritySet.add(
                                            setDistance(expClone, ontologySimilarityResult.getURI(), distance));
                                } else {
                                    ExperimentId expClone = exp.clone();
                                    resultExpSimilaritySet.add(
                                            setDistance(expClone, ontologySimilarityResult.getURI(), distance));
                                }
                            }
                        }
                    }
                }
            }
        }

        // store information for maximal score calculation
        ExperimentId experimentClone = experiment.clone();
        for (EfoTerm efoTerm : expToURIMap.get(experimentClone)) {
            if (lowPriorityURIs.contains(efoTerm.getUri()))
                experimentClone.setLowPriorityMatchCount(experimentClone.getLowPriorityMatchCount() + 1);
            else
                experimentClone.setDist0Count(experimentClone.getDist0Count() + 1);
            experimentClone.setNumbOfMatches(experimentClone.getNumbOfMatches() + 1);
        }

        ontologyResults.put(experimentClone, cleanResults(experimentClone, resultExpSimilaritySet,
                smallExpAssayCountLimit, maxOWLSimilarityCount, minCalculatedOntologyDistance, expToURIMap));

        // yield briefly between experiments; the original called
        // Thread.currentThread().wait(1), which throws
        // IllegalMonitorStateException when the monitor is not held
        Thread.sleep(1);
    }

    logger.info("Finished " + (counter - smallMap.size()) + " - " + counter + " ontology similarity jobs");
    smallMap.clear();
}
From source file:uk.ac.ebi.fg.jobs.OntologySimilarityWrapperJob.java
public void doExecute(JobExecutionContext jobExecutionContext) throws InterruptedException, SchedulerException {
    // get data from context
    JobDataMap dataMap = jobExecutionContext.getJobDetail().getJobDataMap();
    IJobsController jobsController = (IJobsController) dataMap.get("jobsController");
    OntologyDistanceCalculator distanceCalculator = (OntologyDistanceCalculator) dataMap
            .get("distanceCalculator");
    Map<ExperimentId, SortedSet<ExperimentId>> ontologyResults = (ConcurrentHashMap<ExperimentId, SortedSet<ExperimentId>>) dataMap
            .get("ontologyResults");
    Map<ExperimentId, SortedSet<EfoTerm>> expToURIMap = (ConcurrentHashMap<ExperimentId, SortedSet<EfoTerm>>) dataMap
            .get("expToURIMap");
    SortedSet<String> lowPriorityURIs = (SortedSet<String>) dataMap.get("lowPriorityOntologyURIs");
    Configuration properties = (Configuration) dataMap.get("properties");

    String jobGroup = properties.getString("quartz_job_group_name");
    int threadLimit = (properties.getInt("concurrent_job_limit") > 1)
            ? properties.getInt("concurrent_job_limit") - 1
            : 1; // leave 1 thread for pubmed calculations

    Map<ExperimentId, SortedSet<EfoTerm>> smallMap = new HashMap<ExperimentId, SortedSet<EfoTerm>>();
    int counter = 0;
    int separateAt = expToURIMap.size() / threadLimit + expToURIMap.size() % threadLimit;
    Map<String, SortedSet<ExperimentId>> uriToExpMap = reverseMap(expToURIMap);

    logger.info("Ontology jobs started");

    for (Map.Entry<ExperimentId, SortedSet<EfoTerm>> entry : expToURIMap.entrySet()) {
        smallMap.put(entry.getKey(), entry.getValue());
        ++counter;

        if (counter % separateAt == 0 || counter == expToURIMap.size()) {
            JobDetail ontologyJobDetail = newJob(OntologySimilarityJob.class)
                    .withIdentity("ontologySimilarityJob" + counter, jobGroup).storeDurably(false)
                    .requestRecovery(false).build();
            ontologyJobDetail.getJobDataMap().put("smallMap",
                    new HashMap<ExperimentId, SortedSet<EfoTerm>>(smallMap));
            ontologyJobDetail.getJobDataMap().put("distanceCalculator", distanceCalculator);
            ontologyJobDetail.getJobDataMap().put("uriToExpMap", uriToExpMap);
            ontologyJobDetail.getJobDataMap().put("expToURIMap", expToURIMap);
            ontologyJobDetail.getJobDataMap().put("ontologyResults", ontologyResults);
            ontologyJobDetail.getJobDataMap().put("lowPriorityOntologyURIs", lowPriorityURIs);
            ontologyJobDetail.getJobDataMap().put("counter", new Integer(counter));
            ontologyJobDetail.getJobDataMap().put("properties", properties);

            jobsController.addJob("ontologySimilarityJob" + counter, OntologySimilarityJob.class,
                    ontologyJobDetail);
            jobsController.executeJob("ontologySimilarityJob" + counter, jobGroup);

            // clear map
            smallMap.clear();
            separateAt = expToURIMap.size() / threadLimit;
        }
    }
}
From source file:uk.ac.ebi.fg.jobs.PubMedDataMinerJob.java
public void doExecute(JobExecutionContext jobExecutionContext) throws JobExecutionException, InterruptedException {
    JobDataMap dataMap = jobExecutionContext.getMergedJobDataMap();
    Set<String> pubMedNewIds = (Set<String>) dataMap.get("pubMedNewIds");
    ConcurrentHashMap<String, SortedSet<PubMedId>> pubMedIdRelationMap = (ConcurrentHashMap<String, SortedSet<PubMedId>>) dataMap
            .get("pubMedIdRelationMap");
    Configuration properties = (Configuration) dataMap.get("properties");
    AtomicInteger pubMedCounter = (AtomicInteger) dataMap.get("pubMedCounter");
    PubMedRetriever pubMedRetriever = (PubMedRetriever) dataMap.get("pubMedRetriever");
    String entry = (String) dataMap.get("entry");

    String pubMedURL = properties.getString("pub_med_url");
    int maxPubMedDist = properties.getInt("max_pubmed_distance");

    SortedSet<PubMedId> similarPublications = new TreeSet<PubMedId>();

    // add publication with distance 0
    similarPublications.add(new PubMedId(entry, 0));

    // get similar publications (distance 1)
    if (maxPubMedDist >= 1)
        similarPublications.addAll(getPubMedIdSet(pubMedRetriever.getSimilars(pubMedURL, entry), 1));

    // get publications with distance 2
    if (null != similarPublications && maxPubMedDist == 2) {
        SortedSet<PubMedId> iterationSet = new TreeSet<PubMedId>(similarPublications);
        for (PubMedId publication : iterationSet)
            similarPublications.addAll(
                    getPubMedIdSet(pubMedRetriever.getSimilars(pubMedURL, publication.getPublicationId()), 2));
    }

    if (!similarPublications.isEmpty())
        pubMedIdRelationMap.putIfAbsent(entry, similarPublications);

    // pause for 1 second between jobs; the original called
    // Thread.currentThread().wait(1000), which throws
    // IllegalMonitorStateException when the monitor is not held
    Thread.sleep(1000);

    logger.debug("Finished " + pubMedCounter.incrementAndGet() + " of " + pubMedNewIds.size()
            + " PubMedDataMinerJobs");
}