List of usage examples for the method `org.apache.commons.digester3.Digester.parse`.
public <T> T parse(URL url) throws IOException, SAXException
From source file:eu.scape_project.planning.xml.PlanMigrator.java
/** * Detect the version of the given XML representation of plans. If the * version of the XML representation is not up to date, necessary * transformations are applied.//from w w w . j a v a 2s. co m * * @param importData * @return null if the transformation fails, otherwise an up to date XML * representation * @throws IOException * if parsing the XML representation fails * @throws SAXException * if parsing the XML representation fails */ public String getCurrentVersionData(final InputStream in, final String tempPath, final List<String> appliedTransformations) throws PlatoException { String originalFile = tempPath + "_original.xml"; try { FileUtils.writeToFile(in, new FileOutputStream(originalFile)); /** check for the version of the file **/ // The version of the read xml file is unknown, so it is not possible to // validate it // moreover, in old plans the version attribute was on different // nodes(project, projects), // with a different name (fileVersion) // to be backwards compatible we create rules for all these attributes fileVersion = "xxx"; SAXParserFactory factory = SAXParserFactory.newInstance(); factory.setNamespaceAware(false); Digester d = new Digester(factory.newSAXParser()); d.setValidating(false); // StrictErrorHandler errorHandler = new StrictErrorHandler(); // d.setErrorHandler(errorHandler); d.push(this); // to read the version we have to support all versions: d.addSetProperties("*/projects", "version", "fileVersion"); // manually migrated projects may have the file version in the node // projects/project d.addSetProperties("*/projects/project", "version", "fileVersion"); // pre V1.3 version info was stored in the project node d.addSetProperties("*/project", "version", "fileVersion"); // since V1.9 the root node is plans: d.addSetProperties("plans", "version", "fileVersion"); InputStream inV = new FileInputStream(originalFile); d.parse(inV); inV.close(); /** this could be more sophisticated, but for now this is enough **/ String version = "1.0"; 
if (fileVersion != null) { version = fileVersion; } String fileTo = originalFile; String fileFrom = originalFile; boolean success = true; if ("xxx".equals(version)) { fileFrom = fileTo; fileTo = fileFrom + "_V1.3.xml"; /** this is an old export file, transform it to the 1.3 schema **/ success = transformXmlData(fileFrom, fileTo, "data/xslt/Vxxx-to-V1.3.xsl"); appliedTransformations.add("Vxxx-to-V1.3.xsl"); version = "1.3"; } if (success && "1.3".equals(version)) { fileFrom = fileTo; fileTo = fileFrom + "_V1.9.xml"; success = transformXmlData(fileFrom, fileTo, "data/xslt/V1.3-to-V1.9.xsl"); appliedTransformations.add("V1.3-to-V1.9.xsl"); version = "1.9"; } // with release of Plato 2.0 and its schema ProjectExporter creates // documents with version 2.0 if (success && "1.9".equals(version)) { version = "2.0"; } if (success && "2.0".equals(version)) { // transform the document to version 2.1 fileFrom = fileTo; fileTo = fileFrom + "_V2.1.xml"; success = transformXmlData(fileFrom, fileTo, "data/xslt/V2.0-to-V2.1.xsl"); appliedTransformations.add("V2.0-to-V2.1.xsl"); version = "2.1"; } if (success && "2.1".equals(version)) { // transform the document to version 2.1.2 fileFrom = fileTo; fileTo = fileFrom + "_V2.1.2.xml"; success = transformXmlData(fileFrom, fileTo, "data/xslt/V2.1-to-V2.1.2.xsl"); appliedTransformations.add("V2.1-to-V2.1.2.xsl"); version = "2.1.2"; } if (success && "2.1.1".equals(version)) { // transform the document to version 2.1.2 fileFrom = fileTo; fileTo = fileFrom + "_V2.1.2.xml"; success = transformXmlData(fileFrom, fileTo, "data/xslt/V2.1.1-to-V2.1.2.xsl"); appliedTransformations.add("V2.1.1-to-V2.1.2.xsl"); version = "2.1.2"; } if (success && "2.1.2".equals(version)) { // transform the document to version 3.0.0 fileFrom = fileTo; fileTo = fileFrom + "_V3.0.0.xml"; success = transformXmlData(fileFrom, fileTo, "data/xslt/V2.1.2-to-V3.0.0.xsl"); appliedTransformations.add("V2.1.2-to-V3.0.0.xsl"); version = "3.0.0"; } if (success && 
"3.0.0".equals(version)) { // transform the document to version 3.0.1 fileFrom = fileTo; fileTo = fileFrom + "_V3.0.1.xml"; success = transformXmlData(fileFrom, fileTo, "data/xslt/V3.0.0-to-V3.0.1.xsl"); appliedTransformations.add("V3.0.0-to-V3.0.1.xsl"); version = "3.0.1"; } if (success && "3.0.1".equals(version)) { // transform the document to version 3.9.0 fileFrom = fileTo; fileTo = fileFrom + "_V3.9.0.xml"; success = transformXmlData(fileFrom, fileTo, "data/xslt/V3.0.1-to-V3.9.0.xsl"); appliedTransformations.add("V3.0.1-to-V3.9.0.xsl"); version = "3.9.0"; } if (success && "3.9.0".equals(version)) { // transform the document to version 3.9.9 fileFrom = fileTo; fileTo = fileFrom + "_V3.9.9.xml"; success = transformXmlData(fileFrom, fileTo, "data/xslt/V3.9.0-to-V3.9.9.xsl"); appliedTransformations.add("V3.9.0-to-V3.9.9.xsl"); version = "3.9.9"; } if (success && "3.9.9".equals(version)) { // transform the document to version 4.0.0 fileFrom = fileTo; fileTo = fileFrom + "_V4.0.1.xml"; success = transformXmlData(fileFrom, fileTo, "data/xslt/V3.9.9-to-V4.0.1.xsl"); appliedTransformations.add("V3.9.9-to-V4.0.1.xsl"); version = "4.0.1"; } if (success && "4.0.1".equals(version)) { // transform the document to version 4.0.0 fileFrom = fileTo; fileTo = fileFrom + "_V4.0.2.xml"; success = transformXmlData(fileFrom, fileTo, "data/xslt/V4.0.1-to-V4.0.2.xsl"); appliedTransformations.add("V4.0.1-to-V4.0.2.xsl"); version = "4.0.2"; } if (success) { return fileTo; } else { return null; } } catch (Exception e) { throw new PlatoException("Failed to update plan to current version.", e); } }
From source file:com.app.server.WarDeployer.java
public void init(Vector serviceList, ServerConfig serverConfig, MBeanServer mbeanServer) { try {/*from w w w .jav a 2 s. c o m*/ this.serviceList = serviceList; this.serverConfig = serverConfig; this.mbeanServer = mbeanServer; this.scanDirectory = serverConfig.getDeploydirectory(); DigesterLoader serverdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() { protected void loadRules() { try { loadXMLRules(new InputSource(new FileInputStream("./config/executorservices-config.xml"))); } catch (Exception e) { log.error("Could not able to load config xml rules ./config/executorservices-config.xml", e); //e.printStackTrace(); } } }); serverdigester = serverdigesterLoader.newDigester(); } catch (Exception e1) { log.error("Could not create digester executorservices-config.xml", e1); //e1.printStackTrace(); } try { DigesterLoader serverdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() { protected void loadRules() { try { loadXMLRules(new InputSource(new FileInputStream("./config/messagingclass-rules.xml"))); } catch (FileNotFoundException e) { log.error("Could not able to load config xml rules ./config/messagingclass-rules.xml", e); //e.printStackTrace(); } } }); messagedigester = serverdigesterLoader.newDigester(); DigesterLoader messagingdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() { protected void loadRules() { // TODO Auto-generated method stub try { loadXMLRules(new InputSource(new FileInputStream("./config/messagingconfig-rules.xml"))); } catch (Exception e) { log.error("Could not able to load xml config file ./config/messagingclass-rules.xml", e); e.printStackTrace(); } } }); Digester messagingdigester = messagingdigesterLoader.newDigester(); messagingElem = (MessagingElem) messagingdigester .parse(new InputSource(new FileInputStream("./config/messaging.xml"))); synchronized (messagingElem) { ConcurrentHashMap randomQueue = messagingElem.randomQueue; Set<String> randomQueueSet = randomQueue.keySet(); 
Iterator<String> ite = randomQueueSet.iterator(); while (ite.hasNext()) { Queue queue = (Queue) randomQueue.get(ite.next()); ConcurrentHashMap randomqueuemap = (ConcurrentHashMap) messagingClassMap.get("RandomQueue"); if (randomqueuemap == null) { randomqueuemap = new ConcurrentHashMap(); messagingClassMap.put("RandomQueue", randomqueuemap); } CopyOnWriteArrayList randomqueuelist = (CopyOnWriteArrayList) randomqueuemap .get(queue.getQueuename()); if (randomqueuelist == null) randomqueuemap.put(queue.getQueuename(), new CopyOnWriteArrayList()); } ConcurrentHashMap roundrobinQueue = messagingElem.roundrobinQueue; Set<String> roundrobinQueueSet = roundrobinQueue.keySet(); ite = roundrobinQueueSet.iterator(); while (ite.hasNext()) { Queue queue = (Queue) roundrobinQueue.get(ite.next()); ConcurrentHashMap roundrobinqueuemap = (ConcurrentHashMap) messagingClassMap .get("RoundRobinQueue"); if (roundrobinqueuemap == null) { roundrobinqueuemap = new ConcurrentHashMap(); messagingClassMap.put("RoundRobinQueue", roundrobinqueuemap); } CopyOnWriteArrayList randomqueuelist = (CopyOnWriteArrayList) roundrobinqueuemap .get(queue.getQueuename()); if (randomqueuelist == null) roundrobinqueuemap.put(queue.getQueuename(), new CopyOnWriteArrayList()); } ConcurrentHashMap topicMap = messagingElem.topicMap; Set<String> topicSet = topicMap.keySet(); Iterator<String> iter = topicSet.iterator(); while (iter.hasNext()) { Topic topic = (Topic) topicMap.get(iter.next()); ConcurrentHashMap topicmap = (ConcurrentHashMap) messagingClassMap.get("Topic"); if (topicmap == null) { topicmap = new ConcurrentHashMap(); messagingClassMap.put("Topic", topicmap); } CopyOnWriteArrayList randomqueuelist = (CopyOnWriteArrayList) topicmap .get(topic.getTopicname()); if (randomqueuelist == null) topicmap.put(topic.getTopicname(), new CopyOnWriteArrayList()); } //log.info(messagingClassMap); } } catch (Exception e1) { log.error("", e1); //e1.printStackTrace(); } try { DigesterLoader serverdigesterLoader = 
DigesterLoader.newLoader(new FromXmlRulesModule() { protected void loadRules() { try { loadXMLRules(new InputSource(new FileInputStream("./config/webxml-rules.xml"))); } catch (FileNotFoundException e) { log.error("could not able to load xml config rules ./config/webxml-rules.xml", e); //e.printStackTrace(); } } }); webxmldigester = serverdigesterLoader.newDigester(); } catch (Exception ex) { log.error("could not able to create web.xml digester", ex); // ex.printStackTrace(); } log.info("initialized"); }
From source file:main.java.com.omicronlab.avro.PhoneticXmlLoader.java
/**
 * Builds the phonetic {@code Data} object by parsing the XML document at
 * {@code this.url} with a Commons Digester rule set.
 *
 * @return the populated Data instance
 * @throws IOException  if the XML source cannot be read
 * @throws SAXException if the XML source cannot be parsed
 */
public Data getData() throws IOException, SAXException {
    final Digester d = new Digester();
    d.setValidating(false);

    // Root object and its character-class properties.
    d.addObjectCreate("data", Data.class);
    d.addBeanPropertySetter("data/classes/vowel", "vowel");
    d.addBeanPropertySetter("data/classes/consonant", "consonant");
    d.addBeanPropertySetter("data/classes/punctuation", "punctuation");
    d.addBeanPropertySetter("data/classes/casesensitive", "casesensitive");

    // One Pattern per <pattern> element, with find/replace text.
    d.addObjectCreate("data/patterns/pattern", Pattern.class);
    d.addBeanPropertySetter("data/patterns/pattern/find", "find");
    d.addBeanPropertySetter("data/patterns/pattern/replace", "replace");

    // Rules nested inside a pattern.
    d.addObjectCreate("data/patterns/pattern/rules/rule", Rule.class);
    d.addBeanPropertySetter("data/patterns/pattern/rules/rule/replace", "replace");

    // Matches nested inside a rule, including their attributes.
    d.addObjectCreate("data/patterns/pattern/rules/rule/find/match", Match.class);
    d.addBeanPropertySetter("data/patterns/pattern/rules/rule/find/match", "value");
    d.addSetProperties("data/patterns/pattern/rules/rule/find/match", "type", "type");
    d.addSetProperties("data/patterns/pattern/rules/rule/find/match", "scope", "scope");

    // Wire the object tree together bottom-up.
    d.addSetNext("data/patterns/pattern/rules/rule/find/match", "addMatch");
    d.addSetNext("data/patterns/pattern/rules/rule", "addRule");
    d.addSetNext("data/patterns/pattern", "addPattern");

    return (Data) d.parse(this.url);
}
From source file:com.smartapps.avro.PhoneticXmlLoader.java
public Data getData() throws IOException, SAXException { Digester digester = new Digester(); digester.setValidating(false);/*from w w w . j a v a 2 s. co m*/ digester.addObjectCreate("data", Data.class); digester.addBeanPropertySetter("data/classes/vowel", "vowel"); digester.addBeanPropertySetter("data/classes/consonant", "consonant"); digester.addBeanPropertySetter("data/classes/punctuation", "punctuation"); digester.addBeanPropertySetter("data/classes/casesensitive", "casesensitive"); digester.addObjectCreate("data/patterns/pattern", Pattern.class); digester.addBeanPropertySetter("data/patterns/pattern/find", "find"); digester.addBeanPropertySetter("data/patterns/pattern/replace", "replace"); digester.addObjectCreate("data/patterns/pattern/rules/rule", Rule.class); digester.addBeanPropertySetter("data/patterns/pattern/rules/rule/replace", "replace"); digester.addObjectCreate("data/patterns/pattern/rules/rule/find/match", Match.class); digester.addBeanPropertySetter("data/patterns/pattern/rules/rule/find/match", "value"); digester.addSetProperties("data/patterns/pattern/rules/rule/find/match", "type", "type"); digester.addSetProperties("data/patterns/pattern/rules/rule/find/match", "scope", "scope"); digester.addSetNext("data/patterns/pattern/rules/rule/find/match", "addMatch"); digester.addSetNext("data/patterns/pattern/rules/rule", "addRule"); digester.addSetNext("data/patterns/pattern", "addPattern"); // Data data = (Data) digester.parse(this.url); // InputStreamReader isr = new InputStreamReader(is, "UTF-8"); Data data = (Data) digester.parse(is); return data; }
From source file:eu.scape_project.planning.xml.PlanParser.java
/**
 * Imports the XML representation of plans from the given input stream.
 *
 * After the Digester has parsed the stream into {@code plans}, each plan is
 * post-processed: values are linked to their scales, experiment uploads are
 * resolved against sample records, numeric transformer thresholds are
 * sanity-checked, and the recommendation is wired to its alternative. A plan
 * in state ANALYSED without a resolved recommendation is set back one state.
 *
 * @param in
 *            the input stream to read from
 * @return list of read plans
 * @throws PlatoException
 *             if the plan cannot be parsed
 */
public List<Plan> importProjects(final InputStream in) throws PlatoException {
    try {
        // Validating parser configured with all plan-related schemas.
        SAXParser parser = validatingParserFactory.getValidatingParser();
        parser.setProperty(ValidatingParserFactory.JAXP_SCHEMA_SOURCE, PlanXMLConstants.PLAN_SCHEMAS);
        Digester digester = new Digester(parser);
        SchemaResolver schemaResolver = new SchemaResolver();
        schemaResolver
                .addSchemaLocation(PlanXMLConstants.PLATO_SCHEMA_URI, PlanXMLConstants.PLATO_SCHEMA_LOCATION)
                .addSchemaLocation(PlanXMLConstants.PAP_SCHEMA_URI, PlanXMLConstants.PAP_SCHEMA_LOCATION)
                .addSchemaLocation(PlanXMLConstants.TAVERNA_SCHEMA_URI, PlanXMLConstants.TAVERNA_SCHEMA_LOCATION);
        digester.setEntityResolver(schemaResolver);
        digester.setErrorHandler(new StrictErrorHandler());
        digester.setNamespaceAware(true);
        digester.push(this);
        PlanParser.addRules(digester);
        digester.setUseContextClassLoader(true);
        plans = new ArrayList<Plan>();
        // finally parse the XML representation with all created rules
        digester.parse(in);
        for (Plan plan : plans) {
            String projectName = plan.getPlanProperties().getName();
            if ((projectName != null) && (!"".equals(projectName))) {
                /*
                 * establish links from values to scales. For all(!)
                 * alternatives: An alternative could have be discarded after
                 * some measurements have already been added.
                 */
                plan.getTree().initValues(plan.getAlternativesDefinition().getAlternatives(),
                        plan.getSampleRecordsDefinition().getRecords().size(), true);
                /*
                 * establish references of Experiment.uploads
                 */
                HashMap<String, SampleObject> records = new HashMap<String, SampleObject>();
                for (SampleObject record : plan.getSampleRecordsDefinition().getRecords()) {
                    records.put(record.getShortName(), record);
                }
                for (Alternative alt : plan.getAlternativesDefinition().getAlternatives()) {
                    if ((alt.getExperiment() != null) && (alt.getExperiment() instanceof ExperimentWrapper)) {
                        alt.setExperiment(((ExperimentWrapper) alt.getExperiment()).getExperiment(records));
                    }
                }
                // CHECK NUMERIC TRANSFORMER THRESHOLDS: log (but do not
                // reject) transformers whose thresholds are out of order.
                for (Leaf l : plan.getTree().getRoot().getAllLeaves()) {
                    eu.scape_project.planning.model.transform.Transformer t = l.getTransformer();
                    if (t != null && t instanceof NumericTransformer) {
                        NumericTransformer nt = (NumericTransformer) t;
                        if (!nt.checkOrder()) {
                            StringBuffer sb = new StringBuffer("NUMERICTRANSFORMER THRESHOLD ERROR ");
                            sb.append(l.getName()).append("::NUMERICTRANSFORMER:: ");
                            sb.append(nt.getThreshold1()).append(" ").append(nt.getThreshold2()).append(" ")
                                    .append(nt.getThreshold3()).append(" ").append(nt.getThreshold4())
                                    .append(" ").append(nt.getThreshold5());
                            log.error(sb.toString());
                        }
                    }
                }
                /*
                 * establish references to selected alternative
                 */
                HashMap<String, Alternative> alternatives = new HashMap<String, Alternative>();
                for (Alternative alt : plan.getAlternativesDefinition().getAlternatives()) {
                    alternatives.put(alt.getName(), alt);
                }
                if ((plan.getRecommendation() != null)
                        && (plan.getRecommendation() instanceof RecommendationWrapper)) {
                    plan.setRecommendation(
                            ((RecommendationWrapper) plan.getRecommendation()).getRecommendation(alternatives));
                }
                if ((plan.getPlanProperties().getState() == PlanState.ANALYSED)
                        && ((plan.getRecommendation() == null)
                                || (plan.getRecommendation().getAlternative() == null))) {
                    /*
                     * This project is NOT completely analysed
                     */
                    plan.getPlanProperties().setState(PlanState.valueOf(PlanState.ANALYSED.getValue() - 1));
                }
            } else {
                throw new PlatoException("Could not find any project data.");
            }
        }
    } catch (Exception e) {
        throw new PlatoException("Failed to import plans.", e);
    }
    return plans;
}
From source file:eu.scape_project.planning.xml.PlanParser.java
/** * Imports the XML representation of templates. * //from w w w . ja va2 s. com * @param in * the input stream to read from * @return a list of read templates. * @throws PlatoException * if the template cannot be parsed */ public List<TemplateTree> importTemplates(final InputStream in) throws PlatoException { try { Digester digester = new Digester(); // digester.setValidating(true); StrictErrorHandler errorHandler = new StrictErrorHandler(); digester.setErrorHandler(errorHandler); // At the moment XML files for template tree's are only used // internally, // later we will define a schema and use it also for validation digester.push(this); digester.addObjectCreate("*/template", TemplateTree.class); digester.addSetProperties("*/template"); digester.addSetRoot("*/template", "setTemplate"); // digester.addSetNext("*/template/name", "setName"); // digester.addSetNext("*/template/owner", "setOwner"); PlanParser.addTreeParsingRulesToDigester(digester); digester.addObjectCreate("*/template/node", Node.class); digester.addSetProperties("*/template/node"); digester.addSetNext("*/template/node", "addChild"); digester.setUseContextClassLoader(true); templates = new ArrayList<TemplateTree>(); digester.parse(in); // FIXME: /* * for (TemplateTree t : templates) { log.info(t.getName() + * t.getOwner()); } */ return templates; } catch (Exception e) { throw new PlatoException("Failed to parse template tree.", e); } }
From source file:com.dreikraft.axbo.sound.SoundPackageUtil.java
/**
 * Reads meta information from package-info.xml (as stream).
 *
 * @param packageInfoXmlStream the package-info.xml input stream
 * @return the sound package info read from the stream
 * @throws SoundPackageException encapsulates all low level (IO) exceptions
 */
public static SoundPackage readPackageInfo(InputStream packageInfoXmlStream) throws SoundPackageException {
    final Digester digester = new Digester();
    digester.setValidating(false);
    digester.setRules(new ExtendedBaseRules());

    // Frequently used element paths, concatenated once for readability.
    final String root = SoundPackageNodes.axboSounds.toString();
    final String security = root + SL + SoundPackageNodes.security;
    final String sounds = root + SL + SoundPackageNodes.sounds;
    final String sound = sounds + SL + SoundPackageNodes.sound;
    final String axboFile = sound + SL + SoundPackageNodes.axboFile;

    // Package-level properties.
    digester.addObjectCreate(root, SoundPackage.class);
    digester.addBeanPropertySetter(root + SL + SoundPackageNodes.packageName, "name");
    digester.addBeanPropertySetter(root + SL + SoundPackageNodes.creator, "creator");
    digester.addBeanPropertySetter(root + SL + SoundPackageNodes.creationDate, "creationDate");
    digester.addBeanPropertySetter(security + SL + SoundPackageNodes.serialNumber, "serialNumber");
    digester.addBeanPropertySetter(security + SL + SoundPackageNodes.enforced, "securityEnforced");

    // The <sounds> element maps to a list that is attached to the package.
    digester.addObjectCreate(sounds, ArrayList.class);
    digester.addSetNext(sounds, "setSounds");

    // Each <sound> element becomes a Sound added to that list.
    digester.addObjectCreate(sound, Sound.class);
    digester.addSetNext(sound, "add");
    digester.addSetProperties(sound, "id", "id");
    digester.addBeanPropertySetter(sound + SL + SoundPackageNodes.displayName, "name");

    // Nested <axboFile> element with its path and type.
    digester.addObjectCreate(axboFile, SoundFile.class);
    digester.addSetNext(axboFile, "setAxboFile");
    digester.addBeanPropertySetter(axboFile + SL + SoundPackageNodes.path);
    digester.addBeanPropertySetter(axboFile + SL + SoundPackageNodes.type);

    try {
        return (SoundPackage) digester.parse(packageInfoXmlStream);
    } catch (Exception ex) {
        throw new SoundPackageException(ex);
    }
}
From source file:com.web.server.WebServer.java
/**
 * Obtains the content executor which executes the executor services for the
 * requested resource. First the servlet/JSP mappings of the deployed web
 * application are consulted; if no mapping applies, the application's
 * executor configuration (WEB-INF/executor-config.xml) is parsed and the
 * matching executor (or, as a last resort, the class mapped to the resource)
 * is invoked.
 *
 * NOTE(review): buffer.toString() below throws a NullPointerException when
 * the executor returns null - confirm callers never hit that path.
 *
 * @param deployDirectory   root directory the applications are deployed to
 * @param resource          requested resource path (e.g. "/app/...")
 * @param httpHeaderClient  parsed HTTP request of the client
 * @param serverdigester    digester used to parse executor-config.xml
 * @param urlClassLoaderMap map from application path to its WebClassLoader
 * @param servletMapping    map from application path to its WebAppConfig
 * @param session           the HTTP session of the client
 * @return the response bytes, or null if the resource could not be resolved
 */
public byte[] ObtainContentExecutor(String deployDirectory, String resource,
        HttpHeaderClient httpHeaderClient, Digester serverdigester, Hashtable urlClassLoaderMap,
        ConcurrentHashMap servletMapping, com.web.server.HttpSessionServer session) {
    String[] resourcepath = resource.split("/");
    Method method = null;
    com.web.server.Executors serverconfig;
    if (resourcepath.length > 1) {
        try {
            ClassLoader oldCL = null;
            String urlresource = ObtainUrlFromResource(resourcepath);
            try {
                HttpSessionServer httpSession;
                logger.info(deployDirectory + "/" + resourcepath[1] + " "
                        + servletMapping.get(deployDirectory + "/" + resourcepath[1]));
                if (servletMapping.get(deployDirectory + "/" + resourcepath[1]) != null) {
                    WebAppConfig webAppConfig = (WebAppConfig) servletMapping
                            .get(deployDirectory + "/" + resourcepath[1]);
                    webAppConfig = webAppConfig.clone();
                    webAppConfig.setWebApplicationAbsolutePath(deployDirectory + "/" + resourcepath[1]);
                    WebClassLoader customClassLoader = null;
                    Class customClass = null;
                    customClassLoader = (WebClassLoader) urlClassLoaderMap
                            .get(deployDirectory + "/" + resourcepath[1]);
                    // Switch to the application's class loader for servlet/JSP
                    // loading; restored in the finally block below.
                    oldCL = Thread.currentThread().getContextClassLoader();
                    Thread.currentThread().setContextClassLoader(customClassLoader);
                    ConcurrentHashMap servletMappingsURL = webAppConfig.getServletMappingURL();
                    // Resolve the request path against the wildcard servlet
                    // mappings (first match wins).
                    Enumeration urlPattern = servletMappingsURL.keys();
                    while (urlPattern.hasMoreElements()) {
                        String pattern = (String) urlPattern.nextElement();
                        Pattern r = Pattern.compile(pattern.replace("*", "(.*)"));
                        Matcher m = r.matcher(urlresource);
                        if (m.find()) {
                            urlresource = pattern;
                            break;
                        }
                    }
                    // Find the filter chain (if any) mapped to the resource.
                    LinkedHashMap<String, Vector<FilterMapping>> filterMappings = webAppConfig
                            .getFilterMappingURL();
                    Set<String> filterMappingKeys = filterMappings.keySet();
                    Iterator<String> filterMappingRoller = filterMappingKeys.iterator();
                    Vector<FilterMapping> filterMapping = null;
                    while (filterMappingRoller.hasNext()) {
                        String pattern = (String) filterMappingRoller.next();
                        Pattern r = Pattern.compile(pattern.replace("*", "(.*)"));
                        Matcher m = r.matcher(urlresource);
                        if (m.find()) {
                            filterMapping = filterMappings.get(pattern);
                            break;
                        }
                    }
                    if (servletMappingsURL.get(urlresource) != null) {
                        ServletMapping servletMappings = (ServletMapping) servletMappingsURL.get(urlresource);
                        ConcurrentHashMap servlets = webAppConfig.getServlets();
                        Servlets servlet = (Servlets) servlets.get(servletMappings.getServletName());
                        HttpServlet httpServlet = null;
                        System.out.println("Session " + session);
                        // Servlet instances are cached per session under a
                        // "SERVLETNAME:" key; created and initialized on miss.
                        if (session.getAttribute("SERVLETNAME:" + deployDirectory + "/" + resourcepath[1]
                                + servletMappings.getServletName()) != null) {
                            httpServlet = (HttpServlet) session.getAttribute("SERVLETNAME:" + deployDirectory
                                    + "/" + resourcepath[1] + servletMappings.getServletName());
                            httpServlet.init();
                        } else {
                            Class servletClass = customClassLoader.loadClass(servlet.getServletClass());
                            httpServlet = (HttpServlet) servletClass.newInstance();
                            httpServlet.init(new WebServletConfig(servlet.getServletName().trim(),
                                    webAppConfig, customClassLoader));
                            httpServlet.init();
                            session.setAttribute("SERVLETNAME:" + deployDirectory + "/" + resourcepath[1]
                                    + servletMappings.getServletName(), httpServlet);
                        }
                        if (httpHeaderClient.getHttpMethod().trim().toUpperCase().equals("GET")
                                || httpHeaderClient.getHttpMethod().trim().toUpperCase().equals("POST")) {
                            Response response = new Response(httpHeaderClient);
                            // Rebuild the servlet path from the remaining path
                            // segments (everything after the application name).
                            StringBuffer servletPath = new StringBuffer();
                            if (resourcepath.length > 1) {
                                int pathcount = 0;
                                for (String servPath : resourcepath) {
                                    if (pathcount > 1) {
                                        servletPath.append("/");
                                        servletPath.append(servPath);
                                    }
                                    pathcount++;
                                }
                            }
                            String servletpath = servletPath.toString();
                            if (servletpath.length() == 0)
                                servletpath = "/";
                            Request request = new Request(httpHeaderClient, session, servletpath,
                                    customClassLoader);
                            if (filterMapping != null) {
                                WebFilterChain webFilterChain = new WebFilterChain(httpServlet, webAppConfig,
                                        filterMapping, customClassLoader);
                                webFilterChain.doFilter(request, response);
                            } else {
                                httpServlet.service(request, response);
                            }
                            response.flushBuffer();
                            return response.getResponse();
                        }
                    } else {
                        // No servlet mapping: try the precompiled JSP class
                        // registered for this resource in the class loader.
                        if (customClassLoader != null) {
                            Map map = customClassLoader.classMap;
                            if (map.get(urlresource) != null) {
                                Class jspBaseCls = customClassLoader
                                        .loadClass((String) map.get(urlresource));
                                HttpJspBase jspBase = (HttpJspBase) jspBaseCls.newInstance();
                                WebServletConfig servletConfig = new WebServletConfig();
                                servletConfig.getServletContext().setAttribute(
                                        "org.apache.tomcat.InstanceManager",
                                        new WebInstanceManager(urlresource));
                                jspBase.init(servletConfig);
                                jspBase._jspInit();
                                Response response = new Response(httpHeaderClient);
                                StringBuffer servletPath = new StringBuffer();
                                if (resourcepath.length > 1) {
                                    int pathcount = 0;
                                    for (String servPath : resourcepath) {
                                        if (pathcount > 1) {
                                            servletPath.append("/");
                                            servletPath.append(servPath);
                                        }
                                        pathcount++;
                                    }
                                }
                                String servletpath = servletPath.toString();
                                if (servletpath.length() == 0)
                                    servletpath = "/";
                                jspBase._jspService(new Request(httpHeaderClient, session, servletpath,
                                        customClassLoader), response);
                                jspBase.destroy();
                                response.flushBuffer();
                                return response.getResponse();
                            }
                        }
                    }
                }
            } catch (Exception ex) {
                ex.printStackTrace();
            } finally {
                if (oldCL != null) {
                    Thread.currentThread().setContextClassLoader(oldCL);
                }
            }
            // Fall back to the executor configuration of the application.
            File file = new File(deployDirectory + "/" + resourcepath[1] + "/WEB-INF/executor-config.xml");
            if (!file.exists()) {
                return null;
            }
            WebClassLoader customClassLoader = (WebClassLoader) urlClassLoaderMap
                    .get(deployDirectory + "/" + resourcepath[1]);
            Class customClass = null;
            if ((file.isFile() && file.exists())) {
                // The shared digester is not thread safe; serialize parsing.
                synchronized (serverdigester) {
                    serverconfig = (com.web.server.Executors) serverdigester.parse(file);
                }
                ConcurrentHashMap urlMap = serverconfig.getExecutorMap();
                Executor executor = (Executor) urlMap.get(urlresource);
                if (executor != null && customClassLoader != null) {
                    customClass = customClassLoader.loadClass(executor.getExecutorclass());
                    ExecutorInterface executorInstance = (ExecutorInterface) customClass.newInstance();
                    Object buffer = null;
                    if (httpHeaderClient.getHttpMethod().trim().toUpperCase().equals("GET")) {
                        buffer = executorInstance.doGet(httpHeaderClient);
                    } else if (httpHeaderClient.getHttpMethod().trim().toUpperCase().equals("POST")) {
                        buffer = executorInstance.doPost(httpHeaderClient);
                    }
                    if (executor.getResponseResource() != null) {
                        // Render the configured response resource (a JSP)
                        // with the executor result stored on the request.
                        httpHeaderClient.setExecutorBuffer(buffer);
                        String resourceClass = (String) customClassLoader.getClassMap()
                                .get(executor.getResponseResource().trim());
                        customClass = customClassLoader.loadClass(resourceClass);
                        HttpJspBase jspBase = (HttpJspBase) customClass.newInstance();
                        WebServletConfig servletConfig = new WebServletConfig();
                        servletConfig.getServletContext().setAttribute("org.apache.tomcat.InstanceManager",
                                new WebInstanceManager(urlresource));
                        jspBase.init(servletConfig);
                        jspBase._jspInit();
                        Response response = new Response(httpHeaderClient);
                        jspBase._jspService(new Request(httpHeaderClient, session, null, customClassLoader),
                                response);
                        jspBase.destroy();
                        response.flushBuffer();
                        return response.getResponse();
                    }
                    return buffer.toString().getBytes();
                }
            } else if (customClassLoader != null) {
                String resourceClass = (String) customClassLoader.getClassMap().get(urlresource);
                if (resourceClass == null)
                    return null;
                customClass = customClassLoader.loadClass(resourceClass);
                ExecutorInterface executorInstance = (ExecutorInterface) customClass.newInstance();
                Object buffer = executorInstance.doGet(httpHeaderClient);
                return buffer.toString().getBytes();
            }
        } catch (IOException | SAXException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
        } catch (SecurityException e) {
            e.printStackTrace();
        } catch (InstantiationException e) {
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return null;
}
From source file:org.apache.commons.digester3.examples.api.addressbook.Main.java
/** * Main method : entry point for running this example program. * <p>/* w w w .j a va 2 s . c o m*/ * Usage: java Example example.xml */ public static void main(String[] args) { if (args.length != 1) { usage(); System.exit(-1); } String filename = args[0]; // Create a Digester instance Digester d = new Digester(); // Prime the digester stack with an object for rules to // operate on. Note that it is quite common for "this" // to be the object pushed. AddressBook book = new AddressBook(); d.push(book); // Add rules to the digester that will be triggered while // parsing occurs. addRules(d); // Process the input file. try { java.io.File srcfile = new java.io.File(filename); d.parse(srcfile); } catch (java.io.IOException ioe) { System.out.println("Error reading input file:" + ioe.getMessage()); System.exit(-1); } catch (org.xml.sax.SAXException se) { System.out.println("Error parsing input file:" + se.getMessage()); System.exit(-1); } // Print out all the contents of the address book, as loaded from // the input file. book.print(); }
From source file:org.apache.commons.digester3.examples.api.catalog.Main.java
/** * Main method : entry point for running this example program. * <p>/* w w w . j av a2 s . c o m*/ * Usage: java CatalogDigester example.xml */ public static void main(String[] args) { if (args.length != 1) { usage(); System.exit(-1); } String filename = args[0]; // Create a Digester instance Digester d = new Digester(); // Add rules to the digester that will be triggered while // parsing occurs. addRules(d); // Process the input file. try { java.io.Reader reader = getInputData(filename); d.parse(reader); } catch (java.io.IOException ioe) { System.out.println("Error reading input file:" + ioe.getMessage()); System.exit(-1); } catch (org.xml.sax.SAXException se) { System.out.println("Error parsing input file:" + se.getMessage()); System.exit(-1); } // Get the first object created by the digester's rules // (the "root" object). Note that this is exactly the same object // returned by the Digester.parse method; either approach works. Catalog catalog = (Catalog) d.getRoot(); // Print out all the contents of the catalog, as loaded from // the input file. catalog.print(); }