Usage examples for the `parse` method of `org.apache.commons.digester3.Digester`.
public <T> T parse(URL url) throws IOException, SAXException
From source file:org.apache.commons.digester3.examples.api.dbinsert.Main.java
/** * Main method : entry point for running this example program. * <p>//ww w . jav a 2 s .c om * Usage: java Main example.xml */ public static void main(String[] args) { if (args.length != 1) { usage(); System.exit(-1); } String filename = args[0]; // Create a Digester instance Digester d = new Digester(); // Here you would establish a real connection. // There would also be a finally clause to ensure it is // closed after parsing terminates, etc. Connection connection = null; // Add rules to the digester that will be triggered while // parsing occurs. addRules(d, connection); // Process the input file. System.out.println("Parsing commencing..."); try { File srcfile = new File(filename); d.parse(srcfile); } catch (IOException ioe) { System.out.println("Error reading input file:" + ioe.getMessage()); System.exit(-1); } catch (SAXException se) { System.out.println("Error parsing input file:" + se.getMessage()); System.exit(-1); } // And here there is nothing to do. The digester rules have // (deliberately) not built a representation of the input, but // instead processed the data as it was read. System.out.println("Parsing complete."); }
From source file:org.apache.commons.digester3.examples.plugins.pipeline.Pipeline.java
public static void main(String[] args) { if (args.length != 1) { System.err.println("usage: pipeline config-file"); System.exit(-1);//from ww w. ja v a2s . co m } String configFile = args[0]; Digester digester = new Digester(); PluginRules rc = new PluginRules(); digester.setRules(rc); digester.addObjectCreate("pipeline", Pipeline.class); digester.addCallMethod("pipeline/source", "setSource", 1); digester.addCallParam("pipeline/source", 0, "file"); PluginCreateRule pcr = new PluginCreateRule(Transform.class); digester.addRule("pipeline/transform", pcr); digester.addSetNext("pipeline/transform", "setTransform"); digester.addCallMethod("pipeline/destination", "setDest", 1); digester.addCallParam("pipeline/destination", 0, "file"); Pipeline pipeline = null; try { pipeline = digester.parse(configFile); } catch (Exception e) { System.err.println("oops exception occurred during parse."); e.printStackTrace(); System.exit(-1); } try { pipeline.execute(); } catch (Exception e) { System.err.println("oops exception occurred during pipeline execution."); e.printStackTrace(); System.exit(-1); } }
From source file:org.apache.commons.digester3.examples.xmlrules.addressbook.Main.java
/** * Main method : entry point for running this example program. * <p>/*from w ww. ja v a2s . c o m*/ * Usage: java Example example.xml */ public static void main(String[] args) throws Exception { if (args.length != 2) { usage(); System.exit(-1); } final String rulesfileName = args[0]; String datafileName = args[1]; // Create a Digester instance which has been initialised with // rules loaded from the specified file. Digester d = newLoader(new FromXmlRulesModule() { @Override protected void loadRules() { loadXMLRules(rulesfileName); } }).newDigester(); // Prime the digester stack with an object for rules to // operate on. Note that it is quite common for "this" // to be the object pushed. AddressBook book = new AddressBook(); d.push(book); // Process the input file. try { File srcfile = new java.io.File(datafileName); d.parse(srcfile); } catch (IOException ioe) { System.out.println("Error reading input file:" + ioe.getMessage()); System.exit(-1); } catch (SAXException se) { System.out.println("Error parsing input file:" + se.getMessage()); System.exit(-1); } // Print out all the contents of the address book, as loaded from // the input file. book.print(); }
From source file:org.apache.hadoop.gateway.descriptor.xml.XmlGatewayDescriptorImporter.java
/**
 * Parses a gateway descriptor from the given XML reader.
 * Validation is disabled; SAX problems are surfaced as IOExceptions.
 */
@Override
public GatewayDescriptor load(Reader reader) throws IOException {
    Digester digester = loader.newDigester(new ExtendedBaseRules());
    digester.setValidating(false);
    try {
        return digester.parse(reader);
    } catch (SAXException e) {
        throw new IOException(e);
    }
}
From source file:org.apache.hadoop.gateway.filter.rewrite.impl.xml.XmlUrlRewriteRulesImporter.java
@Override public UrlRewriteRulesDescriptor load(Reader reader) throws IOException { Digester digester = loader.newDigester(new ExtendedBaseRules()); digester.setValidating(false);// ww w .ja v a 2 s . co m try { UrlRewriteRulesDescriptor rules = digester.parse(reader); return rules; } catch (SAXException e) { throw new IOException(e); } }
From source file:org.apache.hadoop.gateway.services.topology.impl.DefaultTopologyService.java
/**
 * Attempts to parse a topology from the given file.
 *
 * @param file the topology XML file to load
 * @return the parsed Topology, or null if the file produced no builder
 *         (e.g. an empty document)
 * @throws IOException if the file cannot be read
 * @throws SAXException if the file cannot be parsed
 * @throws URISyntaxException if the file URI is malformed
 */
private Topology loadTopologyAttempt(File file) throws IOException, SAXException, URISyntaxException {
    Digester digester = digesterLoader.newDigester();
    TopologyBuilder topologyBuilder;
    // Fix: the input stream was previously never closed (Digester does not
    // close the stream it parses); try-with-resources prevents the leak.
    // Fully-qualified type avoids requiring a new import.
    try (java.io.InputStream in = FileUtils.openInputStream(file)) {
        topologyBuilder = digester.parse(in);
    }
    if (null == topologyBuilder) {
        return null;
    }
    Topology topology = topologyBuilder.build();
    topology.setUri(file.toURI());
    topology.setName(FilenameUtils.removeExtension(file.getName()));
    topology.setTimestamp(file.lastModified());
    return topology;
}
From source file:org.apache.hadoop.gateway.topology.file.FileTopologyProvider.java
/**
 * Loads a topology from the given VFS file object.
 *
 * @param file the topology file to load
 * @return the parsed Topology, populated with uri, name and timestamp
 * @throws IOException if the file content cannot be read
 * @throws SAXException if the content cannot be parsed
 * @throws URISyntaxException if the file URL cannot be converted to a URI
 */
private static Topology loadTopology(FileObject file) throws IOException, SAXException, URISyntaxException {
    log.loadingTopologyFile(file.getName().getFriendlyURI());
    Digester digester = digesterLoader.newDigester();
    FileContent content = file.getContent();
    TopologyBuilder topologyBuilder;
    // Fix: the input stream was previously never closed (Digester does not
    // close the stream it parses); try-with-resources prevents the leak.
    // Fully-qualified type avoids requiring a new import.
    try (java.io.InputStream in = content.getInputStream()) {
        topologyBuilder = digester.parse(in);
    }
    Topology topology = topologyBuilder.build();
    topology.setUri(file.getURL().toURI());
    topology.setName(FilenameUtils.removeExtension(file.getName().getBaseName()));
    topology.setTimestamp(content.getLastModifiedTime());
    return topology;
}
From source file:org.apache.hadoop.gateway.topology.xml.TopologyRulesModuleTest.java
@Test public void testParseSimpleTopologyXmlInKnoxFormat() throws IOException, SAXException, URISyntaxException { Digester digester = loader.newDigester(); String name = "org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml"; URL url = ClassLoader.getSystemResource(name); assertThat("Failed to find URL for resource " + name, url, notNullValue()); File file = new File(url.getFile()); TopologyBuilder topologyBuilder = digester.parse(url); Topology topology = topologyBuilder.build(); assertThat("Failed to parse resource " + name, topology, notNullValue()); topology.setTimestamp(file.lastModified()); assertThat(topology.getName(), is("topology")); assertThat(topology.getTimestamp(), is(file.lastModified())); assertThat(topology.getServices().size(), is(1)); Service comp = topology.getServices().iterator().next(); assertThat(comp, notNullValue());//from w ww . j a v a 2 s . c o m assertThat(comp.getRole(), is("WEBHDFS")); assertThat(comp.getUrl(), is("http://host:80/webhdfs")); Provider provider = topology.getProviders().iterator().next(); assertThat(provider, notNullValue()); assertThat(provider.isEnabled(), is(true)); assertThat(provider.getRole(), is("authentication")); assertThat(provider.getParams().size(), is(5)); }
From source file:org.apache.hadoop.gateway.topology.xml.TopologyRulesModuleTest.java
/**
 * Verifies that an Ambari/Hadoop-format topology config parses into the
 * expected four services (WEBHDFS, WEBHCAT, OOZIE, HIVE) and two providers.
 */
@Test
public void testParseSimpleTopologyXmlInHadoopFormat() throws IOException, SAXException, URISyntaxException {
    Digester digester = loader.newDigester();
    String resourceName = "org/apache/hadoop/gateway/topology/xml/simple-topology-ambari-format.conf";
    URL resourceUrl = ClassLoader.getSystemResource(resourceName);
    assertThat("Failed to find URL for resource " + resourceName, resourceUrl, notNullValue());
    File resourceFile = new File(resourceUrl.getFile());

    TopologyBuilder builder = digester.parse(resourceUrl);
    Topology topology = builder.build();
    assertThat("Failed to parse resource " + resourceName, topology, notNullValue());
    topology.setTimestamp(resourceFile.lastModified());

    assertThat(topology.getName(), is("topology2"));
    assertThat(topology.getTimestamp(), is(resourceFile.lastModified()));
    assertThat(topology.getServices().size(), is(4));
    assertThat(topology.getProviders().size(), is(2));

    Service webhdfsService = topology.getService("WEBHDFS", null);
    assertThat(webhdfsService, notNullValue());
    assertThat(webhdfsService.getRole(), is("WEBHDFS"));
    assertThat(webhdfsService.getName(), nullValue());
    assertThat(webhdfsService.getUrl(), is("http://host:50070/webhdfs"));

    Service webhcatService = topology.getService("WEBHCAT", null);
    assertThat(webhcatService, notNullValue());
    assertThat(webhcatService.getRole(), is("WEBHCAT"));
    assertThat(webhcatService.getName(), nullValue());
    assertThat(webhcatService.getUrl(), is("http://host:50111/templeton"));

    Service oozieService = topology.getService("OOZIE", null);
    assertThat(oozieService, notNullValue());
    assertThat(oozieService.getRole(), is("OOZIE"));
    assertThat(oozieService.getName(), nullValue());
    assertThat(oozieService.getUrl(), is("http://host:11000/oozie"));

    Service hiveService = topology.getService("HIVE", null);
    assertThat(hiveService, notNullValue());
    assertThat(hiveService.getRole(), is("HIVE"));
    assertThat(hiveService.getName(), nullValue());
    assertThat(hiveService.getUrl(), is("http://host:10000"));

    Provider authenticationProvider = topology.getProvider("authentication", "ShiroProvider");
    assertThat(authenticationProvider, notNullValue());
    assertThat(authenticationProvider.isEnabled(), is(true));
    assertThat(authenticationProvider.getRole(), is("authentication"));
    assertThat(authenticationProvider.getName(), is("ShiroProvider"));
    assertThat(authenticationProvider.getParams().size(), is(5));
    assertThat(authenticationProvider.getParams().get("main.ldapRealm.contextFactory.url"),
            is("ldap://localhost:33389"));

    Provider identityAssertionProvider = topology.getProvider("identity-assertion", "Pseudo");
    assertThat(identityAssertionProvider, notNullValue());
    assertThat(identityAssertionProvider.isEnabled(), is(false));
    assertThat(identityAssertionProvider.getRole(), is("identity-assertion"));
    assertThat(identityAssertionProvider.getName(), is("Pseudo"));
    assertThat(identityAssertionProvider.getParams().size(), is(2));
    assertThat(identityAssertionProvider.getParams().get("name"), is("user.name"));
}
From source file:org.azkfw.business.logic.LogicManager.java
/**
 * Loads logic definitions from the given XML stream and registers them
 * under the given namespace.
 * <p>
 * The XML is expected to contain {@code azuki/logics/logic} elements whose
 * attributes populate {@link LogicEntity} beans. For each entity, the logic
 * class is resolved, its optional {@code @PropertyFile} properties are read,
 * and the resulting {@link LogicData} is stored in the namespace map.
 *
 * @param aNamespace namespace to register the loaded logics under
 * @param aStream input stream containing the logic definition XML
 * @param aContext context used to resolve logic property files
 * @throws BusinessServiceException if a logic name is duplicated, a logic
 *         class cannot be found, or a declared property file is missing
 * @throws IOException if the XML stream cannot be read or parsed
 */
@SuppressWarnings("unchecked")
private void doLoad(final String aNamespace, final InputStream aStream, final Context aContext)
        throws BusinessServiceException, IOException {
    List<LogicEntity> logicList = null;
    try {
        Digester digester = new Digester();
        digester.addObjectCreate("azuki/logics", ArrayList.class);
        digester.addObjectCreate("azuki/logics/logic", LogicEntity.class);
        digester.addSetProperties("azuki/logics/logic");
        digester.addSetNext("azuki/logics/logic", "add");
        logicList = digester.parse(aStream);
    } catch (SAXException ex) {
        error(ex);
        throw new IOException(ex);
    } catch (IOException ex) {
        error(ex);
        // Fix: rethrow directly instead of wrapping an IOException in
        // another IOException, which only obscured the original.
        throw ex;
    }

    // Reuse the namespace's existing map if present, otherwise start fresh.
    Map<String, LogicData> m;
    if (logics.containsKey(aNamespace)) {
        m = logics.get(aNamespace);
    } else {
        m = new HashMap<String, LogicData>();
    }

    for (LogicEntity logic : logicList) {
        // Consistency fix: use the getName() accessor like the rest of the
        // method, instead of direct field access (logic.name).
        info("Logic loading.[" + logic.getName() + "]");
        if (m.containsKey(logic.getName())) {
            throw new BusinessServiceException("Duplicate logic name.[" + logic.getName() + "]");
        }
        try {
            LogicData data = new LogicData();
            Class<Logic> clazz = (Class<Logic>) Class.forName(logic.getLogic());

            // Collect properties declared via the @PropertyFile annotation,
            // if the logic class carries one.
            Map<String, Object> properties = new HashMap<String, Object>();
            PropertyFile propertyFile = clazz.getAnnotation(PropertyFile.class);
            if (null != propertyFile) {
                String property = propertyFile.value();
                if (StringUtility.isNotEmpty(property)) {
                    InputStream is = aContext.getResourceAsStream(property);
                    if (null != is) {
                        // Fix: the property stream was previously never
                        // closed; ensure it is released after loading.
                        try {
                            Properties p = new Properties();
                            p.load(is);
                            for (String key : p.stringPropertyNames()) {
                                properties.put(key, p.getProperty(key));
                            }
                        } finally {
                            is.close();
                        }
                    } else {
                        throw new BusinessServiceException("Not found logic property file.[" + property + "]");
                    }
                }
            }

            data.setLogic(clazz);
            data.setProperties(properties);
            data.setEntity(logic);
            m.put(logic.getName(), data);
        } catch (ClassNotFoundException ex) {
            error(ex);
            throw new BusinessServiceException(ex);
        }
    }
    logics.put(aNamespace, m);
}