List of usage examples for javax.xml.xpath.XPath#compile(String)
public XPathExpression compile(String expression) throws XPathExpressionException;
From source file:cc.siara.csv_ml_demo.MultiLevelCSVSwingDemo.java
/** * Evaluates given XPath from Input box against Document generated by * parsing csv_ml in input box and sets value or node list to output box. */// www . j av a2 s.c om private void processXPath() { XPath xpath = XPathFactory.newInstance().newXPath(); Document doc = parseInputToDOM(); if (doc == null) return; StringBuffer out_str = new StringBuffer(); try { XPathExpression expr = xpath.compile(tfXPath.getText()); try { Document outDoc = Util.parseXMLToDOM("<output></output>"); Element rootElement = outDoc.getDocumentElement(); NodeList ret = (NodeList) expr.evaluate(doc, XPathConstants.NODESET); for (int i = 0; i < ret.getLength(); i++) { Object o = ret.item(i); if (o instanceof String) { out_str.append(o); } else if (o instanceof Node) { Node n = (Node) o; short nt = n.getNodeType(); switch (nt) { case Node.TEXT_NODE: case Node.ATTRIBUTE_NODE: case Node.CDATA_SECTION_NODE: // Only one value gets // evaluated? if (out_str.length() > 0) out_str.append(','); if (nt == Node.ATTRIBUTE_NODE) out_str.append(n.getNodeValue()); else out_str.append(n.getTextContent()); break; case Node.ELEMENT_NODE: rootElement.appendChild(outDoc.importNode(n, true)); break; } } } if (out_str.length() > 0) { rootElement.setTextContent(out_str.toString()); out_str.setLength(0); } out_str.append(Util.docToString(outDoc, true)); } catch (Exception e) { // Thrown most likely because the given XPath evaluates to a // string out_str.append(expr.evaluate(doc)); } } catch (XPathExpressionException e) { e.printStackTrace(); } taOutput.setText(out_str.toString()); tfOutputSize.setText(String.valueOf(out_str.length())); }
From source file:org.apache.zeppelin.sap.universe.UniverseClient.java
public Map<String, UniverseNodeInfo> getUniverseNodesInfo(String token, String universeName) throws UniverseException { UniverseInfo universeInfo = universesMap.get(universeName); if (universeInfo != null && StringUtils.isNotBlank(universeInfo.getId())) { Map<String, UniverseNodeInfo> universeNodeInfoMap = universeInfosMap.get(universeName); if (universeNodeInfoMap != null && universesInfoUpdatedMap.containsKey(universeName) && !isExpired(universesInfoUpdatedMap.get(universeName))) { return universeNodeInfoMap; } else {//from w ww . ja va 2 s .c o m universeNodeInfoMap = new HashMap<>(); } try { HttpGet httpGet = new HttpGet( String.format("%s%s%s", apiUrl, "/sl/v1/universes/", universeInfo.getId())); setHeaders(httpGet, token); HttpResponse response = httpClient.execute(httpGet); if (response.getStatusLine().getStatusCode() == 200) { try (InputStream xmlStream = response.getEntity().getContent()) { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = factory.newDocumentBuilder(); Document doc = builder.parse(xmlStream); XPathFactory xPathfactory = XPathFactory.newInstance(); XPath xpath = xPathfactory.newXPath(); XPathExpression expr = xpath.compile("//outline/folder"); XPathExpression exprRootItems = xpath.compile("//outline/item"); NodeList universeInfoNodes = (NodeList) expr.evaluate(doc, XPathConstants.NODESET); NodeList universeRootInfoNodes = (NodeList) exprRootItems.evaluate(doc, XPathConstants.NODESET); if (universeInfoNodes != null) { parseUniverseInfo(universeInfoNodes, universeNodeInfoMap); } if (universeRootInfoNodes != null) { parseUniverseInfo(universeRootInfoNodes, universeNodeInfoMap); } } catch (Exception e) { throw new UniverseException(String.format(errorMessageTemplate, "UniverseClient " + "(get universe nodes info): Response processing failed", ExceptionUtils.getStackTrace(e))); } } } catch (IOException e) { throw new UniverseException(String.format(errorMessageTemplate, "UniverseClient " + "(get 
universe nodes info): Request failed", ExceptionUtils.getStackTrace(e))); } universeInfosMap.put(universeName, universeNodeInfoMap); universesInfoUpdatedMap.put(universeName, System.currentTimeMillis()); return universeNodeInfoMap; } return Collections.emptyMap(); }
From source file:betullam.xmlmodifier.XMLmodifier.java
private boolean isModsMets(Document xmlDoc) { boolean isMets = true; XPath xPath = XPathFactory.newInstance().newXPath(); XPathExpression xPathExpression; // Check for the goobi mods extension. If we find it, the XML document is not a classical MODS/METS document: try {//from www . j a v a 2 s . c om xPathExpression = xPath.compile("/mets/dmdSec/mdWrap/xmlData/mods/extension/goobi"); NodeList nodeList = (NodeList) xPathExpression.evaluate(xmlDoc, XPathConstants.NODESET); if (nodeList.getLength() > 0) { isMets = false; } } catch (XPathExpressionException e) { e.printStackTrace(); } return isMets; }
From source file:cc.siara.csv_ml_demo.MainActivity.java
/** * Evaluates given XPath from Input box against Document generated by * parsing csv_ml in input box and sets value or node list to output box. *//* ww w . java 2s. c om*/ void processXPath() { EditText etInput = (EditText) findViewById(R.id.etInput); EditText etXPath = (EditText) findViewById(R.id.etXPath); CheckBox cbPretty = (CheckBox) findViewById(R.id.cbPretty); XPath xpath = XPathFactory.newInstance().newXPath(); MultiLevelCSVParser parser = new MultiLevelCSVParser(); Document doc = null; try { doc = parser.parseToDOM(new StringReader(etInput.getText().toString()), false); } catch (IOException e1) { e1.printStackTrace(); } if (doc == null) return; StringBuffer out_str = new StringBuffer(); try { XPathExpression expr = xpath.compile(etXPath.getText().toString()); try { Document outDoc = Util.parseXMLToDOM("<output></output>"); Element rootElement = outDoc.getDocumentElement(); NodeList ret = (NodeList) expr.evaluate(doc, XPathConstants.NODESET); for (int i = 0; i < ret.getLength(); i++) { Object o = ret.item(i); if (o instanceof String) { out_str.append(o); } else if (o instanceof Node) { Node n = (Node) o; short nt = n.getNodeType(); switch (nt) { case Node.TEXT_NODE: case Node.ATTRIBUTE_NODE: case Node.CDATA_SECTION_NODE: // Only one value gets // evaluated? 
if (out_str.length() > 0) out_str.append(','); if (nt == Node.ATTRIBUTE_NODE) out_str.append(n.getNodeValue()); else out_str.append(n.getTextContent()); break; case Node.ELEMENT_NODE: rootElement.appendChild(outDoc.importNode(n, true)); break; } } } if (out_str.length() > 0) { rootElement.setTextContent(out_str.toString()); out_str.setLength(0); } out_str.append(Util.docToString(outDoc, true)); } catch (Exception e) { // Thrown most likely because the given XPath evaluates to a // string out_str.append(expr.evaluate(doc)); } } catch (XPathExpressionException e) { e.printStackTrace(); } if (out_str.length() > 5 && out_str.substring(0, 5).equals("<?xml")) out_str.delete(0, out_str.indexOf(">") + 1); EditText etOutput = (EditText) findViewById(R.id.etOutput); etOutput.setText(out_str.toString()); // tfOutputSize.setText(String.valueOf(xmlString.length())); }
From source file:org.apache.zeppelin.sap.universe.UniverseClient.java
/**
 * Loads one page of universes from the SAP REST API into the given map,
 * recursing to fetch the next page as long as full pages keep coming back.
 *
 * @param token        SAP authentication token sent with the request
 * @param offset       paging offset of the first universe to fetch
 * @param universesMap destination map keyed by universe name
 * @throws UniverseException when the request or response processing fails
 */
private void loadUniverses(String token, int offset, Map<String, UniverseInfo> universesMap)
        throws UniverseException {
    int limit = 50;
    HttpGet httpGet = new HttpGet(
            String.format("%s%s?offset=%s&limit=%s", apiUrl, "/sl/v1/universes", offset, limit));
    setHeaders(httpGet, token);
    HttpResponse response = null;
    try {
        response = httpClient.execute(httpGet);
    } catch (Exception e) {
        throw new UniverseException(String.format(errorMessageTemplate,
                "UniverseClient " + "(get universes): Request failed",
                ExceptionUtils.getStackTrace(e)));
    }
    if (response != null && response.getStatusLine().getStatusCode() == 200) {
        try (InputStream xmlStream = response.getEntity().getContent()) {
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            // Harden against XXE: the XML comes from an external service
            // and must not resolve DTDs or external entities.
            factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
            factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
            factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
            DocumentBuilder builder = factory.newDocumentBuilder();
            Document doc = builder.parse(xmlStream);
            XPathFactory xPathfactory = XPathFactory.newInstance();
            XPath xpath = xPathfactory.newXPath();
            XPathExpression expr = xpath.compile("//universe");
            NodeList universesNodes = (NodeList) expr.evaluate(doc, XPathConstants.NODESET);
            if (universesNodes != null) {
                int count = universesNodes.getLength();
                for (int i = 0; i < count; i++) {
                    Node universe = universesNodes.item(i);
                    if (universe.hasChildNodes()) {
                        NodeList universeParameters = universe.getChildNodes();
                        // Was "parapetersCount"; also dropped a no-op
                        // parameterNode.getNodeName() call from the loop.
                        int parameterCount = universeParameters.getLength();
                        String id = null;
                        String name = null;
                        String type = null;
                        for (int j = 0; j < parameterCount; j++) {
                            Node parameterNode = universeParameters.item(j);
                            if (parameterNode.getNodeType() == Node.ELEMENT_NODE) {
                                if (parameterNode.getNodeName().equalsIgnoreCase("id")) {
                                    id = parameterNode.getTextContent();
                                    continue;
                                }
                                if (parameterNode.getNodeName().equalsIgnoreCase("name")) {
                                    name = parameterNode.getTextContent();
                                    continue;
                                }
                                if (parameterNode.getNodeName().equalsIgnoreCase("type")) {
                                    type = parameterNode.getTextContent();
                                    continue;
                                }
                            }
                        }
                        if (name != null && StringUtils.isNotBlank(type)) {
                            // Strip the ".<type>" suffix from the display name.
                            name = name.replaceAll(String.format("\\.%s$", type), StringUtils.EMPTY);
                        }
                        universesMap.put(name, new UniverseInfo(id, name, type));
                    }
                }
                if (count == limit) {
                    // A full page implies there may be more; fetch the next one.
                    offset += limit;
                    loadUniverses(token, offset, universesMap);
                }
            }
        } catch (IOException | ParserConfigurationException | SAXException
                | XPathExpressionException e) {
            // Single handler for all response-processing failures (the two
            // original catch blocks had identical bodies).
            throw new UniverseException(String.format(errorMessageTemplate,
                    "UniverseClient " + "(get universes): Response processing failed",
                    ExceptionUtils.getStackTrace(e)));
        }
    }
}
From source file:eu.fbk.dh.tint.tokenizer.ItalianTokenizer.java
public ItalianTokenizer(@Nullable File settingFile) { Trie.TrieBuilder builder = Trie.builder().removeOverlaps(); InputStream stream = null; if (settingFile != null) { try { stream = new FileInputStream(settingFile); } catch (FileNotFoundException e) { // continue }/*from ww w. j a va 2 s . c om*/ } if (stream == null) { stream = this.getClass().getResourceAsStream("/token-settings.xml"); } logger.trace("Loading model"); try { DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); XPathFactory xPathfactory = XPathFactory.newInstance(); XPath xpath = xPathfactory.newXPath(); XPathExpression expr; NodeList nl; int count; Document doc = dBuilder.parse(stream); doc.getDocumentElement().normalize(); // Normalization rules expr = xpath.compile("/settings/normalization/char"); nl = (NodeList) expr.evaluate(doc, XPathConstants.NODESET); for (int i = 0; i < nl.getLength(); i++) { Node item = nl.item(i); Element element = (Element) item; String hexCode = element.getAttribute("hexcode"); String content = element.getTextContent(); // Bad: need fix if (content.equals("`")) { content = "'"; } int num = Integer.parseInt(hexCode, 16); if (content.length() == 0) { continue; } normalizedChars.put(num, content); } logger.info("Loaded {} normalization rules", normalizedChars.size()); // end sentence chars expr = xpath.compile("/settings/sentenceSplitting/char"); nl = (NodeList) expr.evaluate(doc, XPathConstants.NODESET); for (int i = 0; i < nl.getLength(); i++) { Node item = nl.item(i); Element element = (Element) item; String charID = element.getAttribute("id"); sentenceChars.add(Integer.parseInt(charID)); } logger.info("Loaded {} sentence splitting rules", sentenceChars.size()); // splitting rules expr = xpath.compile("/settings/tokenSplitting/char"); nl = (NodeList) expr.evaluate(doc, XPathConstants.NODESET); for (int i = 0; i < nl.getLength(); i++) { Node item = nl.item(i); Element element = (Element) 
item; String charID = element.getAttribute("id"); splittingChars.add(Integer.parseInt(charID)); } logger.info("Loaded {} token splitting rules", splittingChars.size()); // expressions expr = xpath.compile("/settings/expressions/expression"); nl = (NodeList) expr.evaluate(doc, XPathConstants.NODESET); StringBuilder b = new StringBuilder(); b.append("("); boolean first = true; count = 0; for (int i = 0; i < nl.getLength(); i++) { Node item = nl.item(i); Element element = (Element) item; String regExp = element.getAttribute("find"); boolean merge = PropertiesUtils.getBoolean(element.getAttribute("merge"), true); Integer group = PropertiesUtils.getInteger(element.getAttribute("get"), 1); if (merge) { if (!first) { b.append("|"); } b.append(regExp); count++; first = false; } else { expressions.put(Pattern.compile(regExp), group); count++; } } b.append(")"); expressions.put(Pattern.compile(b.toString()), 1); logger.info("Loaded {} regular expressions", count); // abbreviations expr = xpath.compile("/settings/abbreviations/abbreviation"); nl = (NodeList) expr.evaluate(doc, XPathConstants.NODESET); count = 0; for (int i = 0; i < nl.getLength(); i++) { Node item = nl.item(i); String abbr = item.getTextContent(); abbr = getString(tokenArray(abbr)); builder.addKeyword(" " + abbr + " "); count++; } logger.info("Loaded {} abbreviations", count); } catch (Exception e) { e.printStackTrace(); } trie = builder.build(); }
From source file:dk.netarkivet.harvester.harvesting.WARCWriterProcessor.java
// NOTE(review): kept byte-identical pending refactor -- this method mixes ANVL
// assembly, order.xml XPath extraction and harvestInfo metadata handling, and
// is too order-dependent to restyle safely without tests.
// NOTE(review): `metadataMap = new HashMap()` uses a raw type; values come
// from Node.getTextContent(), so presumably String -> String -- TODO confirm
// against the field declaration before adding generics.
// NOTE(review): XmlUtils.getDocument(orderFile) parses XML without visible
// DTD/XXE hardening -- confirm orderFile is trusted before hardening here.
// NOTE(review): typo in log message "unable top obtain" is runtime text and
// is deliberately left unchanged in this documentation-only pass.
/** * Return relevant values as header-like fields (here ANVLRecord, but spec-defined "application/warc-fields" type * when written). Field names from from DCMI Terms and the WARC/0.17 specification. * * @see org.archive.crawler.framework.WriterPoolProcessor#getFirstrecordBody(java.io.File) */ @Override protected String getFirstrecordBody(File orderFile) { ANVLRecord record = new ANVLRecord(7); record.addLabelValue("software", "Heritrix/" + Heritrix.getVersion() + " http://crawler.archive.org"); try { InetAddress host = InetAddress.getLocalHost(); record.addLabelValue("ip", host.getHostAddress()); record.addLabelValue("hostname", host.getCanonicalHostName()); } catch (UnknownHostException e) { logger.log(Level.WARNING, "unable top obtain local crawl engine host", e); } // conforms to ISO 28500:2009 as of May 2009 // as described at http://bibnum.bnf.fr/WARC/ // latest draft as of November 2008 record.addLabelValue("format", "WARC File Format 1.0"); record.addLabelValue("conformsTo", "http://bibnum.bnf.fr/WARC/WARC_ISO_28500_version1_latestdraft.pdf"); // Get other values from order.xml try { Document doc = XmlUtils.getDocument(orderFile); addIfNotBlank(record, "operator", XmlUtils.xpathOrNull(doc, "//meta/operator")); addIfNotBlank(record, "publisher", XmlUtils.xpathOrNull(doc, "//meta/organization")); addIfNotBlank(record, "audience", XmlUtils.xpathOrNull(doc, "//meta/audience")); addIfNotBlank(record, "isPartOf", XmlUtils.xpathOrNull(doc, "//meta/name")); // disabling "created" field per HER-1634 // though it's theoretically useful as a means of distinguishing // one crawl from another, the current usage/specification is too // vague...
in particular a 'created' field in the 'warcinfo' is // reasonable to interpret as applying to the WARC-unit, rather // than the crawl-job-unit so we remove it and see if anyone // complains or makes a case for restoring it in a less-ambiguous // manner // String rawDate = XmlUtils.xpathOrNull(doc,"//meta/date"); // if(StringUtils.isNotBlank(rawDate)) { // Date date; // try { // date = ArchiveUtils.parse14DigitDate(rawDate); // addIfNotBlank(record,"created",ArchiveUtils.getLog14Date(date)); // } catch (ParseException e) { // logger.log(Level.WARNING,"obtaining warc created date",e); // } // } addIfNotBlank(record, "description", XmlUtils.xpathOrNull(doc, "//meta/description")); addIfNotBlank(record, "robots", XmlUtils.xpathOrNull(doc, "//newObject[@name='robots-honoring-policy']/string[@name='type']")); addIfNotBlank(record, "http-header-user-agent", XmlUtils.xpathOrNull(doc, "//map[@name='http-headers']/string[@name='user-agent']")); addIfNotBlank(record, "http-header-from", XmlUtils.xpathOrNull(doc, "//map[@name='http-headers']/string[@name='from']")); if (metadataMap == null) { //metadataMap = getMetadataItems(); XPathFactory factory = XPathFactory.newInstance(); XPath xpath = factory.newXPath(); XPathExpression expr = xpath.compile(H1HeritrixTemplate.METADATA_ITEMS_XPATH); Node node = (Node) expr.evaluate(doc, XPathConstants.NODE); //NodeList nodeList = (NodeList) expr.evaluate(doc, XPathConstants.NODESET); //Node node = nodeList.item(0); if (node != null) { NodeList nodeList = node.getChildNodes(); if (nodeList != null) { metadataMap = new HashMap(); for (int i = 0; i < nodeList.getLength(); ++i) { node = nodeList.item(i); if (node.getNodeType() == Node.ELEMENT_NODE) { String typeName = node.getNodeName(); if ("string".equals(typeName)) { Node attribute = node.getAttributes().getNamedItem("name"); if (attribute != null && attribute.getNodeType() == Node.ATTRIBUTE_NODE) { String key = attribute.getNodeValue(); if (key != null && key.length() > 0) { String
value = node.getTextContent(); metadataMap.put(key, value); // debug //System.out.println(key + "=" + value); } } } } } } } } } catch (IOException e) { logger.log(Level.WARNING, "Error obtaining warcinfo", e); } catch (XPathExpressionException e) { logger.log(Level.WARNING, "Error obtaining metadata items", e); } // add fields from harvesInfo.xml version 0.4 /* * <harvestInfo> <version>0.4</version> <jobId>1</jobId> <priority>HIGHPRIORITY</priority> * <harvestNum>0</harvestNum> <origHarvestDefinitionID>1</origHarvestDefinitionID> * <maxBytesPerDomain>500000000</maxBytesPerDomain> <maxObjectsPerDomain>2000</maxObjectsPerDomain> * <orderXMLName>default_orderxml</orderXMLName> * <origHarvestDefinitionName>netarkivet</origHarvestDefinitionName> <scheduleName>Once_a_week</scheduleName> * <harvestFilenamePrefix>1-1</harvestFilenamePrefix> <jobSubmitDate>Some date</jobSubmitDate> * <performer>undefined</performer> </harvestInfo> */ String netarchiveSuiteComment = "#added by NetarchiveSuite " + dk.netarkivet.common.Constants.getVersionString(); ANVLRecord recordNAS = new ANVLRecord(7); if (metadataMap != null) { // Add the data from the metadataMap to the WarcInfoRecord.
recordNAS.addLabelValue(HARVESTINFO_VERSION, (String) metadataMap.get(HARVESTINFO_VERSION)); recordNAS.addLabelValue(HARVESTINFO_JOBID, (String) metadataMap.get(HARVESTINFO_JOBID)); recordNAS.addLabelValue(HARVESTINFO_CHANNEL, (String) metadataMap.get(HARVESTINFO_CHANNEL)); recordNAS.addLabelValue(HARVESTINFO_HARVESTNUM, (String) metadataMap.get(HARVESTINFO_HARVESTNUM)); recordNAS.addLabelValue(HARVESTINFO_ORIGHARVESTDEFINITIONID, (String) metadataMap.get(HARVESTINFO_ORIGHARVESTDEFINITIONID)); recordNAS.addLabelValue(HARVESTINFO_MAXBYTESPERDOMAIN, (String) metadataMap.get(HARVESTINFO_MAXBYTESPERDOMAIN)); recordNAS.addLabelValue(HARVESTINFO_MAXOBJECTSPERDOMAIN, (String) metadataMap.get(HARVESTINFO_MAXOBJECTSPERDOMAIN)); recordNAS.addLabelValue(HARVESTINFO_ORDERXMLNAME, (String) metadataMap.get(HARVESTINFO_ORDERXMLNAME)); recordNAS.addLabelValue(HARVESTINFO_ORIGHARVESTDEFINITIONNAME, (String) metadataMap.get(HARVESTINFO_ORIGHARVESTDEFINITIONNAME)); if (metadataMap.containsKey((HARVESTINFO_SCHEDULENAME))) { recordNAS.addLabelValue(HARVESTINFO_SCHEDULENAME, (String) metadataMap.get(HARVESTINFO_SCHEDULENAME)); } recordNAS.addLabelValue(HARVESTINFO_HARVESTFILENAMEPREFIX, (String) metadataMap.get(HARVESTINFO_HARVESTFILENAMEPREFIX)); recordNAS.addLabelValue(HARVESTINFO_JOBSUBMITDATE, (String) metadataMap.get(HARVESTINFO_JOBSUBMITDATE)); if (metadataMap.containsKey(HARVESTINFO_PERFORMER)) { recordNAS.addLabelValue(HARVESTINFO_PERFORMER, (String) metadataMap.get(HARVESTINFO_PERFORMER)); } if (metadataMap.containsKey(HARVESTINFO_AUDIENCE)) { recordNAS.addLabelValue(HARVESTINFO_AUDIENCE, (String) metadataMap.get(HARVESTINFO_AUDIENCE)); } } else { logger.log(Level.SEVERE, "Error missing metadata"); } // really ugly to return as string, when it may just be merged with // a couple other fields at write time, but changing would require // larger refactoring return record.toString() + netarchiveSuiteComment + "\n" + recordNAS.toString(); }
From source file:com.sixdimensions.wcm.cq.pack.service.impl.LegacyPackageManagerServiceImpl.java
/** * Parses the response from the server using XPath. * // w w w. j a va 2s .c o m * @param response * the response to parse * @return a response object representing the response data * @throws XPathExpressionException * @throws SAXException * @throws IOException * @throws ParserConfigurationException */ private Response parseResponse(final byte[] response) throws XPathExpressionException, IOException, ParserConfigurationException { this.log.debug("parseResponse"); Response responseObj = null; try { final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); // never forget this! final Document doc = factory.newDocumentBuilder().parse(new ByteArrayInputStream(response)); final XPath xpath = XPathFactory.newInstance().newXPath(); // Sample response // // <crx version="2.0" user="admin" workspace="crx.default"> // <request> // <param name="cmd" value="rm"/> // <param name="name" value="myPackage"/> // </request> // <response> // <status code="200">ok</status> // </response> // </crx> this.log.debug("Parsing response code"); final XPathExpression codeXpr = xpath.compile("/crx/response/status/@code"); int responseCode = -1; try { responseCode = Integer.parseInt( ((NodeList) codeXpr.evaluate(doc, XPathConstants.NODESET)).item(0).getNodeValue(), 10); } catch (final NumberFormatException nfe) { this.log.warn("Unable to parse " + ((NodeList) codeXpr.evaluate(doc, XPathConstants.NODESET)).item(0).getNodeValue() + " as a number"); } this.log.debug("Parsing response message"); final XPathExpression messageXpr = xpath.compile("/crx/response/status"); final String responseMessage = ((NodeList) messageXpr.evaluate(doc, XPathConstants.NODESET)).item(0) .getChildNodes().item(0).getNodeValue(); responseObj = new Response(HttpStatus.SC_OK == responseCode, responseCode, responseMessage); this.log.debug("Response Code: " + responseCode); if (HttpStatus.SC_OK == responseCode) { this.log.debug("Response Message: " + responseMessage); } 
else { this.log.warn("Error Message: " + responseMessage); } } catch (final SAXException se) { final String message = "Exception parsing XML response, assuming failure. " + "This often occurs when an invalid XML file is uploaded as the error message " + "is not properly escaped in the response."; this.log.warn(message, se); this.log.warn("Response contents: " + new String(response, "utf-8")); responseObj = new Response(false, 500, message); } return responseObj; }
From source file:com.bekwam.mavenpomupdater.MainViewController.java
private POMObject parseFile(String path) { if (log.isDebugEnabled()) { log.debug("[PARSE] path=" + path); }//w ww . j a v a2 s .co m try { DocumentBuilder builder = factory.newDocumentBuilder(); Document doc = builder.parse(path); XPath xpath = XPathFactory.newInstance().newXPath(); XPathExpression expression = xpath.compile("//project/version/text()"); Node node = (Node) expression.evaluate(doc, XPathConstants.NODE); String version = ""; if (node != null) { version = node.getNodeValue(); if (log.isDebugEnabled()) { log.debug("[PARSE] version=" + node.getNodeValue()); } } // XPath pvXPath = XPathFactory.newInstance().newXPath(); XPathExpression pvExpression = xpath.compile("//project/parent/version/text()"); Node pvNode = (Node) pvExpression.evaluate(doc, XPathConstants.NODE); String pVersion = ""; if (pvNode != null) { pVersion = pvNode.getNodeValue(); if (log.isDebugEnabled()) { log.debug("[PARSE] parentVersion=" + pvNode.getNodeValue()); } } return new POMObject(true, path, version, pVersion, false); } catch (Exception exc) { log.error("error parsing path=" + path, exc); errorLogDelegate.log(path, exc.getMessage()); return new POMObject(false, path, "Parse Error (will be skipped)", "Parse Error (will be skipped)", true); } }
From source file:gov.nih.nci.cacis.common.util.ExtractSchematron.java
/**
 * Creates a new schematron extractor: obtains the XSD schema loader from
 * the DOM registry, prepares a namespace-aware document builder factory,
 * an XML serializer that omits the XML declaration, and the compiled
 * //sch:pattern XPath expression.
 *
 * @throws ClassCastException if there is an error getting a SchemaLoader
 * @throws ClassNotFoundException if there is an error getting a SchemaLoader
 * @throws InstantiationException if there is an error getting a SchemaLoader
 * @throws IllegalAccessException if there is an error getting a SchemaLoader
 * @throws TransformerFactoryConfigurationError if there is an error getting a serializer
 * @throws TransformerConfigurationException if there is an error getting a serializer
 * @throws XPathExpressionException if there is an error compiling expressions
 */
public ExtractSchematron() throws ClassCastException, ClassNotFoundException, InstantiationException,
        IllegalAccessException, TransformerConfigurationException, TransformerFactoryConfigurationError,
        XPathExpressionException {
    final DOMImplementationRegistry registry = DOMImplementationRegistry.newInstance();
    final XSImplementation xsImpl = (XSImplementation) registry.getDOMImplementation("XS-Loader");

    this.dfactory = DocumentBuilderFactory.newInstance();
    this.dfactory.setNamespaceAware(true);

    // XPath with the schematron namespace bound for //sch:pattern queries.
    final XPath schXPath = XPathFactory.newInstance().newXPath();
    schXPath.setNamespaceContext(new SchNamespaceContext());

    this.serializer = TransformerFactory.newInstance().newTransformer();
    this.serializer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");

    this.patternExpr = schXPath.compile("//sch:pattern");
    this.schemaLoader = xsImpl.createXSLoader(null);
}