List of usage examples for javax.xml.transform.OutputKeys.VERSION
String VERSION — the output property key ("version"); it specifies the version of the output method (for the "xml" output method, this is the version written in the XML declaration).
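Before the project examples below, here is a minimal, self-contained sketch (the class name and property values are illustrative, not from any of the listed projects) showing how OutputKeys.VERSION is typically set on a Transformer to control the version attribute of the emitted XML declaration:

import java.io.StringReader;
import java.io.StringWriter;

import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;

public class OutputKeysVersionDemo {
    public static void main(String[] args) throws Exception {
        // Identity transform: copies the input document to the output unchanged.
        Transformer t = TransformerFactory.newInstance().newTransformer();

        // Ask the serializer to emit <?xml version="1.0" encoding="UTF-8"?>.
        t.setOutputProperty(OutputKeys.VERSION, "1.0");
        t.setOutputProperty(OutputKeys.ENCODING, "UTF-8");

        StringWriter out = new StringWriter();
        t.transform(new StreamSource(new StringReader("<root/>")),
                    new StreamResult(out));
        System.out.println(out);
    }
}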
From source file:net.sf.joost.stx.Processor.java
/**
 * Initialize the output properties to the values specified in the
 * transformation sheet or to their default values, resp.
 */
public void initOutputProperties() {
    outputProperties = new Properties();
    outputProperties.setProperty(OutputKeys.ENCODING, transformNode.outputEncoding);
    outputProperties.setProperty(OutputKeys.MEDIA_TYPE, "text/xml");
    outputProperties.setProperty(OutputKeys.METHOD, transformNode.outputMethod);
    outputProperties.setProperty(OutputKeys.OMIT_XML_DECLARATION, "no");
    outputProperties.setProperty(OutputKeys.STANDALONE, "no");
    outputProperties.setProperty(OutputKeys.VERSION, "1.0");
}
From source file:com.ggvaidya.scinames.model.Project.java
public void saveToFile() throws IOException {
    File saveToFile = projectFile.getValue();

    if (saveToFile == null)
        throw new IOException("Project file not set: nowhere to save to!");

    /*
    XMLOutputFactory factory = XMLOutputFactory.newFactory();
    try {
        XMLStreamWriter writer = factory.createXMLStreamWriter(new FileWriter(saveToFile));
        writer.writeStartDocument();
        serializeToXMLStream(writer);
        writer.writeEndDocument();
        writer.flush();

        // Success!
        lastModified.saved();
    } catch (XMLStreamException ex) {
        throw new IOException("Could not write project to XML file '" + saveToFile + "': " + ex);
    }
    */

    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();

    // Create a document representation of this project.
    Document docProject;
    try {
        DocumentBuilder db = dbf.newDocumentBuilder();
        docProject = db.newDocument();
        serializeToDocument(docProject);
    } catch (ParserConfigurationException ex) {
        Logger.getLogger(Project.class.getName()).log(Level.SEVERE, null, ex);
        return;
    }

    // Write the document representation of this project as XML.
    TransformerFactory tfc = TransformerFactory.newInstance();
    try {
        OutputStream outputStream = new GZIPOutputStream(new FileOutputStream(saveToFile));
        StreamResult res = new StreamResult(outputStream);

        Transformer t = tfc.newTransformer();
        DOMSource ds = new DOMSource(docProject);
        t.setOutputProperty(OutputKeys.METHOD, "xml");
        t.setOutputProperty(OutputKeys.VERSION, "1.0"); // Do NOT change to 1.1 -- this leads to complex problems!
        t.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        t.setOutputProperty(OutputKeys.STANDALONE, "yes");
        t.setOutputProperty(OutputKeys.INDENT, "yes");
        t.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4");
        t.transform(ds, res);

        // Success!
        lastModified.saved();
        outputStream.close();
    } catch (TransformerConfigurationException ex) {
        throw new IOException("Could not write out XML to '" + saveToFile + "': " + ex);
    } catch (TransformerException ex) {
        throw new IOException("Could not write out XML to '" + saveToFile + "': " + ex);
    }
}
From source file:com.krawler.portal.tools.ServiceBuilder.java
public void writeToSourceCfgXml(String fileName) {
    try {
        DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
        Document doc = docBuilder.parse(PropsValues.CFG_SOURCE_FILE_PATH);
        Node hibernate_conf = doc.getChildNodes().item(1); // .getChildNodes().item(1);
        Node SessionFac = hibernate_conf.getChildNodes().item(1);
        Element mapping = doc.createElement("mapping");
        mapping.setAttribute("resource", PropsValues.PACKAGE_FILE_PATH + fileName + ".hbm.xml");
        SessionFac.getChildNodes().getLength();
        SessionFac.appendChild(mapping);

        DOMSource ds = new DOMSource(doc);
        StreamResult sr = new StreamResult(PropsValues.CFG_SOURCE_FILE_PATH);
        TransformerFactory tf = TransformerFactory.newInstance();
        Transformer trans = tf.newTransformer();
        trans.setOutputProperty(OutputKeys.VERSION, "1.0");
        trans.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        trans.setOutputProperty(OutputKeys.DOCTYPE_SYSTEM,
                "http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd");
        trans.setOutputProperty(OutputKeys.DOCTYPE_PUBLIC,
                "-//Hibernate/Hibernate Configuration DTD 3.0//EN");
        trans.transform(ds, sr);
        // writeToClassesCfgXml(fileName);
    } catch (TransformerException ex) {
        logger.warn(ex.getMessage(), ex);
    } catch (SAXException ex) {
        logger.warn(ex.getMessage(), ex);
    } catch (IOException ex) {
        logger.warn(ex.getMessage(), ex);
    } catch (ParserConfigurationException ex) {
        logger.warn(ex.getMessage(), ex);
    }
}
From source file:com.krawler.portal.tools.ServiceBuilder.java
public void writeToClassesCfgXml(String fileName) {
    try {
        DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
        // Document doc = docBuilder.parse(PropsValues.CFG_SOURCE_FILE_PATH);
        Document doc = docBuilder.parse(PropsValues.CFG_CLASSES_FILE_PATH);
        Node hibernate_conf = doc.getChildNodes().item(1); // .getChildNodes().item(1);
        Node SessionFac = hibernate_conf.getChildNodes().item(1);
        Element mapping = doc.createElement("mapping");
        mapping.setAttribute("resource", PropsValues.PACKAGE_FILE_PATH + fileName + ".hbm.xml");
        SessionFac.getChildNodes().getLength();
        SessionFac.appendChild(mapping);

        DOMSource ds = new DOMSource(doc);
        // StreamResult sr = new StreamResult(PropsValues.CFG_SOURCE_FILE_PATH);
        StreamResult sr = new StreamResult(PropsValues.CFG_CLASSES_FILE_PATH);
        TransformerFactory tf = TransformerFactory.newInstance();
        Transformer trans = tf.newTransformer();
        trans.setOutputProperty(OutputKeys.VERSION, "1.0");
        trans.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        trans.setOutputProperty(OutputKeys.DOCTYPE_SYSTEM,
                "http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd");
        trans.setOutputProperty(OutputKeys.DOCTYPE_PUBLIC,
                "-//Hibernate/Hibernate Configuration DTD 3.0//EN");
        trans.transform(ds, sr);
    } catch (TransformerException ex) {
        logger.warn(ex.getMessage(), ex);
    } catch (SAXException ex) {
        logger.warn(ex.getMessage(), ex);
    } catch (IOException ex) {
        logger.warn(ex.getMessage(), ex);
    } catch (ParserConfigurationException ex) {
        logger.warn(ex.getMessage(), ex);
    }
}
From source file:com.krawler.portal.tools.ServiceBuilder.java
public void deleteSourceEntryCfgXml(String fileName) {
    try {
        DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
        Document doc = docBuilder.parse(PropsValues.CFG_SOURCE_FILE_PATH);
        Node hibernate_conf = doc.getChildNodes().item(1);
        Node SessionFac = hibernate_conf.getChildNodes().item(1);
        Element sesFac = (Element) SessionFac;
        NodeList mapping_lists = sesFac.getElementsByTagName("mapping");
        Node toDelete = null;
        for (int num = 0; num < mapping_lists.getLength(); num++) {
            Element mapEle = (Element) mapping_lists.item(num);
            if (mapEle.getAttribute("resource").equals(PropsValues.PACKAGE_FILE_PATH + fileName + ".hbm.xml")) {
                toDelete = mapEle;
                break;
            }
        }
        sesFac.removeChild(toDelete);

        DOMSource ds = new DOMSource(doc);
        StreamResult sr = new StreamResult(PropsValues.CFG_SOURCE_FILE_PATH);
        TransformerFactory tf = TransformerFactory.newInstance();
        Transformer trans = tf.newTransformer();
        trans.setOutputProperty(OutputKeys.VERSION, "1.0");
        trans.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        trans.setOutputProperty(OutputKeys.DOCTYPE_SYSTEM,
                "http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd");
        trans.setOutputProperty(OutputKeys.DOCTYPE_PUBLIC,
                "-//Hibernate/Hibernate Configuration DTD 3.0//EN");
        trans.transform(ds, sr);
    } catch (TransformerException ex) {
        logger.warn(ex.getMessage(), ex);
    } catch (SAXException ex) {
        logger.warn(ex.getMessage(), ex);
    } catch (IOException ex) {
        logger.warn(ex.getMessage(), ex);
    } catch (ParserConfigurationException ex) {
        logger.warn(ex.getMessage(), ex);
    }
}
From source file:com.krawler.portal.tools.ServiceBuilder.java
public void deleteClassesEntryCfgXml(String fileName) {
    try {
        DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
        Document doc = docBuilder.parse(PropsValues.CFG_CLASSES_FILE_PATH);
        Node hibernate_conf = doc.getChildNodes().item(1);
        Node SessionFac = hibernate_conf.getChildNodes().item(1);
        Element sesFac = (Element) SessionFac;
        NodeList mapping_lists = sesFac.getElementsByTagName("mapping");
        Node toDelete = null;
        for (int num = 0; num < mapping_lists.getLength(); num++) {
            Element mapEle = (Element) mapping_lists.item(num);
            if (mapEle.getAttribute("resource").equals(PropsValues.PACKAGE_FILE_PATH + fileName + ".hbm.xml")) {
                toDelete = mapEle;
                break;
            }
        }
        sesFac.removeChild(toDelete);

        DOMSource ds = new DOMSource(doc);
        StreamResult sr = new StreamResult(PropsValues.CFG_CLASSES_FILE_PATH);
        TransformerFactory tf = TransformerFactory.newInstance();
        Transformer trans = tf.newTransformer();
        trans.setOutputProperty(OutputKeys.VERSION, "1.0");
        trans.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        trans.setOutputProperty(OutputKeys.DOCTYPE_SYSTEM,
                "http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd");
        trans.setOutputProperty(OutputKeys.DOCTYPE_PUBLIC,
                "-//Hibernate/Hibernate Configuration DTD 3.0//EN");
        trans.transform(ds, sr);
    } catch (TransformerException ex) {
        logger.warn(ex.getMessage(), ex);
    } catch (SAXException ex) {
        logger.warn(ex.getMessage(), ex);
    } catch (IOException ex) {
        logger.warn(ex.getMessage(), ex);
    } catch (ParserConfigurationException ex) {
        logger.warn(ex.getMessage(), ex);
    }
}
From source file:org.apache.cocoon.serialization.AbstractTextSerializer.java
/**
 * Set the configurations for this serializer.
 */
public void configure(Configuration conf) throws ConfigurationException {
    // configure buffer size
    // Configuration bsc = conf.getChild("buffer-size", false);
    // if (null != bsc)
    //     outputBufferSize = bsc.getValueAsInteger(DEFAULT_BUFFER_SIZE);

    // configure xalan
    String cdataSectionElements = conf.getChild("cdata-section-elements").getValue(null);
    String dtPublic = conf.getChild("doctype-public").getValue(null);
    String dtSystem = conf.getChild("doctype-system").getValue(null);
    String encoding = conf.getChild("encoding").getValue(null);
    String indent = conf.getChild("indent").getValue(null);
    String mediaType = conf.getChild("media-type").getValue(null);
    String method = conf.getChild("method").getValue(null);
    String omitXMLDeclaration = conf.getChild("omit-xml-declaration").getValue(null);
    String standAlone = conf.getChild("standalone").getValue(null);
    String version = conf.getChild("version").getValue(null);

    final StringBuffer buffer = new StringBuffer();

    if (cdataSectionElements != null) {
        format.put(OutputKeys.CDATA_SECTION_ELEMENTS, cdataSectionElements);
        buffer.append(";cdata-section-elements=").append(cdataSectionElements);
    }
    if (dtPublic != null) {
        format.put(OutputKeys.DOCTYPE_PUBLIC, dtPublic);
        buffer.append(";doctype-public=").append(dtPublic);
    }
    if (dtSystem != null) {
        format.put(OutputKeys.DOCTYPE_SYSTEM, dtSystem);
        buffer.append(";doctype-system=").append(dtSystem);
    }
    if (encoding != null) {
        format.put(OutputKeys.ENCODING, encoding);
        buffer.append(";encoding=").append(encoding);
    }
    if (indent != null) {
        format.put(OutputKeys.INDENT, indent);
        buffer.append(";indent=").append(indent);
    }
    if (mediaType != null) {
        format.put(OutputKeys.MEDIA_TYPE, mediaType);
        buffer.append(";media-type=").append(mediaType);
    }
    if (method != null) {
        format.put(OutputKeys.METHOD, method);
        buffer.append(";method=").append(method);
    }
    if (omitXMLDeclaration != null) {
        format.put(OutputKeys.OMIT_XML_DECLARATION, omitXMLDeclaration);
        buffer.append(";omit-xml-declaration=").append(omitXMLDeclaration);
    }
    if (standAlone != null) {
        format.put(OutputKeys.STANDALONE, standAlone);
        buffer.append(";standalone=").append(standAlone);
    }
    if (version != null) {
        format.put(OutputKeys.VERSION, version);
        buffer.append(";version=").append(version);
    }

    if (buffer.length() > 0) {
        this.cachingKey = buffer.toString();
    }

    String tFactoryClass = conf.getChild("transformer-factory").getValue(null);
    if (tFactoryClass != null) {
        try {
            this.tfactory = (SAXTransformerFactory) ClassUtils.newInstance(tFactoryClass);
            if (getLogger().isDebugEnabled()) {
                getLogger().debug("Using transformer factory " + tFactoryClass);
            }
        } catch (Exception e) {
            throw new ConfigurationException("Cannot load transformer factory " + tFactoryClass, e);
        }
    } else {
        // Standard TrAX behaviour
        this.tfactory = (SAXTransformerFactory) TransformerFactory.newInstance();
    }
    tfactory.setErrorListener(new TraxErrorHandler(getLogger()));

    // Check if we need namespace as attributes.
    try {
        if (needsNamespacesAsAttributes()) {
            // Setup a correction pipe
            this.namespacePipe = new NamespaceAsAttributes();
            this.namespacePipe.enableLogging(getLogger());
        }
    } catch (Exception e) {
        getLogger().warn("Cannot know if transformer needs namespaces attributes - assuming NO.", e);
    }
}
From source file:org.broadleafcommerce.common.extensibility.context.merge.ImportProcessor.java
public ResourceInputStream[] extract(ResourceInputStream[] sources) throws MergeException {
    if (sources == null) {
        return null;
    }
    try {
        DynamicResourceIterator resourceList = new DynamicResourceIterator();
        resourceList.addAll(Arrays.asList(sources));
        while (resourceList.hasNext()) {
            ResourceInputStream myStream = resourceList.nextResource();
            Document doc = builder.parse(myStream);
            NodeList nodeList = (NodeList) xPath.evaluate(IMPORT_PATH, doc, XPathConstants.NODESET);
            int length = nodeList.getLength();
            for (int j = 0; j < length; j++) {
                Element element = (Element) nodeList.item(j);
                Resource resource = loader.getResource(element.getAttribute("resource"));
                ResourceInputStream ris = new ResourceInputStream(resource.getInputStream(),
                        resource.getURL().toString());
                resourceList.addEmbeddedResource(ris);
                element.getParentNode().removeChild(element);
            }
            if (length > 0) {
                TransformerFactory tFactory = TransformerFactory.newInstance();
                Transformer xmlTransformer = tFactory.newTransformer();
                xmlTransformer.setOutputProperty(OutputKeys.VERSION, "1.0");
                xmlTransformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
                xmlTransformer.setOutputProperty(OutputKeys.METHOD, "xml");
                xmlTransformer.setOutputProperty(OutputKeys.INDENT, "yes");

                DOMSource source = new DOMSource(doc);
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(baos));
                StreamResult result = new StreamResult(writer);
                xmlTransformer.transform(source, result);

                byte[] itemArray = baos.toByteArray();

                resourceList.set(resourceList.getPosition() - 1, new ResourceInputStream(
                        new ByteArrayInputStream(itemArray), null, myStream.getNames()));
            } else {
                myStream.reset();
            }
        }

        return resourceList.toArray(new ResourceInputStream[resourceList.size()]);
    } catch (Exception e) {
        throw new MergeException(e);
    }
}
From source file:org.broadleafcommerce.common.extensibility.context.merge.MergeManager.java
/**
 * Merge 2 xml document streams together into a final resulting stream. During
 * the merge, various merge business rules are followed based on configuration
 * defined for various merge points.
 *
 * @param stream1
 * @param stream2
 * @return the stream representing the merged document
 * @throws org.broadleafcommerce.common.extensibility.context.merge.exceptions.MergeException
 */
public ResourceInputStream merge(ResourceInputStream stream1, ResourceInputStream stream2)
        throws MergeException {
    try {
        Document doc1 = builder.parse(stream1);
        Document doc2 = builder.parse(stream2);

        List<Node> exhaustedNodes = new ArrayList<Node>();

        // process any defined handlers
        for (MergeHandler handler : this.handlers) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Processing handler: " + handler.getXPath());
            }
            MergePoint point = new MergePoint(handler, doc1, doc2);
            Node[] list = point.merge(exhaustedNodes);
            if (list != null) {
                Collections.addAll(exhaustedNodes, list);
            }
        }

        TransformerFactory tFactory = TransformerFactory.newInstance();
        Transformer xmlTransformer = tFactory.newTransformer();
        xmlTransformer.setOutputProperty(OutputKeys.VERSION, "1.0");
        xmlTransformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        xmlTransformer.setOutputProperty(OutputKeys.METHOD, "xml");
        xmlTransformer.setOutputProperty(OutputKeys.INDENT, "yes");

        DOMSource source = new DOMSource(doc1);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(baos, "UTF-8"));
        StreamResult result = new StreamResult(writer);
        xmlTransformer.transform(source, result);

        byte[] itemArray = baos.toByteArray();

        return new ResourceInputStream(new ByteArrayInputStream(itemArray), stream2.getName(),
                stream1.getNames());
    } catch (Exception e) {
        throw new MergeException(e);
    }
}
From source file:org.dcm4che.tool.dcm2xml.Dcm2Xml.java
public void parse(DicomInputStream dis) throws IOException, TransformerConfigurationException {
    dis.setIncludeBulkData(includeBulkData);
    if (blkAttrs != null)
        dis.setBulkDataDescriptor(BulkDataDescriptor.valueOf(blkAttrs));
    dis.setBulkDataDirectory(blkDirectory);
    dis.setBulkDataFilePrefix(blkFilePrefix);
    dis.setBulkDataFileSuffix(blkFileSuffix);
    dis.setConcatenateBulkDataFiles(catBlkFiles);
    TransformerHandler th = getTransformerHandler();
    Transformer t = th.getTransformer();
    t.setOutputProperty(OutputKeys.INDENT, indent ? "yes" : "no");
    t.setOutputProperty(OutputKeys.VERSION, xmlVersion);
    th.setResult(new StreamResult(System.out));
    SAXWriter saxWriter = new SAXWriter(th);
    saxWriter.setIncludeKeyword(includeKeyword);
    saxWriter.setIncludeNamespaceDeclaration(includeNamespaceDeclaration);
    dis.setDicomInputHandler(saxWriter);
    dis.readDataset(-1, -1);
}