List of usage examples for org.xml.sax SAXParseException getMessage
public String getMessage()
From source file:Counter.java
/**
 * Prints a parse problem to {@code System.err} in the form
 * {@code [type] systemId:line:column: message}.
 *
 * @param type label for the error category (e.g. "Error", "Warning", "Fatal Error")
 * @param ex   the parse exception to report; if {@code null}, a marker is printed
 *             and the method returns without reporting further details
 */
protected void printError(String type, SAXParseException ex) {
    System.err.print("[");
    System.err.print(type);
    System.err.print("] ");
    if (ex == null) {
        System.out.println("!!!");
        // FIX: bail out here — the original fell through and dereferenced
        // the null exception below, throwing a NullPointerException.
        return;
    }
    String systemId = ex.getSystemId();
    if (systemId != null) {
        // Show only the file-name portion of the system id.
        int index = systemId.lastIndexOf('/');
        if (index != -1)
            systemId = systemId.substring(index + 1);
        System.err.print(systemId);
    }
    System.err.print(':');
    System.err.print(ex.getLineNumber());
    System.err.print(':');
    System.err.print(ex.getColumnNumber());
    System.err.print(": ");
    System.err.print(ex.getMessage());
    System.err.println();
    System.err.flush();
}
From source file:com.evolveum.midpoint.prism.schema.SchemaRegistry.java
/**
 * Initializes the registry: resolver, prism schemas, javax schemas and the
 * compile-time class list. This can be used to read additional schemas even
 * after the registry was initialized.
 *
 * @throws IllegalStateException if the prism context or namespace prefix mapper is not set
 * @throws SchemaException       if a schema fails to parse; the message carries the
 *                               system id and line number reported by the parser
 */
public void initialize() throws SAXException, IOException, SchemaException {
    if (prismContext == null) {
        throw new IllegalStateException("Prism context not set");
    }
    if (namespacePrefixMapper == null) {
        throw new IllegalStateException("Namespace prefix mapper not set");
    }
    try {
        initResolver();
        parsePrismSchemas();
        parseJavaxSchema();
        compileCompileTimeClassList();
        initialized = true;
    } catch (SAXException ex) {
        if (ex instanceof SAXParseException) {
            SAXParseException sex = (SAXParseException) ex;
            // FIX: pass the original exception as the cause (as SchemaRegistryImpl
            // does) so the parser's stack trace is not lost on rethrow.
            throw new SchemaException("Error parsing schema " + sex.getSystemId() + " line "
                    + sex.getLineNumber() + ": " + sex.getMessage(), sex);
        }
        throw ex;
    }
}
From source file:com.hp.hpl.inkml.InkMLDOMParser.java
/**
 * Writes a formatted parse problem ("[type] file:line:col: message") to
 * standard error and flushes the stream.
 *
 * @param messageType label describing the severity of the problem
 * @param exception   exception carrying location and message details
 */
private void printError(final String messageType, final SAXParseException exception) {
    final StringBuilder report = new StringBuilder();
    report.append('[').append(messageType).append("] ");
    String systemId = exception.getSystemId();
    if (systemId != null) {
        // Keep only the last path segment of the system id.
        final int slash = systemId.lastIndexOf('/');
        if (slash != -1) {
            systemId = systemId.substring(slash + 1);
        }
        report.append(systemId);
    }
    report.append(':').append(exception.getLineNumber());
    report.append(':').append(exception.getColumnNumber());
    report.append(": ").append(exception.getMessage());
    System.err.println(report);
    System.err.flush();
}
From source file:Writer.java
/** Prints the error message. */ protected void printError(String type, SAXParseException ex) { System.err.print("["); System.err.print(type);//from w ww . j a v a 2 s . co m System.err.print("] "); String systemId = ex.getSystemId(); if (systemId != null) { int index = systemId.lastIndexOf('/'); if (index != -1) systemId = systemId.substring(index + 1); System.err.print(systemId); } System.err.print(':'); System.err.print(ex.getLineNumber()); System.err.print(':'); System.err.print(ex.getColumnNumber()); System.err.print(": "); System.err.print(ex.getMessage()); System.err.println(); System.err.flush(); }
From source file:com.evolveum.midpoint.prism.schema.SchemaRegistryImpl.java
/**
 * Initializes the registry (resolver, prism schemas, javax schemas,
 * compile-time class list) while tracing how long each phase takes.
 * This can be used to read additional schemas even after the registry
 * was initialized.
 *
 * @throws IllegalStateException if the prism context or namespace prefix mapper is not set
 * @throws SchemaException       if a schema fails to parse; carries the parser location
 */
@Override
public void initialize() throws SAXException, IOException, SchemaException {
    if (prismContext == null) {
        throw new IllegalStateException("Prism context not set");
    }
    if (namespacePrefixMapper == null) {
        throw new IllegalStateException("Namespace prefix mapper not set");
    }
    try {
        LOGGER.trace("initialize() starting");
        // Rolling checkpoint: each phase logs its own elapsed time.
        long checkpoint = System.currentTimeMillis();
        initResolver();
        long now = System.currentTimeMillis();
        LOGGER.trace("initResolver() done in {} ms", now - checkpoint);
        checkpoint = now;
        parsePrismSchemas();
        now = System.currentTimeMillis();
        LOGGER.trace("parsePrismSchemas() done in {} ms", now - checkpoint);
        checkpoint = now;
        parseJavaxSchema();
        now = System.currentTimeMillis();
        LOGGER.trace("parseJavaxSchema() done in {} ms", now - checkpoint);
        checkpoint = now;
        compileCompileTimeClassList();
        now = System.currentTimeMillis();
        LOGGER.trace("compileCompileTimeClassList() done in {} ms", now - checkpoint);
        initialized = true;
    } catch (SAXException ex) {
        if (!(ex instanceof SAXParseException)) {
            throw ex;
        }
        SAXParseException parseEx = (SAXParseException) ex;
        throw new SchemaException("Error parsing schema " + parseEx.getSystemId() + " line "
                + parseEx.getLineNumber() + ": " + parseEx.getMessage(), parseEx);
    }
}
From source file:jp.aegif.alfresco.online_webdav.WebDAVMethod.java
/**
 * Executes the WebDAV method, wrapping the call to {@link #executeImpl()} in a
 * retrying transaction and handling the error conditions.
 * <p>
 * Flow: parse headers, parse body (a malformed XML body is reported to the
 * client and aborts processing), then run {@code executeImpl()} inside a
 * retrying transaction and generate the response. Access-denied and socket
 * errors receive special treatment; anything else becomes a 500.
 *
 * @throws WebDAVServerException if the request cannot be processed; carries the
 *         HTTP status code to send to the client
 */
public void execute() throws WebDAVServerException {
    // Parse the HTTP headers
    parseRequestHeaders();
    // Parse the HTTP body
    try {
        parseRequestBody();
    } catch (WebDAVServerException e) {
        // A SAXParseException cause means the client sent a malformed XML body:
        // log it, send the error status, and stop — do not run the method.
        if (e.getCause() != null && e.getCause() instanceof SAXParseException) {
            SAXParseException saxParseEx = (SAXParseException) e.getCause();
            if (logger.isTraceEnabled()) {
                // Include stack trace at trace level.
                logger.trace("Malformed request body", saxParseEx);
            } else if (logger.isDebugEnabled()) {
                // Log message only at debug level.
                logger.debug("Malformed request body: " + saxParseEx.getMessage());
            }
            try {
                m_response.sendError(e.getHttpStatusCode());
            } catch (IOException ioe) {
                // Best effort: client may already be gone.
                if (logger.isDebugEnabled()) {
                    logger.debug("Unable to send status code", ioe);
                }
            }
            // Halt processing.
            return;
        } else {
            // Rethrow the exception, as we haven't dealt with it here.
            throw e;
        }
    }
    m_userAgent = m_request.getHeader(WebDAV.HEADER_USER_AGENT);
    RetryingTransactionCallback<Object> executeImplCallback = new RetryingTransactionCallback<Object>() {
        public Object execute() throws Exception {
            // Reset the request input stream / reader state so a transaction
            // retry re-reads the body from scratch.
            WebDAVMethod.this.m_inputStream = null;
            WebDAVMethod.this.m_reader = null;
            // Cache the current session for the lock service.
            getDAVHelper().getLockService().setCurrentSession(m_request.getSession());
            executeImpl();
            return null;
        }
    };
    try {
        boolean isReadOnly = isReadOnly();
        // Execute the method inside a retrying transaction.
        getTransactionService().getRetryingTransactionHelper().doInTransaction(executeImplCallback, isReadOnly);
        generateResponseImpl();
    } catch (AccessDeniedException e) {
        // Return a forbidden status.
        throw new WebDAVServerException(getStatusForAccessDeniedException(), e);
    } catch (Throwable e) {
        // Unwrap WebDAVServerException if it is the error or its direct cause.
        if (e instanceof WebDAVServerException) {
            throw (WebDAVServerException) e;
        } else if (e.getCause() instanceof WebDAVServerException) {
            throw (WebDAVServerException) e.getCause();
        } else {
            // Walk the cause chain looking for a SocketException: a dropped
            // client connection is logged but not converted to a server error.
            boolean logOnly = false;
            Throwable t = e;
            while ((t = t.getCause()) != null) {
                if (t instanceof SocketException) {
                    logOnly = true;
                    // The client aborted the connection - we can't do much about this, except log it.
                    if (logger.isTraceEnabled() || logger.isDebugEnabled()) {
                        String message = "Client dropped connection [uri=" + m_request.getRequestURI() + "]";
                        if (logger.isTraceEnabled()) {
                            // Include a stack trace when trace is enabled.
                            logger.trace(message, e);
                        } else if (logger.isDebugEnabled()) {
                            // Just a message for debug-level output.
                            logger.debug(message);
                        }
                    }
                    break;
                }
            }
            // Convert anything else to an internal server error.
            if (!logOnly) {
                throw new WebDAVServerException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
            }
        }
    } finally {
        // Always release per-request resources, success or failure.
        cleanUp();
    }
}
From source file:de.escidoc.core.common.util.xml.XmlUtility.java
/** * Validates the provided XML data using the specified schema.<br> The provided {@code ByteArrayInputStream} is * reset after validation./*from w ww.jav a 2 s. c om*/ * * @param byteArrayInputStream The XML data to validate in an {@code ByteArrayInputStream}.<br> This input * stream is reset after the validation. * @param schemaUri The URL identifying the schema that shall be used for validation. * @throws XmlCorruptedException Thrown if the XML data cannot be parsed. * @throws XmlSchemaValidationException Thrown if both validation fail or only one validation is executed and fails * @throws WebserverSystemException Thrown in any other case. */ public void validate(final ByteArrayInputStream byteArrayInputStream, final String schemaUri) throws XmlCorruptedException, XmlSchemaValidationException, WebserverSystemException { try { final Validator validator = getSchema(schemaUri).newValidator(); validator.validate(new SAXSource(new InputSource(byteArrayInputStream))); } catch (final SAXParseException e) { final String errorMsg = "Error in line " + e.getLineNumber() + ", column " + e.getColumnNumber() + ". " + e.getMessage(); if (e.getMessage().startsWith("cvc")) { throw new XmlSchemaValidationException(errorMsg, e); } else { throw new XmlCorruptedException(errorMsg, e); } } catch (final Exception e) { throw new WebserverSystemException(e.getMessage(), e); } finally { if (byteArrayInputStream != null) { byteArrayInputStream.reset(); } } }
From source file:com.ephesoft.dcma.webservice.util.WebServiceHelper.java
/** * This Method is Added to process the input provided for WebService to upload files for Learning * //www .j av a 2 s . c o m * @param req the {@link HttpServletRequest} the request header for this web service hit * @throws InternalServerException the internal server exception * @throws ValidationException the validation exception */ public void getUploadFilesforLearning(final HttpServletRequest req) throws InternalServerException, ValidationException { LOGGER.info("Inside getUploadFilesforLearning method"); String respStr = WebServiceConstants.EMPTY_STRING; String workingDir = WebServiceConstants.EMPTY_STRING; String docTypeName = WebServiceConstants.EMPTY_STRING; String learningType = WebServiceConstants.EMPTY_STRING; List<String> fileNamesFirst = null; List<String> fileNamesMiddle = null; List<String> fileNamesLast = null; if (req instanceof DefaultMultipartHttpServletRequest) { try { final String webServiceFolderPath = batchSchemaService.getWebServicesFolderPath(); workingDir = WebServiceUtil.createWebServiceWorkingDir(webServiceFolderPath); final DefaultMultipartHttpServletRequest multiPartRequest = (DefaultMultipartHttpServletRequest) req; final MultiValueMap<String, MultipartFile> fileMap = multiPartRequest.getMultiFileMap(); final int fileCountValue = checkFileCountForExtentionType("xml", fileMap); if (fileCountValue >= 2) { final HttpStatus status = HttpStatus.UNPROCESSABLE_ENTITY; respStr = "There are more than 1 xml file uploaded with the request. 
Only 1 xml is expected"; final RestError restError = new RestError(status, WebServiceConstants.INTERNAL_SERVER_ERROR_CODE, respStr, respStr + WebServiceConstants.CLASS_WEB_SERVICE_UTILITY, WebServiceConstants.DEFAULT_URL); LOGGER.error("Error response at server:" + respStr); final InternalServerException internalServerExcpetion = new InternalServerException( WebServiceConstants.INTERNAL_SERVER_ERROR_MESSAGE, restError); LOGGER.error(respStr + WebServiceConstants.HTTP_STATUS + status); throw internalServerExcpetion; } String xmlFileName = WebServiceConstants.EMPTY_STRING; xmlFileName = getXMLFile(workingDir, multiPartRequest, fileMap); LOGGER.info("XML file name is" + xmlFileName); UploadLearningFiles uploadLearningFileXML = null; final File xmlFile = new File(workingDir + File.separator + xmlFileName); final FileInputStream inputStream = new FileInputStream(xmlFile); final Source source = XMLUtil.createSourceFromStream(inputStream); uploadLearningFileXML = (UploadLearningFiles) batchSchemaDao.getJAXB2Template().getJaxb2Marshaller() .unmarshal(source); final String inputXMLValidationRes = validateInputXMLForLearning(uploadLearningFileXML); if (inputXMLValidationRes.isEmpty()) { String searchPathName = WebServiceConstants.EMPTY_STRING; final List<DocType> docTypes = uploadLearningFileXML.getDocType(); if (isValidFileCountForLearning(docTypes, fileMap.size() - 1)) { for (final DocType docType : docTypes) { docTypeName = docType.getDocTypeName(); learningType = docType.getLearningType(); if (docType.getPageTypeFirst() != null && docType.getPageTypeFirst().getFilesToBeUploaded().getFileName() != null && !docType.getPageTypeFirst().getFilesToBeUploaded().getFileName().isEmpty()) { fileNamesFirst = docType.getPageTypeFirst().getFilesToBeUploaded().getFileName(); if (LUCENE_SEARCH_CLASSIFICATION_TYPE.equalsIgnoreCase(learningType)) { searchPathName = EphesoftStringUtil.concatenate( batchSchemaService.getSearchClassSamplePath( uploadLearningFileXML.getBatchClassId(), true), 
File.separator, docType.getDocTypeName(), File.separator, docTypeName, WebServiceConstants.FIRST_PAGE); uploadInputImagesToLearningFolder(searchPathName, workingDir, fileNamesFirst); LOGGER.info( "Sucessfully Uploaded Images for lucene-search-classification-sample"); } else if ("Image".equalsIgnoreCase(learningType)) { searchPathName = EphesoftStringUtil.concatenate( batchSchemaService.getImageMagickBaseFolderPath( uploadLearningFileXML.getBatchClassId(), true), File.separator, docType.getDocTypeName(), File.separator, docTypeName, WebServiceConstants.FIRST_PAGE); uploadInputImagesToLearningFolder(searchPathName, workingDir, fileNamesFirst); } else { searchPathName = EphesoftStringUtil.concatenate( batchSchemaService.getSearchClassSamplePath( uploadLearningFileXML.getBatchClassId(), true), File.separator, docType.getDocTypeName(), File.separator, docTypeName, WebServiceConstants.FIRST_PAGE); uploadInputImagesToLearningFolder(searchPathName, workingDir, fileNamesFirst); LOGGER.info( "Sucessfully Uploaded Images for lucene-search-classification-sample"); searchPathName = EphesoftStringUtil.concatenate( batchSchemaService.getImageMagickBaseFolderPath( uploadLearningFileXML.getBatchClassId(), true), File.separator, docType.getDocTypeName(), File.separator, docTypeName, WebServiceConstants.FIRST_PAGE); uploadInputImagesToLearningFolder(searchPathName, workingDir, fileNamesFirst); LOGGER.info("Sucessfully Uploaded Images for image-classification-sample"); } } if (docType.getPageTypeMiddle() != null && docType.getPageTypeMiddle().getFilesToBeUploaded().getFileName() != null && !docType.getPageTypeMiddle().getFilesToBeUploaded().getFileName() .isEmpty()) { fileNamesMiddle = docType.getPageTypeMiddle().getFilesToBeUploaded().getFileName(); if (LUCENE_SEARCH_CLASSIFICATION_TYPE.equalsIgnoreCase(learningType)) { searchPathName = EphesoftStringUtil.concatenate( batchSchemaService.getSearchClassSamplePath( uploadLearningFileXML.getBatchClassId(), true), File.separator, 
docType.getDocTypeName(), File.separator, docTypeName, WebServiceConstants.MIDDLE_PAGE); uploadInputImagesToLearningFolder(searchPathName, workingDir, fileNamesMiddle); LOGGER.info( "Sucessfully Uploaded Images for lucene-search-classification-sample"); } else if ("Image".equalsIgnoreCase(learningType)) { searchPathName = EphesoftStringUtil.concatenate( batchSchemaService.getImageMagickBaseFolderPath( uploadLearningFileXML.getBatchClassId(), true), File.separator, docType.getDocTypeName(), File.separator, docTypeName, WebServiceConstants.MIDDLE_PAGE); uploadInputImagesToLearningFolder(searchPathName, workingDir, fileNamesMiddle); LOGGER.info("Sucessfully Uploaded Images for image-classification-sample"); } else { searchPathName = EphesoftStringUtil.concatenate( batchSchemaService.getSearchClassSamplePath( uploadLearningFileXML.getBatchClassId(), true), File.separator, docType.getDocTypeName(), File.separator, docTypeName, WebServiceConstants.MIDDLE_PAGE); uploadInputImagesToLearningFolder(searchPathName, workingDir, fileNamesMiddle); LOGGER.info( "Sucessfully Uploaded Images for lucene-search-classification-sample"); searchPathName = EphesoftStringUtil.concatenate( batchSchemaService.getImageMagickBaseFolderPath( uploadLearningFileXML.getBatchClassId(), true), File.separator, docType.getDocTypeName(), File.separator, docTypeName, WebServiceConstants.MIDDLE_PAGE); uploadInputImagesToLearningFolder(searchPathName, workingDir, fileNamesMiddle); LOGGER.info("Sucessfully Uploaded Images for image-classification-sample"); } } if (docType.getPageTypeLast() != null && docType.getPageTypeLast().getFilesToBeUploaded().getFileName() != null && !docType.getPageTypeLast().getFilesToBeUploaded().getFileName().isEmpty()) { fileNamesLast = docType.getPageTypeLast().getFilesToBeUploaded().getFileName(); if (LUCENE_SEARCH_CLASSIFICATION_TYPE.equalsIgnoreCase(learningType)) { searchPathName = EphesoftStringUtil.concatenate( batchSchemaService.getSearchClassSamplePath( 
uploadLearningFileXML.getBatchClassId(), true), File.separator, docType.getDocTypeName(), File.separator, docTypeName, WebServiceConstants.LAST_PAGE); uploadInputImagesToLearningFolder(searchPathName, workingDir, fileNamesLast); LOGGER.info( "Sucessfully Uploaded Images for lucene-search-classification-sample"); } else if ("Image".equalsIgnoreCase(learningType)) { searchPathName = EphesoftStringUtil.concatenate( batchSchemaService.getImageMagickBaseFolderPath( uploadLearningFileXML.getBatchClassId(), true), File.separator, docType.getDocTypeName(), File.separator, docTypeName, WebServiceConstants.LAST_PAGE); uploadInputImagesToLearningFolder(searchPathName, workingDir, fileNamesLast); LOGGER.info("Sucessfully Uploaded Images for image-classification-sample"); } else { searchPathName = EphesoftStringUtil.concatenate( batchSchemaService.getSearchClassSamplePath( uploadLearningFileXML.getBatchClassId(), true), File.separator, docType.getDocTypeName(), File.separator, docTypeName, WebServiceConstants.LAST_PAGE); uploadInputImagesToLearningFolder(searchPathName, workingDir, fileNamesLast); LOGGER.info( "Sucessfully Uploaded Images for lucene-search-classification-sample"); searchPathName = EphesoftStringUtil.concatenate( batchSchemaService.getImageMagickBaseFolderPath( uploadLearningFileXML.getBatchClassId(), true), File.separator, docType.getDocTypeName(), File.separator, docTypeName, WebServiceConstants.LAST_PAGE); uploadInputImagesToLearningFolder(searchPathName, workingDir, fileNamesLast); LOGGER.info("Sucessfully Uploaded Images for image-classification-sample"); } } } } else { final RestError restError = createUnprocessableEntityRestError( WebServiceConstants.INVALID_NUMBER_OF_FILES_FOR_UPLOAD_LEARNING, WebServiceConstants.INVALID_ARGUMENTS_IN_XML_INPUT_CODE); LOGGER.error("Mismatch in the XML input and files sent."); throw new ValidationException( WebServiceConstants.INVALID_NUMBER_OF_FILES_FOR_UPLOAD_LEARNING, restError); } } else { final RestError restError = 
new RestError(HttpStatus.UNPROCESSABLE_ENTITY, WebServiceConstants.PARAMETER_XML_INCORRECT_CODE, inputXMLValidationRes, inputXMLValidationRes + WebServiceConstants.CLASS_WEB_SERVICE_UTILITY, WebServiceConstants.DEFAULT_URL); LOGGER.error(WebServiceConstants.PARAMETER_XML_INCORRECT_MESSAGE + WebServiceConstants.HTTP_STATUS + HttpStatus.UNPROCESSABLE_ENTITY); throw new InternalServerException(inputXMLValidationRes, restError); } } catch (final FileNotFoundException fe) { respStr = WebServiceConstants.INPUT_XML_NOT_FOUND_MESSAGE; final RestError restError = new RestError(HttpStatus.UNPROCESSABLE_ENTITY, WebServiceConstants.INPUT_XML_NOT_FOUND_CODE, respStr, WebServiceConstants.INPUT_XML_NOT_FOUND_MESSAGE + WebServiceConstants.CLASS_WEB_SERVICE_UTILITY, WebServiceConstants.DEFAULT_URL); LOGGER.error("Error response at server:" + respStr); final InternalServerException internalServerExcpetion = new InternalServerException( WebServiceConstants.INTERNAL_SERVER_ERROR_MESSAGE, restError); LOGGER.error(respStr + WebServiceConstants.HTTP_STATUS); throw internalServerExcpetion; } catch (final org.xml.sax.SAXParseException ex) { respStr = "Error in Parsing Input XML.Please try again" + ex.getMessage(); final RestError restError = new RestError(HttpStatus.UNPROCESSABLE_ENTITY, WebServiceConstants.INPUT_XML_NOT_ABLE_TO_PARSE_CODE, respStr, WebServiceConstants.INPUT_XML_NOT_ABLE_TO_PARSE_MESSAGE + WebServiceConstants.CLASS_WEB_SERVICE_UTILITY, WebServiceConstants.DEFAULT_URL); LOGGER.error("Error response at server:" + respStr); final InternalServerException internalServerExcpetion = new InternalServerException( WebServiceConstants.INTERNAL_SERVER_ERROR_MESSAGE, restError); LOGGER.error(respStr + WebServiceConstants.HTTP_STATUS); throw internalServerExcpetion; // JIRA-Bug-11130 } catch (InternalServerException internalServerException) { throw internalServerException; } catch (ValidationException validationException) { throw validationException; } catch (final Exception exception) 
{ final HttpStatus status = HttpStatus.INTERNAL_SERVER_ERROR; respStr = WebServiceConstants.INTERNAL_SERVER_ERROR_MESSAGE + exception; final RestError restError = new RestError(status, WebServiceConstants.INTERNAL_SERVER_ERROR_CODE, WebServiceConstants.INTERNAL_SERVER_ERROR_MESSAGE, WebServiceConstants.INTERNAL_SERVER_ERROR_MESSAGE + WebServiceConstants.CLASS_WEB_SERVICE_UTILITY, WebServiceConstants.DEFAULT_URL); LOGGER.error("Error response at server:" + respStr); final InternalServerException internalServerExcpetion = new InternalServerException( WebServiceConstants.INTERNAL_SERVER_ERROR_MESSAGE, restError); LOGGER.error(respStr + WebServiceConstants.HTTP_STATUS + status); throw internalServerExcpetion; } finally { try { if (!workingDir.isEmpty()) { FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile()); } } catch (final Exception ex) { final HttpStatus status = HttpStatus.INTERNAL_SERVER_ERROR; respStr = WebServiceConstants.INTERNAL_SERVER_ERROR_MESSAGE + ex.getMessage(); final RestError restError = new RestError(status, WebServiceConstants.INTERNAL_SERVER_ERROR_CODE, WebServiceConstants.INTERNAL_SERVER_ERROR_MESSAGE, WebServiceConstants.INTERNAL_SERVER_ERROR_MESSAGE + WebServiceConstants.CLASS_WEB_SERVICE_UTILITY, WebServiceConstants.DEFAULT_URL); LOGGER.error("Error response at server:" + respStr); final InternalServerException internalServerExcpetion = new InternalServerException( WebServiceConstants.INTERNAL_SERVER_ERROR_MESSAGE, restError); LOGGER.error(respStr + WebServiceConstants.HTTP_STATUS + status); throw internalServerExcpetion; } } } else { final HttpStatus status = HttpStatus.INTERNAL_SERVER_ERROR; respStr = WebServiceConstants.IMPROPER_INPUT_TO_SERVER_MESSAGE; final RestError restError = new RestError(status, WebServiceConstants.IMPROPER_INPUT_TO_SERVER_CODE, WebServiceConstants.IMPROPER_INPUT_TO_SERVER_MESSAGE, WebServiceConstants.IMPROPER_INPUT_TO_SERVER_MESSAGE + WebServiceConstants.CLASS_WEB_SERVICE_UTILITY, 
WebServiceConstants.DEFAULT_URL); LOGGER.error(respStr + WebServiceConstants.HTTP_STATUS + status); throw new InternalServerException(WebServiceConstants.IMPROPER_INPUT_TO_SERVER_MESSAGE, restError); } }
From source file:com.amalto.workbench.utils.Util.java
/**
 * Builds an {@code XSDSchema} from raw schema data (a file path, a URL, or
 * literal XSD text) and wires up an EMF adapter that recursively resolves
 * imported/included schemas through this same method.
 * <p>
 * The {@code schemaMonitor} map counts how often each schema location is
 * resolved; a count reaching 10 marks the location with -1 and aborts the
 * recursion, guarding against circular imports.
 * <p>
 * NOTE(review): external DOCTYPE declarations are stripped with a regex
 * rather than disabling DTD processing on the parser factory — presumably an
 * XXE mitigation; confirm this is sufficient for untrusted schema sources.
 *
 * @param namespaceURI  target namespace of the schema being resolved, or null
 * @param rawData       file path, URL, or literal XSD content; null yields the schema-for-schema
 * @param imports       accumulator for discovered imports (filled by importSchema)
 * @param treeObj       context object used when fetching remote schema content
 * @param uri           true if rawData should be treated as a file path / URL
 * @param exceptions    accumulator for parse errors (method returns null after recording one)
 * @param schemaMonitor per-location resolution counters used as the recursion guard
 * @return the resolved schema, or null if parsing failed
 * @throws Exception on I/O or parser-configuration failure
 */
private static XSDSchema getXSDSchema(String namespaceURI, String rawData, final List<XSDImport> imports,
        final TreeObject treeObj, boolean uri, final List<Exception> exceptions,
        final Map<String, Integer> schemaMonitor) throws Exception {
    FileInputStream fin = null;
    try {
        // Scratch file URI used only to give the EMF resource a location.
        final String xsdFileName = System.getProperty("user.dir") + "/.xsdModel.xml";//$NON-NLS-1$//$NON-NLS-2$
        URI fileURI = URI.createFileURI(xsdFileName);
        DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
        documentBuilderFactory.setNamespaceAware(true);
        documentBuilderFactory.setValidating(false);
        DocumentBuilder documentBuilder;
        XSDSchema schema = null;
        InputSource source = null;
        Document document = null;
        // Remember the original value: rawData may be overwritten with fetched content below.
        String schemaLocation = rawData;
        documentBuilder = documentBuilderFactory.newDocumentBuilder();
        if (rawData == null) {
            // No data: fall back to the built-in schema-for-schema.
            return XSDSchemaImpl.getSchemaForSchema("http://www.w3.org/2001/XMLSchema");//$NON-NLS-1$
        }
        // A local .xsd path must actually exist on disk.
        if (namespaceURI == null && rawData.endsWith(".xsd") && rawData.indexOf(File.separator) > 0) {//$NON-NLS-1$
            File rawFile = new File(rawData);
            if (!rawFile.exists()) {
                throw new IllegalArgumentException(rawData);
            }
        }
        // import namespace="http://xxx" schemaLocation="xxxx" — fetch the
        // imported schema over the network and strip any DOCTYPE declaration.
        if (namespaceURI != null && schemaLocation.endsWith(".xsd")) {//$NON-NLS-1$
            URL url = new java.net.URI(namespaceURI + "/" + rawData).toURL();//$NON-NLS-1$
            uri = false;
            rawData = IOUtils.toString(url.openConnection().getInputStream());
            rawData = rawData.replaceAll("<!DOCTYPE(.*?)>", "");//$NON-NLS-1$//$NON-NLS-2$
        }
        // Special case: the W3C xml.xsd is always fetched from its canonical URL.
        if (rawData.equals("http://www.w3.org/2001/03/xml.xsd")) {//$NON-NLS-1$
            URL url = new java.net.URI("http://www.w3.org/2001/03/xml.xsd").toURL();//$NON-NLS-1$
            uri = false;
            rawData = IOUtils.toString(url.openConnection().getInputStream());
            rawData = rawData.replaceAll("<!DOCTYPE(.*?)>", "");//$NON-NLS-1$//$NON-NLS-2$
        }
        if (uri) {
            // rawData is a location: read the local file, or fetch it remotely.
            File file = new File(rawData);
            if (file.exists()) {
                fin = new FileInputStream(file);
                source = new InputSource(fin);
            } else {
                source = new InputSource(new StringReader(Util.getResponseFromURL(rawData, treeObj)));
            }
        } else {
            // rawData is literal XSD content.
            source = new InputSource(new StringReader(rawData));
        }
        try {
            document = documentBuilder.parse(source);
        } catch (SAXParseException ex) {
            // Record the parse failure for the caller and bail out quietly.
            exceptions.add(ex);
            return null;
        }
        schema = XSDSchemaImpl.createSchema(document.getDocumentElement());
        ResourceSet resourceSet = new ResourceSetImpl();
        Resource resource = resourceSet.createResource(fileURI);
        // Adapter factory that resolves nested schema locations by recursing
        // into this method, with schemaMonitor as the cycle guard.
        resourceSet.getAdapterFactories().add(new AdapterFactoryImpl() {
            class SchemaLocator extends AdapterImpl implements XSDSchemaLocator {
                public XSDSchema locateSchema(XSDSchema xsdSchema, String namespaceURI,
                        String rawSchemaLocationURI, String resolvedSchemaLocation) {
                    XSDSchema schema;
                    // Bump the resolution counter for this location.
                    Integer rawCnt = schemaMonitor.get(rawSchemaLocationURI);
                    if (rawCnt == null) {
                        rawCnt = 0;
                    } else {
                        rawCnt++;
                    }
                    schemaMonitor.put(rawSchemaLocationURI, rawCnt);
                    if (rawCnt >= 10) {
                        // Likely a circular import: poison the entry and stop.
                        schemaMonitor.put(rawSchemaLocationURI, -1);
                        return null;
                    }
                    try {
                        schema = Util.getXSDSchema(namespaceURI, rawSchemaLocationURI, imports, treeObj, true,
                                exceptions, schemaMonitor);
                    } catch (Exception e) {
                        // On failure, fall back to the built-in schema for the namespace.
                        return XSDSchemaImpl.getSchemaForSchema(namespaceURI);
                    }
                    schema.setTargetNamespace(namespaceURI);
                    schema.setElement(schema.getDocument().getDocumentElement());
                    return schema;
                }

                // NOTE(review): method name is misspelled ("Adatper") so it does
                // NOT override AdapterImpl.isAdapterForType; adaptNew() below
                // returns the locator unconditionally, so resolution still works.
                public boolean isAdatperForType(Object type) {
                    return type == XSDSchemaLocator.class;
                }
            }

            protected SchemaLocator schemaLocator = new SchemaLocator();

            @Override
            public boolean isFactoryForType(Object type) {
                return type == XSDSchemaLocator.class;
            }

            @Override
            public Adapter adaptNew(Notifier target, Object type) {
                return schemaLocator;
            }
        });
        // import namespace="http://xxx" schemaLocation="xxxx": keep the original
        // location; otherwise point at the scratch file and use the "xsd" prefix.
        if (namespaceURI != null && schemaLocation.endsWith(".xsd")) {//$NON-NLS-1$
            schema.setSchemaLocation(schemaLocation);
        } else {
            schema.setSchemaLocation(fileURI.toString());
            // Set the schema-for-schema QName prefix to "xsd".
            schema.setSchemaForSchemaQNamePrefix("xsd");//$NON-NLS-1$
        }
        // Catch the NPE to make sure the data model can still run in case of an
        // unknown conflict while adding the root schema to the resource.
        try {
            resource.getContents().add(schema);
        } catch (Exception ex) {
            log.error(ex.getMessage(), ex);
        }
        // If any location was poisoned (-1) by the recursion guard, return the
        // schema as-is without processing imports further.
        Iterator<Integer> iter = schemaMonitor.values().iterator();
        while (iter.hasNext()) {
            Integer it = iter.next();
            if (it.intValue() == -1) {
                return schema;
            }
        }
        importSchema(schema, imports, schemaMonitor);
        schema.setElement(document.getDocumentElement());
        return schema;
    } finally {
        // Release the file handle opened for a local schema file, if any.
        if (fin != null) {
            fin.close();
        }
    }
}
From source file:nl.armatiek.xslweb.serializer.FopSerializer.java
/**
 * SAX warning callback: records the parser warning in the log and lets
 * processing continue.
 *
 * @param e the warning raised by the parser
 * @throws SAXException never thrown by this implementation
 */
@Override
public void warning(SAXParseException e) throws SAXException {
    final String detail = e.getMessage();
    logger.warn(detail, e);
}