List of usage examples for org.apache.maven.plugin.MojoExecutionException#MojoExecutionException

public MojoExecutionException(String message, Throwable cause)

Constructs a new MojoExecutionException exception wrapping an underlying Throwable and providing a message.
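The excerpts below all follow the same pattern: catch a low-level checked exception, then rethrow it as a MojoExecutionException whose message names the failed step and whose cause preserves the original stack trace. As a minimal, self-contained sketch of that pattern (the MyMojo class, the "check" goal name, and the dataFile parameter are hypothetical, invented here only to illustrate the two-argument constructor):

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.List;

import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;

@Mojo(name = "check")
public class MyMojo extends AbstractMojo {

    // Hypothetical parameter, only here to give the example something to read.
    @Parameter(defaultValue = "${project.build.directory}/data.txt")
    private File dataFile;

    public void execute() throws MojoExecutionException {
        try {
            List<String> lines = Files.readAllLines(dataFile.toPath(), StandardCharsets.UTF_8);
            getLog().info("Read " + lines.size() + " lines from " + dataFile);
        } catch (IOException e) {
            // Message for the build log; the cause keeps the full stack trace available via -e or -X.
            throw new MojoExecutionException("Unable to read " + dataFile, e);
        }
    }
}

Maven prints the message as the build failure reason and shows the wrapped cause when the build is run with -e or -X.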
From source file:com.agilejava.docbkx.maven.AbstractCitrusPdfMojo.java
License:Open Source License
@Override
public void postProcessResult(File result) throws MojoExecutionException {
    // fonts? -- maven??
    if (fonts != null) {
        setField(AbstractPdfMojo.class, "fonts", fonts);
    }

    // fop?
    forceBreakingWords = trimToNull(forceBreakingWords);
    if (forceBreakingWords != null) {
        Locale locale = parseLocale(forceBreakingWords);
        WordBreaker wb = new WordBreaker(result, locale);
        try {
            wb.filter();
        } catch (Exception e) {
            throw new MojoExecutionException("Failed to breaking words", e);
        }
    }

    super.postProcessResult(result);
}
From source file:com.agilejava.docbkx.maven.AbstractEpub3Mojo.java
License:Apache License
public void postProcessResult(File result) throws MojoExecutionException {
    super.postProcessResult(result);

    final File targetDirectory = result.getParentFile();

    // override current container.xml
    try {
        final URL containerURL = getClass().getResource("/epub/container.epub3.xml");
        // reuse of container.xml from epub output
        FileUtils.copyURLToFile(containerURL,
                new File(targetDirectory, "META-INF" + File.separator + "container.xml"));
    } catch (IOException e) {
        throw new MojoExecutionException("Unable to copy hardcoded container.xml file", e);
    }

    final byte[] mimetypeData;
    try {
        mimetypeData = makeMimetype();
    } catch (UnsupportedEncodingException e) {
        throw new MojoExecutionException("Unable to create mimetype data", e);
    }

    try {
        // first delete "mimetype" that is already there
        final File mimetype = new File(targetDirectory, "mimetype");
        mimetype.delete();

        ZipArchiver zipArchiver = new ZipArchiver() {
            protected void initZipOutputStream(ZipOutputStream zOut) throws IOException, ArchiverException {
                CRC32 crc = new CRC32();
                crc.update(mimetypeData);

                ZipEntry ze = new ZipEntry("mimetype");
                ze.setMethod(ZipEntry.STORED);
                ze.setSize(mimetypeData.length);
                ze.setCrc(crc.getValue());

                zOut.putNextEntry(ze);
                zOut.write(mimetypeData);
            }
        };

        // add content
        zipArchiver.addDirectory(targetDirectory);
        zipArchiver.setCompress(true);
        // set output file
        zipArchiver.setDestFile(new File(targetDirectory.getParentFile(), result.getName())); // copy it to parent dir
        zipArchiver.createArchive();

        getLog().debug("epub file created at: " + zipArchiver.getDestFile().getAbsolutePath());
    } catch (Exception e) {
        throw new MojoExecutionException("Unable to zip epub file", e);
    }
}
From source file:com.agilejava.docbkx.maven.AbstractEpubMojo.java
License:Apache License
public void postProcessResult(File result) throws MojoExecutionException {
    super.postProcessResult(result);

    final File targetDirectory = result.getParentFile();

    try {
        final URL containerURL = getClass().getResource("/epub/container.xml");
        FileUtils.copyURLToFile(containerURL,
                new File(targetDirectory, "META-INF" + File.separator + "container.xml"));
    } catch (IOException e) {
        throw new MojoExecutionException("Unable to copy hardcoded container.xml file", e);
    }

    // copy mimetype file
    try {
        final URL mimetypeURL = getClass().getResource("/epub/mimetype");
        FileUtils.copyURLToFile(mimetypeURL, new File(targetDirectory, "mimetype"));
    } catch (IOException e) {
        throw new MojoExecutionException("Unable to copy hardcoded mimetype file", e);
    }

    try {
        ZipArchiver zipArchiver = new ZipArchiver();
        zipArchiver.addDirectory(targetDirectory);
        zipArchiver.setCompress(true); // seems to not be a problem to have mimetype compressed
        zipArchiver.setDestFile(new File(targetDirectory.getParentFile(), result.getName())); // copy it to parent dir
        zipArchiver.createArchive();

        getLog().debug("epub file created at: " + zipArchiver.getDestFile().getAbsolutePath());
    } catch (Exception e) {
        throw new MojoExecutionException("Unable to zip epub file", e);
    }
}
From source file:com.agilejava.docbkx.maven.AbstractFoMojo.java
License:Apache License
/**
 * DOCUMENT ME!
 *
 * @param result DOCUMENT ME!
 *
 * @throws MojoExecutionException DOCUMENT ME!
 */
public void postProcessResult(File result) throws MojoExecutionException {
    super.postProcessResult(result);

    final FopFactory fopFactory = FopFactory.newInstance();
    final FOUserAgent userAgent = fopFactory.newFOUserAgent();
    userAgent.setBaseURL(baseUrl);

    // FOUserAgent can be used to set PDF metadata
    Configuration configuration = loadFOPConfig();
    InputStream in = null;
    OutputStream out = null;

    try {
        in = openFileForInput(result);
        final File outputFile = getOutputFile(result);
        out = openFileForOutput(outputFile);
        fopFactory.setUserConfig(configuration);
        Fop fop = fopFactory.newFop(getMimeType(), userAgent, out);

        // Setup JAXP using identity transformer
        TransformerFactory factory = TransformerFactory.newInstance();
        Transformer transformer = factory.newTransformer(); // identity transformer

        // Setup input stream
        Source src = new StreamSource(in);

        // Resulting SAX events (the generated FO) must be piped through to FOP
        Result res = new SAXResult(fop.getDefaultHandler());

        // Start XSLT transformation and FOP processing
        transformer.transform(src, res);
        getLog().info(outputFile.getAbsolutePath() + " has been generated.");
    } catch (FOPException e) {
        throw new MojoExecutionException("Failed to convert to " + getTargetFileExtension(), e);
    } catch (TransformerConfigurationException e) {
        throw new MojoExecutionException("Failed to load JAXP configuration", e);
    } catch (TransformerException e) {
        throw new MojoExecutionException("Failed to transform to " + getTargetFileExtension(), e);
    } finally {
        IOUtils.closeQuietly(out);
        IOUtils.closeQuietly(in);
    }
}
From source file:com.agilejava.docbkx.maven.AbstractFoMojo.java
License:Apache License
/**
 * DOCUMENT ME!
 *
 * @return DOCUMENT ME!
 *
 * @throws MojoExecutionException DOCUMENT ME!
 */
protected Configuration loadFOPConfig() throws MojoExecutionException {
    // if using external fop configuration file
    if (externalFOPConfiguration != null) {
        DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder();

        try {
            if (getLog().isDebugEnabled())
                getLog().debug("Using external FOP configuration file: " + externalFOPConfiguration.toString());

            getLog().info("Ignoring pom inline FOP configuration");
            return builder.buildFromFile(externalFOPConfiguration);
        } catch (IOException ioe) {
            throw new MojoExecutionException("Failed to load external FOP config.", ioe);
        } catch (SAXException saxe) {
            throw new MojoExecutionException("Failed to parse external FOP config.", saxe);
        } catch (ConfigurationException e) {
            throw new MojoExecutionException("Failed to do something Avalon requires....", e);
        }
        // else generating the configuration file
    } else {
        ClassLoader loader = this.getClass().getClassLoader();
        InputStream in = loader.getResourceAsStream("fonts.stg");
        Reader reader = new InputStreamReader(in);
        StringTemplateGroup group = new StringTemplateGroup(reader);
        StringTemplate template = group.getInstanceOf("config");
        template.setAttribute("fonts", fonts);

        if (targetResolution != 0) {
            template.setAttribute("targetResolution", targetResolution);
        }

        if (sourceResolution != 0) {
            template.setAttribute("sourceResolution", sourceResolution);
        }

        DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder();
        final String config = template.toString();

        if (getLog().isDebugEnabled()) {
            getLog().debug(config);
        }

        try {
            return builder.build(IOUtils.toInputStream(config));
        } catch (IOException ioe) {
            throw new MojoExecutionException("Failed to load FOP config.", ioe);
        } catch (SAXException saxe) {
            throw new MojoExecutionException("Failed to parse FOP config.", saxe);
        } catch (ConfigurationException e) {
            throw new MojoExecutionException("Failed to do something Avalon requires....", e);
        }
    }
}
From source file:com.agilejava.docbkx.maven.AbstractTransformerMojo.java
License:Apache License
/**
 * Builds the actual output document.
 */
public void execute() throws MojoExecutionException, MojoFailureException {
    if (isSkip()) {
        getLog().info("Skipping plugin execution");
        return;
    }

    // userland (ant tasks) pre process
    preProcess();

    final File targetDirectory = getTargetDirectory();
    final File sourceDirectory = getSourceDirectory();

    if (!sourceDirectory.exists()) {
        return; // No sources, so there is nothing to render.
    }

    if (!targetDirectory.exists()) {
        org.codehaus.plexus.util.FileUtils.mkdir(targetDirectory.getAbsolutePath());
    }

    final String[] included = scanIncludedFiles();

    // configure a resolver for catalog files
    final CatalogManager catalogManager = createCatalogManager();
    final CatalogResolver catalogResolver = new CatalogResolver(catalogManager);
    // configure a resolver for urn:dockbx:stylesheet
    final URIResolver uriResolver = createStyleSheetResolver(catalogResolver);
    // configure a resolver for xml entities
    final InjectingEntityResolver injectingResolver = createEntityResolver(catalogResolver);

    EntityResolver resolver = catalogResolver;
    if (injectingResolver != null) {
        resolver = injectingResolver;
    }

    // configure the builder for XSL Transforms
    final TransformerBuilder builder = createTransformerBuilder(uriResolver);
    // configure the XML parser
    SAXParserFactory factory = createParserFactory();

    // iterate over included source files
    for (int i = included.length - 1; i >= 0; i--) {
        try {
            if (injectingResolver != null) {
                injectingResolver.forceInjection();
            }

            final String inputFilename = included[i];
            // targetFilename is inputFilename - ".xml" + targetFile extension
            String baseTargetFile = inputFilename.substring(0, inputFilename.length() - 4);
            final String targetFilename = baseTargetFile + "." + getTargetFileExtension();

            final File sourceFile = new File(sourceDirectory, inputFilename);
            getLog().debug("SourceFile: " + sourceFile.toString());

            // creating targetFile
            File targetFile = null;
            if (isUseStandardOutput()) {
                targetFile = new File(targetDirectory, targetFilename);
                getLog().debug("TargetFile: " + targetFile.toString());
            } else {
                String name = new File(baseTargetFile).getName();
                String dir = new File(baseTargetFile).getParent();

                if (dir == null) {
                    // file is located on root of targetDirectory
                    targetFile = targetDirectory;
                } else {
                    // else append the relative directory to targetDirectory
                    targetFile = new File(targetDirectory, dir);
                }

                targetFile = new File(targetFile, name + "." + getTargetFileExtension());
                getLog().debug("TargetDirectory: " + targetDirectory.getAbsolutePath());
            }

            if (!targetFile.exists()
                    || (targetFile.exists() && FileUtils.isFileNewer(sourceFile, targetFile))
                    || (targetFile.exists() && getXIncludeSupported())) {
                getLog().info("Processing input file: " + inputFilename);

                final XMLReader reader = factory.newSAXParser().getXMLReader();
                // configure XML reader
                reader.setEntityResolver(resolver);
                // eval PI
                final PreprocessingFilter filter = createPIHandler(resolver, reader);
                // configure SAXSource for XInclude
                final Source xmlSource = createSource(inputFilename, sourceFile, filter);

                configureXref(targetFile);

                // XSL Transformation setup
                final Transformer transformer = builder.build();
                adjustTransformer(transformer, sourceFile.getAbsolutePath(), targetFile);

                // configure the output file
                Result result = null;
                if (!shouldProcessResult()) {
                    // if the output is not the main result of the transformation, ie xref database
                    if (getLog().isDebugEnabled()) {
                        result = new StreamResult(System.out);
                    } else {
                        result = new StreamResult(new NullOutputStream());
                    }
                } else if (isUseStandardOutput()) {
                    // if the output of the main result is the standard output
                    result = new StreamResult(targetFile.getAbsolutePath());
                } else {
                    // if the output of the main result is not the standard output
                    if (getLog().isDebugEnabled()) {
                        result = new StreamResult(System.out);
                    } else {
                        result = new StreamResult(new NullOutputStream());
                    }
                }

                transformer.transform(xmlSource, result);

                if (shouldProcessResult()) {
                    // if the transformation has produced the expected main results, we can continue
                    // the chain of processing in the output mojos which can override postProcessResult
                    postProcessResult(targetFile);

                    if (isUseStandardOutput()) {
                        getLog().info(targetFile + " has been generated.");
                    } else {
                        getLog().info("See " + targetFile.getParentFile().getAbsolutePath() + " for generated file(s)");
                    }
                } else {
                    // if the output is not the main result
                    getLog().info("See " + targetFile.getParentFile().getAbsolutePath() + " for generated secondary file(s)");
                }
            } else {
                getLog().info(targetFile + " is up to date.");
            }
        } catch (SAXException saxe) {
            throw new MojoExecutionException("Failed to parse " + included[i] + ".", saxe);
        } catch (TransformerException te) {
            throw new MojoExecutionException("Failed to transform " + included[i] + ".", te);
        } catch (ParserConfigurationException pce) {
            throw new MojoExecutionException("Failed to construct parser.", pce);
        }
    }

    // userland (ant tasks) post process
    postProcess();
}
From source file:com.agilejava.docbkx.maven.AbstractTransformerMojo.java
License:Apache License
/**
 * Creates a SAXSource configured with the desired XInclude mode. XOM library is used for advanced XInclude else
 * Xerces XInclude is used.
 *
 * @param inputFilename Is used for temp file generation (XOM)
 * @param sourceFile The docbook source file.
 * @param filter The XML PI filter.
 * @return An XInclude configured SAXSource
 * @throws MojoExecutionException
 */
protected Source createSource(String inputFilename, File sourceFile, PreprocessingFilter filter)
        throws MojoExecutionException {
    // if both properties are set, XOM is used for a better XInclude support.
    if (getXIncludeSupported() && getGeneratedSourceDirectory() != null) {
        getLog().debug("Advanced XInclude mode entered");

        final Builder xomBuilder = new Builder();

        try {
            final nu.xom.Document doc = xomBuilder.build(sourceFile);
            XIncluder.resolveInPlace(doc);
            // TODO also dump PIs computed and Entities included
            final File dump = dumpResolvedXML(inputFilename, doc);
            return new SAXSource(filter, new InputSource(dump.getAbsolutePath()));
        } catch (ValidityException e) {
            throw new MojoExecutionException("Failed to validate source", e);
        } catch (ParsingException e) {
            throw new MojoExecutionException("Failed to parse source", e);
        } catch (IOException e) {
            throw new MojoExecutionException("Failed to read source", e);
        } catch (XIncludeException e) {
            throw new MojoExecutionException("Failed to process XInclude", e);
        }
    } else {
        // else fallback on Xerces XInclude support.
        getLog().debug("Xerces XInclude mode entered");

        final InputSource inputSource = new InputSource(sourceFile.getAbsolutePath());
        return new SAXSource(filter, inputSource);
    }
}
From source file:com.agilejava.docbkx.maven.AbstractTransformerMojo.java
License:Apache License
/**
 * Creates an URI resolver to handle <code>urn:docbkx:stylesheet(/)</code> as a special URI. This URI points to the
 * default docbook stylesheet location.
 *
 * @param catalogResolver The initial resolver to use
 * @return The Stylesheet resolver.
 * @throws MojoExecutionException If an error occurs while reading the stylesheet
 */
private URIResolver createStyleSheetResolver(CatalogResolver catalogResolver) throws MojoExecutionException {
    URIResolver uriResolver;

    try {
        URL url = getNonDefaultStylesheetURL() == null ? getDefaultStylesheetURL() : getNonDefaultStylesheetURL();
        getLog().debug("Using stylesheet: " + url.toExternalForm());
        uriResolver = new StylesheetResolver("urn:docbkx:stylesheet",
                new StreamSource(url.openStream(), url.toExternalForm()), catalogResolver);
    } catch (IOException ioe) {
        throw new MojoExecutionException("Failed to read stylesheet.", ioe);
    }

    return uriResolver;
}
From source file:com.agilejava.docbkx.maven.AbstractTransformerMojo.java
License:Apache License
/**
 * Saves the Docbook XML file with all XInclude resolved.
 *
 * @param initialFilename Filename of the root docbook source file.
 * @param doc XOM Document resolved.
 * @return The new file generated.
 * @throws MojoExecutionException
 */
protected File dumpResolvedXML(String initialFilename, nu.xom.Document doc) throws MojoExecutionException {
    final File file = new File(initialFilename);
    final String parent = file.getParent();
    File resolvedXML = null;

    if (parent != null) {
        resolvedXML = new File(getGeneratedSourceDirectory(), parent);
        resolvedXML.mkdirs();
        resolvedXML = new File(resolvedXML, "(gen)" + file.getName());
    } else {
        getGeneratedSourceDirectory().mkdirs();
        resolvedXML = new File(getGeneratedSourceDirectory(), "(gen)" + initialFilename);
    }

    FileOutputStream fos = null;
    try {
        fos = new FileOutputStream(resolvedXML);
    } catch (FileNotFoundException e) {
        throw new MojoExecutionException("Failed to open dump file", e);
    }

    if (fos != null) {
        getLog().info("Dumping to " + resolvedXML.getAbsolutePath());

        final BufferedOutputStream bos = new BufferedOutputStream(fos);
        final Serializer serializer = new Serializer(bos);

        try {
            serializer.write(doc);
            bos.flush();
            bos.close();
            fos.close();
            return resolvedXML;
        } catch (IOException e) {
            throw new MojoExecutionException("Failed to write to dump file", e);
        } finally {
            IOUtils.closeQuietly(bos);
            IOUtils.closeQuietly(fos);
        }
    }

    throw new MojoExecutionException("Failed to open dump file");
}
From source file:com.agilejava.docbkx.maven.AbstractTransformerMojo.java
License:Apache License
/**
 * Creates a <code>DocumentBuilder</code> to be used to parse DocBook XML documents.
 *
 * @return A <code>DocumentBuilder</code> instance.
 * @throws MojoExecutionException If we cannot create an instance of the <code>DocumentBuilder</code>.
 */
protected DocumentBuilder createDocumentBuilder() throws MojoExecutionException {
    try {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        DocumentBuilder builder = factory.newDocumentBuilder();
        return builder;
    } catch (ParserConfigurationException pce) {
        throw new MojoExecutionException("Failed to construct parser.", pce);
    }
}