List of usage examples for the org.apache.maven.plugin constructor MojoExecutionException(String, Throwable).
public MojoExecutionException(String message, Throwable cause)
Constructs a MojoExecutionException wrapping an underlying Throwable and providing a descriptive message.
From source file: at.yawk.mdep.GenerateMojo.java
/**
 * Collects the project's compile/runtime dependency artifacts, resolves a
 * checksum entry for each one matching the include/exclude patterns, and
 * writes the result as {@code mdep-dependencies.xml} into the output
 * directory, which is then registered as a project resource.
 *
 * @throws MojoExecutionException if the dependency tree cannot be built, the
 *         output directory cannot be created, or serialization fails
 */
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    if (cacheHours > 0) {
        cacheStore = Environment.createCacheStore(new Logger() {
            @Override
            public void info(String msg) {
                getLog().info(msg);
            }

            @Override
            public void warn(String msg) {
                getLog().warn(msg);
            }
        }, "mdep-maven-plugin");
    }

    // Build the selection matcher: accept everything unless an explicit
    // include list is configured, then subtract the excludes.
    ArtifactMatcher includesMatcher;
    if (includes == null) {
        includesMatcher = ArtifactMatcher.acceptAll();
    } else {
        includesMatcher = ArtifactMatcher.anyMatch(toAntMatchers(includes));
    }
    ArtifactMatcher excludesMatcher = ArtifactMatcher.anyMatch(toAntMatchers(excludes));
    ArtifactMatcher matcher = includesMatcher.and(excludesMatcher.not());

    // Walk the dependency tree and collect all compile/runtime-scope artifacts.
    List<Artifact> artifacts = new ArrayList<>();
    try {
        ArtifactFilter subtreeFilter = artifact -> artifact.getScope() == null
                || artifact.getScope().equals(Artifact.SCOPE_COMPILE)
                || artifact.getScope().equals(Artifact.SCOPE_RUNTIME);
        DependencyNode root = dependencyTreeBuilder.buildDependencyTree(project, localRepository, subtreeFilter);
        root.accept(new DependencyNodeVisitor() {
            @Override
            public boolean visit(DependencyNode node) {
                if (node.getArtifact() != null) {
                    // Returning false prunes the whole subtree of an
                    // out-of-scope artifact.
                    if (!subtreeFilter.include(node.getArtifact())) {
                        return false;
                    }
                    artifacts.add(node.getArtifact());
                }
                return true;
            }

            @Override
            public boolean endVisit(DependencyNode node) {
                return true;
            }
        });
    } catch (DependencyTreeBuilderException e) {
        throw new MojoExecutionException("Failed to build dependency tree", e);
    }

    List<Dependency> dependencies = new ArrayList<>();
    for (Artifact artifact : artifacts) {
        if (matcher.matches(artifact)) {
            dependencies.add(findArtifact(artifact));
        }
    }

    getLog().info("Saving dependency xml");
    DependencySet dependencySet = new DependencySet();
    dependencySet.setDependencies(dependencies);
    // FIX: mkdirs() returns false when the directory already exists, so the
    // original unconditionally failed on a pre-existing output directory.
    // Only fail when the directory is genuinely absent afterwards.
    if (!outputDirectory.mkdirs() && !outputDirectory.isDirectory()) {
        throw new MojoExecutionException("Failed to create output directory");
    }
    File outputFile = new File(outputDirectory, "mdep-dependencies.xml");
    try {
        JAXBContext jaxbContext = JAXBContext.newInstance(DependencySet.class);
        Marshaller marshaller = jaxbContext.createMarshaller();
        marshaller.marshal(dependencySet, outputFile);
    } catch (JAXBException e) {
        throw new MojoExecutionException("Failed to serialize dependency set", e);
    }

    // Register the output directory as a resource so the xml ends up packaged.
    Resource resource = new Resource();
    resource.setDirectory(outputDirectory.toString());
    resource.setFiltering(false);
    project.addResource(resource);
}
From source file: at.yawk.mdep.GenerateMojo.java
private Dependency findArtifact(Artifact artifact) throws MojoExecutionException { // all are scanned, the first is used to store the dependency List<Path> cacheSearchLocations = new ArrayList<>(); List<String> coordinateComponents = Arrays.asList(artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(), artifact.getScope()); // in 1.0, we used only ':' as the separator. This does not work on windows, and the following code fixes // that problem. for (String separator : Arrays.asList(":", "/")) { try {/* www .j ava 2s .co m*/ cacheSearchLocations.add(cacheStore.resolve(String.join(separator, coordinateComponents))); } catch (InvalidPathException ignored) { } } // check local cache if (cacheHours > 0) { for (Path searchLocation : cacheSearchLocations) { if (Files.exists(searchLocation)) { Instant cacheDeadline = Instant.now().minusSeconds((long) (60 * 60 * cacheHours)); try { if (Files.getLastModifiedTime(searchLocation).toInstant().isAfter(cacheDeadline)) { try (InputStream in = Files.newInputStream(searchLocation)) { Dependency dependency = (Dependency) JAXBContext.newInstance(Dependency.class) .createUnmarshaller().unmarshal(in); getLog().info("Checksum was present in local cache: " + artifact); return dependency; } } } catch (IOException | JAXBException e) { throw new MojoExecutionException("Failed to read local cache", e); } } } } for (ArtifactRepository repository : remoteArtifactRepositories) { // only scan configured repositories if (this.repositories != null && !this.repositories.contains(repository.getId())) { continue; } Dependency dependency = findArtifactInRepository(artifact, repository); if (dependency != null) { if (cacheHours > 0) { try { Path target = cacheSearchLocations.get(0); if (!Files.isDirectory(target.getParent())) { Files.createDirectories(target.getParent()); } try (OutputStream out = Files.newOutputStream(target)) { JAXBContext.newInstance(Dependency.class).createMarshaller().marshal(dependency, out); } } catch (IOException 
| JAXBException e) { getLog().warn("Could not save dependency to local cache", e); } } return dependency; } } throw new MojoExecutionException("Could not find " + artifact + " in configured repositories"); }
From source file: at.yawk.mdep.GenerateMojo.java
@Nullable @SneakyThrows({ MalformedURLException.class, NoSuchAlgorithmException.class }) @VisibleForTesting//from ww w. j a v a 2s. c om Dependency findArtifactInRepository(Artifact artifact, ArtifactRepository repository) throws MojoExecutionException { String artifactPath = getArtifactPath(artifact, artifact.getVersion()); if (artifact.isSnapshot()) { ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata(artifact) { // maven is weird - i have yet to find a better solution. @Override public boolean storedInArtifactVersionDirectory() { return true; } @Override public String getBaseVersion() { return artifact.getBaseVersion(); } }; // try to load maven-metadata.xml in case we need to use a different version for snapshots URL metaUrl = new URL(repository.getUrl() + '/' + repository.pathOfRemoteRepositoryMetadata(metadata)); Metadata loadedMetadata; try (InputStream input = openStream(metaUrl)) { loadedMetadata = new MetadataXpp3Reader().read(input, true); } catch (IOException e) { // could not find metadata loadedMetadata = null; } catch (XmlPullParserException e) { throw new MojoExecutionException("Failed to parse metadata", e); } if (loadedMetadata != null) { Snapshot snapshot = loadedMetadata.getVersioning().getSnapshot(); String versionWithoutSuffix = artifact.getVersion().substring(0, artifact.getBaseVersion().lastIndexOf('-')); artifactPath = getArtifactPath(artifact, versionWithoutSuffix + '-' + snapshot.getTimestamp() + '-' + snapshot.getBuildNumber()); } } URL url = new URL(repository.getUrl() + '/' + artifactPath); try (InputStream input = openStream(url)) { getLog().info("Getting checksum for " + artifact); MessageDigest digest = MessageDigest.getInstance("SHA-512"); byte[] buf = new byte[4096]; int len; while ((len = input.read(buf)) >= 0) { digest.update(buf, 0, len); } Dependency dependency = new Dependency(); dependency.setUrl(url); dependency.setSha512sum(digest.digest()); return dependency; } catch (IOException ignored) { // not in 
this repo return null; } }
From source file: au.com.alderaan.eclipselink.mojo.EclipselinkStaticWeaveMojo.java
License: Apache License
public void execute() throws MojoExecutionException { try {//ww w.j a v a 2s.c om if (includeProjectClasspath) { // Thread context class loader is the ClassRealm for this plugin. ClassRealm c = (ClassRealm) Thread.currentThread().getContextClassLoader(); // Add Project output directory to class path. c.addURL(new File(project.getBuild().getOutputDirectory()).toURI().toURL()); // Add Project class path to class path. for (URL url : buildClassPath()) { c.addURL(url); } } StaticWeaveProcessor weave = new StaticWeaveProcessor(source, target); URL[] urls = buildClassPath(); if (urls.length > 0) { URLClassLoader classLoader = new URLClassLoader(urls, Thread.currentThread().getContextClassLoader()); weave.setClassLoader(classLoader); } if (persistenceInfo != null) { weave.setPersistenceInfo(persistenceInfo); } if (persistenceXMLLocation != null) { weave.setPersistenceXMLLocation(persistenceXMLLocation); } weave.setLog(new PrintWriter(System.out)); weave.setLogLevel(getLogLevel()); weave.performWeaving(); } catch (MalformedURLException e) { throw new MojoExecutionException("Failed", e); } catch (IOException e) { throw new MojoExecutionException("Failed", e); } catch (URISyntaxException e) { throw new MojoExecutionException("Failed", e); } }
From source file: au.com.clearboxsystems.maven.plugins.nodejs.NodeJsMojoBase.java
License: Apache License
public NodeInstallInformation run(TaskFilter filter) throws MojoExecutionException { if (tasks == null || tasks.isEmpty()) { getLog().warn("No NodeJSTasks have been defined. Nothing to do"); return null; }/*from w ww . ja va2 s.co m*/ NodeInstallInformation information = getNodeInstallationInformation(nodeJsVersion, nodeJsDirectory); try { if (nodeJsURL != null) { information.url = new URL(nodeJsURL); } } catch (java.net.MalformedURLException ex) { throw new MojoExecutionException("Malformed provided node URL", ex); } try { if (!information.executable.exists()) { getLog().info("Downloading Node JS from " + information.url); FileUtils.copyURLToFile(information.url, information.archive); if (information.archive.getName().endsWith(".tar.gz")) { Commandline commandLine = getCommandLine(nodeJsDirectory, "tar", "xf", information.archive.getName()); executeCommandLine(commandLine); } } for (Task task : tasks) { if (filter == null || filter.accept(task)) { executeTask(task, information); } } } catch (IOException ex) { getLog().error("Failed to downloading nodeJs from " + nodeJsURL, ex); throw new MojoExecutionException("Failed to downloading nodeJs from " + nodeJsURL, ex); } catch (MojoExecutionException ex) { getLog().error("Execution Exception", ex); if (stopOnError) throw new MojoExecutionException("Execution Exception", ex); } catch (CommandLineException ex) { getLog().error("Command Line Exception", ex); throw new MojoExecutionException("Command execution failed.", ex); } return information; }
From source file: au.com.clearboxsystems.maven.plugins.nodejs.NodeJsWatcherMojo.java
License: Apache License
@Override public void execute() throws MojoExecutionException { NodeJsMojoBase.NodeInstallInformation info = super.run(filter); if (info == null) { return;/*from w ww .j av a 2 s .co m*/ } for (Task task : tasks) { try { if (task.watch) { addWatchForTask(task); } } catch (IOException ex) { throw new MojoExecutionException("Error adding watch for task " + task, ex); } } getLog().info("Starting watch vigil"); try { watch(info); } catch (CommandLineException ex) { throw new MojoExecutionException("Error during watch", ex); } catch (InterruptedException ex) { throw new MojoExecutionException("Error during watch", ex); } catch (IOException ex) { throw new MojoExecutionException("Error during watch", ex); } }
From source file: be.fedict.eid.applet.maven.DocbookMojo.java
License: Open Source License
public void execute() throws MojoExecutionException, MojoFailureException { getLog().info("executing..."); getLog().info("Protocol Message Catalog Class: " + this.protocolMessageCatalogClass); File outputFile = new File(this.outputDirectory, this.docbookFile); getLog().info("Output docbook file: " + outputFile.getAbsolutePath()); File graphFile = new File(this.outputDirectory, this.graphFile); getLog().info("Output graph file: " + graphFile.getAbsolutePath()); this.outputDirectory.mkdirs(); try {//from ww w . j a va 2 s .c om generateDocbook(outputFile); } catch (Exception e) { getLog().error("Error generating docbook: " + e.getMessage(), e); throw new MojoExecutionException("Error generating docbook: " + e.getMessage(), e); } try { generateGraph(graphFile); } catch (IOException e) { getLog().error("Error generating graph: " + e.getMessage(), e); throw new MojoExecutionException("Error generating graph: " + e.getMessage(), e); } }
From source file: be.fedict.eid.applet.maven.sql.ddl.SQLDDLMojo.java
License: Open Source License
@Override public void execute() throws MojoExecutionException, MojoFailureException { getLog().info("SQL DDL script generator"); File outputFile = new File(this.outputDirectory, this.outputName); getLog().info("Output SQL DDL script file: " + outputFile.getAbsolutePath()); this.outputDirectory.mkdirs(); try {/*w w w . ja va2s .c om*/ outputFile.createNewFile(); } catch (IOException e) { throw new MojoExecutionException("I/O error.", e); } for (ArtifactItem artifactItem : this.artifactItems) { getLog().info("artifact: " + artifactItem.getGroupId() + ":" + artifactItem.getArtifactId()); List<Dependency> dependencies = this.project.getDependencies(); String version = null; for (Dependency dependency : dependencies) { if (StringUtils.equals(dependency.getArtifactId(), artifactItem.getArtifactId()) && StringUtils.equals(dependency.getGroupId(), artifactItem.getGroupId())) { version = dependency.getVersion(); break; } } getLog().info("artifact version: " + version); VersionRange versionRange = VersionRange.createFromVersion(version); Artifact artifact = this.artifactFactory.createDependencyArtifact(artifactItem.getGroupId(), artifactItem.getArtifactId(), versionRange, "jar", null, Artifact.SCOPE_COMPILE); try { this.resolver.resolve(artifact, this.remoteRepos, this.local); } catch (ArtifactResolutionException e) { throw new MojoExecutionException("Unable to resolve artifact.", e); } catch (ArtifactNotFoundException e) { throw new MojoExecutionException("Unable to find artifact.", e); } getLog().info("artifact file: " + artifact.getFile().getAbsolutePath()); getLog().info("hibernate dialect: " + this.hibernateDialect); URL artifactUrl; try { artifactUrl = artifact.getFile().toURI().toURL(); } catch (MalformedURLException e) { throw new MojoExecutionException("URL error.", e); } URLClassLoader classLoader = new URLClassLoader(new URL[] { artifactUrl }, this.getClass().getClassLoader()); Thread.currentThread().setContextClassLoader(classLoader); AnnotationDB annotationDb = 
new AnnotationDB(); try { annotationDb.scanArchives(artifactUrl); } catch (IOException e) { throw new MojoExecutionException("I/O error.", e); } Set<String> classNames = annotationDb.getAnnotationIndex().get(Entity.class.getName()); getLog().info("# JPA entity classes: " + classNames.size()); AnnotationConfiguration configuration = new AnnotationConfiguration(); configuration.setProperty("hibernate.dialect", this.hibernateDialect); Dialect dialect = Dialect.getDialect(configuration.getProperties()); getLog().info("dialect: " + dialect.toString()); for (String className : classNames) { getLog().info("JPA entity: " + className); Class<?> entityClass; try { entityClass = classLoader.loadClass(className); getLog().info("entity class loader: " + entityClass.getClassLoader()); } catch (ClassNotFoundException e) { throw new MojoExecutionException("class not found.", e); } configuration.addAnnotatedClass(entityClass); } SchemaExport schemaExport = new SchemaExport(configuration); schemaExport.setFormat(true); schemaExport.setHaltOnError(true); schemaExport.setOutputFile(outputFile.getAbsolutePath()); schemaExport.setDelimiter(";"); try { getLog().info("SQL DDL script: " + IOUtil.toString(new FileInputStream(outputFile))); } catch (FileNotFoundException e) { throw new MojoExecutionException("file not found.", e); } catch (IOException e) { throw new MojoExecutionException("I/O error.", e); } // operate schemaExport.execute(true, false, false, true); List<Exception> exceptions = schemaExport.getExceptions(); for (Exception exception : exceptions) { getLog().error("exception: " + exception.getMessage()); } } }
From source file: be.redlab.maven.yamlprops.YamlToPropertiesMojo.java
License: Apache License
public void execute() throws MojoExecutionException, MojoFailureException { YamlConfiguration yamlConfiguration = getYamlConfiguration(); getLog().info("Parsing and writing properties of " + yamlfile + " to "); YamlPropertyConverter converter = new YamlPropertyConverterImpl(); Map<String, Properties> map = null; try {//from w w w. j a va 2s . com map = converter.convert( StringUtils.isBlank(readEncoding) ? new InputStreamReader(new FileInputStream(yamlfile)) : new InputStreamReader(new FileInputStream(yamlfile), readEncoding)); } catch (UnsupportedEncodingException e) { throw new MojoExecutionException("Unable to use provided encoding", e); } catch (FileNotFoundException e) { if (ignoreNotFound) { super.getLog().warn("Unable to find provided yaml file at " + yamlfile.getAbsolutePath()); super.getLog().debug(e); } else { throw new MojoExecutionException("Unable to find provided yaml file ", e); } } String location = yamlConfiguration.getLocation(); File baseDirectoryOfExport; if (StringUtils.isNotBlank(location)) { baseDirectoryOfExport = FileUtils.resolveFile(targetDir, location); } else { baseDirectoryOfExport = targetDir; } if (null != map) { if (!baseDirectoryOfExport.mkdirs()) { getLog().debug("Directory " + baseDirectoryOfExport + " not created, it probably already exists?."); } else { getLog().debug("Directory " + baseDirectoryOfExport + " created."); } for (Entry<String, Properties> e : map.entrySet()) { File directoryOfExport = baseDirectoryOfExport; FileOutputStream stream = null; try { File file; String targetFile; String extension = "properties"; boolean isXml = false; if ("xml".equalsIgnoreCase(yamlConfiguration.getType())) { extension = "xml"; isXml = true; } if (yamlConfiguration.getFiles().isEmpty() || !yamlConfiguration.getFiles().containsKey(e.getKey())) { getLog().debug("Not found " + e.getKey() + " to file mapping, using key as file name"); targetFile = e.getKey() + "." 
+ extension; } else { String configuredFileName = yamlConfiguration.getFiles().get(e.getKey()); String dirname = FileUtils.getPath(configuredFileName, yamlConfiguration.getFileSeparator()); directoryOfExport = new File(directoryOfExport, dirname); directoryOfExport.mkdirs(); targetFile = FileUtils.removePath(configuredFileName, yamlConfiguration.getFileSeparator()); getLog().debug("found mapping " + configuredFileName + " for " + e.getKey() + " in " + directoryOfExport + " with file " + targetFile); } // targetFile File propertyFile = new File(directoryOfExport, targetFile); getLog().info("Writing to " + propertyFile); boolean newFile = propertyFile.createNewFile(); stream = new FileOutputStream(propertyFile); if (isXml) { e.getValue().storeToXML(stream, "Written " + (newFile ? "new file" : "") + " from yaml on " + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ").format(new Date()), writeEncoding); } else { e.getValue() .store(new OutputStreamWriter(stream, writeEncoding), "Written " + (newFile ? "new file" : "") + " from yaml on " + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ").format(new Date())); } } catch (IOException e1) { throw new MojoExecutionException("Unable to write file ", e1); } finally { if (null != stream) { try { stream.close(); } catch (IOException e1) { getLog().debug("Error closing stream", e1); } } } } } }
From source file: biz.paluch.maven.configurator.AbstractConfigureMojo.java
License: Open Source License
/** * Perform Configuration./* ww w. java2 s. com*/ * * @param artifactFile * @throws MojoExecutionException */ protected void configure(File artifactFile) throws MojoExecutionException { Preconditions.checkArgument(tokenStart != null && !tokenStart.isEmpty(), "tokenStart must not be empty"); Preconditions.checkArgument(tokenEnd != null && !tokenEnd.isEmpty(), "tokenEnd must not be empty"); File targetConfigurator = new File(targetDir, "configurator"); File targetWork = new File(targetConfigurator, "work"); File finalFile = new File(targetConfigurator, artifactFile.getName()); if (!targetWork.exists()) { targetWork.mkdirs(); } getLog().debug("Resolved target artifact to " + artifactFile.toString()); Closer closer = Closer.create(); Container container = new Container(artifactFile.getName()); try { ZipInputStream zis = closer .register(new ZipInputStream(new BufferedInputStream(new FileInputStream(artifactFile)))); getLog().info("Extracting " + artifactFile + " to " + targetWork); ZipFileIteratorAndExtractor iterator = new ZipFileIteratorAndExtractor(container, zis, targetWork); iterator.extract(); getLog().info("Retrieving Properties"); Properties properties = getProperties(); getLog().info("Processing Files"); TemplateProcessor processor = new TemplateProcessor(properties, tokenStart, tokenEnd, getLog()); FileTemplating.processFiles(getLog(), targetWork, processor); getLog().info("Compressing to " + finalFile); ZipOutputStream zos = closer .register(new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(finalFile)))); ZipFileCompressor compressor = new ZipFileCompressor(container, zos, targetWork); compressor.compress(getLog()); getLog().info("Done."); } catch (IOException e) { getLog().error(e); throw new MojoExecutionException(e.getMessage(), e); } finally { try { closer.close(); } catch (IOException e) { throw new MojoExecutionException(e.getMessage(), e); } } }