Usage examples for `org.apache.maven.artifact.resolver.filter.ArtifactFilter#include(Artifact)`
boolean include(Artifact artifact);
From source file:at.yawk.mdep.GenerateMojo.java
/**
 * Builds the project's compile/runtime dependency tree, filters it through the
 * configured include/exclude patterns, serializes the surviving dependencies to
 * {@code mdep-dependencies.xml}, and registers that file as a project resource.
 *
 * @throws MojoExecutionException if the dependency tree cannot be built, the
 *         output directory cannot be created, or serialization fails
 */
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    // Optionally set up a cache store that logs through the Maven plugin logger.
    if (cacheHours > 0) {
        cacheStore = Environment.createCacheStore(new Logger() {
            @Override
            public void info(String msg) {
                getLog().info(msg);
            }

            @Override
            public void warn(String msg) {
                getLog().warn(msg);
            }
        }, "mdep-maven-plugin");
    }

    // Build the include/exclude matcher: accept everything unless explicit
    // includes are configured, then subtract the excludes.
    ArtifactMatcher includesMatcher;
    if (includes == null) {
        includesMatcher = ArtifactMatcher.acceptAll();
    } else {
        includesMatcher = ArtifactMatcher.anyMatch(toAntMatchers(includes));
    }
    ArtifactMatcher excludesMatcher = ArtifactMatcher.anyMatch(toAntMatchers(excludes));
    ArtifactMatcher matcher = includesMatcher.and(excludesMatcher.not());

    // Collect all artifacts in compile or runtime scope from the dependency tree.
    List<Artifact> artifacts = new ArrayList<>();
    try {
        ArtifactFilter subtreeFilter = artifact -> artifact.getScope() == null
                || artifact.getScope().equals(Artifact.SCOPE_COMPILE)
                || artifact.getScope().equals(Artifact.SCOPE_RUNTIME);
        DependencyNode root = dependencyTreeBuilder.buildDependencyTree(project, localRepository, subtreeFilter);
        root.accept(new DependencyNodeVisitor() {
            @Override
            public boolean visit(DependencyNode node) {
                if (node.getArtifact() != null) {
                    // Returning false prunes the whole subtree under a filtered node.
                    if (!subtreeFilter.include(node.getArtifact())) {
                        return false;
                    }
                    artifacts.add(node.getArtifact());
                }
                return true;
            }

            @Override
            public boolean endVisit(DependencyNode node) {
                return true;
            }
        });
    } catch (DependencyTreeBuilderException e) {
        throw new MojoExecutionException("Failed to build dependency tree", e);
    }

    List<Dependency> dependencies = new ArrayList<>();
    for (Artifact artifact : artifacts) {
        if (matcher.matches(artifact)) {
            dependencies.add(findArtifact(artifact));
        }
    }

    getLog().info("Saving dependency xml");
    DependencySet dependencySet = new DependencySet();
    dependencySet.setDependencies(dependencies);

    // FIX: File.mkdirs() returns false when the directory already exists (e.g. on
    // a rebuild), which previously failed the build spuriously. Only treat a false
    // return as an error when the directory is still missing afterwards.
    if (!outputDirectory.isDirectory() && !outputDirectory.mkdirs()) {
        throw new MojoExecutionException("Failed to create output directory");
    }
    File outputFile = new File(outputDirectory, "mdep-dependencies.xml");
    try {
        JAXBContext jaxbContext = JAXBContext.newInstance(DependencySet.class);
        Marshaller marshaller = jaxbContext.createMarshaller();
        marshaller.marshal(dependencySet, outputFile);
    } catch (JAXBException e) {
        throw new MojoExecutionException("Failed to serialize dependency set", e);
    }

    // Register the generated xml as a project resource so it is packaged.
    Resource resource = new Resource();
    resource.setDirectory(outputDirectory.toString());
    resource.setFiltering(false);
    project.addResource(resource);
}
From source file:br.com.anteros.restdoc.maven.plugin.util.ResourceResolver.java
License:Apache License
/**
 * Resolves the given '-sources' / '-test-sources' artifacts and unpacks every one
 * whose classifier is in {@code validClassifiers} into its own directory under the
 * configured output base directory.
 *
 * @param artifacts        the source artifacts to resolve and unpack
 * @param config           resolver/repository/archiver configuration
 * @param validClassifiers classifiers accepted for unpacking (e.g. "sources")
 * @param propagateErrors  when true, unpacking failures abort with an exception;
 *                         when false they are silently skipped (best-effort)
 * @return absolute paths of the directories the artifacts were unpacked into
 * @throws ArtifactResolutionException if resolution fails, or unpacking fails and
 *         {@code propagateErrors} is set
 * @throws ArtifactNotFoundException if an artifact cannot be found
 */
@SuppressWarnings("unchecked")
private static List<String> resolveAndUnpack(final List<Artifact> artifacts, final SourceResolverConfig config,
        final List<String> validClassifiers, final boolean propagateErrors)
        throws ArtifactResolutionException, ArtifactNotFoundException {
    // NOTE: Since these are '-sources' and '-test-sources' artifacts, they won't actually
    // resolve transitively...this is just used to aggregate resolution failures into a single
    // exception.
    final Set<Artifact> artifactSet = new LinkedHashSet<Artifact>(artifacts);
    final Artifact pomArtifact = config.project().getArtifact();
    final ArtifactRepository localRepo = config.localRepository();
    final List<ArtifactRepository> remoteRepos = config.project().getRemoteArtifactRepositories();
    final ArtifactMetadataSource metadataSource = config.artifactMetadataSource();
    final ArtifactFilter filter = config.filter();
    ArtifactFilter resolutionFilter = null;
    if (filter != null) {
        // Wrap the filter in a ProjectArtifactFilter in order to always include the pomArtifact
        // for resolution. NOTE that this is necessary, b/c the -sources artifacts are added
        // dynamically to the pomArtifact and the resolver also checks the dependency trail with
        // the given filter, thus the pomArtifact has to be explicitly included by the filter,
        // otherwise the -sources artifacts won't be resolved.
        resolutionFilter = new ProjectArtifactFilter(pomArtifact, filter);
    }
    final ArtifactResolver resolver = config.artifactResolver();
    @SuppressWarnings("rawtypes")
    Map managed = config.project().getManagedVersionMap();
    final ArtifactResolutionResult resolutionResult = resolver.resolveTransitively(artifactSet, pomArtifact,
            managed, localRepo, remoteRepos, metadataSource, resolutionFilter);
    final List<String> result = new ArrayList<String>(artifacts.size());
    for (final Artifact a : (Collection<Artifact>) resolutionResult.getArtifacts()) {
        // Skip artifacts with unexpected classifiers and anything the caller's filter rejects.
        if (!validClassifiers.contains(a.getClassifier()) || (filter != null && !filter.include(a))) {
            continue;
        }
        // Unpack into <outputBasedir>/<artifactId>-<version>-<classifier>.
        final File d = new File(config.outputBasedir(),
                a.getArtifactId() + "-" + a.getVersion() + "-" + a.getClassifier());
        if (!d.exists()) {
            d.mkdirs();
        }
        try {
            final UnArchiver unArchiver = config.archiverManager().getUnArchiver(a.getType());
            unArchiver.setDestDirectory(d);
            unArchiver.setSourceFile(a.getFile());
            unArchiver.extract();
            result.add(d.getAbsolutePath());
        } catch (final NoSuchArchiverException e) {
            // Best-effort by default: only escalate unpack problems when the caller asked for it.
            if (propagateErrors) {
                throw new ArtifactResolutionException(
                        "Failed to retrieve valid un-archiver component: " + a.getType(), a, e);
            }
        } catch (final ArchiverException e) {
            if (propagateErrors) {
                throw new ArtifactResolutionException("Failed to unpack: " + a.getId(), a, e);
            }
        }
    }
    return result;
}
From source file:com.exxeta.oses.maven.plugin.decompiler.CombinedArtifactFilter.java
License:Apache License
/**
 * Accepts the artifact only if every delegate filter accepts it
 * (logical AND over all configured filters, short-circuiting on
 * the first rejection).
 */
@Override
public boolean include(Artifact artifact) {
    boolean accepted = true;
    for (ArtifactFilter delegate : artifactFilters) {
        if (!delegate.include(artifact)) {
            accepted = false;
            break;
        }
    }
    return accepted;
}
From source file:com.exxeta.oses.maven.plugin.decompiler.TransitiveDependencyResolver.java
License:Apache License
/**
 * Removes from the given set every dependency that the supplied
 * filter does not accept. The set is modified in place via the
 * iterator, which is the only safe way to remove while iterating.
 */
private void filterCollectedDependencies(Set<Artifact> transitiveDependencies, ArtifactFilter artifactFilter) {
    for (Iterator<Artifact> it = transitiveDependencies.iterator(); it.hasNext();) {
        Artifact candidate = it.next();
        if (!artifactFilter.include(candidate)) {
            it.remove();
        }
    }
}
From source file:com.github.ferstl.jarscan.AbstractJarScanMojo.java
/**
 * Scans every direct dependency of the project that passes the
 * configured artifact filter and prints a report for each.
 *
 * @throws MojoExecutionException propagated from the scan
 */
private void analyzeDependencies() throws MojoExecutionException {
    Set<Artifact> dependencies = this.project.getDependencyArtifacts();
    ArtifactFilter filter = createArtifactFilter();
    for (Artifact dependency : dependencies) {
        // Guard clause: skip anything the filter rejects.
        if (!filter.include(dependency)) {
            continue;
        }
        getLog().debug("Analyzing " + dependency);
        scanAndprintReport(dependency.toString(), dependency.getFile());
    }
}
From source file:com.github.ferstl.jarscan.JarScanMojo.java
private void analyzeDependencies() throws MojoExecutionException { Set<Artifact> dependencies = this.project.getDependencyArtifacts(); ArtifactFilter filter = createArtifactFilter(); for (Artifact dependency : dependencies) { if (filter.include(dependency)) { getLog().debug("Analyzing " + dependency); printReport(dependency.toString(), dependency.getFile()); }//from w w w .j a va 2s. c o m } }
From source file:com.ning.maven.plugins.dependencyversionscheck.AbstractDependencyVersionsMojo.java
License:Apache License
/**
 * Resolve all transitive dependencies relative to a given dependency, based off the list of
 * artifacts given. A scope filter can be added which limits the results to the scopes present
 * in that filter.
 *
 * @param dependency         the dependency being checked (not read in this method body —
 *                           presumably kept for symmetry with callers; TODO confirm)
 * @param dependenciesToCheck artifacts to resolve versions for
 * @param artifactName       qualified name of the artifact owning these dependencies
 * @param scopeFilter        limits which scopes are considered
 * @return the list of {@code VersionResolution} results collected over all checked artifacts
 */
private List resolveTransitiveVersions(final Dependency dependency, final Collection dependenciesToCheck,
        final String artifactName, final ArtifactFilter scopeFilter)
        throws InvalidDependencyVersionException, ArtifactResolutionException, ArtifactNotFoundException,
        ProjectBuildingException {
    final List resolutions = new ArrayList();
    for (Iterator dependenciesToCheckIter = dependenciesToCheck.iterator(); dependenciesToCheckIter
            .hasNext();) {
        Artifact dependencyArtifactToCheck = (Artifact) dependenciesToCheckIter.next();
        LOG.debug("Checking {}...", dependencyArtifactToCheck);
        // Dependencies outside the requested scopes are ignored entirely.
        if (!scopeFilter.include(dependencyArtifactToCheck)) {
            LOG.debug("... in invisible scope, ignoring!");
            continue; // for
        }
        LOG.debug("... visible ...");
        if (dependencyArtifactToCheck.isOptional()) {
            LOG.debug("... but optional, ignoring!");
            continue;
        }
        LOG.debug("... resolving!");
        String artifactToCheckName = getQualifiedName(dependencyArtifactToCheck);
        // Track the longest qualified name seen; presumably used for report column
        // alignment elsewhere in this mojo — TODO confirm.
        if (artifactToCheckName.length() > maxLen) {
            maxLen = artifactToCheckName.length();
        }
        Artifact resolvedDependency = (Artifact) resolvedDependenciesByName.get(artifactToCheckName);
        if (resolvedDependency == null) {
            LOG.debug("Dependency {}:{} of artifact {} is no longer used in the current project.",
                    new Object[] { artifactToCheckName, dependencyArtifactToCheck.getVersion(), artifactName });
        } else {
            // if the artifact in question is excluded in the current pom, then we don't have to
            // worry about it anyways. this should be in the resolver. CHECKME!
            if (!exclusions.contains(dependencyArtifactToCheck.getGroupId() + ":"
                    + dependencyArtifactToCheck.getArtifactId())) {
                final Version resolvedVersion = getVersion(resolvedDependency);
                final Version versionToCheck = getVersion(dependencyArtifactToCheck);
                final VersionResolution resolution = new VersionResolution(artifactName, artifactToCheckName,
                        versionToCheck, resolvedVersion, false);
                resolutions.add(resolution);
                // we have an error if
                // - the resolved dependency has a lower version or different qualifier than the
                //   stated one of the current transitive dependency
                // - the resolved dependency has a higher major version than the stated one of the
                //   current transitive dependency and there is no explicit dependency to that
                //   major version in the current project
                // for this last check, we assume that explicit dependencies have already been
                // checked against actual ones, so we only need to check if the artifact is an
                // explicit dependency
                final Strategy strategy = findStrategy(resolution);
                if (!isExcluded(resolvedDependency, versionToCheck, resolvedVersion)) {
                    if (!strategy.isCompatible(resolvedVersion, versionToCheck)) {
                        resolution.setConflict(true);
                    }
                } else if (warnIfMajorVersionIsHigher && !strategy.isCompatible(resolvedVersion, versionToCheck)) {
                    // NOTE(review): this warning branch only fires for artifacts that ARE excluded
                    // by isExcluded(...) — confirm the condition is not inverted.
                    LOG.warn(
                            "Artifact {} depends on {} at an incompatible version ({}) than the current project ({})!",
                            new Object[] { artifactName, artifactToCheckName,
                                    dependencyArtifactToCheck.getVersion(), resolvedDependency.getVersion() });
                }
            }
        }
    }
    return resolutions;
}
From source file:com.soebes.maven.plugins.licenseverifier.licenses.LicenseData.java
License:Apache License
/** * This will go through all licenses and categorize them * into their appropriate group./*from w w w .java 2 s . co m*/ * */ private void categorize() { PatternExcludeFilter patternExcludeFilter = new PatternExcludeFilter(); ArtifactFilter filter = patternExcludeFilter.createFilter(getExcludes()); for (LicenseInformation license : getLicenseInformations()) { if (!filter.include(license.getArtifact())) { getExcludedByConfiguration().add(license); getLog().debug("artifact " + license.getArtifact().getId() + " exculded by configuration."); continue; } if (getLicenseValidator().isValid(license.getLicenses())) { getValid().add(license); getLog().debug("artifact " + license.getArtifact().getId() + " has been categorized as Valid."); } else if (getLicenseValidator().isInvalid(license.getLicenses())) { getInvalid().add(license); getLog().debug("artifact " + license.getArtifact().getId() + " has been categorized as Invalid."); } else if (getLicenseValidator().isWarning(license.getLicenses())) { getWarning().add(license); getLog().debug("artifact " + license.getArtifact().getId() + " has been categorized as Warning."); } else if (getLicenseValidator().isUnknown(license.getLicenses())) { getUnknown().add(license); getLog().debug("artifact " + license.getArtifact().getId() + " has been categorized as Unknown."); } } }
From source file:com.tenderowls.opensource.haxemojos.components.HaxeCompiler.java
License:Apache License
/**
 * Appends a "-lib artifactId:version" pair to the compiler arguments
 * for every haxelib-typed project artifact that the (optional)
 * filter accepts.
 */
private void addLibs(List<String> argumentsList, MavenProject project, ArtifactFilter artifactFilter) {
    for (Artifact artifact : project.getArtifacts()) {
        // Guard clause: a null filter accepts everything.
        if (artifactFilter != null && !artifactFilter.include(artifact)) {
            continue;
        }
        if (artifact.getType().equals(HaxeFileExtensions.HAXELIB)) {
            argumentsList.add("-lib");
            argumentsList.add(artifact.getArtifactId() + ":" + artifact.getVersion());
        }
    }
}
From source file:com.tenderowls.opensource.haxemojos.components.HaxeCompiler.java
License:Apache License
/**
 * Adds the source paths of every har-typed dependency to the compiler
 * arguments. External hars are unpacked and their metadata validated;
 * hars that are reactor project references reuse the referenced
 * project's compile source roots directly.
 */
private void addHars(List<String> argumentsList, MavenProject project, Set<CompileTarget> targets,
        ArtifactFilter artifactFilter) {
    Map<String, MavenProject> projectReferences = project.getProjectReferences();
    for (Artifact artifact : project.getArtifacts()) {
        // Guard clauses: honor the optional filter, then only process hars.
        if (artifactFilter != null && !artifactFilter.include(artifact)) {
            continue;
        }
        if (!artifact.getType().equals(HaxeFileExtensions.HAR)) {
            continue;
        }
        MavenProject reference = projectReferences.get(getProjectReferenceKey(artifact, ":"));
        if (reference == null) {
            // External har: unpack beside the other dependencies, validate its
            // metadata against the requested targets, then add as a source path.
            File harUnpackDirectory = new File(getDependenciesDirectory(),
                    getProjectReferenceKey(artifact, "-"));
            unpackHar(artifact, harUnpackDirectory);
            validateHarMetadata(targets, artifact,
                    new File(harUnpackDirectory, HarMetadata.METADATA_FILE_NAME));
            addSourcePath(argumentsList, harUnpackDirectory.getAbsolutePath());
        } else {
            // Reactor har: validate against the referenced project's build output
            // and reuse its compile source roots instead of unpacking.
            String dirName = OutputNamesHelper.getHarValidationOutput(artifact);
            File validationDirectory = new File(reference.getBuild().getDirectory(), dirName);
            validateHarMetadata(targets, artifact,
                    new File(validationDirectory, HarMetadata.METADATA_FILE_NAME));
            for (String sourceRoot : reference.getCompileSourceRoots()) {
                addSourcePath(argumentsList, sourceRoot);
            }
        }
    }
}