Example usage for org.apache.maven.artifact.resolver.filter TypeArtifactFilter TypeArtifactFilter

List of usage examples for org.apache.maven.artifact.resolver.filter TypeArtifactFilter TypeArtifactFilter

Introduction

In this page you can find the example usage for org.apache.maven.artifact.resolver.filter TypeArtifactFilter TypeArtifactFilter.

Prototype

public TypeArtifactFilter(String type) 

Source Link

Usage

From source file:com.xpn.xwiki.tool.xar.AbstractXarMojo.java

License:Open Source License

/**
 * @param pomProject the project/*from  w  w w. j a va 2 s .  c o m*/
 * @return set of dependencies
 * @throws ArtifactResolutionException error
 * @throws ArtifactNotFoundException error
 * @throws InvalidDependencyVersionException error
 */
protected Set<Artifact> resolveDependencyArtifacts(MavenProject pomProject)
        throws ArtifactResolutionException, ArtifactNotFoundException, InvalidDependencyVersionException {
    AndArtifactFilter filters = new AndArtifactFilter();

    filters.add(new TypeArtifactFilter("xar"));
    filters.add(new ScopeArtifactFilter(DefaultArtifact.SCOPE_RUNTIME));

    Set<Artifact> artifacts = pomProject.createArtifacts(this.factory, Artifact.SCOPE_TEST, filters);

    for (Artifact artifact : artifacts) {
        // resolve the new artifact
        this.resolver.resolve(artifact, this.remoteRepos, this.local);
    }

    return artifacts;
}

From source file:de.tarent.maven.plugins.pkg.helper.Helper.java

License:Open Source License

/**
 * Creates the bootclasspath and classpath line from the project's
 * dependencies./*from   ww  w  .j  ava  2s  . c o m*/
 * 
 * @param pm
 *            The package map used to resolve the Jar file names.
 * @param bundled
 *            A set used to track the bundled jars for later file-size
 *            calculations.
 * @param bcp
 *            StringBuilder which contains the boot classpath line at the
 *            end of the method.
 * @param cp
 *            StringBuilder which contains the classpath line at the end of
 *            the method.
 */
protected final void createClasspathLine(final Log l, final File targetJarPath, final Path bcp, final Path cp,
        File targetArtifactFile) throws MojoExecutionException {
    // final Set<Artifact> bundled = new HashSet<Artifact>();

    l.info("resolving dependency artifacts");

    Set<Artifact> dependencies = new HashSet<Artifact>();
    try {
        // Notice only compilation dependencies which are Jars.
        // Shared Libraries ("so") are filtered out because the
        // JNI dependency is solved by the system already.

        // Here a filter for depencies of the COMPILE scope is created
        AndArtifactFilter compileFilter = new AndArtifactFilter();
        compileFilter.add(new ScopeArtifactFilter(Artifact.SCOPE_COMPILE));
        compileFilter.add(new TypeArtifactFilter("jar"));

        // The result of the COMPILE filter will be added to the depencies
        // set
        dependencies.addAll(Utils.findArtifacts(compileFilter, apm.getFactory(), apm.getResolver(),
                apm.getProject(), apm.getProject().getArtifact(), apm.getLocalRepo(), apm.getRemoteRepos(),
                apm.getMetadataSource()));

        // Here a filter for depencies of the RUNTIME scope is created
        AndArtifactFilter runtimeFilter = new AndArtifactFilter();
        runtimeFilter.add(new ScopeArtifactFilter(Artifact.SCOPE_RUNTIME));
        runtimeFilter.add(new TypeArtifactFilter("jar"));

        // The result of the RUNTIME filter will be added to the depencies
        // set
        dependencies.addAll(Utils.findArtifacts(runtimeFilter, apm.getFactory(), apm.getResolver(),
                apm.getProject(), apm.getProject().getArtifact(), apm.getLocalRepo(), apm.getRemoteRepos(),
                apm.getMetadataSource()));

        // Here a filter for depencies of the PROVIDED scope is created
        AndArtifactFilter providedFilter = new AndArtifactFilter();
        providedFilter.add(new ScopeArtifactFilter(Artifact.SCOPE_PROVIDED));
        providedFilter.add(new TypeArtifactFilter("jar"));

        // The result of the PROVIDED filter will be added to the depencies
        // set
        dependencies.addAll(Utils.findArtifacts(providedFilter, apm.getFactory(), apm.getResolver(),
                apm.getProject(), apm.getProject().getArtifact(), apm.getLocalRepo(), apm.getRemoteRepos(),
                apm.getMetadataSource()));

    } catch (ArtifactNotFoundException anfe) {
        throw new MojoExecutionException("Exception while resolving dependencies", anfe);
    } catch (InvalidDependencyVersionException idve) {
        throw new MojoExecutionException("Exception while resolving dependencies", idve);
    } catch (ProjectBuildingException pbe) {
        throw new MojoExecutionException("Exception while resolving dependencies", pbe);
    } catch (ArtifactResolutionException are) {
        throw new MojoExecutionException("Exception while resolving dependencies", are);
    }

    Visitor v = new Visitor() {
        public void bundle(Artifact artifact) {
            // Nothing to do here. bundleDependencies should take care of
            // this.

        }

        public void visit(Artifact artifact, Entry entry) {
            // If all dependencies should be bundled skip adding them to the
            // classpath
            // thereby overriding what was configured through property
            // files.
            if (targetConfiguration.isBundleAll()) {
                return;
            }

            Path b = (entry.isBootClasspath) ? bcp : cp;

            Iterator<String> ite = entry.jarFileNames.iterator();
            while (ite.hasNext()) {
                StringBuilder sb = new StringBuilder();
                String fileName = ite.next();

                // Prepend default Jar path if file is not absolute.
                if (fileName.charAt(0) != '/') {
                    sb.append(packageMap.getDefaultJarPath());
                    sb.append("/");
                }

                sb.append(fileName);

                b.append(sb.toString());
            }
        }

    };

    if (!targetConfiguration.isIgnoreDependencies()) {
        packageMap.iterateDependencyArtifacts(l, dependencies, v, true);
    }

    // Add the custom jar files to the classpath
    for (Iterator<JarFile> ite = targetConfiguration.getJarFiles().iterator(); ite.hasNext();) {
        AuxFile auxFile = ite.next();

        cp.append(targetJarPath.toString() + "/" + new File(auxFile.getFrom()).getName());
    }

    // Add the project's own artifact at last. This way we can
    // save the deletion of the colon added in the loops above.
    cp.append(targetArtifactFile.toString());

    // return bundled;
}

From source file:de.tarent.maven.plugins.pkg.helper.Helper.java

License:Open Source License

/**
 * Resolves the project's jar dependencies from the COMPILE, RUNTIME and
 * PROVIDED scopes.
 *
 * <p>Only Jars are considered: shared libraries ("so") are filtered out
 * because the JNI dependency is solved by the system already.</p>
 *
 * @return the set of resolved dependency artifacts (never {@code null}).
 * @throws MojoExecutionException if the dependencies cannot be resolved.
 */
public Set<Artifact> resolveProjectDependencies() throws MojoExecutionException {
    Set<Artifact> resolvedDeps = new HashSet<Artifact>();
    try {
        // The COMPILE, RUNTIME and PROVIDED scopes all need the same
        // jar-only filtering, so resolve each scope in a single loop
        // instead of repeating the filter/findArtifacts block per scope.
        final String[] scopes = { Artifact.SCOPE_COMPILE, Artifact.SCOPE_RUNTIME, Artifact.SCOPE_PROVIDED };
        for (String scope : scopes) {
            AndArtifactFilter scopeAndJarFilter = new AndArtifactFilter();
            scopeAndJarFilter.add(new ScopeArtifactFilter(scope));
            scopeAndJarFilter.add(new TypeArtifactFilter("jar"));

            resolvedDeps.addAll(Utils.findArtifacts(scopeAndJarFilter, apm.getFactory(), apm.getResolver(),
                    apm.getProject(), apm.getProject().getArtifact(), apm.getLocalRepo(), apm.getRemoteRepos(),
                    apm.getMetadataSource()));
        }
    } catch (ArtifactNotFoundException anfe) {
        throw new MojoExecutionException("Exception while resolving dependencies", anfe);
    } catch (InvalidDependencyVersionException idve) {
        throw new MojoExecutionException("Exception while resolving dependencies", idve);
    } catch (ProjectBuildingException pbe) {
        throw new MojoExecutionException("Exception while resolving dependencies", pbe);
    } catch (ArtifactResolutionException are) {
        throw new MojoExecutionException("Exception while resolving dependencies", are);
    }

    return resolvedDeps;
}

From source file:fr.paris.lutece.maven.AbstractLuteceWebappMojo.java

License:Open Source License

/**
 * Add the lutece-core dependency's files to an exploded webapp directory.
 *
 * @param webappDir
 *            the webapp directory.
 *
 * @throws MojoExecutionException
 *             if there is no lutece-core dependency, or more than one, or
 *             if an error occurs while resolving the artifact.
 */
private void explodeCore(File webappDir) throws MojoExecutionException {
    // Collect every lutece-core artifact among the project's dependencies.
    Set coreArtifacts = filterArtifacts(new TypeArtifactFilter(LUTECE_CORE_TYPE));

    // The core project itself needs no core dependency exploded.
    if (LUTECE_CORE_TYPE.equals(project.getArtifactId())) {
        return;
    }

    // Anything but exactly one core dependency is a configuration error.
    if ((coreArtifacts == null) || (coreArtifacts.size() != 1)) {
        throw new MojoExecutionException("Project \"" + project.getName()
                + "\" must have exactly one dependency of type " + LUTECE_CORE_TYPE);
    }

    // The set is known to hold exactly one element at this point.
    addToExplodedWebapp((Artifact) coreArtifacts.iterator().next(), webappDir);
}

From source file:fr.paris.lutece.maven.AbstractLuteceWebappMojo.java

License:Open Source License

/**
 * Add the lutece-plugin dependencies' files to an exploded webapp
 * directory.
 *
 * @param webappDir
 *            the webapp directory.
 *
 * @throws MojoExecutionException
 *             if an error occurs while resolving the artifacts.
 */
private void explodePlugins(File webappDir) throws MojoExecutionException {
    // Collect every lutece-plugin artifact among the project's dependencies.
    Set pluginArtifacts = filterArtifacts(new TypeArtifactFilter(LUTECE_PLUGIN_TYPE));

    // Unpack each plugin artifact into the webapp directory.
    for (Object element : pluginArtifacts) {
        addToExplodedWebapp((Artifact) element, webappDir);
    }
}

From source file:fr.paris.lutece.maven.AbstractLuteceWebappMojo.java

License:Open Source License

/**
 * Add the lutece-site dependencies' files to an exploded webapp
 * directory./* ww w . ja v  a  2 s .c om*/
 *
 * @param webappDir
 *            the webapp directory.
 *
 * @throws MojoExecutionException
 *             if an error occurs while resolving the artifacts.
 */
private void explodeSites(File webappDir) throws MojoExecutionException {
    // Get all the lutece-site artifacts from the project
    Set sites = filterArtifacts(new TypeArtifactFilter(LUTECE_SITE_TYPE));

    // Explode each artifact file
    for (Iterator iterArtifacts = sites.iterator(); iterArtifacts.hasNext();) {
        Artifact siteArtifact = (Artifact) iterArtifacts.next();
        addToExplodedWebapp(siteArtifact, webappDir);
    }
}

From source file:org.apache.synapse.maven.xar.AbstractXarMojo.java

License:Apache License

/**
 * Adds the project's eligible dependency jars to the archive's lib/
 * directory.
 *
 * @param archiver the archiver to add the jar files to.
 * @throws ArchiverException if a file cannot be added to the archive.
 * @throws MojoExecutionException if the Synapse runtime filter cannot be
 *             built.
 */
private void addDependencies(Archiver archiver) throws ArchiverException, MojoExecutionException {
    Log log = getLog();

    // Bundle only mandatory runtime jars that the Synapse runtime does
    // not already provide.
    AndArtifactFilter dependencyFilter = new AndArtifactFilter();
    dependencyFilter.add(new ScopeArtifactFilter(Artifact.SCOPE_RUNTIME));
    dependencyFilter.add(new ArtifactFilter() {
        public boolean include(Artifact artifact) {
            return !artifact.isOptional();
        }
    });
    dependencyFilter.add(new TypeArtifactFilter("jar"));
    dependencyFilter.add(buildSynapseRuntimeArtifactFilter());

    for (Artifact artifact : filterArtifacts(project.getArtifacts(), dependencyFilter)) {
        StringBuilder nameBuilder = new StringBuilder();
        nameBuilder.append(artifact.getArtifactId()).append('-').append(artifact.getVersion()).append('.')
                .append(artifact.getArtifactHandler().getExtension());
        String targetFileName = nameBuilder.toString();
        log.info("Adding " + targetFileName + " (scope " + artifact.getScope() + ")");
        archiver.addFile(artifact.getFile(), "lib/" + targetFileName);
    }
}

From source file:org.apache.synapse.maven.xar.AbstractXarMojo.java

License:Apache License

/**
 * Get the set of artifacts that are provided by Synapse at runtime.
 * /*from  w  w  w.  ja va  2s.  c  o m*/
 * @return
 * @throws MojoExecutionException
 */
private Set<Artifact> getSynapseRuntimeArtifacts() throws MojoExecutionException {
    Log log = getLog();
    log.debug("Looking for synapse-core artifact in XAR project dependencies ...");
    Artifact synapseCore = null;
    for (Iterator<?> it = project.getDependencyArtifacts().iterator(); it.hasNext();) {
        Artifact artifact = (Artifact) it.next();
        if (artifact.getGroupId().equals("org.apache.synapse")
                && artifact.getArtifactId().equals("synapse-core")) {
            synapseCore = artifact;
            break;
        }
    }
    if (synapseCore == null) {
        throw new MojoExecutionException("Could not locate dependency on synapse-core");
    }

    log.debug("Loading project data for " + synapseCore + " ...");
    MavenProject synapseCoreProject;
    try {
        synapseCoreProject = projectBuilder.buildFromRepository(synapseCore, remoteArtifactRepositories,
                localRepository);
    } catch (ProjectBuildingException e) {
        throw new MojoExecutionException("Unable to retrieve project information for " + synapseCore, e);
    }
    Set<Artifact> synapseRuntimeDeps;
    try {
        synapseRuntimeDeps = synapseCoreProject.createArtifacts(artifactFactory, Artifact.SCOPE_RUNTIME,
                new TypeArtifactFilter("jar"));
    } catch (InvalidDependencyVersionException e) {
        throw new MojoExecutionException("Unable to get project dependencies for " + synapseCore, e);
    }
    log.debug("Direct runtime dependencies for " + synapseCore + " :");
    logArtifacts(synapseRuntimeDeps);

    log.debug("Resolving transitive dependencies for " + synapseCore + " ...");
    try {
        synapseRuntimeDeps = artifactCollector.collect(synapseRuntimeDeps, synapseCoreProject.getArtifact(),
                synapseCoreProject.getManagedVersionMap(), localRepository, remoteArtifactRepositories,
                artifactMetadataSource, null, Collections.singletonList(new DebugResolutionListener(logger)))
                .getArtifacts();
    } catch (ArtifactResolutionException e) {
        throw new MojoExecutionException("Unable to resolve transitive dependencies for " + synapseCore);
    }
    log.debug("All runtime dependencies for " + synapseCore + " :");
    logArtifacts(synapseRuntimeDeps);

    return synapseRuntimeDeps;
}

From source file:org.codehaus.mojo.graphing.model.factory.GraphModelFactory.java

License:Apache License

/**
 * Builds a dependency graph model centered on the given artifact, resolving
 * its transitive dependencies.
 *
 * @param groupId the group id of the center artifact.
 * @param artifactId the artifact id of the center artifact.
 * @param version the version of the center artifact.
 * @return the populated graph model with the resolved artifact at its
 *         center.
 * @throws MojoExecutionException if the dependencies cannot be resolved.
 */
public GraphModel getGraphModel(String groupId, String artifactId, String version)
        throws MojoExecutionException {
    Artifact pomArtifact = resolveArtifact(groupId, artifactId, version);

    // Attach a debug listener only when verbose output was requested.
    List listeners = Collections.EMPTY_LIST;
    if (verbose) {
        listeners = Collections.singletonList(new DebugResolutionListener(getLog()));
    }

    List remoteArtifactRepositories = getArtifactRepositories();

    // TODO: managed dependencies
    Map managedDependencies = Collections.EMPTY_MAP;

    // Combine the optional scope and type restrictions into one filter.
    ArtifactFilter filter = (scopeFilter == null) ? null : new ScopeArtifactFilter(scopeFilter);
    if (typeFilter != null) {
        TypeArtifactFilter byType = new TypeArtifactFilter(typeFilter);
        if (filter == null) {
            filter = byType;
        } else {
            AndArtifactFilter combined = new AndArtifactFilter();
            combined.add(filter);
            combined.add(byType);
            filter = combined;
        }
    }

    // Seed the model with the requested artifact at its center.
    GraphModel model = new GraphModel();
    Node centerNode = toNode(pomArtifact);
    model.addNode(centerNode);
    model.setCenterNode(centerNode);

    ArtifactResolutionResult result;
    try {
        Set artifacts = new HashSet();
        artifacts.add(pomArtifact);

        result = artifactResolver.resolveTransitively(artifacts, pomArtifact, managedDependencies,
                localRepository, remoteArtifactRepositories, mavenMetadataSource, filter, listeners);
    } catch (ArtifactResolutionException e) {
        throw new MojoExecutionException("Unable to resolve deps.", e);
    } catch (ArtifactNotFoundException e) {
        throw new MojoExecutionException("Unable to resolve deps.", e);
    }

    getLog().info("Got " + result.getArtifactResolutionNodes().size() + " resolution node(s).");

    // Link every resolved node to the center and recurse into its children.
    for (Iterator it = result.getArtifactResolutionNodes().iterator(); it.hasNext();) {
        ResolutionNode child = (ResolutionNode) it.next();
        Node childNode = toNode(child.getArtifact());
        Edge edge = new Edge(centerNode, childNode);
        if (model.addEdge(edge)) {
            addChildEdges(model, child);
        }
    }

    return model;
}

From source file:org.codehaus.mojo.jsimport.AbstractImportMojo.java

License:Apache License

/**
 * Build up the dependency graph and global symbol table by parsing the project's
 * dependencies.
 *
 * @param scope compile or test.
 * @param fileDependencyGraphModificationTime the time that the dependency graph was updated. Used for file time
 *            comparisons to check the age of them.
 * @param processedFiles an insert-ordered set of files that have been processed.
 * @param targetFolder Where the target files live.
 * @param workFolder Where we can create some long lived information that may be useful to subsequent builds.
 * @param compileWorkFolder Ditto but in the case of testing it points to where the compile working folder is.
 * @return true if the dependency graph has been updated.
 * @throws MojoExecutionException if something bad happens.
 */
private boolean buildDependencyGraphForDependencies(Scope scope, long fileDependencyGraphModificationTime,
        LinkedHashSet<File> processedFiles, File targetFolder, File workFolder, File compileWorkFolder)
        throws MojoExecutionException {
    File targetJsFolder = new File(targetFolder, "js");

    boolean fileDependencyGraphUpdated = false;

    // Determine how we need to filter things both for direct filtering and transitive filtering.

    String scopeStr = (scope == Scope.COMPILE ? Artifact.SCOPE_COMPILE : Artifact.SCOPE_TEST);

    // Filter 1: plain "js" artifacts in the requested scope.
    AndArtifactFilter jsArtifactFilter = new AndArtifactFilter();
    jsArtifactFilter.add(new ScopeArtifactFilter(scopeStr));
    jsArtifactFilter.add(new TypeArtifactFilter("js"));

    // Filter 2: "zip" artifacts in the requested scope carrying the
    // "www" classifier.
    AndArtifactFilter wwwZipArtifactFilter = new AndArtifactFilter();
    wwwZipArtifactFilter.add(new ScopeArtifactFilter(scopeStr));
    wwwZipArtifactFilter.add(new TypeArtifactFilter("zip"));
    wwwZipArtifactFilter.add(new ArtifactFilter() {
        public boolean include(Artifact artifact) {
            return artifact.hasClassifier() && artifact.getClassifier().equals("www");
        }
    });

    // Determine the artifacts to resolve and associate their transitive dependencies.

    Map<Artifact, LinkedHashSet<Artifact>> directArtifactWithTransitives = new HashMap<Artifact, LinkedHashSet<Artifact>>(
            dependencies.size());

    Set<Artifact> directArtifacts = new HashSet<Artifact>(dependencies.size());
    LinkedHashSet<Artifact> transitiveArtifacts = new LinkedHashSet<Artifact>();

    for (Dependency dependency : dependencies) {
        // Process imports and symbols of this dependencies' transitives
        // first.
        Artifact directArtifact = artifactFactory.createDependencyArtifact(dependency.getGroupId(),
                dependency.getArtifactId(), VersionRange.createFromVersion(dependency.getVersion()),
                dependency.getType(), dependency.getClassifier(), dependency.getScope());

        // Skip dependencies that match neither of the two filters above.
        if (!jsArtifactFilter.include(directArtifact) && !wwwZipArtifactFilter.include(directArtifact)) {
            continue;
        }

        Set<Artifact> artifactsToResolve = new HashSet<Artifact>(1);
        artifactsToResolve.add(directArtifact);

        ArtifactResolutionResult result;
        try {
            result = resolver.resolveTransitively(artifactsToResolve, project.getArtifact(), remoteRepositories,
                    localRepository, artifactMetadataSource);
        } catch (ArtifactResolutionException e) {
            throw new MojoExecutionException("Problem resolving dependencies", e);
        } catch (ArtifactNotFoundException e) {
            throw new MojoExecutionException("Problem resolving dependencies", e);
        }

        // Associate the transitive dependencies with the direct dependency and aggregate all transitives for
        // collection later. Only "js" transitives are kept; the direct
        // artifact itself is excluded from its own transitive set.

        LinkedHashSet<Artifact> directTransitiveArtifacts = new LinkedHashSet<Artifact>(
                result.getArtifacts().size());
        for (Object o : result.getArtifacts()) {
            Artifact resolvedArtifact = (Artifact) o;
            if (jsArtifactFilter.include(resolvedArtifact) && //
                    !resolvedArtifact.equals(directArtifact)) {
                directTransitiveArtifacts.add(resolvedArtifact);
            }
        }

        directArtifacts.add(directArtifact);
        transitiveArtifacts.addAll(directTransitiveArtifacts);
        directArtifactWithTransitives.put(directArtifact, directTransitiveArtifacts);
    }

    // Resolve the best versions of the transitives to use by asking Maven to collect them.

    Set<Artifact> collectedArtifacts = new HashSet<Artifact>(
            directArtifacts.size() + transitiveArtifacts.size());
    Map<ArtifactId, Artifact> indexedCollectedDependencies = new HashMap<ArtifactId, Artifact>(
            collectedArtifacts.size());
    try {
        // Note that we must pass an insert-order set into the collector. The collector appears to assume that order
        // is significant, even though it is undocumented.
        LinkedHashSet<Artifact> collectableArtifacts = new LinkedHashSet<Artifact>(directArtifacts);
        collectableArtifacts.addAll(transitiveArtifacts);

        ArtifactResolutionResult resolutionResult = artifactCollector.collect(collectableArtifacts,
                project.getArtifact(), localRepository, remoteRepositories, artifactMetadataSource, null, //
                Collections.EMPTY_LIST);
        for (Object o : resolutionResult.getArtifacts()) {
            Artifact collectedArtifact = (Artifact) o;
            collectedArtifacts.add(collectedArtifact);

            // Build up an index of collected transitive dependencies so that we can refer back to them as we
            // process the direct dependencies.
            ArtifactId collectedArtifactId = new ArtifactId(collectedArtifact.getGroupId(),
                    collectedArtifact.getArtifactId());
            indexedCollectedDependencies.put(collectedArtifactId, collectedArtifact);
        }

        if (getLog().isDebugEnabled()) {
            getLog().debug("Dependencies collected: " + collectedArtifacts.toString());
        }
    } catch (ArtifactResolutionException e) {
        throw new MojoExecutionException("Cannot collect dependencies", e);
    }

    // Now go through direct artifacts and process their transitives.

    LocalRepositoryCollector localRepositoryCollector = new LocalRepositoryCollector(project, localRepository,
            new File[] {});

    for (Entry<Artifact, LinkedHashSet<Artifact>> entry : directArtifactWithTransitives.entrySet()) {
        Artifact directArtifact = entry.getKey();
        LinkedHashSet<Artifact> directArtifactTransitives = entry.getValue();

        LinkedHashSet<String> transitivesAsImports = new LinkedHashSet<String>(
                directArtifactTransitives.size());

        for (Object o : directArtifactTransitives) {
            Artifact directTransitiveArtifact = (Artifact) o;

            // Get the transitive artifact that Maven decided was the best to use.

            ArtifactId directTransitiveArtifactId = new ArtifactId(directTransitiveArtifact.getGroupId(),
                    directTransitiveArtifact.getArtifactId());
            Artifact transitiveArtifact = indexedCollectedDependencies.get(directTransitiveArtifactId);

            List<File> transitiveArtifactFiles = getArtifactFiles(transitiveArtifact, targetFolder, workFolder,
                    compileWorkFolder, localRepositoryCollector);

            // Only process this dependency if we've not done so
            // already.
            for (File transitiveArtifactFile : transitiveArtifactFiles) {
                if (!processedFiles.contains(transitiveArtifactFile)) {
                    String localRepository = localRepositoryCollector
                            .findLocalRepository(transitiveArtifactFile.getAbsolutePath());
                    if (localRepository != null) {
                        if (processFileForImportsAndSymbols(new File(localRepository), targetJsFolder,
                                transitiveArtifactFile, fileDependencyGraphModificationTime,
                                directArtifactTransitives)) {

                            processedFiles.add(transitiveArtifactFile);

                            fileDependencyGraphUpdated = true;
                        }
                    } else {
                        throw new MojoExecutionException(
                                "Problem determining local repository for transitive file: "
                                        + transitiveArtifactFile);
                    }
                }

                // Add transitives to the artifacts set of dependencies -
                // as if they were @import statements themselves.
                transitivesAsImports.add(transitiveArtifactFile.getPath());
            }
        }

        // Now deal with the pom specified dependency.
        List<File> artifactFiles = getArtifactFiles(directArtifact, targetFolder, workFolder, compileWorkFolder,
                localRepositoryCollector);
        for (File artifactFile : artifactFiles) {
            String artifactPath = artifactFile.getAbsolutePath();

            // Process imports and symbols of this dependency if we've not
            // already done so.
            if (!processedFiles.contains(artifactFile)) {
                String localRepository = localRepositoryCollector
                        .findLocalRepository(artifactFile.getAbsolutePath());
                if (localRepository != null) {
                    if (processFileForImportsAndSymbols(new File(localRepository), targetJsFolder, artifactFile,
                            fileDependencyGraphModificationTime, null)) {
                        processedFiles.add(artifactFile);

                        fileDependencyGraphUpdated = true;
                    }
                } else {
                    throw new MojoExecutionException(
                            "Problem determining local repository for file: " + artifactFile);
                }
            }

            // Add in our transitives to the dependency graph if they're not
            // already there.
            // NOTE(review): fileDependencies.get(artifactPath) is assumed to be
            // non-null here — presumably processFileForImportsAndSymbols
            // registers the entry; confirm against that helper.
            LinkedHashSet<String> existingImports = fileDependencies.get(artifactPath);
            if (existingImports.addAll(transitivesAsImports)) {
                if (getLog().isDebugEnabled()) {
                    getLog().debug("Using transitives as import: " + transitivesAsImports + " for file: "
                            + artifactPath);
                }
                fileDependencyGraphUpdated = true;
            }
        }

    }

    return fileDependencyGraphUpdated;
}