Example usage for org.apache.maven.artifact.resolver ResolutionNode getRemoteRepositories

List of usage examples for org.apache.maven.artifact.resolver ResolutionNode getRemoteRepositories

Introduction

On this page you can find example usage of org.apache.maven.artifact.resolver.ResolutionNode.getRemoteRepositories().

Prototype

public List<ArtifactRepository> getRemoteRepositories() 
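
The method returns the remote repositories recorded for a dependency node while the dependency graph was being collected. As the usage examples below show, the typical pattern is to collect the graph with an ArtifactCollector and then resolve each node's artifact against exactly the repositories returned by node.getRemoteRepositories(). The following minimal sketch (not taken from any of the projects below) illustrates that pattern inside a Maven 2.x mojo; collector, resolver, metadataSource, localRepository, remoteRepositories and project are assumed to be injected plugin components and their names are placeholders.

// Minimal sketch: collect the dependency graph, then resolve each node's artifact
// against the repositories recorded on that node. All fields used here are assumed
// to be injected components/parameters of a Maven 2.x mojo (hypothetical names).
ArtifactResolutionResult result = collector.collect(project.getDependencyArtifacts(),
        project.getArtifact(), localRepository, remoteRepositories, metadataSource,
        null, new ArrayList<ResolutionListener>());

for (Object o : result.getArtifactResolutionNodes()) {
    ResolutionNode node = (ResolutionNode) o;
    Artifact artifact = node.getArtifact();
    try {
        // Resolve (download) the artifact using the repositories this node knows about.
        resolver.resolve(artifact, node.getRemoteRepositories(), localRepository);
    } catch (ArtifactNotFoundException e) {
        // The artifact is missing from all of the node's repositories; log and continue.
        getLog().warn("Unable to find " + artifact.getId(), e);
    } catch (ArtifactResolutionException e) {
        // Resolution failed for another reason (e.g. a transfer error); log and continue.
        getLog().warn("Unable to resolve " + artifact.getId(), e);
    }
}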

Usage

From source file:com.alibaba.citrus.maven.eclipse.base.ide.AbstractIdeSupportMojo.java

License:Apache License

/**
 * Resolve project dependencies. Manual resolution is needed in order to avoid resolution of multiproject artifacts
 * (if projects are linked to each other, an installed jar is not needed) and to avoid a failure when a jar is
 * missing.
 *
 * @return resolved IDE dependencies, with attached jars for non-reactor dependencies
 * @throws MojoExecutionException if dependencies can't be resolved
 */
protected IdeDependency[] doDependencyResolution() throws MojoExecutionException {
    if (ideDeps == null) {
        if (resolveDependencies) {
            MavenProject project = getProject();
            ArtifactRepository localRepo = getLocalRepository();

            List deps = getProject().getDependencies();

            // Collect the list of resolved IdeDependencies.
            List dependencies = new ArrayList();

            if (deps != null) {
                Map managedVersions = createManagedVersionMap(getArtifactFactory(), project.getId(),
                        project.getDependencyManagement());

                ArtifactResolutionResult artifactResolutionResult = null;

                try {

                    List listeners = new ArrayList();

                    if (logger.isDebugEnabled()) {
                        listeners.add(new DebugResolutionListener(logger));
                    }

                    listeners.add(new WarningResolutionListener(logger));

                    artifactResolutionResult = artifactCollector.collect(getProjectArtifacts(),
                            project.getArtifact(), managedVersions, localRepo,
                            project.getRemoteArtifactRepositories(), getArtifactMetadataSource(), null,
                            listeners);
                } catch (ArtifactResolutionException e) {
                    getLog().debug(e.getMessage(), e);
                    getLog().error(
                            Messages.getString("AbstractIdeSupportMojo.artifactresolution", new Object[] { //$NON-NLS-1$
                                    e.getGroupId(), e.getArtifactId(), e.getVersion(), e.getMessage() }));

                    // if we are here artifactResolutionResult is null, create a project without dependencies
                    // but don't fail (this could be a reactor project, we don't want to fail everything)
                    // Causes MECLIPSE-185. Not sure if it should be handled this way??
                    return new IdeDependency[0];
                }

                // keep track of added reactor projects in order to avoid duplicates
                Set emittedReactorProjectId = new HashSet();

                for (Iterator i = artifactResolutionResult.getArtifactResolutionNodes().iterator(); i
                        .hasNext();) {

                    ResolutionNode node = (ResolutionNode) i.next();
                    int dependencyDepth = node.getDepth();
                    Artifact art = node.getArtifact();
                    // don't resolve jars for reactor projects
                    if (hasToResolveJar(art)) {
                        try {
                            artifactResolver.resolve(art, node.getRemoteRepositories(), localRepository);
                        } catch (ArtifactNotFoundException e) {
                            getLog().debug(e.getMessage(), e);
                            getLog().warn(Messages.getString("AbstractIdeSupportMojo.artifactdownload", //$NON-NLS-1$
                                    new Object[] { e.getGroupId(), e.getArtifactId(), e.getVersion(),
                                            e.getMessage() }));
                        } catch (ArtifactResolutionException e) {
                            getLog().debug(e.getMessage(), e);
                            getLog().warn(Messages.getString("AbstractIdeSupportMojo.artifactresolution",
                                    new Object[] { //$NON-NLS-1$
                                            e.getGroupId(), e.getArtifactId(), e.getVersion(),
                                            e.getMessage() }));
                        }
                    }

                    boolean includeArtifact = true;
                    if (getExcludes() != null) {
                        String artifactFullId = art.getGroupId() + ":" + art.getArtifactId();
                        if (getExcludes().contains(artifactFullId)) {
                            getLog().info("excluded: " + artifactFullId);
                            includeArtifact = false;
                        }
                    }

                    if (includeArtifact && (!(getUseProjectReferences() && isAvailableAsAReactorProject(art))
                            || emittedReactorProjectId.add(art.getGroupId() + '-' + art.getArtifactId()))) {

                        // the following doesn't work: art.getArtifactHandler().getPackaging() always returns "jar",
                        // also if the packaging specified in pom.xml is different.

                        // osgi-bundle packaging is provided by the felix osgi plugin
                        // eclipse-plugin packaging is provided by this eclipse plugin
                        // String packaging = art.getArtifactHandler().getPackaging();
                        // boolean isOsgiBundle = "osgi-bundle".equals( packaging ) || "eclipse-plugin".equals(
                        // packaging );

                        // we need to check the manifest: if "Bundle-SymbolicName" is there, the artifact can be
                        // considered an osgi bundle
                        boolean isOsgiBundle = false;
                        String osgiSymbolicName = null;
                        if (art.getFile() != null) {
                            JarFile jarFile = null;
                            try {
                                jarFile = new JarFile(art.getFile(), false, ZipFile.OPEN_READ);

                                Manifest manifest = jarFile.getManifest();
                                if (manifest != null) {
                                    osgiSymbolicName = manifest.getMainAttributes()
                                            .getValue(new Attributes.Name("Bundle-SymbolicName"));
                                }
                            } catch (IOException e) {
                                getLog().info("Unable to read jar manifest from " + art.getFile());
                            } finally {
                                if (jarFile != null) {
                                    try {
                                        jarFile.close();
                                    } catch (IOException e) {
                                        // ignore
                                    }
                                }
                            }
                        }

                        isOsgiBundle = osgiSymbolicName != null;

                        IdeDependency dep = new IdeDependency(art.getGroupId(), art.getArtifactId(),
                                art.getVersion(), art.getClassifier(), useProjectReference(art),
                                Artifact.SCOPE_TEST.equals(art.getScope()),
                                Artifact.SCOPE_SYSTEM.equals(art.getScope()),
                                Artifact.SCOPE_PROVIDED.equals(art.getScope()),
                                art.getArtifactHandler().isAddedToClasspath(), art.getFile(), art.getType(),
                                isOsgiBundle, osgiSymbolicName, dependencyDepth, getProjectNameForArifact(art));
                        // no duplicate entries allowed. System paths can cause this problem.
                        if (!dependencies.contains(dep)) {
                            dependencies.add(dep);
                        }
                    }
                }

                // @todo a final report with the list of
                // missingArtifacts?

            }

            ideDeps = (IdeDependency[]) dependencies.toArray(new IdeDependency[dependencies.size()]);
        } else {
            ideDeps = new IdeDependency[0];
        }
    }

    return ideDeps;
}

From source file:fr.paris.lutece.maven.ExplodedMojo.java

License:Open Source License

/**
 * Used to filter duplicate dependencies in a multi-project build.
 *
 * @return a list of artifacts with no duplicate entries
 */
private Set<Artifact> doDependencyResolution() {
    Set<Artifact> artifactsReturn = new HashSet<Artifact>();

    // Collector Filter jar artifacts in scope 'compile' or 'runtime'
    ArtifactFilter thirdPartyFilter = new ArtifactFilter() {
        @Override
        public boolean include(Artifact artifact) {
            return (!LUTECE_CORE_TYPE.equals(artifact.getArtifactId())
                    && !SERVLET_API.equals(artifact.getArtifactId())
                    && !Artifact.SCOPE_PROVIDED.equals(artifact.getScope())
                    && !Artifact.SCOPE_TEST.equals(artifact.getScope()));
        }
    };

    // Collector listener config
    List<ResolutionListener> listeners = new ArrayList<ResolutionListener>();

    if (logger.isDebugEnabled()) {
        listeners.add(new DebugResolutionListener(logger));
    }

    listeners.add(new WarningResolutionListener(logger));

    /*---------------- Resolution-------------*/
    // resolve conflict version artifacts with collector
    ArtifactResolutionResult artifactResolutionResult = null;

    try {
        artifactResolutionResult = artifactCollector.collect(multiProjectArtifacts, project.getArtifact(),
                localRepository, remoteRepositories, metadataSource, thirdPartyFilter, listeners);
    } catch (ArtifactResolutionException e) {
        e.printStackTrace();
    }

    // keep track of added reactor projects in order to avoid duplicates
    Set<String> emittedReactorProjectId = new HashSet<String>();

    for (ResolutionNode node : artifactResolutionResult.getArtifactResolutionNodes()) {
        Artifact art = node.getArtifact();

        try {
            resolver.resolve(art, node.getRemoteRepositories(), localRepository);
        } catch (ArtifactNotFoundException e) {
            e.printStackTrace();
        } catch (ArtifactResolutionException e) {
            e.printStackTrace();
        }

        if (emittedReactorProjectId.add(art.getGroupId() + '-' + art.getArtifactId())) {
            artifactsReturn.add(art);
        }
    }

    return artifactsReturn;
}

From source file:org.apache.tuscany.maven.plugin.eclipse.AbstractIdeSupportMojo.java

License:Apache License

/**
 * Resolve project dependencies. Manual resolution is needed in order to avoid resolution of multiproject artifacts
 * (if projects are linked to each other, an installed jar is not needed) and to avoid a failure when a jar is
 * missing.
 *
 * @throws MojoExecutionException if dependencies can't be resolved
 * @return resolved IDE dependencies, with attached jars for non-reactor dependencies
 */
protected IdeDependency[] doDependencyResolution() throws MojoExecutionException {
    if (ideDeps == null) {
        if (resolveDependencies) {
            MavenProject project = getProject();
            Set<String> imported = Collections.emptySet();
            try {
                imported = BundleUtil.getImportedPackages(project.getBasedir());
            } catch (IOException e1) {
                throw new MojoExecutionException(e1.getMessage(), e1);
            }
            ArtifactRepository localRepo = getLocalRepository();

            List deps = getProject().getDependencies();

            // Collect the list of resolved IdeDependencies.
            List dependencies = new ArrayList();

            if (deps != null) {
                Map managedVersions = createManagedVersionMap(getArtifactFactory(), project.getId(),
                        project.getDependencyManagement());

                ArtifactResolutionResult artifactResolutionResult = null;

                try {

                    List listeners = new ArrayList();

                    if (logger.isDebugEnabled()) {
                        listeners.add(new DebugResolutionListener(logger));
                    }

                    listeners.add(new WarningResolutionListener(logger));

                    artifactResolutionResult = artifactCollector.collect(getProjectArtifacts(),
                            project.getArtifact(), managedVersions, localRepo,
                            project.getRemoteArtifactRepositories(), getArtifactMetadataSource(), null,
                            listeners);
                } catch (ArtifactResolutionException e) {
                    getLog().debug(e.getMessage(), e);
                    getLog().error(
                            Messages.getString("AbstractIdeSupportMojo.artifactresolution", new Object[] { //$NON-NLS-1$
                                    e.getGroupId(), e.getArtifactId(), e.getVersion(), e.getMessage() }));

                    // if we are here artifactResolutionResult is null, create a project without dependencies
                    // but don't fail (this could be a reactor project, we don't want to fail everything)
                    // Causes MECLIPSE-185. Not sure if it should be handled this way??
                    return new IdeDependency[0];
                }

                // keep track of added reactor projects in order to avoid duplicates
                Set emittedReactorProjectId = new HashSet();

                for (Iterator i = artifactResolutionResult.getArtifactResolutionNodes().iterator(); i
                        .hasNext();) {

                    ResolutionNode node = (ResolutionNode) i.next();
                    int dependencyDepth = node.getDepth();
                    Artifact art = node.getArtifact();
                    // don't resolve jars for reactor projects
                    if (hasToResolveJar(art)) {
                        try {
                            artifactResolver.resolve(art, node.getRemoteRepositories(), localRepository);
                        } catch (ArtifactNotFoundException e) {
                            getLog().debug(e.getMessage(), e);
                            getLog().warn(Messages.getString("AbstractIdeSupportMojo.artifactdownload", //$NON-NLS-1$
                                    new Object[] { e.getGroupId(), e.getArtifactId(), e.getVersion(),
                                            e.getMessage() }));
                        } catch (ArtifactResolutionException e) {
                            getLog().debug(e.getMessage(), e);
                            getLog().warn(Messages.getString("AbstractIdeSupportMojo.artifactresolution", //$NON-NLS-1$
                                    new Object[] { e.getGroupId(), e.getArtifactId(), e.getVersion(),
                                            e.getMessage() }));
                        }
                    }

                    boolean includeArtifact = true;
                    if (getExcludes() != null) {
                        String artifactFullId = art.getGroupId() + ":" + art.getArtifactId();
                        if (getExcludes().contains(artifactFullId)) {
                            getLog().info("excluded: " + artifactFullId);
                            includeArtifact = false;
                        }
                    }

                    if (includeArtifact && (!(getUseProjectReferences() && isAvailableAsAReactorProject(art))
                            || emittedReactorProjectId.add(art.getGroupId() + '-' + art.getArtifactId()))) {

                        // the following doesn't work: art.getArtifactHandler().getPackaging() always returns "jar",
                        // also if the packaging specified in pom.xml is different.

                        // osgi-bundle packaging is provided by the felix osgi plugin
                        // eclipse-plugin packaging is provided by this eclipse plugin
                        // String packaging = art.getArtifactHandler().getPackaging();
                        // boolean isOsgiBundle = "osgi-bundle".equals( packaging ) || "eclipse-plugin".equals(
                        // packaging );

                        // we need to check the manifest: if "Bundle-SymbolicName" is there, the artifact can be
                        // considered an osgi bundle
                        if ("pom".equals(art.getType())) {
                            continue;
                        }
                        File artifactFile = art.getFile();
                        MavenProject reactorProject = getReactorProject(art);
                        if (reactorProject != null) {
                            artifactFile = reactorProject.getBasedir();
                        }
                        boolean isOsgiBundle = false;
                        String osgiSymbolicName = null;
                        try {
                            osgiSymbolicName = BundleUtil.getBundleSymbolicName(artifactFile);
                        } catch (IOException e) {
                            getLog().error("Unable to read jar manifest from " + artifactFile, e);
                        }
                        isOsgiBundle = osgiSymbolicName != null;

                        IdeDependency dep = new IdeDependency(art.getGroupId(), art.getArtifactId(),
                                art.getVersion(), art.getClassifier(), useProjectReference(art),
                                Artifact.SCOPE_TEST.equals(art.getScope()),
                                Artifact.SCOPE_SYSTEM.equals(art.getScope()),
                                Artifact.SCOPE_PROVIDED.equals(art.getScope()),
                                art.getArtifactHandler().isAddedToClasspath(), art.getFile(), art.getType(),
                                isOsgiBundle, osgiSymbolicName, dependencyDepth, getProjectNameForArifact(art));
                        // no duplicate entries allowed. System paths can cause this problem.
                        if (!dependencies.contains(dep)) {
                            // [rfeng] Do not add compile/provided dependencies
                            if (!(pde && (Artifact.SCOPE_COMPILE.equals(art.getScope())
                                    || Artifact.SCOPE_PROVIDED.equals(art.getScope())))) {
                                dependencies.add(dep);
                            } else {
                                // Check this compile dependency is an OSGi package supplier
                                if (!imported.isEmpty()) {
                                    Set<String> exported = Collections.emptySet();
                                    try {
                                        exported = BundleUtil.getExportedPackages(artifactFile);
                                    } catch (IOException e) {
                                        getLog().error("Unable to read jar manifest from " + art.getFile(), e);
                                    }
                                    boolean matched = false;
                                    for (String p : imported) {
                                        if (exported.contains(p)) {
                                            matched = true;
                                            break;
                                        }
                                    }
                                    if (!matched) {
                                        dependencies.add(dep);
                                    } else {
                                        getLog().debug(
                                                "Compile dependency is skipped as it is added through OSGi dependency: "
                                                        + art);
                                    }
                                } else {
                                    dependencies.add(dep);
                                }
                            }
                        }
                    }

                }

                // @todo a final report with the list of
                // missingArtifacts?

            }

            ideDeps = (IdeDependency[]) dependencies.toArray(new IdeDependency[dependencies.size()]);
        } else {
            ideDeps = new IdeDependency[0];
        }
    }

    return ideDeps;
}

From source file:org.codehaus.mojo.sysdeo.ide.AbstractIdeSupportMojo.java

License:Apache License

/**
 * Resolve project dependencies. Manual resolution is needed in order to avoid resolution of multiproject artifacts
 * (if projects are linked to each other, an installed jar is not needed) and to avoid a failure when a jar is
 * missing.
 *
 * @throws MojoExecutionException if dependencies can't be resolved
 * @return resolved IDE dependencies, with attached jars for non-reactor dependencies
 */
protected IdeDependency[] doDependencyResolution() throws MojoExecutionException {

    if (workspace != null) {
        getLog().info("read available projects in eclipse workspace");
        workspaceProjects = new ReadWorkspaceLocations().readWorkspace(workspace, getLog());
    }

    ArtifactRepository localRepo = getLocalRepository();

    List dependencies = getProject().getDependencies();

    // Collect the list of resolved IdeDependencies.
    List dependencyList = new ArrayList();

    if (dependencies != null) {
        Map managedVersions = createManagedVersionMap(project.getId(), project.getDependencyManagement());

        ArtifactResolutionResult artifactResolutionResult = null;

        try {

            List listeners = new ArrayList();

            if (logger.isDebugEnabled()) {
                listeners.add(new DebugResolutionListener(logger));
            }

            listeners.add(new WarningResolutionListener(logger));

            artifactResolutionResult = artifactCollector.collect(getProjectArtifacts(), project.getArtifact(),
                    managedVersions, localRepo, project.getRemoteArtifactRepositories(),
                    getArtifactMetadataSource(), null, listeners);
        } catch (ArtifactResolutionException e) {
            getLog().debug(e.getMessage(), e);
            getLog().error(Messages.getString("artifactresolution", new Object[] { //$NON-NLS-1$
                    e.getGroupId(), e.getArtifactId(), e.getVersion(), e.getMessage() }));

            // if we are here artifactResolutionResult is null, create a project without dependencies but don't fail
            // (this could be a reactor project, we don't want to fail everything)
            return new IdeDependency[0];
        }

        // keep track of added reactor projects in order to avoid duplicates
        Set emittedReactorProjectId = new HashSet();

        for (Iterator i = artifactResolutionResult.getArtifactResolutionNodes().iterator(); i.hasNext();) {
            ResolutionNode node = (ResolutionNode) i.next();
            Artifact art = node.getArtifact();
            boolean isReactorProject = getUseProjectReferences() && isAvailableAsAReactorProject(art);

            // don't resolve jars for reactor projects
            if (!isReactorProject) {
                try {
                    artifactResolver.resolve(art, node.getRemoteRepositories(), localRepository);
                } catch (ArtifactNotFoundException e) {
                    getLog().debug(e.getMessage(), e);
                    getLog().warn(Messages.getString("artifactdownload", new Object[] { //$NON-NLS-1$
                            e.getGroupId(), e.getArtifactId(), e.getVersion(), e.getMessage() }));
                } catch (ArtifactResolutionException e) {
                    getLog().debug(e.getMessage(), e);
                    getLog().warn(Messages.getString("artifactresolution", new Object[] { //$NON-NLS-1$
                            e.getGroupId(), e.getArtifactId(), e.getVersion(), e.getMessage() }));
                }
            }

            if (!isReactorProject
                    || emittedReactorProjectId.add(art.getGroupId() + '-' + art.getArtifactId())) {

                IdeDependency dep = new IdeDependency(art, isReactorProject);
                dep = resolveWorkspaceProject(dep);
                dependencyList.add(dep);

            }

        }

        // @todo a final report with the list of missingArtifacts?

    }

    IdeDependency[] deps = (IdeDependency[]) dependencyList.toArray(new IdeDependency[dependencyList.size()]);

    return deps;
}

From source file:org.eclipse.che.maven.CheArtifactResolver.java

License:Apache License

public ArtifactResolutionResult resolve(ArtifactResolutionRequest request) {
    Artifact rootArtifact = request.getArtifact();
    Set<Artifact> artifacts = request.getArtifactDependencies();
    Map<String, Artifact> managedVersions = request.getManagedVersionMap();
    List<ResolutionListener> listeners = request.getListeners();
    ArtifactFilter collectionFilter = request.getCollectionFilter();
    ArtifactFilter resolutionFilter = request.getResolutionFilter();
    RepositorySystemSession session = getSession(request.getLocalRepository());

    //TODO: hack because metadata isn't generated in m2e correctly and i want to run the maven i have in the workspace
    if (source == null) {
        try {
            source = container.lookup(ArtifactMetadataSource.class);
        } catch (ComponentLookupException e) {
            // won't happen
        }
    }

    if (listeners == null) {
        listeners = new ArrayList<ResolutionListener>();

        if (logger.isDebugEnabled()) {
            listeners.add(new DebugResolutionListener(logger));
        }

        listeners.add(new WarningResolutionListener(logger));
    }

    ArtifactResolutionResult result = new ArtifactResolutionResult();

    // The root artifact may, or may not be resolved so we need to check before we attempt to resolve.
    // This is often an artifact like a POM that is taken from disk and we already have hold of the
    // file reference. But this may be a Maven Plugin that we need to resolve from a remote repository
    // as well as its dependencies.

    if (request.isResolveRoot() /* && rootArtifact.getFile() == null */) {
        try {
            resolve(rootArtifact, request.getRemoteRepositories(), session);
        } catch (ArtifactResolutionException e) {
            result.addErrorArtifactException(e);
            return result;
        } catch (ArtifactNotFoundException e) {
            result.addMissingArtifact(request.getArtifact());
            return result;
        }
    }

    ArtifactResolutionRequest collectionRequest = request;

    if (request.isResolveTransitively()) {
        MetadataResolutionRequest metadataRequest = new DefaultMetadataResolutionRequest(request);

        metadataRequest.setArtifact(rootArtifact);
        metadataRequest.setResolveManagedVersions(managedVersions == null);

        try {
            ResolutionGroup resolutionGroup = source.retrieve(metadataRequest);

            if (managedVersions == null) {
                managedVersions = resolutionGroup.getManagedVersions();
            }

            Set<Artifact> directArtifacts = resolutionGroup.getArtifacts();

            if (artifacts == null || artifacts.isEmpty()) {
                artifacts = directArtifacts;
            } else {
                List<Artifact> allArtifacts = new ArrayList<Artifact>();
                allArtifacts.addAll(artifacts);
                allArtifacts.addAll(directArtifacts);

                Map<String, Artifact> mergedArtifacts = new LinkedHashMap<String, Artifact>();
                for (Artifact artifact : allArtifacts) {
                    String conflictId = artifact.getDependencyConflictId();
                    if (!mergedArtifacts.containsKey(conflictId)) {
                        mergedArtifacts.put(conflictId, artifact);
                    }
                }

                artifacts = new LinkedHashSet<Artifact>(mergedArtifacts.values());
            }

            collectionRequest = new ArtifactResolutionRequest(request);
            collectionRequest.setServers(request.getServers());
            collectionRequest.setMirrors(request.getMirrors());
            collectionRequest.setProxies(request.getProxies());
            collectionRequest.setRemoteRepositories(resolutionGroup.getResolutionRepositories());
        } catch (ArtifactMetadataRetrievalException e) {
            ArtifactResolutionException are = new ArtifactResolutionException(
                    "Unable to get dependency information for " + rootArtifact.getId() + ": " + e.getMessage(),
                    rootArtifact, metadataRequest.getRemoteRepositories(), e);
            result.addMetadataResolutionException(are);
            return result;
        }
    }

    if (artifacts == null || artifacts.isEmpty()) {
        if (request.isResolveRoot()) {
            result.addArtifact(rootArtifact);
        }
        return result;
    }

    // After the collection we will have the artifact object in the result but they will not be resolved yet.
    result = artifactCollector.collect(artifacts, rootArtifact, managedVersions, collectionRequest, source,
            collectionFilter, listeners, null);

    // We have metadata retrieval problems, or there are cycles that have been detected
    // so we give this back to the calling code and let them deal with this information
    // appropriately.

    if (result.hasMetadataResolutionExceptions() || result.hasVersionRangeViolations()
            || result.hasCircularDependencyExceptions()) {
        return result;
    }

    if (result.getArtifactResolutionNodes() != null) {
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();

        CountDownLatch latch = new CountDownLatch(result.getArtifactResolutionNodes().size());

        for (ResolutionNode node : result.getArtifactResolutionNodes()) {
            Artifact artifact = node.getArtifact();

            if (resolutionFilter == null || resolutionFilter.include(artifact)) {
                executor.execute(new ResolveTask(classLoader, latch, artifact, session,
                        node.getRemoteRepositories(), result));
            } else {
                latch.countDown();
            }
        }
        try {
            latch.await();
        } catch (InterruptedException e) {
            result.addErrorArtifactException(
                    new ArtifactResolutionException("Resolution interrupted", rootArtifact, e));
        }
    }

    // We want to send the root artifact back in the result but we need to do this after the other dependencies
    // have been resolved.
    if (request.isResolveRoot()) {
        // Add the root artifact (as the first artifact to retain logical order of class path!)
        Set<Artifact> allArtifacts = new LinkedHashSet<Artifact>();
        allArtifacts.add(rootArtifact);
        allArtifacts.addAll(result.getArtifacts());
        result.setArtifacts(allArtifacts);
    }

    return result;
}

From source file:org.jetbrains.idea.maven.server.embedder.CustomMaven32ArtifactResolver.java

License:Apache License

public ArtifactResolutionResult resolve(ArtifactResolutionRequest request) {
    Artifact rootArtifact = request.getArtifact();
    Set<Artifact> artifacts = request.getArtifactDependencies();
    Map<String, Artifact> managedVersions = request.getManagedVersionMap();
    List<ResolutionListener> listeners = request.getListeners();
    ArtifactFilter collectionFilter = request.getCollectionFilter();
    ArtifactFilter resolutionFilter = request.getResolutionFilter();
    RepositorySystemSession session = getSession(request.getLocalRepository());

    //TODO: hack because metadata isn't generated in m2e correctly and i want to run the maven i have in the workspace
    if (source == null) {
        try {
            source = container.lookup(ArtifactMetadataSource.class);
        } catch (ComponentLookupException e) {
            // won't happen
        }
    }

    if (listeners == null) {
        listeners = new ArrayList<ResolutionListener>();

        if (logger.isDebugEnabled()) {
            listeners.add(new DebugResolutionListener(logger));
        }

        listeners.add(new WarningResolutionListener(logger));
    }

    ArtifactResolutionResult result = new ArtifactResolutionResult();

    // The root artifact may, or may not be resolved so we need to check before we attempt to resolve.
    // This is often an artifact like a POM that is taken from disk and we already have hold of the
    // file reference. But this may be a Maven Plugin that we need to resolve from a remote repository
    // as well as its dependencies.

    if (request.isResolveRoot() /* && rootArtifact.getFile() == null */ ) {
        try {
            resolve(rootArtifact, request.getRemoteRepositories(), session);
        } catch (ArtifactResolutionException e) {
            result.addErrorArtifactException(e);
            return result;
        } catch (ArtifactNotFoundException e) {
            result.addMissingArtifact(request.getArtifact());
            return result;
        }
    }

    ArtifactResolutionRequest collectionRequest = request;

    if (request.isResolveTransitively()) {
        MetadataResolutionRequest metadataRequest = new DefaultMetadataResolutionRequest(request);

        metadataRequest.setArtifact(rootArtifact);
        metadataRequest.setResolveManagedVersions(managedVersions == null);

        try {
            ResolutionGroup resolutionGroup = source.retrieve(metadataRequest);

            if (managedVersions == null) {
                managedVersions = resolutionGroup.getManagedVersions();
            }

            Set<Artifact> directArtifacts = resolutionGroup.getArtifacts();

            if (artifacts == null || artifacts.isEmpty()) {
                artifacts = directArtifacts;
            } else {
                List<Artifact> allArtifacts = new ArrayList<Artifact>();
                allArtifacts.addAll(artifacts);
                allArtifacts.addAll(directArtifacts);

                Map<String, Artifact> mergedArtifacts = new LinkedHashMap<String, Artifact>();
                for (Artifact artifact : allArtifacts) {
                    String conflictId = artifact.getDependencyConflictId();
                    if (!mergedArtifacts.containsKey(conflictId)) {
                        mergedArtifacts.put(conflictId, artifact);
                    }
                }

                artifacts = new LinkedHashSet<Artifact>(mergedArtifacts.values());
            }

            collectionRequest = new ArtifactResolutionRequest(request);
            collectionRequest.setServers(request.getServers());
            collectionRequest.setMirrors(request.getMirrors());
            collectionRequest.setProxies(request.getProxies());
            collectionRequest.setRemoteRepositories(resolutionGroup.getResolutionRepositories());
        } catch (ArtifactMetadataRetrievalException e) {
            ArtifactResolutionException are = new ArtifactResolutionException(
                    "Unable to get dependency information for " + rootArtifact.getId() + ": " + e.getMessage(),
                    rootArtifact, metadataRequest.getRemoteRepositories(), e);
            result.addMetadataResolutionException(are);
            return result;
        }
    }

    if (artifacts == null || artifacts.isEmpty()) {
        if (request.isResolveRoot()) {
            result.addArtifact(rootArtifact);
        }
        return result;
    }

    // After the collection we will have the artifact object in the result but they will not be resolved yet.
    result = artifactCollector.collect(artifacts, rootArtifact, managedVersions, collectionRequest, source,
            collectionFilter, listeners, null);

    // We have metadata retrieval problems, or there are cycles that have been detected
    // so we give this back to the calling code and let them deal with this information
    // appropriately.

    if (result.hasMetadataResolutionExceptions() || result.hasVersionRangeViolations()
            || result.hasCircularDependencyExceptions()) {
        return result;
    }

    if (result.getArtifactResolutionNodes() != null) {
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();

        CountDownLatch latch = new CountDownLatch(result.getArtifactResolutionNodes().size());

        for (ResolutionNode node : result.getArtifactResolutionNodes()) {
            Artifact artifact = node.getArtifact();

            if (resolutionFilter == null || resolutionFilter.include(artifact)) {
                executor.execute(new ResolveTask(classLoader, latch, artifact, session,
                        node.getRemoteRepositories(), result));
            } else {
                latch.countDown();
            }
        }
        try {
            latch.await();
        } catch (InterruptedException e) {
            result.addErrorArtifactException(
                    new ArtifactResolutionException("Resolution interrupted", rootArtifact, e));
        }
    }

    // We want to send the root artifact back in the result but we need to do this after the other dependencies
    // have been resolved.
    if (request.isResolveRoot()) {
        // Add the root artifact (as the first artifact to retain logical order of class path!)
        Set<Artifact> allArtifacts = new LinkedHashSet<Artifact>();
        allArtifacts.add(rootArtifact);
        allArtifacts.addAll(result.getArtifacts());
        result.setArtifacts(allArtifacts);
    }

    return result;
}

From source file:org.universAAL.maven.treebuilder.DependencyTreeBuilder.java

License:Apache License

/**
 * Method resolves provided node with the use of provided
 * ArtifactMetadataSource and taking into account ManagedVersionMap. Output
 * is passed to listeners, passed as argument, which are notified about all
 * events related to dependencies detected in the tree.
 *
 * @param parentNode
 *            Parent node
 * @param child
 *            Child node
 * @param filter
 *            Filter for filtering artifacts for the resolving process.
 * @param managedVersions
 *            Map of managed versions.
 * @param listener
 *            Listener to be notified about events related to resolution
 *            process.
 * @param source
 *            ArtifactMetadataSource object passed by maven.
 * @param parentArtifact
 *            Parent artifact
 * @return returns true if the child should be recursively resolved.
 * @throws OverConstrainedVersionException
 *             Occurs when ranges exclude each other and no valid value
 *             remains.
 * @throws ArtifactMetadataRetrievalException
 *             Error while retrieving repository metadata from the
 *             repository
 */
private boolean resolveChildNode(final ResolutionNode parentNode, final ResolutionNode child,
        final ArtifactFilter filter, final ManagedVersionMap managedVersions,
        final DependencyTreeResolutionListener listener, final ArtifactMetadataSource source,
        final Artifact parentArtifact)
        throws OverConstrainedVersionException, ArtifactMetadataRetrievalException {
    // We leave in optional ones, but don't pick up its dependencies
    if (!child.isResolved() && (!child.getArtifact().isOptional() || child.isChildOfRootNode())) {
        Artifact artifact = child.getArtifact();
        artifact.setDependencyTrail(parentNode.getDependencyTrail());

        List childRemoteRepositories = child.getRemoteRepositories();
        try {
            Object childKey;
            do {
                childKey = child.getKey();

                if (managedVersions.containsKey(childKey)) {
                    // If this child node is a managed dependency, ensure we are using the
                    // dependency management version of this child if applicable, b/c we want to
                    // use the managed version's POM, *not* any other version's POM.
                    // We retrieve the POM below in the retrieval step.
                    manageArtifact(child, managedVersions);

                    // Also, we need to ensure that any exclusions it presents are added to the
                    // artifact before we retrieve the metadata for the artifact; otherwise we
                    // may end up with unwanted dependencies.
                    Artifact ma = (Artifact) managedVersions.get(childKey);
                    ArtifactFilter managedExclusionFilter = ma.getDependencyFilter();
                    if (null != managedExclusionFilter) {
                        if (null != artifact.getDependencyFilter()) {
                            AndArtifactFilter aaf = new AndArtifactFilter();
                            aaf.add(artifact.getDependencyFilter());
                            aaf.add(managedExclusionFilter);
                            artifact.setDependencyFilter(aaf);
                        } else {
                            artifact.setDependencyFilter(managedExclusionFilter);
                        }
                    }
                }

                if (artifact.getVersion() == null) {
                    // set the recommended version
                    // TODO: maybe it's better to just pass the range through to retrieval
                    // and use a transformation?
                    ArtifactVersion version;
                    if (artifact.isSelectedVersionKnown()) {
                        version = artifact.getSelectedVersion();
                    } else {
                        // go find the version
                        List versions = artifact.getAvailableVersions();
                        if (versions == null) {
                            versions = source.retrieveAvailableVersions(artifact, localRepository,
                                    childRemoteRepositories);
                            artifact.setAvailableVersions(versions);
                        }

                        Collections.sort(versions);

                        VersionRange versionRange = artifact.getVersionRange();

                        version = versionRange.matchVersion(versions);

                        if (version == null) {
                            if (versions.isEmpty()) {
                                throw new OverConstrainedVersionException(
                                        "No versions are present in the repository for the artifact with a range "
                                                + versionRange,
                                        artifact, childRemoteRepositories);
                            }

                            throw new OverConstrainedVersionException("Couldn't find a version in " + versions
                                    + " to match range " + versionRange, artifact, childRemoteRepositories);
                        }
                    }

                    // this is dangerous because artifact.getSelectedVersion() can return null.
                    // However it is ok here because we first check if the selected version is
                    // known. As currently coded we can't get a null here.
                    artifact.selectVersion(version.toString());
                    fireEvent(ResolutionListener.SELECT_VERSION_FROM_RANGE, listener, child);
                }

                // rotgier: it is not compatible with maven 3
                // Artifact relocated = source.retrieveRelocatedArtifact(
                // artifact, localRepository, childRemoteRepositories);
                // if (relocated != null && !artifact.equals(relocated)) {
                // relocated.setDependencyFilter(artifact
                // .getDependencyFilter());
                // artifact = relocated;
                // child.setArtifact(artifact);
                // }
            } while (!childKey.equals(child.getKey()));

            if (parentArtifact != null && parentArtifact.getDependencyFilter() != null
                    && !parentArtifact.getDependencyFilter().include(artifact)) {
                // MNG-3769: the [probably relocated] artifact is excluded.
                // We could process exclusions on relocated artifact details in the
                // MavenMetadataSource.createArtifacts(..) step, BUT that would require
                // resolving the POM from the repository very early on in the build.
                return true;
            }

            ResolutionGroup rGroup = source.retrieve(artifact, localRepository, childRemoteRepositories);

            // TODO might be better to have source.retrieve() throw
            // a specific exception for this situation
            // and catch here rather than have it return null
            if (rGroup == null) {
                // relocated dependency artifact is declared
                // excluded, no need to add and recurse further
                return true;
            }
            child.addDependencies(rGroup.getArtifacts(), rGroup.getResolutionRepositories(), filter);

        } catch (CyclicDependencyException e) {
            // would like to throw this, but we have crappy stuff in
            // the repo

            fireEvent(ResolutionListener.OMIT_FOR_CYCLE, listener,
                    new ResolutionNode(e.getArtifact(), childRemoteRepositories, child));
        } catch (ArtifactMetadataRetrievalException e) {
            artifact.setDependencyTrail(parentNode.getDependencyTrail());
            throw e;
        }
    } else {
        return true;
    }
    return false;
}

From source file:org.universAAL.maven.treebuilder.DependencyTreeBuilder.java

License:Apache License

/**
 * The heart of the tree builder. Recursively resolves provided artifact.
 * Output is passed to listeners, passed as argument, which are notified
 * about all dependencies detected in the tree. Resolving of each child node
 * is delegated to resolveChildNode method.
 *
 * @param originatingArtifact
 *            Rootnode of recursed subtree.
 * @param node
 *            Current node which is resolved.
 * @param resolvedArtifacts
 *            Map which is used for remembering already resolved artifacts.
 *            Artifacts are indexed by a key which calculation algorithm is
 *            the same as the one present in calculateDepKey method. Thanks
 *            to this map, duplicates and conflicts are detected and
 *            resolved.
 * @param managedVersions
 *            Information about dependency management extracted from the
 *            subtree rootnode - a maven project.
 * @param localRepository
 *            Local maven repository.
 * @param remoteRepositories
 *            Remote repositories provided by maven.
 * @param source
 *            ArtifactMetadataSource provided by maven.
 * @param filter
 *            Filter used for unfiltering artifacts which should not be
 *            included in the dependency tree.
 * @param listener
 *            Listener used for providing the output of the resolve process.
 * @param transitive
 *            If this parameter is false then the children of the current node
 *            are not resolved.
 * 
 * @throws CyclicDependencyException
 *             Exception thrown when cyclic dependency detected.
 * @throws ArtifactResolutionException
 *             Exception thrown when a problem with artifact resolution
 *             occurs.
 * @throws OverConstrainedVersionException
 *             Occurs when ranges exclude each other and no valid value
 *             remains.
 * @throws ArtifactMetadataRetrievalException
 *             Error while retrieving repository metadata from the
 *             repository.
 * @throws NoSuchFieldException
 *             Signals that the class doesn't have a field of a specified
 *             name.
 * @throws SecurityException
 *             Thrown by the security manager to indicate a security
 *             violation.
 * @throws IllegalAccessException
 *             When illegal access is performed in the course of java
 *             reflection operations.
 * @throws IllegalArgumentException
 *             Thrown to indicate that an illegal or inappropriate argument
 *             has been passed.
 */
private void recurse(final Artifact originatingArtifact, final ResolutionNode node, final Map resolvedArtifacts,
        final ManagedVersionMap managedVersions, final ArtifactRepository localRepository,
        final List remoteRepositories, final ArtifactMetadataSource source, final ArtifactFilter filter,
        final DependencyTreeResolutionListener listener, final boolean transitive,
        final Set<String> separatedGroupIds) throws CyclicDependencyException, ArtifactResolutionException,
        OverConstrainedVersionException, ArtifactMetadataRetrievalException, SecurityException,
        NoSuchFieldException, IllegalArgumentException, IllegalAccessException {
    try {

        fireEvent(ResolutionListener.TEST_ARTIFACT, listener, node);
        Object key = node.getKey();

        // TODO: Does this check need to happen here? Had to add the same call below
        // when we iterate on child nodes -- will that suffice?
        if (managedVersions.containsKey(key)) {
            manageArtifact(node, managedVersions);
        }

        List previousNodes = (List) resolvedArtifacts.get(key);
        if (previousNodes != null) {
            for (Iterator i = previousNodes.iterator(); i.hasNext();) {
                ResolutionNode previous = (ResolutionNode) i.next();

                if (previous.isActive()) {
                    // Version mediation
                    VersionRange previousRange = previous.getArtifact().getVersionRange();
                    VersionRange currentRange = node.getArtifact().getVersionRange();

                    if (previousRange != null && currentRange != null) {
                        // TODO: shouldn't need to double up on this work, only done for
                        // simplicity of handling recommended version but the restriction is identical
                        VersionRange newRange = previousRange.restrict(currentRange);
                        // TODO: ick. this forces the OCE that should have come from the previous
                        // call. It is still correct
                        if (newRange.isSelectedVersionKnown(previous.getArtifact())) {
                            fireEvent(ResolutionListener.RESTRICT_RANGE, listener, node, previous, newRange);
                        }
                        previous.getArtifact().setVersionRange(newRange);
                        node.getArtifact().setVersionRange(currentRange.restrict(previousRange));

                        // Select an appropriate available version from the (now restricted) range.
                        // Note this version was selected before to get the appropriate POM.
                        // But it was reset by the call to setVersionRange on restricting the version
                        ResolutionNode[] resetNodes = { previous, node };
                        for (int j = 0; j < 2; j++) {
                            Artifact resetArtifact = resetNodes[j].getArtifact();

                            // MNG-2123: if the previous node was not a range, then it wouldn't
                            // have any available versions. We just clobbered the selected version
                            // above. (why? i have no idea.)
                            // So since we are here and this is ranges we must go figure out
                            // the version (for a third time...)
                            if (resetArtifact.getVersion() == null && resetArtifact.getVersionRange() != null) {

                                // go find the version. This is a total hack. See previous comment.
                                List versions = resetArtifact.getAvailableVersions();
                                if (versions == null) {
                                    try {
                                        versions = source.retrieveAvailableVersions(resetArtifact,
                                                localRepository, remoteRepositories);
                                        resetArtifact.setAvailableVersions(versions);
                                    } catch (ArtifactMetadataRetrievalException e) {
                                        resetArtifact.setDependencyTrail(node.getDependencyTrail());
                                        throw e;
                                    }
                                }
                                // end hack

                                // MNG-2861: match version can return null
                                ArtifactVersion selectedVersion = resetArtifact.getVersionRange()
                                        .matchVersion(resetArtifact.getAvailableVersions());
                                if (selectedVersion != null) {
                                    resetArtifact.selectVersion(selectedVersion.toString());
                                } else {
                                    throw new OverConstrainedVersionException(
                                            " Unable to find a version in "
                                                    + resetArtifact.getAvailableVersions()
                                                    + " to match the range " + resetArtifact.getVersionRange(),
                                            resetArtifact);
                                }
                                fireEvent(ResolutionListener.SELECT_VERSION_FROM_RANGE, listener,
                                        resetNodes[j]);
                            }
                        }
                    }

                    // Conflict Resolution
                    // TODO: use as conflict resolver(s), chain

                    // TODO: should this be part of mediation?
                    // previous one is more dominant
                    ResolutionNode nearest;
                    ResolutionNode farthest;
                    if (previous.getDepth() <= node.getDepth()) {
                        nearest = previous;
                        farthest = node;
                    } else {
                        nearest = node;
                        farthest = previous;
                    }

                    if (checkScopeUpdate(farthest, nearest)) {
                        // if we need to update scope of nearest to use farthest scope,
                        // use the nearest version, but farthest scope
                        nearest.disable();
                        farthest.getArtifact().setVersion(nearest.getArtifact().getVersion());
                        fireEvent(ResolutionListener.OMIT_FOR_NEARER, listener, nearest, farthest);
                    } else {
                        farthest.disable();
                        fireEvent(ResolutionListener.OMIT_FOR_NEARER, listener, farthest, nearest);
                    }
                }
            }
        } else {
            previousNodes = new ArrayList();
            resolvedArtifacts.put(key, previousNodes);
        }
        previousNodes.add(node);

        if (node.isActive()) {
            fireEvent(ResolutionListener.INCLUDE_ARTIFACT, listener, node);
        }

        // don't pull in the transitive deps of a system-scoped dependency.
        if (node.isActive() && !Artifact.SCOPE_SYSTEM.equals(node.getArtifact().getScope())) {
            fireEvent(ResolutionListener.PROCESS_CHILDREN, listener, node);
            if (transitive) {
                Artifact parentArtifact = node.getArtifact();
                for (Iterator i = node.getChildrenIterator(); i.hasNext();) {
                    ResolutionNode child = (ResolutionNode) i.next();
                    if (!filter.include(child.getArtifact())) {
                        continue;
                    }
                    /*
                     * rotgier: In case of regular dependencies provided
                     * scope is simply ignored (artifact versions specified
                     * there conflict with the ones of runtime deps)
                     */
                    if (Artifact.SCOPE_PROVIDED.equals(child.getArtifact().getScope())) {
                        continue;
                    }
                    changeArtifactCoreToOsgi(node, child, separatedGroupIds, listener);
                    boolean isContinue = resolveChildNode(node, child, filter, managedVersions, listener,
                            source, parentArtifact);
                    if (isContinue) {
                        continue;
                    }
                    List<String> extractedSeparatedGroupIds = extractSeparatedGroupIds(child.getArtifact(),
                            remoteRepositories);
                    Set<String> combinedSeparatedGroupIds = new HashSet<String>(separatedGroupIds);
                    combinedSeparatedGroupIds.addAll(extractedSeparatedGroupIds);
                    recurse(originatingArtifact, child, resolvedArtifacts, managedVersions, localRepository,
                            child.getRemoteRepositories(), source, filter, listener, true,
                            combinedSeparatedGroupIds);
                }
                List runtimeDeps = getRuntimeDeps(node.getArtifact(), managedVersions, remoteRepositories);
                Field childrenField = node.getClass().getDeclaredField("children");
                childrenField.setAccessible(true);
                List nodesChildren = (List) childrenField.get(node);
                /* nodesChildren can be empty when dealing with parent POMs */
                if (nodesChildren == Collections.EMPTY_LIST) {
                    nodesChildren = new ArrayList();
                    childrenField.set(node, nodesChildren);
                }
                for (Object runtimeDepObj : runtimeDeps) {
                    DependencyNode runtimeDep = (DependencyNode) runtimeDepObj;
                    Artifact artifact = runtimeDep.getArtifact();
                    ResolutionNode childRuntime = new ResolutionNode(artifact, node.getRemoteRepositories(),
                            node);
                    /*
                     * rotgier: In case of runtime dependencies provided
                     * scope should be allowed
                     */
                    if (!filter.include(childRuntime.getArtifact())) {

                        if (!Artifact.SCOPE_PROVIDED.equals(artifact.getScope())) {
                            continue;
                        }
                    }
                    changeArtifactCoreToOsgi(node, childRuntime, separatedGroupIds, listener);
                    boolean isContinue = resolveChildNode(node, childRuntime, filter, managedVersions, listener,
                            source, parentArtifact);
                    if (isContinue) {
                        continue;
                    }
                    List<String> extractedSeparatedGroupIds = extractSeparatedGroupIds(
                            childRuntime.getArtifact(), remoteRepositories);
                    Set<String> combinedSeparatedGroupIds = new HashSet<String>(separatedGroupIds);
                    combinedSeparatedGroupIds.addAll(extractedSeparatedGroupIds);
                    recurse(originatingArtifact, childRuntime, resolvedArtifacts, managedVersions,
                            localRepository, childRuntime.getRemoteRepositories(), source, filter, listener,
                            true, combinedSeparatedGroupIds);
                    nodesChildren.add(childRuntime);
                }
            }
            fireEvent(ResolutionListener.FINISH_PROCESSING_CHILDREN, listener, node);
        }
    } catch (Exception ex) {
        StringBuilder msg = new StringBuilder();
        msg.append(String.format("\nUnpredicted exception during dependency tree recursion at node %s",
                FilteringVisitorSupport.stringify(node.getArtifact())));
        msg.append("\nNode's parent tree:\n");
        msg.append(printNodeParentsTree(node));
        throw new IllegalStateException(msg.toString(), ex);
    }
}

From source file:org.universAAL.maven.treebuilder.DependencyTreeResolutionListener.java

License:Apache License

/**
 * Creates a new dependency node for the specified artifact and appends it
 * to the current parent dependency node.
 *
 * @param resolutionNode
 *            the attached artifact for the new dependency node
 * @return the new dependency node
 */
private DependencyNode createNode(final ResolutionNode resolutionNode) {
    DependencyNode node = new MyDependencyNode(resolutionNode.getArtifact(),
            resolutionNode.getRemoteRepositories());

    if (!parentNodes.isEmpty()) {
        DependencyNode parent = (DependencyNode) parentNodes.peek();

        parent.addChild(node);
    }

    return node;
}