Example usage for org.apache.commons.lang.time StopWatch start

List of usage examples for org.apache.commons.lang.time StopWatch start

Introduction

On this page you can find example usage of org.apache.commons.lang.time.StopWatch.start().

Prototype

public void start() 

Source Link

Document

Start the stopwatch.

This method starts a new timing session, clearing any previous values.

Usage

From source file:org.apache.archiva.metadata.repository.jcr.JcrRepositorySessionFactory.java

// Bootstraps the JCR-backed repository session factory: collects all
// MetadataFacetFactory beans from the Spring context, normalizes their bean
// names, and runs the one-time JCR structure initialisation.
@PostConstruct
public void initialize() throws Exception {

    // only applies when the configured session factory is the JCR one;
    // other backends (e.g. cassandra) skip this initialisation entirely
    if (!StringUtils.equals(repositorySessionFactoryBean.getId(), "jcr")) {
        return;
    }

    StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    metadataFacetFactories = applicationContext.getBeansOfType(MetadataFacetFactory.class);
    // with Spring the bean id is "metadataFacetFactory#hint" whereas plexus
    // used only the hint, so strip everything up to and including '#'
    Map<String, MetadataFacetFactory> cleanedMetadataFacetFactories = new HashMap<>(
            metadataFacetFactories.size());

    for (Map.Entry<String, MetadataFacetFactory> entry : metadataFacetFactories.entrySet()) {
        cleanedMetadataFacetFactories.put(StringUtils.substringAfterLast(entry.getKey(), "#"),
                entry.getValue());
    }

    metadataFacetFactories = cleanedMetadataFacetFactories;

    JcrMetadataRepository metadataRepository = null;
    try {
        // open a throwaway repository instance just to run the static
        // session/node-type initialisation, then close it again
        metadataRepository = new JcrMetadataRepository(metadataFacetFactories, repository);
        JcrMetadataRepository.initialize(metadataRepository.getJcrSession());
    } catch (RepositoryException e) {
        throw new RuntimeException(e.getMessage(), e);
    } finally {
        if (metadataRepository != null) {
            metadataRepository.close();
        }
    }

    stopWatch.stop();
    logger.info("time to initialize JcrRepositorySessionFactory: {}", stopWatch.getTime());
}

From source file:org.apache.archiva.metadata.repository.jcr.RepositoryFactory.java

/**
 * Creates and configures the Oak-based JCR repository, including the lucene
 * full-text index definitions used by the metadata repository.
 *
 * @return the initialized JCR {@link Repository}
 * @throws IOException if the segment file store cannot be created
 * @throws InvalidFileStoreVersionException if an existing segment store has an incompatible version
 */
public Repository createRepository() throws IOException, InvalidFileStoreVersionException {
    createExecutor();

    if (SEGMENT_FILE_TYPE == storeType) {
        fileStore = FileStoreBuilder.fileStoreBuilder(repositoryPath.toFile()).build();
        nodeStore = SegmentNodeStoreBuilders.builder(fileStore) //
                .withStatisticsProvider(StatisticsProvider.NOOP) //
                .build();
    } else if (IN_MEMORY_TYPE == storeType) {
        // a null nodeStore makes Oak fall back to its in-memory store below
        nodeStore = null;
    } else {
        throw new IllegalArgumentException("Store type " + storeType + " not recognized");
    }

    Oak oak = nodeStore == null ? new Oak() : new Oak(nodeStore);
    oak.with(new RepositoryInitializer() {
        @Override
        public void initialize(@Nonnull NodeBuilder root) {
            log.info("Creating index ");

            NodeBuilder lucene = IndexUtils.getOrCreateOakIndex(root).child("lucene");
            lucene.setProperty(JcrConstants.JCR_PRIMARYTYPE, "oak:QueryIndexDefinition", Type.NAME);

            lucene.setProperty("compatVersion", 2);
            lucene.setProperty("type", "lucene");
            lucene.setProperty(INCLUDE_PROPERTY_TYPES, ImmutableSet.of("String"), Type.STRINGS);
            lucene.setProperty("async", ImmutableSet.of("async", "sync"), Type.STRINGS);
            NodeBuilder rules = lucene.child("indexRules").setProperty(JcrConstants.JCR_PRIMARYTYPE,
                    JcrConstants.NT_UNSTRUCTURED, Type.NAME);
            rules.setProperty(":childOrder", ImmutableSet.of("archiva:projectVersion", //
                    "archiva:artifact", //
                    "archiva:facet", //
                    "archiva:namespace", //
                    "archiva:project"), //
                    Type.STRINGS);
            // identical full-text index rule for each archiva node type
            // (previously five copy-pasted blocks)
            for (String nodeType : new String[] { "archiva:projectVersion", "archiva:artifact",
                    "archiva:facet", "archiva:namespace", "archiva:project" }) {
                addFullTextIndexRule(rules, nodeType);
            }

            log.info("Index: {} myIndex {}", lucene, lucene.getChildNode("myIndex"));
            log.info("myIndex {}", lucene.getChildNode("myIndex").getProperties());
        }
    });

    StatisticsProvider statsProvider = StatisticsProvider.NOOP;
    int queueSize = Integer.getInteger("queueSize", 10000);
    Path indexDir = Files.createTempDirectory("archiva_index");
    log.info("Queue Index {}", indexDir.toString());
    IndexCopier indexCopier = new IndexCopier(executorService, indexDir.toFile(), true);
    NRTIndexFactory nrtIndexFactory = new NRTIndexFactory(indexCopier, statsProvider);
    MountInfoProvider mountInfoProvider = Mounts.defaultMountInfoProvider();
    IndexTracker tracker = new IndexTracker(new DefaultIndexReaderFactory(mountInfoProvider, indexCopier),
            nrtIndexFactory);
    DocumentQueue queue = new DocumentQueue(queueSize, tracker, executorService, statsProvider);
    LocalIndexObserver localIndexObserver = new LocalIndexObserver(queue, statsProvider);
    // same provider serves as both change Observer and QueryIndexProvider below
    LuceneIndexProvider provider = new LuceneIndexProvider(tracker);

    LuceneIndexEditorProvider editorProvider = //
            new LuceneIndexEditorProvider(null, tracker, //
                    new ExtractedTextCache(0, 0), //
                    null, mountInfoProvider);
    editorProvider.setIndexingQueue(queue);

    log.info("Oak: {} with nodeStore {}", oak, nodeStore);
    Jcr jcr = new Jcr(oak).with(editorProvider) //
            .with((Observer) provider) //
            .with(localIndexObserver) //
            .with((QueryIndexProvider) provider);
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    Repository r = jcr.createRepository();
    stopWatch.stop();
    log.info("time to create jcr repository: {} ms", stopWatch.getTime());
    return r;

}

/**
 * Adds a lucene index rule under {@code rules} for the given node type that
 * indexes and analyzes all properties (regexp name ".*", node-scoped).
 */
private static void addFullTextIndexRule(NodeBuilder rules, String nodeType) {
    NodeBuilder allProps = rules.child(nodeType) //
            .child("properties") //
            .setProperty(JcrConstants.JCR_PRIMARYTYPE, "nt:unstructured", Type.NAME) //
            .setProperty(":childOrder", ImmutableSet.of("allProps"), Type.STRINGS) //
            .setProperty("indexNodeName", true) //
            .child("allProps") //
            .setProperty(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME);
    allProps.setProperty("name", ".*");
    allProps.setProperty("isRegexp", true);
    allProps.setProperty("nodeScopeIndex", true);
    allProps.setProperty("index", true);
    allProps.setProperty("analyzed", true);
}

From source file:org.apache.archiva.metadata.repository.stats.DefaultRepositoryStatisticsManager.java

/**
 * Returns the most recent {@link RepositoryStatistics} facet recorded for the
 * given repository, or {@code null} when the repository has never been scanned.
 *
 * @param metadataRepository the metadata store to query
 * @param repositoryId the id of the repository whose statistics are requested
 * @return the latest statistics facet, or {@code null} if none exist
 * @throws MetadataRepositoryException if the metadata store cannot be read
 */
@Override
public RepositoryStatistics getLastStatistics(MetadataRepository metadataRepository, String repositoryId)
        throws MetadataRepositoryException {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    // TODO: consider a more efficient implementation that directly gets the last one from the content repository
    List<String> scans = metadataRepository.getMetadataFacets(repositoryId, RepositoryStatistics.FACET_ID);
    // single guard for both "no facet list" and "empty facet list"
    // (previously two separate null-returning branches)
    if (scans == null || scans.isEmpty()) {
        return null;
    }
    // facet names sort chronologically, so the last entry after sorting is the
    // most recent scan
    Collections.sort(scans);
    String name = scans.get(scans.size() - 1);
    RepositoryStatistics repositoryStatistics = RepositoryStatistics.class
            .cast(metadataRepository.getMetadataFacet(repositoryId, RepositoryStatistics.FACET_ID, name));
    stopWatch.stop();
    log.debug("time to find last RepositoryStatistics: {} ms", stopWatch.getTime());
    return repositoryStatistics;
}

From source file:org.apache.archiva.redback.rest.services.utils.EnvironmentChecker.java

/**
 * Runs all registered {@link EnvironmentCheck} beans at construction time and
 * logs an aggregated failure report plus the total time taken.
 *
 * @param applicationContext the Spring context used to discover the checkers
 */
@Inject
public EnvironmentChecker(ApplicationContext applicationContext) {
    Collection<EnvironmentCheck> checkers = applicationContext.getBeansOfType(EnvironmentCheck.class).values();

    // note: a freshly constructed StopWatch is already reset, so the
    // redundant reset() call has been removed
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    if (checkers != null) {
        List<String> violations = new ArrayList<String>();

        // each checker appends its violations to the shared list
        for (EnvironmentCheck check : checkers) {
            check.validateEnvironment(violations);
        }

        if (!violations.isEmpty()) {
            StringBuilder msg = new StringBuilder();
            msg.append("EnvironmentCheck Failure.\n");
            msg.append("======================================================================\n");
            msg.append(" ENVIRONMENT FAILURE !! \n");
            msg.append("\n");

            for (String v : violations) {
                msg.append(v).append("\n");
            }

            msg.append("\n");
            msg.append("======================================================================");
            log.error(msg.toString());
        }
    }

    stopWatch.stop();
    log.info("time to execute all EnvironmentCheck: {} ms", stopWatch.getTime());
}

From source file:org.apache.archiva.redback.role.DefaultRoleManager.java

// Loads the role model at startup: the mandatory core model first, then every
// optional redback.xml contributed by modules on the classpath.
@PostConstruct
public void initialize() {

    knownResources = new HashMap<String, ModelApplication>();
    this.unblessedModel = new RedbackRoleModel();
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    try {
        // the core model must always be present on the classpath
        URL coreModel = RoleManager.class.getResource("/META-INF/redback/redback-core.xml");

        if (coreModel == null) {
            throw new RuntimeException("unable to initialize role manager, missing redback-core.xml");
        }

        loadRoleModel(coreModel);

        // additional role models are discovered from every jar that ships one
        Enumeration<URL> redbackResources = RoleManager.class.getClassLoader()
                .getResources("META-INF/redback/redback.xml");

        while (redbackResources.hasMoreElements()) {
            loadRoleModel(redbackResources.nextElement());
        }
    } catch (RoleManagerException e) {
        throw new RuntimeException("unable to initialize RoleManager", e);
    } catch (IOException e) {
        throw new RuntimeException("unable to initialize RoleManager, problem with redback.xml loading", e);
    }

    stopWatch.stop();
    log.info("DefaultRoleManager initialize time {}", stopWatch.getTime());
}

From source file:org.apache.archiva.redback.role.processor.DefaultRoleModelProcessor.java

/**
 * Creates or updates every role defined in the given model, in reverse
 * topological order so that child roles exist before parents reference them.
 *
 * @param model the declarative role model to apply to the RBAC store
 * @throws RoleManagerException if the model contains a cycle or the RBAC store fails
 */
@SuppressWarnings("unchecked")
private void processRoles(RedbackRoleModel model) throws RoleManagerException {
    // note: a freshly constructed StopWatch is already reset, so the
    // redundant reset() call has been removed
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    List<String> sortedGraph;
    try {
        sortedGraph = RoleModelUtils.reverseTopologicalSortedRoleList(model);
    } catch (CycleDetectedException e) {
        throw new RoleManagerException("cycle detected: this should have been caught in validation", e);
    }

    List<Role> allRoles;
    try {
        allRoles = rbacManager.getAllRoles();
    } catch (RbacManagerException e) {
        throw new RoleManagerException(e.getMessage(), e);
    }

    // cache existing role names up front so we do not hit the RBAC store
    // once per role just to test existence
    Set<String> allRoleNames = new HashSet<String>(allRoles.size());
    for (Role role : allRoles) {
        allRoleNames.add(role.getName());
    }

    for (String roleId : sortedGraph) {
        ModelRole roleProfile = RoleModelUtils.getModelRole(model, roleId);

        List<Permission> permissions = processPermissions(roleProfile.getPermissions());

        if (!allRoleNames.contains(roleProfile.getName())) {
            createRoleFromProfile(model, roleProfile, permissions, allRoleNames);
        } else {
            updateExistingRole(roleProfile, permissions, allRoleNames);
        }
    }
    stopWatch.stop();
    log.info("time to process roles model: {} ms", stopWatch.getTime());
}

/**
 * Creates a new role from the model profile, attaches its permissions and
 * child-role links, saves it, and links it from its declared parent roles.
 */
private void createRoleFromProfile(RedbackRoleModel model, ModelRole roleProfile,
        List<Permission> permissions, Set<String> allRoleNames) throws RoleManagerException {
    try {
        Role role = rbacManager.createRole(roleProfile.getName());
        role.setDescription(roleProfile.getDescription());
        role.setPermanent(roleProfile.isPermanent());
        role.setAssignable(roleProfile.isAssignable());

        // add any permissions associated with this role
        for (Permission permission : permissions) {
            role.addPermission(permission);
        }

        // add child roles to this role
        if (roleProfile.getChildRoles() != null) {
            for (String childRoleId : roleProfile.getChildRoles()) {
                ModelRole childRoleProfile = RoleModelUtils.getModelRole(model, childRoleId);
                role.addChildRoleName(childRoleProfile.getName());
            }
        }

        rbacManager.saveRole(role);
        allRoleNames.add(role.getName());

        // add link from parent roles to this new role
        if (roleProfile.getParentRoles() != null) {
            for (String parentRoleId : roleProfile.getParentRoles()) {
                ModelRole parentModelRole = RoleModelUtils.getModelRole(model, parentRoleId);
                Role parentRole = rbacManager.getRole(parentModelRole.getName());
                parentRole.addChildRoleName(role.getName());
                rbacManager.saveRole(parentRole);
                allRoleNames.add(parentRole.getName());
            }
        }

    } catch (RbacManagerException e) {
        throw new RoleManagerException("error creating role '" + roleProfile.getName() + "'", e);
    }
}

/**
 * Reconciles an existing role's permissions with the model profile, adding
 * missing and removing stale permissions; saves only when something changed.
 */
private void updateExistingRole(ModelRole roleProfile, List<Permission> permissions,
        Set<String> allRoleNames) throws RoleManagerException {
    try {
        Role role = rbacManager.getRole(roleProfile.getName());

        boolean changed = false;
        for (Permission permission : permissions) {
            if (!role.getPermissions().contains(permission)) {
                log.info("Adding new permission '{}' to role '{}'", permission.getName(),
                        role.getName());
                role.addPermission(permission);
                changed = true;
            }
        }

        // Copy list to avoid concurrent modification [REDBACK-220]
        List<Permission> oldPermissions = new ArrayList<Permission>(role.getPermissions());
        for (Permission permission : oldPermissions) {
            if (!permissions.contains(permission)) {
                log.info("Removing old permission '{}' from role '{}'", permission.getName(),
                        role.getName());
                role.removePermission(permission);
                changed = true;
            }
        }
        if (changed) {
            rbacManager.saveRole(role);
            allRoleNames.add(role.getName());
        }
    } catch (RbacManagerException e) {
        throw new RoleManagerException("error updating role '" + roleProfile.getName() + "'", e);
    }
}

From source file:org.apache.archiva.scheduler.indexing.DownloadRemoteIndexTask.java

/**
 * Downloads (or incrementally updates) the remote index for the configured
 * remote repository into its local indexing context. A concurrent download
 * for the same repository id is detected and skipped.
 */
@Override
public void run() {

    // short lock: register this repository id as "download in progress"
    synchronized (this.runningRemoteDownloadIds) {
        if (this.runningRemoteDownloadIds.contains(this.remoteRepository.getId())) {
            // skip it as it's running
            log.info("skip download index remote for repo {} it's already running",
                    this.remoteRepository.getId());
            return;
        }
        this.runningRemoteDownloadIds.add(this.remoteRepository.getId());
    }
    File tempIndexDirectory = null;
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    try {
        log.info("start download remote index for remote repository {}", this.remoteRepository.getId());
        IndexingContext indexingContext = remoteRepositoryAdmin.createIndexContext(this.remoteRepository);

        // create a temp directory to download files
        tempIndexDirectory = new File(indexingContext.getIndexDirectoryFile().getParent(), ".tmpIndex");
        File indexCacheDirectory = new File(indexingContext.getIndexDirectoryFile().getParent(), ".indexCache");
        indexCacheDirectory.mkdirs();
        if (tempIndexDirectory.exists()) {
            FileUtils.deleteDirectory(tempIndexDirectory);
        }
        tempIndexDirectory.mkdirs();
        tempIndexDirectory.deleteOnExit();
        String baseIndexUrl = indexingContext.getIndexUpdateUrl();

        String wagonProtocol = new URL(this.remoteRepository.getUrl()).getProtocol();

        final StreamWagon wagon = (StreamWagon) wagonFactory
                .getWagon(new WagonFactoryRequest(wagonProtocol, this.remoteRepository.getExtraHeaders())
                        .networkProxy(this.networkProxy));
        // FIXME olamy having 2 config values
        wagon.setReadTimeout(remoteRepository.getRemoteDownloadTimeout() * 1000);
        wagon.setTimeout(remoteRepository.getTimeout() * 1000);

        if (wagon instanceof AbstractHttpClientWagon) {
            HttpConfiguration httpConfiguration = new HttpConfiguration();
            HttpMethodConfiguration httpMethodConfiguration = new HttpMethodConfiguration();
            httpMethodConfiguration.setUsePreemptive(true);
            httpMethodConfiguration.setReadTimeout(remoteRepository.getRemoteDownloadTimeout() * 1000);
            httpConfiguration.setGet(httpMethodConfiguration);
            AbstractHttpClientWagon.class.cast(wagon).setHttpConfiguration(httpConfiguration);
        }

        wagon.addTransferListener(new DownloadListener());
        ProxyInfo proxyInfo = null;
        if (this.networkProxy != null) {
            proxyInfo = new ProxyInfo();
            proxyInfo.setType(this.networkProxy.getProtocol());
            proxyInfo.setHost(this.networkProxy.getHost());
            proxyInfo.setPort(this.networkProxy.getPort());
            proxyInfo.setUserName(this.networkProxy.getUsername());
            proxyInfo.setPassword(this.networkProxy.getPassword());
        }
        AuthenticationInfo authenticationInfo = null;
        if (this.remoteRepository.getUserName() != null) {
            authenticationInfo = new AuthenticationInfo();
            authenticationInfo.setUserName(this.remoteRepository.getUserName());
            authenticationInfo.setPassword(this.remoteRepository.getPassword());
        }
        wagon.connect(new Repository(this.remoteRepository.getId(), baseIndexUrl), authenticationInfo,
                proxyInfo);

        File indexDirectory = indexingContext.getIndexDirectoryFile();
        if (!indexDirectory.exists()) {
            indexDirectory.mkdirs();
        }

        ResourceFetcher resourceFetcher = new WagonResourceFetcher(log, tempIndexDirectory, wagon,
                remoteRepository);
        IndexUpdateRequest request = new IndexUpdateRequest(indexingContext, resourceFetcher);
        request.setForceFullUpdate(this.fullDownload);
        request.setLocalIndexCacheDir(indexCacheDirectory);

        this.indexUpdater.fetchAndUpdateIndex(request);
        stopWatch.stop();
        log.info("time update index from remote for repository {}: {} s", this.remoteRepository.getId(),
                (stopWatch.getTime() / 1000));

        // index packing optional?
        //IndexPackingRequest indexPackingRequest =
        //    new IndexPackingRequest( indexingContext, indexingContext.getIndexDirectoryFile() );
        //indexPacker.packIndex( indexPackingRequest );
        indexingContext.updateTimestamp(true);

    } catch (WagonFactoryException | ConnectionException | AuthenticationException //
            | IOException | RepositoryAdminException e) {
        // multi-catch replaces six identical catch blocks; MalformedURLException
        // is a subtype of IOException and therefore covered here
        log.error(e.getMessage(), e);
        throw new RuntimeException(e.getMessage(), e);
    } finally {
        deleteDirectoryQuiet(tempIndexDirectory);
        this.runningRemoteDownloadIds.remove(this.remoteRepository.getId());
    }
    log.info("end download remote index for remote repository {}", this.remoteRepository.getId());
}

From source file:org.apache.archiva.scheduler.indexing.maven.DownloadRemoteIndexTask.java

// Downloads (or incrementally updates) the remote index for a MAVEN-type
// remote repository into its indexing context. The exact setup order
// (feature checks -> temp dirs -> wagon config -> connect -> fetch) matters,
// so the flow is documented in place rather than restructured.
@Override
public void run() {

    // so short lock : not sure we need it
    synchronized (this.runningRemoteDownloadIds) {
        if (this.runningRemoteDownloadIds.contains(this.remoteRepository.getId())) {
            // skip it as it's running
            log.info("skip download index remote for repo {} it's already running",
                    this.remoteRepository.getId());
            return;
        }
        this.runningRemoteDownloadIds.add(this.remoteRepository.getId());
    }
    Path tempIndexDirectory = null;
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    try {
        log.info("start download remote index for remote repository {}", this.remoteRepository.getId());
        // preconditions: the repository must have an index context, be of
        // MAVEN type, and advertise the remote-index feature
        if (this.remoteRepository.getIndexingContext() == null) {
            throw new IndexNotFoundException("No index context set for repository " + remoteRepository.getId());
        }
        if (this.remoteRepository.getType() != RepositoryType.MAVEN) {
            throw new RepositoryException("Bad repository type");
        }
        if (!this.remoteRepository.supportsFeature(RemoteIndexFeature.class)) {
            throw new RepositoryException(
                    "Repository does not support RemotIndexFeature " + remoteRepository.getId());
        }
        RemoteIndexFeature rif = this.remoteRepository.getFeature(RemoteIndexFeature.class).get();
        IndexingContext indexingContext = this.remoteRepository.getIndexingContext()
                .getBaseContext(IndexingContext.class);
        // create a temp directory to download files
        tempIndexDirectory = Paths.get(indexingContext.getIndexDirectoryFile().getParent(), ".tmpIndex");
        Path indexCacheDirectory = Paths.get(indexingContext.getIndexDirectoryFile().getParent(),
                ".indexCache");
        Files.createDirectories(indexCacheDirectory);
        // start from a clean temp directory for every download attempt
        if (Files.exists(tempIndexDirectory)) {
            org.apache.archiva.common.utils.FileUtils.deleteDirectory(tempIndexDirectory);
        }
        Files.createDirectories(tempIndexDirectory);
        tempIndexDirectory.toFile().deleteOnExit();
        String baseIndexUrl = indexingContext.getIndexUpdateUrl();

        String wagonProtocol = this.remoteRepository.getLocation().getScheme();

        final StreamWagon wagon = (StreamWagon) wagonFactory
                .getWagon(new WagonFactoryRequest(wagonProtocol, this.remoteRepository.getExtraHeaders())
                        .networkProxy(this.networkProxy));
        // FIXME olamy having 2 config values
        wagon.setReadTimeout((int) rif.getDownloadTimeout().toMillis());
        wagon.setTimeout((int) remoteRepository.getTimeout().toMillis());

        // HTTP-based wagons additionally get preemptive auth and a read timeout
        if (wagon instanceof AbstractHttpClientWagon) {
            HttpConfiguration httpConfiguration = new HttpConfiguration();
            HttpMethodConfiguration httpMethodConfiguration = new HttpMethodConfiguration();
            httpMethodConfiguration.setUsePreemptive(true);
            httpMethodConfiguration.setReadTimeout((int) rif.getDownloadTimeout().toMillis());
            httpConfiguration.setGet(httpMethodConfiguration);
            AbstractHttpClientWagon.class.cast(wagon).setHttpConfiguration(httpConfiguration);
        }

        wagon.addTransferListener(new DownloadListener());
        ProxyInfo proxyInfo = null;
        if (this.networkProxy != null) {
            proxyInfo = new ProxyInfo();
            proxyInfo.setType(this.networkProxy.getProtocol());
            proxyInfo.setHost(this.networkProxy.getHost());
            proxyInfo.setPort(this.networkProxy.getPort());
            proxyInfo.setUserName(this.networkProxy.getUsername());
            proxyInfo.setPassword(this.networkProxy.getPassword());
        }
        AuthenticationInfo authenticationInfo = null;
        // only password credentials are mapped to wagon authentication
        if (this.remoteRepository.getLoginCredentials() != null
                && this.remoteRepository.getLoginCredentials() instanceof PasswordCredentials) {
            PasswordCredentials creds = (PasswordCredentials) this.remoteRepository.getLoginCredentials();
            authenticationInfo = new AuthenticationInfo();
            authenticationInfo.setUserName(creds.getUsername());
            authenticationInfo.setPassword(new String(creds.getPassword()));
        }
        log.debug("Connection to {}, authInfo={}", this.remoteRepository.getId(), authenticationInfo);
        wagon.connect(new Repository(this.remoteRepository.getId(), baseIndexUrl), authenticationInfo,
                proxyInfo);

        Path indexDirectory = indexingContext.getIndexDirectoryFile().toPath();
        if (!Files.exists(indexDirectory)) {
            Files.createDirectories(indexDirectory);
        }
        log.debug("Downloading index file to {}", indexDirectory);
        log.debug("Index cache dir {}", indexCacheDirectory);

        ResourceFetcher resourceFetcher = new WagonResourceFetcher(log, tempIndexDirectory, wagon,
                remoteRepository);
        IndexUpdateRequest request = new IndexUpdateRequest(indexingContext, resourceFetcher);
        request.setForceFullUpdate(this.fullDownload);
        request.setLocalIndexCacheDir(indexCacheDirectory.toFile());

        IndexUpdateResult result = this.indexUpdater.fetchAndUpdateIndex(request);
        log.debug("Update result success: {}", result.isSuccessful());
        stopWatch.stop();
        log.info("time update index from remote for repository {}: {}ms", this.remoteRepository.getId(),
                (stopWatch.getTime()));

        // index packing optional?
        //IndexPackingRequest indexPackingRequest =
        //    new IndexPackingRequest( indexingContext, indexingContext.getIndexDirectoryFile() );
        //indexPacker.packIndex( indexPackingRequest );
        indexingContext.updateTimestamp(true);

    } catch (Exception e) {
        log.error(e.getMessage(), e);
        throw new RuntimeException(e.getMessage(), e);
    } finally {
        // always clean up the temp dir and release the "in progress" marker
        deleteDirectoryQuiet(tempIndexDirectory);
        this.runningRemoteDownloadIds.remove(this.remoteRepository.getId());
    }
    log.info("end download remote index for remote repository {}", this.remoteRepository.getId());
}

From source file:org.apache.archiva.scheduler.repository.DefaultRepositoryArchivaTaskScheduler.java

// Registers the scheduler as a configuration listener, schedules scan jobs for
// every scanned managed repository, and queues an initial scan for
// repositories that have never been scanned before.
@PostConstruct
public void startup() throws ArchivaException {

    StopWatch watch = new StopWatch();
    watch.start();

    archivaConfiguration.addListener(this);

    List<ManagedRepositoryConfiguration> managedRepos = archivaConfiguration.getConfiguration()
            .getManagedRepositories();

    RepositorySession session = repositorySessionFactory.createSession();
    try {
        MetadataRepository metadataRepository = session.getRepository();
        for (ManagedRepositoryConfiguration repoConfig : managedRepos) {
            // non-scanned repositories get neither a job nor an initial scan
            if (!repoConfig.isScanned()) {
                continue;
            }

            try {
                scheduleRepositoryJobs(repoConfig);
            } catch (SchedulerException e) {
                throw new ArchivaException("Unable to start scheduler: " + e.getMessage(), e);
            }

            try {
                if (!isPreviouslyScanned(repoConfig, metadataRepository)) {
                    queueInitialRepoScan(repoConfig);
                }
            } catch (MetadataRepositoryException e) {
                // best effort: a metadata failure only skips the initial scan
                log.warn(
                        "Unable to determine if a repository is already scanned, skipping initial scan: {}",
                        e.getMessage(), e);
            }
        }
    } finally {
        session.close();
    }

    watch.stop();
    log.info("Time to initalize DefaultRepositoryArchivaTaskScheduler: {} ms", watch.getTime());
}

From source file:org.apache.archiva.web.startup.SecuritySynchronization.java

/**
 * Runs every registered {@link EnvironmentCheck} and aggregates their
 * violations into a single report, failing hard when any violation is found.
 *
 * @throws ArchivaException when no checkers are registered or any check reports a violation
 */
private void executeEnvironmentChecks() throws ArchivaException {
    if ((checkers == null) || CollectionUtils.isEmpty(checkers.values())) {
        throw new ArchivaException("Unable to initialize the Redback Security Environment, "
                + "no Environment Check components found.");
    }

    // note: a freshly constructed StopWatch is already reset, so the
    // redundant reset() call has been removed
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    List<String> violations = new ArrayList<>();

    for (Entry<String, EnvironmentCheck> entry : checkers.entrySet()) {
        EnvironmentCheck check = entry.getValue();
        List<String> v = new ArrayList<>();
        check.validateEnvironment(v);
        log.info("Environment Check: {} -> {} violation(s)", entry.getKey(), v.size());
        // prefix each violation with the checker's bean name for the report
        for (String s : v) {
            violations.add("[" + entry.getKey() + "] " + s);
        }
    }

    if (CollectionUtils.isNotEmpty(violations)) {
        StringBuilder msg = new StringBuilder();
        msg.append("EnvironmentCheck Failure.\n");
        msg.append("======================================================================\n");
        msg.append(" ENVIRONMENT FAILURE !! \n");
        msg.append("\n");

        for (String violation : violations) {
            msg.append(violation).append("\n");
        }

        msg.append("\n");
        msg.append("======================================================================");
        log.error(msg.toString());

        throw new ArchivaException("Unable to initialize Redback Security Environment, [" + violations.size()
                + "] violation(s) encountered, See log for details.");
    }

    stopWatch.stop();
    log.info("time to execute all EnvironmentCheck: {} ms", stopWatch.getTime());
}