Example usage for org.apache.commons.io.filefilter DirectoryFileFilter DIRECTORY

List of usage examples for org.apache.commons.io.filefilter DirectoryFileFilter DIRECTORY

Introduction

In this page you can find the example usage for org.apache.commons.io.filefilter DirectoryFileFilter DIRECTORY.

Prototype

IOFileFilter DIRECTORY

To view the source code for org.apache.commons.io.filefilter DirectoryFileFilter DIRECTORY, click the Source Link below.

Click Source Link

Document

Singleton instance of directory filter.

Usage

From source file:org.apache.samza.monitor.LocalStoreMonitor.java

/**
 * This monitor method is invoked periodically to delete the stale state stores
 * of dead jobs/tasks.
 * @throws Exception if there was any problem in running the monitor.
 */
@Override
public void monitor() throws Exception {
    File localStoreDir = new File(config.getLocalStoreBaseDir());
    Preconditions.checkState(localStoreDir.isDirectory(),
            String.format("LocalStoreDir: %s is not a directory", localStoreDir.getAbsolutePath()));
    String localHostName = InetAddress.getLocalHost().getHostName();
    for (JobInstance jobInstance : getHostAffinityEnabledJobs(localStoreDir)) {
        // Job store directories follow the "<jobName>-<jobId>" naming convention.
        File jobDir = new File(localStoreDir,
                String.format("%s-%s", jobInstance.getJobName(), jobInstance.getJobId()));
        try {
            JobStatus jobStatus = jobsClient.getJobStatus(jobInstance);
            LOG.info("Job: {} has the status: {}.", jobInstance, jobStatus);
            for (Task task : jobsClient.getTasks(jobInstance)) {
                LOG.info("Evaluating stores for task: {}", task);
                // File.list returns null when the directory does not exist or cannot be
                // read; guard against that to avoid an NPE in the for-each below.
                String[] storeNames = jobDir.list(DirectoryFileFilter.DIRECTORY);
                if (storeNames == null) {
                    LOG.warn("Job directory: {} does not exist or is not readable. Skipping.",
                            jobDir.getAbsolutePath());
                    continue;
                }
                for (String storeName : storeNames) {
                    /**
                     *  A task store is active if all of the following conditions are true:
                     *  a) If the store is amongst the active stores of the task.
                     *  b) If the job has been started.
                     *  c) If the preferred host of the task is the localhost on which the monitor is run.
                     */
                    if (jobStatus.hasBeenStarted() && task.getStoreNames().contains(storeName)
                            && task.getPreferredHost().equals(localHostName)) {
                        LOG.info(String.format("Local store: %s is actively used by the task: %s.", storeName,
                                task.getTaskName()));
                    } else {
                        LOG.info(String.format("Local store: %s not used by the task: %s.", storeName,
                                task.getTaskName()));
                        markSweepTaskStore(TaskStorageManager.getStorePartitionDir(jobDir, storeName,
                                new TaskName(task.getTaskName())));
                    }
                }
            }
        } catch (Exception ex) {
            // Per-job failures are optionally swallowed so one bad job does not halt
            // cleanup of the remaining jobs.
            if (!config.getIgnoreFailures()) {
                throw ex;
            }
            LOG.warn(
                    "Config: {} turned on, failures will be ignored. Local store cleanup for job: {} resulted in exception: {}.",
                    new Object[] { LocalStoreMonitorConfig.CONFIG_IGNORE_FAILURES, jobInstance, ex });
        }
    }
}

From source file:org.artifactory.build.BuildServiceImpl.java

/**
 * Imports build info from the export directory in the given settings, replacing all
 * existing builds. A no-op (with a status message) when the export folder is absent
 * or contains no build export files.
 *
 * @param settings import settings holding the base directory and status holder
 */
@Override
public void importFrom(ImportSettings settings) {
    final MutableStatusHolder multiStatusHolder = settings.getStatusHolder();
    multiStatusHolder.status("Starting build info import", log);

    dbService.invokeInTransaction("BuildImport-deleteAllBuilds", new Callable<Object>() {
        @Override
        public Object call() throws Exception {
            try {
                // delete all existing builds
                buildStoreService.deleteAllBuilds();
            } catch (Exception e) {
                multiStatusHolder.error("Failed to delete builds root node", e, log);
            }
            return null;
        }
    });

    File buildsFolder = new File(settings.getBaseDir(), BUILDS_EXPORT_DIR);
    String buildsFolderPath = buildsFolder.getPath();
    // isDirectory() covers both "non-existent" and "not a directory" (as the status
    // message states); checking only exists() would let a plain file through and make
    // FileUtils.listFiles throw below.
    if (!buildsFolder.isDirectory()) {
        multiStatusHolder.status("'" + buildsFolderPath + "' folder is either non-existent or not a "
                + "directory. Build info import was not performed", log);
        return;
    }

    // Export files are named "build*.xml"; they may sit in nested subdirectories.
    IOFileFilter buildExportFileFilter = new AbstractFileFilter() {
        @Override
        public boolean accept(File file) {
            String fileName = file.getName();
            return fileName.startsWith("build") && fileName.endsWith(".xml");
        }
    };

    Collection<File> buildExportFiles = FileUtils.listFiles(buildsFolder, buildExportFileFilter,
            DirectoryFileFilter.DIRECTORY);

    if (buildExportFiles.isEmpty()) {
        multiStatusHolder.status("'" + buildsFolderPath + "' folder does not contain build export files. "
                + "Build info import was not performed", log);
        return;
    }

    importBuildFiles(settings, buildExportFiles);
    multiStatusHolder.status("Finished build info import", log);
}

From source file:org.artifactory.repo.service.DeployServiceImpl.java

/**
 * Extracts an archive bundle and deploys its deployable content to the target repository.
 * <p>
 * System files, globally excluded files and maven-metadata files are skipped. POM files
 * are validated before deployment. The extracted folder is always deleted afterwards.
 *
 * @param bundle     the archive file to deploy; aborts with an error status if missing
 * @param targetRepo the repository to deploy into
 * @param status     accumulates per-file status/error messages
 * @param failFast   when true, stop on the first deployment/validation failure
 * @param prefix     path prefix to prepend to each deployed artifact's relative path
 * @param properties properties to attach to each deployed artifact
 */
@Override
public void deployBundle(File bundle, RealRepoDescriptor targetRepo, final BasicStatusHolder status,
        boolean failFast, String prefix, Properties properties) {
    long start = System.currentTimeMillis();
    if (!bundle.exists()) {
        String message = "Specified location '" + bundle + "' does not exist. Deployment aborted.";
        status.error(message, log);
        return;
    }
    File extractFolder;
    try {
        extractFolder = extractArchive(status, bundle);
    } catch (Exception e) {
        status.error(e.getLocalizedMessage(), e, log);
        return;
    }
    if (extractFolder == null) {
        //We have errors
        return;
    }
    try {
        // Filter out files that must never be deployed from a bundle.
        IOFileFilter deployableFilesFilter = new AbstractFileFilter() {
            @Override
            public boolean accept(File file) {
                if (NamingUtils.isSystem(file.getAbsolutePath()) || GlobalExcludes.isInGlobalExcludes(file)
                        || file.getName().contains(MavenNaming.MAVEN_METADATA_NAME)) {
                    status.debug("Excluding '" + file.getAbsolutePath() + "' from bundle deployment.", log);
                    return false;
                }

                return true;
            }
        };
        List<File> archiveContent = Lists.newArrayList(
                FileUtils.listFiles(extractFolder, deployableFilesFilter, DirectoryFileFilter.DIRECTORY));
        // Deploy in a deterministic (path-sorted) order.
        Collections.sort(archiveContent);

        Repo repo = repositoryService.repositoryByKey(targetRepo.getKey());
        for (File file : archiveContent) {
            // Target path = prefix + path of the file relative to the extraction root.
            String parentPath = extractFolder.getAbsolutePath();
            String filePath = file.getAbsolutePath();
            String relPath = PathUtils
                    .trimSlashes(prefix + "/" + PathUtils.getRelativePath(parentPath, filePath)).toString();

            ModuleInfo moduleInfo = repo.getItemModuleInfo(relPath);
            if (MavenNaming.isPom(file.getName())) {
                try {
                    mavenService.validatePomFile(file, relPath, moduleInfo,
                            targetRepo.isSuppressPomConsistencyChecks());
                } catch (Exception e) {
                    String msg = "The pom: " + file.getName()
                            + " could not be validated, and thus was not deployed.";
                    status.error(msg, e, log);
                    if (failFast) {
                        return;
                    }
                    // Skip just this POM and keep deploying the rest.
                    continue;
                }
            }

            try {
                getTransactionalMe().deploy(targetRepo, new ArtifactInfo(relPath), file, null, false, true,
                        properties);
            } catch (IllegalArgumentException iae) {
                status.error(iae.getMessage(), iae, log);
                if (failFast) {
                    return;
                }
            } catch (Exception e) {
                // Fail fast
                status.error("Error during deployment: " + e.getMessage(), e, log);
                if (failFast) {
                    return;
                }
            }
        }

        String bundleName = bundle.getName();
        String timeTaken = DurationFormatUtils.formatPeriod(start, System.currentTimeMillis(), "s");
        int archiveContentSize = archiveContent.size();

        status.status("Successfully deployed " + archiveContentSize + " artifacts from archive: " + bundleName
                + " (" + timeTaken + " seconds).", log);
    } catch (Exception e) {
        status.error(e.getMessage(), e, log);
    } finally {
        // Always clean up the temporary extraction folder, even on early return.
        FileUtils.deleteQuietly(extractFolder);
    }
}

From source file:org.artifactory.repo.service.ImportJob.java

/**
 * Lists the names of the immediate subdirectories of the import base directory.
 *
 * @return the subdirectory names; empty when the base directory cannot be listed
 */
private Collection<String> collectSubdirectoriesNames() {
    File[] subDirectories = importSettings.getBaseDir()
            .listFiles((java.io.FileFilter) DirectoryFileFilter.DIRECTORY);
    // File.listFiles returns null when the base dir does not exist, is not a
    // directory, or an I/O error occurs; guard against an NPE in Arrays.asList.
    if (subDirectories == null) {
        return java.util.Collections.emptyList();
    }
    return Collections2.transform(Arrays.asList(subDirectories), new Function<File, String>() {
        @Nullable
        @Override
        public String apply(File file) {
            return file.getName();
        }
    });
}

From source file:org.artifactory.traffic.read.TrafficReader.java

/**
 * Returns the traffic entry log files whose recorded date ranges fall within the
 * given time window.
 *
 * @param startDate Time window start date
 * @param endDate   Time window end date
 * @return Collection&lt;File&gt; - the traffic entry log files relevant to the window,
 *         in sorted order
 */
public Collection<File> readFiles(Date startDate, Date endDate) {
    // Match only files whose name carries the traffic-log prefix and suffix.
    IOFileFilter trafficLogFileFilter = new AbstractFileFilter() {
        @Override
        public boolean accept(File candidate) {
            String candidateName = candidate.getName();
            return candidateName.contains(LOG_PREFIX) && candidateName.contains(LOG_SUFFIX);
        }
    };
    List<File> trafficLogFiles = Lists.newArrayList(
            FileUtils.listFiles(logDir, trafficLogFileFilter, DirectoryFileFilter.DIRECTORY));
    Collections.sort(trafficLogFiles);

    List<File> selectedFiles = new ArrayList<>();
    for (File logFile : trafficLogFiles) {
        Date[] dates = getLogFileDates(logFile);

        // Sanity check: each log file must yield exactly a start and an end date.
        if (dates.length != 2) {
            throw new RuntimeException("Could not read log file dates.");
        }
        Date fileStart = dates[0];
        Date fileEnd = dates[1];
        if (fileStart == null || fileEnd == null) {
            throw new RuntimeException("Log file dates cannot be null.");
        }

        // Keep only files overlapping the requested window.
        if (isDateWithinRange(fileStart, fileEnd, startDate, endDate)) {
            selectedFiles.add(logFile);
        }
    }
    return selectedFiles;
}

From source file:org.bonitasoft.engine.api.impl.ProcessAPIImpl.java

/**
 * Returns the resources of a deployed process whose paths match the given pattern,
 * keyed by their path relative to the process directory.
 *
 * @param processDefinitionId the process whose resources are read
 * @param filenamesPattern    regex pattern matched against resource paths
 * @return map of relative resource path to file content bytes
 * @throws RetrieveException if the Bonita home or a resource file cannot be read
 */
@Override
public Map<String, byte[]> getProcessResources(final long processDefinitionId, final String filenamesPattern)
        throws RetrieveException {
    String processesFolder;
    try {
        processesFolder = BonitaHomeServer.getInstance().getProcessesFolder(getTenantAccessor().getTenantId());
    } catch (final BonitaHomeNotSetException e) {
        throw new RetrieveException("Problem accessing basic Bonita Home server resources", e);
    }
    // Build "<processesFolder>/<processDefinitionId>/" with exactly one trailing separator.
    final String sep = File.separator;
    processesFolder = StringUtils.uniformizePathPattern(processesFolder);
    if (!processesFolder.endsWith(sep)) {
        processesFolder = processesFolder + sep;
    }
    processesFolder = processesFolder + processDefinitionId + sep;
    final File processDirectory = new File(processesFolder);
    // Recurse through all subdirectories, keeping files that match the pattern.
    final Collection<File> files = FileUtils.listFiles(processDirectory,
            new DeepRegexFileFilter(processDirectory, filenamesPattern), DirectoryFileFilter.DIRECTORY);
    final Map<String, byte[]> res = new HashMap<String, byte[]>(files.size());
    try {
        for (final File f : files) {
            res.put(generateRelativeResourcePath(processDirectory, f), IOUtil.getAllContentFrom(f));
        }
    } catch (final IOException e) {
        throw new RetrieveException("Problem accessing resources " + filenamesPattern
                + " for processDefinitionId: " + processDefinitionId, e);
    }
    return res;
}

From source file:org.cds06.speleograph.actions.OpenAction.java

/**
 * Construct the import action.
 *
 * @param component The parent component used to display dialogs.
 * @param reader    The data-file reader class backing this action; instantiated
 *                  reflectively, so it must have an accessible no-arg constructor.
 * @throws IllegalArgumentException if the reader class cannot be instantiated.
 */
public OpenAction(JComponent component, Class<? extends DataFileReader> reader) {
    super(I18nSupport.translate("actions.openFile"));
    try {
        this.reader = reader.newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
        log.info("Can not create action for reader " + reader.getName());
        throw new IllegalArgumentException(e);
    }
    putValue(NAME, this.reader.getButtonText());
    parent = component;
    // Accept directories (so the user can navigate) in addition to the reader's own filter.
    fileFilter = new OrFileFilter(DirectoryFileFilter.DIRECTORY, this.reader.getFileFilter());
    chooser = new JFileChooser();
    // Adapt the commons-io IOFileFilter to Swing's javax.swing.filechooser.FileFilter.
    chooser.setFileFilter(new FileFilter() {
        @Override
        public boolean accept(File f) {
            return fileFilter.accept(f);
        }

        @Override
        public String getDescription() {
            return OpenAction.this.getDescription();
        }
    });
}

From source file:org.chtijbug.drools.platform.runtime.utils.Xsd2JarTransformerTestCase.java

@Test
public void should_get_5_java_classes_generated() throws IOException {
    Xsd2JarTransformer toTest = new Xsd2JarTransformer();

    //____ Create a temp directory where classes will be generated.
    // Files.createTempDirectory creates the directory atomically, avoiding the racy
    // (and unchecked) delete()/mkdir() dance around File.createTempFile.
    File outputDir = java.nio.file.Files.createTempDirectory("test-dir").toFile();
    //____ Load the testing XSD
    URL xsdFile = this.getClass().getResource("/model.xsd");
    //____ Generate Java classes
    toTest.createJavaClassesFromXsd(new File(xsdFile.getFile()), outputDir, "org.pymma.drools");
    //____ Check that the counting of generated classes is the one expected.
    Collection javaFiles = FileUtils.listFiles(outputDir, FileFilterUtils.suffixFileFilter(".java"),
            DirectoryFileFilter.DIRECTORY);
    assertThat(javaFiles).hasSize(5);
}

From source file:org.chtijbug.drools.platform.runtime.utils.Xsd2JarTransformerTestCase.java

@Test
public void should_get_5_java_classes_compiled() throws IOException {
    Xsd2JarTransformer toTest = new Xsd2JarTransformer();

    //____ Create a temp directory where classes will be generated.
    // Files.createTempDirectory creates the directory atomically, avoiding the racy
    // (and unchecked) delete()/mkdir() dance around File.createTempFile.
    File outputDir = java.nio.file.Files.createTempDirectory("test-dir").toFile();
    //____ Load the testing XSD
    URL xsdFile = this.getClass().getResource("/model.xsd");
    //____ Generate Java classes
    toTest.createJavaClassesFromXsd(new File(xsdFile.getFile()), outputDir, "org.pymma.drools");
    toTest.compileTarget(outputDir);
    //____ Check that the counting of generated classes is the one expected.
    Collection javaFiles = FileUtils.listFiles(outputDir, FileFilterUtils.suffixFileFilter(".class"),
            DirectoryFileFilter.DIRECTORY);
    assertThat(javaFiles).hasSize(5);
}

From source file:org.dataconservancy.ui.services.AtomDepositDocumentParserTest.java

/**
 * Seeds a {@code MockArchiveUtil} with entities, and constructs a {@code AtomDepositDocumentParser} for testing.
 * /*  www  .  j av a2 s.c o m*/
 * @throws URISyntaxException
 * @throws FileNotFoundException
 * @throws InvalidXmlException
 */
@Before
@SuppressWarnings("unchecked")
public void setUp() throws URISyntaxException, FileNotFoundException, InvalidXmlException {

    // Obtain the valid, production, sample feed
    URL feedResource = this.getClass().getResource(PRODUCTION_FEED_RESOURCE);
    assertNotNull("Unable to resolve classpath resource " + PRODUCTION_FEED_RESOURCE);
    productionFeedIn = this.getClass().getResourceAsStream(PRODUCTION_FEED_RESOURCE);

    // Instantiate a MockArchiveUtil which we will pre-populate with expected DCS entities.
    productionMockArchiveUtil = new MockArchiveUtil();

    // Set the mapping strategy to entity ids only
    productionMockArchiveUtil.setMappingStrategy(MockArchiveUtil.ID_MAPPING_STRATEGY.ENTITY_ID);

    // Populate the MockArchiveUtil with the valid, expected, DCS entities.
    File feedBaseDir = new File(feedResource.toURI()).getParentFile();
    Iterator<File> serializedEntities = FileUtils.iterateFiles(feedBaseDir,
            new AndFileFilter(new PrefixFileFilter("4260"), new SuffixFileFilter(".xml")),
            DirectoryFileFilter.DIRECTORY);
    while (serializedEntities.hasNext()) {
        Dcp dcp = modelBuilder.buildSip(new FileInputStream(serializedEntities.next()));
        for (DcsEntity e : dcp) {
            productionMockArchiveUtil.addEntity(e);
        }
    }

    assertTrue("Error loading expected entities from " + feedBaseDir,
            productionMockArchiveUtil.getEntities().size() > 0);

    // Construct the DocumentParser under test
    underTest = new AtomDepositDocumentParser(productionMockArchiveUtil);
}