List of usage examples for org.apache.commons.io.filefilter.AbstractFileFilter
From source file:com.gu.management.manifest.ApplicationFileProvider.java
public List<String> getChildDirectories(File parent) { FilenameFilter nonDottedDirectories = new AbstractFileFilter() { @Override// www.j a v a 2 s . c o m public boolean accept(File file) { return file.isDirectory() && !file.getName().startsWith("."); } }; String[] filenames = parent.list(nonDottedDirectories); return Arrays.asList(filenames); }
From source file:com.amazonaws.eclipse.dynamodb.testtool.TestToolProcess.java
/** * Searches within the install directory for the native libraries required * by DyanmoDB Local (i.e. SQLite) and returns the directory containing the * native libraries./*w w w. j a v a 2 s .c om*/ * * @return The directory within the install directory where native libraries * were found; otherwise, if no native libraries are found, the * install directory is returned. */ private File findLibraryDirectory() { // Mac and Linux libraries start with "libsqlite4java-" so // use that pattern to identify the library directory IOFileFilter fileFilter = new AbstractFileFilter() { public boolean accept(File dir, String name) { return name.startsWith("libsqlite4java-"); } }; Collection<File> files = FileUtils.listFiles(installDirectory, fileFilter, TrueFileFilter.INSTANCE); // Log a warning if we can't identify the library directory, // and then just try to use the install directory if (files == null || files.isEmpty()) { Status status = new Status(IStatus.WARNING, DynamoDBPlugin.PLUGIN_ID, "Unable to find DynamoDB Local native libraries in " + installDirectory); AwsToolkitCore.getDefault().getLog().log(status); return installDirectory; } return files.iterator().next().getParentFile(); }
From source file:cc.recommenders.io.Directory.java
public Set<String> findFiles(Predicate<String> predicate) { IOFileFilter fileFilter = new AbstractFileFilter() { @Override//from w w w. ja v a2s. c o m public boolean accept(File file) { return predicate.apply(file.getAbsolutePath()); } }; IOFileFilter allDirs = FileFilterUtils.trueFileFilter(); Iterator<File> it = FileUtils.iterateFiles(new File(rootDir), fileFilter, allDirs); Set<String> files = Sets.newLinkedHashSet(); while (it.hasNext()) { String absPath = it.next().getAbsolutePath(); String relPath = absPath.substring(rootDir.length()); // TODO -1? if (relPath.startsWith(File.separator)) { relPath = relPath.substring(1); } files.add(relPath); } return files; }
From source file:net.sf.zekr.engine.audio.AudioCacheManager.java
/** * Removes {@link #getCapacitySize()} megabytes of files from user's audio cache, so that cache size limit * ({@link #getCapacitySize()}) is met. It simply ignores if audio cache size is not exceeded * {@link #getCapacitySize()}.//from w w w . j ava 2s . com */ @SuppressWarnings("unchecked") public void flushCache() { logger.info("Flush audio cache."); long cacheSize = FileUtils.sizeOfDirectory(userPath); if (cacheSize > FileUtils.ONE_MB * capacitySize) { logger.info("Capacity size is " + capacitySize + " MB, of which " + FileUtils.byteCountToDisplaySize(cacheSize) + " is used. Flush size is " + flushSize + " MB."); Collection<File> audioDirectoryList = FileUtils.listFiles(userPath, new AbstractFileFilter() { public boolean accept(File file) { if (file.isDirectory()) { return true; } else { return false; } } }, null); List<File> fileList = new ArrayList<File>(); for (File dir : audioDirectoryList) { fileList.addAll(Arrays.asList(dir.listFiles())); } // return older files first Collections.sort(fileList, LastModifiedFileComparator.LASTMODIFIED_REVERSE); long deleteSize = 0; for (int i = 0; i < fileList.size(); i++) { if (deleteSize > flushSize + FileUtils.ONE_MB) { logger.info("Cache flushing suffices. " + FileUtils.byteCountToDisplaySize(deleteSize) + " were deleted."); break; } File file = fileList.get(i); deleteSize += file.length(); logger.debug("Delete: " + file); file.delete(); } } else { logger.info("No flush is required."); } }
From source file:org.artifactory.build.BuildServiceImpl.java
@Override public void importFrom(ImportSettings settings) { final MutableStatusHolder multiStatusHolder = settings.getStatusHolder(); multiStatusHolder.status("Starting build info import", log); dbService.invokeInTransaction("BuildImport-deleteAllBuilds", new Callable<Object>() { @Override//from w w w . ja va 2s .c o m public Object call() throws Exception { try { // delete all existing builds buildStoreService.deleteAllBuilds(); } catch (Exception e) { multiStatusHolder.error("Failed to delete builds root node", e, log); } return null; } }); File buildsFolder = new File(settings.getBaseDir(), BUILDS_EXPORT_DIR); String buildsFolderPath = buildsFolder.getPath(); if (!buildsFolder.exists()) { multiStatusHolder.status("'" + buildsFolderPath + "' folder is either non-existent or not a " + "directory. Build info import was not performed", log); return; } IOFileFilter buildExportFileFilter = new AbstractFileFilter() { @Override public boolean accept(File file) { String fileName = file.getName(); return fileName.startsWith("build") && fileName.endsWith(".xml"); } }; Collection<File> buildExportFiles = FileUtils.listFiles(buildsFolder, buildExportFileFilter, DirectoryFileFilter.DIRECTORY); if (buildExportFiles.isEmpty()) { multiStatusHolder.status("'" + buildsFolderPath + "' folder does not contain build export files. " + "Build info import was not performed", log); return; } importBuildFiles(settings, buildExportFiles); multiStatusHolder.status("Finished build info import", log); }
From source file:org.artifactory.repo.service.DeployServiceImpl.java
@Override public void deployBundle(File bundle, RealRepoDescriptor targetRepo, final BasicStatusHolder status, boolean failFast, String prefix, Properties properties) { long start = System.currentTimeMillis(); if (!bundle.exists()) { String message = "Specified location '" + bundle + "' does not exist. Deployment aborted."; status.error(message, log);/* ww w . ja va 2 s. com*/ return; } File extractFolder; try { extractFolder = extractArchive(status, bundle); } catch (Exception e) { status.error(e.getLocalizedMessage(), e, log); return; } if (extractFolder == null) { //We have errors return; } try { IOFileFilter deployableFilesFilter = new AbstractFileFilter() { @Override public boolean accept(File file) { if (NamingUtils.isSystem(file.getAbsolutePath()) || GlobalExcludes.isInGlobalExcludes(file) || file.getName().contains(MavenNaming.MAVEN_METADATA_NAME)) { status.debug("Excluding '" + file.getAbsolutePath() + "' from bundle deployment.", log); return false; } return true; } }; List<File> archiveContent = Lists.newArrayList( FileUtils.listFiles(extractFolder, deployableFilesFilter, DirectoryFileFilter.DIRECTORY)); Collections.sort(archiveContent); Repo repo = repositoryService.repositoryByKey(targetRepo.getKey()); for (File file : archiveContent) { String parentPath = extractFolder.getAbsolutePath(); String filePath = file.getAbsolutePath(); String relPath = PathUtils .trimSlashes(prefix + "/" + PathUtils.getRelativePath(parentPath, filePath)).toString(); ModuleInfo moduleInfo = repo.getItemModuleInfo(relPath); if (MavenNaming.isPom(file.getName())) { try { mavenService.validatePomFile(file, relPath, moduleInfo, targetRepo.isSuppressPomConsistencyChecks()); } catch (Exception e) { String msg = "The pom: " + file.getName() + " could not be validated, and thus was not deployed."; status.error(msg, e, log); if (failFast) { return; } continue; } } try { getTransactionalMe().deploy(targetRepo, new ArtifactInfo(relPath), file, null, false, true, properties); } catch 
(IllegalArgumentException iae) { status.error(iae.getMessage(), iae, log); if (failFast) { return; } } catch (Exception e) { // Fail fast status.error("Error during deployment: " + e.getMessage(), e, log); if (failFast) { return; } } } String bundleName = bundle.getName(); String timeTaken = DurationFormatUtils.formatPeriod(start, System.currentTimeMillis(), "s"); int archiveContentSize = archiveContent.size(); status.status("Successfully deployed " + archiveContentSize + " artifacts from archive: " + bundleName + " (" + timeTaken + " seconds).", log); } catch (Exception e) { status.error(e.getMessage(), e, log); } finally { FileUtils.deleteQuietly(extractFolder); } }
From source file:org.artifactory.traffic.read.TrafficReader.java
/** * Returns a collection of traffic entry log files which are relevant to the given time window * * @param startDate Time window start date * @param endDate Time window end date * @return Collection<File> - Collection of file objects that represent the traffic entry log files which are * relevant to the given time window *//*from w ww.j av a2 s .c o m*/ public Collection<File> readFiles(Date startDate, Date endDate) { IOFileFilter trafficLogFileFilter = new AbstractFileFilter() { @Override public boolean accept(File file) { String logFileName = file.getName(); return logFileName.contains(LOG_PREFIX) && logFileName.contains(LOG_SUFFIX); } }; Collection<File> collection = FileUtils.listFiles(logDir, trafficLogFileFilter, DirectoryFileFilter.DIRECTORY); List<File> trafficLogFiles = Lists.newArrayList(collection); Collections.sort(trafficLogFiles); List<File> selectedFiles = new ArrayList<>(); for (File logFile : trafficLogFiles) { Date[] logFileDates = getLogFileDates(logFile); //Sanity check if (logFileDates.length != 2) { throw new RuntimeException("Could not read log file dates."); } //Sanity check Date logFileStartDate = logFileDates[0]; Date logFileEndDate = logFileDates[1]; if ((logFileStartDate == null) || (logFileEndDate == null)) { throw new RuntimeException("Log file dates cannot be null."); } boolean withinRange = isDateWithinRange(logFileStartDate, logFileEndDate, startDate, endDate); if (withinRange) { selectedFiles.add(logFile); } } return selectedFiles; }
From source file:org.codelibs.fess.dict.DictionaryLocator.java
/**
 * Collects, recursively under {@code path}, all files whose name starts with
 * {@code filenamePrefix}, skipping any entry whose name appears in
 * {@code excludedSet} (which may be null to exclude nothing).
 */
protected File[] findFiles(final String path, final String filenamePrefix, final List<String> excludedSet) {
    final File directory = new File(path);
    if (logger.isDebugEnabled()) {
        logger.debug("Load files from " + directory.getAbsolutePath());
    }
    // File filter: match on the configured name prefix
    final IOFileFilter prefixFilter = new AbstractFileFilter() {
        @Override
        public boolean accept(final File dir, final String name) {
            return name.startsWith(filenamePrefix);
        }
    };
    // Directory filter: descend only into entries not explicitly excluded
    final IOFileFilter notExcludedFilter = new AbstractFileFilter() {
        @Override
        public boolean accept(final File dir, final String name) {
            return excludedSet == null || !excludedSet.contains(name);
        }
    };
    final Collection<File> files = FileUtils.listFiles(directory, prefixFilter, notExcludedFilter);
    if (logger.isDebugEnabled()) {
        logger.debug("Dictionary files: " + files);
    }
    return files.toArray(new File[files.size()]);
}
From source file:org.gradle.initialization.DefaultCacheInvalidationStrategy.java
/**
 * Returns true when no file under {@code rootDir} (restricted by DirFilter)
 * was modified after the given timestamp; false as soon as a newer file is
 * found.
 */
private boolean projectFilesAreOlderThanArtifact(File rootDir, final long timestamp) {
    final DirFilter dirFilter = new DirFilter(rootDir);
    // This filter never accepts anything — it is used purely for its
    // visiting side effect: FileUtils.listFiles has no early-exit hook, so
    // the traversal is aborted via InvalidCacheException the moment a file
    // newer than the artifact timestamp is encountered.
    IOFileFilter newerThanTimestampProbe = new AbstractFileFilter() {
        @Override
        public boolean accept(File file) {
            if (file.isDirectory() && !dirFilter.accept(file)) {
                return false;
            }
            if (file.lastModified() > timestamp) {
                throw new InvalidCacheException();
            }
            return false;
        }
    };
    try {
        FileUtils.listFiles(rootDir, newerThanTimestampProbe, new DirFilter(rootDir));
    } catch (InvalidCacheException e) {
        // A project file is newer than the cached artifact
        return false;
    }
    return true;
}
From source file:org.jboss.pvt.harness.utils.DirUtils.java
/** * list accepted files in rootDir and its sub-dirs * @param rootDir//from ww w . j av a2 s . com * @param fileFilter * @return */ public static Collection<File> listFilesRecursively(File rootDir, final FileFilter fileFilter) { if (rootDir == null || !rootDir.exists() || rootDir.isFile()) { return Collections.emptyList(); } Collection<File> files = FileUtils.listFilesAndDirs(rootDir, new AbstractFileFilter() { @Override public boolean accept(File file) { return file.isFile() && fileFilter.accept(file); } }, TrueFileFilter.INSTANCE); // remove dirs for (Iterator<File> it = files.iterator(); it.hasNext();) { File file = it.next(); if (file.isDirectory()) { it.remove(); } } return files; }