List of usage examples for org.apache.commons.io.filefilter TrueFileFilter INSTANCE
IOFileFilter INSTANCE
To view the source code for org.apache.commons.io.filefilter TrueFileFilter INSTANCE, click the Source Link.
From source file:com.mediaworx.intellij.opencmsplugin.sync.SyncFileAnalyzer.java
private void addRfsOnlyFolderTreeToSyncList(OpenCmsModule ocmsModule, String vfsPath, File file, boolean replaceExistingEntity) { LOG.info("Adding RFS only folder " + vfsPath); SyncAction syncAction = getRfsOnlySyncAction(ocmsModule.getSyncMode()); SyncFolder syncFolder = new SyncFolder(ocmsModule, vfsPath, file, null, syncAction, replaceExistingEntity); syncList.add(syncFolder);//from w w w . ja va 2 s. c o m if (syncAction != SyncAction.DELETE_RFS) { LOG.info("Get children of folder " + vfsPath); Collection<File> rfsChildren = FileUtils.listFiles(file, TrueFileFilter.INSTANCE, null); for (File rfsChild : rfsChildren) { LOG.info("Handle PUSH child " + rfsChild.getPath()); walkFileTree(ocmsModule, rfsChild, FolderSyncMode.PUSH); } } }
From source file:de.jwi.jfm.Folder.java
private String zip(OutputStream out, String[] selectedIDs) throws IOException, OutOfSyncException { Collection c = null;// w w w . j a v a 2 s. c om List l = new ArrayList(); for (int i = 0; i < selectedIDs.length; i++) { File f = checkAndGet(selectedIDs[i]); if (null == f) { throw new OutOfSyncException(); } if (f.isDirectory()) { c = FileUtils.listFiles(f, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE); l.addAll(c); } else { l.add(f); } } ZipOutputStream z = new ZipOutputStream(out); try { new Zipper().zip(z, l, myFile); } finally { z.close(); } return null; }
From source file:com.btoddb.fastpersitentqueue.FpqIT.java
/**
 * Stress test: 4 pusher threads each push 1000 entries through transactional FPQ pushes
 * while 4 popper threads drain the queue in batches. Verifies that every pushed value is
 * popped exactly once (by comparing pop count and push/pop checksums) and that the queue's
 * internal segment/journal/paging state is clean afterwards.
 */
@Test
public void testThreading() throws Exception {
    final int numEntries = 1000;
    final int numPushers = 4;
    final int numPoppers = 4;
    final int entrySize = 1000;
    fpq1.setMaxTransactionSize(2000);
    final int popBatchSize = 100;
    fpq1.setMaxMemorySegmentSizeInBytes(10000000);
    fpq1.setMaxJournalFileSize(10000000);
    fpq1.setMaxJournalDurationInMs(30000);
    fpq1.setFlushPeriodInMs(1000);
    fpq1.setNumberOfFlushWorkers(4);
    // Fixed seeds keep the sleep jitter reproducible across runs.
    final Random pushRand = new Random(1000L);
    final Random popRand = new Random(1000000L);
    final AtomicInteger pusherFinishCount = new AtomicInteger();
    final AtomicInteger numPops = new AtomicInteger();
    // counter produces unique IDs; pushSum/popSum are checksums that must match at the end.
    final AtomicLong counter = new AtomicLong();
    final AtomicLong pushSum = new AtomicLong();
    final AtomicLong popSum = new AtomicLong();
    fpq1.init();
    ExecutorService execSrvc = Executors.newFixedThreadPool(numPushers + numPoppers);
    Set<Future> futures = new HashSet<Future>();

    // start pushing
    for (int i = 0; i < numPushers; i++) {
        Future future = execSrvc.submit(new Runnable() {
            @Override
            public void run() {
                for (int i = 0; i < numEntries; i++) {
                    try {
                        long x = counter.getAndIncrement();
                        pushSum.addAndGet(x);
                        // Entry payload: the ID in the first 8 bytes, rest zero-padded.
                        ByteBuffer bb = ByteBuffer.wrap(new byte[entrySize]);
                        bb.putLong(x);
                        fpq1.beginTransaction();
                        fpq1.push(bb.array());
                        fpq1.commit();
                        if ((x + 1) % 500 == 0) {
                            System.out.println("pushed ID = " + x);
                        }
                        Thread.sleep(pushRand.nextInt(5));
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
                pusherFinishCount.incrementAndGet();
            }
        });
        futures.add(future);
    }

    // start popping
    for (int i = 0; i < numPoppers; i++) {
        Future future = execSrvc.submit(new Runnable() {
            @Override
            public void run() {
                // Keep draining until all pushers are done AND the queue is empty.
                while (pusherFinishCount.get() < numPushers || !fpq1.isEmpty()) {
                    try {
                        fpq1.beginTransaction();
                        try {
                            Collection<FpqEntry> entries = fpq1.pop(popBatchSize);
                            if (null == entries) {
                                // Nothing available yet; back off briefly.
                                Thread.sleep(100);
                                continue;
                            }
                            for (FpqEntry entry : entries) {
                                // Recover the pushed ID from the first 8 payload bytes.
                                ByteBuffer bb = ByteBuffer.wrap(entry.getData());
                                popSum.addAndGet(bb.getLong());
                                if (entry.getId() % 500 == 0) {
                                    System.out.println("popped ID = " + entry.getId());
                                }
                            }
                            numPops.addAndGet(entries.size());
                            fpq1.commit();
                            entries.clear();
                        } finally {
                            // Roll back if commit was never reached (e.g. an exception mid-batch).
                            if (fpq1.isTransactionActive()) {
                                fpq1.rollback();
                            }
                        }
                        Thread.sleep(popRand.nextInt(10));
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        });
        futures.add(future);
    }

    // Wait for all workers, re-waiting if interrupted.
    boolean finished = false;
    while (!finished) {
        try {
            for (Future f : futures) {
                f.get();
            }
            finished = true;
        } catch (InterruptedException e) {
            // ignore — clear the flag and retry the waits
            Thread.interrupted();
        }
    }

    // Every pushed entry popped exactly once; checksums agree; queue fully drained.
    assertThat(numPops.get(), is(numEntries * numPushers));
    assertThat(fpq1.getNumberOfEntries(), is(0L));
    assertThat(pushSum.get(), is(popSum.get()));
    // Internal state is collapsed back to a single active segment and journal file,
    // and no paging files remain on disk.
    assertThat(fpq1.getMemoryMgr().getNumberOfActiveSegments(), is(1));
    assertThat(fpq1.getMemoryMgr().getSegments(), hasSize(1));
    assertThat(fpq1.getJournalMgr().getJournalFiles().entrySet(), hasSize(1));
    assertThat(FileUtils.listFiles(fpq1.getPagingDirectory(), TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE),
            is(empty()));
    assertThat(
            FileUtils.listFiles(fpq1.getJournalDirectory(), TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE),
            hasSize(1));
}
From source file:com.cloudera.cdk.data.hbase.tool.SchemaTool.java
/**
 * Collects the HBase Common Avro schema strings found under the given directory.
 * The directory is searched recursively; every file whose name ends in ".avsc"
 * is read and its contents added to the result.
 *
 * @param dir the directory to recursively search for schema strings
 * @return the list of schema strings read from all matching files
 */
private List<String> getSchemaStringsFromDir(File dir) {
    List<String> schemaStrings = new ArrayList<String>();
    // SuffixFileFilter selects *.avsc files; TrueFileFilter.INSTANCE descends into all subdirectories.
    Collection<File> avscFiles = FileUtils.listFiles(dir, new SuffixFileFilter(".avsc"), TrueFileFilter.INSTANCE);
    for (File avscFile : avscFiles) {
        schemaStrings.add(getSchemaStringFromFile(avscFile));
    }
    return schemaStrings;
}
From source file:com.isomorphic.maven.mojo.AbstractPackagerMojo.java
/**
 * Download the specified distributions, if necessary, extract resources from them, and use the results to create
 * Maven artifacts as appropriate:
 * <p/>
 * Try to install all of the main artifacts - e.g., those found in lib/*.jar and assembly/*.zip <br/>
 * Try to match main artifacts to 'subartifacts' by name and attach them (with classifiers as necessary)
 *
 * @param downloads The list of licenses to be included in the distribution. Multiple licenses are only allowed to
 *                  support the inclusion of optional modules.
 * @param basedir   The directory into which results should be written
 * @return A collection of Maven artifacts resulting from the download and preparation of a supported Isomorphic SDK.
 * @throws MojoExecutionException When any fatal error occurs.
 */
private Set<Module> collect(List<License> downloads, File basedir) throws MojoExecutionException {

    // allow execution to proceed without login credentials - it may be that they're not required
    Server server = settings.getServer(serverId);
    String username = null;
    String password = null;
    if (server != null) {
        username = server.getUsername();
        password = server.getPassword();
    } else {
        LOGGER.warn("No server configured with id '{}'. Will be unable to authenticate.", serverId);
    }
    UsernamePasswordCredentials credentials = null;
    if (username != null) {
        credentials = new UsernamePasswordCredentials(username, password);
    }

    // All downloads land in <basedir>/zip.
    File downloadTo = new File(basedir, "zip");
    downloadTo.mkdirs();

    Downloads downloadManager = new Downloads(credentials);
    downloadManager.setToFolder(downloadTo);
    downloadManager.setProxyConfiguration(settings.getActiveProxy());
    downloadManager.setOverwriteExistingFiles(overwrite);

    File[] existing = downloadTo.listFiles();
    List<Distribution> distributions = new ArrayList<Distribution>();
    try {
        if (!skipDownload) {
            distributions.addAll(
                    downloadManager.fetch(product, buildNumber, buildDate, downloads.toArray(new License[0])));
        } else if (existing != null) {
            // Offline mode: build a distribution from whatever was downloaded previously.
            LOGGER.info("Creating local distribution from '{}'", downloadTo.getAbsolutePath());
            Distribution distribution = Distribution.get(product, license, buildNumber, buildDate);
            distribution.getFiles().addAll(Arrays.asList(existing));
            distributions.add(distribution);
        }
        if (!skipExtraction) {
            LOGGER.info("Unpacking downloaded file/s to '{}'", basedir);
            for (Distribution distribution : distributions) {
                distribution.unpack(basedir);
            }
        }

        //it doesn't strictly read this way, but we're looking for lib/*.jar, pom/*.xml, assembly/*.zip
        //TODO it'd be better if this didn't have to know where the files were located after unpacking
        Collection<File> files = FileUtils.listFiles(basedir,
                FileFilterUtils.or(FileFilterUtils.suffixFileFilter("jar"), FileFilterUtils.suffixFileFilter("xml"),
                        FileFilterUtils.suffixFileFilter("zip")),
                FileFilterUtils.or(FileFilterUtils.nameFileFilter("lib"), FileFilterUtils.nameFileFilter("pom"),
                        FileFilterUtils.nameFileFilter("assembly")));

        if (files.isEmpty()) {
            throw new MojoExecutionException(String.format(
                    "There don't appear to be any files to work with at '%s'. Check earlier log entries for clues.",
                    basedir.getAbsolutePath()));
        }

        Set<Module> result = new TreeSet<Module>();
        for (File file : files) {
            try {
                // Normalize underscores so artifact names match their POM base names.
                String base = FilenameUtils.getBaseName(file.getName().replaceAll("_", "-"));

                //poms don't need anything else
                if ("xml".equals(FilenameUtils.getExtension(file.getName()))) {
                    result.add(new Module(getModelFromFile(file)));
                    continue;
                }

                //for each jar/zip, find the matching pom
                IOFileFilter filter = new WildcardFileFilter(base + ".pom");
                Collection<File> poms = FileUtils.listFiles(basedir, filter, TrueFileFilter.INSTANCE);
                if (poms.size() != 1) {
                    LOGGER.warn(
                            "Expected to find exactly 1 POM matching artifact with name '{}', but found {}. Skpping installation.",
                            base, poms.size());
                    continue;
                }
                Model model = getModelFromFile(poms.iterator().next());
                Module module = new Module(model, file);

                /*
                 * Find the right javadoc bundle, matched on prefix. e.g.,
                 *   smartgwt-eval -> smartgwt-javadoc
                 *   isomorphic-core-rpc -> isomorphic-javadoc
                 * and add it to the main artifact with the javadoc classifier. This seems appropriate as long as
                 *   a) there is no per-jar javadoc
                 *   b) naming conventions are adhered to (or can be corrected by plugin at extraction)
                 */
                int index = base.indexOf("-");
                String prefix = base.substring(0, index);
                Collection<File> doc = FileUtils.listFiles(new File(basedir, "doc"),
                        FileFilterUtils.prefixFileFilter(prefix), FileFilterUtils.nameFileFilter("lib"));
                if (doc.size() != 1) {
                    LOGGER.debug("Found {} javadoc attachments with prefix '{}'. Skipping attachment.", doc.size(),
                            prefix);
                } else {
                    module.attach(doc.iterator().next(), "javadoc");
                }
                result.add(module);
            } catch (ModelBuildingException e) {
                throw new MojoExecutionException("Error building model from POM", e);
            }
        }
        return result;
    } catch (IOException e) {
        throw new MojoExecutionException("Failure during assembly collection", e);
    }
}
From source file:com.mediaworx.opencms.moduleutils.manifestgenerator.OpenCmsModuleManifestGenerator.java
/**
 * Generates the manifest.xml for OpenCms modules from meta files (manifest_stub.xml and separate meta files for all
 * files and folders in the VFS).
 *
 * @param manifestRoot file representing the root folder of the manifest meta data (including manifest_stub.xml)
 *
 * @throws OpenCmsMetaXmlParseException     if the XmlHelper can not be initialized or the manifest stub file or any
 *                                          meta file can not be read or parsed
 * @throws OpenCmsMetaXmlFileWriteException if the resulting manifest file can not be written
 */
public void generateManifest(File manifestRoot)
        throws OpenCmsMetaXmlParseException, OpenCmsMetaXmlFileWriteException {

    manifestRootPath = manifestRoot.getPath();
    handledSiblingResourceIds = new HashSet<String>();

    String manifestStubPath = manifestRoot.getPath() + File.separator + FILENAME_MANIFEST_STUB;
    String manifestPath = manifestRoot.getPath() + File.separator + FILENAME_MANIFEST;
    LOG.info("manifestStubPath: {}", manifestStubPath);

    Node filesNode;
    Document manifest;
    try {
        xmlHelper = new XmlHelper();
        Map<String, String> replacements = null;
        if (replaceMetaVariables) {
            // Substitute the create-date variable with "now" while parsing the stub.
            replacements = new HashMap<String, String>();
            replacements.put(META_VAR_CREATEDATE, formatDate((new Date()).getTime()));
        }
        manifest = xmlHelper.parseFile(manifestStubPath, replacements);
        filesNode = xmlHelper.getSingleNodeForXPath(manifest, FILES_NODE_XPATH);
    } catch (ParserConfigurationException e) {
        throw new OpenCmsMetaXmlParseException("The XmlHelper could not be initialized", e);
    } catch (IOException e) {
        throw new OpenCmsMetaXmlParseException("The manifest stub file could not be read", e);
    } catch (SAXException e) {
        throw new OpenCmsMetaXmlParseException("The manifest stub xml could not be parsed (parse error)", e);
    } catch (XPathExpressionException e) {
        throw new OpenCmsMetaXmlParseException("The manifest stub xml could not be parsed (xpath error)", e);
    }

    // Regular expression matching anything but Strings ending with the VFS folder meta file suffix (".ocmsfolder.xml")
    String excludeFolderMetaRegex = "^(?:(?!" + Pattern.quote(FOLDER_META_SUFFIX) + "$).)*$";
    // FileFilter filtering all VFS folder meta files (so only VFS file meta files and folders are included)
    IOFileFilter excludeFolderMetaFilter = new RegexFileFilter(excludeFolderMetaRegex);

    // read all files and folders excluding VFS folder meta files
    Collection<File> files = FileUtils.listFilesAndDirs(manifestRoot, excludeFolderMetaFilter,
            TrueFileFilter.INSTANCE);

    for (File file : files) {
        if (file.isDirectory()) {
            // exclude the manifest root
            if (file.getPath().equals(manifestRoot.getPath())) {
                continue;
            }
            addFolderToFilesNode(filesNode, file);
        } else {
            // exclude the manifest stub file and the manifest file
            if (file.getPath().equals(manifestPath) || file.getPath().equals(manifestStubPath)) {
                continue;
            }
            addFileToFilesNode(filesNode, file);
        }
    }

    // render the xml string
    String manifestString = xmlHelper.getXmlStringFromDocument(manifest, CDATA_NODES);

    // if a specific version is provided, replace the original version
    if (StringUtils.isNotBlank(moduleVersion)) {
        manifestString = manifestString.replaceFirst("<version>[^<]*</version>",
                "<version>" + Matcher.quoteReplacement(moduleVersion) + "</version>");
    }

    // write the manifest to the disk
    try {
        writeManifest(manifestPath, manifestString);
    } catch (IOException e) {
        throw new OpenCmsMetaXmlFileWriteException("manifest.xml could not be written", e);
    }
}
From source file:de.peran.dependency.ChangedTestClassesHandler.java
/** * Returns the mapping from changed classes to the methods, that have changed. If the method-set of a class is empty, all methods may have changed. * //from ww w . j av a 2 s . c om * @return */ public Map<String, Set<String>> getChangedMethods() { final Map<String, Set<String>> changedMethods = new TreeMap<>(); final Set<String> changedClasses = getChangedClasses(); for (final String clazz : changedClasses) { try { final File newFile = FileUtils.listFiles(new File(projectFolder, "src"), new WildcardFileFilter(clazz + "*"), TrueFileFilter.INSTANCE).iterator().next(); final File oldFile = FileUtils .listFiles(lastSourcesFolder, new WildcardFileFilter(clazz + "*"), TrueFileFilter.INSTANCE) .iterator().next(); LOG.info("Vergleiche {}", newFile, oldFile); // CompilationUnit } catch (final NoSuchElementException nse) { changedMethods.put(clazz, new HashSet<>()); } } return changedMethods; }
From source file:de.lmu.ifi.dbs.jfeaturelib.utils.Extractor.java
/**
 * Creates a list of image files in the specified directory and all subdirectories (if recursive is enabled).
 *
 * Fix: replaced the raw {@code Collections.EMPTY_LIST} constant with the type-safe
 * {@code Collections.emptyList()}, eliminating the unchecked raw-type usage; behavior is unchanged.
 *
 * @param dir directory to start from
 * @return a list of image files in this directory (possibly empty)
 */
Collection<File> createFileList(File dir) {
    if (dir == null) {
        log.debug("directory is null, returning empty list");
        return Collections.emptyList();
    }
    // Match files by image-format suffix, case-insensitively.
    SuffixFileFilter sff = new SuffixFileFilter(imageFormats, IOCase.INSENSITIVE);
    // TrueFileFilter descends into every subdirectory; FalseFileFilter restricts the search to dir itself.
    IOFileFilter recursiveFilter = recursive ? TrueFileFilter.INSTANCE : FalseFileFilter.INSTANCE;
    return FileUtils.listFiles(dir, sff, recursiveFilter);
}
From source file:com.btoddb.fastpersitentqueue.JournalMgrIT.java
/**
 * Stress test: 3 pusher threads append 10000 entries each to the journal manager while
 * 3 popper threads consume them from a shared queue and report the takes. Verifies that
 * every appended entry is taken exactly once (count + checksum comparison) and that the
 * journal collapses back to a single file afterwards.
 */
@Test
public void testThreading() throws IOException, ExecutionException {
    final int numEntries = 10000;
    final int numPushers = 3;
    int numPoppers = 3;
    // Fixed seeds keep the sleep jitter reproducible across runs.
    final Random pushRand = new Random(1000L);
    final Random popRand = new Random(1000000L);
    // Hand-off queue between pushers and poppers.
    final ConcurrentLinkedQueue<FpqEntry> events = new ConcurrentLinkedQueue<FpqEntry>();
    final AtomicInteger pusherFinishCount = new AtomicInteger();
    final AtomicInteger numPops = new AtomicInteger();
    // Checksums over the pushed and popped IDs; must match at the end.
    final AtomicLong pushSum = new AtomicLong();
    final AtomicLong popSum = new AtomicLong();

    // Tiny journal files force frequent rollovers during the test.
    mgr.setMaxJournalFileSize(1000);
    mgr.init();

    ExecutorService execSrvc = Executors.newFixedThreadPool(numPushers + numPoppers);
    Set<Future> futures = new HashSet<Future>();

    // start pushing
    for (int i = 0; i < numPushers; i++) {
        Future future = execSrvc.submit(new Runnable() {
            @Override
            public void run() {
                for (int i = 0; i < numEntries; i++) {
                    try {
                        long x = idGen.incrementAndGet();
                        FpqEntry entry = mgr.append(new FpqEntry(x, new byte[100]));
                        events.offer(entry);
                        pushSum.addAndGet(x);
                        if (x % 500 == 0) {
                            System.out.println("pushed ID = " + x);
                        }
                        Thread.sleep(pushRand.nextInt(5));
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
                pusherFinishCount.incrementAndGet();
            }
        });
        futures.add(future);
    }

    // start popping
    for (int i = 0; i < numPoppers; i++) {
        Future future = execSrvc.submit(new Runnable() {
            @Override
            public void run() {
                // Keep draining until all pushers are done AND the hand-off queue is empty.
                while (pusherFinishCount.get() < numPushers || !events.isEmpty()) {
                    try {
                        FpqEntry entry;
                        while (null != (entry = events.poll())) {
                            if (entry.getId() % 500 == 0) {
                                System.out.println("popped ID = " + entry.getId());
                            }
                            popSum.addAndGet(entry.getId());
                            numPops.incrementAndGet();
                            // Tell the manager the entry was consumed so its journal can be retired.
                            mgr.reportTake(entry);
                            Thread.sleep(popRand.nextInt(5));
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        });
        futures.add(future);
    }

    // Wait for all workers, re-waiting if interrupted.
    boolean finished = false;
    while (!finished) {
        try {
            for (Future f : futures) {
                f.get();
            }
            finished = true;
        } catch (InterruptedException e) {
            // ignore — clear the flag and retry the waits
            Thread.interrupted();
        }
    }

    // Every appended entry was taken exactly once, and only one journal file remains
    // (both in the manager's ID map and on disk).
    assertThat(numPops.get(), is(numEntries * numPushers));
    assertThat(popSum.get(), is(pushSum.get()));
    assertThat(mgr.getJournalIdMap().entrySet(), hasSize(1));
    assertThat(FileUtils.listFiles(theDir, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE), hasSize(1));
}
From source file:com.taobao.android.tools.TPatchTool.java
/**
 * Assembles the final .tpatch file from the prepared temp directory: writes the native-so
 * patch meta file, zips the main bundle folder into a .so file (if non-empty), then zips
 * the whole temp directory into "patch-<newVersion>@<baseVersion>.tpatch" in outPatchDir.
 *
 * @param outPatchDir directory the resulting .tpatch file is written to
 * @param patchTmpDir temp directory holding the prepared patch contents; deleted on completion
 * @return the created .tpatch file
 * @throws IOException on any file-system failure
 */
private File createTPatchFile(File outPatchDir, File patchTmpDir) throws IOException {
    // Main bundle folder inside the temp patch dir, named after the configured main bundle.
    File mainBundleFoder = new File(patchTmpDir, ((TpatchInput) input).mainBundleName);
    File metaFile = new File(mainBundleFoder, SO_PATCH_META);
    if (soFileDefs.size() > 0) {
        // Write one meta entry per native-so patch definition.
        com.taobao.android.tpatch.model.PatchFile patchFile = new com.taobao.android.tpatch.model.PatchFile(
                metaFile);
        soFileDefs.stream().forEach(new Consumer<SoFileDef>() {
            @Override
            public void accept(SoFileDef soFileDef) {
                patchFile.append(soFileDef);
            }
        });
        patchFile.close();
    }
    File mainBundleFile = new File(patchTmpDir, ((TpatchInput) input).mainBundleName + ".so");
    // Only package the main bundle if it actually contains files (recursive check).
    if (FileUtils.listFiles(mainBundleFoder, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE).size() > 0) {
        hasMainBundle = true;
        // Shells out to `zip`; "-x */ -x .*" excludes empty directory entries and hidden files.
        CommandUtils.exec(mainBundleFoder, "zip -r " + mainBundleFile.getAbsolutePath() + " . -x */ -x .*");
    }
    FileUtils.deleteDirectory(mainBundleFoder);

    // Build the final patch file name from the new and base APK versions,
    // replacing any previously generated patch of the same name.
    File patchFile = null;
    patchFile = new File(outPatchDir,
            "patch-" + input.newApkBo.getVersionName() + "@" + input.baseApkBo.getVersionName() + ".tpatch");
    if (patchFile.exists()) {
        FileUtils.deleteQuietly(patchFile);
    }
    // patchInfo carries the patch file name inside the archive
    // (NOTE(review): presumably read by the patch consumer — confirm against the tpatch reader).
    File infoFile = new File(patchTmpDir, "patchInfo");
    FileUtils.writeStringToFile(infoFile,
            "patch-" + input.newApkBo.getVersionName() + "@" + input.baseApkBo.getVersionName() + ".tpatch");
    // zipBundle(patchTmpDir, patchFile);
    CommandUtils.exec(patchTmpDir, "zip -r " + patchFile.getAbsolutePath() + " . -x */ -x .*");
    FileUtils.deleteDirectory(patchTmpDir);
    return patchFile;
}