List of usage examples for org.apache.commons.io FileUtils listFiles
public static Collection&lt;File&gt; listFiles(File directory, String[] extensions, boolean recursive)
From source file:com.dragome.compiler.utils.FileManager.java
public Collection<String> getAllFilesInClasspath() { Collection<String> files = new ArrayList<String>(); for (Object o : path) { if (o instanceof JarFile) { JarFile jarFile = (JarFile) o; List<String> classesInJar = findClassesInJar(jarFile); for (String file : classesInJar) { if (classpathFilter == null || classpathFilter.accept(new File(file))) { files.add(file);/* ww w . j av a 2 s. c o m*/ } } } else { File folder = (File) o; Collection<File> listFiles = FileUtils.listFiles(folder, new WildcardFileFilter("*.class"), DirectoryFileFilter.DIRECTORY); for (File file : listFiles) { if (classpathFilter == null || classpathFilter.accept(file)) { String substring = file.toString().substring(folder.toString().length() + 1); files.add(substring.replace(".class", "")); } } } } return files; }
From source file:it.geosolutions.geostore.services.rest.auditing.AuditingTestsUtils.java
static void checkDirectoryContainsFiles(File directory, File... expectedFiles) { Collection existingFiles = FileUtils.listFiles(directory, null, false); Assert.assertEquals(existingFiles.size(), expectedFiles.length); for (File expectedFile : expectedFiles) { Assert.assertTrue(existingFiles.contains(expectedFile)); }//from w w w .j a v a2 s .c o m }
From source file:com.garethahealy.camel.file.loadbalancer.example1.routes.ReadThreeFilesWithThreeReadersTest.java
/**
 * Verifies that three competing readers each consume exactly one of three input files:
 * every mock endpoint sees one message and each .camelN archive directory ends up with
 * exactly one distinct file.
 */
@Test
public void readThreeFilesWithThreeReaders() throws InterruptedException, MalformedURLException {
    Map<String, String> answer = getRouteToEndpointPriority();

    //Used for debugging purposes, in-case we need to know which endpoint has what priority
    LOG.info("EndpointSetup: " + answer.toString());

    MockEndpoint first = getMockEndpoint("mock:endFirst");
    first.setExpectedMessageCount(1);
    first.setResultWaitTime(TimeUnit.SECONDS.toMillis(15));
    first.setAssertPeriod(TimeUnit.SECONDS.toMillis(1));

    MockEndpoint second = getMockEndpoint("mock:endSecond");
    second.setExpectedMessageCount(1);
    second.setResultWaitTime(TimeUnit.SECONDS.toMillis(15));
    second.setAssertPeriod(TimeUnit.SECONDS.toMillis(1));

    MockEndpoint third = getMockEndpoint("mock:endThird");
    third.setExpectedMessageCount(1);
    third.setResultWaitTime(TimeUnit.SECONDS.toMillis(15));
    third.setAssertPeriod(TimeUnit.SECONDS.toMillis(1));

    //Wait for the files to be processed
    sleep(10);

    // .camelN: presumably the per-reader archive directories — TODO confirm against route config.
    File firstDirectory = FileUtils.toFile(new URL("file:" + rootDirectory + "/.camel0"));
    File secondDirectory = FileUtils.toFile(new URL("file:" + rootDirectory + "/.camel1"));
    File thirdDirectory = FileUtils.toFile(new URL("file:" + rootDirectory + "/.camel2"));

    Assert.assertTrue(".camel0 doesnt exist", firstDirectory.exists());
    Assert.assertTrue(".camel1 doesnt exist", secondDirectory.exists());
    Assert.assertTrue(".camel2 doesnt exist", thirdDirectory.exists());

    // Files only (directories excluded), non-recursive.
    Collection<File> firstFiles = FileUtils.listFiles(firstDirectory, FileFilterUtils.fileFileFilter(), null);
    Collection<File> secondFiles = FileUtils.listFiles(secondDirectory, FileFilterUtils.fileFileFilter(), null);
    Collection<File> thirdFiles = FileUtils.listFiles(thirdDirectory, FileFilterUtils.fileFileFilter(), null);

    Assert.assertNotNull(firstFiles);
    Assert.assertNotNull(secondFiles);
    Assert.assertNotNull(thirdFiles);

    //Check the files are unique, and we haven't copied the same file twice
    // NOTE(review): these removeAll calls mutate the collections in sequence, so later
    // comparisons run against already-reduced sets; with one expected file per directory
    // the end result still works, but confirm the ordering before reusing this pattern.
    firstFiles.removeAll(secondFiles);
    firstFiles.removeAll(thirdFiles);
    secondFiles.removeAll(firstFiles);
    secondFiles.removeAll(thirdFiles);
    thirdFiles.removeAll(firstFiles);
    thirdFiles.removeAll(secondFiles);

    //Each directory should of only copied one file
    Assert.assertEquals(new Integer(1), new Integer(firstFiles.size()));
    Assert.assertEquals(new Integer(1), new Integer(secondFiles.size()));
    Assert.assertEquals(new Integer(1), new Integer(thirdFiles.size()));

    //Assert the endpoints last, as there seems to be a strange bug where they fail but the files have been processed,
    //so that would suggest the MockEndpoints are reporting a false-positive
    first.assertIsSatisfied();
    second.assertIsSatisfied();
    third.assertIsSatisfied();
}
From source file:com.textocat.textokit.eval.cas.FSCasDirectory.java
private Collection<File> getXmiFiles() { if (xmiFiles == null) { IOFileFilter sourceFileFilter = getSourceFileFilter(); xmiFiles = FileUtils.listFiles(dir, sourceFileFilter, trueFileFilter()); }/*from w ww .j ava 2 s .co m*/ return xmiFiles; }
From source file:com.haulmont.cuba.core.jmx.FileStorage.java
@Override public String findOrphanFiles() { FileStorageAPI fileStorageAPI = AppBeans.get(FileStorageAPI.class); if (!(fileStorageAPI instanceof com.haulmont.cuba.core.app.filestorage.FileStorage)) { return "<not supported>"; }//from w ww. j ava 2s. c om File[] roots = getStorageRoots(); if (roots.length == 0) return "No storage directories defined"; StringBuilder sb = new StringBuilder(); File storageFolder = roots[0]; if (!storageFolder.exists()) return ExceptionUtils.getStackTrace(new FileStorageException(FileStorageException.Type.FILE_NOT_FOUND, storageFolder.getAbsolutePath())); @SuppressWarnings("unchecked") Collection<File> systemFiles = FileUtils.listFiles(storageFolder, null, true); @SuppressWarnings("unchecked") Collection<File> filesInRootFolder = FileUtils.listFiles(storageFolder, null, false); //remove files of root storage folder (e.g. storage.log) from files collection systemFiles.removeAll(filesInRootFolder); List<FileDescriptor> fileDescriptors; Transaction tx = persistence.createTransaction(); try { EntityManager em = persistence.getEntityManager(); TypedQuery<FileDescriptor> query = em.createQuery("select fd from sys$FileDescriptor fd", FileDescriptor.class); fileDescriptors = query.getResultList(); tx.commit(); } catch (Exception e) { return ExceptionUtils.getStackTrace(e); } finally { tx.end(); } Set<String> descriptorsFileNames = new HashSet<>(); for (FileDescriptor fileDescriptor : fileDescriptors) { descriptorsFileNames .add(com.haulmont.cuba.core.app.filestorage.FileStorage.getFileName(fileDescriptor)); } for (File file : systemFiles) { if (!descriptorsFileNames.contains(file.getName())) //Encode file path if it contains non-ASCII characters if (!file.getPath().matches("\\p{ASCII}+")) { String encodedFilePath = URLEncodeUtils.encodeUtf8(file.getPath()); sb.append(encodedFilePath).append("\n"); } else { sb.append(file.getPath()).append("\n"); } } return sb.toString(); }
From source file:com.incapture.slate.DocGeneratorTest.java
@Test public void testGenerateOutput() throws Exception { DocGenerator generator = new DocGenerator(); IndexNode indexNode = new IndexNode(); indexNode.setTitle("Rapture API Docs"); indexNode.getLanguages().add(new LanguageNode("java")); indexNode.getLanguages().add(new LanguageNode("python")); indexNode.getLanguages().add(new LanguageNode("javascript")); indexNode.getLanguages().add(new LanguageNode("shell", "cURL")); indexNode.getTocFooters().add(new TextNode("by Incapture Technologies")); indexNode.getTocFooters()//from ww w . ja va 2s . c o m .add(new HrefNode("Documentation Powered by Slate", "http://github.com/tripit/slate")); indexNode.setSearchable(true); ApiNode introApi = createIntroApi(); ApiNode authApi = createAuthApi(); ApiNode docApi = createDocApi(); generator.generateOutput(rootDir, indexNode, Arrays.asList(introApi, authApi, docApi)); Collection<File> files = FileUtils.listFiles(rootDir, null, true); assertEquals(4, files.size()); for (File file : files) { if (file.getName().equals("index.md")) { String content = readResource("/output/expected/index.md"); assertEquals(content, FileUtils.readFileToString(file)); } else { String name = file.getName(); assertTrue("filename is " + name, FILE_NAMES.contains(name)); assertTrue(file.getAbsolutePath().endsWith("includes/" + name)); String content = readResource("/output/expected/includes/" + name); String expects[] = content.split("[\n]+"); String tmp[] = FileUtils.readFileToString(file).replaceAll("[\n]+", "\n").split("====\n"); StringBuilder sb = new StringBuilder(); // Strip out the MIT licence for (String s : tmp) { if (!s.startsWith(" The MIT License (MIT)")) sb.append(s); } String actuals[] = sb.toString().split("\n"); assertEquals(String.format("Expected number of lines for file %s does not match", file.getAbsolutePath()), expects.length, actuals.length); int i = 0; while (i < expects.length) { String expect = expects[i]; String actual = actuals[i]; i++; assertEquals(String.format("Line %d in file 
%s does not match", i, file.getAbsolutePath()), expect, actual); } } } }
From source file:$.MessageLogParser.java
/** * Gets lines from the log file which corresponds with specified correlation ID. *// ww w .j av a 2 s . com * @param correlationId the correlation ID * @param logDate which date to search log files for * @return log lines * @throws IOException when error occurred during file reading */ List<String> getLogLines(String correlationId, Date logDate) throws IOException { File logFolder = new File(logFolderPath); if (!logFolder.exists() || !logFolder.canRead()) { throw new FileNotFoundException("there is no readable log folder - " + logFolderPath); } final String logDateFormatted = fileFormat.format(logDate); // filter log files for current date IOFileFilter nameFilter = new IOFileFilter() { @Override public boolean accept(File file) { return logNameFilter.accept(file) && (StringUtils.contains(file.getName(), logDateFormatted) || file.getName().endsWith(BASE_FILE_EXTENSION)); } @Override public boolean accept(File dir, String name) { return StringUtils.contains(name, logDateFormatted) || name.endsWith(BASE_FILE_EXTENSION); } }; List<File> logFiles = new ArrayList<File>(FileUtils.listFiles(logFolder, nameFilter, null)); Collections.sort(logFiles, LastModifiedFileComparator.LASTMODIFIED_COMPARATOR); // go through all log files List<String> logLines = new ArrayList<String>(); for (File logFile : logFiles) { logLines.addAll(getLogLines(logFile, correlationId)); } return logLines; }
From source file:com.edsoft.teknosaproject.bean.ReportBean.java
public String reportDirectory() { path = Paths.get(DIR, "AnaDepo", family, type, brand, document); //path = Paths.get("D:", "Teknosa", family, type, brand, document); list = (List<File>) FileUtils.listFiles(path.toFile(), TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE); ConnectBean.list = list;/*from www . j a va 2 s. c om*/ FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(path.toString())); return "rapor2"; }
From source file:com.github.seqware.queryengine.system.Utility.java
/** * <p>dumpVCFFromFeatureSetID.</p> * * @param fSet a {@link com.github.seqware.queryengine.model.FeatureSet} object. * @param file a {@link java.lang.String} object. *//*ww w. j a va2 s. c om*/ public static boolean dumpFromMapReducePlugin(String header, Reference ref, FeatureSet fSet, Class<? extends PluginInterface> arbitraryPlugin, String file, Object... params) { BufferedWriter outputStream = null; try { if (file != null) { outputStream = new BufferedWriter(new FileWriter(file)); } else { outputStream = new BufferedWriter(new OutputStreamWriter(System.out)); } if (header != null) { outputStream.append(header); } } catch (IOException e) { Logger.getLogger(Utility.class.getName()).fatal("Exception thrown starting export to file:", e); System.exit(-1); } if (SWQEFactory.getQueryInterface() instanceof MRHBasePersistentBackEnd) { if (SWQEFactory.getModelManager() instanceof MRHBaseModelManager) { try { QueryFuture<File> future = SWQEFactory.getQueryInterface().getFeaturesByPlugin(0, arbitraryPlugin, ref, params); File get = future.get(); Collection<File> listFiles = FileUtils.listFiles(get, new WildcardFileFilter("part*"), DirectoryFileFilter.DIRECTORY); for (File f : listFiles) { BufferedReader in = new BufferedReader(new FileReader(f)); IOUtils.copy(in, outputStream); in.close(); } get.deleteOnExit(); assert (outputStream != null); outputStream.flush(); outputStream.close(); return true; } catch (IOException e) { Logger.getLogger(VCFDumper.class.getName()).fatal("Exception thrown exporting to file:", e); System.exit(-1); } catch (Exception e) { Logger.getLogger(VCFDumper.class.getName()) .fatal("MapReduce exporting failed, falling-through to normal exporting to file", e); } } } return false; }
From source file:com.wavemaker.tools.javaservice.JavaServiceDefinition.java
/** * Normal constructor. Attempts to use reflection (and a custom, very isolated classloader) to read information * about the class.//from www . j a v a2 s . co m */ @SuppressWarnings("unchecked") public JavaServiceDefinition(String serviceClassName, String serviceId, List<File> serviceCompiledDirs, List<File> serviceLibDirs, List<String> excludeTypeNames) throws ClassNotFoundException, LinkageError { List<File> classpath = new ArrayList<File>(); if (serviceCompiledDirs != null) { classpath.addAll(serviceCompiledDirs); } if (serviceLibDirs != null) { for (File serviceLibDir : serviceLibDirs) { if (!serviceLibDir.exists()) { continue; } else if (!serviceLibDir.isDirectory()) { throw new WMRuntimeException(MessageResource.LIB_DIR_NOT_DIR, serviceLibDir); } classpath.addAll(FileUtils.listFiles(serviceLibDir, new String[] { "jar" }, false)); } } ClassLoader cl = ClassLoaderUtils.getTempClassLoaderForFile(classpath.toArray(new File[] {})); Class<?> serviceClass = ClassLoaderUtils.loadClass(serviceClassName, false, cl); Class<?> runtimeServiceClass = ClassLoaderUtils.loadClass("com.wavemaker.runtime.service.LiveDataService", cl); this.serviceId = serviceId; this.excludeTypeNames = excludeTypeNames; init(serviceClass, runtimeServiceClass); }