List of usage examples for org.apache.commons.io FileUtils iterateFiles
public static Iterator&lt;File&gt; iterateFiles(File directory, String[] extensions, boolean recursive)
From source file:org.canova.image.recordreader.VideoRecordReader.java
/**
 * Initializes the reader from an {@link InputSplit}.
 * <p>
 * For a {@link FileSplit} with multiple locations, collects the distinct parent
 * directories of recognized video files (each directory is one video), recording
 * directory names as labels when {@code appendLabel} is set. For a single location,
 * iterates the directory recursively or wraps the single file. For an
 * {@link InputStreamInputSplit}, loads the stream into a record immediately.
 *
 * @param split the input split describing the data locations
 * @throws IOException if the stream cannot be read
 * @throws InterruptedException declared by the interface contract
 * @throws IllegalArgumentException if a single-location path does not exist
 */
@Override
public void initialize(InputSplit split) throws IOException, InterruptedException {
    if (split instanceof FileSplit) {
        URI[] locations = split.locations();
        if (locations != null && locations.length >= 1) {
            if (locations.length > 1) {
                // Multiple locations: gather the directories that hold the video frames.
                List<File> allFiles = new ArrayList<>();
                for (URI location : locations) {
                    File file = new File(location);
                    if (file.isDirectory()) {
                        allFiles.add(file);
                        recordLabelFor(file);
                    } else {
                        File parent = file.getParentFile();
                        // Add a parent directory only once, and only for recognized formats.
                        if (!allFiles.contains(parent) && containsFormat(file.getAbsolutePath())) {
                            allFiles.add(parent);
                            recordLabelFor(file);
                        }
                    }
                }
                iter = allFiles.iterator();
            } else {
                File curr = new File(locations[0]);
                if (!curr.exists())
                    throw new IllegalArgumentException("Path " + curr.getAbsolutePath() + " does not exist!");
                if (curr.isDirectory())
                    iter = FileUtils.iterateFiles(curr, null, true);
                else
                    iter = Collections.singletonList(curr).iterator();
            }
        }
    } else if (split instanceof InputStreamInputSplit) {
        InputStreamInputSplit split2 = (InputStreamInputSplit) split;
        InputStream is = split2.getIs();
        URI[] locations = split2.locations();
        try {
            INDArray load = imageLoader.asMatrix(is);
            record = RecordConverter.toRecord(load);
            if (appendLabel) {
                Path path = Paths.get(locations[0]);
                String parent = path.getParent().toString();
                record.add(new DoubleWritable(labels.indexOf(parent)));
            }
        } finally {
            // BUG FIX: the stream was leaked when asMatrix/toRecord threw.
            is.close();
        }
    }
}

/**
 * Registers the name of {@code file}'s parent directory as a label when label
 * appending is enabled. De-duplicated helper for the two identical inline copies
 * in the original code.
 */
private void recordLabelFor(File file) {
    if (appendLabel) {
        File parentDir = file.getParentFile();
        String name = parentDir.getName();
        if (!labels.contains(name))
            labels.add(name);
    }
}
From source file:org.canova.sound.recordreader.WavFileRecordReader.java
@Override public void initialize(InputSplit split) throws IOException, InterruptedException { inputSplit = split;// w w w .j ava 2 s . c o m if (split instanceof FileSplit) { URI[] locations = split.locations(); if (locations != null && locations.length >= 1) { if (locations.length > 1) { List<File> allFiles = new ArrayList<>(); for (URI location : locations) { File iter = new File(location); if (iter.isDirectory()) { Iterator<File> allFiles2 = FileUtils.iterateFiles(iter, null, true); while (allFiles2.hasNext()) allFiles.add(allFiles2.next()); } else allFiles.add(iter); } iter = allFiles.iterator(); } else { File curr = new File(locations[0]); if (curr.isDirectory()) iter = FileUtils.iterateFiles(curr, null, true); else iter = Collections.singletonList(curr).iterator(); } } } else if (split instanceof InputStreamInputSplit) { record = new ArrayList<>(); InputStreamInputSplit split2 = (InputStreamInputSplit) split; InputStream is = split2.getIs(); URI[] locations = split2.locations(); if (appendLabel) { Path path = Paths.get(locations[0]); String parent = path.getParent().toString(); record.add(new DoubleWritable(labels.indexOf(parent))); } is.close(); } }
From source file:org.carracoo.maven.assist.AssistProcessor.java
/**
 * Returns a lazy iterator over the fully-qualified class names of every
 * {@code .class} file found recursively under {@code dir}.
 * <p>
 * Each file's canonical path is made relative to {@code dir}, path separators are
 * converted to dots, and the {@code .class} extension is stripped.
 *
 * @param dir root directory to scan (used both for the scan and to compute the
 *            relative-path prefix length, so it must match the canonical path prefix)
 * @return iterator of fully-qualified class names; {@code remove()} delegates to the
 *         underlying file iterator
 */
protected Iterator<String> iterateClassnames(final String dir) {
    return new Iterator<String>() {
        final String[] extensions = { "class" };
        final Iterator<File> classFiles = FileUtils.iterateFiles(new File(dir), extensions, true);

        @Override
        public boolean hasNext() {
            return classFiles.hasNext();
        }

        @Override
        public String next() {
            final File classFile = classFiles.next();
            try {
                // Strip "<dir>/" prefix to obtain the path relative to the scan root.
                final String qualifiedFileName = classFile.getCanonicalPath().substring(dir.length() + 1);
                return removeExtension(qualifiedFileName.replace(File.separator, "."));
            } catch (final IOException e) {
                // BUG FIX: preserve the cause instead of flattening it to its message,
                // which discarded the stack trace and exception type.
                throw new RuntimeException(e);
            }
        }

        @Override
        public void remove() {
            classFiles.remove();
        }
    };
}
From source file:org.codehaus.mojo.hibernate3.HibernateExporterMojo.java
@SuppressWarnings("unchecked") protected void handleProcessor(List<GeneratedClassProcessor> processors) throws Throwable { try {/*from w ww . ja v a 2 s . c om*/ for (GeneratedClassProcessor processor : processors) { // make sure the processors have their dependencies ... ugh. if (processor instanceof ComponentPropertiesAware) { ((ComponentPropertiesAware) processor).setComponentProperties(this.componentProperties); } getLog().info("Using " + processor.getClass().getName()); Iterator<File> javaFiles = FileUtils.iterateFiles(getExporterOutputDir(), new String[] { "java" }, true); while (javaFiles.hasNext()) { File f = javaFiles.next(); Reader reader = null; Writer writer = null; try { reader = new BufferedReader(new FileReader(f)); String contents = IOUtils.toString(reader); writer = new FileWriter(f); String result = processor.processClass(f, contents); getLog().info("processed " + f.getAbsolutePath() + "."); IOUtils.write(result, writer); } finally { if (null != reader) { IOUtils.closeQuietly(reader); } if (null != writer) { IOUtils.closeQuietly(writer); } } } } } catch (Exception e) { getLog().error("couldn't load and delegate to the processor classes configured. " + ExceptionUtils.getFullStackTrace(e)); } }
From source file:org.craftercms.studio.impl.v1.repository.job.RebuildRepositoryMetadata.java
protected boolean populateRebuildRepositoryMetadataQueue(String site) { logger.debug("Populating Rebuild Repository Metadata queue for site " + site); Path siteContentRootPath = Paths.get(previewRepoRootPath, contentService.expandRelativeSitePath(site, "")); logger.debug("Retrieving files list for content repository"); Iterator<File> fileIterator = FileUtils.iterateFiles( Paths.get(previewRepoRootPath, contentService.expandRelativeSitePath(site, "")).toFile(), null, true);// w w w . j a v a2 s .com List<String> paths = new ArrayList<String>(); int id = 1; while (fileIterator.hasNext()) { File file = fileIterator.next(); Path filePath = Paths.get(file.toURI()); String relativePath = "/" + filePath.subpath(siteContentRootPath.getNameCount(), filePath.getNameCount()); logger.debug("Processing " + relativePath); paths.add(relativePath); if (paths.size() == batchSize) { logger.debug("Insert batch of file paths into queue."); Map<String, Object> params = new HashMap<String, Object>(); params.put("id", id); params.put("site", site); params.put("pathList", paths); rebuildRepositoryMetadataMapper.insertRebuildRepoMetadataQueue(params); id = id + paths.size(); paths = new ArrayList<String>(); } } if (paths != null && paths.size() > 0) { logger.debug("Insert batch of file paths into queue."); Map<String, Object> params = new HashMap<String, Object>(); params.put("id", id); params.put("site", site); params.put("pathList", paths); rebuildRepositoryMetadataMapper.insertRebuildRepoMetadataQueue(params); paths = new ArrayList<String>(); } return true; }
From source file:org.crudgenerator.mojo.exporter.ModelGeneratorMojo.java
/**
 * Generates model classes from the database via Hibernate JDBC reverse
 * engineering, then (unless installation is disabled) copies the generated
 * sources into the project's source tree and deletes the staging directory.
 * <p>
 * Configuration resolution order: an existing {@code src/test/resources/hibernate.reveng.xml}
 * wins; otherwise a template is written to {@code target/test-classes}; the package name
 * defaults to the project groupId + ".model"; the JDBC property file is located on the
 * test/classes output directories.
 *
 * @throws MojoExecutionException on execution failure from the underlying exporter
 * @throws MojoFailureException   if required configuration files cannot be found or written
 */
@SuppressWarnings("rawtypes")
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    commonCorePackage = System.getProperty("commonPackage");
    getComponentProperties().put("implementation", "jdbcconfiguration");
    getComponentProperties().put("outputDirectory",
            (sourceDirectory != null) ? sourceDirectory : "${basedir}/target/appfuse/generated-sources");

    // default location for reveng file is src/test/resources
    File revengFile = new File("src/test/resources/hibernate.reveng.xml");
    if (revengFile.exists() && getComponentProperty("revengfile") == null) {
        getComponentProperties().put("revengfile", "src/test/resources/hibernate.reveng.xml");
    }

    // Check for existence of hibernate.reveng.xml and if there isn't one, create it.
    // Specifying the file explicitly in pom.xml overrides the default location.
    if (getComponentProperty("revengfile") == null) {
        getComponentProperties().put("revengfile", "target/test-classes/hibernate.reveng.xml");
    }
    File existingConfig = new File(getComponentProperty("revengfile"));
    if (!existingConfig.exists()) {
        // Materialize the bundled template resource as the reveng configuration file.
        InputStream in = this.getClass().getResourceAsStream("/appfuse/model/hibernate.reveng.ftl");
        StringBuffer configFile = new StringBuffer();
        try {
            InputStreamReader isr = new InputStreamReader(in);
            BufferedReader reader = new BufferedReader(isr);
            String line;
            while ((line = reader.readLine()) != null) {
                configFile.append(line).append("\n");
            }
            reader.close();
            getLog().info("Writing 'hibernate.reveng.xml' to " + existingConfig.getPath());
            FileUtils.writeStringToFile(existingConfig, configFile.toString());
        } catch (IOException io) {
            throw new MojoFailureException(io.getMessage());
        }
    }

    // if package name is not configured, default to project's groupId
    if (getComponentProperty("packagename") == null) {
        getComponentProperties().put("packagename", getProject().getGroupId() + ".model");
    }

    if (getComponentProperty("configurationfile") == null) {
        // look for jdbc.properties and set "propertyfile" to its path
        File jdbcProperties = new File("target/classes/jdbc.properties");
        if (!jdbcProperties.exists()) {
            jdbcProperties = new File("target/test-classes/jdbc.properties");
        }
        if (jdbcProperties.exists()) {
            if (getComponentProperty("propertyfile") == null) {
                getComponentProperties().put("propertyfile", jdbcProperties.getPath());
                getLog().debug("Set propertyfile to '" + jdbcProperties.getPath() + "'");
            }
        } else {
            throw new MojoFailureException("Failed to find jdbc.properties in classpath.");
        }
    }

    // For some reason, the classloader created in HibernateExporterMojo does not work
    // when using jdbcconfiguration - it can't find the JDBC Driver (no suitable driver).
    // Skipping the resetting of the classloader and manually adding the dependency
    // (with XML) works. It's ugly, but it works. I wish there was a way to get this
    // plugin to recognize the jdbc driver from the project.
    super.doExecute();

    if (System.getProperty("disableInstallation") != null) {
        disableInstallation = Boolean.valueOf(System.getProperty("disableInstallation"));
    }

    // allow installation to be suppressed when testing
    if (!disableInstallation) {
        // copy the generated files to the model directory of the project
        try {
            String packageName = getComponentProperties().get("packagename");
            String packageAsDir = packageName.replaceAll("\\.", "/");
            File dir = new File(sourceDirectory + "/" + packageAsDir);
            if (dir.exists()) {
                // Non-recursive: only the .java files directly in the package directory.
                Iterator filesIterator = FileUtils.iterateFiles(dir, new String[] { "java" }, false);
                while (filesIterator.hasNext()) {
                    File f = (File) filesIterator.next();
                    getLog().info("Copying generated '" + f.getName() + "' to project...");
                    FileUtils.copyFileToDirectory(f,
                            new File(destinationDirectory + "/src/main/java/" + packageAsDir));
                }
            } else {
                throw new MojoFailureException("No tables found in database to generate code from.");
            }
            FileUtils.forceDelete(dir);
        } catch (IOException io) {
            throw new MojoFailureException(io.getMessage());
        }
    }
}
From source file:org.darkstar.batch.LuaScriptNpcIdUpdate.java
/**
 * Rewrites NPC ids in every DarkStar Lua script (and in the elevator/transport
 * SQL files) according to the id-shift mapping loaded from the batch configuration.
 *
 * @throws RuntimeException if the scripts directory cannot be found or the shift
 *                          properties are empty
 */
private void updateLuaScriptIds() {
    final Properties configProperties = DarkstarUtils.loadBatchConfiguration();
    final Properties npcIdShiftProperties = DarkstarUtils.loadShiftProperties(configProperties);

    final String darkStarRoot = configProperties.getProperty("darkstar_root", "");
    final String scriptsRoot = String.format("%s/%s", darkStarRoot, "scripts");
    final File scriptsDirectory = new File(scriptsRoot);
    if (!scriptsDirectory.exists() || !scriptsDirectory.isDirectory()) {
        throw new RuntimeException(String.format("Cannot Find Scripts Directory! <%s>", scriptsRoot));
    }

    LOG.info("Preparing Shift Properties...");
    final Set<String> shiftKeysSet = npcIdShiftProperties.stringPropertyNames();
    final String[] shiftKeysArray = new String[shiftKeysSet.size()];
    shiftKeysSet.toArray(shiftKeysArray);
    final List<String> shiftKeys = Arrays.asList(shiftKeysArray);
    // Reverse-sorted key order — presumably so longer/later ids are replaced
    // before shorter prefixes; TODO confirm against updateFile's matching logic.
    Collections.sort(shiftKeys, Collections.reverseOrder());
    if (shiftKeys.isEmpty()) {
        throw new RuntimeException("Error: Empty Shift Properties Detected!");
    }

    LOG.info(String.format("Searching: %s", scriptsRoot));
    final String[] extensions = { "lua" };
    final Iterator<File> luaFiles = FileUtils.iterateFiles(scriptsDirectory, extensions, true);
    while (luaFiles.hasNext()) {
        updateFile(luaFiles.next(), shiftKeys, npcIdShiftProperties);
    }

    // BUG FIX: these paths were built by bare concatenation (root + "sql/..."),
    // which drops the path separator unless darkstar_root ends with '/'. Build
    // them the same way as scriptsRoot (a doubled '/' is harmless either way).
    final File elevatorFile = new File(String.format("%s/%s", darkStarRoot, "sql/elevators.sql"));
    updateFile(elevatorFile, shiftKeys, npcIdShiftProperties);

    final File transportFile = new File(String.format("%s/%s", darkStarRoot, "sql/transport.sql"));
    updateFile(transportFile, shiftKeys, npcIdShiftProperties);

    LOG.info(String.format("Finished Updating Lua Scripts With <%d> Errors!", errors));
}
From source file:org.dataconservancy.archive.impl.elm.fs.FsEntityStoreTest.java
@Test @SuppressWarnings("unchecked") public void filesInDirectoryTest() throws Exception { final String ID = "id"; FsEntityStore eStore = getEntityStore(); String content = "<filesInDirectoryTest />"; eStore.put(ID, IOUtils.toInputStream(content)); Iterator<File> fileIterator = FileUtils.iterateFiles(new File(eStore.getBaseDir()), null, true); boolean found = false; /* Brute force crawl directory structure to find our content */ while (fileIterator.hasNext()) { if (IOUtils.toString(new FileInputStream(fileIterator.next())).equals(content)) { found = true;//from w w w. j a va2 s . c o m } } Assert.assertTrue(found); }
From source file:org.dataconservancy.archive.impl.elm.fs.FsMetadataStore.java
/**
 * Returns a lazy {@link Iterable} over all metadata entries stored under the
 * base directory, optionally filtered by type.
 * <p>
 * A fresh recursive file iteration is started each time {@code iterator()} is
 * called. When no types are given, every entry is returned; otherwise only
 * entries whose {@code getType()} matches one of {@code types}.
 *
 * @param types the accepted metadata types; empty means "accept all"
 * @return a lazy iterable of matching metadata; its iterators do not support
 *         {@code remove()}
 */
public Iterable<Metadata> getAll(final String... types) {
    final List<String> accepted = Arrays.asList(types);
    return new Iterable<Metadata>() {
        @SuppressWarnings("unchecked")
        public Iterator<Metadata> iterator() {
            // Recursively iterate every file carrying the store's suffix.
            final Iterator<File> fileIterator = FileUtils.iterateFiles(new File(getBaseDir()), getSuffix(),
                    true);
            return new Iterator<Metadata>() {
                // One-element lookahead: holds the next accepted entry, or null when exhausted.
                private Metadata next;

                {
                    // Prime the lookahead so hasNext() is correct from the start.
                    advanceNext();
                }

                public boolean hasNext() {
                    return next != null;
                }

                public Metadata next() {
                    Metadata toReturn = next;
                    advanceNext();
                    return toReturn;
                }

                public void remove() {
                    throw new UnsupportedOperationException();
                }

                // Advances 'next' to the next file whose type is accepted
                // (or null once the underlying file iterator is exhausted).
                private void advanceNext() {
                    while (fileIterator.hasNext()) {
                        File candidate = fileIterator.next();
                        Metadata m = new FsMetadata(candidate);
                        if (accepted.isEmpty() || accepted.contains(m.getType())) {
                            next = m;
                            return;
                        }
                    }
                    next = null;
                }
            };
        }
    };
}
From source file:org.dataconservancy.ui.services.AtomDepositDocumentParserTest.java
/** * Seeds a {@code MockArchiveUtil} with entities, and constructs a {@code AtomDepositDocumentParser} for testing. * /*from w ww .j a va2 s .c o m*/ * @throws URISyntaxException * @throws FileNotFoundException * @throws InvalidXmlException */ @Before @SuppressWarnings("unchecked") public void setUp() throws URISyntaxException, FileNotFoundException, InvalidXmlException { // Obtain the valid, production, sample feed URL feedResource = this.getClass().getResource(PRODUCTION_FEED_RESOURCE); assertNotNull("Unable to resolve classpath resource " + PRODUCTION_FEED_RESOURCE); productionFeedIn = this.getClass().getResourceAsStream(PRODUCTION_FEED_RESOURCE); // Instantiate a MockArchiveUtil which we will pre-populate with expected DCS entities. productionMockArchiveUtil = new MockArchiveUtil(); // Set the mapping strategy to entity ids only productionMockArchiveUtil.setMappingStrategy(MockArchiveUtil.ID_MAPPING_STRATEGY.ENTITY_ID); // Populate the MockArchiveUtil with the valid, expected, DCS entities. File feedBaseDir = new File(feedResource.toURI()).getParentFile(); Iterator<File> serializedEntities = FileUtils.iterateFiles(feedBaseDir, new AndFileFilter(new PrefixFileFilter("4260"), new SuffixFileFilter(".xml")), DirectoryFileFilter.DIRECTORY); while (serializedEntities.hasNext()) { Dcp dcp = modelBuilder.buildSip(new FileInputStream(serializedEntities.next())); for (DcsEntity e : dcp) { productionMockArchiveUtil.addEntity(e); } } assertTrue("Error loading expected entities from " + feedBaseDir, productionMockArchiveUtil.getEntities().size() > 0); // Construct the DocumentParser under test underTest = new AtomDepositDocumentParser(productionMockArchiveUtil); }