Example usage for org.apache.commons.io.filefilter TrueFileFilter TRUE

Introduction

This page lists example usages of TrueFileFilter.TRUE from the org.apache.commons.io.filefilter package, collected from open source projects.

Prototype

IOFileFilter TRUE

Document

Singleton instance of the true filter: an IOFileFilter that accepts every file and directory passed to it.
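
Before the project examples below, here is a minimal sketch of the typical pattern (the /tmp/data path and the ListEverything class name are hypothetical, not taken from any of the sources that follow): passing TrueFileFilter.TRUE as both the file filter and the directory filter to FileUtils.listFiles returns every file under a directory, recursing into all subdirectories.

import java.io.File;
import java.util.Collection;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.TrueFileFilter;

public class ListEverything {
    public static void main(String[] args) {
        // hypothetical starting directory for this sketch
        File root = new File("/tmp/data");

        // first filter selects files, second decides which directories to descend into;
        // TrueFileFilter.TRUE accepts everything, so the walk visits all files recursively
        Collection<File> files = FileUtils.listFiles(root, TrueFileFilter.TRUE, TrueFileFilter.TRUE);

        for (File f : files) {
            System.out.println(f.getAbsolutePath());
        }
    }
}

Passing null instead of TrueFileFilter.TRUE as the directory filter restricts the listing to the top-level directory, a distinction several of the examples below rely on.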

Usage

From source file:org.dataconservancy.dcs.util.stream.fs.ModifiedSinceFileFilterTest.java

@Test
public void testModifiedSinceBeforeRecurseDirectories() throws IOException, InterruptedException {
    final File tempDir = createTemporaryDirectory(this.tempDir);
    final File tempFile = new File(tempDir, "a file");
    FileUtils.touch(tempFile);
    log.debug("Created temp file {}", tempFile);
    final Iterator<File> files = FileUtils
            .listFiles(tempDir, new ModifiedSinceFileFilter(CLASS_INSTANTIATED_AT), TrueFileFilter.TRUE)
            .iterator();
    int count = 0;
    while (files.hasNext()) {
        count++;
        files.next();
    }
    assertEquals(1, count);
}

From source file:org.datavyu.controllers.project.ProjectController.java

private File huntForFile(final File workingDir, final String fileName) {
    // If we can't find the file, we will start looking for the file
    // using the easiest solution first and bump up the complexity as
    // we go along.

    // Solution 1: It is in the same directory as the project file.
    File file = new File(workingDir, fileName);

    if (file.exists()) {
        return file;
    }

    IOFileFilter fileNameFilter = FileFilterUtils.nameFileFilter(fileName);

    // Solution 2: It is in a sub-directory of the project file.
    {
        Iterator<File> subFiles = FileUtils.iterateFiles(workingDir, fileNameFilter, TrueFileFilter.TRUE);

        if (subFiles.hasNext()) {
            file = subFiles.next();
        }

        if (file.exists()) {
            return file;
        }
    }

    // Solution 3: It is in the parent of the current directory.
    {
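        // unlike Solution 2, the null directory filter means iterateFiles does not search subdirectories here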
        Iterator<File> subFiles = FileUtils.iterateFiles(workingDir.getParentFile(), fileNameFilter, null);

        if (subFiles.hasNext()) {
            file = subFiles.next();
        }

        if (file.exists()) {
            return file;
        }
    }

    return null;
}

From source file:org.duracloud.chunk.FileChunkerDriver.java

/**
 * Main
 *
 * @param args
 * @throws IOException
 */
public static void main(String[] args) throws Exception {

    CommandLine cmd = parseArgs(args);

    ensureWritePermissionToLocalDir();

    // Are there credentials?
    Credential credential = null;
    String username = "";
    if (cmd.hasOption("username") || cmd.hasOption("password")) {
        username = cmd.getOptionValue("username");
        credential = getCredentials(username, cmd.getOptionValue("password"));
    }

    // Where will content be written?
    ContentWriter writer;
    if (cmd.hasOption("cloud-store")) {
        String[] vals = cmd.getOptionValues("cloud-store");
        String host = vals[0];
        String port = vals[1];
        ContentStoreManager mgr = new ContentStoreManagerImpl(host, port);

        if (credential != null) {
            mgr.login(credential);
        }

        writer = new DuracloudContentWriter(mgr.getPrimaryContentStore(), username);
    } else {
        writer = new FilesystemContentWriter();
    }

    // Will Chunk MD5's be preserved?
    boolean chunkMD5 = true;
    if (cmd.hasOption("exclude-chunk-md5s")) {
        chunkMD5 = false;
    }

    // Will large files be ignored?
    boolean ignoreLarge = false;
    if (cmd.hasOption("ignore-large-files")) {
        ignoreLarge = true;
    }

    // Will files be filtered?
    IOFileFilter fileFilter = TrueFileFilter.TRUE;
    if (cmd.hasOption("file-filter")) {
        String[] filterVals = cmd.getOptionValues("file-filter");
        fileFilter = buildFilter(new File(filterVals[0]));
    }

    // Will directories be filtered?
    IOFileFilter dirFilter = TrueFileFilter.TRUE;
    if (cmd.hasOption("dir-filter")) {
        String[] filterVals = cmd.getOptionValues("dir-filter");
        dirFilter = buildFilter(new File(filterVals[0]));
    }

    // Add content?
    FileChunkerOptions options;
    if (cmd.hasOption("add")) {
        String[] vals = cmd.getOptionValues("add");
        File fromDir = new File(vals[0]);
        File toDir = new File(vals[1]);
        Long chunkSize = getChunkSize(vals[2]);

        options = new FileChunkerOptions(fileFilter, dirFilter, chunkSize, chunkMD5, ignoreLarge);
        chunk(fromDir, toDir, options, writer);

        // ...or generate test data
    } else if (cmd.hasOption("generate")) {
        String[] vals = cmd.getOptionValues("generate");
        File outFile = new File(vals[0]);
        long contentSize = Long.parseLong(vals[1]);

        FileChunker.createTestContent(outFile, contentSize);

    } else {
        usage();
    }
}

From source file:org.gbif.dwca.io.ArchiveFactory.java

/**
 * Opens an archive from a local file and decompresses or copies it into the given archive directory.
 * Make sure the archive directory does not already contain any files; existing files will be removed!
 *
 * If the source archive is an uncompressed, single data file and a valid archive, it will be copied as-is
 * to the archiveDir.
 *
 * @param archiveFile the location of a compressed archive or single data file
 * @param archiveDir  an empty, writable directory used to keep the decompressed archive in
 */
public static Archive openArchive(File archiveFile, File archiveDir)
        throws IOException, UnsupportedArchiveException {
    if (archiveDir.exists()) {
        // clean up any existing folder
        LOG.debug("Deleting existing archive folder [{}]", archiveDir.getAbsolutePath());
        org.gbif.utils.file.FileUtils.deleteDirectoryRecursively(archiveDir);
    }
    FileUtils.forceMkdir(archiveDir);
    // try to decompress archive
    try {
        CompressionUtil.decompressFile(archiveDir, archiveFile, true);
        // we keep subfolders, but often the entire archive is within a single subfolder; remove that root folder if present
        File[] rootFiles = archiveDir.listFiles((FileFilter) HiddenFileFilter.VISIBLE);
        if (rootFiles.length == 1) {
            File root = rootFiles[0];
            if (root.isDirectory()) {
                // single root dir, flatten structure
                LOG.debug("Removing single root folder {} found in decompressed archive",
                        root.getAbsoluteFile());
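                // with a null directory filter, listFiles returns only the files directly under root (no recursion)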
                for (File f : FileUtils.listFiles(root, TrueFileFilter.TRUE, null)) {
                    File f2 = new File(archiveDir, f.getName());
                    f.renameTo(f2);
                }
            }
        }
        // continue to read archive from the tmp dir
        return openArchive(archiveDir);

    } catch (CompressionUtil.UnsupportedCompressionType e) {
        LOG.debug("Could not uncompress archive [{}], try to read as single text file", archiveFile, e);
        // If it's only a text file we will get this exception - but also for corrupt compressions
        // try to open it as a text file and, if successful, copy the file to the archive dir
        Archive arch = openArchiveDataFile(archiveFile);
        Files.copy(archiveFile, new File(archiveDir, archiveFile.getName()));
        return arch;
    }
}

From source file:org.jclouds.examples.blobstore.BlobUploaderMain.java

public static void main(String[] args) throws IOException {

    OptionParser parser = new OptionParser();
    parser.accepts("directory").withRequiredArg().required().ofType(String.class);
    parser.accepts("provider").withRequiredArg().required().ofType(String.class);
    parser.accepts("username").withRequiredArg().required().ofType(String.class);
    parser.accepts("password").withRequiredArg().required().ofType(String.class);
    parser.accepts("region").withRequiredArg().required().ofType(String.class);
    parser.accepts("threads").withRequiredArg().ofType(Integer.TYPE).describedAs("number of parallel threads");
    OptionSet options = null;

    try {
        options = parser.parse(args);
    } catch (OptionException e) {
        System.out.println(e.getLocalizedMessage());
        parser.printHelpOn(System.out);
        return;
    }

    if (options.has("threads")) {
        // "threads" is declared above with ofType(Integer.TYPE), so read that option back as an integer
        numThreads = Integer.valueOf(options.valueOf("threads").toString());
    }

    File rootDir = new File((String) options.valueOf("directory"));
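    // CanReadFileFilter.CAN_READ selects readable files; TrueFileFilter.TRUE as the directory filter recurses into every subdirectory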
    Collection<File> files = FileUtils.listFiles(rootDir, CanReadFileFilter.CAN_READ, TrueFileFilter.TRUE);
    totalBytes = FileUtils.sizeOfDirectory(rootDir);

    System.out.println("Uploading " + rootDir.getName() + " " + totalBytes / FileUtils.ONE_MB + "MB");

    ExecutorService executor = Executors.newFixedThreadPool(numThreads);

    for (File f : files) {
        BlobUploader b = new BlobUploader((String) options.valueOf("username"),
                (String) options.valueOf("password"), (String) options.valueOf("provider"),
                (String) options.valueOf("region"), f);
        executor.execute(b);
    }
    executor.shutdown();

    try {
        executor.awaitTermination(1, TimeUnit.DAYS);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}

From source file:org.opennms.upgrade.implementations.DiscoveryConfigurationLocationMigratorOfflineTest.java

@Before
public void setUp() throws Exception {
    FileUtils.copyDirectory(new File("src/test/resources/etc3"), m_tempFolder.newFolder("etc"));
    System.setProperty("opennms.home", m_tempFolder.getRoot().getAbsolutePath());
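    // TrueFileFilter.TRUE and TrueFileFilter.INSTANCE are aliases for the same accept-everything singleton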
    final List<File> files = new ArrayList<>(FileUtils.listFilesAndDirs(new File(m_tempFolder.getRoot(), "etc"),
            TrueFileFilter.TRUE, TrueFileFilter.INSTANCE));
    Collections.sort(files);
}

From source file:org.opennms.upgrade.implementations.DiscoveryConfigurationMigratorOfflineTest.java

@Before
public void setUp() throws Exception {
    FileUtils.copyDirectory(new File("src/test/resources/etc"), m_tempFolder.newFolder("etc"));
    System.setProperty("opennms.home", m_tempFolder.getRoot().getAbsolutePath());
    final List<File> files = new ArrayList<>(FileUtils.listFilesAndDirs(new File(m_tempFolder.getRoot(), "etc"),
            TrueFileFilter.TRUE, TrueFileFilter.INSTANCE));
    Collections.sort(files);
}

From source file:org.silverpeas.core.io.temp.TemporaryDataCleanerSchedulerInitializerIT.java

@Test
public void test() throws Exception {
    assertThat(scheduler.isJobScheduled(TemporaryDataCleanerSchedulerInitializer.JOB_NAME), is(true));
    initializer.startTask.join();
    final Collection<File> files = FileUtils.listFilesAndDirs(rootTempFile, TrueFileFilter.TRUE,
            TrueFileFilter.TRUE);
    assertThat(files, contains(rootTempFile));
}

From source file:org.silverpeas.core.silverstatistics.volume.service.DirectoryWalkerSizeComputer.java

@Override
public DirectoryStats call() throws Exception {
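    // TrueFileFilter.TRUE accepts everything; the -1 presumably follows DirectoryWalker's convention of no depth limit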
    StatisticDirectoryWalker walker = new StatisticDirectoryWalker(directory.getName(), TrueFileFilter.TRUE,
            -1);
    return walker.scan(directory);
}

From source file:org.silverpeas.util.data.TemporaryDataCleanerSchedulerInitializerTest.java

@Test
public void test() {
    assertThat(scheduler.isJobScheduled(TemporaryDataCleanerSchedulerInitializer.JOB_NAME), is(true));
    final Collection<File> files = FileUtils.listFilesAndDirs(new File(rootTempPath), TrueFileFilter.TRUE,
            TrueFileFilter.TRUE);
    assertThat(files.size(), is(1));
}