Example usage for org.apache.commons.io.filefilter DirectoryFileFilter DIRECTORY

List of usage examples for org.apache.commons.io.filefilter DirectoryFileFilter DIRECTORY

Introduction

On this page you can find example usages of org.apache.commons.io.filefilter DirectoryFileFilter DIRECTORY.

Prototype

IOFileFilter DIRECTORY

To view the source code for org.apache.commons.io.filefilter DirectoryFileFilter DIRECTORY, click the Source Link below.

Click Source Link

Document

Singleton instance of directory filter.

Usage

From source file:kr.co.leem.system.FileSystemUtils.java

/**
 *  ./*  ww  w .j  a v a2s . c  om*/
 *
 * @param srcDir ?  .
 * @param destDir   .
 * @param name .
 * @param fileNameSearch .
 * @param preserveFileDate   .
 * @see FileUtils#copyDirectory(File, File, FileFilter, boolean)
 * @see FileFilterUtils#nameFileFilter(long, long)
 * @see FileFilterUtils#prefixFileFilter(String)
 * @see FileFilterUtils#suffixFileFilter(String)
 * @see FileFilterUtils#or(IOFileFilter, IOFileFilter)
 */
public static void copyDirectory(final String srcDir, final String destDir, final String name,
        final searchFileName fileNameSearch, final boolean preserveFileDate) {
    processIO(new IOCallback<Object>() {
        public Object doInProcessIO() throws IOException, NullPointerException, IllegalArgumentException {
            IOFileFilter fileFilter;
            switch (fileNameSearch) {
            case FULL_MATCH:
                fileFilter = FileFilterUtils.nameFileFilter(name);
                break;
            case PREFIX:
                fileFilter = FileFilterUtils.prefixFileFilter(name);
                break;
            case SUFFIX:
                fileFilter = FileFilterUtils.suffixFileFilter(name);
                break;
            default:
                fileFilter = FileFilterUtils.nameFileFilter(name);
                break;
            }

            IOFileFilter fileNameFiles = FileFilterUtils.or(DirectoryFileFilter.DIRECTORY, fileFilter);
            FileUtils.copyDirectory(new File(srcDir), new File(destDir), fileNameFiles, preserveFileDate);
            return null;
        }
    });
}

From source file:de.tudarmstadt.tk.statistics.importer.ExternalResultsReader.java

/**
 * Imports the per-classifier LOD pipeline result CSVs found in the
 * subdirectories of {@code pathToDirectory} (one subdirectory per training
 * set), aggregates the measure values over all classifiers of each
 * train/test pair, and appends the averaged rows to
 * {@code AggregatedCVRandom.csv} inside the input directory.
 *
 * @param pathToDirectory directory containing one subdirectory per training
 *                        set, each holding "&lt;train&gt;To&lt;test&gt;...Results.csv" files.
 */
public static void readLODPipelineTrainTest(String pathToDirectory) {
    Locale.setDefault(Locale.ENGLISH);

    String[] semanticFeatures = new String[] { "Baseline", "+ALL", "+LOC", "+TIME", "+LOD", "+LOC+TIME",
            "+LOC+LOD", "+TIME+LOD", "+TYPES", "+CAT" };
    String[] measures = new String[] { "Percent Correct", "Weighted Precision", "Weighted Recall",
            "Weighted F-Measure" };
    String outFileName = "AggregatedCVRandom.csv";

    logger.log(Level.INFO, String.format("Importing data from directory %s.", pathToDirectory));

    // Method requires an input directory; bail out early otherwise.
    File directory = new File(pathToDirectory);
    if (!directory.isDirectory()) {
        System.err.println("Please specify a directory with the source .csv files. Aborting.");
        return;
    }

    // Empty previous output file, if there was one.
    File outputFile = new File(directory, outFileName);
    if (outputFile.exists()) {
        outputFile.delete();
    }
    // Write the CSV header. try-with-resources guarantees the writer is
    // closed even when println throws.
    try (PrintWriter out = new PrintWriter(new FileWriter(outputFile, true))) {
        String header = "Train;Test;Classifier;FeatureSet;Measure;Value";
        out.println(header);
    } catch (IOException e) {
        System.err.println("Error while writing aggregated Train-Test file.");
        e.printStackTrace();
    }

    // Map from test-set name to all result files belonging to that test set.
    HashMap<String, ArrayList<File>> filesMap = new HashMap<>();

    // Read all subdirectories that match the city names.
    // File.listFiles returns null on an I/O error, so guard against NPE.
    File[] subdirs = directory.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY);
    if (subdirs == null) {
        System.err.println("Could not list subdirectories of the source directory. Aborting.");
        return;
    }

    // Iterate all subdirectories
    for (File subDirectory : subdirs) {

        // The subdirectory name is the training-set name.
        String trainSetName = subDirectory.getName();

        // Iterate all files in the directory (again guarding against null).
        File[] filesInDirectory = subDirectory.listFiles();
        if (filesInDirectory == null) {
            continue;
        }
        List<File> fileList = Arrays.asList(filesInDirectory);

        for (File subDirFile : fileList) {
            // File names look like "<train>To<test>Results.csv"; the token
            // after "To" identifies the test data set.
            String[] filenameTokens = subDirFile.getName().split("To");

            String testDataName;

            // If only "Results.csv" remains, the file is a cross-validation
            // run on the training set itself.
            if (filenameTokens[1].equals("Results.csv")) {
                testDataName = trainSetName;
            } else {
                // NOTE(review): split() takes regexes, so the unescaped "."
                // matches any character; kept as-is to preserve behaviour.
                testDataName = filenameTokens[1].split("Results.csv")[0];
                testDataName = testDataName.split("2C.csv|4C.csv|.csv")[0];
            }

            // Group all files of the same test set under one key.
            ArrayList<File> currentFileList = filesMap.get(testDataName);
            if (currentFileList != null) {
                currentFileList.add(subDirFile);
            } else {
                ArrayList<File> newFileList = new ArrayList<>();
                newFileList.add(subDirFile);
                filesMap.put(testDataName, newFileList);
            }
        }

        ArrayList<String> outputRows = new ArrayList<String>();
        int nrDifferentClassifiers = 0;

        // Aggregate the files of each test set. The typed iterator replaces
        // the raw Map.Entry casts of the previous version.
        Iterator<Entry<String, ArrayList<File>>> it = filesMap.entrySet().iterator();
        while (it.hasNext()) {
            Entry<String, ArrayList<File>> pairs = it.next();
            String testSetName = pairs.getKey();
            ArrayList<File> testFiles = pairs.getValue();

            nrDifferentClassifiers = testFiles.size();

            // Per-row store: semantic feature label, n-gram setting and the
            // measure values summed over all classifiers.
            ArrayList<HashMap<String, Object>> values = new ArrayList<>();

            // The first file determines the expected row count.
            List<String[]> inputRowsFirstFile = readAndCheckCSV(testFiles.get(0).getAbsolutePath(), ';');

            for (int i = 0; i < inputRowsFirstFile.size(); i++) {
                HashMap<String, Object> currentRowValues = new HashMap<>();
                currentRowValues.put("semanticFeature", "");
                currentRowValues.put("classifierParameters", "");
                currentRowValues.put("aggregatedMeasureValues", new double[measures.length]);
                currentRowValues.put("nGrams", "");
                values.add(currentRowValues);
            }

            // Collect results from all files of this test set.
            for (File testFile : testFiles) {
                // Only analyse files with .csv extension; skip a previously
                // aggregated output file.
                if (!FilenameUtils.getExtension(testFile.getName().toLowerCase()).equals("csv")
                        || testFile.getName().equals("AggregatedTrainTest.csv")) {
                    continue;
                }
                // check file for consistency
                List<String[]> inputRows = readAndCheckCSV(testFile.getAbsolutePath(), ';');

                // check if length matches first file
                if (!(inputRows.size() == values.size())) {
                    // TODO error message
                } else {
                    for (int i = 0; i < inputRows.size(); i++) {
                        String[] inputCells = inputRows.get(i);

                        // Expected semantic feature for this row position.
                        String semanticFeature = semanticFeatures[i % semanticFeatures.length];

                        // Compare with equals(), not ==: the previous identity
                        // check only worked because "" happens to be interned.
                        if ("".equals(values.get(i).get("semanticFeature"))) {
                            values.get(i).put("semanticFeature", semanticFeature);
                        } else if (!values.get(i).get("semanticFeature").equals(semanticFeature)) {
                            System.err.println("Semantic Features do not match.");
                            System.exit(1);
                        }

                        String nGrams = inputCells[12];

                        if ("".equals(values.get(i).get("nGrams"))) {
                            values.get(i).put("nGrams", nGrams);
                        } else if (!values.get(i).get("nGrams").equals(nGrams)) {
                            System.err.println("N Gram Length does not match.");
                            System.exit(1);
                        }

                        // Sum up the measure values; "Percent Correct" (j == 0)
                        // is reported in percent and is first scaled to [0,1].
                        for (int j = 0; j < measures.length; j++) {
                            double valueInFile = Double.parseDouble(inputCells[j + 16]);
                            if (j == 0) {
                                valueInFile /= 100;
                            }
                            ((double[]) values.get(i).get("aggregatedMeasureValues"))[j] += valueInFile;
                        }
                    }
                }
            }

            // Average over classifiers and format the output rows.
            for (HashMap<String, Object> currentValues : values) {
                String semFeature = (String) currentValues.get("semanticFeature");
                String nGrams = (String) currentValues.get("nGrams");
                String featureSet = String.format("%s, nGrams: %s", semFeature, nGrams);

                for (int j = 0; j < measures.length; j++) {
                    String outputRow = String.format("%s;%s;%s;%s;%s;%f", trainSetName, testSetName, "0",
                            featureSet, measures[j],
                            ((double[]) currentValues.get("aggregatedMeasureValues"))[j]
                                    / nrDifferentClassifiers);
                    outputRows.add(outputRow);
                }
            }

            // avoids a ConcurrentModificationException
            it.remove();
        }

        // Append the aggregated data for this training set to the output file.
        try (PrintWriter out = new PrintWriter(new FileWriter(outputFile, true))) {
            for (String s : outputRows) {
                out.println(s);
            }
        } catch (IOException e) {
            System.err.println("Error while writing aggregated Train-Test file.");
            e.printStackTrace();
        }
    }

    logger.log(Level.INFO,
            String.format("Finished import. The aggregated data was written to %s.", outFileName));

}

From source file:com.virtualparadigm.packman.processor.JPackageManager.java

/**
 * Applies configuration-driven variable substitution, in place, to the patch
 * fileset templates and autorun script templates under {@code tempDir}.
 *
 * @param tempDir root of the unpacked package working directory.
 * @param localConfigurationFile properties file providing substitution values.
 * @return always {@code true}; per-file failures are printed and skipped so a
 *         single bad template does not abort the whole configuration step.
 */
public static boolean configure(File tempDir, File localConfigurationFile) {
    logger.info("PackageManager::configure()");
    boolean status = true;
    if (tempDir != null && localConfigurationFile != null) {
        Configuration configuration = null;
        try {
            configuration = new PropertiesConfiguration(localConfigurationFile);
        } catch (ConfigurationException ce) {
            //dont want to error out completely if config file is not loaded
            ce.printStackTrace();
        }

        if (configuration != null && !configuration.isEmpty()) {
            Map<String, String> substitutionContext = JPackageManager.createSubstitutionContext(configuration);
            StrSubstitutor strSubstitutor = new StrSubstitutor(substitutionContext);

            // Templates in the patch fileset directory.
            Collection<File> patchFiles = FileUtils.listFiles(
                    new File(tempDir.getAbsolutePath() + "/" + JPackageManager.PATCH_DIR_NAME + "/"
                            + JPackageManager.PATCH_FILES_DIR_NAME),
                    TEMPLATE_SUFFIX_FILE_FILTER, DirectoryFileFilter.DIRECTORY);
            substituteTemplateFiles(patchFiles, strSubstitutor, "patch fileset");

            // Templates in the autorun scripts directory.
            Collection<File> scriptFiles = FileUtils.listFiles(
                    new File(tempDir.getAbsolutePath() + "/" + JPackageManager.AUTORUN_DIR_NAME),
                    TEMPLATE_SUFFIX_FILE_FILTER, DirectoryFileFilter.DIRECTORY);
            substituteTemplateFiles(scriptFiles, strSubstitutor, "script");
        }
    }
    return status;
}

/**
 * Runs variable substitution in place on each file, preserving its
 * last-modified timestamp. Failures on individual files are printed and do
 * not abort the remaining files. Replaces the two duplicated loops of the
 * previous implementation.
 */
private static void substituteTemplateFiles(Collection<File> files, StrSubstitutor strSubstitutor,
        String label) {
    if (files == null) {
        return;
    }
    for (File file : files) {
        logger.debug("  processing " + label + " file: " + file.getAbsolutePath());
        try {
            // Keep the original timestamp across the rewrite.
            long lastModified = file.lastModified();
            String templateContent = FileUtils.readFileToString(file);
            templateContent = strSubstitutor.replace(templateContent);
            FileUtils.writeStringToFile(file, templateContent);
            file.setLastModified(lastModified);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

From source file:com.turn.griffin.GriffinLibCacheUtil.java

/**
 * Lists the subdirectories of the lib-cache directory.
 *
 * @return the subdirectories of the lib cache; an empty list when the cache
 *         directory is missing or unreadable (previously this threw a
 *         NullPointerException, because File.listFiles returns null then).
 */
public List<File> getLocalFileList() {
    File[] cacheDirs = new File(getLibCacheDirectory()).listFiles((FileFilter) DirectoryFileFilter.DIRECTORY);
    if (cacheDirs == null) {
        // listFiles() returns null on a missing/unreadable directory.
        return Arrays.asList();
    }
    return Arrays.asList(cacheDirs);
}

From source file:com.redhat.victims.plugin.jenkins.VictimsPostBuildScanner.java

/**
 * Return the jars to scan for vulnerabilities: either the single specified
 * jar file, or every jar found (recursively) under the output directory.
 * The files/directory might not exist prior to the build, so existence is
 * checked here rather than when the VictimsPostBuildScanner is configured.
 *
 * @param outputDirectory path to a jar file or to a directory containing jars
 * @return a list of jars to scan for vulnerabilities
 * @throws AbortException when the location is missing, or is neither a
 *         regular file nor a directory
 */
public Collection<File> listFiles(String outputDirectory) throws AbortException {
    File outputFile = new File(outputDirectory);

    // The output location must exist by the time this runs.
    if (!outputFile.exists()) {
        throw new AbortException("Output directory/file does not exist");
    }

    // A single jar file: return it as a one-element collection.
    if (outputFile.isFile()) {
        Collection<File> single = new ArrayList<File>();
        single.add(outputFile);
        return single;
    }

    // A directory: recurse into all subdirectories, collecting *.jar files.
    if (outputFile.isDirectory()) {
        return FileUtils.listFiles(outputFile, new RegexFileFilter("^(.*?)\\.jar"),
                DirectoryFileFilter.DIRECTORY);
    }

    // Something has gone horribly wrong
    throw new AbortException("Supplied output location is neither a file nor directory");
}

From source file:com.turn.griffin.GriffinLibCacheUtil.java

/**
 * Determines the latest locally cached version of each file.
 *
 * Each cached file has one subdirectory per version; "latest" is the maximum
 * of the version directory names under {@link Collections#max(java.util.Collection)}
 * ordering, as in the original implementation.
 *
 * @return map from file name to its latest cached version string; files whose
 *         version directory is missing, unreadable or empty are skipped
 *         (previously these cases threw NullPointerException /
 *         NoSuchElementException).
 */
public Map<String, String> getLocalFileLatestVersion() {

    Map<String, String> filenameAndVersion = new ArrayMap<>();

    Map<String, File> localFileMap = getLocalFileMap();
    for (Map.Entry<String, File> entry : localFileMap.entrySet()) {

        // One subdirectory per cached version. listFiles returns null on a
        // missing/unreadable directory, so guard before iterating.
        File[] versionDirs = entry.getValue().listFiles((FileFilter) DirectoryFileFilter.DIRECTORY);
        if (versionDirs == null || versionDirs.length == 0) {
            continue;
        }

        // Plain loop instead of the previous Guava Collections2.transform.
        List<String> versions = new ArrayList<>();
        for (File versionDir : versionDirs) {
            versions.add(versionDir.getName());
        }

        filenameAndVersion.put(entry.getKey(), Collections.max(versions));
    }
    return filenameAndVersion;
}

From source file:com.netthreads.mavenize.Mavenize.java

/**
 * Get project source files./*from   w  w w . j ava2s . co m*/
 * 
 * @param sourcePath
 *            The source directory path.
 * @param targetPath
 *            The target directory path.
 * 
 * @return List of parent source directories and child directories.
 * 
 * @throws IOException
 */
private List<ProjectFiles> buildProjectFiles(ProjectType projectType, String sourcePath, String targetPath)
        throws IOException {
    List<ProjectFiles> results = new LinkedList<ProjectFiles>();

    IOFileFilter srcDirFilter = DirectoryFileFilter.DIRECTORY;

    // Create a filter for Files ending in ".txt"
    String suffix = projectType.getSuffix();
    IOFileFilter srcFileFilter = FileFilterUtils.suffixFileFilter(suffix);

    // Combine the directory and file filters using an OR condition
    java.io.FileFilter srcFilter = FileFilterUtils.or(srcDirFilter, srcFileFilter);

    // Finder for all directories, no depth limit but we will limit on name.
    ProjectFileFinder projectFileFinder = new ProjectFileFinder(srcFilter, -1);

    results = projectFileFinder.find(sourcePath, targetPath, TEXT_SRC);

    return results;
}

From source file:com.netthreads.mavenize.Mavenize.java

/**
 * Get project resource files./*from ww  w.jav  a2s . c o  m*/
 * 
 * @param sourcePath
 *            The source directory path.
 * @param targetPath
 *            The target directory path.
 * 
 * @return List of parent source directories and child directories.
 * 
 * @throws IOException
 */
private List<ProjectFiles> buildResourceFiles(ProjectType projectType, String sourcePath, String targetPath)
        throws IOException {
    List<ProjectFiles> results = new LinkedList<ProjectFiles>();

    IOFileFilter resourceDirFilter = DirectoryFileFilter.DIRECTORY;

    // Create a filter for Files ending in ".txt"
    String suffix = projectType.getSuffix();
    IOFileFilter resourceFileFilter = FileFilterUtils.notFileFilter(FileFilterUtils.suffixFileFilter(suffix));

    // Combine the directory and file filters using an OR condition
    java.io.FileFilter srcFilter = FileFilterUtils.or(resourceDirFilter, resourceFileFilter);

    // Finder for all directories, no depth limit but we will limit on name.
    ProjectFileFinder projectFileFinder = new ProjectFileFinder(srcFilter, -1);

    results = projectFileFinder.find(sourcePath, targetPath, TEXT_SRC);

    return results;
}

From source file:com.virtualparadigm.packman.processor.JPackageManagerOld.java

/**
 * Applies configuration-driven variable substitution, via Velocity, to the
 * patch fileset templates and autorun script templates under {@code tempDir},
 * rewriting each file in place while preserving its timestamp.
 *
 * @param tempDir root of the unpacked package working directory.
 * @param configuration substitution values; when null or empty nothing is done.
 * @return always {@code true}; per-file failures are printed and skipped.
 */
public static boolean configureOld(File tempDir, Configuration configuration) {
    logger.info("PackageManager::configure()");
    boolean status = true;
    if (tempDir != null && configuration != null && !configuration.isEmpty()) {
        // Velocity engine that loads templates from in-memory strings.
        VelocityEngine velocityEngine = new VelocityEngine();
        Properties vProps = new Properties();
        vProps.setProperty("resource.loader", "string");
        vProps.setProperty("string.resource.loader.class",
                "org.apache.velocity.runtime.resource.loader.StringResourceLoader");
        velocityEngine.init(vProps);
        Template template = null;
        VelocityContext velocityContext = JPackageManagerOld.createVelocityContext(configuration);
        StringResourceRepository stringResourceRepository = StringResourceLoader.getRepository();
        String templateContent = null;
        StringWriter stringWriter = null;
        long lastModified;

        // Template files in the patch fileset directory.
        Collection<File> patchFiles = FileUtils.listFiles(
                new File(tempDir.getAbsolutePath() + "/" + JPackageManagerOld.PATCH_DIR_NAME + "/"
                        + JPackageManagerOld.PATCH_FILES_DIR_NAME),
                TEMPLATE_SUFFIX_FILE_FILTER, DirectoryFileFilter.DIRECTORY);

        if (patchFiles != null) {
            for (File pfile : patchFiles) {
                logger.debug("  processing patch fileset file: " + pfile.getAbsolutePath());
                try {
                    // Substitute in place but keep the original timestamp.
                    lastModified = pfile.lastModified();
                    templateContent = FileUtils.readFileToString(pfile);

                    if (templateContent.matches("(\\$)(\\{)([^\\}]*)(\\:)([^\\{]*)(\\})")) {
                        System.out.println("    converting $ to #");
                    }

                    // Rewrite ${name:default} placeholders to #{name:default}
                    // so Velocity evaluates them, escaping literal '#' first.
                    // NOTE(review): the order of these replaceAll calls is
                    // significant; do not reorder them.
                    templateContent = templateContent.replaceAll("#", "\\#");
                    templateContent = templateContent.replaceAll("(\\$)(\\{)([^\\}]*)(\\:)([^\\{]*)(\\})",
                            "#$2$3$4$5$6");

                    // Register the rewritten content as a named string
                    // template and merge it with the Velocity context.
                    stringResourceRepository.putStringResource(JPackageManagerOld.CURRENT_TEMPLATE_NAME,
                            templateContent);
                    stringWriter = new StringWriter();
                    template = velocityEngine.getTemplate(JPackageManagerOld.CURRENT_TEMPLATE_NAME);
                    template.merge(velocityContext, stringWriter);

                    templateContent = stringWriter.toString();

                    if (templateContent.matches("(#)(\\{)([^\\}]*)(\\:)([^\\{]*)(\\})")) {
                        System.out.println("    converting # back to $");
                    }
                    // Undo the placeholder rewrite and the '#' escaping.
                    templateContent = templateContent.replaceAll("(#)(\\{)([^\\}]*)(\\:)([^\\{]*)(\\})",
                            "\\$$2$3$4$5$6");
                    templateContent = templateContent.replaceAll("\\#", "#");

                    FileUtils.writeStringToFile(pfile, templateContent);
                    pfile.setLastModified(lastModified);
                } catch (Exception e) {
                    // Best effort: one failed file does not abort the rest.
                    e.printStackTrace();
                }
            }
        }

        // Template files in the autorun scripts directory; same pipeline as
        // above, minus the diagnostic prints.
        Collection<File> scriptFiles = FileUtils.listFiles(
                new File(tempDir.getAbsolutePath() + "/" + JPackageManagerOld.AUTORUN_DIR_NAME),
                TEMPLATE_SUFFIX_FILE_FILTER, DirectoryFileFilter.DIRECTORY);

        if (scriptFiles != null) {
            for (File scriptfile : scriptFiles) {
                logger.debug("  processing script file: " + scriptfile.getAbsolutePath());
                try {
                    lastModified = scriptfile.lastModified();
                    templateContent = FileUtils.readFileToString(scriptfile);
                    templateContent = templateContent.replaceAll("#", "\\#");
                    templateContent = templateContent.replaceAll("(\\$)(\\{)([^\\}]*)(\\:)([^\\{]*)(\\})",
                            "#$2$3$4$5$6");

                    stringResourceRepository.putStringResource(JPackageManagerOld.CURRENT_TEMPLATE_NAME,
                            templateContent);
                    stringWriter = new StringWriter();
                    template = velocityEngine.getTemplate(JPackageManagerOld.CURRENT_TEMPLATE_NAME);
                    template.merge(velocityContext, stringWriter);

                    templateContent = stringWriter.toString();
                    templateContent = templateContent.replaceAll("(#)(\\{)([^\\}]*)(\\:)([^\\{]*)(\\})",
                            "\\$$2$3$4$5$6");
                    templateContent = templateContent.replaceAll("\\#", "#");

                    FileUtils.writeStringToFile(scriptfile, templateContent);
                    scriptfile.setLastModified(lastModified);

                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
    return status;
}

From source file:net.imagini.cassandra.DumpSSTables.SSTableExport.java

/**
 * Exports the contents of the given SSTable, or of every "*Data.db" SSTable
 * found recursively when the argument is a directory. Depending on the
 * command line, either enumerates the keys or exports the rows.
 *
 * @param ssTableFileName SSTable data file, or a directory to scan for them.
 * @param cmd parsed command line controlling key enumeration/exclusion.
 * @throws IOException if reading an SSTable fails.
 */
private static void readSSTables(File ssTableFileName, CommandLine cmd) throws IOException {
    if (ssTableFileName.exists()) {
        if (ssTableFileName.isDirectory()) {
            // Recurse: handle every *Data.db file anywhere under this directory.
            Collection<File> files = org.apache.commons.io.FileUtils.listFiles(ssTableFileName,
                    new RegexFileFilter("^.*Data\\.db"), DirectoryFileFilter.DIRECTORY);
            for (File file : files) {
                readSSTables(file, cmd);
            }
        } else if (ssTableFileName.isFile()) {
            Descriptor descriptor = Descriptor.fromFilename(ssTableFileName.getAbsolutePath());
            if (Schema.instance.getCFMetaData(descriptor) == null) {
                // Fixed typo in the error message: "keysapce" -> "keyspace".
                System.err.println(String.format(
                        "The provided column family is not part of this cassandra database: keyspace = %s, column family = %s",
                        descriptor.ksname, descriptor.cfname));
                System.exit(1);
            }

            if (cmd.hasOption(ENUMERATEKEYS_OPTION)) {
                enumeratekeys(descriptor, System.out);
            } else {
                // Export either the explicitly requested keys or everything,
                // honouring any excluded keys.
                if ((cmd.getOptionValues(KEY_OPTION) != null) && (cmd.getOptionValues(KEY_OPTION).length > 0))
                    export(descriptor, System.out, Arrays.asList(cmd.getOptionValues(KEY_OPTION)),
                            cmd.getOptionValues(EXCLUDEKEY_OPTION));
                else
                    export(descriptor, cmd.getOptionValues(EXCLUDEKEY_OPTION));
            }
        }
    }

}