Example usage for org.apache.commons.io FileUtils listFiles

Introduction

This page presents example usages of org.apache.commons.io.FileUtils.listFiles, collected from real-world source files.

Prototype

public static Collection<File> listFiles(File directory, String[] extensions, boolean recursive)

Document

Finds files within a given directory (and optionally its subdirectories) which match an array of extensions. Extensions are given without the leading dot (for example "java" rather than ".java"); passing null as the extensions parameter matches all files.
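
A minimal, self-contained sketch of a typical call (the class name and directory path are illustrative, not part of the library):

import java.io.File;
import java.util.Collection;

import org.apache.commons.io.FileUtils;

public class ListFilesExample {
    public static void main(String[] args) {
        // Recursively collect all .java and .xml files under /tmp/project;
        // passing null instead of the extensions array would match every file.
        Collection<File> files = FileUtils.listFiles(
                new File("/tmp/project"), new String[] { "java", "xml" }, true);
        for (File file : files) {
            System.out.println(file.getAbsolutePath());
        }
    }
}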

Usage

From source file:io.github.bonigarcia.wdm.BrowserManager.java

public String forceCache(String repository) {
    String driverInCache = null;
    for (String driverName : getDriverName()) {
        log.trace("Checking if {} exists in cache {}", driverName, repository);

        // Passing null for the extensions matches every file; true recurses into subdirectories
        Collection<File> listFiles = FileUtils.listFiles(new File(repository), null, true);
        Object[] array = listFiles.toArray();
        Arrays.sort(array, Collections.reverseOrder());

        for (Object f : array) {
            driverInCache = f.toString();
            log.trace("Checking {}", driverInCache);
            if (driverInCache.contains(driverName)) {
                log.info("Found {} in cache: {} ", driverName, driverInCache);
                break;
            } else {
                driverInCache = null;
            }
        }

        if (driverInCache == null) {
            log.trace("{} do not exist in cache {}", driverName, repository);
        } else {
            break;
        }
    }
    return driverInCache;
}

From source file:gobblin.writer.AvroToParquetHdfsTimePartitionedWriterTest.java

@Test
public void testWriter() throws IOException {

    // Write three records, each should be written to a different file
    GenericRecordBuilder genericRecordBuilder = new GenericRecordBuilder(schema);

    // This timestamp corresponds to 2015/01/01
    genericRecordBuilder.set("timestamp", 1420099200000l);
    writer.write(genericRecordBuilder.build());

    // This timestamp corresponds to 2015/01/02
    genericRecordBuilder.set("timestamp", 1420185600000l);
    writer.write(genericRecordBuilder.build());

    // This timestamp corresponds to 2015/01/03
    genericRecordBuilder.set("timestamp", 1420272000000l);
    writer.write(genericRecordBuilder.build());

    // Check that the writer reports that 3 records have been written
    Assert.assertEquals(writer.recordsWritten(), 3);

    writer.close();
    writer.commit();

    // Check that 3 files were created
    Assert.assertEquals(FileUtils.listFiles(new File(TEST_ROOT_DIR), new String[] { "parquet" }, true).size(),
            3);

    // Check if each file exists, and in the correct location
    File baseOutputDir = new File(OUTPUT_DIR,
            BASE_FILE_PATH + Path.SEPARATOR + ConfigurationKeys.DEFAULT_WRITER_PARTITION_LEVEL);
    Assert.assertTrue(baseOutputDir.exists());

    File outputDir20150101 = new File(baseOutputDir,
            "2015" + Path.SEPARATOR + "01" + Path.SEPARATOR + "01" + Path.SEPARATOR + FILE_NAME);
    Assert.assertTrue(outputDir20150101.exists());

    File outputDir20150102 = new File(baseOutputDir,
            "2015" + Path.SEPARATOR + "01" + Path.SEPARATOR + "02" + Path.SEPARATOR + FILE_NAME);
    Assert.assertTrue(outputDir20150102.exists());

    File outputDir20150103 = new File(baseOutputDir,
            "2015" + Path.SEPARATOR + "01" + Path.SEPARATOR + "03" + Path.SEPARATOR + FILE_NAME);
    Assert.assertTrue(outputDir20150103.exists());
}

From source file:initializers.FSInitializer.java

@Override
public void doAnalysis(Federation federation, List<?> dataProviders, boolean fedFlag, String[] elements2Analyze,
        String elmtVoc) throws InstantiationException, IllegalAccessException, ClassNotFoundException,
        SAXException, ParserConfigurationException {

    // Vector<String> xmlElements = new Vector<>();
    HashMap<String, Double> xmlElements = new HashMap<>();
    Vector<String> xmlElementsDistinct = new Vector<>();

    //HashMap<String,HashMap<HashMap<String, String>, Integer>> attributes = new HashMap<>();
    HashMap<String, Integer> attributes = new HashMap<>();

    HashMap<String, Integer> elementDims = new HashMap<>();
    HashMap<String, Integer> elementCompletness = new HashMap<>();
    Vector<String> elementEntropy = new Vector<>();
    HashMap<String, Double> elementImportance = new HashMap<>();

    Properties props = new Properties();
    try {
        props.load(new FileInputStream("configure.properties"));
    } catch (IOException e) {
        // FileNotFoundException is a subclass of IOException, so one handler covers both
        e.printStackTrace();
        System.exit(-1);
    }

    String resultsPath = props.getProperty(AnalyticsConstants.resultsPath);
    String[] elementVocs = elmtVoc.split(",");

    ConfigureLogger conf = new ConfigureLogger();
    Logger logger = conf.getLogger("vocAnalysis",
            resultsPath + "Analysis_Results" + File.separator + "vocAnalysis.log");

    Logger loggerAtt = conf.getLogger("attributeAnalysis",
            resultsPath + "Analysis_Results" + File.separator + "attributeAnalysis.log");

    Logger loggerEl = conf.getLogger("elementAnalysis",
            resultsPath + "Analysis_Results" + File.separator + "elementAnalysis.log");

    for (int i = 0; i < dataProviders.size(); i++) {

        String[] extensions = { "xml" };
        //FileUtils utils = new FileUtils();
        Collection<File> xmls = FileUtils.listFiles((File) dataProviders.get(i), extensions, true);

        String filterXMLs = props.getProperty(AnalyticsConstants.filteringEnabled);

        if (filterXMLs.equalsIgnoreCase("true")) {
            Filtering filtering = new Filtering();
            String expression = props.getProperty(AnalyticsConstants.xpathExpression);
            System.out.println("Filtering is enabled.");
            Iterator<File> iterator = xmls.iterator();
            while (iterator.hasNext()) {
                File next = iterator.next();
                if (!filtering.filterXML(next, expression)) {
                    System.out.println("File:" + next.getName() + " is filtered out.");
                    iterator.remove();
                } else
                    System.out.println("File:" + next.getName() + " is kept in xmls' collection.");

            }
        }

        try {

            // Repository repo = new Repository(xmls, elements2Analyze);

            //distinctAtts, 
            Repository repo = new Repository(xmls, attributes, xmlElements, xmlElementsDistinct, elementDims,
                    elementCompletness, elementEntropy, elementImportance, props);

            repo.setRepoName(((File) dataProviders.get(i)).getName());
            repo.setRecordsNum(xmls.size());

            if (fedFlag) {

                federation.addRepoName(((File) dataProviders.get(i)).getName());

                System.out.println("######################################");
                System.out.println("Analysing repository:" + repo.getRepoName());
                System.out.println("Number of records:" + xmls.size());
                repo.parseXMLs(elements2Analyze, elementVocs);

                federation.appendFreqElements(repo.getElementFrequency());

                federation.appendCompletnessElements(repo.getElementCompleteness(), dataProviders.size());
                federation.appendImportanceElements(repo.getElementImportance(), dataProviders.size());

                federation.appendDimensionalityElements(repo.getElementDimensions());

                federation.appendEntropyElements(repo.computeElementEntropy(), dataProviders.size());

                this.logElementAnalysis(loggerEl, repo.getRepoName(), resultsPath);

                repo.computeElementValueFreq(elementVocs, logger);

                // FileUtils.deleteDirectory(new File("buffer"));

                repo.getAttributeFrequency(loggerAtt);

                federation.appendFileSize(repo.getFileSizeDistribution());

                federation.appendNoRecords(repo.getXmls().size());
                repo.storeRepoGeneralInfo(true);
                federation.appendInformativeness(repo.getAvgRepoInformativeness());
                federation.appendSchemas(repo.getSchema(false));
                federation.appendRequirements(repo.getRequirements());

                System.out.println("Repository:" + repo.getRepoName() + " analysis completed.");
                System.out.println("======================================");
            } else {
                System.out.println("######################################");
                System.out.println("Analysing repository:" + repo.getRepoName());
                System.out.println("Number of records:" + repo.getXmls().size());
                repo.parseXMLs(elements2Analyze, elementVocs);
                repo.getElementFrequency();
                repo.getElementCompleteness();
                repo.getElementDimensions();
                repo.getElementImportance();

                repo.computeElementEntropy();

                this.logElementAnalysis(loggerEl, repo.getRepoName(), resultsPath);
                // System.out.println(repo.getVocabularies());

                repo.computeElementValueFreq(elementVocs, logger);

                repo.storeRepoGeneralInfo(false);

                // FileUtils.deleteDirectory(new File("buffer"));

                repo.getAttributeFrequency(loggerAtt);

                System.out.println("======================================");
                System.out.println("Repository:" + repo.getRepoName() + " analysis completed.");
                System.out.println("======================================");

            }
        } catch (IOException ex) {

            ex.printStackTrace();
        }
        xmlElements.clear();
        xmlElementsDistinct.clear();
        attributes.clear();
        //   distinctAtts.clear();
        elementDims.clear();
        elementCompletness.clear();

        elementEntropy.clear();
        elementImportance.clear();

    }

    if (fedFlag) {
        try {
            federation.getElementsSFrequency();
            federation.getElementsMCompletness();
            federation.getElementsMImportance();
            federation.getElementsMaxDimensionality();
            federation.getElementsMEntropy();
            federation.getAttributesSumFreq(loggerAtt);
            federation.getElementValueSumFreq(elmtVoc, logger);
            System.out.println("Average file size:" + federation.getAverageFileSize() + " Bytes");
            System.out.println("Sum number of records:" + federation.getRecordsSum() + " records");
            System.out.println("Sum storage requirements:" + federation.getRequirements() + " bytes");
            System.out.println("AVG informativeness(bits):" + federation.getAVGInformativeness());

            federation.storeGeneralInfo2CSV();
            this.logElementAnalysis(loggerEl, "Federation", resultsPath);

        } catch (IOException ex) {
            ex.printStackTrace();
        }

    }

}

From source file:com.github.mbenson.privileged.weaver.FilesystemWeaver.java

/**
 * Clear the way by deleting classfiles woven with a different
 * {@link Policy}.
 * 
 * @throws NotFoundException
 */
public void prepare() throws NotFoundException {
    info("preparing %s; policy = %s", target, policy);
    final Set<File> toDelete = new TreeSet<File>();
    for (final Class<?> type : getDeclaringClasses(findPrivilegedMethods())) {
        final CtClass ctClass = classPool.get(type.getName());
        final String policyValue = toString(ctClass.getAttribute(generateName(POLICY_NAME)));
        if (policyValue == null || policyValue.equals(policy.name())) {
            continue;
        }
        debug("class %s previously woven with policy %s", type.getName(), policyValue);
        final File packageDir = new File(target,
                StringUtils.replaceChars(ctClass.getPackageName(), '.', File.separatorChar));

        // simple classname of outermost class, plus any inner classes:
        final String pattern = new StringBuilder(getOutermost(type).getSimpleName()).append("(\\$.+)??\\.class")
                .toString();

        debug("searching %s for pattern '%s'", packageDir.getAbsolutePath(), pattern);
        toDelete.addAll(FileUtils.listFiles(packageDir, new RegexFileFilter(pattern), null));
    }
    if (toDelete.isEmpty()) {
        return;
    }
    info("Deleting %s files...", toDelete.size());
    debug(toDelete.toString());
    for (File f : toDelete) {
        if (!f.delete()) {
            debug("Failed to delete %s", f);
        }
    }
}

From source file:gobblin.writer.AvroHdfsTimePartitionedWriterTest.java

@Test
public void testWriter() throws IOException {

    // Write three records, each should be written to a different file
    GenericRecordBuilder genericRecordBuilder = new GenericRecordBuilder(this.schema);

    // This timestamp corresponds to 2015/01/01
    genericRecordBuilder.set("timestamp", 1420099200000l);
    this.writer.write(genericRecordBuilder.build());

    // This timestamp corresponds to 2015/01/02
    genericRecordBuilder.set("timestamp", 1420185600000l);
    this.writer.write(genericRecordBuilder.build());

    // This timestamp corresponds to 2015/01/03
    genericRecordBuilder.set("timestamp", 1420272000000l);
    this.writer.write(genericRecordBuilder.build());

    // Check that the writer reports that 3 records have been written
    Assert.assertEquals(this.writer.recordsWritten(), 3);

    this.writer.close();
    this.writer.commit();

    // Check that 3 files were created
    Assert.assertEquals(FileUtils.listFiles(new File(TEST_ROOT_DIR), new String[] { "avro" }, true).size(), 3);

    // Check if each file exists, and in the correct location
    File baseOutputDir = new File(OUTPUT_DIR,
            BASE_FILE_PATH + Path.SEPARATOR + ConfigurationKeys.DEFAULT_WRITER_PARTITION_LEVEL);
    Assert.assertTrue(baseOutputDir.exists());

    File outputDir20150101 = new File(baseOutputDir,
            "2015" + Path.SEPARATOR + "01" + Path.SEPARATOR + "01" + Path.SEPARATOR + FILE_NAME);
    Assert.assertTrue(outputDir20150101.exists());

    File outputDir20150102 = new File(baseOutputDir,
            "2015" + Path.SEPARATOR + "01" + Path.SEPARATOR + "02" + Path.SEPARATOR + FILE_NAME);
    Assert.assertTrue(outputDir20150102.exists());

    File outputDir20150103 = new File(baseOutputDir,
            "2015" + Path.SEPARATOR + "01" + Path.SEPARATOR + "03" + Path.SEPARATOR + FILE_NAME);
    Assert.assertTrue(outputDir20150103.exists());
}

From source file:com.stevpet.sonar.plugins.dotnet.mscover.sensor.IntegrationTestCoverSensor.java

private List<File> convertVsTestCoverageFilesToXml(String integrationTestsDir) {
    List<File> xmlFiles = new ArrayList<File>();
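    // Recursively gather every *.coverage file under the integration-tests directory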
    Collection<File> files = FileUtils.listFiles(new File(integrationTestsDir), new String[] { "coverage" },
            true);
    for (File file : files) {
        String xmlPath = transformIfNeeded(file.getAbsolutePath());
        xmlFiles.add(new File(xmlPath));
    }
    return xmlFiles;
}

From source file:com.l2jserver.service.game.scripting.impl.ScriptContextImpl.java

@Override
public synchronized void init() {

    if (compilationResult != null) {
        log.error("", new Exception("Init request on initialized ScriptContext"));
        return;
    }

    ScriptCompiler scriptCompiler = instantiateCompiler();

    // The compiler's supported file types serve as the extension filter; true recurses into subdirectories
    Collection<File> files = FileUtils.listFiles(root, scriptCompiler.getSupportedFileTypes(), true);

    if (parentScriptContext != null) {
        scriptCompiler.setParentClassLoader(parentScriptContext.getCompilationResult().getClassLoader());
    }

    scriptCompiler.setLibraries(libraries);
    compilationResult = scriptCompiler.compile(files);

    getClassListener().postLoad(compilationResult.getCompiledClasses());

    if (childScriptContexts != null) {
        for (ScriptContext context : childScriptContexts) {
            context.init();
        }
    }
}

From source file:de.tudarmstadt.ukp.dkpro.tc.weka.task.CrossValidationExperimentWithFoldControl.java

@Override
protected void init()
        throws IllegalStateException, InstantiationException, IllegalAccessException, ClassNotFoundException {

    if (experimentName == null || preprocessing == null) {
        throw new IllegalStateException("You must set experiment name, datawriter and aggregate.");
    }

    if (numFolds < 2) {
        throw new IllegalStateException(
                "Number of folds is not configured correctly. Number of folds needs to be at least 2.");
    }

    // check the validity of the experiment setup first
    checkTask = new ValidityCheckTask();

    // preprocessing on the entire data set and only once
    preprocessTask = new PreprocessTask();
    preprocessTask.setPreprocessing(preprocessing);
    preprocessTask.setOperativeViews(operativeViews);
    preprocessTask.setType(preprocessTask.getType() + "-" + experimentName);

    // inner batch task (carried out numFolds times)
    BatchTask crossValidationTask = new BatchTask() {
        @Override
        public void execute(TaskContext aContext) throws Exception {
            File xmiPathRoot = aContext.getStorageLocation(PreprocessTask.OUTPUT_KEY_TRAIN,
                    AccessMode.READONLY);
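            // Recursively collect the preprocessed .bin files; their paths define the fold dimension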
            Collection<File> files = FileUtils.listFiles(xmiPathRoot, new String[] { "bin" }, true);
            String[] fileNames = new String[files.size()];
            int i = 0;
            for (File f : files) {
                // adding file paths, not names
                fileNames[i] = f.getAbsolutePath();
                i++;
            }
            Arrays.sort(fileNames);
            if (numFolds == Constants.LEAVE_ONE_OUT) {
                numFolds = fileNames.length;
            }
            // don't change any names!!
            FoldDimensionBundle<String> foldDim = new FoldDimensionBundle<String>("files",
                    Dimension.create("", fileNames), numFolds, comparator);
            Dimension<File> filesRootDim = Dimension.create("filesRoot", xmiPathRoot);

            ParameterSpace pSpace = new ParameterSpace(foldDim, filesRootDim);
            setParameterSpace(pSpace);

            super.execute(aContext);
        }
    };

    // ================== SUBTASKS OF THE INNER BATCH TASK =======================

    // collecting meta features only on the training data (numFolds times)
    metaTask = new MetaInfoTask();
    metaTask.setOperativeViews(operativeViews);
    metaTask.setType(metaTask.getType() + experimentName);

    // extracting features from training data (numFolds times)
    extractFeaturesTrainTask = new ExtractFeaturesTask();
    extractFeaturesTrainTask.setTesting(false);
    extractFeaturesTrainTask.setType(extractFeaturesTrainTask.getType() + "-Train-" + experimentName);
    extractFeaturesTrainTask.addImport(metaTask, MetaInfoTask.META_KEY);

    // extracting features from test data (numFolds times)
    extractFeaturesTestTask = new ExtractFeaturesTask();
    extractFeaturesTestTask.setTesting(true);
    extractFeaturesTestTask.setType(extractFeaturesTestTask.getType() + "-Test-" + experimentName);
    extractFeaturesTestTask.addImport(metaTask, MetaInfoTask.META_KEY);

    // classification (numFolds times)
    testTask = new WekaTestTask();
    testTask.setType(testTask.getType() + "-" + experimentName);

    if (innerReports != null) {
        for (Class<? extends Report> report : innerReports) {
            testTask.addReport(report);
        }
    } else {
        // add default report
        testTask.addReport(WekaClassificationReport.class);
    }
    // always add OutcomeIdReport
    testTask.addReport(WekaOutcomeIDReport.class);

    testTask.addImport(extractFeaturesTrainTask, ExtractFeaturesTask.OUTPUT_KEY,
            WekaTestTask.TEST_TASK_INPUT_KEY_TRAINING_DATA);
    testTask.addImport(extractFeaturesTestTask, ExtractFeaturesTask.OUTPUT_KEY,
            WekaTestTask.TEST_TASK_INPUT_KEY_TEST_DATA);

    // ================== CONFIG OF THE INNER BATCH TASK =======================

    crossValidationTask.addImport(preprocessTask, PreprocessTask.OUTPUT_KEY_TRAIN);
    crossValidationTask.setType(crossValidationTask.getType() + experimentName);
    crossValidationTask.addTask(metaTask);
    crossValidationTask.addTask(extractFeaturesTrainTask);
    crossValidationTask.addTask(extractFeaturesTestTask);
    crossValidationTask.addTask(testTask);
    // report of the inner batch task (sums up results for the folds)
    // we want to re-use the old CV report, we need to collect the evaluation.bin files from
    // the test task here (with another report)
    crossValidationTask.addReport(WekaBatchTrainTestReport.class);

    // DKPro Lab issue 38: must be added as *first* task
    addTask(checkTask);
    addTask(preprocessTask);
    addTask(crossValidationTask);
}

From source file:net.sf.zekr.engine.search.lucene.LuceneIndexManager.java

/**
 * Creates an index in the place user selects. This method first checks if an index already exists for
 * all-users or not. If not it continues to ask where to create index files.<br>
 * It uses underlying cache to store {@link ZekrIndexReader}s already read in this session.
 *
 * @param pathArray the first element should be for me-only mode, the second element for all-users.
 * @param quranText
 * @param indexId
 * @param indexPath
 * @param indexPathKey
 * @param indexVersionKey
 * @return cached or newly-created {@link ZekrIndexReader} instance
 * @throws IndexingException
 */
@SuppressWarnings("unchecked")
private ZekrIndexReader getIndex(String[] pathArray, IQuranText quranText, String indexId, String indexPath,
        String indexPathKey, String indexVersionKey) throws IndexingException {
    try {
        ZekrIndexReader zir = indexReaderMap.get(indexId);
        if (zir == null) {
            if (indexPath != null && IndexReader.indexExists(new SimpleFSDirectory(new File(indexPath)))) {
                return newIndexReader(quranText, indexId, indexPath);
            } else {
                // check if index is already created for all-users, and its modify date is newer than zekr build date
                File indexDir = new File(pathArray[1]);
                SimpleFSDirectory dir = new SimpleFSDirectory(indexDir);
                if (IndexReader.indexExists(dir)) {
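                    // Scan only the top level of the index directory (recursive = false)
                    // for Lucene .cfs segment files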
                    Collection<File> listFiles = FileUtils.listFiles(indexDir, new String[] { "cfs" }, false);
                    if (listFiles.size() > 0) {
                        if (FileUtils.isFileNewer(listFiles.iterator().next(), GlobalConfig.ZEKR_BUILD_DATE)) {
                            ZekrIndexReader res;
                            res = newIndexReader(quranText, indexId, pathArray[1]);
                            props.setProperty(indexPathKey, pathArray[1]);
                            props.setProperty(indexVersionKey, GlobalConfig.ZEKR_BUILD_NUMBER);
                            return res;
                        }
                    }
                }
                IndexCreator indexCreator = new IndexCreator(pathArray, quranText,
                        LuceneAnalyzerFactory.getAnalyzer(quranText));
                if (indexCreator.indexQuranText()) {
                    props.setProperty(indexPathKey, indexCreator.getIndexDir());
                    props.setProperty(indexVersionKey, GlobalConfig.ZEKR_BUILD_NUMBER);
                    return newIndexReader(quranText, indexId, indexCreator.getIndexDir());
                } else {
                    // a non-interruption (bad) exception occurred
                    if (indexCreator.isIndexingErrorOccurred() && indexCreator.getIndexingException() != null) {
                        MessageBoxUtils.showActionFailureError(indexCreator.getIndexingException());
                    }
                    return null;
                }
            }
        } else {
            return zir;
        }
    } catch (Exception e) {
        throw new IndexingException(e);
    }
}

From source file:com.norconex.committer.elasticsearch.ElasticsearchCommitterTest.java

@Test
public void testRemoveQueuedFilesAfterAdd() throws Exception {

    // Add new doc to ES
    String id = "1";
    committer.add(id, new NullInputStream(0), new Properties());
    committer.commit();

    // After commit, make sure queue is emptied of all files
    assertTrue(FileUtils.listFiles(queue, null, true).isEmpty());
}