Example usage for org.apache.commons.io FileUtils listFiles

Introduction

On this page you can find example usage of org.apache.commons.io FileUtils.listFiles.

Prototype

public static Collection<File> listFiles(File directory, String[] extensions, boolean recursive)

Document

Finds files within a given directory (and optionally its subdirectories) which match an array of extensions.
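
As a quick orientation, here is a minimal, self-contained sketch of this overload; the directory path and extension list are illustrative, not taken from the examples below. Note that extensions are given without a leading dot, and passing null instead of the array matches all files.

import java.io.File;
import java.util.Collection;

import org.apache.commons.io.FileUtils;

public class ListFilesSketch {
    public static void main(String[] args) {
        // Illustrative directory; substitute one that exists on your system.
        File dir = new File("/tmp/data");

        // Collect *.csv and *.txt files, descending into subdirectories.
        Collection<File> files = FileUtils.listFiles(dir, new String[] { "csv", "txt" }, true);

        for (File file : files) {
            System.out.println(file.getAbsolutePath());
        }
    }
}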

Usage

From source file:com.evolveum.midpoint.tools.gui.PropertiesGenerator.java
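
Here listFiles gathers every *.properties file under each configured folder (recursively if requested); each file is then merged into a locale-specific target properties file, skipping targets that are already up to date.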

private List<File> reloadProperties(File parent, List<String> folders, boolean recursive, Locale locale,
        File target) throws IOException {
    List<File> actualTargetFiles = new ArrayList<File>();

    Properties baseProperties;
    Properties targetProperties;
    for (String path : folders) {
        File realFolder = new File(parent, path);

        Reader baseReader = null;
        Reader targetReader = null;
        Collection<File> files = FileUtils.listFiles(realFolder, new String[] { "properties" }, recursive);
        for (File file : files) {
            try {
                File targetPropertiesFile = createTargetFile(file, target, locale);
                actualTargetFiles.add(targetPropertiesFile);
                if (targetPropertiesFile.exists() && !FileUtils.isFileNewer(file, targetPropertiesFile)) {
                    System.out.println("File was not modified: " + targetPropertiesFile.getName());
                    continue;
                }

                baseReader = new InputStreamReader(new FileInputStream(file), ENCODING);
                baseProperties = new Properties();
                baseProperties.load(baseReader);

                targetProperties = new SortedProperties();
                if (targetPropertiesFile.exists() && targetPropertiesFile.canRead()) {
                    targetReader = new InputStreamReader(new FileInputStream(targetPropertiesFile), ENCODING);
                    targetProperties.load(targetReader);
                }

                PropertiesStatistics stats = mergeProperties(baseProperties, targetProperties);
                this.stats.increment(stats);

                backupExistingAndSaveNewProperties(targetProperties, targetPropertiesFile);
                System.out.println(targetPropertiesFile.getName() + ": " + stats);
            } finally {
                IOUtils.closeQuietly(baseReader);
                IOUtils.closeQuietly(targetReader);
            }
        }
    }

    return actualTargetFiles;
}

From source file:de.pawlidi.openaletheia.generator.KeyGenerator.java
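
Here listFiles performs a non-recursive scan of the given directory for files with the key file extension and returns the content of the first file whose name matches the requested private or public key suffix.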

public static String readKeyFile(final String directory, final boolean privateKeyFile) {
    if (!StringUtils.isBlank(directory)) {
        File rootDir = new File(directory);
        if (rootDir.exists() && rootDir.isDirectory()) {
            Collection<File> files = FileUtils.listFiles(rootDir, new String[] { KEY_FILE_EXTENSION }, false);
            if (!files.isEmpty()) {
                for (File file : files) {
                    if (file.getName().endsWith((privateKeyFile ? PRIVATE_KEY_FILE : PUBLIC_KEY_FILE))) {
                        try {
                            return FileUtils.readFileToString(file, Converter.UTF_8);
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                }

            }
        }
    }
    return null;
}

From source file:net.chris54721.infinitycubed.workers.PackLoader.java
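
Here listFiles enumerates the modpack *.json descriptors in the data folder (non-recursively) after they have been updated, so that each pack can be loaded by its base name.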

@Override
public void run() {
    try {
        LogHelper.info("Loading and updating modpacks");
        String publicJson = Utils.toString(new URL(Reference.FILES_URL + "public.json"));
        Type stringIntMap = new TypeToken<LinkedHashMap<String, Integer>>() {
        }.getType();
        LinkedHashMap<String, Integer> publicObjects = Reference.DEFAULT_GSON.fromJson(publicJson,
                stringIntMap);
        File localJson = new File(Resources.getFolder(Reference.PACKS_FOLDER), "public.json");
        List<String> updatePacks = new ArrayList<String>();
        if (localJson.isFile()) {
            String localPublicJson = Files.toString(localJson, Charsets.UTF_8);
            LinkedHashMap<String, Integer> localPublicObjects = Reference.DEFAULT_GSON.fromJson(localPublicJson,
                    stringIntMap);
            for (String pack : publicObjects.keySet()) {
                if (!localPublicObjects.containsKey(pack)
                        || !localPublicObjects.get(pack).equals(publicObjects.get(pack))) {
                    updatePacks.add(pack);
                }
            }
        } else
            updatePacks.addAll(publicObjects.keySet());
        Files.write(publicJson, localJson, Charsets.UTF_8);
        if (updatePacks.size() > 0) {
            for (String pack : updatePacks) {
                LogHelper.info("Updating JSON file for modpack " + pack);
                URL packJsonURL = Resources.getUrl(Resources.ResourceType.PACK_JSON, pack + ".json");
                File packJsonFile = Resources.getFile(Resources.ResourceType.PACK_JSON, pack + ".json");
                if (packJsonFile.isFile())
                    packJsonFile.delete();
                Downloadable packJsonDownloadable = new Downloadable(packJsonURL, packJsonFile);
                if (!packJsonDownloadable.download())
                    LogHelper.error("Failed updating JSON for modpack " + pack);
            }
        }
        Collection<File> packJsons = FileUtils.listFiles(Resources.getFolder(Reference.DATA_FOLDER),
                new String[] { "json" }, false);
        for (File packJsonFile : packJsons)
            loadPack(FilenameUtils.getBaseName(packJsonFile.getName()));
    } catch (Exception e) {
        LogHelper.fatal("Failed updating and loading public packs", e);
    }
}

From source file:de.betterform.agent.web.utils.SortingWalker.java
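
Here listFiles collects the files matching the configured extensions, and the result is returned as a sorted list.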

private List<File> sortFilesInDir(File directory) {
    List<File> list = new ArrayList<>(FileUtils.listFiles(directory, this.fileExtensions, this.recursive));
    Collections.sort(list, defaultComparator);
    return list;
}

From source file:com.garethahealy.camel.file.loadbalancer.example1.routes.HandlesOneFileMultipleReadersTest.java
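
This test uses the IOFileFilter overload of listFiles (a prefix filter, with a null directory filter so subdirectories are not searched) to assert that exactly one file named afile1.log was copied into the first consumer's .camel directory.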

@Test
public void handlesOneFileMultipleReaders() throws InterruptedException, MalformedURLException {
    MockEndpoint first = getMockEndpoint("mock:endFirst");
    first.setExpectedMessageCount(1);
    first.expectedBodiesReceived("afile1.log");
    first.setResultWaitTime(TimeUnit.SECONDS.toMillis(15));
    first.setAssertPeriod(TimeUnit.SECONDS.toMillis(1));

    MockEndpoint second = getMockEndpoint("mock:endSecond");
    second.setExpectedMessageCount(0);
    second.setResultWaitTime(TimeUnit.SECONDS.toMillis(15));
    second.setAssertPeriod(TimeUnit.SECONDS.toMillis(1));

    MockEndpoint third = getMockEndpoint("mock:endThird");
    third.setExpectedMessageCount(0);
    third.setResultWaitTime(TimeUnit.SECONDS.toMillis(15));
    third.setAssertPeriod(TimeUnit.SECONDS.toMillis(1));

    first.assertIsSatisfied();
    second.assertIsSatisfied();
    third.assertIsSatisfied();

    File firstDirectory = FileUtils.toFile(new URL("file:" + rootDirectory + "/.camel0"));
    File secondDirectory = FileUtils.toFile(new URL("file:" + rootDirectory + "/.camel1"));
    File thirdDirectory = FileUtils.toFile(new URL("file:" + rootDirectory + "/.camel2"));

    Assert.assertTrue(".camel0 doesnt exist", firstDirectory.exists());
    Assert.assertFalse(".camel1 exists", secondDirectory.exists());
    Assert.assertFalse(".camel2 exists", thirdDirectory.exists());

    Collection<File> firstFiles = FileUtils.listFiles(firstDirectory,
            FileFilterUtils.prefixFileFilter("afile1.log"), null);

    Assert.assertNotNull(firstFiles);

    //Directory should have only copied one file
    Assert.assertEquals(1, firstFiles.size());
}

From source file:functionaltests.workflow.TestXMLTransformer.java
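
Here listFiles recursively collects the *.xml job descriptors from both the test folder and the samples folder; each descriptor (except deliberately invalid ones) is then transformed and compared.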

@Test
public void testXMLTransformer() throws Throwable {
    File folder = new File(jobDescriptorsFolder.toURI());
    Collection<File> testJobDescrFiles = FileUtils.listFiles(folder, new String[] { "xml" }, true);

    File samplesJobDescrFiles = new File(System.getProperty("pa.scheduler.home") + File.separator + "samples"
            + File.separator + "workflows");

    log(samplesJobDescrFiles.getAbsolutePath());

    Collection<File> samples = FileUtils.listFiles(samplesJobDescrFiles, new String[] { "xml" }, true);
    samples.addAll(testJobDescrFiles);

    log("Treating " + samples.size() + " job descriptors.");

    for (File file : samples) {
        // skip descriptor files which are there to test invalid job description
        if (file.getName().contains("invalid")) {
            continue;
        }

        try {
            transformAndCompare(file);
        } catch (Exception e) {
            e.printStackTrace();
            throw new Exception("An exception occured while treating the file " + file.getAbsolutePath(), e);
        }

    }
}

From source file:de.qaware.chronix.importer.csv.FileImporter.java
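
Here listFiles recursively collects *.gz and *.csv files beneath the given folder; the files are then parsed in parallel and their data points handed to the supplied consumers.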

/**
 * Reads the given file / folder and calls the given consumers with the extracted points.
 *
 * @param points    map that is filled with the first and last instant of each imported time series
 * @param folder    the file or folder to read from
 * @param databases consumers that receive the imported points together with their attributes
 * @return a pair of the number of imported time series and the number of imported points
 */
public Pair<Integer, Integer> importPoints(Map<Attributes, Pair<Instant, Instant>> points, File folder,
        BiConsumer<List<ImportPoint>, Attributes>... databases) {

    final AtomicInteger pointCounter = new AtomicInteger(0);
    final AtomicInteger tsCounter = new AtomicInteger(0);
    final File metricsFile = new File(METRICS_FILE_PATH);

    LOGGER.info("Writing imported metrics to {}", metricsFile);
    LOGGER.info("Import supports csv files as well as gz compressed csv files.");

    try {
        final FileWriter metricsFileWriter = new FileWriter(metricsFile);

        Collection<File> files = new ArrayList<>();
        if (folder.isFile()) {
            files.add(folder);
        } else {
            files.addAll(FileUtils.listFiles(folder, new String[] { "gz", "csv" }, true));
        }

        AtomicInteger counter = new AtomicInteger(0);

        files.parallelStream().forEach(file -> {
            SimpleDateFormat sdf = new SimpleDateFormat(dateFormat);
            NumberFormat nf = DecimalFormat.getInstance(numberLocal);

            InputStream inputStream = null;
            BufferedReader reader = null;
            try {
                inputStream = new FileInputStream(file);

                if (file.getName().endsWith("gz")) {
                    inputStream = new GZIPInputStream(inputStream);
                }
                reader = new BufferedReader(new InputStreamReader(inputStream));

                //Read the first line
                String headerLine = reader.readLine();

                if (headerLine == null || headerLine.isEmpty()) {
                    boolean deleted = deleteFile(file, inputStream, reader);
                    LOGGER.debug("File is empty {}. File {} removed {}", file.getName(), deleted);
                    return;
                }

                //Extract the attributes from the file name
                //E.g. first_second_third_attribute.csv
                String[] fileNameMetaData = file.getName().split("_");

                String[] metrics = headerLine.split(csvDelimiter);

                Map<Integer, Attributes> attributesPerTimeSeries = new HashMap<>(metrics.length);

                for (int i = 1; i < metrics.length; i++) {
                    String metric = metrics[i];
                    String metricOnlyAscii = Normalizer.normalize(metric, Normalizer.Form.NFD);
                    metricOnlyAscii = metricOnlyAscii.replaceAll("[^\\x00-\\x7F]", "");
                    Attributes attributes = new Attributes(metricOnlyAscii, fileNameMetaData);

                    //Check if meta data is completely set
                    if (isEmpty(attributes)) {
                        boolean deleted = deleteFile(file, inputStream, reader);
                        LOGGER.info("Attributes contains empty values {}. File {} deleted {}", attributes,
                                file.getName(), deleted);
                        continue;
                    }

                    if (attributes.getMetric().equals(".*")) {
                        boolean deleted = deleteFile(file, inputStream, reader);
                        LOGGER.info("Attributes metric{}. File {} deleted {}", attributes.getMetric(),
                                file.getName(), deleted);
                        continue;
                    }
                    attributesPerTimeSeries.put(i, attributes);
                    tsCounter.incrementAndGet();

                }

                Map<Integer, List<ImportPoint>> dataPoints = new HashMap<>();

                String line;
                while ((line = reader.readLine()) != null) {
                    String[] splits = line.split(csvDelimiter);
                    String date = splits[0];

                    Instant dateObject;
                    if (instantDate) {
                        dateObject = Instant.parse(date);
                    } else if (sdfDate) {
                        dateObject = sdf.parse(date).toInstant();
                    } else {
                        dateObject = Instant.ofEpochMilli(Long.valueOf(date));
                    }

                    for (int column = 1; column < splits.length; column++) {

                        String value = splits[column];
                        double numericValue = nf.parse(value).doubleValue();

                        ImportPoint point = new ImportPoint(dateObject, numericValue);

                        if (!dataPoints.containsKey(column)) {
                            dataPoints.put(column, new ArrayList<>());
                        }
                        dataPoints.get(column).add(point);
                        pointCounter.incrementAndGet();
                    }

                }

                dataPoints.values().forEach(Collections::sort);

                IOUtils.closeQuietly(reader);
                IOUtils.closeQuietly(inputStream);

                dataPoints.forEach((key, importPoints) -> {
                    for (BiConsumer<List<ImportPoint>, Attributes> database : databases) {
                        database.accept(importPoints, attributesPerTimeSeries.get(key));
                    }
                    points.put(attributesPerTimeSeries.get(key), Pair.of(importPoints.get(0).getDate(),
                            importPoints.get(importPoints.size() - 1).getDate()));
                    //write the stats to the file
                    Instant start = importPoints.get(0).getDate();
                    Instant end = importPoints.get(importPoints.size() - 1).getDate();

                    try {
                        writeStatsLine(metricsFileWriter, attributesPerTimeSeries.get(key), start, end);
                    } catch (IOException e) {
                        LOGGER.error("Could not write stats line", e);
                    }
                    LOGGER.info("{} of {} time series imported", counter.incrementAndGet(), tsCounter.get());
                });

            } catch (Exception e) {
                LOGGER.info("Exception while reading points.", e);
            } finally {
                //close all streams
                IOUtils.closeQuietly(reader);
                IOUtils.closeQuietly(inputStream);
            }

        });
    } catch (Exception e) {
        LOGGER.error("Exception occurred during reading points.");
    }
    return Pair.of(tsCounter.get(), pointCounter.get());
}

From source file:de.tudarmstadt.ukp.dkpro.core.api.datasets.internal.LoadedDataset.java
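
Here listFiles is called with an Ant-pattern file filter and TrueFileFilter.TRUE as the directory filter, so the whole tree below the dataset's base directory is searched for files matching each role pattern.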

private File[] getFiles(String aRole) {
    List<File> files = new ArrayList<>();

    List<String> patterns = description.getRoles().get(aRole);
    if (patterns == null) {
        return new File[0];
    }

    for (String pattern : patterns) {
        Path baseDir = factory.resolve(description);

        Collection<File> matchedFiles = FileUtils.listFiles(baseDir.toFile(),
                new AntFileFilter(baseDir, asList(pattern), null), TrueFileFilter.TRUE);

        files.addAll(matchedFiles);
    }

    File[] all = files.toArray(new File[files.size()]);
    Arrays.sort(all, (File a, File b) -> {
        return a.getName().compareTo(b.getName());
    });

    return all;
}

From source file:com.textocat.textokit.morph.ruscorpora.RusCorporaCollectionReader.java
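
Here listFiles combines a suffix file filter with TrueFileFilter.INSTANCE as the directory filter to recursively collect all corpus input files during reader initialization.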

@Override
public void initialize(UimaContext ctx) throws ResourceInitializationException {
    super.initialize(ctx);
    tagMapper = InitializableFactory.create(ctx, tagMapperClassName, RusCorporaTagMapper.class);
    if (!inputDir.isDirectory()) {
        throw new IllegalArgumentException(String.format("%s is not an existing directory", inputDir));
    }
    relativeURIFunc = CorpusUtils.relativeURIFunction(inputDir);
    relativePathFunc = CorpusUtils.relativePathFunction(inputDir);
    String inputFileExt = DEFAULT_INPUT_FILE_EXT;
    inputFiles = ImmutableList
            .copyOf(FileUtils.listFiles(inputDir, suffixFileFilter(inputFileExt), TrueFileFilter.INSTANCE));
    getLogger().info(String.format("Detected *%s files in %s: %s", inputFileExt, inputDir, inputFiles.size()));
    try {
        SAXParser saxParser = SAXParserFactory.newInstance().newSAXParser();
        xmlReader = saxParser.getXMLReader();
    } catch (Exception e) {
        throw new ResourceInitializationException(e);
    }
}

From source file:com.skynetcomputing.skynetclient.WorkManagerTest.java
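
Here listFiles fetches the job jar from the test directory with a non-recursive *.jar search, taking the first element of the returned collection.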

/**
 * Test of start method, of class WorkManager.
 *
 * @throws java.io.IOException
 * @throws java.lang.InterruptedException
 */
@Test
public void testStart() throws IOException, InterruptedException {
    IConnectionMgr conn = new IConnectionMgr() {
        @Override
        public void sendFile(File aFile) {
            try {
                Files.copy(aFile.toPath(), new File(LOCAL_INPUT_DIR + aFile.getName()).toPath(),
                        StandardCopyOption.REPLACE_EXISTING);
            } catch (IOException ex) {
                Logger.getLogger(WorkManagerTest.class.getName()).log(Level.SEVERE, "Error sending file", ex);
            }
        }

        @Override
        public void notifyJarOKNOK(boolean isOK) throws IOException {
            assertTrue("Checking Jar presence", isOK);
        }

        @Override
        public void notifyTaskCompleted() throws IOException {
            Logger.getLogger(WorkManagerTest.class.getName()).log(Level.INFO, "Task Completed");
            synchronized (isRunning) {
                isRunning.set(false);
                isRunning.notifyAll();
            }
        }

        @Override
        public void start(InetSocketAddress serverAddress) {
            System.out.println("Fake start listening..");
        }

        @Override
        public boolean isSendingFiles() {
            throw new UnsupportedOperationException("Not supported yet.");
        }

        @Override
        public void notifyTaskStarting() throws IOException {
            throw new UnsupportedOperationException("Not supported yet.");
        }
    };

    WorkManager workMgr = new WorkManager(conn, ROOT_DIR);
    // Read only, should not be used for saving files
    PersistenceManager persistMgr = new PersistenceManager(ROOT_DIR);

    File testDir = new File(MANDELTASK_DIR);
    File jobJar = FileUtils.listFiles(testDir, new String[] { "jar" }, false).iterator().next();
    workMgr.onJarReceived(jobJar);
    File localDir = new File(LOCAL_INPUT_DIR);
    FileUtils.deleteDirectory(localDir);
    localDir.mkdirs();
    for (File f : testDir.listFiles()) {
        Files.copy(f.toPath(), new File(LOCAL_INPUT_DIR + f.getName()).toPath(),
                StandardCopyOption.REPLACE_EXISTING);
    }

    for (int i = 1; i <= 4; i++) {
        isRunning.getAndSet(true);
        File taskJson = new File(localDir, i + ".task");
        File jobData = new File(localDir, i + ".data");

        workMgr.onTaskReceived(taskJson);
        workMgr.onDataReceived(jobData);

        synchronized (isRunning) {
            while (isRunning.get()) {
                isRunning.wait();
            }
        }
    }

    isRunning.getAndSet(true);

    File combineTaskFile = new File(localDir, "20" + SrzTask.EXT);
    workMgr.onTaskReceived(combineTaskFile);

    SrzTask combineTask = persistMgr.readTaskFile(combineTaskFile);
    for (int id : combineTask.getDependencies()) {
        workMgr.onDataReceived(new File(localDir, id + SrzData.EXT));
    }

    synchronized (isRunning) {
        while (isRunning.get()) {
            isRunning.wait();
        }
    }

    Logger.getLogger(WorkManagerTest.class.getName()).log(Level.INFO, "Test Finished");
}