Example usage for org.apache.commons.io.filefilter DirectoryFileFilter INSTANCE

List of usage examples for org.apache.commons.io.filefilter DirectoryFileFilter INSTANCE

Introduction

In this page you can find the example usage for org.apache.commons.io.filefilter DirectoryFileFilter INSTANCE.

Prototype

IOFileFilter INSTANCE

To view the source code for org.apache.commons.io.filefilter DirectoryFileFilter INSTANCE, click the Source Link below.

Click Source Link

Document

Singleton instance of directory filter.

Usage

From source file:org.geoserver.config.GeoServerLoader.java

/**
 * Reads the GeoServer catalog from the data directory on disk.
 * <p>
 * Loads, in order: global styles, workspaces (plus their namespaces, per-workspace
 * styles and the default-workspace marker), then a second pass over workspaces to
 * load stores (data/coverage/wms), their resources and layers, and finally the
 * top-level layer groups. Individual load failures are logged and skipped so one
 * broken file does not abort the whole catalog load.
 *
 * @param xp the persister used to deserialize the on-disk XML files
 * @return the fully populated, resolved catalog
 * @throws Exception if catalog construction fails beyond recoverable per-item errors
 */
Catalog readCatalog(XStreamPersister xp) throws Exception {
    CatalogImpl catalog = new CatalogImpl();
    catalog.setResourceLoader(resourceLoader);
    xp.setCatalog(catalog);
    // nulls are left unwrapped during load, restored at the end before resolve()
    xp.setUnwrapNulls(false);

    CatalogFactory factory = catalog.getFactory();

    //global styles
    loadStyles(resourceLoader.find("styles"), catalog, xp);

    //workspaces, stores, and resources
    File workspaces = resourceLoader.find("workspaces");
    if (workspaces != null) {
        //do a first quick scan over all workspaces, setting the default
        File dws = new File(workspaces, "default.xml");
        WorkspaceInfo defaultWorkspace = null;
        if (dws.exists()) {
            try {
                defaultWorkspace = depersist(xp, dws, WorkspaceInfo.class);
                LOGGER.info("Loaded default workspace " + defaultWorkspace.getName());
            } catch (Exception e) {
                LOGGER.log(Level.WARNING, "Failed to load default workspace", e);
            }
        } else {
            LOGGER.warning("No default workspace was found.");
        }

        // first pass: register every workspace (and its namespace) before any stores load
        for (File wsd : list(workspaces, DirectoryFileFilter.INSTANCE)) {
            File f = new File(wsd, "workspace.xml");
            if (!f.exists()) {
                // not a workspace directory, skip it
                continue;
            }

            WorkspaceInfo ws = null;
            try {
                ws = depersist(xp, f, WorkspaceInfo.class);
                catalog.add(ws);
            } catch (Exception e) {
                LOGGER.log(Level.WARNING, "Failed to load workspace '" + wsd.getName() + "'", e);
                continue;
            }

            LOGGER.info("Loaded workspace '" + ws.getName() + "'");

            //load the namespace
            File nsf = new File(wsd, "namespace.xml");
            NamespaceInfo ns = null;
            if (nsf.exists()) {
                try {
                    ns = depersist(xp, nsf, NamespaceInfo.class);
                    catalog.add(ns);
                } catch (Exception e) {
                    LOGGER.log(Level.WARNING, "Failed to load namespace for '" + wsd.getName() + "'", e);
                }
            }

            //set the default workspace, this value might be null in the case of coming from a 
            // 2.0.0 data directory. See http://jira.codehaus.org/browse/GEOS-3440
            if (defaultWorkspace != null) {
                if (ws.getName().equals(defaultWorkspace.getName())) {
                    catalog.setDefaultWorkspace(ws);
                    if (ns != null) {
                        catalog.setDefaultNamespace(ns);
                    }
                }
            } else {
                //create the default.xml file
                defaultWorkspace = catalog.getDefaultWorkspace();
                if (defaultWorkspace != null) {
                    try {
                        persist(xp, defaultWorkspace, dws);
                    } catch (Exception e) {
                        LOGGER.log(Level.WARNING, "Failed to persist default workspace '" + wsd.getName() + "'",
                                e);
                    }

                }
            }

            //load the styles for the workspace
            File styles = resourceLoader.find(wsd, "styles");
            if (styles != null) {
                loadStyles(styles, catalog, xp);
            }
        }

        // second pass: now that all workspaces exist, load each workspace's stores
        for (File wsd : list(workspaces, DirectoryFileFilter.INSTANCE)) {

            //load the stores for this workspace
            for (File sd : list(wsd, DirectoryFileFilter.INSTANCE)) {
                File f = new File(sd, "datastore.xml");
                if (f.exists()) {
                    //load as a datastore
                    DataStoreInfo ds = null;
                    try {
                        ds = depersist(xp, f, DataStoreInfo.class);
                        catalog.add(ds);

                        LOGGER.info("Loaded data store '" + ds.getName() + "'");

                        if (ds.isEnabled()) {
                            //connect to the datastore to determine if we should disable it
                            try {
                                ds.getDataStore(null);
                            } catch (Throwable t) {
                                LOGGER.warning("Error connecting to '" + ds.getName() + "'. Disabling.");
                                LOGGER.log(Level.INFO, "", t);

                                ds.setError(t);
                                ds.setEnabled(false);
                            }
                        }
                    } catch (Exception e) {
                        LOGGER.log(Level.WARNING, "Failed to load data store '" + sd.getName() + "'", e);
                        continue;
                    }

                    //load feature types
                    for (File ftd : list(sd, DirectoryFileFilter.INSTANCE)) {
                        f = new File(ftd, "featuretype.xml");
                        if (f.exists()) {
                            FeatureTypeInfo ft = null;
                            try {
                                ft = depersist(xp, f, FeatureTypeInfo.class);
                            } catch (Exception e) {
                                LOGGER.log(Level.WARNING, "Failed to load feature type '" + ftd.getName() + "'",
                                        e);
                                continue;
                            }

                            catalog.add(ft);

                            LOGGER.info("Loaded feature type '" + ds.getName() + "'");

                            // the layer published from this feature type, if any
                            f = new File(ftd, "layer.xml");
                            if (f.exists()) {
                                try {
                                    LayerInfo l = depersist(xp, f, LayerInfo.class);
                                    catalog.add(l);

                                    LOGGER.info("Loaded layer '" + l.getName() + "'");
                                } catch (Exception e) {
                                    LOGGER.log(Level.WARNING,
                                            "Failed to load layer for feature type '" + ft.getName() + "'", e);
                                }
                            }
                        } else {
                            LOGGER.warning("Ignoring feature type directory " + ftd.getAbsolutePath());
                        }
                    }
                } else {
                    //look for a coverage store
                    f = new File(sd, "coveragestore.xml");
                    if (f.exists()) {
                        CoverageStoreInfo cs = null;
                        try {
                            cs = depersist(xp, f, CoverageStoreInfo.class);
                            catalog.add(cs);

                            LOGGER.info("Loaded coverage store '" + cs.getName() + "'");
                        } catch (Exception e) {
                            LOGGER.log(Level.WARNING, "Failed to load coverage store '" + sd.getName() + "'",
                                    e);
                            continue;
                        }

                        //load coverages
                        for (File cd : list(sd, DirectoryFileFilter.INSTANCE)) {
                            f = new File(cd, "coverage.xml");
                            if (f.exists()) {
                                CoverageInfo c = null;
                                try {
                                    c = depersist(xp, f, CoverageInfo.class);
                                    catalog.add(c);

                                    LOGGER.info("Loaded coverage '" + cs.getName() + "'");
                                } catch (Exception e) {
                                    LOGGER.log(Level.WARNING, "Failed to load coverage '" + cd.getName() + "'",
                                            e);
                                    continue;
                                }

                                // the layer published from this coverage, if any
                                f = new File(cd, "layer.xml");
                                if (f.exists()) {
                                    try {
                                        LayerInfo l = depersist(xp, f, LayerInfo.class);
                                        catalog.add(l);

                                        LOGGER.info("Loaded layer '" + l.getName() + "'");
                                    } catch (Exception e) {
                                        LOGGER.log(Level.WARNING,
                                                "Failed to load layer coverage '" + c.getName() + "'", e);
                                    }
                                }
                            } else {
                                LOGGER.warning("Ignoring coverage directory " + cd.getAbsolutePath());
                            }
                        }
                    } else {
                        // look for a cascaded WMS store
                        f = new File(sd, "wmsstore.xml");
                        if (f.exists()) {
                            WMSStoreInfo wms = null;
                            try {
                                wms = depersist(xp, f, WMSStoreInfo.class);
                                catalog.add(wms);

                                LOGGER.info("Loaded wmsstore '" + wms.getName() + "'");
                            } catch (Exception e) {
                                LOGGER.log(Level.WARNING, "Failed to load wms store '" + sd.getName() + "'", e);
                                continue;
                            }

                            //load wms layers
                            for (File cd : list(sd, DirectoryFileFilter.INSTANCE)) {
                                f = new File(cd, "wmslayer.xml");
                                if (f.exists()) {
                                    WMSLayerInfo wl = null;
                                    try {
                                        wl = depersist(xp, f, WMSLayerInfo.class);
                                        catalog.add(wl);

                                        LOGGER.info("Loaded wms layer'" + wl.getName() + "'");
                                    } catch (Exception e) {
                                        LOGGER.log(Level.WARNING,
                                                "Failed to load wms layer '" + cd.getName() + "'", e);
                                        continue;
                                    }

                                    // the layer published from this wms layer, if any
                                    f = new File(cd, "layer.xml");
                                    if (f.exists()) {
                                        try {
                                            LayerInfo l = depersist(xp, f, LayerInfo.class);
                                            catalog.add(l);

                                            LOGGER.info("Loaded layer '" + l.getName() + "'");
                                        } catch (Exception e) {
                                            LOGGER.log(Level.WARNING,
                                                    "Failed to load cascaded wms layer '" + wl.getName() + "'",
                                                    e);
                                        }
                                    }
                                } else {
                                    LOGGER.warning("Ignoring coverage directory " + cd.getAbsolutePath());
                                }
                            }
                        } else {
                            // no recognized store descriptor found in this directory
                            LOGGER.warning("Ignoring store directory '" + sd.getName() + "'");
                            continue;
                        }
                    }
                }
            }

            //load the layer groups for this workspace
            File layergroups = resourceLoader.find(wsd, "layergroups");
            if (layergroups != null) {
                loadLayerGroups(layergroups, catalog, xp);
            }
        }
    } else {
        LOGGER.warning("No 'workspaces' directory found, unable to load any stores.");
    }

    //namespaces: loaded alongside each workspace above, nothing to do at the top level

    //layergroups (global, workspace-less ones)
    File layergroups = resourceLoader.find("layergroups");
    if (layergroups != null) {
        loadLayerGroups(layergroups, catalog, xp);
    }
    xp.setUnwrapNulls(true);
    catalog.resolve();
    return catalog;
}

From source file:org.geoserver.gwc.web.blob.FileBlobStorePanel.java

@SuppressWarnings("unchecked")
@Override
public void onInitialize() {
    super.onInitialize();

    // Panel for choosing the blob store's base directory on disk.
    DirectoryParamPanel baseDirPanel = new DirectoryParamPanel("baseDirectory",
            new PropertyModel<String>(getDefaultModel().getObject(), "baseDirectory"),
            new ParamResourceModel("baseDirectory", this), true);
    add(baseDirPanel);
    baseDirPanel.add(new AttributeModifier("title", new ResourceModel("baseDirectory.title")));
    // Bind the form component directly to the panel model (disables the file model).
    baseDirPanel.getFormComponent().setModel((IModel<String>) baseDirPanel.getDefaultModel());
    // Only directories may be selected in the chooser.
    baseDirPanel.setFileFilter(
            new Model<DirectoryFileFilter>((DirectoryFileFilter) DirectoryFileFilter.INSTANCE));

    // Mandatory file system block size used by the store.
    TextField<Integer> blockSizeField = new TextField<Integer>("fileSystemBlockSize");
    blockSizeField.setRequired(true);
    blockSizeField.add(new AttributeModifier("title", new ResourceModel("fileSystemBlockSize.title")));
    add(blockSizeField);
}

From source file:org.geoserver.gwc.web.blob.MbtilesBlobStorePanel.java

@Override
protected void onInitialize() {
    super.onInitialize();

    // Directory that may contain user-provided mbtiles metadata.
    DirectoryParamPanel metadataDirPanel = new DirectoryParamPanel("mbtilesMetadataDirectory",
            new PropertyModel<>(getDefaultModel().getObject(), "mbtilesMetadataDirectory"),
            new ParamResourceModel("mbtilesMetadataDirectory", this), false);
    add(metadataDirPanel);
    metadataDirPanel.getFormComponent()
            .setModel(new PropertyModel<>(getDefaultModel().getObject(), "mbtilesMetadataDirectory"));
    // Restrict the chooser to directories only.
    metadataDirPanel.setFileFilter(new Model<>((DirectoryFileFilter) DirectoryFileFilter.INSTANCE));

    // Store executor concurrency, used to parallelize some operations.
    // NOTE(review): the modified attribute here is "executorConcurrency" rather than the
    // "title" attribute used by the sibling FileBlobStorePanel — confirm this is intended.
    TextField<Integer> concurrencyField = new TextField<Integer>("executorConcurrency");
    concurrencyField.setRequired(true);
    concurrencyField.add(new AttributeModifier("executorConcurrency", new ResourceModel("executorConcurrency")));
    add(concurrencyField);
}

From source file:org.geoserver.gwc.web.blob.SqliteBlobStorePanel.java

@Override
protected void onInitialize() {

    super.onInitialize();

    // The root directory of this blob store; only directories may be chosen.
    DirectoryParamPanel rootDirPanel = new DirectoryParamPanel("rootDirectory",
            new PropertyModel<>(getDefaultModel().getObject(), "rootDirectory"),
            new ParamResourceModel("rootDirectory", this), true);
    add(rootDirPanel);
    rootDirPanel.getFormComponent()
            .setModel(new PropertyModel<>(getDefaultModel().getObject(), "rootDirectory"));
    rootDirPanel.setFileFilter(new Model<>((DirectoryFileFilter) DirectoryFileFilter.INSTANCE));

    // Properties that will be used to build a database file path.
    addRequiredTextField("templatePath");
    addRequiredTextField("rowRangeCount");
    addRequiredTextField("columnRangeCount");

    // Connection pool related properties.
    addRequiredTextField("poolSize");
    addRequiredTextField("poolReaperIntervalMs");

    // Should database files be deleted, or should we delete tile ranges.
    addCheckBox("eagerDelete");

    // Controls whether the blob store sets and uses the tile creation time.
    addCheckBox("useCreateTime");
}

/** Adds a required text field whose attribute modifier and resource key both equal its id. */
private void addRequiredTextField(String id) {
    add(new TextField<>(id).setRequired(true).add(new AttributeModifier(id, new ResourceModel(id))));
}

/** Adds a checkbox whose attribute modifier and resource key both equal its id. */
private void addCheckBox(String id) {
    add(new CheckBox(id).add(new AttributeModifier(id, new ResourceModel(id))));
}

From source file:org.geotools.utils.imagemosaic.MosaicIndexBuilder.java

/**
 * Recursively scans the directory tree rooted at {@code locationPath} and appends to
 * {@code allFiles} every file whose name matches {@link #wildcardString}.
 *
 * @param allFiles     accumulator that matching files are appended to
 * @param locationPath path of the directory to scan
 */
private void recurse(List<File> allFiles, String locationPath) {
    final File dir = new File(locationPath);
    final FileFilter fileFilter = new WildcardFileFilter(wildcardString);

    // listFiles returns null when the path does not exist, is not a directory, or an
    // I/O error occurs; guard against that instead of failing with an NPE.
    final File[] files = dir.listFiles(fileFilter);
    if (files != null) {
        for (final File file : files) {
            allFiles.add(file);
        }
    }

    final File[] dirs = dir.listFiles((FileFilter) DirectoryFileFilter.INSTANCE);
    if (dirs != null) {
        for (final File subDir : dirs) {
            // keep building '/'-separated paths, matching the original path convention
            recurse(allFiles, locationPath + '/' + subDir.getName());
        }
    }
}

From source file:org.giavacms.base.common.util.ResourceUtils.java

/**
 * Lists the names of the plain files (directories excluded) directly under the given
 * directory whose extension matches one of {@code extensions}, case-insensitively,
 * sorted alphabetically.
 *
 * @param directory  directory path, resolved against the webapp real path
 * @param extensions accepted file name suffixes (e.g. ".jpg")
 * @return sorted file names; an empty mutable list when nothing matches or the
 *         directory cannot be read
 */
public static List<String> getFilesName(String directory, List<String> extensions) {
    File rootDir = new File(getRealPath() + directory);
    IOFileFilter filesFilter = new SuffixFileFilter(extensions, IOCase.INSENSITIVE);
    IOFileFilter notDirectory = new NotFileFilter(DirectoryFileFilter.INSTANCE);
    FilenameFilter fileFilter = new AndFileFilter(filesFilter, notDirectory);
    String[] resultFiles = rootDir.list(fileFilter);
    // File.list returns null when the directory does not exist, is not a directory,
    // or an I/O error occurs; the original code would NPE in Arrays.sort here.
    if (resultFiles == null || resultFiles.length == 0) {
        return new ArrayList<String>();
    }
    Arrays.sort(resultFiles);
    // Copy into a mutable list: Arrays.asList alone returns a fixed-size view, which
    // would be inconsistent with the mutable empty list returned above.
    return new ArrayList<String>(Arrays.asList(resultFiles));
}

From source file:org.itstechupnorth.walrus.dir.DirectorySource.java

/**
 * Recursively walks {@code directory}, passing every file accepted by the configured
 * {@code filter} to {@link #output}; a non-directory argument is output directly.
 */
public void walk(final File directory) {
    if (!directory.isDirectory()) {
        output(directory);
        return;
    }
    // Emit the matching files at this level...
    for (final File match : directory.listFiles(filter)) {
        output(match);
    }
    // ...then descend into each subdirectory.
    for (final File subDir : directory.listFiles((FileFilter) DirectoryFileFilter.INSTANCE)) {
        walk(subDir);
    }
}

From source file:org.jclouds.filesystem.strategy.internal.FilesystemStorageStrategyImpl.java

/**
 * Returns an {@link Iterable} over the names of all the containers under the base
 * path; each directory directly under the base dir is reported as a container.
 *
 * @return a lazy iterable of container names
 */
@Override
public Iterable<String> getAllContainerNames() {
    // A fresh FileIterator is created per iteration, so the listing is re-read each time.
    return new Iterable<String>() {
        @Override
        public Iterator<String> iterator() {
            return new FileIterator(buildPathStartingFromBaseDir(), DirectoryFileFilter.INSTANCE);
        }
    };
}

From source file:org.jumpmind.symmetric.service.impl.FileSyncService.java

/**
 * Processes a zip of incoming file sync batches: unzips the payload into a per-node
 * temp directory, treats each numerically-named subdirectory as a batch, and for each
 * acquired batch runs its {@code sync.bsh} BeanShell script under the cluster-wide
 * shared file sync lock, recording the resulting batch status.
 *
 * @param processInfo progress tracker updated with batch ids, counts and status
 * @return the list of batches that were processed (successfully or not)
 * @throws IOException if the zip cannot be extracted or batch files cannot be read
 */
protected List<IncomingBatch> processZip(InputStream is, String sourceNodeId, ProcessInfo processInfo)
        throws IOException {
    // Unzip into a clean directory scoped by this node's identity and the source node.
    File unzipDir = new File(parameterService.getTempDirectory(), String.format("filesync_incoming/%s/%s",
            engine.getNodeService().findIdentityNodeId(), sourceNodeId));
    FileUtils.deleteDirectory(unzipDir);
    unzipDir.mkdirs();

    AppUtils.unzip(is, unzipDir);

    // Each subdirectory name is expected to be a batch id; TreeSet keeps them in order.
    Set<Long> batchIds = new TreeSet<Long>();
    String[] files = unzipDir.list(DirectoryFileFilter.INSTANCE);

    if (files != null) {
        for (int i = 0; i < files.length; i++) {
            try {
                batchIds.add(Long.parseLong(files[i]));
            } catch (NumberFormatException e) {
                log.error(
                        "Unexpected directory name.  Expected a number representing a batch id.  Instead the directory was named '{}'",
                        files[i]);
            }
        }
    }

    List<IncomingBatch> batchesProcessed = new ArrayList<IncomingBatch>();

    IIncomingBatchService incomingBatchService = engine.getIncomingBatchService();

    processInfo.setStatus(ProcessInfo.Status.LOADING);
    for (Long batchId : batchIds) {
        processInfo.setCurrentBatchId(batchId);
        processInfo.incrementBatchCount();
        File batchDir = new File(unzipDir, Long.toString(batchId));

        IncomingBatch incomingBatch = new IncomingBatch();

        // The optional batch-info.txt carries the channel id on its first line.
        File batchInfo = new File(batchDir, "batch-info.txt");
        if (batchInfo.exists()) {
            List<String> info = FileUtils.readLines(batchInfo);
            if (info != null && info.size() > 0) {
                incomingBatch.setChannelId(info.get(0).trim());
            } else {
                incomingBatch.setChannelId(Constants.CHANNEL_FILESYNC);
            }
        } else {
            incomingBatch.setChannelId(Constants.CHANNEL_FILESYNC);
        }

        incomingBatch.setBatchId(batchId);
        incomingBatch.setStatus(IncomingBatch.Status.LD);
        incomingBatch.setNodeId(sourceNodeId);
        incomingBatch.setByteCount(FileUtils.sizeOfDirectory(batchDir));
        batchesProcessed.add(incomingBatch);
        // Only run the batch if this node successfully acquires it.
        if (incomingBatchService.acquireIncomingBatch(incomingBatch)) {
            File syncScript = new File(batchDir, "sync.bsh");
            if (syncScript.exists()) {
                String script = FileUtils.readFileToString(syncScript);
                Interpreter interpreter = new Interpreter();
                boolean isLocked = false;
                try {
                    // Expose context variables to the BeanShell script.
                    interpreter.set("log", log);
                    interpreter.set("batchDir", batchDir.getAbsolutePath().replace('\\', '/'));
                    interpreter.set("engine", engine);
                    interpreter.set("sourceNodeId", sourceNodeId);

                    long waitMillis = getParameterService().getLong(ParameterConstants.FILE_SYNC_LOCK_WAIT_MS);
                    log.debug("The {} node is attempting to get shared lock for to update incoming status",
                            sourceNodeId);
                    isLocked = engine.getClusterService().lock(ClusterConstants.FILE_SYNC_SHARED,
                            ClusterConstants.TYPE_SHARED, waitMillis);
                    if (isLocked) {
                        log.debug("The {} node got a shared file sync lock", sourceNodeId);
                        // The script evaluates to a map of file path -> event type.
                        @SuppressWarnings("unchecked")
                        Map<String, String> filesToEventType = (Map<String, String>) interpreter.eval(script);
                        updateFileIncoming(sourceNodeId, filesToEventType);
                        incomingBatch.setStatementCount(filesToEventType != null ? filesToEventType.size() : 0);
                    } else {
                        throw new RuntimeException(
                                "Could not obtain file sync shared lock within " + waitMillis + " millis");
                    }
                    incomingBatch.setStatus(IncomingBatch.Status.OK);
                    if (incomingBatchService.isRecordOkBatchesEnabled()) {
                        incomingBatchService.updateIncomingBatch(incomingBatch);
                    } else if (incomingBatch.isRetry()) {
                        incomingBatchService.deleteIncomingBatch(incomingBatch);
                    }
                } catch (Throwable ex) {
                    // Unwrap BeanShell wrappers to report the real cause.
                    if (ex instanceof TargetError) {
                        Throwable target = ((TargetError) ex).getTarget();
                        if (target != null) {
                            ex = target;
                        }
                    } else if (ex instanceof EvalError) {
                        log.error("Failed to evalulate the script:\n{}", script);
                    }

                    if (ex instanceof FileConflictException) {
                        log.error(ex.getMessage() + ".  Failed to process file sync batch " + batchId);
                    } else {
                        log.error("Failed to process file sync batch " + batchId, ex);
                    }

                    // Mark the batch as errored and persist that status, then stop processing.
                    incomingBatch.setErrorFlag(true);
                    incomingBatch.setStatus(IncomingBatch.Status.ER);
                    incomingBatch.setSqlMessage(ex.getMessage());
                    if (incomingBatchService.isRecordOkBatchesEnabled() || incomingBatch.isRetry()) {
                        incomingBatchService.updateIncomingBatch(incomingBatch);
                    } else {
                        incomingBatchService.insertIncomingBatch(incomingBatch);
                    }
                    processInfo.setStatus(ProcessInfo.Status.ERROR);
                    break;
                } finally {
                    log.debug("The {} node is done processing file sync files", sourceNodeId);
                    // Always release the shared lock if it was acquired.
                    if (isLocked) {
                        engine.getClusterService().unlock(ClusterConstants.FILE_SYNC_SHARED,
                                ClusterConstants.TYPE_SHARED);
                    }
                }
            } else {
                log.error("Could not find the sync.bsh script for batch {}", batchId);
            }
        }

    }

    return batchesProcessed;
}

From source file:org.metaeffekt.dita.maven.installation.DitaInstallationHelper.java

/**
 * Get a pointer to the location of a ready to use Dita Toolkit, where to
 * content of the location corresponds to the content of the given {@link
 * this#installationArchive}./*from  w  w w  .j av  a  2 s  .  c  o  m*/
 * 
 * @return String representing the path to the Dita Toolkit installation.
 * @throws IOException
 */
public File getDitaToolkitRoot() throws IOException {
    String checksum = this.getInstallationArchiveChecksum();
    File installationRoot = new File(installationFolder, checksum);
    File[] subDirs = installationRoot.listFiles((FileFilter) DirectoryFileFilter.INSTANCE);

    if (subDirs.length != 1) {
        throw new IOException(
                "The Dita installation cache contains more than one directory for the current Toolkit version.");
    }
    return subDirs[0];
}