Example usage for org.apache.commons.io FileUtils cleanDirectory

Introduction

On this page you can find example usages of org.apache.commons.io FileUtils cleanDirectory.

Prototype

public static void cleanDirectory(File directory) throws IOException 

Document

Cleans a directory without deleting it.
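
Before the real-world examples, here is a minimal, self-contained sketch of a typical call. The directory path used here is hypothetical and chosen only for illustration:

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class CleanDirectoryExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical working directory; use any directory you own.
        File workDir = new File("target/work");

        // cleanDirectory fails if the directory does not exist,
        // so create it first (mirroring the setUp patterns below).
        if (!workDir.exists()) {
            workDir.mkdirs();
        }

        // Removes every file and subdirectory inside workDir,
        // but leaves workDir itself in place.
        FileUtils.cleanDirectory(workDir);
    }
}

The examples that follow use the same call in test setUp and tearDown methods to guarantee an empty directory between runs.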

Usage

From source file: org.apache.hadoop.gateway.AmbariServiceDefinitionTest.java

@After
public void cleanupTest() throws Exception {
    FileUtils.cleanDirectory(new File(config.getGatewayTopologyDir()));
    FileUtils.cleanDirectory(new File(config.getGatewayDeploymentDir()));
}

From source file: org.apache.hadoop.mapreduce.v2.hs.TestUnnecessaryBlockingOnHistoryFileInfo.java

@BeforeClass
public static void setUp() throws IOException {
    if (USER_DIR.exists()) {
        FileUtils.cleanDirectory(USER_DIR);
    }
    USER_DIR.mkdirs();
}

From source file: org.apache.jackrabbit.core.cluster.ClusterTest.java

/**
 * {@inheritDoc}
 */
protected void setUp() throws Exception {
    repositoryHome = new File(REPOSITORY_HOME);
    repositoryHome.mkdirs();
    FileUtils.cleanDirectory(repositoryHome);

    super.setUp();
}

From source file: org.apache.jackrabbit.core.data.CachingDataStore.java

/**
 * Initializes the data store. If the path is not set, <repository
 * home>/repository/datastore is used. This directory is automatically
 * created if it does not yet exist. During the first initialization, all
 * files from the local data store are uploaded to the backend, and the
 * local data store acts as a local cache.
 */
@Override
public void init(String homeDir) throws RepositoryException {
    try {
        if (path == null) {
            path = homeDir + "/repository/datastore";
            tmpDir = new File(homeDir, "/repository/s3tmp");
        } else {
            // cache is moved from 'path' to 'path'/repository/datastore
            tmpDir = new File(path, "/repository/s3tmp");
            path = path + "/repository/datastore";
        }
        LOG.info("path=[{}],  tmpPath=[{}]", path, tmpDir.getPath());
        directory = new File(path);
        mkdirs(directory);
        if (!mkdirs(tmpDir)) {
            FileUtils.cleanDirectory(tmpDir);
            LOG.info("tmp=[{}] cleaned.", tmpDir.getPath());
        }

        asyncWriteCache = new AsyncUploadCache();
        asyncWriteCache.init(homeDir, path, asyncUploadLimit);

        backend = createBackend();
        backend.init(this, path, config);
        String markerFileName = getMarkerFile();
        if (markerFileName != null) {
            // create marker file in homeDir to avoid deletion in cache
            // cleanup.
            File markerFile = new File(homeDir, markerFileName);
            if (!markerFile.exists()) {
                LOG.info("load files from local cache");
                uploadFilesFromCache();
                try {
                    markerFile.createNewFile();
                } catch (IOException e) {
                    throw new DataStoreException("Could not create marker file " + markerFile.getAbsolutePath(),
                            e);
                }
            } else {
                LOG.info("marker file = [{}] exists ", markerFile.getAbsolutePath());
            }
        }
        // upload any leftover async uploads to backend during last shutdown
        Set<String> fileList = asyncWriteCache.getAll();
        if (fileList != null && !fileList.isEmpty()) {
            List<String> errorFiles = new ArrayList<String>();
            LOG.info("Uploading [{}] and size=[{}] from AsyncUploadCache.", fileList, fileList.size());
            long totalSize = 0;
            List<File> files = new ArrayList<File>(fileList.size());
            for (String fileName : fileList) {
                File f = new File(path, fileName);
                if (!f.exists()) {
                    errorFiles.add(fileName);
                    LOG.error("Cannot upload pending file [{}]. File doesn't exist.", f.getAbsolutePath());
                } else {
                    totalSize += f.length();
                    files.add(new File(path, fileName));
                }
            }
            new FilesUploader(files, totalSize, concurrentUploadsThreads, true).upload();
            if (!continueOnAsyncUploadFailure && errorFiles.size() > 0) {
                LOG.error("Pending uploads of files [{}] failed. Files do not exist in Local cache.",
                        errorFiles);
                LOG.error("To continue set [continueOnAsyncUploadFailure] "
                        + "to true in Datastore configuration in "
                        + "repository.xml. There would be inconsistent data "
                        + "in repository due the missing files. ");
                throw new RepositoryException("Cannot upload async uploads from local cache. Files not found.");
            } else {
                if (errorFiles.size() > 0) {
                    LOG.error("Pending uploads of files [{}] failed. Files do"
                            + " not exist in Local cache. Continuing as "
                            + "[continueOnAsyncUploadFailure] is set to true.", errorFiles);
                }
                LOG.info("Reseting AsyncWrite Cache list.");
                asyncWriteCache.reset();
            }
        }
        cache = new LocalCache(path, tmpDir.getAbsolutePath(), cacheSize, cachePurgeTrigFactor,
                cachePurgeResizeFactor, asyncWriteCache);
    } catch (Exception e) {
        throw new RepositoryException(e);
    }
}

From source file: org.apache.jackrabbit.core.journal.FileJournalTest.java

/**
 * {@inheritDoc}
 */
protected void setUp() throws Exception {
    repositoryHome = new File(REPOSITORY_HOME);
    repositoryHome.mkdirs();
    FileUtils.cleanDirectory(repositoryHome);
    journalDirectory = new File(repositoryHome, "journal");

    super.setUp();
}

From source file: org.apache.jackrabbit.oak.plugins.blob.ClusterRepositoryInfoTest.java

@After
public void close() throws IOException {
    FileUtils.cleanDirectory(new File(DataStoreUtils.getHomeDir()));
}

From source file: org.apache.jackrabbit.oak.plugins.blob.datastore.OakFileDataStoreTest.java

@Test
public void testGetAllIdentifiers() throws Exception {
    File testDir = new File("./target", "oak-fds-test");
    FileUtils.touch(new File(testDir, "ab/cd/ef/abcdef"));
    FileUtils.touch(new File(testDir, "bc/de/fg/bcdefg"));
    FileUtils.touch(new File(testDir, "cd/ef/gh/cdefgh"));
    FileUtils.touch(new File(testDir, "c"));

    FileDataStore fds = new OakFileDataStore();
    fds.setPath(testDir.getAbsolutePath());
    fds.init(null);

    Iterator<DataIdentifier> dis = fds.getAllIdentifiers();
    Set<String> fileNames = Sets.newHashSet(Iterators.transform(dis, new Function<DataIdentifier, String>() {
        @Override
        public String apply(@Nullable DataIdentifier input) {
            return input.toString();
        }
    }));

    Set<String> expectedNames = Sets.newHashSet("abcdef", "bcdefg", "cdefgh");
    assertEquals(expectedNames, fileNames);
    FileUtils.cleanDirectory(testDir);
}

From source file: org.apache.jackrabbit.oak.plugins.blob.SharedDataStoreUtilsTest.java

@After
public void close() throws IOException {
    FileUtils.cleanDirectory(new File(DataStoreUtils.getHomeDir()));
    try {
        cleanup(dataStore.getDataStore(), new Date());
    } catch (Exception e) {
        log.error("Error closing data store", e);
    }
}

From source file: org.apache.jackrabbit.oak.plugins.document.SharedBlobStoreGCTest.java

@After
public void tearDown() throws Exception {
    DataStoreUtils.cleanup(cluster1.getDataStore(), cluster1.getDate());
    FileUtils.cleanDirectory((new File(DataStoreUtils.getHomeDir())).getParentFile());
    DataStoreUtils.time = -1;
    cluster1.getDocumentNodeStore().dispose();
    cluster2.getDocumentNodeStore().dispose();
}

From source file: org.apache.jackrabbit.oak.run.osgi.OakOSGiRepositoryFactoryTest.java

@Before
public void setUp() throws IOException {
    repositoryHome = tmpFolder.getRoot().getAbsolutePath();
    config.put("org.apache.jackrabbit.repository.home", repositoryHome);

    File repoHome = new File(repositoryHome);
    if (repoHome.exists()) {
        FileUtils.cleanDirectory(new File(repositoryHome));
    }
    copyConfig("common");
}