Example usage for org.apache.commons.io FileUtils deleteDirectory

List of usage examples for org.apache.commons.io FileUtils deleteDirectory

Introduction

On this page you can find example usages of org.apache.commons.io FileUtils deleteDirectory.

Prototype

public static void deleteDirectory(File directory) throws IOException 

Document

Deletes a directory recursively.
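
A minimal, self-contained sketch of a typical call is shown below. The DeleteDirectoryExample class name and the "target/tmp-output" path are illustrative assumptions and do not come from the examples that follow.

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class DeleteDirectoryExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical directory used only for illustration.
        File directory = new File("target/tmp-output");

        // Recursively deletes the directory and all of its contents;
        // an IOException is thrown if deletion fails.
        FileUtils.deleteDirectory(directory);
    }
}

As several of the examples below illustrate, callers that should not fail on a missing target often pair this call with an existence check, FileUtils.deleteQuietly, or java.nio.file.Files.deleteIfExists.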

Usage

From source file:com.collaide.fileuploader.controllers.LocalFileSynchronizer.java

@Override
public void itemDeleted(ItemDeleted item) {
    try {
        File fileToDelete = new File(item.getFromPath());
        if (fileToDelete.isDirectory()) {
            FileUtils.deleteDirectory(fileToDelete);
        } else {
            Files.deleteIfExists(fileToDelete.toPath());
        }
    } catch (IOException ex) {
        LogManager.getLogger(LocalFileSynchronizer.class).error("cannot delete file: " + ex);
    }
}

From source file:de.oppermann.pomutils.PomMergeDriverTest.java

@Override
protected void setUp() throws Exception {
    super.setUp();
    System.setProperty(org.slf4j.impl.SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "DEBUG");

    File testTargetResourceFolder = new File("target/testresources/merge");
    FileUtils.deleteDirectory(testTargetResourceFolder);
    FileUtils.copyDirectory(new File("src/test/resources/merge"), testTargetResourceFolder);
}

From source file:com.btisystems.pronx.ems.AppIntegrationTest.java

@After
public void after() throws Exception {
    if (!failed) {
        FileUtils.deleteDirectory(new File(GENERATED_DIRECTORY));
    }
}

From source file:com.dexels.navajo.tipi.dev.server.appmanager.impl.UnsignJarTask.java

public static void downloadDepencency(Dependency d, File repoDir, File destinationFolder,
        List<String> extraHeaders) throws IOException {
    String assembledName = d.getFileNameWithVersion();
    String tmpAssembledFile = "tmp_" + assembledName;
    String tmpAssembled = d.getArtifactId() + "_" + d.getVersion();
    File dest = new File(destinationFolder, tmpAssembledFile);
    FileOutputStream fos = new FileOutputStream(dest);
    File ff = d.getFilePathForDependency(repoDir);
    File parent = ff.getParentFile();
    if (!parent.exists()) {
        parent.mkdirs();
    }
    logger.info("Downloading: " + ff.getAbsolutePath());
    ZipUtils.copyResource(fos, d.getUrl().openStream());
    File tmpDir = new File(destinationFolder, tmpAssembled);
    tmpDir.mkdirs();
    ZipUtils.unzip(dest, tmpDir);
    cleanSigningData(tmpDir, extraHeaders);
    File destinationZip = new File(destinationFolder, assembledName);
    ZipUtils.zipAll(tmpDir, destinationZip);
    FileUtils.copyFileToDirectory(destinationZip, parent);
    FileUtils.deleteDirectory(tmpDir);
    dest.delete();
}

From source file:edu.kit.trufflehog.model.FileSystemTest.java

/**
 * <p>
 *     Deletes all folders created by the previous test.
 * </p>
 *
 * @throws Exception Passes any errors that occurred during the test on
 */
@After
public void tearDown() throws Exception {
    if (fileSystem.getDataFolder().exists()) {
        FileUtils.deleteDirectory(fileSystem.getDataFolder());
    }
}

From source file:azkaban.storage.LocalStorageTest.java

@After
public void tearDown() throws Exception {
    FileUtils.deleteDirectory(BASE_DIRECTORY);
}

From source file:de.jcup.egradle.other.GroovyParserSourceCollector.java

protected void execute(File sourceRootDir, File targetDir) throws IOException {
    System.out.println("delete:" + targetDir);
    FileUtils.deleteDirectory(targetDir);
    targetDir.mkdirs();
    /* copy directories + subdirectories */
    List<String> fullPackages = new ArrayList<>();
    // fullPackages.add("org/codehaus/groovy/antlr");
    // fullPackages.add("org/codehaus/groovy/ast");
    // fullPackages.add("org/codehaus/groovy/syntax");
    // fullPackages.add("org/codehaus/groovy/control");
    for (String i : fullPackages) {
        copyDirFull(sourceRootDir, targetDir, i);
    }
    /* remove some directories */
    List<String> fullPackagesToDrop = new ArrayList<>();
    fullPackagesToDrop.add("org/codehaus/groovy/antlr/java");
    for (String i : fullPackagesToDrop) {
        System.out.println("Delete:" + i);
        FileUtils.deleteDirectory(new File(targetDir, i));
    }
    /* import dedicated parts */
    List<String> imports = new ArrayList<>();
    // @formatter:off
    imports.add(imported("import org.codehaus.groovy.antlr.LineColumn;"));
    imports.add(imported("import org.codehaus.groovy.antlr.SourceBuffer;"));
    imports.add(imported("import org.codehaus.groovy.GroovyBugError;"));
    imports.add(imported("org.codehaus.groovy.antlr.SourceInfo"));
    imports.add(imported("org.codehaus.groovy.antlr.GroovySourceToken"));
    imports.add(imported("org.codehaus.groovy.antlr.java.JavaLexer"));
    imports.add(imported("org.codehaus.groovy.antlr.java.JavaTokenTypes"));
    imports.add(imported("org.codehaus.groovy.antlr.java.JavaRecognizer"));
    imports.add(imported("import org.codehaus.groovy.antlr.GroovySourceAST;"));
    imports.add(imported("import org.codehaus.groovy.antlr.UnicodeEscapingReader;"));
    imports.add(imported("import org.codehaus.groovy.antlr.parser.GroovyLexer;"));
    imports.add(imported("import org.codehaus.groovy.antlr.parser.GroovyRecognizer;"));
    imports.add(imported("org.codehaus.groovy.antlr.parser.GroovyTokenTypes"));
    // @formatter:on

    for (String i : imports) {
        copyFile(sourceRootDir, targetDir, i);
    }

}

From source file:com.thoughtworks.go.agent.common.util.JarUtilTest.java

@After
public void tearDown() throws IOException {
    FileUtils.deleteQuietly(new File(PATH_WITH_HASHES + "test-agent.jar"));
    FileUtils.deleteDirectory(new File(PATH_WITH_HASHES));
}

From source file:com.datatorrent.demos.dimensions.ads.AdsDimensionStoreOperatorTest.java

@Test
public void testQuery() throws Exception {
    File file = new File(testInfo.getDir());
    FileUtils.deleteDirectory(file);

    AdsDimensionStoreOperator hdsOut = new AdsDimensionStoreOperator() {
        @Override
        public void setup(OperatorContext arg0) {
            super.setup(arg0);
            super.writeExecutor = super.queryExecutor = MoreExecutors.sameThreadExecutor(); // synchronous processing
        }
    };
    TFileImpl hdsFile = new TFileImpl.DefaultTFileImpl();
    hdsOut.setFileStore(hdsFile);
    hdsFile.setBasePath(testInfo.getDir());
    hdsOut.setAggregator(new AdInfo.AdInfoAggregator());
    hdsOut.setMaxCacheSize(1);
    hdsOut.setFlushIntervalCount(0);
    hdsOut.setup(null);

    hdsOut.setDebug(false);

    CollectorTestSink<AdsDimensionStoreOperator.TimeSeriesQueryResult> queryResults = new CollectorTestSink<AdsDimensionStoreOperator.TimeSeriesQueryResult>();
    @SuppressWarnings({ "unchecked", "rawtypes" })
    CollectorTestSink<Object> tmp = (CollectorTestSink) queryResults;
    hdsOut.queryResult.setSink(tmp);

    hdsOut.beginWindow(1);

    long baseTime = System.currentTimeMillis();
    long baseMinute = TimeUnit.MILLISECONDS.convert(TimeUnit.MINUTES.convert(baseTime, TimeUnit.MILLISECONDS),
            TimeUnit.MINUTES);

    // Check aggregation for ae1 and ae2 as they have same key.
    AdInfo.AdInfoAggregateEvent ae1 = new AdInfo.AdInfoAggregateEvent();
    ae1.publisherId = 1;
    ae1.advertiserId = 2;
    ae1.adUnit = 3;
    ae1.timestamp = baseMinute;
    ae1.clicks = 10;
    hdsOut.input.process(ae1);

    AdInfo.AdInfoAggregateEvent ae2 = new AdInfo.AdInfoAggregateEvent();
    ae2.publisherId = 1;
    ae2.advertiserId = 2;
    ae2.adUnit = 3;
    ae2.timestamp = baseMinute;
    ae2.clicks = 20;
    hdsOut.input.process(ae2);

    AdInfo.AdInfoAggregateEvent ae3 = new AdInfo.AdInfoAggregateEvent();
    ae3.publisherId = 1;
    ae3.advertiserId = 2;
    ae3.adUnit = 3;
    ae3.timestamp = baseMinute + TimeUnit.MILLISECONDS.convert(1, TimeUnit.MINUTES);
    ae3.clicks = 40;
    hdsOut.input.process(ae3);

    hdsOut.endWindow();

    hdsOut.beginWindow(2);

    JSONObject keys = new JSONObject();
    keys.put("publisherId", String.valueOf(1));
    keys.put("advertiserId", String.valueOf(2));
    keys.put("adUnit", String.valueOf(3));

    JSONObject query = new JSONObject();
    query.put("numResults", "20");
    query.put("keys", keys);
    query.put("id", "query1");
    query.put("startTime", baseMinute);
    query.put("endTime", baseMinute + TimeUnit.MILLISECONDS.convert(20, TimeUnit.MINUTES));

    hdsOut.query.process(query.toString());

    Assert.assertEquals("timeSeriesQueries " + hdsOut.timeSeriesQueries, 1, hdsOut.timeSeriesQueries.size());
    AdsDimensionStoreOperator.TimeSeriesQuery aq = hdsOut.timeSeriesQueries.values().iterator().next();
    Assert.assertEquals("numTimeUnits " + hdsOut.timeSeriesQueries, baseMinute, aq.startTime);

    hdsOut.endWindow();

    Assert.assertEquals("queryResults " + queryResults.collectedTuples, 1, queryResults.collectedTuples.size());
    TimeSeriesQueryResult r = queryResults.collectedTuples.iterator().next();
    Assert.assertEquals("result points " + r, 2, r.data.size());

    // ae1 object is stored as referenced in cache, and when new tuple is aggregated,
    // the new values are updated in ae1 itself, causing following check to fail.
    //Assert.assertEquals("clicks", ae1.clicks + ae2.clicks, r.data.get(0).clicks);
    Assert.assertEquals("clicks", 10 + ae2.clicks, r.data.get(0).clicks);
    Assert.assertEquals("clicks", ae3.clicks, r.data.get(1).clicks);

    Assert.assertNotSame("deserialized", ae1, r.data.get(1));
    Assert.assertSame("from cache", ae3, r.data.get(1));

}

From source file:io.druid.segment.loading.LocalDataSegmentPullerTest.java

@After
public void after() throws IOException {
    FileUtils.deleteDirectory(tmpDir);
}