Example usage for org.apache.commons.io FileUtils deleteDirectory

Introduction

On this page you can find example usage of org.apache.commons.io FileUtils.deleteDirectory.

Prototype

public static void deleteDirectory(File directory) throws IOException 

Document

Deletes a directory recursively.
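
For orientation, here is a minimal, self-contained sketch of the call itself; the directory path and class name are hypothetical and used only for illustration. deleteDirectory removes the directory and everything beneath it and throws an IOException if deletion fails; in recent Commons IO versions it returns without error when the directory does not exist.

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class DeleteDirectoryExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical scratch directory used only for this sketch
        File directory = new File(System.getProperty("java.io.tmpdir"), "delete-directory-example");

        // Create something to delete so the call has an effect
        new File(directory, "nested").mkdirs();

        // Deletes the directory and all of its contents recursively;
        // throws IOException if a file or subdirectory cannot be removed
        FileUtils.deleteDirectory(directory);
    }
}

Most of the examples below follow the same pattern in test setup or teardown code: optionally check whether the directory exists, call deleteDirectory, and handle or propagate the IOException.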

Usage

From source file:com.dp.bigdata.taurus.agent.utils.FileExtractUtilsTest.java

@Test
public void TestunTarunGzip() {
    String path = getAbsolutePath("extract/test.tar.gz");
    File file = new File(path);
    try {
        FileExtractUtils.unTar(FileExtractUtils.unGzip(file));
        File testFile1 = new File(file.getParent() + "/test/test1.txt");
        assertTrue(testFile1.exists());
        FileUtils.deleteDirectory(new File(file.getParent() + "/test"));
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (ArchiveException e) {
        e.printStackTrace();
    }
}

From source file:com.datatorrent.demos.dimensions.generic.DimensionStoreOperatorTest.java

@Test
public void testQueryFromHDS() throws Exception {
    File file = new File(testInfo.getDir());
    FileUtils.deleteDirectory(file);

    DimensionStoreOperator hdsOut = new DimensionStoreOperator() {
        @Override
        public void setup(OperatorContext arg0) {
            super.setup(arg0);
            super.writeExecutor = super.queryExecutor = MoreExecutors.sameThreadExecutor(); // synchronous processing
        }
    };
    TFileImpl hdsFile = new TFileImpl.DefaultTFileImpl();
    hdsOut.setFileStore(hdsFile);
    hdsFile.setBasePath(testInfo.getDir());
    EventSchema eventSchema = GenericAggregateSerializerTest.getEventSchema();
    GenericAggregator aggregator = new GenericAggregator(eventSchema);
    aggregator.init("time=MINUTES:pubId:adId:adUnit");
    hdsOut.setEventSchemaJSON(GenericAggregateSerializerTest.TEST_SCHEMA_JSON);
    hdsOut.setAggregator(aggregator);
    hdsOut.setMaxCacheSize(1);
    hdsOut.setFlushIntervalCount(0);
    hdsOut.setup(null);

    CollectorTestSink<DimensionStoreOperator.HDSRangeQueryResult> queryResults = new CollectorTestSink<DimensionStoreOperator.HDSRangeQueryResult>();
    @SuppressWarnings({ "unchecked", "rawtypes" })
    CollectorTestSink<Object> tmp = (CollectorTestSink) queryResults;
    hdsOut.queryResult.setSink(tmp);

    hdsOut.beginWindow(1);

    long baseTime = System.currentTimeMillis();
    long baseMinute = TimeUnit.MILLISECONDS.convert(TimeUnit.MINUTES.convert(baseTime, TimeUnit.MILLISECONDS),
            TimeUnit.MINUTES);

    // Events ae1 and ae2 fall into the same aggregation because they have the same key
    Map<String, Object> eventMap = Maps.newHashMap();
    eventMap.put("timestamp", baseMinute);
    eventMap.put("pubId", 1);
    eventMap.put("adId", 2);
    eventMap.put("adUnit", 3);
    eventMap.put("clicks", 10L);

    GenericAggregate ae1 = new GenericAggregate(eventSchema.convertMapToGenericEvent(eventMap));
    hdsOut.input.process(ae1);

    // Modify click count and create new event
    eventMap.put("clicks", 20L);
    GenericAggregate ae2 = new GenericAggregate(eventSchema.convertMapToGenericEvent(eventMap));
    hdsOut.input.process(ae2);

    // Modify clicks to 10 and time by 1 minute and create new event
    eventMap.put("clicks", 10L);
    eventMap.put("timestamp", baseMinute + TimeUnit.MILLISECONDS.convert(1, TimeUnit.MINUTES));
    GenericAggregate ae3 = new GenericAggregate(eventSchema.convertMapToGenericEvent(eventMap));
    hdsOut.input.process(ae3);

    hdsOut.endWindow();

    hdsOut.beginWindow(2);

    JSONObject keys = new JSONObject();
    keys.put("pubId", 1);
    keys.put("adId", 2);
    keys.put("adUnit", 3);

    JSONObject query = new JSONObject();
    query.put("numResults", "20");
    query.put("keys", keys);
    query.put("id", "query1");
    query.put("startTime", baseMinute);
    query.put("endTime", baseMinute + TimeUnit.MILLISECONDS.convert(20, TimeUnit.MINUTES));

    hdsOut.query.process(query.toString());

    Assert.assertEquals("timeSeriesQueries " + hdsOut.rangeQueries, 1, hdsOut.rangeQueries.size());
    DimensionStoreOperator.HDSRangeQuery aq = hdsOut.rangeQueries.values().iterator().next();
    Assert.assertEquals("numTimeUnits " + hdsOut.rangeQueries, baseMinute, aq.startTime);

    hdsOut.endWindow();

    Assert.assertEquals("queryResults " + queryResults.collectedTuples, 1, queryResults.collectedTuples.size());
    HDSRangeQueryResult r = queryResults.collectedTuples.iterator().next();
    Assert.assertEquals("result points " + r, 2, r.data.size());

    // The ae1 object is stored by reference in the cache, so when the next tuple is aggregated
    // the new values are written into ae1 itself, which would cause the following check to fail:
    //Assert.assertEquals("clicks", ae1.clicks + ae2.clicks, r.data.get(0).clicks);
    Assert.assertEquals("clicks", 30L, r.data.get(0).get("clicks"));
    Assert.assertEquals("clicks", eventSchema.getValue(ae3, "clicks"), r.data.get(1).get("clicks"));
}

From source file:com.googlecode.t7mp.TomcatArtifactDispatcherTest.java

@After
public void tearDown() throws IOException {
    FileUtils.deleteDirectory(catalinaBaseDir);
    if (catalinaBaseDir.exists()) {
        System.err.println("Could not delete directory " + catalinaBaseDir.getAbsolutePath());
    }
}

From source file:com.microsoft.alm.plugin.idea.common.setup.ApplicationStartupTest.java

@After
public void localCleanup() throws Exception {
    FileUtils.deleteDirectory(VSTS_DIR);
}

From source file:com.assemblade.opendj.OpenDJTestRunner.java

@Override
public void run(RunNotifier notifier) {
    try {
        FileUtils.deleteDirectory(new File(DATA_STORE_LOCATION));
        directoryService = new OpenDJDirectoryService(DATA_STORE_LOCATION);
        super.run(notifier);
        directoryService.stop();
    } catch (Exception e) {
        notifier.fireTestFailure(new Failure(getDescription(), e));
    }
}

From source file:net.geoprism.gis.geoserver.CleanupFacade.java

@Transaction
private static void cleanupUnusedFiles_Transaction() {
    File root = new File(new File(VaultProperties.getPath("vault.default")), "files");

    if (root.exists()) {
        String[] list = root.list();

        if (list != null && list.length > 0) {
            LinkedList<String> names = new LinkedList<String>(Arrays.asList(list));

            new Iterables<String>().remove(names, new SessionPredicate());

            for (String name : names) {
                File directory = new File(new File(VaultProperties.getPath("vault.default"), "files"), name);

                try {
                    FileUtils.deleteDirectory(directory);
                } catch (IOException e) {
                    throw new ProgrammingErrorException(e);
                }
            }
        }
    }
}

From source file:net.mindengine.blogix.tests.acceptance.ExporterAccTest.java

@AfterClass
public void removeTempDirectories() throws IOException {
    if (destinationDir.exists()) {
        FileUtils.deleteDirectory(destinationDir);
    }
}

From source file:dao.EntryDaoTest.java

@AfterClass
public static void tearDownClass() {
    String fSeparator = File.separator;
    File file = new File(System.getProperty("user.dir") + fSeparator + "MyDiaryBook");
    try {
        FileUtils.deleteDirectory(file);
    } catch (IOException ex) {
        Logger.getLogger(CheckIfFileExistsDaoTest.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:gov.nih.nci.cabig.caaers.testdata.TestDataFileUtils.java

/**
 * Deletes a folder if it exists and is a directory.
 * @param folder the directory to delete
 * @throws Exception if the directory cannot be deleted
 */
public static void deleteDirectory(File folder) throws Exception {
    if (folder.exists() && folder.isDirectory())
        FileUtils.deleteDirectory(folder);
}

From source file:io.symcpe.hendrix.api.dao.TestTenantManager.java

@BeforeClass
public static void beforeClass() throws Exception {
    Properties config = new Properties(System.getProperties());
    File db = new File(TARGET_RULES_DB);
    if (db.exists()) {
        FileUtils.deleteDirectory(db);
    }
    config.setProperty("javax.persistence.jdbc.url", CONNECTION_STRING);
    try {
        emf = Persistence.createEntityManagerFactory("hendrix", config);
    } catch (Exception e) {
        e.printStackTrace();
        throw e;
    }
}