Example usage for org.apache.hadoop.fs FileUtil fullyDelete

Introduction

This page shows example usage of org.apache.hadoop.fs.FileUtil.fullyDelete, collected from open source projects.

Prototype

public static boolean fullyDelete(final File dir) 

Document

Delete a directory and all its contents. If the method returns false, the directory may have been only partially deleted.
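
A minimal sketch of the typical call pattern, assuming a throwaway scratch directory (the path and variable names below are placeholders, not taken from the examples):

import java.io.File;
import org.apache.hadoop.fs.FileUtil;

// Clear out any leftovers from a previous run, then recreate the directory.
// "target/test-scratch" is a hypothetical path.
File scratchDir = new File("target/test-scratch");
FileUtil.fullyDelete(scratchDir); // returns normally even if the directory is absent
if (!scratchDir.mkdirs()) {
    throw new IllegalStateException("could not create " + scratchDir);
}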

Usage

From source file:org.apache.hive.hcatalog.pig.TestHCatLoaderPredicatePushDown.java

License:Apache License

private static void clearUpLocalFileSystemDirectories() {
    File f = new File(TEST_DATA_DIR);
    if (f.exists()) {
        FileUtil.fullyDelete(f);
    }
}
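
Since fullyDelete returns normally when the path does not exist (the later examples rely on this with a "Might not exist" comment), the exists() guard above is defensive rather than required. An equivalent sketch, assuming the same TEST_DATA_DIR constant:

private static void clearUpLocalFileSystemDirectories() {
    // fullyDelete tolerates a missing directory, so no exists() check is needed.
    FileUtil.fullyDelete(new File(TEST_DATA_DIR));
}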

From source file:org.apache.hive.hcatalog.pig.TestHCatLoaderStorer.java

License:Apache License

/**
 * Test round trip of smallint/tinyint: Hive->Pig->Hive.  This is a more general use case in HCatalog:
 * 'read some data from Hive, process it in Pig, write result back to a Hive table'
 */
@Test
public void testReadWrite() throws Exception {
    final String tblName = "small_ints_table";
    final String tblName2 = "pig_hcatalog_1";
    File dataDir = new File(TEST_DATA_DIR + File.separator + "testReadWrite");
    FileUtil.fullyDelete(dataDir); // Might not exist
    Assert.assertTrue(dataDir.mkdir());
    final String INPUT_FILE_NAME = dataDir + "/inputtrw.data";

    TestHCatLoader.dropTable(tblName, driver);
    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, new String[] { "40\t1" });

    TestHCatLoader.executeStatementOnDriver(
            "create external table " + tblName + " (my_small_int smallint, my_tiny_int tinyint)"
                    + " row format delimited fields terminated by '\t' stored as textfile location '"
                    + dataDir.toURI().getPath() + "'",
            driver);
    TestHCatLoader.dropTable(tblName2, driver);
    TestHCatLoader.createTable(tblName2, "my_small_int smallint, my_tiny_int tinyint", null, driver,
            "textfile");

    LOG.debug("File=" + INPUT_FILE_NAME);
    TestHCatStorer.dumpFile(INPUT_FILE_NAME);
    PigServer server = createPigServer(true);
    try {
        int queryNumber = 1;
        logAndRegister(server, "A = load '" + tblName
                + "' using org.apache.hive.hcatalog.pig.HCatLoader() as (my_small_int:int, my_tiny_int:int);",
                queryNumber++);
        logAndRegister(server,
                "b = foreach A generate my_small_int + my_tiny_int as my_small_int, my_tiny_int;",
                queryNumber++);
        logAndRegister(server,
                "store b into '" + tblName2 + "' using org.apache.hive.hcatalog.pig.HCatStorer();",
                queryNumber);
        //perform simple checksum here; make sure nothing got turned to NULL
        TestHCatLoader.executeStatementOnDriver("select my_small_int from " + tblName2, driver);
        ArrayList<String> l = new ArrayList<String>();
        driver.getResults(l);
        for (String t : l) {
            LOG.debug("t=" + t);
        }
        Assert.assertEquals("Expected '1' rows; got '" + l.size() + "'", 1, l.size());
        int result = Integer.parseInt(l.get(0));
        Assert.assertEquals("Expected value '41'; got '" + result + "'", 41, result);
    } finally {
        server.shutdown();
    }
}

From source file:org.apache.hive.hcatalog.pig.TestHCatLoaderStorer.java

License:Apache License

/**
 * Ensure Pig can read/write tinyint/smallint columns.
 */
@Test
public void testSmallTinyInt() throws Exception {

    String readTblName = "test_small_tiny_int";
    File dataDir = new File(TEST_DATA_DIR + "/testSmallTinyIntData");
    File dataFile = new File(dataDir, "testSmallTinyInt.tsv");

    String writeTblName = "test_small_tiny_int_write";
    File writeDataFile = new File(TEST_DATA_DIR, writeTblName + ".tsv");

    FileUtil.fullyDelete(dataDir); // Might not exist
    Assert.assertTrue(dataDir.mkdir());

    HcatTestUtils.createTestDataFile(dataFile.getAbsolutePath(),
            new String[] { String.format("%d\t%d", Short.MIN_VALUE, Byte.MIN_VALUE),
                    String.format("%d\t%d", Short.MAX_VALUE, Byte.MAX_VALUE) });

    // Create a table with smallint/tinyint columns, load data, and query from Hive.
    Assert.assertEquals(0, driver.run("drop table if exists " + readTblName).getResponseCode());
    Assert.assertEquals(0,
            driver.run("create external table " + readTblName + " (my_small_int smallint, my_tiny_int tinyint)"
                    + " row format delimited fields terminated by '\t' stored as textfile").getResponseCode());
    Assert.assertEquals(0, driver.run("load data local inpath '" + dataDir.getPath().replaceAll("\\\\", "/")
            + "' into table " + readTblName).getResponseCode());

    PigServer server = new PigServer(ExecType.LOCAL);
    server.registerQuery("data = load '" + readTblName + "' using org.apache.hive.hcatalog.pig.HCatLoader();");

    // Ensure Pig schema is correct.
    Schema schema = server.dumpSchema("data");
    Assert.assertEquals(2, schema.getFields().size());
    Assert.assertEquals("my_small_int", schema.getField(0).alias);
    Assert.assertEquals(DataType.INTEGER, schema.getField(0).type);
    Assert.assertEquals("my_tiny_int", schema.getField(1).alias);
    Assert.assertEquals(DataType.INTEGER, schema.getField(1).type);

    // Ensure Pig can read data correctly.
    Iterator<Tuple> it = server.openIterator("data");
    Tuple t = it.next();
    Assert.assertEquals(Integer.valueOf(Short.MIN_VALUE), t.get(0));
    Assert.assertEquals(Integer.valueOf(Byte.MIN_VALUE), t.get(1));
    t = it.next();
    Assert.assertEquals(Integer.valueOf(Short.MAX_VALUE), t.get(0));
    Assert.assertEquals(Integer.valueOf(Byte.MAX_VALUE), t.get(1));
    Assert.assertFalse(it.hasNext());

    // Ensure Pig can write correctly to smallint/tinyint columns. This means values within the
    // bounds of the column type are written, and values outside throw an exception.
    Assert.assertEquals(0, driver.run("drop table if exists " + writeTblName).getResponseCode());
    Assert.assertEquals(
            0, driver
                    .run("create table " + writeTblName
                            + " (my_small_int smallint, my_tiny_int tinyint) stored as rcfile")
                    .getResponseCode());

    // Values within the column type bounds.
    HcatTestUtils.createTestDataFile(writeDataFile.getAbsolutePath(),
            new String[] { String.format("%d\t%d", Short.MIN_VALUE, Byte.MIN_VALUE),
                    String.format("%d\t%d", Short.MAX_VALUE, Byte.MAX_VALUE) });
    smallTinyIntBoundsCheckHelper(writeDataFile.getPath().replaceAll("\\\\", "/"),
            ExecJob.JOB_STATUS.COMPLETED);

    // Values outside the column type bounds will fail at runtime.
    HcatTestUtils.createTestDataFile(TEST_DATA_DIR + "/shortTooSmall.tsv",
            new String[] { String.format("%d\t%d", Short.MIN_VALUE - 1, 0) });
    smallTinyIntBoundsCheckHelper(TEST_DATA_DIR + "/shortTooSmall.tsv", ExecJob.JOB_STATUS.FAILED);

    HcatTestUtils.createTestDataFile(TEST_DATA_DIR + "/shortTooBig.tsv",
            new String[] { String.format("%d\t%d", Short.MAX_VALUE + 1, 0) });
    smallTinyIntBoundsCheckHelper(TEST_DATA_DIR + "/shortTooBig.tsv", ExecJob.JOB_STATUS.FAILED);

    HcatTestUtils.createTestDataFile(TEST_DATA_DIR + "/byteTooSmall.tsv",
            new String[] { String.format("%d\t%d", 0, Byte.MIN_VALUE - 1) });
    smallTinyIntBoundsCheckHelper(TEST_DATA_DIR + "/byteTooSmall.tsv", ExecJob.JOB_STATUS.FAILED);

    HcatTestUtils.createTestDataFile(TEST_DATA_DIR + "/byteTooBig.tsv",
            new String[] { String.format("%d\t%d", 0, Byte.MAX_VALUE + 1) });
    smallTinyIntBoundsCheckHelper(TEST_DATA_DIR + "/byteTooBig.tsv", ExecJob.JOB_STATUS.FAILED);
}
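
The helper smallTinyIntBoundsCheckHelper is defined elsewhere in this test class. As a hedged sketch of the shape such a helper might take (an assumption about its implementation, not the actual Hive source), it stores the given file into the write table via HCatStorer in batch mode and compares the resulting job status:

public void smallTinyIntBoundsCheckHelper(String filePath, ExecJob.JOB_STATUS expectedStatus) throws Exception {
    // Hypothetical sketch: load the raw tsv, store it through HCatStorer into
    // the smallint/tinyint table, and check whether the job completed or failed.
    PigServer server = new PigServer(ExecType.LOCAL);
    server.setBatchOn();
    server.registerQuery("data = load '" + filePath + "' as (my_small_int:int, my_tiny_int:int);");
    server.registerQuery(
            "store data into 'test_small_tiny_int_write' using org.apache.hive.hcatalog.pig.HCatStorer();");
    List<ExecJob> jobs = server.executeBatch();
    Assert.assertEquals(expectedStatus, jobs.get(0).getStatus());
}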

From source file:org.apache.kylin.engine.mr.steps.MergeCuboidJobTest.java

License:Apache License

@Test
public void test() throws Exception {
    // String input =
    // "src/test/resources/data/base_cuboid,src/test/resources/data/6d_cuboid";
    String output = "target/test-output/merged_cuboid";
    String cubeName = "test_kylin_cube_with_slr_ready";
    String jobname = "merge_cuboid";

    File baseFolder = File.createTempFile("kylin-f24668f6-dcff-4cb6-a89b-77f1119df8fa-", "base");
    FileUtils.forceDelete(baseFolder);
    baseFolder.mkdir();
    FileUtils.copyDirectory(new File("src/test/resources/data/base_cuboid"), baseFolder);
    FileUtils.forceDeleteOnExit(baseFolder);

    File eightFolder = File.createTempFile("kylin-f24668f6-dcff-4cb6-a89b-77f1119df8fa-", "8d");
    FileUtils.forceDelete(eightFolder);
    eightFolder.mkdir();
    FileUtils.copyDirectory(new File("src/test/resources/data/base_cuboid"), eightFolder);
    FileUtils.forceDeleteOnExit(eightFolder);

    FileUtil.fullyDelete(new File(output));

    // CubeManager cubeManager =
    // CubeManager.getInstanceFromEnv(getTestConfig());

    String[] args = { "-input", baseFolder.getAbsolutePath() + "," + eightFoler.getAbsolutePath(), "-cubename",
            cubeName, "-segmentname", "20130331080000_20131212080000", "-output", output, "-jobname", jobname };
    assertEquals("Job failed", 0, ToolRunner.run(conf, new MergeCuboidJob(), args));

}
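
Note that this test mixes two similarly named utilities: org.apache.commons.io.FileUtils (forceDelete, copyDirectory, forceDeleteOnExit) and org.apache.hadoop.fs.FileUtil (fullyDelete). A small sketch of the practical difference, using hypothetical wrapper names:

import java.io.File;
import java.io.IOException;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.FileUtil;

public class DeleteHelpers {
    // commons-io reports failure (including a missing path) by throwing.
    static void deleteOrThrow(File dir) throws IOException {
        FileUtils.forceDelete(dir);
    }

    // Hadoop's FileUtil reports failure through its boolean return value
    // and returns normally even when the path does not exist.
    static boolean deleteIfPresent(File dir) {
        return FileUtil.fullyDelete(dir);
    }
}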

From source file:org.apache.kylin.engine.mr.steps.NDCuboidJobTest.java

License:Apache License

@Test
public void testJob8D() throws Exception {
    String input = "src/test/resources/data/base_cuboid/";
    String output = "target/test-output/8d_cuboid";
    String cubeName = "test_kylin_cube_with_slr_1_new_segment";
    String segmentName = "20130331080000_20131212080000";
    String jobname = "8d_cuboid";
    String level = "1";

    FileUtil.fullyDelete(new File(output));

    String[] args = { "-input", input, "-cubename", cubeName, "-segmentname", segmentName, "-output", output,
            "-jobname", jobname, "-level", level };
    assertEquals("Job failed", 0, ToolRunner.run(conf, new NDCuboidJob(), args));
}

From source file:org.apache.kylin.engine.mr.steps.NDCuboidJobTest.java

License:Apache License

@Test
public void testJob7D() throws Exception {
    final String input = "src/test/resources/data/8d_cuboid/";
    final String output = "target/test-output/7d_cuboid";
    final String cubeName = "test_kylin_cube_with_slr_1_new_segment";
    String segmentName = "20130331080000_20131212080000";
    String jobname = "7d_cuboid";
    String level = "2";

    FileUtil.fullyDelete(new File(output));

    String[] args = { "-input", input, "-cubename", cubeName, "-segmentname", segmentName, "-output", output,
            "-jobname", jobname, "-level", level };
    assertEquals("Job failed", 0, ToolRunner.run(conf, new NDCuboidJob(), args));
}

From source file:org.apache.kylin.job.hadoop.cube.MergeCuboidJobTest.java

License:Apache License

@Test
public void test() throws Exception {
    // String input =
    // "src/test/resources/data/base_cuboid,src/test/resources/data/6d_cuboid";
    String output = "target/test-output/merged_cuboid";
    String cubeName = "test_kylin_cube_with_slr_ready";
    String jobname = "merge_cuboid";

    File baseFolder = File.createTempFile("kylin-f24668f6-dcff-4cb6-a89b-77f1119df8fa-", "base");
    baseFolder.delete();
    baseFolder.mkdir();
    FileUtils.copyDirectory(new File("src/test/resources/data/base_cuboid"), baseFolder);
    baseFolder.deleteOnExit();

    File sixDFolder = File.createTempFile("kylin-f24668f6-dcff-4cb6-a89b-77f1119df8fa-", "6d");
    sixDFolder.delete();
    sixDFolder.mkdir();
    FileUtils.copyDirectory(new File("src/test/resources/data/base_cuboid"), sixDFolder);
    sixDFolder.deleteOnExit();

    FileUtil.fullyDelete(new File(output));

    // CubeManager cubeManager =
    // CubeManager.getInstanceFromEnv(getTestConfig());

    String[] args = { "-input", baseFolder.getAbsolutePath() + "," + sixDFolder.getAbsolutePath(), "-cubename",
            cubeName, "-segmentname", "20130331080000_20131212080000", "-output", output, "-jobname", jobname };
    assertEquals("Job failed", 0, ToolRunner.run(conf, new MergeCuboidJob(), args));

}

From source file:org.apache.kylin.job.tools.ColumnCardinalityJobTest.java

License:Apache License

@Test
@Ignore("not maintaining")
public void testJob() throws Exception {
    final String input = "src/test/resources/data/test_cal_dt/";
    final String output = "target/test-output/column-cardinality/";

    FileUtil.fullyDelete(new File(output));

    String[] args = { "-input", input, "-output", output, "-cols", "1,2,3,4,5,6,9,0" };
    assertEquals("Job failed", 0, ToolRunner.run(new HiveColumnCardinalityJob(), args));
}

From source file:org.apache.kylin.storage.hbase.steps.RangeKeyDistributionJobTest.java

License:Apache License

@Test
public void testJob() throws Exception {
    String input = "src/test/resources/data/base_cuboid/,src/test/resources/data/8d_cuboid/";
    String output = "target/test-output/key_distribution_range/";
    String jobname = "calculate_splits";
    String cubename = "test_kylin_cube_with_slr_ready";

    FileUtil.fullyDelete(new File(output));

    String[] args = { "-input", input, "-output", output, "-jobname", jobname, "-cubename", cubename };
    assertEquals("Job failed", 0, ToolRunner.run(conf, new RangeKeyDistributionJob(), args));
}

From source file:org.apache.metron.management.FileSystemFunctionsTest.java

License:Apache License

@After
public void teardown() {
    if (type == FileSystemFunctions.FS_TYPE.HDFS) {
        hdfsCluster.shutdown();
        FileUtil.fullyDelete(baseDir);
    } else {
        new File(prefix).delete();
    }
}
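
For context, this teardown pairs with a MiniDFSCluster-based setup along these lines (a sketch only; the base directory and configuration are assumptions, not the actual Metron setup):

import java.io.File;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;

// Hypothetical setup for the HDFS branch of the test above.
File baseDir = new File("target/hdfs").getAbsoluteFile(); // assumed location
Configuration conf = new Configuration();
conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
MiniDFSCluster hdfsCluster = new MiniDFSCluster.Builder(conf).build();
// ... tests run against hdfsCluster ...
// The teardown then shuts the cluster down and calls
// FileUtil.fullyDelete(baseDir) to remove the on-disk state it left behind.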