Usage examples for org.apache.hadoop.fs.FileUtil#fullyDelete
public static boolean fullyDelete(final File dir)
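fullyDelete recursively deletes the given local directory and everything under it, returning true only if the deletion fully succeeds. A minimal sketch of direct usage (the /tmp path below is a hypothetical example, not taken from any of the sources listed):

    import java.io.File;
    import org.apache.hadoop.fs.FileUtil;

    public class FullyDeleteExample {
        public static void main(String[] args) {
            // Hypothetical scratch directory; fullyDelete removes the
            // directory itself and all of its contents on the local filesystem.
            File dir = new File("/tmp/example-scratch-dir");
            boolean deleted = FileUtil.fullyDelete(dir);
            System.out.println("Deleted: " + deleted);
        }
    }

The recurring pattern in the examples below is to fullyDelete a job's output directory before launching the job, since Hadoop's output formats refuse to write into a directory that already exists.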
From source file:com.ifeng.vdn.parser.VideoLogParseLocalDriver.java
License:Apache License
public static void main(String[] args) throws Exception {
    String input = "/home/lhfei/app_tmp/vdnlog/input/*/*.gz";
    String output = "/home/lhfei/app_tmp/vdnlog/output/*/";
    //String input = "/home/lhfei/app_tmp/vdnlog/result_ALL.txt";
    //String output = "/home/lhfei/app_tmp/vdnlog/output/";

    if (args == null || args.length != 2) {
        args = new String[] { input, output };
    }

    // Remove any previous output so the job starts from a clean directory.
    FileUtil.fullyDelete(new File(output));

    int exitCode = ToolRunner.run(new VideoLogParseLocalDriver(), args);
    System.exit(exitCode);
}
From source file:com.ifeng.vdn.videolog.sort.SortGroupResultPreprocessor.java
License:Apache License
public static void main(String[] args) throws Exception {
    if (args == null || args.length != 2) {
        args = new String[] { "src/test/resources/input/sort/count-result.txt",
                "src/test/resources/output/sort-countresult" };
    }

    FileUtil.fullyDelete(new File(args[1]));

    int exitCode = ToolRunner.run(new SortGroupResultPreprocessor(), args);
    System.exit(exitCode);
}
From source file:com.ifeng.vdn.videolog.VideologGroupLocalDriver.java
License:Apache License
public static void main(String[] args) throws Exception {
    String input = "src/test/resources/input/videolog/0000.txt";
    String output = "src/test/resources/output/log";

    if (args == null || args.length != 2) {
        args = new String[] { input, output };
    }

    FileUtil.fullyDelete(new File(output));

    int exitCode = ToolRunner.run(new VideologGroupLocalDriver(), args);
    System.exit(exitCode);
}
From source file:com.impetus.client.hbase.junits.HBaseCli.java
License:Apache License
public static void cleanUp() {
    try {
        if (utility != null) {
            // utility.getMiniHBaseCluster().shutdown();
            // File workingDirectory = new File("./");
            // utility.closeRegion("localhost");
            utility.cleanupTestDir();
            // utility.cleanupTestDir(dir.getAbsolutePath());
            // ZooKeeperServer server = new ZooKeeperServer(zkDir, zkDir, 2000);
            // ZooKeeperServerBean bean = new ZooKeeperServerBean(server);
            // String path = (String) this.makeFullPath(null, bean);
            // MBeanRegistry.getInstance().unregister(bean);
            // MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
            // mbs.unregisterMBean(makeObjectName(path, bean));
            // utility.getHbaseCluster().shutdown();
            utility.shutdownMiniCluster();

            // Remove the ZooKeeper and HBase master working directories
            // left behind by the mini cluster.
            FileUtil.fullyDelete(zkDir);
            FileUtil.fullyDelete(masterDir);
            utility = null;
        }
    } catch (IOException e) {
        logger.error(e.getMessage());
    } catch (NullPointerException e) {
        // ignored
    } catch (Exception e) {
        // ignored
    }
}
From source file:com.kylinolap.job.hadoop.cube.BaseCuboidJobTest.java
License:Apache License
@Test
public void testJob() throws Exception {
    String input = "src/test/resources/data/flat_table/";
    String output = "target/test-output/base_cuboid/";
    String cubeName = "test_kylin_cube_with_slr_1_new_segment";
    String segmentName = "20130331080000_20131212080000";
    String jobname = "base_cuboid_job";
    String level = "0";

    // Delete stale output from earlier runs; the job fails if the
    // output directory already exists.
    FileUtil.fullyDelete(new File(output));

    String[] args = { "-input", input, "-cubename", cubeName, "-segmentname", segmentName,
            "-output", output, "-jobname", jobname, "-level", level };
    assertEquals("Job failed", 0, ToolRunner.run(conf, new BaseCuboidJob(), args));
}
From source file:com.kylinolap.job.hadoop.cube.BaseCuboidJobTest.java
License:Apache License
@Test
public void testJobWithBadParas() throws Exception {
    final String input = "src/test/resources/data/flat_table/";
    final String output = "target/test-output/base_cuboid/";
    final String metadata = AbstractKylinTestCase.LOCALMETA_TEST_DATA;

    FileUtil.fullyDelete(new File(output));

    String[] args = { "-input", input, "-output", output, "-metadata", metadata };
    assertEquals(2, ToolRunner.run(conf, new BaseCuboidJob(), args));
}
From source file:com.kylinolap.job.hadoop.cube.MergeCuboidJobTest.java
License:Apache License
@Test
public void test() throws Exception {
    // String input = "src/test/resources/data/base_cuboid,src/test/resources/data/6d_cuboid";
    String output = "target/test-output/merged_cuboid";
    String cubeName = "test_kylin_cube_with_slr_ready";
    String jobname = "merge_cuboid";

    // Stage the input cuboids in temporary directories.
    File baseFolder = File.createTempFile("kylin-f24668f6-dcff-4cb6-a89b-77f1119df8fa-", "base");
    baseFolder.delete();
    baseFolder.mkdir();
    FileUtils.copyDirectory(new File("src/test/resources/data/base_cuboid"), baseFolder);
    baseFolder.deleteOnExit();

    File sixDFolder = File.createTempFile("kylin-f24668f6-dcff-4cb6-a89b-77f1119df8fa-", "6d");
    sixDFolder.delete();
    sixDFolder.mkdir();
    FileUtils.copyDirectory(new File("src/test/resources/data/base_cuboid"), sixDFolder);
    sixDFolder.deleteOnExit();

    FileUtil.fullyDelete(new File(output));

    // CubeManager cubeManager = CubeManager.getInstanceFromEnv(this.getTestConfig());
    String[] args = { "-input", baseFolder.getAbsolutePath() + "," + sixDFolder.getAbsolutePath(),
            "-cubename", cubeName, "-segmentname", "20130331080000_20131212080000",
            "-output", output, "-jobname", jobname };
    assertEquals("Job failed", 0, ToolRunner.run(conf, new MergeCuboidJob(), args));
}
From source file:com.kylinolap.job.hadoop.cube.NDCuboidJobTest.java
License:Apache License
@Test
public void testJob6D() throws Exception {
    String input = "src/test/resources/data/base_cuboid/";
    String output = "target/test-output/6d_cuboid";
    String cubeName = "test_kylin_cube_with_slr_1_new_segment";
    String segmentName = "20130331080000_20131212080000";
    String jobname = "6d_cuboid";
    String level = "1";

    FileUtil.fullyDelete(new File(output));

    String[] args = { "-input", input, "-cubename", cubeName, "-segmentname", segmentName,
            "-output", output, "-jobname", jobname, "-level", level };
    assertEquals("Job failed", 0, ToolRunner.run(conf, new NDCuboidJob(), args));
}
From source file:com.kylinolap.job.hadoop.cube.NDCuboidJobTest.java
License:Apache License
@Test
public void testJob5D() throws Exception {
    final String input = "src/test/resources/data/6d_cuboid/";
    final String output = "target/test-output/5d_cuboid";
    final String cubeName = "test_kylin_cube_with_slr_1_new_segment";
    String segmentName = "20130331080000_20131212080000";
    String jobname = "5d_cuboid";
    String level = "2";

    FileUtil.fullyDelete(new File(output));

    String[] args = { "-input", input, "-cubename", cubeName, "-segmentname", segmentName,
            "-output", output, "-jobname", jobname, "-level", level };
    assertEquals("Job failed", 0, ToolRunner.run(conf, new NDCuboidJob(), args));
}
From source file:com.kylinolap.job.hadoop.cube.RangeKeyDistributionJobTest.java
License:Apache License
@Test
public void testJob() throws Exception {
    String input = "src/test/resources/data/base_cuboid/,src/test/resources/data/6d_cuboid/";
    String output = "target/test-output/key_distribution_range/";
    String jobname = "calculate_splits";
    String cubename = "test_kylin_cube_with_slr_ready";

    FileUtil.fullyDelete(new File(output));

    String[] args = { "-input", input, "-output", output, "-jobname", jobname, "-cubename", cubename };
    assertEquals("Job failed", 0, ToolRunner.run(conf, new RangeKeyDistributionJob(), args));
}