Example usage for org.apache.hadoop.fs FileUtil fullyDelete

List of usage examples for org.apache.hadoop.fs FileUtil fullyDelete

Introduction

On this page you can find example usage for org.apache.hadoop.fs FileUtil fullyDelete.

Prototype

public static boolean fullyDelete(final File dir) 

Document

Delete a directory and all its contents. If the method returns false, the directory may be only partially deleted.
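
Before the collected examples, here is a minimal, self-contained sketch of a typical call, assuming hadoop-common is on the classpath. The FullyDeleteExample class name and the scratch-directory path are illustrative and not taken from any of the sources below.

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.fs.FileUtil;

public class FullyDeleteExample {
    public static void main(String[] args) throws IOException {
        // Illustrative scratch directory; substitute the directory you actually want to remove.
        File scratchDir = new File(System.getProperty("java.io.tmpdir"), "fully-delete-example");
        scratchDir.mkdirs();
        new File(scratchDir, "data.txt").createNewFile();

        // fullyDelete removes the directory and everything beneath it on the local file system,
        // returning true only if the deletion fully succeeded.
        boolean deleted = FileUtil.fullyDelete(scratchDir);
        System.out.println("Deleted " + scratchDir + ": " + deleted);
    }
}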

Usage

From source file: ParascaleFsTestCase.java

License: Apache License

/**
 * Removes the emulated mount directory for the {@link ParascaleFileSystem}
 *
 * @return <code>true</code> if the operation was successful otherwise
 *         <code>false</code>
 *
 * @throws IOException
 */
protected boolean rmMountDir() throws IOException {

    return FileUtil.fullyDelete(getMountDir());
}

From source file: com.alexholmes.hadooputils.combine.avro.mapred.WordCountUtil.java

License: Apache License

public static void writeLinesFile() throws IOException {
    FileUtil.fullyDelete(DIR);
    DatumWriter<Utf8> writer = new GenericDatumWriter<Utf8>();
    DataFileWriter<Utf8> out = new DataFileWriter<Utf8>(writer);
    LINES_FILE.getParentFile().mkdirs();
    out.create(Schema.create(Schema.Type.STRING), LINES_FILE);
    for (String line : LINES)
        out.append(new Utf8(line));
    out.close();
}

From source file: com.alexholmes.hadooputils.combine.avro.mapred.WordCountUtil.java

License: Apache License

public static void writeLinesBytesFile() throws IOException {
    FileUtil.fullyDelete(DIR);
    DatumWriter<ByteBuffer> writer = new GenericDatumWriter<ByteBuffer>();
    DataFileWriter<ByteBuffer> out = new DataFileWriter<ByteBuffer>(writer);
    LINES_FILE.getParentFile().mkdirs();
    out.create(Schema.create(Schema.Type.BYTES), LINES_FILE);
    for (String line : LINES)
        out.append(ByteBuffer.wrap(line.getBytes("UTF-8")));
    out.close();
}

From source file: com.alexholmes.hadooputils.combine.avro.mapred.WordCountUtil.java

License: Apache License

public static void writeLinesTextFile() throws IOException {
    FileUtil.fullyDelete(DIR);
    LINES_FILE.getParentFile().mkdirs();
    PrintStream out = new PrintStream(LINES_TEXT_FILE);
    for (String line : LINES)
        out.println(line);
    out.close();
}

From source file: com.chinamobile.bcbsp.util.RunJar.java

License: Apache License

/**
 * Run a BC-BSP job jar. If the main class is not in the jar's manifest, then
 * it must be provided on the command line.
 *
 * @param args the job jar path, an optional main class name, and the remaining program arguments
 */
public static void main(String[] args) throws Throwable {
    String usage = "Usage: bcbsp jar <jar> [mainClass] args...";
    if (args.length < 1) {
        System.err.println(usage);
        System.exit(-1);
    }

    int firstArg = 0;
    String fileName = args[firstArg++];
    File file = new File(fileName);
    String mainClassName = null;

    JarFile jarFile = new JarFile(fileName);
    Manifest manifest = jarFile.getManifest();
    if (manifest != null) {
        mainClassName = manifest.getMainAttributes().getValue("Main-Class");
    }
    jarFile.close();
    if (mainClassName == null) {
        if (args.length < 2) {
            System.err.println(usage);
            System.exit(-1);
        }
        mainClassName = args[firstArg++];
    }
    mainClassName = mainClassName.replaceAll("/", ".");
    final File workDir = File.createTempFile("bcbsp-unjar", "");
    workDir.delete();
    workDir.mkdirs();
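    // Register a shutdown hook so the temporary unjar directory is removed on exit.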
    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
            try {
                FileUtil.fullyDelete(workDir);
            } catch (IOException e) {
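                // Best-effort cleanup on JVM shutdown; ignore failures here.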
            }
        }
    });
    unJar(file, workDir);
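    // Build the classpath from the unpacked jar: its root, the jar itself, classes/, and any bundled lib/ jars.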
    List<URL> classPath = new ArrayList<URL>();
    classPath.add(new File(workDir + "/").toURI().toURL());
    classPath.add(file.toURI().toURL());
    classPath.add(new File(workDir, "classes/").toURI().toURL());
    File[] libs = new File(workDir, "lib").listFiles();
    if (libs != null) {
        for (int i = 0; i < libs.length; i++) {
            classPath.add(libs[i].toURI().toURL());
        }
    }
    ClassLoader loader = new URLClassLoader(classPath.toArray(new URL[0]));
    Thread.currentThread().setContextClassLoader(loader);
    Class<?> mainClass = loader.loadClass(mainClassName);
    Method main = mainClass.getMethod("main", new Class[] { Array.newInstance(String.class, 0).getClass() });
    String[] newArgs = Arrays.asList(args).subList(firstArg, args.length).toArray(new String[0]);
    try {
        main.invoke(null, new Object[] { newArgs });
    } catch (InvocationTargetException e) {
        throw e.getTargetException();
    }
}

From source file: com.cloudera.cdk.data.hcatalog.TestHCatalogDatasetRepository.java

License: Apache License

@Test
public void testManagedTable() throws IOException {
    File tableDir = new File("/tmp/test/user/hive/warehouse/" + TABLE_NAME);
    FileUtil.fullyDelete(tableDir); // clear out possible previous failed runs
    Assert.assertFalse("Data directory should not exist before test", tableDir.exists());
    repo = new HCatalogDatasetRepository();

    Dataset ds = repo.create(TABLE_NAME, new DatasetDescriptor.Builder().schema(USER_SCHEMA_URL).get());
    Assert.assertTrue("Data directory should exist after dataset creation", tableDir.exists());

    writeTestUsers(ds, 10);
    checkTestUsers(ds, 10);
    Assert.assertTrue("Data directory should exist after writing", tableDir.exists());

    repo.delete(TABLE_NAME);
    Assert.assertFalse("Data directory should not exist after dropping", tableDir.exists());
}

From source file: com.cloudera.cdk.morphline.hadoop.core.DownloadHdfsFileTest.java

License: Apache License

@After
public void tearDown() throws IOException {
    if (dst != null) {
        if (isDir) {
            FileUtil.fullyDelete(dst.getParentFile());
        } else {
            FileUtil.fullyDelete(dst);
        }
    }
    fileSystem.delete(testDirectory, true);
}

From source file: com.cloudera.cdk.morphline.hadoop.core.DownloadHdfsFileTest.java

License: Apache License

@Test
public void testBasic() throws IOException {
    String msg = "hello world";

    // setup: copy a file to HDFS to prepare inputFile    
    Path inputFile = fileSystem.makeQualified(new Path(testDirectory, fileName));
    FSDataOutputStream out = fileSystem.create(inputFile);
    IOUtils.copyBytes(new ByteArrayInputStream(msg.getBytes(Charsets.UTF_8)), out, fileSystem.getConf());
    out.close();

    File cwd = Files.createTempDir().getAbsoluteFile();
    if (isDir) {
        dst = new File(cwd, testDirectory.getName() + "/" + inputFile.getName());
        inputFile = inputFile.getParent();
    } else {
        dst = new File(cwd, inputFile.getName());
    }
    Assert.assertFalse(dst.exists());
    new File(cwd, fileName).mkdirs(); // will be auto deleted!
    Files.write("wrong msg", new File(new File(cwd, fileName), fileName), Charsets.UTF_8); // will be auto deleted!

    Command morphline = createMorphline("test-morphlines/testDownloadHdfsFile", inputFile, cwd);
    Assert.assertTrue(morphline.process(new Record()));
    Assert.assertEquals(msg, Files.toString(dst, Charsets.UTF_8));
    if (isDir) {
        FileUtil.fullyDelete(dst.getParentFile());
    } else {
        FileUtil.fullyDelete(dst);
    }
    Assert.assertTrue(fileSystem.exists(inputFile));
    Assert.assertTrue(FileUtil.fullyDelete(cwd));

    // verify that subsequent calls with same inputFile won't copy the file again (to prevent races)
    morphline = createMorphline("test-morphlines/downloadHdfsFile", inputFile, cwd);
    Assert.assertTrue(morphline.process(new Record()));
    Assert.assertFalse(dst.exists());
    Assert.assertTrue(morphline.process(new Record()));
    Assert.assertFalse(dst.exists());
    Assert.assertFalse(cwd.exists());

    Assert.assertTrue(fileSystem.delete(inputFile, true));

    try {
        morphline = createMorphline("test-morphlines/downloadHdfsFile", new Path("nonExistingInputFile"), cwd);
        Assert.fail("failed to detect non-existing input file");
    } catch (MorphlineCompilationException e) {
        Assert.assertTrue(e.getCause() instanceof FileNotFoundException);
    }
    Assert.assertFalse(dst.exists());
}

From source file: com.cloudera.cdk.tools.TestCombinedLogFormatConverter.java

License: Apache License

@Before
public void setUp() {
    FileUtil.fullyDelete(TEST_DIR);
}

From source file: com.cloudera.science.quince.LoadVariantsToolIT.java

License: Open Source License

@Test
public void testSmallAvro() throws Exception {

    String baseDir = "target/datasets";

    FileUtil.fullyDelete(new File(baseDir));

    String sampleGroup = "sample1";
    String input = "datasets/variants_avro";
    String output = "dataset:file:target/datasets/variants_flat_locuspart";

    int exitCode = tool.run(new String[] { "--sample-group", sampleGroup, input, output });

    assertEquals(0, exitCode);
    File partition = new File(baseDir, "variants_flat_locuspart/chr=1/pos=0/sample_group=sample1");
    assertTrue(partition.exists());

    File[] dataFiles = partition.listFiles(new FileFilter() {
        @Override
        public boolean accept(File pathname) {
            return !pathname.getName().startsWith(".");
        }
    });

    assertEquals(1, dataFiles.length);
    assertTrue(dataFiles[0].getName().endsWith(".parquet"));

    // loading into the same sample group again should fail
    exitCode = tool.run(new String[] { "--sample-group", sampleGroup, input, output });
    assertEquals(1, exitCode);

    // unless the overwrite option is specified
    exitCode = tool.run(new String[] { "--overwrite", "--sample-group", sampleGroup, input, output });
    assertEquals(0, exitCode);

    // loading into a new sample group should always succeed
    exitCode = tool.run(new String[] { "--sample-group", "sample2", input, output });
    assertEquals(0, exitCode);

}