Example usage for org.apache.hadoop.fs FileUtil fullyDelete

List of usage examples for org.apache.hadoop.fs FileUtil fullyDelete

Introduction

On this page you can find example usages of org.apache.hadoop.fs FileUtil fullyDelete.

Prototype

public static boolean fullyDelete(final File dir) 

Document

Delete a directory and all its contents.
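
Before the project examples below, here is a minimal standalone sketch, assuming a throwaway scratch directory under java.io.tmpdir (the directory name is purely illustrative); only the FileUtil.fullyDelete(File) call itself comes from the prototype above.

import java.io.File;

import org.apache.hadoop.fs.FileUtil;

public class FullyDeleteExample {
    public static void main(String[] args) {
        // Hypothetical scratch directory used only for illustration.
        File scratchDir = new File(System.getProperty("java.io.tmpdir"), "fullydelete-example");
        new File(scratchDir, "subdir").mkdirs();

        // fullyDelete removes the directory and everything under it,
        // returning true only if the deletion fully succeeded.
        boolean deleted = FileUtil.fullyDelete(scratchDir);
        System.out.println("Deleted " + scratchDir + ": " + deleted);
    }
}

In the examples that follow, the same call is typically used to wipe a MiniDFSCluster base directory or a local output file so that stale data from an earlier run cannot leak into the current one.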

Usage

From source file:org.dkpro.bigdata.io.hadoop.HdfsResourceLoaderLocatorTest.java

License:Apache License

@Before
public void startCluster() throws Exception {
    // Start dummy HDFS
    File target = folder.newFolder("hdfs");
    hadoopTmp = folder.newFolder("hadoop");

    File baseDir = new File(target, "hdfs").getAbsoluteFile();
    // clear any data left over from a previous test run before starting the mini cluster
    FileUtil.fullyDelete(baseDir);
    Configuration conf = new Configuration();
    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
    conf.set("hadoop.tmp.dir", hadoopTmp.getAbsolutePath());
    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
    hdfsCluster = builder.build();
    hdfsCluster.waitActive();
}

From source file:org.elasticsearch.plugin.hadoop.hdfs.MiniHDFSCluster.java

License:Apache License

@SuppressForbidden(reason = "Hadoop is messy")
public static void main(String[] args) throws Exception {
    FileUtil.fullyDelete(new File(System.getProperty("test.build.data", "build/test/data"), "dfs/"));
    // MiniHadoopClusterManager.main(new String[] { "-nomr" });
    Configuration cfg = new Configuration();
    cfg.set(DataNode.DATA_DIR_PERMISSION_KEY, "666");
    cfg.set("dfs.replication", "0");
    MiniDFSCluster dfsCluster = new MiniDFSCluster(cfg, 1, true, null);
    FileSystem fs = dfsCluster.getFileSystem();
    System.out.println(fs.getClass());
    System.out.println(fs.getUri());
    System.out.println(dfsCluster.getHftpFileSystem().getClass());

    // dfsCluster.shutdown();
}

From source file:org.lilyproject.hadooptestfw.fork.MiniZooKeeperCluster.java

License:Apache License

private void recreateDir(File dir) throws IOException {
    // Lily change: take clearData flag into account
    if (clearData && dir.exists()) {
        FileUtil.fullyDelete(dir);
    }
    try {
        dir.mkdirs();
    } catch (SecurityException e) {
        throw new IOException("creating dir: " + dir, e);
    }
}

From source file:org.springframework.data.hadoop.impala.mapreduce.MapReduceCommands.java

License:Apache License

/**
 * @param jarFileName
 * @param mainClassName
 * @param args
 * @throws Throwable 
 */
public void runJar(final String jarFileName, final String mainClassName, final String args) throws Throwable {
    File file = new File(jarFileName);
    File tmpDir = new File(new Configuration().get("hadoop.tmp.dir"));
    String os = System.getProperty("os.name").toLowerCase();
    if (os.contains("win")) {
        tmpDir = new File(System.getProperty("java.io.tmpdir"), "impala");
    }
    tmpDir.mkdirs();
    if (!tmpDir.isDirectory()) {
        LOG.severe("Mkdirs failed to create " + tmpDir);
    }

    try {
        final File workDir = File.createTempFile("hadoop-unjar", "", tmpDir);
        workDir.delete();
        workDir.mkdirs();
        if (!workDir.isDirectory()) {
            LOG.severe("Mkdirs failed to create " + workDir);
            return;
        }

        Runtime.getRuntime().addShutdownHook(new Thread() {
            public void run() {
                try {
                    FileUtil.fullyDelete(workDir);
                } catch (IOException e) {
                    // ignore: best-effort cleanup during JVM shutdown
                }
            }
        });

        unJar(file, workDir);

        ArrayList<URL> classPath = new ArrayList<URL>();

        //This is to add hadoop configuration dir to classpath so that 
        //user's configuration can be accessed when running the jar
        File hadoopConfigurationDir = new File(workDir + Path.SEPARATOR + "impala-hadoop-configuration");
        writeHadoopConfiguration(hadoopConfigurationDir, this.getHadoopConfiguration());
        classPath.add(hadoopConfigurationDir.toURL());
        //classPath.add(new File(System.getenv("HADOOP_CONF_DIR")).toURL());

        classPath.add(new File(workDir + Path.SEPARATOR).toURL());
        classPath.add(file.toURL());
        classPath.add(new File(workDir, "classes" + Path.SEPARATOR).toURL());
        File[] libs = new File(workDir, "lib").listFiles();
        if (libs != null) {
            for (int i = 0; i < libs.length; i++) {
                classPath.add(libs[i].toURL());
            }
        }
        ClassLoader loader = new URLClassLoader(classPath.toArray(new URL[0]),
                this.getClass().getClassLoader());
        Thread.currentThread().setContextClassLoader(loader);
        Class<?> mainClass = Class.forName(mainClassName, true, loader);
        Method main = mainClass.getMethod("main",
                new Class[] { Array.newInstance(String.class, 0).getClass() });
        String[] newArgs = args.split(" ");
        main.invoke(null, new Object[] { newArgs });
    } catch (Exception e) {
        if (e instanceof InvocationTargetException) {
            if (e.getCause() instanceof ExitTrappedException) {
                throw (ExitTrappedException) e.getCause();
            }
        } else {
            throw e;
        }
    }
}

From source file:org.trustedanalytics.auth.gateway.hdfs.integration.config.LocalConfiguration.java

License:Apache License

@Bean
@Qualifier(Qualifiers.CONFIGURATION)
public org.apache.hadoop.conf.Configuration initializeHdfsCluster()
        throws IOException, InterruptedException, URISyntaxException {
    File baseDir = new File("./target/hdfs/" + "testName").getAbsoluteFile();
    FileUtil.fullyDelete(baseDir);
    org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration(false);
    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
    conf.setBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY, true);
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
    MiniDFSCluster cluster = builder.build();

    UserGroupInformation.createUserForTesting("cf", new String[] { "cf" });
    UserGroupInformation.createUserForTesting("super", new String[] { "supergroup" });

    return cluster.getConfiguration(0);
}

From source file:org.trustedanalytics.cfbroker.store.hdfs.service.SimpleHdfsClientTest.java

License:Apache License

@BeforeClass
public static void initialize() throws IOException {
    File baseDir = new File("./target/hdfs/" + "testName").getAbsoluteFile();
    FileUtil.fullyDelete(baseDir);
    Configuration conf = new Configuration(false);
    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
    conf.setBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY, true);
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
    cluster = builder.build();
    cluster.waitClusterUp();
}

From source file:smile.wide.algorithms.em.RunDistributedEM.java

License:Apache License

/**
 * Initializes the job configuration and starts the job.
 * @see org.apache.hadoop.util.Tool#run(java.lang.String[])
 */
@Override
public int run(String[] params) throws Exception {
    for (int i = 0; i < params.length; i++) {
        System.out.println("parameter " + i + ":" + params[i]);
    }

    initParams(params);

    if (getParam(ConfKeys.COLUMNS) == null) {
        conf.set(ConfKeys.COLUMNS, readFirstLine(FileSystem.get(conf), getParam(ConfKeys.DATA_FILE)));
        conf.set(ConfKeys.IGNORE_FIRST_LINE, "true");
    }

    if (getParam(ConfKeys.MISSING_TOKEN) == null) {
        conf.set(ConfKeys.MISSING_TOKEN, "*");
    }

    if (getParam(ConfKeys.SEPARATOR) == null) {
        conf.set(ConfKeys.SEPARATOR, " ");
    } else {
        String s = getParam(ConfKeys.SEPARATOR);
        if (Character.isDigit(s.charAt(0))) {
            int code = Integer.parseInt(s);
            conf.set(ConfKeys.SEPARATOR, new String(Character.toChars(code)));
        }
    }

    System.out.println("Configuration map:");
    Iterator<Entry<String, String>> iter = conf.iterator();
    while (iter.hasNext()) {
        Entry<String, String> e = iter.next();
        if (e.getKey().startsWith("em.")) {
            System.out.println(e.getKey() + "=\"" + e.getValue() + "\"");
        }
    }

    Job job = new Job(conf);
    job.setJobName("smile-wide-em, iter " + iteration + " on " + getParam(ConfKeys.INITIAL_NET_FILE));

    FileInputFormat.addInputPath(job, new Path(getParam(ConfKeys.DATA_FILE)));
    Path outputPath = new Path(getParam(ConfKeys.STAT_FILE));
    FileOutputFormat.setOutputPath(job, outputPath);
    outputPath.getFileSystem(conf).delete(outputPath, true);

    job.setJarByClass(RunDistributedEM.class);
    job.setMapperClass(StatEstimator.class);
    job.setCombinerClass(StatCombiner.class);
    job.setReducerClass(StatNormalizer.class);
    job.setMapOutputKeyClass(IntWritable.class);
    job.setMapOutputValueClass(DoubleArrayWritable.class);
    job.waitForCompletion(true);

    FileSystem dfs = FileSystem.get(conf);
    System.out.println(dfs);
    LocalFileSystem lfs = LocalFileSystem.getLocal(conf);
    System.out.println(lfs);

    String localStatFile = getParam(ConfKeys.LOCAL_STAT_FILE);
    // remove any previous local copy of the statistics file before merging new results into it
    FileUtil.fullyDelete(new File(localStatFile));
    FileUtil.copyMerge(dfs, new Path(getParam(ConfKeys.STAT_FILE)), lfs, new Path(localStatFile), false, conf,
            null);

    score = updateParameters(getParam(ConfKeys.WORK_NET_FILE), localStatFile);
    System.out.println(">>>>>>  FINISHED ITERATION:" + iteration + ", logLik=" + score);
    return 0;
}

From source file:Utils.GeoCoordinates.java

public static void main(String args[]) {
    ArrayList<String> coord = new ArrayList<>();
    String coordinates = getCoordinatesJS("Alabama", "Tweets", String.valueOf(20));
    coord.add(coordinates);

    coordinates = getCoordinatesJS("Massachusetts", "Tweets", String.valueOf(100));
    coord.add(coordinates);

    coordinates = getCoordinatesJS("New York", "Tweets", String.valueOf(90));
    coord.add(coordinates);

    coordinates = getCoordinatesJS("California", "Tweets", String.valueOf(80));
    coord.add(coordinates);

    coordinates = getCoordinatesJS("Texas", "Tweets", String.valueOf(45));
    coord.add(coordinates);

    coordinates = getCoordinatesJS("Ohio", "Tweets", String.valueOf(15));
    coord.add(coordinates);
    File outputFile = new File(TweetUtils.OUTPUT_PREFIX + "Test/test.js");
    if (outputFile.exists()) {
        FileUtil.fullyDelete(outputFile);
    }
    try {
        outputFile.getParentFile().mkdirs();
        outputFile.createNewFile();
    } catch (Exception ex) {
        System.out.println("File creation failed");
    }
    try {
        PrintWriter writer = new PrintWriter(outputFile);
        writer.println("var statesData = {\"type\":\"FeatureCollection\",\"features\":[");
        for (int i = 0; i < coord.size() - 1; i++) {
            writer.println(coord.get(i) + ",");
        }
        writer.println(coord.get(coord.size() - 1)); //Print last without comma
        writer.println("]};");
        writer.close();
    } catch (Exception ex) {
        System.out.println("Error Reading Data");
    }
}