Example usage for org.apache.hadoop.fs FileUtil fullyDelete

List of usage examples for org.apache.hadoop.fs FileUtil fullyDelete

Introduction

On this page you can find example usage for org.apache.hadoop.fs FileUtil fullyDelete.

Prototype

public static boolean fullyDelete(final File dir) 

Document

Delete a directory and all its contents.
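
The call returns true only if the directory and everything under it were removed; on false the directory may be only partially deleted. Below is a minimal, self-contained sketch of calling it directly (not taken from the examples that follow); the scratch-directory path is purely illustrative:

import java.io.File;

import org.apache.hadoop.fs.FileUtil;

public class FullyDeleteExample {
    public static void main(String[] args) {
        // Hypothetical scratch directory; adjust the path to your environment.
        File scratchDir = new File("target/scratch-dir");
        // Recursively delete the directory and all of its contents.
        boolean deleted = FileUtil.fullyDelete(scratchDir);
        System.out.println("Fully deleted " + scratchDir + ": " + deleted);
    }
}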

Usage

From source file: io.confluent.connect.hdfs.TestWithSecureMiniDFSCluster.java

License: Apache License

@BeforeClass
public static void initKdc() throws Exception {
    baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"));
    FileUtil.fullyDelete(baseDir);
    assertTrue(baseDir.mkdirs());
    Properties kdcConf = MiniKdc.createConf();
    kdc = new MiniKdc(kdcConf, baseDir);
    kdc.start();

    File keytabFile = new File(baseDir, "hdfs" + ".keytab");
    keytab = keytabFile.getAbsolutePath();
    kdc.createPrincipal(keytabFile, "hdfs" + "/localhost", "HTTP/localhost");
    hdfsPrincipal = "hdfs" + "/localhost@" + kdc.getRealm();
    spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();

    keytabFile = new File(baseDir, "connect-hdfs" + ".keytab");
    connectorKeytab = keytabFile.getAbsolutePath();
    kdc.createPrincipal(keytabFile, "connect-hdfs/localhost");
    connectorPrincipal = "connect-hdfs/localhost@" + kdc.getRealm();
}

From source file: io.confluent.connect.hdfs.TestWithSecureMiniDFSCluster.java

License: Apache License

@AfterClass
public static void shutdownKdc() {
    if (kdc != null) {
        kdc.stop();
    }
    FileUtil.fullyDelete(baseDir);
}

From source file: io.gzinga.hadoop.TestSplittableGZipCodec.java

License: Apache License

@Test
public void testSplittableGZipCodec() {
    try {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "file:///");
        FileSystem fs = FileSystem.get(conf);
        fs.mkdirs(new Path("target/test"));
        GZipOutputStreamRandomAccess gzip = new GZipOutputStreamRandomAccess(
                fs.create(new Path("target/test/testfile1.gz")));
        String str = "This is line\n";
        for (int i = 1; i <= 10000; i++) {
            gzip.write(str.getBytes());
            if (i % 100 == 0) {
                gzip.addOffset(i / 100l);
            }
        }
        Assert.assertEquals(gzip.getOffsetMap().size(), 100);
        gzip.close();

        conf.set("mapreduce.framework.name", "local");
        conf.set("io.compression.codecs", "io.gzinga.hadoop.SplittableGZipCodec");
        conf.set("mapreduce.input.fileinputformat.split.maxsize", "20000");
        Job job = new Job(conf, "word count");
        job.setJarByClass(WordCount.class);
        job.setMapperClass(WordCount.TokenizerMapper.class);
        job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path("target/test/testfile1.gz"));
        FileOutputFormat.setOutputPath(job, new Path("target/test/testfile2"));
        job.waitForCompletion(true);

        BufferedReader br = new BufferedReader(
                new InputStreamReader(fs.open(new Path("target/test/testfile2/part-r-00000"))));
        Assert.assertEquals("This\t10000", br.readLine());
        Assert.assertEquals("is\t10000", br.readLine());
        Assert.assertEquals("line\t10000", br.readLine());
        br.close();
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail();
    } finally {
        FileUtil.fullyDelete(new File("target/test/testfile2"));
        FileUtil.fullyDelete(new File("target/test/testfile1.gz"));
    }
}

From source file: it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License: Apache License

/**
 * Tests org.apache.hadoop.mapreduce.pipes.Submitter.
 *
 * @throws Exception
 */
@Test
public void testSubmitter() throws Exception {

    Configuration conf = new Configuration();

    File[] psw = cleanTokenPasswordFile();

    System.setProperty("test.build.data", "target/tmp/build/TEST_SUBMITTER_MAPPER/data");
    conf.set("hadoop.log.dir", "target/tmp");

    // prepare configuration
    Submitter.setIsJavaMapper(conf, false);
    Submitter.setIsJavaReducer(conf, false);
    Submitter.setKeepCommandFile(conf, false);
    Submitter.setIsJavaRecordReader(conf, false);
    Submitter.setIsJavaRecordWriter(conf, false);
    PipesPartitioner<IntWritable, Text> partitioner = new PipesPartitioner<IntWritable, Text>();
    partitioner.configure(conf);

    Submitter.setJavaPartitioner(conf, partitioner.getClass());

    assertEquals(PipesPartitioner.class, (Submitter.getJavaPartitioner(conf)));
    // test going to call main method with System.exit(). Change Security
    SecurityManager securityManager = System.getSecurityManager();
    // store System.out
    PrintStream oldps = System.out;
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    ExitUtil.disableSystemExit();
    // test without parameters
    try {
        System.setOut(new PrintStream(out));
        Submitter.main(new String[0]);
        fail();
    } catch (ExitUtil.ExitException e) {
        // System.exit prohibited! output message test
        assertTrue(out.toString().contains(""));
        assertTrue(out.toString().contains("bin/hadoop pipes"));
        assertTrue(out.toString().contains("[-input <path>] // Input directory"));
        assertTrue(out.toString().contains("[-output <path>] // Output directory"));
        assertTrue(out.toString().contains("[-jar <jar file> // jar filename"));
        assertTrue(out.toString().contains("[-inputformat <class>] // InputFormat class"));
        assertTrue(out.toString().contains("[-map <class>] // Java Map class"));
        assertTrue(out.toString().contains("[-partitioner <class>] // Java Partitioner"));
        assertTrue(out.toString().contains("[-reduce <class>] // Java Reduce class"));
        assertTrue(out.toString().contains("[-writer <class>] // Java RecordWriter"));
        assertTrue(out.toString().contains("[-program <executable>] // executable URI"));
        assertTrue(out.toString().contains("[-reduces <num>] // number of reduces"));
        assertTrue(out.toString().contains("[-lazyOutput <true/false>] // createOutputLazily"));

        assertTrue(out.toString()
                .contains("-conf <configuration file>     specify an application configuration file"));
        assertTrue(out.toString().contains("-D <property=value>            use value for given property"));
        assertTrue(out.toString().contains("-fs <local|namenode:port>      specify a namenode"));
        assertTrue(out.toString().contains("-jt <local|jobtracker:port>    specify a job tracker"));
        assertTrue(out.toString().contains(
                "-files <comma separated list of files>    specify comma separated files to be copied to the map reduce cluster"));
        assertTrue(out.toString().contains(
                "-libjars <comma separated list of jars>    specify comma separated jar files to include in the classpath."));
        assertTrue(out.toString().contains(
                "-archives <comma separated list of archives>    specify comma separated archives to be unarchived on the compute machines."));
    } finally {
        System.setOut(oldps);
        // restore
        System.setSecurityManager(securityManager);
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
    // test call Submitter form command line
    try {
        File fCommand = getFileCommand(null);
        String[] args = new String[20];
        File input = new File(workSpace + File.separator + "input");
        if (!input.exists()) {
            Assert.assertTrue(input.createNewFile());
        }
        File outPut = new File(workSpace + File.separator + "output");
        FileUtil.fullyDelete(outPut);

        args[0] = "-input";
        args[1] = input.getAbsolutePath();// "input";
        args[2] = "-output";
        args[3] = outPut.getAbsolutePath();// "output";
        args[4] = "-inputformat";
        args[5] = "org.apache.hadoop.mapreduce.lib.input.TextInputFormat";
        args[6] = "-map";
        args[7] = "org.apache.hadoop.mapreduce.lib.map.InverseMapper";
        args[8] = "-partitioner";
        args[9] = "it.crs4.pydoop.mapreduce.pipes.PipesPartitioner";
        args[10] = "-reduce";
        args[11] = "org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer";
        args[12] = "-writer";
        args[13] = "org.apache.hadoop.mapreduce.lib.output.TextOutputFormat";
        args[14] = "-program";
        args[15] = fCommand.getAbsolutePath();// "program";
        args[16] = "-reduces";
        args[17] = "2";
        args[18] = "-lazyOutput";
        args[19] = "lazyOutput";
        Submitter.main(args);
        fail();
    } catch (ExitUtil.ExitException e) {
        // status should be 0
        assertEquals(e.status, 0);

    } finally {
        System.setOut(oldps);
        System.setSecurityManager(securityManager);
    }

}

From source file: it.isislab.sof.core.engine.hadoop.mapreduce.mason.util.SimulationMASON.java

License: Apache License

/**
 * Method that runs a MASON simulation.
 * 
 * @param program_path
 * @param input
 * @param sim_input_path
 * @param sim_output_path
 * @param sim_home
 * @param output
 * @param conf1
 * @throws Exception
 */
public void run(String program_path, String input, String sim_input_path, String sim_output_path,
        String sim_home, OutputCollector<Text, Text> output, Configuration conf1) throws Exception {

    conf = conf1;
    String SIMULATION_NAME = conf.get("simulation.name");
    String SIMULATION_HOME = conf.get("simulation.home");
    String SIM_OUTPUT_MAPPER = conf.get("simulation.executable.output");
    String AUTHOR = conf.get("simulation.executable.author");
    String DESCRIPTION = conf.get("simulation.executable.description");

    String fileName = program_path;
    File file = new File(fileName);
    String mainClassName = null;

    JarFile jarFile;

    jarFile = new JarFile(fileName);

    Manifest manifest = jarFile.getManifest();
    if (manifest != null) {
        mainClassName = manifest.getMainAttributes().getValue(Attributes.Name.MAIN_CLASS);
    }
    jarFile.close();

    mainClassName = mainClassName.replaceAll("/", ".");

    File tmpDir = new File(new Configuration().get("hadoop.tmp.dir"));
    ensureDirectory(tmpDir);

    final File workDir = File.createTempFile("hadoop-unjar", "", tmpDir);
    if (!workDir.delete()) {
        System.err.println("Delete failed for " + workDir);
        System.exit(-1);
    }
    ensureDirectory(workDir);

    Runtime.getRuntime().addShutdownHook(new Thread() {
        public void run() {
            FileUtil.fullyDelete(workDir);
        }
    });

    unJar(file, workDir);

    ArrayList<URL> classPath = new ArrayList<URL>();
    classPath.add(new File(workDir + "/").toURI().toURL());
    classPath.add(file.toURI().toURL());
    classPath.add(new File(workDir, "classes/").toURI().toURL());
    File[] libs = new File(workDir, "lib").listFiles();
    File fileSimState = new File("sim.engine.SimState");
    classPath.add(fileSimState.toURI().toURL());

    if (libs != null) {

        for (int i = 0; i < libs.length; i++) {
            classPath.add(libs[i].toURI().toURL());

        }
    }

    ClassLoader loader = new URLClassLoader(classPath.toArray(new URL[0]));

    Thread.currentThread().setContextClassLoader(loader);

    Class<?> mainClass = Class.forName(mainClassName, true, loader);

    Constructor c = mainClass.getConstructor(long.class);
    Object obj = c.newInstance(new Object[] { System.currentTimeMillis() });

    HashMap<String, String> inputSimulation = new HashMap<String, String>();

    // string read from the .tmp file
    String line = input;

    //System.out.println(line);

    String[] aparam = line.split(";");
    String[] couple = aparam[0].split(":");
    int idInputSimulation = Integer.parseInt(couple[1]);
    couple = aparam[1].split(":");
    int rounds = Integer.parseInt(couple[1]);
    for (int i = 2; i < aparam.length; i++) {
        couple = aparam[i].split(":");
        inputSimulation.put(couple[0], couple[1]);
    }

    String output_template = conf.get("simulation.description.output.domain");

    // converts the output.xml file, keeping only its fields, into a single string to process
    String output_string_vars = XmlToText.convertOutputXmlIntoText(conf, output_template, idInputSimulation);

    ArrayList<String> outputSimulation = new ArrayList<String>();
    line = output_string_vars;
    aparam = line.split(";");
    for (int i = 0; i < aparam.length; i++) {
        String[] couple2 = aparam[i].split(":");
        outputSimulation.add(couple2[0]);

    }

    Method[] methods = mainClass.getMethods();
    String param = "";
    Object[] toSend = {};
    for (String field : inputSimulation.keySet()) {
        for (Method toInvoke : methods) {
            if (toInvoke.getName().equalsIgnoreCase("set" + field)) {
                toInvoke.setAccessible(true);
                //toInvoke.invoke(obj,inputSimulation.get(field));
                param = toInvoke.getGenericParameterTypes()[0].toString();
                if (param.equalsIgnoreCase(int.class.getName()))
                    toSend = new Object[] { Integer.valueOf(inputSimulation.get(field)) };
                else if (param.equalsIgnoreCase(double.class.getName()))
                    toSend = new Object[] { Double.valueOf(inputSimulation.get(field)) };
                else if (param.equalsIgnoreCase(long.class.getName()))
                    toSend = new Object[] { Long.valueOf(inputSimulation.get(field)) };
                else if (param.equalsIgnoreCase(short.class.getName()))
                    toSend = new Object[] { Short.valueOf(inputSimulation.get(field)) };
                else if (param.equalsIgnoreCase(float.class.getName()))
                    toSend = new Object[] { Float.valueOf(inputSimulation.get(field)) };
                toInvoke.invoke(obj, toSend);
                break;
            }
        }
    }
    SimState ss = null;
    HashMap<String, ArrayList<String>> output_collection = new HashMap<String, ArrayList<String>>();

    for (int i = 0; i < rounds; i++) {
        ss = (SimState) obj;
        ss.setSeed(System.currentTimeMillis());
        int j = 0;
        ss.start();
        int step = Integer.valueOf(inputSimulation.get("step"));
        while (j < step) {
            ss.schedule.step(ss);
            j++;

        }

        //Collect OUTPUTs
        for (String field : outputSimulation) {
            for (Method toInvoke : methods) {
                if (toInvoke.getName().equalsIgnoreCase("get" + field)) {
                    toInvoke.setAccessible(true);
                    if (output_collection.containsKey(field))
                        output_collection.get(field).add("" + toInvoke.invoke(obj, new Object[] {}).toString());
                    else {
                        ArrayList<String> l = new ArrayList<String>();
                        l.add("" + toInvoke.invoke(obj, new Object[] {}).toString());
                        output_collection.put(field, l);
                    }
                }
            }
        }
    }

    String inOutput = "";

    /*for(String field : outputSimulation){
       for (Method toInvoke : methods){
    if(toInvoke.getName().equalsIgnoreCase("get"+field)){
       toInvoke.setAccessible(true);
       inOutput+=field+":"+toInvoke.invoke(obj, new Object[]{}).toString()+";";
    }
       }
    }*/

    for (String field : output_collection.keySet()) {
        inOutput += field + ":" + getAVG(output_collection.get(field), rounds) + ";";

    }

    //Random r=new Random(System.currentTimeMillis());
    //String id=MD5(line+r.nextDouble());

    Path file_output = null;
    int id = (new String(inOutput + "" + System.currentTimeMillis())).hashCode();
    file_output = generateOutput(input, inOutput, SIM_OUTPUT_MAPPER, id, idInputSimulation, SIMULATION_NAME,
            AUTHOR, DESCRIPTION, SIMULATION_HOME);

    output.collect(new Text(file_output.toString()), new Text(""));

}

From source file: MapReduce.DeviceCountPerCountry.java

public static void main(String args[]) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new HBaseConfiguration();

    conf.addResource(TweetUtils.HBASE_CONF);
    Job job = Job.getInstance(conf, "Device Count");
    job.setJarByClass(DeviceCountPerCountry.class);

    Scan sc = new Scan();
    sc.setCaching(500);
    sc.setCacheBlocks(false);

    TableMapReduceUtil.initTableMapperJob("tweetdata", // input table
            sc, // Scan instance to control CF and attribute selection
            DeviceCountPerCountry.MapClass.class, // mapper class
            Text.class, // mapper output key
            LongWritable.class, // mapper output value
            job);

    job.setMapperClass(DeviceCountPerCountry.MapClass.class);
    job.setReducerClass(DeviceCountPerCountry.ReducerClass.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);
    String dest = TweetUtils.OUTPUT_PREFIX + "DeviceCount";
    if (args.length > 0) {
        dest = args[0];
    }
    File destination = new File(dest);
    FileUtil.fullyDelete(destination);
    FileOutputFormat.setOutputPath(job, new Path(dest));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file: MapReduce.GenderPerTimezone.java

public static void main(String args[]) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new HBaseConfiguration();
    conf.addResource(TweetUtils.HBASE_CONF);
    Job job = Job.getInstance(conf, "Gender Per Timezone");
    job.setJarByClass(GenderPerTimezone.class);

    Scan sc = new Scan();
    sc.setCaching(500);
    sc.setCacheBlocks(false);

    TableMapReduceUtil.initTableMapperJob("twittergenderprediction", // input table
            sc, // Scan instance to control CF and attribute selection
            GenderPerTimezone.MapClass.class, // mapper class
            Text.class, // mapper output key
            LongWritable.class, // mapper output value
            job);

    job.setMapperClass(GenderPerTimezone.MapClass.class);
    job.setReducerClass(GenderPerTimezone.ReducerClass.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);
    String dest = TweetUtils.OUTPUT_PREFIX + "GenderPerTimezone";
    if (args.length > 0) {
        dest = args[0];
    }
    File destination = new File(dest);
    FileUtil.fullyDelete(destination);
    FileOutputFormat.setOutputPath(job, new Path(dest));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file: MapReduce.LanguageCount.java

public static void main(String[] args) throws Exception {
    Configuration conf = new HBaseConfiguration();
    conf.addResource(TweetUtils.HBASE_CONF);
    Job job = Job.getInstance(conf, "Language Count");
    job.setJarByClass(LanguageCount.class);

    Scan sc = new Scan();
    sc.setCaching(500);
    sc.setCacheBlocks(false);

    TableMapReduceUtil.initTableMapperJob("twitteruser", // input table
            sc, // Scan instance to control CF and attribute selection
            MapClass.class, // mapper class
            Text.class, // mapper output key
            LongWritable.class, // mapper output value
            job);

    job.setMapperClass(MapClass.class);
    job.setReducerClass(ReducerClass.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    String dest = TweetUtils.OUTPUT_PREFIX + "LanguageCount";
    if (args.length > 0) {
        dest = args[0];
    }
    File destination = new File(dest);
    FileUtil.fullyDelete(destination);
    FileOutputFormat.setOutputPath(job, new Path(dest));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file: MapReduce.PopularHashtags.java

public static void main(String[] args) throws Exception {
    Configuration conf = new HBaseConfiguration();
    conf.addResource(TweetUtils.HBASE_CONF);
    Job job = Job.getInstance(conf, "Popular Hashtags");
    job.setJarByClass(PopularHashtags.class);

    Scan sc = new Scan();
    sc.setCaching(500);
    sc.setCacheBlocks(false);

    TableMapReduceUtil.initTableMapperJob("tweetdata", // input table
            sc, // Scan instance to control CF and attribute selection
            MapClass.class, // mapper class
            Text.class, // mapper output key
            LongWritable.class, // mapper output value
            job);

    job.setMapperClass(MapClass.class);
    job.setReducerClass(ReducerClass.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    String dest = TweetUtils.OUTPUT_PREFIX + "PopularHashtags";
    if (args.length > 0) {
        dest = args[0];
    }
    File destination = new File(dest);
    FileUtil.fullyDelete(destination);
    FileOutputFormat.setOutputPath(job, new Path(dest));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file: MapReduce.SentimentAnalysis.java

public static void main(String[] args) throws Exception {
    Configuration conf = new HBaseConfiguration();
    conf.addResource(TweetUtils.HBASE_CONF);
    Job job = Job.getInstance(conf, "Sentiment Analysis");
    job.setJarByClass(SentimentAnalysis.class);

    Scan sc = new Scan();
    sc.setCaching(500);
    sc.setCacheBlocks(false);

    TableMapReduceUtil.initTableMapperJob("tweetdata", // input table
            sc, // Scan instance to control CF and attribute selection
            MapClass.class, // mapper class
            Text.class, // mapper output key
            LongWritable.class, // mapper output value
            job);

    job.setMapperClass(MapClass.class);
    job.setReducerClass(ReducerClass.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    String dest = TweetUtils.OUTPUT_PREFIX + "SentimentAnalysis";
    if (args.length > 0) {
        dest = args[0];
    }
    File destination = new File(dest);
    FileUtil.fullyDelete(destination);
    FileOutputFormat.setOutputPath(job, new Path(dest));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}