List of usage examples for org.apache.hadoop.fs.FileUtil#fullyDelete
public static boolean fullyDelete(final File dir)
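fullyDelete recursively deletes a local directory, including all of its contents, and returns true only when everything was removed. A minimal sketch of the call in isolation (the scratch path below is illustrative, not taken from the examples that follow):

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.fs.FileUtil;

public class FullyDeleteSketch {
    public static void main(String[] args) throws IOException {
        // Illustrative scratch directory with one file inside it.
        File dir = new File("./target/fully-delete-sketch");
        dir.mkdirs();
        new File(dir, "data.txt").createNewFile();

        // Recursively removes the directory and all of its contents;
        // returns true only if the delete fully succeeded.
        boolean deleted = FileUtil.fullyDelete(dir);
        System.out.println("deleted = " + deleted);
    }
}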
From source file:MapReduce.SentimentsPerTimezone.java
public static void main(String args[]) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new HBaseConfiguration();
    conf.addResource(TweetUtils.HBASE_CONF);
    Job job = Job.getInstance(conf, "Device count per country");
    job.setJarByClass(SentimentsPerTimezone.class);
    Scan sc = new Scan();
    sc.setCaching(500);
    sc.setCacheBlocks(false);
    TableMapReduceUtil.initTableMapperJob("twitteruser", // input table
            sc, // Scan instance to control CF and attribute selection
            SentimentsPerTimezone.MapClass.class, // mapper class
            Text.class, // mapper output key
            LongWritable.class, // mapper output value
            job);
    job.setMapperClass(SentimentsPerTimezone.MapClass.class);
    job.setReducerClass(SentimentsPerTimezone.ReducerClass.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);
    String dest = TweetUtils.OUTPUT_PREFIX + "SentimentsPerTimezone";
    if (args.length > 0) {
        dest = args[0];
    }
    // Clear any previous local output so the job can write a fresh directory.
    File destination = new File(dest);
    FileUtil.fullyDelete(destination);
    FileOutputFormat.setOutputPath(job, new Path(dest));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
From source file:MapReduce.TopUserMentions.java
public static void main(String[] args) throws Exception { Configuration conf = new HBaseConfiguration(); conf.addResource(TweetUtils.HBASE_CONF); Job job = Job.getInstance(conf, "Top User Mentions"); job.setJarByClass(TopUserMentions.class); Scan sc = new Scan(); sc.setCaching(500);//from ww w. j av a 2 s . c om sc.setCacheBlocks(false); TableMapReduceUtil.initTableMapperJob("tweetdata", // input table sc, // Scan instance to control CF and attribute selection MapClass.class, // mapper class Text.class, // mapper output key LongWritable.class, // mapper output value job); job.setMapperClass(MapClass.class); job.setReducerClass(ReducerClass.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(LongWritable.class); String dest = TweetUtils.OUTPUT_PREFIX + "TopUserMentions"; if (args.length > 0) { dest = args[0]; } File destination = new File(dest); FileUtil.fullyDelete(destination); FileOutputFormat.setOutputPath(job, new Path(dest)); System.exit(job.waitForCompletion(true) ? 0 : 1); }
From source file:MapReduce.TweetCountPerState.java
public static void main(String[] args) throws Exception { Configuration conf = new HBaseConfiguration(); conf.addResource(TweetUtils.HBASE_CONF); Job job = Job.getInstance(conf, "Tweets Per State"); job.setJarByClass(TweetCountPerState.class); Scan sc = new Scan(); sc.setCaching(500);/*from ww w . j av a 2 s . c o m*/ sc.setCacheBlocks(false); TableMapReduceUtil.initTableMapperJob("tweetdata", // input table sc, // Scan instance to control CF and attribute selection MapClass.class, // mapper class Text.class, // mapper output key LongWritable.class, // mapper output value job); job.setMapperClass(MapClass.class); job.setReducerClass(ReducerClass.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(LongWritable.class); String dest = TweetUtils.OUTPUT_PREFIX + "TweetsPerState"; if (args.length > 0) { dest = args[0]; } File destination = new File(dest); FileUtil.fullyDelete(destination); FileOutputFormat.setOutputPath(job, new Path(dest)); System.exit(job.waitForCompletion(true) ? 0 : 1); }
From source file:MapReduce.UsersPerTimeZone.java
public static void main(String[] args) throws Exception { Configuration conf = new HBaseConfiguration(); conf.addResource(TweetUtils.HBASE_CONF); Job job = Job.getInstance(conf, "Users Per Time Zone"); job.setJarByClass(UsersPerTimeZone.class); Scan sc = new Scan(); sc.setCaching(500);// ww w . jav a 2 s . c o m sc.setCacheBlocks(false); TableMapReduceUtil.initTableMapperJob("twitteruser", // input table sc, // Scan instance to control CF and attribute selection MapClass.class, // mapper class Text.class, // mapper output key LongWritable.class, // mapper output value job); job.setMapperClass(MapClass.class); job.setReducerClass(ReducerClass.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(LongWritable.class); String dest = TweetUtils.OUTPUT_PREFIX + "UsersPerTimeZone"; if (args.length > 0) { dest = args[0]; } File destination = new File(dest); FileUtil.fullyDelete(destination); FileOutputFormat.setOutputPath(job, new Path(dest)); System.exit(job.waitForCompletion(true) ? 0 : 1); }
From source file:net.sf.katta.indexing.IndexerJobTest.java
License:Apache License
@Test
public void testStartJob() throws Exception {
    IndexerJob indexerJob = new IndexerJob();
    File tmp = new File("./build/extras/indexing/tmp/IndexerJobTest");
    tmp.mkdirs();
    File in = new File(tmp, "in");
    File out = new File(tmp, "out");
    FileUtil.fullyDelete(out);
    String alicePath = SearchPathUtil.findPath("sample-data/texts/alice.txt",
            "../../sample-data/texts/alice.txt");
    String sampleText = SequenceFileCreator.getSampleText(alicePath);
    SequenceFileCreator creator = new SequenceFileCreator();
    creator.create(in.getAbsolutePath(), sampleText, 100000);
    indexerJob.startIndexer(in.getAbsolutePath(), out.getAbsolutePath(), 5);
}
From source file:net.sf.katta.node.NodeMockTest.java
License:Apache License
@Before
public void setUp() throws IOException {
    File shardFolder = new NodeConfiguration().getShardFolder();
    FileUtil.fullyDelete(shardFolder);
    when(_protocol.publishNode(eq(_node), (NodeMetaData) notNull())).thenReturn(_queue);
}
From source file:org.apache.ambari.view.filebrowser.FilebrowserTest.java
License:Apache License
@Before
public void setUp() throws Exception {
    handler = createNiceMock(ViewResourceHandler.class);
    context = createNiceMock(ViewContext.class);
    httpHeaders = createNiceMock(HttpHeaders.class);
    uriInfo = createNiceMock(UriInfo.class);
    properties = new HashMap<String, String>();
    File baseDir = new File("./target/hdfs/" + "FilebrowserTest").getAbsoluteFile();
    FileUtil.fullyDelete(baseDir);
    Configuration conf = new Configuration();
    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
    conf.set("hadoop.proxyuser." + System.getProperty("user.name") + ".groups", "*");
    conf.set("hadoop.proxyuser." + System.getProperty("user.name") + ".hosts", "*");
    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
    hdfsCluster = builder.build();
    String hdfsURI = hdfsCluster.getURI() + "/";
    properties.put("webhdfs.url", hdfsURI);
    expect(context.getProperties()).andReturn(properties).anyTimes();
    expect(context.getUsername()).andReturn(System.getProperty("user.name")).anyTimes();
    replay(handler, context, httpHeaders, uriInfo);
    fileBrowserService = getService(FileBrowserService.class, handler, context);
    FileOperationService.MkdirRequest request = new FileOperationService.MkdirRequest();
    request.path = "/tmp";
    fileBrowserService.fileOps().mkdir(request);
}
From source file:org.apache.ambari.view.hive.BaseHiveTest.java
License:Apache License
@BeforeClass
public static void startUp() throws Exception {
    File baseDir = new File(DATA_DIRECTORY).getAbsoluteFile();
    FileUtil.fullyDelete(baseDir);
}
From source file:org.apache.ambari.view.hive.HDFSTest.java
License:Apache License
@BeforeClass
public static void startUp() throws Exception {
    BaseHiveTest.startUp(); // super
    File hdfsDir = new File("./target/HiveTest/hdfs/").getAbsoluteFile();
    FileUtil.fullyDelete(hdfsDir);
    Configuration conf = new Configuration();
    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsDir.getAbsolutePath());
    conf.set("hadoop.proxyuser." + System.getProperty("user.name") + ".groups", "*");
    conf.set("hadoop.proxyuser." + System.getProperty("user.name") + ".hosts", "*");
    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
    hdfsCluster = builder.build();
    hdfsURI = hdfsCluster.getURI().toString();
}
From source file:org.apache.ambari.view.pig.HDFSTest.java
License:Apache License
@BeforeClass
public static void startUp() throws Exception {
    BasePigTest.startUp(); // super
    File hdfsDir = new File("./target/PigTest/hdfs/").getAbsoluteFile();
    FileUtil.fullyDelete(hdfsDir);
    Configuration conf = new Configuration();
    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsDir.getAbsolutePath());
    conf.set("hadoop.proxyuser." + System.getProperty("user.name") + ".groups", "*");
    conf.set("hadoop.proxyuser." + System.getProperty("user.name") + ".hosts", "*");
    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
    hdfsCluster = builder.build();
    hdfsURI = hdfsCluster.getURI().toString();
    hdfsCluster.getFileSystem().mkdir(new Path("/tmp"), FsPermission.getDefault());
    hdfsCluster.getFileSystem().setPermission(new Path("/tmp"),
            new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
}
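Tests that build a MiniDFSCluster this way typically tear it down again after the suite, and fullyDelete is a natural way to drop the cluster's on-disk state. A hedged sketch of such a teardown (the method name and reuse of the hdfsCluster field are assumptions, not code from the sources above):

@AfterClass
public static void shutDown() {
    if (hdfsCluster != null) {
        hdfsCluster.shutdown(); // stop the in-process HDFS cluster (assumed field from the setup above)
    }
    // Drop the cluster's working directory so the next run starts clean.
    FileUtil.fullyDelete(new File("./target/PigTest/hdfs/").getAbsoluteFile());
}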