Example usage for org.apache.hadoop.fs FileSystem get

List of usage examples for org.apache.hadoop.fs FileSystem get

Introduction

On this page you can find example usage for org.apache.hadoop.fs FileSystem get.

Prototype

public static FileSystem get(Configuration conf) throws IOException 

Document

Returns the configured FileSystem implementation.
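
Before the examples taken from real projects, here is a minimal, self-contained sketch of the call. The class name and the path used below are placeholders for illustration only; they do not come from any of the source files on this page.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FileSystemGetExample {
    public static void main(String[] args) throws IOException {
        // Picks up core-site.xml / hdfs-site.xml from the classpath, if present.
        Configuration conf = new Configuration();

        // Returns the FileSystem configured by fs.defaultFS (HDFS, local, etc.).
        FileSystem fs = FileSystem.get(conf);

        // Placeholder path used only to demonstrate a simple call on the FileSystem.
        Path path = new Path("/tmp/example.txt");
        System.out.println(path + " exists: " + fs.exists(path));
    }
}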

Usage

From source file:br.com.lassal.nqueens.grid.job.NQueenCounter.java

/**
 * Invocation form:
 * <> {number of queens} {root directory} -F
 *
 * @param args command-line arguments: number of queens, working (root) directory, and optional -F flag
 * @return 0 on success, 1 on failure
 * @throws Exception
 */
public int run(String[] args) throws Exception {
    // Configuration processed by ToolRunner
    Configuration conf = getConf();

    // Create a Job using the processed conf
    Job job = new Job(conf, "nqueens-counter");
    job.setJarByClass(NQueenCounter.class);

    int queensNumber = Integer.parseInt(args[0]);
    String workingFolder = args.length >= 2 ? args[1] : null;
    boolean isFinal = args.length >= 3 && "-F".equals(args[2]);

    Path sourcePath = this.setWorkingFolder(queensNumber, workingFolder, isFinal, job);
    job.setOutputKeyClass(org.apache.hadoop.io.Text.class);
    job.setOutputValueClass(org.apache.hadoop.io.Text.class);

    if (isFinal) {
        job.setMapperClass(br.com.lassal.nqueens.grid.mapreduce.NQueenIncrementalCounterResultMapper.class);
        job.setReducerClass(br.com.lassal.nqueens.grid.mapreduce.NQueenIncrementalCounterResultReducer.class);
    } else {
        job.setMapperClass(br.com.lassal.nqueens.grid.mapreduce.NQueenIncrementalCounterMapper.class);
        job.setReducerClass(br.com.lassal.nqueens.grid.mapreduce.NQueenIncrementalCounterReducer.class);
    }

    // Submit the job, then poll for progress until the job is complete
    boolean result = job.waitForCompletion(true);

    if (sourcePath != null) {
        FileSystem fs = FileSystem.get(conf);
        fs.delete(sourcePath, true);
    }

    return result ? 0 : 1;

}

From source file:br.com.lassal.nqueens.grid.job.NQueenCounter.java

private Path setWorkingFolder(int queensSize, String workingFolder, boolean isFinal, Job job)
        throws IOException {
    Configuration conf = getConf();
    FileSystem fs = FileSystem.get(conf);
    Path returnPath = null;

    if (workingFolder == null) {
        workingFolder = "";
    }

    Path partialSolDir = new Path(workingFolder + "/nqueens/board-" + queensSize + "/partial/");
    Path inputPath = null;
    Path outputPath = null;
    String nextRunPath = "run_1";

    if (fs.exists(partialSolDir)) {
        RemoteIterator<LocatedFileStatus> dirsFound = fs.listLocatedStatus(partialSolDir);
        String lastRunPath = null;
        Path lastPath = null;

        while (dirsFound.hasNext()) {
            LocatedFileStatus dir = dirsFound.next();

            if (dir.isDirectory()) {
                if (lastRunPath == null || dir.getPath().getName().compareTo(lastRunPath) > 0) {
                    lastPath = dir.getPath();
                    lastRunPath = lastPath.getName();
                }
            }
        }
        if (lastRunPath != null) {
            String[] runParts = lastRunPath.split("_");
            int lastRun = Integer.parseInt(runParts[1]);
            nextRunPath = runParts[0] + "_" + (++lastRun);
            inputPath = lastPath;
        }

    }
    if (inputPath == null) {
        inputPath = new Path(workingFolder + "/nqueens/board-" + queensSize + "/seed");
        if (!fs.exists(inputPath)) {
            FSDataOutputStream seedFile = fs.create(inputPath, true);
            seedFile.writeBytes(queensSize + ":");
            seedFile.close();
        }
    } else {
        returnPath = inputPath;
    }
    // Input
    FileInputFormat.addInputPath(job, inputPath);
    job.setInputFormatClass(TextInputFormat.class);

    if (isFinal) {
        outputPath = new Path(workingFolder + "/nqueens/board-" + queensSize + "/final");
    } else {
        outputPath = new Path(workingFolder + "/nqueens/board-" + queensSize + "/partial/" + nextRunPath);
    }

    // Output
    FileOutputFormat.setOutputPath(job, outputPath);
    job.setOutputFormatClass(TextOutputFormat.class);

    return returnPath;
}

From source file:br.ufpr.inf.hpath.HPath.java

License:Apache License

/**
 * Executes the XPath query as a Hadoop job.
 *
 * @param args args[0] = XML input file containing all data,
 *             args[1] = output directory for the query result,
 *             args[2] = XPath query submitted by the user via the CLI
 * @throws Exception
 */
public static void main(String[] args) throws Exception {

    if (args.length < 3) {
        System.out.println("USAGE: hpath [xpath_query] [input_file] [<output_dir>]");
        System.exit(-1);
    }

    System.out.println("***************");
    System.out.println(" Query  -> " + args[2]);
    System.out.println(" Input  -> " + args[0]);
    System.out.println(" Output -> " + args[1]);
    System.out.println("***************");

    String xpath_query = args[2];
    String inputFile = args[0];
    String outputFile = args[1];
    String tag = "";

    // tag = getFisrtQueryTag(xpath_query);
    tag = getLastQueryTag(xpath_query);
    Configuration conf = new Configuration();
    conf.set("xmlinput.start", "<" + tag);
    conf.set("xmlinput.end", "</" + tag + ">");
    conf.set("xpath.query", xpath_query);

    @SuppressWarnings("deprecation")
    Job job = new Job(conf, "HPath");
    FileSystem fs = FileSystem.get(conf);
    Path inFile = new Path(inputFile);
    Path outFile = new Path(outputFile);

    if (!fs.exists(inFile)) {
        System.out.println("error: Input file not found.");
        System.exit(-1);
    }
    if (!fs.isFile(inFile)) {
        System.out.println("error: Input should be a file.");
        System.exit(-1);
    }
    if (fs.exists(outFile)) {
        System.out.println("error: Output already exists.");
        System.exit(-1);
    }

    job.setJarByClass(HPath.class);

    job.setMapperClass(Map.class);
    job.setReducerClass(Reduce.class);

    job.setInputFormatClass(XmlItemInputFormat.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    FileInputFormat.addInputPath(job, inFile);
    FileOutputFormat.setOutputPath(job, outFile);
    job.waitForCompletion(true);
}

From source file:br.ufrj.nce.recureco.distributedindex.search.controller.DocumentViewerServlet.java

License:Open Source License

protected void doGet(javax.servlet.http.HttpServletRequest request,
        javax.servlet.http.HttpServletResponse response) throws javax.servlet.ServletException, IOException {

    String doc = request.getParameter("doc");

    if (doc != null && doc.trim().length() > 0) {

        try {

            String filePath = DIR_DOWNLOAD + doc;

            Configuration conf = new Configuration();

            conf.addResource(new Path(DIR_HADOOP_CONF + "core-site.xml"));
            conf.addResource(new Path(DIR_HADOOP_CONF + "hdfs-site.xml"));
            conf.addResource(new Path(DIR_HADOOP_CONF + "mapred-site.xml"));

            FileSystem fileSystem = FileSystem.get(conf);

            Path path = new Path(filePath);
            if (!fileSystem.exists(path)) {
                response.getWriter().print("File not found.");
                return;
            }

            FSDataInputStream in = fileSystem.open(path);

            response.setContentType("text/plain");

            int read = 0;
            byte[] bytes = new byte[BYTES_DOWNLOAD];
            OutputStream os = response.getOutputStream();

            while ((read = in.read(bytes)) != -1) {
                os.write(bytes, 0, read);
            }
            os.flush();
            os.close();
        } catch (FileNotFoundException e) {
            response.getWriter().print("File not found.");
        }

    } else {
        //print invalid document
        response.getWriter().print("File not informed.");
    }

}

From source file:Brush.AdjustMateEdge.java

License:Apache License

public RunningJob run(String inputPath, String outputPath, long reads, long ctg_sum) throws Exception {
    sLogger.info("Tool name: AdjustMateEdge");
    sLogger.info(" - input: " + inputPath);
    sLogger.info(" - output: " + outputPath);

    //JobConf conf = new JobConf(Stats.class);
    JobConf conf = new JobConf(AdjustMateEdge.class);
    conf.setJobName("AdjustMateEdge " + inputPath);

    conf.setLong("READS", reads);
    conf.setLong("CTG_SUM", ctg_sum);
    BrushConfig.initializeConfiguration(conf);

    FileInputFormat.addInputPath(conf, new Path(inputPath));
    FileOutputFormat.setOutputPath(conf, new Path(outputPath));

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    conf.setMapOutputKeyClass(Text.class);
    conf.setMapOutputValueClass(Text.class);

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);

    conf.setMapperClass(AdjustMateEdgeMapper.class);
    conf.setReducerClass(AdjustMateEdgeReducer.class);

    //delete the output directory if it exists already
    FileSystem.get(conf).delete(new Path(outputPath), true);

    return JobClient.runJob(conf);
}

From source file:Brush.BrushAssembler.java

License:Apache License

public void cleanup(String path) throws IOException {
    FileSystem.get(baseconf).delete(new Path(path), true);
}

From source file:Brush.BrushAssembler.java

License:Apache License

public void save_result(String base, String opath, String npath) throws IOException {
    //System.err.println("Renaming " + base + opath + " to " + base + npath);

    msg("Save result to " + npath + "\n\n");

    FileSystem.get(baseconf).delete(new Path(base + npath), true);
    FileSystem.get(baseconf).rename(new Path(base + opath), new Path(base + npath));
}

From source file:Brush.BrushAssembler.java

License:Apache License

public void computeStats(String base, String dir) throws Exception {
    start("Compute Stats " + dir);
    Stats stats = new Stats();
    RunningJob job = stats.run(base + dir, base + dir + ".stats");
    end(job);

    msg("\n\nStats " + dir + "\n");
    msg("==================================================================================\n");

    FSDataInputStream statstream = FileSystem.get(baseconf).open(new Path(base + dir + ".stats/part-00000"));
    BufferedReader b = new BufferedReader(new InputStreamReader(statstream));

    //\\declare bar data
    //DefaultCategoryDataset dataset = new DefaultCategoryDataset();
    //\\
    String s;
    while ((s = b.readLine()) != null) {
        //\\ input bar data
        //\\
        if (s.charAt(0) == '>') {
            String[] vals = s.substring(0).split("\t");
            //dataset.addValue((long)Double.parseDouble(vals[3]), "Mean", vals[0]);
            //dataset.addValue((long)Double.parseDouble(vals[4]), "N50", vals[0]);

            //dataset.addValue(Integer.parseInt(vals[1]), "Cnt", vals[0]);

        }
        msg(s);
        msg("\n");
    }
    msg("\n");
    //\\ build bar chart
    /*JFreeChart jfreechart = ChartFactory.createBarChart(
    "Summary",       // chart title
    "Contig Size Cutoff",               // domain axis label
    "N50 (bp)",                  // range axis label
    dataset,                  // data
    PlotOrientation.HORIZONTAL, // the plot orientation
    true,                    // include legend
    true,
    false
    );*/
    //\\
    //CategoryAxis axis = plot.getDomainAxis(); //x
    //axis.setMaximumCategoryLabelLines(10);  //?
    //axis.setMaximumCategoryLabelWidthRatio(0.5f);  //?1

    //\\
    //FileOutputStream fos_jpg = null;
    /*try {         
       //fos_jpg = new FileOutputStream("c:\\Pie.jpg");
    FSDataOutputStream fos_jpg = FileSystem.get(baseconf).create(new Path(base+"jpeg/stats.jpg"),true);
       ChartUtilities.writeChartAsJPEG(fos_jpg,0.99f,jfreechart,480,320,null);
       fos_jpg.close();
    } catch (Exception e) {
       e.printStackTrace();
    }*/
}

From source file:Brush.Compressible.java

License:Apache License

public RunningJob run(String inputPath, String outputPath) throws Exception {
    sLogger.info("Tool name: Compressible");
    sLogger.info(" - input: " + inputPath);
    sLogger.info(" - output: " + outputPath);

    //JobConf conf = new JobConf(Stats.class);
    JobConf conf = new JobConf(Compressible.class);
    conf.setJobName("Compressible " + inputPath);

    BrushConfig.initializeConfiguration(conf);

    FileInputFormat.addInputPath(conf, new Path(inputPath));
    FileOutputFormat.setOutputPath(conf, new Path(outputPath));

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    conf.setMapOutputKeyClass(Text.class);
    conf.setMapOutputValueClass(Text.class);

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);

    conf.setMapperClass(CompressibleMapper.class);
    conf.setReducerClass(CompressibleReducer.class);

    //delete the output directory if it exists already
    FileSystem.get(conf).delete(new Path(outputPath), true);

    return JobClient.runJob(conf);
}

From source file:Brush.CountBraid.java

License:Apache License

public RunningJob run(String inputPath, String outputPath) throws Exception {
    sLogger.info("Tool name: CountBraid");
    sLogger.info(" - input: " + inputPath);
    sLogger.info(" - output: " + outputPath);

    JobConf conf = new JobConf(CountBraid.class);
    conf.setJobName("CountBraid " + inputPath + " " + BrushConfig.K);
    //conf.setFloat("Error_Rate", ErrorRate);
    //conf.setFloat("Exp_Cov", Exp_Cov);

    BrushConfig.initializeConfiguration(conf);

    FileInputFormat.addInputPath(conf, new Path(inputPath));
    FileOutputFormat.setOutputPath(conf, new Path(outputPath));

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    conf.setMapOutputKeyClass(Text.class);
    conf.setMapOutputValueClass(Text.class);

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);

    conf.setMapperClass(CountBraidMapper.class);
    conf.setReducerClass(CountBraidReducer.class);

    //delete the output directory if it exists already
    FileSystem.get(conf).delete(new Path(outputPath), true);

    return JobClient.runJob(conf);
}