Example usage for org.apache.hadoop.fs FileSystem get

Introduction

This page collects usage examples for org.apache.hadoop.fs.FileSystem#get(Configuration).

Prototype

public static FileSystem get(Configuration conf) throws IOException 

Document

Returns the configured FileSystem implementation.
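
A minimal sketch of a call to this method (the class name and printed message are illustrative, and the configuration is assumed to be picked up from core-site.xml on the classpath):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class FileSystemGetExample {
    public static void main(String[] args) throws IOException {
        // Loads the Hadoop configuration from the classpath and returns the
        // FileSystem implementation configured as fs.defaultFS (HDFS, local, ...).
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        System.out.println("Default filesystem: " + fs.getUri());
    }
}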

Usage

From source file: at.illecker.hama.rootbeer.examples.piestimator.PiEstimatorBenchmark.java

License: Apache License

@Override
protected void setUp() throws Exception {
    m_conf = new Configuration();

    // Try to load Hadoop configuration
    String HADOOP_HOME = System.getenv("HADOOP_HOME");
    String HADOOP_INSTALL = System.getenv("HADOOP_INSTALL");
    if (((HADOOP_HOME != null) || (HADOOP_INSTALL != null)) && (!m_runLocally)) {
        String HADOOP = ((HADOOP_HOME != null) ? HADOOP_HOME : HADOOP_INSTALL);

        m_conf.addResource(new Path(HADOOP, "src/core/core-default.xml"));
        m_conf.addResource(new Path(HADOOP, "src/hdfs/hdfs-default.xml"));
        m_conf.addResource(new Path(HADOOP, "src/mapred/mapred-default.xml"));
        m_conf.addResource(new Path(HADOOP, "conf/core-site.xml"));
        m_conf.addResource(new Path(HADOOP, "conf/hdfs-site.xml"));
        m_conf.addResource(new Path(HADOOP, "conf/mapred-site.xml"));
        // System.out.println("Loaded Hadoop configuration from " + HADOOP);

        try {
            // Connect to HDFS Filesystem
            FileSystem.get(m_conf);
        } catch (Exception e) {
            // HDFS not reachable, run the benchmark locally
            m_conf = new Configuration();
            m_runLocally = true;
        }
        // System.out.println("Run Benchmark local: " + m_runLocally);
    }

    // Try to load Hama configuration
    String HAMA_HOME = System.getenv("HAMA_HOME");
    String HAMA_INSTALL = System.getenv("HAMA_INSTALL");
    if (((HAMA_HOME != null) || (HAMA_INSTALL != null)) && (!m_runLocally)) {
        String HAMA = ((HAMA_HOME != null) ? HAMA_HOME : HAMA_INSTALL);

        m_conf.addResource(new Path(HAMA, "conf/hama-default.xml"));
        m_conf.addResource(new Path(HAMA, "conf/hama-site.xml"));
        // System.out.println("Loaded Hama configuration from " + HAMA);
    }

    // Setup outputs
    m_OUTPUT_DIR_PATH = new Path(OUTPUT_DIR + "/bench_" + System.currentTimeMillis());
    System.out.println("OUTPUT_DIR_PATH: " + m_OUTPUT_DIR_PATH);

    m_blockSize = PiEstimatorGpuBSP.blockSize;
    m_gridSize = PiEstimatorGpuBSP.gridSize;
    m_totalIterations = (long) m_blockSize * (long) m_gridSize * n;

    System.out.println("Benchmark PiEstimator[blockSize=" + m_blockSize + ",gridSize=" + m_gridSize + "] n=" + n
            + ", totalSamples=" + m_totalIterations);
}

From source file: at.illecker.hama.rootbeer.examples.piestimator.PiEstimatorBenchmark.java

License: Apache License

@Override
protected void tearDown() throws Exception {

    printOutput(m_conf, m_OUTPUT_DIR_PATH);

    // Cleanup
    FileSystem fs = FileSystem.get(m_conf);
    fs.delete(m_OUTPUT_DIR_PATH, true);
}

From source file: at.illecker.hama.rootbeer.examples.util.RandomGraphGenerator.java

License: Apache License

public static void main(String[] args) throws Exception {
    if (args.length != 4) {
        System.out.println(
                "USAGE: <Number of vertices> <Number of edges per vertex> <Number of partitions> <Outpath>");
        return;
    }
    System.out.println(Arrays.toString(args));
    Configuration conf = new Configuration();
    conf.setInt("hama.num.vertices", Integer.parseInt(args[0]));
    conf.setInt("hama.num.partitions", Integer.parseInt(args[2]));
    conf.setInt("number.edges", Integer.parseInt(args[1]));
    Job job = new Job(conf);

    Path generated = new Path(new Path(args[3]).getParent(), "generated");
    FileOutputFormat.setOutputPath(job, generated);
    FileSystem.get(conf).delete(generated, true);

    job.setJobName("RangeWriter");

    job.setJarByClass(SortGenMapper.class);
    job.setMapperClass(SortGenMapper.class);
    job.setNumReduceTasks(0);

    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(NullWritable.class);

    job.setInputFormatClass(RangeInputFormat.class);

    job.waitForCompletion(true);
    conf.setInt("max.id", Integer.valueOf(args[0]));
    job = new Job(conf);

    FileOutputFormat.setOutputPath(job, new Path(args[3]));
    FileSystem.get(conf).delete(new Path(args[3]), true);

    job.setJobName("Random Vertex Writer");

    FileInputFormat.addInputPath(job, generated);

    job.setJarByClass(RandomMapper.class);
    job.setMapperClass(RandomMapper.class);
    job.setReducerClass(Reducer.class);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);

    job.setNumReduceTasks(conf.getInt("hama.num.partitions", 2));
    job.setPartitionerClass(HashPartitioner.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    job.setInputFormatClass(SequenceFileInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    job.waitForCompletion(true);
}

From source file: audr.text.utils.FileUtils.java

License: Open Source License

/**
 * Uploads a local file to HDFS.
 * 
 * @param localFile
 *            path of the local source file
 * @param hadoopFile
 *            destination path on HDFS
 */
public static void uploadFile2HDFS(String localFile, String hadoopFile) {
    try {
        Configuration conf = new Configuration();
        FileSystem src = FileSystem.getLocal(conf);
        FileSystem dst = FileSystem.get(conf);
        Path srcpath = new Path(localFile);
        Path dstpath = new Path(hadoopFile);
        FileUtil.copy(src, srcpath, dst, dstpath, false, conf);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
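
A hypothetical invocation of this helper (both paths are illustrative and not taken from the source):

    FileUtils.uploadFile2HDFS("/tmp/report.txt", "/user/hadoop/input/report.txt");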

From source file: audr.text.utils.FileUtils.java

License: Open Source License

/**
 * Recursively deletes the given path from HDFS.
 * 
 * @param path the HDFS path to delete
 */
public static void deleteFileFromHDFS(String path) {
    try {
        Configuration conf = new Configuration();
        FileSystem dst = FileSystem.get(conf);
        Path dstpath = new Path(path);
        dst.delete(dstpath, true);
    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file: audr.text.utils.FileUtils.java

License: Open Source License

/**
 * Checks whether the given path exists on HDFS.
 * 
 * @param path the path to check
 * @return true if the path exists, false otherwise
 */
public static boolean isDirectoryExist(Path path) {
    try {
        Configuration conf = new Configuration();
        FileSystem dst = FileSystem.get(conf);
        return dst.exists(path);
    } catch (IOException e) {
        e.printStackTrace();
    }
    return false;
}

From source file: audr.text.utils.FileUtils.java

License: Open Source License

public static String readTextFromHDFS(Path filePath) throws IOException {

    Configuration conf = new Configuration();
    // Buffer that receives the file contents (at most MAX_LENGTH bytes)
    byte[] line = new byte[MAX_LENGTH];

    // Connect to HDFS and open the file
    FileSystem fs = FileSystem.get(conf);
    FSDataInputStream in = fs.open(filePath);
    int bytesRead = in.read(line);
    in.close();

    return new String(line, 0, Math.max(bytesRead, 0)).trim();

}
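
For comparison, a hedged alternative sketch (not part of the original source) that reads the whole file rather than a single buffer and closes the stream with try-with-resources; it assumes the file fits in memory and additionally uses java.io.ByteArrayOutputStream and org.apache.hadoop.io.IOUtils:

public static String readAllTextFromHDFS(Path filePath) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try (FSDataInputStream in = fs.open(filePath)) {
        // Copy every byte; 'false' tells IOUtils not to close the streams itself.
        IOUtils.copyBytes(in, out, conf, false);
    }
    return out.toString("UTF-8").trim();
}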

From source file: audr.text.utils.FileUtils.java

License: Open Source License

/**
 * Creates index directory trees on HDFS for each text category.
 *
 * @throws IOException
 */
public static void makeIndexDirs() throws IOException {
    FileSystem fs = FileSystem.get(new Configuration());
    for (int i = 0; i < TextCategoryFields.TEXT_CATEGOTIES_ENUM.length; ++i) {
        String oriDir = Constants.INPUT_PATH.replace("%Category%", TextCategoryFields.TEXT_CATEGOTIES_ENUM[i]);
        String lfDir = Constants.INPUT_PATH_LF.replace("%Category%",
                TextCategoryFields.TEXT_CATEGOTIES_ENUM[i]);
        FileSystem.mkdirs(fs, new Path(oriDir), FsPermission.getDefault());
        FileSystem.mkdirs(fs, new Path(lfDir), FsPermission.getDefault());

        for (int j = 0; j < Constants.INDEX_SHARDS.length; ++j) {
            String indexDir = Constants.INDEX_SHARDS[j].replace("%Category%",
                    TextCategoryFields.TEXT_CATEGOTIES_ENUM[i]);
            FileSystem.mkdirs(fs, new Path(indexDir), FsPermission.getDefault());
        }
    }
}

From source file: avro.HadoopAvro.java

License: Open Source License

private JobConf createJobConfig() throws IOException {
    Path inputPath = new Path(INPUT_PATH);
    Path outputPath = new Path(OUTPUT_PATH);

    FileSystem.get(new Configuration()).delete(outputPath, true);

    JobConf jobConfig = new JobConf();
    jobConfig.setInputFormat(AvroInputFormat.class);
    jobConfig.setOutputFormat(AvroOutputFormat.class);
    AvroOutputFormat.setOutputPath(jobConfig, outputPath);
    AvroInputFormat.addInputPath(jobConfig, inputPath);
    jobConfig.set(AvroJob.OUTPUT_SCHEMA, User.SCHEMA.toString());
    jobConfig.set(AvroJob.INPUT_SCHEMA, User.SCHEMA.toString());
    return jobConfig;
}

From source file: avro.HadoopAvro.java

License: Open Source License

private void createAvroFile() throws IOException {
    Path inputPath = new Path(INPUT_PATH);
    FileSystem fs = FileSystem.get(new Configuration());
    fs.delete(inputPath, true);

    DataFileWriter<User> fileWriter = new DataFileWriter<>(new GenericDatumWriter<User>(User.SCHEMA));

    fileWriter.create(User.SCHEMA, fs.create(new Path(inputPath, "file.avro")));
    IntStream.range(0, 100).mapToObj(i -> new User("name" + i, "pass" + i, i, i % 2 == 0))
            .forEach(user -> Util.uncheckRun(() -> fileWriter.append(user)));
    fileWriter.close();
    fs.close();
}
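
A note on the fs.close() call above: FileSystem.get(conf) normally returns a cached instance that is shared by every caller using the same URI and user, so closing it here also closes it for any other code in the JVM holding the same reference. Where an independently closeable handle is needed, FileSystem.newInstance(conf) returns an uncached instance instead.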