Example usage for org.apache.hadoop.fs AbstractFileSystem listStatus

Introduction

On this page you can find example usage of org.apache.hadoop.fs.AbstractFileSystem#listStatus, collected from open-source projects.

Prototype

public abstract FileStatus[] listStatus(final Path f)
        throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException;

Document

The specification of this method matches that of FileContext.Util#listStatus(Path) except that Path f must be for this file system.
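
Before the project-specific examples below, here is a minimal, self-contained sketch of the call. It is an illustration only: the hdfs://localhost:9000 URI and the /tmp path are placeholders, not taken from any of the examples on this page.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.AbstractFileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;

public class ListStatusSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Placeholder URI: point this at your own cluster.
        AbstractFileSystem fs = AbstractFileSystem.get(new URI("hdfs://localhost:9000/"), conf);

        // listStatus returns one FileStatus per entry of the directory;
        // it throws FileNotFoundException if the path does not exist.
        for (FileStatus status : fs.listStatus(new Path("/tmp"))) {
            System.out.println(status.getPath() + " len=" + status.getLen()
                    + (status.isDirectory() ? " [dir]" : ""));
        }
    }
}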

Usage

From source file: org.apache.ignite.internal.processors.hadoop.GridHadoopSortingTest.java

License: Apache License

/**
 * @throws Exception If failed.
 */
public void testSortSimple() throws Exception {
    // Generate test data.
    Job job = Job.getInstance();

    job.setInputFormatClass(InFormat.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(NullWritable.class);

    job.setMapperClass(Mapper.class);
    job.setNumReduceTasks(0);

    setupFileSystems(job.getConfiguration());

    FileOutputFormat.setOutputPath(job, new Path(igfsScheme() + PATH_INPUT));

    X.printerrln("Data generation started.");

    grid(0).hadoop().submit(new GridHadoopJobId(UUID.randomUUID(), 1), createJobInfo(job.getConfiguration()))
            .get(180000);

    X.printerrln("Data generation complete.");

    // Run main map-reduce job.
    job = Job.getInstance();

    setupFileSystems(job.getConfiguration());

    job.getConfiguration().set(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY,
            JavaSerialization.class.getName() + "," + WritableSerialization.class.getName());

    FileInputFormat.setInputPaths(job, new Path(igfsScheme() + PATH_INPUT));
    FileOutputFormat.setOutputPath(job, new Path(igfsScheme() + PATH_OUTPUT));

    job.setSortComparatorClass(JavaSerializationComparator.class);

    job.setMapperClass(MyMapper.class);
    job.setReducerClass(MyReducer.class);

    job.setNumReduceTasks(2);

    job.setMapOutputKeyClass(UUID.class);
    job.setMapOutputValueClass(NullWritable.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(NullWritable.class);

    X.printerrln("Job started.");

    grid(0).hadoop().submit(new GridHadoopJobId(UUID.randomUUID(), 2), createJobInfo(job.getConfiguration()))
            .get(180000);

    X.printerrln("Job complete.");

    // Check result.
    Path outDir = new Path(igfsScheme() + PATH_OUTPUT);

    AbstractFileSystem fs = AbstractFileSystem.get(new URI(igfsScheme()), job.getConfiguration());

    for (FileStatus file : fs.listStatus(outDir)) {
        X.printerrln("__ file: " + file);

        if (file.getLen() == 0)
            continue;

        FSDataInputStream in = fs.open(file.getPath());

        Scanner sc = new Scanner(in);

        UUID prev = null;

        while (sc.hasNextLine()) {
            UUID next = UUID.fromString(sc.nextLine());

            //                X.printerrln("___ check: " + next);

            if (prev != null)
                assertTrue(prev.compareTo(next) < 0);

            prev = next;
        }
    }
}

From source file: org.apache.ignite.internal.processors.hadoop.HadoopSortingTest.java

License: Apache License

/**
 * @throws Exception If failed.
 */
public void testSortSimple() throws Exception {
    // Generate test data.
    Job job = Job.getInstance();

    job.setInputFormatClass(InFormat.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(NullWritable.class);

    job.setMapperClass(Mapper.class);
    job.setNumReduceTasks(0);

    setupFileSystems(job.getConfiguration());

    FileOutputFormat.setOutputPath(job, new Path(igfsScheme() + PATH_INPUT));

    X.printerrln("Data generation started.");

    grid(0).hadoop().submit(new HadoopJobId(UUID.randomUUID(), 1), createJobInfo(job.getConfiguration()))
            .get(180000);

    X.printerrln("Data generation complete.");

    // Run main map-reduce job.
    job = Job.getInstance();

    setupFileSystems(job.getConfiguration());

    job.getConfiguration().set(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY,
            JavaSerialization.class.getName() + "," + WritableSerialization.class.getName());

    FileInputFormat.setInputPaths(job, new Path(igfsScheme() + PATH_INPUT));
    FileOutputFormat.setOutputPath(job, new Path(igfsScheme() + PATH_OUTPUT));

    job.setSortComparatorClass(JavaSerializationComparator.class);

    job.setMapperClass(MyMapper.class);
    job.setReducerClass(MyReducer.class);

    job.setNumReduceTasks(2);

    job.setMapOutputKeyClass(UUID.class);
    job.setMapOutputValueClass(NullWritable.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(NullWritable.class);

    X.printerrln("Job started.");

    grid(0).hadoop().submit(new HadoopJobId(UUID.randomUUID(), 2), createJobInfo(job.getConfiguration()))
            .get(180000);

    X.printerrln("Job complete.");

    // Check result.
    Path outDir = new Path(igfsScheme() + PATH_OUTPUT);

    AbstractFileSystem fs = AbstractFileSystem.get(new URI(igfsScheme()), job.getConfiguration());

    for (FileStatus file : fs.listStatus(outDir)) {
        X.printerrln("__ file: " + file);

        if (file.getLen() == 0)
            continue;

        FSDataInputStream in = fs.open(file.getPath());

        Scanner sc = new Scanner(in);

        UUID prev = null;

        while (sc.hasNextLine()) {
            UUID next = UUID.fromString(sc.nextLine());

            //                X.printerrln("___ check: " + next);

            if (prev != null)
                assertTrue(prev.compareTo(next) < 0);

            prev = next;
        }
    }
}

From source file: org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils.java

License: Apache License

/**
 * Deletes all files from the given file system.
 *
 * @param fs The file system to clean up.
 * @throws IOException On error.
 */
public static void clear(AbstractFileSystem fs) throws IOException {
    // Delete root contents:
    FileStatus[] statuses = fs.listStatus(new Path("/"));

    if (statuses != null) {
        for (FileStatus stat : statuses)
            fs.delete(stat.getPath(), true);
    }
}
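
A hedged usage sketch for this helper. The igfs://igfs@localhost/ URI is an assumption for illustration; in the actual tests the scheme and authority come from the suite's configuration.

// Hypothetical call site: wipe the IGFS root between test runs.
AbstractFileSystem fs = AbstractFileSystem.get(new URI("igfs://igfs@localhost/"), new Configuration());

HadoopIgfsUtils.clear(fs);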

From source file: org.gridgain.grid.kernal.processors.hadoop.GridHadoopSortingTest.java

License: Open Source License

/**
 * @throws Exception If failed.
 */
public void testSortSimple() throws Exception {
    // Generate test data.
    Job job = Job.getInstance();

    job.setInputFormatClass(InFormat.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(NullWritable.class);

    job.setMapperClass(Mapper.class);
    job.setNumReduceTasks(0);

    setupFileSystems(job.getConfiguration());

    FileOutputFormat.setOutputPath(job, new Path(ggfsScheme() + PATH_INPUT));

    X.printerrln("Data generation started.");

    grid(0).hadoop().submit(new GridHadoopJobId(UUID.randomUUID(), 1), createJobInfo(job.getConfiguration()))
            .get(180000);

    X.printerrln("Data generation complete.");

    // Run main map-reduce job.
    job = Job.getInstance();

    setupFileSystems(job.getConfiguration());

    job.getConfiguration().set(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY,
            JavaSerialization.class.getName() + "," + WritableSerialization.class.getName());

    FileInputFormat.setInputPaths(job, new Path(ggfsScheme() + PATH_INPUT));
    FileOutputFormat.setOutputPath(job, new Path(ggfsScheme() + PATH_OUTPUT));

    job.setSortComparatorClass(JavaSerializationComparator.class);

    job.setMapperClass(MyMapper.class);
    job.setReducerClass(MyReducer.class);

    job.setNumReduceTasks(2);

    job.setMapOutputKeyClass(UUID.class);
    job.setMapOutputValueClass(NullWritable.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(NullWritable.class);

    X.printerrln("Job started.");

    grid(0).hadoop().submit(new GridHadoopJobId(UUID.randomUUID(), 2), createJobInfo(job.getConfiguration()))
            .get(180000);

    X.printerrln("Job complete.");

    // Check result.
    Path outDir = new Path(ggfsScheme() + PATH_OUTPUT);

    AbstractFileSystem fs = AbstractFileSystem.get(new URI(ggfsScheme()), job.getConfiguration());

    for (FileStatus file : fs.listStatus(outDir)) {
        X.printerrln("__ file: " + file);

        if (file.getLen() == 0)
            continue;

        FSDataInputStream in = fs.open(file.getPath());

        Scanner sc = new Scanner(in);

        UUID prev = null;

        while (sc.hasNextLine()) {
            UUID next = UUID.fromString(sc.nextLine());

            //                X.printerrln("___ check: " + next);

            if (prev != null)
                assertTrue(prev.compareTo(next) < 0);

            prev = next;
        }
    }
}