Example usage for org.apache.hadoop.fs FsShell FsShell

List of usage examples for org.apache.hadoop.fs FsShell FsShell

Introduction

On this page you can find example usages for org.apache.hadoop.fs FsShell FsShell.

Prototype

public FsShell(Configuration conf) 

Source Link

Document

Construct a FsShell with the given configuration.

Usage

From source file:com.pinterest.hdfsbackup.distcp.DistCp.java

License:Apache License

/**
 * Deletes the dst files/dirs which do not exist in src.
 *
 * <p>Strategy: recursively list (lsr) everything under {@code dstroot} into a
 * SequenceFile, sort that listing, then merge-join it against the already
 * sorted list of src paths ({@code dstsorted}). Any dst path with no matching
 * src entry is removed with {@code hadoop fs -rmr}.
 *
 * @param dstfs     filesystem of the copy destination
 * @param dstroot   status of the destination root; must be a directory
 * @param dstsorted sorted SequenceFile of paths that exist in src
 * @param jobfs     filesystem used for distcp job scratch files
 * @param jobdir    scratch directory for the listing and its sorted copy
 * @param jobconf   configuration used for SequenceFile I/O
 * @param conf      configuration used to construct the deleting FsShell
 * @throws IOException if dstroot is not a directory, on any I/O failure, or
 *                     when a shell delete exits with a non-zero code
 */
static private void deleteNonexisting(FileSystem dstfs, FileStatus dstroot, Path dstsorted, FileSystem jobfs,
        Path jobdir, JobConf jobconf, Configuration conf) throws IOException {
    if (!dstroot.isDir()) {
        throw new IOException("dst must be a directory when option " + Options.DELETE.cmd
                + " is set, but dst (= " + dstroot.getPath() + ") is not a directory.");
    }

    //write dst lsr results
    final Path dstlsr = new Path(jobdir, "_distcp_dst_lsr");
    final SequenceFile.Writer writer = SequenceFile.createWriter(jobfs, jobconf, dstlsr, Text.class,
            FileStatus.class, SequenceFile.CompressionType.NONE);
    try {
        //do lsr to get all file statuses in dstroot (iterative DFS via an explicit stack)
        final Stack<FileStatus> lsrstack = new Stack<FileStatus>();
        for (lsrstack.push(dstroot); !lsrstack.isEmpty();) {
            final FileStatus status = lsrstack.pop();
            if (status.isDir()) {
                for (FileStatus child : dstfs.listStatus(status.getPath())) {
                    // key each entry by its path relative to dstroot so it can be
                    // compared against the relative paths stored in dstsorted
                    String relative = makeRelative(dstroot.getPath(), child.getPath());
                    writer.append(new Text(relative), child);
                    lsrstack.push(child);
                }
            }
        }
    } finally {
        checkAndClose(writer);
    }

    //sort lsr results so both inputs of the merge-join below are ordered
    final Path sortedlsr = new Path(jobdir, "_distcp_dst_lsr_sorted");
    SequenceFile.Sorter sorter = new SequenceFile.Sorter(jobfs, new Text.Comparator(), Text.class,
            FileStatus.class, jobconf);
    sorter.sort(dstlsr, sortedlsr);

    //compare lsr list and dst list
    SequenceFile.Reader lsrin = null;
    SequenceFile.Reader dstin = null;
    try {
        lsrin = new SequenceFile.Reader(jobfs, sortedlsr, jobconf);
        dstin = new SequenceFile.Reader(jobfs, dstsorted, jobconf);

        //compare sorted lsr list and sorted dst list (classic sorted merge-join)
        final Text lsrpath = new Text();
        final FileStatus lsrstatus = new FileStatus();
        final Text dstpath = new Text();
        final Text dstfrom = new Text();
        final FsShell shell = new FsShell(conf);
        // shellargs[1] is filled in with the path to delete on each use
        final String[] shellargs = { "-rmr", null };

        boolean hasnext = dstin.next(dstpath, dstfrom);
        for (; lsrin.next(lsrpath, lsrstatus);) {
            int dst_cmp_lsr = dstpath.compareTo(lsrpath);
            // advance the src-side cursor until it is >= the current lsr path
            for (; hasnext && dst_cmp_lsr < 0;) {
                hasnext = dstin.next(dstpath, dstfrom);
                dst_cmp_lsr = dstpath.compareTo(lsrpath);
            }

            if (dst_cmp_lsr == 0) {
                //lsrpath exists in dst, skip it
                hasnext = dstin.next(dstpath, dstfrom);
            } else {
                //lsrpath does not exist, delete it
                String s = new Path(dstroot.getPath(), lsrpath.toString()).toString();
                // skip paths whose ancestor was already removed by a previous -rmr
                if (shellargs[1] == null || !isAncestorPath(shellargs[1], s)) {
                    shellargs[1] = s;
                    int r = 0;
                    try {
                        r = shell.run(shellargs);
                    } catch (Exception e) {
                        throw new IOException("Exception from shell.", e);
                    }
                    if (r != 0) {
                        throw new IOException(
                                "\"" + shellargs[0] + " " + shellargs[1] + "\" returns non-zero value " + r);
                    }
                }
            }
        }
    } finally {
        checkAndClose(lsrin);
        checkAndClose(dstin);
    }
}

From source file:com.scaleunlimited.cascading.DistCp.java

License:Apache License

/**
 * Deletes the dst files/dirs which do not exist in src.
 *
 * <p>Strategy: recursively list (lsr) everything under {@code dstroot} into a
 * SequenceFile, sort that listing, then merge-join it against the already
 * sorted list of src paths ({@code dstsorted}). Any dst path with no matching
 * src entry is removed with {@code hadoop fs -rmr}.
 *
 * @param dstfs     filesystem of the copy destination
 * @param dstroot   status of the destination root; must be a directory
 * @param dstsorted sorted SequenceFile of paths that exist in src
 * @param jobfs     filesystem used for distcp job scratch files
 * @param jobdir    scratch directory for the listing and its sorted copy
 * @param jobconf   configuration used for SequenceFile I/O
 * @param conf      configuration used to construct the deleting FsShell
 * @throws IOException if dstroot is not a directory, on any I/O failure, or
 *                     when a shell delete exits with a non-zero code
 */
static private void deleteNonexisting(FileSystem dstfs, FileStatus dstroot, Path dstsorted, FileSystem jobfs,
        Path jobdir, JobConf jobconf, Configuration conf) throws IOException {
    if (!dstroot.isDir()) {
        throw new IOException("dst must be a directory when option " + Options.DELETE.cmd
                + " is set, but dst (= " + dstroot.getPath() + ") is not a directory.");
    }

    //write dst lsr results
    final Path dstlsr = new Path(jobdir, "_distcp_dst_lsr");
    final SequenceFile.Writer writer = SequenceFile.createWriter(jobfs, jobconf, dstlsr, Text.class,
            FileStatus.class, SequenceFile.CompressionType.NONE);
    try {
        //do lsr to get all file statuses in dstroot (iterative DFS via an explicit stack)
        final Stack<FileStatus> lsrstack = new Stack<FileStatus>();
        for (lsrstack.push(dstroot); !lsrstack.isEmpty();) {
            final FileStatus status = lsrstack.pop();
            if (status.isDir()) {
                for (FileStatus child : dstfs.listStatus(status.getPath())) {
                    // key each entry by its path relative to dstroot so it can be
                    // compared against the relative paths stored in dstsorted
                    String relative = makeRelative(dstroot.getPath(), child.getPath());
                    writer.append(new Text(relative), child);
                    lsrstack.push(child);
                }
            }
        }
    } finally {
        checkAndClose(writer);
    }

    //sort lsr results so both inputs of the merge-join below are ordered
    final Path sortedlsr = new Path(jobdir, "_distcp_dst_lsr_sorted");
    SequenceFile.Sorter sorter = new SequenceFile.Sorter(jobfs, new Text.Comparator(), Text.class,
            FileStatus.class, jobconf);
    sorter.sort(dstlsr, sortedlsr);

    //compare lsr list and dst list
    SequenceFile.Reader lsrin = null;
    SequenceFile.Reader dstin = null;
    try {
        lsrin = new SequenceFile.Reader(jobfs, sortedlsr, jobconf);
        dstin = new SequenceFile.Reader(jobfs, dstsorted, jobconf);

        //compare sorted lsr list and sorted dst list (classic sorted merge-join)
        final Text lsrpath = new Text();
        final FileStatus lsrstatus = new FileStatus();
        final Text dstpath = new Text();
        final Text dstfrom = new Text();
        final FsShell shell = new FsShell(conf);
        // shellargs[1] is filled in with the path to delete on each use
        final String[] shellargs = { "-rmr", null };

        boolean hasnext = dstin.next(dstpath, dstfrom);
        for (; lsrin.next(lsrpath, lsrstatus);) {
            int dst_cmp_lsr = dstpath.compareTo(lsrpath);
            // advance the src-side cursor until it is >= the current lsr path
            for (; hasnext && dst_cmp_lsr < 0;) {
                hasnext = dstin.next(dstpath, dstfrom);
                dst_cmp_lsr = dstpath.compareTo(lsrpath);
            }

            if (dst_cmp_lsr == 0) {
                //lsrpath exists in dst, skip it
                hasnext = dstin.next(dstpath, dstfrom);
            } else {
                //lsrpath does not exist, delete it
                String s = new Path(dstroot.getPath(), lsrpath.toString()).toString();
                // skip paths whose ancestor was already removed by a previous -rmr
                if (shellargs[1] == null || !isAncestorPath(shellargs[1], s)) {
                    shellargs[1] = s;
                    int r = 0;
                    try {
                        r = shell.run(shellargs);
                    } catch (Exception e) {
                        throw new IOException("Exception from shell.", e);
                    }
                    if (r != 0) {
                        throw new IOException(
                                "\"" + shellargs[0] + " " + shellargs[1] + "\" returns non-zero value " + r);
                    }
                }
            }
        }
    } finally {
        checkAndClose(lsrin);
        checkAndClose(dstin);
    }
}

From source file:com.shopzilla.hadoop.repl.commands.FSShellCommandProvider.java

License:Apache License

/**
 * Builds the REPL command table: one entry per supported "hadoop fs"
 * sub-command, each paired with tab-completors for its arguments and a
 * Command that forwards the invocation to a fresh FsShell.
 *
 * @param sessionState session providing the Hadoop configuration and error sink
 * @return immutable mapping from Call descriptors to executable Commands
 */
@Override
public Map<Call, Command> apply(final SessionState sessionState) {
    // Each call(...) registers one fs sub-command together with the completors
    // used for its positional arguments (HDFS paths and/or local file names).
    final Iterable<Call> REPL_COMMANDS = ImmutableSet.<Call>builder()
            .add(call("ls", new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("lsr", new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("df", new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("du", new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("dus", new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("count", new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("mv", new HDFSFileNameCompletor(sessionState.configuration()),
                    new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("cp", new HDFSFileNameCompletor(sessionState.configuration()),
                    new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("rm", new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("rmr", new HDFSFileNameCompletor(sessionState.configuration()))).add(call("expunge"))
            .add(call("put", new FileNameCompleter(), new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("cat", new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("text", new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("copyToLocal", new HDFSFileNameCompletor(sessionState.configuration()),
                    new FileNameCompleter()))
            .add(call("moveToLocal", new HDFSFileNameCompletor(sessionState.configuration()),
                    new FileNameCompleter()))
            .add(call("mkdir", new HDFSFileNameCompletor(sessionState.configuration())))
            //            .add(call("setrep"))
            .add(call("touchz", new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("stat", new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("tail", new HDFSFileNameCompletor(sessionState.configuration())))
            //            .add(call("chmod"))
            //            .add(call("chown"))
            //            .add(call("chgrp"))
            .add(call("copyFromLocal", new FileNameCompleter(),
                    new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("moveFromLocal", new FileNameCompleter(),
                    new HDFSFileNameCompletor(sessionState.configuration())))
            .add(call("get", new HDFSFileNameCompletor(sessionState.configuration()), new FileNameCompleter()))
            .add(call("getmerge", new HDFSFileNameCompletor(sessionState.configuration()),
                    new FileNameCompleter()))
            .build();
    final ImmutableMap.Builder<Call, Command> commandMappingBuilder = new ImmutableMap.Builder<Call, Command>();
    for (final Call call : REPL_COMMANDS) {
        commandMappingBuilder.put(call, new Command() {
            @Override
            public void execute(final CommandInvocation call, final SessionState sessionState) {
                try {
                    // Rebuild an argv such as {"-ls", "<path>"} and hand it to FsShell.
                    // NOTE(review): splitting on single spaces breaks arguments that
                    // themselves contain spaces — confirm args are pre-tokenized.
                    new FsShell(sessionState.configuration()).run(
                            Joiner.on(" ").join("-" + call.command, Joiner.on(" ").join(call.args)).split(" "));
                } catch (final Exception ex) {
                    // Surface failures through the session rather than crashing the REPL.
                    sessionState.error(ex);
                }
            }

            @Override
            public Usage usage(final SessionState sessionState) {
                return new Usage(call.commandName, "" // TODO: Set this up!
                );
            }
        });
    }
    return commandMappingBuilder.build();
}

From source file:com.shopzilla.hadoop.repl.commands.util.ClusterStateManager.java

License:Apache License

/**
 * Serializes an HDFS subtree to a local compressed file.
 *
 * <p>Copies {@code path} from HDFS into a temporary local staging directory,
 * compresses the staging directory into {@code output}, and always removes
 * the staging directory afterwards.
 *
 * @param path   HDFS path to serialize
 * @param output local file receiving the compressed archive
 * @throws Exception if the copy, the compression, or local directory setup fails
 */
public void serializePath(final Path path, final File output) throws Exception {
    final File tmpRoot = Files.createTempDir();
    try {
        final File tmp = new File(tmpRoot, "hdfs");
        FileUtils.forceMkdir(tmp);
        // Fail loudly on a non-zero shell exit code instead of silently
        // compressing a partial or empty staging directory.
        final int rc = new FsShell(fs.getConf())
                .run(new String[] { "-copyToLocal", path.toString(), tmp.getAbsolutePath() });
        if (rc != 0) {
            throw new java.io.IOException(
                    "\"-copyToLocal " + path + "\" returned non-zero value " + rc);
        }
        compressFile(tmp, output);
    } finally {
        // Always clean up the staging directory, even when copy/compression fails;
        // previously a failure here leaked the temp directory.
        FileUtils.deleteQuietly(tmpRoot);
    }
}

From source file:fr.ens.biologie.genomique.eoulsan.modules.mgmt.hadoop.DistCp.java

License:LGPL

/** Delete the dst files/dirs which do not exist in src */
static private void deleteNonexisting(final FileSystem dstfs, final FileStatus dstroot, final Path dstsorted,
        final FileSystem jobfs, final Path jobdir, final JobConf jobconf, final Configuration conf)
        throws IOException {
    if (!dstroot.isDir()) {
        throw new IOException("dst must be a directory when option " + Options.DELETE.cmd
                + " is set, but dst (= " + dstroot.getPath() + ") is not a directory.");
    }/*from w  w w .  j a va 2s. co m*/

    // write dst lsr results
    final Path dstlsr = new Path(jobdir, "_distcp_dst_lsr");
    final SequenceFile.Writer writer = SequenceFile.createWriter(jobfs, jobconf, dstlsr, Text.class,
            dstroot.getClass(), SequenceFile.CompressionType.NONE);
    try {
        // do lsr to get all file statuses in dstroot
        final Stack<FileStatus> lsrstack = new Stack<>();
        for (lsrstack.push(dstroot); !lsrstack.isEmpty();) {
            final FileStatus status = lsrstack.pop();
            if (status.isDir()) {
                for (FileStatus child : dstfs.listStatus(status.getPath())) {
                    String relative = makeRelative(dstroot.getPath(), child.getPath());
                    writer.append(new Text(relative), child);
                    lsrstack.push(child);
                }
            }
        }
    } finally {
        checkAndClose(writer);
    }

    // sort lsr results
    final Path sortedlsr = new Path(jobdir, "_distcp_dst_lsr_sorted");
    SequenceFile.Sorter sorter = new SequenceFile.Sorter(jobfs, new Text.Comparator(), Text.class,
            FileStatus.class, jobconf);
    sorter.sort(dstlsr, sortedlsr);

    // compare lsr list and dst list
    SequenceFile.Reader lsrin = null;
    SequenceFile.Reader dstin = null;
    try {
        lsrin = new SequenceFile.Reader(jobfs, sortedlsr, jobconf);
        dstin = new SequenceFile.Reader(jobfs, dstsorted, jobconf);

        // compare sorted lsr list and sorted dst list
        final Text lsrpath = new Text();
        final FileStatus lsrstatus = new FileStatus();
        final Text dstpath = new Text();
        final Text dstfrom = new Text();
        final FsShell shell = new FsShell(conf);
        final String[] shellargs = { "-rmr", null };

        boolean hasnext = dstin.next(dstpath, dstfrom);
        for (; lsrin.next(lsrpath, lsrstatus);) {
            int dst_cmp_lsr = dstpath.compareTo(lsrpath);
            for (; hasnext && dst_cmp_lsr < 0;) {
                hasnext = dstin.next(dstpath, dstfrom);
                dst_cmp_lsr = dstpath.compareTo(lsrpath);
            }

            if (dst_cmp_lsr == 0) {
                // lsrpath exists in dst, skip it
                hasnext = dstin.next(dstpath, dstfrom);
            } else {
                // lsrpath does not exist, delete it
                String s = new Path(dstroot.getPath(), lsrpath.toString()).toString();
                if (shellargs[1] == null || !isAncestorPath(shellargs[1], s)) {
                    shellargs[1] = s;
                    int r = 0;
                    try {
                        r = shell.run(shellargs);
                    } catch (Exception e) {
                        throw new IOException("Exception from shell.", e);
                    }
                    if (r != 0) {
                        throw new IOException(
                                "\"" + shellargs[0] + " " + shellargs[1] + "\" returns non-zero value " + r);
                    }
                }
            }
        }
    } finally {
        checkAndClose(lsrin);
        checkAndClose(dstin);
    }
}

From source file:io.aos.hadoop.AosFsShell.java

License:Apache License

/** Entry point: lists the HDFS root directory via the FsShell tool. */
public static void main(String... args) throws Exception {
    final Configuration configuration = new Configuration();
    final FsShell shell = new FsShell(configuration);
    final String[] listRoot = { "-ls", "/" };
    shell.run(listRoot);
}

From source file:org.apache.accumulo.examples.mapreduce.bulk.BulkIngestExample.java

License:Apache License

/**
 * Configures and runs the bulk-ingest MapReduce job, writes the table's split
 * points to a file for the RangePartitioner, then hands the generated rfiles
 * to Accumulo for bulk import.
 *
 * @param args command-line arguments, parsed into {@link Opts}
 * @return 0 on success
 * @throws RuntimeException wrapping any failure during job setup, split
 *                          listing, chmod, or import
 */
@Override
public int run(String[] args) {
    Opts opts = new Opts();
    opts.parseArgs(BulkIngestExample.class.getName(), args);

    Configuration conf = getConf();
    try {
        Job job = Job.getInstance(conf);
        job.setJobName("bulk ingest example");
        job.setJarByClass(this.getClass());

        job.setInputFormatClass(TextInputFormat.class);

        job.setMapperClass(MapClass.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        job.setReducerClass(ReduceClass.class);
        job.setOutputFormatClass(AccumuloFileOutputFormat.class);
        opts.setAccumuloConfigs(job);

        Connector connector = opts.getConnector();

        TextInputFormat.setInputPaths(job, new Path(opts.inputDir));
        AccumuloFileOutputFormat.setOutputPath(job, new Path(opts.workDir + "/files"));

        FileSystem fs = FileSystem.get(conf);
        Collection<Text> splits = connector.tableOperations().listSplits(opts.getTableName(), 100);

        // Write the base64-encoded split points; try-with-resources guarantees
        // the stream is flushed and closed before the partitioner reads the file
        // (the original managed the stream manually and closed it twice).
        try (PrintStream out = new PrintStream(
                new BufferedOutputStream(fs.create(new Path(opts.workDir + "/splits.txt"))))) {
            for (Text split : splits)
                out.println(Base64.getEncoder().encodeToString(TextUtil.getBytes(split)));
        }

        job.setNumReduceTasks(splits.size() + 1);

        job.setPartitionerClass(RangePartitioner.class);
        RangePartitioner.setSplitFile(job, opts.workDir + "/splits.txt");

        job.waitForCompletion(true);
        Path failures = new Path(opts.workDir, "failures");
        fs.delete(failures, true);
        fs.mkdirs(new Path(opts.workDir, "failures"));
        // With HDFS permissions on, we need to make sure the Accumulo user can read/move the rfiles
        FsShell fsShell = new FsShell(conf);
        int chmodResult = fsShell.run(new String[] { "-chmod", "-R", "777", opts.workDir });
        if (chmodResult != 0) {
            // Previously ignored: a failed chmod surfaced only later as an
            // opaque import error, so fail fast here instead.
            throw new java.io.IOException(
                    "chmod -R 777 " + opts.workDir + " returned non-zero value " + chmodResult);
        }
        connector.tableOperations().importDirectory(opts.getTableName(), opts.workDir + "/files",
                opts.workDir + "/failures", false);

    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    return 0;
}

From source file:org.apache.accumulo.test.BulkImportSequentialRowsIT.java

License:Apache License

/**
 * Regression test: bulk-imports a single rfile that spans every tablet and
 * verifies that all rows become visible, guarding against the bug where some
 * tablets were skipped during import.
 */
@Test
public void testBulkImportFailure() throws Exception {
    String tableName = getUniqueNames(1)[0];
    TableOperations to = getConnector().tableOperations();
    to.create(tableName);
    FileSystem fs = getFileSystem();
    // Work under a test-class-specific root so runs don't interfere.
    Path rootPath = new Path(fs.makeQualified(getUsableDir()), getClass().getSimpleName());
    log.info("Writing to {}", rootPath);
    if (fs.exists(rootPath)) {
        assertTrue(fs.delete(rootPath, true));
    }
    assertTrue(fs.mkdirs(rootPath));

    Path bulk = new Path(rootPath, "bulk");
    log.info("bulk: {}", bulk);
    assertTrue(fs.mkdirs(bulk));
    Path err = new Path(rootPath, "err");
    log.info("err: {}", err);

    assertTrue(fs.mkdirs(bulk));
    assertTrue(fs.mkdirs(err));

    Path rfile = new Path(bulk, "file.rf");

    log.info("Generating RFile {}", rfile.toUri().toString());

    // Generate NR rows x NV values of sequential data into the rfile.
    GenerateSequentialRFile.main(new String[] { "-f", rfile.toUri().toString(), "-nr", Long.toString(NR), "-nv",
            Long.toString(NV) });

    assertTrue("Expected that " + rfile + " exists, but it does not", fs.exists(rfile));

    // Open up permissions so the Accumulo user can read/move the rfiles.
    FsShell fsShell = new FsShell(fs.getConf());
    assertEquals("Failed to chmod " + rootPath, 0,
            fsShell.run(new String[] { "-chmod", "-R", "777", rootPath.toString() }));

    // Add some splits
    to.addSplits(tableName, getSplits());

    // Then import a single rfile to all the tablets, hoping that we get a failure to import because of the balancer moving tablets around
    // and then we get to verify that the bug is actually fixed.
    to.importDirectory(tableName, bulk.toString(), err.toString(), false);

    // The bug is that some tablets don't get imported into.
    assertEquals(NR * NV, Iterables.size(getConnector().createScanner(tableName, Authorizations.EMPTY)));
}

From source file:org.apache.bigtop.itest.hadoop.hcfs.TestCLI.java

License:Apache License

/**
 * Executes the given Hadoop FsShell command (via Toolrunner).
 *
 * @param cmd the CLI test command to run; must be a filesystem command
 * @return the result of running the command through an FSCmdExecutor
 * @throws IllegalArgumentException if the command is not a CLICommandFS
 */
@Override
protected CommandExecutor.Result execute(CLICommand cmd) throws Exception {
    // Guard clause: only filesystem commands are supported here.
    if (!(cmd.getType() instanceof CLICommandFS)) {
        throw new IllegalArgumentException("Unknown type of test command: " + cmd.getType());
    }
    final CommandExecutor executor = new FSCmdExecutor(namenode, new FsShell(conf));
    return executor.executeCommand(cmd.getCmd());
}

From source file:org.apache.bigtop.itest.hadoop.hdfs.TestCLI.java

License:Apache License

/**
 * Executes the given CLI test command through a Hadoop FsShell wrapped in an
 * FSCmdExecutor.
 *
 * @param cmd the CLI test command to run; must be a filesystem command
 * @return the result of running the command against the namenode
 * @throws IllegalArgumentException if the command is not a CLICommandFS
 */
@Override
protected CommandExecutor.Result execute(CLICommand cmd) throws Exception {
    // Guard clause: reject anything that is not a filesystem command.
    if (!(cmd.getType() instanceof CLICommandFS)) {
        throw new IllegalArgumentException("Unknown type of test command: " + cmd.getType());
    }
    final CommandExecutor executor = new FSCmdExecutor(nn, new FsShell(conf));
    return executor.executeCommand(cmd.getCmd());
}