Example usage for org.apache.hadoop.fs Path depth

Introduction

This page collects example usages of org.apache.hadoop.fs.Path#depth() drawn from open-source projects.

Prototype

public int depth() 

Document

Returns the number of elements in this path.
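
As a quick orientation before the real-world usages, here is a minimal sketch of the values depth() returns (the class name PathDepthDemo is illustrative, not taken from any of the projects below): the root path has no elements, and every further component adds one, for absolute and relative paths alike.

import org.apache.hadoop.fs.Path;

public class PathDepthDemo {
    public static void main(String[] args) {
        // The root path has no elements, so its depth is 0.
        System.out.println(new Path("/").depth());       // 0
        // Every additional component adds one element.
        System.out.println(new Path("/a").depth());      // 1
        System.out.println(new Path("/a/b/c").depth());  // 3
        // Relative paths are counted the same way.
        System.out.println(new Path("a/b").depth());     // 2
    }
}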

Usage

From source file:de.huberlin.wbi.hiway.common.Data.java

License:Apache License

/** Writes the local file to the distributed file system. */
public void stageOut() throws IOException {
    Path localPath = getLocalPath();
    Path hdfsDirectory = getHdfsPath().getParent();
    Path hdfsPath = getHdfsPath();
    if (hdfsDirectory.depth() > 0) {
        // create the parent directory unless it is the root, which always exists
        mkHdfsDir(hdfsDirectory);
    }
    hdfs.copyFromLocalFile(false, true, localPath, hdfsPath);
}

From source file:edu.umn.cs.spatialHadoop.nasa.StockQuadTree.java

License:Open Source License

/**
 * Make a path relative to another path by removing all common ancestors
 * @param parent the ancestor path
 * @param descendant a path nested under {@code parent}
 * @return the path of {@code descendant} relative to {@code parent}
 */
private static Path makeRelative(Path parent, Path descendant) {
    Stack<String> components = new Stack<String>();
    while (descendant.depth() > parent.depth()) {
        components.push(descendant.getName());
        descendant = descendant.getParent();
    }
    if (!descendant.equals(parent))
        throw new RuntimeException("descendant not a child of parent");
    if (components.isEmpty())
        return new Path(".");
    Path relative = new Path(components.pop());
    while (!components.isEmpty())
        relative = new Path(relative, components.pop());
    return relative;
}
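
For example, with parent /data and descendant /data/2017/01, the loop pushes "01" and then "2017", the stack is unwound into the relative path 2017/01, and a descendant that does not sit beneath parent fails the equality check and triggers the RuntimeException.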

From source file:hydrograph.engine.cascading.integration.FlowBuilder.java

License:Apache License

public void cleanup(List<String> tmpPathList, RuntimeContext runtimeContext) {
    if (tmpPathList != null) {
        for (String tmpPath : tmpPathList) {

            Path fullPath = new Path(tmpPath);
            // do not delete the root directory
            if (fullPath.depth() == 0)
                continue;
            FileSystem fileSystem;

            LOG.info("Deleting temp path:" + tmpPath);
            try {
                fileSystem = FileSystem.get(runtimeContext.getJobConf());

                fileSystem.delete(fullPath, true);
            } catch (NullPointerException exception) {
                // hack to get around npe thrown when fs reaches root directory
                // if (!(fileSystem instanceof NativeS3FileSystem))
                throw new RuntimeException(exception);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }

        }
    }
}

From source file:io.dataapps.chlorine.hadoop.HDFSScanMR.java

License:Apache License

public static Job makeJob(Configuration conf, Path in, Path out, String matchPath, long scanSince,
        String chlorineConfigFilePath, String queue, String maskPath) throws IOException {
    conf.setBoolean("mapred.output.compress", false);
    conf.setLong("scanSince", scanSince);
    conf.set("matchPath", matchPath);
    conf.set("maskPath", maskPath);
    conf.set("inputPath", in.toString());
    if (queue != null) {
        conf.set("mapred.job.queue.name", queue);
    }
    conf.set("fs.permissions.umask-mode", "007");
    conf.setInt("input_path_depth", in.depth());
    Job job = Job.getInstance(conf, "Chlorine_HDFS_Scan");
    job.setJarByClass(HDFSScanMR.class);
    if (chlorineConfigFilePath != null) {
        try {
            job.addCacheFile(new URI(chlorineConfigFilePath));
            conf.set("finder_file", (new File(chlorineConfigFilePath)).getName());
        } catch (URISyntaxException e) {
            LOG.error(e);
        }
    }
    job.setMapperClass(DeepScanMapper.class);
    job.setNumReduceTasks(0);
    job.setInputFormatClass(TextInputFormat.class);
    TextInputFormat.addInputPath(job, in);
    TextInputFormat.setInputDirRecursive(job, true);
    TextInputFormat.setInputPathFilter(job, NewFilesFilter.class);
    FileOutputFormat.setOutputPath(job, out);
    LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);
    return job;
}

From source file:org.apache.accumulo.server.fs.ViewFSUtils.java

License:Apache License

public static Path matchingFileSystem(Path source, String[] options, Configuration conf) throws IOException {

    if (!isViewFS(source, conf))
        throw new IllegalArgumentException("source " + source + " is not view fs");

    String sourceUriPath = source.toUri().getPath();

    Path match = null;
    int matchPrefixLen = 0;

    // find the option with the longest common path prefix
    for (String option : options) {
        Path optionPath = new Path(option);
        if (isViewFS(optionPath, conf)) {
            String optionUriPath = optionPath.toUri().getPath();

            int commonPrefixLen = 0;
            for (int i = 0; i < Math.min(sourceUriPath.length(), optionUriPath.length()); i++) {
                if (sourceUriPath.charAt(i) == optionUriPath.charAt(i)) {
                    if (sourceUriPath.charAt(i) == '/')
                        commonPrefixLen++;
                } else {
                    break;
                }
            }

            if (commonPrefixLen > matchPrefixLen) {
                matchPrefixLen = commonPrefixLen;
                match = optionPath;
            } else if (match != null && commonPrefixLen == matchPrefixLen
                    && optionPath.depth() < match.depth()) {
                // prefer the shallower path when the common prefix length is the same
                match = optionPath;
            }
        }
    }

    return match;
}
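
To illustrate with a hypothetical layout: for source viewfs:///a/b/c and options viewfs:///a and viewfs:///a/b, the second option shares two '/'-delimited prefix positions against the first option's one, so it is returned.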

From source file:org.apache.accumulo.server.master.recovery.RecoveryPath.java

License:Apache License

public static Path getRecoveryPath(VolumeManager fs, Path walPath) throws IOException {
    if (walPath.depth() >= 3 && walPath.toUri().getScheme() != null) {
        // it's a fully qualified path
        String uuid = walPath.getName();
        // drop uuid
        walPath = walPath.getParent();
        // recovered 1.4 WALs won't have a server component
        if (!walPath.getName().equals(FileType.WAL.getDirectory())) {
            // drop server
            walPath = walPath.getParent();
        }

        if (!walPath.getName().equals(FileType.WAL.getDirectory()))
            throw new IllegalArgumentException("Bad path " + walPath);

        // drop wal
        walPath = walPath.getParent();

        walPath = new Path(walPath, FileType.RECOVERY.getDirectory());
        walPath = new Path(walPath, uuid);

        return walPath;
    }

    throw new IllegalArgumentException("Bad path " + walPath);

}
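
Assuming Accumulo's usual volume layout, a WAL path such as hdfs://nn/accumulo/wal/tserver+9997/<uuid> would be rewritten to hdfs://nn/accumulo/recovery/<uuid>: the uuid and server components are dropped, the wal directory is verified and removed, and the recovery directory plus uuid are appended.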

From source file:org.apache.accumulo.test.VolumeIT.java

License:Apache License

@Test
public void testRelativePaths() throws Exception {

    List<String> expected = new ArrayList<>();

    Connector connector = getConnector();
    String tableName = getUniqueNames(1)[0];
    connector.tableOperations().create(tableName, new NewTableConfiguration().withoutDefaultIterators());

    String tableId = connector.tableOperations().tableIdMap().get(tableName);

    SortedSet<Text> partitions = new TreeSet<>();
    // with some splits
    for (String s : "c,g,k,p,s,v".split(","))
        partitions.add(new Text(s));

    connector.tableOperations().addSplits(tableName, partitions);

    BatchWriter bw = connector.createBatchWriter(tableName, new BatchWriterConfig());

    // create two files in each tablet

    String[] rows = "a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z".split(",");
    for (String s : rows) {
        Mutation m = new Mutation(s);
        m.put("cf1", "cq1", "1");
        bw.addMutation(m);
        expected.add(s + ":cf1:cq1:1");
    }

    bw.flush();
    connector.tableOperations().flush(tableName, null, null, true);

    for (String s : rows) {
        Mutation m = new Mutation(s);
        m.put("cf1", "cq1", "2");
        bw.addMutation(m);
        expected.add(s + ":cf1:cq1:2");
    }

    bw.close();
    connector.tableOperations().flush(tableName, null, null, true);

    verifyData(expected, connector.createScanner(tableName, Authorizations.EMPTY));

    connector.tableOperations().offline(tableName, true);

    connector.securityOperations().grantTablePermission("root", MetadataTable.NAME, TablePermission.WRITE);

    Scanner metaScanner = connector.createScanner(MetadataTable.NAME, Authorizations.EMPTY);
    metaScanner.fetchColumnFamily(MetadataSchema.TabletsSection.DataFileColumnFamily.NAME);
    metaScanner.setRange(new KeyExtent(tableId, null, null).toMetadataRange());

    BatchWriter mbw = connector.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());

    for (Entry<Key, Value> entry : metaScanner) {
        String cq = entry.getKey().getColumnQualifier().toString();
        if (cq.startsWith(v1.toString())) {
            Path path = new Path(cq);
            String relPath = "/" + path.getParent().getName() + "/" + path.getName();
            Mutation fileMut = new Mutation(entry.getKey().getRow());
            fileMut.putDelete(entry.getKey().getColumnFamily(), entry.getKey().getColumnQualifier());
            fileMut.put(entry.getKey().getColumnFamily().toString(), relPath, entry.getValue().toString());
            mbw.addMutation(fileMut);
        }
    }

    mbw.close();

    connector.tableOperations().online(tableName, true);

    verifyData(expected, connector.createScanner(tableName, Authorizations.EMPTY));

    connector.tableOperations().compact(tableName, null, null, true, true);

    verifyData(expected, connector.createScanner(tableName, Authorizations.EMPTY));

    for (Entry<Key, Value> entry : metaScanner) {
        String cq = entry.getKey().getColumnQualifier().toString();
        Path path = new Path(cq);
        Assert.assertTrue("relative path not deleted " + path.toString(), path.depth() > 2);
    }

}

From source file:org.apache.accumulo.tserver.log.SortedLogRecovery.java

License:Apache License

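// Returns the last two components of the path, e.g. "server/filename";
// any path with fewer than two elements is rejected.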
private String getPathSuffix(String pathString) {
    Path path = new Path(pathString);
    if (path.depth() < 2)
        throw new IllegalArgumentException("Bad path " + pathString);
    return path.getParent().getName() + "/" + path.getName();
}

From source file:org.apache.drill.exec.store.ColumnExplorer.java

License:Apache License

public static int getPartitionDepth(FileSelection selection) {
    // a depth of table root path
    int rootDepth = selection.getSelectionRoot().depth();

    int partitionsCount = 0;
    for (Path file : selection.getFiles()) {
        // Calculates partitions count for the concrete file:
        // depth of file path - depth of table root path - 1.
        // The depth of file path includes file itself,
        // so we should subtract 1 to consider only directories.
        int currentPartitionsCount = file.depth() - rootDepth - 1;
        // max depth of files path should be used to handle all partitions
        partitionsCount = Math.max(partitionsCount, currentPartitionsCount);
    }
    return partitionsCount;
}
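
For example, if the selection root is /t (depth 1) and the deepest selected file is /t/2017/01/data.csv (depth 4), that file contributes 4 - 1 - 1 = 2 partition directories, so the method returns 2.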

From source file:org.apache.drill.exec.store.ColumnExplorer.java

License:Apache License

/**
 * Low-level parse of partitions, returned as a string array. Returns
 * null for invalid input.
 *
 * @param file file path
 * @param root root directory
 * @param hasDirsOnly whether {@code file} already refers to a directory
 *                    (if false, its parent directory is used)
 * @return array of directory names, or null if the arguments are invalid
 */
public static String[] parsePartitions(Path file, Path root, boolean hasDirsOnly) {
    if (file == null || root == null) {
        return null;
    }

    if (!hasDirsOnly) {
        file = file.getParent();
    }

    int rootDepth = root.depth();
    int fileDepth = file.depth();
    int diffCount = fileDepth - rootDepth;
    if (diffCount < 0) {
        return null;
    }

    String[] diffDirectoryNames = new String[diffCount];

    // start filling in array from the end
    for (int i = rootDepth; fileDepth > i; i++) {
        // place in the end of array
        diffDirectoryNames[fileDepth - i - 1] = file.getName();
        file = file.getParent();
    }

    return diffDirectoryNames;
}
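
As a worked example: with root /t, file /t/a/b/data.csv, and hasDirsOnly false, the file is first replaced by its parent /t/a/b; the depth difference is 3 - 1 = 2, and the loop fills the array from the end, yielding {"a", "b"}.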