Example usage for org.apache.hadoop.fs Path toUri

Introduction

On this page you can find example usages of org.apache.hadoop.fs.Path.toUri().

Prototype

public URI toUri() 

Document

Convert this Path to a URI.
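
Before the usage list, a minimal sketch (the paths and namenode address are hypothetical) of what the returned URI looks like for qualified and bare paths:

import java.net.URI;
import org.apache.hadoop.fs.Path;

public class PathToUriDemo {
    public static void main(String[] args) {
        // A fully qualified path keeps its scheme and authority in the URI.
        URI qualified = new Path("hdfs://namenode:8020/user/data").toUri();
        System.out.println(qualified.getScheme());    // hdfs
        System.out.println(qualified.getAuthority()); // namenode:8020
        System.out.println(qualified.getPath());      // /user/data

        // A bare path yields a URI with no scheme or authority.
        URI bare = new Path("/user/data").toUri();
        System.out.println(bare.getScheme()); // null
        System.out.println(bare.getPath());   // /user/data
    }
}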

Usage

From source file:com.aliyun.odps.volume.VolumeFSUtil.java

License:Apache License

/**
 * Get volume name from a specific {@link Path}
 * @param path the target path
 * @throws VolumeException if no volume name is present in the path
 */
public static String getVolumeFromPath(Path path) throws VolumeException {
    path = Path.getPathWithoutSchemeAndAuthority(path);
    if (path.depth() == 0) {
        throw new VolumeException(VolumeFSErrorCode.VolumeMissing, "No volume found!");
    } else {
        String p = path.toUri().getPath();
        String volume = p.split(VolumeFSConstants.SEPARATOR)[1];
        return volume;
    }
}
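
A hedged invocation sketch (the path below is hypothetical, and VolumeFSConstants.SEPARATOR is assumed to be "/"): the first component of toUri().getPath() after the leading separator is taken as the volume name.

Path path = new Path("odps://service/vol1/dir/file.txt");
String volume = VolumeFSUtil.getVolumeFromPath(path); // "vol1"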

From source file:com.aliyun.odps.volume.VolumeFSUtil.java

License:Apache License

/**
 * Probe for a path being a parent of another
 * @param parent parent path
 * @param child possible child path
 * @return true if the parent's path matches the start of the child's
 */
public static boolean isParentOf(Path parent, Path child) {
    URI parentURI = parent.toUri();
    String parentPath = parentURI.getPath();
    if (!parentPath.endsWith("/")) {
        parentPath += "/";
    }
    URI childURI = child.toUri();
    String childPath = childURI.getPath();
    return childPath.startsWith(parentPath);
}
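
A short usage sketch with hypothetical paths. Note that only the URI path components are compared, so scheme and authority are ignored and both arguments should be qualified consistently:

Path parent = new Path("hdfs://nn:8020/data");
Path child = new Path("hdfs://nn:8020/data/2024/part-0000");
boolean related = VolumeFSUtil.isParentOf(parent, child); // true
boolean inverse = VolumeFSUtil.isParentOf(child, parent); // false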

From source file:com.ambimmort.webos.plugins.vfs4hdfs.HdfsFileObject.java

License:Apache License

/**
 * @see org.apache.commons.vfs2.provider.AbstractFileObject#doListChildrenResolved()
 */
@Override
protected FileObject[] doListChildrenResolved() throws Exception {
    if (this.doGetType() != FileType.FOLDER) {
        return null;
    }
    final String[] children = doListChildren();
    final FileObject[] fo = new FileObject[children.length];
    for (int i = 0; i < children.length; i++) {
        final Path p = new Path(this.path, children[i]);
        fo[i] = this.fs.resolveFile(p.toUri().toString());
    }
    return fo;
}

From source file:com.asakusafw.bulkloader.common.FileNameUtil.java

License:Apache License

/**
 * Resolves the raw paths.
 * @param conf current configuration
 * @param rawPaths raw paths
 * @param executionId current execution ID
 * @param user current user name
 * @return the resolved full paths
 * @throws BulkLoaderSystemException if failed to resolve the path
 * @since 0.4.0
 */
public static List<Path> createPaths(Configuration conf, List<String> rawPaths, String executionId, String user)
        throws BulkLoaderSystemException {
    String basePathString = ConfigurationLoader.getProperty(Constants.PROP_KEY_BASE_PATH);
    Path basePath;
    if (basePathString == null || basePathString.isEmpty()) {
        basePath = null;
    } else {
        basePath = new Path(basePathString);
    }
    VariableTable variables = Constants.createVariableTable();
    variables.defineVariable(Constants.HDFS_PATH_VARIABLE_USER, user);
    variables.defineVariable(Constants.HDFS_PATH_VARIABLE_EXECUTION_ID, executionId);
    FileSystem fs;
    try {
        if (basePath == null) {
            fs = FileSystem.get(conf);
        } else {
            fs = FileSystem.get(basePath.toUri(), conf);
            basePath = fs.makeQualified(basePath);
        }
    } catch (IOException e) {
        throw new BulkLoaderSystemException(e, CLASS, "TG-COMMON-00019", rawPaths);
    }
    List<Path> results = new ArrayList<>();
    for (String rawPath : rawPaths) {
        String resolved = variables.parse(rawPath, false);
        Path fullPath;
        if (basePath == null) {
            fullPath = fs.makeQualified(new Path(resolved));
        } else {
            fullPath = new Path(basePath, resolved);
        }
        results.add(fullPath);
    }
    return results;
}
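
When no base path is configured, the code above relies on FileSystem#makeQualified to complete the scheme-less path. A minimal sketch of that behavior (the fs.defaultFS value is hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Assuming fs.defaultFS is set to hdfs://nn:8020 in the configuration.
FileSystem fs = FileSystem.get(new Configuration());
Path qualified = fs.makeQualified(new Path("/user/data"));
System.out.println(qualified); // hdfs://nn:8020/user/data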

From source file:com.asakusafw.bulkloader.extractor.DfsFileImport.java

License:Apache License

private long putCachePatch(FileProtocol protocol, InputStream content, ImportBean bean, String user)
        throws BulkLoaderSystemException {
    assert protocol != null;
    assert content != null;
    assert bean != null;
    assert user != null;
    assert protocol.getKind() == FileProtocol.Kind.CREATE_CACHE
            || protocol.getKind() == FileProtocol.Kind.UPDATE_CACHE;

    CacheInfo info = protocol.getInfo();
    assert info != null;

    ImportTargetTableBean targetTableBean = bean.getTargetTable(info.getTableName());
    if (targetTableBean == null) {
        // the corresponding table is not defined in the Importer DSL
        throw new BulkLoaderSystemException(getClass(), "TG-EXTRACTOR-02001", MessageFormat.format(
                "target table is not defined in the DSL: {0}",
                info.getTableName()));
    }

    URI dfsFilePath = resolveLocation(bean, user, protocol.getLocation());
    try (CacheStorage storage = new CacheStorage(new Configuration(), dfsFilePath)) {
        LOG.info("TG-EXTRACTOR-11001", info.getId(), info.getTableName(), storage.getPatchProperties());
        storage.putPatchCacheInfo(info);
        LOG.info("TG-EXTRACTOR-11002", info.getId(), info.getTableName(), storage.getPatchProperties());

        Class<?> targetTableModel = targetTableBean.getImportTargetType();
        Path targetUri = storage.getPatchContents("0");
        LOG.info("TG-EXTRACTOR-11003", info.getId(), info.getTableName(), targetUri);
        long recordCount = write(targetTableModel, targetUri.toUri(), content);
        LOG.info("TG-EXTRACTOR-11004", info.getId(), info.getTableName(), targetUri, recordCount);
        LOG.info("TG-PROFILE-01002", bean.getTargetName(), bean.getBatchId(), bean.getJobflowId(),
                bean.getExecutionId(), info.getTableName(), recordCount);
        return recordCount;
    } catch (IOException e) {
        throw new BulkLoaderSystemException(e, getClass(), "TG-EXTRACTOR-11005", info.getId(),
                info.getTableName(), dfsFilePath);
    }
}

From source file:com.asakusafw.dag.runtime.internalio.InternalInputAdapter.java

License:Apache License

private <T extends Writable> void resolve(List<Path> paths, Class<T> type, Consumer<TaskInfo> sink)
        throws IOException {
    FileSystem fs = FileSystem.get(configuration);
    Supplier<? extends T> supplier = () -> objectFactory.newInstance(type);
    List<FileStatus> stats = new ArrayList<>();
    for (Path path : paths) {
        List<FileStatus> s = TemporaryStorage.listStatus(configuration, path);
        stats.addAll(s);
    }
    for (FileStatus stat : stats) {
        Path p = stat.getPath();
        File local = null;
        URI uri = p.toUri();
        String scheme = uri.getScheme();
        if (scheme != null && scheme.equals("file")) { //$NON-NLS-1$
            local = new File(uri);
        }
        long length = stat.getLen();
        if (length == 0) {
            continue;
        }
        int blocks = (int) ((length + TemporaryFile.BLOCK_SIZE - 1) / TemporaryFile.BLOCK_SIZE);
        for (int i = 0; i < blocks; i++) {
            if (local == null) {
                sink.accept(new HadoopInternalInputTaskInfo<>(fs, p, i, 1, supplier));
            } else {
                sink.accept(new LocalInternalInputTaskInfo<>(local, i, 1, supplier));
            }
        }
    }
}
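
The scheme check above is a common pattern for routing file:-scheme paths to local I/O. A minimal standalone sketch (the path is hypothetical):

import java.io.File;
import java.net.URI;
import org.apache.hadoop.fs.Path;

Path p = new Path("file:///tmp/input/part-0000");
URI uri = p.toUri();
if ("file".equals(uri.getScheme())) {
    // java.io.File accepts an absolute file: URI directly.
    File local = new File(uri);
    System.out.println(local.getAbsolutePath()); // /tmp/input/part-0000
}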

From source file:com.asakusafw.operation.tools.hadoop.fs.Clean.java

License:Apache License

boolean remove(Path path, Context context) {
    LOG.info(MessageFormat.format("[OT-CLEAN-I01000] Start cleaning: {0}", path));
    FileSystem fs;
    try {
        fs = FileSystem.get(path.toUri(), getConf());
    } catch (Exception e) {
        LOG.error(MessageFormat.format("[OT-CLEAN-E01001] Failed to connect to filesystem: {0}", path), e);
        context.setError();
        return false;
    }
    List<FileStatus> files;
    try {
        files = asList(fs.globStatus(path));
    } catch (Exception e) {
        LOG.error(MessageFormat.format("[OT-CLEAN-E01002] Failed to glob path pattern: {0}", path), e);
        context.setError();
        return false;
    }
    if (files.isEmpty()) {
        LOG.warn(MessageFormat.format("[OT-CLEAN-W01001] Target file is not found: {0}", path));
        context.setError();
        return false;
    }
    boolean removed = true;
    long start = System.currentTimeMillis();
    for (FileStatus file : files) {
        removed &= remove(fs, file, context);
    }
    long end = System.currentTimeMillis();
    LOG.info(MessageFormat.format("[OT-CLEAN-I01999] Finish cleaning: {0} (all-removed={1}, elapsed={2}ms)",
            path, removed, end - start));
    return removed;
}
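
FileSystem.get(path.toUri(), conf) selects the FileSystem implementation registered for the path's scheme, falling back to fs.defaultFS when the URI carries no scheme. A minimal sketch (the namenode address is hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

Configuration conf = new Configuration();
Path target = new Path("hdfs://nn:8020/tmp/cleanup/*");
// The same call would return a LocalFileSystem for a file: path.
FileSystem fs = FileSystem.get(target.toUri(), conf);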

From source file:com.asakusafw.runtime.directio.hadoop.HadoopDataSourceUtil.java

License:Apache License

/**
 * Returns whether the parent path contains the child path, or not.
 * If the parent and child are the same, this returns {@code false}.
 * @param parent the parent path
 * @param child the child path
 * @return {@code true} if parent path strictly contains the child, otherwise {@code false}
 * @throws IllegalArgumentException if some parameters were {@code null}
 */
public static boolean contains(Path parent, Path child) {
    if (parent == null) {
        throw new IllegalArgumentException("parent must not be null"); //$NON-NLS-1$
    }
    if (child == null) {
        throw new IllegalArgumentException("child must not be null"); //$NON-NLS-1$
    }
    if (parent.depth() >= child.depth()) {
        return false;
    }
    URI parentUri = parent.toUri();
    URI childUri = child.toUri();
    URI relative = parentUri.relativize(childUri);
    if (relative.equals(childUri) == false) {
        return true;
    }
    return false;
}
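
The check rests on a java.net.URI guarantee: relativize returns its argument unchanged when the argument is not located under the base URI. A hedged illustration with hypothetical paths:

URI base = new Path("hdfs://nn:8020/data").toUri();
URI inside = new Path("hdfs://nn:8020/data/2024/part-0000").toUri();
URI outside = new Path("hdfs://nn:8020/other").toUri();

System.out.println(base.relativize(inside));  // 2024/part-0000 (relativized)
System.out.println(base.relativize(outside)); // hdfs://nn:8020/other (unchanged)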

From source file:com.asakusafw.runtime.directio.hadoop.HadoopDataSourceUtil.java

License:Apache License

private static boolean isLocalPath(Path path) {
    assert path != null;
    String scheme = path.toUri().getScheme();
    return scheme != null && scheme.equals("file"); //$NON-NLS-1$
}

From source file:com.asakusafw.runtime.directio.hadoop.HadoopDataSourceUtil.java

License:Apache License

@SuppressWarnings("unchecked")
private static List<Path> createFileListRelative(Counter counter, FileSystem fs, Path source)
        throws IOException {
    assert counter != null;
    assert fs != null;
    assert source != null;
    assert source.isAbsolute();
    URI baseUri = source.toUri();
    FileStatus root;
    try {
        root = fs.getFileStatus(source);
    } catch (FileNotFoundException e) {
        LOG.warn(MessageFormat.format("Source path is not found: {0} (May be already moved)", baseUri));
        return Collections.emptyList();
    }
    counter.add(1);
    List<FileStatus> all = recursiveStep(fs, Collections.singletonList(root));
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format("Source path contains {1} files/directories: {0}", //$NON-NLS-1$
                baseUri, all.size()));
    }
    List<Path> results = new ArrayList<>();
    for (FileStatus stat : all) {
        if (FileSystemCompatibility.isDirectory(stat)) {
            continue;
        }
        Path path = stat.getPath();
        URI uri = path.toUri();
        URI relative = baseUri.relativize(uri);
        if (relative.equals(uri) == false) {
            results.add(new Path(relative));
        } else {
            throw new IOException(MessageFormat.format("Failed to compute relative path: base={0}, target={1}",
                    baseUri, uri));
        }
        counter.add(1);
    }
    Collections.sort(results);
    return results;
}