Example usage for org.apache.hadoop.fs Path toString

List of usage examples for org.apache.hadoop.fs Path toString

Introduction

This page lists example usages of org.apache.hadoop.fs.Path.toString().

Prototype

@Override
public String toString()

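Path.toString() returns the path in string form, keeping the scheme and authority when the Path was built from a full URI. As a quick illustration of the behavior the examples below rely on (a minimal standalone sketch, not taken from any of the quoted projects):

import org.apache.hadoop.fs.Path;

public class PathToStringDemo {
    public static void main(String[] args) {
        // A path built from a plain string keeps no scheme or authority.
        Path relative = new Path("data/input.txt");
        System.out.println(relative.toString());   // data/input.txt

        // A fully qualified path round-trips its URI form.
        Path qualified = new Path("hdfs://namenode:8020/data/input.txt");
        System.out.println(qualified.toString());   // hdfs://namenode:8020/data/input.txt

        // Concatenating toString() output is the common idiom below for
        // deriving sibling file names.
        Path suffixed = new Path(qualified.toString() + ".inprogress");
        System.out.println(suffixed.getName());     // input.txt.inprogress
    }
}
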
Source Link

Usage

From source file:com.alibaba.jstorm.hdfs.spout.HdfsSpout.java

License:Apache License

/**
 * Renames the file with the .inprogress suffix.
 * @return path of the renamed file
 * @throws IOException if the rename fails
 */
private Path renameToInProgressFile(Path file) throws IOException {
    Path newFile = new Path(file.toString() + inprogress_suffix);
    try {
        if (hdfs.rename(file, newFile)) {
            return newFile;
        }
        throw new RenameException(file, newFile);
    } catch (IOException e) {
        throw new RenameException(file, newFile, e);
    }
}

From source file:com.alibaba.jstorm.hdfs.spout.HdfsSpout.java

License:Apache License

private Path renameCompletedFile(Path file) throws IOException {
    String fileName = file.toString();
    String fileNameMinusSuffix = fileName.substring(0, fileName.indexOf(inprogress_suffix));
    String newName = new Path(fileNameMinusSuffix).getName();

    Path newFile = new Path(archiveDirPath + Path.SEPARATOR + newName);
    LOG.info("Completed consuming file {}", fileNameMinusSuffix);
    if (!hdfs.rename(file, newFile)) {
        throw new IOException("Rename failed for file: " + file);
    }
    LOG.debug("Renamed file {} to {} ", file, newFile);
    return newFile;
}

From source file:com.alibaba.jstorm.hdfs.trident.HdfsState.java

License:Apache License

/**
 * Reads the last txn record from the index file if it exists; otherwise
 * from the .tmp file, if that exists.
 *
 * @param indexFilePath the index file path
 * @return the txn record from the index file, or a default initial record
 * @throws IOException if the record cannot be read
 */
private TxnRecord getTxnRecord(Path indexFilePath) throws IOException {
    Path tmpPath = tmpFilePath(indexFilePath.toString());
    if (this.options.fs.exists(indexFilePath)) {
        return readTxnRecord(indexFilePath);
    } else if (this.options.fs.exists(tmpPath)) {
        return readTxnRecord(tmpPath);
    }
    return new TxnRecord(0, options.currentFile.toString(), 0);
}
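
tmpFilePath is not shown in this excerpt, but the javadoc's mention of a .tmp file implies a sibling-name helper along these lines (a sketch, not the project's actual code):

// Hypothetical reconstruction of the helper used above.
private Path tmpFilePath(String path) {
    return new Path(path + ".tmp");
}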

From source file:com.aliyun.fs.oss.blk.OssFileSystem.java

License:Apache License

@Override
public FileStatus[] listStatus(Path f) throws IOException {
    Path absolutePath = makeAbsolute(f);
    INode inode = store.retrieveINode(absolutePath);
    if (inode == null) {
        return new FileStatus[0];
    }
    if (inode.isFile()) {
        return new FileStatus[] { new OssFileStatus(f.makeQualified(this), inode) };
    }
    ArrayList<FileStatus> ret = new ArrayList<FileStatus>();
    for (Path p : store.listSubPaths(absolutePath)) {
        // Here, we need to convert "file/path" to "/file/path".
        // Otherwise, Path.makeQualified will throw `URISyntaxException`.
        Path modifiedPath = new Path("/" + p.toString());
        ret.add(getFileStatus(modifiedPath.makeQualified(this)));
    }
    return ret.toArray(new FileStatus[0]);
}
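
The leading-slash conversion in the loop exists because store.listSubPaths hands back relative paths such as "file/path", and, as the comment notes, qualifying a relative path fails. A minimal illustration of the fix (fs stands in for this file system instance):

// Hypothetical illustration of the conversion above.
Path relative = new Path("user/data/part-00000");    // as returned by listSubPaths
Path rooted = new Path("/" + relative.toString());   // "/user/data/part-00000"
// rooted.makeQualified(fs) can now build e.g. oss://bucket/user/data/part-00000,
// while qualifying the relative form would fail as the comment above describes.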

From source file:com.aliyun.fs.oss.blk.OssFileSystem.java

License:Apache License

/**
 * @param permission Currently ignored.
 */
@Override
public FSDataOutputStream create(Path file, FsPermission permission, boolean overwrite, int bufferSize,
        short replication, long blockSize, Progressable progress) throws IOException {
    this.blocksForOneTime.clear();
    INode inode = store.retrieveINode(makeAbsolute(file));
    if (inode != null) {
        if (overwrite) {
            delete(file);
        } else {
            throw new IOException("File already exists: " + file);
        }
    } else {
        Path parent = file.getParent();
        if (parent != null) {
            if (!mkdirs(parent)) {
                throw new IOException("Mkdirs failed to create " + parent.toString());
            }
        }
    }
    return new FSDataOutputStream(new OssOutputStream(getConf(), store, makeAbsolute(file), blockSize, progress,
            bufferSize, blocksForOneTime), statistics);
}

From source file:com.aliyun.fs.oss.blk.OssFileSystem.java

License:Apache License

public boolean delete(Path path, boolean recursive) throws IOException {
    Path absolutePath = makeAbsolute(path);
    INode inode = store.retrieveINode(absolutePath);
    if (inode == null) {
        return false;
    }
    if (inode.isFile()) {
        store.deleteINode(absolutePath);
        for (Block block : inode.getBlocks()) {
            store.deleteBlock(block);
        }
    } else {
        FileStatus[] contents = listStatus(absolutePath);
        if (contents == null) {
            return false;
        }
        if ((contents.length != 0) && (!recursive)) {
            throw new IOException("Directory " + path.toString() + " is not empty.");
        }
        for (FileStatus p : contents) {
            if (!delete(p.getPath(), recursive)) {
                return false;
            }
        }
        store.deleteINode(absolutePath);
    }
    return true;
}

From source file:com.aliyun.fs.oss.nat.NativeOssFileSystem.java

License:Apache License

/**
 * <p>
 * If <code>f</code> is a file, this method will make a single call to Oss.
 * If <code>f</code> is a directory, this method will make a maximum of
 * (<i>n</i> / 1000) + 2 calls to Oss, where <i>n</i> is the total number of
 * files and directories contained directly in <code>f</code>.
 * </p>
 */
@Override
public FileStatus[] listStatus(Path f) throws IOException {

    Path absolutePath = makeAbsolute(f);
    String key = pathToKey(absolutePath);

    if (key.length() > 0) {
        final FileStatus fileStatus = getFileStatus(f);
        if (fileStatus.isFile()) {
            return new FileStatus[] { fileStatus };
        }
    }

    URI pathUri = absolutePath.toUri();
    Set<FileStatus> status = new TreeSet<FileStatus>();
    String priorLastKey = null;
    do {
        PartialListing listing = store.list(key, OSS_MAX_LISTING_LENGTH, priorLastKey, false);
        for (FileMetadata fileMetadata : listing.getFiles()) {
            Path subPath = keyToPath(fileMetadata.getKey());
            String relativePath = pathUri.relativize(subPath.toUri()).getPath();

            if (fileMetadata.getKey().equals(key + "/")) {
                // this is just the directory we have been asked to list
            } else if (relativePath.endsWith(FOLDER_SUFFIX)) {
                status.add(newDirectory(
                        new Path("/" + relativePath.substring(0, relativePath.indexOf(FOLDER_SUFFIX)))));
            } else {
                // Here, we need to convert "file/path" to "/file/path".
                // Otherwise, Path.makeQualified will throw `URISyntaxException`.
                Path modifiedPath = new Path("/" + subPath.toString());
                status.add(newFile(fileMetadata, modifiedPath));
            }
        }
        for (String commonPrefix : listing.getCommonPrefixes()) {
            Path subPath = keyToPath(commonPrefix);
            String relativePath = pathUri.relativize(subPath.toUri()).getPath();
            status.add(newDirectory(new Path("/" + relativePath)));
        }
        priorLastKey = listing.getPriorLastKey();
    } while (priorLastKey != null);

    if (status.isEmpty()) {
        return new FileStatus[0];
    }

    return status.toArray(new FileStatus[status.size()]);
}
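
The loop leans on java.net.URI.relativize to turn an object-store key back into a name relative to the directory being listed. A standalone sketch of just that step:

import java.net.URI;
import org.apache.hadoop.fs.Path;

public class RelativizeDemo {
    public static void main(String[] args) {
        URI dirUri = new Path("/data/input").toUri();
        URI subUri = new Path("/data/input/part-00000").toUri();
        // Prints "part-00000": the directory prefix is stripped.
        System.out.println(dirUri.relativize(subUri).getPath());
    }
}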

From source file:com.ambiata.ivory.operation.hadoop.MultipleInputs.java

License:Apache License

/**
 * Add a {@link Path} with a custom {@link InputFormat} to the list of
 * inputs for the map-reduce job.
 *
 * @param job The {@link Job}
 * @param path {@link Path} to be added to the list of inputs for the job
 * @param inputFormatClass {@link InputFormat} class to use for this path
 */
@SuppressWarnings("unchecked")
public static void addInputPath(Job job, Path path, Class<? extends InputFormat> inputFormatClass) {
    /* WAS not encoded */
    String inputFormatMapping = encode(path.toString() + ";" + inputFormatClass.getName());
    Configuration conf = job.getConfiguration();
    String inputFormats = conf.get(DIR_FORMATS);
    conf.set(DIR_FORMATS, inputFormats == null ? inputFormatMapping : inputFormats + "," + inputFormatMapping);

    job.setInputFormatClass(DelegatingInputFormat.class);
}
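
The encode call (flagged by the "WAS not encoded" comment) is this fork's departure from stock Hadoop: the mappings are packed into a single configuration value delimited by "," and ";", so a path containing either character would corrupt the list. The helper's body is not shown; a plausible sketch under that assumption:

// Hypothetical sketch of encode(); the project's real implementation is not shown.
// URL-encoding keeps "," and ";" from colliding with the config delimiters.
private static String encode(String value) {
    try {
        return java.net.URLEncoder.encode(value, "UTF-8");
    } catch (java.io.UnsupportedEncodingException e) {
        throw new IllegalStateException(e); // UTF-8 support is guaranteed
    }
}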

From source file:com.ambiata.ivory.operation.hadoop.MultipleInputs.java

License:Apache License

/**
 * Add a {@link Path} with a custom {@link InputFormat} and
 * {@link Mapper} to the list of inputs for the map-reduce job.
 *
 * @param job The {@link Job}
 * @param path {@link Path} to be added to the list of inputs for the job
 * @param inputFormatClass {@link InputFormat} class to use for this path
 * @param mapperClass {@link Mapper} class to use for this path
 */
@SuppressWarnings("unchecked")
public static void addInputPath(Job job, Path path, Class<? extends InputFormat> inputFormatClass,
        Class<? extends Mapper> mapperClass) {

    addInputPath(job, path, inputFormatClass);
    Configuration conf = job.getConfiguration();
    /* WAS not encoded */
    String mapperMapping = encode(path.toString() + ";" + mapperClass.getName());
    String mappers = conf.get(DIR_MAPPERS);
    conf.set(DIR_MAPPERS, mappers == null ? mapperMapping : mappers + "," + mapperMapping);

    job.setMapperClass(DelegatingMapper.class);
}
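
A hedged driver sketch exercising both overloads above; the paths and SeqMapper are hypothetical, while the input formats are standard Hadoop classes:

// Hypothetical job setup using the two addInputPath overloads above.
Job job = Job.getInstance(new Configuration(), "multi-input");
MultipleInputs.addInputPath(job, new Path("/in/text"), TextInputFormat.class);
MultipleInputs.addInputPath(job, new Path("/in/seq"),
        SequenceFileInputFormat.class, SeqMapper.class);
// Each call appends one encoded "path;class" entry to the DIR_FORMATS
// (and, for the second overload, DIR_MAPPERS) configuration values.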

From source file:com.anhth12.lambda.app.ml.als.ALSUpdate.java

private static JavaPairRDD<String, double[]> readFeaturesRDD(JavaSparkContext sparkContext, Path path) {
    log.info("Loading features RDD from {}", path);

    JavaRDD<String> featureLines = sparkContext.textFile(path.toString());

    return featureLines.mapToPair(new PairFunction<String, String, double[]>() {

        @Override
        public Tuple2<String, double[]> call(String t) throws Exception {
            List<?> update = MAPPER.readValue(t, List.class);
            String key = update.get(0).toString();
            double[] vector = MAPPER.convertValue(update.get(1), double[].class);
            return new Tuple2<>(key, vector);
        }
    });
}
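
The RDD above expects one small JSON array per line: an ID followed by its feature vector. A sketch of the decode step using the same Jackson calls (MAPPER is assumed to be a shared com.fasterxml.jackson.databind.ObjectMapper; the sample line is illustrative):

// Decoding one hypothetical input line, e.g.: ["user-17", [0.12, -0.4, 3.5]]
ObjectMapper mapper = new ObjectMapper();
List<?> update = mapper.readValue("[\"user-17\",[0.12,-0.4,3.5]]", List.class);
String key = update.get(0).toString();                         // "user-17"
double[] vector = mapper.convertValue(update.get(1), double[].class);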