Example usage for org.apache.hadoop.fs Path Path

List of usage examples for org.apache.hadoop.fs Path Path

Introduction

On this page you can find example usage of the org.apache.hadoop.fs.Path constructor Path(URI).

Prototype

public Path(URI aUri) 

Source Link

Document

Construct a path from a URI
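
A minimal sketch of constructing a Path from a URI; the hostname, port, and file names below are made up for illustration. The usage examples further down mostly rely on Hadoop's companion String constructor, which parses the string into scheme, authority, and path internally.

import java.net.URI;
import org.apache.hadoop.fs.Path;

// Construct a Path directly from a URI; scheme, authority, and path are taken as given.
Path fromUri = new Path(URI.create("hdfs://namenode:8020/user/alice/data.txt"));

// Equivalent construction from a String (the form used in the examples below).
Path fromString = new Path("/user/alice/data.txt");

System.out.println(fromUri.toUri());      // hdfs://namenode:8020/user/alice/data.txt
System.out.println(fromString.getName()); // data.txt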

Usage

From source file:alluxio.underfs.hdfs.HdfsUnderFileSystem.java

License:Apache License

@Override
public void setOwner(String path, String user, String group) throws IOException {
    try {
        FileStatus fileStatus = mFileSystem.getFileStatus(new Path(path));
        LOG.info("Changing file '{}' user from: {} to {}, group from: {} to {}", fileStatus.getPath(),
                fileStatus.getOwner(), user, fileStatus.getGroup(), group);
        mFileSystem.setOwner(fileStatus.getPath(), user, group);
    } catch (IOException e) {
        LOG.error("Fail to set owner for {} with user: {}, group: {}", path, user, group, e);
        LOG.warn("In order for Alluxio to create HDFS files with the correct user and groups, "
                + "Alluxio should be added to the HDFS superusers.");
        throw e;
    }
}

From source file:alluxio.underfs.hdfs.HdfsUnderFileSystem.java

License:Apache License

@Override
public void setMode(String path, short mode) throws IOException {
    try {
        FileStatus fileStatus = mFileSystem.getFileStatus(new Path(path));
        LOG.info("Changing file '{}' permissions from: {} to {}", fileStatus.getPath(),
                fileStatus.getPermission(), mode);
        mFileSystem.setPermission(fileStatus.getPath(), new FsPermission(mode));
    } catch (IOException e) {
        LOG.error("Fail to set permission for {} with perm {}", path, mode, e);
        throw e;
    }
}

From source file:alluxio.underfs.hdfs.HdfsUnderFileSystem.java

License:Apache License

@Override
public String getOwner(String path) throws IOException {
    try {
        return mFileSystem.getFileStatus(new Path(path)).getOwner();
    } catch (IOException e) {
        LOG.error("Fail to get owner for {} ", path, e);
        throw e;
    }
}

From source file:alluxio.underfs.hdfs.HdfsUnderFileSystem.java

License:Apache License

@Override
public String getGroup(String path) throws IOException {
    try {
        return mFileSystem.getFileStatus(new Path(path)).getGroup();
    } catch (IOException e) {
        LOG.error("Fail to get group for {} ", path, e);
        throw e;
    }
}

From source file:alluxio.underfs.hdfs.HdfsUnderFileSystem.java

License:Apache License

@Override
public short getMode(String path) throws IOException {
    try {
        return mFileSystem.getFileStatus(new Path(path)).getPermission().toShort();
    } catch (IOException e) {
        LOG.error("Fail to get permission for {} ", path, e);
        throw e;
    }
}

From source file:alluxio.underfs.hdfs.HdfsUnderFileSystemFactory.java

License:Apache License

@Override
public UnderFileSystem create(String path, Configuration configuration, Object conf) {
    Preconditions.checkArgument(path != null, "path may not be null");

    // Normalize the path to just its root. This is all that's needed to identify which FileSystem
    // the Path belongs to.
    Path rootPath = getRoot(new Path(path));
    synchronized (mHdfsUfsCache) {
        if (!mHdfsUfsCache.containsKey(rootPath)) {
            mHdfsUfsCache.put(rootPath, new HdfsUnderFileSystem(new AlluxioURI(path), configuration, conf));
        }
        return mHdfsUfsCache.get(rootPath);
    }
}
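
The factory above caches one HdfsUnderFileSystem per filesystem root, as the inline comment explains. The getRoot helper is not shown in the snippet; a hypothetical sketch of such a normalization, assuming it keeps only the scheme and authority so that every path on the same cluster maps to the same cache key:

// Hypothetical sketch of getRoot (not part of the snippet above):
// reduce a Path to scheme + authority, e.g. hdfs://namenode:8020/a/b/c -> hdfs://namenode:8020/
private static Path getRoot(Path path) {
    java.net.URI uri = path.toUri();
    return new Path(uri.getScheme(), uri.getAuthority(), "/");
}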

From source file:alluxio.underfs.hdfs.LocalMiniDFSCluster.java

License:Apache License

/**
 * Tests the local minidfscluster only.
 */
public static void main(String[] args) throws Exception {
    LocalMiniDFSCluster cluster = null;
    try {
        cluster = new LocalMiniDFSCluster("/tmp/dfs", 1, 54321);
        cluster.start();
        System.out.println("Address of local minidfscluster: " + cluster.getUnderFilesystemAddress());
        Thread.sleep(10);
        DistributedFileSystem dfs = cluster.getDFSClient();
        dfs.mkdirs(new Path("/1"));
        mkdirs(cluster.getUnderFilesystemAddress() + "/1/2");
        FileStatus[] fs = dfs.listStatus(new Path(AlluxioURI.SEPARATOR));
        assert fs.length != 0;
        System.out.println(fs[0].getPath().toUri());
        dfs.close();

        cluster.shutdown();

        cluster = new LocalMiniDFSCluster("/tmp/dfs", 3);
        cluster.start();
        System.out.println("Address of local minidfscluster: " + cluster.getUnderFilesystemAddress());

        dfs = cluster.getDFSClient();
        dfs.mkdirs(new Path("/1"));

        UnderFileSystemUtils
                .touch(cluster.getUnderFilesystemAddress() + "/1" + "/_format_" + System.currentTimeMillis());
        fs = dfs.listStatus(new Path("/1"));
        assert fs.length != 0;
        System.out.println(fs[0].getPath().toUri());
        dfs.close();

        cluster.shutdown();
    } finally {
        if (cluster != null && cluster.isStarted()) {
            cluster.shutdown();
        }
    }
}

From source file:alluxio.yarn.YarnUtils.java

License:Apache License

/**
 * Creates a local resource for a file on HDFS.
 *
 * @param yarnConf YARN configuration
 * @param resource the path to a resource file on HDFS
 * @throws IOException if the file can not be found on HDFS
 * @return the created local resource
 */
public static LocalResource createLocalResourceOfFile(YarnConfiguration yarnConf, String resource)
        throws IOException {
    LocalResource localResource = Records.newRecord(LocalResource.class);

    Path resourcePath = new Path(resource);

    FileStatus jarStat = FileSystem.get(resourcePath.toUri(), yarnConf).getFileStatus(resourcePath);
    localResource.setResource(ConverterUtils.getYarnUrlFromPath(resourcePath));
    localResource.setSize(jarStat.getLen());
    localResource.setTimestamp(jarStat.getModificationTime());
    localResource.setType(LocalResourceType.FILE);
    localResource.setVisibility(LocalResourceVisibility.PUBLIC);
    return localResource;
}
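
The returned LocalResource is typically registered under a name in a container launch context so that YARN localizes the file before the container starts. A minimal sketch of that usage; the resource key and HDFS path here are assumptions, not taken from the source above.

// Hypothetical usage: have YARN download the jar into the container's working
// directory under the key "alluxio.jar" before launching the container.
Map<String, LocalResource> localResources = new HashMap<>();
localResources.put("alluxio.jar",
        YarnUtils.createLocalResourceOfFile(yarnConf, "hdfs:///alluxio/alluxio.jar"));

ContainerLaunchContext containerContext = Records.newRecord(ContainerLaunchContext.class);
containerContext.setLocalResources(localResources);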

From source file:Analysis.A10_Weekday_v_Weekend_Listens.Listen_History_Weekday_Weekend_Driver.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Listen History - Weekday v Weekend");
    job.setJarByClass(Listen_History_Weekday_Weekend_Driver.class);

    job.setMapperClass(Listen_History_Weekday_Weekend_Mapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(NullWritable.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    int code = job.waitForCompletion(true) ? 0 : 1;

    if (code == 0) {
        for (Counter counter : job.getCounters()
                .getGroup(Listen_History_Weekday_Weekend_Mapper.DAY_COUNTER_GROUP)) {
            System.out.println(counter.getDisplayName() + "\t" + counter.getValue());
        }
    }

    FileSystem.get(conf).delete(new Path(args[1]), true);

    System.exit(code);
}

From source file:Analysis.A1_Total_Unique_Artists_on_Service.Distinct_Artist_Driver.java

/**
 * @param args the command line arguments
 */

public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Distinct Artists available on Service");
    job.setJarByClass(Distinct_Artist_Driver.class);
    job.setMapperClass(Distinct_Artist_Mapper.class);
    job.setCombinerClass(Distinct_Artist_Reducer.class);
    job.setReducerClass(Distinct_Artist_Reducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(NullWritable.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}