Example usage for org.apache.hadoop.fs FileSystem getLocal

Introduction

This page collects usage examples for the org.apache.hadoop.fs.FileSystem method getLocal.

Prototype

public static LocalFileSystem getLocal(Configuration conf) throws IOException 

Document

Get the local FileSystem.
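
A minimal, self-contained sketch of the call (the path here is a made-up illustration, not taken from the examples below):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;

public class GetLocalExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // getLocal returns the LocalFileSystem implementation for this configuration
        LocalFileSystem localFs = FileSystem.getLocal(conf);
        // Hypothetical path, used only to show the returned file system in action
        Path p = new Path("/tmp/example.txt");
        System.out.println(p + " exists? " + localFs.exists(p));
    }
}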

Usage

From source file: edu.nyu.vida.data_polygamy.utils.GetMergeFiles.java

License: BSD License

public static void main(String[] args) throws IllegalArgumentException, IOException, URISyntaxException {
    String fromDirectory = args[0];
    String toEventsDirectory = args[1];
    String toOutliersDirectory = args[2];
    String metadataFile = args[3];

    // Detecting datasets.

    HashSet<String> datasets = new HashSet<String>();

    FileReader fileReader = new FileReader(metadataFile);
    BufferedReader bufferedReader = new BufferedReader(fileReader);

    String line;
    while ((line = bufferedReader.readLine()) != null) {
        String[] parts = line.split(",");
        datasets.add(parts[0]);
    }
    bufferedReader.close();

    // Downloading relationships.

    String relationshipPatternStr = "([a-zA-Z0-9]{4}\\-[a-zA-Z0-9]{4})\\-([a-zA-Z0-9]{4}\\-[a-zA-Z0-9]{4})";
    Pattern relationshipPattern = Pattern.compile(relationshipPatternStr);

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    FileSystem localFS = FileSystem.getLocal(conf);

    for (FileStatus status : fs.listStatus(new Path(fs.getHomeDirectory() + "/" + fromDirectory))) {
        if (!status.isDirectory()) {
            continue;
        }
        Path file = status.getPath();

        Matcher m = relationshipPattern.matcher(file.getName());
        if (!m.find())
            continue;

        String ds1 = m.group(1);
        String ds2 = m.group(2);

        if (!datasets.contains(ds1))
            continue;
        if (!datasets.contains(ds2))
            continue;

        for (FileStatus statusDir : fs.listStatus(file)) {
            if (!statusDir.isDirectory()) {
                continue;
            }

            Path fromPath = statusDir.getPath();
            String toPathStr;
            if (fromPath.getName().contains("events")) {
                toPathStr = toEventsDirectory + "/" + fromPath.getParent().getName() + "-" + fromPath.getName();
            } else {
                toPathStr = toOutliersDirectory + "/" + fromPath.getParent().getName() + "-"
                        + fromPath.getName();
            }
            Path toPath = new Path(toPathStr);

            System.out.println("Copying:");
            System.out.println("  From: " + fromPath.toString());
            System.out.println("  To: " + toPath.toString());

            FileUtil.copyMerge(fs, // HDFS File System
                    fromPath, // HDFS path
                    localFS, // Local File System
                    toPath, // Local Path
                    false, // Do not delete HDFS path
                    conf, // Configuration
                    null); // No string appended after each merged file
        }
    }
}
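
Note that FileUtil.copyMerge was deprecated in the Hadoop 2.x line and removed in Hadoop 3, so on current releases this merge step has to be reimplemented by hand.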

From source file: edu.purdue.cybercenter.dm.storage.AbstractStorageFileManager.java

public AbstractStorageFileManager() throws IOException {
    Configuration configuration = new Configuration();
    this.localFileSystem = FileSystem.getLocal(configuration).getRawFileSystem();

    // the one and only default storage
    List<Storage> storages = (List<Storage>) Storage.findAllStorages();
    if (storages.size() == 1) {
        localStorage = storages.get(0);
    } else if (storages.isEmpty()) {
        throw new RuntimeException("no storage configured");
    } else {
        throw new RuntimeException("more than one storage configured");
    }
}
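
Here getLocal is immediately unwrapped with getRawFileSystem(): getLocal returns a checksumming LocalFileSystem that writes .crc sidecar files alongside the data, while the underlying RawLocalFileSystem skips those checksums. The next example uses the same idiom.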

From source file: edu.purdue.cybercenter.dm.storage.HdfsStorageFileManager.java

private FileSystem getFileSystemType(String type) throws IOException {
    FileSystem fs = null;
    if (type.equals(AccessMethodType.FILE)) {
        fs = FileSystem.getLocal(configuration).getRawFileSystem();
    }
    return fs;
}

From source file: edu.stolaf.cs.wmrserver.JobServiceHandler.java

License: Apache License

private void checkPath(FileSystem fs, Path path) throws PermissionException, NotFoundException, IOException {
    if (_disallowLocalInput) {
        // If we update to Hadoop 1.0, we should use the canonical URI, which is
        // guaranteed to be unique to each file system; the plain URI should be unique too.
        if (fs.getUri().equals(FileSystem.getLocal(new Configuration()).getUri())) {
            throw new PermissionException("Not allowed to read from the local file system.");
        }
    }

    if (!fs.exists(path))
        throw new NotFoundException("Input path does not exist: " + path.toString());

    if (_enforceInputContainment) {
        // Check that the path is inside the home directory
        Path relativePath = relativizePath(_homeDir, path);
        if (relativePath.isAbsolute()) {
            // Still absolute after relativizing: it has an authority or begins with "/"
            throw new PermissionException("Not allowed to read outside the WebMapReduce home directory ("
                    + _homeDir.toString() + "). Please specify a relative path.");
        }
    }
}

From source file: edu.uci.ics.asterix.aoya.test.YARNCluster.java

License: Apache License

private void cleanupLocal() throws IOException {
    // cleanup artifacts created on the local file system
    FileSystem lfs = FileSystem.getLocal(new Configuration());
    lfs.delete(new Path("build"), true);
    System.setProperty("hadoop.log.dir", "logs");
}
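
The test fixtures below repeat this idiom: FileSystem.getLocal(new Configuration()) is obtained only to delete the local build directory left over from a previous run before a MiniDFSCluster starts.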

From source file: edu.uci.ics.hyracks.hdfs.dataflow.DataflowTest.java

License: Apache License

/**
 * Start the HDFS cluster and set up the data files.
 *
 * @throws IOException
 */
private void startHDFS() throws IOException {
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));

    FileSystem lfs = FileSystem.getLocal(new Configuration());
    lfs.delete(new Path("build"), true);
    System.setProperty("hadoop.log.dir", "logs");
    dfsCluster = new MiniDFSCluster(conf, numberOfNC, true, null);
    FileSystem dfs = FileSystem.get(conf);
    Path src = new Path(DATA_PATH);
    Path dest = new Path(HDFS_INPUT_PATH);
    Path result = new Path(HDFS_OUTPUT_PATH);
    dfs.mkdirs(dest);
    dfs.mkdirs(result);
    dfs.copyFromLocalFile(src, dest);

    DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
    conf.writeXml(confOutput);
    confOutput.flush();
    confOutput.close();
}

From source file: edu.uci.ics.hyracks.hdfs2.dataflow.DataflowTest.java

License: Apache License

/**
 * Start the HDFS cluster and set up the data files.
 *
 * @throws IOException
 */
private void startHDFS() throws IOException {
    conf.getConfiguration().addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
    conf.getConfiguration().addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
    conf.getConfiguration().addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));

    FileSystem lfs = FileSystem.getLocal(new Configuration());
    lfs.delete(new Path("build"), true);
    System.setProperty("hadoop.log.dir", "logs");
    dfsCluster = dfsClusterFactory.getMiniDFSCluster(conf.getConfiguration(), numberOfNC);
    FileSystem dfs = FileSystem.get(conf.getConfiguration());
    Path src = new Path(DATA_PATH);
    Path dest = new Path(HDFS_INPUT_PATH);
    Path result = new Path(HDFS_OUTPUT_PATH);
    dfs.mkdirs(dest);
    dfs.mkdirs(result);
    dfs.copyFromLocalFile(src, dest);

    DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
    conf.getConfiguration().writeXml(confOutput);
    confOutput.flush();
    confOutput.close();
}

From source file: edu.uci.ics.pregelix.example.asterixdb.ConnectorTest.java

License: Apache License

@SuppressWarnings("deprecation")
private static void startHDFS() throws IOException {
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
    FileSystem lfs = FileSystem.getLocal(new Configuration());
    lfs.delete(new Path("build"), true);
    System.setProperty("hadoop.log.dir", "logs");
    dfsCluster = new MiniDFSCluster(conf, numberOfNC, true, null);
    DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
    conf.writeXml(confOutput);
    confOutput.flush();
    confOutput.close();
}

From source file: edu.uci.ics.pregelix.example.dataload.DataLoadTest.java

License: Apache License

private void startHDFS() throws IOException {
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
    FileSystem lfs = FileSystem.getLocal(new Configuration());
    lfs.delete(new Path("build"), true);
    System.setProperty("hadoop.log.dir", "logs");
    dfsCluster = new MiniDFSCluster(conf, numberOfNC, true, null);
}

From source file: edu.uci.ics.pregelix.example.jobrun.RunJobTestSuite.java

License: Apache License

private void startHDFS() throws IOException {
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
    FileSystem lfs = FileSystem.getLocal(new Configuration());
    lfs.delete(new Path("build"), true);
    System.setProperty("hadoop.log.dir", "logs");
    dfsCluster = new MiniDFSCluster(conf, numberOfNC, true, null);
    FileSystem dfs = FileSystem.get(conf);
    Path src = new Path(DATA_PATH);
    Path dest = new Path(HDFS_PATH);
    dfs.mkdirs(dest);
    dfs.copyFromLocalFile(src, dest);

    src = new Path(DATA_PATH2);
    dest = new Path(HDFS_PATH2);
    dfs.mkdirs(dest);
    dfs.copyFromLocalFile(src, dest);

    src = new Path(DATA_PATH3);
    dest = new Path(HDFS_PATH3);
    dfs.mkdirs(dest);
    dfs.copyFromLocalFile(src, dest);

    DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
    conf.writeXml(confOutput);
    confOutput.flush();
    confOutput.close();
}