Example usage for org.apache.hadoop.fs FileSystem getHomeDirectory

List of usage examples for org.apache.hadoop.fs FileSystem getHomeDirectory

Introduction

On this page you can find example usage of org.apache.hadoop.fs FileSystem getHomeDirectory.

Prototype

public Path getHomeDirectory() 

Document

Return the current user's home directory in this FileSystem.
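
A minimal, self-contained sketch (not taken from any of the sources below; the class name and the "my-app/conf" path are illustrative): obtain a FileSystem from a Configuration, call getHomeDirectory, and resolve a relative path against it, which is the pattern most of the examples on this page follow.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HomeDirectoryExample {
    public static void main(String[] args) throws Exception {
        // Assumes the default Configuration points at the target cluster.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        // The current user's home directory, e.g. hdfs://namenode/user/<name>
        Path home = fs.getHomeDirectory();
        System.out.println("Home directory: " + home);

        // Relative paths are typically resolved against the home directory.
        Path appConfDir = new Path(home, "my-app/conf");
        System.out.println("Application conf dir: " + appConfDir);
    }
}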

Usage

From source file:edu.uci.ics.asterix.aoya.test.AsterixYARNInstanceUtil.java

License:Apache License

public void tearDown() throws Exception {
    FileSystem fs = FileSystem.get(appConf);
    Path instance = new Path(fs.getHomeDirectory(), AsterixYARNClient.CONF_DIR_REL + "/");
    fs.delete(instance, true);
    miniCluster.close();
    File outdir = new File(PATH_ACTUAL);
    File[] files = outdir.listFiles();
    if (files == null || files.length == 0) {
        outdir.delete();
    }
}

From source file:edu.uci.ics.asterix.aoya.Utils.java

License:Apache License

/**
 * Lists the deployed instances of AsterixDB on a YARN cluster
 *
 * @param conf
 *            Hadoop configuration object
 * @param confDirRel
 *            Relative AsterixDB configuration path for DFS
 * @throws IOException
 */

public static void listInstances(Configuration conf, String confDirRel) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    Path instanceFolder = new Path(fs.getHomeDirectory(), confDirRel);
    if (!fs.exists(instanceFolder)) {
        System.out.println("No running or stopped AsterixDB instances exist in this cluster.");
        return;
    }
    FileStatus[] instances = fs.listStatus(instanceFolder);
    if (instances.length != 0) {
        System.out.println("Existing AsterixDB instances: ");
        for (int i = 0; i < instances.length; i++) {
            FileStatus st = instances[i];
            String name = st.getPath().getName();
            ApplicationId lockFile = AsterixYARNClient.getLockFile(name, conf);
            if (lockFile != null) {
                System.out.println(
                        "Instance " + name + " is running with Application ID: " + lockFile.toString());
            } else {
                System.out.println("Instance " + name + " is stopped");
            }
        }
    } else {
        System.out.println("No running or stopped AsterixDB instances exist in this cluster");
    }
}

From source file:edu.uci.ics.asterix.aoya.Utils.java

License:Apache License

/**
 * Return the available backup snapshot names.
 *
 * @param conf
 *            Hadoop configuration object
 * @param confDirRel
 *            Relative AsterixDB configuration path for DFS
 * @param instance
 *            The asterix instance name
 * @return The list of backup snapshot names
 * @throws IOException
 */
public static List<String> getBackups(Configuration conf, String confDirRel, String instance)
        throws IOException {
    FileSystem fs = FileSystem.get(conf);
    Path backupFolder = new Path(fs.getHomeDirectory(), confDirRel + "/" + instance + "/" + "backups");
    FileStatus[] backups = fs.listStatus(backupFolder);
    List<String> backupNames = new ArrayList<String>();
    for (FileStatus f : backups) {
        backupNames.add(f.getPath().getName());
    }
    return backupNames;
}

From source file:edu.uci.ics.asterix.aoya.Utils.java

License:Apache License

/**
 * Removes backup snapshots from the DFS
 *
 * @param conf
 *            DFS Configuration
 * @param confDirRel
 *            Configuration relative directory
 * @param instance
 *            The asterix instance name
 * @param timestamp
 *            The snapshot timestamp (ID)
 * @throws IOException
 */
public static void rmBackup(Configuration conf, String confDirRel, String instance, long timestamp)
        throws IOException {
    FileSystem fs = FileSystem.get(conf);
    Path backupFolder = new Path(fs.getHomeDirectory(), confDirRel + "/" + instance + "/" + "backups");
    FileStatus[] backups = fs.listStatus(backupFolder);
    if (backups.length != 0) {
        System.out.println("Backups for instance " + instance + ": ");
    } else {
        System.out.println("No backups found for instance " + instance + ".");
    }
    for (FileStatus f : backups) {
        String name = f.getPath().getName();
        long file_ts = Long.parseLong(name);
        if (file_ts == timestamp) {
            System.out.println("Deleting backup " + timestamp);
            if (!fs.delete(f.getPath(), true)) {
                System.out.println("Backup could not be deleted");
            }
            return;
        }
    }
    System.out.println("No backup found with specified timestamp");

}

From source file:edu.uci.ics.asterix.aoya.Utils.java

License:Apache License

public static String getCCHostname(String instanceName, Configuration conf) throws YarnException {
    try {
        FileSystem fs = FileSystem.get(conf);
        String instanceFolder = instanceName + "/";
        String pathSuffix = CONF_DIR_REL + instanceFolder + "cluster-config.xml";
        Path dstConf = new Path(fs.getHomeDirectory(), pathSuffix);
        File tmp = File.createTempFile("cluster-config", "xml");
        tmp.deleteOnExit();
        fs.copyToLocalFile(dstConf, new Path(tmp.getPath()));
        JAXBContext clusterConf = JAXBContext.newInstance(Cluster.class);
        Unmarshaller unm = clusterConf.createUnmarshaller();
        Cluster cl = (Cluster) unm.unmarshal(tmp);
        String ccIp = cl.getMasterNode().getClientIp();
        return ccIp;
    } catch (IOException | JAXBException e) {
        throw new YarnException(e);
    }
}

From source file:es.tid.cosmos.platform.injection.server.HadoopFileSystemViewTest.java

License:Open Source License

/**
 * Checks that requesting a file with an empty filename resolves to the
 * current user's home directory.
 *
 * @throws Exception
 */
@Test
public void testGetFileWithEmptyFilename() throws Exception {
    HadoopSshFile file = this.hadoopFileSystemView.getFile("");
    FileSystem hadoopFS = FileSystem.get(URI.create(this.conf.get("fs.default.name")),
            this.conf);/*, this.userName);*/
    String homePath = hadoopFS.getHomeDirectory().toString().replaceFirst(hadoopFS.getUri().toString(), "");
    assertEquals(homePath, file.getAbsolutePath());
    assertEquals(homePath.substring(homePath.lastIndexOf("/") + 1), file.getName());
}

From source file:es.tid.cosmos.platform.injection.server.HadoopFileSystemViewTest.java

License:Open Source License

/**
 * Checks that the initial path "." is redirected to the current user's
 * home directory.
 *
 * @throws Exception
 */
@Test
public void testRedirectionToHomePath() throws Exception {
    FileSystem fs = FileSystem.get(URI.create(this.conf.get("fs.default.name")), this.conf);//, "test");
    String homePath = fs.getHomeDirectory().toString().replaceFirst(fs.getUri().toString(), "");
    HadoopSshFile init = this.hadoopFileSystemView.getFile(new HadoopSshFile(".", "test", fs), ".");
    assertEquals(homePath, init.getAbsolutePath());
}

From source file:eu.scape_project.spacip.ContainerProcessing.java

License:Apache License

/**
 * Prepare input: unpack each archive record to HDFS and emit the
 * corresponding tool invocation lines.
 *
 * @param pt Path of the container file in HDFS
 * @throws IOException IO Error
 * @throws java.lang.InterruptedException
 */
public void prepareInput(Path pt) throws InterruptedException, IOException {
    FileSystem fs = FileSystem.get(context.getConfiguration());
    InputStream containerFileStream = fs.open(pt);
    String containerFileName = pt.getName();
    ArchiveReader reader = ArchiveReaderFactory.get(containerFileName, containerFileStream, true);
    long currTM = System.currentTimeMillis();
    String unpackHdfsPath = conf.get("unpack_hdfs_path", "spacip_unpacked");
    String hdfsUnpackDirStr = StringUtils.normdir(unpackHdfsPath, Long.toString(currTM));
    String hdfsJoboutputPath = conf.get("tooloutput_hdfs_path", "spacip_tooloutput");
    String hdfsOutputDirStr = StringUtils.normdir(hdfsJoboutputPath, Long.toString(currTM));
    Iterator<ArchiveRecord> recordIterator = reader.iterator();
    recordIterator.next(); // skip filedesc record (arc filedesc)
    // Number of files which should be processed per invocation
    int numItemsPerInvocation = conf.getInt("num_items_per_task", 50);
    int numItemCounter = numItemsPerInvocation;
    // List of input files to be processed
    String inliststr = "";
    // List of output files to be generated
    String outliststr = "";
    try {
        while (recordIterator.hasNext()) {
            ArchiveRecord nativeArchiveRecord = recordIterator.next();
            String recordKey = getRecordKey(nativeArchiveRecord);
            String outFileName = RandomStringUtils.randomAlphabetic(25);
            String hdfsPathStr = hdfsUnpackDirStr + outFileName;
            Path hdfsPath = new Path(hdfsPathStr);
            String outputFileSuffix = conf.get("output_file_suffix", ".fits.xml");
            String hdfsOutPathStr = hdfsOutputDirStr + outFileName + outputFileSuffix;
            FSDataOutputStream hdfsOutStream = fs.create(hdfsPath);
            ContainerProcessing.recordToOutputStream(nativeArchiveRecord, hdfsOutStream);
            Text key = new Text(recordKey);
            Text value = new Text(fs.getHomeDirectory() + File.separator + hdfsOutPathStr);
            mos.write("keyfilmapping", key, value);
            String scapePlatformInvoke = conf.get("scape_platform_invoke", "fits dirxml");
            Text ptmrkey = new Text(scapePlatformInvoke);
            // for the configured number of items per invocation, add the
            // files to the input and output list of the command.
            inliststr += "," + fs.getHomeDirectory() + File.separator + hdfsPathStr;
            outliststr += "," + fs.getHomeDirectory() + File.separator + hdfsOutPathStr;
            if (numItemCounter > 1 && recordIterator.hasNext()) {
                numItemCounter--;
            } else if (numItemCounter == 1 || !recordIterator.hasNext()) {
                inliststr = inliststr.substring(1); // cut off leading comma 
                outliststr = outliststr.substring(1); // cut off leading comma 
                String pattern = conf.get("tomar_param_pattern", "%1$s %2$s");
                String ptMrStr = StringUtils.formatCommandOutput(pattern, inliststr, outliststr);
                Text ptmrvalue = new Text(ptMrStr);
                // emit tomar input line where the key is the tool invocation
                // (tool + operation) and the value is the parameter list
                // where input and output strings contain file lists.
                mos.write("tomarinput", ptmrkey, ptmrvalue);
                numItemCounter = numItemsPerInvocation;
                inliststr = "";
                outliststr = "";
            }
        }
    } catch (Exception ex) {
        mos.write("error", new Text("Error"), new Text(pt.toString()));
    }

}

From source file:eu.scape_project.tb.lsdr.seqfileutility.hadoop.HadoopJob.java

License:Apache License

/**
 * Run hadoop job
 *
 * @param strings Command line arguments
 * @return Success indicator
 * @throws Exception
 */
@Override
public int run(String[] strings) throws Exception {
    try {
        String hdfsInputDir = null;
        FileSystem hdfs = FileSystem.get(conf);

        // hdfs input path is given as command parameter
        if (pc.getHdfsInputPath() != null) {
            hdfsInputDir = pc.getHdfsInputPath();
            // hdfs input file is created
        } else {
            hdfsInputDir = "input/" + System.currentTimeMillis() + "sfu/";

            String[] extensions = null;
            if (pc.getExtStr() != null) {
                StringTokenizer st = new StringTokenizer(pc.getExtStr(), ",");
                extensions = new String[st.countTokens()];
                int i = 0;
                while (st.hasMoreTokens()) {
                    extensions[i] = st.nextToken();
                    i++;
                }
            }

            hdfs.mkdirs(new Path(hdfsInputDir));

            String hdfsInputPath = hdfsInputDir + "inputpaths.txt";
            Path path = new Path(hdfsInputPath);

            FSDataOutputStream outputStream = hdfs.create(path);

            List<String> dirs = StringUtils.getStringListFromString(pc.getDirsStr(), ",");
            for (String dir : dirs) {
                File directory = new File(dir);
                if (directory.isDirectory()) {
                    // Alternatively, the java traverse method can be used
                    // for creating the file paths:
                    //traverse(directory, outputStream);
                    writeFilePaths(directory, outputStream);
                } else {
                    logger.warn("Parameter \"" + dir + "\" is not a directory " + "(skipped)");
                }
            }
            outputStream.close();
            if (hdfs.exists(path)) {
                logger.info(
                        "Input paths created in \"" + hdfs.getHomeDirectory() + "/" + path.toString() + "\"");
            } else {
                logger.error("Input paths have not been created in hdfs.");
                return 1;
            }
        }
        String hadoopJobName = "Hadoop_sequence_file_creation";
        if (pc.getHadoopJobName() != null && !pc.getHadoopJobName().equals(""))
            hadoopJobName = pc.getHadoopJobName();
        Job job = new Job(conf, hadoopJobName);

        job.setJarByClass(SequenceFileUtility.class);
        job.setMapperClass(SmallFilesSequenceFileMapper.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(BytesWritable.class);
        job.setOutputFormatClass(SequenceFileOutputFormat.class);
        TextInputFormat.addInputPath(job, new Path(hdfsInputDir));

        // todo: support absolute paths
        String hdfsOutputDir = pc.getOutputDirectory() != null ? pc.getOutputDirectory()
                : "output/" + System.currentTimeMillis() + "sfu/";

        SequenceFileOutputFormat.setOutputPath(job, new Path(hdfsOutputDir));
        SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.get(pc.getCompressionType()));

        int success = job.waitForCompletion(true) ? 0 : 1;
        boolean seqFileExists = hdfs.exists(new Path(hdfsOutputDir + "part-r-00000"));
        if (success == 0 && seqFileExists) {
            logger.info("Sequence file created: \""
                    //+ hdfs.getHomeDirectory() + "/"
                    + new Path(hdfsOutputDir).toString() + "/part-r-00000" + "\"");
            pc.setOutputDirectory(hdfsOutputDir);
            return 0;
        } else {
            logger.error("Sequence file not created in hdfs");
            return 1;
        }
    } catch (Exception e) {
        logger.error("Exception occurred", e);
    }
    return 0;
}

From source file:eu.scape_project.tpid.ContainerProcessing.java

License:Apache License

/**
 * Prepare input: unpack each ARC record to HDFS and emit the
 * corresponding tool invocation lines.
 *
 * @param pt Path of the container file in HDFS
 * @throws IOException IO Error
 * @throws java.lang.InterruptedException
 */
public void prepareInput(Path pt) throws InterruptedException, IOException {
    FileSystem fs = FileSystem.get(context.getConfiguration());
    InputStream containerFileStream = fs.open(pt);
    String containerFileName = pt.getName();

    ArcReader reader;
    // Read first two bytes to check if we have a gzipped input stream
    PushbackInputStream pb = new PushbackInputStream(containerFileStream, 2);
    byte[] signature = new byte[2];
    pb.read(signature);
    pb.unread(signature);
    // use compressed reader if gzip magic number is matched
    if (signature[0] == (byte) 0x1f && signature[1] == (byte) 0x8b) {
        reader = ArcReaderFactory.getReaderCompressed(pb);
    } else {
        reader = ArcReaderFactory.getReaderUncompressed(pb);
    }
    long currTM = System.currentTimeMillis();
    String unpackHdfsPath = conf.get("unpack_hdfs_path", "tpid_unpacked");
    String hdfsUnpackDirStr = StringUtils.normdir(unpackHdfsPath, Long.toString(currTM));
    String hdfsJoboutputPath = conf.get("tooloutput_hdfs_path", "tpid_tooloutput");
    String hdfsOutputDirStr = StringUtils.normdir(hdfsJoboutputPath, Long.toString(currTM));

    Iterator<ArcRecordBase> arcIterator = reader.iterator();

    // Number of files which should be processed per invocation
    int numItemsPerInvocation = conf.getInt("num_items_per_task", 50);
    int numItemCounter = numItemsPerInvocation;
    // List of input files to be processed
    String inliststr = "";
    // List of output files to be generated
    String outliststr = "";
    try {
        while (arcIterator.hasNext()) {
            ArcRecordBase arcRecord = arcIterator.next();
            String recordKey = getRecordKey(arcRecord, containerFileName);
            String outFileName = RandomStringUtils.randomAlphabetic(25);
            String hdfsPathStr = hdfsUnpackDirStr + outFileName;
            Path hdfsPath = new Path(hdfsPathStr);
            String outputFileSuffix = conf.get("output_file_suffix", ".fits.xml");
            String hdfsOutPathStr = hdfsOutputDirStr + outFileName + outputFileSuffix;
            FSDataOutputStream hdfsOutStream = fs.create(hdfsPath);
            ArcUtils.recordToOutputStream(arcRecord, hdfsOutStream);
            Text key = new Text(recordKey);
            Text value = new Text(fs.getHomeDirectory() + File.separator + hdfsOutPathStr);
            mos.write("keyfilmapping", key, value);
            String scapePlatformInvoke = conf.get("scape_platform_invoke", "fits dirxml");
            Text ptmrkey = new Text(scapePlatformInvoke);
            // for the configured number of items per invocation, add the
            // files to the input and output list of the command.
            inliststr += "," + fs.getHomeDirectory() + File.separator + hdfsPathStr;
            outliststr += "," + fs.getHomeDirectory() + File.separator + hdfsOutPathStr;
            if (numItemCounter > 1 && arcIterator.hasNext()) {
                numItemCounter--;
            } else if (numItemCounter == 1 || !arcIterator.hasNext()) {
                inliststr = inliststr.substring(1); // cut off leading comma 
                outliststr = outliststr.substring(1); // cut off leading comma 
                String pattern = conf.get("tomar_param_pattern", "%1$s %2$s");
                String ptMrStr = StringUtils.formatCommandOutput(pattern, inliststr, outliststr);
                Text ptmrvalue = new Text(ptMrStr);
                // emit tomar input line where the key is the tool invocation
                // (tool + operation) and the value is the parameter list
                // where input and output strings contain file lists.
                mos.write("tomarinput", ptmrkey, ptmrvalue);
                numItemCounter = numItemsPerInvocation;
                inliststr = "";
                outliststr = "";
            }
        }
    } catch (Exception ex) {
        mos.write("error", new Text("Error"), new Text(pt.toString()));
    }

}