Example usage for org.apache.hadoop.tools HadoopArchives HadoopArchives

List of usage examples for org.apache.hadoop.tools HadoopArchives HadoopArchives

Introduction

On this page you can find an example usage of org.apache.hadoop.tools HadoopArchives HadoopArchives.

Prototype

public HadoopArchives(Configuration conf) 

Source Link

Usage

From source file:org.apache.hcatalog.har.HarOutputCommitterPostProcessor.java

License:Apache License

/**
 * Creates a har file from the contents of a given directory, using that as root.
 * @param dir Directory to archive/*  www.ja v  a 2  s. c o  m*/
 * @param harFile The HAR file to create
 */
public static void makeHar(JobContext context, String dir, String harFile) throws IOException {
    //    Configuration conf = context.getConfiguration();
    //    Credentials creds = context.getCredentials();

    //    HCatUtil.logAllTokens(LOG,context);

    int lastSep = harFile.lastIndexOf(Path.SEPARATOR_CHAR);
    Path archivePath = new Path(harFile.substring(0, lastSep));
    final String[] args = { "-archiveName", harFile.substring(lastSep + 1, harFile.length()), "-p", dir, "*",
            archivePath.toString() };
    //    for (String arg : args){
    //      LOG.info("Args to har : "+ arg);
    //    }
    try {
        Configuration newConf = new Configuration();
        FileSystem fs = archivePath.getFileSystem(newConf);

        String hadoopTokenFileLocationEnvSetting = System
                .getenv(HCatConstants.SYSENV_HADOOP_TOKEN_FILE_LOCATION);
        if ((hadoopTokenFileLocationEnvSetting != null) && (!hadoopTokenFileLocationEnvSetting.isEmpty())) {
            newConf.set(HCatConstants.CONF_MAPREDUCE_JOB_CREDENTIALS_BINARY, hadoopTokenFileLocationEnvSetting);
            //      LOG.info("System.getenv(\"HADOOP_TOKEN_FILE_LOCATION\") =["+  System.getenv("HADOOP_TOKEN_FILE_LOCATION")+"]");
        }
        //      for (FileStatus ds : fs.globStatus(new Path(dir, "*"))){
        //        LOG.info("src : "+ds.getPath().toUri().toString());
        //      }

        final HadoopArchives har = new HadoopArchives(newConf);
        int rc = ToolRunner.run(har, args);
        if (rc != 0) {
            throw new Exception("Har returned error code " + rc);
        }

        //      for (FileStatus hs : fs.globStatus(new Path(harFile, "*"))){
        //        LOG.info("dest : "+hs.getPath().toUri().toString());
        //      }
        //      doHarCheck(fs,harFile);
        //      LOG.info("Nuking " + dir);
        fs.delete(new Path(dir), true);
    } catch (Exception e) {
        throw new HCatException("Error creating Har [" + harFile + "] from [" + dir + "]", e);
    }
}