Example usage for org.apache.hadoop.conf Configuration writeXml

Introduction

This page collects usage examples for org.apache.hadoop.conf.Configuration#writeXml.

Prototype

public void writeXml(OutputStream out) throws IOException

public void writeXml(Writer out) throws IOException
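
Before the real-world examples, here is a minimal, self-contained sketch of the round trip (the class name, file path, and property key are illustrative placeholders): serialize a Configuration to an XML file with writeXml, then load it back with addResource.

import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

import org.apache.hadoop.conf.Configuration;

public class WriteXmlExample {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration(false); // skip default resources
        conf.set("example.key", "example-value");      // placeholder property

        // Serialize the configuration to XML; try-with-resources closes
        // the Writer even if writeXml throws.
        File file = new File("example-site.xml");      // placeholder path
        try (Writer writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
            conf.writeXml(writer);
        }

        // Round trip: load the XML file back as a configuration resource.
        Configuration loaded = new Configuration(false);
        loaded.addResource(file.toURI().toURL());
        System.out.println(loaded.get("example.key")); // prints example-value
    }
}

The examples that follow use both overloads, closing the underlying stream or writer in try-with-resources or a finally block so the file is released even if writeXml fails.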

Usage

From source file:JaqlShell.java

License:Apache License

/**
 * Writes the configuration from the given MiniMRCluster into override
 * files (core-default.xml, mapred-default.xml, hdfs-default.xml) under
 * the directory named by the hadoop.conf.override system property.
 *
 * @param mrc the mini MapReduce cluster whose JobConf settings are copied
 * @param conf the configuration to update and serialize
 * @throws Exception if the override files cannot be written
 */
private static void setupOverride(MiniMRCluster mrc, Configuration conf) throws Exception {
    File overrideDir = new File(System.getProperty("hadoop.conf.override"));
    if (!overrideDir.exists()) {
        overrideDir.mkdirs();
    }

    // write out the JobConf from MiniMR to the override dir
    JobConf jc = mrc.createJobConf();
    conf.set("mapred.job.tracker", jc.get("mapred.job.tracker", null));
    String name = "mapred.job.tracker.info.port";
    String addr = jc.get(name, null);
    if (addr == null) {
        name = "mapred.job.tracker.http.address";
        addr = jc.get(name, null);
    }
    conf.set(name, addr);
    String base = overrideDir.getCanonicalPath() + File.separator;
    try (OutputStream outCore = new FileOutputStream(base + "core-default.xml");
            OutputStream outMapred = new FileOutputStream(base + "mapred-default.xml");
            OutputStream outHdfs = new FileOutputStream(base + "hdfs-default.xml")) {
        conf.writeXml(outCore);
        conf.writeXml(outMapred);
        conf.writeXml(outHdfs);
    }
}

From source file:accumulo.AccumuloStuff.java

License:Apache License

private static void setCoreSite(MiniAccumuloClusterImpl cluster) throws Exception {
    File csFile = new File(cluster.getConfig().getConfDir(), "core-site.xml");
    if (csFile.exists()) {
        throw new RuntimeException(csFile + " already exists");
    }

    Configuration coreSite = new Configuration(false);
    coreSite.set("fs.file.impl", RawLocalFileSystem.class.getName());
    try (OutputStream out = new BufferedOutputStream(new FileOutputStream(csFile))) {
        coreSite.writeXml(out);
    }
}

From source file:azkaban.jobtype.HadoopConfigurationInjector.java

License:Apache License

/**
 * Writes out the XML configuration file that will be injected by the client
 * as a configuration resource.
 * <p>
 * This file will include a series of links injected by Azkaban as well as
 * any job properties that begin with the designated injection prefix.
 *
 * @param props The Azkaban properties
 * @param workingDir The Azkaban job working directory
 */
public static void prepareResourcesToInject(Props props, String workingDir) {
    try {
        Configuration conf = new Configuration(false);

        // First, inject a series of Azkaban links. These are equivalent to
        // CommonJobProperties.[EXECUTION,WORKFLOW,JOB,JOBEXEC,ATTEMPT]_LINK
        addHadoopProperties(props);

        // Next, automatically inject any properties that begin with the
        // designated injection prefix.
        Map<String, String> confProperties = props.getMapByPrefix(INJECT_PREFIX);

        for (Map.Entry<String, String> entry : confProperties.entrySet()) {
            String confKey = entry.getKey().replace(INJECT_PREFIX, "");
            String confVal = entry.getValue();
            conf.set(confKey, confVal);
        }

        // Now write out the configuration file to inject.
        File file = getConfFile(props, workingDir, INJECT_FILE);
        try (OutputStream xmlOut = new FileOutputStream(file)) {
            conf.writeXml(xmlOut);
        }
    } catch (Throwable e) {
        _logger.error("Encountered error while preparing the Hadoop configuration resource file", e);
    }
}

From source file:co.cask.cdap.app.runtime.spark.SparkRuntimeService.java

License:Apache License

/**
 * Serialize {@link Configuration} to a file.
 *
 * @return The {@link File} of the serialized configuration in the given target directory.
 */
private File saveHConf(Configuration hConf, File targetDir) throws IOException {
    File file = new File(targetDir, SparkRuntimeContextProvider.HCONF_FILE_NAME);
    try (Writer writer = Files.newWriter(file, Charsets.UTF_8)) {
        hConf.writeXml(writer);
    }
    return file;
}

From source file:co.cask.cdap.common.twill.TwillRunnerMain.java

License:Apache License

private static File saveHConf(Configuration conf, File file) throws IOException {
    Writer writer = Files.newWriter(file, Charsets.UTF_8);
    try {
        conf.writeXml(writer);
    } finally {
        writer.close();
    }
    return file;
}

From source file:co.cask.cdap.data.runtime.main.MasterServiceMain.java

License:Apache License

private Path saveHConf(Configuration conf, Path file) throws IOException {
    try (Writer writer = Files.newBufferedWriter(file, Charsets.UTF_8)) {
        conf.writeXml(writer);
    }
    return file;
}

From source file:co.cask.cdap.data.runtime.main.MasterTwillApplication.java

License:Apache License

/**
 * Serializes the given {@link Configuration} to the given file.
 */
private Path saveHConf(Configuration conf, Path file) throws IOException {
    try (Writer writer = Files.newBufferedWriter(file, Charsets.UTF_8)) {
        conf.writeXml(writer);
    }
    return file;
}

From source file:co.cask.cdap.explore.service.ExploreServiceUtils.java

License:Apache License

/**
 * Creates a temp copy of the given yarn-site.xml file with the
 * necessary options added, and returns it.
 */
private static File updateYarnConfFile(File confFile, File tempDir) {
    Configuration conf = new Configuration(false);
    try {
        conf.addResource(confFile.toURI().toURL());
    } catch (MalformedURLException e) {
        LOG.error("File {} is malformed.", confFile, e);
        throw Throwables.propagate(e);
    }

    String yarnAppClassPath = conf.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
            Joiner.on(",").join(YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH));

    // Prepend $PWD/* to the classpath so the user's jars take precedence.
    // Without this change, job.jar (which contains old Guava classes)
    // would be at the beginning of the classpath.
    yarnAppClassPath = "$PWD/*," + yarnAppClassPath;

    conf.set(YarnConfiguration.YARN_APPLICATION_CLASSPATH, yarnAppClassPath);

    File newYarnConfFile = new File(tempDir, "yarn-site.xml");
    try (FileOutputStream os = new FileOutputStream(newYarnConfFile)) {
        conf.writeXml(os);
    } catch (IOException e) {
        LOG.error("Problem creating and writing to temporary yarn-conf.xml conf file at {}", newYarnConfFile,
                e);
        throw Throwables.propagate(e);
    }

    return newYarnConfFile;
}

From source file:co.cask.cdap.explore.service.ExploreServiceUtils.java

License:Apache License

/**
 * Creates a temp copy of the given mapred-site.xml file with the
 * necessary options added, and returns it.
 */
private static File updateMapredConfFile(File confFile, File tempDir) {
    Configuration conf = new Configuration(false);
    try {
        conf.addResource(confFile.toURI().toURL());
    } catch (MalformedURLException e) {
        LOG.error("File {} is malformed.", confFile, e);
        throw Throwables.propagate(e);
    }

    String mrAppClassPath = conf.get(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH,
            MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH);

    // Prepend $PWD/* to the classpath. Without this change, old jars from
    // the MR framework classpath would take precedence.
    mrAppClassPath = "$PWD/*," + mrAppClassPath;

    conf.set(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH, mrAppClassPath);

    File newMapredConfFile = new File(tempDir, "mapred-site.xml");
    try (FileOutputStream os = new FileOutputStream(newMapredConfFile)) {
        conf.writeXml(os);
    } catch (IOException e) {
        LOG.error("Problem creating and writing to temporary mapred-site.xml conf file at {}",
                newMapredConfFile, e);
        throw Throwables.propagate(e);
    }

    return newMapredConfFile;
}

From source file:co.cask.cdap.explore.service.ExploreServiceUtils.java

License:Apache License

/**
 * Creates a temp copy of the given hive-site.xml file with the
 * necessary options added, and returns it.
 */
private static File updateHiveConfFile(File confFile, File tempDir) {
    Configuration conf = new Configuration(false);
    try {
        conf.addResource(confFile.toURI().toURL());
    } catch (MalformedURLException e) {
        LOG.error("File {} is malformed.", confFile, e);
        throw Throwables.propagate(e);
    }

    // We prefer jars in the container's root directory over job.jar. The
    // YARN_APPLICATION_CLASSPATH in yarn-site.xml is edited by
    // updateYarnConfFile; setting MAPREDUCE_JOB_CLASSLOADER and
    // MAPREDUCE_JOB_USER_CLASSPATH_FIRST to false puts
    // YARN_APPLICATION_CLASSPATH before job.jar on the container's classpath.
    conf.setBoolean(Job.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, false);
    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, false);

    String sparkHome = System.getenv(Constants.SPARK_HOME);
    if (sparkHome != null) {
        LOG.debug("Setting spark.home in hive conf to {}", sparkHome);
        conf.set("spark.home", sparkHome);
    }

    File newHiveConfFile = new File(tempDir, "hive-site.xml");

    try (FileOutputStream os = new FileOutputStream(newHiveConfFile)) {
        conf.writeXml(os);
    } catch (IOException e) {
        LOG.error("Problem creating temporary hive-site.xml conf file at {}", newHiveConfFile, e);
        throw Throwables.propagate(e);
    }
    return newHiveConfFile;
}