Example usage for org.apache.hadoop.conf Configuration set


Introduction

This page collects example usages of org.apache.hadoop.conf.Configuration#set, drawn from open-source projects.

Prototype

public void set(String name, String value) 

Document

Set the value of the name property.
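
Before the project examples, here is a minimal sketch of the basic contract, assuming a Configuration created without loading the default resource files; the property name is purely illustrative:

import org.apache.hadoop.conf.Configuration;

public class ConfigurationSetExample {
    public static void main(String[] args) {
        // false: skip loading core-default.xml / core-site.xml
        Configuration conf = new Configuration(false);
        conf.set("example.property", "hello");             // hypothetical property name
        System.out.println(conf.get("example.property")); // hello
        conf.set("example.property", "world");             // a later set() replaces the earlier value
        System.out.println(conf.get("example.property")); // world
    }
}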

Usage

From source file:com.architecting.ch07.MapReduceIndexerTool.java

License:Apache License

private void addDistributedCacheFile(File file, Configuration conf) throws IOException {
    String HADOOP_TMP_FILES = "tmpfiles";// see Hadoop's GenericOptionsParser
    String tmpFiles = conf.get(HADOOP_TMP_FILES, "");
    if (tmpFiles.length() > 0) { // already present?
        tmpFiles = tmpFiles + ",";
    }
    GenericOptionsParser parser = new GenericOptionsParser(new Configuration(conf),
            new String[] { "--files", file.getCanonicalPath() });
    String additionalTmpFiles = parser.getConfiguration().get(HADOOP_TMP_FILES);
    assert additionalTmpFiles != null;
    assert additionalTmpFiles.length() > 0;
    tmpFiles += additionalTmpFiles;
    conf.set(HADOOP_TMP_FILES, tmpFiles);
}
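
The method appends to Hadoop's comma-separated "tmpfiles" list instead of overwriting it, so repeated calls accumulate files. A hypothetical call site inside the same tool class, assuming the tool extends org.apache.hadoop.conf.Configured (file paths illustrative):

Configuration conf = getConf(); // inherited from org.apache.hadoop.conf.Configured
addDistributedCacheFile(new File("/tmp/schema.xml"), conf);
addDistributedCacheFile(new File("/tmp/stopwords.txt"), conf);
// conf.get("tmpfiles") now lists both files, comma-separated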

From source file:com.asakusafw.bridge.adapter.ResourceBrokerAdapterTest.java

License:Apache License

/**
 * simple case.
 * @throws Exception if failed
 */
@Test
public void simple() throws Exception {
    Configuration conf = new Configuration(false);
    conf.set(StageConstants.PROP_USER, "a");
    conf.set(StageConstants.PROP_BATCH_ID, "b");
    conf.set(StageConstants.PROP_FLOW_ID, "c");
    conf.set(StageConstants.PROP_EXECUTION_ID, "d");
    conf.set(StageConstants.PROP_ASAKUSA_BATCH_ARGS, "e=f");
    RuntimeResourceManager manager = new RuntimeResourceManager(conf);
    try {
        manager.setup();
        StageInfo info = ResourceBroker.get(StageInfo.class);
        assertThat(info.getUserName(), is("a"));
        assertThat(info.getBatchId(), is("b"));
        assertThat(info.getFlowId(), is("c"));
        assertThat(info.getExecutionId(), is("d"));
        assertThat(info.getStageId(), is(nullValue()));
        assertThat(info.getBatchArguments(), hasEntry("e", "f"));
    } finally {
        manager.cleanup();
    }
}

From source file:com.asakusafw.bridge.hadoop.ConfigurationEditor.java

License:Apache License

/**
 * Merge the extra configuration into the Hadoop configuration.
 * @param conf the target Hadoop configuration
 * @param extra the extra properties
 */
public static void merge(Configuration conf, Map<String, String> extra) {
    for (Map.Entry<String, String> entry : extra.entrySet()) {
        conf.set(entry.getKey(), entry.getValue());
    }
}
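
A minimal usage sketch for merge, assuming hypothetical property names:

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import com.asakusafw.bridge.hadoop.ConfigurationEditor;

public class MergeExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        Map<String, String> extra = new HashMap<>();
        extra.put("com.example.alpha", "1"); // hypothetical keys
        extra.put("com.example.beta", "2");
        ConfigurationEditor.merge(conf, extra);
        System.out.println(conf.get("com.example.alpha")); // 1
    }
}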

From source file:com.asakusafw.bridge.hadoop.ConfigurationEditor.java

License:Apache License

/**
 * Puts the {@link StageInfo} object into the Hadoop configuration.
 * @param conf the target Hadoop configuration
 * @param info the {@link StageInfo} object
 */
public static void putStageInfo(Configuration conf, StageInfo info) {
    conf.set(KEY_STAGE_INFO, info.serialize());
}
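
A sketch of calling putStageInfo, reusing the six-argument StageInfo constructor that appears in the DirectFileInputFormatTest example below; all argument values are illustrative:

Configuration conf = new Configuration(false);
// stores the StageInfo in serialized form under a single configuration key
ConfigurationEditor.putStageInfo(conf, new StageInfo("u", "b", "f", "s", "e", "a=b"));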

From source file:com.asakusafw.bridge.hadoop.directio.DirectFileInputFormatTest.java

License:Apache License

private Configuration conf(String basePath, String resourcePath, Class<?> filter, String optional,
        String args) {
    Configuration conf = context.newConfiguration();
    conf.set(DirectFileInputFormat.KEY_BASE_PATH, basePath);
    conf.set(DirectFileInputFormat.KEY_RESOURCE_PATH, resourcePath);
    conf.set(DirectFileInputFormat.KEY_DATA_CLASS, MockData.class.getName());
    conf.set(DirectFileInputFormat.KEY_FORMAT_CLASS, MockDataFormat.class.getName());
    if (filter != null) {
        conf.set(DirectFileInputFormat.KEY_FILTER_CLASS, filter.getName());
    }
    if (optional != null) {
        conf.set(DirectFileInputFormat.KEY_OPTIONAL, optional);
    }
    ConfigurationEditor.putStageInfo(conf, new StageInfo("u", "b", "f", "s", "e", args));
    return conf;
}

From source file:com.asakusafw.compiler.util.tester.HadoopDriver.java

License:Apache License

private boolean runInProcess(File runtimeLib, List<File> libjars, String className, File confFile,
        Map<String, String> properties) {
    logger.info("EMULATE: {} with {}", className, libjars);
    List<String> arguments = new ArrayList<>();
    arguments.add(className);
    addHadoopConf(arguments, confFile);
    addHadoopLibjars(libjars, arguments);
    addBuiltInMapReduceJobRunner(arguments);
    addSuppressCopyLibraries(arguments);
    ClassLoader original = Thread.currentThread().getContextClassLoader();
    try {
        Configuration conf = configurations.newInstance();
        for (Map.Entry<String, String> entry : properties.entrySet()) {
            conf.set(entry.getKey(), entry.getValue());
        }
        try {
            int exitValue = ApplicationLauncher.exec(conf, arguments.toArray(new String[arguments.size()]));
            if (exitValue != 0) {
                logger.info("running {} returned {}", className, exitValue);
                return false;
            }
            return true;
        } catch (Exception e) {
            logger.info("error occurred", e);
            return false;
        }
    } finally {
        Thread.currentThread().setContextClassLoader(original);
    }
}

From source file:com.asakusafw.dag.compiler.codegen.CleanupStageClientGeneratorTest.java

License:Apache License

private void run(ClassDescription generated, String executionId) {
    Configuration conf = new Configuration();
    conf.set(StageConstants.PROP_USER, "testing");
    conf.set(StageConstants.PROP_EXECUTION_ID, executionId);
    loading(generated, c -> {
        Tool t = ReflectionUtils.newInstance(c.asSubclass(Tool.class), conf);
        assertThat(t.run(new String[0]), is(0));
    });
}

From source file:com.asakusafw.lang.compiler.mapreduce.testing.MapReduceRunner.java

License:Apache License

private static void configure(Configuration conf, String executionId, Map<String, String> arguments) {
    conf.set(StageConstants.PROP_EXECUTION_ID, executionId);
    conf.set(StageConstants.PROP_USER, System.getProperty("user.name")); //$NON-NLS-1$
    conf.set(StageConstants.PROP_ASAKUSA_BATCH_ARGS, serialize(arguments));
    conf.setBoolean(InProcessStageConfigurator.KEY_FORCE, true);
}

From source file:com.asakusafw.lang.compiler.mapreduce.testing.mock.DirectIoContext.java

License:Apache License

/**
 * Adds Direct I/O settings for the configuration.
 * @param conf the target configuration
 * @return the configured object
 */
public Configuration configure(Configuration conf) {
    Path system = new Path(new File(getRoot(), "system").toURI()); //$NON-NLS-1$
    conf.set("com.asakusafw.output.system.dir", system.toString()); //$NON-NLS-1$
    conf.set("com.asakusafw.directio.root", HadoopDataSource.class.getName()); //$NON-NLS-1$
    conf.set("com.asakusafw.directio.root.path", "/"); //$NON-NLS-1$ //$NON-NLS-2$
    conf.set("com.asakusafw.directio.root.fs.path", getRootPath().toString()); //$NON-NLS-1$
    return conf;
}

From source file:com.asakusafw.m3bp.client.Launcher.java

License:Apache License

private BasicProcessorContext newContext() {
    BasicProcessorContext context = new BasicProcessorContext(applicationLoader);
    configuration.getEngineProperties().forEach((k, v) -> {
        if (k.startsWith(KEY_HADOOP_PREFIX) == false) {
            LOG.debug("Engine configuration: {}={}", k, v);
            context.withProperty(k, v);
        }
    });

    Configuration hadoop = new Configuration();
    configuration.getHadoopProperties().forEach((k, v) -> {
        LOG.debug("Hadoop configuration: {}={}", k, v);
        hadoop.set(k, v);
    });
    configuration.getEngineProperties().forEach((k, v) -> {
        if (k.startsWith(KEY_HADOOP_PREFIX)) {
            String key = k.substring(KEY_HADOOP_PREFIX.length());
            LOG.debug("Hadoop configuration: {}={}", key, v);
            hadoop.set(key, v);
        }
    });

    context.withResource(StageInfo.class, configuration.getStageInfo());
    context.withResource(Configuration.class, hadoop);
    return context;
}