// Java tutorial
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.elasticsearch.hadoop.integration;

import java.lang.reflect.Field;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;

import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.JobSubmissionFiles;
import org.elasticsearch.hadoop.util.ReflectionUtils;
import org.elasticsearch.hadoop.util.TestSettings;
import org.elasticsearch.hadoop.util.TestUtils;

/**
 * Test bootstrap helper that prepares a Hadoop {@link JobConf} for the integration
 * tests: applies the test properties, works around Windows permission issues in the
 * Hadoop client, and provisions the job when it is not running against a local tracker.
 */
public class HdpBootstrap {

    // guards the one-time Windows/HADOOP-9123 workarounds in hadoopConfig()
    private static boolean hackVerified = false;

    /**
     * Hack to allow Hadoop client to run on windows (which otherwise fails due to some permission problem).
     * <p>
     * Mutates Hadoop's static staging-dir/file permissions (and, when present, the
     * distributed-cache permission constant) to {@code 0650}. No-op on non-Windows systems.
     */
    public static void hackHadoopStagingOnWin() {
        // do the assignment only on Windows systems
        if (TestUtils.isWindows()) {
            // 0650 = -rw-r-x--- ; FsPermission.fromShort mutates the shared static instances in place
            JobSubmissionFiles.JOB_DIR_PERMISSION.fromShort((short) 0650);
            JobSubmissionFiles.JOB_FILE_PERMISSION.fromShort((short) 0650);

            // handle jar permissions as well - temporarily disable for CDH 4 / YARN
            try {
                Class<?> tdcm = Class.forName("org.apache.hadoop.filecache.TrackerDistributedCacheManager");
                Field field = ReflectionUtils.findField(tdcm, "PUBLIC_CACHE_OBJECT_PERM");
                ReflectionUtils.makeAccessible(field);
                FsPermission perm = (FsPermission) ReflectionUtils.getField(field, null);
                perm.fromShort((short) 0650);
            } catch (ClassNotFoundException ignored) {
                // class is absent on CDH 4 / YARN distributions - nothing to patch there
                return;
            } catch (Exception ex) {
                LogFactory.getLog(TestUtils.class).warn("Cannot set permission for TrackerDistributedCacheManager", ex);
            }
        }
    }

    /**
     * Copies every entry of the given properties into the configuration.
     *
     * @param conf configuration to mutate
     * @param props properties to copy (keys/values converted via {@code toString()})
     * @return the same {@code conf} instance, for chaining
     */
    public static JobConf addProperties(JobConf conf, Properties props) {
        for (Entry<Object, Object> entry : props.entrySet()) {
            conf.set(entry.getKey().toString(), entry.getValue().toString());
        }
        return conf;
    }

    /**
     * Builds the {@link JobConf} used by the integration tests from the shared test
     * properties, applying one-time platform workarounds and provisioning the job
     * when the tracker is not local.
     *
     * @return a fully prepared job configuration
     */
    public static JobConf hadoopConfig() {
        if (!hackVerified) {
            hackVerified = true;
            // check local execution
            if ("local".equals(TestSettings.TESTING_PROPS.get("mapred.job.tracker"))) {
                hackHadoopStagingOnWin();
            }
            // damn HADOOP-9123
            System.setProperty("path.separator", ":");
        }
        JobConf conf = addProperties(new JobConf(), TestSettings.TESTING_PROPS);
        conf.setBoolean("mapred.used.genericoptionsparser", true);

        // provision if not local
        if (!isJtLocal(conf)) {
            Provisioner.provision(conf);
            HdfsUtils.rmr(conf, ".staging");
        }
        return conf;
    }

    /**
     * Converts a Hadoop configuration into a plain {@link Properties} object.
     *
     * @param cfg source configuration; may be {@code null}, in which case an empty
     *            properties object is returned
     * @return properties mirroring every entry of {@code cfg}
     */
    public static Properties asProperties(Configuration cfg) {
        Properties props = new Properties();
        if (cfg != null) {
            for (Map.Entry<String, String> entry : cfg) {
                props.setProperty(entry.getKey(), entry.getValue());
            }
        }
        return props;
    }

    /**
     * Indicates whether the configuration points at a local job tracker.
     *
     * @param cfg configuration to inspect
     * @return {@code true} if {@code mapred.job.tracker} equals {@code "local"}
     */
    public static boolean isJtLocal(Configuration cfg) {
        return "local".equals(cfg.get("mapred.job.tracker"));
    }
}