List of usage examples for org.apache.hadoop.mapreduce JobSubmissionFiles JOB_FILE_PERMISSION
FsPermission JOB_FILE_PERMISSION
To view the full source code for org.apache.hadoop.mapreduce.JobSubmissionFiles.JOB_FILE_PERMISSION, click the Source Link below each example.
From source file:ml.shifu.guagua.yarn.GuaguaSplitWriter.java
License:Apache License
/**
 * Writes the new-API (mapreduce) input splits and their meta-info file into the job
 * submission directory.
 *
 * @param jobSubmitDir staging directory the job client submits into
 * @param conf job configuration, consulted by {@code createFile} and the split serializer
 * @param fs file system holding the submission directory
 * @param splits the computed input splits to serialize
 * @throws IOException if writing either the split file or the meta-info file fails
 * @throws InterruptedException if split serialization is interrupted
 */
public static <T extends InputSplit> void createSplitFiles(Path jobSubmitDir, Configuration conf, FileSystem fs,
        T[] splits) throws IOException, InterruptedException {
    FSDataOutputStream out = createFile(fs, JobSubmissionFiles.getJobSplitFile(jobSubmitDir), conf);
    SplitMetaInfo[] info;
    try {
        info = writeNewSplits(conf, splits, out);
    } finally {
        // Close in finally so the split-file stream is not leaked when
        // writeNewSplits throws (the original only closed on the happy path).
        out.close();
    }
    writeJobSplitMetaInfo(fs, JobSubmissionFiles.getJobSplitMetaFile(jobSubmitDir),
            new FsPermission(JobSubmissionFiles.JOB_FILE_PERMISSION), splitVersion, info);
}
From source file:ml.shifu.guagua.yarn.GuaguaSplitWriter.java
License:Apache License
/**
 * Writes the old-API (mapred) input splits and their meta-info file into the job
 * submission directory.
 *
 * @param jobSubmitDir staging directory the job client submits into
 * @param conf job configuration, consulted by {@code createFile}
 * @param fs file system holding the submission directory
 * @param splits the computed old-API input splits to serialize
 * @throws IOException if writing either the split file or the meta-info file fails
 */
public static void createSplitFiles(Path jobSubmitDir, Configuration conf, FileSystem fs,
        org.apache.hadoop.mapred.InputSplit[] splits) throws IOException {
    FSDataOutputStream out = createFile(fs, JobSubmissionFiles.getJobSplitFile(jobSubmitDir), conf);
    SplitMetaInfo[] info;
    try {
        info = writeOldSplits(splits, out, conf);
    } finally {
        // Close in finally so the split-file stream is not leaked when
        // writeOldSplits throws (the original only closed on the happy path).
        out.close();
    }
    writeJobSplitMetaInfo(fs, JobSubmissionFiles.getJobSplitMetaFile(jobSubmitDir),
            new FsPermission(JobSubmissionFiles.JOB_FILE_PERMISSION), splitVersion, info);
}
From source file:ml.shifu.guagua.yarn.GuaguaSplitWriter.java
License:Apache License
/**
 * Creates the job split file with the standard submission-file permission, sets its
 * replication factor, and writes the split header. The returned stream is positioned
 * just past the header and must be closed by the caller.
 *
 * @param fs file system to create the file on
 * @param splitFile path of the split file inside the submission directory
 * @param job configuration; read for the submit-file replication factor
 * @return an open stream positioned after the split header
 * @throws IOException if file creation, replication setup, or header writing fails
 */
private static FSDataOutputStream createFile(FileSystem fs, Path splitFile, Configuration job)
        throws IOException {
    FSDataOutputStream out = FileSystem.create(fs, splitFile,
            new FsPermission(JobSubmissionFiles.JOB_FILE_PERMISSION));
    try {
        // "mapred.submit.replication" is the legacy (pre-MRv2) key; kept for
        // compatibility with configurations this code already targets. Default 10
        // mirrors Hadoop's own submit-file replication default.
        int replication = job.getInt("mapred.submit.replication", 10);
        fs.setReplication(splitFile, (short) replication);
        writeSplitHeader(out);
    } catch (IOException e) {
        // Close the half-initialized stream before rethrowing; the original
        // leaked it when setReplication or the header write failed.
        out.close();
        throw e;
    }
    return out;
}
From source file:org.elasticsearch.hadoop.HdpBootstrap.java
License:Apache License
/**
 * Hack to allow the Hadoop client to run on Windows (which otherwise fails due to a
 * staging-directory permission problem). Relaxes the static job-submission permission
 * constants and, via reflection, the distributed-cache permission constants so that
 * Hadoop's permission checks pass on Windows file systems.
 */
public static void hackHadoopStagingOnWin() {
    // do the assignment only on Windows systems
    if (TestUtils.isWindows()) {
        // 0650 = -rw-r-x--- : mutate the shared static FsPermission constants in place
        JobSubmissionFiles.JOB_DIR_PERMISSION.fromShort((short) 0650);
        JobSubmissionFiles.JOB_FILE_PERMISSION.fromShort((short) 0650);
        Field field = null;
        // handle distributed cache permissions on Hadoop < 2.4
        try {
            Class<?> jl = Class.forName("org.apache.hadoop.mapred.JobLocalizer");
            field = ReflectionUtils.findField(jl, "privateCachePerms");
            if (field != null) {
                ReflectionUtils.makeAccessible(field);
                FsPermission perm = (FsPermission) ReflectionUtils.getField(field, null);
                perm.fromShort((short) 0650);
            }
        } catch (ClassNotFoundException cnfe) {
            // ignore: JobLocalizer does not exist on this Hadoop version
        }
        // handle jar permissions as well - temporarily disable for CDH 4 / YARN
        try {
            Class<?> tdcm = Class.forName("org.apache.hadoop.filecache.TrackerDistributedCacheManager");
            field = ReflectionUtils.findField(tdcm, "PUBLIC_CACHE_OBJECT_PERM");
            ReflectionUtils.makeAccessible(field);
            FsPermission perm = (FsPermission) ReflectionUtils.getField(field, null);
            perm.fromShort((short) 0650);
        } catch (ClassNotFoundException cnfe) {
            // ignore: TrackerDistributedCacheManager absent (CDH 4 / YARN) - nothing left to patch
            return;
        } catch (Exception ex) {
            LogFactory.getLog(TestUtils.class).warn("Cannot set permission for TrackerDistributedCacheManager", ex);
        }
    }
}
From source file:org.elasticsearch.hadoop.integration.HdpBootstrap.java
License:Apache License
/** * Hack to allow Hadoop client to run on windows (which otherwise fails due to some permission problem). *///ww w . j a va 2 s.c om public static void hackHadoopStagingOnWin() { // do the assignment only on Windows systems if (TestUtils.isWindows()) { // 0655 = -rwxr-xr-x JobSubmissionFiles.JOB_DIR_PERMISSION.fromShort((short) 0650); JobSubmissionFiles.JOB_FILE_PERMISSION.fromShort((short) 0650); // handle jar permissions as well - temporarily disable for CDH 4 / YARN try { Class<?> tdcm = Class.forName("org.apache.hadoop.filecache.TrackerDistributedCacheManager"); Field field = ReflectionUtils.findField(tdcm, "PUBLIC_CACHE_OBJECT_PERM"); ReflectionUtils.makeAccessible(field); FsPermission perm = (FsPermission) ReflectionUtils.getField(field, null); perm.fromShort((short) 0650); } catch (ClassNotFoundException cnfe) { //ignore return; } catch (Exception ex) { LogFactory.getLog(TestUtils.class).warn("Cannot set permission for TrackerDistributedCacheManager", ex); } } }
From source file:org.springframework.data.hadoop.util.PermissionUtils.java
License:Apache License
public static void hackHadoopStagingOnWin() { // do the assignment only on Windows systems if (System.getProperty("os.name").toLowerCase().startsWith("win")) { // 0655 = -rwxr-xr-x JobSubmissionFiles.JOB_DIR_PERMISSION.fromShort((short) 0650); JobSubmissionFiles.JOB_FILE_PERMISSION.fromShort((short) 0650); if (trackerDistributedCacheManagerClass != null) { // handle jar permissions as well Field field = ReflectionUtils.findField(trackerDistributedCacheManagerClass, "PUBLIC_CACHE_OBJECT_PERM"); ReflectionUtils.makeAccessible(field); FsPermission perm = (FsPermission) ReflectionUtils.getField(field, null); perm.fromShort((short) 0650); }//from www . j a va 2 s . c o m } }