List of usage examples for org.apache.hadoop.mapreduce TaskInputOutputContext getWorkingDirectory
public Path getWorkingDirectory() throws IOException;
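Before the full examples below, here is a minimal sketch of calling getWorkingDirectory() from inside a Mapper (Mapper.Context is a TaskInputOutputContext). The class name WorkingDirMarkerMapper and the "task-markers" folder are illustrative, not taken from the examples.

import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WorkingDirMarkerMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        // Resolve a per-task directory under the job's current working directory.
        Path dir = new Path(context.getWorkingDirectory(), "task-markers");
        // Get the FileSystem that owns this path (respects the path's scheme).
        FileSystem fs = dir.getFileSystem(context.getConfiguration());
        fs.mkdirs(dir);
        // Name the file after the task attempt so concurrent tasks do not collide.
        Path marker = new Path(dir, context.getTaskAttemptID().toString());
        try (OutputStreamWriter out =
                new OutputStreamWriter(fs.create(marker, true), StandardCharsets.UTF_8)) {
            out.write("done\n");
        }
    }
}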
From source file: com.moz.fiji.mapreduce.util.SerializeLoggerAspect.java
License: Apache License
/**
 * Logic to serialize collected profiling content to a file on HDFS. The files are stored
 * in the current working directory for this context, in a folder specified by STATS_DIR. The per
 * task file is named by the task attempt id.
 * We obtain the profiling stats collected by the LogTimerAspect in FijiSchema. The format of the
 * file is as follows: Job Name, Job ID, Task Attempt, Function Signature,
 * Aggregate Time (nanoseconds), Number of Invocations, Time per call (nanoseconds)'\n'
 *
 * @param context The {@link TaskInputOutputContext} for this job.
 * @throws IOException If the writes to HDFS fail.
 */
private void serializeToFile(TaskInputOutputContext context) throws IOException {
    Path parentPath = new Path(context.getWorkingDirectory(), STATS_DIR);
    FileSystem fs = parentPath.getFileSystem(context.getConfiguration());
    fs.mkdirs(parentPath);
    Path path = new Path(parentPath, context.getTaskAttemptID().toString());
    OutputStreamWriter out = new OutputStreamWriter(fs.create(path, true), "UTF-8");
    try {
        out.write("Job Name, Job ID, Task Attempt, Function Signature, Aggregate Time (nanoseconds), "
            + "Number of Invocations, Time per call (nanoseconds)\n");
        ConcurrentHashMap<String, LoggingInfo> signatureTimeMap =
            mLogTimerAspect.getSignatureTimeMap();
        for (Map.Entry<String, LoggingInfo> entrySet : signatureTimeMap.entrySet()) {
            writeProfileInformation(out, context, entrySet.getKey(), entrySet.getValue());
        }
        signatureTimeMap = mMRLogTimerAspect.getSignatureTimeMap();
        for (Map.Entry<String, LoggingInfo> entrySet : signatureTimeMap.entrySet()) {
            writeProfileInformation(out, context, entrySet.getKey(), entrySet.getValue());
        }
    } finally {
        out.close();
    }
}
From source file: org.kiji.mapreduce.util.SerializeLoggerAspect.java
License: Apache License
/**
 * Logic to serialize collected profiling content to a file on HDFS. The files are stored
 * in the current working directory for this context, in a folder specified by STATS_DIR. The per
 * task file is named by the task attempt id.
 * We obtain the profiling stats collected by the LogTimerAspect in KijiSchema. The format of the
 * file is as follows: Job Name, Job ID, Task Attempt, Function Signature,
 * Aggregate Time (nanoseconds), Number of Invocations, Time per call (nanoseconds)'\n'
 *
 * @param context The {@link TaskInputOutputContext} for this job.
 * @throws IOException If the writes to HDFS fail.
 */
private void serializeToFile(TaskInputOutputContext context) throws IOException {
    Path parentPath = new Path(context.getWorkingDirectory(), STATS_DIR);
    FileSystem fs = FileSystem.get(context.getConfiguration());
    fs.mkdirs(parentPath);
    Path path = new Path(parentPath, context.getTaskAttemptID().toString());
    OutputStreamWriter out = new OutputStreamWriter(fs.create(path, true), "UTF-8");
    try {
        out.write("Job Name, Job ID, Task Attempt, Function Signature, Aggregate Time (nanoseconds), "
            + "Number of Invocations, Time per call (nanoseconds)\n");
        ConcurrentHashMap<String, LoggingInfo> signatureTimeMap =
            mLogTimerAspect.getSignatureTimeMap();
        for (Map.Entry<String, LoggingInfo> entrySet : signatureTimeMap.entrySet()) {
            writeProfileInformation(out, context, entrySet.getKey(), entrySet.getValue());
        }
        signatureTimeMap = mMRLogTimerAspect.getSignatureTimeMap();
        for (Map.Entry<String, LoggingInfo> entrySet : signatureTimeMap.entrySet()) {
            writeProfileInformation(out, context, entrySet.getKey(), entrySet.getValue());
        }
    } finally {
        out.close();
    }
}
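Note the one substantive difference between the two variants: the com.moz.fiji version obtains the FileSystem from the path itself via parentPath.getFileSystem(context.getConfiguration()), which honors the scheme of the working-directory path, while the org.kiji version calls FileSystem.get(context.getConfiguration()), which always returns the default filesystem for the job configuration. The former is the safer choice when the working directory may not live on the default filesystem.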