List of usage examples for org.apache.hadoop.mapred JobConf get
public String get(String name)
Parameter: name - the property name.
Returns: the value of the name property, or null if no such property exists.
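A minimal sketch of the get/set round trip and of the null return for a missing property; the property names used here (my.example.key, no.such.key) are made up for illustration.

import org.apache.hadoop.mapred.JobConf;

public class JobConfGetExample {
    public static void main(String[] args) {
        JobConf job = new JobConf();
        job.set("my.example.key", "value");          // hypothetical property name
        String present = job.get("my.example.key");  // returns "value"
        String missing = job.get("no.such.key");     // returns null: property was never set
        if (missing == null) {
            missing = "default";                     // guard against the null return
        }
        System.out.println(present + " / " + missing);
    }
}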
From source file:FormatStorage.Head.java
License:Open Source License
public void fromJobConf(JobConf job) throws Exception {
    byte var = (byte) job.getInt(ConstVar.HD_var, 0);
    byte compress = (byte) job.getInt(ConstVar.HD_compress, 0);
    byte compressStyle = (byte) job.getInt(ConstVar.HD_compressStyle, 0);
    short primaryIndex = (short) job.getInt(ConstVar.HD_primaryIndex, -1);
    byte encode = (byte) job.getInt(ConstVar.HD_encode, 0);
    byte encodeStyle = (byte) job.getInt(ConstVar.HD_encodeStyle, 0);
    String keyString = job.get(ConstVar.HD_key);
    String[] fieldStrings = job.getStrings(ConstVar.HD_fieldMap);

    LOG.info("in fromJobConf, compressed:" + compress + ",compressStyle:" + compressStyle);

    setVar(var);
    setCompress(compress);
    setCompressStyle(compressStyle);
    setEncode(encode);
    setEncodeStyle(encodeStyle);

    if (keyString != null && keyString.length() != 0) {
        setKey(keyString);
    }

    short fieldNum = 0;
    if (fieldStrings != null) {
        fieldNum = (short) fieldStrings.length;
    }

    FieldMap fieldMap = new FieldMap();
    for (short i = 0; i < fieldNum; i++) {
        String[] def = fieldStrings[i].split(ConstVar.RecordSplit);
        byte type = Byte.valueOf(def[0]);
        int len = Integer.valueOf(def[1]);
        short index = Short.valueOf(def[2]);
        fieldMap.addField(new Field(type, len, index));
    }
    setFieldMap(fieldMap);
    setPrimaryIndex(primaryIndex);
}
From source file:fr.ens.biologie.genomique.eoulsan.modules.mgmt.hadoop.DistCp.java
License:LGPL
/**
 * Driver to copy srcPath to destPath depending on required protocol.
 * @param args arguments
 */
static void copy(final Configuration conf, final Arguments args) throws IOException {
    getLogger().info("srcPaths=" + args.srcs);
    getLogger().info("destPath=" + args.dst);
    checkSrcPath(conf, args.srcs);

    JobConf job = createJobConf(conf);
    if (args.preservedAttributes != null) {
        job.set(PRESERVE_STATUS_LABEL, args.preservedAttributes);
    }
    if (args.mapredSslConf != null) {
        job.set("dfs.client.https.keystore.resource", args.mapredSslConf);
    }

    // Initialize the mapper
    try {
        setup(conf, job, args);
        JobClient.runJob(job);
        finalize(conf, job, args.dst, args.preservedAttributes);
    } finally {
        // delete tmp
        fullyDelete(job.get(TMP_DIR_LABEL), job);
        // delete jobDirectory
        fullyDelete(job.get(JOB_DIR_LABEL), job);
    }
}
From source file:fr.ens.biologie.genomique.eoulsan.modules.mgmt.hadoop.DistCp.java
License:LGPL
static private void finalize(final Configuration conf, final JobConf jobconf, final Path destPath,
        final String presevedAttributes) throws IOException {
    if (presevedAttributes == null) {
        return;
    }
    EnumSet<FileAttribute> preseved = FileAttribute.parse(presevedAttributes);
    if (!preseved.contains(FileAttribute.USER) && !preseved.contains(FileAttribute.GROUP)
            && !preseved.contains(FileAttribute.PERMISSION)) {
        return;
    }

    FileSystem dstfs = destPath.getFileSystem(conf);
    Path dstdirlist = new Path(jobconf.get(DST_DIR_LIST_LABEL));
    SequenceFile.Reader in = null;
    try {
        in = new SequenceFile.Reader(dstdirlist.getFileSystem(jobconf), dstdirlist, jobconf);
        Text dsttext = new Text();
        FilePair pair = new FilePair();
        for (; in.next(dsttext, pair);) {
            Path absdst = new Path(destPath, pair.output);
            updatePermissions(pair.input, dstfs.getFileStatus(absdst), preseved, dstfs);
        }
    } finally {
        checkAndClose(in);
    }
}
From source file:graphbuilding.GenomixMapper.java
License:Apache License
@Override
public void configure(JobConf job) {
    KMER_SIZE = Integer.parseInt(job.get("sizeKmer"));
    outputAdjList = new KmerCountValue();
    outputKmer = new Kmer(KMER_SIZE);
}
From source file:hadoop.RegexParserMap.java
License:Open Source License
@Override
public void configure(JobConf job) {
    pattern = Pattern.compile(job.get("logparser.regex"));
    matcher = pattern.matcher(""); // lets re-use the matcher
}
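The configure() above expects the driver to have stored the pattern under logparser.regex before the job is submitted; otherwise get() returns null and Pattern.compile(null) fails. A sketch of that driver-side call, assuming RegexParserMap implements the old-API Mapper interface; the job name, pattern, and surrounding setup are illustrative, only the property name comes from the mapper.

JobConf job = new JobConf(RegexParserMap.class);
job.setJobName("regex-parse");                              // hypothetical job name
job.set("logparser.regex", "^(\\S+)\\s+(\\S+)\\s+(\\S+)");  // hypothetical pattern, read back in configure()
job.setMapperClass(RegexParserMap.class);
// input/output paths and formats would be configured here, then JobClient.runJob(job)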
From source file:hitune.analysis.mapreduce.CSVFileOutputFormat.java
License:Apache License
public RecordWriter<K, V> getRecordWriter(FileSystem fs, JobConf job, String name, Progressable progress)
        throws IOException {
    job.set("mapred.textoutputformat.separator", SEPEARATOR_COMMA);
    String filename = job.get(AnalysisProcessorConfiguration.outputfilename);
    if (filename == null || filename.equals("")) {
        filename = name;
    } else {
        filename += "-" + name;
        //FileOutputFormat.setWorkOutputPath(job, new Path(job.get("mapred.output.dir") + "/" + filename));
    }
    return new CSVFileWriter(super.getRecordWriter(fs, job, filename, progress));
}
From source file:hitune.analysis.mapreduce.processor.HadoopMetrics.java
License:Apache License
@Override
public void run() {
    // TODO Auto-generated method stub
    long timestamp = System.currentTimeMillis();
    JobConf conf = new JobConf(this.conf, HadoopMetrics.class);
    try {
        conf.setJobName(this.getClass().getSimpleName() + timestamp);

        conf.setInputFormat(MultiSequenceFileInputFormat.class);
        conf.setMapperClass(HadoopMetrics.MapClass.class);
        conf.setReducerClass(SystemLog.ReduceClass.class);

        Class<? extends WritableComparable> outputKeyClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputKeyClass))
                .asSubclass(WritableComparable.class);
        Class<? extends Writable> outputValueClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputValueClass))
                .asSubclass(Writable.class);
        conf.setMapOutputKeyClass(outputKeyClass);
        conf.setMapOutputValueClass(outputValueClass);

        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(TextArrayWritable.class);
        conf.setOutputFormat(CSVFileOutputFormat.class);

        String outputPaths = conf.get(AnalysisProcessorConfiguration.reportfolder) + "/"
                + conf.get(AnalysisProcessorConfiguration.reportfile);
        String temp_outputPaths = getTempOutputDir(outputPaths);

        if (this.inputfiles != null) {
            log.debug("inputPaths:" + inputfiles);
            FileInputFormat.setInputPaths(conf, inputfiles);
            FileOutputFormat.setOutputPath(conf, new Path(temp_outputPaths));

            try {
                JobClient.runJob(conf);
                moveResults(conf, outputPaths, temp_outputPaths);
            } catch (IOException e) {
                // TODO Auto-generated catch block
                log.warn("For " + getOutputFileName() + " :JOB fails!");
                log.warn(e);
                e.printStackTrace();
                this.MOVE_DONE = false;
            }
        } else {
            log.warn("For " + getOutputFileName() + " :No input path!");
        }
    } catch (Exception e) {
        log.warn("Job preparation failure!");
        log.warn(e);
        e.printStackTrace();
    }
}
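This processor (and the ones that follow) passes conf.get(...) straight to Class.forName, so a missing mapoutputKeyClass or mapoutputValueClass property would surface as a NullPointerException rather than a configuration error. A hedged alternative sketch using Configuration.getClass, which accepts a default when the property is absent; the Text.class default chosen here is an assumption, not part of the original code.

// Resolve the map output key class with a fallback instead of Class.forName(conf.get(...)):
Class<? extends WritableComparable> outputKeyClass =
        conf.getClass(AnalysisProcessorConfiguration.mapoutputKeyClass,
                Text.class, WritableComparable.class);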
From source file:hitune.analysis.mapreduce.processor.HistoryLog.java
License:Apache License
public void run() {
    // TODO Auto-generated method stub
    long timestamp = System.currentTimeMillis();
    JobConf conf = new JobConf(this.conf, HistoryLog.class);
    try {
        conf.setJobName(this.getClass().getSimpleName() + timestamp);

        conf.setInputFormat(MultiSequenceFileInputFormat.class);
        conf.setMapperClass(HistoryLog.MapClass.class);
        conf.setReducerClass(HistoryLog.ReduceClass.class);

        conf.setOutputKeyClass(Text.class);

        Class<? extends WritableComparable> outputKeyClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputKeyClass))
                .asSubclass(WritableComparable.class);
        Class<? extends Writable> outputValueClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputValueClass))
                .asSubclass(Writable.class);
        conf.setMapOutputKeyClass(outputKeyClass);
        conf.setMapOutputValueClass(outputValueClass);

        conf.setOutputValueClass(TextArrayWritable.class);
        conf.setOutputFormat(CSVFileOutputFormat.class);

        String outputPaths = conf.get(AnalysisProcessorConfiguration.reportfolder) + "/"
                + conf.get(AnalysisProcessorConfiguration.reportfile);
        String temp_outputPaths = getTempOutputDir(outputPaths);

        if (this.inputfiles != null) {
            log.debug("inputPaths:" + inputfiles);
            FileInputFormat.setInputPaths(conf, inputfiles);
            FileOutputFormat.setOutputPath(conf, new Path(temp_outputPaths));

            try {
                JobClient.runJob(conf);
                moveResults(conf, outputPaths, temp_outputPaths);
            } catch (IOException e) {
                // TODO Auto-generated catch block
                log.warn("For " + getOutputFileName() + " :JOB fails!");
                log.warn(e);
                e.printStackTrace();
                this.MOVE_DONE = false;
            }
        } else {
            log.warn("For " + getOutputFileName() + " :No input path!");
        }
    } catch (Exception e) {
        log.warn("Job preparation failure!");
        log.warn(e);
        e.printStackTrace();
    }
}
From source file:hitune.analysis.mapreduce.processor.InstrumentDataflow.java
License:Apache License
@Override
public void run() {
    // TODO Auto-generated method stub
    long timestamp = System.currentTimeMillis();
    JobConf conf = new JobConf(this.conf, InstrumentDataflow.class);
    try {
        conf.setJobName(this.getClass().getSimpleName() + timestamp);

        conf.setInputFormat(MultiSequenceFileInputFormat.class);
        conf.setMapperClass(InstrumentDataflow.MapClass.class);
        conf.setReducerClass(InstrumentDataflow.ReduceClass.class);

        conf.setOutputKeyClass(Text.class);

        Class<? extends WritableComparable> outputKeyClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputKeyClass))
                .asSubclass(WritableComparable.class);
        Class<? extends Writable> outputValueClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputValueClass))
                .asSubclass(Writable.class);
        conf.setMapOutputKeyClass(outputKeyClass);
        conf.setMapOutputValueClass(outputValueClass);

        conf.setOutputValueClass(TextArrayWritable.class);
        conf.setOutputFormat(CSVFileOutputFormat.class);

        String outputPaths = conf.get(AnalysisProcessorConfiguration.reportfolder) + "/"
                + conf.get(AnalysisProcessorConfiguration.reportfile);
        String temp_outputPaths = getTempOutputDir(outputPaths);

        if (this.inputfiles != null) {
            log.debug("inputPaths:" + inputfiles);
            FileInputFormat.setInputPaths(conf, inputfiles);
            FileOutputFormat.setOutputPath(conf, new Path(temp_outputPaths));
            //FileInputFormat.setInputPathFilter(conf, evtFileFilter.class);
            //conf.setNumReduceTasks(1);

            try {
                JobClient.runJob(conf);
                moveResults(conf, outputPaths, temp_outputPaths);
            } catch (IOException e) {
                // TODO Auto-generated catch block
                log.warn("For " + getOutputFileName() + " :JOB fails!");
                log.warn(e);
                e.printStackTrace();
                this.MOVE_DONE = false;
            }
        } else {
            log.warn("For " + getOutputFileName() + " :No input path!");
        }
    } catch (Exception e) {
        log.warn("Job preparation failure!");
        log.warn(e);
        e.printStackTrace();
    }
}
From source file:hitune.analysis.mapreduce.processor.InstrumentSamplingTop.java
License:Apache License
@Override
public void run() {
    // TODO Auto-generated method stub
    long timestamp = System.currentTimeMillis();
    try {
        JobConf conf = new JobConf(this.conf, InstrumentSamplingTop.class);
        conf.setJobName(this.getClass().getSimpleName() + "_1_" + timestamp);

        conf.setInputFormat(MultiSequenceFileInputFormat.class);
        conf.setMapperClass(InstrumentSamplingTop.MapClass.class);
        conf.setReducerClass(InstrumentSamplingTop.ReduceClass.class);

        Class<? extends WritableComparable> outputKeyClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputKeyClass))
                .asSubclass(WritableComparable.class);
        Class<? extends Writable> outputValueClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputValueClass))
                .asSubclass(Writable.class);
        conf.setMapOutputKeyClass(outputKeyClass);
        conf.setMapOutputValueClass(outputValueClass);
        conf.setOutputKeyClass(outputKeyClass);
        conf.setOutputValueClass(outputValueClass);
        conf.setOutputFormat(SequenceFileOutputFormat.class);

        String outputPaths = conf.get(AnalysisProcessorConfiguration.reportfolder) + "/"
                + conf.get(AnalysisProcessorConfiguration.reportfile);
        String temp_outputPaths = getTempOutputDir(outputPaths);

        if (this.inputfiles != null) {
            log.debug("inputPaths:" + inputfiles);
            FileInputFormat.setInputPaths(conf, inputfiles);
            FileOutputFormat.setOutputPath(conf, new Path(outputPaths + "_1_" + timestamp));

            try {
                // first job
                JobClient.runJob(conf);

                JobConf secondconf = new JobConf(this.conf, InstrumentSamplingTop.class);
                secondconf.setJobName(this.getClass().getSimpleName() + "_2_" + timestamp);
                secondconf.setInputFormat(SequenceFileInputFormat.class);
                secondconf.setMapperClass(IdentityMapper.class);
                secondconf.setReducerClass(InstrumentSamplingTop.TopClass.class);
                secondconf.setMapOutputKeyClass(outputKeyClass);
                secondconf.setMapOutputValueClass(outputValueClass);
                secondconf.setOutputKeyClass(Text.class);
                secondconf.setOutputValueClass(TextArrayWritable.class);
                secondconf.setOutputFormat(CSVFileOutputFormat.class);
                FileInputFormat.setInputPaths(secondconf, outputPaths + "_1_" + timestamp);
                FileOutputFormat.setOutputPath(secondconf, new Path(temp_outputPaths));

                // second job to get ranking list
                JobClient.runJob(secondconf);

                moveResults(secondconf, outputPaths, temp_outputPaths);
                Path temp = new Path(outputPaths + "_1_" + timestamp);
                temp.getFileSystem(conf).delete(temp);
            } catch (IOException e) {
                // TODO Auto-generated catch block
                log.warn("For " + getOutputFileName() + " :JOB fails!");
                log.warn(e);
                e.printStackTrace();
                this.MOVE_DONE = false;
            }
        } else {
            log.warn("For " + getOutputFileName() + " :No input path!");
        }
    } catch (Exception e) {
        log.warn("Job preparation failure!");
        log.warn(e);
        e.printStackTrace();
    }
}