Example usage for org.apache.hadoop.mapreduce TaskAttemptContext getConfiguration

Introduction

On this page you can find example usages of org.apache.hadoop.mapreduce.TaskAttemptContext.getConfiguration(), drawn from open-source projects.

Prototype

public Configuration getConfiguration();

Document

Return the configuration for the job.
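
Before the project examples below, here is a minimal sketch of the typical pattern (the class name and property key are illustrative, not taken from any of the projects): the task's Configuration is most often used to resolve a FileSystem for a Path and to read job properties inside a RecordReader or RecordWriter.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class ConfigurationLookup {
    // Resolve the FileSystem that owns 'path' using the task's configuration,
    // and read a job property with a default value.
    public static FileSystem openFileSystem(TaskAttemptContext context, Path path)
            throws IOException {
        Configuration conf = context.getConfiguration();
        // "example.buffer.size" is a hypothetical key, for illustration only
        int bufferSize = conf.getInt("example.buffer.size", 4096);
        System.out.println("buffer size = " + bufferSize);
        return path.getFileSystem(conf);
    }
}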

Usage

From source file:edu.umn.cs.spatialHadoop.visualization.ImageOutputFormat.java

License:Open Source License

@Override
public RecordWriter<Object, Canvas> getRecordWriter(TaskAttemptContext task)
        throws IOException, InterruptedException {
    Path file = getDefaultWorkFile(task, "");
    FileSystem fs = file.getFileSystem(task.getConfiguration());
    return new ImageRecordWriter(fs, file, task);
}

From source file:edu.umn.cs.spatialHadoop.visualization.PyramidOutputFormat2.java

License:Open Source License

@Override
public RecordWriter<TileIndex, Canvas> getRecordWriter(TaskAttemptContext task)
        throws IOException, InterruptedException {
    Path file = getDefaultWorkFile(task, "").getParent();
    FileSystem fs = file.getFileSystem(task.getConfiguration());
    return new ImageRecordWriter(fs, file, task);
}

From source file:edu.umn.cs.spatialHadoop.visualization.RasterOutputFormat.java

License:Open Source License

@Override
public RecordWriter<Object, RasterLayer> getRecordWriter(TaskAttemptContext task)
        throws IOException, InterruptedException {
    Path file = getDefaultWorkFile(task, "");
    FileSystem fs = file.getFileSystem(task.getConfiguration());
    return new RasterRecordWriter(fs, file, task);
}

From source file:edu.umn.cs.sthadoop.mapreduce.SpatioTemporalInputFormat.java

License:Open Source License

@Override
public RecordReader<K, Iterable<V>> createRecordReader(InputSplit split, TaskAttemptContext context)
        throws IOException, InterruptedException {
    Path path;
    String extension;
    if (split instanceof FileSplit) {
        FileSplit fsplit = (FileSplit) split;
        extension = FileUtil.getExtensionWithoutCompression(path = fsplit.getPath());
    } else if (split instanceof CombineFileSplit) {
        CombineFileSplit csplit = (CombineFileSplit) split;
        extension = FileUtil.getExtensionWithoutCompression(path = csplit.getPath(0));
    } else {
        throw new RuntimeException("Cannot process plits of type " + split.getClass());
    }
    // If this extension is for a compression, skip it and take the previous
    // extension
    if (extension.equals("hdf")) {
        // HDF File. Create HDFRecordReader
        return (RecordReader) new HDFRecordReader();
    }
    if (extension.equals("rtree")) {
        // File is locally indexed as RTree
        return (RecordReader) new RTreeRecordReader3<V>();
    }
    // For backward compatibility, check if the file is RTree indexed from
    // its signature
    Configuration conf = context != null ? context.getConfiguration() : new Configuration();
    if (SpatialSite.isRTree(path.getFileSystem(conf), path)) {
        return (RecordReader) new RTreeRecordReader3<V>();
    }
    // Check if a custom record reader is configured with this extension
    Class<?> recordReaderClass = conf.getClass("SpatialInputFormat." + extension + ".recordreader",
            SpatioTemporalRecordReader.class);
    try {
        return (RecordReader<K, Iterable<V>>) recordReaderClass.newInstance();
    } catch (InstantiationException e) {
        // Ignore and fall back to the default record reader below
    } catch (IllegalAccessException e) {
        // Ignore and fall back to the default record reader below
    }
    // Use the default SpatioTemporalRecordReader if none of the above worked
    return (RecordReader) new SpatioTemporalRecordReader<V>();
}
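
The lookup above resolves a custom reader class from the job configuration under the key "SpatialInputFormat.<extension>.recordreader". A hypothetical driver-side registration could look like the sketch below (MyCsvRecordReader is an illustrative name, not a class from the project):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.RecordReader;

public class CustomReaderSetup {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "example");
        // Register a hypothetical MyCsvRecordReader for the ".csv" extension
        // under the key read by SpatioTemporalInputFormat above.
        job.getConfiguration().setClass("SpatialInputFormat.csv.recordreader",
                MyCsvRecordReader.class, RecordReader.class);
    }
}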

From source file:edu.usc.pgroup.louvain.hadoop.GraphPartitionRecordReader.java

License:Apache License

@Override
public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
        throws IOException, InterruptedException {

    split = (FileSplit) inputSplit;
    conf = taskAttemptContext.getConfiguration();

}

From source file:eu.dnetlib.iis.core.javamapreduce.hack.AvroMultipleOutputs.java

License:Apache License

/** Hacked: make public from private */
// By being synchronized, MultipleOutputTask can be used with a
// MultithreadedMapper.
@SuppressWarnings("unchecked")
public synchronized RecordWriter getRecordWriter(TaskAttemptContext taskContext, String baseFileName)
        throws IOException, InterruptedException {

    // look for record-writer in the cache
    RecordWriter writer = recordWriters.get(baseFileName);

    // If not in cache, create a new one
    if (writer == null) {
        // get the record writer from context output format
        //FileOutputFormat.setOutputName(taskContext, baseFileName);
        taskContext.getConfiguration().set("avro.mo.config.namedOutput", baseFileName);
        try {
            writer = ((OutputFormat) ReflectionUtils.newInstance(taskContext.getOutputFormatClass(),
                    taskContext.getConfiguration())).getRecordWriter(taskContext);
        } catch (ClassNotFoundException e) {
            throw new IOException(e);
        }

        // if counters are enabled, wrap the writer with context 
        // to increment counters 
        if (countersEnabled) {
            writer = new RecordWriterWithCounter(writer, baseFileName, context);
        }

        // add the record-writer to the cache
        recordWriters.put(baseFileName, writer);
    }
    return writer;
}

From source file:eu.scape_project.arc2warc.mapreduce.WarcOutputFormat.java

License:Apache License

@Override
public RecordWriter<LongWritable, ArcRecord> getRecordWriter(TaskAttemptContext tac)
        throws IOException, InterruptedException {

    //get the current path
    Path path = FileOutputFormat.getOutputPath(tac);

    //create the full path with the output directory plus our filename
    String filename = "result" + System.currentTimeMillis() + ".warc";
    Path fullPath = new Path(path, filename);

    //create the file in the file system
    FileSystem fs = path.getFileSystem(tac.getConfiguration());

    FSDataOutputStream fileOut = fs.create(fullPath, true);

    //create our record writer with the new file
    return new WarcOutputFormat.WarcRecordWriter(filename, fileOut);
}

From source file:eu.scape_project.archiventory.hadoop.ArcRecordReader.java

License:Apache License

@Override
public void initialize(InputSplit is, TaskAttemptContext tac) throws IOException, InterruptedException {
    //throw new UnsupportedOperationException("Unused.");

    FileSplit fileSplit = (FileSplit) is;
    try {
        Path path = fileSplit.getPath();

        FileSystem fileSystem = path.getFileSystem(tac.getConfiguration());

        FSDataInputStream fileInputStream = fileSystem.open(path);
        FileStatus fileStatus = fileSystem.getFileStatus(path);
        fileLength = fileStatus.getLen();
        ArchiveReader reader = ArchiveReaderFactory.get(path.getName(), fileInputStream, true);
        recordIterator = reader.iterator();

        currentKey = new Text();
        currentArcRecord = new ArcRecord();
    } catch (IOException ex) {
        Logger.getLogger(ArcRecordReader.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:eu.scape_project.arcunpacker.mapreduce.ArcRecordReader.java

License:Apache License

@Override
public void initialize(InputSplit is, TaskAttemptContext tac) throws IOException, InterruptedException {
    //throw new UnsupportedOperationException("Unused.");

    FileSplit fileSplit = (FileSplit) is;
    try {
        Path path = fileSplit.getPath();

        FileSystem fileSystem = path.getFileSystem(tac.getConfiguration());

        FSDataInputStream fileInputStream = fileSystem.open(path);
        FileStatus fileStatus = fileSystem.getFileStatus(path);
        long fileLength = fileStatus.getLen();

        archiveReaderDelegate = new HeritrixWrapper(path.getName(), fileInputStream, fileLength);
        key = new Text();
        value = new HadoopArcRecord();

    } catch (IOException ex) {
        Logger.getLogger(ArcRecordReader.class.getName()).log(Level.SEVERE, null, ex);
        throw new IOException(ex);
    }

}

From source file:eu.scape_project.pt.mets.hadoop.MetsInputFormat.java

License:Apache License

@Override
public RecordReader<Text, DTO> createRecordReader(InputSplit split, TaskAttemptContext context) {

    try {
        LOG.debug("split.length = " + split.getLength());
        LOG.debug("split.string = " + split.toString());
    } catch (IOException ex) {
        LOG.error(ex.getMessage());
    } catch (InterruptedException ex) {
        LOG.error(ex.getMessage());
    }
    String tag = context.getConfiguration().get(MetsInputFormat.TAG);
    return new MetsRecordReader(tag);
}