Example usage for org.apache.hadoop.mapreduce TaskInputOutputContext getConfiguration

Introduction

On this page you can find example usages of org.apache.hadoop.mapreduce.TaskInputOutputContext.getConfiguration().

Prototype

public Configuration getConfiguration();

Document

Return the configuration for the job.
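For orientation before the longer examples below, here is a minimal, self-contained sketch of the typical call pattern. Mapper.Context is a TaskInputOutputContext, so getConfiguration() is available from setup(), map() and cleanup(). The class name MyMapper and the configuration key example.scratch.dir are hypothetical, not taken from any example on this page.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical sketch: this class and the key "example.scratch.dir" are
// illustrative only; Mapper.Context extends TaskInputOutputContext.
public class MyMapper extends Mapper<LongWritable, Text, Text, Text> {

    private String scratchDir;

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        // Read a job-wide setting from the task's Configuration.
        Configuration conf = context.getConfiguration();
        scratchDir = conf.get("example.scratch.dir", "/tmp/scratch");
    }

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Emit the configured directory with each record, showing the value is visible here.
        context.write(new Text(scratchDir), value);
    }
}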

Usage

From source file:Payload.java

License:Open Source License

/** Shadow setup function
 */
private static void setup(TaskInputOutputContext task, String name) throws IOException {
    script = new Script();
    EggGlobal.script = script;
    EggIterator.script = script;
    EggContext.task = task;

    Scriptable global = script.newObject("EggGlobal", null);
    script.setGlobalScope(global);
    thisObj = script.newObject("EggContext", null);

    Object o = script.deserialize(task.getConfiguration(), name);
    if (o instanceof NativeObject) {
        NativeObject obj = (NativeObject) o;
        fmap = (Function) script.getProperty("map", obj);
        freduce = (Function) script.getProperty("reduce", obj);
        fsetup = (Function) script.getProperty("setup", obj);
        fcleanup = (Function) script.getProperty("cleanup", obj);
        if (fsetup != null)
            script.callFunction(fsetup, thisObj, null);
    } else if (o instanceof Function) {
        fmapred = (Function) o;
    }
}

From source file:be.ugent.intec.halvade.utils.HalvadeFileUtils.java

License:Open Source License

public static String downloadGFF(TaskInputOutputContext context, String id)
        throws IOException, URISyntaxException, InterruptedException {
    Configuration conf = context.getConfiguration();
    String refDir = HalvadeConf.getRefDirOnScratch(conf);
    String gff = HalvadeConf.getGff(context.getConfiguration());
    if (gff == null)
        return null;
    String gffSuffix = null;
    int si = gff.lastIndexOf('.');
    if (si > 0)
        gffSuffix = gff.substring(si);
    else
        throw new InterruptedException("Illegal filename for gff file: " + gff);
    Logger.DEBUG("suffix: " + gffSuffix);
    if (!refDir.endsWith("/"))
        refDir = refDir + "/";
    HalvadeFileLock lock = new HalvadeFileLock(context, refDir, GFF_LOCK);
    String gffFile = null;
    try {
        lock.getLock();

        ByteBuffer bytes = ByteBuffer.allocate(4);
        if (lock.read(bytes) > 0) {
            bytes.flip();
            long val = bytes.getInt();
            if (val == DEFAULT_LOCK_VAL)
                Logger.DEBUG("gff has been downloaded to local scratch: " + val);
            else {
                Logger.INFO("downloading missing gff file to local scratch");
                FileSystem fs = FileSystem.get(new URI(gff), conf);
                gffFile = findFile(refDir, gffSuffix, false);
                if (gffFile == null)
                    gffFile = refDir + id;

                attemptDownloadFileFromHDFS(context, fs, gff, gffFile + gffSuffix, RETRIES);
                Logger.INFO("FINISHED downloading the complete reference index to local scratch");
                bytes.clear();
                bytes.putInt(DEFAULT_LOCK_VAL).flip();
                lock.forceWrite(bytes);
            }
        } else {
            Logger.INFO("downloading missing gff file to local scratch");
            Logger.DEBUG("gff file: " + gff);
            FileSystem fs = FileSystem.get(new URI(gff), conf);
            gffFile = findFile(refDir, gffSuffix, false);
            if (gffFile == null)
                gffFile = refDir + id;
            attemptDownloadFileFromHDFS(context, fs, gff, gffFile + gffSuffix, RETRIES);
            Logger.INFO("FINISHED downloading the complete reference index to local scratch");
            bytes.clear();
            bytes.putInt(DEFAULT_LOCK_VAL).flip();
            lock.forceWrite(bytes);
        }

    } catch (InterruptedException ex) {
        Logger.EXCEPTION(ex);
    } finally {
        lock.releaseLock();
    }
    if (gffFile == null)
        gffFile = findFile(refDir, gffSuffix, false);
    return gffFile + gffSuffix;

}

From source file:be.ugent.intec.halvade.utils.HalvadeFileUtils.java

License:Open Source License

protected static String downloadAlignerIndex(TaskInputOutputContext context, String id, String refName,
        String refSuffix, String[] refFiles) throws IOException, URISyntaxException {
    Configuration conf = context.getConfiguration();
    String refDir = HalvadeConf.getRefDirOnScratch(conf);
    if (!refDir.endsWith("/"))
        refDir = refDir + "/";
    HalvadeFileLock lock = new HalvadeFileLock(context, refDir, REF_LOCK);
    String refBase = null;
    try {
        lock.getLock();

        ByteBuffer bytes = ByteBuffer.allocate(4);
        if (lock.read(bytes) > 0) {
            bytes.flip();
            long val = bytes.getInt();
            if (val == REF_BOTH)
                Logger.DEBUG("reference has been downloaded to local scratch: " + val);
            else {
                Logger.INFO("downloading missing reference index files to local scratch");
                String HDFSRef = HalvadeConf.getRefOnHDFS(conf);
                FileSystem fs = FileSystem.get(new URI(HDFSRef), conf);
                refBase = findFile(refDir, refSuffix, false); // refSuffix = HALVADE_BWA_SUFFIX
                boolean foundExisting = (refBase != null);
                if (!foundExisting)
                    refBase = refDir + refName + id; // refName = bwa_ref-

                for (String suffix : refFiles) { //  refFiles = BWA_REF_FILES
                    attemptDownloadFileFromHDFS(context, fs, HDFSRef + suffix, refBase + suffix, RETRIES);
                }
                Logger.INFO("FINISHED downloading the complete reference index to local scratch");
                if (!foundExisting) {
                    File f = new File(refBase + refSuffix);
                    f.createNewFile();
                    f = new File(refBase + HALVADE_GATK_SUFFIX);
                    f.createNewFile();
                }
                bytes.clear();
                bytes.putInt(REF_BOTH).flip();
                lock.forceWrite(bytes);
            }
        } else {
            Logger.INFO("downloading missing reference index files to local scratch");
            String HDFSRef = HalvadeConf.getRefOnHDFS(conf);
            FileSystem fs = FileSystem.get(new URI(HDFSRef), conf);
            refBase = findFile(refDir, refSuffix, false);
            boolean foundExisting = (refBase != null);
            if (!foundExisting)
                refBase = refDir + refName + id;

            for (String suffix : refFiles) {
                attemptDownloadFileFromHDFS(context, fs, HDFSRef + suffix, refBase + suffix, RETRIES);
            }
            Logger.INFO("FINISHED downloading the complete reference index to local scratch");
            if (!foundExisting) {
                File f = new File(refBase + refSuffix);
                f.createNewFile();
                f = new File(refBase + HALVADE_GATK_SUFFIX);
                f.createNewFile();
            }
            bytes.clear();
            bytes.putInt(REF_BOTH).flip();
            lock.forceWrite(bytes);
        }

    } catch (InterruptedException ex) {
        Logger.EXCEPTION(ex);
    } finally {
        lock.releaseLock();
    }
    if (refBase == null)
        refBase = findFile(refDir, refSuffix, false);
    return refBase + refFiles[0];
}

From source file:be.ugent.intec.halvade.utils.HalvadeFileUtils.java

License:Open Source License

public static String downloadGATKIndex(TaskInputOutputContext context, String id)
        throws IOException, URISyntaxException {
    Configuration conf = context.getConfiguration();
    String tmpDir = HalvadeConf.getScratchTempDir(conf);
    String refDir = HalvadeConf.getRefDirOnScratch(conf);
    if (!refDir.endsWith("/"))
        refDir = refDir + "/";
    HalvadeFileLock lock = new HalvadeFileLock(context, refDir, REF_LOCK);
    String refBase = null;
    try {
        lock.getLock();

        ByteBuffer bytes = ByteBuffer.allocate(4);
        if (lock.read(bytes) > 0) {
            bytes.flip();
            long val = bytes.getInt();
            if (val == REF_BOTH || val == DEFAULT_LOCK_VAL)
                Logger.DEBUG("reference has been downloaded to local scratch: " + val);
            else {
                Logger.INFO("downloading missing reference index files to local scratch");
                String HDFSRef = HalvadeConf.getRefOnHDFS(conf);
                FileSystem fs = FileSystem.get(new URI(HDFSRef), conf);
                refBase = findFile(refDir, HALVADE_GATK_SUFFIX, false);
                boolean foundExisting = (refBase != null);
                if (!foundExisting)
                    refBase = refDir + "bwa_ref-" + id;

                for (String suffix : GATK_REF_FILES) {
                    attemptDownloadFileFromHDFS(context, fs, HDFSRef + suffix, refBase + suffix, RETRIES);
                }
                Logger.INFO("FINISHED downloading the complete reference index to local scratch");
                if (!foundExisting) {
                    File f = new File(refBase + HALVADE_GATK_SUFFIX);
                    f.createNewFile();
                }
                bytes.clear();
                bytes.putInt(DEFAULT_LOCK_VAL).flip();
                lock.forceWrite(bytes);
            }
        } else {
            Logger.INFO("downloading missing reference index files to local scratch");
            String HDFSRef = HalvadeConf.getRefOnHDFS(conf);
            FileSystem fs = FileSystem.get(new URI(HDFSRef), conf);
            refBase = findFile(refDir, HALVADE_GATK_SUFFIX, false);
            boolean foundExisting = (refBase != null);
            if (!foundExisting)
                refBase = refDir + "bwa_ref-" + id;

            for (String suffix : GATK_REF_FILES) {
                attemptDownloadFileFromHDFS(context, fs, HDFSRef + suffix, refBase + suffix, RETRIES);
            }
            Logger.INFO("FINISHED downloading the complete reference index to local scratch");
            if (!foundExisting) {
                File f = new File(refBase + HALVADE_GATK_SUFFIX);
                f.createNewFile();
            }
            bytes.clear();
            bytes.putInt(DEFAULT_LOCK_VAL).flip();
            lock.forceWrite(bytes);
        }

    } catch (InterruptedException ex) {
        Logger.EXCEPTION(ex);
    } finally {
        lock.releaseLock();
    }
    if (refBase == null)
        refBase = findFile(refDir, HALVADE_GATK_SUFFIX, false);
    return refBase + GATK_REF_FILES[0];
}

From source file:be.ugent.intec.halvade.utils.HalvadeFileUtils.java

License:Open Source License

public static String downloadSTARIndex(TaskInputOutputContext context, String id, boolean usePass2Genome)
        throws IOException, URISyntaxException {
    Configuration conf = context.getConfiguration();
    String tmpDir = HalvadeConf.getScratchTempDir(conf);
    String refDir = HalvadeConf.getRefDirOnScratch(conf);
    if (!refDir.endsWith("/"))
        refDir = refDir + "/";
    HalvadeFileLock lock = new HalvadeFileLock(context, tmpDir, STARG_LOCK);
    String refBase = null;
    try {
        lock.getLock();

        ByteBuffer bytes = ByteBuffer.allocate(4);
        if (lock.read(bytes) > 0) {
            bytes.flip();
            long val = bytes.getInt();
            if (val == DEFAULT_LOCK_VAL)
                Logger.DEBUG("reference has been downloaded to local scratch: " + val);
            else {
                Logger.INFO("downloading missing reference index files to local scratch");
                if (usePass2Genome)
                    Logger.DEBUG("using Pass2 genome");
                String HDFSRef = usePass2Genome ? HalvadeConf.getStarDirPass2HDFS(conf)
                        : HalvadeConf.getStarDirOnHDFS(conf);
                Logger.DEBUG("downloading STAR genome from: " + HDFSRef);
                FileSystem fs = FileSystem.get(new URI(HDFSRef), conf);
                refBase = findFile(refDir, usePass2Genome ? HALVADE_STAR_SUFFIX_P2 : HALVADE_STAR_SUFFIX_P1,
                        true);
                boolean foundExisting = (refBase != null);
                if (!foundExisting) {
                    refBase = refDir + id + "-star/";
                    //make dir
                    File makeRefDir = new File(refBase);
                    makeRefDir.mkdir();
                }
                Logger.DEBUG("STAR dir: " + refBase);
                if (!usePass2Genome) {
                    for (String suffix : STAR_REF_FILES) {
                        attemptDownloadFileFromHDFS(context, fs, HDFSRef + suffix, refBase + suffix, RETRIES);
                    }
                    for (String suffix : STAR_REF_OPTIONAL_FILES) {
                        if (fs.exists(new Path(HDFSRef + suffix)))
                            attemptDownloadFileFromHDFS(context, fs, HDFSRef + suffix, refBase + suffix,
                                    RETRIES);
                    }
                }
                Logger.INFO("FINISHED downloading the complete reference index to local scratch");
                if (!foundExisting) {
                    File f = new File(
                            refBase + (usePass2Genome ? HALVADE_STAR_SUFFIX_P2 : HALVADE_STAR_SUFFIX_P1));
                    f.createNewFile();
                }
                bytes.clear();
                bytes.putInt(DEFAULT_LOCK_VAL).flip();
                lock.forceWrite(bytes);
            }
        } else {
            Logger.INFO("downloading missing reference index files to local scratch");
            if (usePass2Genome)
                Logger.DEBUG("using Pass2 genome");
            String HDFSRef = usePass2Genome ? HalvadeConf.getStarDirPass2HDFS(conf)
                    : HalvadeConf.getStarDirOnHDFS(conf);
            Logger.DEBUG("downloading STAR genome from: " + HDFSRef);
            FileSystem fs = FileSystem.get(new URI(HDFSRef), conf);
            refBase = findFile(refDir, usePass2Genome ? HALVADE_STAR_SUFFIX_P2 : HALVADE_STAR_SUFFIX_P1, true);
            boolean foundExisting = (refBase != null);
            if (!foundExisting) {
                refBase = refDir + id + "-star/";
                //make dir
                File makeRefDir = new File(refBase);
                makeRefDir.mkdir();
            }
            Logger.DEBUG("STAR dir: " + refBase);
            if (!usePass2Genome) {
                for (String suffix : STAR_REF_FILES) {
                    attemptDownloadFileFromHDFS(context, fs, HDFSRef + suffix, refBase + suffix, RETRIES);
                }
                for (String suffix : STAR_REF_OPTIONAL_FILES) {
                    if (fs.exists(new Path(HDFSRef + suffix)))
                        attemptDownloadFileFromHDFS(context, fs, HDFSRef + suffix, refBase + suffix, RETRIES);
                }
            }
            Logger.INFO("FINISHED downloading the complete reference index to local scratch");
            if (!foundExisting) {
                File f = new File(refBase + (usePass2Genome ? HALVADE_STAR_SUFFIX_P2 : HALVADE_STAR_SUFFIX_P1));
                f.createNewFile();
            }
            bytes.clear();
            bytes.putInt(DEFAULT_LOCK_VAL).flip();
            lock.forceWrite(bytes);
        }

    } catch (InterruptedException ex) {
        Logger.EXCEPTION(ex);
    } finally {
        lock.releaseLock();
    }
    if (refBase == null)
        refBase = findFile(refDir, usePass2Genome ? HALVADE_STAR_SUFFIX_P2 : HALVADE_STAR_SUFFIX_P1, true);
    return refBase;
}

From source file:be.ugent.intec.halvade.utils.HalvadeFileUtils.java

License:Open Source License

public static String[] downloadSites(TaskInputOutputContext context, String id)
        throws IOException, URISyntaxException, InterruptedException {
    Configuration conf = context.getConfiguration();
    String tmpDir = HalvadeConf.getScratchTempDir(conf);
    String refDir = HalvadeConf.getRefDirOnScratch(conf);
    String HDFSsites[] = HalvadeConf.getKnownSitesOnHDFS(conf);
    String[] localSites = new String[HDFSsites.length];
    if (!refDir.endsWith("/"))
        refDir = refDir + "/";
    HalvadeFileLock lock = new HalvadeFileLock(context, refDir, DBSNP_LOCK);
    String refBase = null;
    try {
        lock.getLock();

        ByteBuffer bytes = ByteBuffer.allocate(4);
        if (lock.read(bytes) > 0) {
            bytes.flip();
            long val = bytes.getInt();
            if (val == DEFAULT_LOCK_VAL)
                Logger.DEBUG("dbSNP has been downloaded to local scratch: " + val);
            else {
                Logger.INFO("downloading missing dbSNP to local scratch");
                refBase = findFile(refDir, HALVADE_DBSNP_SUFFIX, true);
                boolean foundExisting = (refBase != null);
                if (!foundExisting) {
                    refBase = refDir + id + "-dbsnp/";
                    //make dir
                    File makeRefDir = new File(refBase);
                    makeRefDir.mkdir();
                }
                Logger.DEBUG("dbSNP dir: " + refBase);

                for (int i = 0; i < HDFSsites.length; i++) {
                    String fullName = HDFSsites[i];
                    String name = fullName.substring(fullName.lastIndexOf('/') + 1);
                    Logger.DEBUG("Downloading " + name);
                    FileSystem fs = FileSystem.get(new URI(fullName), conf);
                    attemptDownloadFileFromHDFS(context, fs, fullName, refBase + name, RETRIES);
                    localSites[i] = refBase + name;
                    // attempt to download .idx file
                    if (!foundExisting && fs.exists(new Path(fullName + ".idx")))
                        attemptDownloadFileFromHDFS(context, fs, fullName + ".idx", refBase + name + ".idx",
                                RETRIES);
                }

                Logger.INFO("finished downloading the new sites to local scratch");
                if (!foundExisting) {
                    File f = new File(refBase + HALVADE_DBSNP_SUFFIX);
                    f.createNewFile();
                }
                bytes.clear();
                bytes.putInt(DEFAULT_LOCK_VAL).flip();
                lock.forceWrite(bytes);
            }
        } else {
            Logger.INFO("downloading missing dbSNP to local scratch");
            refBase = findFile(refDir, HALVADE_DBSNP_SUFFIX, true);
            boolean foundExisting = (refBase != null);
            if (!foundExisting) {
                refBase = refDir + id + "-dbsnp/";
                //make dir
                File makeRefDir = new File(refBase);
                makeRefDir.mkdir();
            }
            Logger.DEBUG("dbSNP dir: " + refBase);

            for (int i = 0; i < HDFSsites.length; i++) {
                String fullName = HDFSsites[i];
                String name = fullName.substring(fullName.lastIndexOf('/') + 1);
                Logger.DEBUG("Downloading " + name);
                FileSystem fs = FileSystem.get(new URI(fullName), conf);
                attemptDownloadFileFromHDFS(context, fs, fullName, refBase + name, RETRIES);
                localSites[i] = refBase + name;
                // attempt to download .idx file
                if (!foundExisting && fs.exists(new Path(fullName + ".idx")))
                    attemptDownloadFileFromHDFS(context, fs, fullName + ".idx", refBase + name + ".idx",
                            RETRIES);
            }

            Logger.INFO("finished downloading the new sites to local scratch");
            if (!foundExisting) {
                File f = new File(refBase + HALVADE_DBSNP_SUFFIX);
                f.createNewFile();
            }
            bytes.clear();
            bytes.putInt(DEFAULT_LOCK_VAL).flip();
            lock.forceWrite(bytes);
        }
    } catch (InterruptedException ex) {
        Logger.EXCEPTION(ex);
    } finally {
        lock.releaseLock();
    }
    if (refBase == null) {
        refBase = findFile(refDir, HALVADE_DBSNP_SUFFIX, true);
        File dir = new File(refBase);
        File[] directoryListing = dir.listFiles();
        if (directoryListing != null) {
            int found = 0;
            for (int i = 0; i < HDFSsites.length; i++) {
                String fullName = HDFSsites[i];
                String name = fullName.substring(fullName.lastIndexOf('/') + 1);
                localSites[i] = refBase + name;
                if ((new File(localSites[i])).exists())
                    found++;
                else
                    Logger.DEBUG(name + " not found in local scratch");
            }
            if (found != HDFSsites.length) {
                throw new IOException(
                        refBase + " has different number of files: " + found + " vs " + localSites.length);
            }
        } else {
            throw new IOException(refBase + " has no files");
        }
    }
    return localSites;
}

From source file:com.asakusafw.runtime.stage.output.StageOutputDriver.java

License:Apache License

private static Map<String, ResultOutput<?>> prepareSinks(TaskInputOutputContext<?, ?, ?, ?> context) {
    assert context != null;
    Map<String, ResultOutput<?>> results = new HashMap<>();
    Configuration conf = context.getConfiguration();
    for (String name : conf.getStringCollection(K_NAMES)) {
        results.put(name, null);
    }
    return results;
}

From source file:com.cloudera.oryx.als.computation.iterate.row.YState.java

License:Open Source License

synchronized void initialize(TaskInputOutputContext<?, ?, ?, ?> context, int currentPartition,
        int numPartitions) {

    Configuration conf = context.getConfiguration();

    LongSet expectedIDs;
    try {
        expectedIDs = ComputationDataUtils.readExpectedIDsFromPartition(currentPartition, numPartitions,
                conf.get(RowStep.POPULAR_KEY), context, conf);
    } catch (IOException e) {
        throw new IllegalStateException(e);
    }

    String yKey = conf.get(RowStep.Y_KEY_KEY);
    log.info("Reading X or Y from {}", yKey);

    Y = new LongObjectMap<float[]>();

    Iterable<MatrixRow> in;
    try {
        in = new AvroFileSource<MatrixRow>(Namespaces.toPath(yKey), (AvroType<MatrixRow>) ptype).read(conf);
    } catch (IOException e) {
        throw new IllegalStateException(e);
    }

    RealMatrix theYTY = null;
    int dimension = 0;
    long count = 0;
    for (MatrixRow record : in) {
        long keyID = record.getRowId();
        float[] vector = record.getValues();
        Preconditions.checkNotNull(vector, "Vector was null for %s?", keyID);

        if (theYTY == null) {
            dimension = vector.length;
            theYTY = new Array2DRowRealMatrix(dimension, dimension);
        }
        for (int row = 0; row < dimension; row++) {
            double rowValue = vector[row];
            for (int col = 0; col < dimension; col++) {
                theYTY.addToEntry(row, col, rowValue * vector[col]);
            }
        }

        if (expectedIDs == null || expectedIDs.contains(keyID)) {
            Y.put(keyID, vector);
        }

        if (++count % 1000 == 0) {
            context.progress();
        }
    }

    Preconditions.checkNotNull(theYTY);
    YTY = theYTY;
}

From source file:com.moz.fiji.mapreduce.impl.DirectFijiTableWriterContext.java

License:Apache License

/**
 * Constructs a new context that can write cells directly to a Fiji table.
 *
 * @param hadoopContext is the Hadoop {@link TaskInputOutputContext} that will be used to perform
 *     the writes.
 * @throws IOException on I/O error.
 */
public DirectFijiTableWriterContext(TaskInputOutputContext<?, ?, ?, ?> hadoopContext) throws IOException {
    super(hadoopContext);
    final Configuration conf = new Configuration(hadoopContext.getConfiguration());
    final FijiURI outputURI = FijiURI.newBuilder(conf.get(FijiConfKeys.FIJI_OUTPUT_TABLE_URI)).build();
    mFiji = Fiji.Factory.open(outputURI, conf);
    mTable = mFiji.openTable(outputURI.getTable());
    mWriter = mTable.getWriterFactory().openBufferedWriter();
    mEntityIdFactory = EntityIdFactory.getFactory(mTable.getLayout());
}
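
A hedged usage sketch, assuming only the constructor shown above: the context is typically built in a task's setup() method, since Mapper.Context and Reducer.Context are TaskInputOutputContext instances. The class name ExampleFijiMapper is hypothetical.

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical example class; only the constructor shown above is assumed.
public class ExampleFijiMapper extends Mapper<LongWritable, Text, Text, Text> {

    private DirectFijiTableWriterContext mWriterContext;

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        // Mapper.Context is a TaskInputOutputContext, so it can be passed directly;
        // the constructor copies the job Configuration via context.getConfiguration()
        // and opens the table named by FIJI_OUTPUT_TABLE_URI.
        mWriterContext = new DirectFijiTableWriterContext(context);
    }
}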

From source file:com.moz.fiji.mapreduce.impl.FijiTableContextFactory.java

License:Apache License

/**
 * Instantiates the configured FijiTableContext.
 *
 * @param taskContext Hadoop task context.
 * @return the configured FijiTableContext.
 * @throws IOException on I/O error.
 */
public static FijiTableContext create(TaskInputOutputContext taskContext) throws IOException {
    final Configuration conf = taskContext.getConfiguration();
    final String className = conf.get(FijiConfKeys.FIJI_TABLE_CONTEXT_CLASS);
    if (className == null) {
        throw new IOException(String.format("FijiTableContext class missing from configuration (key '%s').",
                FijiConfKeys.FIJI_TABLE_CONTEXT_CLASS));
    }

    Throwable throwable = null;
    try {
        final Class<?> genericClass = Class.forName(className);
        final Class<? extends FijiTableContext> klass = genericClass.asSubclass(FijiTableContext.class);
        final Constructor<? extends FijiTableContext> constructor = klass
                .getConstructor(TaskInputOutputContext.class);
        final FijiTableContext context = constructor.newInstance(taskContext);
        return context;
    } catch (ClassCastException cce) {
        throwable = cce;
    } catch (ClassNotFoundException cnfe) {
        throwable = cnfe;
    } catch (NoSuchMethodException nsme) {
        throwable = nsme;
    } catch (InvocationTargetException ite) {
        throwable = ite;
    } catch (IllegalAccessException iae) {
        throwable = iae;
    } catch (InstantiationException ie) {
        throwable = ie;
    }
    throw new IOException(String.format("Error instantiating FijiTableContext class '%s': %s.", className,
            throwable.getMessage()), throwable);
}
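
And a brief, hedged call-site sketch, assuming only the create method shown above; taskContext stands for whatever TaskInputOutputContext the surrounding task code already holds.

// Hypothetical call site inside a task: the configuration key
// FijiConfKeys.FIJI_TABLE_CONTEXT_CLASS must already name a concrete
// FijiTableContext subclass with a (TaskInputOutputContext) constructor.
FijiTableContext tableContext = FijiTableContextFactory.create(taskContext);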