Example usage for org.apache.hadoop.mapred JobContextImpl getConfiguration

List of usage examples for org.apache.hadoop.mapred JobContextImpl getConfiguration

Introduction

On this page you can find example usage for org.apache.hadoop.mapred JobContextImpl getConfiguration.

Prototype

public Configuration getConfiguration() 

Document

Return the configuration for the job.
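
Below is a minimal, self-contained sketch (not taken from the examples on this page) showing how the Configuration returned by getConfiguration() might be used. The JobContextImpl instance is assumed to be supplied by the framework, and the printJobSettings helper and the chosen property defaults are illustrative assumptions only.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobContextImpl;

public class JobConfigurationExample {
    /** Hypothetical helper: reads standard job properties from the context's configuration. */
    static void printJobSettings(JobContextImpl jobCtx) {
        // getConfiguration() exposes the job's underlying Configuration.
        Configuration conf = jobCtx.getConfiguration();

        // Read standard MapReduce properties, falling back to defaults if unset.
        String jobName = conf.get("mapreduce.job.name", "<unnamed>");
        int reducers = conf.getInt("mapreduce.job.reduces", 1);

        System.out.println("Job name: " + jobName + ", reduce tasks: " + reducers);
    }
}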

Usage

From source file:org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2ReduceTask.java

License:Apache License

/** {@inheritDoc} */
@SuppressWarnings({ "ConstantConditions", "unchecked" })
@Override
public void run0(HadoopV2TaskContext taskCtx) throws IgniteCheckedException {
    OutputFormat outputFormat = null;
    Exception err = null;

    JobContextImpl jobCtx = taskCtx.jobContext();

    // Set mapper index for combiner tasks
    if (!reduce && taskCtx.taskInfo().hasMapperIndex())
        HadoopMapperUtils.mapperIndex(taskCtx.taskInfo().mapperIndex());
    else
        HadoopMapperUtils.clearMapperIndex();

    try {
        outputFormat = reduce || !taskCtx.job().info().hasReducer() ? prepareWriter(jobCtx) : null;

        Reducer reducer;

        if (reduce)
            reducer = ReflectionUtils.newInstance(jobCtx.getReducerClass(), jobCtx.getConfiguration());
        else
            reducer = ReflectionUtils.newInstance(jobCtx.getCombinerClass(), jobCtx.getConfiguration());

        try {
            reducer.run(new WrappedReducer().getReducerContext(hadoopContext()));

            if (!reduce)
                taskCtx.onMapperFinished();
        } finally {
            closeWriter();
        }

        commit(outputFormat);
    } catch (InterruptedException e) {
        err = e;

        Thread.currentThread().interrupt();

        throw new IgniteInterruptedCheckedException(e);
    } catch (Exception e) {
        err = e;

        throw new IgniteCheckedException(e);
    } finally {
        if (!reduce)
            HadoopMapperUtils.clearMapperIndex();

        if (err != null)
            abort(outputFormat);
    }
}

From source file:org.apache.ignite.internal.processors.hadoop.v2.GridHadoopV2ReduceTask.java

License:Apache License

/** {@inheritDoc} */
@SuppressWarnings({ "ConstantConditions", "unchecked" })
@Override
public void run0(GridHadoopV2TaskContext taskCtx) throws IgniteCheckedException {
    OutputFormat outputFormat = null;
    Exception err = null;

    JobContextImpl jobCtx = taskCtx.jobContext();

    try {
        outputFormat = reduce || !taskCtx.job().info().hasReducer() ? prepareWriter(jobCtx) : null;

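        // Instantiate the reducer for reduce tasks, or the combiner for map-side combine, using the job's configuration.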
        Reducer reducer = ReflectionUtils.newInstance(
                reduce ? jobCtx.getReducerClass() : jobCtx.getCombinerClass(), jobCtx.getConfiguration());

        try {
            reducer.run(new WrappedReducer().getReducerContext(hadoopContext()));
        } finally {
            closeWriter();
        }

        commit(outputFormat);
    } catch (InterruptedException e) {
        err = e;

        Thread.currentThread().interrupt();

        throw new IgniteInterruptedCheckedException(e);
    } catch (Exception e) {
        err = e;

        throw new IgniteCheckedException(e);
    } finally {
        if (err != null)
            abort(outputFormat);
    }
}

From source file:org.apache.ignite.internal.processors.hadoop.v2.HadoopV2ReduceTask.java

License:Apache License

/** {@inheritDoc} */
@SuppressWarnings({ "ConstantConditions", "unchecked" })
@Override
public void run0(HadoopV2TaskContext taskCtx) throws IgniteCheckedException {
    OutputFormat outputFormat = null;
    Exception err = null;

    JobContextImpl jobCtx = taskCtx.jobContext();

    try {
        outputFormat = reduce || !taskCtx.job().info().hasReducer() ? prepareWriter(jobCtx) : null;

        Reducer reducer;
        if (reduce)
            reducer = ReflectionUtils.newInstance(jobCtx.getReducerClass(), jobCtx.getConfiguration());
        else
            reducer = ReflectionUtils.newInstance(jobCtx.getCombinerClass(), jobCtx.getConfiguration());

        try {
            reducer.run(new WrappedReducer().getReducerContext(hadoopContext()));
        } finally {
            closeWriter();
        }

        commit(outputFormat);
    } catch (InterruptedException e) {
        err = e;

        Thread.currentThread().interrupt();

        throw new IgniteInterruptedCheckedException(e);
    } catch (Exception e) {
        err = e;

        throw new IgniteCheckedException(e);
    } finally {
        if (err != null)
            abort(outputFormat);
    }
}

From source file:org.gridgain.grid.kernal.processors.hadoop.v2.GridHadoopV2ReduceTask.java

License:Open Source License

/** {@inheritDoc} */
@SuppressWarnings({ "ConstantConditions", "unchecked" })
@Override
public void run0(GridHadoopV2TaskContext taskCtx) throws GridException {
    OutputFormat outputFormat = null;
    Exception err = null;

    JobContextImpl jobCtx = taskCtx.jobContext();

    try {
        outputFormat = reduce || !taskCtx.job().info().hasReducer() ? prepareWriter(jobCtx) : null;

        Reducer reducer = ReflectionUtils.newInstance(
                reduce ? jobCtx.getReducerClass() : jobCtx.getCombinerClass(), jobCtx.getConfiguration());

        try {
            reducer.run(new WrappedReducer().getReducerContext(hadoopContext()));
        } finally {
            closeWriter();
        }

        commit(outputFormat);
    } catch (InterruptedException e) {
        err = e;

        Thread.currentThread().interrupt();

        throw new GridInterruptedException(e);
    } catch (Exception e) {
        err = e;

        throw new GridException(e);
    } finally {
        if (err != null)
            abort(outputFormat);
    }
}