Example usage for org.apache.hadoop.mapreduce JobContext getJobName

Introduction

On this page you can find example usage for org.apache.hadoop.mapreduce JobContext getJobName.

Prototype

public String getJobName();

Document

Get the user-specified job name.
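
Before the examples below, here is a minimal sketch of how the job name is typically set on the driver side and read back through JobContext inside a task. The class name, job name, and mapper are assumptions for illustration, not taken from the listed sources.

// Minimal sketch (assumed names): the driver sets the job name, a task reads it back.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;

public class JobNameExample {

    public static class LoggingMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
        @Override
        protected void setup(Context context) {
            // Mapper.Context extends JobContext, so the user-specified job name is available here.
            System.out.println("Running job: " + context.getJobName());
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // The name passed here is what JobContext#getJobName() later returns.
        Job job = Job.getInstance(conf, "job-name-example");
        job.setJarByClass(JobNameExample.class);
        job.setMapperClass(LoggingMapper.class);
        // Input/output paths and the rest of the configuration are omitted in this sketch.
    }
}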

Usage

From source file:org.cloudgraph.hbase.mapreduce.GraphServiceDelegate.java

License:Apache License

@Override
public void commit(DataGraph graph, JobContext jobContext) throws IOException {

    String jobName = this.getClass().getSimpleName();
    if (jobContext != null)
        jobName = jobContext.getJobName();
    SnapshotMap snapshotMap = new SnapshotMap(new Timestamp((new Date()).getTime()));
    MutationCollector collector = null;
    Connection connection = HBaseConnectionManager.instance().getConnection();
    try {
        collector = new GraphMutationCollector(this.context, snapshotMap, jobName);

        // FIXME: if an exception happens here we don't have table writers
        // to close
        // as required by the 1.0.0 HBase client API. Will cause resource
        // bleed
        // Map<TableWriter, List<Row>> mutations = new HashMap<TableWriter,
        // List<Row>>();
        Map<TableWriter, Map<String, Mutations>> mutations = new HashMap<>();
        try {
            mutations = collector.collectChanges(graph, connection);
        } catch (IllegalAccessException e) {
            throw new GraphServiceException(e);
        }

        TableWriter[] tableWriters = new TableWriter[mutations.keySet().size()];
        mutations.keySet().toArray(tableWriters);
        GraphMutationWriter writer = new GraphMutationWriter();
        try {
            writer.writeChanges(tableWriters, mutations, snapshotMap, jobName);
        } finally {
            for (TableWriter tableWriter : tableWriters)
                tableWriter.close();
        }
        List<DataObject> changedObjects = graph.getChangeSummary().getChangedDataObjects();
        for (DataObject dataObject : changedObjects)
            if (!graph.getChangeSummary().isDeleted(dataObject))
                ((PlasmaNode) dataObject).getDataObject().reset(snapshotMap, jobName);
        graph.getChangeSummary().endLogging();
        graph.getChangeSummary().beginLogging();
    } finally {
        try {
            connection.close();
        } catch (IOException e) {
            log.error(e.getMessage(), e);
        }
        if (collector != null)
            collector.close();
    }
}

From source file:org.cloudgraph.hbase.mapreduce.GraphServiceDelegate.java

License:Apache License

@Override
public void commit(DataGraph[] graphs, JobContext jobContext) throws IOException {
    String jobName = this.getClass().getSimpleName();
    if (jobContext != null)
        jobName = jobContext.getJobName();
    SnapshotMap snapshotMap = new SnapshotMap(new Timestamp((new Date()).getTime()));
    // Map<TableWriter, List<Row>> mutations = new HashMap<TableWriter,
    // List<Row>>();
    Map<TableWriter, Map<String, Mutations>> mutations = new HashMap<>();
    MutationCollector collector = null;
    Connection connection = HBaseConnectionManager.instance().getConnection();
    try {
        collector = new GraphMutationCollector(this.context, snapshotMap, jobName);
        try {
            mutations = collector.collectChanges(graphs, connection);
        } catch (IllegalAccessException e) {
            throw new GraphServiceException(e);
        }
        TableWriter[] tableWriters = new TableWriter[mutations.keySet().size()];
        mutations.keySet().toArray(tableWriters);

        GraphMutationWriter writer = new GraphMutationWriter();
        try {
            writer.writeChanges(tableWriters, mutations, snapshotMap, jobName);
        } finally {
            for (TableWriter tableWriter : tableWriters)
                tableWriter.close();
        }
        for (DataGraph graph : graphs) {
            List<DataObject> changedObjects = graph.getChangeSummary().getChangedDataObjects();
            for (DataObject dataObject : changedObjects)
                if (!graph.getChangeSummary().isDeleted(dataObject))
                    ((PlasmaNode) dataObject).getDataObject().reset(snapshotMap, jobName);
            graph.getChangeSummary().endLogging();
            graph.getChangeSummary().beginLogging();
        }
    } finally {
        try {
            connection.close();
        } catch (IOException e) {
            log.error(e.getMessage(), e);
        }
        if (collector != null)
            collector.close();
    }
}

From source file:org.cloudgraph.hbase.service.HBaseGraphService.java

License:Apache License

@Override
public void commit(DataGraph graph, JobContext context) throws IOException {
    String username = "graph-service";
    if (context != null)
        username = context.getJobName();
    this.commit(graph, username);
}

From source file:org.cloudgraph.hbase.service.HBaseGraphService.java

License:Apache License

@Override
public void commit(DataGraph[] graphs, JobContext context) throws IOException {
    String username = "graph-service";
    if (context != null)
        username = context.getJobName();
    this.commit(graphs, username);
}
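
The four CloudGraph examples above share the same convention: when a JobContext is available, its user-specified job name is used; otherwise a fixed default is substituted. The stand-alone sketch below isolates that pattern; the helper class and method names are assumptions, not CloudGraph API.

import org.apache.hadoop.mapreduce.JobContext;

// Hypothetical helper illustrating the null-safe fallback used in the examples above.
public final class JobNames {

    private JobNames() {
    }

    // Returns the user-specified job name, or the given default when no JobContext is available.
    public static String nameOrDefault(JobContext jobContext, String defaultName) {
        return jobContext != null ? jobContext.getJobName() : defaultName;
    }
}

A call site would then read, for example: String jobName = JobNames.nameOrDefault(jobContext, getClass().getSimpleName());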

From source file:org.opencloudengine.flamingo.mapreduce.core.AbstractJob.java

License:Apache License

/**
 * Builds the name of a Hadoop Job composed of the given Mapper and Reducer.
 *
 * @param job     Hadoop Job
 * @param mapper  {@link org.apache.hadoop.mapreduce.Mapper}
 * @param reducer {@link org.apache.hadoop.mapreduce.Reducer}
 * @return Job Name
 */
private String getCustomJobName(JobContext job, Class<? extends Mapper> mapper,
        Class<? extends Reducer> reducer) {
    StringBuilder name = new StringBuilder(100);
    String customJobName = job.getJobName();
    if (customJobName == null || customJobName.trim().length() == 0) {
        name.append(getClass().getSimpleName());
    } else {
        name.append(customJobName);
    }
    return name.toString();
}

From source file:org.opencloudengine.flamingo.mapreduce.core.AbstractJob.java

License:Apache License

/**
 * Builds the name of a Hadoop Job that uses only the given Mapper.
 *
 * @param job    Hadoop Job
 * @param mapper {@link org.apache.hadoop.mapreduce.Mapper}
 * @return Job Name
 */
private String getCustomJobName(JobContext job, Class<? extends Mapper> mapper) {
    StringBuilder name = new StringBuilder(100);
    String customJobName = job.getJobName();
    if (customJobName == null || customJobName.trim().length() == 0) {
        name.append(getClass().getSimpleName());
    } else {
        name.append(customJobName);
    }
    return name.toString();
}
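
A getCustomJobName-style helper is typically applied back to the job before submission. The sketch below shows the same fallback logic in a driver and applies the result with Job#setJobName; the driver class name is an assumption for illustration.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

// Hypothetical driver showing how a derived name would be applied back to the job.
public class NamedJobDriver {

    public static void main(String[] args) throws Exception {
        // No name is passed here, so getJobName() returns an empty string by default.
        Job job = Job.getInstance(new Configuration());

        String customJobName = job.getJobName();
        if (customJobName == null || customJobName.trim().isEmpty()) {
            // Same fallback as AbstractJob.getCustomJobName(): use a class-derived name.
            customJobName = NamedJobDriver.class.getSimpleName();
        }
        job.setJobName(customJobName);
        System.out.println("Submitting job: " + job.getJobName());
    }
}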