Example usage for org.apache.hadoop.mapred JobConf get

Introduction

This page shows example usage of the org.apache.hadoop.mapred.JobConf method get(String name).

Prototype

public String get(String name) 

Document

Get the value of the name property; returns null if no such property exists.
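
For orientation, here is a minimal sketch of the call itself (the property names are hypothetical): get(String) returns the stored string, or null when the key is absent, and the two-argument overload inherited from Configuration supplies a default instead.

JobConf conf = new JobConf();
conf.set("example.jdbc.url", "jdbc:db2://host:50000/SAMPLE"); // hypothetical key

String url = conf.get("example.jdbc.url");        // returns the stored value
String missing = conf.get("example.no.such.key"); // returns null

// Inherited from Configuration: fall back to a default when the key is absent.
String rate = conf.get("example.sampling.rate", "0.0001");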

Usage

From source file: com.ibm.jaql.io.hadoop.Db2InputFormat.java

License: Apache License

protected void init(JobConf conf) throws IOException, SQLException {
    String url = conf.get(URL_KEY);
    String jsonRec = conf.get(PROPERTIES_KEY);
    Class<? extends Driver> driverClass = conf.getClass(DRIVER_KEY, null, Driver.class);
    if (driverClass == null) {
        throw new RuntimeException("jdbc driver class not found: " + conf.get(DRIVER_KEY));
    }

    Driver driver;
    try {
        driver = driverClass.newInstance();
    } catch (Exception e) {
        throw new UndeclaredThrowableException(e);// IOException("Error constructing jdbc driver", e);
    }

    Properties props = new Properties();

    if (jsonRec != null && !"".equals(jsonRec)) {
        try {
            JsonParser parser = new JsonParser(new StringReader(jsonRec));
            JsonRecord jrec = (JsonRecord) parser.JsonVal();
            for (Entry<JsonString, JsonValue> f : jrec) {
                JsonString key = f.getKey();
                JsonValue value = f.getValue();
                // Properties does not accept null values, so skip JSON nulls.
                if (value != null) {
                    props.setProperty(key.toString(), value.toString());
                }
            }
        } catch (ParseException pe) {
            throw new UndeclaredThrowableException(pe); // IOException("couldn't parse "+PROPERTIES_KEY+" = "+jsonRec, pe);
        }
    }

    // conn = DriverManager.getConnection(url, props);
    conn = driver.connect(url, props);
}

From source file: com.ibm.jaql.io.hadoop.Db2InputFormat.java

License: Apache License

public InputSplit[] getSplits(JobConf conf, int numSplits) throws IOException {
    try {
        String dataQuery = conf.get(QUERY_KEY);
        String splitQuery = conf.get(SPLIT_QUERY_KEY);
        // String col        = conf.get(SPLIT_COLUMN_KEY);
        // String rate       = conf.get(SPLIT_SAMPLING_RATE_KEY, "0.0001");

        init(conf);

        //      String sample = "";
        //      if( ! BigDecimal.ONE.equals(new BigDecimal(rate)) )
        //      {
        //        sample = " tablesample system(100*decimal('"+rate+"'))";
        //      }

        ArrayList<InputSplit> splits = new ArrayList<InputSplit>();
        if (numSplits <= 1) {
            // we only want one split
            splits.add(new JdbcSplit(dataQuery, null, null));
            return splits.toArray(new InputSplit[splits.size()]);
        }

        // Make sure that the data query is executable and get the key column type.
        ResultSetMetaData meta = conn.prepareStatement(dataQuery).getMetaData();
        int dataColCount = meta.getColumnCount();
        // int keyType = meta.getColumnType(1);
        // KeyConverter converter = makeKeyConverter(keyType);

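        // Wrap the data query in a CTE named T so that a predicate on the
        // split key (the first column) can be appended to it below.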
        String query = "with T";
        String sep = "(";
        for (int i = 1; i <= dataColCount; i++) {
            query += sep + meta.getColumnName(i);
            sep = ",";
        }
        query += ") as (" + dataQuery + ") " + " select * from T ";
        String keycol = meta.getColumnName(1);
        dataQuery = query;

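        // Rank the distinct split-key values and keep every (count/numSplits)-th
        // one, producing numSplits-1 boundary keys for the range predicates below.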
        query = "with S1(c) as (" + splitQuery + "), " + "     S2(c) as (select distinct c from S1), "
                + "     S3(c,r) as (select c, dense_rank() over (order by c) from S2), "
                + "     S4(c,r,i) as (select c, r, mod(r,(select count(*) from S2) / " + numSplits
                + ") from S3), " + "     S5(c) as (select c from S4 where i = 0) "
                + "select c from S5 order by c fetch first " + (numSplits - 1) + " rows only";

        Statement stmt = conn.createStatement();
        final ResultSet rs = stmt.executeQuery(query);

        if (!rs.next()) {
            splits.add(new JdbcSplit(dataQuery, null, null));
        } else {
            String prevKey = rs.getString(1);
            // prevKey = converter.convert(prevKey);
            // query = dataQuery + " where "+keycol+" <= "+prevKey;
            query = dataQuery + " where " + keycol + " <= ?";
            splits.add(new JdbcSplit(query, null, prevKey));

            while (rs.next()) {
                String key = rs.getString(1);
                // key = converter.convert(key);
                // query = dataQuery + " where "+keycol+" > "+prevKey+" and "+keycol+" <= "+key;
                query = dataQuery + " where " + keycol + " > ? and " + keycol + " <= ?";
                splits.add(new JdbcSplit(query, prevKey, key));
                prevKey = key;
            }

            // query = dataQuery + " where "+keycol+" > "+prevKey;
            query = dataQuery + " where " + keycol + " > ?";
            splits.add(new JdbcSplit(query, prevKey, null));
        }

        rs.close();
        stmt.close();
        conn.close();

        return splits.toArray(new InputSplit[splits.size()]);
    } catch (SQLException e) {
        throw new UndeclaredThrowableException(e); // IOException(e);
    }
}

From source file: com.ibm.jaql.io.hbase.JaqlTableInputFormat.java

License: Apache License

public void configure(JobConf job) {
    // table name
    tableName = job.get(JOB_TABLE).getBytes();

    // column names
    String colArg = job.get(JOB_COLUMNS);
    String[] splitArr = colArg.split(" ");
    columnNames = new JsonString[splitArr.length];
    for (int i = 0; i < columnNames.length; i++) {
        columnNames[i] = new JsonString(splitArr[i]);
    }

    // option arguments

    // low key
    String lowKeyArg = job.get(JOB_LOWKEY);
    if (lowKeyArg != null)
        lowKey = new JsonString(lowKeyArg);

    // high key
    String highKeyArg = job.get(JOB_HIGHKEY);
    if (highKeyArg != null)
        highKey = new JsonString(highKeyArg);

    // timestamp
    String timestampArg = job.get(JOB_TS);
    if (timestampArg != null)
        timeStamp = Long.parseLong(timestampArg);

    // setup the table interface
    try {
        table = new HTable(new HBaseConfiguration(), tableName);
    } catch (Exception e) {
        LOG.error(e);
    }
}

From source file: com.ibm.jaql.io.hbase.JaqlTableInputFormat.java

License: Apache License

public void validateInput(JobConf job) throws IOException {
    // expecting one table name

    String tableArg = job.get(JOB_TABLE);
    if (tableArg == null || tableArg.length() == 0) {
        throw new IOException("expecting a table name");
    }

    // expecting at least one column

    String colArg = job.get(JOB_COLUMNS);
    if (colArg == null || colArg.length() == 0) {
        throw new IOException("expecting at least one column");
    }
}

From source file: com.ibm.jaql.io.hbase.JaqlTableOutputFormat.java

License: Apache License

@Override
public RecordWriter<JsonHolder, JsonHolder> getRecordWriter(FileSystem ignored, JobConf job, String name,
        Progressable progress) throws IOException {

    // expecting exactly one table name

    String tableName = job.get(TableOutputFormat.OUTPUT_TABLE);

    HTable table = null;
    try {
        table = new HTable(new HBaseConfiguration(), tableName);
    } catch (Exception e) {
        LOG.error(e);
    }
    return new JaqlTableRecordWriter(table);
}

From source file: com.ibm.jaql.io.hbase.JaqlTableOutputFormat.java

License: Apache License

@Override
public void checkOutputSpecs(FileSystem ignored, JobConf job)
        throws FileAlreadyExistsException, InvalidJobConfException, IOException {

    String tableName = job.get(TableOutputFormat.OUTPUT_TABLE);
    if (tableName == null) {
        throw new IOException("Must specify table name");
    }
}

From source file: com.ibm.jaql.lang.expr.core.RegisterExceptionHandler.java

License: Apache License

public static void readConf(String name, JobConf conf) throws Exception {
    String s = conf.get(name);
    JsonParser p = new JsonParser();
    JsonRecord opts = (JsonRecord) p.parse(s);
    JsonNumber tval = (JsonNumber) opts.get(ERROR_THRESH_FIELD_NAME);
    if (tval != null) {
        ExceptionHandler handler = new ThresholdExceptionHandler(tval.intValue());
        JaqlUtil.setExceptionHandler(handler);
    }
}

From source file: com.ibm.jaql.lang.expr.hadoop.ReadConfExpr.java

License: Apache License

@Override
public JsonValue eval(Context context) throws Exception {
    JobConf conf = Globals.getJobConf();
    if (conf == null) {
        conf = new JobConf();
        Globals.setJobConf(conf);
    }

    JsonString name = (JsonString) exprs[0].eval(context);
    String val = conf.get(name.toString());
    JsonValue dflt = null;
    if (val == null) {
        dflt = exprs[1].eval(context);
        return dflt;
    }

    return new JsonString(val);
}

From source file: com.inmobi.conduit.local.LocalStreamServiceTest.java

License: Apache License

private ConduitConfig buildTestConduitConfig() throws Exception {
    JobConf conf = super.CreateJobConf();
    return buildTestConduitConfig(conf.get("mapred.job.tracker"), "file:///tmp", "conduit", "48", "24");
}

From source file: com.inmobi.databus.local.LocalStreamServiceTest.java

License: Apache License

private DatabusConfig buildTestDatabusConfig() throws Exception {
    JobConf conf = super.CreateJobConf();
    return buildTestDatabusConfig(conf.get("mapred.job.tracker"), "file:///tmp", "databus", "48", "24");
}