Example usage for org.apache.hadoop.util StringUtils COMMA_STR

Introduction

On this page you can find example usages of org.apache.hadoop.util.StringUtils.COMMA_STR, collected from open source projects.

Prototype

public static final String COMMA_STR = ","
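
COMMA_STR holds a single comma and is typically paired with StringUtils.escapeString and StringUtils.split, so that values which themselves contain commas survive a round trip. A minimal sketch (the class name is illustrative):

import org.apache.hadoop.util.StringUtils;

public class CommaStrDemo {
    public static void main(String[] args) {
        // Join two values, escaping any embedded commas first.
        String joined = StringUtils.escapeString("a,b") + StringUtils.COMMA_STR + StringUtils.escapeString("c");
        System.out.println(joined); // prints: a\,b,c

        // StringUtils.split splits on the unescaped commas only.
        for (String part : StringUtils.split(joined)) {
            System.out.println(StringUtils.unEscapeString(part));
        }
    }
}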

Usage

From source file:cascading.flow.hadoop.util.HadoopUtil.java

License:Open Source License

public static void addInputPath(Configuration conf, Path path) {
    Path workingDirectory = getWorkingDirectory(conf);
    path = new Path(workingDirectory, path);
    String dirStr = StringUtils.escapeString(path.toString());
    String dirs = conf.get("mapred.input.dir");
    conf.set("mapred.input.dir", dirs == null ? dirStr : dirs + StringUtils.COMMA_STR + dirStr);
}
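
The escaping matters because a path name may itself contain a comma. The value stored under mapred.input.dir can later be recovered with StringUtils.split, which splits only on unescaped commas. A hypothetical counterpart (getInputPaths is not part of HadoopUtil) might look like:

public static Path[] getInputPaths(Configuration conf) {
    String dirs = conf.get("mapred.input.dir", "");
    String[] list = StringUtils.split(dirs);
    Path[] result = new Path[list.length];
    for (int i = 0; i < list.length; i++) {
        // Undo the escaping applied by addInputPath above.
        result[i] = new Path(StringUtils.unEscapeString(list[i]));
    }
    return result;
}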

From source file:com.bianfeng.bfas.hive.io.RealtimeInputFormat2.java

License:Apache License

/**
 * Set the array of {@link Path}s as the list of inputs
 * for the map-reduce job.
 * 
 * @param conf Configuration of the job. 
 * @param inputPaths the {@link Path}s of the input directories/files 
 * for the map-reduce job.
 */
public static void setInputPaths(JobConf conf, Path... inputPaths) {
    Path path = new Path(conf.getWorkingDirectory(), inputPaths[0]);
    StringBuffer str = new StringBuffer(StringUtils.escapeString(path.toString()));
    for (int i = 1; i < inputPaths.length; i++) {
        str.append(StringUtils.COMMA_STR);
        path = new Path(conf.getWorkingDirectory(), inputPaths[i]);
        str.append(StringUtils.escapeString(path.toString()));
    }
    conf.set("mapred.input.dir", str.toString());
}

From source file:com.bianfeng.bfas.hive.io.RealtimeInputFormat2.java

License:Apache License

/**
 * Add a {@link Path} to the list of inputs for the map-reduce job.
 *
 * @param conf The configuration of the job 
 * @param path {@link Path} to be added to the list of inputs for 
 *            the map-reduce job.
 */
public static void addInputPath(JobConf conf, Path path) {
    path = new Path(conf.getWorkingDirectory(), path);
    String dirStr = StringUtils.escapeString(path.toString());
    String dirs = conf.get("mapred.input.dir");
    conf.set("mapred.input.dir", dirs == null ? dirStr : dirs + StringUtils.COMMA_STR + dirStr);
}

From source file:com.cloudera.recordservice.mr.RecordServiceConfig.java

License:Apache License

/**
 * Set the array of {@link Path}s as the list of inputs
 * for the map-reduce job.
 */
public static void setInputPaths(Configuration conf, Path... inputPaths) throws IOException {
    Path path = inputPaths[0].getFileSystem(conf).makeQualified(inputPaths[0]);
    StringBuffer str = new StringBuffer(StringUtils.escapeString(path.toString()));
    for (int i = 1; i < inputPaths.length; ++i) {
        str.append(StringUtils.COMMA_STR);
        path = inputPaths[i].getFileSystem(conf).makeQualified(inputPaths[i]);
        str.append(StringUtils.escapeString(path.toString()));
    }
    conf.set("mapred.input.dir", str.toString());
}
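
Unlike the earlier variants, which resolve relative paths against the job's working directory, this version uses makeQualified to expand each path into a fully qualified URI on its own FileSystem before joining the results with COMMA_STR.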

From source file:com.dinglicom.clouder.mapreduce.input.FileInputFormat.java

License:Apache License

/**
 * Set the array of {@link Path}s as the list of inputs
 * for the map-reduce job.
 * 
 * @param job The job to modify 
 * @param inputPaths the {@link Path}s of the input directories/files 
 * for the map-reduce job.
 */
public static void setInputPaths(Job job, Path... inputPaths) throws IOException {
    Configuration conf = job.getConfiguration();
    Path path = inputPaths[0].getFileSystem(conf).makeQualified(inputPaths[0]);
    StringBuffer str = new StringBuffer(StringUtils.escapeString(path.toString()));
    for (int i = 1; i < inputPaths.length; i++) {
        str.append(StringUtils.COMMA_STR);
        path = inputPaths[i].getFileSystem(conf).makeQualified(inputPaths[i]);
        str.append(StringUtils.escapeString(path.toString()));
    }
    conf.set(INPUT_DIR, str.toString());
}

From source file:com.ebay.nest.io.sede.ColumnProjectionUtils.java

License:Apache License

/**
 * Sets the read column ids (starting from zero) for RCFile's reader. Once a
 * column is included in the list, RCFile's reader will not skip its value.
 */
public static void appendReadColumnIDs(Configuration conf, List<Integer> ids) {
    String id = toReadColumnIDString(ids);
    if (id != null) {
        String old = conf.get(READ_COLUMN_IDS_CONF_STR, null);
        String newConfStr = id;
        if (old != null) {
            newConfStr = newConfStr + StringUtils.COMMA_STR + old;
        }

        setReadColumnIDConf(conf, newConfStr);
    }
}

From source file:com.ebay.nest.io.sede.ColumnProjectionUtils.java

License:Apache License

private static String toReadColumnIDString(List<Integer> ids) {
    String id = null;
    if (ids != null) {
        for (int i = 0; i < ids.size(); i++) {
            if (i == 0) {
                id = "" + ids.get(i);
            } else {
                id = id + StringUtils.COMMA_STR + ids.get(i);
            }
        }
    }
    return id;
}
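
A short sketch of how the two methods above combine; note that appendReadColumnIDs prepends each new COMMA_STR-joined list to the existing value (the id values are illustrative):

Configuration conf = new Configuration();
ColumnProjectionUtils.appendReadColumnIDs(conf, Arrays.asList(2, 0));
// the stored value is now "2,0"
ColumnProjectionUtils.appendReadColumnIDs(conf, Arrays.asList(5));
// the stored value is now "5,2,0": the new list is joined by COMMA_STR
// and prepended to the old value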

From source file:com.github.dryangkun.hbase.tidx.hive.HBaseSerDeHelper.java

License:Apache License

/**
 * Autogenerates the columns from the given serialization class
 *
 * @param tbl the hive table properties
 * @param columnsMapping the hbase columns mapping determining hbase column families and
 *          qualifiers
 * @param sb StringBuilder to form the list of columns
 * @throws IllegalArgumentException if any of the given arguments was null
 * */
public static void generateColumns(Properties tbl, List<ColumnMapping> columnsMapping, StringBuilder sb) {
    // Generate the columns according to the column mapping provided.
    // Note: the generated column names have the form family_name.qualifier_name.
    // If the qualifier name is null, each column is familyname_col[i], where i is
    // the index of the column, ranging from 0 to n-1 for a mapping of size n.
    // The filter function strips any characters other than letters and digits
    // from the column family and qualifier name, since the only special
    // character allowed in a column name is "_", which is used as the separator
    // between the column family and qualifier name.

    if (columnsMapping == null) {
        throw new IllegalArgumentException("columnsMapping cannot be null");
    }

    if (sb == null) {
        throw new IllegalArgumentException("StringBuilder cannot be null");
    }

    for (int i = 0; i < columnsMapping.size(); i++) {
        ColumnMapping colMap = columnsMapping.get(i);

        if (colMap.hbaseRowKey) {
            sb.append("key").append(StringUtils.COMMA_STR);
        } else if (colMap.qualifierName == null) {
            // this corresponds to a map<string,?>

            if (colMap.qualifierPrefix != null) {
                sb.append(filter(colMap.familyName)).append("_").append(filter(colMap.qualifierPrefix) + i)
                        .append(StringUtils.COMMA_STR);
            } else {
                sb.append(filter(colMap.familyName)).append("_").append("col" + i)
                        .append(StringUtils.COMMA_STR);
            }
        } else {
            // just an individual column
            sb.append(filter(colMap.familyName)).append("_").append(filter(colMap.qualifierName))
                    .append(StringUtils.COMMA_STR);
        }
    }

    // trim off the ending ",", if any
    trim(sb);

    if (LOG.isDebugEnabled()) {
        LOG.debug("Generated columns: [" + sb.toString() + "]");
    }
}
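
For example, a column mapping of :key,cf:q1,cf: (row key, one named qualifier, and a map-typed family with no qualifier) would generate key,cf_q1,cf_col2, with the trailing COMMA_STR removed by trim.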

From source file:com.github.dryangkun.hbase.tidx.hive.HBaseSerDeHelper.java

License:Apache License

/**
 * Auto-generates the key struct for composite keys
 *
 * @param compositeKeyTypes comma separated list of composite key types in order
 * @param sb StringBuilder object to construct the struct
 * */
private static void generateKeyStruct(String compositeKeyTypes, StringBuilder sb) {
    sb.append("struct<");

    // compositeKeyTypes is a comma-separated list of the parts of the
    // composite key, in the order in which they appear in the key.
    String[] keyTypes = compositeKeyTypes.split(",");

    for (int i = 0; i < keyTypes.length; i++) {
        sb.append("col" + i).append(":").append(keyTypes[i]).append(StringUtils.COMMA_STR);
    }

    // trim the trailing ","
    trim(sb);
    sb.append(">");
}
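
For example, a compositeKeyTypes value of "string,int" produces struct<col0:string,col1:int>, again with the trailing COMMA_STR trimmed before the closing ">" is appended.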

From source file:com.vertica.hadoop.FixedSplitFileInputFormat.java

License:Apache License

/**
 * Set the array of {@link Path}s as the list of inputs
 * for the map-reduce job.
 * 
 * @param conf Configuration of the job. 
 * @param inputPaths the {@link Path}s of the input directories/files 
 * for the map-reduce job.
 */
public static void setInputPaths(JobConf conf, Path... inputPaths) {
    Path path = new Path(conf.getWorkingDirectory(), inputPaths[0]);
    StringBuffer str = new StringBuffer(StringUtils.escapeString(path.toString()));
    for (int i = 1; i < inputPaths.length; i++) {
        str.append(StringUtils.COMMA_STR);
        path = new Path(conf.getWorkingDirectory(), inputPaths[i]);
        str.append(StringUtils.escapeString(path.toString()));
    }
    conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR, str.toString());
}