Example usage for org.apache.hadoop.conf.Configuration.getStringCollection

Introduction

On this page you can find example usage of org.apache.hadoop.conf.Configuration.getStringCollection.

Prototype

public Collection<String> getStringCollection(String name) 

Document

Get the comma-delimited values of the name property as a collection of Strings.
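
For illustration, here is a minimal, self-contained sketch of the method's behavior (the property name my.list and the class name below are hypothetical and not taken from any of the projects listed on this page):

import java.util.Collection;

import org.apache.hadoop.conf.Configuration;

public class GetStringCollectionSketch {
    public static void main(String[] args) {
        // Start from an empty Configuration without loading default resources.
        Configuration conf = new Configuration(false);
        conf.set("my.list", "alpha,beta,gamma");

        // Splits the comma-delimited value into a Collection<String>: [alpha, beta, gamma]
        Collection<String> values = conf.getStringCollection("my.list");
        System.out.println(values);

        // An unset property yields an empty collection rather than null.
        System.out.println(conf.getStringCollection("missing.key").isEmpty()); // true
    }
}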

Usage

From source file: andromache.config.CassandraConfigHelper.java

License: Apache License

public static Collection<String> getInputColumnFamilies(Configuration conf) {
    return conf.getStringCollection(INPUT_COLUMNFAMILIES_CONFIG);
}

From source file: cascading.scheme.DeprecatedAvroScheme.java

License: Apache License

private void addAvroSerializations(Configuration conf) {
    Collection<String> serializations = conf.getStringCollection("io.serializations");
    if (!serializations.contains(AvroSerialization.class.getName())) {
        serializations.add(AvroSerialization.class.getName());
        serializations.add(AvroSpecificRecordSerialization.class.getName());
    }

    conf.setStrings("io.serializations", serializations.toArray(new String[serializations.size()]));
}

From source file: cascading.tap.hadoop.DistCacheTap.java

License: Open Source License

@Override
protected void addLocalCacheFiles(Configuration conf, URI uri) {
    String key = CASCADING_LOCAL_RESOURCES + Tap.id(this);
    Collection<String> resources = conf.getStringCollection(key);

    if (resources == null)
        resources = new ArrayList<>();

    resources.add(uri.toString());

    conf.setStrings(key, resources.toArray(new String[resources.size()]));
}

From source file: com.asakusafw.runtime.stage.output.StageOutputDriver.java

License: Apache License

private static Map<String, ResultOutput<?>> prepareSinks(TaskInputOutputContext<?, ?, ?, ?> context) {
    assert context != null;
    Map<String, ResultOutput<?>> results = new HashMap<>();
    Configuration conf = context.getConfiguration();
    for (String name : conf.getStringCollection(K_NAMES)) {
        results.put(name, null);
    }
    return results;
}

From source file: com.asakusafw.runtime.stage.output.StageOutputDriver.java

License: Apache License

private static void addOutput(Job job, String name, Class<?> formatClass, Class<?> keyClass,
        Class<?> valueClass) {
    assert job != null;
    assert name != null;
    assert formatClass != null;
    assert keyClass != null;
    assert valueClass != null;
    if (isValidName(name) == false) {
        throw new IllegalArgumentException(MessageFormat.format("Output name \"{0}\" is not valid", name));
    }
    Configuration conf = job.getConfiguration();
    Set<String> names = new TreeSet<>(conf.getStringCollection(K_NAMES));
    if (names.contains(name)) {
        throw new IllegalArgumentException(
                MessageFormat.format("Output name \"{0}\" is already declared", name));
    }
    names.add(name);
    conf.setStrings(K_NAMES, names.toArray(new String[names.size()]));
    conf.setClass(getPropertyName(K_FORMAT_PREFIX, name), formatClass, OutputFormat.class);
    conf.setClass(getPropertyName(K_KEY_PREFIX, name), keyClass, Object.class);
    conf.setClass(getPropertyName(K_VALUE_PREFIX, name), valueClass, Object.class);
}

From source file: com.ci.backports.avro.mapreduce.AvroJob.java

License: Apache License

private static void addAvroSerialization(Configuration conf) {
    Collection<String> serializations = conf.getStringCollection("io.serializations");
    if (!serializations.contains(AvroSerialization.class.getName())) {
        serializations.add(AvroSerialization.class.getName());
        conf.setStrings("io.serializations", serializations.toArray(new String[0]));
    }
}

From source file: com.datasalt.pangool.tuplemr.serialization.TupleSerialization.java

License: Apache License

/**
 * Use this method to enable this serialization in Hadoop
 */
public static void enableSerialization(Configuration conf) {
    String serClass = TupleSerialization.class.getName();
    Collection<String> currentSers = conf.getStringCollection("io.serializations");

    if (currentSers.size() == 0) {
        conf.set("io.serializations", serClass);
        return;
    }

    // Check if it is already present
    if (!currentSers.contains(serClass)) {
        currentSers.add(serClass);
        conf.setStrings("io.serializations", currentSers.toArray(new String[] {}));
    }
}

From source file: com.datasalt.pangool.utils.AvroUtils.java

License: Apache License

public static void addAvroSerialization(Configuration conf) {
    Collection<String> serializations = conf.getStringCollection("io.serializations");
    if (!serializations.contains(AvroSerialization.class.getName())) {
        serializations.add(AvroSerialization.class.getName());
        conf.setStrings("io.serializations", serializations.toArray(new String[0]));
    }
}

From source file: com.facebook.hiveio.mapreduce.output.WritingTool.java

License: Apache License

/**
 * Adds the given values to a comma-delimited string collection property.
 *
 * @param conf   Configuration to update
 * @param name   name of the property to add to
 * @param values values to add to the collection
 */
private static void addToStringCollection(Configuration conf, String name,
        Collection<? extends String> values) {
    Collection<String> tmpfiles = conf.getStringCollection(name);
    tmpfiles.addAll(values);
    conf.setStrings(name, tmpfiles.toArray(new String[tmpfiles.size()]));
}

From source file: com.marklogic.mapreduce.NodeWriter.java

License: Apache License

public NodeWriter(Configuration conf, String host) {
    super(conf, host);
    String opTypeStr = conf.get(NODE_OPERATION_TYPE);
    if (opTypeStr == null || opTypeStr.isEmpty()) {
        throw new IllegalArgumentException(NODE_OPERATION_TYPE + " is not specified.");
    }
    NodeOpType opType = NodeOpType.valueOf(opTypeStr);
    Collection<String> nsCol = conf.getStringCollection(OUTPUT_NAMESPACE);
    StringBuilder buf = new StringBuilder();
    if (nsCol != null) {
        for (Iterator<String> nsIt = nsCol.iterator(); nsIt.hasNext();) {
            String ns = nsIt.next();
            buf.append('"').append(ns).append('"');
            if (nsIt.hasNext()) {
                buf.append(',');
            }
        }
    }
    query = opType.getQuery(buf.toString());
}