Example usage for org.apache.hadoop.io.Text.readString

Introduction

This page lists example usages of org.apache.hadoop.io.Text.readString.

Prototype

public static String readString(DataInput in) throws IOException 

Document

Reads a UTF-8 encoded string from the given DataInput.
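
Text.readString is the decoding counterpart of Text.writeString, which writes a vint length followed by the string's UTF-8 bytes. A minimal round-trip sketch (not taken from any of the sources below; class and variable names are illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.Text;

public class ReadStringRoundTrip {
    public static void main(String[] args) throws IOException {
        // Write a string with Text.writeString (vint length + UTF-8 bytes) ...
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
            Text.writeString(out, "hello readString");
        }
        // ... then read it back with Text.readString.
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
            String value = Text.readString(in);
            System.out.println(value); // prints "hello readString"
        }
    }
}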

Usage

From source file:co.cask.cdap.internal.app.runtime.batch.dataset.input.TaggedInputSplit.java

License: Apache License

private Class<?> readClass(DataInput in) throws IOException {
    String className = StringInterner.weakIntern(Text.readString(in));
    try {
        return conf.getClassByName(className);
    } catch (ClassNotFoundException e) {
        throw new RuntimeException("readObject can't find class", e);
    }
}
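
The readClass method above assumes the class name was serialized on the write side; a hypothetical counterpart (not part of the quoted source) could be:

private void writeClass(DataOutput out, Class<?> clazz) throws IOException {
    // Write the fully qualified class name so readClass can resolve it later.
    Text.writeString(out, clazz.getName());
}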

From source file:com.aerospike.hadoop.mapreduce.AerospikeSplit.java

License: Apache License

public void readFields(DataInput in) throws IOException {
    type = new String(Text.readString(in));
    node = new String(Text.readString(in));
    host = new String(Text.readString(in));
    port = in.readInt();
    namespace = new String(Text.readString(in));
    setName = new String(Text.readString(in));
    int nBinNames = in.readInt();
    if (nBinNames == 0) {
        binNames = null;
    } else {
        binNames = new String[nBinNames];
        for (int ii = 0; ii < nBinNames; ++ii)
            binNames[ii] = new String(Text.readString(in));
    }
    numrangeBin = new String(Text.readString(in));
    numrangeBegin = in.readLong();
    numrangeEnd = in.readLong();
}
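
A Writable's readFields is normally paired with a write method that emits the same fields in the same order. A sketch of what the matching write side might look like, derived only from the read order above (the actual AerospikeSplit implementation may differ):

public void write(DataOutput out) throws IOException {
    // Mirror the field order read by readFields above.
    Text.writeString(out, type);
    Text.writeString(out, node);
    Text.writeString(out, host);
    out.writeInt(port);
    Text.writeString(out, namespace);
    Text.writeString(out, setName);
    out.writeInt(binNames == null ? 0 : binNames.length);
    if (binNames != null) {
        for (String binName : binNames) {
            Text.writeString(out, binName);
        }
    }
    Text.writeString(out, numrangeBin);
    out.writeLong(numrangeBegin);
    out.writeLong(numrangeEnd);
}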

From source file:com.asakusafw.bridge.hadoop.directio.DirectFileInputSplit.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    containerPath = Text.readString(in);
    fragment = readFragment(in);
    batchArguments = readMap(in);
    definition = readDataDefinition(in, batchArguments, conf);
}

From source file:com.asakusafw.bridge.hadoop.directio.Util.java

License: Apache License

static DataDefinition<?> readDataDefinition(DataInput in, Map<String, String> batchArguments,
        Configuration conf) throws IOException {
    Class<?> data;
    DataFormat<?> format;
    DataFilter<?> filter;
    try {
        data = conf.getClassByName(Text.readString(in));
        format = (DataFormat<?>) ReflectionUtils.newInstance(conf.getClassByName(Text.readString(in)), conf);
        String filterClass = Text.readString(in);
        if (filterClass.isEmpty()) {
            filter = null;
        } else {
            filter = createFilter(conf.getClassByName(filterClass), batchArguments, conf);
        }
    } catch (ReflectiveOperationException e) {
        throw new IOException("error occurred while extracting data definition", e);
    }
    return SimpleDataDefinition.newInstance(data, format, filter);
}

From source file:com.asakusafw.bridge.hadoop.directio.Util.java

License: Apache License

static Map<String, String> readMap(DataInput in) throws IOException {
    int size = WritableUtils.readVInt(in);
    if (size == 0) {
        return Collections.emptyMap();
    }
    Map<String, String> result = new LinkedHashMap<>();
    for (int i = 0; i < size; i++) {
        String k = Text.readString(in);
        String v = Text.readString(in);
        result.put(k, v);
    }
    return result;
}
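
A write-side counterpart to readMap (assumed here for illustration, not part of the quoted Util class) would emit the entry count as a vint followed by alternating key and value strings:

static void writeMap(DataOutput out, Map<String, String> map) throws IOException {
    // Entry count first, then each key/value pair as UTF-8 strings.
    WritableUtils.writeVInt(out, map.size());
    for (Map.Entry<String, String> entry : map.entrySet()) {
        Text.writeString(out, entry.getKey());
        Text.writeString(out, entry.getValue());
    }
}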

From source file:com.asakusafw.example.direct.seqfile.writable.ItemInfoWritable.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    itemName = Text.readString(in);
    departmentCode = Text.readString(in);
    departmentName = Text.readString(in);
    categoryCode = Text.readString(in);
    categoryName = Text.readString(in);
    unitSellingPrice = in.readInt();
    registeredDate = new Date(in.readLong());
    beginDate = new Date(in.readLong());
    endDate = new Date(in.readLong());
}

From source file:com.asakusafw.example.direct.seqfile.writable.SalesDetailWritable.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    salesDateTime = new Date(in.readLong());
    storeCode = Text.readString(in);
    itemCode = Text.readString(in);
    amount = in.readInt();
    unitSellingPrice = in.readInt();
    sellingPrice = in.readInt();
}

From source file:com.asakusafw.lang.compiler.mapreduce.testing.mock.WritableModelInput.java

License: Apache License

private boolean readHeader() throws IOException {
    if (readSeparator() == false) {
        return false;
    }
    className = Text.readString(input);
    return true;
}

From source file:com.bah.culvert.constraints.Join.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    ObjectWritable ow = new ObjectWritable();
    Configuration conf = new Configuration();
    ow.setConf(conf);

    // read in left table
    ow.readFields(in);
    this.leftTable = (TableAdapter) ow.get();

    // read in left constraint
    ow.readFields(in);
    this.left = (Constraint) ow.get();

    // read in left column
    this.leftColumn = new CColumn();
    this.leftColumn.readFields(in);

    // read in right table
    this.rightTable = Text.readString(in);

    ow.readFields(in);
    this.database = (DatabaseAdapter) ow.get();
}

From source file:com.baynote.hadoop.TaggedInputSplit.java

License: Apache License

private Class<?> readClass(DataInput in) throws IOException {
    String className = Text.readString(in);
    try {
        return conf.getClassByName(className);
    } catch (ClassNotFoundException e) {
        throw new RuntimeException("readObject can't find class", e);
    }
}