Example usage for org.apache.hadoop.io Text readString

List of usage examples for org.apache.hadoop.io Text readString

Introduction

This page lists example usages of org.apache.hadoop.io.Text.readString collected from real source files.

Prototype

public static String readString(DataInput in) throws IOException 

Document

Read a UTF-8 encoded string from in.
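
As a quick illustration of the prototype above, the sketch below writes a string with Text.writeString and reads it back with Text.readString over in-memory streams. It is a minimal, self-contained round trip; the class name and stream wiring are illustrative and not taken from the examples below.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.Text;

public class ReadStringRoundTrip {
    public static void main(String[] args) throws IOException {
        // Serialize a string as a length-prefixed UTF-8 byte sequence.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);
        Text.writeString(out, "hello, hadoop");

        // Read it back with the method documented on this page.
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        String decoded = Text.readString(in);
        System.out.println(decoded); // prints: hello, hadoop
    }
}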

Usage

From source file: com.fullcontact.sstable.hadoop.mapreduce.SSTableSplit.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    file = new Path(Text.readString(in));
    length = in.readLong();
    start = in.readLong();
    end = in.readLong();
    hosts = null;
}
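
The readFields method above deserializes the split's fields in a fixed order. For context, a hedged sketch of the complementary write method such a Writable would typically pair with it is shown below; it is illustrative only, not code from SSTableSplit.

@Override
public void write(DataOutput out) throws IOException {
    // Hypothetical counterpart to readFields: same fields, same order.
    Text.writeString(out, file.toString());
    out.writeLong(length);
    out.writeLong(start);
    out.writeLong(end);
}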

From source file: com.hortonworks.hbase.replication.bridge.ConnectionHeader.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    protocol = Text.readString(in);
    if (protocol.isEmpty()) {
        protocol = null;
    }
}

From source file: com.iflytek.spider.parse.ParseText.java

License: Apache License

public void readFields(DataInput in) throws IOException {
    byte version = in.readByte();
    switch (version) {
    case 1:
        text = WritableUtils.readCompressedString(in);
        break;
    case VERSION:
        text = Text.readString(in);
        break;
    default:
        throw new VersionMismatchException(VERSION, version);
    }
}

From source file: com.ikanow.aleph2.analytics.hadoop.assets.Aleph2MultiInputSplit.java

License: Apache License

@SuppressWarnings("unchecked")
@Override
public void readFields(DataInput in) throws IOException {
    _name = Text.readString(in);
    _input_split = (Class<? extends InputSplit>) readClass(in);
    _input_format = (Class<? extends InputFormat<?, ?>>) readClass(in);
    _mapper = (Class<? extends Mapper<?, ?, ?, ?>>) readClass(in);
    _delegate = (InputSplit) ReflectionUtils.newInstance(_input_split, _conf);
    final SerializationFactory factory = new SerializationFactory(_conf);
    final Deserializer deserializer = factory.getDeserializer(_input_split);
    deserializer.open((DataInputStream) in);
    _delegate = (InputSplit) deserializer.deserialize(_delegate);
}

From source file: com.ikanow.aleph2.analytics.hadoop.assets.Aleph2MultiInputSplit.java

License: Apache License

private Class<?> readClass(DataInput in) throws IOException {
    String className = Text.readString(in);
    try {
        return _conf.getClassByName(className);
    } catch (ClassNotFoundException e) {
        throw new RuntimeException("readObject can't find class", e);
    }
}

From source file: com.linkedin.cubert.io.MultiMapperSplit.java

License: Open Source License

@Override
public void readFields(DataInput in) throws IOException {
    multiMapperIndex = in.readInt();

    // patch the conf to this multiMapperIndex
    ConfigurationDiff confDiff = new ConfigurationDiff(conf);
    confDiff.applyDiff(multiMapperIndex);

    boolean isFileSplit = in.readBoolean();
    if (isFileSplit) {
        Path file = new Path(Text.readString(in));
        long start = in.readLong();
        long length = in.readLong();

        actualSplit = new FileSplit(file, start, length, null);
    } else {
        String actualSplitClass = Text.readString(in);
        try {
            actualSplit = ClassCache.forName(actualSplitClass).asSubclass(InputSplit.class).newInstance();
            if (actualSplit instanceof Configurable)
                ((Configurable) actualSplit).setConf(conf);

            ((Writable) actualSplit).readFields(in);

        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

}

From source file: com.marklogic.contentpump.CombineDocumentSplit.java

License: Apache License

public void readFields(DataInput in) throws IOException {
    // splits
    int splitSize = in.readInt();
    splits = new ArrayList<FileSplit>();
    for (int i = 0; i < splitSize; i++) {
        Path path = new Path(Text.readString(in));
        long start = in.readLong();
        long len = in.readLong();
        FileSplit split = new FileSplit(path, start, len, null);
        splits.add(split);
    }
    // length
    length = in.readLong();
    // locations
    locations = new HashSet<String>();
}

From source file: com.marklogic.contentpump.ContentWithFileNameWritable.java

License: Apache License

@SuppressWarnings("unchecked")
@Override
public void readFields(DataInput in) throws IOException {
    String fn = Text.readString(in);
    fileName = fn;
    byte valueType = in.readByte();
    switch (valueType) {
    case 0:
        value = (VALUE) new Text();
        ((Text) value).readFields(in);
        break;
    case 1:
        value = (VALUE) new MarkLogicNode();
        ((MarkLogicNode) value).readFields(in);
        break;
    case 2:
        value = (VALUE) new BytesWritable();
        ((BytesWritable) value).readFields(in);
        break;
    default:
        throw new IOException("incorrect type");
    }
    type = valueType;
}

From source file: com.marklogic.mapreduce.DocumentURI.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    uri = Text.readString(in);
}

From source file: com.marklogic.mapreduce.DocumentURIWithSourceInfo.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    uri = Text.readString(in);
    srcId = Text.readString(in);
    subId = Text.readString(in);
    lineNumber = in.readInt();
    colNumber = in.readInt();
    skipReason = Text.readString(in);
}