Example usage for org.apache.hadoop.io Text readString

List of usage examples for org.apache.hadoop.io Text readString

Introduction

On this page you can find example usage for org.apache.hadoop.io.Text.readString.

Prototype

public static String readString(DataInput in) throws IOException 

Document

Read a UTF-8 encoded string from in.
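
Text.readString is the read half of the Text.writeString/Text.readString pair used by many Hadoop Writable implementations, as the examples below show. The following is a minimal round-trip sketch; the class name ReadStringRoundTrip and the sample string are illustrative, not taken from the examples on this page.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.Text;

public class ReadStringRoundTrip {
    public static void main(String[] args) throws IOException {
        // Write a string with Text.writeString ...
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);
        Text.writeString(out, "http://example.org/");
        out.flush();

        // ... and read it back with Text.readString.
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        String value = Text.readString(in);
        System.out.println(value); // prints http://example.org/
    }
}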

Usage

From source file: org.apache.nutch.scoring.webgraph.LinkDatum.java

License: Apache License

public void readFields(DataInput in) throws IOException {
    url = Text.readString(in);
    anchor = Text.readString(in);
    score = in.readFloat();
    timestamp = in.readLong();
    linkType = in.readByte();
}
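
Each readFields implementation on this page mirrors a write method that serializes the same fields in the same order with Text.writeString. A sketch of what that write side could look like for the fields above (illustrative only, not copied from the LinkDatum source):

public void write(DataOutput out) throws IOException {
    // Fields must be written in the same order readFields reads them.
    Text.writeString(out, url);
    Text.writeString(out, anchor);
    out.writeFloat(score);
    out.writeLong(timestamp);
    out.writeByte(linkType);
}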

From source file: org.apache.nutch.searcher.Hits.java

License: Apache License

public void readFields(DataInput in) throws IOException {
    total = in.readLong(); // read total hits
    top = new Hit[in.readInt()]; // read hits returned
    Class sortClass = null;
    if (top.length > 0) { // read sort value class
        try {
            sortClass = Class.forName(Text.readString(in));
        } catch (ClassNotFoundException e) {
            throw new IOException(e.toString());
        }
    }

    for (int i = 0; i < top.length; i++) {
        int indexDocNo = in.readInt(); // read indexDocNo

        WritableComparable sortValue = null;
        try {
            sortValue = (WritableComparable) sortClass.newInstance();
        } catch (Exception e) {
            throw new IOException(e.toString());
        }
        sortValue.readFields(in); // read sortValue
        String dedupValue = Text.readString(in); // read dedupValue

        top[i] = new Hit(indexDocNo, sortValue, dedupValue);
    }
}

From source file: org.apache.nutch.searcher.Summary.java

License: Apache License

public void readFields(DataInput in) throws IOException {
    int nbFragments = in.readInt();
    Fragment fragment = null;
    for (int i = 0; i < nbFragments; i++) {
        int type = in.readByte();
        if (type == HIGHLIGHT) {
            fragment = new Highlight(Text.readString(in));
        } else if (type == ELLIPSIS) {
            fragment = new Ellipsis();
        } else {
            fragment = new Fragment(Text.readString(in));
        }
        fragments.add(fragment);
    }
}

From source file: org.apache.nutch.util.hostdb.HostDatum.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    byte version = in.readByte();
    if (version > CUR_VERSION) // check version
        throw new VersionMismatchException(CUR_VERSION, version);

    score = in.readFloat();
    lastCheck = new Date(in.readLong());
    homepageUrl = Text.readString(in);

    dnsFailures = in.readInt();
    connectionFailures = in.readInt();

    statCounts.put(CrawlDatum.STATUS_DB_UNFETCHED, in.readInt());
    statCounts.put(CrawlDatum.STATUS_DB_FETCHED, in.readInt());
    statCounts.put(CrawlDatum.STATUS_DB_NOTMODIFIED, in.readInt());
    statCounts.put(CrawlDatum.STATUS_DB_REDIR_PERM, in.readInt());
    statCounts.put(CrawlDatum.STATUS_DB_REDIR_TEMP, in.readInt());
    statCounts.put(CrawlDatum.STATUS_DB_GONE, in.readInt());

    metaData = new MapWritable();
    metaData.readFields(in);
}

From source file: org.apache.phoenix.hive.HivePhoenixInputSplit.java

License: Apache License

public void readFields(DataInput input) throws IOException {
    this.path = new Path(Text.readString(input));
    this.keyRange = new KeyRange();
    this.keyRange.readFields(input);
}

From source file: org.apache.pig.piggybank.squeal.backend.storm.state.CombineTupleWritable.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    int card = WritableUtils.readVInt(in);
    values = new ArrayList<Writable>(card);
    Class<? extends Writable>[] cls = new Class[card];
    try {
        for (int i = 0; i < card; ++i) {
            cls[i] = Class.forName(Text.readString(in)).asSubclass(Writable.class);
        }
        for (int i = 0; i < card; ++i) {
            values.add(i, cls[i].newInstance());
            values.get(i).readFields(in);
        }
    } catch (ClassNotFoundException e) {
        throw (IOException) new IOException("Failed tuple init").initCause(e);
    } catch (IllegalAccessException e) {
        throw (IOException) new IOException("Failed tuple init").initCause(e);
    } catch (InstantiationException e) {
        throw (IOException) new IOException("Failed tuple init").initCause(e);
    }
}
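
The example above also shows a common class-name-then-payload pattern: each element's class name is read with Text.readString and instantiated by reflection before its fields are read. A hedged sketch of a corresponding write side (illustrative, not taken from the Pig source):

public void write(DataOutput out) throws IOException {
    // Record the element count, then each class name, then each element's fields.
    WritableUtils.writeVInt(out, values.size());
    for (Writable value : values) {
        Text.writeString(out, value.getClass().getName());
    }
    for (Writable value : values) {
        value.write(out);
    }
}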

From source file: org.apache.reef.io.checkpoint.fs.FSCheckpointID.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    this.path = new Path(Text.readString(in));
}

From source file: org.apache.sqoop.manager.oracle.OraOopDBInputSplit.java

License: Apache License

@SuppressWarnings("unchecked")
@Override
/** {@inheritDoc} */
public void readFields(DataInput input) throws IOException {

    this.splitId = input.readInt();

    int dataChunkCount = input.readInt();
    if (dataChunkCount == 0) {
        this.oracleDataChunks = null;
    } else {
        Class<? extends OraOopOracleDataChunk> dataChunkClass;
        OraOopOracleDataChunk dataChunk;
        this.oracleDataChunks = new ArrayList<OraOopOracleDataChunk>(dataChunkCount);
        for (int idx = 0; idx < dataChunkCount; idx++) {
            try {
                dataChunkClass = (Class<? extends OraOopOracleDataChunk>) Class.forName(Text.readString(input));
                dataChunk = dataChunkClass.newInstance();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
            dataChunk.readFields(input);
            this.oracleDataChunks.add(dataChunk);
        }
    }
}

From source file: org.apache.sqoop.manager.oracle.OraOopOracleDataChunkExtent.java

License: Apache License

@Override
public void readFields(DataInput input) throws IOException {
    this.setId(Text.readString(input));
    this.oracleDataObjectId = input.readInt();
    this.relativeDatafileNumber = input.readInt();
    this.startBlockNumber = input.readLong();
    this.finishBlockNumber = input.readLong();
}

From source file: org.apache.sqoop.manager.oracle.OraOopOracleDataChunkPartition.java

License: Apache License

@Override
public void readFields(DataInput input) throws IOException {
    this.setId(Text.readString(input));
    this.isSubPartition = input.readBoolean();
    this.blocks = input.readLong();
}