Example usage for org.apache.hadoop.io WritableUtils readCompressedStringArray

List of usage examples for org.apache.hadoop.io WritableUtils readCompressedStringArray

Introduction

On this page you can find example usages of org.apache.hadoop.io.WritableUtils.readCompressedStringArray.

Prototype

public static String[] readCompressedStringArray(DataInput in) throws IOException 
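
Before the source-file examples below, here is a minimal round-trip sketch. It is not taken from any of the listed sources; the class name RoundTripExample and the sample strings are illustrative. It pairs readCompressedStringArray with its companion WritableUtils.writeCompressedStringArray, using Hadoop's in-memory DataOutputBuffer and DataInputBuffer:

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.WritableUtils;

public class RoundTripExample {
    public static void main(String[] args) throws IOException {
        String[] names = { "worker-1", "worker-2", "worker-3" };

        // Serialize the array into an in-memory buffer.
        DataOutputBuffer out = new DataOutputBuffer();
        WritableUtils.writeCompressedStringArray(out, names);

        // Read the same bytes back.
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        String[] restored = WritableUtils.readCompressedStringArray(in);

        for (String name : restored) {
            System.out.println(name);
        }
    }
}

The compressed variants trade CPU for space by compressing each string's bytes, which mainly pays off for long or repetitive strings such as host and path lists.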


Usage

From source file:com.chinamobile.bcbsp.action.Directive.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    this.faultSSStep = in.readInt();
    this.timestamp = in.readLong();
    int t = in.readInt();
    if (Directive.Type.Request.value() == t) {
        this.type = Directive.Type.Request;
        int length = WritableUtils.readVInt(in);
        if (length > 0) {
            this.actionList = new ArrayList<WorkerManagerAction>();
            for (int i = 0; i < length; ++i) {
                WorkerManagerAction.ActionType actionType = WritableUtils.readEnum(in,
                        WorkerManagerAction.ActionType.class);
                WorkerManagerAction action = WorkerManagerAction.createAction(actionType);
                action.readFields(in);
                this.actionList.add(action);
            }
        } else {
            this.actionList = null;
        }

        this.workerManagersName = WritableUtils.readCompressedStringArray(in);
    } else if (Directive.Type.Response.value() == t) {
        this.type = Directive.Type.Response;
        this.status = new WorkerManagerStatus();
        this.status.readFields(in);
    } else {
        throw new IllegalStateException("Wrong directive type:" + t);
    }

    /* Zhicheng Liu added */
    this.migrateSSStep = in.readInt();

}

From source file:com.chinamobile.bcbsp.bspcontroller.ClusterStatus.java

License:Apache License

/**
 * Rewrite the read method to read information from HDFS.
 * @param in
 *        DataInput object to read from HDFS.
 */
@Override
public void readFields(DataInput in) throws IOException {
    this.activeWorkerManagersCount = in.readInt();
    boolean detailed = in.readBoolean();
    if (detailed) {
        this.activeWorkerManagersName = WritableUtils.readCompressedStringArray(in);
    }
    this.maxClusterStaffs = in.readInt();
    this.runningClusterStaffs = in.readInt();
    this.state = WritableUtils.readEnum(in, BSPController.State.class);
}
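
The matching write path of such a Writable pairs readCompressedStringArray with WritableUtils.writeCompressedStringArray. The following is a hedged sketch of a mirroring write(DataOutput), not the actual ClusterStatus source: the field names are assumed from the readFields above, and the usual java.io.DataOutput, java.io.IOException, and org.apache.hadoop.io.WritableUtils imports are presumed in the surrounding class.

@Override
public void write(DataOutput out) throws IOException {
    out.writeInt(this.activeWorkerManagersCount);
    // Assume the 'detailed' flag simply records whether the name list is present.
    boolean detailed = this.activeWorkerManagersName != null;
    out.writeBoolean(detailed);
    if (detailed) {
        // Companion call of readCompressedStringArray in the read path.
        WritableUtils.writeCompressedStringArray(out, this.activeWorkerManagersName);
    }
    out.writeInt(this.maxClusterStaffs);
    out.writeInt(this.runningClusterStaffs);
    WritableUtils.writeEnum(out, this.state);
}

Keeping the write and read orderings identical is what lets the optional compressed array round-trip correctly.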

From source file:com.chinamobile.bcbsp.sync.SuperStepCommand.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    this.commandType = in.readInt();
    this.initWritePath = Text.readString(in);
    this.initReadPath = Text.readString(in);
    this.ableCheckPoint = in.readInt();
    this.nextSuperStepNum = in.readInt();
    this.oldCheckPoint = in.readInt();
    int count = in.readInt();
    this.aggValues = new String[count];
    for (int i = 0; i < count; i++) {
        this.aggValues[i] = Text.readString(in);
    }
    int size = WritableUtils.readVInt(in);
    if (size > 0) {
        String[] partitionToWMName = WritableUtils.readCompressedStringArray(in);
        this.partitionToWorkerManagerNameAndPort = new HashMap<Integer, String>();
        for (int j = 0; j < size; j++) {
            this.partitionToWorkerManagerNameAndPort.put(j, partitionToWMName[j]);
        }
    }
    this.migrateStaffIDs = in.readUTF();
    this.migrateVertexCommand.readFields(in);

}

From source file:com.iflytek.spider.protocol.ProtocolStatus.java

License:Apache License

public void readFields(DataInput in) throws IOException {
    byte version = in.readByte();
    switch (version) {
    case 1:
        code = in.readByte();
        lastModified = in.readLong();
        args = WritableUtils.readCompressedStringArray(in);
        break;
    case VERSION:
        code = in.readByte();
        lastModified = in.readLong();
        args = WritableUtils.readStringArray(in);
        break;
    default:
        throw new VersionMismatchException(VERSION, version);
    }
}
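
In this versioned layout, readCompressedStringArray only handles legacy version-1 records; current records are written and read with the uncompressed string-array helpers. Below is a hedged sketch of a matching current-version write path, with the field names assumed from the readFields above and the usual java.io and WritableUtils imports presumed:

public void write(DataOutput out) throws IOException {
    out.writeByte(VERSION);
    out.writeByte(code);
    out.writeLong(lastModified);
    // Only version-1 data goes through readCompressedStringArray on the read side;
    // the current version stores the args uncompressed.
    WritableUtils.writeStringArray(out, args);
}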

From source file:com.newland.bi.bigdata.hdfs.Configuration.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    clear();
    int size = WritableUtils.readVInt(in);
    for (int i = 0; i < size; ++i) {
        String key = org.apache.hadoop.io.Text.readString(in);
        String value = org.apache.hadoop.io.Text.readString(in);
        set(key, value);
        String[] sources = WritableUtils.readCompressedStringArray(in);
        updatingResource.put(key, sources);
    }
}

From source file:org.apache.nutch.parse.ParseStatus.java

License:Apache License

public void readFields(DataInput in) throws IOException {
    byte version = in.readByte();
    switch (version) {
    case 1:
        majorCode = in.readByte();
        minorCode = in.readShort();
        args = WritableUtils.readCompressedStringArray(in);
        break;
    case 2:
        majorCode = in.readByte();
        minorCode = in.readShort();
        args = WritableUtils.readStringArray(in);
        break;
    default:
        throw new VersionMismatchException(VERSION, version);
    }
}