Example usage for org.apache.hadoop.mapred InputSplit readFields

List of usage examples for org.apache.hadoop.mapred InputSplit readFields

Introduction

In this page you can find the example usage for org.apache.hadoop.mapred InputSplit readFields.

Prototype

void readFields(DataInput in) throws IOException;

Source Link

Document

Deserializes the fields of this object from the given DataInput stream, restoring the state previously written by the corresponding write(DataOutput) call.

Usage

From source file:com.ibm.jaql.lang.expr.io.ReadSplitFn.java

License:Apache License

/**
 * Reads the records of a single serialized input split, as produced by
 * {@code InputSplitsFn}, and returns an iterator over them.
 *
 * <p>The first argument expression evaluates to the I/O descriptor used to
 * look up the {@code HadoopInputAdapter}; the second evaluates to a record
 * holding the split's class name ({@code InputSplitsFn.CLASS_TAG}) and its
 * raw {@code Writable} bytes ({@code InputSplitsFn.SPLIT_TAG}).
 *
 * @param context the evaluation context
 * @return an iterator over the split's records, or {@link JsonIterator#EMPTY}
 *         if the split record is {@code null}
 * @throws Exception if the adapter cannot be created or the split cannot be
 *         deserialized
 */
@Override
public JsonIterator iter(Context context) throws Exception {
    // Close the adapter left open by a previous invocation, if any.
    if (adapter != null) {
        adapter.close();
        adapter = null;
    }

    // arg 0: the I/O descriptor; arg 1: the serialized split record
    JsonValue args = exprs[0].eval(context);
    JsonRecord splitRec = (JsonRecord) exprs[1].eval(context);

    if (splitRec == null) {
        return JsonIterator.EMPTY;
    }

    // Look up the input adapter for the descriptor; keep it in the field so
    // the next invocation can close it.
    HadoopInputAdapter hia = (HadoopInputAdapter) JaqlUtil.getAdapterStore().input.getAdapter(args);
    adapter = hia;
    JobConf conf = new JobConf(); // TODO: allow configuration
    hia.setParallel(conf); // right thing to do?

    // Recreate the concrete InputSplit instance from the recorded class name.
    // Fail fast with a clear message rather than an NPE if the tag is absent.
    JsonString jsplitClassName = (JsonString) splitRec.get(InputSplitsFn.CLASS_TAG);
    if (jsplitClassName == null) {
        throw new IllegalArgumentException(
                "split record is missing the class tag: " + InputSplitsFn.CLASS_TAG);
    }
    // Cast is unavoidable: resolveClass returns a raw Class; readFields below
    // would fail anyway if the class were not an InputSplit.
    @SuppressWarnings("unchecked")
    Class<? extends InputSplit> splitCls = (Class<? extends InputSplit>) ClassLoaderMgr
            .resolveClass(jsplitClassName.toString());
    InputSplit split = (InputSplit) ReflectionUtils.newInstance(splitCls, conf);

    // Deserialize the split's fields from the raw bytes captured earlier.
    DataInputBuffer in = new DataInputBuffer();
    JsonBinary rawSplit = (JsonBinary) splitRec.get(InputSplitsFn.SPLIT_TAG);
    in.reset(rawSplit.getInternalBytes(), rawSplit.bytesOffset(), rawSplit.bytesLength());
    split.readFields(in);

    RecordReader<JsonHolder, JsonHolder> rr = hia.getRecordReader(split, conf, Reporter.NULL);
    return new RecordReaderValueIter(rr);
}

From source file:com.yolodata.tbana.hadoop.mapred.splunk.split.InputSplitTest.java

License:Open Source License

/**
 * Round-trips {@code split} through Writable serialization and asserts that
 * deserializing into {@code emptySplit} reproduces an equal object.
 *
 * @param split      the populated split to serialize
 * @param emptySplit a fresh instance of the same split class to deserialize into
 * @throws IOException if serialization or deserialization fails
 */
protected void testSerialization(InputSplit split, InputSplit emptySplit) throws IOException {

    // Serialize the populated split into an in-memory buffer.
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    split.write(new DataOutputStream(sink));

    // Read the bytes back into the empty split.
    DataInputStream source = new DataInputStream(new ByteArrayInputStream(sink.toByteArray()));
    emptySplit.readFields(source);

    // The round-tripped split must equal the original.
    assertEquals(split, emptySplit);

}

From source file:org.apache.drill.exec.store.hive.HiveInputReader.java

License:Apache License

/**
 * Reconstructs an {@link InputSplit} from its Base64-encoded Writable form.
 *
 * @param base64    Base64 encoding of the split's serialized ({@code write}) bytes
 * @param className fully qualified name of the concrete split class
 * @return the deserialized split
 * @throws Exception if the class cannot be loaded or instantiated, or if
 *                   {@code readFields} fails
 */
public static InputSplit deserializeInputSplit(String base64, String className) throws Exception {
    Class<?> splitClass = Class.forName(className);
    InputSplit split;
    if (splitClass == FileSplit.class) {
        // FileSplit lacks a usable no-arg constructor; build a placeholder
        // whose fields readFields() will overwrite.
        split = new FileSplit((Path) null, 0, 0, (String[]) null);
    } else {
        // Use the declared constructor and make it accessible so split
        // classes with non-public no-arg constructors also work
        // (consistent with HiveSubScan.deserializeInputSplit).
        java.lang.reflect.Constructor<?> constructor = splitClass.getDeclaredConstructor();
        constructor.setAccessible(true);
        split = (InputSplit) constructor.newInstance();
    }
    ByteArrayDataInput byteArrayDataInput = ByteStreams.newDataInput(Base64.decodeBase64(base64));
    split.readFields(byteArrayDataInput);
    return split;
}

From source file:org.apache.drill.exec.store.hive.HiveSubScan.java

License:Apache License

/**
 * Reconstructs an {@link InputSplit} from its Base64-encoded Writable form.
 *
 * @param base64    Base64 encoding of the split's serialized ({@code write}) bytes
 * @param className fully qualified name of the concrete split class; must
 *                  declare a no-arg constructor (any visibility)
 * @return the deserialized split
 * @throws IOException                  if {@code readFields} fails
 * @throws ReflectiveOperationException if the class cannot be loaded, has no
 *                                      no-arg constructor, or cannot be instantiated
 */
public static InputSplit deserializeInputSplit(String base64, String className)
        throws IOException, ReflectiveOperationException {
    // getDeclaredConstructor() never returns null — it throws
    // NoSuchMethodException (a ReflectiveOperationException) when the class
    // has no no-arg constructor — so no null check is needed here.
    Constructor<?> constructor = Class.forName(className).getDeclaredConstructor();
    constructor.setAccessible(true); // split classes may have non-public constructors
    InputSplit split = (InputSplit) constructor.newInstance();
    ByteArrayDataInput byteArrayDataInput = ByteStreams.newDataInput(Base64.decodeBase64(base64));
    split.readFields(byteArrayDataInput);
    return split;
}