Example usage for org.apache.hadoop.mapred FileSplit readFields

List of usage examples for org.apache.hadoop.mapred FileSplit readFields

Introduction

On this page you can find example usage for org.apache.hadoop.mapred FileSplit readFields.

Prototype

public void readFields(DataInput in) throws IOException 

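readFields deserializes a FileSplit from a DataInput that was previously filled by the split's write method. Below is a minimal round-trip sketch (not taken from the examples that follow), assuming Hadoop's org.apache.hadoop.io.DataOutputBuffer and DataInputBuffer helpers are on the classpath; the path, offsets and host names are illustrative only.

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.mapred.FileSplit;

public class FileSplitRoundTrip {
    public static void main(String[] args) throws IOException {
        // Build a split over an illustrative file region.
        FileSplit original = new FileSplit(new Path("/tmp/input/part-00000"), 0L, 1024L,
                new String[] { "host1", "host2" });

        // Serialize the split with write(...).
        DataOutputBuffer out = new DataOutputBuffer();
        original.write(out);

        // Point a DataInputBuffer at the serialized bytes and deserialize into a fresh instance.
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());

        FileSplit copy = new FileSplit();
        copy.readFields(in);

        System.out.println(copy.getPath() + " " + copy.getStart() + " " + copy.getLength());
    }
}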

Usage

From source file: com.ibm.jaql.lang.expr.io.FileSplitToRecordFn.java

License: Apache License

@Override
public JsonRecord eval(Context context) throws Exception {
    // { path: string, start: long, length: long, locations: [string...] }
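    // Lazily allocate the reusable buffers and the result record on the first call.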
    if (in == null) {
        in = new DataInputBuffer();
        jpath = new MutableJsonString();
        jstart = new MutableJsonLong();
        jlength = new MutableJsonLong();
        jlocations = new BufferedJsonArray();
        values = new JsonValue[] { jpath, jstart, jlength, jlocations };
        resultRec = new BufferedJsonRecord();
        resultRec.set(NAMES, values, NAMES.length);
    }

    JsonRecord splitRec = (JsonRecord) exprs[0].eval(context);

    JsonString jsplitClassName = (JsonString) splitRec.get(InputSplitsFn.CLASS_TAG);
    Class<? extends FileSplit> splitCls = (Class<? extends FileSplit>) ClassLoaderMgr
            .resolveClass(jsplitClassName.toString());
    FileSplit split = (FileSplit) ReflectionUtils.newInstance(splitCls, null);
    JsonBinary rawSplit = (JsonBinary) splitRec.get(InputSplitsFn.SPLIT_TAG);
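    // Point the reusable DataInputBuffer at the serialized split bytes so readFields can rebuild the split.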
    in.reset(rawSplit.getInternalBytes(), rawSplit.bytesOffset(), rawSplit.bytesLength());
    split.readFields(in);
    JsonArray jlocs = (JsonArray) splitRec.get(InputSplitsFn.LOCATIONS_TAG);

    jpath.setCopy(split.getPath().toString());
    jstart.set(split.getStart());
    jlength.set(split.getLength());
    if (jlocs != null) {
        values[3] = jlocs;
    } else {
        String[] locs = split.getLocations();
        jlocations.resize(locs.length);
        for (int i = 0; i < locs.length; i++) {
            jlocations.set(i, new JsonString(locs[i]));
        }
        values[3] = jlocations;
    }

    return resultRec;
}
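
In this example the serialized split bytes are carried inside a JSON record: the reusable DataInputBuffer is reset over those bytes, readFields rebuilds the FileSplit in place, and the location list is taken from the record when present or recomputed via split.getLocations() otherwise.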

From source file: org.apache.ignite.internal.processors.hadoop.impl.v1.HadoopV1Splitter.java

License: Apache License

/**
 * @param clsName Input split class name.
 * @param in Input stream.
 * @param hosts Optional hosts.
 * @return File block or {@code null} if it is not a {@link FileSplit} instance.
 * @throws IgniteCheckedException If failed.
 */
@Nullable
public static HadoopFileBlock readFileBlock(String clsName, FSDataInputStream in, @Nullable String[] hosts)
        throws IgniteCheckedException {
    if (!FileSplit.class.getName().equals(clsName))
        return null;

    FileSplit split = U.newInstance(FileSplit.class);

    try {
        split.readFields(in);
    } catch (IOException e) {
        throw new IgniteCheckedException(e);
    }

    if (hosts == null)
        hosts = EMPTY_HOSTS;

    return new HadoopFileBlock(hosts, split.getPath().toUri(), split.getStart(), split.getLength());
}

From source file: org.apache.ignite.internal.processors.hadoop.v1.GridHadoopV1Splitter.java

License: Apache License

/**
 * @param clsName Input split class name.
 * @param in Input stream.
 * @param hosts Optional hosts.
 * @return File block or {@code null} if it is not a {@link FileSplit} instance.
 * @throws IgniteCheckedException If failed.
 */
@Nullable
public static GridHadoopFileBlock readFileBlock(String clsName, FSDataInputStream in, @Nullable String[] hosts)
        throws IgniteCheckedException {
    if (!FileSplit.class.getName().equals(clsName))
        return null;

    FileSplit split = U.newInstance(FileSplit.class);

    try {
        split.readFields(in);
    } catch (IOException e) {
        throw new IgniteCheckedException(e);
    }

    if (hosts == null)
        hosts = EMPTY_HOSTS;

    return new GridHadoopFileBlock(hosts, split.getPath().toUri(), split.getStart(), split.getLength());
}

From source file: org.gridgain.grid.kernal.processors.hadoop.v1.GridHadoopV1Splitter.java

License: Open Source License

/**
 * @param clsName Input split class name.
 * @param in Input stream.
 * @param hosts Optional hosts.
 * @return File block or {@code null} if it is not a {@link FileSplit} instance.
 * @throws GridException If failed.
 */
@Nullable
public static GridHadoopFileBlock readFileBlock(String clsName, FSDataInputStream in, @Nullable String[] hosts)
        throws GridException {
    if (!FileSplit.class.getName().equals(clsName))
        return null;

    FileSplit split = U.newInstance(FileSplit.class);

    try {
        split.readFields(in);
    } catch (IOException e) {
        throw new GridException(e);
    }

    if (hosts == null)
        hosts = EMPTY_HOSTS;

    return new GridHadoopFileBlock(hosts, split.getPath().toUri(), split.getStart(), split.getLength());
}