Example usage for org.apache.commons.io EndianUtils writeSwappedShort

Introduction

On this page you can find example usage for org.apache.commons.io EndianUtils writeSwappedShort.

Prototype

public static void writeSwappedShort(OutputStream output, short value) throws IOException 

Document

Writes a "short" value to an OutputStream; the value is byte-swapped (converted to the opposed endian system) while writing.
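
For reference, here is a minimal standalone sketch (the stream and value are illustrative, not taken from the examples below) showing that the two bytes are emitted low byte first, i.e. in little-endian order:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.commons.io.EndianUtils;

public class WriteSwappedShortDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();

        // 0x1234 is written byte-swapped, i.e. low byte first
        EndianUtils.writeSwappedShort(out, (short) 0x1234);

        byte[] bytes = out.toByteArray();
        // prints: 34 12
        System.out.printf("%02x %02x%n", bytes[0], bytes[1]);
    }
}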

Usage

From source file:org.apache.hadoop.hive.ql.io.TeradataBinaryFileOutputFormat.java

/**
 * Creates the final output file and writes it row by row. After each row is
 * appended, a configured row separator is appended.
 *
 * @param jc
 *          the job configuration file
 * @param outPath
 *          the final output file to be created
 * @param valueClass
 *          the value class used for create
 * @param isCompressed
 *          whether the content is compressed or not
 * @param tableProperties
 *          the tableProperties of this file's corresponding table
 * @param progress
 *          progress used for status report
 * @return the RecordWriter
 */
@Override
public RecordWriter getHiveRecordWriter(JobConf jc, Path outPath, Class<? extends Writable> valueClass,
        boolean isCompressed, Properties tableProperties, Progressable progress) throws IOException {
    FileSystem fs = outPath.getFileSystem(jc);
    final OutputStream outStream = Utilities.createCompressedStream(jc, fs.create(outPath, progress),
            isCompressed);
    return new RecordWriter() {
        @Override
        public void write(Writable r) throws IOException {
            BytesWritable bw = (BytesWritable) r;
            int recordLength = bw.getLength();

            //Based on the row length to decide if the length is int or short
            String rowLength = tableProperties.getProperty(TeradataBinaryRecordReader.TD_ROW_LENGTH,
                    TeradataBinaryRecordReader.DEFAULT_TD_ROW_LENGTH).toLowerCase();
            LOG.debug(format("The table property %s is: %s", TeradataBinaryRecordReader.TD_ROW_LENGTH,
                    rowLength));

            if (TeradataBinaryRecordReader.TD_ROW_LENGTH_TO_BYTE_NUM.containsKey(rowLength)) {
                if (rowLength.equals(TeradataBinaryRecordReader.DEFAULT_TD_ROW_LENGTH)) {
                    EndianUtils.writeSwappedShort(outStream, (short) recordLength); // write the length using little endian
                } else if (rowLength.equals(TeradataBinaryRecordReader.TD_ROW_LENGTH_1MB)) {
                    EndianUtils.writeSwappedInteger(outStream, recordLength); // write the length using little endian
                }
            } else {
                throw new IllegalArgumentException(
                        format("%s doesn't support the value %s, the supported values are %s",
                                TeradataBinaryRecordReader.TD_ROW_LENGTH, rowLength,
                                TeradataBinaryRecordReader.TD_ROW_LENGTH_TO_BYTE_NUM.keySet()));
            }

            outStream.write(bw.getBytes(), 0, bw.getLength()); // write the content (the content is in little endian)
            outStream.write(RECORD_END_BYTE); //write the record ending
        }

        @Override
        public void close(boolean abort) throws IOException {
            outStream.close();
        }
    };
}
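
For symmetry, a reader of this format would use the swapped read counterparts from the same EndianUtils class. The following standalone sketch (the sample record and variable names are assumptions, not part of the Hive source above) reads back a two-byte little-endian length prefix followed by that many payload bytes:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

import org.apache.commons.io.EndianUtils;

public class SwappedLengthPrefixReadDemo {
    public static void main(String[] args) throws IOException {
        // A record as the writer above emits it for the default (short) row length:
        // two little-endian length bytes, then the payload (the trailing record-end
        // byte is omitted in this sketch).
        byte[] record = {0x03, 0x00, 'a', 'b', 'c'};
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(record));

        short length = EndianUtils.readSwappedShort(in); // low byte first -> 3
        byte[] payload = new byte[length];
        in.readFully(payload); // read exactly 'length' payload bytes

        System.out.println(length + " bytes: " + new String(payload, "UTF-8"));
    }
}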

From source file:org.apache.hadoop.hive.serde2.teradata.TeradataBinaryDataOutputStream.java

/**
 * Write VARCHAR(N).
 * The representation of Varchar in Teradata binary format is:
 * the first two bytes represent the length N of this varchar field,
 * the next N bytes represent the content of this varchar field.
 * For a null varchar, the length is written as 0 and no content follows.
 *
 * @param writable the writable
 * @throws IOException the io exception
 */
public void writeVarChar(HiveVarcharWritable writable) throws IOException {
    if (writable == null) {
        EndianUtils.writeSwappedShort(this, (short) 0);
        return;
    }
    Text t = writable.getTextValue();
    int varcharLength = t.getLength();
    EndianUtils.writeSwappedShort(this, (short) varcharLength); // write the varchar length
    write(t.getBytes(), 0, varcharLength); // write the varchar content
}
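
To illustrate the VARCHAR(N) layout described above, the following standalone sketch (it uses a plain ByteArrayOutputStream rather than TeradataBinaryDataOutputStream, so the setup is an assumption) writes a three-character value and prints the two little-endian length bytes followed by the content:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.commons.io.EndianUtils;

public class VarcharLayoutDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] content = "abc".getBytes("UTF-8");

        EndianUtils.writeSwappedShort(out, (short) content.length); // two-byte length prefix, little-endian
        out.write(content);                                         // varchar content

        // prints: 03 00 61 62 63
        for (byte b : out.toByteArray()) {
            System.out.printf("%02x ", b);
        }
        System.out.println();
    }
}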

From source file:org.apache.hadoop.hive.serde2.teradata.TeradataBinaryDataOutputStream.java

/**
 * Write SHORT.
 * Uses little-endian byte order to write the short.
 *
 * @param s the short value to write
 * @throws IOException the io exception
 */
public void writeShort(short s) throws IOException {
    EndianUtils.writeSwappedShort(this, s);
}

From source file:org.apache.hadoop.hive.serde2.teradata.TeradataBinaryDataOutputStream.java

/**
 * Write VARBYTE(N).
 * The representation of VARBYTE in Teradata binary format is:
 * the first two bytes represent the length N of this varbyte field,
 * the next N bytes represent the content of this varbyte field.
 * For a null varbyte, the length is written as 0 and no content follows.
 *
 * @param writable the writable
 * @throws IOException the io exception
 */
public void writeVarByte(BytesWritable writable) throws IOException {
    if (writable == null) {
        EndianUtils.writeSwappedShort(this, (short) 0);
        return;
    }
    int varbyteLength = writable.getLength();
    EndianUtils.writeSwappedShort(this, (short) varbyteLength); // write the varbyte length
    write(writable.getBytes(), 0, varbyteLength); // write the varbyte content
}