Usage examples for org.apache.commons.io.EndianUtils.writeSwappedInteger
public static void writeSwappedInteger(OutputStream output, int value) throws IOException
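Before the sourced examples, a minimal self-contained sketch (class and variable names are illustrative) of what the call does: it writes the four bytes of the int least-significant byte first, i.e. in little-endian order.

import org.apache.commons.io.EndianUtils;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class WriteSwappedIntegerDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // 0x12345678 is written as 78 56 34 12 (least-significant byte first)
        EndianUtils.writeSwappedInteger(out, 0x12345678);
        for (byte b : out.toByteArray()) {
            System.out.printf("%02x ", b); // prints: 78 56 34 12
        }
    }
}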
From source file:org.apache.hadoop.hive.ql.io.TeradataBinaryFileOutputFormat.java
/**
 * Create the final output file and write it row by row. After each row is
 * appended, the configured record separator is appended.
 *
 * @param jc the job configuration file
 * @param outPath the final output file to be created
 * @param valueClass the value class used for creation
 * @param isCompressed whether the content is compressed or not
 * @param tableProperties the tableProperties of this file's corresponding table
 * @param progress progress used for status report
 * @return the RecordWriter
 */
@Override
public RecordWriter getHiveRecordWriter(JobConf jc, Path outPath, Class<? extends Writable> valueClass,
    boolean isCompressed, Properties tableProperties, Progressable progress) throws IOException {
  FileSystem fs = outPath.getFileSystem(jc);
  final OutputStream outStream =
      Utilities.createCompressedStream(jc, fs.create(outPath, progress), isCompressed);
  return new RecordWriter() {
    @Override
    public void write(Writable r) throws IOException {
      BytesWritable bw = (BytesWritable) r;
      int recordLength = bw.getLength();
      // Based on the configured row length, decide whether the length field is an int or a short
      String rowLength = tableProperties.getProperty(TeradataBinaryRecordReader.TD_ROW_LENGTH,
          TeradataBinaryRecordReader.DEFAULT_TD_ROW_LENGTH).toLowerCase();
      LOG.debug(format("The table property %s is: %s", TeradataBinaryRecordReader.TD_ROW_LENGTH, rowLength));
      if (TeradataBinaryRecordReader.TD_ROW_LENGTH_TO_BYTE_NUM.containsKey(rowLength)) {
        if (rowLength.equals(TeradataBinaryRecordReader.DEFAULT_TD_ROW_LENGTH)) {
          EndianUtils.writeSwappedShort(outStream, (short) recordLength); // write the length using little endian
        } else if (rowLength.equals(TeradataBinaryRecordReader.TD_ROW_LENGTH_1MB)) {
          EndianUtils.writeSwappedInteger(outStream, recordLength); // write the length using little endian
        }
      } else {
        throw new IllegalArgumentException(
            format("%s doesn't support the value %s, the supported values are %s",
                TeradataBinaryRecordReader.TD_ROW_LENGTH, rowLength,
                TeradataBinaryRecordReader.TD_ROW_LENGTH_TO_BYTE_NUM.keySet()));
      }
      outStream.write(bw.getBytes(), 0, bw.getLength()); // write the content (the content is in little endian)
      outStream.write(RECORD_END_BYTE); // write the record terminator
    }

    @Override
    public void close(boolean abort) throws IOException {
      outStream.close();
    }
  };
}
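The pattern above is a little-endian length prefix: a 2-byte prefix (writeSwappedShort) for the default row length and a 4-byte prefix (writeSwappedInteger) for the 1 MB row format. A stripped-down sketch of just that pattern, assuming a hypothetical writeRecord helper and an assumed RECORD_END_BYTE value (the real constant lives in TeradataBinaryFileOutputFormat):

import org.apache.commons.io.EndianUtils;
import java.io.IOException;
import java.io.OutputStream;

public class LengthPrefixSketch {
    // Assumed terminator value; the real RECORD_END_BYTE is defined in TeradataBinaryFileOutputFormat
    private static final byte RECORD_END_BYTE = 0x0A;

    // Hypothetical helper: prefix the payload with its length in little endian,
    // 2 bytes for the default (short) row format, 4 bytes for the 1 MB row format
    static void writeRecord(OutputStream out, byte[] payload, boolean use4ByteLength) throws IOException {
        if (use4ByteLength) {
            EndianUtils.writeSwappedInteger(out, payload.length);
        } else {
            EndianUtils.writeSwappedShort(out, (short) payload.length);
        }
        out.write(payload);
        out.write(RECORD_END_BYTE);
    }
}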
From source file:org.apache.hadoop.hive.serde2.teradata.TeradataBinaryDataOutputStream.java
/**
 * Write INT, using little-endian byte order.
 *
 * @param i the int value to write
 * @throws IOException the io exception
 */
public void writeInt(int i) throws IOException {
  EndianUtils.writeSwappedInteger(this, i);
}
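A round-trip sketch (names are illustrative) confirming that EndianUtils.readSwappedInteger recovers the value written by writeSwappedInteger:

import org.apache.commons.io.EndianUtils;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class SwappedIntRoundTrip {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        EndianUtils.writeSwappedInteger(out, 19111111); // little-endian on the stream
        int back = EndianUtils.readSwappedInteger(new ByteArrayInputStream(out.toByteArray()));
        System.out.println(back); // prints: 19111111
    }
}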
From source file:org.apache.hadoop.hive.serde2.teradata.TeradataBinaryDataOutputStream.java
/**
 * Write DATE, using little-endian byte order.
 * The representation of a date in Teradata binary format is a 4-byte int in
 * little endian: (YYYYMMDD - 19000000) -> D.
 * e.g. 1911-11-11 -> 19111111 -> 111111 -> 07 b2 01 00 in little endian.
 * A null date is padded with 0.
 *
 * @param date the date
 * @throws IOException the io exception
 */
public void writeDate(DateWritableV2 date) throws IOException {
  if (date == null) {
    EndianUtils.writeSwappedInteger(this, 0);
    return;
  }
  int toWrite = date.get().getYear() * 10000 + date.get().getMonth() * 100 + date.get().getDay() - 19000000;
  EndianUtils.writeSwappedInteger(this, toWrite);
}
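To make the encoding concrete, a sketch that reproduces the 1911-11-11 example from the Javadoc, using plain ints in place of Hive's DateWritableV2:

import org.apache.commons.io.EndianUtils;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class TeradataDateEncodingDemo {
    public static void main(String[] args) throws IOException {
        int year = 1911, month = 11, day = 11;
        int toWrite = year * 10000 + month * 100 + day - 19000000; // 19111111 - 19000000 = 111111
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        EndianUtils.writeSwappedInteger(out, toWrite); // 111111 = 0x0001b207
        for (byte b : out.toByteArray()) {
            System.out.printf("%02x ", b); // prints: 07 b2 01 00
        }
    }
}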