Usage examples for the org.apache.hadoop.io.DataOutputBuffer constructor
public DataOutputBuffer()
From source file:TestCodec.java
License:Open Source License
public static void main(String[] args) throws IOException { Configuration conf = new Configuration(); DefaultCodec codec = new DefaultCodec(); codec.setConf(conf);//w w w .j a va2 s . co m DataOutputBuffer chunksWriteBuffer = new DataOutputBuffer(); CompressionOutputStream compressionOutputStream = codec.createOutputStream(chunksWriteBuffer); DataInputBuffer chunkReadBuffer = new DataInputBuffer(); CompressionInputStream compressionInputStream = codec.createInputStream(chunkReadBuffer); String str = "laksjldfkjalskdjfl;aksjdflkajsldkfjalksjdflkajlsdkfjlaksjdflka"; compressionOutputStream.write(str.getBytes()); compressionOutputStream.finish(); byte[] data = chunksWriteBuffer.getData(); System.out.println(str.length()); System.out.println(chunksWriteBuffer.getLength()); chunkReadBuffer.reset(data, chunksWriteBuffer.getLength()); DataOutputBuffer dob = new DataOutputBuffer(); IOUtils.copyBytes(compressionInputStream, dob, conf); System.out.println(dob.getData()); }
From source file:TestIFieldMap.java
License:Open Source License
public void testPersistable() throws IOException { IFieldMap fieldMap = new IFieldMap(); fieldMap.addFieldType(new IFieldType.IFieldByteType()); fieldMap.addFieldType(new IFieldType.IFieldShortType()); fieldMap.addFieldType(new IFieldType.IFieldIntType()); fieldMap.addFieldType(new IFieldType.IFieldLongType()); fieldMap.addFieldType(new IFieldType.IFieldFloatType()); fieldMap.addFieldType(new IFieldType.IFieldDoubleType()); DataOutputBuffer dob = new DataOutputBuffer(); fieldMap.persistent(dob);//from www.j a va 2 s . com byte[] data = dob.getData(); DataInputBuffer dib = new DataInputBuffer(); dib.reset(data, data.length); IFieldMap fieldMap2 = new IFieldMap(); fieldMap2.unpersistent(dib); System.out.println(); assertEquals(fieldMap.fieldtypes().get((short) 0).getType(), fieldMap2.fieldtypes().get((short) 0).getType()); assertEquals(fieldMap.fieldtypes().get((short) 1).getType(), fieldMap2.fieldtypes().get((short) 1).getType()); assertEquals(fieldMap.fieldtypes().get((short) 2).getType(), fieldMap2.fieldtypes().get((short) 2).getType()); assertEquals(fieldMap.fieldtypes().get((short) 3).getType(), fieldMap2.fieldtypes().get((short) 3).getType()); assertEquals(fieldMap.fieldtypes().get((short) 4).getType(), fieldMap2.fieldtypes().get((short) 4).getType()); assertEquals(fieldMap.fieldtypes().get((short) 5).getType(), fieldMap2.fieldtypes().get((short) 5).getType()); }
From source file:TestIRecord.java
License:Open Source License
/**
 * Serializes an IRecord with a double and an int field, reads it back,
 * and checks both field values compare equal to the originals.
 *
 * @throws IOException if serialization fails
 */
public void testPersistable() throws IOException {
    // Field layout: slot 0 is a double, slot 1 is an int.
    TreeMap<Short, IFieldType> fieldtypes = new TreeMap<Short, IFieldType>();
    fieldtypes.put((short) 0, new IFieldType(ConstVar.FieldType_Double));
    fieldtypes.put((short) 1, new IFieldType(ConstVar.FieldType_Int));

    IRecord original = new IRecord();
    original.setFieldTypes(fieldtypes);
    original.addFieldValue(new IFieldValue((double) 4, (short) 0));
    original.addFieldValue(new IFieldValue(4, (short) 1));

    // Write the record into an in-memory buffer.
    DataOutputBuffer out = new DataOutputBuffer();
    original.persistent(out);
    byte[] serialized = out.getData();

    // Read it back through a matching field layout.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(serialized, serialized.length);
    IRecord restored = new IRecord();
    restored.setFieldTypes(fieldtypes);
    restored.unpersistent(in);

    assertTrue(original.fieldValues().get((short) 0).compareTo(restored.fieldValues().get((short) 0)) == 0);
    assertTrue(original.fieldValues().get((short) 1).compareTo(restored.fieldValues().get((short) 1)) == 0);
}
From source file:TestIHead.java
License:Open Source License
/**
 * Populates every attribute of an IHead (flags, version, magic, primary
 * index, user-defined info, field map), round-trips it through
 * persistent()/unpersistent(), and verifies each scalar attribute.
 *
 * @throws IOException if serialization fails
 */
public void testPersistable() throws IOException {
    // Every scalar attribute gets a distinct non-default value so a missed
    // field in (un)persistent would be caught below.
    IHead original = new IHead();
    original.setCompress((byte) 1);
    original.setCompressStyle((byte) 1);
    original.setEncode((byte) 1);
    original.setEncodeStyle((byte) 1);
    original.setLineindex((byte) 1);
    original.setVar((byte) 1);
    original.setVer((short) 1);
    original.setMagic(1);
    original.setPrimaryIndex((short) 1);

    IUserDefinedHeadInfo userInfo = new IUserDefinedHeadInfo();
    userInfo.addInfo(0, "aaa");
    userInfo.addInfo(1, "bbb");
    original.setUserDefinedInfo(userInfo);

    IFieldMap types = new IFieldMap();
    types.addFieldType(new IFieldType.IFieldByteType());
    types.addFieldType(new IFieldType.IFieldShortType());
    types.addFieldType(new IFieldType.IFieldIntType());
    types.addFieldType(new IFieldType.IFieldLongType());
    types.addFieldType(new IFieldType.IFieldFloatType());
    types.addFieldType(new IFieldType.IFieldDoubleType());
    original.setFieldMap(types);

    // Serialize, then deserialize into a fresh head.
    DataOutputBuffer out = new DataOutputBuffer();
    original.persistent(out);
    byte[] serialized = out.getData();
    DataInputBuffer in = new DataInputBuffer();
    in.reset(serialized, serialized.length);
    IHead restored = new IHead();
    restored.unpersistent(in);

    assertEquals(restored.getMagic(), original.getMagic());
    assertEquals(restored.getCompress(), original.getCompress());
    assertEquals(restored.getCompressStyle(), original.getCompressStyle());
    assertEquals(restored.getEncode(), original.getEncode());
    assertEquals(restored.getEncodeStyle(), original.getEncodeStyle());
    assertEquals(restored.getPrimaryIndex(), original.getPrimaryIndex());
    assertEquals(restored.getVar(), original.getVar());
    assertEquals(restored.getVer(), original.getVer());
    assertEquals(restored.lineindex(), original.lineindex());
}
From source file:TestIKeyIndex.java
License:Open Source License
public void testPersistable() throws IOException { IFieldValue beginkey = new IFieldValue(100); IFieldValue endkey = new IFieldValue(1000); IKeyIndex iki = new IKeyIndex(beginkey, endkey, 100); DataOutputBuffer dob = new DataOutputBuffer(); iki.persistent(dob);//from w ww.jav a 2s.com byte[] data = dob.getData(); DataInputBuffer dib = new DataInputBuffer(); dib.reset(data, data.length); IKeyIndex iki2 = new IKeyIndex(beginkey.fieldType()); iki2.unpersistent(dib); assertTrue(iki.beginkey().compareTo(iki2.beginkey()) == 0); assertTrue(iki.endkey().compareTo(iki2.endkey()) == 0); assertEquals(iki.recnum(), iki2.recnum()); }
From source file:TestIUnitIndex.java
License:Open Source License
/**
 * Builds an IUnitIndex from five generated unit infos, checks line-number to
 * unit-id mapping at the unit boundaries, then round-trips the index through
 * persistent()/unpersistent() and compares offsets and key/line indexes.
 *
 * @throws IOException if serialization fails
 */
public void testIUnitIndexWrite() throws IOException {
    IFileInfo fileInfo = TestUtil.genfileinfo(false, 2);
    IUnitIndex original = new IUnitIndex(fileInfo);
    // Five units of 5 records each, all in segment 0.
    for (int unit = 0; unit < 5; unit++) {
        original.update(TestUtil.genunitinfo(fileInfo, 0, unit, 5));
    }
    System.out.println(original.getUnitnum());

    // Boundary checks: lines 0-4 map to unit 0, 5-9 to unit 1, etc.
    assertEquals(0, original.getUnitid(0));
    assertEquals(0, original.getUnitid(4));
    assertEquals(1, original.getUnitid(5));
    assertEquals(1, original.getUnitid(9));
    assertEquals(4, original.getUnitid(24));
    assertEquals(5, original.getUnitid(25));

    // Serialize and read back into a fresh index over the same file info.
    DataOutputBuffer out = new DataOutputBuffer();
    original.persistent(out);
    byte[] serialized = out.getData();
    DataInputBuffer in = new DataInputBuffer();
    in.reset(serialized, serialized.length);
    IUnitIndex restored = new IUnitIndex(fileInfo);
    restored.unpersistent(in);

    for (int unit = 0; unit < original.getUnitnum(); unit++) {
        assertEquals(original.getUnitOffset(unit), restored.getUnitOffset(unit));
        assertTrue(original.getKeyIndex(unit).compareTo(restored.getKeyIndex(unit)) == 0);
        assertTrue(original.getLineIndex(unit).compareTo(restored.getLineIndex(unit)) == 0);
    }
}
From source file:alluxio.yarn.ApplicationMaster.java
License:Apache License
/** * Starts the application master.// w w w . j ava 2 s . c om * * @throws IOException if registering the application master fails due to an IO error * @throws YarnException if registering the application master fails due to an internal Yarn error */ public void start() throws IOException, YarnException { if (UserGroupInformation.isSecurityEnabled()) { Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials(); DataOutputBuffer credentialsBuffer = new DataOutputBuffer(); credentials.writeTokenStorageToStream(credentialsBuffer); // Now remove the AM -> RM token so that containers cannot access it. Iterator<Token<?>> iter = credentials.getAllTokens().iterator(); while (iter.hasNext()) { Token<?> token = iter.next(); if (token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) { iter.remove(); } } mAllTokens = ByteBuffer.wrap(credentialsBuffer.getData(), 0, credentialsBuffer.getLength()); } mNMClient.init(mYarnConf); mNMClient.start(); mRMClient.init(mYarnConf); mRMClient.start(); mYarnClient.init(mYarnConf); mYarnClient.start(); // Register with ResourceManager String hostname = NetworkAddressUtils.getLocalHostName(); mRMClient.registerApplicationMaster(hostname, 0 /* port */, "" /* tracking url */); LOG.info("ApplicationMaster registered"); }
From source file:alluxio.yarn.Client.java
License:Apache License
/**
 * Prepares the ApplicationMaster's container launch context: the launch
 * command, the local resources (Alluxio tarball, setup script, jar), the
 * environment (classpath), and — in secure mode — the delegation tokens the
 * AM will run with.
 *
 * @throws IOException if token setup or resource localization fails
 * @throws YarnException if a YARN client call fails
 */
private void setupContainerLaunchContext() throws IOException, YarnException {
    Map<String, String> applicationMasterArgs = ImmutableMap.<String, String>of("-num_workers",
            Integer.toString(mNumWorkers), "-master_address", mMasterAddress, "-resource_path", mResourcePath);
    final String amCommand = YarnUtils.buildCommand(YarnContainerType.APPLICATION_MASTER, applicationMasterArgs);
    System.out.println("ApplicationMaster command: " + amCommand);
    mAmContainer.setCommands(Collections.singletonList(amCommand));
    // Setup local resources: the three artifacts below are fetched from
    // mResourcePath and localized into the AM container's working directory.
    Map<String, LocalResource> localResources = new HashMap<String, LocalResource>();
    localResources.put("alluxio.tar.gz",
            YarnUtils.createLocalResourceOfFile(mYarnConf, mResourcePath + "/alluxio.tar.gz"));
    localResources.put("alluxio-yarn-setup.sh",
            YarnUtils.createLocalResourceOfFile(mYarnConf, mResourcePath + "/alluxio-yarn-setup.sh"));
    localResources.put("alluxio.jar",
            YarnUtils.createLocalResourceOfFile(mYarnConf, mResourcePath + "/alluxio.jar"));
    mAmContainer.setLocalResources(localResources);
    // Setup CLASSPATH for ApplicationMaster
    Map<String, String> appMasterEnv = new HashMap<String, String>();
    setupAppMasterEnv(appMasterEnv);
    mAmContainer.setEnvironment(appMasterEnv);
    // Set up security tokens for launching our ApplicationMaster container.
    if (UserGroupInformation.isSecurityEnabled()) {
        Credentials credentials = new Credentials();
        // The RM principal must renew the HDFS delegation tokens on our behalf.
        String tokenRenewer = mYarnConf.get(YarnConfiguration.RM_PRINCIPAL);
        if (tokenRenewer == null || tokenRenewer.length() == 0) {
            throw new IOException("Can't get Master Kerberos principal for the RM to use as renewer");
        }
        org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.get(mYarnConf);
        // getting tokens for the default file-system.
        final Token<?>[] tokens = fs.addDelegationTokens(tokenRenewer, credentials);
        if (tokens != null) {
            for (Token<?> token : tokens) {
                LOG.info("Got dt for " + fs.getUri() + "; " + token);
            }
        }
        // getting yarn resource manager token: converted from the YARN proto
        // form to a Hadoop token addressed at the RM's delegation-token service.
        org.apache.hadoop.conf.Configuration config = mYarnClient.getConfig();
        Token<TokenIdentifier> token = ConverterUtils.convertFromYarn(
                mYarnClient.getRMDelegationToken(new org.apache.hadoop.io.Text(tokenRenewer)),
                ClientRMProxy.getRMDelegationTokenService(config));
        LOG.info("Added RM delegation token: " + token);
        credentials.addToken(token.getService(), token);
        // Serialize all collected tokens; only the first getLength() bytes of
        // the backing array are valid.
        DataOutputBuffer dob = new DataOutputBuffer();
        credentials.writeTokenStorageToStream(dob);
        ByteBuffer buffer = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
        mAmContainer.setTokens(buffer);
    }
}
From source file:cn.ac.ncic.mastiff.io.coding.DeltaBinaryArrayZigZarByteReader.java
License:Apache License
public byte[] CompressensureDecompressed() throws IOException { FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray(); dynamicBuffer.add(inBuf.getData(), 0, inBuf.getLength()); FlexibleEncoding.Parquet.DeltaByteArrayReader reader = new FlexibleEncoding.Parquet.DeltaByteArrayReader(); ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size()); dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size()); byteBuf.flip();//from w w w.j a v a2s .c o m reader.initFromPage(numPairs, byteBuf.array(), 0); DataOutputBuffer decoding = new DataOutputBuffer(); decoding.writeInt(decompressedSize); decoding.writeInt(numPairs); decoding.writeInt(startPos); for (int i = 0; i < numPairs; i++) { byte tmp = Byte.parseByte(reader.readBytes().toStringUsingUTF8()); decoding.writeByte(tmp); } byteBuf.clear(); inBuf.close(); return decoding.getData(); }
From source file:cn.ac.ncic.mastiff.io.coding.DeltaBinaryArrayZigZarByteReader.java
License:Apache License
/**
 * Decodes the input buffer — skipping its 12-byte header — with the Parquet
 * delta-byte-array decoder and re-emits a fresh page: a new 12-byte header
 * (decompressed size, pair count, start position) followed by one byte per
 * decoded pair.
 *
 * @return the backing array of the rebuilt page buffer
 * @throws IOException if decoding or writing fails
 */
@Override
public byte[] ensureDecompressed() throws IOException {
    // Copy the encoded payload (everything after the 12-byte header) into a
    // growable ORC byte array.
    FlexibleEncoding.ORC.DynamicByteArray raw = new FlexibleEncoding.ORC.DynamicByteArray();
    raw.add(inBuf.getData(), 12, inBuf.getLength() - 12);

    // Expose the bytes to the delta decoder through a flipped ByteBuffer.
    ByteBuffer page = ByteBuffer.allocate(raw.size());
    raw.setByteBuffer(page, 0, raw.size());
    page.flip();
    FlexibleEncoding.Parquet.DeltaByteArrayReader decoder = new FlexibleEncoding.Parquet.DeltaByteArrayReader();
    decoder.initFromPage(numPairs, page.array(), 0);

    // Rebuild the page: header first, then each decoded byte value.
    DataOutputBuffer restored = new DataOutputBuffer();
    restored.writeInt(decompressedSize);
    restored.writeInt(numPairs);
    restored.writeInt(startPos);
    for (int pair = 0; pair < numPairs; pair++) {
        restored.writeByte(Byte.parseByte(decoder.readBytes().toStringUsingUTF8()));
    }

    page.clear();
    inBuf.close();
    return restored.getData();
}