List of usage examples for the com.google.common.io LittleEndianDataOutputStream constructor
public LittleEndianDataOutputStream(OutputStream out)
From source file:org.linguafranca.pwdb.kdbx.stream_3_1.KdbxSerializer.java
/**
 * Writes the header's stream-start bytes (little-endian) to the supplied stream,
 * which is expected to already be the encrypted output stream.
 *
 * @param kdbxHeader            the header holding the stream-start bytes
 * @param encryptedOutputStream the (encrypted) stream to write to
 * @throws IOException on write failure
 */
private static void writeStartBytes(KdbxHeader kdbxHeader, OutputStream encryptedOutputStream) throws IOException {
    new LittleEndianDataOutputStream(encryptedOutputStream).write(kdbxHeader.getStreamStartBytes());
}
From source file:org.bimserver.serializers.binarygeometry.BinaryGeometryMessagingSerializer.java
/**
 * Serializes the geometry of the next product from {@code iterator} onto the stream as one
 * little-endian binary message, reusing previously sent geometry data where possible.
 *
 * Message layout (all little-endian): message-type byte, UTF class name, roid, oid,
 * padding to a 4-byte boundary, transformation matrix bytes, then either a reference to
 * already-sent geometry or the full indices/vertices/normals/materials payload.
 *
 * @param outputStream destination for the binary message
 * @return true while the iterator has more products to serialize
 * @throws IOException on write failure
 */
@SuppressWarnings("unchecked")
private boolean writeData(OutputStream outputStream) throws IOException {
    IdEObject ifcProduct = iterator.next();
    LittleEndianDataOutputStream dataOutputStream = new LittleEndianDataOutputStream(outputStream);
    GeometryInfo geometryInfo = (GeometryInfo) ifcProduct
            .eGet(ifcProduct.eClass().getEStructuralFeature("geometry"));
    // Products without geometry (or without a transformation) are silently skipped.
    if (geometryInfo != null && geometryInfo.getTransformation() != null) {
        GeometryData geometryData = geometryInfo.getData();
        // Indices are stored as raw bytes; 4 bytes per int index.
        int totalNrIndices = geometryData.getIndices().length / 4;
        // Maximum indices per message part; a multiple of 3 so triangles are never split.
        // Presumably sized for client-side buffer limits — TODO confirm.
        int maxIndexValues = 16389;
        // Non-null when this geometry data was already sent: either a Long (single previous
        // message) or a List<Long> (previously sent in parts).
        Object reuse = concreteGeometrySent.get(geometryData.getOid());
        MessageType messageType = null;
        if (reuse == null) {
            if (totalNrIndices > maxIndexValues) {
                messageType = MessageType.GEOMETRY_TRIANGLES_PARTED;
            } else {
                messageType = MessageType.GEOMETRY_TRIANGLES;
            }
        } else {
            if (reuse instanceof List) {
                messageType = MessageType.GEOMETRY_INSTANCE_PARTED;
            } else {
                messageType = MessageType.GEOMETRY_INSTANCE;
            }
        }
        dataOutputStream.writeByte(messageType.getId());
        dataOutputStream.writeUTF(ifcProduct.eClass().getName());
        Long roid = model.getPidRoidMap().get(ifcProduct.getPid());
        dataOutputStream.writeLong(roid);
        dataOutputStream.writeLong(ifcProduct.getOid());
        // BEWARE, ByteOrder is always LITTLE_ENDIAN, because that's what GPU's seem to
        // prefer; Java's ByteBuffer default is BIG_ENDIAN though!
        // Pad so the transformation matrix starts on a 4-byte boundary. The "3 +" accounts
        // for the message-type byte plus writeUTF's 2-byte length prefix.
        int skip = 4 - ((3 + ifcProduct.eClass().getName().getBytes(Charsets.UTF_8).length) % 4);
        if (skip != 0 && skip != 4) {
            dataOutputStream.write(new byte[skip]);
        }
        dataOutputStream.write(geometryInfo.getTransformation());
        if (reuse != null && reuse instanceof Long) {
            // Reused geometry, only send the id of the reused geometry data
            dataOutputStream.writeLong(geometryData.getOid());
        } else if (reuse != null && reuse instanceof List) {
            // Reused parted geometry: send the ids of all previously sent parts.
            List<Long> list = (List<Long>) reuse;
            dataOutputStream.writeInt(list.size());
            for (long coreId : list) {
                dataOutputStream.writeLong(coreId);
            }
        } else {
            if (totalNrIndices > maxIndexValues) {
                // Split geometry. This algorithm — for now — just throws away all the reuse
                // of vertices that might be there. Also, although usually the vertices
                // buffers are too large, this algorithm is based on the indices, so we
                // probably are not cramming as much data as we can in each "part", but
                // that's not really a problem I think.
                int nrParts = (totalNrIndices + maxIndexValues - 1) / maxIndexValues;
                dataOutputStream.writeInt(nrParts);
                Bounds objectBounds = new Bounds(geometryInfo.getMinBounds(), geometryInfo.getMaxBounds());
                objectBounds.writeTo(dataOutputStream);
                // View the raw byte arrays as typed little-endian buffers.
                ByteBuffer indicesBuffer = ByteBuffer.wrap(geometryData.getIndices());
                indicesBuffer.order(ByteOrder.LITTLE_ENDIAN);
                IntBuffer indicesIntBuffer = indicesBuffer.asIntBuffer();
                ByteBuffer vertexBuffer = ByteBuffer.wrap(geometryData.getVertices());
                vertexBuffer.order(ByteOrder.LITTLE_ENDIAN);
                FloatBuffer verticesFloatBuffer = vertexBuffer.asFloatBuffer();
                ByteBuffer normalsBuffer = ByteBuffer.wrap(geometryData.getNormals());
                normalsBuffer.order(ByteOrder.LITTLE_ENDIAN);
                FloatBuffer normalsFloatBuffer = normalsBuffer.asFloatBuffer();
                for (int part = 0; part < nrParts; part++) {
                    // Each part gets a unique (negative-going) id so the client can refer to it.
                    long splitId = splitCounter--;
                    dataOutputStream.writeLong(splitId);
                    int indexCounter = 0;
                    int upto = Math.min((part + 1) * maxIndexValues, totalNrIndices);
                    dataOutputStream.writeInt(upto - part * maxIndexValues);
                    // Vertices are duplicated per triangle below, so indices become a plain
                    // 0..n-1 sequence within this part.
                    for (int i = part * maxIndexValues; i < upto; i++) {
                        dataOutputStream.writeInt(indexCounter++);
                    }
                    // Vertex floats: 3 floats (x, y, z) per original index.
                    dataOutputStream.writeInt((upto - part * maxIndexValues) * 3);
                    for (int i = part * maxIndexValues; i < upto; i += 3) {
                        int oldIndex1 = indicesIntBuffer.get(i);
                        int oldIndex2 = indicesIntBuffer.get(i + 1);
                        int oldIndex3 = indicesIntBuffer.get(i + 2);
                        dataOutputStream.writeFloat(verticesFloatBuffer.get(oldIndex1 * 3));
                        dataOutputStream.writeFloat(verticesFloatBuffer.get(oldIndex1 * 3 + 1));
                        dataOutputStream.writeFloat(verticesFloatBuffer.get(oldIndex1 * 3 + 2));
                        dataOutputStream.writeFloat(verticesFloatBuffer.get(oldIndex2 * 3));
                        dataOutputStream.writeFloat(verticesFloatBuffer.get(oldIndex2 * 3 + 1));
                        dataOutputStream.writeFloat(verticesFloatBuffer.get(oldIndex2 * 3 + 2));
                        dataOutputStream.writeFloat(verticesFloatBuffer.get(oldIndex3 * 3));
                        dataOutputStream.writeFloat(verticesFloatBuffer.get(oldIndex3 * 3 + 1));
                        dataOutputStream.writeFloat(verticesFloatBuffer.get(oldIndex3 * 3 + 2));
                    }
                    // Normal floats: same de-indexed layout as the vertices above.
                    dataOutputStream.writeInt((upto - part * maxIndexValues) * 3);
                    for (int i = part * maxIndexValues; i < upto; i += 3) {
                        int oldIndex1 = indicesIntBuffer.get(i);
                        int oldIndex2 = indicesIntBuffer.get(i + 1);
                        int oldIndex3 = indicesIntBuffer.get(i + 2);
                        dataOutputStream.writeFloat(normalsFloatBuffer.get(oldIndex1 * 3));
                        dataOutputStream.writeFloat(normalsFloatBuffer.get(oldIndex1 * 3 + 1));
                        dataOutputStream.writeFloat(normalsFloatBuffer.get(oldIndex1 * 3 + 2));
                        dataOutputStream.writeFloat(normalsFloatBuffer.get(oldIndex2 * 3));
                        dataOutputStream.writeFloat(normalsFloatBuffer.get(oldIndex2 * 3 + 1));
                        dataOutputStream.writeFloat(normalsFloatBuffer.get(oldIndex2 * 3 + 2));
                        dataOutputStream.writeFloat(normalsFloatBuffer.get(oldIndex3 * 3));
                        dataOutputStream.writeFloat(normalsFloatBuffer.get(oldIndex3 * 3 + 1));
                        dataOutputStream.writeFloat(normalsFloatBuffer.get(oldIndex3 * 3 + 2));
                    }
                    // No materials sent for parted geometry.
                    dataOutputStream.writeInt(0);
                }
            } else {
                // Unparted, first-time geometry: bounds, id, then raw buffers with int counts.
                Bounds objectBounds = new Bounds(geometryInfo.getMinBounds(), geometryInfo.getMaxBounds());
                objectBounds.writeTo(dataOutputStream);
                dataOutputStream.writeLong(geometryData.getOid());
                ByteBuffer indicesBuffer = ByteBuffer.wrap(geometryData.getIndices());
                dataOutputStream.writeInt(indicesBuffer.capacity() / 4);
                dataOutputStream.write(indicesBuffer.array());
                ByteBuffer vertexByteBuffer = ByteBuffer.wrap(geometryData.getVertices());
                dataOutputStream.writeInt(vertexByteBuffer.capacity() / 4);
                dataOutputStream.write(vertexByteBuffer.array());
                ByteBuffer normalsBuffer = ByteBuffer.wrap(geometryData.getNormals());
                dataOutputStream.writeInt(normalsBuffer.capacity() / 4);
                dataOutputStream.write(normalsBuffer.array());
                // Only when materials are used we send them
                if (geometryData.getMaterials() != null) {
                    ByteBuffer materialsByteBuffer = ByteBuffer.wrap(geometryData.getMaterials());
                    dataOutputStream.writeInt(materialsByteBuffer.capacity() / 4);
                    dataOutputStream.write(materialsByteBuffer.array());
                } else {
                    // No materials used
                    dataOutputStream.writeInt(0);
                }
                // Remember that this geometry data was sent so later products can reference it.
                List<Long> arrayList = new ArrayList<Long>();
                arrayList.add(geometryData.getOid());
                concreteGeometrySent.put(geometryData.getOid(), arrayList);
            }
        }
    }
    return iterator.hasNext();
}
From source file:com.google.devrel.gmscore.tools.apk.arsc.Chunk.java
/** * Converts this chunk into an array of bytes representation. Normally you will not need to * override this method unless your header changes based on the contents / size of the payload. *///w w w.j a v a 2s . c o m @Override public final byte[] toByteArray(boolean shrink) throws IOException { ByteBuffer header = ByteBuffer.allocate(getHeaderSize()).order(ByteOrder.LITTLE_ENDIAN); writeHeader(header, 0); // The chunk size isn't known yet. This will be filled in later. ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (LittleEndianDataOutputStream payload = new LittleEndianDataOutputStream(baos)) { writePayload(payload, header, shrink); } byte[] payloadBytes = baos.toByteArray(); int chunkSize = getHeaderSize() + payloadBytes.length; header.putInt(CHUNK_SIZE_OFFSET, chunkSize); // Combine results ByteBuffer result = ByteBuffer.allocate(chunkSize).order(ByteOrder.LITTLE_ENDIAN); result.put(header.array()); result.put(payloadBytes); return result.array(); }
From source file:com.google.devrel.gmscore.tools.apk.arsc.TypeChunk.java
@Override protected void writePayload(DataOutput output, ByteBuffer header, boolean shrink) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); ByteBuffer offsets = ByteBuffer.allocate(getOffsetSize()).order(ByteOrder.LITTLE_ENDIAN); try (LittleEndianDataOutputStream payload = new LittleEndianDataOutputStream(baos)) { writeEntries(payload, offsets, shrink); }//from ww w . j a v a2 s. c o m output.write(offsets.array()); output.write(baos.toByteArray()); }
From source file:com.google.devrel.gmscore.tools.apk.arsc.StringPoolChunk.java
@Override protected void writePayload(DataOutput output, ByteBuffer header, boolean shrink) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); int stringOffset = 0; ByteBuffer offsets = ByteBuffer.allocate(getOffsetSize()); offsets.order(ByteOrder.LITTLE_ENDIAN); // Write to a temporary payload so we can rearrange this and put the offsets first try (LittleEndianDataOutputStream payload = new LittleEndianDataOutputStream(baos)) { stringOffset = writeStrings(payload, offsets, shrink); writeStyles(payload, offsets, shrink); }/*ww w . j ava 2 s .c om*/ output.write(offsets.array()); output.write(baos.toByteArray()); if (!styles.isEmpty()) { header.putInt(STYLE_START_OFFSET, getHeaderSize() + getOffsetSize() + stringOffset); } }
From source file:org.linguafranca.pwdb.kdbx.stream_3_1.KdbxSerializer.java
/**
 * Write a KdbxHeader to the output stream supplied. The header is updated with the
 * message digest of the written stream.
 *
 * All multi-byte values are little-endian. After the magic numbers and file version,
 * the header is a sequence of records: a 1-byte type, a 2-byte payload length, then
 * the payload, terminated by an END record of length 0.
 *
 * @param kdbxHeader the header to write and update
 * @param outputStream the output stream
 * @throws IOException on error
 */
public static void writeKdbxHeader(KdbxHeader kdbxHeader, OutputStream outputStream) throws IOException {
    MessageDigest messageDigest = Encryption.getMessageDigestInstance();
    // Tee everything written through a digest so the header hash can be captured at the end.
    DigestOutputStream digestOutputStream = new DigestOutputStream(outputStream, messageDigest);
    LittleEndianDataOutputStream ledos = new LittleEndianDataOutputStream(digestOutputStream);

    // write the magic number
    ledos.writeInt(SIG1);
    ledos.writeInt(SIG2);
    // write a file version
    ledos.writeInt(FILE_VERSION_32);

    // Cipher UUID: 16 bytes. ByteBuffer defaults to big-endian, which is the wire format
    // for the UUID: most-significant half first (relative put), least-significant half
    // written at absolute offset 8.
    ledos.writeByte(HeaderType.CIPHER_ID);
    ledos.writeShort(16);
    byte[] b = new byte[16];
    ByteBuffer bb = ByteBuffer.wrap(b);
    bb.putLong(kdbxHeader.getCipherUuid().getMostSignificantBits());
    bb.putLong(8, kdbxHeader.getCipherUuid().getLeastSignificantBits());
    ledos.write(b);

    // Compression flags: serialized as the enum's ordinal in a 4-byte int.
    ledos.writeByte(HeaderType.COMPRESSION_FLAGS);
    ledos.writeShort(4);
    ledos.writeInt(kdbxHeader.getCompressionFlags().ordinal());

    // Variable-length byte-array fields: length prefix, then the raw bytes.
    ledos.writeByte(HeaderType.MASTER_SEED);
    ledos.writeShort(kdbxHeader.getMasterSeed().length);
    ledos.write(kdbxHeader.getMasterSeed());

    ledos.writeByte(HeaderType.TRANSFORM_SEED);
    ledos.writeShort(kdbxHeader.getTransformSeed().length);
    ledos.write(kdbxHeader.getTransformSeed());

    // Transform rounds: fixed 8-byte long.
    ledos.writeByte(HeaderType.TRANSFORM_ROUNDS);
    ledos.writeShort(8);
    ledos.writeLong(kdbxHeader.getTransformRounds());

    ledos.writeByte(HeaderType.ENCRYPTION_IV);
    ledos.writeShort(kdbxHeader.getEncryptionIv().length);
    ledos.write(kdbxHeader.getEncryptionIv());

    ledos.writeByte(HeaderType.PROTECTED_STREAM_KEY);
    ledos.writeShort(kdbxHeader.getProtectedStreamKey().length);
    ledos.write(kdbxHeader.getProtectedStreamKey());

    ledos.writeByte(HeaderType.STREAM_START_BYTES);
    ledos.writeShort(kdbxHeader.getStreamStartBytes().length);
    ledos.write(kdbxHeader.getStreamStartBytes());

    // Inner random stream algorithm: enum ordinal in a 4-byte int.
    ledos.writeByte(HeaderType.INNER_RANDOM_STREAM_ID);
    ledos.writeShort(4);
    ledos.writeInt(kdbxHeader.getProtectedStreamAlgorithm().ordinal());

    // Terminator record: END type with zero-length payload.
    ledos.writeByte(HeaderType.END);
    ledos.writeShort(0);

    // Record the digest of everything written above back onto the header object.
    MessageDigest digest = digestOutputStream.getMessageDigest();
    kdbxHeader.setHeaderHash(digest.digest());
}