Example usage for com.google.common.io ByteArrayDataOutput write

List of usage examples for com.google.common.io ByteArrayDataOutput write

Introduction

On this page you can find example usages of com.google.common.io ByteArrayDataOutput write.

Prototype

@Override
void write(byte b[]);
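
Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the projects listed on this page) of the typical pattern: obtain a ByteArrayDataOutput from ByteStreams.newDataOutput(), append a raw byte array with write(byte[]) alongside other fields, and collect the result with toByteArray().

import com.google.common.io.ByteArrayDataOutput;
import com.google.common.io.ByteStreams;

public class WriteExample {
    // Length-prefix a payload and append the raw bytes with write(byte[]).
    public static byte[] serialize(byte[] payload) {
        ByteArrayDataOutput out = ByteStreams.newDataOutput();
        out.writeInt(payload.length); // length prefix
        out.write(payload);           // the raw bytes
        return out.toByteArray();     // the accumulated buffer
    }
}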

Usage

From source file:shadowmage.ancient_framework.common.network.Packet00MultiPart.java

@Override
public void writeDataToStream(ByteArrayDataOutput data) {
    data.writeInt(uniquePacketID);
    data.writeInt(sourcePacketType);
    data.writeInt(chunkNumber);
    data.writeInt(totalChunks);
    data.writeInt(startIndex);
    data.writeInt(chunkLength);
    data.writeInt(totalLength);
    data.write(datas);
}

From source file:nxminetilities.network.MultilightToolPacket.java

@Override
public void write(ByteArrayDataOutput out) {
    out.writeShort(is.itemID);
    out.writeByte(is.stackSize);
    out.writeShort(is.getItemDamage());

    try {
        byte[] abyte = CompressedStreamTools.compress(is.stackTagCompound);
        out.writeShort((short) abyte.length);
        out.write(abyte);
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:garmintools.adapters.garmin.MetadataGarminAdapter.java

private void writeMetadata(Proto.Metadata data, ByteArrayDataOutput output) {
    output.writeShort(data.getCycleNumber());
    writeDate(output, data.getEffectiveDate());
    writeDate(output, data.getExpiresDate());
    writeDate(output, data.getAeronauticalDataSnapshotDate());
    output.write(data.getUnknownData1());
    output.writeShort(data.getUnknownData2());
    output.write(data.getUnknownData3());
    output.write(SIMPLE_ENCODING.encode(StringUtil.pad(data.getPartNumber(), PART_NUMBER_LENGTH)));
    writeStringAndPadWithSpace(output, StringUtil.pad(data.getCoverageRegion(), COVERAGE_REGION_PAD_LENGTH),
            COVERAGE_REGION_LENGTH);
    writeStringAndPadWithSpace(output, data.getCopyrightLine1(), COPYRIGHT_LINE_LENGTH);
    writeStringAndPadWithSpace(output, data.getCopyrightLine2(), COPYRIGHT_LINE_LENGTH);
    output.write(data.getUnknownData4());
    for (int i = 0; i < TRAILING_ZERO_BYTE_LENGTH; ++i) {
        output.write(0);
    }
}

From source file:pw.simplyintricate.bitcoin.models.datastructures.NetworkAddress.java

public byte[] toByteArray() {
    ByteArrayDataOutput writer = ByteStreams.newDataOutput();

    //writer.writeInt(EndianUtils.swapInteger(time));
    writer.writeLong(EndianUtils.swapLong(services.longValue()));
    // write the two ipv4 to ipv6 pads
    for (int i = 0; i < 10; i++) {
        writer.writeByte(0);
    }
    writer.writeByte(0xFF);
    writer.writeByte(0xFF);
    writer.write(ipAddress);
    writer.writeShort(port);

    return writer.toByteArray();
}

From source file:com.volumetricpixels.rockyplugin.chunk.ChunkCacheHandler.java

/**
 * @param playerName
 * @param buffer
 * @throws IOException
 */
public static byte[] handleCompression(String playerName, byte[] buffer) throws IOException {
    Set<Long> playerCache = cache.getPlayerCache(playerName);

    // Each chunk sent is handled as:
    // - BlockType: Whole byte per block
    // - BlockMetaData: Half byte per block
    // - BlockLight: Half byte per block
    // - SkyLight: Half byte per block (Only if handleLight is TRUE)
    // - AddArray: Half byte per block (Only if extraMask has the bit,
    // support for FORGE)
    // - BiomeArray: Whole byte per XZ coordinate (Only if isContinuous is
    // TRUE)
    int chunkLen = buffer.length / ChunkCache.CHUNK_PARTITION_SIZE;
    if ((chunkLen & 0x7FF) != 0) {
        chunkLen++;
    }

    ByteArrayDataOutput out = ByteStreams.newDataOutput();
    byte[] chunkData = new byte[ChunkCache.CHUNK_PARTITION_SIZE];

    // Write the magic number
    out.writeInt(ChunkCache.CHUNK_MAGIC_NUMBER);

    // Write the number of sections inside the packet
    out.writeShort(chunkLen);

    // For each CHUNK_PARTITION_SIZE block, check the hash of it.
    for (int i = 0; i < chunkLen; i++) {
        int index = i * ChunkCache.CHUNK_PARTITION_SIZE;
        int length = ChunkCache.CHUNK_PARTITION_SIZE;

        if (index + ChunkCache.CHUNK_PARTITION_SIZE > buffer.length) {
            length = buffer.length - index;
        }

        // Calculate the hash of the current block
        System.arraycopy(buffer, index, chunkData, 0x0000, length);
        long hash = ChunkCache.calculateHash(chunkData);

        // Write the hash into the packet
        out.writeLong(hash);

        // Add the hash into the player cache
        boolean isPresent = playerCache.add(hash);

        // Writes the length of the section
        out.writeShort(isPresent ? length : 0);

        // Check for the chunk with the player cache
        if (isPresent) {
            // Writes the data of the section
            out.write(chunkData);
            statCacheMiss.incrementAndGet();
        } else {
            statCacheHit.incrementAndGet();
        }
        statTotal.incrementAndGet();
    }
    return out.toByteArray();
}
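
The method above only shows the sending side. The inverse operation is implied by the format it writes (magic number, section count, then a hash, a length, and optional data per section); the sketch below reconstructs a buffer from such a packet, assuming a simple Map-based local cache as a stand-in for the plugin's actual client-side cache, which is not shown in this excerpt.

import com.google.common.io.ByteArrayDataInput;
import com.google.common.io.ByteArrayDataOutput;
import com.google.common.io.ByteStreams;
import java.util.Map;

// Hypothetical sketch of the inverse of handleCompression(): rebuild the chunk
// buffer from a cached-section packet. The Map-based cache is a stand-in.
public static byte[] expand(byte[] packet, Map<Long, byte[]> localCache) {
    ByteArrayDataInput in = ByteStreams.newDataInput(packet);
    in.readInt();                  // magic number
    int sections = in.readShort(); // number of sections in the packet

    ByteArrayDataOutput out = ByteStreams.newDataOutput();
    for (int i = 0; i < sections; i++) {
        long hash = in.readLong();
        int length = in.readShort();
        byte[] section;
        if (length > 0) {
            // The full section was sent: read it and remember it for next time.
            section = new byte[length];
            in.readFully(section);
            localCache.put(hash, section);
        } else {
            // Zero length means the sender assumed we already hold this section.
            section = localCache.get(hash);
        }
        out.write(section);
    }
    return out.toByteArray();
}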

From source file:de.nx42.maps4cim.header.CustomHeader.java

@Override
public byte[] generateHeader() throws IOException {

    // first part
    ByteArrayDataOutput outP1 = ByteStreams.newDataOutput(4096);

    // static intro
    outP1.write(intro);
    outP1.write(formatHeaderString(staticString01));
    // gap of 4 bytes
    outP1.write(new byte[4]);

    // dates and timestamps
    outP1.writeLong(DateUtils.dateToTicks(unusedDate1));
    outP1.writeLong(DateUtils.dateToTicks(unusedDate2));
    outP1.writeLong(DateUtils.dateToTicks(lastSaved));
    outP1.writeLong(DateUtils.dateToTicks(mapCreated));
    outP1.writeLong(workTime1);
    outP1.writeLong(workTime2);

    // static data
    outP1.write(staticBinary01);
    outP1.write(formatHeaderString(staticString02));

    // map name
    outP1.write(formatHeaderString(mapName));
    if (buildingSet == BuildingSet.EUROPEAN) {
        outP1.write(formatHeaderString(staticString02eur01));
    }

    // map overview image
    outP1.write(pngLength);
    outP1.write(png);

    // static data
    outP1.write(staticBinary02);
    if (buildingSet == BuildingSet.EUROPEAN) {
        outP1.write(formatHeaderString(staticString02eur02));
        outP1.write(staticBinary02eur);
    }
    outP1.write(formatHeaderString(staticString03));
    outP1.write(new byte[34]);
    outP1.write(staticBinary03);
    outP1.write(formatHeaderString(staticString04));
    outP1.write(formatHeaderString(staticString05));

    // second part
    ByteArrayDataOutput outP2 = ByteStreams.newDataOutput(256);

    // static data
    outP2.write(intro);
    outP2.write(formatHeaderString(staticString06));
    outP2.write(staticBinary04);
    for (String s : staticStrings07) {
        outP2.write(formatHeaderString(s));
    }
    outP2.write(staticBinary05);

    // combine the parts
    ByteArrayDataOutput out = ByteStreams.newDataOutput(4352);

    byte[] p1 = outP1.toByteArray();
    out.write(p1);
    // fill with 0s until the next free index % 4096 == 0
    out.write(new byte[((p1.length / 4096) + 1) * 4096 - p1.length]);

    byte[] p2 = outP2.toByteArray();
    out.write(p2);
    // fill with 0s until 256 bytes are filled after the beginning of p2
    out.write(new byte[256 - p2.length]);

    // return combined result
    return out.toByteArray();
}

From source file:io.github.aritzhack.aritzh.bds.BDSCompound.java

private byte[] getUncompressedBytes() {
    ByteArrayDataOutput output = ByteStreams.newDataOutput();
    output.writeByte(this.getType().toByte());
    output.writeUTF(this.name);
    for (BDS bds : this.items) {
        if (bds instanceof BDSCompound) {
            output.write(((BDSCompound) bds).getUncompressedBytes());
        } else
            output.write(bds.getBytes());
    }
    output.write(new BDSCompEnd().getBytes());
    return output.toByteArray();
}

From source file:org.haiku.pkg.AttributeIterator.java

/**
 * <p>This method will return the next {@link Attribute}.  If there is no further value to return then
 * this method will return null.  It will throw an instance of {@link HpkException} in any situation in which
 * it is not able to parse the data or chunks such that the next attribute cannot be read.</p>
 */

public Attribute next() throws HpkException {

    Attribute result = null;

    // first, read the LEB128 'tag' that defines what sort of attribute we are dealing with.

    BigInteger tag = getNextTag();

    // if we encounter a 0 tag then we know that we have finished the list.

    if (0 != tag.signum()) {

        int encoding = deriveAttributeTagEncoding(tag);
        int id = deriveAttributeTagId(tag);

        if (id <= 0 || id >= AttributeId.values().length) {
            throw new HpkException("illegal id; " + Integer.toString(id));
        }
        AttributeId attributeId = AttributeId.values()[id];

        switch (deriveAttributeTagType(tag)) {

        case ATTRIBUTE_TYPE_INVALID:
            throw new HpkException("an invalid attribute tag type has been encountered");

        case ATTRIBUTE_TYPE_INT: {
            ensureValidEncodingForInt(encoding);
            byte[] buffer = new byte[encoding + 1];
            context.getHeapReader().readHeap(buffer, 0, new HeapCoordinates(offset, encoding + 1));
            offset += encoding + 1;
            result = new IntAttribute(attributeId, new BigInteger(buffer));
        }
            break;

        case ATTRIBUTE_TYPE_UINT: {
            ensureValidEncodingForInt(encoding);
            byte[] buffer = new byte[encoding + 1];
            context.getHeapReader().readHeap(buffer, 0, new HeapCoordinates(offset, encoding + 1));
            offset += encoding + 1;
            result = new IntAttribute(attributeId, new BigInteger(1, buffer));
        }
            break;

        case ATTRIBUTE_TYPE_STRING: {
            switch (encoding) {

            case ATTRIBUTE_ENCODING_STRING_INLINE: {
                ByteArrayDataOutput assembly = ByteStreams.newDataOutput();

                while (null == result) {
                    int b = context.getHeapReader().readHeap(offset);
                    offset++;

                    if (0 != b) {
                        assembly.write(b);
                    } else {
                        result = new StringInlineAttribute(attributeId,
                                new String(assembly.toByteArray(), Charsets.UTF_8));
                    }
                }
            }
                break;

            case ATTRIBUTE_ENCODING_STRING_TABLE: {
                BigInteger index = readUnsignedLeb128();

                if (index.compareTo(BigInteger.valueOf(Integer.MAX_VALUE)) > 0) {
                    throw new IllegalStateException("the string table index is preposterously large");
                }

                result = new StringTableRefAttribute(attributeId, index.intValue());
            }
                break;

            default:
                throw new HpkException("unknown string encoding; " + encoding);
            }
        }
            break;

        case ATTRIBUTE_TYPE_RAW: {
            switch (encoding) {
            case ATTRIBUTE_ENCODING_RAW_INLINE: {
                BigInteger length = readUnsignedLeb128();

                if (length.compareTo(BigInteger.valueOf(Integer.MAX_VALUE)) > 0) {
                    throw new HpkException("the length of the inline data is too large");
                }

                byte[] buffer = new byte[length.intValue()];
                context.getHeapReader().readHeap(buffer, 0, new HeapCoordinates(offset, length.intValue()));
                offset += length.intValue();

                result = new RawInlineAttribute(attributeId, buffer);
            }
                break;

            case ATTRIBUTE_ENCODING_RAW_HEAP: {
                BigInteger rawLength = readUnsignedLeb128();
                BigInteger rawOffset = readUnsignedLeb128();

                if (rawLength.compareTo(BigInteger.valueOf(Integer.MAX_VALUE)) > 0) {
                    throw new HpkException("the length of the heap data is too large");
                }

                if (rawOffset.compareTo(BigInteger.valueOf(Integer.MAX_VALUE)) > 0) {
                    throw new HpkException("the offset of the heap data is too large");
                }

                result = new RawHeapAttribute(attributeId,
                        new HeapCoordinates(rawOffset.longValue(), rawLength.longValue()));
            }
                break;

            default:
                throw new HpkException("unknown raw encoding; " + encoding);
            }
        }
            break;

        default:
            throw new HpkException("unable to read the tag type; " + deriveAttributeTagType(tag));

        }

        // each attribute id has a type associated with it; now check that the attribute matches
        // its intended type.

        if (result.getAttributeId().getAttributeType() != result.getAttributeType()) {
            throw new HpkException(
                    String.format("mismatch in attribute type for id %s; expecting %s, but got %s",
                            result.getAttributeId().getName(), result.getAttributeId().getAttributeType(),
                            result.getAttributeType()));
        }

        // possibly there are child attributes after this attribute; if this is the
        // case then open-up a new iterator to work across those and load them in.

        if (deriveAttributeTagHasChildAttributes(tag)) {

            AttributeIterator childAttributeIterator = new AttributeIterator(context, offset);

            while (childAttributeIterator.hasNext()) {
                result.addChildAttribute(childAttributeIterator.next());
            }

            offset = childAttributeIterator.getOffset();

        }

        nextTag = null;
    }

    return result;
}
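
The helper readUnsignedLeb128() that supplies the tag, length, and offset values above is not included in this excerpt. As a rough sketch of what unsigned LEB128 decoding generally looks like (shown here against a plain InputStream for illustration; the real implementation reads from the package heap via the context), it could be written as:

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;

public final class Leb128 {
    // Sketch only: each byte contributes its low 7 bits, least-significant group
    // first; a set high bit means another byte follows.
    public static BigInteger readUnsignedLeb128(InputStream in) throws IOException {
        BigInteger result = BigInteger.ZERO;
        int shift = 0;
        while (true) {
            int b = in.read();
            if (b < 0) {
                throw new EOFException("stream ended inside a LEB128 value");
            }
            result = result.or(BigInteger.valueOf(b & 0x7F).shiftLeft(shift));
            if ((b & 0x80) == 0) {
                return result;
            }
            shift += 7;
        }
    }
}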

From source file:io.github.leonardosnt.bungeechannelapi.BungeeChannelApi.java

/**
 * Send a custom plugin message to said server. This is one of the most useful channels ever.
 * <b>Remember, the sending and receiving server(s) need to have a player online.</b>
 *
 * @param server the name of the server to send to,
 *        ALL to send to every server (except the one sending the plugin message),
 *        or ONLINE to send to every server that's online (except the one sending the plugin message).
 *
 * @param channelName Subchannel for plugin usage.
 * @param data data to send.
 * @throws IllegalArgumentException if there are no players online.
 */
public void forward(String server, String channelName, byte[] data) {
    Player player = getFirstPlayer();

    ByteArrayDataOutput output = ByteStreams.newDataOutput();
    output.writeUTF("Forward");
    output.writeUTF(server);
    output.writeUTF(channelName);
    output.writeShort(data.length);
    output.write(data);
    player.sendPluginMessage(this.plugin, "BungeeCord", output.toByteArray());
}
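
A hypothetical caller might assemble the forwarded payload with another ByteArrayDataOutput before handing it to forward(); the sub-channel name and payload fields below are illustrative only, and api stands for an instance of BungeeChannelApi:

// Illustrative only: the sub-channel name and payload fields are made up.
void broadcastPlayerCount(BungeeChannelApi api) {
    ByteArrayDataOutput payload = ByteStreams.newDataOutput();
    payload.writeUTF("PlayerCount");
    payload.writeInt(42);

    // "ALL" fans the message out to every other server with a player online.
    api.forward("ALL", "MyPluginChannel", payload.toByteArray());
}

On the receiving server, the forwarded fields come back through a plugin message on the BungeeCord channel and can be read symmetrically with ByteStreams.newDataInput(message): readUTF() for the sub-channel, readShort() for the length written above, and readFully(byte[]) for the bytes passed to write().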

From source file:io.github.leonardosnt.bungeechannelapi.BungeeChannelApi.java

/**
 * Send a custom plugin message to specific player.
 *
 * @param playerName the name of the player to send to.
 * @param channelName Subchannel for plugin usage.
 * @param data data to send.
 * @throws IllegalArgumentException if there are no players online.
 */
public void forwardToPlayer(String playerName, String channelName, byte[] data) {
    Player player = getFirstPlayer();

    ByteArrayDataOutput output = ByteStreams.newDataOutput();
    output.writeUTF("ForwardToPlayer");
    output.writeUTF(playerName);
    output.writeUTF(channelName);
    output.writeShort(data.length);
    output.write(data);
    player.sendPluginMessage(this.plugin, "BungeeCord", output.toByteArray());
}