Example usage for java.io DataInput readByte

List of usage examples for java.io DataInput readByte

Introduction

On this page you can find example usages of java.io DataInput readByte.

Prototype

byte readByte() throws IOException;

Document

Reads and returns one input byte. The byte is treated as a signed value in the range -128 through 127, inclusive, making this method suitable for reading a byte written by the writeByte method of DataOutput. If end of stream is reached before a byte can be read, an EOFException is thrown.
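
A minimal, self-contained sketch of the call (the class name and byte values are illustrative, not taken from the examples below):

import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;

public class ReadByteExample {
    public static void main(String[] args) throws IOException {
        // Back a DataInput with an in-memory stream for the demo.
        DataInput in = new DataInputStream(new ByteArrayInputStream(new byte[] { 0x01, (byte) 0xFF }));

        byte first = in.readByte();  // 1
        byte second = in.readByte(); // -1: the value is signed
        System.out.println(first + " " + second);
        // A third readByte() here would throw EOFException.
    }
}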

Usage

From source file:com.aliyun.openservices.tablestore.hadoop.Credential.java

@Override
public void readFields(DataInput in) throws IOException {
    byte tag = in.readByte();
    if (tag != WritableConsts.CREDENTIAL) {
        throw new IOException("broken input stream");
    }
    accessKeyId = null;
    accessKeySecret = null;
    securityToken = null;
    accessKeyId = in.readUTF();
    accessKeySecret = in.readUTF();
    byte tagSecurityToken = nextTag(in);
    if (tagSecurityToken == WritableConsts.CREDENTIAL_SECURITY_TOKEN) {
        securityToken = in.readUTF();
    }
}

From source file:com.aliyun.openservices.tablestore.hadoop.Credential.java

private byte nextTag(DataInput in) throws IOException {
    try {
        return in.readByte();
    } catch (EOFException ex) {
        return 0;
    }
}
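
The read side above expects a leading tag byte, two mandatory UTF strings, and an optional third string guarded by its own tag; nextTag turns end-of-file into a harmless 0 so the optional field can simply be absent. A hypothetical write counterpart producing that layout might look like the sketch below (illustrative only, not the project's actual write method):

// Illustrative only: mirrors the layout consumed by readFields/nextTag above.
public void write(DataOutput out) throws IOException {
    out.writeByte(WritableConsts.CREDENTIAL);
    out.writeUTF(accessKeyId);
    out.writeUTF(accessKeySecret);
    if (securityToken != null) {
        out.writeByte(WritableConsts.CREDENTIAL_SECURITY_TOKEN);
        out.writeUTF(securityToken);
    }
}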

From source file:libra.common.hadoop.io.datatypes.CompressedSequenceWritable.java

@Override
public void readFields(DataInput in) throws IOException {
    this.seqLength = in.readByte();
    int byteLen = SequenceHelper.getCompressedSize(this.seqLength);
    this.compressedSequence = new byte[byteLen];
    in.readFully(this.compressedSequence, 0, byteLen);
}

From source file:io.dstream.hadoop.TypeAwareWritable.java

@Override
public void readFields(DataInput in) throws IOException {
    this.valueType = in.readByte();
    switch (this.valueType) {
    case INTEGER:
        this.value = (T) Integer.valueOf(in.readInt());
        break;
    case LONG:
        this.value = (T) Long.valueOf(in.readLong());
        break;
    case NULL:
        this.value = null;
        break;
    case OBJECT:
        try {
            ObjectInputStream ois = new ObjectInputStream((DataInputStream) in);
            T value = (T) ois.readObject();
            this.value = (T) value;
        } catch (Exception e) {
            throw new IllegalStateException("Failed to deserialize value", e);
        }
        break;
    default:
        throw new IllegalStateException("Unsupported or unrecognized value type: " + this.valueType);
    }
}

From source file:io.dstream.tez.io.TypeAwareWritable.java

@Override
public void readFields(DataInput in) throws IOException {
    this.valueType = in.readByte();
    switch (this.valueType) {
    case INTEGER:
        this.value = (T) Integer.valueOf(in.readInt());
        break;
    case LONG:
        this.value = (T) Long.valueOf(in.readLong());
        break;
    case NULL:
        this.value = null;
        break;
    case OBJECT:
        try {
            ObjectInputStream ois = new ObjectInputStream((DataInputStream) in);
            T value = (T) ois.readObject();
            this.value = value;
        } catch (Exception e) {
            throw new IllegalStateException("Failed to deserialize value", e);
        }
        break;
    default:
        throw new IllegalStateException("Unsupported or unrecognized value type: " + this.valueType);
    }
}

From source file:com.marklogic.contentpump.ContentWithFileNameWritable.java

@SuppressWarnings("unchecked")
@Override
public void readFields(DataInput in) throws IOException {
    String fn = Text.readString(in);
    fileName = fn;
    byte valueType = in.readByte();
    switch (valueType) {
    case 0:
        value = (VALUE) new Text();
        ((Text) value).readFields(in);
        break;
    case 1:
        value = (VALUE) new MarkLogicNode();
        ((MarkLogicNode) value).readFields(in);
        break;
    case 2:
        value = (VALUE) new BytesWritable();
        ((BytesWritable) value).readFields(in);
        break;
    default:
        throw new IOException("incorrect type");
    }
    type = valueType;
}

From source file:com.marklogic.contentpump.RDFWritable.java

@SuppressWarnings("unchecked")
@Override
public void readFields(DataInput in) throws IOException {
    byte hasCollection = in.readByte();
    if (hasCollection != 0) {
        Text t = new Text();
        t.readFields(in);
        graphUri = t.toString();
    }
    byte valueType = in.readByte();
    switch (valueType) {
    case 0:
        value = (VALUE) new Text();
        ((Text) value).readFields(in);
        break;
    case 1:
        value = (VALUE) new MarkLogicNode();
        ((MarkLogicNode) value).readFields(in);
        break;
    case 2:
        value = (VALUE) new BytesWritable();
        ((BytesWritable) value).readFields(in);
        break;
    default:
        throw new IOException("incorrect type");
    }
    type = valueType;
    byte hasPerms = in.readByte();
    if (hasPerms != 0) {
        int length = hasPerms;
        permissions = new ContentPermission[length];
        for (int i = 0; i < length; i++) {
            Text t = new Text();
            t.readFields(in);
            String role = t.toString();
            t.readFields(in);
            String perm = t.toString();
            ContentCapability capability = null;
            if (perm.equalsIgnoreCase(ContentCapability.READ.toString())) {
                capability = ContentCapability.READ;
            } else if (perm.equalsIgnoreCase(ContentCapability.EXECUTE.toString())) {
                capability = ContentCapability.EXECUTE;
            } else if (perm.equalsIgnoreCase(ContentCapability.INSERT.toString())) {
                capability = ContentCapability.INSERT;
            } else if (perm.equalsIgnoreCase(ContentCapability.UPDATE.toString())) {
                capability = ContentCapability.UPDATE;
            } else {
                LOG.error("Illegal permission: " + perm);
            }
            permissions[i] = new ContentPermission(capability, role);
        }

    }
}

From source file:libra.common.hadoop.io.datatypes.CompressedIntArrayWritable.java

@Override
public void readFields(DataInput in) throws IOException {
    byte flag = in.readByte();
    int count = 0;
    if ((flag & 0x0f) == 0x00) {
        count = in.readByte();
    } else if ((flag & 0x0f) == 0x01) {
        count = in.readShort();
    } else if ((flag & 0x0f) == 0x02) {
        count = in.readInt();
    } else {
        throw new IOException("unhandled flag");
    }

    this.positiveEntries = 0;
    this.negativeEntries = 0;

    int[] arr = new int[count];
    if ((flag & 0xf0) == 0x00) {
        for (int i = 0; i < count; i++) {
            arr[i] = in.readByte();
            if (arr[i] >= 0) {
                this.positiveEntries++;
            } else {
                this.negativeEntries++;
            }
        }
    } else if ((flag & 0xf0) == 0x10) {
        for (int i = 0; i < count; i++) {
            arr[i] = in.readShort();
            if (arr[i] >= 0) {
                this.positiveEntries++;
            } else {
                this.negativeEntries++;
            }
        }
    } else if ((flag & 0xf0) == 0x20) {
        for (int i = 0; i < count; i++) {
            arr[i] = in.readInt();
            if (arr[i] >= 0) {
                this.positiveEntries++;
            } else {
                this.negativeEntries++;
            }
        }
    } else {
        throw new IOException("unhandled flag");
    }

    this.intArray = arr;
    this.prevBytes = null;
}
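
In this layout the low nibble of the flag byte selects the width of the element count (byte, short, or int) and the high nibble selects the width of each stored element. A hypothetical write side choosing those widths (illustrative only, not the project's actual implementation) could look like this:

// Illustrative only: emits the flag byte and payload that readFields above decodes.
public void write(DataOutput out) throws IOException {
    int count = this.intArray.length;
    int min = 0;
    int max = 0;
    for (int v : this.intArray) {
        min = Math.min(min, v);
        max = Math.max(max, v);
    }

    // Low nibble: width of the count field; high nibble: width of each element.
    int countFlag = count <= Byte.MAX_VALUE ? 0x00 : count <= Short.MAX_VALUE ? 0x01 : 0x02;
    int valueFlag = (min >= Byte.MIN_VALUE && max <= Byte.MAX_VALUE) ? 0x00
            : (min >= Short.MIN_VALUE && max <= Short.MAX_VALUE) ? 0x10 : 0x20;
    out.writeByte(countFlag | valueFlag);

    if (countFlag == 0x00) {
        out.writeByte(count);
    } else if (countFlag == 0x01) {
        out.writeShort(count);
    } else {
        out.writeInt(count);
    }

    for (int v : this.intArray) {
        if (valueFlag == 0x00) {
            out.writeByte(v);
        } else if (valueFlag == 0x10) {
            out.writeShort(v);
        } else {
            out.writeInt(v);
        }
    }
}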

From source file:de.tudarmstadt.ukp.dkpro.core.rftagger.RfTagger.java

@Override
public void initialize(UimaContext aContext) throws ResourceInitializationException {
    super.initialize(aContext);

    runtimeProvider = new RuntimeProvider("classpath:/de/tudarmstadt/ukp/dkpro/core/rftagger/bin/");

    modelProvider = new ModelProviderBase<File>(this, "rftagger", "morph") {
        @Override
        protected File produceResource(URL aUrl) throws IOException {
            Properties metadata = getResourceMetaData();
            encodingLoadedFromModel = (String) metadata.get("model.encoding");

            SingletonTagset morphFeats = new SingletonTagset(MorphologicalFeatures.class,
                    metadata.getProperty("morph.tagset"));

            SingletonTagset posTags = new SingletonTagset(POS.class, metadata.getProperty("pos.tagset"));

            try (LittleEndianDataInputStream is = new LittleEndianDataInputStream(aUrl.openStream())) {
                long n = is.readLong(); // alphabet size
                for (int i = 0; i < n; i++) {
                    String symbol = readZeroTerminatedString(is, getEncoding());
                    if ("BOUNDARY".equals(symbol)) {
                        // Appears to be an internally used symbol
                        continue;
                    }
                    morphFeats.add(symbol);
                    posTags.add(extractTag(symbol));
                }
            }
            addTagset(posTags);
            addTagset(morphFeats);

            if (printTagSet) {
                getLogger().info(getTagset().toString());
            }

            // FIXME Actually, this is the place where the rftagger process should be
            // started/stopped so that if the language changes during processing, the
            // rftagger is reloaded with the required model.
            // It might not be easy to fix this - but then at least a bug should be
            // opened.
            return ResourceUtils.getUrlAsFile(aUrl, true);
        }

        private String readZeroTerminatedString(DataInput aIn, String aEncoding) throws IOException {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            byte b = aIn.readByte();
            while (b != 0) {
                bos.write(b);
                b = aIn.readByte();
            }
            return new String(bos.toByteArray(), aEncoding);
        }
    };

    mappingProvider = MappingProviderFactory.createPosMappingProvider(posMappingLocation, language,
            modelProvider);

    featuresParser = new MorphologicalFeaturesParser(this, modelProvider);
}

From source file:it.unimi.dsi.sux4j.mph.CHDMinimalPerfectHashFunction.java

/**
 * Creates a new CHD minimal perfect hash function for the given keys.
 *
 * @param keys the keys to hash, or {@code null}.
 * @param transform a transformation strategy for the keys.
 * @param lambda the average bucket size.
 * @param loadFactor the load factor.
 * @param signatureWidth a signature width, or 0 for no signature.
 * @param tempDir a temporary directory for the store files, or {@code null} for the standard temporary directory.
 * @param chunkedHashStore a chunked hash store containing the keys, or {@code null}; the store
 * can be unchecked, but in this case <code>keys</code> and <code>transform</code> must be non-{@code null}. 
 */
protected CHDMinimalPerfectHashFunction(final Iterable<? extends T> keys,
        final TransformationStrategy<? super T> transform, final int lambda, double loadFactor,
        final int signatureWidth, final File tempDir, ChunkedHashStore<T> chunkedHashStore) throws IOException {
    this.transform = transform;

    final ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    final RandomGenerator r = new XorShift1024StarRandomGenerator();
    pl.itemsName = "keys";

    final boolean givenChunkedHashStore = chunkedHashStore != null;
    if (!givenChunkedHashStore) {
        chunkedHashStore = new ChunkedHashStore<T>(transform, tempDir, pl);
        chunkedHashStore.reset(r.nextLong());
        chunkedHashStore.addAll(keys.iterator());
    }
    n = chunkedHashStore.size();

    defRetValue = -1; // For the very few cases in which we can decide

    int log2NumChunks = Math.max(0, Fast.mostSignificantBit(n >> LOG2_CHUNK_SIZE));
    chunkShift = chunkedHashStore.log2Chunks(log2NumChunks);
    final int numChunks = 1 << log2NumChunks;

    LOGGER.debug("Number of chunks: " + numChunks);
    LOGGER.debug("Average chunk size: " + (double) n / numChunks);

    offsetNumBucketsSeed = new long[(numChunks + 1) * 3 + 2];

    int duplicates = 0;
    final LongArrayList holes = new LongArrayList();

    @SuppressWarnings("resource")
    final OfflineIterable<MutableLong, MutableLong> coefficients = new OfflineIterable<MutableLong, MutableLong>(
            new Serializer<MutableLong, MutableLong>() {

                @Override
                public void write(final MutableLong a, final DataOutput dos) throws IOException {
                    long x = a.longValue();
                    while ((x & ~0x7FL) != 0) {
                        dos.writeByte((int) (x | 0x80));
                        x >>>= 7;
                    }
                    dos.writeByte((int) x);
                }

                @Override
                public void read(final DataInput dis, final MutableLong x) throws IOException {
                    byte b = dis.readByte();
                    long t = b & 0x7F;
                    for (int shift = 7; (b & 0x80) != 0; shift += 7) {
                        b = dis.readByte();
                        t |= (b & 0x7FL) << shift;
                    }
                    x.setValue(t);
                }
            }, new MutableLong());

    for (;;) {
        LOGGER.debug("Generating minimal perfect hash function...");

        holes.clear();
        coefficients.clear();
        pl.expectedUpdates = numChunks;
        pl.itemsName = "chunks";
        pl.start("Analysing chunks... ");

        try {
            int chunkNumber = 0;

            for (ChunkedHashStore.Chunk chunk : chunkedHashStore) {
                /* We treat a chunk as a single hash function. The number of bins is thus
                 * the first prime larger than the chunk size divided by the load factor. */
                final int p = Primes.nextPrime((int) Math.ceil(chunk.size() / loadFactor) + 1);
                final boolean used[] = new boolean[p];

                final int numBuckets = (chunk.size() + lambda - 1) / lambda;
                numBuckets(chunkNumber + 1, numBuckets(chunkNumber) + numBuckets);
                final int[] cc0 = new int[numBuckets];
                final int[] cc1 = new int[numBuckets];
                @SuppressWarnings("unchecked")
                final ArrayList<long[]>[] bucket = new ArrayList[numBuckets];
                for (int i = bucket.length; i-- != 0;)
                    bucket[i] = new ArrayList<long[]>();

                tryChunk: for (;;) {
                    for (ArrayList<long[]> b : bucket)
                        b.clear();
                    Arrays.fill(used, false);

                    /* At each try, the allocation to keys to bucket is randomized differently. */
                    final long seed = r.nextLong();
                    // System.err.println( "Number of keys: " + chunk.size()  + " Number of bins: " + p + " seed: " + seed );
                    /* We distribute the keys in this chunks in the buckets. */
                    for (Iterator<long[]> iterator = chunk.iterator(); iterator.hasNext();) {
                        final long[] triple = iterator.next();
                        final long[] h = new long[3];
                        Hashes.spooky4(triple, seed, h);
                        final ArrayList<long[]> b = bucket[(int) ((h[0] >>> 1) % numBuckets)];
                        h[1] = (int) ((h[1] >>> 1) % p);
                        h[2] = (int) ((h[2] >>> 1) % (p - 1)) + 1;

                        // All elements in a bucket must have either different h[ 1 ] or different h[ 2 ]
                        for (long[] t : b)
                            if (t[1] == h[1] && t[2] == h[2]) {
                                LOGGER.info("Duplicate index" + Arrays.toString(t));
                                continue tryChunk;
                            }
                        b.add(h);
                    }

                    final int[] perm = Util.identity(bucket.length);
                    IntArrays.quickSort(perm, new AbstractIntComparator() {
                        private static final long serialVersionUID = 1L;

                        @Override
                        public int compare(int a0, int a1) {
                            return Integer.compare(bucket[a1].size(), bucket[a0].size());
                        }
                    });

                    for (int i = 0; i < perm.length;) {
                        final LinkedList<Integer> bucketsToDo = new LinkedList<Integer>();
                        final int size = bucket[perm[i]].size();
                        //System.err.println( "Bucket size: " + size );
                        int j;
                        // Gather indices of all buckets with the same size
                        for (j = i; j < perm.length && bucket[perm[j]].size() == size; j++)
                            bucketsToDo.add(Integer.valueOf(perm[j]));

                        // Examine for each pair (c0,c1) the buckets still to do
                        ext: for (int c1 = 0; c1 < p; c1++)
                            for (int c0 = 0; c0 < p; c0++) {
                                //System.err.println( "Testing " + c0 + ", " + c1 + " (to do: " + bucketsToDo.size() + ")" );
                                for (Iterator<Integer> iterator = bucketsToDo.iterator(); iterator.hasNext();) {
                                    final int k = iterator.next().intValue();
                                    final ArrayList<long[]> b = bucket[k];
                                    boolean completed = true;
                                    final IntArrayList done = new IntArrayList();
                                    // Try to see whether the necessary entries are not used
                                    for (long[] h : b) {
                                        //assert k == h[ 0 ];

                                        int pos = (int) ((h[1] + c0 * h[2] + c1) % p);
                                        //System.err.println( "Testing pos " + pos + " for " + Arrays.toString( e  ));
                                        if (used[pos]) {
                                            completed = false;
                                            break;
                                        } else {
                                            used[pos] = true;
                                            done.add(pos);
                                        }
                                    }

                                    if (completed) {
                                        // All positions were free
                                        cc0[k] = c0;
                                        cc1[k] = c1;
                                        iterator.remove();
                                    } else
                                        for (int d : done)
                                            used[d] = false;
                                }
                                if (bucketsToDo.isEmpty())
                                    break ext;
                            }
                        if (!bucketsToDo.isEmpty())
                            continue tryChunk;

                        seed(chunkNumber, seed);
                        i = j;
                    }
                    break;
                }

                // System.err.println("DONE!");

                if (ASSERTS) {
                    final IntOpenHashSet pos = new IntOpenHashSet();
                    final long h[] = new long[3];
                    for (Iterator<long[]> iterator = chunk.iterator(); iterator.hasNext();) {
                        final long[] triple = iterator.next();
                        Hashes.spooky4(triple, seed(chunkNumber), h);
                        h[0] = (h[0] >>> 1) % numBuckets;
                        h[1] = (int) ((h[1] >>> 1) % p);
                        h[2] = (int) ((h[2] >>> 1) % (p - 1)) + 1;
                        //System.err.println( Arrays.toString(  e  ) );
                        assert pos.add((int) ((h[1] + cc0[(int) (h[0])] * h[2] + cc1[(int) (h[0])]) % p));
                    }
                }

                final MutableLong l = new MutableLong();
                for (int i = 0; i < numBuckets; i++) {
                    l.setValue(cc0[i] + cc1[i] * p);
                    coefficients.add(l);
                }

                for (int i = 0; i < p; i++)
                    if (!used[i])
                        holes.add(offset(chunkNumber) + i);

                offset(chunkNumber + 1, offset(chunkNumber) + p);
                chunkNumber++;
                pl.update();
            }

            pl.done();
            break;
        } catch (ChunkedHashStore.DuplicateException e) {
            if (keys == null)
                throw new IllegalStateException(
                        "You provided no keys, but the chunked hash store was not checked");
            if (duplicates++ > 3)
                throw new IllegalArgumentException("The input list contains duplicates");
            LOGGER.warn("Found duplicate. Recomputing triples...");
            chunkedHashStore.reset(r.nextLong());
            chunkedHashStore.addAll(keys.iterator());
        }
    }

    rank = new SparseRank(offset(offsetNumBucketsSeed.length / 3 - 1), holes.size(), holes.iterator());

    globalSeed = chunkedHashStore.seed();

    this.coefficients = new EliasFanoLongBigList(new AbstractLongIterator() {
        final OfflineIterator<MutableLong, MutableLong> iterator = coefficients.iterator();

        @Override
        public boolean hasNext() {
            return iterator.hasNext();
        }

        public long nextLong() {
            return iterator.next().longValue();
        }
    }, 0, true);

    coefficients.close();

    LOGGER.info("Completed.");
    LOGGER.info("Actual bit cost per key: " + (double) numBits() / n);

    if (signatureWidth != 0) {
        signatureMask = -1L >>> Long.SIZE - signatureWidth;
        (signatures = LongArrayBitVector.getInstance().asLongBigList(signatureWidth)).size(n);
        pl.expectedUpdates = n;
        pl.itemsName = "signatures";
        pl.start("Signing...");
        for (ChunkedHashStore.Chunk chunk : chunkedHashStore) {
            Iterator<long[]> iterator = chunk.iterator();
            for (int i = chunk.size(); i-- != 0;) {
                final long[] triple = iterator.next();
                long t = getLongByTripleNoCheck(triple);
                signatures.set(t, signatureMask & triple[0]);
                pl.lightUpdate();
            }
        }
        pl.done();
    } else {
        signatureMask = 0;
        signatures = null;
    }

    if (!givenChunkedHashStore)
        chunkedHashStore.close();
}
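
The anonymous Serializer inside the constructor above uses readByte to decode a 7-bit variable-length (varint-style) encoding: each byte carries seven payload bits and the high bit marks a continuation. A minimal standalone round trip of the same scheme (class and method names are illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

public class VarLongRoundTrip {

    static void writeVarLong(DataOutput out, long x) throws IOException {
        while ((x & ~0x7FL) != 0) {
            out.writeByte((int) (x | 0x80)); // continuation bit set
            x >>>= 7;
        }
        out.writeByte((int) x);              // final byte, continuation bit clear
    }

    static long readVarLong(DataInput in) throws IOException {
        byte b = in.readByte();
        long t = b & 0x7F;
        for (int shift = 7; (b & 0x80) != 0; shift += 7) {
            b = in.readByte();
            t |= (b & 0x7FL) << shift;
        }
        return t;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        writeVarLong(new DataOutputStream(bos), 300L);
        long decoded = readVarLong(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
        System.out.println(decoded); // 300
    }
}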