Example usage for java.nio ByteBuffer getLong

Introduction

On this page you can find examples of how to use java.nio.ByteBuffer.getLong().

Prototype

public abstract long getLong();

Document

Returns the long at the current position and increases the position by 8.
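
For orientation, here is a minimal, self-contained sketch of the relative read (the class name is illustrative): values are written, the buffer is flipped, and each getLong() call consumes eight bytes and advances the position.

import java.nio.ByteBuffer;

public class GetLongDemo {
    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.allocate(16);
        buffer.putLong(42L);                  // position 0 -> 8
        buffer.putLong(-7L);                  // position 8 -> 16
        buffer.flip();                        // limit = 16, position = 0
        System.out.println(buffer.getLong()); // prints 42, position -> 8
        System.out.println(buffer.getLong()); // prints -7, position -> 16
    }
}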

Usage

From source file:com.example.IdGenerator.java

public long generateLong() {
    final LocalDateTime now = LocalDateTime.now(clock);
    final Duration duration = Duration.between(BASE, now);
    final int high = (int) duration.getSeconds(); // seconds since BASE fill the high 32 bits
    final int low = duration.getNano();           // nanosecond remainder fills the low 32 bits
    final byte[] hbs = ByteBuffer.allocate(4).putInt(high).array();
    final byte[] lbs = ByteBuffer.allocate(4).putInt(low).array();
    final byte[] bytes = new byte[8];
    System.arraycopy(hbs, 0, bytes, 0, 4);
    System.arraycopy(lbs, 0, bytes, 4, 4);
    final ByteBuffer buffer = ByteBuffer.allocate(8).put(bytes, 0, 8);
    buffer.flip();
    return buffer.getLong(); // combine both ints into one long
}
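
The two intermediate byte arrays are not needed for the result; a minimal equivalent sketch (same high and low values as above) writes both ints straight into one buffer:

    final ByteBuffer buffer = ByteBuffer.allocate(8);
    buffer.putInt(high); // occupies the top 32 bits of the long
    buffer.putInt(low);  // occupies the bottom 32 bits
    buffer.flip();
    final long id = buffer.getLong();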

From source file:it.unimi.di.big.mg4j.index.DiskBasedIndex.java

/** Commodity method for loading from a channel a big list of binary longs with specified endianness into a {@linkplain LongBigArrays long big array}.
 *
 * @param channel the channel.
 * @param length the number of longs to be loaded.
 * @param byteOrder the endianness of the longs.
 * @return a big list of longs containing the longs returned by <code>channel</code>.
 */
public static LongBigArrayBigList loadLongBigList(final ReadableByteChannel channel, final long length,
        final ByteOrder byteOrder) throws IOException {
    final ByteBuffer byteBuffer = ByteBuffer.allocateDirect(BUFFER_SIZE).order(byteOrder);

    LongBigArrayBigList list = new LongBigArrayBigList(length);

    while (channel.read(byteBuffer) > 0) {
        byteBuffer.flip(); // switch the buffer from filling to draining
        while (byteBuffer.hasRemaining())
            list.add(byteBuffer.getLong());
        byteBuffer.clear(); // make the whole buffer writable for the next read
    }

    return list;
}
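
For context, a hypothetical call site might look like the following; the helper name, file, and count are assumptions, not part of the original source (imports omitted, as in the surrounding snippets):

static LongBigArrayBigList readAllLongs(Path file, long numLongs) throws IOException {
    try (FileChannel channel = FileChannel.open(file)) { // opens for reading by default
        return DiskBasedIndex.loadLongBigList(channel, numLongs, ByteOrder.BIG_ENDIAN);
    }
}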

From source file:org.apache.jackrabbit.oak.plugins.segment.file.TarReader.java

private static Map<UUID, List<UUID>> parseGraph(ByteBuffer graphByteBuffer) {
    int count = graphByteBuffer.getInt(graphByteBuffer.limit() - 12);

    ByteBuffer buffer = graphByteBuffer.duplicate();
    buffer.limit(graphByteBuffer.limit() - 16);

    List<UUID> uuids = newArrayListWithCapacity(count);
    for (int i = 0; i < count; i++) {
        uuids.add(new UUID(buffer.getLong(), buffer.getLong())); // most significant bits, then least significant bits
    }

    Map<UUID, List<UUID>> graph = newHashMap();
    while (buffer.hasRemaining()) {
        UUID uuid = uuids.get(buffer.getInt());
        List<UUID> list = newArrayList();
        int refid = buffer.getInt();
        while (refid != -1) {
            list.add(uuids.get(refid));
            refid = buffer.getInt();
        }
        graph.put(uuid, list);
    }
    return graph;
}
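
The footer layout this parser relies on: the entry count is an int stored 12 bytes before the buffer's limit, the usable data ends 16 bytes before the limit, each UUID is read as two consecutive getLong() calls (most significant bits first), and every adjacency list is a run of int indexes terminated by -1.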

From source file:org.apache.airavata.gfac.core.GFacUtils.java

public static long bytesToLong(byte[] bytes) {
    ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES);
    buffer.put(bytes);
    buffer.flip(); // rewind so getLong() reads from the start of what was written
    return buffer.getLong();
}
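
Assuming the input array holds at least eight bytes, wrapping it avoids the intermediate copy; an equivalent sketch:

public static long bytesToLong(byte[] bytes) {
    return ByteBuffer.wrap(bytes).getLong(); // wrap() starts at position 0, so this reads the first eight bytes
}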

From source file:org.mrgeo.data.accumulo.partitioners.AccumuloMrGeoRangePartitioner.java

private synchronized TileIdWritable[] getCutPoints() throws IOException {
    if (cutPointArray == null) {
        String cutFileName = conf.get(CUTFILE_KEY);
        Path[] cf = DistributedCache.getLocalCacheFiles(conf);

        if (cf != null) {
            for (Path path : cf) {
                if (path.toUri().getPath().endsWith(cutFileName.substring(cutFileName.lastIndexOf('/')))) {
                    TreeSet<Text> cutPoints = new TreeSet<Text>();
                    Scanner in = new Scanner(new BufferedReader(new FileReader(path.toString())));
                    try {
                        while (in.hasNextLine())
                            cutPoints.add(new Text(Base64.decodeBase64(in.nextLine().getBytes())));
                    } finally {
                        in.close();
                    }
                    cutPointArray = cutPoints.toArray(new Text[cutPoints.size()]);
                    break;
                }
            }
        }
        if (cutPointArray == null)
            throw new FileNotFoundException(cutFileName + " not found in distributed cache");
    }
    tileIdPointArray = new TileIdWritable[cutPointArray.length];
    for (int x = 0; x < cutPointArray.length; x++) {
        byte[] b = cutPointArray[x].getBytes();
        ByteBuffer buffer = ByteBuffer.wrap(b);
        long k = buffer.getLong(); // the first eight bytes of each cut point encode the tile id
        tileIdPointArray[x] = new TileIdWritable(k);
    }

    return tileIdPointArray;
}
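
Note that Hadoop's Text.getBytes() returns the backing array, which can be longer than the valid data; that is harmless here because getLong() consumes only the first eight bytes.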

From source file:org.apache.jackrabbit.oak.segment.file.TarReader.java

private static Map<UUID, List<UUID>> parseGraph(ByteBuffer graphByteBuffer, boolean bulkOnly) {
    int count = graphByteBuffer.getInt(graphByteBuffer.limit() - 12);

    ByteBuffer buffer = graphByteBuffer.duplicate();
    buffer.limit(graphByteBuffer.limit() - 16);

    List<UUID> uuids = newArrayListWithCapacity(count);
    for (int i = 0; i < count; i++) {
        uuids.add(new UUID(buffer.getLong(), buffer.getLong()));
    }

    Map<UUID, List<UUID>> graph = newHashMap();
    while (buffer.hasRemaining()) {
        UUID uuid = uuids.get(buffer.getInt());
        List<UUID> list = newArrayList();
        int refid = buffer.getInt();
        while (refid != -1) {
            UUID ref = uuids.get(refid);
            if (!bulkOnly || !isDataSegmentId(ref.getLeastSignificantBits())) {
                list.add(ref);
            }
            refid = buffer.getInt();
        }
        graph.put(uuid, list);
    }
    return graph;
}
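
This is the same footer parser as the org.apache.jackrabbit.oak.plugins.segment version shown earlier, extended so that, when bulkOnly is set, references to data segments (identified via the UUID's least-significant bits) are filtered out.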

From source file:com.olacabs.fabric.compute.sources.kafka.impl.TransactionManager.java

private long readLastTransactionId() throws Exception {
    byte[] data = curator.getData().forPath(txnIdPath());
    ByteBuffer read = ByteBuffer.allocate(Long.BYTES).put(data, 0, data.length);
    read.flip(); // switch to reading before getLong()
    return read.getLong();
}

From source file:org.apache.hadoop.hdfs.hoss.db.HotStore.java

/**
 * Returns the object size in MB.
 * @param objId the object id
 * @return the object size in MB, or -1 if the object could not be read
 */
public long getObjectSizeMB(long objId) {
    final ByteBuffer buf = fbs.get((int) objId);
    if (buf == null) {
        LOG.error("Error trying to read object " + objId);
        return -1; // guard against the missing object instead of dereferencing null
    }
    buf.getLong(); // discard the first long
    buf.getLong(); // discard the second long
    long size = buf.getLong(); // the third long is the size
    return size;
}
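
Since the size is simply the third long in the record, an absolute read (which neither depends on nor moves the position) is an equivalent sketch, assuming the same layout:

    long size = buf.getLong(16); // absolute getLong at byte offset 16, skipping two 8-byte longs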

From source file:org.cosmo.common.record.DefnRecord.java

public Object readImpl(ByteBuffer dataIO, boolean directConvertFromBytes) throws IOException {
    // read record
    long recordId = dataIO.getLong();
    Record record = Meta.Instance((Class<? extends Record>) field().getType()).store().newInstance(recordId);
    if (record instanceof IntrinsicRecord) {
        ((IntrinsicRecord) record).setDefn(this);
    }
    return record;
}

From source file:mastermind.RandomGenerator.java

public long getLongSeed() {
    if (isValid) {
        byte[] seed = secureRandom.generateSeed(NUM_SEED_BYTES);
        ByteBuffer byteBuffer = ByteBuffer.wrap(seed);
        return byteBuffer.getLong(); // the first eight seed bytes become the long
    } else {
        return -1;
    }
}
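
One caveat: ByteBuffer.wrap() leaves the position at 0 and the limit at seed.length, so getLong() throws BufferUnderflowException unless NUM_SEED_BYTES is at least 8.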