Example usage for java.nio.ByteBuffer.allocateDirect

Introduction

On this page you can find example usages of java.nio.ByteBuffer.allocateDirect.

Prototype

public static ByteBuffer allocateDirect(int capacity) 

Document

Creates a direct byte buffer based on a newly allocated memory block.
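
As a quick orientation before the real-world examples below, here is a minimal, self-contained sketch of the call: allocate off-heap memory, write to it, flip, and read it back. Direct buffers live outside the Java heap, which lets the JVM hand their address to native I/O without an extra copy.

import java.nio.ByteBuffer;

public class AllocateDirectDemo {
    public static void main(String[] args) {
        // Allocate 16 bytes of off-heap memory
        ByteBuffer buf = ByteBuffer.allocateDirect(16);
        buf.putInt(42);                         // write 4 bytes at position 0
        buf.flip();                             // limit = 4, position = 0: ready to read
        System.out.println(buf.getInt());       // prints 42
        System.out.println(buf.isDirect());     // prints true
    }
}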

Usage

From source file:org.apache.sysml.runtime.matrix.data.SinglePrecisionCudaSupportFunctions.java

@Override
public void deviceToHost(GPUContext gCtx, Pointer src, double[] dest, String instName, boolean isEviction) {
    long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
    // We invoke transfer matrix from device to host in two cases:
    // 1. During eviction of unlocked matrices
    // 2. During acquireHostRead
    // If single-precision support is enabled, a float-to-double conversion is required, as CP expects the data in double format.
    // The conversion can be done on the host or on the device. We typically prefer the device due to the GPU's high memory bandwidth.
    // However, the conversion requires additional space to be allocated, which can lead to infinite recursion
    // during eviction: `evict -> deviceToHost -> float2double -> allocate -> ensureFreeSpace -> evict`.
    // To avoid this recursion, it is necessary to perform the conversion on the host.
    if (PERFORM_CONVERSION_ON_DEVICE && !isEviction) {
        Pointer deviceDoubleData = gCtx.allocate(((long) dest.length) * Sizeof.DOUBLE);
        LibMatrixCUDA.float2double(gCtx, src, deviceDoubleData, dest.length);
        cudaMemcpy(Pointer.to(dest), deviceDoubleData, ((long) dest.length) * Sizeof.DOUBLE,
                cudaMemcpyDeviceToHost);
        gCtx.cudaFreeHelper(instName, deviceDoubleData, DMLScript.EAGER_CUDA_FREE);
    } else {
        LOG.debug("Potential OOM: Allocated additional space on host in deviceToHost");
        FloatBuffer floatData = ByteBuffer.allocateDirect(Sizeof.FLOAT * dest.length)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        cudaMemcpy(Pointer.to(floatData), src, ((long) dest.length) * Sizeof.FLOAT, cudaMemcpyDeviceToHost);
        LibMatrixNative.fromFloatBuffer(floatData, dest);
    }
    if (DMLScript.STATISTICS) {
        long totalTime = System.nanoTime() - t0;
        GPUStatistics.cudaFloat2DoubleTime.add(totalTime);
        GPUStatistics.cudaFloat2DoubleCount.add(1);
        if (DMLScript.FINEGRAINED_STATISTICS && instName != null)
            GPUStatistics.maintainCPMiscTimes(instName, GPUInstruction.MISC_TIMER_DEVICE_TO_HOST, totalTime);
    }
}
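
A note on the host-side fallback above: viewing a direct ByteBuffer as a FloatBuffer is only safe for native interop when the byte order matches the platform, which is why the code sets ByteOrder.nativeOrder() before calling asFloatBuffer(). A minimal sketch of the pattern in isolation (the values here are illustrative):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

public class DirectFloatView {
    public static void main(String[] args) {
        int n = 4;
        // Native code (e.g. cudaMemcpy) writes bytes in the platform's
        // endianness; Java buffers default to big-endian, so the order
        // must be set before taking the float view.
        FloatBuffer floats = ByteBuffer.allocateDirect(Float.BYTES * n)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        floats.put(0, 1.5f);
        System.out.println(floats.get(0));      // prints 1.5
    }
}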

From source file:edu.northwestern.jcr.adapter.fedora.persistence.FedoraConnectorREST.java

/**
 * Wrapper of getDatastreamDissemination in REST.
 *
 * @param pid pid of the object
 * @param dsID id of the datastream
 * @return byte content of the data stream
 */
public byte[] getDataStream(String pid, String dsID) {
    HttpInputStream inputStream;
    ReadableByteChannel channel;
    ByteBuffer buf;
    byte[] bytes;
    int numRead = 0;
    int length = 0;

    try {
        inputStream = fc.get(
                String.format("/objects/%s/datastreams/%s/content", URLEncoder.encode(pid, "UTF-8"), dsID),
                true, false);
    } catch (Exception e) {
        return null;
    }

    channel = Channels.newChannel(inputStream);
    // Create a direct ByteBuffer (fixed 10 MB capacity; see the note after this example)
    buf = ByteBuffer.allocateDirect(10 * 1024 * 1024);

    while (numRead >= 0) {
        // Read bytes from the channel
        try {
            numRead = channel.read(buf);
        } catch (Exception e) {
            return null;
        }

        if (numRead > 0) {
            length += numRead;
        }
    }

    bytes = new byte[length];
    // reset the position of the buffer to zero
    buf.rewind();
    buf.get(bytes);

    return bytes;
}
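
One caveat about the example above: the buffer is fixed at 10 MB, and once it fills up, channel.read(buf) returns 0 rather than -1, so the loop would spin without terminating on a larger datastream. A hedged sketch of one way to read a channel of unknown length, trading the single direct buffer for a growable heap copy (readAll is an illustrative name, not part of the project):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;

static byte[] readAll(ReadableByteChannel channel) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    ByteBuffer buf = ByteBuffer.allocateDirect(64 * 1024);  // reusable chunk
    byte[] tmp = new byte[64 * 1024];
    while (channel.read(buf) >= 0) {
        buf.flip();                        // switch to draining
        int n = buf.remaining();
        buf.get(tmp, 0, n);
        out.write(tmp, 0, n);
        buf.clear();                       // make room for the next read
    }
    return out.toByteArray();
}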

From source file:org.apache.hadoop.hive.ql.io.orc.RecordReaderUtils.java

/**
 * Read the list of ranges from the file.
 * @param file the file to read
 * @param base the base of the stripe
 * @param range the disk ranges within the stripe to read
 * @return the bytes read for each disk range, which is the same length as
 *    ranges
 * @throws IOException
 */
static DiskRangeList readDiskRanges(FSDataInputStream file, ZeroCopyReaderShim zcr, long base,
        DiskRangeList range, boolean doForceDirect) throws IOException {
    if (range == null)
        return null;
    DiskRangeList prev = range.prev;
    if (prev == null) {
        prev = new DiskRangeListMutateHelper(range);
    }
    while (range != null) {
        if (range.hasData()) {
            range = range.next;
            continue;
        }
        int len = (int) (range.getEnd() - range.getOffset());
        long off = range.getOffset();
        file.seek(base + off);
        if (zcr != null) {
            boolean hasReplaced = false;
            while (len > 0) {
                ByteBuffer partial = zcr.readBuffer(len, false);
                BufferChunk bc = new BufferChunk(partial, off);
                if (!hasReplaced) {
                    range.replaceSelfWith(bc);
                    hasReplaced = true;
                } else {
                    range.insertAfter(bc);
                }
                range = bc;
                int read = partial.remaining();
                len -= read;
                off += read;
            }
        } else if (doForceDirect) {
            ByteBuffer directBuf = ByteBuffer.allocateDirect(len);
            readDirect(file, len, directBuf);
            range = range.replaceSelfWith(new BufferChunk(directBuf, range.getOffset()));
        } else {
            byte[] buffer = new byte[len];
            file.readFully(buffer, 0, buffer.length);
            range = range.replaceSelfWith(new BufferChunk(ByteBuffer.wrap(buffer), range.getOffset()));
        }
        range = range.next;
    }
    return prev.next;
}
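
The readDirect helper called on the doForceDirect path is not shown on this page. A sketch of what such a helper plausibly does, assuming the stream supports Hadoop's ByteBufferReadable interface (the actual RecordReaderUtils implementation may differ):

import java.io.EOFException;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.fs.FSDataInputStream;

static void readDirect(FSDataInputStream file, int len, ByteBuffer directBuf) throws IOException {
    // Fill the direct buffer from the (already positioned) stream.
    while (directBuf.position() < len) {
        int n = file.read(directBuf);      // reads straight into off-heap memory
        if (n < 0) {
            throw new EOFException("EOF before reading " + len + " bytes");
        }
    }
    directBuf.flip();                      // position 0, limit len: ready for the BufferChunk
}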

From source file:org.gephi.io.importer.api.ImportUtils.java

/**
 * Uncompress a GZIP file.
 */
public static File getGzFile(FileObject in, File out, boolean isTar) throws IOException {

    // Stream buffer
    final int BUFF_SIZE = 8192;
    final byte[] buffer = new byte[BUFF_SIZE];

    GZIPInputStream inputStream = null;
    FileOutputStream outStream = null;

    try {
        inputStream = new GZIPInputStream(new FileInputStream(in.getPath()));
        outStream = new FileOutputStream(out);

        if (isTar) {
            // Read Tar header
            int remainingBytes = readTarHeader(inputStream);

            // Read content
            ByteBuffer bb = ByteBuffer.allocateDirect(4 * BUFF_SIZE);
            byte[] tmpCache = new byte[BUFF_SIZE];
            int nRead, nGet;
            while ((nRead = inputStream.read(tmpCache)) != -1) {
                if (nRead == 0) {
                    continue;
                }
                bb.put(tmpCache);
                bb.position(0);
                bb.limit(nRead);
                while (bb.hasRemaining() && remainingBytes > 0) {
                    nGet = Math.min(bb.remaining(), BUFF_SIZE);
                    nGet = Math.min(nGet, remainingBytes);
                    bb.get(buffer, 0, nGet);
                    outStream.write(buffer, 0, nGet);
                    remainingBytes -= nGet;
                }
                bb.clear();
            }
        } else {
            int len;
            while ((len = inputStream.read(buffer)) > 0) {
                outStream.write(buffer, 0, len);
            }
        }
    } catch (IOException ex) {
        Exceptions.printStackTrace(ex);
    } finally {
        if (inputStream != null) {
            inputStream.close();
        }
        if (outStream != null) {
            outStream.close();
        }
    }

    return out;
}
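
The put/position/limit sequence in the tar branch works because limit(nRead) cuts off any stale bytes past what was just read, but the same transfer can be written with the bounded put-and-flip idiom. A sketch using the example's variable names, with the tar remainingBytes bookkeeping omitted for brevity:

// Copy exactly nRead bytes from the heap array into the direct buffer,
// then drain it back out in bounded chunks.
bb.clear();
bb.put(tmpCache, 0, nRead);
bb.flip();                                 // limit = nRead, position = 0
while (bb.hasRemaining()) {
    int nGet = Math.min(bb.remaining(), buffer.length);
    bb.get(buffer, 0, nGet);
    outStream.write(buffer, 0, nGet);
}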

From source file:org.apache.hadoop.hbase.io.encoding.TestDataBlockEncoders.java

static ByteBuffer encodeKeyValues(DataBlockEncoding encoding, List<KeyValue> kvs,
        HFileBlockEncodingContext encodingContext, boolean useOffheapData) throws IOException {
    DataBlockEncoder encoder = encoding.getEncoder();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    baos.write(HFILEBLOCK_DUMMY_HEADER);
    DataOutputStream dos = new DataOutputStream(baos);
    encoder.startBlockEncoding(encodingContext, dos);
    for (KeyValue kv : kvs) {
        encoder.encode(kv, encodingContext, dos);
    }
    encoder.endBlockEncoding(encodingContext, dos, baos.getBuffer());
    byte[] encodedData = new byte[baos.size() - ENCODED_DATA_OFFSET];
    System.arraycopy(baos.toByteArray(), ENCODED_DATA_OFFSET, encodedData, 0, encodedData.length);
    if (useOffheapData) {
        ByteBuffer bb = ByteBuffer.allocateDirect(encodedData.length);
        bb.put(encodedData);
        bb.rewind();
        return bb;
    }
    return ByteBuffer.wrap(encodedData);
}
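
The useOffheapData flag exists so the codec is exercised against both heap and off-heap memory: ByteBuffer.wrap returns a heap buffer, while the allocateDirect-put-rewind sequence produces a direct one with the same contents. A minimal check that the two views compare equal (ByteBuffer.equals looks only at the remaining bytes, not at where they are stored):

import java.nio.ByteBuffer;

public class HeapVsDirect {
    public static void main(String[] args) {
        byte[] data = {1, 2, 3};
        ByteBuffer heap = ByteBuffer.wrap(data);           // backed by the array
        ByteBuffer direct = ByteBuffer.allocateDirect(data.length);
        direct.put(data);
        direct.rewind();                                   // back to position 0 for reading
        System.out.println(heap.equals(direct));           // prints true
    }
}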

From source file:org.pentaho.di.trans.steps.fixedinput.FixedInput.java

public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
    meta = (FixedInputMeta) smi;
    data = (FixedInputData) sdi;

    if (super.init(smi, sdi)) {
        try {
            data.preferredBufferSize = Integer.parseInt(environmentSubstitute(meta.getBufferSize()));
            data.lineWidth = Integer.parseInt(environmentSubstitute(meta.getLineWidth()));
            data.filename = environmentSubstitute(meta.getFilename());

            if (Const.isEmpty(data.filename)) {
                logError(BaseMessages.getString(PKG, "FixedInput.MissingFilename.Message"));
                return false;
            }

            FileObject fileObject = KettleVFS.getFileObject(data.filename, getTransMeta());
            try {
                data.fis = getFileInputStream(fileObject.getURL());
                data.fc = data.fis.getChannel();
                data.bb = ByteBuffer.allocateDirect(data.preferredBufferSize);
            } catch (IOException e) {
                logError(e.toString());
                return false;
            }

            // Add filename to result filenames ?
            if (meta.isAddResultFile()) {
                ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, fileObject,
                        getTransMeta().getName(), toString());
                resultFile.setComment("File was read by a Fixed input step");
                addResultFile(resultFile);
            }

            logBasic("Opened file with name [" + data.filename + "]");

            data.stopReading = false;

            if (meta.isRunningInParallel()) {
                data.stepNumber = getUniqueStepNrAcrossSlaves();
                data.totalNumberOfSteps = getUniqueStepCountAcrossSlaves();
                data.fileSize = fileObject.getContent().getSize();
            }

            // OK, now we need to skip a number of bytes in case we're doing a parallel read.
            //
            if (meta.isRunningInParallel()) {

                int totalLineWidth = data.lineWidth + meta.getLineSeparatorLength(); // including line separator bytes
                long nrRows = data.fileSize / totalLineWidth; // e.g. 100,000 / 100 = 1000 rows
                long rowsToSkip = Math.round(data.stepNumber * nrRows / (double) data.totalNumberOfSteps); // 0, 333, 667
                long nextRowsToSkip = Math
                        .round((data.stepNumber + 1) * nrRows / (double) data.totalNumberOfSteps); // 333, 667, 1000
                data.rowsToRead = nextRowsToSkip - rowsToSkip;
                long bytesToSkip = rowsToSkip * totalLineWidth;

                logBasic("Step #" + data.stepNumber + " is skipping " + bytesToSkip
                        + " to position in file, then it's reading " + data.rowsToRead + " rows.");

                data.fc.position(bytesToSkip);
            }

            return true;
        } catch (Exception e) {
            logError("Error opening file '" + meta.getFilename() + "'", e);
        }
    }
    return false;
}
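
The inline comments above ("0, 333, 667" and "333, 667, 1000") trace the parallel partitioning: with nrRows = 1000 and three steps, the rounded boundaries give the steps 333, 334, and 333 rows, with no overlap and no gaps. A standalone check of that arithmetic:

long nrRows = 1000;
int totalSteps = 3;
for (int step = 0; step < totalSteps; step++) {
    long rowsToSkip = Math.round(step * nrRows / (double) totalSteps);
    long nextRowsToSkip = Math.round((step + 1) * nrRows / (double) totalSteps);
    System.out.println("step " + step + ": skip " + rowsToSkip
            + " rows, read " + (nextRowsToSkip - rowsToSkip));
}
// step 0: skip 0 rows, read 333
// step 1: skip 333 rows, read 334
// step 2: skip 667 rows, read 333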

From source file:com.ibm.crail.tools.CrailBenchmark.java

void readSequential(String filename, int size, int loop, boolean buffered) throws Exception {
    System.out.println("readSequential, filename " + filename + ", size " + size + ", loop " + loop
            + ", buffered " + buffered);

    CrailBuffer buf = null;
    if (size == CrailConstants.BUFFER_SIZE) {
        buf = fs.allocateBuffer();
    } else if (size < CrailConstants.BUFFER_SIZE) {
        CrailBuffer _buf = fs.allocateBuffer();
        _buf.clear().limit(size);
        buf = _buf.slice();
    } else {
        buf = OffHeapBuffer.wrap(ByteBuffer.allocateDirect(size));
    }

    //warmup
    ConcurrentLinkedQueue<CrailBuffer> bufferQueue = new ConcurrentLinkedQueue<CrailBuffer>();
    bufferQueue.add(buf);
    warmUp(filename, warmup, bufferQueue);

    CrailFile file = fs.lookup(filename).get().asFile();
    CrailBufferedInputStream bufferedStream = file.getBufferedInputStream(file.getCapacity());
    CrailInputStream directStream = file.getDirectInputStream(file.getCapacity());

    //benchmark
    System.out.println("starting benchmark...");
    fs.getStatistics().reset();
    double sumbytes = 0;
    double ops = 0;
    long start = System.currentTimeMillis();
    while (ops < loop) {
        if (buffered) {
            buf.clear();
            double ret = (double) bufferedStream.read(buf.getByteBuffer());
            if (ret > 0) {
                sumbytes = sumbytes + ret;
                ops = ops + 1.0;
            } else {
                ops = ops + 1.0;
                if (bufferedStream.position() == 0) {
                    break;
                } else {
                    bufferedStream.seek(0);
                }
            }
        } else {
            buf.clear();
            double ret = (double) directStream.read(buf).get().getLen();
            if (ret > 0) {
                sumbytes = sumbytes + ret;
                ops = ops + 1.0;
            } else {
                ops = ops + 1.0;
                if (directStream.position() == 0) {
                    break;
                } else {
                    directStream.seek(0);
                }
            }
        }
    }
    long end = System.currentTimeMillis();
    double executionTime = ((double) (end - start)) / 1000.0;
    double throughput = 0.0;
    double latency = 0.0;
    double sumbits = sumbytes * 8.0;
    if (executionTime > 0) {
        throughput = sumbits / executionTime / 1000.0 / 1000.0;
        latency = 1000000.0 * executionTime / ops;
    }
    bufferedStream.close();
    directStream.close();

    System.out.println("execution time " + executionTime);
    System.out.println("ops " + ops);
    System.out.println("sumbytes " + sumbytes);
    System.out.println("throughput " + throughput);
    System.out.println("latency " + latency);

    fs.getStatistics().print("close");
}
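
The closing arithmetic turns the accumulated totals into megabits per second and microseconds per operation. A worked check with illustrative numbers (1000 reads of 1 MiB finishing in 2 seconds), not output from the benchmark itself:

double sumbytes = 1000.0 * 1024 * 1024;                       // 1000 ops x 1 MiB
double ops = 1000.0;
double executionTime = 2.0;                                   // seconds
double throughput = sumbytes * 8.0 / executionTime / 1000.0 / 1000.0;
double latency = 1000000.0 * executionTime / ops;
System.out.println(throughput + " Mbit/s");                   // 4194.304 Mbit/s
System.out.println(latency + " us/op");                       // 2000.0 us/op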

From source file:io.github.dsheirer.source.tuner.hackrf.HackRFTunerController.java

@Override
public void setTunedFrequency(long frequency) throws SourceException {
    ByteBuffer buffer = ByteBuffer.allocateDirect(8);
    buffer.order(ByteOrder.LITTLE_ENDIAN);

    int mhz = (int) (frequency / 1E6);
    int hz = (int) (frequency - (mhz * 1E6));

    buffer.putInt(mhz);
    buffer.putInt(hz);

    buffer.rewind();

    try {
        write(Request.SET_FREQUENCY, 0, 0, buffer);
    } catch (UsbException e) {
        mLog.error("error setting frequency [" + frequency + "]", e);

        throw new SourceException("error setting frequency [" + frequency + "]", e);
    }
}
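
The controller splits the target frequency into a whole-MHz field and a residual-Hz field and ships both as little-endian ints, the byte order the device request expects. A worked check of the split with an illustrative frequency:

long frequency = 101_100_000L;                    // 101.1 MHz
int mhz = (int) (frequency / 1E6);                // 101
int hz = (int) (frequency - (mhz * 1E6));         // 100000

ByteBuffer buffer = ByteBuffer.allocateDirect(8);
buffer.order(ByteOrder.LITTLE_ENDIAN);            // match the device's byte order
buffer.putInt(mhz);
buffer.putInt(hz);
buffer.rewind();                                  // position 0 before handing off to the USB write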

From source file:org.apache.bookkeeper.bookie.BookieJournalTest.java

private JournalChannel writeV5Journal(File journalDir, int numEntries, byte[] masterKey) throws Exception {
    long logId = System.currentTimeMillis();
    JournalChannel jc = new JournalChannel(journalDir, logId);

    BufferedChannel bc = jc.getBufferedChannel();

    ByteBuffer paddingBuff = ByteBuffer.allocateDirect(2 * JournalChannel.SECTOR_SIZE);
    ZeroBuffer.put(paddingBuff);
    byte[] data = new byte[4 * 1024 * 1024];
    Arrays.fill(data, (byte) 'X');
    long lastConfirmed = LedgerHandle.INVALID_ENTRY_ID;
    long length = 0;
    for (int i = 0; i <= numEntries; i++) {
        ByteBuffer packet;
        if (i == 0) {
            packet = generateMetaEntry(1, masterKey);
        } else {
            packet = ClientUtil.generatePacket(1, i, lastConfirmed, length, data, 0, i).toByteBuffer();
        }
        lastConfirmed = i;
        length += i;
        ByteBuffer lenBuff = ByteBuffer.allocate(4);
        lenBuff.putInt(packet.remaining());
        lenBuff.flip();
        bc.write(lenBuff);
        bc.write(packet);
        Journal.writePaddingBytes(jc, paddingBuff, JournalChannel.SECTOR_SIZE);
    }
    // write fence key
    ByteBuffer packet = generateFenceEntry(1);
    ByteBuffer lenBuf = ByteBuffer.allocate(4);
    lenBuf.putInt(packet.remaining());
    lenBuf.flip();
    bc.write(lenBuf);
    bc.write(packet);
    Journal.writePaddingBytes(jc, paddingBuff, JournalChannel.SECTOR_SIZE);
    bc.flush(true);
    updateJournalVersion(jc, JournalChannel.V5);
    return jc;
}
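
Each journal entry above is framed as a 4-byte length prefix followed by the payload, then padded out to JournalChannel.SECTOR_SIZE. A minimal sketch of the framing step alone, using plain NIO and none of the BookKeeper classes:

import java.nio.ByteBuffer;

// Frame a payload as [int length][payload bytes]; the length prefix is
// big-endian, NIO's default byte order.
static ByteBuffer frame(ByteBuffer payload) {
    ByteBuffer framed = ByteBuffer.allocate(4 + payload.remaining());
    framed.putInt(payload.remaining());
    framed.put(payload);
    framed.flip();                         // ready to write to the channel
    return framed;
}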

From source file:com.castis.sysComp.PoisConverterSysComp.java

private void writeFile(File file) throws IOException {

    Map<String, String> middleNodeMap = new HashMap<String, String>();

    String line = "";
    FileInputStream in = null;
    Reader isReader = null;
    LineNumberReader bufReader = null;

    FileOutputStream fos = null;
    String dir = filePolling.getValidFileDirectory(resultDir);

    String fileName = file.getName();

    int index = fileName.indexOf("-");
    if (index != -1) {
        fileName = fileName.substring(index + 1, fileName.length());
    }

    String tempDir = dir + "/temp/";
    File targetDirectory = new File(CiFileUtil.getReplaceFullPath(tempDir));
    if (!targetDirectory.isDirectory()) {
        CiFileUtil.createDirectory(tempDir);
    }

    fos = new FileOutputStream(tempDir + fileName);
    int byteSize = 2048;
    ByteBuffer byteBuffer = ByteBuffer.allocateDirect(byteSize);
    GatheringByteChannel outByteCh = fos.getChannel();

    try {
        String encodingCharset = FILE_CHARSET;
        in = new FileInputStream(file);
        isReader = new InputStreamReader(in, encodingCharset);
        bufReader = new LineNumberReader(isReader);

        boolean first = true;
        while ((line = bufReader.readLine()) != null) {

            if (line.length() == 0) {
                continue;
            }

            InputDataDTO data = new InputDataDTO();
            String[] result = line.split("\\|");

            if (first && result.length <= 1) {
                first = false;
                continue;
            }
            String platform = result[4];

            if (platform != null && platform.equalsIgnoreCase("stb"))
                platform = "STB";
            else if (platform != null && platform.equalsIgnoreCase("mobile")) {
                platform = "Mobile";
            }
            data.setPlatform(platform);

            List<TreeNodeDTO> tree = treeMap.get(platform);

            if (tree == null) {
                tree = getAxis(platform);
                treeMap.put(platform, tree);
            }

            String fullpath = getFullPath(tree, result[0]);

            data.setRegion(fullpath);
            data.setCategory(result[1]);
            data.setWeekday(result[2]);
            data.setHour(result[3]);
            data.setCount(Integer.parseInt(result[5]));

            List<subDataDTO> subDataList = writeNodeInfoOnFile(byteSize, byteBuffer, outByteCh, data, "Y");
            if (subDataList != null && subDataList.size() > 0) {
                writeMiddleNode(byteSize, byteBuffer, outByteCh, data, middleNodeMap, subDataList, "N");
            }
        }

        fos.close();

        index = fileName.indexOf("_");

        String targetDir = resultDir;
        File sourceFile = new File(tempDir + fileName);
        if (index != -1) {
            String directory = fileName.substring(0, index);
            targetDir += "/viewCount/" + directory;
        }

        try {

            File resultTargetDir = new File(CiFileUtil.getReplaceFullPath(targetDir));
            if (!resultTargetDir.isDirectory()) {
                CiFileUtil.createDirectory(targetDir);
            }

            CiFileUtil.renameFile(sourceFile, targetDir, fileName);
        } catch (Exception e) {
            log.error(e.getMessage());
        }

    } catch (Exception e) {
        String errorMsg = "Failed to parse line [line " + bufReader.getLineNumber() + ": " + line + "]";
        log.error(errorMsg, e);
        throw new DataParsingException(errorMsg, e);

    } finally {
        if (in != null)
            in.close();
        if (isReader != null)
            isReader.close();
        if (bufReader != null)
            bufReader.close();
        if (fos != null)
            fos.close();
    }
}
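
The writeNodeInfoOnFile and writeMiddleNode helpers are not shown on this page; judging from the parameters they receive, the core step they share is encoding text into the shared direct buffer and draining it to the gathering channel. A hedged sketch of that step (writeLine is an illustrative name, not part of the project):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.GatheringByteChannel;
import java.nio.charset.StandardCharsets;

static void writeLine(ByteBuffer byteBuffer, GatheringByteChannel channel, String line)
        throws IOException {
    byte[] encoded = line.getBytes(StandardCharsets.UTF_8);
    // Assumes the encoded line fits in the buffer; a fuller version
    // would encode and flush in chunks.
    byteBuffer.clear();
    byteBuffer.put(encoded);
    byteBuffer.flip();
    while (byteBuffer.hasRemaining()) {
        channel.write(byteBuffer);
    }
}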