Example usage for java.nio ByteBuffer allocateDirect

Introduction

This page collects usage examples for java.nio.ByteBuffer.allocateDirect, drawn from a range of open-source projects.

Prototype

public static ByteBuffer allocateDirect(int capacity) 

Document

Creates a direct byte buffer based on a newly allocated memory block.
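
Before the project examples below, here is a minimal self-contained sketch of the call itself (the class name is illustrative): allocate a direct buffer, write a few values, flip it, and read them back. Note that allocateDirect throws IllegalArgumentException for a negative capacity, and that direct allocations count against -XX:MaxDirectMemorySize rather than the Java heap.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class AllocateDirectExample {
    public static void main(String[] args) {
        // Allocate 16 bytes of off-heap memory; position = 0, limit = capacity = 16.
        ByteBuffer buffer = ByteBuffer.allocateDirect(16);
        buffer.order(ByteOrder.nativeOrder()); // match the platform byte order for native interop

        buffer.putInt(42);
        buffer.putLong(System.currentTimeMillis());

        buffer.flip(); // switch from writing to reading
        System.out.println(buffer.getInt() + " @ " + buffer.getLong());
        System.out.println("isDirect = " + buffer.isDirect()); // true
    }
}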

Usage

From source file:com.hadoop.compression.lzo.LzoCompressor.java

/**
 * Reallocates a direct byte buffer by freeing the old one and allocating
 * a new one, unless the size is the same, in which case it is simply
 * cleared and returned.
 *
 * NOTE: this uses unsafe APIs to manually free memory - if anyone else
 * has a reference to the 'buf' parameter they will likely read random
 * data or cause a segfault by accessing it.
 */
private ByteBuffer realloc(ByteBuffer buf, int newSize) {
    if (buf != null) {
        if (buf.capacity() == newSize) {
            // Can use existing buffer
            buf.clear();
            return buf;
        }
        try {
            // Manually free the old buffer using undocumented unsafe APIs.
            // If this fails, we'll drop the reference and hope GC finds it
            // eventually.
            Method cleanerMethod = buf.getClass().getMethod("cleaner");
            cleanerMethod.setAccessible(true);
            Object cleaner = cleanerMethod.invoke(buf);
            Method cleanMethod = cleaner.getClass().getMethod("clean");
            cleanMethod.setAccessible(true);
            cleanMethod.invoke(cleaner);
        } catch (Exception e) {
            // Perhaps a non-sun-derived JVM - contributions welcome
            LOG.warn("Couldn't realloc bytebuffer", e);
        }
    }
    return ByteBuffer.allocateDirect(newSize);
}
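
The reflective cleaner() lookup above works on JDK 8 and earlier but is blocked by module access checks on newer runtimes. A hedged alternative sketch for JDK 9+, assuming sun.misc.Unsafe.invokeCleaner is available (the wrapper class name is illustrative):

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;

public class DirectBufferFreer {

    /** Best-effort release of a direct buffer's native memory; falls back to GC on failure. */
    static void freeDirect(ByteBuffer buf) {
        if (buf == null || !buf.isDirect()) {
            return;
        }
        try {
            Class<?> unsafeClass = Class.forName("sun.misc.Unsafe");
            Field theUnsafe = unsafeClass.getDeclaredField("theUnsafe");
            theUnsafe.setAccessible(true);
            Object unsafe = theUnsafe.get(null);
            Method invokeCleaner = unsafeClass.getMethod("invokeCleaner", ByteBuffer.class);
            invokeCleaner.invoke(unsafe, buf);
        } catch (Exception e) {
            // invokeCleaner not available (pre-JDK 9 or unusual JVM): let GC reclaim the memory.
        }
    }

    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocateDirect(1 << 20); // 1 MiB
        freeDirect(buf);
        // buf must not be touched after this point.
    }
}

As with the original, freeing memory this way is unsafe: any other reference to the buffer now points at released native memory.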

From source file:com.kentdisplays.synccardboarddemo.MainActivity.java

/**
 * Creates the buffers we use to store information about the 3D world. OpenGL doesn't use Java
 * arrays, but rather needs data in a format it can understand. Hence we use ByteBuffers.
 * @param config The EGL configuration used when creating the surface.
 */
@Override
public void onSurfaceCreated(EGLConfig config) {
    Log.i(TAG, "onSurfaceCreated");

    // make a floor
    ByteBuffer bbFloorVertices = ByteBuffer.allocateDirect(DATA.FLOOR_COORDS.length * 4);
    bbFloorVertices.order(ByteOrder.nativeOrder());
    mFloorVertices = bbFloorVertices.asFloatBuffer();
    mFloorVertices.put(DATA.FLOOR_COORDS);
    mFloorVertices.position(0);

    ByteBuffer bbFloorNormals = ByteBuffer.allocateDirect(DATA.FLOOR_NORMALS.length * 4);
    bbFloorNormals.order(ByteOrder.nativeOrder());
    mFloorNormals = bbFloorNormals.asFloatBuffer();
    mFloorNormals.put(DATA.FLOOR_NORMALS);
    mFloorNormals.position(0);

    ByteBuffer bbFloorColors = ByteBuffer.allocateDirect(DATA.FLOOR_COLORS.length * 4);
    bbFloorColors.order(ByteOrder.nativeOrder());
    mFloorColors = bbFloorColors.asFloatBuffer();
    mFloorColors.put(DATA.FLOOR_COLORS);
    mFloorColors.position(0);

    int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, R.raw.light_vertex);
    int gridShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, R.raw.grid_fragment);

    mGlProgram = GLES20.glCreateProgram();
    GLES20.glAttachShader(mGlProgram, vertexShader);
    GLES20.glAttachShader(mGlProgram, gridShader);
    GLES20.glLinkProgram(mGlProgram);

    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    Matrix.setIdentityM(mModelFloor, 0);
    Matrix.translateM(mModelFloor, 0, 0, -mFloorDepth, 0); // Floor appears below user

    // Create the placeholder pages.
    mPages = new Page[4];
    mPages[0] = new Page(getResources().openRawResource(R.raw.boogie_board), mGlProgram, 0);
    mPages[1] = new Page(getResources().openRawResource(R.raw.house), mGlProgram, 1);
    mPages[2] = new Page(getResources().openRawResource(R.raw.placeholder), mGlProgram, 2);
    mPages[3] = new Page(getResources().openRawResource(R.raw.cylinder), mGlProgram, 3);

    checkGLError("onSurfaceCreated");
}
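
The float-array-to-direct-FloatBuffer dance above repeats for every vertex attribute. A small helper captures the pattern; the class and method names here are hypothetical, not part of the sample:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

public final class GlBuffers {
    private static final int BYTES_PER_FLOAT = 4;

    /** Copies a float array into a newly allocated direct, native-order FloatBuffer. */
    static FloatBuffer toDirectFloatBuffer(float[] data) {
        FloatBuffer fb = ByteBuffer.allocateDirect(data.length * BYTES_PER_FLOAT)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        fb.put(data);
        fb.position(0); // rewind so GL reads from the start
        return fb;
    }
}

With such a helper, each attribute buffer above collapses to a one-liner such as mFloorVertices = GlBuffers.toDirectFloatBuffer(DATA.FLOOR_COORDS);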

From source file:com.alvermont.terraj.fracplanet.geom.VertexBufferArray.java

/**
 * Creates a new instance of VertexBufferArray
 *
 * @param capacity The initial capacity of this buffer
 */
public VertexBufferArray(int capacity) {
    this.buffer = ByteBuffer.allocateDirect(capacity * ELEMENTSIZE).order(ByteOrder.nativeOrder());

    sliceAndDice(capacity);
}

From source file:hivemall.mf.OnlineMatrixFactorizationUDTF.java

@Override
public StructObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length < 3) {
        throw new UDFArgumentException(
                "_FUNC_ takes 3 arguments: INT user, INT item, FLOAT rating [, CONSTANT STRING options]");
    }
    this.userOI = HiveUtils.asIntCompatibleOI(argOIs[0]);
    this.itemOI = HiveUtils.asIntCompatibleOI(argOIs[1]);
    this.ratingOI = HiveUtils.asDoubleCompatibleOI(argOIs[2]);

    processOptions(argOIs);

    this.model = new FactorizedModel(this, factor, meanRating, rankInit);
    this.count = 0L;
    this.lastWritePos = 0L;
    this.userProbe = new float[factor];
    this.itemProbe = new float[factor];

    if (mapredContext != null && iterations > 1) {
        // invoke only at task node (initialize is also invoked in compilation)
        final File file;
        try {
            file = File.createTempFile("hivemall_mf", ".sgmt");
            file.deleteOnExit();
            if (!file.canWrite()) {
                throw new UDFArgumentException("Cannot write a temporary file: " + file.getAbsolutePath());
            }
        } catch (IOException ioe) {
            throw new UDFArgumentException(ioe);
        } catch (Throwable e) {
            throw new UDFArgumentException(e);
        }
        this.fileIO = new NioFixedSegment(file, RECORD_BYTES, false);
        this.inputBuf = ByteBuffer.allocateDirect(65536); // 64 KiB
    }

    ArrayList<String> fieldNames = new ArrayList<String>();
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    fieldNames.add("idx");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    fieldNames.add("Pu");
    fieldOIs.add(ObjectInspectorFactory
            .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableFloatObjectInspector));
    fieldNames.add("Qi");
    fieldOIs.add(ObjectInspectorFactory
            .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableFloatObjectInspector));
    if (useBiasClause) {
        fieldNames.add("Bu");
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
        fieldNames.add("Bi");
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
        if (updateMeanRating) {
            fieldNames.add("mu");
            fieldOIs.add(PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
        }
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}

From source file:com.yobidrive.diskmap.needles.NeedleManager.java

private void initializeBuffersAndCache() throws NeedleManagerException {
    if (logNumber < 0)
        createNewLog();
    // Now prepare read threads
    threadBufferQ = new ArrayBlockingQueue<ByteBuffer>(readThreads, true);
    try {
        for (int i = 0; i < readThreads; i++) {
            ByteBuffer needleBuffer = ByteBuffer
                    .allocateDirect(MAXKEYSIZE + MAXVERSIONSIZE + MAXDATASIZE + Needle.NEEDLEOVERHEAD);
            threadBufferQ.put(needleBuffer);
        }
    } catch (Throwable th) {
        logger.error("Error building needle reader buffers", th);
    }
    // Finally create readCaches
    NeedleWeighter needleWeighter = new NeedleWeighter();
    needleReadCache = CacheBuilder.newBuilder().weigher(needleWeighter).maximumWeight(this.maxCachedBytes)
            .build(new NeedleCacheLoader(this));
    NeedleHeaderWeighter needleHeaderWeighter = new NeedleHeaderWeighter();
    needleHeaderReadCache = (CacheBuilder.newBuilder()).weigher(needleHeaderWeighter)
            .maximumWeight(this.maxCachedHeaderBytes).build(new NeedleHeaderCacheLoader(this));
    logger.info(needleHeaderReadCache.stats().toString());
    // Create compacting buffer
    compactBuffer = ByteBuffer
            .allocateDirect(MAXKEYSIZE + MAXVERSIONSIZE + MAXDATASIZE + Needle.NEEDLEOVERHEAD);
}
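
The ArrayBlockingQueue filled with pre-allocated direct buffers is effectively a fixed-size buffer pool shared by the read threads. A minimal standalone sketch of that idea (class and method names are illustrative):

import java.nio.ByteBuffer;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class DirectBufferPool {
    private final BlockingQueue<ByteBuffer> pool;

    public DirectBufferPool(int buffers, int capacity) throws InterruptedException {
        pool = new ArrayBlockingQueue<>(buffers, true); // fair queue, as in the sample above
        for (int i = 0; i < buffers; i++) {
            pool.put(ByteBuffer.allocateDirect(capacity));
        }
    }

    public ByteBuffer take() throws InterruptedException {
        return pool.take(); // blocks until another thread returns a buffer
    }

    public void release(ByteBuffer buf) throws InterruptedException {
        buf.clear(); // reset position and limit before reuse
        pool.put(buf);
    }
}

Pooling is worthwhile here because direct allocation is comparatively expensive and the native memory is only reclaimed once the buffer object is garbage collected.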

From source file:com.linkedin.pinot.common.utils.MmapUtils.java

/**
 * Allocates a direct byte buffer, tracking usage information.
 *
 * @param capacity The capacity to allocate.
 * @param file The file that this byte buffer refers to
 * @param details Further details about the allocation
 * @return A new allocated byte buffer with the requested capacity
 */
public static ByteBuffer allocateDirectByteBuffer(final int capacity, final File file, final String details) {
    final String context;
    final AllocationContext allocationContext;
    if (file != null) {
        context = file.getAbsolutePath() + " (" + details + ")";
        allocationContext = new AllocationContext(file, details, AllocationType.DIRECT_BYTE_BUFFER);
    } else {
        context = "no file (" + details + ")";
        allocationContext = new AllocationContext(details, AllocationType.DIRECT_BYTE_BUFFER);
    }

    DIRECT_BYTE_BUFFER_USAGE.addAndGet(capacity);

    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Allocating byte buffer of size {} with context {}, allocation after operation {}",
                capacity, context, getTrackedAllocationStatus());
    }

    ByteBuffer byteBuffer = null;

    try {
        byteBuffer = ByteBuffer.allocateDirect(capacity);
    } catch (OutOfMemoryError e) {
        LOGGER.error("Ran out of direct memory while trying to allocate {} bytes (context {})", capacity,
                context, e);
        LOGGER.error("Allocation status {}", getTrackedAllocationStatus());
        Utils.rethrowException(e);
    }

    BUFFER_TO_CONTEXT_MAP.put(byteBuffer, allocationContext);

    return byteBuffer;
}
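
A stripped-down sketch of the same bookkeeping, using just an AtomicLong (the real MmapUtils also records per-buffer context and handles OutOfMemoryError; the class here is illustrative):

import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicLong;

public final class TrackedDirectBuffers {
    private static final AtomicLong DIRECT_BYTES_IN_USE = new AtomicLong();

    static ByteBuffer allocate(int capacity) {
        // May throw OutOfMemoryError once the direct memory limit is exhausted.
        ByteBuffer buffer = ByteBuffer.allocateDirect(capacity);
        DIRECT_BYTES_IN_USE.addAndGet(capacity);
        return buffer;
    }

    static long bytesInUse() {
        return DIRECT_BYTES_IN_USE.get();
    }
}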

From source file:com.google.vrtoolkit.cardboard.samples.treasurehunt.MainActivity.java

/**
 * Creates the buffers we use to store information about the 3D world.
 *
 * <p>OpenGL doesn't use Java arrays, but rather needs data in a format it can understand.
 * Hence we use ByteBuffers.
 *
 * @param config The EGL configuration used when creating the surface.
 */
@Override
public void onSurfaceCreated(EGLConfig config) {
    Log.i(TAG, "onSurfaceCreated");
    GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well.

    ByteBuffer bbVertices = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_COORDS.length * 4);
    bbVertices.order(ByteOrder.nativeOrder());
    cubeVertices = bbVertices.asFloatBuffer();
    cubeVertices.put(WorldLayoutData.CUBE_COORDS);
    cubeVertices.position(0);

    ByteBuffer bbColors = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_COLORS.length * 4);
    bbColors.order(ByteOrder.nativeOrder());
    cubeColors = bbColors.asFloatBuffer();
    cubeColors.put(WorldLayoutData.CUBE_COLORS);
    cubeColors.position(0);

    ByteBuffer bbFoundColors = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_FOUND_COLORS.length * 4);
    bbFoundColors.order(ByteOrder.nativeOrder());
    cubeFoundColors = bbFoundColors.asFloatBuffer();
    cubeFoundColors.put(WorldLayoutData.CUBE_FOUND_COLORS);
    cubeFoundColors.position(0);

    ByteBuffer bbNormals = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_NORMALS.length * 4);
    bbNormals.order(ByteOrder.nativeOrder());
    cubeNormals = bbNormals.asFloatBuffer();
    cubeNormals.put(WorldLayoutData.CUBE_NORMALS);
    cubeNormals.position(0);

    ByteBuffer mcbbVertices = ByteBuffer.allocateDirect(WorldLayoutData.MINI_CUBE_COORDS.length * 4);
    mcbbVertices.order(ByteOrder.nativeOrder());
    miniCubeVertices = mcbbVertices.asFloatBuffer();
    miniCubeVertices.put(WorldLayoutData.MINI_CUBE_COORDS);
    miniCubeVertices.position(0);

    ByteBuffer mcbbColors = ByteBuffer.allocateDirect(WorldLayoutData.MINI_CUBE_COLORS.length * 4);
    mcbbColors.order(ByteOrder.nativeOrder());
    miniCubeColors = mcbbColors.asFloatBuffer();
    miniCubeColors.put(WorldLayoutData.MINI_CUBE_COLORS);
    miniCubeColors.position(0);

    ByteBuffer mcbbNormals = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_NORMALS.length * 4);
    mcbbNormals.order(ByteOrder.nativeOrder());
    miniCubeNormals = mcbbNormals.asFloatBuffer();
    miniCubeNormals.put(WorldLayoutData.CUBE_NORMALS);
    miniCubeNormals.position(0);

    // make a floor
    ByteBuffer bbFloorVertices = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_COORDS.length * 4);
    bbFloorVertices.order(ByteOrder.nativeOrder());
    floorVertices = bbFloorVertices.asFloatBuffer();
    floorVertices.put(WorldLayoutData.FLOOR_COORDS);
    floorVertices.position(0);

    ByteBuffer bbFloorNormals = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_NORMALS.length * 4);
    bbFloorNormals.order(ByteOrder.nativeOrder());
    floorNormals = bbFloorNormals.asFloatBuffer();
    floorNormals.put(WorldLayoutData.FLOOR_NORMALS);
    floorNormals.position(0);

    ByteBuffer bbFloorColors = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_COLORS.length * 4);
    bbFloorColors.order(ByteOrder.nativeOrder());
    floorColors = bbFloorColors.asFloatBuffer();
    floorColors.put(WorldLayoutData.FLOOR_COLORS);
    floorColors.position(0);

    int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, R.raw.light_vertex);
    int gridShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, R.raw.grid_fragment);
    int passthroughShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, R.raw.passthrough_fragment);

    cubeProgram = GLES20.glCreateProgram();
    GLES20.glAttachShader(cubeProgram, vertexShader);
    GLES20.glAttachShader(cubeProgram, passthroughShader);
    GLES20.glLinkProgram(cubeProgram);
    GLES20.glUseProgram(cubeProgram);

    checkGLError("Cube program");

    cubePositionParam = GLES20.glGetAttribLocation(cubeProgram, "a_Position");
    cubeNormalParam = GLES20.glGetAttribLocation(cubeProgram, "a_Normal");
    cubeColorParam = GLES20.glGetAttribLocation(cubeProgram, "a_Color");

    cubeModelParam = GLES20.glGetUniformLocation(cubeProgram, "u_Model");
    cubeModelViewParam = GLES20.glGetUniformLocation(cubeProgram, "u_MVMatrix");
    cubeModelViewProjectionParam = GLES20.glGetUniformLocation(cubeProgram, "u_MVP");
    cubeLightPosParam = GLES20.glGetUniformLocation(cubeProgram, "u_LightPos");

    GLES20.glEnableVertexAttribArray(cubePositionParam);
    GLES20.glEnableVertexAttribArray(cubeNormalParam);
    GLES20.glEnableVertexAttribArray(cubeColorParam);

    checkGLError("Cube program params");

    //Minicube
    miniCubeProgram = GLES20.glCreateProgram();
    GLES20.glAttachShader(miniCubeProgram, vertexShader);
    GLES20.glAttachShader(miniCubeProgram, passthroughShader);
    GLES20.glLinkProgram(miniCubeProgram);
    GLES20.glUseProgram(miniCubeProgram);

    checkGLError("Cube program");

    miniCubePositionParam = GLES20.glGetAttribLocation(miniCubeProgram, "a_Position");
    miniCubeNormalParam = GLES20.glGetAttribLocation(miniCubeProgram, "a_Normal");
    miniCubeColorParam = GLES20.glGetAttribLocation(miniCubeProgram, "a_Color");

    miniCubeModelParam = GLES20.glGetUniformLocation(miniCubeProgram, "u_Model");
    miniCubeModelViewParam = GLES20.glGetUniformLocation(miniCubeProgram, "u_MVMatrix");
    miniCubeModelViewProjectionParam = GLES20.glGetUniformLocation(miniCubeProgram, "u_MVP");
    miniCubeLightPosParam = GLES20.glGetUniformLocation(miniCubeProgram, "u_LightPos");

    GLES20.glEnableVertexAttribArray(miniCubePositionParam);
    GLES20.glEnableVertexAttribArray(miniCubeNormalParam);
    GLES20.glEnableVertexAttribArray(miniCubeColorParam);

    checkGLError("Cube program params");

    floorProgram = GLES20.glCreateProgram();
    GLES20.glAttachShader(floorProgram, vertexShader);
    GLES20.glAttachShader(floorProgram, gridShader);
    GLES20.glLinkProgram(floorProgram);
    GLES20.glUseProgram(floorProgram);

    checkGLError("Floor program");

    floorModelParam = GLES20.glGetUniformLocation(floorProgram, "u_Model");
    floorModelViewParam = GLES20.glGetUniformLocation(floorProgram, "u_MVMatrix");
    floorModelViewProjectionParam = GLES20.glGetUniformLocation(floorProgram, "u_MVP");
    floorLightPosParam = GLES20.glGetUniformLocation(floorProgram, "u_LightPos");

    floorPositionParam = GLES20.glGetAttribLocation(floorProgram, "a_Position");
    floorNormalParam = GLES20.glGetAttribLocation(floorProgram, "a_Normal");
    floorColorParam = GLES20.glGetAttribLocation(floorProgram, "a_Color");

    GLES20.glEnableVertexAttribArray(floorPositionParam);
    GLES20.glEnableVertexAttribArray(floorNormalParam);
    GLES20.glEnableVertexAttribArray(floorColorParam);

    checkGLError("Floor program params");

    Matrix.setIdentityM(modelFloor, 0);
    Matrix.translateM(modelFloor, 0, 0, -floorDepth, 0); // Floor appears below user.

    // Avoid any delays during start-up due to decoding of sound files.
    new Thread(new Runnable() {
        public void run() {
            // Start spatial audio playback of SOUND_FILE at the model position. The returned
            // soundId handle is stored and allows for repositioning the sound object whenever
            // the cube position changes.
            cardboardAudioEngine.preloadSoundFile(SOUND_FILE);
            soundId = cardboardAudioEngine.createSoundObject(SOUND_FILE);
            cardboardAudioEngine.setSoundObjectPosition(soundId, modelPosition[0], modelPosition[1],
                    modelPosition[2]);
            cardboardAudioEngine.playSound(soundId, true /* looped playback */);
        }
    }).start();

    updateModelPosition();

    checkGLError("onSurfaceCreated");
}

From source file:password.pwm.util.secure.SecureEngine.java

public static String hash(final File file, final PwmHashAlgorithm hashAlgorithm)
        throws IOException, PwmUnrecoverableException {
    FileInputStream fileInputStream = null;
    try {
        final MessageDigest messageDigest = MessageDigest.getInstance(hashAlgorithm.getAlgName());
        fileInputStream = new FileInputStream(file);
        final FileChannel fileChannel = fileInputStream.getChannel();
        final ByteBuffer byteBuffer = ByteBuffer.allocateDirect(1024 * 8);

        while (fileChannel.read(byteBuffer) > 0) {
            byteBuffer.flip();
            messageDigest.update(byteBuffer);
            byteBuffer.clear();
        }

        return JavaHelper.byteArrayToHexString(messageDigest.digest());

    } catch (NoSuchAlgorithmException | IOException e) {
        final String errorMsg = "unexpected error during file hash operation: " + e.getMessage();
        final ErrorInformation errorInformation = new ErrorInformation(PwmError.ERROR_CRYPT_ERROR, errorMsg);
        throw new PwmUnrecoverableException(errorInformation);
    } finally {
        IOUtils.closeQuietly(fileInputStream);
    }
}
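
The read/flip/update/clear loop is the usual way to feed a FileChannel through a fixed direct buffer into a MessageDigest. A hedged, self-contained variant using try-with-resources (algorithm and class name chosen for illustration, without the pwm-specific error handling):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public final class FileHasher {
    static byte[] sha256(Path file) throws IOException, NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance("SHA-256");
        ByteBuffer buffer = ByteBuffer.allocateDirect(8 * 1024);
        try (FileChannel channel = FileChannel.open(file, StandardOpenOption.READ)) {
            while (channel.read(buffer) > 0) {
                buffer.flip();         // expose the bytes just read
                digest.update(buffer); // consumes everything up to the limit
                buffer.clear();        // ready for the next read
            }
        }
        return digest.digest();
    }
}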

From source file:org.dawnsci.dde.templates.AbstractTemplateTestBase.java

private static boolean isEqual(InputStream i1, InputStream i2) throws IOException {

    ReadableByteChannel ch1 = Channels.newChannel(i1);
    ReadableByteChannel ch2 = Channels.newChannel(i2);

    ByteBuffer buf1 = ByteBuffer.allocateDirect(1024);
    ByteBuffer buf2 = ByteBuffer.allocateDirect(1024);

    try {
        while (true) {

            int n1 = ch1.read(buf1);
            int n2 = ch2.read(buf2);

            if (n1 == -1 || n2 == -1)
                return n1 == n2;

            buf1.flip();
            buf2.flip();

            for (int i = 0; i < Math.min(n1, n2); i++)
                if (buf1.get() != buf2.get())
                    return false;

            buf1.compact();
            buf2.compact();
        }

    } finally {
        if (i1 != null)
            i1.close();
        if (i2 != null)
            i2.close();
    }
}

From source file:com.panet.imeta.trans.steps.fixedinput.FixedInput.java

public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
    meta = (FixedInputMeta) smi;
    data = (FixedInputData) sdi;

    if (super.init(smi, sdi)) {
        try {
            data.preferredBufferSize = Integer.parseInt(environmentSubstitute(meta.getBufferSize()));
            data.lineWidth = Integer.parseInt(environmentSubstitute(meta.getLineWidth()));
            data.filename = environmentSubstitute(meta.getFilename());

            if (Const.isEmpty(data.filename)) {
                logError(Messages.getString("FixedInput.MissingFilename.Message"));
                return false;
            }

            FileObject fileObject = KettleVFS.getFileObject(data.filename);
            try {
                FileInputStream fileInputStream = KettleVFS.getFileInputStream(fileObject);
                data.fc = fileInputStream.getChannel();
                data.bb = ByteBuffer.allocateDirect(data.preferredBufferSize);
            } catch (IOException e) {
                logError(e.toString());
                return false;
            }

            // Add filename to result filenames ?
            if (meta.isAddResultFile()) {
                ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, fileObject,
                        getTransMeta().getName(), toString());
                resultFile.setComment("File was read by a Fixed input step");
                addResultFile(resultFile);
            }

            logBasic("Opened file with name [" + data.filename + "]");

            data.stopReading = false;

            if (meta.isRunningInParallel()) {
                data.stepNumber = getUniqueStepNrAcrossSlaves();
                data.totalNumberOfSteps = getUniqueStepCountAcrossSlaves();
                data.fileSize = fileObject.getContent().getSize();
            }

            // OK, now we need to skip a number of bytes in case we're doing a parallel read.
            //
            if (meta.isRunningInParallel()) {

                int totalLineWidth = data.lineWidth + meta.getLineSeparatorLength(); // including line separator bytes
                long nrRows = data.fileSize / totalLineWidth; // 100.000 / 100 = 1000 rows
                long rowsToSkip = Math.round(data.stepNumber * nrRows / (double) data.totalNumberOfSteps); // 0, 333, 667
                long nextRowsToSkip = Math
                        .round((data.stepNumber + 1) * nrRows / (double) data.totalNumberOfSteps); // 333, 667, 1000
                data.rowsToRead = nextRowsToSkip - rowsToSkip;
                long bytesToSkip = rowsToSkip * totalLineWidth;

                logBasic("Step #" + data.stepNumber + " is skipping " + bytesToSkip
                        + " to position in file, then it's reading " + data.rowsToRead + " rows.");

                data.fc.position(bytesToSkip);
            }

            return true;
        } catch (IOException e) {
            logError("Error opening file '" + meta.getFilename() + "' : " + e.toString());
            logError(Const.getStackTracker(e));
        }
    }
    return false;
}
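
The parallel-read setup reduces to a small partitioning calculation: each step rounds its share of the total row count to a start row, reads up to the next step's start row, and seeks to start row times line width. A standalone sketch of just that arithmetic (names are illustrative):

public final class ParallelFixedSplit {

    /** Returns { bytesToSkip, rowsToRead } for one step of a parallel fixed-width read. */
    static long[] split(long fileSize, int totalLineWidth, int stepNumber, int totalSteps) {
        long nrRows = fileSize / totalLineWidth;
        long rowsToSkip = Math.round(stepNumber * nrRows / (double) totalSteps);
        long nextRowsToSkip = Math.round((stepNumber + 1) * nrRows / (double) totalSteps);
        long bytesToSkip = rowsToSkip * totalLineWidth;
        return new long[] { bytesToSkip, nextRowsToSkip - rowsToSkip };
    }

    public static void main(String[] args) {
        // A 100,000-byte file of 100-byte rows split across 3 steps: 333 / 334 / 333 rows.
        for (int step = 0; step < 3; step++) {
            long[] part = split(100_000, 100, step, 3);
            System.out.println("step " + step + ": skip " + part[0] + " bytes, read " + part[1] + " rows");
        }
    }
}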