Example usage for android.opengl Matrix multiplyMM

List of usage examples for android.opengl Matrix multiplyMM

Introduction

On this page you can find example usages of android.opengl.Matrix.multiplyMM.

Prototype

public static native void multiplyMM(float[] result, int resultOffset, float[] lhs, int lhsOffset, float[] rhs,
        int rhsOffset);

Source Link

Document

Multiplies two 4x4 matrices together and stores the result in a third 4x4 matrix.

Usage

From source file:Main.java

/**
 * Multiplies two 4x4 matrices and returns a newly allocated product.
 *
 * android.opengl.Matrix works on column-major data, so both inputs are
 * transposed before the native multiply and the product is transposed
 * back — presumably because the caller stores matrices row-major
 * (TODO confirm against the call sites).
 *
 * @param m1 left-hand 16-element matrix
 * @param m2 right-hand 16-element matrix
 * @return a new 16-element array holding the product
 */
public static float[] multMatrix(float[] m1, float[] m2) {
    final float[] lhsTransposed = new float[16];
    final float[] rhsTransposed = new float[16];
    Matrix.transposeM(lhsTransposed, 0, m1, 0);
    Matrix.transposeM(rhsTransposed, 0, m2, 0);

    // Multiply in the layout android.opengl.Matrix expects.
    final float[] product = new float[16];
    Matrix.multiplyMM(product, 0, lhsTransposed, 0, rhsTransposed, 0);

    // Transpose back to the caller's layout.
    final float[] result = new float[16];
    Matrix.transposeM(result, 0, product, 0);
    return result;
}

From source file:Main.java

/**
 * Returns a new matrix with the result of a * b.
 *
 * @param a left-hand 4x4 matrix (column-major, as android.opengl.Matrix expects)
 * @param b right-hand 4x4 matrix
 * @return a newly allocated 16-element array holding a * b
 */
public static float[] multiplyMatrices(float[] a, float[] b) {
    final float[] product = new float[16];
    Matrix.multiplyMM(product, 0, a, 0, b, 0);
    return product;
}

From source file:com.dmitrybrant.android.cardboardmpo.MainActivity.java

@Override
public void onDrawEye(Eye eye) {
    GLES20.glDisable(GLES20.GL_DEPTH_TEST);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    // TODO: Do something with the head transform (e.g. pan the photo around).
    // For now just reset the view matrix, so that the photo is in the center
    // at all times. (The original code first computed
    // eye.getEyeView() * camera into `view`, but that product was
    // unconditionally overwritten by the identity below, so the dead
    // multiply has been removed.)
    Matrix.setIdentityM(view, 0);

    float[] perspective = eye.getPerspective(Z_NEAR, Z_FAR);
    // NOTE(review): the magic number 1 presumably corresponds to
    // Eye.Type.LEFT in the Cardboard SDK — confirm and replace with the
    // named constant.
    if (eye.getType() == 1) {
        Matrix.multiplyMM(modelView, 0, view, 0, rectLeftEye.getModelMatrix(), 0);
        Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);
        rectLeftEye.draw(modelViewProjection);
    } else {
        Matrix.multiplyMM(modelView, 0, view, 0, rectRightEye.getModelMatrix(), 0);
        Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);
        rectRightEye.draw(modelViewProjection);
    }
}

From source file:com.kentdisplays.synccardboarddemo.Page.java

/**
 * Encapsulates the OpenGL ES instructions for drawing this page.
 *
 * Looks up the shader's attribute/uniform handles, uploads the model,
 * model-view and model-view-projection matrices, binds the vertex
 * attributes and issues the draw call. Assumes the program held in
 * {@code mGlProgram} is already in use on the current GL context.
 *
 * @param perspective the eye's 4x4 projection matrix
 * @param view        the eye's 4x4 view matrix
 */
public void draw(float[] perspective, float[] view) {
    // Resolve shader handles each draw; cached fields are overwritten.
    mPositionParam = GLES20.glGetAttribLocation(mGlProgram, "a_Position");
    mNormalParam = GLES20.glGetAttribLocation(mGlProgram, "a_Normal");
    mColorParam = GLES20.glGetAttribLocation(mGlProgram, "a_Color");
    mModelViewProjectionParam = GLES20.glGetUniformLocation(mGlProgram, "u_MVP");
    mIsFloorParam = GLES20.glGetUniformLocation(mGlProgram, "u_IsFloor");
    mModelParam = GLES20.glGetUniformLocation(mGlProgram, "u_Model");
    mModelViewParam = GLES20.glGetUniformLocation(mGlProgram, "u_MVMatrix");

    // This is not the floor!
    GLES20.glUniform1f(mIsFloorParam, 0f);

    // Set the Model in the shader, used to calculate lighting.
    GLES20.glUniformMatrix4fv(mModelParam, 1, false, mModel, 0);

    // Build the ModelView and ModelViewProjection matrices
    // for calculating cube position and light.
    float[] modelView = new float[16];
    float[] modelViewProjection = new float[16];
    Matrix.multiplyMM(modelView, 0, view, 0, mModel, 0);
    Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);

    // Set the ModelView in the shader, used to calculate lighting.
    GLES20.glUniformMatrix4fv(mModelViewParam, 1, false, modelView, 0);

    // Set the position of the cube.
    GLES20.glVertexAttribPointer(mPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, mPageVertices);

    // Set the ModelViewProjection matrix in the shader.
    GLES20.glUniformMatrix4fv(mModelViewProjectionParam, 1, false, modelViewProjection, 0);

    // Set the normal positions of the cube, again for shading.
    GLES20.glVertexAttribPointer(mNormalParam, 3, GLES20.GL_FLOAT, false, 0, mPageNormals);

    // Per-vertex colors, 4 components each.
    GLES20.glVertexAttribPointer(mColorParam, 4, GLES20.GL_FLOAT, false, 0, mPageColors);

    // Animate over all the paths every 30 seconds: draw only a prefix of the
    // path list proportional to the elapsed time, 6 vertices per path.
    long time = SystemClock.uptimeMillis() % 30000L;
    int numberOfPathsToDraw = Math.round(mNumberOfPaths / 30000.0f * time);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, numberOfPathsToDraw * 6);
}

From source file:com.wlanjie.streaming.camera.CameraView.java

@Override
public void onDrawFrame(GL10 gl) {
    // Render one camera preview frame.
    GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    // Latch the most recent camera frame into the external texture.
    mSurfaceTexture.updateTexImage();

    // Combine the surface's texture transform with the projection matrix
    // and hand the result to the EGL renderer before drawing the texture.
    mSurfaceTexture.getTransformMatrix(mSurfaceMatrix);
    Matrix.multiplyMM(mTransformMatrix, 0, mSurfaceMatrix, 0, mProjectionMatrix, 0);
    mEglCore.setTextureTransformMatrix(mTransformMatrix);
    mEglCore.onDrawFrame(mTextureId);
    // NOTE(review): message 0's meaning is defined by mHandler elsewhere —
    // presumably a frame-rendered notification; confirm at the handler.
    mHandler.sendEmptyMessage(0);
}

From source file:com.kentdisplays.synccardboarddemo.MainActivity.java

/**
 * Draws a frame for an eye. The transformation for that eye (from the camera)
 * is passed in as a parameter.
 *
 * @param transform The transformations to apply to render this eye.
 */
@Override
public void onDrawEye(EyeTransform transform) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    GLES20.glClearColor(0f, 0f, 0f, 1.00f); // Dark background so text shows up well

    // Resolve per-draw attribute handles from the shader program.
    mPositionParam = GLES20.glGetAttribLocation(mGlProgram, "a_Position");
    mNormalParam = GLES20.glGetAttribLocation(mGlProgram, "a_Normal");
    mColorParam = GLES20.glGetAttribLocation(mGlProgram, "a_Color");

    GLES20.glEnableVertexAttribArray(mPositionParam);
    GLES20.glEnableVertexAttribArray(mNormalParam);
    GLES20.glEnableVertexAttribArray(mColorParam);
    checkGLError("mColorParam");

    // Apply the eye transformation to the camera.
    Matrix.multiplyMM(mView, 0, transform.getEyeView(), 0, mCamera, 0);

    // Set the position of the light: transform the world-space light position
    // into eye space and upload it to the shader.
    Matrix.multiplyMV(mLightPosInEyeSpace, 0, mView, 0, mLightPosInWorldSpace, 0);
    GLES20.glUniform3f(mLightPosParam, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1], mLightPosInEyeSpace[2]);

    // Draw the pages.
    for (Page page : mPages) {
        page.draw(transform.getPerspective(), mView);
        checkGLError("Drawing page");
    }

    // Set mModelView for the floor, so we draw floor in the correct location.
    // NOTE(review): drawFloor presumably reads the mModelViewProjection field
    // computed here rather than its perspective argument — confirm at drawFloor.
    Matrix.multiplyMM(mModelView, 0, mView, 0, mModelFloor, 0);
    Matrix.multiplyMM(mModelViewProjection, 0, transform.getPerspective(), 0, mModelView, 0);
    drawFloor(transform.getPerspective());
}

From source file:com.projecttango.examples.java.openglar.OpenGlAugmentedRealityActivity.java

/**
 * Here is where you would set up your rendering logic. We're replacing it with
 * a minimalistic, dummy example, using a standard GLSurfaceView and a basic
 * renderer, for illustration purposes only.
 *
 * The pre-render callback runs on the OpenGL thread: it configures the scene
 * camera from the RGB camera intrinsics, connects the Tango color camera to
 * the renderer's texture, and — when a new frame arrives — updates the view
 * matrix and animates the Earth/Moon transforms.
 */
private void setupRenderer() {
    mSurfaceView.setEGLContextClientVersion(2);
    mRenderer = new OpenGlAugmentedRealityRenderer(this, new OpenGlAugmentedRealityRenderer.RenderCallback() {
        // Timestamp (seconds) of the last RGB frame used for animation deltas.
        private double lastRenderedTimeStamp;

        @Override
        public void preRender() {
            // This is the work that you would do on your main OpenGL render thread.

            try {
                // Synchronize against concurrently disconnecting the service triggered
                // from the UI thread.
                synchronized (OpenGlAugmentedRealityActivity.this) {
                    // We need to be careful not to run any Tango-dependent code in the
                    // OpenGL thread unless we know the Tango Service is properly
                    // set up and connected.
                    if (!mIsConnected) {
                        return;
                    }

                    // Set up scene camera projection to match RGB camera intrinsics.
                    if (!mRenderer.isProjectionMatrixConfigured()) {
                        TangoCameraIntrinsics intrinsics = TangoSupport
                                .getCameraIntrinsicsBasedOnDisplayRotation(
                                        TangoCameraIntrinsics.TANGO_CAMERA_COLOR, mDisplayRotation);
                        mRenderer.setProjectionMatrix(projectionMatrixFromCameraIntrinsics(intrinsics));
                    }
                    // Connect the Tango SDK to the OpenGL texture ID where we are
                    // going to render the camera.
                    // NOTE: This must be done after both the texture is generated
                    // and the Tango Service is connected.
                    if (mConnectedTextureIdGlThread != mRenderer.getTextureId()) {
                        mTango.connectTextureId(TangoCameraIntrinsics.TANGO_CAMERA_COLOR,
                                mRenderer.getTextureId());
                        mConnectedTextureIdGlThread = mRenderer.getTextureId();
                        Log.d(TAG, "connected to texture id: " + mRenderer.getTextureId());
                    }
                    // If there is a new RGB camera frame available, update the texture
                    // and scene camera pose.
                    if (mIsFrameAvailableTangoThread.compareAndSet(true, false)) {
                        // {@code mRgbTimestampGlThread} contains the exact timestamp at
                        // which the rendered RGB frame was acquired.
                        mRgbTimestampGlThread = mTango.updateTexture(TangoCameraIntrinsics.TANGO_CAMERA_COLOR);

                        // Get the transform from color camera to Start of Service
                        // at the timestamp of the RGB image in OpenGL coordinates.
                        //
                        // When drift correction mode is enabled in config file, we need
                        // to query the device with respect to Area Description pose in
                        // order to use the drift-corrected pose.
                        //
                        // Note that if you don't want to use the drift corrected pose,
                        // the normal device with respect to start of service pose is
                        // still available.
                        TangoSupport.TangoMatrixTransformData transform = TangoSupport.getMatrixTransformAtTime(
                                mRgbTimestampGlThread, TangoPoseData.COORDINATE_FRAME_AREA_DESCRIPTION,
                                TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR,
                                TangoSupport.TANGO_SUPPORT_ENGINE_OPENGL,
                                TangoSupport.TANGO_SUPPORT_ENGINE_OPENGL, mDisplayRotation);
                        if (transform.statusCode == TangoPoseData.POSE_VALID) {

                            mRenderer.updateViewMatrix(transform.matrix);
                            double deltaTime = mRgbTimestampGlThread - lastRenderedTimeStamp;
                            lastRenderedTimeStamp = mRgbTimestampGlThread;

                            // Set the earth rotation around itself:
                            // one full revolution every 10 seconds.
                            float[] openGlTEarth = new float[16];
                            Matrix.rotateM(mEarthMoonCenterTEarth, 0, (float) deltaTime * 360 / 10, 0, 1, 0);
                            Matrix.multiplyMM(openGlTEarth, 0, mOpenGLTEarthMoonCenter, 0,
                                    mEarthMoonCenterTEarth, 0);

                            // Set moon rotation around the earth and moon center:
                            // one full orbit every 50 seconds.
                            float[] openGlTMoon = new float[16];
                            Matrix.rotateM(mEarthMoonCenterTMoonRotation, 0, (float) deltaTime * 360 / 50, 0, 1,
                                    0);
                            float[] mEarthTMoon = new float[16];
                            Matrix.multiplyMM(mEarthTMoon, 0, mEarthMoonCenterTMoonRotation, 0,
                                    mEarthMoonCenterTTranslation, 0);
                            Matrix.multiplyMM(openGlTMoon, 0, mOpenGLTEarthMoonCenter, 0, mEarthTMoon, 0);

                            mRenderer.setEarthTransform(openGlTEarth);
                            mRenderer.setMoonTransform(openGlTMoon);
                        } else {
                            // When the pose status is not valid, it indicates tracking
                            // has been lost. In this case, we simply stop rendering.
                            //
                            // This is also the place to display UI to suggest that the
                            // user walk to recover tracking.
                            Log.w(TAG, "Could not get a valid transform at time " + mRgbTimestampGlThread);
                        }
                    }
                }
                // Avoid crashing the application due to unhandled exceptions.
            } catch (TangoErrorException e) {
                Log.e(TAG, "Tango API call error within the OpenGL render thread", e);
            } catch (Throwable t) {
                Log.e(TAG, "Exception on the OpenGL thread", t);
            }
        }
    });

    // Set the starting position and orientation of the Earth and Moon with respect to the
    // OpenGL frame: the Earth/Moon system sits 1 unit in front of the origin,
    // and the Moon's orbit radius is 0.5 units.
    Matrix.setIdentityM(mOpenGLTEarthMoonCenter, 0);
    Matrix.translateM(mOpenGLTEarthMoonCenter, 0, 0, 0, -1f);
    Matrix.setIdentityM(mEarthMoonCenterTEarth, 0);
    Matrix.setIdentityM(mEarthMoonCenterTMoonRotation, 0);
    Matrix.setIdentityM(mEarthMoonCenterTTranslation, 0);
    Matrix.translateM(mEarthMoonCenterTTranslation, 0, 0.5f, 0, 0);

    mSurfaceView.setRenderer(mRenderer);
}

From source file:com.projecttango.examples.java.modelcorrespondence.ModelCorrespondenceActivity.java

/**
 * Connects the view and renderer to the color camera and callbacks.
 *
 * Registers a Rajawali pre-frame callback that, on the GL thread, keeps the
 * scene camera in sync with the device's RGB camera: it configures the
 * projection from camera intrinsics, (re)connects the camera texture, updates
 * the camera pose per rendered frame, and — until the correspondence is done —
 * pins the house model to the corner of the screen by following the camera.
 */
private void connectRenderer() {
    // Register a Rajawali Scene Frame Callback to update the scene camera pose whenever a new
    // RGB frame is rendered.
    // (@see https://github.com/Rajawali/Rajawali/wiki/Scene-Frame-Callbacks)
    mRenderer.getCurrentScene().registerFrameCallback(new ASceneFrameCallback() {
        @Override
        public void onPreFrame(long sceneTime, double deltaTime) {
            // Prevent concurrent access to {@code mIsFrameAvailableTangoThread} from the Tango
            // callback thread and service disconnection from an onPause event.
            try {
                synchronized (ModelCorrespondenceActivity.this) {
                    // Don't execute any Tango API actions if we're not connected to the
                    // service.
                    if (!mIsConnected) {
                        return;
                    }

                    // Set up scene camera projection to match RGB camera intrinsics.
                    if (!mRenderer.isSceneCameraConfigured()) {
                        TangoCameraIntrinsics intrinsics = TangoSupport
                                .getCameraIntrinsicsBasedOnDisplayRotation(
                                        TangoCameraIntrinsics.TANGO_CAMERA_COLOR, mDisplayRotation);
                        mRenderer.setProjectionMatrix(projectionMatrixFromCameraIntrinsics(intrinsics));
                    }

                    // Connect the camera texture to the OpenGL Texture if necessary.
                    // NOTE: When the OpenGL context is recycled, Rajawali may re-generate the
                    // texture with a different ID.
                    if (mConnectedTextureIdGlThread != mRenderer.getTextureId()) {
                        mTango.connectTextureId(TangoCameraIntrinsics.TANGO_CAMERA_COLOR,
                                mRenderer.getTextureId());
                        mConnectedTextureIdGlThread = mRenderer.getTextureId();
                        Log.d(TAG, "connected to texture id: " + mRenderer.getTextureId());
                    }

                    // If there is a new RGB camera frame available, update the texture with
                    // it.
                    if (mIsFrameAvailableTangoThread.compareAndSet(true, false)) {
                        mRgbTimestampGlThread = mTango.updateTexture(TangoCameraIntrinsics.TANGO_CAMERA_COLOR);
                    }

                    // If a new RGB frame has been rendered, update the camera pose to match.
                    if (mRgbTimestampGlThread > mCameraPoseTimestamp) {
                        // Calculate the camera color pose at the camera frame update time in
                        // OpenGL engine.
                        TangoPoseData lastFramePose = TangoSupport.getPoseAtTime(mRgbTimestampGlThread,
                                TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,
                                TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR,
                                TangoSupport.TANGO_SUPPORT_ENGINE_OPENGL,
                                TangoSupport.TANGO_SUPPORT_ENGINE_OPENGL, mDisplayRotation);

                        if (lastFramePose.statusCode == TangoPoseData.POSE_VALID) {
                            // Update the camera pose from the renderer.
                            mRenderer.updateRenderCameraPose(lastFramePose);
                            mCameraPoseTimestamp = lastFramePose.timestamp;
                            // While the correspondence is not done, fix the model to the upper
                            // right corner of the screen by following the camera.
                            if (!mCorrespondenceDone) {
                                TangoSupport.TangoMatrixTransformData transform = TangoSupport
                                        .getMatrixTransformAtTime(mCameraPoseTimestamp,
                                                TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,
                                                TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR,
                                                TangoSupport.TANGO_SUPPORT_ENGINE_OPENGL,
                                                TangoSupport.TANGO_SUPPORT_ENGINE_OPENGL, mDisplayRotation);
                                if (transform.statusCode == TangoPoseData.POSE_VALID) {
                                    // Place it in the top left corner, and rotate and scale it
                                    // accordingly.
                                    float[] rgbTHouse = calculateModelTransformFixedToCam(mDisplayRotation);
                                    // Combine the two transforms: camera pose times the
                                    // camera-relative model transform.
                                    float[] openGlTHouse = new float[16];
                                    Matrix.multiplyMM(openGlTHouse, 0, transform.matrix, 0, rgbTHouse, 0);
                                    mOpenGlTHouse = openGlTHouse;
                                    mModelUpdated = true;
                                } else {
                                    Log.w(TAG, "Can't get camera transform at time: " + mCameraPoseTimestamp);
                                }
                            }
                        } else {
                            Log.w(TAG, "Can't get device pose at time: " + mRgbTimestampGlThread);
                        }
                    }

                    // If the model was updated then it must be re-rendered.
                    if (mModelUpdated) {
                        mRenderer.updateModelRendering(mHouseModel, mOpenGlTHouse, mDestPointList);
                        mModelUpdated = false;
                    }
                }
                // Avoid crashing the application due to unhandled exceptions.
            } catch (TangoErrorException e) {
                Log.e(TAG, "Tango API call error within the OpenGL render thread", e);
            } catch (Throwable t) {
                Log.e(TAG, "Exception on the OpenGL thread", t);
            }
        }

        @Override
        public void onPreDraw(long sceneTime, double deltaTime) {
            // No per-draw work needed; only onPreFrame is used.
        }

        @Override
        public void onPostFrame(long sceneTime, double deltaTime) {
            // No post-frame work needed.
        }

        @Override
        public boolean callPreFrame() {
            // Opt in to onPreFrame callbacks (off by default in Rajawali).
            return true;
        }
    });
}

From source file:com.google.vrtoolkit.cardboard.samples.treasurehunt.MainActivity.java

/**
 * Positions and orients the mini cube from the tracked hand position and a
 * rotation quaternion, then tells the Armo device to lock when the hand
 * crosses the wall plane.
 *
 * The hand coordinates are negated, divided by 50 to normalize the distance,
 * and the Y/Z axes are swapped to account for the reversed coordinate frame.
 *
 * @param x quaternion x component (negated before use — conjugate rotation)
 * @param y quaternion y component (negated before use)
 * @param z quaternion z component (negated before use)
 * @param w quaternion w component
 */
private void updateMiniCubePosition(float x, float y, float z, float w) {
    // Translation from the scaled, axis-swapped hand position.
    float[] translation = new float[16];
    Matrix.setIdentityM(translation, 0);
    Matrix.translateM(translation, 0, -handPos[0] / 50f, -handPos[2] / 50f, -handPos[1] / 50f);

    // Rotation from the quaternion conjugate (-x, -y, -z, w).
    // NOTE(review): assumes quaternionToMatrix fills all 16 entries — the
    // matrix is deliberately not initialized to identity first.
    float[] rotation = new float[16];
    quaternionToMatrix(rotation, -x, -y, -z, w);

    // modelMiniCube = rotation * translation.
    Matrix.multiplyMM(modelMiniCube, 0, rotation, 0, translation, 0);

    Log.i("Armo", String.format("%f", -handPos[1] / (float) 50));
    // Offset by 1 because handPos tracks the cube center; the lock should
    // trigger at the wall surface.
    if (-handPos[1] / (float) 50 < -WALL_DIST + 1) {
        sendArmoRequest(true);
        Log.i("Armo", "Sending_lock");
    } else {
        sendArmoRequest(false);
        Log.i("Armo", "Sending_unlock");
    }
}

From source file:com.google.vrtoolkit.cardboard.samples.treasurehunt.MainActivity.java

/**
 * Overload that positions the mini cube from the tracked hand position with a
 * fixed 45-degree rotation about the X axis.
 *
 * NOTE(review): the pitch/yaw/roll parameters are currently unused — the
 * rotation is hard-coded. They are kept for interface compatibility; wire
 * them in (or drop this overload) once the intended behavior is decided.
 *
 * @param pitch unused
 * @param yaw   unused
 * @param roll  unused
 */
private void updateMiniCubePosition(float pitch, float yaw, float roll) {
    // Translation from the scaled, axis-swapped hand position.
    float[] translation = new float[16];
    Matrix.setIdentityM(translation, 0);
    Matrix.translateM(translation, 0, -handPos[0] / 50f, -handPos[2] / 50f, -handPos[1] / 50f);

    // Fixed rotation: 45 degrees about the X axis.
    float[] rotation = new float[16];
    Matrix.setIdentityM(rotation, 0);
    Matrix.rotateM(rotation, 0, 45, 1, 0, 0);

    // modelMiniCube = rotation * translation.
    Matrix.multiplyMM(modelMiniCube, 0, rotation, 0, translation, 0);
}