Example usage for android.opengl EGL14 eglGetCurrentContext

List of usage examples for android.opengl EGL14 eglGetCurrentContext

Introduction

On this page you can find example usages of android.opengl EGL14.eglGetCurrentContext.

Prototype

public static native EGLContext eglGetCurrentContext();

Source Link

Usage

From source file:com.aimfire.demo.CamcorderActivity.java

@Override
public void onDrawFrame(GL10 unused) {
    if (BuildConfig.DEBUG)
        if (VERBOSE)
            Log.d(TAG, "onDrawFrame tex=" + mTextureId);

    /*
     * Latch the latest frame. If there isn't anything new, we'll just re-use
     * whatever was there before.
     */
    mSurfaceTexture.updateTexImage();

    /*
     * Use primitive longs here: the previous boxed Long values forced an
     * auto-(un)boxing round trip on every frame of the render loop for no
     * benefit, since both values are only used in arithmetic and comparisons.
     */
    long currTimeUs = SystemClock.elapsedRealtimeNanos() / 1000;
    long currTimeMs = currTimeUs / 1000;

    if (!mIsRecording) {
        /*
         * Positive while the scheduled recording start is still in the future;
         * goes negative once the start time has passed.
         */
        long captureWaitMs = (mRecordingStartUs - currTimeUs) / 1000;

        /*
         * TODO: hard-coded value
         */
        if (captureWaitMs < 500) {
            /*
             * if we are close to the start of capture time, we start estimating
             * frame rate, and use it to control when frame capture will begin
             */
            if (mLastTimeMs != 0) {
                mNumOfFrameDelays++;

                // running average of inter-frame delay, updated incrementally
                int frameDelayMs = (int) (currTimeMs - mLastTimeMs);
                mAvgFrameDelayMs = (mAvgFrameDelayMs * (float) (mNumOfFrameDelays - 1) + (float) frameDelayMs)
                        / (float) mNumOfFrameDelays;

                //if(BuildConfig.DEBUG) Log.d(TAG, "preview frame delay " + frameDelayMs + "ms" +
                //", new avg = " + mAvgFrameDelayMs);
            }
            mLastTimeMs = currTimeMs;

            if (!mEncoderThreadStarted) {
                File outputFile;
                if (mSyncCapture) {
                    /*
                     * for sync capture, set a temp path which will be renamed later on
                     */
                    String path = mPrefix + "_" + Integer.toString(mIndex) + ".mp4";
                    outputFile = new File(path);
                } else {
                    /*
                     * for solo capture, set the correct path to use.
                     * Locale.US pins the timestamp to ASCII digits regardless of
                     * the device locale (some locales use non-Latin digit glyphs,
                     * which would corrupt the file name).
                     */
                    String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", java.util.Locale.US)
                            .format(new Date());
                    mOutputFilepath = mPrefix + timeStamp + ".mp4";
                    outputFile = new File(mOutputFilepath);
                }

                /*
                 * If we are getting close, start the encoder thread, so we are
                 * ready to record right away when time is right. the name
                 * "startRecording" below may be confusing - it means we start
                 * the encoding thread. but we won't actually feed it frames
                 * until time is right.
                 *
                 * note there is only one instance of TextureMovieEncoder, but
                 * each time startRecording is invoked, a new encoding thread
                 * is created. we want to call startRecording only once per
                 * recording. the current EGL context is passed so the encoder
                 * thread can share textures with this GLSurfaceView context.
                 */
                mEncoder.startRecording(mTextureId,
                        new TextureMovieEncoder.EncoderConfig(outputFile, mIncomingWidth, mIncomingHeight,
                                mIncomingRotation, mIncomingBitrate, EGL14.eglGetCurrentContext()));

                mEncoderThreadStarted = true;
            }
        }

        if (captureWaitMs < mAvgFrameDelayMs) {
            /*
             * If the recording state is changing, take care of it here.  Ideally we
             * wouldn't be doing all this in onDrawFrame(), but the EGLContext sharing
             * with GLSurfaceView makes it hard to do elsewhere.
             *
             * to synchronize the left/right video, we could tweak the encoder to
             * adjust presentation time recorded in the stream, based on offset;
             * currently we do not do this, but rather record the offset in the
             * file name of the video files, and adjust the timing at playback time
             */
            if (mSyncCapture) {
                // encode the start-time offset ("_m" = before scheduled start,
                // "_p" = after) into the temp file name for playback-time sync
                mOutputFilepath = mPrefix
                        + ((captureWaitMs > 0) ? ("_m" + Long.toString(captureWaitMs))
                                : ("_p" + Long.toString(-captureWaitMs)))
                        + "_" + Integer.toString(mIndex) + ".mp4";
            }

            if (BuildConfig.DEBUG)
                Log.d(TAG, "onDrawFrame: recording start, captureWaitMs=" + captureWaitMs + ", mAvgFrameDelay="
                        + mAvgFrameDelayMs + "ms");

            /*
             * Tell the UI thread recording is starting.
             */
            mCameraHandler.sendMessage(mCameraHandler
                    .obtainMessage(CamcorderActivity.CameraHandler.MSG_SET_RECORDING_START, mOutputFilepath));

            mIsRecording = true;
        }
    } else if (currTimeUs >= mRecordingStopUs) {
        /*
         * stop recording
         */
        long captureLengthMs = (currTimeUs - mRecordingStartUs) / 1000;
        if (BuildConfig.DEBUG)
            Log.d(TAG, "onDrawFrame: recording done, captureLengthMs=" + captureLengthMs);

        mEncoder.stopRecording();

        /*
         * Tell the UI thread recording is done. time to send file to
         * remote device
         */
        mCameraHandler.sendMessage(mCameraHandler
                .obtainMessage(CamcorderActivity.CameraHandler.MSG_SET_RECORDING_STOP, mOutputFilepath));

        /*
         * reset recording flags and get ready for next capture
         */
        resetRecordingState();
        mIsRecording = false;
    }

    /*
     * tell the video encoder thread that a new frame is available.
     * this will be ignored if we're not actually recording.
     */
    if (mIsRecording) {
        mEncoder.frameAvailable(mSurfaceTexture);
    }

    if (mIncomingWidth <= 0 || mIncomingHeight <= 0) {
        /*
         * Texture size isn't set yet.  This is only used for the filters, but
         * to be safe we can just skip drawing while we wait for the various
         * races to resolve. (this seems to happen if you toggle the screen off/on
         * with power button.)
         */
        //if(BuildConfig.DEBUG) Log.d(TAG, "Drawing before incoming texture size set; skipping");
        return;
    }

    /*
     *  Update the filter, if necessary.
     */
    if (mCurrentFilter != mNewFilter) {
        updateFilter();
    }

    if (mIncomingSizeUpdated) {
        mFullScreen.getProgram().setTexSize(mIncomingWidth, mIncomingHeight);
        mIncomingSizeUpdated = false;
    }

    /*
     * Draw the video frame.
     */
    mSurfaceTexture.getTransformMatrix(mSTMatrix);
    mFullScreen.drawFrame(mTextureId, mSTMatrix);

    /*
     * update the time counter
     */
    if (mIsRecording) {
        updateTimeCounter(currTimeUs - mRecordingStartUs);
    }
}