Example usage for android.os SystemClock elapsedRealtimeNanos

List of usage examples for android.os SystemClock elapsedRealtimeNanos

Introduction

On this page you can find example usage for android.os SystemClock elapsedRealtimeNanos.

Prototype

@CriticalNative
public static native long elapsedRealtimeNanos();

Document

Returns nanoseconds since boot, including time spent in sleep.
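
Because the value comes from a monotonic clock that keeps counting while the device sleeps, it is suited for measuring elapsed intervals rather than wall-clock time. Below is a minimal sketch of such a measurement; the ElapsedTimer class and timeMillis helper are illustrative names, not part of the Android API, and elapsedRealtimeNanos() itself requires API level 17 (JELLY_BEAN_MR1) or higher.

import android.os.SystemClock;
import java.util.concurrent.TimeUnit;

public class ElapsedTimer {
    /**
     * Runs the given task and returns roughly how long it took, in milliseconds,
     * measured with the monotonic SystemClock.elapsedRealtimeNanos() clock.
     * Requires API level 17 (JELLY_BEAN_MR1) or higher.
     */
    public static long timeMillis(Runnable task) {
        long startNanos = SystemClock.elapsedRealtimeNanos();
        task.run();
        long elapsedNanos = SystemClock.elapsedRealtimeNanos() - startNanos;
        return TimeUnit.NANOSECONDS.toMillis(elapsedNanos);
    }
}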

Usage

From source file:org.onebusaway.android.map.googlemapsv2.StopOverlay.java

@Override
public boolean onMarkerClick(Marker marker) {
    long startTime = Long.MAX_VALUE, endTime = Long.MAX_VALUE;
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
        startTime = SystemClock.elapsedRealtimeNanos();
    }

    ObaStop stop = mMarkerData.getStopFromMarker(marker);

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
        endTime = SystemClock.elapsedRealtimeNanos();
        Log.d(TAG, "Stop HashMap read time: "
                + TimeUnit.MILLISECONDS.convert(endTime - startTime, TimeUnit.NANOSECONDS) + "ms");
    }

    if (stop == null) {
        // The marker isn't a stop that is contained in this StopOverlay - return unhandled
        return false;
    }

    if (BuildConfig.DEBUG) {
        // Show the stop_id in a toast for debug purposes
        Toast.makeText(mActivity, stop.getId(), Toast.LENGTH_SHORT).show();
    }

    doFocusChange(stop);

    // Report Stop distance metric
    Location stopLocation = stop.getLocation();
    Location myLocation = Application.getLastKnownLocation(mActivity, null);
    // Track the users distance to bus stop
    ObaAnalytics.trackBusStopDistance(stop.getId(), myLocation, stopLocation);
    return true;
}

From source file:org.onebusaway.android.util.UIUtils.java

/**
 * Returns the current time for comparison against another current time.  For API levels >=
 * Jelly Bean MR1 the SystemClock.elapsedRealtimeNanos() method is used, and for API levels <
 * Jelly Bean MR1 System.currentTimeMillis() is used.
 *
 * @return the current time for comparison against another current time, in nanoseconds
 */
public static long getCurrentTimeForComparison() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
        // Use elapsed real-time nanos, since it's guaranteed monotonic
        return SystemClock.elapsedRealtimeNanos();
    } else {
        return TimeUnit.MILLISECONDS.toNanos(System.currentTimeMillis());
    }
}
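
A brief usage sketch for this helper; the timed block, TAG, and variable names are illustrative and not taken from the OneBusAway source:

    long startTime = UIUtils.getCurrentTimeForComparison();
    // ... the operation being timed ...
    long elapsedNanos = UIUtils.getCurrentTimeForComparison() - startTime;
    Log.d(TAG, "Elapsed: " + TimeUnit.NANOSECONDS.toMillis(elapsedNanos) + "ms");

Note that on devices below Jelly Bean MR1 the fallback uses System.currentTimeMillis(), which can jump if the wall clock is adjusted, so strictly monotonic results are only guaranteed on API level 17 and above.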

From source file:com.aimfire.demo.CameraActivity.java

/**
 * onClick handler for "capture" button.
 */
public void clickCapture(View unused) {
    if (!mP2pConnected) {
        if (BuildConfig.DEBUG)
            Log.e(TAG, "clickCapture: error, P2P not connected");
        return;
    }

    if (mRemoteCameraPaused) {
        CustomToast.show(getActivity(), getActivity().getString(R.string.error_cannot_capture_photo),
                Toast.LENGTH_LONG);
        return;
    }

    /*
     * user initiated capturing. we calculate the time elapsed
     * between current time and mSyncTimeUs, then add an extra
     * delay which accounts for the P2P latency in sending the
     * command to the remote device
     */
    long captureStartUs = SystemClock.elapsedRealtimeNanos() / 1000 + P2P_LATENCY_US;
    long delayFromSyncUs = captureStartUs - mSyncTimeUs;

    /*
     * tell camera to prepare capture: lock AE, WB. it also
     * enables preview callback, within which frames will be
     * captured at the right moment
     */
    if (!mCameraClient.prepareCapture(captureStartUs)) {
        if (BuildConfig.DEBUG)
            Log.d(TAG, "clickCapture: previous capture still in progress. " + "Can't start a new one.");
        return;
    }

    /*
     * tell remote device to start recording
     */
    if (mAimfireService != null) {
        mAimfireService.sendStringToPeer(true, MainConsts.CMD_DEMO_CAMERA_ACTION_START + ":"
                + Long.toString(delayFromSyncUs) + ":" + Integer.toString(mImgInd));
    }

    mFirebaseAnalytics.logEvent(MainConsts.FIREBASE_CUSTOM_EVENT_SYNC_PHOTO_CAPTURE_START, null);
}

From source file:com.aimfire.demo.CamcorderActivity.java

/**
 * start solo (2D) recording
 */
private void soloStartRecording() {
    long startUs = SystemClock.elapsedRealtimeNanos() / 1000;
    setRecordingStart(startUs);
}

From source file:com.aimfire.demo.CamcorderActivity.java

/**
 * start our recording and send command to remote device to start
 */
private void syncStartRecording() {
    /*
     * user wanted to start recording. we calculate the time
     * elapsed between current time and mSyncTimeUs, then add
     * an extra delay which accounts for the P2P latency in
     * sending the command to the remote device
     */
    long startUs = SystemClock.elapsedRealtimeNanos() / 1000 + P2P_LATENCY_US;

    long delayFromSyncUs = startUs - mSyncTimeUs;
    if (BuildConfig.DEBUG)
        Log.d(TAG, "DELAY_FROM_SYNC: " + delayFromSyncUs);

    if (mRemoteCameraPaused) {
        //Toast.makeText(getActivity(), "Remote Camera not active, cannot " +
        //"capture video", Toast.LENGTH_LONG).show();
        CustomToast.show(getActivity(), getActivity().getString(R.string.error_cannot_capture_video),
                Toast.LENGTH_LONG);
        return;
    }

    /*
     * tell remote device to start recording
     */
    if (mAimfireService == null) {
        /*
         * sanity check
         */
        return;
    } else {
        mAimfireService.sendStringToPeer(true,
                MainConsts.CMD_DEMO_CAMERA_ACTION_START + ":" + Long.toString(delayFromSyncUs));
    }

    setRecordingStart(startUs);
}

From source file:com.aimfire.demo.CamcorderActivity.java

/**
 * stop solo (2D) recording
 */
private void soloStopRecording() {
    long stopUs = SystemClock.elapsedRealtimeNanos() / 1000;
    setRecordingStop(stopUs);
}

From source file:com.aimfire.demo.CamcorderActivity.java

/**
 * stop our recording and send command to remote device to stop
 */
private void syncStopRecording() {
    /*
     * stop our side of the recording. set it a little bit in
     * the future to account for the delay in sending the
     * command to the remote device. this will not be perfect -
     * the length of the two recordings will always be slightly
     * different - but at least we tried.
     */
    long stopUs = SystemClock.elapsedRealtimeNanos() / 1000 + P2P_LATENCY_US;
    setRecordingStop(stopUs);

    /*
     * audio/visual indication of stop is done in handleRecordingStop instead
     * of here because we don't want to record the stop shutter sound.
     */

    /*
     * tell the remote device to stop, too. we do not have to check
     * if the remote camera was put to background or not, because if 
     * it did, it must have sent us a message, and we must have
     * stopped already and wouldn't get here
     * 
     */
    if (mAimfireService != null) {
        mAimfireService.sendStringToPeer(true, Integer.toString(MainConsts.CMD_DEMO_CAMERA_ACTION_END));
    }
}

From source file:com.aimfire.demo.CamcorderActivity.java

@Override
public void onDrawFrame(GL10 unused) {
    if (BuildConfig.DEBUG)
        if (VERBOSE)
            Log.d(TAG, "onDrawFrame tex=" + mTextureId);

    /*
     * Latch the latest frame. If there isn't anything new, we'll just re-use 
     * whatever was there before.
     */
    mSurfaceTexture.updateTexImage();

    Long currTimeUs = SystemClock.elapsedRealtimeNanos() / 1000;
    Long currTimeMs = currTimeUs / 1000;

    if (!mIsRecording) {
        long captureWaitMs = (mRecordingStartUs - currTimeUs) / 1000;

        /*
         * TODO: hard-coded value
         */
        if (captureWaitMs < 500) {
            /*
             * if we are close to the start of capture time, we start estimating 
             * frame rate, and use it to control when frame capture will begin
             */
            if (mLastTimeMs != 0) {
                mNumOfFrameDelays++;

                int frameDelayMs = (int) (currTimeMs - mLastTimeMs);
                mAvgFrameDelayMs = (mAvgFrameDelayMs * (float) (mNumOfFrameDelays - 1) + (float) frameDelayMs)
                        / (float) mNumOfFrameDelays;

                //if(BuildConfig.DEBUG) Log.d(TAG, "preview frame delay " + frameDelayMs + "ms" +
                //", new avg = " + mAvgFrameDelayMs);
            }
            mLastTimeMs = currTimeMs;

            if (!mEncoderThreadStarted) {
                File outputFile;
                if (mSyncCapture) {
                    /*
                     * for sync capture, set a temp path which will be renamed later on
                     */
                    String path = mPrefix + "_" + Integer.toString(mIndex) + ".mp4";
                    outputFile = new File(path);
                } else {
                    /*
                     * for solo capture, set the correct path to use
                     */
                    String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
                    mOutputFilepath = mPrefix + timeStamp + ".mp4";
                    outputFile = new File(mOutputFilepath);
                }

                /*
                 * If we are getting close, start the encoder thread, so we are
                 * ready to record right away when time is right. the name
                 * "startRecording" below may be confusing - it means we start
                 * the encoding thread. but we won't actually feed it frames
                 * until time is right.
                 * 
                 * note there is only one instance of TextureMovieEncoder, but
                 * each time startRecording is invoked, a new encoding thread 
                 * is created. we want to call startRecording only once per
                 * recording.
                 */
                mEncoder.startRecording(mTextureId,
                        new TextureMovieEncoder.EncoderConfig(outputFile, mIncomingWidth, mIncomingHeight,
                                mIncomingRotation, mIncomingBitrate, EGL14.eglGetCurrentContext()));

                mEncoderThreadStarted = true;
            }
        }

        if (captureWaitMs < mAvgFrameDelayMs) {
            /*
             * If the recording state is changing, take care of it here.  Ideally we 
             * wouldn't be doing all this in onDrawFrame(), but the EGLContext sharing 
             * with GLSurfaceView makes it hard to do elsewhere.
             * 
             * to synchronize the left/right video, we could tweak the encoder to
             * adjust presentation time recorded in the stream, based on offset; 
             * currently we do not do this, but rather record the offset in the
             * file name of the video files, and adjust the timing at playback time
             */
            if (mSyncCapture) {
                mOutputFilepath = mPrefix
                        + ((captureWaitMs > 0) ? ("_m" + Long.toString(captureWaitMs))
                                : ("_p" + Long.toString(-captureWaitMs)))
                        + "_" + Integer.toString(mIndex) + ".mp4";
            }

            if (BuildConfig.DEBUG)
                Log.d(TAG, "onDrawFrame: recording start, captureWaitMs=" + captureWaitMs + ", mAvgFrameDelay="
                        + mAvgFrameDelayMs + "ms");

            /*
             * Tell the UI thread recording is starting. 
             */
            mCameraHandler.sendMessage(mCameraHandler
                    .obtainMessage(CamcorderActivity.CameraHandler.MSG_SET_RECORDING_START, mOutputFilepath));

            mIsRecording = true;
        }
    } else if (currTimeUs >= mRecordingStopUs) {
        /*
         * stop recording
         */
        long captureLengthMs = (currTimeUs - mRecordingStartUs) / 1000;
        if (BuildConfig.DEBUG)
            Log.d(TAG, "onDrawFrame: recording done, captureLengthMs=" + captureLengthMs);

        mEncoder.stopRecording();

        /*
         * Tell the UI thread recording is done. time to send file to 
         * remote device
         */
        mCameraHandler.sendMessage(mCameraHandler
                .obtainMessage(CamcorderActivity.CameraHandler.MSG_SET_RECORDING_STOP, mOutputFilepath));

        /*
         * reset recording flags and get ready for next capture
         */
        resetRecordingState();
        mIsRecording = false;
    }

    /*
     * tell the video encoder thread that a new frame is available.
     * this will be ignored if we're not actually recording.
     */
    if (mIsRecording) {
        mEncoder.frameAvailable(mSurfaceTexture);
    }

    if (mIncomingWidth <= 0 || mIncomingHeight <= 0) {
        /*
         * Texture size isn't set yet.  This is only used for the filters, but 
         * to be safe we can just skip drawing while we wait for the various 
         * races to resolve. (this seems to happen if you toggle the screen off/on 
         * with power button.)
         */
        //if(BuildConfig.DEBUG) Log.d(TAG, "Drawing before incoming texture size set; skipping");
        return;
    }

    /*
     *  Update the filter, if necessary.
     */
    if (mCurrentFilter != mNewFilter) {
        updateFilter();
    }

    if (mIncomingSizeUpdated) {
        mFullScreen.getProgram().setTexSize(mIncomingWidth, mIncomingHeight);
        mIncomingSizeUpdated = false;
    }

    /*
     * Draw the video frame.
     */
    mSurfaceTexture.getTransformMatrix(mSTMatrix);
    mFullScreen.drawFrame(mTextureId, mSTMatrix);

    /*
     * update the time counter
     */
    if (mIsRecording) {
        updateTimeCounter(currTimeUs - mRecordingStartUs);
    }
}