Example usage for android.view Surface Surface

Introduction

On this page you can find example usages of the android.view.Surface constructor, drawn from open-source projects.

Prototype

public Surface(SurfaceTexture surfaceTexture)
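
Every example below uses this public constructor: it wraps an existing SurfaceTexture so that a producer (a MediaPlayer, a camera capture session, a video decoder) can render into it. A minimal sketch of the pattern, assuming a SurfaceTexture is already available; the class and method names here are illustrative, not taken from the projects below:

import android.graphics.SurfaceTexture;
import android.view.Surface;

public final class SurfaceExample {
    // Wrap an existing SurfaceTexture in a Surface a producer can draw into.
    public static Surface wrap(SurfaceTexture texture) {
        return new Surface(texture);
    }

    // Release the Surface once the producer is done with it
    // to free the underlying buffers.
    public static void dispose(Surface surface) {
        if (surface != null && surface.isValid()) {
            surface.release();
        }
    }
}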

Usage

From source file:com.lamcreations.scaffold.common.activities.VideoSplashScreenActivity.java

@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
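    // Wrap the TextureView's SurfaceTexture so MediaPlayer can render into it.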
    Surface s = new Surface(surface);

    try {
        mMediaPlayer = MediaPlayer.create(this, getVideoRawResId());
        mMediaPlayer.setVideoScalingMode(MediaPlayer.VIDEO_SCALING_MODE_SCALE_TO_FIT);
        mMediaPlayer.setSurface(s);
        mMediaPlayer.setOnBufferingUpdateListener(this);
        mMediaPlayer.setOnPreparedListener(this);
        mMediaPlayer.setOnVideoSizeChangedListener(this);
        mMediaPlayer.setOnSeekCompleteListener(this);
        mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
        mTextureView.setAspectRatio(mMediaPlayer.getVideoWidth(), mMediaPlayer.getVideoHeight());
    } catch (IllegalArgumentException | SecurityException | IllegalStateException e) {
        Log.d(TAG, "Failed to set up MediaPlayer", e);
    }
}

From source file:com.example.gemswin.screancasttest.MainActivity_Reciever.java

@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
    Log.i(LOG_TAG, "onSurfaceTextureAvailable (" + width + "/" + height + ")");
    try {
        final Surface surface = new Surface(surfaceTexture);
        mDecoderAsyncTask = new DecoderAsyncTask(mMediaCodecFactory, surface, this);
        mDecoderAsyncTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    } catch (IOException e) {
        e.printStackTrace();
    }

}

From source file:freed.cam.apis.camera2.modules.VideoModuleApi2.java

@TargetApi(VERSION_CODES.LOLLIPOP)
@Override
public void startPreview() {
    previewSize = new Size(currentVideoProfile.videoFrameWidth, currentVideoProfile.videoFrameHeight);
    int sensorOrientation = cameraHolder.characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    int orientation = 0;
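    // Map the reported sensor orientation to the rotation used for the preview;
    // this mapping is specific to this app's display handling.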
    switch (sensorOrientation) {
    case 90:
        orientation = 270;
        break;
    case 180:
        orientation = 0;
        break;
    case 270:
        orientation = 90;
        break;
    case 0:
        orientation = 180;
        break;
    }
    cameraHolder.CaptureSessionH.SetTextureViewSize(previewSize.getWidth(), previewSize.getHeight(),
            orientation, orientation + 180, true);
    SurfaceTexture texture = cameraHolder.CaptureSessionH.getSurfaceTexture();
    texture.setDefaultBufferSize(currentVideoProfile.videoFrameWidth, currentVideoProfile.videoFrameHeight);
    previewsurface = new Surface(texture);
    cameraHolder.CaptureSessionH.AddSurface(previewsurface, true);
    cameraHolder.CaptureSessionH.CreateCaptureSession();
}

From source file:com.example.gemswin.screencastrecevertest.MainActivity_Reciever.java

@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
    Log.i(LOG_TAG, "onSurfaceTextureAvailable (" + width + "/" + height + ")");
    try {
        final Surface surface = new Surface(surfaceTexture);
        mDecoderAsyncTask = new DecoderAsyncTask(mMediaCodecFactory, surface, this);
        mDecoderAsyncTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:MainActivity.java

protected void takePicture(View view) {
    if (null == mCameraDevice) {
        return;
    }
    CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
        StreamConfigurationMap configurationMap = characteristics
                .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (configurationMap == null)
            return;
        Size largest = Collections.max(Arrays.asList(configurationMap.getOutputSizes(ImageFormat.JPEG)),
                new CompareSizesByArea());
        ImageReader reader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG,
                1);
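        // Configure the capture session with two outputs: the JPEG ImageReader
        // and the on-screen TextureView preview.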
        List<Surface> outputSurfaces = new ArrayList<Surface>(2);
        outputSurfaces.add(reader.getSurface());
        outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(reader.getSurface());
        captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image image = null;
                try {
                    image = reader.acquireLatestImage();
                    if (image == null) {
                        return;
                    }
                    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                    byte[] bytes = new byte[buffer.capacity()];
                    buffer.get(bytes);
                    OutputStream output = new FileOutputStream(getPictureFile());
                    output.write(bytes);
                    output.close();
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    if (image != null) {
                        image.close();
                    }
                }
            }
        };
        HandlerThread thread = new HandlerThread("CameraPicture");
        thread.start();
        final Handler backgroundHandler = new Handler(thread.getLooper());
        reader.setOnImageAvailableListener(readerListener, backgroundHandler);
        final CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                    TotalCaptureResult result) {
                super.onCaptureCompleted(session, request, result);
                Toast.makeText(MainActivity.this, "Picture Saved", Toast.LENGTH_SHORT).show();
                startPreview(session);
            }
        };
        mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                try {
                    session.capture(captureBuilder.build(), captureCallback, backgroundHandler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
            }
        }, backgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}

From source file:com.google.android.apps.watchme.StreamerActivity.java

void startPreview() {
    SurfaceTexture surfaceTexture = textureView.getSurfaceTexture();
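    // Size the SurfaceTexture's buffers to the chosen preview size before wrapping it in a Surface.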
    surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
    Surface previewSurface = new Surface(surfaceTexture);
    try {
        captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        captureRequestBuilder.addTarget(previewSurface);

        cameraDevice.createCaptureSession(Arrays.asList(previewSurface),
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession session) {
                        try {
                            session.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler);
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                        Toast.makeText(getApplicationContext(), "The preview failed to set up",
                                Toast.LENGTH_SHORT).show();
                    }
                }, null);

    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}

From source file:com.andrasta.dashi.MainActivity.java

@SuppressWarnings("MissingPermission")
private void openCamera(int width, int height) {
    try {
        configBuilder = CameraUtils.initCameraConfig(this, display, width, height);
        onCameraOrientationSet(configBuilder.getCameraOrientation());

        int cameraWidth = configBuilder.getSize().getWidth();
        int cameraHeight = configBuilder.getSize().getHeight();

        // We fit the aspect ratio of TextureView to the size of preview we picked.
        int orientation = this.getResources().getConfiguration().orientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
            textureView.setAspectRatio(cameraWidth, cameraHeight);
            laneView.setAspectRatio(cameraWidth, cameraHeight);
        } else {
            textureView.setAspectRatio(cameraHeight, cameraWidth);
            laneView.setAspectRatio(cameraHeight, cameraWidth);
        }
        Matrix matrix = CameraUtils.configureTransform(display.getRotation(), width, height, cameraWidth,
                cameraHeight);
        textureView.setTransform(matrix);
        SurfaceTexture texture = textureView.getSurfaceTexture();
        if (texture == null) {
            Log.d(TAG, "No SurfaceTexture");
            return;
        }
        // We configure the size of default buffer to be the size of camera preview we want.
        texture.setDefaultBufferSize(cameraWidth, cameraHeight);

        CameraConfig.Request request = new CameraConfig.Request(CameraDevice.TEMPLATE_PREVIEW,
                new Surface(texture));
        request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        configBuilder.addRequest(request);
        Log.d(TAG, "Display camera resolution " + cameraWidth + 'x' + cameraHeight);

        imageReader = ImageReader.newInstance(cameraRecSize.getWidth(), cameraRecSize.getHeight(),
                ImageFormat.YUV_420_888, alprHandler.getThreadsNum() + 1);
        imageReader.setOnImageAvailableListener(this, null);
        request = new CameraConfig.Request(CameraDevice.TEMPLATE_PREVIEW, imageReader.getSurface());
        configBuilder.addRequest(request);
        Log.d(TAG,
                "Recognition camera resolution " + cameraRecSize.getWidth() + 'x' + cameraRecSize.getHeight());

        camera.open(configBuilder.build());
        Log.d(TAG, "Camera opened: " + configBuilder.getCameraId());
    } catch (CameraAccessException e) {
        onError(false, e);
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the device
        onError(true, e);
    }
}

From source file:ca.frozen.curlingtv.activities.VideoFragment.java

@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
    if (decoder != null) {
        decoder.setSurface(new Surface(surfaceTexture));
    }
}

From source file:ca.frozen.rpicameraviewer.activities.VideoFragment.java

@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
    if (decoder != null) {
        decoder.setSurface(new Surface(surfaceTexture), startVideoHandler, startVideoRunner);
    }
}

From source file:net.nanocosmos.bintu.demo.encoder.activities.StreamActivity.java

@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
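    // Three ways to obtain a Surface: wrapping a SurfaceTexture directly,
    // or taking one from a SurfaceView's or a GLSurfaceView's SurfaceHolder.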
    Surface foo = new Surface(surface);
    SurfaceView foo1 = new SurfaceView(this);
    foo = foo1.getHolder().getSurface();
    GLSurfaceView foo2 = new GLSurfaceView(this);
    foo = foo2.getHolder().getSurface();

    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
            initStreamLib();
    } else {
        appPermissions = new CheckAppPermissions(this);
        boolean needPermission = false;
        if (streamVideo) {
            needPermission |= !appPermissions.checkCameraPermissions();
        }
        if (streamAudio) {
            needPermission |= !appPermissions.checkRecordAudioPermission();
        }
        if (recordMp4) {
            needPermission |= !appPermissions.checkWriteExternalStoragePermission();
        }

        if (needPermission) {
            appPermissions.requestMissingPermissions();
        } else {
            initStreamLib();
        }
    }
}