Example usage for org.opencv.core Core flip

List of usage examples for org.opencv.core Core flip

Introduction

On this page you can find example usage for org.opencv.core Core.flip.

Prototype

public static void flip(Mat src, Mat dst, int flipCode) 

Source Link

Usage

From source file:MainShapeConversion.java

/**
 * Loads an image from disk, flips it around both axes with OpenCV, and
 * writes the result back out as a JPEG.
 */
public static void main(String[] args) {

    try {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        File input = new File("D://teste.png");

        BufferedImage image = ImageIO.read(input);

        byte[] data = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();

        // Mat takes (rows, cols): rows = height, cols = width.
        Mat mat = new Mat(image.getHeight(), image.getWidth(), CvType.CV_8UC3);

        mat.put(0, 0, data);

        // flipCode -1 flips around both axes, so dst keeps the same
        // rows/cols as src. The original preallocated this with
        // (width, height) — dimensions swapped; Core.flip reallocates
        // dst anyway, but preallocate it correctly.
        Mat mat1 = new Mat(image.getHeight(), image.getWidth(), CvType.CV_8UC3);

        Core.flip(mat, mat1, -1); // -1 = flip around both x and y axes

        byte[] data1 = new byte[mat1.rows() * mat1.cols() * (int) (mat1.elemSize())];

        mat1.get(0, 0, data1);

        // Use the named constant instead of the magic number 5.
        BufferedImage image1 = new BufferedImage(mat1.cols(), mat1.rows(), BufferedImage.TYPE_3BYTE_BGR);

        image1.getRaster().setDataElements(0, 0, mat1.cols(), mat1.rows(), data1);

        File output = new File("D://hsv.jpg");

        ImageIO.write(image1, "jpg", output);

    } catch (Exception e) {
        // Report the full stack trace instead of swallowing it and
        // printing only the (possibly null) message.
        e.printStackTrace();
    }
}

From source file:ch.zhaw.facerecognition.Activities.AddPersonPreviewActivity.java

License:Open Source License

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();/* w ww. j a v a2  s.  c  o  m*/
    Mat imgCopy = new Mat();
    imgRgba.copyTo(imgCopy);
    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(imgRgba, imgRgba, 1);
    }

    long time = new Date().getTime();
    if ((method == MANUALLY) || (method == TIME) && (lastTime + timerDiff < time)) {
        lastTime = time;

        // Check that only 1 face is found. Skip if any or more than 1 are found.
        Mat img = ppF.getCroppedImage(imgCopy);
        if (img != null) {
            Rect[] faces = ppF.getFacesForRecognition();
            //Only proceed if 1 face has been detected, ignore if 0 or more than 1 face have been detected
            if ((faces != null) && (faces.length == 1)) {
                faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                if (((method == MANUALLY) && capturePressed) || (method == TIME)) {
                    MatName m = new MatName(name + "_" + total, img);
                    if (folder.equals("Test")) {
                        String wholeFolderPath = fh.TEST_PATH + name + "/" + subfolder;
                        new File(wholeFolderPath).mkdirs();
                        fh.saveMatToImage(m, wholeFolderPath + "/");
                    } else {
                        String wholeFolderPath = fh.TRAINING_PATH + name;
                        new File(wholeFolderPath).mkdirs();
                        fh.saveMatToImage(m, wholeFolderPath + "/");
                    }

                    for (int i = 0; i < faces.length; i++) {
                        MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], String.valueOf(total),
                                front_camera);
                    }

                    total++;

                    // Stop after numberOfPictures (settings option)
                    if (total >= numberOfPictures) {
                        Intent intent = new Intent(getApplicationContext(), AddPersonActivity.class);
                        intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                        startActivity(intent);
                    }
                    capturePressed = false;
                } else {
                    for (int i = 0; i < faces.length; i++) {
                        MatOperation.drawRectangleOnPreview(imgRgba, faces[i], front_camera);
                    }
                }
            }
        }
    }

    return imgRgba;
}

From source file:ch.zhaw.facerecognition.Activities.RecognitionActivity.java

License:Open Source License

/**
 * Per-frame callback: runs face recognition on a working copy of the frame
 * and draws a labeled rectangle for every recognized face on the preview.
 */
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat preview = inputFrame.rgba();
    // Preprocess a copy so the preview frame itself is left untouched.
    Mat working = new Mat();
    preview.copyTo(working);
    List<Mat> processed = ppF.getProcessedImage(working);
    Rect[] detected = ppF.getFacesForRecognition();
    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(preview, preview, 1);
    }
    // Skip the frame unless detections and processed images line up 1:1.
    boolean usable = processed != null && processed.size() != 0 && detected != null && detected.length != 0
            && processed.size() == detected.length;
    if (!usable) {
        return preview;
    }
    detected = MatOperation.rotateFaces(preview, detected, ppF.getAngleForRecognition());
    for (int idx = 0; idx < detected.length; idx++) {
        MatOperation.drawRectangleAndLabelOnPreview(preview, detected[idx], rec.recognize(processed.get(idx), ""),
                front_camera);
    }
    return preview;
}

From source file:ch.zhaw.facerecognitionlibrary.Helpers.MatOperation.java

License:Open Source License

/***************************************************************************************
 *    Title: Rotate image by 90, 180 or 270 degrees
 *    Author: StereoMatching/*from www .  j a v  a 2  s  .  c  o  m*/
 *    Date: 29.04.2013
 *    Code version: -
 *    Availability: http://stackoverflow.com
 *
 ***************************************************************************************/

/**
 * Rotates the matrix in place by a multiple of 90 degrees clockwise.
 * Accepted angles: ±90, ±180, ±270; any other value leaves the image
 * untouched.
 */
public static void rotate_90n(Mat img, int angle) {
    switch (angle) {
    case 270:
    case -90:
        // Clockwise 270°: transpose, then flip around the x axis.
        Core.transpose(img, img);
        Core.flip(img, img, 0);
        break;
    case 180:
    case -180:
        // Clockwise 180°: flip around both axes.
        Core.flip(img, img, -1);
        break;
    case 90:
    case -270:
        // Clockwise 90°: transpose, then flip around the y axis.
        Core.transpose(img, img);
        Core.flip(img, img, 1);
        break;
    default:
        // Unsupported angle: no-op, matching the original contract.
        break;
    }
}

From source file:com.example.afs.makingmusic.process.MotionDetector.java

License:Open Source License

@Override
@Override
public void process(Frame frame) {
    // Detect moving regions via background subtraction and register each
    // sufficiently large region on the frame as an item.
    Mat image = frame.getImageMatrix();
    // Mirror horizontally so on-screen motion matches the user's movement.
    Core.flip(image, image, 1);
    backgroundSubtractor.apply(image, foregroundMask);
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    // findContours modifies its input, so pass a clone of the mask.
    Imgproc.findContours(foregroundMask.clone(), contours, new Mat(), Imgproc.RETR_LIST,
            Imgproc.CHAIN_APPROX_SIMPLE);
    Collections.shuffle(contours);
    int contourCount = contours.size();
    for (int contourIndex = 0; contourIndex < contourCount; contourIndex++) {
        MatOfPoint contour = contours.get(contourIndex);
        double contourArea = Imgproc.contourArea(contour);
        // Ignore tiny contours (noise from the subtractor).
        if (contourArea > MotionDetector.MINIMUM_AREA) {
            Rect item = Imgproc.boundingRect(contour);
            frame.addItem(item);
            itemCount++;
        }
    }
    // NOTE(review): itemCount is a field, so this metric accumulates across
    // frames rather than per frame — confirm that is intended.
    Injector.getMetrics().setItems(itemCount);
}

From source file:com.github.mbillingr.correlationcheck.ImageProcessor.java

License:Open Source License

/**
 * Converts an OpenCV matrix (gray or BGR) to an Android ARGB_8888 bitmap,
 * rotated 90 degrees clockwise. A null input yields an empty bitmap.
 */
Bitmap matToBitmap(Mat input) {
    if (input == null) {
        // Map "no image" to an empty bitmap rather than throwing.
        return Bitmap.createBitmap(0, 0, Bitmap.Config.ARGB_8888);
    }
    // Normalize to RGB whatever the source channel layout is.
    Mat rgb = new Mat();
    int conversion = (input.channels() == 1) ? Imgproc.COLOR_GRAY2RGB : Imgproc.COLOR_BGR2RGB;
    Imgproc.cvtColor(input, rgb, conversion);

    // Rotate 90° clockwise: transpose followed by a horizontal flip.
    Core.transpose(rgb, rgb);
    Core.flip(rgb, rgb, 1);

    Bitmap bitmap = Bitmap.createBitmap(rgb.cols(), rgb.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rgb, bitmap);
    return bitmap;
}

From source file:com.joravasal.keyface.CameraAccessView.java

License:Open Source License

/**
 * Rotates/mirrors the camera frame so it is upright for the current device
 * orientation, mirroring front-camera frames. ROTATION_0 returns a new
 * matrix; the other cases flip {@code image} in place and return it.
 */
public Mat correctCameraImage(Mat image) {
    //Log.i(tag, "Correcting image rotation");
    //Check rotation of device
    int rotation = ((KeyFaceActivity) this.getContext()).getWindowManager().getDefaultDisplay().getRotation();
    switch (rotation) {
    case Surface.ROTATION_0:
        int degrees = 90;
        //Mirror (y axis) if front camera and rotation in any case
        Mat imageResult = new Mat();
        //For some reason to rotate the image properly, we have to set the center like this
        // NOTE(review): both coordinates use image.width() — looks like a typo
        // for (width/2, height/2), but the author states it works; verify
        // before changing.
        Point center = new Point(image.width() / 2, image.width() / 2);
        Mat transform = Imgproc.getRotationMatrix2D(center, degrees, 1.0);
        try {
            // Destination size swaps width/height because of the 90° rotation.
            Imgproc.warpAffine(image, imageResult, transform, new Size(image.height(), image.width()));
        } catch (CvException e) {
            System.err.println(e.getMessage());
        }
        if (KeyFaceActivity.cameraRearActive)
            Core.flip(imageResult, imageResult, -1);
        else
            Core.flip(imageResult, imageResult, 1);
        return imageResult;
    case Surface.ROTATION_90:
        //Mirror on y axis if front camera
        if (!KeyFaceActivity.cameraRearActive)
            Core.flip(image, image, 1);
        break;
    case Surface.ROTATION_180:
        //Never gets here but just in case:
        break;
    case Surface.ROTATION_270:
        //Mirror on the x axis if rear camera, both axis if front camera
        if (KeyFaceActivity.cameraRearActive)
            Core.flip(image, image, -1);
        else
            Core.flip(image, image, 0);
        break;
    default:
        break;
    }

    return image;
}

From source file:com.kunato.imagestitching.SphereObject.java

License:Apache License

/**
 * Renders the textured sphere with the given view/projection matrices.
 * Uploads a pending bitmap to the texture first if one is queued, and
 * optionally reads the rendered frame back to disk when readPixel is set.
 */
public void draw(float[] viewMatrix, float[] projectionMatrix) {
    int xh = GLES20.glGetUniformLocation(mProgram, "img_x");
    int yh = GLES20.glGetUniformLocation(mProgram, "img_y");
    int widthh = GLES20.glGetUniformLocation(mProgram, "img_width");
    int heighth = GLES20.glGetUniformLocation(mProgram, "img_height");

    // Upload a freshly stitched bitmap into the sphere texture, if queued.
    if (mTexRequireUpdate) {
        Log.i("GLSphere", "Bitmap updated,Return to normal activity.");
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, this.mTextures[0]);
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, mQueueBitmap, 0);
        GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
        mQueueBitmap.recycle();
        mTexRequireUpdate = false;
    }
    GLES20.glUseProgram(mProgram);
    //Attrib
    mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
    mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgram, "a_TexCoordinate");
    // Interleaved buffer: position at offset 0, texture coords at offset 3.
    mSphereBuffer.position(0);
    GLES20.glEnableVertexAttribArray(mPositionHandle);
    GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, mSphereShape.getVeticesStride(),
            mSphereBuffer);

    mSphereBuffer.position(3);
    GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
    GLES20.glVertexAttribPointer(mTextureCoordinateHandle, 2, GLES20.GL_FLOAT, false,
            mSphereShape.getVeticesStride(), mSphereBuffer);
    //Uniform
    mTextureHandle = GLES20.glGetUniformLocation(mProgram, "sTexture");
    GLES20.glUniform1i(mTextureHandle, 0);
    //Area
    GLES20.glUniform1f(xh, mArea[0]);
    GLES20.glUniform1f(yh, mArea[1]);
    GLES20.glUniform1f(widthh, mArea[2]);
    GLES20.glUniform1f(heighth, mArea[3]);

    mViewMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uViewMatrix");
    mProjectionMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uProjectionMatrix");
    GLES20.glUniformMatrix4fv(mViewMatrixHandle, 1, false, viewMatrix, 0);
    GLES20.glUniformMatrix4fv(mProjectionMatrixHandle, 1, false, projectionMatrix, 0);
    GLES20.glDrawElements(GLES20.GL_TRIANGLES, mSphereShape.getNumIndices()[0], GLES20.GL_UNSIGNED_SHORT,
            mIndexBuffer);
    GLES20.glDisableVertexAttribArray(mPositionHandle);
    GLES20.glDisableVertexAttribArray(mTextureCoordinateHandle);

    // Debug path: read the framebuffer back, convert RGBA -> BGR, flip
    // vertically (GL's origin is bottom-left), and save to disk.
    if (readPixel) {
        Log.d("GL", "ReadPixel");
        mScreenBuffer = ByteBuffer.allocateDirect(glRenderer.mHeight * glRenderer.mWidth * 4);
        mScreenBuffer.order(ByteOrder.nativeOrder());
        GLES20.glReadPixels(0, 0, glRenderer.mWidth, glRenderer.mHeight, GLES20.GL_RGBA,
                GLES20.GL_UNSIGNED_BYTE, mScreenBuffer);
        Log.d("mScreenBuffer", "Remaining " + mScreenBuffer.remaining());
        mScreenBuffer.rewind();
        byte pixelsBuffer[] = new byte[4 * glRenderer.mHeight * glRenderer.mWidth];
        mScreenBuffer.get(pixelsBuffer);
        Mat mat = new Mat(glRenderer.mHeight, glRenderer.mWidth, CvType.CV_8UC4);
        mat.put(0, 0, pixelsBuffer);
        Mat m = new Mat();
        Imgproc.cvtColor(mat, m, Imgproc.COLOR_RGBA2BGR);
        Core.flip(m, mat, 0);
        Highgui.imwrite("/sdcard/stitch/readpixel.jpg", mat);

    }
}

From source file:emotion.Eye.java

/**
 * Locates the outer eye corner by template matching against a template of
 * the right outer corner (mirrored for the left eye) and stores the result
 * in whole-face coordinates on EyeRegion.
 *
 * @param eyeRegion    BGR sub-image containing one eye
 * @param rightEyeFlag true for the right eye, false for the left
 */
private void templatingOuterCorner(Mat eyeRegion, boolean rightEyeFlag) {
    Mat template = imread("src\\Templates\\rightOuter.jpg", CV_8UC1);
    // Mat takes (rows, cols): rows = height, cols = width. The original
    // swapped them; cvtColor reallocates temp anyway, so this only fixes
    // the preallocation.
    Mat temp = new Mat(eyeRegion.height(), eyeRegion.width(), CV_8UC1);
    cvtColor(eyeRegion, temp, Imgproc.COLOR_BGR2GRAY);
    // The outer corner lies in the outer half of each eye: the right half
    // of the right eye, the left half of the left eye.
    temp = rightEyeFlag
            ? new Mat(temp, new Rect((int) (temp.width() * 0.5), 0, (int) (temp.width() * 0.5), temp.height()))
            : new Mat(temp, new Rect(0, 0, (int) (temp.width() * 0.5), temp.height()));
    // matchTemplate allocates result itself (single-channel float); the
    // original preallocated it with the source's type/swapped dimensions.
    Mat result = new Mat();

    //(9,9)- coordinates of eye outerCorner in the template
    if (rightEyeFlag) {
        imwrite("rightEyeForOuterTemplating.jpg", temp);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);
        //(9,9)- coordinates of eye outerCorner in the template
        Point outerCorner = new Point(maxVal.maxLoc.x + 9, maxVal.maLoc.y + 9);

        //Adjust coordinates according to whole face
        outerCorner.y += Eye.rightRect.y;
        outerCorner.x += Eye.rightRect.x;
        outerCorner.x += temp.width(); //We examine just right half on the right eye
        ////////////////////////////////////////////
        EyeRegion.rightOuterEyeCorner = outerCorner;
    } else {
        imwrite("leftEyeForOuterTemplating.jpg", temp);
        // Mirror the right-corner template so it matches the left eye.
        Core.flip(template, template, 1);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);

        // NOTE(review): x offset is 4 here versus 9 above — presumably the
        // corner position inside the mirrored template; confirm against the
        // template image.
        Point outerCorner = new Point(maxVal.maxLoc.x + 4, maxVal.maxLoc.y + 9);
        //Adjust coordinates according to whole face
        outerCorner.y += Eye.leftRect.y;
        outerCorner.x += Eye.leftRect.x;
        ////////////////////////////////////////////
        EyeRegion.leftOuterEyeCorner = outerCorner;
    }
}

From source file:emotion.Eye.java

/**
 * Locates the inner eye corner by template matching against a template of
 * the right inner corner (mirrored for the left eye) and stores the result
 * in whole-face coordinates on EyeRegion.
 *
 * @param eyeRegion    BGR sub-image containing one eye
 * @param rightEyeFlag true for the right eye, false for the left
 */
private void templatingInnerCorner(Mat eyeRegion, boolean rightEyeFlag) {
    Mat template = imread("src\\Templates\\rightInner.jpg", CV_8UC1);
    // Mat takes (rows, cols): rows = height, cols = width. The original
    // swapped them; cvtColor reallocates temp anyway, so this only fixes
    // the preallocation.
    Mat temp = new Mat(eyeRegion.height(), eyeRegion.width(), CV_8UC1);
    cvtColor(eyeRegion, temp, Imgproc.COLOR_BGR2GRAY);
    // The inner corner lies in the inner half of each eye: the left half
    // of the right eye, the right half of the left eye.
    temp = rightEyeFlag ? new Mat(temp, new Rect(0, 0, (int) (temp.width() * 0.5), temp.height()))
            : new Mat(temp, new Rect((int) (temp.width() * 0.5), 0, (int) (temp.width() * 0.5), temp.height()));
    // matchTemplate allocates result itself (single-channel float); the
    // original preallocated it with the source's type/swapped dimensions.
    Mat result = new Mat();

    //(4,7)- coordinates of eye innerCorner in the template
    if (rightEyeFlag) {
        imwrite("template4righteye.jpg", template);
        imwrite("rightEyeForInnerTemplating.jpg", temp);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);
        //(4,7)- coordinates of eye innerCorner in the template
        Point innerCorner = new Point(maxVal.maxLoc.x + 4, maxVal.maxLoc.y + 7);

        StaticFunctions.drawCross(temp, innerCorner, StaticFunctions.Features.EYE_CORNERS);
        imwrite("rightEyeForInnerTemplating.jpg", temp);
        //Adjust coordinates according to whole face
        innerCorner.y += Eye.rightRect.y;
        innerCorner.x += Eye.rightRect.x;
        //We examine just left half on the right eye
        ////////////////////////////////////////////
        EyeRegion.rightInnerEyeCorner = innerCorner;
    } else {
        imwrite("leftEyeForInnerTemplating.jpg", temp);
        // Mirror the right-corner template so it matches the left eye.
        Core.flip(template, template, 1);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);

        // NOTE(review): x offset is 8 here versus 4 above — presumably the
        // corner position inside the mirrored template; confirm against the
        // template image.
        Point innerCorner = new Point(maxVal.maxLoc.x + 8, maxVal.maxLoc.y + 7);

        //Adjust coordinates according to whole face
        innerCorner.y += Eye.leftRect.y;
        innerCorner.x += Eye.leftRect.x;
        //We examine just right half on the left eye
        innerCorner.x += temp.width();
        ////////////////////////////////////////////
        EyeRegion.leftInnerEyeCorner = innerCorner;
    }
}