Example usage for org.opencv.imgproc Imgproc line

List of usage examples for org.opencv.imgproc Imgproc line

Introduction

On this page you can find example usage of org.opencv.imgproc.Imgproc.line.

Prototype

public static void line(Mat img, Point pt1, Point pt2, Scalar color, int thickness) 
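
Before the project examples below, a minimal, self-contained sketch of the prototype above may help; the canvas size, endpoints, color, and thickness are arbitrary values chosen for illustration, not taken from any of the listed sources.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class LineExample {
    public static void main(String[] args) {
        // Load the native OpenCV library before calling any OpenCV function.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Create a black 400x400, 3-channel (BGR) canvas.
        Mat canvas = Mat.zeros(400, 400, CvType.CV_8UC3);

        // Draw a red diagonal line, 3 pixels thick.
        // Scalar is in BGR order, so (0, 0, 255) is red; Point coordinates are (x, y) in pixels.
        Imgproc.line(canvas, new Point(10, 10), new Point(390, 390), new Scalar(0, 0, 255), 3);
    }
}

The line is clipped to the image boundaries, so endpoints outside the image are safe.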


Usage

From source file:hu.unideb.fksz.VideoProcessor.java

License:Open Source License

/**
 * Processes {@code firstFrame} and {@code secondFrame}.
 * @param firstFrame    the first frame of a cycle.
 */
private void processFrame(Mat firstFrame) {
    double contourArea = 0;
    int position = 0;
    try {
        /**
         * Resizes the {@code firstFrame} to {@code frameSize}.
         *
         */
        Imgproc.resize(firstFrame, firstFrame, frameSize);

        /**
         * Converts the frame to grayscale color space.
         */
        Imgproc.cvtColor(firstFrame, firstGrayImage, Imgproc.COLOR_BGR2GRAY);

        /**
         * {@code video} reads the second frame.
         */
        video.read(secondFrame);

        Imgproc.resize(secondFrame, secondFrame, frameSize);

        Imgproc.cvtColor(secondFrame, secondGrayImage, Imgproc.COLOR_BGR2GRAY);

        /**
         * Getting the absolute per-pixel difference of the two frames into {@code differenceOfImages}.
         */
        Core.absdiff(firstGrayImage, secondGrayImage, differenceOfImages);
        Imgproc.threshold(differenceOfImages, thresholdImage, 25, 255, Imgproc.THRESH_BINARY);
        Imgproc.blur(thresholdImage, thresholdImage, new Size(12, 12));
        Imgproc.threshold(thresholdImage, thresholdImage, 20, 255, Imgproc.THRESH_BINARY);
        /////
        for (int i = 0; i < contours.size(); ++i) {
            contours.get(i).release();
        }
        contours.clear();

        /**
         * The horizontal line.
         */
        Imgproc.line(firstFrame, controlPoints.get(6), controlPoints.get(7), new Scalar(255, 0, 0),
                Imgproc.LINE_4);
        Imgproc.findContours(thresholdImage, contours, hierarchy, Imgproc.RETR_TREE,
                Imgproc.CHAIN_APPROX_SIMPLE);

        for (int i = 0; i < hullPoints.size(); ++i) {
            hullPoints.get(i).release();
        }
        hullPoints.clear();

        for (int i = 0; i < contours.size(); i++) {
            MatOfInt tmp = new MatOfInt();
            Imgproc.convexHull(contours.get(i), tmp, false);
            hullPoints.add(tmp);
        }

        /**
         * Searches for the contour with the greatest area.
         */
        if (contours.size() > 0) {
            for (int i = 0; i < contours.size(); i++) {
                if (Imgproc.contourArea(contours.get(i)) > contourArea) {
                    contourArea = Imgproc.contourArea(contours.get(i));
                    position = i;
                    boundingRectangle = Imgproc.boundingRect(contours.get(i));
                }

            }
        }
        secondFrame.release();
        hierarchy.release();
        secondGrayImage.release();
        firstGrayImage.release();
        thresholdImage.release();
        differenceOfImages.release();
    } catch (Exception e) {
        logger.error(e.getMessage());
    }

    /**
     * Checking whether the control point on the left is
     * inside {@code boundingRectangle}, the {@code Rect}
     * bounding the greatest contour.
     */
    if (controlPoints.get(6).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(0), controlPoints.get(1), new Scalar(0, 0, 255), 2);
        wasAtLeftPoint = true;
    } else if (!controlPoints.get(6).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(0), controlPoints.get(1), new Scalar(0, 255, 0), 2);
    }
    /**
     * Checking whether the control point in the middle is
     * inside {@code boundingRectangle}, the {@code Rect}
     * bounding the greatest contour.
     */
    if (controlPoints.get(8).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(2), controlPoints.get(3), new Scalar(0, 0, 255), 2);
        wasAtCenterPoint = true;
    } else if (!controlPoints.get(8).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(2), controlPoints.get(3), new Scalar(0, 255, 0), 2);
    }
    /**
     * Checking whether the control point on the right is
     * inside {@code boundingRectangle}, the {@code Rect}
     * bounding the greatest contour.
     */
    if (controlPoints.get(7).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(4), controlPoints.get(5), new Scalar(0, 0, 255), 2);
        wasAtRightPoint = true;
    } else if (!controlPoints.get(7).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(4), controlPoints.get(5), new Scalar(0, 255, 0), 2);
    }

    /**
     * If all three control points have been inside the {@code boundingRectangle},
     * it means that a "car" has passed.
     */
    if (wasAtCenterPoint && wasAtLeftPoint && wasAtRightPoint) {
        detectedCarsCount++;

        wasAtCenterPoint = false;
        wasAtLeftPoint = false;
        wasAtRightPoint = false;
        logger.info("Detected " + detectedCarsCount + " car(s)");
    }
    /**
     * If the contour is big enough, draw it.
     */
    if (contourArea > 3000) {
        Imgproc.drawContours(frame, contours, position, new Scalar(255, 255, 255));
    }
}

From source file:opencltest.YetAnotherTestT.java

private static void paintLines(Mat targetImg, Mat lines) {
    for (int x = 0; x < lines.rows(); x++) {
        double[] vec = lines.get(x, 0);
        double x1 = vec[0], y1 = vec[1], x2 = vec[2], y2 = vec[3];
        Point start = new Point(x1, y1);
        Point end = new Point(x2, y2);
        Imgproc.line(targetImg, start, end, new Scalar(255, 0, 0), 1);
    }
}
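
The lines argument above is expected to hold one 4-element row per segment, (x1, y1, x2, y2), which is the layout produced by Imgproc.HoughLinesP. The calling code is not shown on this page, so the following is only a sketch of a companion method in the same class, with an assumed file name and illustrative Hough parameters (it also assumes org.opencv.imgcodecs.Imgcodecs is imported and the native library is already loaded).

private static void detectAndPaint() {
    // Assumed input image; any image readable as grayscale would do.
    Mat gray = Imgcodecs.imread("input.png", Imgcodecs.IMREAD_GRAYSCALE);

    // Edge map for the Hough transform (thresholds are illustrative).
    Mat edges = new Mat();
    Imgproc.Canny(gray, edges, 50, 150);

    // Each row of 'lines' holds (x1, y1, x2, y2) for one detected segment.
    Mat lines = new Mat();
    Imgproc.HoughLinesP(edges, lines, 1, Math.PI / 180, 80, 30, 10);

    // Draw on a color copy so the red segments are visible.
    Mat color = new Mat();
    Imgproc.cvtColor(gray, color, Imgproc.COLOR_GRAY2BGR);
    paintLines(color, lines);
}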

From source file:opencltest.YetAnotherTestT.java

private static void drawCross(double x, double y, Mat foundSquare) {
    Imgproc.line(foundSquare, new Point(x, 0), new Point(x, foundSquare.height()), new Scalar(255, 0, 0, 180),
            1);

    Imgproc.line(foundSquare, new Point(0, y), new Point(foundSquare.width(), y), new Scalar(255, 0, 0, 180),
            1);
}

From source file:opencltest.YetAnotherTestT.java

private static void draw(List<Double> result, Mat test) {
    for (int i = 1; i < result.size() - 1; i++) {
        if (result.get(i) > result.get(i + 1) && result.get(i) > result.get(i - 1)) {
            Imgproc.line(test, new Point(i, 0), new Point(i, test.height()),
                    //                        new Scalar(result.get(i), 0, 0), 1
                    new Scalar(0, 0, 255), 1);
        }

        //            Imgproc.line(test,
        //                        new Point(i, 0), new Point(i, test.height()),
        //                        new Scalar(result.get(i), 0, 0), 1
        ////                        new Scalar(0,0,255),1
        //                );
    }
}

From source file:opencvdemos.BallGame.java

License:Apache License

private Image grabFrame() {
    // Init everything
    Image imageToShow = null;
    Mat frame = new Mat();

    // Check if the capture is open
    if (this.capture.isOpened()) {
        try {
            // Read the current frame
            this.capture.read(frame);
            // Flip image for easy object manipulation
            Core.flip(frame, frame, 1);

            // If the frame is not empty, process it
            if (!frame.empty()) {
                // Init
                Mat blurredImage = new Mat();
                Mat hsvImage = new Mat();
                Mat mask = new Mat();
                Mat morphOutput = new Mat();

                // Remove some noise
                Imgproc.blur(frame, blurredImage, new Size(7, 7));

                // Convert the frame to HSV
                Imgproc.cvtColor(blurredImage, hsvImage, Imgproc.COLOR_BGR2HSV);

                // Get thresholding values from the UI
                // Remember: H ranges 0-180, S and V range 0-255
                Scalar minValues = new Scalar(this.hueStart.getValue(), this.saturationStart.getValue(),
                        this.valueStart.getValue());
                Scalar maxValues = new Scalar(this.hueStop.getValue(), this.saturationStop.getValue(),
                        this.valueStop.getValue());

                // Show the current selected HSV range
                String valuesToPrint = "Hue range: " + minValues.val[0] + "-" + maxValues.val[0]
                        + ". Sat. range: " + minValues.val[1] + "-" + maxValues.val[1] + ". Value range: "
                        + minValues.val[2] + "-" + maxValues.val[2];
                hsvCurrentValues.setText(valuesToPrint);

                // Threshold HSV image to select object
                Core.inRange(hsvImage, minValues, maxValues, mask);
                // Show the partial output
                maskImage.getGraphics().drawImage(this.mat2Image(mask), 0, 0, 205, 154, null);

                // Morphological operators
                // Dilate with large element, erode with small ones
                Mat dilateElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(24, 24));
                Mat erodeElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(12, 12));

                Imgproc.erode(mask, morphOutput, erodeElement);
                Imgproc.erode(mask, morphOutput, erodeElement);

                Imgproc.dilate(mask, morphOutput, dilateElement);
                Imgproc.dilate(mask, morphOutput, dilateElement);

                // Show the partial output
                morphImage.getGraphics().drawImage(this.mat2Image(morphOutput), 0, 0, 205, 154, null);

                // Find the object(s) contours and show them
                frame = this.findAndDrawObjects(morphOutput, frame);

                // Calculate centers and move ball
                Mat temp = new Mat();
                morphOutput.copyTo(temp);
                List<MatOfPoint> contours = new ArrayList<>();
                Imgproc.findContours(temp, contours, new Mat(), Imgproc.RETR_EXTERNAL,
                        Imgproc.CHAIN_APPROX_SIMPLE);
                for (int i = 0; i < contours.size(); i++) {
                    Rect objectBoundingRectangle = Imgproc.boundingRect(contours.get(i));
                    int x = objectBoundingRectangle.x + objectBoundingRectangle.width / 2;
                    int y = objectBoundingRectangle.y + objectBoundingRectangle.height / 2;

                    // Move ball
                    if (!ballChanged) {
                        if (b.x > objectBoundingRectangle.x
                                && b.x < objectBoundingRectangle.x + objectBoundingRectangle.width
                                && b.y > objectBoundingRectangle.y
                                && b.y < objectBoundingRectangle.y + objectBoundingRectangle.height) {
                            b.dx = -b.dx;
                            b.dy = -b.dy;
                            ballChanged = true;
                        }
                    }

                    // Show crosshair
                    Imgproc.circle(frame, new Point(x, y), 20, new Scalar(0, 255, 0), 2);
                    Imgproc.line(frame, new Point(x, y), new Point(x, y - 25), new Scalar(0, 255, 0), 2);
                    Imgproc.line(frame, new Point(x, y), new Point(x, y + 25), new Scalar(0, 255, 0), 2);
                    Imgproc.line(frame, new Point(x, y), new Point(x - 25, y), new Scalar(0, 255, 0), 2);
                    Imgproc.line(frame, new Point(x, y), new Point(x + 25, y), new Scalar(0, 255, 0), 2);
                    Imgproc.putText(frame, "Tracking object at (" + x + "," + y + ")", new Point(x, y), 1, 1,
                            new Scalar(255, 0, 0), 2);
                }
                ballChanged = false;

                // Move and draw the ball
                if (b.dx < 0)
                    b.dx = ballSpeed.getValue() * -1;
                else
                    b.dx = ballSpeed.getValue();
                if (b.dy < 0)
                    b.dy = ballSpeed.getValue() * -1;
                else
                    b.dy = ballSpeed.getValue();
                b.move();
                Imgproc.circle(frame, new Point(b.x, b.y), b.r, new Scalar(255, 0, 255), -1);

                // convert the Mat object (OpenCV) to Image (Java AWT)
                imageToShow = mat2Image(frame);
            }

        } catch (Exception e) {
            // log the error
            System.err.println("Exception during the frame elaboration: " + e);
        }
    }

    return imageToShow;
}

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.detect.DetectStripTask.java

License:Open Source License

@Nullable
@Override
protected Void doInBackground(Intent... params) {
    Intent intent = params[0];

    if (intent == null) {
        return null;
    }

    String uuid = intent.getStringExtra(Constant.UUID);

    StripTest stripTest = new StripTest();
    int numPatches = stripTest.getPatchCount(uuid);

    format = intent.getIntExtra(Constant.FORMAT, ImageFormat.NV21);
    width = intent.getIntExtra(Constant.WIDTH, 0);
    height = intent.getIntExtra(Constant.HEIGHT, 0);

    if (width == 0 || height == 0) {
        return null;
    }

    JSONArray imagePatchArray = null;
    int imageCount = -1;
    Mat labImg; // Mat for image from NV21 data
    Mat labStrip; // Mat for detected strip

    try {
        String json = FileUtil.readFromInternalStorage(context, Constant.IMAGE_PATCH);
        imagePatchArray = new JSONArray(json);
    } catch (Exception e) {
        Timber.e(e);
    }

    for (int i = 0; i < numPatches; i++) {
        try {
            if (imagePatchArray != null) {
                // sub-array for each patch
                JSONArray array = imagePatchArray.getJSONArray(i);

                // get the image number from the json array
                int imageNo = array.getInt(0);

                if (imageNo > imageCount) {

                    // Set imageCount to current number
                    imageCount = imageNo;

                    byte[] data = FileUtil.readByteArray(context, Constant.DATA + imageNo);
                    if (data == null) {
                        throw new IOException();
                    }

                    //make a L,A,B Mat object from data
                    try {
                        labImg = makeLab(data);
                    } catch (Exception e) {
                        if (context != null) {
                            Timber.e(e);
                        }
                        continue;
                    }

                    //perspectiveTransform
                    try {
                        if (labImg != null) {
                            warp(labImg, imageNo);
                        }
                    } catch (Exception e) {
                        if (context != null) {
                            Timber.e(e);
                        }
                        continue;
                    }

                    //divide into calibration and strip areas
                    try {
                        if (context != null) {
                            divideIntoCalibrationAndStripArea();
                        }
                    } catch (Exception e) {
                        Timber.e(e);
                        continue;
                    }

                    //save warped image to external storage
                    //                        if (DEVELOP_MODE) {
                    //                        Mat rgb = new Mat();
                    //                        Imgproc.cvtColor(warpMat, rgb, Imgproc.COLOR_Lab2RGB);
                    //                        Bitmap bitmap = Bitmap.createBitmap(rgb.width(), rgb.height(), Bitmap.Config.ARGB_8888);
                    //                        Utils.matToBitmap(rgb, bitmap);
                    //
                    //                        //if (FileUtil.isExternalStorageWritable()) {
                    //                        FileUtil.writeBitmapToExternalStorage(bitmap, "/warp", UUID.randomUUID().toString() + ".png");
                    //}
                    //                            //Bitmap.createScaledBitmap(bitmap, BITMAP_SCALED_WIDTH, BITMAP_SCALED_HEIGHT, false);
                    //                        }

                    //calibrate
                    Mat calibrationMat;
                    try {
                        CalibrationResultData calResult = getCalibratedImage(warpMat);
                        if (calResult == null) {
                            return null;
                        } else {
                            calibrationMat = calResult.getCalibratedImage();
                        }

                        //                            Log.d(this.getClass().getSimpleName(), "E94 error mean: " + String.format(Locale.US, "%.2f", calResult.meanE94)
                        //                                    + ", max: " + String.format(Locale.US, "%.2f", calResult.maxE94)
                        //                                    + ", total: " + String.format(Locale.US, "%.2f", calResult.totalE94));

                        //                            if (AppPreferences.isDiagnosticMode()) {
                        //                                listener.showError("E94 mean: " + String.format(Locale.US, "%.2f", calResult.meanE94)
                        //                                        + ", max: " + String.format(Locale.US, "%.2f", calResult.maxE94)
                        //                                        + ", total: " + String.format(Locale.US, "%.2f", calResult.totalE94));
                        //                            }
                    } catch (Exception e) {
                        Timber.e(e);
                        return null;
                    }

                    //show calibrated image
                    //                        if (DEVELOP_MODE) {
                    //                            Mat rgb = new Mat();
                    //                            Imgproc.cvtColor(calibrationMat, rgb, Imgproc.COLOR_Lab2RGB);
                    //                            Bitmap bitmap = Bitmap.createBitmap(rgb.width(), rgb.height(), Bitmap.Config.ARGB_8888);
                    //                            Utils.matToBitmap(rgb, bitmap);
                    //                            if (FileUtil.isExternalStorageWritable()) {
                    //                                FileUtil.writeBitmapToExternalStorage(bitmap, "/warp", UUID.randomUUID().toString() + "_cal.png");
                    //                            }
                    //                            //Bitmap.createScaledBitmap(bitmap, BITMAP_SCALED_WIDTH, BITMAP_SCALED_HEIGHT, false);
                    //                        }

                    // cut out black area that contains the strip
                    Mat stripArea = null;
                    if (roiStripArea != null) {
                        stripArea = calibrationMat.submat(roiStripArea);
                    }

                    if (stripArea != null) {
                        Mat strip = null;
                        try {
                            StripTest.Brand brand = stripTest.getBrand(uuid);
                            strip = OpenCVUtil.detectStrip(stripArea, brand, ratioW, ratioH);
                        } catch (Exception e) {
                            Timber.e(e);
                        }

                        String error = "";
                        if (strip != null) {
                            labStrip = strip.clone();
                        } else {
                            if (context != null) {
                                Timber.e(context.getString(R.string.error_calibrating));
                            }
                            labStrip = stripArea.clone();

                            error = Constant.ERROR;

                            //draw a red cross over the image
                            Scalar red = RED_LAB_COLOR; // Lab color
                            Imgproc.line(labStrip, new Point(0, 0), new Point(labStrip.cols(), labStrip.rows()),
                                    red, 2);
                            Imgproc.line(labStrip, new Point(0, labStrip.rows()), new Point(labStrip.cols(), 0),
                                    red, 2);
                        }

                        try {
                            // create byte[] from Mat and store it in internal storage
                            // In order to restore the byte array, we also need the rows and columns dimensions
                            // these are stored in the last 8 bytes
                            int dataSize = labStrip.cols() * labStrip.rows() * 3;
                            byte[] payload = new byte[dataSize + 8];
                            byte[] matByteArray = new byte[dataSize];

                            labStrip.get(0, 0, matByteArray);

                            // pack cols and rows into byte arrays
                            byte[] rows = FileUtil.leIntToByteArray(labStrip.rows());
                            byte[] cols = FileUtil.leIntToByteArray(labStrip.cols());

                            // append them to the end of the array, in order rows, cols
                            System.arraycopy(matByteArray, 0, payload, 0, dataSize);
                            System.arraycopy(rows, 0, payload, dataSize, 4);
                            System.arraycopy(cols, 0, payload, dataSize + 4, 4);
                            FileUtil.writeByteArray(context, payload, Constant.STRIP + imageNo + error);
                        } catch (Exception e) {
                            Timber.e(e);
                        }
                    }
                }
            }
        } catch (@NonNull JSONException | IOException e) {

            if (context != null) {
                Timber.e(context.getString(R.string.error_cut_out_strip));
            }
        }
    }
    return null;
}

From source file:org.lasarobotics.vision.image.Drawing.java

License:Open Source License

public static void drawLine(Mat img, Point point1, Point point2, Color color, int thickness) {
    Imgproc.line(img, point1, point2, color.getScalarRGBA(), thickness);
}

From source file:org.usfirst.frc.team2084.CMonster2016.vision.Target.java

License:Open Source License

/**
 * Draw information about the target on an image.
 *
 * @param image the image to draw on
 * @param text whether to include position outputs in the corners of the
 *        image
 * @param imageHeading the heading of the robot when the image was taken
 */
public void draw(Mat image, boolean text, double imageHeading) {
    Scalar drawColor = isValid() ? VALID_TARGET_COLOR : INVALID_TARGET_COLOR;
    if (text) {
        drawColor = TARGET_COLOR;
    }

    Imgproc.line(image, topLeft, topRight, drawColor, DRAW_THICKNESS);
    Imgproc.line(image, topRight, bottomRight, drawColor, DRAW_THICKNESS);
    Imgproc.line(image, bottomRight, bottomLeft, drawColor, DRAW_THICKNESS);
    Imgproc.line(image, bottomLeft, topLeft, drawColor, DRAW_THICKNESS);

    Imgproc.circle(image, center, 5, drawColor);

    if (isValid()) {
        Utils.drawText(image, "score: " + SCORE_FORMAT.format(score), center.x - 50, center.y + 20, 1,
                Color.RED);
    } else {
        Utils.drawText(image, "failed: " + failedValidator, center.x - 50, center.y + 20, 1, Color.RED);
    }

    if (text) {
        Utils.drawText(image, " rotation: " + NUMBER_FORMAT.format(Math.toDegrees(xGoalAngle)) + " deg", 0,
                IMAGE_SIZE.height - 85);
        Utils.drawText(image, "distance: " + NUMBER_FORMAT.format(distance) + " ft", 0, IMAGE_SIZE.height - 65);
        Utils.drawText(image, "        x: " + NUMBER_FORMAT.format(position.x) + " ft", 0,
                IMAGE_SIZE.height - 45);
        Utils.drawText(image, "        y: " + NUMBER_FORMAT.format(position.y) + " ft", 0,
                IMAGE_SIZE.height - 25);
        Utils.drawText(image, "        z: " + NUMBER_FORMAT.format(position.z) + " ft", 0,
                IMAGE_SIZE.height - 5);

        double textX = IMAGE_SIZE.width - 250;

        double angleX = Math.toDegrees(rotation.get(0, 0)[0]);
        double angleY = Math.toDegrees(rotation.get(1, 0)[0]);
        double angleZ = Math.toDegrees(rotation.get(2, 0)[0]);

        Utils.drawText(image, "heading: " + NUMBER_FORMAT.format(Math.toDegrees(imageHeading)) + " deg", textX,
                IMAGE_SIZE.height - 65);
        Utils.drawText(image, "x angle: " + NUMBER_FORMAT.format(angleX) + " deg", textX,
                IMAGE_SIZE.height - 45);
        Utils.drawText(image, "y angle: " + NUMBER_FORMAT.format(angleY) + " deg", textX,
                IMAGE_SIZE.height - 25);
        Utils.drawText(image, "z angle: " + NUMBER_FORMAT.format(angleZ) + " deg", textX,
                IMAGE_SIZE.height - 5);
    }

}

From source file:vinylsleevedetection.Analyze.java

public void Check() {
    count = 1;
    //load openCV library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    //for loop to compare source images to user image
    for (int j = 1; j < 4; j++) {
        //source image location (record sleeve)
        String Object = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Source\\" + j
                + ".jpg";
        //user image location
        String Scene = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Output\\camera.jpg";
        //load images
        Mat objectImage = Imgcodecs.imread(Object, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Mat sceneImage = Imgcodecs.imread(Scene, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        //use BRISK feature detection
        MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
        FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.BRISK);
        //perform feature detection on source image
        featureDetector.detect(objectImage, objectKeyPoints);
        KeyPoint[] keypoints = objectKeyPoints.toArray();
        //use descriptor extractor
        MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
        DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.BRISK);
        descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

        Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Scalar newKeypointColor = new Scalar(255, 0, 0);

        Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

        MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
        MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
        featureDetector.detect(sceneImage, sceneKeyPoints);
        descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

        Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Scalar matchestColor = new Scalar(0, 255, 0);

        List<MatOfDMatch> matches = new LinkedList<>();
        DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
        descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

        LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();

        float nndrRatio = 0.7f;

        for (int i = 0; i < matches.size(); i++) {
            MatOfDMatch matofDMatch = matches.get(i);
            DMatch[] dmatcharray = matofDMatch.toArray();
            DMatch m1 = dmatcharray[0];
            DMatch m2 = dmatcharray[1];

            if (m1.distance <= m2.distance * nndrRatio) {
                goodMatchesList.addLast(m1);

            }
        }
        // if the number of good matches is more than 150, a match is found
        if (goodMatchesList.size() > 150) {
            System.out.println("Object Found");

            List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
            List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();

            LinkedList<Point> objectPoints = new LinkedList<>();
            LinkedList<Point> scenePoints = new LinkedList<>();

            for (int i = 0; i < goodMatchesList.size(); i++) {
                objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
                scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
            }

            MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
            objMatOfPoint2f.fromList(objectPoints);
            MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
            scnMatOfPoint2f.fromList(scenePoints);

            Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

            Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
            Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);

            obj_corners.put(0, 0, new double[] { 0, 0 });
            obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
            obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
            obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });

            Core.perspectiveTransform(obj_corners, scene_corners, homography);

            Mat img = Imgcodecs.imread(Scene, Imgcodecs.CV_LOAD_IMAGE_COLOR);
            //draw a green square around the matched object
            Imgproc.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)),
                    new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)),
                    new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)),
                    new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)),
                    new Scalar(0, 255, 0), 10);

            MatOfDMatch goodMatches = new MatOfDMatch();
            goodMatches.fromList(goodMatchesList);

            Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                    matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);
            //output image with match, image of the match locations and keypoints image
            String folder = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Output\\";
            Imgcodecs.imwrite(folder + "outputImage.jpg", outputImage);
            Imgcodecs.imwrite(folder + "matchoutput.jpg", matchoutput);
            Imgcodecs.imwrite(folder + "found.jpg", img);
            count = j;
            break;
        } else {
            System.out.println("Object Not Found");
            count = 0;
        }

    }

}