Example usage for org.opencv.core Point Point

List of usage examples for org.opencv.core Point Point

Introduction

On this page you can find example usages of the org.opencv.core Point default constructor, Point().

Prototype

public Point() 

Source Link

Usage

From source file:app.GUI2.java

private void jRadioButtonA1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jRadioButtonA1ActionPerformed
    // Make A1 the active target: allocate a fresh point and route subsequent
    // coordinate/text updates to A1's point and text field.
    pointerToPoint = pointA1 = new Point();
    pointerToJextField = jTextFieldA1;
}

From source file:app.GUI2.java

private void jRadioButtonA2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jRadioButtonA2ActionPerformed
    // Make A2 the active target: allocate a fresh point and route subsequent
    // coordinate/text updates to A2's point and text field.
    pointerToPoint = pointA2 = new Point();
    pointerToJextField = jTextFieldA2;
}

From source file:app.GUI2.java

private void jRadioButtonB1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jRadioButtonB1ActionPerformed
    // Make B1 the active target: allocate a fresh point and route subsequent
    // coordinate/text updates to B1's point and text field.
    pointerToPoint = pointB1 = new Point();
    pointerToJextField = jTextFieldB1;
}

From source file:app.GUI2.java

private void jRadioButtonB2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jRadioButtonB2ActionPerformed
    // Make B2 the active target: allocate a fresh point and route subsequent
    // coordinate/text updates to B2's point and text field.
    pointerToPoint = pointB2 = new Point();
    pointerToJextField = jTextFieldB2;
}

From source file:by.zuyeu.deyestracker.core.util.CVCoreUtils.java

/**
 * Adds two points component-wise.
 *
 * <p>Null-safe: a {@code null} operand acts as an identity, i.e. the other
 * argument is returned as-is (both {@code null} yields {@code null}).
 * Note that in the one-null cases the returned object ALIASES the non-null
 * argument rather than being a copy — callers that mutate the result should
 * be aware of this (preserved from the original contract).
 *
 * @param p1 first point, may be {@code null}
 * @param p2 second point, may be {@code null}
 * @return a new {@code Point(p1.x + p2.x, p1.y + p2.y)} when both are
 *         non-null; otherwise the non-null argument, or {@code null}
 */
public static Point sumPoints(Point p1, Point p2) {
    // The original's compound checks (p1 == null && p2 != null, etc.) were
    // redundant after the both-null case; two plain guards cover all cases.
    if (p1 == null) {
        return p2;
    }
    if (p2 == null) {
        return p1;
    }
    return new Point(p1.x + p2.x, p1.y + p2.y);
}

From source file:by.zuyeu.deyestracker.core.util.CVCoreUtils.java

/**
 * Scales a point down by an integer divisor, component-wise.
 *
 * NOTE(review): {@code divider == 0} produces Infinity/NaN coordinates
 * (double division, no exception) — confirm callers guard against it.
 *
 * @param p       point to scale; must be non-null
 * @param divider divisor applied to both coordinates
 * @return a new point {@code (p.x / divider, p.y / divider)}
 */
public static Point dividePoint(Point p, int divider) {
    return new Point(p.x / divider, p.y / divider);
}

From source file:by.zuyeu.deyestracker.reader.util.ObjectTransformer.java

/**
 * Converts a reader-side {@code Point2} into an OpenCV {@code Point}
 * carrying the same coordinates.
 *
 * NOTE(review): assumes {@code point2} is non-null — confirm with callers.
 *
 * @param point2 source point
 * @return a new OpenCV point with {@code point2}'s x and y
 */
private static Point transformPoint2ToCVType(Point2 point2) {
    return new Point(point2.getX(), point2.getY());
}

From source file:cn.xiongyihui.webcam.JpegFactory.java

License:Open Source License

/**
 * Camera preview callback. Each NV21 frame is converted to RGB, ORB features
 * are detected and matched against previously captured template descriptors,
 * a response-weighted centroid and an RMS "radius" of the matched features
 * are estimated, an overlay circle is optionally drawn, and the resulting
 * grayscale frame is JPEG-encoded into {@code mJpegOutputStream}.
 *
 * NOTE(review): all tracking state lives in {@code globalClass}; whether its
 * accessors are safe to call from the camera callback thread is not visible
 * here — confirm.
 *
 * @param data   raw NV21 preview buffer for an mWidth x mHeight frame
 * @param camera the source camera (unused in this method)
 */
public void onPreviewFrame(byte[] data, Camera camera) {
    // NOTE(review): yuvImage is constructed but never used below — dead code?
    YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, mWidth, mHeight, null);

    mJpegOutputStream.reset(); // drop the previous frame's encoded bytes

    try {
        // --- Read template/tracking state from the shared globalClass. ---
        // NOTE(review): several of these (numberOfPositiveTemplateFeatures,
        // normalisedTemplateKeyPoints, the normalised centroids, the
        // template centroids) are read but never used in this method.
        //Log.e(TAG, "Beginning to read values!");
        double distanceTemplateFeatures = this.globalClass.getDistanceTemplateFeatures();
        double xTemplateCentroid = this.globalClass.getXtemplateCentroid();
        double yTemplateCentroid = this.globalClass.getYtemplateCentroid();
        int x0template = this.globalClass.getX0display();
        int y0template = this.globalClass.getY0display();
        int x1template = this.globalClass.getX1display();
        int y1template = this.globalClass.getY1display();
        Mat templateDescriptor = this.globalClass.getTemplateDescriptor();
        MatOfKeyPoint templateKeyPoints = this.globalClass.getKeyPoints();
        KeyPoint[] templateKeyPointsArray = templateKeyPoints.toArray();
        int numberOfTemplateFeatures = this.globalClass.getNumberOfTemplateFeatures();
        int numberOfPositiveTemplateFeatures = this.globalClass.getNumberOfPositiveTemplateFeatures();
        KeyPoint[] normalisedTemplateKeyPoints = this.globalClass.getNormalisedTemplateKeyPoints();
        double normalisedXcentroid = this.globalClass.getNormalisedXcentroid();
        double normalisedYcentroid = this.globalClass.getNormalisedYcentroid();
        int templateCapturedBitmapWidth = this.globalClass.getTemplateCapturedBitmapWidth();
        int templateCapturedBitmapHeight = this.globalClass.getTemplateCapturedBitmapHeight();
        //Log.e(TAG, "Ended reading values!");
        globalClass.setJpegFactoryDimensions(mWidth, mHeight);

        // Ratio between the preview-stream size and the bitmap the template
        // was captured from; width and height ratios are averaged.
        double scalingRatio, scalingRatioHeight, scalingRatioWidth;

        scalingRatioHeight = (double) mHeight / (double) templateCapturedBitmapHeight;
        scalingRatioWidth = (double) mWidth / (double) templateCapturedBitmapWidth;
        scalingRatio = (scalingRatioHeight + scalingRatioWidth) / 2; //Just to account for any minor variations.
        //Log.e(TAG, "Scaling ratio:" + String.valueOf(scalingRatio));
        //Log.e("Test", "Captured Bitmap's dimensions: (" + templateCapturedBitmapHeight + "," + templateCapturedBitmapWidth + ")");

        //Scale the actual features of the image
        // One-time (flag == 0) rescale of the template keypoints from
        // template-bitmap coordinates into preview-stream coordinates.
        int flag = this.globalClass.getFlag();
        if (flag == 0) {
            int iterate = 0;
            int iterationMax = numberOfTemplateFeatures;

            for (iterate = 0; iterate < (iterationMax); iterate++) {
                Log.e(TAG, "Point detected " + iterate + ":(" + templateKeyPointsArray[iterate].pt.x + ","
                        + templateKeyPointsArray[iterate].pt.y + ")");

                // NOTE(review): this inner check is redundant — flag is
                // already known to be 0 inside this branch.
                if (flag == 0) {
                    templateKeyPointsArray[iterate].pt.x = scalingRatio
                            * (templateKeyPointsArray[iterate].pt.x + (double) x0template);
                    templateKeyPointsArray[iterate].pt.y = scalingRatio
                            * (templateKeyPointsArray[iterate].pt.y + (double) y0template);
                }
                Log.e(TAG, "Scaled points:(" + templateKeyPointsArray[iterate].pt.x + ","
                        + templateKeyPointsArray[iterate].pt.y + ")");
            }

            this.globalClass.setFlag(1); // never rescale again
        }

        // Write the (possibly rescaled) keypoints back into the shared Mat.
        templateKeyPoints.fromArray(templateKeyPointsArray);
        //Log.e(TAG, "Template-features have been scaled successfully!");

        // NOTE(review): the (int) cast truncates the 64-bit millisecond
        // clock; the timeEnd - timeBegin difference below usually still
        // works, but the cast looks unintentional.
        long timeBegin = (int) System.currentTimeMillis();

        // --- NV21 -> RGB -> grayscale conversion of the current frame. ---
        Mat mYuv = new Mat(mHeight + mHeight / 2, mWidth, CvType.CV_8UC1);
        mYuv.put(0, 0, data);
        Mat mRgb = new Mat();
        Imgproc.cvtColor(mYuv, mRgb, Imgproc.COLOR_YUV420sp2RGB);

        Mat result = new Mat();
        Imgproc.cvtColor(mRgb, result, Imgproc.COLOR_RGB2GRAY);

        // --- ORB feature detection + description on the frame. -----------
        int detectorType = FeatureDetector.ORB;
        FeatureDetector featureDetector = FeatureDetector.create(detectorType);
        MatOfKeyPoint keypointsImage = new MatOfKeyPoint();
        featureDetector.detect(result, keypointsImage);
        KeyPoint[] imageKeypoints = keypointsImage.toArray();

        Scalar color = new Scalar(0, 0, 0); // overlay colour (black)

        DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

        Mat imageDescriptor = new Mat();
        descriptorExtractor.compute(result, keypointsImage, imageDescriptor);

        //BRUTEFORCE_HAMMING apparently finds even the suspicious feature-points! So, inliers and outliers can turn out to be a problem

        // --- Brute-force Hamming matching: frame (query) vs template. ----
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
        MatOfDMatch matches = new MatOfDMatch();
        matcher.match(imageDescriptor, templateDescriptor, matches);

        //Log.e("Prasad", String.valueOf(mWidth) + "," + String.valueOf(mHeight));

        DMatch[] matchesArray = matches.toArray();

        // Adaptive match-acceptance threshold (tuned further below).
        double minimumMatchDistance = globalClass.getHammingDistance();

        int iDescriptorMax = matchesArray.length;
        int iterateDescriptor;

        double xMatchedPoint, yMatchedPoint;
        // NOTE(review): flagDraw is never used — the drawKeypoints call
        // that would consume it is commented out near the end.
        int flagDraw = Features2d.NOT_DRAW_SINGLE_POINTS;

        Point point;

        // Colour gate: a matched feature only counts when the RGB value at
        // its location lies inside this user-configured box.
        double rHigh = this.globalClass.getRhigh();
        double rLow = this.globalClass.getRlow();
        double gHigh = this.globalClass.getGhigh();
        double gLow = this.globalClass.getGlow();
        double bHigh = this.globalClass.getBhigh();
        double bLow = this.globalClass.getBlow();

        double[] colorValue;
        double red, green, blue;
        // NOTE(review): featureCount and kernelArray below are declared but
        // never used in this method.
        int[] featureCount;
        // The frame is divided into a coarse xKernelSize x yKernelSize grid;
        // kernelCounter counts accepted features per grid cell.
        double xKernelSize = 9, yKernelSize = 9;
        globalClass.setKernelSize(xKernelSize, yKernelSize);
        double xImageKernelScaling, yImageKernelScaling;

        xImageKernelScaling = xKernelSize / mWidth;
        yImageKernelScaling = yKernelSize / mHeight;
        int[][] kernel = new int[(int) xKernelSize][(int) yKernelSize];
        double[][] kernelCounter = new double[(int) xKernelSize][(int) yKernelSize];
        int numberKernelMax = 10;
        globalClass.setNumberKernelMax(numberKernelMax);
        int[][][] kernelArray = new int[(int) xKernelSize][(int) yKernelSize][numberKernelMax];
        double featureImageResponse;
        double xImageCentroid, yImageCentroid;
        double xSum = 0, ySum = 0;
        double totalImageResponse = 0;

        // --- Accumulate a response-weighted centroid over good matches. ---
        for (iterateDescriptor = 0; iterateDescriptor < iDescriptorMax; iterateDescriptor++) {
            if (matchesArray[iterateDescriptor].distance < minimumMatchDistance) {
                //MatchedPoint: Awesome match without color feedback
                xMatchedPoint = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt.x;
                yMatchedPoint = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt.y;

                // Sample the RGB pixel at the matched location (mRgb is RGB
                // per the COLOR_YUV420sp2RGB conversion above).
                colorValue = mRgb.get((int) yMatchedPoint, (int) xMatchedPoint);

                red = colorValue[0];
                green = colorValue[1];
                blue = colorValue[2];

                int xKernelFeature, yKernelFeature;
                //Color feedback
                // NOTE(review): non-short-circuit '&' is used, so every
                // comparison is evaluated even after one fails — harmless
                // here, but presumably unintended.
                if ((rLow < red) & (red < rHigh) & (gLow < green) & (green < gHigh) & (bLow < blue)
                        & (blue < bHigh)) {
                    try {
                        featureImageResponse = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].response;
                        if (featureImageResponse > 0) {
                            xSum = xSum + featureImageResponse * xMatchedPoint;
                            ySum = ySum + featureImageResponse * yMatchedPoint;
                            totalImageResponse = totalImageResponse + featureImageResponse;
                            point = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt;

                            // Tally the feature into its coarse grid cell.
                            xKernelFeature = (int) (xMatchedPoint * xImageKernelScaling);
                            yKernelFeature = (int) (yMatchedPoint * yImageKernelScaling);
                            kernelCounter[xKernelFeature][yKernelFeature]++;
                            //Core.circle(result, point, 3, color);
                        }
                    } catch (Exception e) {
                        // NOTE(review): exception silently swallowed; at
                        // minimum this should be logged.
                    }
                }
                //Log.e(TAG, iterateDescriptor + ": (" + xMatchedPoint + "," + yMatchedPoint + ")");
            }
        }

        // Binarise the per-cell counters into an occupancy kernel.
        int iKernel = 0, jKernel = 0;
        for (iKernel = 0; iKernel < xKernelSize; iKernel++) {
            for (jKernel = 0; jKernel < yKernelSize; jKernel++) {
                if (kernelCounter[iKernel][jKernel] > 0) {
                    kernel[iKernel][jKernel] = 1;
                } else {
                    kernel[iKernel][jKernel] = 0;
                }
            }
        }

        // Weighted centroid; NaN when nothing passed the gates
        // (totalImageResponse == 0, giving 0/0).
        xImageCentroid = xSum / totalImageResponse;
        yImageCentroid = ySum / totalImageResponse;

        if ((Double.isNaN(xImageCentroid)) | (Double.isNaN(yImageCentroid))) {
            // No centroid this frame: loosen the matcher threshold.
            globalClass.setHammingDistance((int) (minimumMatchDistance + 2));
        } else {
            // Centroid found: tighten the threshold back down.
            globalClass.setHammingDistance((int) (minimumMatchDistance - 1));
            int jpegCount = globalClass.getJpegFactoryCallCount();
            jpegCount++;
            globalClass.setJpegFactoryCallCount(jpegCount);
            int initialisationFlag = globalClass.getInitialisationFlag();
            int numberOfDistances = 10;
            globalClass.setNumberOfDistances(numberOfDistances);

            // Only enable the display overlay once enough frames have been
            // accumulated for the running averages to be meaningful.
            if ((jpegCount > globalClass.getNumberKernelMax()) & (jpegCount > numberOfDistances)) {
                globalClass.setInitialisationFlag(1);
            }

            // NOTE(review): kernelSum's fresh allocation is immediately
            // replaced by computeKernelSum's return value.
            int[][] kernelSum = new int[(int) xKernelSize][(int) yKernelSize],
                    mask = new int[(int) xKernelSize][(int) yKernelSize];
            int iJpeg, jJpeg;
            kernelSum = globalClass.computeKernelSum(kernel);

            Log.e(TAG, Arrays.deepToString(kernelSum));

            // Mask = cells occupied in more than a quarter of the recent
            // numberKernelMax frames.
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (kernelSum[iJpeg][jJpeg] > (numberKernelMax / 4)) {//Meant for normalised kernel
                        mask[iJpeg][jJpeg]++;
                    }
                }
            }

            Log.e(TAG, Arrays.deepToString(mask));

            // NOTE(review): maskedFeatureCount starts at 1 (not 0), which
            // biases the mean/variance below — possibly a deliberate
            // divide-by-zero guard; confirm.
            int maskedFeatureCount = 1, xMaskFeatureSum = 0, yMaskFeatureSum = 0;

            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (mask[iJpeg][jJpeg] == 1) {
                        xMaskFeatureSum = xMaskFeatureSum + iJpeg;
                        yMaskFeatureSum = yMaskFeatureSum + jJpeg;
                        maskedFeatureCount++;
                    }
                }
            }

            // NOTE(review): int/int division — the quotient is truncated
            // BEFORE being widened to double.
            double xMaskMean = xMaskFeatureSum / maskedFeatureCount;
            double yMaskMean = yMaskFeatureSum / maskedFeatureCount;

            // Spread of the masked cells around their mean, converted back
            // to pixel units and averaged into an "RMS radius".
            double xSquaredSum = 0, ySquaredSum = 0;
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (mask[iJpeg][jJpeg] == 1) {
                        xSquaredSum = xSquaredSum + (iJpeg - xMaskMean) * (iJpeg - xMaskMean);
                        ySquaredSum = ySquaredSum + (jJpeg - yMaskMean) * (jJpeg - yMaskMean);
                    }
                }
            }

            double xRMSscaled = Math.sqrt(xSquaredSum);
            double yRMSscaled = Math.sqrt(ySquaredSum);
            double RMSimage = ((xRMSscaled / xImageKernelScaling) + (yRMSscaled / yImageKernelScaling)) / 2;
            Log.e(TAG, "RMS radius of the image: " + RMSimage);

            /*//Command the quadcopter and send PWM values to Arduino
            double throttlePWM = 1500, yawPWM = 1500, pitchPWM = 1500;
            double deltaThrottle = 1, deltaYaw = 1, deltaPitch = 1;
                    
            throttlePWM = globalClass.getThrottlePWM();
            pitchPWM = globalClass.getPitchPWM();
            yawPWM = globalClass.getYawPWM();
                    
            deltaThrottle = globalClass.getThrottleDelta();
            deltaPitch = globalClass.getPitchDelta();
            deltaYaw = globalClass.getYawDelta();
                    
            if(yImageCentroid>yTemplateCentroid) {
            throttlePWM = throttlePWM + deltaThrottle;
            }else{
            throttlePWM = throttlePWM - deltaThrottle;
            }
                    
            if(RMSimage>distanceTemplateFeatures) {
            pitchPWM = pitchPWM + deltaPitch;
            }else{
            pitchPWM = pitchPWM - deltaPitch;
            }
                    
            if(xImageCentroid>xTemplateCentroid) {
            yawPWM = yawPWM + deltaYaw;
            }else{
            yawPWM = yawPWM - deltaYaw;
            }
                    
            if(1000>throttlePWM){   throttlePWM = 1000; }
                    
            if(2000<throttlePWM){   throttlePWM = 2000; }
                    
            if(1000>pitchPWM){  pitchPWM = 1000;    }
                    
            if(2000<pitchPWM){  pitchPWM = 2000;    }
                    
            if(1000>yawPWM){    yawPWM = 1000;  }
                    
            if(2000<yawPWM){    yawPWM = 2000;  }
                    
            globalClass.setPitchPWM(pitchPWM);
            globalClass.setYawPWM(yawPWM);
            globalClass.setThrottlePWM(throttlePWM);*/

            //Display bounding circle
            // Scale the template's bounding box by the ratio of the current
            // RMS radius to the template's stored feature distance.
            int originalWidthBox = x1template - x0template;
            int originalHeightBox = y1template - y0template;

            double scaledBoundingWidth = (originalWidthBox * RMSimage / distanceTemplateFeatures);
            double scaledBoundingHeight = (originalHeightBox * RMSimage / distanceTemplateFeatures);

            double displayRadius = (scaledBoundingWidth + scaledBoundingHeight) / 2;
            // NOTE(review): 1.4826 is the MAD-to-sigma consistency constant;
            // presumably used as a robust-scale correction here — confirm.
            displayRadius = displayRadius * 1.4826;
            displayRadius = displayRadius / numberKernelMax;
            double distanceAverage = 0;
            if (Double.isNaN(displayRadius)) {
                //Log.e(TAG, "displayRadius is NaN!");
            } else {
                // Fold this frame's radius into the running average.
                distanceAverage = globalClass.imageDistanceAverage(displayRadius);
                //Log.e(TAG, "Average distance: " + distanceAverage);
            }

            if ((Double.isNaN(xImageCentroid)) | Double.isNaN(yImageCentroid)) {
                //Log.e(TAG, "Centroid is NaN!");
            } else {
                globalClass.centroidAverage(xImageCentroid, yImageCentroid);
            }

            if (initialisationFlag == 1) {
                //int displayRadius = 50;

                // Draw the smoothed centroid/radius onto the outgoing frame.
                // NOTE(review): centroidAverage() is called again below even
                // though it already ran just above — the centroid may be
                // averaged twice per frame; confirm this is intended.
                Point pointDisplay = new Point();
                //pointDisplay.x = xImageCentroid;
                //pointDisplay.y = yImageCentroid;
                pointDisplay.x = globalClass.getXcentroidAverageGlobal();
                pointDisplay.y = globalClass.getYcentroidAverageGlobal();
                globalClass.centroidAverage(xImageCentroid, yImageCentroid);
                int distanceAverageInt = (int) distanceAverage;
                Core.circle(result, pointDisplay, distanceAverageInt, color);
            }

        }

        Log.e(TAG, "Centroid in the streamed image: (" + xImageCentroid + "," + yImageCentroid + ")");
        /*try {
        //Features2d.drawKeypoints(result, keypointsImage, result, color, flagDraw);
        Features2d.drawKeypoints(result, templateKeyPoints, result, color, flagDraw);
        }catch(Exception e){}*/

        //Log.e(TAG, "High (R,G,B): (" + rHigh + "," + gHigh + "," + bHigh + ")");
        //Log.e(TAG, "Low (R,G,B): (" + rLow + "," + gLow + "," + bLow + ")");

        //Log.e(TAG, Arrays.toString(matchesArray));

        // --- Encode the annotated grayscale frame as JPEG. ----------------
        try {
            Bitmap bmp = Bitmap.createBitmap(result.cols(), result.rows(), Bitmap.Config.ARGB_8888);
            Utils.matToBitmap(result, bmp);
            //Utils.matToBitmap(mRgb, bmp);
            bmp.compress(Bitmap.CompressFormat.JPEG, mQuality, mJpegOutputStream);
        } catch (Exception e) {
            Log.e(TAG, "JPEG not working!");
        }

        long timeEnd = (int) System.currentTimeMillis();
        Log.e(TAG, "Time consumed is " + String.valueOf(timeEnd - timeBegin) + "milli-seconds!");

        mJpegData = mJpegOutputStream.toByteArray();

        // Wake any streaming thread blocked waiting for a fresh JPEG.
        synchronized (mJpegOutputStream) {
            mJpegOutputStream.notifyAll();
        }
    } catch (Exception e) {
        // NOTE(review): broad catch hides the actual failure; the exception
        // itself should be logged (Log.e(TAG, msg, e)).
        Log.e(TAG, "JPEG-factory is not working!");
    }

}

From source file:com.nekomeshi312.whiteboardcorrection.WhiteBoardDetect.java

License:Open Source License

/**
 * Picks one representative straight line (in ax + by = 1 form) for each of
 * the 2x2 = 4 segment classes (two angle groups x two location groups):
 * per class, outliers are iteratively trimmed by mean +/- sigma on the
 * (a, b) coefficients, the survivors are binned into a 2-D histogram over
 * (a, b), and the heaviest bin (weighted by segment length) wins.
 * (The original comment block was mojibake-damaged Japanese; the intent
 * above is reconstructed from the code — verify.)
 *
 * @param lineEq out-parameter [angle][section]: receives the selected line
 *               equation (ax + by = 1) for each class
 * @param img    optional debug image; when non-null, classified segments
 *               are drawn onto it
 * @return true when a line was selected for every class; false when any
 *         class has no usable segments
 */
private boolean selectLines(StraightLineEquation lineEq[][], Mat img) {
    //
    // Segments bucketed by [angle class][location class].
    ArrayList<LineInfo>[][] classifiedLines = new ArrayList[2][2];
    // NOTE(review): designateLine is initialised but never read here.
    LineInfo[][] designateLine = new LineInfo[2][2];

    for (int i = 0; i < 2; i++) {
        for (int j = 0; j < 2; j++) {
            classifiedLines[i][j] = new ArrayList<LineInfo>();
            designateLine[i][j] = null;
        }
    }
    // Classify every detected segment by its angle/location flag bits.
    for (LineInfo li : mLineInfo) {
        final int agl = (li.mLocationFlg & LineInfo.ANGLE0) == LineInfo.ANGLE0 ? 0 : 1;
        final int sec = (li.mLocationFlg & LineInfo.LOCAT0) == LineInfo.LOCAT0 ? 0 : 1;
        classifiedLines[agl][sec].add(li);
    }
    final float centerX = (float) (mViewWidth >> 1);
    final float centerY = (float) (mViewHeight >> 1);

    // Fail fast when any of the four classes received no segments.
    if (classifiedLines[0][0].size() == 0 || classifiedLines[0][1].size() == 0
            || classifiedLines[1][0].size() == 0 || classifiedLines[1][1].size() == 0) {
        return false;
    }

    for (int ang = 0; ang < 2; ang++) {// angle class
        for (int sec = 0; sec < 2; sec++) {// location class
            // Estimate (a, b) of ax + by = 1 for this class: iteratively
            // drop outliers, then histogram the survivors.
            final int HIST_DIV_NUM = 50;
            final int OUTLIER_LOOPNUM = 2;
            // Inlier band half-width, in standard deviations of a and b.
            final double SIGMA_MUL = 1.0;
            double aveA = 0.0;
            double aveB = 0.0;
            double stdevA = 0.0;
            double stdevB = 0.0;
            // NOTE(review): Double.MIN_VALUE is the smallest POSITIVE
            // double, not the most-negative value; as a "max" seed it only
            // works while the coefficients are positive. -Double.MAX_VALUE
            // would be the safe seed — confirm coefficient signs.
            double aMax = Double.MIN_VALUE;
            double aMin = Double.MAX_VALUE;
            double bMax = Double.MIN_VALUE;
            double bMin = Double.MAX_VALUE;

            for (int i = 0; i < OUTLIER_LOOPNUM; i++) {
                if (classifiedLines[ang][sec].size() == 0) {
                    return false;
                }
                aveA = 0.0;
                aveB = 0.0;
                stdevA = 0.0;
                stdevB = 0.0;
                double aveL = 0.0;
                // Length*width-weighted means of a and b.
                for (LineInfo li : classifiedLines[ang][sec]) {
                    aveA += li.mLineEq.a * li.mLength * li.mWidth;
                    aveB += li.mLineEq.b * li.mLength * li.mWidth;
                    aveL += li.mLength * li.mWidth;
                }
                aveA /= aveL;
                aveB /= aveL;
                // Unweighted standard deviations around those means.
                for (LineInfo li : classifiedLines[ang][sec]) {
                    final double aa = li.mLineEq.a - aveA;
                    final double bb = li.mLineEq.b - aveB;
                    stdevA += aa * aa;
                    stdevB += bb * bb;
                }
                stdevA = Math.sqrt(stdevA / classifiedLines[ang][sec].size()) * SIGMA_MUL;
                stdevB = Math.sqrt(stdevB / classifiedLines[ang][sec].size()) * SIGMA_MUL;
                aMax = aveA + stdevA;
                aMin = aveA - stdevA;
                bMax = aveB + stdevB;
                bMin = aveB - stdevB;
                if (i < OUTLIER_LOOPNUM - 1) {
                    ArrayList<LineInfo> tmp = new ArrayList<LineInfo>();
                    for (LineInfo li : classifiedLines[ang][sec]) {
                        // Strict pass: drop lines whose a OR b falls
                        // outside the mean +/- sigma band.
                        if (li.mLineEq.a > aMax || li.mLineEq.a < aMin || li.mLineEq.b > bMax
                                || li.mLineEq.b < bMin)
                            continue;
                        tmp.add(li);
                    }
                    if (tmp.size() > 0) {
                        classifiedLines[ang][sec] = tmp;
                    } else {
                        for (LineInfo li : classifiedLines[ang][sec]) {
                            // Relaxed fallback: only drop lines where BOTH
                            // a AND b are outliers, so the class is not
                            // emptied entirely.
                            if ((li.mLineEq.a > aMax || li.mLineEq.a < aMin)
                                    && (li.mLineEq.b > bMax || li.mLineEq.b < bMin))
                                continue;
                            tmp.add(li);
                        }
                        classifiedLines[ang][sec] = tmp;
                    }
                }
            }
            // Recompute the true min/max of a and b over the survivors
            // (the band edges above were mean +/- sigma, not extrema).
            aMax = Double.MIN_VALUE;
            aMin = Double.MAX_VALUE;
            bMax = Double.MIN_VALUE;
            bMin = Double.MAX_VALUE;
            for (LineInfo li : classifiedLines[ang][sec]) {
                if (li.mLineEq.a > aMax)
                    aMax = li.mLineEq.a;
                if (li.mLineEq.a < aMin)
                    aMin = li.mLineEq.a;
                if (li.mLineEq.b > bMax)
                    bMax = li.mLineEq.b;
                if (li.mLineEq.b < bMin)
                    bMin = li.mLineEq.b;
            }

            // NOTE(review): if all surviving a (or b) values are equal,
            // aDiv (bDiv) is 0 and the binning below divides by zero;
            // (int) of the resulting Infinity/NaN can index out of range.
            // Confirm upstream guarantees prevent this.
            final double aDiv = (aMax - aMin) / (double) HIST_DIV_NUM;
            final double bDiv = (bMax - bMin) / (double) HIST_DIV_NUM;

            // 2-D histogram over (a, b); each cell keeps its line list.
            LineList hist[][] = new LineList[HIST_DIV_NUM][HIST_DIV_NUM];
            for (int i = 0; i < HIST_DIV_NUM; i++) {
                for (int j = 0; j < HIST_DIV_NUM; j++) {
                    hist[i][j] = new LineList();
                }
            }
            int linenum = 0;
            for (LineInfo li : classifiedLines[ang][sec]) {
                int aPos = (int) ((li.mLineEq.a - aMin) / aDiv);
                if (aPos == HIST_DIV_NUM)
                    aPos--;
                int bPos = (int) ((li.mLineEq.b - bMin) / bDiv);
                if (bPos == HIST_DIV_NUM)
                    bPos--;
                hist[aPos][bPos].pushLine(li);
                linenum++;
            }
            if (linenum == 0) {
                return false;
            }
            // Find the histogram cell with the greatest total segment length.
            int maxAPos = 0;
            int maxBPos = 0;
            double maxLen = 0.0;
            for (int a = 0; a < HIST_DIV_NUM; a++) {
                for (int b = 0; b < HIST_DIV_NUM; b++) {
                    if (hist[a][b].getLineListNum() == 0) {
                        continue;
                    }
                    double len = 0.0;
                    for (LineInfo li : hist[a][b].mLineList) {
                        len += li.mLength;
                    }
                    if (maxLen < len) {
                        maxAPos = a;
                        maxBPos = b;
                        maxLen = len;
                    }
                }
            }
            // Single survivor: use its exact coefficients; otherwise use
            // the centre of the winning histogram cell.
            if (linenum == 1) {
                lineEq[ang][sec].a = hist[maxAPos][maxBPos].mLineList.get(0).mLineEq.a;
                lineEq[ang][sec].b = hist[maxAPos][maxBPos].mLineList.get(0).mLineEq.b;
            } else {
                lineEq[ang][sec].a = ((double) maxAPos + 0.5) * aDiv + aMin;
                lineEq[ang][sec].b = ((double) maxBPos + 0.5) * bDiv + bMin;
            }

            // Debug drawing: project the selected line to the image borders
            // (coordinates are centred on the view midpoint).
            if (img != null) {
                final double aa = lineEq[ang][sec].a;
                final double bb = lineEq[ang][sec].b;
                Point pt1 = new Point();
                Point pt2 = new Point();
                if (Math.abs(bb) > Math.abs(aa)) {
                    pt1.x = 0.0;
                    pt1.y = (1.0 - aa * (float) (-centerX)) / bb + (float) centerY;
                    pt2.x = (float) mViewWidth;
                    pt2.y = (1.0 - aa * (float) (centerX)) / bb + (float) centerY;
                } else {
                    pt1.x = (1.0 - bb * (float) (-centerY)) / aa + (float) centerX;
                    pt1.y = 0.0;
                    pt2.x = (1.0 - bb * (float) (centerY)) / aa + (float) centerX;
                    pt2.y = (float) mViewHeight;
                }
                if (MyDebug.DEBUG) {
                    if (Math.abs(bb) > 0.001 && Math.abs(aa / bb) > 0.3 && Math.abs(aa / bb) < 2) {
                        Log.d(LOG_TAG,
                                "ang = " + ang + " sec = " + sec + " max a/b = " + maxAPos + ":" + maxBPos);
                        //Core.line(img, pt1, pt2, new Scalar(0xff, 0x00, 0x00), 5);
                    } else {
                        //Core.line(img, pt1, pt2, new Scalar(0xff, 0xff, 0xff), 5);
                    }
                }

                // Draw every classified segment, colour-coded by class.
                Scalar color[] = new Scalar[4];
                color[0] = new Scalar(0xff, 0x00, 0x00);
                color[1] = new Scalar(0x00, 0xff, 0x00);
                color[2] = new Scalar(0x00, 0x00, 0xff);
                color[3] = new Scalar(0xff, 0x00, 0xff);

                for (LineInfo li : mLineInfo) {
                    int c = 0;
                    if (li.mLocationFlg == (LineInfo.ANGLE0 | LineInfo.LOCAT0)) {
                        c = 0;
                    } else if (li.mLocationFlg == (LineInfo.ANGLE0 | LineInfo.LOCAT1)) {
                        c = 1;
                    }
                    if (li.mLocationFlg == (LineInfo.ANGLE1 | LineInfo.LOCAT0)) {
                        c = 2;
                    } else if (li.mLocationFlg == (LineInfo.ANGLE1 | LineInfo.LOCAT1)) {
                        c = 3;
                    }
                    Core.line(img, li.mStart, li.mEnd, color[c], 1);
                    Core.circle(img, li.mStart, 10, color[0]);
                    Core.circle(img, li.mEnd, 10, color[1]);
                }
            }
        }
    }
    return true;
}

From source file:com.raulh82vlc.face_detection_sample.opencv.domain.EyesDetectionInteractorImpl.java

License:Apache License

/**
 * <p>Build a template from a specific eye area previously substracted
 * uses detectMultiScale for this area, then uses minMaxLoc method to
 * detect iris from the detected eye</p>
 *
 * @param area Preformatted Area/*from  www  .  j  a va2  s  .c  o  m*/
 * @param size minimum iris size
 * @param grayMat image in gray
 * @param rgbaMat image in color
 * @param detectorEye Haar Cascade classifier
 * @return built template
 */
@NonNull
private static Mat buildTemplate(Rect area, final int size, @NonNull Mat grayMat, @NonNull Mat rgbaMat,
        CascadeClassifier detectorEye) {
    Mat template = new Mat();
    Mat graySubMatEye = grayMat.submat(area);
    MatOfRect eyes = new MatOfRect();
    Rect eyeTemplate;
    detectorEye.detectMultiScale(graySubMatEye, eyes, 1.15, 2,
            Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
            new Size(EYE_MIN_SIZE, EYE_MIN_SIZE), new Size());

    Rect[] eyesArray = eyes.toArray();
    if (eyesArray.length > 0) {
        Rect e = eyesArray[0];
        e.x = area.x + e.x;
        e.y = area.y + e.y;
        Rect eyeRectangle = getEyeArea((int) e.tl().x, (int) (e.tl().y + e.height * 0.4), e.width,
                (int) (e.height * 0.6));
        graySubMatEye = grayMat.submat(eyeRectangle);
        Mat rgbaMatEye = rgbaMat.submat(eyeRectangle);

        Core.MinMaxLocResult minMaxLoc = Core.minMaxLoc(graySubMatEye);

        FaceDrawerOpenCV.drawIrisCircle(rgbaMatEye, minMaxLoc);
        Point iris = new Point();
        iris.x = minMaxLoc.minLoc.x + eyeRectangle.x;
        iris.y = minMaxLoc.minLoc.y + eyeRectangle.y;
        eyeTemplate = getEyeArea((int) iris.x - size / 2, (int) iris.y - size / 2, size, size);
        FaceDrawerOpenCV.drawEyeRectangle(eyeTemplate, rgbaMat);
        template = (grayMat.submat(eyeTemplate)).clone();
    }
    return template;
}