Example usage for org.opencv.core Point Point

List of usage examples for org.opencv.core Point Point

Introduction

On this page you can find example usage for org.opencv.core Point Point.

Prototype

public Point(double x, double y) 

Source Link

Usage

From source file:simeav.Modelo.java

/**
 * Renders the detected diagram (modules and connectors) onto a fresh white canvas.
 *
 * @return a new BGR image, the same size as the preprocessed image, containing
 *         the drawn graph
 */
private Mat dibujarGrafo() {
    Mat grafo = new Mat(imagenes.get(Etapa.PREPROCESADA).size(), CvType.CV_8UC3, new Scalar(255, 255, 255));
    // Each module is drawn as a labelled rectangle.
    ArrayList<Modulo> modulos = diagrama.getModulos();
    for (Modulo modulo : modulos) {
        Rect rect = modulo.getRectangulo();
        Core.rectangle(grafo, rect.tl(), rect.br(), new Scalar(182, 170, 5), 3);
        Core.putText(grafo, modulo.getNombre(), new Point(rect.tl().x + 20, rect.tl().y + 20),
                Core.FONT_HERSHEY_PLAIN, 1, new Scalar(175, 180, 5), 2);
    }
    // Each connector is a line; a filled dot at the target end encodes its type.
    ArrayList<Conector> conectores = diagrama.getConectores();
    for (Conector c : conectores) {
        Core.line(grafo, c.getDesde(), c.getHasta(), new Scalar(180, 170, 5), 2);
        String tipo = c.getTipo();
        switch (tipo) {
        case "Usa":
            Core.circle(grafo, c.getHasta(), 6, new Scalar(0, 0, 255), -1);
            break;
        case "Agregacion":
            Core.circle(grafo, c.getHasta(), 6, new Scalar(255, 255, 0), -1);
            break;
        case "Extension":
            Core.circle(grafo, c.getHasta(), 6, new Scalar(255, 0, 0), -1);
            break; // explicit break guards against fall-through if cases are added later
        }
    }
    return grafo;
}

From source file:simeav.Utils.java

/**
 * Computes the centroid of every contour from its spatial moments
 * (m10/m00, m01/m00).
 *
 * @param contornos contours to process
 * @return one centroid per contour, in the same order
 */
public static ArrayList<Point> getCentros(ArrayList<MatOfPoint> contornos) {
    ArrayList<Point> centros = new ArrayList<>(contornos.size());
    for (MatOfPoint contorno : contornos) {
        Moments momentos = Imgproc.moments(contorno, false);
        double cx = momentos.get_m10() / momentos.get_m00();
        double cy = momentos.get_m01() / momentos.get_m00();
        centros.add(new Point(cx, cy));
    }
    return centros;
}

From source file:src.main.java.org.roomwatcher.watcher.Processor.java

/**
 * Detects faces in the given frame and draws an ellipse around each one.
 * Also publishes the face count to the UI label.
 *
 * @param inputframe the BGR camera frame; not modified
 * @return a copy of the frame with detections drawn on it
 */
public Mat detect(Mat inputframe) {
    Mat mRgba = new Mat();
    Mat mGrey = new Mat();
    MatOfRect faces = new MatOfRect();

    inputframe.copyTo(mRgba);
    // No copy into mGrey needed: cvtColor below allocates/overwrites it entirely.
    Imgproc.cvtColor(mRgba, mGrey, Imgproc.COLOR_BGR2GRAY);
    // Histogram equalization improves cascade detection under uneven lighting.
    Imgproc.equalizeHist(mGrey, mGrey);

    face_cascade.detectMultiScale(mGrey, faces);
    Window.setPeopleNumberLabelValue(String.valueOf(faces.toArray().length));

    for (Rect rect : faces.toArray()) {
        Point center = new Point(rect.x + rect.width * 0.5, rect.y + rect.height * 0.5);
        Core.ellipse(mRgba, center, new Size(rect.width * 0.5, rect.height * 0.5), 0, 0, 360,
                new Scalar(255, 0, 255), 4, 8, 0);
    }

    return mRgba;
}

From source file:src.model.filters.DotsFilter.java

/**
 * Applies a "dots" halftone-style filter to the uploaded image: the source is
 * morphologically closed, then masked by a grid of white circles, and the
 * result is written next to the original with a "_DOTS_temp" suffix.
 *
 * @param request  carries the "name" parameter (original image file name)
 * @param response receives the filtered image via publishImg
 */
public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    System.out.println("**______________DOTS_______________**");

    try {
        String imgInput = request.getParameter("name").toString();
        String savePath = savePath(request);
        //____________________________________
        int elementSize = 2;
        int bsize = 10; // dot-grid cell size in pixels
        Mat source = Imgcodecs.imread(savePath);

        Mat dst = zeros(source.size(), CV_8UC3);
        Mat cir = zeros(source.size(), CV_8UC1); // binary circle mask
        Mat element = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT,
                new Size(elementSize * 3 + 1, elementSize * 3 + 1), new Point(elementSize, elementSize));

        // Paint one filled anti-aliased circle per bsize x bsize cell.
        for (int i = 0; i < source.rows(); i += bsize) {
            for (int j = 0; j < source.cols(); j += bsize) {
                circle(cir, new Point(j + bsize / (double) 2, i + bsize / (double) 2), bsize / 2 - 1,
                        new Scalar(255, 255, 255), -1, -1, Core.LINE_AA);
            }
        }

        Imgproc.morphologyEx(source, dst, Imgproc.MORPH_CLOSE, element);

        // Convert mask and image to float so the per-channel multiply acts as
        // a soft (anti-aliased) stencil.
        Mat cir_32f = new Mat(source.rows(), source.cols(), CV_32F);
        cir.convertTo(cir_32f, CV_32F);
        normalize(cir_32f, cir_32f, 0, 1, NORM_MINMAX);

        Mat dst_32f = new Mat(source.rows(), source.cols(), CV_32F);
        dst.convertTo(dst_32f, CV_32F);

        Vector<Mat> channels = new Vector<>();
        split(dst_32f, channels);
        System.out.println(channels.size());
        for (int i = 0; i < channels.size(); ++i) {
            channels.set(i, channels.get(i).mul(cir_32f));
        }
        merge(channels, dst_32f);
        dst_32f.convertTo(dst, CV_8U);

        String output = savePath.substring(0, savePath.lastIndexOf(".")) + "_DOTS_temp.jpg";
        String imgOutput = imgInput.substring(0, imgInput.lastIndexOf(".")) + "_DOTS_temp.jpg";
        Imgcodecs.imwrite(output, dst);

        //____________________________________
        System.out.println("output: " + output);
        System.out.println("imgOutput: " + imgOutput);

        publishImg(response, imgOutput);

    } catch (Exception e) {
        System.out.println("Error: " + e.getMessage());
    }
}

From source file:src.model.filters.MorphFilter.java

/**
 * Applies a morphological-gradient filter (edge-like outlines) to the uploaded
 * image and writes the result next to the original with a "_MORPH_temp" suffix.
 *
 * @param request  carries the "name" parameter (original image file name)
 * @param response receives the filtered image via publishImg
 */
public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    System.out.println("**______________MORPH_______________**");

    try {
        String imgInput = request.getParameter("name").toString();
        String savePath = savePath(request);
        //____________________________________

        int elementSize = 9; // structuring-element radius

        Mat source = Imgcodecs.imread(savePath);
        Mat destination = new Mat(source.rows(), source.cols(), source.type());
        Mat element = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT,
                new Size(elementSize * 2 + 1, elementSize * 2 + 1), new Point(elementSize, elementSize));
        // Gradient = dilation - erosion: highlights object boundaries.
        Imgproc.morphologyEx(source, destination, Imgproc.MORPH_GRADIENT, element);

        String output = savePath.substring(0, savePath.lastIndexOf(".")) + "_MORPH_temp.jpg";
        // imgInput already holds the "name" parameter; no need to re-read it.
        String imgOutput = imgInput.substring(0, imgInput.lastIndexOf(".")) + "_MORPH_temp.jpg";
        Imgcodecs.imwrite(output, destination);

        //____________________________________
        System.out.println("output: " + output);
        System.out.println("imgOutput: " + imgOutput);

        publishImg(response, imgOutput);

    } catch (Exception e) {
        System.out.println("Error: " + e.getMessage());
    }
}

From source file:syncleus.dann.data.video.BoundingBox.java

License:Apache License

/**
 * Generates an evenly spaced grid of points inside this bounding box, keeping
 * POINTS_MARGIN_H / POINTS_MARGIN_V pixels of margin from the edges.
 *
 * Fix: the step was previously computed with integer division *inside*
 * Math.ceil (making the ceil a no-op), and a box narrower/shorter than twice
 * its margin produced a zero step and an infinite loop. The division is now
 * done in floating point and the step clamped to at least 1.
 *
 * @return the grid points (roughly POINTS_MAX_COUNT per axis at most)
 */
public Point[] points() {
    final List<Point> result = new ArrayList<Point>();
    final int stepx = Math.max(1, (int) Math.ceil((width - 2.0 * POINTS_MARGIN_H) / POINTS_MAX_COUNT));
    final int stepy = Math.max(1, (int) Math.ceil((height - 2.0 * POINTS_MARGIN_V) / POINTS_MAX_COUNT));
    for (int j = y + POINTS_MARGIN_V; j < y + height - POINTS_MARGIN_V; j += stepy) {
        for (int i = x + POINTS_MARGIN_H; i < x + width - POINTS_MARGIN_H; i += stepx) {
            result.add(new Point(i, j));
        }
    }
    System.out.println(
            "Points in BB: " + this + " stepx=" + stepx + " stepy=" + stepy + " RES size=" + result.size());
    return result.toArray(new Point[result.size()]);
}

From source file:syncleus.dann.data.video.PatchGenerator.java

License:Apache License

/**
 * Warps a patch of the given image around {@code pt} using a random affine
 * transform, writing the result into {@code patch}.
 */
public void generate(final Mat image, Point pt, Mat patch, Size patchSize, final RNG rng) {
    final Mat transform = new MatOfDouble();
    final Point patchCenter = new Point((patchSize.width - 1) * 0.5, (patchSize.height - 1) * 0.5);

    // TODO why is inverse not specified in the original C++ code
    generateRandomTransform(pt, patchCenter, transform, false);

    generate(image, transform, patch, patchSize, rng);
}

From source file:syncleus.dann.data.video.Tld.java

License:Apache License

/**
 * Generate Positive data.
 * Inputs: 
 * - good_boxes 
 * - best_box 
 * - bbhull
 * Outputs: 
 * - Positive fern features (pFerns) 
 * - Positive NN examples (pExample)
 */
public void generatePositiveData(final Mat frame, final int numWarps, final Grid aGrid) {
    // Build the positive NN example from the best box (zero-mean, unit-stdev resize).
    resizeZeroMeanStdev(frame.submat(aGrid.getBestBox()), _pExample, _params.patch_size);
    //Get Fern features on warped patches
    final Mat img = new Mat();
    Imgproc.GaussianBlur(frame, img, new Size(9, 9), 1.5);
    final BoundingBox bbhull = aGrid.getBBhull();
    final Mat warped = img.submat(bbhull);
    // centre of the hull
    final Point pt = new Point(bbhull.x + (bbhull.width - 1) * 0.5f, bbhull.y + (bbhull.height - 1) * 0.5f);

    _pFerns.clear();
    _pPatterns.clear();

    for (int i = 0; i < numWarps; i++) {
        if (i > 0) {
            // this is important as it introduces the necessary noise / fuziness in the initial examples such that the Fern classifier recognises similar shapes not only Exact ones ! 
            // warped is a reference to a subset of the img data, so this will affect the img object
            _patchGenerator.generate(frame, pt, warped, bbhull.size(), _rng);
        }

        // For every good box, extract its (possibly warped) patch from img and
        // record its fern hash codes as a positive training pair.
        final BoundingBox[] goodBoxes = aGrid.getGoodBoxes();
        for (BoundingBox goodBox : goodBoxes) {
            final Mat patch = img.submat(goodBox);
            final int[] allFernsHashCodes = _classifierFern.getAllFernsHashCodes(patch, goodBox.scaleIdx);
            _pFerns.add(new Pair<int[], Boolean>(allFernsHashCodes, true));

            //            // this will be used for display only
            //            final Mat tempPattern = new Mat();
            //            Imgproc.resize(patch, tempPattern, new Size(_params.patch_size, _params.patch_size));
            //            _pPatterns.add(tempPattern);
        }
    }

    System.out.println("Positive examples generated( ferns: " + _pFerns.size() + " NN: 1/n )");
}

From source file:syncleus.dann.data.video.TLDView.java

License:Apache License

/**
 * Android camera-view constructor: loads TLD tracker parameters from the raw
 * "parameters" resource and installs a touch listener that lets the user drag
 * out the initial bounding box to track.
 */
public TLDView(Context context, AttributeSet attrs) {
    super(context, attrs);
    _holder = getHolder();

    // Init the PROPERTIES
    InputStream propsIS = null;
    try {
        propsIS = context.getResources().openRawResource(R.raw.parameters);
        _tldProperties = new Properties();
        _tldProperties.load(propsIS);
    } catch (IOException e) {
        Log.e(TLDUtil.TAG, "Can't load properties", e);
    } finally {
        // Always release the resource stream, even if loading failed.
        if (propsIS != null) {
            try {
                propsIS.close();
            } catch (IOException e) {
                Log.e(TLDUtil.TAG, "Can't close props", e);
            }
        }
    }

    // listens to its own events
    setCvCameraViewListener(this);

    // DEBUG
    //_trackedBox = new BoundingBox(165,93,51,54, 0, 0);

    // LISTEN for touches of the screen, to define the BOX to be tracked
    // First corner is captured on ACTION_DOWN; the box is finalized on ACTION_UP.
    final AtomicReference<Point> trackedBox1stCorner = new AtomicReference<Point>();
    final Paint rectPaint = new Paint();
    rectPaint.setColor(Color.rgb(0, 255, 0));
    rectPaint.setStrokeWidth(5);
    rectPaint.setStyle(Style.STROKE);

    setOnTouchListener(new OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            // re-init: starting a new selection discards any previous tracker state
            _errMessage = null;
            _tld = null;

            // Touch coordinates are translated into image space via the canvas offsets.
            final Point corner = new Point(event.getX() - _canvasImgXOffset, event.getY() - _canvasImgYOffset);
            switch (event.getAction()) {
            case MotionEvent.ACTION_DOWN:
                trackedBox1stCorner.set(corner);
                System.out.println("1st corner: " + corner);
                break;
            case MotionEvent.ACTION_UP:
                _trackedBox = new Rect(trackedBox1stCorner.get(), corner);
                System.out.println("Tracked box DEFINED: " + _trackedBox);
                break;
            case MotionEvent.ACTION_MOVE:
                // Live feedback: draw the in-progress selection rectangle on the overlay canvas.
                final android.graphics.Rect rect = new android.graphics.Rect(
                        (int) trackedBox1stCorner.get().x + _canvasImgXOffset,
                        (int) trackedBox1stCorner.get().y + _canvasImgYOffset,
                        (int) corner.x + _canvasImgXOffset, (int) corner.y + _canvasImgYOffset);
                final Canvas canvas = _holder.lockCanvas(rect);
                canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR); // remove old rectangle
                canvas.drawRect(rect, rectPaint);
                _holder.unlockCanvasAndPost(canvas);
                break;
            }

            return true;
        }
    });
}

From source file:syncleus.dann.data.video.TLDView.java

License:Apache License

/**
 * Per-frame callback: runs the TLD tracker on a scaled-down copy of the frame.
 * On the first frame after a box is selected the tracker is initialized; on
 * subsequent frames it is advanced and its points/box are drawn (scaled back
 * up) onto the original frame. Any failure is rendered as red text.
 */
@Override
public Mat onCameraFrame(Mat originalFrame) {
    try {
        // Image is too big and this requires too much CPU for a phone, so scale everything down...
        Imgproc.resize(originalFrame, _workingFrame, WORKING_FRAME_SIZE);
        // Ratio between original and working sizes, used to scale boxes/points back up.
        final Size workingRatio = new Size(originalFrame.width() / WORKING_FRAME_SIZE.width,
                originalFrame.height() / WORKING_FRAME_SIZE.height);
        // useful to see what we're actually working with: blit the working frame
        // into the bottom-left corner of the displayed frame
        _workingFrame.copyTo(originalFrame.submat(originalFrame.rows() - _workingFrame.rows(),
                originalFrame.rows(), 0, _workingFrame.cols()));

        if (_trackedBox != null) {
            if (_tld == null) { // run the 1st time only
                Imgproc.cvtColor(_workingFrame, _lastGray, Imgproc.COLOR_RGB2GRAY);
                _tld = new Tld(_tldProperties);
                final Rect scaledDownTrackedBox = scaleDown(_trackedBox, workingRatio);
                System.out.println("Working Ration: " + workingRatio + " / Tracking Box: " + _trackedBox
                        + " / Scaled down to: " + scaledDownTrackedBox);
                try {
                    _tld.init(_lastGray, scaledDownTrackedBox);
                } catch (Exception eInit) {
                    // start from scratch, you have to select an init box again !
                    _trackedBox = null;
                    _tld = null;
                    throw eInit; // re-throw it as it will be dealt with later
                }
            } else {
                Imgproc.cvtColor(_workingFrame, _currentGray, Imgproc.COLOR_RGB2GRAY);

                // Advance the tracker: red = last points, green = current points,
                // blue = current bounding box (scaled up to display coordinates).
                _processFrameStruct = _tld.processFrame(_lastGray, _currentGray);
                drawPoints(originalFrame, _processFrameStruct.lastPoints, workingRatio, new Scalar(255, 0, 0));
                drawPoints(originalFrame, _processFrameStruct.currentPoints, workingRatio,
                        new Scalar(0, 255, 0));
                drawBox(originalFrame, scaleUp(_processFrameStruct.currentBBox, workingRatio),
                        new Scalar(0, 0, 255));

                // Current frame becomes the reference for the next iteration.
                _currentGray.copyTo(_lastGray);

                // overlay the current positive examples on the real image(needs converting at the same time !)
                //copyTo(_tld.getPPatterns(), originalFrame);
            }
        }
    } catch (Exception e) {
        // Remember the failure so it can be rendered on the frame below.
        _errMessage = e.getClass().getSimpleName() + " / " + e.getMessage();
        Log.e(TLDUtil.TAG, "TLDView PROBLEM", e);
    }

    if (_errMessage != null) {
        Core.putText(originalFrame, _errMessage, new Point(0, 300), Core.FONT_HERSHEY_PLAIN, 1.3d,
                new Scalar(255, 0, 0), 2);
    }

    return originalFrame;
}