Example usage for org.opencv.core Mat toString

List of usage examples for org.opencv.core Mat toString

Introduction

On this page you can find example usage for org.opencv.core Mat toString.

Prototype

@Override
public String toString()
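
A minimal sketch of calling Mat.toString (assuming the OpenCV Java bindings and the native library are installed; the exact fields in the output vary by OpenCV version):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatToStringDemo {
    public static void main(String[] args) {
        // The native library must be on java.library.path.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat mat = Mat.zeros(3, 4, CvType.CV_8UC3);
        // Prints the Mat header: dimensions, type and internal pointers, e.g.
        // Mat [ 3*4*CV_8UC3, isCont=true, isSubmat=false, nativeObj=0x..., dataAddr=0x... ]
        System.out.println(mat.toString());
    }
}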


Usage

From source file:cv.faceRecognize.faceRecognizer.java

public void facesFromPhotos() {
    fileHandler.collecter(path);
    ArrayList<Mat> images = fileHandler.getImages();
    System.out.println(images.size());

    for (Mat image : images) {
        // Log the Mat header (dimensions, type, native pointers) for each photo.
        System.out.println(image.toString());

        // Convert the Mat once, display it, then run face detection on it.
        BufferedImage bufferedImage = im.Mat2BufferedImage(image);
        im.displayImage(bufferedImage);
        faces.add(im.getFaceDetec(image));
    }
}

From source file:cx.uni.jk.mms.iaip.tools.SimpleBrushTool.java

License:Open Source License

@Override
public Rect apply(Mat mat, BrushModel brush, int x, int y, boolean inverseEffect) {

    Rect changedArea = null;

    try {
        this.logger.finer(String.format("apply mode=\"%s\" inverse=%s, size=%d, strength=%d", brush.getMode(),
                inverseEffect, brush.getSize(), brush.getValue()));

        this.logger.finest("mat    = " + mat.toString());

        /** where is brush going to work? this may reach outside the mat! */
        int brushColStart = x - (brush.getSize() - 1) / 2;
        int brushColEnd = x + brush.getSize() / 2;
        int brushRowStart = y - (brush.getSize() - 1) / 2;
        int brushRowEnd = y + brush.getSize() / 2;

        if (brushColEnd >= 0 && brushColStart < mat.cols() && brushRowEnd >= 0 && brushRowStart < mat.rows()) {

            /** calculate bounds for roiMat to fit into original mat */
            int subColStart = Math.max(0, brushColStart);
            int subColEnd = Math.min(brushColEnd, mat.cols() - 1);
            int subRowStart = Math.max(0, brushRowStart);
            int subRowEnd = Math.min(brushRowEnd, mat.rows() - 1);

            /**
             * The caller may want to know which area changed. Note that the
             * Rect constructor treats the second point as exclusive: a one
             * pixel rectangle Rect(Point(a,b), Point(a+1,b+1)) has width and
             * height 1. See
             * http://docs.opencv.org/java/org/opencv/core/Rect.html
             */
            changedArea = new Rect(new Point(subColStart, subRowStart),
                    new Point(subColEnd + 1, subRowEnd + 1));

            /**
             * get the part of the original mat which is going to be affected
             * by the change
             */
            Mat roiMat = mat.submat(subRowStart, subRowEnd + 1, subColStart, subColEnd + 1);
            this.logger.finest("matRoi = " + roiMat.toString());

            /** does the brush fit into the roiMat we shall work on? */
            boolean brushFits = brushColStart == subColStart && brushColEnd == subColEnd
                    && brushRowStart == subRowStart && brushRowEnd == subRowEnd;

            this.logger.finest("brush fits = " + brushFits);

            /**
             * make sure to have a working mat which matches the full brush
             * size
             */
            Mat workMat, workRoi = null;
            if (brushFits) {
                /** just work in the original mat area defined by roi */
                workMat = roiMat;
            } else {
                /** create a new mat as big as the brush */
                workMat = Mat.zeros(brush.getSize(), brush.getSize(), MatModel.MAT_TYPE);
                this.logger.finest("workMat= " + workMat.toString());
                /**
                 * create an ROI in the workMat as big as the roiMat, with the
                 * offset corrected so the brush acts in the middle
                 */
                int roiColStart = subColStart - brushColStart;
                int roiColEnd = roiColStart + roiMat.cols();
                int roiRowStart = subRowStart - brushRowStart;
                int roiRowEnd = roiRowStart + roiMat.rows();

                workRoi = workMat.submat(roiRowStart, roiRowEnd, roiColStart, roiColEnd);
                this.logger.finest("workRoi= " + workRoi.toString());
                roiMat.copyTo(workRoi);
                this.logger.finest("workRoi= " + workRoi.toString());

                // workRoi.put(0, 0, 1333.0d);
                this.logger.finest("roiMat  dump1 " + roiMat.dump());
                this.logger.finest("workRoi dump1 " + workRoi.dump());
                this.logger.finest("workMat dump1 " + workMat.dump());
            }

            /** the real action */
            this.applyToWorkMat(brush, inverseEffect, workMat);

            this.logger.finest("workMat dump2 " + workMat.dump());
            this.logger.finest("matRoi  dump2 " + roiMat.dump());

            if (!brushFits) {
                /**
                 * if the brush fits we have been working directly in the
                 * original mat and there is nothing to do; otherwise copy the
                 * roi of the changed workMat back into the roi of the
                 * original mat
                 */
                this.logger.finest("workRoi dump2 " + workRoi.dump());
                // workRoi.put(0, 0, 1338);
                this.logger.finest("workRoi dump3 " + workRoi.dump());
                this.logger.finest("matRoi = " + roiMat.toString());
                workRoi.copyTo(roiMat);
                this.logger.finest("matRoi = " + roiMat.toString());
            }
            this.logger.finest("matRoi  dump3 " + roiMat.dump());
        }

    } catch (CvException e) {
        /** never mind if the user does not notice */
        this.logger.fine(java.util.Arrays.toString(e.getStackTrace()));
    }

    /** let the caller know which area has potentially been changed */
    return changedArea;
}
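
The Rect and submat arithmetic above relies on the end row/column being exclusive. A standalone sketch of that convention (class and file names are illustrative, not from the source above):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;

public class SubmatRectDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat mat = Mat.zeros(10, 10, CvType.CV_32FC1);

        // The second Point is exclusive: this Rect covers a 3x3 area.
        Rect changedArea = new Rect(new Point(2, 2), new Point(5, 5));
        System.out.println("changedArea = " + changedArea);

        // submat(rowStart, rowEnd, colStart, colEnd) uses the same half-open ranges
        // and shares data with the parent, so writing to the roi changes mat.
        Mat roi = mat.submat(2, 5, 2, 5);
        roi.setTo(new Scalar(1.0));

        System.out.println("roi = " + roi.toString());
        System.out.println(mat.dump());
    }
}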

From source file:fr.olympicinsa.riocognized.facedetector.tools.ImageConvertor.java

/**
 * Converts/writes a Mat into a BufferedImage.
 *
 * @param matrix Mat of type CV_8UC3 or CV_8UC1
 * @return BufferedImage of type TYPE_3BYTE_BGR or TYPE_BYTE_GRAY
 */
public static BufferedImage matToBufferedImage(Mat matrix) {
    log.debug("****** MatToBuffered Image **********");
    log.debug("input : " + matrix.toString());
    int cols = matrix.cols();
    int rows = matrix.rows();
    int elemSize = (int) matrix.elemSize();
    byte[] data = new byte[cols * rows * elemSize];
    int type;

    matrix.get(0, 0, data);

    switch (matrix.channels()) {
    case 1:
        type = BufferedImage.TYPE_BYTE_GRAY;
        break;

    case 3:
        type = BufferedImage.TYPE_3BYTE_BGR;

        // bgr to rgb
        byte b;
        for (int i = 0; i < data.length; i = i + 3) {
            b = data[i];
            data[i] = data[i + 2];
            data[i + 2] = b;
        }
        break;

    default:
        return null;
    }

    BufferedImage image = new BufferedImage(cols, rows, type);
    image.getRaster().setDataElements(0, 0, cols, rows, data);
    log.debug("type: " + type);
    log.debug("output:" + image.toString());
    log.debug("***********************************");
    return image;
}
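
A hypothetical caller of matToBufferedImage; the Imgcodecs loader (OpenCV 3.x+), the file names, and the demo class are assumptions:

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

public class MatToPngDemo {
    public static void main(String[] args) throws IOException {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // imread returns a CV_8UC3 Mat in BGR order.
        Mat mat = Imgcodecs.imread("input.jpg");
        System.out.println("loaded: " + mat.toString());

        BufferedImage img = ImageConvertor.matToBufferedImage(mat);
        if (img != null) {
            ImageIO.write(img, "png", new File("output.png"));
        }
    }
}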

From source file:fr.olympicinsa.riocognized.facedetector.tools.ImageConvertor.java

/**
 * Converts/writes a BufferedImage into a Mat.
 *
 * @param image BufferedImage of type TYPE_3BYTE_BGR
 * @return Mat image of type CV_8UC3
 */
public static Mat bufferedImagetoMat(BufferedImage image) {
    log.debug("********bufferedImageToMat *********");
    log.debug("input : " + image.toString());
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] data = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    Mat mat = new Mat(height, width, CvType.CV_8UC3);
    mat.put(0, 0, data);
    log.debug("output : " + mat.toString());
    log.debug("***********************************");
    return mat;
}
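
Going the other way, a hypothetical caller of bufferedImagetoMat; since the method assumes TYPE_3BYTE_BGR backing data, an image read by ImageIO is redrawn into that type first (file name and demo class are placeholders):

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;
import org.opencv.core.Core;
import org.opencv.core.Mat;

public class BufferedImageToMatDemo {
    public static void main(String[] args) throws IOException {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        BufferedImage raw = ImageIO.read(new File("input.jpg"));

        // Redraw into a TYPE_3BYTE_BGR image so the DataBufferByte cast in
        // bufferedImagetoMat is guaranteed to succeed.
        BufferedImage bgr = new BufferedImage(raw.getWidth(), raw.getHeight(), BufferedImage.TYPE_3BYTE_BGR);
        bgr.getGraphics().drawImage(raw, 0, 0, null);

        Mat mat = ImageConvertor.bufferedImagetoMat(bgr);
        System.out.println(mat.toString());
    }
}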

From source file:fr.olympicinsa.riocognized.facedetector.tools.ImageConvertor.java

/**
 * Converts a Mat into an IplImage (grayscale, depth = 8, 1 channel).
 *
 * @param matImage Mat of type CV_8UC3
 * @return IplImage iplImage (IPL_DEPTH_8U, 1 channel)
 */
public static IplImage matToIplImage(Mat matImage) {
    log.debug("********** matToIplImage starting **********");
    log.debug("input:" + matImage.toString());
    IplImage image8UC3 = IplImage.createFrom(ImageConvertor.matToBufferedImage(matImage));
    IplImage resized = cvCreateImage(cvSize(image8UC3.width(), image8UC3.height()), IPL_DEPTH_8U, 1);
    if (image8UC3.nChannels() > 1)
        cvCvtColor(image8UC3, resized, CV_BGR2GRAY);
    else
        resized = image8UC3;
    log.debug("output: " + resized.toString());
    log.debug("*****************************************\n");
    return resized;
}
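
A brief, hypothetical caller of matToIplImage, bridging an OpenCV Mat into legacy JavaCV code; the Imgcodecs loader, the file name, and the IplImage import path (which differs between JavaCV packagings) are assumptions:

import org.bytedeco.javacpp.opencv_core.IplImage;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

public class MatToIplImageDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Load a BGR image with OpenCV and convert it to a single-channel, 8-bit IplImage.
        Mat frame = Imgcodecs.imread("face.jpg");
        IplImage gray = ImageConvertor.matToIplImage(frame);
        System.out.println("converted to " + gray.width() + "x" + gray.height() + " IplImage");
    }
}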

From source file:frclib.FrcFaceDetector.java

License:Open Source License

/**
 * This method is called to detect objects in the image frame.
 *
 * @param image specifies the image to be processed.
 * @param detectedObjects specifies the preallocated rectangle array to hold the detected objects.
 * @return detected objects, null if none detected.
 */
@Override
public MatOfRect detectObjects(Mat image, MatOfRect detectedObjects) {
    final String funcName = "detectObjects";

    if (debugEnabled) {
        dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.CALLBK, "image=%s,objRects=%s", image.toString(),
                detectedObjects.toString());
    }

    faceDetector.detectMultiScale(image, detectedObjects);
    if (!detectedObjects.empty()) {
        faceRects = detectedObjects.toArray();
    } else {
        faceRects = null;
        detectedObjects = null;
    }

    if (videoOutEnabled) {
        putFrame();
    }

    currImage = image;

    if (debugEnabled) {
        dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.CALLBK, "=%s",
                Boolean.toString(detectedObjects != null));
    }

    return detectedObjects;
}
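
For comparison, a minimal standalone sketch of the same detectMultiScale pattern without the FRC wrapper; the cascade file, image path, and demo class are placeholders:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.objdetect.CascadeClassifier;

public class DetectFacesDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        CascadeClassifier faceDetector = new CascadeClassifier("haarcascade_frontalface_default.xml");
        Mat image = Imgcodecs.imread("group_photo.jpg");
        System.out.println("image = " + image.toString());

        MatOfRect detectedObjects = new MatOfRect();
        faceDetector.detectMultiScale(image, detectedObjects);

        for (Rect face : detectedObjects.toArray()) {
            System.out.println("face at " + face);
        }
    }
}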