List of usage examples for org.opencv.android.Utils.matToBitmap
public static void matToBitmap(Mat mat, Bitmap bmp)
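All of the examples below share the same basic pattern, shown here as a minimal, hypothetical sketch (the Mat name, dimensions, and fill color are illustrative): the destination Bitmap must use the ARGB_8888 (or RGB_565) config and match the Mat's dimensions, and the Mat must be 8-bit with 1, 3, or 4 channels, otherwise matToBitmap throws.

    // Minimal usage sketch; "rgba" is a hypothetical 8-bit RGBA Mat.
    Mat rgba = new Mat(480, 640, CvType.CV_8UC4, new Scalar(255, 0, 0, 255));
    // The Bitmap must match the Mat's dimensions and be ARGB_8888 (or RGB_565).
    Bitmap bmp = Bitmap.createBitmap(rgba.cols(), rgba.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rgba, bmp);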
From source file:android.google.com.basiccamera.imageprocessing.CannyEdgeDetector.java
License:BSD License
protected void runTask() {
    Log.i(TAG, "Starting heavy image processing task");
    while (running) {
        //long begin = System.currentTimeMillis();
        // Requests a picture and blocks until it receives one.
        mTaskManager.requestPreviewFrame();
        //long end = System.currentTimeMillis();
        //Log.i(TAG, "Process took " + String.valueOf(end - begin) + " ms");
        byte[] image = getImage();
        if (image == null) {
            Log.w(TAG, "Received null as picture");
            continue; // skip this frame; decoding a null array below would throw
        }
        // Do Canny edge detection.
        Mat img = new Mat();
        Bitmap bmp = BitmapFactory.decodeByteArray(image, 0, image.length);
        Utils.bitmapToMat(bmp, img);
        Imgproc.cvtColor(img, img, Imgproc.COLOR_RGB2GRAY);
        Imgproc.blur(img, img, new Size(3, 3));
        Imgproc.Canny(img, img, 20, 100);
        Utils.matToBitmap(img, bmp);
        mTaskManager.drawResult(bmp);
    }
}
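A note on the snippet above: Utils.bitmapToMat produces a 4-channel RGBA Mat (CV_8UC4). COLOR_RGB2GRAY happens to accept 4-channel input as well, but COLOR_RGBA2GRAY states the intent explicitly. A minimal sketch of the round trip, assuming bmp is an ARGB_8888 Bitmap:

    Mat img = new Mat();
    Utils.bitmapToMat(bmp, img);                          // yields CV_8UC4 (RGBA)
    Imgproc.cvtColor(img, img, Imgproc.COLOR_RGBA2GRAY);  // CV_8UC1 grayscale
    Utils.matToBitmap(img, bmp);                          // single-channel 8-bit converts back fine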
From source file:at.entenbaer.utils.TPAUtils.java
License:Open Source License
/**
 * Saves an OpenCV Mat to a path inside the TexturePoemApp folder in the pictures directory.
 * @param mat image that should be saved
 * @param path path where the image should be saved inside the TexturePoemApp folder
 */
public static void saveMatToBitmap(Mat mat, String path) {
    if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
        String galleryPath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).toString();
        Log.d("galleryPath", galleryPath);
        Bitmap b = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(mat, b);
        File album = new File(galleryPath + "/TexturePoemApp");
        if (!album.isDirectory()) {
            album.mkdirs();
        }
        File f = new File(galleryPath + "/TexturePoemApp/" + path);
        try {
            FileOutputStream fo = new FileOutputStream(f);
            b.compress(Bitmap.CompressFormat.JPEG, 100, fo);
            fo.flush();
            fo.close();
        } catch (IOException e) {
            Log.e("IOException", "not saved");
            e.printStackTrace();
        }
    } else {
        Log.d("Env", "not mounted");
    }
}
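matToBitmap interprets the Mat's channels as RGB(A). A Mat loaded through OpenCV's own I/O is in BGR order and would come out with red and blue swapped, so it needs an extra conversion first. A sketch, assuming the 2.4-era API these examples use (Imgcodecs.imread in OpenCV 3+) and a hypothetical file path:

    Mat bgr = Highgui.imread("/sdcard/some_image.jpg");   // hypothetical path; imread returns BGR
    Mat rgba = new Mat();
    Imgproc.cvtColor(bgr, rgba, Imgproc.COLOR_BGR2RGBA);  // reorder channels before conversion
    Bitmap b = Bitmap.createBitmap(rgba.cols(), rgba.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rgba, b);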
From source file:ch.zhaw.facerecognitionlibrary.Helpers.FileHelper.java
License:Open Source License
public String saveMatToImage(MatName m, String path) {
    String fullpath = path + m.getName() + ".png";
    Mat mat = m.getMat();
    Bitmap bitmap = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(mat, bitmap);
    File file = new File(fullpath);
    try {
        FileOutputStream os = new FileOutputStream(file);
        bitmap.compress(Bitmap.CompressFormat.PNG, 100, os);
        os.close();
    } catch (IOException e) { // also covers FileNotFoundException
        e.printStackTrace();
    }
    return fullpath;
}
From source file:ch.zhaw.facerecognitionlibrary.Recognition.TensorFlow.java
License:Open Source License
public Mat getFeatureVector(Mat img) {
    Imgproc.resize(img, img, new Size(inputSize, inputSize));
    Bitmap bmp = Bitmap.createBitmap(inputSize, inputSize, Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(img, bmp);
    String[] sVector = classifyImageBmp(inputLayer, outputLayer, outputSize, bmp).split(STRING_SPLIT_CHARACTER);
    System.out.println(sVector.length);
    List<Float> fVector = new ArrayList<>();
    for (String s : sVector) {
        fVector.add(Float.parseFloat(s));
    }
    return Converters.vector_float_to_Mat(fVector);
}
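One design note on the example above: Imgproc.resize writes back into img, so the caller's Mat is modified as a side effect. A non-destructive variant (hypothetical method name; same assumed fields inputSize, inputLayer, outputLayer, outputSize and helper classifyImageBmp) resizes into a scratch Mat instead:

    public Mat getFeatureVectorNonDestructive(Mat img) {  // hypothetical variant name
        Mat resized = new Mat();
        Imgproc.resize(img, resized, new Size(inputSize, inputSize)); // img stays untouched
        Bitmap bmp = Bitmap.createBitmap(inputSize, inputSize, Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(resized, bmp);
        String[] sVector = classifyImageBmp(inputLayer, outputLayer, outputSize, bmp).split(STRING_SPLIT_CHARACTER);
        List<Float> fVector = new ArrayList<>();
        for (String s : sVector) {
            fVector.add(Float.parseFloat(s));
        }
        return Converters.vector_float_to_Mat(fVector);
    }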
From source file:cn.xiongyihui.webcam.JpegFactory.java
License:Open Source License
public void onPreviewFrame(byte[] data, Camera camera) {
    YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, mWidth, mHeight, null);
    mJpegOutputStream.reset();
    try {
        //Log.e(TAG, "Beginning to read values!");
        double distanceTemplateFeatures = this.globalClass.getDistanceTemplateFeatures();
        double xTemplateCentroid = this.globalClass.getXtemplateCentroid();
        double yTemplateCentroid = this.globalClass.getYtemplateCentroid();
        int x0template = this.globalClass.getX0display();
        int y0template = this.globalClass.getY0display();
        int x1template = this.globalClass.getX1display();
        int y1template = this.globalClass.getY1display();
        Mat templateDescriptor = this.globalClass.getTemplateDescriptor();
        MatOfKeyPoint templateKeyPoints = this.globalClass.getKeyPoints();
        KeyPoint[] templateKeyPointsArray = templateKeyPoints.toArray();
        int numberOfTemplateFeatures = this.globalClass.getNumberOfTemplateFeatures();
        int numberOfPositiveTemplateFeatures = this.globalClass.getNumberOfPositiveTemplateFeatures();
        KeyPoint[] normalisedTemplateKeyPoints = this.globalClass.getNormalisedTemplateKeyPoints();
        double normalisedXcentroid = this.globalClass.getNormalisedXcentroid();
        double normalisedYcentroid = this.globalClass.getNormalisedYcentroid();
        int templateCapturedBitmapWidth = this.globalClass.getTemplateCapturedBitmapWidth();
        int templateCapturedBitmapHeight = this.globalClass.getTemplateCapturedBitmapHeight();
        //Log.e(TAG, "Ended reading values!");
        globalClass.setJpegFactoryDimensions(mWidth, mHeight);
        double scalingRatio, scalingRatioHeight, scalingRatioWidth;
        scalingRatioHeight = (double) mHeight / (double) templateCapturedBitmapHeight;
        scalingRatioWidth = (double) mWidth / (double) templateCapturedBitmapWidth;
        scalingRatio = (scalingRatioHeight + scalingRatioWidth) / 2; // just to account for any minor variations
        //Log.e(TAG, "Scaling ratio:" + String.valueOf(scalingRatio));
        //Log.e("Test", "Captured Bitmap's dimensions: (" + templateCapturedBitmapHeight + "," + templateCapturedBitmapWidth + ")");

        // Scale the actual features of the image.
        int flag = this.globalClass.getFlag();
        if (flag == 0) {
            int iterate = 0;
            int iterationMax = numberOfTemplateFeatures;
            for (iterate = 0; iterate < iterationMax; iterate++) {
                Log.e(TAG, "Point detected " + iterate + ":(" + templateKeyPointsArray[iterate].pt.x + "," + templateKeyPointsArray[iterate].pt.y + ")");
                if (flag == 0) {
                    templateKeyPointsArray[iterate].pt.x = scalingRatio * (templateKeyPointsArray[iterate].pt.x + (double) x0template);
                    templateKeyPointsArray[iterate].pt.y = scalingRatio * (templateKeyPointsArray[iterate].pt.y + (double) y0template);
                }
                Log.e(TAG, "Scaled points:(" + templateKeyPointsArray[iterate].pt.x + "," + templateKeyPointsArray[iterate].pt.y + ")");
            }
            this.globalClass.setFlag(1);
        }
        templateKeyPoints.fromArray(templateKeyPointsArray);
        //Log.e(TAG, "Template-features have been scaled successfully!");
        long timeBegin = System.currentTimeMillis(); // the original cast this to int, which truncates
        Mat mYuv = new Mat(mHeight + mHeight / 2, mWidth, CvType.CV_8UC1);
        mYuv.put(0, 0, data);
        Mat mRgb = new Mat();
        Imgproc.cvtColor(mYuv, mRgb, Imgproc.COLOR_YUV420sp2RGB);
        Mat result = new Mat();
        Imgproc.cvtColor(mRgb, result, Imgproc.COLOR_RGB2GRAY);
        int detectorType = FeatureDetector.ORB;
        FeatureDetector featureDetector = FeatureDetector.create(detectorType);
        MatOfKeyPoint keypointsImage = new MatOfKeyPoint();
        featureDetector.detect(result, keypointsImage);
        KeyPoint[] imageKeypoints = keypointsImage.toArray();
        Scalar color = new Scalar(0, 0, 0);
        DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
        Mat imageDescriptor = new Mat();
        descriptorExtractor.compute(result, keypointsImage, imageDescriptor);
        // BRUTEFORCE_HAMMING apparently finds even the suspicious feature-points!
        // So, inliers and outliers can turn out to be a problem.
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
        MatOfDMatch matches = new MatOfDMatch();
        matcher.match(imageDescriptor, templateDescriptor, matches);
        //Log.e("Prasad", String.valueOf(mWidth) + "," + String.valueOf(mHeight));
        DMatch[] matchesArray = matches.toArray();
        double minimumMatchDistance = globalClass.getHammingDistance();
        int iDescriptorMax = matchesArray.length;
        int iterateDescriptor;
        double xMatchedPoint, yMatchedPoint;
        int flagDraw = Features2d.NOT_DRAW_SINGLE_POINTS;
        Point point;
        double rHigh = this.globalClass.getRhigh();
        double rLow = this.globalClass.getRlow();
        double gHigh = this.globalClass.getGhigh();
        double gLow = this.globalClass.getGlow();
        double bHigh = this.globalClass.getBhigh();
        double bLow = this.globalClass.getBlow();
        double[] colorValue;
        double red, green, blue;
        int[] featureCount;
        double xKernelSize = 9, yKernelSize = 9;
        globalClass.setKernelSize(xKernelSize, yKernelSize);
        double xImageKernelScaling, yImageKernelScaling;
        xImageKernelScaling = xKernelSize / mWidth;
        yImageKernelScaling = yKernelSize / mHeight;
        int[][] kernel = new int[(int) xKernelSize][(int) yKernelSize];
        double[][] kernelCounter = new double[(int) xKernelSize][(int) yKernelSize];
        int numberKernelMax = 10;
        globalClass.setNumberKernelMax(numberKernelMax);
        int[][][] kernelArray = new int[(int) xKernelSize][(int) yKernelSize][numberKernelMax];
        double featureImageResponse;
        double xImageCentroid, yImageCentroid;
        double xSum = 0, ySum = 0;
        double totalImageResponse = 0;
        for (iterateDescriptor = 0; iterateDescriptor < iDescriptorMax; iterateDescriptor++) {
            if (matchesArray[iterateDescriptor].distance < minimumMatchDistance) {
                // MatchedPoint: Awesome match without color feedback
                xMatchedPoint = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt.x;
                yMatchedPoint = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt.y;
                colorValue = mRgb.get((int) yMatchedPoint, (int) xMatchedPoint);
                red = colorValue[0];
                green = colorValue[1];
                blue = colorValue[2];
                int xKernelFeature, yKernelFeature;
                // Color feedback
                if ((rLow < red) & (red < rHigh) & (gLow < green) & (green < gHigh) & (bLow < blue) & (blue < bHigh)) {
                    try {
                        featureImageResponse = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].response;
                        if (featureImageResponse > 0) {
                            xSum = xSum + featureImageResponse * xMatchedPoint;
                            ySum = ySum + featureImageResponse * yMatchedPoint;
                            totalImageResponse = totalImageResponse + featureImageResponse;
                            point = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt;
                            xKernelFeature = (int) (xMatchedPoint * xImageKernelScaling);
                            yKernelFeature = (int) (yMatchedPoint * yImageKernelScaling);
                            kernelCounter[xKernelFeature][yKernelFeature]++;
                            //Core.circle(result, point, 3, color);
                        }
                    } catch (Exception e) {
                    }
                }
                //Log.e(TAG, iterateDescriptor + ": (" + xMatchedPoint + "," + yMatchedPoint + ")");
            }
        }
        int iKernel = 0, jKernel = 0;
        for (iKernel = 0; iKernel < xKernelSize; iKernel++) {
            for (jKernel = 0; jKernel < yKernelSize; jKernel++) {
                if (kernelCounter[iKernel][jKernel] > 0) {
                    kernel[iKernel][jKernel] = 1;
                } else {
                    kernel[iKernel][jKernel] = 0;
                }
            }
        }
        xImageCentroid = xSum / totalImageResponse;
        yImageCentroid = ySum / totalImageResponse;
        if ((Double.isNaN(xImageCentroid)) | (Double.isNaN(yImageCentroid))) {
            //Log.e(TAG, "Centroid is not getting detected! Increasing hamming distance (error-tolerance)!");
            globalClass.setHammingDistance((int) (minimumMatchDistance + 2));
        } else {
            //Log.e(TAG, "Centroid is getting detected! Decreasing and optimising hamming (error-tolerance)!");
            globalClass.setHammingDistance((int) (minimumMatchDistance - 1));
            int jpegCount = globalClass.getJpegFactoryCallCount();
            jpegCount++;
            globalClass.setJpegFactoryCallCount(jpegCount);
            int initialisationFlag = globalClass.getInitialisationFlag();
            int numberOfDistances = 10;
            globalClass.setNumberOfDistances(numberOfDistances);
            if ((jpegCount > globalClass.getNumberKernelMax()) & (jpegCount > numberOfDistances)) {
                globalClass.setInitialisationFlag(1);
            }
            int[][] kernelSum = new int[(int) xKernelSize][(int) yKernelSize],
                    mask = new int[(int) xKernelSize][(int) yKernelSize];
            int iJpeg, jJpeg;
            kernelSum = globalClass.computeKernelSum(kernel);
            Log.e(TAG, Arrays.deepToString(kernelSum));
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (kernelSum[iJpeg][jJpeg] > (numberKernelMax / 4)) { // meant for normalised kernel
                        mask[iJpeg][jJpeg]++;
                    }
                }
            }
            Log.e(TAG, Arrays.deepToString(mask));
            int maskedFeatureCount = 1, xMaskFeatureSum = 0, yMaskFeatureSum = 0;
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (mask[iJpeg][jJpeg] == 1) {
                        xMaskFeatureSum = xMaskFeatureSum + iJpeg;
                        yMaskFeatureSum = yMaskFeatureSum + jJpeg;
                        maskedFeatureCount++;
                    }
                }
            }
            // Cast to avoid integer division (the original truncated here).
            double xMaskMean = (double) xMaskFeatureSum / maskedFeatureCount;
            double yMaskMean = (double) yMaskFeatureSum / maskedFeatureCount;
            double xSquaredSum = 0, ySquaredSum = 0;
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (mask[iJpeg][jJpeg] == 1) {
                        xSquaredSum = xSquaredSum + (iJpeg - xMaskMean) * (iJpeg - xMaskMean);
                        ySquaredSum = ySquaredSum + (jJpeg - yMaskMean) * (jJpeg - yMaskMean);
                    }
                }
            }
            double xRMSscaled = Math.sqrt(xSquaredSum);
            double yRMSscaled = Math.sqrt(ySquaredSum);
            double RMSimage = ((xRMSscaled / xImageKernelScaling) + (yRMSscaled / yImageKernelScaling)) / 2;
            Log.e(TAG, "RMS radius of the image: " + RMSimage);
            /*
            // Command the quadcopter and send PWM values to Arduino
            double throttlePWM = 1500, yawPWM = 1500, pitchPWM = 1500;
            double deltaThrottle = 1, deltaYaw = 1, deltaPitch = 1;
            throttlePWM = globalClass.getThrottlePWM();
            pitchPWM = globalClass.getPitchPWM();
            yawPWM = globalClass.getYawPWM();
            deltaThrottle = globalClass.getThrottleDelta();
            deltaPitch = globalClass.getPitchDelta();
            deltaYaw = globalClass.getYawDelta();
            if (yImageCentroid > yTemplateCentroid) {
                throttlePWM = throttlePWM + deltaThrottle;
            } else {
                throttlePWM = throttlePWM - deltaThrottle;
            }
            if (RMSimage > distanceTemplateFeatures) {
                pitchPWM = pitchPWM + deltaPitch;
            } else {
                pitchPWM = pitchPWM - deltaPitch;
            }
            if (xImageCentroid > xTemplateCentroid) {
                yawPWM = yawPWM + deltaYaw;
            } else {
                yawPWM = yawPWM - deltaYaw;
            }
            if (1000 > throttlePWM) { throttlePWM = 1000; }
            if (2000 < throttlePWM) { throttlePWM = 2000; }
            if (1000 > pitchPWM) { pitchPWM = 1000; }
            if (2000 < pitchPWM) { pitchPWM = 2000; }
            if (1000 > yawPWM) { yawPWM = 1000; }
            if (2000 < yawPWM) { yawPWM = 2000; }
            globalClass.setPitchPWM(pitchPWM);
            globalClass.setYawPWM(yawPWM);
            globalClass.setThrottlePWM(throttlePWM);
            */
            // Display bounding circle
            int originalWidthBox = x1template - x0template;
            int originalHeightBox = y1template - y0template;
            double scaledBoundingWidth = (originalWidthBox * RMSimage / distanceTemplateFeatures);
            double scaledBoundingHeight = (originalHeightBox * RMSimage / distanceTemplateFeatures);
            double displayRadius = (scaledBoundingWidth + scaledBoundingHeight) / 2;
            displayRadius = displayRadius * 1.4826;
            displayRadius = displayRadius / numberKernelMax;
            double distanceAverage = 0;
            if (Double.isNaN(displayRadius)) {
                //Log.e(TAG, "displayRadius is NaN!");
            } else {
                distanceAverage = globalClass.imageDistanceAverage(displayRadius);
                //Log.e(TAG, "Average distance: " + distanceAverage);
            }
            if ((Double.isNaN(xImageCentroid)) | Double.isNaN(yImageCentroid)) {
                //Log.e(TAG, "Centroid is NaN!");
            } else {
                globalClass.centroidAverage(xImageCentroid, yImageCentroid);
            }
            if (initialisationFlag == 1) {
                //int displayRadius = 50;
                Point pointDisplay = new Point();
                //pointDisplay.x = xImageCentroid;
                //pointDisplay.y = yImageCentroid;
                pointDisplay.x = globalClass.getXcentroidAverageGlobal();
                pointDisplay.y = globalClass.getYcentroidAverageGlobal();
                globalClass.centroidAverage(xImageCentroid, yImageCentroid);
                int distanceAverageInt = (int) distanceAverage;
                Core.circle(result, pointDisplay, distanceAverageInt, color);
            }
        }
        Log.e(TAG, "Centroid in the streamed image: (" + xImageCentroid + "," + yImageCentroid + ")");
        /*
        try {
            //Features2d.drawKeypoints(result, keypointsImage, result, color, flagDraw);
            Features2d.drawKeypoints(result, templateKeyPoints, result, color, flagDraw);
        } catch (Exception e) {}
        */
        //Log.e(TAG, "High (R,G,B): (" + rHigh + "," + gHigh + "," + bHigh + ")");
        //Log.e(TAG, "Low (R,G,B): (" + rLow + "," + gLow + "," + bLow + ")");
        //Log.e(TAG, Arrays.toString(matchesArray));
        try {
            Bitmap bmp = Bitmap.createBitmap(result.cols(), result.rows(), Bitmap.Config.ARGB_8888);
            Utils.matToBitmap(result, bmp);
            //Utils.matToBitmap(mRgb, bmp);
            bmp.compress(Bitmap.CompressFormat.JPEG, mQuality, mJpegOutputStream);
        } catch (Exception e) {
            Log.e(TAG, "JPEG not working!");
        }
        long timeEnd = System.currentTimeMillis();
        Log.e(TAG, "Time consumed is " + String.valueOf(timeEnd - timeBegin) + " milli-seconds!");
        mJpegData = mJpegOutputStream.toByteArray();
        synchronized (mJpegOutputStream) {
            mJpegOutputStream.notifyAll();
        }
    } catch (Exception e) {
        Log.e(TAG, "JPEG-factory is not working!");
    }
}
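The conversion pipeline at the heart of the example above, condensed into a standalone sketch: an NV21 preview buffer is wrapped as a CV_8UC1 Mat with height * 3/2 rows, color-converted, and rendered to a Bitmap.

    // data: the NV21 byte[] delivered to onPreviewFrame(); mWidth/mHeight: the preview size.
    Mat yuv = new Mat(mHeight + mHeight / 2, mWidth, CvType.CV_8UC1);
    yuv.put(0, 0, data);
    Mat rgb = new Mat();
    Imgproc.cvtColor(yuv, rgb, Imgproc.COLOR_YUV420sp2RGB);  // NV21 is YUV420 semi-planar
    Bitmap bmp = Bitmap.createBitmap(rgb.cols(), rgb.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rgb, bmp);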
From source file:cn.xiongyihui.webcam.setup.java
License:Open Source License
@Override
public boolean onTouchEvent(MotionEvent event) {
    if (event.getAction() == MotionEvent.ACTION_DOWN) {
        try {
            final ImageView imageView = (ImageView) findViewById(R.id.imageView);
            int X = (int) event.getX();
            int Y = (int) event.getY();
            int[] coordinates = new int[2];
            imageView.getLocationOnScreen(coordinates);
            int viewTop = coordinates[1];
            int viewBottom = coordinates[1] + imageView.getHeight();
            // getLocationOnScreen() fills a two-element array: [0] is x, [1] is y.
            // The original indexed coordinates[2], which is out of bounds.
            int viewLeft = coordinates[0];
            int viewRight = coordinates[0] + imageView.getWidth();
            imageViewHeight = (double) viewBottom - viewTop;
            imageViewWidth = aspectRatio * imageViewHeight;
            int imageViewWidthINT = (int) imageViewWidth;
            int imageViewHeightINT = (int) imageViewHeight;
            Display display = getWindowManager().getDefaultDisplay();
            // Fully qualified, as in the ACTION_UP branch: Display.getSize() takes
            // android.graphics.Point, not org.opencv.core.Point.
            android.graphics.Point size = new android.graphics.Point();
            display.getSize(size);
            int widthScreen = size.x;
            int heightScreen = size.y;
            int Yoffset = heightScreen - viewBottom;
            int Xoffset = widthScreen - imageView.getWidth();
            int virtualOriginX = (widthScreen - imageViewWidthINT + Xoffset) / 2;
            int virtualOriginY = heightScreen - imageViewHeightINT - Yoffset / 2;
            x0 = X - virtualOriginX;
            y0 = Y - virtualOriginY;
            double openCVratio = (double) bitmapHeight / imageViewHeight;
            x0final = (int) (x0 * openCVratio);
            y0final = (int) (y0 * openCVratio);
        } catch (Exception e) {
            Log.e(TAG, "Touch events are not working!");
        }
    }
    if (event.getAction() == MotionEvent.ACTION_UP) {
        try {
            final ImageView imageView = (ImageView) findViewById(R.id.imageView);
            int X = (int) event.getX();
            int Y = (int) event.getY();
            int[] coordinates = new int[2];
            imageView.getLocationOnScreen(coordinates);
            int viewTop = coordinates[1];
            int viewBottom = coordinates[1] + imageView.getHeight();
            int viewLeft = coordinates[0];
            int viewRight = coordinates[0] + imageView.getWidth();
            imageViewHeight = (double) viewBottom - viewTop;
            imageViewWidth = aspectRatio * imageViewHeight;
            int imageViewWidthINT = (int) imageViewWidth;
            int imageViewHeightINT = (int) imageViewHeight;
            Display display = getWindowManager().getDefaultDisplay();
            android.graphics.Point size = new android.graphics.Point();
            display.getSize(size);
            int widthScreen = size.x;
            int heightScreen = size.y;
            int Yoffset = heightScreen - viewBottom;
            int Xoffset = widthScreen - imageView.getWidth();
            int virtualOriginX = (widthScreen - imageViewWidthINT + Xoffset) / 2;
            int virtualOriginY = heightScreen - imageViewHeightINT - Yoffset / 2;
            x1 = X - virtualOriginX;
            y1 = Y - virtualOriginY;
            double openCVratio = (double) bitmapHeight / imageViewHeight;
            x1final = (int) (x1 * openCVratio);
            y1final = (int) (y1 * openCVratio);
            bitmap = BitmapFactory.decodeFile(filePath);
            bitmap = Bitmap.createScaledBitmap(bitmap, bitmapWidth, bitmapHeight, true);
            // Mat takes (rows, cols) = (height, width); the original passed height twice.
            Mat frame = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC3);
            Utils.bitmapToMat(bitmap, frame);
            rect = new Rect(x0final, y0final, x1final - x0final, y1final - y0final);
            Core.rectangle(frame, rect.tl(), rect.br(), color, 3);
            Utils.matToBitmap(frame, bitmap);
            imageView.setImageBitmap(bitmap);
        } catch (Exception e) {
            Log.e(TAG, "Touch events are not working!");
        }
    }
    return true;
}
From source file:cn.xiongyihui.webcam.setup.java
License:Open Source License
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_setup);
    this.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
    final Button cameraButton = (Button) findViewById(R.id.cameraButton);
    final Button selectButton = (Button) findViewById(R.id.selectButton);
    final Button templateButton = (Button) findViewById(R.id.templateButton);
    final Button instructionButton = (Button) findViewById(R.id.instructionButton);
    final ImageView imageView = (ImageView) findViewById(R.id.imageView);
    try {
        int NUMBER_OF_CORES = Runtime.getRuntime().availableProcessors();
        // Pass a String: the int overload of makeText() is treated as a resource id and throws.
        Toast.makeText(this, String.valueOf(NUMBER_OF_CORES), Toast.LENGTH_SHORT).show();
    } catch (Exception e) {
        Log.e(TAG, "Processor-cores are not getting detected!");
    }
    try {
        final Toast toast = Toast.makeText(this, "Please capture image; \n" + "select image; \n"
                + "Drag-and-drop, swipe on the desired region and confirm template!", Toast.LENGTH_LONG);
        final TextView v = (TextView) toast.getView().findViewById(android.R.id.message);
        instructionButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View arg0) {
                if (v != null)
                    v.setGravity(Gravity.CENTER);
                toast.show();
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Instructions are not getting displayed!");
    }
    try {
        cameraButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View arg0) {
                Intent intent = new Intent("android.media.action.IMAGE_CAPTURE");
                startActivityForResult(intent, requestCode);
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Camera is not working!");
    }
    try {
        selectButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View arg0) {
                Intent i = new Intent(Intent.ACTION_PICK,
                        android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
                startActivityForResult(i, requestCode);
                bitmap = BitmapFactory.decodeFile(filePath);
                imageView.setImageBitmap(bitmap);
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Selection is not working!");
    }
    try {
        templateButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View arg0) {
                if (imageView.getDrawable() == null) {
                    Log.e(TAG, "Null ImageView!");
                }
                Log.e(TAG, "Button is working.");
                try {
                    bitmap = BitmapFactory.decodeFile(filePath);
                    bitmap = Bitmap.createScaledBitmap(bitmap, bitmapWidth, bitmapHeight, true);
                    // Mat takes (rows, cols) = (height, width); the original swapped them.
                    Mat frame = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC4);
                    Utils.bitmapToMat(bitmap, frame);
                    GlobalClass globalVariable = (GlobalClass) getApplicationContext();
                    globalVariable.setTemplateCapturedBitmapHeight(bitmapHeight);
                    globalVariable.setTemplateCapturedBitmapWidth(bitmapWidth);
                    Log.e(TAG, "Bitmap has been set successfully; Template is being generated!");
                    rect = new Rect(x0final, y0final, x1final - x0final, y1final - y0final);
                    Utils.matToBitmap(frame, bitmap);
                    if (x0final < x1final) { x0display = x0final; x1display = x1final; }
                    if (x0final > x1final) { x1display = x0final; x0display = x1final; }
                    if (y0final < y1final) { y0display = y0final; y1display = y1final; }
                    if (y0final > y1final) { y1display = y0final; y0display = y1final; }
                    long timeBegin = System.currentTimeMillis(); // the original cast this to int
                    bitmap = Bitmap.createBitmap(bitmap, x0display, y0display,
                            x1display - x0display, y1display - y0display);
                    /*
                    String path = Environment.getExternalStorageDirectory().toString();
                    Log.e(TAG, "File is about to be written!");
                    //File file = new File(path, "TraQuad");
                    //bitmap.compress(Bitmap.CompressFormat.PNG, 100, fOutputStream);
                    //Log.e(TAG, "Stored image successfully!");
                    //fOutputStream.flush();
                    //fOutputStream.close();
                    //MediaStore.Images.Media.insertImage(getContentResolver(), file.getAbsolutePath(), file.getName(), file.getName());
                    */
                    /*
                    Prominent colors code; this is not working in Android (OpenCV assertion error):
                    Log.e(TAG, "Retrieved image successfully!");
                    Imgproc.medianBlur(frame, frame, 3);
                    Log.e(TAG, "Filtered image successfully!");
                    try {
                        Mat mask = new Mat(bitmap.getWidth(), bitmap.getHeight(), CvType.CV_8UC1);
                        MatOfFloat range = new MatOfFloat(0f, 255f);
                        Mat hist = new Mat();
                        MatOfInt mHistSize = new MatOfInt(256);
                        List<Mat> lHsv = new ArrayList<Mat>(3);
                        Mat hsv = new Mat();
                        Imgproc.cvtColor(frame, hsv, Imgproc.COLOR_RGB2HSV);
                        Core.split(frame, lHsv);
                        Mat mH = lHsv.get(0);
                        Mat mS = lHsv.get(1);
                        Mat mV = lHsv.get(2);
                        ArrayList<Mat> ListMat = new ArrayList<Mat>();
                        ListMat.add(mH);
                        Log.e(TAG, String.valueOf(ListMat));
                        MatOfInt channels = new MatOfInt(0, 1);
                        Imgproc.calcHist(Arrays.asList(mH), channels, mask, hist, mHistSize, range);
                        ListMat.clear();
                    } catch (Exception e) {
                        Log.e(TAG, "Prominent colors are not getting detected!");
                    }
                    */
                    Mat colorFrame = frame.clone();
                    Utils.bitmapToMat(bitmap, frame);
                    Imgproc.cvtColor(frame, frame, Imgproc.COLOR_RGB2GRAY);
                    Log.e(TAG, "Converted color successfully!");
                    int detectorType = FeatureDetector.ORB;
                    //int detectorType = FeatureDetector.SIFT; // SIFT and SURF are not working!
                    //int detectorType = FeatureDetector.SURF;
                    FeatureDetector featureDetector = FeatureDetector.create(detectorType);
                    Log.e(TAG, "Feature detection has begun!");
                    MatOfKeyPoint keypoints = new MatOfKeyPoint();
                    featureDetector.detect(frame, keypoints);
                    Log.e(TAG, "Feature detection has ended successfully!");
                    /*
                    if (!featureDetector.empty()) {
                        // Draw the detected keypoints
                        int flagDraw = Features2d.NOT_DRAW_SINGLE_POINTS;
                        Features2d.drawKeypoints(frame, keypoints, frame, color, flagDraw);
                        Utils.matToBitmap(frame, bitmap);
                    }
                    */
                    imageView.setImageBitmap(bitmap);
                    Log.e(TAG, "Final bitmap has been loaded!");
                    KeyPoint[] referenceKeypoints = keypoints.toArray();
                    Log.e(TAG, "Number of keypoints detected is " + String.valueOf(referenceKeypoints.length));
                    int iterationMax = referenceKeypoints.length;
                    int iterate = 0;
                    double xFeaturePoint, yFeaturePoint;
                    double xSum = 0, ySum = 0;
                    double totalResponse = 0;
                    double keyPointResponse = 0;
                    double xTemplateCentroid = 0, yTemplateCentroid = 0;
                    DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
                    Mat templateDescriptor = new Mat();
                    descriptorExtractor.compute(frame, keypoints, templateDescriptor);
                    for (iterate = 0; iterate < iterationMax; iterate++) {
                        xFeaturePoint = referenceKeypoints[iterate].pt.x;
                        yFeaturePoint = referenceKeypoints[iterate].pt.y;
                        keyPointResponse = referenceKeypoints[iterate].response;
                        if (keyPointResponse > 0) {
                            xSum = xSum + keyPointResponse * xFeaturePoint;
                            ySum = ySum + keyPointResponse * yFeaturePoint;
                            totalResponse = totalResponse + keyPointResponse;
                            //Log.e(TAG, "Feature " + String.valueOf(iterate) + ":" + String.valueOf(referenceKeypoints[iterate]));
                        }
                    }
                    xTemplateCentroid = xSum / totalResponse;
                    yTemplateCentroid = ySum / totalResponse;
                    Log.e(TAG, "Finished conversion of features to points!");
                    Log.e(TAG, "Centroid location is: (" + xTemplateCentroid + "," + yTemplateCentroid + ")");
                    double xSquareDistance = 0, ySquareDistance = 0;
                    double distanceTemplateFeatures = 0;
                    int numberOfPositiveResponses = 0;
                    double[] colorValue;
                    double rSum = 0, gSum = 0, bSum = 0;
                    double rCentral, gCentral, bCentral;
                    for (iterate = 0; iterate < iterationMax; iterate++) {
                        xFeaturePoint = referenceKeypoints[iterate].pt.x;
                        yFeaturePoint = referenceKeypoints[iterate].pt.y;
                        keyPointResponse = referenceKeypoints[iterate].response;
                        colorValue = colorFrame.get((int) yFeaturePoint, (int) xFeaturePoint);
                        rSum = rSum + colorValue[0];
                        gSum = gSum + colorValue[1];
                        bSum = bSum + colorValue[2];
                        if (keyPointResponse > 0) {
                            xSquareDistance = xSquareDistance + (xFeaturePoint - xTemplateCentroid) * (xFeaturePoint - xTemplateCentroid);
                            ySquareDistance = ySquareDistance + (yFeaturePoint - yTemplateCentroid) * (yFeaturePoint - yTemplateCentroid);
                            numberOfPositiveResponses++;
                        }
                    }
                    rCentral = rSum / iterationMax;
                    gCentral = gSum / iterationMax;
                    bCentral = bSum / iterationMax;
                    double deltaColor = 21;
                    double rLow = rCentral - deltaColor;
                    double rHigh = rCentral + deltaColor;
                    // The original derived the green and blue bounds from rCentral,
                    // which looks like a copy-paste slip; each channel uses its own mean here.
                    double gLow = gCentral - deltaColor;
                    double gHigh = gCentral + deltaColor;
                    double bLow = bCentral - deltaColor;
                    double bHigh = bCentral + deltaColor;
                    Log.e(TAG, "Prominent color (R,G,B): (" + rCentral + "," + gCentral + "," + bCentral + ")");
                    distanceTemplateFeatures = Math.sqrt((xSquareDistance + ySquareDistance) / numberOfPositiveResponses);
                    KeyPoint[] offsetCompensatedKeyPoints = keypoints.toArray();
                    double xMaxNormalisation, yMaxNormalisation;
                    xMaxNormalisation = x1display - x0display;
                    yMaxNormalisation = y1display - y0display;
                    for (iterate = 0; iterate < iterationMax; iterate++) {
                        offsetCompensatedKeyPoints[iterate].pt.x = offsetCompensatedKeyPoints[iterate].pt.x / xMaxNormalisation;
                        offsetCompensatedKeyPoints[iterate].pt.y = offsetCompensatedKeyPoints[iterate].pt.y / yMaxNormalisation;
                        //Log.e(TAG, "Compensated: (" + offsetCompensatedKeyPoints[iterate].pt.x + "," + offsetCompensatedKeyPoints[iterate].pt.y + ")");
                    }
                    double xCentroidNormalised, yCentroidNormalised;
                    xCentroidNormalised = (xTemplateCentroid - x0display) / xMaxNormalisation;
                    yCentroidNormalised = (yTemplateCentroid - y0display) / yMaxNormalisation;
                    Log.e(TAG, "Normalised Centroid: (" + xCentroidNormalised + "," + yCentroidNormalised + ")");
                    long timeEnd = System.currentTimeMillis();
                    Log.e(TAG, "Time consumed is " + String.valueOf(timeEnd - timeBegin) + " milli-seconds!");
                    Log.e(TAG, "RMS distance is: " + distanceTemplateFeatures);
                    globalVariable.setDistanceTemplateFeatures(distanceTemplateFeatures);
                    globalVariable.setX0display(x0display);
                    globalVariable.setY0display(y0display);
                    globalVariable.setX1display(x1display);
                    globalVariable.setY1display(y1display);
                    globalVariable.setKeypoints(keypoints);
                    globalVariable.setXtemplateCentroid(xTemplateCentroid);
                    globalVariable.setYtemplateCentroid(yTemplateCentroid);
                    globalVariable.setTemplateDescriptor(templateDescriptor);
                    globalVariable.setNumberOfTemplateFeatures(iterationMax);
                    globalVariable.setNumberOfPositiveTemplateFeatures(numberOfPositiveResponses);
                    globalVariable.setRhigh(rHigh);
                    globalVariable.setRlow(rLow);
                    globalVariable.setGhigh(gHigh);
                    globalVariable.setGlow(gLow);
                    globalVariable.setBhigh(bHigh);
                    globalVariable.setBlow(bLow);
                    globalVariable.setXnormalisedCentroid(xCentroidNormalised);
                    globalVariable.setYnormalisedCentroid(yCentroidNormalised);
                    globalVariable.setNormalisedTemplateKeyPoints(offsetCompensatedKeyPoints);
                    Log.e(TAG, "Finished setting the global variables!");
                } catch (Exception e) {
                    Log.e(TAG, "Please follow instructions!");
                }
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Template is not working!");
    }
}
From source file:com.dtcristo.virtucane.ImageProcessor.java
License:Apache License
private Bitmap processFrame(byte[] data, Camera camera) {
    count++;
    Log.i(TAG, "Processing frame No. " + count);
    mSaveFrame = false;
    mYuv.put(0, 0, data);
    Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
    Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
    // Note: matToBitmap() returned a boolean in early OpenCV4Android releases,
    // which is what this snippet was written against; current releases make it void.
    if (Utils.matToBitmap(mRgba, bmp)) {
        Log.i(TAG, "Utils.matToBitmap()");
        return bmp;
    }
    bmp.recycle();
    return null;
}
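Against the modern void signature, matToBitmap signals failure by throwing (a CvException in the builds I have seen), so the boolean check above becomes a try/catch. A hedged sketch of the equivalent null-on-failure logic:

    Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
    try {
        Utils.matToBitmap(mRgba, bmp); // void in current releases; throws on failure
        return bmp;
    } catch (Exception e) {            // catching broadly; the exact exception type may vary
        Log.e(TAG, "matToBitmap failed", e);
        bmp.recycle();
        return null;
    }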
From source file:com.example.bluetoothchat.Main.java
License:Apache License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    resultBitmap = Bitmap.createBitmap(inputFrame.rgba().cols(), inputFrame.rgba().rows(),
            Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(inputFrame.rgba(), resultBitmap);
    return inputFrame.rgba();
}
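onCameraFrame runs once per preview frame, so allocating a fresh Bitmap on every call churns the garbage collector. A common variant, sketched here with a hypothetical mCachedBitmap field, allocates once and reuses the Bitmap while the frame size is unchanged:

    private Bitmap mCachedBitmap; // hypothetical field, e.g. cleared in onCameraViewStopped()

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        Mat rgba = inputFrame.rgba();
        // (Re)allocate only when the frame size changes.
        if (mCachedBitmap == null || mCachedBitmap.getWidth() != rgba.cols()
                || mCachedBitmap.getHeight() != rgba.rows()) {
            mCachedBitmap = Bitmap.createBitmap(rgba.cols(), rgba.rows(), Bitmap.Config.ARGB_8888);
        }
        Utils.matToBitmap(rgba, mCachedBitmap);
        return rgba;
    }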
From source file:com.example.colordetector.CamMainActivity.java
License:Apache License
public void takePicture() {
    // Make bitmaps to display the images and (if the user wants) save them to storage.
    bitmap = Bitmap.createBitmap(camWidth, camHeight, Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rgbFrame, bitmap);
    bitmapMask = Bitmap.createBitmap(camWidth, camHeight, Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(filteredFrame, bitmapMask);
    // Show the images to the user and ask whether to save them;
    // the response is processed in onActivityResult().
    Intent intent = new Intent(this, CapturedFrameActivity.class);
    startActivityForResult(intent, 1);
}