/**
 * Runs OpenCV template matching of {@code probe} against {@code mBase} and returns the
 * raw score map (higher = better match at that position, scores normalized to [0, 1]-ish).
 *
 * <p>Non-plain targets use TM_CCOEFF_NORMED (or masked TM_CCORR_NORMED when the probe has a
 * mask). Plain-color targets use TM_SQDIFF_NORMED — a distance, so the map is flipped via
 * {@code 1 - score} at the end to keep "higher is better". Black plain targets are inverted
 * first so the plain-color path sees white-on-dark content.
 *
 * @param target the element being searched for (drives plain/black decisions)
 * @param mBase  the image to search in (BGR)
 * @param probe  the template element; falls back to {@code target} when null
 * @return the match-score Mat produced by {@code Imgproc.matchTemplate}
 */
private Mat doFindMatch(Element target, Mat mBase, Element probe) {
  if (SX.isNull(probe)) {
    probe = target;
  }
  Mat mResult = Element.getNewMat();
  Mat mProbe = probe.getContentBGR();
  if (!target.isPlainColor()) {
    if (probe.hasMask()) {
      // Mask must match the probe's channel count for the masked matchTemplate variant.
      Mat mMask = matMulti(probe.getMask(), mProbe.channels());
      Imgproc.matchTemplate(mBase, mProbe, mResult, Imgproc.TM_CCORR_NORMED, mMask);
    } else {
      Imgproc.matchTemplate(mBase, mProbe, mResult, Imgproc.TM_CCOEFF_NORMED);
    }
  } else {
    Mat mBasePlain = mBase;
    Mat mProbePlain = mProbe;
    if (target.isBlack()) {
      // BUGFIX: invert into fresh Mats. Previously bitwise_not wrote into aliases of
      // mBase/mProbe (dst == src), silently mutating the caller's base image and the
      // probe's cached content on every black-target search.
      mBasePlain = Element.getNewMat();
      mProbePlain = Element.getNewMat();
      Core.bitwise_not(mBase, mBasePlain);
      Core.bitwise_not(mProbe, mProbePlain);
    }
    if (probe.hasMask()) {
      Mat mMask = matMulti(probe.getMask(), mProbe.channels());
      Imgproc.matchTemplate(mBasePlain, mProbePlain, mResult, Imgproc.TM_SQDIFF_NORMED, mMask);
    } else {
      Imgproc.matchTemplate(mBasePlain, mProbePlain, mResult, Imgproc.TM_SQDIFF_NORMED);
    }
    // TM_SQDIFF_NORMED is a distance (0 = perfect); flip so higher means better.
    Core.subtract(Mat.ones(mResult.size(), CvType.CV_32F), mResult, mResult);
  }
  return mResult;
}
Result matchTemplate(Mat mat, Mat template) { Mat result = new Mat(); Imgproc.matchTemplate(mat, template, result, Imgproc.TM_CCOEFF_NORMED); MinMaxLocResult mmr = Core.minMaxLoc(result); double maxVal = mmr.maxVal; double rangeMax = maxVal; // Since matchTemplate type is fixed to TM_CCOEFF_NORMED, corr is not actually needed // Using just threshold is enought List<TemplateMatch> matches = new ArrayList<>(); for (Point point : OpenCvUtils.matMaxima(result, threshold, rangeMax)) { int x = point.x; int y = point.y; TemplateMatch match = new TemplateMatch(x, y, template.cols(), template.rows(), result.get(y, x)[0]); matches.add(match); } Collections.sort(matches, new Comparator<TemplateMatch>() { @Override public int compare(TemplateMatch o1, TemplateMatch o2) { return ((Double) o2.score).compareTo(o1.score); } }); return new Result(result, matches); }
/**
 * Template-matches {@code probe} in {@code base} and returns the min/max locations of the
 * score map.
 *
 * <p>Non-plain images use TM_CCOEFF_NORMED. Plain-color images use TM_SQDIFF_NORMED (a
 * distance), flipped via {@code 1 - score} so higher is still better; black plain images
 * are inverted first.
 *
 * @param base  image to search in
 * @param probe template to search for
 * @return {@code Core.minMaxLoc} of the resulting score map
 */
private Core.MinMaxLocResult doFindMatch(Mat base, Mat probe) {
  Mat res = new Mat();
  if (!isPlainColor) {
    Imgproc.matchTemplate(base, probe, res, Imgproc.TM_CCOEFF_NORMED);
    return Core.minMaxLoc(res);
  }
  Mat bi = base;
  Mat pi = probe;
  if (isBlack) {
    // FIX: only allocate the inverted copies when actually inverting. The previous
    // version created two Mats (native memory) up front and abandoned them unused
    // in the non-black path.
    bi = new Mat();
    pi = new Mat();
    Core.bitwise_not(base, bi);
    Core.bitwise_not(probe, pi);
  }
  Imgproc.matchTemplate(bi, pi, res, Imgproc.TM_SQDIFF_NORMED);
  // TM_SQDIFF_NORMED: 0 = perfect match; flip so callers can read maxVal/maxLoc.
  Core.subtract(Mat.ones(res.size(), CvType.CV_32F), res, res);
  return Core.minMaxLoc(res);
}
private MinMaxLocResult getBestTemplateMatching(int matchMethod, Mat sceneImageMat, Mat objectImageMat) { // / Create the result matrix int resultCols = sceneImageMat.cols() - objectImageMat.cols() + 1; int resultRows = sceneImageMat.rows() - objectImageMat.rows() + 1; Mat result = new Mat(resultRows, resultCols, CvType.CV_32FC1); // / Do the Matching and Normalize Imgproc.matchTemplate(sceneImageMat, objectImageMat, result, matchMethod); // / Localizing the best match with minMaxLoc return Core.minMaxLoc(result); }
// Normalized cross-correlation template matching; 'result' receives one score per
// candidate position (reallocated by matchTemplate as needed). 'mat' and 'template'
// are defined by the surrounding (not visible) scope.
Mat result = new Mat(); Imgproc.matchTemplate(mat, template, result, Imgproc.TM_CCOEFF_NORMED);
// NOTE(review): this span looks like a garbled/duplicated paste from the middle of a
// best-match loop — 'result' is redeclared after first use, 'resultCols' has no visible
// definition, and the trailing 'if' is unclosed. As written it cannot compile; the
// surrounding method (not visible here) must be consulted before touching it.
Imgproc.matchTemplate(image, templ, result, Imgproc.TM_CCOEFF_NORMED); MinMaxLocResult match = Core.minMaxLoc(result); if (match.maxVal > bestMatch.maxVal) { int resultRows = image.rows() - templ.rows() + 1; Mat result = new Mat(resultRows, resultCols, CvType.CV_32FC1); Imgproc.matchTemplate(image, templ, result, Imgproc.TM_CCOEFF_NORMED); MinMaxLocResult match = Core.minMaxLoc(result); if (match.maxVal > bestMatch.maxVal) {
@Override public Point[] locateTemplateMatches(int roiX, int roiY, int roiWidth, int roiHeight, int coiX, int coiY, BufferedImage templateImage_) throws Exception { BufferedImage cameraImage_ = camera.capture(); // Convert the camera image and template image to the same type. This // is required by the cvMatchTemplate call. templateImage_ = ImageUtils.convertBufferedImage(templateImage_, BufferedImage.TYPE_INT_ARGB); cameraImage_ = ImageUtils.convertBufferedImage(cameraImage_, BufferedImage.TYPE_INT_ARGB); Mat templateImage = OpenCvUtils.toMat(templateImage_); Mat cameraImage = OpenCvUtils.toMat(cameraImage_); Mat roiImage = new Mat(cameraImage, new Rect(roiX, roiY, roiWidth, roiHeight)); // http://stackoverflow.com/questions/17001083/opencv-template-matching-example-in-android Mat resultImage = new Mat(roiImage.cols() - templateImage.cols() + 1, roiImage.rows() - templateImage.rows() + 1, CvType.CV_32FC1); Imgproc.matchTemplate(roiImage, templateImage, resultImage, Imgproc.TM_CCOEFF); MinMaxLocResult mmr = Core.minMaxLoc(resultImage); org.opencv.core.Point matchLoc = mmr.maxLoc; double matchValue = mmr.maxVal; // TODO: Figure out certainty and how to filter on it. Logger.debug(String.format("locateTemplateMatches certainty %f at %f, %f", matchValue, matchLoc.x, matchLoc.y)); locateTemplateMatchesDebug(roiImage, templateImage, matchLoc); return new Point[] {new Point(((int) matchLoc.x) + roiX, ((int) matchLoc.y) + roiY)}; }
/** * Matches concrete point of the eye by using template with TM_SQDIFF_NORMED */ private static void matchEye(Rect area, Mat builtTemplate, Mat matrixGray, Mat matrixRGBA) { Point matchLoc; try { // when there is not builtTemplate we skip it if (builtTemplate.cols() == 0 || builtTemplate.rows() == 0) { return; } Mat submatGray = matrixGray.submat(area); int cols = submatGray.cols() - builtTemplate.cols() + 1; int rows = submatGray.rows() - builtTemplate.rows() + 1; Mat outputTemplateMat = new Mat(cols, rows, CvType.CV_8U); Imgproc.matchTemplate(submatGray, builtTemplate, outputTemplateMat, Imgproc.TM_SQDIFF_NORMED); Core.MinMaxLocResult minMaxLocResult = Core.minMaxLoc(outputTemplateMat); // when is difference in matching methods, the best match is max / min value matchLoc = minMaxLocResult.minLoc; Point matchLocTx = new Point(matchLoc.x + area.x, matchLoc.y + area.y); Point matchLocTy = new Point(matchLoc.x + builtTemplate.cols() + area.x, matchLoc.y + builtTemplate.rows() + area.y); FaceDrawerOpenCV.drawMatchedEye(matchLocTx, matchLocTy, matrixRGBA); } catch (Exception e) { e.printStackTrace(); } }
// Normalized cross-correlation template matching; 'resultMat' receives the score map
// (reallocated by matchTemplate). 'imageMat' and 'templateMat' come from the surrounding
// (not visible) scope.
Mat resultMat = new Mat(); Imgproc.matchTemplate(imageMat, templateMat, resultMat, Imgproc.TM_CCOEFF_NORMED);