/**
 * Reports whether this object carries usable image data.
 *
 * @return true when the backing {@code mBase} Mat holds at least one pixel
 */
public boolean isValid() {
    final boolean noPixels = mBase.empty();
    return !noPixels;
}
/**
 * Reports whether a usable mask image is attached.
 *
 * @return true when {@code mask} is non-null and contains pixel data
 */
public boolean hasMask() {
    if (!SX.isNotNull(mask)) {
        return false;
    }
    return !mask.empty();
}
/**
 * Reports whether usable content is attached.
 *
 * @return true when {@code content} is non-null and contains pixel data
 */
public boolean hasContent() {
    if (!SX.isNotNull(content)) {
        return false;
    }
    return !content.empty();
}
/**
 * @return true if the Element is useable and/or has valid content
 */
@Override
public boolean isValid() {
    // Fetch once: the original called getContent() up to three times per
    // invocation, which repeats whatever work the accessor does.
    Mat content = getContent();
    if (SX.isSet(content)) {
        return !content.empty();
    }
    return false;
}
// NOTE(review): fragment -- the if-body is outside this view. imread() loads
// "china.jpg" relative to the working directory; an empty() result means the
// read failed (missing file or unsupported format) and srcImg must not be used.
Mat srcImg= Highgui.imread("china.jpg"); if ( srcImg.empty() ) // load failed -- handle the error in the body below
/*
 * Decode the input stream and adaptively threshold it.
 *
 * NOTE(review): the original built the Mats as new Mat(width, height, ...),
 * but OpenCV's Mat constructor takes (rows, cols) == (height, width); fixed
 * below. bitmapToMat reallocates the Mats to the bitmap's size anyway, so the
 * old code "worked", but the declared dimensions were transposed.
 */
Bitmap bmCrop = BitmapFactory.decodeStream(iStream);
Bitmap bmThreshed = bmCrop;
// Mat(rows, cols, type): rows = height, cols = width.
Mat threshed = new Mat(bmCrop.getHeight(), bmCrop.getWidth(), CvType.CV_8UC1);
Utils.bitmapToMat(bmCrop, threshed);
Mat crop = new Mat(bmCrop.getHeight(), bmCrop.getWidth(), CvType.CV_8UC1);
Utils.bitmapToMat(bmCrop, crop);
// Convert the Mats to grayscale.
// NOTE(review): Android bitmaps decode as RGBA, so RGB2GRAY (threshed) and
// BGR2GRAY (crop) cannot both be the intended channel order -- confirm which
// one matches what bitmapToMat produced.
if (!threshed.empty()) Imgproc.cvtColor(threshed, threshed, Imgproc.COLOR_RGB2GRAY);
if (!crop.empty()) Imgproc.cvtColor(crop, crop, Imgproc.COLOR_BGR2GRAY);
// Adaptive threshold: crop (src) -> threshed (dst).
// (src, dst, maxValue, adaptiveMethod, thresholdType, blockSize, C)
Imgproc.adaptiveThreshold(crop, threshed, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 15, 8); // 15, 8 were original tests. Casey was 75,10
Utils.matToBitmap(threshed, bmThreshed);
bmThreshed = bmCrop;
/**
 * Background capture loop: keeps grabbing frames from the camera and
 * pushing each non-empty one to the video panel until the worker is
 * cancelled or the camera stops delivering frames.
 */
@Override
protected Void doInBackground() throws Exception {
    Mat frame = new Mat();
    while (!isCancelled()) {
        camera.read(frame);
        if (frame.empty()) {
            break; // camera closed or no more frames
        }
        videoPanel.updateImage(frame);
    }
    return null;
}
// NOTE(review): fragment -- bmCrop comes from outside this view.
// Mat(rows, cols, type, fill): rows = height, cols = width (correct order
// here, unlike the sibling snippet that passed width first). Scalar(4) is an
// initial fill value, immediately overwritten by bitmapToMat.
// NOTE(review): Android bitmaps decode as RGBA, so COLOR_BGR2GRAY likely
// swaps the red/blue weights -- confirm the intended channel order. The
// trailing '1' is cvtColor's dstCn (output channel count) argument.
Mat threshed = new Mat(bmCrop.getHeight(),bmCrop.getWidth(), CvType.CV_8UC1, new Scalar(4));//, new Scalar(4) //Mat crop = new Mat(); Mat crop = new Mat(bmCrop.getHeight(),bmCrop.getWidth(), CvType.CV_8UC1,new Scalar(4));//, new Scalar(4) Utils.bitmapToMat(bmCrop, crop); if(!crop.empty()) Imgproc.cvtColor(crop, crop, Imgproc.COLOR_BGR2GRAY,1);
public void run() { do { synchronized (JavaCameraView.this) { try { JavaCameraView.this.wait(); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } if (!mStopThread) { if (!mFrameChain[mChainIdx].empty()) deliverAndDrawFrame(mCameraFrame[mChainIdx]); mChainIdx = 1 - mChainIdx; } } while (!mStopThread); Log.d(TAG, "Finish processing thread"); } }
// Frame-delivery loop: blocks on wait() until the camera callback notifies a
// new frame, delivers the current buffer, then flips the double-buffer index.
// NOTE(review): wait() is not guarded by a condition loop, so a spurious or
// missed wakeup can deliver a stale/duplicate frame, and the caught
// InterruptedException is swallowed without restoring the interrupt status --
// both fixed in later OpenCV versions via a frame-ready flag.
public void run() { do { synchronized (JavaCameraView.this) { try { JavaCameraView.this.wait(); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } if (!mStopThread) { if (!mFrameChain[mChainIdx].empty()) deliverAndDrawFrame(mCameraFrame[mChainIdx]); mChainIdx = 1 - mChainIdx; } } while (!mStopThread); Log.d(TAG, "Finish processing thread"); } }
/**
 * Frame-delivery loop (fixed): waits under the lock until the camera
 * callback sets mCameraFrameReady, claims the ready frame, and delivers it
 * outside the lock.
 *
 * Bug fixed: the original re-read AND cleared mCameraFrameReady OUTSIDE the
 * synchronized block after flipping mChainIdx inside it. The camera thread
 * could set the flag between the two sections, and the unsynchronized
 * "mCameraFrameReady = false" would then silently drop that new frame. The
 * flag is now consumed atomically under the lock into a local (the same
 * pattern the sibling variants in this file use).
 */
@Override
public void run() {
    do {
        boolean hasFrame = false;
        synchronized (JavaCameraView.this) {
            try {
                // Condition loop guards against spurious wakeups.
                while (!mCameraFrameReady && !mStopThread) {
                    JavaCameraView.this.wait();
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve interrupt status
                e.printStackTrace();
            }
            if (mCameraFrameReady) {
                mChainIdx = 1 - mChainIdx; // claim this buffer for delivery
                mCameraFrameReady = false; // consume the flag under the lock
                hasFrame = true;
            }
        }
        // Deliver outside the lock so the camera callback is not blocked.
        if (!mStopThread && hasFrame) {
            if (!mFrameChain[1 - mChainIdx].empty())
                deliverAndDrawFrame(mCameraFrame[1 - mChainIdx]);
        }
    } while (!mStopThread);
    Log.d(TAG, "Finish processing thread");
}
}
// Frame-delivery loop (race-free variant): the frame-ready flag is consumed
// atomically under the lock into the local hasFrame, so a frame arriving
// between the synchronized block and delivery cannot be lost. The condition
// loop around wait() guards against spurious wakeups; delivery happens
// outside the lock so the camera callback is never blocked by drawing.
// NOTE(review): the caught InterruptedException is printed but the interrupt
// status is not restored -- shutdown relies solely on mStopThread.
@Override public void run() { do { boolean hasFrame = false; synchronized (JavaCameraView.this) { try { while (!mCameraFrameReady && !mStopThread) { JavaCameraView.this.wait(); } } catch (InterruptedException e) { e.printStackTrace(); } if (mCameraFrameReady) { mChainIdx = 1 - mChainIdx; mCameraFrameReady = false; hasFrame = true; } } if (!mStopThread && hasFrame) { if (!mFrameChain[1 - mChainIdx].empty()) deliverAndDrawFrame(mCameraFrame[1 - mChainIdx]); } } while (!mStopThread); Log.d(TAG, "Finish processing thread"); } }
// Frame-delivery loop (race-free variant, duplicate of the sibling snippet):
// mCameraFrameReady is checked and cleared entirely inside the synchronized
// block and latched into the local hasFrame, so the camera thread cannot have
// a freshly-set flag wiped out by an unsynchronized reset. mChainIdx flips to
// claim the buffer; deliverAndDrawFrame runs outside the lock.
// NOTE(review): interrupt status is not restored in the catch block.
@Override public void run() { do { boolean hasFrame = false; synchronized (JavaCameraView.this) { try { while (!mCameraFrameReady && !mStopThread) { JavaCameraView.this.wait(); } } catch (InterruptedException e) { e.printStackTrace(); } if (mCameraFrameReady) { mChainIdx = 1 - mChainIdx; mCameraFrameReady = false; hasFrame = true; } } if (!mStopThread && hasFrame) { if (!mFrameChain[1 - mChainIdx].empty()) deliverAndDrawFrame(mCameraFrame[1 - mChainIdx]); } } while (!mStopThread); Log.d(TAG, "Finish processing thread"); } }
/**
 * Frame-delivery loop (fixed): waits under the lock until the camera
 * callback sets mCameraFrameReady, claims the ready frame, and delivers it
 * outside the lock.
 *
 * Bug fixed: the original re-read AND cleared mCameraFrameReady OUTSIDE the
 * synchronized block after flipping mChainIdx inside it; a frame arriving in
 * that window was silently dropped by the unsynchronized flag reset. The
 * flag is now consumed atomically under the lock into a local (matching the
 * corrected sibling variants in this file).
 */
@Override
public void run() {
    do {
        boolean hasFrame = false;
        synchronized (JavaCameraView.this) {
            try {
                // Condition loop guards against spurious wakeups.
                while (!mCameraFrameReady && !mStopThread) {
                    JavaCameraView.this.wait();
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve interrupt status
                e.printStackTrace();
            }
            if (mCameraFrameReady) {
                mChainIdx = 1 - mChainIdx; // claim this buffer for delivery
                mCameraFrameReady = false; // consume the flag under the lock
                hasFrame = true;
            }
        }
        // Deliver outside the lock so the camera callback is not blocked.
        if (!mStopThread && hasFrame) {
            if (!mFrameChain[1 - mChainIdx].empty())
                deliverAndDrawFrame(mCameraFrame[1 - mChainIdx]);
        }
    } while (!mStopThread);
    Log.d(TAG, "Finish processing thread");
}
}
/**
 * Frame-delivery loop. Sleeps on the monitor until the camera callback
 * signals a buffered frame (or shutdown is requested), claims the frame
 * under the lock, and renders it outside the lock.
 */
@Override
public void run() {
    do {
        boolean frameAvailable = false;
        synchronized (JavaCameraView.this) {
            try {
                // Keep waiting until either a frame is ready or we are told
                // to stop (condition loop guards against spurious wakeups).
                while (!(mCameraFrameReady || mStopThread)) {
                    JavaCameraView.this.wait();
                }
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            if (mCameraFrameReady) {
                mChainIdx = 1 - mChainIdx;   // claim the filled buffer
                mCameraFrameReady = false;   // consume the flag under the lock
                frameAvailable = true;
            }
        }
        // Render outside the lock so the camera callback is never blocked.
        if (frameAvailable && !mStopThread) {
            if (!mFrameChain[1 - mChainIdx].empty()) {
                deliverAndDrawFrame(mCameraFrame[1 - mChainIdx]);
            }
        }
    } while (!mStopThread);
    Log.d(TAG, "Finish processing thread");
}
}
/**
 * Replaces the displayed image with the given Mat and schedules a repaint.
 * A null or empty Mat clears the display.
 */
public void setMat(Mat mat) {
    boolean blank = (mat == null) || mat.empty();
    image = blank ? null : OpenCvUtils.toBufferedImage(mat);
    repaint();
}
// Load the image and extract flood-fill contours, writing a debug render.
// Fix: the original mixed OpenCV 3.x Imgcodecs.imread with the OpenCV 2.4
// Highgui.imwrite -- those two classes never coexist in one release, so the
// snippet could not compile against either; unified on Imgcodecs here.
// NOTE(review): fragment -- on a failed load this only logs and falls through
// to findContours with an empty Mat; an early return belongs in the enclosing
// method, which is outside this view.
Mat image = Imgcodecs.imread("C:/Users/ja/workspace/imgtomath/bin/imgtomath/lena.png");
if (image.empty()) {
    System.out.println("Error: no image found!");
}
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
// RETR_FLOODFILL requires a 32-bit signed single-channel input image.
Mat image32S = new Mat();
image.convertTo(image32S, CvType.CV_32SC1);
Imgproc.findContours(image32S, contours, new Mat(), Imgproc.RETR_FLOODFILL, Imgproc.CHAIN_APPROX_SIMPLE);
// Draw all the contours such that they are filled in.
Mat contourImg = new Mat(image32S.size(), image32S.type());
for (int i = 0; i < contours.size(); i++) {
    Imgproc.drawContours(contourImg, contours, i, new Scalar(255, 255, 255), -1);
}
Imgcodecs.imwrite("debug_image.jpg", contourImg); // DEBUG
@Override public void onPreviewFrame(Mat image) { if (image == null || image.empty() == true) return; int w = image.cols(); //640 columns int h = image.rows(); //480 rows double[][][] array15x15 = new double[15][15][3]; for (int row = 0; row < 15; row++) { for (int col = 0; col < 15; col++) { double[] p = image.get(w/2 + row, h/2 + col); if (p == null || p.length < 3) return; else { array15x15[row][col] = p; } } } }
static Matrix cvMat2Matrix(Mat source) { if (source == null || source.empty()) { return null; } float[] matrixValuesF = new float[source.cols()*source.rows()]; if (CvType.depth(source.type()) == CvType.CV_32F) { source.get(0,0, matrixValuesF); } else { double[] matrixValuesD = new double[matrixValuesF.length]; source.get(0, 0, matrixValuesD); //will throw an java.lang.UnsupportedOperationException if type is not CvType.CV_64F for (int i=0; i<matrixValuesD.length; i++) { matrixValuesF[i] = (float) matrixValuesD[i]; } } Matrix result = new Matrix(); result.setValues(matrixValuesF); return result; }
// Running-average background subtractor. Converts the frame to gray, seeds
// the float accumulator from the first frame (empty() check), thresholds the
// |frame - background| difference with THRESH_BINARY_INV so BACKGROUND pixels
// become 255, then uses that as the accumulateWeighted mask -- i.e. only
// background pixels update the model, at rate learningRate. Returns the
// negated threshold image (foreground mask). Relies on instance fields
// inputGray, accumulatedBackground, backImage, foreground, threshold,
// learningRate and helper negative(), all defined outside this view.
// NOTE(review): assumes BGR input (COLOR_BGR2GRAY) -- confirm against caller.
public Mat process(Mat inputImage) { Mat foregroundThresh = new Mat(); // Firstly, convert to gray-level image, yields good results with performance Imgproc.cvtColor(inputImage, inputGray, Imgproc.COLOR_BGR2GRAY); // initialize background to 1st frame, convert to floating type if (accumulatedBackground.empty()) inputGray.convertTo(accumulatedBackground, CvType.CV_32F); // convert background to 8U, for differencing with input image accumulatedBackground.convertTo(backImage,CvType.CV_8U); // compute difference between image and background Core.absdiff(backImage,inputGray,foreground); // apply threshold to foreground image Imgproc.threshold(foreground,foregroundThresh, threshold,255, Imgproc.THRESH_BINARY_INV); // accumulate background Mat inputFloating = new Mat(); inputGray.convertTo(inputFloating, CvType.CV_32F); Imgproc.accumulateWeighted(inputFloating, accumulatedBackground,learningRate, foregroundThresh); return negative(foregroundThresh); }