if (!detector.isOperational()) {
/**
 * Creates and starts the camera source that feeds frames to the face detector.
 *
 * NOTE(review): the original Javadoc here was copied from a barcode-detection
 * example ("…enable the barcode detector to detect small barcodes at long
 * distances") and did not match this code, which builds a FaceDetector with a
 * 640x480 preview. Rewritten to describe what the method actually does.
 */
private void createCameraSource() {
    Context context = getApplicationContext();
    // Detector reports all face classifications (e.g. smiling probability,
    // eyes-open probability) for each detected face.
    FaceDetector detector = new FaceDetector.Builder(context)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .build();
    // Each detected face is routed to its own tracker instance.
    detector.setProcessor(
            new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
                    .build());
    if (!detector.isOperational()) {
        // Note: The first time that an app using face API is installed on a device, GMS will
        // download a native library to the device in order to do detection. Usually this
        // completes before the app is run for the first time. But if that download has not yet
        // completed, then the above call will not detect any faces.
        //
        // isOperational() can be used to check if the required native library is currently
        // available. The detector will automatically become operational once the library
        // download completes on device.
        Log.w(TAG, "Face detector dependencies are not yet available.");
    }
    // Rear camera, 640x480 preview, 30 fps requested (actual values may be
    // adjusted by CameraSource to the nearest supported configuration).
    mCameraSource = new CameraSource.Builder(context, detector)
            .setRequestedPreviewSize(640, 480)
            .setFacing(CameraSource.CAMERA_FACING_BACK)
            .setRequestedFps(30.0f)
            .build();
}
/** * Calculates a point (focus point) in the bitmap, around which cropping needs to be performed. * * @param bitmap Bitmap in which faces are to be detected. * @param centerOfAllFaces To store the center point. */ private void detectFace(Bitmap bitmap, PointF centerOfAllFaces) { FaceDetector faceDetector = PicassoFaceDetector.getFaceDetector(); if (!faceDetector.isOperational()) { centerOfAllFaces.set(bitmap.getWidth() / 2, bitmap.getHeight() / 2); // center crop return; } Frame frame = new Frame.Builder().setBitmap(bitmap).build(); SparseArray<Face> faces = faceDetector.detect(frame); final int totalFaces = faces.size(); if (totalFaces > 0) { float sumX = 0f; float sumY = 0f; for (int i = 0; i < totalFaces; i++) { PointF faceCenter = new PointF(); getFaceCenter(faces.get(faces.keyAt(i)), faceCenter); sumX = sumX + faceCenter.x; sumY = sumY + faceCenter.y; } centerOfAllFaces.set(sumX / totalFaces, sumY / totalFaces); return; } centerOfAllFaces.set(bitmap.getWidth() / 2, bitmap.getHeight() / 2); // center crop }
/** * Calculates a point (focus point) in the bitmap, around which cropping needs to be performed. * * @param bitmap Bitmap in which faces are to be detected. * @param centerOfAllFaces To store the center point. */ private void detectFace(Bitmap bitmap, PointF centerOfAllFaces) { FaceDetector faceDetector = GlideFaceDetector.getFaceDetector(); if (!faceDetector.isOperational()) { centerOfAllFaces.set(bitmap.getWidth() / 2, bitmap.getHeight() / 2); // center crop return; } Frame frame = new Frame.Builder().setBitmap(bitmap).build(); SparseArray<Face> faces = faceDetector.detect(frame); final int totalFaces = faces.size(); if (totalFaces > 0) { float sumX = 0f; float sumY = 0f; for (int i = 0; i < totalFaces; i++) { PointF faceCenter = new PointF(); getFaceCenter(faces.get(faces.keyAt(i)), faceCenter); sumX = sumX + faceCenter.x; sumY = sumY + faceCenter.y; } centerOfAllFaces.set(sumX / totalFaces, sumY / totalFaces); return; } centerOfAllFaces.set(bitmap.getWidth() / 2, bitmap.getHeight() / 2); // center crop }
/**
 * Create face decoder and camera source.
 *
 * <p>Builds a classification-only face detector (no per-face tracking), wires it
 * to a {@code GraphicFaceTrackerFactory} via a multiprocessor, and prepares a
 * front-camera source at 640x480 / 30fps. If the native detection library is not
 * yet available on the device, the view is notified of the error.
 *
 * <p>NOTE(review): the method name typo ("crete") is preserved deliberately —
 * renaming it would break existing call sites.
 */
private void creteCameraTracker() {
    // Classification only; tracking across frames is not needed here.
    mDetector = new FaceDetector.Builder(mActivity)
            .setTrackingEnabled(false)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .build();

    GraphicFaceTrackerFactory trackerFactory = new GraphicFaceTrackerFactory();
    mDetector.setProcessor(new MultiProcessor.Builder<>(trackerFactory).build());

    if (!mDetector.isOperational()) {
        // Native library not yet downloaded by GMS — surface the error to the view.
        mUserAwareVideoView.onErrorOccurred();
        Log.e("Start Tracking", "Face tracker is not operational.");
    }

    // Front-facing camera feeds the detector at the requested preview settings.
    CameraSource.Builder sourceBuilder = new CameraSource.Builder(mActivity, mDetector);
    sourceBuilder.setRequestedPreviewSize(640, 480);
    sourceBuilder.setFacing(CameraSource.CAMERA_FACING_FRONT);
    sourceBuilder.setRequestedFps(30.0f);
    mCameraSource = sourceBuilder.build();
}
public void setBitmap( Bitmap bitmap ) { mBitmap = bitmap; FaceDetector detector = new FaceDetector.Builder( getContext() ) .setTrackingEnabled(true) .setLandmarkType(FaceDetector.ALL_LANDMARKS) .setMode(FaceDetector.ACCURATE_MODE) .build(); if (!detector.isOperational()) { //Handle contingency } else { Frame frame = new Frame.Builder().setBitmap(bitmap).build(); mFaces = detector.detect(frame); detector.release(); } logFaceData(); invalidate(); }
.build()); if (!detector.isOperational()) {
if (!mDetector.isOperational()) { mScreenListener.onErrorOccurred(Errors.UNDEFINED); return;
.build(); if(previewFaceDetector.isOperational()) { previewFaceDetector.setProcessor(new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory()).build()); } else {
.build(); if(previewFaceDetector.isOperational()) { previewFaceDetector.setProcessor(new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory()).build()); } else {