Example source code for the Java class android.media.FaceDetector

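Every snippet on this page follows the same basic android.media.FaceDetector pattern: copy the image into an RGB_565 bitmap (the detector accepts only this format, and the bitmap width should be even), create a FaceDetector sized to the bitmap together with a FaceDetector.Face[] result array, call findFaces(), and then read each face's mid-point, eye distance, and confidence. As a quick orientation, here is a minimal, self-contained sketch of that pattern; the class, method, and constant names are illustrative and are not taken from any of the projects listed below.

// Minimal sketch of the shared usage pattern (illustrative names only).
import android.graphics.Bitmap;
import android.graphics.PointF;
import android.media.FaceDetector;

public final class FaceDetectorSketch {
    private static final int MAX_FACES = 5;

    /** Returns the mid-point between the eyes for every face found in the bitmap. */
    public static PointF[] detectMidPoints(Bitmap source) {
        // The detector only accepts RGB_565 bitmaps, and the bitmap width should be even.
        Bitmap bitmap = source.copy(Bitmap.Config.RGB_565, true);
        FaceDetector detector = new FaceDetector(bitmap.getWidth(), bitmap.getHeight(), MAX_FACES);
        FaceDetector.Face[] faces = new FaceDetector.Face[MAX_FACES];
        int count = detector.findFaces(bitmap, faces);

        PointF[] midPoints = new PointF[count];
        for (int i = 0; i < count; i++) {
            PointF mid = new PointF();
            faces[i].getMidPoint(mid);  // point midway between the eyes
            // faces[i].eyesDistance() and faces[i].confidence() are also available;
            // several projects below discard faces with confidence below roughly 0.3.
            midPoints[i] = mid;
        }
        bitmap.recycle();
        return midPoints;
    }
}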
Project: react-native-face-detect    File: FaceTrackerView.java
/**
 * Creates and starts the camera.  Note that this uses a higher resolution in comparison
 * to other detection examples so that the detector can pick up small faces at longer
 * distances.
 */
private void createCameraSource() {
  mCameraSource = open();
  maxFace = mCameraSource.getParameters().getMaxNumDetectedFaces();
  if (maxFace == 0)
    maxFace = 10;
  //mCameraSource.setFaceDetectionListener(this);
  previewSize = mCameraSource.getParameters().getPreviewSize();

  bufflen = previewSize.width * previewSize.height;
  grayBuff = new byte[bufflen];
  rgbs = new int[bufflen];
  faces = new FaceResult[maxFace];
  faces_previous = new FaceResult[maxFace];
  for (int i = 0; i < maxFace; i++) {
    faces[i] = new FaceResult();
    faces_previous[i] = new FaceResult();
  }
  fdet = new android.media.FaceDetector(previewSize.width, previewSize.height, maxFace);
  mCameraSource.setPreviewCallback(this);
}
Project: xbot_head    File: SignInFragment.java
@Override
public void onStart() {
    super.onStart();
    initView();

    mFaces = new FaceResult[MAX_FACE_COUNT];
    mPreviousFaces = new FaceResult[MAX_FACE_COUNT];
    mDetectedFaces = new FaceDetector.Face[MAX_FACE_COUNT];
    for (int i = 0; i < MAX_FACE_COUNT; i++) {
        mFaces[i] = new FaceResult();
        mPreviousFaces[i] = new FaceResult();
    }
    mFacesCountMap = new SparseIntArray();

    presenter = new SignInPresenter(this, getContext());
    presenter.start();
}
Project: haxsync    File: BitmapUtil.java
private static PointF findFaceMid(Bitmap in){
    PointF mid = new PointF();
    Bitmap bitmap565 = in.copy(Bitmap.Config.RGB_565, true);

    FaceDetector fd = new FaceDetector(in.getWidth(), in.getHeight(), 1);
    FaceDetector.Face[] faces = new FaceDetector.Face[1];
    fd.findFaces(bitmap565, faces);

    FaceDetector.Face face = faces[0];
    if (face != null){
        try{
            face.getMidPoint(mid);
            return mid;
        } catch (NullPointerException n){}
    }
    return null;
}
Project: FaceBarCodeDemo    File: FaceComposeActivity.java
private int detectFace() {
    Bitmap bitmap = mOriginal.copy(Bitmap.Config.RGB_565, true);

    FaceDetector.Face[] faces = new FaceDetector.Face[MAX_FACES];
    FaceDetector detector = new FaceDetector(bitmap.getWidth(), bitmap.getHeight(), MAX_FACES);
    int count = detector.findFaces(bitmap, faces);
    if (count > 0) {
        FaceDetector.Face face = faces[0];
        face.getMidPoint(mCenterPoint);
        mEyeDistance = face.eyesDistance();
    } else {
        mCenterPoint.x = 0.0f;
        mCenterPoint.y = 0.0f;
        mEyeDistance = 0.0f;
    }

    bitmap.recycle();

    return count;
}
Project: FaceBarCodeDemo    File: ImageComposeDemoActivity.java
private int detectFace() {
    Bitmap bitmap = mOriginal.copy(Bitmap.Config.RGB_565, true);

    FaceDetector.Face[] faces = new FaceDetector.Face[MAX_FACES];
    FaceDetector detector = new FaceDetector(bitmap.getWidth(), bitmap.getHeight(), MAX_FACES);
    int count = detector.findFaces(bitmap, faces);
    if (count > 0) {
        FaceDetector.Face face = faces[0];
        face.getMidPoint(mCenterPoint);
        mEyeDistance = face.eyesDistance();
    } else {
        mCenterPoint.x = 0.0f;
        mCenterPoint.y = 0.0f;
        mEyeDistance = 0.0f;
    }

    bitmap.recycle();
    bitmap = null;

    return count;
}
Project: Offline3fAuth    File: SkinFaceDetector.java
@Override
public List<Rect> findFaces(Bitmap img) {

    if (img.getWidth() != width || img.getHeight() != height) {
        width = img.getWidth();
        height = img.getHeight();

        Log.v("SkinFaceDetector", String.format("Changing face detector resolution to %dx%d", width, height));

        myFaceDetect = new FaceDetector(width, height, NUMBER_OF_FACES);
    }

    faces.clear();

    time = System.currentTimeMillis();
    int n = myFaceDetect.findFaces(img, myFace);
    time = System.currentTimeMillis() - time;

    for (int i = 0; i < n; i++) {
        Rect r = face2Rect(myFace[i], img.getWidth(), img.getHeight());
        faces.add(r);
    }

    return faces;
}
Project: apps-android-wikipedia    File: FacePostprocessor.java
@Nullable private static PointF detectFace(@NonNull Bitmap testBitmap) {
    final int maxFaces = 1;
    long millis = System.currentTimeMillis();
    // initialize the face detector, and look for only one face...
    FaceDetector fd = new FaceDetector(testBitmap.getWidth(), testBitmap.getHeight(), maxFaces);
    FaceDetector.Face[] faces = new FaceDetector.Face[maxFaces];
    int numFound = fd.findFaces(testBitmap, faces);
    PointF facePos = null;
    if (numFound > 0) {
        facePos = new PointF();
        faces[0].getMidPoint(facePos);
        // center on the nose, not on the eyes
        facePos.y += faces[0].eyesDistance() / 2;
        // normalize the position to [0, 1]
        facePos.set(MathUtil.constrain(facePos.x / testBitmap.getWidth(), 0, 1),
                MathUtil.constrain(facePos.y / testBitmap.getHeight(), 0, 1));
        L.d("Found face at " + facePos.x + ", " + facePos.y);
    }
    L.d("Face detection took " + (System.currentTimeMillis() - millis) + "ms");
    return facePos;
}
Project: PrivacyStreams    File: ImageData.java
List<FaceDetector.Face> getFaces(UQI uqi) {
    int max = 10;
    List<FaceDetector.Face> faces = new ArrayList<>();
    Bitmap bitmap = this.getBitmapRGB565(uqi);
    if (bitmap == null) return faces;
    FaceDetector detector = new FaceDetector(bitmap.getWidth(), bitmap.getHeight(), max);
    FaceDetector.Face[] facesArray = new FaceDetector.Face[max];
    int count = detector.findFaces(bitmap, facesArray);
    for (int i = 0; i < count; i++) {
        FaceDetector.Face face = facesArray[i];
        if (face != null && face.confidence() > 0.3)
            faces.add(face);
    }
    return faces;
}
Project: ViseFace    File: NormalFaceDetector.java
/**
 * Computes the bounding rectangle for each detected face.
 */
private void getFaceRect() {
    Rect[] faceRectList = new Rect[mDetectorData.getFacesCount()];
    Rect rect = null;
    float distance = 0;
    for (int i = 0; i < mDetectorData.getFacesCount(); i++) {
        faceRectList[i] = new Rect();
        FaceDetector.Face face = mFaces[i];
        if (face != null) {
            float eyeDistance = face.eyesDistance();
            eyeDistance = eyeDistance * mZoomRatio;
            if (eyeDistance > distance) {
                distance = eyeDistance;
                rect = faceRectList[i];
            }
            PointF midEyesPoint = new PointF();
            face.getMidPoint(midEyesPoint);
            midEyesPoint.x = midEyesPoint.x * mZoomRatio;
            midEyesPoint.y = midEyesPoint.y * mZoomRatio;
            ViseLog.i("eyeDistance:" + eyeDistance + ",midEyesPoint.x:" + midEyesPoint.x
                    + ",midEyesPoint.y:" + midEyesPoint.y);
            faceRectList[i].set((int) (midEyesPoint.x - eyeDistance),
                    (int) (midEyesPoint.y - eyeDistance),
                    (int) (midEyesPoint.x + eyeDistance),
                    (int) (midEyesPoint.y + eyeDistance));
            ViseLog.i("FaceRectList[" + i + "]:" + faceRectList[i]);
        }
    }
    mDetectorData.setLightIntensity(FaceUtil.getYUVLight(mDetectorData.getFaceData(), rect, mCameraWidth));
    mDetectorData.setFaceRectList(faceRectList);
    if (mCameraWidth > 0) {
        mDetectorData.setDistance(distance * 2 / mCameraWidth);
    }
}
Project: MyAnimeViewer    File: CropImage.java
public void run() {
    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {

            mWaitingToPick = mNumFaces > 1;
            if (mNumFaces > 0) {
                for (int i = 0; i < mNumFaces; i++) {
                    handleFace(mFaces[i]);
                }
            } else {
                makeDefault();
            }
            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() == 1) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }

            if (mNumFaces > 1) {
                MagicToast.showInfo(CropImage.this, "Multi face crop help");
            }
        }
    });
}
Project: phonk    File: PUtil.java
@ProtoMethod(description = "Detect faces in a bitmap", example = "")
@ProtoMethodParam(params = {"Bitmap", "numFaces"})
public int detectFaces(Bitmap bmp, int num_faces) {
    FaceDetector face_detector = new FaceDetector(bmp.getWidth(), bmp.getHeight(), num_faces);
    FaceDetector.Face[] faces = new FaceDetector.Face[num_faces];
    int face_count = face_detector.findFaces(bmp, faces);

    return face_count;
}
Project: xbot_head    File: CommentaryFragment.java
public CommentaryFragment() {
    faces = new FaceResult[MAX_FACE_COUNT];
    previousFaces = new FaceResult[MAX_FACE_COUNT];
    detectedFaces = new FaceDetector.Face[MAX_FACE_COUNT];
    for (int i = 0; i < MAX_FACE_COUNT; i++) {
        faces[i] = new FaceResult();
        previousFaces[i] = new FaceResult();
    }

    recyclerViewBitmapList = new ArrayList<>();
    facesCountMap = new SparseIntArray();
}
Project: xbot_head    File: InteractionFragment.java
@Override
public void onStart() {
    super.onStart();

    mFaces = new FaceResult[MAX_FACE_COUNT];
    mPreviousFaces = new FaceResult[MAX_FACE_COUNT];
    mDetectedFaces = new FaceDetector.Face[MAX_FACE_COUNT];
    for (int i = 0; i < MAX_FACE_COUNT; i++) {
        mFaces[i] = new FaceResult();
        mPreviousFaces[i] = new FaceResult();
    }
    mFacesCountMap = new SparseIntArray();

}
Project: xowa_android    File: ImageViewWithFace.java
@Override
@Nullable
public PointF performTask() {
    // boost this thread's priority a bit
    Thread.currentThread().setPriority(Thread.MAX_PRIORITY - 1);
    long millis = System.currentTimeMillis();
    // create a new bitmap onto which we'll draw the original bitmap,
    // because the FaceDetector requires it to be a 565 bitmap, and it
    // must also be even width. Reduce size of copy for performance.
    Bitmap testBmp = new565ScaledBitmap(srcBitmap);

    // initialize the face detector, and look for only one face...
    FaceDetector fd = new FaceDetector(testBmp.getWidth(), testBmp.getHeight(), 1);
    FaceDetector.Face[] faces = new FaceDetector.Face[1];
    int numFound = fd.findFaces(testBmp, faces);

    PointF facePos = null;
    if (numFound > 0) {
        facePos = new PointF();
        faces[0].getMidPoint(facePos);
        // scale back to proportions of original image
        facePos.x = (facePos.x * srcBitmap.getWidth() / BITMAP_COPY_WIDTH);
        facePos.y = (facePos.y * srcBitmap.getHeight() / testBmp.getHeight());
        L.d("Found face at " + facePos.x + ", " + facePos.y);
    }
    // free our temporary bitmap
    testBmp.recycle();

    L.d("Face detection took " + (System.currentTimeMillis() - millis) + "ms");

    return facePos;
}
Project: croperino    File: CropImage.java
public void run() {
    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {

            mWaitingToPick = mNumFaces > 1;
            if (mNumFaces > 0) {
                for (int i = 0; i < mNumFaces; i++) {
                    handleFace(mFaces[i]);
                }
            } else {
                makeDefault();
            }
            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() == 1) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }

            if (mNumFaces > 1) {
                Crouton.makeText(CropImage.this, "Multi face crop help", Style.INFO).show();
            }
        }
    });
}
Project: TakeAndCrop    File: CropImageActivity.java
private void handleFace(FaceDetector.Face f) {
    PointF midPoint = new PointF();

    int r = ((int) (f.eyesDistance() * mScale)) * 2;
    f.getMidPoint(midPoint);
    midPoint.x *= mScale;
    midPoint.y *= mScale;

    int midX = (int) midPoint.x;
    int midY = (int) midPoint.y;

    HighlightView hv = new HighlightView(mImageView);

    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Rect imageRect = new Rect(0, 0, width, height);

    RectF faceRect = new RectF(midX, midY, midX, midY);
    faceRect.inset(-r, -r);
    if (faceRect.left < 0) {
        faceRect.inset(-faceRect.left, -faceRect.left);
    }

    if (faceRect.top < 0) {
        faceRect.inset(-faceRect.top, -faceRect.top);
    }

    if (faceRect.right > imageRect.right) {
        faceRect.inset(faceRect.right - imageRect.right, faceRect.right - imageRect.right);
    }

    if (faceRect.bottom > imageRect.bottom) {
        faceRect.inset(faceRect.bottom - imageRect.bottom, faceRect.bottom - imageRect.bottom);
    }

    hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop, mAspectX != 0 && mAspectY != 0);

    mImageView.add(hv);
}
Project: memoir    File: CropImageActivity.java
@Override
public void run() {

    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        @Override
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            if (mNumFaces > 0) {
                for (int i = 0; i < mNumFaces; i++) {
                    handleFace(mFaces[i]);
                }
            } else {
                makeDefault();
            }
            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() == 1) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }
        }
    });
}
Project: memoir    File: CropImageActivity.java
public void run() {

    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            if (mNumFaces > 0) {
                for (int i = 0; i < mNumFaces; i++) {
                    handleFace(mFaces[i]);
                }
            } else {
                makeDefault();
            }
            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() == 1) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }
        }
    });
}
Project: Doctor    File: CropImageActivity.java
public void run() {

    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            if (mNumFaces > 0) {
                for (int i = 0; i < mNumFaces; i++) {
                    handleFace(mFaces[i]);
                }
            } else {
                makeDefault();
            }
            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() == 1) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }
        }
    });
}
Project: Android-Face-Recognition-with-Deep-Learning-Library    File: PreProcessor.java
public void setFaces(PreProcessorFactory.PreprocessingMode preprocessingMode) {
    List<Mat> images = getImages();

    PreferencesHelper preferencesHelper = new PreferencesHelper(context);
    if (preferencesHelper.getDetectionMethod()){
        faces = faceDetection.getFaces(images.get(0));
        angle = faceDetection.getAngle();
    } else {
        Mat img = images.get(0);
        FaceDetector faceDetector = new FaceDetector(img.cols(), img.rows(), 1);
        Bitmap bmp = Bitmap.createBitmap(img.cols(), img.rows(), Bitmap.Config.RGB_565);
        Utils.matToBitmap(img, bmp);
        FaceDetector.Face[] facesAndroid = new FaceDetector.Face[1];
        if (faceDetector.findFaces(bmp, facesAndroid) > 0){
            faces = new Rect[facesAndroid.length];
            for (int i=0; i<facesAndroid.length; i++){
                PointF pointF = new PointF();
                facesAndroid[i].getMidPoint(pointF);
                int xWidth = (int) (1.34 * facesAndroid[i].eyesDistance());
                int yWidth = (int) (1.12 * facesAndroid[i].eyesDistance());
                int dist = (int) (2.77 * facesAndroid[i].eyesDistance());
                Rect face = new Rect((int) pointF.x - xWidth, (int) pointF.y - yWidth, dist, dist);
                faces[i] = face;
            }
        }
    }

    if (preprocessingMode == PreProcessorFactory.PreprocessingMode.RECOGNITION && preferencesHelper.getDetectionMethod()){
        // Change the image rotation to the angle where the face was detected
        images.remove(0);
        images.add(faceDetection.getImg());
        setImages(images);
    }
}
Project: droidddle    File: CropImage.java
private void handleFace(FaceDetector.Face f) {
    PointF midPoint = new PointF();

    int r = ((int) (f.eyesDistance() * mScale)) * 2;
    f.getMidPoint(midPoint);
    midPoint.x *= mScale;
    midPoint.y *= mScale;

    int midX = (int) midPoint.x;
    int midY = (int) midPoint.y;

    HighlightView hv = new HighlightView(mImageView);

    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Rect imageRect = new Rect(0, 0, width, height);

    RectF faceRect = new RectF(midX, midY, midX, midY);
    faceRect.inset(-r, -r);
    if (faceRect.left < 0) {
        faceRect.inset(-faceRect.left, -faceRect.left);
    }

    if (faceRect.top < 0) {
        faceRect.inset(-faceRect.top, -faceRect.top);
    }

    if (faceRect.right > imageRect.right) {
        faceRect.inset(faceRect.right - imageRect.right, faceRect.right - imageRect.right);
    }

    if (faceRect.bottom > imageRect.bottom) {
        faceRect.inset(faceRect.bottom - imageRect.bottom, faceRect.bottom - imageRect.bottom);
    }

    hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop, mAspectX != 0 && mAspectY != 0);

    mImageView.add(hv);
}
Project: droidddle    File: CropImage.java
public void run() {
    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(), faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            if (mNumFaces > 0) {
                for (int i = 0; i < mNumFaces; i++) {
                    handleFace(mFaces[i]);
                }
            } else {
                makeDefault();
            }
            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() == 1) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }

            if (mNumFaces > 1) {
                Toast t = Toast.makeText(CropImage.this, R.string.multiface_crop_help, Toast.LENGTH_SHORT);
                t.show();
            }
        }
    });
}
Project: eshow-android    File: CropImage.java
private void handleFace(FaceDetector.Face f) {
    PointF midPoint = new PointF();

    int r = ((int) (f.eyesDistance() * mScale)) * 2;
    f.getMidPoint(midPoint);
    midPoint.x *= mScale;
    midPoint.y *= mScale;

    int midX = (int) midPoint.x;
    int midY = (int) midPoint.y;

    HighlightView hv = new HighlightView(mImageView);

    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Rect imageRect = new Rect(0, 0, width, height);

    RectF faceRect = new RectF(midX, midY, midX, midY);
    faceRect.inset(-r, -r);
    if (faceRect.left < 0) {
        faceRect.inset(-faceRect.left, -faceRect.left);
    }

    if (faceRect.top < 0) {
        faceRect.inset(-faceRect.top, -faceRect.top);
    }

    if (faceRect.right > imageRect.right) {
        faceRect.inset(faceRect.right - imageRect.right, faceRect.right - imageRect.right);
    }

    if (faceRect.bottom > imageRect.bottom) {
        faceRect.inset(faceRect.bottom - imageRect.bottom, faceRect.bottom - imageRect.bottom);
    }

    hv.setup(mImageMatrix, imageRect, faceRect, false, true);

    mImageView.add(hv);
}
Project: eshow-android    File: CropImage.java
public void run() {
    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(), faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            makeDefault();
            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() > 0) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }

            if (mNumFaces > 1) {
            }
        }
    });
}
Project: Android-RTEditor    File: CropImageActivity.java
public void run() {

    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            if (mNumFaces > 0) {
                for (int i = 0; i < mNumFaces; i++) {
                    handleFace(mFaces[i]);
                }
            } else {
                makeDefault();
            }
            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() == 1) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }
        }
    });
}
Project: Silence-Please    File: CropImage.java
public void run() {
    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {
            makeDefault();
            mImageView.invalidate();
            if (mImageView.getHighlightViews().size() == 1) {
                setCrop(mImageView.getHighlightViews().get(0));
                getCrop().setFocus(true);
            }

            if (mNumFaces > 1) {
                Toast t = Toast.makeText(CropImage.this,
                        "Multi face crop help", Toast.LENGTH_SHORT);
                t.show();
            }
        }
    });
}
Project: Chuangxinbei    File: CameraSurfaceView.java
private void updateMeasurement(final FaceDetector.Face currentFace) {
    if (currentFace == null) {
        // _facesFoundInMeasurement--;
        return;
    }

    _foundFace = _currentFaceDetectionThread.getCurrentFace();

    _points.add(new Point(_foundFace.eyesDistance(),
            CALIBRATION_DISTANCE_A4_MM
                    * (_distanceAtCalibrationPoint / _foundFace
                    .eyesDistance())));

    while (_points.size() > _threashold) {
        _points.remove(0);
    }

    float sum = 0;
    for (Point p : _points) {
        sum += p.getEyeDistance();
    }

    _currentAvgEyeDistance = sum / _points.size();

    _currentDistanceToFace = CALIBRATION_DISTANCE_A4_MM
            * (_distanceAtCalibrationPoint / _currentAvgEyeDistance);

    _currentDistanceToFace = Util.MM_TO_CM(_currentDistanceToFace);

    MeasurementStepMessage message = new MeasurementStepMessage();
    message.setConfidence(currentFace.confidence());
    message.setCurrentAvgEyeDistance(_currentAvgEyeDistance);
    message.setDistToFace(_currentDistanceToFace);
    message.setEyesDistance(currentFace.eyesDistance());
    message.setMeasurementsLeft(_calibrationsLeft);
    message.setProcessTimeForLastFrame(_processTimeForLastFrame);

    MessageHUB.get().sendMessage(MessageHUB.MEASUREMENT_STEP, message);
}
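For context, updateMeasurement() above turns FaceDetector's eyesDistance() into a physical distance estimate: the apparent distance between the eyes shrinks in proportion to how far the face is from the camera, so a single calibration at a known distance (CALIBRATION_DISTANCE_A4_MM) is enough to scale later readings. A stripped-down version of that arithmetic, with illustrative names (this helper is not part of the Chuangxinbei project, and it assumes _distanceAtCalibrationPoint holds the eyesDistance() reading captured at the calibration distance), might look like:

// Illustrative helper, not taken from the project above.
static float estimateDistanceMm(float calibrationDistanceMm,
                                float eyeDistanceAtCalibration,
                                float currentEyeDistance) {
    // Apparent eye distance is roughly inversely proportional to the real distance,
    // so scale the calibration distance by the ratio of the two readings.
    return calibrationDistanceMm * (eyeDistanceAtCalibration / currentEyeDistance);
}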
Project: androidsummary    File: CropImage.java
private void handleFace(FaceDetector.Face f) {
    PointF midPoint = new PointF();

    int r = ((int) (f.eyesDistance() * mScale)) * 2;
    f.getMidPoint(midPoint);
    midPoint.x *= mScale;
    midPoint.y *= mScale;

    int midX = (int) midPoint.x;
    int midY = (int) midPoint.y;

    HighlightView hv = new HighlightView(mImageView);

    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Rect imageRect = new Rect(0, 0, width, height);

    RectF faceRect = new RectF(midX, midY, midX, midY);
    faceRect.inset(-r, -r);
    if (faceRect.left < 0) {
        faceRect.inset(-faceRect.left, -faceRect.left);
    }

    if (faceRect.top < 0) {
        faceRect.inset(-faceRect.top, -faceRect.top);
    }

    if (faceRect.right > imageRect.right) {
        faceRect.inset(faceRect.right - imageRect.right, faceRect.right - imageRect.right);
    }

    if (faceRect.bottom > imageRect.bottom) {
        faceRect.inset(faceRect.bottom - imageRect.bottom, faceRect.bottom - imageRect.bottom);
    }

    hv.setup(mImageMatrix, imageRect, faceRect, false, true);

    mImageView.add(hv);
}
Project: androidsummary    File: CropImage.java
public void run() {
    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(), faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            makeDefault();
            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() > 0) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }

            if (mNumFaces > 1) {
            }
        }
    });
}
Project: Launchpet2    File: CropImage.java
public void run() {
    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            if (mNumFaces > 0) {
                for (int i = 0; i < mNumFaces; i++) {
                    handleFace(mFaces[i]);
                }
            } else {
                makeDefault();
            }
            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() == 1) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }

            if (mNumFaces > 1) {
                Toast t = Toast.makeText(CropImage.this,
                        R.string.multiface_crop_help,
                        Toast.LENGTH_SHORT);
                t.show();
            }
        }
    });
}
Project: YikuairAndroid    File: CropImage.java
public void run() {
    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            makeDefault();

            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() > 0) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }

            if (mNumFaces > 1) {

            }
        }
    });
}
Project: YiBo    File: CropImageActivity.java
private void handleFace(FaceDetector.Face f) {
    PointF midPoint = new PointF();

    int r = ((int) (f.eyesDistance() * mScale)) * 2;
    f.getMidPoint(midPoint);
    midPoint.x *= mScale;
    midPoint.y *= mScale;

    int midX = (int) midPoint.x;
    int midY = (int) midPoint.y;

    HighlightView hv = new HighlightView(mImageView);

    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Rect imageRect = new Rect(0, 0, width, height);

    RectF faceRect = new RectF(midX, midY, midX, midY);
    faceRect.inset(-r, -r);
    if (faceRect.left < 0) {
        faceRect.inset(-faceRect.left, -faceRect.left);
    }

    if (faceRect.top < 0) {
        faceRect.inset(-faceRect.top, -faceRect.top);
    }

    if (faceRect.right > imageRect.right) {
        faceRect.inset(faceRect.right - imageRect.right, faceRect.right - imageRect.right);
    }

    if (faceRect.bottom > imageRect.bottom) {
        faceRect.inset(faceRect.bottom - imageRect.bottom, faceRect.bottom - imageRect.bottom);
    }

    hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop, mAspectX != 0 && mAspectY != 0);

    mImageView.add(hv);
}
Project: YiBo    File: CropImageActivity.java
public void run() {
    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(), faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            if (mNumFaces > 0) {
                for (int i = 0; i < mNumFaces; i++) {
                    handleFace(mFaces[i]);
                }
            } else {
                makeDefault();
            }
            mImageView.invalidate();
            if (mImageView.getHighlightViews().size() == 1) {
                mCrop = mImageView.getHighlightViews().get(0);
                mCrop.setFocus(true);
            }

            if (mNumFaces > 1) {
                // CR: no need for the variable t. just do
                // Toast.makeText(...).show().
                Toast t = Toast.makeText(CropImageActivity.this, "Choose One", Toast.LENGTH_SHORT);
                t.show();
            }
        }
    });
}
Project: Offline3fAuth    File: SkinFaceDetector.java
/**
 * Creates an instance of {@link SkinFaceDetector}. 
 */
public SkinFaceDetector(){
    width = 640;
    height = 480;

    myFace = new FaceDetector.Face[NUMBER_OF_FACES]; // finds up to 4 faces in an image
    myFaceDetect = new FaceDetector(width, height, NUMBER_OF_FACES);
    faces = new LinkedList<Rect>();
    p = new PointF();
    time = 0;
}
Project: react-native-face-detect    File: FaceTrackerView.java
public void run() {
  Bitmap bitmap = getBitmap();
  float aspect = (float) previewSize.height / (float) previewSize.width;
  int w = imageWidth;
  int h = (int) (imageWidth * aspect);

  float xScale = (float) previewSize.width / (float) w;
  float yScale = (float) previewSize.height / (float) h;

  Bitmap bmp = Bitmap.createScaledBitmap(bitmap, w, h, false);


  int rotate = mDisplayOrientation;
  if (mCameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT && mDisplayRotation % 180 == 0) {
    if (rotate + 180 > 360) {
      rotate = rotate - 180;
    } else
      rotate = rotate + 180;
  }
  switch (rotate) {
    case 90:
      bmp = ImageUtils.rotate(bmp, 90);
      xScale = (float) previewSize.height / bmp.getWidth();
      yScale = (float) previewSize.width / bmp.getHeight();
      break;
    case 180:
      bmp = ImageUtils.rotate(bmp, 180);
      break;
    case 270:
      bmp = ImageUtils.rotate(bmp, 270);
      xScale = (float) previewSize.height / (float) h;
      yScale = (float) previewSize.width / (float) imageWidth;
      break;
  }
  fdet = new android.media.FaceDetector(bmp.getWidth(), bmp.getHeight(), maxFace);

  android.media.FaceDetector.Face[] fullResults = new android.media.FaceDetector.Face[maxFace];
  int findCount = fdet.findFaces(bmp, fullResults);

  for (int i = 0; i < maxFace; i++) {
    processFace(bitmap, xScale, yScale, rotate, fullResults[i], i);
  }

  isThreadWorking = false;
}
Project: MyAnimeViewer    File: CropImage.java
private void handleFace(FaceDetector.Face f) {
    PointF midPoint = new PointF();
    int r = ((int) (f.eyesDistance() * mScale)) * 2;
    f.getMidPoint(midPoint);
    midPoint.x *= mScale;
    midPoint.y *= mScale;

    int midX = (int) midPoint.x;
    int midY = (int) midPoint.y;

    HighlightView hv = new HighlightView(mImageView);

    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Rect imageRect = new Rect(0, 0, width, height);

    RectF faceRect = new RectF(midX, midY, midX, midY);
    faceRect.inset(-r, -r);
    if (faceRect.left < 0) {
        faceRect.inset(-faceRect.left, -faceRect.left);
    }

    if (faceRect.top < 0) {
        faceRect.inset(-faceRect.top, -faceRect.top);
    }

    if (faceRect.right > imageRect.right) {
        faceRect.inset(faceRect.right - imageRect.right,
                faceRect.right - imageRect.right);
    }

    if (faceRect.bottom > imageRect.bottom) {
        faceRect.inset(faceRect.bottom - imageRect.bottom,
                faceRect.bottom - imageRect.bottom);
    }

    hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop,
            mAspectX != 0 && mAspectY != 0);

    mImageView.add(hv);
}
Project: croperino    File: CropImage.java
private void handleFace(FaceDetector.Face f) {
    PointF midPoint = new PointF();
    int r = ((int) (f.eyesDistance() * mScale)) * 2;
    f.getMidPoint(midPoint);
    midPoint.x *= mScale;
    midPoint.y *= mScale;

    int midX = (int) midPoint.x;
    int midY = (int) midPoint.y;

    HighlightView hv = new HighlightView(mImageView);

    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Rect imageRect = new Rect(0, 0, width, height);

    RectF faceRect = new RectF(midX, midY, midX, midY);
    faceRect.inset(-r, -r);
    if (faceRect.left < 0) {
        faceRect.inset(-faceRect.left, -faceRect.left);
    }

    if (faceRect.top < 0) {
        faceRect.inset(-faceRect.top, -faceRect.top);
    }

    if (faceRect.right > imageRect.right) {
        faceRect.inset(faceRect.right - imageRect.right,
                faceRect.right - imageRect.right);
    }

    if (faceRect.bottom > imageRect.bottom) {
        faceRect.inset(faceRect.bottom - imageRect.bottom,
                faceRect.bottom - imageRect.bottom);
    }

    hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop,
            mAspectX != 0 && mAspectY != 0);

    mImageView.add(hv);
}
Project: TakeAndCrop    File: CropImageActivity.java
@Override
public void run() {
    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {

        @Override
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            if (mNumFaces > 0) {
                for (int i = 0; i < mNumFaces; i++) {
                    handleFace(mFaces[i]);
                }
            } else {
                makeDefault();
            }
            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() == 1) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }

            if (mNumFaces > 1) {
                Toast t = Toast.makeText(CropImageActivity.this,
                        "Multi face crop help",
                        Toast.LENGTH_SHORT);
                t.show();
            }
        }
    });
}