Source code examples for the Java class android.media.FaceDetector.Face
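Every snippet below follows the same core API: construct a FaceDetector sized to the image, pass an RGB_565 Bitmap to findFaces(), then query each Face for its mid-point, eye distance, and confidence. As a point of reference, here is a minimal self-contained sketch of that flow; the copy()-based conversion and the limit of four faces are assumptions made for this sketch, not code from any of the projects below.

import android.graphics.Bitmap;
import android.graphics.PointF;
import android.media.FaceDetector;
import android.media.FaceDetector.Face;

public class FaceDetectionSketch {

    /** Detects faces on a bitmap with the legacy android.media.FaceDetector and prints them. */
    public static void detectFaces(Bitmap source) {
        // findFaces() only accepts RGB_565 bitmaps (and the width must be even).
        Bitmap bmp565 = source.copy(Bitmap.Config.RGB_565, false);

        final int maxFaces = 4; // arbitrary upper bound chosen for this sketch
        FaceDetector detector = new FaceDetector(bmp565.getWidth(), bmp565.getHeight(), maxFaces);
        Face[] faces = new Face[maxFaces]; // must be at least maxFaces long
        int found = detector.findFaces(bmp565, faces);

        PointF mid = new PointF();
        for (int i = 0; i < found; i++) {
            faces[i].getMidPoint(mid);
            System.out.println("face " + i + ": mid=(" + mid.x + ", " + mid.y
                    + ") eyesDistance=" + faces[i].eyesDistance()
                    + " confidence=" + faces[i].confidence());
        }
    }
}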

Project: Offline3fAuth    File: SkinFaceDetector.java
/**
 * The Android face detector returns a {@link Face} object; this function converts it to a rectangle.
 * @param f The detected Android Face.
 * @param imgwidth Image width.
 * @param imgheight Image height.
 * @return The rectangle that contains the face.
 */
private Rect face2Rect(Face f, int imgwidth, int imgheight)
{
    f.getMidPoint(p);
    double eyesDistance = f.eyesDistance();

    int x, y, width, height;
    x = (int) Math.floor(p.x - 1.0 * eyesDistance);
    y = (int) Math.floor(p.y - 1.0 * eyesDistance);
    width = (int) Math.ceil(2.0 * eyesDistance);
    height = (int) Math.ceil(3.0 * eyesDistance);

    // Clamp the rectangle to the image bounds.
    if (x < 0)
        x = 0;
    if (y < 0)
        y = 0;
    if ((y + height) > imgheight)
        height = imgheight - y;
    if ((x + width) > imgwidth)
        width = imgwidth - x;

    Rect r = new Rect();
    r.set(x, y, x+width, y+height);

    return r;
}
Project: Offline3fAuth    File: SkinFaceDetector.java
/**
 * Creates an instance of {@link SkinFaceDetector}. 
 */
public SkinFaceDetector(){
    width = 640;
    height = 480;

    myFace = new FaceDetector.Face[NUMBER_OF_FACES]; // finds up to 4 faces in an image
    myFaceDetect = new FaceDetector(width, height, NUMBER_OF_FACES);
    faces = new LinkedList<Rect>();
    p = new PointF();
    time = 0;
}
Project: WiCamera3D    File: VideoSurfaceView2D.java
@Override
public void onFaceDetection(android.hardware.Camera.Face[] faces,
        Camera camera) {
    boolean isfacedet = StoredData.getBoolean(StoredData.M_FACETRACKING,
            false);
    // System.out.println("Face detection available: " + isfacedet);
    if (!isfacedet || (m_cameraIndex == m_camera_front)) {
        WiCameraActivity.m_fd_face.setVisibility(View.GONE);
        return;
    }
    if (m_isfacedetection) {
        WiCameraActivity.m_fd_face.setVisibility(View.VISIBLE);
        Rect[] rectarrayRects = new Rect[faces.length];

        for (int i = 0; i < faces.length; i++) {
            rectarrayRects[i] = faces[i].rect;
        }
        boolean isneedfocus = isNeedFocus(oldRect, rectarrayRects);
        WiCameraActivity.m_fd_face.setFaces(faces);
        if (isneedfocus) {

            if ((mFocusArea != null)
                    && (FOCU_STATE == STATE_SUCCESS
                            || FOCU_STATE == STATE_FAIL || FOCU_STATE == STATE_FOCUSING)) {
                mFocusArea = null;
                mMeteringArea = null;
                cameras.cancelAutoFocus();
                FOCU_STATE = STATE_IDLE;
            }
            mFocusArea = new ArrayList<Camera.Area>();
            mMeteringArea = new ArrayList<Camera.Area>();
            if (cameras != null) {
                for (int i = 0; i < faces.length; i++) {
                    mFocusArea.add(new Area(faces[i].rect, 100));
                    mMeteringArea.add(new Area(faces[i].rect, 100));
                }
            }
            setArea();
        }
        oldRect = rectarrayRects;
    }
}
Project: WiCamera3D    File: VideoSurfaceView.java
@Override
public void onFaceDetection(android.hardware.Camera.Face[] faces,
        Camera camera) {
    boolean isfacedet = StoredData.getBoolean(StoredData.M_FACETRACKING,
            false);
    // System.out.println("Face detection available: " + isfacedet);
    if (!isfacedet
            || (m_cameraIndex == m_camera_front
                    || Util.CAMERA_STATE == CAMERA_VIDEO
                    || WiCameraActivity.isContinus || (!WiCameraActivity.isCameraOpen))) {
        WiCameraActivity.m_fd_face.setVisibility(View.GONE);
        return;
    }
    if (m_isfacedetection) {
        WiCameraActivity.m_fd_face.setVisibility(View.VISIBLE);
        // if (FOCU_STATE == STATE_FOCUSING) {
        // WiCameraActivity.m_fd_face.setVisibility(View.GONE);
        // return;
        // }
        Rect[] rectarrayRects = new Rect[faces.length];

        for (int i = 0; i < faces.length; i++) {
            rectarrayRects[i] = faces[i].rect;
        }
        boolean isneedfocus = isNeedFocus(oldRect, rectarrayRects);
        WiCameraActivity.m_fd_face.setFaces(faces);
        if (isneedfocus) {

            if ((mFocusArea != null)
                    && (FOCU_STATE == STATE_SUCCESS || FOCU_STATE == STATE_FAIL)) {
                mFocusArea = null;
                mMeteringArea = null;
                cameras.cancelAutoFocus();
                FOCU_STATE = STATE_IDLE;
            }
            mFocusArea = new ArrayList<Camera.Area>();
            mMeteringArea = new ArrayList<Camera.Area>();
            if (cameras != null) {
                for (int i = 0; i < faces.length; i++) {
                    mFocusArea.add(new Area(faces[i].rect, 100));
                    mMeteringArea.add(new Area(faces[i].rect, 100));
                }
            }
            // FOCU_STATE = STATE_FOCUSING;
            setArea();
        }
        oldRect = rectarrayRects;
    }
}
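Both VideoSurfaceView variants above end by calling setArea(), which is not part of this listing. On the legacy Camera API the usual pattern is sketched below; this is an assumption about what setArea() does, reusing the snippet's fields (cameras, mFocusArea, mMeteringArea, FOCU_STATE), not WiCamera3D's actual implementation. Note that android.hardware.Camera.Face.rect is already expressed in the (-1000, -1000) to (1000, 1000) driver coordinate space that Camera.Area expects, which is why the face rects can be passed through unchanged.

// Hypothetical sketch of setArea(), under the assumptions named above.
private void setArea() {
    Camera.Parameters params = cameras.getParameters();
    if (params.getMaxNumFocusAreas() > 0) {
        params.setFocusAreas(mFocusArea);
    }
    if (params.getMaxNumMeteringAreas() > 0) {
        params.setMeteringAreas(mMeteringArea);
    }
    cameras.setParameters(params);
    FOCU_STATE = STATE_FOCUSING;
    cameras.autoFocus(new Camera.AutoFocusCallback() {
        @Override
        public void onAutoFocus(boolean success, Camera camera) {
            FOCU_STATE = success ? STATE_SUCCESS : STATE_FAIL;
        }
    });
}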
Project: amaroKontrol    File: GetCollectionPhotosTask.java
private Bitmap cutFaces(Bitmap photo) {
    if (photo == null)
        return null;
    else {
        try {
            photo = convert(photo, Bitmap.Config.RGB_565);
            Bitmap cropped;
            int finalHeight = BitmapHelper.convertDPtoPX(174, mBaseActivity);

            // detect faces
            float top = photo.getHeight();
            FaceDetector detector = new FaceDetector(photo.getWidth(), photo.getHeight(), 5);
            Face[] faces = new Face[5]; // must be at least as long as the detector's maxFaces
            int facesCount = detector.findFaces(photo, faces);
            for (int i = 0; i < facesCount; i++) {
                if (faces[i] == null)
                    continue;

                PointF point = new PointF();
                faces[i].getMidPoint(point);
                float y = point.y - faces[i].eyesDistance();
                if (y < top)
                    top = y;
            }

            if (facesCount > 0) {
                if (top + finalHeight > photo.getHeight())
                    top = photo.getHeight() - finalHeight;
                cropped = Bitmap.createBitmap(photo, 0, (int) top, photo.getWidth(), finalHeight);
            } else {
                int half = photo.getHeight() / 2 - finalHeight / 2;
                if (photo.getHeight() <= finalHeight)
                    half = 0;
                cropped = Bitmap.createBitmap(photo, 0, half, photo.getWidth(), finalHeight);
            }
            return cropped;
        } catch (Exception e) {
            return photo;
        }
    }
}
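The convert() helper called at the top of cutFaces() is not included in this listing. Since findFaces() requires RGB_565 input, a plausible reconstruction (an assumption, not the project's actual code) is:

// Hypothetical reconstruction of the convert() helper used above.
private Bitmap convert(Bitmap bitmap, Bitmap.Config config) {
    if (bitmap.getConfig() == config) {
        return bitmap; // already in the requested format
    }
    return bitmap.copy(config, false);
}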
Project: 365browser    File: FaceDetectionImpl.java
@Override
public void detect(SharedBufferHandle frameData, final int width, final int height,
        final DetectResponse callback) {
    final long numPixels = (long) width * height;
    // TODO(xianglu): https://crbug.com/670028 homogenize overflow checking.
    if (!frameData.isValid() || width <= 0 || height <= 0 || numPixels > (Long.MAX_VALUE / 4)) {
        Log.d(TAG, "Invalid argument(s).");
        callback.call(new FaceDetectionResult[0]);
        return;
    }

    ByteBuffer imageBuffer = frameData.map(0, numPixels * 4, MapFlags.none());
    if (imageBuffer.capacity() <= 0) {
        Log.d(TAG, "Failed to map from SharedBufferHandle.");
        callback.call(new FaceDetectionResult[0]);
        return;
    }

    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);

    // An int array is needed to construct a Bitmap. However, the ByteBuffer
    // we get from |sharedBufferHandle| is directly allocated and does not
    // have a backing array. Therefore we need to copy from |imageBuffer|
    // to create this intermediate Bitmap.
    // TODO(xianglu): Consider worker pool as appropriate threads.
    // http://crbug.com/655814
    bitmap.copyPixelsFromBuffer(imageBuffer);

    // A Bitmap must be in 565 format for findFaces() to work. See
    // http://androidxref.com/7.0.0_r1/xref/frameworks/base/media/java/android/media/FaceDetector.java#124
    //
    // It turns out that FaceDetector is not able to detect correctly if
    // simply using pixmap.setConfig(). The reason might be that findFaces()
    // needs non-premultiplied ARGB arrangement, while the alpha type in the
    // original image is premultiplied. We can use getPixels() which does
    // the unmultiplication while copying to a new array. See
    // http://androidxref.com/7.0.0_r1/xref/frameworks/base/graphics/java/android/graphics/Bitmap.java#538
    int[] pixels = new int[width * height];
    bitmap.getPixels(pixels, 0, width, 0, 0, width, height);
    final Bitmap unPremultipliedBitmap =
            Bitmap.createBitmap(pixels, width, height, Bitmap.Config.RGB_565);

    // FaceDetector creation and findFaces() might take a long time and trigger a
    // "StrictMode policy violation": they should happen in a background thread.
    AsyncTask.THREAD_POOL_EXECUTOR.execute(new Runnable() {
        @Override
        public void run() {
            final FaceDetector detector = new FaceDetector(width, height, mMaxFaces);
            Face[] detectedFaces = new Face[mMaxFaces];
            // findFaces() will stop at |mMaxFaces|.
            final int numberOfFaces = detector.findFaces(unPremultipliedBitmap, detectedFaces);

            FaceDetectionResult[] faceArray = new FaceDetectionResult[numberOfFaces];

            for (int i = 0; i < numberOfFaces; i++) {
                faceArray[i] = new FaceDetectionResult();

                final Face face = detectedFaces[i];
                final PointF midPoint = new PointF();
                face.getMidPoint(midPoint);
                final float eyesDistance = face.eyesDistance();

                faceArray[i].boundingBox = new RectF();
                faceArray[i].boundingBox.x = midPoint.x - eyesDistance;
                faceArray[i].boundingBox.y = midPoint.y - eyesDistance;
                faceArray[i].boundingBox.width = 2 * eyesDistance;
                faceArray[i].boundingBox.height = 2 * eyesDistance;
                // TODO(xianglu): Consider adding Face.confidence and Face.pose.

                faceArray[i].landmarks = new Landmark[0];
            }

            callback.call(faceArray);
        }
    });
}
Project: Masaccio    File: MasaccioImageView.java
public void setFaces(final Face[] faces) {

    if ((faces != null) && (faces.length > 0)) {

        mDetectedFaces = faces;

    } else {

        mDetectedFaces = null;
    }

    applyCrop();
}
Project: Masaccio    File: MasaccioImageView.java
private void getFaceOffsets(final Face[] faces, final float[] offsets, final float scaleFactor,
        final float width, final float height, final float maxOffsetX, final float maxOffsetY) {

    try {

        Face bestFace = null;

        float maxConfidence = 0;

        for (final Face face : faces) {

            final float faceConfidence = face.confidence();

            if (faceConfidence > maxConfidence) {

                maxConfidence = faceConfidence;
                bestFace = face;
            }
        }

        if (bestFace == null) {

            getDefaultOffsets(offsets, maxOffsetX, maxOffsetY);

            return;
        }

        final PointF midPoint = new PointF();

        bestFace.getMidPoint(midPoint);

        final float scaledOffsetX =
                (midPoint.x * scaleFactor) - ((width - maxOffsetX) * FACE_POSITION_RATIO_X);

        final float scaledOffsetY =
                (midPoint.y * scaleFactor) - ((height - maxOffsetY) * FACE_POSITION_RATIO_Y);

        if (Math.round(maxOffsetX) >= 0) {

            offsets[0] = Math.min(Math.max(0, scaledOffsetX), maxOffsetX);

        } else {

            offsets[0] = scaledOffsetX;
        }

        if (Math.round(maxOffsetY) >= 0) {

            offsets[1] = Math.min(Math.max(0, scaledOffsetY), maxOffsetY);

        } else {

            offsets[1] = scaledOffsetY;
        }


    } catch (final Exception e) {

        getDefaultOffsets(offsets, maxOffsetX, maxOffsetY);
    }
}
Project: Masaccio    File: MasaccioImageView.java
@Override
public Face[] process(final Bitmap bitmap) {

    final Map<Bitmap, Face[]> facesMap = mFacesMap;

    final Face[] preProcessed = facesMap.get(bitmap);

    if (preProcessed != null) {

        if (preProcessed == NO_FACES) {

            return null;
        }

        return preProcessed;
    }

    final Face[] faces = new Face[MAX_FACES];

    final Bitmap bitmap565 = convertTo565(bitmap);

    if (bitmap565 != null) {

        final FaceDetector faceDetector =
                new FaceDetector(bitmap565.getWidth(), bitmap565.getHeight(), MAX_FACES);

        final int faceCount = faceDetector.findFaces(bitmap565, faces);

        if (faceCount > 0) {

            final Face[] detected = new Face[faceCount];

            System.arraycopy(faces, 0, detected, 0, faceCount);

            facesMap.put(bitmap, detected);

            return detected;
        }
    }

    facesMap.put(bitmap, NO_FACES);

    return null;
}
Project: Masaccio    File: MasaccioImageView.java
private Face[] getFaces(final Bitmap bitmap) {

    return mFacesMap.get(bitmap);
}
Project: Masaccio    File: MasaccioImageView.java
public Face[] process(Bitmap bitmap);