Source code examples for the Java class android.hardware.Camera.Size

Project: react-native-webrtc    File: WebRTCModule.java
@SuppressWarnings("deprecation")
public WritableMap getCameraInfo(int index) {
    CameraInfo info = new CameraInfo();

    Size size = null;
    try {
        Camera.getCameraInfo(index, info);
        Camera camera = Camera.open(index);
        size = getMaxSupportedVideoSize(camera);
        camera.release();
    } catch (Exception e) {
        Logging.e("CameraEnumerationAndroid", "getCameraInfo failed on index " + index, e);

        return null;
    }
    WritableMap params = Arguments.createMap();
    String facing = info.facing == CameraInfo.CAMERA_FACING_FRONT ? "front" : "back";
    params.putString("label", "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation);
    params.putString("id", "" + index);
    params.putString("facing", facing);
    params.putString("kind", "video");
    params.putString("maxWidth", String.valueOf(size.width));
    params.putString("maxHeight", String.valueOf(size.height));

    return params;
}
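The method above returns null on failure, so callers should check the result. A minimal sketch of how it might be used to enumerate every camera on the device, assuming it lives in the same React Native module (Arguments and WritableArray come from the React Native bridge):

// Hypothetical caller: collects one WritableMap per camera into a WritableArray.
private WritableArray enumerateCameras() {
    WritableArray cameras = Arguments.createArray();
    for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
        WritableMap info = getCameraInfo(i);
        if (info != null) { // getCameraInfo returns null when the camera cannot be opened
            cameras.pushMap(info);
        }
    }
    return cameras;
}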
Project: QRCodeScanner    File: BarcodeCameraConfig.java
public BarcodeCameraConfig configPreviewSize2(int viewWidth, int viewHeight) {
    Parameters params = camera.getParameters();
    List<Size> sizes = params.getSupportedPreviewSizes();
    if (sizes == null || sizes.size() <= 0) {
        return this;
    }

    Size bestSize = null;
    int diff = Integer.MAX_VALUE;

    for (Size tmpSize : sizes) {
        int newDiff = Math.abs(tmpSize.width - viewWidth) + Math.abs(tmpSize.height - viewHeight);
        if (newDiff == 0) {
            bestSize = tmpSize;
            break;
        } else if (newDiff < diff) {
            bestSize = tmpSize;
            diff = newDiff;
        }
    }
    params.setPreviewSize(bestSize.width, bestSize.height);
    camera.setParameters(params);
    return this;
}
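A hedged usage sketch for configPreviewSize2: wait until the preview view has been measured, then pass its dimensions. The config and previewView names are placeholders, not part of the QRCodeScanner project. Note that Camera.Size values are reported in landscape orientation, so a portrait layout may need to swap width and height before the comparison.

// Hypothetical usage: run after layout so getWidth()/getHeight() are non-zero.
previewView.post(new Runnable() {
    @Override
    public void run() {
        config.configPreviewSize2(previewView.getWidth(), previewView.getHeight());
    }
});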
Project: react-native-webrtc    File: WebRTCModule.java
@SuppressWarnings("deprecation")
private Size getMaxSupportedVideoSize(Camera camera) {

    List<Camera.Size> sizes;

    if (camera.getParameters().getSupportedVideoSizes() != null) {
        sizes = camera.getParameters().getSupportedVideoSizes();
    } else {
        // Video sizes may be null, which indicates that all the supported
        // preview sizes are supported for video recording.
        sizes = camera.getParameters().getSupportedPreviewSizes();
    }

    int maxWidth = sizes.get(0).width;
    int maxHeight = sizes.get(0).height;

    for (Camera.Size size : sizes) {
        if (size.width > maxWidth && size.height > maxHeight) {
            maxWidth = size.width;
            maxHeight = size.height;
        }
    }

    // Camera.Size is a non-static inner class, so it must be created from a Camera instance.
    return camera.new Size(maxWidth, maxHeight);
}
Project: FaceRecognition    File: CamParaUtil.java
public Size getPropPreviewSize(List<Size> list, float th, int minWidth){
    Collections.sort(list, sizeComparator);

    int i = 0;
    for(Size s:list){
        if((s.width >= minWidth) && equalRate(s, th)){
            Log.i(TAG, "PreviewSize:w = " + s.width + "h = " + s.height);
            break;
        }
        i++;
    }
    if(i == list.size()){
        i = 0; // no size matched the target ratio and minimum width; fall back to the smallest one
    }
    return list.get(i);
}
Project: FaceRecognition    File: CamParaUtil.java
public Size getPropPictureSize(List<Size> list, float th, int minWidth){
    Collections.sort(list, sizeComparator);

    int i = 0;
    for(Size s:list){
        if((s.width >= minWidth) && equalRate(s, th)){
            Log.i(TAG, "PictureSize : w = " + s.width + "h = " + s.height);
            break;
        }
        i++;
    }
    if(i == list.size()){
        i = 0; // no size matched the target ratio and minimum width; fall back to the smallest one
    }
    return list.get(i);
}
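Both CamParaUtil methods above rely on an equalRate(Size, float) helper and a sizeComparator field that this listing does not show. The following is only a plausible sketch of them, assuming the list is sorted by ascending width and "equal rate" means the aspect ratio lies within a small tolerance of the target:

// Sketch of the assumed helpers (java.util.Comparator); not taken from the original project.
private boolean equalRate(Size s, float rate) {
    float r = (float) s.width / (float) s.height;
    return Math.abs(r - rate) <= 0.03f; // tolerance value is an assumption
}

// Ascending width: the loops above pick the smallest size that satisfies minWidth,
// and index 0 is the overall smallest fallback.
private final Comparator<Size> sizeComparator = new Comparator<Size>() {
    @Override
    public int compare(Size lhs, Size rhs) {
        return Integer.compare(lhs.width, rhs.width);
    }
};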
Project: WeiXinRecordedDemo    File: MediaRecorderBase.java
/** Set up the preview callback. */
protected void setPreviewCallback() {
    Size size = mParameters.getPreviewSize();
    if (size != null) {
        PixelFormat pf = new PixelFormat();
        PixelFormat.getPixelFormatInfo(mParameters.getPreviewFormat(), pf);
        int buffSize = size.width * size.height * pf.bitsPerPixel / 8;
        try {
            camera.addCallbackBuffer(new byte[buffSize]);
            camera.addCallbackBuffer(new byte[buffSize]);
            camera.addCallbackBuffer(new byte[buffSize]);
            camera.setPreviewCallbackWithBuffer(this);
        } catch (OutOfMemoryError e) {
            Log.e("Yixia", "startPreview...setPreviewCallback...", e);
        }
        Log.e("Yixia", "startPreview...setPreviewCallbackWithBuffer...width:" + size.width + " height:" + size.height);
    } else {
        camera.setPreviewCallback(this);
    }
}
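The buffer size above is derived via PixelFormat; ImageFormat.getBitsPerPixel (android.graphics.ImageFormat) is the more direct source for camera preview formats such as NV21, which uses 12 bits per pixel. A sketch of that alternative, reusing the camera and mParameters fields from the snippet; queuing several buffers lets the camera fill one while earlier frames are still being processed.

// Alternative buffer-size calculation (sketch only).
int previewFormat = mParameters.getPreviewFormat();
Size previewSize = mParameters.getPreviewSize();
int bitsPerPixel = ImageFormat.getBitsPerPixel(previewFormat); // -1 if the format is unknown
if (bitsPerPixel > 0 && previewSize != null) {
    int bufferSize = previewSize.width * previewSize.height * bitsPerPixel / 8;
    camera.addCallbackBuffer(new byte[bufferSize]);
}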
Project: 19porn    File: OpenGlUtils.java
public static int loadTexture(final IntBuffer data, final Size size, final int usedTexId) {
    int textures[] = new int[1];
    if (usedTexId == NO_TEXTURE) {
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, size.width, size.height,
                0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
    } else {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
        GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, size.width,
                size.height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
        textures[0] = usedTexId;
    }
    return textures[0];
}
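loadTexture allocates a new texture when it is handed NO_TEXTURE and otherwise updates the existing texture in place. A hypothetical calling pattern on the GL thread (rgbaBuffer and previewSize are placeholder names):

// First frame: NO_TEXTURE forces glGenTextures + glTexImage2D (allocation).
int textureId = OpenGlUtils.loadTexture(rgbaBuffer, previewSize, OpenGlUtils.NO_TEXTURE);

// Later frames: the existing id takes the glTexSubImage2D path, which avoids a
// reallocation but only works while the frame dimensions stay the same.
textureId = OpenGlUtils.loadTexture(rgbaBuffer, previewSize, textureId);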
Project: PeSanKita-android    File: CameraView.java
private Rect getCroppedRect(Size cameraPreviewSize, Rect visibleRect, int rotation) {
  final int previewWidth  = cameraPreviewSize.width;
  final int previewHeight = cameraPreviewSize.height;

  if (rotation % 180 > 0) rotateRect(visibleRect);

  float scale = (float) previewWidth / visibleRect.width();
  if (visibleRect.height() * scale > previewHeight) {
    scale = (float) previewHeight / visibleRect.height();
  }
  final float newWidth  = visibleRect.width()  * scale;
  final float newHeight = visibleRect.height() * scale;
  final float centerX   = (VERSION.SDK_INT < 14 || isTroublemaker()) ? previewWidth - newWidth / 2 : previewWidth / 2;
  final float centerY   = previewHeight / 2;

  visibleRect.set((int) (centerX - newWidth  / 2),
                  (int) (centerY - newHeight / 2),
                  (int) (centerX + newWidth  / 2),
                  (int) (centerY + newHeight / 2));

  if (rotation % 180 > 0) rotateRect(visibleRect);
  return visibleRect;
}
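getCroppedRect calls a rotateRect helper that the listing omits. A plausible sketch, assuming it merely swaps the rect's axes so a portrait view rect can be compared against the landscape-oriented preview size and swapped back afterwards:

// Assumed helper (not shown in the listing): swaps the rect's horizontal and vertical extents.
private void rotateRect(Rect rect) {
    rect.set(rect.top, rect.left, rect.bottom, rect.right);
}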
Project: PaoMovie    File: MagicCameraDisplay.java
private void setUpCamera() {
    mGLSurfaceView.queueEvent(new Runnable() {

        @Override
        public void run() {
            if (mTextureId == OpenGLUtils.NO_TEXTURE) {
                mTextureId = OpenGLUtils.getExternalOESTextureID();
                mSurfaceTexture = new SurfaceTexture(mTextureId);
                mSurfaceTexture.setOnFrameAvailableListener(mOnFrameAvailableListener);
            }
            Size size = CameraEngine.getPreviewSize();
            int orientation = CameraEngine.getOrientation();
            if (orientation == 90 || orientation == 270) {
                mImageWidth = size.height;
                mImageHeight = size.width;
            } else {
                mImageWidth = size.width;
                mImageHeight = size.height;
            }
            mCameraInputFilter.onOutputSizeChanged(mImageWidth, mImageHeight);
            CameraEngine.startPreview(mSurfaceTexture);
        }
    });
}
Project: mao-android    File: CameraRenderer.java
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
    log.d("onPreviewFrame");
    final Size previewSize = camera.getParameters().getPreviewSize();
    if (mGLRgbBuffer == null) {
        mGLRgbBuffer = IntBuffer.allocate(previewSize.width * previewSize.height);
    }
    if (mRunOnDraw.isEmpty()) {
        runOnDraw(new Runnable() {
            @Override
            public void run() {
                YuvDecoder.YUVtoRBGA(data, previewSize.width, previewSize.height, mGLRgbBuffer.array());
                mGLTextureId = OpenGlUtils.loadTexture(mGLRgbBuffer, previewSize, mGLTextureId);
                camera.addCallbackBuffer(data);

                if (mImageWidth != previewSize.width) {
                    mImageWidth = previewSize.width;
                    mImageHeight = previewSize.height;
                    adjustImageScaling();
                }
            }
        });
    }
}
Project: mao-android    File: OpenGlUtils.java
public static int loadTexture(final IntBuffer data, final Size size, final int usedTexId) {
    int textures[] = new int[1];
    if (usedTexId == NO_TEXTURE) {
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, size.width, size.height,
                0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
    } else {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
        GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, size.width,
                size.height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
        textures[0] = usedTexId;
    }
    return textures[0];
}
Project: Cable-Android    File: CameraView.java
private Rect getCroppedRect(Size cameraPreviewSize, Rect visibleRect, int rotation) {
  final int previewWidth  = cameraPreviewSize.width;
  final int previewHeight = cameraPreviewSize.height;

  if (rotation % 180 > 0) rotateRect(visibleRect);

  float scale = (float) previewWidth / visibleRect.width();
  if (visibleRect.height() * scale > previewHeight) {
    scale = (float) previewHeight / visibleRect.height();
  }
  final float newWidth  = visibleRect.width()  * scale;
  final float newHeight = visibleRect.height() * scale;
  final float centerX   = (VERSION.SDK_INT < 14 || isTroublemaker()) ? previewWidth - newWidth / 2 : previewWidth / 2;
  final float centerY   = previewHeight / 2;

  visibleRect.set((int) (centerX - newWidth  / 2),
                  (int) (centerY - newHeight / 2),
                  (int) (centerX + newWidth  / 2),
                  (int) (centerY + newHeight / 2));

  if (rotation % 180 > 0) rotateRect(visibleRect);
  return visibleRect;
}
Project: meipai-Android    File: MediaRecorderBase.java
/** Set up the preview callback. */
protected void setPreviewCallback() {
    Size size = mParameters.getPreviewSize();
    if (size != null) {
        PixelFormat pf = new PixelFormat();
        PixelFormat.getPixelFormatInfo(mParameters.getPreviewFormat(), pf);
        int buffSize = size.width * size.height * pf.bitsPerPixel / 8;
        try {
            camera.addCallbackBuffer(new byte[buffSize]);
            camera.addCallbackBuffer(new byte[buffSize]);
            camera.addCallbackBuffer(new byte[buffSize]);
            camera.setPreviewCallbackWithBuffer(this);
        } catch (OutOfMemoryError e) {
            Log.e("Yixia", "startPreview...setPreviewCallback...", e);
        }
        Log.e("Yixia", "startPreview...setPreviewCallbackWithBuffer...width:" + size.width + " height:" + size.height);
    } else {
        camera.setPreviewCallback(this);
    }
}
Project: live_master    File: CamParaUtil.java
public Size getPreviewSize(List<Camera.Size> list, int width, float th) {
    Collections.sort(list, sizeComparator);

    // Snap the requested aspect ratio to the nearer of 4:3 (1.33) and 16:9 (1.77).
    float rate = (Math.abs(th - 1.33f) < Math.abs(th - 1.77f)) ? 1.33f : 1.77f;

    int i = 0;
    for (Size s : list) {
        if ((s.height > width) && equalRate(s, rate)) {
            Log.i(TAG, "Selected preview size: w = " + s.width + ", h = " + s.height + ", rate = " + rate);
            break;
        }
        i++;
    }
    if (i == list.size()) {
        i = 0; // no size matched; fall back to the first entry to avoid an out-of-bounds index
    }
    return list.get(i);
}
Project: live_master    File: CamParaUtil.java
public Size getPropPictureSize(List<Size> list, float th, int minWidth){
    Collections.sort(list, sizeComparator);

    int i = 0;
    for(Size s:list){
        if((s.width >= minWidth) && equalRate(s, th)){
            Log.i(TAG, "PictureSize : w = " + s.width + "h = " + s.height);
            break;
        }
        i++;
    }
    if(i == list.size()){
        i = 0; // no size matched the target ratio and minimum width; fall back to the smallest one
    }
    return list.get(i);
}
Project: TAG    File: GPUImageRenderer.java
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
    final Size previewSize = camera.getParameters().getPreviewSize();
    if (mGLRgbBuffer == null) {
        mGLRgbBuffer = IntBuffer.allocate(previewSize.width * previewSize.height);
    }
    if (mRunOnDraw.isEmpty()) {
        runOnDraw(new Runnable() {
            @Override
            public void run() {
                GPUImageNativeLibrary.YUVtoRBGA(data, previewSize.width, previewSize.height,
                        mGLRgbBuffer.array());
                mGLTextureId = OpenGlUtils.loadTexture(mGLRgbBuffer, previewSize, mGLTextureId);
                camera.addCallbackBuffer(data);

                if (mImageWidth != previewSize.width) {
                    mImageWidth = previewSize.width;
                    mImageHeight = previewSize.height;
                    adjustImageScaling();
                }
            }
        });
    }
}
Project: TAG    File: OpenGlUtils.java
public static int loadTexture(final IntBuffer data, final Size size, final int usedTexId) {
    int textures[] = new int[1];
    if (usedTexId == NO_TEXTURE) {
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, size.width, size.height,
                0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
    } else {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
        GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, size.width,
                size.height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
        textures[0] = usedTexId;
    }
    return textures[0];
}
Project: PreRect    File: CameraConfigurationManager.java
public void setDesiredCameraParameters(Camera camera, int cameraId) {
    Camera.Parameters parameters = camera.getParameters();
    parameters.setPreviewFormat(ImageFormat.NV21);
    parameters.setPreviewSize(cameraResolution.x, cameraResolution.y);
    setZoom(parameters);

    camera.setDisplayOrientation(getDisplayOrientation(cameraId));

    // Set the picture size
    if (this.pictureSize == null) {
        WindowManager manager = (WindowManager) mContext
                .getSystemService(Context.WINDOW_SERVICE);
        Display display = manager.getDefaultDisplay();
        List<Size> pictureSizes = parameters.getSupportedPictureSizes();
        this.setPicutreSize(pictureSizes, display.getWidth(),
                display.getHeight());
    }
    try {
        parameters.setPictureSize(this.pictureSize.width,
                this.pictureSize.height);
    } catch (Exception e) {
        e.printStackTrace();
    }
    camera.setParameters(parameters);
}
Project: ROLF-EV3    File: VideoQuality.java
/**
 * Checks whether the requested resolution is supported by the camera.
 * If it is not, the request is adjusted to the closest supported resolution.
 **/
public static VideoQuality determineClosestSupportedResolution(Camera.Parameters parameters, VideoQuality quality) {
    VideoQuality v = quality.clone();
    int minDist = Integer.MAX_VALUE;
    String supportedSizesStr = "Supported resolutions: ";
    List<Size> supportedSizes = parameters.getSupportedPreviewSizes();
    for (Iterator<Size> it = supportedSizes.iterator(); it.hasNext();) {
        Size size = it.next();
        supportedSizesStr += size.width+"x"+size.height+(it.hasNext()?", ":"");
        int dist = Math.abs(quality.resX - size.width);
        if (dist<minDist) {
            minDist = dist;
            v.resX = size.width;
            v.resY = size.height;
        }
    }
    Log.v(TAG, supportedSizesStr);
    if (quality.resX != v.resX || quality.resY != v.resY) {
        Log.v(TAG,"Resolution modified: "+quality.resX+"x"+quality.resY+"->"+v.resX+"x"+v.resY);
    }

    return v;
}
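A hypothetical way to apply the result: clamp the requested quality first, then push the chosen resolution into the camera parameters. The VideoQuality constructor arguments shown here are assumptions rather than part of the listing.

// Sketch: request 720p, fall back to whatever the camera actually supports.
Camera.Parameters parameters = camera.getParameters();
VideoQuality requested = new VideoQuality(1280, 720, 30, 2000000); // resX, resY, framerate, bitrate (assumed)
VideoQuality supported = VideoQuality.determineClosestSupportedResolution(parameters, requested);
parameters.setPreviewSize(supported.resX, supported.resY);
camera.setParameters(parameters);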
Project: VideoCamera    File: CameraWrapper.java
@TargetApi(VERSION_CODES.HONEYCOMB)
protected List<Size> getSupportedVideoSizes(int currentSdkInt) {
    Parameters params = mNativeCamera.getNativeCameraParameters();

    List<Size> supportedVideoSizes;
    if (currentSdkInt < Build.VERSION_CODES.HONEYCOMB) {
        CLog.e(CLog.CAMERA, "Using supportedPreviewSizes iso supportedVideoSizes due to API restriction");
        supportedVideoSizes = params.getSupportedPreviewSizes();
    } else if (params.getSupportedVideoSizes() == null) {
        CLog.e(CLog.CAMERA, "Using supportedPreviewSizes because supportedVideoSizes is null");
        supportedVideoSizes = params.getSupportedPreviewSizes();
    } else {
        supportedVideoSizes = params.getSupportedVideoSizes();
    }

    return supportedVideoSizes;
}
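A hypothetical caller that picks the largest entry from the returned list for recording; the area comparison is an assumption, not part of the VideoCamera project.

// Sketch: choose the video size with the largest pixel area.
List<Size> videoSizes = getSupportedVideoSizes(Build.VERSION.SDK_INT);
Size recordingSize = videoSizes.get(0);
for (Size candidate : videoSizes) {
    if (candidate.width * candidate.height > recordingSize.width * recordingSize.height) {
        recordingSize = candidate;
    }
}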
Project: AndroidCamera    File: GPUImageRenderer.java
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
    final Size previewSize = camera.getParameters().getPreviewSize();
    if (mGLRgbBuffer == null) {
        mGLRgbBuffer = IntBuffer.allocate(previewSize.width * previewSize.height);
    }
    if (mRunOnDraw.isEmpty()) {
        runOnDraw(new Runnable() {
            @Override
            public void run() {
                GPUImageNativeLibrary.YUVtoRBGA(data, previewSize.width, previewSize.height,
                        mGLRgbBuffer.array());
                mGLTextureId = OpenGlUtils.loadTexture(mGLRgbBuffer, previewSize, mGLTextureId);
                camera.addCallbackBuffer(data);

                if (mImageWidth != previewSize.width) {
                    mImageWidth = previewSize.width;
                    mImageHeight = previewSize.height;
                    adjustImageScaling();
                }
            }
        });
    }
}
Project: AndroidCamera    File: OpenGlUtils.java
public static int loadTexture(final IntBuffer data, final Size size, final int usedTexId) {
    int textures[] = new int[1];
    if (usedTexId == NO_TEXTURE) {
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, size.width, size.height,
                0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
    } else {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
        GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, size.width,
                size.height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
        textures[0] = usedTexId;
    }
    return textures[0];
}
Project: VisiSynth    File: Preview.java
public void onPreviewFrame(byte[] data, Camera camera){
    Log.d("TAG", "frame1 "+data.length);
    Camera.Size previewSize = mCamera.getParameters().getPreviewSize();
    YuvImage yuvimage=new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);

    // Convert to Bitmap
    final double [][] imgmat = imgpro.BufferedYUVImage2Mat(yuvimage.getYuvData(),
            yuvimage.getWidth(), yuvimage.getHeight(), 640, 480);

    List<Double> ld = imgpro.AnalyzeMat(imgmat, 0.6);

    String logline = "points:";
    for(Double p : ld)
        logline += " " + (1-p);
    Log.d("TAG", logline);
    double [] f = new double[ld.size()];
    for(int i = 0; i < f.length; i ++)
        f[i] = Math.pow(2.0, ld.get(i) * 2) * 440.0;
    play(f);
}
Project: CommunityService    File: CameraConfigurationManager.java
/**
 * Sets the camera up to take preview images which are used for both preview and decoding.
 * We detect the preview format here so that buildLuminanceSource() can build an appropriate
 * LuminanceSource subclass. In the future we may want to force YUV420SP as it's the smallest,
 * and the planar Y can be used for barcode scanning without a copy in some cases.
 */
void setDesiredCameraParameters(Camera camera) {
    Camera.Parameters parameters = camera.getParameters();
    List<Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
    int position = 0;
    if (supportedPreviewSizes.size() > 2) {
        position = supportedPreviewSizes.size() / 2 + 1;
    } else {
        position = supportedPreviewSizes.size() / 2;
    }

    int width = supportedPreviewSizes.get(position).width;
    int height = supportedPreviewSizes.get(position).height;
    Log.d(TAG, "Setting preview size: " + width + "x" + height);
    camera.setDisplayOrientation(90);
    cameraResolution.x = width;
    cameraResolution.y = height;
    parameters.setPreviewSize(width, height);
    setFlash(parameters);
    setZoom(parameters);
    //setSharpness(parameters);
    camera.setParameters(parameters);
}
Project: miku    File: GPUImageRenderer.java
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
  final Size previewSize = camera.getParameters().getPreviewSize();
  if (mGLRgbBuffer == null) {
    mGLRgbBuffer = IntBuffer.allocate(previewSize.width * previewSize.height);
  }
  if (mRunOnDraw.isEmpty()) {
    runOnDraw(new Runnable() {
      @Override
      public void run() {
        GPUImageNativeLibrary.YUVtoRBGA(data, previewSize.width, previewSize.height,
            mGLRgbBuffer.array());
        mGLTextureId = OpenGlUtils.loadTexture(mGLRgbBuffer, previewSize, mGLTextureId);
        camera.addCallbackBuffer(data);

        if (mImageWidth != previewSize.width) {
          mImageWidth = previewSize.width;
          mImageHeight = previewSize.height;
          adjustImageScaling();
        }
      }
    });
  }
}
Project: ArCamera    File: CamParaUtil.java
public  Size getPropPreviewSize(List<Size> list, float th, int minWidth){
    Collections.sort(list, sizeComparator);

    int i = 0;
    for(Size s:list){
        if((s.width >= minWidth) && equalRate(s, th)){
            Log.i(TAG, "PreviewSize:w = " + s.width + "h = " + s.height);
            break;
        }
        i++;
    }
    if(i == list.size()){
        i = 0; // no size matched the target ratio and minimum width; fall back to the smallest one
    }
    return list.get(i);
}
Project: ArCamera    File: CamParaUtil.java
public Size getPropPictureSize(List<Size> list, float th, int minWidth){
    Collections.sort(list, sizeComparator);

    int i = 0;
    for(Size s:list){
        if((s.width >= minWidth) && equalRate(s, th)){
            Log.i(TAG, "PictureSize : w = " + s.width + "h = " + s.height);
            break;
        }
        i++;
    }
    if(i == list.size()){
        i = 0; // no size matched the target ratio and minimum width; fall back to the smallest one
    }
    return list.get(i);
}
Project: Document-Scanner    File: DocumentCameraView.java
public void setMaxPictureResolution() {
    int maxWidth = 0;
    Size curRes = null;
    for (Size r : getPictureResolutionList()) {
        Log.d(TAG, "supported picture resolution: " + r.width + "x" + r.height);
        if (r.width > maxWidth) {
            maxWidth = r.width;
            curRes = r;
        }
    }

    if (curRes != null) {
        Camera.Parameters parameters = mCamera.getParameters();
        parameters.setPictureSize(curRes.width, curRes.height);
        mCamera.setParameters(parameters);
        Log.d(TAG, "selected picture resolution: " + curRes.width + "x" + curRes.height);
    }
}
Project: Document-Scanner    File: DocumentCameraView.java
public void setMaxPreviewResolution() {
    int maxWidth = 0;
    Size curRes = null;

    mCamera.lock();

    for (Size r : getResolutionList()) {
        if (r.width > maxWidth) {
            Log.d(TAG, "supported preview resolution: " + r.width + "x" + r.height);
            maxWidth = r.width;
            curRes = r;
        }
    }

    if (curRes != null) {
        setResolution(curRes);
        Log.d(TAG, "selected preview resolution: " + curRes.width + "x" + curRes.height);
    }
}
Project: prayer-times-android    File: CameraSurfaceView.java
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

    if (mCamera == null) {
        return;
    }
    // Now that the size is known, set up the camera parameters and begin
    // the preview.

    Camera.Parameters parameters = mCamera.getParameters();
    Size size = getBestPreviewSize(width, height, parameters);
    if (size != null) {
        parameters.setPreviewSize(size.width, size.height);
    }
    mCamera.setParameters(parameters);

    try {
        mCamera.startPreview();
    } catch (Exception e) {
        Crashlytics.logException(e);
    }
}
Project: prayer-times-android    File: CameraSurfaceView.java
@Nullable
private Camera.Size getBestPreviewSize(int width, int height, @NonNull Camera.Parameters parameters) {
    Camera.Size result = null;
    double aspect = width / (double) height;
    for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
        if ((size.width >= width) && (size.height >= height)) {
            if (result == null) {
                result = size;
            } else {
                if (Math.abs((result.width / (double) result.height - aspect))
                        > Math.abs((size.width / (double) size.height - aspect))) {
                    result = size;
                }
            }
        }
    }

    return result;
}
Project: Android_CCTV    File: VideoQuality.java
/**
 * Checks whether the requested resolution is supported by the camera.
 * If it is not, the request is adjusted to the closest supported resolution.
 **/
public static VideoQuality determineClosestSupportedResolution(Camera.Parameters parameters, VideoQuality quality) {
    VideoQuality v = quality.clone();
    int minDist = Integer.MAX_VALUE;
    String supportedSizesStr = "Supported resolutions: ";
    List<Size> supportedSizes = parameters.getSupportedPreviewSizes();
    for (Iterator<Size> it = supportedSizes.iterator(); it.hasNext();) {
        Size size = it.next();
        supportedSizesStr += size.width+"x"+size.height+(it.hasNext()?", ":"");
        int dist = Math.abs(quality.resX - size.width);
        if (dist<minDist) {
            minDist = dist;
            v.resX = size.width;
            v.resY = size.height;
        }
    }
    Log.v(TAG, supportedSizesStr);
    if (quality.resX != v.resX || quality.resY != v.resY) {
        Log.v(TAG,"Resolution modified: "+quality.resX+"x"+quality.resY+"->"+v.resX+"x"+v.resY);
    }

    return v;
}
Project: Endoscope    File: VideoQuality.java
/**
 * Checks whether the requested resolution is supported by the camera.
 * If it is not, the request is adjusted to the closest supported resolution.
 **/
public static VideoQuality determineClosestSupportedResolution(Camera.Parameters parameters, VideoQuality quality) {
    VideoQuality v = quality.clone();
    int minDist = Integer.MAX_VALUE;
    String supportedSizesStr = "Supported resolutions: ";
    List<Size> supportedSizes = parameters.getSupportedPreviewSizes();
    for (Iterator<Size> it = supportedSizes.iterator(); it.hasNext();) {
        Size size = it.next();
        supportedSizesStr += size.width+"x"+size.height+(it.hasNext()?", ":"");
        int dist = Math.abs(quality.resX - size.width);
        if (dist<minDist) {
            minDist = dist;
            v.resX = size.width;
            v.resY = size.height;
        }
    }
    Log.v(TAG, supportedSizesStr);
    if (quality.resX != v.resX || quality.resY != v.resY) {
        Log.v(TAG,"Resolution modified: "+quality.resX+"x"+quality.resY+"->"+v.resX+"x"+v.resY);
    }

    return v;
}
Project: Rocket.Chat-android    File: CameraManager.java
public void setupCameraAndStartPreview(SurfaceHolder sf, Size sz, int displayRotation) {
    stopCameraPreview();

    cameraRotationDegree = CameraHelper.setCameraDisplayOrientation(defaultCameraID, camera, displayRotation);

    chooseCamcorderProfile(sz);

    // tweak profile
    profile.fileFormat = MediaRecorder.OutputFormat.THREE_GPP;
    profile.audioSampleRate = 16000;
    profile.audioChannels = 1;
    profile.audioBitRate = 96000;

    Parameters param = camera.getParameters();

    param.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
    param.setPreviewSize(profile.videoFrameWidth, profile.videoFrameHeight);
    if (Build.VERSION.SDK_INT >= 14) {
        param.setRecordingHint(true);
    }
    camera.setParameters(param);

    if (setDisplay(sf)) {
        startCameraPreview();
    }   
}
Project: Rocket.Chat-android    File: CameraManager.java
private void chooseCamcorderProfile(Size sizeHint) {
    // For android 2.3 devices video quality = low
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB)
        profile = (CamcorderProfile.get(CamcorderProfile.QUALITY_LOW));
    else {
        // For >= Android 3.0 devices select 720p, 480p or low quality of video
        if (CamcorderProfile.hasProfile(getCameraID(), CamcorderProfile.QUALITY_720P)
                && (sizeHint == null || sizeHint.height >= 720)) {
            profile = (CamcorderProfile.get(CamcorderProfile.QUALITY_720P));
            return;
        }

        if (CamcorderProfile.hasProfile(getCameraID(), CamcorderProfile.QUALITY_480P)
                && (sizeHint == null || sizeHint.height >= 480)) {
            profile = (CamcorderProfile.get(CamcorderProfile.QUALITY_480P));
            return;
        }

        profile = (CamcorderProfile.get(CamcorderProfile.QUALITY_LOW));
    }
}
Project: RemoteEye    File: VideoQuality.java
/**
 * Checks whether the requested resolution is supported by the camera.
 * If it is not, the request is adjusted to the closest supported resolution.
 **/
public static VideoQuality determineClosestSupportedResolution(Camera.Parameters parameters, VideoQuality quality) {
    VideoQuality v = quality.clone();
    int minDist = Integer.MAX_VALUE;
    String supportedSizesStr = "Supported resolutions: ";
    List<Size> supportedSizes = parameters.getSupportedPreviewSizes();
    for (Iterator<Size> it = supportedSizes.iterator(); it.hasNext();) {
        Size size = it.next();
        supportedSizesStr += size.width+"x"+size.height+(it.hasNext()?", ":"");
        int dist = Math.abs(quality.resX - size.width);
        if (dist<minDist) {
            minDist = dist;
            v.resX = size.width;
            v.resY = size.height;
        }
    }
    Log.v(TAG, supportedSizesStr);
    if (quality.resX != v.resX || quality.resY != v.resY) {
        Log.v(TAG,"Resolution modified: "+quality.resX+"x"+quality.resY+"->"+v.resX+"x"+v.resY);
    }

    return v;
}
Project: AndroidQrCodeScanner    File: MainActivity.java
public void onPreviewFrame(byte[] data, Camera camera) {
    Parameters parameters = camera.getParameters();
    Size size = parameters.getPreviewSize();

    Image barcode = new Image(size.width, size.height, "Y800");
    barcode.setData(data);

    int result = scanner.scanImage(barcode);

    if (result != 0) {
        previewing = false;
        mCamera.setPreviewCallback(null);
        mCamera.stopPreview();

        SymbolSet syms = scanner.getResults();
        for (Symbol sym : syms) {
          //  scanText.setText("barcode result " + sym.getData());
            Toast.makeText(MainActivity.this,sym.getData(),Toast.LENGTH_SHORT).show();
            barcodeScanned = false;
            mCamera.setPreviewCallback(previewCb);
            mCamera.startPreview();
          //  previewing = true;
            mCamera.autoFocus(autoFocusCB);
        }
    }
}
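The callback above assumes a ZBar ImageScanner stored in a scanner field. A plausible initialization sketch using the net.sourceforge.zbar API; the density values are arbitrary, and setConfig(0, ...) applies the setting to all symbologies.

// Assumed scanner setup (not shown in the listing).
ImageScanner scanner = new ImageScanner();
scanner.setConfig(0, Config.X_DENSITY, 3);
scanner.setConfig(0, Config.Y_DENSITY, 3);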
Project: pause-resume-video-recording    File: CameraCaptureActivity.java
private Size determineBestSize(List<Size> sizes, int widthThreshold) {
    Size bestSize = null;
    Size size;
    int numOfSizes = sizes.size();
    for (int i = 0; i < numOfSizes; i++) {
        size = sizes.get(i);
        boolean isDesireRatio = (size.width / 4) == (size.height / 3);
        boolean isBetterSize = (bestSize == null) || size.width > bestSize.width;

        if (isDesireRatio && isBetterSize) {
            bestSize = size;
        }
    }

    if (bestSize == null) {
        Log.d(TAG, "cannot find the best camera size");
        return sizes.get(sizes.size() - 1);
    }

    return bestSize;
}
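A hypothetical usage of determineBestSize. Note that the widthThreshold argument is never consulted inside the method, so the value passed here has no effect; mCamera stands in for an open camera.

// Sketch: prefer the widest 4:3 preview size the camera reports.
Camera.Parameters parameters = mCamera.getParameters();
Size preferred = determineBestSize(parameters.getSupportedPreviewSizes(), 1280);
parameters.setPreviewSize(preferred.width, preferred.height);
mCamera.setParameters(parameters);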
Project: libstreaming_android_studio    File: VideoQuality.java
/**
 * Checks whether the requested resolution is supported by the camera.
 * If it is not, the request is adjusted to the closest supported resolution.
 **/
public static VideoQuality determineClosestSupportedResolution(Camera.Parameters parameters, VideoQuality quality) {
    VideoQuality v = quality.clone();
    int minDist = Integer.MAX_VALUE;
    String supportedSizesStr = "Supported resolutions: ";
    List<Size> supportedSizes = parameters.getSupportedPreviewSizes();
    for (Iterator<Size> it = supportedSizes.iterator(); it.hasNext();) {
        Size size = it.next();
        supportedSizesStr += size.width+"x"+size.height+(it.hasNext()?", ":"");
        int dist = Math.abs(quality.resX - size.width);
        if (dist<minDist) {
            minDist = dist;
            v.resX = size.width;
            v.resY = size.height;
        }
    }
    Log.v(TAG, supportedSizesStr);
    if (quality.resX != v.resX || quality.resY != v.resY) {
        Log.v(TAG,"Resolution modified: "+quality.resX+"x"+quality.resY+"->"+v.resX+"x"+v.resY);
    }

    return v;
}