Java code examples for the class android.hardware.camera2.params.StreamConfigurationMap
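All of the snippets collected below follow the same basic pattern: read the SCALER_STREAM_CONFIGURATION_MAP key from a camera's CameraCharacteristics, then ask the resulting StreamConfigurationMap for the output sizes supported by a particular output class or image format. As a minimal sketch of that pattern (the camera id "0", the SurfaceTexture target, and the context/TAG names are assumptions made purely for illustration):

// Minimal sketch: obtain the StreamConfigurationMap for a camera and list the
// preview sizes it supports. Assumes camera id "0" exists and that the preview
// target is a SurfaceTexture; context is any Context, TAG is any log tag.
try {
    CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    CameraCharacteristics characteristics = manager.getCameraCharacteristics("0");
    StreamConfigurationMap map =
            characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (map != null) {
        for (android.util.Size size : map.getOutputSizes(SurfaceTexture.class)) {
            Log.d(TAG, "Supported preview size: " + size);
        }
    }
} catch (CameraAccessException e) {
    e.printStackTrace();
}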

Project: PXLSRT    File: Camera2.java
/**
 * <p>Collects some information from {@link #mCameraCharacteristics}.</p>
 * <p>This rewrites {@link #mPreviewSizes}, {@link #mPictureSizes}, and optionally,
 * {@link #mAspectRatio}.</p>
 */
private void collectCameraInfo() {
    StreamConfigurationMap map = mCameraCharacteristics.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (map == null) {
        throw new IllegalStateException("Failed to get configuration map: " + mCameraId);
    }
    mPreviewSizes.clear();
    for (android.util.Size size : map.getOutputSizes(mPreview.getOutputClass())) {
        mPreviewSizes.add(new Size(size.getWidth(), size.getHeight()));
    }
    mPictureSizes.clear();
    collectPictureSizes(mPictureSizes, map);

    if (!mPreviewSizes.ratios().contains(mAspectRatio)) {
        mAspectRatio = mPreviewSizes.ratios().iterator().next();
    }
}
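The mPreviewSizes and mPictureSizes fields above are instances of a SizeMap helper that groups sizes by aspect ratio; that class is not reproduced on this page. A rough sketch of the shape such a container might have, assuming an AspectRatio value class with an of(width, height) factory and value-based equals()/hashCode() (both assumptions, since neither class appears in these snippets):

// Hypothetical sketch of a SizeMap-style container: sizes grouped by aspect ratio.
// Size is assumed to be Comparable so each group stays sorted.
class SizeMap {
    private final Map<AspectRatio, SortedSet<Size>> ratioToSizes = new HashMap<>();

    void add(Size size) {
        AspectRatio ratio = AspectRatio.of(size.getWidth(), size.getHeight());
        SortedSet<Size> sizes = ratioToSizes.get(ratio);
        if (sizes == null) {
            sizes = new TreeSet<>();
            ratioToSizes.put(ratio, sizes);
        }
        sizes.add(size);
    }

    Set<AspectRatio> ratios() { return ratioToSizes.keySet(); }

    SortedSet<Size> sizes(AspectRatio ratio) { return ratioToSizes.get(ratio); }

    void remove(AspectRatio ratio) { ratioToSizes.remove(ratio); }

    boolean isEmpty() { return ratioToSizes.isEmpty(); }

    void clear() { ratioToSizes.clear(); }
}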
Project: TK_1701    File: Camera2.java
public void open() {
    try {
        CameraManager manager = (CameraManager) mActivity.getSystemService(Context.CAMERA_SERVICE);
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            if (characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK) {
                StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                mCameraSize = map.getOutputSizes(SurfaceTexture.class)[0];

                HandlerThread thread = new HandlerThread("OpenCamera");
                thread.start();
                Handler backgroundHandler = new Handler(thread.getLooper());

                manager.openCamera(cameraId, mCameraDeviceCallback, null);

                // Retrieve the camera's physical characteristics
                mCameraCharacteristics = manager.getCameraCharacteristics( cameraId );
                return;
            }
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
Project: SCCameraView    File: Camera2View.java
@Override
public void collectRatioSizes() {
    ratioSizeList.clear();
    CameraCharacteristics characteristics;
    StreamConfigurationMap map = null;
    try {
        characteristics = ((CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE)).getCameraCharacteristics(Integer.toString(Integer.parseInt(getCameraId())));
        map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    if (map == null) {
        return;
    }
    Size[] outputSizes = map.getOutputSizes(SurfaceTexture.class);
    if (outputSizes != null) {
        List<Double> ratioList = new ArrayList<>();
        for (Size size : outputSizes) {
            double ratio = (double) size.getWidth() / (double) size.getHeight();
            if (!ratioList.contains(ratio)) {
                ratioList.add(ratio);
                ratioSizeList.add(new AspectRatio(ratio, size.getWidth(), size.getHeight()));
            }
        }
    }
}
Project: xbot_head    File: CommentaryFragment.java
private void startPreview() {
    try {
        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap configMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        Size previewSize = Util.getPreferredPreviewSize(
                configMap.getOutputSizes(ImageFormat.JPEG),textureView.getWidth(), textureView.getHeight());

        surfaceTexture.setDefaultBufferSize(previewSize.getWidth(),previewSize.getHeight());
        Surface surface = new Surface(surfaceTexture);
        captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        captureBuilder.addTarget(surface);

        cameraDevice.createCaptureSession(Arrays.asList(surface),captureSessionCallback,backgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
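The Util.getPreferredPreviewSize helper called in the xbot_head snippet above is not shown on this page. One plausible implementation, assuming it simply picks the smallest supported size that still covers the view (the project's actual logic may differ):

// Hypothetical helper in the spirit of Util.getPreferredPreviewSize: pick the
// smallest supported size that still covers the requested view dimensions,
// falling back to the first entry if nothing is large enough.
static Size getPreferredPreviewSize(Size[] choices, int viewWidth, int viewHeight) {
    Size best = null;
    for (Size option : choices) {
        if (option.getWidth() >= viewWidth && option.getHeight() >= viewHeight) {
            if (best == null
                    || option.getWidth() * option.getHeight()
                            < best.getWidth() * best.getHeight()) {
                best = option;
            }
        }
    }
    return best != null ? best : choices[0];
}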
Project: CameraCompat    File: Camera2Helper.java
@Override
protected List<PreviewSize> getSupportedSize() {
    try {
        CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(
                getCurrentCameraId());
        StreamConfigurationMap map =
                characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
            return Collections.singletonList(new PreviewSize(mPreviewWidth, mPreviewHeight));
        }
        Size[] supportedSize = map.getOutputSizes(SurfaceTexture.class);
        if (supportedSize == null || supportedSize.length == 0) {
            return Collections.singletonList(new PreviewSize(mPreviewWidth, mPreviewHeight));
        }
        List<PreviewSize> results = new ArrayList<>();
        for (Size size : supportedSize) {
            results.add(new PreviewSize(size.getWidth(), size.getHeight()));
        }
        return results;
    } catch (CameraAccessException e) {
        throw new CameraAccessError();
    }
}
Project: CameraView    File: VideoSession.java
private List<Size> getSizes(StreamConfigurationMap map) {
    Size[] sizes = map.getOutputSizes(MediaRecorder.class);

    // StreamConfigurationMap.getOutputSizes(MediaRecorder.class) only tells us if the
    // camera supports these sizes. It does not tell us if MediaRecorder supports these
    // sizes, odd as that sounds. Therefore, we need to filter ourselves manually.
    List<Size> filtered;
    if (CamcorderProfile.hasProfile(getCameraId(), CamcorderProfile.QUALITY_2160P)) {
        filtered = filter(sizes, SIZE_4K);
        if (!filtered.isEmpty()) {
            return filtered;
        }
    }
    if (CamcorderProfile.hasProfile(getCameraId(), CamcorderProfile.QUALITY_1080P)) {
        filtered = filter(sizes, SIZE_1080P);
        if (!filtered.isEmpty()) {
            return filtered;
        }
    }
    if (CamcorderProfile.hasProfile(getCameraId(), CamcorderProfile.QUALITY_720P)) {
        filtered = filter(sizes, SIZE_720P);
        if (!filtered.isEmpty()) {
            return filtered;
        }
    }
    if (CamcorderProfile.hasProfile(getCameraId(), CamcorderProfile.QUALITY_480P)) {
        filtered = filter(sizes, SIZE_480P);
        if (!filtered.isEmpty()) {
            return filtered;
        }
    }
    return Arrays.asList(sizes);
}
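The filter helper and the SIZE_* constants used above are not included in the snippet. A guess at their shape, assuming each constant is simply the nominal frame size for the matching CamcorderProfile quality (the CameraView project's real values may differ):

// Hypothetical companions to the snippet above.
private static final Size SIZE_4K = new Size(3840, 2160);
private static final Size SIZE_1080P = new Size(1920, 1080);
private static final Size SIZE_720P = new Size(1280, 720);
private static final Size SIZE_480P = new Size(720, 480);

// Keep only the sizes that fit within the given upper bound.
private static List<Size> filter(Size[] sizes, Size bound) {
    List<Size> result = new ArrayList<>();
    for (Size size : sizes) {
        if (size.getWidth() <= bound.getWidth() && size.getHeight() <= bound.getHeight()) {
            result.add(size);
        }
    }
    return result;
}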
Project: CameraView    File: PreviewSession.java
@Override
void initialize(StreamConfigurationMap map) {
    if (DEBUG) Log.d(TAG, "Initializing PreviewSession");
    super.initialize(chooseOptimalSize(getSizes(map), mCameraView.getWidth(), mCameraView.getHeight()));

    SurfaceTexture texture = mCameraView.getSurfaceTexture();
    if (texture == null) {
        // This will be caught in Camera2Module.setSession
        throw new IllegalStateException(
                "Expected a SurfaceTexture to exist, but none does. "
                    + "Was the SurfaceTexture already closed?");
    }
    texture.setDefaultBufferSize(getWidth(), getHeight());

    // This is the output Surface we need to start the preview.
    mSurface = new Surface(texture);
}
Project: SimpleCamera    File: Camera2.java
/**
 * <p>Collects some information from {@link #mCameraCharacteristics}.</p>
 * <p>This rewrites {@link #mPreviewSizes}, {@link #mPictureSizes}, and optionally,
 * {@link #mAspectRatio}.</p>
 */
private void collectCameraInfo() {
    StreamConfigurationMap map = mCameraCharacteristics.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (map == null) {
        throw new IllegalStateException("Failed to get configuration map: " + mCameraId);
    }
    mPreviewSizes.clear();
    for (android.util.Size size : map.getOutputSizes(mPreview.getOutputClass())) {
        mPreviewSizes.add(new Size(size.getWidth(), size.getHeight()));
    }
    mPictureSizes.clear();
    collectPictureSizes(mPictureSizes, map);

    if (!mPreviewSizes.ratios().contains(mAspectRatio)) {
        mAspectRatio = mPreviewSizes.ratios().iterator().next();
    }
}
Project: AndroidSnippets    File: TextureViewSurfaceHolder.java
private static Size findBestPreviewSize(@NonNull StreamConfigurationMap scMap, int width, int height) {
    List<Size> sizes = new ArrayList<>(Arrays.asList(scMap.getOutputSizes(SurfaceTexture.class)));
    for (Iterator<Size> itr = sizes.iterator(); itr.hasNext(); ) {
        Size size = itr.next();
        if (size.getWidth() < width || size.getHeight() < height) {
            itr.remove();
        }
    }
    if (sizes.size() == 0) {
        sizes = new ArrayList<>(Arrays.asList(scMap.getOutputSizes(SurfaceTexture.class)));
    }
    Collections.sort(sizes, new Comparator<Size>() {
        @Override
        public int compare(Size s1, Size s2) {
            return s2.getWidth() * s2.getHeight() - s1.getWidth() * s1.getHeight();
        }
    });
    return (sizes.size() > 0) ? sizes.get(sizes.size() - 1) : null;
}
Project: Camera2    File: OneCameraZslImpl.java
/**
 * @return The largest supported picture size.
 */
public Size getDefaultPictureSize() {
    StreamConfigurationMap configs = mCharacteristics.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    android.util.Size[] supportedSizes = configs.getOutputSizes(sCaptureImageFormat);

    // Find the largest supported size.
    android.util.Size largestSupportedSize = supportedSizes[0];
    long largestSupportedSizePixels = largestSupportedSize.getWidth()
            * largestSupportedSize.getHeight();
    for (int i = 0; i < supportedSizes.length; i++) {
        long numPixels = supportedSizes[i].getWidth() * supportedSizes[i].getHeight();
        if (numPixels > largestSupportedSizePixels) {
            largestSupportedSize = supportedSizes[i];
            largestSupportedSizePixels = numPixels;
        }
    }

    return new Size(largestSupportedSize.getWidth(),
            largestSupportedSize.getHeight());
}
Project: droidCam    File: Camera2Api23.java
@Override
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    // Try to get hi-res output sizes
    android.util.Size[] outputSizes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);
    if (outputSizes != null) {
        for (android.util.Size size : map.getHighResolutionOutputSizes(ImageFormat.JPEG)) {
            sizes.add(new Size(size.getWidth(), size.getHeight()));
        }
    }
    if (sizes.isEmpty()) {
        super.collectPictureSizes(sizes, map);
    }
}
Project: tensorflow-classifier-android    File: CameraActivity.java
private String chooseCamera() {
  final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
  try {
    for (final String cameraId : manager.getCameraIdList()) {
      final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

      // We don't use a front facing camera in this sample.
      final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
      if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
        continue;
      }

      final StreamConfigurationMap map =
          characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

      if (map == null) {
        continue;
      }

      useCamera2API = isHardwareLevelSupported(characteristics,
          CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
      LOGGER.i("Camera API lv2?: %s", useCamera2API);
      return cameraId;
    }
  } catch (CameraAccessException e) {
    LOGGER.e(e, "Not allowed to access camera");
  }

  return null;
}
Project: PXLSRT    File: Camera2Api23.java
@Override
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    // Try to get hi-res output sizes
    android.util.Size[] outputSizes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);
    if (outputSizes != null) {
        for (android.util.Size size : map.getHighResolutionOutputSizes(ImageFormat.JPEG)) {
            sizes.add(new Size(size.getWidth(), size.getHeight()));
        }
    }
    if (sizes.isEmpty()) {
        super.collectPictureSizes(sizes, map);
    }
}
Project: hella-renderscript    File: BaseViewfinderActivity.java
/**
 * Configure the surfaceview and RS processing.
 */
private void configureSurfaces() {
    // Find a good size for output - largest 16:9 aspect ratio that's less than 720p
    final int MAX_WIDTH = 1280;
    final float TARGET_ASPECT = 16.f / 9.f;
    final float ASPECT_TOLERANCE = 0.1f;

    StreamConfigurationMap configs =
            mCameraInfo.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

    Size[] outputSizes = configs.getOutputSizes(SurfaceHolder.class);

    Size outputSize = outputSizes[0];
    float outputAspect = (float) outputSize.getWidth() / outputSize.getHeight();
    for (Size candidateSize : outputSizes) {
        if (candidateSize.getWidth() > MAX_WIDTH)
            continue;
        float candidateAspect = (float) candidateSize.getWidth() / candidateSize.getHeight();
        boolean goodCandidateAspect =
                Math.abs(candidateAspect - TARGET_ASPECT) < ASPECT_TOLERANCE;
        boolean goodOutputAspect = Math.abs(outputAspect - TARGET_ASPECT) < ASPECT_TOLERANCE;
        if ((goodCandidateAspect && !goodOutputAspect) ||
                candidateSize.getWidth() > outputSize.getWidth()) {
            outputSize = candidateSize;
            outputAspect = candidateAspect;
        }
    }
    Log.i(TAG, "Resolution chosen: " + outputSize);

    setupProcessing(outputSize);

    // this will trigger onSurfaceChanged()
    getViewfinderSurfaceView().getHolder()
            .setFixedSize(outputSize.getWidth(), outputSize.getHeight());
    getViewfinderSurfaceView().setAspectRatio(outputAspect);
}
Project: polling-station-app    File: CameraHandler.java
/**
 * Sets the preview size of the fragment.
 * @param width  The width available for the camera preview
 * @param height The height available for the camera preview
 * @param swappedDimensions Whether the width and height need to be swapped
 * @param map The StreamConfigurationMap of the camera
 * @return mPreviewSize - the preview size that is set on the fragment
 */
private Size setFragmentPreviewSize(int width, int height, boolean swappedDimensions, StreamConfigurationMap map) {
    // For still image captures, we use the largest available size.
    Size largest = Collections.max(
            Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
            new CameraFragmentUtil.CompareSizesByArea());

    Point displaySize = new Point();
    fragment.getActivity().getWindowManager().getDefaultDisplay().getSize(displaySize);
    int rotatedPreviewWidth = width;
    int rotatedPreviewHeight = height;
    int maxPreviewWidth = displaySize.x;
    int maxPreviewHeight = displaySize.y;

    if (swappedDimensions) {
        rotatedPreviewWidth = height;
        rotatedPreviewHeight = width;
        maxPreviewWidth = displaySize.y;
        maxPreviewHeight = displaySize.x;
    }

    if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
        maxPreviewWidth = MAX_PREVIEW_WIDTH;
    }
    if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
        maxPreviewHeight = MAX_PREVIEW_HEIGHT;
    }
    // Attempting to use too large a preview size could  exceed the camera bus' bandwidth
    // limitation, resulting in gorgeous previews but the storage of garbage capture data.
    Size mPreviewSize = CameraFragmentUtil.chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
            rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
            maxPreviewHeight, largest);
    fragment.setPreviewSize(mPreviewSize);
    return mPreviewSize;
}
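The CameraFragmentUtil.CompareSizesByArea comparator used with Collections.max above is not shown; it is presumably modeled on the standard comparator from Google's Camera2Basic sample, which looks roughly like this:

// Comparator in the style of the Camera2Basic sample: orders sizes by pixel area.
static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // Cast to long to avoid overflow when multiplying large dimensions.
        return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                - (long) rhs.getWidth() * rhs.getHeight());
    }
}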
Project: rtmp-rtsp-stream-client-java    File: Camera2ApiManager.java
public Size[] getCameraResolutionsBack() {
  try {
    CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics("0");
    if (cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
        != CameraCharacteristics.LENS_FACING_BACK) {
      cameraCharacteristics = cameraManager.getCameraCharacteristics("1");
    }
    StreamConfigurationMap streamConfigurationMap =
        cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    return streamConfigurationMap.getOutputSizes(SurfaceTexture.class);
  } catch (CameraAccessException e) {
    Log.e(TAG, e.getMessage());
    return new Size[0];
  }
}
Project: rtmp-rtsp-stream-client-java    File: Camera2ApiManager.java
public Size[] getCameraResolutionsFront() {
  try {
    CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics("0");
    if (cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
        != CameraCharacteristics.LENS_FACING_FRONT) {
      cameraCharacteristics = cameraManager.getCameraCharacteristics("1");
    }
    StreamConfigurationMap streamConfigurationMap =
        cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    return streamConfigurationMap.getOutputSizes(SurfaceTexture.class);
  } catch (CameraAccessException e) {
    Log.e(TAG, e.getMessage());
    return new Size[0];
  }
}
Project: AppRTC-Android    File: Camera2Enumerator.java
static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
  final StreamConfigurationMap streamMap =
      cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
  final int supportLevel =
      cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);

  final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class);
  final List<Size> sizes = convertSizes(nativeSizes);

  // Video may be stretched pre LMR1 on legacy implementations.
  // Filter out formats that have different aspect ratio than the sensor array.
  if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
      && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
    final Rect activeArraySize =
        cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    final ArrayList<Size> filteredSizes = new ArrayList<Size>();

    for (Size size : sizes) {
      if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) {
        filteredSizes.add(size);
      }
    }

    return filteredSizes;
  } else {
    return sizes;
  }
}
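The convertSizes helper used in this (and the identical AndroidRTC/VideoCRE) snippet simply maps android.util.Size objects to the WebRTC library's own Size type; roughly:

// Sketch of the convertSizes helper: wrap each android.util.Size in the
// library's Size type, assumed to have a (width, height) constructor.
private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
    final List<Size> sizes = new ArrayList<Size>();
    for (android.util.Size size : cameraSizes) {
        sizes.add(new Size(size.getWidth(), size.getHeight()));
    }
    return sizes;
}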
Project: AndroidRTC    File: Camera2Enumerator.java
static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
  final StreamConfigurationMap streamMap =
      cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
  final int supportLevel =
      cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);

  final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class);
  final List<Size> sizes = convertSizes(nativeSizes);

  // Video may be stretched pre LMR1 on legacy implementations.
  // Filter out formats that have different aspect ratio than the sensor array.
  if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
      && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
    final Rect activeArraySize =
        cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    final ArrayList<Size> filteredSizes = new ArrayList<Size>();

    for (Size size : sizes) {
      if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) {
        filteredSizes.add(size);
      }
    }

    return filteredSizes;
  } else {
    return sizes;
  }
}
Project: LongImageCamera    File: Camera2.java
/**
 * <p>Collects some information from {@link #mCameraCharacteristics}.</p>
 * <p>This rewrites {@link #mPreviewSizes}, {@link #mPictureSizes}, and optionally,
 * {@link #mAspectRatio}.</p>
 */
private void collectCameraInfo() {
    StreamConfigurationMap map = mCameraCharacteristics.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (map == null) {
        throw new IllegalStateException("Failed to get configuration map: " + mCameraId);
    }
    mPreviewSizes.clear();
    for (android.util.Size size : map.getOutputSizes(mPreview.getOutputClass())) {
        int width = size.getWidth();
        int height = size.getHeight();
        if (width <= MAX_PREVIEW_WIDTH && height <= MAX_PREVIEW_HEIGHT) {
            mPreviewSizes.add(new Size(width, height));
        }
    }
    mPictureSizes.clear();
    collectPictureSizes(mPictureSizes, map);
    for (AspectRatio ratio : mPreviewSizes.ratios()) {
        if (!mPictureSizes.ratios().contains(ratio)) {
            mPreviewSizes.remove(ratio);
        }
    }

    if (!mPreviewSizes.ratios().contains(mAspectRatio)) {
        mAspectRatio = mPreviewSizes.ratios().iterator().next();
    }
}
Project: LongImageCamera    File: Camera2Api23.java
@Override
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    // Try to get hi-res output sizes
    android.util.Size[] outputSizes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);
    if (outputSizes != null) {
        for (android.util.Size size : map.getHighResolutionOutputSizes(ImageFormat.JPEG)) {
            sizes.add(new Size(size.getWidth(), size.getHeight()));
        }
    }
    if (sizes.isEmpty()) {
        super.collectPictureSizes(sizes, map);
    }
}
Project: VideoCRE    File: Camera2Enumerator.java
static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
  final StreamConfigurationMap streamMap =
      cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
  final int supportLevel =
      cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);

  final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class);
  final List<Size> sizes = convertSizes(nativeSizes);

  // Video may be stretched pre LMR1 on legacy implementations.
  // Filter out formats that have different aspect ratio than the sensor array.
  if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
      && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
    final Rect activeArraySize =
        cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    final ArrayList<Size> filteredSizes = new ArrayList<Size>();

    for (Size size : sizes) {
      if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) {
        filteredSizes.add(size);
      }
    }

    return filteredSizes;
  } else {
    return sizes;
  }
}
Project: xbot_head    File: InteractionFragment.java
@Override
public void startCamera() {
    try {
        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraID);
        StreamConfigurationMap configMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        int width = textureView.getWidth();
        int height = textureView.getHeight();

        // Choose a suitable preview size so the image is not stretched
//        previewSize = getPreferredPreviewSize(configMap.getOutputSizes(SurfaceTexture.class), width, height);
        previewSize = Util.getPreferredPreviewSize(configMap.getOutputSizes(ImageFormat.JPEG), width, height);
        surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        Log.i(TAG, "previewSize info:" + previewSize.getWidth() + "x" + previewSize.getHeight());

        surface = new Surface(surfaceTexture);

        builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

        if (surface.isValid()) {
            builder.addTarget(surface);
        }
        Log.i(TAG, "mTextureView info:" + textureView.getWidth() + "x" + textureView.getHeight());

        cameraDevice.createCaptureSession(Arrays.asList(surface), sessionStateCallback, null);

    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
Project: android-camera    File: Camera2.java
@Override
public boolean start() {
    if (!super.start()) {
        return false;
    }
    deviceOrientationListener.enable();
    cameraManager = (CameraManager) activity.get().getSystemService(Context.CAMERA_SERVICE);
    // choose camera id by lens
    if (!chooseCameraIdByLensFacing()) {
        return false;
    }
    // collect preview and picture sizes based on the query aspect ratio
    StreamConfigurationMap info = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (info == null) {
        throw new IllegalStateException("Failed to get configuration map: " + cameraId);
    }
    android.util.Size[] sizes = info.getOutputSizes(viewFinderPreview.gePreviewType());
    AspectRatio desiredAspectRatio = AspectRatio.of(aspectRatio.getWidth(), aspectRatio.getHeight());
    List<Size> availableSizes = convertSizes(sizes);
    previewImageSize = chooseOptimalSize(availableSizes);
    cameraStatusCallback.onAspectRatioAvailable(desiredAspectRatio, aspectRatio, availableSizes);
    sizes = info.getOutputSizes(ImageFormat.JPEG);
    capturedPictureSize = chooseOptimalSize(convertSizes(sizes));
    // prepare image reader
    prepareImageReader(capturedPictureSize);
    // open the camera and relayout the surface based on the chosen size
    startOpeningCamera();
    return true;
}
Project: CameraView    File: VideoSession.java
@Override
public void initialize(@NonNull StreamConfigurationMap map) throws CameraAccessException {
    super.initialize(map);
    mVideoSurface.initialize(map);
    if (!mVideoSurface.mIsInitialized) {
        onVideoFailed();
        throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
    }
}
Project: CameraView    File: PictureSession.java
private List<Size> getSizes(StreamConfigurationMap map) {
    Size[] sizes = map.getOutputSizes(getImageFormat(getQuality()));

    // Special case for high resolution images (assuming, of course, quality was set to high)
    if (Build.VERSION.SDK_INT >= 23) {
        Size[] highResSizes = map.getHighResolutionOutputSizes(getImageFormat(getQuality()));
        if (highResSizes != null) {
            sizes = concat(sizes, highResSizes);
        }
    }

    return Arrays.asList(sizes);
}
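The concat helper that merges the regular and high-resolution size arrays is not shown; a straightforward version might look like this (names and placement are assumptions):

// Hypothetical array-merge helper matching the call above.
private static Size[] concat(Size[] first, Size[] second) {
    Size[] result = Arrays.copyOf(first, first.length + second.length);
    System.arraycopy(second, 0, result, first.length, second.length);
    return result;
}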
Project: CameraView    File: PictureSession.java
@Override
void initialize(StreamConfigurationMap map) {
    if (DEBUG) Log.d(TAG, "Initializing PictureSession");
    super.initialize(chooseSize(getSizes(map), mPreviewSurface.mSize));
    mImageReader = ImageReader.newInstance(getWidth(), getHeight(), getImageFormat(getQuality()), 1 /* maxImages */);
    mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mCameraView.getHandler());
}
Project: SimpleCamera    File: Camera2Api23.java
@Override
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    // Try to get hi-res output sizes
    android.util.Size[] outputSizes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);
    if (outputSizes != null) {
        for (android.util.Size size : map.getHighResolutionOutputSizes(ImageFormat.JPEG)) {
            sizes.add(new Size(size.getWidth(), size.getHeight()));
        }
    }
    if (sizes.isEmpty()) {
        super.collectPictureSizes(sizes, map);
    }
}
Project: cameraview    File: Camera2.java
/**
 * <p>Collects some information from {@link #mCameraCharacteristics}.</p>
 * <p>This rewrites {@link #mPreviewSizes}, {@link #mPictureSizes}, and optionally,
 * {@link #mAspectRatio}.</p>
 */
private void collectCameraInfo() {
    StreamConfigurationMap map = mCameraCharacteristics.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (map == null) {
        throw new IllegalStateException("Failed to get configuration map: " + mCameraId);
    }
    mPreviewSizes.clear();
    for (android.util.Size size : map.getOutputSizes(mPreview.getOutputClass())) {
        int width = size.getWidth();
        int height = size.getHeight();
        if (width <= MAX_PREVIEW_WIDTH && height <= MAX_PREVIEW_HEIGHT) {
            mPreviewSizes.add(new Size(width, height));
        }
    }
    mPictureSizes.clear();
    collectPictureSizes(mPictureSizes, map);
    for (AspectRatio ratio : mPreviewSizes.ratios()) {
        if (!mPictureSizes.ratios().contains(ratio)) {
            mPreviewSizes.remove(ratio);
        }
    }

    if (!mPreviewSizes.ratios().contains(mAspectRatio)) {
        mAspectRatio = mPreviewSizes.ratios().iterator().next();
    }
}
Project: cameraview    File: Camera2Api23.java
@Override
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    // Try to get hi-res output sizes
    android.util.Size[] outputSizes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);
    if (outputSizes != null) {
        for (android.util.Size size : map.getHighResolutionOutputSizes(ImageFormat.JPEG)) {
            sizes.add(new Size(size.getWidth(), size.getHeight()));
        }
    }
    if (sizes.isEmpty()) {
        super.collectPictureSizes(sizes, map);
    }
}
Project: android_camera2_api_video_app    File: Camera2VideoImageActivity.java
private void setupCamera(int width, int height) {
    CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        for(String cameraId : cameraManager.getCameraIdList()){
            CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
            if(cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) ==
                    CameraCharacteristics.LENS_FACING_FRONT){
                continue;
            }
            StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            int deviceOrientation = getWindowManager().getDefaultDisplay().getRotation();
            mTotalRotation = sensorToDeviceRotation(cameraCharacteristics, deviceOrientation);
            boolean swapRotation = mTotalRotation == 90 || mTotalRotation == 270;
            int rotatedWidth = width;
            int rotatedHeight = height;
            if(swapRotation) {
                rotatedWidth = height;
                rotatedHeight = width;
            }
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedWidth, rotatedHeight);
            mVideoSize = chooseOptimalSize(map.getOutputSizes(MediaRecorder.class), rotatedWidth, rotatedHeight);
            mImageSize = chooseOptimalSize(map.getOutputSizes(ImageFormat.JPEG), rotatedWidth, rotatedHeight);
            mImageReader = ImageReader.newInstance(mImageSize.getWidth(), mImageSize.getHeight(), ImageFormat.JPEG, 1);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);
            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
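Both sensorToDeviceRotation and chooseOptimalSize come from the same tutorial project and are not shown here. The rotation helper typically combines the sensor orientation with the display rotation; a sketch under that assumption (the ORIENTATIONS table and exact arithmetic are guesses at the tutorial's version):

// Sketch of the rotation math used above. ORIENTATIONS maps Surface.ROTATION_*
// constants to degrees; the result is the total rotation in the range [0, 360).
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
    ORIENTATIONS.append(Surface.ROTATION_0, 0);
    ORIENTATIONS.append(Surface.ROTATION_90, 90);
    ORIENTATIONS.append(Surface.ROTATION_180, 180);
    ORIENTATIONS.append(Surface.ROTATION_270, 270);
}

private static int sensorToDeviceRotation(CameraCharacteristics characteristics, int deviceOrientation) {
    int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    // Convert the Surface rotation constant to degrees, then combine with the sensor orientation.
    int deviceDegrees = ORIENTATIONS.get(deviceOrientation);
    return (sensorOrientation + deviceDegrees + 360) % 360;
}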
Project: CameraFragment    File: Camera2Manager.java
@Override
public Size getPhotoSizeForQuality(@Configuration.MediaQuality int mediaQuality) {
    final StreamConfigurationMap map = currentCameraId.equals(faceBackCameraId) ? backCameraStreamConfigurationMap : frontCameraStreamConfigurationMap;
    return CameraHelper.getPictureSize(Size.fromArray2(map.getOutputSizes(ImageFormat.JPEG)), mediaQuality);
}
Project: CameraFragment    File: Camera2Manager.java
@Override
protected void prepareCameraOutputs() {
    try {
        final CameraCharacteristics characteristics = currentCameraId.equals(faceBackCameraId) ? backCameraCharacteristics : frontCameraCharacteristics;

        if (currentCameraId.equals(faceFrontCameraId) && frontCameraStreamConfigurationMap == null)
            frontCameraStreamConfigurationMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        else if (currentCameraId.equals(faceBackCameraId) && backCameraStreamConfigurationMap == null)
            backCameraStreamConfigurationMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        final StreamConfigurationMap map = currentCameraId.equals(faceBackCameraId) ? backCameraStreamConfigurationMap : frontCameraStreamConfigurationMap;
        if (configurationProvider.getMediaQuality() == Configuration.MEDIA_QUALITY_AUTO) {
            camcorderProfile = CameraHelper.getCamcorderProfile(currentCameraId, configurationProvider.getVideoFileSize(), configurationProvider.getMinimumVideoDuration());
        } else
            camcorderProfile = CameraHelper.getCamcorderProfile(configurationProvider.getMediaQuality(), currentCameraId);

        videoSize = CameraHelper.chooseOptimalSize(Size.fromArray2(map.getOutputSizes(MediaRecorder.class)),
                windowSize.getWidth(), windowSize.getHeight(), new Size(camcorderProfile.videoFrameWidth, camcorderProfile.videoFrameHeight));

        if (videoSize == null || videoSize.getWidth() > camcorderProfile.videoFrameWidth
                || videoSize.getHeight() > camcorderProfile.videoFrameHeight)
            videoSize = CameraHelper.getSizeWithClosestRatio(Size.fromArray2(map.getOutputSizes(MediaRecorder.class)), camcorderProfile.videoFrameWidth, camcorderProfile.videoFrameHeight);
        else if (videoSize == null || videoSize.getWidth() > camcorderProfile.videoFrameWidth
                || videoSize.getHeight() > camcorderProfile.videoFrameHeight)
            videoSize = CameraHelper.getSizeWithClosestRatio(Size.fromArray2(map.getOutputSizes(MediaRecorder.class)), camcorderProfile.videoFrameWidth, camcorderProfile.videoFrameHeight);

        photoSize = CameraHelper.getPictureSize(Size.fromArray2(map.getOutputSizes(ImageFormat.JPEG)),
                configurationProvider.getMediaQuality() == Configuration.MEDIA_QUALITY_AUTO
                        ? Configuration.MEDIA_QUALITY_HIGHEST : configurationProvider.getMediaQuality());

        imageReader = ImageReader.newInstance(photoSize.getWidth(), photoSize.getHeight(),
                ImageFormat.JPEG, 2);
        imageReader.setOnImageAvailableListener(this, backgroundHandler);

        if (configurationProvider.getMediaAction() == Configuration.MEDIA_ACTION_PHOTO
                || configurationProvider.getMediaAction() == Configuration.MEDIA_ACTION_UNSPECIFIED) {

            if (windowSize.getHeight() * windowSize.getWidth() > photoSize.getWidth() * photoSize.getHeight()) {
                previewSize = CameraHelper.getOptimalPreviewSize(Size.fromArray2(map.getOutputSizes(SurfaceTexture.class)), photoSize.getWidth(), photoSize.getHeight());
            } else {
                previewSize = CameraHelper.getOptimalPreviewSize(Size.fromArray2(map.getOutputSizes(SurfaceTexture.class)), windowSize.getWidth(), windowSize.getHeight());
            }

            if (previewSize == null)
                previewSize = CameraHelper.chooseOptimalSize(Size.fromArray2(map.getOutputSizes(SurfaceTexture.class)), windowSize.getWidth(), windowSize.getHeight(), photoSize);

        } else {
            if (windowSize.getHeight() * windowSize.getWidth() > videoSize.getWidth() * videoSize.getHeight()) {
                previewSize = CameraHelper.getOptimalPreviewSize(Size.fromArray2(map.getOutputSizes(SurfaceTexture.class)), videoSize.getWidth(), videoSize.getHeight());
            } else {
                previewSize = CameraHelper.getOptimalPreviewSize(Size.fromArray2(map.getOutputSizes(SurfaceTexture.class)), windowSize.getWidth(), windowSize.getHeight());
            }

            if (previewSize == null)
                previewSize = CameraHelper.getSizeWithClosestRatio(Size.fromArray2(map.getOutputSizes(SurfaceTexture.class)), videoSize.getWidth(), videoSize.getHeight());
        }
    } catch (Exception e) {
        Log.e(TAG, "Error while setup camera sizes.", e);
    }
}
Project: PXLSRT    File: Camera2.java
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    for (android.util.Size size : map.getOutputSizes(ImageFormat.JPEG)) {
        mPictureSizes.add(new Size(size.getWidth(), size.getHeight()));
    }
}
Project: 361Camera    File: Camera2Fragment.java
/**
 * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
 */
private boolean setUpCameraOutputs() {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    if (manager == null) {
        ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").
                show(getFragmentManager(), "dialog");
        return false;
    }
    try {
        // Find a CameraDevice that supports RAW captures, and configure state.
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics
                    = manager.getCameraCharacteristics(cameraId);

            if ((!cameraId.equals(CAMERA_FRONT) && (!cameraId.equals(CAMERA_BACK))
                    || (!cameraId.equals(mCameraId)))) {
                continue;
            }
            // We only use a camera that supports RAW in this sample.
            if (!contains(characteristics.get(
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES),
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
                continue;
            }

            StreamConfigurationMap map = characteristics.get(
                    CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // For still image captures, we use the largest available size.
            Size largestJpeg = Collections.max(
                    Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());

            Size largestRaw = Collections.max(
                    Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                    new CompareSizesByArea());

            synchronized (mCameraStateLock) {
                // Set up ImageReaders for JPEG and RAW outputs.  Place these in a reference
                // counted wrapper to ensure they are only closed when all background tasks
                // using them are finished.
                if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
                    mJpegImageReader = new RefCountedAutoCloseable<>(
                            ImageReader.newInstance(largestJpeg.getWidth(),
                                    largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
                }
                mJpegImageReader.get().setOnImageAvailableListener(
                        mOnJpegImageAvailableListener, mBackgroundHandler);

                if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
                    mRawImageReader = new RefCountedAutoCloseable<>(
                            ImageReader.newInstance(largestRaw.getWidth(),
                                    largestRaw.getHeight(), ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
                }
                mRawImageReader.get().setOnImageAvailableListener(
                        mOnRawImageAvailableListener, mBackgroundHandler);

                mCharacteristics = characteristics;
            }
            return true;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    // If we found no suitable cameras for capturing RAW, warn the user.
    ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").
            show(getFragmentManager(), "dialog");
    return false;
}
Project: OkayCamera-Android    File: Camera2RawFragment.java
/**
     * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
     */
    private boolean setUpCameraOutputs() {
        Activity activity = getActivity();
        CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
        if (manager == null) {
            ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").
                    show(getFragmentManager(), "dialog");
            return false;
        }
        try {
            // Find a CameraDevice that supports RAW captures, and configure state.
            for (String cameraId : manager.getCameraIdList()) {
                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

                // We only use a camera that supports RAW in this sample.
                if (!contains(characteristics.get(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES),
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
//                    continue;
                }
                if (!"0".equals(cameraId)) {
                    continue;
                }

                StreamConfigurationMap map = characteristics.get(
                        CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

                // For still image captures, we use the largest available size.
                Size largestJpeg = Collections.max(
                        Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                        new Utils.CompareSizesByArea());

                Size largestRaw = Collections.max(
                        Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                        new Utils.CompareSizesByArea());

                synchronized (mCameraStateLock) {
                    // Set up ImageReaders for JPEG and RAW outputs.  Place these in a reference
                    // counted wrapper to ensure they are only closed when all background tasks
                    // using them are finished.
                    if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
                        mJpegImageReader = new RefCountedAutoCloseable<>(
                                ImageReader.newInstance(largestJpeg.getWidth(),
                                        largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
                    }
                    mJpegImageReader.get().setOnImageAvailableListener(
                            mOnJpegImageAvailableListener, mBackgroundHandler);

                    if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
                        mRawImageReader = new RefCountedAutoCloseable<>(
                                ImageReader.newInstance(largestRaw.getWidth(),
                                        largestRaw.getHeight(), ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
                    }
                    mRawImageReader.get().setOnImageAvailableListener(
                            mOnRawImageAvailableListener, mBackgroundHandler);

                    mCharacteristics = characteristics;
                    mCameraId = cameraId;
                }
                return true;
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }

        // If we found no suitable cameras for capturing RAW, warn the user.
        ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").
                show(getFragmentManager(), "dialog");
        return false;
    }
Project: LongImageCamera    File: Camera2.java
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    for (android.util.Size size : map.getOutputSizes(ImageFormat.JPEG)) {
        mPictureSizes.add(new Size(size.getWidth(), size.getHeight()));
    }
}
Project: phonk    File: CameraNew2.java
/**
 * Sets up member variables related to camera.
 */
private void setUpCameraOutputs(int width, int height) throws CameraAccessException {
    MLog.d(TAG, "setUpCameraOutputs");

    String[] cameras = mCameraManager.getCameraIdList();

    for (int i = 0; i < cameras.length; i++) {
        MLog.d(TAG, "camera " + cameras[i]);
    }

    String cameraId = cameras[0];
    CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(cameraId);
    List<CameraCharacteristics.Key<?>> keys = characteristics.getKeys();
    for (int i = 0; i < keys.size(); i++) {
        Object val = characteristics.get(keys.get(i));
        MLog.d(TAG, "characteristic " + keys.get(i) + " " + val);
    }

    // is it facingcamera
    Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
    StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

    // For still image captures, we use the largest available size.
    Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
    mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, /*maxImages*/2);
    mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null);

    // Danger, W.R.! Attempting to use too large a preview size could  exceed the camera
    // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
    // garbage capture data.
    int rotatedPreviewWidth = 500;
    int rotatedPreviewHeight = 500;
    int maxPreviewWidth = 500;
    int maxPreviewHeight = 500;
    mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
            rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
            maxPreviewHeight, largest);

    //TODO mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());

    mCameraId = cameraId;
}
Project: sandriosCamera    File: Camera2Manager.java
@Override
public Size getPhotoSizeForQuality(@CameraConfiguration.MediaQuality int mediaQuality) {
    StreamConfigurationMap map = currentCameraId.equals(faceBackCameraId) ? backCameraStreamConfigurationMap : frontCameraStreamConfigurationMap;
    return CameraHelper.getPictureSize(Size.fromArray2(map.getOutputSizes(ImageFormat.JPEG)), mediaQuality);
}
Project: sandriosCamera    File: Camera2Manager.java
@Override
protected void prepareCameraOutputs() {
    try {
        CameraCharacteristics characteristics = currentCameraId.equals(faceBackCameraId) ? backCameraCharacteristics : frontCameraCharacteristics;

        if (currentCameraId.equals(faceFrontCameraId) && frontCameraStreamConfigurationMap == null)
            frontCameraStreamConfigurationMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        else if (currentCameraId.equals(faceBackCameraId) && backCameraStreamConfigurationMap == null)
            backCameraStreamConfigurationMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        StreamConfigurationMap map = currentCameraId.equals(faceBackCameraId) ? backCameraStreamConfigurationMap : frontCameraStreamConfigurationMap;
        if (configurationProvider.getMediaQuality() == CameraConfiguration.MEDIA_QUALITY_AUTO) {
            camcorderProfile = CameraHelper.getCamcorderProfile(currentCameraId, configurationProvider.getVideoFileSize(), configurationProvider.getMinimumVideoDuration());
        } else
            camcorderProfile = CameraHelper.getCamcorderProfile(configurationProvider.getMediaQuality(), currentCameraId);

        videoSize = CameraHelper.chooseOptimalSize(Size.fromArray2(map.getOutputSizes(MediaRecorder.class)),
                windowSize.getWidth(), windowSize.getHeight(), new Size(camcorderProfile.videoFrameWidth, camcorderProfile.videoFrameHeight));

        if (videoSize == null || videoSize.getWidth() > camcorderProfile.videoFrameWidth
                || videoSize.getHeight() > camcorderProfile.videoFrameHeight)
            videoSize = CameraHelper.getSizeWithClosestRatio(Size.fromArray2(map.getOutputSizes(MediaRecorder.class)), camcorderProfile.videoFrameWidth, camcorderProfile.videoFrameHeight);
        else if (videoSize == null || videoSize.getWidth() > camcorderProfile.videoFrameWidth
                || videoSize.getHeight() > camcorderProfile.videoFrameHeight)
            videoSize = CameraHelper.getSizeWithClosestRatio(Size.fromArray2(map.getOutputSizes(MediaRecorder.class)), camcorderProfile.videoFrameWidth, camcorderProfile.videoFrameHeight);

        photoSize = CameraHelper.getPictureSize(Size.fromArray2(map.getOutputSizes(ImageFormat.JPEG)),
                configurationProvider.getMediaQuality() == CameraConfiguration.MEDIA_QUALITY_AUTO
                        ? CameraConfiguration.MEDIA_QUALITY_HIGHEST : configurationProvider.getMediaQuality());

        imageReader = ImageReader.newInstance(photoSize.getWidth(), photoSize.getHeight(),
                ImageFormat.JPEG, 2);
        imageReader.setOnImageAvailableListener(this, backgroundHandler);

        if (configurationProvider.getMediaAction() == CameraConfiguration.MEDIA_ACTION_PHOTO
                || configurationProvider.getMediaAction() == CameraConfiguration.MEDIA_ACTION_BOTH) {

            if (windowSize.getHeight() * windowSize.getWidth() > photoSize.getWidth() * photoSize.getHeight()) {
                previewSize = CameraHelper.getOptimalPreviewSize(Size.fromArray2(map.getOutputSizes(SurfaceTexture.class)), photoSize.getWidth(), photoSize.getHeight());
            } else {
                previewSize = CameraHelper.getOptimalPreviewSize(Size.fromArray2(map.getOutputSizes(SurfaceTexture.class)), windowSize.getWidth(), windowSize.getHeight());
            }

            if (previewSize == null)
                previewSize = CameraHelper.chooseOptimalSize(Size.fromArray2(map.getOutputSizes(SurfaceTexture.class)), windowSize.getWidth(), windowSize.getHeight(), photoSize);

        } else {
            if (windowSize.getHeight() * windowSize.getWidth() > videoSize.getWidth() * videoSize.getHeight()) {
                previewSize = CameraHelper.getOptimalPreviewSize(Size.fromArray2(map.getOutputSizes(SurfaceTexture.class)), videoSize.getWidth(), videoSize.getHeight());
            } else {
                previewSize = CameraHelper.getOptimalPreviewSize(Size.fromArray2(map.getOutputSizes(SurfaceTexture.class)), windowSize.getWidth(), windowSize.getHeight());
            }

            if (previewSize == null)
                previewSize = CameraHelper.getSizeWithClosestRatio(Size.fromArray2(map.getOutputSizes(SurfaceTexture.class)), videoSize.getWidth(), videoSize.getHeight());
        }
    } catch (Exception e) {
        Log.e(TAG, "Error while setup camera sizes.", e);
    }
}