Java class android.media.Image.Plane — example source code

项目:snu-artoon    文件:CameraActivity.java   
/**
 * Copies each image plane's ByteBuffer into the matching byte array,
 * (re)allocating the arrays on demand.
 *
 * Because the row stride varies by device, the exact byte size of each YUV
 * plane cannot be known in advance; each destination array is sized from the
 * source buffer's capacity instead.
 *
 * @param planes   planes of a camera image (presumably YUV_420_888 — confirm at call site)
 * @param yuvBytes destination arrays, one slot per plane; entries may be null
 */
protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
  for (int i = 0; i < planes.length; ++i) {
    final ByteBuffer buffer = planes[i].getBuffer();
    // Re-allocate when the cached array is missing OR its size no longer
    // matches the buffer (e.g. after a preview-size change); a stale size
    // would make buffer.get() throw Buffer{Under,Over}flowException.
    if (yuvBytes[i] == null || yuvBytes[i].length != buffer.capacity()) {
      LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
      yuvBytes[i] = new byte[buffer.capacity()];
    }
    buffer.get(yuvBytes[i]);
  }
}
项目:TensorflowAndroidDemo    文件:CameraActivity.java   
/**
 * Copies each image plane's ByteBuffer into the matching byte array,
 * (re)allocating the arrays on demand.
 *
 * Because the row stride varies by device, the exact byte size of each YUV
 * plane cannot be known in advance; each destination array is sized from the
 * source buffer's capacity instead.
 *
 * @param planes   planes of a camera image (presumably YUV_420_888 — confirm at call site)
 * @param yuvBytes destination arrays, one slot per plane; entries may be null
 */
protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
  for (int i = 0; i < planes.length; ++i) {
    final ByteBuffer buffer = planes[i].getBuffer();
    // Re-allocate when the cached array is missing OR its size no longer
    // matches the buffer (e.g. after a preview-size change); a stale size
    // would make buffer.get() throw Buffer{Under,Over}flowException.
    if (yuvBytes[i] == null || yuvBytes[i].length != buffer.capacity()) {
      LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
      yuvBytes[i] = new byte[buffer.capacity()];
    }
    buffer.get(yuvBytes[i]);
  }
}
项目:tensorflow-classifier-android    文件:CameraActivity.java   
/**
 * Copies each image plane's ByteBuffer into the matching byte array,
 * (re)allocating the arrays on demand.
 *
 * Because the row stride varies by device, the exact byte size of each YUV
 * plane cannot be known in advance; each destination array is sized from the
 * source buffer's capacity instead.
 *
 * @param planes   planes of a camera image (presumably YUV_420_888 — confirm at call site)
 * @param yuvBytes destination arrays, one slot per plane; entries may be null
 */
protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
  for (int i = 0; i < planes.length; ++i) {
    final ByteBuffer buffer = planes[i].getBuffer();
    // Re-allocate when the cached array is missing OR its size no longer
    // matches the buffer (e.g. after a preview-size change); a stale size
    // would make buffer.get() throw Buffer{Under,Over}flowException.
    if (yuvBytes[i] == null || yuvBytes[i].length != buffer.capacity()) {
      LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
      yuvBytes[i] = new byte[buffer.capacity()];
    }
    buffer.get(yuvBytes[i]);
  }
}
项目:Tensorflow_Andriod_With_Audio_Output    文件:CameraActivity.java   
/**
 * Copies each image plane's ByteBuffer into the matching byte array,
 * (re)allocating the arrays on demand.
 *
 * Because the row stride varies by device, the exact byte size of each YUV
 * plane cannot be known in advance; each destination array is sized from the
 * source buffer's capacity instead.
 *
 * @param planes   planes of a camera image (presumably YUV_420_888 — confirm at call site)
 * @param yuvBytes destination arrays, one slot per plane; entries may be null
 */
protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
  for (int i = 0; i < planes.length; ++i) {
    final ByteBuffer buffer = planes[i].getBuffer();
    // Re-allocate when the cached array is missing OR its size no longer
    // matches the buffer (e.g. after a preview-size change); a stale size
    // would make buffer.get() throw Buffer{Under,Over}flowException.
    if (yuvBytes[i] == null || yuvBytes[i].length != buffer.capacity()) {
      LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
      yuvBytes[i] = new byte[buffer.capacity()];
    }
    buffer.get(yuvBytes[i]);
  }
}
项目:SortingHatAndroid    文件:CameraActivity.java   
/**
 * Copies each image plane's ByteBuffer into the matching byte array,
 * (re)allocating the arrays on demand.
 *
 * Because the row stride varies by device, the exact byte size of each YUV
 * plane cannot be known in advance; each destination array is sized from the
 * source buffer's capacity instead.
 *
 * @param planes   planes of a camera image (presumably YUV_420_888 — confirm at call site)
 * @param yuvBytes destination arrays, one slot per plane; entries may be null
 */
protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
  for (int i = 0; i < planes.length; ++i) {
    final ByteBuffer buffer = planes[i].getBuffer();
    // Re-allocate when the cached array is missing OR its size no longer
    // matches the buffer (e.g. after a preview-size change); a stale size
    // would make buffer.get() throw Buffer{Under,Over}flowException.
    if (yuvBytes[i] == null || yuvBytes[i].length != buffer.capacity()) {
      LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
      yuvBytes[i] = new byte[buffer.capacity()];
    }
    buffer.get(yuvBytes[i]);
  }
}
项目:ImageClassify    文件:CameraActivity.java   
/**
 * Copies each image plane's ByteBuffer into the matching byte array,
 * (re)allocating the arrays on demand.
 *
 * Because the row stride varies by device, the exact byte size of each YUV
 * plane cannot be known in advance; each destination array is sized from the
 * source buffer's capacity instead.
 *
 * @param planes   planes of a camera image (presumably YUV_420_888 — confirm at call site)
 * @param yuvBytes destination arrays, one slot per plane; entries may be null
 */
protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
  for (int i = 0; i < planes.length; ++i) {
    final ByteBuffer buffer = planes[i].getBuffer();
    // Re-allocate when the cached array is missing OR its size no longer
    // matches the buffer (e.g. after a preview-size change); a stale size
    // would make buffer.get() throw Buffer{Under,Over}flowException.
    if (yuvBytes[i] == null || yuvBytes[i].length != buffer.capacity()) {
      LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
      yuvBytes[i] = new byte[buffer.capacity()];
    }
    buffer.get(yuvBytes[i]);
  }
}
项目:AI_Calorie_Counter_Demo    文件:CameraActivity.java   
/**
 * Copies each image plane's ByteBuffer into the matching byte array,
 * (re)allocating the arrays on demand.
 *
 * Because the row stride varies by device, the exact byte size of each YUV
 * plane cannot be known in advance; each destination array is sized from the
 * source buffer's capacity instead.
 *
 * @param planes   planes of a camera image (presumably YUV_420_888 — confirm at call site)
 * @param yuvBytes destination arrays, one slot per plane; entries may be null
 */
protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
  for (int i = 0; i < planes.length; ++i) {
    final ByteBuffer buffer = planes[i].getBuffer();
    // Re-allocate when the cached array is missing OR its size no longer
    // matches the buffer (e.g. after a preview-size change); a stale size
    // would make buffer.get() throw Buffer{Under,Over}flowException.
    if (yuvBytes[i] == null || yuvBytes[i].length != buffer.capacity()) {
      LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
      yuvBytes[i] = new byte[buffer.capacity()];
    }
    buffer.get(yuvBytes[i]);
  }
}
项目:TensorflowAndroidDemo    文件:ClassifierActivity.java   
/**
 * ImageReader callback: converts the latest YUV camera frame to ARGB, crops
 * it for the TensorFlow input, and schedules classification on a background
 * thread. Frames arriving while a previous one is still being classified are
 * dropped (the {@code computing} flag).
 *
 * @param reader the ImageReader that has a new frame available
 */
@Override
public void onImageAvailable(final ImageReader reader) {
  Image image = null;

  try {
    image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }

    // Drop this frame while the previous one is still being classified.
    if (computing) {
      image.close();
      return;
    }
    computing = true;

    Trace.beginSection("imageAvailable");

    final Plane[] planes = image.getPlanes();
    fillBytes(planes, yuvBytes);

    // Strides vary per device; pass them through so the converter can
    // address the Y and interleaved U/V planes correctly.
    final int yRowStride = planes[0].getRowStride();
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();
    ImageUtils.convertYUV420ToARGB8888(
        yuvBytes[0],
        yuvBytes[1],
        yuvBytes[2],
        rgbBytes,
        previewWidth,
        previewHeight,
        yRowStride,
        uvRowStride,
        uvPixelStride,
        false);

    image.close();
  } catch (final Exception e) {
    if (image != null) {
      image.close();
    }
    LOGGER.e(e, "Exception!");
    // Fix: release the busy flag; otherwise a single failure would block
    // every subsequent frame from ever being processed.
    computing = false;
    Trace.endSection();
    return;
  }

  rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
  // Render the full preview frame into the (smaller) crop bitmap that the
  // classifier consumes, using the precomputed transform.
  final Canvas canvas = new Canvas(croppedBitmap);
  canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);

  // For examining the actual TF input.
  if (SAVE_PREVIEW_BITMAP) {
    ImageUtils.saveBitmap(croppedBitmap);
  }

  runInBackground(
      new Runnable() {
        @Override
        public void run() {
          final long startTime = SystemClock.uptimeMillis();
          final List<Classifier.Recognition> results = classifier.recognizeImage(croppedBitmap);
          lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;

          cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
          resultsView.setResults(results);
          requestRender();
          // Allow the next frame to be processed.
          computing = false;
        }
      });

  Trace.endSection();
}
项目:tensorflow-classifier-android    文件:CameraActivity.java   
/**
 * Callback for Camera2 API.
 *
 * Grabs the latest YUV frame, captures its plane strides, and hands off both
 * the YUV→ARGB conversion and the image-release step as runnables consumed by
 * {@code processImage()}. Frames arriving while a previous one is in flight
 * are dropped (the {@code isProcessingFrame} flag).
 *
 * @param reader the ImageReader that has a new frame available
 */
@Override
public void onImageAvailable(final ImageReader reader) {
  //We need wait until we have some size from onPreviewSizeChosen
  if (previewWidth == 0 || previewHeight == 0) {
    return;
  }
  if (rgbBytes == null) {
    rgbBytes = new int[previewWidth * previewHeight];
  }
  // Fix: declared outside the try so the catch block can release it; the
  // original leaked the Image (and left isProcessingFrame stuck true) when
  // an exception occurred after acquisition.
  Image image = null;
  try {
    image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }

    if (isProcessingFrame) {
      image.close();
      return;
    }
    isProcessingFrame = true;
    Trace.beginSection("imageAvailable");
    final Plane[] planes = image.getPlanes();
    fillBytes(planes, yuvBytes);
    // Strides vary per device; capture them for the deferred converter.
    yRowStride = planes[0].getRowStride();
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();

    imageConverter =
        new Runnable() {
          @Override
          public void run() {
            ImageUtils.convertYUV420ToARGB8888(
                yuvBytes[0],
                yuvBytes[1],
                yuvBytes[2],
                previewWidth,
                previewHeight,
                yRowStride,
                uvRowStride,
                uvPixelStride,
                rgbBytes);
          }
        };

    // Effectively-final alias so the anonymous class below can capture it.
    final Image acquiredImage = image;
    postInferenceCallback =
        new Runnable() {
          @Override
          public void run() {
            acquiredImage.close();
            isProcessingFrame = false;
          }
        };

    processImage();
  } catch (final Exception e) {
    LOGGER.e(e, "Exception!");
    // Fix: release the frame and the busy flag so one failure does not
    // permanently stall the camera pipeline. NOTE(review): if processImage()
    // can fail after the callback already closed the image, this may
    // double-close — confirm Image.close() tolerance in that path.
    if (image != null) {
      image.close();
    }
    isProcessingFrame = false;
    Trace.endSection();
    return;
  }
  Trace.endSection();
}
项目:Tensorflow_Andriod_With_Audio_Output    文件:ClassifierActivity.java   
/**
 * ImageReader callback: converts the latest YUV camera frame to ARGB, crops
 * it for the TensorFlow input, and schedules classification on a background
 * thread. Frames arriving while a previous one is still being classified are
 * dropped (the {@code computing} flag).
 *
 * @param reader the ImageReader that has a new frame available
 */
@Override
public void onImageAvailable(final ImageReader reader) {
  Image image = null;

  try {
    image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }

    // Drop this frame while the previous one is still being classified.
    if (computing) {
      image.close();
      return;
    }
    computing = true;

    Trace.beginSection("imageAvailable");

    final Plane[] planes = image.getPlanes();
    fillBytes(planes, yuvBytes);

    // Strides vary per device; pass them through so the converter can
    // address the Y and interleaved U/V planes correctly.
    final int yRowStride = planes[0].getRowStride();
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();
    ImageUtils.convertYUV420ToARGB8888(
        yuvBytes[0],
        yuvBytes[1],
        yuvBytes[2],
        rgbBytes,
        previewWidth,
        previewHeight,
        yRowStride,
        uvRowStride,
        uvPixelStride,
        false);

    image.close();
  } catch (final Exception e) {
    if (image != null) {
      image.close();
    }
    LOGGER.e(e, "Exception!");
    // Fix: release the busy flag; otherwise a single failure would block
    // every subsequent frame from ever being processed.
    computing = false;
    Trace.endSection();
    return;
  }

  rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
  // Render the full preview frame into the (smaller) crop bitmap that the
  // classifier consumes, using the precomputed transform.
  final Canvas canvas = new Canvas(croppedBitmap);
  canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);

  // For examining the actual TF input.
  if (SAVE_PREVIEW_BITMAP) {
    ImageUtils.saveBitmap(croppedBitmap);
  }

  runInBackground(
      new Runnable() {
        @Override
        public void run() {
          final long startTime = SystemClock.uptimeMillis();
          final List<Classifier.Recognition> results = classifier.recognizeImage(croppedBitmap);
          lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;

          cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
          resultsView.setResults(results);
          requestRender();
          // Allow the next frame to be processed.
          computing = false;
        }
      });

  Trace.endSection();
}
项目:SortingHatAndroid    文件:CameraActivity.java   
/**
 * ImageReader callback: converts the latest YUV camera frame to ARGB and
 * renders it into the crop bitmap. Frames arriving while a previous one is
 * still being saved or classified are dropped ({@code savingImage} /
 * {@code computing} flags).
 *
 * @param reader the ImageReader that has a new frame available
 */
@Override public void onImageAvailable(final ImageReader reader) {
  imageReader = reader;

  Image image = null;

  try {
    image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }

    // Drop this frame while the previous one is still in flight.
    if (savingImage || computing) {
      image.close();
      return;
    }
    savingImage = true;

    Trace.beginSection("imageAvailable");

    final Plane[] planes = image.getPlanes();
    fillBytes(planes, yuvBytes);

    // Strides vary per device; pass them through so the converter can
    // address the Y and interleaved U/V planes correctly.
    final int yRowStride = planes[0].getRowStride();
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();
    ImageUtils.convertYUV420ToARGB8888(yuvBytes[0], yuvBytes[1], yuvBytes[2], rgbBytes,
        previewWidth, previewHeight, yRowStride, uvRowStride, uvPixelStride, false);

    image.close();
  } catch (final Exception e) {
    if (image != null) {
      image.close();
    }
    LOGGER.e(e, "Exception!");
    // Fix: release the busy flag; otherwise a single failure would block
    // every subsequent frame from ever being processed.
    savingImage = false;
    Trace.endSection();
    return;
  }

  rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
  // Render the full preview frame into the (smaller) crop bitmap.
  final Canvas canvas = new Canvas(croppedBitmap);
  canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);

  // For examining the actual TF input.
  if (SAVE_PREVIEW_BITMAP) {
    ImageUtils.saveBitmap(croppedBitmap);
  }

  savingImage = false;
  Trace.endSection();
}
项目:ImageClassify    文件:CameraClassifierActivity.java   
/**
 * ImageReader callback: converts the latest YUV camera frame to ARGB, crops
 * it for the TensorFlow input, and schedules classification on a background
 * thread. Frames arriving while a previous one is still being classified are
 * dropped (the {@code computing} flag).
 *
 * @param reader the ImageReader that has a new frame available
 */
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = null;

    try {
        image = reader.acquireLatestImage();

        if (image == null) {
            return;
        }

        // Drop this frame while the previous one is still being classified.
        if (computing) {
            image.close();
            return;
        }
        computing = true;

        Trace.beginSection("imageAvailable");

        final Plane[] planes = image.getPlanes();
        fillBytes(planes, yuvBytes);

        // Strides vary per device; pass them through so the converter can
        // address the Y and interleaved U/V planes correctly.
        final int yRowStride = planes[0].getRowStride();
        final int uvRowStride = planes[1].getRowStride();
        final int uvPixelStride = planes[1].getPixelStride();
        ImageUtils.convertYUV420ToARGB8888(
                yuvBytes[0],
                yuvBytes[1],
                yuvBytes[2],
                rgbBytes,
                previewWidth,
                previewHeight,
                yRowStride,
                uvRowStride,
                uvPixelStride,
                false);

        image.close();
    } catch (final Exception e) {
        if (image != null) {
            image.close();
        }
        LOGGER.e(e, "Exception!");
        // Fix: release the busy flag; otherwise a single failure would block
        // every subsequent frame from ever being processed.
        computing = false;
        Trace.endSection();
        return;
    }

    rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
    // Render the full preview frame into the (smaller) crop bitmap that the
    // classifier consumes, using the precomputed transform.
    final Canvas canvas = new Canvas(croppedBitmap);
    canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);

    // For examining the actual TF input.
    if (SAVE_PREVIEW_BITMAP) {
        ImageUtils.saveBitmap(croppedBitmap);
    }

    runInBackground(
            new Runnable() {
                @Override
                public void run() {
                    final long startTime = SystemClock.uptimeMillis();
                    final List<Classifier.Recognition> results = classifier.recognizeImage(croppedBitmap);
                    lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;

                    cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
                    resultsView.setResults(results);
                    requestRender();
                    // Allow the next frame to be processed.
                    computing = false;
                }
            });

    Trace.endSection();
}
项目:AI_Calorie_Counter_Demo    文件:ClassifierActivity.java   
/**
 * ImageReader callback: converts the latest YUV camera frame to ARGB, crops
 * it for the TensorFlow input, and schedules classification on a background
 * thread. Frames arriving while a previous one is still being classified are
 * dropped (the {@code computing} flag).
 *
 * @param reader the ImageReader that has a new frame available
 */
@Override
public void onImageAvailable(final ImageReader reader) {
  Image image = null;

  try {
    image = reader.acquireLatestImage();

    if (image == null) {
      return;
    }

    // Drop this frame while the previous one is still being classified.
    if (computing) {
      image.close();
      return;
    }
    computing = true;

    Trace.beginSection("imageAvailable");

    final Plane[] planes = image.getPlanes();
    fillBytes(planes, yuvBytes);

    // Strides vary per device; pass them through so the converter can
    // address the Y and interleaved U/V planes correctly.
    final int yRowStride = planes[0].getRowStride();
    final int uvRowStride = planes[1].getRowStride();
    final int uvPixelStride = planes[1].getPixelStride();
    ImageUtils.convertYUV420ToARGB8888(
        yuvBytes[0],
        yuvBytes[1],
        yuvBytes[2],
        previewWidth,
        previewHeight,
        yRowStride,
        uvRowStride,
        uvPixelStride,
        rgbBytes);

    image.close();
  } catch (final Exception e) {
    if (image != null) {
      image.close();
    }
    LOGGER.e(e, "Exception!");
    // Fix: release the busy flag; otherwise a single failure would block
    // every subsequent frame from ever being processed.
    computing = false;
    Trace.endSection();
    return;
  }

  rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
  // Render the full preview frame into the (smaller) crop bitmap that the
  // classifier consumes, using the precomputed transform.
  final Canvas canvas = new Canvas(croppedBitmap);
  canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);

  // For examining the actual TF input.
  if (SAVE_PREVIEW_BITMAP) {
    ImageUtils.saveBitmap(croppedBitmap);
  }

  runInBackground(
      new Runnable() {
        @Override
        public void run() {
          final long startTime = SystemClock.uptimeMillis();
          final List<Classifier.Recognition> results = classifier.recognizeImage(croppedBitmap);
          lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;

          cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
          resultsView.setResults(results);

          requestRender();
          // Allow the next frame to be processed.
          computing = false;
        }
      });

  Trace.endSection();
}
项目:Camera2    文件:JpegUtilNative.java   
/**
 * Compresses the given image to jpeg. Note that only ImageFormat.YUV_420_888 is currently
 * supported. Furthermore, all planes must use direct byte buffers.
 *
 * @param img the image to compress
 * @param outBuf a direct byte buffer to hold the output jpeg.
 * @param quality jpeg compression quality passed to the native encoder
 * @return The number of bytes written to outBuf, or -1 if any plane buffer is not direct
 * @throws RuntimeException if the image format is not YUV_420_888, the image does not have
 *         exactly three planes, or outBuf is not direct
 */
public static int compressJpegFromYUV420Image(Image img, ByteBuffer outBuf, int quality) {
    if (img.getFormat() != ImageFormat.YUV_420_888) {
        throw new RuntimeException("Unsupported Image Format.");
    }

    final int NUM_PLANES = 3;

    // Hoisted: avoid calling getPlanes() repeatedly below.
    Plane[] planes = img.getPlanes();

    // Fix: this check previously threw with the copy-pasted message
    // "Output buffer must be direct.", which misdescribes the failure.
    if (planes.length != NUM_PLANES) {
        throw new RuntimeException("Unsupported number of image planes.");
    }

    if (!outBuf.isDirect()) {
        throw new RuntimeException("Output buffer must be direct.");
    }

    ByteBuffer[] planeBuf = new ByteBuffer[NUM_PLANES];
    int[] pixelStride = new int[NUM_PLANES];
    int[] rowStride = new int[NUM_PLANES];

    for (int i = 0; i < NUM_PLANES; i++) {
        Plane plane = planes[i];

        // The native encoder requires direct buffers; signal failure with -1
        // (kept as a return, not a throw, to preserve the original contract).
        if (!plane.getBuffer().isDirect()) {
            return -1;
        }

        planeBuf[i] = plane.getBuffer();
        pixelStride[i] = plane.getPixelStride();
        rowStride[i] = plane.getRowStride();
    }

    outBuf.clear();

    int numBytesWritten = compressJpegFromYUV420p(
            img.getWidth(), img.getHeight(),
            planeBuf[0], pixelStride[0], rowStride[0],
            planeBuf[1], pixelStride[1], rowStride[1],
            planeBuf[2], pixelStride[2], rowStride[2],
            outBuf, quality);

    // Expose exactly the written jpeg bytes to the caller.
    outBuf.limit(numBytesWritten);

    return numBytesWritten;
}