convertYUV420ToImage static method

Image convertYUV420ToImage(
  CameraImage cameraImage
)

Implementation

static img.Image convertYUV420ToImage(CameraImage cameraImage) {
  final imageWidth = cameraImage.width;
  final imageHeight = cameraImage.height;

  final yBuffer = cameraImage.planes[0].bytes;
  final uBuffer =
      cameraImage.planes.length > 1 ? cameraImage.planes[1].bytes : null;
  final vBuffer =
      cameraImage.planes.length > 2 ? cameraImage.planes[2].bytes : null;

  final int yRowStride = cameraImage.planes[0].bytesPerRow;
  final int yPixelStride = cameraImage.planes[0].bytesPerPixel ?? 1;

  // iOS uses a single interleaved plane for UV
  final int uvRowStride = cameraImage.planes.length > 1
      ? cameraImage.planes[1].bytesPerRow
      : yRowStride;
  final int uvPixelStride = cameraImage.planes.length > 1
      ? cameraImage.planes[1].bytesPerPixel ?? 2
      : 2;

  final image = img.Image(width: imageWidth, height: imageHeight);

  for (int h = 0; h < imageHeight; h++) {
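    // YUV420 chroma is subsampled 2x2, so U/V sample coordinates are half of (w, h).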
    int uvh = (h / 2).floor();

    for (int w = 0; w < imageWidth; w++) {
      int uvw = (w / 2).floor();

      final yIndex = (h * yRowStride) + (w * yPixelStride);
      final int y = yBuffer[yIndex];

      int u = 128;
      int v = 128;

      if (uBuffer != null && vBuffer != null) {
        // Android: separate U and V planes
        final uvIndex = (uvh * uvRowStride) + (uvw * uvPixelStride);
        u = uBuffer[uvIndex];
        v = vBuffer[uvIndex];
      } else if (uBuffer != null) {
        // iOS: U and V are interleaved in a single plane
        final uvIndex = (uvh * uvRowStride) + (uvw * uvPixelStride);
        u = uBuffer[uvIndex];
        v = uBuffer[uvIndex + 1];
      }

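      // Fixed-point YUV -> RGB conversion; the constant offsets re-centre
      // U and V around 128.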
      int r = (y + v * 1436 / 1024 - 179).round();
      int g = (y - u * 46549 / 131072 + 44 - v * 93604 / 131072 + 91).round();
      int b = (y + u * 1814 / 1024 - 227).round();

      r = r.clamp(0, 255);
      g = g.clamp(0, 255);
      b = b.clamp(0, 255);

      image.setPixelRgb(w, h, r, g, b);
    }
  }

  return image;
}