extractInfoFromImage static method

Future<IDCardInfo> extractInfoFromImage(
  InputImage image,
  String cardType
)

Implementation
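
The implementation below relies on the ML Kit text-recognition and face-detection plugins, the image package (imported as img) for decoding and cropping, path_provider for the temporary directory, and dart:io for file access. A plausible import block is sketched here; the relative paths for the project's own extractors (ExtractVoter, ExtractNational, ExtractDriverLicense) and the IDCardInfo model are assumptions, since those files are not part of this listing.

// Sketch of the imports this method needs; the paths to the
// project-specific files are assumptions.
import 'dart:io';

import 'package:google_mlkit_commons/google_mlkit_commons.dart'; // InputImage
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
import 'package:google_mlkit_text_recognition/google_mlkit_text_recognition.dart';
import 'package:image/image.dart' as img;
import 'package:path_provider/path_provider.dart';

import 'id_card_info.dart';             // IDCardInfo model (assumed path)
import 'extractors/extract_voter.dart';  // ExtractVoter and friends (assumed paths)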

static Future<IDCardInfo> extractInfoFromImage(
    InputImage image, String cardType) async {
  // --- 1. TEXT RECOGNITION ---
  final textRecognizer = TextRecognizer(script: TextRecognitionScript.latin);
  final RecognizedText recognizedText = await textRecognizer.processImage(image);
  final lines = <String>[];
  for (var block in recognizedText.blocks) {
    for (var line in block.lines) {
      lines.add(line.text.trim());
    }
  }
  // Release the recognizer's native resources once the text has been read.
  await textRecognizer.close();

  // --- 2. CARD-SPECIFIC TEXT EXTRACTION ---
  IDCardInfo idCardInfo = IDCardInfo();
  switch (cardType) {
    case "Voter's Card":
      idCardInfo = await ExtractVoter.extractVoter(lines);
      break;
    case 'Internation Passport':
      idCardInfo = await ExtractNational.extractnational(lines);
      break;
    case 'National identity card':
      idCardInfo = await extractNIN(lines);
      break;
    case "Driver's License":
      idCardInfo = await ExtractDriverLicense.extractDriverLicense(lines);
      break;
    case 'nimc':
      idCardInfo = await extractnimc(lines);
      break;
    case 'ninslip':
      idCardInfo = await extractNINslip(lines);
      break;
    case 'digitalninslip':
      idCardInfo = await extractDigitalNINslip(lines);
      break;
    default:
      idCardInfo = await extractunknown(lines);
      break;
  }

  // --- 3. FACE DETECTION AND CROPPING ---
  String? croppedFacePath;
  // Obtain the raw image bytes, either directly or from the backing file.
  final imageBytes = image.bytes ??
      (image.filePath != null
          ? await File(image.filePath!).readAsBytes()
          : null);

  if (imageBytes != null) {
    final faceDetector = FaceDetector(options: FaceDetectorOptions());
    try {
      final List<Face> faces = await faceDetector.processImage(image);

      if (faces.isNotEmpty) {
        // Pick the largest detected face, most likely the portrait on the card.
        faces.sort((a, b) => b.boundingBox.width.compareTo(a.boundingBox.width));
        final Face largestFace = faces.first;

        final originalImage = img.decodeImage(imageBytes);

        if (originalImage != null) {
          // Expand the bounding box by a 20 px margin, clamped to the image bounds.
          final x = (largestFace.boundingBox.left - 20).clamp(0, originalImage.width).toInt();
          final y = (largestFace.boundingBox.top - 20).clamp(0, originalImage.height).toInt();
          final w = (largestFace.boundingBox.width + 40).clamp(0, originalImage.width - x).toInt();
          final h = (largestFace.boundingBox.height + 40).clamp(0, originalImage.height - y).toInt();

          final croppedFace = img.copyCrop(originalImage, x: x, y: y, width: w, height: h);

          // Save the crop as a JPEG in the temporary directory.
          final tempDir = await getTemporaryDirectory();
          final file = File(
              '${tempDir.path}/face_crop${DateTime.now().millisecondsSinceEpoch}.jpg');
          await file.writeAsBytes(img.encodeJpg(croppedFace));
          croppedFacePath = file.path;
        }
      }
    } catch (e) {
      print('Error cropping face from ID card: $e');
    } finally {
      // Always release the detector's native resources, even on failure.
      await faceDetector.close();
    }
  }

  // --- 4. RETURN COMBINED INFO ---
  return idCardInfo.copyWith(faceImagePath: croppedFacePath);
}
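
A minimal call site might look like the sketch below. The enclosing class name (IdCardScanner) is a placeholder, since the listing above shows only the static method itself; faceImagePath is the only IDCardInfo field known from this code, so no other fields are read here.

// Hypothetical usage sketch: IdCardScanner is an assumed name for the class
// that declares extractInfoFromImage.
Future<void> scanVotersCard(String imagePath) async {
  // Build an ML Kit InputImage from a file on disk.
  final inputImage = InputImage.fromFilePath(imagePath);

  final IDCardInfo info =
      await IdCardScanner.extractInfoFromImage(inputImage, "Voter's Card");

  // faceImagePath points at the cropped face JPEG in the temp directory,
  // or remains null if no face was detected.
  if (info.faceImagePath != null) {
    print('Cropped face saved at: ${info.faceImagePath}');
  }
}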