face_anti_spoofing_detector 0.0.3
face_anti_spoofing_detector: ^0.0.3 copied to clipboard
Flutter plugin that provides passive liveness detection for facial recognition systems — ensuring that the detected face belongs to a live person rather than a photo or a video.
import 'package:face_anti_spoofing_detector/face_anti_spoofing_detector.dart';
import 'package:face_contour_detector/face_contour_detector.dart';
import 'package:flutter/material.dart';
import 'dart:async';
import 'package:flutter/services.dart';
import 'package:image/image.dart' as img;
import 'package:liveness_detector_example/camera_view.dart';
/// Application entry point: boots the example app with [LivenessScreen]
/// as the sole route.
void main() {
  runApp(
    const MaterialApp(home: LivenessScreen()),
  );
}
/// Demo screen that streams camera frames through the face contour and
/// anti-spoofing detectors and shows the resulting liveness verdict.
class LivenessScreen extends StatefulWidget {
  const LivenessScreen({super.key});

  @override
  State<LivenessScreen> createState() {
    return _LivenessScreenState();
  }
}
class _LivenessScreenState extends State<LivenessScreen> {
  /// Latest anti-spoofing confidence in [0, 1]; null when no face is visible.
  double? _confidenceScore;

  /// Whether BOTH native detectors initialized successfully.
  bool _initializeModelStatus = false;

  /// Scores above this threshold are treated as a live ("Real") face.
  static const double _realThreshold = 0.8;

  @override
  void initState() {
    super.initState();
    _initializeDetector();
  }

  @override
  void dispose() {
    // Release native resources BEFORE super.dispose(): Flutter convention is
    // that cleanup happens first and super.dispose() is called last.
    FaceAntiSpoofingDetector.destroy();
    FaceContourDetector.destroy();
    super.dispose();
  }

  /// Initializes both native detectors and flips [_initializeModelStatus]
  /// once both report success.
  Future<void> _initializeDetector() async {
    final livenessDetectorStatus = await FaceAntiSpoofingDetector.initialize();
    final faceContourDetectorStatus = await FaceContourDetector.initialize();
    // Guard against the screen being disposed while the awaits were pending;
    // calling setState on an unmounted State throws.
    if (!mounted) return;
    if (livenessDetectorStatus && faceContourDetectorStatus) {
      setState(() {
        _initializeModelStatus = true;
      });
    }
  }

  /// Per-frame callback from [CameraView]: detects face contours in the YUV
  /// frame and, when a face is present, scores it for liveness.
  ///
  /// [yuvBytes] is the raw camera frame; [cameraViewSize] its width/height
  /// in pixels. Updates [_confidenceScore] and repaints on every frame.
  void handleCameraStream(Uint8List yuvBytes, Size cameraViewSize) async {
    try {
      final startAt = DateTime.now();
      final faceContourList = await FaceContourDetector.detectFromYuv(
        yuvBytes: yuvBytes,
        width: cameraViewSize.width.toInt(),
        height: cameraViewSize.height.toInt(),
      );
      if (faceContourList.isEmpty) {
        _confidenceScore = null;
      } else {
        final faceContour = faceContourList[0];
        _confidenceScore = await FaceAntiSpoofingDetector.detect(
          yuvBytes: yuvBytes,
          previewWidth: cameraViewSize.width.toInt(),
          previewHeight: cameraViewSize.height.toInt(),
          // NOTE(review): 7 appears to be an EXIF-style rotation code for the
          // front camera — confirm against the plugin's documentation.
          orientation: 7,
          faceContour: faceContour,
        );
        debugPrint("Confidence Score = $_confidenceScore");
      }
      debugPrint(
          "Detect duration = ${DateTime.now().difference(startAt).inMilliseconds} ms");
    } catch (e) {
      debugPrint('$e');
      _confidenceScore = null;
    } finally {
      // The awaits above may complete after the screen is popped; only
      // trigger a rebuild while this State is still in the tree.
      if (mounted) setState(() {});
    }
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(title: const Text('Liveness Detector')),
      body: Padding(
        padding: const EdgeInsets.all(20.0),
        child: _initializeModelStatus
            ? Column(
                mainAxisAlignment: MainAxisAlignment.start,
                children: [
                  const SizedBox(height: 20),
                  CameraView(
                    customPaint: null,
                    onImage: handleCameraStream,
                    // cameraStreamProcessDelay: Duration(milliseconds: 500),
                  ),
                  const SizedBox(height: 40),
                  _confidenceScore == null
                      ? const Text(
                          "No face detected",
                          style: TextStyle(color: Colors.redAccent),
                        )
                      : Text(
                          "${(_confidenceScore! > _realThreshold ? "Real" : "Fake")}\n Score : $_confidenceScore",
                          style: TextStyle(
                            color: _confidenceScore! > _realThreshold
                                ? Colors.lightGreen
                                : Colors.red,
                          ),
                          textAlign: TextAlign.center,
                        ),
                ],
              )
            : const Text("Failed to initialize detector!."),
      ),
    );
  }
}
/// Converts an encoded image (JPEG/PNG/…) to a YUV420 NV21 byte buffer.
///
/// The image is decoded, resized to [width] x [height] (defaults match the
/// 640x480 camera preview), and converted pixel-by-pixel using the BT.601
/// integer approximation. NV21 layout: full-resolution Y plane followed by
/// an interleaved VU plane subsampled 2x2 (top-left pixel of each 2x2 cell).
///
/// Throws an [Exception] if the bytes cannot be decoded.
Uint8List convertBytesImageToYuv(
  Uint8List imageBytes, {
  int width = 640,
  int height = 480,
}) {
  // Decode the image from bytes.
  final image = img.decodeImage(imageBytes);
  if (image == null) {
    throw Exception('Failed to decode image');
  }
  // Resize image to match the requested preview dimensions.
  final resized = img.copyResize(image, width: width, height: height);
  final w = resized.width;
  final h = resized.height;
  // Buffer sizes for YUV420: one Y byte per pixel, one V+U pair per 2x2 cell.
  final ySize = w * h;
  final uvSize = (w ~/ 2) * (h ~/ 2);
  final yuv = Uint8List(ySize + uvSize * 2);
  int yIndex = 0;
  int uvIndex = ySize;
  for (int y = 0; y < h; y++) {
    for (int x = 0; x < w; x++) {
      final pixel = resized.getPixel(x, y);
      final r = pixel.r.toInt();
      final g = pixel.g.toInt();
      final b = pixel.b.toInt();
      // Luminance (BT.601, studio range offset +16).
      final yValue = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;
      // num.clamp returns num — .toInt() is required to store into Uint8List.
      yuv[yIndex++] = yValue.clamp(0, 255).toInt();
      // Chroma is sampled at the top-left pixel of every 2x2 block.
      if (y % 2 == 0 && x % 2 == 0) {
        final uValue = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128;
        final vValue = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;
        yuv[uvIndex++] = vValue.clamp(0, 255).toInt(); // V first (NV21)
        yuv[uvIndex++] = uValue.clamp(0, 255).toInt(); // then U
      }
    }
  }
  return yuv;
}
/// Paints a green bounding box and a "Real : score" label for each detected
/// face onto the camera overlay canvas.
class FaceBoxPainter extends CustomPainter {
  /// Faces to render, each carrying a score and a bounding rectangle.
  final List<FaceModel> faces;

  /// Size of the source image the face rectangles are expressed in.
  final Size imageSize;

  FaceBoxPainter(this.faces, this.imageSize);

  @override
  void paint(Canvas canvas, Size size) {
    final boxPaint = Paint()
      ..color = Colors.green
      ..style = PaintingStyle.stroke
      ..strokeWidth = 4.0;
    // Hoisted out of the loop: one shared label-background paint.
    final labelBackgroundPaint = Paint()..color = Colors.black.withOpacity(0.7);
    for (var face in faces) {
      // Face bounding box.
      canvas.drawRect(face.faceContour, boxPaint);
      // Label text. BUG FIX: the original set TextStyle.backgroundColor AND
      // drew a separate background rect, stacking two translucent layers;
      // only the explicit rect (with its 2 px padding) is kept.
      final textSpan = TextSpan(
        text: "Real : ${face.score}",
        style: const TextStyle(
          color: Colors.green,
          fontSize: 16,
          fontWeight: FontWeight.bold,
        ),
      );
      final textPainter = TextPainter(
        text: textSpan,
        textDirection: TextDirection.ltr,
      );
      textPainter.layout();
      // Label sits just above the box's top-left corner.
      final offset = Offset(
        face.faceContour.left + 5,
        face.faceContour.top - textPainter.height - 5,
      );
      // Background rectangle with 2 px padding for readability.
      final backgroundRect = Rect.fromLTWH(
        offset.dx - 2,
        offset.dy - 2,
        textPainter.width + 4,
        textPainter.height + 4,
      );
      canvas.drawRect(backgroundRect, labelBackgroundPaint);
      textPainter.paint(canvas, offset);
    }
  }

  @override
  bool shouldRepaint(covariant FaceBoxPainter oldDelegate) {
    // Identity comparison: repaints only when a NEW list/size instance is
    // supplied. In-place mutation of the same list will not repaint.
    return faces != oldDelegate.faces || imageSize != oldDelegate.imageSize;
  }
}
/// Mutable result record pairing a detected face's bounding rectangle with
/// its optional anti-spoofing confidence score.
class FaceModel {
  /// Liveness confidence; null while not yet scored.
  double? score;

  /// Bounding rectangle of the face, in source-image coordinates.
  Rect faceContour;

  FaceModel({required this.faceContour, this.score});
}