flutter_face_guide 0.1.0
Guided face capture flow for Flutter with alignment states and lightweight quality checks.
import 'dart:async';
import 'dart:io';
import 'dart:typed_data';
import 'dart:ui' as ui;
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:flutter_face_guide/flutter_face_guide.dart';
import 'package:flutter_face_guide_mlkit/flutter_face_guide_mlkit.dart';
/// Entry point: launches the face-guide example application.
void main() => runApp(const FaceGuideExampleApp());
/// Root widget of the example app.
///
/// Hosts a single [MaterialApp] whose home screen drives the guided
/// face-capture demo.
class FaceGuideExampleApp extends StatelessWidget {
  const FaceGuideExampleApp({super.key});

  @override
  Widget build(BuildContext context) =>
      const MaterialApp(home: FaceGuideHome());
}
/// Stateful host for the face-guide demo screen.
///
/// All camera, analysis, and UI state lives in [_FaceGuideHomeState].
class FaceGuideHome extends StatefulWidget {
  const FaceGuideHome({super.key});

  @override
  State<FaceGuideHome> createState() => _FaceGuideHomeState();
}
class _FaceGuideHomeState extends State<FaceGuideHome> {
// State machine driving the guided capture flow (flutter_face_guide).
late final FaceGuideController controller;
// Face-detection adapter backed by Google ML Kit.
late final MlKitFaceGuideAdapter _mlKitAdapter;
// Live camera; null until _initCamera succeeds.
CameraController? _cameraController;
bool _cameraReady = false;
// Re-entrancy guards for the two long-running async actions.
bool _isBusyCapture = false;
bool _isBusyAutoAnalyze = false;
// Whether streamed camera frames are analyzed automatically.
bool _isRealtimeEnabled = true;
// True while a streamed frame is being analyzed; frames arriving
// meanwhile are dropped.
bool _isAnalyzingFrame = false;
// Timestamp of the last processed frame; throttles analysis to one
// frame per 500 ms (see _startRealtimeAnalysis).
DateTime _lastFrameProcessed = DateTime.fromMillisecondsSinceEpoch(0);
// Path of the most recently captured photo, if any.
String? _lastImagePath;
// Current quality metrics (0..1); editable via sliders or overwritten
// by frame analysis.
double _brightness = 0.70;
double _sharpness = 0.70;
double _faceCenterRatio = 0.80;
@override
void initState() {
super.initState();
_mlKitAdapter = MlKitFaceGuideAdapter();
// Low thresholds so the demo reaches the "ready" state easily;
// tighten min* values for stricter gating.
controller = FaceGuideController(
config: const FaceGuideConfig(
enableChallenge: true,
minBrightness: 0.20,
minSharpness: 0.03,
minFaceCenterRatio: 0.20,
),
);
// Start the guide state machine before the camera so simulated quality
// updates work even if camera initialization fails.
controller.start();
// Fire-and-forget: camera setup completes asynchronously and updates
// state via setState when done.
_initCamera();
}
@override
void dispose() {
  // BUG FIX: previously the stream stop and camera disposal were fired
  // as independent unawaited futures, so the camera could be disposed
  // while frames were still streaming. `dispose` cannot await, so chain
  // the disposal after the stream has actually stopped instead.
  final cam = _cameraController;
  if (cam != null) {
    unawaited(_stopRealtimeAnalysis().whenComplete(() => cam.dispose()));
  }
  unawaited(_mlKitAdapter.dispose());
  controller.dispose();
  super.dispose();
}
/// Initializes the front camera (falling back to the first available
/// camera) and, on success, starts the realtime analysis stream.
///
/// On failure the partially-initialized controller is released so the
/// UI falls back cleanly to the "camera not ready" placeholder.
Future<void> _initCamera() async {
  CameraController? cam;
  try {
    final cameras = await availableCameras();
    final front = cameras.firstWhere(
      (camera) => camera.lensDirection == CameraLensDirection.front,
      orElse: () => cameras.first,
    );
    cam = CameraController(
      front,
      ResolutionPreset.high,
      enableAudio: false,
      // Formats expected by the ML Kit adapter per platform.
      imageFormatGroup: Platform.isAndroid
          ? ImageFormatGroup.nv21
          : ImageFormatGroup.bgra8888,
    );
    await cam.initialize();
    if (!mounted) {
      await cam.dispose();
      return;
    }
    setState(() {
      _cameraController = cam;
      _cameraReady = true;
    });
    await _startRealtimeAnalysis();
  } catch (_) {
    // BUG FIX: if an error occurred after the controller was created
    // (e.g. while starting the stream), the camera was leaked and left
    // in _cameraController. Release it best-effort and clear the field.
    unawaited(cam?.dispose());
    if (identical(_cameraController, cam)) {
      _cameraController = null;
    }
    if (!mounted) {
      return;
    }
    setState(() {
      _cameraReady = false;
    });
  }
}
/// Feeds an ideal synthetic quality sample into the controller.
void _simulateGoodFrame() {
  setState(() {
    _brightness = 0.85;
    _sharpness = 0.88;
    _faceCenterRatio = 0.92;
  });
  // Reuse the shared helper to publish the new values.
  _applyCustomQuality();
}
/// Feeds a deliberately poor synthetic quality sample into the controller.
void _simulateBadFrame() {
  setState(() {
    _brightness = 0.20;
    _sharpness = 0.20;
    _faceCenterRatio = 0.30;
  });
  // Reuse the shared helper to publish the new values.
  _applyCustomQuality();
}
/// Publishes the current metric fields to the controller as one sample.
void _applyCustomQuality() {
  final sample = QualityInput(
    brightness: _brightness,
    sharpness: _sharpness,
    faceCenterRatio: _faceCenterRatio,
  );
  controller.updateQuality(sample);
}
/// Runs the controller's capture gate and, if it passes, takes a photo.
///
/// Pauses the realtime stream for the duration of the shot, restarts it
/// afterwards when realtime mode is on, and reports the outcome in a
/// snackbar.
Future<void> _capture() async {
  if (_isBusyCapture) {
    return;
  }
  setState(() {
    _isBusyCapture = true;
  });
  await _stopRealtimeAnalysis();
  final result = await controller.capture();
  XFile? shot;
  if (result.ok && _cameraReady && _cameraController != null) {
    try {
      shot = await _cameraController!.takePicture();
    } catch (_) {
      // Quality gate passed but the platform capture failed; the
      // snackbar below reports this case explicitly.
      shot = null;
    }
  }
  // BUG FIX: the widget may have been disposed during the awaits above;
  // calling setState (or touching the camera) after dispose throws.
  if (!mounted) {
    return;
  }
  setState(() {
    _isBusyCapture = false;
    _lastImagePath = shot?.path;
  });
  if (_isRealtimeEnabled) {
    await _startRealtimeAnalysis();
  }
  if (!mounted) {
    return;
  }
  final text = result.ok
      ? shot != null
          ? 'Capture success. Saved to: ${shot.path}'
          : 'Capture passed quality, but camera photo failed.'
      : 'Capture failed: ${result.reason}';
  ScaffoldMessenger.of(context).showSnackBar(SnackBar(content: Text(text)));
}
// One-shot analysis: take a photo, run ML Kit face detection on the
// saved file, compute brightness/sharpness from the decoded bytes, and
// push the combined metrics into the controller.
Future<void> _autoAnalyze() async {
if (_isBusyAutoAnalyze || _isBusyCapture) {
return;
}
final cam = _cameraController;
if (!_cameraReady || cam == null || !cam.value.isInitialized) {
if (!mounted) {
return;
}
ScaffoldMessenger.of(context).showSnackBar(
const SnackBar(content: Text('Camera is not ready for auto analysis.')),
);
return;
}
setState(() {
_isBusyAutoAnalyze = true;
});
try {
// Pause the stream while taking the still; it is restarted in
// `finally`, presumably to avoid stream/capture contention.
await _stopRealtimeAnalysis();
final shot = await cam.takePicture();
final image = InputImage.fromFilePath(shot.path);
final analysis = await _mlKitAdapter.analyze(
image: image,
// Preview size may be null; 1 is a safe fallback. NOTE(review): the
// preview size may not match the captured photo's dimensions —
// confirm the adapter only uses these as a fallback denominator.
frameWidth: cam.value.previewSize?.width.toInt() ?? 1,
frameHeight: cam.value.previewSize?.height.toInt() ?? 1,
);
final metrics =
await _computeImageMetricsFromBytes(await shot.readAsBytes());
_applyMetrics(
metrics,
imagePath: shot.path,
faceCenterRatio: analysis.faceCenterRatio,
);
} catch (_) {
if (!mounted) {
return;
}
ScaffoldMessenger.of(context).showSnackBar(
const SnackBar(
content: Text('Auto analysis failed. Please try again.')),
);
} finally {
// Restore the realtime stream and clear the busy flag regardless of
// success or failure (runs even on the early returns above).
if (_isRealtimeEnabled) {
await _startRealtimeAnalysis();
}
if (mounted) {
setState(() {
_isBusyAutoAnalyze = false;
});
}
}
}
// Begins streaming camera frames into _analyzeCameraFrame.
// No-ops when the camera is unavailable or a stream is already active.
Future<void> _startRealtimeAnalysis() async {
final cam = _cameraController;
if (!_cameraReady || cam == null || !cam.value.isInitialized) {
return;
}
if (cam.value.isStreamingImages) {
return;
}
await cam.startImageStream((image) {
// Ignore frames while realtime mode is off or a one-shot action
// (capture / auto-analyze) owns the camera.
if (!_isRealtimeEnabled || _isBusyCapture || _isBusyAutoAnalyze) {
return;
}
final now = DateTime.now();
// Throttle: drop the frame if one is still being analyzed or fewer
// than 500 ms have passed since the previous accepted frame.
if (_isAnalyzingFrame ||
now.difference(_lastFrameProcessed).inMilliseconds < 500) {
return;
}
_isAnalyzingFrame = true;
_lastFrameProcessed = now;
// Fire-and-forget; the guard flag is cleared when analysis finishes,
// even if it throws.
unawaited(
_analyzeCameraFrame(image).whenComplete(() {
_isAnalyzingFrame = false;
}),
);
});
}
/// Stops the camera image stream if one is currently active.
Future<void> _stopRealtimeAnalysis() async {
  final cam = _cameraController;
  if (cam != null && cam.value.isStreamingImages) {
    await cam.stopImageStream();
  }
}
/// Analyzes one streamed frame: face position via ML Kit plus
/// brightness/sharpness heuristics, then forwards both to the controller.
Future<void> _analyzeCameraFrame(CameraImage image) async {
  try {
    final inputImage = _inputImageFromCameraImage(image);
    if (inputImage == null) {
      return;
    }
    final analysis = await _mlKitAdapter.analyze(
      image: inputImage,
      frameWidth: image.width,
      frameHeight: image.height,
    );
    _applyMetrics(
      _computeImageMetricsFromCameraImage(image),
      faceCenterRatio: analysis.faceCenterRatio,
    );
  } catch (_) {
    // Deliberately swallow per-frame errors so the stream keeps flowing.
  }
}
// Wraps a streamed CameraImage into an ML Kit InputImage without
// copying pixel data. Returns null when the camera or plane data is
// unavailable.
InputImage? _inputImageFromCameraImage(CameraImage image) {
final cam = _cameraController;
if (cam == null) {
return null;
}
if (image.planes.isEmpty) {
return null;
}
// Map the sensor orientation onto ML Kit's rotation enum; fall back to
// 0 degrees for unexpected raw values.
final rotation = InputImageRotationValue.fromRawValue(
cam.description.sensorOrientation,
) ??
InputImageRotation.rotation0deg;
// Matches the imageFormatGroup requested in _initCamera.
final format =
Platform.isAndroid ? InputImageFormat.nv21 : InputImageFormat.bgra8888;
// NOTE(review): only the first plane is used — fine for single-plane
// nv21/bgra8888 buffers, but would drop chroma planes if a device ever
// delivers multi-plane YUV; confirm against the camera plugin's format
// guarantees.
final bytes = image.planes.first.bytes;
return InputImage.fromBytes(
bytes: bytes,
metadata: InputImageMetadata(
size: Size(image.width.toDouble(), image.height.toDouble()),
rotation: rotation,
format: format,
bytesPerRow: image.planes.first.bytesPerRow,
),
);
}
/// Decodes [bytes] (an encoded image, e.g. a JPEG from `takePicture`) and
/// estimates brightness (mean luma) and sharpness (mean horizontal luma
/// difference) on a sparse pixel grid.
///
/// `faceCenterRatio` is always 0 here; it is supplied separately by the
/// ML Kit analysis.
Future<_ImageMetrics> _computeImageMetricsFromBytes(
  Uint8List bytes,
) async {
  final codec = await ui.instantiateImageCodec(bytes);
  final frame = await codec.getNextFrame();
  final image = frame.image;
  try {
    final byteData =
        await image.toByteData(format: ui.ImageByteFormat.rawRgba);
    if (byteData == null) {
      return const _ImageMetrics(
        brightness: 0,
        sharpness: 0,
        faceCenterRatio: 0,
      );
    }
    final rgba = byteData.buffer.asUint8List();
    final width = image.width;
    final height = image.height;
    var brightnessSum = 0.0;
    var brightnessCount = 0;
    var sharpnessSum = 0.0;
    var sharpnessCount = 0;
    // Sample every 4th pixel in each direction: ~16x cheaper and close
    // enough for a coarse quality estimate.
    const step = 4;
    // Rec. 709 luma (0..1) of the RGBA pixel starting at byte offset [i].
    double lumaAt(int i) =>
        (0.2126 * rgba[i] + 0.7152 * rgba[i + 1] + 0.0722 * rgba[i + 2]) /
        255.0;
    for (var y = 0; y < height; y += step) {
      for (var x = 0; x < width; x += step) {
        final luma = lumaAt((y * width + x) * 4);
        brightnessSum += luma;
        brightnessCount += 1;
        if (x + step < width) {
          // Horizontal neighbor difference approximates local contrast.
          sharpnessSum +=
              (luma - lumaAt((y * width + x + step) * 4)).abs();
          sharpnessCount += 1;
        }
      }
    }
    // BUG FIX: num.clamp returns `num`, which is not assignable to the
    // `double` fields of _ImageMetrics under sound null safety; convert
    // explicitly.
    final brightness = brightnessCount == 0
        ? 0.0
        : (brightnessSum / brightnessCount).clamp(0.0, 1.0).toDouble();
    final sharpness = sharpnessCount == 0
        ? 0.0
        : (sharpnessSum / sharpnessCount).clamp(0.0, 1.0).toDouble();
    return _ImageMetrics(
      brightness: brightness,
      sharpness: sharpness,
      faceCenterRatio: 0,
    );
  } finally {
    // BUG FIX: release the decoded image's native memory promptly
    // instead of waiting for GC.
    image.dispose();
  }
}
/// Estimates brightness and sharpness directly from a streamed
/// [CameraImage] without decoding.
///
/// For non-BGRA formats the first plane's raw byte is used as luma
/// (valid for NV21, whose first plane is the Y plane); for BGRA the luma
/// is recomputed from the channel bytes. `faceCenterRatio` is always 0
/// here (supplied by ML Kit separately).
_ImageMetrics _computeImageMetricsFromCameraImage(CameraImage image) {
  final width = image.width;
  final height = image.height;
  if (width <= 0 || height <= 0 || image.planes.isEmpty) {
    return const _ImageMetrics(
      brightness: 0,
      sharpness: 0,
      faceCenterRatio: 0,
    );
  }
  final plane = image.planes.first;
  final bytes = plane.bytes;
  final bytesPerRow = plane.bytesPerRow;
  final pixelStride = plane.bytesPerPixel ?? 1;
  var brightnessSum = 0.0;
  var brightnessCount = 0;
  var sharpnessSum = 0.0;
  var sharpnessCount = 0;
  // Sample every 4th pixel in each direction to keep per-frame cost low.
  const step = 4;
  final isBgra = image.format.raw == InputImageFormat.bgra8888.rawValue;
  // Normalized luma (0..1) of the sample at byte offset [index].
  double lumaAt(int index) => isBgra
      ? _bgraLuma(bytes, index)
      : (bytes[index] / 255.0).clamp(0.0, 1.0).toDouble();
  for (var y = 0; y < height; y += step) {
    for (var x = 0; x < width; x += step) {
      final index = y * bytesPerRow + x * pixelStride;
      // Rows may be padded (bytesPerRow > width * stride); skip samples
      // that would fall past the buffer.
      if (index >= bytes.length) {
        continue;
      }
      final luma = lumaAt(index);
      brightnessSum += luma;
      brightnessCount += 1;
      final rightX = x + step;
      if (rightX < width) {
        final rightIndex = y * bytesPerRow + rightX * pixelStride;
        if (rightIndex < bytes.length) {
          // Horizontal neighbor difference approximates local contrast.
          sharpnessSum += (luma - lumaAt(rightIndex)).abs();
          sharpnessCount += 1;
        }
      }
    }
  }
  // BUG FIX: num.clamp returns `num`, which is not assignable to the
  // `double` fields of _ImageMetrics under sound null safety; convert
  // explicitly.
  final brightness = brightnessCount == 0
      ? 0.0
      : (brightnessSum / brightnessCount).clamp(0.0, 1.0).toDouble();
  final sharpness = sharpnessCount == 0
      ? 0.0
      : (sharpnessSum / sharpnessCount).clamp(0.0, 1.0).toDouble();
  return _ImageMetrics(
    brightness: brightness,
    sharpness: sharpness,
    faceCenterRatio: 0,
  );
}
/// Rec. 709 luma (0..1) of the BGRA pixel starting at [index].
///
/// Returns 0 when fewer than three channel bytes are available at
/// [index], guarding against sampling past the end of the plane.
double _bgraLuma(Uint8List bytes, int index) {
  if (index + 2 >= bytes.length) {
    return 0;
  }
  final b = bytes[index].toDouble();
  final g = bytes[index + 1].toDouble();
  final r = bytes[index + 2].toDouble();
  // BUG FIX: num.clamp returns `num`; convert to satisfy the declared
  // `double` return type under sound null safety.
  return ((0.2126 * r + 0.7152 * g + 0.0722 * b) / 255.0)
      .clamp(0.0, 1.0)
      .toDouble();
}
/// Applies computed [metrics] to local UI state and forwards them to the
/// controller.
///
/// [faceCenterRatio], when provided, overrides the ratio in [metrics];
/// [imagePath], when provided, updates the "last photo" label.
void _applyMetrics(
  _ImageMetrics metrics, {
  String? imagePath,
  double? faceCenterRatio,
}) {
  if (!mounted) {
    return;
  }
  final brightness = metrics.brightness;
  final sharpness = metrics.sharpness;
  final centerRatio = faceCenterRatio ?? metrics.faceCenterRatio;
  setState(() {
    _brightness = brightness;
    _sharpness = sharpness;
    _faceCenterRatio = centerRatio;
    if (imagePath != null) {
      _lastImagePath = imagePath;
    }
  });
  controller.updateQuality(
    QualityInput(
      brightness: brightness,
      sharpness: sharpness,
      faceCenterRatio: centerRatio,
    ),
  );
}
/// Builds a labeled 0..1 slider; [onChanged] stores the new value and
/// the slider immediately re-publishes the custom quality sample.
Widget _metricSlider({
  required String label,
  required double value,
  required ValueChanged<double> onChanged,
}) {
  final header = Text('$label: ${value.toStringAsFixed(2)}');
  final slider = Slider(
    value: value,
    min: 0,
    max: 1,
    divisions: 100,
    onChanged: (updated) {
      onChanged(updated);
      _applyCustomQuality();
      setState(() {});
    },
  );
  return Column(
    crossAxisAlignment: CrossAxisAlignment.start,
    children: [header, slider],
  );
}
/// Renders the live camera preview with an alignment overlay, or a
/// placeholder card while the camera is unavailable.
Widget _cameraPreviewCard(FaceGuideState state) {
  final theme = Theme.of(context);
  final cam = _cameraController;
  if (!_cameraReady || cam == null || !cam.value.isInitialized) {
    return Container(
      height: 320,
      decoration: BoxDecoration(
        color: theme.colorScheme.surfaceVariant,
        borderRadius: BorderRadius.circular(16),
      ),
      alignment: Alignment.center,
      child: const Text('Camera is not ready'),
    );
  }
  // Both the outer frame and the oval guide turn green once the
  // controller reports the "ready" status.
  final accent = state.status == FaceGuideStatus.ready
      ? Colors.greenAccent
      : Colors.white;
  return ClipRRect(
    borderRadius: BorderRadius.circular(16),
    child: SizedBox(
      height: 320,
      child: Stack(
        fit: StackFit.expand,
        children: [
          CameraPreview(cam),
          // Rounded frame around the whole preview.
          Container(
            decoration: BoxDecoration(
              border: Border.all(color: accent, width: 2),
              borderRadius: BorderRadius.circular(16),
            ),
          ),
          // Oval face-alignment guide in the center.
          Center(
            child: Container(
              width: 190,
              height: 250,
              decoration: BoxDecoration(
                border: Border.all(color: accent, width: 3),
                borderRadius: BorderRadius.circular(120),
              ),
            ),
          ),
        ],
      ),
    ),
  );
}
@override
// Main screen layout. Rebuilds on every FaceGuideState emitted by the
// controller; camera/slider/busy-flag changes rebuild via setState.
Widget build(BuildContext context) {
final theme = Theme.of(context);
return Scaffold(
appBar: AppBar(title: const Text('Face Guide Example')),
body: StreamBuilder<FaceGuideState>(
stream: controller.state,
initialData: controller.currentState,
builder: (context, snapshot) {
final state = snapshot.data ?? FaceGuideState.idle();
final quality = state.quality;
return Padding(
padding: const EdgeInsets.all(16),
child: ListView(
children: [
// Live preview with alignment overlay.
_cameraPreviewCard(state),
const SizedBox(height: 16),
// Current guide status and instruction text.
Text('Status: ${state.status.name}',
style: theme.textTheme.titleMedium),
const SizedBox(height: 4),
Text(state.instruction),
if (_lastImagePath != null) ...[
const SizedBox(height: 8),
Text('Last photo: $_lastImagePath'),
],
const SizedBox(height: 12),
// Toggle for streaming quality analysis.
SwitchListTile.adaptive(
contentPadding: EdgeInsets.zero,
title: const Text('Realtime Quality Check'),
subtitle: const Text(
'Automatically update quality from camera stream'),
value: _isRealtimeEnabled,
onChanged: (value) async {
setState(() {
_isRealtimeEnabled = value;
});
if (value) {
await _startRealtimeAnalysis();
} else {
await _stopRealtimeAnalysis();
}
},
),
const SizedBox(height: 8),
// Latest quality report from the controller, when available.
if (quality != null)
Card(
child: Padding(
padding: const EdgeInsets.all(12),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text('Quality Report',
style: theme.textTheme.titleSmall),
const SizedBox(height: 8),
Text(
'brightness: ${quality.brightness.toStringAsFixed(2)}'),
Text(
'sharpness: ${quality.sharpness.toStringAsFixed(2)}'),
Text(
'faceCenterRatio: ${quality.faceCenterRatio.toStringAsFixed(2)}'),
Text('passed: ${quality.passed}'),
Text(
quality.issues.isEmpty
? 'issues: none'
: 'issues: ${quality.issues.join(', ')}',
),
],
),
),
),
const SizedBox(height: 12),
// Manual sliders for feeding custom quality values.
_metricSlider(
label: 'Brightness',
value: _brightness,
onChanged: (v) => _brightness = v,
),
_metricSlider(
label: 'Sharpness',
value: _sharpness,
onChanged: (v) => _sharpness = v,
),
_metricSlider(
label: 'Face Center Ratio',
value: _faceCenterRatio,
onChanged: (v) => _faceCenterRatio = v,
),
const SizedBox(height: 8),
// Controller lifecycle controls.
Wrap(
spacing: 8,
runSpacing: 8,
children: [
FilledButton(
onPressed: () => controller.start(),
child: const Text('Start'),
),
FilledButton.tonal(
onPressed: () => controller.pause(),
child: const Text('Pause'),
),
FilledButton.tonal(
onPressed: () => controller.resume(),
child: const Text('Resume'),
),
FilledButton.tonal(
onPressed: () => controller.stop(),
child: const Text('Stop'),
),
],
),
const SizedBox(height: 8),
// Analysis / capture actions. Auto Analyze is only enabled while
// realtime mode is off, since the stream already updates quality.
Wrap(
spacing: 8,
runSpacing: 8,
children: [
FilledButton(
onPressed: _isBusyAutoAnalyze || _isRealtimeEnabled
? null
: _autoAnalyze,
child: Text(
_isBusyAutoAnalyze
? 'Analyzing...'
: 'Auto Analyze (Single Shot)',
),
),
FilledButton(
onPressed: _simulateGoodFrame,
child: const Text('Simulate Good Frame'),
),
FilledButton.tonal(
onPressed: _simulateBadFrame,
child: const Text('Simulate Bad Frame'),
),
FilledButton(
onPressed: _isBusyCapture ? null : _capture,
child: Text(_isBusyCapture ? 'Capturing...' : 'Capture'),
),
],
),
],
),
);
},
),
);
}
}
/// Immutable bundle of frame-quality measurements, each in 0..1.
class _ImageMetrics {
  /// Mean luma of the sampled pixels.
  final double brightness;

  /// Mean horizontal luma difference of the sampled pixels.
  final double sharpness;

  /// Face centering score; 0 when computed purely from pixels (the
  /// ML Kit analysis supplies the real value separately).
  final double faceCenterRatio;

  const _ImageMetrics({
    required this.brightness,
    required this.sharpness,
    required this.faceCenterRatio,
  });
}