learning_object_detection 0.0.2
The easy way to use ML Kit for object detection & tracking in Flutter.
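At its core, detection is three calls: construct an ObjectDetector, await detect on an InputImage, and dispose the detector when finished. Below is a minimal sketch using a hypothetical detectOnce helper; the InputImage is assumed to come from learning_input_image (for example, via InputCameraView's onImage callback, as in the full example that follows).

import 'package:learning_input_image/learning_input_image.dart';
import 'package:learning_object_detection/learning_object_detection.dart';

// Sketch: run detection once on an InputImage and report how many objects were found.
Future<void> detectOnce(InputImage image) async {
  final ObjectDetector detector = ObjectDetector(
    isStream: false,
    enableClassification: true,
    enableMultipleObjects: true,
  );

  final List<DetectedObject> objects = await detector.detect(image);
  print('Detected ${objects.length} object(s)');

  detector.dispose();
}

The complete example below wires these same calls into InputCameraView, with a ChangeNotifier (via provider) holding the results for an ObjectOverlay.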
import 'package:flutter/material.dart';
import 'package:learning_input_image/learning_input_image.dart';
import 'package:learning_object_detection/learning_object_detection.dart';
import 'package:provider/provider.dart';
void main() {
runApp(MyApp());
}
class MyApp extends StatelessWidget {
@override
Widget build(BuildContext context) {
return MaterialApp(
debugShowCheckedModeBanner: false,
theme: ThemeData(
primarySwatch: Colors.lightBlue,
visualDensity: VisualDensity.adaptivePlatformDensity,
primaryTextTheme: TextTheme(headline6: TextStyle(color: Colors.white)),
),
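      // Provide ObjectDetectionState to the widget tree so the page and its overlay share detection results.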
home: ChangeNotifierProvider(
create: (_) => ObjectDetectionState(),
child: ObjectDetectionPage(),
),
);
}
}
class ObjectDetectionPage extends StatefulWidget {
@override
_ObjectDetectionPageState createState() => _ObjectDetectionPageState();
}
class _ObjectDetectionPageState extends State<ObjectDetectionPage> {
ObjectDetectionState get state => Provider.of(context, listen: false);
  final ObjectDetector _detector = ObjectDetector(
    isStream: false,
    enableClassification: true,   // also classify (label) each detected object
    enableMultipleObjects: true,  // detect multiple objects in a single image
  );
@override
void dispose() {
_detector.dispose();
super.dispose();
}
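  // Detect objects in the incoming image, skipping the call while a previous detection is still running.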
Future<void> _detectObjects(InputImage image) async {
if (state.isNotProcessing) {
state.startProcessing();
state.image = image;
state.data = await _detector.detect(image);
state.stopProcessing();
}
}
@override
Widget build(BuildContext context) {
return InputCameraView(
cameraDefault: InputCameraType.rear,
title: 'Object Detection & Tracking',
onImage: _detectObjects,
overlay: Consumer<ObjectDetectionState>(
builder: (_, state, __) {
if (state.isEmpty) {
return Container();
}
Size originalSize = state.size!;
Size size = MediaQuery.of(context).size;
        // If the image comes from the gallery (not the live camera), it is
        // displayed scaled to fit 360x360 while retaining its aspect ratio.
if (state.notFromLive) {
if (originalSize.aspectRatio > 1) {
size = Size(360.0, 360.0 / originalSize.aspectRatio);
} else {
size = Size(360.0 * originalSize.aspectRatio, 360.0);
}
}
return ObjectOverlay(
size: size,
originalSize: originalSize,
rotation: state.rotation,
objects: state.data,
);
},
),
);
}
}
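// Holds the current input image, the detection results, and a processing flag; notifies listeners whenever these change.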
class ObjectDetectionState extends ChangeNotifier {
InputImage? _image;
List<DetectedObject> _data = [];
bool _isProcessing = false;
InputImage? get image => _image;
List<DetectedObject> get data => _data;
String? get type => _image?.type;
InputImageRotation? get rotation => _image?.metadata?.rotation;
Size? get size => _image?.metadata?.size;
bool get isNotProcessing => !_isProcessing;
bool get isEmpty => _data.isEmpty;
bool get isFromLive => type == 'bytes';
bool get notFromLive => !isFromLive;
void startProcessing() {
_isProcessing = true;
notifyListeners();
}
void stopProcessing() {
_isProcessing = false;
notifyListeners();
}
set image(InputImage? image) {
_image = image;
if (notFromLive) {
_data = [];
}
notifyListeners();
}
set data(List<DetectedObject> data) {
_data = data;
notifyListeners();
}
}
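Each DetectedObject returned by detect carries the data that ObjectOverlay draws. The sketch below logs that data; the field names (boundingBox, trackingId, labels) are assumptions based on ML Kit's object detection API and may differ, so check the package's DetectedObject class for the exact shape.

import 'package:learning_object_detection/learning_object_detection.dart';

// Sketch: log the detection results. Field names are assumed from ML Kit's API.
void logObjects(List<DetectedObject> objects) {
  for (final DetectedObject object in objects) {
    print('Bounding box: ${object.boundingBox}'); // assumed: rectangle of the detected object
    print('Tracking id: ${object.trackingId}');   // assumed: id for tracking across frames
    print('Labels: ${object.labels}');            // assumed: classification labels (when enableClassification is true)
  }
}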