apple_vision_hand_3d 0.0.1 copy "apple_vision_hand_3d: ^0.0.1" to clipboard
apple_vision_hand_3d: ^0.0.1 copied to clipboard

PlatformiOSmacOS

A Flutter plugin to use Apple Vision Hand Detection to detect a single hand and fingers in an image or video stream, and identify key distal features.

apple_vision_hand_3d #

Pub Version analysis Star on Github License: MIT

Apple Vision Hand 3D is a Flutter plugin that enables Flutter apps to use TensorFlow's BlazeHand hand landmarks.

  • This plugin is not sponsored or maintained by Apple. The authors are developers who wanted to make a plugin similar to Google's ML Kit for macOS.

Requirements #

MacOS

  • Minimum macOS deployment target: 12.0
  • Xcode 13 or newer
  • Swift 5
  • ML Kit only supports 64-bit architectures (x86_64 and arm64).

iOS

  • Minimum iOS deployment target: 14.0
  • Xcode 13 or newer
  • Swift 5
  • ML Kit only supports 64-bit architectures (x86_64 and arm64).

Getting Started #

You need to first import 'package:apple_vision/apple_vision.dart';

// Key used to identify the camera preview widget in the tree.
final GlobalKey cameraKey = GlobalKey(debugLabel: "cameraKey");
// Controller that forwards frames to the Apple Vision hand-detection API.
AppleVisionHand3DController visionController = AppleVisionHand3DController();
// Helper that enumerates devices and streams live camera frames.
InsertCamera camera = InsertCamera();
// Size of the incoming frames; updated from frame metadata when available.
// Default assumes a 640-wide, 16:9 preview.
Size imageSize = const Size(640,640*9/16);
// Selected camera device id, if any.
String? deviceId;
// True until camera setup completes; gates the preview widget.
bool loading = true;

// Latest hand-detection results; null/empty until the first frame is processed.
List<HandMesh>? handData;
// Screen dimensions, populated in build() from MediaQuery.
late double deviceWidth;
late double deviceHeight;

@override
void initState() {
  // Flutter convention: call super.initState() first.
  super.initState();
  camera.setupCameras().then((value){
    // Guard against the async setup completing after this State is disposed.
    if(mounted) {
      setState(() {
        loading = false;
      });
    }
    camera.startLiveFeed((InputImage i){
      // Prefer the real frame size from metadata over the default guess.
      if(i.metadata?.size != null){
        imageSize = i.metadata!.size;
      }
      final Uint8List? image = i.bytes;
      // Skip frames with no byte data instead of crashing on a null assert,
      // and skip entirely once the widget is gone.
      if(!mounted || image == null) return;
      visionController.processImage(image, imageSize, ImageOrientation.up).then((data){
        // The widget may have been disposed while detection was in flight.
        if(!mounted) return;
        setState(() {
          handData = data;
        });
      });
    });
  });
}
@override
void dispose() {
  // Stop the live feed and release the camera before the State is torn down.
  camera.dispose();
  super.dispose();
}

@override
Widget build(BuildContext context) {
  // Cache the screen dimensions for use by loadingWidget().
  final Size screen = MediaQuery.of(context).size;
  deviceWidth = screen.width;
  deviceHeight = screen.height;
  // Camera preview at the bottom of the stack, joint markers layered on top.
  return Stack(
    children: <Widget>[
      SizedBox(
        width: imageSize.width,
        height: imageSize.height,
        child: loading ? Container() : CameraSetup(camera: camera, size: imageSize),
      ),
      ...showPoints(),
    ],
  );
}

/// Builds one small colored dot per detected hand joint, positioned over
/// the camera preview.
///
/// Returns an empty list when no hand data is available. Joints are colored
/// per finger: thumb=amber, index=green, middle=purple, ring=pink,
/// little=cyan, wrist=black.
List<Widget> showPoints(){
  if(handData == null || handData!.isEmpty) return[];
  List<Widget> widgets = [];
  Map<FingerJoint3D,Color> colors = {
    FingerJoint3D.thumbCMC: Colors.amber,
    FingerJoint3D.thumbIP: Colors.amber,
    FingerJoint3D.thumbMCP: Colors.amber,
    FingerJoint3D.thumbTip: Colors.amber,

    FingerJoint3D.indexDIP: Colors.green,
    FingerJoint3D.indexMCP: Colors.green,
    FingerJoint3D.indexPIP: Colors.green,
    FingerJoint3D.indexTip: Colors.green,

    FingerJoint3D.middleDIP: Colors.purple,
    FingerJoint3D.middleMCP: Colors.purple,
    FingerJoint3D.middlePIP: Colors.purple,
    FingerJoint3D.middleTip: Colors.purple,

    FingerJoint3D.ringDIP: Colors.pink,
    FingerJoint3D.ringMCP: Colors.pink,
    FingerJoint3D.ringPIP: Colors.pink,
    FingerJoint3D.ringTip: Colors.pink,

    FingerJoint3D.littleDIP: Colors.cyanAccent,
    FingerJoint3D.littleMCP: Colors.cyanAccent,
    FingerJoint3D.littlePIP: Colors.cyanAccent,
    FingerJoint3D.littleTip: Colors.cyanAccent,

    FingerJoint3D.wrist: Colors.black
  };
  for(int j = 0; j < handData!.length; j++){
    // FIX: use THIS hand's image origin. The original read handData![0],
    // which mis-placed every hand after the first when multiple hands
    // were detected. Hoisted out of the inner loop since it is invariant.
    final origin = handData![j].image.origin;
    for(int i = 0; i < handData![j].poses.length; i++){
      HandPoint3D points = handData![j].poses[i].location;
      widgets.add(
        Positioned(
          // NOTE(review): 2.56 and the +120 vertical offset look like
          // empirically-tuned scale/offset constants for this demo's
          // preview geometry — confirm against the camera preview size.
          left: points.x * imageSize.width/2.56 + origin.x,
          bottom: imageSize.height/2 - points.y * imageSize.width/2.56 + origin.y+120,
          child: Container(
            width: 10,
            height: 10,
            decoration: BoxDecoration(
              color: colors[handData![j].poses[i].joint],
              borderRadius: BorderRadius.circular(5)
            ),
          )
        )
      );
    }
  }
  return widgets;
}

/// Full-screen placeholder shown while the camera is initializing.
///
/// Fills the device (dimensions cached in build()) with the theme's canvas
/// color and centers a blue spinner.
Widget loadingWidget() {
  return Container(
    alignment: Alignment.center,
    color: Theme.of(context).canvasColor,
    width: deviceWidth,
    height: deviceHeight,
    child: const CircularProgressIndicator(color: Colors.blue),
  );
}

Example #

Find the example for this API here.

Contributing #

Contributions are welcome. In case of any problems, look at existing issues; if you cannot find anything related to your problem, then open an issue. Create an issue before opening a pull request for non-trivial fixes. In the case of trivial fixes, open a pull request directly.

0
likes
150
pub points
30%
popularity

Publisher

unverified uploader

A Flutter plugin to use Apple Vision Hand Detection to detect a single hand and fingers in an image or video stream, and identify key distal features.

Repository (GitHub)
View/report issues

Documentation

API reference

License

MIT (license)

Dependencies

apple_vision_commons, flutter

More

Packages that depend on apple_vision_hand_3d