Flutter NSFW

Flutter plugin for detecting NSFW content from images and videos.


  • 1 - Download the tflite model and put it in the assets folder
  • 2 - Add the path of the tflite model to the assets section of pubspec.yaml
  • 3 - Read the file using the path_provider plugin
  • 4 - Make a separate class for accessing the NSFW detector, as shown below:
import 'dart:io';

import 'package:flutter/services.dart';
import 'package:flutter_nsfw/flutter_nsfw.dart';
import 'package:path_provider/path_provider.dart';

class NSFWDetector {
  NSFWDetector(this.modelPath, this.enableLog, this.isOpenGPU, this.numThreads);

  /// Asset path of the tflite model, e.g. 'assets/nsfw.tflite'.
  final String modelPath;
  final bool enableLog;
  final bool isOpenGPU;
  final int numThreads;

  bool isInitialized = false;

  /// Copies the bundled model out of the asset bundle into the documents
  /// directory (initNsfw expects a path to a real file on disk) and
  /// initializes the detector once.
  Future<void> _ensureInitialized() async {
    if (isInitialized) return;
    final Directory appDocDir = await getApplicationDocumentsDirectory();
    final file = File('${appDocDir.path}/nsfw.tflite');
    if (!file.existsSync()) {
      final data = await rootBundle.load(modelPath);
      await file.writeAsBytes(
          data.buffer.asUint8List(data.offsetInBytes, data.lengthInBytes));
    }
    await FlutterNsfw.initNsfw(file.path);
    isInitialized = true;
  }

  /// Returns the NSFW score of a single image file.
  Future<dynamic> detectInPhoto(String photoPath) async {
    await _ensureInitialized();
    return FlutterNsfw.getPhotoNSFWScore(photoPath);
  }

  /// Checks a video for NSFW content using the given threshold and frame size.
  Future<dynamic> detectVideo(
    String videoPath,
    double nsfwThreshold,
    int width,
    int height,
  ) async {
    await _ensureInitialized();
    final result = await FlutterNsfw.detectNSFWVideo(
        videoPath: videoPath,
        nsfwThreshold: nsfwThreshold,
        frameWidth: width,
        frameHeight: height,
        durationPerFrame: 1000);
    // The plugin may return null; treat that as "not NSFW".
    return (result as bool?) ?? false;
  }
}
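
Because of the isInitialized guard, the asset copy and the call to FlutterNsfw.initNsfw happen only on the first detection; later calls reuse the already loaded model.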
  • 5 - Instantiate the class that you previously made:
  NSFWDetector _nsfwDetector =
      NSFWDetector('assets/nsfw.tflite', true, true, 2);
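  Here the positional arguments are, in order, the asset path of the tflite model, whether to enable logging, whether to use the GPU, and the number of threads, matching the fields declared in the class from step 4.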
  • 6 - Make a helper method for detecting NSFW photos:
  /// Returns true when the photo's NSFW score is above 0.80.
  Future<bool> detectNSFWImage(String photo) async {
    final nsfwStatus = await _nsfwDetector.detectInPhoto(photo);
    return nsfwStatus > 0.80;
  }
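
  A minimal sketch of how this helper might be used to gate an image before rendering it; the widget shape here is only an illustration and assumes the detectNSFWImage helper above is in scope and that dart:io and package:flutter/material.dart are imported:

  /// Hypothetical gate: show a placeholder instead of the image when it is
  /// flagged as NSFW by detectNSFWImage.
  Future<Widget> safeImageWidget(String imagePath) async {
    final bool isNsfw = await detectNSFWImage(imagePath);
    if (isNsfw) {
      return const Center(child: Text('Image hidden (flagged as NSFW)'));
    }
    return Image.file(File(imagePath));
  }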

  • 7 - Make a helper method for detecting NSFW videos:
  /// Returns true when the plugin flags the video as NSFW (0.70 threshold).
  Future<bool> detectNSFWVideo(String video, int width, int height) async {
    final nsfwStatus =
        await _nsfwDetector.detectVideo(video, 0.70, width, height);
    return nsfwStatus ?? false;
  }
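
  Similarly, a small sketch of using the video helper as a pre-upload check; the 720x1280 frame size is only an example value:

  /// Hypothetical pre-upload check built on the detectNSFWVideo helper above.
  Future<bool> isVideoSafeToUpload(String videoPath) async {
    final bool isNsfw = await detectNSFWVideo(videoPath, 720, 1280);
    return !isNsfw;
  }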
  • If you find that the model is increasing your app size, you can also host the model on Firebase ML and download it using FirebaseMLModelDownloader (see the sketch after this list).
  • If you are running the example app on an emulator, it might not work because of GPU constraints; please use a real device, especially when testing on Android.
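
A rough sketch of the Firebase approach, assuming the firebase_ml_model_downloader package is added and Firebase is already initialized in the app; 'nsfw_model' is only a placeholder for whatever name you give the model in the Firebase console:

import 'package:firebase_ml_model_downloader/firebase_ml_model_downloader.dart';
import 'package:flutter_nsfw/flutter_nsfw.dart';

/// Downloads the hosted tflite model instead of bundling it in assets and
/// initializes the detector from the downloaded file.
Future<void> initNsfwFromFirebase() async {
  final model = await FirebaseModelDownloader.instance.getModel(
    'nsfw_model', // placeholder: the model name configured in Firebase
    FirebaseModelDownloadType.localModelUpdateInBackground,
  );
  await FlutterNsfw.initNsfw(model.file.path);
}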