robots_txt 1.1.0+3 (outdated)
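To use the parser, add the package under `dependencies` in your project's `pubspec.yaml`:

dependencies:
  robots_txt: ^1.1.0+3

Then run `dart pub get` (or `flutter pub get` in a Flutter project) to fetch it.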

A simple yet complete, lightweight, and sturdy `robots.txt` ruleset parser that helps your application follow the standard.

example/example.dart

import 'package:robots_txt/robots_txt.dart';

Future<void> main() async {
  // Create an instance of the `robots.txt` parser.
  final robots = Robots(host: 'https://github.com/');
  // Read the ruleset of the website.
  await robots.read();
  // Print the contents of each ruleset.
  for (final ruleset in robots.rulesets) {
    // Print the user-agent the ruleset applies to.
    print(ruleset.appliesTo);
    if (ruleset.allows.isNotEmpty) {
      print('Allows:');
    }
    // Print the path expressions allowed by this ruleset.
    for (final rule in ruleset.allows) {
      print('  - ${rule.expression}');
    }
    if (ruleset.disallows.isNotEmpty) {
      print('Disallows:');
    }
    // Print the path expressions disallowed by this ruleset.
    for (final rule in ruleset.disallows) {
      print('  - ${rule.expression}');
    }
  }
  // Prints 'false': the wildcard user-agent may not visit '/gist/'.
  print(robots.canVisitPath('/gist/', userAgent: '*'));
  // Prints 'true': the wildcard user-agent may visit '/wordcollector/robots_txt'.
  print(robots.canVisitPath('/wordcollector/robots_txt', userAgent: '*'));
}
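
The parser consumes plain `User-agent`, `Allow`, and `Disallow` directives. As a sketch of the input it reads, a hypothetical `robots.txt` file (not GitHub's actual one) that would produce the results above could look like this:

User-agent: *
Allow: /wordcollector/robots_txt
Disallow: /gist/

This defines a single ruleset applying to the wildcard user-agent `*`: `/gist/` may not be visited, while `/wordcollector/robots_txt` is explicitly allowed.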
4 likes · 0 pub points · 31% popularity

Publisher

vxern.dev (verified publisher)

Repository (GitHub)

License

unknown

Dependencies

sprint, web_scraper
