reportForWriters function

void reportForWriters(
  CrawlResult result,
  bool ansiTerm,
  bool shouldCheckAnchors,
  bool showRedirects,
  Stdout stdout,
)

Writes the reports from the perspective of a website writer: which pages reference broken links.
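
For orientation, here is a minimal calling sketch. The import path and the idea that the CrawlResult comes from the package's crawler are assumptions; stdout is dart:io's standard output sink, which satisfies the Stdout parameter.

import 'dart:io';

import 'package:linkcheck/linkcheck.dart'; // import path is an assumption

void printWriterReport(CrawlResult result) {
  reportForWriters(
    result,
    stdout.hasTerminal, // ansiTerm: colorize only when attached to a terminal
    true, // shouldCheckAnchors: also flag broken #fragment anchors
    false, // showRedirects: do not list redirected links
    stdout,
  );
}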

Implementation

void reportForWriters(CrawlResult result, bool ansiTerm,
    bool shouldCheckAnchors, bool showRedirects, Stdout stdout) {
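  // Shadow the top-level print so all output goes through the given stdout.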
  void print(Object message) => stdout.writeln(message);

  print('');

  final links = result.links;

  /// Links that were found broken, had a warning, or were redirected.
  final problematicLinks = links
      .where((link) =>
          !link.destination.isUnsupportedScheme &&
          !link.wasSkipped &&
          (link.destination.isInvalid ||
              link.destination.wasTried &&
                  (link.destination.isBroken ||
                      link.hasWarning(shouldCheckAnchors) ||
                      (showRedirects && link.destination.isRedirected))))
      .toList(growable: false);

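  // Destinations the crawler was not allowed to check because of robots.txt.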
  final deniedByRobots = result.destinations
      .where((destination) => destination.wasDeniedByRobotsTxt)
      .toList(growable: false);
  deniedByRobots.sort((a, b) => a.url.compareTo(b.url));

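  // The unique pages that contain at least one problematic link, in a
  // stable (sorted) order.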
  final sourceUris = problematicLinks
      .map((link) => link.origin.uri)
      .toSet()
      .toList(growable: false);
  sourceUris.sort((a, b) => a.toString().compareTo(b.toString()));

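  // Only construct a pen when ANSI escape codes are supported.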
  TextPen? ansiPen;
  if (ansiTerm) {
    ansiPen = TextPen();
  }

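  // Seed URLs (the ones provided by the user) that themselves failed.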
  final brokenSeeds = result.destinations
      .where((destination) => destination.isSeed && destination.isBroken)
      .toList(growable: false);
  brokenSeeds.sort((a, b) => a.toString().compareTo(b.toString()));

  if (brokenSeeds.isNotEmpty) {
    print('Provided URLs failing:');
    for (final destination in brokenSeeds) {
      if (ansiPen != null) {
        ansiPen
            .reset()
            .yellow()
            .text(destination.url)
            .lightGray()
            .text(' (')
            .red()
            .text(destination.statusDescription)
            .lightGray()
            .text(')')
            .normal()
            .print();
      } else {
        print('${destination.url} (${destination.statusDescription})');
      }
    }

    print('');
  }

  if (deniedByRobots.isNotEmpty) {
    print('Access to these URLs denied by robots.txt, '
        "so we couldn't check them:");
    for (final destination in deniedByRobots) {
      if (ansiPen != null) {
        ansiPen
            .reset()
            .normal()
            .text('- ')
            .yellow()
            .text(destination.url)
            .normal()
            .print();
      } else {
        print('- ${destination.url}');
      }
    }

    print('');
  }

  // TODO: summarize when there is a huge number of sourceUris for a broken link
  // TODO: report invalid links

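  // One block of output per page that contains problematic links.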
  for (final uri in sourceUris) {
    if (ansiPen != null) {
      printWithAnsi(uri, problematicLinks, ansiPen);
    } else {
      printWithoutAnsi(uri, problematicLinks, stdout);
    }
  }

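  // Hard errors only, as opposed to warnings and redirect notices.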
  final brokenLinks =
      problematicLinks.where((link) => link.hasError).toList(growable: false);
  if (brokenLinks.isNotEmpty &&
      brokenLinks.length < problematicLinks.length / 2) {
    // Reiterate really broken links if the listing above is mostly warnings
    // with only a minority of errors. The user cares about errors first.
    print('');
    print('Summary of most serious issues:');
    print('');

    final brokenUris = brokenLinks
        .map((link) => link.origin.uri)
        .toSet()
        .toList(growable: false);
    brokenUris.sort((a, b) => a.toString().compareTo(b.toString()));

    for (final uri in brokenUris) {
      if (ansiPen != null) {
        printWithAnsi(uri, brokenLinks, ansiPen);
      } else {
        printWithoutAnsi(uri, brokenLinks, stdout);
      }
    }
  }
}
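
The per-page output above is produced by printWithAnsi and printWithoutAnsi, which are defined elsewhere in this library. As a rough illustration of the grouping they perform (the source page first, then that page's problematic links), here is a simplified stand-in; the helper name and the record type are invented for this sketch and are not the package's API.

// Illustrative stand-in only, not the library's printWithoutAnsi.
void printGrouped(
    Uri uri, List<({Uri origin, String detail})> links, StringSink out) {
  out.writeln(uri); // the page on which the problems were found
  for (final link in links.where((link) => link.origin == uri)) {
    out.writeln('- ${link.detail}'); // one line per problematic link
  }
  out.writeln('');
}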