tokenizing method

void tokenizing(
  Lexeme lexeme,
  String string, [
  int start = 0
])
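
A minimal call-site sketch, assuming lexeme is a Lexeme instance and source is the string being tokenized (both variable names are illustrative, not part of this API); the call only prints one debug entry and advances the shared debugIndex counter:

// Log one step for lexeme against source, starting at the default offset 0.
tokenizing(lexeme, source);
// The optional positional parameter selects the index of the character under inspection.
tokenizing(lexeme, source, 3);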

Implementation

void tokenizing(Lexeme lexeme, String string, [int start = 0]) {
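  // Skip unnamed lexemes unless showAll is enabled.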
  if (!showAll && lexeme.name == null) return;

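  // Peek at the character at start (if any) and make whitespace visible in the log.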
  String? character = start < string.length ? string[start] : null;
  if (character == '\n') character = r'\n';
  if (character == '\r') character = r'\r';
  if (character == '\t') character = r'\t';

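  // Collapse consecutive duplicates in the lexeme's path and chunk the result
  // into rows of at most five entries for display.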
  final filteredPath = <List<_CombinedText>>[];
  if (showPath) {
    filteredPath.addAll(lexeme.path
        .where((lexeme) => showAll || lexeme.name != null)
        .fold<List<_CombinedText>>([], (value, lexeme) {
      if (value.isEmpty || value.last.text != lexeme.displayName) {
        value.add(_CombinedText(lexeme.displayName));
      } else {
        value.last.increment();
      }
      return value;
    }).fold<List<List<_CombinedText>>>([], (value, text) {
      if (value.isEmpty || value.last.length >= 5) value.add([]);
      value.last.add(text);
      return value;
    }));
  }

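  // Print one tree-style entry: step header, the lexeme being tokenized,
  // the current position and character, and (optionally) the collapsed path.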
  print(('${debugIndex == 0 ? ' ' : '│'}  (#${debugIndex + 1})\n') +
      (debugIndex == 0 ? '┬► ' : '├► ') +
      [
        'Tokenizing ${lexeme.name != null ? 'named ${lexeme.name}' : lexeme.displayName}',
        'at index $start${character != null ? ', character "$character"' : ''}',
        if (showPath)
          'on path: ${filteredPath.map((line) => line.join(' → ')).join(' → \n│           → ')}',
        ''
      ].join('\n│    '));
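  // Pause for the configured delay between entries, then advance the shared step counter.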
  sleep(delay);
  debugIndex++;
}
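
The two chained folds that build filteredPath amount to collapsing consecutive duplicate path entries and then splitting the result into rows of at most five. A standalone sketch of the same idea, using plain strings in place of the package's _CombinedText helper:

void main() {
  final path = ['expr', 'expr', 'term', 'factor', 'factor', 'number'];

  // First fold: drop consecutive repeats (the real code keeps a _CombinedText
  // entry and calls increment() on it instead of simply skipping).
  final collapsed = path.fold<List<String>>([], (value, name) {
    if (value.isEmpty || value.last != name) value.add(name);
    return value;
  });

  // Second fold: split the collapsed list into rows of at most five entries.
  final rows = collapsed.fold<List<List<String>>>([], (value, name) {
    if (value.isEmpty || value.last.length >= 5) value.add([]);
    value.last.add(name);
    return value;
  });

  print(rows.map((row) => row.join(' → ')).join('\n'));
  // Prints: expr → term → factor → number
}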