lexer_builder 0.1.1

Generate lexer code from regular expression rules.

example/lib/example.dart

import 'package:lexer_builder_runtime/lexer_builder_runtime.dart';

// This line is needed to include the generated lexer.
// Always include "part 'filename.g.dart';" in files where you declare lexers.
part 'example.g.dart';
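
// The part file is produced by the package's code generator. With
// source_gen-based builders this is typically done via
// "dart run build_runner build", but check the lexer_builder
// documentation for the exact command to run.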

/// The [Token] type for the lexer.
/// The lexer returns a [List] of the tokens produced by the [Rule] methods
/// (the equivalent of actions, if you are coming from the lexer generator flex).
/// This type has to be a subclass of [Token].
/// Tokens can carry arbitrary data back out of the lexer;
/// this example just passes back the matched String.
class StringLexerToken extends Token {
  /// The value matched by the rule.
  String value;
  
  StringLexerToken(this.value);
}

/// The lexer class.
/// Annotate a class with [Lexer] to generate a _Classname class for it to extend.
/// The generated class contains the matching code for the rules and takes the token class as a type parameter.
/// The optional startState parameter defines the starting state for the lexer and defaults to 0.
@Lexer()
class StringLexer extends _StringLexer<StringLexerToken> {
  
  // Since the generated class defines these methods, mark them with @override.
  @override
  // The [Rule] annotation marks the method as a rule for the lexer.
  // The first parameter is the regular expression pattern; the method is executed when it matches.
  // The second parameter is the priority: among all rules that match, the one with the highest priority is selected.
  @Rule("[a-zA-Z0-9]+", 0)
  TokenResponse<StringLexerToken> word(String token, int line, int char, int index) {
    // TokenResponse.accept accepts the match and optionally emits a token to the token stream.
    // TokenResponse.reject would cause the lexer to look for another matching rule for this input instead.
    return TokenResponse.accept(StringLexerToken(token));
  }
  
  @override
  @Rule(r"\s+", 0)
  TokenResponse<StringLexerToken> space(String token, int line, int char, int index) {
    // If null is passed, no token is placed in the token stream for this rule.
    return TokenResponse.accept(null);
  }
  
  @override
  @Rule('"', 1)
  TokenResponse<StringLexerToken> quote(String token, int line, int char, int index) {
    // The state variable is defined by the generated class and lets you query and modify the lexer state.
    // Here, state 0 is the normal word-matching state and state 1 matches everything inside double quotes, like a string literal.
    if (state == 0) {
      state = 1;
    } else {
      state = 0;
    }
    return TokenResponse.accept(null);
  }

  @override
  // The optional parameter state defines the state in which the rule will be considered for matching.
  // It defaults to null, which means the rule is considered in any state.
  // Here it is set to 1 so that this rule only matches in the string literal state.
  @Rule('[^"]+', 1, state: 1)
  TokenResponse<StringLexerToken> wordQuoted(String token, int line, int char, int index) {
    return TokenResponse.accept(StringLexerToken(token));
  }
}
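
Below is a minimal usage sketch for the lexer above. This page does not show how the generated lexer is driven, so the tokenize method name and the returned token list are assumptions rather than the confirmed lexer_builder_runtime API; check that package's documentation for the real entry point.

void main() {
  final lexer = StringLexer();

  // Hypothetical entry point: the actual method name and signature on the
  // generated base class may differ (see lexer_builder_runtime).
  final List<StringLexerToken> tokens = lexer.tokenize('hello "quoted words" world');

  // The whitespace and quote rules return accept(null) and emit nothing,
  // so the expected token values would be: hello, quoted words, world.
  for (final token in tokens) {
    print(token.value);
  }
}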

License

unknown

Dependencies

analyzer, build, collection, lexer_builder_runtime, source_gen
