lexer_builder 0.1.3 copy "lexer_builder: ^0.1.3" to clipboard
lexer_builder: ^0.1.3 copied to clipboard

Generate lexer code from regular expression rules. Arbitrary custom tokens are supported with generics.

example/lib/example.dart

import 'package:lexer_builder_runtime/lexer_builder_runtime.dart';

// This line is needed to include the generated lexer.
// Always include "part 'filename.g.dart';" in files where you declare lexers.
part 'example.g.dart';

/// The [Token] type for the lexer.
///
/// The lexer returns a [List] of tokens generated by the [Rule] methods
/// (the equivalent of "actions" in the flex lexer generator).
/// Token classes must be subclasses of [Token] and may carry arbitrary
/// data back out of the lexer; this example simply carries the matched
/// [String].
class StringLexerToken extends Token {
  /// The text matched by the rule that produced this token.
  String value;
  
  /// Creates a token wrapping the matched [value].
  StringLexerToken(this.value);
}

/// The lexer class.
///
/// Annotating a class with [Lexer] generates a `_Classname` class for it to
/// extend. The generated class contains the rule-matching code and takes the
/// token class as a type parameter. Its optional `startState` parameter sets
/// the initial lexer state and defaults to 0.
@Lexer()
class StringLexer extends _StringLexer<StringLexerToken> {
  // Every rule method overrides a stub declared by the generated class,
  // hence the @override annotations.
  //
  // [Rule]'s first argument is the pattern whose match triggers the method;
  // the second is the priority — among all rules that match, the one with
  // the highest priority is selected.
  @override
  @Rule("[a-zA-Z0-9]+", 0)
  TokenResponse<StringLexerToken> word(String token, int line, int char, int index) {
    // TokenResponse.accept consumes the match and may emit a token into the
    // token stream; TokenResponse.reject would make the lexer look for a
    // different matching rule for this input instead.
    return TokenResponse.accept(StringLexerToken(token));
  }
  
  @override
  @Rule(r"\s+", 0)
  TokenResponse<StringLexerToken> space(String token, int line, int char, int index) {
    // Accepting null consumes the match without placing any token in the
    // token stream.
    return TokenResponse.accept(null);
  }
  
  @override
  @Rule('"', 1)
  TokenResponse<StringLexerToken> quote(String token, int line, int char, int index) {
    // `state` is defined by the generated class and lets you query and
    // modify the lexer state. State 0 is the plain word-matching state;
    // state 1 matches everything inside double quotes, like a string
    // literal. A quote character toggles between the two.
    state = state == 0 ? 1 : 0;
    return TokenResponse.accept(null);
  }
  
  @override
  // [Rule]'s optional `state` parameter restricts the rule to a single
  // lexer state. It defaults to null, meaning the rule is considered in
  // every state; here it is 1, so this rule only matches inside the
  // string-literal state.
  @Rule('[^"]+', 1, state: 1)
  TokenResponse<StringLexerToken> wordQuoted(String token, int line, int char, int index) {
    return TokenResponse.accept(StringLexerToken(token));
  }
}
1
likes
150
points
41
downloads

Documentation

Documentation
API reference

Publisher

unverified uploader

Weekly Downloads

Generate lexer code from regular expression rules. Arbitrary custom tokens are supported with generics.

Repository (GitHub)
View/report issues

License

MPL-2.0 (license)

Dependencies

analyzer, build, collection, lexer_builder_runtime, source_gen, source_helper

More

Packages that depend on lexer_builder