`tokenize` method — scans the file's contents and produces a flat list of tokens

List<Token> tokenize()

Implementation

/// Scans [fileContent] and converts it into a flat list of [Token]s.
///
/// Recognised tokens: the single-character punctuation listed in
/// [singleCharKinds] below, and double-quoted issue text (`"..."`),
/// which is emitted as a [TokenKind.issueText] token whose value
/// excludes the surrounding quotes. Whitespace between tokens is
/// skipped.
///
/// Throws [UnknownToken] when an unrecognised character is encountered.
List<Token> tokenize() {
  // Trimming up front means the input cannot end in whitespace, so the
  // cursor never lands on trailing blanks after `skipWhitespaces`.
  fileContent = fileContent.trim();

  // Every token that is exactly one character wide, keyed by that
  // character. Replaces nine duplicated switch arms.
  const singleCharKinds = <String, TokenKind>{
    "[": TokenKind.openingSquareBracket,
    "]": TokenKind.closingSquareBracket,
    ":": TokenKind.colon,
    "~": TokenKind.tilde,
    ";": TokenKind.semicolon,
    ",": TokenKind.comma,
    "{": TokenKind.openingCurlyBrace,
    "}": TokenKind.closingCurlyBrace,
  };

  List<Token> tokens = [];
  advanceCursor();

  while (!isEndOfFile) {
    skipWhitespaces();

    final kind = singleCharKinds[currentCharacter];
    if (kind != null) {
      tokens.add(consumeToken(kind, currentCharacter));
    } else if (currentCharacter == "\"") {
      // Quoted issue text: accumulate everything up to the closing
      // quote. StringBuffer avoids the quadratic cost of repeated
      // string `+=` on long literals.
      final issueName = StringBuffer();
      advanceCursor(); // step over the opening quote
      while (currentCharacter != "\"" && !isEndOfFile) {
        issueName.write(currentCharacter);
        advanceCursor();
      }
      // Only consume the closing quote when one was actually found;
      // the original advanced unconditionally, stepping past EOF on
      // unterminated input.
      // NOTE(review): an unterminated string is still silently
      // accepted, matching the original behaviour — consider
      // reporting it to the caller instead.
      if (!isEndOfFile) {
        advanceCursor();
      }
      tokens.add(Token(TokenKind.issueText, issueName.toString()));
    } else {
      // Anything not handled above is a lexing error.
      throw UnknownToken(currentCharacter);
    }
  }
  return tokens;
}