## parsePrimary

Parses a single primary expression.

Signature: `Expression parsePrimary()`
// EXPRESSIONS //////

Implementation

// Parses a primary expression: the keyword forms `this`, `true`, `false`,
// `null`, and `function`; plain identifiers; number and string literals;
// array (`[`) and object (`{`) literals; parenthesized expressions; and
// regexp literals whose first token was lexed as `/` or `/=`.
//
// Returns the parsed [Expression] with `start`/`end`/`line` positions set.
// Throws (via `fail()`) when the current token cannot start a primary
// expression.
//
// NOTE(review): this method reads mutable parser state not visible in this
// chunk — `token` (current lookahead), `endOffset`, `lexer`. `next()`
// appears to consume the current token, returning it, and presumably
// updates `endOffset` to the end of the consumed token before the cascades
// read it — confirm against the parser's token-advance helper.
Expression parsePrimary() {
  // Start offset of the node being built, captured before any token is
  // consumed. Defaults to 0 if there is no current token.
  int start = token?.startOffset?? 0;

  switch (token?.type) {
    case Token.NAME:
      // Keywords arrive as ordinary NAME tokens; dispatch on their text
      // first, then fall through to the generic identifier path below.
      switch (token?.text) {
        case 'this':
          // `tok` is presumably the consumed 'this' token; its line is
          // used for the node's line number.
          Token? tok = next();
          return new ThisExpression()
            ..start = start
            ..end = endOffset
            ..line = tok?.line?? 0;
        case 'true':
          Token? tok = next();
          // Boolean literal: parsed value plus the raw source text.
          return new LiteralExpression(true, 'true')
            ..start = start
            ..end = endOffset
            ..line = tok?.line??0;
        case 'false':
          Token? tok = next();
          return new LiteralExpression(false, 'false')
            ..start = start
            ..end = endOffset
            ..line = tok?.line??0;
        case 'null':
          Token? tok = next();
          return new LiteralExpression(null, 'null')
            ..start = start
            ..end = endOffset
            ..line = tok?.line??0;
        case 'function':
          // Function expression; parseFunction() handles positions itself.
          return new FunctionExpression(parseFunction());
      }
      // Not a keyword: an ordinary identifier reference.
      Name name = parseName();
      return new NameExpression(name)
        ..start = start
        ..end = endOffset
        ..line = name.line;

    case Token.NUMBER:
      Token? tok = next();
      // Value is re-parsed from the token text; '0' fallback guards a
      // null token (defensive — should not occur after a NUMBER match).
      return new LiteralExpression(num.parse(tok?.text??'0'), tok?.text)
        ..start = start
        ..end = endOffset
        ..line = tok?.line??0;

    case Token.STRING:
      Token? tok = next();
      // NOTE(review): `tok?.value` is presumably the unescaped string
      // value while `text` is the raw source — confirm in the lexer.
      return new LiteralExpression(tok?.value, tok?.text)
        ..start = start
        ..end = endOffset
        ..line = tok?.line??0;

    case Token.LBRACKET:
      return parseArrayLiteral();

    case Token.LBRACE:
      return parseObjectLiteral();

    case Token.LPAREN:
      // Parenthesized expression: positions of the inner expression are
      // returned unchanged (the parens themselves are not recorded).
      next();
      Expression exp = parseExpression();
      consume(Token.RPAREN);
      return exp;

    case Token.BINARY:
    case Token.ASSIGN:
      // A '/' or '/=' operator token at primary position actually begins
      // a regexp literal; rescan the remainder of it with the lexer.
      if (token?.text == '/' || token?.text == '/=') {
        Token? regexTok = lexer?.scanRegexpBody(token!);
        // Advance past the regexp and record its end as the parser's
        // current end offset.
        token = lexer?.scan();
        endOffset = regexTok?.endOffset??0;
        return new RegexpExpression(regexTok?.text??'')
          ..start = regexTok?.startOffset??0
          ..end = regexTok?.endOffset??0
          ..line = regexTok?.line??0;
      }
      // Any other operator cannot start a primary expression.
      throw fail();

    default:
      throw fail();
  }
}