| // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
| // for details. All rights reserved. Use of this source code is governed by a |
| // BSD-style license that can be found in the LICENSE file. |
| |
| library analyzer.src.generated.parser; |
| |
| import 'dart:collection'; |
| import "dart:math" as math; |
| |
| import 'package:analyzer/dart/ast/ast.dart'; |
| import 'package:analyzer/dart/ast/standard_ast_factory.dart'; |
| import 'package:analyzer/dart/ast/token.dart'; |
| import 'package:analyzer/error/error.dart'; |
| import 'package:analyzer/error/listener.dart'; |
| import 'package:analyzer/src/dart/ast/ast.dart'; |
| import 'package:analyzer/src/dart/ast/token.dart'; |
| import 'package:analyzer/src/dart/error/syntactic_errors.dart'; |
| import 'package:analyzer/src/dart/scanner/reader.dart'; |
| import 'package:analyzer/src/dart/scanner/scanner.dart'; |
| import 'package:analyzer/src/error/codes.dart'; |
| import 'package:analyzer/src/generated/engine.dart' show AnalysisEngine; |
| import 'package:analyzer/src/generated/java_core.dart'; |
| import 'package:analyzer/src/generated/java_engine.dart'; |
| import 'package:analyzer/src/generated/source.dart'; |
| import 'package:analyzer/src/generated/utilities_dart.dart'; |
| |
| export 'package:analyzer/src/dart/ast/utilities.dart' show ResolutionCopier; |
| export 'package:analyzer/src/dart/error/syntactic_errors.dart'; |
| |
| /** |
| * A simple data-holder for a method that needs to return multiple values. |
| */ |
| class CommentAndMetadata { |
| /** |
| * The documentation comment that was parsed, or `null` if none was given. |
| */ |
| final Comment comment; |
| |
| /** |
| * The metadata that was parsed, or `null` if none was given. |
| */ |
| final List<Annotation> metadata; |
| |
| /** |
| * Initialize a newly created holder with the given [comment] and [metadata]. |
| */ |
| CommentAndMetadata(this.comment, this.metadata); |
| |
| /** |
| * Return `true` if some metadata was parsed. |
| */ |
| bool get hasMetadata => metadata != null && metadata.isNotEmpty; |
| } |
| |
| /** |
| * A simple data-holder for a method that needs to return multiple values. |
| */ |
| class FinalConstVarOrType { |
| /** |
| * The 'final', 'const' or 'var' keyword, or `null` if none was given. |
| */ |
| final Token keyword; |
| |
| /** |
| * The type, or `null` if no type was specified. |
| */ |
| final TypeAnnotation type; |
| |
| /** |
| * Initialize a newly created holder with the given [keyword] and [type]. |
| */ |
| FinalConstVarOrType(this.keyword, this.type); |
| } |
| |
| /** |
| * A simple data-holder for a method that needs to return multiple values. |
| */ |
| class Modifiers { |
| /** |
| * The token representing the keyword 'abstract', or `null` if the keyword was |
| * not found. |
| */ |
| Token abstractKeyword; |
| |
| /** |
| * The token representing the keyword 'const', or `null` if the keyword was |
| * not found. |
| */ |
| Token constKeyword; |
| |
| /** |
| * The token representing the keyword 'covariant', or `null` if the keyword |
| * was not found. |
| */ |
| Token covariantKeyword; |
| |
| /** |
| * The token representing the keyword 'external', or `null` if the keyword was |
| * not found. |
| */ |
| Token externalKeyword; |
| |
| /** |
| * The token representing the keyword 'factory', or `null` if the keyword was |
| * not found. |
| */ |
| Token factoryKeyword; |
| |
| /** |
| * The token representing the keyword 'final', or `null` if the keyword was |
| * not found. |
| */ |
| Token finalKeyword; |
| |
| /** |
| * The token representing the keyword 'static', or `null` if the keyword was |
| * not found. |
| */ |
| Token staticKeyword; |
| |
| /** |
| * The token representing the keyword 'var', or `null` if the keyword was not |
| * found. |
| */ |
| Token varKeyword; |
| |
| @override |
| String toString() { |
| StringBuffer buffer = new StringBuffer(); |
| bool needsSpace = _appendKeyword(buffer, false, abstractKeyword); |
| needsSpace = _appendKeyword(buffer, needsSpace, constKeyword); |
| needsSpace = _appendKeyword(buffer, needsSpace, externalKeyword); |
| needsSpace = _appendKeyword(buffer, needsSpace, factoryKeyword); |
| needsSpace = _appendKeyword(buffer, needsSpace, finalKeyword); |
| needsSpace = _appendKeyword(buffer, needsSpace, staticKeyword); |
| _appendKeyword(buffer, needsSpace, varKeyword); |
| return buffer.toString(); |
| } |
| |
| /** |
| * If the given [keyword] is not `null`, append it to the given [buffer], |
| * prefixing it with a space if [needsSpace] is `true`. Return `true` if |
| * subsequent keywords need to be prefixed with a space. |
| */ |
| bool _appendKeyword(StringBuffer buffer, bool needsSpace, Token keyword) { |
| if (keyword != null) { |
| if (needsSpace) { |
| buffer.writeCharCode(0x20); |
| } |
| buffer.write(keyword.lexeme); |
| return true; |
| } |
| return needsSpace; |
| } |
| } |
| |
| /** |
| * A parser used to parse tokens into an AST structure. |
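| * |
| * A rough usage sketch (illustrative only; the sample source text, the |
| * `null` source arguments, and the scanner wiring are assumptions of this |
| * sketch rather than requirements of the API): |
| * |
| * AnalysisErrorListener listener = AnalysisErrorListener.NULL_LISTENER; |
| * Scanner scanner = |
| * new Scanner(null, new CharSequenceReader('1 + 2 * 3'), listener); |
| * Parser parser = new Parser(null, listener); |
| * parser.currentToken = scanner.tokenize(); |
| * Expression expression = parser.parseExpression2(); |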
| */ |
| class Parser { |
| static String ASYNC = "async"; |
| |
| static String _AWAIT = "await"; |
| |
| static String _HIDE = "hide"; |
| |
| static String _OF = "of"; |
| |
| static String _ON = "on"; |
| |
| static String _NATIVE = "native"; |
| |
| static String _SHOW = "show"; |
| |
| static String SYNC = "sync"; |
| |
| static String _YIELD = "yield"; |
| |
| /** |
| * The source being parsed. |
| */ |
| final Source _source; |
| |
| /** |
| * The error listener that will be informed of any errors that are found |
| * during the parse. |
| */ |
| final AnalysisErrorListener _errorListener; |
| |
| /** |
| * A lock counter for the [_errorListener]: while it is greater than `0`, |
| * errors are not reported. |
| */ |
| int _errorListenerLock = 0; |
| |
| /** |
| * A flag indicating whether the parser is to parse asserts in the initializer |
| * list of a constructor. |
| */ |
| bool _enableAssertInitializer = false; |
| |
| /** |
| * A flag indicating whether the parser is to parse the non-nullable modifier |
| * in type names. |
| */ |
| bool _enableNnbd = false; |
| |
| /** |
| * A flag indicating whether the parser is to allow URIs in part-of |
| * directives. |
| */ |
| bool _enableUriInPartOf = true; |
| |
| /** |
| * A flag indicating whether the parser is to parse function bodies. |
| */ |
| bool _parseFunctionBodies = true; |
| |
| /** |
| * The next token to be parsed. |
| */ |
| Token _currentToken; |
| |
| /** |
| * A flag indicating whether the parser is currently in a function body marked |
| * as being 'async'. |
| */ |
| bool _inAsync = false; |
| |
| /** |
| * A flag indicating whether the parser is currently in a function body marked |
| * (by a star) as being a generator. |
| */ |
| bool _inGenerator = false; |
| |
| /** |
| * A flag indicating whether the parser is currently in the body of a loop. |
| */ |
| bool _inLoop = false; |
| |
| /** |
| * A flag indicating whether the parser is currently in a switch statement. |
| */ |
| bool _inSwitch = false; |
| |
| /** |
| * A flag indicating whether the parser is currently in a constructor field |
| * initializer, with no intervening parentheses, braces, or brackets. |
| */ |
| bool _inInitializer = false; |
| |
| /** |
| * A flag indicating whether the parser is to parse generic method syntax. |
| */ |
| @deprecated |
| bool parseGenericMethods = false; |
| |
| /** |
| * A flag indicating whether to parse generic method comments, of the form |
| * `/*=T*/` and `/*<T>*/`. |
| */ |
| bool parseGenericMethodComments = false; |
| |
| /** |
| * Initialize a newly created parser to parse tokens in the given [_source] |
| * and to report any errors that are found to the given [_errorListener]. |
| */ |
| Parser(this._source, this._errorListener); |
| |
| /** |
| * Return the current token. |
| */ |
| Token get currentToken => _currentToken; |
| |
| /** |
| * Set the token with which the parse is to begin to the given [token]. |
| */ |
| void set currentToken(Token token) { |
| this._currentToken = token; |
| } |
| |
| /** |
| * Return `true` if the parser is to parse asserts in the initializer list of |
| * a constructor. |
| */ |
| bool get enableAssertInitializer => _enableAssertInitializer; |
| |
| /** |
| * Set whether the parser is to parse asserts in the initializer list of a |
| * constructor to match the given [enable] flag. |
| */ |
| void set enableAssertInitializer(bool enable) { |
| _enableAssertInitializer = enable; |
| } |
| |
| /** |
| * Return `true` if the parser is to parse the non-nullable modifier in type |
| * names. |
| */ |
| bool get enableNnbd => _enableNnbd; |
| |
| /** |
| * Set whether the parser is to parse the non-nullable modifier in type names |
| * to match the given [enable] flag. |
| */ |
| void set enableNnbd(bool enable) { |
| _enableNnbd = enable; |
| } |
| |
| /** |
| * Return `true` if the parser is to allow URIs in part-of directives. |
| */ |
| bool get enableUriInPartOf => _enableUriInPartOf; |
| |
| /** |
| * Set whether the parser is to allow URIs in part-of directives to the given |
| * [enable] flag. |
| */ |
| void set enableUriInPartOf(bool enable) { |
| _enableUriInPartOf = enable; |
| } |
| |
| /** |
| * Return `true` if the current token is the first token of a return type that |
| * is followed by an identifier, possibly followed by a list of type |
| * parameters, followed by a left-parenthesis. This is used by |
| * [parseTypeAlias] to determine whether or not to parse a return type. |
| */ |
| bool get hasReturnTypeInTypeAlias { |
| // TODO(brianwilkerson) This is too expensive as implemented and needs to be |
| // re-implemented or removed. |
| Token next = skipReturnType(_currentToken); |
| if (next == null) { |
| return false; |
| } |
| return _tokenMatchesIdentifier(next); |
| } |
| |
| /** |
| * Set whether the parser is to parse the async support. |
| * |
| * Deprecated: support for disabling the parsing of async support has been |
| * removed, so setting this flag has no effect. |
| */ |
| @deprecated |
| void set parseAsync(bool parseAsync) {} |
| |
| @deprecated |
| bool get parseConditionalDirectives => true; |
| |
| @deprecated |
| void set parseConditionalDirectives(bool value) {} |
| |
| /** |
| * Set whether the parser is to parse function bodies. |
| */ |
| void set parseFunctionBodies(bool parseFunctionBodies) { |
| this._parseFunctionBodies = parseFunctionBodies; |
| } |
| |
| /** |
| * Return the content of a string with the given literal representation. The |
| * [lexeme] is the literal representation of the string. The flag [isFirst] is |
| * `true` if this is the first token in a string literal. The flag [isLast] is |
| * `true` if this is the last token in a string literal. |
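| * |
| * For example (an illustrative sketch): |
| * |
| * computeStringValue("'ab'", true, true) // returns 'ab' |
| * computeStringValue("r'a\\nb'", true, true) // raw: returns a\nb verbatim |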
| */ |
| String computeStringValue(String lexeme, bool isFirst, bool isLast) { |
| StringLexemeHelper helper = new StringLexemeHelper(lexeme, isFirst, isLast); |
| int start = helper.start; |
| int end = helper.end; |
| bool stringEndsAfterStart = end >= start; |
| assert(stringEndsAfterStart); |
| if (!stringEndsAfterStart) { |
| AnalysisEngine.instance.logger.logError( |
| "Internal error: computeStringValue($lexeme, $isFirst, $isLast)"); |
| return ""; |
| } |
| if (helper.isRaw) { |
| return lexeme.substring(start, end); |
| } |
| StringBuffer buffer = new StringBuffer(); |
| int index = start; |
| while (index < end) { |
| index = _translateCharacter(buffer, lexeme, index); |
| } |
| return buffer.toString(); |
| } |
| |
| /** |
| * Return a synthetic identifier. |
| */ |
| SimpleIdentifier createSyntheticIdentifier({bool isDeclaration: false}) { |
| Token syntheticToken; |
| if (_currentToken.type == TokenType.KEYWORD) { |
| // Treat the current keyword token as an identifier. |
| // This is not always correct, e.g. "^is T" where "^" marks the place for |
| // the synthetic identifier. By creating a SyntheticStringToken we can |
| // distinguish a real identifier from a synthetic one. In code completion, |
| // the behavior will depend on the cursor position - before or on "is". |
| syntheticToken = _injectToken(new SyntheticStringToken( |
| TokenType.IDENTIFIER, _currentToken.lexeme, _currentToken.offset)); |
| } else { |
| syntheticToken = _createSyntheticToken(TokenType.IDENTIFIER); |
| } |
| return astFactory.simpleIdentifier(syntheticToken, |
| isDeclaration: isDeclaration); |
| } |
| |
| /** |
| * Return a synthetic string literal. |
| */ |
| SimpleStringLiteral createSyntheticStringLiteral() => astFactory |
| .simpleStringLiteral(_createSyntheticToken(TokenType.STRING), ""); |
| |
| /** |
| * Advance to the next token in the token stream, making it the new current |
| * token, and return the token that was current before this method was |
| * invoked. |
| */ |
| Token getAndAdvance() { |
| Token token = _currentToken; |
| _currentToken = _currentToken.next; |
| return token; |
| } |
| |
| /** |
| * Return `true` if the current token appears to be the beginning of a |
| * function declaration. |
| */ |
| bool isFunctionDeclaration() { |
| Keyword keyword = _currentToken.keyword; |
| if (keyword == Keyword.VOID) { |
| return true; |
| } |
| Token afterReturnType = skipTypeName(_currentToken); |
| if (afterReturnType == null) { |
| // There was no return type, but it is optional, so go back to where we |
| // started. |
| afterReturnType = _currentToken; |
| } |
| Token afterIdentifier = skipSimpleIdentifier(afterReturnType); |
| if (afterIdentifier == null) { |
| // It's possible that we parsed the function name as if it were a type |
| // name, so see whether it makes sense if we assume that there is no type. |
| afterIdentifier = skipSimpleIdentifier(_currentToken); |
| } |
| if (afterIdentifier == null) { |
| return false; |
| } |
| if (isFunctionExpression(afterIdentifier)) { |
| return true; |
| } |
| // It's possible that we have found a getter. While this isn't valid at |
| // this point, we test for it in order to recover better. |
| if (keyword == Keyword.GET) { |
| Token afterName = skipSimpleIdentifier(_currentToken.next); |
| if (afterName == null) { |
| return false; |
| } |
| TokenType type = afterName.type; |
| return type == TokenType.FUNCTION || type == TokenType.OPEN_CURLY_BRACKET; |
| } else if (_tokenMatchesKeyword(afterReturnType, Keyword.GET)) { |
| Token afterName = skipSimpleIdentifier(afterReturnType.next); |
| if (afterName == null) { |
| return false; |
| } |
| TokenType type = afterName.type; |
| return type == TokenType.FUNCTION || type == TokenType.OPEN_CURLY_BRACKET; |
| } |
| return false; |
| } |
| |
| /** |
| * Return `true` if the given [token] appears to be the beginning of a |
| * function expression. |
| */ |
| bool isFunctionExpression(Token token) { |
| // Function expressions aren't allowed in initializer lists. |
| if (_inInitializer) { |
| return false; |
| } |
| Token afterTypeParameters = _skipTypeParameterList(token); |
| if (afterTypeParameters == null) { |
| afterTypeParameters = token; |
| } |
| Token afterParameters = _skipFormalParameterList(afterTypeParameters); |
| if (afterParameters == null) { |
| return false; |
| } |
| if (afterParameters.matchesAny( |
| const <TokenType>[TokenType.OPEN_CURLY_BRACKET, TokenType.FUNCTION])) { |
| return true; |
| } |
| String lexeme = afterParameters.lexeme; |
| return lexeme == ASYNC || lexeme == SYNC; |
| } |
| |
| /** |
| * Return `true` if the current token is the first token in an initialized |
| * variable declaration rather than an expression. This method assumes that we |
| * have already skipped past any metadata that might be associated with the |
| * declaration. |
| * |
| * initializedVariableDeclaration ::= |
| * declaredIdentifier ('=' expression)? (',' initializedIdentifier)* |
| * |
| * declaredIdentifier ::= |
| * metadata finalConstVarOrType identifier |
| * |
| * finalConstVarOrType ::= |
| * 'final' type? |
| * | 'const' type? |
| * | 'var' |
| * | type |
| * |
| * type ::= |
| * qualified typeArguments? |
| * |
| * initializedIdentifier ::= |
| * identifier ('=' expression)? |
| */ |
| bool isInitializedVariableDeclaration() { |
| Keyword keyword = _currentToken.keyword; |
| if (keyword == Keyword.FINAL || keyword == Keyword.VAR) { |
| // An expression cannot start with a keyword other than 'const', |
| // 'rethrow', or 'throw'. |
| return true; |
| } |
| if (keyword == Keyword.CONST) { |
| // Look to see whether we might be at the start of a list or map literal, |
| // otherwise this should be the start of a variable declaration. |
| return !_peek().matchesAny(const <TokenType>[ |
| TokenType.LT, |
| TokenType.OPEN_CURLY_BRACKET, |
| TokenType.OPEN_SQUARE_BRACKET, |
| TokenType.INDEX |
| ]); |
| } |
| bool allowAdditionalTokens = true; |
| // We know that we have an identifier, and need to see whether it might be |
| // a type name. |
| if (_currentToken.type != TokenType.IDENTIFIER) { |
| allowAdditionalTokens = false; |
| } |
| Token token = skipTypeName(_currentToken); |
| if (token == null) { |
| // There was no type name, so this can't be a declaration. |
| return false; |
| } |
| if (token.type != TokenType.IDENTIFIER) { |
| allowAdditionalTokens = false; |
| } |
| token = skipSimpleIdentifier(token); |
| if (token == null) { |
| return false; |
| } |
| TokenType type = token.type; |
| // Usual cases in valid code: |
| // String v = ''; |
| // String v, v2; |
| // String v; |
| // for (String item in items) {} |
| if (type == TokenType.EQ || |
| type == TokenType.COMMA || |
| type == TokenType.SEMICOLON || |
| token.keyword == Keyword.IN) { |
| return true; |
| } |
| // It is OK to parse as a variable declaration in these cases: |
| // String v } |
| // String v if (true) print('OK'); |
| // String v { print(42); } |
| // ...but not in these cases: |
| // get getterName { |
| // String get getterName |
| if (allowAdditionalTokens) { |
| if (type == TokenType.CLOSE_CURLY_BRACKET || |
| type == TokenType.KEYWORD || |
| type == TokenType.IDENTIFIER || |
| type == TokenType.OPEN_CURLY_BRACKET) { |
| return true; |
| } |
| } |
| return false; |
| } |
| |
| /** |
| * Return `true` if the current token appears to be the beginning of a switch |
| * member. |
| */ |
| bool isSwitchMember() { |
| Token token = _currentToken; |
| while (_tokenMatches(token, TokenType.IDENTIFIER) && |
| _tokenMatches(token.next, TokenType.COLON)) { |
| token = token.next.next; |
| } |
| Keyword keyword = token.keyword; |
| return keyword == Keyword.CASE || keyword == Keyword.DEFAULT; |
| } |
| |
| /** |
| * Parse an additive expression. Return the additive expression that was |
| * parsed. |
| * |
| * additiveExpression ::= |
| * multiplicativeExpression (additiveOperator multiplicativeExpression)* |
| * | 'super' (additiveOperator multiplicativeExpression)+ |
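| * |
| * For example (illustrative only), this parses source such as: |
| * |
| * a + b - c // left-associative: equivalent to (a + b) - c |
| * super + 1 |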
| */ |
| Expression parseAdditiveExpression() { |
| Expression expression; |
| if (_currentToken.keyword == Keyword.SUPER && |
| _currentToken.next.type.isAdditiveOperator) { |
| expression = astFactory.superExpression(getAndAdvance()); |
| } else { |
| expression = parseMultiplicativeExpression(); |
| } |
| while (_currentToken.type.isAdditiveOperator) { |
| expression = astFactory.binaryExpression( |
| expression, getAndAdvance(), parseMultiplicativeExpression()); |
| } |
| return expression; |
| } |
| |
| /** |
| * Parse an annotation. Return the annotation that was parsed. |
| * |
| * This method assumes that the current token matches [TokenType.AT]. |
| * |
| * annotation ::= |
| * '@' qualified ('.' identifier)? arguments? |
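| * |
| * For example (illustrative only), this parses annotations such as: |
| * |
| * @override |
| * @Deprecated('use bar instead') |
| * @prefix.Annotation.named(1, 2) |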
| */ |
| Annotation parseAnnotation() { |
| Token atSign = getAndAdvance(); |
| Identifier name = parsePrefixedIdentifier(); |
| Token period = null; |
| SimpleIdentifier constructorName = null; |
| if (_matches(TokenType.PERIOD)) { |
| period = getAndAdvance(); |
| constructorName = parseSimpleIdentifier(); |
| } |
| ArgumentList arguments = null; |
| if (_matches(TokenType.OPEN_PAREN)) { |
| arguments = parseArgumentList(); |
| } |
| return astFactory.annotation( |
| atSign, name, period, constructorName, arguments); |
| } |
| |
| /** |
| * Parse an argument. Return the argument that was parsed. |
| * |
| * argument ::= |
| * namedArgument |
| * | expression |
| * |
| * namedArgument ::= |
| * label expression |
| */ |
| Expression parseArgument() { |
| // TODO(brianwilkerson) Consider returning a wrapper indicating whether the |
| // expression is a named expression in order to remove the 'is' check in |
| // 'parseArgumentList'. |
| // |
| // Both namedArgument and expression can start with an identifier, but only |
| // namedArgument can have an identifier followed by a colon. |
| // |
| if (_matchesIdentifier() && _tokenMatches(_peek(), TokenType.COLON)) { |
| return astFactory.namedExpression(parseLabel(), parseExpression2()); |
| } else { |
| return parseExpression2(); |
| } |
| } |
| |
| /** |
| * Parse a list of arguments. Return the argument list that was parsed. |
| * |
| * This method assumes that the current token matches [TokenType.OPEN_PAREN]. |
| * |
| * arguments ::= |
| * '(' argumentList? ')' |
| * |
| * argumentList ::= |
| * namedArgument (',' namedArgument)* |
| * | expressionList (',' namedArgument)* |
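| * |
| * For example (illustrative only), this parses argument lists such as: |
| * |
| * () |
| * (1, 2) |
| * (1, radix: 16) |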
| */ |
| ArgumentList parseArgumentList() { |
| Token leftParenthesis = getAndAdvance(); |
| if (_matches(TokenType.CLOSE_PAREN)) { |
| return astFactory.argumentList(leftParenthesis, null, getAndAdvance()); |
| } |
| |
| /** |
| * Return `true` if the parser appears to be at the beginning of an argument |
| * even though there was no comma. This is a special case of the more |
| * general recovery technique described below. |
| */ |
| bool isLikelyMissingComma() { |
| if (_matchesIdentifier() && |
| _tokenMatches(_currentToken.next, TokenType.COLON) && |
| leftParenthesis is BeginToken && |
| leftParenthesis.endToken != null) { |
| _reportErrorForToken( |
| ParserErrorCode.EXPECTED_TOKEN, _currentToken.previous, [',']); |
| return true; |
| } |
| return false; |
| } |
| |
| // |
| // Even though unnamed arguments must all appear before any named arguments, |
| // we allow them to appear in any order so that we can recover faster. |
| // |
| bool wasInInitializer = _inInitializer; |
| _inInitializer = false; |
| try { |
| Token previousStartOfArgument = _currentToken; |
| Expression argument = parseArgument(); |
| List<Expression> arguments = <Expression>[argument]; |
| bool foundNamedArgument = argument is NamedExpression; |
| bool generatedError = false; |
| while (_optional(TokenType.COMMA) || |
| (isLikelyMissingComma() && |
| previousStartOfArgument != _currentToken)) { |
| if (_matches(TokenType.CLOSE_PAREN)) { |
| break; |
| } |
| previousStartOfArgument = _currentToken; |
| argument = parseArgument(); |
| arguments.add(argument); |
| if (argument is NamedExpression) { |
| foundNamedArgument = true; |
| } else if (foundNamedArgument) { |
| if (!generatedError) { |
| if (!argument.isSynthetic) { |
| // Report the error, once, but allow the arguments to be in any |
| // order in the AST. |
| _reportErrorForCurrentToken( |
| ParserErrorCode.POSITIONAL_AFTER_NAMED_ARGUMENT); |
| generatedError = true; |
| } |
| } |
| } |
| } |
| // Recovery: If the next token is not a right parenthesis, look at the |
| // left parenthesis to see whether there is a matching right parenthesis. |
| // If there is, then we're more likely missing a comma and should go back |
| // to parsing arguments. |
| Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); |
| return astFactory.argumentList( |
| leftParenthesis, arguments, rightParenthesis); |
| } finally { |
| _inInitializer = wasInInitializer; |
| } |
| } |
| |
| /** |
| * Parse an assert statement. Return the assert statement. |
| * |
| * This method assumes that the current token matches `Keyword.ASSERT`. |
| * |
| * assertStatement ::= |
| * 'assert' '(' expression (',' expression)? ')' ';' |
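| * |
| * For example (illustrative only): |
| * |
| * assert(x != null); |
| * assert(x != null, 'x must not be null'); |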
| */ |
| AssertStatement parseAssertStatement() { |
| Token keyword = getAndAdvance(); |
| Token leftParen = _expect(TokenType.OPEN_PAREN); |
| Expression expression = parseExpression2(); |
| Token comma; |
| Expression message; |
| if (_matches(TokenType.COMMA)) { |
| comma = getAndAdvance(); |
| message = parseExpression2(); |
| } |
| Token rightParen = _expect(TokenType.CLOSE_PAREN); |
| Token semicolon = _expect(TokenType.SEMICOLON); |
| return astFactory.assertStatement( |
| keyword, leftParen, expression, comma, message, rightParen, semicolon); |
| } |
| |
| /** |
| * Parse an assignable expression. The [primaryAllowed] is `true` if the |
| * expression is allowed to be a primary without any assignable selector. |
| * Return the assignable expression that was parsed. |
| * |
| * assignableExpression ::= |
| * primary (arguments* assignableSelector)+ |
| * | 'super' unconditionalAssignableSelector |
| * | identifier |
| */ |
| Expression parseAssignableExpression(bool primaryAllowed) { |
| if (_matchesKeyword(Keyword.SUPER)) { |
| return parseAssignableSelector( |
| astFactory.superExpression(getAndAdvance()), false, |
| allowConditional: false); |
| } |
| return _parseAssignableExpressionNotStartingWithSuper(primaryAllowed); |
| } |
| |
| /** |
| * Parse an assignable selector. The [prefix] is the expression preceding the |
| * selector. The [optional] is `true` if the selector is optional. Return the |
| * assignable selector that was parsed, or the original prefix if there was no |
| * assignable selector. If [allowConditional] is false, then the '?.' |
| * operator will still be parsed, but a parse error will be generated. |
| * |
| * unconditionalAssignableSelector ::= |
| * '[' expression ']' |
| * | '.' identifier |
| * |
| * assignableSelector ::= |
| * unconditionalAssignableSelector |
| * | '?.' identifier |
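| * |
| * For example (illustrative only), given a prefix `a`, this parses |
| * selectors such as: |
| * |
| * a[index] |
| * a.name |
| * a?.name |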
| */ |
| Expression parseAssignableSelector(Expression prefix, bool optional, |
| {bool allowConditional: true}) { |
| TokenType type = _currentToken.type; |
| if (type == TokenType.OPEN_SQUARE_BRACKET) { |
| Token leftBracket = getAndAdvance(); |
| bool wasInInitializer = _inInitializer; |
| _inInitializer = false; |
| try { |
| Expression index = parseExpression2(); |
| Token rightBracket = _expect(TokenType.CLOSE_SQUARE_BRACKET); |
| return astFactory.indexExpressionForTarget( |
| prefix, leftBracket, index, rightBracket); |
| } finally { |
| _inInitializer = wasInInitializer; |
| } |
| } else { |
| bool isQuestionPeriod = type == TokenType.QUESTION_PERIOD; |
| if (type == TokenType.PERIOD || isQuestionPeriod) { |
| if (isQuestionPeriod && !allowConditional) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.INVALID_OPERATOR_FOR_SUPER, |
| [_currentToken.lexeme]); |
| } |
| Token operator = getAndAdvance(); |
| return astFactory.propertyAccess( |
| prefix, operator, parseSimpleIdentifier()); |
| } else if (type == TokenType.INDEX) { |
| _splitIndex(); |
| Token leftBracket = getAndAdvance(); |
| Expression index = parseSimpleIdentifier(); |
| Token rightBracket = getAndAdvance(); |
| return astFactory.indexExpressionForTarget( |
| prefix, leftBracket, index, rightBracket); |
| } else { |
| if (!optional) { |
| // Report the missing selector. |
| _reportErrorForCurrentToken( |
| ParserErrorCode.MISSING_ASSIGNABLE_SELECTOR); |
| } |
| return prefix; |
| } |
| } |
| } |
| |
| /** |
| * Parse an await expression. Return the await expression that was parsed. |
| * |
| * This method assumes that the current token matches `_AWAIT`. |
| * |
| * awaitExpression ::= |
| * 'await' unaryExpression |
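| * |
| * For example (illustrative only), inside an 'async' function body: |
| * |
| * await future |
| * await fetchValue() |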
| */ |
| AwaitExpression parseAwaitExpression() { |
| Token awaitToken = getAndAdvance(); |
| Expression expression = parseUnaryExpression(); |
| return astFactory.awaitExpression(awaitToken, expression); |
| } |
| |
| /** |
| * Parse a bitwise and expression. Return the bitwise and expression that was |
| * parsed. |
| * |
| * bitwiseAndExpression ::= |
| * shiftExpression ('&' shiftExpression)* |
| * | 'super' ('&' shiftExpression)+ |
| */ |
| Expression parseBitwiseAndExpression() { |
| Expression expression; |
| if (_currentToken.keyword == Keyword.SUPER && |
| _currentToken.next.type == TokenType.AMPERSAND) { |
| expression = astFactory.superExpression(getAndAdvance()); |
| } else { |
| expression = parseShiftExpression(); |
| } |
| while (_currentToken.type == TokenType.AMPERSAND) { |
| expression = astFactory.binaryExpression( |
| expression, getAndAdvance(), parseShiftExpression()); |
| } |
| return expression; |
| } |
| |
| /** |
| * Parse a bitwise or expression. Return the bitwise or expression that was |
| * parsed. |
| * |
| * bitwiseOrExpression ::= |
| * bitwiseXorExpression ('|' bitwiseXorExpression)* |
| * | 'super' ('|' bitwiseXorExpression)+ |
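| * |
| * For example (illustrative only): |
| * |
| * a | b ^ c & d // parsed as a | (b ^ (c & d)) |
| * super | mask |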
| */ |
| Expression parseBitwiseOrExpression() { |
| Expression expression; |
| if (_currentToken.keyword == Keyword.SUPER && |
| _currentToken.next.type == TokenType.BAR) { |
| expression = astFactory.superExpression(getAndAdvance()); |
| } else { |
| expression = parseBitwiseXorExpression(); |
| } |
| while (_currentToken.type == TokenType.BAR) { |
| expression = astFactory.binaryExpression( |
| expression, getAndAdvance(), parseBitwiseXorExpression()); |
| } |
| return expression; |
| } |
| |
| /** |
| * Parse a bitwise exclusive-or expression. Return the bitwise exclusive-or |
| * expression that was parsed. |
| * |
| * bitwiseXorExpression ::= |
| * bitwiseAndExpression ('^' bitwiseAndExpression)* |
| * | 'super' ('^' bitwiseAndExpression)+ |
| */ |
| Expression parseBitwiseXorExpression() { |
| Expression expression; |
| if (_currentToken.keyword == Keyword.SUPER && |
| _currentToken.next.type == TokenType.CARET) { |
| expression = astFactory.superExpression(getAndAdvance()); |
| } else { |
| expression = parseBitwiseAndExpression(); |
| } |
| while (_currentToken.type == TokenType.CARET) { |
| expression = astFactory.binaryExpression( |
| expression, getAndAdvance(), parseBitwiseAndExpression()); |
| } |
| return expression; |
| } |
| |
| /** |
| * Parse a block. Return the block that was parsed. |
| * |
| * This method assumes that the current token matches |
| * [TokenType.OPEN_CURLY_BRACKET]. |
| * |
| * block ::= |
| * '{' statements '}' |
| */ |
| Block parseBlock() { |
| bool isEndOfBlock() { |
| TokenType type = _currentToken.type; |
| return type == TokenType.EOF || type == TokenType.CLOSE_CURLY_BRACKET; |
| } |
| |
| Token leftBracket = getAndAdvance(); |
| List<Statement> statements = <Statement>[]; |
| Token statementStart = _currentToken; |
| while (!isEndOfBlock()) { |
| Statement statement = parseStatement2(); |
| if (identical(_currentToken, statementStart)) { |
| // Ensure that we are making progress and report an error if we're not. |
| _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, |
| [_currentToken.lexeme]); |
| _advance(); |
| } else if (statement != null) { |
| statements.add(statement); |
| } |
| statementStart = _currentToken; |
| } |
| // Recovery: If the next token is not a right curly bracket, look at the |
| // left curly bracket to see whether there is a matching right bracket. If |
| // there is, then we're more likely missing a semicolon and should go back |
| // to parsing statements. |
| Token rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET); |
| return astFactory.block(leftBracket, statements, rightBracket); |
| } |
| |
| /** |
| * Parse a break statement. Return the break statement that was parsed. |
| * |
| * This method assumes that the current token matches `Keyword.BREAK`. |
| * |
| * breakStatement ::= |
| * 'break' identifier? ';' |
| */ |
| Statement parseBreakStatement() { |
| Token breakKeyword = getAndAdvance(); |
| SimpleIdentifier label = null; |
| if (_matchesIdentifier()) { |
| label = _parseSimpleIdentifierUnchecked(); |
| } |
| if (!_inLoop && !_inSwitch && label == null) { |
| _reportErrorForToken(ParserErrorCode.BREAK_OUTSIDE_OF_LOOP, breakKeyword); |
| } |
| Token semicolon = _expect(TokenType.SEMICOLON); |
| return astFactory.breakStatement(breakKeyword, label, semicolon); |
| } |
| |
| /** |
| * Parse a cascade section. Return the expression representing the cascaded |
| * method invocation. |
| * |
| * This method assumes that the current token matches |
| * `TokenType.PERIOD_PERIOD`. |
| * |
| * cascadeSection ::= |
| * '..' (cascadeSelector typeArguments? arguments*) |
| * (assignableSelector typeArguments? arguments*)* cascadeAssignment? |
| * |
| * cascadeSelector ::= |
| * '[' expression ']' |
| * | identifier |
| * |
| * cascadeAssignment ::= |
| * assignmentOperator expressionWithoutCascade |
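| * |
| * For example (illustrative only), in source such as the following, each |
| * line beginning with '..' is a single cascade section parsed by this |
| * method: |
| * |
| * items |
| * ..add('a') |
| * ..addAll(['b', 'c']) |
| * ..[0] = 'z' |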
| */ |
| Expression parseCascadeSection() { |
| Token period = getAndAdvance(); |
| Expression expression = null; |
| SimpleIdentifier functionName = null; |
| if (_matchesIdentifier()) { |
| functionName = _parseSimpleIdentifierUnchecked(); |
| } else if (_currentToken.type == TokenType.OPEN_SQUARE_BRACKET) { |
| Token leftBracket = getAndAdvance(); |
| bool wasInInitializer = _inInitializer; |
| _inInitializer = false; |
| try { |
| Expression index = parseExpression2(); |
| Token rightBracket = _expect(TokenType.CLOSE_SQUARE_BRACKET); |
| expression = astFactory.indexExpressionForCascade( |
| period, leftBracket, index, rightBracket); |
| period = null; |
| } finally { |
| _inInitializer = wasInInitializer; |
| } |
| } else { |
| _reportErrorForToken(ParserErrorCode.MISSING_IDENTIFIER, _currentToken, |
| [_currentToken.lexeme]); |
| functionName = createSyntheticIdentifier(); |
| } |
| assert((expression == null && functionName != null) || |
| (expression != null && functionName == null)); |
| if (_isLikelyArgumentList()) { |
| do { |
| TypeArgumentList typeArguments = _parseOptionalTypeArguments(); |
| if (functionName != null) { |
| expression = astFactory.methodInvocation(expression, period, |
| functionName, typeArguments, parseArgumentList()); |
| period = null; |
| functionName = null; |
| } else if (expression == null) { |
| // It should not be possible to get here. |
| expression = astFactory.methodInvocation(expression, period, |
| createSyntheticIdentifier(), typeArguments, parseArgumentList()); |
| } else { |
| expression = astFactory.functionExpressionInvocation( |
| expression, typeArguments, parseArgumentList()); |
| } |
| } while (_isLikelyArgumentList()); |
| } else if (functionName != null) { |
| expression = astFactory.propertyAccess(expression, period, functionName); |
| period = null; |
| } |
| assert(expression != null); |
| bool progress = true; |
| while (progress) { |
| progress = false; |
| Expression selector = parseAssignableSelector(expression, true); |
| if (!identical(selector, expression)) { |
| expression = selector; |
| progress = true; |
| while (_isLikelyArgumentList()) { |
| TypeArgumentList typeArguments = _parseOptionalTypeArguments(); |
| Expression currentExpression = expression; |
| if (currentExpression is PropertyAccess) { |
| expression = astFactory.methodInvocation( |
| currentExpression.target, |
| currentExpression.operator, |
| currentExpression.propertyName, |
| typeArguments, |
| parseArgumentList()); |
| } else { |
| expression = astFactory.functionExpressionInvocation( |
| expression, typeArguments, parseArgumentList()); |
| } |
| } |
| } |
| } |
| if (_currentToken.type.isAssignmentOperator) { |
| Token operator = getAndAdvance(); |
| _ensureAssignable(expression); |
| expression = astFactory.assignmentExpression( |
| expression, operator, parseExpressionWithoutCascade()); |
| } |
| return expression; |
| } |
| |
| /** |
| * Parse a class declaration. The [commentAndMetadata] is the metadata to be |
| * associated with the member. The [abstractKeyword] is the token for the |
| * keyword 'abstract', or `null` if the keyword was not given. Return the |
| * class declaration that was parsed. |
| * |
| * This method assumes that the current token matches `Keyword.CLASS`. |
| * |
| * classDeclaration ::= |
| * metadata 'abstract'? 'class' name typeParameterList? (extendsClause withClause?)? implementsClause? '{' classMembers '}' | |
| * metadata 'abstract'? 'class' mixinApplicationClass |
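| * |
| * For example (illustrative only), this parses declarations such as: |
| * |
| * abstract class C<T> extends B with M implements I { } |
| * class D = B with M; |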
| */ |
| CompilationUnitMember parseClassDeclaration( |
| CommentAndMetadata commentAndMetadata, Token abstractKeyword) { |
| // |
| // Parse the name and type parameters. |
| // |
| Token keyword = getAndAdvance(); |
| SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true); |
| String className = name.name; |
| TypeParameterList typeParameters = null; |
| TokenType type = _currentToken.type; |
| if (type == TokenType.LT) { |
| typeParameters = parseTypeParameterList(); |
| type = _currentToken.type; |
| } |
| // |
| // Check to see whether this might be a class type alias rather than a class |
| // declaration. |
| // |
| if (type == TokenType.EQ) { |
| return _parseClassTypeAliasAfterName( |
| commentAndMetadata, abstractKeyword, keyword, name, typeParameters); |
| } |
| // |
| // Parse the clauses. The parser accepts clauses in any order, but will |
| // generate errors if they are not in the order required by the |
| // specification. |
| // |
| ExtendsClause extendsClause = null; |
| WithClause withClause = null; |
| ImplementsClause implementsClause = null; |
| bool foundClause = true; |
| while (foundClause) { |
| Keyword keyword = _currentToken.keyword; |
| if (keyword == Keyword.EXTENDS) { |
| if (extendsClause == null) { |
| extendsClause = parseExtendsClause(); |
| if (withClause != null) { |
| _reportErrorForToken( |
| ParserErrorCode.WITH_BEFORE_EXTENDS, withClause.withKeyword); |
| } else if (implementsClause != null) { |
| _reportErrorForToken(ParserErrorCode.IMPLEMENTS_BEFORE_EXTENDS, |
| implementsClause.implementsKeyword); |
| } |
| } else { |
| _reportErrorForToken(ParserErrorCode.MULTIPLE_EXTENDS_CLAUSES, |
| extendsClause.extendsKeyword); |
| parseExtendsClause(); |
| } |
| } else if (keyword == Keyword.WITH) { |
| if (withClause == null) { |
| withClause = parseWithClause(); |
| if (implementsClause != null) { |
| _reportErrorForToken(ParserErrorCode.IMPLEMENTS_BEFORE_WITH, |
| implementsClause.implementsKeyword); |
| } |
| } else { |
| _reportErrorForToken( |
| ParserErrorCode.MULTIPLE_WITH_CLAUSES, withClause.withKeyword); |
| parseWithClause(); |
| // TODO(brianwilkerson) Should we merge the list of applied mixins |
| // into a single list? |
| } |
| } else if (keyword == Keyword.IMPLEMENTS) { |
| if (implementsClause == null) { |
| implementsClause = parseImplementsClause(); |
| } else { |
| _reportErrorForToken(ParserErrorCode.MULTIPLE_IMPLEMENTS_CLAUSES, |
| implementsClause.implementsKeyword); |
| parseImplementsClause(); |
| // TODO(brianwilkerson) Should we merge the list of implemented |
| // classes into a single list? |
| } |
| } else { |
| foundClause = false; |
| } |
| } |
| if (withClause != null && extendsClause == null) { |
| _reportErrorForToken( |
| ParserErrorCode.WITH_WITHOUT_EXTENDS, withClause.withKeyword); |
| } |
| // |
| // Look for and skip over the extra-lingual 'native' specification. |
| // |
| NativeClause nativeClause = null; |
| if (_matchesString(_NATIVE) && _tokenMatches(_peek(), TokenType.STRING)) { |
| nativeClause = _parseNativeClause(); |
| } |
| // |
| // Parse the body of the class. |
| // |
| Token leftBracket = null; |
| List<ClassMember> members = null; |
| Token rightBracket = null; |
| if (_matches(TokenType.OPEN_CURLY_BRACKET)) { |
| leftBracket = getAndAdvance(); |
| members = _parseClassMembers(className, _getEndToken(leftBracket)); |
| rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET); |
| } else { |
| // Recovery: Check for an unmatched closing curly bracket and parse |
| // members until it is reached. |
| leftBracket = _createSyntheticToken(TokenType.OPEN_CURLY_BRACKET); |
| rightBracket = _createSyntheticToken(TokenType.CLOSE_CURLY_BRACKET); |
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_CLASS_BODY); |
| } |
| ClassDeclaration classDeclaration = astFactory.classDeclaration( |
| commentAndMetadata.comment, |
| commentAndMetadata.metadata, |
| abstractKeyword, |
| keyword, |
| name, |
| typeParameters, |
| extendsClause, |
| withClause, |
| implementsClause, |
| leftBracket, |
| members, |
| rightBracket); |
| classDeclaration.nativeClause = nativeClause; |
| return classDeclaration; |
| } |
| |
| /** |
| * Parse a class member. The [className] is the name of the class containing |
| * the member being parsed. Return the class member that was parsed, or `null` |
| * if what was found was not a valid class member. |
| * |
| * classMemberDefinition ::= |
| * declaration ';' |
| * | methodSignature functionBody |
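| * |
| * For example (illustrative only), this parses members such as: |
| * |
| * int count = 0; |
| * int get half => count ~/ 2; |
| * void reset() { count = 0; } |
| * C(this.count); |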
| */ |
| ClassMember parseClassMember(String className) { |
| CommentAndMetadata commentAndMetadata = parseCommentAndMetadata(); |
| Modifiers modifiers = parseModifiers(); |
| Keyword keyword = _currentToken.keyword; |
| if (keyword == Keyword.VOID) { |
| TypeName returnType = astFactory.typeName( |
| astFactory.simpleIdentifier(getAndAdvance()), null); |
| keyword = _currentToken.keyword; |
| Token next = _peek(); |
| bool isFollowedByIdentifier = _tokenMatchesIdentifier(next); |
| if (keyword == Keyword.GET && isFollowedByIdentifier) { |
| _validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return parseGetter(commentAndMetadata, modifiers.externalKeyword, |
| modifiers.staticKeyword, returnType); |
| } else if (keyword == Keyword.SET && isFollowedByIdentifier) { |
| _validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return parseSetter(commentAndMetadata, modifiers.externalKeyword, |
| modifiers.staticKeyword, returnType); |
| } else if (keyword == Keyword.OPERATOR && _isOperator(next)) { |
| _validateModifiersForOperator(modifiers); |
| return _parseOperatorAfterKeyword(commentAndMetadata, |
| modifiers.externalKeyword, returnType, getAndAdvance()); |
| } else if (_matchesIdentifier() && |
| _peek().matchesAny(const <TokenType>[ |
| TokenType.OPEN_PAREN, |
| TokenType.OPEN_CURLY_BRACKET, |
| TokenType.FUNCTION, |
| TokenType.LT |
| ])) { |
| _validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return _parseMethodDeclarationAfterReturnType(commentAndMetadata, |
| modifiers.externalKeyword, modifiers.staticKeyword, returnType); |
| } else { |
| // |
| // We have found an error of some kind. Try to recover. |
| // |
| if (_matchesIdentifier()) { |
| if (_peek().matchesAny(const <TokenType>[ |
| TokenType.EQ, |
| TokenType.COMMA, |
| TokenType.SEMICOLON |
| ])) { |
| // |
| // We appear to have a variable declaration with a type of "void". |
| // |
| _reportErrorForNode(ParserErrorCode.VOID_VARIABLE, returnType); |
| return parseInitializedIdentifierList( |
| commentAndMetadata, |
| modifiers.staticKeyword, |
| modifiers.covariantKeyword, |
| _validateModifiersForField(modifiers), |
| returnType); |
| } |
| } |
| if (_isOperator(_currentToken)) { |
| // |
| // We appear to have found an operator declaration without the |
| // 'operator' keyword. |
| // |
| _validateModifiersForOperator(modifiers); |
| return parseOperator( |
| commentAndMetadata, modifiers.externalKeyword, returnType); |
| } |
| _reportErrorForToken( |
| ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken); |
| return null; |
| } |
| } |
| Token next = _peek(); |
| bool isFollowedByIdentifier = _tokenMatchesIdentifier(next); |
| if (keyword == Keyword.GET && isFollowedByIdentifier) { |
| _validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return parseGetter(commentAndMetadata, modifiers.externalKeyword, |
| modifiers.staticKeyword, null); |
| } else if (keyword == Keyword.SET && isFollowedByIdentifier) { |
| _validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return parseSetter(commentAndMetadata, modifiers.externalKeyword, |
| modifiers.staticKeyword, null); |
| } else if (keyword == Keyword.OPERATOR && _isOperator(next)) { |
| _validateModifiersForOperator(modifiers); |
| return _parseOperatorAfterKeyword( |
| commentAndMetadata, modifiers.externalKeyword, null, getAndAdvance()); |
| } else if (!_matchesIdentifier()) { |
| // |
| // Recover from an error. |
| // |
| if (_matchesKeyword(Keyword.CLASS)) { |
| _reportErrorForCurrentToken(ParserErrorCode.CLASS_IN_CLASS); |
| // TODO(brianwilkerson) We don't currently have any way to capture the |
| // class that was parsed. |
| parseClassDeclaration(commentAndMetadata, null); |
| return null; |
| } else if (_matchesKeyword(Keyword.ABSTRACT) && |
| _tokenMatchesKeyword(_peek(), Keyword.CLASS)) { |
| _reportErrorForToken(ParserErrorCode.CLASS_IN_CLASS, _peek()); |
| // TODO(brianwilkerson) We don't currently have any way to capture the |
| // class that was parsed. |
| parseClassDeclaration(commentAndMetadata, getAndAdvance()); |
| return null; |
| } else if (_matchesKeyword(Keyword.ENUM)) { |
| _reportErrorForToken(ParserErrorCode.ENUM_IN_CLASS, _peek()); |
| // TODO(brianwilkerson) We don't currently have any way to capture the |
| // enum that was parsed. |
| parseEnumDeclaration(commentAndMetadata); |
| return null; |
| } else if (_isOperator(_currentToken)) { |
| // |
| // We appear to have found an operator declaration without the |
| // 'operator' keyword. |
| // |
| _validateModifiersForOperator(modifiers); |
| return parseOperator( |
| commentAndMetadata, modifiers.externalKeyword, null); |
| } |
| Token keyword = modifiers.varKeyword ?? |
| modifiers.finalKeyword ?? |
| modifiers.constKeyword; |
| if (keyword != null) { |
| // |
| // We appear to have found an incomplete field declaration. |
| // |
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); |
| VariableDeclaration variable = astFactory.variableDeclaration( |
| createSyntheticIdentifier(), null, null); |
| List<VariableDeclaration> variables = <VariableDeclaration>[variable]; |
| return astFactory.fieldDeclaration2( |
| comment: commentAndMetadata.comment, |
| metadata: commentAndMetadata.metadata, |
| covariantKeyword: modifiers.covariantKeyword, |
| fieldList: astFactory.variableDeclarationList( |
| null, null, keyword, null, variables), |
| semicolon: _expect(TokenType.SEMICOLON)); |
| } |
| _reportErrorForToken( |
| ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken); |
| if (commentAndMetadata.comment != null || |
| commentAndMetadata.hasMetadata) { |
| // |
| // We appear to have found an incomplete declaration at the end of the |
| // class. At this point it consists of metadata, which we don't want |
| // to lose, so we'll treat it as a method declaration with a missing |
| // name, missing parameters, and an empty body. |
| // |
| return astFactory.methodDeclaration( |
| commentAndMetadata.comment, |
| commentAndMetadata.metadata, |
| null, |
| null, |
| null, |
| null, |
| null, |
| createSyntheticIdentifier(isDeclaration: true), |
| null, |
| astFactory.formalParameterList( |
| _createSyntheticToken(TokenType.OPEN_PAREN), |
| <FormalParameter>[], |
| null, |
| null, |
| _createSyntheticToken(TokenType.CLOSE_PAREN)), |
| astFactory |
| .emptyFunctionBody(_createSyntheticToken(TokenType.SEMICOLON))); |
| } |
| return null; |
| } else if (_tokenMatches(next, TokenType.PERIOD) && |
| _tokenMatchesIdentifierOrKeyword(_peekAt(2)) && |
| _tokenMatches(_peekAt(3), TokenType.OPEN_PAREN)) { |
| if (!_tokenMatchesIdentifier(_peekAt(2))) { |
| _reportErrorForToken(ParserErrorCode.INVALID_CONSTRUCTOR_NAME, |
| _peekAt(2), [_peekAt(2).lexeme]); |
| } |
| return _parseConstructor( |
| commentAndMetadata, |
| modifiers.externalKeyword, |
| _validateModifiersForConstructor(modifiers), |
| modifiers.factoryKeyword, |
| parseSimpleIdentifier(), |
| getAndAdvance(), |
| parseSimpleIdentifier(allowKeyword: true, isDeclaration: true), |
| parseFormalParameterList()); |
| } else if (_tokenMatches(next, TokenType.OPEN_PAREN)) { |
| TypeName returnType = _parseOptionalTypeNameComment(); |
| SimpleIdentifier methodName = parseSimpleIdentifier(isDeclaration: true); |
| TypeParameterList typeParameters = _parseGenericCommentTypeParameters(); |
| FormalParameterList parameters = parseFormalParameterList(); |
| if (_matches(TokenType.COLON) || |
| modifiers.factoryKeyword != null || |
| methodName.name == className) { |
| return _parseConstructor( |
| commentAndMetadata, |
| modifiers.externalKeyword, |
| _validateModifiersForConstructor(modifiers), |
| modifiers.factoryKeyword, |
| astFactory.simpleIdentifier(methodName.token, isDeclaration: false), |
| null, |
| null, |
| parameters); |
| } |
| _validateModifiersForGetterOrSetterOrMethod(modifiers); |
| _validateFormalParameterList(parameters); |
| return _parseMethodDeclarationAfterParameters( |
| commentAndMetadata, |
| modifiers.externalKeyword, |
| modifiers.staticKeyword, |
| returnType, |
| methodName, |
| typeParameters, |
| parameters); |
| } else if (next.matchesAny(const <TokenType>[ |
| TokenType.EQ, |
| TokenType.COMMA, |
| TokenType.SEMICOLON |
| ])) { |
| if (modifiers.constKeyword == null && |
| modifiers.finalKeyword == null && |
| modifiers.varKeyword == null) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE); |
| } |
| return parseInitializedIdentifierList( |
| commentAndMetadata, |
| modifiers.staticKeyword, |
| modifiers.covariantKeyword, |
| _validateModifiersForField(modifiers), |
| null); |
| } else if (keyword == Keyword.TYPEDEF) { |
| _reportErrorForCurrentToken(ParserErrorCode.TYPEDEF_IN_CLASS); |
| // TODO(brianwilkerson) We don't currently have any way to capture the |
| // function type alias that was parsed. |
| _parseFunctionTypeAlias(commentAndMetadata, getAndAdvance()); |
| return null; |
| } else { |
| Token token = _skipTypeParameterList(_peek()); |
| if (token != null && _tokenMatches(token, TokenType.OPEN_PAREN)) { |
| return _parseMethodDeclarationAfterReturnType(commentAndMetadata, |
| modifiers.externalKeyword, modifiers.staticKeyword, null); |
| } |
| } |
| TypeAnnotation type = _parseTypeAnnotationAfterIdentifier(); |
| keyword = _currentToken.keyword; |
| next = _peek(); |
| isFollowedByIdentifier = _tokenMatchesIdentifier(next); |
| if (keyword == Keyword.GET && isFollowedByIdentifier) { |
| _validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return parseGetter(commentAndMetadata, modifiers.externalKeyword, |
| modifiers.staticKeyword, type); |
| } else if (keyword == Keyword.SET && isFollowedByIdentifier) { |
| _validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return parseSetter(commentAndMetadata, modifiers.externalKeyword, |
| modifiers.staticKeyword, type); |
| } else if (keyword == Keyword.OPERATOR && _isOperator(next)) { |
| _validateModifiersForOperator(modifiers); |
| return _parseOperatorAfterKeyword( |
| commentAndMetadata, modifiers.externalKeyword, type, getAndAdvance()); |
| } else if (!_matchesIdentifier()) { |
| if (_matches(TokenType.CLOSE_CURLY_BRACKET)) { |
| // |
| // We appear to have found an incomplete declaration at the end of the |
| // class. At this point it consists of a type name, so we'll treat it as |
| // a field declaration with a missing field name and semicolon. |
| // |
| return parseInitializedIdentifierList( |
| commentAndMetadata, |
| modifiers.staticKeyword, |
| modifiers.covariantKeyword, |
| _validateModifiersForField(modifiers), |
| type); |
| } |
| if (_isOperator(_currentToken)) { |
| // |
| // We appear to have found an operator declaration without the |
| // 'operator' keyword. |
| // |
| _validateModifiersForOperator(modifiers); |
| return parseOperator( |
| commentAndMetadata, modifiers.externalKeyword, type); |
| } |
| // |
| // We appear to have found an incomplete declaration before another |
| // declaration. At this point it consists of a type name, so we'll treat |
| // it as a field declaration with a missing field name and semicolon. |
| // |
| _reportErrorForToken( |
| ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken); |
| try { |
| _lockErrorListener(); |
| return parseInitializedIdentifierList( |
| commentAndMetadata, |
| modifiers.staticKeyword, |
| modifiers.covariantKeyword, |
| _validateModifiersForField(modifiers), |
| type); |
| } finally { |
| _unlockErrorListener(); |
| } |
| } else if (_tokenMatches(next, TokenType.OPEN_PAREN)) { |
| SimpleIdentifier methodName = |
| _parseSimpleIdentifierUnchecked(isDeclaration: true); |
| TypeParameterList typeParameters = _parseGenericCommentTypeParameters(); |
| FormalParameterList parameters = parseFormalParameterList(); |
| if (methodName.name == className) { |
| _reportErrorForNode(ParserErrorCode.CONSTRUCTOR_WITH_RETURN_TYPE, type); |
| return _parseConstructor( |
| commentAndMetadata, |
| modifiers.externalKeyword, |
| _validateModifiersForConstructor(modifiers), |
| modifiers.factoryKeyword, |
| astFactory.simpleIdentifier(methodName.token, isDeclaration: true), |
| null, |
| null, |
| parameters); |
| } |
| _validateModifiersForGetterOrSetterOrMethod(modifiers); |
| _validateFormalParameterList(parameters); |
| return _parseMethodDeclarationAfterParameters( |
| commentAndMetadata, |
| modifiers.externalKeyword, |
| modifiers.staticKeyword, |
| type, |
| methodName, |
| typeParameters, |
| parameters); |
| } else if (_tokenMatches(next, TokenType.LT)) { |
| return _parseMethodDeclarationAfterReturnType(commentAndMetadata, |
| modifiers.externalKeyword, modifiers.staticKeyword, type); |
| } else if (_tokenMatches(next, TokenType.OPEN_CURLY_BRACKET)) { |
| // We have found "TypeName identifier {", and are guessing that this is a |
| // getter without the keyword 'get'. |
| _validateModifiersForGetterOrSetterOrMethod(modifiers); |
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_GET); |
| _currentToken = _injectToken( |
| new Parser_SyntheticKeywordToken(Keyword.GET, _currentToken.offset)); |
| return parseGetter(commentAndMetadata, modifiers.externalKeyword, |
| modifiers.staticKeyword, type); |
| } |
| return parseInitializedIdentifierList( |
| commentAndMetadata, |
| modifiers.staticKeyword, |
| modifiers.covariantKeyword, |
| _validateModifiersForField(modifiers), |
| type); |
| } |
| |
| /** |
| * Parse a single combinator. Return the combinator that was parsed, or `null` |
| * if no combinator is found. |
| * |
| * combinator ::= |
| * 'show' identifier (',' identifier)* |
| * | 'hide' identifier (',' identifier)* |
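| * |
| * For example (illustrative only), this parses combinators such as: |
| * |
| * show Foo, Bar |
| * hide Baz |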
| */ |
| Combinator parseCombinator() { |
| if (_matchesString(_SHOW)) { |
| return astFactory.showCombinator(getAndAdvance(), parseIdentifierList()); |
| } else if (_matchesString(_HIDE)) { |
| return astFactory.hideCombinator(getAndAdvance(), parseIdentifierList()); |
| } |
| return null; |
| } |
| |
| /** |
| * Parse a list of combinators in a directive. Return the combinators that |
| * were parsed, or `null` if there are no combinators. |
| * |
| * combinator ::= |
| * 'show' identifier (',' identifier)* |
| * | 'hide' identifier (',' identifier)* |
| */ |
| List<Combinator> parseCombinators() { |
| List<Combinator> combinators = null; |
| while (true) { |
| Combinator combinator = parseCombinator(); |
| if (combinator == null) { |
| break; |
| } |
| combinators ??= <Combinator>[]; |
| combinators.add(combinator); |
| } |
| return combinators; |
| } |
| |
| /** |
| * Parse the documentation comment and metadata preceding a declaration. This |
| * method allows any number of documentation comments to occur before, after |
| * or between the metadata, but only returns the last (right-most) |
| * documentation comment that is found. Return the documentation comment and |
| * metadata that were parsed. |
| * |
| * metadata ::= |
| * annotation* |
| */ |
| CommentAndMetadata parseCommentAndMetadata() { |
| // TODO(brianwilkerson) Consider making the creation of documentation |
| // comments be lazy. |
| List<DocumentationCommentToken> tokens = parseDocumentationCommentTokens(); |
| List<Annotation> metadata = null; |
| while (_matches(TokenType.AT)) { |
| metadata ??= <Annotation>[]; |
| metadata.add(parseAnnotation()); |
| List<DocumentationCommentToken> optionalTokens = |
| parseDocumentationCommentTokens(); |
| if (optionalTokens != null) { |
| tokens = optionalTokens; |
| } |
| } |
| return new CommentAndMetadata(parseDocumentationComment(tokens), metadata); |
| } |
| |
| /** |
| * Parse a comment reference from the source between square brackets. The |
| * [referenceSource] is the source occurring between the square brackets |
| * within a documentation comment. The [sourceOffset] is the offset of the |
| * first character of the reference source. Return the comment reference that |
| * was parsed, or `null` if no reference could be found. |
| * |
| * commentReference ::= |
| * 'new'? prefixedIdentifier |
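| * |
| * For example (illustrative only), in a documentation comment such as |
| * |
| * /// Returns a [String], or see [new Point] and [Future.wait]. |
| * |
| * each bracketed reference is parsed by this method from the text between |
| * the brackets. |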
| */ |
| CommentReference parseCommentReference( |
| String referenceSource, int sourceOffset) { |
| // TODO(brianwilkerson) The errors are not getting the right offset/length |
| // and are being duplicated. |
| try { |
| BooleanErrorListener listener = new BooleanErrorListener(); |
| Scanner scanner = new Scanner( |
| null, new SubSequenceReader(referenceSource, sourceOffset), listener); |
| scanner.setSourceStart(1, 1); |
| Token firstToken = scanner.tokenize(); |
| if (listener.errorReported) { |
| return null; |
| } |
| if (firstToken.type == TokenType.EOF) { |
| Token syntheticToken = |
| new SyntheticStringToken(TokenType.IDENTIFIER, "", sourceOffset); |
| syntheticToken.setNext(firstToken); |
| return astFactory.commentReference( |
| null, astFactory.simpleIdentifier(syntheticToken)); |
| } |
| Token newKeyword = null; |
| if (_tokenMatchesKeyword(firstToken, Keyword.NEW)) { |
| newKeyword = firstToken; |
| firstToken = firstToken.next; |
| } |
| if (firstToken.isUserDefinableOperator) { |
| if (firstToken.next.type != TokenType.EOF) { |
| return null; |
| } |
| Identifier identifier = astFactory.simpleIdentifier(firstToken); |
| return astFactory.commentReference(null, identifier); |
| } else if (_tokenMatchesKeyword(firstToken, Keyword.OPERATOR)) { |
| Token secondToken = firstToken.next; |
| if (secondToken.isUserDefinableOperator) { |
| if (secondToken.next.type != TokenType.EOF) { |
| return null; |
| } |
| Identifier identifier = astFactory.simpleIdentifier(secondToken); |
| return astFactory.commentReference(null, identifier); |
| } |
| return null; |
| } else if (_tokenMatchesIdentifier(firstToken)) { |
| Token secondToken = firstToken.next; |
| Token thirdToken = secondToken.next; |
| Token nextToken; |
| Identifier identifier; |
| if (_tokenMatches(secondToken, TokenType.PERIOD)) { |
| if (thirdToken.isUserDefinableOperator) { |
| identifier = astFactory.prefixedIdentifier( |
| astFactory.simpleIdentifier(firstToken), |
| secondToken, |
| astFactory.simpleIdentifier(thirdToken)); |
| nextToken = thirdToken.next; |
| } else if (_tokenMatchesKeyword(thirdToken, Keyword.OPERATOR)) { |
| Token fourthToken = thirdToken.next; |
| if (fourthToken.isUserDefinableOperator) { |
| identifier = astFactory.prefixedIdentifier( |
| astFactory.simpleIdentifier(firstToken), |
| secondToken, |
| astFactory.simpleIdentifier(fourthToken)); |
| nextToken = fourthToken.next; |
| } else { |
| return null; |
| } |
| } else if (_tokenMatchesIdentifier(thirdToken)) { |
| identifier = astFactory.prefixedIdentifier( |
| astFactory.simpleIdentifier(firstToken), |
| secondToken, |
| astFactory.simpleIdentifier(thirdToken)); |
| nextToken = thirdToken.next; |
| } |
| } else { |
| identifier = astFactory.simpleIdentifier(firstToken); |
| nextToken = firstToken.next; |
| } |
| if (nextToken.type != TokenType.EOF) { |
| return null; |
| } |
| return astFactory.commentReference(newKeyword, identifier); |
| } else { |
| Keyword keyword = firstToken.keyword; |
| if (keyword == Keyword.THIS || |
| keyword == Keyword.NULL || |
| keyword == Keyword.TRUE || |
| keyword == Keyword.FALSE) { |
| // TODO(brianwilkerson) If we want to support this we will need to |
| // extend the definition of CommentReference to take an expression |
| // rather than an identifier. For now we just ignore it to reduce the |
| // number of errors produced, but that's probably not a valid long term |
| // approach. |
| return null; |
| } |
| } |
| } catch (exception) { |
| // Ignored because we assume that it wasn't a real comment reference. |
| } |
| return null; |
| } |
| |
| /** |
| * Parse all of the comment references occurring in the given list of
| * documentation comments. The [tokens] are the comment tokens representing
| * the documentation comments to be parsed. Return the comment references that |
| * were parsed. |
| * |
| * commentReference ::= |
| * '[' 'new'? qualified ']' libraryReference? |
| * |
| * libraryReference ::= |
| * '(' stringLiteral ')' |
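| *
| * For example (an illustrative sketch; `a` and `b` are hypothetical), the
| * comment
| *
| *     /// Compares [a] with [b].
| *     ///
| *     /// ```dart
| *     /// print(values[0]); // ignored: inside a fenced code block
| *     /// ```
| *
| * yields two references, `a` and `b`; the bracketed index inside the fenced
| * block is skipped.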
| */ |
| List<CommentReference> parseCommentReferences( |
| List<DocumentationCommentToken> tokens) { |
| List<CommentReference> references = <CommentReference>[]; |
| bool isInGitHubCodeBlock = false; |
| for (DocumentationCommentToken token in tokens) { |
| String comment = token.lexeme; |
| // Skip GitHub code blocks. |
| // https://help.github.com/articles/creating-and-highlighting-code-blocks/ |
| if (tokens.length != 1) { |
| if (comment.indexOf('```') != -1) { |
| isInGitHubCodeBlock = !isInGitHubCodeBlock; |
| } |
| if (isInGitHubCodeBlock) { |
| continue; |
| } |
| } |
| // Remove GitHub inline code.
| comment = _removeGitHubInlineCode(comment); |
| // Find references. |
| int length = comment.length; |
| List<List<int>> codeBlockRanges = _getCodeBlockRanges(comment); |
| int leftIndex = comment.indexOf('['); |
| while (leftIndex >= 0 && leftIndex + 1 < length) { |
| List<int> range = _findRange(codeBlockRanges, leftIndex); |
| if (range == null) { |
| int nameOffset = token.offset + leftIndex + 1; |
| int rightIndex = comment.indexOf(']', leftIndex); |
| if (rightIndex >= 0) { |
| int firstChar = comment.codeUnitAt(leftIndex + 1); |
| if (firstChar != 0x27 && firstChar != 0x22) { |
| if (_isLinkText(comment, rightIndex)) { |
| // TODO(brianwilkerson) Handle the case where there's a library |
| // URI in the link text. |
| } else { |
| CommentReference reference = parseCommentReference( |
| comment.substring(leftIndex + 1, rightIndex), nameOffset); |
| if (reference != null) { |
| references.add(reference); |
| token.references.add(reference.beginToken); |
| } |
| } |
| } |
| } else { |
| // The terminating ']' has not been typed yet.
| int charAfterLeft = comment.codeUnitAt(leftIndex + 1); |
| Token nameToken; |
| if (Character.isLetterOrDigit(charAfterLeft)) { |
| int nameEnd = StringUtilities.indexOfFirstNotLetterDigit( |
| comment, leftIndex + 1); |
| String name = comment.substring(leftIndex + 1, nameEnd); |
| nameToken = |
| new StringToken(TokenType.IDENTIFIER, name, nameOffset); |
| } else { |
| nameToken = new SyntheticStringToken( |
| TokenType.IDENTIFIER, '', nameOffset); |
| } |
| nameToken.setNext(new SimpleToken(TokenType.EOF, nameToken.end)); |
| references.add(astFactory.commentReference( |
| null, astFactory.simpleIdentifier(nameToken))); |
| token.references.add(nameToken); |
| // next character |
| rightIndex = leftIndex + 1; |
| } |
| leftIndex = comment.indexOf('[', rightIndex); |
| } else { |
| leftIndex = comment.indexOf('[', range[1]); |
| } |
| } |
| } |
| return references; |
| } |
| |
| /** |
| * Parse a compilation unit, starting with the given [token]. Return the |
| * compilation unit that was parsed. |
| */ |
| CompilationUnit parseCompilationUnit(Token token) { |
| _currentToken = token; |
| return parseCompilationUnit2(); |
| } |
| |
| /** |
| * Parse a compilation unit. Return the compilation unit that was parsed. |
| * |
| * Specified: |
| * |
| * compilationUnit ::= |
| * scriptTag? directive* topLevelDeclaration* |
| * |
| * Actual: |
| * |
| * compilationUnit ::= |
| * scriptTag? topLevelElement* |
| * |
| * topLevelElement ::= |
| * directive |
| * | topLevelDeclaration |
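| *
| * For example (an illustrative sketch; the library and member names are
| * hypothetical), a unit such as
| *
| *     #!/usr/bin/env dart
| *     library app;
| *     import 'dart:async';
| *     void main() {}
| *
| * is parsed into a [CompilationUnit] with a script tag, two directives, and
| * one declaration, with recoverable errors reported when directives appear
| * out of order.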
| */ |
| CompilationUnit parseCompilationUnit2() { |
| Token firstToken = _currentToken; |
| ScriptTag scriptTag = null; |
| if (_matches(TokenType.SCRIPT_TAG)) { |
| scriptTag = astFactory.scriptTag(getAndAdvance()); |
| } |
| // |
| // Even though all directives must appear before declarations and must occur |
| // in a given order, we allow directives and declarations to occur in any |
| // order so that we can recover better. |
| // |
| bool libraryDirectiveFound = false; |
| bool partOfDirectiveFound = false; |
| bool partDirectiveFound = false; |
| bool directiveFoundAfterDeclaration = false; |
| List<Directive> directives = <Directive>[]; |
| List<CompilationUnitMember> declarations = <CompilationUnitMember>[]; |
| Token memberStart = _currentToken; |
| TokenType type = _currentToken.type; |
| while (type != TokenType.EOF) { |
| CommentAndMetadata commentAndMetadata = parseCommentAndMetadata(); |
| Keyword keyword = _currentToken.keyword; |
| TokenType nextType = _currentToken.next.type; |
| if ((keyword == Keyword.IMPORT || |
| keyword == Keyword.EXPORT || |
| keyword == Keyword.LIBRARY || |
| keyword == Keyword.PART) && |
| nextType != TokenType.PERIOD && |
| nextType != TokenType.LT && |
| nextType != TokenType.OPEN_PAREN) { |
| Directive parseDirective() { |
| if (keyword == Keyword.IMPORT) { |
| if (partDirectiveFound) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE); |
| } |
| return parseImportDirective(commentAndMetadata); |
| } else if (keyword == Keyword.EXPORT) { |
| if (partDirectiveFound) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE); |
| } |
| return parseExportDirective(commentAndMetadata); |
| } else if (keyword == Keyword.LIBRARY) { |
| if (libraryDirectiveFound) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.MULTIPLE_LIBRARY_DIRECTIVES); |
| } else { |
| if (directives.length > 0) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.LIBRARY_DIRECTIVE_NOT_FIRST); |
| } |
| libraryDirectiveFound = true; |
| } |
| return parseLibraryDirective(commentAndMetadata); |
| } else if (keyword == Keyword.PART) { |
| if (_tokenMatchesString(_peek(), _OF)) { |
| partOfDirectiveFound = true; |
| return _parsePartOfDirective(commentAndMetadata); |
| } else { |
| partDirectiveFound = true; |
| return _parsePartDirective(commentAndMetadata); |
| } |
| } else { |
| // Internal error: this method should not have been invoked if the |
| // current token was something other than one of the above. |
| throw new StateError( |
| "parseDirective invoked in an invalid state (currentToken = $_currentToken)"); |
| } |
| } |
| |
| Directive directive = parseDirective(); |
| if (declarations.length > 0 && !directiveFoundAfterDeclaration) { |
| _reportErrorForToken(ParserErrorCode.DIRECTIVE_AFTER_DECLARATION, |
| directive.beginToken); |
| directiveFoundAfterDeclaration = true; |
| } |
| directives.add(directive); |
| } else if (type == TokenType.SEMICOLON) { |
| // TODO(brianwilkerson) Consider moving this error detection into |
| // _parseCompilationUnitMember (in the places where EXPECTED_EXECUTABLE |
| // is being generated). |
| _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, |
| [_currentToken.lexeme]); |
| _advance(); |
| } else { |
| CompilationUnitMember member = |
| parseCompilationUnitMember(commentAndMetadata); |
| if (member != null) { |
| declarations.add(member); |
| } |
| } |
| if (identical(_currentToken, memberStart)) { |
| _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, |
| [_currentToken.lexeme]); |
| _advance(); |
| while (!_matches(TokenType.EOF) && |
| !_couldBeStartOfCompilationUnitMember()) { |
| _advance(); |
| } |
| } |
| memberStart = _currentToken; |
| type = _currentToken.type; |
| } |
| if (partOfDirectiveFound && directives.length > 1) { |
| // TODO(brianwilkerson) Improve error reporting when both a library and |
| // part-of directive are found. |
| // if (libraryDirectiveFound) { |
| // int directiveCount = directives.length; |
| // for (int i = 0; i < directiveCount; i++) { |
| // Directive directive = directives[i]; |
| // if (directive is PartOfDirective) { |
| // _reportErrorForToken( |
| // ParserErrorCode.PART_OF_IN_LIBRARY, directive.partKeyword); |
| // } |
| // } |
| // } else { |
| bool firstPartOf = true; |
| int directiveCount = directives.length; |
| for (int i = 0; i < directiveCount; i++) { |
| Directive directive = directives[i]; |
| if (directive is PartOfDirective) { |
| if (firstPartOf) { |
| firstPartOf = false; |
| } else { |
| _reportErrorForToken(ParserErrorCode.MULTIPLE_PART_OF_DIRECTIVES, |
| directive.partKeyword); |
| } |
| } else { |
| _reportErrorForToken(ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART, |
| directives[i].keyword); |
| } |
| // } |
| } |
| } |
| return astFactory.compilationUnit( |
| firstToken, scriptTag, directives, declarations, _currentToken); |
| } |
| |
| /** |
| * Parse a compilation unit member. The [commentAndMetadata] is the metadata |
| * to be associated with the member. Return the compilation unit member that |
| * was parsed, or `null` if what was parsed could not be represented as a |
| * compilation unit member. |
| * |
| * compilationUnitMember ::= |
| * classDefinition |
| * | functionTypeAlias |
| * | external functionSignature |
| * | external getterSignature |
| * | external setterSignature |
| * | functionSignature functionBody |
| * | returnType? getOrSet identifier formalParameterList functionBody |
| * | (final | const) type? staticFinalDeclarationList ';' |
| * | variableDeclaration ';' |
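| *
| * For example (an illustrative sketch; the names are hypothetical), each of
| * the following is parsed by this method as a single member:
| *
| *     class Cache {}
| *     typedef int Compare(int a, int b);
| *     external int read();
| *     String get label => 'label';
| *     const int limit = 10;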
| */ |
| CompilationUnitMember parseCompilationUnitMember( |
| CommentAndMetadata commentAndMetadata) { |
| Modifiers modifiers = parseModifiers(); |
| Keyword keyword = _currentToken.keyword; |
| if (keyword == Keyword.CLASS) { |
| return parseClassDeclaration( |
| commentAndMetadata, _validateModifiersForClass(modifiers)); |
| } |
| Token next = _peek(); |
| TokenType nextType = next.type; |
| if (keyword == Keyword.TYPEDEF && |
| nextType != TokenType.PERIOD && |
| nextType != TokenType.LT && |
| nextType != TokenType.OPEN_PAREN) { |
| _validateModifiersForTypedef(modifiers); |
| return parseTypeAlias(commentAndMetadata); |
| } else if (keyword == Keyword.ENUM) { |
| _validateModifiersForEnum(modifiers); |
| return parseEnumDeclaration(commentAndMetadata); |
| } else if (keyword == Keyword.VOID) { |
| TypeName returnType = astFactory.typeName( |
| astFactory.simpleIdentifier(getAndAdvance()), null); |
| keyword = _currentToken.keyword; |
| next = _peek(); |
| if ((keyword == Keyword.GET || keyword == Keyword.SET) && |
| _tokenMatchesIdentifier(next)) { |
| _validateModifiersForTopLevelFunction(modifiers); |
| return parseFunctionDeclaration( |
| commentAndMetadata, modifiers.externalKeyword, returnType); |
| } else if (keyword == Keyword.OPERATOR && _isOperator(next)) { |
| _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken); |
| return _convertToFunctionDeclaration(_parseOperatorAfterKeyword( |
| commentAndMetadata, |
| modifiers.externalKeyword, |
| returnType, |
| getAndAdvance())); |
| } else if (_matchesIdentifier() && |
| next.matchesAny(const <TokenType>[ |
| TokenType.OPEN_PAREN, |
| TokenType.OPEN_CURLY_BRACKET, |
| TokenType.FUNCTION, |
| TokenType.LT |
| ])) { |
| _validateModifiersForTopLevelFunction(modifiers); |
| return parseFunctionDeclaration( |
| commentAndMetadata, modifiers.externalKeyword, returnType); |
| } else { |
| // |
| // We have found an error of some kind. Try to recover. |
| // |
| if (_matchesIdentifier()) { |
| if (next.matchesAny(const <TokenType>[ |
| TokenType.EQ, |
| TokenType.COMMA, |
| TokenType.SEMICOLON |
| ])) { |
| // |
| // We appear to have a variable declaration with a type of "void". |
| // |
| _reportErrorForNode(ParserErrorCode.VOID_VARIABLE, returnType); |
| return astFactory.topLevelVariableDeclaration( |
| commentAndMetadata.comment, |
| commentAndMetadata.metadata, |
| parseVariableDeclarationListAfterType(null, |
| _validateModifiersForTopLevelVariable(modifiers), null), |
| _expect(TokenType.SEMICOLON)); |
| } |
| } |
| _reportErrorForToken( |
| ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken); |
| return null; |
| } |
| } else if ((keyword == Keyword.GET || keyword == Keyword.SET) && |
| _tokenMatchesIdentifier(next)) { |
| _validateModifiersForTopLevelFunction(modifiers); |
| return parseFunctionDeclaration( |
| commentAndMetadata, modifiers.externalKeyword, null); |
| } else if (keyword == Keyword.OPERATOR && _isOperator(next)) { |
| _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken); |
| return _convertToFunctionDeclaration(_parseOperatorAfterKeyword( |
| commentAndMetadata, |
| modifiers.externalKeyword, |
| null, |
| getAndAdvance())); |
| } else if (!_matchesIdentifier()) { |
| Token keyword = modifiers.varKeyword; |
| if (keyword == null) { |
| keyword = modifiers.finalKeyword; |
| } |
| if (keyword == null) { |
| keyword = modifiers.constKeyword; |
| } |
| if (keyword != null) { |
| // |
| // We appear to have found an incomplete top-level variable declaration. |
| // |
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); |
| VariableDeclaration variable = astFactory.variableDeclaration( |
| createSyntheticIdentifier(), null, null); |
| List<VariableDeclaration> variables = <VariableDeclaration>[variable]; |
| return astFactory.topLevelVariableDeclaration( |
| commentAndMetadata.comment, |
| commentAndMetadata.metadata, |
| astFactory.variableDeclarationList( |
| null, null, keyword, null, variables), |
| _expect(TokenType.SEMICOLON)); |
| } |
| _reportErrorForToken(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken); |
| return null; |
| } else if (_isPeekGenericTypeParametersAndOpenParen()) { |
| return parseFunctionDeclaration( |
| commentAndMetadata, modifiers.externalKeyword, null); |
| } else if (_tokenMatches(next, TokenType.OPEN_PAREN)) { |
| TypeName returnType = _parseOptionalTypeNameComment(); |
| _validateModifiersForTopLevelFunction(modifiers); |
| return parseFunctionDeclaration( |
| commentAndMetadata, modifiers.externalKeyword, returnType); |
| } else if (next.matchesAny(const <TokenType>[ |
| TokenType.EQ, |
| TokenType.COMMA, |
| TokenType.SEMICOLON |
| ])) { |
| if (modifiers.constKeyword == null && |
| modifiers.finalKeyword == null && |
| modifiers.varKeyword == null) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE); |
| } |
| return astFactory.topLevelVariableDeclaration( |
| commentAndMetadata.comment, |
| commentAndMetadata.metadata, |
| parseVariableDeclarationListAfterType( |
| null, _validateModifiersForTopLevelVariable(modifiers), null), |
| _expect(TokenType.SEMICOLON)); |
| } |
| TypeAnnotation returnType = parseReturnType(false); |
| keyword = _currentToken.keyword; |
| next = _peek(); |
| if ((keyword == Keyword.GET || keyword == Keyword.SET) && |
| _tokenMatchesIdentifier(next)) { |
| _validateModifiersForTopLevelFunction(modifiers); |
| return parseFunctionDeclaration( |
| commentAndMetadata, modifiers.externalKeyword, returnType); |
| } else if (keyword == Keyword.OPERATOR && _isOperator(next)) { |
| _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken); |
| return _convertToFunctionDeclaration(_parseOperatorAfterKeyword( |
| commentAndMetadata, |
| modifiers.externalKeyword, |
| returnType, |
| getAndAdvance())); |
| } else if (_matches(TokenType.AT)) { |
| return astFactory.topLevelVariableDeclaration( |
| commentAndMetadata.comment, |
| commentAndMetadata.metadata, |
| parseVariableDeclarationListAfterType(null, |
| _validateModifiersForTopLevelVariable(modifiers), returnType), |
| _expect(TokenType.SEMICOLON)); |
| } else if (!_matchesIdentifier()) { |
| // TODO(brianwilkerson) Generalize this error. We could also be parsing a |
| // top-level variable at this point. |
| _reportErrorForToken(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken); |
| Token semicolon; |
| if (_matches(TokenType.SEMICOLON)) { |
| semicolon = getAndAdvance(); |
| } else { |
| semicolon = _createSyntheticToken(TokenType.SEMICOLON); |
| } |
| VariableDeclaration variable = astFactory.variableDeclaration( |
| createSyntheticIdentifier(), null, null); |
| List<VariableDeclaration> variables = <VariableDeclaration>[variable]; |
| return astFactory.topLevelVariableDeclaration( |
| commentAndMetadata.comment, |
| commentAndMetadata.metadata, |
| astFactory.variableDeclarationList( |
| null, null, null, returnType, variables), |
| semicolon); |
| } else if (next.matchesAny(const <TokenType>[ |
| TokenType.OPEN_PAREN, |
| TokenType.FUNCTION, |
| TokenType.OPEN_CURLY_BRACKET, |
| TokenType.LT |
| ])) { |
| _validateModifiersForTopLevelFunction(modifiers); |
| return parseFunctionDeclaration( |
| commentAndMetadata, modifiers.externalKeyword, returnType); |
| } |
| return astFactory.topLevelVariableDeclaration( |
| commentAndMetadata.comment, |
| commentAndMetadata.metadata, |
| parseVariableDeclarationListAfterType( |
| null, _validateModifiersForTopLevelVariable(modifiers), returnType), |
| _expect(TokenType.SEMICOLON)); |
| } |
| |
| /** |
| * Parse a conditional expression. Return the conditional expression that was |
| * parsed. |
| * |
| * conditionalExpression ::= |
| * ifNullExpression ('?' expressionWithoutCascade ':' expressionWithoutCascade)? |
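| *
| * For example (an illustrative sketch; the identifiers are hypothetical),
| * the following is parsed into a [ConditionalExpression]:
| *
| *     items.isEmpty ? 'none' : items.first
| *
| * When no `?` follows the condition, the if-null expression is returned
| * unchanged.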
| */ |
| Expression parseConditionalExpression() { |
| Expression condition = parseIfNullExpression(); |
| if (_currentToken.type != TokenType.QUESTION) { |
| return condition; |
| } |
| Token question = getAndAdvance(); |
| Expression thenExpression = parseExpressionWithoutCascade(); |
| Token colon = _expect(TokenType.COLON); |
| Expression elseExpression = parseExpressionWithoutCascade(); |
| return astFactory.conditionalExpression( |
| condition, question, thenExpression, colon, elseExpression); |
| } |
| |
| /** |
| * Parse a configuration in either an import or export directive. |
| * |
| * This method assumes that the current token matches `Keyword.IF`. |
| * |
| * configuration ::= |
| * 'if' '(' test ')' uri |
| * |
| * test ::= |
| * dottedName ('==' stringLiteral)? |
| * |
| * dottedName ::= |
| * identifier ('.' identifier)* |
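| *
| * For example (an illustrative sketch; the URIs are hypothetical), this
| * method parses the configuration in an import such as
| *
| *     import 'src/hw_none.dart' if (dart.library.io) 'src/hw_io.dart';
| *
| * where `dart.library.io` is the dotted name and `'src/hw_io.dart'` is the
| * configured URI.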
| */ |
| Configuration parseConfiguration() { |
| Token ifKeyword = getAndAdvance(); |
| Token leftParenthesis = _expect(TokenType.OPEN_PAREN); |
| DottedName name = parseDottedName(); |
| Token equalToken = null; |
| StringLiteral value = null; |
| if (_matches(TokenType.EQ_EQ)) { |
| equalToken = getAndAdvance(); |
| value = parseStringLiteral(); |
| if (value is StringInterpolation) { |
| _reportErrorForNode( |
| ParserErrorCode.INVALID_LITERAL_IN_CONFIGURATION, value); |
| } |
| } |
| Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); |
| StringLiteral libraryUri = _parseUri(); |
| return astFactory.configuration(ifKeyword, leftParenthesis, name, |
| equalToken, value, rightParenthesis, libraryUri); |
| } |
| |
| /** |
| * Parse a const expression. Return the const expression that was parsed. |
| * |
| * This method assumes that the current token matches `Keyword.CONST`. |
| * |
| * constExpression ::= |
| * instanceCreationExpression |
| * | listLiteral |
| * | mapLiteral |
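| *
| * For example (an illustrative sketch; `Point` is hypothetical), this
| * method, which starts at the `const` keyword, parses each of the following
| * into a list literal, a map literal, and an instance creation expression
| * respectively:
| *
| *     const [1, 2, 3]
| *     const {'a': 1}
| *     const Point(0, 0)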
| */ |
| Expression parseConstExpression() { |
| Token keyword = getAndAdvance(); |
| TokenType type = _currentToken.type; |
| if (type == TokenType.LT || _injectGenericCommentTypeList()) { |
| return parseListOrMapLiteral(keyword); |
| } else if (type == TokenType.OPEN_SQUARE_BRACKET || |
| type == TokenType.INDEX) { |
| return parseListLiteral(keyword, null); |
| } else if (type == TokenType.OPEN_CURLY_BRACKET) { |
| return parseMapLiteral(keyword, null); |
| } |
| return parseInstanceCreationExpression(keyword); |
| } |
| |
| /** |
| * Parse a field initializer within a constructor. The flag [hasThis] should |
| * be true if the current token is `this`. Return the field initializer that |
| * was parsed. |
| * |
| * fieldInitializer: |
| * ('this' '.')? identifier '=' conditionalExpression cascadeSection* |
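| *
| * For example (an illustrative sketch; the class and field names are
| * hypothetical), this method parses the `this.area = side * side` portion of
| *
| *     Square(int side) : this.area = side * side;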
| */ |
| ConstructorFieldInitializer parseConstructorFieldInitializer(bool hasThis) { |
| Token keywordToken = null; |
| Token period = null; |
| if (hasThis) { |
| keywordToken = getAndAdvance(); |
| period = _expect(TokenType.PERIOD); |
| } |
| SimpleIdentifier fieldName = parseSimpleIdentifier(); |
| Token equals = null; |
| TokenType type = _currentToken.type; |
| if (type == TokenType.EQ) { |
| equals = getAndAdvance(); |
| } else { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.MISSING_ASSIGNMENT_IN_INITIALIZER); |
| Keyword keyword = _currentToken.keyword; |
| if (keyword != Keyword.THIS && |
| keyword != Keyword.SUPER && |
| type != TokenType.OPEN_CURLY_BRACKET && |
| type != TokenType.FUNCTION) { |
| equals = _createSyntheticToken(TokenType.EQ); |
| } else { |
| return astFactory.constructorFieldInitializer( |
| keywordToken, |
| period, |
| fieldName, |
| _createSyntheticToken(TokenType.EQ), |
| createSyntheticIdentifier()); |
| } |
| } |
| bool wasInInitializer = _inInitializer; |
| _inInitializer = true; |
| try { |
| Expression expression = parseConditionalExpression(); |
| if (_matches(TokenType.PERIOD_PERIOD)) { |
| List<Expression> cascadeSections = <Expression>[]; |
| do { |
| Expression section = parseCascadeSection(); |
| if (section != null) { |
| cascadeSections.add(section); |
| } |
| } while (_matches(TokenType.PERIOD_PERIOD)); |
| expression = astFactory.cascadeExpression(expression, cascadeSections); |
| } |
| return astFactory.constructorFieldInitializer( |
| keywordToken, period, fieldName, equals, expression); |
| } finally { |
| _inInitializer = wasInInitializer; |
| } |
| } |
| |
| /** |
| * Parse the name of a constructor. Return the constructor name that was |
| * parsed. |
| * |
| * constructorName: |
| * type ('.' identifier)? |
| */ |
| ConstructorName parseConstructorName() { |
| TypeName type = parseTypeName(false); |
| Token period = null; |
| SimpleIdentifier name = null; |
| if (_matches(TokenType.PERIOD)) { |
| period = getAndAdvance(); |
| name = parseSimpleIdentifier(); |
| } |
| return astFactory.constructorName(type, period, name); |
| } |
| |
| /** |
| * Parse a continue statement. Return the continue statement that was parsed. |
| * |
| * This method assumes that the current token matches `Keyword.CONTINUE`. |
| * |
| * continueStatement ::= |
| * 'continue' identifier? ';' |
| */ |
| Statement parseContinueStatement() { |
| Token continueKeyword = getAndAdvance(); |
| if (!_inLoop && !_inSwitch) { |
| _reportErrorForToken( |
| ParserErrorCode.CONTINUE_OUTSIDE_OF_LOOP, continueKeyword); |
| } |
| SimpleIdentifier label = null; |
| if (_matchesIdentifier()) { |
| label = _parseSimpleIdentifierUnchecked(); |
| } |
| if (_inSwitch && !_inLoop && label == null) { |
| _reportErrorForToken( |
| ParserErrorCode.CONTINUE_WITHOUT_LABEL_IN_CASE, continueKeyword); |
| } |
| Token semicolon = _expect(TokenType.SEMICOLON); |
| return astFactory.continueStatement(continueKeyword, label, semicolon); |
| } |
| |
| /** |
| * Parse a directive. The [commentAndMetadata] is the metadata to be |
| * associated with the directive. Return the directive that was parsed. |
| * |
| * directive ::= |
| * exportDirective |
| * | libraryDirective |
| * | importDirective |
| * | partDirective |
| */ |
| Directive parseDirective(CommentAndMetadata commentAndMetadata) { |
| if (_matchesKeyword(Keyword.IMPORT)) { |
| return parseImportDirective(commentAndMetadata); |
| } else if (_matchesKeyword(Keyword.EXPORT)) { |
| return parseExportDirective(commentAndMetadata); |
| } else if (_matchesKeyword(Keyword.LIBRARY)) { |
| return parseLibraryDirective(commentAndMetadata); |
| } else if (_matchesKeyword(Keyword.PART)) { |
| return parsePartOrPartOfDirective(commentAndMetadata); |
| } else { |
| // Internal error: this method should not have been invoked if the current |
| // token was something other than one of the above. |
| throw new StateError( |
| "parseDirective invoked in an invalid state; currentToken = $_currentToken"); |
| } |
| } |
| |
| /** |
| * Parse the script tag and directives in a compilation unit, starting with |
| * the given [token], until the first non-directive is encountered. The |
| * remainder of the compilation unit will not be parsed. Specifically, if |
| * there are directives later in the file, they will not be parsed. Return the |
| * compilation unit that was parsed. |
| */ |
| CompilationUnit parseDirectives(Token token) { |
| _currentToken = token; |
| return parseDirectives2(); |
| } |
| |
| /** |
| * Parse the script tag and directives in a compilation unit until the first |
| * non-directive is encountered. Return the compilation unit that was parsed. |
| * |
| * compilationUnit ::= |
| * scriptTag? directive* |
| */ |
| CompilationUnit parseDirectives2() { |
| Token firstToken = _currentToken; |
| ScriptTag scriptTag = null; |
| if (_matches(TokenType.SCRIPT_TAG)) { |
| scriptTag = astFactory.scriptTag(getAndAdvance()); |
| } |
| List<Directive> directives = <Directive>[]; |
| while (!_matches(TokenType.EOF)) { |
| CommentAndMetadata commentAndMetadata = parseCommentAndMetadata(); |
| Keyword keyword = _currentToken.keyword; |
| TokenType type = _peek().type; |
| if ((keyword == Keyword.IMPORT || |
| keyword == Keyword.EXPORT || |
| keyword == Keyword.LIBRARY || |
| keyword == Keyword.PART) && |
| type != TokenType.PERIOD && |
| type != TokenType.LT && |
| type != TokenType.OPEN_PAREN) { |
| directives.add(parseDirective(commentAndMetadata)); |
| } else if (_matches(TokenType.SEMICOLON)) { |
| _advance(); |
| } else { |
| while (!_matches(TokenType.EOF)) { |
| _advance(); |
| } |
| return astFactory.compilationUnit( |
| firstToken, scriptTag, directives, null, _currentToken); |
| } |
| } |
| return astFactory.compilationUnit( |
| firstToken, scriptTag, directives, null, _currentToken); |
| } |
| |
| /** |
| * Parse a documentation comment based on the given list of documentation |
| * comment tokens. Return the documentation comment that was parsed, or `null` |
| * if there was no comment. |
| * |
| * documentationComment ::= |
| * multiLineComment? |
| * | singleLineComment* |
| */ |
| Comment parseDocumentationComment(List<DocumentationCommentToken> tokens) { |
| if (tokens == null) { |
| return null; |
| } |
| List<CommentReference> references = parseCommentReferences(tokens); |
| return astFactory.documentationComment(tokens, references); |
| } |
| |
| /** |
| * Parse the documentation comment tokens preceding the current token. Return
| * the tokens that were parsed, or `null` if there was no documentation
| * comment.
| * |
| * documentationComment ::= |
| * multiLineComment? |
| * | singleLineComment* |
| */ |
| List<DocumentationCommentToken> parseDocumentationCommentTokens() { |
| List<DocumentationCommentToken> tokens = <DocumentationCommentToken>[]; |
| CommentToken commentToken = _currentToken.precedingComments; |
| while (commentToken != null) { |
| if (commentToken is DocumentationCommentToken) { |
| if (tokens.isNotEmpty) { |
| if (commentToken.type == TokenType.SINGLE_LINE_COMMENT) { |
| if (tokens[0].type != TokenType.SINGLE_LINE_COMMENT) { |
| tokens.clear(); |
| } |
| } else { |
| tokens.clear(); |
| } |
| } |
| tokens.add(commentToken); |
| } |
| commentToken = commentToken.next; |
| } |
| return tokens.isEmpty ? null : tokens; |
| } |
| |
| /** |
| * Parse a do statement. Return the do statement that was parsed. |
| * |
| * This method assumes that the current token matches `Keyword.DO`. |
| * |
| * doStatement ::= |
| * 'do' statement 'while' '(' expression ')' ';' |
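| *
| * For example (an illustrative sketch; `i` and `limit` are hypothetical):
| *
| *     do {
| *       i++;
| *     } while (i < limit);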
| */ |
| Statement parseDoStatement() { |
| bool wasInLoop = _inLoop; |
| _inLoop = true; |
| try { |
| Token doKeyword = getAndAdvance(); |
| Statement body = parseStatement2(); |
| Token whileKeyword = _expectKeyword(Keyword.WHILE); |
| Token leftParenthesis = _expect(TokenType.OPEN_PAREN); |
| Expression condition = parseExpression2(); |
| Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); |
| Token semicolon = _expect(TokenType.SEMICOLON); |
| return astFactory.doStatement(doKeyword, body, whileKeyword, |
| leftParenthesis, condition, rightParenthesis, semicolon); |
| } finally { |
| _inLoop = wasInLoop; |
| } |
| } |
| |
| /** |
| * Parse a dotted name. Return the dotted name that was parsed. |
| * |
| * dottedName ::= |
| * identifier ('.' identifier)* |
| */ |
| DottedName parseDottedName() { |
| List<SimpleIdentifier> components = <SimpleIdentifier>[ |
| parseSimpleIdentifier() |
| ]; |
| while (_optional(TokenType.PERIOD)) { |
| components.add(parseSimpleIdentifier()); |
| } |
| return astFactory.dottedName(components); |
| } |
| |
| /** |
| * Parse an empty statement. Return the empty statement that was parsed. |
| * |
| * This method assumes that the current token matches `TokenType.SEMICOLON`. |
| * |
| * emptyStatement ::= |
| * ';' |
| */ |
| Statement parseEmptyStatement() => astFactory.emptyStatement(getAndAdvance()); |
| |
| /** |
| * Parse an enum declaration. The [commentAndMetadata] is the metadata to be |
| * associated with the member. Return the enum declaration that was parsed. |
| * |
| * This method assumes that the current token matches `Keyword.ENUM`. |
| * |
| * enumType ::= |
| * metadata 'enum' id '{' id (',' id)* (',')? '}' |
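| *
| * For example (an illustrative sketch; the enum is hypothetical):
| *
| *     enum Color { red, green, blue }
| *
| * A trailing comma after the last constant is accepted.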
| */ |
| EnumDeclaration parseEnumDeclaration(CommentAndMetadata commentAndMetadata) { |
| Token keyword = getAndAdvance(); |
| SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true); |
| Token leftBracket = null; |
| List<EnumConstantDeclaration> constants = <EnumConstantDeclaration>[]; |
| Token rightBracket = null; |
| if (_matches(TokenType.OPEN_CURLY_BRACKET)) { |
| leftBracket = getAndAdvance(); |
| if (_matchesIdentifier() || _matches(TokenType.AT)) { |
| constants.add(_parseEnumConstantDeclaration()); |
| } else if (_matches(TokenType.COMMA) && |
| _tokenMatchesIdentifier(_peek())) { |
| constants.add(_parseEnumConstantDeclaration()); |
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); |
| } else { |
| constants.add(_parseEnumConstantDeclaration()); |
| _reportErrorForCurrentToken(ParserErrorCode.EMPTY_ENUM_BODY); |
| } |
| while (_optional(TokenType.COMMA)) { |
| if (_matches(TokenType.CLOSE_CURLY_BRACKET)) { |
| break; |
| } |
| constants.add(_parseEnumConstantDeclaration()); |
| } |
| rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET); |
| } else { |
| leftBracket = _createSyntheticToken(TokenType.OPEN_CURLY_BRACKET); |
| rightBracket = _createSyntheticToken(TokenType.CLOSE_CURLY_BRACKET); |
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_ENUM_BODY); |
| } |
| return astFactory.enumDeclaration( |
| commentAndMetadata.comment, |
| commentAndMetadata.metadata, |
| keyword, |
| name, |
| leftBracket, |
| constants, |
| rightBracket); |
| } |
| |
| /** |
| * Parse an equality expression. Return the equality expression that was |
| * parsed. |
| * |
| * equalityExpression ::= |
| * relationalExpression (equalityOperator relationalExpression)? |
| * | 'super' equalityOperator relationalExpression |
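| *
| * For example (an illustrative sketch; the identifiers are hypothetical),
| * `a == b` is parsed into a [BinaryExpression]; a chained form such as
| * `a == b == c` is also parsed, but an error is reported because an
| * equality expression cannot be the operand of another equality operator.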
| */ |
| Expression parseEqualityExpression() { |
| Expression expression; |
| if (_currentToken.keyword == Keyword.SUPER && |
| _currentToken.next.type.isEqualityOperator) { |
| expression = astFactory.superExpression(getAndAdvance()); |
| } else { |
| expression = parseRelationalExpression(); |
| } |
| bool leftEqualityExpression = false; |
| while (_currentToken.type.isEqualityOperator) { |
| if (leftEqualityExpression) { |
| _reportErrorForNode( |
| ParserErrorCode.EQUALITY_CANNOT_BE_EQUALITY_OPERAND, expression); |
| } |
| expression = astFactory.binaryExpression( |
| expression, getAndAdvance(), parseRelationalExpression()); |
| leftEqualityExpression = true; |
| } |
| return expression; |
| } |
| |
| /** |
| * Parse an export directive. The [commentAndMetadata] is the metadata to be |
| * associated with the directive. Return the export directive that was parsed. |
| * |
| * This method assumes that the current token matches `Keyword.EXPORT`. |
| * |
| * exportDirective ::= |
| * metadata 'export' stringLiteral configuration* combinator* ';'
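| *
| * For example (an illustrative sketch; the URI and names are hypothetical):
| *
| *     export 'src/core.dart' show CoreList hide CoreMap;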
| */ |
| ExportDirective parseExportDirective(CommentAndMetadata commentAndMetadata) { |
| Token exportKeyword = getAndAdvance(); |
| StringLiteral libraryUri = _parseUri(); |
| List<Configuration> configurations = _parseConfigurations(); |
| List<Combinator> combinators = parseCombinators(); |
| Token semicolon = _expect(TokenType.SEMICOLON); |
| return astFactory.exportDirective( |
| commentAndMetadata.comment, |
| commentAndMetadata.metadata, |
| exportKeyword, |
| libraryUri, |
| configurations, |
| combinators, |
| semicolon); |
| } |
| |
| /** |
| * Parse an expression, starting with the given [token]. Return the expression |
| * that was parsed, or `null` if the tokens do not represent a recognizable |
| * expression. |
| */ |
| Expression parseExpression(Token token) { |
| _currentToken = token; |
| return parseExpression2(); |
| } |
| |
| /** |
| * Parse an expression that might contain a cascade. Return the expression |
| * that was parsed. |
| * |
| * expression ::= |
| * assignableExpression assignmentOperator expression |
| * | conditionalExpression cascadeSection* |
| * | throwExpression |
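| *
| * For example (an illustrative sketch; the identifiers are hypothetical),
| * each of the following is parsed by this method:
| *
| *     total = compute()
| *     buffer..write('a')..write('b')
| *     throw new StateError('unreachable')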
| */ |
| Expression parseExpression2() { |
| Keyword keyword = _currentToken.keyword; |
| if (keyword == Keyword.THROW) { |
| return parseThrowExpression(); |
| } else if (keyword == Keyword.RETHROW) { |
| // TODO(brianwilkerson) Rethrow is a statement again. |
| return parseRethrowExpression(); |
| } |
| // |
| // assignableExpression is a subset of conditionalExpression, so we can |
| // parse a conditional expression and then determine whether it is followed |
| // by an assignmentOperator, checking for conformance to the restricted |
| // grammar after making that determination. |
| // |
| Expression expression = parseConditionalExpression(); |
| TokenType type = _currentToken.type; |
| if (type == TokenType.PERIOD_PERIOD) { |
| List<Expression> cascadeSections = <Expression>[]; |
| do { |
| Expression section = parseCascadeSection(); |
| if (section != null) { |
| cascadeSections.add(section); |
| } |
| } while (_currentToken.type == TokenType.PERIOD_PERIOD); |
| return astFactory.cascadeExpression(expression, cascadeSections); |
| } else if (type.isAssignmentOperator) { |
| Token operator = getAndAdvance(); |
| _ensureAssignable(expression); |
| return astFactory.assignmentExpression( |
| expression, operator, parseExpression2()); |
| } |
| return expression; |
| } |
| |
| /** |
| * Parse a list of expressions. Return the expression that was parsed. |
| * |
| * expressionList ::= |
| * expression (',' expression)* |
| */ |
| List<Expression> parseExpressionList() { |
| List<Expression> expressions = <Expression>[parseExpression2()]; |
| while (_optional(TokenType.COMMA)) { |
| expressions.add(parseExpression2()); |
| } |
| return expressions; |
| } |
| |
| /** |
| * Parse an expression that does not contain any cascades. Return the |
| * expression that was parsed. |
| * |
| * expressionWithoutCascade ::= |
| * assignableExpression assignmentOperator expressionWithoutCascade |
| * | conditionalExpression |
| * | throwExpressionWithoutCascade |
| */ |
| Expression parseExpressionWithoutCascade() { |
| if (_matchesKeyword(Keyword.THROW)) { |
| return parseThrowExpressionWithoutCascade(); |
| } else if (_matchesKeyword(Keyword.RETHROW)) { |
| return parseRethrowExpression(); |
| } |
| // |
| // assignableExpression is a subset of conditionalExpression, so we can |
| // parse a conditional expression and then determine whether it is followed |
| // by an assignmentOperator, checking for conformance to the restricted |
| // grammar after making that determination. |
| // |
| Expression expression = parseConditionalExpression(); |
| if (_currentToken.type.isAssignmentOperator) { |
| Token operator = getAndAdvance(); |
| _ensureAssignable(expression); |
| expression = astFactory.assignmentExpression( |
| expression, operator, parseExpressionWithoutCascade()); |
| } |
| return expression; |
| } |
| |
| /** |
| * Parse a class extends clause. Return the class extends clause that was |
| * parsed. |
| * |
| * This method assumes that the current token matches `Keyword.EXTENDS`. |
| * |
| * classExtendsClause ::= |
| * 'extends' type |
| */ |
| ExtendsClause parseExtendsClause() { |
| Token keyword = getAndAdvance(); |
| TypeName superclass = parseTypeName(false); |
| _mustNotBeNullable(superclass, ParserErrorCode.NULLABLE_TYPE_IN_EXTENDS); |
| return astFactory.extendsClause(keyword, superclass); |
| } |
| |
| /** |
| * Parse the 'final', 'const', 'var' or type preceding a variable declaration. |
| * The [optional] is `true` if the keyword and type are optional. Return the |
| * 'final', 'const', 'var' or type that was parsed. |
| * |
| * finalConstVarOrType ::= |
| * 'final' type? |
| * | 'const' type? |
| * | 'var' |
| * | type |
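| *
| * For example (an illustrative sketch; the identifiers are hypothetical),
| * this method consumes only the leading keyword and/or type in declarations
| * such as
| *
| *     final int count = 0;
| *     var name;
| *     String label;
| *
| * returning, respectively, `final` with type `int`, `var` with no type, and
| * no keyword with type `String`.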
| */ |
| FinalConstVarOrType parseFinalConstVarOrType(bool optional, |
| {bool inFunctionType: false}) { |
| Token keywordToken = null; |
| TypeAnnotation type = null; |
| Keyword keyword = _currentToken.keyword; |
| if (keyword == Keyword.FINAL || keyword == Keyword.CONST) { |
| keywordToken = getAndAdvance(); |
| if (_isTypedIdentifier(_currentToken)) { |
| type = parseTypeAnnotation(false); |
| } else { |
| // Support `final/*=T*/ x;` |
| type = _parseOptionalTypeNameComment(); |
| } |
| } else if (keyword == Keyword.VAR) { |
| keywordToken = getAndAdvance(); |
| // Support `var/*=T*/ x;` |
| type = _parseOptionalTypeNameComment(); |
| if (type != null) { |
| // Clear the keyword to prevent an error. |
| keywordToken = null; |
| } |
| } else if (_isTypedIdentifier(_currentToken)) { |
| type = parseReturnType(false); |
| } else if (inFunctionType && _matchesIdentifier()) { |
| type = parseTypeAnnotation(false); |
| } else if (!optional) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE); |
| } else { |
| // Support parameters such as `(/*=K*/ key, /*=V*/ value)` |
| // This is not supported if the type is required. |
| type = _parseOptionalTypeNameComment(); |
| } |
| return new FinalConstVarOrType(keywordToken, type); |
| } |
| |
| /** |
| * Parse a formal parameter. The [kind] is the kind of parameter being
| * expected based on the presence or absence of group delimiters. Return the
| * formal parameter that was parsed.
| * |
| * defaultFormalParameter ::= |
| * normalFormalParameter ('=' expression)? |
| * |
| * defaultNamedParameter ::= |
| * normalFormalParameter ('=' expression)?
| * | normalFormalParameter (':' expression)?
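| *
| * For example (an illustrative sketch; the parameter names are
| * hypothetical), inside an optional-positional group this method parses
| * `int start = 0`, and inside a named group it parses `int retries: 3` or
| * `int retries = 3`, with the surrounding delimiters determining the [kind]
| * that is passed in.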
| */ |
| FormalParameter parseFormalParameter(ParameterKind kind, |
| {bool inFunctionType: false}) { |
| NormalFormalParameter parameter = |
| parseNormalFormalParameter(inFunctionType: inFunctionType); |
| TokenType type = _currentToken.type; |
| if (type == TokenType.EQ) { |
| if (inFunctionType) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.DEFAULT_VALUE_IN_FUNCTION_TYPE); |
| } |
| Token separator = getAndAdvance(); |
| Expression defaultValue = parseExpression2(); |
| if (kind == ParameterKind.REQUIRED) { |
| _reportErrorForNode( |
| ParserErrorCode.POSITIONAL_PARAMETER_OUTSIDE_GROUP, parameter); |
| kind = ParameterKind.POSITIONAL; |
| } else if (kind == ParameterKind.NAMED && |
| inFunctionType && |
| parameter.identifier == null) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.MISSING_NAME_FOR_NAMED_PARAMETER); |
| } |
| return astFactory.defaultFormalParameter( |
| parameter, kind, separator, defaultValue); |
| } else if (type == TokenType.COLON) { |
| if (inFunctionType) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.DEFAULT_VALUE_IN_FUNCTION_TYPE); |
| } |
| Token separator = getAndAdvance(); |
| Expression defaultValue = parseExpression2(); |
| if (kind == ParameterKind.REQUIRED) { |
| _reportErrorForNode( |
| ParserErrorCode.NAMED_PARAMETER_OUTSIDE_GROUP, parameter); |
| kind = ParameterKind.NAMED; |
| } else if (kind == ParameterKind.POSITIONAL) { |
| _reportErrorForToken( |
| ParserErrorCode.WRONG_SEPARATOR_FOR_POSITIONAL_PARAMETER, |
| separator); |
| } else if (kind == ParameterKind.NAMED && |
| inFunctionType && |
| parameter.identifier == null) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.MISSING_NAME_FOR_NAMED_PARAMETER); |
| } |
| return astFactory.defaultFormalParameter( |
| parameter, kind, separator, defaultValue); |
| } else if (kind != ParameterKind.REQUIRED) { |
| if (kind == ParameterKind.NAMED && |
| inFunctionType && |
| parameter.identifier == null) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.MISSING_NAME_FOR_NAMED_PARAMETER); |
| } |
| return astFactory.defaultFormalParameter(parameter, kind, null, null); |
| } |
| return parameter; |
| } |
| |
| /** |
| * Parse a list of formal parameters. Return the formal parameters that were |
| * parsed. |
| * |
| * formalParameterList ::= |
| * '(' ')' |
| * | '(' normalFormalParameters (',' optionalFormalParameters)? ')' |
| * | '(' optionalFormalParameters ')' |
| * |
| * normalFormalParameters ::= |
| * normalFormalParameter (',' normalFormalParameter)* |
| * |
| * optionalFormalParameters ::= |
| * optionalPositionalFormalParameters |
| * | namedFormalParameters |
| * |
| * optionalPositionalFormalParameters ::= |
| * '[' defaultFormalParameter (',' defaultFormalParameter)* ']' |
| * |
| * namedFormalParameters ::= |
| * '{' defaultNamedParameter (',' defaultNamedParameter)* '}' |
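| *
| * For example (an illustrative sketch; the parameter names are
| * hypothetical):
| *
| *     (int a, int b)
| *     (int a, [int b = 0])
| *     (int a, {int b: 0})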
| */ |
| FormalParameterList parseFormalParameterList({bool inFunctionType: false}) { |
| if (_matches(TokenType.OPEN_PAREN)) { |
| return _parseFormalParameterListUnchecked(inFunctionType: inFunctionType); |
| } |
| // TODO(brianwilkerson) Improve the error message. |
| _reportErrorForCurrentToken( |
| ParserErrorCode.EXPECTED_TOKEN, [TokenType.OPEN_PAREN.lexeme]); |
| // Recovery: Check for an unmatched closing paren and parse parameters until |
| // it is reached. |
| return _parseFormalParameterListAfterParen( |
| _createSyntheticToken(TokenType.OPEN_PAREN)); |
| } |
| |
| /** |
| * Parse a for statement. Return the for statement that was parsed. |
| * |
| * forStatement ::= |
| * 'for' '(' forLoopParts ')' statement |
| * |
| * forLoopParts ::= |
| * forInitializerStatement expression? ';' expressionList? |
| * | declaredIdentifier 'in' expression |
| * | identifier 'in' expression |
| * |
| * forInitializerStatement ::= |
| * localVariableDeclaration ';' |
| * | expression? ';' |
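| *
| * For example (an illustrative sketch; the identifiers are hypothetical),
| * this method parses both forms of loop:
| *
| *     for (int i = 0; i < limit; i++) { work(i); }
| *     for (var item in items) { work(item); }
| *
| * and, inside an asynchronous function, `await for (var item in stream)`.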
| */ |
| Statement parseForStatement() { |
| bool wasInLoop = _inLoop; |
| _inLoop = true; |
| try { |
| Token awaitKeyword = null; |
| if (_matchesString(_AWAIT)) { |
| awaitKeyword = getAndAdvance(); |
| } |
| Token forKeyword = _expectKeyword(Keyword.FOR); |
| Token leftParenthesis = _expect(TokenType.OPEN_PAREN); |
| VariableDeclarationList variableList = null; |
| Expression initialization = null; |
| if (!_matches(TokenType.SEMICOLON)) { |
| CommentAndMetadata commentAndMetadata = parseCommentAndMetadata(); |
| if (_matchesIdentifier() && |
| (_tokenMatchesKeyword(_peek(), Keyword.IN) || |
| _tokenMatches(_peek(), TokenType.COLON))) { |
| SimpleIdentifier variableName = _parseSimpleIdentifierUnchecked(); |
| variableList = astFactory.variableDeclarationList( |
| commentAndMetadata.comment, |
| commentAndMetadata.metadata, |
| null, |
| null, <VariableDeclaration>[ |
| astFactory.variableDeclaration(variableName, null, null) |
| ]); |
| } else if (isInitializedVariableDeclaration()) { |
| variableList = |
| parseVariableDeclarationListAfterMetadata(commentAndMetadata); |
| } else { |
| initialization = parseExpression2(); |
| } |
| TokenType type = _currentToken.type; |
| if (_matchesKeyword(Keyword.IN) || type == TokenType.COLON) { |
| if (type == TokenType.COLON) { |
| _reportErrorForCurrentToken(ParserErrorCode.COLON_IN_PLACE_OF_IN); |
| } |
| DeclaredIdentifier loopVariable = null; |
| SimpleIdentifier identifier = null; |
| if (variableList == null) { |
| // We found: <expression> 'in' |
| _reportErrorForCurrentToken( |
| ParserErrorCode.MISSING_VARIABLE_IN_FOR_EACH); |
| } else { |
| NodeList<VariableDeclaration> variables = variableList.variables; |
| if (variables.length > 1) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.MULTIPLE_VARIABLES_IN_FOR_EACH, |
| [variables.length.toString()]); |
| } |
| VariableDeclaration variable = variables[0]; |
| if (variable.initializer != null) { |
| _reportErrorForCurrentToken( |
| ParserErrorCode.INITIALIZED_VARIABLE_IN_FOR_EACH); |
| } |
| Token keyword = variableList.keyword; |
| TypeAnnotation type = variableList.type; |
| if (keyword != null || type != null) { |
| loopVariable = astFactory.declaredIdentifier( |
| commentAndMetadata.comment, |
| commentAndMetadata.metadata, |
| keyword, |
| type, |
| astFactory.simpleIdentifier(variable.name.token, |
| isDeclaration: true)); |
| } else { |
| if (commentAndMetadata.hasMetadata) { |
| // TODO(jwren) metadata isn't allowed before the identifier in |
| // "identifier in expression", add warning if commentAndMetadata |
| // has content |
| } |
| identifier = variable.name; |
| } |
| } |
| Token inKeyword = getAndAdvance(); |
| Expression iterator = parseExpression2(); |
| Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); |
| Statement body = parseStatement2(); |
| if (loopVariable == null) { |
| return astFactory.forEachStatementWithReference( |
| awaitKeyword, |
| forKeyword, |
| leftParenthesis, |
| identifier, |
| inKeyword, |
| iterator, |
| rightParenthesis, |
| body); |
| } |
| return astFactory.forEachStatementWithDeclaration( |
| awaitKeyword, |
| forKeyword, |
| leftParenthesis, |
| loopVariable, |
| inKeyword, |
| iterator, |
| rightParenthesis, |
| body); |
| } |
| } |
| if (awaitKeyword != null) { |
| _reportErrorForToken( |
| ParserErrorCode.INVALID_AWAIT_IN_FOR, awaitKeyword); |
| } |
| Token leftSeparator = _expect(TokenType.SEMICOLON); |
| Expression condition = null; |
| if (!_matches(TokenType.SEMICOLON)) { |
| condition = parseExpression2(); |
| } |
| Token rightSeparator = _expect(TokenType.SEMICOLON); |
| List<Expression> updaters = null; |
| if (!_matches(TokenType.CLOSE_PAREN)) { |
| updaters = parseExpressionList(); |
| } |
| Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); |
| Statement body = parseStatement2(); |
| return astFactory.forStatement( |
| forKeyword, |
| leftParenthesis, |
| variableList, |
| initialization, |
| leftSeparator, |
| condition, |
| rightSeparator, |
| updaters, |
| rightParenthesis, |
| body); |
| } finally { |
| _inLoop = wasInLoop; |
| } |
| } |
| |
| /** |
| * Parse a function body. The [mayBeEmpty] is `true` if the function body is |
| * allowed to be empty. The [emptyErrorCode] is the error code to report if a
| * function body was expected but not found. The [inExpression] is `true` if the
| * function body is being parsed as part of an expression and therefore does |
| * not have a terminating semicolon. Return the function body that was parsed. |
| * |
| * functionBody ::= |
| * '=>' expression ';' |
| * | block |
| * |
| * functionExpressionBody ::= |
| * '=>' expression |
| * | block |
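| *
| * For example (an illustrative sketch; the identifiers are hypothetical),
| * each of the following is a function body accepted by this method:
| *
| *     => a + b;
| *     { return a + b; }
| *     async { await task; }
| *
| * with the trailing semicolon of the `=>` form omitted when [inExpression]
| * is `true`.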
| */ |
| FunctionBody parseFunctionBody( |
| bool mayBeEmpty, ParserErrorCode emptyErrorCode, bool inExpression) { |
| bool wasInAsync = _inAsync; |
| bool wasInGenerator = _inGenerator; |
| bool wasInLoop = _inLoop; |
| bool wasInSwitch = _inSwitch; |
| _inAsync = false; |
| _inGenerator = false; |
| _inLoop = false; |
| _inSwitch = false; |
| try { |
| TokenType type = _currentToken.type; |
| if (type == TokenType.SEMICOLON) { |
| if (! |