blob: c98702ebd7e3c42137870f3a41559953d19dfb8c [file] [log] [blame]
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library _fe_analyzer_shared.parser.parser;
import 'package:_fe_analyzer_shared/src/parser/type_info_impl.dart';
import '../messages/codes.dart' as codes;
import '../scanner/scanner.dart' show ErrorToken, Token;
import '../scanner/token.dart'
show
ASSIGNMENT_PRECEDENCE,
BeginToken,
CASCADE_PRECEDENCE,
EQUALITY_PRECEDENCE,
Keyword,
POSTFIX_PRECEDENCE,
RELATIONAL_PRECEDENCE,
SELECTOR_PRECEDENCE,
StringToken,
SyntheticBeginToken,
SyntheticKeywordToken,
SyntheticStringToken,
SyntheticToken,
TokenType;
import '../scanner/token_constants.dart'
show
BANG_EQ_EQ_TOKEN,
COMMA_TOKEN,
DOUBLE_TOKEN,
EOF_TOKEN,
EQ_EQ_EQ_TOKEN,
EQ_TOKEN,
FUNCTION_TOKEN,
HASH_TOKEN,
HEXADECIMAL_TOKEN,
IDENTIFIER_TOKEN,
INT_TOKEN,
KEYWORD_TOKEN,
LT_TOKEN,
OPEN_CURLY_BRACKET_TOKEN,
OPEN_PAREN_TOKEN,
OPEN_SQUARE_BRACKET_TOKEN,
SEMICOLON_TOKEN,
STRING_INTERPOLATION_IDENTIFIER_TOKEN,
STRING_INTERPOLATION_TOKEN,
STRING_TOKEN;
import 'assert.dart' show Assert;
import 'async_modifier.dart' show AsyncModifier;
import 'block_kind.dart';
import 'declaration_kind.dart' show DeclarationKind;
import 'directive_context.dart';
import 'formal_parameter_kind.dart'
show
FormalParameterKind,
isMandatoryFormalParameterKind,
isOptionalPositionalFormalParameterKind;
import 'forwarding_listener.dart' show ForwardingListener, NullListener;
import 'identifier_context.dart'
show IdentifierContext, looksLikeExpressionStart;
import 'listener.dart' show Listener;
import 'literal_entry_info.dart'
show
LiteralEntryInfo,
computeLiteralEntry,
looksLikeLiteralEntry,
simpleEntry;
import 'loop_state.dart' show LoopState;
import 'member_kind.dart' show MemberKind;
import 'modifier_context.dart' show ModifierRecoveryContext, isModifier;
import 'recovery_listeners.dart'
show
ClassHeaderRecoveryListener,
ImportRecoveryListener,
MixinHeaderRecoveryListener;
import 'token_stream_rewriter.dart'
show
TokenStreamRewriter,
TokenStreamRewriterImpl,
UndoableTokenStreamRewriter;
import 'type_info.dart'
show
TypeInfo,
TypeParamOrArgInfo,
computeMethodTypeArguments,
computeType,
computeTypeParamOrArg,
isValidTypeReference,
noType,
noTypeParamOrArg;
import 'util.dart'
show
findNonZeroLengthToken,
findPreviousNonZeroLengthToken,
isLetter,
isLetterOrDigit,
isOneOf,
isOneOfOrEof,
isWhitespace,
optional;
/// An event generating parser of Dart programs. This parser expects all tokens
/// in a linked list (aka a token stream).
///
/// The class [Scanner] is used to generate a token stream. See the file
/// [scanner.dart](../scanner.dart).
///
/// Subclasses of the class [Listener] are used to listen to events.
///
/// Most methods of this class belong in one of four major categories: parse
/// methods, peek methods, ensure methods, and skip methods.
///
/// Parse methods all have the prefix `parse`, generate events
/// (by calling methods on [listener]), and return the next token to parse.
/// Some exceptions to this last point are methods such as [parseFunctionBody]
/// and [parseClassOrMixinOrExtensionBody] which return the last token parsed
/// rather than the next token to be parsed.
/// Parse methods are generally named `parseGrammarProductionSuffix`.
/// The suffix can be one of `opt`, or `star`.
/// `opt` means zero or one matches, `star` means zero or more matches.
/// For example, [parseMetadataStar] corresponds to this grammar snippet:
/// `metadata*`, and [parseArgumentsOpt] corresponds to: `arguments?`.
///
/// Peek methods all have the prefix `peek`, do not generate events
/// (except for errors) and may return null.
///
/// Ensure methods all have the prefix `ensure` and may generate events.
/// They return the current token, or insert and return a synthetic token
/// if the current token does not match. For example,
/// [ensureSemicolon] returns the current token if the current token is a
/// semicolon, otherwise inserts a synthetic semicolon in the token stream
/// before the current token and then returns that new synthetic token.
///
/// Skip methods are like parse methods, but all have the prefix `skip`
/// and skip over some parts of the file being parsed.
/// Typically, skip methods generate an event for the structure being skipped,
/// but not for its substructures.
///
/// ## Current Token
///
/// The current token is always to be found in a formal parameter named
/// `token`. This parameter should be the first as this increases the chance
/// that a compiler will place it in a register.
///
/// ## Implementation Notes
///
/// The parser assumes that keywords, built-in identifiers, and other special
/// words (pseudo-keywords) are all canonicalized. To extend the parser to
/// recognize a new identifier, one should modify
/// [keyword.dart](../scanner/keyword.dart) and ensure the identifier is added
/// to the keyword table.
///
/// As a consequence of this, one should not use `==` to compare strings in the
/// parser. One should favor the methods [optional] and [expect] to recognize
/// keywords or identifiers. In some cases, it's possible to compare a token's
/// `stringValue` using [identical], but normally [optional] will suffice.
///
/// Historically, we over-used identical, and when identical is used on objects
/// other than strings, it can often be replaced by `==`.
///
/// ## Flexibility, Extensibility, and Specification
///
/// The parser is designed to be flexible and extensible. Its methods are
/// designed to be overridden in subclasses, so it can be extended to handle
/// unspecified language extension or experiments while everything in this file
/// attempts to follow the specification (unless when it interferes with error
/// recovery).
///
/// We achieve flexibility, extensible, and specification compliance by
/// following a few rules-of-thumb:
///
/// 1. All methods in the parser should be public.
///
/// 2. The methods follow the specified grammar, and do not implement custom
/// extensions, for example, `native`.
///
/// 3. The parser doesn't rewrite the token stream (when dealing with `>>`).
///
/// ### Implementing Extensions
///
/// For various reasons, some Dart language implementations have used
/// custom/unspecified extensions to the Dart grammar. Examples of this
/// includes diet parsing, patch files, `native` keyword, and generic
/// comments. This class isn't supposed to implement any of these
/// features. Instead it provides hooks for those extensions to be implemented
/// in subclasses or listeners. Let's examine how diet parsing and `native`
/// keyword is currently supported by Fasta.
///
/// #### Legacy Implementation of `native` Keyword
///
/// TODO(ahe,danrubel): Remove this section.
///
/// Both dart2js and the Dart VM have used the `native` keyword to mark methods
/// that couldn't be implemented in the Dart language and needed to be
/// implemented in JavaScript or C++, respectively. An example of the syntax
/// extension used by the Dart VM is:
///
/// nativeFunction() native "NativeFunction";
///
/// When attempting to parse this function, the parser eventually calls
/// [parseFunctionBody]. This method will report an unrecoverable error to the
/// listener with the code [codes.messageExpectedFunctionBody]. The listener can
/// then look at the error code and the token and use the methods in
/// [native_support.dart](native_support.dart) to parse the native syntax.
///
/// #### Implementation of Diet Parsing
///
/// We call it _diet_ _parsing_ when the parser skips parts of a file. Both
/// dart2js and the Dart VM have been relying on this from early on as it allows
/// them to more quickly compile small programs that use small parts of big
/// libraries. It's also become an integrated part of how Fasta builds up
/// outlines before starting to parse method bodies.
///
/// When looking through this parser, you'll find a number of unused methods
/// starting with `skip`. These methods are only used by subclasses, such as
/// [ClassMemberParser](class_member_parser.dart) and
/// [TopLevelParser](top_level_parser.dart). These methods violate the
/// principle above about following the specified grammar, and originally lived
/// in subclasses. However, we realized that these methods were so widely used
/// and hard to maintain in subclasses, that it made sense to move them here.
///
/// ### Specification and Error Recovery
///
/// To improve error recovery, the parser will inform the listener of
/// recoverable errors and continue to parse. An example of a recoverable
/// error is:
///
/// Error: Asynchronous for-loop can only be used in 'async' or 'async*'...
/// main() { await for (var x in []) {} }
/// ^^^^^
///
/// ### Legacy Error Recovery
///
/// What's described below will be phased out in preference of the parser
/// reporting and recovering from syntax errors. The motivation for this is
/// that we have multiple listeners that use the parser, and this will ensure
/// consistency.
///
/// For unrecoverable errors, the parser will ask the listener for help to
/// recover from the error. We haven't made much progress on these kinds of
/// errors, so in most cases, the parser aborts by skipping to the end of file.
///
/// Historically, this parser has been rather lax in what it allows, and
/// deferred the enforcement of some syntactical rules to subsequent phases. It
/// doesn't matter how we got there, only that we've identified that it's
/// easier if the parser reports as many errors it can, but informs the
/// listener if the error is recoverable or not.
class Parser {
Listener listener;
Uri? get uri => listener.uri;
bool mayParseFunctionExpressions = true;
/// Represents parser state: what asynchronous syntax is allowed in the
/// function being currently parsed. In rare situations, this can be set by
/// external clients, for example, to parse an expression outside a function.
AsyncModifier asyncState = AsyncModifier.Sync;
// TODO(danrubel): The [loopState] and associated functionality in the
// [Parser] duplicates work that the resolver needs to do when resolving
// break/continue targets. Long term, this state and functionality will be
// removed from the [Parser] class and the resolver will be responsible
// for generating all break/continue error messages.
/// Represents parser state: whether parsing outside a loop,
/// inside a loop, or inside a switch. This is used to determine whether
/// break and continue statements are allowed.
LoopState loopState = LoopState.OutsideLoop;
/// A rewriter for inserting synthetic tokens.
/// Access using [rewriter] for lazy initialization.
TokenStreamRewriter? cachedRewriter;
TokenStreamRewriter get rewriter {
return cachedRewriter ??= new TokenStreamRewriterImpl();
}
Parser(this.listener)
: assert(listener != null); // ignore:unnecessary_null_comparison
bool get inGenerator {
return asyncState == AsyncModifier.AsyncStar ||
asyncState == AsyncModifier.SyncStar;
}
bool get inAsync {
return asyncState == AsyncModifier.Async ||
asyncState == AsyncModifier.AsyncStar;
}
bool get inPlainSync => asyncState == AsyncModifier.Sync;
bool get isBreakAllowed => loopState != LoopState.OutsideLoop;
bool get isContinueAllowed => loopState == LoopState.InsideLoop;
bool get isContinueWithLabelAllowed => loopState != LoopState.OutsideLoop;
/// Parse a compilation unit.
///
/// This method is only invoked from outside the parser. As a result, this
/// method takes the next token to be consumed rather than the last consumed
/// token and returns the token after the last consumed token rather than the
/// last consumed token.
///
/// ```
/// libraryDefinition:
/// scriptTag?
/// libraryName?
/// importOrExport*
/// partDirective*
/// topLevelDefinition*
/// ;
///
/// partDeclaration:
/// partHeader topLevelDefinition*
/// ;
/// ```
Token parseUnit(Token token) {
// Skip over error tokens and report them at the end
// so that the parser has the chance to adjust the error location.
Token errorToken = token;
token = skipErrorTokens(errorToken);
listener.beginCompilationUnit(token);
int count = 0;
DirectiveContext directiveState = new DirectiveContext();
token = syntheticPreviousToken(token);
if (identical(token.next!.type, TokenType.SCRIPT_TAG)) {
directiveState.checkScriptTag(this, token.next!);
token = parseScript(token);
}
while (!token.next!.isEof) {
final Token start = token.next!;
token = parseTopLevelDeclarationImpl(token, directiveState);
listener.endTopLevelDeclaration(token.next!);
count++;
if (start == token.next!) {
// Recovery:
// If progress has not been made reaching the end of the token stream,
// then report an error and skip the current token.
token = token.next!;
listener.beginMetadataStar(token);
listener.endMetadataStar(/* count = */ 0);
reportRecoverableErrorWithToken(
token, codes.templateExpectedDeclaration);
listener.handleInvalidTopLevelDeclaration(token);
listener.endTopLevelDeclaration(token.next!);
count++;
}
}
token = token.next!;
reportAllErrorTokens(errorToken);
listener.endCompilationUnit(count, token);
// Clear fields that could lead to memory leak.
cachedRewriter = null;
return token;
}
/// This method exists for analyzer compatibility only
/// and will be removed once analyzer/fasta integration is complete.
///
/// Similar to [parseUnit], this method parses a compilation unit,
/// but stops when it reaches the first declaration or EOF.
///
/// This method is only invoked from outside the parser. As a result, this
/// method takes the next token to be consumed rather than the last consumed
/// token and returns the token after the last consumed token rather than the
/// last consumed token.
Token parseDirectives(Token token) {
listener.beginCompilationUnit(token);
int count = 0;
DirectiveContext directiveState = new DirectiveContext();
token = syntheticPreviousToken(token);
while (!token.next!.isEof) {
final Token start = token.next!;
final String? nextValue = start.next!.stringValue;
// If a built-in keyword is being used as function name, then stop.
if (identical(nextValue, '.') ||
identical(nextValue, '<') ||
identical(nextValue, '(')) {
break;
}
if (identical(token.next!.type, TokenType.SCRIPT_TAG)) {
directiveState.checkScriptTag(this, token.next!);
token = parseScript(token);
} else {
token = parseMetadataStar(token);
Token keyword = token.next!;
final String? value = keyword.stringValue;
if (identical(value, 'import')) {
directiveState.checkImport(this, keyword);
token = parseImport(keyword);
} else if (identical(value, 'export')) {
directiveState.checkExport(this, keyword);
token = parseExport(keyword);
} else if (identical(value, 'library')) {
directiveState.checkLibrary(this, keyword);
token = parseLibraryName(keyword);
} else if (identical(value, 'part')) {
token = parsePartOrPartOf(keyword, directiveState);
} else if (identical(value, ';')) {
token = start;
listener.handleDirectivesOnly();
} else {
listener.handleDirectivesOnly();
break;
}
}
listener.endTopLevelDeclaration(token.next!);
}
token = token.next!;
listener.endCompilationUnit(count, token);
// Clear fields that could lead to memory leak.
cachedRewriter = null;
return token;
}
/// Parse a top-level declaration.
///
/// This method is only invoked from outside the parser. As a result, this
/// method takes the next token to be consumed rather than the last consumed
/// token and returns the token after the last consumed token rather than the
/// last consumed token.
Token parseTopLevelDeclaration(Token token) {
  // The implementation expects the token *before* the first one to consume,
  // so manufacture a synthetic predecessor. No directive tracking is needed
  // for a single declaration.
  Token previous = syntheticPreviousToken(token);
  Token last =
      parseTopLevelDeclarationImpl(previous, /* directiveState = */ null);
  token = last.next!;
  listener.endTopLevelDeclaration(token);
  return token;
}
/// ```
/// topLevelDefinition:
/// classDefinition |
/// enumType |
/// typeAlias |
/// 'external'? functionSignature ';' |
/// 'external'? getterSignature ';' |
/// 'external'? setterSignature ';' |
/// functionSignature functionBody |
/// returnType? 'get' identifier functionBody |
/// returnType? 'set' identifier formalParameterList functionBody |
/// ('final' | 'const') type? staticFinalDeclarationList ';' |
/// variableDeclaration ';'
/// ;
/// ```
Token parseTopLevelDeclarationImpl(
Token token, DirectiveContext? directiveState) {
token = parseMetadataStar(token);
Token next = token.next!;
if (next.isTopLevelKeyword) {
return parseTopLevelKeywordDeclaration(token, next, directiveState);
}
Token start = token;
// Skip modifiers to find a top level keyword or identifier
if (next.isModifier) {
if (optional('var', next) ||
optional('late', next) ||
((optional('const', next) || optional('final', next)) &&
// Ignore `const class` and `final class` so that it is reported
// below as an invalid modifier on a class.
!optional('class', next.next!))) {
directiveState?.checkDeclaration();
return parseTopLevelMemberImpl(token);
}
while (token.next!.isModifier) {
token = token.next!;
}
}
next = token.next!;
if (next.isTopLevelKeyword) {
return parseTopLevelKeywordDeclaration(start, next, directiveState);
} else if (next.isKeywordOrIdentifier) {
// TODO(danrubel): improve parseTopLevelMember
// so that we don't parse modifiers twice.
directiveState?.checkDeclaration();
return parseTopLevelMemberImpl(start);
} else if (start.next != next) {
directiveState?.checkDeclaration();
// Handle the edge case where a modifier is being used as an identifier
return parseTopLevelMemberImpl(start);
}
// Recovery
if (next.isOperator && optional('(', next.next!)) {
// This appears to be a top level operator declaration, which is invalid.
reportRecoverableError(next, codes.messageTopLevelOperator);
// Insert a synthetic identifier
// and continue parsing as a top level function.
rewriter.insertSyntheticIdentifier(
next, '#synthetic_function_${next.charOffset}');
return parseTopLevelMemberImpl(next);
}
// Ignore any preceding modifiers and just report the unexpected token
listener.beginTopLevelMember(next);
return parseInvalidTopLevelDeclaration(token);
}
/// Parse the modifiers before the `class` keyword.
/// Return the first `abstract` modifier or `null` if not found.
/// Parse the modifiers before the `class` keyword.
/// Return the first `abstract` modifier or `null` if not found.
Token? parseClassDeclarationModifiers(Token start, Token keyword) {
  for (Token current = start.next!;
      current != keyword;
      current = current.next!) {
    if (optional('abstract', current)) {
      // Report errors on any modifiers that follow `abstract`.
      parseTopLevelKeywordModifiers(current, keyword);
      return current;
    }
    // Recovery: any other modifier before `class` is an error.
    reportTopLevelModifierError(current, keyword);
  }
  return null;
}
/// Report errors on any modifiers before the specified keyword.
/// Report errors on any modifiers before the specified keyword.
void parseTopLevelKeywordModifiers(Token start, Token keyword) {
  // Recovery: walk each modifier between [start] and [keyword],
  // reporting an error for every one of them.
  for (Token current = start.next!;
      current != keyword;
      current = current.next!) {
    reportTopLevelModifierError(current, keyword);
  }
}
// Report an error for the given modifier preceding a top level keyword
// such as `import` or `class`.
// Report an error for the given modifier preceding a top level keyword
// such as `import` or `class`.
void reportTopLevelModifierError(Token modifier, Token afterModifiers) {
  // `const class` has a dedicated message.
  if (optional('const', modifier) && optional('class', afterModifiers)) {
    reportRecoverableError(modifier, codes.messageConstClass);
    return;
  }
  // `external` has dedicated messages for class, enum, and typedef.
  if (optional('external', modifier)) {
    if (optional('class', afterModifiers)) {
      reportRecoverableError(modifier, codes.messageExternalClass);
      return;
    }
    if (optional('enum', afterModifiers)) {
      reportRecoverableError(modifier, codes.messageExternalEnum);
      return;
    }
    if (optional('typedef', afterModifiers)) {
      reportRecoverableError(modifier, codes.messageExternalTypedef);
      return;
    }
  }
  // Anything else is reported as an extraneous modifier.
  reportRecoverableErrorWithToken(
      modifier, codes.templateExtraneousModifier);
}
/// Parse any top-level declaration that begins with a keyword.
/// [start] is the token before any modifiers preceding [keyword].
Token parseTopLevelKeywordDeclaration(
Token start, Token keyword, DirectiveContext? directiveState) {
assert(keyword.isTopLevelKeyword);
final String? value = keyword.stringValue;
if (identical(value, 'class')) {
directiveState?.checkDeclaration();
Token? abstractToken = parseClassDeclarationModifiers(start, keyword);
return parseClassOrNamedMixinApplication(abstractToken, keyword);
} else if (identical(value, 'enum')) {
directiveState?.checkDeclaration();
parseTopLevelKeywordModifiers(start, keyword);
return parseEnum(keyword);
} else {
// The remaining top level keywords are built-in keywords
// and can be used in a top level declaration
// as an identifier such as "abstract<T>() => 0;"
// or as a prefix such as "abstract.A b() => 0;".
String? nextValue = keyword.next!.stringValue;
if (identical(nextValue, '(') || identical(nextValue, '.')) {
directiveState?.checkDeclaration();
return parseTopLevelMemberImpl(start);
} else if (identical(nextValue, '<')) {
if (identical(value, 'extension')) {
// The name in an extension declaration is optional:
// `extension<T> on ...`
Token? endGroup = keyword.next!.endGroup;
if (endGroup != null && optional('on', endGroup.next!)) {
directiveState?.checkDeclaration();
return parseExtension(keyword);
}
}
directiveState?.checkDeclaration();
return parseTopLevelMemberImpl(start);
} else {
parseTopLevelKeywordModifiers(start, keyword);
if (identical(value, 'import')) {
directiveState?.checkImport(this, keyword);
return parseImport(keyword);
} else if (identical(value, 'export')) {
directiveState?.checkExport(this, keyword);
return parseExport(keyword);
} else if (identical(value, 'typedef')) {
directiveState?.checkDeclaration();
return parseTypedef(keyword);
} else if (identical(value, 'mixin')) {
directiveState?.checkDeclaration();
return parseMixin(keyword);
} else if (identical(value, 'extension')) {
directiveState?.checkDeclaration();
return parseExtension(keyword);
} else if (identical(value, 'part')) {
return parsePartOrPartOf(keyword, directiveState);
} else if (identical(value, 'library')) {
directiveState?.checkLibrary(this, keyword);
return parseLibraryName(keyword);
}
}
}
throw "Internal error: Unhandled top level keyword '$value'.";
}
/// ```
/// libraryDirective:
/// 'library' qualified ';'
/// ;
/// ```
Token parseLibraryName(Token libraryKeyword) {
  assert(optional('library', libraryKeyword));
  listener.beginUncategorizedTopLevelDeclaration(libraryKeyword);
  listener.beginLibraryName(libraryKeyword);
  // Parse the dotted name following the `library` keyword, then the
  // terminating `;` (inserting a synthetic one if missing).
  Token last = parseQualified(libraryKeyword, IdentifierContext.libraryName,
      IdentifierContext.libraryNameContinuation);
  last = ensureSemicolon(last);
  listener.endLibraryName(libraryKeyword, last);
  return last;
}
/// ```
/// importPrefix:
/// 'deferred'? 'as' identifier
/// ;
/// ```
Token parseImportPrefixOpt(Token token) {
  Token next = token.next!;
  if (optional('deferred', next) && optional('as', next.next!)) {
    // `deferred as <identifier>`
    Token deferredKeyword = next;
    Token asKeyword = next.next!;
    token = ensureIdentifier(
        asKeyword, IdentifierContext.importPrefixDeclaration);
    listener.handleImportPrefix(deferredKeyword, asKeyword);
    return token;
  }
  if (optional('as', next)) {
    // `as <identifier>` without a preceding `deferred`.
    token = ensureIdentifier(next, IdentifierContext.importPrefixDeclaration);
    listener.handleImportPrefix(/* deferredKeyword = */ null, next);
    return token;
  }
  // No prefix clause present.
  listener.handleImportPrefix(
      /* deferredKeyword = */ null,
      /* asKeyword = */ null);
  return token;
}
/// ```
/// importDirective:
/// 'import' uri ('if' '(' test ')' uri)* importPrefix? combinator* ';'
/// ;
/// ```
Token parseImport(Token importKeyword) {
  assert(optional('import', importKeyword));
  listener.beginUncategorizedTopLevelDeclaration(importKeyword);
  listener.beginImport(importKeyword);
  // The URI string literal, followed by optional conditional URIs,
  // an optional prefix clause, and any show/hide combinators.
  Token uri = ensureLiteralString(importKeyword);
  Token token = parseConditionalUriStar(uri);
  token = parseImportPrefixOpt(token);
  token = parseCombinatorStar(token).next!;
  if (!optional(';', token)) {
    // Recovery: clauses are missing, duplicated, or out of order.
    listener.endImport(importKeyword, /* semicolon = */ null);
    return parseImportRecovery(uri);
  }
  listener.endImport(importKeyword, token);
  return token;
}
/// Recover given out-of-order clauses in an import directive where [token] is
/// the URI string literal token (the last token consumed before the clauses).
Token parseImportRecovery(Token token) {
final Listener primaryListener = listener;
final ImportRecoveryListener recoveryListener =
new ImportRecoveryListener();
// Reparse to determine which clauses have already been parsed
// but intercept the events so they are not sent to the primary listener
listener = recoveryListener;
token = parseConditionalUriStar(token);
token = parseImportPrefixOpt(token);
token = parseCombinatorStar(token);
Token? firstDeferredKeyword = recoveryListener.deferredKeyword;
bool hasPrefix = recoveryListener.asKeyword != null;
bool hasCombinator = recoveryListener.hasCombinator;
// Update the recovery listener to forward subsequent events
// to the primary listener
recoveryListener.listener = primaryListener;
// Parse additional out-of-order clauses.
Token? semicolon;
do {
Token start = token.next!;
// Check for extraneous token in the middle of an import statement.
token = skipUnexpectedTokenOpt(
token, const <String>['if', 'deferred', 'as', 'hide', 'show', ';']);
// During recovery, clauses are parsed in the same order
// and generate the same events as in the parseImport method above.
recoveryListener.clear();
token = parseConditionalUriStar(token);
if (recoveryListener.ifKeyword != null) {
if (firstDeferredKeyword != null) {
// TODO(danrubel): report error indicating conditional should
// be moved before deferred keyword
} else if (hasPrefix) {
// TODO(danrubel): report error indicating conditional should
// be moved before prefix clause
} else if (hasCombinator) {
// TODO(danrubel): report error indicating conditional should
// be moved before combinators
}
}
if (optional('deferred', token.next!) &&
!optional('as', token.next!.next!)) {
listener.handleImportPrefix(token.next!, /* asKeyword = */ null);
token = token.next!;
} else {
token = parseImportPrefixOpt(token);
}
if (recoveryListener.deferredKeyword != null) {
if (firstDeferredKeyword != null) {
reportRecoverableError(recoveryListener.deferredKeyword!,
codes.messageDuplicateDeferred);
} else {
if (hasPrefix) {
reportRecoverableError(recoveryListener.deferredKeyword!,
codes.messageDeferredAfterPrefix);
}
firstDeferredKeyword = recoveryListener.deferredKeyword;
}
}
if (recoveryListener.asKeyword != null) {
if (hasPrefix) {
reportRecoverableError(
recoveryListener.asKeyword!, codes.messageDuplicatePrefix);
} else {
if (hasCombinator) {
reportRecoverableError(recoveryListener.asKeyword!,
codes.messagePrefixAfterCombinator);
}
hasPrefix = true;
}
}
token = parseCombinatorStar(token);
hasCombinator = hasCombinator || recoveryListener.hasCombinator;
if (optional(';', token.next!)) {
semicolon = token.next!;
} else if (identical(start, token.next!)) {
// If no forward progress was made, insert ';' so that we exit loop.
semicolon = ensureSemicolon(token);
}
listener.handleRecoverImport(semicolon);
} while (semicolon == null);
if (firstDeferredKeyword != null && !hasPrefix) {
reportRecoverableError(
firstDeferredKeyword, codes.messageMissingPrefixInDeferredImport);
}
return semicolon;
}
/// ```
/// conditionalUris:
/// conditionalUri*
/// ;
/// ```
Token parseConditionalUriStar(Token token) {
  listener.beginConditionalUris(token.next!);
  int uriCount = 0;
  // Each conditional URI clause begins with `if`.
  while (optional('if', token.next!)) {
    uriCount++;
    token = parseConditionalUri(token);
  }
  listener.endConditionalUris(uriCount);
  return token;
}
/// ```
/// conditionalUri:
/// 'if' '(' dottedName ('==' literalString)? ')' uri
/// ;
/// ```
Token parseConditionalUri(Token token) {
Token ifKeyword = token = token.next!;
assert(optional('if', token));
listener.beginConditionalUri(token);
Token leftParen = token.next!;
if (!optional('(', leftParen)) {
reportRecoverableError(
leftParen, codes.templateExpectedButGot.withArguments('('));
leftParen = rewriter.insertParens(token, /* includeIdentifier = */ true);
}
token = parseDottedName(leftParen);
Token next = token.next!;
Token? equalitySign;
if (optional('==', next)) {
equalitySign = next;
token = ensureLiteralString(next);
next = token.next!;
}
if (next != leftParen.endGroup) {
Token endGroup = leftParen.endGroup!;
if (endGroup.isSynthetic) {
// The scanner did not place the synthetic ')' correctly, so move it.
next = rewriter.moveSynthetic(token, endGroup);
} else {
reportRecoverableErrorWithToken(next, codes.templateUnexpectedToken);
next = endGroup;
}
}
token = next;
assert(optional(')', token));
token = ensureLiteralString(token);
listener.endConditionalUri(ifKeyword, leftParen, equalitySign);
return token;
}
/// ```
/// dottedName:
/// identifier ('.' identifier)*
/// ;
/// ```
Token parseDottedName(Token token) {
  token = ensureIdentifier(token, IdentifierContext.dottedName);
  final Token firstIdentifier = token;
  int identifierCount = 1;
  // Consume any subsequent `.` identifier pairs.
  while (optional('.', token.next!)) {
    token = ensureIdentifier(
        token.next!, IdentifierContext.dottedNameContinuation);
    identifierCount++;
  }
  listener.handleDottedName(identifierCount, firstIdentifier);
  return token;
}
/// ```
/// exportDirective:
/// 'export' uri conditional-uris* combinator* ';'
/// ;
/// ```
Token parseExport(Token exportKeyword) {
  assert(optional('export', exportKeyword));
  listener.beginUncategorizedTopLevelDeclaration(exportKeyword);
  listener.beginExport(exportKeyword);
  // URI string literal, optional conditional URIs, optional combinators,
  // and the terminating `;` (inserted synthetically if missing).
  Token last = ensureLiteralString(exportKeyword);
  last = parseConditionalUriStar(last);
  last = parseCombinatorStar(last);
  last = ensureSemicolon(last);
  listener.endExport(exportKeyword, last);
  return last;
}
/// ```
/// combinators:
/// (hideCombinator | showCombinator)*
/// ;
/// ```
Token parseCombinatorStar(Token token) {
  Token next = token.next!;
  listener.beginCombinators(next);
  int count = 0;
  // Consume combinators while the next token is `hide` or `show`.
  String? value = next.stringValue;
  while (identical('hide', value) || identical('show', value)) {
    token = identical('hide', value) ? parseHide(token) : parseShow(token);
    count++;
    next = token.next!;
    value = next.stringValue;
  }
  listener.endCombinators(count);
  return token;
}
/// ```
/// hideCombinator:
/// 'hide' identifierList
/// ;
/// ```
Token parseHide(Token token) {
  Token hideKeyword = token.next!;
  assert(optional('hide', hideKeyword));
  listener.beginHide(hideKeyword);
  // The identifiers being hidden follow the keyword.
  Token last = parseIdentifierList(hideKeyword);
  listener.endHide(hideKeyword);
  return last;
}
/// ```
/// showCombinator:
/// 'show' identifierList
/// ;
/// ```
Token parseShow(Token token) {
  Token showKeyword = token.next!;
  assert(optional('show', showKeyword));
  listener.beginShow(showKeyword);
  // The identifiers being shown follow the keyword.
  Token last = parseIdentifierList(showKeyword);
  listener.endShow(showKeyword);
  return last;
}
/// ```
/// identifierList:
/// identifier (',' identifier)*
/// ;
/// ```
Token parseIdentifierList(Token token) {
  // At least one identifier is required; additional ones are
  // comma-separated.
  token = ensureIdentifier(token, IdentifierContext.combinator);
  int identifierCount = 1;
  for (; optional(',', token.next!); identifierCount++) {
    token = ensureIdentifier(token.next!, IdentifierContext.combinator);
  }
  listener.handleIdentifierList(identifierCount);
  return token;
}
/// ```
/// typeList:
/// type (',' type)*
/// ;
/// ```
Token parseTypeList(Token token) {
  listener.beginTypeList(token.next!);
  // At least one type is required; additional ones are comma-separated.
  token =
      computeType(token, /* required = */ true).ensureTypeOrVoid(token, this);
  int typeCount = 1;
  while (optional(',', token.next!)) {
    Token comma = token.next!;
    token = computeType(comma, /* required = */ true)
        .ensureTypeOrVoid(comma, this);
    typeCount++;
  }
  listener.endTypeList(typeCount);
  return token;
}
Token parsePartOrPartOf(Token partKeyword, DirectiveContext? directiveState) {
  assert(optional('part', partKeyword));
  listener.beginUncategorizedTopLevelDeclaration(partKeyword);
  // `part of` names the enclosing library; plain `part` includes a file.
  if (optional('of', partKeyword.next!)) {
    directiveState?.checkPartOf(this, partKeyword);
    return parsePartOf(partKeyword);
  }
  directiveState?.checkPart(this, partKeyword);
  return parsePart(partKeyword);
}
/// ```
/// partDirective:
/// 'part' uri ';'
/// ;
/// ```
Token parsePart(Token partKeyword) {
  assert(optional('part', partKeyword));
  listener.beginPart(partKeyword);
  // URI string literal followed by `;` (inserted synthetically if missing).
  Token last = ensureLiteralString(partKeyword);
  last = ensureSemicolon(last);
  listener.endPart(partKeyword, last);
  return last;
}
/// ```
/// partOfDirective:
/// 'part' 'of' (qualified | uri) ';'
/// ;
/// ```
Token parsePartOf(Token partKeyword) {
  assert(optional('part', partKeyword));
  final Token ofKeyword = partKeyword.next!;
  assert(optional('of', ofKeyword));
  listener.beginPartOf(partKeyword);
  // `part of` accepts either a dotted library name or a URI string.
  final bool hasName = ofKeyword.next!.isIdentifier;
  Token token = hasName
      ? parseQualified(ofKeyword, IdentifierContext.partName,
          IdentifierContext.partNameContinuation)
      : ensureLiteralString(ofKeyword);
  token = ensureSemicolon(token);
  listener.endPartOf(partKeyword, ofKeyword, token, hasName);
  return token;
}
/// ```
/// metadata:
/// annotation*
/// ;
/// ```
Token parseMetadataStar(Token token) {
  listener.beginMetadataStar(token.next!);
  int annotationCount = 0;
  // Consume consecutive `@annotation` entries.
  for (; optional('@', token.next!); ++annotationCount) {
    token = parseMetadata(token);
  }
  listener.endMetadataStar(annotationCount);
  return token;
}
/// ```
/// annotation:
/// '@' qualified ('.' identifier)? arguments?
/// ;
/// ```
Token parseMetadata(Token token) {
Token atToken = token.next!;
assert(optional('@', atToken));
listener.beginMetadata(atToken);
// Parse the (possibly qualified) annotation name, e.g. `foo` or `pfx.Foo`.
token = ensureIdentifier(atToken, IdentifierContext.metadataReference);
token =
parseQualifiedRestOpt(token, IdentifierContext.metadataContinuation);
// Remember whether explicit type arguments were written so that the check
// below can reject type arguments without a following argument list.
bool hasTypeArguments = optional("<", token.next!);
token = computeTypeParamOrArg(token).parseArguments(token, this);
Token? period = null;
if (optional('.', token.next!)) {
// Named constructor after type arguments, e.g. `@Foo<int>.named(...)`.
period = token.next!;
token = ensureIdentifier(
period, IdentifierContext.metadataContinuationAfterTypeArguments);
}
if (hasTypeArguments && !optional("(", token.next!)) {
// E.g. `@Foo<int>` with no `(...)`: a generic tear-off is not a valid
// annotation; report but continue parsing.
reportRecoverableError(
token, codes.messageMetadataTypeArgumentsUninstantiated);
}
token = parseArgumentsOpt(token);
listener.endMetadata(atToken, period, token.next!);
return token;
}
/// ```
/// scriptTag:
/// '#!' (˜NEWLINE)* NEWLINE
/// ;
/// ```
Token parseScript(Token token) {
  final Token scriptTag = token.next!;
  assert(identical(scriptTag.type, TokenType.SCRIPT_TAG));
  listener.handleScript(scriptTag);
  return scriptTag;
}
/// ```
/// typeAlias:
/// metadata 'typedef' typeAliasBody |
/// metadata 'typedef' identifier typeParameters? '=' functionType ';'
/// ;
///
/// functionType:
/// returnType? 'Function' typeParameters? parameterTypeList
///
/// typeAliasBody:
/// functionTypeAlias
/// ;
///
/// functionTypeAlias:
/// functionPrefix typeParameters? formalParameterList ‘;’
/// ;
///
/// functionPrefix:
/// returnType? identifier
/// ;
/// ```
Token parseTypedef(Token typedefKeyword) {
assert(optional('typedef', typedefKeyword));
listener.beginUncategorizedTopLevelDeclaration(typedefKeyword);
listener.beginFunctionTypeAlias(typedefKeyword);
// Tentatively compute a (return) type to distinguish the old style
// `typedef <type>? name(...)` from the new style `typedef name = ...`.
TypeInfo typeInfo = computeType(typedefKeyword, /* required = */ false);
Token token = typeInfo.skipType(typedefKeyword);
Token next = token.next!;
Token? equals;
TypeParamOrArgInfo typeParam =
computeTypeParamOrArg(next, /* inDeclaration = */ true);
// New style iff there is no leading type and, after any type parameters,
// the next token is `=`.
if (typeInfo == noType && optional('=', typeParam.skip(next).next!)) {
// New style typedef, e.g. typedef foo = void Function();".
// Parse as recovered here to 'force' using it as an identifier as we've
// already established that the next token is the equal sign we're looking
// for.
token = ensureIdentifierPotentiallyRecovered(token,
IdentifierContext.typedefDeclaration, /* isRecovered = */ true);
token = typeParam.parseVariables(token, this);
next = token.next!;
// parseVariables rewrites so even though we checked in the if,
// we might not have an equal here now.
if (!optional('=', next) && optional('=', next.next!)) {
// Recovery after recovery: A token was inserted, but we'll skip it now
// to get more in line with what we thought in the if before.
next = next.next!;
}
if (optional('=', next)) {
equals = next;
TypeInfo type = computeType(equals, /* required = */ true);
if (!type.isFunctionType) {
// Recovery: In certain cases insert missing 'Function' and missing
// parens.
Token skippedType = type.skipType(equals);
if (optional('(', skippedType.next!) &&
skippedType.next!.endGroup != null &&
optional(';', skippedType.next!.endGroup!.next!)) {
// Turn "<return type>? '(' <whatever> ')';"
// into "<return type>? Function '(' <whatever> ')';".
// Assume the type is meant as the return type.
Token functionToken =
rewriter.insertSyntheticKeyword(skippedType, Keyword.FUNCTION);
reportRecoverableError(functionToken,
codes.templateExpectedButGot.withArguments('Function'));
// Recompute now that 'Function' exists in the token stream.
type = computeType(equals, /* required = */ true);
} else if (type is NoType &&
optional('<', skippedType.next!) &&
skippedType.next!.endGroup != null) {
// Recover these two:
// "<whatever>;" => "Function<whatever>();"
// "<whatever>(<whatever>);" => "Function<whatever>(<whatever>);"
Token endGroup = skippedType.next!.endGroup!;
bool recover = false;
if (optional(';', endGroup.next!)) {
// Missing parenthesis. Insert them.
// Turn "<whatever>;" in to "<whatever>();"
// Insert missing 'Function' below.
reportRecoverableError(endGroup,
missingParameterMessage(MemberKind.FunctionTypeAlias));
rewriter.insertParens(endGroup, /*includeIdentifier =*/ false);
recover = true;
} else if (optional('(', endGroup.next!) &&
endGroup.next!.endGroup != null &&
optional(';', endGroup.next!.endGroup!.next!)) {
// "<whatever>(<whatever>);". Insert missing 'Function' below.
recover = true;
}
if (recover) {
// Assume the '<' indicates type arguments to the function.
// Insert 'Function' before them.
Token functionToken =
rewriter.insertSyntheticKeyword(equals, Keyword.FUNCTION);
reportRecoverableError(functionToken,
codes.templateExpectedButGot.withArguments('Function'));
type = computeType(equals, /* required = */ true);
}
} else {
// E.g. "typedef j = foo;" -- don't attempt any recovery.
}
}
token = type.ensureTypeOrVoid(equals, this);
} else {
// A rewrite caused the = to disappear
token = parseFormalParametersRequiredOpt(
next, MemberKind.FunctionTypeAlias);
}
} else {
// Old style typedef, e.g. "typedef void foo();".
token = typeInfo.parseType(typedefKeyword, this);
next = token.next!;
bool isIdentifierRecovered = false;
if (next.kind != IDENTIFIER_TOKEN &&
optional('(', typeParam.skip(next).next!)) {
// Recovery: Not a valid identifier, but is used as such.
isIdentifierRecovered = true;
}
token = ensureIdentifierPotentiallyRecovered(
token, IdentifierContext.typedefDeclaration, isIdentifierRecovered);
token = typeParam.parseVariables(token, this);
token =
parseFormalParametersRequiredOpt(token, MemberKind.FunctionTypeAlias);
}
token = ensureSemicolon(token);
// `equals` is null for old-style typedefs; the listener uses it to tell
// the two forms apart.
listener.endFunctionTypeAlias(typedefKeyword, equals, token);
return token;
}
/// Parse a mixin application starting from `with`. Assumes that the first
/// type has already been parsed.
Token parseMixinApplicationRest(Token token) {
Token withKeyword = token.next!;
if (!optional('with', withKeyword)) {
// Recovery: Report an error and insert synthetic `with` clause.
reportRecoverableError(
withKeyword, codes.templateExpectedButGot.withArguments('with'));
withKeyword = rewriter.insertSyntheticKeyword(token, Keyword.WITH);
// If the token after the inserted `with` cannot start a type, also
// insert a synthetic identifier so parseTypeList has a type to consume.
if (!isValidTypeReference(withKeyword.next!)) {
rewriter.insertSyntheticIdentifier(withKeyword);
}
}
token = parseTypeList(withKeyword);
listener.handleNamedMixinApplicationWithClause(withKeyword);
return token;
}
/// Parses an optional `with` clause on a class declaration.
Token parseWithClauseOpt(Token token) {
  // <mixins> ::= with <typeNotVoidList>
  final Token withKeyword = token.next!;
  if (!optional('with', withKeyword)) {
    listener.handleClassNoWithClause();
    return token;
  }
  token = parseTypeList(withKeyword);
  listener.handleClassWithClause(withKeyword);
  return token;
}
/// Parse the formal parameters of a getter (which shouldn't have parameters)
/// or function or method.
Token parseGetterOrFormalParameters(
Token token, Token name, bool isGetter, MemberKind kind) {
Token next = token.next!;
if (optional("(", next)) {
if (isGetter) {
// Getters may not declare parameters; report but still parse the list
// so the rest of the member is processed.
reportRecoverableError(next, codes.messageGetterWithFormals);
}
token = parseFormalParameters(token, kind);
} else if (isGetter) {
listener.handleNoFormalParameters(next, kind);
} else {
// Recovery
// A non-getter is missing its parameter list: report the error at the
// member name (for operators, at the operator token itself) and insert
// empty synthetic parens so parsing can continue.
if (optional('operator', name)) {
Token next = name.next!;
if (next.isOperator) {
name = next;
} else if (isUnaryMinus(next)) {
name = next.next!;
}
}
reportRecoverableError(name, missingParameterMessage(kind));
token = rewriter.insertParens(token, /* includeIdentifier = */ false);
token = parseFormalParametersRest(token, kind);
}
return token;
}
/// Parses a formal parameter list if one is present; otherwise reports
/// "no formal parameters" to the listener and returns [token] unchanged.
Token parseFormalParametersOpt(Token token, MemberKind kind) {
  final Token next = token.next!;
  if (!optional('(', next)) {
    listener.handleNoFormalParameters(next, kind);
    return token;
  }
  return parseFormalParameters(token, kind);
}
/// Skips past a formal parameter list that starts at `token.next`.
Token skipFormalParameters(Token token, MemberKind kind) =>
    skipFormalParametersRest(token.next!, kind);
/// Skips from an opening `(` to its matching `)` without parsing the
/// parameters, reporting an empty parameter list to the listener.
Token skipFormalParametersRest(Token token, MemberKind kind) {
  assert(optional('(', token));
  // TODO(ahe): Shouldn't this be `beginFormalParameters`?
  listener.beginOptionalFormalParameters(token);
  final Token closeParen = token.endGroup!;
  assert(optional(')', closeParen));
  listener.endFormalParameters(/* count = */ 0, token, closeParen, kind);
  return closeParen;
}
/// Parses the formal parameter list of a function.
///
/// If `kind == MemberKind.GeneralizedFunctionType`, then names may be
/// omitted (except for named arguments). Otherwise, types may be omitted.
Token parseFormalParametersRequiredOpt(Token token, MemberKind kind) {
  Token leftParen = token.next!;
  if (!optional('(', leftParen)) {
    // Recovery: no `(` where a parameter list is required; report and
    // insert synthetic empty parens.
    reportRecoverableError(leftParen, missingParameterMessage(kind));
    leftParen = rewriter.insertParens(token, /* includeIdentifier = */ false);
  }
  return parseFormalParametersRest(leftParen, kind);
}
/// Parses the formal parameter list of a function given that the left
/// parenthesis is known to exist.
///
/// If `kind == MemberKind.GeneralizedFunctionType`, then names may be
/// omitted (except for named arguments). Otherwise, types may be omitted.
Token parseFormalParameters(Token token, MemberKind kind) =>
    parseFormalParametersRest(token.next!, kind);
/// Parses the formal parameter list of a function given that the left
/// parenthesis passed in as [token].
///
/// If `kind == MemberKind.GeneralizedFunctionType`, then names may be
/// omitted (except for named arguments). Otherwise, types may be omitted.
Token parseFormalParametersRest(Token token, MemberKind kind) {
Token begin = token;
assert(optional('(', token));
listener.beginFormalParameters(begin, kind);
int parameterCount = 0;
while (true) {
Token next = token.next!;
if (optional(')', next)) {
token = next;
break;
}
++parameterCount;
String? value = next.stringValue;
if (identical(value, '[')) {
// `[` starts the optional positional parameters, which must be the
// last group before the closing `)`.
token = parseOptionalPositionalParameters(token, kind);
token = ensureCloseParen(token, begin);
break;
} else if (identical(value, '{')) {
// `{` starts the optional named parameters, also the last group.
token = parseOptionalNamedParameters(token, kind);
token = ensureCloseParen(token, begin);
break;
} else if (identical(value, '[]')) {
// Recovery
// The scanner produced a single `[]` token; split it into `[` `]`.
token = rewriteSquareBrackets(token);
token = parseOptionalPositionalParameters(token, kind);
token = ensureCloseParen(token, begin);
break;
}
token = parseFormalParameter(token, FormalParameterKind.mandatory, kind);
next = token.next!;
if (!optional(',', next)) {
// No comma: either we're at `)` or we need to recover.
Token next = token.next!;
if (optional(')', next)) {
token = next;
} else {
// Recovery
if (begin.endGroup!.isSynthetic) {
// Scanner has already reported a missing `)` error,
// but placed the `)` in the wrong location, so move it.
token = rewriter.moveSynthetic(token, begin.endGroup!);
} else if (next.kind == IDENTIFIER_TOKEN &&
next.next!.kind == IDENTIFIER_TOKEN) {
// Looks like a missing comma
token = rewriteAndRecover(
token,
codes.templateExpectedButGot.withArguments(','),
new SyntheticToken(TokenType.COMMA, next.charOffset));
continue;
} else {
token = ensureCloseParen(token, begin);
}
}
break;
}
token = next;
}
assert(optional(')', token));
listener.endFormalParameters(parameterCount, begin, token, kind);
return token;
}
/// Return the message that should be produced when the formal parameters are
/// missing.
codes.Message missingParameterMessage(MemberKind kind) {
  switch (kind) {
    case MemberKind.FunctionTypeAlias:
      return codes.messageMissingTypedefParameters;
    case MemberKind.NonStaticMethod:
    case MemberKind.StaticMethod:
      return codes.messageMissingMethodParameters;
    default:
      return codes.messageMissingFunctionParameters;
  }
}
/// Check if [token] is the usage of 'required' in a formal parameter in a
/// context where it's not legal (i.e. in non-nnbd-mode).
bool _isUseOfRequiredInNonNNBD(Token token) {
if (token.next is StringToken && token.next!.value() == "required") {
// Possible recovery: Figure out if we're in a situation like
// required covariant? <type> name
// (in non-nnbd-mode) where the required modifier is not legal and thus
// would normally be parsed as the type.
token = token.next!;
Token next = token.next!;
// Skip modifiers.
while (next.isModifier) {
token = next;
next = next.next!;
}
// Parse the (potential) new type.
TypeInfo typeInfoAlternative = computeType(
token,
/* required = */ false,
/* inDeclaration = */ true);
token = typeInfoAlternative.skipType(token);
next = token.next!;
// We've essentially ignored the 'required' at this point.
// `token` is (in the good state) the last token of the type,
// `next` is (in the good state) the name;
// Are we in a 'good' state?
// A `,` or `}` after the name is what a well-formed named-parameter
// entry would look like.
if (typeInfoAlternative != noType &&
next.isIdentifier &&
(optional(',', next.next!) || optional('}', next.next!))) {
return true;
}
}
return false;
}
/// ```
/// normalFormalParameter:
/// functionFormalParameter |
/// fieldFormalParameter |
/// simpleFormalParameter
/// ;
///
/// functionFormalParameter:
/// metadata 'covariant'? returnType? identifier formalParameterList
/// ;
///
/// simpleFormalParameter:
/// metadata 'covariant'? finalConstVarOrType? identifier |
/// ;
///
/// fieldFormalParameter:
/// metadata finalConstVarOrType? 'this' '.' identifier formalParameterList?
/// ;
/// ```
Token parseFormalParameter(
Token token, FormalParameterKind parameterKind, MemberKind memberKind) {
// ignore: unnecessary_null_comparison
assert(parameterKind != null);
token = parseMetadataStar(token);
// Recovery: `required` written in a non-nnbd library is not a modifier;
// skip it here and report, but remember it for the listener below.
Token? skippedNonRequiredRequired;
if (_isUseOfRequiredInNonNNBD(token)) {
skippedNonRequiredRequired = token.next!;
reportRecoverableErrorWithToken(skippedNonRequiredRequired,
codes.templateUnexpectedModifierInNonNnbd);
token = token.next!;
}
Token next = token.next!;
Token start = next;
final bool inFunctionType =
memberKind == MemberKind.GeneralizedFunctionType;
// Modifier phase: accept `required`, `covariant`, `var`/`final` in that
// order where legal; anything left over goes to the recovery context.
Token? requiredToken;
Token? covariantToken;
Token? varFinalOrConst;
if (isModifier(next)) {
if (optional('required', next)) {
// `required` is only legal on named parameters.
if (parameterKind == FormalParameterKind.optionalNamed) {
requiredToken = token = next;
next = token.next!;
}
}
if (isModifier(next)) {
if (optional('covariant', next)) {
// `covariant` is not legal on static/top-level/extension members.
if (memberKind != MemberKind.StaticMethod &&
memberKind != MemberKind.TopLevelMethod &&
memberKind != MemberKind.ExtensionNonStaticMethod &&
memberKind != MemberKind.ExtensionStaticMethod) {
covariantToken = token = next;
next = token.next!;
}
}
if (isModifier(next)) {
if (!inFunctionType) {
if (optional('var', next)) {
varFinalOrConst = token = next;
next = token.next!;
} else if (optional('final', next)) {
varFinalOrConst = token = next;
next = token.next!;
}
}
if (isModifier(next)) {
// Recovery
ModifierRecoveryContext context = new ModifierRecoveryContext(this)
..covariantToken = covariantToken
..requiredToken = requiredToken
..varFinalOrConst = varFinalOrConst;
token = context.parseFormalParameterModifiers(
token, parameterKind, memberKind);
next = token.next!;
covariantToken = context.covariantToken;
requiredToken = context.requiredToken;
varFinalOrConst = context.varFinalOrConst;
}
}
}
}
if (requiredToken == null) {
// `required` was used as a modifier in non-nnbd mode. An error has been
// emitted. Still use it as a required token for the remainder in an
// attempt to avoid cascading errors (and for passing to the listener).
requiredToken = skippedNonRequiredRequired;
}
listener.beginFormalParameter(
start, memberKind, requiredToken, covariantToken, varFinalOrConst);
// Type is required in a generalized function type, but optional otherwise.
final Token beforeType = token;
TypeInfo typeInfo = computeType(token, inFunctionType);
token = typeInfo.skipType(token);
next = token.next!;
if (typeInfo == noType &&
(optional('.', next) ||
(next.isIdentifier && optional('.', next.next!)))) {
// Recovery: Malformed type reference.
typeInfo = computeType(beforeType, /* required = */ true);
token = typeInfo.skipType(beforeType);
next = token.next!;
}
final bool isNamedParameter =
parameterKind == FormalParameterKind.optionalNamed;
// Field formal phase: `this.name` switches the identifier context.
Token? thisKeyword;
Token? periodAfterThis;
IdentifierContext nameContext =
IdentifierContext.formalParameterDeclaration;
if (!inFunctionType && optional('this', next)) {
thisKeyword = token = next;
next = token.next!;
if (!optional('.', next)) {
// Recover from a missing period by inserting one.
next = rewriteAndRecover(
token,
codes.templateExpectedButGot.withArguments('.'),
new SyntheticToken(TokenType.PERIOD, next.charOffset));
}
periodAfterThis = token = next;
next = token.next!;
nameContext = IdentifierContext.fieldInitializer;
}
if (next.isIdentifier) {
token = next;
next = token.next!;
}
// Inline function type phase: detect `name<T>(...)` or `name(...)` and
// remember where it starts so it can be parsed after the return type.
Token? beforeInlineFunctionType;
TypeParamOrArgInfo typeParam = noTypeParamOrArg;
if (optional("<", next)) {
typeParam = computeTypeParamOrArg(token);
if (typeParam != noTypeParamOrArg) {
Token closer = typeParam.skip(token);
if (optional("(", closer.next!)) {
if (varFinalOrConst != null) {
reportRecoverableError(
varFinalOrConst, codes.messageFunctionTypedParameterVar);
}
beforeInlineFunctionType = token;
token = closer.next!.endGroup!;
next = token.next!;
}
}
} else if (optional("(", next)) {
if (varFinalOrConst != null) {
reportRecoverableError(
varFinalOrConst, codes.messageFunctionTypedParameterVar);
}
beforeInlineFunctionType = token;
token = next.endGroup!;
next = token.next!;
}
if (typeInfo != noType &&
varFinalOrConst != null &&
optional('var', varFinalOrConst)) {
reportRecoverableError(varFinalOrConst, codes.messageTypeAfterVar);
}
Token? endInlineFunctionType;
if (beforeInlineFunctionType != null) {
endInlineFunctionType =
typeParam.parseVariables(beforeInlineFunctionType, this);
listener
.beginFunctionTypedFormalParameter(beforeInlineFunctionType.next!);
token = typeInfo.parseType(beforeType, this);
endInlineFunctionType = parseFormalParametersRequiredOpt(
endInlineFunctionType, MemberKind.FunctionTypedParameter);
Token? question;
if (optional('?', endInlineFunctionType.next!)) {
question = endInlineFunctionType = endInlineFunctionType.next!;
}
listener.endFunctionTypedFormalParameter(
beforeInlineFunctionType, question);
// Generalized function types don't allow inline function types.
// The following isn't allowed:
// int Function(int bar(String x)).
if (inFunctionType) {
reportRecoverableError(beforeInlineFunctionType.next!,
codes.messageInvalidInlineFunctionType);
}
} else if (inFunctionType) {
token = typeInfo.ensureTypeOrVoid(beforeType, this);
} else {
token = typeInfo.parseType(beforeType, this);
}
// Name phase: in generalized function types a positional parameter name
// may be omitted entirely.
Token nameToken;
if (periodAfterThis != null) {
token = periodAfterThis;
}
next = token.next!;
if (inFunctionType &&
!isNamedParameter &&
!next.isKeywordOrIdentifier &&
beforeInlineFunctionType == null) {
nameToken = token.next!;
listener.handleNoName(nameToken);
} else {
nameToken = token = ensureIdentifier(token, nameContext);
if (isNamedParameter && nameToken.lexeme.startsWith("_")) {
reportRecoverableError(nameToken, codes.messagePrivateNamedParameter);
}
}
if (endInlineFunctionType != null) {
token = endInlineFunctionType;
}
// Default value phase: `= expr` or the legacy `: expr` for named
// parameters; report the combinations that are not allowed.
next = token.next!;
String? value = next.stringValue;
Token? initializerStart, initializerEnd;
if ((identical('=', value)) || (identical(':', value))) {
Token equal = next;
initializerStart = equal.next!;
listener.beginFormalParameterDefaultValueExpression();
token = initializerEnd = parseExpression(equal);
next = token.next!;
listener.endFormalParameterDefaultValueExpression();
// TODO(danrubel): Consider removing the last parameter from the
// handleValuedFormalParameter event... it appears to be unused.
listener.handleValuedFormalParameter(equal, next);
if (isMandatoryFormalParameterKind(parameterKind)) {
reportRecoverableError(
equal, codes.messageRequiredParameterWithDefault);
} else if (isOptionalPositionalFormalParameterKind(parameterKind) &&
identical(':', value)) {
reportRecoverableError(
equal, codes.messagePositionalParameterWithEquals);
} else if (inFunctionType ||
memberKind == MemberKind.FunctionTypeAlias ||
memberKind == MemberKind.FunctionTypedParameter) {
reportRecoverableError(equal, codes.messageFunctionTypeDefaultValue);
}
} else {
listener.handleFormalParameterWithoutValue(next);
}
listener.endFormalParameter(thisKeyword, periodAfterThis, nameToken,
initializerStart, initializerEnd, parameterKind, memberKind);
return token;
}
/// ```
/// defaultFormalParameter:
/// normalFormalParameter ('=' expression)?
/// ;
/// ```
Token parseOptionalPositionalParameters(Token token, MemberKind kind) {
Token begin = token = token.next!;
assert(optional('[', token));
listener.beginOptionalFormalParameters(begin);
int parameterCount = 0;
while (true) {
Token next = token.next!;
if (optional(']', next)) {
break;
}
token = parseFormalParameter(
token, FormalParameterKind.optionalPositional, kind);
next = token.next!;
++parameterCount;
if (!optional(',', next)) {
if (!optional(']', next)) {
// Recovery
reportRecoverableError(
next, codes.templateExpectedButGot.withArguments(']'));
// Scanner guarantees a closing bracket.
// Resynchronize by walking forward to the `]` the scanner matched.
next = begin.endGroup!;
while (token.next != next) {
token = token.next!;
}
}
break;
}
token = next;
}
if (parameterCount == 0) {
// `[]` with no parameters inside: report and insert a synthetic
// identifier so one (empty-named) parameter can be parsed.
rewriteAndRecover(
token,
codes.messageEmptyOptionalParameterList,
new SyntheticStringToken(TokenType.IDENTIFIER, '',
token.next!.charOffset, /* _length = */ 0));
token = parseFormalParameter(
token, FormalParameterKind.optionalPositional, kind);
++parameterCount;
}
token = token.next!;
assert(optional(']', token));
listener.endOptionalFormalParameters(parameterCount, begin, token);
return token;
}
/// ```
/// defaultNamedParameter:
/// normalFormalParameter ('=' expression)? |
/// normalFormalParameter (':' expression)?
/// ;
/// ```
Token parseOptionalNamedParameters(Token token, MemberKind kind) {
Token begin = token = token.next!;
assert(optional('{', token));
listener.beginOptionalFormalParameters(begin);
int parameterCount = 0;
while (true) {
Token next = token.next!;
if (optional('}', next)) {
break;
}
token =
parseFormalParameter(token, FormalParameterKind.optionalNamed, kind);
next = token.next!;
++parameterCount;
if (!optional(',', next)) {
if (!optional('}', next)) {
// Recovery
reportRecoverableError(
next, codes.templateExpectedButGot.withArguments('}'));
// Scanner guarantees a closing bracket.
// Resynchronize by walking forward to the `}` the scanner matched.
next = begin.endGroup!;
while (token.next != next) {
token = token.next!;
}
}
break;
}
token = next;
}
if (parameterCount == 0) {
// `{}` with no parameters inside: report and insert a synthetic
// identifier so one (empty-named) parameter can be parsed.
rewriteAndRecover(
token,
codes.messageEmptyNamedParameterList,
new SyntheticStringToken(TokenType.IDENTIFIER, '',
token.next!.charOffset, /* _length = */ 0));
token =
parseFormalParameter(token, FormalParameterKind.optionalNamed, kind);
++parameterCount;
}
token = token.next!;
assert(optional('}', token));
listener.endOptionalFormalParameters(parameterCount, begin, token);
return token;
}
/// ```
/// qualified:
/// identifier qualifiedRest*
/// ;
/// ```
Token parseQualified(Token token, IdentifierContext context,
    IdentifierContext continuationContext) {
  // First segment, then zero or more `.identifier` continuations.
  token = ensureIdentifier(token, context);
  for (;;) {
    if (!optional('.', token.next!)) return token;
    token = parseQualifiedRest(token, continuationContext);
  }
}
/// ```
/// qualifiedRestOpt:
/// qualifiedRest?
/// ;
/// ```
Token parseQualifiedRestOpt(
    Token token, IdentifierContext continuationContext) {
  if (!optional('.', token.next!)) return token;
  return parseQualifiedRest(token, continuationContext);
}
/// ```
/// qualifiedRest:
/// '.' identifier
/// ;
/// ```
Token parseQualifiedRest(Token token, IdentifierContext context) {
  final Token period = token.next!;
  assert(optional('.', period));
  final Token identifier = ensureIdentifier(period, context);
  listener.handleQualified(period);
  return identifier;
}
/// Skips over a `{ ... }` block and returns its closing brace.
Token skipBlock(Token token) {
  // The scanner ensures that `{` always has a closing `}`.
  final Token openBrace = ensureBlock(
      token, /* template = */ null, /* missingBlockName = */ null);
  return openBrace.endGroup!;
}
/// ```
/// enumType:
/// metadata 'enum' id '{' metadata id [',' metadata id]* [','] '}'
/// ;
/// ```
Token parseEnum(Token enumKeyword) {
assert(optional('enum', enumKeyword));
listener.beginUncategorizedTopLevelDeclaration(enumKeyword);
listener.beginEnum(enumKeyword);
Token token =
ensureIdentifier(enumKeyword, IdentifierContext.enumDeclaration);
Token leftBrace = token.next!;
int count = 0;
if (optional('{', leftBrace)) {
token = leftBrace;
while (true) {
Token next = token.next!;
if (optional('}', next)) {
token = next;
if (count == 0) {
// `enum E {}` is invalid; report but keep the declaration.
reportRecoverableError(token, codes.messageEnumDeclarationEmpty);
}
break;
}
// Each value is `metadata identifier`, optionally followed by `,`.
token = parseMetadataStar(token);
token = ensureIdentifier(token, IdentifierContext.enumValueDeclaration);
next = token.next!;
count++;
if (optional(',', next)) {
token = next;
} else if (optional('}', next)) {
token = next;
break;
} else {
// Recovery
Token endGroup = leftBrace.endGroup!;
if (endGroup.isSynthetic) {
// The scanner did not place the synthetic '}' correctly.
token = rewriter.moveSynthetic(token, endGroup);
break;
} else if (next.isIdentifier) {
// If the next token is an identifier, assume a missing comma.
// TODO(danrubel): Consider improved recovery for missing `}`
// both here and when the scanner inserts a synthetic `}`
// for situations such as `enum Letter {a, b Letter e;`.
reportRecoverableError(
next, codes.templateExpectedButGot.withArguments(','));
} else {
// Otherwise assume a missing `}` and exit the loop
reportRecoverableError(
next, codes.templateExpectedButGot.withArguments('}'));
token = leftBrace.endGroup!;
break;
}
}
}
} else {
// No `{` after the enum name: synthesize an empty body.
// TODO(danrubel): merge this error message with missing class/mixin body
leftBrace = ensureBlock(
token, codes.templateExpectedEnumBody, /* missingBlockName = */ null);
token = leftBrace.endGroup!;
}
assert(optional('}', token));
listener.endEnum(enumKeyword, leftBrace, count);
return token;
}
/// Parses what follows the `class` keyword: either a plain class declaration
/// or a named mixin application (`class C = S with M;`), distinguished by
/// whether `=` follows the type parameters.
Token parseClassOrNamedMixinApplication(
    Token? abstractToken, Token classKeyword) {
  assert(optional('class', classKeyword));
  final Token begin = abstractToken ?? classKeyword;
  listener.beginClassOrNamedMixinApplicationPrelude(begin);
  final Token name = ensureIdentifier(
      classKeyword, IdentifierContext.classOrMixinOrExtensionDeclaration);
  final Token token = computeTypeParamOrArg(
          name, /* inDeclaration = */ true, /* allowsVariance = */ true)
      .parseVariables(name, this);
  if (!optional('=', token.next!)) {
    listener.beginClassDeclaration(begin, abstractToken, name);
    return parseClass(token, begin, classKeyword, name.lexeme);
  }
  listener.beginNamedMixinApplication(begin, abstractToken, name);
  return parseNamedMixinApplication(token, begin, classKeyword);
}
/// Parses the remainder of a named mixin application, starting at the `=`:
/// `= <type> with <typeList> ('implements' <typeList>)? ';'`.
Token parseNamedMixinApplication(
    Token token, Token begin, Token classKeyword) {
  final Token equals = token.next!;
  assert(optional('=', equals));
  Token current = computeType(equals, /* required = */ true)
      .ensureTypeNotVoid(equals, this);
  current = parseMixinApplicationRest(current);
  Token? implementsKeyword;
  final Token afterMixins = current.next!;
  if (optional('implements', afterMixins)) {
    implementsKeyword = afterMixins;
    current = parseTypeList(implementsKeyword);
  }
  current = ensureSemicolon(current);
  listener.endNamedMixinApplication(
      begin, classKeyword, equals, implementsKeyword, current);
  return current;
}
/// Parse the portion of a class declaration (not a mixin application) that
/// follows the end of the type parameters.
///
/// ```
/// classDefinition:
/// metadata abstract? 'class' identifier typeParameters?
/// (superclass mixins?)? interfaces?
/// '{' (metadata classMemberDefinition)* '}' |
/// metadata abstract? 'class' mixinApplicationClass
/// ;
/// ```
Token parseClass(
    Token token, Token begin, Token classKeyword, String className) {
  final Token headerStart = token;
  token = parseClassHeaderOpt(token, begin, classKeyword);
  if (!optional('{', token.next!)) {
    // Recovery: malformed header; reparse it tolerating out-of-order
    // clauses, then make sure a body block exists.
    token = parseClassHeaderRecovery(headerStart, begin, classKeyword);
    ensureBlock(token, /* template = */ null, 'class declaration');
  }
  token = parseClassOrMixinOrExtensionBody(
      token, DeclarationKind.Class, className);
  listener.endClassDeclaration(begin, token);
  return token;
}
/// Parses the optional `extends`, `with`, `implements`, and `native` clauses
/// of a class header, in that order.
Token parseClassHeaderOpt(Token token, Token begin, Token classKeyword) {
  token = parseClassExtendsOpt(token);
  token = parseWithClauseOpt(token);
  token = parseClassOrMixinImplementsOpt(token);
  Token? nativeToken;
  final Token next = token.next!;
  if (optional('native', next)) {
    nativeToken = next;
    token = parseNativeClause(token);
  }
  listener.handleClassHeader(begin, classKeyword, nativeToken);
  return token;
}
/// Recover given out-of-order clauses in a class header.
Token parseClassHeaderRecovery(Token token, Token begin, Token classKeyword) {
final Listener primaryListener = listener;
final ClassHeaderRecoveryListener recoveryListener =
new ClassHeaderRecoveryListener();
// Reparse to determine which clauses have already been parsed
// but intercept the events so they are not sent to the primary listener.
listener = recoveryListener;
token = parseClassHeaderOpt(token, begin, classKeyword);
bool hasExtends = recoveryListener.extendsKeyword != null;
bool hasImplements = recoveryListener.implementsKeyword != null;
bool hasWith = recoveryListener.withKeyword != null;
// Update the recovery listener to forward subsequent events
// to the primary listener.
recoveryListener.listener = primaryListener;
// Parse additional out-of-order clauses
Token start;
do {
start = token;
// Check for extraneous token in the middle of a class header.
token = skipUnexpectedTokenOpt(
token, const <String>['extends', 'with', 'implements', '{']);
// During recovery, clauses are parsed in the same order
// and generate the same events as in the parseClassHeader method above.
recoveryListener.clear();
// Recovery: `extend` or `on` used where `extends` was meant.
if (token.next!.isKeywordOrIdentifier &&
const ['extend', 'on'].contains(token.next!.lexeme)) {
reportRecoverableError(token.next!,
codes.templateExpectedInstead.withArguments('extends'));
token = parseClassExtendsSeenExtendsClause(token.next!, token);
} else {
token = parseClassExtendsOpt(token);
}
if (recoveryListener.extendsKeyword != null) {
if (hasExtends) {
reportRecoverableError(
recoveryListener.extendsKeyword!, codes.messageMultipleExtends);
} else {
// First `extends`, but seen after `with`/`implements`: report the
// ordering problem, then accept the clause.
if (hasWith) {
reportRecoverableError(recoveryListener.extendsKeyword!,
codes.messageWithBeforeExtends);
} else if (hasImplements) {
reportRecoverableError(recoveryListener.extendsKeyword!,
codes.messageImplementsBeforeExtends);
}
hasExtends = true;
}
}
token = parseWithClauseOpt(token);
if (recoveryListener.withKeyword != null) {
if (hasWith) {
reportRecoverableError(
recoveryListener.withKeyword!, codes.messageMultipleWith);
} else {
if (hasImplements) {
reportRecoverableError(recoveryListener.withKeyword!,
codes.messageImplementsBeforeWith);
}
hasWith = true;
}
}
token = parseClassOrMixinImplementsOpt(token);
if (recoveryListener.implementsKeyword != null) {
if (hasImplements) {
reportRecoverableError(recoveryListener.implementsKeyword!,
codes.messageMultipleImplements);
} else {
hasImplements = true;
}
}
listener.handleRecoverClassHeader();
// Exit if a class body is detected, or if no progress has been made
} while (!optional('{', token.next!) && start != token);
// Restore the real listener before returning.
listener = primaryListener;
return token;
}
/// Parses an optional `extends` clause; reports a no-type / single-type
/// `handleClassExtends` event when the clause is absent.
Token parseClassExtendsOpt(Token token) {
  // extends <typeNotVoid>
  final Token next = token.next!;
  if (!optional('extends', next)) {
    listener.handleNoType(token);
    listener.handleClassExtends(
        /* extendsKeyword = */ null,
        /* typeCount = */ 1);
    return token;
  }
  return parseClassExtendsSeenExtendsClause(next, token);
}
/// Parses the type(s) following an already-seen `extends` keyword.
Token parseClassExtendsSeenExtendsClause(Token extendsKeyword, Token token) {
  token = computeType(extendsKeyword, /* required = */ true)
      .ensureTypeNotVoid(extendsKeyword, this);
  int typeCount = 1;
  // Error recovery: extends <typeNotVoid>, <typeNotVoid> [...]
  if (optional(',', token.next!)) {
    reportRecoverableError(token.next!, codes.messageMultipleExtends);
    while (optional(',', token.next!)) {
      final Token comma = token.next!;
      token = computeType(comma, /* required = */ true)
          .ensureTypeNotVoid(comma, this);
      ++typeCount;
    }
  }
  listener.handleClassExtends(extendsKeyword, typeCount);
  return token;
}
/// ```
/// implementsClause:
///   'implements' typeName (',' typeName)*
/// ;
/// ```
Token parseClassOrMixinImplementsOpt(Token token) {
  Token? implementsKeyword;
  int interfaceCount = 0;
  if (optional('implements', token.next!)) {
    implementsKeyword = token.next!;
    while (true) {
      // `separator` is the keyword on the first pass, a comma afterwards;
      // in both cases it is the token immediately before the next type.
      Token separator = token.next!;
      token = computeType(separator, /* required = */ true)
          .ensureTypeNotVoid(separator, this);
      ++interfaceCount;
      if (!optional(',', token.next!)) break;
    }
  }
  listener.handleClassOrMixinImplements(implementsKeyword, interfaceCount);
  return token;
}
/// Parse a mixin declaration.
///
/// ```
/// mixinDeclaration:
///   metadata? 'mixin' [SimpleIdentifier] [TypeParameterList]?
///        [OnClause]? [ImplementsClause]? '{' [ClassMember]* '}'
/// ;
/// ```
Token parseMixin(Token mixinKeyword) {
  assert(optional('mixin', mixinKeyword));
  listener.beginClassOrNamedMixinApplicationPrelude(mixinKeyword);
  // The mixin name; a synthetic identifier is substituted on error.
  Token name = ensureIdentifier(
      mixinKeyword, IdentifierContext.classOrMixinOrExtensionDeclaration);
  // Parse the optional type parameter list declared on the mixin.
  Token headerStart = computeTypeParamOrArg(
          name, /* inDeclaration = */ true, /* allowsVariance = */ true)
      .parseVariables(name, this);
  listener.beginMixinDeclaration(mixinKeyword, name);
  Token token = parseMixinHeaderOpt(headerStart, mixinKeyword);
  if (!optional('{', token.next!)) {
    // Recovery: the header is malformed (e.g. out-of-order or duplicated
    // clauses); reparse it leniently, then ensure a `{ ... }` body exists.
    token = parseMixinHeaderRecovery(token, mixinKeyword, headerStart);
    ensureBlock(token, /* template = */ null, 'mixin declaration');
  }
  token = parseClassOrMixinOrExtensionBody(
      token, DeclarationKind.Mixin, name.lexeme);
  listener.endMixinDeclaration(mixinKeyword, token);
  return token;
}
/// Parses the optional `on` and `implements` clauses of a mixin header,
/// then signals the end of the header to the listener.
Token parseMixinHeaderOpt(Token token, Token mixinKeyword) {
  Token result = parseClassOrMixinImplementsOpt(parseMixinOnOpt(token));
  listener.handleMixinHeader(mixinKeyword);
  return result;
}
/// Recovery for a malformed mixin header: reparses the header between
/// [headerStart] and [token], reporting out-of-order, duplicated, or
/// misspelled clauses while still generating the normal listener events
/// for whatever can be salvaged.
Token parseMixinHeaderRecovery(
    Token token, Token mixinKeyword, Token headerStart) {
  final Listener primaryListener = listener;
  final MixinHeaderRecoveryListener recoveryListener =
      new MixinHeaderRecoveryListener();

  // Reparse to determine which clauses have already been parsed
  // but intercept the events so they are not sent to the primary listener.
  listener = recoveryListener;
  token = parseMixinHeaderOpt(headerStart, mixinKeyword);
  bool hasOn = recoveryListener.onKeyword != null;
  bool hasImplements = recoveryListener.implementsKeyword != null;

  // Update the recovery listener to forward subsequent events
  // to the primary listener.
  recoveryListener.listener = primaryListener;

  // Parse additional out-of-order clauses
  Token start;
  do {
    start = token;

    // Check for extraneous token in the middle of a class header.
    token = skipUnexpectedTokenOpt(
        token, const <String>['on', 'implements', '{']);

    // During recovery, clauses are parsed in the same order and
    // generate the same events as in the parseMixinHeaderOpt method above.
    recoveryListener.clear();

    // A misspelled/wrong keyword ('extend' or 'extends') is treated as 'on'.
    if (token.next!.isKeywordOrIdentifier &&
        const ['extend', 'extends'].contains(token.next!.lexeme)) {
      reportRecoverableError(
          token.next!, codes.templateExpectedInstead.withArguments('on'));
      token = parseMixinOn(token);
    } else {
      token = parseMixinOnOpt(token);
    }
    if (recoveryListener.onKeyword != null) {
      if (hasOn) {
        reportRecoverableError(
            recoveryListener.onKeyword!, codes.messageMultipleOnClauses);
      } else {
        if (hasImplements) {
          reportRecoverableError(
              recoveryListener.onKeyword!, codes.messageImplementsBeforeOn);
        }
        hasOn = true;
      }
    }

    token = parseClassOrMixinImplementsOpt(token);
    if (recoveryListener.implementsKeyword != null) {
      if (hasImplements) {
        reportRecoverableError(recoveryListener.implementsKeyword!,
            codes.messageMultipleImplements);
      } else {
        hasImplements = true;
      }
    }

    listener.handleRecoverMixinHeader();

    // Exit if a mixin body is detected, or if no progress has been made
  } while (!optional('{', token.next!) && start != token);

  listener = primaryListener;
  return token;
}
/// ```
/// onClause:
///   'on' typeName (',' typeName)*
/// ;
/// ```
Token parseMixinOnOpt(Token token) {
  if (optional('on', token.next!)) {
    return parseMixinOn(token);
  }
  // No `on` clause present.
  listener.handleMixinOn(/* onKeyword = */ null, /* typeCount = */ 0);
  return token;
}
/// Parses the `on` clause of a mixin, whose keyword is the token after
/// [token]. The keyword may also be `extend`/`extends` during recovery.
Token parseMixinOn(Token token) {
  Token onKeyword = token.next!;
  // During recovery, the [onKeyword] can be "extend" or "extends"
  assert(optional('on', onKeyword) ||
      optional('extends', onKeyword) ||
      onKeyword.lexeme == 'extend');
  int typeCount = 0;
  while (true) {
    // `separator` is the keyword on the first pass, a comma afterwards.
    Token separator = token.next!;
    token = computeType(separator, /* required = */ true)
        .ensureTypeNotVoid(separator, this);
    ++typeCount;
    if (!optional(',', token.next!)) break;
  }
  listener.handleMixinOn(onKeyword, typeCount);
  return token;
}
/// Parse an extension declaration.
///
/// ```
/// 'extension' <identifier>? <typeParameters>? 'on' <type> '?'?
///   `{'
///     <memberDeclaration>*
///   `}'
/// ```
Token parseExtension(Token extensionKeyword) {
  assert(optional('extension', extensionKeyword));
  Token token = extensionKeyword;
  listener.beginExtensionDeclarationPrelude(extensionKeyword);
  // Extensions may be unnamed; `on` in name position means there is no name.
  Token? name = token.next!;
  if (name.isIdentifier && !optional('on', name)) {
    token = name;
    if (name.type.isBuiltIn) {
      // A built-in identifier was used as the name; report but keep it.
      reportRecoverableErrorWithToken(
          token, codes.templateBuiltInIdentifierInDeclaration);
    }
  } else {
    name = null;
  }
  token = computeTypeParamOrArg(token, /* inDeclaration = */ true)
      .parseVariables(token, this);
  listener.beginExtensionDeclaration(extensionKeyword, name);
  Token onKeyword = token.next!;
  if (!optional('on', onKeyword)) {
    // Recovery
    if (optional('extends', onKeyword) ||
        optional('implements', onKeyword) ||
        optional('with', onKeyword)) {
      // A class-like clause keyword was written where `on` is required.
      reportRecoverableError(
          onKeyword, codes.templateExpectedInstead.withArguments('on'));
    } else {
      // `on` is missing entirely; insert a synthetic keyword and continue.
      reportRecoverableError(
          token, codes.templateExpectedAfterButGot.withArguments('on'));
      onKeyword = rewriter.insertSyntheticKeyword(token, Keyword.ON);
    }
  }
  TypeInfo typeInfo = computeType(onKeyword, /* required = */ true);
  token = typeInfo.ensureTypeOrVoid(onKeyword, this);
  if (!optional('{', token.next!)) {
    // Recovery
    Token next = token.next!;
    while (!next.isEof) {
      if (optional(',', next) ||
          optional('extends', next) ||
          optional('implements', next) ||
          optional('on', next) ||
          optional('with', next)) {
        // Report an error and skip `,` or specific keyword
        // optionally followed by an identifier
        reportRecoverableErrorWithToken(next, codes.templateUnexpectedToken);
        token = next;
        next = token.next!;
        if (next.isIdentifier) {
          token = next;
          next = token.next!;
        }
      } else {
        break;
      }
    }
    // Make sure a `{ ... }` body exists before parsing members.
    ensureBlock(token, /* template = */ null, 'extension declaration');
  }
  token = parseClassOrMixinOrExtensionBody(
      token, DeclarationKind.Extension, name?.lexeme);
  listener.endExtensionDeclaration(extensionKeyword, onKeyword, token);
  return token;
}
/// Parses one part of an interpolated string.
///
/// If the next token is not a string token, reports an error and splices
/// in an empty synthetic string so parsing can continue.
Token parseStringPart(Token token) {
  Token part = token.next!;
  if (part.kind == STRING_TOKEN) {
    listener.handleStringPart(part);
    return part;
  }
  // Recovery: replace the unexpected token with an empty string literal.
  reportRecoverableErrorWithToken(part, codes.templateExpectedString);
  part = rewriter.insertToken(
      token, new SyntheticStringToken(TokenType.STRING, '', part.charOffset));
  listener.handleStringPart(part);
  return part;
}
/// Insert a synthetic identifier after the given [token] and create an error
/// message based on the given [context]. Return the synthetic identifier that
/// was inserted.
///
/// If provided, [message] overrides the context's recovery template and
/// [messageOnToken] overrides the token the error is reported on.
Token insertSyntheticIdentifier(Token token, IdentifierContext context,
    {codes.Message? message, Token? messageOnToken}) {
  Token next = token.next!;
  codes.Message diagnostic =
      message ?? context.recoveryTemplate.withArguments(next);
  reportRecoverableError(messageOnToken ?? next, diagnostic);
  return rewriter.insertSyntheticIdentifier(token);
}
/// Parse a simple identifier at the given [token], and return the identifier
/// that was parsed.
///
/// If the token is not an identifier, or is not appropriate for use as an
/// identifier in the given [context], create a synthetic identifier, report
/// an error, and return the synthetic identifier.
Token ensureIdentifier(Token token, IdentifierContext context) {
  // ignore: unnecessary_null_comparison
  assert(context != null);
  Token identifier = token.next!;
  if (identifier.kind == IDENTIFIER_TOKEN) {
    // Fast path: already a plain identifier.
    listener.handleIdentifier(identifier, context);
    return identifier;
  }
  // Delegate to the context, which may report and/or synthesize.
  identifier = context.ensureIdentifier(token, this);
  // ignore: unnecessary_null_comparison
  assert(identifier != null);
  assert(identifier.isKeywordOrIdentifier);
  listener.handleIdentifier(identifier, context);
  return identifier;
}
/// Parse a simple identifier at the given [token], and return the identifier
/// that was parsed.
///
/// If the token is not an identifier, or is not appropriate for use as an
/// identifier in the given [context], create a synthetic identifier, report
/// an error, and return the synthetic identifier.
/// [isRecovered] is passed to [context] which - if true - allows implementers
/// to use the token as an identifier, even if it isn't a valid identifier.
Token ensureIdentifierPotentiallyRecovered(
    Token token, IdentifierContext context, bool isRecovered) {
  // ignore: unnecessary_null_comparison
  assert(context != null);
  Token identifier = token.next!;
  if (identifier.kind == IDENTIFIER_TOKEN) {
    // Fast path: already a plain identifier.
    listener.handleIdentifier(identifier, context);
    return identifier;
  }
  // Delegate to the context, which may accept the token as-is when
  // [isRecovered] is true, or synthesize an identifier otherwise.
  identifier = context.ensureIdentifierPotentiallyRecovered(
      token, this, isRecovered);
  // ignore: unnecessary_null_comparison
  assert(identifier != null);
  assert(identifier.isKeywordOrIdentifier);
  listener.handleIdentifier(identifier, context);
  return identifier;
}
/// Whether [token] is neither EOF nor a token whose canonical string
/// value is [value].
bool notEofOrValue(String value, Token token) {
  if (identical(token.kind, EOF_TOKEN)) return false;
  return !identical(value, token.stringValue);
}
/// Parses an optional type-parameter list following [token].
Token parseTypeVariablesOpt(Token token) {
  TypeParamOrArgInfo typeParams =
      computeTypeParamOrArg(token, /* inDeclaration = */ true);
  return typeParams.parseVariables(token, this);
}
/// Parse a top level field or function.
///
/// This method is only invoked from outside the parser. As a result, this
/// method takes the next token to be consumed rather than the last consumed
/// token and returns the token after the last consumed token rather than the
/// last consumed token.
Token parseTopLevelMember(Token token) {
  Token before = syntheticPreviousToken(token);
  before = parseMetadataStar(before);
  return parseTopLevelMemberImpl(before).next!;
}
/// Check if [token] is the usage of 'late' before a field declaration in a
/// context where it's not legal (i.e. in non-nnbd-mode).
///
/// Returns true when skipping the `late`, any further modifiers, and a
/// recognizable type still leaves an identifier followed by a token that
/// indicates a method or field declaration; returns false otherwise.
bool _isUseOfLateInNonNNBD(Token token) {
  if (token is StringToken && token.value() == "late") {
    // Possible recovery: Figure out if we're in a situation like
    // late final? <type>/var/const name [...]
    // (in non-nnbd-mode) where the late modifier is not legal and thus would
    // normally be parsed as the type.
    Token next = token.next!;
    // Skip modifiers.
    while (next.isModifier) {
      token = next;
      next = next.next!;
    }
    // Parse the (potential) new type.
    TypeInfo typeInfoAlternative = computeType(
        token,
        /* required = */ false,
        /* inDeclaration = */ true);
    token = typeInfoAlternative.skipType(token);
    next = token.next!;
    // We've essentially ignored the 'late' at this point.
    // `token` is (in the good state) the last token of the type,
    // `next` is (in the good state) the name;
    // Are we in a 'good' state?
    if (typeInfoAlternative != noType &&
        next.isIdentifier &&
        indicatesMethodOrField(next.next!)) {
      return true;
    }
  }
  return false;
}
/// Parse a single top level declaration that is a field, method, getter,
/// or setter, disambiguating between them by scanning leading modifiers,
/// the optional return/field type, and the token(s) following the name.
Token parseTopLevelMemberImpl(Token token) {
  Token beforeStart = token;
  Token next = token.next!;
  listener.beginTopLevelMember(next);

  // Recovery: `late` used as a modifier in a non-nnbd library; skip it here
  // and remember it so it can still be forwarded to the listener below.
  Token? skippedNonLateLate;
  if (_isUseOfLateInNonNNBD(next)) {
    skippedNonLateLate = next;
    reportRecoverableErrorWithToken(
        skippedNonLateLate, codes.templateUnexpectedModifierInNonNnbd);
    token = token.next!;
    beforeStart = token;
    next = token.next!;
  }

  // Collect the leading modifiers: external, late, and var/final/const.
  Token? externalToken;
  Token? lateToken;
  Token? varFinalOrConst;
  if (isModifier(next)) {
    if (optional('external', next)) {
      externalToken = token = next;
      next = token.next!;
    }
    if (isModifier(next)) {
      if (optional('final', next)) {
        varFinalOrConst = token = next;
        next = token.next!;
      } else if (optional('var', next)) {
        varFinalOrConst = token = next;
        next = token.next!;
      } else if (optional('const', next)) {
        varFinalOrConst = token = next;
        next = token.next!;
      } else if (optional('late', next)) {
        lateToken = token = next;
        next = token.next!;
        if (isModifier(next) && optional('final', next)) {
          varFinalOrConst = token = next;
          next = token.next!;
        }
      }
      if (isModifier(next)) {
        // Recovery
        if (varFinalOrConst != null &&
            (optional('final', next) ||
                optional('var', next) ||
                optional('const', next))) {
          // If another `var`, `final`, or `const` then fall through
          // to parse that as part of the next top level declaration.
        } else {
          // Out-of-order or duplicated modifiers; let the recovery
          // context sort them out and report errors.
          ModifierRecoveryContext context = new ModifierRecoveryContext(this)
            ..externalToken = externalToken
            ..lateToken = lateToken
            ..varFinalOrConst = varFinalOrConst;
          token = context.parseTopLevelModifiers(token);
          next = token.next!;
          externalToken = context.externalToken;
          lateToken = context.lateToken;
          varFinalOrConst = context.varFinalOrConst;
        }
      }
    }
  }
  if (lateToken == null) {
    // `late` was used as a modifier in non-nnbd mode. An error has been
    // emitted. Still use it as a late token for the remainder in an attempt
    // to avoid cascading errors (and for passing to the listener).
    lateToken = skippedNonLateLate;
  }

  // Compute and skip past the declared type, if any.
  Token beforeType = token;
  TypeInfo typeInfo =
      computeType(token, /* required = */ false, /* inDeclaration = */ true);
  token = typeInfo.skipType(token);
  next = token.next!;

  // Detect a getter/setter: `get`/`set` followed by an identifier.
  Token? getOrSet;
  String? value = next.stringValue;
  if (identical(value, 'get') || identical(value, 'set')) {
    if (next.next!.isIdentifier) {
      getOrSet = token = next;
      next = token.next!;
    }
  }
  bool nameIsRecovered = false;

  // Recovery: If the code is
  // <return type>? <reserved word> <token indicating method or field>
  // take the reserved keyword as the name.
  if (typeInfo == noType &&
      varFinalOrConst == null &&
      isReservedKeyword(next.next!) &&
      indicatesMethodOrField(next.next!.next!)) {
    // Recovery: Use the reserved keyword despite that not being legal.
    typeInfo = computeType(
        token,
        /*required = */ true,
        /* inDeclaration = */ true);
    token = typeInfo.skipType(token);
    next = token.next!;
    nameIsRecovered = true;
  }
  if (next.type != TokenType.IDENTIFIER) {
    value = next.stringValue;
    if (identical(value, 'factory') || identical(value, 'operator')) {
      // `factory` and `operator` can be used as an identifier.
      value = next.next!.stringValue;
      if (getOrSet == null &&
          !identical(value, '(') &&
          !identical(value, '{') &&
          !identical(value, '<') &&
          !identical(value, '=>') &&
          !identical(value, '=') &&
          !identical(value, ';') &&
          !identical(value, ',')) {
        // Recovery
        value = next.stringValue;
        if (identical(value, 'factory')) {
          reportRecoverableError(
              next, codes.messageFactoryTopLevelDeclaration);
        } else {
          reportRecoverableError(next, codes.messageTopLevelOperator);
          if (next.next!.isOperator) {
            token = next;
            next = token.next!;
            if (optional('(', next.next!)) {
              // Give the operator declaration a name so that parsing of
              // its parameter list can proceed.
              rewriter.insertSyntheticIdentifier(
                  next, '#synthetic_identifier_${next.charOffset}');
            }
          }
        }
        listener.handleInvalidTopLevelDeclaration(next);
        return next;
      }
      // Fall through and continue parsing
    } else if (!next.isIdentifier) {
      // Recovery
      if (next.isKeyword) {
        // Fall through to parse the keyword as the identifier.
        // ensureIdentifier will report the error.
      } else if (token == beforeStart) {
        // Ensure we make progress.
        return parseInvalidTopLevelDeclaration(token);
      } else {
        // Looks like a declaration missing an identifier.
        // Insert synthetic identifier and fall through.
        insertSyntheticIdentifier(token, IdentifierContext.methodDeclaration);
        next = token.next!;
      }
    }
  }
  // At this point, `token` is beforeName.

  // Recovery: Inserted ! after method name.
  if (optional('!', next.next!)) {
    next = next.next!;
  }

  // Look past the name to decide between a method and a field.
  next = next.next!;
  value = next.stringValue;
  if (getOrSet != null ||
      identical(value, '(') ||
      identical(value, '{') ||
      identical(value, '<') ||
      identical(value, '.') ||
      identical(value, '=>')) {
    // It's a method/getter/setter; var/final/const and late are invalid.
    if (varFinalOrConst != null) {
      if (optional('var', varFinalOrConst)) {
        reportRecoverableError(varFinalOrConst, codes.messageVarReturnType);
      } else {
        reportRecoverableErrorWithToken(
            varFinalOrConst, codes.templateExtraneousModifier);
      }
    } else if (lateToken != null) {
      reportRecoverableErrorWithToken(
          lateToken, codes.templateExtraneousModifier);
    }
    return parseTopLevelMethod(beforeStart, externalToken, beforeType,
        typeInfo, getOrSet, token.next!, nameIsRecovered);
  }

  // It's a field declaration; `get`/`set` is extraneous here.
  if (getOrSet != null) {
    reportRecoverableErrorWithToken(
        getOrSet, codes.templateExtraneousModifier);
  }
  return parseFields(
      beforeStart,
      /* abstractToken = */ null,
      externalToken,
      /* staticToken = */ null,
      /* covariantToken = */ null,
      lateToken,
      varFinalOrConst,
      beforeType,
      typeInfo,
      token.next!,
      DeclarationKind.TopLevel,
      /* enclosingDeclarationName = */ null,
      nameIsRecovered);
}
/// Parse a comma-separated list of field declarations that share one set
/// of modifiers and one type, ending with `;`, reporting modifier
/// combinations that are invalid for the given declaration [kind].
Token parseFields(
    Token beforeStart,
    Token? abstractToken,
    Token? externalToken,
    Token? staticToken,
    Token? covariantToken,
    Token? lateToken,
    Token? varFinalOrConst,
    Token beforeType,
    TypeInfo typeInfo,
    Token name,
    DeclarationKind kind,
    String? enclosingDeclarationName,
    bool nameIsRecovered) {
  listener.beginFields(beforeStart);

  // Covariant affects only the setter and final fields do not have a setter,
  // unless it's a late field (dartbug.com/40805).
  // Field that are covariant late final with initializers are checked further
  // down.
  if (covariantToken != null && lateToken == null) {
    if (varFinalOrConst != null && optional('final', varFinalOrConst)) {
      reportRecoverableError(covariantToken, codes.messageFinalAndCovariant);
      covariantToken = null;
    }
  }
  // A field needs at least one of: a type, or var/final/const.
  if (typeInfo == noType) {
    if (varFinalOrConst == null) {
      reportRecoverableError(name, codes.messageMissingConstFinalVarOrType);
    }
  } else {
    if (varFinalOrConst != null && optional('var', varFinalOrConst)) {
      reportRecoverableError(varFinalOrConst, codes.messageTypeAfterVar);
    }
  }
  if (abstractToken != null && externalToken != null) {
    reportRecoverableError(abstractToken, codes.messageAbstractExternalField);
  }
  Token token = typeInfo.parseType(beforeType, this);
  assert(token.next == name || token.next!.isEof);

  IdentifierContext context = kind == DeclarationKind.TopLevel
      ? IdentifierContext.topLevelVariableDeclaration
      : IdentifierContext.fieldDeclaration;
  Token firstName = name = ensureIdentifierPotentiallyRecovered(
      token, context, /* isRecovered = */ nameIsRecovered);

  // Check for covariant late final with initializer.
  if (covariantToken != null && lateToken != null) {
    if (varFinalOrConst != null && optional('final', varFinalOrConst)) {
      Token next = name.next!;
      if (optional('=', next)) {
        reportRecoverableError(covariantToken,
            codes.messageFinalAndCovariantLateWithInitializer);
        covariantToken = null;
      }
    }
  }

  // Parse the first field and any additional `, <name> [= <init>]` fields.
  int fieldCount = 1;
  token = parseFieldInitializerOpt(name, name, lateToken, abstractToken,
      externalToken, varFinalOrConst, kind, enclosingDeclarationName);
  while (optional(',', token.next!)) {
    name = ensureIdentifier(token.next!, context);
    token = parseFieldInitializerOpt(name, name, lateToken, abstractToken,
        externalToken, varFinalOrConst, kind, enclosingDeclarationName);
    ++fieldCount;
  }

  Token semicolon = token.next!;
  if (optional(';', semicolon)) {
    token = semicolon;
  } else {
    // Recovery
    if (kind == DeclarationKind.TopLevel &&
        beforeType.next!.isIdentifier &&
        beforeType.next!.lexeme == 'extension') {
      // Looks like an extension method
      // TODO(danrubel): Remove when extension methods are enabled by default
      // because then 'extension' will be interpreted as a built-in
      // and this code will never be executed
      reportRecoverableError(
          beforeType.next!,
          codes.templateExperimentNotEnabled
              .withArguments('extension-methods', '2.6'));
      token = rewriter.insertSyntheticToken(token, TokenType.SEMICOLON);
    } else {
      token = ensureSemicolon(token);
    }
  }

  // Notify the listener with the event matching the declaration kind.
  switch (kind) {
    case DeclarationKind.TopLevel:
      assert(abstractToken == null);
      listener.endTopLevelFields(externalToken, staticToken, covariantToken,
          lateToken, varFinalOrConst, fieldCount, beforeStart.next!, token);
      break;
    case DeclarationKind.Class:
      listener.endClassFields(
          abstractToken,
          externalToken,
          staticToken,
          covariantToken,
          lateToken,
          varFinalOrConst,
          fieldCount,
          beforeStart.next!,
          token);
      break;
    case DeclarationKind.Mixin:
      listener.endMixinFields(
          abstractToken,
          externalToken,
          staticToken,
          covariantToken,
          lateToken,
          varFinalOrConst,
          fieldCount,
          beforeStart.next!,
          token);
      break;
    case DeclarationKind.Extension:
      if (abstractToken != null) {
        reportRecoverableError(
            firstName, codes.messageAbstractExtensionField);
      }
      if (staticToken == null && externalToken == null) {
        // Extensions have no instance state; instance fields are an error.
        reportRecoverableError(
            firstName, codes.messageExtensionDeclaresInstanceField);
      }
      listener.endExtensionFields(
          abstractToken,
          externalToken,
          staticToken,
          covariantToken,
          lateToken,
          varFinalOrConst,
          fieldCount,
          beforeStart.next!,
          token);
      break;
  }
  return token;
}
/// Parse a top level method, getter, or setter once the modifiers, return
/// type ([typeInfo] starting after [beforeType]), and optional `get`/`set`
/// keyword ([getOrSet]) have already been identified.
Token parseTopLevelMethod(
    Token beforeStart,
    Token? externalToken,
    Token beforeType,
    TypeInfo typeInfo,
    Token? getOrSet,
    Token name,
    bool nameIsRecovered) {
  listener.beginTopLevelMethod(beforeStart, externalToken);

  Token token = typeInfo.parseType(beforeType, this);
  assert(token.next == (getOrSet ?? name) || token.next!.isEof);
  name = ensureIdentifierPotentiallyRecovered(
      getOrSet ?? token,
      IdentifierContext.topLevelFunctionDeclaration,
      /* isRecovered = */ nameIsRecovered);

  bool isGetter = false;
  if (getOrSet == null) {
    token = parseMethodTypeVar(name);
  } else {
    isGetter = optional("get", getOrSet);
    token = name;
    // Getters and setters cannot declare type variables.
    listener.handleNoTypeVariables(token.next!);
  }
  token = parseGetterOrFormalParameters(
      token, name, isGetter, MemberKind.TopLevelMethod);
  // Save/restore the async state around the body so nested parses work.
  AsyncModifier savedAsyncModifier = asyncState;
  Token asyncToken = token.next!;
  token = parseAsyncModifierOpt(token);
  // Setters must be synchronous.
  if (getOrSet != null && !inPlainSync && optional("set", getOrSet)) {
    reportRecoverableError(asyncToken, codes.messageSetterNotSync);
  }
  // TODO(paulberry): code below is slightly hacky to allow for implementing
  // the feature "Infer non-nullability from local boolean variables"
  // (https://github.com/dart-lang/language/issues/1274). Since the version
  // of Dart that is used for presubmit checks lags slightly behind master,
  // we need the code to analyze correctly regardless of whether local boolean
  // variables cause promotion or not. Once the version of dart used for
  // presubmit checks has been updated, this can be cleaned up to:
  //   bool isExternal = externalToken != null;
  //   if (externalToken != null && !optional(';', token.next!)) {
  //     reportRecoverableError(
  //         externalToken, codes.messageExternalMethodWithBody);
  //   }
  bool isExternal = false;
  if (externalToken != null) {
    isExternal = true;
    // External declarations must not have a body.
    if (!optional(';', token.next!)) {
      reportRecoverableError(
          externalToken, codes.messageExternalMethodWithBody);
    }
  }
  token = parseFunctionBody(
      token, /* ofFunctionExpression = */ false, isExternal);
  asyncState = savedAsyncModifier;
  listener.endTopLevelMethod(beforeStart.next!, getOrSet, token);
  return token;
}
/// Parses the type parameters (if any) that follow a method [name].
///
/// Recovers from a stray `!` after the name and from a stray `=` after the
/// type-parameter list by reporting them as unexpected tokens.
Token parseMethodTypeVar(Token name) {
  Token next = name.next!;
  if (optional('!', next)) {
    // Recovery: step over an inserted `!` after the method name.
    name = next;
    reportRecoverableErrorWithToken(name, codes.templateUnexpectedToken);
  }
  if (!optional('<', name.next!)) {
    // No type parameter list present.
    return noTypeParamOrArg.parseVariables(name, this);
  }
  Token token = computeTypeParamOrArg(name, /* inDeclaration = */ true)
      .parseVariables(name, this);
  next = token.next!;
  if (optional('=', next)) {
    // Recovery: report a stray `=` following the type parameters.
    token = next;
    reportRecoverableErrorWithToken(token, codes.templateUnexpectedToken);
  }
  return token;
}
/// Parse the `= <expression>` initializer of a field if present; otherwise
/// report errors for `const` fields (and non-late, non-abstract,
/// non-external top level `final` fields) that require an initializer.
Token parseFieldInitializerOpt(
    Token token,
    Token name,
    Token? lateToken,
    Token? abstractToken,
    Token? externalToken,
    Token? varFinalOrConst,
    DeclarationKind kind,
    String? enclosingDeclarationName) {
  // A member may not have the same name as its enclosing declaration.
  if (name.lexeme == enclosingDeclarationName) {
    reportRecoverableError(name, codes.messageMemberWithSameNameAsClass);
  }
  Token next = token.next!;
  if (optional('=', next)) {
    Token assignment = next;
    listener.beginFieldInitializer(next);
    token = parseExpression(next);
    listener.endFieldInitializer(assignment, token.next!);
  } else {
    // No initializer. Synthetic names already produced an error, so only
    // report the missing-initializer cases for real names.
    if (varFinalOrConst != null && !name.isSynthetic) {
      if (optional("const", varFinalOrConst)) {
        reportRecoverableError(
            name,
            codes.templateConstFieldWithoutInitializer
                .withArguments(name.lexeme));
      } else if (kind == DeclarationKind.TopLevel &&
          optional("final", varFinalOrConst) &&
          lateToken == null &&
          abstractToken == null &&
          externalToken == null) {
        reportRecoverableError(
            name,
            codes.templateFinalFieldWithoutInitializer
                .withArguments(name.lexeme));
      }
    }
    listener.handleNoFieldInitializer(token.next!);
  }
  return token;
}
/// Parses an optional `= <expression>` variable initializer.
Token parseVariableInitializerOpt(Token token) {
  Token next = token.next!;
  if (!optional('=', next)) {
    listener.handleNoVariableInitializer(token);
    return token;
  }
  Token assignment = next;
  listener.beginVariableInitializer(assignment);
  token = parseExpression(assignment);
  listener.endVariableInitializer(assignment);
  return token;
}
/// Parses constructor initializers when the next token is `:`; otherwise
/// notifies the listener that there are none.
Token parseInitializersOpt(Token token) {
  Token next = token.next!;
  if (optional(':', next)) {
    return parseInitializers(next);
  }
  listener.handleNoInitializers();
  return token;
}
/// ```
/// initializers:
///   ':' initializerListEntry (',' initializerListEntry)*
/// ;
/// ```
Token parseInitializers(Token token) {
  Token begin = token;
  assert(optional(':', begin));
  listener.beginInitializers(begin);
  int count = 0;
  // Function expressions are not permitted directly inside initializers;
  // restore the previous setting when done.
  bool old = mayParseFunctionExpressions;
  mayParseFunctionExpressions = false;
  Token next = begin;
  while (true) {
    token = parseInitializer(next);
    ++count;
    next = token.next!;
    if (!optional(',', next)) {
      // Recovery: Found an identifier which could be
      // 1) missing preceding `,` thus it's another initializer, or
      // 2) missing preceding `;` thus it's a class member, or
      // 3) missing preceding '{' thus it's a statement
      if (optional('assert', next)) {
        next = next.next!;
        if (!optional('(', next)) {
          break;
        }
        // Looks like assert expression ... fall through to insert comma
      } else if (!next.isIdentifier && !optional('this', next)) {
        // An identifier that wasn't an initializer. Break.
        break;
      } else {
        if (optional('this', next)) {
          next = next.next!;
          if (!optional('.', next)) {
            break;
          }
          next = next.next!;
          if (!next.isIdentifier && !optional('assert', next)) {
            break;
          }
        }
        next = next.next!;
        if (!optional('=', next)) {
          break;
        }
        // Looks like field assignment... fall through to insert comma
      }
      // TODO(danrubel): Consider enhancing this to indicate that we are
      // expecting one of `,` or `;` or `{`
      reportRecoverableError(
          token, codes.templateExpectedAfterButGot.withArguments(','));
      next = rewriter.insertSyntheticToken(token, TokenType.COMMA);
    }
  }
  mayParseFunctionExpressions = old;
  listener.endInitializers(count, begin, token.next!);
  return token;
}
/// ```
/// initializerListEntry: