blob: 4f37e6e0afdf6b971f2b2d345ed2915852a7bf48 [file] [log] [blame]
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// @docImport '../scanner/scanner.dart';
/// @docImport 'util.dart';
library _fe_analyzer_shared.parser.parser;
import '../experiments/flags.dart';
import '../messages/codes.dart' as codes;
import '../scanner/scanner.dart' show ErrorToken, Token;
import '../scanner/token.dart'
show
ASSIGNMENT_PRECEDENCE,
BITWISE_OR_PRECEDENCE,
BeginToken,
CASCADE_PRECEDENCE,
EQUALITY_PRECEDENCE,
Keyword,
MULTIPLICATIVE_PRECEDENCE,
POSTFIX_PRECEDENCE,
PREFIX_PRECEDENCE,
RELATIONAL_PRECEDENCE,
SELECTOR_PRECEDENCE,
StringToken,
SyntheticBeginToken,
SyntheticKeywordToken,
SyntheticStringToken,
SyntheticToken,
TokenType;
import '../scanner/token_constants.dart'
show
BANG_EQ_EQ_TOKEN,
COMMA_TOKEN,
DOUBLE_TOKEN,
EOF_TOKEN,
EQ_EQ_EQ_TOKEN,
EQ_TOKEN,
FUNCTION_TOKEN,
HASH_TOKEN,
HEXADECIMAL_TOKEN,
IDENTIFIER_TOKEN,
INT_TOKEN,
KEYWORD_TOKEN,
LT_TOKEN,
OPEN_CURLY_BRACKET_TOKEN,
OPEN_PAREN_TOKEN,
OPEN_SQUARE_BRACKET_TOKEN,
SEMICOLON_TOKEN,
STRING_INTERPOLATION_IDENTIFIER_TOKEN,
STRING_INTERPOLATION_TOKEN,
STRING_TOKEN;
import 'assert.dart' show Assert;
import 'async_modifier.dart' show AsyncModifier;
import 'block_kind.dart';
import 'constructor_reference_context.dart' show ConstructorReferenceContext;
import 'declaration_kind.dart' show DeclarationHeaderKind, DeclarationKind;
import 'directive_context.dart';
import 'formal_parameter_kind.dart' show FormalParameterKind;
import 'forwarding_listener.dart' show ForwardingListener, NullListener;
import 'identifier_context.dart'
show
IdentifierContext,
looksLikeExpressionStart,
looksLikePatternStart,
okNextValueInFormalParameter;
import 'identifier_context_impl.dart'
show looksLikeStartOfNextTopLevelDeclaration;
import 'listener.dart' show Listener;
import 'literal_entry_info.dart'
show
LiteralEntryInfo,
computeLiteralEntry,
looksLikeLiteralEntry,
simpleEntry;
import 'loop_state.dart' show LoopState;
import 'member_kind.dart' show MemberKind;
import 'modifier_context.dart' show ModifierContext, isModifier;
import 'recovery_listeners.dart'
show
DeclarationHeaderRecoveryListener,
ImportRecoveryListener,
MixinHeaderRecoveryListener;
import 'token_stream_rewriter.dart'
show
TokenStreamRewriter,
TokenStreamRewriterImpl,
UndoableTokenStreamRewriter;
import 'type_info.dart'
show
TypeInfo,
TypeParamOrArgInfo,
computeMethodTypeArguments,
computeType,
computeTypeParamOrArg,
computeVariablePatternType,
illegalPatternIdentifiers,
isValidNonRecordTypeReference,
noType,
noTypeParamOrArg;
import 'type_info_impl.dart';
import 'util.dart'
show
findNonZeroLengthToken,
findPreviousNonZeroLengthToken,
isOneOf,
isOneOfOrEof,
optional;
/// An event generating parser of Dart programs. This parser expects all tokens
/// in a linked list (aka a token stream).
///
/// The class [Scanner] is used to generate a token stream. See the file
/// [scanner.dart](../scanner.dart).
///
/// Subclasses of the class [Listener] are used to listen to events.
///
/// Most methods of this class belong in one of four major categories: parse
/// methods, peek methods, ensure methods, and skip methods.
///
/// Parse methods all have the prefix `parse`, generate events
/// (by calling methods on [listener]), and return the next token to parse.
/// Some exceptions to this last point are methods such as [parseFunctionBody]
/// and [parseClassOrMixinOrExtensionBody] which return the last token parsed
/// rather than the next token to be parsed.
/// Parse methods are generally named `parseGrammarProductionSuffix`.
/// The suffix can be one of `opt`, or `star`.
/// `opt` means zero or one matches, `star` means zero or more matches.
/// For example, [parseMetadataStar] corresponds to this grammar snippet:
/// `metadata*`, and [parseArgumentsOpt] corresponds to: `arguments?`.
///
/// Peek methods all have the prefix `peek`, do not generate events
/// (except for errors) and may return null.
///
/// Ensure methods all have the prefix `ensure` and may generate events.
/// They return the current token, or insert and return a synthetic token
/// if the current token does not match. For example,
/// [ensureSemicolon] returns the current token if the current token is a
/// semicolon, otherwise inserts a synthetic semicolon in the token stream
/// before the current token and then returns that new synthetic token.
///
/// Skip methods are like parse methods, but all have the prefix `skip`
/// and skip over some parts of the file being parsed.
/// Typically, skip methods generate an event for the structure being skipped,
/// but not for its substructures.
///
/// ## Current Token
///
/// The current token is always to be found in a formal parameter named
/// `token`. This parameter should be the first as this increases the chance
/// that a compiler will place it in a register.
///
/// ## Implementation Notes
///
/// The parser assumes that keywords, built-in identifiers, and other special
/// words (pseudo-keywords) are all canonicalized. To extend the parser to
/// recognize a new identifier, one should modify
/// [keyword.dart](../scanner/keyword.dart) and ensure the identifier is added
/// to the keyword table.
///
/// As a consequence of this, one should not use `==` to compare strings in the
/// parser. One should favor the method [optional] to recognize keywords or
/// identifiers. In some cases, it's possible to compare a token's `stringValue`
/// using [identical], but normally [optional] will suffice.
///
/// Historically, we over-used identical, and when identical is used on objects
/// other than strings, it can often be replaced by `==`.
///
/// ## Flexibility, Extensibility, and Specification
///
/// The parser is designed to be flexible and extensible. Its methods are
/// designed to be overridden in subclasses, so it can be extended to handle
/// unspecified language extension or experiments while everything in this file
/// attempts to follow the specification (unless when it interferes with error
/// recovery).
///
/// We achieve flexibility, extensible, and specification compliance by
/// following a few rules-of-thumb:
///
/// 1. All methods in the parser should be public.
///
/// 2. The methods follow the specified grammar, and do not implement custom
/// extensions, for example, `native`.
///
/// 3. The parser doesn't rewrite the token stream (when dealing with `>>`).
///
/// ### Implementing Extensions
///
/// For various reasons, some Dart language implementations have used
/// custom/unspecified extensions to the Dart grammar. Examples of this
/// includes diet parsing, patch files, `native` keyword, and generic
/// comments. This class isn't supposed to implement any of these
/// features. Instead it provides hooks for those extensions to be implemented
/// in subclasses or listeners. Let's examine how diet parsing and `native`
/// keyword is currently supported by Fasta.
///
/// #### Legacy Implementation of `native` Keyword
///
/// TODO(ahe,danrubel): Remove this section.
///
/// Both dart2js and the Dart VM have used the `native` keyword to mark methods
/// that couldn't be implemented in the Dart language and needed to be
/// implemented in JavaScript or C++, respectively. An example of the syntax
/// extension used by the Dart VM is:
///
/// nativeFunction() native "NativeFunction";
///
/// When attempting to parse this function, the parser eventually calls
/// [parseFunctionBody]. This method will report an unrecoverable error to the
/// listener with the code [codes.codeExpectedFunctionBody]. The listener can
/// then look at the error code and the token and use the methods in
/// [native_support.dart](native_support.dart) to parse the native syntax.
///
/// #### Implementation of Diet Parsing
///
/// We call it _diet_ _parsing_ when the parser skips parts of a file. Both
/// dart2js and the Dart VM have been relying on this from early on as it allows
/// them to more quickly compile small programs that use small parts of big
/// libraries. It's also become an integrated part of how Fasta builds up
/// outlines before starting to parse method bodies.
///
/// When looking through this parser, you'll find a number of unused methods
/// starting with `skip`. These methods are only used by subclasses, such as
/// [ClassMemberParser](class_member_parser.dart) and
/// [TopLevelParser](top_level_parser.dart). These methods violate the
/// principle above about following the specified grammar, and originally lived
/// in subclasses. However, we realized that these methods were so widely used
/// and hard to maintain in subclasses, that it made sense to move them here.
///
/// ### Specification and Error Recovery
///
/// To improve error recovery, the parser will inform the listener of
/// recoverable errors and continue to parse. An example of a recoverable
/// error is:
///
/// Error: Asynchronous for-loop can only be used in 'async' or 'async*'...
/// main() { await for (var x in []) {} }
/// ^^^^^
///
/// ### Legacy Error Recovery
///
/// What's described below will be phased out in preference of the parser
/// reporting and recovering from syntax errors. The motivation for this is
/// that we have multiple listeners that use the parser, and this will ensure
/// consistency.
///
/// For unrecoverable errors, the parser will ask the listener for help to
/// recover from the error. We haven't made much progress on these kinds of
/// errors, so in most cases, the parser aborts by skipping to the end of file.
///
/// Historically, this parser has been rather lax in what it allows, and
/// deferred the enforcement of some syntactical rules to subsequent phases. It
/// doesn't matter how we got there, only that we've identified that it's
/// easier if the parser reports as many errors it can, but informs the
/// listener if the error is recoverable or not.
class Parser {
  /// The listener notified of all parse events.
  Listener listener;

  /// The URI of the source being parsed, as reported by the [listener].
  Uri? get uri => listener.uri;

  /// Whether a function expression may be parsed at the current position.
  ///
  /// NOTE(review): presumably toggled off while parsing constructs where a
  /// `{` must not be taken as a function body — confirm against the callers
  /// elsewhere in this file (not visible in this chunk).
  bool mayParseFunctionExpressions = true;

  /// Represents parser state: what asynchronous syntax is allowed in the
  /// function being currently parsed. In rare situations, this can be set by
  /// external clients, for example, to parse an expression outside a function.
  AsyncModifier asyncState = AsyncModifier.Sync;

  // TODO(danrubel): The [loopState] and associated functionality in the
  // [Parser] duplicates work that the resolver needs to do when resolving
  // break/continue targets. Long term, this state and functionality will be
  // removed from the [Parser] class and the resolver will be responsible
  // for generating all break/continue error messages.

  /// Represents parser state: whether parsing outside a loop,
  /// inside a loop, or inside a switch. This is used to determine whether
  /// break and continue statements are allowed.
  LoopState loopState = LoopState.OutsideLoop;

  /// A rewriter for inserting synthetic tokens.
  /// Access using [rewriter] for lazy initialization.
  TokenStreamRewriter? cachedRewriter;

  /// The token stream rewriter, created lazily on first use and cleared at
  /// the end of a compilation unit to avoid retaining memory.
  TokenStreamRewriter get rewriter {
    return cachedRewriter ??= new TokenStreamRewriterImpl();
  }

  /// If `true`, syntax like `foo<bar>.baz()` is parsed like an implicit
  /// creation expression. Otherwise it is parsed as a explicit instantiation
  /// followed by an invocation.
  ///
  /// With the constructor-tearoffs experiment, such syntax can lead to a valid
  /// expression that is _not_ an implicit creation expression, and the parser
  /// should therefore not special case the syntax but instead let listeners
  /// resolve the expression by the seen selectors.
  ///
  /// Use this flag to test that the implementation doesn't need the special
  /// casing.
  // TODO(johnniwinther): Remove this when both analyzer and CFE can parse the
  // implicit create expression without the special casing.
  final bool useImplicitCreationExpression;

  /// Indicates whether pattern parsing is enabled.
  ///
  /// This ensures that we don't regress non-pattern functionality while pattern
  /// parsing logic is being developed. Eventually we will want to turn this
  /// functionality on permanently, and leave it to the client to report an
  /// appropriate error if a pattern is used while patterns are not enabled.
  /// TODO(paulberry): remove this flag when appropriate.
  final bool allowPatterns;

  /// Indicates whether the last pattern parsed is allowed inside unary
  /// patterns. This is set by [parsePrimaryPattern] and [parsePattern].
  ///
  /// TODO(paulberry): once this package can safely use Dart 3.0 features,
  /// remove this boolean and instead return a record (Token, bool) from the
  /// [parsePrimaryPattern] and [parsePattern].
  bool isLastPatternAllowedInsideUnaryPattern = false;

  /// Whether the `enhanced-parts` feature is enabled.
  final bool enableFeatureEnhancedParts;

  /// Creates a parser that reports events to [listener].
  Parser(
    this.listener, {
    this.useImplicitCreationExpression = true,
    this.allowPatterns = false,
    this.enableFeatureEnhancedParts = false,
  }) : assert(listener != null); // ignore:unnecessary_null_comparison
/// Executes [callback]; however if `this` is the `TestParser` (from
/// `pkg/front_end/test/parser_test_parser.dart`) then no output is printed
/// during its execution.
///
/// This is sometimes necessary inside `assert` statements, to ensure that the
/// output of `TestParser` is the same regardless of whether assertions are
/// enabled.
T inhibitPrinting<T>(T Function() callback) {
return callback();
}
bool get inGenerator {
return asyncState == AsyncModifier.AsyncStar ||
asyncState == AsyncModifier.SyncStar;
}
bool get inAsync {
return asyncState == AsyncModifier.Async ||
asyncState == AsyncModifier.AsyncStar;
}
bool get inPlainSync => asyncState == AsyncModifier.Sync;
bool get isBreakAllowed => loopState != LoopState.OutsideLoop;
bool get isContinueAllowed => loopState == LoopState.InsideLoop;
bool get isContinueWithLabelAllowed => loopState != LoopState.OutsideLoop;
  /// Parse a compilation unit.
  ///
  /// This method is only invoked from outside the parser. As a result, this
  /// method takes the next token to be consumed rather than the last consumed
  /// token and returns the token after the last consumed token rather than the
  /// last consumed token.
  ///
  /// ```
  /// libraryDefinition:
  ///   scriptTag?
  ///   libraryName?
  ///   importOrExport*
  ///   partDirective*
  ///   topLevelDefinition*
  /// ;
  ///
  /// partDeclaration:
  ///   partHeader topLevelDefinition*
  /// ;
  /// ```
  Token parseUnit(Token token) {
    // Skip over error tokens and report them at the end
    // so that the parser has the chance to adjust the error location.
    Token errorToken = token;
    token = skipErrorTokens(errorToken);
    listener.beginCompilationUnit(token);
    // Number of top-level declarations seen, reported to the listener in
    // endCompilationUnit below.
    int count = 0;
    DirectiveContext directiveState = new DirectiveContext(
      enableFeatureEnhancedParts: enableFeatureEnhancedParts,
    );
    token = syntheticPreviousToken(token);
    // An optional script tag (`#!...`) may only appear first.
    if (identical(token.next!.type, TokenType.SCRIPT_TAG)) {
      directiveState.checkScriptTag(this, token.next!);
      token = parseScript(token);
    }
    while (!token.next!.isEof) {
      final Token start = token.next!;
      token = parseTopLevelDeclarationImpl(token, directiveState);
      listener.endTopLevelDeclaration(token);
      count++;
      if (start == token.next!) {
        // Recovery:
        // If progress has not been made reaching the end of the token stream,
        // then report an error and skip the current token.
        token = token.next!;
        listener.beginMetadataStar(token);
        listener.endMetadataStar(/* count = */ 0);
        reportRecoverableErrorWithToken(
            token, codes.templateExpectedDeclaration);
        listener.handleInvalidTopLevelDeclaration(token);
        listener.endTopLevelDeclaration(token);
        count++;
      }
    }
    token = token.next!;
    assert(token.isEof);
    // Error tokens skipped at the top are reported only now, after the
    // parser has had the chance to adjust the error location.
    reportAllErrorTokens(errorToken);
    listener.endCompilationUnit(count, token);
    // Clear fields that could lead to memory leak.
    cachedRewriter = null;
    return token;
  }
  /// This method exists for analyzer compatibility only
  /// and will be removed once analyzer/fasta integration is complete.
  ///
  /// Similar to [parseUnit], this method parses a compilation unit,
  /// but stops when it reaches the first declaration or EOF.
  ///
  /// This method is only invoked from outside the parser. As a result, this
  /// method takes the next token to be consumed rather than the last consumed
  /// token and returns the token after the last consumed token rather than the
  /// last consumed token.
  Token parseDirectives(Token token) {
    listener.beginCompilationUnit(token);
    // NOTE(review): unlike parseUnit, `count` is never incremented here, so
    // endCompilationUnit is always told 0 declarations — confirm this is
    // intentional for the directives-only use case.
    int count = 0;
    DirectiveContext directiveState = new DirectiveContext(
      enableFeatureEnhancedParts: enableFeatureEnhancedParts,
    );
    token = syntheticPreviousToken(token);
    while (!token.next!.isEof) {
      final Token start = token.next!;
      final String? nextValue = start.next!.stringValue;
      // If a built-in keyword is being used as function name, then stop.
      if (identical(nextValue, '.') ||
          identical(nextValue, '<') ||
          identical(nextValue, '(')) {
        break;
      }
      if (identical(token.next!.type, TokenType.SCRIPT_TAG)) {
        directiveState.checkScriptTag(this, token.next!);
        token = parseScript(token);
      } else {
        token = parseMetadataStar(token);
        Token keyword = token.next!;
        final String? value = keyword.stringValue;
        if (identical(value, 'import')) {
          directiveState.checkImport(this, keyword);
          token = parseImport(keyword);
        } else if (identical(value, 'export')) {
          directiveState.checkExport(this, keyword);
          token = parseExport(keyword);
        } else if (identical(value, 'library')) {
          directiveState.checkLibrary(this, keyword);
          token = parseLibraryName(keyword);
        } else if (identical(value, 'part')) {
          token = parsePartOrPartOf(keyword, directiveState);
        } else if (identical(value, ';')) {
          token = start;
          listener.handleDirectivesOnly();
        } else {
          // First non-directive declaration reached: stop here.
          listener.handleDirectivesOnly();
          break;
        }
      }
      listener.endTopLevelDeclaration(token);
    }
    token = token.next!;
    listener.endCompilationUnit(count, token);
    // Clear fields that could lead to memory leak.
    cachedRewriter = null;
    return token;
  }
/// Parse a top-level declaration.
///
/// This method is only invoked from outside the parser. As a result, this
/// method takes the next token to be consumed rather than the last consumed
/// token and returns the token after the last consumed token rather than the
/// last consumed token.
Token parseTopLevelDeclaration(Token token) {
token = parseTopLevelDeclarationImpl(
syntheticPreviousToken(token), /* directiveState = */ null);
listener.endTopLevelDeclaration(token);
return token.next!;
}
  /// ```
  /// topLevelDefinition:
  ///   classDefinition |
  ///   enumType |
  ///   typeAlias |
  ///   'external'? functionSignature ';' |
  ///   'external'? getterSignature ';' |
  ///   'external'? setterSignature ';' |
  ///   functionSignature functionBody |
  ///   returnType? 'get' identifier functionBody |
  ///   returnType? 'set' identifier formalParameterList functionBody |
  ///   ('final' | 'const') type? staticFinalDeclarationList ';' |
  ///   variableDeclaration ';'
  /// ;
  /// ```
  Token parseTopLevelDeclarationImpl(
      Token token, DirectiveContext? directiveState) {
    token = parseMetadataStar(token);
    Token next = token.next!;
    // Fast path: the declaration starts directly with a top-level keyword
    // (no leading modifiers).
    if (next.isTopLevelKeyword) {
      return parseTopLevelKeywordDeclaration(
          /* beginToken = */ token.next!,
          /* modifierStart = */ token,
          /* keyword = */ next,
          /* macroToken = */ null,
          /* sealedToken = */ null,
          /* baseToken = */ null,
          /* interfaceToken = */ null,
          directiveState);
    }
    final Token beginToken = token.next!;
    Token modifierStart = token;
    // Skip modifiers to find a top level keyword or identifier
    if (next.isModifier) {
      if (optional('var', next) ||
          optional('late', next) ||
          (optional('final', next) &&
              (!optional('class', next.next!) &&
                  !optional('mixin', next.next!) &&
                  !optional('enum', next.next!))) ||
          // Ignore using 'final' as a modifier for a class, a mixin, or an
          // enum, but allow in other contexts.
          (optional('const', next) && !optional('class', next.next!))) {
        // Ignore `const class` so that it is reported below as an invalid
        // modifier on a class.
        // `var`/`late`/`final`/`const` here must start a top-level variable
        // (or invalid member), not a type declaration.
        directiveState?.checkDeclaration();
        return parseTopLevelMemberImpl(token);
      }
      while (token.next!.isModifier) {
        token = token.next!;
      }
    }
    next = token.next!;
    // Contextual (identifier-like) class/mixin/enum modifiers that may
    // precede a top-level keyword.
    Token? macroToken;
    Token? sealedToken;
    Token? baseToken;
    Token? interfaceToken;
    if (next.isIdentifier &&
        next.lexeme == 'macro' &&
        optional('class', next.next!)) {
      macroToken = next;
      next = next.next!;
    } else if (next.isIdentifier && optional('sealed', next)) {
      sealedToken = next;
      if (optional('class', next.next!) ||
          optional('mixin', next.next!) ||
          optional('enum', next.next!)) {
        next = next.next!;
      } else if (optional('abstract', next.next!) &&
          optional('class', next.next!.next!)) {
        // Defer error handling of sealed abstract to
        // [parseClassOrNamedMixinApplication] after the abstract is parsed.
        modifierStart = next;
        next = next.next!.next!;
      }
    } else if (next.isIdentifier && optional('base', next)) {
      baseToken = next;
      if (optional('class', next.next!) ||
          optional('mixin', next.next!) ||
          optional('enum', next.next!)) {
        next = next.next!;
      }
    } else if (next.isIdentifier && optional('interface', next)) {
      interfaceToken = next;
      if (optional('class', next.next!) ||
          optional('mixin', next.next!) ||
          optional('enum', next.next!)) {
        next = next.next!;
      }
      // TODO(kallentu): Handle incorrect ordering of modifiers.
    }
    if (next.isTopLevelKeyword) {
      return parseTopLevelKeywordDeclaration(
          /* beginToken = */ beginToken,
          /* modifierStart = */ modifierStart,
          /* keyword = */ next,
          /* macroToken = */ macroToken,
          /* sealedToken = */ sealedToken,
          /* baseToken = */ baseToken,
          /* interfaceToken = */ interfaceToken,
          directiveState);
    } else if (next.isKeywordOrIdentifier) {
      // TODO(danrubel): improve parseTopLevelMember
      // so that we don't parse modifiers twice.
      directiveState?.checkDeclaration();
      return parseTopLevelMemberImpl(modifierStart);
    } else if (modifierStart.next != next) {
      directiveState?.checkDeclaration();
      // Handle the edge case where a modifier is being used as an identifier
      return parseTopLevelMemberImpl(modifierStart);
    } else if (/* record type */ optional('(', next)) {
      directiveState?.checkDeclaration();
      return parseTopLevelMemberImpl(modifierStart);
    }
    // Recovery
    if (next.isOperator && optional('(', next.next!)) {
      // This appears to be a top level operator declaration, which is invalid.
      reportRecoverableError(next, codes.messageTopLevelOperator);
      // Insert a synthetic identifier
      // and continue parsing as a top level function.
      rewriter.insertSyntheticIdentifier(
          next, '#synthetic_function_${next.charOffset}');
      return parseTopLevelMemberImpl(next);
    }
    // Ignore any preceding modifiers and just report the unexpected token
    listener.beginTopLevelMember(next);
    return parseInvalidTopLevelDeclaration(token);
  }
  /// Parse any top-level declaration that begins with a keyword.
  /// [beginToken] is the first token after any metadata that is parsed as
  /// part of the declaration. [modifierStart] is the token before any modifiers
  /// preceding [keyword]. [beginToken] may point to some out-of-order modifiers
  /// before [modifierStart].
  Token parseTopLevelKeywordDeclaration(
      Token beginToken,
      Token modifierStart,
      Token keyword,
      Token? macroToken,
      Token? sealedToken,
      Token? baseToken,
      Token? interfaceToken,
      DirectiveContext? directiveState) {
    assert(keyword.isTopLevelKeyword);
    final String? value = keyword.stringValue;
    if (identical(value, 'class')) {
      return _handleModifiersForClassDeclaration(
          beginToken,
          modifierStart,
          keyword,
          macroToken,
          sealedToken,
          baseToken,
          interfaceToken,
          /* mixinToken = */ null,
          directiveState);
    } else if (identical(value, 'enum')) {
      directiveState?.checkDeclaration();
      ModifierContext context = new ModifierContext(this);
      context.parseEnumModifiers(modifierStart, keyword);
      // Enums can't declare any explicit modifier.
      if (baseToken != null) {
        reportRecoverableError(baseToken, codes.messageBaseEnum);
      }
      if (context.finalToken != null) {
        reportRecoverableError(context.finalToken!, codes.messageFinalEnum);
      }
      if (interfaceToken != null) {
        reportRecoverableError(interfaceToken, codes.messageInterfaceEnum);
      }
      if (sealedToken != null) {
        reportRecoverableError(sealedToken, codes.messageSealedEnum);
      }
      return parseEnum(beginToken, context.augmentToken, keyword);
    } else {
      // The remaining top level keywords are built-in keywords
      // and can be used in a top level declaration
      // as an identifier such as "abstract<T>() => 0;"
      // or as a prefix such as "abstract.A b() => 0;".
      // This also means that `typedef ({int? j}) => 0;` is a method, but with
      // records something like `typedef ({int? j}) X();` is a typedef.
      String? nextValue = keyword.next!.stringValue;
      bool typedefWithRecord = false;
      if (identical(value, 'typedef') && identical(nextValue, '(')) {
        // Disambiguate `typedef (...)`: a record type followed by an
        // identifier means this really is a typedef, not a function named
        // `typedef`.
        Token? endParen = keyword.next!.endGroup;
        if (endParen != null &&
            _isIdentifierOrQuestionIdentifier(endParen.next!)) {
          // Looks like a typedef with a record.
          TypeInfo typeInfo = computeType(keyword, /* required = */ false);
          if (typeInfo is ComplexTypeInfo && typeInfo.isRecordType) {
            typedefWithRecord = true;
          }
        }
      }
      if ((identical(nextValue, '(') || identical(nextValue, '.')) &&
          !typedefWithRecord) {
        // The keyword is being used as a function name or prefix.
        directiveState?.checkDeclaration();
        return parseTopLevelMemberImpl(modifierStart);
      } else if (identical(nextValue, '<')) {
        if (identical(value, 'extension')) {
          // The name in an extension declaration is optional:
          // `extension<T> on ...`
          Token? endGroup = keyword.next!.endGroup;
          if (endGroup != null && optional('on', endGroup.next!)) {
            directiveState?.checkDeclaration();
            ModifierContext context = new ModifierContext(this);
            context.parseExtensionModifiers(modifierStart, keyword);
            return parseExtension(beginToken, context.augmentToken, keyword);
          }
        }
        // Otherwise `<` means the keyword is a generic function name.
        directiveState?.checkDeclaration();
        return parseTopLevelMemberImpl(modifierStart);
      } else {
        ModifierContext context = new ModifierContext(this);
        if (identical(value, 'import')) {
          context.parseTopLevelKeywordModifiers(modifierStart, keyword);
          directiveState?.checkImport(this, keyword);
          return parseImport(keyword);
        } else if (identical(value, 'export')) {
          context.parseTopLevelKeywordModifiers(modifierStart, keyword);
          directiveState?.checkExport(this, keyword);
          return parseExport(keyword);
        } else if (identical(value, 'typedef')) {
          context.parseTypedefModifiers(modifierStart, keyword);
          directiveState?.checkDeclaration();
          return parseTypedef(context.augmentToken, keyword);
        } else if (identical(value, 'mixin')) {
          if (identical(nextValue, 'class')) {
            // `mixin class ...` is handled as a class declaration with a
            // mixin token.
            return _handleModifiersForClassDeclaration(
                beginToken,
                modifierStart,
                keyword.next!,
                macroToken,
                sealedToken,
                baseToken,
                interfaceToken,
                keyword,
                directiveState);
          }
          context.parseMixinModifiers(modifierStart, keyword);
          // Mixins can't have any modifier other than a base modifier.
          if (context.finalToken != null) {
            reportRecoverableError(
                context.finalToken!, codes.messageFinalMixin);
          }
          if (interfaceToken != null) {
            reportRecoverableError(interfaceToken, codes.messageInterfaceMixin);
          }
          if (sealedToken != null) {
            reportRecoverableError(sealedToken, codes.messageSealedMixin);
          }
          directiveState?.checkDeclaration();
          return parseMixin(
              beginToken, context.augmentToken, baseToken, keyword);
        } else if (identical(value, 'extension')) {
          context.parseExtensionModifiers(modifierStart, keyword);
          directiveState?.checkDeclaration();
          return parseExtension(
              modifierStart.next!, context.augmentToken, keyword);
        } else if (identical(value, 'part')) {
          context.parseTopLevelKeywordModifiers(modifierStart, keyword);
          return parsePartOrPartOf(keyword, directiveState);
        } else if (identical(value, 'library')) {
          directiveState?.checkLibrary(this, keyword);
          context.parseLibraryDirectiveModifiers(modifierStart, keyword);
          // `augment library ...` vs a plain library directive.
          if (context.augmentToken case final augmentKeyword?) {
            return parseLibraryAugmentation(augmentKeyword, keyword);
          } else {
            return parseLibraryName(keyword);
          }
        }
      }
    }
    throw "Internal error: Unhandled top level keyword '$value'.";
  }
  /// Validates the modifiers preceding a `class` (or `mixin class`)
  /// declaration and delegates to [parseClassOrNamedMixinApplication].
  ///
  /// [mixinToken] is non-null for a `mixin class` declaration, in which case
  /// [classKeyword] is the `class` token following it; mixin classes reject
  /// `final`, `interface`, and `sealed` modifiers with recoverable errors.
  Token _handleModifiersForClassDeclaration(
      Token beginToken,
      Token modifierStart,
      Token classKeyword,
      Token? macroToken,
      Token? sealedToken,
      Token? baseToken,
      Token? interfaceToken,
      Token? mixinToken,
      DirectiveContext? directiveState) {
    directiveState?.checkDeclaration();
    ModifierContext context = new ModifierContext(this);
    if (mixinToken != null) {
      context.parseClassModifiers(modifierStart, mixinToken);
      // Mixin classes can't have any modifier other than a base modifier.
      if (context.finalToken != null) {
        reportRecoverableError(
            context.finalToken!, codes.messageFinalMixinClass);
      }
      if (interfaceToken != null) {
        reportRecoverableError(
            interfaceToken, codes.messageInterfaceMixinClass);
      }
      if (sealedToken != null) {
        reportRecoverableError(sealedToken, codes.messageSealedMixinClass);
      }
    } else {
      context.parseClassModifiers(modifierStart, classKeyword);
    }
    return parseClassOrNamedMixinApplication(
        beginToken,
        context.abstractToken,
        macroToken,
        sealedToken,
        baseToken,
        interfaceToken,
        context.finalToken,
        context.augmentToken,
        mixinToken,
        classKeyword);
  }
bool _isIdentifierOrQuestionIdentifier(Token token) {
if (token.isIdentifier) return true;
if (optional("?", token)) {
return token.next!.isIdentifier;
}
return false;
}
/// ```
/// libraryAugmentationDirective:
/// 'augment' 'library' uri ';'
/// ;
/// ```
Token parseLibraryAugmentation(Token augmentKeyword, Token libraryKeyword) {
assert(optional('augment', augmentKeyword));
assert(optional('library', libraryKeyword));
listener.beginUncategorizedTopLevelDeclaration(libraryKeyword);
listener.beginLibraryAugmentation(augmentKeyword, libraryKeyword);
Token start = libraryKeyword;
Token token = ensureLiteralString(start);
Token semicolon = ensureSemicolon(token);
listener.endLibraryAugmentation(augmentKeyword, libraryKeyword, semicolon);
return semicolon;
}
/// ```
/// libraryDirective:
/// 'library' qualified? ';'
/// ;
/// ```
Token parseLibraryName(Token libraryKeyword) {
assert(optional('library', libraryKeyword));
listener.beginUncategorizedTopLevelDeclaration(libraryKeyword);
listener.beginLibraryName(libraryKeyword);
Token token = libraryKeyword.next!;
bool hasName = !optional(';', token);
if (hasName) {
token = parseQualified(libraryKeyword, IdentifierContext.libraryName,
IdentifierContext.libraryNameContinuation);
token = ensureSemicolon(token);
} else {
token = ensureSemicolon(libraryKeyword);
}
listener.endLibraryName(libraryKeyword, token, hasName);
return token;
}
/// ```
/// importPrefix:
/// 'deferred'? 'as' identifier
/// ;
/// ```
Token parseImportPrefixOpt(Token token) {
Token next = token.next!;
if (optional('deferred', next) && optional('as', next.next!)) {
Token deferredToken = next;
Token asKeyword = next.next!;
token = ensureIdentifier(
asKeyword, IdentifierContext.importPrefixDeclaration);
listener.handleImportPrefix(deferredToken, asKeyword);
} else if (optional('as', next)) {
Token asKeyword = next;
token = ensureIdentifier(next, IdentifierContext.importPrefixDeclaration);
listener.handleImportPrefix(/* deferredKeyword = */ null, asKeyword);
} else {
listener.handleImportPrefix(
/* deferredKeyword = */ null,
/* asKeyword = */ null,
);
}
return token;
}
  /// ```
  /// importDirective:
  ///   'import' uri ('if' '(' test ')' uri)* importPrefix? combinator* ';'
  /// ;
  /// ```
  Token parseImport(Token importKeyword) {
    assert(optional('import', importKeyword));
    listener.beginUncategorizedTopLevelDeclaration(importKeyword);
    listener.beginImport(importKeyword);
    Token start = importKeyword;
    Token? augmentToken;
    // An `augment import` directive: the contextual `augment` identifier may
    // follow the `import` keyword.
    if (start.next!.isIdentifier && start.next!.lexeme == 'augment') {
      start = augmentToken = start.next!;
    }
    Token token = ensureLiteralString(start);
    // Remember the URI in case out-of-order clauses force recovery below.
    Token uri = token;
    token = parseConditionalUriStar(token);
    token = parseImportPrefixOpt(token);
    token = parseCombinatorStar(token).next!;
    if (optional(';', token)) {
      listener.endImport(importKeyword, augmentToken, token);
      return token;
    } else {
      // Recovery
      // Clauses are out of order or an unexpected token follows; reparse
      // from the URI accepting clauses in any order.
      listener.endImport(importKeyword, augmentToken, /* semicolon = */ null);
      return parseImportRecovery(uri);
    }
  }
  /// Recover from out-of-order or duplicated clauses in an import directive,
  /// where [token] is the directive's uri (not the `import` keyword — see the
  /// call site in [parseImport]).
  ///
  /// Returns the terminating `;`, inserting a synthetic one if necessary.
  Token parseImportRecovery(Token token) {
    final Listener primaryListener = listener;
    final ImportRecoveryListener recoveryListener =
        new ImportRecoveryListener();
    // Reparse to determine which clauses have already been parsed
    // but intercept the events so they are not sent to the primary listener
    listener = recoveryListener;
    token = parseConditionalUriStar(token);
    token = parseImportPrefixOpt(token);
    token = parseCombinatorStar(token);
    // Remember which clauses the first pass saw so duplicates and
    // out-of-order clauses can be reported below.
    Token? firstDeferredKeyword = recoveryListener.deferredKeyword;
    bool hasPrefix = recoveryListener.asKeyword != null;
    bool hasCombinator = recoveryListener.hasCombinator;
    // Update the recovery listener to forward subsequent events
    // to the primary listener
    recoveryListener.listener = primaryListener;
    // Parse additional out-of-order clauses.
    Token? semicolon;
    do {
      Token start = token.next!;
      // Check for extraneous token in the middle of an import statement.
      token = skipUnexpectedTokenOpt(
          token, const <String>['if', 'deferred', 'as', 'hide', 'show', ';']);
      // During recovery, clauses are parsed in the same order
      // and generate the same events as in the parseImport method above.
      recoveryListener.clear();
      token = parseConditionalUriStar(token);
      if (recoveryListener.ifKeyword != null) {
        if (firstDeferredKeyword != null) {
          // TODO(danrubel): report error indicating conditional should
          // be moved before deferred keyword
        } else if (hasPrefix) {
          // TODO(danrubel): report error indicating conditional should
          // be moved before prefix clause
        } else if (hasCombinator) {
          // TODO(danrubel): report error indicating conditional should
          // be moved before combinators
        }
      }
      if (optional('deferred', token.next!) &&
          !optional('as', token.next!.next!)) {
        // `deferred` without a following `as`: report it directly as a prefix
        // with a missing `as` keyword rather than via parseImportPrefixOpt.
        listener.handleImportPrefix(token.next!, /* asKeyword = */ null);
        token = token.next!;
      } else {
        token = parseImportPrefixOpt(token);
      }
      if (recoveryListener.deferredKeyword != null) {
        if (firstDeferredKeyword != null) {
          reportRecoverableError(recoveryListener.deferredKeyword!,
              codes.messageDuplicateDeferred);
        } else {
          if (hasPrefix) {
            reportRecoverableError(recoveryListener.deferredKeyword!,
                codes.messageDeferredAfterPrefix);
          }
          firstDeferredKeyword = recoveryListener.deferredKeyword;
        }
      }
      if (recoveryListener.asKeyword != null) {
        if (hasPrefix) {
          reportRecoverableError(
              recoveryListener.asKeyword!, codes.messageDuplicatePrefix);
        } else {
          if (hasCombinator) {
            reportRecoverableError(recoveryListener.asKeyword!,
                codes.messagePrefixAfterCombinator);
          }
          hasPrefix = true;
        }
      }
      token = parseCombinatorStar(token);
      hasCombinator = hasCombinator || recoveryListener.hasCombinator;
      if (optional(';', token.next!)) {
        semicolon = token.next!;
      } else if (identical(start, token.next!)) {
        // If no forward progress was made, insert ';' so that we exit loop.
        semicolon = ensureSemicolon(token);
      }
      listener.handleRecoverImport(semicolon);
    } while (semicolon == null);
    // A deferred import must have a prefix; report it once at the end.
    if (firstDeferredKeyword != null && !hasPrefix) {
      reportRecoverableError(
          firstDeferredKeyword, codes.messageMissingPrefixInDeferredImport);
    }
    return semicolon;
  }
/// ```
/// conditionalUris:
/// conditionalUri*
/// ;
/// ```
Token parseConditionalUriStar(Token token) {
listener.beginConditionalUris(token.next!);
int count = 0;
while (optional('if', token.next!)) {
count++;
token = parseConditionalUri(token);
}
listener.endConditionalUris(count);
return token;
}
/// ```
/// conditionalUri:
/// 'if' '(' dottedName ('==' literalString)? ')' uri
/// ;
/// ```
  Token parseConditionalUri(Token token) {
    Token ifKeyword = token = token.next!;
    assert(optional('if', token));
    listener.beginConditionalUri(token);
    Token leftParen = token.next!;
    if (!optional('(', leftParen)) {
      // Recovery: insert synthetic `(...)` so parsing can continue.
      reportRecoverableError(
          leftParen, codes.templateExpectedButGot.withArguments('('));
      leftParen = rewriter.insertParens(token, /* includeIdentifier = */ true);
    }
    token = parseDottedName(leftParen);
    Token next = token.next!;
    Token? equalitySign;
    // Optional `== <string literal>` comparison after the dotted name.
    if (optional('==', next)) {
      equalitySign = next;
      token = ensureLiteralString(next);
      next = token.next!;
    }
    // `next` should now be the `)` that closes `leftParen`.
    if (next != leftParen.endGroup) {
      Token endGroup = leftParen.endGroup!;
      if (endGroup.isSynthetic) {
        // The scanner did not place the synthetic ')' correctly, so move it.
        next = rewriter.moveSynthetic(token, endGroup);
      } else {
        // Skip unexpected tokens up to the real closing parenthesis.
        reportRecoverableErrorWithToken(next, codes.templateUnexpectedToken);
        next = endGroup;
      }
    }
    token = next;
    assert(optional(')', token));
    // The uri that applies when the condition holds.
    token = ensureLiteralString(token);
    listener.endConditionalUri(ifKeyword, leftParen, equalitySign);
    return token;
  }
/// ```
/// dottedName:
/// identifier ('.' identifier)*
/// ;
/// ```
Token parseDottedName(Token token) {
token = ensureIdentifier(token, IdentifierContext.dottedName);
Token firstIdentifier = token;
int count = 1;
while (optional('.', token.next!)) {
token = ensureIdentifier(
token.next!, IdentifierContext.dottedNameContinuation);
count++;
}
listener.handleDottedName(count, firstIdentifier);
return token;
}
/// ```
/// exportDirective:
/// 'export' uri conditional-uris* combinator* ';'
/// ;
/// ```
Token parseExport(Token exportKeyword) {
assert(optional('export', exportKeyword));
listener.beginUncategorizedTopLevelDeclaration(exportKeyword);
listener.beginExport(exportKeyword);
Token token = ensureLiteralString(exportKeyword);
token = parseConditionalUriStar(token);
token = parseCombinatorStar(token);
token = ensureSemicolon(token);
listener.endExport(exportKeyword, token);
return token;
}
/// ```
/// combinators:
/// (hideCombinator | showCombinator)*
/// ;
/// ```
Token parseCombinatorStar(Token token) {
Token next = token.next!;
listener.beginCombinators(next);
int count = 0;
while (true) {
String? value = next.stringValue;
if (identical('hide', value)) {
token = parseHide(token);
} else if (identical('show', value)) {
token = parseShow(token);
} else {
listener.endCombinators(count);
break;
}
next = token.next!;
count++;
}
return token;
}
/// ```
/// hideCombinator:
/// 'hide' identifierList
/// ;
/// ```
Token parseHide(Token token) {
Token hideKeyword = token.next!;
assert(optional('hide', hideKeyword));
listener.beginHide(hideKeyword);
token = parseIdentifierList(hideKeyword);
listener.endHide(hideKeyword);
return token;
}
/// ```
/// showCombinator:
/// 'show' identifierList
/// ;
/// ```
Token parseShow(Token token) {
Token showKeyword = token.next!;
assert(optional('show', showKeyword));
listener.beginShow(showKeyword);
token = parseIdentifierList(showKeyword);
listener.endShow(showKeyword);
return token;
}
/// ```
/// identifierList:
/// identifier (',' identifier)*
/// ;
/// ```
Token parseIdentifierList(Token token) {
token = ensureIdentifier(token, IdentifierContext.combinator);
int count = 1;
while (optional(',', token.next!)) {
token = ensureIdentifier(token.next!, IdentifierContext.combinator);
count++;
}
listener.handleIdentifierList(count);
return token;
}
/// ```
/// typeList:
/// type (',' type)*
/// ;
/// ```
Token parseTypeList(Token token) {
listener.beginTypeList(token.next!);
token =
computeType(token, /* required = */ true).ensureTypeOrVoid(token, this);
int count = 1;
while (optional(',', token.next!)) {
token = computeType(token.next!, /* required = */ true)
.ensureTypeOrVoid(token.next!, this);
count++;
}
listener.endTypeList(count);
return token;
}
Token parsePartOrPartOf(Token partKeyword, DirectiveContext? directiveState) {
assert(optional('part', partKeyword));
listener.beginUncategorizedTopLevelDeclaration(partKeyword);
if (optional('of', partKeyword.next!)) {
directiveState?.checkPartOf(this, partKeyword);
return parsePartOf(partKeyword);
} else {
directiveState?.checkPart(this, partKeyword);
return parsePart(partKeyword);
}
}
/// ```
/// partDirective:
/// 'part' uri ('if' '(' test ')' uri)* ';'
/// ;
/// ```
Token parsePart(Token partKeyword) {
assert(optional('part', partKeyword));
listener.beginPart(partKeyword);
Token token = ensureLiteralString(partKeyword);
token = parseConditionalUriStar(token);
token = ensureSemicolon(token);
listener.endPart(partKeyword, token);
return token;
}
/// ```
/// partOfDirective:
/// 'part' 'of' (qualified | uri) ';'
/// ;
/// ```
Token parsePartOf(Token partKeyword) {
Token ofKeyword = partKeyword.next!;
assert(optional('part', partKeyword));
assert(optional('of', ofKeyword));
listener.beginPartOf(partKeyword);
bool hasName = ofKeyword.next!.isIdentifier;
Token token;
if (hasName) {
token = parseQualified(ofKeyword, IdentifierContext.partName,
IdentifierContext.partNameContinuation);
} else {
token = ensureLiteralString(ofKeyword);
}
token = ensureSemicolon(token);
listener.endPartOf(partKeyword, ofKeyword, token, hasName);
return token;
}
/// ```
/// metadata:
/// annotation*
/// ;
/// ```
Token parseMetadataStar(Token token) {
listener.beginMetadataStar(token.next!);
int count = 0;
while (optional('@', token.next!)) {
token = parseMetadata(token);
count++;
}
listener.endMetadataStar(count);
return token;
}
/// ```
/// <metadata> ::= (‘@’ <metadatum>)*
/// <metadatum> ::= <identifier>
/// | <qualifiedName>
/// | <constructorDesignation> <arguments>
/// <qualifiedName> ::= <typeIdentifier> ‘.’ <identifier>
/// | <typeIdentifier> ‘.’ <typeIdentifier> ‘.’ <identifier>
/// <constructorDesignation> ::= <typeIdentifier>
/// | <qualifiedName>
/// | <typeName> <typeArguments> (‘.’ <identifier>)?
/// <typeName> ::= <typeIdentifier> (‘.’ <typeIdentifier>)?
/// ```
/// (where typeIdentifier is an identifier that's not on the list of
/// built in identifiers)
/// So these are legal:
/// * identifier
/// qualifiedName:
/// * typeIdentifier.identifier
/// * typeIdentifier.typeIdentifier.identifier
/// via constructorDesignation part 1
/// * typeIdentifier(arguments)
/// via constructorDesignation part 2
/// * typeIdentifier.identifier(arguments)
/// * typeIdentifier.typeIdentifier.identifier(arguments)
/// via constructorDesignation part 3
/// * typeIdentifier<typeArguments>(arguments)
/// * typeIdentifier<typeArguments>.identifier(arguments)
/// * typeIdentifier.typeIdentifier<typeArguments>(arguments)
/// * typeIdentifier.typeIdentifier<typeArguments>.identifier(arguments)
///
/// So in another way (ignoring the difference between typeIdentifier and
/// identifier):
/// * 1, 2 or 3 identifiers with or without arguments.
/// * 1 or 2 identifiers, then type arguments, then possibly followed by a
/// single identifier, and then (required!) arguments.
///
/// Note that if this is updated [skipMetadata] (in util.dart) should be
/// updated as well.
Token parseMetadata(Token token) {
Token atToken = token.next!;
assert(optional('@', atToken));
listener.beginMetadata(atToken);
token = ensureIdentifier(atToken, IdentifierContext.metadataReference);
token =
parseQualifiedRestOpt(token, IdentifierContext.metadataContinuation);
bool hasTypeArguments = optional("<", token.next!);
token = computeTypeParamOrArg(token).parseArguments(token, this);
Token? period = null;
if (optional('.', token.next!)) {
period = token.next!;
token = ensureIdentifier(
period, IdentifierContext.metadataContinuationAfterTypeArguments);
}
if (hasTypeArguments && !optional("(", token.next!)) {
reportRecoverableError(
token, codes.messageMetadataTypeArgumentsUninstantiated);
}
token = parseArgumentsOptMetadata(token, hasTypeArguments);
listener.endMetadata(atToken, period, token);
return token;
}
/// ```
/// scriptTag:
/// '#!' (~NEWLINE)* NEWLINE
/// ;
/// ```
Token parseScript(Token token) {
token = token.next!;
assert(identical(token.type, TokenType.SCRIPT_TAG));
listener.handleScript(token);
return token;
}
/// ```
/// typeAlias:
/// metadata 'typedef' typeAliasBody |
/// metadata 'typedef' identifier typeParameters? '=' functionType ';'
/// ;
///
/// functionType:
/// returnType? 'Function' typeParameters? parameterTypeList
///
/// typeAliasBody:
/// functionTypeAlias
/// ;
///
/// functionTypeAlias:
/// functionPrefix typeParameters? formalParameterList ‘;’
/// ;
///
/// functionPrefix:
/// returnType? identifier
/// ;
/// ```
  Token parseTypedef(Token? augmentToken, Token typedefKeyword) {
    assert(optional('typedef', typedefKeyword));
    listener.beginUncategorizedTopLevelDeclaration(typedefKeyword);
    listener.beginTypedef(typedefKeyword);
    // Tentatively read a leading type to distinguish old-style
    // `typedef <returnType> name(...)` from new-style `typedef name = ...`.
    TypeInfo typeInfo = computeType(typedefKeyword, /* required = */ false);
    Token token = typeInfo.skipType(typedefKeyword);
    Token next = token.next!;
    Token? equals;
    TypeParamOrArgInfo typeParam =
        computeTypeParamOrArg(next, /* inDeclaration = */ true);
    if (typeInfo == noType && optional('=', typeParam.skip(next).next!)) {
      // New style typedef, e.g. typedef foo = void Function();".
      // Parse as recovered here to 'force' using it as an identifier as we've
      // already established that the next token is the equal sign we're looking
      // for.
      token = ensureIdentifierPotentiallyRecovered(token,
          IdentifierContext.typedefDeclaration, /* isRecovered = */ true);
      token = typeParam.parseVariables(token, this);
      next = token.next!;
      // parseVariables rewrites so even though we checked in the if,
      // we might not have an equal here now.
      if (!optional('=', next) && optional('=', next.next!)) {
        // Recovery after recovery: A token was inserted, but we'll skip it now
        // to get more in line with what we thought in the if before.
        next = next.next!;
      }
      if (optional('=', next)) {
        equals = next;
        TypeInfo type = computeType(equals, /* required = */ true);
        if (!type.isFunctionType) {
          // Recovery: In certain cases insert missing 'Function' and missing
          // parens.
          Token skippedType = type.skipType(equals);
          if (optional('(', skippedType.next!) &&
              skippedType.next!.endGroup != null &&
              optional(';', skippedType.next!.endGroup!.next!)) {
            // Turn "<return type>? '(' <whatever> ')';"
            // into "<return type>? Function '(' <whatever> ')';".
            // Assume the type is meant as the return type.
            Token functionToken =
                rewriter.insertSyntheticKeyword(skippedType, Keyword.FUNCTION);
            reportRecoverableError(functionToken,
                codes.templateExpectedButGot.withArguments('Function'));
            // Recompute the type now that 'Function' is in the stream.
            type = computeType(equals, /* required = */ true);
          } else if (type is NoType &&
              optional('<', skippedType.next!) &&
              skippedType.next!.endGroup != null) {
            // Recover these two:
            // "<whatever>;" => "Function<whatever>();"
            // "<whatever>(<whatever>);" => "Function<whatever>(<whatever>);"
            Token endGroup = skippedType.next!.endGroup!;
            bool recover = false;
            if (optional(';', endGroup.next!)) {
              // Missing parenthesis. Insert them.
              // Turn "<whatever>;" into "<whatever>();"
              // Insert missing 'Function' below.
              reportRecoverableError(endGroup,
                  missingParameterMessage(MemberKind.FunctionTypeAlias));
              rewriter.insertParens(endGroup, /* includeIdentifier = */ false);
              recover = true;
            } else if (optional('(', endGroup.next!) &&
                endGroup.next!.endGroup != null &&
                optional(';', endGroup.next!.endGroup!.next!)) {
              // "<whatever>(<whatever>);". Insert missing 'Function' below.
              recover = true;
            }
            if (recover) {
              // Assume the '<' indicates type arguments to the function.
              // Insert 'Function' before them.
              Token functionToken =
                  rewriter.insertSyntheticKeyword(equals, Keyword.FUNCTION);
              reportRecoverableError(functionToken,
                  codes.templateExpectedButGot.withArguments('Function'));
              type = computeType(equals, /* required = */ true);
            }
          } else {
            // E.g. "typedef j = foo;" -- don't attempt any recovery.
          }
        }
        token = type.ensureTypeOrVoid(equals, this);
      } else {
        // A rewrite caused the = to disappear
        token = parseFormalParametersRequiredOpt(
            next, MemberKind.FunctionTypeAlias);
      }
    } else {
      // Old style typedef, e.g. "typedef void foo();".
      token = typeInfo.parseType(typedefKeyword, this);
      next = token.next!;
      bool isIdentifierRecovered = false;
      if (next.kind != IDENTIFIER_TOKEN &&
          optional('(', typeParam.skip(next).next!)) {
        // Recovery: Not a valid identifier, but is used as such.
        isIdentifierRecovered = true;
      }
      token = ensureIdentifierPotentiallyRecovered(
          token, IdentifierContext.typedefDeclaration, isIdentifierRecovered);
      token = typeParam.parseVariables(token, this);
      token =
          parseFormalParametersRequiredOpt(token, MemberKind.FunctionTypeAlias);
    }
    token = ensureSemicolon(token);
    // `equals` is null for old-style typedefs; the listener uses it to
    // distinguish the two forms.
    listener.endTypedef(augmentToken, typedefKeyword, equals, token);
    return token;
  }
/// Parse a mixin application starting from `with`. Assumes that the first
/// type has already been parsed.
Token parseMixinApplicationRest(Token token) {
Token withKeyword = token.next!;
if (!optional('with', withKeyword)) {
// Recovery: Report an error and insert synthetic `with` clause.
reportRecoverableError(
withKeyword, codes.templateExpectedButGot.withArguments('with'));
withKeyword = rewriter.insertSyntheticKeyword(token, Keyword.WITH);
if (!isValidNonRecordTypeReference(withKeyword.next!)) {
rewriter.insertSyntheticIdentifier(withKeyword);
}
}
token = parseTypeList(withKeyword);
listener.handleNamedMixinApplicationWithClause(withKeyword);
return token;
}
Token parseClassWithClauseOpt(Token token) {
// <mixins> ::= with <typeNotVoidList>
Token withKeyword = token.next!;
if (optional('with', withKeyword)) {
token = parseTypeList(withKeyword);
listener.handleClassWithClause(withKeyword);
} else {
listener.handleClassNoWithClause();
}
return token;
}
Token parseEnumWithClauseOpt(Token token) {
// <mixins> ::= with <typeNotVoidList>
Token withKeyword = token.next!;
if (optional('with', withKeyword)) {
token = parseTypeList(withKeyword);
listener.handleEnumWithClause(withKeyword);
} else {
listener.handleEnumNoWithClause();
}
return token;
}
/// Parse the formal parameters of a getter (which shouldn't have parameters)
/// or function or method.
Token parseGetterOrFormalParameters(
Token token, Token name, bool isGetter, MemberKind kind) {
Token next = token.next!;
if (optional("(", next)) {
if (isGetter) {
reportRecoverableError(next, codes.messageGetterWithFormals);
}
token = parseFormalParameters(token, kind);
} else if (isGetter) {
listener.handleNoFormalParameters(next, kind);
} else {
// Recovery
if (optional('operator', name)) {
Token next = name.next!;
if (next.isOperator) {
name = next;
} else if (isUnaryMinus(next)) {
name = next.next!;
}
}
reportRecoverableError(name, missingParameterMessage(kind));
token = rewriter.insertParens(token, /* includeIdentifier = */ false);
token = parseFormalParametersRest(token, kind);
}
return token;
}
Token parseFormalParametersOpt(Token token, MemberKind kind) {
Token next = token.next!;
if (optional('(', next)) {
token = parseFormalParameters(token, kind);
} else {
listener.handleNoFormalParameters(next, kind);
}
return token;
}
Token skipFormalParameters(Token token, MemberKind kind) {
return skipFormalParametersRest(token.next!, kind);
}
Token skipFormalParametersRest(Token token, MemberKind kind) {
assert(optional('(', token));
// TODO(ahe): Shouldn't this be `beginFormalParameters`?
listener.beginOptionalFormalParameters(token);
Token closeBrace = token.endGroup!;
assert(optional(')', closeBrace));
listener.endFormalParameters(/* count = */ 0, token, closeBrace, kind);
return closeBrace;
}
/// Parse a record type similarly as a formal parameter list of a function.
///
/// recordType ::= '(' recordTypeFields ',' recordTypeNamedFields ')'
/// | '(' recordTypeFields ','? ')'
/// | '(' recordTypeNamedFields? ')'
///
/// recordTypeFields ::= recordTypeField ( ',' recordTypeField )*
/// recordTypeField ::= metadata type identifier?
///
/// recordTypeNamedFields ::= '{' recordTypeNamedField
/// ( ',' recordTypeNamedField )* ','? '}'
/// recordTypeNamedField ::= metadata type identifier
  Token parseRecordType(
      final Token start, Token token, bool isQuestionMarkPartOfType) {
    token = token.next!;
    assert(optional('(', token));
    listener.beginRecordType(start);
    Token begin = token;

    /// parameterCount counting the presence of named fields as 1.
    int parameterCount = 0;
    bool hasNamedFields = false;
    bool sawComma = false;
    Token? illegalTrailingComma;
    while (true) {
      Token next = token.next!;
      if (optional(')', next)) {
        token = next;
        break;
      } else if (parameterCount == 0 &&
          optional(',', next) &&
          optional(')', next.next!)) {
        // `(,)` — remember the comma so it can be reported after the loop.
        illegalTrailingComma = next;
        token = next.next!;
        break;
      }
      ++parameterCount;
      String? value = next.stringValue;
      if (identical(value, '{')) {
        // Named-field section; it is always last in a record type.
        hasNamedFields = true;
        token = parseRecordTypeNamedFields(token);
        token = ensureCloseParen(token, begin);
        break;
      }
      token = parseRecordTypeField(token, identifierIsOptional: true);
      next = token.next!;
      if (!optional(',', next)) {
        Token next = token.next!;
        if (optional(')', next)) {
          token = next;
        } else {
          // Recovery.
          // TODO: This is copied from parseFormalParametersRest.
          // We could possibly either have more specific recovery here
          // or have the recovery in a shared method.
          if (begin.endGroup!.isSynthetic) {
            // Scanner has already reported a missing `)` error,
            // but placed the `)` in the wrong location, so move it.
            token = rewriter.moveSynthetic(token, begin.endGroup!);
          } else if (next.kind == IDENTIFIER_TOKEN &&
              next.next!.kind == IDENTIFIER_TOKEN) {
            // Looks like a missing comma
            token = rewriteAndRecover(
                token,
                codes.templateExpectedButGot.withArguments(','),
                new SyntheticToken(TokenType.COMMA, next.charOffset));
            // Re-enter the loop with the synthetic comma as the separator.
            continue;
          } else {
            token = ensureCloseParen(token, begin);
          }
        }
        break;
      } else {
        sawComma = true;
      }
      token = next;
    }
    assert(optional(')', token));
    if (parameterCount == 0 && illegalTrailingComma != null) {
      // Empty record type with a comma `(,)`.
      reportRecoverableError(illegalTrailingComma,
          codes.messageRecordTypeZeroFieldsButTrailingComma);
    } else if (parameterCount == 1 && !hasNamedFields && !sawComma) {
      // Single non-named element without trailing comma.
      reportRecoverableError(
          token, codes.messageRecordTypeOnePositionalFieldNoTrailingComma);
    }
    // Only consume the `?` if it is part of the type.
    Token? questionMark = token.next!;
    if (optional('?', questionMark) && isQuestionMarkPartOfType) {
      token = questionMark;
    } else {
      questionMark = null;
    }
    listener.endRecordType(start, questionMark, parameterCount,
        /* hasNamedFields = */ hasNamedFields);
    return token;
  }
Token parseRecordTypeField(Token token,
{required bool identifierIsOptional}) {
listener.beginRecordTypeEntry();
token = parseMetadataStar(token);
token = computeType(
token,
/* required = */ true,
).ensureTypeOrVoid(token, this);
if (token.next!.isIdentifier || !identifierIsOptional) {
token = ensureIdentifier(token, IdentifierContext.recordFieldDeclaration);
} else {
listener.handleNoName(token.next!);
}
listener.endRecordTypeEntry();
return token;
}
  /// Parses the named-field section of a record type:
  /// `'{' recordTypeNamedField (',' recordTypeNamedField)* ','? '}'`.
  ///
  /// On entry [token] is the token before `{`; the returned token is the
  /// closing `}`.
  Token parseRecordTypeNamedFields(Token token) {
    Token begin = token = token.next!;
    assert(optional('{', token));
    listener.beginRecordTypeNamedFields(begin);
    int parameterCount = 0;
    Token next;
    while (true) {
      next = token.next!;
      if (optional('}', next)) {
        // breaking with next pointing to '}'.
        break;
      }
      token = parseRecordTypeField(token, identifierIsOptional: false);
      next = token.next!;
      ++parameterCount;
      if (!optional(',', next)) {
        if (!optional('}', next)) {
          // Recovery
          reportRecoverableError(
              next, codes.templateExpectedButGot.withArguments('}'));
          // Scanner guarantees a closing bracket.
          next = begin.endGroup!;
        }
        // breaking with next pointing to '}'.
        break;
      }
      token = next;
    }
    token = next;
    assert(optional('}', token));
    if (parameterCount == 0) {
      // `({})` is an error: the braces must contain at least one field.
      reportRecoverableError(
          token, codes.messageEmptyRecordTypeNamedFieldsList);
    }
    listener.endRecordTypeNamedFields(parameterCount, begin);
    return token;
  }
/// Parses the formal parameter list of a function.
///
/// If `kind == MemberKind.GeneralizedFunctionType`, then names may be
/// omitted (except for named arguments). Otherwise, types may be omitted.
Token parseFormalParametersRequiredOpt(Token token, MemberKind kind) {
Token next = token.next!;
if (!optional('(', next)) {
reportRecoverableError(next, missingParameterMessage(kind));
next = rewriter.insertParens(token, /* includeIdentifier = */ false);
}
return parseFormalParametersRest(next, kind);
}
/// Parses the formal parameter list of a function given that the left
/// parenthesis is known to exist.
///
/// If `kind == MemberKind.GeneralizedFunctionType`, then names may be
/// omitted (except for named arguments). Otherwise, types may be omitted.
Token parseFormalParameters(Token token, MemberKind kind) {
return parseFormalParametersRest(token.next!, kind);
}
/// Parses the formal parameter list of a function given that the left
/// parenthesis passed in as [token].
///
/// If `kind == MemberKind.GeneralizedFunctionType`, then names may be
/// omitted (except for named arguments). Otherwise, types may be omitted.
  Token parseFormalParametersRest(Token token, MemberKind kind) {
    Token begin = token;
    assert(optional('(', token));
    listener.beginFormalParameters(begin, kind);
    int parameterCount = 0;
    while (true) {
      Token next = token.next!;
      if (optional(')', next)) {
        token = next;
        break;
      }
      ++parameterCount;
      String? value = next.stringValue;
      if (identical(value, '[')) {
        // Optional positional parameters; always the last section.
        token = parseOptionalPositionalParameters(token, kind);
        token = ensureCloseParen(token, begin);
        break;
      } else if (identical(value, '{')) {
        // Named parameters; always the last section.
        token = parseOptionalNamedParameters(token, kind);
        token = ensureCloseParen(token, begin);
        break;
      } else if (identical(value, '[]')) {
        // Recovery
        token = rewriteSquareBrackets(token);
        token = parseOptionalPositionalParameters(token, kind);
        token = ensureCloseParen(token, begin);
        break;
      }
      token = parseFormalParameter(
          token, FormalParameterKind.requiredPositional, kind);
      next = token.next!;
      if (!optional(',', next)) {
        Token next = token.next!;
        if (optional(')', next)) {
          token = next;
        } else {
          // Recovery
          if (begin.endGroup!.isSynthetic) {
            // Scanner has already reported a missing `)` error,
            // but placed the `)` in the wrong location, so move it.
            token = rewriter.moveSynthetic(token, begin.endGroup!);
          } else if (next.kind == IDENTIFIER_TOKEN &&
              next.next!.kind == IDENTIFIER_TOKEN) {
            // Looks like a missing comma
            token = rewriteAndRecover(
                token,
                codes.templateExpectedButGot.withArguments(','),
                new SyntheticToken(TokenType.COMMA, next.charOffset));
            // Re-enter the loop with the synthetic comma as the separator.
            continue;
          } else {
            token = ensureCloseParen(token, begin);
          }
        }
        break;
      }
      token = next;
    }
    assert(optional(')', token));
    listener.endFormalParameters(parameterCount, begin, token, kind);
    return token;
  }
/// Return the message that should be produced when the formal parameters are
/// missing.
codes.Message missingParameterMessage(MemberKind kind) {
switch (kind) {
case MemberKind.FunctionTypeAlias:
return codes.messageMissingTypedefParameters;
case MemberKind.StaticMethod:
case MemberKind.NonStaticMethod:
return codes.messageMissingMethodParameters;
case MemberKind.TopLevelMethod:
case MemberKind.ExtensionNonStaticMethod:
case MemberKind.ExtensionStaticMethod:
case MemberKind.ExtensionTypeNonStaticMethod:
case MemberKind.ExtensionTypeStaticMethod:
case MemberKind.Catch:
case MemberKind.Factory:
case MemberKind.FunctionTypedParameter:
case MemberKind.GeneralizedFunctionType:
case MemberKind.Local:
case MemberKind.NonStaticField:
case MemberKind.StaticField:
case MemberKind.TopLevelField:
case MemberKind.PrimaryConstructor:
return codes.messageMissingFunctionParameters;
}
}
/// Check if [token] is the usage of 'required' in a formal parameter in a
/// context where it's not legal (i.e. in non-nnbd-mode).
  bool _isUseOfRequiredInNonNNBD(Token token) {
    if (token.next is StringToken && token.next!.value() == "required") {
      // Possible recovery: Figure out if we're in a situation like
      // required covariant? <type> name
      // (in non-nnbd-mode) where the required modifier is not legal and thus
      // would normally be parsed as the type.
      token = token.next!;
      Token next = token.next!;
      // Skip modifiers.
      while (next.isModifier) {
        token = next;
        next = next.next!;
      }
      // Parse the (potential) new type.
      TypeInfo typeInfoAlternative = computeType(
        token,
        /* required = */ false,
        /* inDeclaration = */ true,
      );
      token = typeInfoAlternative.skipType(token);
      next = token.next!;
      // We've essentially ignored the 'required' at this point.
      // `token` is (in the good state) the last token of the type,
      // `next` is (in the good state) the name;
      // Are we in a 'good' state?
      // The `,` / `}` lookahead checks that the name is followed by the end
      // of this parameter, i.e. that the shape really is
      // `required <modifiers> <type> <name>`.
      if (typeInfoAlternative != noType &&
          next.isIdentifier &&
          (optional(',', next.next!) || optional('}', next.next!))) {
        return true;
      }
    }
    // Not a misuse of 'required' (or not followed by a plausible parameter).
    return false;
  }
/// ```
/// normalFormalParameter:
/// functionFormalParameter |
/// fieldFormalParameter |
/// simpleFormalParameter
/// ;
///
/// functionFormalParameter:
/// metadata 'covariant'? returnType? identifier formalParameterList
/// ;
///
/// simpleFormalParameter:
/// metadata 'covariant'? finalConstVarOrType? identifier |
/// ;
///
/// fieldFormalParameter:
/// metadata finalConstVarOrType? 'this' '.' identifier formalParameterList?
/// ;
/// ```
Token parseFormalParameter(
Token token, FormalParameterKind parameterKind, MemberKind memberKind) {
token = parseMetadataStar(token);
Token? skippedNonRequiredRequired;
if (_isUseOfRequiredInNonNNBD(token)) {
skippedNonRequiredRequired = token.next!;
reportRecoverableErrorWithToken(skippedNonRequiredRequired,
codes.templateUnexpectedModifierInNonNnbd);
token = token.next!;
}
Token next = token.next!;
Token start = next;
final bool inFunctionType =
memberKind == MemberKind.GeneralizedFunctionType;
Token? requiredToken;
Token? covariantToken;
Token? varFinalOrConst;
if (isModifier(next)) {
if (optional('required', next)) {
if (parameterKind == FormalParameterKind.optionalNamed) {
parameterKind = FormalParameterKind.requiredNamed;
requiredToken = token = next;
next = token.next!;
}
}
if (isModifier(next)) {
if (optional('covariant', next)) {
switch (memberKind) {
case MemberKind.StaticMethod:
case MemberKind.TopLevelMethod:
case MemberKind.ExtensionNonStaticMethod:
case MemberKind.ExtensionStaticMethod:
case MemberKind.ExtensionTypeNonStaticMethod:
case MemberKind.ExtensionTypeStaticMethod:
case MemberKind.PrimaryConstructor:
// Error cases reported in
// [ModifierContext.parseFormalParameterModifiers].
break;
case MemberKind.Catch:
case MemberKind.Factory:
case MemberKind.FunctionTypeAlias:
case MemberKind.FunctionTypedParameter:
case MemberKind.GeneralizedFunctionType:
case MemberKind.Local:
case MemberKind.NonStaticMethod:
case MemberKind.NonStaticField:
case MemberKind.StaticField:
case MemberKind.TopLevelField:
covariantToken = token = next;
next = token.next!;
}
}
if (isModifier(next)) {
if (!inFunctionType) {
if (optional('var', next)) {
varFinalOrConst = token = next;
next = token.next!;
} else if (optional('final', next)) {
varFinalOrConst = token = next;
next = token.next!;
}
}
if (isModifier(next)) {
// Recovery
ModifierContext context = new ModifierContext(this)
..covariantToken = covariantToken
..requiredToken = requiredToken
..varFinalOrConst = varFinalOrConst;
token = context.parseFormalParameterModifiers(
token, parameterKind, memberKind);
next = token.next!;
covariantToken = context.covariantToken;
requiredToken = context.requiredToken;
varFinalOrConst = context.varFinalOrConst;
}
}
}
}
if (requiredToken == null) {
// `required` was used as a modifier in non-nnbd mode. An error has been
// emitted. Still use it as a required token for the remainder in an
// attempt to avoid cascading errors (and for passing to the listener).
requiredToken = skippedNonRequiredRequired;
}
listener.beginFormalParameter(
start, memberKind, requiredToken, covariantToken, varFinalOrConst);
// Type is required in a generalized function type, but optional otherwise.
final Token beforeType = token;
TypeInfo typeInfo = computeType(
token,
inFunctionType,
/* inDeclaration = */ false,
/* acceptKeywordForSimpleType = */ true,
);
token = typeInfo.skipType(token);
next = token.next!;
if (typeInfo == noType &&
(optional('.', next) ||
(next.isIdentifier && optional('.', next.next!)))) {
// Recovery: Malformed type reference.
typeInfo = computeType(beforeType, /* required = */ true);
token = typeInfo.skipType(beforeType);
next = token.next!;
}
Token? thisKeyword;
Token? superKeyword;
Token? periodAfterThisOrSuper;
IdentifierContext nameContext =
IdentifierContext.formalParameterDeclaration;
if (!inFunctionType &&
(optional('this', next) || optional('super', next))) {
Token originalToken = token;
if (optional('this', next)) {
thisKeyword = token = next;
} else {
superKeyword = token = next;
}
next = token.next!;
if (!optional('.', next)) {
if (isOneOf(next, okNextValueInFormalParameter)) {
// Recover by not parsing as 'this' --- an error will be given
// later that it's not an allowed identifier.
token = originalToken;
next = token.next!;
thisKeyword = superKeyword = null;
} else {
// Recover from a missing period by inserting one.
next = rewriteAndRecover(
token,
codes.templateExpectedButGot.withArguments('.'),
new SyntheticToken(TokenType.PERIOD, next.charOffset));
// These 3 lines are duplicated here and below.
periodAfterThisOrSuper = token = next;
next = token.next!;
nameContext = IdentifierContext.fieldInitializer;
}
} else {
// These 3 lines are duplicated here and above.
periodAfterThisOrSuper = token = next;
next = token.next!;
nameContext = IdentifierContext.fieldInitializer;
}
}
if (next.isIdentifier) {
token = next;
next = token.next!;
}
Token? beforeInlineFunctionType;
TypeParamOrArgInfo typeParam = noTypeParamOrArg;
if (optional("<", next)) {
typeParam = computeTypeParamOrArg(token);
if (typeParam != noTypeParamOrArg) {
Token closer = typeParam.skip(token);
if (optional("(", closer.next!)) {
if (varFinalOrConst != null) {
reportRecoverableError(
varFinalOrConst, codes.messageFunctionTypedParameterVar);
}
beforeInlineFunctionType = token;
token = closer.next!.endGroup!;
next = token.next!;
}
}
} else if (optional("(", next)) {
if (varFinalOrConst != null) {
reportRecoverableError(
varFinalOrConst, codes.messageFunctionTypedParameterVar);
}
beforeInlineFunctionType = token;
token = next.endGroup!;
next = token.next!;
}
if (typeInfo != noType &&
varFinalOrConst != null &&
optional('var', varFinalOrConst)) {
reportRecoverableError(varFinalOrConst, codes.messageTypeAfterVar);
}
Token? endInlineFunctionType;
if (beforeInlineFunctionType != null) {
endInlineFunctionType =
typeParam.parseVariables(beforeInlineFunctionType, this);
listener
.beginFunctionTypedFormalParameter(beforeInlineFunctionType.next!);
token = typeInfo.parseType(beforeType, this);
endInlineFunctionType = parseFormalParametersRequiredOpt(
endInlineFunctionType, MemberKind.FunctionTypedParameter);
Token? question;
if (optional('?', endInlineFunctionType.next!)) {
question = endInlineFunctionType = endInlineFunctionType.next!;
}
listener.endFunctionTypedFormalParameter(
beforeInlineFunctionType, question);
// Generalized function types don't allow inline function types.
// The following isn't allowed:
// int Function(int bar(String x)).
if (inFunctionType) {
reportRecoverableError(beforeInlineFunctionType.next!,
codes.messageInvalidInlineFunctionType);
}
} else if (inFunctionType) {
token = typeInfo.ensureTypeOrVoid(beforeType, this);
} else {
token = typeInfo.parseType(beforeType, this);
}
Token nameToken;
if (periodAfterThisOrSuper != null) {
token = periodAfterThisOrSuper;
}
final bool isNamedParameter = parameterKind.isNamed;
next = token.next!;
if (inFunctionType &&
!isNamedParameter &&
!next.isKeywordOrIdentifier &&
beforeInlineFunctionType == null) {
nameToken = token.next!;
listener.handleNoName(nameToken);
} else {
nameToken = token = ensureIdentifier(token, nameContext);
if (isNamedParameter && nameToken.lexeme.startsWith("_")) {
reportRecoverableError(nameToken, codes.messagePrivateNamedParameter);
}
}
if (endInlineFunctionType != null) {
token = endInlineFunctionType;
}
next = token.next!;
String? value = next.stringValue;
Token? initializerStart, initializerEnd;
if ((identical('=', value)) || (identical(':', value))) {
Token equal = next;
initializerStart = equal.next!;
listener.beginFormalParameterDefaultValueExpression();
token = initializerEnd = parseExpression(equal);
next = token.next!;
listener.endFormalParameterDefaultValueExpression();
// TODO(danrubel): Consider removing the last parameter from the
// handleValuedFormalParameter event... it appears to be unused.
listener.handleValuedFormalParameter(equal, next, parameterKind);
if (parameterKind.isRequiredPositional) {
reportRecoverableError(
equal, codes.messageRequiredParameterWithDefault);
} else if (parameterKind.isOptionalPositional && identical(':', value)) {
reportRecoverableError(
equal, codes.messagePositionalParameterWithEquals);
} else if (inFunctionType ||
memberKind == MemberKind.FunctionTypeAlias ||
memberKind == MemberKind.FunctionTypedParameter) {
reportRecoverableError(equal, codes.messageFunctionTypeDefaultValue);
}
} else {
listener.handleFormalParameterWithoutValue(next);
}
listener.endFormalParameter(
thisKeyword,
superKeyword,
periodAfterThisOrSuper,
nameToken,
initializerStart,
initializerEnd,
parameterKind,
memberKind);
return token;
}
/// Parses a group of optional positional parameters enclosed in `[` and `]`.
///
/// ```
/// defaultFormalParameter:
///   normalFormalParameter ('=' expression)?
/// ;
/// ```
///
/// Upon entry, `token.next` is the opening `[`. Returns the closing `]`.
Token parseOptionalPositionalParameters(Token token, MemberKind kind) {
Token begin = token = token.next!;
assert(optional('[', token));
listener.beginOptionalFormalParameters(begin);
int parameterCount = 0;
while (true) {
Token next = token.next!;
if (optional(']', next)) {
break;
}
token = parseFormalParameter(
token, FormalParameterKind.optionalPositional, kind);
next = token.next!;
++parameterCount;
if (!optional(',', next)) {
if (!optional(']', next)) {
// Recovery: report the unexpected token, then advance `token` up to
// the matching `]` so parsing can resume after the group.
reportRecoverableError(
next, codes.templateExpectedButGot.withArguments(']'));
// Scanner guarantees a closing bracket.
next = begin.endGroup!;
while (token.next != next) {
token = token.next!;
}
}
break;
}
token = next;
}
if (parameterCount == 0) {
// Recovery: `[]` is not a valid optional parameter group. Insert a
// synthetic empty identifier and parse it as a parameter so listeners
// always see at least one parameter event.
rewriteAndRecover(
token,
codes.messageEmptyOptionalParameterList,
new SyntheticStringToken(TokenType.IDENTIFIER, '',
token.next!.charOffset, /* _length = */ 0));
token = parseFormalParameter(
token, FormalParameterKind.optionalPositional, kind);
++parameterCount;
}
token = token.next!;
assert(optional(']', token));
listener.endOptionalFormalParameters(parameterCount, begin, token, kind);
return token;
}
/// Parses a group of named parameters enclosed in `{` and `}`.
///
/// ```
/// defaultNamedParameter:
///   normalFormalParameter ('=' expression)? |
///   normalFormalParameter (':' expression)?
/// ;
/// ```
///
/// Upon entry, `token.next` is the opening `{`. Returns the closing `}`.
Token parseOptionalNamedParameters(Token token, MemberKind kind) {
Token begin = token = token.next!;
assert(optional('{', token));
listener.beginOptionalFormalParameters(begin);
int parameterCount = 0;
while (true) {
Token next = token.next!;
if (optional('}', next)) {
break;
}
token =
parseFormalParameter(token, FormalParameterKind.optionalNamed, kind);
next = token.next!;
++parameterCount;
if (!optional(',', next)) {
if (!optional('}', next)) {
// Recovery: report the unexpected token, then advance `token` up to
// the matching `}` so parsing can resume after the group.
reportRecoverableError(
next, codes.templateExpectedButGot.withArguments('}'));
// Scanner guarantees a closing bracket.
next = begin.endGroup!;
while (token.next != next) {
token = token.next!;
}
}
break;
}
token = next;
}
if (parameterCount == 0) {
// Recovery: `{}` is not a valid named parameter group. Insert a
// synthetic empty identifier and parse it as a parameter so listeners
// always see at least one parameter event.
rewriteAndRecover(
token,
codes.messageEmptyNamedParameterList,
new SyntheticStringToken(TokenType.IDENTIFIER, '',
token.next!.charOffset, /* _length = */ 0));
token =
parseFormalParameter(token, FormalParameterKind.optionalNamed, kind);
++parameterCount;
}
token = token.next!;
assert(optional('}', token));
listener.endOptionalFormalParameters(parameterCount, begin, token, kind);
return token;
}
/// Parses a qualified name: an identifier followed by zero or more
/// `'.' identifier` continuations.
///
/// ```
/// qualified:
///   identifier qualifiedRest*
/// ;
/// ```
Token parseQualified(Token token, IdentifierContext context,
    IdentifierContext continuationContext) {
  Token current = ensureIdentifier(token, context);
  while (true) {
    if (!optional('.', current.next!)) return current;
    current = parseQualifiedRest(current, continuationContext);
  }
}
/// Parses a single `'.' identifier` continuation if one follows [token];
/// otherwise returns [token] unchanged.
///
/// ```
/// qualifiedRestOpt:
///   qualifiedRest?
/// ;
/// ```
Token parseQualifiedRestOpt(
    Token token, IdentifierContext continuationContext) {
  return optional('.', token.next!)
      ? parseQualifiedRest(token, continuationContext)
      : token;
}
/// Parses one `'.' identifier` continuation of a qualified name.
///
/// ```
/// qualifiedRest:
///   '.' identifier
/// ;
/// ```
///
/// Upon entry, `token.next` is the `.`. Returns the identifier token.
Token parseQualifiedRest(Token token, IdentifierContext context) {
  Token period = token.next!;
  assert(optional('.', period));
  // A `new` keyword after the period may be rewritten to an identifier.
  _tryRewriteNewToIdentifier(period, context);
  Token identifier = ensureIdentifier(period, context);
  listener.handleQualified(period);
  return identifier;
}
/// Skips past a `{ ... }` block without parsing its contents.
///
/// Inserts a synthetic block first if none is present, then returns the
/// closing `}`.
Token skipBlock(Token token) {
  Token openBrace = ensureBlock(token, /* missingBlockKind = */ null);
  // The scanner guarantees that every `{` has a matching `}`.
  return openBrace.endGroup!;
}
/// Parses an enum declaration, starting after its metadata.
///
/// ```
/// enumType:
///   metadata 'enum' id typeParameters? mixins? interfaces? '{'
///      enumEntry (',' enumEntry)* (',')? (';'
///      (metadata classMemberDefinition)*
///      )?
///   '}'
///
/// enumEntry:
///     metadata id argumentPart?
///   | metadata id typeArguments? '.' id arguments
/// ```
///
/// Returns the closing `}` of the declaration.
Token parseEnum(Token beginToken, Token? augmentToken, Token enumKeyword) {
assert(optional('enum', enumKeyword));
listener.beginUncategorizedTopLevelDeclaration(enumKeyword);
Token token =
ensureIdentifier(enumKeyword, IdentifierContext.enumDeclaration);
String name = token.lexeme;
listener.beginEnum(enumKeyword);
// Parse type parameters and any with/implements clauses.
token = parseEnumHeaderOpt(token, enumKeyword);
Token leftBrace = token.next!;
int elementCount = 0;
int memberCount = 0;
if (optional('{', leftBrace)) {
listener.handleEnumHeader(augmentToken, enumKeyword, leftBrace);
token = leftBrace;
// Parse the comma-separated enum values up to `}` or the `;` that
// starts the member section.
while (true) {
Token next = token.next!;
if (optional('}', next) || optional(';', next)) {
token = next;
break;
}
token = parseEnumElement(token);
next = token.next!;
elementCount++;
if (optional(',', next)) {
token = next;
} else if (optional('}', next) || optional(';', next)) {
token = next;
break;
} else {
// Recovery
Token endGroup = leftBrace.endGroup!;
if (endGroup.isSynthetic) {
// The scanner did not place the synthetic '}' correctly.
token = rewriter.moveSynthetic(token, endGroup);
break;
} else if (next.isIdentifier) {
// If the next token is an identifier, assume a missing comma.
// TODO(danrubel): Consider improved recovery for missing `}`
// both here and when the scanner inserts a synthetic `}`
// for situations such as `enum Letter {a, b Letter e;`.
reportRecoverableError(
next, codes.templateExpectedButGot.withArguments(','));
} else {
// Otherwise assume a missing `}` and exit the loop
reportRecoverableError(
next, codes.templateExpectedButGot.withArguments('}'));
token = leftBrace.endGroup!;
break;
}
}
}
listener.handleEnumElements(token, elementCount);
if (optional(';', token)) {
// After `;`, parse class-like members until the closing `}` (or EOF).
while (notEofOrValue('}', token.next!)) {
token = parseClassOrMixinOrExtensionOrEnumMemberImpl(
token, DeclarationKind.Enum, name);
++memberCount;
}
token = token.next!;
assert(token.isEof || optional('}', token));
}
} else {
// Recovery: no `{` after the header. Insert a synthetic empty body so
// listeners still receive a complete event sequence.
leftBrace = ensureBlock(token, BlockKind.enumDeclaration);
listener.handleEnumHeader(augmentToken, enumKeyword, leftBrace);
listener.handleEnumElements(token, elementCount);
token = leftBrace.endGroup!;
}
assert(optional('}', token));
listener.endEnum(beginToken, enumKeyword, leftBrace, memberCount, token);
return token;
}
/// Parses the optional header of an enum declaration: type parameters,
/// a `with` clause, and an `implements` clause.
///
/// Recovers from unexpected tokens before/between clauses and from
/// duplicate or out-of-order `with`/`implements` clauses, reporting an
/// appropriate error for each. Returns the last token of the header (the
/// token before the enum body's `{`, when present).
Token parseEnumHeaderOpt(Token token, Token enumKeyword) {
  token = computeTypeParamOrArg(
          token, /* inDeclaration = */ true, /* allowsVariance = */ true)
      .parseVariables(token, this);
  // Tokens that legitimately start the next part of the declaration.
  const List<String> lookForNext = ['{', 'with', 'implements'];
  if (!isOneOf(token.next!, lookForNext)) {
    // Recovery: Possible unexpected tokens before any clauses.
    Token? skipToken = recoverySmallLookAheadSkipTokens(token, lookForNext);
    if (skipToken != null) {
      token = skipToken;
    }
  }
  Token beforeWith = token;
  token = parseEnumWithClauseOpt(token);
  while (!isOneOf(token.next!, const ['{', 'implements'])) {
    // Recovery: Skip unexpected tokens and more with clauses.
    // Note that if we find a "with" we've seen one already (otherwise the
    // parseEnumWithClauseOpt call above would have found this 'with').
    Token? skipToken = recoveryEnumWith(token,
            codes.templateMultipleClauses.withArguments("enum", "with")) ??
        recoverySmallLookAheadSkipTokens(token, lookForNext);
    if (skipToken == null) break;
    // Skipped tokens.
    token = skipToken;
  }
  token = parseClassOrMixinOrEnumImplementsOpt(token);
  bool? hasWithClauses;
  while (!optional('{', token.next!)) {
    // Lazily determine (once) whether a `with` clause was already parsed,
    // so the right error can be chosen below.
    hasWithClauses ??= optional('with', beforeWith.next!);
    // Recovery: Skip unexpected tokens and more with/implements clauses.
    Token? skipToken = recoveryEnumWith(
        token,
        hasWithClauses
            ? codes.templateMultipleClauses.withArguments("enum", "with")
            : codes.templateOutOfOrderClauses
                .withArguments("with", "implements"));
    if (skipToken != null) {
      hasWithClauses = true;
    }
    // Note that if we find an "implements" we've seen one already
    // (otherwise the parseClassOrMixinOrEnumImplementsOpt call above
    // would have found this 'implements').
    skipToken ??= recoveryEnumImplements(token,
        codes.templateMultipleClauses.withArguments("enum", "implements"));
    skipToken ??= recoverySmallLookAheadSkipTokens(token, lookForNext);
    if (skipToken == null) break;
    // Skipped tokens.
    token = skipToken;
  }
  return token;
}
/// Recovery: if the next token starts an extra `with` clause, reports
/// [message], parses the clause with its events suppressed, and returns
/// the last token of the clause. Returns null if there is no `with` here.
Token? recoveryEnumWith(Token token, codes.Message message) {
  if (!optional('with', token.next!)) return null;
  reportRecoverableError(token.next!, message);
  // Swap in a muted listener so no events for the bogus clause reach the
  // real listener, then restore it.
  Listener saved = listener;
  listener = new NullListener();
  Token end = parseEnumWithClauseOpt(token);
  listener = saved;
  return end;
}
/// Recovery: if the next token starts an extra `implements` clause, reports
/// [message], parses the clause with its events suppressed, and returns
/// the last token of the clause. Returns null if there is no `implements`.
Token? recoveryEnumImplements(Token token, codes.Message message) {
  if (!optional('implements', token.next!)) return null;
  reportRecoverableError(token.next!, message);
  // Swap in a muted listener so no events for the bogus clause reach the
  // real listener, then restore it.
  Listener saved = listener;
  listener = new NullListener();
  Token end = parseClassOrMixinOrEnumImplementsOpt(token);
  listener = saved;
  return end;
}
/// Allow a small lookahead (currently up to 3 tokens) trying to find any in
/// [lookFor].
///
/// If any wanted token is found an error is issued about unexpected tokens,
/// and the last skipped token is returned.
/// Otherwise null is returned.
Token? recoverySmallLookAheadSkipTokens(
    final Token token, Iterable<String> lookFor) {
  // Recovery: Allow a small lookahead for '{'. E.g. the user might be in
  // the middle of writing 'with' or 'implements'.
  Token current = token.next!;
  if (looksLikeStartOfNextTopLevelDeclaration(current)) return null;
  for (int count = 1; count <= 3; count++) {
    if (isOneOf(current.next!, lookFor)) {
      // Found a wanted token: report the skipped tokens and return the
      // last of them.
      if (count == 1) {
        reportRecoverableError(
            current, codes.templateUnexpectedToken.withArguments(current));
      } else {
        reportRecoverableErrorWithEnd(
            token.next!, current, codes.messageUnexpectedTokens);
      }
      return current;
    }
    current = current.next!;
    // Give up if we run into what looks like the next declaration.
    if (looksLikeStartOfNextTopLevelDeclaration(current)) return null;
  }
  return null;
}
/// Parses a single enum value, including its metadata, optional `augment`
/// modifier, optional constructor reference (type arguments and/or
/// `.namedConstructor`), and optional argument list.
///
/// Returns the last token of the element.
Token parseEnumElement(Token token) {
Token beginToken = token;
token = parseMetadataStar(token);
Token? augmentToken;
if (optional('augment', token.next!)) {
augmentToken = token.next!;
token = token.next!;
}
token = ensureIdentifier(token, IdentifierContext.enumValueDeclaration);
// Tracks whether an explicit constructor reference (type arguments or a
// `.name` part) was seen; if so, arguments are parsed even without `(`.
bool hasTypeArgumentsOrDot = false;
{
// This is almost a verbatim copy of [parseConstructorReference] inserted
// to provide better recovery.
Token start = token;
listener.handleNoTypeNameInConstructorReference(token.next!);
listener.beginConstructorReference(start);
TypeParamOrArgInfo typeArg = computeTypeParamOrArg(token);
if (typeArg != noTypeParamOrArg) {
hasTypeArgumentsOrDot = true;
}
token = typeArg.parseArguments(token, this);
Token? period = null;
if (optional('.', token.next!)) {
hasTypeArgumentsOrDot = true;
period = token.next!;
token = ensureIdentifier(
period,
IdentifierContext
.constructorReferenceContinuationAfterTypeArguments);
} else {
listener
.handleNoConstructorReferenceContinuationAfterTypeArguments(token);
}
listener.endConstructorReference(
start, period, token, ConstructorReferenceContext.Const);
}
Token next = token.next!;
if (optional('(', next) || hasTypeArgumentsOrDot) {
token = parseConstructorInvocationArguments(token);
} else {
listener.handleNoArguments(token);
}
listener.handleEnumElement(beginToken, augmentToken);
return token;
}
/// Parses a class declaration or a named mixin application, starting after
/// the `class` keyword's modifiers.
///
/// The modifier tokens ([abstractToken] through [mixinToken]) are null when
/// the corresponding modifier was absent. After the name and any type
/// parameters, a following `=` selects the named-mixin-application form
/// (`class C = S with M;`); otherwise a regular class body is parsed.
Token parseClassOrNamedMixinApplication(
Token beginToken,
Token? abstractToken,
Token? macroToken,
Token? sealedToken,
Token? baseToken,
Token? interfaceToken,
Token? finalToken,
Token? augmentToken,
Token? mixinToken,
Token classKeyword) {
assert(optional('class', classKeyword));
listener.beginClassOrMixinOrNamedMixinApplicationPrelude(beginToken);
Token name = ensureIdentifier(
classKeyword, IdentifierContext.classOrMixinOrExtensionDeclaration);
Token token = computeTypeParamOrArg(
name, /* inDeclaration = */ true, /* allowsVariance = */ true)
.parseVariables(name, this);
// Report invalid modifier combinations involving `abstract`.
if (abstractToken != null) {
if (sealedToken != null) {
reportRecoverableError(sealedToken, codes.messageAbstractSealedClass);
} else if (finalToken != null) {
if (baseToken != null) {
reportRecoverableErrorWithEnd(
finalToken, baseToken, codes.messageAbstractFinalBaseClass);
} else if (interfaceToken != null) {
reportRecoverableErrorWithEnd(finalToken, interfaceToken,
codes.messageAbstractFinalInterfaceClass);
}
}
}
if (optional('=', token.next!)) {
// Named mixin application: `class C<T> = S with M implements I;`
listener.beginNamedMixinApplication(
beginToken,
abstractToken,
macroToken,
sealedToken,
baseToken,
interfaceToken,
finalToken,
augmentToken,
mixinToken,
name);
return parseNamedMixinApplication(token, beginToken, classKeyword);
} else {
// Regular class declaration with a body.
listener.beginClassDeclaration(
beginToken,
abstractToken,
macroToken,
sealedToken,
baseToken,
interfaceToken,
finalToken,
augmentToken,
mixinToken,
name);
return parseClass(token, beginToken, classKeyword, name.lexeme);
}
}
/// Parses the remainder of a named mixin application:
/// `= <superclass> with <mixins> ('implements' <types>)? ';'`.
///
/// Upon entry, `token.next` is the `=`. Returns the terminating `;`.
Token parseNamedMixinApplication(
    Token token, Token begin, Token classKeyword) {
  Token equals = token.next!;
  assert(optional('=', equals));
  // The superclass to the right of `=` is required and must not be `void`.
  Token current = computeType(equals, /* required = */ true)
      .ensureTypeNotVoid(equals, this);
  current = parseMixinApplicationRest(current);
  Token? implementsKeyword;
  Token next = current.next!;
  if (optional('implements', next)) {
    implementsKeyword = next;
    current = parseTypeList(implementsKeyword);
  }
  current = ensureSemicolon(current);
  listener.endNamedMixinApplication(
      begin, classKeyword, equals, implementsKeyword, current);
  return current;
}
/// Parse the portion of a class declaration (not a mixin application) that
/// follows the end of the type parameters.
///
/// ```
/// classDefinition:
///   metadata abstract? 'class' identifier typeParameters?
///       (superclass mixins?)? interfaces?
///       '{' (metadata classMemberDefinition)* '}' |
///   metadata abstract? 'class' mixinApplicationClass
/// ;
/// ```
Token parseClass(
    Token token, Token beginToken, Token classKeyword, String className) {
  final Token headerStart = token;
  token = parseClassHeaderOpt(token, beginToken, classKeyword);
  if (!optional('{', token.next!)) {
    // Recovery: reparse the header accepting out-of-order clauses, then
    // ensure a (possibly synthetic) class body is present.
    token = parseClassHeaderRecovery(headerStart, beginToken, classKeyword);
    ensureBlock(token, BlockKind.classDeclaration);
  }
  token = parseClassOrMixinOrExtensionBody(
      token, DeclarationKind.Class, className);
  listener.endClassDeclaration(beginToken, token);
  return token;
}
/// Parses the optional clauses of a class header, in order: `extends`,
/// `with`, `implements`, and a `native` clause.
Token parseClassHeaderOpt(Token token, Token begin, Token classKeyword) {
  token = parseClassOrMixinOrEnumImplementsOpt(parseClassWithClauseOpt(
      parseClassExtendsOpt(token, DeclarationHeaderKind.Class)));
  Token? nativeToken;
  Token next = token.next!;
  if (optional('native', next)) {
    nativeToken = next;
    token = parseNativeClause(token);
  }
  listener.handleClassHeader(begin, classKeyword, nativeToken);
  return token;
}
/// Recovers from out-of-order or repeated clauses in a class header.
Token parseClassHeaderRecovery(
        Token token, Token begin, Token classKeyword) =>
    parseDeclarationHeaderRecoveryInternal(
        token, begin, classKeyword, DeclarationHeaderKind.Class);
/// Recovers from out-of-order or repeated clauses in an extension type
/// header.
Token parseExtensionTypeHeaderRecovery(Token token, Token extensionKeyword) =>
    parseDeclarationHeaderRecoveryInternal(token, extensionKeyword,
        extensionKeyword, DeclarationHeaderKind.ExtensionType);
/// Recover given out-of-order clauses in a class, enum, mixin, extension, or
/// extension type header.
///
/// Reparses the header with an intercepting listener to discover which
/// clauses were already consumed, then repeatedly parses additional
/// out-of-order clauses (reporting an error for each duplicate or
/// misordered one) until the declaration body's `{` is reached or no
/// progress is made.
Token parseDeclarationHeaderRecoveryInternal(Token token, Token begin,
Token declarationKeyword, DeclarationHeaderKind kind) {
final Listener primaryListener = listener;
final DeclarationHeaderRecoveryListener recoveryListener =
new DeclarationHeaderRecoveryListener();
// Reparse to determine which clauses have already been parsed
// but intercept the events so they are not sent to the primary listener.
listener = recoveryListener;
switch (kind) {
case DeclarationHeaderKind.Class:
token = parseClassHeaderOpt(token, begin, declarationKeyword);
case DeclarationHeaderKind.ExtensionType:
token = parseClassOrMixinOrEnumImplementsOpt(token);
}
bool hasExtends = recoveryListener.extendsKeyword != null;
bool hasImplements = recoveryListener.implementsKeyword != null;
bool hasWith = recoveryListener.withKeyword != null;
// Update the recovery listener to forward subsequent events
// to the primary listener.
recoveryListener.listener = primaryListener;
// Parse additional out-of-order clauses
Token start;
do {
start = token;
// Check for extraneous token in the middle of a declaration header.
token = skipUnexpectedTokenOpt(
token, const <String>['extends', 'with', 'implements', '{']);
// During recovery, clauses are parsed in the same order
// and generate the same events as in the parseClassHeader method above.
recoveryListener.clear();
// Accept the common typos `extend` and `on` in place of `extends`.
if (token.next!.isKeywordOrIdentifier &&
const ['extend', 'on'].contains(token.next!.lexeme)) {
reportRecoverableError(token.next!,
codes.templateExpectedInstead.withArguments('extends'));
token = parseClassExtendsSeenExtendsClause(token.next!, token, kind);
} else {
token = parseClassExtendsOpt(token, kind);
}
if (recoveryListener.extendsKeyword != null) {
switch (kind) {
case DeclarationHeaderKind.Class:
if (hasExtends) {
reportRecoverableError(recoveryListener.extendsKeyword!,
codes.messageMultipleExtends);
} else {
if (hasWith) {
reportRecoverableError(recoveryListener.extendsKeyword!,
codes.messageWithBeforeExtends);
} else if (hasImplements) {
reportRecoverableError(recoveryListener.extendsKeyword!,
codes.messageImplementsBeforeExtends);
}
hasExtends = true;
}
case DeclarationHeaderKind.ExtensionType:
// Extension types cannot have an `extends` clause at all.
reportRecoverableError(recoveryListener.extendsKeyword!,
codes.messageExtensionTypeExtends);
}
}
token = parseClassWithClauseOpt(token);
if (recoveryListener.withKeyword != null) {
switch (kind) {
case DeclarationHeaderKind.Class:
if (hasWith) {
reportRecoverableError(
recoveryListener.withKeyword!, codes.messageMultipleWith);
} else {
if (hasImplements) {
reportRecoverableError(recoveryListener.withKeyword!,
codes.messageImplementsBeforeWith);
}
hasWith = true;
}
case DeclarationHeaderKind.ExtensionType:
// Extension types cannot have a `with` clause at all.
reportRecoverableError(
recoveryListener.withKeyword!, codes.messageExtensionTypeWith);
}
}
token = parseClassOrMixinOrEnumImplementsOpt(token);
if (recoveryListener.implementsKeyword != null) {
if (hasImplements) {
reportRecoverableError(recoveryListener.implementsKeyword!,
codes.messageMultipleImplements);
} else {
hasImplements = true;
}
}
listener.handleRecoverDeclarationHeader(kind);
// Exit if a declaration body is detected, or if no progress has been made
} while (!optional('{', token.next!) && start != token);
listener = primaryListener;
return token;
}
/// Parses an optional `extends` clause.
///
/// When absent, emits the no-type and extends events so listeners always
/// receive a uniform event sequence.
Token parseClassExtendsOpt(Token token, DeclarationHeaderKind kind) {
  // extends <typeNotVoid>
  Token next = token.next!;
  if (optional('extends', next)) {
    return parseClassExtendsSeenExtendsClause(next, token, kind);
  }
  listener.handleNoType(token);
  listener.handleClassExtends(
      /* extendsKeyword = */ null, /* typeCount = */ 1);
  return token;
}
/// Parses an `extends` clause whose keyword has already been seen.
///
/// Recovers from the invalid `extends A, B, ...` form by parsing every
/// comma-separated type (reporting an error for classes; extension types
/// report this elsewhere).
Token parseClassExtendsSeenExtendsClause(
    Token extendsKeyword, Token token, DeclarationHeaderKind kind) {
  token = computeType(extendsKeyword, /* required = */ true)
      .ensureTypeNotVoid(extendsKeyword, this);
  int typeCount = 1;
  // Error recovery: extends <typeNotVoid>, <typeNotVoid> [...]
  if (optional(',', token.next!)) {
    if (kind == DeclarationHeaderKind.Class) {
      reportRecoverableError(token.next!, codes.messageMultipleExtends);
    }
    // For extension types this is an error case reported elsewhere.
    do {
      Token comma = token.next!;
      token = computeType(comma, /* required = */ true)
          .ensureTypeNotVoid(comma, this);
      typeCount++;
    } while (optional(',', token.next!));
  }
  listener.handleClassExtends(extendsKeyword, typeCount);
  return token;
}
/// Parses an optional `implements` clause.
///
/// ```
/// implementsClause:
///   'implements' typeName (',' typeName)*
/// ;
/// ```
Token parseClassOrMixinOrEnumImplementsOpt(Token token) {
  Token? implementsKeyword;
  int interfacesCount = 0;
  Token next = token.next!;
  if (optional('implements', next)) {
    implementsKeyword = next;
    while (true) {
      // The separator is `implements` for the first type, `,` afterwards.
      Token separator = token.next!;
      token = computeType(separator, /* required = */ true)
          .ensureTypeNotVoid(separator, this);
      interfacesCount++;
      if (!optional(',', token.next!)) break;
    }
  }
  listener.handleImplements(implementsKeyword, interfacesCount);
  return token;
}
/// Parse a mixin declaration.
///
/// ```
/// mixinDeclaration:
///   metadata? 'augment'? 'base'? 'mixin' [SimpleIdentifier]
///        [TypeParameterList]? [OnClause]? [ImplementsClause]?
///        '{' [ClassMember]* '}'
/// ;
/// ```
///
/// Returns the closing `}` of the declaration.
Token parseMixin(Token beginToken, Token? augmentToken, Token? baseToken,
Token mixinKeyword) {
assert(optional('mixin', mixinKeyword));
listener.beginClassOrMixinOrNamedMixinApplicationPrelude(mixinKeyword);
Token name = ensureIdentifier(
mixinKeyword, IdentifierContext.classOrMixinOrExtensionDeclaration);
Token headerStart = computeTypeParamOrArg(
name, /* inDeclaration = */ true, /* allowsVariance = */ true)
.parseVariables(name, this);
listener.beginMixinDeclaration(
beginToken, augmentToken, baseToken, mixinKeyword, name);
token = parseMixinHeaderOpt(headerStart, mixinKeyword);
if (!optional('{', token.next!)) {
// Recovery: reparse the header accepting out-of-order clauses, then
// ensure a (possibly synthetic) mixin body is present.
token = parseMixinHeaderRecovery(token, mixinKeyword, headerStart);
ensureBlock(token, BlockKind.mixinDeclaration);
}
token = parseClassOrMixinOrExtensionBody(
token, DeclarationKind.Mixin, name.lexeme);
listener.endMixinDeclaration(beginToken, token);
return token;
}
/// Parses the optional `on` and `implements` clauses of a mixin header.
Token parseMixinHeaderOpt(Token token, Token mixinKeyword) {
  Token end = parseClassOrMixinOrEnumImplementsOpt(parseMixinOnOpt(token));
  listener.handleMixinHeader(mixinKeyword);
  return end;
}
Token parseMixinHeaderRecovery(
Token token, Token mixinKeyword, Token headerStart) {
final Listener primaryListener = listener;
final MixinHeaderRecoveryListener recoveryListener =
new MixinHeaderRecoveryListener();
// Reparse to determine which clauses have already been parsed
// but intercept the events so they are not sent to the primary listener.
listener = recoveryListener;
token = parseMixinHeaderOpt(headerStart, mixinKeyword);
bool hasOn = recoveryListener.onKeyword != null;
bool hasImplements = recoveryListener.implementsKeyword != null;
// Update the recovery listener to forward subsequent events
// to the primary listener.
recoveryListener.listener = primaryListener;
// Parse additional out-of-order clauses
Token start;
do {
start = token;
// Check for extraneous token in the middle of a class header.
token = skipUnexpectedTokenOpt(
token, const <String>['on', 'implements', '{']);
// During recovery, clauses are parsed in the same order and
// generate the same events as in the parseMixinHeaderOpt method above.
recoveryListener.clear();
if (token.next!.isKeywordOrIdentifier &&
const ['extend', 'extends'].contains(token.next!.lexeme)) {
reportRecoverableError(
token.next!, codes.templateExpectedInstead.withArguments('on'));
token = parseMixinOn(token);
} else {
token = parseMixinOnOpt(token);
}
if (recoveryListener.onKeyword != null) {
if (hasOn) {
reportRecoverableError(
recoveryListener.onKeyword!, codes.messageMultipleOnClauses);
} else {
if (hasImplements) {
reportRecoverableError(
recoveryListener.onKeyword!, codes.messageImplementsBeforeOn);
}
hasOn = true;
}
}
token = parseClassOrMixinOrEnumImplementsOpt(token);
if (recoveryListener.implementsKeyword != null) {
if (hasImplements) {
reportRecoverableError(recoveryListener.implementsKeyword!,
codes.messageMultipleImplements);
} else {
hasImplements = true;
}
}
if (optional("with", token.next!)) {
Token withKeyword = token.next!;
reportRecoverableError(token.next!, codes.messageMixinWithClause);
token = parseTypeList(withKeyword);
listener.handleMixinWithClause(withKeyword);