// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library fasta.parser.parser;
import '../fasta_codes.dart' show Message, Template;
import '../fasta_codes.dart' as fasta;
import '../scanner.dart' show ErrorToken, Token;
import '../scanner/recover.dart' show skipToEof;
import '../../scanner/token.dart'
show
ASSIGNMENT_PRECEDENCE,
BeginToken,
CASCADE_PRECEDENCE,
EQUALITY_PRECEDENCE,
Keyword,
POSTFIX_PRECEDENCE,
RELATIONAL_PRECEDENCE,
SELECTOR_PRECEDENCE,
SyntheticBeginToken,
SyntheticKeywordToken,
SyntheticStringToken,
SyntheticToken,
TokenType;
import '../scanner/token_constants.dart'
show
COMMA_TOKEN,
DOUBLE_TOKEN,
EOF_TOKEN,
EQ_TOKEN,
FUNCTION_TOKEN,
HASH_TOKEN,
HEXADECIMAL_TOKEN,
IDENTIFIER_TOKEN,
INT_TOKEN,
KEYWORD_TOKEN,
LT_TOKEN,
OPEN_CURLY_BRACKET_TOKEN,
OPEN_PAREN_TOKEN,
OPEN_SQUARE_BRACKET_TOKEN,
SEMICOLON_TOKEN,
STRING_INTERPOLATION_IDENTIFIER_TOKEN,
STRING_INTERPOLATION_TOKEN,
STRING_TOKEN;
import '../util/link.dart' show Link;
import 'assert.dart' show Assert;
import 'async_modifier.dart' show AsyncModifier;
import 'directive_context.dart';
import 'formal_parameter_kind.dart'
show
FormalParameterKind,
isMandatoryFormalParameterKind,
isOptionalPositionalFormalParameterKind;
import 'forwarding_listener.dart' show ForwardingListener;
import 'identifier_context.dart' show IdentifierContext;
import 'listener.dart' show Listener;
import 'loop_state.dart' show LoopState;
import 'member_kind.dart' show MemberKind;
import 'modifier_context.dart' show ModifierRecoveryContext, isModifier;
import 'recovery_listeners.dart'
show ClassHeaderRecoveryListener, ImportRecoveryListener;
import 'token_stream_rewriter.dart' show TokenStreamRewriter;
import 'type_continuation.dart' show TypeContinuation;
import 'type_info.dart'
show
TypeInfo,
TypeParamOrArgInfo,
computeMethodTypeArguments,
computeType,
computeTypeParamOrArg,
isGeneralizedFunctionType,
isValidTypeReference,
noType,
noTypeParamOrArg;
import 'util.dart' show findNonSyntheticToken, isOneOf, optional;
/// An event-generating parser of Dart programs. This parser expects all tokens
/// in a linked list (aka a token stream).
///
/// The class [Scanner] is used to generate a token stream. See the file
/// [scanner.dart](../scanner.dart).
///
/// Subclasses of the class [Listener] are used to listen to events.
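///
/// For example, a minimal sketch of a listener (an illustrative assumption;
/// only a tiny subset of the [Listener] events is shown):
///
/// ```
/// class ImportCountingListener extends Listener {
///   int imports = 0;
///
///   @override
///   void endImport(Token importKeyword, Token semicolon) {
///     imports++;
///   }
/// }
///
/// // Assuming `firstToken` is the first token produced by the scanner:
/// // new Parser(new ImportCountingListener()).parseUnit(firstToken);
/// ```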
///
/// Most methods of this class belong in one of four major categories: parse
/// methods, peek methods, ensure methods, and skip methods.
///
/// Parse methods all have the prefix `parse`, generate events
/// (by calling methods on [listener]), and return the next token to parse.
/// Some exceptions to this last point are methods such as [parseFunctionBody]
/// and [parseClassBody] which return the last token parsed
/// rather than the next token to be parsed.
/// Parse methods are generally named `parseGrammarProductionSuffix`.
/// The suffix can be one of `opt` or `star`.
/// `opt` means zero or one matches, and `star` means zero or more matches.
/// For example, [parseMetadataStar] corresponds to this grammar snippet:
/// `metadata*`, and [parseArgumentsOpt] corresponds to: `arguments?`.
///
/// Peek methods all have the prefix `peek`, do not generate events
/// (except for errors) and may return null.
///
/// Ensure methods all have the prefix `ensure` and may generate events.
/// They return the current token, or insert and return a synthetic token
/// if the current token does not match. For example,
/// [ensureSemicolon] returns the current token if the current token is a
/// semicolon, otherwise inserts a synthetic semicolon in the token stream
/// before the current token and then returns that new synthetic token.
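///
/// For example, a hypothetical sketch of how an ensure method is used inside
/// a parse method (`parseFoo` is not a real method of this class):
///
/// ```
/// Token parseFoo(Token token) {
///   // ... parse the production ...
///   token = ensureSemicolon(token);
///   // `token` is now the real `;`, or a synthetic `;` inserted by recovery.
///   return token;
/// }
/// ```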
///
/// Skip methods are like parse methods, but all have the prefix `skip`
/// and skip over some parts of the file being parsed.
/// Typically, skip methods generate an event for the structure being skipped,
/// but not for its substructures.
///
/// ## Current Token
///
/// The current token is always to be found in a formal parameter named
/// `token`. This parameter should be the first as this increases the chance
/// that a compiler will place it in a register.
///
/// ## Implementation Notes
///
/// The parser assumes that keywords, built-in identifiers, and other special
/// words (pseudo-keywords) are all canonicalized. To extend the parser to
/// recognize a new identifier, one should modify
/// [keyword.dart](../scanner/keyword.dart) and ensure the identifier is added
/// to the keyword table.
///
/// As a consequence of this, one should not use `==` to compare strings in the
/// parser. One should favor the methods [optional] and [expect] to recognize
/// keywords or identifiers. In some cases, it's possible to compare a token's
/// `stringValue` using [identical], but normally [optional] will suffice.
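///
/// For example (a minimal sketch of the preferred pattern):
///
/// ```
/// // Preferred: relies on canonicalized strings.
/// if (optional('class', token)) { /* ... */ }
///
/// // Avoid: comparing lexemes with `==`.
/// if (token.lexeme == 'class') { /* ... */ }
/// ```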
///
/// Historically, we over-used identical, and when identical is used on objects
/// other than strings, it can often be replaced by `==`.
///
/// ## Flexibility, Extensibility, and Specification
///
/// The parser is designed to be flexible and extensible. Its methods are
/// designed to be overridden in subclasses, so it can be extended to handle
/// unspecified language extensions or experiments while everything in this file
/// attempts to follow the specification (except when it interferes with error
/// recovery).
///
/// We achieve flexibility, extensibility, and specification compliance by
/// following a few rules-of-thumb:
///
/// 1. All methods in the parser should be public.
///
/// 2. The methods follow the specified grammar, and do not implement custom
/// extensions, for example, `native`.
///
/// 3. The parser doesn't rewrite the token stream (when dealing with `>>`).
///
/// ### Implementing Extensions
///
/// For various reasons, some Dart language implementations have used
/// custom/unspecified extensions to the Dart grammar. Examples of this
/// include diet parsing, patch files, the `native` keyword, and generic
/// comments. This class isn't supposed to implement any of these
/// features. Instead it provides hooks for those extensions to be implemented
/// in subclasses or listeners. Let's examine how diet parsing and the
/// `native` keyword are currently supported by Fasta.
///
/// #### Legacy Implementation of `native` Keyword
///
/// TODO(ahe,danrubel): Remove this section.
///
/// Both dart2js and the Dart VM have used the `native` keyword to mark methods
/// that couldn't be implemented in the Dart language and needed to be
/// implemented in JavaScript or C++, respectively. An example of the syntax
/// extension used by the Dart VM is:
///
/// nativeFunction() native "NativeFunction";
///
/// When attempting to parse this function, the parser eventually calls
/// [parseFunctionBody]. This method will report an unrecoverable error to the
/// listener with the code [fasta.messageExpectedFunctionBody]. The listener can
/// then look at the error code and the token and use the methods in
/// [native_support.dart](native_support.dart) to parse the native syntax.
///
/// #### Implementation of Diet Parsing
///
/// We call it _diet_ _parsing_ when the parser skips parts of a file. Both
/// dart2js and the Dart VM have been relying on this from early on as it allows
/// them to more quickly compile small programs that use small parts of big
/// libraries. It's also become an integrated part of how Fasta builds up
/// outlines before starting to parse method bodies.
///
/// When looking through this parser, you'll find a number of unused methods
/// starting with `skip`. These methods are only used by subclasses, such as
/// [ClassMemberParser](class_member_parser.dart) and
/// [TopLevelParser](top_level_parser.dart). These methods violate the
/// principle above about following the specified grammar, and originally lived
/// in subclasses. However, we realized that these methods were so widely used
/// and hard to maintain in subclasses, that it made sense to move them here.
///
/// ### Specification and Error Recovery
///
/// To improve error recovery, the parser will inform the listener of
/// recoverable errors and continue to parse. An example of a recoverable
/// error is:
///
/// Error: Asynchronous for-loop can only be used in 'async' or 'async*'...
/// main() { await for (var x in []) {} }
/// ^^^^^
///
/// ### Legacy Error Recovery
///
/// What's described below will be phased out in favor of the parser
/// reporting and recovering from syntax errors. The motivation for this is
/// that we have multiple listeners that use the parser, and this will ensure
/// consistency.
///
/// For unrecoverable errors, the parser will ask the listener for help to
/// recover from the error. We haven't made much progress on these kinds of
/// errors, so in most cases, the parser aborts by skipping to the end of file.
///
/// Historically, this parser has been rather lax in what it allows, and
/// deferred the enforcement of some syntactical rules to subsequent phases. It
/// doesn't matter how we got there, only that we've identified that it's
/// easier if the parser reports as many errors as it can, but informs the
/// listener whether or not the error is recoverable.
///
/// Currently, the parser is particularly lax when it comes to the order of
/// modifiers such as `abstract`, `final`, `static`, etc. Historically, dart2js
/// would handle such errors in later phases. We hope that these cases will go
/// away as Fasta matures.
class Parser {
Listener listener;
Uri get uri => listener.uri;
bool mayParseFunctionExpressions = true;
/// Represents parser state: what asynchronous syntax is allowed in the
/// function being currently parsed. In rare situations, this can be set by
/// external clients, for example, to parse an expression outside a function.
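///
/// For example, a hypothetical sketch of such an external use, assuming
/// `tokenBeforeExpression` precedes the expression to be parsed:
///
/// ```
/// parser.asyncState = AsyncModifier.Async; // allow `await` expressions
/// parser.parseExpression(tokenBeforeExpression);
/// ```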
AsyncModifier asyncState = AsyncModifier.Sync;
// TODO(danrubel): The [loopState] and associated functionality in the
// [Parser] duplicates work that the resolver needs to do when resolving
// break/continue targets. Long term, this state and functionality will be
// removed from the [Parser] class and the resolver will be responsible
// for generating all break/continue error messages.
/// Represents parser state: whether parsing outside a loop,
/// inside a loop, or inside a switch. This is used to determine whether
/// break and continue statements are allowed.
LoopState loopState = LoopState.OutsideLoop;
/// A rewriter for inserting synthetic tokens.
/// Access using [rewriter] for lazy initialization.
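///
/// For example, a minimal sketch mirroring how recovery code in this class
/// inserts a missing comma (`token` and `next` are assumed to be in hand):
///
/// ```
/// Token comma = new SyntheticToken(TokenType.COMMA, next.charOffset);
/// token = rewriter.insertTokenAfter(token, comma).next;
/// ```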
TokenStreamRewriter cachedRewriter;
TokenStreamRewriter get rewriter {
cachedRewriter ??= new TokenStreamRewriter();
return cachedRewriter;
}
Parser(this.listener);
bool get inGenerator {
return asyncState == AsyncModifier.AsyncStar ||
asyncState == AsyncModifier.SyncStar;
}
bool get inAsync {
return asyncState == AsyncModifier.Async ||
asyncState == AsyncModifier.AsyncStar;
}
bool get inPlainSync => asyncState == AsyncModifier.Sync;
bool get isBreakAllowed => loopState != LoopState.OutsideLoop;
bool get isContinueAllowed => loopState == LoopState.InsideLoop;
bool get isContinueWithLabelAllowed => loopState != LoopState.OutsideLoop;
/// Parse a compilation unit.
///
/// This method is only invoked from outside the parser. As a result, this
/// method takes the next token to be consumed rather than the last consumed
/// token and returns the token after the last consumed token rather than the
/// last consumed token.
///
/// ```
/// libraryDefinition:
/// scriptTag?
/// libraryName?
/// importOrExport*
/// partDirective*
/// topLevelDefinition*
/// ;
///
/// partDeclaration:
/// partHeader topLevelDefinition*
/// ;
/// ```
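///
/// For example, a hedged sketch of invoking this method, assuming
/// `firstToken` is the first token of a scanned compilation unit:
///
/// ```
/// Token eof = parser.parseUnit(firstToken);
/// assert(eof.isEof);
/// ```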
Token parseUnit(Token token) {
listener.beginCompilationUnit(token);
int count = 0;
DirectiveContext directiveState = new DirectiveContext();
token = syntheticPreviousToken(token);
while (!token.next.isEof) {
final Token start = token.next;
token = parseTopLevelDeclarationImpl(token, directiveState);
listener.endTopLevelDeclaration(token.next);
count++;
if (start == token.next) {
// Recovery:
// If progress has not been made reaching the end of the token stream,
// then report an error and skip the current token.
token = token.next;
listener.beginMetadataStar(token);
listener.endMetadataStar(0);
reportRecoverableErrorWithToken(
token, fasta.templateExpectedDeclaration);
listener.handleInvalidTopLevelDeclaration(token);
listener.endTopLevelDeclaration(token.next);
count++;
}
}
token = token.next;
listener.endCompilationUnit(count, token);
// Clear fields that could lead to memory leak.
cachedRewriter = null;
return token;
}
/// This method exists for analyzer compatibility only
/// and will be removed once analyzer/fasta integration is complete.
///
/// Similar to [parseUnit], this method parses a compilation unit,
/// but stops when it reaches the first declaration or EOF.
///
/// This method is only invoked from outside the parser. As a result, this
/// method takes the next token to be consumed rather than the last consumed
/// token and returns the token after the last consumed token rather than the
/// last consumed token.
Token parseDirectives(Token token) {
listener.beginCompilationUnit(token);
int count = 0;
DirectiveContext directiveState = new DirectiveContext();
token = syntheticPreviousToken(token);
while (!token.next.isEof) {
final Token start = token.next;
final String value = start.stringValue;
final String nextValue = start.next.stringValue;
// If a built-in keyword is being used as a function name, then stop.
if (identical(nextValue, '.') ||
identical(nextValue, '<') ||
identical(nextValue, '(')) {
break;
}
if (identical(token.next.type, TokenType.SCRIPT_TAG)) {
directiveState?.checkScriptTag(this, token.next);
token = parseScript(token);
} else {
token = parseMetadataStar(token);
if (identical(value, 'import')) {
directiveState?.checkImport(this, token);
token = parseImport(token);
} else if (identical(value, 'export')) {
directiveState?.checkExport(this, token);
token = parseExport(token);
} else if (identical(value, 'library')) {
directiveState?.checkLibrary(this, token);
token = parseLibraryName(token);
} else if (identical(value, 'part')) {
token = parsePartOrPartOf(token, directiveState);
} else if (identical(value, ';')) {
token = start;
} else {
listener.handleDirectivesOnly();
break;
}
}
listener.endTopLevelDeclaration(token.next);
}
token = token.next;
listener.endCompilationUnit(count, token);
// Clear fields that could lead to memory leak.
cachedRewriter = null;
return token;
}
/// Parse a top-level declaration.
///
/// This method is only invoked from outside the parser. As a result, this
/// method takes the next token to be consumed rather than the last consumed
/// token and returns the token after the last consumed token rather than the
/// last consumed token.
Token parseTopLevelDeclaration(Token token) {
token =
parseTopLevelDeclarationImpl(syntheticPreviousToken(token), null).next;
listener.endTopLevelDeclaration(token);
return token;
}
/// ```
/// topLevelDefinition:
/// classDefinition |
/// enumType |
/// typeAlias |
/// 'external'? functionSignature ';' |
/// 'external'? getterSignature ';' |
/// 'external'? setterSignature ';' |
/// functionSignature functionBody |
/// returnType? 'get' identifier functionBody |
/// returnType? 'set' identifier formalParameterList functionBody |
/// ('final' | 'const') type? staticFinalDeclarationList ';' |
/// variableDeclaration ';'
/// ;
/// ```
Token parseTopLevelDeclarationImpl(
Token token, DirectiveContext directiveState) {
if (identical(token.next.type, TokenType.SCRIPT_TAG)) {
directiveState?.checkScriptTag(this, token.next);
return parseScript(token);
}
token = parseMetadataStar(token);
Token next = token.next;
if (next.isTopLevelKeyword) {
return parseTopLevelKeywordDeclaration(token, null, directiveState);
}
Token start = token;
// Skip modifiers to find a top level keyword or identifier
if (next.isModifier) {
if (optional('var', next) ||
((optional('const', next) || optional('final', next)) &&
// Ignore `const class` and `final class` so that it is reported
// below as an invalid modifier on a class.
!optional('class', next.next))) {
directiveState?.checkDeclaration();
return parseTopLevelMemberImpl(token);
}
while (token.next.isModifier) {
token = token.next;
}
}
next = token.next;
if (next.isTopLevelKeyword) {
Token beforeAbstractToken;
Token beforeModifier = start;
Token modifier = start.next;
while (modifier != next) {
if (optional('abstract', modifier) &&
optional('class', next) &&
beforeAbstractToken == null) {
beforeAbstractToken = beforeModifier;
} else {
// Recovery
reportTopLevelModifierError(modifier, next);
}
beforeModifier = modifier;
modifier = modifier.next;
}
return parseTopLevelKeywordDeclaration(
token, beforeAbstractToken, directiveState);
} else if (next.isKeywordOrIdentifier) {
// TODO(danrubel): improve parseTopLevelMember
// so that we don't parse modifiers twice.
directiveState?.checkDeclaration();
return parseTopLevelMemberImpl(start);
} else if (start.next != next) {
directiveState?.checkDeclaration();
// Handle the edge case where a modifier is being used as an identifier
return parseTopLevelMemberImpl(start);
}
// Recovery
if (next.isOperator && optional('(', next.next)) {
// This appears to be a top level operator declaration, which is invalid.
reportRecoverableError(next, fasta.messageTopLevelOperator);
// Insert a synthetic identifier
// and continue parsing as a top level function.
rewriter.insertTokenAfter(
next,
new SyntheticStringToken(
TokenType.IDENTIFIER,
'#synthetic_function_${next.charOffset}',
next.next.charOffset,
0));
return parseTopLevelMemberImpl(next);
}
// Ignore any preceding modifiers and just report the unexpected token
listener.beginTopLevelMember(next);
return parseInvalidTopLevelDeclaration(token);
}
// Report an error for the given modifier preceding a top level keyword
// such as `import` or `class`.
void reportTopLevelModifierError(Token modifier, Token afterModifiers) {
if (optional('const', modifier) && optional('class', afterModifiers)) {
reportRecoverableError(modifier, fasta.messageConstClass);
} else if (optional('external', modifier)) {
if (optional('class', afterModifiers)) {
reportRecoverableError(modifier, fasta.messageExternalClass);
} else if (optional('enum', afterModifiers)) {
reportRecoverableError(modifier, fasta.messageExternalEnum);
} else if (optional('typedef', afterModifiers)) {
reportRecoverableError(modifier, fasta.messageExternalTypedef);
} else {
reportRecoverableErrorWithToken(
modifier, fasta.templateExtraneousModifier);
}
} else {
reportRecoverableErrorWithToken(
modifier, fasta.templateExtraneousModifier);
}
}
/// Parse any top-level declaration that begins with a keyword.
Token parseTopLevelKeywordDeclaration(
Token token, Token beforeAbstractToken, DirectiveContext directiveState) {
Token previous = token;
token = token.next;
assert(token.isTopLevelKeyword);
final String value = token.stringValue;
if (identical(value, 'class')) {
directiveState?.checkDeclaration();
return parseClassOrNamedMixinApplication(previous, beforeAbstractToken);
} else if (identical(value, 'enum')) {
directiveState?.checkDeclaration();
return parseEnum(previous);
} else if (identical(value, 'typedef')) {
String nextValue = token.next.stringValue;
directiveState?.checkDeclaration();
if (identical('(', nextValue) ||
identical('<', nextValue) ||
identical('.', nextValue)) {
return parseTopLevelMemberImpl(previous);
} else {
return parseTypedef(previous);
}
} else {
// The remaining top level keywords are built-in keywords
// and can be used in a top level declaration
// as an identifier such as "abstract<T>() => 0;"
// or as a prefix such as "abstract.A b() => 0;".
String nextValue = token.next.stringValue;
if (identical(nextValue, '(') ||
identical(nextValue, '<') ||
identical(nextValue, '.')) {
directiveState?.checkDeclaration();
return parseTopLevelMemberImpl(previous);
} else if (identical(value, 'library')) {
directiveState?.checkLibrary(this, token);
return parseLibraryName(previous);
} else if (identical(value, 'import')) {
directiveState?.checkImport(this, token);
return parseImport(previous);
} else if (identical(value, 'export')) {
directiveState?.checkExport(this, token);
return parseExport(previous);
} else if (identical(value, 'part')) {
return parsePartOrPartOf(previous, directiveState);
}
}
throw "Internal error: Unhandled top level keyword '$value'.";
}
/// ```
/// libraryDirective:
/// 'library' qualified ';'
/// ;
/// ```
Token parseLibraryName(Token token) {
Token libraryKeyword = token.next;
assert(optional('library', libraryKeyword));
listener.beginLibraryName(libraryKeyword);
token = parseQualified(libraryKeyword, IdentifierContext.libraryName,
IdentifierContext.libraryNameContinuation);
token = ensureSemicolon(token);
listener.endLibraryName(libraryKeyword, token);
return token;
}
/// ```
/// importPrefix:
/// 'deferred'? 'as' identifier
/// ;
/// ```
Token parseImportPrefixOpt(Token token) {
Token next = token.next;
if (optional('deferred', next) && optional('as', next.next)) {
Token deferredToken = next;
Token asKeyword = next.next;
token = ensureIdentifier(
asKeyword, IdentifierContext.importPrefixDeclaration);
listener.handleImportPrefix(deferredToken, asKeyword);
} else if (optional('as', next)) {
Token asKeyword = next;
token = ensureIdentifier(next, IdentifierContext.importPrefixDeclaration);
listener.handleImportPrefix(null, asKeyword);
} else {
listener.handleImportPrefix(null, null);
}
return token;
}
/// ```
/// importDirective:
/// 'import' uri ('if' '(' test ')' uri)* importPrefix? combinator* ';'
/// ;
/// ```
Token parseImport(Token token) {
Token importKeyword = token.next;
assert(optional('import', importKeyword));
listener.beginImport(importKeyword);
token = ensureLiteralString(importKeyword);
Token uri = token;
token = parseConditionalUriStar(token);
token = parseImportPrefixOpt(token);
token = parseCombinatorStar(token).next;
if (optional(';', token)) {
listener.endImport(importKeyword, token);
return token;
} else {
// Recovery
listener.endImport(importKeyword, null);
return parseImportRecovery(uri);
}
}
/// Recover from out-of-order clauses in an import directive, where [token] is
/// the uri literal that has already been parsed.
Token parseImportRecovery(Token token) {
final primaryListener = listener;
final recoveryListener = new ImportRecoveryListener(primaryListener);
// Reparse to determine which clauses have already been parsed
// but intercept the events so they are not sent to the primary listener
listener = recoveryListener;
token = parseConditionalUriStar(token);
token = parseImportPrefixOpt(token);
token = parseCombinatorStar(token);
Token firstDeferredKeyword = recoveryListener.deferredKeyword;
bool hasPrefix = recoveryListener.asKeyword != null;
bool hasCombinator = recoveryListener.hasCombinator;
// Update the recovery listener to forward subsequent events
// to the primary listener
recoveryListener.listener = primaryListener;
// Parse additional out-of-order clauses.
Token semicolon;
do {
Token start = token.next;
// Check for extraneous token in the middle of an import statement.
token = skipUnexpectedTokenOpt(
token, const <String>['if', 'deferred', 'as', 'hide', 'show', ';']);
// During recovery, clauses are parsed in the same order
// and generate the same events as in the parseImport method above.
recoveryListener.clear();
token = parseConditionalUriStar(token);
if (recoveryListener.ifKeyword != null) {
if (firstDeferredKeyword != null) {
// TODO(danrubel): report error indicating conditional should
// be moved before deferred keyword
} else if (hasPrefix) {
// TODO(danrubel): report error indicating conditional should
// be moved before prefix clause
} else if (hasCombinator) {
// TODO(danrubel): report error indicating conditional should
// be moved before combinators
}
}
if (optional('deferred', token.next) &&
!optional('as', token.next.next)) {
listener.handleImportPrefix(token.next, null);
token = token.next;
} else {
token = parseImportPrefixOpt(token);
}
if (recoveryListener.deferredKeyword != null) {
if (firstDeferredKeyword != null) {
reportRecoverableError(
recoveryListener.deferredKeyword, fasta.messageDuplicateDeferred);
} else {
if (hasPrefix) {
reportRecoverableError(recoveryListener.deferredKeyword,
fasta.messageDeferredAfterPrefix);
}
firstDeferredKeyword = recoveryListener.deferredKeyword;
}
}
if (recoveryListener.asKeyword != null) {
if (hasPrefix) {
reportRecoverableError(
recoveryListener.asKeyword, fasta.messageDuplicatePrefix);
} else {
if (hasCombinator) {
reportRecoverableError(
recoveryListener.asKeyword, fasta.messagePrefixAfterCombinator);
}
hasPrefix = true;
}
}
token = parseCombinatorStar(token);
hasCombinator = hasCombinator || recoveryListener.hasCombinator;
if (optional(';', token.next)) {
semicolon = token.next;
} else if (identical(start, token.next)) {
// If no forward progress was made, insert ';' so that we exit the loop.
semicolon = ensureSemicolon(token);
}
listener.handleRecoverImport(semicolon);
} while (semicolon == null);
if (firstDeferredKeyword != null && !hasPrefix) {
reportRecoverableError(
firstDeferredKeyword, fasta.messageMissingPrefixInDeferredImport);
}
return semicolon;
}
/// ```
/// conditionalUris:
/// conditionalUri*
/// ;
/// ```
Token parseConditionalUriStar(Token token) {
listener.beginConditionalUris(token.next);
int count = 0;
while (optional('if', token.next)) {
count++;
token = parseConditionalUri(token);
}
listener.endConditionalUris(count);
return token;
}
/// ```
/// conditionalUri:
/// 'if' '(' dottedName ('==' literalString)? ')' uri
/// ;
/// ```
Token parseConditionalUri(Token token) {
Token ifKeyword = token = token.next;
assert(optional('if', token));
listener.beginConditionalUri(token);
Token leftParen = token.next;
if (!optional('(', leftParen)) {
reportRecoverableError(
leftParen, fasta.templateExpectedButGot.withArguments('('));
leftParen = rewriter.insertParens(token, true);
}
token = parseDottedName(leftParen);
Token next = token.next;
Token equalitySign;
if (optional('==', next)) {
equalitySign = next;
token = ensureLiteralString(next);
next = token.next;
}
if (next != leftParen.endGroup) {
Token endGroup = leftParen.endGroup;
if (endGroup.isSynthetic) {
// The scanner did not place the synthetic ')' correctly, so move it.
next = rewriter.moveSynthetic(token, endGroup);
} else {
reportRecoverableErrorWithToken(next, fasta.templateUnexpectedToken);
next = endGroup;
}
}
token = next;
assert(optional(')', token));
token = ensureLiteralString(token);
listener.endConditionalUri(ifKeyword, leftParen, equalitySign);
return token;
}
/// ```
/// dottedName:
/// identifier ('.' identifier)*
/// ;
/// ```
Token parseDottedName(Token token) {
token = ensureIdentifier(token, IdentifierContext.dottedName);
Token firstIdentifier = token;
int count = 1;
while (optional('.', token.next)) {
token = ensureIdentifier(
token.next, IdentifierContext.dottedNameContinuation);
count++;
}
listener.handleDottedName(count, firstIdentifier);
return token;
}
/// ```
/// exportDirective:
/// 'export' uri conditional-uris* combinator* ';'
/// ;
/// ```
Token parseExport(Token token) {
Token exportKeyword = token.next;
assert(optional('export', exportKeyword));
listener.beginExport(exportKeyword);
token = ensureLiteralString(exportKeyword);
token = parseConditionalUriStar(token);
token = parseCombinatorStar(token);
token = ensureSemicolon(token);
listener.endExport(exportKeyword, token);
return token;
}
/// ```
/// combinators:
/// (hideCombinator | showCombinator)*
/// ;
/// ```
Token parseCombinatorStar(Token token) {
Token next = token.next;
listener.beginCombinators(next);
int count = 0;
while (true) {
String value = next.stringValue;
if (identical('hide', value)) {
token = parseHide(token);
} else if (identical('show', value)) {
token = parseShow(token);
} else {
listener.endCombinators(count);
break;
}
next = token.next;
count++;
}
return token;
}
/// ```
/// hideCombinator:
/// 'hide' identifierList
/// ;
/// ```
Token parseHide(Token token) {
Token hideKeyword = token.next;
assert(optional('hide', hideKeyword));
listener.beginHide(hideKeyword);
token = parseIdentifierList(hideKeyword);
listener.endHide(hideKeyword);
return token;
}
/// ```
/// showCombinator:
/// 'show' identifierList
/// ;
/// ```
Token parseShow(Token token) {
Token showKeyword = token.next;
assert(optional('show', showKeyword));
listener.beginShow(showKeyword);
token = parseIdentifierList(showKeyword);
listener.endShow(showKeyword);
return token;
}
/// ```
/// identifierList:
/// identifier (',' identifier)*
/// ;
/// ```
Token parseIdentifierList(Token token) {
token = ensureIdentifier(token, IdentifierContext.combinator);
int count = 1;
while (optional(',', token.next)) {
token = ensureIdentifier(token.next, IdentifierContext.combinator);
count++;
}
listener.handleIdentifierList(count);
return token;
}
/// ```
/// typeList:
/// type (',' type)*
/// ;
/// ```
Token parseTypeList(Token token) {
listener.beginTypeList(token.next);
token = computeType(token, true).ensureTypeOrVoid(token, this);
int count = 1;
while (optional(',', token.next)) {
token = computeType(token.next, true).ensureTypeOrVoid(token.next, this);
count++;
}
listener.endTypeList(count);
return token;
}
Token parsePartOrPartOf(Token token, DirectiveContext directiveState) {
Token next = token.next;
assert(optional('part', next));
if (optional('of', next.next)) {
directiveState?.checkPartOf(this, next);
return parsePartOf(token);
} else {
directiveState?.checkPart(this, next);
return parsePart(token);
}
}
/// ```
/// partDirective:
/// 'part' uri ';'
/// ;
/// ```
Token parsePart(Token token) {
Token partKeyword = token.next;
assert(optional('part', partKeyword));
listener.beginPart(partKeyword);
token = ensureLiteralString(partKeyword);
token = ensureSemicolon(token);
listener.endPart(partKeyword, token);
return token;
}
/// ```
/// partOfDirective:
/// 'part' 'of' (qualified | uri) ';'
/// ;
/// ```
Token parsePartOf(Token token) {
Token partKeyword = token.next;
Token ofKeyword = partKeyword.next;
assert(optional('part', partKeyword));
assert(optional('of', ofKeyword));
listener.beginPartOf(partKeyword);
bool hasName = ofKeyword.next.isIdentifier;
if (hasName) {
token = parseQualified(ofKeyword, IdentifierContext.partName,
IdentifierContext.partNameContinuation);
} else {
token = ensureLiteralString(ofKeyword);
}
token = ensureSemicolon(token);
listener.endPartOf(partKeyword, ofKeyword, token, hasName);
return token;
}
/// ```
/// metadata:
/// annotation*
/// ;
/// ```
Token parseMetadataStar(Token token) {
listener.beginMetadataStar(token.next);
int count = 0;
while (optional('@', token.next)) {
token = parseMetadata(token);
count++;
}
listener.endMetadataStar(count);
return token;
}
/// ```
/// annotation:
/// '@' qualified ('.' identifier)? arguments?
/// ;
/// ```
Token parseMetadata(Token token) {
Token atToken = token.next;
assert(optional('@', atToken));
listener.beginMetadata(atToken);
token = ensureIdentifier(atToken, IdentifierContext.metadataReference);
token =
parseQualifiedRestOpt(token, IdentifierContext.metadataContinuation);
if (optional("<", token.next)) {
reportRecoverableError(token.next, fasta.messageMetadataTypeArguments);
}
token = computeTypeParamOrArg(token).parseArguments(token, this);
Token period = null;
if (optional('.', token.next)) {
period = token.next;
token = ensureIdentifier(
period, IdentifierContext.metadataContinuationAfterTypeArguments);
}
token = parseArgumentsOpt(token);
listener.endMetadata(atToken, period, token.next);
return token;
}
/// ```
/// scriptTag:
/// '#!' (~NEWLINE)* NEWLINE
/// ;
/// ```
Token parseScript(Token token) {
token = token.next;
assert(identical(token.type, TokenType.SCRIPT_TAG));
listener.handleScript(token);
return token;
}
/// ```
/// typeAlias:
/// metadata 'typedef' typeAliasBody |
/// metadata 'typedef' identifier typeParameters? '=' functionType ';'
/// ;
///
/// functionType:
/// returnType? 'Function' typeParameters? parameterTypeList
///
/// typeAliasBody:
/// functionTypeAlias
/// ;
///
/// functionTypeAlias:
/// functionPrefix typeParameters? formalParameterList ';'
/// ;
///
/// functionPrefix:
/// returnType? identifier
/// ;
/// ```
Token parseTypedef(Token token) {
Token typedefKeyword = token.next;
assert(optional('typedef', typedefKeyword));
listener.beginFunctionTypeAlias(typedefKeyword);
TypeInfo typeInfo = computeType(typedefKeyword, false);
token = typeInfo.skipType(typedefKeyword).next;
Token equals;
TypeParamOrArgInfo typeParam = computeTypeParamOrArg(token);
if (typeInfo == noType &&
(token.kind == IDENTIFIER_TOKEN || token.type.isPseudo) &&
optional('=', typeParam.skip(token).next)) {
listener.handleIdentifier(token, IdentifierContext.typedefDeclaration);
equals = typeParam.parseVariables(token, this).next;
assert(optional('=', equals));
token = computeType(equals, true).ensureTypeOrVoid(equals, this);
} else {
token = typeInfo.parseType(typedefKeyword, this);
token = ensureIdentifier(token, IdentifierContext.typedefDeclaration);
token = typeParam.parseVariables(token, this);
token =
parseFormalParametersRequiredOpt(token, MemberKind.FunctionTypeAlias);
}
token = ensureSemicolon(token);
listener.endFunctionTypeAlias(typedefKeyword, equals, token);
return token;
}
/// Parse a mixin application starting from `with`. Assumes that the first
/// type has already been parsed.
Token parseMixinApplicationRest(Token token) {
Token withKeyword = token.next;
if (!optional('with', withKeyword)) {
reportRecoverableError(
withKeyword, fasta.templateExpectedButGot.withArguments('with'));
withKeyword =
new SyntheticKeywordToken(Keyword.WITH, withKeyword.charOffset);
rewriter.insertTokenAfter(token, withKeyword);
if (!isValidTypeReference(withKeyword.next)) {
rewriter.insertTokenAfter(
withKeyword,
new SyntheticStringToken(
TokenType.IDENTIFIER, '', withKeyword.charOffset));
}
}
listener.beginMixinApplication(withKeyword);
assert(optional('with', withKeyword));
token = parseTypeList(withKeyword);
listener.endMixinApplication(withKeyword);
return token;
}
Token parseFormalParametersOpt(Token token, MemberKind kind) {
Token next = token.next;
if (optional('(', next)) {
return parseFormalParameters(token, kind);
} else {
listener.handleNoFormalParameters(next, kind);
return token;
}
}
Token skipFormalParameters(Token token, MemberKind kind) {
Token lastConsumed = token;
token = token.next;
// TODO(ahe): Shouldn't this be `beginFormalParameters`?
listener.beginOptionalFormalParameters(token);
if (!optional('(', token)) {
if (optional(';', token)) {
reportRecoverableError(token, fasta.messageExpectedOpenParens);
listener.endFormalParameters(0, token, token, kind);
return lastConsumed;
}
listener.endFormalParameters(0, token, token, kind);
return reportUnexpectedToken(token);
}
Token closeBrace = token.endGroup;
listener.endFormalParameters(0, token, closeBrace, kind);
return closeBrace;
}
/// Parses the formal parameter list of a function.
///
/// If `kind == MemberKind.GeneralizedFunctionType`, then names may be
/// omitted (except for named arguments). Otherwise, types may be omitted.
Token parseFormalParametersRequiredOpt(Token token, MemberKind kind) {
Token next = token.next;
if (!optional('(', next)) {
reportRecoverableError(next, missingParameterMessage(kind));
rewriter.insertParens(token, false);
}
return parseFormalParameters(token, kind);
}
/// Parses the formal parameter list of a function given that the left
/// parenthesis is known to exist.
///
/// If `kind == MemberKind.GeneralizedFunctionType`, then names may be
/// omitted (except for named arguments). Otherwise, types may be omitted.
Token parseFormalParameters(Token token, MemberKind kind) {
Token begin = token = token.next;
assert(optional('(', token));
listener.beginFormalParameters(begin, kind);
int parameterCount = 0;
while (true) {
Token next = token.next;
if (optional(')', next)) {
token = next;
break;
}
++parameterCount;
String value = next.stringValue;
if (identical(value, '[')) {
token = parseOptionalPositionalParameters(token, kind);
token = ensureCloseParen(token, begin);
break;
} else if (identical(value, '{')) {
token = parseOptionalNamedParameters(token, kind);
token = ensureCloseParen(token, begin);
break;
} else if (identical(value, '[]')) {
// Recovery
token = rewriteSquareBrackets(token);
token = parseOptionalPositionalParameters(token, kind);
token = ensureCloseParen(token, begin);
break;
}
token = parseFormalParameter(token, FormalParameterKind.mandatory, kind);
next = token.next;
if (!optional(',', next)) {
Token next = token.next;
if (optional(')', next)) {
token = next;
} else {
// Recovery
if (begin.endGroup.isSynthetic) {
// Scanner has already reported a missing `)` error,
// but placed the `)` in the wrong location, so move it.
token = rewriter.moveSynthetic(token, begin.endGroup);
} else if (next.kind == IDENTIFIER_TOKEN &&
next.next.kind == IDENTIFIER_TOKEN) {
// Looks like a missing comma
Token comma = new SyntheticToken(TokenType.COMMA, next.charOffset);
token = rewriter.insertTokenAfter(token, comma).next;
continue;
} else {
reportRecoverableError(
next, fasta.templateExpectedButGot.withArguments(')'));
token = begin.endGroup;
}
}
break;
}
token = next;
}
assert(optional(')', token));
listener.endFormalParameters(parameterCount, begin, token, kind);
return token;
}
/// Return the message that should be produced when the formal parameters are
/// missing.
Message missingParameterMessage(MemberKind kind) {
if (kind == MemberKind.FunctionTypeAlias) {
return fasta.messageMissingTypedefParameters;
} else if (kind == MemberKind.NonStaticMethod ||
kind == MemberKind.StaticMethod) {
return fasta.messageMissingMethodParameters;
}
return fasta.messageMissingFunctionParameters;
}
/// ```
/// normalFormalParameter:
/// functionFormalParameter |
/// fieldFormalParameter |
/// simpleFormalParameter
/// ;
///
/// functionFormalParameter:
/// metadata 'covariant'? returnType? identifier formalParameterList
/// ;
///
/// simpleFormalParameter:
/// metadata 'covariant'? finalConstVarOrType? identifier |
/// ;
///
/// fieldFormalParameter:
/// metadata finalConstVarOrType? 'this' '.' identifier formalParameterList?
/// ;
/// ```
Token parseFormalParameter(
Token token, FormalParameterKind parameterKind, MemberKind memberKind) {
assert(parameterKind != null);
token = parseMetadataStar(token);
Token next = token.next;
Token start = next;
final bool inFunctionType =
memberKind == MemberKind.GeneralizedFunctionType;
Token covariantToken;
Token varFinalOrConst;
if (isModifier(next)) {
if (optional('covariant', next)) {
if (memberKind != MemberKind.StaticMethod &&
memberKind != MemberKind.TopLevelMethod) {
covariantToken = token = next;
next = token.next;
}
}
if (isModifier(next)) {
if (!inFunctionType) {
if (optional('var', next)) {
varFinalOrConst = token = next;
next = token.next;
} else if (optional('final', next)) {
varFinalOrConst = token = next;
next = token.next;
}
}
if (isModifier(next)) {
// Recovery
ModifierRecoveryContext context = new ModifierRecoveryContext(this);
token = context.parseFormalParameterModifiers(token, memberKind,
covariantToken: covariantToken, varFinalOrConst: varFinalOrConst);
covariantToken = context.covariantToken;
varFinalOrConst = context.varFinalOrConst;
context = null;
}
}
}
listener.beginFormalParameter(
start, memberKind, covariantToken, varFinalOrConst);
// Type is required in a generalized function type, but optional otherwise.
final Token beforeType = token;
TypeInfo typeInfo = computeType(token, inFunctionType);
token = typeInfo.skipType(token);
next = token.next;
if (typeInfo == noType &&
(optional('.', next) ||
(next.isIdentifier && optional('.', next.next)))) {
// Recovery: Malformed type reference.
typeInfo = computeType(beforeType, true);
token = typeInfo.skipType(beforeType);
next = token.next;
}
final bool isNamedParameter =
parameterKind == FormalParameterKind.optionalNamed;
Token thisKeyword;
Token periodAfterThis;
IdentifierContext nameContext =
IdentifierContext.formalParameterDeclaration;
if (!inFunctionType && optional('this', next)) {
thisKeyword = token = next;
next = token.next;
if (!optional('.', next)) {
// Recover from a missing period by inserting one.
next = rewriteAndRecover(
token,
fasta.templateExpectedButGot.withArguments('.'),
new SyntheticToken(TokenType.PERIOD, next.charOffset))
.next;
}
periodAfterThis = token = next;
next = token.next;
nameContext = IdentifierContext.fieldInitializer;
}
if (next.isIdentifier) {
token = next;
next = token.next;
}
Token beforeInlineFunctionType;
if (optional("<", next)) {
Token closer = next.endGroup;
if (closer != null) {
if (optional("(", closer.next)) {
if (varFinalOrConst != null) {
reportRecoverableError(
varFinalOrConst, fasta.messageFunctionTypedParameterVar);
}
beforeInlineFunctionType = token;
token = closer.next.endGroup;
next = token.next;
}
}
} else if (optional("(", next)) {
if (varFinalOrConst != null) {
reportRecoverableError(
varFinalOrConst, fasta.messageFunctionTypedParameterVar);
}
beforeInlineFunctionType = token;
token = next.endGroup;
next = token.next;
}
if (typeInfo != noType &&
varFinalOrConst != null &&
optional('var', varFinalOrConst)) {
reportRecoverableError(varFinalOrConst, fasta.messageTypeAfterVar);
}
Token endInlineFunctionType;
if (beforeInlineFunctionType != null) {
endInlineFunctionType = computeTypeParamOrArg(beforeInlineFunctionType)
.parseVariables(beforeInlineFunctionType, this);
listener.beginFunctionTypedFormalParameter(beforeInlineFunctionType.next);
token = typeInfo.parseType(beforeType, this);
endInlineFunctionType = parseFormalParametersRequiredOpt(
endInlineFunctionType, MemberKind.FunctionTypedParameter);
listener.endFunctionTypedFormalParameter();
// Generalized function types don't allow inline function types.
// The following isn't allowed:
// int Function(int bar(String x)).
if (inFunctionType) {
reportRecoverableError(beforeInlineFunctionType.next,
fasta.messageInvalidInlineFunctionType);
}
} else if (inFunctionType) {
token = typeInfo.ensureTypeOrVoid(beforeType, this);
} else {
token = typeInfo.parseType(beforeType, this);
}
Token nameToken;
if (periodAfterThis != null) {
token = periodAfterThis;
}
next = token.next;
if (inFunctionType && !isNamedParameter && !next.isKeywordOrIdentifier) {
nameToken = token.next;
listener.handleNoName(nameToken);
} else {
nameToken = token = ensureIdentifier(token, nameContext);
if (isNamedParameter && nameToken.lexeme.startsWith("_")) {
reportRecoverableError(nameToken, fasta.messagePrivateNamedParameter);
}
}
if (endInlineFunctionType != null) {
token = endInlineFunctionType;
}
next = token.next;
String value = next.stringValue;
if ((identical('=', value)) || (identical(':', value))) {
Token equal = next;
listener.beginFormalParameterDefaultValueExpression();
token = parseExpression(equal);
next = token.next;
listener.endFormalParameterDefaultValueExpression();
// TODO(danrubel): Consider removing the last parameter from the
// handleValuedFormalParameter event... it appears to be unused.
listener.handleValuedFormalParameter(equal, next);
if (isMandatoryFormalParameterKind(parameterKind)) {
reportRecoverableError(
equal, fasta.messageRequiredParameterWithDefault);
} else if (isOptionalPositionalFormalParameterKind(parameterKind) &&
identical(':', value)) {
reportRecoverableError(
equal, fasta.messagePositionalParameterWithEquals);
} else if (inFunctionType ||
memberKind == MemberKind.FunctionTypeAlias ||
memberKind == MemberKind.FunctionTypedParameter) {
reportRecoverableError(equal, fasta.messageFunctionTypeDefaultValue);
}
} else {
listener.handleFormalParameterWithoutValue(next);
}
listener.endFormalParameter(
thisKeyword, periodAfterThis, nameToken, parameterKind, memberKind);
return token;
}
/// ```
/// defaultFormalParameter:
/// normalFormalParameter ('=' expression)?
/// ;
/// ```
Token parseOptionalPositionalParameters(Token token, MemberKind kind) {
Token begin = token = token.next;
assert(optional('[', token));
listener.beginOptionalFormalParameters(begin);
int parameterCount = 0;
while (true) {
Token next = token.next;
if (optional(']', next)) {
break;
}
token = parseFormalParameter(
token, FormalParameterKind.optionalPositional, kind);
next = token.next;
++parameterCount;
if (!optional(',', next)) {
if (!optional(']', next)) {
// Recovery
reportRecoverableError(
next, fasta.templateExpectedButGot.withArguments(']'));
// Scanner guarantees a closing bracket.
next = begin.endGroup;
while (token.next != next) {
token = token.next;
}
}
break;
}
token = next;
}
if (parameterCount == 0) {
token = rewriteAndRecover(
token,
fasta.messageEmptyOptionalParameterList,
new SyntheticStringToken(
TokenType.IDENTIFIER, '', token.next.charOffset, 0));
token = parseFormalParameter(
token, FormalParameterKind.optionalPositional, kind);
++parameterCount;
}
token = token.next;
assert(optional(']', token));
listener.endOptionalFormalParameters(parameterCount, begin, token);
return token;
}
/// ```
/// defaultNamedParameter:
/// normalFormalParameter ('=' expression)? |
/// normalFormalParameter (':' expression)?
/// ;
/// ```
Token parseOptionalNamedParameters(Token token, MemberKind kind) {
Token begin = token = token.next;
assert(optional('{', token));
listener.beginOptionalFormalParameters(begin);
int parameterCount = 0;
while (true) {
Token next = token.next;
if (optional('}', next)) {
break;
}
token =
parseFormalParameter(token, FormalParameterKind.optionalNamed, kind);
next = token.next;
++parameterCount;
if (!optional(',', next)) {
if (!optional('}', next)) {
// Recovery
reportRecoverableError(
next, fasta.templateExpectedButGot.withArguments('}'));
// Scanner guarantees a closing bracket.
next = begin.endGroup;
while (token.next != next) {
token = token.next;
}
}
break;
}
token = next;
}
if (parameterCount == 0) {
token = rewriteAndRecover(
token,
fasta.messageEmptyNamedParameterList,
new SyntheticStringToken(
TokenType.IDENTIFIER, '', token.next.charOffset, 0));
token =
parseFormalParameter(token, FormalParameterKind.optionalNamed, kind);
++parameterCount;
}
token = token.next;
assert(optional('}', token));
listener.endOptionalFormalParameters(parameterCount, begin, token);
return token;
}
/// ```
/// qualified:
/// identifier qualifiedRest*
/// ;
/// ```
Token parseQualified(Token token, IdentifierContext context,
IdentifierContext continuationContext) {
token = ensureIdentifier(token, context);
while (optional('.', token.next)) {
token = parseQualifiedRest(token, continuationContext);
}
return token;
}
/// ```
/// qualifiedRestOpt:
/// qualifiedRest?
/// ;
/// ```
Token parseQualifiedRestOpt(
Token token, IdentifierContext continuationContext) {
if (optional('.', token.next)) {
return parseQualifiedRest(token, continuationContext);
} else {
return token;
}
}
/// ```
/// qualifiedRest:
/// '.' identifier
/// ;
/// ```
Token parseQualifiedRest(Token token, IdentifierContext context) {
token = token.next;
assert(optional('.', token));
Token period = token;
token = ensureIdentifier(token, context);
listener.handleQualified(period);
return token;
}
Token skipBlock(Token token) {
// The scanner ensures that `{` always has a closing `}`.
return ensureBlock(token, null).endGroup;
}
/// ```
/// enumType:
/// metadata 'enum' id '{' metadata id [',' metadata id]* [','] '}'
/// ;
/// ```
Token parseEnum(Token token) {
Token enumKeyword = token.next;
assert(optional('enum', enumKeyword));
listener.beginEnum(enumKeyword);
token = ensureIdentifier(enumKeyword, IdentifierContext.enumDeclaration);
Token leftBrace = token.next;
int count = 0;
if (optional('{', leftBrace)) {
token = leftBrace;
while (true) {
Token next = token.next;
if (optional('}', next)) {
token = next;
if (count == 0) {
reportRecoverableError(token, fasta.messageEnumDeclarationEmpty);
}
break;
}
token = parseMetadataStar(token);
token = ensureIdentifier(token, IdentifierContext.enumValueDeclaration);
next = token.next;
count++;
if (optional(',', next)) {
token = next;
} else if (optional('}', next)) {
token = next;
break;
} else {
// Recovery
Token endGroup = leftBrace.endGroup;
if (endGroup.isSynthetic) {
// The scanner did not place the synthetic '}' correctly.
token = rewriter.moveSynthetic(token, endGroup);
break;
} else if (next.isIdentifier) {
// If the next token is an identifier, assume a missing comma.
// TODO(danrubel): Consider improved recovery for missing `}`
// both here and when the scanner inserts a synthetic `}`
// for situations such as `enum Letter {a, b Letter e;`.
reportRecoverableError(
next, fasta.templateExpectedButGot.withArguments(','));
} else {
// Otherwise assume a missing `}` and exit the loop
reportRecoverableError(
next, fasta.templateExpectedButGot.withArguments('}'));
token = leftBrace.endGroup;
break;
}
}
}
} else {
leftBrace = ensureBlock(token, fasta.templateExpectedEnumBody);
token = leftBrace.endGroup;
}
assert(optional('}', token));
listener.endEnum(enumKeyword, leftBrace, count);
return token;
}
Token parseClassOrNamedMixinApplication(
Token token, Token beforeAbstractToken) {
token = token.next;
listener.beginClassOrNamedMixinApplication(token);
Token abstractToken = beforeAbstractToken?.next;
Token begin = abstractToken ?? token;
Token classKeyword = token;
assert(optional('class', token));
Token name =
ensureIdentifier(token, IdentifierContext.classOrNamedMixinDeclaration);
token = computeTypeParamOrArg(name, true).parseVariables(name, this);
if (optional('=', token.next)) {
listener.beginNamedMixinApplication(begin, abstractToken, name);
return parseNamedMixinApplication(token, begin, classKeyword);
} else {
listener.beginClassDeclaration(begin, abstractToken, name);
return parseClass(token, begin, classKeyword);
}
}
Token parseNamedMixinApplication(
Token token, Token begin, Token classKeyword) {
Token equals = token = token.next;
assert(optional('=', equals));
token = computeType(token, true).ensureTypeNotVoid(token, this);
token = parseMixinApplicationRest(token);
Token implementsKeyword = null;
if (optional('implements', token.next)) {
implementsKeyword = token.next;
token = parseTypeList(implementsKeyword);
}
token = ensureSemicolon(token);
listener.endNamedMixinApplication(
begin, classKeyword, equals, implementsKeyword, token);
return token;
}
/// Parse the portion of a class declaration (not a mixin application) that
/// follows the end of the type parameters.
///
/// ```
/// classDefinition:
/// metadata abstract? 'class' identifier typeParameters?
/// (superclass mixins?)? interfaces?
/// '{' (metadata classMemberDefinition)* '}' |
/// metadata abstract? 'class' mixinApplicationClass
/// ;
/// ```
Token parseClass(Token token, Token begin, Token classKeyword) {
Token start = token;
token = parseClassHeaderOpt(token, begin, classKeyword);
if (!optional('{', token.next)) {
// Recovery
token = parseClassHeaderRecovery(start, begin, classKeyword);
ensureBlock(token, fasta.templateExpectedClassBody);
}
token = parseClassBody(token);
listener.endClassDeclaration(begin, token);
return token;
}
Token parseClassHeaderOpt(Token token, Token begin, Token classKeyword) {
token = parseClassExtendsOpt(token);
token = parseClassImplementsOpt(token);
Token nativeToken;
if (optional('native', token.next)) {
nativeToken = token.next;
token = parseNativeClause(token);
}
listener.handleClassHeader(begin, classKeyword, nativeToken);
return token;
}
/// Recover given out-of-order clauses in a class header.
Token parseClassHeaderRecovery(Token token, Token begin, Token classKeyword) {
final primaryListener = listener;
final recoveryListener = new ClassHeaderRecoveryListener(primaryListener);
// Reparse to determine which clauses have already been parsed
// but intercept the events so they are not sent to the primary listener.
listener = recoveryListener;
token = parseClassHeaderOpt(token, begin, classKeyword);
bool hasExtends = recoveryListener.extendsKeyword != null;
bool hasImplements = recoveryListener.implementsKeyword != null;
Token withKeyword = recoveryListener.withKeyword;
// Update the recovery listener to forward subsequent events
// to the primary listener.
recoveryListener.listener = primaryListener;
// Parse additional out-of-order clauses
Token start;
do {
start = token;
// Check for extraneous token in the middle of a class header.
token = skipUnexpectedTokenOpt(
token, const <String>['extends', 'with', 'implements', '{']);
// During recovery, clauses are parsed in the same order
// and generate the same events as in the parseClassHeader method above.
recoveryListener.clear();
Token next = token.next;
if (optional('with', next)) {
// If there is a `with` clause without a preceding `extends` clause
// then insert a synthetic `extends` clause and parse both clauses.
Token extendsKeyword =
new SyntheticKeywordToken(Keyword.EXTENDS, next.offset);
Token superclassToken = new SyntheticStringToken(
TokenType.IDENTIFIER, 'Object', next.offset, 0);
rewriter.insertTokenAfter(token, extendsKeyword);
rewriter.insertTokenAfter(extendsKeyword, superclassToken);
token = computeType(extendsKeyword, true)
.ensureTypeNotVoid(extendsKeyword, this);
token = parseMixinApplicationRest(token);
listener.handleClassExtends(extendsKeyword);
} else {
token = parseClassExtendsOpt(token);
if (recoveryListener.extendsKeyword != null) {
if (hasExtends) {
reportRecoverableError(
recoveryListener.extendsKeyword, fasta.messageMultipleExtends);
} else {
if (withKeyword != null) {
reportRecoverableError(recoveryListener.extendsKeyword,
fasta.messageWithBeforeExtends);
} else if (hasImplements) {
reportRecoverableError(recoveryListener.extendsKeyword,
fasta.messageImplementsBeforeExtends);
}
hasExtends = true;
}
}
}
if (recoveryListener.withKeyword != null) {
if (withKeyword != null) {
reportRecoverableError(
recoveryListener.withKeyword, fasta.messageMultipleWith);
} else {
if (hasImplements) {
reportRecoverableError(recoveryListener.withKeyword,
fasta.messageImplementsBeforeWith);
}
withKeyword = recoveryListener.withKeyword;
}
}
token = parseClassImplementsOpt(token);
if (recoveryListener.implementsKeyword != null) {
if (hasImplements) {
reportRecoverableError(recoveryListener.implementsKeyword,
fasta.messageMultipleImplements);
} else {
hasImplements = true;
}
}
listener.handleRecoverClassHeader();
// Exit if a class body is detected, or if no progress has been made
} while (!optional('{', token.next) && start != token);
if (withKeyword != null && !hasExtends) {
reportRecoverableError(withKeyword, fasta.messageWithWithoutExtends);
}
listener = primaryListener;
return token;
}
Token parseClassExtendsOpt(Token token) {
Token next = token.next;
if (optional('extends', next)) {
Token extendsKeyword = next;
token = computeType(next, true).ensureTypeNotVoid(next, this);
if (optional('with', token.next)) {
token = parseMixinApplicationRest(token);
} else {
token = token;
}
listener.handleClassExtends(extendsKeyword);
} else {
listener.handleNoType(token);
listener.handleClassExtends(null);
}
return token;
}
/// ```
/// implementsClause:
/// 'implements' typeName (',' typeName)*
/// ;
/// ```
Token parseClassImplementsOpt(Token token) {
Token implementsKeyword;
int interfacesCount = 0;
if (optional('implements', token.next)) {
implementsKeyword = token.next;
do {
token =
computeType(token.next, true).ensureTypeNotVoid(token.next, this);
++interfacesCount;
} while (optional(',', token.next));
}
listener.handleClassImplements(implementsKeyword, interfacesCount);
return token;
}
Token parseStringPart(Token token) {
token = token.next;
while (token.kind != STRING_TOKEN) {
if (token is ErrorToken) {
reportErrorToken(token, true);
} else {
token = reportUnrecoverableErrorWithToken(
token, fasta.templateExpectedString);
}
token = token.next;
}
listener.handleStringPart(token);
return token;
}
/// Insert a synthetic identifier after the given [token] and create an error
/// message based on the given [context]. Return the synthetic identifier that
/// was inserted.
Token insertSyntheticIdentifier(Token token, IdentifierContext context,
{Message message, Token messageOnToken}) {
Token next = token.next;
reportRecoverableError(messageOnToken ?? next,
message ?? context.recoveryTemplate.withArguments(next));
Token identifier = new SyntheticStringToken(
TokenType.IDENTIFIER,
context == IdentifierContext.methodDeclaration ||
context == IdentifierContext.topLevelVariableDeclaration ||
context == IdentifierContext.fieldDeclaration
? '#synthetic_identifier_${next.offset}'
: '',
next.charOffset,
0);
rewriter.insertTokenAfter(token, identifier);
return token.next;
}
/// Parse a simple identifier at the given [token], and return the identifier
/// that was parsed.
///
/// If the token is not an identifier, or is not appropriate for use as an
/// identifier in the given [context], create a synthetic identifier, report
/// an error, and return the synthetic identifier.
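///
/// For example, given `class 42 {}`, the token `42` cannot be used as a
/// class name, so an error would be reported and a synthetic identifier
/// returned in its place (an illustrative sketch; the exact recovery is
/// delegated to the [context]).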
Token ensureIdentifier(Token token, IdentifierContext context) {
assert(context != null);
Token identifier = token.next;
if (identifier.kind != IDENTIFIER_TOKEN) {
identifier = context.ensureIdentifier(token, this);
assert(identifier != null);
assert(identifier.isKeywordOrIdentifier);
}
listener.handleIdentifier(identifier, context);
return identifier;
}
/// Return `true` if the given [next] token should be treated like the start
/// of an expression for the purposes of recovery.
bool isExpressionStartForRecovery(Token next) =>
next.isKeywordOrIdentifier ||
next.type == TokenType.DOUBLE ||
next.type == TokenType.HASH ||
next.type == TokenType.HEXADECIMAL ||
next.type == TokenType.IDENTIFIER ||
next.type == TokenType.INT ||
next.type == TokenType.STRING ||
optional('{', next) ||
optional('(', next) ||
optional('[', next) ||
optional('[]', next) ||
optional('<', next) ||
optional('!', next) ||
optional('-', next) ||
optional('~', next) ||
optional('++', next) ||
optional('--', next);
Token expect(String string, Token token) {
// TODO(danrubel): update all uses of expect(';'...) to ensureSemicolon
// then add assert(!identical(';', string));
if (!identical(string, token.stringValue)) {
return reportUnrecoverableError(
token, fasta.templateExpectedButGot.withArguments(string))
.next;
}
return token.next;
}
/// ```
/// typeVariable:
/// metadata? identifier (('extends' | 'super') typeName)?
/// ;
/// ```
Token parseTypeVariable(Token token) {
token = parseMetadataStar(token);
token = token.next.kind == IDENTIFIER_TOKEN
? token.next
: IdentifierContext.typeVariableDeclaration
.ensureIdentifier(token, this);
listener.beginTypeVariable(token);
Token extendsOrSuper = null;
Token next = token.next;
if (optional('extends', next) || optional('super', next)) {
extendsOrSuper = next;
token = parseType(next);
} else {
listener.handleNoType(token);
}
listener.endTypeVariable(token.next, extendsOrSuper);
return token;
}
bool notEofOrValue(String value, Token token) {
return !identical(token.kind, EOF_TOKEN) &&
!identical(value, token.stringValue);
}
/// Parse a type, if it is appropriate to do so.
///
/// If this method can parse a type, it generates the corresponding listener
/// events and returns the last consumed token. Otherwise, it generates a
/// `handleNoType` event and returns the token preceding where the type
/// would have been.
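///
/// Token sequences recognized as types by the analysis below include, for
/// example, `int`, `p.T`, `List<int>`, `void`, and generalized function
/// types such as `int Function(int) Function()` (illustrative, not
/// exhaustive).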
Token parseType(Token token,
[TypeContinuation continuation = TypeContinuation.Required,
IdentifierContext continuationContext,
MemberKind memberKind,
Token varFinalOrConst]) {
/// True if we've seen the `var` keyword.
bool hasVar = false;
/// The token before [token].
Token beforeToken;
/// The token before the `begin` token.
Token beforeBegin;
/// Where the type begins.
Token begin;
/// Non-null if 'void' is the first token.
Token voidToken;
/// True if the tokens at [begin] look like a type.
bool looksLikeType = false;
/// True if a type that could be a return type for a generalized function
/// type was seen during analysis.
bool hasReturnType = false;
/// The identifier context to use for parsing the type.
IdentifierContext context = IdentifierContext.typeReference;
/// Non-null if type arguments were seen during analysis.
Token typeArguments;
/// The number of function types seen during analysis.
int functionTypes = 0;
/// The tokens before the start of type variables of function types seen
/// during analysis. Notice that the tokens in this list might precede
/// either `'<'` or `'('` as not all function types have type parameters.
/// Also, it is safe to assume that `endGroup` is non-null for the `'<'` or
/// `'('` token that follows each of these tokens.
Link<Token> typeVariableStarters = const Link<Token>();
{
// Analyse the next tokens to see if they could be a type.
beforeToken = beforeBegin = token;
token = begin = token.next;
if (optional("void", token)) {
// `void` is a type.
looksLikeType = true;
beforeToken = voidToken = token;
token = token.next;
} else if (isValidTypeReference(token) &&
!isGeneralizedFunctionType(token)) {
// We're looking at an identifier that could be a type (or `dynamic`).
looksLikeType = true;
beforeToken = token;
token = token.next;
if (optional(".", token) && isValidTypeReference(token.next)) {
// We're looking at `prefix '.' identifier`.
context = IdentifierContext.prefixedTypeReference;
beforeToken = token.next;
token = beforeToken.next;
}
if (optional("<", token)) {
Token close = token.endGroup;
if (close != null &&
(optional(">", close) || optional(">>", close))) {
// We found some type arguments.
typeArguments = token;
beforeToken = close;
token = close.next;
}
}
} else if (token.isModifier && isValidTypeReference(token.next)) {
// Recovery - report error and skip modifier
reportRecoverableErrorWithToken(token, fasta.templateExpectedType);
return parseType(token, continuation, continuationContext, memberKind);
}
// If what we have seen so far looks like a type, that could be a return
// type for a generalized function type.
hasReturnType = looksLikeType;
while (optional("Function", token)) {
Token typeVariableStart = token;
if (optional("<", token.next)) {
Token close = token.next.endGroup;
if (close != null && optional(">", close)) {
beforeToken = previousToken(token, close);
token = close;
} else {
break; // Not a function type.
}
}
if (optional("(", token.next)) {
// This is a function type.
Token close = token.next.endGroup;
assert(optional(")", close));
looksLikeType = true;
functionTypes++;
typeVariableStarters =
typeVariableStarters.prepend(typeVariableStart);
beforeToken = close;
token = close.next;
} else {
break; // Not a function type.
}
}
}
/// Call this function when it's known that [begin] is a type. This
/// function will call the appropriate event methods on [listener] to
/// handle the type.
Token commitType() {
int count = 0;
for (Token typeVariableStart in typeVariableStarters) {
count++;
parseTypeVariablesOpt(typeVariableStart);
listener.beginFunctionType(begin);
}
assert(count == functionTypes);
if (functionTypes > 0 && !hasReturnType) {
// A function type without return type.
// Push the non-existing return type first. The loop below will
// generate the full type.
listener.handleNoType(beforeBegin);
token = beforeBegin;
} else if (voidToken != null) {
listener.handleVoidKeyword(voidToken);
token = voidToken;
} else {
token = ensureIdentifier(beforeBegin, context);
token = parseQualifiedRestOpt(
token, IdentifierContext.typeReferenceContinuation);
assert(typeArguments == null || typeArguments == token.next);
token = parseTypeArgumentsOpt(token);
listener.handleType(begin, token.next);
}
for (int i = 0; i < functionTypes; i++) {
Token next = token.next;
assert(optional('Function', next));
Token functionToken = next;
if (optional("<", next.next)) {
// Skip type parameters, they were parsed above.
next = next.next.endGroup;
}
token = parseFormalParametersRequiredOpt(
next, MemberKind.GeneralizedFunctionType);
listener.endFunctionType(functionToken, token.next);
}
if (hasVar) {
reportRecoverableError(begin, fasta.messageTypeAfterVar);
}
return token;
}
switch (continuation) {
case TypeContinuation.Required:
// If the token after the type is not an identifier,
// then report a missing type.
if (!token.isIdentifier) {
if (memberKind == MemberKind.TopLevelField ||
memberKind == MemberKind.NonStaticField ||
memberKind == MemberKind.StaticField ||
memberKind == MemberKind.Local) {
reportRecoverableError(
begin, fasta.messageMissingConstFinalVarOrType);
listener.handleNoType(beforeBegin);
return beforeBegin;
}
}
return commitType();
optional:
case TypeContinuation.Optional:
if (looksLikeType) {
if (functionTypes > 0) {
return commitType(); // Parse function type.
}
if (voidToken != null) {
listener.handleVoidKeyword(voidToken);
return voidToken;
}
if (token.isIdentifier || optional('this', token)) {
return commitType(); // Parse type.
}
}
listener.handleNoType(beforeBegin);
return beforeBegin;
case TypeContinuation.OptionalAfterVar:
hasVar = true;
continue optional;
}
throw "Internal error: Unhandled continuation '$continuation'.";
}
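/// Parse type arguments if the token after [token] is `<`; otherwise
/// generate a `handleNoTypeArguments` event. An informal sketch of the
/// production (paraphrased):
/// ```
/// typeArguments:
///   '<' type (',' type)* '>'
///   ;
/// ```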
Token parseTypeArgumentsOpt(Token token) {
Token next = token.next;
if (optional('<', next)) {
BeginToken begin = next;
rewriteLtEndGroupOpt(begin);
listener.beginTypeArguments(begin);
int count = 0;
do {
token = parseType(next);
next = token.next;
++count;
} while (optional(',', next));
if (next == begin.endToken) {
token = next;
} else if (begin.endToken != null) {
reportRecoverableError(
next, fasta.templateExpectedToken.withArguments('>'));
token = begin.endToken;
} else {
token = begin.endToken = ensureGt(token);
}
listener.endTypeArguments(count, begin, token);
} else {
listener.handleNoTypeArguments(next);
}
return token;
}
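/// Parse type variables (type parameters) if the token after [token] is
/// `<`; otherwise generate a `handleNoTypeVariables` event. An informal
/// sketch of the production (paraphrased):
/// ```
/// typeParameters:
///   '<' typeVariable (',' typeVariable)* '>'
///   ;
/// ```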
Token parseTypeVariablesOpt(Token token) {
Token next = token.next;
if (optional('<', next)) {
BeginToken begin = next;
rewriteLtEndGroupOpt(begin);
listener.beginTypeVariables(begin);
int count = 0;
do {
token = parseTypeVariable(next);
next = token.next;
++count;
} while (optional(',', next));
if (next == begin.endToken) {
token = next;
} else if (begin.endToken != null) {
reportRecoverableError(
next, fasta.templateExpectedToken.withArguments('>'));
token = begin.endToken;
} else {
token = begin.endToken = ensureGt(token);
}
listener.endTypeVariables(count, begin, token);
} else {
listener.handleNoTypeVariables(next);
}
return token;
}
/// Parse a top level field or function.
///
/// This method is only invoked from outside the parser. As a result, this
/// method takes the next token to be consumed rather than the last consumed
/// token and returns the token after the last consumed token rather than the
/// last consumed token.
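///
/// In other words, external callers pass the first token of the member
/// itself; [syntheticPreviousToken] is used in the body to synthesize the
/// "token before" that the internal parse methods expect.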
Token parseTopLevelMember(Token token) {
token = parseMetadataStar(syntheticPreviousToken(token));
return parseTopLevelMemberImpl(token).next;
}
Token parseTopLevelMemberImpl(Token token) {
Token beforeStart = token;
Token next = token.next;
listener.beginTopLevelMember(next);
Token externalToken;
Token varFinalOrConst;
if (isModifier(next)) {
if (optional('external', next)) {
externalToken = token = next;
next = token.next;
}
if (isModifier(next)) {
if (optional('final', next)) {
varFinalOrConst = token = next;
next = token.next;
} else if (optional('var', next)) {
varFinalOrConst = token = next;
next = token.next;
} else if (optional('const', next)) {
varFinalOrConst = token = next;
next = token.next;
}
if (isModifier(next)) {
// Recovery
if (varFinalOrConst != null &&
(optional('final', next) ||
optional('var', next) ||
optional('const', next))) {
// If another `var`, `final`, or `const` then fall through
// to parse that as part of the next top level declaration.
} else {
ModifierRecoveryContext context = new ModifierRecoveryContext(this);
token = context.parseTopLevelModifiers(token,
externalToken: externalToken, varFinalOrConst: varFinalOrConst);
next = token.next;
externalToken = context.externalToken;
varFinalOrConst = context.varFinalOrConst;
context = null;
}
}
}
}
Token beforeType = token;
TypeInfo typeInfo = computeType(token, false, true);
token = typeInfo.skipType(token);
next = token.next;
Token getOrSet;
String value = next.stringValue;
if (identical(value, 'get') || identical(value, 'set')) {
if (next.next.isIdentifier) {
getOrSet = token = next;
next = token.next;
}
}
if (next.type != TokenType.IDENTIFIER) {
value = next.stringValue;
if (identical(value, 'factory') || identical(value, 'operator')) {
// `factory` and `operator` can be used as identifiers.
value = next.next.stringValue;
if (getOrSet == null &&
!identical(value, '(') &&
!identical(value, '{') &&
!identical(value, '<') &&
!identical(value, '=>') &&
!identical(value, '=') &&
!identical(value, ';') &&
!identical(value, ',')) {
// Recovery
value = next.stringValue;
if (identical(value, 'factory')) {
reportRecoverableError(
next, fasta.messageFactoryTopLevelDeclaration);
} else {
reportRecoverableError(next, fasta.messageTopLevelOperator);
if (next.next.isOperator) {
token = next;
next = token.next;
if (optional('(', next.next)) {
rewriter.insertTokenAfter(
next,
new SyntheticStringToken(
TokenType.IDENTIFIER,
'#synthetic_identifier_${next.charOffset}',
next.charOffset,
0));
}
}
}
listener.handleInvalidTopLevelDeclaration(next);
return next;
}
// Fall through and continue parsing
} else if (!next.isIdentifier) {
// Recovery
if (next.isKeyword) {
// Fall through to parse the keyword as the identifier.
// ensureIdentifier will report the error.
} else if (token == beforeStart) {
// Ensure we make progress.
return parseInvalidTopLevelDeclaration(token);
} else {
// Looks like a declaration missing an identifier.
// Insert synthetic identifier and fall through.
insertSyntheticIdentifier(token, IdentifierContext.methodDeclaration);
next = token.next;
}
}
}
// At this point, `token` is beforeName.
next = next.next;
value = next.stringValue;
if (getOrSet != null ||
identical(value, '(') ||
identical(value, '{') ||
identical(value, '<') ||
identical(value, '.') ||
identical(value, '=>')) {
if (varFinalOrConst != null) {
if (optional('var', varFinalOrConst)) {
reportRecoverableError(varFinalOrConst, fasta.messageVarReturnType);
} else {
reportRecoverableErrorWithToken(
varFinalOrConst, fasta.templateExtraneousModifier);
}
}
return parseTopLevelMethod(beforeStart, externalToken, beforeType,
typeInfo, getOrSet, token.next);
}
if (getOrSet != null) {
reportRecoverableErrorWithToken(
getOrSet, fasta.templateExtraneousModifier);
}
return parseFields(beforeStart, externalToken, null, null, varFinalOrConst,
beforeType, typeInfo, token.next, true);
}
Token parseFields(
Token beforeStart,
Token externalToken,
Token staticToken,
Token covariantToken,
Token varFinalOrConst,
Token beforeType,
TypeInfo typeInfo,
Token name,
bool isTopLevel) {
if (externalToken != null) {
reportRecoverableError(externalToken, fasta.messageExternalField);
}
if (covariantToken != null) {
if (varFinalOrConst != null && optional('final', varFinalOrConst)) {
reportRecoverableError(covariantToken, fasta.messageFinalAndCovariant);
covariantToken = null;
}
}
if (typeInfo == noType) {
if (varFinalOrConst == null) {
reportRecoverableError(name, fasta.messageMissingConstFinalVarOrType);
}
} else {
if (varFinalOrConst != null && optional('var', varFinalOrConst)) {
reportRecoverableError(varFinalOrConst, fasta.messageTypeAfterVar);
}
}
Token token = typeInfo.parseType(beforeType, this);
assert(token.next == name);
IdentifierContext context = isTopLevel
? IdentifierContext.topLevelVariableDeclaration
: IdentifierContext.fieldDeclaration;
name = ensureIdentifier(token, context);
int fieldCount = 1;
token = parseFieldInitializerOpt(name, name, varFinalOrConst, isTopLevel);
while (optional(',', token.next)) {
name = ensureIdentifier(token.next, context);
token = parseFieldInitializerOpt(name, name, varFinalOrConst, isTopLevel);
++fieldCount;
}
token = ensureSemicolon(token);
if (isTopLevel) {
listener.endTopLevelFields(staticToken, covariantToken, varFinalOrConst,
fieldCount, beforeStart.next, token);
} else {
listener.endFields(staticToken, covariantToken, varFinalOrConst,
fieldCount, beforeStart.next, token);
}
return token;
}
Token parseTopLevelMethod(Token beforeStart, Token externalToken,
Token beforeType, TypeInfo typeInfo, Token getOrSet, Token name) {
listener.beginTopLevelMethod(beforeStart, externalToken);
Token token = typeInfo.parseType(beforeType, this);
assert(token.next == (getOrSet ?? name));
name = ensureIdentifier(
getOrSet ?? token, IdentifierContext.topLevelFunctionDeclaration);
bool isGetter = false;
if (getOrSet == null) {
token = parseMethodTypeVar(name);
} else {
isGetter = optional("get", getOrSet);
token = name;
listener.handleNoTypeVariables(token.next);
}
checkFormals(token, name, isGetter, MemberKind.TopLevelMethod);
token = parseFormalParametersOpt(token, MemberKind.TopLevelMethod);
AsyncModifier savedAsyncModifier = asyncState;
Token asyncToken = token.next;
token = parseAsyncModifierOpt(token);
if (getOrSet != null && !inPlainSync && optional("set", getOrSet)) {
reportRecoverableError(asyncToken, fasta.messageSetterNotSync);
}
token = parseFunctionBody(token, false, externalToken != null);
asyncState = savedAsyncModifier;
listener.endTopLevelMethod(beforeStart.next, getOrSet, token);
return token;
}
Token parseMethodTypeVar(Token name) {
TypeParamOrArgInfo typeVar = computeTypeParamOrArg(name, true);
Token token;
if (typeVar == noTypeParamOrArg || name.next.endGroup != null) {
token = typeVar.parseVariables(name, this);
} else {
// Recovery
token = typeVar.parseVariables(name, this);
if (optional('=', token.next)) {
token = token.next;
reportRecoverableErrorWithToken(token, fasta.templateUnexpectedToken);
}
}
return token;
}
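/// Check that the declaration named by [name] has a formal parameter list
/// exactly when it should: a getter must not have one, and any other method
/// must. When the parameter list is missing, report an error and insert a
/// synthetic `()` pair so parsing can continue.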
void checkFormals(Token token, Token name, bool isGetter, MemberKind kind) {
Token next = token.next;
if (optional("(", next)) {
if (isGetter) {
reportRecoverableError(next, fasta.messageGetterWithFormals);
}
} else if (!isGetter) {
if (optional('operator', name)) {
Token next = name.next;
if (next.isOperator) {
name = next;
} else if (isUnaryMinus(next)) {
name = next.next;
}
}
// Recovery
reportRecoverableError(name, missingParameterMessage(kind));
rewriter.insertParens(token, false);
}
}
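/// Parse the `'=' expression` initializer following the field [name], if
/// present. When the initializer is absent, report an error for a `const`
/// field or, at the top level, for a `final` field (for example `const x;`),
/// and generate a `handleNoFieldInitializer` event.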
Token parseFieldInitializerOpt(
Token token, Token name, Token varFinalOrConst, bool isTopLevel) {
Token next = token.next;
if (optional('=', next)) {
Token assignment = next;
listener.beginFieldInitializer(next);
token = parseExpression(next);
listener.endFieldInitializer(assignment, token.next);
} else {
if (varFinalOrConst != null && !name.isSynthetic) {
if (optional("const", varFinalOrConst)) {
reportRecoverableError(
name,
fasta.templateConstFieldWithoutInitializer
.withArguments(name.lexeme));
} else if (isTopLevel && optional("final", varFinalOrConst)) {
reportRecoverableError(
name,
fasta.templateFinalFieldWithoutInitializer
.withArguments(name.lexeme));
}
}
listener.handleNoFieldInitializer(token.next);
}
return token;
}
Token parseVariableInitializerOpt(Token token) {
if (optional('=', token.next)) {
Token assignment = token.next;
listener.beginVariableInitializer(assignment);
token = parseExpression(assignment);
listener.endVariableInitializer(assignment);
} else {
listener.handleNoVariableInitializer(token.next);
}
return token;
}
Token parseInitializersOpt(Token token) {
if (optional(':', token.next)) {
return parseInitializers(token);
} else {
listener.handleNoInitializers();
return token;
}
}
/// ```
/// initializers:
/// ':' initializerListEntry (',' initializerListEntry)*
/// ;
/// ```
Token parseInitializers(Token token) {
Token begin = token.next;
assert(optional(':', begin));
listener.beginInitializers(begin);
int count = 0;
bool old = mayParseFunctionExpressions;
mayParseFunctionExpressions = false;
do {
token = parseInitializer(token.next);
++count;
} while (optional(',', token.next));
mayParseFunctionExpressions = old;
listener.endInitializers(count, begin, token.next);
return token;
}
/// ```
/// initializerListEntry:
/// 'super' ('.' identifier)? arguments |
/// fieldInitializer |
/// assertion
/// ;
///
/// fieldInitializer:
/// ('this' '.')? identifier '=' conditionalExpression cascadeSection*
/// ;
/// ```
Token parseInitializer(Token token) {
Token next = token.next;
listener.beginInitializer(next);
Token beforeExpression = token;
if (optional('assert', next)) {
token = parseAssert(token, Assert.Initializer);
listener.endInitializer(token.next);
return token;
} else if (optional('super', next)) {
return parseInitializerExpressionRest(token);
} else if (optional('this', next)) {
token = next;
next = token.next;
if (optional('.', next)) {
token = next;
next = token.next;
if (next.isIdentifier) {
token = next;
} else {
// Recovery
token = insertSyntheticIdentifier(
token, IdentifierContext.fieldInitializer);
}
next = token.next;
if (optional('=', next)) {
return parseInitializerExpressionRest(beforeExpression);
}
}
if (optional('(', next)) {
token = parseInitializerExpressionRest(beforeExpression);
next = token.next;
if (optional('{', next) || optional('=>', next)) {
reportRecoverableError(
next, fasta.messageRedirectingConstructorWithBody);
}
return token;
}
// Recovery
if (optional('this', token)) {
// TODO(danrubel): Consider a better error message indicating that
// `this.<fieldname>=` is expected.
reportRecoverableError(
next, fasta.templateExpectedButGot.withArguments('.'));
rewriter.insertTokenAfter(
token, new SyntheticToken(TokenType.PERIOD, next.offset));
token = token.next;
rewriter.insertTokenAfter(token,
new SyntheticStringToken(TokenType.IDENTIFIER, '', next.offset));
token = token.next;
next = token.next;
}
// Fall through to recovery
} else if (next.isIdentifier) {
if (optional('=', next.next)) {
return parseInitializerExpressionRest(token);
}
// Fall through to recovery
} else {
// Recovery
insertSyntheticIdentifier(token, IdentifierContext.fieldInitializer,
message: fasta.messageExpectedAnInitializer, messageOnToken: token);
return parseInitializerExpressionRest(beforeExpression);
}
// Recovery
// Insert a synthetic assignment to ensure that the expression is indeed
// an assignment. Failing to do so causes this test to fail:
// pkg/front_end/testcases/regress/issue_31192.dart
// TODO(danrubel): Investigate better recovery.
token = insertSyntheticIdentifier(
beforeExpression, IdentifierContext.fieldInitializer,
message: fasta.messageMissingAssignmentInInitializer);
rewriter.insertTokenAfter(
token, new SyntheticToken(TokenType.EQ, token.offset));
return parseInitializerExpressionRest(beforeExpression);
}
Token parseInitializerExpressionRest(Token token) {
token = parseExpression(token);
listener.endInitializer(token.next);
return token;
}
/// If the next token is an opening curly brace, return it. Otherwise, use the
/// given [template] to report an error, insert an opening and a closing curly
/// brace, and return the newly inserted opening curly brace. If the
/// [template] is `null`, use a default error message instead.
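///
/// For example, when recovering from a class declaration such as `class C`
/// with no body, a synthetic `{` and `}` are inserted so that an empty class
/// body can still be parsed (illustrative; see [skipClassBody]).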
Token ensureBlock(
Token token, Template<Message Function(Token token)> template) {
Token next = token.next;
if (optional('{', next)) return next;
Message message = template == null
? fasta.templateExpectedButGot.withArguments('{')
: template.withArguments(next);
reportRecoverableError(next, message);
return insertBlock(token);
}
Token insertBlock(Token token) {
Token next = token.next;
Token replacement = link(
new SyntheticBeginToken(TokenType.OPEN_CURLY_BRACKET, next.offset),
new SyntheticToken(TokenType.CLOSE_CURLY_BRACKET, next.offset));
rewriter.insertTokenAfter(token, replacement);
return replacement;
}
/// If the next token is a closing parenthesis, return it.
/// Otherwise, report an error and return the closing parenthesis
/// associated with the specified open parenthesis.
Token ensureCloseParen(Token token, Token openParen) {
Token next = token.next;
if (optional(')', next)) {
return next;
}
// TODO(danrubel): Pass in context for better error message.
reportRecoverableError(
next, fasta.templateExpectedButGot.withArguments(')'));
// Scanner guarantees a closing parenthesis
// TODO(danrubel): Improve recovery by having callers parse tokens
// between `token` and `openParen.endGroup`.
return openParen.endGroup;
}
/// If the next token is a colon, return it. Otherwise, report an
/// error, insert a synthetic colon, and return the inserted colon.
Token ensureColon(Token token) {
Token next = token.next;
if (optional(':', next)) return next;
Message message = fasta.templateExpectedButGot.withArguments(':');
Token newToken = new SyntheticToken(TokenType.COLON, next.charOffset);
return rewriteAndRecover(token, message, newToken).next;
}
/// If the token after [token] is a '>', return it.
/// If the next token is a composite greater-than token such as '>>',
/// then replace that token with separate tokens, and return the first '>'.
/// Otherwise, report an error, insert a synthetic '>',
/// and return that newly inserted synthetic '>'.
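///
/// For example, in `List<List<int>>` the scanner produces a single `>>`
/// token for the two closing brackets; splitting it into two `>` tokens
/// allows each type argument list to be closed separately.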
Token ensureGt(Token token) {
Token next = token.next;
String value = next.stringValue;
if (value == '>') {
return next;
}
rewriteGtCompositeOrRecover(token, next, value);
return token.next;
}
/// If the token after [token] is not a literal string,
/// then report an error and insert a synthetic literal string.
/// Call `parseLiteralString` and return the result.
Token ensureLiteralString(Token token) {
Token next = token.next;
if (!identical(next.kind, STRING_TOKEN)) {
Message message = fasta.templateExpectedString.withArguments(next);
Token newToken =
new SyntheticStringToken(TokenType.STRING, '""', next.charOffset, 0);
rewriteAndRecover(token, message, newToken);
}
return parseLiteralString(token);
}
/// If the token after [token] is a semi-colon, return it.
/// Otherwise, report an error, insert a synthetic semi-colon,
/// and return the inserted semi-colon.
Token ensureSemicolon(Token token) {
// TODO(danrubel): Once all expect(';'...) call sites have been converted
// to use this method, remove similar semicolon recovery code
// from the handleError method in element_listener.dart.
Token next = token.next;
if (optional(';', next)) return next;
Message message = fasta.templateExpectedButGot.withArguments(';');
Token newToken = new SyntheticToken(TokenType.SEMICOLON, next.charOffset);
return rewriteAndRecover(token, message, newToken).next;
}
/// Report an error at the token after [token] that has the given [message].
/// Insert the [newToken] after [token] and return [token].
Token rewriteAndRecover(Token token, Message message, Token newToken) {
reportRecoverableError(token.next, message);
rewriter.insertTokenAfter(token, newToken);
return token;
}
/// Replace the token after [token] with `[` followed by `]`
/// and return [token].
Token rewriteSquareBrackets(Token token) {
Token next = token.next;
assert(optional('[]', next));
Token replacement = link(
new BeginToken(TokenType.OPEN_SQUARE_BRACKET, next.offset),
new Token(TokenType.CLOSE_SQUARE_BRACKET, next.offset + 1));
rewriter.replaceTokenFollowing(token, replacement);
return token;
}
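/// Replace the composite greater-than token [next] with its component
/// tokens: `>>` becomes `>` `>`, `>=` becomes `>` `=`, and `>>=` becomes
/// `>` `>` `=`. For any other token, report an error and insert a synthetic
/// `>` instead.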
void rewriteGtCompositeOrRecover(Token token, Token next, String value) {
assert(value != '>');
Token replacement = new Token(TokenType.GT, next.charOffset);
if (identical(value, '>>')) {
replacement.setNext(new Token(TokenType.GT, next.charOffset + 1));
} else if (identical(value, '>=')) {
replacement.setNext(new Token(TokenType.EQ, next.charOffset + 1));
} else if (identical(value, '>>=')) {
replacement.setNext(new Token(TokenType.GT, next.charOffset + 1));
replacement.next.setNext(new Token(TokenType.EQ, next.charOffset + 2));
} else {
// Recovery
rewriteAndRecover(token, fasta.templateExpectedToken.withArguments('>'),
new SyntheticToken(TokenType.GT, next.offset));
return;
}
rewriter.replaceTokenFollowing(token, replacement);
}
void rewriteLtEndGroupOpt(BeginToken beginToken) {
assert(optional('<', beginToken));
Token end = beginToken.endGroup;
String value = end?.stringValue;
if (value != null && value.length > 1) {
Token beforeEnd = previousToken(beginToken, end);
rewriteGtCompositeOrRecover(beforeEnd, end, value);
beginToken.endGroup = null;
}
}
/// Report the token after [token] as unexpected and return it if the token
/// that follows it is one of the [expectedNext] values; otherwise just
/// return [token] without reporting an error.
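///
/// For example, when recovering a class header such as
/// `class C foo extends B {}`, the identifier `foo` can be reported as
/// unexpected and skipped because the token after it (`extends`) is in the
/// expected list (an illustrative sketch of one caller's use).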
Token skipUnexpectedTokenOpt(Token token, List<String> expectedNext) {
Token next = token.next;
if (next.keyword == null) {
final String nextValue = next.next.stringValue;
for (String expectedValue in expectedNext) {
if (identical(nextValue, expectedValue)) {
reportRecoverableErrorWithToken(next, fasta.templateUnexpectedToken);
return next;
}
}
}
return token;
}
Token parseNativeClause(Token token) {
Token nativeToken = token = token.next;
assert(optional('native', nativeToken));
bool hasName = false;
if (token.next.kind == STRING_TOKEN) {
hasName = true;
token = parseLiteralString(token);
}
listener.handleNativeClause(nativeToken, hasName);
reportRecoverableError(
nativeToken, fasta.messageNativeClauseShouldBeAnnotation);
return token;
}
Token skipClassBody(Token token) {
// The scanner ensures that `{` always has a closing `}`.
return ensureBlock(token, fasta.templateExpectedClassBody);
}
/// ```
/// classBody:
/// '{' classMember* '}'
/// ;
/// ```
Token parseClassBody(Token token) {
Token begin = token = token.next;
assert(optional('{', token));
listener.beginClassBody(token);
int count = 0;
while (notEofOrValue('}', token.next)) {
token = parseClassMemberImpl(token);
++count;
}
token = token.next;
assert(optional('}', token));
listener.endClassBody(count, begin, token);
return token;
}
bool isUnaryMinus(Token token) =>
token.kind == IDENTIFIER_TOKEN &&
token.lexeme == 'unary' &&
optional('-', token.next);
/// Parse a class member.
///
/// This method is only invoked from outside the parser. As a result, this
/// method takes the next token to be consumed rather than the last consumed
/// token and returns the token after the last consumed token rather than the
/// last consumed token.
Token parseClassMember(Token token) {
return parseClassMemberImpl(syntheticPreviousToken(token)).next;
}
/// ```
/// classMember:
/// fieldDeclaration |
/// constructorDeclaration |
/// methodDeclaration
/// ;
/// ```
Token parseClassMemberImpl(Token token) {
Token beforeStart = token = parseMetadataStar(token);
Token covariantToken;
Token externalToken;
Token staticToken;
Token varFinalOrConst;
Token next = token.next;
if (isModifier(next)) {
if (optional('external', next)) {
externalToken = token = next;
next = token.next;
}
if (isModifier(next)) {
if (optional('static', next)) {
staticToken = token = next;
next = token.next;
} else if (optional('covariant', next)) {
covariantToken = token = next;
next = token.next;
}
if (isModifier(next)) {
if (optional('final', next)) {
varFinalOrConst = token = next;
next = token.next;
} else if (optional('var', next)) {
varFinalOrConst = token = next;
next = token.next;
} else if (optional('const', next) && covariantToken == null) {
varFinalOrConst = token = next;
next = token.next;
}
if (isModifier(next)) {
ModifierRecoveryContext context = new ModifierRecoveryContext(this);
token = context.parseClassMemberModifiers(token,
externalToken: externalToken,
staticToken: staticToken,
covariantToken: covariantToken,
varFinalOrConst: varFinalOrConst);
next = token.next;
covariantToken = context.covariantToken;
externalToken = context.externalToken;
staticToken = context.staticToken;
varFinalOrConst = context.varFinalOrConst;
context = null;
}
}
}
}
listener.beginMember();
Token beforeType = token;
TypeInfo typeInfo = computeType(token, false, true);
token = typeInfo.skipType(token);
next = token.next;
Token getOrSet;
if (next.type != TokenType.IDENTIFIER) {
String value = next.stringValue;
if (identical(value, 'get') || identical(value, 'set')) {
if (next.next.isIdentifier) {
getOrSet = token = next;
next = token.next;
}
// Fall through to continue parsing `get` or `set` as an identifier.
} else if (identical(value, 'factory')) {
Token next2 = next.next;
if (next2.isIdentifier || next2.isModifier) {
token = parseFactoryMethod(token, beforeStart, externalToken,
staticToken ?? covariantToken, varFinalOrConst);
listener.endMember();
return token;
}
// Fall through to continue parsing `factory` as an identifier.
} else if (identical(value, 'operator')) {
Token next2 = next.next;
// `operator` can be used as an identifier as in
// `int operator<T>()` or `int operator = 2`
if (next2.isUserDefinableOperator && next2.endGroup ==