| // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| // for details. All rights reserved. Use of this source code is governed by a |
| // BSD-style license that can be found in the LICENSE file. |
| |
| library fasta.parser.parser; |
| |
| import '../fasta_codes.dart' show Message, Template; |
| |
| import '../fasta_codes.dart' as fasta; |
| |
| import '../scanner.dart' show ErrorToken, Token; |
| |
| import '../scanner/recover.dart' show closeBraceFor, skipToEof; |
| |
| import '../../scanner/token.dart' |
| show |
| ASSIGNMENT_PRECEDENCE, |
| BeginToken, |
| CASCADE_PRECEDENCE, |
| EQUALITY_PRECEDENCE, |
| Keyword, |
| POSTFIX_PRECEDENCE, |
| RELATIONAL_PRECEDENCE, |
| SyntheticBeginToken, |
| SyntheticKeywordToken, |
| SyntheticStringToken, |
| SyntheticToken, |
| TokenType; |
| |
| import '../scanner/token_constants.dart' |
| show |
| CLOSE_CURLY_BRACKET_TOKEN, |
| COMMA_TOKEN, |
| DOUBLE_TOKEN, |
| EOF_TOKEN, |
| EQ_TOKEN, |
| FUNCTION_TOKEN, |
| GT_GT_TOKEN, |
| GT_TOKEN, |
| HASH_TOKEN, |
| HEXADECIMAL_TOKEN, |
| IDENTIFIER_TOKEN, |
| INT_TOKEN, |
| KEYWORD_TOKEN, |
| LT_TOKEN, |
| OPEN_CURLY_BRACKET_TOKEN, |
| OPEN_PAREN_TOKEN, |
| OPEN_SQUARE_BRACKET_TOKEN, |
| PERIOD_TOKEN, |
| SEMICOLON_TOKEN, |
| STRING_INTERPOLATION_IDENTIFIER_TOKEN, |
| STRING_INTERPOLATION_TOKEN, |
| STRING_TOKEN; |
| |
| import '../scanner/characters.dart' show $CLOSE_CURLY_BRACKET; |
| |
| import '../util/link.dart' show Link; |
| |
| import 'assert.dart' show Assert; |
| |
| import 'async_modifier.dart' show AsyncModifier; |
| |
| import 'directive_context.dart'; |
| |
| import 'formal_parameter_kind.dart' |
| show |
| FormalParameterKind, |
| isMandatoryFormalParameterKind, |
| isOptionalPositionalFormalParameterKind; |
| |
| import 'forwarding_listener.dart' show ForwardingListener; |
| |
| import 'identifier_context.dart' show IdentifierContext; |
| |
| import 'listener.dart' show Listener; |
| |
| import 'loop_state.dart' show LoopState; |
| |
| import 'member_kind.dart' show MemberKind; |
| |
| import 'modifier_context.dart' |
| show |
| ModifierRecoveryContext, |
| ModifierRecoveryContext2, |
| isModifier, |
| typeContinuationAfterVar; |
| |
| import 'recovery_listeners.dart' |
| show ClassHeaderRecoveryListener, ImportRecoveryListener; |
| |
| import 'token_stream_rewriter.dart' show TokenStreamRewriter; |
| |
| import 'type_continuation.dart' |
| show TypeContinuation, typeContinuationFromFormalParameterKind; |
| |
| import 'type_info.dart' show isGeneralizedFunctionType, isValidTypeReference; |
| |
| import 'util.dart' show closeBraceTokenFor, optional; |
| |
| /// An event-generating parser of Dart programs. This parser expects all tokens |
| /// in a linked list (aka a token stream). |
| /// |
| /// The class [Scanner] is used to generate a token stream. See the file |
| /// [scanner.dart](../scanner.dart). |
| /// |
| /// Subclasses of the class [Listener] are used to listen to events. |
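| /// |
| /// As a rough sketch of typical usage (here `MyListener` is an assumed |
| /// client-defined subclass of [Listener], and `firstToken` is assumed to be |
| /// the first token of a token stream produced by a scanner): |
| /// |
| ///     // Create a parser that reports parse events to the listener, then |
| ///     // parse a whole compilation unit starting at the first token. |
| ///     Parser parser = new Parser(new MyListener()); |
| ///     parser.parseUnit(firstToken); |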
| /// |
| /// Most methods of this class belong in one of four major categories: parse |
| /// methods, peek methods, ensure methods, and skip methods. |
| /// |
| /// Parse methods all have the prefix `parse`, generate events |
| /// (by calling methods on [listener]), and return the next token to parse. |
| /// Some exceptions to this last point are methods such as [parseFunctionBody] |
| /// and [parseClassBody], which return the last token parsed |
| /// rather than the next token to be parsed. |
| /// Parse methods are generally named `parseGrammarProductionSuffix`. |
| /// The suffix can be one of `opt` or `star`. |
| /// `opt` means zero or one matches, `star` means zero or more matches. |
| /// For example, [parseMetadataStar] corresponds to this grammar snippet: |
| /// `metadata*`, and [parseArgumentsOpt] corresponds to: `arguments?`. |
| /// |
| /// Peek methods all have the prefix `peek`, do not generate events |
| /// (except for errors) and may return null. |
| /// |
| /// Ensure methods all have the prefix `ensure` and may generate events. |
| /// They return the current token, or insert and return a synthetic token |
| /// if the current token does not match. For example, |
| /// [ensureSemicolon] returns the current token if the current token is a |
| /// semicolon, otherwise inserts a synthetic semicolon in the token stream |
| /// before the current token and then returns that new synthetic token. |
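| /// |
| /// For example, a parse method for a construct that ends in a semicolon |
| /// typically finishes with a call such as the following (a sketch of the |
| /// convention used throughout this class): |
| /// |
| ///     // `token` becomes the ';' that was found, or a synthetic ';' that |
| ///     // was inserted before the unexpected token. |
| ///     token = ensureSemicolon(token); |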
| /// |
| /// Skip methods are like parse methods, but all have the prefix `skip` |
| /// and skip over some parts of the file being parsed. |
| /// Typically, skip methods generate an event for the structure being skipped, |
| /// but not for its substructures. |
| /// |
| /// ## Current Token |
| /// |
| /// The current token is always to be found in a formal parameter named |
| /// `token`. This parameter should be the first parameter, as that increases |
| /// the chance that a compiler will place it in a register. |
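| /// |
| /// A sketch of the resulting method shape (the method name is illustrative |
| /// only): |
| /// |
| ///     Token parseFoo(Token token) { |
| ///       Token next = token.next; // peek at the token to be consumed |
| ///       // ... consume tokens and generate events ... |
| ///       return token; |
| ///     } |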
| /// |
| /// ## Implementation Notes |
| /// |
| /// The parser assumes that keywords, built-in identifiers, and other special |
| /// words (pseudo-keywords) are all canonicalized. To extend the parser to |
| /// recognize a new identifier, one should modify |
| /// [keyword.dart](../scanner/keyword.dart) and ensure the identifier is added |
| /// to the keyword table. |
| /// |
| /// As a consequence of this, one should not use `==` to compare strings in the |
| /// parser. One should favor the methods [optional] and [expect] to recognize |
| /// keywords or identifiers. In some cases, it's possible to compare a token's |
| /// `stringValue` using [identical], but normally [optional] will suffice. |
| /// |
| /// Historically, we over-used identical, and when identical is used on objects |
| /// other than strings, it can often be replaced by `==`. |
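| /// |
| /// For example, both of the following test for the `class` keyword, but the |
| /// first form is the one this parser prefers (a sketch): |
| /// |
| ///     if (optional('class', token)) { ... } |
| ///     if (identical(token.stringValue, 'class')) { ... } |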
| /// |
| /// ## Flexibility, Extensibility, and Specification |
| /// |
| /// The parser is designed to be flexible and extensible. Its methods are |
| /// designed to be overridden in subclasses, so it can be extended to handle |
| /// unspecified language extensions or experiments, while everything in this |
| /// file attempts to follow the specification (except when it interferes with |
| /// error recovery). |
| /// |
| /// We achieve flexibility, extensibility, and specification compliance by |
| /// following a few rules-of-thumb: |
| /// |
| /// 1. All methods in the parser should be public. |
| /// |
| /// 2. The methods follow the specified grammar, and do not implement custom |
| /// extensions, for example, `native`. |
| /// |
| /// 3. The parser doesn't rewrite the token stream (when dealing with `>>`). |
| /// |
| /// ### Implementing Extensions |
| /// |
| /// For various reasons, some Dart language implementations have used |
| /// custom/unspecified extensions to the Dart grammar. Examples of this |
| /// include diet parsing, patch files, the `native` keyword, and generic |
| /// comments. This class isn't supposed to implement any of these |
| /// features. Instead it provides hooks for those extensions to be implemented |
| /// in subclasses or listeners. Let's examine how diet parsing and the |
| /// `native` keyword are currently supported by Fasta. |
| /// |
| /// #### Legacy Implementation of `native` Keyword |
| /// |
| /// TODO(ahe,danrubel): Remove this section. |
| /// |
| /// Both dart2js and the Dart VM have used the `native` keyword to mark methods |
| /// that couldn't be implemented in the Dart language and needed to be |
| /// implemented in JavaScript or C++, respectively. An example of the syntax |
| /// extension used by the Dart VM is: |
| /// |
| /// nativeFunction() native "NativeFunction"; |
| /// |
| /// When attempting to parse this function, the parser eventually calls |
| /// [parseFunctionBody]. This method will report an unrecoverable error to the |
| /// listener with the code [fasta.messageExpectedFunctionBody]. The listener can |
| /// then look at the error code and the token and use the methods in |
| /// [native_support.dart](native_support.dart) to parse the native syntax. |
| /// |
| /// #### Implementation of Diet Parsing |
| /// |
| /// We call it _diet_ _parsing_ when the parser skips parts of a file. Both |
| /// dart2js and the Dart VM have been relying on this from early on as it allows |
| /// them to more quickly compile small programs that use small parts of big |
| /// libraries. It's also become an integrated part of how Fasta builds up |
| /// outlines before starting to parse method bodies. |
| /// |
| /// When looking through this parser, you'll find a number of unused methods |
| /// starting with `skip`. These methods are only used by subclasses, such as |
| /// [ClassMemberParser](class_member_parser.dart) and |
| /// [TopLevelParser](top_level_parser.dart). These methods violate the |
| /// principle above about following the specified grammar, and originally lived |
| /// in subclasses. However, we realized that these methods were so widely used |
| /// and hard to maintain in subclasses, that it made sense to move them here. |
| /// |
| /// ### Specification and Error Recovery |
| /// |
| /// To improve error recovery, the parser will inform the listener of |
| /// recoverable errors and continue to parse. An example of a recoverable |
| /// error is: |
| /// |
| /// Error: Asynchronous for-loop can only be used in 'async' or 'async*'... |
| /// main() { await for (var x in []) {} } |
| /// ^^^^^ |
| /// |
| /// ### Legacy Error Recovery |
| /// |
| /// What's described below will be phased out in favor of the parser |
| /// reporting and recovering from syntax errors. The motivation for this is |
| /// that we have multiple listeners that use the parser, and this will ensure |
| /// consistency. |
| /// |
| /// For unrecoverable errors, the parser will ask the listener for help to |
| /// recover from the error. We haven't made much progress on these kinds of |
| /// errors, so in most cases, the parser aborts by skipping to the end of file. |
| /// |
| /// Historically, this parser has been rather lax in what it allows, and |
| /// deferred the enforcement of some syntactical rules to subsequent phases. It |
| /// doesn't matter how we got there, only that we've identified that it's |
| /// easier if the parser reports as many errors as it can, but informs the |
| /// listener whether or not the error is recoverable. |
| /// |
| /// Currently, the parser is particularly lax when it comes to the order of |
| /// modifiers such as `abstract`, `final`, `static`, etc. Historically, dart2js |
| /// would handle such errors in later phases. We hope that these cases will go |
| /// away as Fasta matures. |
| class Parser { |
| Listener listener; |
| |
| Uri get uri => listener.uri; |
| |
| bool mayParseFunctionExpressions = true; |
| |
| /// Represents parser state: what asynchronous syntax is allowed in the |
| /// function being currently parsed. In rare situations, this can be set by |
| /// external clients, for example, to parse an expression outside a function. |
| AsyncModifier asyncState = AsyncModifier.Sync; |
| |
| // TODO(danrubel): The [loopState] and associated functionality in the |
| // [Parser] duplicates work that the resolver needs to do when resolving |
| // break/continue targets. Long term, this state and functionality will be |
| // removed from the [Parser] class and the resolver will be responsible |
| // for generating all break/continue error messages. |
| |
| /// Represents parser state: whether parsing outside a loop, |
| /// inside a loop, or inside a switch. This is used to determine whether |
| /// break and continue statements are allowed. |
| LoopState loopState = LoopState.OutsideLoop; |
| |
| /// A rewriter for inserting synthetic tokens. |
| /// Access using [rewriter] for lazy initialization. |
| TokenStreamRewriter cachedRewriter; |
| |
| TokenStreamRewriter get rewriter { |
| cachedRewriter ??= new TokenStreamRewriter(); |
| return cachedRewriter; |
| } |
| |
| Parser(this.listener); |
| |
| bool get inGenerator { |
| return asyncState == AsyncModifier.AsyncStar || |
| asyncState == AsyncModifier.SyncStar; |
| } |
| |
| bool get inAsync { |
| return asyncState == AsyncModifier.Async || |
| asyncState == AsyncModifier.AsyncStar; |
| } |
| |
| bool get inPlainSync => asyncState == AsyncModifier.Sync; |
| |
| bool get isBreakAllowed => loopState != LoopState.OutsideLoop; |
| |
| bool get isContinueAllowed => loopState == LoopState.InsideLoop; |
| |
| bool get isContinueWithLabelAllowed => loopState != LoopState.OutsideLoop; |
| |
| /// Parse a compilation unit. |
| /// |
| /// This method is only invoked from outside the parser. As a result, this |
| /// method takes the next token to be consumed rather than the last consumed |
| /// token and returns the token after the last consumed token rather than the |
| /// last consumed token. |
| /// |
| /// ``` |
| /// libraryDefinition: |
| /// scriptTag? |
| /// libraryName? |
| /// importOrExport* |
| /// partDirective* |
| /// topLevelDefinition* |
| /// ; |
| /// |
| /// partDeclaration: |
| /// partHeader topLevelDefinition* |
| /// ; |
| /// ``` |
| Token parseUnit(Token token) { |
| listener.beginCompilationUnit(token); |
| int count = 0; |
| DirectiveContext directiveState = new DirectiveContext(); |
| token = syntheticPreviousToken(token); |
| while (!token.next.isEof) { |
| final Token start = token.next; |
| token = parseTopLevelDeclarationImpl(token, directiveState); |
| listener.endTopLevelDeclaration(token.next); |
| count++; |
| if (start == token.next) { |
| // If no progress has been made toward the end of the token stream, |
| // then report an error and skip the current token. |
| token = token.next; |
| reportRecoverableErrorWithToken( |
| token, fasta.templateExpectedDeclaration); |
| listener.handleInvalidTopLevelDeclaration(token); |
| listener.endTopLevelDeclaration(token.next); |
| count++; |
| } |
| } |
| token = token.next; |
| listener.endCompilationUnit(count, token); |
| // Clear fields that could lead to memory leak. |
| cachedRewriter = null; |
| return token; |
| } |
| |
| /// This method exists for analyzer compatibility only |
| /// and will be removed once analyzer/fasta integration is complete. |
| /// |
| /// Similar to [parseUnit], this method parses a compilation unit, |
| /// but stops when it reaches the first declaration or EOF. |
| /// |
| /// This method is only invoked from outside the parser. As a result, this |
| /// method takes the next token to be consumed rather than the last consumed |
| /// token and returns the token after the last consumed token rather than the |
| /// last consumed token. |
| Token parseDirectives(Token token) { |
| listener.beginCompilationUnit(token); |
| int count = 0; |
| DirectiveContext directiveState = new DirectiveContext(); |
| token = syntheticPreviousToken(token); |
| while (!token.next.isEof) { |
| final Token start = token.next; |
| final String value = start.stringValue; |
| final String nextValue = start.next.stringValue; |
| |
| // If a built-in keyword is being used as a function name, then stop. |
| if (identical(nextValue, '.') || |
| identical(nextValue, '<') || |
| identical(nextValue, '(')) { |
| break; |
| } |
| |
| if (identical(token.next.type, TokenType.SCRIPT_TAG)) { |
| directiveState?.checkScriptTag(this, token.next); |
| token = parseScript(token); |
| } else { |
| token = parseMetadataStar(token); |
| if (identical(value, 'import')) { |
| directiveState?.checkImport(this, token); |
| token = parseImport(token); |
| } else if (identical(value, 'export')) { |
| directiveState?.checkExport(this, token); |
| token = parseExport(token); |
| } else if (identical(value, 'library')) { |
| directiveState?.checkLibrary(this, token); |
| token = parseLibraryName(token); |
| } else if (identical(value, 'part')) { |
| token = parsePartOrPartOf(token, directiveState); |
| } else if (identical(value, ';')) { |
| token = start; |
| } else { |
| listener.handleDirectivesOnly(); |
| break; |
| } |
| } |
| listener.endTopLevelDeclaration(token.next); |
| } |
| token = token.next; |
| listener.endCompilationUnit(count, token); |
| // Clear fields that could lead to memory leak. |
| cachedRewriter = null; |
| return token; |
| } |
| |
| /// Parse a top-level declaration. |
| /// |
| /// This method is only invoked from outside the parser. As a result, this |
| /// method takes the next token to be consumed rather than the last consumed |
| /// token and returns the token after the last consumed token rather than the |
| /// last consumed token. |
| Token parseTopLevelDeclaration(Token token) { |
| token = |
| parseTopLevelDeclarationImpl(syntheticPreviousToken(token), null).next; |
| listener.endTopLevelDeclaration(token); |
| return token; |
| } |
| |
| /// ``` |
| /// topLevelDefinition: |
| /// classDefinition | |
| /// enumType | |
| /// typeAlias | |
| /// 'external'? functionSignature ';' | |
| /// 'external'? getterSignature ';' | |
| /// 'external'? setterSignature ';' | |
| /// functionSignature functionBody | |
| /// returnType? 'get' identifier functionBody | |
| /// returnType? 'set' identifier formalParameterList functionBody | |
| /// ('final' | 'const') type? staticFinalDeclarationList ';' | |
| /// variableDeclaration ';' |
| /// ; |
| /// ``` |
| Token parseTopLevelDeclarationImpl( |
| Token token, DirectiveContext directiveState) { |
| if (identical(token.next.type, TokenType.SCRIPT_TAG)) { |
| directiveState?.checkScriptTag(this, token.next); |
| return parseScript(token); |
| } |
| token = parseMetadataStar(token); |
| if (token.next.isTopLevelKeyword) { |
| return parseTopLevelKeywordDeclaration(token, null, directiveState); |
| } |
| Token start = token; |
| // Skip modifiers to find a top level keyword or identifier |
| while (token.next.isModifier) { |
| token = token.next; |
| } |
| Token next = token.next; |
| if (next.isTopLevelKeyword) { |
| Token beforeAbstractToken; |
| Token beforeModifier = start; |
| Token modifier = start.next; |
| while (modifier != next) { |
| if (optional('abstract', modifier) && |
| optional('class', next) && |
| beforeAbstractToken == null) { |
| beforeAbstractToken = beforeModifier; |
| } else { |
| // Recovery |
| reportTopLevelModifierError(modifier, next); |
| } |
| beforeModifier = modifier; |
| modifier = modifier.next; |
| } |
| return parseTopLevelKeywordDeclaration( |
| token, beforeAbstractToken, directiveState); |
| } else if (next.isKeywordOrIdentifier) { |
| // TODO(danrubel): improve parseTopLevelMember |
| // so that we don't parse modifiers twice. |
| directiveState?.checkDeclaration(); |
| return parseTopLevelMemberImpl(start); |
| } else if (start.next != next) { |
| directiveState?.checkDeclaration(); |
| // Handle the edge case where a modifier is being used as an identifier |
| return parseTopLevelMemberImpl(start); |
| } |
| // Recovery |
| if (next.isOperator && optional('(', next.next)) { |
| // This appears to be a top level operator declaration, which is invalid. |
| reportRecoverableError(next, fasta.messageTopLevelOperator); |
| // Insert a synthetic identifier |
| // and continue parsing as a top level function. |
| rewriter.insertTokenAfter( |
| next, |
| new SyntheticStringToken(TokenType.IDENTIFIER, |
| '#synthetic_function_${next.charOffset}', token.charOffset, 0)); |
| return parseTopLevelMemberImpl(next); |
| } |
| // Ignore any preceding modifiers and just report the unexpected token |
| listener.beginTopLevelMember(next); |
| return parseInvalidTopLevelDeclaration(token); |
| } |
| |
| // Report an error for the given modifier preceding a top level keyword |
| // such as `import` or `class`. |
| void reportTopLevelModifierError(Token modifier, Token afterModifiers) { |
| if (optional('const', modifier) && optional('class', afterModifiers)) { |
| reportRecoverableError(modifier, fasta.messageConstClass); |
| } else if (optional('external', modifier)) { |
| if (optional('class', afterModifiers)) { |
| reportRecoverableError(modifier, fasta.messageExternalClass); |
| } else if (optional('enum', afterModifiers)) { |
| reportRecoverableError(modifier, fasta.messageExternalEnum); |
| } else if (optional('typedef', afterModifiers)) { |
| reportRecoverableError(modifier, fasta.messageExternalTypedef); |
| } else { |
| reportRecoverableErrorWithToken( |
| modifier, fasta.templateExtraneousModifier); |
| } |
| } else { |
| reportRecoverableErrorWithToken( |
| modifier, fasta.templateExtraneousModifier); |
| } |
| } |
| |
| /// Parse any top-level declaration that begins with a keyword. |
| Token parseTopLevelKeywordDeclaration( |
| Token token, Token beforeAbstractToken, DirectiveContext directiveState) { |
| Token previous = token; |
| token = token.next; |
| assert(token.isTopLevelKeyword); |
| final String value = token.stringValue; |
| if (identical(value, 'class')) { |
| directiveState?.checkDeclaration(); |
| return parseClassOrNamedMixinApplication(previous, beforeAbstractToken); |
| } else if (identical(value, 'enum')) { |
| directiveState?.checkDeclaration(); |
| return parseEnum(previous); |
| } else if (identical(value, 'typedef')) { |
| Token next = token.next; |
| directiveState?.checkDeclaration(); |
| if (next.isIdentifier || optional("void", next)) { |
| return parseTypedef(previous); |
| } else { |
| return parseTopLevelMemberImpl(previous); |
| } |
| } else { |
| // The remaining top level keywords are built-in keywords |
| // and can be used in a top level declaration |
| // as an identifier such as "abstract<T>() => 0;" |
| // or as a prefix such as "abstract.A b() => 0;". |
| String nextValue = token.next.stringValue; |
| if (identical(nextValue, '(') || |
| identical(nextValue, '<') || |
| identical(nextValue, '.')) { |
| directiveState?.checkDeclaration(); |
| return parseTopLevelMemberImpl(previous); |
| } else if (identical(value, 'library')) { |
| directiveState?.checkLibrary(this, token); |
| return parseLibraryName(previous); |
| } else if (identical(value, 'import')) { |
| directiveState?.checkImport(this, token); |
| return parseImport(previous); |
| } else if (identical(value, 'export')) { |
| directiveState?.checkExport(this, token); |
| return parseExport(previous); |
| } else if (identical(value, 'part')) { |
| return parsePartOrPartOf(previous, directiveState); |
| } |
| } |
| |
| throw "Internal error: Unhandled top level keyword '$value'."; |
| } |
| |
| /// ``` |
| /// libraryDirective: |
| /// 'library' qualified ';' |
| /// ; |
| /// ``` |
| Token parseLibraryName(Token token) { |
| Token libraryKeyword = token.next; |
| assert(optional('library', libraryKeyword)); |
| listener.beginLibraryName(libraryKeyword); |
| token = parseQualified(libraryKeyword, IdentifierContext.libraryName, |
| IdentifierContext.libraryNameContinuation); |
| token = ensureSemicolon(token); |
| listener.endLibraryName(libraryKeyword, token); |
| return token; |
| } |
| |
| /// ``` |
| /// importPrefix: |
| /// 'deferred'? 'as' identifier |
| /// ; |
| /// ``` |
| Token parseImportPrefixOpt(Token token) { |
| Token next = token.next; |
| if (optional('deferred', next) && optional('as', next.next)) { |
| Token deferredToken = next; |
| Token asKeyword = next.next; |
| token = ensureIdentifier( |
| asKeyword, IdentifierContext.importPrefixDeclaration); |
| listener.handleImportPrefix(deferredToken, asKeyword); |
| } else if (optional('as', next)) { |
| Token asKeyword = next; |
| token = ensureIdentifier(next, IdentifierContext.importPrefixDeclaration); |
| listener.handleImportPrefix(null, asKeyword); |
| } else { |
| listener.handleImportPrefix(null, null); |
| } |
| return token; |
| } |
| |
| /// ``` |
| /// importDirective: |
| /// 'import' uri ('if' '(' test ')' uri)* importPrefix? combinator* ';' |
| /// ; |
| /// ``` |
| Token parseImport(Token token) { |
| Token importKeyword = token.next; |
| assert(optional('import', importKeyword)); |
| listener.beginImport(importKeyword); |
| token = ensureLiteralString(importKeyword); |
| Token uri = token; |
| token = parseConditionalUriStar(token); |
| token = parseImportPrefixOpt(token); |
| token = parseCombinatorStar(token).next; |
| if (optional(';', token)) { |
| listener.endImport(importKeyword, token); |
| return token; |
| } else { |
| // Recovery |
| listener.endImport(importKeyword, null); |
| return parseImportRecovery(uri); |
| } |
| } |
| |
| /// Recover given out-of-order clauses in an import directive, where [token] |
| /// is the already parsed uri string literal of that directive. |
| Token parseImportRecovery(Token token) { |
| final primaryListener = listener; |
| final recoveryListener = new ImportRecoveryListener(primaryListener); |
| |
| // Reparse to determine which clauses have already been parsed |
| // but intercept the events so they are not sent to the primary listener |
| listener = recoveryListener; |
| token = parseConditionalUriStar(token); |
| token = parseImportPrefixOpt(token); |
| token = parseCombinatorStar(token); |
| |
| Token firstDeferredKeyword = recoveryListener.deferredKeyword; |
| bool hasPrefix = recoveryListener.asKeyword != null; |
| bool hasCombinator = recoveryListener.hasCombinator; |
| |
| // Update the recovery listener to forward subsequent events |
| // to the primary listener |
| recoveryListener.listener = primaryListener; |
| |
| // Parse additional out-of-order clauses. |
| Token semicolon; |
| do { |
| Token start = token.next; |
| |
| // Check for extraneous token in the middle of an import statement. |
| token = skipUnexpectedTokenOpt( |
| token, const <String>['if', 'deferred', 'as', 'hide', 'show', ';']); |
| |
| // During recovery, clauses are parsed in the same order |
| // and generate the same events as in the parseImport method above. |
| recoveryListener.clear(); |
| token = parseConditionalUriStar(token); |
| if (recoveryListener.ifKeyword != null) { |
| if (firstDeferredKeyword != null) { |
| // TODO(danrubel): report error indicating conditional should |
| // be moved before deferred keyword |
| } else if (hasPrefix) { |
| // TODO(danrubel): report error indicating conditional should |
| // be moved before prefix clause |
| } else if (hasCombinator) { |
| // TODO(danrubel): report error indicating conditional should |
| // be moved before combinators |
| } |
| } |
| |
| if (optional('deferred', token.next) && |
| !optional('as', token.next.next)) { |
| listener.handleImportPrefix(token.next, null); |
| token = token.next; |
| } else { |
| token = parseImportPrefixOpt(token); |
| } |
| if (recoveryListener.deferredKeyword != null) { |
| if (firstDeferredKeyword != null) { |
| reportRecoverableError( |
| recoveryListener.deferredKeyword, fasta.messageDuplicateDeferred); |
| } else { |
| if (hasPrefix) { |
| reportRecoverableError(recoveryListener.deferredKeyword, |
| fasta.messageDeferredAfterPrefix); |
| } |
| firstDeferredKeyword = recoveryListener.deferredKeyword; |
| } |
| } |
| if (recoveryListener.asKeyword != null) { |
| if (hasPrefix) { |
| reportRecoverableError( |
| recoveryListener.asKeyword, fasta.messageDuplicatePrefix); |
| } else { |
| if (hasCombinator) { |
| reportRecoverableError( |
| recoveryListener.asKeyword, fasta.messagePrefixAfterCombinator); |
| } |
| hasPrefix = true; |
| } |
| } |
| |
| token = parseCombinatorStar(token); |
| hasCombinator = hasCombinator || recoveryListener.hasCombinator; |
| |
| if (optional(';', token.next)) { |
| semicolon = token.next; |
| } else if (identical(start, token.next)) { |
| // If no forward progress was made, insert ';' so that we exit the loop. |
| semicolon = ensureSemicolon(token); |
| } |
| listener.handleRecoverImport(semicolon); |
| } while (semicolon == null); |
| |
| if (firstDeferredKeyword != null && !hasPrefix) { |
| reportRecoverableError( |
| firstDeferredKeyword, fasta.messageMissingPrefixInDeferredImport); |
| } |
| |
| return semicolon; |
| } |
| |
| /// ``` |
| /// conditionalUris: |
| /// conditionalUri* |
| /// ; |
| /// ``` |
| Token parseConditionalUriStar(Token token) { |
| listener.beginConditionalUris(token.next); |
| int count = 0; |
| while (optional('if', token.next)) { |
| count++; |
| token = parseConditionalUri(token); |
| } |
| listener.endConditionalUris(count); |
| return token; |
| } |
| |
| /// ``` |
| /// conditionalUri: |
| /// 'if' '(' dottedName ('==' literalString)? ')' uri |
| /// ; |
| /// ``` |
| Token parseConditionalUri(Token token) { |
| Token ifKeyword = token = token.next; |
| listener.beginConditionalUri(ifKeyword); |
| token = expect('if', token); |
| Token leftParen = token; |
| expect('(', token); |
| token = parseDottedName(token).next; |
| Token equalitySign; |
| if (optional('==', token)) { |
| equalitySign = token; |
| token = ensureLiteralString(token).next; |
| } |
| expect(')', token); |
| token = ensureLiteralString(token); |
| listener.endConditionalUri(ifKeyword, leftParen, equalitySign); |
| return token; |
| } |
| |
| /// ``` |
| /// dottedName: |
| /// identifier ('.' identifier)* |
| /// ; |
| /// ``` |
| Token parseDottedName(Token token) { |
| token = ensureIdentifier(token, IdentifierContext.dottedName); |
| Token firstIdentifier = token; |
| int count = 1; |
| while (optional('.', token.next)) { |
| token = ensureIdentifier( |
| token.next, IdentifierContext.dottedNameContinuation); |
| count++; |
| } |
| listener.handleDottedName(count, firstIdentifier); |
| return token; |
| } |
| |
| /// ``` |
| /// exportDirective: |
| /// 'export' uri conditional-uris* combinator* ';' |
| /// ; |
| /// ``` |
| Token parseExport(Token token) { |
| Token exportKeyword = token.next; |
| assert(optional('export', exportKeyword)); |
| listener.beginExport(exportKeyword); |
| token = ensureLiteralString(exportKeyword); |
| token = parseConditionalUriStar(token); |
| token = parseCombinatorStar(token); |
| token = ensureSemicolon(token); |
| listener.endExport(exportKeyword, token); |
| return token; |
| } |
| |
| /// ``` |
| /// combinators: |
| /// (hideCombinator | showCombinator)* |
| /// ; |
| /// ``` |
| Token parseCombinatorStar(Token token) { |
| Token next = token.next; |
| listener.beginCombinators(next); |
| int count = 0; |
| while (true) { |
| String value = next.stringValue; |
| if (identical('hide', value)) { |
| token = parseHide(token); |
| } else if (identical('show', value)) { |
| token = parseShow(token); |
| } else { |
| listener.endCombinators(count); |
| break; |
| } |
| next = token.next; |
| count++; |
| } |
| return token; |
| } |
| |
| /// ``` |
| /// hideCombinator: |
| /// 'hide' identifierList |
| /// ; |
| /// ``` |
| Token parseHide(Token token) { |
| Token hideKeyword = token.next; |
| assert(optional('hide', hideKeyword)); |
| listener.beginHide(hideKeyword); |
| token = parseIdentifierList(hideKeyword); |
| listener.endHide(hideKeyword); |
| return token; |
| } |
| |
| /// ``` |
| /// showCombinator: |
| /// 'show' identifierList |
| /// ; |
| /// ``` |
| Token parseShow(Token token) { |
| Token showKeyword = token.next; |
| assert(optional('show', showKeyword)); |
| listener.beginShow(showKeyword); |
| token = parseIdentifierList(showKeyword); |
| listener.endShow(showKeyword); |
| return token; |
| } |
| |
| /// ``` |
| /// identifierList: |
| /// identifier (',' identifier)* |
| /// ; |
| /// ``` |
| Token parseIdentifierList(Token token) { |
| token = ensureIdentifier(token, IdentifierContext.combinator); |
| int count = 1; |
| while (optional(',', token.next)) { |
| token = ensureIdentifier(token.next, IdentifierContext.combinator); |
| count++; |
| } |
| listener.handleIdentifierList(count); |
| return token; |
| } |
| |
| /// ``` |
| /// typeList: |
| /// type (',' type)* |
| /// ; |
| /// ``` |
| Token parseTypeList(Token token) { |
| listener.beginTypeList(token.next); |
| token = parseType(token); |
| int count = 1; |
| while (optional(',', token.next)) { |
| token = parseType(token.next); |
| count++; |
| } |
| listener.endTypeList(count); |
| return token; |
| } |
| |
| Token parsePartOrPartOf(Token token, DirectiveContext directiveState) { |
| Token next = token.next; |
| assert(optional('part', next)); |
| if (optional('of', next.next)) { |
| directiveState?.checkPartOf(this, next); |
| return parsePartOf(token); |
| } else { |
| directiveState?.checkPart(this, next); |
| return parsePart(token); |
| } |
| } |
| |
| /// ``` |
| /// partDirective: |
| /// 'part' uri ';' |
| /// ; |
| /// ``` |
| Token parsePart(Token token) { |
| Token partKeyword = token.next; |
| assert(optional('part', partKeyword)); |
| listener.beginPart(partKeyword); |
| token = ensureLiteralString(partKeyword); |
| token = ensureSemicolon(token); |
| listener.endPart(partKeyword, token); |
| return token; |
| } |
| |
| /// ``` |
| /// partOfDirective: |
| /// 'part' 'of' (qualified | uri) ';' |
| /// ; |
| /// ``` |
| Token parsePartOf(Token token) { |
| Token partKeyword = token.next; |
| Token ofKeyword = partKeyword.next; |
| assert(optional('part', partKeyword)); |
| assert(optional('of', ofKeyword)); |
| listener.beginPartOf(partKeyword); |
| bool hasName = ofKeyword.next.isIdentifier; |
| if (hasName) { |
| token = parseQualified(ofKeyword, IdentifierContext.partName, |
| IdentifierContext.partNameContinuation); |
| } else { |
| token = ensureLiteralString(ofKeyword); |
| } |
| token = ensureSemicolon(token); |
| listener.endPartOf(partKeyword, ofKeyword, token, hasName); |
| return token; |
| } |
| |
| /// ``` |
| /// metadata: |
| /// annotation* |
| /// ; |
| /// ``` |
| Token parseMetadataStar(Token token) { |
| listener.beginMetadataStar(token.next); |
| int count = 0; |
| while (optional('@', token.next)) { |
| token = parseMetadata(token); |
| count++; |
| } |
| listener.endMetadataStar(count); |
| return token; |
| } |
| |
| /// ``` |
| /// annotation: |
| /// '@' qualified ('.' identifier)? arguments? |
| /// ; |
| /// ``` |
| Token parseMetadata(Token token) { |
| Token atToken = token.next; |
| assert(optional('@', atToken)); |
| listener.beginMetadata(atToken); |
| token = ensureIdentifier(atToken, IdentifierContext.metadataReference); |
| token = |
| parseQualifiedRestOpt(token, IdentifierContext.metadataContinuation); |
| if (optional("<", token.next)) { |
| reportRecoverableError(token.next, fasta.messageMetadataTypeArguments); |
| } |
| token = parseTypeArgumentsOpt(token); |
| Token period = null; |
| if (optional('.', token.next)) { |
| period = token.next; |
| token = ensureIdentifier( |
| period, IdentifierContext.metadataContinuationAfterTypeArguments); |
| } |
| token = parseArgumentsOpt(token); |
| listener.endMetadata(atToken, period, token.next); |
| return token; |
| } |
| |
| /// ``` |
| /// scriptTag: |
| /// '#!' (~NEWLINE)* NEWLINE |
| /// ; |
| /// ``` |
| Token parseScript(Token token) { |
| token = token.next; |
| assert(identical(token.type, TokenType.SCRIPT_TAG)); |
| listener.handleScript(token); |
| return token; |
| } |
| |
| /// ``` |
| /// typeAlias: |
| /// metadata 'typedef' typeAliasBody |
| /// ; |
| /// |
| /// typeAliasBody: |
| /// functionTypeAlias |
| /// ; |
| /// |
| /// functionTypeAlias: |
| /// functionPrefix typeParameters? formalParameterList ';' |
| /// ; |
| /// |
| /// functionPrefix: |
| /// returnType? identifier |
| /// ; |
| /// ``` |
| Token parseTypedef(Token token) { |
| Token typedefKeyword = token.next; |
| assert(optional('typedef', typedefKeyword)); |
| listener.beginFunctionTypeAlias(typedefKeyword); |
| Token equals; |
| Token afterType = parseType(typedefKeyword, TypeContinuation.Typedef); |
| if (afterType == null) { |
| token = ensureIdentifier( |
| typedefKeyword, IdentifierContext.typedefDeclaration); |
| token = parseTypeVariablesOpt(token).next; |
| equals = token; |
| expect('=', token); |
| token = parseType(token); |
| } else { |
| token = ensureIdentifier(afterType, IdentifierContext.typedefDeclaration); |
| token = parseTypeVariablesOpt(token); |
| token = |
| parseFormalParametersRequiredOpt(token, MemberKind.FunctionTypeAlias); |
| } |
| token = ensureSemicolon(token); |
| listener.endFunctionTypeAlias(typedefKeyword, equals, token); |
| return token; |
| } |
| |
| /// Parse a mixin application starting from `with`. Assumes that the first |
| /// type has already been parsed. |
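| /// |
| /// For example, in a named mixin application such as |
| /// |
| ///     class C = S with M1, M2; |
| /// |
| /// this method parses the `with M1, M2` part, `S` having already been |
| /// parsed. |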
| Token parseMixinApplicationRest(Token token) { |
| Token withKeyword = token.next; |
| listener.beginMixinApplication(withKeyword); |
| expect('with', withKeyword); |
| token = parseTypeList(withKeyword); |
| listener.endMixinApplication(withKeyword); |
| return token; |
| } |
| |
| Token parseFormalParametersOpt(Token token, MemberKind kind) { |
| Token next = token.next; |
| if (optional('(', next)) { |
| return parseFormalParameters(token, kind); |
| } else { |
| listener.handleNoFormalParameters(next, kind); |
| return token; |
| } |
| } |
| |
| Token skipFormalParameters(Token token, MemberKind kind) { |
| Token lastConsumed = token; |
| token = token.next; |
| // TODO(ahe): Shouldn't this be `beginFormalParameters`? |
| listener.beginOptionalFormalParameters(token); |
| if (!optional('(', token)) { |
| if (optional(';', token)) { |
| reportRecoverableError(token, fasta.messageExpectedOpenParens); |
| listener.endFormalParameters(0, token, token, kind); |
| return lastConsumed; |
| } |
| listener.endFormalParameters(0, token, token, kind); |
| return reportUnexpectedToken(token); |
| } |
| Token closeBrace = closeBraceTokenFor(token); |
| listener.endFormalParameters(0, token, closeBrace, kind); |
| return closeBrace; |
| } |
| |
| /// Parses the formal parameter list of a function. |
| /// |
| /// If `kind == MemberKind.GeneralizedFunctionType`, then names may be |
| /// omitted (except for named arguments). Otherwise, types may be omitted. |
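| /// |
| /// For example (illustrative only), in a generalized function type such as |
| /// |
| ///     int Function(int, {String s}) f; |
| /// |
| /// the positional parameter name is omitted, whereas in an ordinary |
| /// declaration such as `g(x, y) {}` the parameter types are omitted. |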
| Token parseFormalParametersRequiredOpt(Token token, MemberKind kind) { |
| Token next = token.next; |
| if (!optional('(', next)) { |
| reportRecoverableError(next, missingParameterMessage(kind)); |
| Token replacement = link( |
| new SyntheticBeginToken(TokenType.OPEN_PAREN, next.charOffset), |
| new SyntheticToken(TokenType.CLOSE_PAREN, next.charOffset)); |
| rewriter.insertTokenAfter(token, replacement); |
| } |
| return parseFormalParameters(token, kind); |
| } |
| |
| /// Parses the formal parameter list of a function given that the left |
| /// parenthesis is known to exist. |
| /// |
| /// If `kind == MemberKind.GeneralizedFunctionType`, then names may be |
| /// omitted (except for named arguments). Otherwise, types may be omitted. |
| Token parseFormalParameters(Token token, MemberKind kind) { |
| Token begin = token = token.next; |
| assert(optional('(', token)); |
| listener.beginFormalParameters(begin, kind); |
| int parameterCount = 0; |
| while (true) { |
| Token next = token.next; |
| if (optional(')', next)) { |
| token = next; |
| break; |
| } |
| ++parameterCount; |
| String value = next.stringValue; |
| if (identical(value, '[')) { |
| token = parseOptionalPositionalParameters(token, kind); |
| token = ensureCloseParen(token, begin); |
| break; |
| } else if (identical(value, '{')) { |
| token = parseOptionalNamedParameters(token, kind); |
| token = ensureCloseParen(token, begin); |
| break; |
| } else if (identical(value, '[]')) { |
| // Recovery |
| token = rewriteSquareBrackets(token); |
| token = parseOptionalPositionalParameters(token, kind); |
| token = ensureCloseParen(token, begin); |
| break; |
| } |
| token = parseFormalParameter(token, FormalParameterKind.mandatory, kind); |
| next = token.next; |
| if (optional(',', next)) { |
| token = next; |
| continue; |
| } |
| token = ensureCloseParen(token, begin); |
| break; |
| } |
| assert(optional(')', token)); |
| listener.endFormalParameters(parameterCount, begin, token, kind); |
| return token; |
| } |
| |
| /// Return the message that should be produced when the formal parameters are |
| /// missing. |
| Message missingParameterMessage(MemberKind kind) { |
| if (kind == MemberKind.FunctionTypeAlias) { |
| return fasta.messageMissingTypedefParameters; |
| } else if (kind == MemberKind.NonStaticMethod || |
| kind == MemberKind.StaticMethod) { |
| return fasta.messageMissingMethodParameters; |
| } |
| return fasta.messageMissingFunctionParameters; |
| } |
| |
| /// ``` |
| /// normalFormalParameter: |
| /// functionFormalParameter | |
| /// fieldFormalParameter | |
| /// simpleFormalParameter |
| /// ; |
| /// |
| /// functionFormalParameter: |
| /// metadata 'covariant'? returnType? identifier formalParameterList |
| /// ; |
| /// |
| /// simpleFormalParameter: |
| /// metadata 'covariant'? finalConstVarOrType? identifier | |
| /// ; |
| /// |
| /// fieldFormalParameter: |
| /// metadata finalConstVarOrType? 'this' '.' identifier formalParameterList? |
| /// ; |
| /// ``` |
| Token parseFormalParameter( |
| Token token, FormalParameterKind parameterKind, MemberKind memberKind) { |
| assert(parameterKind != null); |
| token = parseMetadataStar(token); |
| Token next = token.next; |
| listener.beginFormalParameter(next, memberKind); |
| |
| TypeContinuation typeContinuation = |
| typeContinuationFromFormalParameterKind(parameterKind); |
| Token varFinalOrConst; |
| if (isModifier(next)) { |
| int modifierCount = 0; |
| Token covariantToken; |
| if (optional('covariant', next)) { |
| if (memberKind != MemberKind.StaticMethod && |
| memberKind != MemberKind.TopLevelMethod) { |
| covariantToken = token = parseModifier(token); |
| ++modifierCount; |
| next = token.next; |
| } |
| } |
| |
| if (isModifier(next)) { |
| if (optional('var', next)) { |
| typeContinuation = typeContinuationAfterVar(typeContinuation); |
| varFinalOrConst = token = parseModifier(token); |
| ++modifierCount; |
| next = token.next; |
| } else if (optional('final', next)) { |
| varFinalOrConst = token = parseModifier(token); |
| ++modifierCount; |
| next = token.next; |
| } |
| |
| if (isModifier(next)) { |
| // Recovery |
| ModifierRecoveryContext modifierContext = new ModifierRecoveryContext( |
| this, memberKind, parameterKind, false, typeContinuation); |
| token = modifierContext.parseRecovery(token, |
| covariantToken: covariantToken, varFinalOrConst: varFinalOrConst); |
| |
| modifierCount = modifierContext.modifierCount; |
| covariantToken = modifierContext.covariantToken; |
| varFinalOrConst = modifierContext.varFinalOrConst; |
| |
| memberKind = modifierContext.memberKind; |
| typeContinuation = modifierContext.typeContinuation; |
| varFinalOrConst = modifierContext.varFinalOrConst; |
| modifierContext = null; |
| } |
| } |
| listener.handleModifiers(modifierCount); |
| } else { |
| listener.handleModifiers(0); |
| } |
| |
| return parseType( |
| token, typeContinuation, null, memberKind, varFinalOrConst); |
| } |
| |
| /// ``` |
| /// defaultFormalParameter: |
| /// normalFormalParameter ('=' expression)? |
| /// ; |
| /// ``` |
| Token parseOptionalPositionalParameters(Token token, MemberKind kind) { |
| Token begin = token = token.next; |
| assert(optional('[', token)); |
| listener.beginOptionalFormalParameters(begin); |
| int parameterCount = 0; |
| while (true) { |
| Token next = token.next; |
| if (optional(']', next)) { |
| break; |
| } |
| token = parseFormalParameter( |
| token, FormalParameterKind.optionalPositional, kind); |
| next = token.next; |
| ++parameterCount; |
| if (!optional(',', next)) { |
| if (!optional(']', next)) { |
| // Recovery |
| reportRecoverableError( |
| next, fasta.templateExpectedButGot.withArguments(']')); |
| // Scanner guarantees a closing bracket. |
| next = begin.endGroup; |
| while (token.next != next) { |
| token = token.next; |
| } |
| } |
| break; |
| } |
| token = next; |
| } |
| if (parameterCount == 0) { |
| token = rewriteAndRecover( |
| token, |
| fasta.messageEmptyOptionalParameterList, |
| new SyntheticStringToken( |
| TokenType.IDENTIFIER, '', token.next.charOffset, 0)); |
| token = parseFormalParameter( |
| token, FormalParameterKind.optionalPositional, kind); |
| ++parameterCount; |
| } |
| token = token.next; |
| assert(optional(']', token)); |
| listener.endOptionalFormalParameters(parameterCount, begin, token); |
| return token; |
| } |
| |
| /// ``` |
| /// defaultNamedParameter: |
| /// normalFormalParameter ('=' expression)? | |
| /// normalFormalParameter (':' expression)? |
| /// ; |
| /// ``` |
| Token parseOptionalNamedParameters(Token token, MemberKind kind) { |
| Token begin = token = token.next; |
| assert(optional('{', token)); |
| listener.beginOptionalFormalParameters(begin); |
| int parameterCount = 0; |
| while (true) { |
| Token next = token.next; |
| if (optional('}', next)) { |
| break; |
| } |
| token = |
| parseFormalParameter(token, FormalParameterKind.optionalNamed, kind); |
| next = token.next; |
| ++parameterCount; |
| if (!optional(',', next)) { |
| if (!optional('}', next)) { |
| // Recovery |
| reportRecoverableError( |
| next, fasta.templateExpectedButGot.withArguments('}')); |
| // Scanner guarantees a closing bracket. |
| next = begin.endGroup; |
| while (token.next != next) { |
| token = token.next; |
| } |
| } |
| break; |
| } |
| token = next; |
| } |
| if (parameterCount == 0) { |
| token = rewriteAndRecover( |
| token, |
| fasta.messageEmptyNamedParameterList, |
| new SyntheticStringToken( |
| TokenType.IDENTIFIER, '', token.next.charOffset, 0)); |
| token = |
| parseFormalParameter(token, FormalParameterKind.optionalNamed, kind); |
| ++parameterCount; |
| } |
| token = token.next; |
| assert(optional('}', token)); |
| listener.endOptionalFormalParameters(parameterCount, begin, token); |
| return token; |
| } |
| |
| /// Skip over the `Function` type parameter. |
| /// For example, `Function<E>(int foo)` or `Function(foo)` or just `Function`. |
| Token skipGenericFunctionType(Token token) { |
| Token last = token; |
| Token next = token.next; |
| while (optional('Function', next)) { |
| last = token; |
| token = next; |
| next = token.next; |
| if (optional('<', next)) { |
| next = next.endGroup; |
| if (next == null) { |
| // TODO(danrubel): Consider better recovery |
| // because this is probably a type reference. |
| return token; |
| } |
| token = next; |
| next = token.next; |
| } |
| if (optional('(', next)) { |
| token = next.endGroup; |
| next = token.next; |
| } |
| } |
| if (next.isKeywordOrIdentifier) { |
| return token; |
| } else { |
| return last; |
| } |
| } |
| |
| /// If the token after [token] begins a valid type reference |
| /// or looks like a valid type reference, then return the last token |
| /// in that type reference, otherwise return [token]. |
| /// |
| /// For example, it is an error when a built-in keyword is being used as a type, |
| /// as in `abstract<t> foo`. In situations such as this, return the last |
| /// token in that type reference and assume the caller will report the error |
| /// and recover. |
| Token skipTypeReferenceOpt(Token token, bool inDeclaration) { |
| final Token beforeStart = token; |
| Token next = token.next; |
| |
| TokenType type = next.type; |
| bool looksLikeTypeRef = false; |
| if (type != TokenType.IDENTIFIER) { |
| String value = next.stringValue; |
| if (identical(value, 'get') || identical(value, 'set')) { |
| // No type reference. |
| return beforeStart; |
| } else if (identical(value, 'factory') || identical(value, 'operator')) { |
| Token next2 = next.next; |
| if (!optional('<', next2) || next2.endGroup == null) { |
| // No type reference. |
| return beforeStart; |
| } |
| // Even though built-ins cannot be used as a type, |
| // it looks like it's being used as such. |
| } else if (identical(value, 'void')) { |
| // Found type reference. |
| looksLikeTypeRef = true; |
| } else if (identical(value, 'Function')) { |
| // Found type reference. |
| return skipGenericFunctionType(token); |
| } else if (identical(value, 'typedef')) { |
| // `typedef` can be used as a prefix. |
| // For example: `typedef.A x = new typedef.A();` |
| if (!optional('.', next.next)) { |
| // No type reference. |
| return beforeStart; |
| } |
| } else if (!next.isIdentifier) { |
| // No type reference. |
| return beforeStart; |
| } |
| } |
| token = next; |
| next = token.next; |
| |
| if (optional('.', next)) { |
| token = next; |
| next = token.next; |
| if (next.type != TokenType.IDENTIFIER) { |
| String value = next.stringValue; |
| if (identical(value, '<')) { |
| // Found a type reference, but missing an identifier after the period. |
| rewriteAndRecover( |
| token, |
| fasta.templateExpectedIdentifier.withArguments(next), |
| new SyntheticStringToken( |
| TokenType.IDENTIFIER, '', next.charOffset, 0)); |
| // Fall through to continue processing as a type reference. |
| next = token.next; |
| } else if (!next.isIdentifier) { |
| if (identical(value, 'void')) { |
| looksLikeTypeRef = true; |
| // Found a type reference, but the period |
| // and preceding identifier are both invalid. |
| reportRecoverableErrorWithToken( |
| token, fasta.templateUnexpectedToken); |
| // Fall through to continue processing as a type reference. |
| } else { |
| // No type reference. |
| return beforeStart; |
| } |
| } |
| } |
| token = next; |
| next = token.next; |
| } |
| |
| if (optional('<', next)) { |
| token = next.endGroup; |
| if (token == null) { |
| // TODO(danrubel): Consider better recovery |
| // because this is probably a type reference. |
| return beforeStart; |
| } |
| next = token.next; |
| if (optional('(', next)) { |
| // No type reference - e.g. `f<E>()`. |
| return beforeStart; |
| } |
| } |
| |
| if (optional('Function', next)) { |
| looksLikeTypeRef = true; |
| token = skipGenericFunctionType(token); |
| next = token.next; |
| } |
| |
| return next.isIdentifier || |
| (inDeclaration && next.isOperator && !optional('=', next)) || |
| looksLikeTypeRef |
| ? token |
| : beforeStart; |
| } |
| |
| /// Returns `true` if [token] matches '<' type (',' type)* '>' '(', and |
| /// otherwise returns `false`. The final '(' is not part of the grammar |
| /// construct `typeArguments`, but it is required here such that type |
| /// arguments in generic method invocations can be recognized, and as few |
| /// other constructs as possible will pass (e.g., 'a < C, D > 3'). |
| bool isValidMethodTypeArguments(Token token) { |
| Token Function(Token token) tryParseType; |
| |
| /// Returns token after match if [token] matches '<' type (',' type)* '>' |
| /// '(', and otherwise returns null. Does not produce listener events. With |
| /// respect to the final '(', please see the description of |
| /// [isValidMethodTypeArguments]. |
| Token tryParseMethodTypeArguments(Token token) { |
| if (!identical(token.kind, LT_TOKEN)) return null; |
| Token endToken = closeBraceTokenFor(token); |
| if (endToken == null || |
| !identical(endToken.next.kind, OPEN_PAREN_TOKEN)) { |
| return null; |
| } |
| token = tryParseType(token.next); |
| while (token != null && identical(token.kind, COMMA_TOKEN)) { |
| token = tryParseType(token.next); |
| } |
| if (token == null || !identical(token.kind, GT_TOKEN)) return null; |
| return token.next; |
| } |
| |
| /// Returns token after match if [token] matches identifier ('.' |
| /// identifier)?, and otherwise returns null. Does not produce listener |
| /// events. |
| Token tryParseQualified(Token token) { |
| if (!isValidTypeReference(token)) return null; |
| token = token.next; |
| if (!identical(token.kind, PERIOD_TOKEN)) return token; |
| token = token.next; |
| if (!identical(token.kind, IDENTIFIER_TOKEN)) return null; |
| return token.next; |
| } |
| |
| /// Returns token after match if [token] matches '<' type (',' type)* '>', |
| /// and otherwise returns null. Does not produce listener events. The final |
| /// '>' may be the first character in a '>>' token, in which case a |
| /// synthetic '>' token is created and returned, representing the second |
| /// '>' in the '>>' token. |
| Token tryParseNestedTypeArguments(Token token) { |
| if (!identical(token.kind, LT_TOKEN)) return null; |
| // If the initial '<' matches the first '>' in a '>>' token, we will have |
| // `token.endGroup == null`, so we cannot rely on `token.endGroup == null` |
| // to imply that the match must fail. Hence no `token.endGroup == null` |
| // test here. |
| token = tryParseType(token.next); |
| while (token != null && identical(token.kind, COMMA_TOKEN)) { |
| token = tryParseType(token.next); |
| } |
| if (token == null) return null; |
| if (identical(token.kind, GT_TOKEN)) return token.next; |
| if (!identical(token.kind, GT_GT_TOKEN)) return null; |
| // [token] is '>>' of which the final '>' that we are parsing is the first |
| // character. In order to keep the parsing process on track we must return |
| // a synthetic '>' corresponding to the second character of that '>>'. |
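| // For example, in `f<List<int>>(x)` the inner type argument list ends at |
| // the first '>' of the trailing '>>' token, so that second '>' is |
| // materialized here as a separate synthetic token. |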
| Token syntheticToken = new Token(TokenType.GT, token.charOffset + 1); |
| syntheticToken.next = token.next; |
| return syntheticToken; |
| } |
| |
| /// Returns token after match if [token] matches typeName typeArguments?, |
| /// and otherwise returns null. Does not produce listener events. |
| tryParseType = (Token token) { |
| token = tryParseQualified(token); |
| if (token == null) return null; |
| Token tokenAfterQualified = token; |
| token = tryParseNestedTypeArguments(token); |
| return token == null ? tokenAfterQualified : token; |
| }; |
| |
| return tryParseMethodTypeArguments(token) != null; |
| } |
| |
| /// ``` |
| /// qualified: |
| /// identifier qualifiedRest* |
| /// ; |
| /// ``` |
| Token parseQualified(Token token, IdentifierContext context, |
| IdentifierContext continuationContext) { |
| token = ensureIdentifier(token, context); |
| while (optional('.', token.next)) { |
| token = parseQualifiedRest(token, continuationContext); |
| } |
| return token; |
| } |
| |
| /// ``` |
| /// qualifiedRestOpt: |
| /// qualifiedRest? |
| /// ; |
| /// ``` |
| Token parseQualifiedRestOpt( |
| Token token, IdentifierContext continuationContext) { |
| if (optional('.', token.next)) { |
| return parseQualifiedRest(token, continuationContext); |
| } else { |
| return token; |
| } |
| } |
| |
| /// ``` |
| /// qualifiedRest: |
| /// '.' identifier |
| /// ; |
| /// ``` |
| Token parseQualifiedRest(Token token, IdentifierContext context) { |
| token = token.next; |
| assert(optional('.', token)); |
| Token period = token; |
| token = ensureIdentifier(token, context); |
| listener.handleQualified(period); |
| return token; |
| } |
| |
| Token skipBlock(Token token) { |
| token = ensureBlock(token, null); |
| Token closeBrace = closeBraceTokenFor(token); |
| if (closeBrace == null || |
| !identical(closeBrace.kind, $CLOSE_CURLY_BRACKET)) { |
| return reportUnmatchedToken(token).next; |
| } |
| return closeBrace; |
| } |
| |
| /// ``` |
| /// enumType: |
| /// metadata 'enum' id '{' id [',' id]* [','] '}' |
| /// ; |
| /// ``` |
| Token parseEnum(Token token) { |
| Token enumKeyword = token.next; |
| assert(optional('enum', enumKeyword)); |
| listener.beginEnum(enumKeyword); |
| token = |
| ensureIdentifier(enumKeyword, IdentifierContext.enumDeclaration).next; |
| Token leftBrace = token; |
| expect('{', token); |
| int count = 0; |
| do { |
| Token next = token.next; |
| if (optional('}', next)) { |
| token = next; |
| if (count == 0) { |
| reportRecoverableError(token, fasta.messageEnumDeclarationEmpty); |
| } |
| break; |
| } |
| token = parseMetadataStar(token); |
| if (!identical(token.next, next)) { |
| listener.handleRecoverableError( |
| fasta.messageAnnotationOnEnumConstant, next, token); |
| } |
| token = |
| ensureIdentifier(token, IdentifierContext.enumValueDeclaration).next; |
| count++; |
| } while (optional(',', token)); |
| expect('}', token); |
| listener.endEnum(enumKeyword, leftBrace, count); |
| return token; |
| } |
| |
| Token parseClassOrNamedMixinApplication( |
| Token token, Token beforeAbstractToken) { |
| token = token.next; |
| listener.beginClassOrNamedMixinApplication(token); |
| Token begin = beforeAbstractToken?.next ?? token; |
| if (beforeAbstractToken != null) { |
| token = parseModifier(beforeAbstractToken).next; |
| listener.handleModifiers(1); |
| } else { |
| listener.handleModifiers(0); |
| } |
| Token classKeyword = token; |
| expect("class", token); |
| Token name = |
| ensureIdentifier(token, IdentifierContext.classOrNamedMixinDeclaration); |
| token = parseTypeVariablesOpt(name); |
| if (optional('=', token.next)) { |
| listener.beginNamedMixinApplication(begin, name); |
| return parseNamedMixinApplication(token, begin, classKeyword); |
| } else { |
| listener.beginClassDeclaration(begin, name); |
| return parseClass(token, begin, classKeyword); |
| } |
| } |
| |
| Token parseNamedMixinApplication( |
| Token token, Token begin, Token classKeyword) { |
| Token equals = token = token.next; |
| assert(optional('=', equals)); |
| token = parseType(token); |
| token = parseMixinApplicationRest(token); |
| Token implementsKeyword = null; |
| if (optional('implements', token.next)) { |
| implementsKeyword = token.next; |
| token = parseTypeList(implementsKeyword); |
| } |
| token = ensureSemicolon(token); |
| listener.endNamedMixinApplication( |
| begin, classKeyword, equals, implementsKeyword, token); |
| return token; |
| } |
| |
| /// Parse the portion of a class declaration (not a mixin application) that |
| /// follows the end of the type parameters. |
| /// |
| /// ``` |
| /// classDefinition: |
| /// metadata abstract? 'class' identifier typeParameters? |
| /// (superclass mixins?)? interfaces? |
| /// '{' (metadata classMemberDefinition)* '}' | |
| /// metadata abstract? 'class' mixinApplicationClass |
| /// ; |
| /// ``` |
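| ///
| /// For example, given
| /// ```
| /// class C<T> extends B<T> with M implements I { }
| /// ```
| /// this method parses everything after the type parameters `<T>`.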
| Token parseClass(Token token, Token begin, Token classKeyword) { |
| Token start = token; |
| token = parseClassHeaderOpt(token, begin, classKeyword); |
| if (!optional('{', token.next)) { |
| // Recovery |
| token = parseClassHeaderRecovery(start, begin, classKeyword); |
| ensureBlock(token, fasta.templateExpectedClassBody); |
| } |
| token = parseClassBody(token); |
| listener.endClassDeclaration(begin, token); |
| return token; |
| } |
| |
| Token parseClassHeaderOpt(Token token, Token begin, Token classKeyword) { |
| token = parseClassExtendsOpt(token); |
| token = parseClassImplementsOpt(token); |
| Token nativeToken; |
| if (optional('native', token.next)) { |
| nativeToken = token.next; |
| token = parseNativeClause(token); |
| } |
| listener.handleClassHeader(begin, classKeyword, nativeToken); |
| return token; |
| } |
| |
| /// Recover from out-of-order clauses in a class header.
| Token parseClassHeaderRecovery(Token token, Token begin, Token classKeyword) { |
| final primaryListener = listener; |
| final recoveryListener = new ClassHeaderRecoveryListener(primaryListener); |
| |
| // Reparse to determine which clauses have already been parsed |
| // but intercept the events so they are not sent to the primary listener. |
| listener = recoveryListener; |
| token = parseClassHeaderOpt(token, begin, classKeyword); |
| bool hasExtends = recoveryListener.extendsKeyword != null; |
| bool hasImplements = recoveryListener.implementsKeyword != null; |
| Token withKeyword = recoveryListener.withKeyword; |
| |
| // Update the recovery listener to forward subsequent events |
| // to the primary listener. |
| recoveryListener.listener = primaryListener; |
| |
| // Parse additional out-of-order clauses |
| Token start; |
| do { |
| start = token; |
| |
| // Check for extraneous token in the middle of a class header. |
| token = skipUnexpectedTokenOpt( |
| token, const <String>['extends', 'with', 'implements', '{']); |
| |
| // During recovery, clauses are parsed in the same order |
| // and generate the same events as in the parseClassHeaderOpt method above.
| recoveryListener.clear(); |
| Token next = token.next; |
| if (optional('with', next)) { |
| // If there is a `with` clause without a preceding `extends` clause |
| // then insert a synthetic `extends` clause and parse both clauses. |
| Token extendsKeyword = |
| new SyntheticKeywordToken(Keyword.EXTENDS, next.offset); |
| Token superclassToken = new SyntheticStringToken( |
| TokenType.IDENTIFIER, 'Object', next.offset, 0); |
| rewriter.insertTokenAfter(token, extendsKeyword); |
| rewriter.insertTokenAfter(extendsKeyword, superclassToken); |
| token = parseType(extendsKeyword); |
| token = parseMixinApplicationRest(token); |
| listener.handleClassExtends(extendsKeyword); |
| } else { |
| token = parseClassExtendsOpt(token); |
| |
| if (recoveryListener.extendsKeyword != null) { |
| if (hasExtends) { |
| reportRecoverableError( |
| recoveryListener.extendsKeyword, fasta.messageMultipleExtends); |
| } else { |
| if (withKeyword != null) { |
| reportRecoverableError(recoveryListener.extendsKeyword, |
| fasta.messageWithBeforeExtends); |
| } else if (hasImplements) { |
| reportRecoverableError(recoveryListener.extendsKeyword, |
| fasta.messageImplementsBeforeExtends); |
| } |
| hasExtends = true; |
| } |
| } |
| } |
| |
| if (recoveryListener.withKeyword != null) { |
| if (withKeyword != null) { |
| reportRecoverableError( |
| recoveryListener.withKeyword, fasta.messageMultipleWith); |
| } else { |
| if (hasImplements) { |
| reportRecoverableError(recoveryListener.withKeyword, |
| fasta.messageImplementsBeforeWith); |
| } |
| withKeyword = recoveryListener.withKeyword; |
| } |
| } |
| |
| token = parseClassImplementsOpt(token); |
| |
| if (recoveryListener.implementsKeyword != null) { |
| if (hasImplements) { |
| reportRecoverableError(recoveryListener.implementsKeyword, |
| fasta.messageMultipleImplements); |
| } else { |
| hasImplements = true; |
| } |
| } |
| |
| listener.handleRecoverClassHeader(); |
| |
| // Exit if a class body is detected, or if no progress has been made |
| } while (!optional('{', token.next) && start != token); |
| |
| if (withKeyword != null && !hasExtends) { |
| reportRecoverableError(withKeyword, fasta.messageWithWithoutExtends); |
| } |
| |
| listener = primaryListener; |
| return token; |
| } |
| |
| Token parseClassExtendsOpt(Token token) { |
| Token next = token.next; |
| if (optional('extends', next)) { |
| Token extendsKeyword = next; |
| token = parseType(next); |
| if (optional('with', token.next)) {
| token = parseMixinApplicationRest(token);
| }
| listener.handleClassExtends(extendsKeyword); |
| } else { |
| listener.handleNoType(token); |
| listener.handleClassExtends(null); |
| } |
| return token; |
| } |
| |
| /// ``` |
| /// implementsClause: |
| /// 'implements' typeName (',' typeName)* |
| /// ; |
| /// ``` |
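| ///
| /// For example, `implements Comparable<C>, Pattern` in
| /// ```
| /// class C implements Comparable<C>, Pattern { }
| /// ```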
| Token parseClassImplementsOpt(Token token) { |
| Token implementsKeyword; |
| int interfacesCount = 0; |
| if (optional('implements', token.next)) { |
| implementsKeyword = token.next; |
| do { |
| token = parseType(token.next); |
| ++interfacesCount; |
| } while (optional(',', token.next)); |
| } |
| listener.handleClassImplements(implementsKeyword, interfacesCount); |
| return token; |
| } |
| |
| Token parseStringPart(Token token) { |
| token = token.next; |
| if (token.kind != STRING_TOKEN) { |
| token = |
| reportUnrecoverableErrorWithToken(token, fasta.templateExpectedString) |
| .next; |
| } |
| listener.handleStringPart(token); |
| return token; |
| } |
| |
| /// Insert a synthetic identifier after the given [token] and create an error |
| /// message based on the given [context]. Return the synthetic identifier that |
| /// was inserted. |
| Token insertSyntheticIdentifier(Token token, IdentifierContext context, |
| {Message message, Token messageOnToken}) { |
| Token next = token.next; |
| reportRecoverableError(messageOnToken ?? next, |
| message ?? context.recoveryTemplate.withArguments(next)); |
| Token identifier = new SyntheticStringToken( |
| TokenType.IDENTIFIER, |
| context == IdentifierContext.methodDeclaration || |
| context == IdentifierContext.topLevelVariableDeclaration || |
| context == IdentifierContext.fieldDeclaration |
| ? '#synthetic_identifier_${next.offset}' |
| : '', |
| next.charOffset, |
| 0); |
| rewriter.insertTokenAfter(token, identifier); |
| return token.next; |
| } |
| |
| /// Parse a simple identifier at the given [token], and return the identifier |
| /// that was parsed. |
| /// |
| /// If the token is not an identifier, or is not appropriate for use as an |
| /// identifier in the given [context], create a synthetic identifier, report |
| /// an error, and return the synthetic identifier. |
| Token ensureIdentifier(Token token, IdentifierContext context) { |
| Token next = token.next; |
| if (!next.isIdentifier) { |
| if (optional("void", next)) { |
| reportRecoverableError(next, fasta.messageInvalidVoid); |
| token = next; |
| } else if (next is ErrorToken) { |
| // TODO(brianwilkerson): This preserves the current semantics, but the |
| // listener should not be recovering from this case, so this needs to be |
| // reworked to recover in this method (probably inside the outermost |
| // if statement). |
| token = |
| reportUnrecoverableErrorWithToken(next, context.recoveryTemplate) |
| .next; |
| } else if (isIdentifierForRecovery(next, context)) { |
| reportRecoverableErrorWithToken(next, context.recoveryTemplate); |
| token = next; |
| } else if (isPostIdentifierForRecovery(next, context) || |
| isStartOfNextSibling(next, context)) { |
| token = insertSyntheticIdentifier(token, context); |
| } else if (next.isKeywordOrIdentifier) { |
| reportRecoverableErrorWithToken(next, context.recoveryTemplate); |
| token = next; |
| } else if (next.isUserDefinableOperator && |
| context == IdentifierContext.methodDeclaration) { |
| // If this is a user definable operator, then assume that the user has |
| // forgotten the `operator` keyword. |
| token = rewriteAndRecover(token, fasta.messageMissingOperatorKeyword, |
| new SyntheticKeywordToken(Keyword.OPERATOR, next.offset)); |
| return parseOperatorName(token); |
| } else { |
| reportRecoverableErrorWithToken(next, context.recoveryTemplate); |
| if (context == IdentifierContext.methodDeclaration ||
| context == IdentifierContext.topLevelVariableDeclaration ||
| context == IdentifierContext.fieldDeclaration) {
| // Since the token is not a keyword or identifier, consume it to
| // ensure forward progress in parseMethod and parseField.
| token = next.next;
| // Supply a non-empty name so that it does not accidentally
| // match the default constructor.
| token = insertSyntheticIdentifier(next, context);
| } else if (context == IdentifierContext.constructorReference) { |
| token = insertSyntheticIdentifier(token, context); |
| } else { |
| token = next; |
| } |
| } |
| } else if (next.type.isBuiltIn && !context.isBuiltInIdentifierAllowed) { |
| if (context.inDeclaration) { |
| reportRecoverableErrorWithToken( |
| next, fasta.templateBuiltInIdentifierInDeclaration); |
| } else if (!optional("dynamic", next)) { |
| if (context == IdentifierContext.typeReference && |
| optional('.', next.next)) { |
| // Built-in identifiers may be used as a prefix.
| } else { |
| reportRecoverableErrorWithToken( |
| next, fasta.templateBuiltInIdentifierAsType); |
| } |
| } |
| token = next; |
| } else if (!inPlainSync && next.type.isPseudo) { |
| if (optional('await', next)) { |
| reportRecoverableError(next, fasta.messageAwaitAsIdentifier); |
| } else if (optional('yield', next)) { |
| reportRecoverableError(next, fasta.messageYieldAsIdentifier); |
| } else if (optional('async', next)) { |
| reportRecoverableError(next, fasta.messageAsyncAsIdentifier); |
| } |
| token = next; |
| } else { |
| token = next; |
| } |
| listener.handleIdentifier(token, context); |
| return token; |
| } |
| |
| /// Return `true` if the given [token] should be treated like the start of |
| /// an expression for the purposes of recovery. |
| bool isExpressionStartForRecovery(Token next) => |
| next.isKeywordOrIdentifier || |
| next.type == TokenType.DOUBLE || |
| next.type == TokenType.HASH || |
| next.type == TokenType.HEXADECIMAL || |
| next.type == TokenType.IDENTIFIER || |
| next.type == TokenType.INT || |
| next.type == TokenType.STRING || |
| optional('{', next) || |
| optional('(', next) || |
| optional('[', next) || |
| optional('[]', next) || |
| optional('<', next) || |
| optional('!', next) || |
| optional('-', next) || |
| optional('~', next) || |
| optional('++', next) || |
| optional('--', next); |
| |
| /// Return `true` if the given [token] should be treated like an identifier in |
| /// the given [context] for the purposes of recovery. |
| bool isIdentifierForRecovery(Token token, IdentifierContext context) { |
| if (!token.type.isKeyword) { |
| return false; |
| } |
| return isPostIdentifierForRecovery(token.next, context); |
| } |
| |
| /// Return `true` if the given [token] appears to be a token that would be |
| /// expected after an identifier in the given [context]. |
| bool isPostIdentifierForRecovery(Token token, IdentifierContext context) { |
| if (token.isEof) { |
| return true; |
| } |
| List<String> followingValues; |
| if (context == IdentifierContext.classOrNamedMixinDeclaration) { |
| followingValues = ['<', 'extends', 'with', 'implements', '{']; |
| } else if (context == IdentifierContext.combinator) { |
| followingValues = [';']; |
| } else if (context == IdentifierContext.constructorReferenceContinuation) { |
| followingValues = ['.', ',', '(', ')', '[', ']', '}', ';']; |
| } else if (context == IdentifierContext.fieldDeclaration) { |
| followingValues = [';', '=', ',', '}']; |
| } else if (context == IdentifierContext.enumDeclaration) { |
| followingValues = ['{']; |
| } else if (context == IdentifierContext.enumValueDeclaration) { |
| followingValues = [',', '}']; |
| } else if (context == IdentifierContext.expression || |
| context == IdentifierContext.expressionContinuation) { |
| if (token.isOperator) { |
| return true; |
| } |
| followingValues = [ |
| '.', |
| ',', |
| '(', |
| ')', |
| '[', |
| ']', |
| '}', |
| '?', |
| ':', |
| 'as', |
| 'is', |
| ';' |
| ]; |
| } else if (context == IdentifierContext.formalParameterDeclaration) { |
| followingValues = [':', '=', ',', '(', ')', '[', ']', '{', '}']; |
| } else if (context == IdentifierContext.importPrefixDeclaration) { |
| followingValues = [';', 'hide', 'show', 'deferred', 'as']; |
| } else if (context == IdentifierContext.labelDeclaration) { |
| followingValues = [':']; |
| } else if (context == IdentifierContext.libraryName || |
| context == IdentifierContext.libraryNameContinuation) { |
| followingValues = ['.', ';']; |
| } else if (context == IdentifierContext.literalSymbol || |
| context == IdentifierContext.literalSymbolContinuation) { |
| followingValues = ['.', ';']; |
| } else if (context == IdentifierContext.localAccessorDeclaration) { |
| followingValues = ['(', '{', '=>']; |
| } else if (context == IdentifierContext.localFunctionDeclaration || |
| context == IdentifierContext.localFunctionDeclarationContinuation) { |
| followingValues = ['.', '(', '{', '=>']; |
| } else if (context == IdentifierContext.localVariableDeclaration) { |
| followingValues = [';', '=', ',', '}']; |
| } else if (context == IdentifierContext.methodDeclaration || |
| context == IdentifierContext.methodDeclarationContinuation) { |
| followingValues = ['.', '(', '{', '=>']; |
| } else if (context == IdentifierContext.topLevelFunctionDeclaration) { |
| followingValues = ['(', '{', '=>']; |
| } else if (context == IdentifierContext.topLevelVariableDeclaration) { |
| followingValues = [';', '=', ',']; |
| } else if (context == IdentifierContext.typedefDeclaration) { |
| followingValues = ['(', '<', ';']; |
| } else if (context == IdentifierContext.typeReference || |
| context == IdentifierContext.typeReferenceContinuation) { |
| followingValues = ['>', ')', ']', '}', ',', ';']; |
| } else if (context == IdentifierContext.typeVariableDeclaration) { |
| followingValues = ['<', '>', ';', '}']; |
| } else { |
| return false; |
| } |
| for (String tokenValue in followingValues) { |
| if (optional(tokenValue, token)) { |
| return true; |
| } |
| } |
| return false; |
| } |
| |
| /// Return `true` if the given [token] appears to be the start of a (virtual) |
| /// node that would be a sibling of the current node or one of its parents. |
| /// The type of the current node is suggested by the given [context]. |
| bool isStartOfNextSibling(Token token, IdentifierContext context) { |
| if (!token.type.isKeyword) { |
| return false; |
| } |
| |
| List<String> classMemberKeywords() => |
| <String>['const', 'final', 'var', 'void']; |
| List<String> statementKeywords() => <String>[ |
| 'const', |
| 'do', |
| 'final', |
| 'if', |
| 'switch', |
| 'try', |
| 'var', |
| 'void', |
| 'while' |
| ]; |
| List<String> topLevelKeywords() => <String>[ |
| 'class', |
| 'const', |
| 'enum', |
| 'export', |
| 'final', |
| 'import', |
| 'library', |
| 'part', |
| 'typedef', |
| 'var', |
| 'void' |
| ]; |
| |
| // TODO(brianwilkerson): At the moment, this test is entirely based on data |
| // that can be represented declaratively. If that proves to be sufficient, |
| // then this data can be moved into a field in IdentifierContext and we |
| // could create a method to test whether a given token matches one of the |
| // patterns. |
| List<String> initialKeywords; |
| if (context == IdentifierContext.classOrNamedMixinDeclaration) { |
| initialKeywords = topLevelKeywords(); |
| } else if (context == IdentifierContext.fieldDeclaration) { |
| initialKeywords = classMemberKeywords(); |
| } else if (context == IdentifierContext.enumDeclaration) { |
| initialKeywords = topLevelKeywords(); |
| } else if (context == IdentifierContext.formalParameterDeclaration) { |
| initialKeywords = topLevelKeywords() |
| ..addAll(classMemberKeywords()) |
| ..addAll(statementKeywords()) |
| ..add('covariant'); |
| } else if (context == IdentifierContext.importPrefixDeclaration) { |
| initialKeywords = topLevelKeywords(); |
| } else if (context == IdentifierContext.labelDeclaration) { |
| initialKeywords = statementKeywords(); |
| } else if (context == IdentifierContext.localAccessorDeclaration) { |
| initialKeywords = statementKeywords(); |
| } else if (context == IdentifierContext.localFunctionDeclaration) { |
| initialKeywords = statementKeywords(); |
| } else if (context == |
| IdentifierContext.localFunctionDeclarationContinuation) { |
| initialKeywords = statementKeywords(); |
| } else if (context == IdentifierContext.localVariableDeclaration) { |
| initialKeywords = statementKeywords(); |
| } else if (context == IdentifierContext.methodDeclaration) { |
| initialKeywords = classMemberKeywords(); |
| } else if (context == IdentifierContext.methodDeclarationContinuation) { |
| initialKeywords = classMemberKeywords(); |
| } else if (context == IdentifierContext.topLevelFunctionDeclaration) { |
| initialKeywords = topLevelKeywords(); |
| } else if (context == IdentifierContext.topLevelVariableDeclaration) { |
| initialKeywords = topLevelKeywords(); |
| } else if (context == IdentifierContext.typedefDeclaration) { |
| initialKeywords = topLevelKeywords(); |
| } else if (context == IdentifierContext.typeVariableDeclaration) { |
| initialKeywords = topLevelKeywords() |
| ..addAll(classMemberKeywords()) |
| ..addAll(statementKeywords()); |
| } else { |
| return false; |
| } |
| for (String tokenValue in initialKeywords) { |
| if (optional(tokenValue, token)) { |
| return true; |
| } |
| } |
| return false; |
| } |
| |
| Token expect(String string, Token token) { |
| // TODO(danrubel): update all uses of expect(';'...) to ensureSemicolon |
| // then add assert(!identical(';', string)); |
| if (!identical(string, token.stringValue)) { |
| return reportUnrecoverableError( |
| token, fasta.templateExpectedButGot.withArguments(string)) |
| .next; |
| } |
| return token.next; |
| } |
| |
| /// ``` |
| /// typeVariable: |
| /// metadata? identifier (('extends' | 'super') typeName)? |
| /// ; |
| /// ``` |
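| ///
| /// For example, `T extends num` in
| /// ```
| /// class C<T extends num> { }
| /// ```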
| Token parseTypeVariable(Token token) { |
| listener.beginTypeVariable(token.next); |
| token = parseMetadataStar(token); |
| token = ensureIdentifier(token, IdentifierContext.typeVariableDeclaration); |
| Token extendsOrSuper = null; |
| Token next = token.next; |
| if (optional('extends', next) || optional('super', next)) { |
| extendsOrSuper = next; |
| token = parseType(next); |
| } else { |
| listener.handleNoType(token); |
| } |
| listener.endTypeVariable(token.next, extendsOrSuper); |
| return token; |
| } |
| |
| /// Returns `true` if the stringValue of the [token] is either [value1], |
| /// [value2], or [value3]. |
| bool isOneOf3(Token token, String value1, String value2, String value3) { |
| String stringValue = token.stringValue; |
| return identical(value1, stringValue) || |
| identical(value2, stringValue) || |
| identical(value3, stringValue); |
| } |
| |
| /// Returns `true` if the stringValue of the [token] is either [value1], |
| /// [value2], [value3], or [value4]. |
| bool isOneOf4( |
| Token token, String value1, String value2, String value3, String value4) { |
| String stringValue = token.stringValue; |
| return identical(value1, stringValue) || |
| identical(value2, stringValue) || |
| identical(value3, stringValue) || |
| identical(value4, stringValue); |
| } |
| |
| bool notEofOrValue(String value, Token token) { |
| return !identical(token.kind, EOF_TOKEN) && |
| !identical(value, token.stringValue); |
| } |
| |
| /// Parse a type, if it is appropriate to do so. |
| /// |
| /// If this method can parse a type, it will return the next (non-null) token |
| /// after the type. Otherwise, it returns null. |
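| ///
| /// For example, `int`, `Map<String, int>`, and `void Function(int)` are all
| /// types that this method can recognize.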
| Token parseType(Token token, |
| [TypeContinuation continuation = TypeContinuation.Required, |
| IdentifierContext continuationContext, |
| MemberKind memberKind, |
| Token varFinalOrConst]) { |
| /// True if we've seen the `var` keyword. |
| bool hasVar = false; |
| |
| /// The token before [token]. |
| Token beforeToken; |
| |
| /// The token before the `begin` token. |
| Token beforeBegin; |
| |
| /// Where the type begins. |
| Token begin; |
| |
| /// Non-null if 'void' is the first token. |
| Token voidToken; |
| |
| /// True if the tokens at [begin] look like a type.
| bool looksLikeType = false; |
| |
| /// True if a type that could be a return type for a generalized function |
| /// type was seen during analysis. |
| bool hasReturnType = false; |
| |
| /// The identifier context to use for parsing the type. |
| IdentifierContext context = IdentifierContext.typeReference; |
| |
| /// Non-null if type arguments were seen during analysis. |
| Token typeArguments; |
| |
| /// The number of function types seen during analysis. |
| int functionTypes = 0; |
| |
| /// The tokens before the start of type variables of function types seen |
| /// during analysis. Notice that the tokens in this list might precede |
| /// either `'<'` or `'('` as not all function types have type parameters. |
| /// Also, it is safe to assume that [closeBraceTokenFor] will return |
| /// non-null for all of the tokens following these tokens. |
| Link<Token> typeVariableStarters = const Link<Token>(); |
| |
| { |
| // Analyse the next tokens to see if they could be a type. |
| |
| if (continuation == |
| TypeContinuation.ExpressionStatementOrConstDeclaration) { |
| // This is a special case. The first token is `const` and we need to |
| // analyze the tokens following the const keyword. |
| assert(optional("const", token.next)); |
| beforeBegin = token; |
| begin = beforeToken = token.next; |
| token = beforeToken.next; |
| } else { |
| beforeToken = beforeBegin = token; |
| token = begin = token.next; |
| } |
| |
| if (optional("void", token)) { |
| // `void` is a type. |
| looksLikeType = true; |
| beforeToken = voidToken = token; |
| token = token.next; |
| } else if (isValidTypeReference(token) && |
| !isGeneralizedFunctionType(token)) { |
| // We're looking at an identifier that could be a type (or `dynamic`). |
| looksLikeType = true; |
| beforeToken = token; |
| token = token.next; |
| if (optional(".", token) && isValidTypeReference(token.next)) { |
| // We're looking at `prefix '.' identifier`. |
| context = IdentifierContext.prefixedTypeReference; |
| beforeToken = token.next; |
| token = beforeToken.next; |
| } |
| if (optional("<", token)) { |
| Token close = closeBraceTokenFor(token); |
| if (close != null && |
| (optional(">", close) || optional(">>", close))) { |
| // We found some type arguments. |
| typeArguments = token; |
| beforeToken = close; |
| token = close.next; |
| } |
| } |
| } else if (token.isModifier && isValidTypeReference(token.next)) { |
| // Recovery - report error and skip modifier |
| reportRecoverableErrorWithToken(token, fasta.templateExpectedType); |
| return parseType(token, continuation, continuationContext, memberKind); |
| } |
| |
| // If what we have seen so far looks like a type, that could be a return |
| // type for a generalized function type. |
| hasReturnType = looksLikeType; |
| |
| while (optional("Function", token)) { |
| Token typeVariableStart = token; |
| if (optional("<", token.next)) { |
| Token close = closeBraceTokenFor(token.next); |
| if (close != null && optional(">", close)) { |
| beforeToken = previousToken(token, close); |
| token = close; |
| } else { |
| break; // Not a function type. |
| } |
| } |
| if (optional("(", token.next)) { |
| // This is a function type. |
| Token close = closeBraceTokenFor(token.next); |
| assert(optional(")", close)); |
| looksLikeType = true; |
| functionTypes++; |
| typeVariableStarters = |
| typeVariableStarters.prepend(typeVariableStart); |
| beforeToken = close; |
| token = close.next; |
| } else { |
| break; // Not a function type. |
| } |
| } |
| } |
| |
| /// Call this function when it's known that [begin] is a type. This |
| /// function will call the appropriate event methods on [listener] to |
| /// handle the type. |
| Token commitType() { |
| int count = 0; |
| for (Token typeVariableStart in typeVariableStarters) { |
| count++; |
| parseTypeVariablesOpt(typeVariableStart); |
| listener.beginFunctionType(begin); |
| } |
| assert(count == functionTypes); |
| |
| if (functionTypes > 0 && !hasReturnType) { |
| // A function type without return type. |
| // Push the non-existing return type first. The loop below will |
| // generate the full type. |
| listener.handleNoType(beforeBegin); |
| token = beforeBegin; |
| } else if (voidToken != null) { |
| listener.handleVoidKeyword(voidToken); |
| token = voidToken; |
| } else { |
| token = ensureIdentifier(beforeBegin, context); |
| token = parseQualifiedRestOpt( |
| token, IdentifierContext.typeReferenceContinuation); |
| assert(typeArguments == null || typeArguments == token.next); |
| token = parseTypeArgumentsOpt(token); |
| listener.handleType(begin, token.next); |
| } |
| |
| for (int i = 0; i < functionTypes; i++) { |
| Token next = token.next; |
| assert(optional('Function', next)); |
| Token functionToken = next; |
| if (optional("<", next.next)) { |
| // Skip type parameters, they were parsed above. |
| next = closeBraceTokenFor(next.next); |
| } |
| token = parseFormalParametersRequiredOpt( |
| next, MemberKind.GeneralizedFunctionType); |
| listener.endFunctionType(functionToken, token.next); |
| } |
| |
| if (hasVar) { |
| reportRecoverableError(begin, fasta.messageTypeAfterVar); |
| } |
| |
| return token; |
| } |
| |
| /// Returns true if [kind] could be the end of a variable declaration. |
| bool looksLikeVariableDeclarationEnd(int kind) { |
| return EQ_TOKEN == kind || |
| SEMICOLON_TOKEN == kind || |
| COMMA_TOKEN == kind || |
| // Recovery: Return true for these additional invalid situations |
| // in which we assume a missing semicolon. |
| OPEN_CURLY_BRACKET_TOKEN == kind || |
| CLOSE_CURLY_BRACKET_TOKEN == kind; |
| } |
| |
| /// Returns true if [token] could be the start of a function body. |
| bool looksLikeFunctionBody(Token token) { |
| return optional('{', token) || |
| optional('=>', token) || |
| optional('async', token) || |
| optional('sync', token); |
| } |
| |
| /// Returns true if [token] could be the start of a function declaration |
| /// without a return type. |
| bool looksLikeFunctionDeclaration(Token token) { |
| if (!token.isIdentifier) { |
| return false; |
| } |
| token = token.next; |
| if (optional('<', token)) { |
| Token closeBrace = closeBraceTokenFor(token); |
| if (closeBrace == null) return false; |
| token = closeBrace.next; |
| } |
| if (optional('(', token)) { |
| return looksLikeFunctionBody(closeBraceTokenFor(token).next); |
| } |
| return false; |
| } |
| |
| FormalParameterKind parameterKind; |
| switch (continuation) { |
| case TypeContinuation.Required: |
| // If the token after the type is not an identifier,
| // then report a missing type.
| if (!token.isIdentifier) { |
| if (memberKind == MemberKind.TopLevelField || |
| memberKind == MemberKind.NonStaticField || |
| memberKind == MemberKind.StaticField || |
| memberKind == MemberKind.Local) { |
| reportRecoverableError( |
| begin, fasta.messageMissingConstFinalVarOrType); |
| listener.handleNoType(beforeBegin); |
| return beforeBegin; |
| } |
| } |
| return commitType(); |
| |
| optional: |
| case TypeContinuation.Optional: |
| if (looksLikeType) { |
| if (functionTypes > 0) { |
| return commitType(); // Parse function type. |
| } |
| if (voidToken != null) { |
| listener.handleVoidKeyword(voidToken); |
| return voidToken; |
| } |
| if (token.isIdentifier || optional('this', token)) { |
| return commitType(); // Parse type. |
| } |
| } |
| listener.handleNoType(beforeBegin); |
| return beforeBegin; |
| |
| case TypeContinuation.OptionalAfterVar: |
| hasVar = true; |
| continue optional; |
| |
| case TypeContinuation.Typedef: |
| if (optional('=', token)) { |
| return null; // This isn't a type, it's a new-style typedef. |
| } |
| continue optional; |
| |
| case TypeContinuation.ExpressionStatementOrDeclaration: |
| assert(begin.isIdentifier || identical(begin.stringValue, 'void')); |
| if (!inPlainSync && optional("await", begin)) { |
| return parseExpressionStatement(beforeBegin); |
| } |
| |
| if (looksLikeType && token.isIdentifier) { |
| Token afterId = token.next; |
| |
| int afterIdKind = afterId.kind; |
| if (looksLikeVariableDeclarationEnd(afterIdKind)) { |
| // We are looking at `type identifier` followed by |
| // `(',' | '=' | ';')`. |
| |
| // TODO(ahe): Generate type events and call |
| // parseVariablesDeclarationRest instead. |
| return parseVariablesDeclaration(beforeBegin); |
| } else if (OPEN_PAREN_TOKEN == afterIdKind) { |
| // We are looking at `type identifier '('`. |
| if (looksLikeFunctionBody(closeBraceTokenFor(afterId).next)) { |
| // We are looking at `type identifier '(' ... ')'` followed by
| // `( '{' | '=>' | 'async' | 'sync' )`.
| |
| // Although it looks like there are no type variables here, they |
| // may get injected from a comment. |
| Token beforeFormals = parseTypeVariablesOpt(token); |
| |
| listener.beginLocalFunctionDeclaration(begin); |
| listener.handleModifiers(0); |
| if (voidToken != null) { |
| listener.handleVoidKeyword(voidToken); |
| } else { |
| commitType(); |
| } |
| return parseNamedFunctionRest( |
| beforeToken, begin, beforeFormals, false); |
| } |
| } else if (identical(afterIdKind, LT_TOKEN)) { |
| // We are looking at `type identifier '<'`. |
| Token beforeFormals = closeBraceTokenFor(afterId); |
| if (beforeFormals?.next != null && |
| optional("(", beforeFormals.next)) { |
| if (looksLikeFunctionBody( |
| closeBraceTokenFor(beforeFormals.next).next)) { |
| // We are looking at "type identifier '<' ... '>' '(' ... ')'" |
| // followed by '{', '=>', 'async', or 'sync'. |
| parseTypeVariablesOpt(token); |
| listener.beginLocalFunctionDeclaration(begin); |
| listener.handleModifiers(0); |
| if (voidToken != null) { |
| listener.handleVoidKeyword(voidToken); |
| } else { |
| commitType(); |
| } |
| return parseNamedFunctionRest( |
| beforeToken, begin, beforeFormals, false); |
| } |
| } |
| } |
| // Fall-through to expression statement. |
| } else { |
| beforeToken = beforeBegin; |
| token = begin; |
| if (optional(':', token.next)) { |
| return parseLabeledStatement(beforeToken); |
| } else if (optional('(', token.next)) { |
| if (looksLikeFunctionBody(closeBraceTokenFor(token.next).next)) { |
| // We are looking at `identifier '(' ... ')'` followed by `'{'`, |
| // `'=>'`, `'async'`, or `'sync'`. |
| |
| // Although it looks like there are no type variables here, they |
| // may get injected from a comment. |
| Token formals = parseTypeVariablesOpt(token); |
| |
| listener.beginLocalFunctionDeclaration(token); |
| listener.handleModifiers(0); |
| listener.handleNoType(token); |
| return parseNamedFunctionRest(beforeToken, begin, formals, false); |
| } |
| } else if (optional('<', token.next)) { |
| Token gt = closeBraceTokenFor(token.next); |
| if (gt?.next != null && optional("(", gt.next)) { |
| if (looksLikeFunctionBody(closeBraceTokenFor(gt.next).next)) { |
| // We are looking at `identifier '<' ... '>' '(' ... ')'` |
| // followed by `'{'`, `'=>'`, `'async'`, or `'sync'`. |
| parseTypeVariablesOpt(token); |
| listener.beginLocalFunctionDeclaration(token); |
| listener.handleModifiers(0); |
| listener.handleNoType(token); |
| return parseNamedFunctionRest(beforeToken, begin, gt, false); |
| } |
| } |
| // Fall through to expression statement. |
| } |
| } |
| return parseExpressionStatement(beforeBegin); |
| |
| case TypeContinuation.ExpressionStatementOrConstDeclaration: |
| Token identifier; |
| if (looksLikeType && token.isIdentifier) { |
| identifier = token; |
| } else if (begin.next.isIdentifier) { |
| identifier = begin.next; |
| } |
| if (identifier != null) { |
| if (looksLikeVariableDeclarationEnd(identifier.next.kind)) { |
| // We are looking at "const type identifier" followed by '=', ';', |
| // or ','. |
| |
| // TODO(ahe): Generate type events and call |
| // parseVariablesDeclarationRest instead. |
| return parseVariablesDeclaration(beforeBegin); |
| } |
| // Fall-through to expression statement. |
| } |
| |
| return parseExpressionStatement(beforeBegin); |
| |
| case TypeContinuation.SendOrFunctionLiteral: |
| Token beforeName; |
| Token name; |
| bool hasReturnType; |
| if (looksLikeType && looksLikeFunctionDeclaration(token)) { |
| beforeName = beforeToken; |
| name = token; |
| hasReturnType = true; |
| // Fall-through to parseNamedFunctionRest below. |
| } else if (looksLikeFunctionDeclaration(begin)) { |
| beforeName = beforeBegin; |
| name = begin; |
| hasReturnType = false; |
| // Fall-through to parseNamedFunctionRest below. |
| } else { |
| return parseSend(beforeBegin, continuationContext); |
| } |
| |
| Token formals = parseTypeVariablesOpt(name); |
| listener.beginNamedFunctionExpression(begin); |
| listener.handleModifiers(0); |
| if (hasReturnType) { |
| if (voidToken != null) { |
| listener.handleVoidKeyword(voidToken); |
| } else { |
| commitType(); |
| } |
| reportRecoverableError( |
| begin, fasta.messageReturnTypeFunctionExpression); |
| } else { |
| listener.handleNoType(formals); |
| } |
| if (beforeName.next != name) |
| throw new StateError("beforeName.next != name"); |
| return parseNamedFunctionRest(beforeName, begin, formals, true); |
| |
| case TypeContinuation.NormalFormalParameter: |
| case TypeContinuation.NormalFormalParameterAfterVar: |
| parameterKind = FormalParameterKind.mandatory; |
| hasVar = continuation == TypeContinuation.NormalFormalParameterAfterVar; |
| continue handleParameters; |
| |
| case TypeContinuation.OptionalPositionalFormalParameter: |
| case TypeContinuation.OptionalPositionalFormalParameterAfterVar: |
| parameterKind = FormalParameterKind.optionalPositional; |
| hasVar = continuation == |
| TypeContinuation.OptionalPositionalFormalParameterAfterVar; |
| continue handleParameters; |
| |
| case TypeContinuation.NamedFormalParameterAfterVar: |
| hasVar = true; |
| continue handleParameters; |
| |
| handleParameters: |
| case TypeContinuation.NamedFormalParameter: |
| parameterKind ??= FormalParameterKind.optionalNamed; |
| bool inFunctionType = memberKind == MemberKind.GeneralizedFunctionType; |
| bool isNamedParameter = |
| parameterKind == FormalParameterKind.optionalNamed; |
| |
| bool untyped = false; |
| if (!looksLikeType || optional("this", begin)) { |
| untyped = true; |
| beforeToken = beforeBegin; |
| token = begin; |
| } |
| |
| Token thisKeyword; |
| Token periodAfterThis; |
| Token beforeNameToken = beforeToken; |
| Token nameToken = token; |
| IdentifierContext nameContext = |
| IdentifierContext.formalParameterDeclaration; |
| beforeToken = token; |
| token = token.next; |
| if (inFunctionType) { |
| if (isNamedParameter) { |
| nameContext = IdentifierContext.formalParameterDeclaration; |
| if (!nameToken.isKeywordOrIdentifier) { |
| beforeToken = beforeNameToken; |
| token = nameToken; |
| } |
| } else if (nameToken.isKeywordOrIdentifier) { |
| if (untyped) { |
| // Type is required in a function type but name is not. |
| untyped = false; |
| nameContext = null; |
| beforeNameToken = nameToken; |
| nameToken = nameToken.next; |
| } else { |
| nameContext = IdentifierContext.formalParameterDeclaration; |
| } |
| } else { |
| // No name required in a function type. |
| nameContext = null; |
| beforeToken = beforeNameToken; |
| token = nameToken; |
| } |
| } else if (optional('this', nameToken)) { |
| thisKeyword = nameToken; |
| if (!optional('.', token)) { |
| // Recover from a missing period by inserting one. |
| Message message = fasta.templateExpectedButGot.withArguments('.'); |
| Token newToken = |
| new SyntheticToken(TokenType.PERIOD, token.charOffset); |
| periodAfterThis = |
| rewriteAndRecover(thisKeyword, message, newToken).next; |
| } else { |
| periodAfterThis = token; |
| } |
| beforeToken = periodAfterThis; |
| token = periodAfterThis.next; |
| nameContext = IdentifierContext.fieldInitializer; |
| if (!token.isIdentifier) { |
| // Recover from a missing identifier by inserting one. |
| token = insertSyntheticIdentifier(beforeToken, nameContext); |
| } |
| beforeNameToken = beforeToken; |
| beforeToken = nameToken = token; |
| token = token.next; |
| } else if (!nameToken.isIdentifier) { |
| if (optional('.', nameToken)) { |
| // Recovery: |
| // Looks like a prefixed type, but missing the type and param names. |
| // Set the nameToken so that a synthetic identifier is inserted |
| // after the `.` token. |
| beforeToken = beforeNameToken = nameToken; |
| token = nameToken = nameToken.next; |
| } else if (context == IdentifierContext.prefixedTypeReference) { |
| // Recovery: |
| // Looks like a prefixed type, but missing the parameter name. |
| beforeToken = nameToken = |
| insertSyntheticIdentifier(beforeNameToken, nameContext); |
| token = beforeToken.next; |
| } else { |
| untyped = true; |
| beforeNameToken = beforeBegin; |
| beforeToken = nameToken = begin; |
| token = nameToken.next; |
| } |
| } |
| if (isNamedParameter && nameToken.lexeme.startsWith("_")) { |
| // TODO(ahe): Move this to after committing the type. |
| reportRecoverableError(nameToken, fasta.messagePrivateNamedParameter); |
| } |
| |
| Token inlineFunctionTypeStart; |
| if (optional("<", token)) { |
| Token closer = closeBraceTokenFor(token); |
| if (closer != null) { |
| if (optional("(", closer.next)) { |
| if (varFinalOrConst != null) { |
| reportRecoverableError( |
| varFinalOrConst, fasta.messageFunctionTypedParameterVar); |
| } |
| inlineFunctionTypeStart = beforeToken; |
| beforeToken = token; |
| token = token.next; |
| } |
| } |
| } else if (optional("(", token)) { |
| if (varFinalOrConst != null) { |
| reportRecoverableError( |
| varFinalOrConst, fasta.messageFunctionTypedParameterVar); |
| } |
| inlineFunctionTypeStart = beforeToken; |
| beforeToken = closeBraceTokenFor(token); |
| token = beforeToken.next; |
| } |
| |
| if (inlineFunctionTypeStart != null) { |
| token = parseTypeVariablesOpt(inlineFunctionTypeStart); |
| // TODO(brianwilkerson): Figure out how to remove the invocation of |
| // `previous`. The method `parseTypeVariablesOpt` returns the last |
| // consumed token. |
| beforeToken = token.previous; |
| listener |
| .beginFunctionTypedFormalParameter(inlineFunctionTypeStart.next); |
| if (!untyped) { |
| if (voidToken != null) { |
| listener.handleVoidKeyword(voidToken); |
| } else { |
| Token saved = token; |
| commitType(); |
| token = saved; |
| // We need to recompute the before tokens because [commitType] can |
| // cause synthetic tokens to be inserted. |
| beforeToken = previousToken(beforeToken, token); |
| beforeNameToken = previousToken(beforeNameToken, nameToken); |
| } |
| } else { |
| listener.handleNoType(beforeToken); |
| } |
| beforeToken = parseFormalParametersRequiredOpt( |
| token, MemberKind.FunctionTypedParameter); |
| token = beforeToken.next; |
| listener.endFunctionTypedFormalParameter(); |
| |
| // Generalized function types don't allow inline function types. |
| // The following isn't allowed: |
| // int Function(int bar(String x)). |
| if (memberKind == MemberKind.GeneralizedFunctionType) { |
| reportRecoverableError(inlineFunctionTypeStart.next, |
| fasta.messageInvalidInlineFunctionType); |
| } |
| } else if (untyped) { |
| listener.handleNoType(token); |
| } else { |
| Token saved = token; |
| commitType(); |
| token = saved; |
| // We need to recompute the before tokens because [commitType] can |
| // cause synthetic tokens to be inserted. |
| beforeToken = previousToken(beforeToken, token); |
| beforeNameToken = previousToken(beforeNameToken, nameToken); |
| } |
| |
| if (nameContext != null) { |
| nameToken = ensureIdentifier(beforeNameToken, nameContext); |
| // We need to recompute the before tokens because [ensureIdentifier] |
| // can cause synthetic tokens to be inserted. |
| beforeToken = previousToken(beforeToken, token); |
| } else { |
| listener.handleNoName(nameToken); |
| } |
| |
| String value = token.stringValue; |
| if ((identical('=', value)) || (identical(':', value))) { |
| Token equal = token; |
| beforeToken = parseExpression(token); |
| token = beforeToken.next; |
| listener.handleValuedFormalParameter(equal, token); |
| if (isMandatoryFormalParameterKind(parameterKind)) { |
| reportRecoverableError( |
| equal, fasta.messageRequiredParameterWithDefault); |
| } else if (isOptionalPositionalFormalParameterKind(parameterKind) && |
| identical(':', value)) { |
| reportRecoverableError( |
| equal, fasta.messagePositionalParameterWithEquals); |
| } else if (inFunctionType || |
| memberKind == MemberKind.FunctionTypeAlias || |
| memberKind == MemberKind.FunctionTypedParameter) { |
| reportRecoverableError( |
| equal, fasta.messageFunctionTypeDefaultValue); |
| } |
| } else { |
| listener.handleFormalParameterWithoutValue(token); |
| } |
| listener.endFormalParameter( |
| thisKeyword, periodAfterThis, nameToken, parameterKind, memberKind); |
| return beforeToken; |
| } |
| |
| throw "Internal error: Unhandled continuation '$continuation'."; |
| } |
| |
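| /// Parse type arguments if the next token is `<`, for example
| /// `<String, int>`; otherwise notify the listener via
| /// `handleNoTypeArguments`.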
| Token parseTypeArgumentsOpt(Token token) { |
| Token next = token.next; |
| if (optional('<', next)) { |
| BeginToken begin = next; |
| rewriteLtEndGroupOpt(begin); |
| listener.beginTypeArguments(begin); |
| int count = 0; |
| do { |
| token = parseType(token.next); |
| ++count; |
| } while (optional(',', token.next)); |
| token = begin.endToken = ensureGt(token); |
| listener.endTypeArguments(count, begin, token); |
| } else { |
| listener.handleNoTypeArguments(next); |
| } |
| return token; |
| } |
| |
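| /// Parse type variables in a declaration if the next token is `<`, for
| /// example `<T, S extends num>` in `class C<T, S extends num> { }`;
| /// otherwise notify the listener via `handleNoTypeVariables`.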
| Token parseTypeVariablesOpt(Token token) { |
| Token next = token.next; |
| if (optional('<', next)) { |
| BeginToken begin = next; |
| rewriteLtEndGroupOpt(begin); |
| listener.beginTypeVariables(begin); |
| int count = 0; |
| do { |
| token = parseTypeVariable(token.next); |
| ++count; |
| } while (optional(',', token.next)); |
| token = begin.endToken = ensureGt(token); |
| listener.endTypeVariables(count, begin, token); |
| } else { |
| listener.handleNoTypeVariables(next); |
| } |
| return token; |
| } |
| |
| /// Parse a top level field or function. |
| /// |
| /// This method is only invoked from outside the parser. As a result, this |
| /// method takes the next token to be consumed rather than the last consumed |
| /// token and returns the token after the last consumed token rather than the |
| /// last consumed token. |
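| ///
| /// For example, a top level function such as `int f() => 0;` or a top
| /// level field such as `const answer = 42;`.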
| Token parseTopLevelMember(Token token) { |
| token = parseMetadataStar(syntheticPreviousToken(token)); |
| return parseTopLevelMemberImpl(token).next; |
| } |
| |
| Token parseTopLevelMemberImpl(Token token) { |
| Token beforeStart = token; |
| Token next = token.next; |
| listener.beginTopLevelMember(next); |
| |
| Token externalToken; |
| Token varFinalOrConst; |
| TypeContinuation typeContinuation; |
| |
| if (isModifier(next)) { |
| if (optional('external', next)) { |
| externalToken = token = next; |
| next = token.next; |
| } |
| if (isModifier(next)) { |
| if (optional('final', next)) { |
| typeContinuation = TypeContinuation.Optional; |
| varFinalOrConst = token = next; |
| next = token.next; |
| } else if (optional('var', next)) { |
| typeContinuation = TypeContinuation.OptionalAfterVar; |
| varFinalOrConst = token = next; |
| next = token.next; |
| } else if (optional('const', next)) { |
| typeContinuation = TypeContinuation.Optional; |
| varFinalOrConst = token = next; |
| next = token.next; |
| } |
| if (isModifier(next)) { |
| ModifierRecoveryContext2 context = new ModifierRecoveryContext2(this); |
| token = context.parseTopLevelModifiers(token, typeContinuation, |
| externalToken: externalToken, varFinalOrConst: varFinalOrConst); |
| next = token.next; |
| |
| typeContinuation = context.typeContinuation; |
| externalToken = context.externalToken; |
| varFinalOrConst = context.varFinalOrConst; |
| context = null; |
| } |
| } |
| } |
| typeContinuation ??= TypeContinuation.Required; |
| |
| Token beforeType = token; |
| // TODO(danrubel): Consider changing the listener contract |
| // so that the type reference can be parsed immediately |
| // rather than skipped now and parsed later. |
| token = skipTypeReferenceOpt(token, true); |
| if (token == beforeType) { |
| // There is no type reference. |
| beforeType = null; |
| } |
| next = token.next; |
| |
| Token getOrSet; |
| String value = next.stringValue; |
| if |