| // This code was auto-generated, is not intended to be edited, and is subject to |
| // significant change. Please see the README file for more information. |
| |
| library engine.parser; |
| |
| import 'dart:collection'; |
| import 'java_core.dart'; |
| import 'java_engine.dart'; |
| import 'instrumentation.dart'; |
| import 'error.dart'; |
| import 'source.dart'; |
| import 'scanner.dart'; |
| import 'ast.dart'; |
| import 'utilities_dart.dart'; |
| |
| /** |
| * Instances of the class {@code CommentAndMetadata} implement a simple data-holder for a method |
| * that needs to return multiple values. |
| * @coverage dart.engine.parser |
| */ |
| class CommentAndMetadata { |
| /** |
| * The documentation comment that was parsed, or {@code null} if none was given. |
| */ |
| Comment _comment; |
| /** |
| * The metadata that was parsed. |
| */ |
| List<Annotation> _metadata; |
| /** |
| * Initialize a newly created holder with the given data. |
| * @param comment the documentation comment that was parsed |
| * @param metadata the metadata that was parsed |
| */ |
| CommentAndMetadata(Comment comment, List<Annotation> metadata) { |
| this._comment = comment; |
| this._metadata = metadata; |
| } |
| /** |
| * Return the documentation comment that was parsed, or {@code null} if none was given. |
| * @return the documentation comment that was parsed |
| */ |
| Comment get comment => _comment; |
| /** |
| * Return the metadata that was parsed. If there was no metadata, then the list will be empty. |
| * @return the metadata that was parsed |
| */ |
| List<Annotation> get metadata => _metadata; |
| } |
| /** |
| * Instances of the class {@code FinalConstVarOrType} implement a simple data-holder for a method |
| * that needs to return multiple values. |
| * @coverage dart.engine.parser |
| */ |
| class FinalConstVarOrType { |
| /** |
| * The 'final', 'const' or 'var' keyword, or {@code null} if none was given. |
| */ |
| Token _keyword; |
| /** |
| * The type, or {@code null} if no type was specified. |
| */ |
| TypeName _type; |
| /** |
| * Initialize a newly created holder with the given data. |
| * @param keyword the 'final', 'const' or 'var' keyword |
| * @param type the type |
| */ |
| FinalConstVarOrType(Token keyword, TypeName type) { |
| this._keyword = keyword; |
| this._type = type; |
| } |
| /** |
| * Return the 'final', 'const' or 'var' keyword, or {@code null} if none was given. |
| * @return the 'final', 'const' or 'var' keyword |
| */ |
| Token get keyword => _keyword; |
| /** |
| * Return the type, or {@code null} if no type was specified. |
| * @return the type |
| */ |
| TypeName get type => _type; |
| } |
| /** |
| * Instances of the class {@code Modifiers} implement a simple data-holder for a method that needs |
| * to return multiple values. |
| * @coverage dart.engine.parser |
| */ |
| class Modifiers { |
| /** |
| * The token representing the keyword 'abstract', or {@code null} if the keyword was not found. |
| */ |
| Token _abstractKeyword; |
| /** |
| * The token representing the keyword 'const', or {@code null} if the keyword was not found. |
| */ |
| Token _constKeyword; |
| /** |
| * The token representing the keyword 'external', or {@code null} if the keyword was not found. |
| */ |
| Token _externalKeyword; |
| /** |
| * The token representing the keyword 'factory', or {@code null} if the keyword was not found. |
| */ |
| Token _factoryKeyword; |
| /** |
| * The token representing the keyword 'final', or {@code null} if the keyword was not found. |
| */ |
| Token _finalKeyword; |
| /** |
| * The token representing the keyword 'static', or {@code null} if the keyword was not found. |
| */ |
| Token _staticKeyword; |
| /** |
| * The token representing the keyword 'var', or {@code null} if the keyword was not found. |
| */ |
| Token _varKeyword; |
| /** |
| * Initialize a newly created and empty set of modifiers. |
| */ |
| Modifiers() : super() { |
| } |
| /** |
| * Return the token representing the keyword 'abstract', or {@code null} if the keyword was not |
| * found. |
| * @return the token representing the keyword 'abstract' |
| */ |
| Token get abstractKeyword => _abstractKeyword; |
| /** |
| * Return the token representing the keyword 'const', or {@code null} if the keyword was not |
| * found. |
| * @return the token representing the keyword 'const' |
| */ |
| Token get constKeyword => _constKeyword; |
| /** |
| * Return the token representing the keyword 'external', or {@code null} if the keyword was not |
| * found. |
| * @return the token representing the keyword 'external' |
| */ |
| Token get externalKeyword => _externalKeyword; |
| /** |
| * Return the token representing the keyword 'factory', or {@code null} if the keyword was not |
| * found. |
| * @return the token representing the keyword 'factory' |
| */ |
| Token get factoryKeyword => _factoryKeyword; |
| /** |
| * Return the token representing the keyword 'final', or {@code null} if the keyword was not |
| * found. |
| * @return the token representing the keyword 'final' |
| */ |
| Token get finalKeyword => _finalKeyword; |
| /** |
| * Return the token representing the keyword 'static', or {@code null} if the keyword was not |
| * found. |
| * @return the token representing the keyword 'static' |
| */ |
| Token get staticKeyword => _staticKeyword; |
| /** |
| * Return the token representing the keyword 'var', or {@code null} if the keyword was not found. |
| * @return the token representing the keyword 'var' |
| */ |
| Token get varKeyword => _varKeyword; |
| /** |
| * Set the token representing the keyword 'abstract' to the given token. |
| * @param abstractKeyword the token representing the keyword 'abstract' |
| */ |
| void set abstractKeyword(Token abstractKeyword2) { |
| this._abstractKeyword = abstractKeyword2; |
| } |
| /** |
| * Set the token representing the keyword 'const' to the given token. |
| * @param constKeyword the token representing the keyword 'const' |
| */ |
| void set constKeyword(Token constKeyword2) { |
| this._constKeyword = constKeyword2; |
| } |
| /** |
| * Set the token representing the keyword 'external' to the given token. |
| * @param externalKeyword the token representing the keyword 'external' |
| */ |
| void set externalKeyword(Token externalKeyword2) { |
| this._externalKeyword = externalKeyword2; |
| } |
| /** |
| * Set the token representing the keyword 'factory' to the given token. |
| * @param factoryKeyword the token representing the keyword 'factory' |
| */ |
| void set factoryKeyword(Token factoryKeyword2) { |
| this._factoryKeyword = factoryKeyword2; |
| } |
| /** |
| * Set the token representing the keyword 'final' to the given token. |
| * @param finalKeyword the token representing the keyword 'final' |
| */ |
| void set finalKeyword(Token finalKeyword2) { |
| this._finalKeyword = finalKeyword2; |
| } |
| /** |
| * Set the token representing the keyword 'static' to the given token. |
| * @param staticKeyword the token representing the keyword 'static' |
| */ |
| void set staticKeyword(Token staticKeyword2) { |
| this._staticKeyword = staticKeyword2; |
| } |
| /** |
| * Set the token representing the keyword 'var' to the given token. |
| * @param varKeyword the token representing the keyword 'var' |
| */ |
| void set varKeyword(Token varKeyword2) { |
| this._varKeyword = varKeyword2; |
| } |
| String toString() { |
| JavaStringBuilder builder = new JavaStringBuilder(); |
| bool needsSpace = appendKeyword(builder, false, _abstractKeyword); |
| needsSpace = appendKeyword(builder, needsSpace, _constKeyword); |
| needsSpace = appendKeyword(builder, needsSpace, _externalKeyword); |
| needsSpace = appendKeyword(builder, needsSpace, _factoryKeyword); |
| needsSpace = appendKeyword(builder, needsSpace, _finalKeyword); |
| needsSpace = appendKeyword(builder, needsSpace, _staticKeyword); |
| appendKeyword(builder, needsSpace, _varKeyword); |
| return builder.toString(); |
| } |
| /** |
| * If the given keyword is not {@code null}, append it to the given builder, prefixing it with a |
| * space if needed. |
| * @param builder the builder to which the keyword will be appended |
| * @param needsSpace {@code true} if the keyword needs to be prefixed with a space |
| * @param keyword the keyword to be appended |
| * @return {@code true} if subsequent keywords need to be prefixed with a space |
| */ |
| bool appendKeyword(JavaStringBuilder builder, bool needsSpace, Token keyword) { |
| if (keyword != null) { |
| if (needsSpace) { |
| builder.appendChar(0x20); |
| } |
| builder.append(keyword.lexeme); |
| return true; |
| } |
| return needsSpace; |
| } |
| } |
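| // Illustrative note (not part of the generated code): a Modifiers instance whose finalKeyword |
| // and staticKeyword are both set produces "final static" from toString(), because keywords are |
| // appended in the fixed order abstract, const, external, factory, final, static, var. |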
| /** |
| * Instances of the class {@code Parser} are used to parse tokens into an AST structure. |
| * @coverage dart.engine.parser |
| */ |
| class Parser { |
| /** |
| * The source being parsed. |
| */ |
| Source _source; |
| /** |
| * The error listener that will be informed of any errors that are found during the parse. |
| */ |
| AnalysisErrorListener _errorListener; |
| /** |
| * The next token to be parsed. |
| */ |
| Token _currentToken; |
| /** |
| * A flag indicating whether the parser is currently in the body of a loop. |
| */ |
| bool _inLoop = false; |
| /** |
| * A flag indicating whether the parser is currently in a switch statement. |
| */ |
| bool _inSwitch = false; |
| static String _HIDE = "hide"; |
| static String _OF = "of"; |
| static String _ON = "on"; |
| static String _SHOW = "show"; |
| static String _NATIVE = "native"; |
| /** |
| * Initialize a newly created parser. |
| * @param source the source being parsed |
| * @param errorListener the error listener that will be informed of any errors that are found |
| * during the parse |
| */ |
| Parser(Source source, AnalysisErrorListener errorListener) { |
| this._source = source; |
| this._errorListener = errorListener; |
| } |
| /** |
| * Parse a compilation unit, starting with the given token. |
| * @param token the first token of the compilation unit |
| * @return the compilation unit that was parsed |
| */ |
| CompilationUnit parseCompilationUnit(Token token) { |
| InstrumentationBuilder instrumentation = Instrumentation.builder2("dart.engine.Parser.parseCompilationUnit"); |
| try { |
| _currentToken = token; |
| return parseCompilationUnit2(); |
| } finally { |
| instrumentation.log(); |
| } |
| } |
| /** |
| * Parse an expression, starting with the given token. |
| * @param token the first token of the expression |
| * @return the expression that was parsed, or {@code null} if the tokens do not represent a |
| * recognizable expression |
| */ |
| Expression parseExpression(Token token) { |
| InstrumentationBuilder instrumentation = Instrumentation.builder2("dart.engine.Parser.parseExpression"); |
| try { |
| _currentToken = token; |
| return parseExpression2(); |
| } finally { |
| instrumentation.log(); |
| } |
| } |
| /** |
| * Parse a statement, starting with the given token. |
| * @param token the first token of the statement |
| * @return the statement that was parsed, or {@code null} if the tokens do not represent a |
| * recognizable statement |
| */ |
| Statement parseStatement(Token token) { |
| InstrumentationBuilder instrumentation = Instrumentation.builder2("dart.engine.Parser.parseStatement"); |
| try { |
| _currentToken = token; |
| return parseStatement2(); |
| } finally { |
| instrumentation.log(); |
| } |
| } |
| /** |
| * Parse a sequence of statements, starting with the given token. |
| * @param token the first token of the sequence of statements |
| * @return the statements that were parsed, or {@code null} if the tokens do not represent a |
| * recognizable sequence of statements |
| */ |
| List<Statement> parseStatements(Token token) { |
| InstrumentationBuilder instrumentation = Instrumentation.builder2("dart.engine.Parser.parseStatements"); |
| try { |
| _currentToken = token; |
| return parseStatements2(); |
| } finally { |
| instrumentation.log(); |
| } |
| } |
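| // Illustrative usage sketch (not part of the generated code): the parse* entry points above |
| // expect the first token of an already-scanned token stream. Assuming `contents` holds the text |
| // of `source` and `listener` is some AnalysisErrorListener implementation: |
| // |
| //   StringScanner scanner = new StringScanner(source, contents, listener); |
| //   Parser parser = new Parser(source, listener); |
| //   CompilationUnit unit = parser.parseCompilationUnit(scanner.tokenize()); |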
| void set currentToken(Token currentToken2) { |
| this._currentToken = currentToken2; |
| } |
| /** |
| * Advance to the next token in the token stream. |
| */ |
| void advance() { |
| _currentToken = _currentToken.next; |
| } |
| /** |
| * Append the character equivalent of the given scalar value to the given builder. If the scalar |
| * value is invalid, report an error and do not append anything to the builder; the start and end |
| * indices identify the characters from which the scalar value was computed. |
| * @param builder the builder to which the scalar value is to be appended |
| * @param escapeSequence the escape sequence that was parsed to produce the scalar value |
| * @param scalarValue the value to be appended |
| * @param startIndex the index of the first character representing the scalar value |
| * @param endIndex the index of the last character representing the scalar value |
| */ |
| void appendScalarValue(JavaStringBuilder builder, String escapeSequence, int scalarValue, int startIndex, int endIndex) { |
| if (scalarValue < 0 || scalarValue > Character.MAX_CODE_POINT || (scalarValue >= 0xD800 && scalarValue <= 0xDFFF)) { |
| reportError4(ParserErrorCode.INVALID_CODE_POINT, [escapeSequence]); |
| return; |
| } |
| if (scalarValue < Character.MAX_VALUE) { |
| builder.appendChar((scalarValue as int)); |
| } else { |
| builder.append(Character.toChars(scalarValue)); |
| } |
| } |
| /** |
| * Compute the content of a string with the given literal representation. |
| * @param lexeme the literal representation of the string |
| * @return the actual value of the string |
| */ |
| String computeStringValue(String lexeme) { |
| if (lexeme.startsWith("r\"\"\"") || lexeme.startsWith("r'''")) { |
| if (lexeme.length > 4) { |
| return lexeme.substring(4, lexeme.length - 3); |
| } |
| } else if (lexeme.startsWith("r\"") || lexeme.startsWith("r'")) { |
| if (lexeme.length > 2) { |
| return lexeme.substring(2, lexeme.length - 1); |
| } |
| } |
| int start = 0; |
| if (lexeme.startsWith("\"\"\"") || lexeme.startsWith("'''")) { |
| start += 3; |
| } else if (lexeme.startsWith("\"") || lexeme.startsWith("'")) { |
| start += 1; |
| } |
| int end = lexeme.length; |
| if (end > 3 && (lexeme.endsWith("\"\"\"") || lexeme.endsWith("'''"))) { |
| end -= 3; |
| } else if (end > 1 && (lexeme.endsWith("\"") || lexeme.endsWith("'"))) { |
| end -= 1; |
| } |
| JavaStringBuilder builder = new JavaStringBuilder(); |
| int index = start; |
| while (index < end) { |
| index = translateCharacter(builder, lexeme, index); |
| } |
| return builder.toString(); |
| } |
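| // For illustration, the handling above yields, for example: |
| //   computeStringValue("'abc'")     => "abc"  (surrounding quotes removed) |
| //   computeStringValue("r'abc'")    => "abc"  (raw string, no escape translation) |
| //   computeStringValue("'''abc'''") => "abc"  (triple quotes removed) |
| // while non-raw contents are passed through translateCharacter to expand escape sequences. |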
| /** |
| * Create a synthetic identifier. |
| * @return the synthetic identifier that was created |
| */ |
| SimpleIdentifier createSyntheticIdentifier() => new SimpleIdentifier.full(createSyntheticToken2(TokenType.IDENTIFIER)); |
| /** |
| * Create a synthetic string literal. |
| * @return the synthetic string literal that was created |
| */ |
| SimpleStringLiteral createSyntheticStringLiteral() => new SimpleStringLiteral.full(createSyntheticToken2(TokenType.STRING), ""); |
| /** |
| * Create a synthetic token representing the given keyword. |
| * @return the synthetic token that was created |
| */ |
| Token createSyntheticToken(Keyword keyword) => new KeywordToken_11(keyword, _currentToken.offset); |
| /** |
| * Create a synthetic token with the given type. |
| * @return the synthetic token that was created |
| */ |
| Token createSyntheticToken2(TokenType type) => new StringToken(type, "", _currentToken.offset); |
| /** |
| * Check that the given expression is assignable and report an error if it isn't. |
| * <pre> |
| * assignableExpression ::= |
| * primary (arguments* assignableSelector)+ |
| * | 'super' assignableSelector |
| * | identifier |
| * assignableSelector ::= |
| * '[' expression ']' |
| * | '.' identifier |
| * </pre> |
| * @param expression the expression being checked |
| */ |
| void ensureAssignable(Expression expression) { |
| if (expression != null && !expression.isAssignable()) { |
| reportError4(ParserErrorCode.ILLEGAL_ASSIGNMENT_TO_NON_ASSIGNABLE, []); |
| } |
| } |
| /** |
| * If the current token matches the given keyword, return it after advancing to the |
| * next token. Otherwise report an error and return the current token without advancing. |
| * @param keyword the keyword that is expected |
| * @return the token that matched the given keyword |
| */ |
| Token expect(Keyword keyword) { |
| if (matches(keyword)) { |
| return andAdvance; |
| } |
| reportError4(ParserErrorCode.EXPECTED_TOKEN, [keyword.syntax]); |
| return _currentToken; |
| } |
| /** |
| * If the current token has the expected type, return it after advancing to the next token. |
| * Otherwise report an error and return the current token without advancing. |
| * @param type the type of token that is expected |
| * @return the token that matched the given type |
| */ |
| Token expect2(TokenType type) { |
| if (matches5(type)) { |
| return andAdvance; |
| } |
| if (identical(type, TokenType.SEMICOLON)) { |
| reportError5(ParserErrorCode.EXPECTED_TOKEN, _currentToken.previous, [type.lexeme]); |
| } else { |
| reportError4(ParserErrorCode.EXPECTED_TOKEN, [type.lexeme]); |
| } |
| return _currentToken; |
| } |
| /** |
| * Advance to the next token in the token stream, making it the new current token. |
| * @return the token that was current before this method was invoked |
| */ |
| Token get andAdvance { |
| Token token = _currentToken; |
| advance(); |
| return token; |
| } |
| /** |
| * Return {@code true} if the current token is the first token of a return type that is followed |
| * by an identifier, possibly followed by a list of type parameters, followed by a |
| * left-parenthesis. This is used by parseTypeAlias to determine whether or not to parse a return |
| * type. |
| * @return {@code true} if we can successfully parse the rest of a type alias if we first parse a |
| * return type. |
| */ |
| bool hasReturnTypeInTypeAlias() { |
| Token next = skipReturnType(_currentToken); |
| if (next == null) { |
| return false; |
| } |
| return matchesIdentifier2(next); |
| } |
| /** |
| * Return {@code true} if the current token appears to be the beginning of a function declaration. |
| * @return {@code true} if the current token appears to be the beginning of a function declaration |
| */ |
| bool isFunctionDeclaration() { |
| if (matches(Keyword.VOID)) { |
| return true; |
| } |
| Token afterReturnType = skipTypeName(_currentToken); |
| if (afterReturnType == null) { |
| afterReturnType = _currentToken; |
| } |
| Token afterIdentifier = skipSimpleIdentifier(afterReturnType); |
| if (afterIdentifier == null) { |
| afterIdentifier = skipSimpleIdentifier(_currentToken); |
| } |
| if (afterIdentifier == null) { |
| return false; |
| } |
| return isFunctionExpression(afterIdentifier); |
| } |
| /** |
| * Return {@code true} if the given token appears to be the beginning of a function expression. |
| * @param startToken the token that might be the start of a function expression |
| * @return {@code true} if the given token appears to be the beginning of a function expression |
| */ |
| bool isFunctionExpression(Token startToken) { |
| Token afterParameters = skipFormalParameterList(startToken); |
| if (afterParameters == null) { |
| return false; |
| } |
| return matchesAny(afterParameters, [TokenType.OPEN_CURLY_BRACKET, TokenType.FUNCTION]); |
| } |
| /** |
| * Return {@code true} if the given character is a valid hexadecimal digit. |
| * @param character the character being tested |
| * @return {@code true} if the character is a valid hexadecimal digit |
| */ |
| bool isHexDigit(int character) => (0x30 <= character && character <= 0x39) || (0x41 <= character && character <= 0x46) || (0x61 <= character && character <= 0x66); |
| /** |
| * Return {@code true} if the current token is the first token in an initialized variable |
| * declaration rather than an expression. This method assumes that we have already skipped past |
| * any metadata that might be associated with the declaration. |
| * <pre> |
| * initializedVariableDeclaration ::= |
| * declaredIdentifier ('=' expression)? (',' initializedIdentifier)* |
| * declaredIdentifier ::= |
| * metadata finalConstVarOrType identifier |
| * finalConstVarOrType ::= |
| * 'final' type? |
| * | 'const' type? |
| * | 'var' |
| * | type |
| * type ::= |
| * qualified typeArguments? |
| * initializedIdentifier ::= |
| * identifier ('=' expression)? |
| * </pre> |
| * @return {@code true} if the current token is the first token in an initialized variable |
| * declaration |
| */ |
| bool isInitializedVariableDeclaration() { |
| if (matches(Keyword.FINAL) || matches(Keyword.CONST) || matches(Keyword.VAR)) { |
| return true; |
| } |
| Token token = skipTypeName(_currentToken); |
| if (token == null) { |
| return false; |
| } |
| token = skipSimpleIdentifier(token); |
| if (token == null) { |
| return false; |
| } |
| TokenType type2 = token.type; |
| return identical(type2, TokenType.EQ) || identical(type2, TokenType.COMMA) || identical(type2, TokenType.SEMICOLON) || matches3(token, Keyword.IN); |
| } |
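| // For illustration: with the current token at the start of `int x = 0;` the lookahead above |
| // skips a type name and an identifier and then sees '=', so this returns true; for `x = 0;` the |
| // identifier is consumed as a type name, no identifier follows, and false is returned, so the |
| // tokens are parsed as an expression instead. |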
| /** |
| * Return {@code true} if the given token appears to be the beginning of an operator declaration. |
| * @param startToken the token that might be the start of an operator declaration |
| * @return {@code true} if the given token appears to be the beginning of an operator declaration |
| */ |
| bool isOperator(Token startToken) { |
| if (startToken.isOperator()) { |
| Token token = startToken.next; |
| while (token.isOperator()) { |
| token = token.next; |
| } |
| return matches4(token, TokenType.OPEN_PAREN); |
| } |
| return false; |
| } |
| /** |
| * Return {@code true} if the current token appears to be the beginning of a switch member. |
| * @return {@code true} if the current token appears to be the beginning of a switch member |
| */ |
| bool isSwitchMember() { |
| Token token = _currentToken; |
| while (matches4(token, TokenType.IDENTIFIER) && matches4(token.next, TokenType.COLON)) { |
| token = token.next.next; |
| } |
| if (identical(token.type, TokenType.KEYWORD)) { |
| Keyword keyword2 = ((token as KeywordToken)).keyword; |
| return identical(keyword2, Keyword.CASE) || identical(keyword2, Keyword.DEFAULT); |
| } |
| return false; |
| } |
| /** |
| * Compare the given tokens to find the token that appears first in the source being parsed. That |
| * is, return the left-most of all of the tokens. The arguments are allowed to be {@code null}. |
| * Return the token with the smallest offset, or {@code null} if there are no arguments or if all |
| * of the arguments are {@code null}. |
| * @param tokens the tokens being compared |
| * @return the token with the smallest offset |
| */ |
| Token lexicallyFirst(List<Token> tokens) { |
| Token first = null; |
| int firstOffset = 2147483647; |
| for (Token token in tokens) { |
| if (token != null) { |
| int offset2 = token.offset; |
| if (offset2 < firstOffset) { |
| first = token; |
| firstOffset = offset2; |
| } |
| } |
| } |
| return first; |
| } |
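| // For illustration (hypothetical tokens): lexicallyFirst([tokenAt10, null, tokenAt4]) returns |
| // the token with offset 4; null entries are skipped, and null is returned when every argument |
| // is null. |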
| /** |
| * Return {@code true} if the current token matches the given keyword. |
| * @param keyword the keyword that can optionally appear in the current location |
| * @return {@code true} if the current token matches the given keyword |
| */ |
| bool matches(Keyword keyword) => matches3(_currentToken, keyword); |
| /** |
| * Return {@code true} if the current token matches the given identifier. |
| * @param identifier the identifier that can optionally appear in the current location |
| * @return {@code true} if the current token matches the given identifier |
| */ |
| bool matches2(String identifier) => identical(_currentToken.type, TokenType.IDENTIFIER) && _currentToken.lexeme == identifier; |
| /** |
| * Return {@code true} if the given token matches the given keyword. |
| * @param token the token being tested |
| * @param keyword the keyword that is being tested for |
| * @return {@code true} if the given token matches the given keyword |
| */ |
| bool matches3(Token token, Keyword keyword2) => identical(token.type, TokenType.KEYWORD) && identical(((token as KeywordToken)).keyword, keyword2); |
| /** |
| * Return {@code true} if the given token has the given type. |
| * @param token the token being tested |
| * @param type the type of token that is being tested for |
| * @return {@code true} if the given token has the given type |
| */ |
| bool matches4(Token token, TokenType type2) => identical(token.type, type2); |
| /** |
| * Return {@code true} if the current token has the given type. Note that this method, unlike |
| * other variants, will modify the token stream if possible to match a wider range of tokens. In |
| * particular, if we are attempting to match a '>' and the current token is a '>>', '>=' or '>>=', |
| * the token stream will be re-written and {@code true} will be returned. |
| * @param type the type of token that can optionally appear in the current location |
| * @return {@code true} if the current token has the given type |
| */ |
| bool matches5(TokenType type2) { |
| TokenType currentType = _currentToken.type; |
| if (currentType != type2) { |
| if (identical(type2, TokenType.GT)) { |
| if (identical(currentType, TokenType.GT_GT)) { |
| int offset2 = _currentToken.offset; |
| Token first = new Token(TokenType.GT, offset2); |
| Token second = new Token(TokenType.GT, offset2 + 1); |
| second.setNext(_currentToken.next); |
| first.setNext(second); |
| _currentToken.previous.setNext(first); |
| _currentToken = first; |
| return true; |
| } else if (identical(currentType, TokenType.GT_EQ)) { |
| int offset3 = _currentToken.offset; |
| Token first = new Token(TokenType.GT, offset3); |
| Token second = new Token(TokenType.EQ, offset3 + 1); |
| second.setNext(_currentToken.next); |
| first.setNext(second); |
| _currentToken.previous.setNext(first); |
| _currentToken = first; |
| return true; |
| } else if (identical(currentType, TokenType.GT_GT_EQ)) { |
| int offset4 = _currentToken.offset; |
| Token first = new Token(TokenType.GT, offset4); |
| Token second = new Token(TokenType.GT, offset4 + 1); |
| Token third = new Token(TokenType.EQ, offset4 + 2); |
| third.setNext(_currentToken.next); |
| second.setNext(third); |
| first.setNext(second); |
| _currentToken.previous.setNext(first); |
| _currentToken = first; |
| return true; |
| } |
| } |
| return false; |
| } |
| return true; |
| } |
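| // For illustration: when parsing nested type arguments such as `List<List<int>>`, the scanner |
| // produces a single '>>' token; a call to matches5(TokenType.GT) at that point splits it into |
| // two '>' tokens and splices them into the stream, so the inner and outer type argument lists |
| // can each consume one '>'. |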
| /** |
| * Return {@code true} if the given token has any one of the given types. |
| * @param token the token being tested |
| * @param types the types of token that are being tested for |
| * @return {@code true} if the given token has any of the given types |
| */ |
| bool matchesAny(Token token, List<TokenType> types) { |
| TokenType actualType = token.type; |
| for (TokenType type in types) { |
| if (identical(actualType, type)) { |
| return true; |
| } |
| } |
| return false; |
| } |
| /** |
| * Return {@code true} if the current token is a valid identifier. Valid identifiers include |
| * built-in identifiers (pseudo-keywords). |
| * @return {@code true} if the current token is a valid identifier |
| */ |
| bool matchesIdentifier() => matchesIdentifier2(_currentToken); |
| /** |
| * Return {@code true} if the given token is a valid identifier. Valid identifiers include |
| * built-in identifiers (pseudo-keywords). |
| * @return {@code true} if the given token is a valid identifier |
| */ |
| bool matchesIdentifier2(Token token) => matches4(token, TokenType.IDENTIFIER) || (matches4(token, TokenType.KEYWORD) && ((token as KeywordToken)).keyword.isPseudoKeyword()); |
| /** |
| * If the current token has the given type, then advance to the next token and return {@code true}. Otherwise, return {@code false} without advancing. |
| * @param type the type of token that can optionally appear in the current location |
| * @return {@code true} if the current token has the given type |
| */ |
| bool optional(TokenType type) { |
| if (matches5(type)) { |
| advance(); |
| return true; |
| } |
| return false; |
| } |
| /** |
| * Parse an additive expression. |
| * <pre> |
| * additiveExpression ::= |
| * multiplicativeExpression (additiveOperator multiplicativeExpression)* |
| * | 'super' (additiveOperator multiplicativeExpression)+ |
| * </pre> |
| * @return the additive expression that was parsed |
| */ |
| Expression parseAdditiveExpression() { |
| Expression expression; |
| if (matches(Keyword.SUPER) && _currentToken.next.type.isAdditiveOperator()) { |
| expression = new SuperExpression.full(andAdvance); |
| } else { |
| expression = parseMultiplicativeExpression(); |
| } |
| while (_currentToken.type.isAdditiveOperator()) { |
| Token operator = andAdvance; |
| expression = new BinaryExpression.full(expression, operator, parseMultiplicativeExpression()); |
| } |
| return expression; |
| } |
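| // For illustration: `a + b - c` is folded by the loop above into a left-associative tree, |
| // BinaryExpression(BinaryExpression(a, '+', b), '-', c). |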
| /** |
| * Parse an annotation. |
| * <pre> |
| * annotation ::= |
| * '@' qualified ('.' identifier)? arguments? |
| * </pre> |
| * @return the annotation that was parsed |
| */ |
| Annotation parseAnnotation() { |
| Token atSign = expect2(TokenType.AT); |
| Identifier name = parsePrefixedIdentifier(); |
| Token period = null; |
| SimpleIdentifier constructorName = null; |
| if (matches5(TokenType.PERIOD)) { |
| period = andAdvance; |
| constructorName = parseSimpleIdentifier(); |
| } |
| ArgumentList arguments = null; |
| if (matches5(TokenType.OPEN_PAREN)) { |
| arguments = parseArgumentList(); |
| } |
| return new Annotation.full(atSign, name, period, constructorName, arguments); |
| } |
| /** |
| * Parse an argument. |
| * <pre> |
| * argument ::= |
| * namedArgument |
| * | expression |
| * namedArgument ::= |
| * label expression |
| * </pre> |
| * @return the argument that was parsed |
| */ |
| Expression parseArgument() { |
| if (matchesIdentifier() && matches4(peek(), TokenType.COLON)) { |
| SimpleIdentifier label = new SimpleIdentifier.full(andAdvance); |
| Label name = new Label.full(label, andAdvance); |
| return new NamedExpression.full(name, parseExpression2()); |
| } else { |
| return parseExpression2(); |
| } |
| } |
| /** |
| * Parse an argument definition test. |
| * <pre> |
| * argumentDefinitionTest ::= |
| * '?' identifier |
| * </pre> |
| * @return the argument definition test that was parsed |
| */ |
| ArgumentDefinitionTest parseArgumentDefinitionTest() { |
| Token question = expect2(TokenType.QUESTION); |
| SimpleIdentifier identifier = parseSimpleIdentifier(); |
| return new ArgumentDefinitionTest.full(question, identifier); |
| } |
| /** |
| * Parse a list of arguments. |
| * <pre> |
| * arguments ::= |
| * '(' argumentList? ')' |
| * argumentList ::= |
| * namedArgument (',' namedArgument)* |
| * | expressionList (',' namedArgument)* |
| * </pre> |
| * @return the argument list that was parsed |
| */ |
| ArgumentList parseArgumentList() { |
| Token leftParenthesis = expect2(TokenType.OPEN_PAREN); |
| List<Expression> arguments = new List<Expression>(); |
| if (matches5(TokenType.CLOSE_PAREN)) { |
| return new ArgumentList.full(leftParenthesis, arguments, andAdvance); |
| } |
| Expression argument = parseArgument(); |
| arguments.add(argument); |
| bool foundNamedArgument = argument is NamedExpression; |
| bool generatedError = false; |
| while (optional(TokenType.COMMA)) { |
| argument = parseArgument(); |
| arguments.add(argument); |
| if (foundNamedArgument) { |
| if (!generatedError && argument is! NamedExpression) { |
| reportError4(ParserErrorCode.POSITIONAL_AFTER_NAMED_ARGUMENT, []); |
| generatedError = true; |
| } |
| } else if (argument is NamedExpression) { |
| foundNamedArgument = true; |
| } |
| } |
| Token rightParenthesis = expect2(TokenType.CLOSE_PAREN); |
| return new ArgumentList.full(leftParenthesis, arguments, rightParenthesis); |
| } |
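| // For illustration: `(1, x: 2, y: 3)` parses into an ArgumentList with one positional |
| // expression followed by two NamedExpressions, while `(x: 2, 1)` still parses but reports |
| // POSITIONAL_AFTER_NAMED_ARGUMENT once for the list. |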
| /** |
| * Parse an assert statement. |
| * <pre> |
| * assertStatement ::= |
| * 'assert' '(' conditionalExpression ')' ';' |
| * </pre> |
| * @return the assert statement |
| */ |
| AssertStatement parseAssertStatement() { |
| Token keyword = expect(Keyword.ASSERT); |
| Token leftParen = expect2(TokenType.OPEN_PAREN); |
| Expression expression = parseConditionalExpression(); |
| Token rightParen = expect2(TokenType.CLOSE_PAREN); |
| Token semicolon = expect2(TokenType.SEMICOLON); |
| return new AssertStatement.full(keyword, leftParen, expression, rightParen, semicolon); |
| } |
| /** |
| * Parse an assignable expression. |
| * <pre> |
| * assignableExpression ::= |
| * primary (arguments* assignableSelector)+ |
| * | 'super' assignableSelector |
| * | identifier |
| * </pre> |
| * @param primaryAllowed {@code true} if the expression is allowed to be a primary without any |
| * assignable selector |
| * @return the assignable expression that was parsed |
| */ |
| Expression parseAssignableExpression(bool primaryAllowed) { |
| if (matches(Keyword.SUPER)) { |
| return parseAssignableSelector(new SuperExpression.full(andAdvance), false); |
| } |
| Expression expression = parsePrimaryExpression(); |
| bool isOptional = primaryAllowed || expression is SimpleIdentifier; |
| while (true) { |
| while (matches5(TokenType.OPEN_PAREN)) { |
| ArgumentList argumentList = parseArgumentList(); |
| if (expression is SimpleIdentifier) { |
| expression = new MethodInvocation.full(null, null, (expression as SimpleIdentifier), argumentList); |
| } else if (expression is PrefixedIdentifier) { |
| PrefixedIdentifier identifier = expression as PrefixedIdentifier; |
| expression = new MethodInvocation.full(identifier.prefix, identifier.period, identifier.identifier, argumentList); |
| } else if (expression is PropertyAccess) { |
| PropertyAccess access = expression as PropertyAccess; |
| expression = new MethodInvocation.full(access.target, access.operator, access.propertyName, argumentList); |
| } else { |
| expression = new FunctionExpressionInvocation.full(expression, argumentList); |
| } |
| if (!primaryAllowed) { |
| isOptional = false; |
| } |
| } |
| Expression selectorExpression = parseAssignableSelector(expression, isOptional || (expression is PrefixedIdentifier)); |
| if (identical(selectorExpression, expression)) { |
| if (!isOptional && (expression is PrefixedIdentifier)) { |
| PrefixedIdentifier identifier = expression as PrefixedIdentifier; |
| expression = new PropertyAccess.full(identifier.prefix, identifier.period, identifier.identifier); |
| } |
| return expression; |
| } |
| expression = selectorExpression; |
| isOptional = true; |
| } |
| } |
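| // For illustration: `super.x` takes the 'super' branch above and becomes a PropertyAccess whose |
| // target is a SuperExpression, while a lone identifier such as `x` falls straight out of the |
| // loop and is returned unchanged because no assignable selector is required after a simple |
| // identifier. |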
| /** |
| * Parse an assignable selector. |
| * <pre> |
| * assignableSelector ::= |
| * '[' expression ']' |
| * | '.' identifier |
| * </pre> |
| * @param prefix the expression preceding the selector |
| * @param optional {@code true} if the selector is optional |
| * @return the assignable selector that was parsed |
| */ |
| Expression parseAssignableSelector(Expression prefix, bool optional) { |
| if (matches5(TokenType.OPEN_SQUARE_BRACKET)) { |
| Token leftBracket = andAdvance; |
| Expression index = parseExpression2(); |
| Token rightBracket = expect2(TokenType.CLOSE_SQUARE_BRACKET); |
| return new IndexExpression.forTarget_full(prefix, leftBracket, index, rightBracket); |
| } else if (matches5(TokenType.PERIOD)) { |
| Token period = andAdvance; |
| return new PropertyAccess.full(prefix, period, parseSimpleIdentifier()); |
| } else { |
| if (!optional) { |
| reportError4(ParserErrorCode.MISSING_ASSIGNABLE_SELECTOR, []); |
| } |
| return prefix; |
| } |
| } |
| /** |
| * Parse a bitwise and expression. |
| * <pre> |
| * bitwiseAndExpression ::= |
| * equalityExpression ('&' equalityExpression)* |
| * | 'super' ('&' equalityExpression)+ |
| * </pre> |
| * @return the bitwise and expression that was parsed |
| */ |
| Expression parseBitwiseAndExpression() { |
| Expression expression; |
| if (matches(Keyword.SUPER) && matches4(peek(), TokenType.AMPERSAND)) { |
| expression = new SuperExpression.full(andAdvance); |
| } else { |
| expression = parseEqualityExpression(); |
| } |
| while (matches5(TokenType.AMPERSAND)) { |
| Token operator = andAdvance; |
| expression = new BinaryExpression.full(expression, operator, parseEqualityExpression()); |
| } |
| return expression; |
| } |
| /** |
| * Parse a bitwise or expression. |
| * <pre> |
| * bitwiseOrExpression ::= |
| * bitwiseXorExpression ('|' bitwiseXorExpression)* |
| * | 'super' ('|' bitwiseXorExpression)+ |
| * </pre> |
| * @return the bitwise or expression that was parsed |
| */ |
| Expression parseBitwiseOrExpression() { |
| Expression expression; |
| if (matches(Keyword.SUPER) && matches4(peek(), TokenType.BAR)) { |
| expression = new SuperExpression.full(andAdvance); |
| } else { |
| expression = parseBitwiseXorExpression(); |
| } |
| while (matches5(TokenType.BAR)) { |
| Token operator = andAdvance; |
| expression = new BinaryExpression.full(expression, operator, parseBitwiseXorExpression()); |
| } |
| return expression; |
| } |
| /** |
| * Parse a bitwise exclusive-or expression. |
| * <pre> |
| * bitwiseXorExpression ::= |
| * bitwiseAndExpression ('^' bitwiseAndExpression)* |
| * | 'super' ('^' bitwiseAndExpression)+ |
| * </pre> |
| * @return the bitwise exclusive-or expression that was parsed |
| */ |
| Expression parseBitwiseXorExpression() { |
| Expression expression; |
| if (matches(Keyword.SUPER) && matches4(peek(), TokenType.CARET)) { |
| expression = new SuperExpression.full(andAdvance); |
| } else { |
| expression = parseBitwiseAndExpression(); |
| } |
| while (matches5(TokenType.CARET)) { |
| Token operator = andAdvance; |
| expression = new BinaryExpression.full(expression, operator, parseBitwiseAndExpression()); |
| } |
| return expression; |
| } |
| /** |
| * Parse a block. |
| * <pre> |
| * block ::= |
| * '{' statements '}' |
| * </pre> |
| * @return the block that was parsed |
| */ |
| Block parseBlock() { |
| Token leftBracket = expect2(TokenType.OPEN_CURLY_BRACKET); |
| List<Statement> statements = new List<Statement>(); |
| Token statementStart = _currentToken; |
| while (!matches5(TokenType.EOF) && !matches5(TokenType.CLOSE_CURLY_BRACKET)) { |
| Statement statement = parseStatement2(); |
| if (statement != null) { |
| statements.add(statement); |
| } |
| if (identical(_currentToken, statementStart)) { |
| reportError5(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, [_currentToken.lexeme]); |
| advance(); |
| } |
| statementStart = _currentToken; |
| } |
| Token rightBracket = expect2(TokenType.CLOSE_CURLY_BRACKET); |
| return new Block.full(leftBracket, statements, rightBracket); |
| } |
| /** |
| * Parse a break statement. |
| * <pre> |
| * breakStatement ::= |
| * 'break' identifier? ';' |
| * </pre> |
| * @return the break statement that was parsed |
| */ |
| Statement parseBreakStatement() { |
| Token breakKeyword = expect(Keyword.BREAK); |
| SimpleIdentifier label = null; |
| if (matchesIdentifier()) { |
| label = parseSimpleIdentifier(); |
| } |
| if (!_inLoop && !_inSwitch && label == null) { |
| reportError5(ParserErrorCode.BREAK_OUTSIDE_OF_LOOP, breakKeyword, []); |
| } |
| Token semicolon = expect2(TokenType.SEMICOLON); |
| return new BreakStatement.full(breakKeyword, label, semicolon); |
| } |
| /** |
| * Parse a cascade section. |
| * <pre> |
| * cascadeSection ::= |
| * '..' (cascadeSelector arguments*) (assignableSelector arguments*)* cascadeAssignment? |
| * cascadeSelector ::= |
| * '[' expression ']' |
| * | identifier |
| * cascadeAssignment ::= |
| * assignmentOperator expressionWithoutCascade |
| * </pre> |
| * @return the expression representing the cascaded method invocation |
| */ |
| Expression parseCascadeSection() { |
| Token period = expect2(TokenType.PERIOD_PERIOD); |
| Expression expression = null; |
| SimpleIdentifier functionName = null; |
| if (matchesIdentifier()) { |
| functionName = parseSimpleIdentifier(); |
| } else if (identical(_currentToken.type, TokenType.OPEN_SQUARE_BRACKET)) { |
| Token leftBracket = andAdvance; |
| Expression index = parseExpression2(); |
| Token rightBracket = expect2(TokenType.CLOSE_SQUARE_BRACKET); |
| expression = new IndexExpression.forCascade_full(period, leftBracket, index, rightBracket); |
| period = null; |
| } else { |
| reportError5(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, [_currentToken.lexeme]); |
| return expression; |
| } |
| if (identical(_currentToken.type, TokenType.OPEN_PAREN)) { |
| while (identical(_currentToken.type, TokenType.OPEN_PAREN)) { |
| if (functionName != null) { |
| expression = new MethodInvocation.full(expression, period, functionName, parseArgumentList()); |
| period = null; |
| functionName = null; |
| } else if (expression == null) { |
| return null; |
| } else { |
| expression = new FunctionExpressionInvocation.full(expression, parseArgumentList()); |
| } |
| } |
| } else if (functionName != null) { |
| expression = new PropertyAccess.full(expression, period, functionName); |
| period = null; |
| } |
| bool progress = true; |
| while (progress) { |
| progress = false; |
| Expression selector = parseAssignableSelector(expression, true); |
| if (selector != expression) { |
| expression = selector; |
| progress = true; |
| while (identical(_currentToken.type, TokenType.OPEN_PAREN)) { |
| expression = new FunctionExpressionInvocation.full(expression, parseArgumentList()); |
| } |
| } |
| } |
| if (_currentToken.type.isAssignmentOperator()) { |
| Token operator = andAdvance; |
| ensureAssignable(expression); |
| expression = new AssignmentExpression.full(expression, operator, parseExpressionWithoutCascade()); |
| } |
| return expression; |
| } |
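| // For illustration: in `list..add(1)..length = 0` each '..' section is handled by this method; |
| // the first becomes a cascaded MethodInvocation for add(1), and the second becomes a |
| // PropertyAccess for length that the assignment check at the end wraps in an |
| // AssignmentExpression. |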
| /** |
| * Parse a class declaration. |
| * <pre> |
| * classDeclaration ::= |
| * metadata 'abstract'? 'class' name typeParameterList? (extendsClause withClause?)? implementsClause? '{' classMembers '}' |
| * </pre> |
| * @param commentAndMetadata the metadata to be associated with the member |
| * @param abstractKeyword the token for the keyword 'abstract', or {@code null} if the keyword was |
| * not given |
| * @return the class declaration that was parsed |
| */ |
| ClassDeclaration parseClassDeclaration(CommentAndMetadata commentAndMetadata, Token abstractKeyword) { |
| Token keyword = expect(Keyword.CLASS); |
| SimpleIdentifier name = parseSimpleIdentifier(); |
| String className = name.name; |
| TypeParameterList typeParameters = null; |
| if (matches5(TokenType.LT)) { |
| typeParameters = parseTypeParameterList(); |
| } |
| ExtendsClause extendsClause = null; |
| WithClause withClause = null; |
| ImplementsClause implementsClause = null; |
| bool foundClause = true; |
| while (foundClause) { |
| if (matches(Keyword.EXTENDS)) { |
| if (extendsClause == null) { |
| extendsClause = parseExtendsClause(); |
| if (withClause != null) { |
| reportError5(ParserErrorCode.WITH_BEFORE_EXTENDS, withClause.withKeyword, []); |
| } else if (implementsClause != null) { |
| reportError5(ParserErrorCode.IMPLEMENTS_BEFORE_EXTENDS, implementsClause.keyword, []); |
| } |
| } else { |
| reportError5(ParserErrorCode.MULTIPLE_EXTENDS_CLAUSES, extendsClause.keyword, []); |
| parseExtendsClause(); |
| } |
| } else if (matches(Keyword.WITH)) { |
| if (withClause == null) { |
| withClause = parseWithClause(); |
| if (implementsClause != null) { |
| reportError5(ParserErrorCode.IMPLEMENTS_BEFORE_WITH, implementsClause.keyword, []); |
| } |
| } else { |
| reportError5(ParserErrorCode.MULTIPLE_WITH_CLAUSES, withClause.withKeyword, []); |
| parseWithClause(); |
| } |
| } else if (matches(Keyword.IMPLEMENTS)) { |
| if (implementsClause == null) { |
| implementsClause = parseImplementsClause(); |
| } else { |
| reportError5(ParserErrorCode.MULTIPLE_IMPLEMENTS_CLAUSES, implementsClause.keyword, []); |
| parseImplementsClause(); |
| } |
| } else { |
| foundClause = false; |
| } |
| } |
| if (withClause != null && extendsClause == null) { |
| reportError5(ParserErrorCode.WITH_WITHOUT_EXTENDS, withClause.withKeyword, []); |
| } |
| Token leftBracket = null; |
| List<ClassMember> members = null; |
| Token rightBracket = null; |
| if (matches5(TokenType.OPEN_CURLY_BRACKET)) { |
| leftBracket = expect2(TokenType.OPEN_CURLY_BRACKET); |
| members = parseClassMembers(className, ((leftBracket as BeginToken)).endToken != null); |
| rightBracket = expect2(TokenType.CLOSE_CURLY_BRACKET); |
| } else { |
| leftBracket = createSyntheticToken2(TokenType.OPEN_CURLY_BRACKET); |
| rightBracket = createSyntheticToken2(TokenType.CLOSE_CURLY_BRACKET); |
| reportError4(ParserErrorCode.MISSING_CLASS_BODY, []); |
| } |
| return new ClassDeclaration.full(commentAndMetadata.comment, commentAndMetadata.metadata, abstractKeyword, keyword, name, typeParameters, extendsClause, withClause, implementsClause, leftBracket, members, rightBracket); |
| } |
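| // For illustration: a header such as `class C with M extends B {}` is still parsed into a |
| // single ClassDeclaration, but WITH_BEFORE_EXTENDS is reported at the 'with' keyword by the |
| // clause loop above. |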
| /** |
| * Parse a class member. |
| * <pre> |
| * classMemberDefinition ::= |
| * declaration ';' |
| * | methodSignature functionBody |
| * </pre> |
| * @param className the name of the class containing the member being parsed |
| * @return the class member that was parsed |
| */ |
| ClassMember parseClassMember(String className) { |
| CommentAndMetadata commentAndMetadata = parseCommentAndMetadata(); |
| Modifiers modifiers = parseModifiers(); |
| if (matches(Keyword.VOID)) { |
| TypeName returnType = parseReturnType(); |
| if (matches(Keyword.GET) && matchesIdentifier2(peek())) { |
| validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return parseGetter(commentAndMetadata, modifiers.externalKeyword, modifiers.staticKeyword, returnType); |
| } else if (matches(Keyword.SET) && matchesIdentifier2(peek())) { |
| validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return parseSetter(commentAndMetadata, modifiers.externalKeyword, modifiers.staticKeyword, returnType); |
| } else if (matches(Keyword.OPERATOR) && isOperator(peek())) { |
| validateModifiersForOperator(modifiers); |
| return parseOperator(commentAndMetadata, modifiers.externalKeyword, returnType); |
| } else if (matchesIdentifier() && matchesAny(peek(), [TokenType.OPEN_PAREN, TokenType.OPEN_CURLY_BRACKET, TokenType.FUNCTION])) { |
| validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return parseMethodDeclaration(commentAndMetadata, modifiers.externalKeyword, modifiers.staticKeyword, returnType); |
| } else { |
| if (matchesIdentifier()) { |
| if (matchesAny(peek(), [TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) { |
| reportError(ParserErrorCode.VOID_VARIABLE, returnType, []); |
| return parseInitializedIdentifierList(commentAndMetadata, modifiers.staticKeyword, validateModifiersForField(modifiers), returnType); |
| } |
| } |
| if (isOperator(peek())) { |
| validateModifiersForOperator(modifiers); |
| return parseOperator(commentAndMetadata, modifiers.externalKeyword, returnType); |
| } |
| reportError5(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken, []); |
| return null; |
| } |
| } else if (matches(Keyword.GET) && matchesIdentifier2(peek())) { |
| validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return parseGetter(commentAndMetadata, modifiers.externalKeyword, modifiers.staticKeyword, null); |
| } else if (matches(Keyword.SET) && matchesIdentifier2(peek())) { |
| validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return parseSetter(commentAndMetadata, modifiers.externalKeyword, modifiers.staticKeyword, null); |
| } else if (matches(Keyword.OPERATOR) && isOperator(peek())) { |
| validateModifiersForOperator(modifiers); |
| return parseOperator(commentAndMetadata, modifiers.externalKeyword, null); |
| } else if (!matchesIdentifier()) { |
| if (isOperator(peek())) { |
| validateModifiersForOperator(modifiers); |
| return parseOperator(commentAndMetadata, modifiers.externalKeyword, null); |
| } |
| reportError5(ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken, []); |
| return null; |
| } else if (matches4(peek(), TokenType.PERIOD) && matchesIdentifier2(peek2(2)) && matches4(peek2(3), TokenType.OPEN_PAREN)) { |
| return parseConstructor(commentAndMetadata, modifiers.externalKeyword, validateModifiersForConstructor(modifiers), modifiers.factoryKeyword, parseSimpleIdentifier(), andAdvance, parseSimpleIdentifier(), parseFormalParameterList()); |
| } else if (matches4(peek(), TokenType.OPEN_PAREN)) { |
| SimpleIdentifier methodName = parseSimpleIdentifier(); |
| FormalParameterList parameters = parseFormalParameterList(); |
| if (matches5(TokenType.COLON) || modifiers.factoryKeyword != null || methodName.name == className) { |
| return parseConstructor(commentAndMetadata, modifiers.externalKeyword, validateModifiersForConstructor(modifiers), modifiers.factoryKeyword, methodName, null, null, parameters); |
| } |
| validateModifiersForGetterOrSetterOrMethod(modifiers); |
| validateFormalParameterList(parameters); |
| return parseMethodDeclaration2(commentAndMetadata, modifiers.externalKeyword, modifiers.staticKeyword, null, methodName, parameters); |
| } else if (matchesAny(peek(), [TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) { |
| return parseInitializedIdentifierList(commentAndMetadata, modifiers.staticKeyword, validateModifiersForField(modifiers), null); |
| } |
| TypeName type = parseTypeName(); |
| if (matches(Keyword.GET) && matchesIdentifier2(peek())) { |
| validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return parseGetter(commentAndMetadata, modifiers.externalKeyword, modifiers.staticKeyword, type); |
| } else if (matches(Keyword.SET) && matchesIdentifier2(peek())) { |
| validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return parseSetter(commentAndMetadata, modifiers.externalKeyword, modifiers.staticKeyword, type); |
| } else if (matches(Keyword.OPERATOR) && isOperator(peek())) { |
| validateModifiersForOperator(modifiers); |
| return parseOperator(commentAndMetadata, modifiers.externalKeyword, type); |
| } else if (!matchesIdentifier()) { |
| if (matches5(TokenType.CLOSE_CURLY_BRACKET)) { |
| return parseInitializedIdentifierList(commentAndMetadata, modifiers.staticKeyword, validateModifiersForField(modifiers), type); |
| } |
| if (isOperator(peek())) { |
| validateModifiersForOperator(modifiers); |
| return parseOperator(commentAndMetadata, modifiers.externalKeyword, type); |
| } |
| reportError5(ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken, []); |
| return null; |
| } else if (matches4(peek(), TokenType.OPEN_PAREN)) { |
| validateModifiersForGetterOrSetterOrMethod(modifiers); |
| return parseMethodDeclaration(commentAndMetadata, modifiers.externalKeyword, modifiers.staticKeyword, type); |
| } |
| return parseInitializedIdentifierList(commentAndMetadata, modifiers.staticKeyword, validateModifiersForField(modifiers), type); |
| } |
| /** |
| * Parse a list of class members. |
| * <pre> |
| * classMembers ::= |
| * (metadata memberDefinition)* |
| * </pre> |
| * @param className the name of the class whose members are being parsed |
| * @param balancedBrackets {@code true} if the opening and closing brackets for the class are |
| * balanced |
| * @return the list of class members that were parsed |
| */ |
| List<ClassMember> parseClassMembers(String className, bool balancedBrackets) { |
| List<ClassMember> members = new List<ClassMember>(); |
| Token memberStart = _currentToken; |
| while (!matches5(TokenType.EOF) && !matches5(TokenType.CLOSE_CURLY_BRACKET) && (balancedBrackets || (!matches(Keyword.CLASS) && !matches(Keyword.TYPEDEF)))) { |
| if (matches5(TokenType.SEMICOLON)) { |
| reportError5(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, [_currentToken.lexeme]); |
| advance(); |
| } else { |
| ClassMember member = parseClassMember(className); |
| if (member != null) { |
| members.add(member); |
| } |
| } |
| if (identical(_currentToken, memberStart)) { |
| reportError5(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, [_currentToken.lexeme]); |
| advance(); |
| } |
| memberStart = _currentToken; |
| } |
| return members; |
| } |
| /** |
| * Parse a class type alias. |
| * <pre> |
| * classTypeAlias ::= |
| * identifier typeParameters? '=' 'abstract'? mixinApplication |
| * mixinApplication ::= |
| * type withClause implementsClause? ';' |
| * </pre> |
| * @param commentAndMetadata the metadata to be associated with the member |
| * @param keyword the token representing the 'typedef' keyword |
| * @return the class type alias that was parsed |
| */ |
| ClassTypeAlias parseClassTypeAlias(CommentAndMetadata commentAndMetadata, Token keyword) { |
| SimpleIdentifier name = parseSimpleIdentifier(); |
| TypeParameterList typeParameters = null; |
| if (matches5(TokenType.LT)) { |
| typeParameters = parseTypeParameterList(); |
| } |
| Token equals = expect2(TokenType.EQ); |
| Token abstractKeyword = null; |
| if (matches(Keyword.ABSTRACT)) { |
| abstractKeyword = andAdvance; |
| } |
| TypeName superclass = parseTypeName(); |
| WithClause withClause = null; |
| if (matches(Keyword.WITH)) { |
| withClause = parseWithClause(); |
| } |
| ImplementsClause implementsClause = null; |
| if (matches(Keyword.IMPLEMENTS)) { |
| implementsClause = parseImplementsClause(); |
| } |
| Token semicolon = expect2(TokenType.SEMICOLON); |
| return new ClassTypeAlias.full(commentAndMetadata.comment, commentAndMetadata.metadata, keyword, name, typeParameters, equals, abstractKeyword, superclass, withClause, implementsClause, semicolon); |
| } |
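| // For illustration, matching the grammar above: `typedef C<T> = abstract B<T> with M;` is |
| // parsed here after the caller has consumed the 'typedef' token and passed it in as `keyword`. |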
| /** |
| * Parse a list of combinators in a directive. |
| * <pre> |
| * combinator ::= |
| * 'show' identifier (',' identifier)* |
| * | 'hide' identifier (',' identifier)* |
| * </pre> |
| * @return the combinators that were parsed |
| */ |
| List<Combinator> parseCombinators() { |
| List<Combinator> combinators = new List<Combinator>(); |
| while (matches2(_SHOW) || matches2(_HIDE)) { |
| Token keyword = expect2(TokenType.IDENTIFIER); |
| if (keyword.lexeme == _SHOW) { |
| List<SimpleIdentifier> shownNames = parseIdentifierList(); |
| combinators.add(new ShowCombinator.full(keyword, shownNames)); |
| } else { |
| List<SimpleIdentifier> hiddenNames = parseIdentifierList(); |
| combinators.add(new HideCombinator.full(keyword, hiddenNames)); |
| } |
| } |
| return combinators; |
| } |
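| // For illustration: the combinators in `import 'a.dart' show x, y hide z;` are parsed by the |
| // loop above into a ShowCombinator for `x, y` followed by a HideCombinator for `z`. |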
| /** |
| * Parse the documentation comment and metadata preceding a declaration. This method allows any |
| * number of documentation comments to occur before, after or between the metadata, but only |
| * returns the last (right-most) documentation comment that is found. |
| * <pre> |
| * metadata ::= |
| * annotation |
| * </pre> |
| * @return the documentation comment and metadata that were parsed |
| */ |
| CommentAndMetadata parseCommentAndMetadata() { |
| Comment comment = parseDocumentationComment(); |
| List<Annotation> metadata = new List<Annotation>(); |
| while (matches5(TokenType.AT)) { |
| metadata.add(parseAnnotation()); |
| Comment optionalComment = parseDocumentationComment(); |
| if (optionalComment != null) { |
| comment = optionalComment; |
| } |
| } |
| return new CommentAndMetadata(comment, metadata); |
| } |
| /** |
| * Parse a comment reference from the source between square brackets. |
| * <pre> |
| * commentReference ::= |
| * 'new'? prefixedIdentifier |
| * </pre> |
| * @param referenceSource the source occurring between the square brackets within a documentation |
| * comment |
| * @param sourceOffset the offset of the first character of the reference source |
| * @return the comment reference that was parsed |
| */ |
| CommentReference parseCommentReference(String referenceSource, int sourceOffset) { |
| if (referenceSource.length == 0) { |
| return null; |
| } |
| try { |
| List<bool> errorFound = [false]; |
| AnalysisErrorListener listener = new AnalysisErrorListener_12(errorFound); |
| StringScanner scanner = new StringScanner(null, referenceSource, listener); |
| scanner.setSourceStart(1, 1, sourceOffset); |
| Token firstToken = scanner.tokenize(); |
| if (!errorFound[0]) { |
| Token newKeyword = null; |
| if (matches3(firstToken, Keyword.NEW)) { |
| newKeyword = firstToken; |
| firstToken = firstToken.next; |
| } |
| if (matchesIdentifier2(firstToken)) { |
| Token secondToken = firstToken.next; |
| Token thirdToken = secondToken.next; |
| Token nextToken; |
| Identifier identifier; |
| if (matches4(secondToken, TokenType.PERIOD) && matchesIdentifier2(thirdToken)) { |
| identifier = new PrefixedIdentifier.full(new SimpleIdentifier.full(firstToken), secondToken, new SimpleIdentifier.full(thirdToken)); |
| nextToken = thirdToken.next; |
| } else { |
| identifier = new SimpleIdentifier.full(firstToken); |
| nextToken = firstToken.next; |
| } |
| if (nextToken.type != TokenType.EOF) { |
| // Any tokens following the referenced identifier are ignored. |
| } |
| return new CommentReference.full(newKeyword, identifier); |
| } else if (matches3(firstToken, Keyword.THIS) || matches3(firstToken, Keyword.NULL) || matches3(firstToken, Keyword.TRUE) || matches3(firstToken, Keyword.FALSE)) { |
| return null; |
| } else if (matches4(firstToken, TokenType.STRING)) { |
| // String literals between brackets are not treated as references. |
| } else { |
| // Any other form is not a recognizable reference; fall through and return null. |
| } |
| } |
| } catch (exception) { |
| // Ignore any exception; a malformed reference simply produces no CommentReference. |
| } |
| return null; |
| } |
| /** |
| * Parse all of the comment references occurring in the given array of documentation comments. |
| * <pre> |
| * commentReference ::= |
| * '[' 'new'? qualified ']' libraryReference? |
| * libraryReference ::= |
| * '(' stringLiteral ')' |
| * </pre> |
| * @param tokens the comment tokens representing the documentation comments to be parsed |
| * @return the comment references that were parsed |
| */ |
| List<CommentReference> parseCommentReferences(List<Token> tokens) { |
| List<CommentReference> references = new List<CommentReference>(); |
| for (Token token in tokens) { |
| String comment = token.lexeme; |
| int leftIndex = comment.indexOf('['); |
| while (leftIndex >= 0) { |
| int rightIndex = comment.indexOf(']', leftIndex); |
| if (rightIndex >= 0) { |
| int firstChar = comment.codeUnitAt(leftIndex + 1); |
| if (firstChar != 0x27 && firstChar != 0x22 && firstChar != 0x3A) { |
| CommentReference reference = parseCommentReference(comment.substring(leftIndex + 1, rightIndex), token.offset + leftIndex + 1); |
| if (reference != null) { |
| references.add(reference); |
| } |
| } |
| } else { |
| rightIndex = leftIndex + 1; |
| } |
| leftIndex = comment.indexOf('[', rightIndex); |
| } |
| } |
| return references; |
| } |
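| // For illustration: in a doc comment such as |
| //   /** Returns a [List] of values; see also [:code:] and ['text']. */ |
| // only [List] yields a CommentReference, because bracketed text starting with a quote |
| // or a colon is skipped by the code-unit check above. |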
| /** |
| * Parse a compilation unit. |
| * <p> |
| * Specified: |
| * <pre> |
| * compilationUnit ::= |
| * scriptTag? directive* topLevelDeclaration* |
| * </pre> |
| * Actual: |
| * <pre> |
| * compilationUnit ::= |
| * scriptTag? topLevelElement* |
| * topLevelElement ::= |
| * directive |
| * | topLevelDeclaration |
| * </pre> |
| * @return the compilation unit that was parsed |
| */ |
| CompilationUnit parseCompilationUnit2() { |
| Token firstToken = _currentToken; |
| ScriptTag scriptTag = null; |
| if (matches5(TokenType.SCRIPT_TAG)) { |
| scriptTag = new ScriptTag.full(andAdvance); |
| } |
| bool libraryDirectiveFound = false; |
| bool partOfDirectiveFound = false; |
| bool partDirectiveFound = false; |
| bool directiveFoundAfterDeclaration = false; |
| List<Directive> directives = new List<Directive>(); |
| List<CompilationUnitMember> declarations = new List<CompilationUnitMember>(); |
| Token memberStart = _currentToken; |
| while (!matches5(TokenType.EOF)) { |
| CommentAndMetadata commentAndMetadata = parseCommentAndMetadata(); |
| if ((matches(Keyword.IMPORT) || matches(Keyword.EXPORT) || matches(Keyword.LIBRARY) || matches(Keyword.PART)) && !matches4(peek(), TokenType.PERIOD) && !matches4(peek(), TokenType.LT)) { |
| Directive directive = parseDirective(commentAndMetadata); |
| if (declarations.length > 0 && !directiveFoundAfterDeclaration) { |
| reportError4(ParserErrorCode.DIRECTIVE_AFTER_DECLARATION, []); |
| directiveFoundAfterDeclaration = true; |
| } |
| if (directive is LibraryDirective) { |
| if (libraryDirectiveFound) { |
| reportError4(ParserErrorCode.MULTIPLE_LIBRARY_DIRECTIVES, []); |
| } else { |
| if (directives.length > 0) { |
| reportError4(ParserErrorCode.LIBRARY_DIRECTIVE_NOT_FIRST, []); |
| } |
| libraryDirectiveFound = true; |
| } |
| } else if (directive is PartDirective) { |
| partDirectiveFound = true; |
| } else if (partDirectiveFound) { |
| if (directive is ExportDirective) { |
| reportError4(ParserErrorCode.EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE, []); |
| } else if (directive is ImportDirective) { |
| reportError4(ParserErrorCode.IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE, []); |
| } |
| } |
| if (directive is PartOfDirective) { |
| if (partOfDirectiveFound) { |
| reportError4(ParserErrorCode.MULTIPLE_PART_OF_DIRECTIVES, []); |
| } else { |
| for (Directive precedingDirective in directives) { |
| reportError5(ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART, precedingDirective.keyword, []); |
| } |
| partOfDirectiveFound = true; |
| } |
| } else { |
| if (partOfDirectiveFound) { |
| reportError5(ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART, directive.keyword, []); |
| } |
| } |
| directives.add(directive); |
| } else if (matches5(TokenType.SEMICOLON)) { |
| reportError5(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, [_currentToken.lexeme]); |
| advance(); |
| } else { |
| CompilationUnitMember member = parseCompilationUnitMember(commentAndMetadata); |
| if (member != null) { |
| declarations.add(member); |
| } |
| } |
| if (identical(_currentToken, memberStart)) { |
| reportError5(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, [_currentToken.lexeme]); |
| advance(); |
| } |
| memberStart = _currentToken; |
| } |
| return new CompilationUnit.full(firstToken, scriptTag, directives, declarations, _currentToken); |
| } |
| /** |
| * Parse a compilation unit member. |
| * <pre> |
| * compilationUnitMember ::= |
| * classDefinition |
| * | functionTypeAlias |
| * | external functionSignature |
| * | external getterSignature |
| * | external setterSignature |
| * | functionSignature functionBody |
| * | returnType? getOrSet identifier formalParameterList functionBody |
| * | (final | const) type? staticFinalDeclarationList ';' |
| * | variableDeclaration ';' |
| * </pre> |
| * @param commentAndMetadata the metadata to be associated with the member |
| * @return the compilation unit member that was parsed |
| */ |
| CompilationUnitMember parseCompilationUnitMember(CommentAndMetadata commentAndMetadata) { |
| Modifiers modifiers = parseModifiers(); |
| if (matches(Keyword.CLASS)) { |
| return parseClassDeclaration(commentAndMetadata, validateModifiersForClass(modifiers)); |
| } else if (matches(Keyword.TYPEDEF) && !matches4(peek(), TokenType.PERIOD) && !matches4(peek(), TokenType.LT)) { |
| validateModifiersForTypedef(modifiers); |
| return parseTypeAlias(commentAndMetadata); |
| } |
| if (matches(Keyword.VOID)) { |
| TypeName returnType = parseReturnType(); |
| if ((matches(Keyword.GET) || matches(Keyword.SET)) && matchesIdentifier2(peek())) { |
| validateModifiersForTopLevelFunction(modifiers); |
| return parseFunctionDeclaration(commentAndMetadata, modifiers.externalKeyword, null); |
| } else if (matches(Keyword.OPERATOR) && isOperator(peek())) { |
| reportError5(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken, []); |
| return null; |
| } else if (matchesIdentifier() && matchesAny(peek(), [TokenType.OPEN_PAREN, TokenType.OPEN_CURLY_BRACKET, TokenType.FUNCTION])) { |
| validateModifiersForTopLevelFunction(modifiers); |
| return parseFunctionDeclaration(commentAndMetadata, modifiers.externalKeyword, returnType); |
| } else { |
| if (matchesIdentifier()) { |
| if (matchesAny(peek(), [TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) { |
| reportError(ParserErrorCode.VOID_VARIABLE, returnType, []); |
| return new TopLevelVariableDeclaration.full(commentAndMetadata.comment, commentAndMetadata.metadata, parseVariableDeclarationList2(null, validateModifiersForTopLevelVariable(modifiers), null), expect2(TokenType.SEMICOLON)); |
| } |
| } |
| reportError5(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken, []); |
| return null; |
| } |
| } else if ((matches(Keyword.GET) || matches(Keyword.SET)) && matchesIdentifier2(peek())) { |
| validateModifiersForTopLevelFunction(modifiers); |
| return parseFunctionDeclaration(commentAndMetadata, modifiers.externalKeyword, null); |
| } else if (matches(Keyword.OPERATOR) && isOperator(peek())) { |
| reportError5(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken, []); |
| return null; |
| } else if (!matchesIdentifier()) { |
| reportError5(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken, []); |
| return null; |
| } else if (matches4(peek(), TokenType.OPEN_PAREN)) { |
| validateModifiersForTopLevelFunction(modifiers); |
| return parseFunctionDeclaration(commentAndMetadata, modifiers.externalKeyword, null); |
| } else if (matchesAny(peek(), [TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) { |
| return new TopLevelVariableDeclaration.full(commentAndMetadata.comment, commentAndMetadata.metadata, parseVariableDeclarationList2(null, validateModifiersForTopLevelVariable(modifiers), null), expect2(TokenType.SEMICOLON)); |
| } |
| TypeName returnType = parseReturnType(); |
| if (matches(Keyword.GET) || matches(Keyword.SET)) { |
| validateModifiersForTopLevelFunction(modifiers); |
| return parseFunctionDeclaration(commentAndMetadata, modifiers.externalKeyword, returnType); |
| } else if (matches(Keyword.OPERATOR) && isOperator(peek())) { |
| reportError5(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken, []); |
| return null; |
| } else if (!matchesIdentifier()) { |
| reportError5(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken, []); |
| return null; |
| } |
| if (matchesAny(peek(), [TokenType.OPEN_PAREN, TokenType.FUNCTION, TokenType.OPEN_CURLY_BRACKET])) { |
| validateModifiersForTopLevelFunction(modifiers); |
| return parseFunctionDeclaration(commentAndMetadata, modifiers.externalKeyword, returnType); |
| } |
| return new TopLevelVariableDeclaration.full(commentAndMetadata.comment, commentAndMetadata.metadata, parseVariableDeclarationList2(null, validateModifiersForTopLevelVariable(modifiers), returnType), expect2(TokenType.SEMICOLON)); |
| } |
| /** |
| * Parse a conditional expression. |
| * <pre> |
| * conditionalExpression ::= |
| * logicalOrExpression ('?' expressionWithoutCascade ':' expressionWithoutCascade)? |
| * </pre> |
| * @return the conditional expression that was parsed |
| */ |
| Expression parseConditionalExpression() { |
| Expression condition = parseLogicalOrExpression(); |
| if (!matches5(TokenType.QUESTION)) { |
| return condition; |
| } |
| Token question = andAdvance; |
| Expression thenExpression = parseExpressionWithoutCascade(); |
| Token colon = expect2(TokenType.COLON); |
| Expression elseExpression = parseExpressionWithoutCascade(); |
| return new ConditionalExpression.full(condition, question, thenExpression, colon, elseExpression); |
| } |
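| // For illustration: 'isEmpty ? 0 : length' parses the condition as a logicalOrExpression |
| // and, because '?' follows, consumes the then and else branches as expressions without |
| // cascades. The identifiers are hypothetical. |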
| /** |
| * Parse a const expression. |
| * <pre> |
| * constExpression ::= |
| * instanceCreationExpression |
| * | listLiteral |
| * | mapLiteral |
| * </pre> |
| * @return the const expression that was parsed |
| */ |
| Expression parseConstExpression() { |
| Token keyword = expect(Keyword.CONST); |
| if (matches5(TokenType.OPEN_SQUARE_BRACKET) || matches5(TokenType.INDEX)) { |
| return parseListLiteral(keyword, null); |
| } else if (matches5(TokenType.OPEN_CURLY_BRACKET)) { |
| return parseMapLiteral(keyword, null); |
| } else if (matches5(TokenType.LT)) { |
| return parseListOrMapLiteral(keyword); |
| } |
| return parseInstanceCreationExpression(keyword); |
| } |
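| // For illustration (hypothetical snippets): 'const [1, 2]' and 'const {"k": 1}' are routed |
| // to the list and map literal parsers, 'const <int>[]' goes through parseListOrMapLiteral |
| // because of the '<', and 'const Point(0, 0)' falls through to an instance creation. |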
| ConstructorDeclaration parseConstructor(CommentAndMetadata commentAndMetadata, Token externalKeyword, Token constKeyword, Token factoryKeyword, SimpleIdentifier returnType, Token period, SimpleIdentifier name, FormalParameterList parameters) { |
| bool bodyAllowed = externalKeyword == null; |
| Token separator = null; |
| List<ConstructorInitializer> initializers = null; |
| if (matches5(TokenType.COLON)) { |
| separator = andAdvance; |
| initializers = new List<ConstructorInitializer>(); |
| do { |
| if (matches(Keyword.THIS)) { |
| if (matches4(peek(), TokenType.OPEN_PAREN)) { |
| bodyAllowed = false; |
| initializers.add(parseRedirectingConstructorInvocation()); |
| } else if (matches4(peek(), TokenType.PERIOD) && matches4(peek2(3), TokenType.OPEN_PAREN)) { |
| bodyAllowed = false; |
| initializers.add(parseRedirectingConstructorInvocation()); |
| } else { |
| initializers.add(parseConstructorFieldInitializer()); |
| } |
| } else if (matches(Keyword.SUPER)) { |
| initializers.add(parseSuperConstructorInvocation()); |
| } else { |
| initializers.add(parseConstructorFieldInitializer()); |
| } |
| } while (optional(TokenType.COMMA)); |
| } |
| ConstructorName redirectedConstructor = null; |
| FunctionBody body; |
| if (matches5(TokenType.EQ)) { |
| separator = andAdvance; |
| redirectedConstructor = parseConstructorName(); |
| body = new EmptyFunctionBody.full(expect2(TokenType.SEMICOLON)); |
| } else { |
| body = parseFunctionBody(true, false); |
| if (!bodyAllowed && body is! EmptyFunctionBody) { |
| reportError4(ParserErrorCode.EXTERNAL_CONSTRUCTOR_WITH_BODY, []); |
| } |
| } |
| return new ConstructorDeclaration.full(commentAndMetadata.comment, commentAndMetadata.metadata, externalKeyword, constKeyword, factoryKeyword, returnType, period, name, parameters, separator, initializers, redirectedConstructor, body); |
| } |
| /** |
| * Parse a field initializer within a constructor. |
| * <pre> |
| * fieldInitializer: |
| * ('this' '.')? identifier '=' conditionalExpression cascadeSection* |
| * </pre> |
| * @return the field initializer that was parsed |
| */ |
| ConstructorFieldInitializer parseConstructorFieldInitializer() { |
| Token keyword = null; |
| Token period = null; |
| if (matches(Keyword.THIS)) { |
| keyword = andAdvance; |
| period = expect2(TokenType.PERIOD); |
| } |
| SimpleIdentifier fieldName = parseSimpleIdentifier(); |
| Token equals = expect2(TokenType.EQ); |
| Expression expression = parseConditionalExpression(); |
| TokenType tokenType = _currentToken.type; |
| if (identical(tokenType, TokenType.PERIOD_PERIOD)) { |
| List<Expression> cascadeSections = new List<Expression>(); |
| while (identical(tokenType, TokenType.PERIOD_PERIOD)) { |
| Expression section = parseCascadeSection(); |
| if (section != null) { |
| cascadeSections.add(section); |
| } |
| tokenType = _currentToken.type; |
| } |
| expression = new CascadeExpression.full(expression, cascadeSections); |
| } |
| return new ConstructorFieldInitializer.full(keyword, period, fieldName, equals, expression); |
| } |
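| // For illustration: an initializer such as 'this.x = 0' produces the keyword, period and |
| // field name, while 'x = controller..start()' (a hypothetical example) wraps the |
| // initializing expression in a CascadeExpression built from the '..' sections. |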
| /** |
| * Parse the name of a constructor. |
| * <pre> |
| * constructorName: |
| * type ('.' identifier)? |
| * </pre> |
| * @return the constructor name that was parsed |
| */ |
| ConstructorName parseConstructorName() { |
| TypeName type = parseTypeName(); |
| Token period = null; |
| SimpleIdentifier name = null; |
| if (matches5(TokenType.PERIOD)) { |
| period = andAdvance; |
| name = parseSimpleIdentifier(); |
| } |
| return new ConstructorName.full(type, period, name); |
| } |
| /** |
| * Parse a continue statement. |
| * <pre> |
| * continueStatement ::= |
| * 'continue' identifier? ';' |
| * </pre> |
| * @return the continue statement that was parsed |
| */ |
| Statement parseContinueStatement() { |
| Token continueKeyword = expect(Keyword.CONTINUE); |
| if (!_inLoop && !_inSwitch) { |
| reportError5(ParserErrorCode.CONTINUE_OUTSIDE_OF_LOOP, continueKeyword, []); |
| } |
| SimpleIdentifier label = null; |
| if (matchesIdentifier()) { |
| label = parseSimpleIdentifier(); |
| } |
| if (_inSwitch && !_inLoop && label == null) { |
| reportError5(ParserErrorCode.CONTINUE_WITHOUT_LABEL_IN_CASE, continueKeyword, []); |
| } |
| Token semicolon = expect2(TokenType.SEMICOLON); |
| return new ContinueStatement.full(continueKeyword, label, semicolon); |
| } |
| /** |
| * Parse a directive. |
| * <pre> |
| * directive ::= |
| * exportDirective |
| * | libraryDirective |
| * | importDirective |
| * | partDirective |
| * </pre> |
| * @param commentAndMetadata the metadata to be associated with the directive |
| * @return the directive that was parsed |
| */ |
| Directive parseDirective(CommentAndMetadata commentAndMetadata) { |
| if (matches(Keyword.IMPORT)) { |
| return parseImportDirective(commentAndMetadata); |
| } else if (matches(Keyword.EXPORT)) { |
| return parseExportDirective(commentAndMetadata); |
| } else if (matches(Keyword.LIBRARY)) { |
| return parseLibraryDirective(commentAndMetadata); |
| } else if (matches(Keyword.PART)) { |
| return parsePartDirective(commentAndMetadata); |
| } else { |
| return null; |
| } |
| } |
| /** |
| * Parse a documentation comment. |
| * <pre> |
| * documentationComment ::= |
| * multiLineComment? |
| * | singleLineComment* |
| * </pre> |
| * @return the documentation comment that was parsed, or {@code null} if there was no comment |
| */ |
| Comment parseDocumentationComment() { |
| List<Token> commentTokens = new List<Token>(); |
| Token commentToken = _currentToken.precedingComments; |
| while (commentToken != null) { |
| if (identical(commentToken.type, TokenType.SINGLE_LINE_COMMENT)) { |
| if (commentToken.lexeme.startsWith("///")) { |
| if (commentTokens.length == 1 && commentTokens[0].lexeme.startsWith("/**")) { |
| commentTokens.clear(); |
| } |
| commentTokens.add(commentToken); |
| } |
| } else { |
| if (commentToken.lexeme.startsWith("/**")) { |
| commentTokens.clear(); |
| commentTokens.add(commentToken); |
| } |
| } |
| commentToken = commentToken.next; |
| } |
| if (commentTokens.isEmpty) { |
| return null; |
| } |
| List<Token> tokens = new List.from(commentTokens); |
| List<CommentReference> references = parseCommentReferences(tokens); |
| return Comment.createDocumentationComment2(tokens, references); |
| } |
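| // For illustration: a run of '///' lines forms a single documentation comment, a later |
| // '/**' block replaces whatever has been collected so far, and plain '//' comments are |
| // ignored entirely. |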
| /** |
| * Parse a do statement. |
| * <pre> |
| * doStatement ::= |
| * 'do' statement 'while' '(' expression ')' ';' |
| * </pre> |
| * @return the do statement that was parsed |
| */ |
| Statement parseDoStatement() { |
| bool wasInLoop = _inLoop; |
| _inLoop = true; |
| try { |
| Token doKeyword = expect(Keyword.DO); |
| Statement body = parseStatement2(); |
| Token whileKeyword = expect(Keyword.WHILE); |
| Token leftParenthesis = expect2(TokenType.OPEN_PAREN); |
| Expression condition = parseExpression2(); |
| Token rightParenthesis = expect2(TokenType.CLOSE_PAREN); |
| Token semicolon = expect2(TokenType.SEMICOLON); |
| return new DoStatement.full(doKeyword, body, whileKeyword, leftParenthesis, condition, rightParenthesis, semicolon); |
| } finally { |
| _inLoop = wasInLoop; |
| } |
| } |
| /** |
| * Parse an empty statement. |
| * <pre> |
| * emptyStatement ::= |
| * ';' |
| * </pre> |
| * @return the empty statement that was parsed |
| */ |
| Statement parseEmptyStatement() => new EmptyStatement.full(andAdvance); |
| /** |
| * Parse an equality expression. |
| * <pre> |
| * equalityExpression ::= |
| * relationalExpression (equalityOperator relationalExpression)? |
| * | 'super' equalityOperator relationalExpression |
| * </pre> |
| * @return the equality expression that was parsed |
| */ |
| Expression parseEqualityExpression() { |
| Expression expression; |
| if (matches(Keyword.SUPER) && _currentToken.next.type.isEqualityOperator()) { |
| expression = new SuperExpression.full(andAdvance); |
| } else { |
| expression = parseRelationalExpression(); |
| } |
| while (_currentToken.type.isEqualityOperator()) { |
| Token operator = andAdvance; |
| expression = new BinaryExpression.full(expression, operator, parseRelationalExpression()); |
| } |
| return expression; |
| } |
| /** |
| * Parse an export directive. |
| * <pre> |
| * exportDirective ::= |
| * metadata 'export' stringLiteral combinator* ';' |
| * </pre> |
| * @param commentAndMetadata the metadata to be associated with the directive |
| * @return the export directive that was parsed |
| */ |
| ExportDirective parseExportDirective(CommentAndMetadata commentAndMetadata) { |
| Token exportKeyword = expect(Keyword.EXPORT); |
| StringLiteral libraryUri = parseStringLiteral(); |
| List<Combinator> combinators = parseCombinators(); |
| Token semicolon = expect2(TokenType.SEMICOLON); |
| return new ExportDirective.full(commentAndMetadata.comment, commentAndMetadata.metadata, exportKeyword, libraryUri, combinators, semicolon); |
| } |
| /** |
| * Parse an expression, which may include one or more cascade sections. |
| * <pre> |
| * expression ::= |
| * assignableExpression assignmentOperator expression |
| * | conditionalExpression cascadeSection* |
| * | throwExpression |
| * </pre> |
| * @return the expression that was parsed |
| */ |
| Expression parseExpression2() { |
| if (matches(Keyword.THROW)) { |
| return parseThrowExpression(); |
| } else if (matches(Keyword.RETHROW)) { |
| return parseRethrowExpression(); |
| } |
| Expression expression = parseConditionalExpression(); |
| TokenType tokenType = _currentToken.type; |
| if (identical(tokenType, TokenType.PERIOD_PERIOD)) { |
| List<Expression> cascadeSections = new List<Expression>(); |
| while (identical(tokenType, TokenType.PERIOD_PERIOD)) { |
| Expression section = parseCascadeSection(); |
| if (section != null) { |
| cascadeSections.add(section); |
| } |
| tokenType = _currentToken.type; |
| } |
| return new CascadeExpression.full(expression, cascadeSections); |
| } else if (tokenType.isAssignmentOperator()) { |
| Token operator = andAdvance; |
| ensureAssignable(expression); |
| return new AssignmentExpression.full(expression, operator, parseExpression2()); |
| } |
| return expression; |
| } |
| /** |
| * Parse a list of expressions. |
| * <pre> |
| * expressionList ::= |
| * expression (',' expression)* |
| * </pre> |
| * @return the expressions that were parsed |
| */ |
| List<Expression> parseExpressionList() { |
| List<Expression> expressions = new List<Expression>(); |
| expressions.add(parseExpression2()); |
| while (optional(TokenType.COMMA)) { |
| expressions.add(parseExpression2()); |
| } |
| return expressions; |
| } |
| /** |
| * Parse an expression that does not contain any cascades. |
| * <pre> |
| * expressionWithoutCascade ::= |
| * assignableExpression assignmentOperator expressionWithoutCascade |
| * | conditionalExpression |
| * | throwExpressionWithoutCascade |
| * </pre> |
| * @return the expression that was parsed |
| */ |
| Expression parseExpressionWithoutCascade() { |
| if (matches(Keyword.THROW)) { |
| return parseThrowExpressionWithoutCascade(); |
| } else if (matches(Keyword.RETHROW)) { |
| return parseRethrowExpression(); |
| } |
| Expression expression = parseConditionalExpression(); |
| if (_currentToken.type.isAssignmentOperator()) { |
| Token operator = andAdvance; |
| ensureAssignable(expression); |
| expression = new AssignmentExpression.full(expression, operator, parseExpressionWithoutCascade()); |
| } |
| return expression; |
| } |
| /** |
| * Parse a class extends clause. |
| * <pre> |
| * classExtendsClause ::= |
| * 'extends' type |
| * </pre> |
| * @return the class extends clause that was parsed |
| */ |
| ExtendsClause parseExtendsClause() { |
| Token keyword = expect(Keyword.EXTENDS); |
| TypeName superclass = parseTypeName(); |
| return new ExtendsClause.full(keyword, superclass); |
| } |
| /** |
| * Parse the 'final', 'const', 'var' or type preceding a variable declaration. |
| * <pre> |
| * finalConstVarOrType ::= |
| * 'final' type? |
| * | 'const' type? |
| * | 'var' |
| * | type |
| * </pre> |
| * @param optional {@code true} if the keyword and type are optional |
| * @return the 'final', 'const', 'var' or type that was parsed |
| */ |
| FinalConstVarOrType parseFinalConstVarOrType(bool optional) { |
| Token keyword = null; |
| TypeName type = null; |
| if (matches(Keyword.FINAL) || matches(Keyword.CONST)) { |
| keyword = andAdvance; |
| if (matchesIdentifier2(peek()) || matches4(peek(), TokenType.LT) || matches3(peek(), Keyword.THIS)) { |
| type = parseTypeName(); |
| } |
| } else if (matches(Keyword.VAR)) { |
| keyword = andAdvance; |
| } else { |
| if (matchesIdentifier2(peek()) || matches4(peek(), TokenType.LT) || matches3(peek(), Keyword.THIS) || (matches4(peek(), TokenType.PERIOD) && matchesIdentifier2(peek2(2)) && (matchesIdentifier2(peek2(3)) || matches4(peek2(3), TokenType.LT) || matches3(peek2(3), Keyword.THIS)))) { |
| type = parseReturnType(); |
| } else if (!optional) { |
| reportError4(ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE, []); |
| } |
| } |
| return new FinalConstVarOrType(keyword, type); |
| } |
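| // For illustration: 'final String name' yields both a keyword and a type, 'var name' |
| // yields only the keyword, 'String name' yields only the type, and a bare 'name' with |
| // optional set to false reports MISSING_CONST_FINAL_VAR_OR_TYPE. The identifiers are |
| // hypothetical. |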
| /** |
| * Parse a formal parameter. The given {@code kind} indicates whether the parameter is required, |
| * optional positional or named, and therefore which default-value separator is allowed. |
| * <pre> |
| * defaultFormalParameter ::= |
| * normalFormalParameter ('=' expression)? |
| * defaultNamedParameter ::= |
| * normalFormalParameter (':' expression)? |
| * </pre> |
| * @param kind the kind of parameter being expected based on the presence or absence of group |
| * delimiters |
| * @return the formal parameter that was parsed |
| */ |
| FormalParameter parseFormalParameter(ParameterKind kind) { |
| NormalFormalParameter parameter = parseNormalFormalParameter(); |
| if (matches5(TokenType.EQ)) { |
| Token separator = andAdvance; |
| Expression defaultValue = parseExpression2(); |
| if (identical(kind, ParameterKind.NAMED)) { |
| reportError5(ParserErrorCode.WRONG_SEPARATOR_FOR_NAMED_PARAMETER, separator, []); |
| } else if (identical(kind, ParameterKind.REQUIRED)) { |
| reportError(ParserErrorCode.POSITIONAL_PARAMETER_OUTSIDE_GROUP, parameter, []); |
| } |
| return new DefaultFormalParameter.full(parameter, kind, separator, defaultValue); |
| } else if (matches5(TokenType.COLON)) { |
| Token separator = andAdvance; |
| Expression defaultValue = parseExpression2(); |
| if (identical(kind, ParameterKind.POSITIONAL)) { |
| reportError5(ParserErrorCode.WRONG_SEPARATOR_FOR_POSITIONAL_PARAMETER, separator, []); |
| } else if (identical(kind, ParameterKind.REQUIRED)) { |
| reportError(ParserErrorCode.NAMED_PARAMETER_OUTSIDE_GROUP, parameter, []); |
| } |
| return new DefaultFormalParameter.full(parameter, kind, separator, defaultValue); |
| } else if (kind != ParameterKind.REQUIRED) { |
| return new DefaultFormalParameter.full(parameter, kind, null, null); |
| } |
| return parameter; |
| } |
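| // For illustration: inside '[...]' a default uses '=' as in '[int x = 0]', inside '{...}' |
| // it uses ':' as in '{int x: 0}', and using the wrong separator (or a default on a |
| // required parameter) is reported but still parsed into a DefaultFormalParameter. |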
| /** |
| * Parse a list of formal parameters. |
| * <pre> |
| * formalParameterList ::= |
| * '(' ')' |
| * | '(' normalFormalParameters (',' optionalFormalParameters)? ')' |
| * | '(' optionalFormalParameters ')' |
| * normalFormalParameters ::= |
| * normalFormalParameter (',' normalFormalParameter)* |
| * optionalFormalParameters ::= |
| * optionalPositionalFormalParameters |
| * | namedFormalParameters |
| * optionalPositionalFormalParameters ::= |
| * '[' defaultFormalParameter (',' defaultFormalParameter)* ']' |
| * namedFormalParameters ::= |
| * '{' defaultNamedParameter (',' defaultNamedParameter)* '}' |
| * </pre> |
| * @return the formal parameters that were parsed |
| */ |
| FormalParameterList parseFormalParameterList() { |
| Token leftParenthesis = expect2(TokenType.OPEN_PAREN); |
| if (matches5(TokenType.CLOSE_PAREN)) { |
| return new FormalParameterList.full(leftParenthesis, null, null, null, andAdvance); |
| } |
| List<FormalParameter> parameters = new List<FormalParameter>(); |
| List<FormalParameter> normalParameters = new List<FormalParameter>(); |
| List<FormalParameter> positionalParameters = new List<FormalParameter>(); |
| List<FormalParameter> namedParameters = new List<FormalParameter>(); |
| List<FormalParameter> currentParameters = normalParameters; |
| Token leftSquareBracket = null; |
| Token rightSquareBracket = null; |
| Token leftCurlyBracket = null; |
| Token rightCurlyBracket = null; |
| ParameterKind kind = ParameterKind.REQUIRED; |
| bool firstParameter = true; |
| bool reportedMultiplePositionalGroups = false; |
| bool reportedMultipleNamedGroups = false; |
| bool reportedMixedGroups = false; |
| Token initialToken = null; |
| do { |
| if (firstParameter) { |
| firstParameter = false; |
| } else if (!optional(TokenType.COMMA)) { |
| if ((leftParenthesis is BeginToken) && ((leftParenthesis as BeginToken)).endToken != null) { |
| reportError4(ParserErrorCode.EXPECTED_TOKEN, [TokenType.COMMA.lexeme]); |
| } else { |
| reportError5(ParserErrorCode.MISSING_CLOSING_PARENTHESIS, _currentToken.previous, []); |
| break; |
| } |
| } |
| initialToken = _currentToken; |
| if (matches5(TokenType.OPEN_SQUARE_BRACKET)) { |
| if (leftSquareBracket != null && !reportedMultiplePositionalGroups) { |
| reportError4(ParserErrorCode.MULTIPLE_POSITIONAL_PARAMETER_GROUPS, []); |
| reportedMultiplePositionalGroups = true; |
| } |
| if (leftCurlyBracket != null && !reportedMixedGroups) { |
| reportError4(ParserErrorCode.MIXED_PARAMETER_GROUPS, []); |
| reportedMixedGroups = true; |
| } |
| leftSquareBracket = andAdvance; |
| currentParameters = positionalParameters; |
| kind = ParameterKind.POSITIONAL; |
| } else if (matches5(TokenType.OPEN_CURLY_BRACKET)) { |
| if (leftCurlyBracket != null && !reportedMultipleNamedGroups) { |
| reportError4(ParserErrorCode.MULTIPLE_NAMED_PARAMETER_GROUPS, []); |
| reportedMultipleNamedGroups = true; |
| } |
| if (leftSquareBracket != null && !reportedMixedGroups) { |
| reportError4(ParserErrorCode.MIXED_PARAMETER_GROUPS, []); |
| reportedMixedGroups = true; |
| } |
| leftCurlyBracket = andAdvance; |
| currentParameters = namedParameters; |
| kind = ParameterKind.NAMED; |
| } |
| FormalParameter parameter = parseFormalParameter(kind); |
| parameters.add(parameter); |
| currentParameters.add(parameter); |
| if (matches5(TokenType.CLOSE_SQUARE_BRACKET)) { |
| rightSquareBracket = andAdvance; |
| currentParameters = normalParameters; |
| if (leftSquareBracket == null) { |
| if (leftCurlyBracket != null) { |
| reportError4(ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]); |
| rightCurlyBracket = rightSquareBracket; |
| rightSquareBracket = null; |
| } else { |
| reportError4(ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP, ["["]); |
| } |
| } |
| kind = ParameterKind.REQUIRED; |
| } else if (matches5(TokenType.CLOSE_CURLY_BRACKET)) { |
| rightCurlyBracket = andAdvance; |
| currentParameters = normalParameters; |
| if (leftCurlyBracket == null) { |
| if (leftSquareBracket != null) { |
| reportError4(ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]); |
| rightSquareBracket = rightCurlyBracket; |
| rightCurlyBracket = null; |
| } else { |
| reportError4(ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP, ["{"]); |
| } |
| } |
| kind = ParameterKind.REQUIRED; |
| } |
| } while (!matches5(TokenType.CLOSE_PAREN) && initialToken != _currentToken); |
| Token rightParenthesis = expect2(TokenType.CLOSE_PAREN); |
| if (leftSquareBracket != null && rightSquareBracket == null) { |
| reportError4(ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]); |
| } |
| if (leftCurlyBracket != null && rightCurlyBracket == null) { |
| reportError4(ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]); |
| } |
| if (leftSquareBracket == null) { |
| leftSquareBracket = leftCurlyBracket; |
| } |
| if (rightSquareBracket == null) { |
| rightSquareBracket = rightCurlyBracket; |
| } |
| return new FormalParameterList.full(leftParenthesis, parameters, leftSquareBracket, rightSquareBracket, rightParenthesis); |
| } |
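| // For illustration (hypothetical signatures): '(a, [b = 1])' produces a required parameter |
| // followed by an optional positional group, while '(a, {b: 2})' produces a named group; |
| // mixing '[...]' and '{...}' in one list is still parsed but reported as |
| // MIXED_PARAMETER_GROUPS. |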
| /** |
| * Parse a for statement. |
| * <pre> |
| * forStatement ::= |
| * 'for' '(' forLoopParts ')' statement |
| * forLoopParts ::= |
| * forInitializerStatement expression? ';' expressionList? |
| * | declaredIdentifier 'in' expression |
| * | identifier 'in' expression |
| * forInitializerStatement ::= |
| * localVariableDeclaration ';' |
| * | expression? ';' |
| * </pre> |
| * @return the for statement that was parsed |
| */ |
| Statement parseForStatement() { |
| bool wasInLoop = _inLoop; |
| _inLoop = true; |
| try { |
| Token forKeyword = expect(Keyword.FOR); |
| Token leftParenthesis = expect2(TokenType.OPEN_PAREN); |
| VariableDeclarationList variableList = null; |
| Expression initialization = null; |
| if (!matches5(TokenType.SEMICOLON)) { |
| CommentAndMetadata commentAndMetadata = parseCommentAndMetadata(); |
| if (matchesIdentifier() && matches3(peek(), Keyword.IN)) { |
| List<VariableDeclaration> variables = new List<VariableDeclaration>(); |
| SimpleIdentifier variableName = parseSimpleIdentifier(); |
| variables.add(new VariableDeclaration.full(null, null, variableName, null, null)); |
| variableList = new VariableDeclarationList.full(commentAndMetadata.comment, commentAndMetadata.metadata, null, null, variables); |
| } else if (isInitializedVariableDeclaration()) { |
| variableList = parseVariableDeclarationList(commentAndMetadata); |
| } else { |
| initialization = parseExpression2(); |
| } |
| if (matches(Keyword.IN)) { |
| DeclaredIdentifier loopVariable = null; |
| if (variableList == null) { |
| reportError4(ParserErrorCode.MISSING_VARIABLE_IN_FOR_EACH, []); |
| } else { |
| NodeList<VariableDeclaration> variables2 = variableList.variables; |
| if (variables2.length > 1) { |
| reportError4(ParserErrorCode.MULTIPLE_VARIABLES_IN_FOR_EACH, [variables2.length.toString()]); |
| } |
| VariableDeclaration variable = variables2[0]; |
| if (variable.initializer != null) { |
| reportError4(ParserErrorCode.INITIALIZED_VARIABLE_IN_FOR_EACH, []); |
| } |
| loopVariable = new DeclaredIdentifier.full(commentAndMetadata.comment, commentAndMetadata.metadata, variableList.keyword, variableList.type, variable.name); |
| } |
| Token inKeyword = expect(Keyword.IN); |
| Expression iterator = parseExpression2(); |
| Token rightParenthesis = expect2(TokenType.CLOSE_PAREN); |
| Statement body = parseStatement2(); |
| return new ForEachStatement.full(forKeyword, leftParenthesis, loopVariable, inKeyword, iterator, rightParenthesis, body); |
| } |
| } |
| Token leftSeparator = expect2(TokenType.SEMICOLON); |
| Expression condition = null; |
| if (!matches5(TokenType.SEMICOLON)) { |
| condition = parseExpression2(); |
| } |
| Token rightSeparator = expect2(TokenType.SEMICOLON); |
| List<Expression> updaters = null; |
| if (!matches5(TokenType.CLOSE_PAREN)) { |
| updaters = parseExpressionList(); |
| } |
| Token rightParenthesis = expect2(TokenType.CLOSE_PAREN); |
| Statement body = parseStatement2(); |
| return new ForStatement.full(forKeyword, leftParenthesis, variableList, initialization, leftSeparator, condition, rightSeparator, updaters, rightParenthesis, body); |
| } finally { |
| _inLoop = wasInLoop; |
| } |
| } |
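| // For illustration: 'for (int i = 0; i < n; i++) {}' takes the classic three-part form, |
| // while 'for (var e in items) {}' is recognized by the 'in' keyword and becomes a |
| // ForEachStatement with a single loop variable. 'n', 'e' and 'items' are hypothetical |
| // names. |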
| /** |
| * Parse a function body. |
| * <pre> |
| * functionBody ::= |
| * '=>' expression ';' |
| * | block |
| * functionExpressionBody ::= |
| * '=>' expression |
| * | block |
| * </pre> |
| * @param mayBeEmpty {@code true} if the function body is allowed to be empty |
| * @param inExpression {@code true} if the function body is being parsed as part of an expression |
| * and therefore does not have a terminating semicolon |
| * @return the function body that was parsed |
| */ |
| FunctionBody parseFunctionBody(bool mayBeEmpty, bool inExpression) { |
| bool wasInLoop = _inLoop; |
| bool wasInSwitch = _inSwitch; |
| _inLoop = false; |
| _inSwitch = false; |
| try { |
| if (matches5(TokenType.SEMICOLON)) { |
| if (!mayBeEmpty) { |
| reportError4(ParserErrorCode.MISSING_FUNCTION_BODY, []); |
| } |
| return new EmptyFunctionBody.full(andAdvance); |
| } else if (matches5(TokenType.FUNCTION)) { |
| Token functionDefinition = andAdvance; |
| Expression expression = parseExpression2(); |
| Token semicolon = null; |
| if (!inExpression) { |
| semicolon = expect2(TokenType.SEMICOLON); |
| } |
| return new ExpressionFunctionBody.full(functionDefinition, expression, semicolon); |
| } else if (matches5(TokenType.OPEN_CURLY_BRACKET)) { |
| return new BlockFunctionBody.full(parseBlock()); |
| } else if (matches2(_NATIVE)) { |
| Token nativeToken = andAdvance; |
| StringLiteral stringLiteral = parseStringLiteral(); |
| return new NativeFunctionBody.full(nativeToken, stringLiteral, expect2(TokenType.SEMICOLON)); |
| } else { |
| reportError4(ParserErrorCode.MISSING_FUNCTION_BODY, []); |
| return new EmptyFunctionBody.full(createSyntheticToken2(TokenType.SEMICOLON)); |
| } |
| } finally { |
| _inLoop = wasInLoop; |
| _inSwitch = wasInSwitch; |
| } |
| } |
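| // For illustration: '=> value;' produces an ExpressionFunctionBody (the semicolon is |
| // omitted when parsing inside an expression), '{ ... }' a BlockFunctionBody, a bare ';' |
| // an EmptyFunctionBody, and 'native "impl";' a NativeFunctionBody. |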
| /** |
| * Parse a function declaration. |
| * <pre> |
| * functionDeclaration ::= |
| * functionSignature functionBody |
| * | returnType? getOrSet identifier formalParameterList functionBody |
| * </pre> |
| * @param commentAndMetadata the documentation comment and metadata to be associated with the |
| * declaration |
| * @param externalKeyword the 'external' keyword, or {@code null} if the function is not external |
| * @param returnType the return type, or {@code null} if there is no return type |
| * @param isStatement {@code true} if the function declaration is being parsed as a statement |
| * @return the function declaration that was parsed |
| */ |
| FunctionDeclaration parseFunctionDeclaration(CommentAndMetadata commentAndMetadata, Token externalKeyword, TypeName returnType) { |
| Token keyword = null; |
| bool isGetter = false; |
| if (matches(Keyword.GET) && !matches4(peek(), TokenType.OPEN_PAREN)) { |
| keyword = andAdvance; |
| isGetter = true; |
| } else if (matches(Keyword.SET) && !matches4(peek(), TokenType.OPEN_PAREN)) { |
| keyword = andAdvance; |
| } |
| SimpleIdentifier name = parseSimpleIdentifier(); |
| FormalParameterList parameters = null; |
| if (!isGetter) { |
| if (matches5(TokenType.OPEN_PAREN)) { |
| parameters = parseFormalParameterList(); |
| validateFormalParameterList(parameters); |
| } else { |
| reportError4(ParserErrorCode.MISSING_FUNCTION_PARAMETERS, []); |
| } |
| } else if (matches5(TokenType.OPEN_PAREN)) { |
| reportError4(ParserErrorCode.GETTER_WITH_PARAMETERS, []); |
| parseFormalParameterList(); |
| } |
| FunctionBody body; |
| if (externalKeyword == null) { |
| body = parseFunctionBody(false, false); |
| } else { |
| body = new EmptyFunctionBody.full(expect2(TokenType.SEMICOLON)); |
| } |
| return new FunctionDeclaration.full(commentAndMetadata.comment, commentAndMetadata.metadata, externalKeyword, returnType, keyword, name, new FunctionExpression.full(parameters, body)); |
| } |
| /** |
| * Parse a function declaration statement. |
| * <pre> |
| * functionDeclarationStatement ::= |
| * functionSignature functionBody |
| * </pre> |
| * @return the function declaration statement that was parsed |
| */ |
| Statement parseFunctionDeclarationStatement() => parseFunctionDeclarationStatement2(parseCommentAndMetadata(), parseOptionalReturnType()); |
| /** |
| * Parse a function declaration statement. |
| * <pre> |
| * functionDeclarationStatement ::= |
| * functionSignature functionBody |
| * </pre> |
| * @param commentAndMetadata the documentation comment and metadata to be associated with the |
| * declaration |
| * @param returnType the return type, or {@code null} if there is no return type |
| * @return the function declaration statement that was parsed |
| */ |
| Statement parseFunctionDeclarationStatement2(CommentAndMetadata commentAndMetadata, TypeName returnType) => new FunctionDeclarationStatement.full(parseFunctionDeclaration(commentAndMetadata, null, returnType)); |
| /** |
| * Parse a function expression. |
| * <pre> |
| * functionExpression ::= |
| * formalParameterList functionExpressionBody |
| * </pre> |
| * @return the function expression that was parsed |
| */ |
| FunctionExpression parseFunctionExpression() { |
| FormalParameterList parameters = parseFormalParameterList(); |
| validateFormalParameterList(parameters); |
| FunctionBody body = parseFunctionBody(false, true); |
| return new FunctionExpression.full(parameters, body); |
| } |
| /** |
| * Parse a function type alias. |
| * <pre> |
| * functionTypeAlias ::= |
| * functionPrefix typeParameterList? formalParameterList ';' |
| * functionPrefix ::= |
| * returnType? name |
| * </pre> |
| * @param commentAndMetadata the metadata to be associated with the member |
| * @param keyword the token representing the 'typedef' keyword |
| * @return the function type alias that was parsed |
| */ |
| FunctionTypeAlias parseFunctionTypeAlias(CommentAndMetadata commentAndMetadata, Token keyword) { |
| TypeName returnType = null; |
| if (hasReturnTypeInTypeAlias()) { |
| returnType = parseReturnType(); |
| } |
| SimpleIdentifier name = parseSimpleIdentifier(); |
| TypeParameterList typeParameters = null; |
| if (matches5(TokenType.LT)) { |
| typeParameters = parseTypeParameterList(); |
| } |
| if (matches5(TokenType.SEMICOLON) || matches5(TokenType.EOF)) { |
| reportError4(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS, []); |
| FormalParameterList parameters = new FormalParameterList.full(createSyntheticToken2(TokenType.OPEN_PAREN), null, null, null, createSyntheticToken2(TokenType.CLOSE_PAREN)); |
| Token semicolon = expect2(TokenType.SEMICOLON); |
| return new FunctionTypeAlias.full(commentAndMetadata.comment, commentAndMetadata.metadata, keyword, returnType, name, typeParameters, parameters, semicolon); |
| } else if (!matches5(TokenType.OPEN_PAREN)) { |
| reportError4(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS, []); |
| return null; |
| } |
| FormalParameterList parameters = parseFormalParameterList(); |
| validateFormalParameterList(parameters); |
| Token semicolon = expect2(TokenType.SEMICOLON); |
| return new FunctionTypeAlias.full(commentAndMetadata.comment, commentAndMetadata.metadata, keyword, returnType, name, typeParameters, parameters, semicolon); |
| } |
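| // For illustration: 'typedef int Handler(String message);' (a hypothetical alias) supplies |
| // a return type, a name and a parameter list, while 'typedef Handler;' is missing its |
| // parameters and is recovered with a synthetic empty parameter list plus a |
| // MISSING_TYPEDEF_PARAMETERS error. |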
| /** |
| * Parse a getter. |
| * <pre> |
| * getter ::= |
| * getterSignature functionBody? |
| * getterSignature ::= |
| * 'external'? 'static'? returnType? 'get' identifier |
| * </pre> |
| * @param commentAndMetadata the documentation comment and metadata to be associated with the |
| * declaration |
| * @param externalKeyword the 'external' token |
| * @param staticKeyword the static keyword, or {@code null} if the getter is not static |
| * @param returnType the return type that has already been parsed, or {@code null} if there was no return |
| * type |
| * @return the getter that was parsed |
| */ |
| MethodDeclaration parseGetter(CommentAndMetadata commentAndMetadata, Token externalKeyword, Token staticKeyword, TypeName returnType) { |
| Token propertyKeyword = expect(Keyword.GET); |
| SimpleIdentifier name = parseSimpleIdentifier(); |
| if (matches5(TokenType.OPEN_PAREN) && matches4(peek(), TokenType.CLOSE_PAREN)) { |
| reportError4(ParserErrorCode.GETTER_WITH_PARAMETERS, []); |
| advance(); |
| advance(); |
| } |
| FunctionBody body = parseFunctionBody(true, false); |
| if (externalKeyword != null && body is! EmptyFunctionBody) { |
| reportError4(ParserErrorCode.EXTERNAL_GETTER_WITH_BODY, []); |
| } |
| return new MethodDeclaration.full(commentAndMetadata.comment, commentAndMetadata.metadata, externalKeyword, staticKeyword, returnType, propertyKeyword, null, name, null, body); |
| } |
| /** |
| * Parse a list of identifiers. |
| * <pre> |
| * identifierList ::= |
| * identifier (',' identifier)* |
| * </pre> |
| * @return the list of identifiers that were parsed |
| */ |
| List<SimpleIdentifier> parseIdentifierList() { |
| List<SimpleIdentifier> identifiers = new List<SimpleIdentifier>(); |
| identifiers.add(parseSimpleIdentifier()); |
| while (matches5(TokenType.COMMA)) { |
| advance(); |
| identifiers.add(parseSimpleIdentifier()); |
| } |
| return identifiers; |
| } |
| /** |
| * Parse an if statement. |
| * <pre> |
| * ifStatement ::= |
| * 'if' '(' expression ')' statement ('else' statement)? |
| * </pre> |
| * @return the if statement that was parsed |
| */ |
| Statement parseIfStatement() { |
| Token ifKeyword = expect(Keyword.IF); |
| Token leftParenthesis = expect2(TokenType.OPEN_PAREN); |
| Expression condition = parseExpression2(); |
| Token rightParenthesis = expect2(TokenType.CLOSE_PAREN); |
| Statement thenStatement = parseStatement2(); |
| Token elseKeyword = null; |
| Statement elseStatement = null; |
| if (matches(Keyword.ELSE)) { |
| elseKeyword = andAdvance; |
| elseStatement = parseStatement2(); |
| } |
| return new IfStatement.full(ifKeyword, leftParenthesis, condition, rightParenthesis, thenStatement, elseKeyword, elseStatement); |
| } |
| /** |
| * Parse an implements clause. |
| * <pre> |
| * implementsClause ::= |
| * 'implements' type (',' type)* |
| * </pre> |
| * @return the implements clause that was parsed |
| */ |
| ImplementsClause parseImplementsClause() { |
| Token keyword = expect(Keyword.IMPLEMENTS); |
| List<TypeName> interfaces = new List<TypeName>(); |
| interfaces.add(parseTypeName()); |
| while (optional(TokenType.COMMA)) { |
| interfaces.add(parseTypeName()); |
| } |
| return new ImplementsClause.full(keyword, interfaces); |
| } |
| /** |
| * Parse an import directive. |
| * <pre> |
| * importDirective ::= |
| * metadata 'import' stringLiteral ('as' identifier)? combinator* ';' |
| * </pre> |
| * @param commentAndMetadata the metadata to be associated with the directive |
| * @return the import directive that was parsed |
| */ |
| ImportDirective parseImportDirective(CommentAndMetadata commentAndMetadata) { |
| Token importKeyword = expect(Keyword.IMPORT); |
| StringLiteral libraryUri = parseStringLiteral(); |
| Token asToken = null; |
| SimpleIdentifier prefix = null; |
| if (matches(Keyword.AS)) { |
| asToken = andAdvance; |
| prefix = parseSimpleIdentifier(); |
| } |
| List<Combinator> combinators = parseCombinators(); |
| Token semicolon = expect2(TokenType.SEMICOLON); |
| return new ImportDirective.full(commentAndMetadata.comment, commentAndMetadata.metadata, importKeyword, libraryUri, asToken, prefix, combinators, semicolon); |
| } |
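| // For illustration: "import 'dart:math' as math show Random;" yields the URI string |
| // literal, an 'as' prefix, one ShowCombinator and the closing semicolon; the prefix and |
| // combinators are simply absent for a plain "import 'dart:math';". |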
| /** |
| * Parse a list of initialized identifiers. |
| * <pre> |
| * ?? ::= |
| * 'static'? ('var' | type) initializedIdentifierList ';' |
| * | 'final' type? initializedIdentifierList ';' |
| * initializedIdentifierList ::= |
| * initializedIdentifier (',' initializedIdentifier)* |
| * initializedIdentifier ::= |
| * identifier ('=' expression)? |
| * </pre> |
| * @param commentAndMetadata the documentation comment and metadata to be associated with the |
| * declaration |
| * @param staticKeyword the static keyword, or {@code null} if the fields are not static |
| * @param keyword the token representing the 'final', 'const' or 'var' keyword, or {@code null} if |
| * there is no keyword |
| * @param type the type that has already been parsed, or {@code null} if 'var' was provided |
| * @return the field declaration that was parsed |
| */ |
| FieldDeclaration parseInitializedIdentifierList(CommentAndMetadata commentAndMetadata, Token staticKeyword, Token keyword, TypeName type) { |
| VariableDeclarationList fieldList = parseVariableDeclarationList2(null, keyword, type); |
| return new FieldDeclaration.full(commentAndMetadata.comment, commentAndMetadata.metadata, staticKeyword, fieldList, expect2(TokenType.SEMICOLON)); |
| } |
| /** |
| * Parse an instance creation expression. |
| * <pre> |
| * instanceCreationExpression ::= |
| * ('new' | 'const') type ('.' identifier)? argumentList |
| * </pre> |
| * @param keyword the 'new' or 'const' keyword that introduces the expression |
| * @return the instance creation expression that was parsed |
| */ |
| InstanceCreationExpression parseInstanceCreationExpression(Token keyword) { |
| ConstructorName constructorName = parseConstructorName(); |
| ArgumentList argumentList = parseArgumentList(); |
| return new InstanceCreationExpression.full(keyword, constructorName, argumentList); |
| } |
| /** |
| * Parse a library directive. |
| * <pre> |
| * libraryDirective ::= |
| * metadata 'library' identifier ';' |
| * </pre> |
| * @param commentAndMetadata the metadata to be associated with the directive |
| * @return the library directive that was parsed |
| */ |
| LibraryDirective parseLibraryDirective(CommentAndMetadata commentAndMetadata) { |
| Token keyword = expect(Keyword.LIBRARY); |
| LibraryIdentifier libraryName = parseLibraryName(ParserErrorCode.MISSING_NAME_IN_LIBRARY_DIRECTIVE, keyword); |
| Token semicolon = expect2(TokenType.SEMICOLON); |
| return new LibraryDirective.full(commentAndMetadata.comment, commentAndMetadata.metadata, keyword, libraryName, semicolon); |
| } |
| /** |
| * Parse a library identifier. |
| * <pre> |
| * libraryIdentifier ::= |
| * identifier ('.' identifier)* |
| * </pre> |
| * @return the library identifier that was parsed |
| */ |
| LibraryIdentifier parseLibraryIdentifier() { |
| List<SimpleIdentifier> components = new List<SimpleIdentifier>(); |
| components.add(parseSimpleIdentifier()); |
| while (matches5(TokenType.PERIOD)) { |
| advance(); |
| components.add(parseSimpleIdentifier()); |
| } |
| return new LibraryIdentifier.full(components); |
| } |
| /** |
| * Parse a library name. |
| * <pre> |
| * libraryName ::= |
| * libraryIdentifier |
| * </pre> |
| * @param missingNameError the error code to be used if the library name is missing |
| * @param missingNameToken the token associated with the error produced if the library name is |
| * missing |
| * @return the library name that was parsed |
| */ |
| LibraryIdentifier parseLibraryName(ParserErrorCode missingNameError, Token missingNameToken) { |
| if (matchesIdentifier()) { |
| return parseLibraryIdentifier(); |
| } else if (matches5(TokenType.STRING)) { |
| StringLiteral string = parseStringLiteral(); |
| reportError(ParserErrorCode.NON_IDENTIFIER_LIBRARY_NAME, string, []); |
| } else { |
| reportError5(missingNameError, missingNameToken, []); |
| } |
| List<SimpleIdentifier> components = new List<SimpleIdentifier>(); |
| components.add(createSyntheticIdentifier()); |
| return new LibraryIdentifier.full(components); |
| } |
| /** |
| * Parse a list literal. |
| * <pre> |
| * listLiteral ::= |
| * 'const'? typeArguments? '[' (expressionList ','?)? ']' |
| * </pre> |
| * @param modifier the 'const' modifier appearing before the literal, or {@code null} if there is |
| * no modifier |
| * @param typeArguments the type arguments appearing before the literal, or {@code null} if there |
| * are no type arguments |
| * @return the list literal that was parsed |
| */ |
| ListLiteral parseListLiteral(Token modifier, TypeArgumentList typeArguments) { |
| if (matches5(TokenType.INDEX)) { |
| BeginToken leftBracket = new BeginToken(TokenType.OPEN_SQUARE_BRACKET, _currentToken.offset); |
| Token rightBracket = new Token(TokenType.CLOSE_SQUARE_BRACKET, _currentToken.offset + 1); |
| leftBracket.endToken = rightBracket; |
| rightBracket.setNext(_currentToken.next); |
| leftBracket.setNext(rightBracket); |
| _currentToken.previous.setNext(leftBracket); |
| _currentToken = _currentToken.next; |
| return new ListLiteral.full(modifier, typeArguments, leftBracket, null, rightBracket); |
| } |
| Token leftBracket = expect2(TokenType.OPEN_SQUARE_BRACKET); |
| if (matches5(TokenType.CLOSE_SQUARE_BRACKET)) { |
| return new ListLiteral.full(modifier, typeArguments, leftBracket, null, andAdvance); |
| } |
| List<Expression> elements = new List<Expression>(); |
| elements.add(parseExpression2()); |
| while (optional(TokenType.COMMA)) { |
| if (matches5(TokenType.CLOSE_SQUARE_BRACKET)) { |
| return new ListLiteral.full(modifier, typeArguments, leftBracket, elements, andAdvance); |
| } |
| elements.add(parseExpression2()); |
| } |
| Token rightBracket = expect2(TokenType.CLOSE_SQUARE_BRACKET); |
| return new ListLiteral.full(modifier, typeArguments, leftBracket, elements, rightBracket); |
| } |
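| // For illustration: an empty literal '[]' reaches the parser as a single INDEX token, |
| // which the code above splits into synthetic '[' and ']' tokens; '[1, 2,]' with a |
| // trailing comma is accepted because the closing bracket is checked after each optional |
| // comma. |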
| /** |
| * Parse a list or map literal. |
| * <pre> |
| * listOrMapLiteral ::= |
| * listLiteral |
| * | mapLiteral |
| * </pre> |
| * @param modifier the 'const' modifier appearing before the literal, or {@code null} if there is |
| * no modifier |
| * @return the list or map literal that was parsed |
| */ |
| TypedLiteral parseListOrMapLiteral(Token modifier) { |
| TypeArgumentList typeArguments = null; |
| if (matches5(TokenType.LT)) { |
| typeArguments = parseTypeArgumentList(); |
| } |
| if (matches5(TokenType.OPEN_CURLY_BRACKET)) { |
| return parseMapLiteral(modifier, typeArguments); |
| } else if (matches5(TokenType.OPEN_SQUARE_BRACKET) || matches5(TokenType.INDEX)) { |
| return parseListLiteral(modifier, typeArguments); |
| } |
| reportError4(ParserErrorCode.EXPECTED_LIST_OR_MAP_LITERAL, []); |
| return new ListLiteral.full(modifier, typeArguments, createSyntheticToken2(TokenType.OPEN_SQUARE_BRACKET), null, createSyntheticToken2(TokenType.CLOSE_SQUARE_BRACKET)); |
| } |
| /** |
| * Parse a logical and expression. |
| * <pre> |
| * logicalAndExpression ::= |
| * bitwiseOrExpression ('&&' bitwiseOrExpression)* |
| * </pre> |
| * @return the logical and expression that was parsed |
| */ |
| Expression parseLogicalAndExpression() { |
| Expression expression = parseBitwiseOrExpression(); |
| while (matches5(TokenType.AMPERSAND_AMPERSAND)) { |
| Token operator = andAdvance; |
| expression = new BinaryExpression.full(expression, operator, parseBitwiseOrExpression()); |
| } |
| return expression; |
| } |
| /** |
| * Parse a logical or expression. |
| * <pre> |
| * logicalOrExpression ::= |
| * logicalAndExpression ('||' logicalAndExpression)* |
| * </pre> |
| * @return the logical or expression that was parsed |
| */ |
| Expression parseLogicalOrExpression() { |
| Expression expression = parseLogicalAndExpression(); |
| while (matches5(TokenType.BAR_BAR)) { |
| Token operator = andAdvance; |
| expression = new BinaryExpression.full(expression, operator, parseLogicalAndExpression()); |
| } |
| return expression; |
| } |
| /** |
| * Parse a map literal. |
| * <pre> |
| * mapLiteral ::= |
| * 'const'? typeArguments? '{' (mapLiteralEntry (',' mapLiteralEntry)* ','?)? '}' |
| * </pre> |
| * @param modifier the 'const' modifier appearing before the literal, or {@code null} if there is |
| * no modifier |
| * @param typeArguments the type arguments that were declared, or {@code null} if there are no |
| * type arguments |
| * @return the map literal that was parsed |
| */ |
| MapLiteral parseMapLiteral(Token modifier, TypeArgumentList typeArguments) { |
| Token leftBracket = expect2(TokenType.OPEN_CURLY_BRACKET); |
| List<MapLiteralEntry> entries = new List<MapLiteralEntry>(); |
| if (matches5(TokenType.CLOSE_CURLY_BRACKET)) { |
| return new MapLiteral.full(modifier, typeArguments, leftBracket, entries, andAdvance); |
| } |
| entries.add(parseMapLiteralEntry()); |
| while (optional(TokenType.COMMA)) { |
| if (matches5(TokenType.CLOSE_CURLY_BRACKET)) { |
| return new MapLiteral.full(modifier, typeArguments, leftBracket, entries, andAdvance); |
| } |
| entries.add(parseMapLiteralEntry()); |
| } |
| Token rightBracket = expect2(TokenType.CLOSE_CURLY_BRACKET); |
| return new MapLiteral.full(modifier, typeArguments, leftBracket, entries, rightBracket); |
| } |
| /** |
| * Parse a map literal entry. |
| * <pre> |
| * mapLiteralEntry ::= |
| * stringLiteral ':' expression |
| * </pre> |
| * @return the map literal entry that was parsed |
| */ |
| MapLiteralEntry parseMapLiteralEntry() { |
| StringLiteral key = parseStringLiteral(); |
| Token separator = expect2(TokenType.COLON); |
| Expression value = parseExpression2(); |
| return new MapLiteralEntry.full(key, separator, value); |
| } |
| /** |
| * Parse a method declaration. |
| * <pre> |
| * functionDeclaration ::= |
| * 'external'? 'static'? functionSignature functionBody |
| * | 'external'? functionSignature ';' |
| * </pre> |
| * @param commentAndMetadata the documentation comment and metadata to be associated with the |
| * declaration |
| * @param externalKeyword the 'external' token |
| * @param staticKeyword the static keyword, or {@code null} if the method is not static |
| * @param returnType the return type of the method |
| * @return the method declaration that was parsed |
| */ |
| MethodDeclaration parseMethodDeclaration(CommentAndMetadata commentAndMetadata, Token externalKeyword, Token staticKeyword, TypeName returnType) { |
| SimpleIdentifier methodName = parseSimpleIdentifier(); |
| FormalParameterList parameters = parseFormalParameterList(); |
| validateFormalParameterList(parameters); |
| return parseMethodDeclaration2(commentAndMetadata, externalKeyword, staticKeyword, returnType, methodName, parameters); |
| } |
| /** |
| * Parse a method declaration. |
| * <pre> |
| * functionDeclaration ::= |
| * ('external' 'static'?)? functionSignature functionBody |
| * | 'external'? functionSignature ';' |
| * </pre> |
| * @param commentAndMetadata the documentation comment and metadata to be associated with the |
| * declaration |
| * @param externalKeyword the 'external' token |
| * @param staticKeyword the static keyword, or {@code null} if the method is not static |
| * @param returnType the return type of the method |
| * @param name the name of the method |
| * @param parameters the parameters to the method |
| * @return the method declaration that was parsed |
| */ |
| MethodDeclaration parseMethodDeclaration2(CommentAndMetadata commentAndMetadata, Token externalKeyword, Token staticKeyword, TypeName returnType, SimpleIdentifier name, FormalParameterList parameters) { |
| FunctionBody body = parseFunctionBody(externalKeyword != null || staticKeyword == null, false); |
| if (externalKeyword != null) { |
| if (body is! EmptyFunctionBody) { |
| reportError(ParserErrorCode.EXTERNAL_METHOD_WITH_BODY, body, []); |
| } |
| } else if (staticKeyword != null) { |
| if (body is EmptyFunctionBody) { |
| reportError(ParserErrorCode.ABSTRACT_STATIC_METHOD, body, []); |
| } |
| } |
| return new MethodDeclaration.full(commentAndMetadata.comment, commentAndMetadata.metadata, externalKeyword, staticKeyword, returnType, null, null, name, parameters, body); |
| } |
| /** |
| * Parse the modifiers preceding a declaration. This method allows the modifiers to appear in any |
| * order but does generate errors for duplicated modifiers. Checks for other problems, such as |
| * having the modifiers appear in the wrong order or specifying both 'const' and 'final', are |
| * reported in one of the methods whose name is prefixed with {@code validateModifiersFor}. |
| * <pre> |
| * modifiers ::= |
| * ('abstract' | 'const' | 'external' | 'factory' | 'final' | 'static' | 'var')* |
| * </pre> |
| * @return the modifiers that were parsed |
| */ |
| Modifiers parseModifiers() { |
| Modifiers modifiers = new Modifiers(); |
| bool progress = true; |
| while (progress) { |
| if (matches(Keyword.ABSTRACT) && !matches4(peek(), TokenType.PERIOD) && !matches4(peek(), TokenType.LT)) { |
| if (modifiers.abstractKeyword != null) { |
| reportError4(ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); |
| advance(); |
| } else { |
| modifiers.abstractKeyword = andAdvance; |
| } |
| } else if (matches(Keyword.CONST)) { |
| if (modifiers.constKeyword != null) { |
| reportError4(ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); |
| advance(); |
| } else { |
| modifiers.constKeyword = andAdvance; |
| } |
| } else if (matches(Keyword.EXTERNAL) && !matches4(peek(), TokenType.PERIOD) && !matches4(peek |