update parser to handle leading error tokens

This is the first of several CLs that move error tokens to the head
of the token stream so that parser handling of error tokens
can be simplified and made less error-prone.

In this particular CL:
- update the parser to handle error tokens at the head of the token stream
- remove several places where the analyzer was preprocessing error tokens,
  now that the parser handles them (see the sketch below)
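
The snippet below is a rough sketch, not part of this CL, of what a
scanner consumer looks like once the parser takes over; the helper
name tokenize is hypothetical, while scanString, ScannerResult, and
the new Parser helpers skipErrorTokens/reportAllErrorTokens come from
the code changed here:

    // Hypothetical consumer sketch: after this CL, code that scans and
    // then parses no longer needs to strip BAD_INPUT tokens itself.
    import 'package:front_end/src/fasta/scanner.dart'
        show ScannerResult, scanString;
    import 'package:front_end/src/scanner/token.dart' show Token;

    Token tokenize(String content) {
      ScannerResult result = scanString(content, includeComments: true);
      // Any error tokens remain at the head of result.tokens.
      // parseUnit now calls skipErrorTokens before parsing and
      // reportAllErrorTokens once the unit is complete, so the stream
      // can be handed to the parser unchanged.
      return result.tokens;
    }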

Change-Id: Iddf60ab2879567a345a692b64043bd42f1871947
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/102040
Reviewed-by: Brian Wilkerson <brianwilkerson@google.com>
Commit-Queue: Dan Rubel <danrubel@google.com>
diff --git a/pkg/analyzer/lib/src/dart/scanner/scanner.dart b/pkg/analyzer/lib/src/dart/scanner/scanner.dart
index 616b9fc..64a0b86 100644
--- a/pkg/analyzer/lib/src/dart/scanner/scanner.dart
+++ b/pkg/analyzer/lib/src/dart/scanner/scanner.dart
@@ -9,8 +9,7 @@
 import 'package:analyzer/src/dart/scanner/reader.dart';
 import 'package:analyzer/src/generated/source.dart';
 import 'package:front_end/src/fasta/scanner.dart' as fasta;
-import 'package:front_end/src/scanner/errors.dart' show translateErrorToken;
-import 'package:front_end/src/scanner/token.dart' show Token, TokenType;
+import 'package:front_end/src/scanner/token.dart' show Token;
 import 'package:pub_semver/pub_semver.dart';
 
 export 'package:analyzer/src/dart/error/syntactic_errors.dart';
@@ -156,12 +155,6 @@
 
     lineStarts.addAll(result.lineStarts);
     fasta.Token token = result.tokens;
-    // The default recovery strategy used by scanString
-    // places all error tokens at the head of the stream.
-    while (token.type == TokenType.BAD_INPUT) {
-      translateErrorToken(token, reportError);
-      token = token.next;
-    }
     firstToken = token;
     // Update all token offsets based upon the reader's starting offset
     if (_readerOffset != -1) {
diff --git a/pkg/analyzer/test/generated/parser_fasta_test.dart b/pkg/analyzer/test/generated/parser_fasta_test.dart
index 104879f..6de285d 100644
--- a/pkg/analyzer/test/generated/parser_fasta_test.dart
+++ b/pkg/analyzer/test/generated/parser_fasta_test.dart
@@ -24,7 +24,6 @@
     show LanguageVersionToken, ScannerConfiguration, ScannerResult, scanString;
 import 'package:front_end/src/fasta/scanner/error_token.dart' show ErrorToken;
 import 'package:front_end/src/fasta/scanner/string_scanner.dart';
-import 'package:front_end/src/scanner/errors.dart' show translateErrorToken;
 import 'package:pub_semver/src/version.dart';
 import 'package:test/test.dart';
 import 'package:test_reflective_loader/test_reflective_loader.dart';
@@ -1631,12 +1630,6 @@
     featureSet ??= FeatureSet.forTesting();
     var source = new StringSource(content, 'parser_test_StringSource.dart');
 
-    void reportError(
-        ScannerErrorCode errorCode, int offset, List<Object> arguments) {
-      listener
-          .onError(new AnalysisError(source, offset, 1, errorCode, arguments));
-    }
-
     // Adjust the feature set based on language version comment.
     void languageVersionChanged(
         fasta.Scanner scanner, LanguageVersionToken languageVersion) {
@@ -1650,16 +1643,7 @@
         includeComments: true,
         configuration: Scanner.buildConfig(featureSet),
         languageVersionChanged: languageVersionChanged);
-    Token token = result.tokens;
-    if (result.hasErrors) {
-      // The default recovery strategy used by scanString
-      // places all error tokens at the head of the stream.
-      while (token.type == TokenType.BAD_INPUT) {
-        translateErrorToken(token, reportError);
-        token = token.next;
-      }
-    }
-    _fastaTokens = token;
+    _fastaTokens = result.tokens;
 
     // Run parser
     ErrorReporter errorReporter = new ErrorReporter(listener, source);
@@ -2696,8 +2680,16 @@
    */
   _run(String enclosingEvent, f()) {
     _eventListener.begin(enclosingEvent);
+
+    // Simulate error handling of parseUnit by skipping error tokens
+    // before parsing and reporting them after parsing is complete.
+    Token errorToken = currentToken;
+    currentToken = fastaParser.skipErrorTokens(currentToken);
     var result = f();
+    fastaParser.reportAllErrorTokens(errorToken);
+
     _eventListener.end(enclosingEvent);
+
     String lexeme = currentToken is ErrorToken
         ? currentToken.runtimeType.toString()
         : currentToken.lexeme;
diff --git a/pkg/analyzer/test/generated/parser_test.dart b/pkg/analyzer/test/generated/parser_test.dart
index 3e203b1..1f29027 100644
--- a/pkg/analyzer/test/generated/parser_test.dart
+++ b/pkg/analyzer/test/generated/parser_test.dart
@@ -4398,20 +4398,12 @@
   }
 
   void test_missingClosingParenthesis() {
-    // It is possible that it is not possible to generate this error (that it's
-    // being reported in code that cannot actually be reached), but that hasn't
-    // been proven yet.
     createParser('(int a, int b ;',
-        expectedEndOffset: 14 /* ErrorToken at end of token stream */);
+        expectedEndOffset: 14 /* parsing ends at synthetic ')' */);
     FormalParameterList list = parser.parseFormalParameterList();
     expectNotNullIfNoErrors(list);
-    if (usingFastaParser) {
-      // Fasta scanner reports missing `)` error
-      listener.assertNoErrors();
-    } else {
-      listener.errors
-          .contains(expectedError(ParserErrorCode.EXPECTED_TOKEN, 14, 1));
-    }
+    listener.errors
+        .contains(expectedError(ParserErrorCode.EXPECTED_TOKEN, 14, 1));
   }
 
   void test_missingConstFinalVarOrType_static() {
@@ -9409,34 +9401,14 @@
     Source source = new TestSource();
     listener = new GatheringErrorListener();
 
-    void reportError(
-        ScannerErrorCode errorCode, int offset, List<Object> arguments) {
-      listener
-          .onError(new AnalysisError(source, offset, 1, errorCode, arguments));
-    }
-
-    //
-    // Scan the source.
-    //
     ScannerResult result = scanString(content, includeComments: true);
-    Token token = result.tokens;
-    if (result.hasErrors) {
-      // The default recovery strategy used by scanString
-      // places all error tokens at the head of the stream.
-      while (token.type == TokenType.BAD_INPUT) {
-        translateErrorToken(token, reportError);
-        token = token.next;
-      }
-    }
     listener.setLineInfo(source, result.lineStarts);
-    //
-    // Create and initialize the parser.
-    //
+
     parser = new Parser(source, listener, featureSet: FeatureSet.forTesting());
     parser.allowNativeClause = allowNativeClause;
     parser.parseFunctionBodies = parseFunctionBodies;
     parser.enableOptionalNewAndConst = enableOptionalNewAndConst;
-    parser.currentToken = token;
+    parser.currentToken = result.tokens;
   }
 
   @override
@@ -9557,31 +9529,13 @@
     Source source = new TestSource();
     GatheringErrorListener listener = new GatheringErrorListener();
 
-    void reportError(
-        ScannerErrorCode errorCode, int offset, List<Object> arguments) {
-      listener
-          .onError(new AnalysisError(source, offset, 1, errorCode, arguments));
-    }
-
-    //
-    // Scan the source.
-    //
     ScannerResult result = scanString(content, includeComments: true);
-    Token token = result.tokens;
-    if (result.hasErrors) {
-      // The default recovery strategy used by scanString
-      // places all error tokens at the head of the stream.
-      while (token.type == TokenType.BAD_INPUT) {
-        translateErrorToken(token, reportError);
-        token = token.next;
-      }
-    }
     listener.setLineInfo(source, result.lineStarts);
 
     Parser parser =
         new Parser(source, listener, featureSet: FeatureSet.forTesting());
     parser.enableOptionalNewAndConst = enableOptionalNewAndConst;
-    CompilationUnit unit = parser.parseCompilationUnit(token);
+    CompilationUnit unit = parser.parseCompilationUnit(result.tokens);
     expect(unit, isNotNull);
     if (codes != null) {
       listener.assertErrorsWithCodes(codes);
@@ -9601,30 +9555,12 @@
     Source source = NonExistingSource.unknown;
     listener ??= AnalysisErrorListener.NULL_LISTENER;
 
-    void reportError(
-        ScannerErrorCode errorCode, int offset, List<Object> arguments) {
-      listener
-          .onError(new AnalysisError(source, offset, 1, errorCode, arguments));
-    }
-
-    //
-    // Scan the source.
-    //
     ScannerResult result = scanString(content, includeComments: true);
-    Token token = result.tokens;
-    if (result.hasErrors) {
-      // The default recovery strategy used by scanString
-      // places all error tokens at the head of the stream.
-      while (token.type == TokenType.BAD_INPUT) {
-        translateErrorToken(token, reportError);
-        token = token.next;
-      }
-    }
 
     Parser parser =
         new Parser(source, listener, featureSet: FeatureSet.forTesting());
     parser.enableOptionalNewAndConst = enableOptionalNewAndConst;
-    CompilationUnit unit = parser.parseCompilationUnit(token);
+    CompilationUnit unit = parser.parseCompilationUnit(result.tokens);
     unit.lineInfo = new LineInfo(result.lineStarts);
     return unit;
   }
@@ -9952,31 +9888,13 @@
     Source source = new TestSource();
     listener = new GatheringErrorListener();
 
-    void reportError(
-        ScannerErrorCode errorCode, int offset, List<Object> arguments) {
-      listener
-          .onError(new AnalysisError(source, offset, 1, errorCode, arguments));
-    }
-
-    //
-    // Scan the source.
-    //
     ScannerResult result = scanString(content, includeComments: true);
-    Token token = result.tokens;
-    if (result.hasErrors) {
-      // The default recovery strategy used by scanString
-      // places all error tokens at the head of the stream.
-      while (token.type == TokenType.BAD_INPUT) {
-        translateErrorToken(token, reportError);
-        token = token.next;
-      }
-    }
     listener.setLineInfo(source, result.lineStarts);
 
     Parser parser =
         new Parser(source, listener, featureSet: FeatureSet.forTesting());
     parser.enableOptionalNewAndConst = enableOptionalNewAndConst;
-    Statement statement = parser.parseStatement(token);
+    Statement statement = parser.parseStatement(result.tokens);
     expect(statement, isNotNull);
     return statement;
   }
@@ -9997,31 +9915,13 @@
     Source source = new TestSource();
     GatheringErrorListener listener = new GatheringErrorListener();
 
-    void reportError(
-        ScannerErrorCode errorCode, int offset, List<Object> arguments) {
-      listener
-          .onError(new AnalysisError(source, offset, 1, errorCode, arguments));
-    }
-
-    //
-    // Scan the source.
-    //
     ScannerResult result = scanString(content);
-    Token token = result.tokens;
-    if (result.hasErrors) {
-      // The default recovery strategy used by scanString
-      // places all error tokens at the head of the stream.
-      while (token.type == TokenType.BAD_INPUT) {
-        translateErrorToken(token, reportError);
-        token = token.next;
-      }
-    }
     listener.setLineInfo(source, result.lineStarts);
 
     Parser parser =
         new Parser(source, listener, featureSet: FeatureSet.forTesting());
     parser.enableOptionalNewAndConst = enableOptionalNewAndConst;
-    List<Statement> statements = parser.parseStatements(token);
+    List<Statement> statements = parser.parseStatements(result.tokens);
     expect(statements, hasLength(expectedCount));
     listener.assertErrorsWithCodes(errorCodes);
     return statements;
diff --git a/pkg/analyzer/test/generated/scanner_test.dart b/pkg/analyzer/test/generated/scanner_test.dart
index 02039f4..d73d00d 100644
--- a/pkg/analyzer/test/generated/scanner_test.dart
+++ b/pkg/analyzer/test/generated/scanner_test.dart
@@ -9,6 +9,7 @@
 import 'package:analyzer/src/dart/scanner/reader.dart';
 import 'package:analyzer/src/dart/scanner/scanner.dart';
 import 'package:analyzer/src/generated/source.dart';
+import 'package:front_end/src/fasta/scanner/error_token.dart';
 import 'package:test/test.dart';
 import 'package:test_reflective_loader/test_reflective_loader.dart';
 
@@ -143,11 +144,15 @@
     // See https://github.com/dart-lang/sdk/issues/30320
     String source = '<!-- @Component(';
     GatheringErrorListener listener = new GatheringErrorListener();
-    _scanWithListener(source, listener);
-    listener.assertErrorsWithCodes(const [
-      ScannerErrorCode.EXPECTED_TOKEN,
-      ScannerErrorCode.EXPECTED_TOKEN,
-    ]);
+    Scanner scanner =
+        new Scanner(null, new CharSequenceReader(source), listener)
+          ..configureFeatures(featureSet);
+    Token token = scanner.tokenize();
+    expect(token, TypeMatcher<UnmatchedToken>());
+    token = token.next;
+    expect(token, TypeMatcher<UnmatchedToken>());
+    token = token.next;
+    expect(token, isNot(TypeMatcher<ErrorToken>()));
   }
 
   void _assertLineInfo(
diff --git a/pkg/front_end/lib/src/fasta/parser/parser.dart b/pkg/front_end/lib/src/fasta/parser/parser.dart
index 51b32e6..6e60793 100644
--- a/pkg/front_end/lib/src/fasta/parser/parser.dart
+++ b/pkg/front_end/lib/src/fasta/parser/parser.dart
@@ -330,6 +330,11 @@
   /// ;
   /// ```
   Token parseUnit(Token token) {
+    // Skip over error tokens and report them at the end
+    // so that the parser has the chance to adjust the error location.
+    Token errorToken = token;
+    token = skipErrorTokens(errorToken);
+
     listener.beginCompilationUnit(token);
     int count = 0;
     DirectiveContext directiveState = new DirectiveContext();
@@ -358,6 +363,7 @@
       }
     }
     token = token.next;
+    reportAllErrorTokens(errorToken);
     listener.endCompilationUnit(count, token);
     // Clear fields that could lead to memory leak.
     cachedRewriter = null;
@@ -6297,6 +6303,21 @@
     listener.handleErrorToken(token);
   }
 
+  Token reportAllErrorTokens(Token token) {
+    while (token is ErrorToken) {
+      reportErrorToken(token);
+      token = token.next;
+    }
+    return token;
+  }
+
+  Token skipErrorTokens(Token token) {
+    while (token is ErrorToken) {
+      token = token.next;
+    }
+    return token;
+  }
+
   Token parseInvalidTopLevelDeclaration(Token token) {
     Token next = token.next;
     reportRecoverableErrorWithToken(