Migrate the LSP protocol generator to null safety

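Removes the "// @dart = 2.9" language-version opt-outs from the LSP spec
tooling (codegen_dart.dart, typescript.dart, typescript_parser.dart) and
migrates it to null safety: nullable parameters, fields and return types
are marked with '?', previously implicit named parameters are marked
'required', AstNode and Member become abstract, and containsKey() followed
by an unchecked index is replaced by a single null-checked lookup.

As a rough illustration of that lookup pattern (a minimal standalone
sketch with hypothetical names, not code from this change):

  // Minimal sketch (hypothetical names): read the map once and branch on
  // null instead of calling containsKey() and then indexing unchecked.
  enum TokenKind { identifier, interfaceKeyword }

  const keywords = <String, TokenKind>{
    'interface': TokenKind.interfaceKeyword,
  };

  TokenKind kindFor(String lexeme) {
    final keyword = keywords[lexeme]; // TokenKind? under null safety
    return keyword ?? TokenKind.identifier;
  }

  void main() {
    print(kindFor('interface')); // TokenKind.interfaceKeyword
    print(kindFor('x'));         // TokenKind.identifier
  }
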
Change-Id: Ib44004af7ee760c3cec4f6bb72e7adba0b64988e
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/195180
Reviewed-by: Danny Tuppeny <danny@tuppeny.com>
Reviewed-by: Konstantin Shcheglov <scheglov@google.com>
diff --git a/pkg/analysis_server/tool/lsp_spec/codegen_dart.dart b/pkg/analysis_server/tool/lsp_spec/codegen_dart.dart
index 9c7e2b3..63bfa14 100644
--- a/pkg/analysis_server/tool/lsp_spec/codegen_dart.dart
+++ b/pkg/analysis_server/tool/lsp_spec/codegen_dart.dart
@@ -2,18 +2,16 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
-// @dart = 2.9
-
 import 'package:dart_style/dart_style.dart';
-import 'package:meta/meta.dart';
 
 import 'typescript.dart';
 import 'typescript_parser.dart';
 
 final formatter = DartFormatter();
 Map<String, Interface> _interfaces = {};
+
+/// TODO(dantup): Rename namespaces -> enums since they're always that now.
 Map<String, Namespace> _namespaces = {};
-// TODO(dantup): Rename namespaces -> enums since they're always that now.
 Map<String, List<String>> _subtypes = {};
 Map<String, TypeAlias> _typeAliases = {};
 
@@ -63,10 +61,11 @@
 }
 
 TypeBase resolveTypeAlias(TypeBase type, {resolveEnumClasses = false}) {
-  if (type is Type && _typeAliases.containsKey(type.name)) {
+  if (type is Type) {
     final alias = _typeAliases[type.name];
     // Only follow the type if we're not an enum, or we wanted to follow enums.
-    if (!_namespaces.containsKey(alias.name) || resolveEnumClasses) {
+    if (alias != null &&
+        (!_namespaces.containsKey(alias.name) || resolveEnumClasses)) {
       return alias.baseType;
     }
   }
@@ -83,7 +82,7 @@
 }
 
 /// Recursively gets all members from superclasses.
-List<Field> _getAllFields(Interface interface) {
+List<Field> _getAllFields(Interface? interface) {
   // Handle missing interfaces (such as special cased interfaces that won't
   // be included in this model).
   if (interface == null) {
@@ -99,8 +98,8 @@
 }
 
 /// Returns a copy of the list sorted by name with duplicates (by name+type) removed.
-List<AstNode> _getSortedUnique(List<AstNode> items) {
-  final uniqueByName = <String, AstNode>{};
+List<N> _getSortedUnique<N extends AstNode>(List<N> items) {
+  final uniqueByName = <String, N>{};
   items.forEach((item) {
     // It's fine to have the same name used for different types (eg. namespace +
     // type alias) but some types are just duplicated entirely in the spec in
@@ -325,10 +324,11 @@
   var comment = node.commentText?.trim();
   if (comment != null && comment.isNotEmpty) {
     comment = _rewriteCommentReference(comment);
-    Iterable<String> lines = comment.split('\n');
+    var originalLines = comment.split('\n');
     // Wrap at 80 - 4 ('/// ') - indent characters.
-    lines = _wrapLines(lines, (80 - 4 - buffer.totalIndent).clamp(0, 80));
-    lines.forEach((l) => buffer.writeIndentedln('/// $l'.trim()));
+    var wrappedLines =
+        _wrapLines(originalLines, (80 - 4 - buffer.totalIndent).clamp(0, 80));
+    wrappedLines.forEach((l) => buffer.writeIndentedln('/// $l'.trim()));
   }
   // Marking LSP-deprecated fields as deprecated in Dart results in a lot
   // of warnings because we still often populate these fields for clients that
@@ -466,7 +466,7 @@
 
 void _writeFromJsonCode(
     IndentableStringBuffer buffer, TypeBase type, String valueCode,
-    {bool allowsNull, bool requiresBracesInInterpolation = false}) {
+    {required bool allowsNull, bool requiresBracesInInterpolation = false}) {
   type = resolveTypeAlias(type);
 
   if (_isSimpleType(type)) {
@@ -506,7 +506,7 @@
 
 void _writeFromJsonCodeForLiteralUnion(
     IndentableStringBuffer buffer, LiteralUnionType union, String valueCode,
-    {bool allowsNull}) {
+    {required bool allowsNull}) {
   final allowedValues = [
     if (allowsNull) null,
     ...union.literalTypes.map((t) => t.literal)
@@ -518,7 +518,7 @@
 
 void _writeFromJsonCodeForUnion(
     IndentableStringBuffer buffer, UnionType union, String valueCode,
-    {bool allowsNull, @required bool requiresBracesInInterpolation}) {
+    {required bool allowsNull, required bool requiresBracesInInterpolation}) {
   // Write a check against each type, eg.:
   // x is y ? new Either.tx(x) : (...)
   var hasIncompleteCondition = false;
@@ -575,7 +575,7 @@
     ..indent();
   // First check whether any of our subclasses can deserialise this.
   for (final subclassName in _subtypes[interface.name] ?? const <String>[]) {
-    final subclass = _interfaces[subclassName];
+    final subclass = _interfaces[subclassName]!;
     buffer
       ..writeIndentedln(
           'if (${subclass.name}.canParse(json, nullLspJsonReporter)) {')
@@ -792,7 +792,7 @@
 }
 
 void _writeTypeCheckCondition(IndentableStringBuffer buffer,
-    Interface interface, String valueCode, TypeBase type, String reporter) {
+    Interface? interface, String valueCode, TypeBase type, String reporter) {
   type = resolveTypeAlias(type);
 
   final dartType = type.dartType;
@@ -840,7 +840,6 @@
     }
     buffer.write(')');
   } else if (interface != null &&
-      interface.typeArgs != null &&
       interface.typeArgs.any((typeArg) => typeArg.lexeme == fullDartType)) {
     final comment = '/* $fullDartType.canParse($valueCode) */';
     print(
diff --git a/pkg/analysis_server/tool/lsp_spec/typescript.dart b/pkg/analysis_server/tool/lsp_spec/typescript.dart
index da58944..268b1f8 100644
--- a/pkg/analysis_server/tool/lsp_spec/typescript.dart
+++ b/pkg/analysis_server/tool/lsp_spec/typescript.dart
@@ -2,8 +2,6 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
-// @dart = 2.9
-
 import 'typescript_parser.dart';
 
 /// Removes types that are in the spec that we don't want in other signatures.
@@ -22,10 +20,6 @@
 }
 
 String cleanComment(String comment) {
-  if (comment == null) {
-    return null;
-  }
-
   // Remove the start/end comment markers.
   if (comment.startsWith('/**') && comment.endsWith('*/')) {
     comment = comment.substring(3, comment.length - 2);
@@ -51,7 +45,7 @@
 
 /// Improves comments in generated code to support where types may have been
 /// altered (for ex. with [getImprovedType] above).
-String getImprovedComment(String interfaceName, String fieldName) {
+String? getImprovedComment(String interfaceName, String fieldName) {
   const _improvedComments = <String, Map<String, String>>{
     'ResponseError': {
       'data':
@@ -73,7 +67,7 @@
 /// - Narrows unions to single types where they're only generated on the server
 ///   and we know we always use a specific type. This avoids wrapping a lot
 ///   of code in `EitherX<Y,Z>.tX()` and simplifies the testing of them.
-String getImprovedType(String interfaceName, String fieldName) {
+String? getImprovedType(String interfaceName, String? fieldName) {
   const _improvedTypeMappings = <String, Map<String, String>>{
     'Diagnostic': {
       'severity': 'DiagnosticSeverity',
diff --git a/pkg/analysis_server/tool/lsp_spec/typescript_parser.dart b/pkg/analysis_server/tool/lsp_spec/typescript_parser.dart
index b18b819..9dc61e0 100644
--- a/pkg/analysis_server/tool/lsp_spec/typescript_parser.dart
+++ b/pkg/analysis_server/tool/lsp_spec/typescript_parser.dart
@@ -2,8 +2,6 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
-// @dart = 2.9
-
 import 'dart:math';
 
 import 'package:analysis_server/src/utilities/strings.dart' show capitalize;
@@ -64,14 +62,14 @@
   String get typeArgsString => '<${elementType.dartTypeWithTypeArgs}>';
 }
 
-class AstNode {
-  final Comment commentNode;
+abstract class AstNode {
+  final Comment? commentNode;
   final bool isDeprecated;
   AstNode(this.commentNode)
-      : isDeprecated = commentNode?.text?.contains('@deprecated') ?? false;
-  String get commentText => commentNode?.text;
+      : isDeprecated = commentNode?.text.contains('@deprecated') ?? false;
+  String? get commentText => commentNode?.text;
 
-  String get name => null;
+  String get name;
 }
 
 class Comment extends AstNode {
@@ -81,13 +79,16 @@
   Comment(this.token)
       : text = cleanComment(token.lexeme),
         super(null);
+
+  @override
+  String get name => throw UnsupportedError('Comments do not have a name.');
 }
 
 class Const extends Member {
   Token nameToken;
   TypeBase type;
   Token valueToken;
-  Const(Comment comment, this.nameToken, this.type, this.valueToken)
+  Const(Comment? comment, this.nameToken, this.type, this.valueToken)
       : super(comment);
 
   @override
@@ -111,7 +112,7 @@
   final bool allowsNull;
   final bool allowsUndefined;
   Field(
-    Comment comment,
+    Comment? comment,
     this.nameToken,
     this.type,
     this.allowsNull,
@@ -125,7 +126,7 @@
 class FixedValueField extends Field {
   final Token valueToken;
   FixedValueField(
-    Comment comment,
+    Comment? comment,
     Token nameToken,
     this.valueToken,
     TypeBase type,
@@ -138,7 +139,7 @@
   final TypeBase indexType;
   final TypeBase valueType;
   Indexer(
-    Comment comment,
+    Comment? comment,
     this.indexType,
     this.valueType,
   ) : super(comment);
@@ -161,7 +162,7 @@
   final List<Member> members;
 
   Interface(
-    Comment comment,
+    Comment? comment,
     this.nameToken,
     this.typeArgs,
     this.baseTypes,
@@ -221,15 +222,15 @@
       '<${indexType.dartTypeWithTypeArgs}, ${valueType.dartTypeWithTypeArgs}>';
 }
 
-class Member extends AstNode {
-  Member(Comment comment) : super(comment);
+abstract class Member extends AstNode {
+  Member(Comment? comment) : super(comment);
 }
 
 class Namespace extends AstNode {
   final Token nameToken;
   final List<Member> members;
   Namespace(
-    Comment comment,
+    Comment? comment,
     this.nameToken,
     this.members,
   ) : super(comment);
@@ -241,14 +242,13 @@
 class Parser {
   final List<Token> _tokens;
   int _current = 0;
-  List<AstNode> _nodes;
+  final List<AstNode> _nodes = [];
   Parser(this._tokens);
 
   bool get _isAtEnd => _peek().type == TokenType.EOF;
 
   List<AstNode> parse() {
-    if (_nodes == null) {
-      _nodes = <AstNode>[];
+    if (_nodes.isEmpty) {
       while (!_isAtEnd) {
         _nodes.add(_topLevel());
       }
@@ -262,17 +262,17 @@
   /// Checks if the next token is [type] without advancing.
   bool _check(TokenType type) => !_isAtEnd && _peek().type == type;
 
-  Comment _comment() {
+  Comment? _comment() {
     if (_peek().type != TokenType.COMMENT) {
       return null;
     }
     return Comment(_advance());
   }
 
-  Const _const(String containerName, Comment leadingComment) {
+  Const _const(String containerName, Comment? leadingComment) {
     _eatUnwantedKeywords();
     final name = _consume(TokenType.IDENTIFIER, 'Expected identifier');
-    TypeBase type;
+    TypeBase? type;
     if (_match([TokenType.COLON])) {
       type = _type(containerName, name.lexeme);
     }
@@ -283,7 +283,7 @@
     }
 
     _consume(TokenType.SEMI_COLON, 'Expected ;');
-    return Const(leadingComment, name, type, value);
+    return Const(leadingComment, name, type!, value!);
   }
 
   /// Ensures the next token is [type] and moves to the next, throwing [message]
@@ -302,7 +302,7 @@
     // but we have a keyword token, then treat it as an identifier.
     if (type == TokenType.IDENTIFIER) {
       final next = !_isAtEnd ? _peek() : null;
-      if (_isKeyword(next?.type)) {
+      if (next != null && _isKeyword(next.type)) {
         _advance();
         return Token(TokenType.IDENTIFIER, next.lexeme);
       }
@@ -316,7 +316,7 @@
     _match([TokenType.READONLY_KEYWORD]);
   }
 
-  Namespace _enum(Comment leadingComment) {
+  Namespace _enum(Comment? leadingComment) {
     final name = _consume(TokenType.IDENTIFIER, 'Expected identifier');
     _consume(TokenType.LEFT_BRACE, 'Expected {');
     final consts = <Const>[];
@@ -333,7 +333,7 @@
   Const _enumValue(String enumName) {
     final leadingComment = _comment();
     final name = _consume(TokenType.IDENTIFIER, 'Expected identifier');
-    TypeBase type;
+    TypeBase? type;
     if (_match([TokenType.COLON])) {
       type = _type(enumName, name.lexeme);
     }
@@ -342,16 +342,16 @@
     if (type == null && value != null) {
       type = typeOfLiteral(value.type);
     }
-    return Const(leadingComment, name, type, value);
+    return Const(leadingComment, name, type!, value!);
   }
 
-  Field _field(String containerName, Comment leadingComment) {
+  Field _field(String containerName, Comment? leadingComment) {
     _eatUnwantedKeywords();
     final name = _consume(TokenType.IDENTIFIER, 'Expected identifier');
     var canBeUndefined = _match([TokenType.QUESTION]);
     _consume(TokenType.COLON, 'Expected :');
     TypeBase type;
-    Token value;
+    Token? value;
     type = _type(containerName, name.lexeme,
         includeUndefined: canBeUndefined, improveTypes: true);
 
@@ -370,7 +370,7 @@
       final _linkTypePattern = RegExp(r'See \{@link (\w+)\}\.?');
       final linkTypeMatch = _linkTypePattern.firstMatch(commentText);
       if (linkTypeMatch != null) {
-        type = Type.identifier(linkTypeMatch.group(1));
+        type = Type.identifier(linkTypeMatch.group(1)!);
         leadingComment = Comment(Token(TokenType.COMMENT,
             '// ' + commentText.replaceAll(_linkTypePattern, '')));
       }
@@ -388,14 +388,13 @@
 
     var canBeNull = false;
     if (type is UnionType) {
-      UnionType union = type;
       // Since undefined and null can appear in the union type list but we want to
       // handle it specially in the code generation, we promote them to fields on
       // the Field.
-      canBeUndefined |= union.types.any(isUndefinedType);
-      canBeNull = union.types.any((t) => isNullType(t) || isAnyType(t));
+      canBeUndefined |= type.types.any(isUndefinedType);
+      canBeNull = type.types.any((t) => isNullType(t) || isAnyType(t));
       // Finally, we need to remove them from the union.
-      final remainingTypes = union.types
+      final remainingTypes = type.types
           .where((t) => !isNullType(t) && !isUndefinedType(t))
           .toList();
 
@@ -413,7 +412,7 @@
     return Field(leadingComment, name, type, canBeNull, canBeUndefined);
   }
 
-  Indexer _indexer(String containerName, Comment leadingComment) {
+  Indexer _indexer(String containerName, Comment? leadingComment) {
     final indexer = _field(containerName, leadingComment);
     _consume(TokenType.RIGHT_BRACKET, 'Expected ]');
     _consume(TokenType.COLON, 'Expected :');
@@ -427,7 +426,7 @@
     return Indexer(leadingComment, indexer.type, type);
   }
 
-  Interface _interface(Comment leadingComment) {
+  Interface _interface(Comment? leadingComment) {
     final name = _consume(TokenType.IDENTIFIER, 'Expected identifier');
     final typeArgs = <Token>[];
     if (_match([TokenType.LESS])) {
@@ -493,7 +492,7 @@
     }
   }
 
-  Namespace _namespace(Comment leadingComment) {
+  Namespace _namespace(Comment? leadingComment) {
     final name = _consume(TokenType.IDENTIFIER, 'Expected identifier');
     _consume(TokenType.LEFT_BRACE, 'Expected {');
     final members = <Member>[];
@@ -560,7 +559,7 @@
 
   TypeBase _type(
     String containerName,
-    String fieldName, {
+    String? fieldName, {
     bool includeUndefined = false,
     bool improveTypes = false,
   }) {
@@ -585,7 +584,7 @@
 
         // If we have a single member that is an indexer type, we can use a Map.
         if (members.length == 1 && members.single is Indexer) {
-          Indexer indexer = members.single;
+          var indexer = members.single as Indexer;
           type = MapType(indexer.indexType, indexer.valueType);
         } else {
           // Add a synthetic interface to the parsers list of nodes to represent this type.
@@ -628,7 +627,7 @@
         type = ArrayType(tupleType);
       } else {
         var typeName = _consume(TokenType.IDENTIFIER, 'Expected identifier');
-        final typeArgs = <Type>[];
+        final typeArgs = <TypeBase>[];
         if (_match([TokenType.LESS])) {
           while (true) {
             typeArgs.add(_type(containerName, fieldName));
@@ -679,7 +678,7 @@
     return type;
   }
 
-  TypeAlias _typeAlias(Comment leadingComment) {
+  TypeAlias _typeAlias(Comment? leadingComment) {
     final name = _consume(TokenType.IDENTIFIER, 'Expected identifier');
     _consume(TokenType.EQUAL, 'Expected =');
     final type = _type(name.lexeme, null);
@@ -732,16 +731,18 @@
     }
 
     final string = _source.substring(_startOfToken, _currentPos);
-    if (_keywords.containsKey(string)) {
-      _addToken(_keywords[string]);
+    var keyword = _keywords[string];
+    if (keyword != null) {
+      _addToken(keyword);
     } else {
       _addToken(TokenType.IDENTIFIER);
     }
   }
 
-  bool _isAlpha(String s) => _validIdentifierCharacters.hasMatch(s);
+  bool _isAlpha(String? s) =>
+      s != null && _validIdentifierCharacters.hasMatch(s);
 
-  bool _isDigit(String s) => s != null && (s.codeUnitAt(0) ^ 0x30) <= 9;
+  bool _isDigit(String? s) => s != null && (s.codeUnitAt(0) ^ 0x30) <= 9;
 
   bool _match(String expected) {
     if (_isAtEnd || _source[_currentPos] != expected) {
@@ -771,9 +772,9 @@
     _addToken(TokenType.NUMBER);
   }
 
-  String _peek() => _isAtEnd ? null : _source[_currentPos];
+  String? _peek() => _isAtEnd ? null : _source[_currentPos];
 
-  String _peekNext() => _isNextAtEnd ? null : _source[_currentPos + 1];
+  String? _peekNext() => _isNextAtEnd ? null : _source[_currentPos + 1];
 
   void _scanToken() {
     const singleCharTokens = <String, TokenType>{
@@ -795,8 +796,9 @@
     };
 
     final c = _advance();
-    if (singleCharTokens.containsKey(c)) {
-      _addToken(singleCharTokens[c]);
+    var token = singleCharTokens[c];
+    if (token != null) {
+      _addToken(token);
       return;
     }
     switch (c) {
@@ -979,7 +981,7 @@
   final Token nameToken;
   final TypeBase baseType;
   TypeAlias(
-    Comment comment,
+    Comment? comment,
     this.nameToken,
     this.baseType,
   ) : super(comment);