Version 2.12.0-198.0.dev

Merge commit 'cabe1e995e2b9a2874fd6a95419be6c91d2a68d7' into 'dev'
diff --git a/pkg/analysis_server/README.md b/pkg/analysis_server/README.md
index d93666c..c6a1813 100644
--- a/pkg/analysis_server/README.md
+++ b/pkg/analysis_server/README.md
@@ -13,8 +13,8 @@
 Clients (typically tools, such as an editor) are expected to run the analysis
 server in a separate process and communicate with it using a JSON protocol. The
 original protocol is specified in the file [`analysis_server/doc/api.html`][api]
-and (less complete) [Language Server Protocol][lsp_spec] support is documented
-in [`tool/lsp_spec/README.md`](tool/lsp_spec/README.md).
+and [Language Server Protocol][lsp_spec] support is documented in
+[`tool/lsp_spec/README.md`](tool/lsp_spec/README.md).
 
 ## Features and bugs
 
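
As a rough illustration of the process model described above, the sketch below launches the analysis server in a separate process from Dart and reads its output. It is not part of this change; the snapshot path is an assumption about SDK layout, and the --protocol flag shown is the one introduced by the driver changes later in this patch.

import 'dart:convert';
import 'dart:io';

Future<void> main() async {
  // Assumed SDK layout: the snapshot lives two directories above the `dart`
  // binary, under bin/snapshots; adjust the path for your installation.
  final sdkDir = File(Platform.resolvedExecutable).parent.parent.path;
  final server = await Process.start(Platform.resolvedExecutable, [
    '$sdkDir/bin/snapshots/analysis_server.dart.snapshot',
    '--protocol=lsp', // or '--protocol=analyzer' for the original protocol
  ]);
  // Protocol messages arrive on stdout; a real client would parse them.
  server.stdout.transform(utf8.decoder).listen(stdout.write);
  server.stderr.transform(utf8.decoder).listen(stderr.write);
}
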
diff --git a/pkg/analysis_server/lib/src/computer/computer_highlights.dart b/pkg/analysis_server/lib/src/computer/computer_highlights.dart
index d149af1..e2ca9ea 100644
--- a/pkg/analysis_server/lib/src/computer/computer_highlights.dart
+++ b/pkg/analysis_server/lib/src/computer/computer_highlights.dart
@@ -2,28 +2,57 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
+import 'package:analysis_server/lsp_protocol/protocol_generated.dart'
+    show SemanticTokenTypes, SemanticTokenModifiers;
+import 'package:analysis_server/src/lsp/constants.dart'
+    show CustomSemanticTokenModifiers;
+import 'package:analysis_server/src/lsp/semantic_tokens/encoder.dart'
+    show SemanticTokenInfo;
+import 'package:analysis_server/src/lsp/semantic_tokens/mapping.dart'
+    show highlightRegionTokenModifiers, highlightRegionTokenTypes;
 import 'package:analyzer/dart/ast/ast.dart';
 import 'package:analyzer/dart/ast/token.dart';
 import 'package:analyzer/dart/ast/visitor.dart';
 import 'package:analyzer/dart/element/element.dart';
+import 'package:analyzer/source/source_range.dart';
 import 'package:analyzer/src/dart/ast/extensions.dart';
 import 'package:analyzer_plugin/protocol/protocol_common.dart' hide Element;
 
-/// A computer for [HighlightRegion]s in a Dart [CompilationUnit].
+/// A computer for [HighlightRegion]s and LSP [SemanticTokenInfo] in a Dart [CompilationUnit].
 class DartUnitHighlightsComputer {
   final CompilationUnit _unit;
+  final SourceRange range;
 
-  final List<HighlightRegion> _regions = <HighlightRegion>[];
+  final _regions = <HighlightRegion>[];
+  final _semanticTokens = <SemanticTokenInfo>[];
+  bool _computeRegions = false;
+  bool _computeSemanticTokens = false;
 
-  DartUnitHighlightsComputer(this._unit);
+  /// Creates a computer for [HighlightRegion]s and LSP [SemanticTokenInfo] in a
+  /// Dart [CompilationUnit].
+  ///
+  /// If [range] is supplied, tokens outside of this range will not be included
+  /// in results.
+  DartUnitHighlightsComputer(this._unit, {this.range});
 
   /// Returns the computed highlight regions, not `null`.
   List<HighlightRegion> compute() {
+    _reset();
+    _computeRegions = true;
     _unit.accept(_DartUnitHighlightsComputerVisitor(this));
     _addCommentRanges();
     return _regions;
   }
 
+  /// Returns the computed semantic tokens, not `null`.
+  List<SemanticTokenInfo> computeSemanticTokens() {
+    _reset();
+    _computeSemanticTokens = true;
+    _unit.accept(_DartUnitHighlightsComputerVisitor(this));
+    _addCommentRanges();
+    return _semanticTokens;
+  }
+
   void _addCommentRanges() {
     var token = _unit.beginToken;
     while (token != null) {
@@ -394,8 +423,40 @@
     return false;
   }
 
-  void _addRegion(int offset, int length, HighlightRegionType type) {
-    _regions.add(HighlightRegion(type, offset, length));
+  /// Adds a highlight region/semantic token for the given [offset]/[length].
+  ///
+  /// If [semanticTokenType] or [semanticTokenModifiers] are not provided, the
+  /// values from the default LSP mapping for [type] (also used for plugins)
+  /// will be used instead.
+  ///
+  /// If the computer has a [range] set, tokens that fall outside of that range
+  /// will not be recorded.
+  void _addRegion(
+    int offset,
+    int length,
+    HighlightRegionType type, {
+    SemanticTokenTypes semanticTokenType,
+    Set<SemanticTokenModifiers> semanticTokenModifiers,
+  }) {
+    if (range != null) {
+      final end = offset + length;
+      // Skip token if it ends before the range or starts after the range.
+      if (end < range.offset || offset > range.end) {
+        return;
+      }
+    }
+    if (_computeRegions) {
+      _regions.add(HighlightRegion(type, offset, length));
+    }
+    if (_computeSemanticTokens) {
+      // Use default mappings if overridden type/modifiers were not supplied.
+      semanticTokenType ??= highlightRegionTokenTypes[type];
+      semanticTokenModifiers ??= highlightRegionTokenModifiers[type];
+      if (semanticTokenType != null) {
+        _semanticTokens.add(SemanticTokenInfo(
+            offset, length, semanticTokenType, semanticTokenModifiers));
+      }
+    }
   }
 
   bool _addRegion_node(AstNode node, HighlightRegionType type) {
@@ -412,11 +473,18 @@
     _addRegion(offset, end - offset, type);
   }
 
-  void _addRegion_token(Token token, HighlightRegionType type) {
+  void _addRegion_token(
+    Token token,
+    HighlightRegionType type, {
+    SemanticTokenTypes semanticTokenType,
+    Set<SemanticTokenModifiers> semanticTokenModifiers,
+  }) {
     if (token != null) {
       var offset = token.offset;
       var length = token.length;
-      _addRegion(offset, length, type);
+      _addRegion(offset, length, type,
+          semanticTokenType: semanticTokenType,
+          semanticTokenModifiers: semanticTokenModifiers);
     }
   }
 
@@ -427,6 +495,13 @@
     _addRegion(offset, end - offset, type);
   }
 
+  void _reset() {
+    _computeRegions = false;
+    _computeSemanticTokens = false;
+    _regions.clear();
+    _semanticTokens.clear();
+  }
+
   static bool _isDynamicExpression(Expression e) {
     var type = e.staticType;
     return type != null && type.isDynamic;
@@ -453,13 +528,15 @@
 
   @override
   void visitAssertStatement(AssertStatement node) {
-    computer._addRegion_token(node.assertKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.assertKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitAssertStatement(node);
   }
 
   @override
   void visitAwaitExpression(AwaitExpression node) {
-    computer._addRegion_token(node.awaitKeyword, HighlightRegionType.BUILT_IN);
+    computer._addRegion_token(node.awaitKeyword, HighlightRegionType.BUILT_IN,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitAwaitExpression(node);
   }
 
@@ -478,14 +555,17 @@
 
   @override
   void visitBreakStatement(BreakStatement node) {
-    computer._addRegion_token(node.breakKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.breakKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitBreakStatement(node);
   }
 
   @override
   void visitCatchClause(CatchClause node) {
-    computer._addRegion_token(node.catchKeyword, HighlightRegionType.KEYWORD);
-    computer._addRegion_token(node.onKeyword, HighlightRegionType.BUILT_IN);
+    computer._addRegion_token(node.catchKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
+    computer._addRegion_token(node.onKeyword, HighlightRegionType.BUILT_IN,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitCatchClause(node);
   }
 
@@ -515,8 +595,8 @@
 
   @override
   void visitContinueStatement(ContinueStatement node) {
-    computer._addRegion_token(
-        node.continueKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.continueKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitContinueStatement(node);
   }
 
@@ -529,8 +609,10 @@
 
   @override
   void visitDoStatement(DoStatement node) {
-    computer._addRegion_token(node.doKeyword, HighlightRegionType.KEYWORD);
-    computer._addRegion_token(node.whileKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.doKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
+    computer._addRegion_token(node.whileKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitDoStatement(node);
   }
 
@@ -592,27 +674,33 @@
 
   @override
   void visitForEachPartsWithDeclaration(ForEachPartsWithDeclaration node) {
-    computer._addRegion_token(node.inKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.inKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitForEachPartsWithDeclaration(node);
   }
 
   @override
   void visitForEachPartsWithIdentifier(ForEachPartsWithIdentifier node) {
-    computer._addRegion_token(node.inKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.inKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitForEachPartsWithIdentifier(node);
   }
 
   @override
   void visitForElement(ForElement node) {
-    computer._addRegion_token(node.awaitKeyword, HighlightRegionType.BUILT_IN);
-    computer._addRegion_token(node.forKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.awaitKeyword, HighlightRegionType.BUILT_IN,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
+    computer._addRegion_token(node.forKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitForElement(node);
   }
 
   @override
   void visitForStatement(ForStatement node) {
-    computer._addRegion_token(node.awaitKeyword, HighlightRegionType.BUILT_IN);
-    computer._addRegion_token(node.forKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.awaitKeyword, HighlightRegionType.BUILT_IN,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
+    computer._addRegion_token(node.forKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitForStatement(node);
   }
 
@@ -660,15 +748,19 @@
 
   @override
   void visitIfElement(IfElement node) {
-    computer._addRegion_token(node.ifKeyword, HighlightRegionType.KEYWORD);
-    computer._addRegion_token(node.elseKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.ifKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
+    computer._addRegion_token(node.elseKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitIfElement(node);
   }
 
   @override
   void visitIfStatement(IfStatement node) {
-    computer._addRegion_token(node.ifKeyword, HighlightRegionType.KEYWORD);
-    computer._addRegion_token(node.elseKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.ifKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
+    computer._addRegion_token(node.elseKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitIfStatement(node);
   }
 
@@ -795,13 +887,15 @@
 
   @override
   void visitRethrowExpression(RethrowExpression node) {
-    computer._addRegion_token(node.rethrowKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.rethrowKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitRethrowExpression(node);
   }
 
   @override
   void visitReturnStatement(ReturnStatement node) {
-    computer._addRegion_token(node.returnKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.returnKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitReturnStatement(node);
   }
 
@@ -852,19 +946,22 @@
 
   @override
   void visitSwitchCase(SwitchCase node) {
-    computer._addRegion_token(node.keyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.keyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitSwitchCase(node);
   }
 
   @override
   void visitSwitchDefault(SwitchDefault node) {
-    computer._addRegion_token(node.keyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.keyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitSwitchDefault(node);
   }
 
   @override
   void visitSwitchStatement(SwitchStatement node) {
-    computer._addRegion_token(node.switchKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.switchKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitSwitchStatement(node);
   }
 
@@ -876,7 +973,8 @@
 
   @override
   void visitThrowExpression(ThrowExpression node) {
-    computer._addRegion_token(node.throwKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.throwKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitThrowExpression(node);
   }
 
@@ -889,8 +987,10 @@
 
   @override
   void visitTryStatement(TryStatement node) {
-    computer._addRegion_token(node.tryKeyword, HighlightRegionType.KEYWORD);
-    computer._addRegion_token(node.finallyKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.tryKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
+    computer._addRegion_token(node.finallyKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitTryStatement(node);
   }
 
@@ -915,7 +1015,8 @@
 
   @override
   void visitWhileStatement(WhileStatement node) {
-    computer._addRegion_token(node.whileKeyword, HighlightRegionType.KEYWORD);
+    computer._addRegion_token(node.whileKeyword, HighlightRegionType.KEYWORD,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitWhileStatement(node);
   }
 
@@ -931,7 +1032,8 @@
     var star = node.star;
     var offset = keyword.offset;
     var end = star != null ? star.end : keyword.end;
-    computer._addRegion(offset, end - offset, HighlightRegionType.BUILT_IN);
+    computer._addRegion(offset, end - offset, HighlightRegionType.BUILT_IN,
+        semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     super.visitYieldStatement(node);
   }
 
@@ -941,7 +1043,8 @@
       var star = node.star;
       var offset = keyword.offset;
       var end = star != null ? star.end : keyword.end;
-      computer._addRegion(offset, end - offset, HighlightRegionType.BUILT_IN);
+      computer._addRegion(offset, end - offset, HighlightRegionType.BUILT_IN,
+          semanticTokenModifiers: {CustomSemanticTokenModifiers.control});
     }
   }
 }
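
For context, a minimal usage sketch of the updated computer, assuming a ResolvedUnitResult has already been obtained elsewhere (for example from an AnalysisSession). Only the constructor's range parameter and computeSemanticTokens come from this change; the helper name and the chosen range are illustrative.

import 'package:analysis_server/src/computer/computer_highlights.dart';
import 'package:analyzer/dart/analysis/results.dart';
import 'package:analyzer/source/source_range.dart';

void printSemanticTokens(ResolvedUnitResult result) {
  // Restrict output to the first 100 characters of the file (arbitrary range).
  final computer =
      DartUnitHighlightsComputer(result.unit, range: SourceRange(0, 100));
  for (final token in computer.computeSemanticTokens()) {
    print('${token.offset}:${token.length} ${token.type} ${token.modifiers}');
  }
}
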
diff --git a/pkg/analysis_server/lib/src/lsp/constants.dart b/pkg/analysis_server/lib/src/lsp/constants.dart
index 81468ca..ebc2978 100644
--- a/pkg/analysis_server/lib/src/lsp/constants.dart
+++ b/pkg/analysis_server/lib/src/lsp/constants.dart
@@ -99,9 +99,25 @@
       Method('textDocument/semanticTokens');
 }
 
+abstract class CustomSemanticTokenModifiers {
+  /// A modifier applied to control keywords like if/for/etc. so they can be
+  /// coloured differently to other keywords (void, import, etc.), matching the
+  /// original Dart textmate grammar.
+  /// https://github.com/dart-lang/dart-syntax-highlight/blob/84a8e84f79bc917ebd959a4587349c865dc945e0/grammars/dart.json#L244-L261
+  static const control = SemanticTokenModifiers('control');
+
+  /// All custom semantic token modifiers, used to populate the LSP Legend which must
+  /// include all used modifiers.
+  static const values = [control];
+}
+
 abstract class CustomSemanticTokenTypes {
   static const annotation = SemanticTokenTypes('annotation');
   static const boolean = SemanticTokenTypes('boolean');
+
+  /// All custom semantic token types, used to populate the LSP Legend which must
+  /// include all used types.
+  static const values = [annotation, boolean];
 }
 
 /// CodeActionKinds supported by the server that are not declared in the LSP spec.
diff --git a/pkg/analysis_server/lib/src/lsp/handlers/handler_semantic_tokens.dart b/pkg/analysis_server/lib/src/lsp/handlers/handler_semantic_tokens.dart
index 7ffda4f..ad40077 100644
--- a/pkg/analysis_server/lib/src/lsp/handlers/handler_semantic_tokens.dart
+++ b/pkg/analysis_server/lib/src/lsp/handlers/handler_semantic_tokens.dart
@@ -11,40 +11,45 @@
 import 'package:analysis_server/src/lsp/lsp_analysis_server.dart';
 import 'package:analysis_server/src/lsp/mapping.dart';
 import 'package:analysis_server/src/lsp/semantic_tokens/encoder.dart';
-import 'package:analysis_server/src/plugin/result_merger.dart';
 import 'package:analyzer/dart/analysis/results.dart';
+import 'package:analyzer/source/source_range.dart';
 import 'package:analyzer_plugin/protocol/protocol_common.dart';
 
-class SemanticTokensHandler
-    extends MessageHandler<SemanticTokensParams, SemanticTokens>
+abstract class AbstractSemanticTokensHandler<T>
+    extends MessageHandler<T, SemanticTokens>
     with LspPluginRequestHandlerMixin {
-  SemanticTokensHandler(LspAnalysisServer server) : super(server);
-
-  @override
-  Method get handlesMessage => Method.textDocument_semanticTokens_full;
-
-  @override
-  LspJsonHandler<SemanticTokensParams> get jsonHandler =>
-      SemanticTokensParams.jsonHandler;
+  AbstractSemanticTokensHandler(LspAnalysisServer server) : super(server);
 
   List<List<HighlightRegion>> getPluginResults(String path) {
     final notificationManager = server.notificationManager;
     return notificationManager.highlights.getResults(path);
   }
 
-  Future<List<HighlightRegion>> getServerResult(String path) async {
+  Future<List<SemanticTokenInfo>> getServerResult(
+      String path, SourceRange range) async {
     final result = await server.getResolvedUnit(path);
     if (result?.state == ResultState.VALID) {
-      final computer = DartUnitHighlightsComputer(result.unit);
-      return computer.compute();
+      final computer = DartUnitHighlightsComputer(result.unit, range: range);
+      return computer.computeSemanticTokens();
     }
     return [];
   }
 
-  @override
-  Future<ErrorOr<SemanticTokens>> handle(
-      SemanticTokensParams params, CancellationToken token) async {
-    final path = pathOfDoc(params.textDocument);
+  Iterable<SemanticTokenInfo> _filter(
+      Iterable<SemanticTokenInfo> tokens, SourceRange range) {
+    if (range == null) {
+      return tokens;
+    }
+
+    return tokens.where((token) =>
+        !(token.offset + token.length < range.offset ||
+            token.offset > range.end));
+  }
+
+  Future<ErrorOr<SemanticTokens>> _handleImpl(
+      TextDocumentIdentifier textDocument, CancellationToken token,
+      {Range range}) async {
+    final path = pathOfDoc(textDocument);
 
     return path.mapResult((path) async {
       final lineInfo = server.getLineInfo(path);
@@ -54,31 +59,88 @@
         return success(null);
       }
 
-      // We need to be able to split multiline tokens up if a client does not
-      // support them. Doing this correctly requires access to the line endings
-      // and indenting so we must get a copy of the file contents. Although this
-      // is on the Dart unit result, we may also need this for files being
-      // handled by plugins.
-      final file = server.resourceProvider.getFile(path);
-      if (!file.exists) {
-        return success(null);
-      }
-      final fileContents = file.readAsStringSync();
+      return toSourceRange(lineInfo, range).mapResult((range) async {
+        final serverTokens = await getServerResult(path, range);
+        final pluginHighlightRegions =
+            getPluginResults(path).expand((results) => results).toList();
 
-      final allResults = [
-        await getServerResult(path),
-        ...getPluginResults(path),
-      ];
+        if (token.isCancellationRequested) {
+          return cancelled();
+        }
 
-      final merger = ResultMerger();
-      final mergedResults = merger.mergeHighlightRegions(allResults);
+        final encoder = SemanticTokenEncoder();
+        Iterable<SemanticTokenInfo> pluginTokens =
+            encoder.convertHighlightToTokens(pluginHighlightRegions);
 
-      final encoder = SemanticTokenEncoder();
-      final tokens =
-          encoder.convertHighlights(mergedResults, lineInfo, fileContents);
-      final semanticTokens = encoder.encodeTokens(tokens);
+        // Plugin tokens are not filtered at source, so need to be filtered here.
+        pluginTokens = _filter(pluginTokens, range);
 
-      return success(semanticTokens);
+        Iterable<SemanticTokenInfo> tokens = [...serverTokens, ...pluginTokens];
+
+        // Capabilities exist for supporting multiline/overlapping tokens. These
+        // could be used if any clients take it up (VS Code does not).
+        // - clientCapabilities?.multilineTokenSupport
+        // - clientCapabilities?.overlappingTokenSupport
+        final allowMultilineTokens = false;
+        final allowOverlappingTokens = false;
+
+        // Some of the translation operations and the final encoding require
+        // the tokens to be sorted. Do it once here to avoid each method needing
+        // to do it itself (resulting in multiple sorts).
+        tokens = tokens.toList()
+          ..sort(SemanticTokenInfo.offsetLengthPrioritySort);
+
+        if (!allowOverlappingTokens) {
+          tokens = encoder.splitOverlappingTokens(tokens);
+        }
+
+        if (!allowMultilineTokens) {
+          tokens = tokens
+              .expand((token) => encoder.splitMultilineTokens(token, lineInfo));
+
+          // Tokens may need re-filtering after being split up as there may
+          // now be tokens outside of the range.
+          tokens = _filter(tokens, range);
+        }
+
+        final semanticTokens = encoder.encodeTokens(tokens.toList(), lineInfo);
+
+        return success(semanticTokens);
+      });
     });
   }
 }
+
+class SemanticTokensFullHandler
+    extends AbstractSemanticTokensHandler<SemanticTokensParams> {
+  SemanticTokensFullHandler(LspAnalysisServer server) : super(server);
+
+  @override
+  Method get handlesMessage => Method.textDocument_semanticTokens_full;
+
+  @override
+  LspJsonHandler<SemanticTokensParams> get jsonHandler =>
+      SemanticTokensParams.jsonHandler;
+
+  @override
+  Future<ErrorOr<SemanticTokens>> handle(
+          SemanticTokensParams params, CancellationToken token) =>
+      _handleImpl(params.textDocument, token);
+}
+
+class SemanticTokensRangeHandler
+    extends AbstractSemanticTokensHandler<SemanticTokensRangeParams> {
+  SemanticTokensRangeHandler(LspAnalysisServer server) : super(server);
+
+  @override
+  Method get handlesMessage => Method.textDocument_semanticTokens_range;
+
+  @override
+  LspJsonHandler<SemanticTokensRangeParams> get jsonHandler =>
+      SemanticTokensRangeParams.jsonHandler;
+
+  @override
+  Future<ErrorOr<SemanticTokens>> handle(
+          SemanticTokensRangeParams params, CancellationToken token) =>
+      _handleImpl(params.textDocument, token, range: params.range);
+}
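
As a standalone illustration of the _filter predicate above: a token is dropped only when it ends before the requested range or starts after it, so tokens that straddle a range boundary are still returned. The tiny token class and helper below are illustrative only, not server types.

class _Span {
  final int offset, length;
  _Span(this.offset, this.length);
}

Iterable<_Span> filterToRange(Iterable<_Span> tokens, int start, int end) =>
    tokens.where((t) => !(t.offset + t.length < start || t.offset > end));

void main() {
  final tokens = [_Span(0, 5), _Span(10, 4), _Span(30, 2)];
  // With a range of [8, 20], only the token starting at offset 10 survives.
  print(filterToRange(tokens, 8, 20).map((t) => t.offset).toList()); // [10]
}
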
diff --git a/pkg/analysis_server/lib/src/lsp/handlers/handler_states.dart b/pkg/analysis_server/lib/src/lsp/handlers/handler_states.dart
index 0a7d9a6..c1298e5 100644
--- a/pkg/analysis_server/lib/src/lsp/handlers/handler_states.dart
+++ b/pkg/analysis_server/lib/src/lsp/handlers/handler_states.dart
@@ -103,7 +103,8 @@
     registerHandler(WorkspaceDidChangeConfigurationMessageHandler(server));
     registerHandler(ReanalyzeHandler(server));
     registerHandler(WillRenameFilesHandler(server));
-    registerHandler(SemanticTokensHandler(server));
+    registerHandler(SemanticTokensFullHandler(server));
+    registerHandler(SemanticTokensRangeHandler(server));
   }
 }
 
diff --git a/pkg/analysis_server/lib/src/lsp/mapping.dart b/pkg/analysis_server/lib/src/lsp/mapping.dart
index 0366e79..d02da72 100644
--- a/pkg/analysis_server/lib/src/lsp/mapping.dart
+++ b/pkg/analysis_server/lib/src/lsp/mapping.dart
@@ -27,6 +27,7 @@
 import 'package:analyzer/diagnostic/diagnostic.dart' as analyzer;
 import 'package:analyzer/error/error.dart' as server;
 import 'package:analyzer/source/line_info.dart' as server;
+import 'package:analyzer/source/source_range.dart' as server;
 import 'package:analyzer/src/error/codes.dart';
 import 'package:analyzer/src/services/available_declarations.dart';
 import 'package:analyzer/src/services/available_declarations.dart' as dec;
@@ -1205,6 +1206,29 @@
   );
 }
 
+ErrorOr<server.SourceRange> toSourceRange(
+    server.LineInfo lineInfo, Range range) {
+  if (range == null) {
+    return success(null);
+  }
+
+  // If there is a range, convert to offsets because that's what
+  // the tokens are computed using initially.
+  final start = toOffset(lineInfo, range.start);
+  final end = toOffset(lineInfo, range.end);
+  if (start?.isError ?? false) {
+    return failure(start);
+  }
+  if (end?.isError ?? false) {
+    return failure(end);
+  }
+
+  final startOffset = start?.result;
+  final endOffset = end?.result;
+
+  return success(server.SourceRange(startOffset, endOffset - startOffset));
+}
+
 lsp.TextDocumentEdit toTextDocumentEdit(FileEditInformation edit) {
   return lsp.TextDocumentEdit(
     textDocument: edit.doc,
diff --git a/pkg/analysis_server/lib/src/lsp/semantic_tokens/encoder.dart b/pkg/analysis_server/lib/src/lsp/semantic_tokens/encoder.dart
index 735aa27..4ee6f99 100644
--- a/pkg/analysis_server/lib/src/lsp/semantic_tokens/encoder.dart
+++ b/pkg/analysis_server/lib/src/lsp/semantic_tokens/encoder.dart
@@ -6,6 +6,7 @@
 import 'dart:math' as math;
 
 import 'package:analysis_server/lsp_protocol/protocol_generated.dart';
+import 'package:analysis_server/src/lsp/constants.dart';
 import 'package:analysis_server/src/lsp/semantic_tokens/legend.dart';
 import 'package:analysis_server/src/lsp/semantic_tokens/mapping.dart';
 import 'package:analyzer/source/line_info.dart';
@@ -15,22 +16,14 @@
 /// token types/modifiers and encodes them into a [List<int>] in a
 /// [SemanticTokens] (a [List<int>]) as described by the LSP spec.
 class SemanticTokenEncoder {
-  /// Converts [regions]s into LSP [SemanticTokenInfo], splitting multiline tokens
-  /// and nested tokens if required.
-  List<SemanticTokenInfo> convertHighlights(
-      List<HighlightRegion> regions, LineInfo lineInfo, String fileContent) {
-    // LSP is zero-based but server is 1-based.
-    const lspPositionOffset = -1;
+  // LSP is zero-based but server is 1-based.
+  static const _serverToLspLineOffset = -1;
 
+  /// Converts [regions] into LSP [SemanticTokenInfo].
+  List<SemanticTokenInfo> convertHighlightToTokens(
+      List<HighlightRegion> regions) {
     final tokens = <SemanticTokenInfo>[];
 
-    // Capabilities exist for supporting multiline/overlapping tokens. These
-    // could be used if any clients take it up (VS Code does not).
-    // - clientCapabilities?.multilineTokenSupport
-    // - clientCapabilities?.overlappingTokenSupport
-    final allowMultilineTokens = false;
-    final allowOverlappingTokens = false;
-
     Iterable<HighlightRegion> translatedRegions = regions;
 
     // Remove any tokens that will not be mapped as there's no point further processing
@@ -38,22 +31,11 @@
     translatedRegions = translatedRegions
         .where((region) => highlightRegionTokenTypes.containsKey(region.type));
 
-    if (!allowMultilineTokens) {
-      translatedRegions = translatedRegions.expand(
-          (region) => _splitMultilineRegions(region, lineInfo, fileContent));
-    }
-
-    if (!allowOverlappingTokens) {
-      translatedRegions = _splitOverlappingTokens(translatedRegions);
-    }
-
     for (final region in translatedRegions) {
       final tokenType = highlightRegionTokenTypes[region.type];
-      final start = lineInfo.getLocation(region.offset);
 
       tokens.add(SemanticTokenInfo(
-        start.lineNumber + lspPositionOffset,
-        start.columnNumber + lspPositionOffset,
+        region.offset,
         region.length,
         tokenType,
         highlightRegionTokenModifiers[region.type],
@@ -63,20 +45,24 @@
     return tokens;
   }
 
-  SemanticTokens encodeTokens(List<SemanticTokenInfo> tokens) {
+  /// Encodes tokens according to the LSP spec.
+  ///
+  /// Tokens must be pre-sorted by offset so that relative line/columns are accurate.
+  SemanticTokens encodeTokens(
+      List<SemanticTokenInfo> sortedTokens, LineInfo lineInfo) {
     final encodedTokens = <int>[];
     var lastLine = 0;
     var lastColumn = 0;
 
-    // Ensure tokens are all sorted by location in file regardless of the order
-    // they were registered.
-    tokens.sort(SemanticTokenInfo.offsetSort);
+    for (final token in sortedTokens) {
+      final location = lineInfo.getLocation(token.offset);
+      final tokenLine = location.lineNumber + _serverToLspLineOffset;
+      final tokenColumn = location.columnNumber + _serverToLspLineOffset;
 
-    for (final token in tokens) {
-      var relativeLine = token.line - lastLine;
+      final relativeLine = tokenLine - lastLine;
       // Column is relative to last only if on the same line.
-      var relativeColumn =
-          relativeLine == 0 ? token.column - lastColumn : token.column;
+      final relativeColumn =
+          relativeLine == 0 ? tokenColumn - lastColumn : tokenColumn;
 
       // The resulting array is groups of 5 items as described in the LSP spec:
       // https://github.com/microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-16.md#textDocument_semanticTokens
@@ -88,52 +74,20 @@
         semanticTokenLegend.bitmaskForModifiers(token.modifiers) ?? 0
       ]);
 
-      lastLine = token.line;
-      lastColumn = token.column;
+      lastLine = tokenLine;
+      lastColumn = tokenColumn;
     }
 
     return SemanticTokens(data: encodedTokens);
   }
 
-  /// Sorted for highlight regions that ensures tokens are sorted in offset order
-  /// then longest first, then by priority, and finally by name. This ensures
-  /// the order is always stable.
-  int _regionOffsetLengthPrioritySorter(
-      HighlightRegion r1, HighlightRegion r2) {
-    const priorities = {
-      // Ensure boolean comes above keyword.
-      HighlightRegionType.LITERAL_BOOLEAN: 1,
-    };
-
-    // First sort by offset.
-    if (r1.offset != r2.offset) {
-      return r1.offset.compareTo(r2.offset);
-    }
-
-    // Then length (so longest are first).
-    if (r1.length != r2.length) {
-      return -r1.length.compareTo(r2.length);
-    }
-
-    // Next sort by priority (if different).
-    final priority1 = priorities[r1.type] ?? 0;
-    final priority2 = priorities[r2.type] ?? 0;
-    if (priority1 != priority2) {
-      return priority1.compareTo(priority2);
-    }
-
-    // If the tokens had the same offset and length, sort by name. This
-    // is completely arbitrary but it's only important that it is consistent
-    // between regions and the sort is stable.
-    return r1.type.name.compareTo(r2.type.name);
-  }
-
   /// Splits multiline regions into multiple regions for clients that do not support
-  /// multiline tokens.
-  Iterable<HighlightRegion> _splitMultilineRegions(
-      HighlightRegion region, LineInfo lineInfo, String fileContent) sync* {
-    final start = lineInfo.getLocation(region.offset);
-    final end = lineInfo.getLocation(region.offset + region.length);
+  /// multiline tokens. Multiline tokens are split at the end of each line, and
+  /// line endings and indentation are included in the resulting tokens.
+  Iterable<SemanticTokenInfo> splitMultilineTokens(
+      SemanticTokenInfo token, LineInfo lineInfo) sync* {
+    final start = lineInfo.getLocation(token.offset);
+    final end = lineInfo.getLocation(token.offset + token.length);
 
     // Create a region for each line in the original region.
     for (var lineNumber = start.lineNumber;
@@ -141,51 +95,35 @@
         lineNumber++) {
       final isFirstLine = lineNumber == start.lineNumber;
       final isLastLine = lineNumber == end.lineNumber;
-      final isSingleLine = start.lineNumber == end.lineNumber;
       final lineOffset = lineInfo.getOffsetOfLine(lineNumber - 1);
 
-      var startOffset = isFirstLine ? start.columnNumber - 1 : 0;
-      var endOffset = isLastLine
+      final startOffset = isFirstLine ? start.columnNumber - 1 : 0;
+      final endOffset = isLastLine
           ? end.columnNumber - 1
           : lineInfo.getOffsetOfLine(lineNumber) - lineOffset;
-      var length = endOffset - startOffset;
+      final length = endOffset - startOffset;
 
-      // When we split multiline tokens, we may end up with leading/trailing
-      // whitespace which doesn't make sense to include in the token. Examine
-      // the content to remove this.
-      if (!isSingleLine) {
-        final tokenContent = fileContent.substring(
-            lineOffset + startOffset, lineOffset + endOffset);
-        final leadingWhitespaceCount =
-            tokenContent.length - tokenContent.trimLeft().length;
-        final trailingWhitespaceCount =
-            tokenContent.length - tokenContent.trimRight().length;
-
-        startOffset += leadingWhitespaceCount;
-        endOffset -= trailingWhitespaceCount;
-        length = endOffset - startOffset;
-      }
-
-      yield HighlightRegion(region.type, lineOffset + startOffset, length);
+      yield SemanticTokenInfo(
+          lineOffset + startOffset, length, token.type, token.modifiers);
     }
   }
 
-  Iterable<HighlightRegion> _splitOverlappingTokens(
-      Iterable<HighlightRegion> regions) sync* {
-    if (regions.isEmpty) {
+  /// Splits overlapping/nested tokens into discrete ranges for the "top-most"
+  /// token.
+  ///
+  /// Tokens must be pre-sorted by offset, with tokens having the same offset sorted
+  /// with the longest first.
+  Iterable<SemanticTokenInfo> splitOverlappingTokens(
+      Iterable<SemanticTokenInfo> sortedTokens) sync* {
+    if (sortedTokens.isEmpty) {
       return;
     }
 
-    // Sort tokens so by offset, shortest length, priority then name to ensure
-    // tne sort is always stable.
-    final sortedRegions = regions.toList()
-      ..sort(_regionOffsetLengthPrioritySorter);
+    final firstToken = sortedTokens.first;
+    final stack = ListQueue<SemanticTokenInfo>()..add(firstToken);
+    var pos = firstToken.offset;
 
-    final firstRegion = sortedRegions.first;
-    final stack = ListQueue<HighlightRegion>()..add(firstRegion);
-    var pos = firstRegion.offset;
-
-    for (final current in sortedRegions.skip(1)) {
+    for (final current in sortedTokens.skip(1)) {
       if (stack.last != null) {
         final last = stack.last;
         final newPos = current.offset;
@@ -194,7 +132,7 @@
           // the position of this next region, whichever is shorter.
           final end = math.min(last.offset + last.length, newPos);
           final length = end - pos;
-          yield HighlightRegion(last.type, pos, length);
+          yield SemanticTokenInfo(pos, length, last.type, last.modifiers);
           pos = newPos;
         }
       }
@@ -208,7 +146,7 @@
       final newPos = last.offset + last.length;
       final length = newPos - pos;
       if (length > 0) {
-        yield HighlightRegion(last.type, pos, length);
+        yield SemanticTokenInfo(pos, length, last.type, last.modifiers);
         pos = newPos;
       }
     }
@@ -216,17 +154,43 @@
 }
 
 class SemanticTokenInfo {
-  final int line;
-  final int column;
+  final int offset;
   final int length;
   final SemanticTokenTypes type;
   final Set<SemanticTokenModifiers> modifiers;
 
-  SemanticTokenInfo(
-      this.line, this.column, this.length, this.type, this.modifiers);
+  SemanticTokenInfo(this.offset, this.length, this.type, this.modifiers);
 
-  static int offsetSort(SemanticTokenInfo t1, SemanticTokenInfo t2) =>
-      t1.line == t2.line
-          ? t1.column.compareTo(t2.column)
-          : t1.line.compareTo(t2.line);
+  /// Sorter for semantic tokens that ensures tokens are sorted in offset order
+  /// then longest first, then by priority, and finally by name. This ensures
+  /// the order is always stable.
+  static int offsetLengthPrioritySort(
+      SemanticTokenInfo t1, SemanticTokenInfo t2) {
+    final priorities = {
+      // Ensure boolean comes above keyword.
+      CustomSemanticTokenTypes.boolean: 1,
+    };
+
+    // First sort by offset.
+    if (t1.offset != t2.offset) {
+      return t1.offset.compareTo(t2.offset);
+    }
+
+    // Then length (so longest are first).
+    if (t1.length != t2.length) {
+      return -t1.length.compareTo(t2.length);
+    }
+
+    // Next sort by priority (if different).
+    final priority1 = priorities[t1.type] ?? 0;
+    final priority2 = priorities[t2.type] ?? 0;
+    if (priority1 != priority2) {
+      return priority1.compareTo(priority2);
+    }
+
+    // If the tokens had the same offset and length, sort by name. This
+    // is completely arbitrary but it's only important that it is consistent
+    // between tokens and the sort is stable.
+    return t1.type.toString().compareTo(t2.type.toString());
+  }
 }
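
For reference, a small worked example (not part of the change) of the encoding that encodeTokens produces: each token becomes five integers — line delta, column (relative to the previous token's start only when on the same line), length, token type index, and a modifier bitmask — per the LSP 3.16 spec linked above. The type/modifier indices used below are assumed legend positions, purely for illustration.

void main() {
  // Three tokens: a keyword at line 0 col 0 (length 5), a name at line 0
  // col 6 (length 3), and a comment at line 2 col 2 (length 10).
  final encoded = [
    0, 0, 5, 2, 0, // line delta 0, column 0, length 5, type 2, no modifiers
    0, 6, 3, 1, 0, // same line, so column is relative to the previous start
    2, 2, 10, 0, 1, // two lines down, so column is absolute again
  ];
  assert(encoded.length % 5 == 0);
  print(encoded);
}
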
diff --git a/pkg/analysis_server/lib/src/lsp/semantic_tokens/legend.dart b/pkg/analysis_server/lib/src/lsp/semantic_tokens/legend.dart
index ad4220e..ef2d8c4 100644
--- a/pkg/analysis_server/lib/src/lsp/semantic_tokens/legend.dart
+++ b/pkg/analysis_server/lib/src/lsp/semantic_tokens/legend.dart
@@ -5,6 +5,7 @@
 import 'dart:math' as math;
 
 import 'package:analysis_server/lsp_protocol/protocol_generated.dart';
+import 'package:analysis_server/src/lsp/constants.dart';
 import 'package:analysis_server/src/lsp/semantic_tokens/mapping.dart';
 import 'package:meta/meta.dart';
 
@@ -27,11 +28,15 @@
   List<SemanticTokenTypes> _usedTokenTypes;
 
   SemanticTokenLegendLookup() {
-    // Build lists of all tokens and modifiers that exist in our mappings. These will
-    // be used to determine the indexes used for communication.
-    _usedTokenTypes = Set.of(highlightRegionTokenTypes.values).toList();
-    _usedTokenModifiers =
-        Set.of(highlightRegionTokenModifiers.values.expand((v) => v)).toList();
+    // Build lists of all tokens and modifiers that exist in our mappings or
+    // that we have added as custom types. These will be used to determine the
+    // indexes used for communication.
+    _usedTokenTypes = Set.of(highlightRegionTokenTypes.values
+            .followedBy(CustomSemanticTokenTypes.values))
+        .toList();
+    _usedTokenModifiers = Set.of(highlightRegionTokenModifiers.values
+            .expand((v) => v)
+            .followedBy(CustomSemanticTokenModifiers.values))
+        .toList();
 
     // Build the LSP Legend which tells the client all of the tokens and modifiers
     // we will use in the order they should be accessed by index/bit.
diff --git a/pkg/analysis_server/lib/src/lsp/server_capabilities_computer.dart b/pkg/analysis_server/lib/src/lsp/server_capabilities_computer.dart
index 97c8de4..f67feaa 100644
--- a/pkg/analysis_server/lib/src/lsp/server_capabilities_computer.dart
+++ b/pkg/analysis_server/lib/src/lsp/server_capabilities_computer.dart
@@ -8,9 +8,6 @@
 import 'package:analysis_server/src/lsp/lsp_analysis_server.dart';
 import 'package:analysis_server/src/lsp/semantic_tokens/legend.dart';
 
-/// Semantic tokens temporarily disabled due to a race condition.
-const enableSemanticTokens = false;
-
 /// Helper for reading client dynamic registrations which may be omitted by the
 /// client.
 class ClientDynamicRegistrations {
@@ -44,7 +41,7 @@
     Method.workspace_willRenameFiles,
     // Semantic tokens are all registered under a single "method" as the
     // actual methods are controlled by the server capabilities.
-    if (enableSemanticTokens) CustomMethods.semanticTokenDynamicRegistration,
+    CustomMethods.semanticTokenDynamicRegistration,
   ];
   final ClientCapabilities _capabilities;
 
@@ -236,18 +233,18 @@
               FoldingRangeRegistrationOptions>.t1(
               true,
             ),
-      semanticTokensProvider:
-          dynamicRegistrations.semanticTokens || !enableSemanticTokens
-              ? null
-              : Either2<SemanticTokensOptions,
-                  SemanticTokensRegistrationOptions>.t1(
-                  SemanticTokensOptions(
-                    legend: semanticTokenLegend.lspLegend,
-                    full: Either2<bool, SemanticTokensOptionsFull>.t2(
-                      SemanticTokensOptionsFull(delta: false),
-                    ),
-                  ),
+      semanticTokensProvider: dynamicRegistrations.semanticTokens
+          ? null
+          : Either2<SemanticTokensOptions,
+              SemanticTokensRegistrationOptions>.t1(
+              SemanticTokensOptions(
+                legend: semanticTokenLegend.lspLegend,
+                full: Either2<bool, SemanticTokensOptionsFull>.t2(
+                  SemanticTokensOptionsFull(delta: false),
                 ),
+                range: Either2<bool, SemanticTokensOptionsRange>.t1(true),
+              ),
+            ),
       executeCommandProvider: ExecuteCommandOptions(
         commands: Commands.serverSupportedCommands,
         workDoneProgress: true,
@@ -450,7 +447,7 @@
       Method.workspace_didChangeConfiguration,
     );
     register(
-      dynamicRegistrations.semanticTokens && enableSemanticTokens,
+      dynamicRegistrations.semanticTokens,
       CustomMethods.semanticTokenDynamicRegistration,
       SemanticTokensRegistrationOptions(
         documentSelector: fullySupportedTypes,
@@ -458,6 +455,7 @@
         full: Either2<bool, SemanticTokensOptionsFull>.t2(
           SemanticTokensOptionsFull(delta: false),
         ),
+        range: Either2<bool, SemanticTokensOptionsRange>.t1(true),
       ),
     );
 
diff --git a/pkg/analysis_server/lib/src/server/driver.dart b/pkg/analysis_server/lib/src/server/driver.dart
index 183c669..d7efddf 100644
--- a/pkg/analysis_server/lib/src/server/driver.dart
+++ b/pkg/analysis_server/lib/src/server/driver.dart
@@ -98,6 +98,11 @@
   /// The path to the data cache.
   static const String CACHE_FOLDER = 'cache';
 
+  /// The name of the flag specifying the server protocol to use.
+  static const String SERVER_PROTOCOL = 'protocol';
+  static const String PROTOCOL_ANALYZER = 'analyzer';
+  static const String PROTOCOL_LSP = 'lsp';
+
   /// The name of the flag to use the Language Server Protocol (LSP).
   static const String USE_LSP = 'lsp';
 
@@ -124,17 +129,26 @@
   /// If [sendPort] is not null, assumes this is launched in an isolate and will
   /// connect to the original isolate via an [IsolateChannel].
   @override
-  void start(List<String> arguments, [SendPort sendPort]) {
-    var parser = _createArgParser();
+  void start(
+    List<String> arguments, {
+    SendPort sendPort,
+    bool defaultToLsp = false,
+  }) {
+    var parser = createArgParser(defaultToLsp: defaultToLsp);
     var results = parser.parse(arguments);
 
     var analysisServerOptions = AnalysisServerOptions();
     analysisServerOptions.newAnalysisDriverLog =
         results[ANALYSIS_DRIVER_LOG] ?? results[ANALYSIS_DRIVER_LOG_ALIAS];
     analysisServerOptions.clientId = results[CLIENT_ID];
-    analysisServerOptions.useLanguageServerProtocol = results[USE_LSP];
-    // For clients that don't supply their own identifier, use a default based on
-    // whether the server will run in LSP mode or not.
+    if (results.wasParsed(USE_LSP)) {
+      analysisServerOptions.useLanguageServerProtocol = results[USE_LSP];
+    } else {
+      analysisServerOptions.useLanguageServerProtocol =
+          results[SERVER_PROTOCOL] == PROTOCOL_LSP;
+    }
+    // For clients that don't supply their own identifier, use a default based
+    // on whether the server will run in LSP mode or not.
     analysisServerOptions.clientId ??=
         analysisServerOptions.useLanguageServerProtocol
             ? 'unknown.client.lsp'
@@ -478,11 +492,95 @@
     return runZoned(callback, zoneSpecification: zoneSpecification);
   }
 
+  DartSdk _createDefaultSdk(String defaultSdkPath) {
+    var resourceProvider = PhysicalResourceProvider.INSTANCE;
+    return FolderBasedDartSdk(
+      resourceProvider,
+      resourceProvider.getFolder(defaultSdkPath),
+    );
+  }
+
+  /// Constructs a uuid combining the current date and a random integer.
+  String _generateUuidString() {
+    var millisecondsSinceEpoch = DateTime.now().millisecondsSinceEpoch;
+    var random = Random().nextInt(0x3fffffff);
+    return '$millisecondsSinceEpoch$random';
+  }
+
+  String _getSdkPath(ArgResults args) {
+    if (args[DART_SDK] != null) {
+      return args[DART_SDK];
+    } else if (args[DART_SDK_ALIAS] != null) {
+      return args[DART_SDK_ALIAS];
+    } else {
+      return getSdkPath();
+    }
+  }
+
+  /// Print information about how to use the server.
+  void _printUsage(
+    ArgParser parser,
+    telemetry.Analytics analytics, {
+    bool fromHelp = false,
+  }) {
+    print('Usage: $BINARY_NAME [flags]');
+    print('');
+    print('Supported flags are:');
+    print(parser.usage);
+
+    if (telemetry.SHOW_ANALYTICS_UI) {
+      // Print analytics status and information.
+      if (fromHelp) {
+        print('');
+        print(telemetry.analyticsNotice);
+      }
+      print('');
+      print(telemetry.createAnalyticsStatusMessage(analytics.enabled,
+          command: ANALYTICS_FLAG));
+    }
+  }
+
+  /// Read the UUID from disk, generating and storing a new one if necessary.
+  String _readUuid(InstrumentationService service) {
+    final instrumentationLocation =
+        PhysicalResourceProvider.INSTANCE.getStateLocation('.instrumentation');
+    if (instrumentationLocation == null) {
+      return _generateUuidString();
+    }
+    var uuidFile = File(instrumentationLocation.getChild('uuid.txt').path);
+    try {
+      if (uuidFile.existsSync()) {
+        var uuid = uuidFile.readAsStringSync();
+        if (uuid != null && uuid.length > 5) {
+          return uuid;
+        }
+      }
+    } catch (exception, stackTrace) {
+      service.logException(exception, stackTrace);
+    }
+    var uuid = _generateUuidString();
+    try {
+      uuidFile.parent.createSync(recursive: true);
+      uuidFile.writeAsStringSync(uuid);
+    } catch (exception, stackTrace) {
+      service.logException(exception, stackTrace);
+      // Slightly alter the uuid to indicate it was not persisted
+      uuid = 'temp-$uuid';
+    }
+    return uuid;
+  }
+
   /// Create and return the parser used to parse the command-line arguments.
-  ArgParser _createArgParser() {
-    var parser = ArgParser();
-    parser.addFlag(HELP_OPTION,
-        abbr: 'h', negatable: false, help: 'Print this usage information.');
+  static ArgParser createArgParser({
+    int usageLineLength,
+    bool includeHelpFlag = true,
+    bool defaultToLsp = false,
+  }) {
+    var parser = ArgParser(usageLineLength: usageLineLength);
+    if (includeHelpFlag) {
+      parser.addFlag(HELP_OPTION,
+          abbr: 'h', negatable: false, help: 'Print this usage information.');
+    }
     parser.addOption(CLIENT_ID,
         valueHelp: 'name',
         help: 'An identifier for the analysis server client.');
@@ -495,10 +593,28 @@
     parser.addOption(CACHE_FOLDER,
         valueHelp: 'path',
         help: 'Override the location of the analysis server\'s cache.');
+
+    parser.addOption(
+      SERVER_PROTOCOL,
+      defaultsTo: defaultToLsp ? PROTOCOL_LSP : PROTOCOL_ANALYZER,
+      valueHelp: 'protocol',
+      allowed: [PROTOCOL_LSP, PROTOCOL_ANALYZER],
+      allowedHelp: {
+        PROTOCOL_LSP: 'The Language Server Protocol '
+            '(https://microsoft.github.io/language-server-protocol)',
+        PROTOCOL_ANALYZER: 'Dart\'s analysis server protocol '
+            '(https://dart.dev/go/analysis-server-protocol)',
+      },
+      help:
+          'Specify the protocol to use to communicate with the analysis server.',
+    );
+    // This option is hidden but still accepted; it's effectively translated to
+    // the 'protocol' option above.
     parser.addFlag(USE_LSP,
         defaultsTo: false,
         negatable: false,
-        help: 'Whether to use the Language Server Protocol (LSP).');
+        help: 'Whether to use the Language Server Protocol (LSP).',
+        hide: true);
 
     parser.addSeparator('Server diagnostics:');
 
@@ -584,84 +700,6 @@
     return parser;
   }
 
-  DartSdk _createDefaultSdk(String defaultSdkPath) {
-    var resourceProvider = PhysicalResourceProvider.INSTANCE;
-    return FolderBasedDartSdk(
-      resourceProvider,
-      resourceProvider.getFolder(defaultSdkPath),
-    );
-  }
-
-  /// Constructs a uuid combining the current date and a random integer.
-  String _generateUuidString() {
-    var millisecondsSinceEpoch = DateTime.now().millisecondsSinceEpoch;
-    var random = Random().nextInt(0x3fffffff);
-    return '$millisecondsSinceEpoch$random';
-  }
-
-  String _getSdkPath(ArgResults args) {
-    if (args[DART_SDK] != null) {
-      return args[DART_SDK];
-    } else if (args[DART_SDK_ALIAS] != null) {
-      return args[DART_SDK_ALIAS];
-    } else {
-      return getSdkPath();
-    }
-  }
-
-  /// Print information about how to use the server.
-  void _printUsage(
-    ArgParser parser,
-    telemetry.Analytics analytics, {
-    bool fromHelp = false,
-  }) {
-    print('Usage: $BINARY_NAME [flags]');
-    print('');
-    print('Supported flags are:');
-    print(parser.usage);
-
-    if (telemetry.SHOW_ANALYTICS_UI) {
-      // Print analytics status and information.
-      if (fromHelp) {
-        print('');
-        print(telemetry.analyticsNotice);
-      }
-      print('');
-      print(telemetry.createAnalyticsStatusMessage(analytics.enabled,
-          command: ANALYTICS_FLAG));
-    }
-  }
-
-  /// Read the UUID from disk, generating and storing a new one if necessary.
-  String _readUuid(InstrumentationService service) {
-    final instrumentationLocation =
-        PhysicalResourceProvider.INSTANCE.getStateLocation('.instrumentation');
-    if (instrumentationLocation == null) {
-      return _generateUuidString();
-    }
-    var uuidFile = File(instrumentationLocation.getChild('uuid.txt').path);
-    try {
-      if (uuidFile.existsSync()) {
-        var uuid = uuidFile.readAsStringSync();
-        if (uuid != null && uuid.length > 5) {
-          return uuid;
-        }
-      }
-    } catch (exception, stackTrace) {
-      service.logException(exception, stackTrace);
-    }
-    var uuid = _generateUuidString();
-    try {
-      uuidFile.parent.createSync(recursive: true);
-      uuidFile.writeAsStringSync(uuid);
-    } catch (exception, stackTrace) {
-      service.logException(exception, stackTrace);
-      // Slightly alter the uuid to indicate it was not persisted
-      uuid = 'temp-$uuid';
-    }
-    return uuid;
-  }
-
   /// Perform log files rolling.
   ///
   /// Rename existing files with names `[path].(x)` to `[path].(x+1)`.
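
A quick sketch of how the new protocol option interacts with the hidden --lsp flag, mirroring the logic in start above. The import path and example arguments are assumptions for illustration; the parser and constants come from the now-public static createArgParser and the Driver constants added in this change.

import 'package:analysis_server/src/server/driver.dart';

void main() {
  final parser = Driver.createArgParser(defaultToLsp: false);
  final results = parser.parse(['--protocol', 'lsp']);
  // The hidden --lsp flag wins if explicitly passed; otherwise the new
  // protocol option decides whether the server speaks LSP.
  final useLsp = results.wasParsed(Driver.USE_LSP)
      ? results[Driver.USE_LSP] as bool
      : results[Driver.SERVER_PROTOCOL] == Driver.PROTOCOL_LSP;
  print(useLsp); // true
}
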
diff --git a/pkg/analysis_server/lib/starter.dart b/pkg/analysis_server/lib/starter.dart
index 5877e34..78fb773 100644
--- a/pkg/analysis_server/lib/starter.dart
+++ b/pkg/analysis_server/lib/starter.dart
@@ -31,5 +31,6 @@
   set instrumentationService(InstrumentationService service);
 
   /// Use the given command-line [arguments] to start this server.
-  void start(List<String> arguments, [SendPort sendPort]);
+  void start(List<String> arguments,
+      {SendPort sendPort, bool defaultToLsp = false});
 }
diff --git a/pkg/analysis_server/test/lsp/initialization_test.dart b/pkg/analysis_server/test/lsp/initialization_test.dart
index fad9377..ce2263f 100644
--- a/pkg/analysis_server/test/lsp/initialization_test.dart
+++ b/pkg/analysis_server/test/lsp/initialization_test.dart
@@ -131,8 +131,7 @@
     expect(initResult.capabilities.foldingRangeProvider, isNotNull);
     expect(initResult.capabilities.workspace.fileOperations.willRename,
         equals(ServerCapabilitiesComputer.fileOperationRegistrationOptions));
-    expect(initResult.capabilities.semanticTokensProvider,
-        enableSemanticTokens ? isNotNull : isNull);
+    expect(initResult.capabilities.semanticTokensProvider, isNotNull);
 
     expect(didGetRegisterCapabilityRequest, isFalse);
   }
diff --git a/pkg/analysis_server/test/lsp/semantic_tokens_test.dart b/pkg/analysis_server/test/lsp/semantic_tokens_test.dart
index a8a7a157..3886184 100644
--- a/pkg/analysis_server/test/lsp/semantic_tokens_test.dart
+++ b/pkg/analysis_server/test/lsp/semantic_tokens_test.dart
@@ -375,6 +375,48 @@
     expect(decoded, equals([...expected1, ...expected2]));
   }
 
+  Future<void> test_keywords() async {
+    // "control" keywords should be tagged with a modifier so the client
+    // can colour them differently to other keywords.
+    final content = r'''
+    void main() async {
+      var a = new Object();
+      await null;
+      if (false) {
+        print('test');
+      }
+    }
+    ''';
+
+    final expected = [
+      _Token('void', SemanticTokenTypes.keyword),
+      _Token('main', SemanticTokenTypes.function,
+          [SemanticTokenModifiers.declaration, SemanticTokenModifiers.static]),
+      _Token('async', SemanticTokenTypes.keyword,
+          [CustomSemanticTokenModifiers.control]),
+      _Token('var', SemanticTokenTypes.keyword),
+      _Token('a', SemanticTokenTypes.variable,
+          [SemanticTokenModifiers.declaration]),
+      _Token('new', SemanticTokenTypes.keyword),
+      _Token('Object', SemanticTokenTypes.class_),
+      _Token('await', SemanticTokenTypes.keyword,
+          [CustomSemanticTokenModifiers.control]),
+      _Token('null', SemanticTokenTypes.keyword),
+      _Token('if', SemanticTokenTypes.keyword,
+          [CustomSemanticTokenModifiers.control]),
+      _Token('false', CustomSemanticTokenTypes.boolean),
+      _Token('print', SemanticTokenTypes.function),
+      _Token("'test'", SemanticTokenTypes.string),
+    ];
+
+    await initialize();
+    await openFile(mainFileUri, withoutMarkers(content));
+
+    final tokens = await getSemanticTokens(mainFileUri);
+    final decoded = decodeSemanticTokens(content, tokens);
+    expect(decoded, equals(expected));
+  }
+
   Future<void> test_lastLine_code() async {
     final content = 'String var;';
 
@@ -413,11 +455,11 @@
  */''';
 
     final expected = [
-      _Token('/**', SemanticTokenTypes.comment,
+      _Token('/**\n', SemanticTokenTypes.comment,
           [SemanticTokenModifiers.documentation]),
-      _Token('* Trailing comment', SemanticTokenTypes.comment,
+      _Token(' * Trailing comment\n', SemanticTokenTypes.comment,
           [SemanticTokenModifiers.documentation]),
-      _Token('*/', SemanticTokenTypes.comment,
+      _Token(' */', SemanticTokenTypes.comment,
           [SemanticTokenModifiers.documentation]),
     ];
 
@@ -524,27 +566,27 @@
 
   Future<void> test_multilineRegions() async {
     final content = '''
-    /**
-     * This is my class comment
-     *
-     * There are
-     * multiple lines
-     */
-    class MyClass {}
+/**
+ * This is my class comment
+ *
+ * There are
+ * multiple lines
+ */
+class MyClass {}
     ''';
 
     final expected = [
-      _Token('/**', SemanticTokenTypes.comment,
+      _Token('/**\n', SemanticTokenTypes.comment,
           [SemanticTokenModifiers.documentation]),
-      _Token('* This is my class comment', SemanticTokenTypes.comment,
+      _Token(' * This is my class comment\n', SemanticTokenTypes.comment,
           [SemanticTokenModifiers.documentation]),
-      _Token('*', SemanticTokenTypes.comment,
+      _Token(' *\n', SemanticTokenTypes.comment,
           [SemanticTokenModifiers.documentation]),
-      _Token('* There are', SemanticTokenTypes.comment,
+      _Token(' * There are\n', SemanticTokenTypes.comment,
           [SemanticTokenModifiers.documentation]),
-      _Token('* multiple lines', SemanticTokenTypes.comment,
+      _Token(' * multiple lines\n', SemanticTokenTypes.comment,
           [SemanticTokenModifiers.documentation]),
-      _Token('*/', SemanticTokenTypes.comment,
+      _Token(' */', SemanticTokenTypes.comment,
           [SemanticTokenModifiers.documentation]),
       _Token('class', SemanticTokenTypes.keyword),
       _Token('MyClass', SemanticTokenTypes.class_),
@@ -558,15 +600,104 @@
     expect(decoded, equals(expected));
   }
 
-  Future<void> test_strings() async {
-    final content = r'''
-    String foo(String c) => c;
-    const string1 = 'test';
-    const string2 = 'test1 $string1 test2 ${foo('test3')}';
-    const string3 = r'$string1 ${string1.length}';
+  Future<void> test_range() async {
+    final content = '''
+    /// class docs
+    class [[MyClass<T> {
+      // class comment
+    }]]
+
+    // Trailing comment
     ''';
 
     final expected = [
+      _Token('MyClass', SemanticTokenTypes.class_),
+      _Token('T', SemanticTokenTypes.typeParameter),
+      _Token('// class comment', SemanticTokenTypes.comment),
+    ];
+
+    await initialize();
+    await openFile(mainFileUri, withoutMarkers(content));
+
+    final tokens =
+        await getSemanticTokensRange(mainFileUri, rangeFromMarkers(content));
+    final decoded = decodeSemanticTokens(withoutMarkers(content), tokens);
+    expect(decoded, equals(expected));
+  }
+
+  Future<void> test_range_entireFile() async {
+    final content = '''[[
+    /// class docs
+    class MyClass<T> {
+      // class comment
+    }
+
+    // Trailing comment
+    ]]''';
+
+    final expected = [
+      _Token('/// class docs', SemanticTokenTypes.comment,
+          [SemanticTokenModifiers.documentation]),
+      _Token('class', SemanticTokenTypes.keyword),
+      _Token('MyClass', SemanticTokenTypes.class_),
+      _Token('T', SemanticTokenTypes.typeParameter),
+      _Token('// class comment', SemanticTokenTypes.comment),
+      _Token('// Trailing comment', SemanticTokenTypes.comment),
+    ];
+
+    await initialize();
+    await openFile(mainFileUri, withoutMarkers(content));
+
+    final tokens =
+        await getSemanticTokensRange(mainFileUri, rangeFromMarkers(content));
+    final decoded = decodeSemanticTokens(withoutMarkers(content), tokens);
+    expect(decoded, equals(expected));
+  }
+
+  Future<void> test_range_multilineRegions() async {
+    final content = '''
+    /**
+     * This is my class comment
+     *
+     * [[There are
+     * multiple lines
+     */
+    class]] MyClass {}
+    ''';
+
+    final expected = [
+      _Token('     * There are\n', SemanticTokenTypes.comment,
+          [SemanticTokenModifiers.documentation]),
+      _Token('     * multiple lines\n', SemanticTokenTypes.comment,
+          [SemanticTokenModifiers.documentation]),
+      _Token('     */', SemanticTokenTypes.comment,
+          [SemanticTokenModifiers.documentation]),
+      _Token('class', SemanticTokenTypes.keyword),
+    ];
+
+    await initialize();
+    await openFile(mainFileUri, withoutMarkers(content));
+
+    final tokens =
+        await getSemanticTokensRange(mainFileUri, rangeFromMarkers(content));
+    final decoded = decodeSemanticTokens(withoutMarkers(content), tokens);
+    expect(decoded, equals(expected));
+  }
+
+  Future<void> test_strings() async {
+    final content = '''
+String foo(String c) => c;
+const string1 = 'test';
+const string2 = 'test1 \$string1 test2 \${foo('test3')}';
+const string3 = r'\$string1 \${string1.length}';
+const string4 = \'\'\'
+multi
+  line
+    string
+\'\'\';
+''';
+
+    final expected = [
       _Token('String', SemanticTokenTypes.class_),
       _Token('foo', SemanticTokenTypes.function,
           [SemanticTokenModifiers.declaration, SemanticTokenModifiers.static]),
@@ -592,6 +723,14 @@
       _Token('string3', SemanticTokenTypes.variable,
           [SemanticTokenModifiers.declaration]),
       _Token(r"r'$string1 ${string1.length}'", SemanticTokenTypes.string),
+      _Token('const', SemanticTokenTypes.keyword),
+      _Token('string4', SemanticTokenTypes.variable,
+          [SemanticTokenModifiers.declaration]),
+      _Token("'''\n", SemanticTokenTypes.string),
+      _Token('multi\n', SemanticTokenTypes.string),
+      _Token('  line\n', SemanticTokenTypes.string),
+      _Token('    string\n', SemanticTokenTypes.string),
+      _Token("'''", SemanticTokenTypes.string),
     ];
 
     await initialize();
diff --git a/pkg/analysis_server/test/lsp/server_abstract.dart b/pkg/analysis_server/test/lsp/server_abstract.dart
index 6388898..74726a9 100644
--- a/pkg/analysis_server/test/lsp/server_abstract.dart
+++ b/pkg/analysis_server/test/lsp/server_abstract.dart
@@ -1030,6 +1030,17 @@
     return expectSuccessfulResponseTo(request, SemanticTokens.fromJson);
   }
 
+  Future<SemanticTokens> getSemanticTokensRange(Uri uri, Range range) {
+    final request = makeRequest(
+      Method.textDocument_semanticTokens_range,
+      SemanticTokensRangeParams(
+        textDocument: TextDocumentIdentifier(uri: uri.toString()),
+        range: range,
+      ),
+    );
+    return expectSuccessfulResponseTo(request, SemanticTokens.fromJson);
+  }
+
   Future<SignatureHelp> getSignatureHelp(Uri uri, Position pos,
       [SignatureHelpContext context]) {
     final request = makeRequest(
diff --git a/pkg/analysis_server/tool/lsp_spec/README.md b/pkg/analysis_server/tool/lsp_spec/README.md
index 11227a8..2f679b1 100644
--- a/pkg/analysis_server/tool/lsp_spec/README.md
+++ b/pkg/analysis_server/tool/lsp_spec/README.md
@@ -103,6 +103,7 @@
 | textDocument/prepareRename | ✅ | ✅ | | ✅ | ✅ |
 | textDocument/foldingRange | ✅ | ✅ | ✅ | ✅ | ✅ |
 | textDocument/semanticTokens/full | ✅ | ✅ | ✅ | ✅ | ✅ |
+| textDocument/semanticTokens/range | ✅ | ✅ | ✅ | ✅ | ✅ |
 
 ## Custom Methods and Notifications
 
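
The new textDocument/semanticTokens/range request takes the same shape as the full request plus a range (see SemanticTokensRangeParams in getSemanticTokensRange above). As a minimal sketch, a client might build the JSON-RPC payload with jsonEncode in the same style as the dartdev test further down; the file URI and range values here are illustrative assumptions, not taken from this change.

import 'dart:convert';

void main() {
  // Illustrative request only; the URI and range values are assumptions.
  final message = jsonEncode({
    'jsonrpc': '2.0',
    'id': 2,
    'method': 'textDocument/semanticTokens/range',
    'params': {
      'textDocument': {'uri': 'file:///workspace/lib/main.dart'},
      'range': {
        'start': {'line': 0, 'character': 0},
        'end': {'line': 10, 'character': 0},
      },
    },
  });
  // LSP messages are framed with a Content-Length header (byte count).
  print('Content-Length: ${utf8.encode(message).length}\r\n\r\n$message');
}
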
diff --git a/pkg/analyzer/CHANGELOG.md b/pkg/analyzer/CHANGELOG.md
index 665eb9b..cf138c1 100644
--- a/pkg/analyzer/CHANGELOG.md
+++ b/pkg/analyzer/CHANGELOG.md
@@ -1,6 +1,7 @@
 ## 0.41.2-dev
 * Deprecated `FunctionTypeAliasElement.function`.
   Use `TypeAliasElement.aliasedElement` instead.
+* Widened the dependency on package:crypto to include version 3.0.0.
 
 ## 0.41.1
 * Updated `PackageBuildWorkspace` that supports `package:build` to stop
diff --git a/pkg/analyzer/lib/error/error.dart b/pkg/analyzer/lib/error/error.dart
index 965f076..ba3d969 100644
--- a/pkg/analyzer/lib/error/error.dart
+++ b/pkg/analyzer/lib/error/error.dart
@@ -542,7 +542,6 @@
   HintCode.NULL_AWARE_IN_LOGICAL_OPERATOR,
   HintCode.NULL_CHECK_ALWAYS_FAILS,
   HintCode.NULLABLE_TYPE_IN_CATCH_CLAUSE,
-  HintCode.OVERRIDE_EQUALS_BUT_NOT_HASH_CODE,
   HintCode.OVERRIDE_ON_NON_OVERRIDING_FIELD,
   HintCode.OVERRIDE_ON_NON_OVERRIDING_GETTER,
   HintCode.OVERRIDE_ON_NON_OVERRIDING_METHOD,
diff --git a/pkg/analyzer/lib/src/dart/error/hint_codes.dart b/pkg/analyzer/lib/src/dart/error/hint_codes.dart
index 402b9c7..030afac 100644
--- a/pkg/analyzer/lib/src/dart/error/hint_codes.dart
+++ b/pkg/analyzer/lib/src/dart/error/hint_codes.dart
@@ -1546,19 +1546,6 @@
       hasPublishedDocs: true);
 
   /**
-   * Hint for classes that override equals, but not hashCode.
-   *
-   * Parameters:
-   * 0: the name of the current class
-   */
-  // TODO(brianwilkerson) Decide whether we want to implement this check
-  //  (possibly as a lint) or remove the hint code.
-  static const HintCode OVERRIDE_EQUALS_BUT_NOT_HASH_CODE = HintCode(
-      'OVERRIDE_EQUALS_BUT_NOT_HASH_CODE',
-      "The class '{0}' overrides 'operator==', but not 'get hashCode'.",
-      correction: "Try implementing 'hashCode'.");
-
-  /**
    * A field with the override annotation does not override a getter or setter.
    *
    * No parameters.
diff --git a/pkg/analyzer/pubspec.yaml b/pkg/analyzer/pubspec.yaml
index a07adc0..19837d5 100644
--- a/pkg/analyzer/pubspec.yaml
+++ b/pkg/analyzer/pubspec.yaml
@@ -12,7 +12,7 @@
   cli_util: '>=0.1.4 <0.3.0'
   collection: ^1.15.0-nullsafety.5
   convert: ^2.0.0
-  crypto: ^2.0.0
+  crypto: '>=2.0.0 <4.0.0'
   glob: '>=1.0.3 <3.0.0'
   meta: ^1.2.3
   package_config: ^1.0.0
diff --git a/pkg/analyzer/test/src/diagnostics/must_be_a_native_function_type_test.dart b/pkg/analyzer/test/src/diagnostics/must_be_a_native_function_type_test.dart
index 8c6ecc1..c46e611 100644
--- a/pkg/analyzer/test/src/diagnostics/must_be_a_native_function_type_test.dart
+++ b/pkg/analyzer/test/src/diagnostics/must_be_a_native_function_type_test.dart
@@ -42,6 +42,51 @@
     ]);
   }
 
+  test_lookupFunction_Pointer() async {
+    await assertNoErrorsInCode(r'''
+import 'dart:ffi';
+typedef S = Void Function(Pointer);
+typedef F = void Function(Pointer);
+void f(DynamicLibrary lib) {
+  lib.lookupFunction<S, F>('g');
+}
+''');
+  }
+
+  // TODO(https://dartbug.com/44594): Should this be an error or not?
+  test_lookupFunction_PointerNativeFunction() async {
+    await assertErrorsInCode(r'''
+import 'dart:ffi';
+typedef S = Void Function(Pointer<NativeFunction>);
+typedef F = void Function(Pointer<NativeFunction>);
+void f(DynamicLibrary lib) {
+  lib.lookupFunction<S, F>('g');
+}
+''', [error(FfiCode.MUST_BE_A_NATIVE_FUNCTION_TYPE, 173, 1)]);
+  }
+
+  test_lookupFunction_PointerNativeFunction2() async {
+    await assertNoErrorsInCode(r'''
+import 'dart:ffi';
+typedef S = Void Function(Pointer<NativeFunction<Int8 Function()>>);
+typedef F = void Function(Pointer<NativeFunction<Int8 Function()>>);
+void f(DynamicLibrary lib) {
+  lib.lookupFunction<S, F>('g');
+}
+''');
+  }
+
+  test_lookupFunction_PointerVoid() async {
+    await assertNoErrorsInCode(r'''
+import 'dart:ffi';
+typedef S = Pointer<Void> Function(Pointer<Void>);
+typedef F = Pointer<Void> Function(Pointer<Void>);
+void f(DynamicLibrary lib) {
+  lib.lookupFunction<S, F>('g');
+}
+''');
+  }
+
   test_lookupFunction_T() async {
     await assertErrorsInCode(r'''
 import 'dart:ffi';
diff --git a/pkg/analyzer/test/src/diagnostics/override_equals_but_not_hashcode_test.dart b/pkg/analyzer/test/src/diagnostics/override_equals_but_not_hashcode_test.dart
deleted file mode 100644
index 6479ead..0000000
--- a/pkg/analyzer/test/src/diagnostics/override_equals_but_not_hashcode_test.dart
+++ /dev/null
@@ -1,35 +0,0 @@
-// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-import 'package:analyzer/src/error/codes.dart';
-import 'package:test_reflective_loader/test_reflective_loader.dart';
-
-import '../dart/resolution/context_collection_resolution.dart';
-
-main() {
-  defineReflectiveSuite(() {
-    defineReflectiveTests(OverrideEqualsButNotHashCodeTest);
-  });
-}
-
-@reflectiveTest
-class OverrideEqualsButNotHashCodeTest extends PubPackageResolutionTest {
-  test_overrideBoth() async {
-    await assertNoErrorsInCode(r'''
-class A {
-  bool operator ==(x) { return x; }
-  get hashCode => 0;
-}''');
-  }
-
-  @failingTest
-  test_overrideEquals_andNotHashCode() async {
-    await assertErrorsInCode(r'''
-class A {
-  bool operator ==(x) {}
-}''', [
-      error(HintCode.OVERRIDE_EQUALS_BUT_NOT_HASH_CODE, 6, 1),
-    ]);
-  }
-}
diff --git a/pkg/analyzer/test/src/diagnostics/test_all.dart b/pkg/analyzer/test/src/diagnostics/test_all.dart
index d8c3e4e..f119043 100644
--- a/pkg/analyzer/test/src/diagnostics/test_all.dart
+++ b/pkg/analyzer/test/src/diagnostics/test_all.dart
@@ -484,8 +484,6 @@
     as object_cannot_extend_another_class;
 import 'optional_parameter_in_operator_test.dart'
     as optional_parameter_in_operator;
-import 'override_equals_but_not_hashcode_test.dart'
-    as override_equals_but_not_hashcode;
 import 'override_on_non_overriding_field_test.dart'
     as override_on_non_overriding_field;
 import 'override_on_non_overriding_getter_test.dart'
@@ -984,7 +982,6 @@
     nullable_type_in_with_clause.main();
     object_cannot_extend_another_class.main();
     optional_parameter_in_operator.main();
-    override_equals_but_not_hashcode.main();
     override_on_non_overriding_field.main();
     override_on_non_overriding_getter.main();
     override_on_non_overriding_method.main();
diff --git a/pkg/compiler/lib/src/elements/types.dart b/pkg/compiler/lib/src/elements/types.dart
index bfd7666..4c261da 100644
--- a/pkg/compiler/lib/src/elements/types.dart
+++ b/pkg/compiler/lib/src/elements/types.dart
@@ -688,6 +688,7 @@
     // Canonicalize empty collections to constants to save storage.
     if (parameterTypes.isEmpty) parameterTypes = const [];
     if (optionalParameterTypes.isEmpty) optionalParameterTypes = const [];
+    if (namedParameters.isEmpty) namedParameters = const [];
     if (namedParameterTypes.isEmpty) namedParameterTypes = const [];
     if (requiredNamedParameters.isEmpty) requiredNamedParameters = const {};
     if (typeVariables.isEmpty) typeVariables = const [];
diff --git a/pkg/dartdev/lib/dartdev.dart b/pkg/dartdev/lib/dartdev.dart
index 5e8f2a9..b194847 100644
--- a/pkg/dartdev/lib/dartdev.dart
+++ b/pkg/dartdev/lib/dartdev.dart
@@ -20,6 +20,7 @@
 import 'src/commands/compile.dart';
 import 'src/commands/create.dart';
 import 'src/commands/fix.dart';
+import 'src/commands/language_server.dart';
 import 'src/commands/run.dart';
 import 'src/commands/test.dart';
 import 'src/core.dart';
@@ -96,6 +97,7 @@
     addCommand(CompileCommand(verbose: verbose));
     addCommand(FixCommand(verbose: verbose));
     addCommand(FormatCommand(verbose: verbose));
+    addCommand(LanguageServerCommand(verbose: verbose));
     addCommand(MigrateCommand(verbose: verbose));
     addCommand(pubCommand());
     addCommand(RunCommand(verbose: verbose));
diff --git a/pkg/dartdev/lib/src/commands/language_server.dart b/pkg/dartdev/lib/src/commands/language_server.dart
new file mode 100644
index 0000000..924ae19
--- /dev/null
+++ b/pkg/dartdev/lib/src/commands/language_server.dart
@@ -0,0 +1,53 @@
+// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:io' as io;
+
+import 'package:analysis_server/src/server/driver.dart' as server_driver;
+import 'package:args/args.dart';
+
+import '../core.dart';
+import '../utils.dart';
+
+class LanguageServerCommand extends DartdevCommand {
+  static const String commandName = 'language-server';
+
+  static const String commandDescription = '''
+Start Dart's analysis server.
+
+This is a long-running process used to provide language services to IDEs and other tooling clients.
+
+It communicates over stdin and stdout and provides services such as code completion, errors and warnings, and refactorings. This command is generally not invoked directly by users; it is consumed by higher-level tools.
+
+For more information about the server's capabilities and configuration, see:
+
+  https://github.com/dart-lang/sdk/tree/master/pkg/analysis_server''';
+
+  LanguageServerCommand({bool verbose = false})
+      : super(commandName, commandDescription, hidden: !verbose);
+
+  @override
+  ArgParser createArgParser() {
+    return server_driver.Driver.createArgParser(
+      usageLineLength: dartdevUsageLineLength,
+      includeHelpFlag: false,
+      defaultToLsp: true,
+    );
+  }
+
+  @override
+  Future<int> run() async {
+    final driver = server_driver.Driver();
+    driver.start(
+      argResults.arguments,
+      defaultToLsp: true,
+    );
+
+    // The server will continue to run past the return from this method.
+    //
+    // If an error occurs during startup, the server will set the dart:io
+    // exitCode value (or call exit() directly).
+    return io.exitCode;
+  }
+}
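
As a rough usage sketch, and assuming only that `dart` is on PATH, a client can spawn `dart language-server` and drive it over stdin/stdout with Content-Length framed LSP messages, much as pkg/dartdev/test/commands/language_server_test.dart does below; everything beyond that handshake here is illustrative, not part of this change.

import 'dart:convert';
import 'dart:io';

Future<void> main() async {
  // Assumes `dart` is on PATH; the command speaks LSP by default
  // (defaultToLsp: true above).
  final process = await Process.start('dart', ['language-server']);

  final message = jsonEncode({
    'jsonrpc': '2.0',
    'id': 1,
    'method': 'initialize',
    'params': {
      'processId': pid,
      'clientInfo': {'name': 'example-client'},
      'capabilities': {},
      'rootUri': Directory.current.uri.toString(),
    },
  });

  // LSP messages are framed with a Content-Length header (byte count).
  process.stdin.write('Content-Length: ${utf8.encode(message).length}\r\n\r\n');
  process.stdin.write(message);

  // Print the first chunk of the server's response, then shut down.
  print(await process.stdout.transform(utf8.decoder).first);
  process.kill();
}
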
diff --git a/pkg/dartdev/pubspec.yaml b/pkg/dartdev/pubspec.yaml
index d6347db..a8fef55 100644
--- a/pkg/dartdev/pubspec.yaml
+++ b/pkg/dartdev/pubspec.yaml
@@ -6,6 +6,8 @@
 environment:
   sdk: '>=2.6.0 <3.0.0'
 dependencies:
+  analysis_server:
+    path: ../analysis_server
   analysis_server_client:
     path: ../analysis_server_client
   analyzer:
@@ -22,8 +24,7 @@
     path: ../nnbd_migration
   path: ^1.0.0
   pedantic: ^1.9.0
-  pub:
-    path: ../../third_party/pkg/pub
+  pub: any
   stagehand: any
   telemetry:
     path: ../telemetry
diff --git a/pkg/dartdev/test/commands/language_server_test.dart b/pkg/dartdev/test/commands/language_server_test.dart
new file mode 100644
index 0000000..fcc637f
--- /dev/null
+++ b/pkg/dartdev/test/commands/language_server_test.dart
@@ -0,0 +1,101 @@
+// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:convert';
+import 'dart:io';
+
+import 'package:test/test.dart';
+
+import '../utils.dart' as utils;
+
+void main() {
+  group(
+    'language-server',
+    defineLanguageServerTests,
+    timeout: utils.longTimeout,
+  );
+}
+
+void defineLanguageServerTests() {
+  utils.TestProject project;
+  Process process;
+
+  tearDown(() {
+    project?.dispose();
+    process?.kill();
+  });
+
+  Future runWithLsp(List<String> args) async {
+    project = utils.project();
+
+    process = await project.start(args);
+
+    final Stream<String> inStream =
+        process.stdout.transform<String>(utf8.decoder);
+
+    // Send an LSP init.
+    final String message = jsonEncode({
+      'jsonrpc': '2.0',
+      'id': 1,
+      'method': 'initialize',
+      'params': {
+        'processId': pid,
+        'clientInfo': {'name': 'dart-cli-tester'},
+        'capabilities': {},
+        'rootUri': project.dir.uri.toString(),
+      },
+    });
+
+    process.stdin.write('Content-Length: ${message.length}\r\n');
+    process.stdin.write('\r\n');
+    process.stdin.write(message);
+
+    List<String> responses = await inStream.take(2).toList();
+    expect(responses, hasLength(2));
+
+    expect(responses[0], startsWith('Content-Length: '));
+
+    final json = jsonDecode(responses[1]);
+    expect(json['id'], 1);
+    expect(json['result'], isNotNull);
+    final result = json['result'];
+    expect(result['capabilities'], isNotNull);
+    expect(result['serverInfo'], isNotNull);
+    final serverInfo = result['serverInfo'];
+    expect(serverInfo['name'], isNotEmpty);
+
+    process.kill();
+    process = null;
+  }
+
+  test('protocol default', () async {
+    return runWithLsp(['language-server']);
+  });
+
+  test('protocol lsp', () async {
+    return runWithLsp(['language-server', '--protocol=lsp']);
+  });
+
+  test('protocol analyzer', () async {
+    project = utils.project();
+
+    process = await project.start(['language-server', '--protocol=analyzer']);
+
+    final Stream<String> inStream = process.stdout
+        .transform<String>(utf8.decoder)
+        .transform<String>(const LineSplitter());
+
+    final line = await inStream.first;
+    final json = jsonDecode(line);
+
+    expect(json['event'], 'server.connected');
+    expect(json['params'], isNotNull);
+    final params = json['params'];
+    expect(params['version'], isNotEmpty);
+    expect(params['pid'], isNot(0));
+
+    process.kill();
+    process = null;
+  });
+}
diff --git a/pkg/dartdev/test/test_all.dart b/pkg/dartdev/test/test_all.dart
index edd644b..f232948 100644
--- a/pkg/dartdev/test/test_all.dart
+++ b/pkg/dartdev/test/test_all.dart
@@ -12,6 +12,7 @@
 import 'commands/flag_test.dart' as flag;
 import 'commands/format_test.dart' as format;
 import 'commands/help_test.dart' as help;
+import 'commands/language_server_test.dart' as language_server;
 import 'commands/migrate_test.dart' as migrate;
 import 'commands/pub_test.dart' as pub;
 import 'commands/run_test.dart' as run;
@@ -39,6 +40,7 @@
     help.main();
     implicit_smoke.main();
     invalid_smoke.main();
+    language_server.main();
     migrate.main();
     no_such_file.main();
     pub.main();
diff --git a/pkg/dartdev/test/utils.dart b/pkg/dartdev/test/utils.dart
index 0ef90d3..de507c1 100644
--- a/pkg/dartdev/test/utils.dart
+++ b/pkg/dartdev/test/utils.dart
@@ -94,6 +94,20 @@
         environment: {if (logAnalytics) '_DARTDEV_LOG_ANALYTICS': 'true'});
   }
 
+  Future<Process> start(
+    List<String> arguments, {
+    String workingDir,
+  }) {
+    return Process.start(
+        Platform.resolvedExecutable,
+        [
+          '--no-analytics',
+          ...arguments,
+        ],
+        workingDirectory: workingDir ?? dir.path,
+        environment: {if (logAnalytics) '_DARTDEV_LOG_ANALYTICS': 'true'});
+  }
+
   String _sdkRootPath;
 
   /// Return the root of the SDK.
diff --git a/pkg/vm/lib/transformations/type_flow/transformer.dart b/pkg/vm/lib/transformations/type_flow/transformer.dart
index 9f4dfd2..a192694 100644
--- a/pkg/vm/lib/transformations/type_flow/transformer.dart
+++ b/pkg/vm/lib/transformations/type_flow/transformer.dart
@@ -313,13 +313,6 @@
     if (_typeFlowAnalysis.isMemberUsed(member)) {
       if (member is Field) {
         _setInferredType(member, _typeFlowAnalysis.fieldType(member));
-
-        final unboxingInfoMetadata =
-            _unboxingInfo.getUnboxingInfoOfMember(member);
-        if (unboxingInfoMetadata != null &&
-            !unboxingInfoMetadata.isFullyBoxed) {
-          _unboxingInfoMetadata.mapping[member] = unboxingInfoMetadata;
-        }
       } else {
         Args<Type> argTypes = _typeFlowAnalysis.argumentTypes(member);
         final uncheckedParameters =
@@ -350,36 +343,41 @@
               skipCheck: uncheckedParameters.contains(param));
         }
 
-        final unboxingInfoMetadata =
-            _unboxingInfo.getUnboxingInfoOfMember(member);
-        if (unboxingInfoMetadata != null &&
-            !unboxingInfoMetadata.isFullyBoxed) {
-          _unboxingInfoMetadata.mapping[member] = unboxingInfoMetadata;
-        }
-
         // TODO(alexmarkov): figure out how to pass receiver type.
       }
-    } else if (!member.isAbstract &&
-        !fieldMorpher.isExtraMemberWithReachableBody(member)) {
-      _setUnreachable(member);
-    } else if (member is! Field) {
+
       final unboxingInfoMetadata =
           _unboxingInfo.getUnboxingInfoOfMember(member);
-      if (unboxingInfoMetadata != null) {
-        // Check for partitions that only have abstract methods should be marked as boxed.
-        if (unboxingInfoMetadata.returnInfo ==
-            UnboxingInfoMetadata.kUnboxingCandidate) {
-          unboxingInfoMetadata.returnInfo = UnboxingInfoMetadata.kBoxed;
-        }
-        for (int i = 0; i < unboxingInfoMetadata.unboxedArgsInfo.length; i++) {
-          if (unboxingInfoMetadata.unboxedArgsInfo[i] ==
+      if (unboxingInfoMetadata != null && !unboxingInfoMetadata.isFullyBoxed) {
+        _unboxingInfoMetadata.mapping[member] = unboxingInfoMetadata;
+      }
+    } else {
+      if (!member.isAbstract &&
+          !fieldMorpher.isExtraMemberWithReachableBody(member)) {
+        _setUnreachable(member);
+      }
+
+      if (member is! Field) {
+        final unboxingInfoMetadata =
+            _unboxingInfo.getUnboxingInfoOfMember(member);
+        if (unboxingInfoMetadata != null) {
+          // Partitions that only have abstract methods should be marked as boxed.
+          if (unboxingInfoMetadata.returnInfo ==
               UnboxingInfoMetadata.kUnboxingCandidate) {
-            unboxingInfoMetadata.unboxedArgsInfo[i] =
-                UnboxingInfoMetadata.kBoxed;
+            unboxingInfoMetadata.returnInfo = UnboxingInfoMetadata.kBoxed;
           }
-        }
-        if (!unboxingInfoMetadata.isFullyBoxed) {
-          _unboxingInfoMetadata.mapping[member] = unboxingInfoMetadata;
+          for (int i = 0;
+              i < unboxingInfoMetadata.unboxedArgsInfo.length;
+              i++) {
+            if (unboxingInfoMetadata.unboxedArgsInfo[i] ==
+                UnboxingInfoMetadata.kUnboxingCandidate) {
+              unboxingInfoMetadata.unboxedArgsInfo[i] =
+                  UnboxingInfoMetadata.kBoxed;
+            }
+          }
+          if (!unboxingInfoMetadata.isFullyBoxed) {
+            _unboxingInfoMetadata.mapping[member] = unboxingInfoMetadata;
+          }
         }
       }
     }
diff --git a/runtime/tests/vm/dart/regress_44563_test.dart b/runtime/tests/vm/dart/regress_44563_test.dart
new file mode 100644
index 0000000..6b1baa6
--- /dev/null
+++ b/runtime/tests/vm/dart/regress_44563_test.dart
@@ -0,0 +1,31 @@
+// Copyright (c) 2021, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Verifies that unboxing info is attached to a member with an unreachable body,
+// which is still used as an interface target.
+// Regression test for https://github.com/dart-lang/sdk/issues/44563.
+
+import 'package:expect/expect.dart';
+
+class BaseClass {
+  int get value => 0;
+}
+
+class Class1 extends BaseClass {
+  @pragma('vm:never-inline')
+  int get value => 1;
+}
+
+class Class2 extends BaseClass {
+  @pragma('vm:never-inline')
+  int get value => 2;
+}
+
+bool nonConstantCondition = int.parse("1") == 1;
+
+void main() {
+  BaseClass obj = BaseClass();
+  obj = nonConstantCondition ? Class1() : Class2();
+  Expect.equals(1, obj.value);
+}
diff --git a/runtime/tests/vm/dart_2/regress_44563_test.dart b/runtime/tests/vm/dart_2/regress_44563_test.dart
new file mode 100644
index 0000000..6b1baa6
--- /dev/null
+++ b/runtime/tests/vm/dart_2/regress_44563_test.dart
@@ -0,0 +1,31 @@
+// Copyright (c) 2021, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Verifies that unboxing info is attached to a member with an unreachable body,
+// which is still used as an interface target.
+// Regression test for https://github.com/dart-lang/sdk/issues/44563.
+
+import 'package:expect/expect.dart';
+
+class BaseClass {
+  int get value => 0;
+}
+
+class Class1 extends BaseClass {
+  @pragma('vm:never-inline')
+  int get value => 1;
+}
+
+class Class2 extends BaseClass {
+  @pragma('vm:never-inline')
+  int get value => 2;
+}
+
+bool nonConstantCondition = int.parse("1") == 1;
+
+void main() {
+  BaseClass obj = BaseClass();
+  obj = nonConstantCondition ? Class1() : Class2();
+  Expect.equals(1, obj.value);
+}
diff --git a/tests/ffi/vmspecific_static_checks_test.dart b/tests/ffi/vmspecific_static_checks_test.dart
index c77245e..870de9c 100644
--- a/tests/ffi/vmspecific_static_checks_test.dart
+++ b/tests/ffi/vmspecific_static_checks_test.dart
@@ -40,6 +40,8 @@
   testLookupFunctionGeneric2();
   testLookupFunctionWrongNativeFunctionSignature();
   testLookupFunctionTypeMismatch();
+  testLookupFunctionPointervoid();
+  testLookupFunctionPointerNFdyn();
   testNativeFunctionSignatureInvalidReturn();
   testNativeFunctionSignatureInvalidParam();
   testNativeFunctionSignatureInvalidOptionalNamed();
@@ -291,6 +293,24 @@
   l.lookupFunction<NativeDoubleUnOp, IntUnOp>("cos"); //# 18: compile-time error
 }
 
+typedef PointervoidN = Void Function(Pointer<void>);
+typedef PointervoidD = void Function(Pointer<void>);
+
+void testLookupFunctionPointervoid() {
+  DynamicLibrary l = dlopenPlatformSpecific("ffi_test_dynamic_library");
+  // TODO(https://dartbug.com/44593): This should be a compile-time error in CFE.
+  // l.lookupFunction<PointervoidN, PointervoidD>("cos");
+}
+
+typedef PointerNFdynN = Void Function(Pointer<NativeFunction>);
+typedef PointerNFdynD = void Function(Pointer<NativeFunction>);
+
+void testLookupFunctionPointerNFdyn() {
+  DynamicLibrary l = dlopenPlatformSpecific("ffi_test_dynamic_library");
+  // TODO(https://dartbug.com/44594): Should this be an error or not?
+  // l.lookupFunction<PointerNFdynN, PointerNFdynD>("cos");
+}
+
 // TODO(dacoharkes): make the next 4 test compile errors
 typedef Invalid1 = int Function(Int8);
 typedef Invalid2 = Int8 Function(int);
diff --git a/tests/ffi_2/vmspecific_static_checks_test.dart b/tests/ffi_2/vmspecific_static_checks_test.dart
index 69efa84..ebbe474 100644
--- a/tests/ffi_2/vmspecific_static_checks_test.dart
+++ b/tests/ffi_2/vmspecific_static_checks_test.dart
@@ -40,6 +40,8 @@
   testLookupFunctionGeneric2();
   testLookupFunctionWrongNativeFunctionSignature();
   testLookupFunctionTypeMismatch();
+  testLookupFunctionPointervoid();
+  testLookupFunctionPointerNFdyn();
   testNativeFunctionSignatureInvalidReturn();
   testNativeFunctionSignatureInvalidParam();
   testNativeFunctionSignatureInvalidOptionalNamed();
@@ -291,6 +293,24 @@
   l.lookupFunction<NativeDoubleUnOp, IntUnOp>("cos"); //# 18: compile-time error
 }
 
+typedef PointervoidN = Void Function(Pointer<void>);
+typedef PointervoidD = void Function(Pointer<void>);
+
+void testLookupFunctionPointervoid() {
+  DynamicLibrary l = dlopenPlatformSpecific("ffi_test_dynamic_library");
+  // TODO(https://dartbug.com/44593): This should be a compile-time error in CFE.
+  // l.lookupFunction<PointervoidN, PointervoidD>("cos");
+}
+
+typedef PointerNFdynN = Void Function(Pointer<NativeFunction>);
+typedef PointerNFdynD = void Function(Pointer<NativeFunction>);
+
+void testLookupFunctionPointerNFdyn() {
+  DynamicLibrary l = dlopenPlatformSpecific("ffi_test_dynamic_library");
+  // TODO(https://dartbug.com/44594): Should this be an error or not?
+  // l.lookupFunction<PointerNFdynN, PointerNFdynD>("cos");
+}
+
 // TODO(dacoharkes): make the next 4 test compile errors
 typedef Invalid1 = int Function(Int8);
 typedef Invalid2 = Int8 Function(int);
diff --git a/tools/VERSION b/tools/VERSION
index baa01ff..02f8b93 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
 MAJOR 2
 MINOR 12
 PATCH 0
-PRERELEASE 197
+PRERELEASE 198
 PRERELEASE_PATCH 0
\ No newline at end of file