Version 2.10.0-74.0.dev

Merge commit '50736d2bd960c42e303d1a37ee6e0623e4c0c9d6' into 'dev'
diff --git a/pkg/front_end/lib/src/fasta/incremental_compiler.dart b/pkg/front_end/lib/src/fasta/incremental_compiler.dart
index d2d1a31..01fc714 100644
--- a/pkg/front_end/lib/src/fasta/incremental_compiler.dart
+++ b/pkg/front_end/lib/src/fasta/incremental_compiler.dart
@@ -93,7 +93,7 @@
 
 import 'util/experiment_environment_getter.dart' show getExperimentEnvironment;
 
-import 'util/textual_outline_v2.dart' show textualOutline;
+import 'util/textual_outline.dart' show textualOutline;
 
 import 'fasta_codes.dart'
     show
diff --git a/pkg/front_end/lib/src/fasta/kernel/inference_visitor.dart b/pkg/front_end/lib/src/fasta/kernel/inference_visitor.dart
index aec4c80..74b894e 100644
--- a/pkg/front_end/lib/src/fasta/kernel/inference_visitor.dart
+++ b/pkg/front_end/lib/src/fasta/kernel/inference_visitor.dart
@@ -630,7 +630,8 @@
     Expression write;
     if (writeTarget.isMissing) {
       write = inferrer.createMissingPropertySet(
-          node.writeOffset, writeReceiver, readType, node.propertyName, value);
+          node.writeOffset, writeReceiver, readType, node.propertyName, value,
+          forEffect: node.forEffect);
     } else {
       assert(writeTarget.isExtensionMember);
       write = new StaticInvocation(
diff --git a/pkg/front_end/lib/src/fasta/util/textual_outline.dart b/pkg/front_end/lib/src/fasta/util/textual_outline.dart
new file mode 100644
index 0000000..4b1f8cc
--- /dev/null
+++ b/pkg/front_end/lib/src/fasta/util/textual_outline.dart
@@ -0,0 +1,865 @@
+// Copyright (c) 2020, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:typed_data' show Uint8List;
+
+import 'dart:io' show File;
+
+import 'package:_fe_analyzer_shared/src/parser/class_member_parser.dart'
+    show ClassMemberParser;
+
+import 'package:_fe_analyzer_shared/src/parser/identifier_context.dart';
+
+import 'package:_fe_analyzer_shared/src/scanner/scanner.dart'
+    show ErrorToken, LanguageVersionToken, Scanner;
+
+import 'package:_fe_analyzer_shared/src/scanner/utf8_bytes_scanner.dart'
+    show Utf8BytesScanner;
+
+import 'package:_fe_analyzer_shared/src/parser/listener.dart';
+
+import 'package:_fe_analyzer_shared/src/scanner/token.dart' show Token;
+
+abstract class _Chunk implements Comparable<_Chunk> {
+  int originalPosition;
+
+  List<_MetadataChunk> metadata;
+
+  void _printNormalHeaderWithMetadata(
+      StringBuffer sb, bool extraLine, String indent) {
+    if (sb.isNotEmpty) {
+      sb.write("\n");
+      if (extraLine) sb.write("\n");
+    }
+    printMetadata(sb, indent);
+    sb.write(indent);
+  }
+
+  void printOn(StringBuffer sb, {String indent: "", bool extraLine: true});
+
+  void printMetadata(StringBuffer sb, String indent) {
+    if (metadata != null) {
+      for (_MetadataChunk m in metadata) {
+        m.printMetadataOn(sb, indent);
+      }
+    }
+  }
+
+  /// Merges and sorts this chunk internally (e.g. a class might merge and
+  /// sort its members).
+  ///
+  /// The provided [sb] should be empty on entry and must be considered dirty
+  /// after this call.
+  void internalMergeAndSort(StringBuffer sb);
+
+  @override
+  int compareTo(_Chunk other) {
+    // Generally we compare according to the original position.
+    if (originalPosition < other.originalPosition) return -1;
+    return 1;
+  }
+
+  /// Prints tokens from [fromToken] to [toToken] into [sb].
+  ///
+  /// Adds spaces as "normal" based on the tokens' start and end offsets.
+  ///
+  /// If [skipContentOnEndGroupUntilToToken] is true, upon meeting a token
+  /// whose endGroup is [toToken], the tokens between that token and [toToken]
+  /// are skipped, i.e. it jumps directly to [toToken].
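+  ///
+  /// For example (illustrative): given the tokens of `int foo(int a) { return
+  /// a; }`, with [toToken] being the final `}` and
+  /// [skipContentOnEndGroupUntilToToken] set to true, the body tokens are
+  /// skipped and the printed text is "int foo(int a) {}".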
+  void printTokenRange(Token fromToken, Token toToken, StringBuffer sb,
+      {bool skipContentOnEndGroupUntilToToken: false,
+      bool includeToToken: true}) {
+    int endOfLast = fromToken.end;
+    Token token = fromToken;
+    Token afterEnd = toToken;
+    if (includeToToken) afterEnd = toToken.next;
+    bool nextTokenIsEndGroup = false;
+    while (token != afterEnd) {
+      if (token.offset > endOfLast && !nextTokenIsEndGroup) {
+        sb.write(" ");
+      }
+
+      sb.write(token.lexeme);
+      endOfLast = token.end;
+      if (skipContentOnEndGroupUntilToToken &&
+          token.endGroup != null &&
+          token.endGroup == toToken) {
+        token = token.endGroup;
+        nextTokenIsEndGroup = true;
+      } else {
+        token = token.next;
+        nextTokenIsEndGroup = false;
+      }
+    }
+  }
+}
+
+class _LanguageVersionChunk extends _Chunk {
+  final int major;
+  final int minor;
+
+  _LanguageVersionChunk(this.major, this.minor);
+
+  @override
+  void printOn(StringBuffer sb, {String indent: "", bool extraLine: true}) {
+    _printNormalHeaderWithMetadata(sb, extraLine, indent);
+    sb.write("// @dart = ${major}.${minor}");
+  }
+
+  @override
+  void internalMergeAndSort(StringBuffer sb) {
+    // Cannot be sorted.
+    assert(sb.isEmpty);
+  }
+}
+
+abstract class _TokenChunk extends _Chunk {
+  final Token startToken;
+  final Token endToken;
+
+  _TokenChunk(this.startToken, this.endToken);
+
+  void _printOnWithoutHeaderAndMetadata(StringBuffer sb) {
+    printTokenRange(startToken, endToken, sb);
+  }
+
+  @override
+  void printOn(StringBuffer sb, {String indent: "", bool extraLine: true}) {
+    _printNormalHeaderWithMetadata(sb, extraLine, indent);
+    _printOnWithoutHeaderAndMetadata(sb);
+  }
+
+  @override
+  void internalMergeAndSort(StringBuffer sb) {
+    // Generally cannot be sorted.
+    assert(sb.isEmpty);
+  }
+}
+
+abstract class _SortableChunk extends _TokenChunk {
+  _SortableChunk(Token startToken, Token endToken)
+      : super(startToken, endToken);
+
+  @override
+  int compareTo(_Chunk o) {
+    if (o is! _SortableChunk) return super.compareTo(o);
+
+    _SortableChunk other = o;
+
+    // Compare lexemes from startToken and at most the next 10 tokens.
+    // For valid code this should be more than enough. Note that this won't
+    // sort exactly as a text sort would: for instance "C<Foo>" and "C2<Foo>"
+    // compare as "C" < "C2", whereas a text sort would give "C<" > "C2".
+    // This doesn't really matter as long as the sorting is consistent (i.e.
+    // the textual outline always sorts like this).
+    Token thisToken = startToken;
+    Token otherToken = other.startToken;
+    int steps = 0;
+    while (thisToken.lexeme == otherToken.lexeme) {
+      if (steps++ > 10) break;
+      thisToken = thisToken.next;
+      otherToken = otherToken.next;
+    }
+    if (thisToken.lexeme == otherToken.lexeme) return super.compareTo(o);
+    return thisToken.lexeme.compareTo(otherToken.lexeme);
+  }
+}
+
+class _ImportExportChunk extends _Chunk {
+  final List<_SingleImportExportChunk> content;
+
+  _ImportExportChunk(this.content, int originalPosition) {
+    this.originalPosition = originalPosition;
+  }
+
+  @override
+  void printOn(StringBuffer sb, {String indent: "", bool extraLine: true}) {
+    if (sb.isNotEmpty) {
+      sb.write("\n");
+    }
+
+    for (int i = 0; i < content.length; i++) {
+      _SingleImportExportChunk chunk = content[i];
+      chunk.printOn(sb,
+          indent: indent,
+          // Add an extra blank line if there's metadata.
+          extraLine: chunk.metadata != null);
+    }
+  }
+
+  @override
+  void internalMergeAndSort(StringBuffer sb) {
+    assert(sb.isEmpty);
+    content.sort();
+  }
+}
+
+abstract class _SingleImportExportChunk extends _SortableChunk {
+  final Token firstShowOrHide;
+  final List<_NamespaceCombinator> combinators;
+  String sortedShowAndHide;
+
+  _SingleImportExportChunk(
+      Token startToken, Token endToken, this.firstShowOrHide, this.combinators)
+      : super(startToken, endToken);
+
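+  /// Rebuilds the show/hide combinators with the names in each combinator
+  /// sorted alphabetically, so that e.g. (illustrative)
+  /// `import 'foo.dart' show B, A;` is printed as
+  /// `import 'foo.dart' show A, B;`.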
+  @override
+  void internalMergeAndSort(StringBuffer sb) {
+    assert(sb.isEmpty);
+    if (firstShowOrHide == null) return;
+    for (int i = 0; i < combinators.length; i++) {
+      sb.write(" ");
+      _NamespaceCombinator combinator = combinators[i];
+      sb.write(combinator.isShow ? "show " : "hide ");
+      List<String> sorted = combinator.names.toList()..sort();
+      for (int j = 0; j < sorted.length; j++) {
+        if (j > 0) sb.write(", ");
+        sb.write(sorted[j]);
+      }
+    }
+    sb.write(";");
+    sortedShowAndHide = sb.toString();
+  }
+
+  @override
+  void _printOnWithoutHeaderAndMetadata(StringBuffer sb) {
+    if (sortedShowAndHide == null) {
+      return super._printOnWithoutHeaderAndMetadata(sb);
+    }
+    printTokenRange(startToken, firstShowOrHide, sb, includeToToken: false);
+    sb.write(sortedShowAndHide);
+  }
+}
+
+class _ImportChunk extends _SingleImportExportChunk {
+  _ImportChunk(Token startToken, Token endToken, Token firstShowOrHide,
+      List<_NamespaceCombinator> combinators)
+      : super(startToken, endToken, firstShowOrHide, combinators);
+}
+
+class _ExportChunk extends _SingleImportExportChunk {
+  _ExportChunk(Token startToken, Token endToken, Token firstShowOrHide,
+      List<_NamespaceCombinator> combinators)
+      : super(startToken, endToken, firstShowOrHide, combinators);
+}
+
+class _NamespaceCombinator {
+  final bool isShow;
+  final Set<String> names;
+
+  _NamespaceCombinator.hide(List<String> names)
+      : isShow = false,
+        names = names.toSet();
+
+  _NamespaceCombinator.show(List<String> names)
+      : isShow = true,
+        names = names.toSet();
+}
+
+class _LibraryNameChunk extends _TokenChunk {
+  _LibraryNameChunk(Token startToken, Token endToken)
+      : super(startToken, endToken);
+}
+
+class _PartChunk extends _TokenChunk {
+  _PartChunk(Token startToken, Token endToken) : super(startToken, endToken);
+}
+
+class _PartOfChunk extends _TokenChunk {
+  _PartOfChunk(Token startToken, Token endToken) : super(startToken, endToken);
+}
+
+abstract class _ClassChunk extends _SortableChunk {
+  List<_Chunk> content = new List<_Chunk>();
+  Token headerEnd;
+  Token footerStart;
+
+  _ClassChunk(Token startToken, Token endToken) : super(startToken, endToken);
+
+  @override
+  void printOn(StringBuffer sb, {String indent: "", bool extraLine: true}) {
+    _printNormalHeaderWithMetadata(sb, extraLine, indent);
+
+    // Header.
+    printTokenRange(startToken, headerEnd, sb);
+
+    // Content.
+    for (int i = 0; i < content.length; i++) {
+      _Chunk chunk = content[i];
+      chunk.printOn(sb, indent: "  $indent", extraLine: false);
+    }
+
+    // Footer.
+    if (footerStart != null) {
+      if (content.isNotEmpty) {
+        sb.write("\n");
+      }
+      sb.write(indent);
+
+      printTokenRange(footerStart, endToken, sb);
+    }
+  }
+
+  @override
+  void internalMergeAndSort(StringBuffer sb) {
+    assert(sb.isEmpty);
+    content = _mergeAndSort(content);
+  }
+}
+
+class _ClassDeclarationChunk extends _ClassChunk {
+  _ClassDeclarationChunk(Token startToken, Token endToken)
+      : super(startToken, endToken);
+}
+
+class _MixinDeclarationChunk extends _ClassChunk {
+  _MixinDeclarationChunk(Token startToken, Token endToken)
+      : super(startToken, endToken);
+}
+
+class _ExtensionDeclarationChunk extends _ClassChunk {
+  _ExtensionDeclarationChunk(Token startToken, Token endToken)
+      : super(startToken, endToken);
+}
+
+class _NamedMixinApplicationChunk extends _ClassChunk {
+  _NamedMixinApplicationChunk(Token startToken, Token endToken)
+      : super(startToken, endToken);
+}
+
+abstract class _ProcedureEtcChunk extends _SortableChunk {
+  _ProcedureEtcChunk(Token startToken, Token endToken)
+      : super(startToken, endToken);
+
+  void _printOnWithoutHeaderAndMetadata(StringBuffer sb) {
+    printTokenRange(startToken, endToken, sb,
+        skipContentOnEndGroupUntilToToken: true);
+  }
+}
+
+class _ClassMethodChunk extends _ProcedureEtcChunk {
+  _ClassMethodChunk(Token startToken, Token endToken)
+      : super(startToken, endToken);
+}
+
+class _TopLevelMethodChunk extends _ProcedureEtcChunk {
+  _TopLevelMethodChunk(Token startToken, Token endToken)
+      : super(startToken, endToken);
+}
+
+class _ClassFactoryMethodChunk extends _ProcedureEtcChunk {
+  _ClassFactoryMethodChunk(Token startToken, Token endToken)
+      : super(startToken, endToken);
+}
+
+class _ClassFieldsChunk extends _ProcedureEtcChunk {
+  _ClassFieldsChunk(Token startToken, Token endToken)
+      : super(startToken, endToken);
+}
+
+class _TopLevelFieldsChunk extends _ProcedureEtcChunk {
+  _TopLevelFieldsChunk(Token startToken, Token endToken)
+      : super(startToken, endToken);
+}
+
+class _FunctionTypeAliasChunk extends _ProcedureEtcChunk {
+  _FunctionTypeAliasChunk(Token startToken, Token endToken)
+      : super(startToken, endToken);
+}
+
+class _EnumChunk extends _SortableChunk {
+  _EnumChunk(Token startToken, Token endToken) : super(startToken, endToken);
+}
+
+class _MetadataChunk extends _TokenChunk {
+  _MetadataChunk(Token startToken, Token endToken)
+      : super(startToken, endToken);
+
+  void printMetadataOn(StringBuffer sb, String indent) {
+    sb.write(indent);
+    printTokenRange(startToken, endToken, sb);
+    sb.write("\n");
+  }
+}
+
+class _UnknownChunk extends _TokenChunk {
+  final bool addMarkerForUnknownForTest;
+  _UnknownChunk(
+      this.addMarkerForUnknownForTest, Token startToken, Token endToken)
+      : super(startToken, endToken);
+
+  void _printOnWithoutHeaderAndMetadata(StringBuffer sb) {
+    if (addMarkerForUnknownForTest) {
+      sb.write("---- unknown chunk starts ----\n");
+      super._printOnWithoutHeaderAndMetadata(sb);
+      sb.write("\n---- unknown chunk ends ----");
+      return;
+    }
+    super._printOnWithoutHeaderAndMetadata(sb);
+  }
+}
+
+class _UnknownTokenBuilder {
+  Token start;
+  Token interimEnd;
+}
+
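+/// Mutable integer holder used to thread the running chunk position through
+/// the (recursive) textualization calls.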
+class BoxedInt {
+  int value;
+  BoxedInt(this.value);
+}
+
+// TODO(jensj): Better support for not caring about preexisting spaces, e.g.
+//              "foo(int a, int b)" will be different from "foo(int a,int b)".
+// TODO(jensj): Specify scanner settings to match that of the compiler.
+// TODO(jensj): Canonicalize show/hides on imports/exports. E.g.
+//              "show A hide B" could just be "show A".
+//              "show A show B" could just be "show A, B".
+//              "show A, B, C hide A show A" would be empty.
+
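+/// Creates a textual outline of the Dart source in [rawBytes] (UTF-8
+/// encoded): declarations are kept while e.g. method bodies are reduced to
+/// "{}". With [performModelling] the chunks are additionally merged and
+/// sorted so the result is largely independent of declaration order.
+/// Returns null on unexpected input (e.g. scan errors).
+///
+/// Illustrative sketch of intended use (hypothetical input; `utf8` is from
+/// dart:convert):
+///
+///     String outline = textualOutline(
+///         utf8.encode("class B { int m() { return 42; } }"),
+///         performModelling: true);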
+String textualOutline(List<int> rawBytes,
+    {bool throwOnUnexpected: false,
+    bool performModelling: false,
+    bool addMarkerForUnknownForTest: false}) {
+  Uint8List bytes = new Uint8List(rawBytes.length + 1);
+  bytes.setRange(0, rawBytes.length, rawBytes);
+
+  List<_Chunk> parsedChunks = new List<_Chunk>();
+
+  BoxedInt originalPosition = new BoxedInt(0);
+
+  Utf8BytesScanner scanner = new Utf8BytesScanner(bytes, includeComments: false,
+      languageVersionChanged:
+          (Scanner scanner, LanguageVersionToken languageVersion) {
+    parsedChunks.add(
+        new _LanguageVersionChunk(languageVersion.major, languageVersion.minor)
+          ..originalPosition = originalPosition.value++);
+  });
+  Token firstToken = scanner.tokenize();
+  if (firstToken == null) {
+    if (throwOnUnexpected) throw "firstToken is null";
+    return null;
+  }
+
+  TextualOutlineListener listener = new TextualOutlineListener();
+  ClassMemberParser classMemberParser = new ClassMemberParser(listener);
+  classMemberParser.parseUnit(firstToken);
+
+  Token nextToken = firstToken;
+  _UnknownTokenBuilder currentUnknown = new _UnknownTokenBuilder();
+  while (nextToken != null) {
+    if (nextToken is ErrorToken) {
+      return null;
+    }
+    if (nextToken.isEof) break;
+
+    nextToken = _textualizeTokens(listener, nextToken, currentUnknown,
+        parsedChunks, originalPosition, addMarkerForUnknownForTest);
+  }
+  outputUnknownChunk(currentUnknown, parsedChunks, originalPosition,
+      addMarkerForUnknownForTest);
+
+  if (nextToken == null) return null;
+
+  if (performModelling) {
+    parsedChunks = _mergeAndSort(parsedChunks);
+  }
+
+  StringBuffer sb = new StringBuffer();
+  for (_Chunk chunk in parsedChunks) {
+    chunk.printOn(sb);
+  }
+
+  return sb.toString();
+}
+
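+/// Merges and sorts [chunks]: metadata chunks are attached to the chunk that
+/// follows them, consecutive import/export chunks are grouped into a single
+/// [_ImportExportChunk], each chunk is merged and sorted internally, and the
+/// resulting list is sorted (see [_Chunk.compareTo]).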
+List<_Chunk> _mergeAndSort(List<_Chunk> chunks) {
+  List<_Chunk> result =
+      new List<_Chunk>.filled(chunks.length, null, growable: true);
+  List<_MetadataChunk> metadataChunks;
+  List<_SingleImportExportChunk> importExportChunks;
+  int outSize = 0;
+  StringBuffer sb = new StringBuffer();
+  for (_Chunk chunk in chunks) {
+    if (chunk is _MetadataChunk) {
+      metadataChunks ??= new List<_MetadataChunk>();
+      metadataChunks.add(chunk);
+    } else {
+      chunk.metadata = metadataChunks;
+      metadataChunks = null;
+      chunk.internalMergeAndSort(sb);
+      sb.clear();
+
+      if (chunk is _SingleImportExportChunk) {
+        importExportChunks ??= new List<_SingleImportExportChunk>();
+        importExportChunks.add(chunk);
+      } else {
+        if (importExportChunks != null) {
+          _ImportExportChunk importExportChunk = new _ImportExportChunk(
+              importExportChunks, importExportChunks.first.originalPosition);
+          importExportChunk.internalMergeAndSort(sb);
+          sb.clear();
+          result[outSize++] = importExportChunk;
+          importExportChunks = null;
+        }
+        result[outSize++] = chunk;
+      }
+    }
+  }
+  if (metadataChunks != null) {
+    for (_MetadataChunk metadata in metadataChunks) {
+      result[outSize++] = metadata;
+    }
+  }
+  if (importExportChunks != null) {
+    _ImportExportChunk importExportChunk = new _ImportExportChunk(
+        importExportChunks, importExportChunks.first.originalPosition);
+    importExportChunk.internalMergeAndSort(sb);
+    sb.clear();
+    result[outSize++] = importExportChunk;
+    importExportChunks = null;
+  }
+  result.length = outSize;
+
+  result.sort();
+  return result;
+}
+
+/// Parses a chunk of tokens and returns the next - unparsed - token or null
+/// on error.
+Token _textualizeTokens(
+    TextualOutlineListener listener,
+    Token token,
+    _UnknownTokenBuilder currentUnknown,
+    List<_Chunk> parsedChunks,
+    BoxedInt originalPosition,
+    bool addMarkerForUnknownForTest) {
+  _ClassChunk classChunk = listener.classStartToChunk[token];
+  if (classChunk != null) {
+    outputUnknownChunk(currentUnknown, parsedChunks, originalPosition,
+        addMarkerForUnknownForTest);
+    parsedChunks.add(classChunk..originalPosition = originalPosition.value++);
+    return _textualizeClass(
+        listener, classChunk, originalPosition, addMarkerForUnknownForTest);
+  }
+
+  _SingleImportExportChunk singleImportExport =
+      listener.importExportsStartToChunk[token];
+  if (singleImportExport != null) {
+    outputUnknownChunk(currentUnknown, parsedChunks, originalPosition,
+        addMarkerForUnknownForTest);
+    parsedChunks
+        .add(singleImportExport..originalPosition = originalPosition.value++);
+    return singleImportExport.endToken.next;
+  }
+
+  _TokenChunk knownUnsortableChunk =
+      listener.unsortableElementStartToChunk[token];
+  if (knownUnsortableChunk != null) {
+    outputUnknownChunk(currentUnknown, parsedChunks, originalPosition,
+        addMarkerForUnknownForTest);
+    parsedChunks
+        .add(knownUnsortableChunk..originalPosition = originalPosition.value++);
+    return knownUnsortableChunk.endToken.next;
+  }
+
+  _TokenChunk elementChunk = listener.elementStartToChunk[token];
+  if (elementChunk != null) {
+    outputUnknownChunk(currentUnknown, parsedChunks, originalPosition,
+        addMarkerForUnknownForTest);
+    parsedChunks.add(elementChunk..originalPosition = originalPosition.value++);
+    return elementChunk.endToken.next;
+  }
+
+  _MetadataChunk metadataChunk = listener.metadataStartToChunk[token];
+  if (metadataChunk != null) {
+    outputUnknownChunk(currentUnknown, parsedChunks, originalPosition,
+        addMarkerForUnknownForTest);
+    parsedChunks
+        .add(metadataChunk..originalPosition = originalPosition.value++);
+    return metadataChunk.endToken.next;
+  }
+
+  // This token --- and whatever tokens follow until we reach a start token we
+  // know --- is part of an unknown chunk. We don't yet know the end.
+  if (currentUnknown.start == null) {
+    // Start of unknown chunk.
+    currentUnknown.start = token;
+    currentUnknown.interimEnd = token;
+  } else {
+    // Continued unknown chunk.
+    currentUnknown.interimEnd = token;
+  }
+  return token.next;
+}
+
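+/// Splits [classChunk] into a header (up to and including the token whose
+/// endGroup is the class' end token, typically the opening `{`), member
+/// chunks (textualized recursively into the chunk's content), and a footer
+/// (the closing token). Returns the token following the class.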
+Token _textualizeClass(TextualOutlineListener listener, _ClassChunk classChunk,
+    BoxedInt originalPosition, bool addMarkerForUnknownForTest) {
+  Token token = classChunk.startToken;
+  // Class header.
+  while (token != classChunk.endToken) {
+    if (token.endGroup == classChunk.endToken) {
+      break;
+    }
+    token = token.next;
+  }
+  classChunk.headerEnd = token;
+
+  if (token == classChunk.endToken) {
+    // This happens for instance on named mixins, e.g.
+    // class C<T> = Object with A<Function(T)>;
+    // or when the class has no content, e.g.
+    // class C { }
+    // Either way, output the end token right away to avoid a weird line break.
+  } else {
+    token = token.next;
+    // "Normal" class with (possibly) content.
+    _UnknownTokenBuilder currentUnknown = new _UnknownTokenBuilder();
+    while (token != classChunk.endToken) {
+      token = _textualizeTokens(listener, token, currentUnknown,
+          classChunk.content, originalPosition, addMarkerForUnknownForTest);
+    }
+    outputUnknownChunk(currentUnknown, classChunk.content, originalPosition,
+        addMarkerForUnknownForTest);
+    classChunk.footerStart = classChunk.endToken;
+  }
+
+  return classChunk.endToken.next;
+}
+
+/// Outputs an unknown chunk if one has been started.
+///
+/// Resets the given builder.
+void outputUnknownChunk(
+    _UnknownTokenBuilder _currentUnknown,
+    List<_Chunk> parsedChunks,
+    BoxedInt originalPosition,
+    bool addMarkerForUnknownForTest) {
+  if (_currentUnknown.start == null) return;
+  parsedChunks.add(new _UnknownChunk(addMarkerForUnknownForTest,
+      _currentUnknown.start, _currentUnknown.interimEnd)
+    ..originalPosition = originalPosition.value++);
+  _currentUnknown.start = null;
+  _currentUnknown.interimEnd = null;
+}
+
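+/// Command line entry point for manual use: pass a Dart file as the first
+/// argument, optionally followed by `--overwrite` (write the outline back to
+/// the file) or `--benchmark` (time repeated outline computations).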
+main(List<String> args) {
+  File f = new File(args[0]);
+  Uint8List data = f.readAsBytesSync();
+  String outline =
+      textualOutline(data, throwOnUnexpected: true, performModelling: true);
+  if (args.length > 1 && args[1] == "--overwrite") {
+    f.writeAsStringSync(outline);
+  } else if (args.length > 1 && args[1] == "--benchmark") {
+    Stopwatch stopwatch = new Stopwatch()..start();
+    int numRuns = 100;
+    for (int i = 0; i < numRuns; i++) {
+      String outline2 =
+          textualOutline(data, throwOnUnexpected: true, performModelling: true);
+      if (outline2 != outline) throw "Not the same result every time";
+    }
+    stopwatch.stop();
+    print("First $numRuns took ${stopwatch.elapsedMilliseconds} ms "
+        "(i.e. ${stopwatch.elapsedMilliseconds / numRuns}ms/iteration)");
+    stopwatch = new Stopwatch()..start();
+    numRuns = 2500;
+    for (int i = 0; i < numRuns; i++) {
+      String outline2 =
+          textualOutline(data, throwOnUnexpected: true, performModelling: true);
+      if (outline2 != outline) throw "Not the same result every time";
+    }
+    stopwatch.stop();
+    print("Next $numRuns took ${stopwatch.elapsedMilliseconds} ms "
+        "(i.e. ${stopwatch.elapsedMilliseconds / numRuns}ms/iteration)");
+  } else {
+    print(outline);
+  }
+}
+
+class TextualOutlineListener extends Listener {
+  Map<Token, _ClassChunk> classStartToChunk = {};
+  Map<Token, _TokenChunk> elementStartToChunk = {};
+  Map<Token, _MetadataChunk> metadataStartToChunk = {};
+  Map<Token, _SingleImportExportChunk> importExportsStartToChunk = {};
+  Map<Token, _TokenChunk> unsortableElementStartToChunk = {};
+
+  @override
+  void endClassMethod(Token getOrSet, Token beginToken, Token beginParam,
+      Token beginInitializers, Token endToken) {
+    elementStartToChunk[beginToken] =
+        new _ClassMethodChunk(beginToken, endToken);
+  }
+
+  @override
+  void endTopLevelMethod(Token beginToken, Token getOrSet, Token endToken) {
+    elementStartToChunk[beginToken] =
+        new _TopLevelMethodChunk(beginToken, endToken);
+  }
+
+  @override
+  void endClassFactoryMethod(
+      Token beginToken, Token factoryKeyword, Token endToken) {
+    elementStartToChunk[beginToken] =
+        new _ClassFactoryMethodChunk(beginToken, endToken);
+  }
+
+  @override
+  void handleNativeFunctionBodySkipped(Token nativeToken, Token semicolon) {
+    // Allow native functions.
+  }
+
+  @override
+  void endClassFields(
+      Token abstractToken,
+      Token externalToken,
+      Token staticToken,
+      Token covariantToken,
+      Token lateToken,
+      Token varFinalOrConst,
+      int count,
+      Token beginToken,
+      Token endToken) {
+    elementStartToChunk[beginToken] =
+        new _ClassFieldsChunk(beginToken, endToken);
+  }
+
+  @override
+  void endTopLevelFields(
+      Token externalToken,
+      Token staticToken,
+      Token covariantToken,
+      Token lateToken,
+      Token varFinalOrConst,
+      int count,
+      Token beginToken,
+      Token endToken) {
+    elementStartToChunk[beginToken] =
+        new _TopLevelFieldsChunk(beginToken, endToken);
+  }
+
+  void endFunctionTypeAlias(
+      Token typedefKeyword, Token equals, Token endToken) {
+    elementStartToChunk[typedefKeyword] =
+        new _FunctionTypeAliasChunk(typedefKeyword, endToken);
+  }
+
+  void endEnum(Token enumKeyword, Token leftBrace, int count) {
+    elementStartToChunk[enumKeyword] =
+        new _EnumChunk(enumKeyword, leftBrace.endGroup);
+  }
+
+  @override
+  void endLibraryName(Token libraryKeyword, Token semicolon) {
+    unsortableElementStartToChunk[libraryKeyword] =
+        new _LibraryNameChunk(libraryKeyword, semicolon);
+  }
+
+  @override
+  void endPart(Token partKeyword, Token semicolon) {
+    unsortableElementStartToChunk[partKeyword] =
+        new _PartChunk(partKeyword, semicolon);
+  }
+
+  @override
+  void endPartOf(
+      Token partKeyword, Token ofKeyword, Token semicolon, bool hasName) {
+    unsortableElementStartToChunk[partKeyword] =
+        new _PartOfChunk(partKeyword, semicolon);
+  }
+
+  @override
+  void endMetadata(Token beginToken, Token periodBeforeName, Token endToken) {
+    // Metadata's endToken is the one *after* the actual end of the metadata.
+    metadataStartToChunk[beginToken] =
+        new _MetadataChunk(beginToken, endToken.previous);
+  }
+
+  @override
+  void endClassDeclaration(Token beginToken, Token endToken) {
+    classStartToChunk[beginToken] =
+        new _ClassDeclarationChunk(beginToken, endToken);
+  }
+
+  @override
+  void endMixinDeclaration(Token mixinKeyword, Token endToken) {
+    classStartToChunk[mixinKeyword] =
+        new _MixinDeclarationChunk(mixinKeyword, endToken);
+  }
+
+  @override
+  void endExtensionDeclaration(
+      Token extensionKeyword, Token onKeyword, Token endToken) {
+    classStartToChunk[extensionKeyword] =
+        new _ExtensionDeclarationChunk(extensionKeyword, endToken);
+  }
+
+  @override
+  void endNamedMixinApplication(Token beginToken, Token classKeyword,
+      Token equals, Token implementsKeyword, Token endToken) {
+    classStartToChunk[beginToken] =
+        new _NamedMixinApplicationChunk(beginToken, endToken);
+  }
+
+  Token firstShowOrHide;
+  List<_NamespaceCombinator> _combinators;
+  List<String> _combinatorNames;
+
+  @override
+  beginExport(Token export) {
+    _combinators = <_NamespaceCombinator>[];
+  }
+
+  @override
+  beginImport(Token import) {
+    _combinators = <_NamespaceCombinator>[];
+  }
+
+  @override
+  void beginShow(Token show) {
+    if (firstShowOrHide == null) firstShowOrHide = show;
+    _combinatorNames = <String>[];
+  }
+
+  @override
+  void beginHide(Token hide) {
+    if (firstShowOrHide == null) firstShowOrHide = hide;
+    _combinatorNames = <String>[];
+  }
+
+  @override
+  void endHide(Token hide) {
+    _combinators.add(new _NamespaceCombinator.hide(_combinatorNames));
+    _combinatorNames = null;
+  }
+
+  @override
+  void endShow(Token show) {
+    _combinators.add(new _NamespaceCombinator.show(_combinatorNames));
+    _combinatorNames = null;
+  }
+
+  @override
+  void handleIdentifier(Token token, IdentifierContext context) {
+    if (_combinatorNames != null && context == IdentifierContext.combinator) {
+      _combinatorNames.add(token.lexeme);
+    }
+  }
+
+  @override
+  void endImport(Token importKeyword, Token semicolon) {
+    if (importKeyword != null && semicolon != null) {
+      importExportsStartToChunk[importKeyword] = new _ImportChunk(
+          importKeyword, semicolon, firstShowOrHide, _combinators);
+    }
+    _combinators = null;
+    firstShowOrHide = null;
+  }
+
+  @override
+  void endExport(Token exportKeyword, Token semicolon) {
+    importExportsStartToChunk[exportKeyword] = new _ExportChunk(
+        exportKeyword, semicolon, firstShowOrHide, _combinators);
+    _combinators = null;
+    firstShowOrHide = null;
+  }
+}
diff --git a/pkg/front_end/lib/src/fasta/util/textual_outline_v2.dart b/pkg/front_end/lib/src/fasta/util/textual_outline_v2.dart
deleted file mode 100644
index d808874..0000000
--- a/pkg/front_end/lib/src/fasta/util/textual_outline_v2.dart
+++ /dev/null
@@ -1,686 +0,0 @@
-// Copyright (c) 2020, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-import 'dart:typed_data' show Uint8List;
-
-import 'dart:io' show File;
-
-import 'package:_fe_analyzer_shared/src/parser/class_member_parser.dart'
-    show ClassMemberParser;
-
-import 'package:_fe_analyzer_shared/src/scanner/scanner.dart'
-    show ErrorToken, LanguageVersionToken, Scanner;
-
-import 'package:_fe_analyzer_shared/src/scanner/utf8_bytes_scanner.dart'
-    show Utf8BytesScanner;
-
-import '../../fasta/source/directive_listener.dart' show DirectiveListener;
-
-import 'package:_fe_analyzer_shared/src/scanner/token.dart' show Token;
-
-abstract class _Chunk implements Comparable<_Chunk> {
-  final int originalPosition;
-
-  List<_MetadataChunk> metadata;
-
-  _Chunk(this.originalPosition);
-
-  void printOn(StringBuffer sb, String indent);
-
-  void printMetadata(StringBuffer sb, String indent) {
-    if (metadata != null) {
-      for (_MetadataChunk m in metadata) {
-        m.printMetadataOn(sb, indent);
-      }
-    }
-  }
-
-  void internalMergeAndSort();
-
-  @override
-  int compareTo(_Chunk other) {
-    // Generally we compare according to the original position.
-    if (originalPosition < other.originalPosition) return -1;
-    return 1;
-  }
-}
-
-class _LanguageVersionChunk extends _Chunk {
-  final int major;
-  final int minor;
-
-  _LanguageVersionChunk(int originalPosition, this.major, this.minor)
-      : super(originalPosition);
-
-  @override
-  void printOn(StringBuffer sb, String indent) {
-    if (sb.isNotEmpty) {
-      sb.write("\n\n");
-    }
-    printMetadata(sb, indent);
-    sb.write("// @dart = ${major}.${minor}");
-  }
-
-  @override
-  void internalMergeAndSort() {
-    // Cannot be sorted.
-  }
-}
-
-abstract class _TokenChunk extends _Chunk {
-  final Token startToken;
-  final Token endToken;
-
-  _TokenChunk(int originalPosition, this.startToken, this.endToken)
-      : super(originalPosition);
-
-  void printOn(StringBuffer sb, String indent) {
-    int endOfLast = startToken.end;
-    if (sb.isNotEmpty) {
-      sb.write("\n");
-      if (indent.isEmpty && this is! _SingleImportExportChunk) {
-        // Hack to imitate v1.
-        sb.write("\n");
-      }
-    }
-    printMetadata(sb, indent);
-    sb.write(indent);
-
-    Token token = startToken;
-    Token afterEnd = endToken.next;
-    while (token != afterEnd) {
-      if (token.offset > endOfLast) {
-        sb.write(" ");
-      }
-
-      sb.write(token.lexeme);
-      endOfLast = token.end;
-      token = token.next;
-    }
-  }
-
-  @override
-  void internalMergeAndSort() {
-    // Generally cannot be sorted.
-  }
-}
-
-abstract class _SortableChunk extends _TokenChunk {
-  _SortableChunk(int originalPosition, Token startToken, Token endToken)
-      : super(originalPosition, startToken, endToken);
-
-  @override
-  int compareTo(_Chunk o) {
-    if (o is! _SortableChunk) return super.compareTo(o);
-
-    _SortableChunk other = o;
-
-    // Compare lexemes from startToken and at most the next 10 tokens.
-    // For valid code this should be more than enough. Note that this won't
-    // sort as a text-sort would as for instance "C<Foo>" and "C2<Foo>" will
-    // say "C" < "C2" where a text-sort would say "C<" > "C2". This doesn't
-    // really matter as long as the sorting is consistent (i.e. the textual
-    // outline always sorts like this).
-    Token thisToken = startToken;
-    Token otherToken = other.startToken;
-    int steps = 0;
-    while (thisToken.lexeme == otherToken.lexeme) {
-      if (steps++ > 10) break;
-      thisToken = thisToken.next;
-      otherToken = otherToken.next;
-    }
-    if (thisToken.lexeme == otherToken.lexeme) return super.compareTo(o);
-    return thisToken.lexeme.compareTo(otherToken.lexeme);
-  }
-}
-
-class _ImportExportChunk extends _Chunk {
-  final List<_SingleImportExportChunk> content =
-      new List<_SingleImportExportChunk>();
-
-  _ImportExportChunk(int originalPosition) : super(originalPosition);
-
-  @override
-  void printOn(StringBuffer sb, String indent) {
-    if (sb.isNotEmpty) {
-      sb.write("\n");
-    }
-    printMetadata(sb, indent);
-
-    for (_SingleImportExportChunk chunk in content) {
-      chunk.printOn(sb, indent);
-    }
-  }
-
-  @override
-  void internalMergeAndSort() {
-    content.sort();
-  }
-}
-
-class _SingleImportExportChunk extends _SortableChunk {
-  _SingleImportExportChunk(
-      int originalPosition, Token startToken, Token endToken)
-      : super(originalPosition, startToken, endToken);
-}
-
-class _KnownUnsortableChunk extends _TokenChunk {
-  _KnownUnsortableChunk(int originalPosition, Token startToken, Token endToken)
-      : super(originalPosition, startToken, endToken);
-}
-
-class _ClassChunk extends _SortableChunk {
-  List<_Chunk> content = new List<_Chunk>();
-  Token headerEnd;
-  Token footerStart;
-
-  _ClassChunk(int originalPosition, Token startToken, Token endToken)
-      : super(originalPosition, startToken, endToken);
-
-  void printOn(StringBuffer sb, String indent) {
-    int endOfLast = startToken.end;
-    if (sb.isNotEmpty) {
-      sb.write("\n\n");
-      sb.write(indent);
-    }
-
-    printMetadata(sb, indent);
-
-    // Header.
-    Token token = startToken;
-    Token afterEnd = headerEnd.next;
-    while (token != afterEnd) {
-      if (token.offset > endOfLast) {
-        sb.write(" ");
-      }
-
-      sb.write(token.lexeme);
-      endOfLast = token.end;
-
-      token = token.next;
-    }
-
-    // Content.
-    for (_Chunk chunk in content) {
-      chunk.printOn(sb, "  $indent");
-    }
-
-    // Footer.
-    if (footerStart != null) {
-      if (content.isNotEmpty) {
-        sb.write("\n");
-        sb.write(indent);
-      }
-      endOfLast = footerStart.end;
-      token = footerStart;
-      afterEnd = endToken.next;
-      while (token != afterEnd) {
-        if (token.offset > endOfLast) {
-          sb.write(" ");
-        }
-
-        sb.write(token.lexeme);
-        endOfLast = token.end;
-
-        token = token.next;
-      }
-    }
-  }
-
-  @override
-  void internalMergeAndSort() {
-    content = _mergeAndSort(content);
-  }
-}
-
-class _ProcedureEtcChunk extends _SortableChunk {
-  final Set<int> nonClassEndOffsets;
-  _ProcedureEtcChunk(int originalPosition, Token startToken, Token endToken,
-      this.nonClassEndOffsets)
-      : super(originalPosition, startToken, endToken);
-
-  void printOn(StringBuffer sb, String indent) {
-    int endOfLast = startToken.end;
-    if (sb.isNotEmpty) {
-      sb.write("\n");
-      if (indent.isEmpty) {
-        // Hack to imitate v1.
-        sb.write("\n");
-      }
-    }
-    printMetadata(sb, indent);
-    sb.write(indent);
-
-    Token token = startToken;
-    Token afterEnd = endToken.next;
-    bool nextTokenIsEndGroup = false;
-    while (token != afterEnd) {
-      if (token.offset > endOfLast && !nextTokenIsEndGroup) {
-        sb.write(" ");
-      }
-
-      sb.write(token.lexeme);
-      endOfLast = token.end;
-
-      if (token.endGroup != null &&
-          nonClassEndOffsets.contains(token.endGroup.offset)) {
-        token = token.endGroup;
-        nextTokenIsEndGroup = true;
-      } else {
-        token = token.next;
-        nextTokenIsEndGroup = false;
-      }
-    }
-  }
-}
-
-class _MetadataChunk extends _TokenChunk {
-  _MetadataChunk(int originalPosition, Token startToken, Token endToken)
-      : super(originalPosition, startToken, endToken);
-
-  void printMetadataOn(StringBuffer sb, String indent) {
-    int endOfLast = startToken.end;
-    sb.write(indent);
-    Token token = startToken;
-    Token afterEnd = endToken.next;
-    while (token != afterEnd) {
-      if (token.offset > endOfLast) {
-        sb.write(" ");
-      }
-
-      sb.write(token.lexeme);
-      endOfLast = token.end;
-      token = token.next;
-    }
-    sb.write("\n");
-  }
-}
-
-class _UnknownChunk extends _TokenChunk {
-  _UnknownChunk(int originalPosition, Token startToken, Token endToken)
-      : super(originalPosition, startToken, endToken);
-}
-
-class _UnknownTokenBuilder {
-  Token start;
-  Token interimEnd;
-}
-
-class BoxedInt {
-  int value;
-  BoxedInt(this.value);
-}
-
-// TODO(jensj): Better support for show/hide on imports/exports.
-
-String textualOutline(List<int> rawBytes,
-    {bool throwOnUnexpected: false,
-    bool performModelling: false,
-    bool addMarkerForUnknownForTest: false}) {
-  // TODO(jensj): We need to specify the scanner settings to match that of the
-  // compiler!
-  Uint8List bytes = new Uint8List(rawBytes.length + 1);
-  bytes.setRange(0, rawBytes.length, rawBytes);
-
-  List<_Chunk> parsedChunks = new List<_Chunk>();
-
-  BoxedInt originalPosition = new BoxedInt(0);
-
-  Utf8BytesScanner scanner = new Utf8BytesScanner(bytes, includeComments: false,
-      languageVersionChanged:
-          (Scanner scanner, LanguageVersionToken languageVersion) {
-    parsedChunks.add(new _LanguageVersionChunk(originalPosition.value++,
-        languageVersion.major, languageVersion.minor));
-  });
-  Token firstToken = scanner.tokenize();
-  if (firstToken == null) {
-    if (throwOnUnexpected) throw "firstToken is null";
-    return null;
-  }
-
-  TextualOutlineListener listener = new TextualOutlineListener();
-  ClassMemberParser classMemberParser = new ClassMemberParser(listener);
-  classMemberParser.parseUnit(firstToken);
-
-  Token nextToken = firstToken;
-  _UnknownTokenBuilder currentUnknown = new _UnknownTokenBuilder();
-  while (nextToken != null) {
-    if (nextToken is ErrorToken) {
-      return null;
-    }
-    if (nextToken.isEof) break;
-
-    nextToken = _textualizeTokens(
-        listener, nextToken, currentUnknown, parsedChunks, originalPosition);
-  }
-  outputUnknownChunk(currentUnknown, parsedChunks, originalPosition);
-
-  if (nextToken == null) return null;
-
-  if (performModelling) {
-    parsedChunks = _mergeAndSort(parsedChunks);
-  }
-
-  StringBuffer sb = new StringBuffer();
-  for (_Chunk chunk in parsedChunks) {
-    chunk.printOn(sb, "");
-  }
-
-  return sb.toString();
-}
-
-List<_Chunk> _mergeAndSort(List<_Chunk> chunks) {
-  // TODO(jensj): Only put into new list of there's metadata.
-  List<_Chunk> result =
-      new List<_Chunk>.filled(chunks.length, null, growable: true);
-  List<_MetadataChunk> metadataChunks;
-  int outSize = 0;
-  for (_Chunk chunk in chunks) {
-    if (chunk is _MetadataChunk) {
-      metadataChunks ??= new List<_MetadataChunk>();
-      metadataChunks.add(chunk);
-    } else {
-      chunk.metadata = metadataChunks;
-      metadataChunks = null;
-      chunk.internalMergeAndSort();
-      result[outSize++] = chunk;
-    }
-  }
-  if (metadataChunks != null) {
-    for (_MetadataChunk metadata in metadataChunks) {
-      result[outSize++] = metadata;
-    }
-  }
-  result.length = outSize;
-
-  result.sort();
-  return result;
-}
-
-/// Parses a chunk of tokens and returns the next - unparsed - token or null
-/// on error.
-Token _textualizeTokens(
-    TextualOutlineListener listener,
-    Token token,
-    _UnknownTokenBuilder currentUnknown,
-    List<_Chunk> parsedChunks,
-    BoxedInt originalPosition) {
-  Token classEndToken = listener.classStartToFinish[token];
-  if (classEndToken != null) {
-    outputUnknownChunk(currentUnknown, parsedChunks, originalPosition);
-
-    _ClassChunk classChunk =
-        new _ClassChunk(originalPosition.value++, token, classEndToken);
-    parsedChunks.add(classChunk);
-    return _textualizeClass(listener, classChunk, originalPosition);
-  }
-
-  Token isImportExportEndToken = listener.importExportsStartToFinish[token];
-  if (isImportExportEndToken != null) {
-    outputUnknownChunk(currentUnknown, parsedChunks, originalPosition);
-
-    _ImportExportChunk importExportChunk =
-        new _ImportExportChunk(originalPosition.value++);
-    parsedChunks.add(importExportChunk);
-    return _textualizeImportExports(listener, token, importExportChunk);
-  }
-
-  Token isKnownUnsortableEndToken =
-      listener.unsortableElementStartToFinish[token];
-  if (isKnownUnsortableEndToken != null) {
-    outputUnknownChunk(currentUnknown, parsedChunks, originalPosition);
-
-    Token beginToken = token;
-    parsedChunks.add(new _KnownUnsortableChunk(
-        originalPosition.value++, beginToken, isKnownUnsortableEndToken));
-    return isKnownUnsortableEndToken.next;
-  }
-
-  Token elementEndToken = listener.elementStartToFinish[token];
-  if (elementEndToken != null) {
-    outputUnknownChunk(currentUnknown, parsedChunks, originalPosition);
-
-    Token beginToken = token;
-    parsedChunks.add(new _ProcedureEtcChunk(originalPosition.value++,
-        beginToken, elementEndToken, listener.nonClassEndOffsets));
-    return elementEndToken.next;
-  }
-
-  Token metadataEndToken = listener.metadataStartToFinish[token];
-  if (metadataEndToken != null) {
-    outputUnknownChunk(currentUnknown, parsedChunks, originalPosition);
-
-    Token beginToken = token;
-    parsedChunks.add(new _MetadataChunk(
-        originalPosition.value++, beginToken, metadataEndToken));
-    return metadataEndToken.next;
-  }
-
-  // This token --- and whatever else tokens until we reach a start token we
-  // know is an unknown chunk. We don't yet know the end.
-  if (currentUnknown.start == null) {
-    // Start of unknown chunk.
-    currentUnknown.start = token;
-    currentUnknown.interimEnd = token;
-  } else {
-    // Continued unknown chunk.
-    currentUnknown.interimEnd = token;
-  }
-  return token.next;
-}
-
-Token _textualizeImportExports(TextualOutlineListener listener, Token token,
-    _ImportExportChunk importExportChunk) {
-  int originalPosition = 0;
-  Token endToken = listener.importExportsStartToFinish[token];
-  while (endToken != null) {
-    importExportChunk.content
-        .add(new _SingleImportExportChunk(originalPosition++, token, endToken));
-    token = endToken.next;
-    endToken = listener.importExportsStartToFinish[token];
-  }
-
-  return token;
-}
-
-Token _textualizeClass(TextualOutlineListener listener, _ClassChunk classChunk,
-    BoxedInt originalPosition) {
-  Token token = classChunk.startToken;
-  // Class header.
-  while (token != classChunk.endToken) {
-    if (token.endGroup == classChunk.endToken) {
-      break;
-    }
-    token = token.next;
-  }
-  classChunk.headerEnd = token;
-
-  if (token == classChunk.endToken) {
-    // This for instance happens on named mixins, e.g.
-    // class C<T> = Object with A<Function(T)>;
-    // or when the class has no content, e.g.
-    // class C { }
-    // either way, output the end token right away to avoid a weird line break.
-  } else {
-    token = token.next;
-    // "Normal" class with (possibly) content.
-    _UnknownTokenBuilder currentUnknown = new _UnknownTokenBuilder();
-    while (token != classChunk.endToken) {
-      token = _textualizeTokens(listener, token, currentUnknown,
-          classChunk.content, originalPosition);
-    }
-    outputUnknownChunk(currentUnknown, classChunk.content, originalPosition);
-    classChunk.footerStart = classChunk.endToken;
-  }
-
-  return classChunk.endToken.next;
-}
-
-/// Outputs an unknown chunk if one has been started.
-///
-/// Resets the given builder.
-void outputUnknownChunk(_UnknownTokenBuilder _currentUnknown,
-    List<_Chunk> parsedChunks, BoxedInt originalPosition) {
-  if (_currentUnknown.start == null) return;
-  parsedChunks.add(new _UnknownChunk(
-    originalPosition.value++,
-    _currentUnknown.start,
-    _currentUnknown.interimEnd,
-  ));
-  _currentUnknown.start = null;
-  _currentUnknown.interimEnd = null;
-}
-
-main(List<String> args) {
-  File f = new File(args[0]);
-  Uint8List data = f.readAsBytesSync();
-  String outline =
-      textualOutline(data, throwOnUnexpected: true, performModelling: true);
-  if (args.length > 1 && args[1] == "--overwrite") {
-    f.writeAsStringSync(outline);
-  } else if (args.length > 1 && args[1] == "--benchmark") {
-    Stopwatch stopwatch = new Stopwatch()..start();
-    for (int i = 0; i < 100; i++) {
-      String outline2 =
-          textualOutline(data, throwOnUnexpected: true, performModelling: true);
-      if (outline2 != outline) throw "Not the same result every time";
-    }
-    stopwatch.stop();
-    print("First 100 took ${stopwatch.elapsedMilliseconds} ms");
-    stopwatch = new Stopwatch()..start();
-    for (int i = 0; i < 10000; i++) {
-      String outline2 =
-          textualOutline(data, throwOnUnexpected: true, performModelling: true);
-      if (outline2 != outline) throw "Not the same result every time";
-    }
-    stopwatch.stop();
-    print("Next 10,000 took ${stopwatch.elapsedMilliseconds} ms");
-  } else {
-    print(outline);
-  }
-}
-
-class TextualOutlineListener extends DirectiveListener {
-  Set<int> nonClassEndOffsets = new Set<int>();
-  Map<Token, Token> classStartToFinish = {};
-  Map<Token, Token> elementStartToFinish = {};
-  Map<Token, Token> metadataStartToFinish = {};
-  Map<Token, Token> importExportsStartToFinish = {};
-  Map<Token, Token> unsortableElementStartToFinish = {};
-
-  @override
-  void endClassMethod(Token getOrSet, Token beginToken, Token beginParam,
-      Token beginInitializers, Token endToken) {
-    nonClassEndOffsets.add(endToken.offset);
-    elementStartToFinish[beginToken] = endToken;
-  }
-
-  @override
-  void endTopLevelMethod(Token beginToken, Token getOrSet, Token endToken) {
-    nonClassEndOffsets.add(endToken.offset);
-    elementStartToFinish[beginToken] = endToken;
-  }
-
-  @override
-  void endClassFactoryMethod(
-      Token beginToken, Token factoryKeyword, Token endToken) {
-    nonClassEndOffsets.add(endToken.offset);
-    elementStartToFinish[beginToken] = endToken;
-  }
-
-  @override
-  void handleNativeFunctionBodySkipped(Token nativeToken, Token semicolon) {
-    // Allow native functions.
-  }
-
-  @override
-  void endClassFields(
-      Token abstractToken,
-      Token externalToken,
-      Token staticToken,
-      Token covariantToken,
-      Token lateToken,
-      Token varFinalOrConst,
-      int count,
-      Token beginToken,
-      Token endToken) {
-    elementStartToFinish[beginToken] = endToken;
-  }
-
-  @override
-  void endTopLevelFields(
-      Token externalToken,
-      Token staticToken,
-      Token covariantToken,
-      Token lateToken,
-      Token varFinalOrConst,
-      int count,
-      Token beginToken,
-      Token endToken) {
-    elementStartToFinish[beginToken] = endToken;
-  }
-
-  void endFunctionTypeAlias(
-      Token typedefKeyword, Token equals, Token endToken) {
-    elementStartToFinish[typedefKeyword] = endToken;
-  }
-
-  void endEnum(Token enumKeyword, Token leftBrace, int count) {
-    elementStartToFinish[enumKeyword] = leftBrace.endGroup;
-  }
-
-  @override
-  void endLibraryName(Token libraryKeyword, Token semicolon) {
-    unsortableElementStartToFinish[libraryKeyword] = semicolon;
-  }
-
-  @override
-  void endPart(Token partKeyword, Token semicolon) {
-    unsortableElementStartToFinish[partKeyword] = semicolon;
-  }
-
-  @override
-  void endPartOf(
-      Token partKeyword, Token ofKeyword, Token semicolon, bool hasName) {
-    unsortableElementStartToFinish[partKeyword] = semicolon;
-  }
-
-  @override
-  void endMetadata(Token beginToken, Token periodBeforeName, Token endToken) {
-    // Metadata's endToken is the one *after* the actual end of the metadata.
-    metadataStartToFinish[beginToken] = endToken.previous;
-  }
-
-  @override
-  void endClassDeclaration(Token beginToken, Token endToken) {
-    classStartToFinish[beginToken] = endToken;
-  }
-
-  @override
-  void endMixinDeclaration(Token mixinKeyword, Token endToken) {
-    classStartToFinish[mixinKeyword] = endToken;
-  }
-
-  @override
-  void endExtensionDeclaration(
-      Token extensionKeyword, Token onKeyword, Token endToken) {
-    classStartToFinish[extensionKeyword] = endToken;
-  }
-
-  @override
-  void endNamedMixinApplication(Token beginToken, Token classKeyword,
-      Token equals, Token implementsKeyword, Token endToken) {
-    classStartToFinish[beginToken] = endToken;
-  }
-
-  @override
-  void endImport(Token importKeyword, Token semicolon) {
-    importExportsStartToFinish[importKeyword] = semicolon;
-  }
-
-  @override
-  void endExport(Token exportKeyword, Token semicolon) {
-    importExportsStartToFinish[exportKeyword] = semicolon;
-  }
-}
diff --git a/pkg/front_end/test/fasta/testing/suite.dart b/pkg/front_end/test/fasta/testing/suite.dart
index 67cb343..db48578 100644
--- a/pkg/front_end/test/fasta/testing/suite.dart
+++ b/pkg/front_end/test/fasta/testing/suite.dart
@@ -155,7 +155,11 @@
 const String experimentalFlagOptions = '--enable-experiment=';
 const String overwriteCurrentSdkVersion = '--overwrite-current-sdk-version=';
 
-class TestOptions {
+/// Options used for all tests within a given folder.
+///
+/// This is used, for instance, for defining target-, mode-, and
+/// experiment-specific test folders.
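+///
+/// The options are read from a `folder.options` file in the folder; for
+/// example (illustrative flag), a line like `--enable-experiment=non-nullable`
+/// enables that experiment for all tests in the folder.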
+class FolderOptions {
   final Map<ExperimentalFlag, bool> _experimentalFlags;
   final bool forceLateLowering;
   final bool forceNoExplicitGetterCalls;
@@ -163,7 +167,7 @@
   final String target;
   final String overwriteCurrentSdkVersion;
 
-  TestOptions(this._experimentalFlags,
+  FolderOptions(this._experimentalFlags,
       {this.forceLateLowering: false,
       this.forceNoExplicitGetterCalls: false,
       this.nnbdAgnosticMode: false,
@@ -184,8 +188,12 @@
   }
 }
 
-class LinkDependenciesOptions {
-  final Set<Uri> content;
+/// Options for a single test located within its own subfolder.
+///
+/// This is used, for instance, for defining custom link dependencies and
+/// setting up custom experimental flag defaults for a single test.
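+///
+/// The options are read from a `test.options` file in the test's subfolder.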
+class TestOptions {
+  final Set<Uri> linkDependencies;
   final NnbdMode nnbdMode;
   final AllowedExperimentalFlags allowedExperimentalFlags;
   final Map<ExperimentalFlag, Version> experimentEnabledVersion;
@@ -193,12 +201,12 @@
   Component component;
   List<Iterable<String>> errors;
 
-  LinkDependenciesOptions(this.content,
+  TestOptions(this.linkDependencies,
       {this.nnbdMode,
       this.allowedExperimentalFlags,
       this.experimentEnabledVersion,
       this.experimentReleasedVersion})
-      : assert(content != null);
+      : assert(linkDependencies != null);
 }
 
 class FastaContext extends ChainContext with MatchContext {
@@ -216,8 +224,8 @@
       <Component, List<Iterable<String>>>{};
   final Uri platformBinaries;
   final Map<UriConfiguration, UriTranslator> _uriTranslators = {};
+  final Map<Uri, FolderOptions> _folderOptions = {};
   final Map<Uri, TestOptions> _testOptions = {};
-  final Map<Uri, LinkDependenciesOptions> _linkDependencies = {};
   final Map<Uri, Uri> _librariesJson = {};
 
   @override
@@ -302,22 +310,22 @@
     }
   }
 
-  TestOptions _computeTestOptionsForDirectory(Directory directory) {
-    TestOptions testOptions = _testOptions[directory.uri];
-    if (testOptions == null) {
+  FolderOptions _computeFolderOptions(Directory directory) {
+    FolderOptions folderOptions = _folderOptions[directory.uri];
+    if (folderOptions == null) {
       bool forceLateLowering = false;
       bool forceNoExplicitGetterCalls = false;
       bool nnbdAgnosticMode = false;
       String target = "vm";
       if (directory.uri == baseUri) {
-        testOptions = new TestOptions({},
+        folderOptions = new FolderOptions({},
             forceLateLowering: forceLateLowering,
             forceNoExplicitGetterCalls: forceNoExplicitGetterCalls,
             nnbdAgnosticMode: nnbdAgnosticMode,
             target: target);
       } else {
         File optionsFile =
-            new File.fromUri(directory.uri.resolve('test.options'));
+            new File.fromUri(directory.uri.resolve('folder.options'));
         if (optionsFile.existsSync()) {
           List<String> experimentalFlagsArguments = [];
           String overwriteCurrentSdkVersionArgument = null;
@@ -345,7 +353,7 @@
             }
           }
 
-          testOptions = new TestOptions(
+          folderOptions = new FolderOptions(
               parseExperimentalFlags(
                   parseExperimentalArguments(experimentalFlagsArguments),
                   onError: (String message) => throw new ArgumentError(message),
@@ -357,21 +365,21 @@
               target: target,
               overwriteCurrentSdkVersion: overwriteCurrentSdkVersionArgument);
         } else {
-          testOptions = _computeTestOptionsForDirectory(directory.parent);
+          folderOptions = _computeFolderOptions(directory.parent);
         }
       }
-      _testOptions[directory.uri] = testOptions;
+      _folderOptions[directory.uri] = folderOptions;
     }
-    return testOptions;
+    return folderOptions;
   }
 
   /// Computes the experimental flag for [description].
   ///
   /// [forcedExperimentalFlags] is used to override the default flags for
   /// [description].
-  TestOptions computeTestOptions(TestDescription description) {
+  FolderOptions computeFolderOptions(TestDescription description) {
     Directory directory = new File.fromUri(description.uri).parent;
-    return _computeTestOptionsForDirectory(directory);
+    return _computeFolderOptions(directory);
   }
 
   Future<UriTranslator> computeUriTranslator(
@@ -381,7 +389,7 @@
     if (uriTranslator == null) {
       Uri sdk = Uri.base.resolve("sdk/");
       Uri packages = Uri.base.resolve(".packages");
-      TestOptions testOptions = computeTestOptions(description);
+      FolderOptions folderOptions = computeFolderOptions(description);
       CompilerOptions compilerOptions = new CompilerOptions()
         ..onDiagnostic = (DiagnosticMessage message) {
           throw message.plainTextFormatted.join("\n");
@@ -390,17 +398,17 @@
         ..packagesFileUri = uriConfiguration.packageConfigUri ?? packages
         ..environmentDefines = {}
         ..experimentalFlags =
-            testOptions.computeExperimentalFlags(experimentalFlags)
+            folderOptions.computeExperimentalFlags(experimentalFlags)
         ..nnbdMode = weak
             ? NnbdMode.Weak
-            : (testOptions.nnbdAgnosticMode
+            : (folderOptions.nnbdAgnosticMode
                 ? NnbdMode.Agnostic
                 : NnbdMode.Strong)
         ..librariesSpecificationUri =
             uriConfiguration.librariesSpecificationUri;
-      if (testOptions.overwriteCurrentSdkVersion != null) {
+      if (folderOptions.overwriteCurrentSdkVersion != null) {
         compilerOptions.currentSdkVersion =
-            testOptions.overwriteCurrentSdkVersion;
+            folderOptions.overwriteCurrentSdkVersion;
       }
       ProcessedOptions options = new ProcessedOptions(options: compilerOptions);
       uriTranslator = await options.getUriTranslator();
@@ -409,16 +417,14 @@
     return uriTranslator;
   }
 
-  /// Computes the link dependencies for [description].
-  LinkDependenciesOptions computeLinkDependenciesOptions(
-      TestDescription description) {
+  /// Computes the test options for [description].
+  TestOptions computeTestOptions(TestDescription description) {
     Directory directory = new File.fromUri(description.uri).parent;
-    LinkDependenciesOptions linkDependenciesOptions =
-        _linkDependencies[directory.uri];
-    if (linkDependenciesOptions == null) {
+    TestOptions testOptions = _testOptions[directory.uri];
+    if (testOptions == null) {
       File optionsFile =
-          new File.fromUri(directory.uri.resolve('link.options'));
-      Set<Uri> content = new Set<Uri>();
+          new File.fromUri(directory.uri.resolve('test.options'));
+      Set<Uri> linkDependencies = new Set<Uri>();
       NnbdMode nnbdMode;
       AllowedExperimentalFlags allowedExperimentalFlags;
       Map<ExperimentalFlag, Version> experimentEnabledVersion;
@@ -472,18 +478,18 @@
               }
               uri = f.uri;
             }
-            content.add(uri);
+            linkDependencies.add(uri);
           }
         }
       }
-      linkDependenciesOptions = new LinkDependenciesOptions(content,
+      testOptions = new TestOptions(linkDependencies,
           nnbdMode: nnbdMode,
           allowedExperimentalFlags: allowedExperimentalFlags,
           experimentEnabledVersion: experimentEnabledVersion,
           experimentReleasedVersion: experimentReleasedVersion);
-      _linkDependencies[directory.uri] = linkDependenciesOptions;
+      _testOptions[directory.uri] = testOptions;
     }
-    return linkDependenciesOptions;
+    return testOptions;
   }
 
   /// Libraries json for [description].
@@ -613,10 +619,11 @@
   bool get isRuntime => true;
 
   Future<Result<int>> run(ComponentResult result, FastaContext context) async {
-    TestOptions testOptions = context.computeTestOptions(result.description);
+    FolderOptions folderOptions =
+        context.computeFolderOptions(result.description);
     Map<ExperimentalFlag, bool> experimentalFlags =
-        testOptions.computeExperimentalFlags(context.experimentalFlags);
-    switch (testOptions.target) {
+        folderOptions.computeExperimentalFlags(context.experimentalFlags);
+    switch (folderOptions.target) {
       case "vm":
         if (context.platformUri == null) {
           throw "Executed `Run` step before initializing the context.";
@@ -642,7 +649,7 @@
         return pass(0);
       default:
         throw new ArgumentError(
-            "Unsupported run target '${testOptions.target}'.");
+            "Unsupported run target '${folderOptions.target}'.");
     }
   }
 }
@@ -666,16 +673,17 @@
 
     Uri librariesSpecificationUri =
         context.computeLibrariesSpecificationUri(description);
-    LinkDependenciesOptions linkDependenciesOptions =
-        context.computeLinkDependenciesOptions(description);
     TestOptions testOptions = context.computeTestOptions(description);
+    FolderOptions folderOptions = context.computeFolderOptions(description);
     Map<ExperimentalFlag, bool> experimentalFlags =
-        testOptions.computeExperimentalFlags(context.experimentalFlags);
+        folderOptions.computeExperimentalFlags(context.experimentalFlags);
     NnbdMode nnbdMode = context.weak ||
             !isExperimentEnabled(ExperimentalFlag.nonNullable,
                 experimentalFlags: experimentalFlags)
         ? NnbdMode.Weak
-        : (testOptions.nnbdAgnosticMode ? NnbdMode.Agnostic : NnbdMode.Strong);
+        : (folderOptions.nnbdAgnosticMode
+            ? NnbdMode.Agnostic
+            : NnbdMode.Strong);
     List<Uri> inputs = <Uri>[description.uri];
 
     ProcessedOptions createProcessedOptions(
@@ -694,9 +702,9 @@
         ..allowedExperimentalFlagsForTesting = allowedExperimentalFlags
         ..experimentEnabledVersionForTesting = experimentEnabledVersion
         ..experimentReleasedVersionForTesting = experimentReleasedVersion;
-      if (testOptions.overwriteCurrentSdkVersion != null) {
+      if (folderOptions.overwriteCurrentSdkVersion != null) {
         compilerOptions.currentSdkVersion =
-            testOptions.overwriteCurrentSdkVersion;
+            folderOptions.overwriteCurrentSdkVersion;
       }
       return new ProcessedOptions(options: compilerOptions, inputs: inputs);
     }
@@ -707,26 +715,26 @@
 
     ProcessedOptions options = createProcessedOptions(
         nnbdMode,
-        linkDependenciesOptions.allowedExperimentalFlags,
-        linkDependenciesOptions.experimentEnabledVersion,
-        linkDependenciesOptions.experimentReleasedVersion);
+        testOptions.allowedExperimentalFlags,
+        testOptions.experimentEnabledVersion,
+        testOptions.experimentReleasedVersion);
 
-    if (linkDependenciesOptions.content.isNotEmpty &&
-        linkDependenciesOptions.component == null) {
+    if (testOptions.linkDependencies.isNotEmpty &&
+        testOptions.component == null) {
       // Compile linked dependency.
       ProcessedOptions linkOptions = options;
-      if (linkDependenciesOptions.nnbdMode != null) {
+      if (testOptions.nnbdMode != null) {
         linkOptions = createProcessedOptions(
-            linkDependenciesOptions.nnbdMode,
-            linkDependenciesOptions.allowedExperimentalFlags,
-            linkDependenciesOptions.experimentEnabledVersion,
-            linkDependenciesOptions.experimentReleasedVersion);
+            testOptions.nnbdMode,
+            testOptions.allowedExperimentalFlags,
+            testOptions.experimentEnabledVersion,
+            testOptions.experimentReleasedVersion);
       }
       await CompilerContext.runWithOptions(linkOptions, (_) async {
         KernelTarget sourceTarget = await outlineInitialization(context,
-            description, testOptions, linkDependenciesOptions.content.toList());
-        if (linkDependenciesOptions.errors != null) {
-          errors.addAll(linkDependenciesOptions.errors);
+            description, folderOptions, testOptions.linkDependencies.toList());
+        if (testOptions.errors != null) {
+          errors.addAll(testOptions.errors);
         }
         Component p = await sourceTarget.buildOutlines();
         if (fullCompile) {
@@ -750,27 +758,27 @@
           }
         }
 
-        linkDependenciesOptions.component = p;
+        testOptions.component = p;
         List<Library> keepLibraries = new List<Library>();
         for (Library lib in p.libraries) {
-          if (linkDependenciesOptions.content.contains(lib.importUri)) {
+          if (testOptions.linkDependencies.contains(lib.importUri)) {
             keepLibraries.add(lib);
           }
         }
         p.libraries.clear();
         p.libraries.addAll(keepLibraries);
-        linkDependenciesOptions.errors = errors.toList();
+        testOptions.errors = errors.toList();
         errors.clear();
       });
     }
 
     return await CompilerContext.runWithOptions(options, (_) async {
-      Component alsoAppend = linkDependenciesOptions.component;
+      Component alsoAppend = testOptions.component;
       if (description.uri.pathSegments.last.endsWith(".no_link.dart")) {
         alsoAppend = null;
       }
       KernelTarget sourceTarget = await outlineInitialization(
-          context, description, testOptions, <Uri>[description.uri],
+          context, description, folderOptions, <Uri>[description.uri],
           alsoAppend: alsoAppend);
       ValidatingInstrumentation instrumentation =
           new ValidatingInstrumentation();
@@ -814,7 +822,7 @@
   Future<KernelTarget> outlineInitialization(
       FastaContext context,
       TestDescription description,
-      TestOptions testOptions,
+      FolderOptions testOptions,
       List<Uri> entryPoints,
       {Component alsoAppend}) async {
     Component platform = await context.loadPlatform();
diff --git a/pkg/front_end/test/fasta/textual_outline_suite.dart b/pkg/front_end/test/fasta/textual_outline_suite.dart
index c622db4..db5dda5 100644
--- a/pkg/front_end/test/fasta/textual_outline_suite.dart
+++ b/pkg/front_end/test/fasta/textual_outline_suite.dart
@@ -9,7 +9,7 @@
 
 import 'package:dart_style/dart_style.dart' show DartFormatter;
 
-import 'package:front_end/src/fasta/util/textual_outline_v2.dart';
+import 'package:front_end/src/fasta/util/textual_outline.dart';
 import 'package:testing/testing.dart'
     show
         Chain,
diff --git a/pkg/front_end/test/spell_checking_list_code.txt b/pkg/front_end/test/spell_checking_list_code.txt
index e6e026a..eb44178 100644
--- a/pkg/front_end/test/spell_checking_list_code.txt
+++ b/pkg/front_end/test/spell_checking_list_code.txt
@@ -150,6 +150,7 @@
 caret
 carets
 carriage
+carring
 cascades
 casing
 cast
@@ -569,6 +570,7 @@
 johnniwinther
 js
 json
+jumps
 juxtaposition
 juxtapositions
 k
@@ -639,6 +641,7 @@
 mc
 md
 me
+meeting
 merely
 meta
 metadata's
@@ -816,6 +819,7 @@
 pragma
 pre
 prebuilt
+preexisting
 preorder
 prev
 prime
diff --git a/pkg/front_end/test/spell_checking_list_tests.txt b/pkg/front_end/test/spell_checking_list_tests.txt
index d39c684..9ab0fd2 100644
--- a/pkg/front_end/test/spell_checking_list_tests.txt
+++ b/pkg/front_end/test/spell_checking_list_tests.txt
@@ -531,6 +531,7 @@
 selection
 sensitive
 services
+setups
 severe
 sheets
 shipped
@@ -566,6 +567,7 @@
 stupid
 subcommand
 subdir
+subfolder
 subtool
 subtools
 subtyping1a
diff --git a/pkg/front_end/test/textual_outline_test.dart b/pkg/front_end/test/textual_outline_test.dart
index 59bf5eb..4a059e8 100644
--- a/pkg/front_end/test/textual_outline_test.dart
+++ b/pkg/front_end/test/textual_outline_test.dart
@@ -1,5 +1,5 @@
 import "dart:convert";
-import "package:front_end/src/fasta/util/textual_outline_v2.dart";
+import "package:front_end/src/fasta/util/textual_outline.dart";
 
 main() {
   // Doesn't sort if not asked to perform modelling.
@@ -177,7 +177,7 @@
   if (result !=
       """
 import "bar.dart";
-import "foo.dart" show B, A, C;
+import "foo.dart" show A, B, C;
 
 main() {}
 
@@ -203,7 +203,7 @@
   if (result !=
       """
 export "bar.dart";
-export "foo.dart" show B, A, C;
+export "foo.dart" show A, B, C;
 
 main() {}
 
@@ -234,9 +234,9 @@
   if (result !=
       """
 export "bar.dart";
-export "foo.dart" show B, A, C;
+export "foo.dart" show A, B, C;
 import "bar.dart";
-import "foo.dart" show B, A, C;
+import "foo.dart" show A, B, C;
 
 main() {}
 
@@ -271,8 +271,6 @@
   }
 
   // Ending metadata (not associated with anything) is still present.
-  // TODO: The extra metadata should actually stay at the bottom as it's not
-  // associated with anything and it will now basically be associated with foo.
   result = textualOutline(utf8.encode("""
 @Object2()
 foo() {
@@ -337,4 +335,89 @@
 class D2 = Super with Mixin;""") {
     throw "Unexpected result: $result";
   }
+
+  // Metadata on imports / exports.
+  result = textualOutline(utf8.encode("""
+@Object1
+export "a3.dart";
+@Object2
+import "a2.dart";
+@Object3
+export "a1.dart";
+@Object4
+import "a0.dart";
+"""),
+      throwOnUnexpected: true,
+      performModelling: true,
+      addMarkerForUnknownForTest: true);
+  if (result !=
+      """
+@Object3
+export "a1.dart";
+
+@Object1
+export "a3.dart";
+
+@Object4
+import "a0.dart";
+
+@Object2
+import "a2.dart";""") {
+    throw "Unexpected result: $result";
+  }
+
+  // Doesn't crash on illegal import/export.
+  // Note that for now a bad import becomes an unknown chunk because it has
+  // 'advanced recovery' via "handleRecoverImport", whereas exports enforce
+  // the structure more strictly.
+  result = textualOutline(utf8.encode("""
+// bad line.
+import "a0.dart" show
+// ok line
+import "a1.dart" show foo;
+// bad line.
+export "a2.dart" show
+// ok line
+export "a3.dart" show foo;
+"""),
+      throwOnUnexpected: true,
+      performModelling: true,
+      addMarkerForUnknownForTest: true);
+  if (result !=
+      """
+---- unknown chunk starts ----
+import "a0.dart" show ;
+---- unknown chunk ends ----
+
+export "a2.dart" show ;
+export "a3.dart" show foo;
+import "a1.dart" show foo;""") {
+    throw "Unexpected result: $result";
+  }
+
+  // Enums.
+  result = textualOutline(utf8.encode("""
+library test;
+
+enum E { v1 }
+final x = E.v1;
+
+main() {
+  x;
+}
+"""),
+      throwOnUnexpected: true,
+      performModelling: true,
+      addMarkerForUnknownForTest: true);
+  if (result !=
+      """
+library test;
+
+enum E { v1 }
+
+final x = E.v1;
+
+main() {}""") {
+    throw "Unexpected result: $result";
+  }
 }
diff --git a/pkg/front_end/testcases/agnostic/test.options b/pkg/front_end/testcases/agnostic/folder.options
similarity index 100%
rename from pkg/front_end/testcases/agnostic/test.options
rename to pkg/front_end/testcases/agnostic/folder.options
diff --git a/pkg/front_end/testcases/extensions/test.options b/pkg/front_end/testcases/extensions/folder.options
similarity index 100%
rename from pkg/front_end/testcases/extensions/test.options
rename to pkg/front_end/testcases/extensions/folder.options
diff --git a/pkg/front_end/testcases/extensions/issue39938/link.options b/pkg/front_end/testcases/extensions/issue39938/test.options
similarity index 100%
rename from pkg/front_end/testcases/extensions/issue39938/link.options
rename to pkg/front_end/testcases/extensions/issue39938/test.options
diff --git a/pkg/front_end/testcases/extensions/issue43218.dart b/pkg/front_end/testcases/extensions/issue43218.dart
new file mode 100644
index 0000000..fa413a6
--- /dev/null
+++ b/pkg/front_end/testcases/extensions/issue43218.dart
@@ -0,0 +1,27 @@
+// Copyright (c) 2020, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+class C {
+  int value;
+  C() : value = 0 {}
+  init() {
+    value = 0;
+  }
+
+  int get id => value;
+  void set id(int v) {
+    this.value = v;
+  }
+}
+
+extension Ext on C {
+  int get id => this.value + 1;
+}
+
+test() {
+  C c = C();
+  Ext(c).id++;
+}
+
+main() {}
diff --git a/pkg/front_end/testcases/extensions/issue43218.dart.outline.expect b/pkg/front_end/testcases/extensions/issue43218.dart.outline.expect
new file mode 100644
index 0000000..193d91e
--- /dev/null
+++ b/pkg/front_end/testcases/extensions/issue43218.dart.outline.expect
@@ -0,0 +1,34 @@
+library;
+import self as self;
+import "dart:core" as core;
+
+class C extends core::Object {
+  field core::int* value;
+  constructor •() → self::C*
+    ;
+  method init() → dynamic
+    ;
+  get id() → core::int*
+    ;
+  set id(core::int* v) → void
+    ;
+  abstract member-signature get _identityHashCode() → core::int*; -> core::Object::_identityHashCode
+  abstract member-signature method _instanceOf(dynamic instantiatorTypeArguments, dynamic functionTypeArguments, dynamic type) → core::bool*; -> core::Object::_instanceOf
+  abstract member-signature method _simpleInstanceOf(dynamic type) → core::bool*; -> core::Object::_simpleInstanceOf
+  abstract member-signature method _simpleInstanceOfTrue(dynamic type) → core::bool*; -> core::Object::_simpleInstanceOfTrue
+  abstract member-signature method _simpleInstanceOfFalse(dynamic type) → core::bool*; -> core::Object::_simpleInstanceOfFalse
+  abstract member-signature operator ==(dynamic other) → core::bool*; -> core::Object::==
+  abstract member-signature get hashCode() → core::int*; -> core::Object::hashCode
+  abstract member-signature method toString() → core::String*; -> core::Object::toString
+  abstract member-signature method noSuchMethod(core::Invocation* invocation) → dynamic; -> core::Object::noSuchMethod
+  abstract member-signature get runtimeType() → core::Type*; -> core::Object::runtimeType
+}
+extension Ext on self::C* {
+  get id = self::Ext|get#id;
+}
+static method Ext|get#id(final self::C* #this) → core::int*
+  ;
+static method test() → dynamic
+  ;
+static method main() → dynamic
+  ;
diff --git a/pkg/front_end/testcases/extensions/issue43218.dart.strong.expect b/pkg/front_end/testcases/extensions/issue43218.dart.strong.expect
new file mode 100644
index 0000000..c6208d5
--- /dev/null
+++ b/pkg/front_end/testcases/extensions/issue43218.dart.strong.expect
@@ -0,0 +1,48 @@
+library;
+//
+// Problems in library:
+//
+// pkg/front_end/testcases/extensions/issue43218.dart:24:10: Error: The setter 'id' isn't defined for the class 'int'.
+// Try correcting the name to the name of an existing setter, or defining a setter or field named 'id'.
+//   Ext(c).id++;
+//          ^^
+//
+import self as self;
+import "dart:core" as core;
+
+class C extends core::Object {
+  field core::int* value;
+  constructor •() → self::C*
+    : self::C::value = 0, super core::Object::•() {}
+  method init() → dynamic {
+    this.{self::C::value} = 0;
+  }
+  get id() → core::int*
+    return this.{self::C::value};
+  set id(core::int* v) → void {
+    this.{self::C::value} = v;
+  }
+  abstract member-signature get _identityHashCode() → core::int*; -> core::Object::_identityHashCode
+  abstract member-signature method _instanceOf(dynamic instantiatorTypeArguments, dynamic functionTypeArguments, dynamic type) → core::bool*; -> core::Object::_instanceOf
+  abstract member-signature method _simpleInstanceOf(dynamic type) → core::bool*; -> core::Object::_simpleInstanceOf
+  abstract member-signature method _simpleInstanceOfTrue(dynamic type) → core::bool*; -> core::Object::_simpleInstanceOfTrue
+  abstract member-signature method _simpleInstanceOfFalse(dynamic type) → core::bool*; -> core::Object::_simpleInstanceOfFalse
+  abstract member-signature operator ==(dynamic other) → core::bool*; -> core::Object::==
+  abstract member-signature get hashCode() → core::int*; -> core::Object::hashCode
+  abstract member-signature method toString() → core::String*; -> core::Object::toString
+  abstract member-signature method noSuchMethod(core::Invocation* invocation) → dynamic; -> core::Object::noSuchMethod
+  abstract member-signature get runtimeType() → core::Type*; -> core::Object::runtimeType
+}
+extension Ext on self::C* {
+  get id = self::Ext|get#id;
+}
+static method Ext|get#id(final self::C* #this) → core::int*
+  return #this.{self::C::value}.{core::num::+}(1);
+static method test() → dynamic {
+  self::C* c = new self::C::•();
+  let final self::C* #t1 = c in invalid-expression "pkg/front_end/testcases/extensions/issue43218.dart:24:10: Error: The setter 'id' isn't defined for the class 'int'.
+Try correcting the name to the name of an existing setter, or defining a setter or field named 'id'.
+  Ext(c).id++;
+         ^^";
+}
+static method main() → dynamic {}
diff --git a/pkg/front_end/testcases/extensions/issue43218.dart.strong.transformed.expect b/pkg/front_end/testcases/extensions/issue43218.dart.strong.transformed.expect
new file mode 100644
index 0000000..c6208d5
--- /dev/null
+++ b/pkg/front_end/testcases/extensions/issue43218.dart.strong.transformed.expect
@@ -0,0 +1,48 @@
+library;
+//
+// Problems in library:
+//
+// pkg/front_end/testcases/extensions/issue43218.dart:24:10: Error: The setter 'id' isn't defined for the class 'int'.
+// Try correcting the name to the name of an existing setter, or defining a setter or field named 'id'.
+//   Ext(c).id++;
+//          ^^
+//
+import self as self;
+import "dart:core" as core;
+
+class C extends core::Object {
+  field core::int* value;
+  constructor •() → self::C*
+    : self::C::value = 0, super core::Object::•() {}
+  method init() → dynamic {
+    this.{self::C::value} = 0;
+  }
+  get id() → core::int*
+    return this.{self::C::value};
+  set id(core::int* v) → void {
+    this.{self::C::value} = v;
+  }
+  abstract member-signature get _identityHashCode() → core::int*; -> core::Object::_identityHashCode
+  abstract member-signature method _instanceOf(dynamic instantiatorTypeArguments, dynamic functionTypeArguments, dynamic type) → core::bool*; -> core::Object::_instanceOf
+  abstract member-signature method _simpleInstanceOf(dynamic type) → core::bool*; -> core::Object::_simpleInstanceOf
+  abstract member-signature method _simpleInstanceOfTrue(dynamic type) → core::bool*; -> core::Object::_simpleInstanceOfTrue
+  abstract member-signature method _simpleInstanceOfFalse(dynamic type) → core::bool*; -> core::Object::_simpleInstanceOfFalse
+  abstract member-signature operator ==(dynamic other) → core::bool*; -> core::Object::==
+  abstract member-signature get hashCode() → core::int*; -> core::Object::hashCode
+  abstract member-signature method toString() → core::String*; -> core::Object::toString
+  abstract member-signature method noSuchMethod(core::Invocation* invocation) → dynamic; -> core::Object::noSuchMethod
+  abstract member-signature get runtimeType() → core::Type*; -> core::Object::runtimeType
+}
+extension Ext on self::C* {
+  get id = self::Ext|get#id;
+}
+static method Ext|get#id(final self::C* #this) → core::int*
+  return #this.{self::C::value}.{core::num::+}(1);
+static method test() → dynamic {
+  self::C* c = new self::C::•();
+  let final self::C* #t1 = c in invalid-expression "pkg/front_end/testcases/extensions/issue43218.dart:24:10: Error: The setter 'id' isn't defined for the class 'int'.
+Try correcting the name to the name of an existing setter, or defining a setter or field named 'id'.
+  Ext(c).id++;
+         ^^";
+}
+static method main() → dynamic {}
diff --git a/pkg/front_end/testcases/extensions/issue43218.dart.textual_outline.expect b/pkg/front_end/testcases/extensions/issue43218.dart.textual_outline.expect
new file mode 100644
index 0000000..4eec6f8
--- /dev/null
+++ b/pkg/front_end/testcases/extensions/issue43218.dart.textual_outline.expect
@@ -0,0 +1,11 @@
+class C {
+  int value;
+  C() : value = 0 {}
+  init() {}
+  int get id => value;
+  void set id(int v) {}
+}
+extension Ext ;
+on C (){}
+test() {}
+main() {}
diff --git a/pkg/front_end/testcases/general/experiment_release_version/link.options b/pkg/front_end/testcases/general/experiment_release_version/test.options
similarity index 100%
rename from pkg/front_end/testcases/general/experiment_release_version/link.options
rename to pkg/front_end/testcases/general/experiment_release_version/test.options
diff --git a/pkg/front_end/testcases/general/issue41210b/link.options b/pkg/front_end/testcases/general/issue41210b/test.options
similarity index 100%
rename from pkg/front_end/testcases/general/issue41210b/link.options
rename to pkg/front_end/testcases/general/issue41210b/test.options
diff --git a/pkg/front_end/testcases/general/with_dependencies/abstract_members_from_dill/link.options b/pkg/front_end/testcases/general/with_dependencies/abstract_members_from_dill/test.options
similarity index 100%
rename from pkg/front_end/testcases/general/with_dependencies/abstract_members_from_dill/link.options
rename to pkg/front_end/testcases/general/with_dependencies/abstract_members_from_dill/test.options
diff --git a/pkg/front_end/testcases/general/with_dependencies/extension_from_dill/link.options b/pkg/front_end/testcases/general/with_dependencies/extension_from_dill/test.options
similarity index 100%
rename from pkg/front_end/testcases/general/with_dependencies/extension_from_dill/link.options
rename to pkg/front_end/testcases/general/with_dependencies/extension_from_dill/test.options
diff --git a/pkg/front_end/testcases/general/with_dependencies/issue_43084/link.options b/pkg/front_end/testcases/general/with_dependencies/issue_43084/test.options
similarity index 100%
rename from pkg/front_end/testcases/general/with_dependencies/issue_43084/link.options
rename to pkg/front_end/testcases/general/with_dependencies/issue_43084/test.options
diff --git a/pkg/front_end/testcases/general/with_dependencies/issue_43084_2/link.options b/pkg/front_end/testcases/general/with_dependencies/issue_43084_2/test.options
similarity index 100%
rename from pkg/front_end/testcases/general/with_dependencies/issue_43084_2/link.options
rename to pkg/front_end/testcases/general/with_dependencies/issue_43084_2/test.options
diff --git a/pkg/front_end/testcases/general/with_dependencies/mixin_from_dill/link.options b/pkg/front_end/testcases/general/with_dependencies/mixin_from_dill/test.options
similarity index 100%
rename from pkg/front_end/testcases/general/with_dependencies/mixin_from_dill/link.options
rename to pkg/front_end/testcases/general/with_dependencies/mixin_from_dill/test.options
diff --git a/pkg/front_end/testcases/general/with_dependencies/stub_or_not/link.options b/pkg/front_end/testcases/general/with_dependencies/stub_or_not/test.options
similarity index 100%
rename from pkg/front_end/testcases/general/with_dependencies/stub_or_not/link.options
rename to pkg/front_end/testcases/general/with_dependencies/stub_or_not/test.options
diff --git a/pkg/front_end/testcases/general/with_dependencies/variance_from_dill/link.options b/pkg/front_end/testcases/general/with_dependencies/variance_from_dill/test.options
similarity index 100%
rename from pkg/front_end/testcases/general/with_dependencies/variance_from_dill/link.options
rename to pkg/front_end/testcases/general/with_dependencies/variance_from_dill/test.options
diff --git a/pkg/front_end/testcases/general_nnbd_opt_out/test.options b/pkg/front_end/testcases/general_nnbd_opt_out/folder.options
similarity index 100%
rename from pkg/front_end/testcases/general_nnbd_opt_out/test.options
rename to pkg/front_end/testcases/general_nnbd_opt_out/folder.options
diff --git a/pkg/front_end/testcases/implicit_getter_calls/test.options b/pkg/front_end/testcases/implicit_getter_calls/folder.options
similarity index 100%
rename from pkg/front_end/testcases/implicit_getter_calls/test.options
rename to pkg/front_end/testcases/implicit_getter_calls/folder.options
diff --git a/pkg/front_end/testcases/inference/infer_statics_transitively3.dart.textual_outline_modelled.expect b/pkg/front_end/testcases/inference/infer_statics_transitively3.dart.textual_outline_modelled.expect
index 841c0e4..d1e56fa 100644
--- a/pkg/front_end/testcases/inference/infer_statics_transitively3.dart.textual_outline_modelled.expect
+++ b/pkg/front_end/testcases/inference/infer_statics_transitively3.dart.textual_outline_modelled.expect
@@ -1,7 +1,7 @@
 library test;
 
-import 'infer_statics_transitively3_a.dart' as p show a2, A;
-import 'infer_statics_transitively3_a.dart' show a1, A;
+import 'infer_statics_transitively3_a.dart' as p show A, a2;
+import 'infer_statics_transitively3_a.dart' show A, a1;
 
 const t1 = 1;
 const t2 = t1;
diff --git a/pkg/front_end/testcases/late_lowering/test.options b/pkg/front_end/testcases/late_lowering/folder.options
similarity index 100%
rename from pkg/front_end/testcases/late_lowering/test.options
rename to pkg/front_end/testcases/late_lowering/folder.options
diff --git a/pkg/front_end/testcases/late_lowering/issue41436c/link.options b/pkg/front_end/testcases/late_lowering/issue41436c/test.options
similarity index 100%
rename from pkg/front_end/testcases/late_lowering/issue41436c/link.options
rename to pkg/front_end/testcases/late_lowering/issue41436c/test.options
diff --git a/pkg/front_end/testcases/nnbd/test.options b/pkg/front_end/testcases/nnbd/folder.options
similarity index 100%
rename from pkg/front_end/testcases/nnbd/test.options
rename to pkg/front_end/testcases/nnbd/folder.options
diff --git a/pkg/front_end/testcases/nnbd/from_agnostic/link.options b/pkg/front_end/testcases/nnbd/from_agnostic/test.options
similarity index 100%
rename from pkg/front_end/testcases/nnbd/from_agnostic/link.options
rename to pkg/front_end/testcases/nnbd/from_agnostic/test.options
diff --git a/pkg/front_end/testcases/nnbd/strong_lib_not_ok_from_dill/link.options b/pkg/front_end/testcases/nnbd/strong_lib_not_ok_from_dill/test.options
similarity index 100%
rename from pkg/front_end/testcases/nnbd/strong_lib_not_ok_from_dill/link.options
rename to pkg/front_end/testcases/nnbd/strong_lib_not_ok_from_dill/test.options
diff --git a/pkg/front_end/testcases/nnbd/strong_ok_from_dill/link.options b/pkg/front_end/testcases/nnbd/strong_ok_from_dill/test.options
similarity index 100%
rename from pkg/front_end/testcases/nnbd/strong_ok_from_dill/link.options
rename to pkg/front_end/testcases/nnbd/strong_ok_from_dill/test.options
diff --git a/pkg/front_end/testcases/nnbd/strong_package_not_ok_from_dill/link.options b/pkg/front_end/testcases/nnbd/strong_package_not_ok_from_dill/test.options
similarity index 100%
rename from pkg/front_end/testcases/nnbd/strong_package_not_ok_from_dill/link.options
rename to pkg/front_end/testcases/nnbd/strong_package_not_ok_from_dill/test.options
diff --git a/pkg/front_end/testcases/nnbd_mixed/experiment_release_version/link.options b/pkg/front_end/testcases/nnbd_mixed/experiment_release_version/test.options
similarity index 100%
rename from pkg/front_end/testcases/nnbd_mixed/experiment_release_version/link.options
rename to pkg/front_end/testcases/nnbd_mixed/experiment_release_version/test.options
diff --git a/pkg/front_end/testcases/nnbd_mixed/test.options b/pkg/front_end/testcases/nnbd_mixed/folder.options
similarity index 100%
rename from pkg/front_end/testcases/nnbd_mixed/test.options
rename to pkg/front_end/testcases/nnbd_mixed/folder.options
diff --git a/pkg/front_end/testcases/nnbd_mixed/issue40512/link.options b/pkg/front_end/testcases/nnbd_mixed/issue40512/test.options
similarity index 100%
rename from pkg/front_end/testcases/nnbd_mixed/issue40512/link.options
rename to pkg/front_end/testcases/nnbd_mixed/issue40512/test.options
diff --git a/pkg/front_end/testcases/nnbd_mixed/issue41210a/link.options b/pkg/front_end/testcases/nnbd_mixed/issue41210a/test.options
similarity index 100%
rename from pkg/front_end/testcases/nnbd_mixed/issue41210a/link.options
rename to pkg/front_end/testcases/nnbd_mixed/issue41210a/test.options
diff --git a/pkg/front_end/testcases/nnbd_mixed/issue42387/link.options b/pkg/front_end/testcases/nnbd_mixed/issue42387/test.options
similarity index 100%
rename from pkg/front_end/testcases/nnbd_mixed/issue42387/link.options
rename to pkg/front_end/testcases/nnbd_mixed/issue42387/test.options
diff --git a/pkg/front_end/testcases/nonfunction_type_aliases/test.options b/pkg/front_end/testcases/nonfunction_type_aliases/folder.options
similarity index 100%
rename from pkg/front_end/testcases/nonfunction_type_aliases/test.options
rename to pkg/front_end/testcases/nonfunction_type_aliases/folder.options
diff --git a/pkg/front_end/testcases/nonfunction_type_aliases_no_nnbd/test.options b/pkg/front_end/testcases/nonfunction_type_aliases_no_nnbd/folder.options
similarity index 100%
rename from pkg/front_end/testcases/nonfunction_type_aliases_no_nnbd/test.options
rename to pkg/front_end/testcases/nonfunction_type_aliases_no_nnbd/folder.options
diff --git a/pkg/front_end/testcases/nonfunction_type_aliases_no_nnbd/issue_43084/link.options b/pkg/front_end/testcases/nonfunction_type_aliases_no_nnbd/issue_43084/test.options
similarity index 100%
rename from pkg/front_end/testcases/nonfunction_type_aliases_no_nnbd/issue_43084/link.options
rename to pkg/front_end/testcases/nonfunction_type_aliases_no_nnbd/issue_43084/test.options
diff --git a/pkg/front_end/testcases/textual_outline.status b/pkg/front_end/testcases/textual_outline.status
index 6159e50..b36cbe2 100644
--- a/pkg/front_end/testcases/textual_outline.status
+++ b/pkg/front_end/testcases/textual_outline.status
@@ -68,6 +68,7 @@
 extensions/issue40596: FormatterCrash
 extensions/issue40713: FormatterCrash
 extensions/issue40816: FormatterCrash
+extensions/issue43218: FormatterCrash
 extensions/missing_toplevel: FormatterCrash
 extensions/nested_on_types: FormatterCrash
 extensions/null_aware: FormatterCrash
diff --git a/pkg/front_end/testcases/triple_shift/test.options b/pkg/front_end/testcases/triple_shift/folder.options
similarity index 100%
rename from pkg/front_end/testcases/triple_shift/test.options
rename to pkg/front_end/testcases/triple_shift/folder.options
diff --git a/pkg/front_end/testcases/value_class/test.options b/pkg/front_end/testcases/value_class/folder.options
similarity index 100%
rename from pkg/front_end/testcases/value_class/test.options
rename to pkg/front_end/testcases/value_class/folder.options
diff --git a/pkg/front_end/testcases/variance/test.options b/pkg/front_end/testcases/variance/folder.options
similarity index 100%
rename from pkg/front_end/testcases/variance/test.options
rename to pkg/front_end/testcases/variance/folder.options
diff --git a/runtime/vm/compiler/backend/block_scheduler.cc b/runtime/vm/compiler/backend/block_scheduler.cc
index 9807a19..ccc10b1 100644
--- a/runtime/vm/compiler/backend/block_scheduler.cc
+++ b/runtime/vm/compiler/backend/block_scheduler.cc
@@ -25,26 +25,23 @@
                           BlockEntryInstr* successor,
                           const Array& edge_counters,
                           intptr_t entry_count) {
-  TargetEntryInstr* target = successor->AsTargetEntry();
-  if (target != NULL) {
+  ASSERT(entry_count != 0);
+  if (auto target = successor->AsTargetEntry()) {
     // If this block ends in a goto, the edge count of this edge is the same
     // as the count on the single outgoing edge. This is true as long as the
     // block does not throw an exception.
     intptr_t count = GetEdgeCount(edge_counters, target->preorder_number());
-    if ((count >= 0) && (entry_count != 0)) {
+    if (count >= 0) {
       double weight =
           static_cast<double>(count) / static_cast<double>(entry_count);
       target->set_edge_weight(weight);
     }
-  } else {
-    GotoInstr* jump = block->last_instruction()->AsGoto();
-    if (jump != NULL) {
-      intptr_t count = GetEdgeCount(edge_counters, block->preorder_number());
-      if ((count >= 0) && (entry_count != 0)) {
-        double weight =
-            static_cast<double>(count) / static_cast<double>(entry_count);
-        jump->set_edge_weight(weight);
-      }
+  } else if (auto jump = block->last_instruction()->AsGoto()) {
+    intptr_t count = GetEdgeCount(edge_counters, block->preorder_number());
+    if (count >= 0) {
+      double weight =
+          static_cast<double>(count) / static_cast<double>(entry_count);
+      jump->set_edge_weight(weight);
     }
   }
 }
@@ -82,6 +79,9 @@
   const intptr_t entry_count =
       GetEdgeCount(edge_counters, entry->preorder_number());
   graph_entry->set_entry_count(entry_count);
+  if (entry_count == 0) {
+    return;  // Nothing to do.
+  }
 
   for (BlockIterator it = flow_graph->reverse_postorder_iterator(); !it.Done();
        it.Advance()) {
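
The block_scheduler.cc hunks above move the zero check out of the per-edge logic: the pass now returns early once when the function has no recorded entries, and each edge then only checks its own counter before dividing. A minimal standalone sketch of that shape (hypothetical names and flattened types, not the VM's actual flow-graph classes):

    #include <stdint.h>
    #include <stdio.h>

    // Hypothetical stand-in for the per-edge weight computation: the caller has
    // already bailed out when entry_count == 0, so the division is safe and only
    // the "counter missing" case (a negative count) is checked per edge.
    static void AssignWeights(const intptr_t* edge_counts, intptr_t num_edges,
                              intptr_t entry_count, double* weights) {
      if (entry_count == 0) {
        return;  // Nothing to do: no profile to scale by.
      }
      for (intptr_t i = 0; i < num_edges; i++) {
        const intptr_t count = edge_counts[i];
        if (count >= 0) {
          weights[i] =
              static_cast<double>(count) / static_cast<double>(entry_count);
        }
      }
    }

    int main() {
      const intptr_t counts[] = {30, -1, 70};
      double weights[] = {0.0, 0.0, 0.0};
      AssignWeights(counts, 3, 100, weights);
      for (int i = 0; i < 3; i++) printf("edge %d: %.2f\n", i, weights[i]);
      return 0;
    }
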
diff --git a/runtime/vm/compiler/backend/il.h b/runtime/vm/compiler/backend/il.h
index df2ec7f..2d73e68 100644
--- a/runtime/vm/compiler/backend/il.h
+++ b/runtime/vm/compiler/backend/il.h
@@ -1253,6 +1253,15 @@
  public:
   MoveOperands(Location dest, Location src) : dest_(dest), src_(src) {}
 
+  MoveOperands(const MoveOperands& other)
+      : dest_(other.dest_), src_(other.src_) {}
+
+  MoveOperands& operator=(const MoveOperands& other) {
+    dest_ = other.dest_;
+    src_ = other.src_;
+    return *this;
+  }
+
   Location src() const { return src_; }
   Location dest() const { return dest_; }
 
@@ -1302,8 +1311,6 @@
  private:
   Location dest_;
   Location src_;
-
-  DISALLOW_COPY_AND_ASSIGN(MoveOperands);
 };
 
 class ParallelMoveInstr : public TemplateInstruction<0, NoThrow> {
@@ -1319,6 +1326,8 @@
     return false;
   }
 
+  const GrowableArray<MoveOperands*>& moves() const { return moves_; }
+
   MoveOperands* AddMove(Location dest, Location src) {
     MoveOperands* move = new MoveOperands(dest, src);
     moves_.Add(move);
diff --git a/runtime/vm/compiler/backend/linearscan.cc b/runtime/vm/compiler/backend/linearscan.cc
index 499d169..ecdac22 100644
--- a/runtime/vm/compiler/backend/linearscan.cc
+++ b/runtime/vm/compiler/backend/linearscan.cc
@@ -2729,72 +2729,36 @@
   return GetLiveRange(range->vreg())->spill_slot().Equals(target);
 }
 
-void FlowGraphAllocator::ConnectSplitSiblings(LiveRange* parent,
-                                              BlockEntryInstr* source_block,
-                                              BlockEntryInstr* target_block) {
-  TRACE_ALLOC(THR_Print("Connect v%" Pd " on the edge B%" Pd " -> B%" Pd "\n",
-                        parent->vreg(), source_block->block_id(),
-                        target_block->block_id()));
-  if (parent->next_sibling() == NULL) {
-    // Nothing to connect. The whole range was allocated to the same location.
-    TRACE_ALLOC(THR_Print("range v%" Pd " has no siblings\n", parent->vreg()));
-    return;
-  }
-
-  const intptr_t source_pos = source_block->end_pos() - 1;
-  ASSERT(IsInstructionEndPosition(source_pos));
-
-  const intptr_t target_pos = target_block->start_pos();
-
-  Location target;
-  Location source;
-
-#if defined(INCLUDE_LINEAR_SCAN_TRACING_CODE)
-  LiveRange* source_cover = NULL;
-  LiveRange* target_cover = NULL;
-#endif
-
-  LiveRange* range = parent;
-  while ((range != NULL) && (source.IsInvalid() || target.IsInvalid())) {
-    if (range->CanCover(source_pos)) {
-      ASSERT(source.IsInvalid());
-      source = range->assigned_location();
-#if defined(INCLUDE_LINEAR_SCAN_TRACING_CODE)
-      source_cover = range;
-#endif
+static LiveRange* FindCover(LiveRange* parent, intptr_t pos) {
+  for (LiveRange* range = parent; range != nullptr;
+       range = range->next_sibling()) {
+    if (range->CanCover(pos)) {
+      return range;
     }
-    if (range->CanCover(target_pos)) {
-      ASSERT(target.IsInvalid());
-      target = range->assigned_location();
-#if defined(INCLUDE_LINEAR_SCAN_TRACING_CODE)
-      target_cover = range;
-#endif
+  }
+  UNREACHABLE();
+  return nullptr;
+}
+
+static bool AreLocationsAllTheSame(const GrowableArray<Location>& locs) {
+  for (intptr_t j = 1; j < locs.length(); j++) {
+    if (!locs[j].Equals(locs[0])) {
+      return false;
     }
-
-    range = range->next_sibling();
   }
+  return true;
+}
 
-  TRACE_ALLOC(THR_Print("connecting v%" Pd " between [%" Pd ", %" Pd ") {%s} "
-                        "to [%" Pd ", %" Pd ") {%s}\n",
-                        parent->vreg(), source_cover->Start(),
-                        source_cover->End(), source.Name(),
-                        target_cover->Start(), target_cover->End(),
-                        target.Name()));
-
-  // Siblings were allocated to the same register.
-  if (source.Equals(target)) return;
-
-  // Values are eagerly spilled. Spill slot already contains appropriate value.
-  if (TargetLocationIsSpillSlot(parent, target)) {
-    return;
-  }
-
-  Instruction* last = source_block->last_instruction();
-  if ((last->SuccessorCount() == 1) && !source_block->IsGraphEntry()) {
+// Emit a move on the edge from |pred| to |succ|.
+static void EmitMoveOnEdge(BlockEntryInstr* succ,
+                           BlockEntryInstr* pred,
+                           MoveOperands move) {
+  Instruction* last = pred->last_instruction();
+  if ((last->SuccessorCount() == 1) && !pred->IsGraphEntry()) {
     ASSERT(last->IsGoto());
-    last->AsGoto()->GetParallelMove()->AddMove(target, source);
+    last->AsGoto()->GetParallelMove()->AddMove(move.dest(), move.src());
   } else {
-    target_block->GetParallelMove()->AddMove(target, source);
+    succ->GetParallelMove()->AddMove(move.dest(), move.src());
   }
 }
 
@@ -2805,7 +2769,7 @@
     LiveRange* range = live_ranges_[vreg];
     if (range == NULL) continue;
 
-    while (range->next_sibling() != NULL) {
+    while (range->next_sibling() != nullptr) {
       LiveRange* sibling = range->next_sibling();
       TRACE_ALLOC(THR_Print("connecting [%" Pd ", %" Pd ") [", range->Start(),
                             range->End()));
@@ -2826,14 +2790,145 @@
   }
 
   // Resolve non-linear control flow across branches.
+  // At joins we attempt to sink duplicated moves from the predecessors into
+  // the join itself as long as their source is not blocked by other moves.
+  // Moves which are candidates for sinking are collected in the |pending|
+  // array, and we later compute which of them we can actually emit
+  // (|can_emit|) at the join itself.
+  GrowableArray<Location> src_locs(2);
+  GrowableArray<MoveOperands> pending(10);
+  BitVector* can_emit = new BitVector(flow_graph_.zone(), 10);
   for (intptr_t i = 1; i < block_order_.length(); i++) {
     BlockEntryInstr* block = block_order_[i];
     BitVector* live = liveness_.GetLiveInSet(block);
     for (BitVector::Iterator it(live); !it.Done(); it.Advance()) {
       LiveRange* range = GetLiveRange(it.Current());
-      for (intptr_t j = 0; j < block->PredecessorCount(); j++) {
-        ConnectSplitSiblings(range, block->PredecessorAt(j), block);
+      if (range->next_sibling() == nullptr) {
+        // Nothing to connect. The whole range was allocated to the same
+        // location.
+        TRACE_ALLOC(
+            THR_Print("range v%" Pd " has no siblings\n", range->vreg()));
+        continue;
       }
+
+      LiveRange* dst_cover = FindCover(range, block->start_pos());
+      Location dst = dst_cover->assigned_location();
+
+      TRACE_ALLOC(THR_Print("range v%" Pd
+                            " is allocated to %s on entry to B%" Pd
+                            " covered by [%" Pd ", %" Pd ")\n",
+                            range->vreg(), dst.ToCString(), block->block_id(),
+                            dst_cover->Start(), dst_cover->End()));
+
+      if (TargetLocationIsSpillSlot(range, dst)) {
+        // Values are eagerly spilled. Spill slot already contains appropriate
+        // value.
+        TRACE_ALLOC(
+            THR_Print("  [no resolution necessary - range is spilled]\n"));
+        continue;
+      }
+
+      src_locs.Clear();
+      for (intptr_t j = 0; j < block->PredecessorCount(); j++) {
+        BlockEntryInstr* pred = block->PredecessorAt(j);
+        LiveRange* src_cover = FindCover(range, pred->end_pos() - 1);
+        Location src = src_cover->assigned_location();
+        src_locs.Add(src);
+
+        TRACE_ALLOC(THR_Print("| incoming value in %s on exit from B%" Pd
+                              " covered by [%" Pd ", %" Pd ")\n",
+                              src.ToCString(), pred->block_id(),
+                              src_cover->Start(), src_cover->End()));
+      }
+
+      // Check if all source locations are the same for the range. Then
+      // we can try to emit a single move at the destination if we can
+      // guarantee that the source location is available on all incoming
+      // edges (i.e. it is not destroyed by some other move).
+      if ((src_locs.length() > 1) && AreLocationsAllTheSame(src_locs)) {
+        if (!dst.Equals(src_locs[0])) {
+          // We have a non-redundant move which can potentially be performed
+          // at the start of the block; however, we can only check whether
+          // the source location is alive on all incoming edges once we have
+          // finished processing all live-in values.
+          pending.Add(MoveOperands(dst, src_locs[0]));
+        }
+
+        // Next incoming value.
+        continue;
+      }
+
+      for (intptr_t j = 0; j < block->PredecessorCount(); j++) {
+        if (dst.Equals(src_locs[j])) {
+          // Redundant move.
+          continue;
+        }
+
+        EmitMoveOnEdge(block, block->PredecessorAt(j), {dst, src_locs[j]});
+      }
+    }
+
+    // For each pending move we need to check if it can be emitted into the
+    // destination block (the prerequisite is that the predecessors do not
+    // destroy the value via moves on their Gotos).
+    if (pending.length() > 0) {
+      if (can_emit->length() < pending.length()) {
+        can_emit = new BitVector(flow_graph_.zone(), pending.length());
+      }
+      can_emit->SetAll();
+
+      // Set to |true| when we discover more blocked pending moves and
+      // need to make another pass over the pending moves to propagate that.
+      bool changed = false;
+
+      // Process all pending moves and check if any move in the predecessors
+      // blocks them by overwriting their source.
+      for (intptr_t j = 0; j < pending.length(); j++) {
+        Location src = pending[j].src();
+        for (intptr_t p = 0; p < block->PredecessorCount(); p++) {
+          BlockEntryInstr* pred = block->PredecessorAt(p);
+          for (auto move :
+               pred->last_instruction()->AsGoto()->GetParallelMove()->moves()) {
+            if (!move->IsRedundant() && move->dest().Equals(src)) {
+              can_emit->Remove(j);
+              changed = true;
+              break;
+            }
+          }
+        }
+      }
+
+      // Check if newly discovered blocked moves block any other pending moves.
+      while (changed) {
+        changed = false;
+        for (intptr_t j = 0; j < pending.length(); j++) {
+          if (can_emit->Contains(j)) {
+            for (intptr_t k = 0; k < pending.length(); k++) {
+              if (!can_emit->Contains(k) &&
+                  pending[k].dest().Equals(pending[j].src())) {
+                can_emit->Remove(j);
+                changed = true;
+                break;
+              }
+            }
+          }
+        }
+      }
+
+      // Emit pending moves either in the successor block or in predecessors
+      // (if they are blocked).
+      for (intptr_t j = 0; j < pending.length(); j++) {
+        const auto& move = pending[j];
+        if (can_emit->Contains(j)) {
+          block->GetParallelMove()->AddMove(move.dest(), move.src());
+        } else {
+          for (intptr_t p = 0; p < block->PredecessorCount(); p++) {
+            EmitMoveOnEdge(block, block->PredecessorAt(p), move);
+          }
+        }
+      }
+
+      pending.Clear();
     }
   }
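
The rewritten resolution loop above replaces ConnectSplitSiblings: for each live-in value it looks up the location on entry to the join and on exit from every predecessor, and when all predecessors agree it queues a single move in |pending| to be sunk into the join, unless that shared source is clobbered, in which case the moves stay on the edges. The blocking computation over |pending|/|can_emit| can be sketched standalone; the types and register names below are hypothetical simplifications rather than the allocator's Location/MoveOperands classes, and the redundant-move filtering is omitted:

    #include <stdio.h>
    #include <string>
    #include <vector>

    // Hypothetical, simplified stand-ins for Location and MoveOperands: a
    // location is just a register name, and a move copies src into dest.
    struct Move {
      std::string dest;
      std::string src;
    };

    // Decide which pending moves can be emitted at the join itself. A pending
    // move is blocked when a move already sitting on a predecessor edge, or
    // another blocked pending move, overwrites its source.
    static std::vector<bool> ComputeCanEmit(
        const std::vector<std::vector<Move> >& edge_moves,
        const std::vector<Move>& pending) {
      std::vector<bool> can_emit(pending.size(), true);
      // First pass: an edge move clobbering a pending move's source blocks it.
      for (size_t j = 0; j < pending.size(); j++) {
        for (const auto& edge : edge_moves) {
          for (const auto& m : edge) {
            if (m.dest == pending[j].src) can_emit[j] = false;
          }
        }
      }
      // Propagate: a blocked pending move will itself be emitted on the edges,
      // so it clobbers the sources of other pending moves that read its dest.
      bool changed = true;
      while (changed) {
        changed = false;
        for (size_t j = 0; j < pending.size(); j++) {
          if (!can_emit[j]) continue;
          for (size_t k = 0; k < pending.size(); k++) {
            if (!can_emit[k] && pending[k].dest == pending[j].src) {
              can_emit[j] = false;
              changed = true;
            }
          }
        }
      }
      return can_emit;
    }

    int main() {
      // A join with two predecessors; both already move r1 -> r2 on their edge.
      std::vector<std::vector<Move> > edge_moves = {{{"r2", "r1"}},
                                                    {{"r2", "r1"}}};
      // We would like to sink r1 -> r3 and r2 -> r4 into the join itself.
      std::vector<Move> pending = {{"r3", "r1"}, {"r4", "r2"}};
      std::vector<bool> can_emit = ComputeCanEmit(edge_moves, pending);
      for (size_t j = 0; j < pending.size(); j++) {
        printf("%s <- %s : %s\n", pending[j].dest.c_str(),
               pending[j].src.c_str(),
               can_emit[j] ? "emit at join" : "emit on edges");
      }
      return 0;
    }

In this example the move r3 <- r1 can be emitted at the join, while r4 <- r2 has to stay on the edges because both predecessors already overwrite r2 in their Goto moves.
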
 
diff --git a/runtime/vm/compiler/backend/linearscan.h b/runtime/vm/compiler/backend/linearscan.h
index 740e489..9843b0b 100644
--- a/runtime/vm/compiler/backend/linearscan.h
+++ b/runtime/vm/compiler/backend/linearscan.h
@@ -174,9 +174,6 @@
 
   // Connect split siblings over non-linear control flow edges.
   void ResolveControlFlow();
-  void ConnectSplitSiblings(LiveRange* range,
-                            BlockEntryInstr* source_block,
-                            BlockEntryInstr* target_block);
 
   // Returns true if the target location is the spill slot for the given range.
   bool TargetLocationIsSpillSlot(LiveRange* range, Location target);
diff --git a/runtime/vm/compiler/frontend/flow_graph_builder.cc b/runtime/vm/compiler/frontend/flow_graph_builder.cc
index 95fc854..e09620b 100644
--- a/runtime/vm/compiler/frontend/flow_graph_builder.cc
+++ b/runtime/vm/compiler/frontend/flow_graph_builder.cc
@@ -52,9 +52,12 @@
   ASSERT(call_->deopt_id() != DeoptId::kNone);
   const intptr_t outer_deopt_id = call_->deopt_id();
   // Scale the edge weights by the call count for the inlined function.
-  double scale_factor =
-      static_cast<double>(call_->CallCount()) /
-      static_cast<double>(caller_graph_->graph_entry()->entry_count());
+  double scale_factor = 1.0;
+  if (caller_graph_->graph_entry()->entry_count() != 0) {
+    scale_factor =
+        static_cast<double>(call_->CallCount()) /
+        static_cast<double>(caller_graph_->graph_entry()->entry_count());
+  }
   for (BlockIterator block_it = callee_graph->postorder_iterator();
        !block_it.Done(); block_it.Advance()) {
     BlockEntryInstr* block = block_it.Current();
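
The flow_graph_builder.cc change above applies the same zero-entry guard to inlined edge weights, defaulting the scale factor to 1.0 when the caller graph has no recorded entries. A tiny sketch of that computation (hypothetical helper name, simplified types):

    #include <stdint.h>
    #include <stdio.h>

    // Hypothetical sketch of the guarded scale-factor computation: when the
    // caller's entry count is zero (no usable profile), fall back to 1.0
    // instead of dividing by zero.
    static double InlineScaleFactor(intptr_t call_count,
                                    intptr_t caller_entry_count) {
      if (caller_entry_count == 0) return 1.0;
      return static_cast<double>(call_count) /
             static_cast<double>(caller_entry_count);
    }

    int main() {
      printf("%.2f\n", InlineScaleFactor(5, 20));  // 0.25
      printf("%.2f\n", InlineScaleFactor(5, 0));   // 1.00 (guarded)
      return 0;
    }
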
diff --git a/tools/VERSION b/tools/VERSION
index f7620a6..21f041f 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
 MAJOR 2
 MINOR 10
 PATCH 0
-PRERELEASE 73
+PRERELEASE 74
 PRERELEASE_PATCH 0
\ No newline at end of file