Version 2.16.0-117.0.dev

Merge commit '295aea1eda54a328ff68fe1c38c74fb0a9028c52' into 'dev'
diff --git a/BUILD.gn b/BUILD.gn
index ad09ed8..338f5ec 100644
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -50,6 +50,16 @@
     "utils/kernel-service:kernel-service",
   ]
 
+  # We do not support AOT on ia32 and therefore cannot provide native
+  # snapshot tooling.
+  if (dart_target_arch != "ia32") {
+    if (is_product) {
+      deps += [ "runtime/bin:analyze_snapshot_product" ]
+    } else {
+      deps += [ "runtime/bin:analyze_snapshot" ]
+    }
+  }
+
   # TODO(bkonyi): this dep causes a segfault on Android XARM_X64 builds.
   # See issue #41776.
   if (dart_target_arch != "arm") {
diff --git a/DEPS b/DEPS
index 2b0e05f..103dfce 100644
--- a/DEPS
+++ b/DEPS
@@ -39,7 +39,7 @@
 
   # Checked-in SDK version. The checked-in SDK is a Dart SDK distribution in a
   # cipd package used to run Dart scripts in the build and test infrastructure.
-  "sdk_tag": "version:2.15.0-268.8.beta",
+  "sdk_tag": "version:2.15.1",
 
   # co19 is a cipd package. Use update.sh in tests/co19[_2] to update these
   # hashes. It requires access to the dart-build-access group, which EngProd
diff --git a/pkg/analysis_server/lib/src/lsp/handlers/handler_completion.dart b/pkg/analysis_server/lib/src/lsp/handlers/handler_completion.dart
index 10e4cf9..d2bd74d 100644
--- a/pkg/analysis_server/lib/src/lsp/handlers/handler_completion.dart
+++ b/pkg/analysis_server/lib/src/lsp/handlers/handler_completion.dart
@@ -65,8 +65,9 @@
     final unit = await path.mapResult(requireResolvedUnit);
 
     final lineInfo = await unit.map(
-      // If we don't have a unit, we can still try to obtain the line info for
-      // plugin contributors.
+      // If we don't have a unit, we can still try to obtain the line info from
+      // the server (this could be because the file is non-Dart, such as YAML
+      // or another file type handled by a plugin).
       (error) => path.mapResult(getLineInfo),
       (unit) => success(unit.lineInfo),
     );
diff --git a/pkg/analysis_server/lib/src/lsp/lsp_analysis_server.dart b/pkg/analysis_server/lib/src/lsp/lsp_analysis_server.dart
index 55a9da8..e4e8d17 100644
--- a/pkg/analysis_server/lib/src/lsp/lsp_analysis_server.dart
+++ b/pkg/analysis_server/lib/src/lsp/lsp_analysis_server.dart
@@ -261,12 +261,23 @@
     capabilitiesComputer.performDynamicRegistration();
   }
 
-  /// Return the LineInfo for the file with the given [path]. The file is
-  /// analyzed in one of the analysis drivers to which the file was added,
-  /// otherwise in the first driver, otherwise `null` is returned.
+  /// Return a [LineInfo] for the file with the given [path].
+  ///
+  /// If the file does not exist or cannot be read, returns `null`.
+  ///
+  /// This method supports non-Dart files, but it uses the current content of
+  /// the file, which may not match the most recently analyzed version if the
+  /// file was modified recently, so the lineInfo from an analyzed result may
+  /// be preferable where one is available.
   LineInfo? getLineInfo(String path) {
-    var result = getAnalysisDriver(path)?.getFileSync(path);
-    return result is FileResult ? result.lineInfo : null;
+    try {
+      final content = resourceProvider.getFile(path).readAsStringSync();
+      return LineInfo.fromContent(content);
+    } on FileSystemException {
+      // If the file does not exist or cannot be read, return null to allow
+      // the caller to decide how to handle this.
+      return null;
+    }
   }
 
   /// Gets the version of a document known to the server, returning a
diff --git a/pkg/analysis_server/lib/src/lsp/mapping.dart b/pkg/analysis_server/lib/src/lsp/mapping.dart
index ffabd19..8a26c20 100644
--- a/pkg/analysis_server/lib/src/lsp/mapping.dart
+++ b/pkg/analysis_server/lib/src/lsp/mapping.dart
@@ -23,6 +23,7 @@
 import 'package:analyzer/dart/analysis/results.dart' as server;
 import 'package:analyzer/error/error.dart' as server;
 import 'package:analyzer/source/line_info.dart' as server;
+import 'package:analyzer/source/line_info.dart';
 import 'package:analyzer/source/source_range.dart' as server;
 import 'package:analyzer/src/dart/analysis/search.dart' as server
     show DeclarationKind;
@@ -123,8 +124,11 @@
       edits
           .map((e) => FileEditInformation(
                 server.getVersionedDocumentIdentifier(e.file),
-                // We should never produce edits for a file with no LineInfo.
-                server.getLineInfo(e.file)!,
+                // If we expect to create the file, server.getLineInfo() won't
+                // provide a LineInfo so create one from empty contents.
+                e.fileStamp == -1
+                    ? LineInfo.fromContent('')
+                    : server.getLineInfo(e.file)!,
                 e.edits,
                 // fileStamp == -1 is used by the server to indicate the file needs creating.
                 newFile: e.fileStamp == -1,
diff --git a/pkg/analyzer/lib/src/test_utilities/find_node.dart b/pkg/analyzer/lib/src/test_utilities/find_node.dart
index a248815..2b94f63 100644
--- a/pkg/analyzer/lib/src/test_utilities/find_node.dart
+++ b/pkg/analyzer/lib/src/test_utilities/find_node.dart
@@ -312,6 +312,11 @@
     return _node(search, (n) => n is PropertyAccess);
   }
 
+  RedirectingConstructorInvocation redirectingConstructorInvocation(
+      String search) {
+    return _node(search, (n) => n is RedirectingConstructorInvocation);
+  }
+
   RethrowExpression rethrow_(String search) {
     return _node(search, (n) => n is RethrowExpression);
   }
diff --git a/pkg/analyzer/test/generated/element_resolver_test.dart b/pkg/analyzer/test/generated/element_resolver_test.dart
index 745bc51..f6b0fed 100644
--- a/pkg/analyzer/test/generated/element_resolver_test.dart
+++ b/pkg/analyzer/test/generated/element_resolver_test.dart
@@ -2,32 +2,14 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
-import 'package:analyzer/dart/analysis/features.dart';
 import 'package:analyzer/dart/ast/ast.dart';
-import 'package:analyzer/dart/ast/token.dart';
 import 'package:analyzer/dart/element/element.dart';
-import 'package:analyzer/dart/element/scope.dart';
-import 'package:analyzer/dart/element/type.dart';
-import 'package:analyzer/dart/element/type_provider.dart';
-import 'package:analyzer/src/dart/ast/ast.dart';
-import 'package:analyzer/src/dart/ast/ast_factory.dart';
-import 'package:analyzer/src/dart/element/element.dart';
-import 'package:analyzer/src/dart/element/inheritance_manager3.dart';
-import 'package:analyzer/src/dart/resolver/flow_analysis_visitor.dart';
-import 'package:analyzer/src/generated/resolver.dart';
-import 'package:analyzer/src/generated/source.dart';
-import 'package:analyzer/src/generated/testing/ast_test_factory.dart';
-import 'package:analyzer/src/generated/testing/element_factory.dart';
-import 'package:analyzer/src/source/source_resource.dart';
-import 'package:analyzer/src/test_utilities/resource_provider_mixin.dart';
+import 'package:analyzer/src/dart/error/hint_codes.dart';
 import 'package:test/test.dart';
 import 'package:test_reflective_loader/test_reflective_loader.dart';
 
 import '../src/dart/resolution/context_collection_resolution.dart';
 import '../util/element_type_matchers.dart';
-import 'elements_types_mixin.dart';
-import 'test_analysis_context.dart';
-import 'test_support.dart';
 
 main() {
   defineReflectiveSuite(() {
@@ -36,15 +18,6 @@
   });
 }
 
-/// Wrapper around the test package's `fail` function.
-///
-/// Unlike the test package's `fail` function, this function is not annotated
-/// with @alwaysThrows, so we can call it at the top of a test method without
-/// causing the rest of the method to be flagged as dead code.
-void _fail(String message) {
-  fail(message);
-}
-
 @reflectiveTest
 class AnnotationElementResolverTest extends PubPackageResolutionTest {
   test_class_namedConstructor() async {
@@ -264,553 +237,332 @@
 }
 
 @reflectiveTest
-class ElementResolverTest with ResourceProviderMixin, ElementsTypesMixin {
-  /// The error listener to which errors will be reported.
-  late GatheringErrorListener _listener;
-
-  /// The type provider used to access the types.
-  late TypeProvider _typeProvider;
-
-  /// The library containing the code being resolved.
-  late LibraryElementImpl _definingLibrary;
-
-  /// The compilation unit containing the code being resolved.
-  late CompilationUnitElementImpl _definingCompilationUnit;
-
-  /// The resolver visitor that maintains the state for the resolver.
-  late ResolverVisitor _visitor;
-
-  @override
-  TypeProvider get typeProvider => _typeProvider;
-
-  void fail_visitExportDirective_combinators() {
-    _fail("Not yet tested");
-    // Need to set up the exported library so that the identifier can be
-    // resolved.
-    ExportDirective directive = AstTestFactory.exportDirective2('dart:math', [
-      AstTestFactory.hideCombinator2(["A"])
-    ]);
-    _resolveNode(directive);
-    _listener.assertNoErrors();
-  }
-
-  void fail_visitFunctionExpressionInvocation() {
-    _fail("Not yet tested");
-    _listener.assertNoErrors();
-  }
-
-  void fail_visitImportDirective_combinators_noPrefix() {
-    _fail("Not yet tested");
-    // Need to set up the imported library so that the identifier can be
-    // resolved.
-    ImportDirective directive =
-        AstTestFactory.importDirective3('dart:math', null, [
-      AstTestFactory.showCombinator2(["A"])
-    ]);
-    _resolveNode(directive);
-    _listener.assertNoErrors();
-  }
-
-  void fail_visitImportDirective_combinators_prefix() {
-    _fail("Not yet tested");
-    // Need to set up the imported library so that the identifiers can be
-    // resolved.
-    String prefixName = "p";
-    _definingLibrary.imports = <ImportElement>[
-      ElementFactory.importFor(
-          _LibraryElementMock(), ElementFactory.prefix(prefixName))
-    ];
-    ImportDirective directive =
-        AstTestFactory.importDirective3('dart:math', prefixName, [
-      AstTestFactory.showCombinator2(["A"]),
-      AstTestFactory.hideCombinator2(["B"])
-    ]);
-    _resolveNode(directive);
-    _listener.assertNoErrors();
-  }
-
-  void fail_visitRedirectingConstructorInvocation() {
-    _fail("Not yet tested");
-    _listener.assertNoErrors();
-  }
-
-  void setUp() {
-    _listener = GatheringErrorListener();
-    _createResolver();
-  }
-
-  @FailingTest(issue: 'https://github.com/dart-lang/sdk/issues/44522')
+class ElementResolverTest extends PubPackageResolutionTest {
   test_visitBreakStatement_withLabel() async {
-    // loop: while (true) {
-    //   break loop;
-    // }
-    String label = "loop";
-    LabelElementImpl labelElement = LabelElementImpl(label, -1, false, false);
-    BreakStatement breakStatement = AstTestFactory.breakStatement2(label);
-    Expression condition = AstTestFactory.booleanLiteral(true);
-    WhileStatement whileStatement =
-        AstTestFactory.whileStatement(condition, breakStatement);
-    expect(_resolveBreak(breakStatement, labelElement, whileStatement),
-        same(labelElement));
-    expect(breakStatement.target, same(whileStatement));
-    _listener.assertNoErrors();
+    await assertNoErrorsInCode('''
+test() {
+  loop: while (true) {
+    break loop;
+  }
+}
+''');
+    var breakStatement = findNode.breakStatement('break loop');
+    expect(breakStatement.label!.staticElement, findElement.label('loop'));
+    expect(breakStatement.target, findNode.whileStatement('while (true)'));
   }
 
   test_visitBreakStatement_withoutLabel() async {
-    BreakStatement statement = AstTestFactory.breakStatement();
-    _resolveStatement(statement, null, null);
-    _listener.assertNoErrors();
+    await assertNoErrorsInCode('''
+test() {
+  while (true) {
+    break;
+  }
+}
+''');
+    var breakStatement = findNode.breakStatement('break');
+    expect(breakStatement.target, findNode.whileStatement('while (true)'));
   }
 
   test_visitCommentReference_prefixedIdentifier_class_getter() async {
-    LibraryElementImpl library =
-        ElementFactory.library(_definingLibrary.context, "lib");
-    CompilationUnitElementImpl unit =
-        library.definingCompilationUnit as CompilationUnitElementImpl;
-    ClassElementImpl classA = ElementFactory.classElement2("A");
-    unit.classes = [classA];
+    await assertNoErrorsInCode('''
+class A {
+  int get p => 0;
+  set p(int value) {}
+}
 
-    // set accessors
-    String propName = "p";
-    PropertyAccessorElement getter =
-        ElementFactory.getterElement(propName, false, _typeProvider.intType);
-    PropertyAccessorElement setter =
-        ElementFactory.setterElement(propName, false, _typeProvider.intType);
-    classA.accessors = <PropertyAccessorElement>[getter, setter];
-    // prepare "A.p"
-    PrefixedIdentifierImpl prefixed = AstTestFactory.identifier5('A', 'p');
-    prefixed.prefix.scopeLookupResult = ScopeLookupResult(classA, null);
-    CommentReference commentReference =
-        astFactory.commentReference(null, prefixed);
-    // resolve
-    _resolveNode(commentReference);
-    expect(prefixed.prefix.staticElement, classA);
-    expect(prefixed.identifier.staticElement, getter);
-    _listener.assertNoErrors();
+/// [A.p]
+test() {}
+''');
+    var prefixed = findNode.prefixed('A.p');
+    expect(prefixed.prefix.staticElement, findElement.class_('A'));
+    expect(prefixed.identifier.staticElement, findElement.getter('p'));
   }
 
   test_visitCommentReference_prefixedIdentifier_class_method() async {
-    LibraryElementImpl library =
-        ElementFactory.library(_definingLibrary.context, "lib");
-    CompilationUnitElementImpl unit =
-        library.definingCompilationUnit as CompilationUnitElementImpl;
-    ClassElementImpl classA = ElementFactory.classElement2("A");
-    unit.classes = [classA];
-    // set method
-    MethodElement method =
-        ElementFactory.methodElement("m", _typeProvider.intType);
-    classA.methods = <MethodElement>[method];
-    // prepare "A.m"
-    PrefixedIdentifierImpl prefixed = AstTestFactory.identifier5('A', 'm');
-    prefixed.prefix.scopeLookupResult = ScopeLookupResult(classA, null);
-    CommentReference commentReference =
-        astFactory.commentReference(null, prefixed);
-    // resolve
-    _resolveNode(commentReference);
-    expect(prefixed.prefix.staticElement, classA);
-    expect(prefixed.identifier.staticElement, method);
-    _listener.assertNoErrors();
+    await assertNoErrorsInCode('''
+class A {
+  m() {}
+}
+
+/// [A.m]
+test() {}
+''');
+    var prefixed = findNode.prefixed('A.m');
+    expect(prefixed.prefix.staticElement, findElement.class_('A'));
+    expect(prefixed.identifier.staticElement, findElement.method('m'));
   }
 
   test_visitCommentReference_prefixedIdentifier_class_operator() async {
-    LibraryElementImpl library =
-        ElementFactory.library(_definingLibrary.context, "lib");
-    CompilationUnitElementImpl unit =
-        library.definingCompilationUnit as CompilationUnitElementImpl;
-    ClassElementImpl classA = ElementFactory.classElement2("A");
-    unit.classes = [classA];
-    // set method
-    MethodElement method =
-        ElementFactory.methodElement("==", _typeProvider.boolType);
-    classA.methods = <MethodElement>[method];
-    // prepare "A.=="
-    PrefixedIdentifierImpl prefixed = AstTestFactory.identifier5('A', '==');
-    prefixed.prefix.scopeLookupResult = ScopeLookupResult(classA, null);
-    CommentReference commentReference =
-        astFactory.commentReference(null, prefixed);
-    // resolve
-    _resolveNode(commentReference);
-    expect(prefixed.prefix.staticElement, classA);
-    expect(prefixed.identifier.staticElement, method);
-    _listener.assertNoErrors();
+    await assertNoErrorsInCode('''
+class A {
+  operator ==(other) => true;
+}
+
+/// [A.==]
+test() {}
+''');
+    var prefixed = findNode.prefixed('A.==');
+    expect(prefixed.prefix.staticElement, findElement.class_('A'));
+    expect(prefixed.identifier.staticElement, findElement.method('=='));
   }
 
   test_visitConstructorName_named() async {
-    ClassElementImpl classA = ElementFactory.classElement2("A");
-    _encloseElement(classA);
-    String constructorName = "a";
-    ConstructorElement constructor =
-        ElementFactory.constructorElement2(classA, constructorName);
-    classA.constructors = <ConstructorElement>[constructor];
-    ConstructorName name = AstTestFactory.constructorName(
-        AstTestFactory.namedType(classA), constructorName);
-    _resolveNode(name);
-    expect(name.staticElement, same(constructor));
-    _listener.assertNoErrors();
+    await assertNoErrorsInCode('''
+class A implements B {
+  A.a();
+}
+class B {
+  factory B() = A.a/*reference*/;
+}
+''');
+    expect(findNode.constructorName('A.a/*reference*/').staticElement,
+        same(findElement.constructor('a')));
   }
 
   test_visitConstructorName_unnamed() async {
-    ClassElementImpl classA = ElementFactory.classElement2("A");
-    _encloseElement(classA);
-    String constructorName = 'named';
-    ConstructorElement constructor =
-        ElementFactory.constructorElement2(classA, constructorName);
-    classA.constructors = <ConstructorElement>[constructor];
-    ConstructorName name = AstTestFactory.constructorName(
-        AstTestFactory.namedType(classA), constructorName);
-    _resolveNode(name);
-    expect(name.staticElement, same(constructor));
-    _listener.assertNoErrors();
+    await assertNoErrorsInCode('''
+class A implements B {
+  A();
+}
+class B {
+  factory B() = A/*reference*/;
+}
+''');
+    expect(findNode.constructorName('A/*reference*/').staticElement,
+        same(findElement.unnamedConstructor('A')));
   }
 
-  @FailingTest(issue: 'https://github.com/dart-lang/sdk/issues/44522')
   test_visitContinueStatement_withLabel() async {
-    // loop: while (true) {
-    //   continue loop;
-    // }
-    String label = "loop";
-    LabelElementImpl labelElement = LabelElementImpl(label, -1, false, false);
-    ContinueStatement continueStatement =
-        AstTestFactory.continueStatement(label);
-    Expression condition = AstTestFactory.booleanLiteral(true);
-    WhileStatement whileStatement =
-        AstTestFactory.whileStatement(condition, continueStatement);
-    expect(_resolveContinue(continueStatement, labelElement, whileStatement),
-        same(labelElement));
-    expect(continueStatement.target, same(whileStatement));
-    _listener.assertNoErrors();
+    await assertNoErrorsInCode('''
+test() {
+  loop: while (true) {
+    continue loop;
+  }
+}
+''');
+    var continueStatement = findNode.continueStatement('continue loop');
+    expect(continueStatement.label!.staticElement, findElement.label('loop'));
+    expect(continueStatement.target, findNode.whileStatement('while (true)'));
   }
 
   test_visitContinueStatement_withoutLabel() async {
-    ContinueStatement statement = AstTestFactory.continueStatement();
-    _resolveStatement(statement, null, null);
-    _listener.assertNoErrors();
+    await assertNoErrorsInCode('''
+test() {
+  while (true) {
+    continue;
+  }
+}
+''');
+    var continueStatement = findNode.continueStatement('continue');
+    expect(continueStatement.target, findNode.whileStatement('while (true)'));
   }
 
-  @FailingTest(issue: 'https://github.com/dart-lang/sdk/issues/44522')
+  test_visitExportDirective_combinators() async {
+    await assertNoErrorsInCode('''
+export 'dart:math' hide pi;
+''');
+    var pi = findElement
+        .export('dart:math')
+        .exportedLibrary!
+        .exportNamespace
+        .get('pi') as PropertyAccessorElement;
+    expect(findNode.simple('pi').staticElement, pi.variable);
+  }
+
   test_visitExportDirective_noCombinators() async {
-    var directive = AstTestFactory.exportDirective2('dart:math');
-    directive.element = ElementFactory.exportFor(
-        ElementFactory.library(_definingLibrary.context, "lib"));
-    _resolveNode(directive);
-    _listener.assertNoErrors();
+    await assertNoErrorsInCode('''
+export 'dart:math';
+''');
+    expect(findNode.export('dart:math').element!.exportedLibrary!.name,
+        'dart.math');
   }
 
   test_visitFieldFormalParameter() async {
-    String fieldName = "f";
-    InterfaceType intType = _typeProvider.intType;
-    FieldElementImpl fieldElement =
-        ElementFactory.fieldElement(fieldName, false, false, false, intType);
-    ClassElementImpl classA = ElementFactory.classElement2("A");
-    classA.fields = <FieldElement>[fieldElement];
-    var parameter = AstTestFactory.fieldFormalParameter2(fieldName);
-    FieldFormalParameterElementImpl parameterElement =
-        ElementFactory.fieldFormalParameter(parameter.identifier);
-    parameterElement.field = fieldElement;
-    parameterElement.type = intType;
-    parameter.identifier.staticElement = parameterElement;
-    _resolveInClass(parameter, classA);
-    expect(parameter.declaredElement!.type, same(intType));
+    await assertNoErrorsInCode('''
+class A {
+  int f;
+  A(this.f);
+}
+''');
+    expect(
+        findNode.fieldFormalParameter('this.f').declaredElement!.type, intType);
   }
 
-  @FailingTest(issue: 'https://github.com/dart-lang/sdk/issues/44522')
+  test_visitImportDirective_combinators_noPrefix() async {
+    await assertErrorsInCode('''
+import 'dart:math' show pi;
+''', [
+      error(HintCode.UNUSED_IMPORT, 7, 11),
+    ]);
+    var pi = findElement
+        .import('dart:math')
+        .importedLibrary!
+        .exportNamespace
+        .get('pi') as PropertyAccessorElement;
+    expect(findNode.simple('pi').staticElement, pi.variable);
+  }
+
+  test_visitImportDirective_combinators_prefix() async {
+    await assertErrorsInCode('''
+import 'dart:math' as p show pi hide ln10;
+''', [
+      error(HintCode.UNUSED_IMPORT, 7, 11),
+    ]);
+    var mathNamespace =
+        findElement.import('dart:math').importedLibrary!.exportNamespace;
+    var pi = mathNamespace.get('pi') as PropertyAccessorElement;
+    expect(findNode.simple('pi').staticElement, pi.variable);
+    var ln10 = mathNamespace.get('ln10') as PropertyAccessorElement;
+    expect(findNode.simple('ln10').staticElement, ln10.variable);
+  }
+
   test_visitImportDirective_noCombinators_noPrefix() async {
-    var directive = AstTestFactory.importDirective3('dart:math', null);
-    directive.element = ElementFactory.importFor(
-        ElementFactory.library(_definingLibrary.context, "lib"), null);
-    _resolveNode(directive);
-    _listener.assertNoErrors();
+    await assertErrorsInCode('''
+import 'dart:math';
+''', [
+      error(HintCode.UNUSED_IMPORT, 7, 11),
+    ]);
+    expect(findNode.import('dart:math').element!.importedLibrary!.name,
+        'dart.math');
   }
 
-  @FailingTest(issue: 'https://github.com/dart-lang/sdk/issues/44522')
   test_visitImportDirective_noCombinators_prefix() async {
-    String prefixName = "p";
-    ImportElement importElement = ElementFactory.importFor(
-        ElementFactory.library(_definingLibrary.context, "lib"),
-        ElementFactory.prefix(prefixName));
-    _definingLibrary.imports = <ImportElement>[importElement];
-    var directive = AstTestFactory.importDirective3('dart:math', prefixName);
-    directive.element = importElement;
-    _resolveNode(directive);
-    _listener.assertNoErrors();
+    await assertErrorsInCode('''
+import 'dart:math' as p;
+''', [
+      error(HintCode.UNUSED_IMPORT, 7, 11),
+    ]);
+    expect(findNode.import('dart:math').element!.importedLibrary!.name,
+        'dart.math');
   }
 
-  @FailingTest(issue: 'https://github.com/dart-lang/sdk/issues/44522')
   test_visitImportDirective_withCombinators() async {
-    ShowCombinator combinator = AstTestFactory.showCombinator2(["A", "B", "C"]);
-    var directive =
-        AstTestFactory.importDirective3('dart:math', null, [combinator]);
-    LibraryElementImpl library =
-        ElementFactory.library(_definingLibrary.context, "lib");
-    TopLevelVariableElementImpl varA =
-        ElementFactory.topLevelVariableElement2("A");
-    TopLevelVariableElementImpl varB =
-        ElementFactory.topLevelVariableElement2("B");
-    TopLevelVariableElementImpl varC =
-        ElementFactory.topLevelVariableElement2("C");
-    CompilationUnitElementImpl unit =
-        library.definingCompilationUnit as CompilationUnitElementImpl;
-    unit.accessors = <PropertyAccessorElement>[
-      varA.getter!,
-      varA.setter!,
-      varB.getter!,
-      varC.setter!
-    ];
-    unit.topLevelVariables = <TopLevelVariableElement>[varA, varB, varC];
-    directive.element = ElementFactory.importFor(library, null);
-    _resolveNode(directive);
-    expect(combinator.shownNames[0].staticElement, same(varA));
-    expect(combinator.shownNames[1].staticElement, same(varB));
-    expect(combinator.shownNames[2].staticElement, same(varC));
-    _listener.assertNoErrors();
+    newFile('$testPackageLibPath/lib1.dart', content: r'''
+int v1 = 0;
+final int v2 = 0;
+''');
+    await assertErrorsInCode('''
+import 'lib1.dart' show v1, v2;
+''', [
+      error(HintCode.UNUSED_IMPORT, 7, 11),
+    ]);
+    var importedVariables = findNode
+        .import('lib1.dart')
+        .element!
+        .importedLibrary!
+        .definingCompilationUnit
+        .topLevelVariables;
+    var v1 = importedVariables.where((v) => v.name == 'v1').single;
+    var v2 = importedVariables.where((v) => v.name == 'v2').single;
+    expect(findNode.simple('v1').staticElement, same(v1));
+    expect(findNode.simple('v2').staticElement, same(v2));
   }
 
   test_visitInstanceCreationExpression_named() async {
-    ClassElementImpl classA = ElementFactory.classElement2("A");
-    _definingCompilationUnit.classes = [classA];
-    String constructorName = "a";
-    ConstructorElement constructor =
-        ElementFactory.constructorElement2(classA, constructorName);
-    classA.constructors = <ConstructorElement>[constructor];
-    var name = AstTestFactory.constructorName(
-        AstTestFactory.namedType(classA), constructorName);
-    name.staticElement = constructor;
-    InstanceCreationExpression creation =
-        AstTestFactory.instanceCreationExpression(Keyword.NEW, name);
-    _resolveNode(creation);
-    _listener.assertNoErrors();
+    await assertNoErrorsInCode('''
+class A {
+  A.a();
+}
+test() => new A.a();
+''');
+    expect(findNode.instanceCreation('new A.a').constructorName.staticElement,
+        findElement.constructor('a'));
+  }
+
+  test_visitInstanceCreationExpression_named_namedParameter() async {
+    await assertNoErrorsInCode('''
+class A {
+  A.named({int a = 0});
+}
+test() => new A.named(a: 0);
+''');
+    expect(
+        findNode.simple('a:').staticElement, same(findElement.parameter('a')));
   }
 
   test_visitInstanceCreationExpression_unnamed() async {
-    ClassElementImpl classA = ElementFactory.classElement2("A");
-    _definingCompilationUnit.classes = [classA];
-    String constructorName = 'named';
-    ConstructorElement constructor =
-        ElementFactory.constructorElement2(classA, constructorName);
-    classA.constructors = <ConstructorElement>[constructor];
-    var name = AstTestFactory.constructorName(
-        AstTestFactory.namedType(classA), constructorName);
-    name.staticElement = constructor;
-    InstanceCreationExpression creation =
-        AstTestFactory.instanceCreationExpression(Keyword.NEW, name);
-    _resolveNode(creation);
-    _listener.assertNoErrors();
-  }
-
-  test_visitInstanceCreationExpression_unnamed_namedParameter() async {
-    ClassElementImpl classA = ElementFactory.classElement2("A");
-    _definingCompilationUnit.classes = [classA];
-    String constructorName = 'named';
-    ConstructorElementImpl constructor =
-        ElementFactory.constructorElement2(classA, constructorName);
-    String parameterName = "a";
-    ParameterElement parameter =
-        ElementFactory.namedParameter2(parameterName, _typeProvider.intType);
-    constructor.parameters = <ParameterElement>[parameter];
-    classA.constructors = <ConstructorElement>[constructor];
-    var name = AstTestFactory.constructorName(
-        AstTestFactory.namedType(classA), constructorName);
-    name.staticElement = constructor;
-    InstanceCreationExpression creation =
-        AstTestFactory.instanceCreationExpression(Keyword.NEW, name, [
-      AstTestFactory.namedExpression2(parameterName, AstTestFactory.integer(0))
-    ]);
-    _resolveNode(creation);
-    expect(
-        (creation.argumentList.arguments[0] as NamedExpression)
-            .name
-            .label
-            .staticElement,
-        same(parameter));
-    _listener.assertNoErrors();
+    await assertNoErrorsInCode('''
+class A {}
+test() => new A();
+''');
+    expect(findNode.instanceCreation('new A').constructorName.staticElement,
+        findElement.unnamedConstructor('A'));
   }
 
   test_visitMethodInvocation() async {
-    InterfaceType numType = _typeProvider.numType;
-    var iGetter = ElementFactory.getterElement('i', true, numType);
-    _definingCompilationUnit.accessors = [iGetter];
-    var left = AstTestFactory.identifier3("i")
-      ..scopeLookupResult = ScopeLookupResult(iGetter, null);
-    String methodName = "abs";
-    MethodInvocation invocation =
-        AstTestFactory.methodInvocation(left, methodName);
-    _resolveNode(invocation);
-    expect(invocation.methodName.staticElement!.declaration,
-        same(numType.getMethod(methodName)));
-    _listener.assertNoErrors();
+    await assertNoErrorsInCode('''
+num get i => 0;
+test() => i.abs();
+''');
+    expect(
+        findNode
+            .methodInvocation('i.abs()')
+            .methodName
+            .staticElement!
+            .declaration,
+        same(typeProvider.numType.getMethod('abs')));
   }
 
   test_visitPrefixedIdentifier_dynamic() async {
-    DartType dynamicType = _typeProvider.dynamicType;
-    var target = AstTestFactory.identifier3("a");
-    VariableElementImpl variable = ElementFactory.localVariableElement(target);
-    variable.type = dynamicType;
-    target.staticElement = variable;
-    target.staticType = dynamicType;
-    PrefixedIdentifier identifier =
-        AstTestFactory.identifier(target, AstTestFactory.identifier3("b"));
-    _resolveNode(identifier);
+    await assertNoErrorsInCode('''
+test(dynamic a) => a.b;
+''');
+    var identifier = findNode.prefixed('a.b');
     expect(identifier.staticElement, isNull);
     expect(identifier.identifier.staticElement, isNull);
-    _listener.assertNoErrors();
+  }
+
+  test_visitRedirectingConstructorInvocation_named() async {
+    await assertNoErrorsInCode('''
+class C {
+  C(int x) : this.named(x /*usage*/);
+  C.named(int y);
+}
+''');
+    var invocation = findNode.redirectingConstructorInvocation('this');
+    var namedConstructor = findElement.constructor('named', of: 'C');
+    expect(invocation.staticElement, namedConstructor);
+    expect(invocation.constructorName!.staticElement, namedConstructor);
+    expect(findNode.simple('x /*usage*/').staticParameterElement,
+        findElement.parameter('y'));
+  }
+
+  test_visitRedirectingConstructorInvocation_unnamed() async {
+    await assertNoErrorsInCode('''
+class C {
+  C.named(int x) : this(x /*usage*/);
+  C(int y);
+}
+''');
+    expect(findNode.redirectingConstructorInvocation('this').staticElement,
+        findElement.unnamedConstructor('C'));
+    expect(findNode.simple('x /*usage*/').staticParameterElement,
+        findElement.parameter('y'));
   }
 
   test_visitSuperConstructorInvocation() async {
-    ClassElementImpl superclass = ElementFactory.classElement2("A");
-    _encloseElement(superclass);
-    ConstructorElementImpl superConstructor =
-        ElementFactory.constructorElement2(superclass, null);
-    superclass.constructors = <ConstructorElement>[superConstructor];
-    ClassElementImpl subclass =
-        ElementFactory.classElement("B", interfaceTypeStar(superclass));
-    _encloseElement(subclass);
-    ConstructorElementImpl subConstructor =
-        ElementFactory.constructorElement2(subclass, null);
-    subclass.constructors = <ConstructorElement>[subConstructor];
-    SuperConstructorInvocation invocation =
-        AstTestFactory.superConstructorInvocation();
-    AstTestFactory.classDeclaration(null, 'C', null, null, null, null, [
-      AstTestFactory.constructorDeclaration(AstTestFactory.identifier3('C'),
-          null, AstTestFactory.formalParameterList(), [invocation])
-    ]);
-    _definingCompilationUnit.classes = [superclass, subclass];
-    _resolveInClass(invocation, subclass);
-    expect(invocation.staticElement, superConstructor);
-    _listener.assertNoErrors();
+    await assertNoErrorsInCode('''
+class A {}
+class B extends A {
+  B() : super();
+}
+''');
+    expect(findNode.superConstructorInvocation('super').staticElement,
+        findElement.unnamedConstructor('A'));
   }
 
   test_visitSuperConstructorInvocation_namedParameter() async {
-    ClassElementImpl superclass = ElementFactory.classElement2("A");
-    _encloseElement(superclass);
-    ConstructorElementImpl superConstructor =
-        ElementFactory.constructorElement2(superclass, null);
-    String parameterName = "p";
-    ParameterElement parameter = ElementFactory.namedParameter(parameterName)
-      ..type = _typeProvider.dynamicType;
-    superConstructor.parameters = <ParameterElement>[parameter];
-    superclass.constructors = <ConstructorElement>[superConstructor];
-    ClassElementImpl subclass =
-        ElementFactory.classElement("B", interfaceTypeStar(superclass));
-    _encloseElement(subclass);
-    ConstructorElementImpl subConstructor =
-        ElementFactory.constructorElement2(subclass, null);
-    subclass.constructors = <ConstructorElement>[subConstructor];
-    SuperConstructorInvocation invocation =
-        AstTestFactory.superConstructorInvocation([
-      AstTestFactory.namedExpression2(parameterName, AstTestFactory.integer(0))
-    ]);
-    AstTestFactory.classDeclaration(null, 'C', null, null, null, null, [
-      AstTestFactory.constructorDeclaration(AstTestFactory.identifier3('C'),
-          null, AstTestFactory.formalParameterList(), [invocation])
-    ]);
-    _definingCompilationUnit.classes = [superclass, subclass];
-    _resolveInClass(invocation, subclass);
-    expect(invocation.staticElement, superConstructor);
-    expect(
-        (invocation.argumentList.arguments[0] as NamedExpression)
-            .name
-            .label
-            .staticElement,
-        same(parameter));
-    _listener.assertNoErrors();
-  }
-
-  /// Create and return the resolver used by the tests.
-  void _createResolver() {
-    var context = TestAnalysisContext();
-    _typeProvider = context.typeProviderLegacy;
-
-    Source source = FileSource(getFile("/test.dart"));
-    _definingCompilationUnit = CompilationUnitElementImpl();
-    _definingCompilationUnit.librarySource =
-        _definingCompilationUnit.source = source;
-    _definingLibrary = ElementFactory.library(context, "test");
-    _definingLibrary.definingCompilationUnit = _definingCompilationUnit;
-
-    _definingLibrary.typeProvider = context.typeProviderLegacy;
-    _definingLibrary.typeSystem = context.typeSystemLegacy;
-    var inheritance = InheritanceManager3();
-
-    var featureSet = FeatureSet.latestLanguageVersion();
-    _visitor = ResolverVisitor(
-        inheritance, _definingLibrary, source, _typeProvider, _listener,
-        featureSet: featureSet,
-        flowAnalysisHelper:
-            FlowAnalysisHelper(context.typeSystemLegacy, false, featureSet));
-  }
-
-  void _encloseElement(ElementImpl element) {
-    if (element is ClassElement) {
-      element.enclosingElement = _definingLibrary;
-    }
-  }
-
-  /// Return the element associated with the label of [statement] after the
-  /// resolver has resolved it.  [labelElement] is the label element to be
-  /// defined in the statement's label scope, and [labelTarget] is the statement
-  /// the label resolves to.
-  Element? _resolveBreak(BreakStatement statement,
-      LabelElementImpl labelElement, Statement labelTarget) {
-    _resolveStatement(statement, labelElement, labelTarget);
-    return statement.label!.staticElement;
-  }
-
-  /// Return the element associated with the label [statement] after the
-  /// resolver has resolved it.  [labelElement] is the label element to be
-  /// defined in the statement's label scope, and [labelTarget] is the AST node
-  /// the label resolves to.
-  ///
-  /// @param statement the statement to be resolved
-  /// @param labelElement the label element to be defined in the statement's
-  ///          label scope
-  /// @return the element to which the statement's label was resolved
-  Element? _resolveContinue(ContinueStatement statement,
-      LabelElementImpl labelElement, AstNode labelTarget) {
-    _resolveStatement(statement, labelElement, labelTarget);
-    return statement.label!.staticElement;
-  }
-
-  /// Return the element associated with the given identifier after the resolver
-  /// has resolved the identifier.
-  ///
-  /// @param node the expression to be resolved
-  /// @param enclosingClass the element representing the class enclosing the
-  ///          identifier
-  /// @return the element to which the expression was resolved
-  void _resolveInClass(AstNode node, ClassElement enclosingClass) {
-    try {
-      _visitor.enclosingClass = enclosingClass;
-      node.accept(_visitor);
-    } finally {
-      _visitor.enclosingClass = null;
-    }
-  }
-
-  /// Return the element associated with the given identifier after the resolver
-  /// has resolved the identifier.
-  ///
-  /// @param node the expression to be resolved
-  /// @param definedElements the elements that are to be defined in the scope in
-  ///          which the element is being resolved
-  /// @return the element to which the expression was resolved
-  void _resolveNode(AstNode node) {
-    node.accept(_visitor);
-  }
-
-  /// Return the element associated with the label of the given statement after
-  /// the resolver has resolved the statement.
-  ///
-  /// @param statement the statement to be resolved
-  /// @param labelElement the label element to be defined in the statement's
-  ///          label scope
-  /// @return the element to which the statement's label was resolved
-  void _resolveStatement(Statement statement, LabelElementImpl? labelElement,
-      AstNode? labelTarget) {
-    statement.accept(_visitor);
-  }
+    await assertNoErrorsInCode('''
+class A {
+  A({dynamic p});
 }
-
-class _LibraryElementMock implements LibraryElement {
-  @override
-  noSuchMethod(Invocation invocation) => super.noSuchMethod(invocation);
+class B extends A {
+  B() : super(p: 0);
+}
+''');
+    expect(findNode.superConstructorInvocation('super').staticElement,
+        findElement.unnamedConstructor('A'));
+    expect(
+        findNode.simple('p:').staticElement, same(findElement.parameter('p')));
+  }
 }
diff --git a/pkg/js/CHANGELOG.md b/pkg/js/CHANGELOG.md
index 571726d..8c22600 100644
--- a/pkg/js/CHANGELOG.md
+++ b/pkg/js/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.6.4
+
+* Includes `@staticInterop` to allow interop with native types from `dart:html`.
+
 ## 0.6.3
 
 * Stable release for null safety.
diff --git a/pkg/js/README.md b/pkg/js/README.md
index 7279082..4f75675 100644
--- a/pkg/js/README.md
+++ b/pkg/js/README.md
@@ -131,6 +131,48 @@
 }
 ```
 
+## Interop with native types using `@staticInterop`
+
+Previously, you could not use `@JS()` or `@anonymous` types to interface with
+native types that are reserved within `dart:html`, e.g. `Window`.
+
+Using `@staticInterop` will now let you do so. However, it requires that there
+be no instance members within the class (constructors are still allowed). You
+can declare these members in a static extension instead. For example:
+
+```dart
+@JS()
+library static_interop;
+
+import 'dart:html' as html;
+
+import 'package:js/js.dart';
+
+@JS()
+@staticInterop
+class JSWindow {}
+
+extension JSWindowExtension on JSWindow {
+  external String get name;
+  String get nameAllCaps => name.toUpperCase();
+}
+
+void main() {
+  var jsWindow = html.window as JSWindow;
+  print(jsWindow.name.toUpperCase() == jsWindow.nameAllCaps);
+}
+```
+
+Note that in the example above you can have both `external` and non-`external`
+members in the extension. Currently, `external` variables, getters/setters, and
+methods are supported within a static extension. These `external` members are
+lowered to their respective `js_util` calls under the hood. For example, the
+`external` `name` getter is equivalent to `js_util.getProperty(this, 'name')`.
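+
+As a rough sketch (not the actual compiler output), the `external` `name`
+getter above behaves much like the hand-written `js_util` call below; the
+`jsWindowName` helper is made up purely for illustration:
+
+```dart
+import 'dart:html' as html;
+
+import 'package:js/js_util.dart' as js_util;
+
+// Illustrative stand-in for what the `external` getter is lowered to; the
+// real lowering is performed by the compiler.
+String jsWindowName(Object jsWindow) =>
+    js_util.getProperty(jsWindow, 'name') as String;
+
+void main() {
+  print(jsWindowName(html.window));
+}
+```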
+
+In general, prefer `@staticInterop` over plain `@JS()` wherever you can. There
+will be fewer surprises, and it's aligned with the statically typed future
+planned for JS interop.
+
 ## Reporting issues
 
 Please file bugs and feature requests on the [SDK issue tracker][issues].
diff --git a/runtime/BUILD.gn b/runtime/BUILD.gn
index 8b3999a..273a109 100644
--- a/runtime/BUILD.gn
+++ b/runtime/BUILD.gn
@@ -238,6 +238,7 @@
 source_set("dart_api") {
   public_configs = [ ":dart_public_config" ]
   sources = [
+    "include/analyze_snapshot_api.h",
     "include/dart_api.h",
     "include/dart_api_dl.c",
     "include/dart_api_dl.h",
@@ -285,9 +286,11 @@
   public_configs = [ ":dart_public_config" ]
   sources = [
     "$target_gen_dir/version.cc",
+    "include/analyze_snapshot_api.h",
     "include/dart_api.h",
     "include/dart_native_api.h",
     "include/dart_tools_api.h",
+    "vm/analyze_snapshot_api_impl.cc",
     "vm/dart_api_impl.cc",
     "vm/native_api_impl.cc",
     "vm/version.h",
diff --git a/runtime/bin/BUILD.gn b/runtime/bin/BUILD.gn
index ea7d40d..04e73a2 100644
--- a/runtime/bin/BUILD.gn
+++ b/runtime/bin/BUILD.gn
@@ -880,6 +880,45 @@
   extra_deps += [ ":elf_loader_product" ]
 }
 
+dart_executable("analyze_snapshot") {
+  extra_configs = [ "..:dart_precompiled_runtime_config" ]
+  extra_deps = [
+    "..:libdart_precompiled_runtime",
+    "../platform:libdart_platform_precompiled_runtime",
+  ]
+
+  extra_sources = [
+    "analyze_snapshot.cc",
+    "builtin.cc",
+    "loader.cc",
+    "loader.h",
+  ]
+
+  if (dart_runtime_mode == "release") {
+    extra_deps += [ ":elf_loader_product" ]
+  } else {
+    extra_deps += [ ":elf_loader" ]
+  }
+}
+
+dart_executable("analyze_snapshot_product") {
+  use_product_mode = true
+  extra_configs = [ "..:dart_precompiled_runtime_config" ]
+  extra_deps = [
+    "..:libdart_precompiled_runtime_product",
+    "../platform:libdart_platform_precompiled_runtime_product",
+  ]
+
+  extra_sources = [
+    "analyze_snapshot.cc",
+    "builtin.cc",
+    "loader.cc",
+    "loader.h",
+  ]
+
+  extra_deps += [ ":elf_loader_product" ]
+}
+
 executable("process_test") {
   sources = [ "process_test.cc" ]
 }
diff --git a/runtime/bin/analyze_snapshot.cc b/runtime/bin/analyze_snapshot.cc
new file mode 100644
index 0000000..9bf6202
--- /dev/null
+++ b/runtime/bin/analyze_snapshot.cc
@@ -0,0 +1,249 @@
+// Copyright (c) 2021, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "bin/elf_loader.h"
+#include "bin/error_exit.h"
+#include "bin/file.h"
+
+#include "bin/options.h"
+#include "bin/platform.h"
+
+#include "include/analyze_snapshot_api.h"
+
+namespace dart {
+namespace bin {
+
+#define STRING_OPTIONS_LIST(V) V(out, out_path)
+
+#define BOOL_OPTIONS_LIST(V)                                                   \
+  V(help, help)                                                                \
+  V(version, version)
+
+#define STRING_OPTION_DEFINITION(flag, variable)                               \
+  static const char* variable = nullptr;                                       \
+  DEFINE_STRING_OPTION(flag, variable)
+STRING_OPTIONS_LIST(STRING_OPTION_DEFINITION)
+#undef STRING_OPTION_DEFINITION
+
+#define BOOL_OPTION_DEFINITION(flag, variable)                                 \
+  static bool variable = false;                                                \
+  DEFINE_BOOL_OPTION(flag, variable)
+BOOL_OPTIONS_LIST(BOOL_OPTION_DEFINITION)
+#undef BOOL_OPTION_DEFINITION
+
+// clang-format off
+static void PrintUsage() {
+  Syslog::PrintErr(
+"Usage: analyze_snapshot [<vm-flags>] [<options>] <snapshot_data>            \n"
+"                                                                            \n"
+"Common options:                                                             \n"
+"--help                                                                      \n"
+"  Display this message.                                                     \n"
+"--version                                                                   \n"
+"  Print the SDK version.                                                    \n"
+"--out                                                                       \n"
+"  Path to generate the analysis results JSON.                               \n"
+"                                                                            \n"
+"If omitting [<vm-flags>] the VM parsing the snapshot is created with the    \n"
+"following default flags:                                                    \n"
+"--enable_mirrors=false                                                      \n"
+"--background_compilation                                                    \n"
+"--lazy_async_stacks                                                         \n"
+"--precompilation                                                            \n"
+"                                                                            \n"
+"\n");
+}
+// clang-format on
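+
+// Example invocation (the snapshot path is illustrative):
+//
+//   analyze_snapshot --out=analysis.json <path-to-aot-elf-snapshot>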
+
+const uint8_t* vm_snapshot_data = nullptr;
+const uint8_t* vm_snapshot_instructions = nullptr;
+const uint8_t* vm_isolate_data = nullptr;
+const uint8_t* vm_isolate_instructions = nullptr;
+
+// Parse out the command line arguments. Returns -1 if the arguments
+// are incorrect, 0 otherwise.
+static int ParseArguments(int argc,
+                          char** argv,
+                          CommandLineOptions* vm_options,
+                          CommandLineOptions* inputs) {
+  // Skip the binary name.
+  int i = 1;
+
+  // Parse out the vm options.
+  while ((i < argc) && OptionProcessor::IsValidShortFlag(argv[i])) {
+    if (OptionProcessor::TryProcess(argv[i], vm_options)) {
+      i += 1;
+      continue;
+    }
+    vm_options->AddArgument(argv[i]);
+    i += 1;
+  }
+
+  // Parse out the kernel inputs.
+  while (i < argc) {
+    inputs->AddArgument(argv[i]);
+    i++;
+  }
+
+  if (help) {
+    PrintUsage();
+    Platform::Exit(0);
+  } else if (version) {
+    Syslog::PrintErr("Dart SDK version: %s\n", Dart_VersionString());
+    Platform::Exit(0);
+  }
+
+  // Verify consistency of arguments.
+  if (inputs->count() < 1) {
+    Syslog::PrintErr("At least one input is required\n");
+    return -1;
+  }
+
+  if (out_path == nullptr) {
+    Syslog::PrintErr(
+        "Please specify an output path for analysis with the --out flag.\n\n");
+    return -1;
+  }
+
+  return 0;
+}
+
+PRINTF_ATTRIBUTE(1, 2) static void PrintErrAndExit(const char* format, ...) {
+  va_list args;
+  va_start(args, format);
+  Syslog::VPrintErr(format, args);
+  va_end(args);
+
+  Dart_ExitScope();
+  Dart_ShutdownIsolate();
+  exit(kErrorExitCode);
+}
+
+static File* OpenFile(const char* filename) {
+  File* file = File::Open(nullptr, filename, File::kWriteTruncate);
+  if (file == nullptr) {
+    PrintErrAndExit("Error: Unable to write file: %s\n\n", filename);
+  }
+  return file;
+}
+
+static void WriteFile(const char* filename,
+                      const char* buffer,
+                      const intptr_t size) {
+  File* file = OpenFile(filename);
+  RefCntReleaseScope<File> rs(file);
+  if (!file->WriteFully(buffer, size)) {
+    PrintErrAndExit("Error: Unable to write file: %s\n\n", filename);
+  }
+}
+
+int RunAnalyzer(int argc, char** argv) {
+  // This constant mirrors the gen_snapshot binary and is subject to change.
+  const int EXTRA_VM_ARGUMENTS = 7;
+  CommandLineOptions vm_options(argc + EXTRA_VM_ARGUMENTS);
+  CommandLineOptions inputs(argc);
+  // Parse command line arguments.
+  if (ParseArguments(argc, argv, &vm_options, &inputs) < 0) {
+    PrintUsage();
+    return kErrorExitCode;
+  }
+
+  // Initialize VM with default flags if none are provided.
+  // TODO(#47924): Implement auto-parsing of flags from the snapshot file.
+  if (vm_options.count() == 0) {
+    vm_options.AddArgument("--enable_mirrors=false");
+    vm_options.AddArgument("--background_compilation");
+    vm_options.AddArgument("--lazy_async_stacks");
+    vm_options.AddArgument("--precompilation");
+  }
+
+  char* error = Dart_SetVMFlags(vm_options.count(), vm_options.arguments());
+  if (error != nullptr) {
+    Syslog::PrintErr("Setting VM flags failed: %s\n", error);
+    free(error);
+    return kErrorExitCode;
+  }
+
+  // Dart_LoadELF will crash non-gracefully on a nonexistent file even though
+  // it should return `nullptr`, so check that the file exists first.
+  File* const file =
+      File::Open(/*namespc=*/nullptr, inputs.GetArgument(0), File::kRead);
+  if (file == nullptr) {
+    Syslog::PrintErr("Snapshot file does not exist\n");
+    return kErrorExitCode;
+  }
+  file->Release();
+
+  const char* loader_error = nullptr;
+  Dart_LoadedElf* loaded_elf = Dart_LoadELF(
+      inputs.GetArgument(0), 0, &loader_error, &vm_snapshot_data,
+      &vm_snapshot_instructions, &vm_isolate_data, &vm_isolate_instructions);
+
+  if (loaded_elf == nullptr) {
+    Syslog::PrintErr("Failure calling Dart_LoadELF:\n%s\n", loader_error);
+    return kErrorExitCode;
+  }
+
+  // Begin initialization
+  Dart_InitializeParams init_params = {};
+  memset(&init_params, 0, sizeof(init_params));
+  init_params.version = DART_INITIALIZE_PARAMS_CURRENT_VERSION;
+  init_params.vm_snapshot_data = vm_snapshot_data;
+  init_params.vm_snapshot_instructions = vm_snapshot_instructions;
+
+  init_params.file_open = DartUtils::OpenFile;
+  init_params.file_read = DartUtils::ReadFile;
+  init_params.file_write = DartUtils::WriteFile;
+  init_params.file_close = DartUtils::CloseFile;
+  init_params.entropy_source = DartUtils::EntropySource;
+
+  error = Dart_Initialize(&init_params);
+  if (error != nullptr) {
+    Syslog::PrintErr("VM initialization failed: %s\n", error);
+    free(error);
+    return kErrorExitCode;
+  }
+
+  auto isolate_group_data = std::unique_ptr<IsolateGroupData>(
+      new IsolateGroupData(nullptr, nullptr, nullptr, false));
+
+  Dart_IsolateFlags isolate_flags;
+  Dart_IsolateFlagsInitialize(&isolate_flags);
+  // Null safety can be determined from the snapshot itself.
+  isolate_flags.null_safety =
+      Dart_DetectNullSafety(nullptr, nullptr, nullptr, vm_snapshot_data,
+                            vm_snapshot_instructions, nullptr, -1);
+
+  Dart_CreateIsolateGroup(nullptr, nullptr, vm_isolate_data,
+                          vm_isolate_instructions, &isolate_flags,
+                          isolate_group_data.get(),
+                          /*isolate_data=*/nullptr, &error);
+
+  if (error != nullptr) {
+    Syslog::PrintErr("Dart_CreateIsolateGroup Error: %s\n", error);
+    free(error);
+    return kErrorExitCode;
+  }
+
+  dart::snapshot_analyzer::Dart_SnapshotAnalyzerInformation info = {
+      vm_snapshot_data, vm_snapshot_instructions, vm_isolate_data,
+      vm_isolate_instructions};
+
+  char* out = NULL;
+  intptr_t out_len = 0;
+
+  Dart_EnterScope();
+  Dart_DumpSnapshotInformationAsJson(&out, &out_len, &info);
+  WriteFile(out_path, out, out_len);
+  // Since ownership of the JSON buffer is ours, free before we exit.
+  free(out);
+  Dart_ExitScope();
+  Dart_ShutdownIsolate();
+  return 0;
+}
+}  // namespace bin
+}  // namespace dart
+int main(int argc, char** argv) {
+  return dart::bin::RunAnalyzer(argc, argv);
+}
diff --git a/runtime/include/analyze_snapshot_api.h b/runtime/include/analyze_snapshot_api.h
new file mode 100644
index 0000000..e02f461
--- /dev/null
+++ b/runtime/include/analyze_snapshot_api.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2021, the Dart project authors.  Please see the AUTHORS file
+ * for details. All rights reserved. Use of this source code is governed by a
+ * BSD-style license that can be found in the LICENSE file.
+ */
+
+#ifndef RUNTIME_INCLUDE_ANALYZE_SNAPSHOT_API_H_
+#define RUNTIME_INCLUDE_ANALYZE_SNAPSHOT_API_H_
+
+#include <stdint.h>
+
+namespace dart {
+namespace snapshot_analyzer {
+typedef struct {
+  const uint8_t* vm_snapshot_data;
+  const uint8_t* vm_snapshot_instructions;
+  const uint8_t* vm_isolate_data;
+  const uint8_t* vm_isolate_instructions;
+} Dart_SnapshotAnalyzerInformation;
+
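+// Fills [buffer] with a JSON representation of the snapshot described by
+// [info], and sets [buffer_length] to its length. The caller owns the
+// returned buffer and is responsible for freeing it (see
+// bin/analyze_snapshot.cc).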
+void Dart_DumpSnapshotInformationAsJson(char** buffer,
+                                        intptr_t* buffer_length,
+                                        Dart_SnapshotAnalyzerInformation* info);
+}  // namespace snapshot_analyzer
+}  // namespace dart
+
+#endif  // RUNTIME_INCLUDE_ANALYZE_SNAPSHOT_API_H_
diff --git a/runtime/tests/vm/dart/analyze_snapshot_binary_test.dart b/runtime/tests/vm/dart/analyze_snapshot_binary_test.dart
new file mode 100644
index 0000000..1cec0b7
--- /dev/null
+++ b/runtime/tests/vm/dart/analyze_snapshot_binary_test.dart
@@ -0,0 +1,300 @@
+// Copyright (c) 2018, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:convert';
+import 'dart:io';
+import 'dart:async';
+
+import 'package:expect/expect.dart';
+import 'package:native_stack_traces/elf.dart';
+import 'package:path/path.dart' as path;
+
+import 'use_flag_test_helper.dart';
+
+// Used to ensure we don't run the same test configuration multiple times.
+final _seenDescriptions = <String>{};
+
+Future<void> testAOT(String dillPath,
+    {bool useAsm = false,
+    bool forceDrops = false,
+    bool useDispatch = true,
+    bool stripUtil = false, // Note: forced true if useAsm.
+    bool stripFlag = false,
+    bool disassemble = false}) async {
+  if (const bool.fromEnvironment('dart.vm.product') && disassemble) {
+    Expect.isFalse(disassemble, 'no use of disassembler in PRODUCT mode');
+  }
+
+  final analyzeSnapshot = path.join(
+      buildDir,
+      bool.fromEnvironment('dart.vm.product')
+          ? 'analyze_snapshot_product'
+          : 'analyze_snapshot');
+
+  // For assembly, we can't test the sizes of the snapshot sections, since we
+  // don't have a Mach-O reader for Mac snapshots and for ELF, the assembler
+  // merges the text/data sections and the VM/isolate section symbols may not
+  // have length information. Thus, we force external stripping so we can test
+  // the approximate size of the stripped snapshot.
+  if (useAsm) {
+    stripUtil = true;
+  }
+
+  final descriptionBuilder = StringBuffer()..write(useAsm ? 'assembly' : 'elf');
+  if (forceDrops) {
+    descriptionBuilder.write('-dropped');
+  }
+  if (!useDispatch) {
+    descriptionBuilder.write('-nodispatch');
+  }
+  if (stripFlag) {
+    descriptionBuilder.write('-intstrip');
+  }
+  if (stripUtil) {
+    descriptionBuilder.write('-extstrip');
+  }
+  if (disassemble) {
+    descriptionBuilder.write('-disassembled');
+  }
+
+  final description = descriptionBuilder.toString();
+  Expect.isTrue(_seenDescriptions.add(description),
+      "test configuration $description would be run multiple times");
+
+  await withTempDir('analyze_snapshot_binary-$description',
+      (String tempDir) async {
+    // Generate the snapshot
+    final snapshotPath = path.join(tempDir, 'test.snap');
+    final commonSnapshotArgs = [
+      if (stripFlag) '--strip', //  gen_snapshot specific and not a VM flag.
+      if (forceDrops) ...[
+        '--dwarf-stack-traces',
+        '--no-retain-function-objects',
+        '--no-retain-code-objects'
+      ],
+      useDispatch ? '--use-table-dispatch' : '--no-use-table-dispatch',
+      if (disassemble) '--disassemble', // Not defined in PRODUCT mode.
+      dillPath,
+    ];
+
+    if (useAsm) {
+      final assemblyPath = path.join(tempDir, 'test.S');
+
+      await run(genSnapshot, <String>[
+        '--snapshot-kind=app-aot-assembly',
+        '--assembly=$assemblyPath',
+        ...commonSnapshotArgs,
+      ]);
+
+      await assembleSnapshot(assemblyPath, snapshotPath);
+    } else {
+      await run(genSnapshot, <String>[
+        '--snapshot-kind=app-aot-elf',
+        '--elf=$snapshotPath',
+        ...commonSnapshotArgs,
+      ]);
+    }
+
+    print("Snapshot generated at $snapshotPath.");
+
+    // When assembled, the snapshot may not be ELF but some other format.
+    final elf = Elf.fromFile(snapshotPath);
+    if (!useAsm) {
+      Expect.isNotNull(elf);
+    }
+
+    if (elf != null) {
+      // Verify some ELF file format parameters.
+      final textSections = elf.namedSections(".text");
+      Expect.isNotEmpty(textSections);
+      Expect.isTrue(
+          textSections.length <= 2, "More text sections than expected");
+      final dataSections = elf.namedSections(".rodata");
+      Expect.isNotEmpty(dataSections);
+      Expect.isTrue(
+          dataSections.length <= 2, "More data sections than expected");
+    }
+
+    final analyzerOutputPath = path.join(tempDir, 'analyze_test.json');
+
+    // This will throw if exit code is not 0.
+    await run(analyzeSnapshot, <String>[
+      '--out=$analyzerOutputPath',
+      '$snapshotPath',
+    ]);
+
+    final analyzerJsonBytes = await readFile(analyzerOutputPath);
+    final analyzerJson = json.decode(analyzerJsonBytes);
+    Expect.isFalse(analyzerJson.isEmpty);
+    Expect.isTrue(analyzerJson.keys
+        .toSet()
+        .containsAll(['snapshot_data', 'class_table', 'object_pool']));
+  });
+}
+
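+// Returns the match only if [regexp] matches the entirety of [line],
+// otherwise returns null.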
+Match? matchComplete(RegExp regexp, String line) {
+  Match? match = regexp.firstMatch(line);
+  if (match == null) return match;
+  if (match.start != 0 || match.end != line.length) return null;
+  return match;
+}
+
+// All fields of "Raw..." classes defined in "raw_object.h" must be included in
+// the giant macro in "raw_object_fields.cc". This function attempts to check
+// that with some basic regexes.
+testMacros() async {
+  const String className = "([a-z0-9A-Z]+)";
+  const String rawClass = "Raw$className";
+  const String fieldName = "([a-z0-9A-Z_]+)";
+
+  final Map<String, Set<String>> fields = {};
+
+  final String rawObjectFieldsPath =
+      path.join(sdkDir, 'runtime', 'vm', 'raw_object_fields.cc');
+  final RegExp fieldEntry = RegExp(" *F\\($className, $fieldName\\) *\\\\?");
+
+  await for (String line in File(rawObjectFieldsPath)
+      .openRead()
+      .cast<List<int>>()
+      .transform(utf8.decoder)
+      .transform(LineSplitter())) {
+    Match? match = matchComplete(fieldEntry, line);
+    if (match != null) {
+      fields
+          .putIfAbsent(match.group(1)!, () => Set<String>())
+          .add(match.group(2)!);
+    }
+  }
+
+  final RegExp classStart = RegExp("class $rawClass : public $rawClass {");
+  final RegExp classEnd = RegExp("}");
+  final RegExp field = RegExp("  $rawClass. +$fieldName;.*");
+
+  final String rawObjectPath =
+      path.join(sdkDir, 'runtime', 'vm', 'raw_object.h');
+
+  String? currentClass;
+  bool hasMissingFields = false;
+  await for (String line in File(rawObjectPath)
+      .openRead()
+      .cast<List<int>>()
+      .transform(utf8.decoder)
+      .transform(LineSplitter())) {
+    Match? match = matchComplete(classStart, line);
+    if (match != null) {
+      currentClass = match.group(1);
+      continue;
+    }
+    match = matchComplete(classEnd, line);
+    if (match != null) {
+      currentClass = null;
+      continue;
+    }
+    match = matchComplete(field, line);
+    if (match != null && currentClass != null) {
+      if (fields[currentClass] == null) {
+        hasMissingFields = true;
+        print("$currentClass is missing entirely.");
+        continue;
+      }
+      if (!fields[currentClass]!.contains(match.group(2)!)) {
+        hasMissingFields = true;
+        print("$currentClass is missing ${match.group(2)!}.");
+      }
+    }
+  }
+
+  if (hasMissingFields) {
+    Expect.fail("$rawObjectFieldsPath is missing some fields. "
+        "Please update it to match $rawObjectPath.");
+  }
+}
+
+main() async {
+  void printSkip(String description) =>
+      print('Skipping $description for ${path.basename(buildDir)} '
+              'on ${Platform.operatingSystem}' +
+          (clangBuildToolsDir == null ? ' without //buildtools' : ''));
+
+  // We don't have access to the SDK on Android.
+  if (Platform.isAndroid) {
+    printSkip('all tests');
+    return;
+  }
+
+  await testMacros();
+
+  await withTempDir('analyze_snapshot_binary', (String tempDir) async {
+    // Use this test itself as the program to compile into the snapshot.
+    final _thisTestPath = path.join(sdkDir, 'runtime', 'tests', 'vm', 'dart',
+        'analyze_snapshot_binary_test.dart');
+
+    // We only need to generate the dill file once for all AOT tests.
+    final aotDillPath = path.join(tempDir, 'aot_test.dill');
+    await run(genKernel, <String>[
+      '--aot',
+      '--platform',
+      platformDill,
+      ...Platform.executableArguments.where((arg) =>
+          arg.startsWith('--enable-experiment=') ||
+          arg == '--sound-null-safety' ||
+          arg == '--no-sound-null-safety'),
+      '-o',
+      aotDillPath,
+      _thisTestPath
+    ]);
+
+    // Just as a reminder for AOT tests:
+    // * If useAsm is true, then stripUtil is forced (as the assembler may add
+    //   extra information that needs stripping), so no need to specify
+    //   stripUtil for useAsm tests.
+
+    // Test unstripped ELF generation directly.
+    await testAOT(aotDillPath);
+    await testAOT(aotDillPath, forceDrops: true);
+    await testAOT(aotDillPath, forceDrops: true, useDispatch: false);
+
+    // Test flag-stripped ELF generation.
+    await testAOT(aotDillPath, stripFlag: true);
+
+    // Since we can't force disassembler support after the fact when running
+    // in PRODUCT mode, skip any --disassemble tests. Do these tests last as
+    // they have lots of output and so the log will be truncated.
+    if (!const bool.fromEnvironment('dart.vm.product')) {
+      // Regression test for dartbug.com/41149.
+      await testAOT(aotDillPath, disassemble: true);
+    }
+
+    // We neither generate assembly nor have a stripping utility on Windows.
+    if (Platform.isWindows) {
+      printSkip('external stripping and assembly tests');
+      return;
+    }
+
+    // The native strip utility on Mac OS X doesn't recognize ELF files.
+    if (Platform.isMacOS && clangBuildToolsDir == null) {
+      printSkip('ELF external stripping test');
+    } else {
+      // Test unstripped ELF generation that is then externally stripped.
+      await testAOT(aotDillPath, stripUtil: true);
+    }
+
+    // TODO(sstrickl): Currently we can't assemble for SIMARM64 on MacOSX.
+    // For example, the test runner still uses blobs for
+    // dartkp-mac-*-simarm64. Change assembleSnapshot and remove this check
+    // when we can.
+    if (Platform.isMacOS && buildDir.endsWith('SIMARM64')) {
+      printSkip('assembly tests');
+      return;
+    }
+    // Test unstripped assembly generation that is then externally stripped.
+    await testAOT(aotDillPath, useAsm: true);
+    // Test stripped assembly generation that is then externally stripped.
+    await testAOT(aotDillPath, useAsm: true, stripFlag: true);
+  });
+}
+
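+// Reads the contents of the file at [file] as a string.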
+Future<String> readFile(String file) {
+  return new File(file).readAsString();
+}
diff --git a/runtime/tests/vm/dart_2/analyze_snapshot_binary_test.dart b/runtime/tests/vm/dart_2/analyze_snapshot_binary_test.dart
new file mode 100644
index 0000000..48f91fb
--- /dev/null
+++ b/runtime/tests/vm/dart_2/analyze_snapshot_binary_test.dart
@@ -0,0 +1,300 @@
+// Copyright (c) 2018, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// @dart = 2.9
+
+import 'dart:convert';
+import 'dart:io';
+import 'dart:async';
+
+import 'package:expect/expect.dart';
+import 'package:native_stack_traces/elf.dart';
+import 'package:path/path.dart' as path;
+
+import 'use_flag_test_helper.dart';
+
+// Used to ensure we don't have multiple equivalent calls to test.
+final _seenDescriptions = <String>{};
+
+Future<void> testAOT(String dillPath,
+    {bool useAsm = false,
+    bool forceDrops = false,
+    bool useDispatch = true,
+    bool stripUtil = false, // Note: forced true if useAsm.
+    bool stripFlag = false,
+    bool disassemble = false}) async {
+  if (const bool.fromEnvironment('dart.vm.product') && disassemble) {
+    Expect.isFalse(disassemble, 'no use of disassembler in PRODUCT mode');
+  }
+
+  final analyzeSnapshot = path.join(
+      buildDir,
+      bool.fromEnvironment('dart.vm.product')
+          ? 'analyze_snapshot_product'
+          : 'analyze_snapshot');
+
+  // For assembly, we can't test the sizes of the snapshot sections, since we
+  // don't have a Mach-O reader for Mac snapshots and for ELF, the assembler
+  // merges the text/data sections and the VM/isolate section symbols may not
+  // have length information. Thus, we force external stripping so we can test
+  // the approximate size of the stripped snapshot.
+  if (useAsm) {
+    stripUtil = true;
+  }
+
+  final descriptionBuilder = StringBuffer()..write(useAsm ? 'assembly' : 'elf');
+  if (forceDrops) {
+    descriptionBuilder.write('-dropped');
+  }
+  if (!useDispatch) {
+    descriptionBuilder.write('-nodispatch');
+  }
+  if (stripFlag) {
+    descriptionBuilder.write('-intstrip');
+  }
+  if (stripUtil) {
+    descriptionBuilder.write('-extstrip');
+  }
+  if (disassemble) {
+    descriptionBuilder.write('-disassembled');
+  }
+
+  final description = descriptionBuilder.toString();
+  Expect.isTrue(_seenDescriptions.add(description),
+      "test configuration $description would be run multiple times");
+
+  await withTempDir('analyze_snapshot_binary-$description',
+      (String tempDir) async {
+    // Generate the snapshot
+    final snapshotPath = path.join(tempDir, 'test.snap');
+    final commonSnapshotArgs = [
+      if (stripFlag) '--strip', //  gen_snapshot specific and not a VM flag.
+      if (forceDrops) ...[
+        '--dwarf-stack-traces',
+        '--no-retain-function-objects',
+        '--no-retain-code-objects'
+      ],
+      useDispatch ? '--use-table-dispatch' : '--no-use-table-dispatch',
+      if (disassemble) '--disassemble', // Not defined in PRODUCT mode.
+      dillPath,
+    ];
+
+    if (useAsm) {
+      final assemblyPath = path.join(tempDir, 'test.S');
+
+      await run(genSnapshot, <String>[
+        '--snapshot-kind=app-aot-assembly',
+        '--assembly=$assemblyPath',
+        ...commonSnapshotArgs,
+      ]);
+
+      await assembleSnapshot(assemblyPath, snapshotPath);
+    } else {
+      await run(genSnapshot, <String>[
+        '--snapshot-kind=app-aot-elf',
+        '--elf=$snapshotPath',
+        ...commonSnapshotArgs,
+      ]);
+    }
+
+    print("Snapshot generated at $snapshotPath.");
+
+    // May not be ELF, but another format.
+    final elf = Elf.fromFile(snapshotPath);
+    if (!useAsm) {
+      Expect.isNotNull(elf);
+    }
+
+    if (elf != null) {
+      // Verify some ELF file format parameters.
+      final textSections = elf.namedSections(".text");
+      Expect.isNotEmpty(textSections);
+      Expect.isTrue(
+          textSections.length <= 2, "More text sections than expected");
+      final dataSections = elf.namedSections(".rodata");
+      Expect.isNotEmpty(dataSections);
+      Expect.isTrue(
+          dataSections.length <= 2, "More data sections than expected");
+    }
+
+    final analyzerOutputPath = path.join(tempDir, 'analyze_test.json');
+
+    // This will throw if exit code is not 0.
+    await run(analyzeSnapshot, <String>[
+      '--out=$analyzerOutputPath',
+      '$snapshotPath',
+    ]);
+
+    final analyzerJsonBytes = await readFile(analyzerOutputPath);
+    final analyzerJson = json.decode(analyzerJsonBytes);
+    Expect.isFalse(analyzerJson.isEmpty);
+    Expect.isTrue(analyzerJson.keys
+        .toSet()
+        .containsAll(['snapshot_data', 'class_table', 'object_pool']));
+  });
+}
+
+Match matchComplete(RegExp regexp, String line) {
+  Match match = regexp.firstMatch(line);
+  if (match == null) return match;
+  if (match.start != 0 || match.end != line.length) return null;
+  return match;
+}
+
+// All fields of "Raw..." classes defined in "raw_object.h" must be included in
+// the giant macro in "raw_object_fields.cc". This function attempts to check
+// that with some basic regexes.
+testMacros() async {
+  const String className = "([a-z0-9A-Z]+)";
+  const String rawClass = "Raw$className";
+  const String fieldName = "([a-z0-9A-Z_]+)";
+
+  final Map<String, Set<String>> fields = {};
+
+  final String rawObjectFieldsPath =
+      path.join(sdkDir, 'runtime', 'vm', 'raw_object_fields.cc');
+  final RegExp fieldEntry = RegExp(" *F\\($className, $fieldName\\) *\\\\?");
+
+  await for (String line in File(rawObjectFieldsPath)
+      .openRead()
+      .cast<List<int>>()
+      .transform(utf8.decoder)
+      .transform(LineSplitter())) {
+    Match match = matchComplete(fieldEntry, line);
+    if (match != null) {
+      fields
+          .putIfAbsent(match.group(1), () => Set<String>())
+          .add(match.group(2));
+    }
+  }
+
+  final RegExp classStart = RegExp("class $rawClass : public $rawClass {");
+  final RegExp classEnd = RegExp("}");
+  final RegExp field = RegExp("  $rawClass. +$fieldName;.*");
+
+  final String rawObjectPath =
+      path.join(sdkDir, 'runtime', 'vm', 'raw_object.h');
+
+  String currentClass;
+  bool hasMissingFields = false;
+  await for (String line in File(rawObjectPath)
+      .openRead()
+      .cast<List<int>>()
+      .transform(utf8.decoder)
+      .transform(LineSplitter())) {
+    Match match = matchComplete(classStart, line);
+    if (match != null) {
+      currentClass = match.group(1);
+      continue;
+    }
+
+    match = matchComplete(classEnd, line);
+    if (match != null) {
+      currentClass = null;
+      continue;
+    }
+
+    match = matchComplete(field, line);
+    if (match != null && currentClass != null) {
+      if (fields[currentClass] == null) {
+        hasMissingFields = true;
+        print("$currentClass is missing entirely.");
+        continue;
+      }
+      if (!fields[currentClass].contains(match.group(2))) {
+        hasMissingFields = true;
+        print("$currentClass is missing ${match.group(2)}.");
+      }
+    }
+  }
+
+  if (hasMissingFields) {
+    Expect.fail("$rawObjectFieldsPath is missing some fields. "
+        "Please update it to match $rawObjectPath.");
+  }
+}
+
+main() async {
+  void printSkip(String description) =>
+      print('Skipping $description for ${path.basename(buildDir)} '
+              'on ${Platform.operatingSystem}' +
+          (clangBuildToolsDir == null ? ' without //buildtools' : ''));
+
+  // We don't have access to the SDK on Android.
+  if (Platform.isAndroid) {
+    printSkip('all tests');
+    return;
+  }
+
+  await testMacros();
+
+  await withTempDir('analyze_snapshot_binary', (String tempDir) async {
+    // Use this test itself as the program to compile into the snapshot.
+    final _thisTestPath = path.join(sdkDir, 'runtime', 'tests', 'vm', 'dart_2',
+        'analyze_snapshot_binary_test.dart');
+
+    // We only need to generate the dill file once for all AOT tests.
+    final aotDillPath = path.join(tempDir, 'aot_test.dill');
+    await run(genKernel, <String>[
+      '--aot',
+      '--platform',
+      platformDill,
+      '-o',
+      aotDillPath,
+      _thisTestPath
+    ]);
+
+    // Just as a reminder for AOT tests:
+    // * If useAsm is true, then stripUtil is forced (as the assembler may add
+    //   extra information that needs stripping), so no need to specify
+    //   stripUtil for useAsm tests.
+
+    // Test unstripped ELF generation directly.
+    await testAOT(aotDillPath);
+    await testAOT(aotDillPath, forceDrops: true);
+    await testAOT(aotDillPath, forceDrops: true, useDispatch: false);
+
+    // Test flag-stripped ELF generation.
+    await testAOT(aotDillPath, stripFlag: true);
+
+    // Since we can't force disassembler support after the fact when running
+    // in PRODUCT mode, skip any --disassemble tests. Do these tests last as
+    // they have lots of output and so the log will be truncated.
+    if (!const bool.fromEnvironment('dart.vm.product')) {
+      // Regression test for dartbug.com/41149.
+      await testAOT(aotDillPath, disassemble: true);
+    }
+
+    // We neither generate assembly nor have a stripping utility on Windows.
+    if (Platform.isWindows) {
+      printSkip('external stripping and assembly tests');
+      return;
+    }
+
+    // The native strip utility on Mac OS X doesn't recognize ELF files.
+    if (Platform.isMacOS && clangBuildToolsDir == null) {
+      printSkip('ELF external stripping test');
+    } else {
+      // Test unstripped ELF generation that is then externally stripped.
+      await testAOT(aotDillPath, stripUtil: true);
+    }
+
+    // TODO(sstrickl): Currently we can't assemble for SIMARM64 on MacOSX.
+    // For example, the test runner still uses blobs for
+    // dartkp-mac-*-simarm64. Change assembleSnapshot and remove this check
+    // when we can.
+    if (Platform.isMacOS && buildDir.endsWith('SIMARM64')) {
+      printSkip('assembly tests');
+      return;
+    }
+    // Test unstripped assembly generation that is then externally stripped.
+    await testAOT(aotDillPath, useAsm: true);
+    // Test stripped assembly generation that is then externally stripped.
+    await testAOT(aotDillPath, useAsm: true, stripFlag: true);
+  });
+}
+
+Future<String> readFile(String file) {
+  return new File(file).readAsString();
+}
diff --git a/runtime/tests/vm/dart_2/causal_stacks/utils.dart b/runtime/tests/vm/dart_2/causal_stacks/utils.dart
index a17cade..0d1489c 100644
--- a/runtime/tests/vm/dart_2/causal_stacks/utils.dart
+++ b/runtime/tests/vm/dart_2/causal_stacks/utils.dart
@@ -131,7 +131,7 @@
 
 Future listenAsyncStarThrowAsync() async {
   // Listening to an async* doesn't create the usual await-for StreamIterator.
-  StreamSubscription ss = asyncStarThrowAsync().listen((Future f) {});
+  StreamSubscription ss = asyncStarThrowAsync().listen((Future f) => 42);
   await ss.asFuture();
 }
 
diff --git a/runtime/tests/vm/vm.status b/runtime/tests/vm/vm.status
index f4bc06c..5e342f3 100644
--- a/runtime/tests/vm/vm.status
+++ b/runtime/tests/vm/vm.status
@@ -37,7 +37,9 @@
 dart_2/stack_overflow_shared_test: Pass, Slow # Uses --shared-slow-path-triggers-gc flag.
 
 [ $arch == ia32 ]
+dart/analyze_snapshot_binary_test: SkipByDesign # No IA32 support.
 dart/disassemble_aot_test: SkipByDesign # IA32 does not support AOT.
+dart_2/analyze_snapshot_binary_test: SkipByDesign # No IA32 support.
 dart_2/disassemble_aot_test: SkipByDesign # IA32 does not support AOT.
 
 [ $builder_tag == asan ]
diff --git a/runtime/vm/analyze_snapshot_api_impl.cc b/runtime/vm/analyze_snapshot_api_impl.cc
new file mode 100644
index 0000000..ef73e76
--- /dev/null
+++ b/runtime/vm/analyze_snapshot_api_impl.cc
@@ -0,0 +1,202 @@
+// Copyright (c) 2021, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "include/analyze_snapshot_api.h"
+#include "vm/dart_api_impl.h"
+#include "vm/json_writer.h"
+#include "vm/object.h"
+#include "vm/object_store.h"
+#include "vm/thread.h"
+
+namespace dart {
+namespace snapshot_analyzer {
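+
+// Writes a JSON array describing every valid class in the class table:
+// name, library, super class, interfaces, functions (with their code entry
+// points and sizes) and fields.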
+void DumpClassTable(Thread* thread, dart::JSONWriter* js) {
+  auto class_table = thread->isolate_group()->class_table();
+
+  Class& cls = Class::Handle();
+  String& name = String::Handle();
+  js->OpenArray("class_table");
+
+  for (intptr_t i = 1; i < class_table->NumCids(); i++) {
+    if (!class_table->HasValidClassAt(i)) {
+      continue;
+    }
+    cls = class_table->At(i);
+    if (!cls.IsNull()) {
+      name = cls.Name();
+      js->OpenObject();
+      js->PrintProperty("id", i);
+      js->PrintProperty("name", name.ToCString());
+
+      // Note: some metadata is stripped from the snapshot, so it's important
+      // to check every field for null to avoid segfaults.
+      const Library& library = Library::Handle(cls.library());
+      if (!library.IsNull()) {
+        String& lib_name = String::Handle();
+        lib_name = String::NewFormatted(
+            Heap::kOld, "%s%s", String::Handle(library.url()).ToCString(),
+            String::Handle(library.private_key()).ToCString());
+        js->PrintProperty("library", lib_name.ToCString());
+      }
+
+      const AbstractType& super_type = AbstractType::Handle(cls.super_type());
+      if (!super_type.IsNull()) {
+        const String& super_name = String::Handle(super_type.Name());
+        js->PrintProperty("super_class", super_name.ToCString());
+      }
+
+      const Array& interfaces_array = Array::Handle(cls.interfaces());
+      if (!interfaces_array.IsNull()) {
+        if (interfaces_array.Length() > 0) {
+          js->OpenArray("interfaces");
+          AbstractType& interface = AbstractType::Handle();
+          intptr_t len = interfaces_array.Length();
+          for (intptr_t i = 0; i < len; i++) {
+            interface ^= interfaces_array.At(i);
+            js->PrintValue(interface.ToCString());
+          }
+          js->CloseArray();
+        }
+      }
+      const Array& functions_array = Array::Handle(cls.functions());
+      if (!functions_array.IsNull()) {
+        if (functions_array.Length() > 0) {
+          js->OpenArray("functions");
+          Function& function = Function::Handle();
+          intptr_t len = functions_array.Length();
+          for (intptr_t i = 0; i < len; i++) {
+            function ^= functions_array.At(i);
+            if (function.IsNull() || !function.HasCode()) {
+              continue;
+            }
+            const Code& code = Code::Handle(function.CurrentCode());
+            intptr_t size = code.Size();
+
+            // Note: Some entry points here will be pointing to the VM
+            // instructions buffer.
+
+            // Note: code_entry contains the absolute address in memory.
+            // In order to resolve it to a relative offset in the instructions
+            // buffer we need to take the base address and subtract it from
+            // the entry point address.
+            auto code_entry = code.EntryPoint();
+            // On different architectures the type of the underlying
+            // dart::uword can result in an unsigned long long vs unsigned long
+            // mismatch.
+            uint64_t code_addr = static_cast<uint64_t>(code_entry);
+            js->OpenObject();
+            js->PrintProperty("name", function.ToCString());
+            js->PrintfProperty("code_entry", "0x%" PRIx64 "", code_addr);
+            js->PrintProperty("size", size);
+            js->CloseObject();
+          }
+          js->CloseArray();
+        }
+      }
+      const Array& fields_array = Array::Handle(cls.fields());
+      if (!fields_array.IsNull() && fields_array.Length() > 0) {
+        js->OpenArray("fields");
+        Field& field = Field::Handle();
+        for (intptr_t i = 0; i < fields_array.Length(); i++) {
+          field ^= fields_array.At(i);
+          js->PrintValue(field.ToCString());
+        }
+        js->CloseArray();
+      }
+      js->CloseObject();
+    }
+  }
+  js->CloseArray();
+}
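+
+// Writes a JSON array with an entry for each tagged object in the global
+// object pool. Currently only one-byte strings are dumped; other entry
+// types are skipped (see the TODOs below).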
+void DumpObjectPool(Thread* thread, dart::JSONWriter* js) {
+  js->OpenArray("object_pool");
+
+  auto pool_ptr = thread->isolate_group()->object_store()->global_object_pool();
+  const auto& pool = ObjectPool::Handle(ObjectPool::RawCast(pool_ptr));
+  for (intptr_t i = 0; i < pool.Length(); i++) {
+    auto type = pool.TypeAt(i);
+    // Only interested in tagged objects.
+    // All these checks are required otherwise ToCString() will segfault.
+    if (type != ObjectPool::EntryType::kTaggedObject) {
+      continue;
+    }
+
+    auto entry = pool.ObjectAt(i);
+    if (!entry.IsHeapObject()) {
+      continue;
+    }
+
+    intptr_t cid = entry.GetClassId();
+
+    switch (cid) {
+      case kOneByteStringCid: {
+        js->OpenObject();
+        js->PrintProperty("type", "kOneByteString");
+        js->PrintProperty("id", i);
+        js->PrintProperty("offset", pool.element_offset(i));
+        js->PrintProperty("value", Object::Handle(entry).ToCString());
+        js->CloseObject();
+        break;
+      }
+      case kTwoByteStringCid: {
+        // TODO(#47924): Add support.
+        break;
+      }
+      default:
+        // TODO(#47924): Investigate other types of objects to parse.
+        break;
+    }
+  }
+  js->CloseArray();
+}
+// TODO(#47924): Add processing of the entries in the dispatch table.
+// Below is an example skeleton
+// void DumpDispatchTable(dart::Thread* thread) {
+//   auto dispatch = thread->isolate_group()->dispatch_table();
+//   auto length = dispatch->length();
+//   // We must unbias the array entries so we don't crash on null access.
+//   auto entries = dispatch->ArrayOrigin() - DispatchTable::OriginElement();
+//   for (intptr_t i = 0; i < length; i++) {
+//     OS::Print("0x%lx at %ld\n", entries[i], i);
+//   }
+// }
+
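+// Serializes information about the snapshot (the base addresses of its
+// sections, the class table and the object pool) as JSON into a newly
+// allocated buffer. Ownership of the buffer is passed to the caller.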
+void Dart_DumpSnapshotInformationAsJson(
+    char** buffer,
+    intptr_t* buffer_length,
+    Dart_SnapshotAnalyzerInformation* info) {
+  Thread* thread = Thread::Current();
+  DARTSCOPE(thread);
+  JSONWriter js;
+  // Open empty object so output is valid/parsable JSON.
+  js.OpenObject();
+  js.OpenObject("snapshot_data");
+  // Base addresses of the snapshot data, useful to calculate relative offsets.
+  js.PrintfProperty("vm_data", "%p", info->vm_snapshot_data);
+  js.PrintfProperty("vm_instructions", "%p", info->vm_snapshot_instructions);
+  js.PrintfProperty("isolate_data", "%p", info->vm_isolate_data);
+  js.PrintfProperty("isolate_instructions", "%p",
+                    info->vm_isolate_instructions);
+  js.CloseObject();
+
+  {
+    // Debug builds assert that our thread has a lock before accessing
+    // vm internal fields.
+    SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
+    DumpClassTable(thread, &js);
+    DumpObjectPool(thread, &js);
+  }
+
+  // Close our empty object.
+  js.CloseObject();
+
+  // Give ownership to caller.
+  js.Steal(buffer, buffer_length);
+}
+}  // namespace snapshot_analyzer
+}  // namespace dart
diff --git a/runtime/vm/app_snapshot.cc b/runtime/vm/app_snapshot.cc
index 7a94ef8..d1d09c7 100644
--- a/runtime/vm/app_snapshot.cc
+++ b/runtime/vm/app_snapshot.cc
@@ -997,7 +997,14 @@
       AutoTraceObjectName(func, MakeDisambiguatedFunctionName(s, func));
       WriteFromTo(func);
       if (kind == Snapshot::kFullAOT) {
-        WriteCompressedField(func, code);
+#if defined(DART_PRECOMPILER)
+        CodePtr code = func->untag()->code();
+        const auto code_index = s->GetCodeIndex(code);
+        s->WriteUnsigned(code_index);
+        s->AttributePropertyRef(code, "code_");
+#else
+        UNREACHABLE();
+#endif
       } else if (s->kind() == Snapshot::kFullJIT) {
         NOT_IN_PRECOMPILED(WriteCompressedField(func, unoptimized_code));
         WriteCompressedField(func, code);
@@ -1038,6 +1045,87 @@
 };
 #endif  // !DART_PRECOMPILED_RUNTIME
 
+template <bool need_entry_point_for_non_discarded>
+DART_FORCE_INLINE static CodePtr GetCodeAndEntryPointByIndex(
+    const Deserializer* d,
+    intptr_t code_index,
+    uword* entry_point) {
+  code_index -= 1;  // 0 is reserved for LazyCompile stub.
+
+  // In the root unit and in the VM isolate snapshot, code indices are
+  // self-contained: they point into the instructions table and/or into the
+  // code cluster. In non-root units we might also refer to code objects from
+  // the parent unit, which means code_index is biased by num_base_objects_.
+  const intptr_t base = d->is_non_root_unit() ? d->num_base_objects() : 0;
+  if (code_index < base) {
+    CodePtr code = static_cast<CodePtr>(d->Ref(code_index));
+    if (need_entry_point_for_non_discarded) {
+      *entry_point = Code::EntryPointOf(code);
+    }
+    return code;
+  }
+  code_index -= base;
+
+  // At this point code_index is referring to a code object which is either
+  // discarded or exists in the Code cluster. Non-discarded Code objects
+  // are associated with the tail of the instruction table and have the
+  // same order there and in the Code cluster. This means that
+  // subtracting first_entry_with_code yields index into the Code cluster.
+  // This also works for deferred code objects in root unit's snapshot
+  // due to the choice of encoding (see Serializer::GetCodeIndex).
+  const intptr_t first_entry_with_code =
+      d->instructions_table().rodata()->first_entry_with_code;
+  if (code_index < first_entry_with_code) {
+    *entry_point = d->instructions_table().EntryPointAt(code_index);
+    return StubCode::UnknownDartCode().ptr();
+  } else {
+    const intptr_t cluster_index = code_index - first_entry_with_code;
+    CodePtr code =
+        static_cast<CodePtr>(d->Ref(d->code_start_index() + cluster_index));
+    if (need_entry_point_for_non_discarded) {
+      *entry_point = Code::EntryPointOf(code);
+    }
+    return code;
+  }
+}
+
+CodePtr Deserializer::GetCodeByIndex(intptr_t code_index,
+                                     uword* entry_point) const {
+  // See Serializer::GetCodeIndex for how code_index is encoded.
+  if (code_index == 0) {
+    return StubCode::LazyCompile().ptr();
+  } else if (FLAG_precompiled_mode) {
+    return GetCodeAndEntryPointByIndex<
+        /*need_entry_point_for_non_discarded=*/false>(this, code_index,
+                                                      entry_point);
+  } else {
+    // -1 below because 0 is reserved for LazyCompile stub.
+    const intptr_t ref = code_start_index_ + code_index - 1;
+    ASSERT(code_start_index_ <= ref && ref < code_stop_index_);
+    return static_cast<CodePtr>(Ref(ref));
+  }
+}
+
+intptr_t Deserializer::CodeIndexToClusterIndex(const InstructionsTable& table,
+                                               intptr_t code_index) {
+  // Note: code indices we are interpreting here originate from the root
+  // loading unit which means base is equal to 0.
+  // See comments which clarify the connection between code_index and
+  // index into the Code cluster.
+  ASSERT(FLAG_precompiled_mode);
+  const intptr_t first_entry_with_code = table.rodata()->first_entry_with_code;
+  return code_index - 1 - first_entry_with_code;
+}
+
+uword Deserializer::GetEntryPointByCodeIndex(intptr_t code_index) const {
+  // See Deserializer::GetCodeByIndex which this code repeats.
+  ASSERT(FLAG_precompiled_mode);
+  uword entry_point = 0;
+  GetCodeAndEntryPointByIndex</*need_entry_point_for_non_discarded=*/true>(
+      this, code_index, &entry_point);
+  return entry_point;
+}
+
 class FunctionDeserializationCluster : public DeserializationCluster {
  public:
   FunctionDeserializationCluster() : DeserializationCluster("Function") {}
@@ -1064,11 +1152,10 @@
 
       if (kind == Snapshot::kFullAOT) {
         const intptr_t code_index = d->ReadUnsigned();
-        CodePtr code = static_cast<CodePtr>(d->Ref(code_index));
+        uword entry_point = 0;
+        CodePtr code = d->GetCodeByIndex(code_index, &entry_point);
         func->untag()->code_ = code;
-        if (Code::IsUnknownDartCode(code)) {
-          const uword entry_point = d->instructions_table().EntryPointAt(
-              code_index - d->code_start_index());
+        if (entry_point != 0) {
           func->untag()->entry_point_ = entry_point;
           func->untag()->unchecked_entry_point_ = entry_point;
         }
@@ -1788,7 +1875,11 @@
     if (s->kind() == Snapshot::kFullJIT) {
       s->Push(code->untag()->deopt_info_array_);
       s->Push(code->untag()->static_calls_target_table_);
+      s->Push(code->untag()->compressed_stackmaps_);
     } else if (s->kind() == Snapshot::kFullAOT) {
+      // Note: we don't trace compressed_stackmaps_ because we are going to emit
+      // a separate mapping table into RO data which is not going to be a real
+      // heap object.
 #if defined(DART_PRECOMPILER)
       auto const calls_array = code->untag()->static_calls_target_table_;
       if (calls_array != Array::null()) {
@@ -1819,10 +1910,6 @@
 #endif
     }
 
-    if (s->InCurrentLoadingUnitOrRoot(code->untag()->compressed_stackmaps_)) {
-      s->Push(code->untag()->compressed_stackmaps_);
-    }
-
     if (Code::IsDiscarded(code)) {
       ASSERT(s->kind() == Snapshot::kFullAOT && FLAG_dwarf_stack_traces_mode &&
              !FLAG_retain_code_objects);
@@ -1869,50 +1956,60 @@
 
   struct CodeOrderInfo {
     CodePtr code;
-    intptr_t order;
-    intptr_t original_index;
+    intptr_t not_discarded;  // 1 if this code was not discarded and
+                             // 0 otherwise.
+    intptr_t instructions_id;
   };
 
   // We sort code objects in such a way that code objects with the same
-  // instructions are grouped together. To make sorting more stable between
-  // similar programs we also sort them further by their original indices -
-  // this helps to stabilize output of --print-instructions-sizes-to which uses
-  // the name of the first code object (among those pointing to the same
-  // instruction objects).
+  // instructions are grouped together and ensure that all instructions
+  // without associated code objects are grouped together at the beginning of
+  // the code section. InstructionsTable encoding assumes that all
+  // instructions with non-discarded Code objects are grouped at the end.
+  //
+  // Note that in AOT mode we expect that all Code objects pointing to
+  // the same instructions are deduplicated, as in bare instructions mode
+  // there is no way to identify which specific Code object (out of those
+  // which point to the specific instructions range) actually corresponds
+  // to a particular frame.
   static int CompareCodeOrderInfo(CodeOrderInfo const* a,
                                   CodeOrderInfo const* b) {
-    if (a->order < b->order) return -1;
-    if (a->order > b->order) return 1;
-    if (a->original_index < b->original_index) return -1;
-    if (a->original_index > b->original_index) return 1;
+    if (a->not_discarded < b->not_discarded) return -1;
+    if (a->not_discarded > b->not_discarded) return 1;
+    if (a->instructions_id < b->instructions_id) return -1;
+    if (a->instructions_id > b->instructions_id) return 1;
     return 0;
   }
 
-  static void Insert(GrowableArray<CodeOrderInfo>* order_list,
+  static void Insert(Serializer* s,
+                     GrowableArray<CodeOrderInfo>* order_list,
                      IntMap<intptr_t>* order_map,
-                     CodePtr code,
-                     intptr_t original_index) {
+                     CodePtr code) {
     InstructionsPtr instr = code->untag()->instructions_;
     intptr_t key = static_cast<intptr_t>(instr);
-    intptr_t order;
+    intptr_t instructions_id = 0;
+
     if (order_map->HasKey(key)) {
-      order = order_map->Lookup(key);
+      // We are expected to merge code objects which point to the same
+      // instructions in the precompiled mode.
+      RELEASE_ASSERT(!FLAG_precompiled_mode);
+      instructions_id = order_map->Lookup(key);
     } else {
-      order = order_list->length() + 1;
-      order_map->Insert(key, order);
+      instructions_id = order_map->Length() + 1;
+      order_map->Insert(key, instructions_id);
     }
     CodeOrderInfo info;
     info.code = code;
-    info.order = order;
-    info.original_index = original_index;
+    info.instructions_id = instructions_id;
+    info.not_discarded = Code::IsDiscarded(code) ? 0 : 1;
     order_list->Add(info);
   }
 
-  static void Sort(GrowableArray<CodePtr>* codes) {
+  static void Sort(Serializer* s, GrowableArray<CodePtr>* codes) {
     GrowableArray<CodeOrderInfo> order_list;
     IntMap<intptr_t> order_map;
     for (intptr_t i = 0; i < codes->length(); i++) {
-      Insert(&order_list, &order_map, (*codes)[i], i);
+      Insert(s, &order_list, &order_map, (*codes)[i]);
     }
     order_list.Sort(CompareCodeOrderInfo);
     ASSERT(order_list.length() == codes->length());
@@ -1921,11 +2018,11 @@
     }
   }
 
-  static void Sort(GrowableArray<Code*>* codes) {
+  static void Sort(Serializer* s, GrowableArray<Code*>* codes) {
     GrowableArray<CodeOrderInfo> order_list;
     IntMap<intptr_t> order_map;
     for (intptr_t i = 0; i < codes->length(); i++) {
-      Insert(&order_list, &order_map, (*codes)[i]->ptr(), i);
+      Insert(s, &order_list, &order_map, (*codes)[i]->ptr());
     }
     order_list.Sort(CompareCodeOrderInfo);
     ASSERT(order_list.length() == codes->length());
@@ -1934,27 +2031,49 @@
     }
   }
 
+  intptr_t NonDiscardedCodeCount() {
+    intptr_t count = 0;
+    for (auto code : objects_) {
+      if (!Code::IsDiscarded(code)) {
+        count++;
+      }
+    }
+    return count;
+  }
+
   void WriteAlloc(Serializer* s) {
+    const intptr_t non_discarded_count = NonDiscardedCodeCount();
     const intptr_t count = objects_.length();
-    s->WriteUnsigned(count);
-    for (intptr_t i = 0; i < count; i++) {
-      WriteAlloc(s, objects_[i]);
+    ASSERT(count == non_discarded_count || (s->kind() == Snapshot::kFullAOT));
+
+    first_ref_ = s->next_ref_index();
+    s->WriteUnsigned(non_discarded_count);
+    for (auto code : objects_) {
+      if (!Code::IsDiscarded(code)) {
+        WriteAlloc(s, code);
+      } else {
+        // Mark discarded code as unreachable, so that we can later
+        // assign artificial references to it.
+        s->heap()->SetObjectId(code, kUnreachableReference);
+      }
     }
-    const intptr_t deferred_count = deferred_objects_.length();
-    s->WriteUnsigned(deferred_count);
-    for (intptr_t i = 0; i < deferred_count; i++) {
-      WriteAlloc(s, deferred_objects_[i]);
+
+    s->WriteUnsigned(deferred_objects_.length());
+    first_deferred_ref_ = s->next_ref_index();
+    for (auto code : deferred_objects_) {
+      ASSERT(!Code::IsDiscarded(code));
+      WriteAlloc(s, code);
     }
+    last_ref_ = s->next_ref_index() - 1;
   }
 
   void WriteAlloc(Serializer* s, CodePtr code) {
+    ASSERT(!Code::IsDiscarded(code));
     s->AssignRef(code);
     AutoTraceObjectName(code, MakeDisambiguatedCodeName(s, code));
     const int32_t state_bits = code->untag()->state_bits_;
     s->Write<int32_t>(state_bits);
-    if (!Code::DiscardedBit::decode(state_bits)) {
-      target_memory_size_ += compiler::target::Code::InstanceSize(0);
-    }
+    target_memory_size_ += compiler::target::Code::InstanceSize(0);
   }
 
   void WriteFill(Serializer* s) {
@@ -1962,12 +2081,20 @@
     const intptr_t count = objects_.length();
     for (intptr_t i = 0; i < count; i++) {
       CodePtr code = objects_[i];
-      WriteFill(s, kind, code, false);
+#if defined(DART_PRECOMPILER)
+      if (FLAG_write_v8_snapshot_profile_to != nullptr &&
+          Code::IsDiscarded(code)) {
+        s->CreateArtificialNodeIfNeeded(code);
+      }
+#endif
+      // Note: for discarded code this function will not write anything out;
+      // it is only called to record information in the snapshot profile.
+      WriteFill(s, kind, code, /*deferred=*/false);
     }
     const intptr_t deferred_count = deferred_objects_.length();
     for (intptr_t i = 0; i < deferred_count; i++) {
       CodePtr code = deferred_objects_[i];
-      WriteFill(s, kind, code, true);
+      WriteFill(s, kind, code, /*deferred=*/true);
     }
   }
 
@@ -1975,6 +2102,7 @@
                  Snapshot::Kind kind,
                  CodePtr code,
                  bool deferred) {
+    const intptr_t bytes_written = s->bytes_written();
     AutoTraceObjectName(code, MakeDisambiguatedCodeName(s, code));
 
     intptr_t pointer_offsets_length =
@@ -2024,6 +2152,8 @@
 #endif  // defined(DART_PRECOMPILER)
 
     if (Code::IsDiscarded(code)) {
+      // No bytes should be written to represent this code.
+      ASSERT(s->bytes_written() == bytes_written);
       // Only write instructions, compressed stackmaps and state bits
       // for the discarded Code objects.
       ASSERT(kind == Snapshot::kFullAOT && FLAG_dwarf_stack_traces_mode &&
@@ -2036,7 +2166,6 @@
         s->AttributePropertyRef(owner, "owner_");
       }
 #endif
-
       return;
     }
 
@@ -2053,10 +2182,8 @@
     WriteField(code, exception_handlers_);
     WriteField(code, pc_descriptors_);
     WriteField(code, catch_entry_);
-    if (s->InCurrentLoadingUnitOrRoot(code->untag()->compressed_stackmaps_)) {
+    if (s->kind() == Snapshot::kFullJIT) {
       WriteField(code, compressed_stackmaps_);
-    } else {
-      WriteFieldValue(compressed_stackmaps_, CompressedStackMaps::null());
     }
     if (FLAG_precompiled_mode && FLAG_dwarf_stack_traces_mode) {
       WriteFieldValue(inlined_id_to_function_, Array::null());
@@ -2098,7 +2225,14 @@
             Object::NameVisibility::kInternalName));
   }
 
+  intptr_t first_ref() const { return first_ref_; }
+  intptr_t first_deferred_ref() const { return first_deferred_ref_; }
+  intptr_t last_ref() const { return last_ref_; }
+
  private:
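+  // Reference ids assigned to the Code objects written by this cluster:
+  // ids in [first_ref_, first_deferred_ref_) correspond to non-deferred
+  // objects and ids in [first_deferred_ref_, last_ref_] to deferred ones.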
+  intptr_t first_ref_;
+  intptr_t first_deferred_ref_;
+  intptr_t last_ref_;
   GrowableArray<CodePtr> objects_;
   GrowableArray<CodePtr> deferred_objects_;
   Array& array_;
@@ -2119,6 +2253,7 @@
       ReadAllocOneCode(d, old_space);
     }
     stop_index_ = d->next_index();
+    d->set_code_stop_index(stop_index_);
     deferred_start_index_ = d->next_index();
     const intptr_t deferred_count = d->ReadUnsigned();
     for (intptr_t i = 0; i < deferred_count; i++) {
@@ -2129,15 +2264,11 @@
 
   void ReadAllocOneCode(Deserializer* d, PageSpace* old_space) {
     const int32_t state_bits = d->Read<int32_t>();
-    if (Code::DiscardedBit::decode(state_bits)) {
-      ASSERT(StubCode::HasBeenInitialized());
-      d->AssignRef(StubCode::UnknownDartCode().ptr());
-    } else {
-      auto code = static_cast<CodePtr>(
-          old_space->AllocateSnapshot(Code::InstanceSize(0)));
-      d->AssignRef(code);
-      code->untag()->state_bits_ = state_bits;
-    }
+    ASSERT(!Code::DiscardedBit::decode(state_bits));
+    auto code = static_cast<CodePtr>(
+        old_space->AllocateSnapshot(Code::InstanceSize(0)));
+    d->AssignRef(code);
+    code->untag()->state_bits_ = state_bits;
   }
 
   void ReadFill(Deserializer* d, bool primary) {
@@ -2153,17 +2284,12 @@
   void ReadFill(Deserializer* d, intptr_t id, bool deferred) {
     auto const code = static_cast<CodePtr>(d->Ref(id));
 
-#if defined(DART_PRECOMPILED_RUNTIME)
-    if (Code::IsUnknownDartCode(code)) {
-      d->ReadInstructions(code, deferred, /*discarded=*/true);
-      return;
-    }
-#endif  // defined(DART_PRECOMPILED_RUNTIME)
+    ASSERT(!Code::IsUnknownDartCode(code));
 
     Deserializer::InitializeHeader(code, kCodeCid, Code::InstanceSize(0));
     ASSERT(!Code::IsDiscarded(code));
 
-    d->ReadInstructions(code, deferred, /*discarded=*/false);
+    d->ReadInstructions(code, deferred);
 
     // There would be a single global pool if this is a full AOT snapshot
     // with bare instructions.
@@ -2178,8 +2304,12 @@
     code->untag()->pc_descriptors_ =
         static_cast<PcDescriptorsPtr>(d->ReadRef());
     code->untag()->catch_entry_ = d->ReadRef();
-    code->untag()->compressed_stackmaps_ =
-        static_cast<CompressedStackMapsPtr>(d->ReadRef());
+    if (d->kind() == Snapshot::kFullJIT) {
+      code->untag()->compressed_stackmaps_ =
+          static_cast<CompressedStackMapsPtr>(d->ReadRef());
+    } else if (d->kind() == Snapshot::kFullAOT) {
+      code->untag()->compressed_stackmaps_ = CompressedStackMaps::null();
+    }
     code->untag()->inlined_id_to_function_ =
         static_cast<ArrayPtr>(d->ReadRef());
     code->untag()->code_source_map_ =
@@ -2690,7 +2820,7 @@
       s->AssignRef(map);
       AutoTraceObject(map);
       const intptr_t length = UntaggedCompressedStackMaps::SizeField::decode(
-          map->untag()->flags_and_size_);
+          map->untag()->payload()->flags_and_size);
       s->WriteUnsigned(length);
       target_memory_size_ +=
           compiler::target::CompressedStackMaps::InstanceSize(length);
@@ -2702,10 +2832,11 @@
     for (intptr_t i = 0; i < count; i++) {
       CompressedStackMapsPtr map = objects_[i];
       AutoTraceObject(map);
-      s->WriteUnsigned(map->untag()->flags_and_size_);
+      s->WriteUnsigned(map->untag()->payload()->flags_and_size);
       const intptr_t length = UntaggedCompressedStackMaps::SizeField::decode(
-          map->untag()->flags_and_size_);
-      uint8_t* cdata = reinterpret_cast<uint8_t*>(map->untag()->data());
+          map->untag()->payload()->flags_and_size);
+      uint8_t* cdata =
+          reinterpret_cast<uint8_t*>(map->untag()->payload()->data());
       s->WriteBytes(cdata, length);
     }
   }
@@ -2743,8 +2874,9 @@
           static_cast<CompressedStackMapsPtr>(d->Ref(id));
       Deserializer::InitializeHeader(map, kCompressedStackMapsCid,
                                      CompressedStackMaps::InstanceSize(length));
-      map->untag()->flags_and_size_ = flags_and_size;
-      uint8_t* cdata = reinterpret_cast<uint8_t*>(map->untag()->data());
+      map->untag()->payload()->flags_and_size = flags_and_size;
+      uint8_t* cdata =
+          reinterpret_cast<uint8_t*>(map->untag()->payload()->data());
       d->ReadBytes(cdata, length);
     }
   }
@@ -5690,7 +5822,9 @@
   V(canonical_type_parameters, Array,                                          \
     HashTables::New<CanonicalTypeParameterSet>(4))                             \
   ONLY_IN_PRODUCT(ONLY_IN_AOT(                                                 \
-      V(closure_functions, GrowableObjectArray, GrowableObjectArray::null())))
+      V(closure_functions, GrowableObjectArray, GrowableObjectArray::null()))) \
+  ONLY_IN_AOT(V(canonicalized_stack_map_entries, CompressedStackMaps,          \
+                CompressedStackMaps::null()))
 
   ProgramSerializationRoots(ZoneGrowableArray<Object*>* base_objects,
                             ObjectStore* object_store,
@@ -5774,6 +5908,10 @@
     s->WriteDispatchTable(dispatch_table_entries_);
   }
 
+  virtual const CompressedStackMaps& canonicalized_stack_map_entries() const {
+    return saved_canonicalized_stack_map_entries_;
+  }
+
  private:
   ZoneGrowableArray<Object*>* const base_objects_;
   ObjectStore* const object_store_;
@@ -5852,9 +5990,7 @@
   }
 
   void PushRoots(Serializer* s) {
-    intptr_t num_deferred_objects = unit_->deferred_objects()->length();
-    for (intptr_t i = 0; i < num_deferred_objects; i++) {
-      const Object* deferred_object = (*unit_->deferred_objects())[i];
+    for (auto deferred_object : *unit_->deferred_objects()) {
       ASSERT(deferred_object->IsCode());
       CodePtr code = static_cast<CodePtr>(deferred_object->ptr());
       ObjectPoolPtr pool = code->untag()->object_pool_;
@@ -5868,7 +6004,6 @@
           }
         }
       }
-      s->Push(code->untag()->compressed_stackmaps_);
       s->Push(code->untag()->code_source_map_);
     }
   }
@@ -5891,7 +6026,6 @@
       ASSERT(!Code::IsDiscarded(code));
       s->WriteInstructions(code->untag()->instructions_,
                            code->untag()->unchecked_offset_, code, false);
-      s->WriteRootRef(code->untag()->compressed_stackmaps_, "deferred-code");
       s->WriteRootRef(code->untag()->code_source_map_, "deferred-code");
     }
 
@@ -5940,7 +6074,7 @@
     for (intptr_t id = deferred_start_index_; id < deferred_stop_index_; id++) {
       CodePtr code = static_cast<CodePtr>(d->Ref(id));
       ASSERT(!Code::IsUnknownDartCode(code));
-      d->ReadInstructions(code, /*deferred=*/false, /*discarded=*/false);
+      d->ReadInstructions(code, /*deferred=*/false);
       if (code->untag()->owner_->IsHeapObject() &&
           code->untag()->owner_->IsFunction()) {
         FunctionPtr func = static_cast<FunctionPtr>(code->untag()->owner_);
@@ -5964,8 +6098,6 @@
         }
 #endif
       }
-      code->untag()->compressed_stackmaps_ =
-          static_cast<CompressedStackMapsPtr>(d->ReadRef());
       code->untag()->code_source_map_ =
           static_cast<CodeSourceMapPtr>(d->ReadRef());
     }
@@ -5988,8 +6120,12 @@
     if (isolate_group->dispatch_table_snapshot() != nullptr) {
       ReadStream stream(isolate_group->dispatch_table_snapshot(),
                         isolate_group->dispatch_table_snapshot_size());
-      d->ReadDispatchTable(&stream, /*deferred=*/true, deferred_start_index_,
-                           deferred_stop_index_);
+      const GrowableObjectArray& tables = GrowableObjectArray::Handle(
+          isolate_group->object_store()->instructions_tables());
+      InstructionsTable& root_table = InstructionsTable::Handle();
+      root_table ^= tables.At(0);
+      d->ReadDispatchTable(&stream, /*deferred=*/true, root_table,
+                           deferred_start_index_, deferred_stop_index_);
     }
   }
 
@@ -6028,7 +6164,6 @@
       num_base_objects_(0),
       num_written_objects_(0),
       next_ref_index_(kFirstReference),
-      previous_text_offset_(0),
       initial_field_table_(thread->isolate_group()->initial_field_table()),
       vm_(vm),
       profile_writer_(profile_writer)
@@ -6065,7 +6200,13 @@
 void Serializer::AddBaseObject(ObjectPtr base_object,
                                const char* type,
                                const char* name) {
-  AssignRef(base_object);
+  // Don't assign references to the discarded code.
+  const bool is_discarded_code = base_object->IsHeapObject() &&
+                                 base_object->IsCode() &&
+                                 Code::IsDiscarded(Code::RawCast(base_object));
+  if (!is_discarded_code) {
+    AssignRef(base_object);
+  }
   num_base_objects_++;
 
   if ((profile_writer_ != nullptr) && (type != nullptr)) {
@@ -6589,8 +6730,100 @@
 }
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
-intptr_t Serializer::PrepareInstructions() {
-  if (!Snapshot::IncludesCode(kind())) return 0;
+#if defined(DART_PRECOMPILER)
+// We use the following encoding schemes when encoding references to Code
+// objects.
+//
+// In AOT mode:
+//
+// 0        --  LazyCompile stub
+// 1        -+
+//           |  for non-root-unit/non-VM snapshots
+// ...        > reference into parent snapshot objects
+//           |  (base is num_base_objects_ in this case, 0 otherwise).
+// base     -+
+// base + 1 -+
+//           |  for non-deferred Code objects (those with instructions)
+//            > index into the instructions table (code_index_).
+//           |  (L is code_index_.Length()).
+// base + L -+
+// ...      -+
+//           |  for deferred Code objects (those without instructions)
+//            > index of this Code object in the deferred part of the
+//           |  Code cluster.
+//
+// Note that this encoding has the following property: non-discarded
+// non-deferred Code objects form the tail of the instructions table,
+// which makes the indices assigned to non-discarded non-deferred Code
+// objects and to deferred Code objects contiguous. This means that, when
+// decoding, code_index - (base + 1) - first_entry_with_code yields the
+// index of the Code object in the Code cluster both for non-deferred and
+// deferred Code objects.
+//
+// For JIT snapshots we do:
+//
+// 0        --  LazyCompile stub
+// 1        -+
+//           |
+// ...        > index of the Code object in the Code cluster.
+//           |
+//
+intptr_t Serializer::GetCodeIndex(CodePtr code) {
+  // In precompiled mode a Code object is uniquely identified by its
+  // instructions (because ProgramVisitor::DedupInstructions will dedup Code
+  // objects with the same instructions).
+  if (code == StubCode::LazyCompile().ptr() && !vm_) {
+    return 0;
+  } else if (FLAG_precompiled_mode) {
+    const intptr_t ref = heap_->GetObjectId(code);
+    ASSERT(!IsReachableReference(ref) == Code::IsDiscarded(code));
+
+    const intptr_t base =
+        (vm_ || current_loading_unit_id() == LoadingUnit::kRootId)
+            ? 0
+            : num_base_objects_;
+
+    // Check if we are referring to the Code object which originates from the
+    // parent loading unit. In this case we write out the reference of this
+    // object.
+    if (!Code::IsDiscarded(code) && ref < base) {
+      RELEASE_ASSERT(current_loading_unit_id() != LoadingUnit::kRootId);
+      return 1 + ref;
+    }
+
+    // Otherwise the code object must either be discarded or originate from
+    // the Code cluster.
+    ASSERT(Code::IsDiscarded(code) || (code_cluster_->first_ref() <= ref &&
+                                       ref <= code_cluster_->last_ref()));
+
+    // If the Code object is non-deferred then simply write out the index of
+    // its entry point, otherwise write out the index of the deferred code
+    // object.
+    if (ref < code_cluster_->first_deferred_ref()) {
+      const intptr_t key = static_cast<intptr_t>(code->untag()->instructions_);
+      ASSERT(code_index_.HasKey(key));
+      const intptr_t result = code_index_.Lookup(key);
+      ASSERT(0 < result && result <= code_index_.Length());
+      // Note: result already has + 1.
+      return base + result;
+    } else {
+      // Note: only root snapshot can have deferred Code objects in the
+      // cluster.
+      const intptr_t cluster_index = ref - code_cluster_->first_deferred_ref();
+      return 1 + base + code_index_.Length() + cluster_index;
+    }
+  } else {
+    const intptr_t ref = heap_->GetObjectId(code);
+    ASSERT(IsAllocatedReference(ref));
+    ASSERT(code_cluster_->first_ref() <= ref &&
+           ref <= code_cluster_->last_ref());
+    return 1 + (ref - code_cluster_->first_ref());
+  }
+}
+#endif  // defined(DART_PRECOMPILER)
+
+void Serializer::PrepareInstructions(
+    const CompressedStackMaps& canonical_stack_map_entries) {
+  if (!Snapshot::IncludesCode(kind())) return;
 
   // Code objects that have identical/duplicate instructions must be adjacent in
   // the order that Code objects are written because the encoding of the
@@ -6599,14 +6832,14 @@
   // that allows for mapping return addresses back to Code objects depends on
   // this sorting.
   if (code_cluster_ != nullptr) {
-    CodeSerializationCluster::Sort(code_cluster_->objects());
+    CodeSerializationCluster::Sort(this, code_cluster_->objects());
   }
   if ((loading_units_ != nullptr) &&
       (current_loading_unit_id_ == LoadingUnit::kRootId)) {
     for (intptr_t i = LoadingUnit::kRootId + 1; i < loading_units_->length();
          i++) {
       auto unit_objects = loading_units_->At(i)->deferred_objects();
-      CodeSerializationCluster::Sort(unit_objects);
+      CodeSerializationCluster::Sort(this, unit_objects);
       ASSERT(unit_objects->length() == 0 || code_cluster_ != nullptr);
       for (intptr_t j = 0; j < unit_objects->length(); j++) {
         code_cluster_->deferred_objects()->Add(unit_objects->At(j)->ptr());
@@ -6640,10 +6873,165 @@
     GrowableArray<ImageWriterCommand> writer_commands;
     RelocateCodeObjects(vm_, &code_objects, &writer_commands);
     image_writer_->PrepareForSerialization(&writer_commands);
-    return code_objects.length();
+
+    if (code_objects.length() == 0) {
+      return;
+    }
+
+    // Build UntaggedInstructionsTable::Data object to be added to the
+    // read-only data section of the snapshot. It contains:
+    //
+    //    - a binary search table mapping an Instructions entry point to its
+    //      stack maps (by offset from the beginning of the Data object);
+    //    - followed by stack maps bytes;
+    //    - followed by canonical stack map entries.
+    //
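+    // Bookkeeping for a deduplicated CompressedStackMaps object: how many
+    // Code objects use it and the offset at which its bytes were written
+    // into the Data object.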
+    struct StackMapInfo : public ZoneAllocated {
+      CompressedStackMapsPtr map;
+      intptr_t use_count;
+      uint32_t offset;
+    };
+
+    GrowableArray<StackMapInfo*> stack_maps;
+    IntMap<StackMapInfo*> stack_maps_info;
+
+    // Build code_index_ (which maps each Instructions object to the order
+    // in which it appears in the code section) and collect all stack maps.
+    // We also find the first Instructions object that is going to have a
+    // Code object associated with it. This allows us to reduce the binary
+    // search space when searching specifically for a code object at runtime.
+    uint32_t total = 0;
+    intptr_t not_discarded_count = 0;
+    uint32_t first_entry_with_code = 0;
+    for (auto& cmd : writer_commands) {
+      if (cmd.op == ImageWriterCommand::InsertInstructionOfCode) {
+        RELEASE_ASSERT(code_objects[total] ==
+                       cmd.insert_instruction_of_code.code);
+        ASSERT(!Code::IsDiscarded(cmd.insert_instruction_of_code.code) ||
+               (not_discarded_count == 0));
+        if (!Code::IsDiscarded(cmd.insert_instruction_of_code.code)) {
+          if (not_discarded_count == 0) {
+            first_entry_with_code = total;
+          }
+          not_discarded_count++;
+        }
+        total++;
+
+        // Update code_index_.
+        {
+          const intptr_t instr = static_cast<intptr_t>(
+              cmd.insert_instruction_of_code.code->untag()->instructions_);
+          ASSERT(!code_index_.HasKey(instr));
+          code_index_.Insert(instr, total);
+        }
+
+        // Collect stack maps.
+        CompressedStackMapsPtr stack_map =
+            cmd.insert_instruction_of_code.code->untag()->compressed_stackmaps_;
+        const intptr_t key = static_cast<intptr_t>(stack_map);
+
+        if (stack_maps_info.HasKey(key)) {
+          stack_maps_info.Lookup(key)->use_count++;
+        } else {
+          auto info = new StackMapInfo();
+          info->map = stack_map;
+          info->use_count = 1;
+          stack_maps.Add(info);
+          stack_maps_info.Insert(key, info);
+        }
+      }
+    }
+    ASSERT(static_cast<intptr_t>(total) == code_index_.Length());
+    instructions_table_len_ = not_discarded_count;
+
+    // Sort stack maps by usage so that the most commonly used stack maps are
+    // grouped together at the start of the Data object.
+    stack_maps.Sort([](StackMapInfo* const* a, StackMapInfo* const* b) {
+      if ((*a)->use_count < (*b)->use_count) return 1;
+      if ((*a)->use_count > (*b)->use_count) return -1;
+      return 0;
+    });
+
+    // Build Data object.
+    MallocWriteStream pc_mapping(4 * KB);
+
+    // Write the header out.
+    {
+      UntaggedInstructionsTable::Data header;
+      memset(&header, 0, sizeof(header));
+      header.length = total;
+      header.first_entry_with_code = first_entry_with_code;
+      pc_mapping.WriteFixed<UntaggedInstructionsTable::Data>(header);
+    }
+
+    // Reserve space for the binary search table.
+    for (auto& cmd : writer_commands) {
+      if (cmd.op == ImageWriterCommand::InsertInstructionOfCode) {
+        pc_mapping.WriteFixed<UntaggedInstructionsTable::DataEntry>({0, 0});
+      }
+    }
+
+    // Now write the collected stack maps after the binary search table.
+    auto write_stack_map = [&](CompressedStackMapsPtr smap) {
+      const auto flags_and_size = smap->untag()->payload()->flags_and_size;
+      const auto payload_size =
+          UntaggedCompressedStackMaps::SizeField::decode(flags_and_size);
+      pc_mapping.WriteFixed<uint32_t>(flags_and_size);
+      pc_mapping.WriteBytes(smap->untag()->payload()->data(), payload_size);
+    };
+
+    for (auto sm : stack_maps) {
+      sm->offset = pc_mapping.bytes_written();
+      write_stack_map(sm->map);
+    }
+
+    // Write canonical entries (if any).
+    if (!canonical_stack_map_entries.IsNull()) {
+      auto header = reinterpret_cast<UntaggedInstructionsTable::Data*>(
+          pc_mapping.buffer());
+      header->canonical_stack_map_entries_offset = pc_mapping.bytes_written();
+      write_stack_map(canonical_stack_map_entries.ptr());
+    }
+    const auto total_bytes = pc_mapping.bytes_written();
+
+    // Now that we have offsets for all stack maps, we can write the binary
+    // search table.
+    pc_mapping.SetPosition(
+        sizeof(UntaggedInstructionsTable::Data));  // Skip the header.
+    for (auto& cmd : writer_commands) {
+      if (cmd.op == ImageWriterCommand::InsertInstructionOfCode) {
+        CompressedStackMapsPtr smap =
+            cmd.insert_instruction_of_code.code->untag()->compressed_stackmaps_;
+        const auto offset =
+            stack_maps_info.Lookup(static_cast<intptr_t>(smap))->offset;
+        const auto entry = image_writer_->GetTextOffsetFor(
+            Code::InstructionsOf(cmd.insert_instruction_of_code.code),
+            cmd.insert_instruction_of_code.code);
+
+        pc_mapping.WriteFixed<UntaggedInstructionsTable::DataEntry>(
+            {static_cast<uint32_t>(entry), offset});
+      }
+    }
+    // Restore position so that Steal does not truncate the buffer.
+    pc_mapping.SetPosition(total_bytes);
+
+    intptr_t length = 0;
+    uint8_t* bytes = pc_mapping.Steal(&length);
+
+    instructions_table_rodata_offset_ =
+        image_writer_->AddBytesToData(bytes, length);
+    // Attribute all bytes in this object to the root for simplicity.
+    if (profile_writer_ != nullptr) {
+      const auto offset_space = vm_ ? IdSpace::kVmData : IdSpace::kIsolateData;
+      profile_writer_->AttributeReferenceTo(
+          V8SnapshotProfileWriter::kArtificialRootId,
+          V8SnapshotProfileWriter::Reference::Property(
+              "<instructions-table-rodata>"),
+          {offset_space, instructions_table_rodata_offset_});
+    }
   }
 #endif  // defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
-  return 0;
 }
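For orientation, the read-only blob assembled by PrepareInstructions above has roughly the following shape. This is an illustrative sketch reconstructed only from the writes in this hunk: the field names mirror what the code touches, but the exact packing of UntaggedInstructionsTable::Data (and any fields cleared by the memset yet never written here) may differ in the real VM headers.

  // Sketch of the instructions-table rodata layout (illustrative only).
  struct DataEntry {
    uint32_t pc_offset;         // text offset of an Instructions entry point
    uint32_t stack_map_offset;  // offset of its stack map within this blob
  };
  struct Data {
    uint32_t length;                              // number of DataEntry records
    uint32_t first_entry_with_code;               // first entry backed by a non-discarded Code
    uint32_t canonical_stack_map_entries_offset;  // 0 when no canonical entries are written
    // DataEntry entries[length];                 // binary search table, in text-offset order
    // Stack map records follow: a uint32_t flags_and_size word plus payload
    // bytes, most frequently used maps first, then the canonical entries.
  };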
 
 void Serializer::WriteInstructions(InstructionsPtr instr,
@@ -6669,25 +7057,16 @@
         {offset_space, offset});
   }
 
+  if (Code::IsDiscarded(code)) {
+    // Discarded Code objects are not supported in the vm isolate snapshot.
+    ASSERT(!vm_);
+    return;
+  }
+
   if (FLAG_precompiled_mode) {
-    ASSERT(offset != 0);
-    RELEASE_ASSERT(offset >= previous_text_offset_);
-    const uint32_t delta = offset - previous_text_offset_;
-    WriteUnsigned(delta);
     const uint32_t payload_info =
         (unchecked_offset << 1) | (Code::HasMonomorphicEntry(code) ? 0x1 : 0x0);
     WriteUnsigned(payload_info);
-    previous_text_offset_ = offset;
-
-    if (Code::IsDiscarded(code)) {
-      // Discarded Code objects are not supported in the vm isolate snapshot.
-      ASSERT(!vm_);
-      // Stack maps of discarded Code objects are written along with
-      // instructions so they can be added to instructions table during
-      // deserialization.
-      WritePropertyRef(code->untag()->compressed_stackmaps_,
-                       "compressed_stackmaps_");
-    }
     return;
   }
 #endif
@@ -6718,11 +7097,11 @@
   }
   return image_writer_->data_size();
 }
-#endif
+#endif  // !defined(DART_PRECOMPILED_RUNTIME)
 
 void Serializer::Push(ObjectPtr object) {
-  if (object->IsHeapObject() && object->IsCode() &&
-      !Snapshot::IncludesCode(kind_)) {
+  const bool is_code = object->IsHeapObject() && object->IsCode();
+  if (is_code && !Snapshot::IncludesCode(kind_)) {
     return;  // Do not trace, will write null.
   }
 
@@ -6740,7 +7119,9 @@
     heap_->SetObjectId(object, kUnallocatedReference);
     ASSERT(IsReachableReference(heap_->GetObjectId(object)));
     stack_.Add(object);
-    num_written_objects_++;
+    if (!(is_code && Code::IsDiscarded(Code::RawCast(object)))) {
+      num_written_objects_++;
+    }
 #if defined(SNAPSHOT_BACKTRACE)
     parent_pairs_.Add(&Object::Handle(zone_, object));
     parent_pairs_.Add(&Object::Handle(zone_, current_parent_));
@@ -6852,6 +7233,11 @@
 #define CID_CLUSTER(Type)                                                      \
   reinterpret_cast<Type##SerializationCluster*>(clusters_by_cid_[k##Type##Cid])
 
+const CompressedStackMaps& SerializationRoots::canonicalized_stack_map_entries()
+    const {
+  return CompressedStackMaps::Handle();
+}
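The default implementation added here returns an empty handle, so roots that do not collect canonical stack map entries contribute nothing to the new rodata section. A roots implementation that does collect them can surface the shared entries by overriding the virtual; the class and member names below are invented purely for illustration, and the other root methods are elided.

  class ExampleRoots : public SerializationRoots {
   public:
    explicit ExampleRoots(const CompressedStackMaps& canonical)
        : canonical_(canonical) {}
    const CompressedStackMaps& canonicalized_stack_map_entries() const override {
      return canonical_;  // entries gathered ahead of serialization (hypothetical)
    }
    // AddBaseObjects/PushRoots/WriteRoots omitted for brevity.
   private:
    const CompressedStackMaps& canonical_;
  };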
+
 ZoneGrowableArray<Object*>* Serializer::Serialize(SerializationRoots* roots) {
   // While object_currently_writing_ is initialized to the artificial root, we
   // set up a scope to ensure proper flushing to the profile.
@@ -6967,7 +7353,7 @@
     cid_clusters[cid] = cluster;
   }
 
-  instructions_table_len_ = PrepareInstructions();
+  PrepareInstructions(roots->canonicalized_stack_map_entries());
 
   intptr_t num_objects = num_base_objects_ + num_written_objects_;
 #if defined(ARCH_IS_64_BIT)
@@ -6987,6 +7373,7 @@
   }
   ASSERT((instructions_table_len_ == 0) || FLAG_precompiled_mode);
   WriteUnsigned(instructions_table_len_);
+  WriteUnsigned(instructions_table_rodata_offset_);
 
   for (SerializationCluster* cluster : clusters) {
     cluster->WriteAndMeasureAlloc(this);
@@ -7030,6 +7417,7 @@
   PrintSnapshotSizes();
 
   heap()->ResetObjectIdTable();
+
   return objects_;
 }
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
@@ -7092,12 +7480,6 @@
   }
 
   ASSERT(code_cluster_ != nullptr);
-  // Reference IDs in a cluster are allocated sequentially, so we can use the
-  // first code object's reference ID to calculate the cluster index.
-  const intptr_t first_code_id = RefId(code_cluster_->objects()->At(0));
-  // The first object in the code cluster must have its reference ID allocated.
-  ASSERT(IsAllocatedReference(first_code_id));
-
   // If instructions can be deduped, the code order table in the deserializer
   // may not contain all Code objects in the snapshot. Thus, we write the ID
   // for the first code object here so we can retrieve it during deserialization
@@ -7112,8 +7494,7 @@
   // We could also map Code objects to the first Code object in the cluster with
   // the same entry point and serialize that ID instead, but that loses
   // information about which Code object was originally referenced.
-  ASSERT(first_code_id <= compiler::target::kWordMax);
-  WriteUnsigned(first_code_id);
+  WriteUnsigned(code_cluster_->first_ref());
 
   CodePtr previous_code = nullptr;
   CodePtr recent[kDispatchTableRecentCount] = {nullptr};
@@ -7154,11 +7535,9 @@
     }
     // We have a non-repeated, non-recent entry, so encode the reference ID of
     // the code object and emit that.
-    auto const object_id = RefId(code);
-    // Make sure that this code object has an allocated reference ID.
-    ASSERT(IsAllocatedReference(object_id));
+    auto const code_index = GetCodeIndex(code);
     // Use the index in the code cluster, not in the snapshot..
-    auto const encoded = kDispatchTableIndexBase + (object_id - first_code_id);
+    auto const encoded = kDispatchTableIndexBase + code_index;
     ASSERT(encoded <= compiler::target::kWordMax);
     Write(encoded);
     recent[recent_index] = code;
@@ -7225,8 +7604,7 @@
     }
     if (instructions_table_len_ > 0) {
       const intptr_t memory_size =
-          compiler::target::InstructionsTable::InstanceSize(
-              instructions_table_len_) +
+          compiler::target::InstructionsTable::InstanceSize() +
           compiler::target::Array::InstanceSize(instructions_table_len_);
       clusters_by_size.Add(new (zone_) FakeSerializationCluster(
           "InstructionsTable", instructions_table_len_, 0, memory_size));
@@ -7278,7 +7656,6 @@
       image_reader_(nullptr),
       refs_(nullptr),
       next_ref_index_(kFirstReference),
-      previous_text_offset_(0),
       clusters_(nullptr),
       initial_field_table_(thread->isolate_group()->initial_field_table()),
       is_non_root_unit_(is_non_root_unit),
@@ -7480,25 +7857,25 @@
   return NULL;
 }
 
-void Deserializer::ReadDispatchTable(ReadStream* stream,
-                                     bool deferred,
-                                     intptr_t deferred_code_start_index,
-                                     intptr_t deferred_code_end_index) {
+void Deserializer::ReadDispatchTable(
+    ReadStream* stream,
+    bool deferred,
+    const InstructionsTable& root_instruction_table,
+    intptr_t deferred_code_start_index,
+    intptr_t deferred_code_end_index) {
 #if defined(DART_PRECOMPILED_RUNTIME)
   const uint8_t* table_snapshot_start = stream->AddressOfCurrentPosition();
   const intptr_t length = stream->ReadUnsigned();
   if (length == 0) return;
 
-  // Not all Code objects may be in the code_order_table when instructions can
-  // be deduplicated. Thus, we serialize the reference ID of the first code
-  // object, from which we can get the reference ID for any code object.
   const intptr_t first_code_id = stream->ReadUnsigned();
+  deferred_code_start_index -= first_code_id;
+  deferred_code_end_index -= first_code_id;
 
   auto const IG = isolate_group();
   auto code = IG->object_store()->dispatch_table_null_error_stub();
   ASSERT(code != Code::null());
   uword null_entry = Code::EntryPointOf(code);
-  uword not_loaded_entry = StubCode::NotLoaded().EntryPoint();
 
   DispatchTable* table;
   if (deferred) {
@@ -7529,24 +7906,20 @@
     } else if (encoded <= kDispatchTableMaxRepeat) {
       repeat_count = encoded - 1;
     } else {
-      intptr_t cluster_index = encoded - kDispatchTableIndexBase;
+      const intptr_t code_index = encoded - kDispatchTableIndexBase;
       if (deferred) {
-        intptr_t id = first_code_id + cluster_index;
-        if ((deferred_code_start_index <= id) &&
-            (id < deferred_code_end_index)) {
-          // Deferred instructions are at the end of the instructions table.
-          value = instructions_table().EntryPointAt(
-              instructions_table().length() - deferred_code_end_index + id);
+        const intptr_t code_id =
+            CodeIndexToClusterIndex(root_instruction_table, code_index);
+        if ((deferred_code_start_index <= code_id) &&
+            (code_id < deferred_code_end_index)) {
+          auto code = static_cast<CodePtr>(Ref(first_code_id + code_id));
+          value = Code::EntryPointOf(code);
         } else {
           // Reuse old value from the dispatch table.
           value = array[i];
         }
       } else {
-        if (cluster_index < instructions_table().length()) {
-          value = instructions_table().EntryPointAt(cluster_index);
-        } else {
-          value = not_loaded_entry;
-        }
+        value = GetEntryPointByCodeIndex(code_index);
       }
       recent[recent_index] = value;
       recent_index = (recent_index + 1) & kDispatchTableRecentMask;
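Taken together with the serializer change above, a dispatch table entry now round-trips a code index rather than a cluster-relative reference ID. A minimal worked example of the non-deferred path, assuming GetCodeIndex(code) returned 7 for some entry (only the arithmetic matters; kDispatchTableIndexBase is whatever the VM defines):

  // Serializer:   Write(kDispatchTableIndexBase + 7);
  // Deserializer: code_index = encoded - kDispatchTableIndexBase;  // == 7
  //               value = GetEntryPointByCodeIndex(code_index);    // entry point for that code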
@@ -7682,11 +8055,8 @@
   return ApiError::New(msg, Heap::kOld);
 }
 
-void Deserializer::ReadInstructions(CodePtr code,
-                                    bool deferred,
-                                    bool discarded) {
+void Deserializer::ReadInstructions(CodePtr code, bool deferred) {
   if (deferred) {
-    ASSERT(!discarded);
 #if defined(DART_PRECOMPILED_RUNTIME)
     uword entry_point = StubCode::NotLoaded().EntryPoint();
     code->untag()->entry_point_ = entry_point;
@@ -7701,9 +8071,9 @@
   }
 
 #if defined(DART_PRECOMPILED_RUNTIME)
-  previous_text_offset_ += ReadUnsigned();
-  const uword payload_start =
-      image_reader_->GetBareInstructionsAt(previous_text_offset_);
+  const uword payload_start = instructions_table_.EntryPointAt(
+      instructions_table_.rodata()->first_entry_with_code +
+      instructions_index_);
   const uint32_t payload_info = ReadUnsigned();
   const uint32_t unchecked_offset = payload_info >> 1;
   const bool has_monomorphic_entrypoint = (payload_info & 0x1) == 0x1;
@@ -7717,23 +8087,15 @@
   const uword monomorphic_entry_point =
       payload_start + monomorphic_entry_offset;
 
-  ObjectPtr code_descriptor = code;
-  if (discarded) {
-    code_descriptor = static_cast<CompressedStackMapsPtr>(ReadRef());
-  }
+  instructions_table_.SetCodeAt(instructions_index_++, code);
 
-  instructions_table_.SetEntryAt(instructions_index_++, payload_start,
-                                 has_monomorphic_entrypoint, code_descriptor);
-
-  if (!discarded) {
-    // There are no serialized RawInstructions objects in this mode.
-    code->untag()->instructions_ = Instructions::null();
-    code->untag()->entry_point_ = entry_point;
-    code->untag()->unchecked_entry_point_ = entry_point + unchecked_offset;
-    code->untag()->monomorphic_entry_point_ = monomorphic_entry_point;
-    code->untag()->monomorphic_unchecked_entry_point_ =
-        monomorphic_entry_point + unchecked_offset;
-  }
+  // There are no serialized RawInstructions objects in this mode.
+  code->untag()->instructions_ = Instructions::null();
+  code->untag()->entry_point_ = entry_point;
+  code->untag()->unchecked_entry_point_ = entry_point + unchecked_offset;
+  code->untag()->monomorphic_entry_point_ = monomorphic_entry_point;
+  code->untag()->monomorphic_unchecked_entry_point_ =
+      monomorphic_entry_point + unchecked_offset;
 #else
   InstructionsPtr instr = image_reader_->GetInstructionsAt(Read<uint32_t>());
   uint32_t unchecked_offset = ReadUnsigned();
@@ -7751,15 +8113,21 @@
 
 void Deserializer::EndInstructions() {
 #if defined(DART_PRECOMPILED_RUNTIME)
+  if (instructions_table_.IsNull()) {
+    ASSERT(instructions_index_ == 0);
+    return;
+  }
+
+  const auto& code_objects =
+      Array::Handle(instructions_table_.ptr()->untag()->code_objects());
+  ASSERT(code_objects.Length() == instructions_index_);
+
   uword previous_end = image_reader_->GetBareInstructionsEnd();
   for (intptr_t i = instructions_index_ - 1; i >= 0; --i) {
-    ObjectPtr descriptor = instructions_table_.DescriptorAt(i);
-    uword start = instructions_table_.PayloadStartAt(i);
+    CodePtr code = Code::RawCast(code_objects.At(i));
+    uword start = Code::PayloadStartOf(code);
     ASSERT(start <= previous_end);
-    if (descriptor->IsCode()) {
-      CodePtr code = static_cast<CodePtr>(descriptor);
-      code->untag()->instructions_length_ = previous_end - start;
-    }
+    code->untag()->instructions_length_ = previous_end - start;
     previous_end = start;
   }
 
@@ -7772,6 +8140,8 @@
   }
   if ((tables.Length() == 0) ||
       (tables.At(tables.Length() - 1) != instructions_table_.ptr())) {
+    ASSERT((!is_non_root_unit_ && tables.Length() == 0) ||
+           (is_non_root_unit_ && tables.Length() > 0));
     tables.Add(instructions_table_, Heap::kOld);
   }
 #endif
@@ -7805,6 +8175,8 @@
   num_clusters_ = ReadUnsigned();
   const intptr_t initial_field_table_len = ReadUnsigned();
   const intptr_t instructions_table_len = ReadUnsigned();
+  const uint32_t instruction_table_data_offset = ReadUnsigned();
+  USE(instruction_table_data_offset);
 
   clusters_ = new DeserializationCluster*[num_clusters_];
   refs = Array::New(num_objects_ + kFirstReference, Heap::kOld);
@@ -7818,8 +8190,18 @@
     ASSERT(FLAG_precompiled_mode);
     const uword start_pc = image_reader_->GetBareInstructionsAt(0);
     const uword end_pc = image_reader_->GetBareInstructionsEnd();
-    instructions_table_ =
-        InstructionsTable::New(instructions_table_len, start_pc, end_pc);
+    uword instruction_table_data = 0;
+    if (instruction_table_data_offset != 0) {
+      // NoSafepointScope to satisfy the assertion in DataStart.
+      // InstructionsTable data resides in read-only memory and is immovable
+      // and immortal, making it safe to use the DataStart result outside of
+      // the NoSafepointScope.
+      NoSafepointScope no_safepoint;
+      instruction_table_data = reinterpret_cast<uword>(
+          OneByteString::DataStart(String::Handle(static_cast<StringPtr>(
+              image_reader_->GetObjectAt(instruction_table_data_offset)))));
+    }
+    instructions_table_ = InstructionsTable::New(
+        instructions_table_len, start_pc, end_pc, instruction_table_data);
   }
 #else
   ASSERT(instructions_table_len == 0);
diff --git a/runtime/vm/app_snapshot.h b/runtime/vm/app_snapshot.h
index 7bc3032..ce084c0 100644
--- a/runtime/vm/app_snapshot.h
+++ b/runtime/vm/app_snapshot.h
@@ -176,6 +176,8 @@
   virtual void AddBaseObjects(Serializer* serializer) = 0;
   virtual void PushRoots(Serializer* serializer) = 0;
   virtual void WriteRoots(Serializer* serializer) = 0;
+
+  virtual const CompressedStackMaps& canonicalized_stack_map_entries() const;
 };
 
 class DeserializationRoots {
@@ -228,9 +230,12 @@
   void AddBaseObject(ObjectPtr base_object,
                      const char* type = nullptr,
                      const char* name = nullptr);
+
   intptr_t AssignRef(ObjectPtr object);
   intptr_t AssignArtificialRef(ObjectPtr object = nullptr);
 
+  intptr_t GetCodeIndex(CodePtr code);
+
   void Push(ObjectPtr object);
 
   void AddUntracedRef() { num_written_objects_++; }
@@ -418,8 +423,8 @@
   }
 
   // Sorts Code objects and reorders instructions before writing snapshot.
-  // Returns length of instructions table (in bare instructions mode).
-  intptr_t PrepareInstructions();
+  // Builds a binary search table for stack maps.
+  void PrepareInstructions(const CompressedStackMaps& canonical_smap);
 
   void WriteInstructions(InstructionsPtr instr,
                          uint32_t unchecked_offset,
@@ -508,12 +513,12 @@
   intptr_t num_base_objects_;
   intptr_t num_written_objects_;
   intptr_t next_ref_index_;
-  intptr_t previous_text_offset_;
   FieldTable* initial_field_table_;
 
   intptr_t dispatch_table_size_ = 0;
   intptr_t bytes_heap_allocated_ = 0;
   intptr_t instructions_table_len_ = 0;
+  intptr_t instructions_table_rodata_offset_ = 0;
 
   // True if writing VM snapshot, false for Isolate snapshot.
   bool vm_;
@@ -537,6 +542,7 @@
 
 #if defined(DART_PRECOMPILER)
   IntMap<intptr_t> deduped_instructions_sources_;
+  IntMap<intptr_t> code_index_;
 #endif
 
   intptr_t current_loading_unit_id_ = 0;
@@ -657,6 +663,16 @@
     return refs_->untag()->element(index);
   }
 
+  CodePtr GetCodeByIndex(intptr_t code_index, uword* entry_point) const;
+  uword GetEntryPointByCodeIndex(intptr_t code_index) const;
+
+  // If |code_index| corresponds to a non-discarded Code object, returns the
+  // index within the code cluster that corresponds to this Code object.
+  // Otherwise, if |code_index| corresponds to a discarded Code object,
+  // returns -1.
+  static intptr_t CodeIndexToClusterIndex(const InstructionsTable& table,
+                                          intptr_t code_index);
+
   ObjectPtr ReadRef() { return Ref(ReadUnsigned()); }
 
   template <typename T, typename... P>
@@ -685,7 +701,7 @@
     return Read<int32_t>();
   }
 
-  void ReadInstructions(CodePtr code, bool deferred, bool discarded);
+  void ReadInstructions(CodePtr code, bool deferred);
   void EndInstructions();
   ObjectPtr GetObjectAt(uint32_t offset) const;
 
@@ -694,10 +710,12 @@
   DeserializationCluster* ReadCluster();
 
   void ReadDispatchTable() {
-    ReadDispatchTable(&stream_, /*deferred=*/false, -1, -1);
+    ReadDispatchTable(&stream_, /*deferred=*/false, InstructionsTable::Handle(),
+                      -1, -1);
   }
   void ReadDispatchTable(ReadStream* stream,
                          bool deferred,
+                         const InstructionsTable& root_instruction_table,
                          intptr_t deferred_code_start_index,
                          intptr_t deferred_code_end_index);
 
@@ -708,10 +726,13 @@
   FieldTable* initial_field_table() const { return initial_field_table_; }
   bool is_non_root_unit() const { return is_non_root_unit_; }
   void set_code_start_index(intptr_t value) { code_start_index_ = value; }
-  intptr_t code_start_index() { return code_start_index_; }
+  intptr_t code_start_index() const { return code_start_index_; }
+  void set_code_stop_index(intptr_t value) { code_stop_index_ = value; }
+  intptr_t code_stop_index() const { return code_stop_index_; }
   const InstructionsTable& instructions_table() const {
     return instructions_table_;
   }
+  intptr_t num_base_objects() const { return num_base_objects_; }
 
  private:
   Heap* heap_;
@@ -724,8 +745,8 @@
   intptr_t num_clusters_;
   ArrayPtr refs_;
   intptr_t next_ref_index_;
-  intptr_t previous_text_offset_;
   intptr_t code_start_index_ = 0;
+  intptr_t code_stop_index_ = 0;
   intptr_t instructions_index_ = 0;
   DeserializationCluster** clusters_;
   FieldTable* initial_field_table_;
diff --git a/runtime/vm/bitmap_test.cc b/runtime/vm/bitmap_test.cc
index 2fdab17..0e7837f 100644
--- a/runtime/vm/bitmap_test.cc
+++ b/runtime/vm/bitmap_test.cc
@@ -53,7 +53,7 @@
   // Create a CompressedStackMaps object and verify its contents.
   const auto& maps1 = CompressedStackMaps::Handle(
       thread->zone(), MapsFromBuilder(thread->zone(), builder1));
-  CompressedStackMaps::Iterator it1(thread, maps1);
+  auto it1 = maps1.iterator(thread);
   EXPECT(it1.MoveNext());
 
   EXPECT_EQ(kTestPcOffset, it1.pc_offset());
@@ -86,7 +86,7 @@
 
   const auto& maps2 = CompressedStackMaps::Handle(
       thread->zone(), MapsFromBuilder(thread->zone(), builder1));
-  CompressedStackMaps::Iterator it2(thread, maps2);
+  auto it2 = maps2.iterator(thread);
   EXPECT(it2.MoveNext());
 
   EXPECT_EQ(kTestPcOffset, it2.pc_offset());
diff --git a/runtime/vm/compiler/assembler/disassembler.cc b/runtime/vm/compiler/assembler/disassembler.cc
index ac67f41..2777ccd 100644
--- a/runtime/vm/compiler/assembler/disassembler.cc
+++ b/runtime/vm/compiler/assembler/disassembler.cc
@@ -296,10 +296,9 @@
   {
     const auto& stackmaps =
         CompressedStackMaps::Handle(zone, code.compressed_stackmaps());
-    CompressedStackMaps::Iterator it(thread, stackmaps);
     TextBuffer buffer(100);
     buffer.Printf("StackMaps for function '%s' {\n", function_fullname);
-    it.WriteToBuffer(&buffer, "\n");
+    stackmaps.WriteToBuffer(&buffer, "\n");
     buffer.AddString("}\n");
     THR_Print("%s", buffer.buffer());
   }
diff --git a/runtime/vm/compiler/runtime_api.h b/runtime/vm/compiler/runtime_api.h
index 1cdca3d..c7fd307 100644
--- a/runtime/vm/compiler/runtime_api.h
+++ b/runtime/vm/compiler/runtime_api.h
@@ -891,10 +891,14 @@
 
 class CompressedStackMaps : public AllStatic {
  public:
-  static word HeaderSize();
+  static word HeaderSize() { return ObjectHeaderSize() + PayloadHeaderSize(); }
   static word InstanceSize();
   static word InstanceSize(word payload_size);
   FINAL_CLASS();
+
+ private:
+  static word ObjectHeaderSize();
+  static word PayloadHeaderSize();
 };
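With the header split in two, HeaderSize() simply adds the two pieces. As a quick worked example using the extracted constants later in this patch (values are per-architecture; these are the ones shown below):

  // 32-bit targets: HeaderSize() == ObjectHeaderSize() + PayloadHeaderSize() == 4 + 4 == 8
  // 64-bit targets: HeaderSize() == ObjectHeaderSize() + PayloadHeaderSize() == 8 + 4 == 12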
 
 class LocalVarDescriptors : public AllStatic {
diff --git a/runtime/vm/compiler/runtime_offsets_extracted.h b/runtime/vm/compiler/runtime_offsets_extracted.h
index 5c98bc9..c29345b 100644
--- a/runtime/vm/compiler/runtime_offsets_extracted.h
+++ b/runtime/vm/compiler/runtime_offsets_extracted.h
@@ -39,10 +39,6 @@
     ExceptionHandlers_elements_start_offset = 12;
 static constexpr dart::compiler::target::word ExceptionHandlers_element_size =
     12;
-static constexpr dart::compiler::target::word
-    InstructionsTable_elements_start_offset = 20;
-static constexpr dart::compiler::target::word InstructionsTable_element_size =
-    4;
 static constexpr dart::compiler::target::word ObjectPool_elements_start_offset =
     8;
 static constexpr dart::compiler::target::word ObjectPool_element_size = 4;
@@ -499,8 +495,10 @@
 static constexpr dart::compiler::target::word Closure_InstanceSize = 28;
 static constexpr dart::compiler::target::word ClosureData_InstanceSize = 20;
 static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 8;
-static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
-    8;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_ObjectHeaderSize = 4;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word Context_header_size = 12;
 static constexpr dart::compiler::target::word Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word DynamicLibrary_InstanceSize = 8;
@@ -526,6 +524,8 @@
     8;
 static constexpr dart::compiler::target::word
     InstructionsSection_UnalignedHeaderSize = 20;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+    24;
 static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word Integer_InstanceSize = 4;
 static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -599,10 +599,6 @@
     ExceptionHandlers_elements_start_offset = 24;
 static constexpr dart::compiler::target::word ExceptionHandlers_element_size =
     12;
-static constexpr dart::compiler::target::word
-    InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word InstructionsTable_element_size =
-    4;
 static constexpr dart::compiler::target::word ObjectPool_elements_start_offset =
     16;
 static constexpr dart::compiler::target::word ObjectPool_element_size = 8;
@@ -1070,8 +1066,10 @@
 static constexpr dart::compiler::target::word Closure_InstanceSize = 56;
 static constexpr dart::compiler::target::word ClosureData_InstanceSize = 40;
 static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 16;
-static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
-    16;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word Context_header_size = 24;
 static constexpr dart::compiler::target::word Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word DynamicLibrary_InstanceSize = 16;
@@ -1097,6 +1095,8 @@
     16;
 static constexpr dart::compiler::target::word
     InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+    48;
 static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -1171,10 +1171,6 @@
     ExceptionHandlers_elements_start_offset = 12;
 static constexpr dart::compiler::target::word ExceptionHandlers_element_size =
     12;
-static constexpr dart::compiler::target::word
-    InstructionsTable_elements_start_offset = 20;
-static constexpr dart::compiler::target::word InstructionsTable_element_size =
-    4;
 static constexpr dart::compiler::target::word ObjectPool_elements_start_offset =
     8;
 static constexpr dart::compiler::target::word ObjectPool_element_size = 4;
@@ -1628,8 +1624,10 @@
 static constexpr dart::compiler::target::word Closure_InstanceSize = 28;
 static constexpr dart::compiler::target::word ClosureData_InstanceSize = 20;
 static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 8;
-static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
-    8;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_ObjectHeaderSize = 4;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word Context_header_size = 12;
 static constexpr dart::compiler::target::word Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word DynamicLibrary_InstanceSize = 8;
@@ -1655,6 +1653,8 @@
     8;
 static constexpr dart::compiler::target::word
     InstructionsSection_UnalignedHeaderSize = 20;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+    24;
 static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word Integer_InstanceSize = 4;
 static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -1728,10 +1728,6 @@
     ExceptionHandlers_elements_start_offset = 24;
 static constexpr dart::compiler::target::word ExceptionHandlers_element_size =
     12;
-static constexpr dart::compiler::target::word
-    InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word InstructionsTable_element_size =
-    4;
 static constexpr dart::compiler::target::word ObjectPool_elements_start_offset =
     16;
 static constexpr dart::compiler::target::word ObjectPool_element_size = 8;
@@ -2200,8 +2196,10 @@
 static constexpr dart::compiler::target::word Closure_InstanceSize = 56;
 static constexpr dart::compiler::target::word ClosureData_InstanceSize = 40;
 static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 16;
-static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
-    16;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word Context_header_size = 24;
 static constexpr dart::compiler::target::word Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word DynamicLibrary_InstanceSize = 16;
@@ -2227,6 +2225,8 @@
     16;
 static constexpr dart::compiler::target::word
     InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+    48;
 static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -2301,10 +2301,6 @@
     ExceptionHandlers_elements_start_offset = 16;
 static constexpr dart::compiler::target::word ExceptionHandlers_element_size =
     12;
-static constexpr dart::compiler::target::word
-    InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word InstructionsTable_element_size =
-    4;
 static constexpr dart::compiler::target::word ObjectPool_elements_start_offset =
     16;
 static constexpr dart::compiler::target::word ObjectPool_element_size = 8;
@@ -2769,8 +2765,10 @@
 static constexpr dart::compiler::target::word Closure_InstanceSize = 32;
 static constexpr dart::compiler::target::word ClosureData_InstanceSize = 24;
 static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 16;
-static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
-    16;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word Context_header_size = 16;
 static constexpr dart::compiler::target::word Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word DynamicLibrary_InstanceSize = 16;
@@ -2796,6 +2794,8 @@
     16;
 static constexpr dart::compiler::target::word
     InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+    48;
 static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -2870,10 +2870,6 @@
     ExceptionHandlers_elements_start_offset = 16;
 static constexpr dart::compiler::target::word ExceptionHandlers_element_size =
     12;
-static constexpr dart::compiler::target::word
-    InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word InstructionsTable_element_size =
-    4;
 static constexpr dart::compiler::target::word ObjectPool_elements_start_offset =
     16;
 static constexpr dart::compiler::target::word ObjectPool_element_size = 8;
@@ -3339,8 +3335,10 @@
 static constexpr dart::compiler::target::word Closure_InstanceSize = 32;
 static constexpr dart::compiler::target::word ClosureData_InstanceSize = 24;
 static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 16;
-static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
-    16;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word Context_header_size = 16;
 static constexpr dart::compiler::target::word Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word DynamicLibrary_InstanceSize = 16;
@@ -3366,6 +3364,8 @@
     16;
 static constexpr dart::compiler::target::word
     InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+    48;
 static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -3439,10 +3439,6 @@
     ExceptionHandlers_elements_start_offset = 12;
 static constexpr dart::compiler::target::word ExceptionHandlers_element_size =
     12;
-static constexpr dart::compiler::target::word
-    InstructionsTable_elements_start_offset = 20;
-static constexpr dart::compiler::target::word InstructionsTable_element_size =
-    4;
 static constexpr dart::compiler::target::word ObjectPool_elements_start_offset =
     8;
 static constexpr dart::compiler::target::word ObjectPool_element_size = 4;
@@ -3896,8 +3892,10 @@
 static constexpr dart::compiler::target::word Closure_InstanceSize = 28;
 static constexpr dart::compiler::target::word ClosureData_InstanceSize = 20;
 static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 8;
-static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
-    8;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_ObjectHeaderSize = 4;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word Context_header_size = 12;
 static constexpr dart::compiler::target::word Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word DynamicLibrary_InstanceSize = 8;
@@ -3923,6 +3921,8 @@
     8;
 static constexpr dart::compiler::target::word
     InstructionsSection_UnalignedHeaderSize = 20;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+    24;
 static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word Integer_InstanceSize = 4;
 static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -3993,10 +3993,6 @@
     ExceptionHandlers_elements_start_offset = 24;
 static constexpr dart::compiler::target::word ExceptionHandlers_element_size =
     12;
-static constexpr dart::compiler::target::word
-    InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word InstructionsTable_element_size =
-    4;
 static constexpr dart::compiler::target::word ObjectPool_elements_start_offset =
     16;
 static constexpr dart::compiler::target::word ObjectPool_element_size = 8;
@@ -4461,8 +4457,10 @@
 static constexpr dart::compiler::target::word Closure_InstanceSize = 56;
 static constexpr dart::compiler::target::word ClosureData_InstanceSize = 40;
 static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 16;
-static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
-    16;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word Context_header_size = 24;
 static constexpr dart::compiler::target::word Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word DynamicLibrary_InstanceSize = 16;
@@ -4488,6 +4486,8 @@
     16;
 static constexpr dart::compiler::target::word
     InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+    48;
 static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -4559,10 +4559,6 @@
     ExceptionHandlers_elements_start_offset = 12;
 static constexpr dart::compiler::target::word ExceptionHandlers_element_size =
     12;
-static constexpr dart::compiler::target::word
-    InstructionsTable_elements_start_offset = 20;
-static constexpr dart::compiler::target::word InstructionsTable_element_size =
-    4;
 static constexpr dart::compiler::target::word ObjectPool_elements_start_offset =
     8;
 static constexpr dart::compiler::target::word ObjectPool_element_size = 4;
@@ -5013,8 +5009,10 @@
 static constexpr dart::compiler::target::word Closure_InstanceSize = 28;
 static constexpr dart::compiler::target::word ClosureData_InstanceSize = 20;
 static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 8;
-static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
-    8;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_ObjectHeaderSize = 4;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word Context_header_size = 12;
 static constexpr dart::compiler::target::word Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word DynamicLibrary_InstanceSize = 8;
@@ -5040,6 +5038,8 @@
     8;
 static constexpr dart::compiler::target::word
     InstructionsSection_UnalignedHeaderSize = 20;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+    24;
 static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word Integer_InstanceSize = 4;
 static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -5110,10 +5110,6 @@
     ExceptionHandlers_elements_start_offset = 24;
 static constexpr dart::compiler::target::word ExceptionHandlers_element_size =
     12;
-static constexpr dart::compiler::target::word
-    InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word InstructionsTable_element_size =
-    4;
 static constexpr dart::compiler::target::word ObjectPool_elements_start_offset =
     16;
 static constexpr dart::compiler::target::word ObjectPool_element_size = 8;
@@ -5579,8 +5575,10 @@
 static constexpr dart::compiler::target::word Closure_InstanceSize = 56;
 static constexpr dart::compiler::target::word ClosureData_InstanceSize = 40;
 static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 16;
-static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
-    16;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word Context_header_size = 24;
 static constexpr dart::compiler::target::word Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word DynamicLibrary_InstanceSize = 16;
@@ -5606,6 +5604,8 @@
     16;
 static constexpr dart::compiler::target::word
     InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+    48;
 static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -5677,10 +5677,6 @@
     ExceptionHandlers_elements_start_offset = 16;
 static constexpr dart::compiler::target::word ExceptionHandlers_element_size =
     12;
-static constexpr dart::compiler::target::word
-    InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word InstructionsTable_element_size =
-    4;
 static constexpr dart::compiler::target::word ObjectPool_elements_start_offset =
     16;
 static constexpr dart::compiler::target::word ObjectPool_element_size = 8;
@@ -6142,8 +6138,10 @@
 static constexpr dart::compiler::target::word Closure_InstanceSize = 32;
 static constexpr dart::compiler::target::word ClosureData_InstanceSize = 24;
 static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 16;
-static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
-    16;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word Context_header_size = 16;
 static constexpr dart::compiler::target::word Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word DynamicLibrary_InstanceSize = 16;
@@ -6169,6 +6167,8 @@
     16;
 static constexpr dart::compiler::target::word
     InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+    48;
 static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -6240,10 +6240,6 @@
     ExceptionHandlers_elements_start_offset = 16;
 static constexpr dart::compiler::target::word ExceptionHandlers_element_size =
     12;
-static constexpr dart::compiler::target::word
-    InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word InstructionsTable_element_size =
-    4;
 static constexpr dart::compiler::target::word ObjectPool_elements_start_offset =
     16;
 static constexpr dart::compiler::target::word ObjectPool_element_size = 8;
@@ -6706,8 +6702,10 @@
 static constexpr dart::compiler::target::word Closure_InstanceSize = 32;
 static constexpr dart::compiler::target::word ClosureData_InstanceSize = 24;
 static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 16;
-static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
-    16;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word Context_header_size = 16;
 static constexpr dart::compiler::target::word Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word DynamicLibrary_InstanceSize = 16;
@@ -6733,6 +6731,8 @@
     16;
 static constexpr dart::compiler::target::word
     InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+    48;
 static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -6813,10 +6813,6 @@
 static constexpr dart::compiler::target::word
     AOT_ExceptionHandlers_element_size = 12;
 static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_elements_start_offset = 20;
-static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_element_size = 4;
-static constexpr dart::compiler::target::word
     AOT_ObjectPool_elements_start_offset = 8;
 static constexpr dart::compiler::target::word AOT_ObjectPool_element_size = 4;
 static constexpr dart::compiler::target::word
@@ -7327,7 +7323,9 @@
 static constexpr dart::compiler::target::word AOT_ClosureData_InstanceSize = 20;
 static constexpr dart::compiler::target::word AOT_CodeSourceMap_HeaderSize = 8;
 static constexpr dart::compiler::target::word
-    AOT_CompressedStackMaps_HeaderSize = 8;
+    AOT_CompressedStackMaps_ObjectHeaderSize = 4;
+static constexpr dart::compiler::target::word
+    AOT_CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word AOT_Context_header_size = 12;
 static constexpr dart::compiler::target::word AOT_Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word AOT_DynamicLibrary_InstanceSize =
@@ -7355,6 +7353,8 @@
     AOT_Instructions_UnalignedHeaderSize = 8;
 static constexpr dart::compiler::target::word
     AOT_InstructionsSection_UnalignedHeaderSize = 20;
+static constexpr dart::compiler::target::word
+    AOT_InstructionsTable_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 4;
 static constexpr dart::compiler::target::word
@@ -7442,10 +7442,6 @@
 static constexpr dart::compiler::target::word
     AOT_ExceptionHandlers_element_size = 12;
 static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_element_size = 4;
-static constexpr dart::compiler::target::word
     AOT_ObjectPool_elements_start_offset = 16;
 static constexpr dart::compiler::target::word AOT_ObjectPool_element_size = 8;
 static constexpr dart::compiler::target::word
@@ -7960,7 +7956,9 @@
 static constexpr dart::compiler::target::word AOT_ClosureData_InstanceSize = 40;
 static constexpr dart::compiler::target::word AOT_CodeSourceMap_HeaderSize = 16;
 static constexpr dart::compiler::target::word
-    AOT_CompressedStackMaps_HeaderSize = 16;
+    AOT_CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    AOT_CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word AOT_Context_header_size = 24;
 static constexpr dart::compiler::target::word AOT_Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word AOT_DynamicLibrary_InstanceSize =
@@ -7988,6 +7986,8 @@
     AOT_Instructions_UnalignedHeaderSize = 16;
 static constexpr dart::compiler::target::word
     AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+    AOT_InstructionsTable_InstanceSize = 48;
 static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word
@@ -8078,10 +8078,6 @@
 static constexpr dart::compiler::target::word
     AOT_ExceptionHandlers_element_size = 12;
 static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_element_size = 4;
-static constexpr dart::compiler::target::word
     AOT_ObjectPool_elements_start_offset = 16;
 static constexpr dart::compiler::target::word AOT_ObjectPool_element_size = 8;
 static constexpr dart::compiler::target::word
@@ -8597,7 +8593,9 @@
 static constexpr dart::compiler::target::word AOT_ClosureData_InstanceSize = 40;
 static constexpr dart::compiler::target::word AOT_CodeSourceMap_HeaderSize = 16;
 static constexpr dart::compiler::target::word
-    AOT_CompressedStackMaps_HeaderSize = 16;
+    AOT_CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    AOT_CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word AOT_Context_header_size = 24;
 static constexpr dart::compiler::target::word AOT_Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word AOT_DynamicLibrary_InstanceSize =
@@ -8625,6 +8623,8 @@
     AOT_Instructions_UnalignedHeaderSize = 16;
 static constexpr dart::compiler::target::word
     AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+    AOT_InstructionsTable_InstanceSize = 48;
 static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word
@@ -8712,10 +8712,6 @@
 static constexpr dart::compiler::target::word
     AOT_ExceptionHandlers_element_size = 12;
 static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_element_size = 4;
-static constexpr dart::compiler::target::word
     AOT_ObjectPool_elements_start_offset = 16;
 static constexpr dart::compiler::target::word AOT_ObjectPool_element_size = 8;
 static constexpr dart::compiler::target::word
@@ -9229,7 +9225,9 @@
 static constexpr dart::compiler::target::word AOT_ClosureData_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_CodeSourceMap_HeaderSize = 16;
 static constexpr dart::compiler::target::word
-    AOT_CompressedStackMaps_HeaderSize = 16;
+    AOT_CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    AOT_CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word AOT_Context_header_size = 16;
 static constexpr dart::compiler::target::word AOT_Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word AOT_DynamicLibrary_InstanceSize =
@@ -9257,6 +9255,8 @@
     AOT_Instructions_UnalignedHeaderSize = 16;
 static constexpr dart::compiler::target::word
     AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+    AOT_InstructionsTable_InstanceSize = 48;
 static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word
@@ -9344,10 +9344,6 @@
 static constexpr dart::compiler::target::word
     AOT_ExceptionHandlers_element_size = 12;
 static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_element_size = 4;
-static constexpr dart::compiler::target::word
     AOT_ObjectPool_elements_start_offset = 16;
 static constexpr dart::compiler::target::word AOT_ObjectPool_element_size = 8;
 static constexpr dart::compiler::target::word
@@ -9862,7 +9858,9 @@
 static constexpr dart::compiler::target::word AOT_ClosureData_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_CodeSourceMap_HeaderSize = 16;
 static constexpr dart::compiler::target::word
-    AOT_CompressedStackMaps_HeaderSize = 16;
+    AOT_CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    AOT_CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word AOT_Context_header_size = 16;
 static constexpr dart::compiler::target::word AOT_Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word AOT_DynamicLibrary_InstanceSize =
@@ -9890,6 +9888,8 @@
     AOT_Instructions_UnalignedHeaderSize = 16;
 static constexpr dart::compiler::target::word
     AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+    AOT_InstructionsTable_InstanceSize = 48;
 static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word
@@ -9976,10 +9976,6 @@
 static constexpr dart::compiler::target::word
     AOT_ExceptionHandlers_element_size = 12;
 static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_elements_start_offset = 20;
-static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_element_size = 4;
-static constexpr dart::compiler::target::word
     AOT_ObjectPool_elements_start_offset = 8;
 static constexpr dart::compiler::target::word AOT_ObjectPool_element_size = 4;
 static constexpr dart::compiler::target::word
@@ -10486,7 +10482,9 @@
 static constexpr dart::compiler::target::word AOT_ClosureData_InstanceSize = 20;
 static constexpr dart::compiler::target::word AOT_CodeSourceMap_HeaderSize = 8;
 static constexpr dart::compiler::target::word
-    AOT_CompressedStackMaps_HeaderSize = 8;
+    AOT_CompressedStackMaps_ObjectHeaderSize = 4;
+static constexpr dart::compiler::target::word
+    AOT_CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word AOT_Context_header_size = 12;
 static constexpr dart::compiler::target::word AOT_Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word AOT_DynamicLibrary_InstanceSize =
@@ -10514,6 +10512,8 @@
     AOT_Instructions_UnalignedHeaderSize = 8;
 static constexpr dart::compiler::target::word
     AOT_InstructionsSection_UnalignedHeaderSize = 20;
+static constexpr dart::compiler::target::word
+    AOT_InstructionsTable_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 4;
 static constexpr dart::compiler::target::word
@@ -10598,10 +10598,6 @@
 static constexpr dart::compiler::target::word
     AOT_ExceptionHandlers_element_size = 12;
 static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_element_size = 4;
-static constexpr dart::compiler::target::word
     AOT_ObjectPool_elements_start_offset = 16;
 static constexpr dart::compiler::target::word AOT_ObjectPool_element_size = 8;
 static constexpr dart::compiler::target::word
@@ -11112,7 +11108,9 @@
 static constexpr dart::compiler::target::word AOT_ClosureData_InstanceSize = 40;
 static constexpr dart::compiler::target::word AOT_CodeSourceMap_HeaderSize = 16;
 static constexpr dart::compiler::target::word
-    AOT_CompressedStackMaps_HeaderSize = 16;
+    AOT_CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    AOT_CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word AOT_Context_header_size = 24;
 static constexpr dart::compiler::target::word AOT_Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word AOT_DynamicLibrary_InstanceSize =
@@ -11140,6 +11138,8 @@
     AOT_Instructions_UnalignedHeaderSize = 16;
 static constexpr dart::compiler::target::word
     AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+    AOT_InstructionsTable_InstanceSize = 48;
 static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word
@@ -11227,10 +11227,6 @@
 static constexpr dart::compiler::target::word
     AOT_ExceptionHandlers_element_size = 12;
 static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_element_size = 4;
-static constexpr dart::compiler::target::word
     AOT_ObjectPool_elements_start_offset = 16;
 static constexpr dart::compiler::target::word AOT_ObjectPool_element_size = 8;
 static constexpr dart::compiler::target::word
@@ -11742,7 +11738,9 @@
 static constexpr dart::compiler::target::word AOT_ClosureData_InstanceSize = 40;
 static constexpr dart::compiler::target::word AOT_CodeSourceMap_HeaderSize = 16;
 static constexpr dart::compiler::target::word
-    AOT_CompressedStackMaps_HeaderSize = 16;
+    AOT_CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    AOT_CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word AOT_Context_header_size = 24;
 static constexpr dart::compiler::target::word AOT_Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word AOT_DynamicLibrary_InstanceSize =
@@ -11770,6 +11768,8 @@
     AOT_Instructions_UnalignedHeaderSize = 16;
 static constexpr dart::compiler::target::word
     AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+    AOT_InstructionsTable_InstanceSize = 48;
 static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word
@@ -11854,10 +11854,6 @@
 static constexpr dart::compiler::target::word
     AOT_ExceptionHandlers_element_size = 12;
 static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_element_size = 4;
-static constexpr dart::compiler::target::word
     AOT_ObjectPool_elements_start_offset = 16;
 static constexpr dart::compiler::target::word AOT_ObjectPool_element_size = 8;
 static constexpr dart::compiler::target::word
@@ -12367,7 +12363,9 @@
 static constexpr dart::compiler::target::word AOT_ClosureData_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_CodeSourceMap_HeaderSize = 16;
 static constexpr dart::compiler::target::word
-    AOT_CompressedStackMaps_HeaderSize = 16;
+    AOT_CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    AOT_CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word AOT_Context_header_size = 16;
 static constexpr dart::compiler::target::word AOT_Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word AOT_DynamicLibrary_InstanceSize =
@@ -12395,6 +12393,8 @@
     AOT_Instructions_UnalignedHeaderSize = 16;
 static constexpr dart::compiler::target::word
     AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+    AOT_InstructionsTable_InstanceSize = 48;
 static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word
@@ -12479,10 +12479,6 @@
 static constexpr dart::compiler::target::word
     AOT_ExceptionHandlers_element_size = 12;
 static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_elements_start_offset = 40;
-static constexpr dart::compiler::target::word
-    AOT_InstructionsTable_element_size = 4;
-static constexpr dart::compiler::target::word
     AOT_ObjectPool_elements_start_offset = 16;
 static constexpr dart::compiler::target::word AOT_ObjectPool_element_size = 8;
 static constexpr dart::compiler::target::word
@@ -12993,7 +12989,9 @@
 static constexpr dart::compiler::target::word AOT_ClosureData_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_CodeSourceMap_HeaderSize = 16;
 static constexpr dart::compiler::target::word
-    AOT_CompressedStackMaps_HeaderSize = 16;
+    AOT_CompressedStackMaps_ObjectHeaderSize = 8;
+static constexpr dart::compiler::target::word
+    AOT_CompressedStackMaps_PayloadHeaderSize = 4;
 static constexpr dart::compiler::target::word AOT_Context_header_size = 16;
 static constexpr dart::compiler::target::word AOT_Double_InstanceSize = 16;
 static constexpr dart::compiler::target::word AOT_DynamicLibrary_InstanceSize =
@@ -13021,6 +13019,8 @@
     AOT_Instructions_UnalignedHeaderSize = 16;
 static constexpr dart::compiler::target::word
     AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+    AOT_InstructionsTable_InstanceSize = 48;
 static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
 static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
 static constexpr dart::compiler::target::word
diff --git a/runtime/vm/compiler/runtime_offsets_list.h b/runtime/vm/compiler/runtime_offsets_list.h
index 71cc428..1328b23 100644
--- a/runtime/vm/compiler/runtime_offsets_list.h
+++ b/runtime/vm/compiler/runtime_offsets_list.h
@@ -51,7 +51,6 @@
   ARRAY(Context, variable_offset)                                              \
   ARRAY(ContextScope, element_offset)                                          \
   ARRAY(ExceptionHandlers, element_offset)                                     \
-  ARRAY(InstructionsTable, element_offset)                                     \
   ARRAY(ObjectPool, element_offset)                                            \
   ARRAY(OneByteString, element_offset)                                         \
   ARRAY(TypeArguments, type_at_offset)                                         \
@@ -61,7 +60,6 @@
   ARRAY_SIZEOF(Context, InstanceSize, variable_offset)                         \
   ARRAY_SIZEOF(ContextScope, InstanceSize, element_offset)                     \
   ARRAY_SIZEOF(ExceptionHandlers, InstanceSize, element_offset)                \
-  ARRAY_SIZEOF(InstructionsTable, InstanceSize, element_offset)                \
   ARRAY_SIZEOF(ObjectPool, InstanceSize, element_offset)                       \
   ARRAY_SIZEOF(OneByteString, InstanceSize, element_offset)                    \
   ARRAY_SIZEOF(TypeArguments, InstanceSize, type_at_offset)                    \
@@ -350,7 +348,9 @@
   SIZEOF(Closure, InstanceSize, UntaggedClosure)                               \
   SIZEOF(ClosureData, InstanceSize, UntaggedClosureData)                       \
   SIZEOF(CodeSourceMap, HeaderSize, UntaggedCodeSourceMap)                     \
-  SIZEOF(CompressedStackMaps, HeaderSize, UntaggedCompressedStackMaps)         \
+  SIZEOF(CompressedStackMaps, ObjectHeaderSize, UntaggedCompressedStackMaps)   \
+  SIZEOF(CompressedStackMaps, PayloadHeaderSize,                               \
+         UntaggedCompressedStackMaps::Payload)                                 \
   SIZEOF(Context, header_size, UntaggedContext)                                \
   SIZEOF(Double, InstanceSize, UntaggedDouble)                                 \
   SIZEOF(DynamicLibrary, InstanceSize, UntaggedDynamicLibrary)                 \
@@ -370,6 +370,7 @@
   SIZEOF(Instructions, UnalignedHeaderSize, UntaggedInstructions)              \
   SIZEOF(InstructionsSection, UnalignedHeaderSize,                             \
          UntaggedInstructionsSection)                                          \
+  SIZEOF(InstructionsTable, InstanceSize, UntaggedInstructionsTable)           \
   SIZEOF(Int32x4, InstanceSize, UntaggedInt32x4)                               \
   SIZEOF(Integer, InstanceSize, UntaggedInteger)                               \
   SIZEOF(KernelProgramInfo, InstanceSize, UntaggedKernelProgramInfo)           \
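
The SIZEOF split above reflects the layout change made in object.h further down: the compressed stack maps header is now accounted for as a fixed object part plus a fixed payload part, with the variable-length data following. A minimal illustration of that two-part header arithmetic (struct names and sizes here are hypothetical, not the VM's):

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Illustrative layout: a fixed object header, then a fixed payload header,
// then variable-length data. This mirrors the ObjectHeaderSize /
// PayloadHeaderSize split; field names and sizes are made up.
struct ObjectHeader { uint64_t tags; };
struct PayloadHeader { uint32_t flags_and_size; };

constexpr size_t HeaderSize() {
  return sizeof(ObjectHeader) + sizeof(PayloadHeader);
}

constexpr size_t UnroundedSize(size_t payload_length) {
  return HeaderSize() + payload_length;  // payload bytes follow the headers
}

int main() {
  std::printf("header = %zu bytes, total for 10 payload bytes = %zu\n",
              HeaderSize(), UnroundedSize(10));
  return 0;
}
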
diff --git a/runtime/vm/image_snapshot.cc b/runtime/vm/image_snapshot.cc
index f77d389..e812345 100644
--- a/runtime/vm/image_snapshot.cc
+++ b/runtime/vm/image_snapshot.cc
@@ -237,6 +237,12 @@
   return offset;
 }
 
+intptr_t ImageWriter::SizeInSnapshotForBytes(intptr_t length) {
+  // We are just going to write it out as a string.
+  return compiler::target::String::InstanceSize(
+      length * OneByteString::kBytesPerElement);
+}
+
 intptr_t ImageWriter::SizeInSnapshot(ObjectPtr raw_object) {
   const classid_t cid = raw_object->GetClassId();
 
@@ -280,13 +286,21 @@
 }
 
 uint32_t ImageWriter::GetDataOffsetFor(ObjectPtr raw_object) {
-  intptr_t snap_size = SizeInSnapshot(raw_object);
-  intptr_t offset = next_data_offset_;
+  const intptr_t snap_size = SizeInSnapshot(raw_object);
+  const intptr_t offset = next_data_offset_;
   next_data_offset_ += snap_size;
   objects_.Add(ObjectData(raw_object));
   return offset;
 }
 
+uint32_t ImageWriter::AddBytesToData(uint8_t* bytes, intptr_t length) {
+  const intptr_t snap_size = SizeInSnapshotForBytes(length);
+  const intptr_t offset = next_data_offset_;
+  next_data_offset_ += snap_size;
+  objects_.Add(ObjectData(bytes, length));
+  return offset;
+}
+
 intptr_t ImageWriter::GetTextObjectCount() const {
   return instructions_.length();
 }
@@ -439,9 +453,24 @@
     // the VM snapshot's text image.
     heap->SetObjectId(data.insns_->ptr(), 0);
   }
-  for (intptr_t i = 0; i < objects_.length(); i++) {
-    ObjectData& data = objects_[i];
-    data.obj_ = &Object::Handle(zone_, data.raw_obj_);
+  for (auto& data : objects_) {
+    if (data.is_object) {
+      data.obj = &Object::Handle(zone_, data.raw_obj);
+    }
+  }
+
+  // Once we have everything handlified we convert the raw bytes to string
+  // objects. String is used for simplicity as a bit container; we can't use
+  // TypedData because it has an internal pointer (data_) field.
+  for (auto& data : objects_) {
+    if (!data.is_object) {
+      const auto bytes = data.bytes;
+      data.obj = &Object::Handle(
+          zone_, OneByteString::New(bytes.buf, bytes.length, Heap::kOld));
+      data.is_object = true;
+      String::Cast(*data.obj).Hash();
+      free(bytes.buf);
+    }
   }
 
   // Needs to happen before WriteText, as we add information about the
@@ -484,8 +513,9 @@
 
   // Heap page objects start here.
 
-  for (intptr_t i = 0; i < objects_.length(); i++) {
-    const Object& obj = *objects_[i].obj_;
+  for (auto entry : objects_) {
+    ASSERT(entry.is_object);
+    const Object& obj = *entry.obj;
 #if defined(DART_PRECOMPILER)
     AutoTraceImage(obj, section_start, stream);
 #endif
@@ -498,10 +528,9 @@
     if (obj.IsCompressedStackMaps()) {
       const CompressedStackMaps& map = CompressedStackMaps::Cast(obj);
       const intptr_t payload_size = map.payload_size();
-      stream->WriteTargetWord(map.ptr()->untag()->flags_and_size_);
-      ASSERT_EQUAL(stream->Position() - object_start,
-                   compiler::target::CompressedStackMaps::HeaderSize());
-      stream->WriteBytes(map.ptr()->untag()->data(), payload_size);
+      stream->WriteFixed<uint32_t>(
+          map.ptr()->untag()->payload()->flags_and_size);
+      stream->WriteBytes(map.ptr()->untag()->payload()->data(), payload_size);
     } else if (obj.IsCodeSourceMap()) {
       const CodeSourceMap& map = CodeSourceMap::Cast(obj);
       stream->WriteTargetWord(map.Length());
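
In the hunk above, the stack map's 32-bit flags_and_size word is now written as a fixed-width payload header followed by the raw payload bytes, instead of as a target-word object field. A standalone sketch of that serialization order (the ByteStream helper below is hypothetical and only stands in for the VM's write stream):

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

// Hypothetical byte stream used only for illustration.
struct ByteStream {
  std::vector<uint8_t> bytes;
  void WriteUint32(uint32_t value) {  // fixed-width payload header
    const size_t pos = bytes.size();
    bytes.resize(pos + sizeof(value));
    std::memcpy(&bytes[pos], &value, sizeof(value));
  }
  void WriteBytes(const uint8_t* data, size_t length) {  // payload body
    bytes.insert(bytes.end(), data, data + length);
  }
};

int main() {
  const uint8_t payload[] = {0x05, 0x01, 0x02};          // encoded entries
  const uint32_t flags_and_size = sizeof(payload) << 2;  // size above the flag bits
  ByteStream stream;
  stream.WriteUint32(flags_and_size);           // Payload::flags_and_size first,
  stream.WriteBytes(payload, sizeof(payload));  // then the variable-length data.
  std::printf("wrote %zu bytes\n", stream.bytes.size());  // 4 + 3 = 7
  return 0;
}
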
diff --git a/runtime/vm/image_snapshot.h b/runtime/vm/image_snapshot.h
index 7100d3b..730b861 100644
--- a/runtime/vm/image_snapshot.h
+++ b/runtime/vm/image_snapshot.h
@@ -272,6 +272,8 @@
   int32_t GetTextOffsetFor(InstructionsPtr instructions, CodePtr code);
   uint32_t GetDataOffsetFor(ObjectPtr raw_object);
 
+  uint32_t AddBytesToData(uint8_t* bytes, intptr_t length);
+
   void Write(NonStreamingWriteStream* clustered_stream, bool vm);
   intptr_t data_size() const { return next_data_offset_; }
   intptr_t text_size() const { return next_text_offset_; }
@@ -354,12 +356,20 @@
   };
 
   struct ObjectData {
-    explicit ObjectData(ObjectPtr raw_obj) : raw_obj_(raw_obj) {}
+    explicit ObjectData(ObjectPtr raw_obj)
+        : raw_obj(raw_obj), is_object(true) {}
+    ObjectData(uint8_t* buf, intptr_t length)
+        : bytes({buf, length}), is_object(false) {}
 
     union {
-      ObjectPtr raw_obj_;
-      const Object* obj_;
+      struct {
+        uint8_t* buf;
+        intptr_t length;
+      } bytes;
+      ObjectPtr raw_obj;
+      const Object* obj;
     };
+    bool is_object;
   };
 
   // Methods abstracting out the particulars of the underlying concrete writer.
@@ -447,6 +457,8 @@
   friend class SnapshotTextObjectNamer;  // For InstructionsData.
 
  private:
+  static intptr_t SizeInSnapshotForBytes(intptr_t length);
+
   DISALLOW_COPY_AND_ASSIGN(ImageWriter);
 };
 
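
ObjectData above becomes a discriminated union: is_object records which union member is currently valid, and the handlification pass in image_snapshot.cc flips it once the raw bytes have been wrapped in a String. A stripped-down sketch of the same pattern, with illustrative types in place of the VM's:

#include <cassert>
#include <cstdint>
#include <cstdio>

struct Entry {
  // Discriminated union: exactly one member is active, selected by is_object.
  union {
    struct {
      uint8_t* buf;
      long length;
    } bytes;
    const void* obj;  // stands in for a handlified object pointer
  };
  bool is_object;

  explicit Entry(const void* object) : obj(object), is_object(true) {}
  Entry(uint8_t* buf, long length) : bytes{buf, length}, is_object(false) {}
};

int main() {
  uint8_t raw[4] = {1, 2, 3, 4};
  Entry byte_entry(raw, 4);
  Entry object_entry(static_cast<const void*>(raw));
  assert(!byte_entry.is_object);
  assert(object_entry.is_object);
  std::printf("bytes entry length: %ld\n", byte_entry.bytes.length);
  return 0;
}
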
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index b37de90..f18ef85 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -1036,7 +1036,7 @@
         empty_compressed_stackmaps_,
         static_cast<CompressedStackMapsPtr>(address + kHeapObjectTag));
     empty_compressed_stackmaps_->StoreNonPointer(
-        &empty_compressed_stackmaps_->untag()->flags_and_size_, 0);
+        &empty_compressed_stackmaps_->untag()->payload()->flags_and_size, 0);
     empty_compressed_stackmaps_->SetCanonical();
   }
 
@@ -14569,21 +14569,27 @@
   StoreNonPointer(&untag()->end_pc_, value);
 }
 
-void InstructionsTable::set_descriptors(const Array& value) const {
-  untag()->set_descriptors(value.ptr());
+void InstructionsTable::set_code_objects(const Array& value) const {
+  untag()->set_code_objects(value.ptr());
+}
+
+void InstructionsTable::set_rodata(uword value) const {
+  StoreNonPointer(
+      &untag()->rodata_,
+      reinterpret_cast<const UntaggedInstructionsTable::Data*>(value));
 }
 
 InstructionsTablePtr InstructionsTable::New(intptr_t length,
                                             uword start_pc,
-                                            uword end_pc) {
+                                            uword end_pc,
+                                            uword rodata) {
   ASSERT(Object::instructions_table_class() != Class::null());
   ASSERT(length >= 0);
   ASSERT(start_pc <= end_pc);
-  ASSERT(Utils::IsAligned(start_pc, kPayloadAlignment));
   Thread* thread = Thread::Current();
   InstructionsTable& result = InstructionsTable::Handle(thread->zone());
   {
-    uword size = InstructionsTable::InstanceSize(length);
+    uword size = InstructionsTable::InstanceSize();
     ObjectPtr raw =
         Object::Allocate(InstructionsTable::kClassId, size, Heap::kOld,
                          InstructionsTable::ContainsCompressedPointers());
@@ -14591,31 +14597,20 @@
     result ^= raw;
     result.set_length(length);
   }
-  const Array& descriptors =
+  const Array& code_objects =
       (length == 0) ? Object::empty_array()
                     : Array::Handle(Array::New(length, Heap::kOld));
-  result.set_descriptors(descriptors);
+  result.set_code_objects(code_objects);
   result.set_start_pc(start_pc);
   result.set_end_pc(end_pc);
+  result.set_rodata(rodata);
   return result.ptr();
 }
 
-void InstructionsTable::SetEntryAt(intptr_t index,
-                                   uword payload_start,
-                                   bool has_monomorphic_entrypoint,
-                                   ObjectPtr descriptor) const {
-  ASSERT((0 <= index) && (index < length()));
-  ASSERT(ContainsPc(payload_start));
-  ASSERT(Utils::IsAligned(payload_start, kPayloadAlignment));
-
-  const uint32_t pc_offset = ConvertPcToOffset(payload_start);
-  ASSERT((index == 0) || (PcOffsetAt(index - 1) <= pc_offset));
-  ASSERT((pc_offset & kHasMonomorphicEntrypointFlag) == 0);
-
-  untag()->data()[index] =
-      pc_offset |
-      (has_monomorphic_entrypoint ? kHasMonomorphicEntrypointFlag : 0);
-  descriptors()->untag()->set_element(index, descriptor);
+void InstructionsTable::SetCodeAt(intptr_t index, CodePtr code) const {
+  ASSERT((0 <= index) &&
+         (index < Smi::Value(code_objects()->untag()->length())));
+  code_objects()->untag()->set_element(index, code);
 }
 
 bool InstructionsTable::ContainsPc(InstructionsTablePtr table, uword pc) {
@@ -14632,21 +14627,25 @@
   return pc_offset;
 }
 
-intptr_t InstructionsTable::FindEntry(InstructionsTablePtr table, uword pc) {
+intptr_t InstructionsTable::FindEntry(InstructionsTablePtr table,
+                                      uword pc,
+                                      intptr_t start_index /* = 0 */) {
   // This can run in the middle of GC and must not allocate handles.
   NoSafepointScope no_safepoint;
   if (!InstructionsTable::ContainsPc(table, pc)) return -1;
   const uint32_t pc_offset = InstructionsTable::ConvertPcToOffset(table, pc);
-  intptr_t lo = 0;
-  intptr_t hi = InstructionsTable::length(table) - 1;
+
+  const auto rodata = table.untag()->rodata_;
+  const auto entries = rodata->entries();
+  intptr_t lo = start_index;
+  intptr_t hi = rodata->length - 1;
   while (lo <= hi) {
     intptr_t mid = (hi - lo + 1) / 2 + lo;
     ASSERT(mid >= lo);
     ASSERT(mid <= hi);
-    if (pc_offset < InstructionsTable::PcOffsetAt(table, mid)) {
+    if (pc_offset < entries[mid].pc_offset) {
       hi = mid - 1;
-    } else if ((mid != hi) &&
-               (pc_offset >= InstructionsTable::PcOffsetAt(table, mid + 1))) {
+    } else if ((mid != hi) && (pc_offset >= entries[mid + 1].pc_offset)) {
       lo = mid + 1;
     } else {
       return mid;
@@ -14655,22 +14654,66 @@
   return -1;
 }
 
-ObjectPtr InstructionsTable::DescriptorAt(InstructionsTablePtr table,
-                                          intptr_t index) {
-  ASSERT((0 <= index) && (index < InstructionsTable::length(table)));
-  return table->untag()->descriptors()->untag()->element(index);
+const UntaggedCompressedStackMaps::Payload*
+InstructionsTable::GetCanonicalStackMap(InstructionsTablePtr table) {
+  const auto rodata = table.untag()->rodata_;
+  return rodata->canonical_stack_map_entries_offset != 0
+             ? rodata->StackMapAt(rodata->canonical_stack_map_entries_offset)
+             : nullptr;
 }
 
-uword InstructionsTable::PayloadStartAt(InstructionsTablePtr table,
-                                        intptr_t index) {
-  return InstructionsTable::start_pc(table) +
-         InstructionsTable::PcOffsetAt(table, index);
+const UntaggedCompressedStackMaps::Payload* InstructionsTable::FindStackMap(
+    InstructionsTablePtr table,
+    uword pc,
+    uword* start_pc) {
+  // This can run in the middle of GC and must not allocate handles.
+  NoSafepointScope no_safepoint;
+  const intptr_t idx = FindEntry(table, pc);
+  if (idx != -1) {
+    const auto rodata = table.untag()->rodata_;
+    const auto entries = rodata->entries();
+    *start_pc = InstructionsTable::start_pc(table) + entries[idx].pc_offset;
+    return rodata->StackMapAt(entries[idx].stack_map_offset);
+  }
+  return nullptr;
 }
 
-uword InstructionsTable::EntryPointAt(intptr_t index) const {
-  return PayloadStartAt(index) + (HasMonomorphicEntryPointAt(index)
-                                      ? Instructions::kPolymorphicEntryOffsetAOT
-                                      : 0);
+CodePtr InstructionsTable::FindCode(InstructionsTablePtr table, uword pc) {
+  // This can run in the middle of GC and must not allocate handles.
+  NoSafepointScope no_safepoint;
+  if (!InstructionsTable::ContainsPc(table, pc)) return Code::null();
+
+  const auto rodata = table.untag()->rodata_;
+
+  const auto pc_offset = InstructionsTable::ConvertPcToOffset(table, pc);
+
+  if (pc_offset <= rodata->entries()[rodata->first_entry_with_code].pc_offset) {
+    return StubCode::UnknownDartCode().ptr();
+  }
+
+  const auto idx =
+      FindEntry(table, pc, table.untag()->rodata_->first_entry_with_code);
+  if (idx != -1) {
+    const intptr_t code_index = idx - rodata->first_entry_with_code;
+    ASSERT(code_index >= 0);
+    ASSERT(code_index <
+           Smi::Value(table.untag()->code_objects()->untag()->length()));
+    ObjectPtr result =
+        table.untag()->code_objects()->untag()->element(code_index);
+    ASSERT(result->IsCode());
+    // Note: can't use Code::RawCast(...) here because it allocates handles
+    // in DEBUG mode.
+    return static_cast<CodePtr>(result);
+  }
+
+  return Code::null();
+}
+
+uword InstructionsTable::EntryPointAt(intptr_t code_index) const {
+  ASSERT(0 <= code_index);
+  ASSERT(code_index < static_cast<intptr_t>(rodata()->length));
+  return InstructionsTable::start_pc(this->ptr()) +
+         rodata()->entries()[code_index].pc_offset;
 }
 
 const char* InstructionsTable::ToCString() const {
@@ -14966,7 +15009,7 @@
 
 uword CompressedStackMaps::Hash() const {
   NoSafepointScope scope;
-  uint8_t* data = UnsafeMutableNonPointer(&untag()->data()[0]);
+  uint8_t* data = UnsafeMutableNonPointer(&untag()->payload()->data()[0]);
   uint8_t* end = data + payload_size();
   uint32_t hash = payload_size();
   for (uint8_t* cursor = data; cursor < end; cursor++) {
@@ -14975,140 +15018,11 @@
   return FinalizeHash(hash, kHashBits);
 }
 
-CompressedStackMaps::Iterator::Iterator(const CompressedStackMaps& maps,
-                                        const CompressedStackMaps& global_table)
-    : maps_(maps),
-      bits_container_(maps_.UsesGlobalTable() ? global_table : maps_) {
-  ASSERT(!maps_.IsNull());
-  ASSERT(!bits_container_.IsNull());
-  ASSERT(!maps_.IsGlobalTable());
-  ASSERT(!maps_.UsesGlobalTable() || bits_container_.IsGlobalTable());
-}
-
-CompressedStackMaps::Iterator::Iterator(Thread* thread,
-                                        const CompressedStackMaps& maps)
-    : CompressedStackMaps::Iterator(
-          maps,
-          // Only look up the global table if the map will end up using it.
-          maps.UsesGlobalTable() ? CompressedStackMaps::Handle(
-                                       thread->zone(),
-                                       thread->isolate_group()
-                                           ->object_store()
-                                           ->canonicalized_stack_map_entries())
-                                 : Object::null_compressed_stackmaps()) {}
-
-CompressedStackMaps::Iterator::Iterator(const CompressedStackMaps::Iterator& it)
-    : maps_(it.maps_),
-      bits_container_(it.bits_container_),
-      next_offset_(it.next_offset_),
-      current_pc_offset_(it.current_pc_offset_),
-      current_global_table_offset_(it.current_global_table_offset_),
-      current_spill_slot_bit_count_(it.current_spill_slot_bit_count_),
-      current_non_spill_slot_bit_count_(it.current_spill_slot_bit_count_),
-      current_bits_offset_(it.current_bits_offset_) {}
-
-bool CompressedStackMaps::Iterator::MoveNext() {
-  if (next_offset_ >= maps_.payload_size()) {
-    return false;
-  }
-
-  NoSafepointScope scope;
-  ReadStream stream(maps_.untag()->data(), maps_.payload_size(), next_offset_);
-
-  auto const pc_delta = stream.ReadLEB128();
-  ASSERT(pc_delta <= (kMaxUint32 - current_pc_offset_));
-  current_pc_offset_ += pc_delta;
-
-  // Table-using CSMs have a table offset after the PC offset delta, whereas
-  // the post-delta part of inlined entries has the same information as
-  // global table entries.
-  if (maps_.UsesGlobalTable()) {
-    current_global_table_offset_ = stream.ReadLEB128();
-    ASSERT(current_global_table_offset_ < bits_container_.payload_size());
-
-    // Since generally we only use entries in the GC and the GC only needs
-    // the rest of the entry information if the PC offset matches, we lazily
-    // load and cache the information stored in the global object when it is
-    // actually requested.
-    current_spill_slot_bit_count_ = -1;
-    current_non_spill_slot_bit_count_ = -1;
-    current_bits_offset_ = -1;
-
-    next_offset_ = stream.Position();
-  } else {
-    current_spill_slot_bit_count_ = stream.ReadLEB128();
-    ASSERT(current_spill_slot_bit_count_ >= 0);
-
-    current_non_spill_slot_bit_count_ = stream.ReadLEB128();
-    ASSERT(current_non_spill_slot_bit_count_ >= 0);
-
-    const auto stackmap_bits =
-        current_spill_slot_bit_count_ + current_non_spill_slot_bit_count_;
-    const uintptr_t stackmap_size =
-        Utils::RoundUp(stackmap_bits, kBitsPerByte) >> kBitsPerByteLog2;
-    ASSERT(stackmap_size <= (maps_.payload_size() - stream.Position()));
-
-    current_bits_offset_ = stream.Position();
-    next_offset_ = current_bits_offset_ + stackmap_size;
-  }
-
-  return true;
-}
-
-intptr_t CompressedStackMaps::Iterator::Length() const {
-  EnsureFullyLoadedEntry();
-  return current_spill_slot_bit_count_ + current_non_spill_slot_bit_count_;
-}
-intptr_t CompressedStackMaps::Iterator::SpillSlotBitCount() const {
-  EnsureFullyLoadedEntry();
-  return current_spill_slot_bit_count_;
-}
-
-bool CompressedStackMaps::Iterator::IsObject(intptr_t bit_index) const {
-  EnsureFullyLoadedEntry();
-  ASSERT(bit_index >= 0 && bit_index < Length());
-  const intptr_t byte_index = bit_index >> kBitsPerByteLog2;
-  const intptr_t bit_remainder = bit_index & (kBitsPerByte - 1);
-  uint8_t byte_mask = 1U << bit_remainder;
-  const intptr_t byte_offset = current_bits_offset_ + byte_index;
-  NoSafepointScope scope;
-  return (bits_container_.untag()->data()[byte_offset] & byte_mask) != 0;
-}
-
-void CompressedStackMaps::Iterator::LazyLoadGlobalTableEntry() const {
-  ASSERT(maps_.UsesGlobalTable());
-  ASSERT(HasLoadedEntry());
-  ASSERT(current_global_table_offset_ < bits_container_.payload_size());
-
-  NoSafepointScope scope;
-  ReadStream stream(bits_container_.untag()->data(),
-                    bits_container_.payload_size(),
-                    current_global_table_offset_);
-
-  current_spill_slot_bit_count_ = stream.ReadLEB128();
-  ASSERT(current_spill_slot_bit_count_ >= 0);
-
-  current_non_spill_slot_bit_count_ = stream.ReadLEB128();
-  ASSERT(current_non_spill_slot_bit_count_ >= 0);
-
-  const auto stackmap_bits = Length();
-  const uintptr_t stackmap_size =
-      Utils::RoundUp(stackmap_bits, kBitsPerByte) >> kBitsPerByteLog2;
-  ASSERT(stackmap_size <= (bits_container_.payload_size() - stream.Position()));
-
-  current_bits_offset_ = stream.Position();
-}
-
-void CompressedStackMaps::Iterator::WriteToBuffer(BaseTextBuffer* buffer,
-                                                  const char* separator) const {
-  CompressedStackMaps::Iterator it(*this);
-  // If we haven't loaded an entry yet, do so (but don't skip the current
-  // one if we have!)
-  if (!it.HasLoadedEntry()) {
-    if (!it.MoveNext()) return;
-  }
+void CompressedStackMaps::WriteToBuffer(BaseTextBuffer* buffer,
+                                        const char* separator) const {
+  auto it = iterator(Thread::Current());
   bool first_entry = true;
-  do {
+  while (it.MoveNext()) {
     if (!first_entry) {
       buffer->AddString(separator);
     }
@@ -15117,7 +15031,16 @@
       buffer->AddString(it.IsObject(i) ? "1" : "0");
     }
     first_entry = false;
-  } while (it.MoveNext());
+  }
+}
+
+CompressedStackMaps::Iterator<CompressedStackMaps>
+CompressedStackMaps::iterator(Thread* thread) const {
+  return Iterator<CompressedStackMaps>(
+      *this, CompressedStackMaps::Handle(
+                 thread->zone(), thread->isolate_group()
+                                     ->object_store()
+                                     ->canonicalized_stack_map_entries()));
 }
 
 CompressedStackMapsPtr CompressedStackMaps::New(const void* payload,
@@ -15147,12 +15070,13 @@
     NoSafepointScope no_safepoint;
     result ^= raw;
     result.StoreNonPointer(
-        &result.untag()->flags_and_size_,
+        &result.untag()->payload()->flags_and_size,
         UntaggedCompressedStackMaps::GlobalTableBit::encode(is_global_table) |
             UntaggedCompressedStackMaps::UsesTableBit::encode(
                 uses_global_table) |
             UntaggedCompressedStackMaps::SizeField::encode(size));
-    auto cursor = result.UnsafeMutableNonPointer(result.untag()->data());
+    auto cursor =
+        result.UnsafeMutableNonPointer(result.untag()->payload()->data());
     memcpy(cursor, payload, size);  // NOLINT
   }
 
@@ -15167,10 +15091,9 @@
     return "CompressedStackMaps()";
   }
   auto const t = Thread::Current();
-  CompressedStackMaps::Iterator it(t, *this);
   ZoneTextBuffer buffer(t->zone(), 100);
   buffer.AddString("CompressedStackMaps(");
-  it.WriteToBuffer(&buffer, ", ");
+  WriteToBuffer(&buffer, ", ");
   buffer.AddString(")");
   return buffer.buffer();
 }
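
FindEntry above now binary-searches the read-only entries array for the last entry whose pc_offset is not greater than the PC being looked up, optionally starting from first_entry_with_code. The search itself can be sketched over a plain sorted vector (a simplified standalone sketch, not the VM code):

#include <cstdint>
#include <cstdio>
#include <vector>

// Returns the index of the last entry whose value is <= pc_offset, or -1 if
// pc_offset lies before the first entry. Entries must be sorted ascending,
// mirroring the PC offsets in the instructions table.
int FindEntry(const std::vector<uint32_t>& pc_offsets, uint32_t pc_offset,
              int start_index = 0) {
  int lo = start_index;
  int hi = static_cast<int>(pc_offsets.size()) - 1;
  while (lo <= hi) {
    const int mid = lo + (hi - lo + 1) / 2;
    if (pc_offset < pc_offsets[mid]) {
      hi = mid - 1;
    } else if (mid != hi && pc_offset >= pc_offsets[mid + 1]) {
      lo = mid + 1;
    } else {
      return mid;  // pc_offsets[mid] <= pc_offset < pc_offsets[mid + 1]
    }
  }
  return -1;
}

int main() {
  const std::vector<uint32_t> offsets = {0, 0x40, 0x100, 0x240};
  std::printf("%d\n", FindEntry(offsets, 0x120));  // prints 2
  std::printf("%d\n", FindEntry(offsets, 0x300));  // prints 3
  return 0;
}
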
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index 591c5a1..ab5c257 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -5590,58 +5590,29 @@
 // Used in AOT in bare instructions mode.
 class InstructionsTable : public Object {
  public:
-  static const intptr_t kBytesPerElement = sizeof(uint32_t);
-  static const intptr_t kMaxElements = kIntptrMax / kBytesPerElement;
-
-  static const uint32_t kHasMonomorphicEntrypointFlag = 0x1;
-  static const uint32_t kPayloadAlignment = Instructions::kBarePayloadAlignment;
-  static const uint32_t kPayloadMask = ~(kPayloadAlignment - 1);
-  COMPILE_ASSERT((kPayloadMask & kHasMonomorphicEntrypointFlag) == 0);
-
-  struct ArrayTraits {
-    static intptr_t elements_start_offset() {
-      return sizeof(UntaggedInstructionsTable);
-    }
-    static constexpr intptr_t kElementSize = kBytesPerElement;
-  };
-
-  static intptr_t InstanceSize() {
-    ASSERT_EQUAL(sizeof(UntaggedInstructionsTable),
-                 OFFSET_OF_RETURNED_VALUE(UntaggedInstructionsTable, data));
-    return 0;
-  }
-  static intptr_t InstanceSize(intptr_t len) {
-    ASSERT(0 <= len && len <= kMaxElements);
-    return RoundedAllocationSize(sizeof(UntaggedInstructionsTable) +
-                                 len * kBytesPerElement);
-  }
+  static intptr_t InstanceSize() { return sizeof(UntaggedInstructionsTable); }
 
   static InstructionsTablePtr New(intptr_t length,
                                   uword start_pc,
-                                  uword end_pc);
+                                  uword end_pc,
+                                  uword rodata);
 
-  void SetEntryAt(intptr_t index,
-                  uword payload_start,
-                  bool has_monomorphic_entrypoint,
-                  ObjectPtr descriptor) const;
+  void SetCodeAt(intptr_t index, CodePtr code) const;
 
   bool ContainsPc(uword pc) const { return ContainsPc(ptr(), pc); }
   static bool ContainsPc(InstructionsTablePtr table, uword pc);
 
-  // Looks for the entry in the [table] by the given [pc].
-  // Returns index of an entry which contains [pc], or -1 if not found.
-  static intptr_t FindEntry(InstructionsTablePtr table, uword pc);
+  static CodePtr FindCode(InstructionsTablePtr table, uword pc);
 
-  intptr_t length() const { return InstructionsTable::length(this->ptr()); }
-  static intptr_t length(InstructionsTablePtr table) {
-    return table->untag()->length_;
-  }
+  static const UntaggedCompressedStackMaps::Payload*
+  FindStackMap(InstructionsTablePtr table, uword pc, uword* start_pc);
 
-  // Returns descriptor object for the entry with given index.
-  ObjectPtr DescriptorAt(intptr_t index) const {
-    return InstructionsTable::DescriptorAt(this->ptr(), index);
+  static const UntaggedCompressedStackMaps::Payload* GetCanonicalStackMap(
+      InstructionsTablePtr table);
+
+  const UntaggedInstructionsTable::Data* rodata() const {
+    return ptr()->untag()->rodata_;
   }
-  static ObjectPtr DescriptorAt(InstructionsTablePtr table, intptr_t index);
 
   // Returns start address of the instructions entry with given index.
   uword PayloadStartAt(intptr_t index) const {
@@ -5663,34 +5634,26 @@
     return table->untag()->end_pc_;
   }
 
-  ArrayPtr descriptors() const { return untag()->descriptors_; }
-
-  static uint32_t DataAt(InstructionsTablePtr table, intptr_t index) {
-    ASSERT((0 <= index) && (index < InstructionsTable::length(table)));
-    return table->untag()->data()[index];
-  }
-  uint32_t PcOffsetAt(intptr_t index) const {
-    return InstructionsTable::PcOffsetAt(this->ptr(), index);
-  }
-  static uint32_t PcOffsetAt(InstructionsTablePtr table, intptr_t index) {
-    return DataAt(table, index) & kPayloadMask;
-  }
-  bool HasMonomorphicEntryPointAt(intptr_t index) const {
-    return (DataAt(this->ptr(), index) & kHasMonomorphicEntrypointFlag) != 0;
-  }
+  ArrayPtr code_objects() const { return untag()->code_objects_; }
 
   void set_length(intptr_t value) const;
   void set_start_pc(uword value) const;
   void set_end_pc(uword value) const;
-  void set_descriptors(const Array& value) const;
+  void set_code_objects(const Array& value) const;
+  void set_rodata(uword rodata) const;
 
   uint32_t ConvertPcToOffset(uword pc) const {
     return InstructionsTable::ConvertPcToOffset(this->ptr(), pc);
   }
   static uint32_t ConvertPcToOffset(InstructionsTablePtr table, uword pc);
 
+  static intptr_t FindEntry(InstructionsTablePtr table,
+                            uword pc,
+                            intptr_t start_index = 0);
+
   FINAL_HEAP_OBJECT_IMPLEMENTATION(InstructionsTable, Object);
   friend class Class;
+  friend class Deserializer;
 };
 
 class LocalVarDescriptors : public Object {
@@ -5920,13 +5883,16 @@
   uintptr_t payload_size() const { return PayloadSizeOf(ptr()); }
   static uintptr_t PayloadSizeOf(const CompressedStackMapsPtr raw) {
     return UntaggedCompressedStackMaps::SizeField::decode(
-        raw->untag()->flags_and_size_);
+        raw->untag()->payload()->flags_and_size);
   }
 
+  const uint8_t* data() const { return ptr()->untag()->payload()->data(); }
+
   // Methods to allow use with PointerKeyValueTrait to create sets of CSMs.
   bool Equals(const CompressedStackMaps& other) const {
     // All of the table flags and payload size must match.
-    if (untag()->flags_and_size_ != other.untag()->flags_and_size_) {
+    if (untag()->payload()->flags_and_size !=
+        other.untag()->payload()->flags_and_size) {
       return false;
     }
     NoSafepointScope no_safepoint;
@@ -5934,7 +5900,10 @@
   }
   uword Hash() const;
 
-  static intptr_t HeaderSize() { return sizeof(UntaggedCompressedStackMaps); }
+  static intptr_t HeaderSize() {
+    return sizeof(UntaggedCompressedStackMaps) +
+           sizeof(UntaggedCompressedStackMaps::Payload);
+  }
   static intptr_t UnroundedSize(CompressedStackMapsPtr maps) {
     return UnroundedSize(CompressedStackMaps::PayloadSizeOf(maps));
   }
@@ -5942,8 +5911,6 @@
     return HeaderSize() + length;
   }
   static intptr_t InstanceSize() {
-    ASSERT_EQUAL(sizeof(UntaggedCompressedStackMaps),
-                 OFFSET_OF_RETURNED_VALUE(UntaggedCompressedStackMaps, data));
     return 0;
   }
   static intptr_t InstanceSize(intptr_t length) {
@@ -5953,13 +5920,13 @@
   bool UsesGlobalTable() const { return UsesGlobalTable(ptr()); }
   static bool UsesGlobalTable(const CompressedStackMapsPtr raw) {
     return UntaggedCompressedStackMaps::UsesTableBit::decode(
-        raw->untag()->flags_and_size_);
+        raw->untag()->payload()->flags_and_size);
   }
 
   bool IsGlobalTable() const { return IsGlobalTable(ptr()); }
   static bool IsGlobalTable(const CompressedStackMapsPtr raw) {
     return UntaggedCompressedStackMaps::GlobalTableBit::decode(
-        raw->untag()->flags_and_size_);
+        raw->untag()->payload()->flags_and_size);
   }
 
   static CompressedStackMapsPtr NewInlined(const void* payload, intptr_t size) {
@@ -5978,17 +5945,128 @@
                /*uses_global_table=*/false);
   }
 
+  class RawPayloadHandle {
+   public:
+    RawPayloadHandle() {}
+    RawPayloadHandle(const RawPayloadHandle&) = default;
+    RawPayloadHandle& operator=(const RawPayloadHandle&) = default;
+
+    const UntaggedCompressedStackMaps::Payload* payload() const {
+      return payload_;
+    }
+    bool IsNull() const { return payload_ == nullptr; }
+
+    RawPayloadHandle& operator=(
+        const UntaggedCompressedStackMaps::Payload* payload) {
+      payload_ = payload;
+      return *this;
+    }
+
+    RawPayloadHandle& operator=(const CompressedStackMaps& maps) {
+      ASSERT(!maps.IsNull());
+      payload_ = maps.untag()->payload();
+      return *this;
+    }
+
+    RawPayloadHandle& operator=(CompressedStackMapsPtr maps) {
+      ASSERT(maps != CompressedStackMaps::null());
+      payload_ = maps.untag()->payload();
+      return *this;
+    }
+
+    uintptr_t payload_size() const {
+      return UntaggedCompressedStackMaps::SizeField::decode(
+          payload()->flags_and_size);
+    }
+    const uint8_t* data() const { return payload()->data(); }
+
+    bool UsesGlobalTable() const {
+      return UntaggedCompressedStackMaps::UsesTableBit::decode(
+          payload()->flags_and_size);
+    }
+
+    bool IsGlobalTable() const {
+      return UntaggedCompressedStackMaps::GlobalTableBit::decode(
+          payload()->flags_and_size);
+    }
+
+   private:
+    const UntaggedCompressedStackMaps::Payload* payload_ = nullptr;
+  };
+
+  template <typename PayloadHandle>
   class Iterator {
    public:
-    Iterator(const CompressedStackMaps& maps,
-             const CompressedStackMaps& global_table);
-    Iterator(Thread* thread, const CompressedStackMaps& maps);
+    Iterator(const PayloadHandle& maps, const PayloadHandle& global_table)
+        : maps_(maps),
+          bits_container_(maps.UsesGlobalTable() ? global_table : maps) {
+      ASSERT(!maps_.IsNull());
+      ASSERT(!bits_container_.IsNull());
+      ASSERT(!maps_.IsGlobalTable());
+      ASSERT(!maps_.UsesGlobalTable() || bits_container_.IsGlobalTable());
+    }
 
-    explicit Iterator(const CompressedStackMaps::Iterator& it);
+    Iterator(const Iterator& it)
+        : maps_(it.maps_),
+          bits_container_(it.bits_container_),
+          next_offset_(it.next_offset_),
+          current_pc_offset_(it.current_pc_offset_),
+          current_global_table_offset_(it.current_global_table_offset_),
+          current_spill_slot_bit_count_(it.current_spill_slot_bit_count_),
+          current_non_spill_slot_bit_count_(
+              it.current_non_spill_slot_bit_count_),
+          current_bits_offset_(it.current_bits_offset_) {}
 
     // Loads the next entry from [maps_], if any. If [maps_] is the null value,
     // this always returns false.
-    bool MoveNext();
+    bool MoveNext() {
+      if (next_offset_ >= maps_.payload_size()) {
+        return false;
+      }
+
+      NoSafepointScope scope;
+      ReadStream stream(maps_.data(), maps_.payload_size(), next_offset_);
+
+      auto const pc_delta = stream.ReadLEB128();
+      ASSERT(pc_delta <= (kMaxUint32 - current_pc_offset_));
+      current_pc_offset_ += pc_delta;
+
+      // Table-using CSMs have a table offset after the PC offset delta, whereas
+      // the post-delta part of inlined entries has the same information as
+      // global table entries.
+      // See comments in UntaggedCompressedStackMaps for description of
+      // encoding.
+      if (maps_.UsesGlobalTable()) {
+        current_global_table_offset_ = stream.ReadLEB128();
+        ASSERT(current_global_table_offset_ < bits_container_.payload_size());
+
+        // Since generally we only use entries in the GC and the GC only needs
+        // the rest of the entry information if the PC offset matches, we lazily
+        // load and cache the information stored in the global object when it is
+        // actually requested.
+        current_spill_slot_bit_count_ = -1;
+        current_non_spill_slot_bit_count_ = -1;
+        current_bits_offset_ = -1;
+
+        next_offset_ = stream.Position();
+      } else {
+        current_spill_slot_bit_count_ = stream.ReadLEB128();
+        ASSERT(current_spill_slot_bit_count_ >= 0);
+
+        current_non_spill_slot_bit_count_ = stream.ReadLEB128();
+        ASSERT(current_non_spill_slot_bit_count_ >= 0);
+
+        const auto stackmap_bits =
+            current_spill_slot_bit_count_ + current_non_spill_slot_bit_count_;
+        const uintptr_t stackmap_size =
+            Utils::RoundUp(stackmap_bits, kBitsPerByte) >> kBitsPerByteLog2;
+        ASSERT(stackmap_size <= (maps_.payload_size() - stream.Position()));
+
+        current_bits_offset_ = stream.Position();
+        next_offset_ = current_bits_offset_ + stackmap_size;
+      }
+
+      return true;
+    }
 
     // Finds the entry with the given PC offset starting at the current position
     // of the iterator. If [maps_] is the null value, this always returns false.
@@ -6012,21 +6090,56 @@
     }
 
     // Returns the bit length of the loaded entry.
-    intptr_t Length() const;
+    intptr_t Length() const {
+      EnsureFullyLoadedEntry();
+      return current_spill_slot_bit_count_ + current_non_spill_slot_bit_count_;
+    }
     // Returns the number of spill slot bits of the loaded entry.
-    intptr_t SpillSlotBitCount() const;
+    intptr_t SpillSlotBitCount() const {
+      EnsureFullyLoadedEntry();
+      return current_spill_slot_bit_count_;
+    }
     // Returns whether the stack entry represented by the offset contains
-    // a tagged objecet.
-    bool IsObject(intptr_t bit_offset) const;
-
-    void WriteToBuffer(BaseTextBuffer* buffer, const char* separator) const;
+    // a tagged object.
+    bool IsObject(intptr_t bit_index) const {
+      EnsureFullyLoadedEntry();
+      ASSERT(bit_index >= 0 && bit_index < Length());
+      const intptr_t byte_index = bit_index >> kBitsPerByteLog2;
+      const intptr_t bit_remainder = bit_index & (kBitsPerByte - 1);
+      uint8_t byte_mask = 1U << bit_remainder;
+      const intptr_t byte_offset = current_bits_offset_ + byte_index;
+      NoSafepointScope scope;
+      return (bits_container_.data()[byte_offset] & byte_mask) != 0;
+    }
 
    private:
     bool HasLoadedEntry() const { return next_offset_ > 0; }
 
     // Caches the corresponding values from the global table in the mutable
     // fields. We lazily load these as some clients only need the PC offset.
-    void LazyLoadGlobalTableEntry() const;
+    void LazyLoadGlobalTableEntry() const {
+      ASSERT(maps_.UsesGlobalTable());
+      ASSERT(HasLoadedEntry());
+      ASSERT(current_global_table_offset_ < bits_container_.payload_size());
+
+      NoSafepointScope scope;
+      ReadStream stream(bits_container_.data(), bits_container_.payload_size(),
+                        current_global_table_offset_);
+
+      current_spill_slot_bit_count_ = stream.ReadLEB128();
+      ASSERT(current_spill_slot_bit_count_ >= 0);
+
+      current_non_spill_slot_bit_count_ = stream.ReadLEB128();
+      ASSERT(current_non_spill_slot_bit_count_ >= 0);
+
+      const auto stackmap_bits = Length();
+      const uintptr_t stackmap_size =
+          Utils::RoundUp(stackmap_bits, kBitsPerByte) >> kBitsPerByteLog2;
+      ASSERT(stackmap_size <=
+             (bits_container_.payload_size() - stream.Position()));
+
+      current_bits_offset_ = stream.Position();
+    }
 
     void EnsureFullyLoadedEntry() const {
       ASSERT(HasLoadedEntry());
@@ -6036,8 +6149,8 @@
       }
     }
 
-    const CompressedStackMaps& maps_;
-    const CompressedStackMaps& bits_container_;
+    const PayloadHandle& maps_;
+    const PayloadHandle& bits_container_;
 
     uintptr_t next_offset_ = 0;
     uint32_t current_pc_offset_ = 0;
@@ -6053,6 +6166,10 @@
     friend class StackMapEntry;
   };
 
+  Iterator<CompressedStackMaps> iterator(Thread* thread) const;
+
+  void WriteToBuffer(BaseTextBuffer* buffer, const char* separator) const;
+
  private:
   static CompressedStackMapsPtr New(const void* payload,
                                     intptr_t size,
@@ -6286,6 +6403,10 @@
 #endif
   }
 
+  static uword UncheckedEntryPointOf(const CodePtr code) {
+    return code->untag()->unchecked_entry_point_;
+  }
+
   // Returns the unchecked entry point of [instructions()].
   uword UncheckedEntryPoint() const {
 #if defined(DART_PRECOMPILED_RUNTIME)
@@ -9762,6 +9883,7 @@
   friend class Symbols;
   friend class Utf8;
   friend class OneByteStringMessageSerializationCluster;
+  friend class Deserializer;
 };
 
 class TwoByteString : public AllStatic {
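
The templated Iterator above, like the out-of-line version it replaces, reads each entry as a sequence of unsigned LEB128 integers (a PC delta, then either a global-table offset or the spill/non-spill bit counts). For reference, a minimal unsigned LEB128 decoder (a standalone sketch; the VM uses its own ReadStream::ReadLEB128):

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Decodes one unsigned LEB128 value starting at data[*pos], advancing *pos.
// Each byte contributes 7 payload bits; the high bit marks continuation.
uint64_t ReadLEB128(const uint8_t* data, size_t* pos) {
  uint64_t value = 0;
  int shift = 0;
  uint8_t byte;
  do {
    byte = data[(*pos)++];
    value |= static_cast<uint64_t>(byte & 0x7f) << shift;
    shift += 7;
  } while ((byte & 0x80) != 0);
  return value;
}

int main() {
  // 0xE5 0x8E 0x26 encodes 624485 in unsigned LEB128.
  const uint8_t encoded[] = {0xE5, 0x8E, 0x26};
  size_t pos = 0;
  std::printf("%llu\n",
              static_cast<unsigned long long>(ReadLEB128(encoded, &pos)));
  return 0;
}
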
diff --git a/runtime/vm/program_visitor.cc b/runtime/vm/program_visitor.cc
index 918f7d7..9d57230 100644
--- a/runtime/vm/program_visitor.cc
+++ b/runtime/vm/program_visitor.cc
@@ -454,7 +454,8 @@
 
 class StackMapEntry : public ZoneAllocated {
  public:
-  StackMapEntry(Zone* zone, const CompressedStackMaps::Iterator& it)
+  StackMapEntry(Zone* zone,
+                const CompressedStackMaps::Iterator<CompressedStackMaps>& it)
       : maps_(CompressedStackMaps::Handle(zone, it.maps_.ptr())),
         bits_container_(
             CompressedStackMaps::Handle(zone, it.bits_container_.ptr())),
@@ -525,7 +526,7 @@
   }
   const uint8_t* PayloadData() const {
     ASSERT(!Thread::Current()->IsAtSafepoint());
-    return bits_container_.ptr()->untag()->data() + bits_offset_;
+    return bits_container_.ptr()->untag()->payload()->data() + bits_offset_;
   }
 
   const CompressedStackMaps& maps_;
@@ -582,8 +583,8 @@
 
     void VisitCode(const Code& code) {
       compressed_stackmaps_ = code.compressed_stackmaps();
-      CompressedStackMaps::Iterator it(compressed_stackmaps_,
-                                       old_global_table_);
+      CompressedStackMaps::Iterator<CompressedStackMaps> it(
+          compressed_stackmaps_, old_global_table_);
       while (it.MoveNext()) {
         auto const entry = new (zone_) StackMapEntry(zone_, it);
         auto const index = entry_indices_.LookupValue(entry);
@@ -705,7 +706,8 @@
         return Object::empty_compressed_stackmaps().ptr();
       }
       MallocWriteStream new_payload(maps.payload_size());
-      CompressedStackMaps::Iterator it(maps, old_global_table_);
+      CompressedStackMaps::Iterator<CompressedStackMaps> it(maps,
+                                                            old_global_table_);
       intptr_t last_offset = 0;
       while (it.MoveNext()) {
         StackMapEntry entry(zone_, it);
@@ -1269,6 +1271,82 @@
           code_(Code::Handle(zone)),
           instructions_(Instructions::Handle(zone)) {}
 
+    // Relink the program graph to eliminate references to the non-canonical
+    // Code objects. We want to arrive at a graph where Code objects
+    // and Instructions objects are in a one-to-one relationship.
+    void PostProcess(IsolateGroup* isolate_group) {
+      const intptr_t canonical_count = canonical_objects_.Length();
+
+      auto& static_calls_array = Array::Handle(zone_);
+      auto& static_calls_table_entry = Object::Handle(zone_);
+
+      auto should_canonicalize = [&](const Object& obj) {
+        return CanCanonicalize(Code::Cast(obj)) && !obj.InVMIsolateHeap();
+      };
+
+      auto process_pool = [&](const ObjectPool& pool) {
+        if (pool.IsNull()) {
+          return;
+        }
+
+        auto& object = Object::Handle(zone_);
+        for (intptr_t i = 0; i < pool.Length(); i++) {
+          auto const type = pool.TypeAt(i);
+          if (type != ObjectPool::EntryType::kTaggedObject) continue;
+          object = pool.ObjectAt(i);
+          if (object.IsCode() && should_canonicalize(object)) {
+            object = Canonicalize(Code::Cast(object));
+            pool.SetObjectAt(i, object);
+          }
+        }
+      };
+
+      auto& pool = ObjectPool::Handle(zone_);
+
+      auto it = canonical_objects_.GetIterator();
+      while (auto canonical_code = it.Next()) {
+        static_calls_array = (*canonical_code)->static_calls_target_table();
+        if (!static_calls_array.IsNull()) {
+          StaticCallsTable static_calls(static_calls_array);
+          for (auto& view : static_calls) {
+            static_calls_table_entry =
+                view.Get<Code::kSCallTableCodeOrTypeTarget>();
+            if (static_calls_table_entry.IsCode() &&
+                should_canonicalize(static_calls_table_entry)) {
+              static_calls_table_entry =
+                  Canonicalize(Code::Cast(static_calls_table_entry));
+              view.Set<Code::kSCallTableCodeOrTypeTarget>(
+                  static_calls_table_entry);
+            }
+          }
+        }
+
+        pool = (*canonical_code)->object_pool();
+        process_pool(pool);
+      }
+
+      auto object_store = isolate_group->object_store();
+
+      const auto& dispatch_table_entries =
+          Array::Handle(zone_, object_store->dispatch_table_code_entries());
+      if (!dispatch_table_entries.IsNull()) {
+        auto& code = Code::Handle(zone_);
+        for (intptr_t i = 0; i < dispatch_table_entries.Length(); i++) {
+          code ^= dispatch_table_entries.At(i);
+          if (should_canonicalize(code)) {
+            code ^= Canonicalize(code);
+            dispatch_table_entries.SetAt(i, code);
+          }
+        }
+      }
+
+      // Process the global object pool, if there is one.
+      pool = object_store->global_object_pool();
+      process_pool(pool);
+
+      RELEASE_ASSERT(canonical_count == canonical_objects_.Length());
+    }
+
     void VisitFunction(const Function& function) {
       if (!function.HasCode()) return;
       code_ = function.CurrentCode();
@@ -1276,12 +1354,13 @@
       // ProgramWalker, but as long as the deduplication process is idempotent,
       // the cached entry points won't change during the second visit.
       VisitCode(code_);
-      function.SetInstructionsSafe(code_);  // Update cached entry point.
+      function.SetInstructionsSafe(canonical_);  // Update cached entry point.
     }
 
     void VisitCode(const Code& code) {
+      canonical_ = code.ptr();
       if (code.IsDisabled()) return;
-      canonical_ = Dedup(code);
+      canonical_ = Canonicalize(code);
       instructions_ = canonical_.instructions();
       code.SetActiveInstructionsSafe(instructions_,
                                      code.UncheckedEntryPointOffset());
@@ -1291,6 +1370,14 @@
    private:
     bool CanCanonicalize(const Code& code) const { return !code.IsDisabled(); }
 
+    CodePtr Canonicalize(const Code& code) {
+      canonical_ = Dedup(code);
+      if (!code.is_discarded() && canonical_.is_discarded()) {
+        canonical_.set_is_discarded(false);
+      }
+      return canonical_.ptr();
+    }
+
     Code& canonical_;
     Code& code_;
     Instructions& instructions_;
@@ -1300,6 +1387,7 @@
     StackZone stack_zone(thread);
     DedupInstructionsWithSameMetadataVisitor visitor(thread->zone());
     WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
+    visitor.PostProcess(thread->isolate_group());
     return;
   }
 #endif  // defined(DART_PRECOMPILER)
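
PostProcess above walks object pools, static-call tables, and dispatch-table entries and replaces each non-canonical Code reference with its canonical representative. The underlying pattern is a two-pass dedup: choose one representative per equivalence class, then rewrite references. A toy sketch of that pattern with illustrative data:

#include <cstddef>
#include <cstdio>
#include <string>
#include <unordered_map>
#include <vector>

int main() {
  // Objects 0..4; equal strings model Code objects with identical metadata.
  std::vector<std::string> objects = {"a", "b", "a", "c", "b"};

  // Pass 1: pick one canonical representative per key (first occurrence wins).
  std::unordered_map<std::string, size_t> canonical;
  for (size_t i = 0; i < objects.size(); ++i) {
    canonical.emplace(objects[i], i);
  }

  // Pass 2: relink every reference to point at the canonical instance.
  std::vector<size_t> references = {0, 1, 2, 3, 4};
  for (size_t& ref : references) {
    ref = canonical[objects[ref]];
  }

  for (size_t ref : references) {
    std::printf("%zu ", ref);  // prints: 0 1 0 3 1
  }
  std::printf("\n");
  return 0;
}
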
diff --git a/runtime/vm/raw_object.cc b/runtime/vm/raw_object.cc
index de93e86..c32aa1e 100644
--- a/runtime/vm/raw_object.cc
+++ b/runtime/vm/raw_object.cc
@@ -115,13 +115,6 @@
       instance_size = InstructionsSection::InstanceSize(section_size);
       break;
     }
-    case kInstructionsTableCid: {
-      const InstructionsTablePtr raw_instructions_table =
-          static_cast<const InstructionsTablePtr>(this);
-      intptr_t length = raw_instructions_table->untag()->length_;
-      instance_size = InstructionsTable::InstanceSize(length);
-      break;
-    }
     case kContextCid: {
       const ContextPtr raw_context = static_cast<const ContextPtr>(this);
       intptr_t num_variables = raw_context->untag()->num_variables_;
@@ -575,7 +568,7 @@
         Smi::Value(raw_obj->untag()->length()))
 VARIABLE_COMPRESSED_VISITOR(ContextScope, raw_obj->untag()->num_variables_)
 NULL_VISITOR(Sentinel)
-VARIABLE_VISITOR(InstructionsTable, raw_obj->untag()->length_)
+REGULAR_VISITOR(InstructionsTable)
 NULL_VISITOR(Mint)
 NULL_VISITOR(Double)
 NULL_VISITOR(Float32x4)
diff --git a/runtime/vm/raw_object.h b/runtime/vm/raw_object.h
index 6a182bb..f980fe5 100644
--- a/runtime/vm/raw_object.h
+++ b/runtime/vm/raw_object.h
@@ -1864,22 +1864,6 @@
   friend class Image;
 };
 
-class UntaggedInstructionsTable : public UntaggedObject {
-  RAW_HEAP_OBJECT_IMPLEMENTATION(InstructionsTable);
-
-  POINTER_FIELD(ArrayPtr, descriptors)
-  VISIT_FROM(descriptors)
-  VISIT_TO(descriptors)
-
-  intptr_t length_;
-  uword start_pc_;
-  uword end_pc_;
-
-  // Variable length data follows here.
-  uint32_t* data() { OPEN_ARRAY_START(uint32_t, uint32_t); }
-  const uint32_t* data() const { OPEN_ARRAY_START(uint32_t, uint32_t); }
-};
-
 class UntaggedPcDescriptors : public UntaggedObject {
  public:
 // The macro argument V is passed two arguments, the raw name of the enum value
@@ -2002,75 +1986,130 @@
   RAW_HEAP_OBJECT_IMPLEMENTATION(CompressedStackMaps);
   VISIT_NOTHING();
 
-  // The most significant bits are the length of the encoded payload, in bytes.
-  // The low bits determine the expected payload contents, as described below.
-  uint32_t flags_and_size_;
+ public:
+  struct Payload {
+    // The most significant bits are the length of the encoded payload, in
+    // bytes (excluding the header itself). The low bits determine the
+    // expected payload contents, as described below.
+    uint32_t flags_and_size;
 
-  // Variable length data follows here. The contents of the payload depend on
-  // the type of CompressedStackMaps (CSM) being represented. There are three
-  // major types of CSM:
-  //
-  // 1) GlobalTableBit = false, UsesTableBit = false: CSMs that include all
-  //    information about the stack maps. The payload for these contain tightly
-  //    packed entries with the following information:
-  //
-  //   * A header containing the following three pieces of information:
-  //     * An unsigned integer representing the PC offset as a delta from the
-  //       PC offset of the previous entry (from 0 for the first entry).
-  //     * An unsigned integer representing the number of bits used for
-  //       spill slot entries.
-  //     * An unsigned integer representing the number of bits used for other
-  //       entries.
-  //   * The body containing the bits for the stack map. The length of the body
-  //     in bits is the sum of the spill slot and non-spill slot bit counts.
-  //
-  // 2) GlobalTableBit = false, UsesTableBit = true: CSMs where the majority of
-  //    the stack map information has been offloaded and canonicalized into a
-  //    global table. The payload contains tightly packed entries with the
-  //    following information:
-  //
-  //   * A header containing just an unsigned integer representing the PC offset
-  //     delta as described above.
-  //   * The body is just an unsigned integer containing the offset into the
-  //     payload for the global table.
-  //
-  // 3) GlobalTableBit = true, UsesTableBit = false: A CSM implementing the
-  //    global table. Here, the payload contains tightly packed entries with
-  //    the following information:
-  //
-  //   * A header containing the following two pieces of information:
-  //     * An unsigned integer representing the number of bits used for
-  //       spill slot entries.
-  //     * An unsigned integer representing the number of bits used for other
-  //       entries.
-  //   * The body containing the bits for the stack map. The length of the body
-  //     in bits is the sum of the spill slot and non-spill slot bit counts.
-  //
-  // In all types of CSM, each unsigned integer is LEB128 encoded, as generally
-  // they tend to fit in a single byte or two. Thus, entry headers are not a
-  // fixed length, and currently there is no random access of entries.  In
-  // addition, PC offsets are currently encoded as deltas, which also inhibits
-  // random access without accessing previous entries. That means to find an
-  // entry for a given PC offset, a linear search must be done where the payload
-  // is decoded up to the entry whose PC offset is >= the given PC.
+    // Variable length data follows here. The contents of the payload depend on
+    // the type of CompressedStackMaps (CSM) being represented. There are three
+    // major types of CSM:
+    //
+    // 1) GlobalTableBit = false, UsesTableBit = false: CSMs that include all
+    //    information about the stack maps. The payload for these contains
+    //    tightly packed entries with the following information:
+    //
+    //   * A header containing the following three pieces of information:
+    //     * An unsigned integer representing the PC offset as a delta from the
+    //       PC offset of the previous entry (from 0 for the first entry).
+    //     * An unsigned integer representing the number of bits used for
+    //       spill slot entries.
+    //     * An unsigned integer representing the number of bits used for other
+    //       entries.
+    //   * The body containing the bits for the stack map. The length of
+    //     the body in bits is the sum of the spill slot and non-spill slot
+    //     bit counts.
+    //
+    // 2) GlobalTableBit = false, UsesTableBit = true: CSMs where the majority
+    //    of the stack map information has been offloaded and canonicalized into
+    //    a global table. The payload contains tightly packed entries with the
+    //    following information:
+    //
+    //   * A header containing just an unsigned integer representing the PC
+    //     offset delta as described above.
+    //   * The body is just an unsigned integer containing the offset into the
+    //     payload for the global table.
+    //
+    // 3) GlobalTableBit = true, UsesTableBit = false: A CSM implementing the
+    //    global table. Here, the payload contains tightly packed entries with
+    //    the following information:
+    //
+    //   * A header containing the following two pieces of information:
+    //     * An unsigned integer representing the number of bits used for
+    //       spill slot entries.
+    //     * An unsigned integer representing the number of bits used for other
+    //       entries.
+    //   * The body containing the bits for the stack map. The length of the
+    //     body in bits is the sum of the spill slot and non-spill slot bit
+    //     counts.
+    //
+    // In all types of CSM, each unsigned integer is LEB128 encoded, as
+    // generally they tend to fit in a single byte or two. Thus, entry headers
+    // are not a fixed length, and currently there is no random access of
+    // entries. In addition, PC offsets are currently encoded as deltas, which
+    // also inhibits random access without accessing previous entries. That
+    // means that to find an entry for a given PC offset, a linear search
+    // must be done where the payload is decoded up to the entry whose PC
+    // offset is greater than or equal to the given PC.
 
-  uint8_t* data() { OPEN_ARRAY_START(uint8_t, uint8_t); }
-  const uint8_t* data() const { OPEN_ARRAY_START(uint8_t, uint8_t); }
+    uint8_t* data() { OPEN_ARRAY_START(uint8_t, uint8_t); }
+    const uint8_t* data() const { OPEN_ARRAY_START(uint8_t, uint8_t); }
+  };
+  static_assert(sizeof(Payload) == sizeof(uint32_t));
+
+ private:
+  // We are using OPEN_ARRAY_START rather than embedding Payload directly into
+  // UntaggedCompressedStackMaps as a field because doing so would introduce
+  // padding at the end of UntaggedCompressedStackMaps, meaning we could not
+  // use sizeof(UntaggedCompressedStackMaps) as the size of the header
+  // anyway.
+  Payload* payload() { OPEN_ARRAY_START(Payload, uint32_t); }
+  const Payload* payload() const { OPEN_ARRAY_START(Payload, uint32_t); }
 
   class GlobalTableBit : public BitField<uint32_t, bool, 0, 1> {};
   class UsesTableBit
       : public BitField<uint32_t, bool, GlobalTableBit::kNextBit, 1> {};
-  class SizeField : public BitField<uint32_t,
-                                    uint32_t,
-                                    UsesTableBit::kNextBit,
-                                    sizeof(flags_and_size_) * kBitsPerByte -
-                                        UsesTableBit::kNextBit> {};
+  class SizeField
+      : public BitField<uint32_t,
+                        uint32_t,
+                        UsesTableBit::kNextBit,
+                        sizeof(Payload::flags_and_size) * kBitsPerByte -
+                            UsesTableBit::kNextBit> {};
 
   friend class Object;
   friend class ImageWriter;
   friend class StackMapEntry;
 };
 
+class UntaggedInstructionsTable : public UntaggedObject {
+  RAW_HEAP_OBJECT_IMPLEMENTATION(InstructionsTable);
+
+  POINTER_FIELD(ArrayPtr, code_objects)
+  VISIT_FROM(code_objects)
+  VISIT_TO(code_objects)
+
+  struct DataEntry {
+    uint32_t pc_offset;
+    uint32_t stack_map_offset;
+  };
+  static_assert(sizeof(DataEntry) == sizeof(uint32_t) * 2);
+
+  struct Data {
+    uint32_t canonical_stack_map_entries_offset;
+    uint32_t length;
+    uint32_t first_entry_with_code;
+    uint32_t padding;
+
+    const DataEntry* entries() const { OPEN_ARRAY_START(DataEntry, uint32_t); }
+
+    const UntaggedCompressedStackMaps::Payload* StackMapAt(
+        intptr_t offset) const {
+      return reinterpret_cast<UntaggedCompressedStackMaps::Payload*>(
+          reinterpret_cast<uword>(this) + offset);
+    }
+  };
+  static_assert(sizeof(Data) == sizeof(uint32_t) * 4);
+
+  intptr_t length_;
+  const Data* rodata_;
+  uword start_pc_;
+  uword end_pc_;
+
+  friend class Deserializer;
+};
+
 class UntaggedLocalVarDescriptors : public UntaggedObject {
  public:
   enum VarInfoKind {
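Note: the Payload comment above fully specifies the inline encoding: LEB128-encoded entry headers followed by the stack map bits, with PC offsets stored as deltas, so lookup is a linear decode. Purely as an illustration of that description (not part of this patch, and not the VM's actual iterator), the sketch below walks a GlobalTableBit = false, UsesTableBit = false payload. DecodeLEB128, the byte alignment of entry bodies, and the flag constants are assumptions derived from the comment and the GlobalTableBit/UsesTableBit/SizeField declarations.

```cpp
// Illustrative sketch only; the real CompressedStackMaps::Iterator also
// handles the table-using and global-table payload forms.
#include <cstddef>
#include <cstdint>

namespace sketch {

// Unsigned LEB128 decode, advancing *cursor past the encoded integer.
uint32_t DecodeLEB128(const uint8_t* data, size_t* cursor) {
  uint32_t value = 0;
  int shift = 0;
  uint8_t byte;
  do {
    byte = data[(*cursor)++];
    value |= static_cast<uint32_t>(byte & 0x7f) << shift;
    shift += 7;
  } while ((byte & 0x80) != 0);
  return value;
}

// Linearly decodes entries until one with PC offset >= target is reached,
// as required by the delta encoding of PC offsets.
bool FindEntry(const uint8_t* payload, uint32_t flags_and_size,
               uint32_t target_pc_offset) {
  const uint32_t kGlobalTableBit = 1u << 0;  // Mirrors GlobalTableBit.
  const uint32_t kUsesTableBit = 1u << 1;    // Mirrors UsesTableBit.
  if ((flags_and_size & (kGlobalTableBit | kUsesTableBit)) != 0) return false;
  const size_t payload_size = flags_and_size >> 2;  // Mirrors SizeField.
  size_t cursor = 0;
  uint32_t pc_offset = 0;
  while (cursor < payload_size) {
    pc_offset += DecodeLEB128(payload, &cursor);  // PC offset delta.
    const uint32_t spill_bits = DecodeLEB128(payload, &cursor);
    const uint32_t other_bits = DecodeLEB128(payload, &cursor);
    if (pc_offset >= target_pc_offset) {
      // payload + cursor now points at the stack map bits for this entry.
      return pc_offset == target_pc_offset;
    }
    // Skip the body; this sketch assumes it is padded to a byte boundary.
    cursor += (spill_bits + other_bits + 7) / 8;
  }
  return false;
}

}  // namespace sketch
```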
diff --git a/runtime/vm/raw_object_fields.cc b/runtime/vm/raw_object_fields.cc
index 1099528..e59f439 100644
--- a/runtime/vm/raw_object_fields.cc
+++ b/runtime/vm/raw_object_fields.cc
@@ -104,7 +104,7 @@
   F(ICData, args_descriptor_)                                                  \
   F(ICData, entries_)                                                          \
   F(ICData, owner_)                                                            \
-  F(InstructionsTable, descriptors_)                                           \
+  F(InstructionsTable, code_objects_)                                          \
   F(MegamorphicCache, target_name_)                                            \
   F(MegamorphicCache, args_descriptor_)                                        \
   F(MegamorphicCache, buckets_)                                                \
diff --git a/runtime/vm/reverse_pc_lookup_cache.cc b/runtime/vm/reverse_pc_lookup_cache.cc
index 97bf223..77d4716 100644
--- a/runtime/vm/reverse_pc_lookup_cache.cc
+++ b/runtime/vm/reverse_pc_lookup_cache.cc
@@ -11,10 +11,9 @@
 
 namespace dart {
 
-ObjectPtr ReversePc::FindCodeDescriptorInGroup(IsolateGroup* group,
-                                               uword pc,
-                                               bool is_return_address,
-                                               uword* code_start) {
+CodePtr ReversePc::FindCodeInGroup(IsolateGroup* group,
+                                   uword pc,
+                                   bool is_return_address) {
 #if defined(DART_PRECOMPILED_RUNTIME)
   // This can run in the middle of GC and must not allocate handles.
   NoSafepointScope no_safepoint;
@@ -31,30 +30,80 @@
   for (intptr_t i = 0; i < tables_length; i++) {
     InstructionsTablePtr table = static_cast<InstructionsTablePtr>(
         tables->untag()->data()->untag()->element(i));
-    intptr_t index = InstructionsTable::FindEntry(table, pc);
-    if (index >= 0) {
-      *code_start = InstructionsTable::PayloadStartAt(table, index);
-      return InstructionsTable::DescriptorAt(table, index);
+    CodePtr code = InstructionsTable::FindCode(table, pc);
+    if (code != Code::null()) {
+      return code;
+    }
+  }
+#endif  // defined(DART_PRECOMPILED_RUNTIME)
+  return Code::null();
+}
+
+const UntaggedCompressedStackMaps::Payload* ReversePc::FindStackMapInGroup(
+    IsolateGroup* group,
+    uword pc,
+    bool is_return_address,
+    uword* code_start,
+    const UntaggedCompressedStackMaps::Payload** global_table) {
+#if defined(DART_PRECOMPILED_RUNTIME)
+  // This can run in the middle of GC and must not allocate handles.
+  NoSafepointScope no_safepoint;
+
+  if (is_return_address) {
+    pc--;
+  }
+
+  // The expected number of tables is low (one per loading unit), so we go
+  // through them linearly. If this changes, we could sort the table list
+  // during deserialization and binary search for the table.
+  GrowableObjectArrayPtr tables = group->object_store()->instructions_tables();
+  intptr_t tables_length = Smi::Value(tables->untag()->length());
+  for (intptr_t i = 0; i < tables_length; i++) {
+    InstructionsTablePtr table = static_cast<InstructionsTablePtr>(
+        tables->untag()->data()->untag()->element(i));
+    auto map = InstructionsTable::FindStackMap(table, pc, code_start);
+    if (map != nullptr) {
+      // Take global table from the first table.
+      table = static_cast<InstructionsTablePtr>(
+          tables->untag()->data()->untag()->element(0));
+      *global_table = InstructionsTable::GetCanonicalStackMap(table);
+      return map;
     }
   }
 #endif  // defined(DART_PRECOMPILED_RUNTIME)
 
   *code_start = 0;
-  return Object::null();
+  return nullptr;
 }
 
-ObjectPtr ReversePc::FindCodeDescriptor(IsolateGroup* group,
-                                        uword pc,
-                                        bool is_return_address,
-                                        uword* code_start) {
+const UntaggedCompressedStackMaps::Payload* ReversePc::FindStackMap(
+    IsolateGroup* group,
+    uword pc,
+    bool is_return_address,
+    uword* code_start,
+    const UntaggedCompressedStackMaps::Payload** global_table) {
   ASSERT(FLAG_precompiled_mode);
   NoSafepointScope no_safepoint;
 
-  ObjectPtr code_descriptor =
-      FindCodeDescriptorInGroup(group, pc, is_return_address, code_start);
-  if (code_descriptor == Object::null()) {
-    code_descriptor = FindCodeDescriptorInGroup(Dart::vm_isolate_group(), pc,
-                                                is_return_address, code_start);
+  auto map = FindStackMapInGroup(group, pc, is_return_address, code_start,
+                                 global_table);
+  if (map == nullptr) {
+    map = FindStackMapInGroup(Dart::vm_isolate_group(), pc, is_return_address,
+                              code_start, global_table);
+  }
+  return map;
+}
+
+CodePtr ReversePc::FindCode(IsolateGroup* group,
+                            uword pc,
+                            bool is_return_address) {
+  ASSERT(FLAG_precompiled_mode);
+  NoSafepointScope no_safepoint;
+
+  auto code_descriptor = FindCodeInGroup(group, pc, is_return_address);
+  if (code_descriptor == Code::null()) {
+    code_descriptor =
+        FindCodeInGroup(Dart::vm_isolate_group(), pc, is_return_address);
   }
   return code_descriptor;
 }
@@ -65,44 +114,7 @@
   ASSERT(FLAG_precompiled_mode);
   NoSafepointScope no_safepoint;
 
-  uword code_start;
-  ObjectPtr code_descriptor =
-      FindCodeDescriptor(group, pc, is_return_address, &code_start);
-  if (code_descriptor != Object::null()) {
-    if (!code_descriptor->IsCode()) {
-      ASSERT(StubCode::UnknownDartCode().PayloadStart() == 0);
-      ASSERT(StubCode::UnknownDartCode().Size() == kUwordMax);
-      ASSERT(StubCode::UnknownDartCode().IsFunctionCode());
-      ASSERT(StubCode::UnknownDartCode().IsUnknownDartCode());
-      code_descriptor = StubCode::UnknownDartCode().ptr();
-    }
-  }
-  return static_cast<CodePtr>(code_descriptor);
-}
-
-CompressedStackMapsPtr ReversePc::FindCompressedStackMaps(
-    IsolateGroup* group,
-    uword pc,
-    bool is_return_address,
-    uword* code_start) {
-  ASSERT(FLAG_precompiled_mode);
-  NoSafepointScope no_safepoint;
-
-  ObjectPtr code_descriptor =
-      FindCodeDescriptor(group, pc, is_return_address, code_start);
-  if (code_descriptor != Object::null()) {
-    if (code_descriptor->IsCode()) {
-      CodePtr code = static_cast<CodePtr>(code_descriptor);
-      ASSERT(*code_start == Code::PayloadStartOf(code));
-      return code->untag()->compressed_stackmaps();
-    } else {
-      ASSERT(code_descriptor->IsCompressedStackMaps());
-      return static_cast<CompressedStackMapsPtr>(code_descriptor);
-    }
-  }
-
-  *code_start = 0;
-  return CompressedStackMaps::null();
+  return FindCode(group, pc, is_return_address);
 }
 
 }  // namespace dart
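Note: InstructionsTable::FindCode and InstructionsTable::FindStackMap, used above, work off the Data/DataEntry rodata layout declared in raw_object.h earlier in this change. The following is only a hedged sketch of how such a lookup could be structured, assuming entries are sorted by pc_offset; the VM's actual implementation may differ.

```cpp
// Illustrative sketch only, not part of the patch. The struct layouts mirror
// UntaggedInstructionsTable::Data and DataEntry; the field interpretations
// and the binary search are assumptions made for this sketch.
#include <cstdint>

namespace sketch {

struct DataEntry {
  uint32_t pc_offset;         // Presumed start of this entry's code payload.
  uint32_t stack_map_offset;  // Presumed offset of its stack map in rodata.
};

struct Data {
  uint32_t canonical_stack_map_entries_offset;
  uint32_t length;  // Number of DataEntry records that follow.
  uint32_t first_entry_with_code;
  uint32_t padding;
  const DataEntry* entries() const {
    // DataEntry records follow the fixed 16-byte header.
    return reinterpret_cast<const DataEntry*>(this + 1);
  }
};

// Returns the index of the last entry whose pc_offset is <= pc_offset,
// or -1 if pc_offset precedes every entry.
intptr_t FindEntryIndex(const Data* rodata, uint32_t pc_offset) {
  intptr_t lo = 0;
  intptr_t hi = static_cast<intptr_t>(rodata->length) - 1;
  intptr_t result = -1;
  while (lo <= hi) {
    const intptr_t mid = lo + (hi - lo) / 2;
    if (rodata->entries()[mid].pc_offset <= pc_offset) {
      result = mid;
      lo = mid + 1;
    } else {
      hi = mid - 1;
    }
  }
  return result;
}

}  // namespace sketch
```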
diff --git a/runtime/vm/reverse_pc_lookup_cache.h b/runtime/vm/reverse_pc_lookup_cache.h
index e214747..21aef18 100644
--- a/runtime/vm/reverse_pc_lookup_cache.h
+++ b/runtime/vm/reverse_pc_lookup_cache.h
@@ -7,6 +7,7 @@
 
 #include "vm/allocation.h"
 #include "vm/globals.h"
+#include "vm/raw_object.h"
 #include "vm/tagged_pointer.h"
 
 namespace dart {
@@ -22,24 +23,27 @@
   // given isolate |group| and vm isolate group.
   static CodePtr Lookup(IsolateGroup* group, uword pc, bool is_return_address);
 
-  // Looks for CompressedStackMaps corresponding to |pc| in the
-  // given isolate |group| and vm isolate group.
-  // Sets |code_start| to the beginning of the instructions corresponding
-  // to |pc| (like Code::PayloadStart()).
-  static CompressedStackMapsPtr FindCompressedStackMaps(IsolateGroup* group,
-                                                        uword pc,
-                                                        bool is_return_address,
-                                                        uword* code_start);
+  static const UntaggedCompressedStackMaps::Payload* FindStackMap(
+      IsolateGroup* group,
+      uword pc,
+      bool is_return_address,
+      uword* code_start,
+      const UntaggedCompressedStackMaps::Payload** global_table);
 
  private:
-  static ObjectPtr FindCodeDescriptorInGroup(IsolateGroup* group,
-                                             uword pc,
-                                             bool is_return_address,
-                                             uword* code_start);
-  static ObjectPtr FindCodeDescriptor(IsolateGroup* group,
-                                      uword pc,
-                                      bool is_return_address,
-                                      uword* code_start);
+  static const UntaggedCompressedStackMaps::Payload* FindStackMapInGroup(
+      IsolateGroup* group,
+      uword pc,
+      bool is_return_address,
+      uword* code_start,
+      const UntaggedCompressedStackMaps::Payload** global_table);
+
+  static CodePtr FindCodeInGroup(IsolateGroup* group,
+                                 uword pc,
+                                 bool is_return_address);
+  static CodePtr FindCode(IsolateGroup* group,
+                          uword pc,
+                          bool is_return_address);
 };
 
 }  // namespace dart
diff --git a/runtime/vm/stack_frame.cc b/runtime/vm/stack_frame.cc
index e1fe0b4..55eebc9 100644
--- a/runtime/vm/stack_frame.cc
+++ b/runtime/vm/stack_frame.cc
@@ -205,13 +205,18 @@
   // helper functions to the raw object interface.
   NoSafepointScope no_safepoint;
   Code code;
-  CompressedStackMaps maps;
+
+  CompressedStackMaps::RawPayloadHandle maps;
+  CompressedStackMaps::RawPayloadHandle global_table;
+
   uword code_start;
 
   if (FLAG_precompiled_mode) {
-    maps = ReversePc::FindCompressedStackMaps(isolate_group(), pc(),
-                                              /*is_return_address=*/true,
-                                              &code_start);
+    const UntaggedCompressedStackMaps::Payload* global_table_payload;
+    maps = ReversePc::FindStackMap(isolate_group(), pc(),
+                                   /*is_return_address=*/true, &code_start,
+                                   &global_table_payload);
+    global_table = global_table_payload;
   } else {
     ObjectPtr pc_marker = *(reinterpret_cast<ObjectPtr*>(
         fp() + (runtime_frame_layout.code_from_fp * kWordSize)));
@@ -221,8 +226,12 @@
     if (pc_marker->IsHeapObject() && (pc_marker->GetClassId() == kCodeCid)) {
       code ^= pc_marker;
       code_start = code.PayloadStart();
+      ASSERT(code.compressed_stackmaps() != CompressedStackMaps::null());
       maps = code.compressed_stackmaps();
-      ASSERT(!maps.IsNull());
+      if (maps.UsesGlobalTable()) {
+        global_table =
+            isolate_group()->object_store()->canonicalized_stack_map_entries();
+      }
     } else {
       ASSERT(pc_marker == Object::null());
     }
@@ -231,11 +240,8 @@
   if (!maps.IsNull()) {
     // Optimized frames have a stack map. We need to visit the frame based
     // on the stack map.
-    CompressedStackMaps global_table;
-
-    global_table =
-        isolate_group()->object_store()->canonicalized_stack_map_entries();
-    CompressedStackMaps::Iterator it(maps, global_table);
+    CompressedStackMaps::Iterator<CompressedStackMaps::RawPayloadHandle> it(
+        maps, global_table);
     const uint32_t pc_offset = pc() - code_start;
     if (it.Find(pc_offset)) {
       ObjectPtr* first = reinterpret_cast<ObjectPtr*>(sp());
diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn
index 2b4fa76..f5a7544 100644
--- a/sdk/BUILD.gn
+++ b/sdk/BUILD.gn
@@ -26,6 +26,7 @@
   dart_stripped_binary = "dart"
   dart_precompiled_runtime_stripped_binary = "dart_precompiled_runtime_product"
   gen_snapshot_stripped_binary = "gen_snapshot_product"
+  analyze_snapshot_binary = "analyze_snapshot_product"
 }
 
 # The directory layout of the SDK is as follows:
@@ -330,6 +331,15 @@
   ]
 }
 
+copy("copy_analyze_snapshot") {
+  visibility = [ ":group_dart2native" ]
+  deps = [ "../runtime/bin:analyze_snapshot_product" ]
+  src_dir =
+      get_label_info("../runtime/bin:analyze_snapshot_product", "root_out_dir")
+  sources = [ "$src_dir/${analyze_snapshot_binary}${executable_suffix}" ]
+  outputs = [ "$root_out_dir/$dart_sdk_output/bin/utils/analyze_snapshot${executable_suffix}" ]
+}
+
 copy("copy_vm_platform_strong_product") {
   visibility = [ ":group_dart2native" ]
   deps = [ "../runtime/vm:vm_platform_product" ]
@@ -349,6 +359,7 @@
 
 group("group_dart2native") {
   deps = [
+    ":copy_analyze_snapshot",
     ":copy_dartaotruntime",
     ":copy_gen_kernel_snapshot",
     ":copy_gen_snapshot",
diff --git a/sdk/lib/html/doc/WORKAROUNDS.md b/sdk/lib/html/doc/WORKAROUNDS.md
index 39a1c06..53ed156 100644
--- a/sdk/lib/html/doc/WORKAROUNDS.md
+++ b/sdk/lib/html/doc/WORKAROUNDS.md
@@ -19,14 +19,46 @@
 As mentioned above, there exist stale interfaces. While some of these may be
 fixed in the source code, many might not be.
 
-In order to circumvent this, you can use the `js_util` library, like
-`getProperty`, `setProperty`, `callMethod`, and `callConstructor`.
+In order to work around this, you can use the annotation `@staticInterop` from
+`package:js`.
 
 Let’s look at an example. `FileReader` is a `dart:html` interface that is
 missing the API `readAsBinaryString` ([#42834][]). We can work around this by
 doing something like the following:
 
 ```dart
+@JS()
+library workarounds;
+
+import 'dart:html';
+
+import 'package:async_helper/async_minitest.dart';
+import 'package:expect/expect.dart';
+import 'package:js/js.dart';
+
+@JS()
+@staticInterop
+class JSFileReader {}
+
+extension JSFileReaderExtension on JSFileReader {
+  external void readAsBinaryString(Blob blob);
+}
+
+void main() async {
+  var reader = new FileReader();
+  reader.onLoad.listen(expectAsync((event) {
+    String result = reader.result as String;
+    Expect.equals(result, '00000000');
+  }));
+  var jsReader = reader as JSFileReader;
+  jsReader.readAsBinaryString(new Blob(['00000000']));
+}
+```
+
+Alternatively, you can use the `js_util` library directly, with methods such
+as `getProperty`, `setProperty`, `callMethod`, and `callConstructor`.
+
+```dart
 import 'dart:html';
 import 'dart:js_util' as js_util;
 
@@ -46,9 +78,37 @@
 }
 ```
 
-In the case where the API is missing a constructor, we can use
-`callConstructor`. For example, instead of using the factory constructor for
-`KeyboardEvent`, we can do the following:
+In the case where the API is missing a constructor, we can define a constructor
+within a `@staticInterop` class. Note that constructors, `external` or
+otherwise, are disallowed in extensions currently. For example:
+
+```dart
+@JS()
+library workarounds;
+
+import 'dart:js_util' as js_util;
+
+import 'package:expect/expect.dart';
+import 'package:js/js.dart';
+
+@JS('KeyboardEvent')
+@staticInterop
+class JSKeyboardEvent {
+  external JSKeyboardEvent(String typeArg, Object keyboardEventInit);
+}
+
+extension JSKeyboardEventExtension on JSKeyboardEvent {
+  external String get key;
+}
+
+void main() {
+  var event = JSKeyboardEvent('KeyboardEvent',
+      js_util.jsify({'key': 'A'}));
+  Expect.equals(event.key, 'A');
+}
+```
+
+or with `js_util`'s `callConstructor`:
 
 ```dart
 import 'dart:html';
@@ -73,11 +133,45 @@
 `USBDevice` ([#42200][]) due to historical reasons. These native interfaces are
 marked with `@Native`, are private, and have no attributes associated with them.
 Therefore, unlike other `@Native` objects, we can’t access any of the APIs or
-attributes associated with this interface. We can use the `js_util` library
-again to circumvent this issue. For example, we can manipulate a
-`_SubtleCrypto` object:
+attributes associated with this interface. We can again either use the
+`@staticInterop` annotation or use the `js_util` library to circumvent this
+issue. For example, we can abstract a `_SubtleCrypto` object:
 
 ```dart
+@JS()
+library workarounds;
+
+import 'dart:html';
+import 'dart:js_util' as js_util;
+import 'dart:typed_data';
+
+import 'package:js/js.dart';
+
+@JS()
+external Crypto get crypto;
+
+@JS()
+@staticInterop
+class JSSubtleCrypto {}
+
+extension JSSubtleCryptoExtension on JSSubtleCrypto {
+  external dynamic digest(String algorithm, Uint8List data);
+  Future<ByteBuffer> digestFuture(String algorithm, Uint8List data) =>
+      js_util.promiseToFuture(digest(algorithm, data));
+}
+
+void main() async {
+  var subtle = crypto.subtle! as JSSubtleCrypto;
+  var digest = await subtle.digestFuture('SHA-256', Uint8List(16));
+}
+```
+
+or with `js_util`:
+
+```dart
+@JS()
+library workarounds;
+
 import 'dart:html';
 import 'dart:js_util' as js_util;
 import 'dart:typed_data';
@@ -96,10 +190,13 @@
 }
 ```
 
-What you shouldn’t do is attempt to cast these native objects using your own JS
-interop types, e.g.
+What you shouldn’t do is attempt to cast these native objects using the
+non-`@staticInterop` `package:js` types, e.g.
 
 ```dart
+@JS()
+library workarounds;
+
 import 'dart:html';
 
 import 'package:js/js.dart';
@@ -115,14 +212,13 @@
 }
 ```
 
-With the above, you’ll see a type error:
+With the above, you’ll see a static error:
 
-`Uncaught TypeError: Instance of 'SubtleCrypto': type 'Interceptor' is not a subtype of type 'SubtleCrypto'`
+`Error: Non-static JS interop class 'SubtleCrypto' conflicts with natively supported class '_SubtleCrypto' in 'dart:html'.`
 
 This is because the types in the `@Native` annotation are reserved and the above
 leads to namespace conflicts between the `@Native` type and the user JS interop
-type in the compiler. These `@Native` types inherit the `Interceptor` class,
-which is why you see the message above.
+type in the compiler. `@staticInterop` classes, however, don't have this issue.
 
 [#42834]: https://github.com/dart-lang/sdk/issues/42834
 [#42200]: https://github.com/dart-lang/sdk/issues/42200
diff --git a/tools/VERSION b/tools/VERSION
index 05c9ce1..d08d4df 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
 MAJOR 2
 MINOR 16
 PATCH 0
-PRERELEASE 116
+PRERELEASE 117
 PRERELEASE_PATCH 0
\ No newline at end of file
diff --git a/tools/bots/test_matrix.json b/tools/bots/test_matrix.json
index a2b07417..388f037 100644
--- a/tools/bots/test_matrix.json
+++ b/tools/bots/test_matrix.json
@@ -798,7 +798,8 @@
     },
     "dartkp-linux-(debug|product|release)-simarm-crossword": {
       "options": {
-        "builder-tag": "crossword"
+        "builder-tag": "crossword",
+        "use-elf": true
       }
     },
     "dartkp-weak-asserts-linux-(debug|product|release)-simarm-crossword": {
diff --git a/tools/gn.py b/tools/gn.py
index fe4718d..32a6953 100755
--- a/tools/gn.py
+++ b/tools/gn.py
@@ -269,6 +269,8 @@
             'exe.stripped/dart_precompiled_runtime_product')
         gn_args['gen_snapshot_stripped_binary'] = (
             'exe.stripped/gen_snapshot_product')
+        gn_args['analyze_snapshot_binary'] = (
+            'exe.stripped/analyze_snapshot_product')
 
     # Setup the user-defined sysroot.
     if UseSysroot(args, gn_args):