Version 2.12.0-228.0.dev

Merge commit 'b1b45645d08d0da9575e0fde96677e779db8439a' into 'dev'
diff --git a/pkg/analyzer/lib/src/error/best_practices_verifier.dart b/pkg/analyzer/lib/src/error/best_practices_verifier.dart
index 5030608..4085b7a 100644
--- a/pkg/analyzer/lib/src/error/best_practices_verifier.dart
+++ b/pkg/analyzer/lib/src/error/best_practices_verifier.dart
@@ -1731,6 +1731,10 @@
       return;
     }
 
+    if (parent is HideCombinator) {
+      return;
+    }
+
     _checkForInvalidInternalAccess(identifier, element);
     _checkForOtherInvalidAccess(identifier, element);
   }
diff --git a/pkg/analyzer/test/src/diagnostics/deprecated_member_use_test.dart b/pkg/analyzer/test/src/diagnostics/deprecated_member_use_test.dart
index 875eb66..186c7a6 100644
--- a/pkg/analyzer/test/src/diagnostics/deprecated_member_use_test.dart
+++ b/pkg/analyzer/test/src/diagnostics/deprecated_member_use_test.dart
@@ -411,10 +411,6 @@
   "configVersion": 2,
   "packages": [ ${packages.join(', ')} ]
 }''');
-    print('''{
-  "configVersion": 2,
-  "packages": [ ${packages.join(', ')} ]
-}''');
   }
 }
 
diff --git a/pkg/analyzer/test/src/diagnostics/invalid_use_of_visible_for_testing_member_test.dart b/pkg/analyzer/test/src/diagnostics/invalid_use_of_visible_for_testing_member_test.dart
index 998e775..2ef124c 100644
--- a/pkg/analyzer/test/src/diagnostics/invalid_use_of_visible_for_testing_member_test.dart
+++ b/pkg/analyzer/test/src/diagnostics/invalid_use_of_visible_for_testing_member_test.dart
@@ -25,18 +25,40 @@
     writeTestPackageConfigWithMeta();
   }
 
-  test_export() async {
-    newFile('$testPackageRootPath/lib1.dart', content: r'''
+  test_export_hide() async {
+    newFile('$testPackageLibPath/a.dart', content: r'''
 import 'package:meta/meta.dart';
+
 @visibleForTesting
-int fn0() => 1;
-''');
-    newFile('$testPackageRootPath/lib2.dart', content: r'''
-export 'lib1.dart' show fn0;
+class A {}
+
+class B {}
 ''');
 
-    await _resolveFile('$testPackageRootPath/lib1.dart');
-    await _resolveFile('$testPackageRootPath/lib2.dart');
+    newFile('$testPackageLibPath/b.dart', content: r'''
+export 'a.dart' hide A;
+''');
+
+    await _resolveFile('$testPackageLibPath/a.dart');
+    await _resolveFile('$testPackageLibPath/b.dart');
+  }
+
+  test_export_show() async {
+    newFile('$testPackageLibPath/a.dart', content: r'''
+import 'package:meta/meta.dart';
+
+@visibleForTesting
+class A {}
+
+class B {}
+''');
+
+    newFile('$testPackageLibPath/b.dart', content: r'''
+export 'a.dart' show A;
+''');
+
+    await _resolveFile('$testPackageLibPath/a.dart');
+    await _resolveFile('$testPackageLibPath/b.dart');
   }
 
   test_fromIntegrationTestDirectory() async {
@@ -176,6 +198,49 @@
     ]);
   }
 
+  test_import_hide() async {
+    newFile('$testPackageLibPath/a.dart', content: r'''
+import 'package:meta/meta.dart';
+
+@visibleForTesting
+class A {}
+
+class B {}
+''');
+
+    newFile('$testPackageLibPath/b.dart', content: r'''
+import 'a.dart' hide A;
+
+void f(B _) {}
+''');
+
+    await _resolveFile('$testPackageLibPath/a.dart');
+    await _resolveFile('$testPackageLibPath/b.dart');
+  }
+
+  test_import_show() async {
+    newFile('$testPackageLibPath/a.dart', content: r'''
+import 'package:meta/meta.dart';
+
+@visibleForTesting
+class A {}
+
+class B {}
+''');
+
+    newFile('$testPackageLibPath/b.dart', content: r'''
+import 'a.dart' show A;
+
+void f(A _) {}
+''');
+
+    await _resolveFile('$testPackageLibPath/a.dart');
+    await _resolveFile('$testPackageLibPath/b.dart', [
+      error(HintCode.INVALID_USE_OF_VISIBLE_FOR_TESTING_MEMBER, 21, 1),
+      error(HintCode.INVALID_USE_OF_VISIBLE_FOR_TESTING_MEMBER, 32, 1),
+    ]);
+  }
+
   test_method() async {
     newFile('$testPackageRootPath/lib1.dart', content: r'''
 import 'package:meta/meta.dart';
diff --git a/pkg/nnbd_migration/lib/migration_cli.dart b/pkg/nnbd_migration/lib/migration_cli.dart
index 0c70a3e..7013c22 100644
--- a/pkg/nnbd_migration/lib/migration_cli.dart
+++ b/pkg/nnbd_migration/lib/migration_cli.dart
@@ -517,6 +517,12 @@
 
   bool _hasAnalysisErrors = false;
 
+  /// Subscription of interrupt signals (control-C).
+  StreamSubscription<ProcessSignal> _sigIntSubscription;
+
+  /// Completes when an interrupt signal (control-C) is received.
+  Completer<void> sigIntSignalled;
+
   MigrationCliRunner(this.cli, this.options, {Logger logger})
       : logger = logger ?? cli.logger;
 
@@ -559,14 +565,6 @@
   /// derived class.
   void applyHook() {}
 
-  /// Blocks until an interrupt signal (control-C) is received.  Tests may
-  /// override this method to simulate control-C.
-  @visibleForTesting
-  Future<void> blockUntilSignalInterrupt() {
-    Stream<ProcessSignal> stream = ProcessSignal.sigint.watch();
-    return stream.first;
-  }
-
   /// Computes the internet address that should be passed to `HttpServer.bind`
   /// when starting the preview server.  May be overridden in derived classes.
   Object computeBindAddress() {
@@ -613,6 +611,18 @@
         sdkPath: sdkPath);
   }
 
+  /// Subscribes to the interrupt signal (control-C).
+  @visibleForTesting
+  void listenForSignalInterrupt() {
+    var stream = ProcessSignal.sigint.watch();
+    sigIntSignalled = Completer();
+    _sigIntSubscription = stream.listen((_) {
+      if (!sigIntSignalled.isCompleted) {
+        sigIntSignalled.complete();
+      }
+    });
+  }
+
   @override
   void onException(String detail) {
     if (_hasExceptions) {
@@ -770,8 +780,15 @@
 
 ''');
 
-      // Block until sigint (ctrl-c).
-      await blockUntilSignalInterrupt();
+      listenForSignalInterrupt();
+      await Future.any([
+        sigIntSignalled.future,
+        nonNullableFix.serverIsShutdown.future,
+      ]);
+      // Either the interrupt signal was caught, or the server was shut down.
+      // Either way, cancel the interrupt signal subscription, and shut down
+      // the server.
+      _sigIntSubscription?.cancel();
       nonNullableFix.shutdownServer();
     } else {
       logger.stdout(ansi.emphasized('Diff of changes:'));
diff --git a/pkg/nnbd_migration/lib/src/front_end/non_nullable_fix.dart b/pkg/nnbd_migration/lib/src/front_end/non_nullable_fix.dart
index 980812c..23be8f3 100644
--- a/pkg/nnbd_migration/lib/src/front_end/non_nullable_fix.dart
+++ b/pkg/nnbd_migration/lib/src/front_end/non_nullable_fix.dart
@@ -2,6 +2,7 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
+import 'dart:async';
 import 'dart:convert' show jsonDecode, JsonEncoder;
 
 import 'package:analyzer/dart/analysis/features.dart';
@@ -78,6 +79,9 @@
   /// migrated.
   final bool Function(String) shouldBeMigratedFunction;
 
+  /// Completes when the server has been shutdown.
+  Completer<void> serverIsShutdown;
+
   NonNullableFix(this.listener, this.resourceProvider, this._getLineInfo,
       this.bindAddress, this._logger, this.shouldBeMigratedFunction,
       {List<String> included = const [],
@@ -174,8 +178,11 @@
   }
 
   void shutdownServer() {
-    _server?.close();
-    _server = null;
+    if (_server != null) {
+      _server.close();
+      _server = null;
+      serverIsShutdown.complete();
+    }
   }
 
   Future<void> startPreviewServer(
@@ -183,13 +190,18 @@
     // This method may be called multiple times, for example during a re-run.
     // But the preview server should only be started once.
     if (_server == null) {
-      _server = HttpPreviewServer(
-          state, rerun, applyHook, bindAddress, preferredPort, _logger);
+      var wrappedApplyHookWithShutdown = () {
+        shutdownServer();
+        applyHook();
+      };
+      _server = HttpPreviewServer(state, rerun, wrappedApplyHookWithShutdown,
+          bindAddress, preferredPort, _logger);
       _server.serveHttp();
       _allServers.add(_server);
       var serverHostname = await _server.boundHostname;
       var serverPort = await _server.boundPort;
       authToken = await _server.authToken;
+      serverIsShutdown = Completer();
 
       previewUrls = [
         // TODO(jcollins-g): Change protocol to only return a single string.
diff --git a/pkg/nnbd_migration/lib/src/preview/http_preview_server.dart b/pkg/nnbd_migration/lib/src/preview/http_preview_server.dart
index b4cf7d1..c817e97 100644
--- a/pkg/nnbd_migration/lib/src/preview/http_preview_server.dart
+++ b/pkg/nnbd_migration/lib/src/preview/http_preview_server.dart
@@ -27,7 +27,7 @@
   final MigrationState migrationState;
 
   /// The [PreviewSite] that can handle GET and POST requests.
-  PreviewSite previewSite;
+  PreviewSite _previewSite;
 
   /// Future that is completed with the HTTP server once it is running.
   Future<HttpServer> _serverFuture;
@@ -57,8 +57,6 @@
 
   Future<String> get authToken async {
     await _serverFuture;
-    previewSite ??=
-        PreviewSite(migrationState, rerunFunction, applyHook, _logger);
     return previewSite.serviceAuthToken;
   }
 
@@ -72,6 +70,9 @@
     return (await _serverFuture)?.port;
   }
 
+  PreviewSite get previewSite => _previewSite ??=
+      PreviewSite(migrationState, rerunFunction, applyHook, _logger);
+
   void close() {
     _serverFuture?.then((HttpServer server) {
       server.close();
@@ -100,15 +101,11 @@
 
   /// Handle a GET request received by the HTTP server.
   Future<void> _handleGetRequest(HttpRequest request) async {
-    previewSite ??=
-        PreviewSite(migrationState, rerunFunction, applyHook, _logger);
     await previewSite.handleGetRequest(request);
   }
 
   /// Handle a POST request received by the HTTP server.
   Future<void> _handlePostRequest(HttpRequest request) async {
-    previewSite ??=
-        PreviewSite(migrationState, rerunFunction, applyHook, _logger);
     await previewSite.handlePostRequest(request);
   }
 
diff --git a/pkg/nnbd_migration/test/migration_cli_test.dart b/pkg/nnbd_migration/test/migration_cli_test.dart
index c21ca01..f07ef19 100644
--- a/pkg/nnbd_migration/test/migration_cli_test.dart
+++ b/pkg/nnbd_migration/test/migration_cli_test.dart
@@ -125,11 +125,12 @@
   }
 
   @override
-  Future<void> blockUntilSignalInterrupt() async {
+  void listenForSignalInterrupt() {
     if (_runWhilePreviewServerActive == null) {
       fail('Preview server not expected to have been started');
     }
-    await _runWhilePreviewServerActive.call();
+    sigIntSignalled = Completer();
+    _runWhilePreviewServerActive.call().then((_) => sigIntSignalled.complete());
     _runWhilePreviewServerActive = null;
   }
 
diff --git a/runtime/tests/vm/dart/split_literals_test.dart b/runtime/tests/vm/dart/split_literals_test.dart
index 40ecf01..6cbc01b 100644
--- a/runtime/tests/vm/dart/split_literals_test.dart
+++ b/runtime/tests/vm/dart/split_literals_test.dart
@@ -65,7 +65,8 @@
 
     // Compile kernel to ELF.
     await run(genSnapshot, <String>[
-      "--use_bare_instructions=false",
+      "--use_bare_instructions=false", //# object: ok
+      "--use_bare_instructions=true", //# bare: ok
       "--snapshot-kind=app-aot-elf",
       "--elf=$snapshot",
       "--loading-unit-manifest=$manifest",
diff --git a/runtime/tests/vm/dart_2/split_literals_test.dart b/runtime/tests/vm/dart_2/split_literals_test.dart
index 40ecf01..6cbc01b 100644
--- a/runtime/tests/vm/dart_2/split_literals_test.dart
+++ b/runtime/tests/vm/dart_2/split_literals_test.dart
@@ -65,7 +65,8 @@
 
     // Compile kernel to ELF.
     await run(genSnapshot, <String>[
-      "--use_bare_instructions=false",
+      "--use_bare_instructions=false", //# object: ok
+      "--use_bare_instructions=true", //# bare: ok
       "--snapshot-kind=app-aot-elf",
       "--elf=$snapshot",
       "--loading-unit-manifest=$manifest",
diff --git a/runtime/vm/clustered_snapshot.cc b/runtime/vm/clustered_snapshot.cc
index 2836127..59f8696 100644
--- a/runtime/vm/clustered_snapshot.cc
+++ b/runtime/vm/clustered_snapshot.cc
@@ -1403,11 +1403,26 @@
       objects_.Add(code);
     }
 
-    if (!(s->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions)) {
+    if (s->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions) {
+      if (FLAG_retain_function_objects) {
+        ObjectPoolPtr pool = code->ptr()->object_pool_;
+        if ((pool != ObjectPool::null()) && s->InCurrentLoadingUnit(code)) {
+          const intptr_t length = pool->ptr()->length_;
+          uint8_t* entry_bits = pool->ptr()->entry_bits();
+          for (intptr_t i = 0; i < length; i++) {
+            auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
+            if (entry_type == ObjectPool::EntryType::kTaggedObject) {
+              s->Push(pool->ptr()->data()[i].raw_obj_);
+            }
+          }
+        }
+      }
+    } else {
       if (s->InCurrentLoadingUnit(code->ptr()->object_pool_)) {
         s->Push(code->ptr()->object_pool_);
       }
     }
+
     s->Push(code->ptr()->owner_);
     s->Push(code->ptr()->exception_handlers_);
     s->Push(code->ptr()->pc_descriptors_);
@@ -1519,21 +1534,6 @@
   }
 
   void WriteAlloc(Serializer* s) {
-    Sort(&objects_);
-    auto loading_units = s->loading_units();
-    if ((loading_units != nullptr) &&
-        (s->current_loading_unit_id() == LoadingUnit::kRootId)) {
-      for (intptr_t i = LoadingUnit::kRootId + 1; i < loading_units->length();
-           i++) {
-        auto unit_objects = loading_units->At(i)->deferred_objects();
-        Sort(unit_objects);
-        for (intptr_t j = 0; j < unit_objects->length(); j++) {
-          deferred_objects_.Add(unit_objects->At(j)->raw());
-        }
-      }
-    }
-    s->PrepareInstructions(&objects_);
-
     s->WriteCid(kCodeCid);
     const intptr_t count = objects_.length();
     s->WriteUnsigned(count);
@@ -1679,7 +1679,8 @@
     s->Write<int32_t>(code->ptr()->state_bits_);
   }
 
-  GrowableArray<CodePtr>* discovered_objects() { return &objects_; }
+  GrowableArray<CodePtr>* objects() { return &objects_; }
+  GrowableArray<CodePtr>* deferred_objects() { return &deferred_objects_; }
 
   // Some code objects would have their owners dropped from the snapshot,
   // which makes it is impossible to recover program structure when
@@ -1843,12 +1844,17 @@
     ObjectPoolPtr pool = ObjectPool::RawCast(object);
     objects_.Add(pool);
 
-    const intptr_t length = pool->ptr()->length_;
-    uint8_t* entry_bits = pool->ptr()->entry_bits();
-    for (intptr_t i = 0; i < length; i++) {
-      auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
-      if (entry_type == ObjectPool::EntryType::kTaggedObject) {
-        s->Push(pool->ptr()->data()[i].raw_obj_);
+    if (s->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions &&
+        FLAG_retain_function_objects) {
+      // Treat pool as weak.
+    } else {
+      const intptr_t length = pool->ptr()->length_;
+      uint8_t* entry_bits = pool->ptr()->entry_bits();
+      for (intptr_t i = 0; i < length; i++) {
+        auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
+        if (entry_type == ObjectPool::EntryType::kTaggedObject) {
+          s->Push(pool->ptr()->data()[i].raw_obj_);
+        }
       }
     }
   }
@@ -1867,6 +1873,9 @@
   }
 
   void WriteFill(Serializer* s) {
+    bool weak = s->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions &&
+                FLAG_retain_function_objects;
+
     const intptr_t count = objects_.length();
     for (intptr_t i = 0; i < count; i++) {
       ObjectPoolPtr pool = objects_[i];
@@ -1887,7 +1896,12 @@
               s->WriteElementRef(StubCode::CallBootstrapNative().raw(), j);
               break;
             }
-            s->WriteElementRef(entry.raw_obj_, j);
+            if (weak && !s->HasRef(entry.raw_obj_)) {
+              // Any value will do, but null has the shortest id.
+              s->WriteElementRef(Object::null(), j);
+            } else {
+              s->WriteElementRef(entry.raw_obj_, j);
+            }
             break;
           }
           case ObjectPool::EntryType::kImmediate: {
@@ -5269,19 +5283,26 @@
       const Object* deferred_object = (*unit_->deferred_objects())[i];
       ASSERT(deferred_object->IsCode());
       CodePtr code = static_cast<CodePtr>(deferred_object->raw());
-      if (!FLAG_use_bare_instructions) {
+      if (FLAG_use_bare_instructions) {
+        if (FLAG_retain_function_objects) {
+          ObjectPoolPtr pool = code->ptr()->object_pool_;
+          if (pool != ObjectPool::null()) {
+            const intptr_t length = pool->ptr()->length_;
+            uint8_t* entry_bits = pool->ptr()->entry_bits();
+            for (intptr_t i = 0; i < length; i++) {
+              auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
+              if (entry_type == ObjectPool::EntryType::kTaggedObject) {
+                s->Push(pool->ptr()->data()[i].raw_obj_);
+              }
+            }
+          }
+        }
+      } else {
         s->Push(code->ptr()->object_pool_);
       }
       s->Push(code->ptr()->compressed_stackmaps_);
       s->Push(code->ptr()->code_source_map_);
     }
-    {
-      GrowableArray<CodePtr> raw_codes(num_deferred_objects);
-      for (intptr_t i = 0; i < num_deferred_objects; i++) {
-        raw_codes.Add((*unit_->deferred_objects())[i]->raw());
-      }
-      s->PrepareInstructions(&raw_codes);
-    }
   }
 
   void WriteRoots(Serializer* s) {
@@ -5307,6 +5328,27 @@
       s->WriteRootRef(code->ptr()->compressed_stackmaps_, "deferred-code");
       s->WriteRootRef(code->ptr()->code_source_map_, "deferred-code");
     }
+
+    if (FLAG_use_bare_instructions && FLAG_retain_function_objects) {
+      ObjectPoolPtr pool =
+          s->isolate_group()->object_store()->global_object_pool();
+      const intptr_t length = pool->ptr()->length_;
+      uint8_t* entry_bits = pool->ptr()->entry_bits();
+      intptr_t last_write = 0;
+      for (intptr_t i = 0; i < length; i++) {
+        auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
+        if (entry_type == ObjectPool::EntryType::kTaggedObject) {
+          if (s->IsWritten(pool->ptr()->data()[i].raw_obj_)) {
+            intptr_t skip = i - last_write;
+            s->WriteUnsigned(skip);
+            s->WriteRootRef(pool->ptr()->data()[i].raw_obj_,
+                            "deferred-literal");
+            last_write = i;
+          }
+        }
+      }
+      s->WriteUnsigned(length - last_write);
+    }
 #endif
   }
 
@@ -5351,6 +5393,20 @@
           static_cast<CodeSourceMapPtr>(d->ReadRef());
     }
 
+    if (FLAG_use_bare_instructions && FLAG_retain_function_objects) {
+      ObjectPoolPtr pool =
+          d->isolate_group()->object_store()->global_object_pool();
+      const intptr_t length = pool->ptr()->length_;
+      uint8_t* entry_bits = pool->ptr()->entry_bits();
+      for (intptr_t i = d->ReadUnsigned(); i < length; i += d->ReadUnsigned()) {
+        auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
+        ASSERT(entry_type == ObjectPool::EntryType::kTaggedObject);
+        // The existing entry will usually be null, but it might also be an
+        // equivalent object that was duplicated in another loading unit.
+        pool->ptr()->data()[i].raw_obj_ = d->ReadRef();
+      }
+    }
+
     // Reinitialize the dispatch table by rereading the table's serialization
     // in the root snapshot.
     IsolateGroup* group = d->thread()->isolate()->group();
@@ -5776,11 +5832,58 @@
 }
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
-void Serializer::PrepareInstructions(GrowableArray<CodePtr>* code_objects) {
+void Serializer::PrepareInstructions() {
+  if (!Snapshot::IncludesCode(kind())) return;
+
+  CodeSerializationCluster* cluster =
+      static_cast<CodeSerializationCluster*>(clusters_by_cid_[kCodeCid]);
+
+  // Code objects that have identical/duplicate instructions must be adjacent in
+  // the order that Code objects are written because the encoding of the
+  // reference from the Code to the Instructions assumes monotonically
+  // increasing offsets as part of a delta encoding. Also the code order table
+  // that allows for mapping return addresses back to Code objects depends on
+  // this sorting.
+  if (cluster != nullptr) {
+    CodeSerializationCluster::Sort(cluster->objects());
+  }
+  if ((loading_units_ != nullptr) &&
+      (current_loading_unit_id_ == LoadingUnit::kRootId)) {
+    for (intptr_t i = LoadingUnit::kRootId + 1; i < loading_units_->length();
+         i++) {
+      auto unit_objects = loading_units_->At(i)->deferred_objects();
+      CodeSerializationCluster::Sort(unit_objects);
+      for (intptr_t j = 0; j < unit_objects->length(); j++) {
+        cluster->deferred_objects()->Add(unit_objects->At(j)->raw());
+      }
+    }
+  }
+
 #if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
   if ((kind() == Snapshot::kFullAOT) && FLAG_use_bare_instructions) {
+    // Group the code objects whose instructions are not being deferred in this
+    // snapshot unit in the order they will be written: first the code objects
+    // encountered for the first time in this unit being written by the
+    // CodeSerializationCluster, then code objects previously deferred whose
+    // instructions are now written by UnitSerializationRoots. This order needs
+    // to be known to finalize bare-instructions-mode's PC-relative calls.
+    GrowableArray<CodePtr> code_objects;
+    if (cluster != nullptr) {
+      auto in = cluster->objects();
+      for (intptr_t i = 0; i < in->length(); i++) {
+        code_objects.Add(in->At(i));
+      }
+    }
+    if (loading_units_ != nullptr) {
+      auto in =
+          loading_units_->At(current_loading_unit_id_)->deferred_objects();
+      for (intptr_t i = 0; i < in->length(); i++) {
+        code_objects.Add(in->At(i)->raw());
+      }
+    }
+
     GrowableArray<ImageWriterCommand> writer_commands;
-    RelocateCodeObjects(vm_, code_objects, &writer_commands);
+    RelocateCodeObjects(vm_, &code_objects, &writer_commands);
     image_writer_->PrepareForSerialization(&writer_commands);
   }
 #endif  // defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
@@ -6030,6 +6133,8 @@
   }
 #endif
 
+  PrepareInstructions();
+
   intptr_t num_objects = num_base_objects_ + num_written_objects_;
 #if defined(ARCH_IS_64_BIT)
   if (!Utils::IsInt(32, num_objects)) {
@@ -6147,8 +6252,7 @@
   ASSERT(code_cluster != nullptr);
   // Reference IDs in a cluster are allocated sequentially, so we can use the
   // first code object's reference ID to calculate the cluster index.
-  const intptr_t first_code_id =
-      RefId(code_cluster->discovered_objects()->At(0));
+  const intptr_t first_code_id = RefId(code_cluster->objects()->At(0));
   // The first object in the code cluster must have its reference ID allocated.
   ASSERT(IsAllocatedReference(first_code_id));
 
diff --git a/runtime/vm/clustered_snapshot.h b/runtime/vm/clustered_snapshot.h
index d3471b6..47a4594 100644
--- a/runtime/vm/clustered_snapshot.h
+++ b/runtime/vm/clustered_snapshot.h
@@ -351,7 +351,7 @@
     Write<int32_t>(cid);
   }
 
-  void PrepareInstructions(GrowableArray<CodePtr>* codes);
+  void PrepareInstructions();
   void WriteInstructions(InstructionsPtr instr,
                          uint32_t unchecked_offset,
                          CodePtr code,
@@ -419,6 +419,13 @@
     FATAL("Missing ref");
   }
 
+  bool HasRef(ObjectPtr object) const {
+    return heap_->GetObjectId(object) != kUnreachableReference;
+  }
+  bool IsWritten(ObjectPtr object) const {
+    return heap_->GetObjectId(object) > num_base_objects_;
+  }
+
  private:
   const char* ReadOnlyObjectType(intptr_t cid);
 
diff --git a/runtime/vm/compiler/relocation.cc b/runtime/vm/compiler/relocation.cc
index 89305aa..9e3c5c9 100644
--- a/runtime/vm/compiler/relocation.cc
+++ b/runtime/vm/compiler/relocation.cc
@@ -71,8 +71,15 @@
   // We're guaranteed to have all calls resolved, since
   //   * backwards calls are resolved eagerly
   //   * forward calls are resolved once the target is written
-  ASSERT(all_unresolved_calls_.IsEmpty());
-  ASSERT(unresolved_calls_by_destination_.IsEmpty());
+  if (!all_unresolved_calls_.IsEmpty()) {
+    for (auto call : all_unresolved_calls_) {
+      OS::PrintErr("Unresolved call to %s from %s\n",
+                   Object::Handle(call->callee).ToCString(),
+                   Object::Handle(call->caller).ToCString());
+    }
+  }
+  RELEASE_ASSERT(all_unresolved_calls_.IsEmpty());
+  RELEASE_ASSERT(unresolved_calls_by_destination_.IsEmpty());
 
   // Any trampolines we created must be patched with the right offsets.
   auto it = trampolines_by_destination_.GetIterator();
diff --git a/runtime/vm/compiler/type_testing_stubs_arm.cc b/runtime/vm/compiler/type_testing_stubs_arm.cc
index ed1ace2..ccd9d36 100644
--- a/runtime/vm/compiler/type_testing_stubs_arm.cc
+++ b/runtime/vm/compiler/type_testing_stubs_arm.cc
@@ -20,16 +20,8 @@
     const Type& type,
     const Class& type_class) {
   BuildOptimizedTypeTestStubFastCases(assembler, hi, type, type_class);
-  if (!compiler::IsSameObject(
-          compiler::NullObject(),
-          compiler::CastHandle<Object>(slow_type_test_stub))) {
-    __ GenerateUnRelocatedPcRelativeTailCall();
-    unresolved_calls->Add(new compiler::UnresolvedPcRelativeCall(
-        __ CodeSize(), slow_type_test_stub, /*is_tail_call=*/true));
-  } else {
-    __ Branch(compiler::Address(
-        THR, compiler::target::Thread::slow_type_test_entry_point_offset()));
-  }
+  __ Branch(compiler::Address(
+      THR, compiler::target::Thread::slow_type_test_entry_point_offset()));
 }
 
 }  // namespace dart
diff --git a/runtime/vm/compiler/type_testing_stubs_arm64.cc b/runtime/vm/compiler/type_testing_stubs_arm64.cc
index 0540b01..850b198 100644
--- a/runtime/vm/compiler/type_testing_stubs_arm64.cc
+++ b/runtime/vm/compiler/type_testing_stubs_arm64.cc
@@ -20,19 +20,11 @@
     const Type& type,
     const Class& type_class) {
   BuildOptimizedTypeTestStubFastCases(assembler, hi, type, type_class);
-  if (!compiler::IsSameObject(
-          compiler::NullObject(),
-          compiler::CastHandle<Object>(slow_type_test_stub))) {
-    __ GenerateUnRelocatedPcRelativeTailCall();
-    unresolved_calls->Add(new compiler::UnresolvedPcRelativeCall(
-        __ CodeSize(), slow_type_test_stub, /*is_tail_call=*/true));
-  } else {
-    __ ldr(TMP,
-           compiler::Address(
-               THR,
-               compiler::target::Thread::slow_type_test_entry_point_offset()));
-    __ br(TMP);
-  }
+  __ ldr(
+      TMP,
+      compiler::Address(
+          THR, compiler::target::Thread::slow_type_test_entry_point_offset()));
+  __ br(TMP);
 }
 
 }  // namespace dart
diff --git a/runtime/vm/compiler/type_testing_stubs_x64.cc b/runtime/vm/compiler/type_testing_stubs_x64.cc
index f7a974c..7722cee 100644
--- a/runtime/vm/compiler/type_testing_stubs_x64.cc
+++ b/runtime/vm/compiler/type_testing_stubs_x64.cc
@@ -20,16 +20,8 @@
     const Type& type,
     const Class& type_class) {
   BuildOptimizedTypeTestStubFastCases(assembler, hi, type, type_class);
-  if (!compiler::IsSameObject(
-          compiler::NullObject(),
-          compiler::CastHandle<Object>(slow_type_test_stub))) {
-    __ GenerateUnRelocatedPcRelativeTailCall();
-    unresolved_calls->Add(new compiler::UnresolvedPcRelativeCall(
-        __ CodeSize(), slow_type_test_stub, /*is_tail_call=*/true));
-  } else {
-    __ jmp(compiler::Address(
-        THR, compiler::target::Thread::slow_type_test_entry_point_offset()));
-  }
+  __ jmp(compiler::Address(
+      THR, compiler::target::Thread::slow_type_test_entry_point_offset()));
 }
 
 }  // namespace dart
diff --git a/runtime/vm/raw_object.h b/runtime/vm/raw_object.h
index 5b4a2fd..dac1451 100644
--- a/runtime/vm/raw_object.h
+++ b/runtime/vm/raw_object.h
@@ -1642,6 +1642,8 @@
 
   friend class Object;
   friend class CodeSerializationCluster;
+  friend class UnitSerializationRoots;
+  friend class UnitDeserializationRoots;
 };
 
 class InstructionsLayout : public ObjectLayout {
diff --git a/tools/VERSION b/tools/VERSION
index f03f533..8261e48 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
 MAJOR 2
 MINOR 12
 PATCH 0
-PRERELEASE 227
+PRERELEASE 228
 PRERELEASE_PATCH 0
\ No newline at end of file