[vm] Switch switchable calls to load the target before the data.

Needed to be able to update call sites without pausing Dart execution: with the target loaded first, a racing mutator can at worst observe a stale target paired with fresh data, never a fresh target paired with stale data.
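
A minimal sketch of that ordering in plain C++ with std::atomic (illustrative
only, not part of this patch; the VM reads and writes object-pool slots rather
than raw atomics, and the names here are hypothetical):

  #include <atomic>

  struct CallSite {
    std::atomic<void*> target;  // Code target; the call site loads this first.
    std::atomic<void*> data;    // ICData/MegamorphicCache; loaded second.
  };

  // Patcher thread: publish data before target. The release ordering makes
  // the data store visible to any mutator that observes the new target.
  void Patch(CallSite* site, void* new_target, void* new_data) {
    site->data.store(new_data, std::memory_order_release);
    site->target.store(new_target, std::memory_order_release);
  }

  // Mutator: load target, then data. Possible views are (old, old),
  // (old, new), and (new, new); (new target, old data) cannot occur,
  // so no safepoint is needed while patching.
  void* LoadForCall(CallSite* site, void** data_out) {
    void* target = site->target.load(std::memory_order_acquire);
    *data_out = site->data.load(std::memory_order_relaxed);
    return target;
  }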

TEST=ci
Change-Id: I8dd6e5cc4bec617da3c1198c6064cefbe86dbec2
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/441721
Commit-Queue: Ryan Macnak <rmacnak@google.com>
Reviewed-by: Alexander Aprelev <aam@google.com>
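
For orientation, the JIT instance-call sequence this patch emits, shown via
the x64 macro assembler as in the flow_graph_compiler_x64.cc hunk below
(other architectures follow the same shape):

  __ LoadUniqueObject(CODE_REG, stub);        // Target Code, loaded first.
  __ LoadUniqueObject(IC_DATA_REG, ic_data);  // Data, loaded second.
  __ call(compiler::FieldAddress(
      CODE_REG, Code::entry_point_offset(Code::EntryKind::kMonomorphic)));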
diff --git a/runtime/vm/code_patcher.h b/runtime/vm/code_patcher.h
index 6593a29..a114b22 100644
--- a/runtime/vm/code_patcher.h
+++ b/runtime/vm/code_patcher.h
@@ -69,8 +69,6 @@
                                                 const Code& code,
                                                 ICData* ic_data);
 
-  static void InsertDeoptimizationCallAt(uword start);
-
   static void PatchPoolPointerCallAt(uword return_address,
                                      const Code& code,
                                      const Code& new_target);
@@ -86,6 +84,8 @@
                                                        const Code& target);
   static ObjectPtr GetSwitchableCallDataAt(uword return_address,
                                            const Code& caller_code);
+  static ObjectPtr GetSwitchableCallTargetAt(uword return_address,
+                                             const Code& caller_code);
   static uword GetSwitchableCallTargetEntryAt(uword return_address,
                                               const Code& caller_code);
 
diff --git a/runtime/vm/code_patcher_arm.cc b/runtime/vm/code_patcher_arm.cc
index 5aa488f..f918be3 100644
--- a/runtime/vm/code_patcher_arm.cc
+++ b/runtime/vm/code_patcher_arm.cc
@@ -27,8 +27,12 @@
   call.SetTargetCode(new_target);
 }
 
-void CodePatcher::InsertDeoptimizationCallAt(uword start) {
-  UNREACHABLE();
+void CodePatcher::PatchPoolPointerCallAt(uword return_address,
+                                         const Code& code,
+                                         const Code& new_target) {
+  ASSERT(code.ContainsInstructionAt(return_address));
+  CallPattern call(return_address, code);
+  call.SetTargetCode(new_target);
 }
 
 CodePtr CodePatcher::GetInstanceCallAt(uword return_address,
@@ -107,14 +111,23 @@
   }
 }
 
+ObjectPtr CodePatcher::GetSwitchableCallTargetAt(uword return_address,
+                                                 const Code& caller_code) {
+  if (FLAG_precompiled_mode) {
+    UNREACHABLE();
+  } else {
+    SwitchableCallPattern call(return_address, caller_code);
+    return call.target();
+  }
+}
+
 uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
                                                   const Code& caller_code) {
   if (FLAG_precompiled_mode) {
     BareSwitchableCallPattern call(return_address);
     return call.target_entry();
   } else {
-    SwitchableCallPattern call(return_address, caller_code);
-    return call.target_entry();
+    UNREACHABLE();
   }
 }
 
diff --git a/runtime/vm/code_patcher_arm64.cc b/runtime/vm/code_patcher_arm64.cc
index b4a2705..33ffe72 100644
--- a/runtime/vm/code_patcher_arm64.cc
+++ b/runtime/vm/code_patcher_arm64.cc
@@ -63,10 +63,6 @@
   call.SetTarget(new_target);
 }
 
-void CodePatcher::InsertDeoptimizationCallAt(uword start) {
-  UNREACHABLE();
-}
-
 CodePtr CodePatcher::GetInstanceCallAt(uword return_address,
                                        const Code& caller_code,
                                        Object* data) {
@@ -143,14 +139,23 @@
   }
 }
 
+ObjectPtr CodePatcher::GetSwitchableCallTargetAt(uword return_address,
+                                                 const Code& caller_code) {
+  if (FLAG_precompiled_mode) {
+    UNREACHABLE();
+  } else {
+    SwitchableCallPattern call(return_address, caller_code);
+    return call.target();
+  }
+}
+
 uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
                                                   const Code& caller_code) {
   if (FLAG_precompiled_mode) {
     BareSwitchableCallPattern call(return_address);
     return call.target_entry();
   } else {
-    SwitchableCallPattern call(return_address, caller_code);
-    return call.target_entry();
+    UNREACHABLE();
   }
 }
 
diff --git a/runtime/vm/code_patcher_arm64_test.cc b/runtime/vm/code_patcher_arm64_test.cc
index 0bd184c..ff90e9b 100644
--- a/runtime/vm/code_patcher_arm64_test.cc
+++ b/runtime/vm/code_patcher_arm64_test.cc
@@ -48,12 +48,12 @@
   SPILLS_LR_TO_FRAME({});              // Clobbered LR is OK.
 
   compiler::ObjectPoolBuilder& op = __ object_pool_builder();
-  const intptr_t ic_data_index =
-      op.AddObject(ic_data, ObjectPool::Patchability::kPatchable);
   const intptr_t stub_index =
       op.AddObject(stub, ObjectPool::Patchability::kPatchable);
-  ASSERT((ic_data_index + 1) == stub_index);
-  __ LoadDoubleWordFromPoolIndex(R5, CODE_REG, ic_data_index);
+  const intptr_t ic_data_index =
+      op.AddObject(ic_data, ObjectPool::Patchability::kPatchable);
+  ASSERT((stub_index + 1) == ic_data_index);
+  __ LoadDoubleWordFromPoolIndex(CODE_REG, R5, stub_index);
   __ Call(compiler::FieldAddress(
       CODE_REG, Code::entry_point_offset(Code::EntryKind::kMonomorphic)));
   RESTORES_LR_FROM_FRAME({});  // Clobbered LR is OK.
diff --git a/runtime/vm/code_patcher_arm_test.cc b/runtime/vm/code_patcher_arm_test.cc
index c963d44..a71e4a2 100644
--- a/runtime/vm/code_patcher_arm_test.cc
+++ b/runtime/vm/code_patcher_arm_test.cc
@@ -46,8 +46,10 @@
   __ set_constant_pool_allowed(true);  // Uninitialized pp is OK.
   SPILLS_LR_TO_FRAME({});              // Clobbered LR is OK.
 
+  __ LoadObject(CODE_REG, StubCode::OneArgCheckInlineCache());
   __ LoadObject(R9, ic_data);
-  __ BranchLinkPatchable(StubCode::OneArgCheckInlineCache());
+  __ Call(compiler::FieldAddress(
+      CODE_REG, Code::entry_point_offset(Code::EntryKind::kMonomorphic)));
   RESTORES_LR_FROM_FRAME({});  // Clobbered LR is OK.
   __ Ret();
 }
diff --git a/runtime/vm/code_patcher_ia32.cc b/runtime/vm/code_patcher_ia32.cc
index 73f8cb4..0c1e28a 100644
--- a/runtime/vm/code_patcher_ia32.cc
+++ b/runtime/vm/code_patcher_ia32.cc
@@ -198,10 +198,6 @@
   });
 }
 
-void CodePatcher::InsertDeoptimizationCallAt(uword start) {
-  UNREACHABLE();
-}
-
 CodePtr CodePatcher::GetInstanceCallAt(uword return_address,
                                        const Code& caller_code,
                                        Object* data) {
@@ -258,18 +254,13 @@
                                         const Code& caller_code,
                                         const Object& data,
                                         const Code& target) {
-  // Switchable instance calls only generated for precompilation.
-  UNREACHABLE();
+  PatchInstanceCallAt(return_address, caller_code, data, target);
 }
 
-void CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
-    Thread* thread,
-    uword return_address,
-    const Code& caller_code,
-    const Object& data,
-    const Code& target) {
-  // Switchable instance calls only generated for precompilation.
-  UNREACHABLE();
+ObjectPtr CodePatcher::GetSwitchableCallTargetAt(uword return_address,
+                                                 const Code& caller_code) {
+  InstanceCall call(return_address, caller_code);
+  return call.target();
 }
 
 uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
@@ -281,9 +272,8 @@
 
 ObjectPtr CodePatcher::GetSwitchableCallDataAt(uword return_address,
                                                const Code& caller_code) {
-  // Switchable instance calls only generated for precompilation.
-  UNREACHABLE();
-  return Object::null();
+  InstanceCall call(return_address, caller_code);
+  return call.data();
 }
 
 void CodePatcher::PatchNativeCallAt(uword return_address,
diff --git a/runtime/vm/code_patcher_riscv.cc b/runtime/vm/code_patcher_riscv.cc
index 256f464..daa99fe 100644
--- a/runtime/vm/code_patcher_riscv.cc
+++ b/runtime/vm/code_patcher_riscv.cc
@@ -72,10 +72,6 @@
   call.SetTarget(new_target);
 }
 
-void CodePatcher::InsertDeoptimizationCallAt(uword start) {
-  UNREACHABLE();
-}
-
 CodePtr CodePatcher::GetInstanceCallAt(uword return_address,
                                        const Code& caller_code,
                                        Object* data) {
@@ -152,14 +148,23 @@
   }
 }
 
+ObjectPtr CodePatcher::GetSwitchableCallTargetAt(uword return_address,
+                                                 const Code& caller_code) {
+  if (FLAG_precompiled_mode) {
+    UNREACHABLE();
+  } else {
+    SwitchableCallPattern call(return_address, caller_code);
+    return call.target();
+  }
+}
+
 uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
                                                   const Code& caller_code) {
   if (FLAG_precompiled_mode) {
     BareSwitchableCallPattern call(return_address);
     return call.target_entry();
   } else {
-    SwitchableCallPattern call(return_address, caller_code);
-    return call.target_entry();
+    UNREACHABLE();
   }
 }
 
diff --git a/runtime/vm/code_patcher_riscv_test.cc b/runtime/vm/code_patcher_riscv_test.cc
index e6d4480..03b31e8 100644
--- a/runtime/vm/code_patcher_riscv_test.cc
+++ b/runtime/vm/code_patcher_riscv_test.cc
@@ -45,8 +45,8 @@
 
   // Code is generated, but not executed. Just parsed with CodePatcher.
   __ set_constant_pool_allowed(true);  // Uninitialized pp is OK.
-  __ LoadUniqueObject(IC_DATA_REG, ic_data);
   __ LoadUniqueObject(CODE_REG, stub);
+  __ LoadUniqueObject(IC_DATA_REG, ic_data);
   __ Call(compiler::FieldAddress(
       CODE_REG, Code::entry_point_offset(Code::EntryKind::kMonomorphic)));
   __ ret();
diff --git a/runtime/vm/code_patcher_x64.cc b/runtime/vm/code_patcher_x64.cc
index 6d98a80..a6404a3 100644
--- a/runtime/vm/code_patcher_x64.cc
+++ b/runtime/vm/code_patcher_x64.cc
@@ -81,6 +81,26 @@
   }
 }
 
+static void MatchDataLoadFromPool(uword* pc, intptr_t* data_index) {
+  // movq RBX, [PP + offset]
+  static int16_t load_data_disp8[] = {
+      0x49, 0x8b, 0x5f, -1,  //
+  };
+  static int16_t load_data_disp32[] = {
+      0x49, 0x8b, 0x9f, -1, -1, -1, -1,
+  };
+  if (MatchesPattern(*pc, load_data_disp8, ARRAY_SIZE(load_data_disp8))) {
+    *pc -= ARRAY_SIZE(load_data_disp8);
+    *data_index = IndexFromPPLoadDisp8(*pc + 3);
+  } else if (MatchesPattern(*pc, load_data_disp32,
+                            ARRAY_SIZE(load_data_disp32))) {
+    *pc -= ARRAY_SIZE(load_data_disp32);
+    *data_index = IndexFromPPLoadDisp32(*pc + 3);
+  } else {
+    FATAL("Expected `movq RBX, [PP + imm8|imm32]` at %" Px, *pc);
+  }
+}
+
 static void MatchCodeLoadFromPool(uword* pc, intptr_t* code_index) {
   const int16_t* load_code_disp8_pattern = FLAG_precompiled_mode
                                                ? kLoadCodeFromPoolDisp8AOT
@@ -112,29 +132,10 @@
         code_index_(-1),
         argument_index_(-1) {
     uword pc = return_address;
-
     MatchCallPattern(&pc);
+    MatchDataLoadFromPool(&pc, &argument_index_);
     MatchCodeLoadFromPool(&pc, &code_index_);
     ASSERT(Object::Handle(object_pool_.ObjectAt(code_index_)).IsCode());
-
-    // movq RBX, [PP + offset]
-    static int16_t load_argument_disp8[] = {
-        0x49, 0x8b, 0x5f, -1,  //
-    };
-    static int16_t load_argument_disp32[] = {
-        0x49, 0x8b, 0x9f, -1, -1, -1, -1,
-    };
-    if (MatchesPattern(pc, load_argument_disp8,
-                       ARRAY_SIZE(load_argument_disp8))) {
-      pc -= ARRAY_SIZE(load_argument_disp8);
-      argument_index_ = IndexFromPPLoadDisp8(pc + 3);
-    } else if (MatchesPattern(pc, load_argument_disp32,
-                              ARRAY_SIZE(load_argument_disp32))) {
-      pc -= ARRAY_SIZE(load_argument_disp32);
-      argument_index_ = IndexFromPPLoadDisp32(pc + 3);
-    } else {
-      FATAL("Failed to decode at %" Px, pc);
-    }
   }
 
   intptr_t argument_index() const { return argument_index_; }
@@ -156,14 +157,23 @@
   intptr_t argument_index_;
 
  private:
-  uword start_;
   DISALLOW_IMPLICIT_CONSTRUCTORS(UnoptimizedCall);
 };
 
-class NativeCall : public UnoptimizedCall {
+class NativeCall : public ValueObject {
  public:
   NativeCall(uword return_address, const Code& code)
-      : UnoptimizedCall(return_address, code) {}
+      : object_pool_(ObjectPool::Handle(code.GetObjectPool())),
+        code_index_(-1),
+        argument_index_(-1) {
+    uword pc = return_address;
+    MatchCallPattern(&pc);
+    MatchCodeLoadFromPool(&pc, &code_index_);
+    MatchDataLoadFromPool(&pc, &argument_index_);
+    ASSERT(Object::Handle(object_pool_.ObjectAt(code_index_)).IsCode());
+  }
+
+  intptr_t argument_index() const { return argument_index_; }
 
   NativeFunction native_function() const {
     return reinterpret_cast<NativeFunction>(
@@ -174,7 +184,22 @@
     object_pool_.SetRawValueAt(argument_index(), reinterpret_cast<uword>(func));
   }
 
+  CodePtr target() const {
+    Code& code = Code::Handle();
+    code ^= object_pool_.ObjectAt(code_index_);
+    return code.ptr();
+  }
+
+  void set_target(const Code& target) const {
+    object_pool_.SetObjectAt(code_index_, target);
+    // No need to flush the instruction cache, since the code is not modified.
+  }
+
  private:
+  const ObjectPool& object_pool_;
+  intptr_t code_index_;
+  intptr_t argument_index_;
+
   DISALLOW_IMPLICIT_CONSTRUCTORS(NativeCall);
 };
 
@@ -293,9 +318,9 @@
     ASSERT(caller_code.ContainsInstructionAt(return_address));
     uword pc = return_address;
 
-    // callq RCX
+    // callq [CODE_REG + entrypoint_offset]
     static int16_t call_pattern[] = {
-        0xff, 0xd1,  //
+        0x41, 0xff, 0x54, 0x24, -1,  //
     };
     if (MatchesPattern(pc, call_pattern, ARRAY_SIZE(call_pattern))) {
       pc -= ARRAY_SIZE(call_pattern);
@@ -304,34 +329,7 @@
     }
 
     // movq RBX, [PP + offset]
-    static int16_t load_data_disp8[] = {
-        0x49, 0x8b, 0x5f, -1,  //
-    };
-    static int16_t load_data_disp32[] = {
-        0x49, 0x8b, 0x9f, -1, -1, -1, -1,
-    };
-    if (MatchesPattern(pc, load_data_disp8, ARRAY_SIZE(load_data_disp8))) {
-      pc -= ARRAY_SIZE(load_data_disp8);
-      data_index_ = IndexFromPPLoadDisp8(pc + 3);
-    } else if (MatchesPattern(pc, load_data_disp32,
-                              ARRAY_SIZE(load_data_disp32))) {
-      pc -= ARRAY_SIZE(load_data_disp32);
-      data_index_ = IndexFromPPLoadDisp32(pc + 3);
-    } else {
-      FATAL("Failed to decode at %" Px, pc);
-    }
-    ASSERT(!Object::Handle(object_pool_.ObjectAt(data_index_)).IsCode());
-
-    // movq rcx, [CODE_REG + entrypoint_offset]
-    static int16_t load_entry_pattern[] = {
-        0x49, 0x8b, 0x4c, 0x24, -1,
-    };
-    if (MatchesPattern(pc, load_entry_pattern,
-                       ARRAY_SIZE(load_entry_pattern))) {
-      pc -= ARRAY_SIZE(load_entry_pattern);
-    } else {
-      FATAL("Failed to decode at %" Px, pc);
-    }
+    MatchDataLoadFromPool(&pc, &data_index_);
 
     // movq CODE_REG, [PP + offset]
     static int16_t load_code_disp8[] = {
@@ -359,10 +357,7 @@
     // No need to flush the instruction cache, since the code is not modified.
   }
 
-  uword target_entry() const {
-    return Code::Handle(Code::RawCast(object_pool_.ObjectAt(target_index())))
-        .MonomorphicEntryPoint();
-  }
+  ObjectPtr target() const { return object_pool_.ObjectAt(target_index()); }
 };
 
 // See [SwitchableCallBase] for switchable calls in general.
@@ -445,7 +440,8 @@
 void CodePatcher::PatchStaticCallAt(uword return_address,
                                     const Code& code,
                                     const Code& new_target) {
-  PatchPoolPointerCallAt(return_address, code, new_target);
+  PoolPointerCall call(return_address, code);
+  call.SetTarget(new_target);
 }
 
 void CodePatcher::PatchPoolPointerCallAt(uword return_address,
@@ -490,10 +486,6 @@
   call.set_target(target);
 }
 
-void CodePatcher::InsertDeoptimizationCallAt(uword start) {
-  UNREACHABLE();
-}
-
 FunctionPtr CodePatcher::GetUnoptimizedStaticCallAt(uword return_address,
                                                     const Code& caller_code,
                                                     ICData* ic_data_result) {
@@ -536,14 +528,23 @@
   }
 }
 
+ObjectPtr CodePatcher::GetSwitchableCallTargetAt(uword return_address,
+                                                 const Code& caller_code) {
+  if (FLAG_precompiled_mode) {
+    UNREACHABLE();
+  } else {
+    SwitchableCall call(return_address, caller_code);
+    return call.target();
+  }
+}
+
 uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
                                                   const Code& caller_code) {
   if (FLAG_precompiled_mode) {
     BareSwitchableCall call(return_address);
     return call.target_entry();
   } else {
-    SwitchableCall call(return_address, caller_code);
-    return call.target_entry();
+    UNREACHABLE();
   }
 }
 
diff --git a/runtime/vm/code_patcher_x64_test.cc b/runtime/vm/code_patcher_x64_test.cc
index 29cf624..5cff728 100644
--- a/runtime/vm/code_patcher_x64_test.cc
+++ b/runtime/vm/code_patcher_x64_test.cc
@@ -45,8 +45,10 @@
   // Code accessing pp is generated, but not executed. Uninitialized pp is OK.
   __ set_constant_pool_allowed(true);
 
+  __ LoadObject(CODE_REG, StubCode::OneArgCheckInlineCache());
   __ LoadObject(RBX, ic_data);
-  __ CallPatchable(StubCode::OneArgCheckInlineCache());
+  __ call(compiler::FieldAddress(
+      CODE_REG, Code::entry_point_offset(Code::EntryKind::kMonomorphic)));
   __ ret();
 }
 
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc b/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
index f139bfa..29fb234 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
@@ -471,8 +471,8 @@
          entry_kind == Code::EntryKind::kUnchecked);
   ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0);
   __ LoadFromOffset(R0, SP, (ic_data.SizeWithoutTypeArgs() - 1) * kWordSize);
-  __ LoadUniqueObject(IC_DATA_REG, ic_data);
   __ LoadUniqueObject(CODE_REG, stub);
+  __ LoadUniqueObject(IC_DATA_REG, ic_data);
   const intptr_t entry_point_offset =
       entry_kind == Code::EntryKind::kNormal
           ? Code::entry_point_offset(Code::EntryKind::kMonomorphic)
@@ -502,8 +502,8 @@
   __ LoadFromOffset(R0, SP,
                     (args_desc.Count() - 1) * compiler::target::kWordSize);
   // Use same code pattern as instance call so it can be parsed by code patcher.
-  __ LoadUniqueObject(IC_DATA_REG, cache);
   __ LoadUniqueObject(CODE_REG, StubCode::MegamorphicCall());
+  __ LoadUniqueObject(IC_DATA_REG, cache);
   __ Call(compiler::FieldAddress(
       CODE_REG, Code::entry_point_offset(Code::EntryKind::kMonomorphic)));
 
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc b/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
index a623e9e..c6c22a0 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
@@ -457,12 +457,12 @@
   __ LoadFromOffset(R0, SP, (ic_data.SizeWithoutTypeArgs() - 1) * kWordSize);
 
   compiler::ObjectPoolBuilder& op = __ object_pool_builder();
-  const intptr_t ic_data_index =
-      op.AddObject(ic_data, ObjectPool::Patchability::kPatchable);
   const intptr_t stub_index =
       op.AddObject(stub, ObjectPool::Patchability::kPatchable);
-  ASSERT((ic_data_index + 1) == stub_index);
-  __ LoadDoubleWordFromPoolIndex(IC_DATA_REG, CODE_REG, ic_data_index);
+  const intptr_t ic_data_index =
+      op.AddObject(ic_data, ObjectPool::Patchability::kPatchable);
+  ASSERT((stub_index + 1) == ic_data_index);
+  __ LoadDoubleWordFromPoolIndex(CODE_REG, IC_DATA_REG, stub_index);
   const intptr_t entry_point_offset =
       entry_kind == Code::EntryKind::kNormal
           ? Code::entry_point_offset(Code::EntryKind::kMonomorphic)
@@ -493,12 +493,12 @@
 
   // Use same code pattern as instance call so it can be parsed by code patcher.
   compiler::ObjectPoolBuilder& op = __ object_pool_builder();
-  const intptr_t data_index =
-      op.AddObject(cache, ObjectPool::Patchability::kPatchable);
   const intptr_t stub_index = op.AddObject(
       StubCode::MegamorphicCall(), ObjectPool::Patchability::kPatchable);
-  ASSERT((data_index + 1) == stub_index);
-  __ LoadDoubleWordFromPoolIndex(IC_DATA_REG, CODE_REG, data_index);
+  const intptr_t data_index =
+      op.AddObject(cache, ObjectPool::Patchability::kPatchable);
+  ASSERT((stub_index + 1) == data_index);
+  __ LoadDoubleWordFromPoolIndex(CODE_REG, IC_DATA_REG, stub_index);
   CLOBBERS_LR(__ ldr(LR, compiler::FieldAddress(
                              CODE_REG, Code::entry_point_offset(
                                            Code::EntryKind::kMonomorphic))));
@@ -547,15 +547,15 @@
       FLAG_precompiled_mode ? compiler::ObjectPoolBuilderEntry::
                                   kResetToSwitchableCallMissEntryPoint
                             : compiler::ObjectPoolBuilderEntry::kSnapshotable;
+  const intptr_t stub_index = op.AddObject(
+      initial_stub, ObjectPool::Patchability::kPatchable, snapshot_behavior);
   const intptr_t data_index =
       op.AddObject(data, ObjectPool::Patchability::kPatchable);
-  const intptr_t initial_stub_index = op.AddObject(
-      initial_stub, ObjectPool::Patchability::kPatchable, snapshot_behavior);
-  ASSERT((data_index + 1) == initial_stub_index);
+  ASSERT((stub_index + 1) == data_index);
 
   // The AOT runtime will replace the slot in the object pool with the
   // entrypoint address - see app_snapshot.cc.
-  CLOBBERS_LR(__ LoadDoubleWordFromPoolIndex(R5, LR, data_index));
+  CLOBBERS_LR(__ LoadDoubleWordFromPoolIndex(LR, R5, stub_index));
   CLOBBERS_LR(__ blr(LR));
 
   EmitCallsiteMetadata(source, DeoptId::kNone, UntaggedPcDescriptors::kOther,
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_riscv.cc b/runtime/vm/compiler/backend/flow_graph_compiler_riscv.cc
index 33e8b9e..f2a53da 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_riscv.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_riscv.cc
@@ -450,8 +450,8 @@
          entry_kind == Code::EntryKind::kUnchecked);
   ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0);
   __ LoadFromOffset(A0, SP, (ic_data.SizeWithoutTypeArgs() - 1) * kWordSize);
-  __ LoadUniqueObject(IC_DATA_REG, ic_data);
   __ LoadUniqueObject(CODE_REG, stub);
+  __ LoadUniqueObject(IC_DATA_REG, ic_data);
   const intptr_t entry_point_offset =
       entry_kind == Code::EntryKind::kNormal
           ? Code::entry_point_offset(Code::EntryKind::kMonomorphic)
@@ -482,8 +482,8 @@
   __ LoadFromOffset(A0, SP,
                     (args_desc.Count() - 1) * compiler::target::kWordSize);
   // Use same code pattern as instance call so it can be parsed by code patcher.
-  __ LoadUniqueObject(IC_DATA_REG, cache);
   __ LoadUniqueObject(CODE_REG, StubCode::MegamorphicCall());
+  __ LoadUniqueObject(IC_DATA_REG, cache);
   __ Call(compiler::FieldAddress(
       CODE_REG, Code::entry_point_offset(Code::EntryKind::kMonomorphic)));
 
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc b/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
index 815b105..da35f19 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
@@ -475,8 +475,8 @@
   // Load receiver into RDX.
   __ movq(RDX, compiler::Address(
                    RSP, (ic_data.SizeWithoutTypeArgs() - 1) * kWordSize));
-  __ LoadUniqueObject(IC_DATA_REG, ic_data);
   __ LoadUniqueObject(CODE_REG, stub);
+  __ LoadUniqueObject(IC_DATA_REG, ic_data);
   const intptr_t entry_point_offset =
       entry_kind == Code::EntryKind::kNormal
           ? Code::entry_point_offset(Code::EntryKind::kMonomorphic)
@@ -505,8 +505,8 @@
   __ movq(RDX, compiler::Address(RSP, (args_desc.Count() - 1) * kWordSize));
 
   // Use same code pattern as instance call so it can be parsed by code patcher.
-  __ LoadUniqueObject(IC_DATA_REG, cache);
   __ LoadUniqueObject(CODE_REG, StubCode::MegamorphicCall());
+  __ LoadUniqueObject(IC_DATA_REG, cache);
   __ call(compiler::FieldAddress(
       CODE_REG, Code::entry_point_offset(Code::EntryKind::kMonomorphic)));
 
diff --git a/runtime/vm/debugger_arm.cc b/runtime/vm/debugger_arm.cc
index 675c98e..31f3ca8 100644
--- a/runtime/vm/debugger_arm.cc
+++ b/runtime/vm/debugger_arm.cc
@@ -21,35 +21,48 @@
 
 void CodeBreakpoint::PatchCode() {
   ASSERT(!IsEnabled());
-  Code& stub_target = Code::Handle();
+  const Code& code = Code::Handle(code_);
   switch (breakpoint_kind_) {
-    case UntaggedPcDescriptors::kIcCall:
-      stub_target = StubCode::ICCallBreakpoint().ptr();
+    case UntaggedPcDescriptors::kIcCall: {
+      Object& data = Object::Handle();
+      saved_value_ = CodePatcher::GetInstanceCallAt(pc_, code, &data);
+      CodePatcher::PatchInstanceCallAt(pc_, code, data,
+                                       StubCode::ICCallBreakpoint());
       break;
+    }
     case UntaggedPcDescriptors::kUnoptStaticCall:
-      stub_target = StubCode::UnoptStaticCallBreakpoint().ptr();
+      saved_value_ = CodePatcher::GetStaticCallTargetAt(pc_, code);
+      CodePatcher::PatchStaticCallAt(pc_, code,
+                                     StubCode::UnoptStaticCallBreakpoint());
       break;
     case UntaggedPcDescriptors::kRuntimeCall:
-      stub_target = StubCode::RuntimeCallBreakpoint().ptr();
+      saved_value_ = CodePatcher::GetStaticCallTargetAt(pc_, code);
+      CodePatcher::PatchPoolPointerCallAt(pc_, code,
+                                          StubCode::RuntimeCallBreakpoint());
       break;
     default:
       UNREACHABLE();
   }
-  const Code& code = Code::Handle(code_);
-  saved_value_ = CodePatcher::GetStaticCallTargetAt(pc_, code);
-  CodePatcher::PatchStaticCallAt(pc_, code, stub_target);
 }
 
 void CodeBreakpoint::RestoreCode() {
   ASSERT(IsEnabled());
   const Code& code = Code::Handle(code_);
   switch (breakpoint_kind_) {
-    case UntaggedPcDescriptors::kIcCall:
-    case UntaggedPcDescriptors::kUnoptStaticCall:
-    case UntaggedPcDescriptors::kRuntimeCall: {
-      CodePatcher::PatchStaticCallAt(pc_, code, Code::Handle(saved_value_));
+    case UntaggedPcDescriptors::kIcCall: {
+      Object& data = Object::Handle();
+      CodePatcher::GetInstanceCallAt(pc_, code, &data);
+      CodePatcher::PatchInstanceCallAt(pc_, code, data,
+                                       Code::Handle(saved_value_));
       break;
     }
+    case UntaggedPcDescriptors::kUnoptStaticCall:
+      CodePatcher::PatchStaticCallAt(pc_, code, Code::Handle(saved_value_));
+      break;
+    case UntaggedPcDescriptors::kRuntimeCall:
+      CodePatcher::PatchPoolPointerCallAt(pc_, code,
+                                          Code::Handle(saved_value_));
+      break;
     default:
       UNREACHABLE();
   }
diff --git a/runtime/vm/debugger_arm64.cc b/runtime/vm/debugger_arm64.cc
index 17cb472..4dab21d 100644
--- a/runtime/vm/debugger_arm64.cc
+++ b/runtime/vm/debugger_arm64.cc
@@ -30,18 +30,16 @@
                                        StubCode::ICCallBreakpoint());
       break;
     }
-    case UntaggedPcDescriptors::kUnoptStaticCall: {
+    case UntaggedPcDescriptors::kUnoptStaticCall:
       saved_value_ = CodePatcher::GetStaticCallTargetAt(pc_, code);
-      CodePatcher::PatchPoolPointerCallAt(
-          pc_, code, StubCode::UnoptStaticCallBreakpoint());
+      CodePatcher::PatchStaticCallAt(pc_, code,
+                                     StubCode::UnoptStaticCallBreakpoint());
       break;
-    }
-    case UntaggedPcDescriptors::kRuntimeCall: {
+    case UntaggedPcDescriptors::kRuntimeCall:
       saved_value_ = CodePatcher::GetStaticCallTargetAt(pc_, code);
       CodePatcher::PatchPoolPointerCallAt(pc_, code,
                                           StubCode::RuntimeCallBreakpoint());
       break;
-    }
     default:
       UNREACHABLE();
   }
@@ -59,11 +57,12 @@
       break;
     }
     case UntaggedPcDescriptors::kUnoptStaticCall:
-    case UntaggedPcDescriptors::kRuntimeCall: {
+      CodePatcher::PatchStaticCallAt(pc_, code, Code::Handle(saved_value_));
+      break;
+    case UntaggedPcDescriptors::kRuntimeCall:
       CodePatcher::PatchPoolPointerCallAt(pc_, code,
                                           Code::Handle(saved_value_));
       break;
-    }
     default:
       UNREACHABLE();
   }
diff --git a/runtime/vm/debugger_riscv.cc b/runtime/vm/debugger_riscv.cc
index 37e3804..6029c65 100644
--- a/runtime/vm/debugger_riscv.cc
+++ b/runtime/vm/debugger_riscv.cc
@@ -30,18 +30,16 @@
                                        StubCode::ICCallBreakpoint());
       break;
     }
-    case UntaggedPcDescriptors::kUnoptStaticCall: {
+    case UntaggedPcDescriptors::kUnoptStaticCall:
       saved_value_ = CodePatcher::GetStaticCallTargetAt(pc_, code);
-      CodePatcher::PatchPoolPointerCallAt(
-          pc_, code, StubCode::UnoptStaticCallBreakpoint());
+      CodePatcher::PatchStaticCallAt(pc_, code,
+                                     StubCode::UnoptStaticCallBreakpoint());
       break;
-    }
-    case UntaggedPcDescriptors::kRuntimeCall: {
+    case UntaggedPcDescriptors::kRuntimeCall:
       saved_value_ = CodePatcher::GetStaticCallTargetAt(pc_, code);
       CodePatcher::PatchPoolPointerCallAt(pc_, code,
                                           StubCode::RuntimeCallBreakpoint());
       break;
-    }
     default:
       UNREACHABLE();
   }
@@ -59,11 +57,12 @@
       break;
     }
     case UntaggedPcDescriptors::kUnoptStaticCall:
-    case UntaggedPcDescriptors::kRuntimeCall: {
+      CodePatcher::PatchStaticCallAt(pc_, code, Code::Handle(saved_value_));
+      break;
+    case UntaggedPcDescriptors::kRuntimeCall:
       CodePatcher::PatchPoolPointerCallAt(pc_, code,
                                           Code::Handle(saved_value_));
       break;
-    }
     default:
       UNREACHABLE();
   }
diff --git a/runtime/vm/debugger_x64.cc b/runtime/vm/debugger_x64.cc
index a6161a0..9c3b8ed 100644
--- a/runtime/vm/debugger_x64.cc
+++ b/runtime/vm/debugger_x64.cc
@@ -22,36 +22,48 @@
 
 void CodeBreakpoint::PatchCode() {
   ASSERT(!IsEnabled());
-  Code& stub_target = Code::Handle();
+  const Code& code = Code::Handle(code_);
   switch (breakpoint_kind_) {
-    case UntaggedPcDescriptors::kIcCall:
-      stub_target = StubCode::ICCallBreakpoint().ptr();
+    case UntaggedPcDescriptors::kIcCall: {
+      Object& data = Object::Handle();
+      saved_value_ = CodePatcher::GetInstanceCallAt(pc_, code, &data);
+      CodePatcher::PatchInstanceCallAt(pc_, code, data,
+                                       StubCode::ICCallBreakpoint());
       break;
+    }
     case UntaggedPcDescriptors::kUnoptStaticCall:
-      stub_target = StubCode::UnoptStaticCallBreakpoint().ptr();
+      saved_value_ = CodePatcher::GetStaticCallTargetAt(pc_, code);
+      CodePatcher::PatchStaticCallAt(pc_, code,
+                                     StubCode::UnoptStaticCallBreakpoint());
       break;
     case UntaggedPcDescriptors::kRuntimeCall:
-      stub_target = StubCode::RuntimeCallBreakpoint().ptr();
+      saved_value_ = CodePatcher::GetStaticCallTargetAt(pc_, code);
+      CodePatcher::PatchPoolPointerCallAt(pc_, code,
+                                          StubCode::RuntimeCallBreakpoint());
       break;
     default:
       UNREACHABLE();
   }
-  const Code& code = Code::Handle(code_);
-  saved_value_ = CodePatcher::GetStaticCallTargetAt(pc_, code);
-  CodePatcher::PatchPoolPointerCallAt(pc_, code, stub_target);
 }
 
 void CodeBreakpoint::RestoreCode() {
   ASSERT(IsEnabled());
   const Code& code = Code::Handle(code_);
   switch (breakpoint_kind_) {
-    case UntaggedPcDescriptors::kIcCall:
+    case UntaggedPcDescriptors::kIcCall: {
+      Object& data = Object::Handle();
+      CodePatcher::GetInstanceCallAt(pc_, code, &data);
+      CodePatcher::PatchInstanceCallAt(pc_, code, data,
+                                       Code::Handle(saved_value_));
+      break;
+    }
     case UntaggedPcDescriptors::kUnoptStaticCall:
-    case UntaggedPcDescriptors::kRuntimeCall: {
+      CodePatcher::PatchStaticCallAt(pc_, code, Code::Handle(saved_value_));
+      break;
+    case UntaggedPcDescriptors::kRuntimeCall:
       CodePatcher::PatchPoolPointerCallAt(pc_, code,
                                           Code::Handle(saved_value_));
       break;
-    }
     default:
       UNREACHABLE();
   }
diff --git a/runtime/vm/instructions_arm.cc b/runtime/vm/instructions_arm.cc
index 667857b..e86c381 100644
--- a/runtime/vm/instructions_arm.cc
+++ b/runtime/vm/instructions_arm.cc
@@ -43,13 +43,13 @@
   ASSERT(*(reinterpret_cast<uint32_t*>(pc) - 1) == 0xe12fff3e);
 
   Register reg;
-  uword data_load_end = InstructionPattern::DecodeLoadWordFromPool(
-      pc - 2 * Instr::kInstrSize, &reg, &target_pool_index_);
-  ASSERT(IsBranchLinkScratch(reg));
-
-  InstructionPattern::DecodeLoadWordFromPool(data_load_end, &reg,
-                                             &data_pool_index_);
+  uword target_load_end = InstructionPattern::DecodeLoadWordFromPool(
+      pc - 2 * Instr::kInstrSize, &reg, &data_pool_index_);
   ASSERT(reg == R9);
+
+  InstructionPattern::DecodeLoadWordFromPool(target_load_end, &reg,
+                                             &target_pool_index_);
+  ASSERT(IsBranchLinkScratch(reg));
 }
 
 NativeCallPattern::NativeCallPattern(uword pc, const Code& code)
@@ -257,16 +257,15 @@
 
   Register reg;
   uword data_load_end = InstructionPattern::DecodeLoadWordFromPool(
-      pc - Instr::kInstrSize, &reg, &data_pool_index_);
+      pc - 2 * Instr::kInstrSize, &reg, &data_pool_index_);
   ASSERT(reg == R9);
-  InstructionPattern::DecodeLoadWordFromPool(data_load_end - Instr::kInstrSize,
-                                             &reg, &target_pool_index_);
+  InstructionPattern::DecodeLoadWordFromPool(data_load_end, &reg,
+                                             &target_pool_index_);
   ASSERT(IsBranchLinkScratch(reg));
 }
 
-uword SwitchableCallPattern::target_entry() const {
-  return Code::Handle(Code::RawCast(object_pool_.ObjectAt(target_pool_index_)))
-      .MonomorphicEntryPoint();
+ObjectPtr SwitchableCallPattern::target() const {
+  return object_pool_.ObjectAt(target_pool_index_);
 }
 
 void SwitchableCallPattern::SetTarget(const Code& target) const {
diff --git a/runtime/vm/instructions_arm.h b/runtime/vm/instructions_arm.h
index e751331..c3328a5 100644
--- a/runtime/vm/instructions_arm.h
+++ b/runtime/vm/instructions_arm.h
@@ -143,7 +143,7 @@
  public:
   SwitchableCallPattern(uword pc, const Code& code);
 
-  uword target_entry() const;
+  ObjectPtr target() const;
   void SetTarget(const Code& target) const;
 
  private:
diff --git a/runtime/vm/instructions_arm64.cc b/runtime/vm/instructions_arm64.cc
index b45e47f..66eec51 100644
--- a/runtime/vm/instructions_arm64.cc
+++ b/runtime/vm/instructions_arm64.cc
@@ -45,12 +45,12 @@
   Register data_reg, code_reg;
   intptr_t pool_index;
   InstructionPattern::DecodeLoadDoubleWordFromPool(
-      pc - 2 * Instr::kInstrSize, &data_reg, &code_reg, &pool_index);
+      pc - 2 * Instr::kInstrSize, &code_reg, &data_reg, &pool_index);
   ASSERT(data_reg == R5);
   ASSERT(IsBranchLinkScratch(code_reg));
 
-  data_pool_index_ = pool_index;
-  target_pool_index_ = pool_index + 1;
+  target_pool_index_ = pool_index;
+  data_pool_index_ = pool_index + 1;
 }
 
 NativeCallPattern::NativeCallPattern(uword pc, const Code& code)
@@ -416,17 +416,16 @@
   Register ic_data_reg, code_reg;
   intptr_t pool_index;
   InstructionPattern::DecodeLoadDoubleWordFromPool(
-      pc - 2 * Instr::kInstrSize, &ic_data_reg, &code_reg, &pool_index);
+      pc - 2 * Instr::kInstrSize, &code_reg, &ic_data_reg, &pool_index);
   ASSERT(ic_data_reg == R5);
   ASSERT(IsBranchLinkScratch(code_reg));
 
-  data_pool_index_ = pool_index;
-  target_pool_index_ = pool_index + 1;
+  target_pool_index_ = pool_index;
+  data_pool_index_ = pool_index + 1;
 }
 
-uword SwitchableCallPattern::target_entry() const {
-  return Code::Handle(Code::RawCast(object_pool_.ObjectAt(target_pool_index_)))
-      .MonomorphicEntryPoint();
+ObjectPtr SwitchableCallPattern::target() const {
+  return object_pool_.ObjectAt(target_pool_index_);
 }
 
 void SwitchableCallPattern::SetTarget(const Code& target) const {
@@ -443,12 +442,12 @@
   Register ic_data_reg, code_reg;
   intptr_t pool_index;
   InstructionPattern::DecodeLoadDoubleWordFromPool(
-      pc - Instr::kInstrSize, &ic_data_reg, &code_reg, &pool_index);
+      pc - Instr::kInstrSize, &code_reg, &ic_data_reg, &pool_index);
   ASSERT(ic_data_reg == R5);
   ASSERT(code_reg == LINK_REGISTER);
 
-  data_pool_index_ = pool_index;
-  target_pool_index_ = pool_index + 1;
+  target_pool_index_ = pool_index;
+  data_pool_index_ = pool_index + 1;
 }
 
 uword BareSwitchableCallPattern::target_entry() const {
diff --git a/runtime/vm/instructions_arm64.h b/runtime/vm/instructions_arm64.h
index ac715a4..6b3d373 100644
--- a/runtime/vm/instructions_arm64.h
+++ b/runtime/vm/instructions_arm64.h
@@ -153,7 +153,7 @@
  public:
   SwitchableCallPattern(uword pc, const Code& code);
 
-  uword target_entry() const;
+  ObjectPtr target() const;
   void SetTarget(const Code& target) const;
 
  private:
diff --git a/runtime/vm/instructions_riscv.cc b/runtime/vm/instructions_riscv.cc
index 0f9b939..1809cb7a 100644
--- a/runtime/vm/instructions_riscv.cc
+++ b/runtime/vm/instructions_riscv.cc
@@ -43,8 +43,8 @@
       data_pool_index_(-1) {
   ASSERT(code.ContainsInstructionAt(pc));
   // R is either CODE_REG (JIT) or TMP (AOT)
-  //          [lui,add,]lx IC_DATA_REG, ##(pp)
   //          [lui,add,]lx R, ##(pp)
+  //          [lui,add,]lx IC_DATA_REG, ##(pp)
   // xxxxxxxx lx ra, ##(R)
   //     xxxx jalr ra
 
@@ -52,13 +52,13 @@
   ASSERT(*reinterpret_cast<uint16_t*>(pc - 2) == 0x9082);
 
   Register reg;
-  uword data_load_end = InstructionPattern::DecodeLoadWordFromPool(
-      pc - 6, &reg, &target_pool_index_);
-  ASSERT(IsJumpAndLinkScratch(reg));
-
-  InstructionPattern::DecodeLoadWordFromPool(data_load_end, &reg,
-                                             &data_pool_index_);
+  uword target_load_end = InstructionPattern::DecodeLoadWordFromPool(
+      pc - 6, &reg, &data_pool_index_);
   ASSERT(reg == IC_DATA_REG);
+
+  InstructionPattern::DecodeLoadWordFromPool(target_load_end, &reg,
+                                             &target_pool_index_);
+  ASSERT(IsJumpAndLinkScratch(reg));
 }
 
 NativeCallPattern::NativeCallPattern(uword pc, const Code& code)
@@ -329,18 +329,32 @@
 
 void SwitchableCallPatternBase::SetData(const Object& data) const {
   ASSERT(!Object::Handle(object_pool_.ObjectAt(data_pool_index_)).IsCode());
-  object_pool_.SetObjectAt(data_pool_index_, data);
+  object_pool_.SetObjectAt<std::memory_order_release>(data_pool_index_, data);
 }
 
 SwitchableCallPattern::SwitchableCallPattern(uword pc, const Code& code)
     : SwitchableCallPatternBase(ObjectPool::Handle(code.GetObjectPool())) {
   ASSERT(code.ContainsInstructionAt(pc));
-  UNIMPLEMENTED();
+  //          [lui,add,]lx CODE, ##(pp)
+  //          [lui,add,]lx IC_DATA_REG, ##(pp)
+  // xxxxxxxx lx RA, ##(CODE)
+  //     xxxx jalr RA
+
+  // Last instruction: jalr ra.
+  ASSERT(*reinterpret_cast<uint16_t*>(pc - 2) == 0x9082);
+
+  Register reg;
+  uword target_load_end = InstructionPattern::DecodeLoadWordFromPool(
+      pc - 6, &reg, &data_pool_index_);
+  ASSERT_EQUAL(reg, IC_DATA_REG);
+
+  InstructionPattern::DecodeLoadWordFromPool(target_load_end, &reg,
+                                             &target_pool_index_);
+  ASSERT_EQUAL(reg, CODE_REG);
 }
 
-uword SwitchableCallPattern::target_entry() const {
-  return Code::Handle(Code::RawCast(object_pool_.ObjectAt(target_pool_index_)))
-      .MonomorphicEntryPoint();
+ObjectPtr SwitchableCallPattern::target() const {
+  return object_pool_.ObjectAt(target_pool_index_);
 }
 
 void SwitchableCallPattern::SetTarget(const Code& target) const {
diff --git a/runtime/vm/instructions_riscv.h b/runtime/vm/instructions_riscv.h
index e2c5072..99a9ce0 100644
--- a/runtime/vm/instructions_riscv.h
+++ b/runtime/vm/instructions_riscv.h
@@ -140,7 +140,7 @@
  public:
   SwitchableCallPattern(uword pc, const Code& code);
 
-  uword target_entry() const;
+  ObjectPtr target() const;
   void SetTarget(const Code& target) const;
 
  private: