[VM] Ensure PP is restored when returning from runtime calls in bare instructions mode.

Now that we have a moving GC, it is no longer sufficient to rely on the
callee-saved nature of PP: the global object pool may be relocated while a
runtime call is in progress, leaving a stale pointer in PP.
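
The fix is per architecture: in each call-to-runtime stub, after the C
function returns and the exit frame is torn down, PP is reloaded from the
Thread object, which is reachable through THR (a plain C++ pointer the GC
never moves). A minimal sketch of the pattern in its x64 form, matching the
stub_code_x64.cc hunk below:

    // Old space is moving, so the global object pool may have been
    // relocated during the runtime call; reload PP instead of trusting
    // the callee-saved value.
    if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
      __ movq(PP, Address(THR, Thread::global_object_pool_offset()));
    }

On arm64 the heap-object tag is additionally stripped after the load, since
PP holds the pool pointer untagged on that architecture.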

This change also ensures that all runtime calls go through RuntimeEntry::Call().
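
In the breakpoint stubs this replaces a hand-rolled sequence (load the
call-to-runtime stub, load the entry, set the argument count, branch) with
the assembler helper, e.g.:

    __ EnterStubFrame();
    __ CallRuntime(*target, /*argument_count=*/0);

so every runtime call shares a single call path where the PP-restore
invariant can be enforced.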

Issue https://github.com/dart-lang/sdk/issues/33274

Change-Id: I159986dc18a9b201175fd4a7064a24a1533790ef
Reviewed-on: https://dart-review.googlesource.com/c/88711
Reviewed-by: Vyacheslav Egorov <vegorov@google.com>
Commit-Queue: Martin Kustermann <kustermann@google.com>
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index 3459de4..c1226e6 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -4120,7 +4120,8 @@
   }
   void SetObjectAt(intptr_t index, const Object& obj) const {
     ASSERT((TypeAt(index) == kTaggedObject) ||
-           (TypeAt(index) == kNativeEntryData));
+           (TypeAt(index) == kNativeEntryData) ||
+           (TypeAt(index) == kImmediate && obj.IsSmi()));
     StorePointer(&EntryAddr(index)->raw_obj_, obj.raw());
   }
 
diff --git a/runtime/vm/runtime_entry_arm.cc b/runtime/vm/runtime_entry_arm.cc
index 4b19f0d..3ff5758 100644
--- a/runtime/vm/runtime_entry_arm.cc
+++ b/runtime/vm/runtime_entry_arm.cc
@@ -50,6 +50,8 @@
     __ blx(TMP);
     __ LoadImmediate(TMP, VMTag::kDartCompiledTagId);
     __ str(TMP, Address(THR, Thread::vm_tag_offset()));
+    ASSERT((kAbiPreservedCpuRegs & (1 << THR)) != 0);
+    ASSERT((kAbiPreservedCpuRegs & (1 << PP)) != 0);
   } else {
     // Argument count is not checked here, but in the runtime entry for a more
     // informative error message.
diff --git a/runtime/vm/runtime_entry_arm64.cc b/runtime/vm/runtime_entry_arm64.cc
index 250e05f..a17870c 100644
--- a/runtime/vm/runtime_entry_arm64.cc
+++ b/runtime/vm/runtime_entry_arm64.cc
@@ -67,6 +67,8 @@
     __ str(TMP, Address(THR, Thread::vm_tag_offset()));
     __ mov(SP, R25);
     __ mov(CSP, R23);
+    ASSERT((kAbiPreservedCpuRegs & (1 << THR)) != 0);
+    ASSERT((kAbiPreservedCpuRegs & (1 << PP)) != 0);
   } else {
     // Argument count is not checked here, but in the runtime entry for a more
     // informative error message.
diff --git a/runtime/vm/runtime_entry_x64.cc b/runtime/vm/runtime_entry_x64.cc
index 0875c69..f84db6b 100644
--- a/runtime/vm/runtime_entry_x64.cc
+++ b/runtime/vm/runtime_entry_x64.cc
@@ -32,6 +32,8 @@
     __ movq(Assembler::VMTagAddress(), RAX);
     __ CallCFunction(RAX);
     __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartCompiledTagId));
+    ASSERT((CallingConventions::kCalleeSaveCpuRegisters & (1 << THR)) != 0);
+    ASSERT((CallingConventions::kCalleeSaveCpuRegisters & (1 << PP)) != 0);
   } else {
     // Argument count is not checked here, but in the runtime entry for a more
     // informative error message.
diff --git a/runtime/vm/stub_code_arm.cc b/runtime/vm/stub_code_arm.cc
index 71a8559..5a4ce97 100644
--- a/runtime/vm/stub_code_arm.cc
+++ b/runtime/vm/stub_code_arm.cc
@@ -102,6 +102,12 @@
   __ LoadImmediate(R2, 0);
   __ StoreToOffset(kWord, R2, THR, Thread::top_exit_frame_info_offset());
 
+  // Restore the global object pool after returning from runtime (old space is
+  // moving, so the GOP could have been relocated).
+  if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
+    __ ldr(PP, Address(THR, Thread::global_object_pool_offset()));
+  }
+
   __ LeaveStubFrame();
 
   // The following return can jump to a lazy-deopt stub, which assumes R0
@@ -143,13 +149,7 @@
   __ ldr(CODE_REG, Address(THR, self_code_stub_offset_from_thread));
 
   __ EnterStubFrame();
-
-  __ ldr(CODE_REG, Address(THR, Thread::call_to_runtime_stub_offset()));
-  __ ldr(R9, Address(THR, Thread::OffsetFromThread(target)));
-  __ mov(R4, Operand(/*argument_count=*/0));
-  __ ldr(TMP, Address(THR, Thread::call_to_runtime_entry_point_offset()));
-  __ blx(TMP);
-
+  __ CallRuntime(*target, /*argument_count=*/0);
   if (!allow_return) {
     __ Breakpoint();
     return;
diff --git a/runtime/vm/stub_code_arm64.cc b/runtime/vm/stub_code_arm64.cc
index 91996d9..3e5928f 100644
--- a/runtime/vm/stub_code_arm64.cc
+++ b/runtime/vm/stub_code_arm64.cc
@@ -125,6 +125,13 @@
   // Reset exit frame information in Isolate structure.
   __ StoreToOffset(ZR, THR, Thread::top_exit_frame_info_offset());
 
+  // Restore the global object pool after returning from runtime (old space is
+  // moving, so the GOP could have been relocated).
+  if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
+    __ ldr(PP, Address(THR, Thread::global_object_pool_offset()));
+    __ sub(PP, PP, Operand(kHeapObjectTag));  // Pool in PP is untagged!
+  }
+
   __ LeaveStubFrame();
 
   // The following return can jump to a lazy-deopt stub, which assumes R0
@@ -166,13 +173,7 @@
   __ ldr(CODE_REG, Address(THR, self_code_stub_offset_from_thread));
 
   __ EnterStubFrame();
-
-  __ ldr(CODE_REG, Address(THR, Thread::call_to_runtime_stub_offset()));
-  __ ldr(R5, Address(THR, Thread::OffsetFromThread(target)));
-  __ LoadImmediate(R4, /*argument_count=*/0);
-  __ ldr(TMP, Address(THR, Thread::call_to_runtime_entry_point_offset()));
-  __ blr(TMP);
-
+  __ CallRuntime(*target, /*argument_count=*/0);
   if (!allow_return) {
     __ Breakpoint();
     return;
diff --git a/runtime/vm/stub_code_x64.cc b/runtime/vm/stub_code_x64.cc
index 7184e25..bcc2198 100644
--- a/runtime/vm/stub_code_x64.cc
+++ b/runtime/vm/stub_code_x64.cc
@@ -99,6 +99,12 @@
   // Reset exit frame information in Isolate structure.
   __ movq(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0));
 
+  // Restore the global object pool after returning from runtime (old space is
+  // moving, so the GOP could have been relocated).
+  if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
+    __ movq(PP, Address(THR, Thread::global_object_pool_offset()));
+  }
+
   __ LeaveStubFrame();
 
   // The following return can jump to a lazy-deopt stub, which assumes RAX
@@ -136,12 +142,7 @@
   __ movq(CODE_REG, Address(THR, self_code_stub_offset_from_thread));
 
   __ EnterStubFrame();
-
-  __ movq(CODE_REG, Address(THR, Thread::call_to_runtime_stub_offset()));
-  __ movq(RBX, Address(THR, Thread::OffsetFromThread(target)));
-  __ movq(R10, Immediate(/*argument_count=*/0));
-  __ call(Address(THR, Thread::call_to_runtime_entry_point_offset()));
-
+  __ CallRuntime(*target, /*argument_count=*/0);
   if (!allow_return) {
     __ Breakpoint();
     return;