Version 2.16.0-67.0.dev

Merge commit '37d45743e11970f0eacc0ec864e97891347185f5' into 'dev'
diff --git a/runtime/vm/compiler/assembler/assembler_arm.cc b/runtime/vm/compiler/assembler/assembler_arm.cc
index 79de554..eee0fb3 100644
--- a/runtime/vm/compiler/assembler/assembler_arm.cc
+++ b/runtime/vm/compiler/assembler/assembler_arm.cc
@@ -3747,6 +3747,18 @@
   }
 }
 
+void Assembler::LoadStaticFieldAddress(Register address,
+                                       Register field,
+                                       Register scratch) {
+  LoadCompressedFieldFromOffset(
+      scratch, field, target::Field::host_offset_or_field_id_offset());
+  const intptr_t field_table_offset =
+      compiler::target::Thread::field_table_values_offset();
+  LoadMemoryValue(address, THR, static_cast<int32_t>(field_table_offset));
+  add(address, address,
+      Operand(scratch, LSL, target::kWordSizeLog2 - kSmiTagShift));
+}
+
 void Assembler::LoadFieldAddressForRegOffset(Register address,
                                              Register instance,
                                              Register offset_in_words_as_smi) {
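Note on the address computation above: for static fields, Field::host_offset_or_field_id holds the field id as a tagged Smi (the raw id shifted left by kSmiTagShift), and the field table reachable through Thread::field_table_values is an array of word-sized slots. Scaling the still-tagged id by (kWordSizeLog2 - kSmiTagShift) therefore yields id * kWordSize in a single shift, with no separate untagging step. A minimal standalone C++ sketch of the arithmetic, with constants assumed for 32-bit ARM (illustrative, not SDK code):

    #include <cassert>
    #include <cstdint>

    constexpr intptr_t kWordSizeLog2 = 2;  // 4-byte words on 32-bit ARM.
    constexpr intptr_t kSmiTagShift = 1;   // Tagged Smi == raw value << 1.

    // address = field_table_values + field_id * kWordSize, computed directly
    // from the tagged id.
    uintptr_t StaticFieldSlot(uintptr_t field_table_values,
                              uintptr_t tagged_id) {
      return field_table_values + (tagged_id << (kWordSizeLog2 - kSmiTagShift));
    }

    int main() {
      // Field id 3, tagged as Smi 6: slot is base + 3 * 4.
      assert(StaticFieldSlot(1000, 3u << kSmiTagShift) == 1000 + 3 * 4);
      return 0;
    }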
diff --git a/runtime/vm/compiler/assembler/assembler_arm.h b/runtime/vm/compiler/assembler/assembler_arm.h
index e98eef8..457b1da 100644
--- a/runtime/vm/compiler/assembler/assembler_arm.h
+++ b/runtime/vm/compiler/assembler/assembler_arm.h
@@ -1346,6 +1346,10 @@
                                      Register array,
                                      Register index);
 
+  void LoadStaticFieldAddress(Register address,
+                              Register field,
+                              Register scratch);
+
   void LoadCompressedFieldAddressForRegOffset(Register address,
                                               Register instance,
                                               Register offset_in_words_as_smi) {
diff --git a/runtime/vm/compiler/assembler/assembler_arm64.cc b/runtime/vm/compiler/assembler/assembler_arm64.cc
index b844404..3bfb9a8 100644
--- a/runtime/vm/compiler/assembler/assembler_arm64.cc
+++ b/runtime/vm/compiler/assembler/assembler_arm64.cc
@@ -2263,6 +2263,18 @@
   }
 }
 
+void Assembler::LoadStaticFieldAddress(Register address,
+                                       Register field,
+                                       Register scratch) {
+  LoadCompressedSmiFieldFromOffset(
+      scratch, field, target::Field::host_offset_or_field_id_offset());
+  const intptr_t field_table_offset =
+      compiler::target::Thread::field_table_values_offset();
+  LoadMemoryValue(address, THR, static_cast<int32_t>(field_table_offset));
+  add(address, address,
+      Operand(scratch, LSL, target::kWordSizeLog2 - kSmiTagShift));
+}
+
 void Assembler::LoadCompressedFieldAddressForRegOffset(
     Register address,
     Register instance,
diff --git a/runtime/vm/compiler/assembler/assembler_arm64.h b/runtime/vm/compiler/assembler/assembler_arm64.h
index d2432b5..886ab74 100644
--- a/runtime/vm/compiler/assembler/assembler_arm64.h
+++ b/runtime/vm/compiler/assembler/assembler_arm64.h
@@ -2212,6 +2212,10 @@
                                         Register array,
                                         Register index);
 
+  void LoadStaticFieldAddress(Register address,
+                              Register field,
+                              Register scratch);
+
   void LoadCompressedFieldAddressForRegOffset(Register address,
                                               Register instance,
                                               Register offset_in_words_as_smi);
diff --git a/runtime/vm/compiler/assembler/assembler_ia32.h b/runtime/vm/compiler/assembler/assembler_ia32.h
index 6bd5f28..e5b2b1a 100644
--- a/runtime/vm/compiler/assembler/assembler_ia32.h
+++ b/runtime/vm/compiler/assembler/assembler_ia32.h
@@ -882,6 +882,18 @@
                                            Register index,
                                            intptr_t extra_disp = 0);
 
+  void LoadStaticFieldAddress(Register address,
+                              Register field,
+                              Register scratch) {
+    LoadCompressedFieldFromOffset(
+        scratch, field, target::Field::host_offset_or_field_id_offset());
+    const intptr_t field_table_offset =
+        compiler::target::Thread::field_table_values_offset();
+    LoadMemoryValue(address, THR, static_cast<int32_t>(field_table_offset));
+    static_assert(kSmiTagShift == 1, "adjust scale factor");
+    leal(address, Address(address, scratch, TIMES_HALF_WORD_SIZE, 0));
+  }
+
   void LoadCompressedFieldAddressForRegOffset(Register address,
                                               Register instance,
                                               Register offset_in_words_as_smi) {
diff --git a/runtime/vm/compiler/assembler/assembler_x64.h b/runtime/vm/compiler/assembler/assembler_x64.h
index 237bd61..5312859 100644
--- a/runtime/vm/compiler/assembler/assembler_x64.h
+++ b/runtime/vm/compiler/assembler/assembler_x64.h
@@ -1228,6 +1228,19 @@
                                            Register array,
                                            Register index);
 
+  void LoadStaticFieldAddress(Register address,
+                              Register field,
+                              Register scratch) {
+    LoadCompressedSmi(
+        scratch, compiler::FieldAddress(
+                     field, target::Field::host_offset_or_field_id_offset()));
+    const intptr_t field_table_offset =
+        compiler::target::Thread::field_table_values_offset();
+    LoadMemoryValue(address, THR, static_cast<int32_t>(field_table_offset));
+    static_assert(kSmiTagShift == 1, "adjust scale factor");
+    leaq(address, Address(address, scratch, TIMES_HALF_WORD_SIZE, 0));
+  }
+
   void LoadFieldAddressForRegOffset(Register address,
                                     Register instance,
                                     Register offset_in_words_as_smi) {
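The ia32 and x64 variants fold the same computation into a single lea. Because kSmiTagShift == 1 (guarded by the static_assert), the tagged id is already 2 * id, so scaling it by half the word size (TIMES_HALF_WORD_SIZE: a scale of 2 on ia32, 4 on x64) produces id * kWordSize. In C form, under the same assumptions (illustrative, not SDK code):

    #include <cstdint>

    constexpr uintptr_t kWordSize = 8;     // x64; 4 on ia32.
    constexpr uintptr_t kSmiTagShift = 1;  // tagged_id == id * 2.

    // lea address, [table + tagged_id * (kWordSize / 2)]
    uintptr_t StaticFieldSlot(uintptr_t table, uintptr_t tagged_id) {
      static_assert(kSmiTagShift == 1, "adjust scale factor");
      return table + tagged_id * (kWordSize / 2);
    }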
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler.cc b/runtime/vm/compiler/backend/flow_graph_compiler.cc
index e31b958..9e56049 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler.cc
@@ -3381,9 +3381,7 @@
 
 void LateInitializationErrorSlowPath::PushArgumentsForRuntimeCall(
     FlowGraphCompiler* compiler) {
-  const Field& original_field = Field::ZoneHandle(
-      instruction()->AsLoadField()->slot().field().Original());
-  __ PushObject(original_field);
+  __ PushObject(Field::ZoneHandle(OriginalField()));
 }
 
 void LateInitializationErrorSlowPath::EmitSharedStubCall(
@@ -3394,9 +3392,8 @@
 #else
   ASSERT(instruction()->locs()->temp(0).reg() ==
          LateInitializationErrorABI::kFieldReg);
-  const Field& original_field = Field::ZoneHandle(
-      instruction()->AsLoadField()->slot().field().Original());
-  __ LoadObject(LateInitializationErrorABI::kFieldReg, original_field);
+  __ LoadObject(LateInitializationErrorABI::kFieldReg,
+                Field::ZoneHandle(OriginalField()));
   auto object_store = compiler->isolate_group()->object_store();
   const auto& stub = Code::ZoneHandle(
       compiler->zone(),
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler.h b/runtime/vm/compiler/backend/flow_graph_compiler.h
index e195a18..7bf8100 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler.h
+++ b/runtime/vm/compiler/backend/flow_graph_compiler.h
@@ -388,9 +388,11 @@
 
 class LateInitializationErrorSlowPath : public ThrowErrorSlowPathCode {
  public:
-  explicit LateInitializationErrorSlowPath(LoadFieldInstr* instruction)
+  explicit LateInitializationErrorSlowPath(Instruction* instruction)
       : ThrowErrorSlowPathCode(instruction,
-                               kLateFieldNotInitializedErrorRuntimeEntry) {}
+                               kLateFieldNotInitializedErrorRuntimeEntry) {
+    ASSERT(instruction->IsLoadField() || instruction->IsLoadStaticField());
+  }
   virtual const char* name() { return "late initialization error"; }
 
   virtual intptr_t GetNumberOfArgumentsForRuntimeCall() {
@@ -401,6 +403,13 @@
 
   virtual void EmitSharedStubCall(FlowGraphCompiler* compiler,
                                   bool save_fpu_registers);
+
+ private:
+  FieldPtr OriginalField() const {
+    return instruction()->IsLoadField()
+               ? instruction()->AsLoadField()->slot().field().Original()
+               : instruction()->AsLoadStaticField()->field().Original();
+  }
 };
 
 class FlowGraphCompiler : public ValueObject {
diff --git a/runtime/vm/compiler/backend/il.cc b/runtime/vm/compiler/backend/il.cc
index 4ca03ed..c3683d0 100644
--- a/runtime/vm/compiler/backend/il.cc
+++ b/runtime/vm/compiler/backend/il.cc
@@ -4140,10 +4140,25 @@
 LocationSummary* LoadStaticFieldInstr::MakeLocationSummary(Zone* zone,
                                                            bool opt) const {
   const intptr_t kNumInputs = 0;
-  const intptr_t kNumTemps = 0;
+  const bool use_shared_stub = UseSharedSlowPathStub(opt);
+  const intptr_t kNumTemps = calls_initializer() &&
+                                     throw_exception_on_initialization() &&
+                                     use_shared_stub
+                                 ? 1
+                                 : 0;
   LocationSummary* locs = new (zone) LocationSummary(
       zone, kNumInputs, kNumTemps,
-      calls_initializer() ? LocationSummary::kCall : LocationSummary::kNoCall);
+      calls_initializer()
+          ? (throw_exception_on_initialization()
+                 ? (use_shared_stub ? LocationSummary::kCallOnSharedSlowPath
+                                    : LocationSummary::kCallOnSlowPath)
+                 : LocationSummary::kCall)
+          : LocationSummary::kNoCall);
+  if (calls_initializer() && throw_exception_on_initialization() &&
+      use_shared_stub) {
+    locs->set_temp(
+        0, Location::RegisterLocation(LateInitializationErrorABI::kFieldReg));
+  }
   locs->set_out(0, calls_initializer() ? Location::RegisterLocation(
                                              InitStaticFieldABI::kResultReg)
                                        : Location::RequiresRegister());
@@ -4164,26 +4179,50 @@
   __ LoadMemoryValue(result, result, static_cast<int32_t>(field_offset));
 
   if (calls_initializer()) {
-    compiler::Label call_runtime, no_call;
-    __ CompareObject(result, Object::sentinel());
+    if (throw_exception_on_initialization()) {
+      ThrowErrorSlowPathCode* slow_path =
+          new LateInitializationErrorSlowPath(this);
+      compiler->AddSlowPathCode(slow_path);
 
+      __ CompareObject(result, Object::sentinel());
+      __ BranchIf(EQUAL, slow_path->entry_label());
+      return;
+    }
+    ASSERT(field().has_initializer());
+    auto object_store = compiler->isolate_group()->object_store();
+    const Field& original_field = Field::ZoneHandle(field().Original());
+
+    compiler::Label no_call, call_initializer;
+    __ CompareObject(result, Object::sentinel());
     if (!field().is_late()) {
-      __ BranchIf(EQUAL, &call_runtime);
+      __ BranchIf(EQUAL, &call_initializer);
       __ CompareObject(result, Object::transition_sentinel());
     }
-
     __ BranchIf(NOT_EQUAL, &no_call);
 
-    __ Bind(&call_runtime);
-    __ LoadObject(InitStaticFieldABI::kFieldReg,
-                  Field::ZoneHandle(field().Original()));
+    auto& stub = Code::ZoneHandle(compiler->zone());
+    __ Bind(&call_initializer);
+    if (field().needs_load_guard()) {
+      stub = object_store->init_static_field_stub();
+    } else if (field().is_late()) {
+      // The stubs below call the initializer function directly, so make sure
+      // one is created.
+      original_field.EnsureInitializerFunction();
+      stub = field().is_final()
+                 ? object_store->init_late_final_static_field_stub()
+                 : object_store->init_late_static_field_stub();
+    } else {
+      // We call to runtime for non-late fields because the stub would need to
+      // catch any exception generated by the initialization function to change
+      // the value of the static field from the transition sentinel to null.
+      stub = object_store->init_static_field_stub();
+    }
 
-    auto object_store = compiler->isolate_group()->object_store();
-    const auto& init_static_field_stub = Code::ZoneHandle(
-        compiler->zone(), object_store->init_static_field_stub());
-    compiler->GenerateStubCall(source(), init_static_field_stub,
+    __ LoadObject(InitStaticFieldABI::kFieldReg, original_field);
+    compiler->GenerateStubCall(source(), stub,
                                /*kind=*/UntaggedPcDescriptors::kOther, locs(),
                                deopt_id(), env());
+
     __ Bind(&no_call);
   }
 }
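The rewritten EmitNativeCode has two regimes: a late field without an initializer can only throw on an uninitialized read, so it gets a (possibly shared) slow path and no call at all, while every other initializing load selects one of three stubs. The dispatch, restated as standalone C++ (FieldInfo and StubId are illustrative stand-ins, not SDK types):

    // Precondition: the load calls the initializer and did not qualify for
    // the throw-only slow path (late and lacking an initializer).
    struct FieldInfo {
      bool needs_load_guard;
      bool is_late;
      bool is_final;
    };

    enum class StubId {
      kInitStaticField,
      kInitLateStaticField,
      kInitLateFinalStaticField,
    };

    StubId SelectInitStub(const FieldInfo& field) {
      if (field.needs_load_guard) return StubId::kInitStaticField;
      if (field.is_late) {
        // Late fields with an initializer call it directly from the new
        // stubs, skipping the runtime round trip.
        return field.is_final ? StubId::kInitLateFinalStaticField
                              : StubId::kInitLateStaticField;
      }
      // Non-late fields stay on the runtime path so that an exception from
      // the initializer can reset the slot from the transition sentinel back
      // to null.
      return StubId::kInitStaticField;
    }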
diff --git a/runtime/vm/compiler/backend/il.h b/runtime/vm/compiler/backend/il.h
index 3c8bb1f..3ef43cc 100644
--- a/runtime/vm/compiler/backend/il.h
+++ b/runtime/vm/compiler/backend/il.h
@@ -5585,18 +5585,76 @@
   DISALLOW_COPY_AND_ASSIGN(GuardFieldTypeInstr);
 };
 
-class LoadStaticFieldInstr : public TemplateDefinition<0, Throws> {
+template <intptr_t N>
+class TemplateLoadField : public TemplateDefinition<N, Throws> {
+  using Base = TemplateDefinition<N, Throws>;
+
+ public:
+  TemplateLoadField(const InstructionSource& source,
+                    bool calls_initializer = false,
+                    intptr_t deopt_id = DeoptId::kNone,
+                    const Field* field = nullptr)
+      : Base(source, deopt_id),
+        token_pos_(source.token_pos),
+        calls_initializer_(calls_initializer),
+        throw_exception_on_initialization_(false) {
+    ASSERT(!calls_initializer || (deopt_id != DeoptId::kNone));
+    if (calls_initializer_) {
+      ASSERT(field != nullptr);
+      throw_exception_on_initialization_ = !field->needs_load_guard() &&
+                                           field->is_late() &&
+                                           !field->has_initializer();
+    }
+  }
+
+  virtual TokenPosition token_pos() const { return token_pos_; }
+  bool calls_initializer() const { return calls_initializer_; }
+  void set_calls_initializer(bool value) { calls_initializer_ = value; }
+
+  bool throw_exception_on_initialization() const {
+    return throw_exception_on_initialization_;
+  }
+
+  // Slow path is used if load throws exception on initialization.
+  virtual bool UseSharedSlowPathStub(bool is_optimizing) const {
+    return Base::SlowPathSharingSupported(is_optimizing);
+  }
+
+  virtual intptr_t DeoptimizationTarget() const { return Base::GetDeoptId(); }
+  virtual bool ComputeCanDeoptimize() const { return false; }
+  virtual bool ComputeCanDeoptimizeAfterCall() const {
+    return calls_initializer() && !CompilerState::Current().is_aot();
+  }
+  virtual intptr_t NumberOfInputsConsumedBeforeCall() const {
+    return Base::InputCount();
+  }
+
+  virtual bool HasUnknownSideEffects() const {
+    return calls_initializer() && !throw_exception_on_initialization();
+  }
+
+  virtual bool CanCallDart() const {
+    return calls_initializer() && !throw_exception_on_initialization();
+  }
+  virtual bool CanTriggerGC() const { return calls_initializer(); }
+  virtual bool MayThrow() const { return calls_initializer(); }
+
+ private:
+  const TokenPosition token_pos_;
+  bool calls_initializer_;
+  bool throw_exception_on_initialization_;
+
+  DISALLOW_COPY_AND_ASSIGN(TemplateLoadField);
+};
+
+class LoadStaticFieldInstr : public TemplateLoadField<0> {
  public:
   LoadStaticFieldInstr(const Field& field,
                        const InstructionSource& source,
                        bool calls_initializer = false,
                        intptr_t deopt_id = DeoptId::kNone)
-      : TemplateDefinition(source, deopt_id),
-        field_(field),
-        token_pos_(source.token_pos),
-        calls_initializer_(calls_initializer) {
-    ASSERT(!calls_initializer || (deopt_id != DeoptId::kNone));
-  }
+      : TemplateLoadField<0>(source, calls_initializer, deopt_id, &field),
+        field_(field) {}
 
   DECLARE_INSTRUCTION(LoadStaticField)
 
@@ -5604,9 +5662,6 @@
 
   const Field& field() const { return field_; }
 
-  bool calls_initializer() const { return calls_initializer_; }
-  void set_calls_initializer(bool value) { calls_initializer_ = value; }
-
   virtual bool AllowsCSE() const {
     // If two loads of a static-final-late field call the initializer and one
     // dominates another, we can remove the dominated load with the result of
@@ -5619,23 +5674,12 @@
            (!field().is_late() || field().has_initializer());
   }
 
-  virtual bool ComputeCanDeoptimize() const {
-    return calls_initializer() && !CompilerState::Current().is_aot();
-  }
-  virtual bool HasUnknownSideEffects() const { return calls_initializer(); }
-  virtual bool CanTriggerGC() const { return calls_initializer(); }
-  virtual bool MayThrow() const { return calls_initializer(); }
-
   virtual bool AttributesEqual(const Instruction& other) const;
 
-  virtual TokenPosition token_pos() const { return token_pos_; }
-
   PRINT_OPERANDS_TO_SUPPORT
 
  private:
   const Field& field_;
-  const TokenPosition token_pos_;
-  bool calls_initializer_;
 
   DISALLOW_COPY_AND_ASSIGN(LoadStaticFieldInstr);
 };
@@ -6648,46 +6692,24 @@
 // Note: if slot was a subject of the field unboxing optimization then this load
 // would both load the box stored in the field and then load the content of
 // the box.
-class LoadFieldInstr : public TemplateDefinition<1, Throws> {
+class LoadFieldInstr : public TemplateLoadField<1> {
  public:
   LoadFieldInstr(Value* instance,
                  const Slot& slot,
                  const InstructionSource& source,
                  bool calls_initializer = false,
                  intptr_t deopt_id = DeoptId::kNone)
-      : TemplateDefinition(source, deopt_id),
-        slot_(slot),
-        token_pos_(source.token_pos),
-        calls_initializer_(calls_initializer),
-        throw_exception_on_initialization_(false) {
-    ASSERT(!calls_initializer || (deopt_id != DeoptId::kNone));
-    ASSERT(!calls_initializer || slot.IsDartField());
+      : TemplateLoadField(source,
+                          calls_initializer,
+                          deopt_id,
+                          slot.IsDartField() ? &slot.field() : nullptr),
+        slot_(slot) {
     SetInputAt(0, instance);
-    if (calls_initializer_) {
-      const Field& field = slot.field();
-      throw_exception_on_initialization_ = !field.needs_load_guard() &&
-                                           field.is_late() &&
-                                           !field.has_initializer();
-    }
   }
 
   Value* instance() const { return inputs_[0]; }
   const Slot& slot() const { return slot_; }
 
-  virtual TokenPosition token_pos() const { return token_pos_; }
-
-  bool calls_initializer() const { return calls_initializer_; }
-  void set_calls_initializer(bool value) { calls_initializer_ = value; }
-
-  bool throw_exception_on_initialization() const {
-    return throw_exception_on_initialization_;
-  }
-
-  // Slow path is used if load throws exception on initialization.
-  virtual bool UseSharedSlowPathStub(bool is_optimizing) const {
-    return SlowPathSharingSupported(is_optimizing);
-  }
-
   virtual Representation representation() const;
 
   // Returns whether this instruction is an unboxed load from a _boxed_ Dart
@@ -6704,25 +6726,6 @@
 
   virtual CompileType ComputeType() const;
 
-  virtual intptr_t DeoptimizationTarget() const { return GetDeoptId(); }
-  virtual bool ComputeCanDeoptimize() const { return false; }
-  virtual bool ComputeCanDeoptimizeAfterCall() const {
-    return calls_initializer() && !CompilerState::Current().is_aot();
-  }
-  virtual intptr_t NumberOfInputsConsumedBeforeCall() const {
-    return InputCount();
-  }
-
-  virtual bool HasUnknownSideEffects() const {
-    return calls_initializer() && !throw_exception_on_initialization();
-  }
-
-  virtual bool CanCallDart() const {
-    return calls_initializer() && !throw_exception_on_initialization();
-  }
-  virtual bool CanTriggerGC() const { return calls_initializer(); }
-  virtual bool MayThrow() const { return calls_initializer(); }
-
   virtual void InferRange(RangeAnalysis* analysis, Range* range);
 
   bool IsImmutableLengthLoad() const { return slot().IsImmutableLengthSlot(); }
@@ -6761,9 +6764,6 @@
   void EmitNativeCodeForInitializerCall(FlowGraphCompiler* compiler);
 
   const Slot& slot_;
-  const TokenPosition token_pos_;
-  bool calls_initializer_;
-  bool throw_exception_on_initialization_;
 
   DISALLOW_COPY_AND_ASSIGN(LoadFieldInstr);
 };
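A C++ detail worth noting in TemplateLoadField: TemplateDefinition<N, Throws> is a base class that depends on the template parameter N, and unqualified lookup does not search dependent bases, which is why the hoisted members spell out Base::SlowPathSharingSupported, Base::GetDeoptId, and Base::InputCount. A minimal standalone illustration (not SDK code):

    template <int N>
    struct TemplateDefinitionDemo {
      int InputCount() const { return N; }
    };

    template <int N>
    struct TemplateLoadFieldDemo : TemplateDefinitionDemo<N> {
      int NumberOfInputsConsumedBeforeCall() const {
        // return InputCount();  // Error: a member of a dependent base is
        //                       // invisible to unqualified lookup.
        return TemplateDefinitionDemo<N>::InputCount();  // OK (or this->).
      }
    };

    int main() {
      return TemplateLoadFieldDemo<1>().NumberOfInputsConsumedBeforeCall() == 1
                 ? 0
                 : 1;
    }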
diff --git a/runtime/vm/compiler/stub_code_compiler.cc b/runtime/vm/compiler/stub_code_compiler.cc
index 672ee76..a4c8a57 100644
--- a/runtime/vm/compiler/stub_code_compiler.cc
+++ b/runtime/vm/compiler/stub_code_compiler.cc
@@ -48,6 +48,68 @@
   __ Ret();
 }
 
+void StubCodeCompiler::GenerateInitLateStaticFieldStub(Assembler* assembler,
+                                                       bool is_final) {
+  const Register kResultReg = InitStaticFieldABI::kResultReg;
+  const Register kFunctionReg = InitLateStaticFieldInternalRegs::kFunctionReg;
+  const Register kFieldReg = InitStaticFieldABI::kFieldReg;
+  const Register kAddressReg = InitLateStaticFieldInternalRegs::kAddressReg;
+  const Register kScratchReg = InitLateStaticFieldInternalRegs::kScratchReg;
+
+  __ EnterStubFrame();
+
+  __ Comment("Calling initializer function");
+  __ PushRegister(kFieldReg);
+  __ LoadCompressedFieldFromOffset(
+      kFunctionReg, kFieldReg,
+      target::Field::initializer_function_offset());
+  if (!FLAG_precompiled_mode) {
+    __ LoadCompressedFieldFromOffset(CODE_REG, kFunctionReg,
+                                     target::Function::code_offset());
+    // Load a GC-safe value for the arguments descriptor (unused but tagged).
+    __ LoadImmediate(ARGS_DESC_REG, 0);
+  }
+  __ Call(FieldAddress(kFunctionReg, target::Function::entry_point_offset()));
+  __ MoveRegister(kResultReg, CallingConventions::kReturnReg);
+  __ PopRegister(kFieldReg);
+  __ LoadStaticFieldAddress(kAddressReg, kFieldReg, kScratchReg);
+
+  Label throw_exception;
+  if (is_final) {
+    __ Comment("Checking that initializer did not set late final field");
+    __ LoadFromOffset(kScratchReg, kAddressReg, 0);
+    __ CompareObject(kScratchReg, SentinelObject());
+    __ BranchIf(NOT_EQUAL, &throw_exception);
+  }
+
+  __ StoreToOffset(kResultReg, kAddressReg, 0);
+  __ LeaveStubFrame();
+  __ Ret();
+
+  if (is_final) {
+#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
+    // We are jumping over LeaveStubFrame so restore LR state to match one
+    // at the jump point.
+    __ set_lr_state(compiler::LRState::OnEntry().EnterFrame());
+#endif  // defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
+    __ Bind(&throw_exception);
+    __ PushObject(NullObject());  // Make room for (unused) result.
+    __ PushRegister(kFieldReg);
+    __ CallRuntime(kLateFieldAssignedDuringInitializationErrorRuntimeEntry,
+                   /*argument_count=*/1);
+    __ Breakpoint();
+  }
+}
+
+void StubCodeCompiler::GenerateInitLateStaticFieldStub(Assembler* assembler) {
+  GenerateInitLateStaticFieldStub(assembler, /*is_final=*/false);
+}
+
+void StubCodeCompiler::GenerateInitLateFinalStaticFieldStub(
+    Assembler* assembler) {
+  GenerateInitLateStaticFieldStub(assembler, /*is_final=*/true);
+}
+
 void StubCodeCompiler::GenerateInitInstanceFieldStub(Assembler* assembler) {
   __ EnterStubFrame();
   __ PushObject(NullObject());  // Make room for result.
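The is_final path above guards against an initializer that assigns its own field: after the initializer returns, the stub reloads the slot, and if it no longer holds the sentinel it throws LateFieldAssignedDuringInitializationError instead of performing a second assignment to a final field. A standalone sketch of that semantics (illustrative types, not the stub's actual calling convention):

    #include <functional>
    #include <stdexcept>

    using Value = long long;         // Stand-in for a field-table slot value.
    constexpr Value kSentinel = -1;  // Stand-in for the sentinel object.

    Value InitLateStaticField(Value* slot,
                              const std::function<Value()>& initializer,
                              bool is_final) {
      Value result = initializer();  // May itself read or assign the field.
      if (is_final && *slot != kSentinel) {
        // The late final field was assigned during its own initialization;
        // storing again would be a second assignment, so throw instead.
        throw std::runtime_error("LateFieldAssignedDuringInitializationError");
      }
      *slot = result;
      return result;
    }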
diff --git a/runtime/vm/compiler/stub_code_compiler.h b/runtime/vm/compiler/stub_code_compiler.h
index 58f52ce..fc28901 100644
--- a/runtime/vm/compiler/stub_code_compiler.h
+++ b/runtime/vm/compiler/stub_code_compiler.h
@@ -151,6 +151,11 @@
   static intptr_t WordOffsetFromFpToCpuRegister(Register cpu_register);
 
  private:
+  // Common function for generating InitLateStaticField and
+  // InitLateFinalStaticField stubs.
+  static void GenerateInitLateStaticFieldStub(Assembler* assembler,
+                                              bool is_final);
+
   // Common function for generating InitLateInstanceField and
   // InitLateFinalInstanceField stubs.
   static void GenerateInitLateInstanceFieldStub(Assembler* assembler,
diff --git a/runtime/vm/constants_arm.h b/runtime/vm/constants_arm.h
index 6adbe81..c774203 100644
--- a/runtime/vm/constants_arm.h
+++ b/runtime/vm/constants_arm.h
@@ -412,10 +412,17 @@
 
 // ABI for InitStaticFieldStub.
 struct InitStaticFieldABI {
-  static const Register kFieldReg = R0;
+  static const Register kFieldReg = R2;
   static const Register kResultReg = R0;
 };
 
+// Registers used inside the implementation of InitLateStaticFieldStub.
+struct InitLateStaticFieldInternalRegs {
+  static const Register kFunctionReg = R0;
+  static const Register kAddressReg = R3;
+  static const Register kScratchReg = R4;
+};
+
 // ABI for InitInstanceFieldStub.
 struct InitInstanceFieldABI {
   static const Register kInstanceReg = R1;
diff --git a/runtime/vm/constants_arm64.h b/runtime/vm/constants_arm64.h
index 28ac726..7947a60 100644
--- a/runtime/vm/constants_arm64.h
+++ b/runtime/vm/constants_arm64.h
@@ -251,10 +251,17 @@
 
 // ABI for InitStaticFieldStub.
 struct InitStaticFieldABI {
-  static const Register kFieldReg = R0;
+  static const Register kFieldReg = R2;
   static const Register kResultReg = R0;
 };
 
+// Registers used inside the implementation of InitLateStaticFieldStub.
+struct InitLateStaticFieldInternalRegs {
+  static const Register kFunctionReg = R0;
+  static const Register kAddressReg = R3;
+  static const Register kScratchReg = R4;
+};
+
 // ABI for InitInstanceFieldStub.
 struct InitInstanceFieldABI {
   static const Register kInstanceReg = R1;
diff --git a/runtime/vm/constants_ia32.h b/runtime/vm/constants_ia32.h
index 5f9c331..4a497730 100644
--- a/runtime/vm/constants_ia32.h
+++ b/runtime/vm/constants_ia32.h
@@ -148,10 +148,17 @@
 
 // ABI for InitStaticFieldStub.
 struct InitStaticFieldABI {
-  static const Register kFieldReg = EAX;
+  static const Register kFieldReg = EDX;
   static const Register kResultReg = EAX;
 };
 
+// Registers used inside the implementation of InitLateStaticFieldStub.
+struct InitLateStaticFieldInternalRegs {
+  static const Register kFunctionReg = EAX;
+  static const Register kAddressReg = ECX;
+  static const Register kScratchReg = EDI;
+};
+
 // ABI for InitInstanceFieldStub.
 struct InitInstanceFieldABI {
   static const Register kInstanceReg = EBX;
diff --git a/runtime/vm/constants_x64.h b/runtime/vm/constants_x64.h
index eb24c81..3e7f3c5 100644
--- a/runtime/vm/constants_x64.h
+++ b/runtime/vm/constants_x64.h
@@ -223,10 +223,17 @@
 
 // ABI for InitStaticFieldStub.
 struct InitStaticFieldABI {
-  static const Register kFieldReg = RAX;
+  static const Register kFieldReg = RDX;
   static const Register kResultReg = RAX;
 };
 
+// Registers used inside the implementation of InitLateStaticFieldStub.
+struct InitLateStaticFieldInternalRegs {
+  static const Register kFunctionReg = RAX;
+  static const Register kAddressReg = RCX;
+  static const Register kScratchReg = RSI;
+};
+
 // ABI for InitInstanceFieldStub.
 struct InitInstanceFieldABI {
   static const Register kInstanceReg = RBX;
diff --git a/runtime/vm/object_store.h b/runtime/vm/object_store.h
index 3b308c6..03125b2 100644
--- a/runtime/vm/object_store.h
+++ b/runtime/vm/object_store.h
@@ -215,6 +215,8 @@
   RW(Code, assert_boolean_stub)                                                \
   RW(Code, instance_of_stub)                                                   \
   RW(Code, init_static_field_stub)                                             \
+  RW(Code, init_late_static_field_stub)                                        \
+  RW(Code, init_late_final_static_field_stub)                                  \
   RW(Code, init_instance_field_stub)                                           \
   RW(Code, init_late_instance_field_stub)                                      \
   RW(Code, init_late_final_instance_field_stub)                                \
@@ -296,6 +298,8 @@
   DO(re_throw_stub, ReThrow)                                                   \
   DO(assert_boolean_stub, AssertBoolean)                                       \
   DO(init_static_field_stub, InitStaticField)                                  \
+  DO(init_late_static_field_stub, InitLateStaticField)                         \
+  DO(init_late_final_static_field_stub, InitLateFinalStaticField)              \
   DO(init_instance_field_stub, InitInstanceField)                              \
   DO(init_late_instance_field_stub, InitLateInstanceField)                     \
   DO(init_late_final_instance_field_stub, InitLateFinalInstanceField)          \
diff --git a/runtime/vm/stub_code_list.h b/runtime/vm/stub_code_list.h
index 64b36a9..fa7b06b 100644
--- a/runtime/vm/stub_code_list.h
+++ b/runtime/vm/stub_code_list.h
@@ -127,6 +127,8 @@
   V(ExitSafepoint)                                                             \
   V(CallNativeThroughSafepoint)                                                \
   V(InitStaticField)                                                           \
+  V(InitLateStaticField)                                                       \
+  V(InitLateFinalStaticField)                                                  \
   V(InitInstanceField)                                                         \
   V(InitLateInstanceField)                                                     \
   V(InitLateFinalInstanceField)                                                \
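Both stub_code_list.h and object_store.h are X-macro lists, so registering the two new stubs is one line per list: every expansion site (stub declarations, object-store slots, and the DO(...) initialization table) picks them up without further edits. A minimal standalone illustration of the pattern (not the SDK macros):

    // Each user of DEMO_STUB_LIST supplies its own V and gets one expansion
    // per entry, the way VM_STUB_CODE_LIST drives StubCode declarations.
    #define DEMO_STUB_LIST(V)                                                  \
      V(InitStaticField)                                                       \
      V(InitLateStaticField)                                                   \
      V(InitLateFinalStaticField)

    enum class StubId {
    #define DECLARE_ID(Name) k##Name,
      DEMO_STUB_LIST(DECLARE_ID)
    #undef DECLARE_ID
    };

    const char* StubName(StubId id) {
      switch (id) {
    #define STUB_CASE(Name)                                                    \
      case StubId::k##Name:                                                    \
        return #Name;
        DEMO_STUB_LIST(STUB_CASE)
    #undef STUB_CASE
      }
      return "unknown";
    }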
diff --git a/tools/VERSION b/tools/VERSION
index 7f0dce2..b3df670 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
 MAJOR 2
 MINOR 16
 PATCH 0
-PRERELEASE 66
+PRERELEASE 67
 PRERELEASE_PATCH 0
\ No newline at end of file