Version 2.19.0-43.0.dev

Merge commit '66929c985379c1e0419274b9e3d8e36315629031' into 'dev'
diff --git a/pkg/analysis_server/lib/src/services/correction/error_fix_status.yaml b/pkg/analysis_server/lib/src/services/correction/error_fix_status.yaml
index d110b08..4b45f75 100644
--- a/pkg/analysis_server/lib/src/services/correction/error_fix_status.yaml
+++ b/pkg/analysis_server/lib/src/services/correction/error_fix_status.yaml
@@ -24,10 +24,10 @@
 #   issue created for it.
 #
 # Stats:
-# - 760 "needsEvaluation"
-# -  28 "needsFix"
-# - 273 "hasFix"
-# -  41 "noFix"
+# - 734 "needsEvaluation"
+# -  43 "needsFix"
+# - 284 "hasFix"
+# -  47 "noFix"
 
 AnalysisOptionsErrorCode.INCLUDED_FILE_PARSE_ERROR:
   status: noFix
@@ -928,15 +928,27 @@
     user really needs to think about what constructor they were trying to invoke
     and why in order to really fix the issue.
 CompileTimeErrorCode.SUPER_IN_EXTENSION:
-  status: needsEvaluation
+  status: noFix
+  notes: |-
+    We could potentially offer a fix to remove the super invocation, but the
+    user really needs to think about what function they were trying to invoke
+    and why in order to really fix the issue.
 CompileTimeErrorCode.SUPER_IN_INVALID_CONTEXT:
-  status: needsEvaluation
+  status: noFix
+  notes: |-
+    We could potentially offer a fix to remove the super invocation, but the
+    user really needs to think about what member they were trying to invoke
+    and why in order to really fix the issue.
 CompileTimeErrorCode.SUPER_IN_REDIRECTING_CONSTRUCTOR:
   status: needsEvaluation
 CompileTimeErrorCode.SUPER_INITIALIZER_IN_OBJECT:
-  status: needsEvaluation
+  status: noFix
+  notes: |-
+    Object is only declared in SDK code; those developers do OK without fixes.
 CompileTimeErrorCode.SUPER_INVOCATION_NOT_LAST:
-  status: needsEvaluation
+  status: needsFix
+  notes: |-
+    Fixes could include moving super invocation to be last.
 CompileTimeErrorCode.SWITCH_CASE_COMPLETES_NORMALLY:
   status: hasFix
 CompileTimeErrorCode.SWITCH_EXPRESSION_NOT_ASSIGNABLE:
@@ -955,11 +967,17 @@
 CompileTimeErrorCode.TYPE_ARGUMENT_NOT_MATCHING_BOUNDS:
   status: needsEvaluation
 CompileTimeErrorCode.TYPE_PARAMETER_REFERENCED_BY_STATIC:
-  status: needsEvaluation
+  status: needsFix
+  notes: |-
+    Fixes could include (1) removing the 'static' keyword, (2) changing the type
+    variable to its bound, (3) looking for similarly spelled types.
 CompileTimeErrorCode.TYPE_PARAMETER_SUPERTYPE_OF_ITS_BOUND:
   status: needsEvaluation
 CompileTimeErrorCode.TYPE_TEST_WITH_NON_TYPE:
-  status: needsEvaluation
+  status: needsFix
+  notes: |-
+    Fixes could include: (1) changing ` is ` to ` == ` and (2) changing the
+    right operand to the static type of the left.
 CompileTimeErrorCode.TYPE_TEST_WITH_UNDEFINED_NAME:
   status: hasFix
 CompileTimeErrorCode.UNCHECKED_INVOCATION_OF_NULLABLE_VALUE:
@@ -985,7 +1003,10 @@
 CompileTimeErrorCode.UNDEFINED_CLASS_BOOLEAN:
   status: hasFix
 CompileTimeErrorCode.UNDEFINED_CONSTRUCTOR_IN_INITIALIZER:
-  status: needsEvaluation
+  status: needsFix
+  notes: |-
+    Fixes could include (1) adding a declaration for such a constructor,
+    (2) looking for closely named constructors.
 CompileTimeErrorCode.UNDEFINED_CONSTRUCTOR_IN_INITIALIZER_DEFAULT:
   status: hasFix
 CompileTimeErrorCode.UNDEFINED_ENUM_CONSTANT:
@@ -1066,11 +1087,17 @@
 CompileTimeErrorCode.WRONG_EXPLICIT_TYPE_PARAMETER_VARIANCE_IN_SUPERINTERFACE:
   status: needsEvaluation
 CompileTimeErrorCode.WRONG_NUMBER_OF_PARAMETERS_FOR_OPERATOR:
-  status: needsEvaluation
+  status: needsFix
+  notes: |-
+    Fixes could include (1) removing all past the first parameter, and (2)
+    removing all but a singular used parameter, if only one is used.
 CompileTimeErrorCode.WRONG_NUMBER_OF_PARAMETERS_FOR_OPERATOR_MINUS:
-  status: needsEvaluation
+  status: needsFix
+  notes: |-
+    Fixes could include (1) removing all past the first parameter, and (2)
+    removing all but a singular used parameter, if only one is used.
 CompileTimeErrorCode.WRONG_NUMBER_OF_PARAMETERS_FOR_SETTER:
-  status: needsEvaluation
+  status: needsFix
 CompileTimeErrorCode.WRONG_NUMBER_OF_TYPE_ARGUMENTS:
   status: hasFix
 CompileTimeErrorCode.WRONG_NUMBER_OF_TYPE_ARGUMENTS_ANONYMOUS_FUNCTION:
@@ -1097,9 +1124,17 @@
 CompileTimeErrorCode.WRONG_TYPE_PARAMETER_VARIANCE_POSITION:
   status: needsEvaluation
 CompileTimeErrorCode.YIELD_EACH_IN_NON_GENERATOR:
-  status: needsEvaluation
+  status: needsFix
+  notes: |-
+    Fixes could include: (1) change `yield` to `return` (maybe only if it
+    matches the return type) and (2) change function signature from implicit
+    sync to `sync*` or `async` to `async*`.
 CompileTimeErrorCode.YIELD_IN_NON_GENERATOR:
-  status: needsEvaluation
+  status: needsFix
+  notes: |-
+    Fixes could include: (1) change `yield` to `return` (maybe only if it
+    matches the return type) and (2) change function signature from implicit
+    sync to `sync*` or `async` to `async*`.
 CompileTimeErrorCode.YIELD_EACH_OF_INVALID_TYPE:
   status: needsEvaluation
 CompileTimeErrorCode.YIELD_OF_INVALID_TYPE:
diff --git a/pkg/analyzer/README.md b/pkg/analyzer/README.md
index e1bcdad..4a3da50 100644
--- a/pkg/analyzer/README.md
+++ b/pkg/analyzer/README.md
@@ -73,11 +73,12 @@
 
 * [dart format] - a formatter for Dart code
 * [dart doc] - a documentation generator for Dart code
-* [Dart Analysis Server][analysis_sever] - a stateful server that supports IDEs and editors
+* [Dart Analysis Server][analysis_server] - a stateful server that supports IDEs and editors
 
 ## Support
 
-Post issues and feature requests at https://github.com/dart-lang/sdk/issues
+Post issues and feature requests at https://github.com/dart-lang/sdk/issues.
+These will be triaged according to the [analyzer triage priorities][triage].
 
 Questions and discussions are welcome at the
 [Dart Analyzer Discussion Group][list].
@@ -105,5 +106,6 @@
 [LICENSE]: https://github.com/dart-lang/sdk/blob/main/pkg/analyzer/LICENSE
 [dart format]: https://github.com/dart-lang/dart_style
 [dart doc]: https://github.com/dart-lang/dartdoc
-[analysis_sever]: https://github.com/dart-lang/sdk/tree/main/pkg/analysis_server
+[analysis_server]: https://github.com/dart-lang/sdk/tree/main/pkg/analysis_server
 [custom_analysis]: https://dart.dev/guides/language/analysis-options
+[triage]: https://github.com/dart-lang/sdk/blob/main/pkg/analyzer/TRIAGE.md
diff --git a/pkg/analyzer/TRIAGE.md b/pkg/analyzer/TRIAGE.md
index 4435727..8b039c1 100644
--- a/pkg/analyzer/TRIAGE.md
+++ b/pkg/analyzer/TRIAGE.md
@@ -20,11 +20,14 @@
 
 ### P0
 
-* Incorrect analysis errors or warnings, widespread
+* Incorrect analysis errors or warnings, widespread without a practical
+  workaround
 * Uncaught exceptions resulting in tool crashes, widespread and no workaround
 * Incorrect resolution of symbols or libraries, widespread and no workaround
 * Incorrect data from analyzer API, widespread and with no workaround
 * Automation resulting in corrupted code from clean inputs, widespread
+  * EXAMPLE: A commonly used or important quick fix somehow uses wrong
+    offsets and eats random chunks of code.
 * Performance regression, large and widespread
 * Any problem urgently blocking critical milestones for key users or Dart rolls
   into Flutter/Google3
@@ -33,6 +36,8 @@
 ### P1
 
 * Incorrect analysis errors or warnings, on edge cases but no workarounds
+  * EXAMPLE: Disabling the afflicted warning or error has no effect, or makes
+    the problem worse.
 * Incorrect analysis infos, widespread
 * Incorrect resolution of symbols or libraries, edge cases, or widespread but
   with workaround
@@ -41,8 +46,10 @@
 * Automation resulting in corrupted code from clean inputs, edge cases or with
   an easy workaround
 * Automation resulting in incorrect code, widespread
+  * EXAMPLE: a commonly used or important quick fix generates code that is
+    valid but produces a warning (e.g. [sdk#48946](https://github.com/dart-lang/sdk/issues/48946)).
 * Performance regression, large or widespread (but not both), or impacting key
-  users
+  users.
 * An enhancement required for critical milestones for key users, or that has
   significant evidence gathered indicating a positive impact if implemented
 * Any problem that, while it doesn't currently block, will block rolls into
@@ -52,12 +59,14 @@
 ### P2
 
 * Incorrect analysis errors or warnings, on edge cases with simple workaround
-* Incorrect analysis infos, on edge cases
+  * EXAMPLE: Disabling the error or warning 'fixes' the issue and unblocks
+    users.
+* Incorrect analysis infos/hints, on edge cases
 * Incorrect resolution of symbols or libraries, edge cases only with workarounds
 * Incorrect data from analyzer API, edge cases without workaround
 * Automation resulting in incorrect code, edge cases
 * Uncaught exceptions resulting in tool crashes, edge cases
-* Performance regression, large,  impacting edge cases, without good workarounds
+* Performance regression, large, impacting edge cases, without good workarounds
 * Security or privacy problem, theoretical & non-exploitable
 * An enhancement that the team agrees is a good idea but without strong evidence
   indicating positive impact
@@ -67,11 +76,12 @@
 * Uncaught exceptions caught by a fuzzer, but believed to be theoretical
   situations only
 * Incorrect analysis errors or warnings, theoretical
-* Incorrect analysis infos, on edge cases with workaround
+* Incorrect analysis infos/hints, on edge cases with workaround
 * Incorrect resolution of symbols or libraries, theoretical
 * Incorrect data from analyzer API, edge case with workaround available
 * Performance regression impacting edge cases with workaround or without
   workaround if small
+* Automation that should be available not triggering, on edge cases
 * Automation resulting in incorrect code, theoretical or edge cases with easy
   workaround
 * An enhancement that someone on the team thinks might be good but it isn't
@@ -79,7 +89,7 @@
 
 ### P4
 
-* Incorrect analysis infos, theoretical
+* Incorrect analysis infos/hints, theoretical
 * Incorrect data from analyzer API, theoretical
 * Theoretical performance problems
 * An enhancement that may have some evidence that it isn't a good idea to
@@ -89,11 +99,17 @@
 
 ### Terms describing impact
 
+* "commonly used" - Particularly in the case of automation, either metrics
+  indicate the automation is triggered manually a high percentage of the time
+  (IntelliJ), or it is triggered as part of bulk operations e.g. `dart fix`.
 * "edge cases" - Impacting only small parts of the ecosystem.  For example,
   one package, or one key user with a workaround.  Note this is an edge case
   from the perspective of the ecosystem vs. language definition.  If it isn't
   happening much in the wild or (if there isn't evidence either way) if it
   isn't believed to be super likely in the wild, it is an edge case.
+* "important" - For diagnostics and their associated automation, if the
+  diagnostic is part of the language definition, or the core, recommended, or
+  Flutter lint sets, it is important.
 * "theoretical" - Something that we think is unlikely to happen in the wild
   and there's no evidence for it happening in the wild.
 * "widespread" - Impact endemic throughout the ecosystem, or at least far
@@ -102,11 +118,13 @@
 ### Other terms
 
 * "automation" - Anything that changes the user's code automatically.
-  Autocompletion, quick fixing, NNBD migration, etc.
+  Autocompletion, quick fixing, refactorings, NNBD migration, etc.
 * "corrupted code" - Modification of source code in such a way that it is
   more than just a bit wrong or having some symbols that don't exist, but is
   not valid Dart and would be painful to manually correct.
+* "diagnostic" - An error, warning, hint, or lint generated by the analyzer
+  or linter.
 * "incorrect code" - Modification of code in a way that is known to be wrong,
   but would be trivial to figure out how to fix for the human using the tool.
-* "key users" - Flutter, Pub, Fuchsia, Dart, Google3, 1P
-* "tool" - Analysis Server, dartanalyzer, migration tool, analyzer-as-library
+* "key users" - Flutter, Pub, Fuchsia, Dart, Google/1P
+* "tool" - Analysis Server, dart analyzer, migration tool, analyzer-as-library
diff --git a/pkg/analyzer/messages.yaml b/pkg/analyzer/messages.yaml
index 01a38cd..ef1da68 100644
--- a/pkg/analyzer/messages.yaml
+++ b/pkg/analyzer/messages.yaml
@@ -13033,7 +13033,7 @@
       #### Description
 
       The analyzer produces this diagnostic when the keyword `super` is used
-      outside of a instance method.
+      outside of an instance method.
 
       #### Example
 
@@ -15192,8 +15192,10 @@
       }
       ```
       TODO(brianwilkerson) It would be good to add a link to the spec or some
-       other documentation that lists the number of parameters for each operator,
-       but I don't know what to link to.
+       other documentation that lists the number of parameters for each
+       operator, but I don't know what to link to.
+      TODO(brianwilkerson) Another reasonable fix is to convert the operator to
+       be a normal method.
   WRONG_NUMBER_OF_PARAMETERS_FOR_OPERATOR_MINUS:
     sharedName: WRONG_NUMBER_OF_PARAMETERS_FOR_OPERATOR
     problemMessage: "Operator '-' should declare 0 or 1 parameter, but {0} found."
diff --git a/pkg/analyzer/tool/diagnostics/diagnostics.md b/pkg/analyzer/tool/diagnostics/diagnostics.md
index e3eb829..a63e5b4 100644
--- a/pkg/analyzer/tool/diagnostics/diagnostics.md
+++ b/pkg/analyzer/tool/diagnostics/diagnostics.md
@@ -17037,7 +17037,7 @@
 #### Description
 
 The analyzer produces this diagnostic when the keyword `super` is used
-outside of a instance method.
+outside of an instance method.
 
 #### Example
 
diff --git a/pkg/dart2wasm/bin/dart2wasm.dart b/pkg/dart2wasm/bin/dart2wasm.dart
index ac3d42d..7119b7f 100644
--- a/pkg/dart2wasm/bin/dart2wasm.dart
+++ b/pkg/dart2wasm/bin/dart2wasm.dart
@@ -57,6 +57,8 @@
       (o, value) => o.translatorOptions.sharedMemoryMaxPages = value),
   UriOption("dart-sdk", (o, value) => o.sdkPath = value,
       defaultsTo: "${_d.sdkPath}"),
+  UriOption("packages", (o, value) => o.packagesPath = value),
+  UriOption("libraries-spec", (o, value) => o.librariesSpecPath = value),
   UriOption("platform", (o, value) => o.platformPath = value),
   IntMultiOption(
       "watch", (o, values) => o.translatorOptions.watchPoints = values),
diff --git a/pkg/dart2wasm/lib/compile.dart b/pkg/dart2wasm/lib/compile.dart
index e398aee..ead7d20 100644
--- a/pkg/dart2wasm/lib/compile.dart
+++ b/pkg/dart2wasm/lib/compile.dart
@@ -45,6 +45,8 @@
   CompilerOptions compilerOptions = CompilerOptions()
     ..target = target
     ..sdkRoot = options.sdkPath
+    ..librariesSpecificationUri = options.librariesSpecPath
+    ..packagesFileUri = options.packagesPath
     ..environmentDefines = options.environment
     ..verbose = false
     ..onDiagnostic = diagnosticMessageHandler
diff --git a/pkg/dart2wasm/lib/compiler_options.dart b/pkg/dart2wasm/lib/compiler_options.dart
index 3f8c7d2..47ac8ba 100644
--- a/pkg/dart2wasm/lib/compiler_options.dart
+++ b/pkg/dart2wasm/lib/compiler_options.dart
@@ -11,6 +11,8 @@
 
   Uri sdkPath = Platform.script.resolve("../../../sdk");
   Uri? platformPath;
+  Uri? librariesSpecPath;
+  Uri? packagesPath;
   Uri mainUri;
   String outputFile;
   Map<String, String> environment = const {};
diff --git a/pkg/dds/CHANGELOG.md b/pkg/dds/CHANGELOG.md
index 8885b43..5fffdf9 100644
--- a/pkg/dds/CHANGELOG.md
+++ b/pkg/dds/CHANGELOG.md
@@ -1,3 +1,6 @@
+# 2.2.6
+- Fixed an issue where debug adapters would not automatically close after terminating/disconnecting from the debugee.
+
 # 2.2.5
 - Updated `devtools_shared` version to 2.14.1.
 
diff --git a/pkg/dds/pubspec.yaml b/pkg/dds/pubspec.yaml
index b8b0c65..0d8ae8b 100644
--- a/pkg/dds/pubspec.yaml
+++ b/pkg/dds/pubspec.yaml
@@ -1,5 +1,5 @@
 name: dds
-version: 2.2.5
+version: 2.2.6
 description: >-
   A library used to spawn the Dart Developer Service, used to communicate with
   a Dart VM Service instance.
diff --git a/runtime/include/dart_api.h b/runtime/include/dart_api.h
index bbc1291..339fd78 100644
--- a/runtime/include/dart_api.h
+++ b/runtime/include/dart_api.h
@@ -1277,6 +1277,40 @@
  */
 DART_EXPORT void Dart_NotifyLowMemory(void);
 
+typedef enum {
+  /**
+   * Balanced
+   */
+  Dart_PerformanceMode_Default,
+  /**
+   * Optimize for low latency, at the expense of throughput and memory overhead
+   * by performing work in smaller batches (requiring more overhead) or by
+   * delaying work (requiring more memory). An embedder should not remain in
+   * this mode indefinitely.
+   */
+  Dart_PerformanceMode_Latency,
+  /**
+   * Optimize for high throughput, at the expense of latency and memory overhead
+   * by performing work in larger batches with more intervening growth.
+   */
+  Dart_PerformanceMode_Throughput,
+  /**
+   * Optimize for low memory, at the expense of throughput and latency by more
+   * frequently performing work.
+   */
+  Dart_PerformanceMode_Memory,
+} Dart_PerformanceMode;
+
+/**
+ * Set the desired performance trade-off.
+ *
+ * Requires a current isolate.
+ *
+ * Returns the previous performance mode.
+ */
+DART_EXPORT Dart_PerformanceMode
+Dart_SetPerformanceMode(Dart_PerformanceMode mode);
+
 /**
  * Starts the CPU sampling profiler.
  */
diff --git a/runtime/vm/compiler/backend/constant_propagator.cc b/runtime/vm/compiler/backend/constant_propagator.cc
index 69e28a7..8456490 100644
--- a/runtime/vm/compiler/backend/constant_propagator.cc
+++ b/runtime/vm/compiler/backend/constant_propagator.cc
@@ -292,8 +292,7 @@
 
 void ConstantPropagator::VisitStoreIndexed(StoreIndexedInstr* instr) {}
 
-void ConstantPropagator::VisitStoreInstanceField(
-    StoreInstanceFieldInstr* instr) {}
+void ConstantPropagator::VisitStoreField(StoreFieldInstr* instr) {}
 
 void ConstantPropagator::VisitMemoryCopy(MemoryCopyInstr* instr) {}
 
@@ -1710,8 +1709,8 @@
   ASSERT((defn == nullptr) || !defn->IsPushArgument());
   if ((defn != nullptr) && IsConstant(defn->constant_value()) &&
       (defn->constant_value().IsSmi() || defn->constant_value().IsOld()) &&
-      !defn->IsConstant() && !defn->IsStoreIndexed() &&
-      !defn->IsStoreInstanceField() && !defn->IsStoreStaticField()) {
+      !defn->IsConstant() && !defn->IsStoreIndexed() && !defn->IsStoreField() &&
+      !defn->IsStoreStaticField()) {
     if (FLAG_trace_constant_propagation && graph_->should_print()) {
       THR_Print("Constant v%" Pd " = %s\n", defn->ssa_temp_index(),
                 defn->constant_value().ToCString());
diff --git a/runtime/vm/compiler/backend/flow_graph.h b/runtime/vm/compiler/backend/flow_graph.h
index 3782e75..354e15f 100644
--- a/runtime/vm/compiler/backend/flow_graph.h
+++ b/runtime/vm/compiler/backend/flow_graph.h
@@ -339,12 +339,12 @@
   // to
   //
   //     v2 <- AssertAssignable:<id>(v1, ...)
-  //     StoreInstanceField(v0, v2)
+  //     StoreField(v0, v2)
   //
   // If the [AssertAssignable] causes a lazy-deopt on return, we'll have to
   // *re-try* the implicit setter call in unoptimized mode, i.e. lazy deopt to
   // before-call (otherwise - if we continued after-call - the
-  // StoreInstanceField would not be performed).
+  // StoreField would not be performed).
   void InsertSpeculativeAfter(Instruction* prev,
                               Instruction* instr,
                               Environment* env,
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler.h b/runtime/vm/compiler/backend/flow_graph_compiler.h
index 8860c23..2024126 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler.h
+++ b/runtime/vm/compiler/backend/flow_graph_compiler.h
@@ -1008,7 +1008,7 @@
   friend class NullErrorSlowPath;        // For AddPcRelativeCallStubTarget().
   friend class CheckStackOverflowInstr;  // For AddPcRelativeCallStubTarget().
   friend class StoreIndexedInstr;        // For AddPcRelativeCallStubTarget().
-  friend class StoreInstanceFieldInstr;  // For AddPcRelativeCallStubTarget().
+  friend class StoreFieldInstr;          // For AddPcRelativeCallStubTarget().
   friend class CheckStackOverflowSlowPath;  // For pending_deoptimization_env_.
   friend class GraphInstrinsicCodeGenScope;   // For optimizing_.
 
diff --git a/runtime/vm/compiler/backend/il.cc b/runtime/vm/compiler/backend/il.cc
index c025d2d..bc4ebde 100644
--- a/runtime/vm/compiler/backend/il.cc
+++ b/runtime/vm/compiler/backend/il.cc
@@ -963,17 +963,17 @@
                              locs(), deopt_id(), env());
 }
 
-bool StoreInstanceFieldInstr::IsUnboxedDartFieldStore() const {
+bool StoreFieldInstr::IsUnboxedDartFieldStore() const {
   return slot().representation() == kTagged && slot().IsDartField() &&
          slot().IsUnboxed();
 }
 
-bool StoreInstanceFieldInstr::IsPotentialUnboxedDartFieldStore() const {
+bool StoreFieldInstr::IsPotentialUnboxedDartFieldStore() const {
   return slot().representation() == kTagged && slot().IsDartField() &&
          slot().IsPotentialUnboxed();
 }
 
-Representation StoreInstanceFieldInstr::RequiredInputRepresentation(
+Representation StoreFieldInstr::RequiredInputRepresentation(
     intptr_t index) const {
   ASSERT((index == 0) || (index == 1));
   if (index == 0) {
@@ -986,7 +986,7 @@
   return slot().representation();
 }
 
-Instruction* StoreInstanceFieldInstr::Canonicalize(FlowGraph* flow_graph) {
+Instruction* StoreFieldInstr::Canonicalize(FlowGraph* flow_graph) {
   // Dart objects are allocated null-initialized, which means we can eliminate
   // all initializing stores which store null value.
   // Context objects can be allocated uninitialized as a performance
diff --git a/runtime/vm/compiler/backend/il.h b/runtime/vm/compiler/backend/il.h
index bee10a9..2aa863f 100644
--- a/runtime/vm/compiler/backend/il.h
+++ b/runtime/vm/compiler/backend/il.h
@@ -447,7 +447,7 @@
   M(LoadIndexed, kNoGC)                                                        \
   M(LoadCodeUnits, kNoGC)                                                      \
   M(StoreIndexed, kNoGC)                                                       \
-  M(StoreInstanceField, _)                                                     \
+  M(StoreField, _)                                                             \
   M(LoadStaticField, _)                                                        \
   M(StoreStaticField, kNoGC)                                                   \
   M(BooleanNegate, kNoGC)                                                      \
@@ -1173,7 +1173,7 @@
   virtual bool AllowsCSE() const { return false; }
 
   // Returns true if this instruction has any side-effects besides storing.
-  // See StoreInstanceFieldInstr::HasUnknownSideEffects() for rationale.
+  // See StoreFieldInstr::HasUnknownSideEffects() for rationale.
   virtual bool HasUnknownSideEffects() const = 0;
 
   // Whether this instruction can call Dart code without going through
@@ -5452,7 +5452,7 @@
 
 enum StoreBarrierType { kNoStoreBarrier, kEmitStoreBarrier };
 
-// StoreInstanceField instruction represents a store of the given [value] into
+// StoreField instruction represents a store of the given [value] into
 // the specified [slot] on the [instance] object. [emit_store_barrier] allows to
 // specify whether the store should omit the write barrier. [kind] specifies
 // whether this store is an initializing store, i.e. the first store into a
@@ -5476,12 +5476,12 @@
 // start of internal typed data array backing) then this instruction cannot be
 // moved across instructions which can trigger GC, to ensure that
 //
-//    LoadUntagged + Arithmetic + StoreInstanceField
+//    LoadUntagged + Arithmetic + StoreField
 //
 // are performed as an effectively atomic set of instructions.
 //
 // See kernel_to_il.cc:BuildTypedDataViewFactoryConstructor.
-class StoreInstanceFieldInstr : public TemplateInstruction<2, NoThrow> {
+class StoreFieldInstr : public TemplateInstruction<2, NoThrow> {
  public:
   enum class Kind {
     // Store is known to be the first store into a slot of an object after
@@ -5493,14 +5493,14 @@
     kOther,
   };
 
-  StoreInstanceFieldInstr(const Slot& slot,
-                          Value* instance,
-                          Value* value,
-                          StoreBarrierType emit_store_barrier,
-                          const InstructionSource& source,
-                          Kind kind = Kind::kOther,
-                          compiler::Assembler::MemoryOrder memory_order =
-                              compiler::Assembler::kRelaxedNonAtomic)
+  StoreFieldInstr(const Slot& slot,
+                  Value* instance,
+                  Value* value,
+                  StoreBarrierType emit_store_barrier,
+                  const InstructionSource& source,
+                  Kind kind = Kind::kOther,
+                  compiler::Assembler::MemoryOrder memory_order =
+                      compiler::Assembler::kRelaxedNonAtomic)
       : TemplateInstruction(source),
         slot_(slot),
         emit_store_barrier_(emit_store_barrier),
@@ -5512,19 +5512,19 @@
   }
 
   // Convenience constructor that looks up an IL Slot for the given [field].
-  StoreInstanceFieldInstr(const Field& field,
-                          Value* instance,
-                          Value* value,
-                          StoreBarrierType emit_store_barrier,
-                          const InstructionSource& source,
-                          const ParsedFunction* parsed_function,
-                          Kind kind = Kind::kOther)
-      : StoreInstanceFieldInstr(Slot::Get(field, parsed_function),
-                                instance,
-                                value,
-                                emit_store_barrier,
-                                source,
-                                kind) {}
+  StoreFieldInstr(const Field& field,
+                  Value* instance,
+                  Value* value,
+                  StoreBarrierType emit_store_barrier,
+                  const InstructionSource& source,
+                  const ParsedFunction* parsed_function,
+                  Kind kind = Kind::kOther)
+      : StoreFieldInstr(Slot::Get(field, parsed_function),
+                        instance,
+                        value,
+                        emit_store_barrier,
+                        source,
+                        kind) {}
 
   virtual SpeculativeMode SpeculativeModeOfInput(intptr_t index) const {
     // In AOT unbox is done based on TFA, therefore it was proven to be correct
@@ -5535,7 +5535,7 @@
                : kGuardInputs;
   }
 
-  DECLARE_INSTRUCTION(StoreInstanceField)
+  DECLARE_INSTRUCTION(StoreField)
 
   enum { kInstancePos = 0, kValuePos = 1 };
 
@@ -5616,7 +5616,7 @@
   // Marks initializing stores. E.g. in the constructor.
   const bool is_initialization_;
 
-  DISALLOW_COPY_AND_ASSIGN(StoreInstanceFieldInstr);
+  DISALLOW_COPY_AND_ASSIGN(StoreFieldInstr);
 };
 
 class GuardFieldInstr : public TemplateInstruction<1, NoThrow, Pure> {
diff --git a/runtime/vm/compiler/backend/il_arm.cc b/runtime/vm/compiler/backend/il_arm.cc
index ef4150c..e4bc74b 100644
--- a/runtime/vm/compiler/backend/il_arm.cc
+++ b/runtime/vm/compiler/backend/il_arm.cc
@@ -2827,8 +2827,8 @@
   }
 }
 
-LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
-                                                              bool opt) const {
+LocationSummary* StoreFieldInstr::MakeLocationSummary(Zone* zone,
+                                                      bool opt) const {
   const intptr_t kNumInputs = 2;
   const intptr_t kNumTemps =
       ((IsUnboxedDartFieldStore() && opt)
@@ -2878,7 +2878,7 @@
 }
 
 static void EnsureMutableBox(FlowGraphCompiler* compiler,
-                             StoreInstanceFieldInstr* instruction,
+                             StoreFieldInstr* instruction,
                              Register box_reg,
                              const Class& cls,
                              Register instance_reg,
@@ -2897,7 +2897,7 @@
   __ Bind(&done);
 }
 
-void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
+void StoreFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
   ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
   ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
@@ -2913,7 +2913,7 @@
     auto const rep = slot().representation();
     ASSERT(RepresentationUtils::IsUnboxedInteger(rep));
     const size_t value_size = RepresentationUtils::ValueSize(rep);
-    __ Comment("NativeUnboxedStoreInstanceFieldInstr");
+    __ Comment("NativeUnboxedStoreFieldInstr");
     if (value_size <= compiler::target::kWordSize) {
       const Register value = locs()->in(kValuePos).reg();
       __ StoreFieldToOffset(value, instance_reg, offset_in_bytes,
@@ -2938,17 +2938,17 @@
     if (FLAG_precompiled_mode) {
       switch (cid) {
         case kDoubleCid:
-          __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
+          __ Comment("UnboxedDoubleStoreFieldInstr");
           __ StoreDToOffset(value, instance_reg,
                             offset_in_bytes - kHeapObjectTag);
           return;
         case kFloat32x4Cid:
-          __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
+          __ Comment("UnboxedFloat32x4StoreFieldInstr");
           __ StoreMultipleDToOffset(value, 2, instance_reg,
                                     offset_in_bytes - kHeapObjectTag);
           return;
         case kFloat64x2Cid:
-          __ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
+          __ Comment("UnboxedFloat64x2StoreFieldInstr");
           __ StoreMultipleDToOffset(value, 2, instance_reg,
                                     offset_in_bytes - kHeapObjectTag);
           return;
@@ -2985,19 +2985,19 @@
     }
     switch (cid) {
       case kDoubleCid:
-        __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
+        __ Comment("UnboxedDoubleStoreFieldInstr");
         __ StoreDToOffset(
             value, temp,
             compiler::target::Double::value_offset() - kHeapObjectTag);
         break;
       case kFloat32x4Cid:
-        __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
+        __ Comment("UnboxedFloat32x4StoreFieldInstr");
         __ StoreMultipleDToOffset(
             value, 2, temp,
             compiler::target::Float32x4::value_offset() - kHeapObjectTag);
         break;
       case kFloat64x2Cid:
-        __ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
+        __ Comment("UnboxedFloat64x2StoreFieldInstr");
         __ StoreMultipleDToOffset(
             value, 2, temp,
             compiler::target::Float64x2::value_offset() - kHeapObjectTag);
diff --git a/runtime/vm/compiler/backend/il_arm64.cc b/runtime/vm/compiler/backend/il_arm64.cc
index 8dae4ff..ca277fb 100644
--- a/runtime/vm/compiler/backend/il_arm64.cc
+++ b/runtime/vm/compiler/backend/il_arm64.cc
@@ -2458,7 +2458,7 @@
 }
 
 static void EnsureMutableBox(FlowGraphCompiler* compiler,
-                             StoreInstanceFieldInstr* instruction,
+                             StoreFieldInstr* instruction,
                              Register box_reg,
                              const Class& cls,
                              Register instance_reg,
@@ -2475,8 +2475,8 @@
   __ Bind(&done);
 }
 
-LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
-                                                              bool opt) const {
+LocationSummary* StoreFieldInstr::MakeLocationSummary(Zone* zone,
+                                                      bool opt) const {
   const intptr_t kNumInputs = 2;
   const intptr_t kNumTemps = (IsUnboxedDartFieldStore() && opt)
                                  ? (FLAG_precompiled_mode ? 0 : 2)
@@ -2521,7 +2521,7 @@
   return summary;
 }
 
-void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
+void StoreFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
   ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
   ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
@@ -2536,7 +2536,7 @@
     ASSERT(memory_order_ != compiler::AssemblerBase::kRelease);
     ASSERT(RepresentationUtils::IsUnboxedInteger(slot().representation()));
     const Register value = locs()->in(kValuePos).reg();
-    __ Comment("NativeUnboxedStoreInstanceFieldInstr");
+    __ Comment("NativeUnboxedStoreFieldInstr");
     __ StoreFieldToOffset(
         value, instance_reg, offset_in_bytes,
         RepresentationUtils::OperandSize(slot().representation()));
@@ -2550,7 +2550,7 @@
     if (FLAG_precompiled_mode) {
       switch (cid) {
         case kDoubleCid:
-          __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
+          __ Comment("UnboxedDoubleStoreFieldInstr");
           if (locs()->in(kValuePos).IsConstant()) {
             ASSERT(locs()
                        ->in(kValuePos)
@@ -2564,12 +2564,12 @@
           }
           return;
         case kFloat32x4Cid:
-          __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
+          __ Comment("UnboxedFloat32x4StoreFieldInstr");
           __ StoreQFieldToOffset(locs()->in(kValuePos).fpu_reg(), instance_reg,
                                  offset_in_bytes);
           return;
         case kFloat64x2Cid:
-          __ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
+          __ Comment("UnboxedFloat64x2StoreFieldInstr");
           __ StoreQFieldToOffset(locs()->in(kValuePos).fpu_reg(), instance_reg,
                                  offset_in_bytes);
           return;
@@ -2608,15 +2608,15 @@
     const VRegister value = locs()->in(kValuePos).fpu_reg();
     switch (cid) {
       case kDoubleCid:
-        __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
+        __ Comment("UnboxedDoubleStoreFieldInstr");
         __ StoreDFieldToOffset(value, temp, Double::value_offset());
         break;
       case kFloat32x4Cid:
-        __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
+        __ Comment("UnboxedFloat32x4StoreFieldInstr");
         __ StoreQFieldToOffset(value, temp, Float32x4::value_offset());
         break;
       case kFloat64x2Cid:
-        __ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
+        __ Comment("UnboxedFloat64x2StoreFieldInstr");
         __ StoreQFieldToOffset(value, temp, Float64x2::value_offset());
         break;
       default:
diff --git a/runtime/vm/compiler/backend/il_ia32.cc b/runtime/vm/compiler/backend/il_ia32.cc
index ee799ff..b84f33e 100644
--- a/runtime/vm/compiler/backend/il_ia32.cc
+++ b/runtime/vm/compiler/backend/il_ia32.cc
@@ -2120,8 +2120,8 @@
   }
 }
 
-LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
-                                                              bool opt) const {
+LocationSummary* StoreFieldInstr::MakeLocationSummary(Zone* zone,
+                                                      bool opt) const {
   const intptr_t kNumInputs = 2;
   const intptr_t kNumTemps =
       (IsUnboxedDartFieldStore() && opt)
@@ -2167,7 +2167,7 @@
 }
 
 static void EnsureMutableBox(FlowGraphCompiler* compiler,
-                             StoreInstanceFieldInstr* instruction,
+                             StoreFieldInstr* instruction,
                              Register box_reg,
                              const Class& cls,
                              Register instance_reg,
@@ -2187,7 +2187,7 @@
   __ Bind(&done);
 }
 
-void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
+void StoreFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
   ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
   ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
@@ -2203,7 +2203,7 @@
     auto const rep = slot().representation();
     ASSERT(RepresentationUtils::IsUnboxedInteger(rep));
     const size_t value_size = RepresentationUtils::ValueSize(rep);
-    __ Comment("NativeUnboxedStoreInstanceFieldInstr");
+    __ Comment("NativeUnboxedStoreFieldInstr");
     if (value_size <= compiler::target::kWordSize) {
       const Register value = locs()->in(kValuePos).reg();
       __ StoreFieldToOffset(value, instance_reg, offset_in_bytes,
@@ -2253,16 +2253,16 @@
     }
     switch (cid) {
       case kDoubleCid:
-        __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
+        __ Comment("UnboxedDoubleStoreFieldInstr");
         __ movsd(compiler::FieldAddress(temp, Double::value_offset()), value);
         break;
       case kFloat32x4Cid:
-        __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
+        __ Comment("UnboxedFloat32x4StoreFieldInstr");
         __ movups(compiler::FieldAddress(temp, Float32x4::value_offset()),
                   value);
         break;
       case kFloat64x2Cid:
-        __ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
+        __ Comment("UnboxedFloat64x2StoreFieldInstr");
         __ movups(compiler::FieldAddress(temp, Float64x2::value_offset()),
                   value);
         break;
diff --git a/runtime/vm/compiler/backend/il_printer.cc b/runtime/vm/compiler/backend/il_printer.cc
index 076755b..05b7050 100644
--- a/runtime/vm/compiler/backend/il_printer.cc
+++ b/runtime/vm/compiler/backend/il_printer.cc
@@ -784,7 +784,7 @@
   value()->PrintTo(f);
 }
 
-void StoreInstanceFieldInstr::PrintOperandsTo(BaseTextBuffer* f) const {
+void StoreFieldInstr::PrintOperandsTo(BaseTextBuffer* f) const {
   instance()->PrintTo(f);
   f->Printf(" . %s = ", slot().Name());
   value()->PrintTo(f);
diff --git a/runtime/vm/compiler/backend/il_riscv.cc b/runtime/vm/compiler/backend/il_riscv.cc
index f53a822..041f180 100644
--- a/runtime/vm/compiler/backend/il_riscv.cc
+++ b/runtime/vm/compiler/backend/il_riscv.cc
@@ -2784,7 +2784,7 @@
 }
 
 static void EnsureMutableBox(FlowGraphCompiler* compiler,
-                             StoreInstanceFieldInstr* instruction,
+                             StoreFieldInstr* instruction,
                              Register box_reg,
                              const Class& cls,
                              Register instance_reg,
@@ -2801,8 +2801,8 @@
   __ Bind(&done);
 }
 
-LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
-                                                              bool opt) const {
+LocationSummary* StoreFieldInstr::MakeLocationSummary(Zone* zone,
+                                                      bool opt) const {
   const intptr_t kNumInputs = 2;
   const intptr_t kNumTemps = (IsUnboxedDartFieldStore() && opt)
                                  ? (FLAG_precompiled_mode ? 0 : 2)
@@ -2859,7 +2859,7 @@
   return summary;
 }
 
-void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
+void StoreFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
   ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
   ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
@@ -2875,7 +2875,7 @@
     auto const rep = slot().representation();
     ASSERT(RepresentationUtils::IsUnboxedInteger(rep));
     const size_t value_size = RepresentationUtils::ValueSize(rep);
-    __ Comment("NativeUnboxedStoreInstanceFieldInstr");
+    __ Comment("NativeUnboxedStoreFieldInstr");
     if (value_size <= compiler::target::kWordSize) {
       const Register value = locs()->in(kValuePos).reg();
       __ StoreFieldToOffset(value, instance_reg, offset_in_bytes,
@@ -2902,7 +2902,7 @@
     if (FLAG_precompiled_mode) {
       switch (cid) {
         case kDoubleCid:
-          __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
+          __ Comment("UnboxedDoubleStoreFieldInstr");
 #if XLEN >= 64
           if (locs()->in(kValuePos).IsConstant()) {
             ASSERT(locs()
@@ -2918,11 +2918,11 @@
                                  offset_in_bytes);
           return;
         case kFloat32x4Cid:
-          __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
+          __ Comment("UnboxedFloat32x4StoreFieldInstr");
           UNIMPLEMENTED();
           return;
         case kFloat64x2Cid:
-          __ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
+          __ Comment("UnboxedFloat64x2StoreFieldInstr");
           UNIMPLEMENTED();
           return;
         default:
@@ -2960,15 +2960,15 @@
     const FRegister value = locs()->in(kValuePos).fpu_reg();
     switch (cid) {
       case kDoubleCid:
-        __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
+        __ Comment("UnboxedDoubleStoreFieldInstr");
         __ StoreDFieldToOffset(value, temp, Double::value_offset());
         break;
       case kFloat32x4Cid:
-        __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
+        __ Comment("UnboxedFloat32x4StoreFieldInstr");
         UNIMPLEMENTED();
         break;
       case kFloat64x2Cid:
-        __ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
+        __ Comment("UnboxedFloat64x2StoreFieldInstr");
         UNIMPLEMENTED();
         break;
       default:
diff --git a/runtime/vm/compiler/backend/il_test.cc b/runtime/vm/compiler/backend/il_test.cc
index 95f8ac0..d004834 100644
--- a/runtime/vm/compiler/backend/il_test.cc
+++ b/runtime/vm/compiler/backend/il_test.cc
@@ -104,7 +104,7 @@
        !block_it.Done(); block_it.Advance()) {
     for (ForwardInstructionIterator it(block_it.Current()); !it.Done();
          it.Advance()) {
-      if (auto store = it.Current()->AsStoreInstanceField()) {
+      if (auto store = it.Current()->AsStoreField()) {
         EXPECT_LT(next_expected_store, expected_stores.size());
         EXPECT_STREQ(expected_stores[next_expected_store],
                      store->slot().Name());
diff --git a/runtime/vm/compiler/backend/il_x64.cc b/runtime/vm/compiler/backend/il_x64.cc
index eb53681..b7bca3f 100644
--- a/runtime/vm/compiler/backend/il_x64.cc
+++ b/runtime/vm/compiler/backend/il_x64.cc
@@ -2461,8 +2461,8 @@
   __ Bind(&ok);
 }
 
-LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
-                                                              bool opt) const {
+LocationSummary* StoreFieldInstr::MakeLocationSummary(Zone* zone,
+                                                      bool opt) const {
   const intptr_t kNumInputs = 2;
   const intptr_t kNumTemps = (IsUnboxedDartFieldStore() && opt)
                                  ? (FLAG_precompiled_mode ? 0 : 2)
@@ -2505,7 +2505,7 @@
 }
 
 static void EnsureMutableBox(FlowGraphCompiler* compiler,
-                             StoreInstanceFieldInstr* instruction,
+                             StoreFieldInstr* instruction,
                              Register box_reg,
                              const Class& cls,
                              Register instance_reg,
@@ -2524,7 +2524,7 @@
   __ Bind(&done);
 }
 
-void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
+void StoreFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
   ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
   ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
@@ -2539,7 +2539,7 @@
     ASSERT(memory_order_ != compiler::AssemblerBase::kRelease);
     ASSERT(RepresentationUtils::IsUnboxedInteger(slot().representation()));
     const Register value = locs()->in(kValuePos).reg();
-    __ Comment("NativeUnboxedStoreInstanceFieldInstr");
+    __ Comment("NativeUnboxedStoreFieldInstr");
     __ StoreFieldToOffset(
         value, instance_reg, offset_in_bytes,
         RepresentationUtils::OperandSize(slot().representation()));
@@ -2555,17 +2555,17 @@
     if (FLAG_precompiled_mode) {
       switch (cid) {
         case kDoubleCid:
-          __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
+          __ Comment("UnboxedDoubleStoreFieldInstr");
           __ movsd(compiler::FieldAddress(instance_reg, offset_in_bytes),
                    value);
           return;
         case kFloat32x4Cid:
-          __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
+          __ Comment("UnboxedFloat32x4StoreFieldInstr");
           __ movups(compiler::FieldAddress(instance_reg, offset_in_bytes),
                     value);
           return;
         case kFloat64x2Cid:
-          __ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
+          __ Comment("UnboxedFloat64x2StoreFieldInstr");
           __ movups(compiler::FieldAddress(instance_reg, offset_in_bytes),
                     value);
           return;
@@ -2604,16 +2604,16 @@
     }
     switch (cid) {
       case kDoubleCid:
-        __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
+        __ Comment("UnboxedDoubleStoreFieldInstr");
         __ movsd(compiler::FieldAddress(temp, Double::value_offset()), value);
         break;
       case kFloat32x4Cid:
-        __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
+        __ Comment("UnboxedFloat32x4StoreFieldInstr");
         __ movups(compiler::FieldAddress(temp, Float32x4::value_offset()),
                   value);
         break;
       case kFloat64x2Cid:
-        __ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
+        __ Comment("UnboxedFloat64x2StoreFieldInstr");
         __ movups(compiler::FieldAddress(temp, Float64x2::value_offset()),
                   value);
         break;
diff --git a/runtime/vm/compiler/backend/inliner.cc b/runtime/vm/compiler/backend/inliner.cc
index 4de28c1..dd8279d 100644
--- a/runtime/vm/compiler/backend/inliner.cc
+++ b/runtime/vm/compiler/backend/inliner.cc
@@ -2938,8 +2938,8 @@
   (*entry)->InheritDeoptTarget(Z, call);
 
   // This is an internal method, no need to check argument types.
-  StoreInstanceFieldInstr* store = new (Z)
-      StoreInstanceFieldInstr(field, new (Z) Value(array), new (Z) Value(value),
+  StoreFieldInstr* store =
+      new (Z) StoreFieldInstr(field, new (Z) Value(array), new (Z) Value(value),
                               store_barrier_type, call->source());
   flow_graph->AppendTo(*entry, store, call->env(), FlowGraph::kEffect);
   *last = store;
diff --git a/runtime/vm/compiler/backend/linearscan.cc b/runtime/vm/compiler/backend/linearscan.cc
index 3597362..2187f8e 100644
--- a/runtime/vm/compiler/backend/linearscan.cc
+++ b/runtime/vm/compiler/backend/linearscan.cc
@@ -3209,7 +3209,7 @@
       // sense to make function frameless if it contains more than 1
       // write barrier invocation.
 #if defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_ARM)
-      if (auto store_field = instruction->AsStoreInstanceField()) {
+      if (auto store_field = instruction->AsStoreField()) {
         if (store_field->ShouldEmitStoreBarrier()) {
           if (has_write_barrier_call) {
             // We already have at least one write barrier call.
diff --git a/runtime/vm/compiler/backend/redundancy_elimination.cc b/runtime/vm/compiler/backend/redundancy_elimination.cc
index eb1b922..f9355fc 100644
--- a/runtime/vm/compiler/backend/redundancy_elimination.cc
+++ b/runtime/vm/compiler/backend/redundancy_elimination.cc
@@ -229,10 +229,10 @@
         break;
       }
 
-      case Instruction::kStoreInstanceField: {
-        StoreInstanceFieldInstr* store = instr->AsStoreInstanceField();
-        set_representation(store->RequiredInputRepresentation(
-            StoreInstanceFieldInstr::kValuePos));
+      case Instruction::kStoreField: {
+        StoreFieldInstr* store = instr->AsStoreField();
+        set_representation(
+            store->RequiredInputRepresentation(StoreFieldInstr::kValuePos));
         instance_ = store->instance()->definition()->OriginalDefinition();
         set_kind(kInstanceField);
         instance_field_ = &store->slot();
@@ -1120,13 +1120,12 @@
       } else if (UseIsARedefinition(use) &&
                  AnyUseCreatesAlias(instr->Cast<Definition>())) {
         return true;
-      } else if ((instr->IsStoreInstanceField() &&
-                  (use->use_index() !=
-                   StoreInstanceFieldInstr::kInstancePos))) {
-        ASSERT(use->use_index() == StoreInstanceFieldInstr::kValuePos);
+      } else if ((instr->IsStoreField() &&
+                  (use->use_index() != StoreFieldInstr::kInstancePos))) {
+        ASSERT(use->use_index() == StoreFieldInstr::kValuePos);
         // If we store this value into an object that is not aliased itself
         // and we never load again then the store does not create an alias.
-        StoreInstanceFieldInstr* store = instr->AsStoreInstanceField();
+        StoreFieldInstr* store = instr->AsStoreField();
         Definition* instance =
             store->instance()->definition()->OriginalDefinition();
         if (Place::IsAllocation(instance) &&
@@ -1148,7 +1147,7 @@
         return true;
       } else if (auto* const alloc = instr->AsAllocation()) {
         // Treat inputs to an allocation instruction exactly as if they were
-        // manually stored using a StoreInstanceField instruction.
+        // manually stored using a StoreField instruction.
         if (alloc->Identity().IsAliased()) {
           return true;
         }
@@ -1194,10 +1193,9 @@
         MarkStoredValuesEscaping(instr->AsDefinition());
         continue;
       }
-      if ((use->use_index() == StoreInstanceFieldInstr::kInstancePos) &&
-          instr->IsStoreInstanceField()) {
-        MarkDefinitionAsAliased(
-            instr->AsStoreInstanceField()->value()->definition());
+      if ((use->use_index() == StoreFieldInstr::kInstancePos) &&
+          instr->IsStoreField()) {
+        MarkDefinitionAsAliased(instr->AsStoreField()->value()->definition());
       }
     }
   }
@@ -1274,7 +1272,7 @@
     return instr->AsStoreIndexed()->value()->definition();
   }
 
-  StoreInstanceFieldInstr* store_instance_field = instr->AsStoreInstanceField();
+  StoreFieldInstr* store_instance_field = instr->AsStoreField();
   if (store_instance_field != NULL) {
     return store_instance_field->value()->definition();
   }
@@ -1538,7 +1536,7 @@
 // Returns true if instruction may have a "visible" effect,
 static bool MayHaveVisibleEffect(Instruction* instr) {
   switch (instr->tag()) {
-    case Instruction::kStoreInstanceField:
+    case Instruction::kStoreField:
     case Instruction::kStoreStaticField:
     case Instruction::kStoreIndexed:
     case Instruction::kStoreIndexedUnsafe:
@@ -1857,7 +1855,7 @@
   // Such a store doesn't initialize corresponding field.
   bool IsSentinelStore(Instruction* instr) {
     Value* value = nullptr;
-    if (auto* store_field = instr->AsStoreInstanceField()) {
+    if (auto* store_field = instr->AsStoreField()) {
       value = store_field->value();
     } else if (auto* store_static = instr->AsStoreStaticField()) {
       value = store_static->value();
@@ -2200,8 +2198,7 @@
             if (auto* const load = use->instruction()->AsLoadField()) {
               place_id = GetPlaceId(load);
               slot = &load->slot();
-            } else if (auto* const store =
-                           use->instruction()->AsStoreInstanceField()) {
+            } else if (auto* const store = use->instruction()->AsStoreField()) {
               ASSERT(!alloc->IsArrayAllocation());
               place_id = GetPlaceId(store);
               slot = &store->slot();
@@ -3042,8 +3039,8 @@
 
   bool CanEliminateStore(Instruction* instr) {
     switch (instr->tag()) {
-      case Instruction::kStoreInstanceField: {
-        StoreInstanceFieldInstr* store_instance = instr->AsStoreInstanceField();
+      case Instruction::kStoreField: {
+        StoreFieldInstr* store_instance = instr->AsStoreField();
         // Can't eliminate stores that initialize fields.
         return !store_instance->is_initialization();
       }
@@ -3261,7 +3258,7 @@
             alloc->Identity().IsAllocationSinkingCandidate());
   }
 
-  if (auto* store = use->instruction()->AsStoreInstanceField()) {
+  if (auto* store = use->instruction()->AsStoreField()) {
     if (use == store->value()) {
       Definition* instance = store->instance()->definition();
       return IsSupportedAllocation(instance) &&
@@ -3307,7 +3304,7 @@
 }
 
 // Right now we are attempting to sink allocation only into
-// deoptimization exit. So candidate should only be used in StoreInstanceField
+// deoptimization exit. So candidate should only be used in StoreField
 // instructions that write into fields of the allocated object.
 static bool IsAllocationSinkingCandidate(Definition* alloc,
                                          SafeUseCheck check_type) {
@@ -3331,7 +3328,7 @@
   if (auto* const alloc = use->instruction()->AsAllocation()) {
     return alloc;
   }
-  if (auto* const store = use->instruction()->AsStoreInstanceField()) {
+  if (auto* const store = use->instruction()->AsStoreField()) {
     return store->instance()->definition();
   }
   if (auto* const store = use->instruction()->AsStoreIndexed()) {
@@ -3365,7 +3362,7 @@
   // As an allocation sinking candidate, remove stores to this candidate.
   // Do this in a two-step process, as this allocation may be used multiple
   // times in a single instruction (e.g., as the instance and the value in
-  // a StoreInstanceField). This means multiple entries may be removed from the
+  // a StoreField). This means multiple entries may be removed from the
   // use list when removing instructions, not just the current one, so
   // Value::Iterator cannot be safely used.
   GrowableArray<Instruction*> stores_to_remove;
@@ -3564,7 +3561,7 @@
         } else {
           ASSERT(use->instruction()->IsMaterializeObject() ||
                  use->instruction()->IsPhi() ||
-                 use->instruction()->IsStoreInstanceField() ||
+                 use->instruction()->IsStoreField() ||
                  use->instruction()->IsStoreIndexed());
         }
       }
@@ -3884,7 +3881,7 @@
     if (StoreDestination(use) == alloc) {
       // Allocation instructions cannot be used in as inputs to themselves.
       ASSERT(!use->instruction()->AsAllocation());
-      if (auto store = use->instruction()->AsStoreInstanceField()) {
+      if (auto store = use->instruction()->AsStoreField()) {
         AddSlot(slots, store->slot());
       } else if (auto store = use->instruction()->AsStoreIndexed()) {
         const intptr_t index = store->index()->BoundSmiConstant();
diff --git a/runtime/vm/compiler/backend/redundancy_elimination_test.cc b/runtime/vm/compiler/backend/redundancy_elimination_test.cc
index 73b64c5..63276b1 100644
--- a/runtime/vm/compiler/backend/redundancy_elimination_test.cc
+++ b/runtime/vm/compiler/backend/redundancy_elimination_test.cc
@@ -446,8 +446,8 @@
         new AllocateObjectInstr(InstructionSource(), cls, S.GetNextDeoptId()));
     if (!make_host_escape) {
       builder.AddInstruction(
-          new StoreInstanceFieldInstr(slot, new Value(v5), new Value(v0),
-                                      kEmitStoreBarrier, InstructionSource()));
+          new StoreFieldInstr(slot, new Value(v5), new Value(v0),
+                              kEmitStoreBarrier, InstructionSource()));
     }
     v1 = builder.AddDefinition(
         new LoadFieldInstr(new Value(v0), slot, InstructionSource()));
@@ -460,8 +460,8 @@
       args->Add(new Value(v6));
     } else if (make_host_escape) {
       builder.AddInstruction(
-          new StoreInstanceFieldInstr(slot, new Value(v2), new Value(v0),
-                                      kEmitStoreBarrier, InstructionSource()));
+          new StoreFieldInstr(slot, new Value(v2), new Value(v0),
+                              kEmitStoreBarrier, InstructionSource()));
       args->Add(new Value(v5));
     }
     call = builder.AddInstruction(new StaticCallInstr(
@@ -857,7 +857,7 @@
          it.Advance()) {
       if (it.Current()->IsLoadField()) {
         (*loads)++;
-      } else if (it.Current()->IsStoreInstanceField()) {
+      } else if (it.Current()->IsStoreField()) {
         (*stores)++;
       }
     }
@@ -1275,8 +1275,8 @@
   StaticCallInstr* call1;
   StaticCallInstr* call2;
   AllocateObjectInstr* allocate;
-  StoreInstanceFieldInstr* store1;
-  StoreInstanceFieldInstr* store2;
+  StoreFieldInstr* store1;
+  StoreFieldInstr* store2;
 
   ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
   RELEASE_ASSERT(cursor.TryMatch({
@@ -1286,8 +1286,8 @@
       {kMatchAndMoveStaticCall, &call2},
       kMoveGlob,
       {kMatchAndMoveAllocateObject, &allocate},
-      {kMatchAndMoveStoreInstanceField, &store1},
-      {kMatchAndMoveStoreInstanceField, &store2},
+      {kMatchAndMoveStoreField, &store1},
+      {kMatchAndMoveStoreField, &store2},
   }));
 
   EXPECT(strcmp(call1->function().UserVisibleNameCString(), "foo") == 0);
diff --git a/runtime/vm/compiler/call_specializer.cc b/runtime/vm/compiler/call_specializer.cc
index 7831f36..9cf9d45 100644
--- a/runtime/vm/compiler/call_specializer.cc
+++ b/runtime/vm/compiler/call_specializer.cc
@@ -912,10 +912,10 @@
 
   // Field guard was detached.
   ASSERT(instr->FirstArgIndex() == 0);
-  StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr(
-      field, new (Z) Value(instr->ArgumentAt(0)),
-      new (Z) Value(instr->ArgumentAt(1)), kEmitStoreBarrier, instr->source(),
-      &flow_graph()->parsed_function());
+  StoreFieldInstr* store = new (Z)
+      StoreFieldInstr(field, new (Z) Value(instr->ArgumentAt(0)),
+                      new (Z) Value(instr->ArgumentAt(1)), kEmitStoreBarrier,
+                      instr->source(), &flow_graph()->parsed_function());
 
   // Discard the environment from the original instruction because the store
   // can't deoptimize.
diff --git a/runtime/vm/compiler/frontend/base_flow_graph_builder.cc b/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
index 31cf456..7cd28de 100644
--- a/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
+++ b/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
@@ -517,34 +517,31 @@
 Fragment BaseFlowGraphBuilder::StoreNativeField(
     TokenPosition position,
     const Slot& slot,
-    StoreInstanceFieldInstr::Kind
-        kind /* = StoreInstanceFieldInstr::Kind::kOther */,
+    StoreFieldInstr::Kind kind /* = StoreFieldInstr::Kind::kOther */,
     StoreBarrierType emit_store_barrier /* = kEmitStoreBarrier */,
     compiler::Assembler::MemoryOrder memory_order /* = kRelaxed */) {
   Value* value = Pop();
   if (value->BindsToConstant()) {
     emit_store_barrier = kNoStoreBarrier;
   }
-  StoreInstanceFieldInstr* store =
-      new (Z) StoreInstanceFieldInstr(slot, Pop(), value, emit_store_barrier,
-                                      InstructionSource(position), kind);
+  StoreFieldInstr* store =
+      new (Z) StoreFieldInstr(slot, Pop(), value, emit_store_barrier,
+                              InstructionSource(position), kind);
   return Fragment(store);
 }
 
-Fragment BaseFlowGraphBuilder::StoreInstanceField(
+Fragment BaseFlowGraphBuilder::StoreField(
     const Field& field,
-    StoreInstanceFieldInstr::Kind
-        kind /* = StoreInstanceFieldInstr::Kind::kOther */,
+    StoreFieldInstr::Kind kind /* = StoreFieldInstr::Kind::kOther */,
     StoreBarrierType emit_store_barrier) {
   return StoreNativeField(TokenPosition::kNoSource,
                           Slot::Get(MayCloneField(Z, field), parsed_function_),
                           kind, emit_store_barrier);
 }
 
-Fragment BaseFlowGraphBuilder::StoreInstanceFieldGuarded(
+Fragment BaseFlowGraphBuilder::StoreFieldGuarded(
     const Field& field,
-    StoreInstanceFieldInstr::Kind
-        kind /* = StoreInstanceFieldInstr::Kind::kOther */) {
+    StoreFieldInstr::Kind kind /* = StoreFieldInstr::Kind::kOther */) {
   Fragment instructions;
   const Field& field_clone = MayCloneField(Z, field);
   if (IG->use_field_guards()) {
diff --git a/runtime/vm/compiler/frontend/base_flow_graph_builder.h b/runtime/vm/compiler/frontend/base_flow_graph_builder.h
index 4c1d97e..a91212e 100644
--- a/runtime/vm/compiler/frontend/base_flow_graph_builder.h
+++ b/runtime/vm/compiler/frontend/base_flow_graph_builder.h
@@ -201,29 +201,26 @@
   Fragment StoreNativeField(
       TokenPosition position,
       const Slot& slot,
-      StoreInstanceFieldInstr::Kind kind =
-          StoreInstanceFieldInstr::Kind::kOther,
+      StoreFieldInstr::Kind kind = StoreFieldInstr::Kind::kOther,
       StoreBarrierType emit_store_barrier = kEmitStoreBarrier,
       compiler::Assembler::MemoryOrder memory_order =
           compiler::Assembler::kRelaxedNonAtomic);
   Fragment StoreNativeField(
       const Slot& slot,
-      StoreInstanceFieldInstr::Kind kind =
-          StoreInstanceFieldInstr::Kind::kOther,
+      StoreFieldInstr::Kind kind = StoreFieldInstr::Kind::kOther,
       StoreBarrierType emit_store_barrier = kEmitStoreBarrier,
       compiler::Assembler::MemoryOrder memory_order =
           compiler::Assembler::kRelaxedNonAtomic) {
     return StoreNativeField(TokenPosition::kNoSource, slot, kind,
                             emit_store_barrier, memory_order);
   }
-  Fragment StoreInstanceField(
+  Fragment StoreField(
       const Field& field,
-      StoreInstanceFieldInstr::Kind kind =
-          StoreInstanceFieldInstr::Kind::kOther,
+      StoreFieldInstr::Kind kind = StoreFieldInstr::Kind::kOther,
       StoreBarrierType emit_store_barrier = kEmitStoreBarrier);
-  Fragment StoreInstanceFieldGuarded(const Field& field,
-                                     StoreInstanceFieldInstr::Kind kind =
-                                         StoreInstanceFieldInstr::Kind::kOther);
+  Fragment StoreFieldGuarded(
+      const Field& field,
+      StoreFieldInstr::Kind kind = StoreFieldInstr::Kind::kOther);
   Fragment LoadStaticField(const Field& field, bool calls_initializer);
   Fragment RedefinitionWithType(const AbstractType& type);
   Fragment ReachabilityFence();
diff --git a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
index c6f1d55..921a465 100644
--- a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
+++ b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
@@ -173,8 +173,8 @@
   if (only_for_side_effects) {
     instructions += Drop();
   } else {
-    instructions += flow_graph_builder_->StoreInstanceFieldGuarded(
-        field, StoreInstanceFieldInstr::Kind::kInitializing);
+    instructions += flow_graph_builder_->StoreFieldGuarded(
+        field, StoreFieldInstr::Kind::kInitializing);
   }
   return instructions;
 }
@@ -196,8 +196,8 @@
   Fragment instructions;
   instructions += LoadLocal(parsed_function()->receiver_var());
   instructions += flow_graph_builder_->Constant(Object::sentinel());
-  instructions += flow_graph_builder_->StoreInstanceField(
-      field, StoreInstanceFieldInstr::Kind::kInitializing);
+  instructions += flow_graph_builder_->StoreField(
+      field, StoreFieldInstr::Kind::kInitializing);
   return instructions;
 }
 
@@ -260,7 +260,7 @@
   if (!is_redirecting_constructor) {
     // Sort list of fields (represented as their kernel offsets) which will
     // be initialized by the constructor initializer list. We will not emit
-    // StoreInstanceField instructions for those initializers though we will
+    // StoreField instructions for those initializers though we will
     // still evaluate initialization expression for its side effects.
     GrowableArray<intptr_t> constructor_initialized_field_offsets(
         initializer_fields.length());
@@ -548,7 +548,7 @@
         body += LoadLocal(&raw_parameter);
         body += flow_graph_builder_->StoreNativeField(
             Slot::GetContextVariableSlotFor(thread(), *variable),
-            StoreInstanceFieldInstr::Kind::kInitializing);
+            StoreFieldInstr::Kind::kInitializing);
       }
     }
     body += Drop();  // The context.
@@ -4196,7 +4196,7 @@
   instructions += LoadLocal(type_args_vec);
   instructions += flow_graph_builder_->StoreNativeField(
       Slot::Closure_delayed_type_arguments(),
-      StoreInstanceFieldInstr::Kind::kInitializing);
+      StoreFieldInstr::Kind::kInitializing);
   instructions += DropTemporary(&type_args_vec);
 
   // Copy over the instantiator type arguments.
@@ -4206,7 +4206,7 @@
       Slot::Closure_instantiator_type_arguments());
   instructions += flow_graph_builder_->StoreNativeField(
       Slot::Closure_instantiator_type_arguments(),
-      StoreInstanceFieldInstr::Kind::kInitializing);
+      StoreFieldInstr::Kind::kInitializing);
 
   // Copy over the function type arguments.
   instructions += LoadLocal(new_closure);
@@ -4215,7 +4215,7 @@
       Slot::Closure_function_type_arguments());
   instructions += flow_graph_builder_->StoreNativeField(
       Slot::Closure_function_type_arguments(),
-      StoreInstanceFieldInstr::Kind::kInitializing);
+      StoreFieldInstr::Kind::kInitializing);
 
   instructions += DropTempsPreserveTop(1);  // Drop old closure.
 
@@ -5258,7 +5258,7 @@
     } else {
       field = IG->object_store()->sync_star_iterator_current();
     }
-    instructions += B->StoreInstanceFieldGuarded(field);
+    instructions += B->StoreFieldGuarded(field);
     instructions += B->Constant(Bool::True());
     instructions +=
         B->Suspend(pos, SuspendInstr::StubId::kSuspendSyncStarAtYield);
@@ -5506,7 +5506,7 @@
     instructions += LoadInstantiatorTypeArguments();
     instructions += flow_graph_builder_->StoreNativeField(
         Slot::Closure_instantiator_type_arguments(),
-        StoreInstanceFieldInstr::Kind::kInitializing);
+        StoreFieldInstr::Kind::kInitializing);
   }
 
   // TODO(30455): We only need to save these if the closure uses any captured
@@ -5515,7 +5515,7 @@
   instructions += LoadFunctionTypeArguments();
   instructions += flow_graph_builder_->StoreNativeField(
       Slot::Closure_function_type_arguments(),
-      StoreInstanceFieldInstr::Kind::kInitializing);
+      StoreFieldInstr::Kind::kInitializing);
 
   if (function.IsGeneric()) {
     // Only generic functions need to have properly initialized
@@ -5524,7 +5524,7 @@
     instructions += Constant(Object::empty_type_arguments());
     instructions += flow_graph_builder_->StoreNativeField(
         Slot::Closure_delayed_type_arguments(),
-        StoreInstanceFieldInstr::Kind::kInitializing);
+        StoreFieldInstr::Kind::kInitializing);
   }
 
   return instructions;
diff --git a/runtime/vm/compiler/frontend/kernel_to_il.cc b/runtime/vm/compiler/frontend/kernel_to_il.cc
index 69abb4e..1e3707e 100644
--- a/runtime/vm/compiler/frontend/kernel_to_il.cc
+++ b/runtime/vm/compiler/frontend/kernel_to_il.cc
@@ -138,8 +138,8 @@
   LocalVariable* context = MakeTemporary();
   instructions += LoadLocal(context);
   instructions += LoadLocal(parsed_function_->current_context_var());
-  instructions += StoreNativeField(
-      Slot::Context_parent(), StoreInstanceFieldInstr::Kind::kInitializing);
+  instructions += StoreNativeField(Slot::Context_parent(),
+                                   StoreFieldInstr::Kind::kInitializing);
   instructions += StoreLocal(TokenPosition::kNoSource,
                              parsed_function_->current_context_var());
   ++context_depth_;
@@ -553,7 +553,7 @@
   if (is_static) {
     instructions += StoreStaticField(position, field);
   } else {
-    instructions += StoreInstanceFieldGuarded(field);
+    instructions += StoreFieldGuarded(field);
   }
 
   return instructions;
@@ -1212,12 +1212,12 @@
       body += LoadLocal(object);
       body += LoadLocal(parsed_function_->RawParameterVariable(1));
       body += StoreNativeField(Slot::GrowableObjectArray_data(),
-                               StoreInstanceFieldInstr::Kind::kInitializing,
+                               StoreFieldInstr::Kind::kInitializing,
                                kNoStoreBarrier);
       body += LoadLocal(object);
       body += IntConstant(0);
       body += StoreNativeField(Slot::GrowableObjectArray_length(),
-                               StoreInstanceFieldInstr::Kind::kInitializing,
+                               StoreFieldInstr::Kind::kInitializing,
                                kNoStoreBarrier);
       break;
     }
@@ -1312,10 +1312,9 @@
       body += LoadLocal(parsed_function_->RawParameterVariable(1));
       // Uses a store-release barrier so that other isolates will see the
       // contents of the index after seeing the index itself.
-      body +=
-          StoreNativeField(Slot::ImmutableLinkedHashBase_index(),
-                           StoreInstanceFieldInstr::Kind::kOther,
-                           kEmitStoreBarrier, compiler::Assembler::kRelease);
+      body += StoreNativeField(Slot::ImmutableLinkedHashBase_index(),
+                               StoreFieldInstr::Kind::kOther, kEmitStoreBarrier,
+                               compiler::Assembler::kRelease);
       body += NullConstant();
       break;
     case MethodRecognizer::kUtf8DecoderScan:
@@ -1580,7 +1579,7 @@
       body += LoadLocal(typed_data_object);
       body += LoadLocal(arg_length);
       body += StoreNativeField(Slot::TypedDataBase_length(),
-                               StoreInstanceFieldInstr::Kind::kInitializing,
+                               StoreFieldInstr::Kind::kInitializing,
                                kNoStoreBarrier);
 
       // Initialize the result's data pointer field.
@@ -1589,7 +1588,7 @@
       body += LoadUntagged(compiler::target::PointerBase::data_offset());
       body += ConvertUntaggedToUnboxed(kUnboxedIntPtr);
       body += StoreNativeField(Slot::PointerBase_data(),
-                               StoreInstanceFieldInstr::Kind::kInitializing,
+                               StoreFieldInstr::Kind::kInitializing,
                                kNoStoreBarrier);
     } break;
     case MethodRecognizer::kGetNativeField: {
@@ -1738,8 +1737,8 @@
     ASSERT_EQUAL(function.NumParameters(), 2);                                 \
     body += LoadLocal(parsed_function_->RawParameterVariable(0));              \
     body += LoadLocal(parsed_function_->RawParameterVariable(1));              \
-    body += StoreNativeField(                                                  \
-        Slot::slot(), StoreInstanceFieldInstr::Kind::kOther, kNoStoreBarrier); \
+    body += StoreNativeField(Slot::slot(), StoreFieldInstr::Kind::kOther,      \
+                             kNoStoreBarrier);                                 \
     body += NullConstant();                                                    \
     break;
       STORE_NATIVE_FIELD_NO_BARRIER(IL_BODY)
@@ -1781,19 +1780,19 @@
   body += LoadLocal(view_object);
   body += LoadLocal(typed_data);
   body += StoreNativeField(token_pos, Slot::TypedDataView_typed_data(),
-                           StoreInstanceFieldInstr::Kind::kInitializing);
+                           StoreFieldInstr::Kind::kInitializing);
 
   body += LoadLocal(view_object);
   body += LoadLocal(offset_in_bytes);
-  body += StoreNativeField(token_pos, Slot::TypedDataView_offset_in_bytes(),
-                           StoreInstanceFieldInstr::Kind::kInitializing,
-                           kNoStoreBarrier);
+  body +=
+      StoreNativeField(token_pos, Slot::TypedDataView_offset_in_bytes(),
+                       StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
 
   body += LoadLocal(view_object);
   body += LoadLocal(length);
-  body += StoreNativeField(token_pos, Slot::TypedDataBase_length(),
-                           StoreInstanceFieldInstr::Kind::kInitializing,
-                           kNoStoreBarrier);
+  body +=
+      StoreNativeField(token_pos, Slot::TypedDataBase_length(),
+                       StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
 
   // Update the inner pointer.
   //
@@ -1870,7 +1869,7 @@
   fragment += StoreNativeField(
       Slot::GetContextVariableSlotFor(
           thread_, *implicit_closure_scope->context_variables()[0]),
-      StoreInstanceFieldInstr::Kind::kInitializing);
+      StoreFieldInstr::Kind::kInitializing);
 
   fragment += AllocateClosure();
   LocalVariable* closure = MakeTemporary();
@@ -1880,7 +1879,7 @@
     fragment += LoadLocal(closure);
     fragment += LoadInstantiatorTypeArguments();
     fragment += StoreNativeField(Slot::Closure_instantiator_type_arguments(),
-                                 StoreInstanceFieldInstr::Kind::kInitializing);
+                                 StoreFieldInstr::Kind::kInitializing);
   }
 
   if (target.IsGeneric()) {
@@ -1889,7 +1888,7 @@
     fragment += LoadLocal(closure);
     fragment += Constant(Object::empty_type_arguments());
     fragment += StoreNativeField(Slot::Closure_delayed_type_arguments(),
-                                 StoreInstanceFieldInstr::Kind::kInitializing);
+                                 StoreFieldInstr::Kind::kInitializing);
   }
 
   return fragment;
@@ -3801,8 +3800,7 @@
           setter_value);
     } else {
       if (is_method) {
-        body += StoreInstanceFieldGuarded(
-            field, StoreInstanceFieldInstr::Kind::kOther);
+        body += StoreFieldGuarded(field, StoreFieldInstr::Kind::kOther);
       } else {
         body += StoreStaticField(TokenPosition::kNoSource, field);
       }
@@ -4126,15 +4124,15 @@
 
   body += LoadLocal(error_instance);
   body += LoadLocal(CurrentException());
-  body += StoreNativeField(Slot::UnhandledException_exception(),
-                           StoreInstanceFieldInstr::Kind::kInitializing,
-                           kNoStoreBarrier);
+  body +=
+      StoreNativeField(Slot::UnhandledException_exception(),
+                       StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
 
   body += LoadLocal(error_instance);
   body += LoadLocal(CurrentStackTrace());
-  body += StoreNativeField(Slot::UnhandledException_stacktrace(),
-                           StoreInstanceFieldInstr::Kind::kInitializing,
-                           kNoStoreBarrier);
+  body +=
+      StoreNativeField(Slot::UnhandledException_stacktrace(),
+                       StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
 
   return body;
 }
@@ -4289,8 +4287,8 @@
   body += AllocateObject(TokenPosition::kNoSource, compound_sub_class, 0);
   body += LoadLocal(MakeTemporary("compound"));  // Duplicate Struct or Union.
   body += LoadLocal(typed_data);
-  body += StoreInstanceField(compound_typed_data_base,
-                             StoreInstanceFieldInstr::Kind::kInitializing);
+  body += StoreField(compound_typed_data_base,
+                     StoreFieldInstr::Kind::kInitializing);
   body += DropTempsPreserveTop(1);  // Drop TypedData.
   return body;
 }
diff --git a/runtime/vm/compiler/graph_intrinsifier.cc b/runtime/vm/compiler/graph_intrinsifier.cc
index 9765e4e..4f4963b 100644
--- a/runtime/vm/compiler/graph_intrinsifier.cc
+++ b/runtime/vm/compiler/graph_intrinsifier.cc
@@ -872,7 +872,7 @@
   builder.AddInstruction(new CheckClassInstr(new Value(data), DeoptId::kNone,
                                              *value_check, builder.Source()));
 
-  builder.AddInstruction(new StoreInstanceFieldInstr(
+  builder.AddInstruction(new StoreFieldInstr(
       Slot::GrowableObjectArray_data(), new Value(growable_array),
       new Value(data), kEmitStoreBarrier, builder.Source()));
   // Return null.
@@ -894,7 +894,7 @@
 
   builder.AddInstruction(
       new CheckSmiInstr(new Value(length), DeoptId::kNone, builder.Source()));
-  builder.AddInstruction(new StoreInstanceFieldInstr(
+  builder.AddInstruction(new StoreFieldInstr(
       Slot::GrowableObjectArray_length(), new Value(growable_array),
       new Value(length), kNoStoreBarrier, builder.Source()));
   Definition* null_def = builder.AddNullDefinition();
@@ -1105,7 +1105,7 @@
                               /*is_checked=*/true);
   }
 
-  builder.AddInstruction(new (zone) StoreInstanceFieldInstr(
+  builder.AddInstruction(new (zone) StoreFieldInstr(
       slot, new (zone) Value(receiver), new (zone) Value(value), barrier_mode,
       builder.Source()));
 
diff --git a/runtime/vm/compiler/jit/jit_call_specializer.cc b/runtime/vm/compiler/jit/jit_call_specializer.cc
index 3afc611..da4e0fc 100644
--- a/runtime/vm/compiler/jit/jit_call_specializer.cc
+++ b/runtime/vm/compiler/jit/jit_call_specializer.cc
@@ -190,10 +190,9 @@
   } else {
     initial_value = new (Z) Value(flow_graph()->constant_null());
   }
-  StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr(
+  StoreFieldInstr* store = new (Z) StoreFieldInstr(
       Slot::Context_parent(), new (Z) Value(replacement), initial_value,
-      kNoStoreBarrier, alloc->source(),
-      StoreInstanceFieldInstr::Kind::kInitializing);
+      kNoStoreBarrier, alloc->source(), StoreFieldInstr::Kind::kInitializing);
   flow_graph()->InsertAfter(cursor, store, nullptr, FlowGraph::kEffect);
   cursor = replacement;
 
@@ -208,9 +207,9 @@
       initial_value = new (Z) Value(flow_graph()->constant_null());
     }
 
-    store = new (Z) StoreInstanceFieldInstr(
+    store = new (Z) StoreFieldInstr(
         *slot, new (Z) Value(replacement), initial_value, kNoStoreBarrier,
-        alloc->source(), StoreInstanceFieldInstr::Kind::kInitializing);
+        alloc->source(), StoreFieldInstr::Kind::kInitializing);
     flow_graph()->InsertAfter(cursor, store, nullptr, FlowGraph::kEffect);
     cursor = store;
   }
diff --git a/runtime/vm/compiler/write_barrier_elimination.cc b/runtime/vm/compiler/write_barrier_elimination.cc
index 86f58bd..89fcb74 100644
--- a/runtime/vm/compiler/write_barrier_elimination.cc
+++ b/runtime/vm/compiler/write_barrier_elimination.cc
@@ -44,7 +44,7 @@
 // This optimization removes write barriers from some store instructions under
 // certain assumptions which the runtime is responsible to sustain.
 //
-// We can skip a write barrier on a StoreInstanceField to a container object X
+// We can skip a write barrier on a StoreField to a container object X
 // if we know that either:
 //   - X is in new-space, or
 //   - X is in old-space, and:
@@ -385,7 +385,7 @@
     Instruction* const current = it.Current();
 
     if (finalize) {
-      if (StoreInstanceFieldInstr* instr = current->AsStoreInstanceField()) {
+      if (StoreFieldInstr* instr = current->AsStoreField()) {
         Definition* const container = instr->instance()->definition();
         if (IsUsable(container) && vector_->Contains(Index(container))) {
           DEBUG_ASSERT(SlotEligibleForWBE(instr->slot()));
diff --git a/runtime/vm/compiler/write_barrier_elimination_test.cc b/runtime/vm/compiler/write_barrier_elimination_test.cc
index 284d112..1e1e490 100644
--- a/runtime/vm/compiler/write_barrier_elimination_test.cc
+++ b/runtime/vm/compiler/write_barrier_elimination_test.cc
@@ -63,8 +63,8 @@
   auto entry = flow_graph->graph_entry()->normal_entry();
   EXPECT(entry != nullptr);
 
-  StoreInstanceFieldInstr* store1 = nullptr;
-  StoreInstanceFieldInstr* store2 = nullptr;
+  StoreFieldInstr* store1 = nullptr;
+  StoreFieldInstr* store2 = nullptr;
 
   ILMatcher cursor(flow_graph, entry);
   RELEASE_ASSERT(cursor.TryMatch({
@@ -73,9 +73,9 @@
       kMoveGlob,
       kMatchAndMoveBranchTrue,
       kMoveGlob,
-      {kMatchAndMoveStoreInstanceField, &store1},
+      {kMatchAndMoveStoreField, &store1},
       kMoveGlob,
-      {kMatchAndMoveStoreInstanceField, &store2},
+      {kMatchAndMoveStoreField, &store2},
   }));
 
   EXPECT(store1->ShouldEmitStoreBarrier() == false);
@@ -123,7 +123,7 @@
   auto entry = flow_graph->graph_entry()->normal_entry();
   EXPECT(entry != nullptr);
 
-  StoreInstanceFieldInstr* store = nullptr;
+  StoreFieldInstr* store = nullptr;
 
   ILMatcher cursor(flow_graph, entry);
   RELEASE_ASSERT(cursor.TryMatch({
@@ -132,7 +132,7 @@
       kMoveGlob,
       kMatchAndMoveGoto,
       kMoveGlob,
-      {kMatchAndMoveStoreInstanceField, &store},
+      {kMatchAndMoveStoreField, &store},
   }));
 
   EXPECT(store->ShouldEmitStoreBarrier() == true);
@@ -190,7 +190,7 @@
   auto entry = flow_graph->graph_entry()->normal_entry();
   EXPECT(entry != nullptr);
 
-  StoreInstanceFieldInstr* store_into_c = nullptr;
+  StoreFieldInstr* store_into_c = nullptr;
   StoreIndexedInstr* store_into_array_before_loop = nullptr;
   StoreIndexedInstr* store_into_array_after_loop = nullptr;
 
@@ -203,7 +203,7 @@
       kMoveGlob,
       kMatchAndMoveBranchTrue,
       kMoveGlob,
-      {kMatchAndMoveStoreInstanceField, &store_into_c},
+      {kMatchAndMoveStoreField, &store_into_c},
       kMoveGlob,
       kMatchAndMoveGoto,
       kMoveGlob,
@@ -302,16 +302,16 @@
   auto entry = flow_graph->graph_entry()->normal_entry();
   EXPECT(entry != nullptr);
 
-  StoreInstanceFieldInstr* store1 = nullptr;
-  StoreInstanceFieldInstr* store2 = nullptr;
+  StoreFieldInstr* store1 = nullptr;
+  StoreFieldInstr* store2 = nullptr;
 
   ILMatcher cursor(flow_graph, entry);
   RELEASE_ASSERT(cursor.TryMatch(
       {
           kMatchAndMoveAllocateObject,
           kMatchAndMoveLoadField,
-          {kMatchAndMoveStoreInstanceField, &store1},
-          {kMatchAndMoveStoreInstanceField, &store2},
+          {kMatchAndMoveStoreField, &store1},
+          {kMatchAndMoveStoreField, &store2},
       },
       kMoveGlob));
 
@@ -353,16 +353,16 @@
   auto entry = flow_graph->graph_entry()->normal_entry();
   EXPECT(entry != nullptr);
 
-  StoreInstanceFieldInstr* store1 = nullptr;
-  StoreInstanceFieldInstr* store2 = nullptr;
+  StoreFieldInstr* store1 = nullptr;
+  StoreFieldInstr* store2 = nullptr;
 
   ILMatcher cursor(flow_graph, entry);
   RELEASE_ASSERT(cursor.TryMatch(
       {
           kMatchAndMoveAllocateObject,
           kMatchAndMoveLoadStaticField,
-          {kMatchAndMoveStoreInstanceField, &store1},
-          {kMatchAndMoveStoreInstanceField, &store2},
+          {kMatchAndMoveStoreField, &store1},
+          {kMatchAndMoveStoreField, &store2},
       },
       kMoveGlob));
 
diff --git a/runtime/vm/dart_api_impl.cc b/runtime/vm/dart_api_impl.cc
index 30e4812..665cdb8 100644
--- a/runtime/vm/dart_api_impl.cc
+++ b/runtime/vm/dart_api_impl.cc
@@ -1829,6 +1829,14 @@
   // caches.
 }
 
+DART_EXPORT Dart_PerformanceMode
+Dart_SetPerformanceMode(Dart_PerformanceMode mode) {
+  Thread* T = Thread::Current();
+  CHECK_ISOLATE(T->isolate());
+  TransitionNativeToVM transition(T);
+  return T->heap()->SetMode(mode);
+}
+
 DART_EXPORT void Dart_ExitIsolate() {
   Thread* T = Thread::Current();
   CHECK_ISOLATE(T->isolate());
diff --git a/runtime/vm/dart_api_impl_test.cc b/runtime/vm/dart_api_impl_test.cc
index 5230ed8..3cbeb29 100644
--- a/runtime/vm/dart_api_impl_test.cc
+++ b/runtime/vm/dart_api_impl_test.cc
@@ -9685,6 +9685,50 @@
   EXPECT_VALID(result);
 }
 
+static void SetPerformanceModeDefault(Dart_NativeArguments args) {
+  Dart_SetPerformanceMode(Dart_PerformanceMode_Default);
+}
+static void SetPerformanceModeLatency(Dart_NativeArguments args) {
+  Dart_SetPerformanceMode(Dart_PerformanceMode_Latency);
+}
+
+static Dart_NativeFunction SetMode_native_lookup(Dart_Handle name,
+                                                 int argument_count,
+                                                 bool* auto_setup_scope) {
+  const char* cstr = nullptr;
+  Dart_StringToCString(name, &cstr);
+  if (strcmp(cstr, "SetPerformanceModeDefault") == 0) {
+    return SetPerformanceModeDefault;
+  } else if (strcmp(cstr, "SetPerformanceModeLatency") == 0) {
+    return SetPerformanceModeLatency;
+  }
+  return NULL;
+}
+
+TEST_CASE(DartAPI_SetPerformanceMode) {
+  const char* kScriptChars = R"(
+import "dart:typed_data";
+@pragma("vm:external-name", "SetPerformanceModeDefault")
+external void setPerformanceModeDefault();
+@pragma("vm:external-name", "SetPerformanceModeLatency")
+external void setPerformanceModeLatency();
+void main() {
+  for (var i = 0; i < 10; i++) {
+    setPerformanceModeLatency();
+    var t = [];
+    for (var j = 0; j < 32; j++) {
+      t.add(Uint8List(1000000));
+    }
+    setPerformanceModeDefault();
+  }
+}
+)";
+  Dart_Handle lib =
+      TestCase::LoadTestScript(kScriptChars, &SetMode_native_lookup);
+  Dart_Handle result = Dart_Invoke(lib, NewString("main"), 0, NULL);
+  EXPECT_VALID(result);
+}
+
 static void NotifyLowMemoryNative(Dart_NativeArguments args) {
   Dart_NotifyLowMemory();
 }
diff --git a/runtime/vm/heap/heap.cc b/runtime/vm/heap/heap.cc
index c218c50..8704c82 100644
--- a/runtime/vm/heap/heap.cc
+++ b/runtime/vm/heap/heap.cc
@@ -180,6 +180,11 @@
   ASSERT(thread->no_safepoint_scope_depth() == 0);
   ASSERT(thread->no_callback_scope_depth() == 0);
   ASSERT(!thread->force_growth());
+
+  if (mode_ == Dart_PerformanceMode_Latency) {
+    return;
+  }
+
   if (new_space_.ExternalInWords() >= (4 * new_space_.CapacityInWords())) {
     // Attempt to free some external allocation by a scavenge. (If the total
     // remains above the limit, next external alloc will trigger another.)
@@ -426,6 +431,15 @@
   }
 }
 
+Dart_PerformanceMode Heap::SetMode(Dart_PerformanceMode new_mode) {
+  Dart_PerformanceMode old_mode = mode_.exchange(new_mode);
+  if ((old_mode == Dart_PerformanceMode_Latency) &&
+      (new_mode == Dart_PerformanceMode_Default)) {
+    CheckCatchUp(Thread::Current());
+  }
+  return old_mode;
+}
+
 void Heap::CollectNewSpaceGarbage(Thread* thread,
                                   GCType type,
                                   GCReason reason) {
@@ -564,6 +578,15 @@
   WaitForSweeperTasks(thread);
 }
 
+void Heap::CheckCatchUp(Thread* thread) {
+  ASSERT(thread->CanCollectGarbage());
+  if (old_space()->ReachedHardThreshold()) {
+    CollectGarbage(thread, GCType::kMarkSweep, GCReason::kCatchUp);
+  } else {
+    CheckConcurrentMarking(thread, GCReason::kCatchUp, 0);
+  }
+}
+
 void Heap::CheckConcurrentMarking(Thread* thread,
                                   GCReason reason,
                                   intptr_t size) {
@@ -859,6 +882,8 @@
       return "idle";
     case GCReason::kDebugging:
       return "debugging";
+    case GCReason::kCatchUp:
+      return "catch-up";
     default:
       UNREACHABLE();
       return "";
diff --git a/runtime/vm/heap/heap.h b/runtime/vm/heap/heap.h
index 3408cb2..9c1286f 100644
--- a/runtime/vm/heap/heap.h
+++ b/runtime/vm/heap/heap.h
@@ -107,6 +107,9 @@
 
   void NotifyIdle(int64_t deadline);
 
+  Dart_PerformanceMode mode() const { return mode_; }
+  Dart_PerformanceMode SetMode(Dart_PerformanceMode mode);
+
   // Collect a single generation.
   void CollectGarbage(Thread* thread, GCType type, GCReason reason);
 
@@ -125,6 +128,7 @@
   void CollectAllGarbage(GCReason reason = GCReason::kFull,
                          bool compact = false);
 
+  void CheckCatchUp(Thread* thread);
   void CheckConcurrentMarking(Thread* thread, GCReason reason, intptr_t size);
   void StartConcurrentMarking(Thread* thread, GCReason reason);
   void WaitForMarkerTasks(Thread* thread);
@@ -362,6 +366,8 @@
   // GC stats collection.
   GCStats stats_;
 
+  RelaxedAtomic<Dart_PerformanceMode> mode_ = {Dart_PerformanceMode_Default};
+
   // This heap is in read-only mode: No allocation is allowed.
   bool read_only_;
 
diff --git a/runtime/vm/heap/pages.cc b/runtime/vm/heap/pages.cc
index e207185..11fca3d 100644
--- a/runtime/vm/heap/pages.cc
+++ b/runtime/vm/heap/pages.cc
@@ -1524,6 +1524,9 @@
   if (heap_growth_ratio_ == 100) {
     return false;
   }
+  if ((heap_ != nullptr) && (heap_->mode() == Dart_PerformanceMode_Latency)) {
+    return false;
+  }
   return after.CombinedUsedInWords() > hard_gc_threshold_in_words_;
 }
 
@@ -1531,6 +1534,9 @@
   if (heap_growth_ratio_ == 100) {
     return false;
   }
+  if ((heap_ != nullptr) && (heap_->mode() == Dart_PerformanceMode_Latency)) {
+    return false;
+  }
   return after.CombinedUsedInWords() > soft_gc_threshold_in_words_;
 }
 
diff --git a/runtime/vm/heap/safepoint.cc b/runtime/vm/heap/safepoint.cc
index 3853e9c..2eafbc9 100644
--- a/runtime/vm/heap/safepoint.cc
+++ b/runtime/vm/heap/safepoint.cc
@@ -54,14 +54,8 @@
 
   T->DecrementForceGrowthScopeDepth();
   if (!T->force_growth()) {
-    ASSERT(T->CanCollectGarbage());
     // Check if we passed the growth limit during the scope.
-    Heap* heap = T->heap();
-    if (heap->old_space()->ReachedHardThreshold()) {
-      heap->CollectGarbage(T, GCType::kMarkSweep, GCReason::kOldSpace);
-    } else {
-      heap->CheckConcurrentMarking(T, GCReason::kOldSpace, 0);
-    }
+    T->heap()->CheckCatchUp(T);
   }
 }
 
diff --git a/runtime/vm/heap/spaces.h b/runtime/vm/heap/spaces.h
index 2b9c00f..50d6b81 100644
--- a/runtime/vm/heap/spaces.h
+++ b/runtime/vm/heap/spaces.h
@@ -47,6 +47,7 @@
   kExternal,     // Dart_NewFinalizableHandle Dart_NewWeakPersistentHandle
   kIdle,         // Dart_NotifyIdle
   kDebugging,    // service request, etc.
+  kCatchUp,      // End of ForceGrowthScope or Dart_PerformanceMode_Latency.
 };
 
 }  // namespace dart
diff --git a/runtime/vm/kernel_loader.cc b/runtime/vm/kernel_loader.cc
index a7407c5..67834c1 100644
--- a/runtime/vm/kernel_loader.cc
+++ b/runtime/vm/kernel_loader.cc
@@ -2194,7 +2194,7 @@
         return converter.SimpleValue().ptr();
       } else {
         // Note: optimizer relies on DoubleInitialized bit in its field-unboxing
-        // heuristics. See JitCallSpecializer::VisitStoreInstanceField for more
+        // heuristics. See JitCallSpecializer::VisitStoreField for more
         // details.
         field.RecordStore(converter.SimpleValue());
         if (!converter.SimpleValue().IsNull() &&
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index 28feedf..dea1cd7 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -4482,7 +4482,7 @@
                             const Object& owner,
                             TokenPosition token_pos,
                             TokenPosition end_token_pos);
-  friend class StoreInstanceFieldInstr;  // Generated code access to bit field.
+  friend class StoreFieldInstr;  // Generated code access to bit field.
 
   enum {
     kConstBit = 0,
diff --git a/runtime/vm/raw_object.h b/runtime/vm/raw_object.h
index 7acfa30..4050124 100644
--- a/runtime/vm/raw_object.h
+++ b/runtime/vm/raw_object.h
@@ -1501,7 +1501,7 @@
   friend class CidRewriteVisitor;
   friend class GuardFieldClassInstr;     // For sizeof(guarded_cid_/...)
   friend class LoadFieldInstr;           // For sizeof(guarded_cid_/...)
-  friend class StoreInstanceFieldInstr;  // For sizeof(guarded_cid_/...)
+  friend class StoreFieldInstr;          // For sizeof(guarded_cid_/...)
 };
 
 class alignas(8) UntaggedScript : public UntaggedObject {
diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn
index 087a32d..63ff6ff 100644
--- a/sdk/BUILD.gn
+++ b/sdk/BUILD.gn
@@ -153,6 +153,7 @@
   "mirrors",
   "svg",
   "typed_data",
+  "wasm",
   "web_audio",
   "web_gl",
   "web_sql",
diff --git a/sdk/lib/wasm/wasm_sources.gni b/sdk/lib/wasm/wasm_sources.gni
new file mode 100644
index 0000000..8530e23
--- /dev/null
+++ b/sdk/lib/wasm/wasm_sources.gni
@@ -0,0 +1,5 @@
+# Copyright (c) 2022, the Dart project authors.  Please see the AUTHORS file
+# for details. All rights reserved. Use of this source code is governed by a
+# BSD-style license that can be found in the LICENSE file.
+
+wasm_sdk_sources = [ "wasm_types.dart" ]
diff --git a/tools/VERSION b/tools/VERSION
index fa4b10e..15ecf51 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
 MAJOR 2
 MINOR 19
 PATCH 0
-PRERELEASE 42
+PRERELEASE 43
 PRERELEASE_PATCH 0
\ No newline at end of file