Version 2.17.0-74.0.dev

Merge commit '6f9289ca3050b7932ef2ac0cbc57e46eeb217a92' into 'dev'
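
The change below relaxes the DEBUG-only check emitted after barrier-less stores: instead of asserting that no write barrier at all would have been needed, each assembler now asserts only that the generational barrier (a store-buffer update) was unnecessary. The incremental-marking half of the check is dropped because a constant 'value' may legitimately be unmarked; it is still reachable through a constant pool, so it need not be traced via 'object'. The sketch below is a rough C-level reading of the assembler sequence (see the lbu/srli/and_/andi/beqz lines in the RISC-V hunks), not code from the SDK; the function and parameter names are illustrative.

    #include <cstdint>

    // Illustrative reading of the relaxed DEBUG check. The shift and mask
    // parameters stand in for target::UntaggedObject::kBarrierOverlapShift and
    // target::UntaggedObject::kGenerationalBarrierMask from this change.
    bool StoreBufferUpdateWasRequired(uint32_t object_tags,
                                      uint32_t value_tags,
                                      uint32_t barrier_overlap_shift,
                                      uint32_t generational_barrier_mask) {
      // The object's barrier bits line up with the value's tag bits after the
      // shift. Masking with only the generational mask ignores the
      // incremental-marking bit, matching the comment added in each assembler.
      const uint32_t overlap = (object_tags >> barrier_overlap_shift) & value_tags;
      return (overlap & generational_barrier_mask) != 0;
    }

The DEBUG code stops with "Store buffer update is required" when this condition holds after a store that skipped the barrier (Smi values and self-references are filtered out before this point).
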
diff --git a/runtime/vm/compiler/assembler/assembler_arm.cc b/runtime/vm/compiler/assembler/assembler_arm.cc
index 139be88..61bdd45 100644
--- a/runtime/vm/compiler/assembler/assembler_arm.cc
+++ b/runtime/vm/compiler/assembler/assembler_arm.cc
@@ -1912,6 +1912,11 @@
     str(value, dest);
   }
 #if defined(DEBUG)
+  // We can't assert the incremental barrier is not needed here, only the
+  // generational barrier. We sometimes omit the write barrier when 'value' is
+  // a constant, but we don't eagerly mark 'value' and instead assume it is also
+  // reachable via a constant pool, so it doesn't matter if it is not traced via
+  // 'object'.
   Label done;
   StoreIntoObjectFilter(object, value, &done, kValueCanBeSmi, kJumpToNoUpdate);
 
diff --git a/runtime/vm/compiler/assembler/assembler_arm64.cc b/runtime/vm/compiler/assembler/assembler_arm64.cc
index 943457c..1212928 100644
--- a/runtime/vm/compiler/assembler/assembler_arm64.cc
+++ b/runtime/vm/compiler/assembler/assembler_arm64.cc
@@ -1272,6 +1272,11 @@
   ASSERT(memory_order == kRelaxedNonAtomic);
   str(value, dest);
 #if defined(DEBUG)
+  // We can't assert the incremental barrier is not needed here, only the
+  // generational barrier. We sometimes omit the write barrier when 'value' is
+  // a constant, but we don't eagerly mark 'value' and instead assume it is also
+  // reachable via a constant pool, so it doesn't matter if it is not traced via
+  // 'object'.
   Label done;
   StoreIntoObjectFilter(object, value, &done, kValueCanBeSmi, kJumpToNoUpdate);
 
@@ -1294,6 +1299,11 @@
   ASSERT(memory_order == kRelaxedNonAtomic);
   str(value, dest, kObjectBytes);
 #if defined(DEBUG)
+  // We can't assert the incremental barrier is not needed here, only the
+  // generational barrier. We sometimes omit the write barrier when 'value' is
+  // a constant, but we don't eagerly mark 'value' and instead assume it is also
+  // reachable via a constant pool, so it doesn't matter if it is not traced via
+  // 'object'.
   Label done;
   StoreIntoObjectFilter(object, value, &done, kValueCanBeSmi, kJumpToNoUpdate);
 
diff --git a/runtime/vm/compiler/assembler/assembler_ia32.cc b/runtime/vm/compiler/assembler/assembler_ia32.cc
index a3b12b0..69c197c 100644
--- a/runtime/vm/compiler/assembler/assembler_ia32.cc
+++ b/runtime/vm/compiler/assembler/assembler_ia32.cc
@@ -2060,6 +2060,11 @@
     movl(dest, value);
   }
 #if defined(DEBUG)
+  // We can't assert the incremental barrier is not needed here, only the
+  // generational barrier. We sometimes omit the write barrier when 'value' is
+  // a constant, but we don't eagerly mark 'value' and instead assume it is also
+  // reachable via a constant pool, so it doesn't matter if it is not traced via
+  // 'object'.
   Label done;
   pushl(value);
   StoreIntoObjectFilter(object, value, &done, kValueCanBeSmi, kJumpToNoUpdate);
diff --git a/runtime/vm/compiler/assembler/assembler_riscv.cc b/runtime/vm/compiler/assembler/assembler_riscv.cc
index 1c67022..5d739a0 100644
--- a/runtime/vm/compiler/assembler/assembler_riscv.cc
+++ b/runtime/vm/compiler/assembler/assembler_riscv.cc
@@ -3142,6 +3142,11 @@
   ASSERT(memory_order == kRelaxedNonAtomic);
   sx(value, dest);
 #if defined(DEBUG)
+  // We can't assert the incremental barrier is not needed here, only the
+  // generational barrier. We sometimes omit the write barrier when 'value' is
+  // a constant, but we don't eagerly mark 'value' and instead assume it is also
+  // reachable via a constant pool, so it doesn't matter if it is not traced via
+  // 'object'.
   Label done;
   beq(object, value, &done, kNearJump);
   BranchIfSmi(value, &done, kNearJump);
@@ -3149,7 +3154,7 @@
   lbu(TMP2, FieldAddress(value, target::Object::tags_offset()));
   srli(TMP, TMP, target::UntaggedObject::kBarrierOverlapShift);
   and_(TMP, TMP, TMP2);
-  and_(TMP, TMP, WRITE_BARRIER_MASK);
+  andi(TMP, TMP, target::UntaggedObject::kGenerationalBarrierMask);
   beqz(TMP, &done, kNearJump);
   Stop("Store buffer update is required");
   Bind(&done);
@@ -3171,6 +3176,11 @@
     StoreToOffset(value, object, offset - kHeapObjectTag);
   }
 #if defined(DEBUG)
+  // We can't assert the incremental barrier is not needed here, only the
+  // generational barrier. We sometimes omit the write barrier when 'value' is
+  // a constant, but we don't eagerly mark 'value' and instead assume it is also
+  // reachable via a constant pool, so it doesn't matter if it is not traced via
+  // 'object'.
   Label done;
   beq(object, value, &done, kNearJump);
   BranchIfSmi(value, &done, kNearJump);
@@ -3178,7 +3188,7 @@
   lbu(TMP2, FieldAddress(value, target::Object::tags_offset()));
   srli(TMP, TMP, target::UntaggedObject::kBarrierOverlapShift);
   and_(TMP, TMP, TMP2);
-  and_(TMP, TMP, WRITE_BARRIER_MASK);
+  andi(TMP, TMP, target::UntaggedObject::kGenerationalBarrierMask);
   beqz(TMP, &done, kNearJump);
   Stop("Store buffer update is required");
   Bind(&done);
diff --git a/runtime/vm/compiler/assembler/assembler_x64.cc b/runtime/vm/compiler/assembler/assembler_x64.cc
index 4e36eff..82a37b8 100644
--- a/runtime/vm/compiler/assembler/assembler_x64.cc
+++ b/runtime/vm/compiler/assembler/assembler_x64.cc
@@ -1602,6 +1602,11 @@
     movq(dest, value);
   }
 #if defined(DEBUG)
+  // We can't assert the incremental barrier is not needed here, only the
+  // generational barrier. We sometimes omit the write barrier when 'value' is
+  // a constant, but we don't eagerly mark 'value' and instead assume it is also
+  // reachable via a constant pool, so it doesn't matter if it is not traced via
+  // 'object'.
   Label done;
   pushq(value);
   StoreIntoObjectFilter(object, value, &done, kValueCanBeSmi, kJumpToNoUpdate);
@@ -1627,6 +1632,11 @@
     OBJ(mov)(dest, value);
   }
 #if defined(DEBUG)
+  // We can't assert the incremental barrier is not needed here, only the
+  // generational barrier. We sometimes omit the write barrier when 'value' is
+  // a constant, but we don't eagerly mark 'value' and instead assume it is also
+  // reachable via a constant pool, so it doesn't matter if it is not traced via
+  // 'object'.
   Label done;
   pushq(value);
   StoreIntoObjectFilter(object, value, &done, kValueCanBeSmi, kJumpToNoUpdate);
diff --git a/runtime/vm/compiler/runtime_api.cc b/runtime/vm/compiler/runtime_api.cc
index a0cca50..8f78177d5 100644
--- a/runtime/vm/compiler/runtime_api.cc
+++ b/runtime/vm/compiler/runtime_api.cc
@@ -389,6 +389,9 @@
 const word UntaggedObject::kBarrierOverlapShift =
     dart::UntaggedObject::kBarrierOverlapShift;
 
+const word UntaggedObject::kGenerationalBarrierMask =
+    dart::UntaggedObject::kGenerationalBarrierMask;
+
 bool IsTypedDataClassId(intptr_t cid) {
   return dart::IsTypedDataClassId(cid);
 }
diff --git a/runtime/vm/compiler/runtime_api.h b/runtime/vm/compiler/runtime_api.h
index af57f4ef..6a51005 100644
--- a/runtime/vm/compiler/runtime_api.h
+++ b/runtime/vm/compiler/runtime_api.h
@@ -447,6 +447,7 @@
   static const word kSizeTagMaxSizeTag;
   static const word kTagBitsSizeTagPos;
   static const word kBarrierOverlapShift;
+  static const word kGenerationalBarrierMask;
 
   static bool IsTypedDataClassId(intptr_t cid);
 };
diff --git a/tools/VERSION b/tools/VERSION
index d4f4541..e4b83ee 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
 MAJOR 2
 MINOR 17
 PATCH 0
-PRERELEASE 73
+PRERELEASE 74
 PRERELEASE_PATCH 0
\ No newline at end of file
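
For reference, the fields bumped in tools/VERSION assemble into the dev version string at the top of this entry. A minimal standalone sketch of that mapping (not code from the SDK's build tooling):

    #include <cstdio>

    int main() {
      // MAJOR.MINOR.PATCH-PRERELEASE.PRERELEASE_PATCH.dev
      const int major = 2, minor = 17, patch = 0;
      const int prerelease = 74, prerelease_patch = 0;
      std::printf("%d.%d.%d-%d.%d.dev\n", major, minor, patch, prerelease,
                  prerelease_patch);  // prints 2.17.0-74.0.dev
      return 0;
    }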