Version 2.13.0-70.0.dev

Merge commit 'a32b55003487bc6c5bc0d058460c5fef1c8f663b' into 'dev'
diff --git a/runtime/vm/compiler/assembler/assembler_arm64.h b/runtime/vm/compiler/assembler/assembler_arm64.h
index 3c44731..f7ebf06 100644
--- a/runtime/vm/compiler/assembler/assembler_arm64.h
+++ b/runtime/vm/compiler/assembler/assembler_arm64.h
@@ -1496,6 +1496,7 @@
                     OperandSize sz = kEightBytes) {
     const int reg_size =
         (sz == kEightBytes) ? kXRegSizeInBits : kWRegSizeInBits;
+    ASSERT((shift >= 0) && (shift < reg_size));
     ubfm(rd, rn, (reg_size - shift) % reg_size, reg_size - shift - 1, sz);
   }
   void LsrImmediate(Register rd,
@@ -1504,11 +1505,17 @@
                     OperandSize sz = kEightBytes) {
     const int reg_size =
         (sz == kEightBytes) ? kXRegSizeInBits : kWRegSizeInBits;
+    ASSERT((shift >= 0) && (shift < reg_size));
     ubfm(rd, rn, shift, reg_size - 1, sz);
   }
-  void AsrImmediate(Register rd, Register rn, int shift) {
-    const int reg_size = kXRegSizeInBits;
-    sbfm(rd, rn, shift, reg_size - 1);
+  void AsrImmediate(Register rd,
+                    Register rn,
+                    int shift,
+                    OperandSize sz = kEightBytes) {
+    const int reg_size =
+        (sz == kEightBytes) ? kXRegSizeInBits : kWRegSizeInBits;
+    ASSERT((shift >= 0) && (shift < reg_size));
+    sbfm(rd, rn, shift, reg_size - 1, sz);
   }
 
   void VRecps(VRegister vd, VRegister vn);
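
Note: the assembler change above gives AsrImmediate the same OperandSize parameter as LslImmediate/LsrImmediate and adds a range assert to all three shift helpers. For reference, these aliases reduce to UBFM/SBFM bitfield moves; the following standalone sketch (hypothetical helpers, not VM code) computes the (immr, imms) fields each helper emits, using the same formulas as the ubfm/sbfm calls above.

// Standalone sketch: the (immr, imms) fields behind the shift-immediate
// aliases, for a reg_size of 32 or 64 bits. Hypothetical helpers, not VM API.
#include <cassert>
#include <cstdio>

struct BitfieldImm {
  int immr;
  int imms;
};

// LSL #shift == UBFM rd, rn, (reg_size - shift) % reg_size, reg_size - shift - 1
BitfieldImm LslFields(int shift, int reg_size) {
  assert(shift >= 0 && shift < reg_size);
  return {(reg_size - shift) % reg_size, reg_size - shift - 1};
}

// LSR #shift == UBFM rd, rn, shift, reg_size - 1
// ASR #shift == SBFM rd, rn, shift, reg_size - 1 (same fields, signed variant)
BitfieldImm ShiftRightFields(int shift, int reg_size) {
  assert(shift >= 0 && shift < reg_size);
  return {shift, reg_size - 1};
}

int main() {
  BitfieldImm lsl = LslFields(3, 32);        // immr = 29, imms = 28
  BitfieldImm asr = ShiftRightFields(3, 32); // immr = 3,  imms = 31
  std::printf("lsl: %d,%d  asr: %d,%d\n", lsl.immr, lsl.imms, asr.immr, asr.imms);
  return 0;
}
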
diff --git a/runtime/vm/compiler/assembler/assembler_arm64_test.cc b/runtime/vm/compiler/assembler/assembler_arm64_test.cc
index 600d64b..f4b0afb 100644
--- a/runtime/vm/compiler/assembler/assembler_arm64_test.cc
+++ b/runtime/vm/compiler/assembler/assembler_arm64_test.cc
@@ -987,6 +987,35 @@
 constexpr uint32_t kU32MinInt32 = 0x80000000u;
 constexpr uint32_t kU32MaxInt32 = 0x7fffffffu;
 
+#define FOR_EACH_ASR_32_TEST_CONFIG(M)                                         \
+  M(0u, 0, 0u)                                                                 \
+  M(1u, 0, 1u)                                                                 \
+  M(kU32MaxInt32, 0, kU32MaxInt32)                                             \
+  M(kU32MinInt32, 0, kU32MinInt32)                                             \
+  M(0u, 1, 0u)                                                                 \
+  M(1u, 1, 0u)                                                                 \
+  M(4u, 1, 2u)                                                                 \
+  M(0xffffu, 1, 0x7fffu)                                                       \
+  M(0xffffffffu, 1, 0xffffffffu)                                               \
+  M(kU32MaxInt32, 1, 0x3fffffffu)                                              \
+  M(kU32MinInt32, 1, 0xc0000000u)                                              \
+  M(kU32MinusOne, 1, 0xffffffffu)                                              \
+  M(1u, 2, 0u)                                                                 \
+  M(4u, 2, 1u)                                                                 \
+  M(0xffffu, 2, 0x3fffu)                                                       \
+  M(0xffffffffu, 2, 0xffffffffu)                                               \
+  M(kU32MaxInt32, 2, 0x1fffffffu)                                              \
+  M(kU32MinInt32, 2, 0xe0000000u)                                              \
+  M(kU32MinusOne, 2, kU32MinusOne)                                             \
+  M(0u, 31, 0u)                                                                \
+  M(1u, 31, 0u)                                                                \
+  M(4u, 31, 0u)                                                                \
+  M(0xffffu, 31, 0u)                                                           \
+  M(0xffffffffu, 31, 0xffffffffu)                                              \
+  M(kU32MaxInt32, 31, 0u)                                                      \
+  M(kU32MinInt32, 31, kU32MinusOne)                                            \
+  M(kU32MinusOne, 31, kU32MinusOne)
+
 #define FOR_EACH_LSR_32_TEST_CONFIG(M)                                         \
   M(0u, 0, 0u)                                                                 \
   M(1u, 0, 1u)                                                                 \
@@ -1058,20 +1087,26 @@
                               Int32Return, test->entry())));                   \
   }
 
+#define ASR_32_IMMEDIATE_TEST(val, shift, expected)                            \
+  SHIFT_32_IMMEDIATE_TEST(AsrImmediate, val, shift, expected)
+
 #define LSR_32_IMMEDIATE_TEST(val, shift, expected)                            \
   SHIFT_32_IMMEDIATE_TEST(LsrImmediate, val, shift, expected)
 
 #define LSL_32_IMMEDIATE_TEST(val, shift, expected)                            \
   SHIFT_32_IMMEDIATE_TEST(LslImmediate, val, shift, expected)
 
+FOR_EACH_ASR_32_TEST_CONFIG(ASR_32_IMMEDIATE_TEST)
 FOR_EACH_LSR_32_TEST_CONFIG(LSR_32_IMMEDIATE_TEST)
 FOR_EACH_LSL_32_TEST_CONFIG(LSL_32_IMMEDIATE_TEST)
 
 #undef LSL_32_IMMEDIATE_TEST
 #undef LSR_32_IMMEDIATE_TEST
+#undef ASR_32_IMMEDIATE_TEST
 #undef SHIFT_32_IMMEDIATE_TEST
 #undef FOR_EACH_LSL_32_TEST_CONFIG
 #undef FOR_EACH_LSR_32_TEST_CONFIG
+#undef FOR_EACH_ASR_32_TEST_CONFIG
 
 ASSEMBLER_TEST_GENERATE(AndShiftRegs, assembler) {
   __ movz(R1, Immediate(42), 0);
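
Note: the new ASR cases reuse the test file's X-macro pattern: FOR_EACH_ASR_32_TEST_CONFIG lists (value, shift, expected) rows, and ASR_32_IMMEDIATE_TEST turns each row into one generated assembler test. Below is a minimal, self-contained illustration of that expansion, with a plain C++ check standing in for the generated assembler test (EXPECT_ASR32 and Asr32 are illustrative names, not part of the test harness).

// Each table row expands to one check; the real tests assemble and run
// AsrImmediate instead of calling a C++ helper.
#include <cassert>
#include <cstdint>

constexpr uint32_t kU32MinusOne = 0xffffffffu;

static uint32_t Asr32(uint32_t val, int shift) {
  // Arithmetic shift right on the 32-bit value, matching what the tests expect.
  return static_cast<uint32_t>(static_cast<int32_t>(val) >> shift);
}

#define EXPECT_ASR32(val, shift, expected) assert(Asr32(val, shift) == (expected));

#define FOR_EACH_SAMPLE_ROW(M)                                                 \
  M(4u, 1, 2u)                                                                 \
  M(kU32MinusOne, 2, kU32MinusOne)                                             \
  M(0x80000000u, 31, kU32MinusOne)

int main() {
  FOR_EACH_SAMPLE_ROW(EXPECT_ASR32)  // expands to three assert statements
  return 0;
}
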
diff --git a/runtime/vm/compiler/assembler/disassembler_arm64.cc b/runtime/vm/compiler/assembler/disassembler_arm64.cc
index fad4210..08529fe 100644
--- a/runtime/vm/compiler/assembler/disassembler_arm64.cc
+++ b/runtime/vm/compiler/assembler/disassembler_arm64.cc
@@ -30,6 +30,7 @@
  private:
   // Bottleneck functions to print into the out_buffer.
   void Print(const char* str);
+  void PrintInt(int value);
 
   // Printing of common values.
   void PrintRegister(int reg, R31Type r31t);
@@ -80,6 +81,12 @@
   buffer_[buffer_pos_] = '\0';
 }
 
+void ARM64Decoder::PrintInt(int value) {
+  buffer_pos_ += Utils::SNPrint(current_position_in_buffer(),
+                                remaining_size_in_buffer(), "%d", value);
+  buffer_[buffer_pos_] = '\0';
+}
+
 // Print the register name according to the active name converter.
 void ARM64Decoder::PrintRegister(int reg, R31Type r31t) {
   ASSERT(0 <= reg);
@@ -791,6 +798,7 @@
 }
 
 void ARM64Decoder::DecodeBitfield(Instr* instr) {
+  int reg_size = instr->SFField() == 0 ? 32 : 64;
   int op = instr->Bits(29, 2);
   int r_imm = instr->ImmRField();
   int s_imm = instr->ImmSField();
@@ -808,6 +816,10 @@
           break;
         }
       }
+      if (s_imm == (reg_size - 1)) {
+        Format(instr, "asr'sf 'rd, 'rn, 'immr");
+        break;
+      }
       Format(instr, "sbfm'sf 'rd, 'rn, 'immr, 'imms");
       break;
     case 1:
@@ -823,6 +835,15 @@
           break;
         }
       }
+      if ((s_imm != (reg_size - 1)) && ((s_imm + 1) == r_imm)) {
+        int shift = reg_size - s_imm - 1;
+        Format(instr, "lsl'sf 'rd, 'rn, ");
+        PrintInt(shift);
+        break;
+      } else if (s_imm == (reg_size - 1)) {
+        Format(instr, "lsr'sf 'rd, 'rn, 'immr");
+        break;
+      }
       Format(instr, "ubfm'sf 'rd, 'rn, 'immr, 'imms");
       break;
     default:
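
Note: the disassembler now prints the asr/lsr/lsl aliases instead of raw sbfm/ubfm whenever the (immr, imms) pair matches one of the encodings produced by the assembler helpers above (for lsl, imms + 1 == immr and the shift is reg_size - immr). A rough standalone sketch of that alias test, with illustrative names rather than the VM decoder's API:

// Mirrors the encodings emitted by LslImmediate/LsrImmediate/AsrImmediate:
//   lsl #s -> ubfm immr = (reg_size - s) % reg_size, imms = reg_size - s - 1
//   lsr #s -> ubfm immr = s,                         imms = reg_size - 1
//   asr #s -> sbfm immr = s,                         imms = reg_size - 1
#include <cstdio>

void DescribeBitfield(bool is_signed, int reg_size, int immr, int imms) {
  if (is_signed && imms == reg_size - 1) {
    std::printf("asr #%d\n", immr);
  } else if (!is_signed && imms == reg_size - 1) {
    std::printf("lsr #%d\n", immr);
  } else if (!is_signed && imms + 1 == immr) {
    std::printf("lsl #%d\n", reg_size - immr);
  } else {
    std::printf("%s #%d, #%d\n", is_signed ? "sbfm" : "ubfm", immr, imms);
  }
}

int main() {
  DescribeBitfield(false, 32, 29, 28);  // lsl #3
  DescribeBitfield(false, 32, 3, 31);   // lsr #3
  DescribeBitfield(true, 64, 7, 63);    // asr #7
  return 0;
}
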
diff --git a/runtime/vm/heap/pages.cc b/runtime/vm/heap/pages.cc
index 8070a58..192c3df 100644
--- a/runtime/vm/heap/pages.cc
+++ b/runtime/vm/heap/pages.cc
@@ -804,9 +804,11 @@
          (Thread::Current()->task_kind() == Thread::kScavengerTask));
 
   // Wait for the sweeper to finish mutating the large page list.
-  MonitorLocker ml(tasks_lock());
-  while (phase() == kSweepingLarge) {
-    ml.Wait();  // No safepoint check.
+  {
+    MonitorLocker ml(tasks_lock());
+    while (phase() == kSweepingLarge) {
+      ml.Wait();  // No safepoint check.
+    }
   }
 
   // Large pages may be added concurrently due to promotion in another scavenge
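
Note: the pages.cc change only narrows the scope of the MonitorLocker, so the tasks lock is released before the large-page list is walked. The same scoped-wait shape in portable C++, as an analogy only (std::mutex and std::condition_variable here, not the VM's monitor types):

// The wait happens inside a nested block so the mutex is released before the
// subsequent work runs.
#include <condition_variable>
#include <mutex>

std::mutex tasks_mutex;
std::condition_variable tasks_cv;
bool sweeping_large = true;  // stand-in for phase() == kSweepingLarge

void WaitThenWalkPages() {
  {
    std::unique_lock<std::mutex> lock(tasks_mutex);
    tasks_cv.wait(lock, [] { return !sweeping_large; });
  }  // lock released here, before touching the page list
  // ... walk the large-page list without holding the tasks lock ...
}
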
diff --git a/runtime/vm/heap/scavenger.cc b/runtime/vm/heap/scavenger.cc
index 5a8c0d5..e4807d5 100644
--- a/runtime/vm/heap/scavenger.cc
+++ b/runtime/vm/heap/scavenger.cc
@@ -1710,6 +1710,7 @@
 
   // Reverse the partial forwarding from the aborted scavenge. This also
   // rebuilds the remembered set.
+  heap_->WaitForSweeperTasksAtSafepoint(thread);
   Become::FollowForwardingPointers(thread);
 
   // Don't scavenge again until the next old-space GC has occurred. Prevents
diff --git a/runtime/vm/simulator_arm64.cc b/runtime/vm/simulator_arm64.cc
index fe2911c..63d9e07 100644
--- a/runtime/vm/simulator_arm64.cc
+++ b/runtime/vm/simulator_arm64.cc
@@ -1450,7 +1450,7 @@
   result &= mask;
   if (sign_extend) {
     int highest_bit = (s_bit - r_bit) & (bitwidth - 1);
-    int shift = bitwidth - highest_bit - 1;
+    int shift = 64 - highest_bit - 1;
     result <<= shift;
     result = static_cast<word>(result) >> shift;
   } else if (!zero_extend) {
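
Note: in the simulator, the extracted bitfield is held in a 64-bit temporary, which is why the sign-extension shift pair must use 64 rather than bitwidth; shifting by bitwidth (32 for W-sized operations) only propagated the sign across the low word. A small standalone illustration of the corrected shift pair (not the simulator code itself):

// Move the field's sign bit (at position highest_bit) up to bit 63, then
// arithmetic-shift it back down so the sign fills all upper bits.
#include <cstdint>
#include <cstdio>

int64_t SignExtendField(uint64_t field, int highest_bit) {
  int shift = 64 - highest_bit - 1;
  return static_cast<int64_t>(field << shift) >> shift;
}

int main() {
  // A 3-bit field holding -1 (0b111) extends to -1 across all 64 bits.
  std::printf("%lld\n", (long long)SignExtendField(0x7u, 2));  // prints -1
  // Using 32 instead of 64 would only sign-extend within the low word,
  // leaving the upper half of the 64-bit result zero.
  return 0;
}
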
diff --git a/tools/VERSION b/tools/VERSION
index ed26e86..2a1c088 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
 MAJOR 2
 MINOR 13
 PATCH 0
-PRERELEASE 69
+PRERELEASE 70
 PRERELEASE_PATCH 0
\ No newline at end of file