Version 2.10.1
* Cherry-pick 62354c15403d42640ec9ed39e7d2e960742f7329 to stable
* Cherry-pick 68fd2a9d52802bdcc2db31d9b31a2e128b392f30 to stable
* Cherry-pick 350481a93cd6e9e8c5e387f3feca80f6beb2178a to stable
* Cherry-pick 88aa0544f78010d358d0abac76f23975422412a7 to stable
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9b56ce5..da59235 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,16 @@
-## 2.10.0
+## 2.10.1 - 2020-10-06
+
+This is a patch release that fixes the following issues:
+* crashes when developing Flutter applications (issue [#43464][]).
+* non-deterministically weird program behaviour and/or crashes (issue
+ [flutter/flutter#66672][]).
+* uncaught TypeErrors in DDC (issue [#43661][]).
+
+[#43464]: https://github.com/dart-lang/sdk/issues/43464
+[flutter/flutter#66672]: https://github.com/flutter/flutter/issues/66672
+[#43661]: https://github.com/dart-lang/sdk/issues/43661
+
+## 2.10.0 - 2020-09-28
### Core libraries
@@ -36,7 +48,7 @@
deferred loading of types, pass `--no-defer-class-types`. See the original
post on the [unsoundness in the deferred loading algorithm][].
* Enables a new sound deferred splitting algorithm. To explicitly disable
- the new deferred splitting algorithm, pass `--no-new-deferred-split'.
+ the new deferred splitting algorithm, pass `--no-new-deferred-split`.
See the original post on the
[unsoundness in the deferred loading algorithm][].
diff --git a/pkg/dev_compiler/lib/src/kernel/compiler.dart b/pkg/dev_compiler/lib/src/kernel/compiler.dart
index 8a9fa88..284c144 100644
--- a/pkg/dev_compiler/lib/src/kernel/compiler.dart
+++ b/pkg/dev_compiler/lib/src/kernel/compiler.dart
@@ -867,21 +867,16 @@
var savedTopLevelClass = _classEmittingExtends;
_classEmittingExtends = c;
- // Refers to 'S' in `class C extends S`. Set this to null to avoid
- // referencing deferred supertypes in _emitClassConstructor's JS output.
- js_ast.Expression baseClass;
-
+ // Unroll mixins.
if (shouldDefer(supertype)) {
deferredSupertypes.add(runtimeStatement('setBaseClass(#, #)', [
getBaseClass(isMixinAliasClass(c) ? 0 : mixins.length),
emitDeferredType(supertype),
]));
- // Refers to 'supertype' without any type arguments.
supertype =
_coreTypes.rawType(supertype.classNode, _currentLibrary.nonNullable);
- } else {
- baseClass = emitClassRef(supertype);
}
+ var baseClass = emitClassRef(supertype);
if (isMixinAliasClass(c)) {
// Given `class C = Object with M [implements I1, I2 ...];`
diff --git a/runtime/tests/vm/dart/regress_43464_test.dart b/runtime/tests/vm/dart/regress_43464_test.dart
new file mode 100644
index 0000000..43a3c113
--- /dev/null
+++ b/runtime/tests/vm/dart/regress_43464_test.dart
@@ -0,0 +1,21 @@
+// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'package:expect/expect.dart';
+
+class A {}
+
+abstract class B<T> {
+ dynamic foo(T a);
+}
+
+class C extends B<A> {
+ dynamic foo(A a) {
+ return () => a;
+ }
+}
+
+main() {
+ Expect.throws(() => (C().foo as dynamic)(1));
+}
diff --git a/runtime/tests/vm/dart/write_barrier_register_clobber_test.dart b/runtime/tests/vm/dart/write_barrier_register_clobber_test.dart
new file mode 100644
index 0000000..ceff211
--- /dev/null
+++ b/runtime/tests/vm/dart/write_barrier_register_clobber_test.dart
@@ -0,0 +1,81 @@
+// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// This test attempts to verify that write barrier slow path does not
+// clobber any live values.
+
+import 'dart:_internal' show VMInternalsForTesting;
+
+import 'package:expect/expect.dart';
+
+class Old {
+ var f;
+ Old(this.f);
+}
+
+@pragma('vm:never-inline')
+int crashy(int v, List<Old> oldies) {
+ // This test attempts to create a lot of live values which would live across
+ // write barrier invocation so that when write-barrier calls runtime and
+ // clobbers a register this is detected.
+ var young = Object();
+ var len = oldies.length;
+ var i = 0;
+ var v00 = v + 0;
+ var v01 = v + 1;
+ var v02 = v + 2;
+ var v03 = v + 3;
+ var v04 = v + 4;
+ var v05 = v + 5;
+ var v06 = v + 6;
+ var v07 = v + 7;
+ var v08 = v + 8;
+ var v09 = v + 9;
+ var v10 = v + 10;
+ var v11 = v + 11;
+ var v12 = v + 12;
+ var v13 = v + 13;
+ var v14 = v + 14;
+ var v15 = v + 15;
+ var v16 = v + 16;
+ var v17 = v + 17;
+ var v18 = v + 18;
+ var v19 = v + 19;
+ while (i < len) {
+ // Eventually this will overflow store buffer and call runtime to acquire
+ // a new block.
+ oldies[i++].f = young;
+ }
+ return v00 +
+ v01 +
+ v02 +
+ v03 +
+ v04 +
+ v05 +
+ v06 +
+ v07 +
+ v08 +
+ v09 +
+ v10 +
+ v11 +
+ v12 +
+ v13 +
+ v14 +
+ v15 +
+ v16 +
+ v17 +
+ v18 +
+ v19;
+}
+
+void main(List<String> args) {
+ final init = args.contains('impossible') ? 1 : 0;
+ final oldies = List<Old>.generate(100000, (i) => Old(""));
+ VMInternalsForTesting.collectAllGarbage();
+ VMInternalsForTesting.collectAllGarbage();
+ Expect.equals(crashy(init, oldies), 190);
+ for (var o in oldies) {
+ Expect.isTrue(o.f is! String);
+ }
+}
diff --git a/runtime/tests/vm/dart_2/regress_43464_test.dart b/runtime/tests/vm/dart_2/regress_43464_test.dart
new file mode 100644
index 0000000..43a3c113
--- /dev/null
+++ b/runtime/tests/vm/dart_2/regress_43464_test.dart
@@ -0,0 +1,21 @@
+// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'package:expect/expect.dart';
+
+class A {}
+
+abstract class B<T> {
+ dynamic foo(T a);
+}
+
+class C extends B<A> {
+ dynamic foo(A a) {
+ return () => a;
+ }
+}
+
+main() {
+ Expect.throws(() => (C().foo as dynamic)(1));
+}
diff --git a/runtime/tests/vm/dart_2/write_barrier_register_clobber_test.dart b/runtime/tests/vm/dart_2/write_barrier_register_clobber_test.dart
new file mode 100644
index 0000000..ceff211
--- /dev/null
+++ b/runtime/tests/vm/dart_2/write_barrier_register_clobber_test.dart
@@ -0,0 +1,81 @@
+// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// This test attempts to verify that write barrier slow path does not
+// clobber any live values.
+
+import 'dart:_internal' show VMInternalsForTesting;
+
+import 'package:expect/expect.dart';
+
+class Old {
+ var f;
+ Old(this.f);
+}
+
+@pragma('vm:never-inline')
+int crashy(int v, List<Old> oldies) {
+ // This test attempts to create a lot of live values which would live across
+ // write barrier invocation so that when write-barrier calls runtime and
+ // clobbers a register this is detected.
+ var young = Object();
+ var len = oldies.length;
+ var i = 0;
+ var v00 = v + 0;
+ var v01 = v + 1;
+ var v02 = v + 2;
+ var v03 = v + 3;
+ var v04 = v + 4;
+ var v05 = v + 5;
+ var v06 = v + 6;
+ var v07 = v + 7;
+ var v08 = v + 8;
+ var v09 = v + 9;
+ var v10 = v + 10;
+ var v11 = v + 11;
+ var v12 = v + 12;
+ var v13 = v + 13;
+ var v14 = v + 14;
+ var v15 = v + 15;
+ var v16 = v + 16;
+ var v17 = v + 17;
+ var v18 = v + 18;
+ var v19 = v + 19;
+ while (i < len) {
+ // Eventually this will overflow store buffer and call runtime to acquire
+ // a new block.
+ oldies[i++].f = young;
+ }
+ return v00 +
+ v01 +
+ v02 +
+ v03 +
+ v04 +
+ v05 +
+ v06 +
+ v07 +
+ v08 +
+ v09 +
+ v10 +
+ v11 +
+ v12 +
+ v13 +
+ v14 +
+ v15 +
+ v16 +
+ v17 +
+ v18 +
+ v19;
+}
+
+void main(List<String> args) {
+ final init = args.contains('impossible') ? 1 : 0;
+ final oldies = List<Old>.generate(100000, (i) => Old(""));
+ VMInternalsForTesting.collectAllGarbage();
+ VMInternalsForTesting.collectAllGarbage();
+ Expect.equals(crashy(init, oldies), 190);
+ for (var o in oldies) {
+ Expect.isTrue(o.f is! String);
+ }
+}
diff --git a/runtime/vm/compiler/assembler/assembler_arm64.cc b/runtime/vm/compiler/assembler/assembler_arm64.cc
index 2118d49..4699f5d 100644
--- a/runtime/vm/compiler/assembler/assembler_arm64.cc
+++ b/runtime/vm/compiler/assembler/assembler_arm64.cc
@@ -1485,7 +1485,7 @@
StoreToOffset(state, THR, target::Thread::exit_through_ffi_offset());
}
-void Assembler::EnterCallRuntimeFrame(intptr_t frame_size) {
+void Assembler::EnterCallRuntimeFrame(intptr_t frame_size, bool is_leaf) {
Comment("EnterCallRuntimeFrame");
EnterFrame(0);
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
@@ -1510,19 +1510,30 @@
Push(reg);
}
- ReserveAlignedFrameSpace(frame_size);
+ if (!is_leaf) { // Leaf calling sequence aligns the stack itself.
+ ReserveAlignedFrameSpace(frame_size);
+ } else {
+ PushPair(kCallLeafRuntimeCalleeSaveScratch1,
+ kCallLeafRuntimeCalleeSaveScratch2);
+ }
}
-void Assembler::LeaveCallRuntimeFrame() {
+void Assembler::LeaveCallRuntimeFrame(bool is_leaf) {
// SP might have been modified to reserve space for arguments
// and ensure proper alignment of the stack frame.
// We need to restore it before restoring registers.
+ const intptr_t fixed_frame_words_without_pc_and_fp =
+ target::frame_layout.dart_fixed_frame_size - 2;
const intptr_t kPushedRegistersSize =
- kDartVolatileCpuRegCount * target::kWordSize +
- kDartVolatileFpuRegCount * target::kWordSize +
- (target::frame_layout.dart_fixed_frame_size - 2) *
- target::kWordSize; // From EnterStubFrame (excluding PC / FP)
+ kDartVolatileFpuRegCount * sizeof(double) +
+ (kDartVolatileCpuRegCount + (is_leaf ? 2 : 0) +
+ fixed_frame_words_without_pc_and_fp) *
+ target::kWordSize;
AddImmediate(SP, FP, -kPushedRegistersSize);
+ if (is_leaf) {
+ PopPair(kCallLeafRuntimeCalleeSaveScratch1,
+ kCallLeafRuntimeCalleeSaveScratch2);
+ }
for (int i = kDartLastVolatileCpuReg; i >= kDartFirstVolatileCpuReg; i--) {
const Register reg = static_cast<Register>(i);
Pop(reg);
@@ -1547,6 +1558,37 @@
entry.Call(this, argument_count);
}
+void Assembler::CallRuntimeScope::Call(intptr_t argument_count) {
+ assembler_->CallRuntime(entry_, argument_count);
+}
+
+Assembler::CallRuntimeScope::~CallRuntimeScope() {
+ if (preserve_registers_) {
+ assembler_->LeaveCallRuntimeFrame(entry_.is_leaf());
+ if (restore_code_reg_) {
+ assembler_->Pop(CODE_REG);
+ }
+ }
+}
+
+Assembler::CallRuntimeScope::CallRuntimeScope(Assembler* assembler,
+ const RuntimeEntry& entry,
+ intptr_t frame_size,
+ bool preserve_registers,
+ const Address* caller)
+ : assembler_(assembler),
+ entry_(entry),
+ preserve_registers_(preserve_registers),
+ restore_code_reg_(caller != nullptr) {
+ if (preserve_registers_) {
+ if (caller != nullptr) {
+ assembler_->Push(CODE_REG);
+ assembler_->ldr(CODE_REG, *caller);
+ }
+ assembler_->EnterCallRuntimeFrame(frame_size, entry.is_leaf());
+ }
+}
+
void Assembler::EnterStubFrame() {
EnterDartFrame(0);
}
diff --git a/runtime/vm/compiler/assembler/assembler_arm64.h b/runtime/vm/compiler/assembler/assembler_arm64.h
index 5c1fb60..c9bd624 100644
--- a/runtime/vm/compiler/assembler/assembler_arm64.h
+++ b/runtime/vm/compiler/assembler/assembler_arm64.h
@@ -1651,10 +1651,51 @@
void EnterOsrFrame(intptr_t extra_size, Register new_pp = kNoRegister);
void LeaveDartFrame(RestorePP restore_pp = kRestoreCallerPP);
- void EnterCallRuntimeFrame(intptr_t frame_size);
- void LeaveCallRuntimeFrame();
void CallRuntime(const RuntimeEntry& entry, intptr_t argument_count);
+ // Helper method for performing runtime calls from callers where manual
+ // register preservation is required (e.g. outside IL instructions marked
+ // as calling).
+ class CallRuntimeScope : public ValueObject {
+ public:
+ CallRuntimeScope(Assembler* assembler,
+ const RuntimeEntry& entry,
+ intptr_t frame_size,
+ bool preserve_registers = true)
+ : CallRuntimeScope(assembler,
+ entry,
+ frame_size,
+ preserve_registers,
+ /*caller=*/nullptr) {}
+
+ CallRuntimeScope(Assembler* assembler,
+ const RuntimeEntry& entry,
+ intptr_t frame_size,
+ Address caller,
+ bool preserve_registers = true)
+ : CallRuntimeScope(assembler,
+ entry,
+ frame_size,
+ preserve_registers,
+ &caller) {}
+
+ void Call(intptr_t argument_count);
+
+ ~CallRuntimeScope();
+
+ private:
+ CallRuntimeScope(Assembler* assembler,
+ const RuntimeEntry& entry,
+ intptr_t frame_size,
+ bool preserve_registers,
+ const Address* caller);
+
+ Assembler* const assembler_;
+ const RuntimeEntry& entry_;
+ const bool preserve_registers_;
+ const bool restore_code_reg_;
+ };
+
// Set up a stub frame so that the stack traversal code can easily identify
// a stub frame.
void EnterStubFrame();
@@ -2402,6 +2443,11 @@
CanBeSmi can_be_smi,
BarrierFilterMode barrier_filter_mode);
+ // Note: leaf call sequence uses some abi callee save registers as scratch
+ // so they should be manually preserved.
+ void EnterCallRuntimeFrame(intptr_t frame_size, bool is_leaf);
+ void LeaveCallRuntimeFrame(bool is_leaf);
+
friend class dart::FlowGraphCompiler;
std::function<void(Register reg)> generate_invoke_write_barrier_wrapper_;
std::function<void()> generate_invoke_array_write_barrier_;
diff --git a/runtime/vm/compiler/backend/type_propagator.cc b/runtime/vm/compiler/backend/type_propagator.cc
index eab2e16..f1aa76f 100644
--- a/runtime/vm/compiler/backend/type_propagator.cc
+++ b/runtime/vm/compiler/backend/type_propagator.cc
@@ -1405,6 +1405,12 @@
}
CompileType LoadLocalInstr::ComputeType() const {
+ if (local().needs_covariant_check_in_method()) {
+ // We may not yet have checked the actual type of the parameter value.
+ // Assuming that the value has the required type can lead to unsound
+ // optimizations. See dartbug.com/43464.
+ return CompileType::FromCid(kDynamicCid);
+ }
const AbstractType& local_type = local().type();
TraceStrongModeType(this, local_type);
return CompileType::FromAbstractType(local_type);
diff --git a/runtime/vm/compiler/frontend/scope_builder.cc b/runtime/vm/compiler/frontend/scope_builder.cc
index b91b32a..d633140 100644
--- a/runtime/vm/compiler/frontend/scope_builder.cc
+++ b/runtime/vm/compiler/frontend/scope_builder.cc
@@ -1601,6 +1601,9 @@
helper.IsCovariant() ||
(helper.IsGenericCovariantImpl() &&
(attrs.has_non_this_uses || attrs.has_tearoff_uses));
+ if (needs_covariant_check_in_method) {
+ variable->set_needs_covariant_check_in_method();
+ }
switch (type_check_mode) {
case kTypeCheckAllParameters:
diff --git a/runtime/vm/compiler/runtime_api.cc b/runtime/vm/compiler/runtime_api.cc
index 8a16400..0aecfbd 100644
--- a/runtime/vm/compiler/runtime_api.cc
+++ b/runtime/vm/compiler/runtime_api.cc
@@ -278,6 +278,10 @@
return target::Thread::OffsetFromThread(runtime_entry_);
}
+bool RuntimeEntry::is_leaf() const {
+ return runtime_entry_->is_leaf();
+}
+
namespace target {
const word kOldPageSize = dart::kOldPageSize;
diff --git a/runtime/vm/compiler/runtime_api.h b/runtime/vm/compiler/runtime_api.h
index 4d70b77..1a75739 100644
--- a/runtime/vm/compiler/runtime_api.h
+++ b/runtime/vm/compiler/runtime_api.h
@@ -233,6 +233,8 @@
word OffsetFromThread() const;
+ bool is_leaf() const;
+
protected:
RuntimeEntry(const dart::RuntimeEntry* runtime_entry,
RuntimeEntryCallInternal call)
diff --git a/runtime/vm/compiler/stub_code_compiler_arm64.cc b/runtime/vm/compiler/stub_code_compiler_arm64.cc
index 6ff81a8..6f29ad2 100644
--- a/runtime/vm/compiler/stub_code_compiler_arm64.cc
+++ b/runtime/vm/compiler/stub_code_compiler_arm64.cc
@@ -49,16 +49,12 @@
Label done;
__ tbnz(&done, R0, target::ObjectAlignment::kNewObjectBitPosition);
- if (preserve_registers) {
- __ EnterCallRuntimeFrame(0);
- } else {
- __ ReserveAlignedFrameSpace(0);
- }
- // [R0] already contains first argument.
- __ mov(R1, THR);
- __ CallRuntime(kEnsureRememberedAndMarkingDeferredRuntimeEntry, 2);
- if (preserve_registers) {
- __ LeaveCallRuntimeFrame();
+ {
+ Assembler::CallRuntimeScope scope(
+ assembler, kEnsureRememberedAndMarkingDeferredRuntimeEntry,
+ /*frame_size=*/0, /*preserve_registers=*/preserve_registers);
+ __ mov(R1, THR);
+ scope.Call(/*argument_count=*/2);
}
__ Bind(&done);
@@ -1973,16 +1969,13 @@
// Handle overflow: Call the runtime leaf function.
__ Bind(&overflow);
- // Setup frame, push callee-saved registers.
-
- __ Push(CODE_REG);
- __ ldr(CODE_REG, stub_code);
- __ EnterCallRuntimeFrame(0 * target::kWordSize);
- __ mov(R0, THR);
- __ CallRuntime(kStoreBufferBlockProcessRuntimeEntry, 1);
- // Restore callee-saved registers, tear down frame.
- __ LeaveCallRuntimeFrame();
- __ Pop(CODE_REG);
+ {
+ Assembler::CallRuntimeScope scope(assembler,
+ kStoreBufferBlockProcessRuntimeEntry,
+ /*frame_size=*/0, stub_code);
+ __ mov(R0, THR);
+ scope.Call(/*argument_count=*/1);
+ }
__ ret();
__ Bind(&add_to_mark_stack);
@@ -2020,13 +2013,13 @@
__ ret();
__ Bind(&marking_overflow);
- __ Push(CODE_REG);
- __ ldr(CODE_REG, stub_code);
- __ EnterCallRuntimeFrame(0 * target::kWordSize);
- __ mov(R0, THR);
- __ CallRuntime(kMarkingStackBlockProcessRuntimeEntry, 1);
- __ LeaveCallRuntimeFrame();
- __ Pop(CODE_REG);
+ {
+ Assembler::CallRuntimeScope scope(assembler,
+ kMarkingStackBlockProcessRuntimeEntry,
+ /*frame_size=*/0, stub_code);
+ __ mov(R0, THR);
+ scope.Call(/*argument_count=*/1);
+ }
__ ret();
__ Bind(&lost_race);
@@ -2059,16 +2052,13 @@
// Card table not yet allocated.
__ Bind(&remember_card_slow);
- __ Push(CODE_REG);
- __ PushPair(R0, R1);
- __ ldr(CODE_REG, stub_code);
- __ mov(R0, R1); // Arg0 = Object
- __ mov(R1, R25); // Arg1 = Slot
- __ EnterCallRuntimeFrame(0);
- __ CallRuntime(kRememberCardRuntimeEntry, 2);
- __ LeaveCallRuntimeFrame();
- __ PopPair(R0, R1);
- __ Pop(CODE_REG);
+ {
+ Assembler::CallRuntimeScope scope(assembler, kRememberCardRuntimeEntry,
+ /*frame_size=*/0, stub_code);
+ __ mov(R0, R1); // Arg0 = Object
+ __ mov(R1, R25); // Arg1 = Slot
+ scope.Call(/*argument_count=*/2);
+ }
__ ret();
}
}
diff --git a/runtime/vm/constants_arm64.h b/runtime/vm/constants_arm64.h
index 1c71baa..b5340bb 100644
--- a/runtime/vm/constants_arm64.h
+++ b/runtime/vm/constants_arm64.h
@@ -242,21 +242,22 @@
// See "Procedure Call Standard for the ARM 64-bit Architecture", document
// number "ARM IHI 0055B", May 22 2013.
+#define R(REG) (1 << REG)
+
// C++ ABI call registers.
-const RegList kAbiArgumentCpuRegs = (1 << R0) | (1 << R1) | (1 << R2) |
- (1 << R3) | (1 << R4) | (1 << R5) |
- (1 << R6) | (1 << R7);
+const RegList kAbiArgumentCpuRegs =
+ R(R0) | R(R1) | R(R2) | R(R3) | R(R4) | R(R5) | R(R6) | R(R7);
#if defined(TARGET_OS_FUCHSIA)
-const RegList kAbiPreservedCpuRegs =
- (1 << R18) | (1 << R19) | (1 << R20) | (1 << R21) | (1 << R22) |
- (1 << R23) | (1 << R24) | (1 << R25) | (1 << R26) | (1 << R27) | (1 << R28);
+const RegList kAbiPreservedCpuRegs = R(R18) | R(R19) | R(R20) | R(R21) |
+ R(R22) | R(R23) | R(R24) | R(R25) |
+ R(R26) | R(R27) | R(R28);
const Register kAbiFirstPreservedCpuReg = R18;
const Register kAbiLastPreservedCpuReg = R28;
const int kAbiPreservedCpuRegCount = 11;
#else
-const RegList kAbiPreservedCpuRegs =
- (1 << R19) | (1 << R20) | (1 << R21) | (1 << R22) | (1 << R23) |
- (1 << R24) | (1 << R25) | (1 << R26) | (1 << R27) | (1 << R28);
+const RegList kAbiPreservedCpuRegs = R(R19) | R(R20) | R(R21) | R(R22) |
+ R(R23) | R(R24) | R(R25) | R(R26) |
+ R(R27) | R(R28);
const Register kAbiFirstPreservedCpuReg = R19;
const Register kAbiLastPreservedCpuReg = R28;
const int kAbiPreservedCpuRegCount = 10;
@@ -265,11 +266,11 @@
const VRegister kAbiLastPreservedFpuReg = V15;
const int kAbiPreservedFpuRegCount = 8;
-const intptr_t kReservedCpuRegisters =
- (1 << SPREG) | // Dart SP
- (1 << FPREG) | (1 << TMP) | (1 << TMP2) | (1 << PP) | (1 << THR) |
- (1 << LR) | (1 << BARRIER_MASK) | (1 << NULL_REG) | (1 << R31) | // C++ SP
- (1 << R18) | (1 << DISPATCH_TABLE_REG);
+const intptr_t kReservedCpuRegisters = R(SPREG) | // Dart SP
+ R(FPREG) | R(TMP) | R(TMP2) | R(PP) |
+ R(THR) | R(LR) | R(BARRIER_MASK) |
+ R(NULL_REG) | R(R31) | // C++ SP
+ R(R18) | R(DISPATCH_TABLE_REG);
constexpr intptr_t kNumberOfReservedCpuRegisters = 12;
// CPU registers available to Dart allocator.
const RegList kDartAvailableCpuRegs =
@@ -284,9 +285,17 @@
const int kDartVolatileCpuRegCount = 15;
const int kDartVolatileFpuRegCount = 24;
-constexpr int kStoreBufferWrapperSize = 32;
+// Two callee save scratch registers used by leaf runtime call sequence.
+const Register kCallLeafRuntimeCalleeSaveScratch1 = R23;
+const Register kCallLeafRuntimeCalleeSaveScratch2 = R25;
+static_assert((R(kCallLeafRuntimeCalleeSaveScratch1) & kAbiPreservedCpuRegs) !=
+ 0,
+ "Need callee save scratch register for leaf runtime calls.");
+static_assert((R(kCallLeafRuntimeCalleeSaveScratch2) & kAbiPreservedCpuRegs) !=
+ 0,
+ "Need callee save scratch register for leaf runtime calls.");
-#define R(REG) (1 << REG)
+constexpr int kStoreBufferWrapperSize = 32;
class CallingConventions {
public:
diff --git a/runtime/vm/parser.cc b/runtime/vm/parser.cc
index d3e0e47..37e84a9 100644
--- a/runtime/vm/parser.cc
+++ b/runtime/vm/parser.cc
@@ -211,6 +211,9 @@
if (variable->is_explicit_covariant_parameter()) {
raw_parameter->set_is_explicit_covariant_parameter();
}
+ if (variable->needs_covariant_check_in_method()) {
+ raw_parameter->set_needs_covariant_check_in_method();
+ }
raw_parameter->set_type_check_mode(variable->type_check_mode());
if (function().HasOptionalParameters()) {
bool ok = scope->AddVariable(raw_parameter);
diff --git a/runtime/vm/runtime_entry_arm64.cc b/runtime/vm/runtime_entry_arm64.cc
index 129dae3..5fd14b5 100644
--- a/runtime/vm/runtime_entry_arm64.cc
+++ b/runtime/vm/runtime_entry_arm64.cc
@@ -57,12 +57,12 @@
// call.
// This sequence may occur in an intrinsic, so don't use registers an
// intrinsic must preserve.
- COMPILE_ASSERT(R23 != CODE_REG);
- COMPILE_ASSERT(R25 != CODE_REG);
- COMPILE_ASSERT(R23 != ARGS_DESC_REG);
- COMPILE_ASSERT(R25 != ARGS_DESC_REG);
- __ mov(R23, CSP);
- __ mov(R25, SP);
+ COMPILE_ASSERT(kCallLeafRuntimeCalleeSaveScratch1 != CODE_REG);
+ COMPILE_ASSERT(kCallLeafRuntimeCalleeSaveScratch2 != CODE_REG);
+ COMPILE_ASSERT(kCallLeafRuntimeCalleeSaveScratch1 != ARGS_DESC_REG);
+ COMPILE_ASSERT(kCallLeafRuntimeCalleeSaveScratch2 != ARGS_DESC_REG);
+ __ mov(kCallLeafRuntimeCalleeSaveScratch1, CSP);
+ __ mov(kCallLeafRuntimeCalleeSaveScratch2, SP);
__ ReserveAlignedFrameSpace(0);
__ mov(CSP, SP);
__ ldr(TMP,
@@ -71,8 +71,8 @@
__ blr(TMP);
__ LoadImmediate(TMP, VMTag::kDartCompiledTagId);
__ str(TMP, compiler::Address(THR, Thread::vm_tag_offset()));
- __ mov(SP, R25);
- __ mov(CSP, R23);
+ __ mov(SP, kCallLeafRuntimeCalleeSaveScratch2);
+ __ mov(CSP, kCallLeafRuntimeCalleeSaveScratch1);
ASSERT((kAbiPreservedCpuRegs & (1 << THR)) != 0);
ASSERT((kAbiPreservedCpuRegs & (1 << PP)) != 0);
} else {
diff --git a/runtime/vm/scopes.h b/runtime/vm/scopes.h
index a9761d7..b003324 100644
--- a/runtime/vm/scopes.h
+++ b/runtime/vm/scopes.h
@@ -91,7 +91,7 @@
is_invisible_(false),
is_captured_parameter_(false),
is_forced_stack_(false),
- is_explicit_covariant_parameter_(false),
+ covariance_mode_(kNotCovariant),
is_late_(false),
is_chained_future_(false),
expected_context_index_(-1),
@@ -147,10 +147,17 @@
}
bool is_explicit_covariant_parameter() const {
- return is_explicit_covariant_parameter_;
+ return covariance_mode_ == kExplicit;
}
- void set_is_explicit_covariant_parameter() {
- is_explicit_covariant_parameter_ = true;
+ void set_is_explicit_covariant_parameter() { covariance_mode_ = kExplicit; }
+
+ bool needs_covariant_check_in_method() const {
+ return covariance_mode_ != kNotCovariant;
+ }
+ void set_needs_covariant_check_in_method() {
+ if (covariance_mode_ == kNotCovariant) {
+ covariance_mode_ = kImplicit;
+ }
}
enum TypeCheckMode {
@@ -208,6 +215,12 @@
bool Equals(const LocalVariable& other) const;
private:
+ enum CovarianceMode {
+ kNotCovariant,
+ kImplicit,
+ kExplicit,
+ };
+
static const int kUninitializedIndex = INT_MIN;
const TokenPosition declaration_pos_;
@@ -228,7 +241,7 @@
bool is_invisible_;
bool is_captured_parameter_;
bool is_forced_stack_;
- bool is_explicit_covariant_parameter_;
+ CovarianceMode covariance_mode_;
bool is_late_;
bool is_chained_future_;
intptr_t expected_context_index_;
diff --git a/tools/VERSION b/tools/VERSION
index f0d3c56..d445c09 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -26,6 +26,6 @@
CHANNEL stable
MAJOR 2
MINOR 10
-PATCH 0
+PATCH 1
PRERELEASE 0
PRERELEASE_PATCH 0