Version 2.13.0-1.0.dev
Merge commit 'd423a3cd7aec14d6fcb4ff4f554cd3d62a857633' into 'dev'
diff --git a/runtime/vm/code_patcher.h b/runtime/vm/code_patcher.h
index 30828a1..9e1e3cb 100644
--- a/runtime/vm/code_patcher.h
+++ b/runtime/vm/code_patcher.h
@@ -86,8 +86,8 @@
const Code& target);
static ObjectPtr GetSwitchableCallDataAt(uword return_address,
const Code& caller_code);
- static CodePtr GetSwitchableCallTargetAt(uword return_address,
- const Code& caller_code);
+ static uword GetSwitchableCallTargetEntryAt(uword return_address,
+ const Code& caller_code);
static CodePtr GetNativeCallAt(uword return_address,
const Code& caller_code,
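
The header change above switches the switchable-call query from returning a CodePtr to returning the installed target's raw entry address as a uword; call sites that used to compare Code objects now compare entry points (see the runtime_entry.cc hunk below). A minimal standalone sketch of that caller-side adaptation, using made-up stand-ins rather than the VM's real Code and StubCode types:

    #include <cassert>
    #include <cstdint>

    // Illustrative stand-in for a Code object exposing its monomorphic entry.
    struct FakeCode {
      uintptr_t entry;
      uintptr_t MonomorphicEntryPoint() const { return entry; }
    };

    int main() {
      FakeCode switchable_call_miss{0x1000};

      // Old shape: the patcher handed back the Code object, so callers
      // compared object identity.
      const FakeCode* old_target = &switchable_call_miss;
      assert(old_target == &switchable_call_miss);

      // New shape: the patcher hands back only the entry address, so callers
      // compare against the stub's MonomorphicEntryPoint() instead.
      uintptr_t old_entry = 0x1000;
      assert(old_entry == switchable_call_miss.MonomorphicEntryPoint());
      return 0;
    }
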
diff --git a/runtime/vm/code_patcher_arm.cc b/runtime/vm/code_patcher_arm.cc
index 89de68a..0947b58 100644
--- a/runtime/vm/code_patcher_arm.cc
+++ b/runtime/vm/code_patcher_arm.cc
@@ -108,15 +108,15 @@
}
}
-CodePtr CodePatcher::GetSwitchableCallTargetAt(uword return_address,
- const Code& caller_code) {
+uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
+ const Code& caller_code) {
ASSERT(caller_code.ContainsInstructionAt(return_address));
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
BareSwitchableCallPattern call(return_address, caller_code);
- return call.target();
+ return call.target_entry();
} else {
SwitchableCallPattern call(return_address, caller_code);
- return call.target();
+ return call.target_entry();
}
}
diff --git a/runtime/vm/code_patcher_arm64.cc b/runtime/vm/code_patcher_arm64.cc
index a1d4c4d..b7590db 100644
--- a/runtime/vm/code_patcher_arm64.cc
+++ b/runtime/vm/code_patcher_arm64.cc
@@ -144,15 +144,15 @@
}
}
-CodePtr CodePatcher::GetSwitchableCallTargetAt(uword return_address,
- const Code& caller_code) {
+uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
+ const Code& caller_code) {
ASSERT(caller_code.ContainsInstructionAt(return_address));
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
BareSwitchableCallPattern call(return_address, caller_code);
- return call.target();
+ return call.target_entry();
} else {
SwitchableCallPattern call(return_address, caller_code);
- return call.target();
+ return call.target_entry();
}
}
diff --git a/runtime/vm/code_patcher_ia32.cc b/runtime/vm/code_patcher_ia32.cc
index 3104115..08ce568 100644
--- a/runtime/vm/code_patcher_ia32.cc
+++ b/runtime/vm/code_patcher_ia32.cc
@@ -261,11 +261,11 @@
UNREACHABLE();
}
-CodePtr CodePatcher::GetSwitchableCallTargetAt(uword return_address,
- const Code& caller_code) {
+uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
+ const Code& caller_code) {
// Switchable instance calls only generated for precompilation.
UNREACHABLE();
- return Code::null();
+ return 0;
}
ObjectPtr CodePatcher::GetSwitchableCallDataAt(uword return_address,
diff --git a/runtime/vm/code_patcher_x64.cc b/runtime/vm/code_patcher_x64.cc
index c916789..1ba005f 100644
--- a/runtime/vm/code_patcher_x64.cc
+++ b/runtime/vm/code_patcher_x64.cc
@@ -321,8 +321,9 @@
// No need to flush the instruction cache, since the code is not modified.
}
- CodePtr target() const {
- return static_cast<CodePtr>(object_pool_.ObjectAt(target_index()));
+ uword target_entry() const {
+ return Code::Handle(Code::RawCast(object_pool_.ObjectAt(target_index())))
+ .MonomorphicEntryPoint();
}
};
@@ -395,18 +396,7 @@
object_pool_.SetRawValueAt(target_index(), target.MonomorphicEntryPoint());
}
- CodePtr target() const {
- const uword pc = object_pool_.RawValueAt(target_index());
- CodePtr result = ReversePc::Lookup(IsolateGroup::Current(), pc);
- if (result != Code::null()) {
- return result;
- }
- result = ReversePc::Lookup(Dart::vm_isolate_group(), pc);
- if (result != Code::null()) {
- return result;
- }
- UNREACHABLE();
- }
+ uword target_entry() const { return object_pool_.RawValueAt(target_index()); }
};
CodePtr CodePatcher::GetStaticCallTargetAt(uword return_address,
@@ -511,15 +501,15 @@
}
}
-CodePtr CodePatcher::GetSwitchableCallTargetAt(uword return_address,
- const Code& caller_code) {
+uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
+ const Code& caller_code) {
ASSERT(caller_code.ContainsInstructionAt(return_address));
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
BareSwitchableCall call(return_address, caller_code);
- return call.target();
+ return call.target_entry();
} else {
SwitchableCall call(return_address, caller_code);
- return call.target();
+ return call.target_entry();
}
}
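
A note on the x64 patcher change above: in bare-instructions mode the object-pool slot already holds the raw monomorphic entry address of the target, so target_entry() can return it directly instead of reverse-mapping the PC to a Code object via ReversePc::Lookup; in the non-bare case the pool still holds the Code object, and its MonomorphicEntryPoint() is read off a handle. A minimal standalone sketch of why the bare path gets cheaper (a simplified model with illustrative types, not VM code):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // Simplified stand-ins for the VM's object pool and code objects.
    struct FakeCode {
      uintptr_t monomorphic_entry;  // like Code::MonomorphicEntryPoint()
    };

    struct BarePoolSlot {
      uintptr_t raw_entry;  // bare mode: the pool stores a raw entry PC
    };

    // Old scheme (bare mode): map the raw PC back to its Code object by
    // searching a reverse table.
    const FakeCode* ReverseLookup(const std::vector<FakeCode>& table,
                                  uintptr_t pc) {
      for (const FakeCode& code : table) {
        if (code.monomorphic_entry == pc) return &code;
      }
      return nullptr;  // would be UNREACHABLE() in the VM
    }

    // New scheme: hand back the raw entry; callers compare entry addresses.
    uintptr_t TargetEntry(const BarePoolSlot& slot) { return slot.raw_entry; }

    int main() {
      std::vector<FakeCode> table = {{0x1000}, {0x2000}};
      BarePoolSlot slot{0x2000};
      // Both answer "which target is installed?", but the new form skips the
      // reverse scan entirely.
      assert(ReverseLookup(table, slot.raw_entry)->monomorphic_entry ==
             TargetEntry(slot));
      return 0;
    }
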
diff --git a/runtime/vm/compiler/backend/constant_propagator.cc b/runtime/vm/compiler/backend/constant_propagator.cc
index 427c0b9..df2b9df 100644
--- a/runtime/vm/compiler/backend/constant_propagator.cc
+++ b/runtime/vm/compiler/backend/constant_propagator.cc
@@ -854,8 +854,8 @@
if (!FLAG_fields_may_be_reset) {
const Field& field = instr->field();
ASSERT(field.is_static());
- if (field.is_final() && instr->IsFieldInitialized()) {
- Instance& obj = Instance::Handle(Z, field.StaticValue());
+ auto& obj = Instance::Handle(Z);
+ if (field.is_final() && instr->IsFieldInitialized(&obj)) {
if (obj.IsSmi() || (obj.IsOld() && obj.IsCanonical())) {
SetValue(instr, obj);
return;
diff --git a/runtime/vm/compiler/backend/il.cc b/runtime/vm/compiler/backend/il.cc
index f7cae12..b90b712 100644
--- a/runtime/vm/compiler/backend/il.cc
+++ b/runtime/vm/compiler/backend/il.cc
@@ -1135,10 +1135,32 @@
return field().ptr() == other->AsLoadStaticField()->field().ptr();
}
-bool LoadStaticFieldInstr::IsFieldInitialized() const {
+bool LoadStaticFieldInstr::IsFieldInitialized(Instance* field_value) const {
+ if (FLAG_fields_may_be_reset) {
+ return false;
+ }
+
+  // Since new isolates will be spawned, the JITed code cannot depend on whether
+  // a global field was initialized when running with --enable-isolate-groups.
+ if (IsolateGroup::AreIsolateGroupsEnabled()) return false;
+
const Field& field = this->field();
- return (field.StaticValue() != Object::sentinel().ptr()) &&
- (field.StaticValue() != Object::transition_sentinel().ptr());
+ Isolate* only_isolate = IsolateGroup::Current()->FirstIsolate();
+ if (only_isolate == nullptr) {
+    // This can happen if the background compiler executes this code but the
+    // mutator is being shut down and the isolate was already unregistered from
+    // the group (and is trying to stop this background compiler).
+ if (field_value != nullptr) {
+ *field_value = Object::sentinel().ptr();
+ }
+ return false;
+ }
+ if (field_value == nullptr) {
+ field_value = &Instance::Handle();
+ }
+ *field_value = only_isolate->field_table()->At(field.field_id());
+ return (field_value->ptr() != Object::sentinel().ptr()) &&
+ (field_value->ptr() != Object::transition_sentinel().ptr());
}
Definition* LoadStaticFieldInstr::Canonicalize(FlowGraph* flow_graph) {
@@ -1146,8 +1168,7 @@
// make it safe to omit code that checks if the field needs initialization
// because the field will be reset so it starts uninitialized in the process
// running the precompiled code. We must be prepared to reinitialize fields.
- if (calls_initializer() && !FLAG_fields_may_be_reset &&
- IsFieldInitialized()) {
+ if (calls_initializer() && IsFieldInitialized()) {
set_calls_initializer(false);
}
return this;
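
Since IsFieldInitialized() now reads the field's current value out of the first isolate's field table and can hand it back through the optional out-parameter, callers that previously did a separate field.StaticValue() lookup (the constant and type propagators in this patch) get both answers from one call. A minimal sketch of that caller pattern under a simplified signature (illustrative types only, not the VM's Field or Instance):

    #include <cassert>
    #include <optional>

    // Simplified model of a static field slot: empty means it still holds the
    // sentinel, i.e. the field is uninitialized.
    using FieldSlot = std::optional<int>;

    // Mirrors the new shape: report whether the field is initialized and,
    // optionally, hand its value back so the caller avoids a second lookup.
    bool IsFieldInitialized(const FieldSlot& slot, int* field_value = nullptr) {
      if (!slot.has_value()) return false;  // sentinel / transition_sentinel
      if (field_value != nullptr) *field_value = *slot;
      return true;
    }

    int main() {
      FieldSlot initialized = 42;
      FieldSlot uninitialized;  // still the sentinel

      int value = 0;
      // Caller pattern used by the propagators in this patch: one call both
      // tests initialization and fetches the value.
      if (IsFieldInitialized(initialized, &value)) assert(value == 42);
      assert(!IsFieldInitialized(uninitialized));
      return 0;
    }
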
diff --git a/runtime/vm/compiler/backend/il.h b/runtime/vm/compiler/backend/il.h
index 345f9b2..f27870d 100644
--- a/runtime/vm/compiler/backend/il.h
+++ b/runtime/vm/compiler/backend/il.h
@@ -5543,7 +5543,7 @@
virtual CompileType ComputeType() const;
const Field& field() const { return field_; }
- bool IsFieldInitialized() const;
+ bool IsFieldInitialized(Instance* field_value = nullptr) const;
bool calls_initializer() const { return calls_initializer_; }
void set_calls_initializer(bool value) { calls_initializer_ = value; }
diff --git a/runtime/vm/compiler/backend/type_propagator.cc b/runtime/vm/compiler/backend/type_propagator.cc
index 3ae21f6..5f224b06 100644
--- a/runtime/vm/compiler/backend/type_propagator.cc
+++ b/runtime/vm/compiler/backend/type_propagator.cc
@@ -1446,9 +1446,9 @@
AbstractType* abstract_type = &AbstractType::ZoneHandle(field.type());
TraceStrongModeType(this, *abstract_type);
ASSERT(field.is_static());
- const bool is_initialized = IsFieldInitialized() && !FLAG_fields_may_be_reset;
+ auto& obj = Instance::Handle();
+ const bool is_initialized = IsFieldInitialized(&obj);
if (field.is_final() && is_initialized) {
- const Instance& obj = Instance::Handle(field.StaticValue());
if (!obj.IsNull()) {
is_nullable = CompileType::kNonNullable;
cid = obj.GetClassId();
diff --git a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
index 9856b71..1bb096e 100644
--- a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
+++ b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
@@ -70,20 +70,6 @@
B->last_used_block_id_, prologue_info);
}
-void StreamingFlowGraphBuilder::EvaluateConstFieldValue(const Field& field) {
- ASSERT(field.is_const() && field.IsUninitialized());
-
- FieldHelper field_helper(this);
- field_helper.ReadUntilExcluding(FieldHelper::kInitializer);
- Tag initializer_tag = ReadTag(); // read first part of initializer.
-
- ASSERT(initializer_tag == kSomething);
-
- Instance& value =
- Instance::Handle(Z, constant_reader_.ReadConstantExpression());
- field.SetStaticValue(value);
-}
-
void StreamingFlowGraphBuilder::SetupDefaultParameterValues() {
intptr_t optional_parameter_count =
parsed_function()->function().NumOptionalParameters();
@@ -1024,10 +1010,6 @@
case UntaggedFunction::kImplicitGetter:
case UntaggedFunction::kImplicitStaticGetter:
case UntaggedFunction::kImplicitSetter: {
- const Field& field = Field::Handle(Z, function.accessor_field());
- if (field.is_const() && field.IsUninitialized()) {
- EvaluateConstFieldValue(field);
- }
return B->BuildGraphOfFieldAccessor(function);
}
case UntaggedFunction::kFieldInitializer:
diff --git a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
index f469fc4..7f133f7 100644
--- a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
+++ b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
@@ -59,7 +59,6 @@
void ParseKernelASTFunction();
void ReadForwardingStubTarget(const Function& function);
- void EvaluateConstFieldValue(const Field& field);
void SetupDefaultParameterValues();
FlowGraph* BuildGraphOfFieldInitializer();
diff --git a/runtime/vm/instructions_arm.cc b/runtime/vm/instructions_arm.cc
index 16bb992..c2b7014 100644
--- a/runtime/vm/instructions_arm.cc
+++ b/runtime/vm/instructions_arm.cc
@@ -292,9 +292,11 @@
ASSERT(reg == CODE_REG);
}
-CodePtr SwitchableCallPattern::target() const {
- return static_cast<CodePtr>(object_pool_.ObjectAt(target_pool_index_));
+uword SwitchableCallPattern::target_entry() const {
+ return Code::Handle(Code::RawCast(object_pool_.ObjectAt(target_pool_index_)))
+ .MonomorphicEntryPoint();
}
+
void SwitchableCallPattern::SetTarget(const Code& target) const {
ASSERT(Object::Handle(object_pool_.ObjectAt(target_pool_index_)).IsCode());
object_pool_.SetObjectAt(target_pool_index_, target);
@@ -316,17 +318,8 @@
ASSERT(reg == LINK_REGISTER);
}
-CodePtr BareSwitchableCallPattern::target() const {
- const uword pc = object_pool_.RawValueAt(target_pool_index_);
- CodePtr result = ReversePc::Lookup(IsolateGroup::Current(), pc);
- if (result != Code::null()) {
- return result;
- }
- result = ReversePc::Lookup(Dart::vm_isolate_group(), pc);
- if (result != Code::null()) {
- return result;
- }
- UNREACHABLE();
+uword BareSwitchableCallPattern::target_entry() const {
+ return object_pool_.RawValueAt(target_pool_index_);
}
void BareSwitchableCallPattern::SetTarget(const Code& target) const {
diff --git a/runtime/vm/instructions_arm.h b/runtime/vm/instructions_arm.h
index 5fe9ab3..7b7b43b 100644
--- a/runtime/vm/instructions_arm.h
+++ b/runtime/vm/instructions_arm.h
@@ -153,7 +153,7 @@
public:
SwitchableCallPattern(uword pc, const Code& code);
- CodePtr target() const;
+ uword target_entry() const;
void SetTarget(const Code& target) const;
private:
@@ -168,7 +168,7 @@
public:
BareSwitchableCallPattern(uword pc, const Code& code);
- CodePtr target() const;
+ uword target_entry() const;
void SetTarget(const Code& target) const;
private:
diff --git a/runtime/vm/instructions_arm64.cc b/runtime/vm/instructions_arm64.cc
index e7a0d24..a38a07e 100644
--- a/runtime/vm/instructions_arm64.cc
+++ b/runtime/vm/instructions_arm64.cc
@@ -428,8 +428,9 @@
target_pool_index_ = pool_index + 1;
}
-CodePtr SwitchableCallPattern::target() const {
- return static_cast<CodePtr>(object_pool_.ObjectAt(target_pool_index_));
+uword SwitchableCallPattern::target_entry() const {
+ return Code::Handle(Code::RawCast(object_pool_.ObjectAt(target_pool_index_)))
+ .MonomorphicEntryPoint();
}
void SwitchableCallPattern::SetTarget(const Code& target) const {
@@ -454,17 +455,8 @@
target_pool_index_ = pool_index + 1;
}
-CodePtr BareSwitchableCallPattern::target() const {
- const uword pc = object_pool_.RawValueAt(target_pool_index_);
- CodePtr result = ReversePc::Lookup(IsolateGroup::Current(), pc);
- if (result != Code::null()) {
- return result;
- }
- result = ReversePc::Lookup(Dart::vm_isolate_group(), pc);
- if (result != Code::null()) {
- return result;
- }
- UNREACHABLE();
+uword BareSwitchableCallPattern::target_entry() const {
+ return object_pool_.RawValueAt(target_pool_index_);
}
void BareSwitchableCallPattern::SetTarget(const Code& target) const {
diff --git a/runtime/vm/instructions_arm64.h b/runtime/vm/instructions_arm64.h
index 4a8f7e2..ae75539 100644
--- a/runtime/vm/instructions_arm64.h
+++ b/runtime/vm/instructions_arm64.h
@@ -163,7 +163,7 @@
public:
SwitchableCallPattern(uword pc, const Code& code);
- CodePtr target() const;
+ uword target_entry() const;
void SetTarget(const Code& target) const;
private:
@@ -178,7 +178,7 @@
public:
BareSwitchableCallPattern(uword pc, const Code& code);
- CodePtr target() const;
+ uword target_entry() const;
void SetTarget(const Code& target) const;
private:
diff --git a/runtime/vm/runtime_entry.cc b/runtime/vm/runtime_entry.cc
index 2a7cbd5..2c31114 100644
--- a/runtime/vm/runtime_entry.cc
+++ b/runtime/vm/runtime_entry.cc
@@ -1520,11 +1520,12 @@
private:
FunctionPtr ResolveTargetFunction(const Object& data);
- void HandleMiss(const Object& old_data,
- const Code& old_target,
- const Function& target_function);
#if defined(DART_PRECOMPILED_RUNTIME)
+ void HandleMissAOT(const Object& old_data,
+ uword old_entry,
+ const Function& target_function);
+
void DoUnlinkedCallAOT(const UnlinkedCall& unlinked,
const Function& target_function);
void DoMonomorphicMissAOT(const Object& data,
@@ -1538,6 +1539,10 @@
intptr_t* lower,
intptr_t* upper);
#else
+ void HandleMissJIT(const Object& old_data,
+ const Code& old_target,
+ const Function& target_function);
+
void DoMonomorphicMissJIT(const Object& data,
const Function& target_function);
void DoICDataMissJIT(const ICData& data,
@@ -2086,7 +2091,6 @@
const auto& target_function =
Function::Handle(zone_, ResolveTargetFunction(old_data));
- auto& code = Code::Handle(zone_);
auto& data = Object::Handle(zone_);
// We ensure any transitions in patchable calls are done in an atomic
@@ -2100,9 +2104,12 @@
#if defined(DART_PRECOMPILED_RUNTIME)
data =
CodePatcher::GetSwitchableCallDataAt(caller_frame_->pc(), caller_code_);
- DEBUG_ONLY(code = CodePatcher::GetSwitchableCallTargetAt(caller_frame_->pc(),
- caller_code_));
+ uword target_entry = 0;
+ DEBUG_ONLY(target_entry = CodePatcher::GetSwitchableCallTargetEntryAt(
+ caller_frame_->pc(), caller_code_));
+ HandleMissAOT(data, target_entry, target_function);
#else
+ auto& code = Code::Handle(zone_);
if (should_consider_patching()) {
code ^= CodePatcher::GetInstanceCallAt(caller_frame_->pc(), caller_code_,
&data);
@@ -2110,34 +2117,52 @@
ASSERT(old_data.IsICData() || old_data.IsMegamorphicCache());
data = old_data.ptr();
}
+ HandleMissJIT(data, code, target_function);
#endif
- HandleMiss(data, code, target_function);
}
-void PatchableCallHandler::HandleMiss(const Object& old_data,
- const Code& old_code,
- const Function& target_function) {
- switch (old_data.GetClassId()) {
#if defined(DART_PRECOMPILED_RUNTIME)
+
+void PatchableCallHandler::HandleMissAOT(const Object& old_data,
+ uword old_entry,
+ const Function& target_function) {
+ switch (old_data.GetClassId()) {
case kUnlinkedCallCid:
- ASSERT(old_code.ptr() == StubCode::SwitchableCallMiss().ptr());
+ ASSERT(old_entry ==
+ StubCode::SwitchableCallMiss().MonomorphicEntryPoint());
DoUnlinkedCallAOT(UnlinkedCall::Cast(old_data), target_function);
break;
case kMonomorphicSmiableCallCid:
- ASSERT(old_code.ptr() == StubCode::MonomorphicSmiableCheck().ptr());
+ ASSERT(old_entry ==
+ StubCode::MonomorphicSmiableCheck().MonomorphicEntryPoint());
FALL_THROUGH;
case kSmiCid:
DoMonomorphicMissAOT(old_data, target_function);
break;
case kSingleTargetCacheCid:
- ASSERT(old_code.ptr() == StubCode::SingleTargetCall().ptr());
+ ASSERT(old_entry == StubCode::SingleTargetCall().MonomorphicEntryPoint());
DoSingleTargetMissAOT(SingleTargetCache::Cast(old_data), target_function);
break;
case kICDataCid:
- ASSERT(old_code.ptr() == StubCode::ICCallThroughCode().ptr());
+ ASSERT(old_entry ==
+ StubCode::ICCallThroughCode().MonomorphicEntryPoint());
DoICDataMissAOT(ICData::Cast(old_data), target_function);
break;
+ case kMegamorphicCacheCid:
+ ASSERT(old_entry == StubCode::MegamorphicCall().MonomorphicEntryPoint());
+ DoMegamorphicMiss(MegamorphicCache::Cast(old_data), target_function);
+ break;
+ default:
+ UNREACHABLE();
+ }
+}
+
#else
+
+void PatchableCallHandler::HandleMissJIT(const Object& old_data,
+ const Code& old_code,
+ const Function& target_function) {
+ switch (old_data.GetClassId()) {
case kArrayCid:
// ICData three-element array: Smi(receiver CID), Smi(count),
// Function(target). It is the Array from ICData::entries_.
@@ -2146,7 +2171,6 @@
case kICDataCid:
DoICDataMissJIT(ICData::Cast(old_data), old_code, target_function);
break;
-#endif // defined(DART_PRECOMPILED_RUNTIME)
case kMegamorphicCacheCid:
ASSERT(old_code.ptr() == StubCode::MegamorphicCall().ptr() ||
(old_code.IsNull() && !should_consider_patching()));
@@ -2156,6 +2180,7 @@
UNREACHABLE();
}
}
+#endif // defined(DART_PRECOMPILED_RUNTIME)
static void InlineCacheMissHandler(Thread* thread,
Zone* zone,
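
The runtime_entry.cc reorganization splits the former HandleMiss into HandleMissAOT, which can only sanity-check the raw entry address currently installed at the call site, and HandleMissJIT, which still sees the patched Code object. A compact standalone sketch of the AOT-side dispatch shape (names and values are illustrative, not the real stubs):

    #include <cassert>
    #include <cstdint>

    // Simplified model of HandleMissAOT: the kind of call-site data determines
    // which stub entry must currently be installed before repatching.
    enum class CallDataKind {
      kUnlinkedCall,
      kSingleTargetCache,
      kICData,
      kMegamorphicCache,
    };

    struct Stubs {
      uintptr_t switchable_call_miss;
      uintptr_t single_target_call;
      uintptr_t ic_call_through_code;
      uintptr_t megamorphic_call;
    };

    uintptr_t ExpectedEntry(const Stubs& stubs, CallDataKind kind) {
      switch (kind) {
        case CallDataKind::kUnlinkedCall:      return stubs.switchable_call_miss;
        case CallDataKind::kSingleTargetCache: return stubs.single_target_call;
        case CallDataKind::kICData:            return stubs.ic_call_through_code;
        case CallDataKind::kMegamorphicCache:  return stubs.megamorphic_call;
      }
      return 0;
    }

    int main() {
      Stubs stubs{0x100, 0x200, 0x300, 0x400};
      // The real handler asserts old_entry against the matching stub's
      // MonomorphicEntryPoint() and then dispatches to the Do*MissAOT helper.
      uintptr_t old_entry = 0x300;  // pretend ICCallThroughCode is installed
      assert(old_entry == ExpectedEntry(stubs, CallDataKind::kICData));
      return 0;
    }
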
diff --git a/tools/VERSION b/tools/VERSION
index 8ef6d63..79ef3203 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
MAJOR 2
MINOR 13
PATCH 0
-PRERELEASE 0
+PRERELEASE 1
PRERELEASE_PATCH 0
\ No newline at end of file