[vm, compiler] Unoptimized megamorphic calls.
When an instance call in unoptimized code creates more than FLAG_max_polymorphic_checks cases, switch the call to use a MegamorphicCache instead of ICData. This prevents unbounded collection of type feedback, and gives improvements on microbenchmarks in the 3-8% range for unoptimized code.
It also leads to a loss of target frequency information for the optimizer, leading to different ordering for range checks in polymorphic inlining. This leads to changes on megamorphic microbenchmarks from -31% to +60%, weighted toward the negative end.
In practice the frequency information seems unimportant, as dart2js has 4.01% geomean improvement.
This is a step toward direct monomorphic calls in unoptimized code, which will also make use of the patching and type feedback extraction added here.
Bug: https://github.com/dart-lang/sdk/issues/26780
Bug: https://github.com/dart-lang/sdk/issues/36409
Bug: https://github.com/dart-lang/sdk/issues/36731
Change-Id: I29f53f23b6794c5f5f0db8b8184788cee16fd9c5
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/99270
Reviewed-by: Alexander Markov <alexmarkov@google.com>
diff --git a/runtime/vm/code_patcher.h b/runtime/vm/code_patcher.h
index 239a69d..ceacea6 100644
--- a/runtime/vm/code_patcher.h
+++ b/runtime/vm/code_patcher.h
@@ -49,12 +49,18 @@
// in given code.
static RawCode* GetStaticCallTargetAt(uword return_address, const Code& code);
- // Get instance call information. Returns the call target and sets each
- // of the output parameters ic_data and arguments_descriptor if they are
- // non-NULL.
+ // Get instance call information. Returns the call target and sets the output
+ // parameter data if non-NULL.
static RawCode* GetInstanceCallAt(uword return_address,
- const Code& code,
- ICData* ic_data);
+ const Code& caller_code,
+ Object* data);
+
+ // Change the state of an instance call by patching the corresponding object
+ // pool entries (non-IA32) or instructions (IA32).
+ static void PatchInstanceCallAt(uword return_address,
+ const Code& caller_code,
+ const Object& data,
+ const Code& target);
// Return target of an unoptimized static call and its ICData object
// (calls target via a stub).
@@ -79,22 +85,22 @@
#if defined(TARGET_ARCH_DBC)
static NativeFunctionWrapper GetNativeCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
NativeFunction* target);
#else
static RawCode* GetNativeCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
NativeFunction* target);
#endif
#if defined(TARGET_ARCH_DBC)
static void PatchNativeCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
NativeFunction target,
NativeFunctionWrapper trampoline);
#else
static void PatchNativeCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
NativeFunction target,
const Code& trampoline);
#endif
diff --git a/runtime/vm/code_patcher_arm.cc b/runtime/vm/code_patcher_arm.cc
index 362b8b0..024a6ab 100644
--- a/runtime/vm/code_patcher_arm.cc
+++ b/runtime/vm/code_patcher_arm.cc
@@ -33,23 +33,33 @@
}
RawCode* CodePatcher::GetInstanceCallAt(uword return_address,
- const Code& code,
- ICData* ic_data) {
- ASSERT(code.ContainsInstructionAt(return_address));
- CallPattern call(return_address, code);
- if (ic_data != NULL) {
- *ic_data = call.IcData();
+ const Code& caller_code,
+ Object* data) {
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ ICCallPattern call(return_address, caller_code);
+ if (data != NULL) {
+ *data = call.Data();
}
return call.TargetCode();
}
+void CodePatcher::PatchInstanceCallAt(uword return_address,
+ const Code& caller_code,
+ const Object& data,
+ const Code& target) {
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ ICCallPattern call(return_address, caller_code);
+ call.SetData(data);
+ call.SetTargetCode(target);
+}
+
RawFunction* CodePatcher::GetUnoptimizedStaticCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
ICData* ic_data_result) {
- ASSERT(code.ContainsInstructionAt(return_address));
- CallPattern static_call(return_address, code);
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ ICCallPattern static_call(return_address, caller_code);
ICData& ic_data = ICData::Handle();
- ic_data = static_call.IcData();
+ ic_data ^= static_call.Data();
if (ic_data_result != NULL) {
*ic_data_result = ic_data.raw();
}
diff --git a/runtime/vm/code_patcher_arm64.cc b/runtime/vm/code_patcher_arm64.cc
index 7716090..ce3c4dc 100644
--- a/runtime/vm/code_patcher_arm64.cc
+++ b/runtime/vm/code_patcher_arm64.cc
@@ -68,23 +68,33 @@
}
RawCode* CodePatcher::GetInstanceCallAt(uword return_address,
- const Code& code,
- ICData* ic_data) {
- ASSERT(code.ContainsInstructionAt(return_address));
- CallPattern call(return_address, code);
- if (ic_data != NULL) {
- *ic_data = call.IcData();
+ const Code& caller_code,
+ Object* data) {
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ ICCallPattern call(return_address, caller_code);
+ if (data != NULL) {
+ *data = call.Data();
}
return call.TargetCode();
}
+void CodePatcher::PatchInstanceCallAt(uword return_address,
+ const Code& caller_code,
+ const Object& data,
+ const Code& target) {
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ ICCallPattern call(return_address, caller_code);
+ call.SetData(data);
+ call.SetTargetCode(target);
+}
+
RawFunction* CodePatcher::GetUnoptimizedStaticCallAt(uword return_address,
const Code& code,
ICData* ic_data_result) {
ASSERT(code.ContainsInstructionAt(return_address));
- CallPattern static_call(return_address, code);
+ ICCallPattern static_call(return_address, code);
ICData& ic_data = ICData::Handle();
- ic_data ^= static_call.IcData();
+ ic_data ^= static_call.Data();
if (ic_data_result != NULL) {
*ic_data_result = ic_data.raw();
}
@@ -132,20 +142,20 @@
}
void CodePatcher::PatchNativeCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
NativeFunction target,
const Code& trampoline) {
- ASSERT(code.ContainsInstructionAt(return_address));
- NativeCallPattern call(return_address, code);
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ NativeCallPattern call(return_address, caller_code);
call.set_target(trampoline);
call.set_native_function(target);
}
RawCode* CodePatcher::GetNativeCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
NativeFunction* target) {
- ASSERT(code.ContainsInstructionAt(return_address));
- NativeCallPattern call(return_address, code);
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ NativeCallPattern call(return_address, caller_code);
*target = call.native_function();
return call.target();
}
diff --git a/runtime/vm/code_patcher_dbc.cc b/runtime/vm/code_patcher_dbc.cc
index 9c478ff..93bc87a 100644
--- a/runtime/vm/code_patcher_dbc.cc
+++ b/runtime/vm/code_patcher_dbc.cc
@@ -33,23 +33,33 @@
}
RawCode* CodePatcher::GetInstanceCallAt(uword return_address,
- const Code& code,
- ICData* ic_data) {
- ASSERT(code.ContainsInstructionAt(return_address));
- CallPattern call(return_address, code);
- if (ic_data != NULL) {
- *ic_data = call.IcData();
+ const Code& caller_code,
+ Object* cache) {
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ CallPattern call(return_address, caller_code);
+ if (cache != NULL) {
+ *cache = call.Data();
}
return call.TargetCode();
}
+void CodePatcher::PatchInstanceCallAt(uword return_address,
+ const Code& caller_code,
+ const Object& data,
+ const Code& target) {
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ CallPattern call(return_address, caller_code);
+ call.SetData(data);
+ call.SetTargetCode(target);
+}
+
RawFunction* CodePatcher::GetUnoptimizedStaticCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
ICData* ic_data_result) {
- ASSERT(code.ContainsInstructionAt(return_address));
- CallPattern static_call(return_address, code);
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ CallPattern static_call(return_address, caller_code);
ICData& ic_data = ICData::Handle();
- ic_data ^= static_call.IcData();
+ ic_data ^= static_call.Data();
if (ic_data_result != NULL) {
*ic_data_result = ic_data.raw();
}
@@ -91,10 +101,10 @@
}
NativeFunctionWrapper CodePatcher::GetNativeCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
NativeFunction* target) {
- ASSERT(code.ContainsInstructionAt(return_address));
- NativeCallPattern call(return_address, code);
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ NativeCallPattern call(return_address, caller_code);
*target = call.native_function();
return call.target();
}
diff --git a/runtime/vm/code_patcher_ia32.cc b/runtime/vm/code_patcher_ia32.cc
index ccc8c07..bcedb83 100644
--- a/runtime/vm/code_patcher_ia32.cc
+++ b/runtime/vm/code_patcher_ia32.cc
@@ -77,12 +77,31 @@
explicit InstanceCall(uword return_address)
: UnoptimizedCall(return_address) {
#if defined(DEBUG)
- ICData& test_ic_data = ICData::Handle();
- test_ic_data ^= ic_data();
- ASSERT(test_ic_data.NumArgsTested() > 0);
+ Object& test_data = Object::Handle(data());
+ ASSERT(test_data.IsICData() || test_data.IsMegamorphicCache());
+ if (test_data.IsICData()) {
+ ASSERT(ICData::Cast(test_data).NumArgsTested() > 0);
+ }
#endif // DEBUG
}
+ RawObject* data() const { return *reinterpret_cast<RawObject**>(start_ + 1); }
+ void set_data(const Object& data) const {
+ uword* cache_addr = reinterpret_cast<uword*>(start_ + 1);
+ uword imm = reinterpret_cast<uword>(data.raw());
+ *cache_addr = imm;
+ }
+
+ RawCode* target() const {
+ const uword imm = *reinterpret_cast<uword*>(start_ + 6);
+ return reinterpret_cast<RawCode*>(imm);
+ }
+ void set_target(const Code& target) const {
+ uword* target_addr = reinterpret_cast<uword*>(start_ + 6);
+ uword imm = reinterpret_cast<uword>(target.raw());
+ *target_addr = imm;
+ }
+
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(InstanceCall);
};
@@ -168,20 +187,32 @@
}
RawCode* CodePatcher::GetInstanceCallAt(uword return_address,
- const Code& code,
- ICData* ic_data) {
- ASSERT(code.ContainsInstructionAt(return_address));
+ const Code& caller_code,
+ Object* data) {
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
InstanceCall call(return_address);
- if (ic_data != NULL) {
- *ic_data ^= call.ic_data();
+ if (data != NULL) {
+ *data = call.data();
}
return Code::null();
}
+void CodePatcher::PatchInstanceCallAt(uword return_address,
+ const Code& caller_code,
+ const Object& data,
+ const Code& target) {
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ const Instructions& instrs = Instructions::Handle(caller_code.instructions());
+ WritableInstructionsScope writable(instrs.PayloadStart(), instrs.Size());
+ InstanceCall call(return_address);
+ call.set_data(data);
+ call.set_target(target);
+}
+
RawFunction* CodePatcher::GetUnoptimizedStaticCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
ICData* ic_data_result) {
- ASSERT(code.ContainsInstructionAt(return_address));
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
UnoptimizedStaticCall static_call(return_address);
ICData& ic_data = ICData::Handle();
ic_data ^= static_call.ic_data();
@@ -214,14 +245,14 @@
}
void CodePatcher::PatchNativeCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
NativeFunction target,
const Code& trampoline) {
UNREACHABLE();
}
RawCode* CodePatcher::GetNativeCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
NativeFunction* target) {
UNREACHABLE();
return NULL;
diff --git a/runtime/vm/code_patcher_x64.cc b/runtime/vm/code_patcher_x64.cc
index f53cf2a..c17c742 100644
--- a/runtime/vm/code_patcher_x64.cc
+++ b/runtime/vm/code_patcher_x64.cc
@@ -77,8 +77,6 @@
intptr_t argument_index() const { return argument_index_; }
- RawObject* ic_data() const { return object_pool_.ObjectAt(argument_index()); }
-
RawCode* target() const {
Code& code = Code::Handle();
code ^= object_pool_.ObjectAt(code_index_);
@@ -123,20 +121,28 @@
InstanceCall(uword return_address, const Code& code)
: UnoptimizedCall(return_address, code) {
#if defined(DEBUG)
- ICData& test_ic_data = ICData::Handle();
- test_ic_data ^= ic_data();
- ASSERT(test_ic_data.NumArgsTested() > 0);
+ Object& test_cache = Object::Handle(data());
+ ASSERT(test_cache.IsICData() || test_cache.IsMegamorphicCache());
+ if (test_cache.IsICData()) {
+ ASSERT(ICData::Cast(test_cache).NumArgsTested() > 0);
+ }
#endif // DEBUG
}
+ RawObject* data() const { return object_pool_.ObjectAt(argument_index()); }
+ void set_data(const Object& data) const {
+ ASSERT(data.IsICData() || data.IsMegamorphicCache());
+ object_pool_.SetObjectAt(argument_index(), data);
+ }
+
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(InstanceCall);
};
class UnoptimizedStaticCall : public UnoptimizedCall {
public:
- UnoptimizedStaticCall(uword return_address, const Code& code)
- : UnoptimizedCall(return_address, code) {
+ UnoptimizedStaticCall(uword return_address, const Code& caller_code)
+ : UnoptimizedCall(return_address, caller_code) {
#if defined(DEBUG)
ICData& test_ic_data = ICData::Handle();
test_ic_data ^= ic_data();
@@ -144,6 +150,8 @@
#endif // DEBUG
}
+ RawObject* ic_data() const { return object_pool_.ObjectAt(argument_index()); }
+
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(UnoptimizedStaticCall);
};
@@ -152,8 +160,8 @@
// the object pool.
class PoolPointerCall : public ValueObject {
public:
- explicit PoolPointerCall(uword return_address, const Code& code)
- : object_pool_(ObjectPool::Handle(code.GetObjectPool())),
+ explicit PoolPointerCall(uword return_address, const Code& caller_code)
+ : object_pool_(ObjectPool::Handle(caller_code.GetObjectPool())),
code_index_(-1) {
uword pc = return_address;
@@ -424,25 +432,35 @@
}
RawCode* CodePatcher::GetInstanceCallAt(uword return_address,
- const Code& code,
- ICData* ic_data) {
- ASSERT(code.ContainsInstructionAt(return_address));
- InstanceCall call(return_address, code);
- if (ic_data != NULL) {
- *ic_data ^= call.ic_data();
+ const Code& caller_code,
+ Object* data) {
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ InstanceCall call(return_address, caller_code);
+ if (data != NULL) {
+ *data = call.data();
}
return call.target();
}
+void CodePatcher::PatchInstanceCallAt(uword return_address,
+ const Code& caller_code,
+ const Object& data,
+ const Code& target) {
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ InstanceCall call(return_address, caller_code);
+ call.set_data(data);
+ call.set_target(target);
+}
+
void CodePatcher::InsertDeoptimizationCallAt(uword start) {
UNREACHABLE();
}
RawFunction* CodePatcher::GetUnoptimizedStaticCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
ICData* ic_data_result) {
- ASSERT(code.ContainsInstructionAt(return_address));
- UnoptimizedStaticCall static_call(return_address, code);
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ UnoptimizedStaticCall static_call(return_address, caller_code);
ICData& ic_data = ICData::Handle();
ic_data ^= static_call.ic_data();
if (ic_data_result != NULL) {
@@ -492,20 +510,20 @@
}
void CodePatcher::PatchNativeCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
NativeFunction target,
const Code& trampoline) {
- ASSERT(code.ContainsInstructionAt(return_address));
- NativeCall call(return_address, code);
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ NativeCall call(return_address, caller_code);
call.set_target(trampoline);
call.set_native_function(target);
}
RawCode* CodePatcher::GetNativeCallAt(uword return_address,
- const Code& code,
+ const Code& caller_code,
NativeFunction* target) {
- ASSERT(code.ContainsInstructionAt(return_address));
- NativeCall call(return_address, code);
+ ASSERT(caller_code.ContainsInstructionAt(return_address));
+ NativeCall call(return_address, caller_code);
*target = call.native_function();
return call.target();
}
diff --git a/runtime/vm/compiler/aot/aot_call_specializer.cc b/runtime/vm/compiler/aot/aot_call_specializer.cc
index 895d8a7..fab9d76 100644
--- a/runtime/vm/compiler/aot/aot_call_specializer.cc
+++ b/runtime/vm/compiler/aot/aot_call_specializer.cc
@@ -161,7 +161,8 @@
Function::Handle(Z, call->ResolveForReceiverClass(cls));
ASSERT(!function.IsNull());
const Function& target = Function::ZoneHandle(Z, function.raw());
- StaticCallInstr* static_call = StaticCallInstr::FromCall(Z, call, target);
+ StaticCallInstr* static_call =
+ StaticCallInstr::FromCall(Z, call, target, call->CallCount());
static_call->SetResultType(Z, CompileType::FromCid(kTypeCid));
call->ReplaceWith(static_call, current_iterator());
return true;
@@ -849,7 +850,8 @@
CallTargets* targets = CallTargets::Create(Z, unary_checks);
ASSERT(targets->HasSingleTarget());
const Function& target = targets->FirstTarget();
- StaticCallInstr* call = StaticCallInstr::FromCall(Z, instr, target);
+ StaticCallInstr* call = StaticCallInstr::FromCall(
+ Z, instr, target, targets->AggregateCallCount());
instr->ReplaceWith(call, current_iterator());
return;
}
@@ -911,7 +913,8 @@
Function::Handle(Z, instr->ResolveForReceiverClass(receiver_class));
if (!function.IsNull()) {
const Function& target = Function::ZoneHandle(Z, function.raw());
- StaticCallInstr* call = StaticCallInstr::FromCall(Z, instr, target);
+ StaticCallInstr* call =
+ StaticCallInstr::FromCall(Z, instr, target, instr->CallCount());
instr->ReplaceWith(call, current_iterator());
return;
}
@@ -1024,7 +1027,8 @@
// We have computed that there is only a single target for this call
// within the whole hierarchy. Replace InstanceCall with StaticCall.
const Function& target = Function::ZoneHandle(Z, single_target.raw());
- StaticCallInstr* call = StaticCallInstr::FromCall(Z, instr, target);
+ StaticCallInstr* call =
+ StaticCallInstr::FromCall(Z, instr, target, instr->CallCount());
instr->ReplaceWith(call, current_iterator());
return;
} else if ((ic_data.raw() != ICData::null()) &&
@@ -1181,7 +1185,8 @@
Z, call->instance_call()->ResolveForReceiverClass(receiver_class));
if (!function.IsNull()) {
// Only one target. Replace by static call.
- StaticCallInstr* new_call = StaticCallInstr::FromCall(Z, call, function);
+ StaticCallInstr* new_call =
+ StaticCallInstr::FromCall(Z, call, function, call->CallCount());
call->ReplaceWith(new_call, current_iterator());
}
}
diff --git a/runtime/vm/compiler/backend/il.cc b/runtime/vm/compiler/backend/il.cc
index c2aeb5e..df45f89 100644
--- a/runtime/vm/compiler/backend/il.cc
+++ b/runtime/vm/compiler/backend/il.cc
@@ -702,6 +702,41 @@
cid_ranges_.Add(new (zone) CidRange(id, id));
}
}
+
+ if (ic_data.is_megamorphic()) {
+ const MegamorphicCache& cache =
+ MegamorphicCache::Handle(zone, ic_data.AsMegamorphicCache());
+ const Array& buckets = Array::Handle(zone, cache.buckets());
+ for (intptr_t i = 0; i < buckets.Length();
+ i += MegamorphicCache::kEntryLength) {
+ intptr_t id = Smi::Value(
+ Smi::RawCast(buckets.At(i + MegamorphicCache::kClassIdIndex)));
+ if (id == kIllegalCid) {
+ continue;
+ }
+ if (include_targets) {
+ Function& function = Function::ZoneHandle(zone);
+ function ^= buckets.At(i + MegamorphicCache::kTargetFunctionIndex);
+ intptr_t count = function.usage_counter();
+ if (count < 0) {
+ if (function.HasCode()) {
+ // 'function' is queued for optimized compilation
+ count = FLAG_optimization_counter_threshold;
+ } else {
+ // 'function' is queued for unoptimized compilation
+ count = FLAG_compilation_counter_threshold;
+ }
+ } else if (Code::Handle(zone, function.CurrentCode()).is_optimized()) {
+ // 'function' was optimized and stopped counting
+ count = FLAG_optimization_counter_threshold;
+ }
+ cid_ranges_.Add(new (zone) TargetInfo(
+ id, id, &function, count, StaticTypeExactnessState::NotTracking()));
+ } else {
+ cid_ranges_.Add(new (zone) CidRange(id, id));
+ }
+ }
+ }
}
bool Cids::IsMonomorphic() const {
@@ -3746,6 +3781,14 @@
Sort(OrderByFrequency);
}
+void CallTargets::Print() const {
+ for (intptr_t i = 0; i < length(); i++) {
+ OS::PrintErr("cid = [%" Pd ", %" Pd "], count = %" Pd ", target = %s\n",
+ TargetAt(i)->cid_start, TargetAt(i)->cid_end,
+ TargetAt(i)->count, TargetAt(i)->target->ToQualifiedCString());
+ }
+}
+
// Shared code generation methods (EmitNativeCode and
// MakeLocationSummary). Only assembly code that can be shared across all
// architectures can be used. Machine specific register allocation and code
@@ -4410,8 +4453,8 @@
ASSERT(new_target->HasSingleTarget());
const Function& target = new_target->FirstTarget();
- StaticCallInstr* specialized =
- StaticCallInstr::FromCall(flow_graph->zone(), this, target);
+ StaticCallInstr* specialized = StaticCallInstr::FromCall(
+ flow_graph->zone(), this, target, new_target->AggregateCallCount());
flow_graph->InsertBefore(this, specialized, env(), FlowGraph::kValue);
return specialized;
}
diff --git a/runtime/vm/compiler/backend/il.h b/runtime/vm/compiler/backend/il.h
index 3e91b50..cd63743 100644
--- a/runtime/vm/compiler/backend/il.h
+++ b/runtime/vm/compiler/backend/il.h
@@ -615,6 +615,8 @@
const Function& FirstTarget() const;
const Function& MostPopularTarget() const;
+ void Print() const;
+
private:
void MergeIntoRanges();
};
@@ -3836,7 +3838,8 @@
template <class C>
static StaticCallInstr* FromCall(Zone* zone,
const C* call,
- const Function& target) {
+ const Function& target,
+ intptr_t call_count) {
PushArgumentsArray* args =
new (zone) PushArgumentsArray(call->ArgumentCount());
for (intptr_t i = 0; i < call->ArgumentCount(); i++) {
@@ -3845,7 +3848,7 @@
StaticCallInstr* new_call = new (zone)
StaticCallInstr(call->token_pos(), target, call->type_args_len(),
call->argument_names(), args, call->deopt_id(),
- call->CallCount(), ICData::kNoRebind);
+ call_count, ICData::kNoRebind);
if (call->result_type() != NULL) {
new_call->result_type_ = call->result_type();
}
diff --git a/runtime/vm/compiler/call_specializer.cc b/runtime/vm/compiler/call_specializer.cc
index 5f12916..1338e99 100644
--- a/runtime/vm/compiler/call_specializer.cc
+++ b/runtime/vm/compiler/call_specializer.cc
@@ -314,7 +314,8 @@
ASSERT(targets->HasSingleTarget());
const Function& target = targets->FirstTarget();
- StaticCallInstr* specialized = StaticCallInstr::FromCall(Z, call, target);
+ StaticCallInstr* specialized =
+ StaticCallInstr::FromCall(Z, call, target, targets->AggregateCallCount());
call->ReplaceWith(specialized, current_iterator());
}
diff --git a/runtime/vm/compiler/jit/jit_call_specializer.cc b/runtime/vm/compiler/jit/jit_call_specializer.cc
index f073878..189ef8b 100644
--- a/runtime/vm/compiler/jit/jit_call_specializer.cc
+++ b/runtime/vm/compiler/jit/jit_call_specializer.cc
@@ -52,8 +52,10 @@
void JitCallSpecializer::ReplaceWithStaticCall(InstanceCallInstr* instr,
const ICData& unary_checks,
- const Function& target) {
- StaticCallInstr* call = StaticCallInstr::FromCall(Z, instr, target);
+ const Function& target,
+ intptr_t call_count) {
+ StaticCallInstr* call =
+ StaticCallInstr::FromCall(Z, instr, target, call_count);
if (unary_checks.NumberOfChecks() == 1 &&
unary_checks.GetExactnessAt(0).IsExact()) {
if (unary_checks.GetExactnessAt(0).IsTriviallyExact()) {
@@ -143,7 +145,8 @@
Function::ZoneHandle(Z, unary_checks.GetTargetAt(0));
if (flow_graph()->CheckForInstanceCall(instr, target.kind()) ==
FlowGraph::ToCheck::kNoCheck) {
- ReplaceWithStaticCall(instr, unary_checks, target);
+ ReplaceWithStaticCall(instr, unary_checks, target,
+ targets.AggregateCallCount());
return;
}
}
@@ -168,7 +171,8 @@
// Call can still deoptimize, do not detach environment from instr.
const Function& target =
Function::ZoneHandle(Z, unary_checks.GetTargetAt(0));
- ReplaceWithStaticCall(instr, unary_checks, target);
+ ReplaceWithStaticCall(instr, unary_checks, target,
+ targets.AggregateCallCount());
} else {
PolymorphicInstanceCallInstr* call =
new (Z) PolymorphicInstanceCallInstr(instr, targets,
diff --git a/runtime/vm/compiler/jit/jit_call_specializer.h b/runtime/vm/compiler/jit/jit_call_specializer.h
index 77c72bd..00f7f67 100644
--- a/runtime/vm/compiler/jit/jit_call_specializer.h
+++ b/runtime/vm/compiler/jit/jit_call_specializer.h
@@ -37,7 +37,8 @@
void ReplaceWithStaticCall(InstanceCallInstr* instr,
const ICData& unary_checks,
- const Function& target);
+ const Function& target,
+ intptr_t call_count);
DISALLOW_COPY_AND_ASSIGN(JitCallSpecializer);
};
diff --git a/runtime/vm/deferred_objects.cc b/runtime/vm/deferred_objects.cc
index 76ca7b7..6c5a9b2 100644
--- a/runtime/vm/deferred_objects.cc
+++ b/runtime/vm/deferred_objects.cc
@@ -125,8 +125,16 @@
// If the deoptimization happened at an IC call, update the IC data
// to avoid repeated deoptimization at the same site next time around.
ICData& ic_data = ICData::Handle(zone);
- CodePatcher::GetInstanceCallAt(pc, code, &ic_data);
- if (!ic_data.IsNull()) {
+ Array& ics = Array::Handle(zone, function.ic_data_array());
+ bool found = false;
+ for (intptr_t i = 1; i < ics.Length(); i++) {
+ ic_data ^= ics.At(i);
+ if (ic_data.deopt_id() == deopt_id_) {
+ found = true;
+ break;
+ }
+ }
+ if (found) {
ic_data.AddDeoptReason(deopt_context->deopt_reason());
// Propagate the reason to all ICData-s with same deopt_id since
// only unoptimized-code ICData (IC calls) are propagated.
diff --git a/runtime/vm/instructions_arm.cc b/runtime/vm/instructions_arm.cc
index c8e8af9..021474f 100644
--- a/runtime/vm/instructions_arm.cc
+++ b/runtime/vm/instructions_arm.cc
@@ -18,20 +18,35 @@
CallPattern::CallPattern(uword pc, const Code& code)
: object_pool_(ObjectPool::Handle(code.GetObjectPool())),
- end_(pc),
- ic_data_load_end_(0),
- target_code_pool_index_(-1),
- ic_data_(ICData::Handle()) {
+ target_code_pool_index_(-1) {
ASSERT(code.ContainsInstructionAt(pc));
// Last instruction: blx lr.
- ASSERT(*(reinterpret_cast<uword*>(end_) - 1) == 0xe12fff3e);
+ ASSERT(*(reinterpret_cast<uword*>(pc) - 1) == 0xe12fff3e);
Register reg;
- ic_data_load_end_ = InstructionPattern::DecodeLoadWordFromPool(
- end_ - 2 * Instr::kInstrSize, ®, &target_code_pool_index_);
+ InstructionPattern::DecodeLoadWordFromPool(pc - 2 * Instr::kInstrSize, ®,
+ &target_code_pool_index_);
ASSERT(reg == CODE_REG);
}
+ICCallPattern::ICCallPattern(uword pc, const Code& code)
+ : object_pool_(ObjectPool::Handle(code.GetObjectPool())),
+ target_pool_index_(-1),
+ data_pool_index_(-1) {
+ ASSERT(code.ContainsInstructionAt(pc));
+ // Last instruction: blx lr.
+ ASSERT(*(reinterpret_cast<uword*>(pc) - 1) == 0xe12fff3e);
+
+ Register reg;
+ uword data_load_end = InstructionPattern::DecodeLoadWordFromPool(
+ pc - 2 * Instr::kInstrSize, ®, &target_pool_index_);
+ ASSERT(reg == CODE_REG);
+
+ InstructionPattern::DecodeLoadWordFromPool(data_load_end, ®,
+ &data_pool_index_);
+ ASSERT(reg == R9);
+}
+
NativeCallPattern::NativeCallPattern(uword pc, const Code& code)
: object_pool_(ObjectPool::Handle(code.GetObjectPool())),
end_(pc),
@@ -245,16 +260,6 @@
return false;
}
-RawICData* CallPattern::IcData() {
- if (ic_data_.IsNull()) {
- Register reg;
- InstructionPattern::DecodeLoadObject(ic_data_load_end_, object_pool_, ®,
- &ic_data_);
- ASSERT(reg == R9);
- }
- return ic_data_.raw();
-}
-
RawCode* CallPattern::TargetCode() const {
return reinterpret_cast<RawCode*>(
object_pool_.ObjectAt(target_code_pool_index_));
@@ -264,6 +269,23 @@
object_pool_.SetObjectAt(target_code_pool_index_, target_code);
}
+RawObject* ICCallPattern::Data() const {
+ return object_pool_.ObjectAt(data_pool_index_);
+}
+
+void ICCallPattern::SetData(const Object& data) const {
+ ASSERT(data.IsICData() || data.IsMegamorphicCache());
+ object_pool_.SetObjectAt(data_pool_index_, data);
+}
+
+RawCode* ICCallPattern::TargetCode() const {
+ return reinterpret_cast<RawCode*>(object_pool_.ObjectAt(target_pool_index_));
+}
+
+void ICCallPattern::SetTargetCode(const Code& target_code) const {
+ object_pool_.SetObjectAt(target_pool_index_, target_code);
+}
+
SwitchableCallPatternBase::SwitchableCallPatternBase(const Code& code)
: object_pool_(ObjectPool::Handle(code.GetObjectPool())),
data_pool_index_(-1),
diff --git a/runtime/vm/instructions_arm.h b/runtime/vm/instructions_arm.h
index d3e1cdb..bbd0d6c 100644
--- a/runtime/vm/instructions_arm.h
+++ b/runtime/vm/instructions_arm.h
@@ -72,7 +72,23 @@
public:
CallPattern(uword pc, const Code& code);
- RawICData* IcData();
+ RawCode* TargetCode() const;
+ void SetTargetCode(const Code& code) const;
+
+ private:
+ const ObjectPool& object_pool_;
+
+ intptr_t target_code_pool_index_;
+
+ DISALLOW_COPY_AND_ASSIGN(CallPattern);
+};
+
+class ICCallPattern : public ValueObject {
+ public:
+ ICCallPattern(uword pc, const Code& code);
+
+ RawObject* Data() const;
+ void SetData(const Object& data) const;
RawCode* TargetCode() const;
void SetTargetCode(const Code& code) const;
@@ -80,13 +96,10 @@
private:
const ObjectPool& object_pool_;
- uword end_;
- uword ic_data_load_end_;
+ intptr_t target_pool_index_;
+ intptr_t data_pool_index_;
- intptr_t target_code_pool_index_;
- ICData& ic_data_;
-
- DISALLOW_COPY_AND_ASSIGN(CallPattern);
+ DISALLOW_COPY_AND_ASSIGN(ICCallPattern);
};
class NativeCallPattern : public ValueObject {
diff --git a/runtime/vm/instructions_arm64.cc b/runtime/vm/instructions_arm64.cc
index cc29bcd..02df5d6 100644
--- a/runtime/vm/instructions_arm64.cc
+++ b/runtime/vm/instructions_arm64.cc
@@ -18,20 +18,35 @@
CallPattern::CallPattern(uword pc, const Code& code)
: object_pool_(ObjectPool::Handle(code.GetObjectPool())),
- end_(pc),
- ic_data_load_end_(0),
- target_code_pool_index_(-1),
- ic_data_(ICData::Handle()) {
+ target_code_pool_index_(-1) {
ASSERT(code.ContainsInstructionAt(pc));
// Last instruction: blr ip0.
- ASSERT(*(reinterpret_cast<uint32_t*>(end_) - 1) == 0xd63f0200);
+ ASSERT(*(reinterpret_cast<uint32_t*>(pc) - 1) == 0xd63f0200);
Register reg;
- ic_data_load_end_ = InstructionPattern::DecodeLoadWordFromPool(
- end_ - 2 * Instr::kInstrSize, ®, &target_code_pool_index_);
+ InstructionPattern::DecodeLoadWordFromPool(pc - 2 * Instr::kInstrSize, ®,
+ &target_code_pool_index_);
ASSERT(reg == CODE_REG);
}
+ICCallPattern::ICCallPattern(uword pc, const Code& code)
+ : object_pool_(ObjectPool::Handle(code.GetObjectPool())),
+ target_pool_index_(-1),
+ data_pool_index_(-1) {
+ ASSERT(code.ContainsInstructionAt(pc));
+ // Last instruction: blr ip0.
+ ASSERT(*(reinterpret_cast<uint32_t*>(pc) - 1) == 0xd63f0200);
+
+ Register reg;
+ uword data_load_end = InstructionPattern::DecodeLoadWordFromPool(
+ pc - 2 * Instr::kInstrSize, ®, &target_pool_index_);
+ ASSERT(reg == CODE_REG);
+
+ InstructionPattern::DecodeLoadWordFromPool(data_load_end, ®,
+ &data_pool_index_);
+ ASSERT(reg == R5);
+}
+
NativeCallPattern::NativeCallPattern(uword pc, const Code& code)
: object_pool_(ObjectPool::Handle(code.GetObjectPool())),
end_(pc),
@@ -353,16 +368,6 @@
instr->SetInstructionBits(instr->InstructionBits() | B22);
}
-RawICData* CallPattern::IcData() {
- if (ic_data_.IsNull()) {
- Register reg;
- InstructionPattern::DecodeLoadObject(ic_data_load_end_, object_pool_, ®,
- &ic_data_);
- ASSERT(reg == R5);
- }
- return ic_data_.raw();
-}
-
RawCode* CallPattern::TargetCode() const {
return reinterpret_cast<RawCode*>(
object_pool_.ObjectAt(target_code_pool_index_));
@@ -373,6 +378,24 @@
// No need to flush the instruction cache, since the code is not modified.
}
+RawObject* ICCallPattern::Data() const {
+ return object_pool_.ObjectAt(data_pool_index_);
+}
+
+void ICCallPattern::SetData(const Object& data) const {
+ ASSERT(data.IsICData() || data.IsMegamorphicCache());
+ object_pool_.SetObjectAt(data_pool_index_, data);
+}
+
+RawCode* ICCallPattern::TargetCode() const {
+ return reinterpret_cast<RawCode*>(object_pool_.ObjectAt(target_pool_index_));
+}
+
+void ICCallPattern::SetTargetCode(const Code& target) const {
+ object_pool_.SetObjectAt(target_pool_index_, target);
+ // No need to flush the instruction cache, since the code is not modified.
+}
+
SwitchableCallPatternBase::SwitchableCallPatternBase(const Code& code)
: object_pool_(ObjectPool::Handle(code.GetObjectPool())),
data_pool_index_(-1),
diff --git a/runtime/vm/instructions_arm64.h b/runtime/vm/instructions_arm64.h
index 8550643..6b8b4aa 100644
--- a/runtime/vm/instructions_arm64.h
+++ b/runtime/vm/instructions_arm64.h
@@ -82,7 +82,23 @@
public:
CallPattern(uword pc, const Code& code);
- RawICData* IcData();
+ RawCode* TargetCode() const;
+ void SetTargetCode(const Code& target) const;
+
+ private:
+ const ObjectPool& object_pool_;
+
+ intptr_t target_code_pool_index_;
+
+ DISALLOW_COPY_AND_ASSIGN(CallPattern);
+};
+
+class ICCallPattern : public ValueObject {
+ public:
+ ICCallPattern(uword pc, const Code& caller_code);
+
+ RawObject* Data() const;
+ void SetData(const Object& data) const;
RawCode* TargetCode() const;
void SetTargetCode(const Code& target) const;
@@ -90,13 +106,10 @@
private:
const ObjectPool& object_pool_;
- uword end_;
- uword ic_data_load_end_;
+ intptr_t target_pool_index_;
+ intptr_t data_pool_index_;
- intptr_t target_code_pool_index_;
- ICData& ic_data_;
-
- DISALLOW_COPY_AND_ASSIGN(CallPattern);
+ DISALLOW_COPY_AND_ASSIGN(ICCallPattern);
};
class NativeCallPattern : public ValueObject {
diff --git a/runtime/vm/instructions_dbc.cc b/runtime/vm/instructions_dbc.cc
index b303e95..6d5280b 100644
--- a/runtime/vm/instructions_dbc.cc
+++ b/runtime/vm/instructions_dbc.cc
@@ -54,15 +54,14 @@
CallPattern::CallPattern(uword pc, const Code& code)
: object_pool_(ObjectPool::Handle(code.GetObjectPool())),
end_(pc),
- ic_data_load_end_(0),
- target_code_pool_index_(-1),
- ic_data_(ICData::Handle()) {
+ data_pool_index_(-1),
+ target_pool_index_(-1) {
ASSERT(code.ContainsInstructionAt(end_));
const uword call_pc = end_ - sizeof(Instr);
Instr call_instr = SimulatorBytecode::At(call_pc);
ASSERT(SimulatorBytecode::IsCallOpcode(call_instr));
- ic_data_load_end_ = call_pc;
- target_code_pool_index_ = SimulatorBytecode::DecodeD(call_instr);
+ data_pool_index_ = SimulatorBytecode::DecodeD(call_instr);
+ target_pool_index_ = SimulatorBytecode::DecodeD(call_instr);
}
NativeCallPattern::NativeCallPattern(uword pc, const Code& code)
@@ -143,21 +142,20 @@
return GetLoadedObjectAt(pc, pool, obj);
}
-RawICData* CallPattern::IcData() {
- if (ic_data_.IsNull()) {
- bool found = GetLoadedObjectAt(ic_data_load_end_, object_pool_, &ic_data_);
- ASSERT(found);
- }
- return ic_data_.raw();
+RawObject* CallPattern::Data() const {
+ return object_pool_.ObjectAt(data_pool_index_);
+}
+
+void CallPattern::SetData(const Object& data) const {
+ object_pool_.SetObjectAt(data_pool_index_, data);
}
RawCode* CallPattern::TargetCode() const {
- return reinterpret_cast<RawCode*>(
- object_pool_.ObjectAt(target_code_pool_index_));
+ return reinterpret_cast<RawCode*>(object_pool_.ObjectAt(target_pool_index_));
}
-void CallPattern::SetTargetCode(const Code& target_code) const {
- object_pool_.SetObjectAt(target_code_pool_index_, target_code);
+void CallPattern::SetTargetCode(const Code& target) const {
+ object_pool_.SetObjectAt(target_pool_index_, target);
}
void CallPattern::InsertDeoptCallAt(uword pc) {
diff --git a/runtime/vm/instructions_dbc.h b/runtime/vm/instructions_dbc.h
index 15ac846..3d465fa 100644
--- a/runtime/vm/instructions_dbc.h
+++ b/runtime/vm/instructions_dbc.h
@@ -49,9 +49,10 @@
class CallPattern : public ValueObject {
public:
- CallPattern(uword pc, const Code& code);
+ CallPattern(uword pc, const Code& caller_code);
- RawICData* IcData();
+ RawObject* Data() const;
+ void SetData(const Object& data) const;
RawCode* TargetCode() const;
void SetTargetCode(const Code& code) const;
@@ -62,10 +63,9 @@
const ObjectPool& object_pool_;
uword end_;
- uword ic_data_load_end_;
- intptr_t target_code_pool_index_;
- ICData& ic_data_;
+ intptr_t data_pool_index_;
+ intptr_t target_pool_index_;
DISALLOW_COPY_AND_ASSIGN(CallPattern);
};
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index 73cf977..bbad5ca 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -14023,6 +14023,12 @@
return result.raw();
}
+RawMegamorphicCache* ICData::AsMegamorphicCache() const {
+ const String& name = String::Handle(target_name());
+ const Array& descriptor = Array::Handle(arguments_descriptor());
+ return MegamorphicCacheTable::Lookup(Isolate::Current(), name, descriptor);
+}
+
bool ICData::AllTargetsHaveSameOwner(intptr_t owner_cid) const {
if (NumberOfChecksIs(0)) return false;
Class& cls = Class::Handle();
@@ -14221,6 +14227,7 @@
AbstractType::Handle(from.receivers_static_type())));
// Copy deoptimization reasons.
result.SetDeoptReasons(from.DeoptReasons());
+ result.set_is_megamorphic(from.is_megamorphic());
return result.raw();
}
@@ -14245,6 +14252,7 @@
result.set_entries(cloned_array);
// Copy deoptimization reasons.
result.SetDeoptReasons(from.DeoptReasons());
+ result.set_is_megamorphic(from.is_megamorphic());
return result.raw();
}
#endif
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index 725bc63..499939e 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -1672,6 +1672,14 @@
RebindRule rebind_rule() const;
void set_rebind_rule(uint32_t rebind_rule) const;
+ bool is_megamorphic() const {
+ return MegamorphicBit::decode(raw_ptr()->state_bits_);
+ }
+ void set_is_megamorphic(bool value) const {
+ StoreNonPointer(&raw_ptr()->state_bits_,
+ MegamorphicBit::update(value, raw_ptr()->state_bits_));
+ }
+
// The length of the array. This includes all sentinel entries including
// the final one.
intptr_t Length() const;
@@ -1801,6 +1809,8 @@
// Used for printing and optimizations.
RawICData* AsUnaryClassChecksSortedByCount() const;
+ RawMegamorphicCache* AsMegamorphicCache() const;
+
// Consider only used entries.
bool AllTargetsHaveSameOwner(intptr_t owner_cid) const;
bool AllReceiversAreNumbers() const;
@@ -1893,7 +1903,9 @@
kDeoptReasonPos = kTrackingExactnessPos + kTrackingExactnessSize,
kDeoptReasonSize = kLastRecordedDeoptReason + 1,
kRebindRulePos = kDeoptReasonPos + kDeoptReasonSize,
- kRebindRuleSize = 3
+ kRebindRuleSize = 3,
+ kMegamorphicPos = kRebindRulePos + kRebindRuleSize,
+ kMegamorphicSize = 1,
};
COMPILE_ASSERT(kNumRebindRules <= (1 << kRebindRuleSize));
@@ -1914,6 +1926,9 @@
uint32_t,
ICData::kRebindRulePos,
ICData::kRebindRuleSize> {};
+ class MegamorphicBit
+ : public BitField<uint32_t, bool, kMegamorphicPos, kMegamorphicSize> {};
+
#if defined(DEBUG)
// Used in asserts to verify that a check is not added twice.
bool HasCheck(const GrowableArray<intptr_t>& cids) const;
@@ -5679,6 +5694,12 @@
static const intptr_t kSpreadFactor = 7;
static const double kLoadFactor;
+ enum {
+ kClassIdIndex,
+ kTargetFunctionIndex,
+ kEntryLength,
+ };
+
RawArray* buckets() const;
void set_buckets(const Array& buckets) const;
@@ -5725,12 +5746,6 @@
void set_target_name(const String& value) const;
void set_arguments_descriptor(const Array& value) const;
- enum {
- kClassIdIndex,
- kTargetFunctionIndex,
- kEntryLength,
- };
-
static inline void SetEntry(const Array& array,
intptr_t index,
const Smi& class_id,
diff --git a/runtime/vm/runtime_entry.cc b/runtime/vm/runtime_entry.cc
index 1a7ba2e..b211fba 100644
--- a/runtime/vm/runtime_entry.cc
+++ b/runtime/vm/runtime_entry.cc
@@ -1171,6 +1171,40 @@
receiver.GetClassId(), target_function.ToCString());
}
}
+
+#if !defined(TARGET_ARCH_DBC) && !defined(DART_PRECOMPILED_RUNTIME)
+ // Maybe switch to megamorphic call.
+ // TODO(rmacnak): Investigate interaction with the kernel isolate in reload
+ // stress test mode.
+ if (!Isolate::Current()->is_kernel_isolate() &&
+ (ic_data.NumArgsTested() == 1) &&
+ (ic_data.NumberOfChecks() > FLAG_max_polymorphic_checks)) {
+ ASSERT(ic_data.rebind_rule() == ICData::kInstance);
+ Thread* thread = Thread::Current();
+ DartFrameIterator iterator(thread,
+ StackFrameIterator::kNoCrossThreadIteration);
+ StackFrame* caller_frame = iterator.NextFrame();
+ ASSERT(caller_frame->IsDartFrame());
+ if (!caller_frame->is_interpreted()) {
+ Zone* zone = thread->zone();
+ const Code& caller_code =
+ Code::Handle(zone, caller_frame->LookupDartCode());
+ if (!caller_code.is_optimized()) {
+ const MegamorphicCache& cache =
+ MegamorphicCache::Handle(zone, ic_data.AsMegamorphicCache());
+ ic_data.set_is_megamorphic(true);
+ CodePatcher::PatchInstanceCallAt(caller_frame->pc(), caller_code, cache,
+ StubCode::MegamorphicCall());
+ if (FLAG_trace_ic) {
+ OS::PrintErr("InlineCacheMissHandler %" Pd " call at %#" Px
+ " switching to megamorphic dispatch %s\n",
+ args.length(), caller_frame->pc(), ic_data.ToCString());
+ }
+ }
+ }
+ }
+#endif // !defined(TARGET_ARCH_DBC) && !defined(DART_PRECOMPILED_RUNTIME)
+
return target_function.raw();
}