blob: 2fab100943363b7ef8397c30e9a76cf00721b64e [file] [log] [blame]
<
// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#include "vm/compiler/frontend/kernel_to_il.h"
#include "platform/assert.h"
#include "platform/globals.h"
#include "vm/class_id.h"
#include "vm/compiler/aot/precompiler.h"
#include "vm/compiler/backend/flow_graph_compiler.h"
#include "vm/compiler/backend/il.h"
#include "vm/compiler/backend/il_printer.h"
#include "vm/compiler/backend/locations.h"
#include "vm/compiler/backend/range_analysis.h"
#include "vm/compiler/ffi/abi.h"
#include "vm/compiler/ffi/marshaller.h"
#include "vm/compiler/ffi/native_calling_convention.h"
#include "vm/compiler/ffi/native_type.h"
#include "vm/compiler/ffi/recognized_method.h"
#include "vm/compiler/frontend/kernel_binary_flowgraph.h"
#include "vm/compiler/frontend/kernel_translation_helper.h"
#include "vm/compiler/frontend/prologue_builder.h"
#include "vm/compiler/jit/compiler.h"
#include "vm/compiler/runtime_api.h"
#include "vm/kernel_isolate.h"
#include "vm/kernel_loader.h"
#include "vm/log.h"
#include "vm/longjump.h"
#include "vm/native_entry.h"
#include "vm/object_store.h"
#include "vm/report.h"
#include "vm/resolver.h"
#include "vm/scopes.h"
#include "vm/stack_frame.h"
#include "vm/symbols.h"
namespace dart {

// Diagnostic flag: when a method exceeds FLAG_huge_method_cutoff_in_ast_nodes,
// BuildGraph() prints a warning if this flag is set.
DEFINE_FLAG(bool,
            print_huge_methods,
            false,
            "Print huge methods (less optimized)");

namespace kernel {

// Shorthands used throughout this file.
#define Z (zone_)
#define H (translation_helper_)
#define T (type_translator_)
#define I Isolate::Current()
#define IG IsolateGroup::Current()
// Builds IL flow graphs for functions compiled from kernel.
//
// [ic_data_array] carries IC data collected by earlier compilations of this
// function; [context_level_array] and [exit_collector] are forwarded to the
// base builder (used when inlining).
// NOTE(review): `first_block_id - 1` is passed down presumably because the
// base builder pre-increments when allocating block ids — confirm against
// BaseFlowGraphBuilder::AllocateBlockId.
FlowGraphBuilder::FlowGraphBuilder(
    ParsedFunction* parsed_function,
    ZoneGrowableArray<const ICData*>* ic_data_array,
    ZoneGrowableArray<intptr_t>* context_level_array,
    InlineExitCollector* exit_collector,
    bool optimizing,
    intptr_t osr_id,
    intptr_t first_block_id,
    bool inlining_unchecked_entry)
    : BaseFlowGraphBuilder(parsed_function,
                           first_block_id - 1,
                           osr_id,
                           context_level_array,
                           exit_collector,
                           inlining_unchecked_entry),
      translation_helper_(Thread::Current()),
      thread_(translation_helper_.thread()),
      zone_(translation_helper_.zone()),
      parsed_function_(parsed_function),
      optimizing_(optimizing),
      ic_data_array_(*ic_data_array),
      next_function_id_(0),
      loop_depth_(0),
      try_depth_(0),
      catch_depth_(0),
      for_in_depth_(0),
      block_expression_depth_(0),
      graph_entry_(NULL),
      scopes_(NULL),
      breakable_block_(NULL),
      switch_block_(NULL),
      try_catch_block_(NULL),
      try_finally_block_(NULL),
      catch_block_(NULL),
      prepend_type_arguments_(Function::ZoneHandle(zone_)),
      throw_new_null_assertion_(Function::ZoneHandle(zone_)) {
  // The translation helper needs the function's script to resolve names.
  const Script& script =
      Script::Handle(Z, parsed_function->function().script());
  H.InitFromScript(script);
}
// No explicit cleanup: members are handles or zone-allocated pointers
// (see the constructor above).
FlowGraphBuilder::~FlowGraphBuilder() {}
// Emits code entering the local scope declared at [kernel_offset].
//
// A fresh context is allocated and chained only when the scope has
// context-allocated (captured) variables. If [context_scope] is non-null it
// receives the scope that was entered.
Fragment FlowGraphBuilder::EnterScope(
    intptr_t kernel_offset,
    const LocalScope** context_scope /* = nullptr */) {
  const LocalScope* const scope = scopes_->scopes.Lookup(kernel_offset);
  if (context_scope != nullptr) {
    *context_scope = scope;
  }
  Fragment code;
  if (scope->num_context_variables() > 0) {
    code += PushContext(scope);
    code += Drop();
  }
  return code;
}
// Emits code leaving the local scope declared at [kernel_offset]: pops the
// context chained by the matching EnterScope, if one was pushed.
Fragment FlowGraphBuilder::ExitScope(intptr_t kernel_offset) {
  const LocalScope* const scope = scopes_->scopes.Lookup(kernel_offset);
  Fragment code;
  if (scope->num_context_variables() > 0) {
    code += PopContext();
  }
  return code;
}
// Emits code resetting :current_context_var to the enclosing context at
// [depth] (no code when already at that depth) and updates context_depth_.
Fragment FlowGraphBuilder::AdjustContextTo(int depth) {
  ASSERT(depth <= context_depth_ && depth >= 0);
  Fragment instructions;
  if (depth < context_depth_) {
    // Walk parent links up to the requested level and store the result back
    // into the current-context variable.
    instructions += LoadContextAt(depth);
    instructions += StoreLocal(TokenPosition::kNoSource,
                               parsed_function_->current_context_var());
    instructions += Drop();
    context_depth_ = depth;
  }
  return instructions;
}
// Allocates a fresh context with [scope]'s context slots, chains it to the
// current context, and makes it current. Leaves the allocated context on the
// expression stack (the temporary created via MakeTemporary).
Fragment FlowGraphBuilder::PushContext(const LocalScope* scope) {
  ASSERT(scope->num_context_variables() > 0);
  Fragment instructions = AllocateContext(scope->context_slots());
  LocalVariable* context = MakeTemporary();
  instructions += LoadLocal(context);
  instructions += LoadLocal(parsed_function_->current_context_var());
  // new_context.parent = current context.
  instructions += StoreNativeField(
      Slot::Context_parent(), StoreInstanceFieldInstr::Kind::kInitializing);
  instructions += StoreLocal(TokenPosition::kNoSource,
                             parsed_function_->current_context_var());
  ++context_depth_;
  return instructions;
}
// Emits code making the parent of the current context current again
// (inverse of PushContext).
Fragment FlowGraphBuilder::PopContext() {
  return AdjustContextTo(context_depth_ - 1);
}
// Pushes the instantiator type argument vector, or null when the enclosing
// class is not generic.
Fragment FlowGraphBuilder::LoadInstantiatorTypeArguments() {
  // TODO(27590): We could use `active_class_->IsGeneric()`.
  Fragment instructions;
  if (scopes_ != nullptr && scopes_->type_arguments_variable != nullptr) {
#ifdef DEBUG
    // Only factories receive the vector as an explicit parameter variable:
    // the outermost non-closure function must be a factory here.
    Function& function =
        Function::Handle(Z, parsed_function_->function().ptr());
    while (function.IsClosureFunction()) {
      function = function.parent_function();
    }
    ASSERT(function.IsFactory());
#endif
    instructions += LoadLocal(scopes_->type_arguments_variable);
  } else if (parsed_function_->has_receiver_var() &&
             active_class_.ClassNumTypeArguments() > 0) {
    // Instance members read the vector out of the receiver object.
    ASSERT(!parsed_function_->function().IsFactory());
    instructions += LoadLocal(parsed_function_->receiver_var());
    instructions += LoadNativeField(
        Slot::GetTypeArgumentsSlotFor(thread_, *active_class_.klass));
  } else {
    instructions += NullConstant();
  }
  return instructions;
}
// This function is responsible for pushing a type arguments vector which
// contains all type arguments of enclosing functions prepended to the type
// arguments of the current function.
//
// Pushes the null constant when neither the function nor any enclosing
// function is generic.
Fragment FlowGraphBuilder::LoadFunctionTypeArguments() {
  Fragment instructions;

  const Function& function = parsed_function_->function();

  if (function.IsGeneric() || function.HasGenericParent()) {
    // Fixed: use nullptr instead of NULL, consistent with the rest of this
    // file's null-pointer checks.
    ASSERT(parsed_function_->function_type_arguments() != nullptr);
    instructions += LoadLocal(parsed_function_->function_type_arguments());
  } else {
    instructions += NullConstant();
  }

  return instructions;
}
// Pushes a type argument vector for [type_arguments], instantiating it at
// runtime when it refers to type parameters of the enclosing class/function.
Fragment FlowGraphBuilder::TranslateInstantiatedTypeArguments(
    const TypeArguments& type_arguments) {
  Fragment instructions;
  if (type_arguments.IsNull() || type_arguments.IsInstantiated()) {
    // There are no type references to type parameters so we can just take it.
    instructions += Constant(type_arguments);
  } else {
    // The [type_arguments] vector contains a type reference to a type
    // parameter we need to resolve it.
    if (type_arguments.CanShareInstantiatorTypeArguments(
            *active_class_.klass)) {
      // If the instantiator type arguments are just passed on, we don't need
      // to resolve the type parameters.
      //
      // This is for example the case here:
      //     class Foo<T> {
      //       newList() => new List<T>();
      //     }
      // We just use the type argument vector from the [Foo] object and pass it
      // directly to the `new List<T>()` factory constructor.
      instructions += LoadInstantiatorTypeArguments();
    } else if (type_arguments.CanShareFunctionTypeArguments(
                   parsed_function_->function())) {
      // Same sharing optimization for the function type arguments vector.
      instructions += LoadFunctionTypeArguments();
    } else {
      // Otherwise we need to resolve [TypeParameterType]s in the type
      // expression based on the current instantiator type argument vector.
      // Push the instantiator vector (or null when unused)...
      if (!type_arguments.IsInstantiated(kCurrentClass)) {
        instructions += LoadInstantiatorTypeArguments();
      } else {
        instructions += NullConstant();
      }
      // ...then the function vector (or null), then instantiate.
      if (!type_arguments.IsInstantiated(kFunctions)) {
        instructions += LoadFunctionTypeArguments();
      } else {
        instructions += NullConstant();
      }
      instructions += InstantiateTypeArguments(type_arguments);
    }
  }
  return instructions;
}
// Builds the entry block for exception handler [handler_index].
//
// Registers a CatchBlockEntryInstr with the graph entry and emits the
// handler prologue: restoring the context and copying the raw
// exception/stacktrace values into their (possibly captured) variables.
Fragment FlowGraphBuilder::CatchBlockEntry(const Array& handler_types,
                                           intptr_t handler_index,
                                           bool needs_stacktrace,
                                           bool is_synthesized) {
  LocalVariable* exception_var = CurrentException();
  LocalVariable* stacktrace_var = CurrentStackTrace();
  LocalVariable* raw_exception_var = CurrentRawException();
  LocalVariable* raw_stacktrace_var = CurrentRawStackTrace();

  CatchBlockEntryInstr* entry = new (Z) CatchBlockEntryInstr(
      is_synthesized,  // whether catch block was synthesized by FE compiler
      AllocateBlockId(), CurrentTryIndex(), graph_entry_, handler_types,
      handler_index, needs_stacktrace, GetNextDeoptId(), exception_var,
      stacktrace_var, raw_exception_var, raw_stacktrace_var);
  graph_entry_->AddCatchEntry(entry);

  Fragment instructions(entry);

  // Auxiliary variables introduced by the try catch can be captured if we are
  // inside a function with yield/resume points. In this case we first need
  // to restore the context to match the context at entry into the closure.
  const bool should_restore_closure_context =
      CurrentException()->is_captured() || CurrentCatchContext()->is_captured();
  LocalVariable* context_variable = parsed_function_->current_context_var();
  if (should_restore_closure_context) {
    ASSERT(parsed_function_->function().IsClosureFunction());
    LocalVariable* closure_parameter = parsed_function_->ParameterVariable(0);
    ASSERT(!closure_parameter->is_captured());
    instructions += LoadLocal(closure_parameter);
    instructions += LoadNativeField(Slot::Closure_context());
    instructions += StoreLocal(TokenPosition::kNoSource, context_variable);
    instructions += Drop();
  }

  // Copy the raw exception/stacktrace values into their context slots when
  // the corresponding variables are captured.
  if (exception_var->is_captured()) {
    instructions += LoadLocal(context_variable);
    instructions += LoadLocal(raw_exception_var);
    instructions += StoreNativeField(
        Slot::GetContextVariableSlotFor(thread_, *exception_var));
  }
  if (stacktrace_var->is_captured()) {
    instructions += LoadLocal(context_variable);
    instructions += LoadLocal(raw_stacktrace_var);
    instructions += StoreNativeField(
        Slot::GetContextVariableSlotFor(thread_, *stacktrace_var));
  }

  // :saved_try_context_var can be captured in the context of
  // the closure, in this case CatchBlockEntryInstr restores
  // :current_context_var to point to closure context in the
  // same way as normal function prologue does.
  // Update current context depth to reflect that.
  const intptr_t saved_context_depth = context_depth_;
  ASSERT(!CurrentCatchContext()->is_captured() ||
         CurrentCatchContext()->owner()->context_level() == 0);
  context_depth_ = 0;
  instructions += LoadLocal(CurrentCatchContext());
  instructions += StoreLocal(TokenPosition::kNoSource,
                             parsed_function_->current_context_var());
  instructions += Drop();
  context_depth_ = saved_context_depth;

  return instructions;
}
// Opens a `try` region guarded by handler index [try_handler_index].
//
// The body of the try needs to have its own block in order to get a new try
// index.
//
// => We therefore create a block for the body (fresh try index) and another
//    join block (with current try index).
Fragment FlowGraphBuilder::TryCatch(int try_handler_index) {
  Fragment body;
  JoinEntryInstr* entry = new (Z)
      JoinEntryInstr(AllocateBlockId(), try_handler_index, GetNextDeoptId());
  // Save the current context so the catch handler can restore it.
  body += LoadLocal(parsed_function_->current_context_var());
  body += StoreLocal(TokenPosition::kNoSource, CurrentCatchContext());
  body += Drop();
  body += Goto(entry);
  return Fragment(body.entry, entry);
}
// Emits the function-prologue stack overflow check; only valid outside of
// loops (loop_depth_ must be zero here).
Fragment FlowGraphBuilder::CheckStackOverflowInPrologue(
    TokenPosition position) {
  ASSERT(loop_depth_ == 0);
  return BaseFlowGraphBuilder::CheckStackOverflowInPrologue(position);
}
// Emits code that clones the current context (copying the variables listed
// in [context_slots]) and makes the clone the current context.
Fragment FlowGraphBuilder::CloneContext(
    const ZoneGrowableArray<const Slot*>& context_slots) {
  LocalVariable* context_variable = parsed_function_->current_context_var();

  Fragment instructions = LoadLocal(context_variable);

  CloneContextInstr* clone_instruction = new (Z) CloneContextInstr(
      InstructionSource(), Pop(), context_slots, GetNextDeoptId());
  instructions <<= clone_instruction;
  Push(clone_instruction);

  instructions += StoreLocal(TokenPosition::kNoSource, context_variable);
  instructions += Drop();
  return instructions;
}
// Emits an instance call of [name], popping [argument_count] arguments (plus
// the type-argument vector when [type_args_len] > 0) and pushing the result.
//
// [result_type], when given, narrows the inferred result type; when it
// denotes a constant, the call's result is dropped and the constant pushed
// instead (the call itself is kept).
Fragment FlowGraphBuilder::InstanceCall(
    TokenPosition position,
    const String& name,
    Token::Kind kind,
    intptr_t type_args_len,
    intptr_t argument_count,
    const Array& argument_names,
    intptr_t checked_argument_count,
    const Function& interface_target,
    const Function& tearoff_interface_target,
    const InferredTypeMetadata* result_type,
    bool use_unchecked_entry,
    const CallSiteAttributesMetadata* call_site_attrs,
    bool receiver_is_not_smi,
    bool is_call_on_this) {
  const intptr_t total_count = argument_count + (type_args_len > 0 ? 1 : 0);
  InputsArray* arguments = GetArguments(total_count);
  InstanceCallInstr* call = new (Z) InstanceCallInstr(
      InstructionSource(position), name, kind, arguments, type_args_len,
      argument_names, checked_argument_count, ic_data_array_, GetNextDeoptId(),
      interface_target, tearoff_interface_target);
  // Fixed: use nullptr (was NULL) to match the nullptr check below.
  if ((result_type != nullptr) && !result_type->IsTrivial()) {
    call->SetResultType(Z, result_type->ToCompileType(Z));
  }
  if (use_unchecked_entry) {
    call->set_entry_kind(Code::EntryKind::kUnchecked);
  }
  if (is_call_on_this) {
    call->mark_as_call_on_this();
  }
  // Prefer the statically-known receiver type from the call-site metadata;
  // otherwise fall back to the interface target's declaration type.
  if (call_site_attrs != nullptr && call_site_attrs->receiver_type != nullptr &&
      call_site_attrs->receiver_type->IsInstantiated()) {
    call->set_receivers_static_type(call_site_attrs->receiver_type);
  } else if (!interface_target.IsNull()) {
    const Class& owner = Class::Handle(Z, interface_target.Owner());
    const AbstractType& type =
        AbstractType::ZoneHandle(Z, owner.DeclarationType());
    call->set_receivers_static_type(&type);
  }
  call->set_receiver_is_not_smi(receiver_is_not_smi);
  Push(call);
  if (result_type != nullptr && result_type->IsConstant()) {
    Fragment instructions(call);
    instructions += Drop();
    instructions += Constant(result_type->constant_value);
    return instructions;
  }
  return Fragment(call);
}
// Emits an FFI call described by [marshaller]; pops the previously pushed
// argument values into the call's inputs and pushes the call result.
Fragment FlowGraphBuilder::FfiCall(
    const compiler::ffi::CallMarshaller& marshaller) {
  FfiCallInstr* const call =
      new (Z) FfiCallInstr(Z, GetNextDeoptId(), marshaller,
                           parsed_function_->function().FfiIsLeaf());

  // Arguments were pushed left-to-right, so fill the inputs back-to-front.
  for (intptr_t input = call->InputCount(); input > 0; --input) {
    call->SetInputAt(input - 1, Pop());
  }

  Push(call);
  Fragment body;
  body <<= call;
  return body;
}
// Emits a rethrow of the exception/stacktrace pair currently on the stack;
// [catch_try_index] identifies the catch block doing the rethrow.
Fragment FlowGraphBuilder::RethrowException(TokenPosition position,
                                            int catch_try_index) {
  Fragment instructions;
  // Stacktrace was pushed last, so it comes off first.
  Value* stacktrace = Pop();
  Value* exception = Pop();
  instructions += Fragment(new (Z) ReThrowInstr(
                               InstructionSource(position), catch_try_index,
                               GetNextDeoptId(), exception, stacktrace))
                      .closed();
  // Use its side effect of leaving a constant on the stack (does not change
  // the graph).
  NullConstant();
  return instructions;
}
// Emits a load of [variable]. Captured variables are read out of their
// context slot; captured 'this' is immutable, so within the outer method it
// is read from the raw parameter instead of the context.
Fragment FlowGraphBuilder::LoadLocal(LocalVariable* variable) {
  const ParsedFunction* pf = parsed_function_;
  if (pf->function().HasThisParameter() && pf->has_receiver_var() &&
      variable == pf->receiver_var()) {
    ASSERT(variable == pf->ParameterVariable(0));
    variable = pf->RawParameterVariable(0);
  }
  if (!variable->is_captured()) {
    return BaseFlowGraphBuilder::LoadLocal(variable);
  }
  Fragment load = LoadContextAt(variable->owner()->context_level());
  load += LoadNativeField(Slot::GetContextVariableSlotFor(thread_, *variable));
  return load;
}
// Emits an indirect jump to one of [target_count] targets, selected by the
// index value popped from the stack.
Fragment FlowGraphBuilder::IndirectGoto(intptr_t target_count) {
  Value* index = Pop();
  return Fragment(new (Z) IndirectGotoInstr(target_count, index));
}
// Builds a call to the static LateError method [throw_method_name], passing
// [name] (the variable/field name) as its single argument.
Fragment FlowGraphBuilder::ThrowLateInitializationError(
    TokenPosition position,
    const char* throw_method_name,
    const String& name) {
  const Class& klass =
      Class::ZoneHandle(Z, Library::LookupCoreClass(Symbols::LateError()));
  ASSERT(!klass.IsNull());

  const auto& error = klass.EnsureIsFinalized(thread_);
  ASSERT(error == Error::null());
  const Function& throw_new =
      Function::ZoneHandle(Z, klass.LookupStaticFunctionAllowPrivate(
                                  H.DartSymbolObfuscate(throw_method_name)));
  ASSERT(!throw_new.IsNull());

  Fragment instructions;

  // Call LateError._throwFoo.
  instructions += Constant(name);
  instructions += StaticCall(position, throw_new,
                             /* argument_count = */ 1, ICData::kStatic);
  instructions += Drop();

  return instructions;
}
// Emits a store of [setter_value] into the late [field] (of [instance], or
// static). For a late *final* field, first checks that the field still holds
// the sentinel and throws a LateInitializationError if it was already set.
Fragment FlowGraphBuilder::StoreLateField(const Field& field,
                                          LocalVariable* instance,
                                          LocalVariable* setter_value) {
  Fragment instructions;
  TargetEntryInstr* is_uninitialized;
  TargetEntryInstr* is_initialized;
  const TokenPosition position = field.token_pos();
  const bool is_static = field.is_static();
  const bool is_final = field.is_final();

  if (is_final) {
    // Check whether the field has been initialized already.
    if (is_static) {
      instructions += LoadStaticField(field, /*calls_initializer=*/false);
    } else {
      instructions += LoadLocal(instance);
      instructions += LoadField(field, /*calls_initializer=*/false);
    }
    instructions += Constant(Object::sentinel());
    instructions += BranchIfStrictEqual(&is_uninitialized, &is_initialized);
    JoinEntryInstr* join = BuildJoinEntry();

    {
      // If the field isn't initialized, do nothing.
      Fragment initialize(is_uninitialized);
      initialize += Goto(join);
    }

    {
      // If the field is already initialized, throw a LateInitializationError.
      Fragment already_initialized(is_initialized);
      already_initialized += ThrowLateInitializationError(
          position, "_throwFieldAlreadyInitialized",
          String::ZoneHandle(Z, field.name()));
      already_initialized += Goto(join);
    }

    // Continue building after the join of the two branches.
    instructions = Fragment(instructions.entry, join);
  }

  // Perform the actual store (guarded for instance fields).
  if (!is_static) {
    instructions += LoadLocal(instance);
  }
  instructions += LoadLocal(setter_value);
  if (is_static) {
    instructions += StoreStaticField(position, field);
  } else {
    instructions += StoreInstanceFieldGuarded(field);
  }

  return instructions;
}
// Emits a call to the native entry [function] named [name], popping the
// function's parameters (plus the type-argument vector for generic
// functions) from the stack. Never inlined.
Fragment FlowGraphBuilder::NativeCall(const String* name,
                                      const Function* function) {
  InlineBailout("kernel::FlowGraphBuilder::NativeCall");
  // Generic functions receive their type-argument vector as an extra input.
  const intptr_t num_args =
      function->NumParameters() + (function->IsGeneric() ? 1 : 0);
  InputsArray* arguments = GetArguments(num_args);
  NativeCallInstr* call = new (Z)
      NativeCallInstr(name, function, FLAG_link_natives_lazily,
                      InstructionSource(function->end_token_pos()), arguments);
  Push(call);
  return Fragment(call);
}
// Emits a return of the value on the stack, preceded by a result type check
// for native functions (unless [omit_result_type_check]) and a debug step
// check when the debugger needs one.
Fragment FlowGraphBuilder::Return(TokenPosition position,
                                  bool omit_result_type_check,
                                  intptr_t yield_index) {
  Fragment instructions;
  const Function& function = parsed_function_->function();

  // Emit a type check of the return type in checked mode for all functions
  // and in strong mode for native functions.
  if (!omit_result_type_check && function.is_native()) {
    const AbstractType& return_type =
        AbstractType::Handle(Z, function.result_type());
    instructions += CheckAssignable(return_type, Symbols::FunctionResult());
  }

  if (NeedsDebugStepCheck(function, position)) {
    instructions += DebugStepCheck(position);
  }

  instructions += BaseFlowGraphBuilder::Return(position, yield_index);

  return instructions;
}
// Convenience overload: static call without named arguments, inferred type
// metadata, or type arguments.
Fragment FlowGraphBuilder::StaticCall(TokenPosition position,
                                      const Function& target,
                                      intptr_t argument_count,
                                      ICData::RebindRule rebind_rule) {
  return StaticCall(position, target, argument_count, Array::null_array(),
                    rebind_rule);
}
// Attaches inferred result-type information to [call].
//
// When the call can derive its result cid from the target itself
// (InitResultType), the [result_type] metadata must agree with it;
// otherwise any non-trivial metadata is attached to the call.
// [target] and [argument_count] are unused here but retained for interface
// stability with existing callers.
void FlowGraphBuilder::SetResultTypeForStaticCall(
    StaticCallInstr* call,
    const Function& target,
    intptr_t argument_count,
    const InferredTypeMetadata* result_type) {
  if (call->InitResultType(Z)) {
    // Fixed: use nullptr (was NULL) for consistency with this file.
    ASSERT((result_type == nullptr) || (result_type->cid == kDynamicCid) ||
           (result_type->cid == call->result_cid()));
    return;
  }
  if ((result_type != nullptr) && !result_type->IsTrivial()) {
    call->SetResultType(Z, result_type->ToCompileType(Z));
  }
}
// Emits a static call to [target], popping [argument_count] arguments (plus
// the type-argument vector when [type_args_count] > 0) and pushing the
// result. As in InstanceCall, a constant [result_type] replaces the call's
// result on the stack while keeping the call itself.
Fragment FlowGraphBuilder::StaticCall(TokenPosition position,
                                      const Function& target,
                                      intptr_t argument_count,
                                      const Array& argument_names,
                                      ICData::RebindRule rebind_rule,
                                      const InferredTypeMetadata* result_type,
                                      intptr_t type_args_count,
                                      bool use_unchecked_entry) {
  const intptr_t total_count = argument_count + (type_args_count > 0 ? 1 : 0);
  InputsArray* arguments = GetArguments(total_count);
  StaticCallInstr* call = new (Z) StaticCallInstr(
      InstructionSource(position), target, type_args_count, argument_names,
      arguments, ic_data_array_, GetNextDeoptId(), rebind_rule);
  SetResultTypeForStaticCall(call, target, argument_count, result_type);
  if (use_unchecked_entry) {
    call->set_entry_kind(Code::EntryKind::kUnchecked);
  }
  Push(call);
  if (result_type != nullptr && result_type->IsConstant()) {
    Fragment instructions(call);
    instructions += Drop();
    instructions += Constant(result_type->constant_value);
    return instructions;
  }
  return Fragment(call);
}
// Emits a call to _StringBase._interpolateSingle with the single value
// already on the stack.
Fragment FlowGraphBuilder::StringInterpolateSingle(TokenPosition position) {
  const auto& target =
      CompilerState::Current().StringBaseInterpolateSingle();
  return StaticCall(position, target,
                    /* argument_count = */ 1, ICData::kStatic);
}
// Emits a call to _StringBase._interpolate with the value list already on
// the stack.
Fragment FlowGraphBuilder::StringInterpolate(TokenPosition position) {
  const auto& target = CompilerState::Current().StringBaseInterpolate();
  return StaticCall(position, target,
                    /* argument_count = */ 1, ICData::kStatic);
}
// Builds code that allocates a _TypeError, initializes it via the
// _TypeError._create constructor, and throws it.
Fragment FlowGraphBuilder::ThrowTypeError() {
  const Class& klass =
      Class::ZoneHandle(Z, Library::LookupCoreClass(Symbols::TypeError()));
  ASSERT(!klass.IsNull());
  // Build the private constructor name "_TypeError._create".
  GrowableHandlePtrArray<const String> pieces(Z, 3);
  pieces.Add(Symbols::TypeError());
  pieces.Add(Symbols::Dot());
  pieces.Add(H.DartSymbolObfuscate("_create"));

  const Function& constructor = Function::ZoneHandle(
      Z, klass.LookupConstructorAllowPrivate(
             String::ZoneHandle(Z, Symbols::FromConcatAll(thread_, pieces))));
  ASSERT(!constructor.IsNull());

  const String& url = H.DartString(
      parsed_function_->function().ToLibNamePrefixedQualifiedCString(),
      Heap::kOld);

  Fragment instructions;

  // Create instance of _TypeError (the comment previously said
  // _FallThroughError, which did not match the class looked up above).
  instructions += AllocateObject(TokenPosition::kNoSource, klass, 0);
  LocalVariable* instance = MakeTemporary();

  // Call _TypeError._create constructor.
  instructions += LoadLocal(instance);  // this
  instructions += Constant(url);        // url
  instructions += NullConstant();       // line
  instructions += IntConstant(0);       // column
  instructions += Constant(H.DartSymbolPlain("Malformed type."));  // message

  instructions += StaticCall(TokenPosition::kNoSource, constructor,
                             /* argument_count = */ 5, ICData::kStatic);
  instructions += Drop();

  // Throw the exception.
  instructions += ThrowException(TokenPosition::kNoSource);

  return instructions;
}
// Builds code that invokes NoSuchMethodError._throwNew for a failed call to
// [target]; [incompatible_arguments] distinguishes "wrong arguments" from
// "member not found".
Fragment FlowGraphBuilder::ThrowNoSuchMethodError(const Function& target,
                                                  bool incompatible_arguments) {
  const Class& klass = Class::ZoneHandle(
      Z, Library::LookupCoreClass(Symbols::NoSuchMethodError()));
  ASSERT(!klass.IsNull());
  const auto& error = klass.EnsureIsFinalized(H.thread());
  ASSERT(error == Error::null());
  const Function& throw_function = Function::ZoneHandle(
      Z, klass.LookupStaticFunctionAllowPrivate(Symbols::ThrowNew()));
  ASSERT(!throw_function.IsNull());

  Fragment instructions;

  const Class& owner = Class::Handle(Z, target.Owner());
  auto& receiver = Instance::ZoneHandle();
  // Classify the invocation for the error message.
  InvocationMirror::Kind kind = InvocationMirror::Kind::kMethod;
  if (target.IsImplicitGetterFunction() || target.IsGetterFunction()) {
    kind = InvocationMirror::kGetter;
  } else if (target.IsImplicitSetterFunction() || target.IsSetterFunction()) {
    kind = InvocationMirror::kSetter;
  }
  InvocationMirror::Level level;
  if (owner.IsTopLevel()) {
    // For a top-level incompatible-arguments error, the "receiver" slot
    // carries the target's user-visible signature.
    if (incompatible_arguments) {
      receiver = target.UserVisibleSignature();
    }
    level = InvocationMirror::Level::kTopLevel;
  } else {
    receiver = owner.RareType();
    if (target.kind() == UntaggedFunction::kConstructor) {
      level = InvocationMirror::Level::kConstructor;
    } else {
      level = InvocationMirror::Level::kStatic;
    }
  }

  // Call NoSuchMethodError._throwNew static function.
  instructions += Constant(receiver);                              // receiver
  instructions += Constant(String::ZoneHandle(Z, target.name()));  // memberName
  instructions += IntConstant(InvocationMirror::EncodeType(level, kind));
  instructions += IntConstant(0);  // type arguments length
  instructions += NullConstant();  // type arguments
  instructions += NullConstant();  // arguments
  instructions += NullConstant();  // argumentNames

  instructions += StaticCall(TokenPosition::kNoSource, throw_function,
                             /* argument_count = */ 7, ICData::kStatic);

  // Properly close graph with a ThrowInstr, although it is not executed.
  instructions += ThrowException(TokenPosition::kNoSource);
  // Unreachable: drops the unused result of the call.
  instructions += Drop();

  return instructions;
}
// Returns the local variable declared at [kernel_offset]; the variable must
// be present in the scope-building results.
LocalVariable* FlowGraphBuilder::LookupVariable(intptr_t kernel_offset) {
  LocalVariable* local = scopes_->locals.Lookup(kernel_offset);
  // Fixed: nullptr (was NULL) for consistency with the rest of this file.
  ASSERT(local != nullptr);
  return local;
}
// Builds the IL flow graph for the current function by streaming its kernel
// body; also transfers coverage data and flags huge methods.
FlowGraph* FlowGraphBuilder::BuildGraph() {
  const Function& function = parsed_function_->function();

#ifdef DEBUG
  // If we attached the native name to the function after its creation (namely
  // after reading the constant table from the kernel blob), we must have done
  // so before building flow graph for the functions (since FGB needs
  // the native name to be there).
  const Script& script = Script::Handle(Z, function.script());
  const KernelProgramInfo& info =
      KernelProgramInfo::Handle(script.kernel_program_info());
  ASSERT(info.IsNull() ||
         info.potential_natives() == GrowableObjectArray::null());

  // Check that all functions that are explicitly marked as recognized with the
  // vm:recognized annotation are in fact recognized. The check can't be done on
  // function creation, since the recognized status isn't set until later.
  if ((function.IsRecognized() !=
       MethodRecognizer::IsMarkedAsRecognized(function)) &&
      !function.IsDynamicInvocationForwarder()) {
    if (function.IsRecognized()) {
      FATAL1(
          "Recognized method %s is not marked with the vm:recognized pragma.",
          function.ToQualifiedCString());
    } else {
      FATAL1(
          "Non-recognized method %s is marked with the vm:recognized pragma.",
          function.ToQualifiedCString());
    }
  }
#endif

  auto& kernel_data = ExternalTypedData::Handle(Z, function.KernelData());
  intptr_t kernel_data_program_offset = function.KernelDataProgramOffset();

  // The streaming builder does the actual kernel-AST-to-IL translation.
  StreamingFlowGraphBuilder streaming_flow_graph_builder(
      this, kernel_data, kernel_data_program_offset);
  auto result = streaming_flow_graph_builder.BuildGraph();

  FinalizeCoverageArray();
  result->set_coverage_array(coverage_array());

  // Huge methods get some optimizations disabled; optionally warn about them.
  if (streaming_flow_graph_builder.num_ast_nodes() >
      FLAG_huge_method_cutoff_in_ast_nodes) {
    if (FLAG_print_huge_methods) {
      OS::PrintErr(
          "Warning: \'%s\' from \'%s\' is too large. Some optimizations have "
          "been "
          "disabled, and the compiler might run out of memory. "
          "Consider refactoring this code into smaller components.\n",
          function.QualifiedUserVisibleNameCString(),
          String::Handle(Z, Library::Handle(
                                Z, Class::Handle(Z, function.Owner()).library())
                                .url())
              .ToCString());
    }
    result->mark_huge_method();
  }

  return result;
}
// Builds the body of a native function that is not handled as a recognized
// method: pushes the raw parameters (and the type-argument vector for
// generic functions), performs the native call, and returns its result.
// NOTE(review): [first_parameter] is not used by this implementation —
// presumably kept for signature parity with other body builders; confirm.
Fragment FlowGraphBuilder::NativeFunctionBody(const Function& function,
                                              LocalVariable* first_parameter) {
  ASSERT(function.is_native());
  ASSERT(!IsRecognizedMethodForFlowGraph(function));

  Fragment body;
  String& name = String::ZoneHandle(Z, function.native_name());
  if (function.IsGeneric()) {
    body += LoadLocal(parsed_function_->RawTypeArgumentsVariable());
  }
  for (intptr_t i = 0; i < function.NumParameters(); ++i) {
    body += LoadLocal(parsed_function_->RawParameterVariable(i));
  }
  body += NativeCall(&name, &function);
  // We typecheck results of native calls for type safety.
  body +=
      Return(TokenPosition::kNoSource, /* omit_result_type_check = */ false);
  return body;
}
// X-macro tables mapping recognized getter/setter methods to the native
// field (Slot) they access. They are expanded both to generate recognizer
// switch cases (see IsRecognizedMethodForFlowGraph) and the corresponding
// graph-building code.

// Recognized getters: method -> loaded native field.
#define LOAD_NATIVE_FIELD(V)                                                   \
  V(ByteDataViewLength, TypedDataBase_length)                                  \
  V(ByteDataViewOffsetInBytes, TypedDataView_offset_in_bytes)                  \
  V(ByteDataViewTypedData, TypedDataView_typed_data)                           \
  V(GrowableArrayLength, GrowableObjectArray_length)                           \
  V(ImmutableLinkedHashBase_getData, ImmutableLinkedHashBase_data)             \
  V(ImmutableLinkedHashBase_getIndex, ImmutableLinkedHashBase_index)           \
  V(LinkedHashBase_getData, LinkedHashBase_data)                               \
  V(LinkedHashBase_getDeletedKeys, LinkedHashBase_deleted_keys)                \
  V(LinkedHashBase_getHashMask, LinkedHashBase_hash_mask)                      \
  V(LinkedHashBase_getIndex, LinkedHashBase_index)                             \
  V(LinkedHashBase_getUsedData, LinkedHashBase_used_data)                      \
  V(ObjectArrayLength, Array_length)                                           \
  V(TypedDataViewOffsetInBytes, TypedDataView_offset_in_bytes)                 \
  V(TypedDataViewTypedData, TypedDataView_typed_data)                          \
  V(TypedListBaseLength, TypedDataBase_length)                                 \
  V(WeakProperty_getKey, WeakProperty_key)                                     \
  V(WeakProperty_getValue, WeakProperty_value)                                 \
  V(WeakReference_getTarget, WeakReference_target)

// Recognized setters: method -> stored native field (with write barrier).
#define STORE_NATIVE_FIELD(V)                                                  \
  V(LinkedHashBase_setData, LinkedHashBase_data)                               \
  V(LinkedHashBase_setIndex, LinkedHashBase_index)                             \
  V(WeakProperty_setKey, WeakProperty_key)                                     \
  V(WeakProperty_setValue, WeakProperty_value)                                 \
  V(WeakReference_setTarget, WeakReference_target)

// Recognized setters whose stores need no write barrier.
#define STORE_NATIVE_FIELD_NO_BARRIER(V)                                       \
  V(LinkedHashBase_setDeletedKeys, LinkedHashBase_deleted_keys)                \
  V(LinkedHashBase_setHashMask, LinkedHashBase_hash_mask)                      \
  V(LinkedHashBase_setUsedData, LinkedHashBase_used_data)
// Returns true if [function] has a recognized kind for which this builder
// emits a hand-written flow graph (see BuildGraphOfRecognizedMethod below)
// instead of compiling its kernel body.
bool FlowGraphBuilder::IsRecognizedMethodForFlowGraph(
    const Function& function) {
  const MethodRecognizer::Kind kind = function.recognized_kind();

  switch (kind) {
    // Typed-data view and array factories.
    case MethodRecognizer::kTypedData_ByteDataView_factory:
    case MethodRecognizer::kTypedData_Int8ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint8ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint8ClampedArrayView_factory:
    case MethodRecognizer::kTypedData_Int16ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint16ArrayView_factory:
    case MethodRecognizer::kTypedData_Int32ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint32ArrayView_factory:
    case MethodRecognizer::kTypedData_Int64ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint64ArrayView_factory:
    case MethodRecognizer::kTypedData_Float32ArrayView_factory:
    case MethodRecognizer::kTypedData_Float64ArrayView_factory:
    case MethodRecognizer::kTypedData_Float32x4ArrayView_factory:
    case MethodRecognizer::kTypedData_Int32x4ArrayView_factory:
    case MethodRecognizer::kTypedData_Float64x2ArrayView_factory:
    case MethodRecognizer::kTypedData_Int8Array_factory:
    case MethodRecognizer::kTypedData_Uint8Array_factory:
    case MethodRecognizer::kTypedData_Uint8ClampedArray_factory:
    case MethodRecognizer::kTypedData_Int16Array_factory:
    case MethodRecognizer::kTypedData_Uint16Array_factory:
    case MethodRecognizer::kTypedData_Int32Array_factory:
    case MethodRecognizer::kTypedData_Uint32Array_factory:
    case MethodRecognizer::kTypedData_Int64Array_factory:
    case MethodRecognizer::kTypedData_Uint64Array_factory:
    case MethodRecognizer::kTypedData_Float32Array_factory:
    case MethodRecognizer::kTypedData_Float64Array_factory:
    case MethodRecognizer::kTypedData_Float32x4Array_factory:
    case MethodRecognizer::kTypedData_Int32x4Array_factory:
    case MethodRecognizer::kTypedData_Float64x2Array_factory:
    // FFI loads/stores and pointer/typed-data conversions.
    case MethodRecognizer::kFfiLoadInt8:
    case MethodRecognizer::kFfiLoadInt16:
    case MethodRecognizer::kFfiLoadInt32:
    case MethodRecognizer::kFfiLoadInt64:
    case MethodRecognizer::kFfiLoadUint8:
    case MethodRecognizer::kFfiLoadUint16:
    case MethodRecognizer::kFfiLoadUint32:
    case MethodRecognizer::kFfiLoadUint64:
    case MethodRecognizer::kFfiLoadFloat:
    case MethodRecognizer::kFfiLoadFloatUnaligned:
    case MethodRecognizer::kFfiLoadDouble:
    case MethodRecognizer::kFfiLoadDoubleUnaligned:
    case MethodRecognizer::kFfiLoadPointer:
    case MethodRecognizer::kFfiStoreInt8:
    case MethodRecognizer::kFfiStoreInt16:
    case MethodRecognizer::kFfiStoreInt32:
    case MethodRecognizer::kFfiStoreInt64:
    case MethodRecognizer::kFfiStoreUint8:
    case MethodRecognizer::kFfiStoreUint16:
    case MethodRecognizer::kFfiStoreUint32:
    case MethodRecognizer::kFfiStoreUint64:
    case MethodRecognizer::kFfiStoreFloat:
    case MethodRecognizer::kFfiStoreFloatUnaligned:
    case MethodRecognizer::kFfiStoreDouble:
    case MethodRecognizer::kFfiStoreDoubleUnaligned:
    case MethodRecognizer::kFfiStorePointer:
    case MethodRecognizer::kFfiFromAddress:
    case MethodRecognizer::kFfiGetAddress:
    case MethodRecognizer::kFfiAsExternalTypedDataInt8:
    case MethodRecognizer::kFfiAsExternalTypedDataInt16:
    case MethodRecognizer::kFfiAsExternalTypedDataInt32:
    case MethodRecognizer::kFfiAsExternalTypedDataInt64:
    case MethodRecognizer::kFfiAsExternalTypedDataUint8:
    case MethodRecognizer::kFfiAsExternalTypedDataUint16:
    case MethodRecognizer::kFfiAsExternalTypedDataUint32:
    case MethodRecognizer::kFfiAsExternalTypedDataUint64:
    case MethodRecognizer::kFfiAsExternalTypedDataFloat:
    case MethodRecognizer::kFfiAsExternalTypedDataDouble:
    // Miscellaneous core-library intrinsics.
    case MethodRecognizer::kGetNativeField:
    case MethodRecognizer::kObjectEquals:
    case MethodRecognizer::kStringBaseLength:
    case MethodRecognizer::kStringBaseIsEmpty:
    case MethodRecognizer::kClassIDgetID:
    case MethodRecognizer::kGrowableArrayAllocateWithData:
    case MethodRecognizer::kGrowableArrayCapacity:
    case MethodRecognizer::kListFactory:
    case MethodRecognizer::kObjectArrayAllocate:
    case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
    case MethodRecognizer::kImmutableLinkedHashBase_setIndexStoreRelease:
    case MethodRecognizer::kFfiAbi:
    case MethodRecognizer::kReachabilityFence:
    case MethodRecognizer::kUtf8DecoderScan:
    case MethodRecognizer::kHas63BitSmis:
// Native field getters/setters listed in the X-macro tables above.
#define CASE(method, slot) case MethodRecognizer::k##method:
      LOAD_NATIVE_FIELD(CASE)
      STORE_NATIVE_FIELD(CASE)
      STORE_NATIVE_FIELD_NO_BARRIER(CASE)
#undef CASE
      return true;
    // Double/math intrinsics require unboxed-double support.
    case MethodRecognizer::kDoubleToInteger:
    case MethodRecognizer::kDoubleMod:
    case MethodRecognizer::kDoubleRoundToDouble:
    case MethodRecognizer::kDoubleTruncateToDouble:
    case MethodRecognizer::kDoubleFloorToDouble:
    case MethodRecognizer::kDoubleCeilToDouble:
    case MethodRecognizer::kMathDoublePow:
    case MethodRecognizer::kMathSin:
    case MethodRecognizer::kMathCos:
    case MethodRecognizer::kMathTan:
    case MethodRecognizer::kMathAsin:
    case MethodRecognizer::kMathAcos:
    case MethodRecognizer::kMathAtan:
    case MethodRecognizer::kMathAtan2:
    case MethodRecognizer::kMathExp:
    case MethodRecognizer::kMathLog:
    case MethodRecognizer::kMathSqrt:
      return FlowGraphCompiler::SupportsUnboxedDoubles();
    // Additionally architecture-dependent.
    case MethodRecognizer::kDoubleCeilToInt:
    case MethodRecognizer::kDoubleFloorToInt:
      if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false;
#if defined(TARGET_ARCH_X64)
      return CompilerState::Current().is_aot() || FLAG_target_unknown_cpu;
#elif defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_RISCV32) ||            \
    defined(TARGET_ARCH_RISCV64)
      return true;
#else
      return false;
#endif
    default:
      return false;
  }
}
// Builds a complete flow graph for a recognized method by emitting a
// hand-written IL body instead of compiling its Dart-level implementation.
// Must be kept in sync with IsRecognizedMethodForFlowGraph, which decides
// which recognized kinds are routed here (asserted on entry).
FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
    const Function& function) {
  ASSERT(IsRecognizedMethodForFlowGraph(function));
  // Standard graph skeleton: graph entry -> function entry -> prologue,
  // then the specialized body is appended behind the prologue.
  graph_entry_ =
      new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
  auto normal_entry = BuildFunctionEntry(graph_entry_);
  graph_entry_->set_normal_entry(normal_entry);
  PrologueInfo prologue_info(-1, -1);
  BlockEntryInstr* instruction_cursor =
      BuildPrologue(normal_entry, &prologue_info);
  Fragment body(instruction_cursor);
  body += CheckStackOverflowInPrologue(function.token_pos());
  const MethodRecognizer::Kind kind = function.recognized_kind();
  // Each case leaves exactly one value on the stack, which the shared
  // Return at the bottom of this function consumes.
  switch (kind) {
    case MethodRecognizer::kTypedData_ByteDataView_factory:
      body += BuildTypedDataViewFactoryConstructor(function, kByteDataViewCid);
      break;
    case MethodRecognizer::kTypedData_Int8ArrayView_factory:
      body += BuildTypedDataViewFactoryConstructor(function,
                                                   kTypedDataInt8ArrayViewCid);
      break;
    case MethodRecognizer::kTypedData_Uint8ArrayView_factory:
      body += BuildTypedDataViewFactoryConstructor(function,
                                                   kTypedDataUint8ArrayViewCid);
      break;
    case MethodRecognizer::kTypedData_Uint8ClampedArrayView_factory:
      body += BuildTypedDataViewFactoryConstructor(
          function, kTypedDataUint8ClampedArrayViewCid);
      break;
    case MethodRecognizer::kTypedData_Int16ArrayView_factory:
      body += BuildTypedDataViewFactoryConstructor(function,
                                                   kTypedDataInt16ArrayViewCid);
      break;
    case MethodRecognizer::kTypedData_Uint16ArrayView_factory:
      body += BuildTypedDataViewFactoryConstructor(
          function, kTypedDataUint16ArrayViewCid);
      break;
    case MethodRecognizer::kTypedData_Int32ArrayView_factory:
      body += BuildTypedDataViewFactoryConstructor(function,
                                                   kTypedDataInt32ArrayViewCid);
      break;
    case MethodRecognizer::kTypedData_Uint32ArrayView_factory:
      body += BuildTypedDataViewFactoryConstructor(
          function, kTypedDataUint32ArrayViewCid);
      break;
    case MethodRecognizer::kTypedData_Int64ArrayView_factory:
      body += BuildTypedDataViewFactoryConstructor(function,
                                                   kTypedDataInt64ArrayViewCid);
      break;
    case MethodRecognizer::kTypedData_Uint64ArrayView_factory:
      body += BuildTypedDataViewFactoryConstructor(
          function, kTypedDataUint64ArrayViewCid);
      break;
    case MethodRecognizer::kTypedData_Float32ArrayView_factory:
      body += BuildTypedDataViewFactoryConstructor(
          function, kTypedDataFloat32ArrayViewCid);
      break;
    case MethodRecognizer::kTypedData_Float64ArrayView_factory:
      body += BuildTypedDataViewFactoryConstructor(
          function, kTypedDataFloat64ArrayViewCid);
      break;
    case MethodRecognizer::kTypedData_Float32x4ArrayView_factory:
      body += BuildTypedDataViewFactoryConstructor(
          function, kTypedDataFloat32x4ArrayViewCid);
      break;
    case MethodRecognizer::kTypedData_Int32x4ArrayView_factory:
      body += BuildTypedDataViewFactoryConstructor(
          function, kTypedDataInt32x4ArrayViewCid);
      break;
    case MethodRecognizer::kTypedData_Float64x2ArrayView_factory:
      body += BuildTypedDataViewFactoryConstructor(
          function, kTypedDataFloat64x2ArrayViewCid);
      break;
    case MethodRecognizer::kTypedData_Int8Array_factory:
      body +=
          BuildTypedDataFactoryConstructor(function, kTypedDataInt8ArrayCid);
      break;
    case MethodRecognizer::kTypedData_Uint8Array_factory:
      body +=
          BuildTypedDataFactoryConstructor(function, kTypedDataUint8ArrayCid);
      break;
    case MethodRecognizer::kTypedData_Uint8ClampedArray_factory:
      body += BuildTypedDataFactoryConstructor(function,
                                               kTypedDataUint8ClampedArrayCid);
      break;
    case MethodRecognizer::kTypedData_Int16Array_factory:
      body +=
          BuildTypedDataFactoryConstructor(function, kTypedDataInt16ArrayCid);
      break;
    case MethodRecognizer::kTypedData_Uint16Array_factory:
      body +=
          BuildTypedDataFactoryConstructor(function, kTypedDataUint16ArrayCid);
      break;
    case MethodRecognizer::kTypedData_Int32Array_factory:
      body +=
          BuildTypedDataFactoryConstructor(function, kTypedDataInt32ArrayCid);
      break;
    case MethodRecognizer::kTypedData_Uint32Array_factory:
      body +=
          BuildTypedDataFactoryConstructor(function, kTypedDataUint32ArrayCid);
      break;
    case MethodRecognizer::kTypedData_Int64Array_factory:
      body +=
          BuildTypedDataFactoryConstructor(function, kTypedDataInt64ArrayCid);
      break;
    case MethodRecognizer::kTypedData_Uint64Array_factory:
      body +=
          BuildTypedDataFactoryConstructor(function, kTypedDataUint64ArrayCid);
      break;
    case MethodRecognizer::kTypedData_Float32Array_factory:
      body +=
          BuildTypedDataFactoryConstructor(function, kTypedDataFloat32ArrayCid);
      break;
    case MethodRecognizer::kTypedData_Float64Array_factory:
      body +=
          BuildTypedDataFactoryConstructor(function, kTypedDataFloat64ArrayCid);
      break;
    case MethodRecognizer::kTypedData_Float32x4Array_factory:
      body += BuildTypedDataFactoryConstructor(function,
                                               kTypedDataFloat32x4ArrayCid);
      break;
    case MethodRecognizer::kTypedData_Int32x4Array_factory:
      body +=
          BuildTypedDataFactoryConstructor(function, kTypedDataInt32x4ArrayCid);
      break;
    case MethodRecognizer::kTypedData_Float64x2Array_factory:
      body += BuildTypedDataFactoryConstructor(function,
                                               kTypedDataFloat64x2ArrayCid);
      break;
    case MethodRecognizer::kObjectEquals:
      // Object.== is identity: strict-compare the two arguments.
      ASSERT_EQUAL(function.NumParameters(), 2);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadLocal(parsed_function_->RawParameterVariable(1));
      body += StrictCompare(Token::kEQ_STRICT);
      break;
    case MethodRecognizer::kStringBaseLength:
    case MethodRecognizer::kStringBaseIsEmpty:
      // Both load the String length; isEmpty additionally compares it to 0.
      ASSERT_EQUAL(function.NumParameters(), 1);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadNativeField(Slot::String_length());
      if (kind == MethodRecognizer::kStringBaseIsEmpty) {
        body += IntConstant(0);
        body += StrictCompare(Token::kEQ_STRICT);
      }
      break;
    case MethodRecognizer::kClassIDgetID:
      ASSERT_EQUAL(function.NumParameters(), 1);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadClassId();
      break;
    case MethodRecognizer::kGrowableArrayAllocateWithData: {
      // Allocates a _GrowableList and initializes its data/length fields
      // directly, bypassing the Dart-level constructor.
      ASSERT(function.IsFactory());
      ASSERT_EQUAL(function.NumParameters(), 2);
      const Class& cls =
          Class::ZoneHandle(Z, compiler::GrowableObjectArrayClass().ptr());
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += AllocateObject(TokenPosition::kNoSource, cls, 1);
      LocalVariable* object = MakeTemporary();
      body += LoadLocal(object);
      body += LoadLocal(parsed_function_->RawParameterVariable(1));
      body += StoreNativeField(Slot::GrowableObjectArray_data(),
                               StoreInstanceFieldInstr::Kind::kInitializing,
                               kNoStoreBarrier);
      body += LoadLocal(object);
      body += IntConstant(0);
      body += StoreNativeField(Slot::GrowableObjectArray_length(),
                               StoreInstanceFieldInstr::Kind::kInitializing,
                               kNoStoreBarrier);
      break;
    }
    case MethodRecognizer::kGrowableArrayCapacity:
      // Capacity == length of the backing data array.
      ASSERT_EQUAL(function.NumParameters(), 1);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadNativeField(Slot::GrowableObjectArray_data());
      body += LoadNativeField(Slot::Array_length());
      break;
    case MethodRecognizer::kListFactory: {
      ASSERT(function.IsFactory() && (function.NumParameters() == 2) &&
             function.HasOptionalParameters());
      // factory List<E>([int length]) {
      //   return (:arg_desc.positional_count == 2) ? new _List<E>(length)
      //                                            : new _GrowableList<E>(0);
      // }
      const Library& core_lib = Library::Handle(Z, Library::CoreLibrary());
      TargetEntryInstr* allocate_non_growable;
      TargetEntryInstr* allocate_growable;
      // Branch on whether the caller actually passed `length` (read from the
      // arguments descriptor, since the parameter is optional).
      body += LoadArgDescriptor();
      body += LoadNativeField(Slot::ArgumentsDescriptor_positional_count());
      body += IntConstant(2);
      body += BranchIfStrictEqual(&allocate_non_growable, &allocate_growable);
      JoinEntryInstr* join = BuildJoinEntry();
      {
        // length given: allocate a fixed-length _List via its factory.
        const Class& cls = Class::Handle(
            Z, core_lib.LookupClass(
                   Library::PrivateCoreLibName(Symbols::_List())));
        ASSERT(!cls.IsNull());
        const Function& func = Function::ZoneHandle(
            Z, cls.LookupFactoryAllowPrivate(Symbols::_ListFactory()));
        ASSERT(!func.IsNull());
        Fragment allocate(allocate_non_growable);
        allocate += LoadLocal(parsed_function_->RawParameterVariable(0));
        allocate += LoadLocal(parsed_function_->RawParameterVariable(1));
        allocate +=
            StaticCall(TokenPosition::kNoSource, func, 2, ICData::kStatic);
        allocate += StoreLocal(TokenPosition::kNoSource,
                               parsed_function_->expression_temp_var());
        allocate += Drop();
        allocate += Goto(join);
      }
      {
        // No length: allocate an empty _GrowableList via its factory.
        const Class& cls = Class::Handle(
            Z, core_lib.LookupClass(
                   Library::PrivateCoreLibName(Symbols::_GrowableList())));
        ASSERT(!cls.IsNull());
        const Function& func = Function::ZoneHandle(
            Z, cls.LookupFactoryAllowPrivate(Symbols::_GrowableListFactory()));
        ASSERT(!func.IsNull());
        Fragment allocate(allocate_growable);
        allocate += LoadLocal(parsed_function_->RawParameterVariable(0));
        allocate += IntConstant(0);
        allocate +=
            StaticCall(TokenPosition::kNoSource, func, 2, ICData::kStatic);
        allocate += StoreLocal(TokenPosition::kNoSource,
                               parsed_function_->expression_temp_var());
        allocate += Drop();
        allocate += Goto(join);
      }
      // Both arms stored their result in expression_temp_var; reload it
      // after the join so the switch's single-result invariant holds.
      body = Fragment(body.entry, join);
      body += LoadLocal(parsed_function_->expression_temp_var());
      break;
    }
    case MethodRecognizer::kObjectArrayAllocate:
      ASSERT(function.IsFactory() && (function.NumParameters() == 2));
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadLocal(parsed_function_->RawParameterVariable(1));
      body += CreateArray();
      break;
    case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
      ASSERT_EQUAL(function.NumParameters(), 5);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadLocal(parsed_function_->RawParameterVariable(1));
      body += LoadLocal(parsed_function_->RawParameterVariable(2));
      body += LoadLocal(parsed_function_->RawParameterVariable(3));
      body += LoadLocal(parsed_function_->RawParameterVariable(4));
      body += MemoryCopy(kTypedDataUint8ArrayCid, kOneByteStringCid);
      // void return: leave null for the shared Return below.
      body += NullConstant();
      break;
    case MethodRecognizer::kImmutableLinkedHashBase_setIndexStoreRelease:
      ASSERT_EQUAL(function.NumParameters(), 2);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += LoadLocal(parsed_function_->RawParameterVariable(1));
      // Uses a store-release barrier so that other isolates will see the
      // contents of the index after seeing the index itself.
      body +=
          StoreNativeField(Slot::ImmutableLinkedHashBase_index(),
                           StoreInstanceFieldInstr::Kind::kOther,
                           kEmitStoreBarrier, compiler::Assembler::kRelease);
      body += NullConstant();
      break;
    case MethodRecognizer::kUtf8DecoderScan:
      ASSERT_EQUAL(function.NumParameters(), 5);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));  // decoder
      body += LoadLocal(parsed_function_->RawParameterVariable(1));  // bytes
      body += LoadLocal(parsed_function_->RawParameterVariable(2));  // start
      body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
      body += UnboxTruncate(kUnboxedIntPtr);
      body += LoadLocal(parsed_function_->RawParameterVariable(3));  // end
      body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
      body += UnboxTruncate(kUnboxedIntPtr);
      body += LoadLocal(parsed_function_->RawParameterVariable(4));  // table
      body += Utf8Scan();
      body += Box(kUnboxedIntPtr);
      break;
    case MethodRecognizer::kReachabilityFence:
      ASSERT_EQUAL(function.NumParameters(), 1);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += ReachabilityFence();
      body += NullConstant();
      break;
    case MethodRecognizer::kFfiAbi:
      // The target ABI is a compile-time constant.
      ASSERT_EQUAL(function.NumParameters(), 0);
      body += IntConstant(static_cast<int64_t>(compiler::ffi::TargetAbi()));
      break;
    case MethodRecognizer::kFfiLoadInt8:
    case MethodRecognizer::kFfiLoadInt16:
    case MethodRecognizer::kFfiLoadInt32:
    case MethodRecognizer::kFfiLoadInt64:
    case MethodRecognizer::kFfiLoadUint8:
    case MethodRecognizer::kFfiLoadUint16:
    case MethodRecognizer::kFfiLoadUint32:
    case MethodRecognizer::kFfiLoadUint64:
    case MethodRecognizer::kFfiLoadFloat:
    case MethodRecognizer::kFfiLoadFloatUnaligned:
    case MethodRecognizer::kFfiLoadDouble:
    case MethodRecognizer::kFfiLoadDoubleUnaligned:
    case MethodRecognizer::kFfiLoadPointer: {
      // Loads a value of the recognized native type from pointer + offset,
      // emitted as an indexed load on the corresponding typed-data cid.
      const classid_t ffi_type_arg_cid =
          compiler::ffi::RecognizedMethodTypeArgCid(kind);
      const AlignmentType alignment =
          compiler::ffi::RecognizedMethodAlignment(kind);
      const classid_t typed_data_cid =
          compiler::ffi::ElementTypedDataCid(ffi_type_arg_cid);
      ASSERT_EQUAL(function.NumParameters(), 2);
      LocalVariable* arg_pointer = parsed_function_->RawParameterVariable(0);
      LocalVariable* arg_offset = parsed_function_->RawParameterVariable(1);
      body += LoadLocal(arg_offset);
      body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
      LocalVariable* arg_offset_not_null = MakeTemporary();
      body += LoadLocal(arg_pointer);
      body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
      // No GC from here til LoadIndexed.
      body += LoadUntagged(compiler::target::PointerBase::data_offset());
      body += LoadLocal(arg_offset_not_null);
      body += UnboxTruncate(kUnboxedFfiIntPtr);
      body += LoadIndexed(typed_data_cid, /*index_scale=*/1,
                          /*index_unboxed=*/true, alignment);
      if (kind == MethodRecognizer::kFfiLoadFloat ||
          kind == MethodRecognizer::kFfiLoadFloatUnaligned) {
        body += FloatToDouble();
      }
      // Avoid any unnecessary (and potentially deoptimizing) int
      // conversions by using the representation returned from LoadIndexed.
      body +=
          Box(LoadIndexedInstr::RepresentationOfArrayElement(typed_data_cid));
      if (kind == MethodRecognizer::kFfiLoadPointer) {
        const auto class_table = thread_->isolate_group()->class_table();
        ASSERT(class_table->HasValidClassAt(kPointerCid));
        const auto& pointer_class =
            Class::ZoneHandle(H.zone(), class_table->At(kPointerCid));
        // We find the reified type to use for the pointer allocation.
        //
        // Call sites to this recognized method are guaranteed to pass a
        // Pointer<Pointer<X>> as RawParameterVariable(0). This function
        // will return a Pointer<X> object - for which we inspect the
        // reified type on the argument.
        //
        // The following is safe to do, as (1) we are guaranteed to have a
        // Pointer<Pointer<X>> as argument, and (2) the bound on the pointer
        // type parameter guarantees X is an interface type.
        ASSERT(function.NumTypeParameters() == 1);
        LocalVariable* address = MakeTemporary();
        body += LoadLocal(parsed_function_->RawParameterVariable(0));
        body += LoadNativeField(
            Slot::GetTypeArgumentsSlotFor(thread_, pointer_class));
        body += LoadNativeField(Slot::GetTypeArgumentsIndexSlot(
            thread_, Pointer::kNativeTypeArgPos));
        body += LoadNativeField(Slot::Type_arguments());
        body += AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
        LocalVariable* pointer = MakeTemporary();
        body += LoadLocal(pointer);
        body += LoadLocal(address);
        body += UnboxTruncate(kUnboxedIntPtr);
        body += StoreNativeField(Slot::PointerBase_data());
        body += DropTempsPreserveTop(1);  // Drop [address] keep [pointer].
      }
      body += DropTempsPreserveTop(1);  // Drop [arg_offset].
    } break;
    case MethodRecognizer::kFfiStoreInt8:
    case MethodRecognizer::kFfiStoreInt16:
    case MethodRecognizer::kFfiStoreInt32:
    case MethodRecognizer::kFfiStoreInt64:
    case MethodRecognizer::kFfiStoreUint8:
    case MethodRecognizer::kFfiStoreUint16:
    case MethodRecognizer::kFfiStoreUint32:
    case MethodRecognizer::kFfiStoreUint64:
    case MethodRecognizer::kFfiStoreFloat:
    case MethodRecognizer::kFfiStoreFloatUnaligned:
    case MethodRecognizer::kFfiStoreDouble:
    case MethodRecognizer::kFfiStoreDoubleUnaligned:
    case MethodRecognizer::kFfiStorePointer: {
      // Stores a value of the recognized native type at pointer + offset,
      // emitted as an indexed store on the corresponding typed-data cid.
      const classid_t ffi_type_arg_cid =
          compiler::ffi::RecognizedMethodTypeArgCid(kind);
      const AlignmentType alignment =
          compiler::ffi::RecognizedMethodAlignment(kind);
      const classid_t typed_data_cid =
          compiler::ffi::ElementTypedDataCid(ffi_type_arg_cid);
      LocalVariable* arg_pointer = parsed_function_->RawParameterVariable(0);
      LocalVariable* arg_offset = parsed_function_->RawParameterVariable(1);
      LocalVariable* arg_value = parsed_function_->RawParameterVariable(2);
      if (kind == MethodRecognizer::kFfiStorePointer) {
        // Do type check before anything untagged is on the stack.
        const auto class_table = thread_->isolate_group()->class_table();
        ASSERT(class_table->HasValidClassAt(kPointerCid));
        const auto& pointer_class =
            Class::ZoneHandle(H.zone(), class_table->At(kPointerCid));
        const auto& pointer_type_param =
            TypeParameter::ZoneHandle(pointer_class.TypeParameterAt(0));
        // But we type check it as a method on a generic class at runtime.
        body += LoadLocal(arg_value);          // value.
        body += Constant(pointer_type_param);  // dst_type.
        // We pass the Pointer type argument as instantiator_type_args.
        //
        // Call sites to this recognized method are guaranteed to pass a
        // Pointer<Pointer<X>> as RawParameterVariable(0). This function
        // will takes a Pointer<X> object - for which we inspect the
        // reified type on the argument.
        //
        // The following is safe to do, as (1) we are guaranteed to have a
        // Pointer<Pointer<X>> as argument, and (2) the bound on the pointer
        // type parameter guarantees X is an interface type.
        body += LoadLocal(arg_pointer);
        body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
        body += LoadNativeField(
            Slot::GetTypeArgumentsSlotFor(thread_, pointer_class));
        body += NullConstant();  // function_type_args.
        body += AssertAssignable(TokenPosition::kNoSource, Symbols::Empty());
        body += Drop();
      }
      ASSERT_EQUAL(function.NumParameters(), 3);
      body += LoadLocal(arg_offset);
      body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
      LocalVariable* arg_offset_not_null = MakeTemporary();
      body += LoadLocal(arg_value);
      body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
      LocalVariable* arg_value_not_null = MakeTemporary();
      body += LoadLocal(arg_pointer);  // Pointer.
      body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
      // No GC from here til StoreIndexed.
      body += LoadUntagged(compiler::target::PointerBase::data_offset());
      body += LoadLocal(arg_offset_not_null);
      body += UnboxTruncate(kUnboxedFfiIntPtr);
      body += LoadLocal(arg_value_not_null);
      if (kind == MethodRecognizer::kFfiStorePointer) {
        // This can only be Pointer, so it is always safe to LoadUntagged.
        body += LoadUntagged(compiler::target::PointerBase::data_offset());
        body += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr);
      } else {
        // Avoid any unnecessary (and potentially deoptimizing) int
        // conversions by using the representation consumed by StoreIndexed.
        body += UnboxTruncate(
            StoreIndexedInstr::RepresentationOfArrayElement(typed_data_cid));
        if (kind == MethodRecognizer::kFfiStoreFloat ||
            kind == MethodRecognizer::kFfiStoreFloatUnaligned) {
          body += DoubleToFloat();
        }
      }
      body += StoreIndexedTypedData(typed_data_cid, /*index_scale=*/1,
                                    /*index_unboxed=*/true, alignment);
      body += Drop();  // Drop [arg_value].
      body += Drop();  // Drop [arg_offset].
      body += NullConstant();
    } break;
    case MethodRecognizer::kFfiFromAddress: {
      // Allocates a Pointer<X> and stores the given address into it.
      const auto class_table = thread_->isolate_group()->class_table();
      ASSERT(class_table->HasValidClassAt(kPointerCid));
      const auto& pointer_class =
          Class::ZoneHandle(H.zone(), class_table->At(kPointerCid));
      ASSERT(function.NumTypeParameters() == 1);
      ASSERT_EQUAL(function.NumParameters(), 1);
      body += LoadLocal(parsed_function_->RawTypeArgumentsVariable());
      body += AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
      body += LoadLocal(MakeTemporary());  // Duplicate Pointer.
      body += LoadLocal(parsed_function_->RawParameterVariable(0));  // Address.
      body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
      body += UnboxTruncate(kUnboxedIntPtr);
      body += StoreNativeField(Slot::PointerBase_data());
    } break;
    case MethodRecognizer::kFfiGetAddress: {
      ASSERT_EQUAL(function.NumParameters(), 1);
      body += LoadLocal(parsed_function_->RawParameterVariable(0));  // Pointer.
      body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
      // This can only be Pointer, so it is always safe to LoadUntagged.
      body += LoadUntagged(compiler::target::PointerBase::data_offset());
      body += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr);
      body += Box(kUnboxedFfiIntPtr);
    } break;
    case MethodRecognizer::kHas63BitSmis: {
#if defined(HAS_SMI_63_BITS)
      body += Constant(Bool::True());
#else
      body += Constant(Bool::False());
#endif  // defined(HAS_SMI_63_BITS)
    } break;
    case MethodRecognizer::kFfiAsExternalTypedDataInt8:
    case MethodRecognizer::kFfiAsExternalTypedDataInt16:
    case MethodRecognizer::kFfiAsExternalTypedDataInt32:
    case MethodRecognizer::kFfiAsExternalTypedDataInt64:
    case MethodRecognizer::kFfiAsExternalTypedDataUint8:
    case MethodRecognizer::kFfiAsExternalTypedDataUint16:
    case MethodRecognizer::kFfiAsExternalTypedDataUint32:
    case MethodRecognizer::kFfiAsExternalTypedDataUint64:
    case MethodRecognizer::kFfiAsExternalTypedDataFloat:
    case MethodRecognizer::kFfiAsExternalTypedDataDouble: {
      // Wraps a Pointer's memory in an external typed-data object of the
      // element type matching the recognized method.
      const classid_t ffi_type_arg_cid =
          compiler::ffi::RecognizedMethodTypeArgCid(kind);
      const classid_t external_typed_data_cid =
          compiler::ffi::ElementExternalTypedDataCid(ffi_type_arg_cid);
      auto class_table = thread_->isolate_group()->class_table();
      ASSERT(class_table->HasValidClassAt(external_typed_data_cid));
      const auto& typed_data_class =
          Class::ZoneHandle(H.zone(), class_table->At(external_typed_data_cid));
      // We assume that the caller has checked that the arguments are non-null
      // and length is in the range [0, kSmiMax/elementSize].
      ASSERT_EQUAL(function.NumParameters(), 2);
      LocalVariable* arg_pointer = parsed_function_->RawParameterVariable(0);
      LocalVariable* arg_length = parsed_function_->RawParameterVariable(1);
      body += AllocateObject(TokenPosition::kNoSource, typed_data_class, 0);
      LocalVariable* typed_data_object = MakeTemporary();
      // Initialize the result's length field.
      body += LoadLocal(typed_data_object);
      body += LoadLocal(arg_length);
      body += StoreNativeField(Slot::TypedDataBase_length(),
                               StoreInstanceFieldInstr::Kind::kInitializing,
                               kNoStoreBarrier);
      // Initialize the result's data pointer field.
      body += LoadLocal(typed_data_object);
      body += LoadLocal(arg_pointer);
      body += LoadUntagged(compiler::target::PointerBase::data_offset());
      body += ConvertUntaggedToUnboxed(kUnboxedIntPtr);
      body += StoreNativeField(Slot::PointerBase_data(),
                               StoreInstanceFieldInstr::Kind::kInitializing,
                               kNoStoreBarrier);
    } break;
    case MethodRecognizer::kGetNativeField: {
      auto& name = String::ZoneHandle(Z, function.name());
      // Note: This method is force optimized so we can push untagged, etc.
      // Load TypedDataArray from Instance Handle implementing
      // NativeFieldWrapper.
      body += LoadLocal(parsed_function_->RawParameterVariable(0));  // Object.
      body += CheckNullOptimized(name);
      body += LoadNativeField(Slot::Instance_native_fields_array());  // Fields.
      body += CheckNullOptimized(name);
      // Load the native field at index.
      body += IntConstant(0);  // Index.
      body += LoadIndexed(kIntPtrCid);
      body += Box(kUnboxedIntPtr);
    } break;
    case MethodRecognizer::kDoubleToInteger:
    case MethodRecognizer::kDoubleCeilToInt:
    case MethodRecognizer::kDoubleFloorToInt: {
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += DoubleToInteger(kind);
    } break;
    case MethodRecognizer::kDoubleMod:
    case MethodRecognizer::kDoubleRoundToDouble:
    case MethodRecognizer::kDoubleTruncateToDouble:
    case MethodRecognizer::kDoubleFloorToDouble:
    case MethodRecognizer::kDoubleCeilToDouble:
    case MethodRecognizer::kMathDoublePow:
    case MethodRecognizer::kMathSin:
    case MethodRecognizer::kMathCos:
    case MethodRecognizer::kMathTan:
    case MethodRecognizer::kMathAsin:
    case MethodRecognizer::kMathAcos:
    case MethodRecognizer::kMathAtan:
    case MethodRecognizer::kMathAtan2:
    case MethodRecognizer::kMathExp:
    case MethodRecognizer::kMathLog: {
      for (intptr_t i = 0, n = function.NumParameters(); i < n; ++i) {
        body += LoadLocal(parsed_function_->RawParameterVariable(i));
      }
      // Prefer a direct rounding instruction when the CPU supports it (JIT
      // only); otherwise fall back to the C runtime function.
      if (!CompilerState::Current().is_aot() &&
          TargetCPUFeatures::double_truncate_round_supported() &&
          ((kind == MethodRecognizer::kDoubleTruncateToDouble) ||
           (kind == MethodRecognizer::kDoubleFloorToDouble) ||
           (kind == MethodRecognizer::kDoubleCeilToDouble))) {
        body += DoubleToDouble(kind);
      } else {
        body += InvokeMathCFunction(kind, function.NumParameters());
      }
    } break;
    case MethodRecognizer::kMathSqrt: {
      body += LoadLocal(parsed_function_->RawParameterVariable(0));
      body += MathUnary(MathUnaryInstr::kSqrt);
    } break;
      // Getter bodies for slots registered in LOAD_NATIVE_FIELD.
#define IL_BODY(method, slot)                                                  \
  case MethodRecognizer::k##method:                                            \
    ASSERT_EQUAL(function.NumParameters(), 1);                                 \
    body += LoadLocal(parsed_function_->RawParameterVariable(0));              \
    body += LoadNativeField(Slot::slot());                                     \
    break;
      LOAD_NATIVE_FIELD(IL_BODY)
#undef IL_BODY
      // Setter bodies (with write barrier) for STORE_NATIVE_FIELD slots.
#define IL_BODY(method, slot)                                                  \
  case MethodRecognizer::k##method:                                            \
    ASSERT_EQUAL(function.NumParameters(), 2);                                 \
    body += LoadLocal(parsed_function_->RawParameterVariable(0));              \
    body += LoadLocal(parsed_function_->RawParameterVariable(1));              \
    body += StoreNativeField(Slot::slot());                                    \
    body += NullConstant();                                                    \
    break;
      STORE_NATIVE_FIELD(IL_BODY)
#undef IL_BODY
      // Setter bodies (no write barrier) for STORE_NATIVE_FIELD_NO_BARRIER.
#define IL_BODY(method, slot)                                                  \
  case MethodRecognizer::k##method:                                            \
    ASSERT_EQUAL(function.NumParameters(), 2);                                 \
    body += LoadLocal(parsed_function_->RawParameterVariable(0));              \
    body += LoadLocal(parsed_function_->RawParameterVariable(1));              \
    body += StoreNativeField(                                                  \
        Slot::slot(), StoreInstanceFieldInstr::Kind::kOther, kNoStoreBarrier); \
    body += NullConstant();                                                    \
    break;
      STORE_NATIVE_FIELD_NO_BARRIER(IL_BODY)
#undef IL_BODY
    default: {
      // IsRecognizedMethodForFlowGraph (asserted on entry) only admits the
      // kinds handled above.
      UNREACHABLE();
      break;
    }
  }
  body += Return(TokenPosition::kNoSource, /* omit_result_type_check = */ true);
  return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
                           prologue_info);
}
Fragment FlowGraphBuilder::BuildTypedDataViewFactoryConstructor(
    const Function& function,
    classid_t cid) {
  // Emits the body of a typed-data view factory: allocates a view object of
  // class [cid] and initializes it from the factory's arguments
  // (backing typed data, offset in bytes, length).
  const auto token_pos = function.token_pos();
  auto* table = Thread::Current()->isolate_group()->class_table();
  ASSERT(table->HasValidClassAt(cid));
  const auto& view_class = Class::ZoneHandle(H.zone(), table->At(cid));
  ASSERT(function.IsFactory() && (function.NumParameters() == 4));
  LocalVariable* backing_store = parsed_function_->RawParameterVariable(1);
  LocalVariable* view_offset = parsed_function_->RawParameterVariable(2);
  LocalVariable* view_length = parsed_function_->RawParameterVariable(3);
  Fragment instructions;
  instructions += AllocateObject(token_pos, view_class, /*arg_count=*/0);
  LocalVariable* view = MakeTemporary();
  // view.typed_data = backing_store.
  instructions += LoadLocal(view);
  instructions += LoadLocal(backing_store);
  instructions += StoreNativeField(token_pos, Slot::TypedDataView_typed_data(),
                                   StoreInstanceFieldInstr::Kind::kInitializing);
  // view.offset_in_bytes = view_offset.
  instructions += LoadLocal(view);
  instructions += LoadLocal(view_offset);
  instructions +=
      StoreNativeField(token_pos, Slot::TypedDataView_offset_in_bytes(),
                       StoreInstanceFieldInstr::Kind::kInitializing,
                       kNoStoreBarrier);
  // view.length = view_length.
  instructions += LoadLocal(view);
  instructions += LoadLocal(view_length);
  instructions += StoreNativeField(token_pos, Slot::TypedDataBase_length(),
                                   StoreInstanceFieldInstr::Kind::kInitializing,
                                   kNoStoreBarrier);
  // Update the inner pointer:
  //   view.data = backing_store.data + view_offset.
  //
  // WARNING: Notice that we assume here no GC happens between those 4
  // instructions!
  instructions += LoadLocal(view);
  instructions += LoadLocal(backing_store);
  instructions += LoadUntagged(compiler::target::PointerBase::data_offset());
  instructions += ConvertUntaggedToUnboxed(kUnboxedIntPtr);
  instructions += LoadLocal(view_offset);
  instructions += UnboxSmiToIntptr();
  instructions += AddIntptrIntegers();
  instructions += StoreNativeField(Slot::PointerBase_data());
  return instructions;
}
Fragment FlowGraphBuilder::BuildTypedDataFactoryConstructor(
    const Function& function,
    classid_t cid) {
  // Emits the body of a typed-data factory: allocates a typed-data object of
  // class [cid] whose length is the factory's second argument.
  const auto pos = function.token_pos();
  ASSERT(
      Thread::Current()->isolate_group()->class_table()->HasValidClassAt(cid));
  ASSERT(function.IsFactory() && (function.NumParameters() == 2));
  LocalVariable* length_arg = parsed_function_->RawParameterVariable(1);
  Fragment body;
  body += LoadLocal(length_arg);
  // AllocateTypedData itself checks that the length is valid (a non-negative
  // Smi below the maximum allowed length), so no explicit check is emitted.
  body += AllocateTypedData(pos, cid);
  return body;
}
// Builds the synthetic local scope used for an implicit closure: a single
// captured `this` variable of the receiver class's declaration type, living
// at context level 0 in a parentless scope.
static const LocalScope* MakeImplicitClosureScope(Zone* Z, const Class& klass) {
  ASSERT(!klass.IsNull());
  // Note that if klass is _Closure, DeclarationType will be _Closure,
  // and not the signature type.
  Type& klass_type = Type::ZoneHandle(Z, klass.DeclarationType());
  LocalVariable* receiver_variable = new (Z)
      LocalVariable(TokenPosition::kNoSource, TokenPosition::kNoSource,
                    Symbols::This(), klass_type, /*param_type=*/nullptr);
  receiver_variable->set_is_captured();
  // nullptr parent: this scope does not nest inside any other scope.
  // (Fixed: was `NULL`, inconsistent with the nullptr usage above.)
  LocalScope* scope = new (Z) LocalScope(nullptr, 0, 0);
  scope->set_context_level(0);
  scope->AddVariable(receiver_variable);
  scope->AddContextVariable(receiver_variable);
  return scope;
}
Fragment FlowGraphBuilder::BuildImplicitClosureCreation(
    const Function& target) {
  // Emits IL that materializes an implicit closure for [target]: a one-slot
  // context capturing `this`, then the closure object, with type-argument
  // fields initialized as required by the target's signature.
  // The function cannot be local and have parent generic functions.
  ASSERT(!target.HasGenericParent());
  Fragment result;
  result += Constant(target);
  // Allocate a context that closes over `this`.
  // Note: this must be kept in sync with ScopeBuilder::BuildScopes.
  const LocalScope* closure_scope =
      MakeImplicitClosureScope(Z, Class::Handle(Z, target.Owner()));
  result += AllocateContext(closure_scope->context_slots());
  LocalVariable* context_var = MakeTemporary();
  // Store `this`. The context doesn't need a parent pointer because it
  // doesn't close over anything else.
  result += LoadLocal(context_var);
  result += LoadLocal(parsed_function_->receiver_var());
  result += StoreNativeField(
      Slot::GetContextVariableSlotFor(
          thread_, *closure_scope->context_variables()[0]),
      StoreInstanceFieldInstr::Kind::kInitializing);
  result += AllocateClosure();
  LocalVariable* closure_var = MakeTemporary();
  // The function signature can have uninstantiated class type parameters.
  if (!target.HasInstantiatedSignature(kCurrentClass)) {
    result += LoadLocal(closure_var);
    result += LoadInstantiatorTypeArguments();
    result += StoreNativeField(Slot::Closure_instantiator_type_arguments(),
                               StoreInstanceFieldInstr::Kind::kInitializing);
  }
  if (target.IsGeneric()) {
    // Only generic functions need to have properly initialized
    // delayed_type_arguments.
    result += LoadLocal(closure_var);
    result += Constant(Object::empty_type_arguments());
    result += StoreNativeField(Slot::Closure_delayed_type_arguments(),
                               StoreInstanceFieldInstr::Kind::kInitializing);
  }
  return result;
}
// Intentionally a no-op: both arguments are ignored and an empty fragment is
// returned, so no type check is emitted here. Kept for interface
// compatibility with callers.
Fragment FlowGraphBuilder::CheckVariableTypeInCheckedMode(
    const AbstractType& dst_type,
    const String& name_symbol) {
  return Fragment();
}
bool FlowGraphBuilder::NeedsDebugStepCheck(const Function& function,
                                           TokenPosition position) {
  // A debug step check is needed only at a debug-pause position inside a
  // debuggable, non-native function.
  if (!position.IsDebugPause()) return false;
  if (function.is_native()) return false;
  return function.is_debuggable();
}
bool FlowGraphBuilder::NeedsDebugStepCheck(Value* value,
                                           TokenPosition position) {
  // Decides, from the kind of definition that produced [value], whether a
  // debug step check should be emitted at [position].
  if (!position.IsDebugPause()) {
    return false;
  }
  Definition* const defn = value->definition();
  if (defn->IsConstant()) return true;
  if (defn->IsLoadStaticField()) return true;
  if (auto const closure_alloc = defn->AsAllocateClosure()) {
    // Only closures with a known target function get a step check.
    return !closure_alloc->known_function().IsNull();
  }
  if (defn->IsLoadLocal()) return true;
  return defn->IsAssertAssignable();
}
Fragment FlowGraphBuilder::EvaluateAssertion() {
  // Emits a static call to the AssertionError evaluation helper, consuming
  // the single argument already on the stack.
  const Class& assertion_error_class =
      Class::ZoneHandle(Z, Library::LookupCoreClass(Symbols::AssertionError()));
  ASSERT(!assertion_error_class.IsNull());
  const auto& finalization_error =
      assertion_error_class.EnsureIsFinalized(H.thread());
  ASSERT(finalization_error == Error::null());
  const Function& evaluate = Function::ZoneHandle(
      Z, assertion_error_class.LookupStaticFunctionAllowPrivate(
             Symbols::EvaluateAssertion()));
  ASSERT(!evaluate.IsNull());
  return StaticCall(TokenPosition::kNoSource, evaluate,
                    /* argument_count = */ 1, ICData::kStatic);
}
Fragment FlowGraphBuilder::CheckBoolean(TokenPosition position) {
  // Asserts that the value currently on top of the stack is a bool,
  // leaving the stack unchanged on success.
  Fragment code;
  LocalVariable* checked_value = MakeTemporary();
  code += LoadLocal(checked_value);
  code += AssertBool(position);
  code += Drop();
  return code;
}
Fragment FlowGraphBuilder::CheckAssignable(const AbstractType& dst_type,
                                           const String& dst_name,
                                           AssertAssignableInstr::Kind kind,
                                           TokenPosition token_pos) {
  // Assignments to top types always succeed, so no check is emitted.
  if (dst_type.IsTopTypeForSubtyping()) {
    return Fragment();
  }
  // Check the value on top of the stack against dst_type, leaving the
  // stack unchanged on success.
  Fragment code;
  LocalVariable* checked_value = MakeTemporary();
  code += LoadLocal(checked_value);
  code +=
      AssertAssignableLoadTypeArguments(token_pos, dst_type, dst_name, kind);
  code += Drop();
  return code;
}
Fragment FlowGraphBuilder::AssertAssignableLoadTypeArguments(
    TokenPosition position,
    const AbstractType& dst_type,
    const String& dst_name,
    AssertAssignableInstr::Kind kind) {
  // Pushes dst_type and the instantiator/function type argument vectors
  // (null when dst_type does not depend on the respective vector), then
  // emits the AssertAssignable check for the value below them on the stack.
  Fragment instructions;
  // Allocate the handle in the builder's zone Z explicitly, consistent with
  // the sibling helpers (e.g. AssertSubtype) instead of relying on the
  // implicit current-thread zone.
  instructions += Constant(AbstractType::ZoneHandle(Z, dst_type.ptr()));
  if (!dst_type.IsInstantiated(kCurrentClass)) {
    instructions += LoadInstantiatorTypeArguments();
  } else {
    instructions += NullConstant();
  }
  if (!dst_type.IsInstantiated(kFunctions)) {
    instructions += LoadFunctionTypeArguments();
  } else {
    instructions += NullConstant();
  }
  instructions += AssertAssignable(position, dst_name, kind);
  return instructions;
}
Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position,
                                         const AbstractType& sub_type_value,
                                         const AbstractType& super_type_value,
                                         const String& dst_name_value) {
  // Pushes the five inputs expected by the stack-consuming AssertSubtype
  // overload (type argument vectors, sub type, super type, name) and then
  // delegates to it.
  Fragment code;
  code += LoadInstantiatorTypeArguments();
  code += LoadFunctionTypeArguments();
  code += Constant(AbstractType::ZoneHandle(Z, sub_type_value.ptr()));
  code += Constant(AbstractType::ZoneHandle(Z, super_type_value.ptr()));
  code += Constant(String::ZoneHandle(Z, dst_name_value.ptr()));
  code += AssertSubtype(position);
  return code;
}
Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position) {
  // Consumes five stack values, from top of stack downwards: destination
  // name, super type, sub type, function type arguments, and instantiator
  // type arguments. The pop order below must match that layout.
  Value* const dst_name = Pop();
  Value* const super_type = Pop();
  Value* const sub_type = Pop();
  Value* const function_type_args = Pop();
  Value* const instantiator_type_args = Pop();
  auto* const assert_subtype = new (Z) AssertSubtypeInstr(
      InstructionSource(position), instantiator_type_args, function_type_args,
      sub_type, super_type, dst_name, GetNextDeoptId());
  return Fragment(assert_subtype);
}
void FlowGraphBuilder::BuildTypeArgumentTypeChecks(TypeChecksToBuild mode,
                                                   Fragment* implicit_checks) {
  // Appends bound checks for the function's (or, for factories, the owning
  // class's) type parameters to *implicit_checks, filtered by `mode`.
  const Function& dart_function = parsed_function_->function();
  const Function* forwarding_target = nullptr;
  if (parsed_function_->is_forwarding_stub()) {
    forwarding_target = parsed_function_->forwarding_stub_super_target();
    ASSERT(!forwarding_target->IsNull());
  }
  TypeParameters& type_parameters = TypeParameters::Handle(Z);
  if (dart_function.IsFactory()) {
    // Factories check against the enclosing class's type parameters.
    type_parameters = Class::Handle(Z, dart_function.Owner()).type_parameters();
  } else {
    type_parameters = dart_function.type_parameters();
  }
  const intptr_t num_type_params = type_parameters.Length();
  if (num_type_params == 0) return;
  if (forwarding_target != nullptr) {
    // For forwarding stubs, bounds come from the super target.
    type_parameters = forwarding_target->type_parameters();
    ASSERT(type_parameters.Length() == num_type_params);
  }
  if (type_parameters.AllDynamicBounds()) {
    return;  // All bounds are dynamic.
  }
  TypeParameter& type_param = TypeParameter::Handle(Z);
  String& name = String::Handle(Z);
  AbstractType& bound = AbstractType::Handle(Z);
  Fragment check_bounds;
  for (intptr_t i = 0; i < num_type_params; ++i) {
    bound = type_parameters.BoundAt(i);
    if (bound.IsTopTypeForSubtyping()) {
      // A top-type bound is trivially satisfied; skip the check.
      continue;
    }
    // Filter by covariance, according to the requested mode.
    switch (mode) {
      case TypeChecksToBuild::kCheckAllTypeParameterBounds:
        break;
      case TypeChecksToBuild::kCheckCovariantTypeParameterBounds:
        if (!type_parameters.IsGenericCovariantImplAt(i)) {
          continue;
        }
        break;
      case TypeChecksToBuild::kCheckNonCovariantTypeParameterBounds:
        if (type_parameters.IsGenericCovariantImplAt(i)) {
          continue;
        }
        break;
    }
    name = type_parameters.NameAt(i);
    // The checked type parameter itself comes from the same source as the
    // bounds selected above.
    if (forwarding_target != nullptr) {
      type_param = forwarding_target->TypeParameterAt(i);
    } else if (dart_function.IsFactory()) {
      type_param = Class::Handle(Z, dart_function.Owner()).TypeParameterAt(i);
    } else {
      type_param = dart_function.TypeParameterAt(i);
    }
    ASSERT(type_param.IsFinalized());
    if (bound.IsTypeRef()) {
      // Unwrap a TypeRef to the underlying type before checking.
      bound = TypeRef::Cast(bound).type();
    }
    check_bounds +=
        AssertSubtype(TokenPosition::kNoSource, type_param, bound, name);
  }
  // Type arguments passed through partial instantiation are guaranteed to be
  // bounds-checked at the point of partial instantiation, so we don't need to
  // check them again at the call-site.
  if (dart_function.IsClosureFunction() && !check_bounds.is_empty() &&
      FLAG_eliminate_type_checks) {
    LocalVariable* closure = parsed_function_->ParameterVariable(0);
    *implicit_checks += TestDelayedTypeArgs(closure, /*present=*/{},
                                            /*absent=*/check_bounds);
  } else {
    *implicit_checks += check_bounds;
  }
}
void FlowGraphBuilder::BuildArgumentTypeChecks(
    Fragment* explicit_checks,
    Fragment* implicit_checks,
    Fragment* implicit_redefinitions) {
  // Appends type checks for the function's explicit parameters. Checks for
  // explicitly covariant parameters go into *explicit_checks, all others
  // into *implicit_checks. When optimizing and implicit_redefinitions is
  // non-null, matching Redefinition instructions are appended to it.
  const Function& dart_function = parsed_function_->function();
  const Function* forwarding_target = nullptr;
  if (parsed_function_->is_forwarding_stub()) {
    forwarding_target = parsed_function_->forwarding_stub_super_target();
    ASSERT(!forwarding_target->IsNull());
  }
  const intptr_t num_params = dart_function.NumParameters();
  for (intptr_t i = dart_function.NumImplicitParameters(); i < num_params;
       ++i) {
    LocalVariable* param = parsed_function_->ParameterVariable(i);
    const String& name = param->name();
    if (!param->needs_type_check()) {
      continue;
    }
    if (param->is_captured()) {
      // Check the raw parameter copy rather than the captured context slot.
      param = parsed_function_->RawParameterVariable(i);
    }
    const AbstractType* target_type = &param->type();
    // Fixed: use nullptr instead of NULL for consistency with the rest of
    // this function (and file).
    if (forwarding_target != nullptr) {
      // We add 1 to the parameter index to account for the receiver.
      target_type =
          &AbstractType::ZoneHandle(Z, forwarding_target->ParameterTypeAt(i));
    }
    // Assignments to top types always succeed; skip the check.
    if (target_type->IsTopTypeForSubtyping()) continue;
    const bool is_covariant = param->is_explicit_covariant_parameter();
    Fragment* checks = is_covariant ? explicit_checks : implicit_checks;
    *checks += LoadLocal(param);
    *checks += AssertAssignableLoadTypeArguments(
        TokenPosition::kNoSource, *target_type, name,
        AssertAssignableInstr::kParameterCheck);
    *checks += StoreLocal(param);
    *checks += Drop();
    if (!is_covariant && implicit_redefinitions != nullptr && optimizing_) {
      // We generate slightly different code in optimized vs. un-optimized code,
      // which is ok since we don't allocate any deopt ids.
      AssertNoDeoptIdsAllocatedScope no_deopt_allocation(thread_);
      *implicit_redefinitions += LoadLocal(param);
      *implicit_redefinitions += RedefinitionWithType(*target_type);
      *implicit_redefinitions += StoreLocal(TokenPosition::kNoSource, param);
      *implicit_redefinitions += Drop();
    }
  }
}
BlockEntryInstr* FlowGraphBuilder::BuildPrologue(BlockEntryInstr* normal_entry,
                                                 PrologueInfo* prologue_info) {
  // Delegates prologue construction to PrologueBuilder and keeps our block
  // id counter in sync with the ids it allocated.
  kernel::PrologueBuilder builder(parsed_function_, last_used_block_id_,
                                  IsCompiledForOsr(), IsInlining());
  BlockEntryInstr* const cursor =
      builder.BuildPrologue(normal_entry, prologue_info);
  last_used_block_id_ = builder.last_used_block_id();
  return cursor;
}
ArrayPtr FlowGraphBuilder::GetOptionalParameterNames(const Function& function) {
  // Returns an array with the names of the function's optional named
  // parameters, or Array::null() when it has none.
  if (!function.HasOptionalNamedParameters()) {
    return Array::null();
  }
  const intptr_t first_named_index = function.num_fixed_parameters();
  const intptr_t named_count = function.NumOptionalNamedParameters();
  const auto& result = Array::Handle(Z, Array::New(named_count, Heap::kOld));
  auto& scratch_name = String::Handle(Z);
  for (intptr_t i = 0; i < named_count; ++i) {
    scratch_name = function.ParameterNameAt(first_named_index + i);
    result.SetAt(i, scratch_name);
  }
  return result.ptr();
}
Fragment FlowGraphBuilder::PushExplicitParameters(
    const Function& function,
    const Function& target /* = Function::null_function()*/) {
  // Pushes all of `function`'s explicit (non-implicit) parameters onto the
  // stack in declaration order. When `target` is given and expects a
  // parameter in unboxed form, an Unbox instruction is emitted for it.
  Fragment instructions;
  for (intptr_t i = function.NumImplicitParameters(),
                n = function.NumParameters();
       i < n; ++i) {
    Fragment push_param = LoadLocal(parsed_function_->ParameterVariable(i));
    if (!target.IsNull() && target.is_unboxed_parameter_at(i)) {
      Representation to;
      if (target.is_unboxed_integer_parameter_at(i)) {
        to = kUnboxedInt64;
      } else {
        ASSERT(target.is_unboxed_double_parameter_at(i));
        to = kUnboxedDouble;
      }
      // Pop() consumes the value pushed by LoadLocal above; the Unbox
      // result is pushed in its place.
      const auto unbox = UnboxInstr::Create(to, Pop(), DeoptId::kNone,
                                            Instruction::kNotSpeculative);
      Push(unbox);
      push_param += Fragment(unbox);
    }
    instructions += push_param;
  }
  return instructions;
}
FlowGraph* FlowGraphBuilder::BuildGraphOfMethodExtractor(
    const Function& method) {
  // A method extractor is the implicit getter for a method: its body simply
  // allocates the tear-off closure for `method` and returns it.
  const Function& closure_function =
      Function::ZoneHandle(Z, method.extracted_method_closure());
  graph_entry_ =
      new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
  auto entry = BuildFunctionEntry(graph_entry_);
  graph_entry_->set_normal_entry(entry);
  Fragment code(entry);
  code += CheckStackOverflowInPrologue(method.token_pos());
  code += BuildImplicitClosureCreation(closure_function);
  code += Return(TokenPosition::kNoSource);
  // There is no prologue code for a method extractor.
  PrologueInfo prologue_info(-1, -1);
  return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
                           prologue_info);
}
FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodDispatcher(
    const Function& function) {
  // Builds the body of a noSuchMethod dispatcher. This function is
  // specialized for a receiver class, a method name, and the arguments
  // descriptor at a call site: it packages the actual arguments into an
  // InvocationMirror and invokes the receiver's noSuchMethod with it.
  const ArgumentsDescriptor descriptor(saved_args_desc_array());
  graph_entry_ =
      new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
  auto normal_entry = BuildFunctionEntry(graph_entry_);
  graph_entry_->set_normal_entry(normal_entry);
  PrologueInfo prologue_info(-1, -1);
  BlockEntryInstr* instruction_cursor =
      BuildPrologue(normal_entry, &prologue_info);
  Fragment body(instruction_cursor);
  body += CheckStackOverflowInPrologue(function.token_pos());
  // The receiver is the first argument to noSuchMethod, and it is the first
  // argument passed to the dispatcher function.
  body += LoadLocal(parsed_function_->ParameterVariable(0));
  // The second argument to noSuchMethod is an invocation mirror. Push the
  // arguments for allocating the invocation mirror. First, the name.
  body += Constant(String::ZoneHandle(Z, function.name()));
  // Second, the arguments descriptor.
  body += Constant(saved_args_desc_array());
  // Third, an array containing the original arguments. Create it and fill
  // it in. If type arguments were passed, they occupy slot 0.
  const intptr_t receiver_index = descriptor.TypeArgsLen() > 0 ? 1 : 0;
  body += Constant(TypeArguments::ZoneHandle(Z, TypeArguments::null()));
  body += IntConstant(receiver_index + descriptor.Size());
  body += CreateArray();
  LocalVariable* array = MakeTemporary();
  if (receiver_index > 0) {
    // Store the passed type argument vector at index 0.
    LocalVariable* type_args = parsed_function_->function_type_arguments();
    ASSERT(type_args != nullptr);  // was NULL; nullptr for consistency.
    body += LoadLocal(array);
    body += IntConstant(0);
    body += LoadLocal(type_args);
    body += StoreIndexed(kArrayCid);
  }
  // Copy the positional arguments into the array.
  for (intptr_t i = 0; i < descriptor.PositionalCount(); ++i) {
    body += LoadLocal(array);
    body += IntConstant(receiver_index + i);
    body += LoadLocal(parsed_function_->ParameterVariable(i));
    body += StoreIndexed(kArrayCid);
  }
  // Copy the named arguments into their descriptor-declared positions.
  String& name = String::Handle(Z);
  for (intptr_t i = 0; i < descriptor.NamedCount(); ++i) {
    const intptr_t parameter_index = descriptor.PositionAt(i);
    name = descriptor.NameAt(i);
    name = Symbols::New(H.thread(), name);
    body += LoadLocal(array);
    body += IntConstant(receiver_index + parameter_index);
    body += LoadLocal(parsed_function_->ParameterVariable(parameter_index));
    body += StoreIndexed(kArrayCid);
  }
  // Fourth, false indicating this is not a super NoSuchMethod.
  body += Constant(Bool::False());
  const Class& mirror_class =
      Class::Handle(Z, Library::LookupCoreClass(Symbols::InvocationMirror()));
  ASSERT(!mirror_class.IsNull());
  const auto& error = mirror_class.EnsureIsFinalized(H.thread());
  ASSERT(error == Error::null());
  const Function& allocation_function = Function::ZoneHandle(
      Z, mirror_class.LookupStaticFunction(
             Library::PrivateCoreLibName(Symbols::AllocateInvocationMirror())));
  ASSERT(!allocation_function.IsNull());
  body += StaticCall(TokenPosition::kMinSource, allocation_function,
                     /* argument_count = */ 4, ICData::kStatic);
  // Resolve noSuchMethod on the receiver class, falling back to
  // Object.noSuchMethod, then call it with (receiver, mirror).
  const int kTypeArgsLen = 0;
  ArgumentsDescriptor two_arguments(
      Array::Handle(Z, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, 2)));
  Function& no_such_method =
      Function::ZoneHandle(Z, Resolver::ResolveDynamicForReceiverClass(
                                  Class::Handle(Z, function.Owner()),
                                  Symbols::NoSuchMethod(), two_arguments));
  if (no_such_method.IsNull()) {
    // If noSuchMethod is not found on the receiver class, call
    // Object.noSuchMethod.
    no_such_method = Resolver::ResolveDynamicForReceiverClass(
        Class::Handle(Z, IG->object_store()->object_class()),
        Symbols::NoSuchMethod(), two_arguments);
  }
  body += StaticCall(TokenPosition::kMinSource, no_such_method,
                     /* argument_count = */ 2, ICData::kNSMDispatch);
  body += Return(TokenPosition::kNoSource);
  return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
                           prologue_info);
}
// Information used by the various dynamic closure call fragment builders.
struct FlowGraphBuilder::ClosureCallInfo {
  ClosureCallInfo(LocalVariable* closure,
                  JoinEntryInstr* throw_no_such_method,
                  const Array& arguments_descriptor_array,
                  ParsedFunction::DynamicClosureCallVars* const vars)
      : closure(ASSERT_NOTNULL(closure)),
        throw_no_such_method(ASSERT_NOTNULL(throw_no_such_method)),
        descriptor(arguments_descriptor_array),
        vars(ASSERT_NOTNULL(vars)) {}
  // Local holding the closure being invoked.
  LocalVariable* const closure;
  // Join block that the argument checks branch to on mismatch (raising
  // noSuchMethod for the call).
  JoinEntryInstr* const throw_no_such_method;
  // Arguments descriptor of the dynamic call site.
  const ArgumentsDescriptor descriptor;
  // Scratch variables shared between the closure call fragment builders.
  ParsedFunction::DynamicClosureCallVars* const vars;
  // Set up by BuildClosureCallDefaultTypeHandling() when needed. These values
  // are read-only, so they don't need real local variables and are created
  // using MakeTemporary().
  LocalVariable* signature = nullptr;
  LocalVariable* num_fixed_params = nullptr;
  LocalVariable* num_opt_params = nullptr;
  LocalVariable* num_max_params = nullptr;
  LocalVariable* has_named_params = nullptr;
  LocalVariable* named_parameter_names = nullptr;
  LocalVariable* parameter_types = nullptr;
  LocalVariable* type_parameters = nullptr;
  LocalVariable* num_type_parameters = nullptr;
  LocalVariable* type_parameter_flags = nullptr;
  LocalVariable* instantiator_type_args = nullptr;
  LocalVariable* parent_function_type_args = nullptr;
  LocalVariable* num_parent_type_args = nullptr;
};
Fragment FlowGraphBuilder::TestClosureFunctionGeneric(
    const ClosureCallInfo& info,
    Fragment generic,
    Fragment not_generic) {
  // Branches on info.type_parameters: a null value routes control through
  // `not_generic`, a non-null value through `generic`; both paths rejoin
  // afterwards.
  JoinEntryInstr* const join = BuildJoinEntry();
  Fragment test;
  test += LoadLocal(info.type_parameters);
  TargetEntryInstr* if_null;
  TargetEntryInstr* if_not_null;
  test += BranchIfNull(&if_null, &if_not_null);
  generic.Prepend(if_not_null);
  generic += Goto(join);
  not_generic.Prepend(if_null);
  not_generic += Goto(join);
  return Fragment(test.entry, join);
}
Fragment FlowGraphBuilder::TestClosureFunctionNamedParameterRequired(
    const ClosureCallInfo& info,
    Fragment set,
    Fragment not_set) {
  // Branches to `set` when the named parameter at
  // info.vars->current_param_index has its "required" flag set, otherwise
  // to `not_set`; both paths rejoin before returning.
  // Required named arguments only exist if null_safety is enabled.
  if (!IG->use_strict_null_safety_checks()) return not_set;
  Fragment check_required;
  // We calculate the index to dereference in the parameter names array.
  check_required += LoadLocal(info.vars->current_param_index);
  check_required +=
      IntConstant(compiler::target::kNumParameterFlagsPerElementLog2);
  check_required += SmiBinaryOp(Token::kSHR);
  check_required += LoadLocal(info.num_opt_params);
  check_required += SmiBinaryOp(Token::kADD);
  LocalVariable* flags_index = MakeTemporary("flags_index");  // Read-only.
  // One read-only stack value (flag_index) that must be dropped
  // after we rejoin at after_check.
  JoinEntryInstr* after_check = BuildJoinEntry();
  // Now we check to see if the flags index is within the bounds of the
  // parameters names array. If not, it cannot be required.
  check_required += LoadLocal(flags_index);
  check_required += LoadLocal(info.named_parameter_names);
  check_required += LoadNativeField(Slot::Array_length());
  check_required += SmiRelationalOp(Token::kLT);
  TargetEntryInstr* valid_index;
  TargetEntryInstr* invalid_index;
  check_required += BranchIfTrue(&valid_index, &invalid_index);
  JoinEntryInstr* join_not_set = BuildJoinEntry();
  Fragment(invalid_index) + Goto(join_not_set);
  // Otherwise, we need to retrieve the value. We're guaranteed the Smis in
  // the flag slots are non-null, so after loading we can immediate check
  // the required flag bit for the given named parameter.
  check_required.current = valid_index;
  check_required += LoadLocal(info.named_parameter_names);
  check_required += LoadLocal(flags_index);
  check_required += LoadIndexed(
      kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
  // Shift the loaded flags so the bit for this parameter is in position.
  check_required += LoadLocal(info.vars->current_param_index);
  check_required +=
      IntConstant(compiler::target::kNumParameterFlagsPerElement - 1);
  check_required += SmiBinaryOp(Token::kBIT_AND);
  // If the below changes, we'll need to multiply by the number of parameter
  // flags before shifting.
  static_assert(compiler::target::kNumParameterFlags == 1,
                "IL builder assumes only one flag bit per parameter");
  check_required += SmiBinaryOp(Token::kSHR);
  // Mask off everything but the required-parameter bit and test it.
  check_required +=
      IntConstant(1 << compiler::target::kRequiredNamedParameterFlag);
  check_required += SmiBinaryOp(Token::kBIT_AND);
  check_required += IntConstant(0);
  TargetEntryInstr* is_not_set;
  TargetEntryInstr* is_set;
  check_required += BranchIfEqual(&is_not_set, &is_set);
  Fragment(is_not_set) + Goto(join_not_set);
  set.Prepend(is_set);
  set += Goto(after_check);
  not_set.Prepend(join_not_set);
  not_set += Goto(after_check);
  // After rejoining, drop the introduced temporaries.
  check_required.current = after_check;
  check_required += DropTemporary(&flags_index);
  return check_required;
}
Fragment FlowGraphBuilder::BuildClosureCallDefaultTypeHandling(
    const ClosureCallInfo& info) {
  // Stores the function type arguments for the call into
  // info.vars->function_type_args: either the TAV provided at the call
  // site, the closure's delayed type arguments, or the (possibly
  // instantiated) defaults from the closure function's data.
  if (info.descriptor.TypeArgsLen() > 0) {
    ASSERT(parsed_function_->function_type_arguments() != nullptr);
    // A TAV was provided, so we don't need default type argument handling
    // and can just take the arguments we were given.
    Fragment store_provided;
    store_provided += LoadLocal(parsed_function_->function_type_arguments());
    store_provided += StoreLocal(info.vars->function_type_args);
    store_provided += Drop();
    return store_provided;
  }
  // Load the defaults, instantiating or replacing them with the other type
  // arguments as appropriate.
  Fragment store_default;
  store_default += LoadLocal(info.closure);
  store_default += LoadNativeField(Slot::Closure_function());
  store_default += LoadNativeField(Slot::Function_data());
  LocalVariable* closure_data = MakeTemporary("closure_data");
  store_default += LoadLocal(closure_data);
  const auto& slot = Slot::ClosureData_default_type_arguments_kind();
  store_default += LoadNativeField(slot);
  store_default += Box(slot.representation());
  LocalVariable* default_tav_kind = MakeTemporary("default_tav_kind");
  // Two locals to drop after join, closure_data and default_tav_kind.
  JoinEntryInstr* done = BuildJoinEntry();
  // Dispatch on the default-type-arguments kind recorded in the closure
  // data, one comparison per possible kind.
  store_default += LoadLocal(default_tav_kind);
  TargetEntryInstr* is_instantiated;
  TargetEntryInstr* is_not_instantiated;
  store_default += IntConstant(static_cast<intptr_t>(
      ClosureData::DefaultTypeArgumentsKind::kIsInstantiated));
  store_default += BranchIfEqual(&is_instantiated, &is_not_instantiated);
  store_default.current = is_not_instantiated;  // Check next case.
  store_default += LoadLocal(default_tav_kind);
  TargetEntryInstr* needs_instantiation;
  TargetEntryInstr* can_share;
  store_default += IntConstant(static_cast<intptr_t>(
      ClosureData::DefaultTypeArgumentsKind::kNeedsInstantiation));
  store_default += BranchIfEqual(&needs_instantiation, &can_share);
  store_default.current = can_share;  // Check next case.
  store_default += LoadLocal(default_tav_kind);
  TargetEntryInstr* can_share_instantiator;
  TargetEntryInstr* can_share_function;
  store_default += IntConstant(static_cast<intptr_t>(
      ClosureData::DefaultTypeArgumentsKind::kSharesInstantiatorTypeArguments));
  store_default += BranchIfEqual(&can_share_instantiator, &can_share_function);
  // Case kIsInstantiated: the defaults can be used directly.
  Fragment instantiated(is_instantiated);
  instantiated += LoadLocal(info.type_parameters);
  instantiated += LoadNativeField(Slot::TypeParameters_defaults());
  instantiated += StoreLocal(info.vars->function_type_args);
  instantiated += Drop();
  instantiated += Goto(done);
  // Case kNeedsInstantiation: instantiate the defaults at runtime.
  Fragment do_instantiation(needs_instantiation);
  // Load the instantiator type arguments.
  do_instantiation += LoadLocal(info.instantiator_type_args);
  // Load the parent function type arguments. (No local function type arguments
  // can be used within the defaults).
  do_instantiation += LoadLocal(info.parent_function_type_args);
  // Load the default type arguments to instantiate.
  do_instantiation += LoadLocal(info.type_parameters);
  do_instantiation += LoadNativeField(Slot::TypeParameters_defaults());
  do_instantiation += InstantiateDynamicTypeArguments();
  do_instantiation += StoreLocal(info.vars->function_type_args);
  do_instantiation += Drop();
  do_instantiation += Goto(done);
  // Case kSharesInstantiatorTypeArguments.
  Fragment share_instantiator(can_share_instantiator);
  share_instantiator += LoadLocal(info.instantiator_type_args);
  share_instantiator += StoreLocal(info.vars->function_type_args);
  share_instantiator += Drop();
  share_instantiator += Goto(done);
  // Remaining case: share the parent function's type arguments.
  Fragment share_function(can_share_function);
  // Since the defaults won't have local type parameters, these must all be
  // from the parent function type arguments, so we can just use it.
  share_function += LoadLocal(info.parent_function_type_args);
  share_function += StoreLocal(info.vars->function_type_args);
  share_function += Drop();
  share_function += Goto(done);
  store_default.current = done;  // Return here after branching.
  store_default += DropTemporary(&default_tav_kind);
  store_default += DropTemporary(&closure_data);
  // Fragment that copies the closure's delayed type arguments instead.
  Fragment store_delayed;
  store_delayed += LoadLocal(info.closure);
  store_delayed += LoadNativeField(Slot::Closure_delayed_type_arguments());
  store_delayed += StoreLocal(info.vars->function_type_args);
  store_delayed += Drop();
  // Use the delayed type args if present, else the default ones.
  return TestDelayedTypeArgs(info.closure, store_delayed, store_default);
}
Fragment FlowGraphBuilder::BuildClosureCallNamedArgumentsCheck(
    const ClosureCallInfo& info) {
  // Checks the named arguments in the call's descriptor against the closure
  // function's named parameters, branching to info.throw_no_such_method
  // when a provided name has no matching parameter or a required named
  // parameter was not provided.
  // When no named arguments are provided, we just need to check for possible
  // required named arguments.
  if (info.descriptor.NamedCount() == 0) {
    // No work to do if there are no possible required named parameters.
    if (!IG->use_strict_null_safety_checks()) {
      return Fragment();
    }
    // If the below changes, we can no longer assume that flag slots existing
    // means there are required parameters.
    static_assert(compiler::target::kNumParameterFlags == 1,
                  "IL builder assumes only one flag bit per parameter");
    // No named args were provided, so check for any required named params.
    // Here, we assume that the only parameter flag saved is the required bit
    // for named parameters. If this changes, we'll need to check each flag
    // entry appropriately for any set required bits.
    Fragment has_any;
    has_any += LoadLocal(info.num_opt_params);
    has_any += LoadLocal(info.named_parameter_names);
    has_any += LoadNativeField(Slot::Array_length());
    TargetEntryInstr* no_required;
    TargetEntryInstr* has_required;
    has_any += BranchIfEqual(&no_required, &has_required);
    Fragment(has_required) + Goto(info.throw_no_such_method);
    return Fragment(has_any.entry, no_required);
  }
  // Otherwise, we need to loop through the parameter names to check the names
  // of named arguments for validity (and possibly missing required ones).
  Fragment check_names;
  // Save the caller's values of the shared counters so they can be restored.
  check_names += LoadLocal(info.vars->current_param_index);
  LocalVariable* old_index = MakeTemporary("old_index");  // Read-only.
  check_names += LoadLocal(info.vars->current_num_processed);
  LocalVariable* old_processed = MakeTemporary("old_processed");  // Read-only.
  // Two local stack values (old_index, old_processed) to drop after rejoining
  // at done.
  JoinEntryInstr* loop = BuildJoinEntry();
  JoinEntryInstr* done = BuildJoinEntry();
  // Reset both counters to zero before entering the loop.
  check_names += IntConstant(0);
  check_names += StoreLocal(info.vars->current_num_processed);
  check_names += Drop();
  check_names += IntConstant(0);
  check_names += StoreLocal(info.vars->current_param_index);
  check_names += Drop();
  check_names += Goto(loop);
  // Loop head: continue while current_param_index < num_opt_params.
  Fragment loop_check(loop);
  loop_check += LoadLocal(info.vars->current_param_index);
  loop_check += LoadLocal(info.num_opt_params);
  loop_check += SmiRelationalOp(Token::kLT);
  TargetEntryInstr* no_more;
  TargetEntryInstr* more;
  loop_check += BranchIfTrue(&more, &no_more);
  Fragment(no_more) + Goto(done);
  Fragment loop_body(more);
  // First load the name we need to check against.
  loop_body += LoadLocal(info.named_parameter_names);
  loop_body += LoadLocal(info.vars->current_param_index);
  loop_body += LoadIndexed(
      kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
  LocalVariable* param_name = MakeTemporary("param_name");  // Read only.
  // One additional local value on the stack within the loop body (param_name)
  // that should be dropped after rejoining at loop_incr.
  JoinEntryInstr* loop_incr = BuildJoinEntry();
  // Now iterate over the ArgumentsDescriptor names and check for a match.
  for (intptr_t i = 0; i < info.descriptor.NamedCount(); i++) {
    const auto& name = String::ZoneHandle(Z, info.descriptor.NameAt(i));
    loop_body += Constant(name);
    loop_body += LoadLocal(param_name);
    TargetEntryInstr* match;
    TargetEntryInstr* mismatch;
    loop_body += BranchIfEqual(&match, &mismatch);
    loop_body.current = mismatch;
    // We have a match, so go to the next name after storing the corresponding
    // parameter index on the stack and incrementing the number of matched
    // arguments. (No need to check the required bit for provided parameters.)
    Fragment matched(match);
    matched += LoadLocal(info.vars->current_param_index);
    matched += LoadLocal(info.num_fixed_params);
    matched += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
    matched += StoreLocal(info.vars->named_argument_parameter_indices.At(i));
    matched += Drop();
    matched += LoadLocal(info.vars->current_num_processed);
    matched += IntConstant(1);
    matched += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
    matched += StoreLocal(info.vars->current_num_processed);
    matched += Drop();
    matched += Goto(loop_incr);
  }
  // None of the names in the arguments descriptor matched, so check if this
  // is a required parameter.
  loop_body += TestClosureFunctionNamedParameterRequired(
      info,
      /*set=*/Goto(info.throw_no_such_method),
      /*not_set=*/{});
  loop_body += Goto(loop_incr);
  // Increment the parameter index and return to the loop head.
  Fragment incr_index(loop_incr);
  incr_index += DropTemporary(&param_name);
  incr_index += LoadLocal(info.vars->current_param_index);
  incr_index += IntConstant(1);
  incr_index += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
  incr_index += StoreLocal(info.vars->current_param_index);
  incr_index += Drop();
  incr_index += Goto(loop);
  // After the loop, every provided named argument must have been matched.
  Fragment check_processed(done);
  check_processed += LoadLocal(info.vars->current_num_processed);
  check_processed += IntConstant(info.descriptor.NamedCount());
  TargetEntryInstr* all_processed;
  TargetEntryInstr* bad_name;
  check_processed += BranchIfEqual(&all_processed, &bad_name);
  // Didn't find a matching parameter name for at least one argument name.
  Fragment(bad_name) + Goto(info.throw_no_such_method);
  // Drop the temporaries at the end of the fragment, restoring the saved
  // counter values first.
  check_names.current = all_processed;
  check_names += LoadLocal(old_processed);
  check_names += StoreLocal(info.vars->current_num_processed);
  check_names += Drop();
  check_names += DropTemporary(&old_processed);
  check_names += LoadLocal(old_index);
  check_names += StoreLocal(info.vars->current_param_index);
  check_names += Drop();
  check_names += DropTemporary(&old_index);
  return check_names;
}
Fragment FlowGraphBuilder::BuildClosureCallArgumentsValidCheck(
    const ClosureCallInfo& info) {
  // Checks that the shape of the dynamic call (type argument count,
  // positional argument count, and named argument names) is compatible with
  // the closure function, branching to info.throw_no_such_method otherwise.
  Fragment check_entry;
  // We only need to check the length of any explicitly provided type arguments.
  if (info.descriptor.TypeArgsLen() > 0) {
    Fragment check_type_args_length;
    check_type_args_length += LoadLocal(info.type_parameters);
    TargetEntryInstr* null;
    TargetEntryInstr* not_null;
    check_type_args_length += BranchIfNull(&null, &not_null);
    check_type_args_length.current = not_null;  // Continue in non-error case.
    // Compare the function's declared type parameter count against the
    // number of type arguments provided at the call site.
    check_type_args_length += LoadLocal(info.signature);
    check_type_args_length += BuildExtractUnboxedSlotBitFieldIntoSmi<
        UntaggedFunctionType::PackedNumTypeParameters>(
        Slot::FunctionType_packed_type_parameter_counts());
    check_type_args_length += IntConstant(info.descriptor.TypeArgsLen());
    TargetEntryInstr* equal;
    TargetEntryInstr* not_equal;
    check_type_args_length += BranchIfEqual(&equal, &not_equal);
    check_type_args_length.current = equal;  // Continue in non-error case.
    // The function is not generic.
    Fragment(null) + Goto(info.throw_no_such_method);
    // An incorrect number of type arguments were passed.
    Fragment(not_equal) + Goto(info.throw_no_such_method);
    // Type arguments should not be provided if there are delayed type
    // arguments, as then the closure itself is not generic.
    check_entry += TestDelayedTypeArgs(
        info.closure, /*present=*/Goto(info.throw_no_such_method),
        /*absent=*/check_type_args_length);
  }
  // Split on whether the function takes named or only positional optionals.
  check_entry += LoadLocal(info.has_named_params);
  TargetEntryInstr* has_named;
  TargetEntryInstr* has_positional;
  check_entry += BranchIfTrue(&has_named, &has_positional);
  JoinEntryInstr* join_after_optional = BuildJoinEntry();
  check_entry.current = join_after_optional;
  if (info.descriptor.NamedCount() > 0) {
    // No reason to continue checking, as this function doesn't take named args.
    Fragment(has_positional) + Goto(info.throw_no_such_method);
  } else {
    // Positional-optional case: the positional count must lie between the
    // fixed parameter count and the maximum parameter count, inclusive.
    Fragment check_pos(has_positional);
    check_pos += LoadLocal(info.num_fixed_params);
    check_pos += IntConstant(info.descriptor.PositionalCount());
    check_pos += SmiRelationalOp(Token::kLTE);
    TargetEntryInstr* enough;
    TargetEntryInstr* too_few;
    check_pos += BranchIfTrue(&enough, &too_few);
    check_pos.current = enough;
    Fragment(too_few) + Goto(info.throw_no_such_method);
    check_pos += IntConstant(info.descriptor.PositionalCount());
    check_pos += LoadLocal(info.num_max_params);
    check_pos += SmiRelationalOp(Token::kLTE);
    TargetEntryInstr* valid;
    TargetEntryInstr* too_many;
    check_pos += BranchIfTrue(&valid, &too_many);
    check_pos.current = valid;
    Fragment(too_many) + Goto(info.throw_no_such_method);
    check_pos += Goto(join_after_optional);
  }
  // Named-parameter case: the positional count must equal the fixed
  // parameter count exactly.
  Fragment check_named(has_named);
  TargetEntryInstr* same;
  TargetEntryInstr* different;
  check_named += LoadLocal(info.num_fixed_params);
  check_named += IntConstant(info.descriptor.PositionalCount());
  check_named += BranchIfEqual(&same, &different);
  check_named.current = same;
  Fragment(different) + Goto(info.throw_no_such_method);
  if (info.descriptor.NamedCount() > 0) {
    // More named arguments than optional parameters cannot possibly match.
    check_named += IntConstant(info.descriptor.NamedCount());
    check_named += LoadLocal(info.num_opt_params);
    check_named += SmiRelationalOp(Token::kLTE);
    TargetEntryInstr* valid;
    TargetEntryInstr* too_many;
    check_named += BranchIfTrue(&valid, &too_many);
    check_named.current = valid;
    Fragment(too_many) + Goto(info.throw_no_such_method);
  }
  // Check the names for optional arguments. If applicable, also check that all
  // required named parameters are provided.
  check_named += BuildClosureCallNamedArgumentsCheck(info);
  check_named += Goto(join_after_optional);
  check_entry.current = join_after_optional;
  return check_entry;
}
Fragment FlowGraphBuilder::BuildClosureCallTypeArgumentsTypeCheck(
const ClosureCallInfo& info) {
JoinEntryInstr* done = BuildJoinEntry();
JoinEntryInstr* loop = BuildJoinEntry();
// We assume that the value stored in :t_type_parameters is not null (i.e.,
// the function stored in :t_function is generic).
Fragment loop_init;
// A null bounds vector represents a vector of dynamic and no check is needed.
loop_init += LoadLocal(info.type_parameters);
loop_init += LoadNativeField(Slot::TypeParameters_bounds());
TargetEntryInstr* null_bounds;
TargetEntryInstr* non_null_bounds;
loop_init += BranchIfNull(&null_bounds, &non_null_bounds);
Fragment(null_bounds) + Goto(done);
loop_init.current = non_null_bounds;
// Loop over the type parameters array.
loop_init += IntConstant(0);
loop_init += StoreLocal(info.vars->current_param_index);
loop_init += Drop();
loop_init += Goto(loop);
Fragment loop_check(loop);
loop_check += LoadLocal(info.vars->current_param_index);
loop_check += LoadLocal(info.num_type_parameters);
loop_check += SmiRelationalOp(Token::kLT);
TargetEntryInstr* more;
TargetEntryInstr* no_more;
loop_check += BranchIfTrue(&more, &no_more);
Fragment(no_more) + Goto(done);
Fragment loop_test_flag(more);
JoinEntryInstr* next = BuildJoinEntry();
JoinEntryInstr* check = BuildJoinEntry();
loop_test_flag += LoadLocal(info.type_parameter_flags);
TargetEntryInstr* null_flags;
TargetEntryInstr* non_null_flags;
loop_test_flag += BranchIfNull(&null_flags, &non_null_flags);
Fragment(null_flags) + Goto(check); // Check type if null (non-covariant).
loop_test_flag.current = non_null_flags; // Test flags if not null.
loop_test_flag += LoadLocal(info.type_parameter_flags);
loop_test_flag += LoadLocal(info.vars->current_param_index);
loop_test_flag += IntConstant(TypeParameters::kFlagsPerSmiShift);
loop_test_flag += SmiBinaryOp(Token::kSHR);
loop_test_flag += LoadIndexed(
kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
loop_test_flag += LoadLocal(info.vars->current_param_index);
loop_test_flag += IntConstant(TypeParameters::kFlagsPerSmiMask);
loop_test_flag += SmiBinaryOp(Token::kBIT_AND);
loop_test_flag += SmiBinaryOp(Token::kSHR);
loop_test_flag += IntConstant(1);
loop_test_flag += SmiBinaryOp(Token::kBIT_AND);
loop_test_flag += IntConstant(0);
TargetEntryInstr* is_noncovariant;
TargetEntryInstr* is_covariant;
loop_test_flag += BranchIfEqual(&is_noncovariant, &is_covariant);
Fragment(is_covariant) + Goto(next); // Continue if covariant.
Fragment(is_noncovariant) + Goto(check); // Check type if non-covariant.
Fragment loop_prep_type_param(check);
JoinEntryInstr* dynamic_type_param = BuildJoinEntry();
JoinEntryInstr* call = BuildJoinEntry();