blob: fe0999e7ed940634eeecb447c76e266a26cb36e3 [file] [log] [blame]
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#include "vm/object.h"
#include "include/dart_api.h"
#include "platform/assert.h"
#include "vm/bit_vector.h"
#include "vm/bootstrap.h"
#include "vm/class_finalizer.h"
#include "vm/code_observers.h"
#include "vm/compiler/aot/precompiler.h"
#include "vm/compiler/assembler/assembler.h"
#include "vm/compiler/assembler/disassembler.h"
#include "vm/compiler/frontend/bytecode_reader.h"
#include "vm/compiler/frontend/kernel_fingerprints.h"
#include "vm/compiler/frontend/kernel_translation_helper.h"
#include "vm/compiler/intrinsifier.h"
#include "vm/compiler/jit/compiler.h"
#include "vm/cpu.h"
#include "vm/dart.h"
#include "vm/dart_api_state.h"
#include "vm/dart_entry.h"
#include "vm/datastream.h"
#include "vm/debugger.h"
#include "vm/deopt_instructions.h"
#include "vm/double_conversion.h"
#include "vm/exceptions.h"
#include "vm/growable_array.h"
#include "vm/hash.h"
#include "vm/hash_table.h"
#include "vm/heap/become.h"
#include "vm/heap/heap.h"
#include "vm/heap/weak_code.h"
#include "vm/isolate_reload.h"
#include "vm/kernel.h"
#include "vm/kernel_isolate.h"
#include "vm/kernel_loader.h"
#include "vm/native_symbol.h"
#include "vm/object_store.h"
#include "vm/parser.h"
#include "vm/profiler.h"
#include "vm/resolver.h"
#include "vm/reusable_handles.h"
#include "vm/runtime_entry.h"
#include "vm/scopes.h"
#include "vm/stack_frame.h"
#include "vm/stub_code.h"
#include "vm/symbols.h"
#include "vm/tags.h"
#include "vm/thread_registry.h"
#include "vm/timeline.h"
#include "vm/type_table.h"
#include "vm/type_testing_stubs.h"
#include "vm/unicode.h"
#include "vm/zone_text_buffer.h"
namespace dart {
// Flags defined by this file.
DEFINE_FLAG(int,
            huge_method_cutoff_in_code_size,
            200000,
            "Huge method cutoff in unoptimized code size (in bytes).");
DEFINE_FLAG(
    bool,
    show_internal_names,
    false,
    "Show names of internal classes (e.g. \"OneByteString\") in error messages "
    "instead of showing the corresponding interface names (e.g. \"String\")");
DEFINE_FLAG(bool, use_lib_cache, false, "Use library name cache");
DEFINE_FLAG(bool, use_exp_cache, false, "Use library exported name cache");
DEFINE_FLAG(bool,
            remove_script_timestamps_for_test,
            false,
            "Remove script timestamps to allow for deterministic testing.");

// Flags defined elsewhere but consulted in this file.
DECLARE_FLAG(bool, show_invisible_frames);
DECLARE_FLAG(bool, trace_deoptimization);
DECLARE_FLAG(bool, trace_deoptimization_verbose);
DECLARE_FLAG(bool, trace_reload);
DECLARE_FLAG(bool, write_protect_code);

// Prefixes used in the VM-internal mangling of getter/setter names
// ("get:foo" / "set:foo").
static const char* const kGetterPrefix = "get:";
static const intptr_t kGetterPrefixLength = strlen(kGetterPrefix);
static const char* const kSetterPrefix = "set:";
static const intptr_t kSetterPrefixLength = strlen(kSetterPrefix);

// A cache of VM heap allocated preinitialized empty ic data entry arrays.
RawArray* ICData::cached_icdata_arrays_[kCachedICDataArrayCount];

// Vtable values captured from stack-allocated fake objects during Init so
// that handles can be given the correct C++ vtable for their class.
cpp_vtable Object::handle_vtable_ = 0;
cpp_vtable Object::builtin_vtables_[kNumPredefinedCids] = {0};
cpp_vtable Smi::handle_vtable_ = 0;

// These are initialized to a value that will force an illegal memory access
// if they are being used.
#if defined(RAW_NULL)
#error RAW_NULL should not be defined.
#endif
#define RAW_NULL kHeapObjectTag

// Storage for the shared read-only handles declared in object.h.
#define DEFINE_SHARED_READONLY_HANDLE(Type, name)                              \
  Type* Object::name##_ = nullptr;
SHARED_READONLY_HANDLES_LIST(DEFINE_SHARED_READONLY_HANDLE)
#undef DEFINE_SHARED_READONLY_HANDLE

// Raw pointers to the canonical null object and the VM-internal singleton
// classes; all start out poisoned with RAW_NULL until Object::Init runs.
RawObject* Object::null_ = reinterpret_cast<RawObject*>(RAW_NULL);
RawClass* Object::class_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::dynamic_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::void_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::unresolved_class_class_ =
    reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::type_arguments_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::patch_class_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::function_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::closure_data_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::signature_data_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::redirection_data_class_ =
    reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::field_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::script_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::library_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::namespace_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::kernel_program_info_class_ =
    reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::code_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::instructions_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::object_pool_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::pc_descriptors_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::code_source_map_class_ =
    reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::stackmap_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::var_descriptors_class_ =
    reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::exception_handlers_class_ =
    reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::context_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::context_scope_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::singletargetcache_class_ =
    reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::unlinkedcall_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::icdata_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::megamorphic_cache_class_ =
    reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::subtypetestcache_class_ =
    reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::api_error_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::language_error_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::unhandled_exception_class_ =
    reinterpret_cast<RawClass*>(RAW_NULL);
RawClass* Object::unwind_error_class_ = reinterpret_cast<RawClass*>(RAW_NULL);

const double MegamorphicCache::kLoadFactor = 0.50;
// Copies the substring name[start_pos, start_pos + len) into a fresh
// zone-allocated, '\0'-terminated buffer and appends it to |segments|.
static void AppendSubString(Zone* zone,
                            GrowableArray<const char*>* segments,
                            const char* name,
                            intptr_t start_pos,
                            intptr_t len) {
  char* copy = zone->Alloc<char>(len + 1);  // Extra byte for the terminator.
  memmove(copy, &name[start_pos], len);
  copy[len] = '\0';
  segments->Add(copy);
}
// Concatenates every string in |segments| into a single zone-allocated,
// '\0'-terminated string. |alloc_len| must be at least the sum of the
// segment lengths (checked by the ASSERT below).
static const char* MergeSubStrings(Zone* zone,
                                   const GrowableArray<const char*>& segments,
                                   intptr_t alloc_len) {
  char* merged = zone->Alloc<char>(alloc_len + 1);  // '\0'-terminated.
  intptr_t offset = 0;
  for (intptr_t i = 0; i < segments.length(); i++) {
    const char* segment = segments[i];
    const intptr_t segment_len = strlen(segment);
    memmove(&merged[offset], segment, segment_len);
    offset += segment_len;
    ASSERT(offset <= alloc_len);
  }
  merged[offset] = '\0';
  return merged;
}
// Strips every "@<digits>" private-key suffix from |name|, while keeping
// getter/setter/constructor/mixin manglings intact.
RawString* String::RemovePrivateKey(const String& name) {
  ASSERT(name.IsOneByteString());
  const intptr_t len = name.Length();
  GrowableArray<uint8_t> filtered(len);
  intptr_t pos = 0;
  while (pos < len) {
    const uint8_t ch = name.CharAt(pos++);
    if (ch == '@') {
      // Drop the '@' and the run of key digits that follows it.
      while ((pos < len) && (name.CharAt(pos) >= '0') &&
             (name.CharAt(pos) <= '9')) {
        pos++;
      }
    } else {
      filtered.Add(ch);
    }
  }
  return String::FromLatin1(filtered.data(), filtered.length());
}
// Takes a vm internal name and makes it suitable for external user.
//
// Examples:
//
// Internal getter and setter prefixes are changed:
//
//   get:foo -> foo
//   set:foo -> foo=
//
// Private name mangling is removed, possibly multiple times:
//
//   _ReceivePortImpl@709387912 -> _ReceivePortImpl
//   _ReceivePortImpl@709387912._internal@709387912 ->
//     _ReceivePortImpl._internal
//   _C@6328321&_E@6328321&_F@6328321 -> _C&_E&_F
//
// The trailing . on the default constructor name is dropped:
//
//   List. -> List
//
// And so forth:
//
//   get:foo@6328321 -> foo
//   _MyClass@6328321. -> _MyClass
//   _MyClass@6328321.named -> _MyClass.named
//
RawString* String::ScrubName(const String& name) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

#if !defined(DART_PRECOMPILED_RUNTIME)
  if (name.Equals(Symbols::TopLevel())) {
    // Name of invisible top-level class.
    return Symbols::Empty().raw();
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  const char* cname = name.ToCString();
  ASSERT(strlen(cname) == static_cast<size_t>(name.Length()));
  const intptr_t name_len = name.Length();
  // First remove all private name mangling.
  intptr_t start_pos = 0;
  GrowableArray<const char*> unmangled_segments;
  intptr_t sum_segment_len = 0;
  for (intptr_t i = 0; i < name_len; i++) {
    // A private key starts with '@' followed by at least one digit.
    if ((cname[i] == '@') && ((i + 1) < name_len) && (cname[i + 1] >= '0') &&
        (cname[i + 1] <= '9')) {
      // Append the current segment to the unmangled name.
      const intptr_t segment_len = i - start_pos;
      sum_segment_len += segment_len;
      AppendSubString(zone, &unmangled_segments, cname, start_pos, segment_len);
      // Advance until past the name mangling. The private keys are only
      // numbers so we skip until the first non-number.
      i++;  // Skip the '@'.
      while ((i < name.Length()) && (name.CharAt(i) >= '0') &&
             (name.CharAt(i) <= '9')) {
        i++;
      }
      start_pos = i;
      i--;  // Account for for-loop increment.
    }
  }

  const char* unmangled_name = NULL;
  if (start_pos == 0) {
    // No name unmangling needed, reuse the name that was passed in.
    unmangled_name = cname;
    sum_segment_len = name_len;
  } else if (name.Length() != start_pos) {
    // Append the last segment.
    const intptr_t segment_len = name.Length() - start_pos;
    sum_segment_len += segment_len;
    AppendSubString(zone, &unmangled_segments, cname, start_pos, segment_len);
  }
  if (unmangled_name == NULL) {
    // Merge unmangled_segments.
    unmangled_name = MergeSubStrings(zone, unmangled_segments, sum_segment_len);
  }

#if !defined(DART_PRECOMPILED_RUNTIME)
  // Now strip the getter/setter prefix (everything up to and including the
  // single ':') and a trailing '.' left over from constructor names.
  intptr_t len = sum_segment_len;
  intptr_t start = 0;
  intptr_t dot_pos = -1;  // Position of '.' in the name, if any.
  bool is_setter = false;
  for (intptr_t i = start; i < len; i++) {
    if (unmangled_name[i] == ':') {
      if (start != 0) {
        // Reset and break.
        start = 0;
        dot_pos = -1;
        break;
      }
      ASSERT(start == 0);  // Only one : is possible in getters or setters.
      if (unmangled_name[0] == 's') {
        // "set:" prefix -> setter; anything else with ':' is a getter.
        is_setter = true;
      }
      start = i + 1;
    } else if (unmangled_name[i] == '.') {
      if (dot_pos != -1) {
        // Reset and break.
        start = 0;
        dot_pos = -1;
        break;
      }
      ASSERT(dot_pos == -1);  // Only one dot is supported.
      dot_pos = i;
    }
  }

  if ((start == 0) && (dot_pos == -1)) {
    // This unmangled_name is fine as it is.
    return Symbols::New(thread, unmangled_name, sum_segment_len);
  }

  // Drop the trailing dot if needed.
  intptr_t end = ((dot_pos + 1) == len) ? dot_pos : len;

  unmangled_segments.Clear();
  intptr_t final_len = end - start;
  AppendSubString(zone, &unmangled_segments, unmangled_name, start, final_len);
  if (is_setter) {
    // Setters are rendered with a trailing '='.
    const char* equals = Symbols::Equals().ToCString();
    const intptr_t equals_len = strlen(equals);
    AppendSubString(zone, &unmangled_segments, equals, 0, equals_len);
    final_len += equals_len;
  }

  unmangled_name = MergeSubStrings(zone, unmangled_segments, final_len);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  return Symbols::New(thread, unmangled_name);
}
// Like ScrubName, but keeps the private "@<key>" manglings. Only the
// getter/setter prefix is removed; setters additionally get a trailing '='.
// In the precompiled runtime the name is returned unchanged.
RawString* String::ScrubNameRetainPrivate(const String& name) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  intptr_t len = name.Length();
  intptr_t start = 0;
  intptr_t at_pos = -1;  // Position of '@' in the name, if any.
  bool is_setter = false;

  for (intptr_t i = start; i < len; i++) {
    if (name.CharAt(i) == ':') {
      ASSERT(start == 0);  // Only one : is possible in getters or setters.
      if (name.CharAt(0) == 's') {
        // "set:" prefix -> setter; "get:" (or other ':'-prefix) -> not.
        is_setter = true;
      }
      start = i + 1;
    } else if (name.CharAt(i) == '@') {
      // Setters should have only one @ so we know where to put the =.
      ASSERT(!is_setter || (at_pos == -1));
      at_pos = i;
    }
  }

  if (start == 0) {
    // No prefix found; this name is fine as it is.
    return name.raw();
  }

  // Strip the getter/setter prefix.
  String& result =
      String::Handle(String::SubString(name, start, (len - start)));

  if (is_setter) {
    // Setters need to end with '='.
    if (at_pos == -1) {
      return String::Concat(result, Symbols::Equals());
    } else {
      // |at_pos| indexes into |name|; the "set:" prefix (4 chars, stripped
      // above since start == 4 for setters) must be subtracted to index into
      // |result|. The '=' is inserted just before the '@' of the private key.
      const String& pre_at =
          String::Handle(String::SubString(result, 0, at_pos - 4));
      const String& post_at =
          String::Handle(String::SubString(name, at_pos, len - at_pos));
      result = String::Concat(pre_at, Symbols::Equals());
      result = String::Concat(result, post_at);
    }
  }

  return result.raw();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  return name.raw();  // In AOT, return argument unchanged.
}
// Returns true if |value| needs a backslash escape when embedded in a
// quoted string literal.
template <typename type>
static bool IsSpecialCharacter(type value) {
  switch (value) {
    case '"':
    case '\n':
    case '\f':
    case '\b':
    case '\t':
    case '\v':
    case '\r':
    case '\\':
    case '$':
      return true;
    default:
      return false;
  }
}
// Returns true for the ASCII control characters: codes 0..31 and DEL (127).
static inline bool IsAsciiNonprintable(int32_t c) {
  if (c == 127) {
    return true;
  }
  return (0 <= c) && (c < 32);
}
// Returns the number of extra output bytes needed to escape |c|:
// 1 for a backslash escape, 3 for a "\x00"-style hex escape, 0 otherwise.
static int32_t EscapeOverhead(int32_t c) {
  if (IsSpecialCharacter(c)) {
    return 1;  // One additional byte for the backslash.
  }
  if (IsAsciiNonprintable(c)) {
    return 3;  // Three additional bytes to encode c as \x00.
  }
  return 0;
}
// Returns the letter to print after the backslash when escaping |value|,
// e.g. '\n' -> 'n'. |value| must satisfy IsSpecialCharacter().
template <typename type>
static type SpecialCharacter(type value) {
  switch (value) {
    case '"':
      return '"';
    case '\n':
      return 'n';
    case '\f':
      return 'f';
    case '\b':
      return 'b';
    case '\t':
      return 't';
    case '\v':
      return 'v';
    case '\r':
      return 'r';
    case '\\':
      return '\\';
    case '$':
      return '$';
  }
  UNREACHABLE();
  return '\0';
}
// Allocates and initializes the canonical null instance in the vm isolate
// heap. This must happen before any other allocation, because freshly
// allocated objects are cleared using null_.
void Object::InitNull(Isolate* isolate) {
  // Should only be run by the vm isolate.
  ASSERT(isolate == Dart::vm_isolate());

  // TODO(iposva): NoSafepointScope needs to be added here.
  ASSERT(class_class() == null_);  // Both still hold the RAW_NULL poison.

  Heap* heap = isolate->heap();

  // Allocate and initialize the null instance.
  // 'null_' must be the first object allocated as it is used in allocation to
  // clear the object.
  {
    uword address = heap->Allocate(Instance::InstanceSize(), Heap::kOld);
    null_ = reinterpret_cast<RawInstance*>(address + kHeapObjectTag);
    // The call below is using 'null_' to initialize itself.
    InitializeObject(address, kNullCid, Instance::InstanceSize(), true);
  }
}
// Bootstraps the vm isolate: captures handle vtables, allocates the shared
// read-only handles, hand-builds the class of classes, registers all VM
// internal classes, and creates the canonical empty/sentinel objects.
// Statement order is load-bearing: each step may only use objects created
// by earlier steps.
void Object::Init(Isolate* isolate) {
  // Should only be run by the vm isolate.
  ASSERT(isolate == Dart::vm_isolate());

  // Initialize the static vtable values using stack-allocated fakes.
  {
    Object fake_object;
    Smi fake_smi;
    Object::handle_vtable_ = fake_object.vtable();
    Smi::handle_vtable_ = fake_smi.vtable();
  }

  Heap* heap = isolate->heap();

// Allocate the read only object handles here.
#define INITIALIZE_SHARED_READONLY_HANDLE(Type, name)                          \
  name##_ = Type::ReadOnlyHandle();
  SHARED_READONLY_HANDLES_LIST(INITIALIZE_SHARED_READONLY_HANDLE)
#undef INITIALIZE_SHARED_READONLY_HANDLE

  *null_object_ = Object::null();
  *null_array_ = Array::null();
  *null_string_ = String::null();
  *null_instance_ = Instance::null();
  *null_function_ = Function::null();
  *null_type_arguments_ = TypeArguments::null();
  *empty_type_arguments_ = TypeArguments::null();
  *null_abstract_type_ = AbstractType::null();

  // Initialize the empty and zero array handles to null_ in order to be able
  // to check if the empty and zero arrays were allocated (RAW_NULL is not
  // available).
  *empty_array_ = Array::null();
  *zero_array_ = Array::null();

  Class& cls = Class::Handle();

  // Allocate and initialize the class class.
  {
    intptr_t size = Class::InstanceSize();
    uword address = heap->Allocate(size, Heap::kOld);
    class_class_ = reinterpret_cast<RawClass*>(address + kHeapObjectTag);
    InitializeObject(address, Class::kClassId, size, true);

    Class fake;
    // Initialization from Class::New<Class>.
    // Directly set raw_ to break a circular dependency: SetRaw will attempt
    // to lookup class class in the class table where it is not registered yet.
    cls.raw_ = class_class_;
    cls.set_handle_vtable(fake.vtable());
    cls.set_instance_size(Class::InstanceSize());
    cls.set_next_field_offset(Class::NextFieldOffset());
    cls.set_id(Class::kClassId);
    cls.set_state_bits(0);
    cls.set_is_finalized();
    cls.set_is_type_finalized();
    cls.set_is_cycle_free();
    cls.set_type_arguments_field_offset_in_words(Class::kNoTypeArguments);
    cls.set_num_type_arguments(0);
    cls.set_num_own_type_arguments(0);
    cls.set_has_pragma(false);
    cls.set_num_native_fields(0);
    cls.InitEmptyFields();
    isolate->RegisterClass(cls);
  }

  // Allocate and initialize the null class.
  cls = Class::New<Instance>(kNullCid);
  cls.set_num_type_arguments(0);
  cls.set_num_own_type_arguments(0);
  isolate->object_store()->set_null_class(cls);

  // Allocate and initialize the free list element class.
  cls = Class::New<FreeListElement::FakeInstance>(kFreeListElement);
  cls.set_num_type_arguments(0);
  cls.set_num_own_type_arguments(0);
  cls.set_is_finalized();
  cls.set_is_type_finalized();
  cls.set_is_cycle_free();

  // Allocate and initialize the forwarding corpse class.
  cls = Class::New<ForwardingCorpse::FakeInstance>(kForwardingCorpse);
  cls.set_num_type_arguments(0);
  cls.set_num_own_type_arguments(0);
  cls.set_is_finalized();
  cls.set_is_type_finalized();
  cls.set_is_cycle_free();

  // Allocate and initialize the sentinel values of Null class.
  {
    *sentinel_ ^=
        Object::Allocate(kNullCid, Instance::InstanceSize(), Heap::kOld);

    *transition_sentinel_ ^=
        Object::Allocate(kNullCid, Instance::InstanceSize(), Heap::kOld);
  }

  // Allocate and initialize optimizing compiler constants.
  {
    *unknown_constant_ ^=
        Object::Allocate(kNullCid, Instance::InstanceSize(), Heap::kOld);
    *non_constant_ ^=
        Object::Allocate(kNullCid, Instance::InstanceSize(), Heap::kOld);
  }

  // Allocate the remaining VM internal classes.
  cls = Class::New<UnresolvedClass>();
  unresolved_class_class_ = cls.raw();

  cls = Class::New<TypeArguments>();
  type_arguments_class_ = cls.raw();

  cls = Class::New<PatchClass>();
  patch_class_class_ = cls.raw();

  cls = Class::New<Function>();
  function_class_ = cls.raw();

  cls = Class::New<ClosureData>();
  closure_data_class_ = cls.raw();

  cls = Class::New<SignatureData>();
  signature_data_class_ = cls.raw();

  cls = Class::New<RedirectionData>();
  redirection_data_class_ = cls.raw();

  cls = Class::New<Field>();
  field_class_ = cls.raw();

  cls = Class::New<Script>();
  script_class_ = cls.raw();

  cls = Class::New<Library>();
  library_class_ = cls.raw();

  cls = Class::New<Namespace>();
  namespace_class_ = cls.raw();

  cls = Class::New<KernelProgramInfo>();
  kernel_program_info_class_ = cls.raw();

  cls = Class::New<Code>();
  code_class_ = cls.raw();

  cls = Class::New<Instructions>();
  instructions_class_ = cls.raw();

  cls = Class::New<ObjectPool>();
  object_pool_class_ = cls.raw();

  cls = Class::New<PcDescriptors>();
  pc_descriptors_class_ = cls.raw();

  cls = Class::New<CodeSourceMap>();
  code_source_map_class_ = cls.raw();

  cls = Class::New<StackMap>();
  stackmap_class_ = cls.raw();

  cls = Class::New<LocalVarDescriptors>();
  var_descriptors_class_ = cls.raw();

  cls = Class::New<ExceptionHandlers>();
  exception_handlers_class_ = cls.raw();

  cls = Class::New<Context>();
  context_class_ = cls.raw();

  cls = Class::New<ContextScope>();
  context_scope_class_ = cls.raw();

  cls = Class::New<SingleTargetCache>();
  singletargetcache_class_ = cls.raw();

  cls = Class::New<UnlinkedCall>();
  unlinkedcall_class_ = cls.raw();

  cls = Class::New<ICData>();
  icdata_class_ = cls.raw();

  cls = Class::New<MegamorphicCache>();
  megamorphic_cache_class_ = cls.raw();

  cls = Class::New<SubtypeTestCache>();
  subtypetestcache_class_ = cls.raw();

  cls = Class::New<ApiError>();
  api_error_class_ = cls.raw();

  cls = Class::New<LanguageError>();
  language_error_class_ = cls.raw();

  cls = Class::New<UnhandledException>();
  unhandled_exception_class_ = cls.raw();

  cls = Class::New<UnwindError>();
  unwind_error_class_ = cls.raw();

  ASSERT(class_class() != null_);

  // Pre-allocate classes in the vm isolate so that we can for example create a
  // symbol table and populate it with some frequently used strings as symbols.
  cls = Class::New<Array>();
  isolate->object_store()->set_array_class(cls);
  cls.set_type_arguments_field_offset(Array::type_arguments_offset());
  cls.set_num_type_arguments(1);
  cls.set_num_own_type_arguments(1);
  cls = Class::New<Array>(kImmutableArrayCid);
  isolate->object_store()->set_immutable_array_class(cls);
  cls.set_type_arguments_field_offset(Array::type_arguments_offset());
  cls.set_num_type_arguments(1);
  cls.set_num_own_type_arguments(1);
  cls = Class::New<GrowableObjectArray>();
  isolate->object_store()->set_growable_object_array_class(cls);
  cls.set_type_arguments_field_offset(
      GrowableObjectArray::type_arguments_offset());
  cls.set_num_type_arguments(1);
  cls = Class::NewStringClass(kOneByteStringCid);
  isolate->object_store()->set_one_byte_string_class(cls);
  cls = Class::NewStringClass(kTwoByteStringCid);
  isolate->object_store()->set_two_byte_string_class(cls);
  cls = Class::New<Mint>();
  isolate->object_store()->set_mint_class(cls);
  cls = Class::New<Double>();
  isolate->object_store()->set_double_class(cls);

  // Ensure that class kExternalTypedDataUint8ArrayCid is registered as we
  // need it when reading in the token stream of bootstrap classes in the VM
  // isolate.
  Class::NewExternalTypedDataClass(kExternalTypedDataUint8ArrayCid);

  // Needed for object pools of VM isolate stubs.
  Class::NewTypedDataClass(kTypedDataInt8ArrayCid);

  // Allocate and initialize the empty_array instance.
  {
    uword address = heap->Allocate(Array::InstanceSize(0), Heap::kOld);
    InitializeObject(address, kImmutableArrayCid, Array::InstanceSize(0), true);
    Array::initializeHandle(
        empty_array_, reinterpret_cast<RawArray*>(address + kHeapObjectTag));
    empty_array_->StoreSmi(&empty_array_->raw_ptr()->length_, Smi::New(0));
    empty_array_->SetCanonical();
  }

  Smi& smi = Smi::Handle();
  // Allocate and initialize the zero_array instance.
  {
    uword address = heap->Allocate(Array::InstanceSize(1), Heap::kOld);
    InitializeObject(address, kImmutableArrayCid, Array::InstanceSize(1), true);
    Array::initializeHandle(
        zero_array_, reinterpret_cast<RawArray*>(address + kHeapObjectTag));
    zero_array_->StoreSmi(&zero_array_->raw_ptr()->length_, Smi::New(1));
    smi = Smi::New(0);
    zero_array_->SetAt(0, smi);
    zero_array_->SetCanonical();
  }

  // Allocate and initialize the canonical empty context scope object.
  {
    uword address = heap->Allocate(ContextScope::InstanceSize(0), Heap::kOld);
    InitializeObject(address, kContextScopeCid, ContextScope::InstanceSize(0),
                     true);
    ContextScope::initializeHandle(
        empty_context_scope_,
        reinterpret_cast<RawContextScope*>(address + kHeapObjectTag));
    empty_context_scope_->StoreNonPointer(
        &empty_context_scope_->raw_ptr()->num_variables_, 0);
    empty_context_scope_->StoreNonPointer(
        &empty_context_scope_->raw_ptr()->is_implicit_, true);
    empty_context_scope_->SetCanonical();
  }

  // Allocate and initialize the canonical empty object pool object.
  {
    uword address = heap->Allocate(ObjectPool::InstanceSize(0), Heap::kOld);
    InitializeObject(address, kObjectPoolCid, ObjectPool::InstanceSize(0),
                     true);
    ObjectPool::initializeHandle(
        empty_object_pool_,
        reinterpret_cast<RawObjectPool*>(address + kHeapObjectTag));
    empty_object_pool_->StoreNonPointer(&empty_object_pool_->raw_ptr()->length_,
                                        0);
    empty_object_pool_->SetCanonical();
  }

  // Allocate and initialize the empty_descriptors instance.
  {
    uword address = heap->Allocate(PcDescriptors::InstanceSize(0), Heap::kOld);
    InitializeObject(address, kPcDescriptorsCid, PcDescriptors::InstanceSize(0),
                     true);
    PcDescriptors::initializeHandle(
        empty_descriptors_,
        reinterpret_cast<RawPcDescriptors*>(address + kHeapObjectTag));
    empty_descriptors_->StoreNonPointer(&empty_descriptors_->raw_ptr()->length_,
                                        0);
    empty_descriptors_->SetCanonical();
  }

  // Allocate and initialize the canonical empty variable descriptor object.
  {
    uword address =
        heap->Allocate(LocalVarDescriptors::InstanceSize(0), Heap::kOld);
    InitializeObject(address, kLocalVarDescriptorsCid,
                     LocalVarDescriptors::InstanceSize(0), true);
    LocalVarDescriptors::initializeHandle(
        empty_var_descriptors_,
        reinterpret_cast<RawLocalVarDescriptors*>(address + kHeapObjectTag));
    empty_var_descriptors_->StoreNonPointer(
        &empty_var_descriptors_->raw_ptr()->num_entries_, 0);
    empty_var_descriptors_->SetCanonical();
  }

  // Allocate and initialize the canonical empty exception handler info object.
  // The vast majority of all functions do not contain an exception handler
  // and can share this canonical descriptor.
  {
    uword address =
        heap->Allocate(ExceptionHandlers::InstanceSize(0), Heap::kOld);
    InitializeObject(address, kExceptionHandlersCid,
                     ExceptionHandlers::InstanceSize(0), true);
    ExceptionHandlers::initializeHandle(
        empty_exception_handlers_,
        reinterpret_cast<RawExceptionHandlers*>(address + kHeapObjectTag));
    empty_exception_handlers_->StoreNonPointer(
        &empty_exception_handlers_->raw_ptr()->num_entries_, 0);
    empty_exception_handlers_->SetCanonical();
  }

  // Allocate and initialize the canonical empty type arguments object.
  {
    uword address = heap->Allocate(TypeArguments::InstanceSize(0), Heap::kOld);
    InitializeObject(address, kTypeArgumentsCid, TypeArguments::InstanceSize(0),
                     true);
    TypeArguments::initializeHandle(
        empty_type_arguments_,
        reinterpret_cast<RawTypeArguments*>(address + kHeapObjectTag));
    empty_type_arguments_->StoreSmi(&empty_type_arguments_->raw_ptr()->length_,
                                    Smi::New(0));
    empty_type_arguments_->StoreSmi(&empty_type_arguments_->raw_ptr()->hash_,
                                    Smi::New(0));
    empty_type_arguments_->SetCanonical();
  }

  // The VM isolate snapshot object table is initialized to an empty array
  // as we do not have any VM isolate snapshot at this time.
  *vm_isolate_snapshot_object_table_ = Object::empty_array().raw();

  cls = Class::New<Instance>(kDynamicCid);
  cls.set_is_abstract();
  cls.set_num_type_arguments(0);
  cls.set_num_own_type_arguments(0);
  cls.set_is_finalized();
  cls.set_is_type_finalized();
  cls.set_is_cycle_free();
  dynamic_class_ = cls.raw();

  cls = Class::New<Instance>(kVoidCid);
  cls.set_num_type_arguments(0);
  cls.set_num_own_type_arguments(0);
  cls.set_is_finalized();
  cls.set_is_type_finalized();
  cls.set_is_cycle_free();
  void_class_ = cls.raw();

  cls = Class::New<Type>();
  cls.set_is_finalized();
  cls.set_is_type_finalized();
  cls.set_is_cycle_free();

  cls = dynamic_class_;
  *dynamic_type_ = Type::NewNonParameterizedType(cls);

  cls = void_class_;
  *void_type_ = Type::NewNonParameterizedType(cls);

  // Since TypeArguments objects are passed as function arguments, make them
  // behave as Dart instances, although they are just VM objects.
  // Note that we cannot set the super type to ObjectType, which does not live
  // in the vm isolate. See special handling in Class::SuperClass().
  cls = type_arguments_class_;
  cls.set_interfaces(Object::empty_array());
  cls.SetFields(Object::empty_array());
  cls.SetFunctions(Object::empty_array());

  // Allocate and initialize singleton true and false boolean objects.
  cls = Class::New<Bool>();
  isolate->object_store()->set_bool_class(cls);
  *bool_true_ = Bool::New(true);
  *bool_false_ = Bool::New(false);

  *smi_illegal_cid_ = Smi::New(kIllegalCid);

  // Preallocate the error objects used when the corresponding condition is
  // reported; allocation may be impossible at that point.
  String& error_str = String::Handle();
  error_str = String::New("SnapshotWriter Error", Heap::kOld);
  *snapshot_writer_error_ =
      LanguageError::New(error_str, Report::kError, Heap::kOld);
  error_str = String::New("Branch offset overflow", Heap::kOld);
  *branch_offset_error_ =
      LanguageError::New(error_str, Report::kBailout, Heap::kOld);
  error_str = String::New("Speculative inlining failed", Heap::kOld);
  *speculative_inlining_error_ =
      LanguageError::New(error_str, Report::kBailout, Heap::kOld);
  error_str = String::New("Background Compilation Failed", Heap::kOld);
  *background_compilation_error_ =
      LanguageError::New(error_str, Report::kBailout, Heap::kOld);

  // Allocate the parameter arrays for method extractor types and names.
  *extractor_parameter_types_ = Array::New(1, Heap::kOld);
  extractor_parameter_types_->SetAt(0, Object::dynamic_type());
  *extractor_parameter_names_ = Array::New(1, Heap::kOld);
  // Fill in extractor_parameter_names_ later, after symbols are initialized
  // (in Object::FinalizeVMIsolate). extractor_parameter_names_ object
  // needs to be created earlier as VM isolate snapshot reader references it
  // before Object::FinalizeVMIsolate.

  // Some thread fields need to be reinitialized as null constants have not
  // been initialized until now.
  Thread* thr = Thread::Current();
  ASSERT(thr != NULL);
  thr->clear_sticky_error();
  thr->clear_pending_functions();

  // Sanity-check every preallocated object created above.
  ASSERT(!null_object_->IsSmi());
  ASSERT(!null_array_->IsSmi());
  ASSERT(null_array_->IsArray());
  ASSERT(!null_string_->IsSmi());
  ASSERT(null_string_->IsString());
  ASSERT(!null_instance_->IsSmi());
  ASSERT(null_instance_->IsInstance());
  ASSERT(!null_function_->IsSmi());
  ASSERT(null_function_->IsFunction());
  ASSERT(!null_type_arguments_->IsSmi());
  ASSERT(null_type_arguments_->IsTypeArguments());
  ASSERT(!empty_array_->IsSmi());
  ASSERT(empty_array_->IsArray());
  ASSERT(!zero_array_->IsSmi());
  ASSERT(zero_array_->IsArray());
  ASSERT(!empty_context_scope_->IsSmi());
  ASSERT(empty_context_scope_->IsContextScope());
  ASSERT(!empty_descriptors_->IsSmi());
  ASSERT(empty_descriptors_->IsPcDescriptors());
  ASSERT(!empty_var_descriptors_->IsSmi());
  ASSERT(empty_var_descriptors_->IsLocalVarDescriptors());
  ASSERT(!empty_exception_handlers_->IsSmi());
  ASSERT(empty_exception_handlers_->IsExceptionHandlers());
  ASSERT(!sentinel_->IsSmi());
  ASSERT(sentinel_->IsInstance());
  ASSERT(!transition_sentinel_->IsSmi());
  ASSERT(transition_sentinel_->IsInstance());
  ASSERT(!unknown_constant_->IsSmi());
  ASSERT(unknown_constant_->IsInstance());
  ASSERT(!non_constant_->IsSmi());
  ASSERT(non_constant_->IsInstance());
  ASSERT(!bool_true_->IsSmi());
  ASSERT(bool_true_->IsBool());
  ASSERT(!bool_false_->IsSmi());
  ASSERT(bool_false_->IsBool());
  ASSERT(smi_illegal_cid_->IsSmi());
  ASSERT(!snapshot_writer_error_->IsSmi());
  ASSERT(snapshot_writer_error_->IsLanguageError());
  ASSERT(!branch_offset_error_->IsSmi());
  ASSERT(branch_offset_error_->IsLanguageError());
  ASSERT(!speculative_inlining_error_->IsSmi());
  ASSERT(speculative_inlining_error_->IsLanguageError());
  ASSERT(!background_compilation_error_->IsSmi());
  ASSERT(background_compilation_error_->IsLanguageError());
  ASSERT(!vm_isolate_snapshot_object_table_->IsSmi());
  ASSERT(vm_isolate_snapshot_object_table_->IsArray());
  ASSERT(!extractor_parameter_types_->IsSmi());
  ASSERT(extractor_parameter_types_->IsArray());
  ASSERT(!extractor_parameter_names_->IsSmi());
  ASSERT(extractor_parameter_names_->IsArray());
}
// Second phase of Object initialization, run after StubCode::InitOnce():
// the type testing stubs for the canonical 'dynamic' and 'void' types can
// only be installed once the stubs themselves exist.
void Object::FinishInit(Isolate* isolate) {
  Instructions& stub = Instructions::Handle();

  stub = TypeTestingStubGenerator::DefaultCodeForType(*dynamic_type_);
  dynamic_type_->SetTypeTestingStub(stub);

  stub = TypeTestingStubGenerator::DefaultCodeForType(*void_type_);
  void_type_->SetTypeTestingStub(stub);
}
void Object::Cleanup() {
null_ = reinterpret_cast<RawObject*>(RAW_NULL);
class_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
dynamic_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
void_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
unresolved_class_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
type_arguments_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
patch_class_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
function_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
closure_data_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
signature_data_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
redirection_data_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
field_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
script_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
library_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
namespace_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
kernel_program_info_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
code_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
instructions_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
object_pool_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
pc_descriptors_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
code_source_map_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
stackmap_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
var_descriptors_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
exception_handlers_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
context_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
context_scope_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
singletargetcache_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
unlinkedcall_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
icdata_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
megamorphic_cache_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
subtypetestcache_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
api_error_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
language_error_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
unhandled_exception_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
unwind_error_class_ = reinterpret_cast<RawClass*>(RAW_NULL);
}
// An object visitor which will mark all visited objects. This is used to
// premark all objects in the vm_isolate_ heap. Also precalculates hash
// codes so that we can get the identity hash code of objects in the read-
// only VM isolate.
class FinalizeVMIsolateVisitor : public ObjectVisitor {
 public:
  FinalizeVMIsolateVisitor()
#if defined(HASH_IN_OBJECT_HEADER)
      // Arbitrary non-zero seed for the identity-hash sequence assigned in
      // VisitObject below.
      : counter_(1337)
#endif
  {
  }

  // Marks |obj| and finalizes it for the read-only heap. Called once per
  // object during the VM-isolate heap iteration in Object::FinalizeVMIsolate.
  void VisitObject(RawObject* obj) {
    // Free list elements should never be marked.
    ASSERT(!obj->IsMarked());
    // No forwarding corpses in the VM isolate.
    ASSERT(!obj->IsForwardingCorpse());
    if (!obj->IsFreeListElement()) {
      ASSERT(obj->IsVMHeapObject());
      obj->SetMarkBitUnsynchronized();
      Object::FinalizeReadOnlyObject(obj);
#if defined(HASH_IN_OBJECT_HEADER)
      // These objects end up in the read-only VM isolate which is shared
      // between isolates, so we have to prepopulate them with identity hash
      // codes, since we can't add hash codes later.
      if (Object::GetCachedHash(obj) == 0) {
        // Some classes have identity hash codes that depend on their contents,
        // not per object.
        ASSERT(!obj->IsStringInstance());
        if (!obj->IsMint() && !obj->IsDouble() && !obj->IsRawNull() &&
            !obj->IsBool()) {
          counter_ += 2011;  // The year Dart was announced and a prime.
          counter_ &= 0x3fffffff;
          // Hash code 0 means "not yet computed", so never hand it out.
          if (counter_ == 0) counter_++;
          Object::SetCachedHash(obj, counter_);
        }
      }
#endif
    }
  }

 private:
#if defined(HASH_IN_OBJECT_HEADER)
  // Running value used to assign distinct identity hashes.
  int32_t counter_;
#endif
};
// Assigns a user-visible name to a VM singleton class, e.g.
// SET_CLASS_NAME(code, Code) sets the name of code_class() to Symbols::Code().
// Relies on a Class& handle named 'cls' being in scope at the expansion site.
#define SET_CLASS_NAME(class_name, name)                                       \
  cls = class_name##_class();                                                  \
  cls.set_name(Symbols::name());
// Finalizes the read-only VM isolate: gives names to all VM singleton and
// pre-allocated classes, then premarks every object in the VM isolate heap
// (precomputing identity hashes where needed) before the heap becomes
// read-only again.
void Object::FinalizeVMIsolate(Isolate* isolate) {
  // Should only be run by the vm isolate.
  ASSERT(isolate == Dart::vm_isolate());

  // Finish initialization of extractor_parameter_names_ which was
  // started in Object::InitOnce().
  extractor_parameter_names_->SetAt(0, Symbols::This());

  // Set up names for all VM singleton classes.
  Class& cls = Class::Handle();

  SET_CLASS_NAME(class, Class);
  SET_CLASS_NAME(dynamic, Dynamic);
  SET_CLASS_NAME(void, Void);
  SET_CLASS_NAME(unresolved_class, UnresolvedClass);
  SET_CLASS_NAME(type_arguments, TypeArguments);
  SET_CLASS_NAME(patch_class, PatchClass);
  SET_CLASS_NAME(function, Function);
  SET_CLASS_NAME(closure_data, ClosureData);
  SET_CLASS_NAME(signature_data, SignatureData);
  SET_CLASS_NAME(redirection_data, RedirectionData);
  SET_CLASS_NAME(field, Field);
  SET_CLASS_NAME(script, Script);
  SET_CLASS_NAME(library, LibraryClass);
  SET_CLASS_NAME(namespace, Namespace);
  SET_CLASS_NAME(kernel_program_info, KernelProgramInfo);
  SET_CLASS_NAME(code, Code);
  SET_CLASS_NAME(instructions, Instructions);
  SET_CLASS_NAME(object_pool, ObjectPool);
  SET_CLASS_NAME(code_source_map, CodeSourceMap);
  SET_CLASS_NAME(pc_descriptors, PcDescriptors);
  SET_CLASS_NAME(stackmap, StackMap);
  SET_CLASS_NAME(var_descriptors, LocalVarDescriptors);
  SET_CLASS_NAME(exception_handlers, ExceptionHandlers);
  SET_CLASS_NAME(context, Context);
  SET_CLASS_NAME(context_scope, ContextScope);
  SET_CLASS_NAME(singletargetcache, SingleTargetCache);
  SET_CLASS_NAME(unlinkedcall, UnlinkedCall);
  SET_CLASS_NAME(icdata, ICData);
  SET_CLASS_NAME(megamorphic_cache, MegamorphicCache);
  SET_CLASS_NAME(subtypetestcache, SubtypeTestCache);
  SET_CLASS_NAME(api_error, ApiError);
  SET_CLASS_NAME(language_error, LanguageError);
  SET_CLASS_NAME(unhandled_exception, UnhandledException);
  SET_CLASS_NAME(unwind_error, UnwindError);

  // Set up names for object array and one byte string class which are
  // pre-allocated in the vm isolate also.
  cls = isolate->object_store()->array_class();
  cls.set_name(Symbols::_List());
  cls = isolate->object_store()->one_byte_string_class();
  cls.set_name(Symbols::OneByteString());

  // Set up names for the pseudo-classes for free list elements and forwarding
  // corpses. Mainly this makes VM debugging easier.
  cls = isolate->class_table()->At(kFreeListElement);
  cls.set_name(Symbols::FreeListElement());
  cls = isolate->class_table()->At(kForwardingCorpse);
  cls.set_name(Symbols::ForwardingCorpse());

  {
    ASSERT(isolate == Dart::vm_isolate());
    Thread* thread = Thread::Current();
    // Temporarily lift write protection so the visitor can set mark bits
    // and hash codes on VM-isolate objects.
    WritableVMIsolateScope scope(thread);
    HeapIterationScope iteration(thread);
    FinalizeVMIsolateVisitor premarker;
    // The VM isolate holds no new-space objects, so iterating old space
    // (excluding image pages) covers everything.
    ASSERT(isolate->heap()->UsedInWords(Heap::kNew) == 0);
    iteration.IterateOldObjectsNoImagePages(&premarker);
    // Make the VM isolate read-only again after setting all objects as marked.
    // Note objects in image pages are already pre-marked.
  }
}
// Prepares |object| for life in the read-only VM isolate heap: strings get
// their hash codes precomputed (they cannot be cached lazily once the pages
// are read-only), and variable-length objects get the slack past their
// payload zeroed so the heap image bytes are deterministic.
void Object::FinalizeReadOnlyObject(RawObject* object) {
  NoSafepointScope no_safepoint;
  // Zeroes the padding between |unrounded_size| and the object's rounded-up
  // allocation size.
  auto clear_unused_tail = [](RawObject* raw, intptr_t unrounded_size) {
    ASSERT(unrounded_size <= raw->Size());
    memset(reinterpret_cast<void*>(RawObject::ToAddr(raw) + unrounded_size), 0,
           raw->Size() - unrounded_size);
  };
  const intptr_t cid = object->GetClassId();
  switch (cid) {
    case kOneByteStringCid: {
      RawOneByteString* str = static_cast<RawOneByteString*>(object);
      if (String::GetCachedHash(str) == 0) {
        String::SetCachedHash(str, String::Hash(str));
      }
      clear_unused_tail(str, OneByteString::UnroundedSize(str));
      break;
    }
    case kTwoByteStringCid: {
      RawTwoByteString* str = static_cast<RawTwoByteString*>(object);
      if (String::GetCachedHash(str) == 0) {
        String::SetCachedHash(str, String::Hash(str));
      }
      ASSERT(String::GetCachedHash(str) != 0);
      clear_unused_tail(str, TwoByteString::UnroundedSize(str));
      break;
    }
    case kExternalOneByteStringCid: {
      RawExternalOneByteString* str =
          static_cast<RawExternalOneByteString*>(object);
      if (String::GetCachedHash(str) == 0) {
        String::SetCachedHash(str, String::Hash(str));
      }
      break;
    }
    case kExternalTwoByteStringCid: {
      RawExternalTwoByteString* str =
          static_cast<RawExternalTwoByteString*>(object);
      if (String::GetCachedHash(str) == 0) {
        String::SetCachedHash(str, String::Hash(str));
      }
      break;
    }
    case kCodeSourceMapCid: {
      RawCodeSourceMap* map = CodeSourceMap::RawCast(object);
      clear_unused_tail(map, CodeSourceMap::UnroundedSize(map));
      break;
    }
    case kStackMapCid: {
      RawStackMap* map = StackMap::RawCast(object);
      clear_unused_tail(map, StackMap::UnroundedSize(map));
      break;
    }
    case kPcDescriptorsCid: {
      RawPcDescriptors* desc = PcDescriptors::RawCast(object);
      clear_unused_tail(desc, PcDescriptors::UnroundedSize(desc));
      break;
    }
    default:
      break;
  }
}
// Stores the snapshot object table for the read-only VM isolate. May only
// be called while executing in the VM isolate itself (i.e. during VM
// startup/snapshot reading, before the heap is write-protected).
void Object::set_vm_isolate_snapshot_object_table(const Array& table) {
  ASSERT(Isolate::Current() == Dart::vm_isolate());
  *vm_isolate_snapshot_object_table_ = table.raw();
}
// Make unused space in an object whose type has been transformed safe
// for traversing during GC.
// The unused part of the transformed object is marked as an TypedDataInt8Array
// object.
void Object::MakeUnusedSpaceTraversable(const Object& obj,
intptr_t original_size,
intptr_t used_size) {
ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
ASSERT(!obj.IsNull());
ASSERT(original_size >= used_size);
if (original_size > used_size) {
intptr_t leftover_size = original_size - used_size;
uword addr = RawObject::ToAddr(obj.raw()) + used_size;
if (leftover_size >= TypedData::InstanceSize(0)) {
// Update the leftover space as a TypedDataInt8Array object.
RawTypedData* raw =
reinterpret_cast<RawTypedData*>(RawObject::FromAddr(addr));
uword new_tags = RawObject::ClassIdTag::update(kTypedDataInt8ArrayCid, 0);
new_tags = RawObject::SizeTag::update(leftover_size, new_tags);
new_tags = RawObject::VMHeapObjectTag::update(obj.raw()->IsVMHeapObject(),
new_tags);
const bool is_old = obj.raw()->IsOldObject();
new_tags = RawObject::OldBit::update(is_old, new_tags);
new_tags = RawObject::OldAndNotMarkedBit::update(is_old, new_tags);
new_tags = RawObject::OldAndNotRememberedBit::update(is_old, new_tags);
new_tags = RawObject::NewBit::update(!is_old, new_tags);
// On architectures with a relaxed memory model, the concurrent marker may
// observe the write of the filler object's header before observing the
// new array length, and so treat it as a pointer. Ensure it is a Smi so
// the marker won't dereference it.
ASSERT((new_tags & kSmiTagMask) == kSmiTag);
uint32_t tags = raw->ptr()->tags_;
uint32_t old_tags;
// TODO(iposva): Investigate whether CompareAndSwapWord is necessary.
do {
old_tags = tags;
// We can't use obj.CompareAndSwapTags here because we don't have a
// handle for the new object.
tags = AtomicOperations::CompareAndSwapUint32(&raw->ptr()->tags_,
old_tags, new_tags);
} while (tags != old_tags);
intptr_t leftover_len = (leftover_size - TypedData::InstanceSize(0));
ASSERT(TypedData::InstanceSize(leftover_len) == leftover_size);
raw->StoreSmi(&(raw->ptr()->length_), Smi::New(leftover_len));
} else {
// Update the leftover space as a basic object.
ASSERT(leftover_size == Object::InstanceSize());
RawObject* raw = reinterpret_cast<RawObject*>(RawObject::FromAddr(addr));
uword new_tags = RawObject::ClassIdTag::update(kInstanceCid, 0);
new_tags = RawObject::SizeTag::update(leftover_size, new_tags);
new_tags = RawObject::VMHeapObjectTag::update(obj.raw()->IsVMHeapObject(),
new_tags);
const bool is_old = obj.raw()->IsOldObject();
new_tags = RawObject::OldBit::update(is_old, new_tags);
new_tags = RawObject::OldAndNotMarkedBit::update(is_old, new_tags);
new_tags = RawObject::OldAndNotRememberedBit::update(is_old, new_tags);
new_tags = RawObject::NewBit::update(!is_old, new_tags);
// On architectures with a relaxed memory model, the concurrent marker may
// observe the write of the filler object's header before observing the
// new array length, and so treat it as a pointer. Ensure it is a Smi so
// the marker won't dereference it.
ASSERT((new_tags & kSmiTagMask) == kSmiTag);
uint32_t tags = raw->ptr()->tags_;
uint32_t old_tags;
// TODO(iposva): Investigate whether CompareAndSwapWord is necessary.
do {
old_tags = tags;
// We can't use obj.CompareAndSwapTags here because we don't have a
// handle for the new object.
tags = AtomicOperations::CompareAndSwapUint32(&raw->ptr()->tags_,
old_tags, new_tags);
} while (tags != old_tags);
}
}
}
// Debug-only sanity check: the handle vtable recorded for every valid
// predefined class id must match the table built at VM startup. The
// free-list-element and forwarding-corpse pseudo-classes have no handles,
// so their vtable entries must be zero. No-op in release builds.
void Object::VerifyBuiltinVtables() {
#if defined(DEBUG)
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  Class& builtin_class = Class::Handle(thread->zone(), Class::null());
  for (intptr_t cid = kIllegalCid + 1; cid < kNumPredefinedCids; cid++) {
    if (!isolate->class_table()->HasValidClassAt(cid)) {
      continue;  // Gaps in the predefined cid range have no class object.
    }
    builtin_class ^= isolate->class_table()->At(cid);
    ASSERT(builtin_vtables_[cid] == builtin_class.raw_ptr()->handle_vtable_);
  }
  ASSERT(builtin_vtables_[kFreeListElement] == 0);
  ASSERT(builtin_vtables_[kForwardingCorpse] == 0);
#endif
}
// Names |cls| and adds it to |lib|'s class dictionary. Only for public
// classes: the name must be non-empty and must not start with '_'
// (library-private classes go through RegisterPrivateClass instead).
void Object::RegisterClass(const Class& cls,
                           const String& name,
                           const Library& lib) {
  ASSERT(name.Length() > 0);
  ASSERT(name.CharAt(0) != '_');
  cls.set_name(name);
  lib.AddClass(cls);
}
void Object::RegisterPrivateClass(const Class& cls,
const String& public_class_name,
const Library& lib) {
ASSERT(public_class_name.Length() > 0);
ASSERT(public_class_name.CharAt(0) == '_');
String& str = String::Handle();
str = lib.PrivateName(public_class_name);
cls.set_name(str);
lib.AddClass(cls);
}
// Initialize a new isolate from source or from a snapshot.
//
// There are three possibilities:
// 1. Running a Kernel binary. This function will bootstrap from the KERNEL
// file.
// 2. There is no snapshot. This function will bootstrap from source.
// 3. There is a snapshot. The caller should initialize from the snapshot.
//
// A non-NULL kernel argument indicates (1). A NULL kernel indicates (2) or
// (3), depending on whether the VM is compiled with DART_NO_SNAPSHOT defined or
// not.
RawError* Object::Init(Isolate* isolate,
const uint8_t* kernel_buffer,
intptr_t kernel_buffer_size) {
Thread* thread = Thread::Current();
Zone* zone = thread->zone();
ASSERT(isolate == thread->isolate());
#if !defined(DART_PRECOMPILED_RUNTIME)
const bool is_kernel = (kernel_buffer != NULL);
#endif
NOT_IN_PRODUCT(TimelineDurationScope tds(thread, Timeline::GetIsolateStream(),
"Object::Init");)
#if defined(DART_NO_SNAPSHOT)
bool bootstrapping =
(Dart::vm_snapshot_kind() == Snapshot::kNone) || is_kernel;
#elif defined(DART_PRECOMPILED_RUNTIME)
bool bootstrapping = false;
#else
bool bootstrapping = is_kernel;
#endif
if (bootstrapping) {
#if !defined(DART_PRECOMPILED_RUNTIME)
// Object::Init version when we are bootstrapping from source or from a
// Kernel binary.
ObjectStore* object_store = isolate->object_store();
Class& cls = Class::Handle(zone);
Type& type = Type::Handle(zone);
Array& array = Array::Handle(zone);
Library& lib = Library::Handle(zone);
TypeArguments& type_args = TypeArguments::Handle(zone);
// All RawArray fields will be initialized to an empty array, therefore
// initialize array class first.
cls = Class::New<Array>();
object_store->set_array_class(cls);
// VM classes that are parameterized (Array, ImmutableArray,
// GrowableObjectArray, and LinkedHashMap) are also pre-finalized, so
// CalculateFieldOffsets() is not called, so we need to set the offset of
// their type_arguments_ field, which is explicitly declared in their
// respective Raw* classes.
cls.set_type_arguments_field_offset(Array::type_arguments_offset());
cls.set_num_type_arguments(1);
// Set up the growable object array class (Has to be done after the array
// class is setup as one of its field is an array object).
cls = Class::New<GrowableObjectArray>();
object_store->set_growable_object_array_class(cls);
cls.set_type_arguments_field_offset(
GrowableObjectArray::type_arguments_offset());
cls.set_num_type_arguments(1);
// Initialize hash set for canonical_type_.
const intptr_t kInitialCanonicalTypeSize = 16;
array = HashTables::New<CanonicalTypeSet>(kInitialCanonicalTypeSize,
Heap::kOld);
object_store->set_canonical_types(array);
// Initialize hash set for canonical_type_arguments_.
const intptr_t kInitialCanonicalTypeArgumentsSize = 4;
array = HashTables::New<CanonicalTypeArgumentsSet>(
kInitialCanonicalTypeArgumentsSize, Heap::kOld);
object_store->set_canonical_type_arguments(array);
// Setup type class early in the process.
const Class& type_cls = Class::Handle(zone, Class::New<Type>());
const Class& type_ref_cls = Class::Handle(zone, Class::New<TypeRef>());
const Class& type_parameter_cls =
Class::Handle(zone, Class::New<TypeParameter>());
const Class& bounded_type_cls =
Class::Handle(zone, Class::New<BoundedType>());
const Class& mixin_app_type_cls =
Class::Handle(zone, Class::New<MixinAppType>());
const Class& library_prefix_cls =
Class::Handle(zone, Class::New<LibraryPrefix>());
// Pre-allocate the OneByteString class needed by the symbol table.
cls = Class::NewStringClass(kOneByteStringCid);
object_store->set_one_byte_string_class(cls);
// Pre-allocate the TwoByteString class needed by the symbol table.
cls = Class::NewStringClass(kTwoByteStringCid);
object_store->set_two_byte_string_class(cls);
// Setup the symbol table for the symbols created in the isolate.
Symbols::SetupSymbolTable(isolate);
// Set up the libraries array before initializing the core library.
const GrowableObjectArray& libraries =
GrowableObjectArray::Handle(zone, GrowableObjectArray::New(Heap::kOld));
object_store->set_libraries(libraries);
// Pre-register the core library.
Library::InitCoreLibrary(isolate);
// Basic infrastructure has been setup, initialize the class dictionary.
const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
ASSERT(!core_lib.IsNull());
const GrowableObjectArray& pending_classes =
GrowableObjectArray::Handle(zone, GrowableObjectArray::New());
object_store->set_pending_classes(pending_classes);
// Now that the symbol table is initialized and that the core dictionary as
// well as the core implementation dictionary have been setup, preallocate
// remaining classes and register them by name in the dictionaries.
String& name = String::Handle(zone);
cls = object_store->array_class(); // Was allocated above.
RegisterPrivateClass(cls, Symbols::_List(), core_lib);
pending_classes.Add(cls);
// We cannot use NewNonParameterizedType(cls), because Array is
// parameterized. Warning: class _List has not been patched yet. Its
// declared number of type parameters is still 0. It will become 1 after
// patching. The array type allocated below represents the raw type _List
// and not _List<E> as we could expect. Use with caution.
type ^= Type::New(Object::Handle(zone, cls.raw()),
TypeArguments::Handle(zone), TokenPosition::kNoSource);
type.SetIsFinalized();
type ^= type.Canonicalize();
object_store->set_array_type(type);
cls = object_store->growable_object_array_class(); // Was allocated above.
RegisterPrivateClass(cls, Symbols::_GrowableList(), core_lib);
pending_classes.Add(cls);
cls = Class::New<Array>(kImmutableArrayCid);
object_store->set_immutable_array_class(cls);
cls.set_type_arguments_field_offset(Array::type_arguments_offset());
cls.set_num_type_arguments(1);
ASSERT(object_store->immutable_array_class() !=
object_store->array_class());
cls.set_is_prefinalized();
RegisterPrivateClass(cls, Symbols::_ImmutableList(), core_lib);
pending_classes.Add(cls);
cls = object_store->one_byte_string_class(); // Was allocated above.
RegisterPrivateClass(cls, Symbols::OneByteString(), core_lib);
pending_classes.Add(cls);
cls = object_store->two_byte_string_class(); // Was allocated above.
RegisterPrivateClass(cls, Symbols::TwoByteString(), core_lib);
pending_classes.Add(cls);
cls = Class::NewStringClass(kExternalOneByteStringCid);
object_store->set_external_one_byte_string_class(cls);
RegisterPrivateClass(cls, Symbols::ExternalOneByteString(), core_lib);
pending_classes.Add(cls);
cls = Class::NewStringClass(kExternalTwoByteStringCid);
object_store->set_external_two_byte_string_class(cls);
RegisterPrivateClass(cls, Symbols::ExternalTwoByteString(), core_lib);
pending_classes.Add(cls);
// Pre-register the isolate library so the native class implementations can
// be hooked up before compiling it.
Library& isolate_lib = Library::Handle(
zone, Library::LookupLibrary(thread, Symbols::DartIsolate()));
if (isolate_lib.IsNull()) {
isolate_lib = Library::NewLibraryHelper(Symbols::DartIsolate(), true);
isolate_lib.SetLoadRequested();
isolate_lib.Register(thread);
}
object_store->set_bootstrap_library(ObjectStore::kIsolate, isolate_lib);
ASSERT(!isolate_lib.IsNull());
ASSERT(isolate_lib.raw() == Library::IsolateLibrary());
cls = Class::New<Capability>();
RegisterPrivateClass(cls, Symbols::_CapabilityImpl(), isolate_lib);
pending_classes.Add(cls);
cls = Class::New<ReceivePort>();
RegisterPrivateClass(cls, Symbols::_RawReceivePortImpl(), isolate_lib);
pending_classes.Add(cls);
cls = Class::New<SendPort>();
RegisterPrivateClass(cls, Symbols::_SendPortImpl(), isolate_lib);
pending_classes.Add(cls);
const Class& stacktrace_cls = Class::Handle(zone, Class::New<StackTrace>());
RegisterPrivateClass(stacktrace_cls, Symbols::_StackTrace(), core_lib);
pending_classes.Add(stacktrace_cls);
// Super type set below, after Object is allocated.
cls = Class::New<RegExp>();
RegisterPrivateClass(cls, Symbols::_RegExp(), core_lib);
pending_classes.Add(cls);
// Initialize the base interfaces used by the core VM classes.
// Allocate and initialize the pre-allocated classes in the core library.
// The script and token index of these pre-allocated classes is set up in
// the parser when the corelib script is compiled (see
// Parser::ParseClassDefinition).
cls = Class::New<Instance>(kInstanceCid);
object_store->set_object_class(cls);
cls.set_name(Symbols::Object());
cls.set_num_type_arguments(0);
cls.set_num_own_type_arguments(0);
cls.set_is_prefinalized();
core_lib.AddClass(cls);
pending_classes.Add(cls);
type = Type::NewNonParameterizedType(cls);
object_store->set_object_type(type);
cls = Class::New<Bool>();
object_store->set_bool_class(cls);
RegisterClass(cls, Symbols::Bool(), core_lib);
pending_classes.Add(cls);
cls = Class::New<Instance>(kNullCid);
object_store->set_null_class(cls);
cls.set_num_type_arguments(0);
cls.set_num_own_type_arguments(0);
cls.set_is_prefinalized();
RegisterClass(cls, Symbols::Null(), core_lib);
pending_classes.Add(cls);
ASSERT(!library_prefix_cls.IsNull());
RegisterPrivateClass(library_prefix_cls, Symbols::_LibraryPrefix(),
core_lib);
pending_classes.Add(library_prefix_cls);
RegisterPrivateClass(type_cls, Symbols::_Type(), core_lib);
pending_classes.Add(type_cls);
RegisterPrivateClass(type_ref_cls, Symbols::_TypeRef(), core_lib);
pending_classes.Add(type_ref_cls);
RegisterPrivateClass(type_parameter_cls, Symbols::_TypeParameter(),
core_lib);
pending_classes.Add(type_parameter_cls);
RegisterPrivateClass(bounded_type_cls, Symbols::_BoundedType(), core_lib);
pending_classes.Add(bounded_type_cls);
RegisterPrivateClass(mixin_app_type_cls, Symbols::_MixinAppType(),
core_lib);
pending_classes.Add(mixin_app_type_cls);
cls = Class::New<Integer>();
object_store->set_integer_implementation_class(cls);
RegisterPrivateClass(cls, Symbols::_IntegerImplementation(), core_lib);
pending_classes.Add(cls);
cls = Class::New<Smi>();
object_store->set_smi_class(cls);
RegisterPrivateClass(cls, Symbols::_Smi(), core_lib);
pending_classes.Add(cls);
cls = Class::New<Mint>();
object_store->set_mint_class(cls);
RegisterPrivateClass(cls, Symbols::_Mint(), core_lib);
pending_classes.Add(cls);
cls = Class::New<Double>();
object_store->set_double_class(cls);
RegisterPrivateClass(cls, Symbols::_Double(), core_lib);
pending_classes.Add(cls);
// Class that represents the Dart class _Closure and C++ class Closure.
cls = Class::New<Closure>();
object_store->set_closure_class(cls);
cls.ResetFinalization(); // To calculate field offsets from Dart source.
RegisterPrivateClass(cls, Symbols::_Closure(), core_lib);
pending_classes.Add(cls);
cls = Class::New<WeakProperty>();
object_store->set_weak_property_class(cls);
RegisterPrivateClass(cls, Symbols::_WeakProperty(), core_lib);
// Pre-register the mirrors library so we can place the vm class
// MirrorReference there rather than the core library.
#if !defined(DART_PRECOMPILED_RUNTIME)
lib = Library::LookupLibrary(thread, Symbols::DartMirrors());
if (lib.IsNull()) {
lib = Library::NewLibraryHelper(Symbols::DartMirrors(), true);
lib.SetLoadRequested();
lib.Register(thread);
}
object_store->set_bootstrap_library(ObjectStore::kMirrors, lib);
ASSERT(!lib.IsNull());
ASSERT(lib.raw() == Library::MirrorsLibrary());
cls = Class::New<MirrorReference>();
RegisterPrivateClass(cls, Symbols::_MirrorReference(), lib);
#endif
// Pre-register the collection library so we can place the vm class
// LinkedHashMap there rather than the core library.
lib = Library::LookupLibrary(thread, Symbols::DartCollection());
if (lib.IsNull()) {
lib = Library::NewLibraryHelper(Symbols::DartCollection(), true);
lib.SetLoadRequested();
lib.Register(thread);
}
object_store->set_bootstrap_library(ObjectStore::kCollection, lib);
ASSERT(!lib.IsNull());
ASSERT(lib.raw() == Library::CollectionLibrary());
cls = Class::New<LinkedHashMap>();
object_store->set_linked_hash_map_class(cls);
cls.set_type_arguments_field_offset(LinkedHashMap::type_arguments_offset());
cls.set_num_type_arguments(2);
cls.set_num_own_type_arguments(0);
RegisterPrivateClass(cls, Symbols::_LinkedHashMap(), lib);
pending_classes.Add(cls);
// Pre-register the developer library so we can place the vm class
// UserTag there rather than the core library.
lib = Library::LookupLibrary(thread, Symbols::DartDeveloper());
if (lib.IsNull()) {
lib = Library::NewLibraryHelper(Symbols::DartDeveloper(), true);
lib.SetLoadRequested();
lib.Register(thread);
}
object_store->set_bootstrap_library(ObjectStore::kDeveloper, lib);
ASSERT(!lib.IsNull());
ASSERT(lib.raw() == Library::DeveloperLibrary());
cls = Class::New<UserTag>();
RegisterPrivateClass(cls, Symbols::_UserTag(), lib);
pending_classes.Add(cls);
// Setup some default native field classes which can be extended for
// specifying native fields in dart classes.
Library::InitNativeWrappersLibrary(isolate, is_kernel);
ASSERT(object_store->native_wrappers_library() != Library::null());
// Pre-register the typed_data library so the native class implementations
// can be hooked up before compiling it.
lib = Library::LookupLibrary(thread, Symbols::DartTypedData());
if (lib.IsNull()) {
lib = Library::NewLibraryHelper(Symbols::DartTypedData(), true);
lib.SetLoadRequested();
lib.Register(thread);
}
object_store->set_bootstrap_library(ObjectStore::kTypedData, lib);
ASSERT(!lib.IsNull());
ASSERT(lib.raw() == Library::TypedDataLibrary());
#define REGISTER_TYPED_DATA_CLASS(clazz) \
cls = Class::NewTypedDataClass(kTypedData##clazz##ArrayCid); \
RegisterPrivateClass(cls, Symbols::_##clazz##List(), lib);
DART_CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_CLASS);
#undef REGISTER_TYPED_DATA_CLASS
#define REGISTER_TYPED_DATA_VIEW_CLASS(clazz) \
cls = Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid); \
RegisterPrivateClass(cls, Symbols::_##clazz##View(), lib); \
pending_classes.Add(cls);
CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_VIEW_CLASS);
cls = Class::NewTypedDataViewClass(kByteDataViewCid);
RegisterPrivateClass(cls, Symbols::_ByteDataView(), lib);
pending_classes.Add(cls);
#undef REGISTER_TYPED_DATA_VIEW_CLASS
#define REGISTER_EXT_TYPED_DATA_CLASS(clazz) \
cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid); \
RegisterPrivateClass(cls, Symbols::_External##clazz(), lib);
cls = Class::New<Instance>(kByteBufferCid);
cls.set_instance_size(0);
cls.set_next_field_offset(-kWordSize);
RegisterPrivateClass(cls, Symbols::_ByteBuffer(), lib);
pending_classes.Add(cls);
CLASS_LIST_TYPED_DATA(REGISTER_EXT_TYPED_DATA_CLASS);
#undef REGISTER_EXT_TYPED_DATA_CLASS
// Register Float32x4, Int32x4, and Float64x2 in the object store.
cls = Class::New<Float32x4>();
RegisterPrivateClass(cls, Symbols::_Float32x4(), lib);
pending_classes.Add(cls);
object_store->set_float32x4_class(cls);
cls = Class::New<Instance>(kIllegalCid);
RegisterClass(cls, Symbols::Float32x4(), lib);
cls.set_num_type_arguments(0);
cls.set_num_own_type_arguments(0);
cls.set_is_prefinalized();
type = Type::NewNonParameterizedType(cls);
object_store->set_float32x4_type(type);
cls = Class::New<Int32x4>();
RegisterPrivateClass(cls, Symbols::_Int32x4(), lib);
pending_classes.Add(cls);
object_store->set_int32x4_class(cls);
cls = Class::New<Instance>(kIllegalCid);
RegisterClass(cls, Symbols::Int32x4(), lib);
cls.set_num_type_arguments(0);
cls.set_num_own_type_arguments(0);
cls.set_is_prefinalized();
type = Type::NewNonParameterizedType(cls);
object_store->set_int32x4_type(type);
cls = Class::New<Float64x2>();
RegisterPrivateClass(cls, Symbols::_Float64x2(), lib);
pending_classes.Add(cls);
object_store->set_float64x2_class(cls);
cls = Class::New<Instance>(kIllegalCid);
RegisterClass(cls, Symbols::Float64x2(), lib);
cls.set_num_type_arguments(0);
cls.set_num_own_type_arguments(0);
cls.set_is_prefinalized();
type = Type::NewNonParameterizedType(cls);
object_store->set_float64x2_type(type);
// Set the super type of class StackTrace to Object type so that the
// 'toString' method is implemented.
type = object_store->object_type();
stacktrace_cls.set_super_type(type);
// Abstract class that represents the Dart class Type.
// Note that this class is implemented by Dart class _AbstractType.
cls = Class::New<Instance>(kIllegalCid);
cls.set_num_type_arguments(0);
cls.set_num_own_type_arguments(0);
cls.set_is_prefinalized();
RegisterClass(cls, Symbols::Type(), core_lib);
pending_classes.Add(cls);
type = Type::NewNonParameterizedType(cls);
object_store->set_type_type(type);
// Abstract class that represents the Dart class Function.
cls = Class::New<Instance>(kIllegalCid);
cls.set_num_type_arguments(0);
cls.set_num_own_type_arguments(0);
cls.set_is_prefinalized();
RegisterClass(cls, Symbols::Function(), core_lib);
pending_classes.Add(cls);
type = Type::NewNonParameterizedType(cls);
object_store->set_function_type(type);
cls = Class::New<Number>();
RegisterClass(cls, Symbols::Number(), core_lib);
pending_classes.Add(cls);
type = Type::NewNonParameterizedType(cls);
object_store->set_number_type(type);
cls = Class::New<Instance>(kIllegalCid);
RegisterClass(cls, Symbols::Int(), core_lib);
cls.set_num_type_arguments(0);
cls.set_num_own_type_arguments(0);
cls.set_is_prefinalized();
pending_classes.Add(cls);
type = Type::NewNonParameterizedType(cls);
object_store->set_int_type(type);
cls = Class::New<Instance>(kIllegalCid);
RegisterPrivateClass(cls, Symbols::Int64(), core_lib);
cls.set_num_type_arguments(0);
cls.set_num_own_type_arguments(0);
cls.set_is_prefinalized();
pending_classes.Add(cls);
type = Type::NewNonParameterizedType(cls);
object_store->set_int64_type(type);
cls = Class::New<Instance>(kIllegalCid);
RegisterClass(cls, Symbols::Double(), core_lib);
cls.set_num_type_arguments(0);
cls.set_num_own_type_arguments(0);
cls.set_is_prefinalized();
pending_classes.Add(cls);
type = Type::NewNonParameterizedType(cls);
object_store->set_double_type(type);
name = Symbols::_String().raw();
cls = Class::New<Instance>(kIllegalCid);
RegisterClass(cls, name, core_lib);
cls.set_num_type_arguments(0);
cls.set_num_own_type_arguments(0);
cls.set_is_prefinalized();
pending_classes.Add(cls);
type = Type::NewNonParameterizedType(cls);
object_store->set_string_type(type);
cls = object_store->bool_class();
type = Type::NewNonParameterizedType(cls);
object_store->set_bool_type(type);
cls = object_store->smi_class();
type = Type::NewNonParameterizedType(cls);
object_store->set_smi_type(type);
cls = object_store->mint_class();
type = Type::NewNonParameterizedType(cls);
object_store->set_mint_type(type);
// The classes 'void' and 'dynamic' are phony classes to make type checking
// more regular; they live in the VM isolate. The class 'void' is not
// registered in the class dictionary because its name is a reserved word.
// The class 'dynamic' is registered in the class dictionary because its
// name is a built-in identifier (this is wrong). The corresponding types
// are stored in the object store.
cls = object_store->null_class();
type = Type::NewNonParameterizedType(cls);
object_store->set_null_type(type);
// Consider removing when/if Null becomes an ordinary class.
type = object_store->object_type();
cls.set_super_type(type);
// Create and cache commonly used type arguments <int>, <double>,
// <String>, <String, dynamic> and <String, String>.
type_args = TypeArguments::New(1);
type = object_store->int_type();
type_args.SetTypeAt(0, type);
type_args.Canonicalize();
object_store->set_type_argument_int(type_args);
type_args = TypeArguments::New(1);
type = object_store->double_type();
type_args.SetTypeAt(0, type);
type_args.Canonicalize();
object_store->set_type_argument_double(type_args);
type_args = TypeArguments::New(1);
type = object_store->string_type();
type_args.SetTypeAt(0, type);
type_args.Canonicalize();
object_store->set_type_argument_string(type_args);
type_args = TypeArguments::New(2);
type = object_store->string_type();
type_args.SetTypeAt(0, type);
type_args.SetTypeAt(1, Object::dynamic_type());
type_args.Canonicalize();
object_store->set_type_argument_string_dynamic(type_args);
type_args = TypeArguments::New(2);
type = object_store->string_type();
type_args.SetTypeAt(0, type);
type_args.SetTypeAt(1, type);
type_args.Canonicalize();
object_store->set_type_argument_string_string(type_args);
// Finish the initialization by compiling the bootstrap scripts containing
// the base interfaces and the implementation of the internal classes.
const Error& error = Error::Handle(
zone, Bootstrap::DoBootstrapping(kernel_buffer, kernel_buffer_size));
if (!error.IsNull()) {
return error.raw();
}
isolate->class_table()->CopySizesFromClassObjects();
ClassFinalizer::VerifyBootstrapClasses();
// Set up the intrinsic state of all functions (core, math and typed data).
Intrinsifier::InitializeState();
// Set up recognized state of all functions (core, math and typed data).
MethodRecognizer::InitializeState();
// Adds static const fields (class ids) to the class 'ClassID');
lib = Library::LookupLibrary(thread, Symbols::DartInternal());
ASSERT(!lib.IsNull());
cls = lib.LookupClassAllowPrivate(Symbols::ClassID());
ASSERT(!cls.IsNull());
cls.InjectCIDFields();
isolate->object_store()->InitKnownObjects();
#endif // !defined(DART_PRECOMPILED_RUNTIME)
} else {
// Object::Init version when we are running in a version of dart that has a
// full snapshot linked in and an isolate is initialized using the full
// snapshot.
ObjectStore* object_store = isolate->object_store();
Class& cls = Class::Handle(zone);
// Set up empty classes in the object store, these will get initialized
// correctly when we read from the snapshot. This is done to allow
// bootstrapping of reading classes from the snapshot. Some classes are not
// stored in the object store. Yet we still need to create their Class
// object so that they get put into the class_table (as a side effect of
// Class::New()).
cls = Class::New<Instance>(kInstanceCid);
object_store->set_object_class(cls);
cls = Class::New<LibraryPrefix>();
cls = Class::New<Type>();
cls = Class::New<TypeRef>();
cls = Class::New<TypeParameter>();
cls = Class::New<BoundedType>();
cls = Class::New<MixinAppType>();
cls = Class::New<Array>();
object_store->set_array_class(cls);
cls = Class::New<Array>(kImmutableArrayCid);
object_store->set_immutable_array_class(cls);
cls = Class::New<GrowableObjectArray>();
object_store->set_growable_object_array_class(cls);
cls = Class::New<LinkedHashMap>();
object_store->set_linked_hash_map_class(cls);
cls = Class::New<Float32x4>();
object_store->set_float32x4_class(cls);
cls = Class::New<Int32x4>();
object_store->set_int32x4_class(cls);
cls = Class::New<Float64x2>();
object_store->set_float64x2_class(cls);
#define REGISTER_TYPED_DATA_CLASS(clazz) \
cls = Class::NewTypedDataClass(kTypedData##clazz##Cid);
CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_CLASS);
#undef REGISTER_TYPED_DATA_CLASS
#define REGISTER_TYPED_DATA_VIEW_CLASS(clazz) \
cls = Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid);
CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_VIEW_CLASS);
#undef REGISTER_TYPED_DATA_VIEW_CLASS
cls = Class::NewTypedDataViewClass(kByteDataViewCid);
#define REGISTER_EXT_TYPED_DATA_CLASS(clazz) \
cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid);
CLASS_LIST_TYPED_DATA(REGISTER_EXT_TYPED_DATA_CLASS);
#undef REGISTER_EXT_TYPED_DATA_CLASS
cls = Class::New<Instance>(kByteBufferCid);
cls = Class::New<Integer>();
object_store->set_integer_implementation_class(cls);
cls = Class::New<Smi>();
object_store->set_smi_class(cls);
cls = Class::New<Mint>();
object_store->set_mint_class(cls);
cls = Class::New<Double>();
object_store->set_double_class(cls);
cls = Class::New<Closure>();
object_store->set_closure_class(cls);
cls = Class::NewStringClass(kOneByteStringCid);
object_store->set_one_byte_string_class(cls);
cls = Class::NewStringClass(kTwoByteStringCid);
object_store->set_two_byte_string_class(cls);
cls = Class::NewStringClass(kExternalOneByteStringCid);
object_store->set_external_one_byte_string_class(cls);
cls = Class::NewStringClass(kExternalTwoByteStringCid);
object_store->set_external_two_byte_string_class(cls);
cls = Class::New<Bool>();
object_store->set_bool_class(cls);
cls = Class::New<Instance>(kNullCid);
object_store->set_null_class(cls);
cls = Class::New<Capability>();
cls = Class::New<ReceivePort>();
cls = Class::New<SendPort>();
cls = Class::New<StackTrace>();
cls = Class::New<RegExp>();
cls = Class::New<Number>();
cls = Class::New<WeakProperty>();
object_store->set_weak_property_class(cls);
cls = Class::New<MirrorReference>();
cls = Class::New<UserTag>();
}
return Error::null();
}
#if defined(DEBUG)
bool Object::InVMHeap() const {
  // When handle verification is enabled, cross-check the header tag against
  // the actual location of the object: anything tagged as a VM-heap object
  // must be contained in the shared VM isolate's heap.
  const bool tagged_as_vm_object = raw()->IsVMHeapObject();
  if (FLAG_verify_handles && tagged_as_vm_object) {
    Heap* vm_isolate_heap = Dart::vm_isolate()->heap();
    ASSERT(vm_isolate_heap->Contains(RawObject::ToAddr(raw())));
  }
  return tagged_as_vm_object;
}
#endif  // DEBUG
// Prints this object's string representation (ToCString) followed by a
// newline to the thread's log.
void Object::Print() const {
  const char* repr = ToCString();
  THR_Print("%s\n", repr);
}
// Base implementation of the name used for library-dictionary lookups.
// Plain objects have no dictionary name; subclasses that do (classes,
// functions, etc.) override this.
RawString* Object::DictionaryName() const {
  return String::null();
}
// Initializes raw memory at |address| as an object of |class_id| and |size|:
// fills the body with a well-defined value and writes the header tags.
// Instructions objects are filled with break instructions so jumping into
// uninitialized code traps; all other objects are filled with null.
void Object::InitializeObject(uword address,
                              intptr_t class_id,
                              intptr_t size,
                              bool is_vm_object) {
  uword initial_value = (class_id == kInstructionsCid)
                            ? Assembler::GetBreakInstructionFiller()
                            : reinterpret_cast<uword>(null_);
  uword cur = address;
  uword end = address + size;
  // Word-wise fill of the entire allocation; the header words written here
  // are overwritten with the tag word below.
  while (cur < end) {
    *reinterpret_cast<uword*>(cur) = initial_value;
    cur += kWordSize;
  }
  uint32_t tags = 0;
  ASSERT(class_id != kIllegalCid);
  tags = RawObject::ClassIdTag::update(class_id, tags);
  tags = RawObject::SizeTag::update(size, tags);
  tags = RawObject::VMHeapObjectTag::update(is_vm_object, tags);
  // The generation is derived from the address alignment: old-space objects
  // sit at the old alignment offset, new-space objects at the new one.
  const bool is_old =
      (address & kNewObjectAlignmentOffset) == kOldObjectAlignmentOffset;
  tags = RawObject::OldBit::update(is_old, tags);
  tags = RawObject::OldAndNotMarkedBit::update(is_old, tags);
  tags = RawObject::OldAndNotRememberedBit::update(is_old, tags);
  tags = RawObject::NewBit::update(!is_old, tags);
  reinterpret_cast<RawObject*>(address)->tags_ = tags;
#if defined(HASH_IN_OBJECT_HEADER)
  // Identity hash starts out unset (0) when it lives in the header.
  reinterpret_cast<RawObject*>(address)->hash_ = 0;
#endif
  ASSERT(is_vm_object == RawObject::IsVMHeapObject(tags));
}
// Debug-only consistency check: verifies that this handle's vtable matches
// the class of the object it refers to, and optionally (FLAG_verify_handles)
// that the referenced object lives in a known heap.
void Object::CheckHandle() const {
#if defined(DEBUG)
  if (raw_ != Object::null()) {
    if ((reinterpret_cast<uword>(raw_) & kSmiTagMask) == kSmiTag) {
      // Smis are immediate values; every Smi handle shares one vtable.
      ASSERT(vtable() == Smi::handle_vtable_);
      return;
    }
    intptr_t cid = raw_->GetClassId();
    if (cid >= kNumPredefinedCids) {
      // All user-defined classes are handled through the Instance vtable.
      cid = kInstanceCid;
    }
    ASSERT(vtable() == builtin_vtables_[cid]);
    if (FLAG_verify_handles) {
      Isolate* isolate = Isolate::Current();
      Heap* isolate_heap = isolate->heap();
      Heap* vm_isolate_heap = Dart::vm_isolate()->heap();
      // The object must reside in either the current isolate's heap or the
      // shared VM isolate's heap.
      ASSERT(isolate_heap->Contains(RawObject::ToAddr(raw_)) ||
             vm_isolate_heap->Contains(RawObject::ToAddr(raw_)));
    }
  }
#endif
}
// Allocates and initializes a heap object of |cls_id| and |size| in |space|.
// Throws the preallocated OutOfMemory exception on allocation failure.
// Returns a tagged RawObject pointer; the object body is null/break-filled
// (see InitializeObject).
RawObject* Object::Allocate(intptr_t cls_id, intptr_t size, Heap::Space space) {
  ASSERT(Utils::IsAligned(size, kObjectAlignment));
  Thread* thread = Thread::Current();
  // New space allocation allowed only in mutator thread (Dart thread);
  ASSERT(thread->IsMutatorThread() || (space != Heap::kNew));
  ASSERT(thread->execution_state() == Thread::kThreadInVM);
  ASSERT(thread->no_callback_scope_depth() == 0);
  Isolate* isolate = thread->isolate();
  Heap* heap = isolate->heap();
  uword address;
  // In a bump allocation scope, all allocations go into old space.
  if (thread->bump_allocate() && (space != Heap::kCode)) {
    DEBUG_ASSERT(heap->old_space()->CurrentThreadOwnsDataLock());
    address = heap->old_space()->TryAllocateDataBumpLocked(
        size, PageSpace::kForceGrowth);
  } else {
    address = heap->Allocate(size, space);
  }
  if (address == 0) {
    // Use the preallocated out of memory exception to avoid calling
    // into dart code or allocating any code.
    const Instance& exception =
        Instance::Handle(isolate->object_store()->out_of_memory());
    Exceptions::Throw(thread, exception);
    UNREACHABLE();
  }
#ifndef PRODUCT
  // Allocation accounting and (optional) allocation-site profiling.
  ClassTable* class_table = isolate->class_table();
  if (space == Heap::kNew) {
    class_table->UpdateAllocatedNew(cls_id, size);
  } else {
    class_table->UpdateAllocatedOld(cls_id, size);
  }
  const Class& cls = Class::Handle(class_table->At(cls_id));
  if (FLAG_profiler && cls.TraceAllocation(isolate)) {
    Profiler::SampleAllocation(thread, cls_id);
  }
#endif  // !PRODUCT
  // No safepoint may occur between writing the header and handing out the
  // pointer, so the GC never sees a half-initialized object.
  NoSafepointScope no_safepoint;
  InitializeObject(address, cls_id, size, (isolate == Dart::vm_isolate()));
  RawObject* raw_obj = reinterpret_cast<RawObject*>(address + kHeapObjectTag);
  ASSERT(cls_id == RawObject::ClassIdTag::decode(raw_obj->ptr()->tags_));
  if (raw_obj->IsOldObject() && thread->is_marking()) {
    // Black allocation. Prevents a data race between the mutator and concurrent
    // marker on ARM and ARM64 (the marker may observe a publishing store of
    // this object before the stores that initialize its slots), and helps the
    // collection to finish sooner.
    raw_obj->SetMarkBitUnsynchronized();
    heap->old_space()->AllocateBlack(size);
  }
  return raw_obj;
}
// Visits the pointer slots of an old-space object and runs the write-barrier
// check for each heap-object value stored in them. Used after bulk,
// barrier-free initialization (e.g. Object::Clone) so the object ends up
// correctly remembered/marked.
class WriteBarrierUpdateVisitor : public ObjectPointerVisitor {
 public:
  explicit WriteBarrierUpdateVisitor(Thread* thread, RawObject* obj)
      : ObjectPointerVisitor(thread->isolate()),
        thread_(thread),
        old_obj_(obj) {
    ASSERT(old_obj_->IsOldObject());
  }

  void VisitPointers(RawObject** from, RawObject** to) {
    // Note the inclusive upper bound: |to| points at the last slot.
    for (RawObject** slot = from; slot <= to; ++slot) {
      RawObject* value = *slot;
      if (!value->IsHeapObject()) {
        continue;  // Smis never need a write barrier.
      }
      old_obj_->CheckHeapPointerStore(value, thread_);
    }
  }

 private:
  Thread* thread_;
  RawObject* old_obj_;
  DISALLOW_COPY_AND_ASSIGN(WriteBarrierUpdateVisitor);
};
// True if this handle lives in Dart's read-only handle area (delegates the
// address check to Dart::IsReadOnlyHandle).
bool Object::IsReadOnlyHandle() const {
  return Dart::IsReadOnlyHandle(reinterpret_cast<uword>(this));
}
// A handle outlives the current handle scope when it is either a zone handle
// or one of the read-only handles; only plain scoped handles are temporary.
bool Object::IsNotTemporaryScopedHandle() const {
  if (IsZoneHandle()) {
    return true;
  }
  return IsReadOnlyHandle();
}
// Makes a shallow copy of |orig| in |space|: allocates an object of the same
// class and size, copies the body bytes (excluding the header), and then
// re-runs write-barrier checks if the clone landed in old space.
RawObject* Object::Clone(const Object& orig, Heap::Space space) {
  const Class& cls = Class::Handle(orig.clazz());
  intptr_t size = orig.raw()->Size();
  RawObject* raw_clone = Object::Allocate(cls.id(), size, space);
  // No GC may move orig/raw_clone between the address computation and the
  // byte copy below.
  NoSafepointScope no_safepoint;
  // Copy the body of the original into the clone.
  uword orig_addr = RawObject::ToAddr(orig.raw());
  uword clone_addr = RawObject::ToAddr(raw_clone);
  static const intptr_t kHeaderSizeInBytes = sizeof(RawObject);
  memmove(reinterpret_cast<uint8_t*>(clone_addr + kHeaderSizeInBytes),
          reinterpret_cast<uint8_t*>(orig_addr + kHeaderSizeInBytes),
          size - kHeaderSizeInBytes);
  // Add clone to store buffer, if needed.
  if (!raw_clone->IsOldObject()) {
    // No need to remember an object in new space.
    return raw_clone;
  }
  // The raw copy bypassed the write barrier; visit all pointer slots to
  // update remembered-set/marking state for the old-space clone.
  WriteBarrierUpdateVisitor visitor(Thread::Current(), raw_clone);
  raw_clone->VisitPointers(&visitor);
  return raw_clone;
}
// Returns the class's declared name as stored in the class object.
RawString* Class::Name() const {
  return raw_ptr()->name_;
}
// Returns the class name with internal decorations removed via
// String::ScrubName.
RawString* Class::ScrubbedName() const {
  return String::ScrubName(String::Handle(Name()));
}
// Returns the name shown to users. Non-product builds return the cached
// user_name_ (which must already be set); product builds regenerate it on
// every call to save the memory of caching it.
RawString* Class::UserVisibleName() const {
#if !defined(PRODUCT)
  ASSERT(raw_ptr()->user_name_ != String::null());
  return raw_ptr()->user_name_;
#endif                               // !defined(PRODUCT)
  return GenerateUserVisibleName();  // No caching in PRODUCT, regenerate.
}
// True if the library owning this class was loaded from a full snapshot.
// Reads raw pointers directly, hence the NoSafepointScope.
bool Class::IsInFullSnapshot() const {
  NoSafepointScope no_safepoint;
  return raw_ptr()->library_->ptr()->is_in_fullsnapshot_;
}
// Returns the finalized "rare" type of this class: the type with a null
// (raw) type argument vector.
RawAbstractType* Class::RareType() const {
  const Type& type = Type::Handle(Type::New(
      *this, Object::null_type_arguments(), TokenPosition::kNoSource));
  return ClassFinalizer::FinalizeType(*this, type);
}
// Returns the finalized type of this class with its own type parameters used
// as the type arguments (i.e. the type as written at the declaration).
RawAbstractType* Class::DeclarationType() const {
  const TypeArguments& args = TypeArguments::Handle(type_parameters());
  const Type& type =
      Type::Handle(Type::New(*this, args, TokenPosition::kNoSource));
  return ClassFinalizer::FinalizeType(*this, type);
}
// Allocates and initializes a Class object describing the VM-internal class
// whose instances are laid out like FakeObject. Size/vtable/class-id are
// taken from FakeObject; the class is registered with the current isolate.
template <class FakeObject>
RawClass* Class::New() {
  ASSERT(Object::class_class() != Class::null());
  Class& result = Class::Handle();
  {
    RawObject* raw =
        Object::Allocate(Class::kClassId, Class::InstanceSize(), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  // Use a stack-allocated FakeObject only to obtain the handle vtable.
  FakeObject fake;
  result.set_handle_vtable(fake.vtable());
  result.set_token_pos(TokenPosition::kNoSource);
  result.set_instance_size(FakeObject::InstanceSize());
  result.set_type_arguments_field_offset_in_words(kNoTypeArguments);
  result.set_next_field_offset(FakeObject::NextFieldOffset());
  COMPILE_ASSERT((FakeObject::kClassId != kInstanceCid));
  result.set_id(FakeObject::kClassId);
  result.set_num_type_arguments(0);
  result.set_num_own_type_arguments(0);
  result.set_has_pragma(false);
  result.set_num_native_fields(0);
  result.set_state_bits(0);
  if ((FakeObject::kClassId < kInstanceCid) ||
      (FakeObject::kClassId == kTypeArgumentsCid)) {
    // VM internal classes are done. There is no finalization needed or
    // possible in this case.
    result.set_is_finalized();
  } else {
    // VM backed classes are almost ready: run checks and resolve class
    // references, but do not recompute size.
    result.set_is_prefinalized();
  }
  result.set_kernel_offset(-1);
  result.InitEmptyFields();
  Isolate::Current()->RegisterClass(result);
  return result.raw();
}
// Reports a fatal compile-time error when a class's type parameter count
// exceeds what the class layout can encode. Does not return.
static void ReportTooManyTypeArguments(const Class& cls) {
  Report::MessageF(Report::kError, Script::Handle(cls.script()),
                   cls.token_pos(), Report::AtLocation,
                   "too many type parameters declared in class '%s' or in its "
                   "super classes",
                   String::Handle(cls.Name()).ToCString());
  UNREACHABLE();
}
// Stores the total type argument count; the field is 16 bits signed, so any
// value outside that range is reported as an error.
void Class::set_num_type_arguments(intptr_t value) const {
  if (!Utils::IsInt(16, value)) {
    ReportTooManyTypeArguments(*this);
  }
  StoreNonPointer(&raw_ptr()->num_type_arguments_, value);
}
// Stores the class's own type argument count into its bitfield slot, which
// it shares with the has_pragma bit; overflow is reported as an error.
void Class::set_num_own_type_arguments(intptr_t value) const {
  if (!Utils::IsUint(kNumOwnTypeArgumentsSize, value)) {
    ReportTooManyTypeArguments(*this);
  }
  StoreNonPointer(
      &raw_ptr()->has_pragma_and_num_own_type_arguments_,
      NumOwnTypeArguments::update(
          value, raw_ptr()->has_pragma_and_num_own_type_arguments_));
}
// Overwrites the combined has_pragma/num_own_type_arguments bitfield with a
// raw 16-bit value (both subfields at once).
void Class::set_has_pragma_and_num_own_type_arguments(uint16_t value) const {
  StoreNonPointer(&raw_ptr()->has_pragma_and_num_own_type_arguments_, value);
}
// Updates only the has_pragma bit, preserving the num_own_type_arguments
// part of the shared bitfield.
void Class::set_has_pragma(bool value) const {
  StoreNonPointer(
      &raw_ptr()->has_pragma_and_num_own_type_arguments_,
      HasPragmaBit::update(value,
                           raw_ptr()->has_pragma_and_num_own_type_arguments_));
}
// Initialize class fields of type Array with empty array.
void Class::InitEmptyFields() {
if (Object::empty_array().raw() == Array::null()) {
// The empty array has not been initialized yet.
return;
}
StorePointer(&raw_ptr()->interfaces_, Object::empty_array().raw());
StorePointer(&raw_ptr()->constants_, Object::empty_array().raw());
StorePointer(&raw_ptr()->functions_, Object::empty_array().raw());
StorePointer(&raw_ptr()->fields_, Object::empty_array().raw());
StorePointer(&raw_ptr()->invocation_dispatcher_cache_,
Object::empty_array().raw());
}
// Returns (building and caching it on first use) an array mapping an
// instance-field word offset to the Field object at that offset, walking
// this class and its superclasses.
RawArray* Class::OffsetToFieldMap(bool original_classes) const {
  Array& array = Array::Handle(raw_ptr()->offset_in_words_to_field_);
  if (array.IsNull()) {
    ASSERT(is_finalized());
    // One slot per word of the instance; slots without a field stay null.
    const intptr_t length = raw_ptr()->instance_size_in_words_;
    array = Array::New(length, Heap::kOld);
    Class& cls = Class::Handle(this->raw());
    Array& fields = Array::Handle();
    Field& f = Field::Handle();
    // Walk up the superclass chain collecting instance fields.
    while (!cls.IsNull()) {
      fields = cls.fields();
      for (intptr_t i = 0; i < fields.Length(); ++i) {
        f ^= fields.At(i);
        if (f.is_instance()) {
          array.SetAt(f.Offset() >> kWordSizeLog2, f);
        }
      }
      cls = cls.SuperClass(original_classes);
    }
    // Cache the computed map for subsequent calls.
    StorePointer(&raw_ptr()->offset_in_words_to_field_, array.raw());
  }
  return array.raw();
}
bool Class::HasInstanceFields() const {
const Array& field_array = Array::Handle(fields());
Field& field = Field::Handle();
for (intptr_t i = 0; i < field_array.Length(); ++i) {
field ^= field_array.At(i);
if (!field.is_static()) {
return true;
}
}
return false;
}
// Lookup key wrapping a function name for use with the class functions hash
// set. The scratch String handle avoids allocating a new handle for every
// comparison when the key is not a canonical symbol.
class FunctionName {
 public:
  FunctionName(const String& name, String* tmp_string)
      : name_(name), tmp_string_(tmp_string) {}

  bool Matches(const Function& function) const {
    // Symbols are canonicalized, so pointer equality is sufficient for them;
    // otherwise compare contents through the scratch handle.
    if (!name_.IsSymbol()) {
      *tmp_string_ = function.name();
      return name_.Equals(*tmp_string_);
    }
    return name_.raw() == function.name();
  }

  intptr_t Hash() const { return name_.Hash(); }

 private:
  const String& name_;
  String* tmp_string_;
};
// Traits for looking up Functions by name.
// Used by the UnorderedHashSet below; keys may be either Function objects
// (when growing the table) or FunctionName lookup keys.
class ClassFunctionsTraits {
 public:
  static const char* Name() { return "ClassFunctionsTraits"; }
  static bool ReportStats() { return false; }
  // Called when growing the table.
  static bool IsMatch(const Object& a, const Object& b) {
    ASSERT(a.IsFunction() && b.IsFunction());
    // Function objects are always canonical.
    return a.raw() == b.raw();
  }
  // Probe comparison for name-based lookups.
  static bool IsMatch(const FunctionName& name, const Object& obj) {
    return name.Matches(Function::Cast(obj));
  }
  // Both hash overloads must agree: a Function hashes by its (symbol) name.
  static uword Hash(const Object& key) {
    return String::HashRawSymbol(Function::Cast(key).name());
  }
  static uword Hash(const FunctionName& name) { return name.Hash(); }
};
typedef UnorderedHashSet<ClassFunctionsTraits> ClassFunctionsSet;
// Replaces this class's functions array. When the array is at least
// kFunctionLookupHashTreshold long, a name-keyed hash set is also built to
// speed up lookups; otherwise any existing hash table is dropped.
void Class::SetFunctions(const Array& value) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->functions_, value.raw());
  const intptr_t len = value.Length();
  if (len >= kFunctionLookupHashTreshold) {
    ClassFunctionsSet set(HashTables::New<ClassFunctionsSet>(len, Heap::kOld));
    Function& func = Function::Handle();
    for (intptr_t i = 0; i < len; ++i) {
      func ^= value.At(i);
      // Verify that all the functions in the array have this class as owner.
      ASSERT(func.Owner() == raw());
      set.Insert(func);
    }
    StorePointer(&raw_ptr()->functions_hash_table_, set.Release().raw());
  } else {
    StorePointer(&raw_ptr()->functions_hash_table_, Array::null());
  }
}
// Appends |function| to this class's functions array, transitioning to (or
// maintaining) the lookup hash table once the threshold is reached.
void Class::AddFunction(const Function& function) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  const Array& arr = Array::Handle(functions());
  const Array& new_arr =
      Array::Handle(Array::Grow(arr, arr.Length() + 1, Heap::kOld));
  new_arr.SetAt(arr.Length(), function);
  StorePointer(&raw_ptr()->functions_, new_arr.raw());
  // Add to hash table, if any.
  const intptr_t new_len = new_arr.Length();
  if (new_len == kFunctionLookupHashTreshold) {
    // Transition to using hash table.
    SetFunctions(new_arr);
  } else if (new_len > kFunctionLookupHashTreshold) {
    // Table already exists; insert just the new entry.
    ClassFunctionsSet set(raw_ptr()->functions_hash_table_);
    set.Insert(function);
    StorePointer(&raw_ptr()->functions_hash_table_, set.Release().raw());
  }
}
// Removes |function| from this class by clearing the functions array (and
// hash table) and re-adding every other entry. Quadratic in the number of
// functions since AddFunction grows the array one element at a time.
void Class::RemoveFunction(const Function& function) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  const Array& arr = Array::Handle(functions());
  StorePointer(&raw_ptr()->functions_, Object::empty_array().raw());
  StorePointer(&raw_ptr()->functions_hash_table_, Array::null());
  Function& entry = Function::Handle();
  for (intptr_t i = 0; i < arr.Length(); i++) {
    entry ^= arr.At(i);
    if (function.raw() != entry.raw()) {
      AddFunction(entry);
    }
  }
}
// Returns the function at position |idx| in this class's functions array,
// or null when |idx| is out of range.
RawFunction* Class::FunctionFromIndex(intptr_t idx) const {
  const Array& funcs = Array::Handle(functions());
  const bool index_ok = (idx >= 0) && (idx < funcs.Length());
  if (!index_ok) {
    return Function::null();
  }
  Function& result = Function::Handle();
  result ^= funcs.At(idx);
  ASSERT(!result.IsNull());
  return result.raw();
}
// Returns the implicit closure wrapper of the function at |idx|, or null
// when |idx| is out of range or no implicit closure exists for it.
RawFunction* Class::ImplicitClosureFunctionFromIndex(intptr_t idx) const {
  const Array& funcs = Array::Handle(functions());
  const bool index_ok = (idx >= 0) && (idx < funcs.Length());
  if (!index_ok) {
    return Function::null();
  }
  Function& target = Function::Handle();
  target ^= funcs.At(idx);
  ASSERT(!target.IsNull());
  if (!target.HasImplicitClosureFunction()) {
    return Function::null();
  }
  const Function& closure =
      Function::Handle(target.ImplicitClosureFunction());
  ASSERT(!closure.IsNull());
  return closure.raw();
}
// Returns the index (into the functions array) of the function whose
// implicit closure is |needle|, or -1 if not found or finalization fails.
intptr_t Class::FindImplicitClosureFunctionIndex(const Function& needle) const {
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return -1;
  }
  // Use thread-local reusable handles to avoid zone allocation in this loop.
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  Function& function = thread->FunctionHandle();
  funcs ^= functions();
  ASSERT(!funcs.IsNull());
  Function& implicit_closure = Function::Handle(thread->zone());
  const intptr_t len = funcs.Length();
  for (intptr_t i = 0; i < len; i++) {
    function ^= funcs.At(i);
    implicit_closure ^= function.implicit_closure_function();
    if (implicit_closure.IsNull()) {
      // Skip non-implicit closure functions.
      continue;
    }
    if (needle.raw() == implicit_closure.raw()) {
      return i;
    }
  }
  // No function found.
  return -1;
}
// Returns the index of |needle| within the invocation dispatcher cache, or
// -1 if it is not present or finalization fails.
intptr_t Class::FindInvocationDispatcherFunctionIndex(
    const Function& needle) const {
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return -1;
  }
  // Use thread-local reusable handles to avoid zone allocation in this loop.
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_OBJECT_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  Object& object = thread->ObjectHandle();
  funcs ^= invocation_dispatcher_cache();
  ASSERT(!funcs.IsNull());
  const intptr_t len = funcs.Length();
  for (intptr_t i = 0; i < len; i++) {
    object = funcs.At(i);
    // The invocation_dispatcher_cache is a table with some entries that
    // are functions.
    if (object.IsFunction()) {
      if (Function::Cast(object).raw() == needle.raw()) {
        return i;
      }
    }
  }
  // No function found.
  return -1;
}
// Returns the entry at |idx| in the invocation dispatcher cache if it is a
// Function, otherwise null (the cache also holds names and args
// descriptors).
RawFunction* Class::InvocationDispatcherFunctionFromIndex(intptr_t idx) const {
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_OBJECT_HANDLESCOPE(thread);
  Array& dispatcher_cache = thread->ArrayHandle();
  Object& object = thread->ObjectHandle();
  dispatcher_cache ^= invocation_dispatcher_cache();
  object = dispatcher_cache.At(idx);
  if (!object.IsFunction()) {
    return Function::null();
  }
  return Function::Cast(object).raw();
}
// Stores the signature function; only closure or signature functions are
// valid here.
void Class::set_signature_function(const Function& value) const {
  ASSERT(value.IsClosureFunction() || value.IsSignatureFunction());
  StorePointer(&raw_ptr()->signature_function_, value.raw());
}
// Stores the class state bits; the field is 16 bits wide, so the value is
// truncated via the explicit cast.
void Class::set_state_bits(intptr_t bits) const {
  StoreNonPointer(&raw_ptr()->state_bits_, static_cast<uint16_t>(bits));
}
// Sets the library that owns this class.
void Class::set_library(const Library& value) const {
  StorePointer(&raw_ptr()->library_, value.raw());
}
// Sets the vector of this class's declared type parameters.
void Class::set_type_parameters(const TypeArguments& value) const {
  StorePointer(&raw_ptr()->type_parameters_, value.raw());
}
// Returns the number of type parameters declared by this class, applying the
// mixin type first if necessary. The core list classes report 1 even before
// their type parameter has been parsed.
intptr_t Class::NumTypeParameters(Thread* thread) const {
  if (IsMixinApplication() && !is_mixin_type_applied()) {
    ClassFinalizer::ApplyMixinType(*this);
  }
  if (type_parameters() == TypeArguments::null()) {
    const intptr_t cid = id();
    if ((cid == kArrayCid) || (cid == kImmutableArrayCid) ||
        (cid == kGrowableObjectArrayCid)) {
      return 1;  // List's type parameter may not have been parsed yet.
    }
    return 0;
  }
  // Use the thread-local reusable handle to read the vector's length.
  REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread);
  TypeArguments& type_params = thread->TypeArgumentsHandle();
  type_params = type_parameters();
  return type_params.Length();
}
// Returns (computing and caching on first call) the number of type arguments
// that belong to this class itself, i.e. its type parameter count minus any
// suffix of the super type's argument vector that exactly matches a prefix
// of this class's type parameters (such overlapping arguments are shared).
intptr_t Class::NumOwnTypeArguments() const {
  // Return cached value if already calculated.
  if (num_own_type_arguments() != kUnknownNumTypeArguments) {
    return num_own_type_arguments();
  }
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  Zone* zone = thread->zone();
  const intptr_t num_type_params = NumTypeParameters();
  // Fast path: no type parameters, or no generic super type to overlap with.
  if ((num_type_params == 0) || (super_type() == AbstractType::null()) ||
      (super_type() == isolate->object_store()->object_type())) {
    set_num_own_type_arguments(num_type_params);
    return num_type_params;
  }
  ASSERT(!IsMixinApplication() || is_mixin_type_applied());
  const AbstractType& sup_type = AbstractType::Handle(zone, super_type());
  const TypeArguments& sup_type_args =
      TypeArguments::Handle(zone, sup_type.arguments());
  if (sup_type_args.IsNull()) {
    // The super type is raw or the super class is non generic.
    // In either case, overlapping is not possible.
    set_num_own_type_arguments(num_type_params);
    return num_type_params;
  }
  const intptr_t num_sup_type_args = sup_type_args.Length();
  // At this point, the super type may or may not be finalized. In either case,
  // the result of this function must remain the same.
  // The value of num_sup_type_args may increase when the super type is
  // finalized, but the last num_sup_type_args type arguments will not be
  // modified by finalization, only shifted to higher indices in the vector.
  // They may however get wrapped in a BoundedType, which we skip.
  // The super type may not even be resolved yet. This is not necessary, since
  // we only check for matching type parameters, which are resolved by default.
  const TypeArguments& type_params =
      TypeArguments::Handle(zone, type_parameters());
  // Determine the maximum overlap of a prefix of the vector consisting of the
  // type parameters of this class with a suffix of the vector consisting of the
  // type arguments of the super type of this class.
  // The number of own type arguments of this class is the number of its type
  // parameters minus the number of type arguments in the overlap.
  // Attempt to overlap the whole vector of type parameters; reduce the size
  // of the vector (keeping the first type parameter) until it fits or until
  // its size is zero.
  TypeParameter& type_param = TypeParameter::Handle(zone);
  AbstractType& sup_type_arg = AbstractType::Handle(zone);
  for (intptr_t num_overlapping_type_args =
           (num_type_params < num_sup_type_args) ? num_type_params
                                                 : num_sup_type_args;
       num_overlapping_type_args > 0; num_overlapping_type_args--) {
    intptr_t i = 0;
    for (; i < num_overlapping_type_args; i++) {
      type_param ^= type_params.TypeAt(i);
      sup_type_arg = sup_type_args.TypeAt(num_sup_type_args -
                                          num_overlapping_type_args + i);
      // BoundedType can nest in case the finalized super type has bounded type
      // arguments that overlap multiple times in its own super class chain.
      while (sup_type_arg.IsBoundedType()) {
        sup_type_arg = BoundedType::Cast(sup_type_arg).type();
      }
      if (!type_param.Equals(sup_type_arg)) break;
    }
    if (i == num_overlapping_type_args) {
      // Overlap found.
      set_num_own_type_arguments(num_type_params - num_overlapping_type_args);
      return num_type_params - num_overlapping_type_args;
    }
  }
  // No overlap found.
  set_num_own_type_arguments(num_type_params);
  return num_type_params;
}
// Returns (computing and caching on first call) the total length of the type
// argument vector of instances of this class: the sum of the own type
// argument counts along the whole superclass chain.
intptr_t Class::NumTypeArguments() const {
  // Return cached value if already calculated.
  if (num_type_arguments() != kUnknownNumTypeArguments) {
    return num_type_arguments();
  }
  // To work properly, this call requires the super class of this class to be
  // resolved, which is checked by the type_class() call on the super type.
  // Note that calling type_class() on a MixinAppType fails.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  Class& cls = Class::Handle(zone);
  AbstractType& sup_type = AbstractType::Handle(zone);
  cls = raw();
  intptr_t num_type_args = 0;
  do {
    // Calling NumOwnTypeArguments() on a mixin application class will setup the
    // type parameters if not already done.
    num_type_args += cls.NumOwnTypeArguments();
    // Super type of Object class is null.
    if ((cls.super_type() == AbstractType::null()) ||
        (cls.super_type() == isolate->object_store()->object_type())) {
      break;
    }
    sup_type = cls.super_type();
    // A BoundedType, TypeRef, or function type can appear as type argument of
    // sup_type, but not as sup_type itself.
    ASSERT(sup_type.IsType());
    ClassFinalizer::ResolveTypeClass(cls, Type::Cast(sup_type));
    cls = sup_type.type_class();
    ASSERT(!cls.IsTypedefClass());
  } while (true);
  set_num_type_arguments(num_type_args);
  return num_type_args;
}
// Returns the superclass of this class (null for classes without a super
// type, except TypeArguments which pretends to extend Instance). With
// |original_classes| set, the pre-reload class for the id is returned.
RawClass* Class::SuperClass(bool original_classes) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  if (super_type() == AbstractType::null()) {
    if (id() == kTypeArgumentsCid) {
      // Pretend TypeArguments objects are Dart instances.
      return isolate->class_table()->At(kInstanceCid);
    }
    return Class::null();
  }
  const AbstractType& sup_type = AbstractType::Handle(zone, super_type());
  const intptr_t type_class_id = sup_type.type_class_id();
  if (original_classes) {
    return isolate->GetClassForHeapWalkAt(type_class_id);
  } else {
    return isolate->class_table()->At(type_class_id);
  }
}
// Sets this class's super type; it must be null, a (non-dynamic) Type, or a
// not-yet-applied mixin application type.
void Class::set_super_type(const AbstractType& value) const {
  ASSERT(value.IsNull() || (value.IsType() && !value.IsDynamicType()) ||
         value.IsMixinAppType());
  StorePointer(&raw_ptr()->super_type_, value.raw());
}
// Linear search of this class's declared type parameters for one whose name
// equals |type_name|; returns null if there is no match.
RawTypeParameter* Class::LookupTypeParameter(const String& type_name) const {
  ASSERT(!type_name.IsNull());
  Thread* thread = Thread::Current();
  // Reusable handles avoid zone allocation during the scan.
  REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread);
  REUSABLE_TYPE_PARAMETER_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  TypeArguments& type_params = thread->TypeArgumentsHandle();
  TypeParameter& type_param = thread->TypeParameterHandle();
  String& type_param_name = thread->StringHandle();
  type_params ^= type_parameters();
  if (!type_params.IsNull()) {
    const intptr_t num_type_params = type_params.Length();
    for (intptr_t i = 0; i < num_type_params; i++) {
      type_param ^= type_params.TypeAt(i);
      type_param_name = type_param.name();
      if (type_param_name.Equals(type_name)) {
        return type_param.raw();
      }
    }
  }
  return TypeParameter::null();
}
// Assigns instance-field offsets for this class, starting where the
// superclass's fields end, allocating a type_arguments slot if this is the
// first generic class in the chain, and finally recording the instance size
// and next free field offset.
void Class::CalculateFieldOffsets() const {
  Array& flds = Array::Handle(fields());
  const Class& super = Class::Handle(SuperClass());
  intptr_t offset = 0;
  intptr_t type_args_field_offset = kNoTypeArguments;
  if (super.IsNull()) {
    // Root of the hierarchy: fields start right after the object header.
    offset = Instance::NextFieldOffset();
    ASSERT(offset > 0);
  } else {
    ASSERT(super.is_finalized() || super.is_prefinalized());
    type_args_field_offset = super.type_arguments_field_offset();
    offset = super.next_field_offset();
    ASSERT(offset > 0);
    // We should never call CalculateFieldOffsets for native wrapper
    // classes, assert this.
    ASSERT(num_native_fields() == 0);
    set_num_native_fields(super.num_native_fields());
  }
  // If the super class is parameterized, use the same type_arguments field,
  // otherwise, if this class is the first in the super chain to be
  // parameterized, introduce a new type_arguments field.
  if (type_args_field_offset == kNoTypeArguments) {
    const TypeArguments& type_params = TypeArguments::Handle(type_parameters());
    if (!type_params.IsNull()) {
      ASSERT(type_params.Length() > 0);
      // The instance needs a type_arguments field.
      type_args_field_offset = offset;
      offset += kWordSize;
    }
  }
  set_type_arguments_field_offset(type_args_field_offset);
  ASSERT(offset > 0);
  // Assign one word per declared instance field, in declaration order.
  Field& field = Field::Handle();
  intptr_t len = flds.Length();
  for (intptr_t i = 0; i < len; i++) {
    field ^= flds.At(i);
    // Offset is computed only for instance fields.
    if (!field.is_static()) {
      ASSERT(field.Offset() == 0);
      field.SetOffset(offset);
      offset += kWordSize;
    }
  }
  set_instance_size(RoundedAllocationSize(offset));
  set_next_field_offset(offset);
}
// Layout of one entry in the invocation dispatcher cache: target name,
// arguments descriptor, and the dispatcher function (kEntrySize slots each).
struct InvocationDispatcherCacheLayout {
  enum { kNameIndex = 0, kArgsDescIndex, kFunctionIndex, kEntrySize };
};
// Inserts a (name, args descriptor, dispatcher) triple into the first free
// entry of the invocation dispatcher cache, doubling the cache when full.
void Class::AddInvocationDispatcher(const String& target_name,
                                    const Array& args_desc,
                                    const Function& dispatcher) const {
  // Search for a free entry.
  Array& cache = Array::Handle(invocation_dispatcher_cache());
  intptr_t i = 0;
  while (i < cache.Length() && cache.At(i) != Object::null()) {
    i += InvocationDispatcherCacheLayout::kEntrySize;
  }
  if (i == cache.Length()) {
    // Allocate new larger cache.
    intptr_t new_len =
        (cache.Length() == 0)
            ? static_cast<intptr_t>(InvocationDispatcherCacheLayout::kEntrySize)
            : cache.Length() * 2;
    cache ^= Array::Grow(cache, new_len);
    set_invocation_dispatcher_cache(cache);
  }
  // Fill in the three slots of the entry.
  cache.SetAt(i + InvocationDispatcherCacheLayout::kNameIndex, target_name);
  cache.SetAt(i + InvocationDispatcherCacheLayout::kArgsDescIndex, args_desc);
  cache.SetAt(i + InvocationDispatcherCacheLayout::kFunctionIndex, dispatcher);
}
// Looks up a cached invocation dispatcher matching (target_name, args_desc,
// kind). When no match exists and |create_if_absent| is set, a new
// dispatcher is created and cached; otherwise null is returned.
RawFunction* Class::GetInvocationDispatcher(const String& target_name,
                                            const Array& args_desc,
                                            RawFunction::Kind kind,
                                            bool create_if_absent) const {
  ASSERT(kind == RawFunction::kNoSuchMethodDispatcher ||
         kind == RawFunction::kInvokeFieldDispatcher ||
         kind == RawFunction::kDynamicInvocationForwarder);
  Function& dispatcher = Function::Handle();
  Array& cache = Array::Handle(invocation_dispatcher_cache());
  ASSERT(!cache.IsNull());
  String& name = String::Handle();
  Array& desc = Array::Handle();
  intptr_t i = 0;
  // Scan entries until a full match or the first unused (null-name) slot.
  for (; i < cache.Length(); i += InvocationDispatcherCacheLayout::kEntrySize) {
    name ^= cache.At(i + InvocationDispatcherCacheLayout::kNameIndex);
    if (name.IsNull()) break;  // Reached last entry.
    if (!name.Equals(target_name)) continue;
    desc ^= cache.At(i + InvocationDispatcherCacheLayout::kArgsDescIndex);
    // Args descriptors are compared by identity.
    if (desc.raw() != args_desc.raw()) continue;
    dispatcher ^= cache.At(i + InvocationDispatcherCacheLayout::kFunctionIndex);
    if (dispatcher.kind() == kind) {
      // Found match.
      ASSERT(dispatcher.IsFunction());
      break;
    }
  }
  if (dispatcher.IsNull() && create_if_absent) {
    dispatcher ^= CreateInvocationDispatcher(target_name, args_desc, kind);
    AddInvocationDispatcher(target_name, args_desc, dispatcher);
  }
  return dispatcher.raw();
}
// Builds a new invocation dispatcher function for |target_name| whose
// signature (receiver, positional parameters, named parameters, optional
// type parameters) is derived from |args_desc|. The dispatcher is invisible,
// non-debuggable and non-reflectable.
RawFunction* Class::CreateInvocationDispatcher(const String& target_name,
                                               const Array& args_desc,
                                               RawFunction::Kind kind) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& invocation = Function::Handle(
      zone, Function::New(
                String::Handle(zone, Symbols::New(thread, target_name)), kind,
                false,  // Not static.
                false,  // Not const.
                false,  // Not abstract.
                false,  // Not external.
                false,  // Not native.
                *this, TokenPosition::kMinSource));
  ArgumentsDescriptor desc(args_desc);
  if (desc.TypeArgsLen() > 0) {
    // Make dispatcher function generic, since type arguments are passed.
    const TypeArguments& type_params =
        TypeArguments::Handle(zone, TypeArguments::New(desc.TypeArgsLen()));
    // The presence of a type parameter array is enough to mark this dispatcher
    // as generic. To save memory, we do not copy the type parameters to the
    // array (they are not accessed), but leave it as an array of null objects.
    invocation.set_type_parameters(type_params);
  }
  invocation.set_num_fixed_parameters(desc.PositionalCount());
  invocation.SetNumOptionalParameters(desc.NamedCount(),
                                      false);  // Not positional.
  invocation.set_parameter_types(
      Array::Handle(zone, Array::New(desc.Count(), Heap::kOld)));
  invocation.set_parameter_names(
      Array::Handle(zone, Array::New(desc.Count(), Heap::kOld)));
  // Receiver.
  invocation.SetParameterTypeAt(0, Object::dynamic_type());
  invocation.SetParameterNameAt(0, Symbols::This());
  // Remaining positional parameters.
  intptr_t i = 1;
  for (; i < desc.PositionalCount(); i++) {
    invocation.SetParameterTypeAt(i, Object::dynamic_type());
    // Positional parameters get synthetic names of the form ":p<i>".
    char name[64];
    Utils::SNPrint(name, 64, ":p%" Pd, i);
    invocation.SetParameterNameAt(
        i, String::Handle(zone, Symbols::New(thread, name)));
  }
  // Named parameters.
  for (; i < desc.Count(); i++) {
    invocation.SetParameterTypeAt(i, Object::dynamic_type());
    intptr_t index = i - desc.PositionalCount();
    invocation.SetParameterNameAt(i, String::Handle(zone, desc.NameAt(index)));
  }
  invocation.set_result_type(Object::dynamic_type());
  invocation.set_is_debuggable(false);
  invocation.set_is_visible(false);
  invocation.set_is_reflectable(false);
  invocation.set_saved_args_desc(args_desc);
  return invocation.raw();
}
// Method extractors are used to create implicit closures from methods.
// When an expression obj.M is evaluated for the first time and receiver obj
// does not have a getter called M but has a method called M then an extractor
// is created and injected as a getter (under the name get:M) into the class
// owning method M.
RawFunction* Function::CreateMethodExtractor(const String& getter_name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(Field::IsGetterName(getter_name));
  const Function& closure_function =
      Function::Handle(zone, ImplicitClosureFunction());
  // The extractor is owned by the same class that owns the implicit closure.
  const Class& owner = Class::Handle(zone, closure_function.Owner());
  Function& extractor = Function::Handle(
      zone,
      Function::New(String::Handle(zone, Symbols::New(thread, getter_name)),
                    RawFunction::kMethodExtractor,
                    false,  // Not static.
                    false,  // Not const.
                    false,  // Not abstract.
                    false,  // Not external.
                    false,  // Not native.
                    owner, TokenPosition::kMethodExtractor));
  // Initialize signature: receiver is a single fixed parameter.
  const intptr_t kNumParameters = 1;
  extractor.set_num_fixed_parameters(kNumParameters);
  // Pass `false` (not the integer 0) for the positional flag, matching the
  // style used by Class::CreateInvocationDispatcher.
  extractor.SetNumOptionalParameters(0, false);  // Not positional.
  extractor.set_parameter_types(Object::extractor_parameter_types());
  extractor.set_parameter_names(Object::extractor_parameter_names());
  extractor.set_result_type(Object::dynamic_type());
  extractor.set_kernel_offset(kernel_offset());
  extractor.set_extracted_method_closure(closure_function);
  extractor.set_is_debuggable(false);
  extractor.set_is_visible(false);
  owner.AddFunction(extractor);
  return extractor.raw();
}
// Returns the method extractor registered for [getter_name] on this method's
// owner class, creating and installing one on first use.
RawFunction* Function::GetMethodExtractor(const String& getter_name) const {
  ASSERT(Field::IsGetterName(getter_name));
  const Function& closure =
      Function::Handle(ImplicitClosureFunction());
  const Class& owner = Class::Handle(closure.Owner());
  Function& extractor =
      Function::Handle(owner.LookupDynamicFunction(getter_name));
  if (extractor.IsNull()) {
    // Not yet created: synthesize one and inject it into the owner class.
    extractor ^= CreateMethodExtractor(getter_name);
  }
  ASSERT(extractor.kind() == RawFunction::kMethodExtractor);
  return extractor.raw();
}
// Searches this function's library metadata for a `pragma` annotation whose
// `name` field equals [pragma_name]. On a match, stores the annotation's
// `options` field into [options] and returns true; otherwise returns false.
bool Function::FindPragma(Isolate* I,
                          const String& pragma_name,
                          Object* options) const {
  if (!has_pragma()) return false;
  auto& klass = Class::Handle(Owner());
  auto& lib = Library::Handle(klass.library());
  // Use the caller-supplied isolate [I]; the parameter was previously accepted
  // but ignored in favor of an equivalent Isolate::Current() lookup.
  auto& pragma_class = Class::Handle(I->object_store()->pragma_class());
  auto& pragma_name_field =
      Field::Handle(pragma_class.LookupField(Symbols::name()));
  auto& pragma_options_field =
      Field::Handle(pragma_class.LookupField(Symbols::options()));
  Array& metadata = Array::Handle();
  metadata ^= lib.GetMetadata(Function::Handle(raw()));
  if (metadata.IsNull()) return false;
  auto& pragma = Object::Handle();
  for (intptr_t i = 0; i < metadata.Length(); ++i) {
    pragma = metadata.At(i);
    // Skip annotations that are not pragma instances or whose name differs.
    if (pragma.clazz() != pragma_class.raw() ||
        Instance::Cast(pragma).GetField(pragma_name_field) !=
            pragma_name.raw()) {
      continue;
    }
    *options = Instance::Cast(pragma).GetField(pragma_options_field);
    return true;
  }
  return false;
}
// Returns whether [name] is a mangled dynamic-invocation-forwarder name,
// i.e. whether it starts with the "dyn:" prefix symbol.
// NOTE(review): "Forwader" is a typo for "Forwarder"; the misspelled name is
// preserved because external callers reference it.
bool Function::IsDynamicInvocationForwaderName(const String& name) {
  return name.StartsWith(Symbols::DynamicPrefix());
}
// Strips the "dyn:" prefix from a mangled dynamic-invocation-forwarder name
// and returns the remaining member name as a symbol.
RawString* Function::DemangleDynamicInvocationForwarderName(
    const String& name) {
  const intptr_t kDynamicPrefixLength = 4;  // "dyn:"
  ASSERT(Symbols::DynamicPrefix().Length() == kDynamicPrefixLength);
  return Symbols::New(Thread::Current(), name, kDynamicPrefixLength,
                      name.Length() - kDynamicPrefixLength);
}
#if !defined(DART_PRECOMPILED_RUNTIME)
// Creates a clone of this function registered under [mangled_name] with kind
// kDynamicInvocationForwarder. The clone's JIT state (code, IC data, usage
// and optimization counters) is reset so it compiles independently of the
// original.
RawFunction* Function::CreateDynamicInvocationForwarder(
    const String& mangled_name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& forwarder = Function::Handle(zone);
  // Start from a clone so the forwarder shares the original's signature.
  forwarder ^= Object::Clone(*this, Heap::kOld);
  forwarder.set_name(mangled_name);
  forwarder.set_kind(RawFunction::kDynamicInvocationForwarder);
  forwarder.set_is_debuggable(false);
  // TODO(vegorov) for error reporting reasons it is better to make this
  // function visible and instead use a TailCall to invoke the target.
  // Our TailCall instruction is not ready for such usage though it
  // blocks inlining and can't take Function-s only Code objects.
  forwarder.set_is_visible(false);
  // Reset all compilation state inherited from the clone source.
  forwarder.ClearICDataArray();
  forwarder.ClearCode();
  forwarder.set_usage_counter(0);
  forwarder.set_deoptimization_counter(0);
  forwarder.set_optimized_instruction_count(0);
  forwarder.set_inlining_depth(0);
  forwarder.set_optimized_call_site_count(0);
  forwarder.set_kernel_offset(kernel_offset());
  return forwarder.raw();
}
// Returns the mangled forwarder name for [name]: the "dyn:" prefix symbol
// concatenated with the original name.
RawString* Function::CreateDynamicInvocationForwarderName(const String& name) {
  return Symbols::FromConcat(Thread::Current(), Symbols::DynamicPrefix(), name);
}
// Returns the dynamic invocation forwarder for [mangled_name], consulting the
// owner class's invocation dispatcher cache first. When no forwarder is
// needed, the function itself is cached and returned under the mangled name.
// With allow_add == false and no cached entry, may return Function::null().
RawFunction* Function::GetDynamicInvocationForwarder(
    const String& mangled_name,
    bool allow_add /* = true */) const {
  ASSERT(IsDynamicInvocationForwaderName(mangled_name));
  const Class& owner = Class::Handle(Owner());
  // Cache lookup only; creation is handled below so the "no forwarder needed"
  // case can also be cached.
  Function& result = Function::Handle(owner.GetInvocationDispatcher(
      mangled_name, Array::null_array(),
      RawFunction::kDynamicInvocationForwarder, /*create_if_absent=*/false));
  if (!result.IsNull()) {
    return result.raw();
  }
  // Check if function actually needs a dynamic invocation forwarder.
  if (!kernel::NeedsDynamicInvocationForwarder(*this)) {
    result = raw();
  } else if (allow_add) {
    result = CreateDynamicInvocationForwarder(mangled_name);
  }
  if (allow_add) {
    owner.AddInvocationDispatcher(mangled_name, Array::null_array(), result);
  }
  return result.raw();
}
#endif
// Instantiates [subtype] and [supertype] in place using the given type
// arguments, then reports whether subtype is a subtype of supertype.
// Returns false immediately if any step records a bound error into
// [bound_error]; callers must check *bound_error to distinguish "not a
// subtype" from "bound error occurred".
bool AbstractType::InstantiateAndTestSubtype(
    AbstractType* subtype,
    AbstractType* supertype,
    Error* bound_error,
    const TypeArguments& instantiator_type_args,
    const TypeArguments& function_type_args) {
  if (!subtype->IsInstantiated()) {
    *subtype =
        subtype->InstantiateFrom(instantiator_type_args, function_type_args,
                                 kAllFree, bound_error, NULL, NULL, Heap::kOld);
  }
  if (!bound_error->IsNull()) {
    return false;
  }
  if (!supertype->IsInstantiated()) {
    *supertype = supertype->InstantiateFrom(
        instantiator_type_args, function_type_args, kAllFree, bound_error, NULL,
        NULL, Heap::kOld);
  }
  if (!bound_error->IsNull()) {
    return false;
  }
  bool is_subtype_of =
      subtype->IsSubtypeOf(*supertype, bound_error, NULL, Heap::kOld);
  if (!bound_error->IsNull()) {
    return false;
  }
  return is_subtype_of;
}
// Raw accessor for the per-class invocation dispatcher cache array.
RawArray* Class::invocation_dispatcher_cache() const {
  return raw_ptr()->invocation_dispatcher_cache_;
}
// Installs a new invocation dispatcher cache array (GC-safe pointer store).
void Class::set_invocation_dispatcher_cache(const Array& cache) const {
  StorePointer(&raw_ptr()->invocation_dispatcher_cache_, cache.raw());
}
// Finalizes this class: computes field offsets and instance size (unless the
// class is prefinalized) and marks the class finalized. Must run on the
// mutator thread before all-classes finalization has completed.
void Class::Finalize() const {
  Isolate* isolate = Isolate::Current();
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(!isolate->all_classes_finalized());
  ASSERT(!is_finalized());
  // Prefinalized classes have a VM internal representation and no Dart fields.
  // Their instance size is precomputed and field offsets are known.
  if (!is_prefinalized()) {
    // Compute offsets of instance fields and instance size.
    CalculateFieldOffsets();
    if (raw() == isolate->class_table()->At(id())) {
      // Sets the new size in the class table.
      isolate->class_table()->SetAt(id(), raw());
    }
  }
  set_is_finalized();
}
// Weak-reference wrapper over a class's dependent-code array: code compiled
// under CHA (class hierarchy analysis) assumptions about that class. Used to
// deoptimize/switch such code when the hierarchy changes.
class CHACodeArray : public WeakCodeReferences {
 public:
  explicit CHACodeArray(const Class& cls)
      : WeakCodeReferences(Array::Handle(cls.dependent_code())), cls_(cls) {}
  // Writes the (possibly grown/compacted) array back onto the class.
  virtual void UpdateArrayTo(const Array& value) {
    // TODO(fschneider): Fails for classes in the VM isolate.
    cls_.set_dependent_code(value);
  }
  virtual void ReportDeoptimization(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print("Deoptimizing %s because CHA optimized (%s).\n",
                function.ToFullyQualifiedCString(), cls_.ToCString());
    }
  }
  virtual void ReportSwitchingCode(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print(
          "Switching %s to unoptimized code because CHA invalid"
          " (%s)\n",
          function.ToFullyQualifiedCString(), cls_.ToCString());
    }
  }
 private:
  const Class& cls_;
  DISALLOW_COPY_AND_ASSIGN(CHACodeArray);
};
#if defined(DEBUG)
// True when the current thread may safely inspect class state: either it is
// the mutator thread, or all threads are parked at a safepoint.
static bool IsMutatorOrAtSafepoint() {
  Thread* current = Thread::Current();
  if (current->IsMutatorThread()) {
    return true;
  }
  return current->IsAtSafepoint();
}
#endif
// Records [code] as depending on CHA assumptions about this class, so it can
// be invalidated if the class hierarchy changes. Only optimized code is
// registered.
void Class::RegisterCHACode(const Code& code) {
  if (FLAG_trace_cha) {
    THR_Print("RegisterCHACode '%s' depends on class '%s'\n",
              Function::Handle(code.function()).ToQualifiedCString(),
              ToCString());
  }
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  ASSERT(code.is_optimized());
  CHACodeArray a(*this);
  a.Register(code);
}
void Class::DisableCHAOptimizedCode(const Class& subclass) {
ASSERT(Thread::Current()->IsMutatorThread());
CHACodeArray a(*this);
if (FLAG_trace_deoptimization && a.HasCodes()) {
if (subclass.IsNull()) {
THR_Print("Deopt for CHA (all)\n");
} else {
THR_Print("Deopt for CHA (new subclass %s)\n", subclass.ToCString());
}
}
a.DisableCode();
}
// Disables every piece of CHA-dependent code for this class (the null
// subclass handle signals "all").
void Class::DisableAllCHAOptimizedCode() {
  DisableCHAOptimizedCode(Class::Handle());
}
// Returns whether allocation tracing is enabled for this class's cid.
// Always false in PRODUCT builds, where the tracing table does not exist.
bool Class::TraceAllocation(Isolate* isolate) const {
#ifndef PRODUCT
  ClassTable* class_table = isolate->class_table();
  return class_table->TraceAllocationFor(id());
#else
  return false;
#endif
}
// Toggles allocation tracing for this class. On a state change the allocation
// stub is disabled so the next allocation goes through the traced path.
// Unreachable in PRODUCT builds.
void Class::SetTraceAllocation(bool trace_allocation) const {
#ifndef PRODUCT
  Isolate* isolate = Isolate::Current();
  const bool changed = trace_allocation != this->TraceAllocation(isolate);
  if (changed) {
    ClassTable* class_table = isolate->class_table();
    class_table->SetTraceAllocationFor(id(), trace_allocation);
    // Force regeneration of the allocation stub so tracing takes effect.
    DisableAllocationStub();
  }
#else
  UNREACHABLE();
#endif
}
// Validates that this patch class may be applied to the already-finalized
// [orig_class]: no new fields, at least one function, every patched function
// must be an unexecuted external in the original, and any added function must
// be private. On failure, stores a LanguageError into [error] and returns
// false.
bool Class::ValidatePostFinalizePatch(const Class& orig_class,
                                      Error* error) const {
  ASSERT(error != NULL);
  // Not allowed to add new fields in a post finalization patch.
  if (fields() != Object::empty_array().raw()) {
    *error = LanguageError::NewFormatted(
        *error,  // No previous error.
        Script::Handle(script()), token_pos(), Report::AtLocation,
        Report::kError, Heap::kNew,
        "new fields are not allowed for this patch");
    return false;
  }
  // There seem to be no functions, the patch is pointless.
  if (functions() == Object::empty_array().raw()) {
    *error = LanguageError::NewFormatted(*error,  // No previous error.
                                         Script::Handle(script()), token_pos(),
                                         Report::AtLocation, Report::kError,
                                         Heap::kNew, "no functions to patch");
    return false;
  }
  // Iterate over all functions that will be patched and make sure
  // the original function was declared 'external' and has not executed
  // so far i.e no code has been generated for it.
  Thread* thread = Thread::Current();
  ASSERT(thread->IsMutatorThread());
  Zone* zone = thread->zone();
  const Array& funcs = Array::Handle(zone, functions());
  Function& func = Function::Handle(zone);
  Function& orig_func = Function::Handle(zone);
  String& name = String::Handle(zone);
  for (intptr_t i = 0; i < funcs.Length(); i++) {
    func ^= funcs.At(i);
    name ^= func.name();
    orig_func ^= orig_class.LookupFunctionAllowPrivate(name);
    if (!orig_func.IsNull()) {
      if (!orig_func.is_external() || orig_func.HasCode()) {
        // We can only patch external functions in a post finalized class.
        *error = LanguageError::NewFormatted(
            *error,  // No previous error.
            Script::Handle(script()), token_pos(), Report::AtLocation,
            Report::kError, Heap::kNew,
            !orig_func.is_external()
                ? "'%s' is not external and therefore cannot be patched"
                : "'%s' has already executed and therefore cannot be patched",
            name.ToCString());
        return false;
      }
    } else if (!Library::IsPrivate(name)) {
      // We can only have new private functions that are added.
      *error = LanguageError::NewFormatted(
          *error,  // No previous error.
          Script::Handle(script()), token_pos(), Report::AtLocation,
          Report::kError, Heap::kNew,
          "'%s' is not private and therefore cannot be patched",
          name.ToCString());
      return false;
    }
  }
  return true;
}
// Replaces this class's CHA dependent-code array (GC-safe pointer store).
void Class::set_dependent_code(const Array& array) const {
  StorePointer(&raw_ptr()->dependent_code_, array.raw());
}
// Apply the members from the patch class to the original class.
// Functions in the patch replace same-named originals (signatures must match
// on their user-visible form); fields are merged with duplicates rejected.
// On failure, stores a LanguageError into [error] and returns false. On
// success, the patch class's member arrays are cleared and the patch class is
// removed from its library.
bool Class::ApplyPatch(const Class& patch, Error* error) const {
  ASSERT(error != NULL);
  ASSERT(!is_finalized());
  // Shared handles used during the iteration.
  String& member_name = String::Handle();
  const PatchClass& patch_class = PatchClass::Handle(
      PatchClass::New(*this, Script::Handle(patch.script())));
  Array& orig_list = Array::Handle(functions());
  intptr_t orig_len = orig_list.Length();
  Array& patch_list = Array::Handle(patch.functions());
  intptr_t patch_len = patch_list.Length();
  Function& func = Function::Handle();
  Function& orig_func = Function::Handle();
  // Lookup the original implicit constructor, if any.
  // Its name is the class name followed by a dot.
  member_name = Name();
  member_name = String::Concat(member_name, Symbols::Dot());
  Function& orig_implicit_ctor = Function::Handle(LookupFunction(member_name));
  if (!orig_implicit_ctor.IsNull() &&
      !orig_implicit_ctor.IsImplicitConstructor()) {
    // Not an implicit constructor, but a user declared one.
    orig_implicit_ctor = Function::null();
  }
  const GrowableObjectArray& new_functions =
      GrowableObjectArray::Handle(GrowableObjectArray::New(orig_len));
  for (intptr_t i = 0; i < orig_len; i++) {
    orig_func ^= orig_list.At(i);
    member_name ^= orig_func.name();
    func = patch.LookupFunction(member_name);
    if (func.IsNull()) {
      // Non-patched function is preserved, all patched functions are added in
      // the loop below.
      // However, an implicitly created constructor should not be preserved if
      // the patch provides a constructor or a factory. Wait for now.
      if (orig_func.raw() != orig_implicit_ctor.raw()) {
        new_functions.Add(orig_func);
      }
    } else if (func.UserVisibleSignature() !=
               orig_func.UserVisibleSignature()) {
      // Compare user visible signatures to ignore different implicit parameters
      // when patching a constructor with a factory.
      *error = LanguageError::NewFormatted(
          *error,  // No previous error.
          Script::Handle(patch.script()), func.token_pos(), Report::AtLocation,
          Report::kError, Heap::kNew, "signature mismatch: '%s'",
          member_name.ToCString());
      return false;
    }
  }
  for (intptr_t i = 0; i < patch_len; i++) {
    func ^= patch_list.At(i);
    if (func.IsGenerativeConstructor() || func.IsFactory()) {
      // Do not preserve the original implicit constructor, if any.
      orig_implicit_ctor = Function::null();
    }
    // Re-own the patch function so it reports the original class as owner.
    func.set_owner(patch_class);
    new_functions.Add(func);
  }
  if (!orig_implicit_ctor.IsNull()) {
    // Preserve the original implicit constructor.
    new_functions.Add(orig_implicit_ctor);
  }
  Array& new_list = Array::Handle(Array::MakeFixedLength(new_functions));
  SetFunctions(new_list);
  // Merge the two list of fields. Raise an error when duplicates are found or
  // when a public field is being added.
  orig_list = fields();
  orig_len = orig_list.Length();
  patch_list = patch.fields();
  patch_len = patch_list.Length();
  Field& field = Field::Handle();
  Field& orig_field = Field::Handle();
  new_list = Array::New(patch_len + orig_len);
  for (intptr_t i = 0; i < patch_len; i++) {
    field ^= patch_list.At(i);
    field.set_owner(patch_class);
    member_name = field.name();
    // TODO(iposva): Verify non-public fields only.
    // Verify no duplicate additions.
    orig_field ^= LookupField(member_name);
    if (!orig_field.IsNull()) {
      *error = LanguageError::NewFormatted(
          *error,  // No previous error.
          Script::Handle(patch.script()), field.token_pos(), Report::AtLocation,
          Report::kError, Heap::kNew, "duplicate field: %s",
          member_name.ToCString());
      return false;
    }
    new_list.SetAt(i, field);
  }
  for (intptr_t i = 0; i < orig_len; i++) {
    field ^= orig_list.At(i);
    new_list.SetAt(patch_len + i, field);
  }
  SetFields(new_list);
  // The functions and fields in the patch class are no longer needed.
  // The patch class itself is also no longer needed.
  patch.SetFunctions(Object::empty_array());
  patch.SetFields(Object::empty_array());
  Library::Handle(patch.library()).RemovePatchClass(patch);
  return true;
}
// Stub: source-based expression evaluation is not supported in this
// configuration; reaching this is a programming error.
RawFunction* Function::EvaluateHelper(const Class& cls,
                                      const String& expr,
                                      const Array& param_names,
                                      bool is_static) {
  UNREACHABLE();
  return Function::null();
}
// Conventions:
// * For throwing a NSM in a class klass we use its runtime type as receiver,
//   i.e., klass.RareType().
// * For throwing a NSM in a library, we just pass the null instance as
//   receiver.
// Invokes NoSuchMethodError._throwNew with the six arguments it expects:
// receiver, member name, encoded invocation type, type arguments, positional
// arguments, and argument names.
static RawObject* ThrowNoSuchMethod(const Instance& receiver,
                                    const String& function_name,
                                    const Array& arguments,
                                    const Array& argument_names,
                                    const InvocationMirror::Level level,
                                    const InvocationMirror::Kind kind) {
  const Smi& invocation_type =
      Smi::Handle(Smi::New(InvocationMirror::EncodeType(level, kind)));
  const Array& args = Array::Handle(Array::New(6));
  args.SetAt(0, receiver);
  args.SetAt(1, function_name);
  args.SetAt(2, invocation_type);
  // TODO(regis): Support invocation of generic functions with type arguments.
  args.SetAt(3, Object::null_type_arguments());
  args.SetAt(4, arguments);
  args.SetAt(5, argument_names);
  const Library& libcore = Library::Handle(Library::CoreLibrary());
  const Class& NoSuchMethodError =
      Class::Handle(libcore.LookupClass(Symbols::NoSuchMethodError()));
  const Function& throwNew = Function::Handle(
      NoSuchMethodError.LookupFunctionAllowPrivate(Symbols::ThrowNew()));
  return DartEntry::InvokeFunction(throwNew, args);
}
// Invokes _TypeError._throwNew to report that [src_value] failed an
// assignment/cast to [dst_type] for [dst_name] at [token_pos].
static RawObject* ThrowTypeError(const TokenPosition token_pos,
                                 const Instance& src_value,
                                 const AbstractType& dst_type,
                                 const String& dst_name) {
  // Argument layout expected by _TypeError._throwNew:
  // position, value, type, name, bound error message.
  const Array& throw_args = Array::Handle(Array::New(5));
  throw_args.SetAt(0, Smi::Handle(Smi::New(token_pos.value())));
  throw_args.SetAt(1, src_value);
  throw_args.SetAt(2, dst_type);
  throw_args.SetAt(3, dst_name);
  throw_args.SetAt(4, String::Handle());  // bound error message
  const Library& core_lib = Library::Handle(Library::CoreLibrary());
  const Class& type_error_class =
      Class::Handle(core_lib.LookupClassAllowPrivate(Symbols::TypeError()));
  const Function& throw_new = Function::Handle(
      type_error_class.LookupFunctionAllowPrivate(Symbols::ThrowNew()));
  return DartEntry::InvokeFunction(throw_new, throw_args);
}
// Reads static member [getter_name] on this class: returns the field value,
// invokes the static getter, or closurizes a same-named static method.
// When nothing matches: throws NoSuchMethod if [throw_nsm_if_absent], else
// returns the sentinel object.
RawObject* Class::InvokeGetter(const String& getter_name,
                               bool throw_nsm_if_absent,
                               bool respect_reflectable) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Error& error = Error::Handle(zone, EnsureIsFinalized(thread));
  if (!error.IsNull()) {
    return error.raw();
  }
  // Note static fields do not have implicit getters.
  const Field& field = Field::Handle(zone, LookupStaticField(getter_name));
  if (field.IsNull() || field.IsUninitialized()) {
    // No (initialized) field: look for an explicit "get:name" function.
    const String& internal_getter_name =
        String::Handle(zone, Field::GetterName(getter_name));
    Function& getter =
        Function::Handle(zone, LookupStaticFunction(internal_getter_name));
    if (getter.IsNull() || (respect_reflectable && !getter.is_reflectable())) {
      if (getter.IsNull()) {
        getter = LookupStaticFunction(getter_name);
        if (!getter.IsNull()) {
          // Looking for a getter but found a regular method: closurize it.
          const Function& closure_function =
              Function::Handle(zone, getter.ImplicitClosureFunction());
          return closure_function.ImplicitStaticClosure();
        }
      }
      if (throw_nsm_if_absent) {
        return ThrowNoSuchMethod(
            AbstractType::Handle(zone, RareType()), getter_name,
            Object::null_array(), Object::null_array(),
            InvocationMirror::kStatic, InvocationMirror::kGetter);
      }
      // Fall through case: Indicate that we didn't find any function or field
      // using a special null instance. This is different from a field being
      // null. Callers make sure that this null does not leak into Dartland.
      return Object::sentinel().raw();
    }
    // Invoke the getter and return the result.
    return DartEntry::InvokeFunction(getter, Object::empty_array());
  }
  return field.StaticValue();
}
// Writes [value] to static member [setter_name]: assigns the static field or
// invokes the static setter, type-checking the value against the declared
// parameter/field type first. Throws NoSuchMethod for missing/final/
// non-reflectable targets and TypeError on a type mismatch. Returns [value]
// on success.
RawObject* Class::InvokeSetter(const String& setter_name,
                               const Instance& value,
                               bool respect_reflectable) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Error& error = Error::Handle(zone, EnsureIsFinalized(thread));
  if (!error.IsNull()) {
    return error.raw();
  }
  // Check for real fields and user-defined setters.
  const Field& field = Field::Handle(zone, LookupStaticField(setter_name));
  const String& internal_setter_name =
      String::Handle(zone, Field::SetterName(setter_name));
  AbstractType& parameter_type = AbstractType::Handle(zone);
  AbstractType& argument_type =
      AbstractType::Handle(zone, value.GetType(Heap::kOld));
  if (field.IsNull()) {
    // No field: try a user-defined static setter "set:name".
    const Function& setter =
        Function::Handle(zone, LookupStaticFunction(internal_setter_name));
    const int kNumArgs = 1;
    const Array& args = Array::Handle(zone, Array::New(kNumArgs));
    args.SetAt(0, value);
    if (setter.IsNull() || (respect_reflectable && !setter.is_reflectable())) {
      return ThrowNoSuchMethod(AbstractType::Handle(zone, RareType()),
                               internal_setter_name, args, Object::null_array(),
                               InvocationMirror::kStatic,
                               InvocationMirror::kSetter);
    }
    parameter_type ^= setter.ParameterTypeAt(0);
    // Null is assignable to any type here; only check non-null values against
    // a non-dynamic parameter type.
    if (!argument_type.IsNullType() && !parameter_type.IsDynamicType() &&
        !value.IsInstanceOf(parameter_type, Object::null_type_arguments(),
                            Object::null_type_arguments(), NULL)) {
      const String& argument_name =
          String::Handle(zone, setter.ParameterNameAt(0));
      return ThrowTypeError(setter.token_pos(), value, parameter_type,
                            argument_name);
    }
    // Invoke the setter and return the result.
    return DartEntry::InvokeFunction(setter, args);
  }
  // Final or non-reflectable fields cannot be assigned through this path.
  if (field.is_final() || (respect_reflectable && !field.is_reflectable())) {
    const int kNumArgs = 1;
    const Array& args = Array::Handle(zone, Array::New(kNumArgs));
    args.SetAt(0, value);
    return ThrowNoSuchMethod(AbstractType::Handle(zone, RareType()),
                             internal_setter_name, args, Object::null_array(),
                             InvocationMirror::kStatic,
                             InvocationMirror::kSetter);
  }
  parameter_type ^= field.type();
  if (!argument_type.IsNullType() && !parameter_type.IsDynamicType() &&
      !value.IsInstanceOf(parameter_type, Object::null_type_arguments(),
                          Object::null_type_arguments(), NULL)) {
    const String& argument_name = String::Handle(zone, field.name());
    return ThrowTypeError(field.token_pos(), value, parameter_type,
                          argument_name);
  }
  field.SetStaticValue(value);
  return value.raw();
}
// Invokes static method [function_name] on this class with [args]. If no such
// method exists, tries a same-named static getter and invokes `call` on its
// result (closure invocation). Throws NoSuchMethod when neither exists, the
// arguments are invalid, or the target is non-reflectable (when
// [respect_reflectable]).
RawObject* Class::Invoke(const String& function_name,
                         const Array& args,
                         const Array& arg_names,
                         bool respect_reflectable) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  // TODO(regis): Support invocation of generic functions with type arguments.
  const int kTypeArgsLen = 0;
  const Error& error = Error::Handle(zone, EnsureIsFinalized(thread));
  if (!error.IsNull()) {
    return error.raw();
  }
  Function& function =
      Function::Handle(zone, LookupStaticFunction(function_name));
  if (function.IsNull()) {
    // Didn't find a method: try to find a getter and invoke call on its result.
    const String& getter_name =
        String::Handle(zone, Field::GetterName(function_name));
    function = LookupStaticFunction(getter_name);
    if (!function.IsNull()) {
      // Invoke the getter.
      const Object& getter_result = Object::Handle(
          zone, DartEntry::InvokeFunction(function, Object::empty_array()));
      if (getter_result.IsError()) {
        return getter_result.raw();
      }
      // Make room for the closure (receiver) in the argument list.
      const intptr_t num_args = args.Length();
      const Array& call_args = Array::Handle(zone, Array::New(num_args + 1));
      Object& temp = Object::Handle(zone);
      for (int i = 0; i < num_args; i++) {
        temp = args.At(i);
        call_args.SetAt(i + 1, temp);
      }
      call_args.SetAt(0, getter_result);
      const Array& call_args_descriptor_array =
          Array::Handle(zone, ArgumentsDescriptor::New(
                                  kTypeArgsLen, call_args.Length(), arg_names));
      // Call the closure.
      return DartEntry::InvokeClosure(call_args, call_args_descriptor_array);
    }
  }
  const Array& args_descriptor_array = Array::Handle(
      zone, ArgumentsDescriptor::New(kTypeArgsLen, args.Length(), arg_names));
  ArgumentsDescriptor args_descriptor(args_descriptor_array);
  const TypeArguments& type_args = Object::null_type_arguments();
  if (function.IsNull() || !function.AreValidArguments(args_descriptor, NULL) ||
      (respect_reflectable && !function.is_reflectable())) {
    return ThrowNoSuchMethod(
        AbstractType::Handle(zone, RareType()), function_name, args, arg_names,
        InvocationMirror::kStatic, InvocationMirror::kMethod);
  }
  // Type-check the actual arguments against the declared parameter types.
  RawObject* type_error =
      function.DoArgumentTypesMatch(args, args_descriptor, type_args);
  if (type_error != Error::null()) {
    return type_error;
  }
  return DartEntry::InvokeFunction(function, args, args_descriptor_array);
}
// Convenience overload: evaluates [expr] with no type parameters.
RawObject* Class::Evaluate(const String& expr,
                           const Array& param_names,
                           const Array& param_values) const {
  return Evaluate(expr, param_names, param_values, Object::empty_array(),
                  Object::null_type_arguments());
}
// Evaluates [expr] in the scope of this class as a synthesized static
// function. Only regular Dart instance classes are supported; VM-internal
// classes yield an UnhandledException.
RawObject* Class::Evaluate(const String& expr,
                           const Array& param_names,
                           const Array& param_values,
                           const Array& type_param_names,
                           const TypeArguments& type_param_values) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  if (id() < kInstanceCid || id() == kTypeArgumentsCid) {
    const Instance& exception = Instance::Handle(String::New(
        "Expressions can be evaluated only with regular Dart instances"));
    const Instance& stacktrace = Instance::Handle();
    return UnhandledException::New(exception, stacktrace);
  }
  // Source-based evaluation is incompatible with kernel expression
  // compilation; the library must not carry kernel data in that mode.
  ASSERT(Library::Handle(library()).kernel_data() ==
             ExternalTypedData::null() ||
         !FLAG_enable_kernel_expression_compilation);
  const Function& eval_func = Function::Handle(
      Function::EvaluateHelper(*this, expr, param_names, true));
  return DartEntry::InvokeFunction(eval_func, param_values);
}
// Forward declaration; the definition appears later in this file.
static RawObject* EvaluateCompiledExpressionHelper(
    const uint8_t* kernel_bytes,
    intptr_t kernel_length,
    const Array& type_definitions,
    const String& library_url,
    const String& klass,
    const Array& arguments,
    const TypeArguments& type_arguments);
// Evaluates a pre-compiled (kernel) expression in the scope of this class.
// Only regular Dart instance classes are supported; VM-internal classes yield
// an UnhandledException.
RawObject* Class::EvaluateCompiledExpression(
    const uint8_t* kernel_bytes,
    intptr_t kernel_length,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  if (id() < kInstanceCid || id() == kTypeArgumentsCid) {
    const Instance& exception = Instance::Handle(String::New(
        "Expressions can be evaluated only with regular Dart instances"));
    const Instance& stacktrace = Instance::Handle();
    return UnhandledException::New(exception, stacktrace);
  }
  // Top-level classes evaluate in library scope (null class name).
  return EvaluateCompiledExpressionHelper(
      kernel_bytes, kernel_length, type_definitions,
      String::Handle(Library::Handle(library()).url()),
      IsTopLevel() ? String::Handle() : String::Handle(UserVisibleName()),
      arguments, type_arguments);
}
// Ensure that top level parsing of the class has been done.
// Returns Error::null() on success; on a compile failure either long-jumps
// (if a long-jump base is installed) or returns the error.
RawError* Class::EnsureIsFinalized(Thread* thread) const {
  // Finalized classes have already been parsed.
  if (is_finalized()) {
    return Error::null();
  }
  if (Compiler::IsBackgroundCompilation()) {
    Compiler::AbortBackgroundCompilation(DeoptId::kNone,
                                         "Class finalization while compiling");
  }
  // Check the pointer before dereferencing it: the original code asserted
  // thread->IsMutatorThread() first, which already dereferenced [thread] and
  // made the null check useless.
  ASSERT(thread != NULL);
  ASSERT(thread->IsMutatorThread());
  const Error& error =
      Error::Handle(thread->zone(), Compiler::CompileClass(*this));
  if (!error.IsNull()) {
    ASSERT(thread == Thread::Current());
    if (thread->long_jump_base() != NULL) {
      Report::LongJump(error);
      UNREACHABLE();
    }
  }
  return error.raw();
}
// Replaces this class's field array. In DEBUG builds, verifies every field is
// an original (non-clone) owned by this class.
void Class::SetFields(const Array& value) const {
  ASSERT(!value.IsNull());
#if defined(DEBUG)
  // Verify that all the fields in the array have this class as owner.
  Field& field = Field::Handle();
  intptr_t len = value.Length();
  for (intptr_t i = 0; i < len; i++) {
    field ^= value.At(i);
    ASSERT(field.IsOriginal());
    ASSERT(field.Owner() == raw());
  }
#endif
  // The value of static fields is already initialized to null.
  StorePointer(&raw_ptr()->fields_, value.raw());
}
void Class::AddField(const Field& field) const {
const Array& arr = Array::Handle(fields());
const Array& new_arr = Array::Handle(Array::Grow(arr, arr.Length() + 1));
new_arr.SetAt(arr.Length(), field);
SetFields(new_arr);
}
// Appends [new_fields] to this class's field array in one grow operation.
// No-op when the list is empty.
void Class::AddFields(const GrowableArray<const Field*>& new_fields) const {
  const intptr_t num_new_fields = new_fields.length();
  if (num_new_fields == 0) return;
  const Array& arr = Array::Handle(fields());
  const intptr_t num_old_fields = arr.Length();
  const Array& new_arr = Array::Handle(
      Array::Grow(arr, num_old_fields + num_new_fields, Heap::kOld));
  for (intptr_t i = 0; i < num_new_fields; i++) {
    new_arr.SetAt(i + num_old_fields, *new_fields.At(i));
  }
  SetFields(new_arr);
}
// Adds one static const int field per VM class (named "cid<Class>", holding
// that class's id) to this class. Used to expose class ids to Dart code.
// Note: no comments may be placed inside the macro bodies below — a //
// comment would swallow the line-continuation backslash.
void Class::InjectCIDFields() const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Field& field = Field::Handle(zone);
  Smi& value = Smi::Handle(zone);
  String& field_name = String::Handle(zone);
#define CLASS_LIST_WITH_NULL(V)                                                \
  V(Null)                                                                      \
  CLASS_LIST_NO_OBJECT(V)
#define ADD_SET_FIELD(clazz)                                                   \
  field_name = Symbols::New(thread, "cid" #clazz);                             \
  field = Field::New(field_name, true, false, true, false, *this,              \
                     Type::Handle(Type::IntType()), TokenPosition::kMinSource, \
                     TokenPosition::kMinSource);                               \
  value = Smi::New(k##clazz##Cid);                                             \
  field.SetStaticValue(value, true);                                           \
  AddField(field);
  CLASS_LIST_WITH_NULL(ADD_SET_FIELD)
#undef ADD_SET_FIELD
#undef CLASS_LIST_WITH_NULL
}
// Allocates and initializes a Class object whose instances are laid out like
// FakeInstance. Shared by all Class::New variants; does NOT register the
// class with the isolate.
template <class FakeInstance>
RawClass* Class::NewCommon(intptr_t index) {
  ASSERT(Object::class_class() != Class::null());
  Class& result = Class::Handle();
  {
    // No allocation may occur between the raw allocation and the handle
    // assignment, hence the NoSafepointScope.
    RawObject* raw =
        Object::Allocate(Class::kClassId, Class::InstanceSize(), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  // Use a stack-allocated FakeInstance to obtain the C++ vtable that handles
  // of instances of this class should carry.
  FakeInstance fake;
  ASSERT(fake.IsInstance());
  result.set_handle_vtable(fake.vtable());
  result.set_token_pos(TokenPosition::kNoSource);
  result.set_instance_size(FakeInstance::InstanceSize());
  result.set_type_arguments_field_offset_in_words(kNoTypeArguments);
  result.set_next_field_offset(FakeInstance::NextFieldOffset());
  result.set_id(index);
  result.set_num_type_arguments(kUnknownNumTypeArguments);
  result.set_num_own_type_arguments(kUnknownNumTypeArguments);
  result.set_has_pragma(false);
  result.set_num_native_fields(0);
  result.set_state_bits(0);
  result.InitEmptyFields();
  return result.raw();
}
// Allocates a class with FakeInstance layout and registers it with the
// current isolate under class id [index].
template <class FakeInstance>
RawClass* Class::New(intptr_t index) {
  Class& result = Class::Handle(NewCommon<FakeInstance>(index));
  result.set_kernel_offset(-1);  // No kernel source backs this class.
  Isolate::Current()->RegisterClass(result);
  return result.raw();
}
// Allocates a user-visible class (Instance layout, kIllegalCid until class
// finalization assigns an id) with the given library, name and script.
RawClass* Class::New(const Library& lib,
                     const String& name,
                     const Script& script,
                     TokenPosition token_pos,
                     bool register_class) {
  Class& result = Class::Handle(NewCommon<Instance>(kIllegalCid));
  result.set_library(lib);
  result.set_name(name);
  result.set_script(script);
  result.set_token_pos(token_pos);
  result.set_kernel_offset(-1);
  if (register_class) {
    Isolate::Current()->RegisterClass(result);
  }
  return result.raw();
}
// Allocates a plain instance class with no id assigned yet.
RawClass* Class::NewInstanceClass() {
  return Class::New<Instance>(kIllegalCid);
}
// Creates a finalized native-wrapper class named [name] in [library] with
// [field_count] native fields. Returns Class::null() if a class with that
// name already exists in the library.
RawClass* Class::NewNativeWrapper(const Library& library,
                                  const String& name,
                                  int field_count) {
  Class& cls = Class::Handle(library.LookupClass(name));
  if (cls.IsNull()) {
    cls = New(library, name, Script::Handle(), TokenPosition::kNoSource);
    cls.SetFields(Object::empty_array());
    cls.SetFunctions(Object::empty_array());
    // Set super class to Object.
    cls.set_super_type(Type::Handle(Type::ObjectType()));
    // Compute instance size. First word contains a pointer to a properly
    // sized typed array once the first native field has been set.
    intptr_t instance_size = sizeof(RawInstance) + kWordSize;
    cls.set_instance_size(RoundedAllocationSize(instance_size));
    cls.set_next_field_offset(instance_size);
    cls.set_num_native_fields(field_count);
    // Mark the class fully finalized: it has no Dart source to process.
    cls.set_is_finalized();
    cls.set_is_type_finalized();
    cls.set_is_synthesized_class();
    cls.set_is_cycle_free();
    cls.set_kernel_offset(-1);
    library.AddClass(cls);
    return cls.raw();
  } else {
    return Class::null();
  }
}
// Creates the class object for one of the concrete string representations
// identified by 'class_id'.
RawClass* Class::NewStringClass(intptr_t class_id) {
  // Pick the instance size matching the concrete representation.
  intptr_t instance_size;
  switch (class_id) {
    case kOneByteStringCid:
      instance_size = OneByteString::InstanceSize();
      break;
    case kTwoByteStringCid:
      instance_size = TwoByteString::InstanceSize();
      break;
    case kExternalOneByteStringCid:
      instance_size = ExternalOneByteString::InstanceSize();
      break;
    default:
      ASSERT(class_id == kExternalTwoByteStringCid);
      instance_size = ExternalTwoByteString::InstanceSize();
      break;
  }
  Class& cls = Class::Handle(New<String>(class_id));
  cls.set_instance_size(instance_size);
  cls.set_next_field_offset(String::NextFieldOffset());
  cls.set_is_prefinalized();
  return cls.raw();
}
// Creates the class object for an internal typed-data class.
RawClass* Class::NewTypedDataClass(intptr_t class_id) {
  ASSERT(RawObject::IsTypedDataClassId(class_id));
  Class& cls = Class::Handle(New<TypedData>(class_id));
  cls.set_instance_size(TypedData::InstanceSize());
  cls.set_next_field_offset(TypedData::NextFieldOffset());
  cls.set_is_prefinalized();
  return cls.raw();
}
// Creates the class object for a typed-data view class. Views carry no
// fixed-size payload: instance size 0 and a negative next-field offset act
// as sentinels here.
RawClass* Class::NewTypedDataViewClass(intptr_t class_id) {
  ASSERT(RawObject::IsTypedDataViewClassId(class_id));
  Class& result = Class::Handle(New<Instance>(class_id));
  result.set_instance_size(0);
  result.set_next_field_offset(-kWordSize);
  return result.raw();
}
// Creates the class object for an external typed-data class.
RawClass* Class::NewExternalTypedDataClass(intptr_t class_id) {
  ASSERT(RawObject::IsExternalTypedDataClassId(class_id));
  Class& cls = Class::Handle(New<ExternalTypedData>(class_id));
  cls.set_instance_size(ExternalTypedData::InstanceSize());
  cls.set_next_field_offset(ExternalTypedData::NextFieldOffset());
  cls.set_is_prefinalized();
  return cls.raw();
}
// Sets the class name (write-once; must be a symbol). In non-product modes
// also eagerly derives the user-visible name if not yet set.
void Class::set_name(const String& value) const {
  ASSERT(raw_ptr()->name_ == String::null());
  ASSERT(value.IsSymbol());
  StorePointer(&raw_ptr()->name_, value.raw());
#if !defined(PRODUCT)
  if (raw_ptr()->user_name_ == String::null()) {
    // TODO(johnmccutchan): Eagerly set user name for VM isolate classes,
    // lazily set user name for the other classes.
    // Generate and set user_name.
    const String& user_name = String::Handle(GenerateUserVisibleName());
    set_user_name(user_name);
  }
#endif  // !defined(PRODUCT)
}
#if !defined(PRODUCT)
// Caches the user-visible class name (non-product modes only).
void Class::set_user_name(const String& value) const {
  StorePointer(&raw_ptr()->user_name_, value.raw());
}
#endif  // !defined(PRODUCT)
// Computes the user-visible name for this class: well-known cids map to
// fixed symbols, everything else falls through to the scrubbed declared name.
// With --show_internal_names the internal name is returned unchanged.
RawString* Class::GenerateUserVisibleName() const {
  if (FLAG_show_internal_names) {
    return Name();
  }
  switch (id()) {
    case kFloat32x4Cid:
      return Symbols::Float32x4().raw();
    case kInt32x4Cid:
      return Symbols::Int32x4().raw();
    case kTypedDataInt8ArrayCid:
    case kExternalTypedDataInt8ArrayCid:
      return Symbols::Int8List().raw();
    case kTypedDataUint8ArrayCid:
    case kExternalTypedDataUint8ArrayCid:
      return Symbols::Uint8List().raw();
    case kTypedDataUint8ClampedArrayCid:
    case kExternalTypedDataUint8ClampedArrayCid:
      return Symbols::Uint8ClampedList().raw();
    case kTypedDataInt16ArrayCid:
    case kExternalTypedDataInt16ArrayCid:
      return Symbols::Int16List().raw();
    case kTypedDataUint16ArrayCid:
    case kExternalTypedDataUint16ArrayCid:
      return Symbols::Uint16List().raw();
    case kTypedDataInt32ArrayCid:
    case kExternalTypedDataInt32ArrayCid:
      return Symbols::Int32List().raw();
    case kTypedDataUint32ArrayCid:
    case kExternalTypedDataUint32ArrayCid:
      return Symbols::Uint32List().raw();
    case kTypedDataInt64ArrayCid:
    case kExternalTypedDataInt64ArrayCid:
      return Symbols::Int64List().raw();
    case kTypedDataUint64ArrayCid:
    case kExternalTypedDataUint64ArrayCid:
      return Symbols::Uint64List().raw();
    case kTypedDataInt32x4ArrayCid:
    case kExternalTypedDataInt32x4ArrayCid:
      return Symbols::Int32x4List().raw();
    case kTypedDataFloat32x4ArrayCid:
    case kExternalTypedDataFloat32x4ArrayCid:
      return Symbols::Float32x4List().raw();
    case kTypedDataFloat64x2ArrayCid:
    case kExternalTypedDataFloat64x2ArrayCid:
      return Symbols::Float64x2List().raw();
    case kTypedDataFloat32ArrayCid:
    case kExternalTypedDataFloat32ArrayCid:
      return Symbols::Float32List().raw();
    case kTypedDataFloat64ArrayCid:
    case kExternalTypedDataFloat64ArrayCid:
      return Symbols::Float64List().raw();
#if !defined(PRODUCT)
    // VM-internal classes are only given user-visible names in non-product
    // modes.
    case kNullCid:
      return Symbols::Null().raw();
    case kDynamicCid:
      return Symbols::Dynamic().raw();
    case kVoidCid:
      return Symbols::Void().raw();
    case kClassCid:
      return Symbols::Class().raw();
    case kUnresolvedClassCid:
      return Symbols::UnresolvedClass().raw();
    case kTypeArgumentsCid:
      return Symbols::TypeArguments().raw();
    case kPatchClassCid:
      return Symbols::PatchClass().raw();
    case kFunctionCid:
      return Symbols::Function().raw();
    case kClosureDataCid:
      return Symbols::ClosureData().raw();
    case kSignatureDataCid:
      return Symbols::SignatureData().raw();
    case kRedirectionDataCid:
      return Symbols::RedirectionData().raw();
    case kFieldCid:
      return Symbols::Field().raw();
    case kScriptCid:
      return Symbols::Script().raw();
    case kLibraryCid:
      return Symbols::Library().raw();
    case kLibraryPrefixCid:
      return Symbols::LibraryPrefix().raw();
    case kNamespaceCid:
      return Symbols::Namespace().raw();
    case kKernelProgramInfoCid:
      return Symbols::KernelProgramInfo().raw();
    case kCodeCid:
      return Symbols::Code().raw();
    case kInstructionsCid:
      return Symbols::Instructions().raw();
    case kObjectPoolCid:
      return Symbols::ObjectPool().raw();
    case kCodeSourceMapCid:
      return Symbols::CodeSourceMap().raw();
    case kPcDescriptorsCid:
      return Symbols::PcDescriptors().raw();
    case kStackMapCid:
      return Symbols::StackMap().raw();
    case kLocalVarDescriptorsCid:
      return Symbols::LocalVarDescriptors().raw();
    case kExceptionHandlersCid:
      return Symbols::ExceptionHandlers().raw();
    case kContextCid:
      return Symbols::Context().raw();
    case kContextScopeCid:
      return Symbols::ContextScope().raw();
    case kSingleTargetCacheCid:
      return Symbols::SingleTargetCache().raw();
    case kICDataCid:
      return Symbols::ICData().raw();
    case kMegamorphicCacheCid:
      return Symbols::MegamorphicCache().raw();
    case kSubtypeTestCacheCid:
      return Symbols::SubtypeTestCache().raw();
    case kApiErrorCid:
      return Symbols::ApiError().raw();
    case kLanguageErrorCid:
      return Symbols::LanguageError().raw();
    case kUnhandledExceptionCid:
      return Symbols::UnhandledException().raw();
    case kUnwindErrorCid:
      return Symbols::UnwindError().raw();
    case kIntegerCid:
    case kSmiCid:
    case kMintCid:
      return Symbols::Int().raw();
    case kDoubleCid:
      return Symbols::Double().raw();
    case kOneByteStringCid:
    case kTwoByteStringCid:
    case kExternalOneByteStringCid:
    case kExternalTwoByteStringCid:
      return Symbols::_String().raw();
    case kArrayCid:
    case kImmutableArrayCid:
    case kGrowableObjectArrayCid:
      return Symbols::List().raw();
#endif  // !defined(PRODUCT)
  }
  // Fallback: scrub the declared name (strip private key mangling etc.).
  String& name = String::Handle(Name());
  name = String::ScrubName(name);
  // async library's _Future implementation is surfaced as 'Future'.
  if (name.raw() == Symbols::FutureImpl().raw() &&
      library() == Library::AsyncLibrary()) {
    return Symbols::Future().raw();
  }
  return name.raw();
}
// Associates this class with the script it was declared in.
void Class::set_script(const Script& value) const {
  StorePointer(&raw_ptr()->script_, value.raw());
}
// Records the source position of the class declaration. Classifying
// (synthetic marker) positions are not allowed here.
void Class::set_token_pos(TokenPosition token_pos) const {
  ASSERT(!token_pos.IsClassifying());
  StoreNonPointer(&raw_ptr()->token_pos_, token_pos);
}
// Computes the token position where this class declaration ends. Reads the
// end position from the class's kernel metadata; if the metadata carries no
// real end position, falls back to the largest end position among the
// class's functions declared in the same script.
TokenPosition Class::ComputeEndTokenPos() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  // No source information is available in the precompiled runtime.
  return TokenPosition::kNoSource;
#else
  // Return the begin token for synthetic classes.
  if (is_synthesized_class() || IsMixinApplication() || IsTopLevel()) {
    return token_pos();
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Script& scr = Script::Handle(zone, script());
  ASSERT(!scr.IsNull());
  if (scr.kind() == RawScript::kKernelTag) {
    ASSERT(kernel_offset() > 0);
    const Library& lib = Library::Handle(zone, library());
    const ExternalTypedData& kernel_data =
        ExternalTypedData::Handle(zone, lib.kernel_data());
    ASSERT(!kernel_data.IsNull());
    const intptr_t library_kernel_offset = lib.kernel_offset();
    ASSERT(library_kernel_offset > 0);
    const intptr_t class_offset = kernel_offset();
    // Position a kernel reader at this class's node and read up to (and
    // including) its end position field.
    kernel::TranslationHelper translation_helper(thread);
    translation_helper.InitFromScript(scr);
    kernel::KernelReaderHelper kernel_reader_helper(zone, &translation_helper,
                                                    scr, kernel_data, 0);
    kernel_reader_helper.SetOffset(class_offset);
    kernel::ClassHelper class_helper(&kernel_reader_helper);
    class_helper.ReadUntilIncluding(kernel::ClassHelper::kEndPosition);
    if (class_helper.end_position_.IsReal()) return class_helper.end_position_;
    TokenPosition largest_seen = token_pos();
    // Walk through all functions and get their end_tokens to find the classes
    // "end token".
    // TODO(jensj): Should probably walk though all fields as well.
    Function& function = Function::Handle(zone);
    const Array& arr = Array::Handle(functions());
    for (int i = 0; i < arr.Length(); i++) {
      function ^= arr.At(i);
      if (function.script() == script()) {
        if (largest_seen < function.end_token_pos()) {
          largest_seen = function.end_token_pos();
        }
      }
    }
    return TokenPosition(largest_seen);
  }
  // Only kernel scripts are expected here.
  UNREACHABLE();
#endif
}
// Returns a fingerprint of this class's kernel source, or 0 in the
// precompiled runtime where no kernel data is available.
int32_t Class::SourceFingerprint() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  return kernel::KernelSourceFingerprintHelper::CalculateClassFingerprint(
      *this);
#else
  return 0;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
// Sets the ImplementedBit in state_bits_ (class is used as an interface).
void Class::set_is_implemented() const {
  set_state_bits(ImplementedBit::update(true, raw_ptr()->state_bits_));
}
// Sets the AbstractBit in state_bits_.
void Class::set_is_abstract() const {
  set_state_bits(AbstractBit::update(true, raw_ptr()->state_bits_));
}
// Sets the TypeFinalizedBit in state_bits_.
void Class::set_is_type_finalized() const {
  set_state_bits(TypeFinalizedBit::update(true, raw_ptr()->state_bits_));
}
// Sets the PatchBit in state_bits_.
void Class::set_is_patch() const {
  set_state_bits(PatchBit::update(true, raw_ptr()->state_bits_));
}
// Sets the SynthesizedClassBit in state_bits_ (VM-generated class).
void Class::set_is_synthesized_class() const {
  set_state_bits(SynthesizedClassBit::update(true, raw_ptr()->state_bits_));
}
// Sets the EnumBit in state_bits_.
void Class::set_is_enum_class() const {
  set_state_bits(EnumBit::update(true, raw_ptr()->state_bits_));
}
// Sets the ConstBit in state_bits_.
void Class::set_is_const() const {
  set_state_bits(ConstBit::update(true, raw_ptr()->state_bits_));
}
// Sets the MixinAppAliasBit in state_bits_.
void Class::set_is_mixin_app_alias() const {
  set_state_bits(MixinAppAliasBit::update(true, raw_ptr()->state_bits_));
}
// Sets the MixinTypeAppliedBit in state_bits_.
void Class::set_is_mixin_type_applied() const {
  set_state_bits(MixinTypeAppliedBit::update(true, raw_ptr()->state_bits_));
}
// Sets the TransformedMixinApplicationBit in state_bits_.
void Class::set_is_transformed_mixin_application() const {
  set_state_bits(
      TransformedMixinApplicationBit::update(true, raw_ptr()->state_bits_));
}
// Sets the FieldsMarkedNullableBit in state_bits_.
void Class::set_is_fields_marked_nullable() const {
  set_state_bits(FieldsMarkedNullableBit::update(true, raw_ptr()->state_bits_));
}
// Sets the CycleFreeBit in state_bits_. May only transition once.
void Class::set_is_cycle_free() const {
  ASSERT(!is_cycle_free());
  set_state_bits(CycleFreeBit::update(true, raw_ptr()->state_bits_));
}
// Updates the IsAllocatedBit in state_bits_ to 'value'.
void Class::set_is_allocated(bool value) const {
  set_state_bits(IsAllocatedBit::update(value, raw_ptr()->state_bits_));
}
// Transitions the finalization state to kFinalized. Must not already be
// finalized.
void Class::set_is_finalized() const {
  ASSERT(!is_finalized());
  set_state_bits(
      ClassFinalizedBits::update(RawClass::kFinalized, raw_ptr()->state_bits_));
}
// Marks this class for re-finalization after patching and clears the
// type-finalized bit so types are re-checked.
void Class::SetRefinalizeAfterPatch() const {
  ASSERT(!IsTopLevel());
  set_state_bits(ClassFinalizedBits::update(RawClass::kRefinalizeAfterPatch,
                                            raw_ptr()->state_bits_));
  set_state_bits(TypeFinalizedBit::update(false, raw_ptr()->state_bits_));
}
// Resets the finalization state back to kAllocated (top-level and closure
// classes only) and clears the type-finalized bit.
void Class::ResetFinalization() const {
  ASSERT(IsTopLevel() || IsClosureClass());
  set_state_bits(
      ClassFinalizedBits::update(RawClass::kAllocated, raw_ptr()->state_bits_));
  set_state_bits(TypeFinalizedBit::update(false, raw_ptr()->state_bits_));
}
// Transitions the finalization state to kPreFinalized. Must not already be
// finalized.
void Class::set_is_prefinalized() const {
  ASSERT(!is_finalized());
  set_state_bits(ClassFinalizedBits::update(RawClass::kPreFinalized,
                                            raw_ptr()->state_bits_));
}
// Sets the MarkedForParsingBit in state_bits_.
void Class::set_is_marked_for_parsing() const {
  set_state_bits(MarkedForParsingBit::update(true, raw_ptr()->state_bits_));
}
// Clears the MarkedForParsingBit in state_bits_.
void Class::reset_is_marked_for_parsing() const {
  set_state_bits(MarkedForParsingBit::update(false, raw_ptr()->state_bits_));
}
// Stores the array of implemented interface types (must not be null).
void Class::set_interfaces(const Array& value) const {
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->interfaces_, value.raw());
}
// Stores the mixin type for a mixin-application class (must not be null).
void Class::set_mixin(const Type& value) const {
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->mixin_, value.raw());
}
// A class is a mixin application iff a mixin type has been set on it.
bool Class::IsMixinApplication() const {
  return mixin() != Type::null();
}
// Looks up the patch class with the same name in this class's library.
RawClass* Class::GetPatchClass() const {
  const Library& lib = Library::Handle(library());
  return lib.GetPatchClass(String::Handle(Name()));
}
// Records 'implementor' as a direct implementor of this (implemented) class,
// lazily creating the backing growable array.
void Class::AddDirectImplementor(const Class& implementor) const {
  ASSERT(is_implemented());
  ASSERT(!implementor.IsNull());
  GrowableObjectArray& direct_implementors =
      GrowableObjectArray::Handle(raw_ptr()->direct_implementors_);
  if (direct_implementors.IsNull()) {
    direct_implementors = GrowableObjectArray::New(4, Heap::kOld);
    StorePointer(&raw_ptr()->direct_implementors_, direct_implementors.raw());
  }
#if defined(DEBUG)
  // Verify that the same class is not added twice.
  for (intptr_t i = 0; i < direct_implementors.Length(); i++) {
    ASSERT(direct_implementors.At(i) != implementor.raw());
  }
#endif
  direct_implementors.Add(implementor, Heap::kOld);
}
// Drops the recorded direct implementors list.
void Class::ClearDirectImplementors() const {
  StorePointer(&raw_ptr()->direct_implementors_, GrowableObjectArray::null());
}
// Records 'subclass' as a direct subclass of this class, lazily creating the
// backing growable array. Object's subclasses are intentionally not tracked.
void Class::AddDirectSubclass(const Class& subclass) const {
  ASSERT(!subclass.IsNull());
  ASSERT(subclass.SuperClass() == raw());
  // Do not keep track of the direct subclasses of class Object.
  ASSERT(!IsObjectClass());
  GrowableObjectArray& direct_subclasses =
      GrowableObjectArray::Handle(raw_ptr()->direct_subclasses_);
  if (direct_subclasses.IsNull()) {
    direct_subclasses = GrowableObjectArray::New(4, Heap::kOld);
    StorePointer(&raw_ptr()->direct_subclasses_, direct_subclasses.raw());
  }
#if defined(DEBUG)
  // Verify that the same class is not added twice.
  for (intptr_t i = 0; i < direct_subclasses.Length(); i++) {
    ASSERT(direct_subclasses.At(i) != subclass.raw());
  }
#endif
  direct_subclasses.Add(subclass, Heap::kOld);
}
// Drops the recorded direct subclasses list.
void Class::ClearDirectSubclasses() const {
  StorePointer(&raw_ptr()->direct_subclasses_, GrowableObjectArray::null());
}
// Returns the canonical constants table of this class.
RawArray* Class::constants() const {
  return raw_ptr()->constants_;
}
// Replaces the canonical constants table (must not be null).
void Class::set_constants(const Array& value) const {
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->constants_, value.raw());
}
// Returns the cached canonical type of this class (may be null).
RawType* Class::canonical_type() const {
  return raw_ptr()->canonical_type_;
}
// Caches the canonical type; it must be canonical and in old space.
void Class::set_canonical_type(const Type& value) const {
  ASSERT(!value.IsNull() && value.IsCanonical() && value.IsOld());
  StorePointer(&raw_ptr()->canonical_type_, value.raw());
}
// Same as canonical_type(): the cached canonical type, possibly null.
RawType* Class::CanonicalType() const {
  return raw_ptr()->canonical_type_;
}
// Sets the canonical type; only allowed when unset or already equal.
void Class::SetCanonicalType(const Type& type) const {
  ASSERT((canonical_type() == Object::null()) ||
         (canonical_type() == type.raw()));  // Set during own finalization.
  set_canonical_type(type);
}
// Installs the allocation stub. Write-once: the slot must be empty.
void Class::set_allocation_stub(const Code& value) const {
  // Never clear the stub as it may still be a target, but will be GC-d if
  // not referenced.
  ASSERT(!value.IsNull());
  ASSERT(raw_ptr()->allocation_stub_ == Code::null());
  StorePointer(&raw_ptr()->allocation_stub_, value.raw());
}
// Disables the current allocation stub (if any) and detaches it from this
// class so the next allocation regenerates a fresh stub.
void Class::DisableAllocationStub() const {
  const Code& existing_stub = Code::Handle(allocation_stub());
  if (existing_stub.IsNull()) {
    return;
  }
  ASSERT(!existing_stub.IsDisabled());
  // Change the stub so that the next caller will regenerate the stub.
  existing_stub.DisableStubCode();
  // Disassociate the existing stub from class.
  StorePointer(&raw_ptr()->allocation_stub_, Code::null());
}
// True iff this class is the type class of the Dart 'Function' type.
bool Class::IsDartFunctionClass() const {
  return raw() == Type::Handle(Type::DartFunctionType()).type_class();
}
// True iff this class is dart:async's 'Future', determined by name and
// library rather than the object store (see comment below).
bool Class::IsFutureClass() const {
  // Looking up future_class in the object store would not work, because
  // this function is called during class finalization, before the object store
  // field would be initialized by InitKnownObjects().
  return (Name() == Symbols::Future().raw()) &&
         (library() == Library::AsyncLibrary());
}
// True iff this class is dart:async's 'FutureOr', determined by name and
// library rather than the object store (see comment below).
bool Class::IsFutureOrClass() const {
  // Looking up future_or_class in the object store would not work, because
  // this function is called during class finalization, before the object store
  // field would be initialized by InitKnownObjects().
  return (Name() == Symbols::FutureOr().raw()) &&
         (library() == Library::AsyncLibrary());
}
// If test_kind == kIsSubtypeOf, checks if type S is a subtype of type T.
// If test_kind == kIsMoreSpecificThan, checks if S is more specific than T.
// Type S is specified by this class parameterized with 'type_arguments', and
// type T by class 'other' parameterized with 'other_type_arguments'.
// This class and class 'other' do not need to be finalized, however, they must
// be resolved as well as their interfaces.
// Any bound error encountered along the way is reported through 'bound_error'
// (first error wins); 'space' selects the heap space for any allocations.
bool Class::TypeTestNonRecursive(const Class& cls,
                                 Class::TypeTestKind test_kind,
                                 const TypeArguments& type_arguments,
                                 const Class& other,
                                 const TypeArguments& other_type_arguments,
                                 Error* bound_error,
                                 TrailPtr bound_trail,
                                 Heap::Space space) {
  // Use the 'this_class' object as if it was the receiver of this method, but
  // instead of recursing, reset it to the super class and loop.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Class& this_class = Class::Handle(zone, cls.raw());
  while (true) {
    // Each occurrence of DynamicType in type T is interpreted as the dynamic
    // type, a supertype of all types. So are Object and void types.
    if (other.IsDynamicClass() || other.IsObjectClass() ||
        other.IsVoidClass()) {
      return true;
    }
    // Check for NullType, which, as of Dart 1.5, is a subtype of (and is more
    // specific than) any type. Note that the null instance is not handled here.
    if (this_class.IsNullClass()) {
      return true;
    }
    // Class FutureOr is mapped to dynamic in non-strong mode.
    // Detect snapshots compiled in strong mode and run in non-strong mode.
    ASSERT(FLAG_strong || !other.IsFutureOrClass());
    // In strong mode, check if 'other' is 'FutureOr'.
    // If so, apply additional subtyping rules.
    if (FLAG_strong && this_class.FutureOrTypeTest(
                           zone, type_arguments, other, other_type_arguments,
                           bound_error, bound_trail, space)) {
      return true;
    }
    // In the case of a subtype test, each occurrence of DynamicType in type S
    // is interpreted as the bottom type, a subtype of all types, but not in
    // strong mode.
    // However, DynamicType is not more specific than any type.
    if (this_class.IsDynamicClass()) {
      return !FLAG_strong && (test_kind == Class::kIsSubtypeOf);
    }
    // If other is neither Object, dynamic or void, then ObjectType/VoidType
    // can't be a subtype of other.
    if (this_class.IsObjectClass() || this_class.IsVoidClass()) {
      return false;
    }
    // Check for reflexivity.
    if (this_class.raw() == other.raw()) {
      const intptr_t num_type_params = this_class.NumTypeParameters();
      if (num_type_params == 0) {
        return true;
      }
      const intptr_t num_type_args = this_class.NumTypeArguments();
      const intptr_t from_index = num_type_args - num_type_params;
      // Since we do not truncate the type argument vector of a subclass (see
      // below), we only check a subvector of the proper length.
      // Check for covariance.
      if (other_type_arguments.IsNull() ||
          other_type_arguments.IsTopTypes(from_index, num_type_params)) {
        return true;
      }
      if (type_arguments.IsNull() ||
          type_arguments.IsRaw(from_index, num_type_params)) {
        // Other type can't be more specific than this one because for that
        // it would have to have all dynamic type arguments which is checked
        // above.
        return !FLAG_strong && (test_kind == Class::kIsSubtypeOf);
      }
      return type_arguments.TypeTest(test_kind, other_type_arguments,
                                     from_index, num_type_params, bound_error,
                                     bound_trail, space);
    }
    // In strong mode, subtyping rules of callable instances are restricted.
    if (!FLAG_strong && other.IsDartFunctionClass()) {
      // Check if type S has a call() method.
      const Function& call_function =
          Function::Handle(zone, this_class.LookupCallFunctionForTypeTest());
      if (!call_function.IsNull()) {
        return true;
      }
    }
    // Check for 'direct super type' specified in the implements clause
    // and check for transitivity at the same time.
    Array& interfaces = Array::Handle(zone, this_class.interfaces());
    AbstractType& interface = AbstractType::Handle(zone);
    Class& interface_class = Class::Handle(zone);
    TypeArguments& interface_args = TypeArguments::Handle(zone);
    Error& error = Error::Handle(zone);
    for (intptr_t i = 0; i < interfaces.Length(); i++) {
      interface ^= interfaces.At(i);
      if (!interface.IsFinalized()) {
        // We may be checking bounds at finalization time and can encounter
        // a still unfinalized interface.
        if (interface.IsBeingFinalized()) {
          // Interface is part of a still unfinalized recursive type graph.
          // Skip it. The caller will create a bounded type to be checked at
          // runtime if this type test returns false at compile time.
          continue;
        }
        ClassFinalizer::FinalizeType(this_class, interface);
        interfaces.SetAt(i, interface);
      }
      if (interface.IsMalbounded()) {
        // Return the first bound error to the caller if it requests it.
        if ((bound_error != NULL) && bound_error->IsNull()) {
          *bound_error = interface.error();
        }
        continue;  // Another interface may work better.
      }
      interface_class = interface.type_class();
      interface_args = interface.arguments();
      if (!interface_args.IsNull() && !interface_args.IsInstantiated()) {
        // This type class implements an interface that is parameterized with
        // generic type(s), e.g. it implements List<T>.
        // The uninstantiated type T must be instantiated using the type
        // parameters of this type before performing the type test.
        // The type arguments of this type that are referred to by the type
        // parameters of the interface are at the end of the type vector,
        // after the type arguments of the super type of this type.
        // The index of the type parameters is adjusted upon finalization.
        error = Error::null();
        interface_args = interface_args.InstantiateFrom(
            type_arguments, Object::null_type_arguments(), kNoneFree, &error,
            NULL, bound_trail, space);
        if (!error.IsNull()) {
          // Return the first bound error to the caller if it requests it.
          if ((bound_error != NULL) && bound_error->IsNull()) {
            *bound_error = error.raw();
          }
          continue;  // Another interface may work better.
        }
      }
      // In Dart 2, implementing Function has no meaning.
      if (FLAG_strong && interface_class.IsDartFunctionClass()) {
        continue;
      }
      if (interface_class.TypeTest(test_kind, interface_args, other,
                                   other_type_arguments, bound_error,
                                   bound_trail, space)) {
        return true;
      }
    }
    // "Recurse" up the class hierarchy until we have reached the top.
    this_class = this_class.SuperClass();
    if (this_class.IsNull()) {
      return false;
    }
  }
  // The loop above only exits via a return statement.
  UNREACHABLE();
  return false;
}
// If test_kind == kIsSubtypeOf, checks if type S is a subtype of type T.
// If test_kind == kIsMoreSpecificThan, checks if S is more specific than T.
// Type S is specified by this class parameterized with 'type_arguments', and
// type T by class 'other' parameterized with 'other_type_arguments'.
// This class and class 'other' do not need to be finalized, however, they must
// be resolved as well as their interfaces.
// Thin wrapper over TypeTestNonRecursive with the receiver as 'cls'.
bool Class::TypeTest(TypeTestKind test_kind,
                     const TypeArguments& type_arguments,
                     const Class& other,
                     const TypeArguments& other_type_arguments,
                     Error* bound_error,
                     TrailPtr bound_trail,
                     Heap::Space space) const {
  return TypeTestNonRecursive(*this, test_kind, type_arguments, other,
                              other_type_arguments, bound_error, bound_trail,
                              space);
}
// Strong-mode-only helper applying the special subtyping rules when the
// 'other' type is FutureOr<K>: S <: FutureOr<K> holds if K is a top type,
// if S is Future<T> with T <: K, or if S itself is a subtype of K.
// Returns false when none of the FutureOr rules apply.
bool Class::FutureOrTypeTest(Zone* zone,
                             const TypeArguments& type_arguments,
                             const Class& other,
                             const TypeArguments& other_type_arguments,
                             Error* bound_error,
                             TrailPtr bound_trail,
                             Heap::Space space) const {
  // In strong mode, there is no difference between 'is subtype of' and
  // 'is more specific than'.
  ASSERT(FLAG_strong);
  if (other.IsFutureOrClass()) {
    if (other_type_arguments.IsNull()) {
      return true;
    }
    const AbstractType& other_type_arg =
        AbstractType::Handle(zone, other_type_arguments.TypeAt(0));
    if (other_type_arg.IsTopType()) {
      return true;
    }
    // S = Future<T>: check T against K.
    if (!type_arguments.IsNull() && IsFutureClass()) {
      const AbstractType& type_arg =
          AbstractType::Handle(zone, type_arguments.TypeAt(0));
      if (type_arg.TypeTest(Class::kIsSubtypeOf, other_type_arg, bound_error,
                            bound_trail, space)) {
        return true;
      }
    }
    // Otherwise check S directly against K.
    if (other_type_arg.HasResolvedTypeClass() &&
        TypeTest(Class::kIsSubtypeOf, type_arguments,
                 Class::Handle(zone, other_type_arg.type_class()),
                 TypeArguments::Handle(other_type_arg.arguments()), bound_error,
                 bound_trail, space)) {
      return true;
    }
  }
  return false;
}
// True iff this is the synthetic top-level class of a library.
bool Class::IsTopLevel() const {
  return Name() == Symbols::TopLevel().raw();
}
// True iff the class name is library-private.
bool Class::IsPrivate() const {
  return Library::IsPrivate(String::Handle(Name()));
}
// Looks up a non-abstract instance function by name.
RawFunction* Class::LookupDynamicFunction(const String& name) const {
  return LookupFunction(name, kInstance);
}
// Looks up an instance function by name, abstract functions included.
RawFunction* Class::LookupDynamicFunctionAllowAbstract(
    const String& name) const {
  return LookupFunction(name, kInstanceAllowAbstract);
}
// Looks up an instance function by name, ignoring private-key mangling.
RawFunction* Class::LookupDynamicFunctionAllowPrivate(
    const String& name) const {
  return LookupFunctionAllowPrivate(name, kInstance);
}
// Looks up a static function by name.
RawFunction* Class::LookupStaticFunction(const String& name) const {
  return LookupFunction(name, kStatic);
}
// Looks up a static function by name, ignoring private-key mangling.
RawFunction* Class::LookupStaticFunctionAllowPrivate(const String& name) const {
  return LookupFunctionAllowPrivate(name, kStatic);
}
// Looks up a generative constructor by name.
RawFunction* Class::LookupConstructor(const String& name) const {
  return LookupFunction(name, kConstructor);
}
// Looks up a generative constructor by name, ignoring private-key mangling.
RawFunction* Class::LookupConstructorAllowPrivate(const String& name) const {
  return LookupFunctionAllowPrivate(name, kConstructor);
}
// Looks up a factory constructor by name.
RawFunction* Class::LookupFactory(const String& name) const {
  return LookupFunction(name, kFactory);
}
// Looks up a factory constructor by name, ignoring private-key mangling.
RawFunction* Class::LookupFactoryAllowPrivate(const String& name) const {
  return LookupFunctionAllowPrivate(name, kFactory);
}
// Looks up any kind of function by name.
RawFunction* Class::LookupFunction(const String& name) const {
  return LookupFunction(name, kAny);
}
// Looks up any kind of function by name, ignoring private-key mangling.
RawFunction* Class::LookupFunctionAllowPrivate(const String& name) const {
  return LookupFunctionAllowPrivate(name, kAny);
}
// Walks the superclass chain looking for a 'call' method to use in a type
// test against the Function type. Returns null if the class is not yet
// finalized or no 'call' method exists.
RawFunction* Class::LookupCallFunctionForTypeTest() const {
  // If this class is not compiled yet, it is too early to lookup a call
  // function. This case should only occur during bounds checking at compile
  // time. Return null as if the call method did not exist, so the type test
  // may return false, but without a bound error, and the bound check will get
  // postponed to runtime.
  if (!is_finalized()) {
    return Function::null();
  }
  Zone* zone = Thread::Current()->zone();
  Class& cls = Class::Handle(zone, raw());
  Function& call_function = Function::Handle(zone);
  do {
    ASSERT(cls.is_finalized());
    call_function = cls.LookupDynamicFunctionAllowAbstract(Symbols::Call());
    // NOTE(review): cls is advanced before the loop test, so after a hit it
    // refers to the superclass of the defining class — presumably fine for
    // FinalizeSignature below; confirm against ClassFinalizer.
    cls = cls.SuperClass();
  } while (call_function.IsNull() && !cls.IsNull());
  if (!call_function.IsNull()) {
    // Make sure the signature is finalized before using it in a type test.
    ClassFinalizer::FinalizeSignature(
        cls, call_function, ClassFinalizer::kFinalize);  // No bounds checking.
  }
  return call_function.raw();
}
// Returns true if 'prefix' and 'accessor_name' match 'name'.
static bool MatchesAccessorName(const String& name,
const char* prefix,
intptr_t prefix_length,
const String& accessor_name) {
intptr_t name_len = name.Length();
intptr_t accessor_name_len = accessor_name.Length();
if (name_len != (accessor_name_len + prefix_length)) {
return false;
}
for (intptr_t i = 0; i < prefix_length; i++) {
if (name.CharAt(i) != prefix[i]) {
return false;
}
}
for (intptr_t i = 0, j = prefix_length; i < accessor_name_len; i++, j++) {
if (name.CharAt(j) != accessor_name.CharAt(i)) {
return false;
}
}
return true;
}
// Returns func.raw() when 'func' matches the requested member kind,
// Function::null() otherwise.
RawFunction* Class::CheckFunctionType(const Function& func, MemberKind kind) {
  switch (kind) {
    case kInstance:
    case kInstanceAllowAbstract:
      if (func.IsDynamicFunction(kind == kInstanceAllowAbstract)) {
        return func.raw();
      }
      break;
    case kStatic:
      if (func.IsStaticFunction()) {
        return func.raw();
      }
      break;
    case kConstructor:
      if (func.IsGenerativeConstructor()) {
        ASSERT(!func.is_static());
        return func.raw();
      }
      break;
    case kFactory:
      if (func.IsFactory()) {
        ASSERT(func.is_static());
        return func.raw();
      }
      break;
    case kAny:
      return func.raw();
    default:
      break;
  }
  return Function::null();
}
// Looks up a function of the given member kind by exact name. Uses the
// cached functions hash table when the class has many functions; otherwise
// scans linearly, with a raw-pointer fast path for symbol names.
RawFunction* Class::LookupFunction(const String& name, MemberKind kind) const {
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return Function::null();
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  funcs ^= functions();
  ASSERT(!funcs.IsNull());
  const intptr_t len = funcs.Length();
  Function& function = thread->FunctionHandle();
  if (len >= kFunctionLookupHashTreshold) {
    // Cache functions hash table to allow multi threaded access.
    const Array& hash_table =
        Array::Handle(thread->zone(), raw_ptr()->functions_hash_table_);
    if (!hash_table.IsNull()) {
      ClassFunctionsSet set(hash_table.raw());
      REUSABLE_STRING_HANDLESCOPE(thread);
      function ^= set.GetOrNull(FunctionName(name, &(thread->StringHandle())));
      // No mutations.
      ASSERT(set.Release().raw() == hash_table.raw());
      return function.IsNull() ? Function::null()
                               : CheckFunctionType(function, kind);
    }
  }
  if (name.IsSymbol()) {
    // Quick Symbol compare.
    NoSafepointScope no_safepoint;
    for (intptr_t i = 0; i < len; i++) {
      function ^= funcs.At(i);
      if (function.name() == name.raw()) {
        return CheckFunctionType(function, kind);
      }
    }
  } else {
    // Slow path: compare string contents.
    REUSABLE_STRING_HANDLESCOPE(thread);
    String& function_name = thread->StringHandle();
    for (intptr_t i = 0; i < len; i++) {
      function ^= funcs.At(i);
      function_name ^= function.name();
      if (function_name.Equals(name)) {
        return CheckFunctionType(function, kind);
      }
    }
  }
  // No function found.
  return Function::null();
}
// Looks up a function of the given member kind by name, comparing with the
// library-private key stripped from both sides.
RawFunction* Class::LookupFunctionAllowPrivate(const String& name,
                                               MemberKind kind) const {
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return Function::null();
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  funcs ^= functions();
  ASSERT(!funcs.IsNull());
  const intptr_t len = funcs.Length();
  Function& function = thread->FunctionHandle();
  String& function_name = thread->StringHandle();
  for (intptr_t i = 0; i < len; i++) {
    function ^= funcs.At(i);
    function_name ^= function.name();
    if (String::EqualsIgnoringPrivateKey(function_name, name)) {
      return CheckFunctionType(function, kind);
    }
  }
  // No function found.
  return Function::null();
}
// Looks up the getter function for field/property 'name'.
RawFunction* Class::LookupGetterFunction(const String& name) const {
  return LookupAccessorFunction(kGetterPrefix, kGetterPrefixLength, name);
}
// Looks up the setter function for field/property 'name'.
RawFunction* Class::LookupSetterFunction(const String& name) const {
  return LookupAccessorFunction(kSetterPrefix, kSetterPrefixLength, name);
}
// Looks up a function whose name is 'prefix' + 'name' (used for getter and
// setter lookups). Returns null if no such function exists.
RawFunction* Class::LookupAccessorFunction(const char* prefix,
                                           intptr_t prefix_length,
                                           const String& name) const {
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return Function::null();
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  funcs ^= functions();
  intptr_t len = funcs.Length();
  Function& function = thread->FunctionHandle();
  String& function_name = thread->StringHandle();
  for (intptr_t i = 0; i < len; i++) {
    function ^= funcs.At(i);
    function_name ^= function.name();
    if (MatchesAccessorName(function_name, prefix, prefix_length, name)) {
      return function.raw();
    }
  }
  // No function found.
  return Function::null();
}
// Looks up a non-static field declared in this class by name.
RawField* Class::LookupInstanceField(const String& name) const {
  return LookupField(name, kInstance);
}
// Looks up a static field declared in this class by name.
RawField* Class::LookupStaticField(const String& name) const {
  return LookupField(name, kStatic);
}
// Looks up a field of any kind (instance or static) by name.
RawField* Class::LookupField(const String& name) const {
  return LookupField(name, kAny);
}
// Looks up a field declared in this class by exact name, filtered by |kind|
// (instance, static, or any). A field found under the wrong kind yields
// Field::null(). Symbol names take a fast path comparing raw pointers.
RawField* Class::LookupField(const String& name, MemberKind kind) const {
  Thread* thread = Thread::Current();
  // Fields are only available once the class has been finalized.
  if (EnsureIsFinalized(thread) != Error::null()) {
    return Field::null();
  }
  // Reusable thread-local handles avoid Zone allocations on this hot path.
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FIELD_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& flds = thread->ArrayHandle();
  flds ^= fields();
  ASSERT(!flds.IsNull());
  intptr_t len = flds.Length();
  Field& field = thread->FieldHandle();
  if (name.IsSymbol()) {
    // Use fast raw pointer string compare for symbols.
    for (intptr_t i = 0; i < len; i++) {
      field ^= flds.At(i);
      if (name.raw() == field.name()) {
        if (kind == kInstance) {
          return field.is_static() ? Field::null() : field.raw();
        } else if (kind == kStatic) {
          return field.is_static() ? field.raw() : Field::null();
        }
        ASSERT(kind == kAny);
        return field.raw();
      }
    }
  } else {
    // Non-symbol name: fall back to content comparison.
    String& field_name = thread->StringHandle();
    for (intptr_t i = 0; i < len; i++) {
      field ^= flds.At(i);
      field_name ^= field.name();
      if (name.Equals(field_name)) {
        if (kind == kInstance) {
          return field.is_static() ? Field::null() : field.raw();
        } else if (kind == kStatic) {
          return field.is_static() ? field.raw() : Field::null();
        }
        ASSERT(kind == kAny);
        return field.raw();
      }
    }
  }
  // No field found.
  return Field::null();
}
// Looks up a field by name, ignoring library-private name mangling, so a
// private name from another library can still match. If |instance_only| is
// true, static fields are skipped. Returns Field::null() if not found.
RawField* Class::LookupFieldAllowPrivate(const String& name,
                                         bool instance_only) const {
  // Use slow string compare, ignoring privacy name mangling.
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return Field::null();
  }
  // Reusable thread-local handles avoid Zone allocations on this hot path.
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FIELD_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& flds = thread->ArrayHandle();
  flds ^= fields();
  ASSERT(!flds.IsNull());
  intptr_t len = flds.Length();
  Field& field = thread->FieldHandle();
  String& field_name = thread->StringHandle();
  for (intptr_t i = 0; i < len; i++) {
    field ^= flds.At(i);
    if (field.is_static() && instance_only) {
      // If we only care about instance fields, skip statics before even
      // fetching their names (the original fetched the name first).
      continue;
    }
    field_name ^= field.name();
    if (String::EqualsIgnoringPrivateKey(field_name, name)) {
      return field.raw();
    }
  }
  return Field::null();
}
// Like LookupFieldAllowPrivate, but only returns non-static (instance)
// fields; any other outcome yields Field::null().
RawField* Class::LookupInstanceFieldAllowPrivate(const String& name) const {
  Field& field = Field::Handle(LookupFieldAllowPrivate(name, true));
  if (!field.IsNull() && !field.is_static()) {
    return field.raw();
  }
  return Field::null();
}
// Like LookupFieldAllowPrivate, but only returns static fields; any other
// outcome yields Field::null().
RawField* Class::LookupStaticFieldAllowPrivate(const String& name) const {
  Field& field = Field::Handle(LookupFieldAllowPrivate(name));
  if (!field.IsNull() && field.is_static()) {
    return field.raw();
  }
  return Field::null();
}
// Returns the library prefix with the given name visible in this class's
// defining library, or LibraryPrefix::null() if there is none.
RawLibraryPrefix* Class::LookupLibraryPrefix(const String& name) const {
  Zone* zone = Thread::Current()->zone();
  const Library& owner_lib = Library::Handle(zone, library());
  const Object& entry = Object::Handle(zone, owner_lib.LookupLocalObject(name));
  if (entry.IsNull() || !entry.IsLibraryPrefix()) {
    return LibraryPrefix::null();
  }
  return LibraryPrefix::Cast(entry).raw();
}
const char* Class::ToCString() const {
const Library& lib = Library::Handle(library());
const char* library_name = lib.IsNull() ? "" : lib.ToCString();
const char* patch_prefix = is_patch() ? "Patch " : "";
const char* class_name = String::Handle(Name()).ToCString();
return OS::SCreate(Thread::Current()->zone(), "%s %sClass: %s", library_name,
patch_prefix, class_name);
}
// Thomas Wang, Integer Hash Functions.
// https://gist.github.com/badboy/6267743
// "64 bit to 32 bit Hash Functions"
// Mixes the 64 input bits through a fixed sequence of add/shift/xor rounds
// and truncates to 32 bits.
static uword Hash64To32(uint64_t key) {
  uint64_t hash = key;
  hash = ~hash + (hash << 18);
  hash = hash ^ (hash >> 31);
  hash = hash * 21;
  hash = hash ^ (hash >> 11);
  hash = hash + (hash << 6);
  hash = hash ^ (hash >> 22);
  return static_cast<uint32_t>(hash);
}
// Key for canonical Double lookup: wraps either an existing Double handle
// (required for insertion) or a bare double value (lookup only).
class CanonicalDoubleKey {
 public:
  explicit CanonicalDoubleKey(const Double& key)
      : key_(&key), value_(key.value()) {}
  explicit CanonicalDoubleKey(const double value) : key_(NULL), value_(value) {}
  // Matches on the exact bit pattern (a consequence of the bitwise compare:
  // values that are == but differ in bits, e.g. 0.0 vs -0.0, stay distinct).
  bool Matches(const Double& obj) const {
    return obj.BitwiseEqualsToDouble(value_);
  }
  uword Hash() const { return Hash(value_); }
  // Hashes the raw 64-bit representation of the double.
  static uword Hash(double value) {
    return Hash64To32(bit_cast<uint64_t>(value));
  }
  const Double* key_;  // NULL for value-only (lookup) keys.
  const double value_;

 private:
  DISALLOW_ALLOCATION();
};
// Key for canonical Mint lookup: wraps either an existing Mint handle
// (required for insertion) or a bare int64_t value (lookup only).
class CanonicalMintKey {
 public:
  explicit CanonicalMintKey(const Mint& key)
      : key_(&key), value_(key.value()) {}
  explicit CanonicalMintKey(const int64_t value) : key_(NULL), value_(value) {}
  bool Matches(const Mint& obj) const { return obj.value() == value_; }
  uword Hash() const { return Hash(value_); }
  // Hashes the raw 64-bit representation of the value.
  static uword Hash(int64_t value) {
    return Hash64To32(bit_cast<uint64_t>(value));
  }
  const Mint* key_;  // NULL for value-only (lookup) keys.
  const int64_t value_;

 private:
  DISALLOW_ALLOCATION();
};
// Traits for looking up Canonical numbers based on a hash of the value.
// ObjectType is the boxed number class (Double, Mint); KeyType is the
// corresponding key wrapper above.
template <typename ObjectType, typename KeyType>
class CanonicalNumberTraits {
 public:
  static const char* Name() { return "CanonicalNumberTraits"; }
  static bool ReportStats() { return false; }
  // Called when growing the table.
  static bool IsMatch(const Object& a, const Object& b) {
    return a.raw() == b.raw();
  }
  // Key-to-stored-object match used on lookup and insertion.
  static bool IsMatch(const KeyType& a, const Object& b) {
    return a.Matches(ObjectType::Cast(b));
  }
  static uword Hash(const Object& key) {
    return KeyType::Hash(ObjectType::Cast(key).value());
  }
  static uword Hash(const KeyType& key) { return key.Hash(); }
  // Only keys built from an existing handle may be inserted; value-only
  // keys are for lookup and must never reach NewKey.
  static RawObject* NewKey(const KeyType& obj) {
    if (obj.key_ != NULL) {
      return obj.key_->raw();
    } else {
      UNIMPLEMENTED();
      return NULL;
    }
  }
};
// Hash sets holding the canonical Double and Mint constants of a class.
typedef UnorderedHashSet<CanonicalNumberTraits<Double, CanonicalDoubleKey> >
    CanonicalDoubleSet;
typedef UnorderedHashSet<CanonicalNumberTraits<Mint, CanonicalMintKey> >
    CanonicalMintSet;
// Returns an instance of Double or Double::null().
RawDouble* Class::LookupCanonicalDouble(Zone* zone, double value) const {
  ASSERT(this->raw() == Isolate::Current()->object_store()->double_class());
  // An empty-array constants field is the "no table yet" sentinel.
  if (this->constants() == Object::empty_array().raw()) return Double::null();
  Double& canonical_value = Double::Handle(zone);
  CanonicalDoubleSet constants(zone, this->constants());
  canonical_value ^= constants.GetOrNull(CanonicalDoubleKey(value));
  // Store the backing array back after releasing the table wrapper.
  this->set_constants(constants.Release());
  return canonical_value.raw();
}
// Returns an instance of Mint or Mint::null().
RawMint* Class::LookupCanonicalMint(Zone* zone, int64_t value) const {
  ASSERT(this->raw() == Isolate::Current()->object_store()->mint_class());
  // An empty-array constants field is the "no table yet" sentinel.
  if (this->constants() == Object::empty_array().raw()) return Mint::null();
  Mint& canonical_value = Mint::Handle(zone);
  CanonicalMintSet constants(zone, this->constants());
  canonical_value ^= constants.GetOrNull(CanonicalMintKey(value));
  // Store the backing array back after releasing the table wrapper.
  this->set_constants(constants.Release());
  return canonical_value.raw();
}
// Key for canonical lookup of a constant instance that is not a string,
// integer, or type — those kinds have their own canonicalization tables.
class CanonicalInstanceKey {
 public:
  explicit CanonicalInstanceKey(const Instance& key) : key_(key) {
    ASSERT(!(key.IsString() || key.IsInteger() || key.IsAbstractType()));
  }
  bool Matches(const Instance& obj) const {
    ASSERT(!(obj.IsString() || obj.IsInteger() || obj.IsAbstractType()));
    if (key_.CanonicalizeEquals(obj)) {
      // Anything already stored in the table must be canonical.
      ASSERT(obj.IsCanonical());
      return true;
    }
    return false;
  }
  uword Hash() const { return key_.CanonicalizeHash(); }
  const Instance& key_;

 private:
  DISALLOW_ALLOCATION();
};
// Traits for looking up Canonical Instances based on a hash of the fields.
class CanonicalInstanceTraits {
 public:
  static const char* Name() { return "CanonicalInstanceTraits"; }
  static bool ReportStats() { return false; }
  // Called when growing the table.
  static bool IsMatch(const Object& a, const Object& b) {
    ASSERT(!(a.IsString() || a.IsInteger() || a.IsAbstractType()));
    ASSERT(!(b.IsString() || b.IsInteger() || b.IsAbstractType()));
    return a.raw() == b.raw();
  }
  // Key-to-stored-object match used on lookup and insertion.
  static bool IsMatch(const CanonicalInstanceKey& a, const Object& b) {
    return a.Matches(Instance::Cast(b));
  }
  static uword Hash(const Object& key) {
    ASSERT(!(key.IsString() || key.IsNumber() || key.IsAbstractType()));
    ASSERT(key.IsInstance());
    return Instance::Cast(key).CanonicalizeHash();
  }
  static uword Hash(const CanonicalInstanceKey& key) { return key.Hash(); }
  static RawObject* NewKey(const CanonicalInstanceKey& obj) {
    return obj.key_.raw();
  }
};
typedef UnorderedHashSet<CanonicalInstanceTraits> CanonicalInstancesSet;
// Returns the canonical instance equal (by CanonicalizeEquals) to |value| in
// this class's constants table, or Instance::null() if none is present.
RawInstance* Class::LookupCanonicalInstance(Zone* zone,
                                            const Instance& value) const {
  ASSERT(this->raw() == value.clazz());
  ASSERT(is_finalized());
  Instance& canonical_value = Instance::Handle(zone);
  // An empty-array constants field is the "no table yet" sentinel.
  if (this->constants() != Object::empty_array().raw()) {
    CanonicalInstancesSet constants(zone, this->constants());
    canonical_value ^= constants.GetOrNull(CanonicalInstanceKey(value));
    // Store the backing array back after releasing the table wrapper.
    this->set_constants(constants.Release());
  }
  return canonical_value.raw();
}
// Inserts |constant| into this class's canonical-constants table and returns
// the table's canonical entry — |constant| itself unless an equal instance
// was already present.
RawInstance* Class::InsertCanonicalConstant(Zone* zone,
                                            const Instance& constant) const {
  ASSERT(this->raw() == constant.clazz());
  Instance& canonical_value = Instance::Handle(zone);
  if (this->constants() == Object::empty_array().raw()) {
    // First constant of this class: allocate a fresh old-space hash set.
    CanonicalInstancesSet constants(
        HashTables::New<CanonicalInstancesSet>(128, Heap::kOld));
    canonical_value ^= constants.InsertNewOrGet(CanonicalInstanceKey(constant));
    this->set_constants(constants.Release());
  } else {
    // Use the caller-supplied zone; the original inconsistently fetched
    // Thread::Current()->zone() here even though |zone| was available.
    CanonicalInstancesSet constants(zone, this->constants());
    canonical_value ^= constants.InsertNewOrGet(CanonicalInstanceKey(constant));
    this->set_constants(constants.Release());
  }
  return canonical_value.raw();
}
// Inserts |constant| into this class's canonical Double table, allocating
// the table on first use.
void Class::InsertCanonicalDouble(Zone* zone, const Double& constant) const {
  if (this->constants() == Object::empty_array().raw()) {
    // No table yet: allocate a fresh old-space hash set first.
    const Array& initial = Array::Handle(
        zone, HashTables::New<CanonicalDoubleSet>(128, Heap::kOld));
    this->set_constants(initial);
  }
  CanonicalDoubleSet table(zone, this->constants());
  table.InsertNewOrGet(CanonicalDoubleKey(constant));
  this->set_constants(table.Release());
}
// Inserts |constant| into this class's canonical Mint table, allocating the
// table on first use.
void Class::InsertCanonicalMint(Zone* zone, const Mint& constant) const {
  if (this->constants() == Object::empty_array().raw()) {
    // No table yet: allocate a fresh old-space hash set first.
    const Array& initial = Array::Handle(
        zone, HashTables::New<CanonicalMintSet>(128, Heap::kOld));
    this->set_constants(initial);
  }
  CanonicalMintSet table(zone, this->constants());
  table.InsertNewOrGet(CanonicalMintKey(constant));
  this->set_constants(table.Release());
}
// Rebuilds this class's constants table by re-inserting every canonical
// constant. NOTE(review): presumably used when instance hash codes may have
// changed (e.g. isolate reload) — confirm with callers.
void Class::RehashConstants(Zone* zone) const {
  intptr_t cid = id();
  if ((cid == kMintCid) || (cid == kDoubleCid)) {
    // Constants stored as a plain list or in a hashset with a stable hashcode,
    // which only depends on the actual value of the constant.
    return;
  }
  const Array& old_constants = Array::Handle(zone, constants());
  if (old_constants.Length() == 0) return;
  // Reset to the empty sentinel so InsertCanonicalConstant builds fresh.
  set_constants(Object::empty_array());
  CanonicalInstancesSet set(zone, old_constants.raw());
  Instance& constant = Instance::Handle(zone);
  CanonicalInstancesSet::Iterator it(&set);
  while (it.MoveNext()) {
    constant ^= set.GetKey(it.Current());
    ASSERT(!constant.IsNull());
    ASSERT(constant.IsCanonical());
    InsertCanonicalConstant(zone, constant);
  }
  set.Release();
}
// Allocates a new UnresolvedClass for identifier |ident|, qualified by
// |library_prefix| (a Library or LibraryPrefix), recording |token_pos|.
RawUnresolvedClass* UnresolvedClass::New(const Object& library_prefix,
                                         const String& ident,
                                         TokenPosition token_pos) {
  const UnresolvedClass& type = UnresolvedClass::Handle(UnresolvedClass::New());
  type.set_library_or_library_prefix(library_prefix);
  type.set_ident(ident);
  type.set_token_pos(token_pos);
  return type.raw();
}
// Allocates an uninitialized UnresolvedClass in old space.
RawUnresolvedClass* UnresolvedClass::New() {
  ASSERT(Object::unresolved_class_class() != Class::null());
  RawObject* raw = Object::Allocate(
      UnresolvedClass::kClassId, UnresolvedClass::InstanceSize(), Heap::kOld);
  return reinterpret_cast<RawUnresolvedClass*>(raw);
}
// Records the source position; classifying (synthetic) positions are not
// allowed here.
void UnresolvedClass::set_token_pos(TokenPosition token_pos) const {
  ASSERT(!token_pos.IsClassifying());
  StoreNonPointer(&raw_ptr()->token_pos_, token_pos);
}
// Stores the unresolved identifier (GC write barrier via StorePointer).
void UnresolvedClass::set_ident(const String& ident) const {
  StorePointer(&raw_ptr()->ident_, ident.raw());
}
// Stores the qualifier, which may be a Library or a LibraryPrefix.
void UnresolvedClass::set_library_or_library_prefix(
    const Object& library_prefix) const {
  StorePointer(&raw_ptr()->library_or_library_prefix_, library_prefix.raw());
}
// Returns the name of this unresolved class as a symbol. With a qualifier
// present the result is "<library-or-prefix-name>.<ident>"; otherwise just
// the identifier.
RawString* UnresolvedClass::Name() const {
  if (library_or_library_prefix() != Object::null()) {
    Thread* thread = Thread::Current();
    Zone* zone = thread->zone();
    const Object& lib_prefix =
        Object::Handle(zone, library_or_library_prefix());
    String& name = String::Handle(zone);  // Qualifier.
    // The qualifier is either a LibraryPrefix or a Library.
    if (lib_prefix.IsLibraryPrefix()) {
      name = LibraryPrefix::Cast(lib_prefix).name();
    } else {
      name = Library::Cast(lib_prefix).name();
    }
    GrowableHandlePtrArray<const String> strs(zone, 3);
    strs.Add(name);
    strs.Add(Symbols::Dot());
    strs.Add(String::Handle(zone, ident()));
    return Symbols::FromConcatAll(thread, strs);
  } else {
    return ident();
  }
}
const char* UnresolvedClass::ToCString() const {
const char* cname = String::Handle(Name()).ToCString();
return OS::SCreate(Thread::Current()->zone(), "unresolved class '%s'", cname);
}
// Computes, caches (via SetHash), and returns the hash of this vector.
// Returns 0 without caching when the vector is null, raw (IsRaw), or still
// contains an unfinalized entry whose hash would change later.
intptr_t TypeArguments::ComputeHash() const {
  if (IsNull()) return 0;
  const intptr_t num_types = Length();
  if (IsRaw(0, num_types)) return 0;
  uint32_t result = 0;
  AbstractType& type = AbstractType::Handle();
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    // The hash may be calculated during type finalization (for debugging
    // purposes only) while a type argument is still temporarily null.
    if (type.IsNull() || type.IsNullTypeRef()) {
      return 0;  // Do not cache hash, since it will still change.
    }
    result = CombineHashes(result, type.Hash());
  }
  result = FinalizeHash(result, kHashBits);
  SetHash(result);
  return result;
}
// Returns a canonicalized vector of |total_length| entries: the first
// |other_length| entries of |other| followed by this vector's entries.
// A null input vector contributes 'dynamic' for each of its positions.
// Returns TypeArguments::null() when both inputs are null.
RawTypeArguments* TypeArguments::Prepend(Zone* zone,
                                         const TypeArguments& other,
                                         intptr_t other_length,
                                         intptr_t total_length) const {
  if (IsNull() && other.IsNull()) {
    return TypeArguments::null();
  }
  const TypeArguments& result =
      TypeArguments::Handle(zone, TypeArguments::New(total_length, Heap::kNew));
  AbstractType& type = AbstractType::Handle(zone);
  // Copy the prefix from |other|.
  for (intptr_t i = 0; i < other_length; i++) {
    type = other.IsNull() ? Type::DynamicType() : other.TypeAt(i);
    result.SetTypeAt(i, type);
  }
  // Append this vector's entries after the prefix.
  for (intptr_t i = other_length; i < total_length; i++) {
    type = IsNull() ? Type::DynamicType() : TypeAt(i - other_length);
    result.SetTypeAt(i, type);
  }
  return result.Canonicalize();
}
// Builds the user-visible string "<T0, T1, ...>" for the subvector
// [from_index, from_index + len). Positions beyond the vector's length are
// printed as 'dynamic' (strong mode only). The result is a symbol.
RawString* TypeArguments::SubvectorName(intptr_t from_index,
                                        intptr_t len,
                                        NameVisibility name_visibility) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  String& name = String::Handle(zone);
  // One piece per type name plus separators and the angle brackets.
  const intptr_t num_strings =
      (len == 0) ? 2 : 2 * len + 1;  // "<""T"", ""T"">".
  GrowableHandlePtrArray<const String> pieces(zone, num_strings);
  pieces.Add(Symbols::LAngleBracket());
  AbstractType& type = AbstractType::Handle(zone);
  for (intptr_t i = 0; i < len; i++) {
    if (from_index + i < Length()) {
      type = TypeAt(from_index + i);
      name = type.BuildName(name_visibility);
    } else {
      // Show dynamic type argument in strong mode.
      ASSERT(FLAG_strong);
      name = Symbols::Dynamic().raw();
    }
    pieces.Add(name);
    if (i < len - 1) {
      pieces.Add(Symbols::CommaSpace());
    }
  }
  pieces.Add(Symbols::RAngleBracket());
  ASSERT(pieces.length() == num_strings);
  return Symbols::FromConcatAll(thread, pieces);
}
// Returns true if the entries in [from_index, from_index + len) of this
// vector and |other| are pairwise equivalent. Identical vectors are trivially
// equivalent; a null side or a length mismatch is never equivalent.
bool TypeArguments::IsSubvectorEquivalent(const TypeArguments& other,
                                          intptr_t from_index,
                                          intptr_t len,
                                          TrailPtr trail) const {
  if (this->raw() == other.raw()) {
    return true;
  }
  if (IsNull() || other.IsNull()) {
    return false;
  }
  const intptr_t num_types = Length();
  if (num_types != other.Length()) {
    return false;
  }
  AbstractType& type = AbstractType::Handle();
  AbstractType& other_type = AbstractType::Handle();
  for (intptr_t i = from_index; i < from_index + len; i++) {
    type = TypeAt(i);
    other_type = other.TypeAt(i);
    // Still unfinalized vectors should not be considered equivalent.
    if (type.IsNull() || !type.IsEquivalent(other_type, trail)) {
      return false;
    }
  }
  return true;
}
bool TypeArguments::IsRecursive() const {
if (IsNull()) return false;
const intptr_t num_types = Length();
AbstractType& type = AbstractType::Handle();
for (intptr_t i = 0; i < num_types; i++) {
type = TypeAt(i);
// If this type argument is null, the type parameterized with this type
// argument is still being finalized and is definitely recursive. The null
// type argument will be replaced by a non-null type before the type is
// marked as finalized.
if (type.IsNull() || type.IsRecursive()) {
return true;
}
}
return false;
}
// Propagates |function| as the scope function to every non-null type
// argument in this vector. No-op on a null vector.
void TypeArguments::SetScopeFunction(const Function& function) const {
  if (IsNull()) return;
  const intptr_t num_types = Length();
  AbstractType& type = AbstractType::Handle();
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    if (!type.IsNull()) {
      type.SetScopeFunction(function);
    }
  }
}
// Returns true if every type in [from_index, from_index + len) is the
// 'dynamic' type. If |raw_instantiated| is true, uninstantiated type
// parameters count as dynamic as well.
bool TypeArguments::IsDynamicTypes(bool raw_instantiated,
                                   intptr_t from_index,
                                   intptr_t len) const {
  ASSERT(Length() >= (from_index + len));
  AbstractType& type = AbstractType::Handle();
  Class& type_class = Class::Handle();
  for (intptr_t i = 0; i < len; i++) {
    type = TypeAt(from_index + i);
    // A still-null (unfinalized) entry is not dynamic.
    if (type.IsNull()) {
      return false;
    }
    if (!type.HasResolvedTypeClass()) {
      if (raw_instantiated && type.IsTypeParameter()) {
        // An uninstantiated type parameter is equivalent to dynamic (even in
        // the presence of a malformed bound in checked mode).
        continue;
      }
      return false;
    }
    type_class = type.type_class();
    if (!type_class.IsDynamicClass()) {
      return false;
    }
  }
  return true;
}
// Returns true if every type in [from_index, from_index + len) is a top
// type. A still-null (unfinalized) entry is not a top type.
bool TypeArguments::IsTopTypes(intptr_t from_index, intptr_t len) const {
  ASSERT(Length() >= (from_index + len));
  AbstractType& type_arg = AbstractType::Handle();
  for (intptr_t i = from_index; i < from_index + len; i++) {
    type_arg = TypeAt(i);
    if (type_arg.IsNull()) {
      return false;
    }
    if (!type_arg.IsTopType()) {
      return false;
    }
  }
  return true;
}
// Applies |test_kind| pairwise to the entries of this vector and |other| in
// [from_index, from_index + len). Fails if any pair fails the test or if
// either entry is still unfinalized (null). Bound errors are reported
// through |bound_error| by the per-type TypeTest.
bool TypeArguments::TypeTest(TypeTestKind test_kind,
                             const TypeArguments& other,
                             intptr_t from_index,
                             intptr_t len,
                             Error* bound_error,
                             TrailPtr bound_trail,
                             Heap::Space space) const {
  ASSERT(Length() >= (from_index + len));
  ASSERT(!other.IsNull());
  ASSERT(other.Length() >= (from_index + len));
  AbstractType& type = AbstractType::Handle();
  AbstractType& other_type = AbstractType::Handle();
  for (intptr_t i = 0; i < len; i++) {
    type = TypeAt(from_index + i);
    other_type = other.TypeAt(from_index + i);
    if (type.IsNull() || other_type.IsNull() ||
        !type.TypeTest(test_kind, other_type, bound_error, bound_trail,
                       space)) {
      return false;
    }
  }
  return true;
}
bool TypeArguments::HasInstantiations() const {
const Array& prior_instantiations = Array::Handle(instantiations());
ASSERT(prior_instantiations.Length() > 0); // Always at least a sentinel.
return prior_instantiations.Length() > 1;
}
// Counts the filled entries of the instantiations cache by walking until the
// kNoInstantiator sentinel; each entry spans
// StubCode::kInstantiationSizeInWords array slots.
intptr_t TypeArguments::NumInstantiations() const {
  const Array& prior_instantiations = Array::Handle(instantiations());
  ASSERT(prior_instantiations.Length() > 0);  // Always at least a sentinel.
  intptr_t num = 0;
  intptr_t i = 0;
  while (prior_instantiations.At(i) != Smi::New(StubCode::kNoInstantiator)) {
    i += StubCode::kInstantiationSizeInWords;
    num++;
  }
  return num;
}
// Raw accessor for the instantiations cache array.
RawArray* TypeArguments::instantiations() const {
  return raw_ptr()->instantiations_;
}
// Replaces the instantiations cache array (GC write barrier via
// StorePointer). The cache must never be null.
void TypeArguments::set_instantiations(const Array& value) const {
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->instantiations_, value.raw());
}
// Returns the number of type arguments; a null vector has length 0.
intptr_t TypeArguments::Length() const {
  if (IsNull()) {
    return 0;
  }
  return Smi::Value(raw_ptr()->length_);
}
// Returns the type argument at |index| (bounds-checked by TypeAddr).
RawAbstractType* TypeArguments::TypeAt(intptr_t index) const {
  return *TypeAddr(index);
}
// Stores |value| at |index|. Canonical vectors are immutable and must not
// be mutated.
void TypeArguments::SetTypeAt(intptr_t index, const AbstractType& value) const {
  ASSERT(!IsCanonical());
  StorePointer(TypeAddr(index), value.raw());
}
bool TypeArguments::IsResolved() const {
if (IsCanonical()) {
return true;
}
AbstractType& type = AbstractType::Handle();
const intptr_t num_types = Length();
for (intptr_t i = 0; i < num_types; i++) {
type = TypeAt(i);
if (!type.IsResolved()) {
return false;
}
}
return true;
}
// Returns true if every entry of [from_index, from_index + len) is
// instantiated with respect to |genericity| and |num_free_fun_type_params|.
// Null entries (possible during finalization of recursive types) are treated
// as instantiated; see the inline comment below.
bool TypeArguments::IsSubvectorInstantiated(intptr_t from_index,
                                            intptr_t len,
                                            Genericity genericity,
                                            intptr_t num_free_fun_type_params,
                                            TrailPtr trail) const {
  ASSERT(!IsNull());
  AbstractType& type = AbstractType::Handle();
  for (intptr_t i = 0; i < len; i++) {
    type = TypeAt(from_index + i);
    // If this type argument T is null, the type A containing T in its flattened
    // type argument vector V is recursive and is still being finalized.
    // T is the type argument of a super type of A. T is being instantiated
    // during finalization of V, which is also the instantiator. T depends
    // solely on the type parameters of A and will be replaced by a non-null
    // type before A is marked as finalized.
    if (!type.IsNull() &&
        !type.IsInstantiated(genericity, num_free_fun_type_params, trail)) {
      return false;
    }
  }
  return true;
}
// Returns true if this uninstantiated vector is the identity mapping: entry
// i is the class type parameter with index i and an unconstrained
// (Object/dynamic) bound, so instantiation could reuse the instantiator's
// own vector.
bool TypeArguments::IsUninstantiatedIdentity() const {
  AbstractType& type = AbstractType::Handle();
  const intptr_t num_types = Length();
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    if (type.IsNull()) {
      return false;  // Still unfinalized, too early to tell.
    }
    if (!type.IsTypeParameter()) {
      return false;
    }
    const TypeParameter& type_param = TypeParameter::Cast(type);
    ASSERT(type_param.IsFinalized());
    // Must be the class type parameter at this exact position.
    if ((type_param.index() != i) || type_param.IsFunctionTypeParameter()) {
      return false;
    }
    // TODO(regis): Do the bounds really matter, since they are checked at
    // finalization time (creating BoundedTypes where required)? Understand
    // why ignoring bounds here causes failures.
    // If this type parameter specifies an upper bound, then the type argument
    // vector does not really represent the identity vector. It cannot be
    // substituted by the instantiator's type argument vector without checking
    // the upper bound.
    const AbstractType& bound = AbstractType::Handle(type_param.bound());
    ASSERT(bound.IsResolved());
    if (!bound.IsObjectType() && !bound.IsDynamicType()) {
      return false;
    }
  }
  return true;
  // Note that it is not necessary to verify at runtime that the instantiator
  // type vector is long enough, since this uninstantiated vector contains as
  // many different type parameters as it is long.
}
// Return true if this uninstantiated type argument vector, once instantiated
// at runtime, is a prefix of the type argument vector of its instantiator.
bool TypeArguments::CanShareInstantiatorTypeArguments(
    const Class& instantiator_class) const {
  ASSERT(!IsInstantiated());
  const intptr_t num_type_args = Length();
  const intptr_t num_instantiator_type_args =
      instantiator_class.NumTypeArguments();
  if (num_type_args > num_instantiator_type_args) {
    // This vector cannot be a prefix of a shorter vector.
    return false;
  }
  const intptr_t num_instantiator_type_params =
      instantiator_class.NumTypeParameters();
  const intptr_t first_type_param_offset =
      num_instantiator_type_args - num_instantiator_type_params;
  // At compile time, the type argument vector of the instantiator consists of
  // the type argument vector of its super type, which may refer to the type
  // parameters of the instantiator class, followed by (or overlapping partially
  // or fully with) the type parameters of the instantiator class in declaration
  // order.
  // In other words, the only variables are the type parameters of the
  // instantiator class.
  // This uninstantiated type argument vector is also expressed in terms of the
  // type parameters of the instantiator class. Therefore, in order to be a
  // prefix once instantiated at runtime, every one of its type argument must be
  // equal to the type argument of the instantiator vector at the same index.
  // As a first requirement, the last num_instantiator_type_params type
  // arguments of this type argument vector must refer to the corresponding type
  // parameters of the instantiator class.
  AbstractType& type_arg = AbstractType::Handle();
  for (intptr_t i = first_type_param_offset; i < num_type_args; i++) {
    type_arg = TypeAt(i);
    if (!type_arg.IsTypeParameter()) {
      return false;
    }
    const TypeParameter& type_param = TypeParameter::Cast(type_arg);
    ASSERT(type_param.IsFinalized());
    // Must be the class type parameter at this exact position.
    if ((type_param.index() != i) || type_param.IsFunctionTypeParameter()) {
      return false;
    }
  }
  // As a second requirement, the type arguments corresponding to the super type
  // must be identical. Overlapping ones have already been checked starting at
  // first_type_param_offset.
  if (first_type_param_offset == 0) {
    return true;
  }
  AbstractType& super_type =
      AbstractType::Handle(instantiator_class.super_type());
  const TypeArguments& super_type_args =
      TypeArguments::Handle(super_type.arguments());
  if (super_type_args.IsNull()) {
    return false;
  }
  AbstractType& super_type_arg = AbstractType::Handle();
  for (intptr_t i = 0; (i < first_type_param_offset) && (i < num_type_args);
       i++) {
    type_arg = TypeAt(i);
    super_type_arg = super_type_args.TypeAt(i);
    if (!type_arg.Equals(super_type_arg)) {
      return false;
    }
  }
  return true;
}
bool TypeArguments::IsFinalized() const {
ASSERT(!IsNull());
AbstractType& type = AbstractType::Handle();
const intptr_t num_types = Length();
for (intptr_t i = 0; i < num_types; i++) {
type = TypeAt(i);
if (!type.IsFinalized()) {
return false;
}
}
return true;
}
// Returns true if any type argument carries a bound: a BoundedType, a type
// parameter whose upper bound is not Object/dynamic, or (recursively) a
// parameterized type with bounded type arguments.
bool TypeArguments::IsBounded() const {
  AbstractType& type = AbstractType::Handle();
  const intptr_t num_types = Length();
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    if (type.IsBoundedType()) {
      return true;
    }
    if (type.IsTypeParameter()) {
      const AbstractType& bound =
          AbstractType::Handle(TypeParameter::Cast(type).bound());
      // Object/dynamic bounds are unconstrained.
      if (!bound.IsObjectType() && !bound.IsDynamicType()) {
        return true;
      }
      continue;
    }
    // Remaining entries are Types; recurse into their arguments.
    const TypeArguments& type_args =
        TypeArguments::Handle(Type::Cast(type).arguments());
    if (!type_args.IsNull() && type_args.IsBounded()) {
      return true;
    }
  }
  return false;
}
// Instantiates each uninstantiated type argument from the given instantiator
// and function type argument vectors, returning a new (uncanonicalized)
// vector allocated in |space|. Bound errors are reported via |bound_error|.
RawTypeArguments* TypeArguments::InstantiateFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    intptr_t num_free_fun_type_params,
    Error* bound_error,
    TrailPtr instantiation_trail,
    TrailPtr bound_trail,
    Heap::Space space) const {
  ASSERT(!IsInstantiated(kAny, num_free_fun_type_params));
  // Identity vectors can simply reuse the instantiator's vector.
  if (!instantiator_type_arguments.IsNull() && IsUninstantiatedIdentity() &&
      (instantiator_type_arguments.Length() == Length())) {
    return instantiator_type_arguments.raw();
  }
  const intptr_t num_types = Length();
  TypeArguments& instantiated_array =
      TypeArguments::Handle(TypeArguments::New(num_types, space));
  AbstractType& type = AbstractType::Handle();
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    // If this type argument T is null, the type A containing T in its flattened
    // type argument vector V is recursive and is still being finalized.
    // T is the type argument of a super type of A. T is being instantiated
    // during finalization of V, which is also the instantiator. T depends
    // solely on the type parameters of A and will be replaced by a non-null
    // type before A is marked as finalized.
    if (!type.IsNull() &&
        !type.IsInstantiated(kAny, num_free_fun_type_params)) {
      type = type.InstantiateFrom(instantiator_type_arguments,
                                  function_type_arguments,
                                  num_free_fun_type_params, bound_error,
                                  instantiation_trail, bound_trail, space);
    }
    instantiated_array.SetTypeAt(i, type);
  }
  return instantiated_array.raw();
}
// Instantiates this vector from canonical instantiator and function type
// argument vectors, canonicalizes the result, and memoizes the
// (instantiator, function-args) -> result triple in the instantiations
// cache. On a cache hit the memoized result is returned directly.
RawTypeArguments* TypeArguments::InstantiateAndCanonicalizeFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    Error* bound_error) const {
  ASSERT(!IsInstantiated());
  ASSERT(instantiator_type_arguments.IsNull() ||
         instantiator_type_arguments.IsCanonical());
  ASSERT(function_type_arguments.IsNull() ||
         function_type_arguments.IsCanonical());
  // Lookup instantiator and, if found, return paired instantiated result.
  Array& prior_instantiations = Array::Handle(instantiations());
  ASSERT(!prior_instantiations.IsNull() && prior_instantiations.IsArray());
  // The instantiations cache is initialized with Object::zero_array() and is
  // therefore guaranteed to contain kNoInstantiator. No length check needed.
  ASSERT(prior_instantiations.Length() > 0);  // Always at least a sentinel.
  intptr_t index = 0;
  while (true) {
    // Entries are keyed on both the instantiator and function vectors.
    if ((prior_instantiations.At(index) == instantiator_type_arguments.raw()) &&
        (prior_instantiations.At(index + 1) == function_type_arguments.raw())) {
      return TypeArguments::RawCast(prior_instantiations.At(index + 2));
    }
    if (prior_instantiations.At(index) == Smi::New(StubCode::kNoInstantiator)) {
      break;
    }
    index += StubCode::kInstantiationSizeInWords;
  }
  // Cache lookup failed. Instantiate the type arguments.
  TypeArguments& result = TypeArguments::Handle();
  result = InstantiateFrom(instantiator_type_arguments, function_type_arguments,
                           kAllFree, bound_error, NULL, NULL, Heap::kOld);
  if ((bound_error != NULL) && !bound_error->IsNull()) {
    // Do not cache a result obtained under a bound error.
    return result.raw();
  }
  // Instantiation did not result in bound error. Canonicalize type arguments.
  result = result.Canonicalize();
  // InstantiateAndCanonicalizeFrom is not reentrant. It cannot have been called
  // indirectly, so the prior_instantiations array cannot have grown.
  ASSERT(prior_instantiations.raw() == instantiations());
  // Add instantiator and function type args and result to instantiations array.
  intptr_t length = prior_instantiations.Length();
  if ((index + StubCode::kInstantiationSizeInWords) >= length) {
    // TODO(regis): Should we limit the number of cached instantiations?
    // Grow the instantiations array by about 50%, but at least by 1.
    // The initial array is Object::zero_array() of length 1.
    intptr_t entries = (length - 1) / StubCode::kInstantiationSizeInWords;
    intptr_t new_entries = entries + (entries >> 1) + 1;
    length = new_entries * StubCode::kInstantiationSizeInWords + 1;
    prior_instantiations =
        Array::Grow(prior_instantiations, length, Heap::kOld);
    set_instantiations(prior_instantiations);
    ASSERT((index + StubCode::kInstantiationSizeInWords) < length);
  }
  // Write the new entry where the sentinel was, then re-append the sentinel.
  prior_instantiations.SetAt(index + 0, instantiator_type_arguments);
  prior_instantiations.SetAt(index + 1, function_type_arguments);
  prior_instantiations.SetAt(index + 2, result);
  prior_instantiations.SetAt(index + 3,
                             Smi::Handle(Smi::New(StubCode::kNoInstantiator)));
  return result.raw();
}
// Allocates a TypeArguments vector of |len| entries in |space|. Length and
// hash are initialized inside a NoSafepointScope so the object is never
// observed partially constructed; the instantiations cache starts as the
// shared zero array (sentinel only).
RawTypeArguments* TypeArguments::New(intptr_t len, Heap::Space space) {
  if (len < 0 || len > kMaxElements) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in TypeArguments::New: invalid len %" Pd "\n", len);
  }
  TypeArguments& result = TypeArguments::Handle();
  {
    RawObject* raw = Object::Allocate(TypeArguments::kClassId,
                                      TypeArguments::InstanceSize(len), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
    // Length must be set before we start storing into the array.
    result.SetLength(len);
    result.SetHash(0);
  }
  // The zero array should have been initialized.
  ASSERT(Object::zero_array().raw() != Array::null());
  COMPILE_ASSERT(StubCode::kNoInstantiator == 0);
  result.set_instantiations(Object::zero_array());
  return result.raw();
}
// Returns the in-object address of the type argument slot at |index|.
RawAbstractType* const* TypeArguments::TypeAddr(intptr_t index) const {
  ASSERT((index >= 0) && (index < Length()));
  return &raw_ptr()->types()[index];
}
// Stores the length field. Canonical vectors are immutable.
void TypeArguments::SetLength(intptr_t value) const {
  ASSERT(!IsCanonical());
  // This is only safe because we create a new Smi, which does not cause
  // heap allocation.
  StoreSmi(&raw_ptr()->length_, Smi::New(value));
}
// Returns a clone of this vector with each unfinalized type argument cloned
// via CloneUnfinalized. Null or already-finalized vectors are returned as-is.
RawTypeArguments* TypeArguments::CloneUnfinalized() const {
  if (IsNull() || IsFinalized()) {
    return raw();
  }
  ASSERT(IsResolved());
  AbstractType& type = AbstractType::Handle();
  const intptr_t num_types = Length();
  const TypeArguments& clone =
      TypeArguments::Handle(TypeArguments::New(num_types));
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    type = type.CloneUnfinalized();
    clone.SetTypeAt(i, type);
  }
  ASSERT(clone.IsResolved());
  return clone.raw();
}
// Returns a clone of this finalized, uninstantiated vector in which every
// uninstantiated type argument is re-parented to |new_owner|; instantiated
// entries are shared unchanged.
RawTypeArguments* TypeArguments::CloneUninstantiated(const Class& new_owner,
                                                     TrailPtr trail) const {
  ASSERT(!IsNull());
  ASSERT(IsFinalized());
  ASSERT(!IsInstantiated());
  AbstractType& type = AbstractType::Handle();
  const intptr_t num_types = Length();
  const TypeArguments& clone =
      TypeArguments::Handle(TypeArguments::New(num_types));
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    if (!type.IsInstantiated()) {
      type = type.CloneUninstantiated(new_owner, trail);
    }
    clone.SetTypeAt(i, type);
  }
  ASSERT(clone.IsFinalized());
  return clone.raw();
}
// Returns the canonical representative of this type argument vector,
// inserting a (possibly old-space-cloned) copy into the isolate's
// canonical_type_arguments table if no equivalent entry exists yet.
// A null vector or an all-dynamic ("raw") vector canonicalizes to null.
RawTypeArguments* TypeArguments::Canonicalize(TrailPtr trail) const {
  if (IsNull() || IsCanonical()) {
    // Canonical objects always live in old space.
    ASSERT(IsOld());
    return this->raw();
  }
  const intptr_t num_types = Length();
  if (IsRaw(0, num_types)) {
    return TypeArguments::null();
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  ObjectStore* object_store = isolate->object_store();
  TypeArguments& result = TypeArguments::Handle(zone);
  {
    // First lookup under the lock: an equivalent canonical vector may
    // already exist.
    SafepointMutexLocker ml(isolate->type_canonicalization_mutex());
    CanonicalTypeArgumentsSet table(zone,
                                    object_store->canonical_type_arguments());
    result ^= table.GetOrNull(CanonicalTypeArgumentsKey(*this));
    object_store->set_canonical_type_arguments(table.Release());
  }
  if (result.IsNull()) {
    // Canonicalize each type argument.
    AbstractType& type_arg = AbstractType::Handle(zone);
    for (intptr_t i = 0; i < num_types; i++) {
      type_arg = TypeAt(i);
      type_arg = type_arg.Canonicalize(trail);
      if (IsCanonical()) {
        // Canonicalizing this type_arg canonicalized this type.
        ASSERT(IsRecursive());
        return this->raw();
      }
      SetTypeAt(i, type_arg);
    }
    // Canonicalization of a type argument of a recursive type argument vector
    // may change the hash of the vector, so recompute.
    if (IsRecursive()) {
      ComputeHash();
    }
    SafepointMutexLocker ml(isolate->type_canonicalization_mutex());
    CanonicalTypeArgumentsSet table(zone,
                                    object_store->canonical_type_arguments());
    // Since we canonicalized some type arguments above we need to lookup
    // in the table again to make sure we don't already have an equivalent
    // canonical entry.
    result ^= table.GetOrNull(CanonicalTypeArgumentsKey(*this));
    if (result.IsNull()) {
      // Make sure we have an old space object and add it to the table.
      if (this->IsNew()) {
        result ^= Object::Clone(*this, Heap::kOld);
      } else {
        result ^= this->raw();
      }
      ASSERT(result.IsOld());
      result.SetCanonical();  // Mark object as being canonical.
      // Now add this TypeArgument into the canonical list of type arguments.
      bool present = table.Insert(result);
      ASSERT(!present);
    }
    object_store->set_canonical_type_arguments(table.Release());
  }
  ASSERT(result.Equals(*this));
  ASSERT(!result.IsNull());
  ASSERT(result.IsTypeArguments());
  ASSERT(result.IsCanonical());
  return result.raw();
}
// Collects, into [uris], the URIs referenced by each type argument in this
// vector; a null vector contributes nothing.
void TypeArguments::EnumerateURIs(URIs* uris) const {
  if (IsNull()) {
    return;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  AbstractType& type = AbstractType::Handle(zone);
  const intptr_t num_types = Length();
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    type.EnumerateURIs(uris);
  }
}

// Returns a zone-allocated, human readable description of this vector,
// listing its address, hash, and each type argument in order.
const char* TypeArguments::ToCString() const {
  if (IsNull()) {
    return "TypeArguments: null";
  }
  Zone* zone = Thread::Current()->zone();
  const char* prev_cstr = OS::SCreate(zone, "TypeArguments: (@%p H%" Px ")",
                                      raw(), Smi::Value(raw_ptr()->hash_));
  // Build the string incrementally, appending one "[type]" per argument.
  for (int i = 0; i < Length(); i++) {
    const AbstractType& type_at = AbstractType::Handle(zone, TypeAt(i));
    const char* type_cstr = type_at.IsNull() ? "null" : type_at.ToCString();
    char* chars = OS::SCreate(zone, "%s [%s]", prev_cstr, type_cstr);
    prev_cstr = chars;
  }
  return prev_cstr;
}
const char* PatchClass::ToCString() const {
const Class& cls = Class::Handle(patched_class());
const char* cls_name = cls.ToCString();
return OS::SCreate(Thread::Current()->zone(), "PatchClass for %s", cls_name);
}
// Allocates a PatchClass patching [patched_class] with members originating
// from [origin_class]; the script is taken from the origin class.
RawPatchClass* PatchClass::New(const Class& patched_class,
                               const Class& origin_class) {
  const PatchClass& result = PatchClass::Handle(PatchClass::New());
  result.set_patched_class(patched_class);
  result.set_origin_class(origin_class);
  result.set_script(Script::Handle(origin_class.script()));
  // -1 marks the kernel offset as unset.
  result.set_library_kernel_offset(-1);
  return result.raw();
}

// Allocates a PatchClass patching [patched_class] with members from
// [script]; the origin class is the patched class itself.
RawPatchClass* PatchClass::New(const Class& patched_class,
                               const Script& script) {
  const PatchClass& result = PatchClass::Handle(PatchClass::New());
  result.set_patched_class(patched_class);
  result.set_origin_class(patched_class);
  result.set_script(script);
  // -1 marks the kernel offset as unset.
  result.set_library_kernel_offset(-1);
  return result.raw();
}

// Allocates an uninitialized PatchClass in old space.
RawPatchClass* PatchClass::New() {
  ASSERT(Object::patch_class_class() != Class::null());
  RawObject* raw = Object::Allocate(PatchClass::kClassId,
                                    PatchClass::InstanceSize(), Heap::kOld);
  return reinterpret_cast<RawPatchClass*>(raw);
}
// Simple field setters; StorePointer performs the GC write barrier.
void PatchClass::set_patched_class(const Class& value) const {
  StorePointer(&raw_ptr()->patched_class_, value.raw());
}

void PatchClass::set_origin_class(const Class& value) const {
  StorePointer(&raw_ptr()->origin_class_, value.raw());
}

void PatchClass::set_script(const Script& value) const {
  StorePointer(&raw_ptr()->script_, value.raw());
}

void PatchClass::set_library_kernel_data(const ExternalTypedData& data) const {
  StorePointer(&raw_ptr()->library_kernel_data_, data.raw());
}
// Returns a hash for this function derived from its (symbol) name.
intptr_t Function::Hash() const {
  return String::HashRawSymbol(name());
}

// Returns true if the debugger has a breakpoint set in this function.
// Always false in PRODUCT mode, where there is no debugger.
bool Function::HasBreakpoint() const {
#if defined(PRODUCT)
  return false;
#else
  Thread* thread = Thread::Current();
  return thread->isolate()->debugger()->HasBreakpoint(*this, thread->zone());
#endif
}
void Function::InstallOptimizedCode(const Code& code) const {
DEBUG_ASSERT(IsMutatorOrAtSafepoint());
// We may not have previous code if FLAG_precompile is set.
// Hot-reload may have already disabled the current code.
if (HasCode() && !Code::Handle(CurrentCode()).IsDisabled()) {
Code::Handle(CurrentCode()).DisableDartCode();
}
AttachCode(code);
}
// Points this function's code_, entry_point_ and unchecked_entry_point_
// at [value]; must run on the mutator thread or at a safepoint.
void Function::SetInstructions(const Code& value) const {
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  SetInstructionsSafe(value);
}

// Unchecked variant of SetInstructions; updates the code pointer and the
// cached entry points together.
void Function::SetInstructionsSafe(const Code& value) const {
  StorePointer(&raw_ptr()->code_, value.raw());
  StoreNonPointer(&raw_ptr()->entry_point_, value.EntryPoint());
  StoreNonPointer(&raw_ptr()->unchecked_entry_point_,
                  value.UncheckedEntryPoint());
}

// Makes [value] this function's code, setting its owner first so the code
// object is fully set up before it becomes reachable through the function.
void Function::AttachCode(const Code& value) const {
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  // Finish setting up code before activating it.
  value.set_owner(*this);
  SetInstructions(value);
  ASSERT(Function::Handle(value.function()).IsNull() ||
         (value.function() == this->raw()));
}
// Returns true if this function has real compiled code attached, i.e. its
// code_ is neither the lazy-compile stub nor (in JIT mode) the
// interpret-call stub. code_ is never null.
bool Function::HasCode() const {
  NoSafepointScope no_safepoint;
  ASSERT(raw_ptr()->code_ != Code::null());
#if defined(DART_PRECOMPILED_RUNTIME)
  return raw_ptr()->code_ != StubCode::LazyCompile_entry()->code();
#else
  return raw_ptr()->code_ != StubCode::LazyCompile_entry()->code() &&
         raw_ptr()->code_ != StubCode::InterpretCall_entry()->code();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
#if !defined(DART_PRECOMPILED_RUNTIME)
// Returns true if this function kind may be executed from kernel bytecode.
// Synthetic dispatchers/forwarders and irregexp functions are excluded;
// implicit static final getters are only allowed for field initializers
// or const fields.
bool Function::IsBytecodeAllowed(Zone* zone) const {
  switch (kind()) {
    case RawFunction::kImplicitGetter:
    case RawFunction::kImplicitSetter:
    case RawFunction::kMethodExtractor:
    case RawFunction::kNoSuchMethodDispatcher:
    case RawFunction::kInvokeFieldDispatcher:
    case RawFunction::kDynamicInvocationForwarder:
    case RawFunction::kImplicitClosureFunction:
    case RawFunction::kIrregexpFunction:
      return false;
    case RawFunction::kImplicitStaticFinalGetter:
      return kernel::IsFieldInitializer(*this, zone) || is_const();
    default:
      return true;
  }
}

// Attaches [value] as this function's bytecode and, when the interpreter
// is enabled, routes calls through the InterpretCall stub.
void Function::AttachBytecode(const Code& value) const {
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  ASSERT(FLAG_enable_interpreter || FLAG_use_bytecode_compiler);
  // Finish setting up code before activating it.
  value.set_owner(*this);
  StorePointer(&raw_ptr()->bytecode_, value.raw());
  // We should not have loaded the bytecode if the function had code.
  ASSERT(!HasCode());
  if (FLAG_enable_interpreter) {
    // Set the code entry_point to InterpretCall stub.
    SetInstructions(Code::Handle(StubCode::InterpretCall_entry()->code()));
  }
}
// Returns true if bytecode has been attached to this function.
bool Function::HasBytecode() const {
  return raw_ptr()->bytecode_ != Code::null();
}

// Raw-pointer variant of HasBytecode, usable without a handle.
bool Function::HasBytecode(RawFunction* function) {
  return function->ptr()->bytecode_ != Code::null();
}
#endif // !defined(DART_PRECOMPILED_RUNTIME)
// Raw-pointer variant of HasCode: true if [function]'s code_ is neither
// the lazy-compile stub nor (in JIT mode) the interpret-call stub.
bool Function::HasCode(RawFunction* function) {
  NoSafepointScope no_safepoint;
  ASSERT(function->ptr()->code_ != Code::null());
#if defined(DART_PRECOMPILED_RUNTIME)
  return function->ptr()->code_ != StubCode::LazyCompile_entry()->code();
#else
  return function->ptr()->code_ != StubCode::LazyCompile_entry()->code() &&
         function->ptr()->code_ != StubCode::InterpretCall_entry()->code();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
// Drops this function's unoptimized code and bytecode and resets its entry
// point to the lazy-compile stub. Not available in precompiled mode.
void Function::ClearCode() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(Thread::Current()->IsMutatorThread());
  StorePointer(&raw_ptr()->unoptimized_code_, Code::null());
  StorePointer(&raw_ptr()->bytecode_, Code::null());
  SetInstructions(Code::Handle(StubCode::LazyCompile_entry()->code()));
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

// Compiles unoptimized code for this function if not already present,
// propagating any compilation error to the caller.
void Function::EnsureHasCompiledUnoptimizedCode() const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(thread->IsMutatorThread());
  const Error& error =
      Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, *this));
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
  }
}
// Deoptimization support: disables the currently attached optimized code,
// (re)compiles unoptimized code if needed, attaches it, and registers the
// disabled code with the isolate for deopt bookkeeping.
void Function::SwitchToUnoptimizedCode() const {
  ASSERT(HasOptimizedCode());
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  Zone* zone = thread->zone();
  ASSERT(thread->IsMutatorThread());
  const Code& current_code = Code::Handle(zone, CurrentCode());
  if (FLAG_trace_deoptimization_verbose) {
    THR_Print("Disabling optimized code: '%s' entry: %#" Px "\n",
              ToFullyQualifiedCString(), current_code.EntryPoint());
  }
  current_code.DisableDartCode();
  const Error& error =
      Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, *this));
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
  }
  const Code& unopt_code = Code::Handle(zone, unoptimized_code());
  unopt_code.Enable();
  AttachCode(unopt_code);
  isolate->TrackDeoptimizedCode(current_code);
}

// Like SwitchToUnoptimizedCode, but never compiles: if no unoptimized code
// exists, falls back to the lazy-compile stub instead. No-op when the
// function has no optimized code.
void Function::SwitchToLazyCompiledUnoptimizedCode() const {
  if (!HasOptimizedCode()) {
    return;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(thread->IsMutatorThread());
  const Code& current_code = Code::Handle(zone, CurrentCode());
  TIR_Print("Disabling optimized code for %s\n", ToCString());
  current_code.DisableDartCode();
  const Code& unopt_code = Code::Handle(zone, unoptimized_code());
  if (unopt_code.IsNull()) {
    // Set the lazy compile code.
    TIR_Print("Switched to lazy compile stub for %s\n", ToCString());
    SetInstructions(Code::Handle(StubCode::LazyCompile_entry()->code()));
    return;
  }
  TIR_Print("Switched to unoptimized code for %s\n", ToCString());
  AttachCode(unopt_code);
  unopt_code.Enable();
}
// Records [value] as this function's unoptimized code. Only a null code or
// a non-optimized code object is accepted; unavailable in precompiled mode.
void Function::set_unoptimized_code(const Code& value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  ASSERT(value.IsNull() || !value.is_optimized());
  StorePointer(&raw_ptr()->unoptimized_code_, value.raw());
#endif
}
// The accessors below read/write through the overloaded data_ field, which
// for closure functions holds a ClosureData object (see the comment above
// Function::set_data further down in this file).

// Returns the captured-variable context scope of a closure function, or
// null for any other function kind.
RawContextScope* Function::context_scope() const {
  if (IsClosureFunction()) {
    const Object& obj = Object::Handle(raw_ptr()->data_);
    ASSERT(!obj.IsNull());
    return ClosureData::Cast(obj).context_scope();
  }
  return ContextScope::null();
}

// Stores the context scope of a closure function; any other kind is a bug.
void Function::set_context_scope(const ContextScope& value) const {
  if (IsClosureFunction()) {
    const Object& obj = Object::Handle(raw_ptr()->data_);
    ASSERT(!obj.IsNull());
    ClosureData::Cast(obj).set_context_scope(value);
    return;
  }
  UNREACHABLE();
}

// Returns the cached closure instance of an implicit static closure
// function, or null for any other function kind.
RawInstance* Function::implicit_static_closure() const {
  if (IsImplicitStaticClosureFunction()) {
    const Object& obj = Object::Handle(raw_ptr()->data_);
    ASSERT(!obj.IsNull());
    return ClosureData::Cast(obj).implicit_static_closure();
  }
  return Instance::null();
}

// Caches [closure] on an implicit static closure function; any other kind
// is a bug.
void Function::set_implicit_static_closure(const Instance& closure) const {
  if (IsImplicitStaticClosureFunction()) {
    const Object& obj = Object::Handle(raw_ptr()->data_);
    ASSERT(!obj.IsNull());
    ClosureData::Cast(obj).set_implicit_static_closure(closure);
    return;
  }
  UNREACHABLE();
}
// Returns the Script held in data_ for an eval function, or null if data_
// holds something else.
RawScript* Function::eval_script() const {
  const Object& obj = Object::Handle(raw_ptr()->data_);
  if (obj.IsScript()) {
    return Script::Cast(obj).raw();
  }
  return Script::null();
}

// Stores the eval script; only valid while data_ is still unset.
void Function::set_eval_script(const Script& script) const {
  ASSERT(token_pos() == TokenPosition::kMinSource);
  ASSERT(raw_ptr()->data_ == Object::null());
  set_data(script);
}

// For a method extractor, data_ holds the extracted closure function.
RawFunction* Function::extracted_method_closure() const {
  ASSERT(kind() == RawFunction::kMethodExtractor);
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(obj.IsFunction());
  return Function::Cast(obj).raw();
}

void Function::set_extracted_method_closure(const Function& value) const {
  ASSERT(kind() == RawFunction::kMethodExtractor);
  ASSERT(raw_ptr()->data_ == Object::null());
  set_data(value);
}

// For noSuchMethod / invoke-field dispatchers, data_ holds the saved
// arguments descriptor array.
RawArray* Function::saved_args_desc() const {
  ASSERT(kind() == RawFunction::kNoSuchMethodDispatcher ||
         kind() == RawFunction::kInvokeFieldDispatcher);
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(obj.IsArray());
  return Array::Cast(obj).raw();
}

void Function::set_saved_args_desc(const Array& value) const {
  ASSERT(kind() == RawFunction::kNoSuchMethodDispatcher ||
         kind() == RawFunction::kInvokeFieldDispatcher);
  ASSERT(raw_ptr()->data_ == Object::null());
  set_data(value);
}

// For implicit getters/setters and implicit static final getters, data_
// holds the accessed Field.
RawField* Function::accessor_field() const {
  ASSERT(kind() == RawFunction::kImplicitGetter ||
         kind() == RawFunction::kImplicitSetter ||
         kind() == RawFunction::kImplicitStaticFinalGetter);
  return Field::RawCast(raw_ptr()->data_);
}

void Function::set_accessor_field(const Field& value) const {
  ASSERT(kind() == RawFunction::kImplicitGetter ||
         kind() == RawFunction::kImplicitSetter ||
         kind() == RawFunction::kImplicitStaticFinalGetter);
  // Top level classes may be finalized multiple times.
  ASSERT(raw_ptr()->data_ == Object::null() || raw_ptr()->data_ == value.raw());
  set_data(value);
}
// Returns the enclosing function of a closure or signature function (read
// from its ClosureData/SignatureData), or null for all other kinds.
RawFunction* Function::parent_function() const {
  if (IsClosureFunction() || IsSignatureFunction()) {
    const Object& obj = Object::Handle(raw_ptr()->data_);
    ASSERT(!obj.IsNull());
    if (IsClosureFunction()) {
      return ClosureData::Cast(obj).parent_function();
    } else {
      return SignatureData::Cast(obj).parent_function();
    }
  }
  return Function::null();
}

// Stores the enclosing function of a closure or signature function.
void Function::set_parent_function(const Function& value) const {
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  if (IsClosureFunction()) {
    ClosureData::Cast(obj).set_parent_function(value);
  } else {
    ASSERT(IsSignatureFunction());
    SignatureData::Cast(obj).set_parent_function(value);
  }
}
// Enclosing outermost function of this local function.
RawFunction* Function::GetOutermostFunction() const {
  RawFunction* parent = parent_function();
  if (parent == Object::null()) {
    return raw();
  }
  // Walk the parent chain until a function with no parent is reached.
  Function& current = Function::Handle(parent);
  for (RawFunction* next = current.parent_function(); next != Object::null();
       next = current.parent_function()) {
    current = next;
  }
  return current.raw();
}
// Returns true if any function enclosing this local function is generic.
bool Function::HasGenericParent() const {
  if (IsImplicitClosureFunction()) {
    // The parent function of an implicit closure function is not the enclosing
    // function we are asking about here.
    return false;
  }
  Function& parent = Function::Handle(parent_function());
  while (!parent.IsNull()) {
    if (parent.IsGeneric()) {
      return true;
    }
    parent = parent.parent_function();
  }
  return false;
}
// Returns the implicit closure function cached for this function, or null
// if none exists (or this function kind can never have one). For native
// functions the closure lives in slot 1 of the data_ array.
RawFunction* Function::implicit_closure_function() const {
  if (IsClosureFunction() || IsSignatureFunction() || IsFactory() ||
      IsDispatcherOrImplicitAccessor() || IsImplicitStaticFieldInitializer()) {
    return Function::null();
  }
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(obj.IsNull() || obj.IsScript() || obj.IsFunction() || obj.IsArray());
  if (obj.IsNull() || obj.IsScript()) {
    return Function::null();
  }
  if (obj.IsFunction()) {
    return Function::Cast(obj).raw();
  }
  ASSERT(is_native());
  ASSERT(obj.IsArray());
  const Object& res = Object::Handle(Array::Cast(obj).At(1));
  return res.IsNull() ? Function::null() : Function::Cast(res).raw();
}

// Caches [value] as this function's implicit closure function, storing it
// either in slot 1 of the native data_ array or directly in data_.
void Function::set_implicit_closure_function(const Function& value) const {
  ASSERT(!IsClosureFunction() && !IsSignatureFunction());
  const Object& old_data = Object::Handle(raw_ptr()->data_);
  if (is_native()) {
    ASSERT(old_data.IsArray());
    // Only setting a fresh value or clearing is allowed.
    ASSERT((Array::Cast(old_data).At(1) == Object::null()) || value.IsNull());
    Array::Cast(old_data).SetAt(1, value);
  } else {
    // Maybe this function will turn into a native later on :-/
    if (old_data.IsArray()) {
      ASSERT((Array::Cast(old_data).At(1) == Object::null()) || value.IsNull());
      Array::Cast(old_data).SetAt(1, value);
    } else {
      ASSERT(old_data.IsNull() || value.IsNull());
      set_data(value);
    }
  }
}

// Returns the cached signature type of this signature/closure function, or
// null if it has not been constructed yet.
RawType* Function::ExistingSignatureType() const {
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  if (IsSignatureFunction()) {
    return SignatureData::Cast(obj).signature_type();
  } else {
    ASSERT(IsClosureFunction());
    return ClosureData::Cast(obj).signature_type();
  }
}
// Returns the (possibly still unfinalized) function type of this signature
// or closure function, constructing and caching it on first use.
RawType* Function::SignatureType() const {
  Type& type = Type::Handle(ExistingSignatureType());
  if (type.IsNull()) {
    // The function type of this function is not yet cached and needs to be
    // constructed and cached here.
    // A function type is type parameterized in the same way as the owner class
    // of its non-static signature function.
    // It is not type parameterized if its signature function is static, or if
    // none of its result type or formal parameter types are type parameterized.
    // Unless the function type is a generic typedef, the type arguments of the
    // function type are not explicitly stored in the function type as a vector
    // of type arguments.
    // The type class of a non-typedef function type is always the non-generic
    // _Closure class, whether the type is generic or not.
    // The type class of a typedef function type is always the typedef class,
    // which may be generic, in which case the type stores type arguments.
    // With the introduction of generic functions, we may reach here before the
    // function type parameters have been resolved. Therefore, we cannot yet
    // check whether the function type has an instantiated signature.
    // We can do it only when the signature has been resolved.
    // We only set the type class of the function type to the typedef class
    // if the signature of the function type is the signature of the typedef.
    // Note that a function type can have a typedef class as owner without
    // representing the typedef, as in the following example:
    // typedef F(f(int x)); where the type of f is a function type with F as
    // owner, without representing the function type of F.
    Class& scope_class = Class::Handle(Owner());
    if (!scope_class.IsTypedefClass() ||
        (scope_class.signature_function() != raw())) {
      scope_class = Isolate::Current()->object_store()->closure_class();
    }
    const TypeArguments& signature_type_arguments =
        TypeArguments::Handle(scope_class.type_parameters());
    // Return the still unfinalized signature type.
    type = Type::New(scope_class, signature_type_arguments, token_pos());
    type.set_signature(*this);
    SetSignatureType(type);
  }
  return type.raw();
}

// Caches [value] as the signature type in this function's
// SignatureData/ClosureData.
void Function::SetSignatureType(const Type& value) const {
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  if (IsSignatureFunction()) {
    SignatureData::Cast(obj).set_signature_type(value);
    ASSERT(!value.IsCanonical() || (value.signature() == this->raw()));
  } else {
    ASSERT(IsClosureFunction());
    ClosureData::Cast(obj).set_signature_type(value);
  }
}
// Returns true if this is a factory constructor marked as redirecting.
bool Function::IsRedirectingFactory() const {
  if (!IsFactory() || !is_redirecting()) {
    return false;
  }
  ASSERT(!IsClosureFunction());  // A factory cannot also be a closure.
  return true;
}

// Returns the redirection target type stored in this redirecting factory's
// RedirectionData.
RawType* Function::RedirectionType() const {
  ASSERT(IsRedirectingFactory());
  ASSERT(!is_native());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  return RedirectionData::Cast(obj).type();
}
// Returns a static, human readable name for the given function [kind].
// The `break` statements that previously followed each `return` were
// unreachable dead code and have been removed.
const char* Function::KindToCString(RawFunction::Kind kind) {
  switch (kind) {
    case RawFunction::kRegularFunction:
      return "RegularFunction";
    case RawFunction::kClosureFunction:
      return "ClosureFunction";
    case RawFunction::kImplicitClosureFunction:
      return "ImplicitClosureFunction";
    case RawFunction::kSignatureFunction:
      return "SignatureFunction";
    case RawFunction::kGetterFunction:
      return "GetterFunction";
    case RawFunction::kSetterFunction:
      return "SetterFunction";
    case RawFunction::kConstructor:
      return "Constructor";
    case RawFunction::kImplicitGetter:
      return "ImplicitGetter";
    case RawFunction::kImplicitSetter:
      return "ImplicitSetter";
    case RawFunction::kImplicitStaticFinalGetter:
      return "ImplicitStaticFinalGetter";
    case RawFunction::kMethodExtractor:
      return "MethodExtractor";
    case RawFunction::kNoSuchMethodDispatcher:
      return "NoSuchMethodDispatcher";
    case RawFunction::kInvokeFieldDispatcher:
      return "InvokeFieldDispatcher";
    case RawFunction::kIrregexpFunction:
      return "IrregexpFunction";
    case RawFunction::kDynamicInvocationForwarder:
      return "DynamicInvocationForwarder";
    default:
      UNREACHABLE();
      return NULL;
  }
}
// The three setters below lazily allocate this factory's RedirectionData
// on first use before storing the respective field.

void Function::SetRedirectionType(const Type& type) const {
  ASSERT(IsFactory());
  Object& obj = Object::Handle(raw_ptr()->data_);
  if (obj.IsNull()) {
    obj = RedirectionData::New();
    set_data(obj);
  }
  RedirectionData::Cast(obj).set_type(type);
}

// Returns the identifier of the redirection target constructor.
RawString* Function::RedirectionIdentifier() const {
  ASSERT(IsRedirectingFactory());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  return RedirectionData::Cast(obj).identifier();
}

void Function::SetRedirectionIdentifier(const String& identifier) const {
  ASSERT(IsFactory());
  Object& obj = Object::Handle(raw_ptr()->data_);
  if (obj.IsNull()) {
    obj = RedirectionData::New();
    set_data(obj);
  }
  RedirectionData::Cast(obj).set_identifier(identifier);
}

// Returns the resolved redirection target function.
RawFunction* Function::RedirectionTarget() const {
  ASSERT(IsRedirectingFactory());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  return RedirectionData::Cast(obj).target();
}

void Function::SetRedirectionTarget(const Function& target) const {
  ASSERT(IsFactory());
  Object& obj = Object::Handle(raw_ptr()->data_);
  if (obj.IsNull()) {
    obj = RedirectionData::New();
    set_data(obj);
  }
  RedirectionData::Cast(obj).set_target(target);
}
// This field is heavily overloaded:
//   eval function:           Script expression source
//   kernel eval function:    Array[0] = Script
//                            Array[1] = Kernel data
//                            Array[2] = Kernel offset of enclosing library
//   signature function:      SignatureData
//   method extractor:        Function extracted closure function
//   implicit getter:         Field
//   implicit setter:         Field
//   impl. static final gttr: Field
//   noSuchMethod dispatcher: Array arguments descriptor
//   invoke-field dispatcher: Array arguments descriptor
//   redirecting constructor: RedirectionData
//   closure function:        ClosureData
//   irregexp function:       Array[0] = RegExp
//                            Array[1] = Smi string specialization cid
//   native function:         Array[0] = String native name
//                            Array[1] = Function implicit closure function
//   regular function:        Function for implicit closure function
void Function::set_data(const Object& value) const {
  StorePointer(&raw_ptr()->data_, value.raw());
}

// Returns true if this function, or for a local function its outermost
// enclosing non-local function, is a factory.
bool Function::IsInFactoryScope() const {
  if (!IsLocalFunction()) {
    return IsFactory();
  }
  Function& outer_function = Function::Handle(parent_function());
  while (outer_function.IsLocalFunction()) {
    outer_function = outer_function.parent_function();
  }
  return outer_function.IsFactory();
}
// Stores this function's name; names are always interned symbols.
void Function::set_name(const String& value) const {
  ASSERT(value.IsSymbol());
  StorePointer(&raw_ptr()->name_, value.raw());
}

// Stores this function's owner; only signature functions may have a null
// owner.
void Function::set_owner(const Object& value) const {
  ASSERT(!value.IsNull() || IsSignatureFunction());
  StorePointer(&raw_ptr()->owner_, value.raw());
}
// For irregexp functions, data_ is a 2-element array:
//   [0] = the RegExp, [1] = a Smi packing the sticky flag and the string
//   specialization class id (see the bit fields below).
RawRegExp* Function::regexp() const {
  ASSERT(kind() == RawFunction::kIrregexpFunction);
  const Array& pair = Array::Cast(Object::Handle(raw_ptr()->data_));
  return RegExp::RawCast(pair.At(0));
}

// Bit 0 of the Smi in data_[1]: whether the specialization is sticky.
class StickySpecialization : public BitField<intptr_t, bool, 0, 1> {};
// Remaining bits: the class id of the string type this irregexp function
// is specialized for.
class StringSpecializationCid
    : public BitField<intptr_t, intptr_t, 1, RawObject::kClassIdTagSize> {};

// Returns the string class id this irregexp function is specialized for.
intptr_t Function::string_specialization_cid() const {
  ASSERT(kind() == RawFunction::kIrregexpFunction);
  const Array& pair = Array::Cast(Object::Handle(raw_ptr()->data_));
  return StringSpecializationCid::decode(Smi::Value(Smi::RawCast(pair.At(1))));
}

// Returns the sticky flag of this irregexp function's specialization.
bool Function::is_sticky_specialization() const {
  ASSERT(kind() == RawFunction::kIrregexpFunction);
  const Array& pair = Array::Cast(Object::Handle(raw_ptr()->data_));
  return StickySpecialization::decode(Smi::Value(Smi::RawCast(pair.At(1))));
}

// Initializes the irregexp data_ pair; may only be called once (data_ must
// still be null).
void Function::SetRegExpData(const RegExp& regexp,
                             intptr_t string_specialization_cid,
                             bool sticky) const {
  ASSERT(kind() == RawFunction::kIrregexpFunction);
  ASSERT(RawObject::IsStringClassId(string_specialization_cid));
  ASSERT(raw_ptr()->data_ == Object::null());
  const Array& pair = Array::Handle(Array::New(2, Heap::kOld));
  pair.SetAt(0, regexp);
  pair.SetAt(1, Smi::Handle(Smi::New(StickySpecialization::encode(sticky) |
                                     StringSpecializationCid::encode(
                                         string_specialization_cid))));
  set_data(pair);
}
// For native functions, data_ is a 2-element array with the native name in
// slot 0 and the (optional) implicit closure function in slot 1.
RawString* Function::native_name() const {
  ASSERT(is_native());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(obj.IsArray());
  return String::RawCast(Array::Cast(obj).At(0));
}

// Installs the native-name array, preserving any implicit closure function
// already stored in data_.
void Function::set_native_name(const String& value) const {
  Zone* zone = Thread::Current()->zone();
  ASSERT(is_native());
  // Due to the fact that kernel needs to read in the constant table before the
  // annotation data is available, we don't know at function creation time
  // whether the function is a native or not.
  //
  // Reading the constant table can cause a static function to get an implicit
  // closure function.
  //
  // We therefore handle both cases.
  const Object& old_data = Object::Handle(zone, raw_ptr()->data_);
  ASSERT(old_data.IsNull() ||
         (old_data.IsFunction() &&
          Function::Handle(zone, Function::RawCast(old_data.raw()))
              .IsImplicitClosureFunction()));
  const Array& pair = Array::Handle(zone, Array::New(2, Heap::kOld));
  pair.SetAt(0, value);
  pair.SetAt(1, old_data);  // will be the implicit closure function if needed.
  set_data(pair);
}
// Stores the declared result type; a result type is never null.
void Function::set_result_type(const AbstractType& value) const {
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->result_type_, value.raw());
}

// Returns the declared type of the parameter at [index].
RawAbstractType* Function::ParameterTypeAt(intptr_t index) const {
  const Array& parameter_types = Array::Handle(raw_ptr()->parameter_types_);
  return AbstractType::RawCast(parameter_types.At(index));
}

void Function::SetParameterTypeAt(intptr_t index,
                                  const AbstractType& value) const {
  ASSERT(!value.IsNull());
  // Method extractor parameters are shared and are in the VM heap.
  ASSERT(kind() != RawFunction::kMethodExtractor);
  const Array& parameter_types = Array::Handle(raw_ptr()->parameter_types_);
  parameter_types.SetAt(index, value);
}

void Function::set_parameter_types(const Array& value) const {
  StorePointer(&raw_ptr()->parameter_types_, value.raw());
}

// Returns the name of the parameter at [index]; names are stored as
// interned symbols.
RawString* Function::ParameterNameAt(intptr_t index) const {
  const Array& parameter_names = Array::Handle(raw_ptr()->parameter_names_);
  return String::RawCast(parameter_names.At(index));
}

void Function::SetParameterNameAt(intptr_t index, const String& value) const {
  ASSERT(!value.IsNull() && value.IsSymbol());
  const Array& parameter_names = Array::Handle(raw_ptr()->parameter_names_);
  parameter_names.SetAt(index, value);
}

void Function::set_parameter_names(const Array& value) const {
  StorePointer(&raw_ptr()->parameter_names_, value.raw());
}

// Stores this function's type parameter vector (null for non-generic
// functions).
void Function::set_type_parameters(const TypeArguments& value) const {
  StorePointer(&raw_ptr()->type_parameters_, value.raw());
}
// Returns the number of type parameters declared directly by this
// function; 0 if the function is not generic (represented by a null
// type parameter vector).
intptr_t Function::NumTypeParameters(Thread* thread) const {
  if (type_parameters() == TypeArguments::null()) {
    return 0;
  }
  REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread);
  TypeArguments& type_params = thread->TypeArgumentsHandle();
  type_params = type_parameters();
  // We require null to represent a non-generic function.
  ASSERT(type_params.Length() != 0);
  return type_params.Length();
}

// Returns the total number of type parameters declared by all enclosing
// functions of this local function, stopping at (and including) an
// implicit closure function's parent.
intptr_t Function::NumParentTypeParameters() const {
  if (IsImplicitClosureFunction()) {
    return 0;
  }
  Thread* thread = Thread::Current();
  Function& parent = Function::Handle(parent_function());
  intptr_t num_parent_type_params = 0;
  while (!parent.IsNull()) {
    num_parent_type_params += parent.NumTypeParameters(thread);
    if (parent.IsImplicitClosureFunction()) break;
    parent ^= parent.parent_function();
  }
  return num_parent_type_params;
}

// Debug helper: prints the signature type of this function and of each of
// its enclosing functions.
void Function::PrintSignatureTypes() const {
  Function& sig_fun = Function::Handle(raw());
  Type& sig_type = Type::Handle();
  while (!sig_fun.IsNull()) {
    sig_type = sig_fun.SignatureType();
    THR_Print("%s%s\n",
              sig_fun.IsImplicitClosureFunction() ? "implicit closure: " : "",
              sig_type.ToCString());
    sig_fun ^= sig_fun.parent_function();
  }
}
// Looks up a type parameter named [type_name] declared by this function or
// any enclosing function, walking outward through the parent chain. Each
// step to an enclosing function decrements *function_level (if non-NULL).
// Returns null if no such type parameter exists.
RawTypeParameter* Function::LookupTypeParameter(
    const String& type_name,
    intptr_t* function_level) const {
  ASSERT(!type_name.IsNull());
  Thread* thread = Thread::Current();
  REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread);
  REUSABLE_TYPE_PARAMETER_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  TypeArguments& type_params = thread->TypeArgumentsHandle();
  TypeParameter& type_param = thread->TypeParameterHandle();
  String& type_param_name = thread->StringHandle();
  Function& function = thread->FunctionHandle();
  function ^= this->raw();
  while (!function.IsNull()) {
    type_params ^= function.type_parameters();
    if (!type_params.IsNull()) {
      const intptr_t num_type_params = type_params.Length();
      for (intptr_t i = 0; i < num_type_params; i++) {
        type_param ^= type_params.TypeAt(i);
        type_param_name = type_param.name();
        if (type_param_name.Equals(type_name)) {
          return type_param.raw();
        }
      }
    }
    if (function.IsImplicitClosureFunction()) {
      // The parent function is not the enclosing function, but the closurized
      // function with identical type parameters.
      break;
    }
    function ^= function.parent_function();
    if (function_level != NULL) {
      (*function_level)--;
    }
  }
  return TypeParameter::null();
}
// The setters below update individual bit fields packed into kind_tag_.
void Function::set_kind(RawFunction::Kind value) const {
  set_kind_tag(KindBits::update(value, raw_ptr()->kind_tag_));
}

void Function::set_modifier(RawFunction::AsyncModifier value) const {
  set_kind_tag(ModifierBits::update(value, raw_ptr()->kind_tag_));
}

void Function::set_recognized_kind(MethodRecognizer::Kind value) const {
  // Prevent multiple settings of kind.
  ASSERT((value == MethodRecognizer::kUnknown) || !IsRecognized());
  set_kind_tag(RecognizedBits::update(value, raw_ptr()->kind_tag_));
}

// Stores the source token position; not available in precompiled mode.
void Function::set_token_pos(TokenPosition token_pos) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(!token_pos.IsClassifying() || IsMethodExtractor());
  StoreNonPointer(&raw_ptr()->token_pos_, token_pos);
#endif
}

// Stores the raw kind tag bits. (The static_cast is redundant here since
// value is already uint32_t.)
void Function::set_kind_tag(uint32_t value) const {
  StoreNonPointer(&raw_ptr()->kind_tag_, static_cast<uint32_t>(value));
}

// Stores the packed parameter-count bit fields wholesale.
void Function::set_packed_fields(uint32_t packed_fields) const {
  StoreNonPointer(&raw_ptr()->packed_fields_, packed_fields);
}
// Stores the number of fixed (non-optional) parameters into the packed
// bit fields.
void Function::set_num_fixed_parameters(intptr_t value) const {
  ASSERT(value >= 0);
  ASSERT(Utils::IsUint(RawFunction::kMaxFixedParametersBits, value));
  const uint32_t* original = &raw_ptr()->packed_fields_;
  StoreNonPointer(original, RawFunction::PackedNumFixedParameters::update(
                                value, *original));
}

// Stores the optional parameter count and whether the optional parameters
// are named (as opposed to positional) into the packed bit fields.
void Function::SetNumOptionalParameters(intptr_t value,
                                        bool are_optional_positional) const {
  ASSERT(Utils::IsUint(RawFunction::kMaxOptionalParametersBits, value));
  uint32_t packed_fields = raw_ptr()->packed_fields_;
  packed_fields = RawFunction::PackedHasNamedOptionalParameters::update(
      !are_optional_positional, packed_fields);
  packed_fields =
      RawFunction::PackedNumOptionalParameters::update(value, packed_fields);
  set_packed_fields(packed_fields);
}
bool Function::IsOptimizable() const {
if (FLAG_precompiled_mode) {
return true;
}
if (is_native()) {
// Native methods don't need to be optimized.
return false;
}
const intptr_t function_length = end_token_pos().Pos() - token_pos().Pos();
if (is_optimizable() && (script() != Script::null()) &&
(function_length < FLAG_huge_method_cutoff_in_tokens)) {
// Additional check needed for implicit getters.
return (unoptimized_code() == Object::null()) ||
(Code::Handle(unoptimized_code()).Size() <
FLAG_huge_method_cutoff_in_code_size);
}
return false;
}
void Function::SetIsOptimizable(bool value) const {
ASSERT(!is_native());
set_is_optimizable(value);
if (!value) {
set_is_inlinable(false);
set_usage_counter(INT_MIN);
}
}
// Returns whether the inliner may consider this function. External functions
// and compiler-generated bodies are never inlined; outside PRODUCT builds, a
// function with an active breakpoint is also excluded so the debugger can
// still stop in it.
bool Function::CanBeInlined() const {
#if defined(PRODUCT)
  return is_inlinable() && !is_external() && !is_generated_body();
#else
  Thread* thread = Thread::Current();
  return is_inlinable() && !is_external() && !is_generated_body() &&
         !thread->isolate()->debugger()->HasBreakpoint(*this, thread->zone());
#endif
}
// Total parameter count: fixed parameters (including implicit ones such as
// the receiver) plus all optional (positional or named) parameters.
intptr_t Function::NumParameters() const {
  return num_fixed_parameters() + NumOptionalParameters();
}
// Returns how many leading parameters are implicit (not written by the user):
// one for constructors (type arguments for a factory, the instance for a
// generative constructor), one closure object for closure/signature
// functions, and one receiver for instance methods.
intptr_t Function::NumImplicitParameters() const {
  switch (kind()) {
    case RawFunction::kConstructor:
      // Type arguments for factory; instance for generative constructor.
      return 1;
    case RawFunction::kClosureFunction:
    case RawFunction::kImplicitClosureFunction:
    case RawFunction::kSignatureFunction:
      return 1;  // Closure object.
    default:
      break;
  }
  // Closure functions defined inside instance (i.e. non-static) functions are
  // marked as non-static, but they do not have a receiver; all closure kinds
  // were already handled above.
  return is_static() ? 0 : 1;  // Receiver for instance methods.
}
// Validates the shape of an argument list against this signature: the number
// of passed type arguments (when non-zero), the number of named arguments,
// and the number of positional arguments must each fit the declared counts.
// Returns true when all counts are valid; on failure, if 'error_message' is
// non-NULL it receives a description of the first violation.
bool Function::AreValidArgumentCounts(intptr_t num_type_arguments,
                                      intptr_t num_arguments,
                                      intptr_t num_named_arguments,
                                      String* error_message) const {
  // Zero type arguments means none were passed, which is always accepted;
  // otherwise the count must match the declared type parameters exactly.
  if ((num_type_arguments != 0) &&
      (num_type_arguments != NumTypeParameters())) {
    if (error_message != NULL) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd " type arguments passed, but %" Pd " expected",
                     num_type_arguments, NumTypeParameters());
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too many type arguments.
  }
  if (num_named_arguments > NumOptionalNamedParameters()) {
    if (error_message != NULL) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd " named passed, at most %" Pd " expected",
                     num_named_arguments, NumOptionalNamedParameters());
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too many named arguments.
  }
  // Remaining checks concern positional arguments only.
  const intptr_t num_pos_args = num_arguments - num_named_arguments;
  const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
  const intptr_t num_pos_params = num_fixed_parameters() + num_opt_pos_params;
  if (num_pos_args > num_pos_params) {
    if (error_message != NULL) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      // Hide implicit parameters to the user.
      const intptr_t num_hidden_params = NumImplicitParameters();
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd "%s passed, %s%" Pd " expected",
                     num_pos_args - num_hidden_params,
                     num_opt_pos_params > 0 ? " positional" : "",
                     num_opt_pos_params > 0 ? "at most " : "",
                     num_pos_params - num_hidden_params);
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too many fixed and/or positional arguments.
  }
  if (num_pos_args < num_fixed_parameters()) {
    if (error_message != NULL) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      // Hide implicit parameters to the user.
      const intptr_t num_hidden_params = NumImplicitParameters();
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd "%s passed, %s%" Pd " expected",
                     num_pos_args - num_hidden_params,
                     num_opt_pos_params > 0 ? " positional" : "",
                     num_opt_pos_params > 0 ? "at least " : "",
                     num_fixed_parameters() - num_hidden_params);
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too few fixed and/or positional arguments.
  }
  return true;
}
// Checks that the argument counts are valid for this function and that every
// provided argument name matches one of its named parameters. Returns true
// when the arguments are valid. On failure, if 'error_message' is non-NULL,
// it is set to a description of the problem (allocated in old space because
// this can run on the background optimizing-compiler thread).
bool Function::AreValidArguments(intptr_t num_type_arguments,
                                 intptr_t num_arguments,
                                 const Array& argument_names,
                                 String* error_message) const {
  const intptr_t num_named_arguments =
      argument_names.IsNull() ? 0 : argument_names.Length();
  if (!AreValidArgumentCounts(num_type_arguments, num_arguments,
                              num_named_arguments, error_message)) {
    return false;
  }
  // Verify that all argument names are valid parameter names.
  Zone* zone = Thread::Current()->zone();
  String& argument_name = String::Handle(zone);
  String& parameter_name = String::Handle(zone);
  // These values are invariant across the loops below; compute them once.
  const intptr_t num_positional_args = num_arguments - num_named_arguments;
  const intptr_t num_parameters = NumParameters();
  for (intptr_t i = 0; i < num_named_arguments; i++) {
    argument_name ^= argument_names.At(i);
    ASSERT(argument_name.IsSymbol());
    bool found = false;
    // Named parameters follow the positional ones in the parameter list.
    for (intptr_t j = num_positional_args; !found && (j < num_parameters);
         j++) {
      parameter_name = ParameterNameAt(j);
      // Fixed: previously re-asserted argument_name here; it is the freshly
      // loaded parameter name that must be a symbol.
      ASSERT(parameter_name.IsSymbol());
      if (argument_name.Equals(parameter_name)) {
        found = true;
      }
    }
    if (!found) {
      if (error_message != NULL) {
        const intptr_t kMessageBufferSize = 64;
        char message_buffer[kMessageBufferSize];
        Utils::SNPrint(message_buffer, kMessageBufferSize,
                       "no optional formal parameter named '%s'",
                       argument_name.ToCString());
        // Allocate in old space because it can be invoked in background
        // optimizing compilation.
        *error_message = String::New(message_buffer, Heap::kOld);
      }
      return false;
    }
  }
  return true;
}
bool Function::AreValidArguments(const ArgumentsDescriptor& args_desc,
String* error_message) const {
const intptr_t num_type_arguments = args_desc.TypeArgsLen();
const intptr_t num_arguments = args_desc.Count();
const intptr_t num_named_arguments = args_desc.NamedCount();
if (!AreValidArgumentCounts(num_type_arguments, num_arguments,
num_named_arguments, error_message)) {
return false;
}
// Verify that all argument names are valid parameter names.
Zone* zone = Thread::Current()->zone();
String& argument_name = String::Handle(zone);
String& parameter_name = String::Handle(zone);
for (intptr_t i = 0; i < num_named_arguments; i++) {
argument_name ^= args_desc.NameAt(i);
ASSERT(argument_name.IsSymbol());
bool found = false;
const intptr_t num_positional_args = num_arguments - num_named_arguments;
const int num_parameters = NumParameters();
for (intptr_t j = num_positional_args; !found && (j < num_parameters);
j++) {
parameter_name = ParameterNameAt(j);
ASSERT(argument_name.IsSymbol());
if (argument_name.Equals(parameter_name)) {
found = true;
}
}
if (!found) {
if (error_message != NULL) {
const intptr_t kMessageBufferSize = 64;
char message_buffer[kMessageBufferSize];
Utils::SNPrint(message_buffer, kMessageBufferSize,
"no optional formal parameter named '%s'",
argument_name.ToCString());
// Allocate in old space because it can be invoked in background
// optimizing compilation.
*error_message = String::New(message_buffer, Heap::kOld);
}
return false;
}
}
return true;
}
// Checks the runtime types of the provided arguments (positional first, then
// named) against this function's parameter types, instantiating the signature
// with 'instantiator_type_args' if it is not already instantiated. Returns
// Error::null() on success, or the result of ThrowTypeError for the first
// mismatching argument.
RawObject* Function::DoArgumentTypesMatch(
    const Array& args,
    const ArgumentsDescriptor& args_desc,
    const TypeArguments& instantiator_type_args) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& instantiated_func = Function::Handle(zone, raw());
  if (!HasInstantiatedSignature()) {
    instantiated_func ^= InstantiateSignatureFrom(instantiator_type_args,
                                                  Object::null_type_arguments(),
                                                  kAllFree, Heap::kOld);
  }
  AbstractType& argument_type = AbstractType::Handle(zone);
  AbstractType& parameter_type = AbstractType::Handle(zone);
  Instance& argument = Instance::Handle(zone);
  // Check types of the provided arguments against the expected parameter types.
  for (intptr_t i = args_desc.FirstArgIndex(); i < args_desc.PositionalCount();
       ++i) {
    argument ^= args.At(i);
    argument_type ^= argument.GetType(Heap::kOld);
    parameter_type ^= instantiated_func.ParameterTypeAt(i);
    // If the argument type is dynamic or the parameter is null, move on.
    if (parameter_type.IsDynamicType() || argument_type.IsNullType()) {
      continue;
    }
    if (!argument.IsInstanceOf(parameter_type, instantiator_type_args,
                               Object::null_type_arguments(), NULL)) {
      String& argument_name = String::Handle(zone, ParameterNameAt(i));
      return ThrowTypeError(token_pos(), argument, parameter_type,
                            argument_name);
    }
  }
  const intptr_t num_arguments = args_desc.Count();
  const intptr_t num_named_arguments = args_desc.NamedCount();
  if (num_named_arguments == 0) {
    // No named arguments: all positional checks above passed.
    return Error::null();
  }
  String& argument_name = String::Handle(zone);
  String& parameter_name = String::Handle(zone);
  // Check types of named arguments against expected parameter type.
  for (intptr_t i = 0; i < num_named_arguments; i++) {
    argument_name ^= args_desc.NameAt(i);
    ASSERT(argument_name.IsSymbol());
    bool found = false;
    const intptr_t num_positional_args = num_arguments - num_named_arguments;
    const int num_parameters = NumParameters();
    // Try to find the named parameter that matches the provided argument.
    for (intptr_t j = num_positional_args; !found && (j < num_parameters);
         j++) {
      parameter_name = ParameterNameAt(j);
      // NOTE(review): re-asserts argument_name; presumably parameter_name was
      // intended here — confirm against upstream.
      ASSERT(argument_name.IsSymbol());
      if (argument_name.Equals(parameter_name)) {
        found = true;
        argument ^= args.At(args_desc.PositionAt(i));
        argument_type ^= argument.GetType(Heap::kOld);
        parameter_type ^= instantiated_func.ParameterTypeAt(j);
        // If the argument type is dynamic or the parameter is null, move on.
        if (parameter_type.IsDynamicType() || argument_type.IsNullType()) {
          continue;
        }
        if (!argument.IsInstanceOf(parameter_type, instantiator_type_args,
                                   Object::null_type_arguments(), NULL)) {
          // NOTE(review): this shadows the outer 'argument_name' handle and
          // indexes ParameterNameAt with the named-argument index 'i', not
          // the matched parameter index 'j' — looks suspicious; verify
          // whether the reported name can be wrong here.
          String& argument_name = String::Handle(zone, ParameterNameAt(i));
          return ThrowTypeError(token_pos(), argument, parameter_type,
                                argument_name);
        }
      }
    }
    // Name validity is expected to have been checked earlier (see
    // AreValidArguments), so a named argument must always find its parameter.
    ASSERT(found);
  }
  return Error::null();
}
// Helper allocating a C string buffer in the zone, printing the fully qualified
// name of a function in it, and replacing ':' by '_' to make sure the
// constructed name is a valid C++ identifier for debugging purpose.
// Set 'chars' to allocated buffer and return number of written characters.
// Selects which library identifier is used when building a fully qualified
// function name: the library's URL or its declared name.
enum QualifiedFunctionLibKind {
  kQualifiedFunctionLibKindLibUrl,
  kQualifiedFunctionLibKindLibName
};
// See the header comment above: builds "lib_Class_fn" (or "Class.fn" when
// 'with_lib' is false) into a zone-allocated buffer, recursing through parent
// functions for nested closures. 'reserve_len' accumulates the measured
// length of suffixes before the buffer is allocated at the innermost call.
static intptr_t ConstructFunctionFullyQualifiedCString(
    const Function& function,
    char** chars,
    intptr_t reserve_len,
    bool with_lib,
    QualifiedFunctionLibKind lib_kind) {
  const char* name = String::Handle(function.name()).ToCString();
  // A trailing '_' separates this function's name from a nested child's name;
  // the outermost call (reserve_len == 0) needs no separator.
  const char* function_format = (reserve_len == 0) ? "%s" : "%s_";
  // SNPrint with a NULL buffer only measures the required length.
  reserve_len += Utils::SNPrint(NULL, 0, function_format, name);
  const Function& parent = Function::Handle(function.parent_function());
  intptr_t written = 0;
  if (parent.IsNull()) {
    // Innermost recursion: print the "<lib>_<class>_" prefix and allocate the
    // buffer now that the total length is known.
    const Class& function_class = Class::Handle(function.Owner());
    ASSERT(!function_class.IsNull());
    const char* class_name = String::Handle(function_class.Name()).ToCString();
    ASSERT(class_name != NULL);
    const Library& library = Library::Handle(function_class.library());
    ASSERT(!library.IsNull());
    const char* library_name = NULL;
    const char* lib_class_format = NULL;
    if (with_lib) {
      switch (lib_kind) {
        case kQualifiedFunctionLibKindLibUrl:
          library_name = String::Handle(library.url()).ToCString();
          break;
        case kQualifiedFunctionLibKindLibName:
          library_name = String::Handle(library.name()).ToCString();
          break;
        default:
          UNREACHABLE();
      }
      ASSERT(library_name != NULL);
      // Skip the extra '_' separator when the library name is empty.
      lib_class_format = (library_name[0] == '\0') ? "%s%s_" : "%s_%s_";
    } else {
      library_name = "";
      lib_class_format = "%s%s.";
    }
    reserve_len +=
        Utils::SNPrint(NULL, 0, lib_class_format, library_name, class_name);
    ASSERT(chars != NULL);
    *chars = Thread::Current()->zone()->Alloc<char>(reserve_len + 1);
    written = Utils::SNPrint(*chars, reserve_len + 1, lib_class_format,
                             library_name, class_name);
  } else {
    // Let the parent allocate the buffer and write its own prefix first.
    written = ConstructFunctionFullyQualifiedCString(parent, chars, reserve_len,
                                                     with_lib, lib_kind);
  }
  ASSERT(*chars != NULL);
  char* next = *chars + written;
  // NOTE(review): the size argument is the total buffer size rather than the
  // space remaining after 'written'; this relies on reserve_len having
  // pre-measured every piece exactly — confirm that invariant holds.
  written += Utils::SNPrint(next, reserve_len + 1, function_format, name);
  // Replace ":" with "_".
  while (true) {
    next = strchr(next, ':');
    if (next == NULL) break;
    *next = '_';
  }
  return written;
}
// Returns the fully qualified name, prefixed with the library URL, in a
// zone-allocated C string (':' already replaced by '_' by the helper).
const char* Function::ToFullyQualifiedCString() const {
  char* chars = NULL;
  ConstructFunctionFullyQualifiedCString(*this, &chars, /*reserve_len=*/0,
                                         /*with_lib=*/true,
                                         kQualifiedFunctionLibKindLibUrl);
  return chars;
}
// Returns the fully qualified name, prefixed with the library's declared name
// (rather than its URL), in a zone-allocated C string.
const char* Function::ToLibNamePrefixedQualifiedCString() const {
  char* chars = NULL;
  ConstructFunctionFullyQualifiedCString(*this, &chars, /*reserve_len=*/0,
                                         /*with_lib=*/true,
                                         kQualifiedFunctionLibKindLibName);
  return chars;
}
// Returns the qualified name without any library prefix ("Class.fn" form) in
// a zone-allocated C string.
const char* Function::ToQualifiedCString() const {
  char* chars = NULL;
  ConstructFunctionFullyQualifiedCString(*this, &chars, /*reserve_len=*/0,
                                         /*with_lib=*/false,
                                         kQualifiedFunctionLibKindLibUrl);
  return chars;
}
// Creates a new signature function whose result/parameter types (and, when
// kept, type-parameter bounds) are instantiated from this signature using the
// given instantiator and function type arguments. 'num_free_fun_type_params'
// bounds which function type parameters are considered free; the special
// value kCurrentAndEnclosingFree additionally deletes this signature's own
// type parameters from the result.
RawFunction* Function::InstantiateSignatureFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    intptr_t num_free_fun_type_params,
    Heap::Space space) const {
  Zone* zone = Thread::Current()->zone();
  const Object& owner = Object::Handle(zone, RawOwner());
  // Note that parent pointers in newly instantiated signatures still points to
  // the original uninstantiated parent signatures. That is not a problem.
  const Function& parent = Function::Handle(zone, parent_function());
  // See the comment on kCurrentAndEnclosingFree to understand why we don't
  // adjust 'num_free_fun_type_params' downward in this case.
  bool delete_type_parameters = false;
  if (num_free_fun_type_params == kCurrentAndEnclosingFree) {
    num_free_fun_type_params = kAllFree;
    delete_type_parameters = true;
  } else {
    ASSERT(!HasInstantiatedSignature(kAny, num_free_fun_type_params));
    // A generic typedef may declare a non-generic function type and get
    // instantiated with unrelated function type parameters. In that case, its
    // signature is still uninstantiated, because these type parameters are
    // free (they are not declared by the typedef).
    // For that reason, we only adjust num_free_fun_type_params if this
    // signature is generic or has a generic parent.
    if (IsGeneric() || HasGenericParent()) {
      // We only consider the function type parameters declared by the parents
      // of this signature function as free.
      const int num_parent_type_params = NumParentTypeParameters();
      if (num_parent_type_params < num_free_fun_type_params) {
        num_free_fun_type_params = num_parent_type_params;
      }
    }
  }
  Function& sig = Function::Handle(Function::NewSignatureFunction(
      owner, parent, TokenPosition::kNoSource, space));
  AbstractType& type = AbstractType::Handle(zone);
  // Copy the type parameters and instantiate their bounds (if necessary).
  if (!delete_type_parameters) {
    const TypeArguments& type_params =
        TypeArguments::Handle(zone, type_parameters());
    if (!type_params.IsNull()) {
      TypeArguments& instantiated_type_params = TypeArguments::Handle(zone);
      TypeParameter& type_param = TypeParameter::Handle(zone);
      Class& cls = Class::Handle(zone);
      String& param_name = String::Handle(zone);
      for (intptr_t i = 0; i < type_params.Length(); ++i) {
        type_param ^= type_params.TypeAt(i);
        type = type_param.bound();
        if (!type.IsInstantiated(kAny, num_free_fun_type_params)) {
          // The bound needs instantiation: clone the type parameter with the
          // instantiated bound, re-parented to the new signature 'sig'.
          type = type.InstantiateFrom(
              instantiator_type_arguments, function_type_arguments,
              num_free_fun_type_params, NULL, NULL, NULL, space);
          cls = type_param.parameterized_class();
          param_name = type_param.name();
          ASSERT(type_param.IsFinalized());
          type_param ^=
              TypeParameter::New(cls, sig, type_param.index(), param_name, type,
                                 type_param.token_pos());
          type_param.SetIsFinalized();
          if (instantiated_type_params.IsNull()) {
            // First modified parameter: lazily create the copy and backfill
            // the unmodified parameters seen so far.
            instantiated_type_params = TypeArguments::New(type_params.Length());
            for (intptr_t j = 0; j < i; ++j) {
              type = type_params.TypeAt(j);
              instantiated_type_params.SetTypeAt(j, type);
            }
          }
          instantiated_type_params.SetTypeAt(i, type_param);
        } else if (!instantiated_type_params.IsNull()) {
          instantiated_type_params.SetTypeAt(i, type_param);
        }
      }
      // Share the original vector when no bound needed instantiation.
      sig.set_type_parameters(instantiated_type_params.IsNull()
                                  ? type_params
                                  : instantiated_type_params);
    }
  }
  type = result_type();
  if (!type.IsInstantiated(kAny, num_free_fun_type_params)) {
    type = type.InstantiateFrom(
        instantiator_type_arguments, function_type_arguments,
        num_free_fun_type_params, NULL, NULL, NULL, space);
  }
  sig.set_result_type(type);
  // Copy parameter counts and instantiate each parameter type as needed.
  const intptr_t num_params = NumParameters();
  sig.set_num_fixed_parameters(num_fixed_parameters());
  sig.SetNumOptionalParameters(NumOptionalParameters(),
                               HasOptionalPositionalParameters());
  sig.set_parameter_types(Array::Handle(Array::New(num_params, space)));
  for (intptr_t i = 0; i < num_params; i++) {
    type = ParameterTypeAt(i);
    if (!type.IsInstantiated(kAny, num_free_fun_type_params)) {
      type = type.InstantiateFrom(
          instantiator_type_arguments, function_type_arguments,
          num_free_fun_type_params, NULL, NULL, NULL, space);
    }
    sig.SetParameterTypeAt(i, type);
  }
  // Parameter names carry no types and can be shared with the original.
  sig.set_parameter_names(Array::Handle(zone, parameter_names()));
  if (delete_type_parameters) {
    ASSERT(sig.HasInstantiatedSignature(kFunctions));
  }
  return sig.raw();
}
// If test_kind == kIsSubtypeOf, checks if the type of the specified parameter
// of this function is a subtype or a supertype of the type of the specified
// parameter of the other function. In strong mode, we only check for supertype,
// i.e. contravariance.
// Note that types marked as covariant are already dealt with in the front-end.
// If test_kind == kIsMoreSpecificThan, checks if the type of the specified
// parameter of this function is more specific than the type of the specified
// parameter of the other function.
// Note that for kIsMoreSpecificThan (non-strong mode only), we do not apply
// contravariance of parameter types, but covariance of both parameter types and
// result type.
// (See the block comment above for the kIsSubtypeOf / kIsMoreSpecificThan
// semantics and the strong-mode contravariance rule.)
bool Function::TestParameterType(TypeTestKind test_kind,
                                 intptr_t parameter_position,
                                 intptr_t other_parameter_position,
                                 const Function& other,
                                 Error* bound_error,
                                 TrailPtr bound_trail,
                                 Heap::Space space) const {
  if (FLAG_strong) {
    // Strong mode: parameter types are checked contravariantly only.
    const AbstractType& param_type =
        AbstractType::Handle(ParameterTypeAt(parameter_position));
    if (param_type.IsTopType()) {
      // A top-typed parameter accepts anything the other's parameter allows.
      return true;
    }
    const AbstractType& other_param_type =
        AbstractType::Handle(other.ParameterTypeAt(other_parameter_position));
    return other_param_type.IsSubtypeOf(param_type, bound_error, bound_trail,
                                        space);
  }
  // Dart 1.0 semantics below: dynamic on either side short-circuits.
  const AbstractType& other_param_type =
      AbstractType::Handle(other.ParameterTypeAt(other_parameter_position));
  if (other_param_type.IsDynamicType()) {
    return true;
  }
  const AbstractType& param_type =
      AbstractType::Handle(ParameterTypeAt(parameter_position));
  if (param_type.IsDynamicType()) {
    // dynamic is assignable either way for subtype tests, but is not "more
    // specific" than a concrete type.
    return test_kind == kIsSubtypeOf;
  }
  if (test_kind == kIsSubtypeOf) {
    // Assignability in either direction suffices (bivariance, Dart 1.0).
    return param_type.IsSubtypeOf(other_param_type, bound_error, bound_trail,
                                  space) ||
           other_param_type.IsSubtypeOf(param_type, bound_error, bound_trail,
                                        space);
  }
  ASSERT(test_kind == kIsMoreSpecificThan);
  return param_type.IsMoreSpecificThan(other_param_type, bound_error,
                                       bound_trail, space);
}
bool Function::HasSameTypeParametersAndBounds(const Function& other) const {
Thread* thread = Thread::Current();
Zone* zone = thread->zone();
const intptr_t num_type_params = NumTypeParameters(thread);
if (num_type_params != other.NumTypeParameters(thread)) {
return false;
}
if (num_type_params > 0) {
const TypeArguments& type_params =
TypeArguments::Handle(zone, type_parameters());
ASSERT(!type_params.IsNull());
const TypeArguments& other_type_params =
TypeArguments::Handle(zone, other.type_parameters());
ASSERT(!other_type_params.IsNull());
TypeParameter& type_param = TypeParameter::Handle(zone);
TypeParameter& other_type_param = TypeParameter::Handle(zone);
AbstractType& bound = AbstractType::Handle(zone);
AbstractType& other_bound = AbstractType::Handle(zone);
for (intptr_t i = 0; i < num_type_params; i++) {
type_param ^= type_params.TypeAt(i);
other_type_param ^= other_type_params.TypeAt(i);
bound = type_param.bound();
ASSERT(bound.IsFinalized());
other_bound = other_type_param.bound();
ASSERT(other_bound.IsFinalized());
if (!bound.Equals(other_bound)) {
return false;
}
}
}
return true;
}
// Function-type subtype / more-specific-than test: checks parameter-count
// compatibility, type parameters and bounds (when generics are reified), the
// result type, positional parameter types, and finally named parameters by
// name. Returns false as soon as any requirement fails.
bool Function::TypeTest(TypeTestKind test_kind,
                        const Function& other,
                        Error* bound_error,
                        TrailPtr bound_trail,
                        Heap::Space space) const {
  const intptr_t num_fixed_params = num_fixed_parameters();
  const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
  const intptr_t num_opt_named_params = NumOptionalNamedParameters();
  const intptr_t other_num_fixed_params = other.num_fixed_parameters();
  const intptr_t other_num_opt_pos_params =
      other.NumOptionalPositionalParameters();
  const intptr_t other_num_opt_named_params =
      other.NumOptionalNamedParameters();
  // This function requires the same arguments or less and accepts the same
  // arguments or more. We can ignore implicit parameters.
  const intptr_t num_ignored_params = NumImplicitParameters();
  const intptr_t other_num_ignored_params = other.NumImplicitParameters();
  if (((num_fixed_params - num_ignored_params) >
       (other_num_fixed_params - other_num_ignored_params)) ||
      ((num_fixed_params - num_ignored_params + num_opt_pos_params) <
       (other_num_fixed_params - other_num_ignored_params +
        other_num_opt_pos_params)) ||
      (num_opt_named_params < other_num_opt_named_params)) {
    return false;
  }
  if (FLAG_reify_generic_functions) {
    // Check the type parameters and bounds of generic functions.
    if (!HasSameTypeParametersAndBounds(other)) {
      return false;
    }
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  // Check the result type.
  const AbstractType& other_res_type =
      AbstractType::Handle(zone, other.result_type());
  if (FLAG_strong) {
    // In strong mode, 'void Function()' is a subtype of 'Object Function()'.
    if (!other_res_type.IsTopType()) {
      const AbstractType& res_type = AbstractType::Handle(zone, result_type());
      if (!res_type.IsSubtypeOf(other_res_type, bound_error, bound_trail,
                                space)) {
        return false;
      }
    }
  } else {
    // In Dart 1.0, 'void Function()' is not a subtype of 'Object Function()',
    // but it is a subtype of 'dynamic Function()' and of 'void Function()'.
    if (!other_res_type.IsDynamicType() && !other_res_type.IsVoidType()) {
      const AbstractType& res_type = AbstractType::Handle(zone, result_type());
      if (res_type.IsVoidType()) {
        return false;
      }
      if (test_kind == kIsSubtypeOf) {
        // Bivariant result check in Dart 1.0 subtype mode.
        if (!res_type.IsSubtypeOf(other_res_type, bound_error, bound_trail,
                                  space) &&
            !other_res_type.IsSubtypeOf(res_type, bound_error, bound_trail,
                                        space)) {
          return false;
        }
      } else {
        ASSERT(test_kind == kIsMoreSpecificThan);
        if (!res_type.IsMoreSpecificThan(other_res_type, bound_error,
                                         bound_trail, space)) {
          return false;
        }
      }
    }
  }
  // Check the types of fixed and optional positional parameters.
  for (intptr_t i = 0; i < (other_num_fixed_params - other_num_ignored_params +
                            other_num_opt_pos_params);
       i++) {
    if (!TestParameterType(test_kind, i + num_ignored_params,
                           i + other_num_ignored_params, other, bound_error,
                           bound_trail, space)) {
      return false;
    }
  }
  // Check the names and types of optional named parameters.
  if (other_num_opt_named_params == 0) {
    return true;
  }
  // Check that for each optional named parameter of type T of the other
  // function type, there exists an optional named parameter of this function
  // type with an identical name and with a type S that is a either a subtype
  // or supertype of T (if test_kind == kIsSubtypeOf) or that is more specific
  // than T (if test_kind == kIsMoreSpecificThan). In strong mode, we only check
  // for supertype, i.e. contravariance.
  // Note that SetParameterNameAt() guarantees that names are symbols, so we
  // can compare their raw pointers.
  const int num_params = num_fixed_params + num_opt_named_params;
  const int other_num_params =
      other_num_fixed_params + other_num_opt_named_params;
  bool found_param_name;
  String& other_param_name = String::Handle(zone);
  for (intptr_t i = other_num_fixed_params; i < other_num_params; i++) {
    other_param_name = other.ParameterNameAt(i);
    ASSERT(other_param_name.IsSymbol());
    found_param_name = false;
    for (intptr_t j = num_fixed_params; j < num_params; j++) {
      ASSERT(String::Handle(zone, ParameterNameAt(j)).IsSymbol());
      // Raw pointer compare is valid: both names are canonical symbols.
      if (ParameterNameAt(j) == other_param_name.raw()) {
        found_param_name = true;
        if (!TestParameterType(test_kind, j, i, other, bound_error, bound_trail,
                               space)) {
          return false;
        }
        break;
      }
    }
    if (!found_param_name) {
      return false;
    }
  }
  return true;
}
// The compiler generates an implicit constructor if a class definition
// does not contain an explicit constructor or factory. The implicit
// constructor has the same token position as the owner class.
bool Function::IsImplicitConstructor() const {
  // Per the note above, equal start and end token positions identify the
  // synthesized (implicit) generative constructor.
  return IsGenerativeConstructor() && (token_pos() == end_token_pos());
}
// Raw-pointer variant: decodes the kind_tag_ bits directly (no handle
// allocation), so it must run inside a NoSafepointScope.
bool Function::IsImplicitStaticClosureFunction(RawFunction* func) {
  NoSafepointScope no_safepoint;
  const uint32_t tag = func->ptr()->kind_tag_;
  if (KindBits::decode(tag) != RawFunction::kImplicitClosureFunction) {
    return false;
  }
  return StaticBit::decode(tag);
}
// Allocates a bare, uninitialized Function object in 'space'. Callers (see
// the full Function::New below) are responsible for initializing its fields.
RawFunction* Function::New(Heap::Space space) {
  ASSERT(Object::function_class() != Class::null());
  RawObject* raw =
      Object::Allocate(Function::kClassId, Function::InstanceSize(), space);
  return reinterpret_cast<RawFunction*>(raw);
}
// Allocates and fully initializes a new Function of the given kind. Closure
// kinds get a ClosureData object, signature functions a SignatureData object;
// all other kinds must be allocated in old space.
RawFunction* Function::New(const String& name,
                           RawFunction::Kind kind,
                           bool is_static,
                           bool is_const,
                           bool is_abstract,
                           bool is_external,
                           bool is_native,
                           const Object& owner,
                           TokenPosition token_pos,
                           Heap::Space space) {
  ASSERT(!owner.IsNull() || (kind == RawFunction::kSignatureFunction));
  const Function& result = Function::Handle(Function::New(space));
  result.set_parameter_types(Object::empty_array());
  result.set_parameter_names(Object::empty_array());
  result.set_name(name);
  // Note: set_kind_tag(0) was previously called twice; once before the kind
  // bits are written is sufficient.
  result.set_kind_tag(0);  // Ensure determinism of uninitialized bits.
  result.set_kind(kind);
  result.set_recognized_kind(MethodRecognizer::kUnknown);
  result.set_modifier(RawFunction::kNoModifier);
  result.set_is_static(is_static);
  result.set_is_const(is_const);
  result.set_is_abstract(is_abstract);
  result.set_is_external(is_external);
  result.set_is_native(is_native);
  result.set_is_reflectable(true);  // Will be computed later.
  result.set_is_visible(true);      // Will be computed later.
  result.set_is_debuggable(true);   // Will be computed later.
  result.set_is_intrinsic(false);
  result.set_is_redirecting(false);
  result.set_is_generated_body(false);
  result.set_has_pragma(false);
  result.set_always_inline(false);
  result.set_is_polymorphic_target(false);
  NOT_IN_PRECOMPILED(result.set_state_bits(0));
  result.set_owner(owner);
  NOT_IN_PRECOMPILED(result.set_token_pos(token_pos));
  NOT_IN_PRECOMPILED(result.set_end_token_pos(token_pos));
  result.set_num_fixed_parameters(0);
  result.SetNumOptionalParameters(0, false);
  NOT_IN_PRECOMPILED(result.set_usage_counter(0));
  NOT_IN_PRECOMPILED(result.set_deoptimization_counter(0));
  NOT_IN_PRECOMPILED(result.set_optimized_instruction_count(0));
  NOT_IN_PRECOMPILED(result.set_optimized_call_site_count(0));
  NOT_IN_PRECOMPILED(result.set_inlining_depth(0));
  NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
  // Native methods are never optimized (see IsOptimizable()).
  result.set_is_optimizable(!is_native);
  result.set_is_background_optimizable(!is_native);
  result.set_is_inlinable(true);
  // Start out pointing at the lazy-compile stub; real code is set later.
  result.SetInstructionsSafe(
      Code::Handle(StubCode::LazyCompile_entry()->code()));
  if (kind == RawFunction::kClosureFunction ||
      kind == RawFunction::kImplicitClosureFunction) {
    ASSERT(space == Heap::kOld);
    const ClosureData& data = ClosureData::Handle(ClosureData::New());
    result.set_data(data);
  } else if (kind == RawFunction::kSignatureFunction) {
    const SignatureData& data =
        SignatureData::Handle(SignatureData::New(space));
    result.set_data(data);
  } else {
    // Functions other than signature functions have no reason to be allocated
    // in new space.
    ASSERT(space == Heap::kOld);
  }
  return result.raw();
}
// Clones this function into 'new_owner' (via a PatchClass that remembers the
// original class), resetting compilation state and, when the new owner is
// generic, re-pointing uninstantiated types at the new owner's type
// parameters. Generative constructors cannot be cloned.
RawFunction* Function::Clone(const Class& new_owner) const {
  ASSERT(!IsGenerativeConstructor());
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& clone = Function::Handle(zone);
  clone ^= Object::Clone(*this, Heap::kOld);
  const Class& origin = Class::Handle(zone, this->origin());
  const PatchClass& clone_owner =
      PatchClass::Handle(zone, PatchClass::New(new_owner, origin));
  clone.set_owner(clone_owner);
  // Drop compilation artifacts and counters — the clone starts fresh.
  clone.ClearICDataArray();
  clone.ClearCode();
  clone.set_data(Object::null_object());
  clone.set_usage_counter(0);
  clone.set_deoptimization_counter(0);
  clone.set_optimized_instruction_count(0);
  clone.set_inlining_depth(0);
  clone.set_optimized_call_site_count(0);
  if (new_owner.NumTypeParameters() > 0) {
    // Adjust uninstantiated types to refer to type parameters of the new owner.
    const TypeArguments& type_params =
        TypeArguments::Handle(zone, type_parameters());
    if (!type_params.IsNull()) {
      const intptr_t num_type_params = type_params.Length();
      const TypeArguments& type_params_clone =
          TypeArguments::Handle(zone, TypeArguments::New(num_type_params));
      TypeParameter& type_param = TypeParameter::Handle(zone);
      for (intptr_t i = 0; i < num_type_params; i++) {
        type_param ^= type_params.TypeAt(i);
        type_param ^= type_param.CloneUninstantiated(new_owner);
        type_params_clone.SetTypeAt(i, type_param);
      }
      clone.set_type_parameters(type_params_clone);
    }
    AbstractType& type = AbstractType::Handle(zone, clone.result_type());
    type ^= type.CloneUninstantiated(new_owner);
    clone.set_result_type(type);
    const intptr_t num_params = clone.NumParameters();
    // The parameter-types array must be copied before mutation, since the
    // clone initially shares it with the original.
    Array& array = Array::Handle(zone, clone.parameter_types());
    array ^= Object::Clone(array, Heap::kOld);
    clone.set_parameter_types(array);
    for (intptr_t i = 0; i < num_params; i++) {
      type = clone.ParameterTypeAt(i);
      type ^= type.CloneUninstantiated(new_owner);
      clone.SetParameterTypeAt(i, type);
    }
  }
  return clone.raw();
}
// Shared constructor for (implicit) closure functions: creates a non-const,
// non-abstract, non-external, non-native function whose staticness and owner
// are taken from 'parent', and links it to that parent.
RawFunction* Function::NewClosureFunctionWithKind(RawFunction::Kind kind,
                                                  const String& name,
                                                  const Function& parent,
                                                  TokenPosition token_pos) {
  ASSERT((kind == RawFunction::kClosureFunction) ||
         (kind == RawFunction::kImplicitClosureFunction));
  ASSERT(!parent.IsNull());
  // Use the owner defining the parent function and not the class containing it.
  const Object& parent_owner = Object::Handle(parent.raw_ptr()->owner_);
  ASSERT(!parent_owner.IsNull());
  const Function& result = Function::Handle(
      Function::New(name, kind,
                    /* is_static = */ parent.is_static(),
                    /* is_const = */ false,
                    /* is_abstract = */ false,
                    /* is_external = */ false,
                    /* is_native = */ false, parent_owner, token_pos));
  result.set_parent_function(parent);
  return result.raw();
}
// Creates an explicit (user-written) closure function under 'parent'.
RawFunction* Function::NewClosureFunction(const String& name,
                                          const Function& parent,
                                          TokenPosition token_pos) {
  return NewClosureFunctionWithKind(RawFunction::kClosureFunction, name, parent,
                                    token_pos);
}
// Creates an implicit closure function (a tear-off wrapper) under 'parent'.
RawFunction* Function::NewImplicitClosureFunction(const String& name,
                                                  const Function& parent,
                                                  TokenPosition token_pos) {
  return NewClosureFunctionWithKind(RawFunction::kImplicitClosureFunction, name,
                                    parent, token_pos);
}
// Creates an anonymous signature function (used to represent a function
// type). Signature functions are invisible, non-reflectable, and
// non-debuggable, and may be allocated in new space.
RawFunction* Function::NewSignatureFunction(const Object& owner,
                                            const Function& parent,
                                            TokenPosition token_pos,
                                            Heap::Space space) {
  const Function& result = Function::Handle(Function::New(
      Symbols::AnonymousSignature(), RawFunction::kSignatureFunction,
      /* is_static = */ false,
      /* is_const = */ false,
      /* is_abstract = */ false,
      /* is_external = */ false,
      /* is_native = */ false,
      owner,  // Same as function type scope class.
      token_pos, space));
  result.set_parent_function(parent);
  result.set_is_reflectable(false);
  result.set_is_visible(false);
  result.set_is_debuggable(false);
  return result.raw();
}
// Creates a ":Eval" regular function owned by 'owner', attached to 'script'
// (the evaluated expression's source). Marked non-debuggable so the debugger
// does not step into the synthetic wrapper.
RawFunction* Function::NewEvalFunction(const Class& owner,
                                       const Script& script,
                                       bool is_static) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Function& result = Function::Handle(
      zone,
      Function::New(String::Handle(Symbols::New(thread, ":Eval")),
                    RawFunction::kRegularFunction, is_static,
                    /* is_const = */ false,
                    /* is_abstract = */ false,
                    /* is_external = */ false,
                    /* is_native = */ false, owner, TokenPosition::kMinSource));
  ASSERT(!script.IsNull());
  result.set_is_debuggable(false);
  result.set_is_visible(true);
  result.set_eval_script(script);
  return result.raw();
}
// Returns (creating and caching on first use) the implicit closure function
// for this method, i.e. the function backing a tear-off of this method.
// The closure mirrors this function's type parameters, result type and
// formal parameters, except that the receiver (if any) is replaced by the
// implicit closure object as the first parameter.
// Not available in AOT (all implicit closures are pre-created there).
RawFunction* Function::ImplicitClosureFunction() const {
  // Return the existing implicit closure function if any.
  if (implicit_closure_function() != Function::null()) {
    return implicit_closure_function();
  }
#if defined(DART_PRECOMPILED_RUNTIME)
  // In AOT mode all implicit closures are pre-created.
  UNREACHABLE();
  return Function::null();
#else
  ASSERT(!IsSignatureFunction() && !IsClosureFunction());
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  // Create closure function.
  const String& closure_name = String::Handle(zone, name());
  const Function& closure_function = Function::Handle(
      zone, NewImplicitClosureFunction(closure_name, *this, token_pos()));
  // Set closure function's context scope.
  // Static methods close over nothing; instance methods get an implicit
  // scope capturing the receiver.
  if (is_static()) {
    closure_function.set_context_scope(Object::empty_context_scope());
  } else {
    const ContextScope& context_scope = ContextScope::Handle(
        zone, LocalScope::CreateImplicitClosureScope(*this));
    closure_function.set_context_scope(context_scope);
  }
  // Set closure function's type parameters.
  closure_function.set_type_parameters(
      TypeArguments::Handle(zone, type_parameters()));
  // Set closure function's result type to this result type.
  closure_function.set_result_type(AbstractType::Handle(zone, result_type()));
  // Set closure function's end token to this end token.
  closure_function.set_end_token_pos(end_token_pos());
  // The closurized method stub just calls into the original method and should
  // therefore be skipped by the debugger and in stack traces.
  closure_function.set_is_debuggable(false);
  closure_function.set_is_visible(false);
  // Set closure function's formal parameters to this formal parameters,
  // removing the receiver if this is an instance method and adding the closure
  // object as first parameter.
  const int kClosure = 1;
  const int has_receiver = is_static() ? 0 : 1;
  const int num_fixed_params = kClosure - has_receiver + num_fixed_parameters();
  const int num_opt_params = NumOptionalParameters();
  const bool has_opt_pos_params = HasOptionalPositionalParameters();
  const int num_params = num_fixed_params + num_opt_params;
  closure_function.set_num_fixed_parameters(num_fixed_params);
  closure_function.SetNumOptionalParameters(num_opt_params, has_opt_pos_params);
  closure_function.set_parameter_types(
      Array::Handle(zone, Array::New(num_params, Heap::kOld)));
  closure_function.set_parameter_names(
      Array::Handle(zone, Array::New(num_params, Heap::kOld)));
  AbstractType& param_type = AbstractType::Handle(zone);
  String& param_name = String::Handle(zone);
  // Add implicit closure object parameter.
  param_type = Type::DynamicType();
  closure_function.SetParameterTypeAt(0, param_type);
  closure_function.SetParameterNameAt(0, Symbols::ClosureParameter());
  // Copy the remaining parameters; index i in the closure maps to index
  // (has_receiver - kClosure + i) in this function, skipping the receiver.
  for (int i = kClosure; i < num_params; i++) {
    param_type = ParameterTypeAt(has_receiver - kClosure + i);
    closure_function.SetParameterTypeAt(i, param_type);
    param_name = ParameterNameAt(has_receiver - kClosure + i);
    closure_function.SetParameterNameAt(i, param_name);
  }
  closure_function.set_kernel_offset(kernel_offset());
  // In strong mode, change covariant parameter types to Object in the implicit
  // closure of a method compiled by kernel.
  // The VM's parser erases covariant types immediately in strong mode.
  if (FLAG_strong && !is_static() && kernel_offset() > 0) {
    // Re-read this method's kernel declaration to find covariant parameters.
    const Script& function_script = Script::Handle(zone, script());
    kernel::TranslationHelper translation_helper(thread);
    translation_helper.InitFromScript(function_script);
    kernel::KernelReaderHelper kernel_reader_helper(
        zone, &translation_helper, function_script,
        ExternalTypedData::Handle(zone, KernelData()),
        KernelDataProgramOffset());
    kernel_reader_helper.SetOffset(kernel_offset());
    kernel_reader_helper.ReadUntilFunctionNode();
    kernel::FunctionNodeHelper fn_helper(&kernel_reader_helper);
    // Check the positional parameters, including the optional positional ones.
    fn_helper.ReadUntilExcluding(
        kernel::FunctionNodeHelper::kPositionalParameters);
    intptr_t num_pos_params = kernel_reader_helper.ReadListLength();
    ASSERT(num_pos_params ==
           num_fixed_params - 1 + (has_opt_pos_params ? num_opt_params : 0));
    const Type& object_type = Type::Handle(zone, Type::ObjectType());
    for (intptr_t i = 0; i < num_pos_params; ++i) {
      kernel::VariableDeclarationHelper var_helper(&kernel_reader_helper);
      var_helper.ReadUntilExcluding(kernel::VariableDeclarationHelper::kEnd);
      if (var_helper.IsCovariant() || var_helper.IsGenericCovariantImpl()) {
        // Offset by 1 to skip the implicit closure parameter.
        closure_function.SetParameterTypeAt(i + 1, object_type);
      }
    }
    fn_helper.SetJustRead(kernel::FunctionNodeHelper::kPositionalParameters);
    // Check the optional named parameters.
    fn_helper.ReadUntilExcluding(kernel::FunctionNodeHelper::kNamedParameters);
    intptr_t num_named_params = kernel_reader_helper.ReadListLength();
    ASSERT(num_named_params == (has_opt_pos_params ? 0 : num_opt_params));
    for (intptr_t i = 0; i < num_named_params; ++i) {
      kernel::VariableDeclarationHelper var_helper(&kernel_reader_helper);
      var_helper.ReadUntilExcluding(kernel::VariableDeclarationHelper::kEnd);
      if (var_helper.IsCovariant() || var_helper.IsGenericCovariantImpl()) {
        // Named parameters follow the positional ones (and the closure param).
        closure_function.SetParameterTypeAt(num_pos_params + 1 + i,
                                            object_type);
      }
    }
  }
  const Type& signature_type =
      Type::Handle(zone, closure_function.SignatureType());
  if (!signature_type.IsFinalized()) {
    ClassFinalizer::FinalizeType(Class::Handle(zone, Owner()), signature_type);
  }
  set_implicit_closure_function(closure_function);
  ASSERT(closure_function.IsImplicitClosureFunction());
  return closure_function.raw();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
void Function::DropUncompiledImplicitClosureFunction() const {
if (implicit_closure_function() != Function::null()) {
const Function& func = Function::Handle(implicit_closure_function());
if (!func.HasCode()) {
set_implicit_closure_function(Function::Handle());
}
}
}
// Appends the textual form of this function's parameter list to |pieces|:
// fixed parameter types separated by ", ", then optional positional
// parameters in "[...]" or optional named parameters in "{...}" (a function
// has at most one of the two kinds). Named parameters also include the
// parameter name; positional ones do not, since the name is not part of the
// signature. With kUserVisibleName, implicit parameters (e.g. the receiver)
// are skipped.
void Function::BuildSignatureParameters(
    Thread* thread,
    Zone* zone,
    NameVisibility name_visibility,
    GrowableHandlePtrArray<const String>* pieces) const {
  AbstractType& param_type = AbstractType::Handle(zone);
  const intptr_t num_params = NumParameters();
  const intptr_t num_fixed_params = num_fixed_parameters();
  const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
  const intptr_t num_opt_named_params = NumOptionalNamedParameters();
  const intptr_t num_opt_params = num_opt_pos_params + num_opt_named_params;
  ASSERT((num_fixed_params + num_opt_params) == num_params);
  intptr_t i = 0;
  if (name_visibility == kUserVisibleName) {
    // Hide implicit parameters.
    i = NumImplicitParameters();
  }
  String& name = String::Handle(zone);
  while (i < num_fixed_params) {
    param_type = ParameterTypeAt(i);
    ASSERT(!param_type.IsNull());
    name = param_type.BuildName(name_visibility);
    pieces->Add(name);
    // Comma also separates the last fixed parameter from a following
    // optional-parameter group.
    if (i != (num_params - 1)) {
      pieces->Add(Symbols::CommaSpace());
    }
    i++;
  }
  if (num_opt_params > 0) {
    if (num_opt_pos_params > 0) {
      pieces->Add(Symbols::LBracket());
    } else {
      pieces->Add(Symbols::LBrace());
    }
    for (intptr_t i = num_fixed_params; i < num_params; i++) {
      param_type = ParameterTypeAt(i);
      ASSERT(!param_type.IsNull());
      name = param_type.BuildName(name_visibility);
      pieces->Add(name);
      // The parameter name of an optional positional parameter does not need
      // to be part of the signature, since it is not used.
      if (num_opt_named_params > 0) {
        name = ParameterNameAt(i);
        pieces->Add(Symbols::Blank());
        pieces->Add(name);
      }
      if (i != (num_params - 1)) {
        pieces->Add(Symbols::CommaSpace());
      }
    }
    if (num_opt_pos_params > 0) {
      pieces->Add(Symbols::RBracket());
    } else {
      pieces->Add(Symbols::RBrace());
    }
  }
}
// Returns the closure instance for an implicit static closure function,
// creating and caching it (in old space, with an empty context and no type
// arguments) on first request.
RawInstance* Function::ImplicitStaticClosure() const {
  ASSERT(IsImplicitStaticClosureFunction());
  if (implicit_static_closure() == Instance::null()) {
    Zone* zone = Thread::Current()->zone();
    const Context& context = Context::Handle(zone);  // Null context.
    Instance& closure =
        Instance::Handle(zone, Closure::New(Object::null_type_arguments(),
                                            Object::null_type_arguments(),
                                            *this, context, Heap::kOld));
    set_implicit_static_closure(closure);
  }
  return implicit_static_closure();
}
// Creates a new closure instance for this implicit closure function, bound
// to |receiver|: the receiver is captured in a one-slot context, and the
// receiver's type arguments become the instantiator type arguments when the
// signature is not instantiated w.r.t. the current class.
RawInstance* Function::ImplicitInstanceClosure(const Instance& receiver) const {
  ASSERT(IsImplicitClosureFunction());
  Zone* zone = Thread::Current()->zone();
  const Context& context = Context::Handle(zone, Context::New(1));
  context.SetAt(0, receiver);
  TypeArguments& instantiator_type_arguments = TypeArguments::Handle(zone);
  if (!HasInstantiatedSignature(kCurrentClass)) {
    instantiator_type_arguments = receiver.GetTypeArguments();
  }
  ASSERT(HasInstantiatedSignature(kFunctions));  // No generic parent function.
  return Closure::New(instantiator_type_arguments,
                      Object::null_type_arguments(), *this, context);
}
// Computes a hash for a closure function by summing the hashes of its name,
// its signature string and its owner class's name.
intptr_t Function::ComputeClosureHash() const {
  ASSERT(IsClosureFunction());
  const Class& owner_class = Class::Handle(Owner());
  const intptr_t name_hash = String::Handle(name()).Hash();
  const intptr_t signature_hash = String::Handle(Signature()).Hash();
  const intptr_t class_name_hash = String::Handle(owner_class.Name()).Hash();
  return name_hash + signature_hash + class_name_hash;
}
// Builds the textual signature of this function, e.g.
// "<T extends num>(T, [int]) => bool": an optional type-parameter list
// (only when generic functions are reified), the parameter list (see
// BuildSignatureParameters), "=>", and the result type.
RawString* Function::BuildSignature(NameVisibility name_visibility) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  GrowableHandlePtrArray<const String> pieces(zone, 4);
  String& name = String::Handle(zone);
  if (FLAG_reify_generic_functions) {
    const TypeArguments& type_params =
        TypeArguments::Handle(zone, type_parameters());
    if (!type_params.IsNull()) {
      const intptr_t num_type_params = type_params.Length();
      ASSERT(num_type_params > 0);
      TypeParameter& type_param = TypeParameter::Handle(zone);
      AbstractType& bound = AbstractType::Handle(zone);
      pieces.Add(Symbols::LAngleBracket());
      for (intptr_t i = 0; i < num_type_params; i++) {
        type_param ^= type_params.TypeAt(i);
        name = type_param.name();
        pieces.Add(name);
        // Elide the bound when it is absent or the trivial Object bound.
        bound = type_param.bound();
        if (!bound.IsNull() && !bound.IsObjectType()) {
          pieces.Add(Symbols::SpaceExtendsSpace());
          name = bound.BuildName(name_visibility);
          pieces.Add(name);
        }
        if (i < num_type_params - 1) {
          pieces.Add(Symbols::CommaSpace());
        }
      }
      pieces.Add(Symbols::RAngleBracket());
    }
  }
  pieces.Add(Symbols::LParen());
  BuildSignatureParameters(thread, zone, name_visibility, &pieces);
  pieces.Add(Symbols::RParenArrow());
  const AbstractType& res_type = AbstractType::Handle(zone, result_type());
  name = res_type.BuildName(name_visibility);
  pieces.Add(name);
  return Symbols::FromConcatAll(thread, pieces);
}
// Returns true if this function's signature (result type, parameter types
// and type-parameter bounds) is instantiated for the given |genericity|,
// treating at most |num_free_fun_type_params| function type parameters as
// free. |trail| guards against cycles in recursive types.
bool Function::HasInstantiatedSignature(Genericity genericity,
                                        intptr_t num_free_fun_type_params,
                                        TrailPtr trail) const {
  if (num_free_fun_type_params == kCurrentAndEnclosingFree) {
    num_free_fun_type_params = kAllFree;
  } else if (genericity != kCurrentClass) {
    // A generic typedef may declare a non-generic function type and get
    // instantiated with unrelated function type parameters. In that case, its
    // signature is still uninstantiated, because these type parameters are
    // free (they are not declared by the typedef).
    // For that reason, we only adjust num_free_fun_type_params if this
    // signature is generic or has a generic parent.
    if (IsGeneric() || HasGenericParent()) {
      // We only consider the function type parameters declared by the parents
      // of this signature function as free.
      const int num_parent_type_params = NumParentTypeParameters();
      if (num_parent_type_params < num_free_fun_type_params) {
        num_free_fun_type_params = num_parent_type_params;
      }
    }
  }
  // Check the result type first.
  AbstractType& type = AbstractType::Handle(result_type());
  if (!type.IsInstantiated(genericity, num_free_fun_type_params, trail)) {
    return false;
  }
  // Then every parameter type.
  const intptr_t num_parameters = NumParameters();
  for (intptr_t i = 0; i < num_parameters; i++) {
    type = ParameterTypeAt(i);
    if (!type.IsInstantiated(genericity, num_free_fun_type_params, trail)) {
      return false;
    }
  }
  // Finally the bounds of this function's own type parameters.
  TypeArguments& type_params = TypeArguments::Handle(type_parameters());
  TypeParameter& type_param = TypeParameter::Handle();
  for (intptr_t i = 0; i < type_params.Length(); ++i) {
    type_param ^= type_params.TypeAt(i);
    type = type_param.bound();
    if (!type.IsInstantiated(genericity, num_free_fun_type_params, trail)) {
      return false;
    }
  }
  return true;
}
// Returns the class this function belongs to. The owner slot holds either a
// Class or a PatchClass (in which case the patched class is returned); a
// null owner is only legal for signature functions.
RawClass* Function::Owner() const {
  if (raw_ptr()->owner_ == Object::null()) {
    ASSERT(IsSignatureFunction());
    return Class::null();
  }
  if (raw_ptr()->owner_->IsClass()) {
    return Class::RawCast(raw_ptr()->owner_);
  }
  const Object& obj = Object::Handle(raw_ptr()->owner_);
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).patched_class();
}
// Like Owner(), but when the owner is a PatchClass this returns its origin
// class instead of the patched class.
RawClass* Function::origin() const {
  if (raw_ptr()->owner_ == Object::null()) {
    ASSERT(IsSignatureFunction());
    return Class::null();
  }
  if (raw_ptr()->owner_->IsClass()) {
    return Class::RawCast(raw_ptr()->owner_);
  }
  const Object& obj = Object::Handle(raw_ptr()->owner_);
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).origin_class();
}
void Function::SetKernelDataAndScript(const Script& script,
const ExternalTypedData& data,
intptr_t offset) {
Array& data_field = Array::Handle(Array::New(3));
data_field.SetAt(0, script);
data_field.SetAt(1, data);
data_field.SetAt(2, Smi::Handle(Smi::New(offset)));
set_data(data_field);
}
// Returns the script this function was declared in, checking in order:
// the kernel data array in the data_ slot, the eval script (for temporary
// eval functions at position 0), the parent function (for closures), and
// finally the owner class or patch class.
RawScript* Function::script() const {
  // NOTE(turnidge): If you update this function, you probably want to
  // update Class::PatchFieldsAndFunctions() at the same time.
  Object& data = Object::Handle(raw_ptr()->data_);
  if (data.IsArray()) {
    Object& script = Object::Handle(Array::Cast(data).At(0));
    if (script.IsScript()) {
      return Script::Cast(script).raw();
    }
  }
  if (token_pos() == TokenPosition::kMinSource) {
    // Testing for position 0 is an optimization that relies on temporary
    // eval functions having token position 0.
    const Script& script = Script::Handle(eval_script());
    if (!script.IsNull()) {
      return script.raw();
    }
  }
  if (IsClosureFunction()) {
    return Function::Handle(parent_function()).script();
  }
  const Object& obj = Object::Handle(raw_ptr()->owner_);
  if (obj.IsNull()) {
    ASSERT(IsSignatureFunction());
    return Script::null();
  }
  if (obj.IsClass()) {
    return Class::Cast(obj).script();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).script();
}
// Returns the kernel binary this function was loaded from: either the
// per-function data array (slot 1, see SetKernelDataAndScript), the parent
// function's data (for closures), or the owning library's kernel data.
RawExternalTypedData* Function::KernelData() const {
  Object& data = Object::Handle(raw_ptr()->data_);
  if (data.IsArray()) {
    // Slot 0 being a Script identifies the SetKernelDataAndScript layout.
    Object& script = Object::Handle(Array::Cast(data).At(0));
    if (script.IsScript()) {
      return ExternalTypedData::RawCast(Array::Cast(data).At(1));
    }
  }
  if (IsClosureFunction()) {
    Function& parent = Function::Handle(parent_function());
    ASSERT(!parent.IsNull());
    return parent.KernelData();
  }
  const Object& obj = Object::Handle(raw_ptr()->owner_);
  if (obj.IsClass()) {
    Library& lib = Library::Handle(Class::Cast(obj).library());
    return lib.kernel_data();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).library_kernel_data();
}
// Returns the offset of this function's kernel program within the kernel
// binary, resolved analogously to KernelData(): data array slot 2, the
// closure's parent, or the owning library's offset.
intptr_t Function::KernelDataProgramOffset() const {
  Object& data = Object::Handle(raw_ptr()->data_);
  if (data.IsArray()) {
    // Slot 0 being a Script identifies the SetKernelDataAndScript layout.
    Object& script = Object::Handle(Array::Cast(data).At(0));
    if (script.IsScript()) {
      return Smi::Value(Smi::RawCast(Array::Cast(data).At(2)));
    }
  }
  if (IsClosureFunction()) {
    Function& parent = Function::Handle(parent_function());
    ASSERT(!parent.IsNull());
    return parent.KernelDataProgramOffset();
  }
  const Object& obj = Object::Handle(raw_ptr()->owner_);
  if (obj.IsClass()) {
    Library& lib = Library::Handle(Class::Cast(obj).library());
    return lib.kernel_offset();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).library_kernel_offset();
}
bool Function::HasOptimizedCode() const {
return HasCode() && Code::Handle(CurrentCode()).is_optimized();
}
// Decides whether the compiler should optimize this function. Without the
// interpreter this is always true; with it, the function must first have
// unoptimized code and have been compiled.
bool Function::ShouldCompilerOptimize() const {
  if (!FLAG_enable_interpreter) {
    return true;
  }
  return (unoptimized_code() != Object::null()) && WasCompiled();
}
// Returns the user-visible (scrubbed) name of this function, or the raw
// internal name when --show_internal_names is set.
RawString* Function::UserVisibleName() const {
  if (FLAG_show_internal_names) {
    return name();
  }
  const String& internal_name = String::Handle(name());
  return String::ScrubName(internal_name);
}
// Builds the qualified name of this function, e.g. "Class.outer.closure" or
// "new Class.name" for constructors. For closures generated for async/sync*
// bodies, the name of the generating parent function is used, and enclosing
// local function names are prepended, dot-separated.
RawString* Function::QualifiedName(NameVisibility name_visibility) const {
  ASSERT(name_visibility != kInternalName);  // We never request it.
  // If |this| is the generated asynchronous body closure, use the
  // name of the parent function.
  Function& fun = Function::Handle(raw());
  if (fun.IsClosureFunction()) {
    // Sniff the parent function.
    fun = fun.parent_function();
    ASSERT(!fun.IsNull());
    if (!fun.IsAsyncGenerator() && !fun.IsAsyncFunction() &&
        !fun.IsSyncGenerator()) {
      // Parent function is not the generator of an asynchronous body closure,
      // start at |this|.
      fun = raw();
    }
  }
  // A function's scrubbed name and its user visible name are identical.
  String& result = String::Handle(fun.UserVisibleName());
  if (IsClosureFunction()) {
    // Walk up the chain of enclosing local functions, prepending each name.
    while (fun.IsLocalFunction() && !fun.IsImplicitClosureFunction()) {
      fun = fun.parent_function();
      if (fun.IsAsyncClosure() || fun.IsSyncGenClosure() ||
          fun.IsAsyncGenClosure()) {
        // Skip the closure and use the real function name found in
        // the parent.
        fun = fun.parent_function();
      }
      result = String::Concat(Symbols::Dot(), result, Heap::kOld);
      result = String::Concat(String::Handle(fun.UserVisibleName()), result,
                              Heap::kOld);
    }
  }
  // Prepend the class qualifier (or constructor prefix) for members of a
  // non-top-level class.
  const Class& cls = Class::Handle(Owner());
  if (!cls.IsTopLevel()) {
    if (fun.kind() == RawFunction::kConstructor) {
      result = String::Concat(Symbols::ConstructorStacktracePrefix(), result,
                              Heap::kOld);
    } else {
      result = String::Concat(Symbols::Dot(), result, Heap::kOld);
      const String& cls_name = String::Handle(name_visibility == kScrubbedName
                                                  ? cls.ScrubbedName()
                                                  : cls.UserVisibleName());
      result = String::Concat(cls_name, result, Heap::kOld);
    }
  }
  return result.raw();
}
// Returns the source text of this function as a snippet of its script,
// delimited by token_pos() and end_token_pos(). Returns null for implicit
// constructors and signature functions, which have no source of their own.
// Only kernel scripts are supported; any other script kind is unreachable.
RawString* Function::GetSource() const {
  if (IsImplicitConstructor() || IsSignatureFunction()) {
    // We may need to handle more cases when the restrictions on mixins are
    // relaxed. In particular we might start associating some source with the
    // forwarding constructors when it becomes possible to specify a particular
    // constructor from the mixin to use.
    return String::null();
  }
  Zone* zone = Thread::Current()->zone();
  const Script& func_script = Script::Handle(zone, script());
  if (func_script.kind() == RawScript::kKernelTag) {
    intptr_t from_line;
    intptr_t from_col;
    intptr_t to_line;
    intptr_t to_col;
    intptr_t to_length;
    func_script.GetTokenLocation(token_pos(), &from_line, &from_col);
    func_script.GetTokenLocation(end_token_pos(), &to_line, &to_col,
                                 &to_length);
    if (to_length == 1) {
      // Handle special cases for end tokens of closures (where we exclude the
      // last token):
      // (1) "foo(() => null, bar);": End token is `,', but we don't print it.
      // (2) "foo(() => null);": End token is ')`, but we don't print it.
      // (3) "var foo = () => null;": End token is `;', but in this case the
      //     token semicolon belongs to the assignment so we skip it.
      const String& src = String::Handle(func_script.Source());
      uint16_t end_char = src.CharAt(end_token_pos().value());
      if ((end_char == ',') ||  // Case 1.
          (end_char == ')') ||  // Case 2.
          (end_char == ';' && String::Handle(zone, name())
                                  .Equals("<anonymous closure>"))) {  // Case 3.
        to_length = 0;
      }
    }
    return func_script.GetSnippet(from_line, from_col, to_line,
                                  to_col + to_length);
  }
  UNREACHABLE();
  return String::null();
}
// Computes a fingerprint of this function's source, used to detect when a
// recognized method's source has changed (see CheckSourceFingerprint). The
// fingerprint is derived from the function's kernel representation,
// including its arguments. In the precompiled runtime the kernel data is
// unavailable, so 0 is returned.
int32_t Function::SourceFingerprint() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  return kernel::KernelSourceFingerprintHelper::CalculateFunctionFingerprint(
      *this);
#else
  return 0;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
void Function::SaveICDataMap(
const ZoneGrowableArray<const ICData*>& deopt_id_to_ic_data,
const Array& edge_counters_array) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
// Compute number of ICData objects to save.
// Store edge counter array in the first slot.
intptr_t count = 1;
for (intptr_t i = 0; i < deopt_id_to_ic_data.length(); i++) {
if (deopt_id_to_ic_data[i] != NULL) {
count++;
}
}
const Array& array = Array::Handle(Array::New(count, Heap::kOld));
count = 1;
for (intptr_t i = 0; i < deopt_id_to_ic_data.length(); i++) {
if (deopt_id_to_ic_data[i] != NULL) {
ASSERT(i == deopt_id_to_ic_data[i]->deopt_id());
array.SetAt(count++, *deopt_id_to_ic_data[i]);
}
}
array.SetAt(0, edge_counters_array);
set_ic_data_array(array);
#else // DART_PRECOMPILED_RUNTIME
UNREACHABLE();
#endif // DART_PRECOMPILED_RUNTIME
}
// Rebuilds the deopt-id -> ICData mapping from the array written by
// SaveICDataMap. Entries without an ICData stay NULL. When |clone_ic_data|
// (or --force_clone_compiler_objects) is set, each ICData is cloned and the
// clone remembers its original. JIT-only; unreachable in AOT.
void Function::RestoreICDataMap(
    ZoneGrowableArray<const ICData*>* deopt_id_to_ic_data,
    bool clone_ic_data) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (FLAG_force_clone_compiler_objects) {
    clone_ic_data = true;
  }
  ASSERT(deopt_id_to_ic_data->is_empty());
  Zone* zone = Thread::Current()->zone();
  const Array& saved_ic_data = Array::Handle(zone, ic_data_array());
  if (saved_ic_data.IsNull()) {
    // Could happen with deferred loading.
    return;
  }
  const intptr_t saved_length = saved_ic_data.Length();
  ASSERT(saved_length > 0);
  if (saved_length > 1) {
    // The entries are in deopt-id order, so the last entry's deopt id
    // determines the size of the restored map.
    const intptr_t restored_length =
        ICData::Cast(Object::Handle(zone, saved_ic_data.At(saved_length - 1)))
            .deopt_id() +
        1;
    deopt_id_to_ic_data->SetLength(restored_length);
    for (intptr_t i = 0; i < restored_length; i++) {
      (*deopt_id_to_ic_data)[i] = NULL;
    }
    for (intptr_t i = 1; i < saved_length; i++) {
      // A fresh ZoneHandle per iteration: pointers to these handles are
      // stored in the map, so they must not be reused across entries.
      ICData& ic_data = ICData::ZoneHandle(zone);
      ic_data ^= saved_ic_data.At(i);
      if (clone_ic_data) {
        const ICData& original_ic_data = ICData::Handle(zone, ic_data.raw());
        ic_data = ICData::Clone(ic_data);
        ic_data.SetOriginal(original_ic_data);
      }
      ASSERT(deopt_id_to_ic_data->At(ic_data.deopt_id()) == nullptr);
      (*deopt_id_to_ic_data)[ic_data.deopt_id()] = &ic_data;
    }
  }
#else   // DART_PRECOMPILED_RUNTIME
  UNREACHABLE();
#endif  // DART_PRECOMPILED_RUNTIME
}
// Stores the saved ICData array (see SaveICDataMap for its layout).
void Function::set_ic_data_array(const Array& value) const {
  StorePointer(&raw_ptr()->ic_data_array_, value.raw());
}

// Returns the saved ICData array, or null if none has been saved.
RawArray* Function::ic_data_array() const {
  return raw_ptr()->ic_data_array_;
}

// Drops the saved ICData array.
void Function::ClearICDataArray() const {
  set_ic_data_array(Array::null_array());
}
// Records |reason| on every saved ICData entry whose deopt id matches
// |deopt_id|.
void Function::SetDeoptReasonForAll(intptr_t deopt_id,
                                    ICData::DeoptReasonId reason) {
  const Array& entries = Array::Handle(ic_data_array());
  ICData& entry = ICData::Handle();
  // Slot 0 holds the edge counter array, so the scan starts at 1.
  for (intptr_t i = 1; i < entries.Length(); i++) {
    entry ^= entries.At(i);
    if (entry.deopt_id() == deopt_id) {
      entry.AddDeoptReason(reason);
    }
  }
}
// Verifies that this recognized method's source fingerprint matches the
// expected value |fp| recorded in method_recognizer.h. Skipped when
// obfuscating or when there is no kernel offset. On mismatch, prints a
// diagnostic and returns false. The |recalculatingFingerprints| constant is
// a developer toggle: flip it to true (locally) to emit sed replacement
// commands for regenerating the expected fingerprints instead of failing.
bool Function::CheckSourceFingerprint(const char* prefix, int32_t fp) const {
  if (!Isolate::Current()->obfuscate() && (kernel_offset() <= 0) &&
      (SourceFingerprint() != fp)) {
    const bool recalculatingFingerprints = false;
    if (recalculatingFingerprints) {
      // This output can be copied into a file, then used with sed
      // to replace the old values.
      // sed -i.bak -f /tmp/newkeys runtime/vm/compiler/method_recognizer.h
      THR_Print("s/0x%08x/0x%08x/\n", fp, SourceFingerprint());
    } else {
      THR_Print(
          "FP mismatch while recognizing method %s:"
          " expecting 0x%08x found 0x%08x\n",
          ToFullyQualifiedCString(), fp, SourceFingerprint());
      return false;
    }
  }
  return true;
}
// Returns this function's code, compiling it first if necessary. A
// compilation error is propagated as an exception (and does not return).
RawCode* Function::EnsureHasCode() const {
  if (HasCode()) return CurrentCode();
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Object& result =
      Object::Handle(zone, Compiler::CompileFunction(thread, *this));
  if (result.IsError()) {
    Exceptions::PropagateError(Error::Cast(result));
    UNREACHABLE();
  }
  // Compiling in unoptimized mode should never fail if there are no errors.
  ASSERT(HasCode());
  ASSERT(unoptimized_code() == result.raw());
  return CurrentCode();
}
// Returns true if this function may be compiled with a second, unchecked
// entry point that skips argument type checks. Currently restricted to JIT
// mode on x64/ARM with --enable_multiple_entrypoints, for functions that
// either need argument type checks or are implicit closure functions.
bool Function::MayHaveUncheckedEntryPoint(Isolate* I) const {
// TODO(#34162): Support the other architectures.
// TODO(#34162): Enable multiple-entrypoints for AOT if we
// consider them beneficial.
#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_ARM)
  return !FLAG_precompiled_mode && FLAG_enable_multiple_entrypoints &&
         (NeedsArgumentTypeChecks(I) || IsImplicitClosureFunction());
#else
  return false;
#endif
}
// Returns a zone-allocated debug description of this function, e.g.
// "Function 'foo': static.". The kind/static/abstract/const qualifiers are
// concatenated directly after the name, so every non-empty qualifier string
// must carry a leading space.
const char* Function::ToCString() const {
  if (IsNull()) {
    return "Function: null";
  }
  const char* static_str = is_static() ? " static" : "";
  const char* abstract_str = is_abstract() ? " abstract" : "";
  const char* kind_str = NULL;
  const char* const_str = is_const() ? " const" : "";
  switch (kind()) {
    case RawFunction::kRegularFunction:
    case RawFunction::kClosureFunction:
    case RawFunction::kImplicitClosureFunction:
    case RawFunction::kGetterFunction:
    case RawFunction::kSetterFunction:
      kind_str = "";
      break;
    case RawFunction::kSignatureFunction:
      kind_str = " signature";
      break;
    case RawFunction::kConstructor:
      kind_str = is_static() ? " factory" : " constructor";
      break;
    case RawFunction::kImplicitGetter:
      kind_str = " getter";
      break;
    case RawFunction::kImplicitSetter:
      kind_str = " setter";
      break;
    case RawFunction::kImplicitStaticFinalGetter:
      kind_str = " static-final-getter";
      break;
    case RawFunction::kMethodExtractor:
      kind_str = " method-extractor";
      break;
    case RawFunction::kNoSuchMethodDispatcher:
      kind_str = " no-such-method-dispatcher";
      break;
    case RawFunction::kDynamicInvocationForwarder:
      // Fixed typo ("forwader") in the kind label.
      kind_str = " dynamic-invocation-forwarder";
      break;
    case RawFunction::kInvokeFieldDispatcher:
      // Leading space added for consistency with the other kind labels;
      // without it the output ran the label into the previous qualifier.
      kind_str = " invoke-field-dispatcher";
      break;
    case RawFunction::kIrregexpFunction:
      // Leading space added for consistency (see above).
      kind_str = " irregexp-function";
      break;
    default:
      UNREACHABLE();
  }
  const char* function_name = String::Handle(name()).ToCString();
  return OS::SCreate(Thread::Current()->zone(), "Function '%s':%s%s%s%s.",
                     function_name, static_str, abstract_str, kind_str,
                     const_str);
}
// Stores the context scope describing the variables captured by the closure.
void ClosureData::set_context_scope(const ContextScope& value) const {
  StorePointer(&raw_ptr()->context_scope_, value.raw());
}

// Caches the closure instance of an implicit static closure. Write-once:
// the slot must still be null (asserted below).
void ClosureData::set_implicit_static_closure(const Instance& closure) const {
  ASSERT(!closure.IsNull());
  ASSERT(raw_ptr()->closure_ == Instance::null());
  StorePointer(&raw_ptr()->closure_, closure.raw());
}

// Stores the enclosing function of the closure.
void ClosureData::set_parent_function(const Function& value) const {
  StorePointer(&raw_ptr()->parent_function_, value.raw());
}

// Stores the closure's signature type.
void ClosureData::set_signature_type(const Type& value) const {
  StorePointer(&raw_ptr()->signature_type_, value.raw());
}

// Allocates a new ClosureData object in old space.
RawClosureData* ClosureData::New() {
  ASSERT(Object::closure_data_class() != Class::null());
  RawObject* raw = Object::Allocate(ClosureData::kClassId,
                                    ClosureData::InstanceSize(), Heap::kOld);
  return reinterpret_cast<RawClosureData*>(raw);
}
const char* ClosureData::ToCString() const {
if (IsNull()) {
return "ClosureData: null";
}
const Function& parent = Function::Handle(parent_function());
const Type& type = Type::Handle(signature_type());
return OS::SCreate(Thread::Current()->zone(),
"ClosureData: context_scope: 0x%" Px
" parent_function: %s signature_type: %s"
" implicit_static_closure: 0x%" Px,
reinterpret_cast<uword>(context_scope()),
parent.IsNull() ? "null" : parent.ToCString(),
type.IsNull() ? "null" : type.ToCString(),
reinterpret_cast<uword>(implicit_static_closure()));
}
// Stores the enclosing function of the signature function.
void SignatureData::set_parent_function(const Function& value) const {
  StorePointer(&raw_ptr()->parent_function_, value.raw());
}

// Stores the signature type represented by the signature function.
void SignatureData::set_signature_type(const Type& value) const {
  StorePointer(&raw_ptr()->signature_type_, value.raw());
}

// Allocates a new SignatureData object in the given space.
RawSignatureData* SignatureData::New(Heap::Space space) {
  ASSERT(Object::signature_data_class() != Class::null());
  RawObject* raw = Object::Allocate(SignatureData::kClassId,
                                    SignatureData::InstanceSize(), space);
  return reinterpret_cast<RawSignatureData*>(raw);
}
const char* SignatureData::ToCString() const {
if (IsNull()) {
return "SignatureData: null";
}
const Function& parent = Function::Handle(parent_function());
const Type& type = Type::Handle(signature_type());
return OS::SCreate(Thread::Current()->zone(),
"SignatureData parent_function: %s signature_type: %s",
parent.IsNull() ? "null" : parent.ToCString(),
type.IsNull() ? "null" : type.ToCString());
}
// Stores the type the redirecting factory redirects to; must not be null.
void RedirectionData::set_type(const Type& value) const {
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->type_, value.raw());
}

// Stores the identifier of the target constructor.
void RedirectionData::set_identifier(const String& value) const {
  StorePointer(&raw_ptr()->identifier_, value.raw());
}

// Stores the resolved target function of the redirection.
void RedirectionData::set_target(const Function& value) const {
  StorePointer(&raw_ptr()->target_, value.raw());
}

// Allocates a new RedirectionData object in old space.
RawRedirectionData* RedirectionData::New() {
  ASSERT(Object::redirection_data_class() != Class::null());
  RawObject* raw = Object::Allocate(
      RedirectionData::kClassId, RedirectionData::InstanceSize(), Heap::kOld);
  return reinterpret_cast<RawRedirectionData*>(raw);
}
const char* RedirectionData::ToCString() const {
if (IsNull()) {
return "RedirectionData: null";
}
const Type& redir_type = Type::Handle(type());
const String& ident = String::Handle(identifier());
const Function& target_fun = Function::Handle(target());
return OS::SCreate(Thread::Current()->zone(),
"RedirectionData: type: %s identifier: %s target: %s",
redir_type.IsNull() ? "null" : redir_type.ToCString(),
ident.IsNull() ? "null" : ident.ToCString(),
target_fun.IsNull() ? "null" : target_fun.ToCString());
}
// Returns a clone of this field (which must be an original, not a clone).
RawField* Field::CloneFromOriginal() const {
  return this->Clone(*this);
}
// Returns the original field of a clone, or the field itself if it is
// already the original. A clone points at its original via the owner slot.
RawField* Field::Original() const {
  if (IsNull()) {
    return Field::null();
  }
  const Object& owner = Object::Handle(raw_ptr()->owner_);
  if (!owner.IsField()) {
    return this->raw();  // Already the original.
  }
  return Field::RawCast(owner.raw());
}
// Marks this field as a clone of |value| by pointing its owner slot at the
// original field (see Field::Original()).
void Field::SetOriginal(const Field& value) const {
  ASSERT(value.IsOriginal());
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->owner_, reinterpret_cast<RawObject*>(value.raw()));
}
// Returns the getter name for |field_name| (prefix + name), as a plain
// (non-symbol) string.
RawString* Field::GetterName(const String& field_name) {
  return String::Concat(Symbols::GetterPrefix(), field_name);
}

// Returns the getter name for |field_name| as an interned symbol.
RawString* Field::GetterSymbol(const String& field_name) {
  return Symbols::FromGet(Thread::Current(), field_name);
}

// Returns the getter symbol for |field_name| if already interned, else null.
RawString* Field::LookupGetterSymbol(const String& field_name) {
  return Symbols::LookupFromGet(Thread::Current(), field_name);
}

// Returns the setter name for |field_name| (prefix + name), as a plain
// (non-symbol) string.
RawString* Field::SetterName(const String& field_name) {
  return String::Concat(Symbols::SetterPrefix(), field_name);
}

// Returns the setter name for |field_name| as an interned symbol.
RawString* Field::SetterSymbol(const String& field_name) {
  return Symbols::FromSet(Thread::Current(), field_name);
}

// Returns the setter symbol for |field_name| if already interned, else null.
RawString* Field::LookupSetterSymbol(const String& field_name) {
  return Symbols::LookupFromSet(Thread::Current(), field_name);
}

// Strips the getter prefix from |getter_name| to recover the field name.
RawString* Field::NameFromGetter(const String& getter_name) {
  return Symbols::New(Thread::Current(), getter_name, kGetterPrefixLength,
                      getter_name.Length() - kGetterPrefixLength);
}

// Strips the setter prefix from |setter_name| to recover the field name.
RawString* Field::NameFromSetter(const String& setter_name) {
  return Symbols::New(Thread::Current(), setter_name, kSetterPrefixLength,
                      setter_name.Length() - kSetterPrefixLength);
}

// Returns true if |function_name| starts with the getter prefix.
bool Field::IsGetterName(const String& function_name) {
  return function_name.StartsWith(Symbols::GetterPrefix());
}

// Returns true if |function_name| starts with the setter prefix.
bool Field::IsSetterName(const String& function_name) {
  return function_name.StartsWith(Symbols::SetterPrefix());
}
// Stores this field's name; must be a symbol, and may only be set on an
// original field (not a clone).
void Field::set_name(const String& value) const {
  ASSERT(value.IsSymbol());
  ASSERT(IsOriginal());
  StorePointer(&raw_ptr()->name_, value.raw());
}
// Returns the raw owner of this field. For a clone, the owner slot holds the
// original field instead, so the owner is read from the original.
RawObject* Field::RawOwner() const {
  if (IsOriginal()) {
    return raw_ptr()->owner_;
  }
  const Field& original = Field::Handle(Original());
  ASSERT(original.IsOriginal());
  // The original's owner slot must hold a real owner, never another field.
  ASSERT(!Object::Handle(original.raw_ptr()->owner_).IsField());
  return original.raw_ptr()->owner_;
}
// Returns the class this field belongs to. The owner slot of the original
// field holds either a Class or a PatchClass (in which case the patched
// class is returned).
RawClass* Field::Owner() const {
  const Field& field = Field::Handle(Original());
  ASSERT(field.IsOriginal());
  const Object& obj = Object::Handle(field.raw_ptr()->owner_);
  if (obj.IsClass()) {
    return Class::Cast(obj).raw();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).patched_class();
}
// Like Owner(), but when the owner is a PatchClass this returns its origin
// class instead of the patched class.
RawClass* Field::Origin() const {
  const Field& field = Field::Handle(Original());
  ASSERT(field.IsOriginal());
  const Object& obj = Object::Handle(field.raw_ptr()->owner_);
  if (obj.IsClass()) {
    return Class::Cast(obj).raw();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).origin_class();
}
// Returns the script this field was declared in, via its owning class or
// patch class.
RawScript* Field::Script() const {
  // NOTE(turnidge): If you update this function, you probably want to
  // update Class::PatchFieldsAndFunctions() at the same time.
  const Field& field = Field::Handle(Original());
  ASSERT(field.IsOriginal());
  const Object& obj = Object::Handle(field.raw_ptr()->owner_);
  if (obj.IsClass()) {
    return Class::Cast(obj).script();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).script();
}
// Returns the kernel binary this field was loaded from, via its owning
// library (through the class or patch class). A cloned field delegates to
// its original.
RawExternalTypedData* Field::KernelData() const {
  const Object& obj = Object::Handle(this->raw_ptr()->owner_);
  // During background JIT compilation field objects are copied
  // and copy points to the original field via the owner field.
  if (obj.IsField()) {
    return Field::Cast(obj).KernelData();
  } else if (obj.IsClass()) {
    Library& library = Library::Handle(Class::Cast(obj).library());
    return library.kernel_data();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).library_kernel_data();
}
// Returns the offset of this field's kernel program within the kernel
// binary, resolved analogously to Field::KernelData().
intptr_t Field::KernelDataProgramOffset() const {
  const Object& obj = Object::Handle(raw_ptr()->owner_);
  // During background JIT compilation field objects are copied
  // and copy points to the original field via the owner field.
  if (obj.IsField()) {
    return Field::Cast(obj).KernelDataProgramOffset();
  } else if (obj.IsClass()) {
    Library& lib = Library::Handle(Class::Cast(obj).library());
    return lib.kernel_offset();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).library_kernel_offset();
}
#if !defined(DART_PRECOMPILED_RUNTIME)
// Reads this field's flags from its kernel metadata to report whether it
// was declared `covariant` (*is_covariant) and whether it is generic
// covariant impl (*is_generic_covariant). Both out-params must be non-null.
void Field::GetCovarianceAttributes(bool* is_covariant,
                                    bool* is_generic_covariant) const {
  Thread* thread = Thread::Current();
  // Reuse the thread we already fetched instead of a second TLS lookup.
  Zone* zone = thread->zone();
  auto& script = Script::Handle(zone, Script());
  kernel::TranslationHelper translation_helper(thread);
  translation_helper.InitFromScript(script);
  // Position a kernel reader at this field's declaration and decode the
  // helper up to (and including) the flags word.
  kernel::KernelReaderHelper kernel_reader_helper(
      zone, &translation_helper, script,
      ExternalTypedData::Handle(zone, KernelData()), KernelDataProgramOffset());
  kernel_reader_helper.SetOffset(kernel_offset());
  kernel::FieldHelper field_helper(&kernel_reader_helper);
  field_helper.ReadUntilIncluding(kernel::FieldHelper::kFlags);
  *is_covariant = field_helper.IsCovariant();
  *is_generic_covariant = field_helper.IsGenericCovariantImpl();
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
// Called at finalization time. Installs the finalized declared type of this
// field; mutator-only and only on original (non-clone) fields.
void Field::SetFieldType(const AbstractType& value) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(IsOriginal());
  ASSERT(!value.IsNull());
  // Avoid a redundant write barrier when the type is unchanged.
  if (value.raw() != type()) {
    StorePointer(&raw_ptr()->type_, value.raw());
  }
}
// Allocates an uninitialized Field in old space. Callers are expected to
// initialize it via InitializeNew().
RawField* Field::New() {
  ASSERT(Object::field_class() != Class::null());
  RawObject* raw =
      Object::Allocate(Field::kClassId, Field::InstanceSize(), Heap::kOld);
  return reinterpret_cast<RawField*>(raw);
}
// Initializes a freshly allocated Field with its declaration attributes and
// the default (most optimistic) guarded-state feedback.
void Field::InitializeNew(const Field& result,
                          const String& name,
                          bool is_static,
                          bool is_final,
                          bool is_const,
                          bool is_reflectable,
                          const Object& owner,
                          TokenPosition token_pos,
                          TokenPosition end_token_pos) {
  result.set_name(name);
  result.set_is_static(is_static);
  if (!is_static) {
    // Instance field offsets are assigned later during class finalization.
    result.SetOffset(0);
  }
  result.set_is_final(is_final);
  result.set_is_const(is_const);
  result.set_is_reflectable(is_reflectable);
  result.set_is_double_initialized(false);
  result.set_owner(owner);
  result.set_token_pos(token_pos);
  result.set_end_token_pos(end_token_pos);
  result.set_has_initializer(false);
  result.set_is_unboxing_candidate(true);
  result.set_initializer_changed_after_initialization(false);
  result.set_kernel_offset(0);
  result.set_static_type_exactness_state(
      StaticTypeExactnessState::NotTracking());
  Isolate* isolate = Isolate::Current();
  // Use field guards if they are enabled and the isolate has never reloaded.
  // TODO(johnmccutchan): The reload case assumes the worst case (everything is
  // dynamic and possibly null). Attempt to relax this later.
#if defined(PRODUCT)
  const bool use_guarded_cid =
      FLAG_precompiled_mode || isolate->use_field_guards();
#else
  const bool use_guarded_cid =
      FLAG_precompiled_mode ||
      (isolate->use_field_guards() && !isolate->HasAttemptedReload());
#endif  // defined(PRODUCT)
  // kIllegalCid means "no store seen yet"; kDynamicCid disables tracking.
  result.set_guarded_cid(use_guarded_cid ? kIllegalCid : kDynamicCid);
  result.set_is_nullable(use_guarded_cid ? false : true);
  result.set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset);
  // Presently, we only attempt to remember the list length for final fields.
  if (is_final && use_guarded_cid) {
    result.set_guarded_list_length(Field::kUnknownFixedLength);
  } else {
    result.set_guarded_list_length(Field::kNoFixedLength);
  }
}
// Allocates and initializes a new field with the given declared type.
RawField* Field::New(const String& name,
                     bool is_static,
                     bool is_final,
                     bool is_const,
                     bool is_reflectable,
                     const Object& owner,
                     const AbstractType& type,
                     TokenPosition token_pos,
                     TokenPosition end_token_pos) {
  ASSERT(!owner.IsNull());
  const Field& result = Field::Handle(Field::New());
  InitializeNew(result, name, is_static, is_final, is_const, is_reflectable,
                owner, token_pos, end_token_pos);
  result.SetFieldType(type);
  return result.raw();
}
// Allocates a top-level (library-level) field: always static and
// reflectable. The caller must install the type separately.
RawField* Field::NewTopLevel(const String& name,
                             bool is_final,
                             bool is_const,
                             const Object& owner,
                             TokenPosition token_pos,
                             TokenPosition end_token_pos) {
  ASSERT(!owner.IsNull());
  const Field& result = Field::Handle(Field::New());
  InitializeNew(result, name, /*is_static=*/true,
                is_final, is_const, /*is_reflectable=*/true,
                owner, token_pos, end_token_pos);
  return result.raw();
}
// Clones this field for |new_owner| (used when patching classes). The
// clone's owner is a PatchClass pairing the new owner with the original
// owner, and the field type is re-based onto the new owner's type
// parameters when the new owner is generic.
RawField* Field::Clone(const Class& new_owner) const {
  Field& clone = Field::Handle();
  clone ^= Object::Clone(*this, Heap::kOld);
  const Class& owner = Class::Handle(this->Owner());
  const PatchClass& clone_owner =
      PatchClass::Handle(PatchClass::New(new_owner, owner));
  clone.set_owner(clone_owner);
  if (!clone.is_static()) {
    // Instance field offset will be recomputed for the new owner.
    clone.SetOffset(0);
  }
  if (new_owner.NumTypeParameters() > 0) {
    // Adjust the field type to refer to type parameters of the new owner.
    AbstractType& type = AbstractType::Handle(clone.type());
    type ^= type.CloneUninstantiated(new_owner);
    clone.SetFieldType(type);
  }
  return clone.raw();
}
// Clones this field as a copy that remembers |original| as its origin
// (used for the background-compilation copies mentioned in KernelData()).
RawField* Field::Clone(const Field& original) const {
  if (original.IsNull()) {
    return Field::null();
  }
  ASSERT(original.IsOriginal());
  Field& clone = Field::Handle();
  clone ^= Object::Clone(*this, Heap::kOld);
  clone.SetOriginal(original);
  clone.set_kernel_offset(original.kernel_offset());
  return clone.raw();
}
// Returns a fingerprint of this field's kernel source, used to detect
// source changes. Always 0 in the precompiled runtime (no kernel present).
int32_t Field::SourceFingerprint() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  return kernel::KernelSourceFingerprintHelper::CalculateFieldFingerprint(
      *this);
#else
  return 0;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
// Initializer source text is not available through this path; callers must
// not reach here.
RawString* Field::InitializingExpression() const {
  UNREACHABLE();
  return String::null();
}
// Returns the name shown to users: the internal name verbatim when
// --show_internal_names is set, otherwise the scrubbed (demangled) name.
RawString* Field::UserVisibleName() const {
  if (FLAG_show_internal_names) {
    return name();
  }
  const String& internal_name = String::Handle(name());
  return String::ScrubName(internal_name);
}
// Guarded list-length feedback: the fixed length observed for list values
// stored into this field, or a sentinel (kUnknownFixedLength /
// kNoFixedLength).
intptr_t Field::guarded_list_length() const {
  return Smi::Value(raw_ptr()->guarded_list_length_);
}
void Field::set_guarded_list_length(intptr_t list_length) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(IsOriginal());
  StoreSmi(&raw_ptr()->guarded_list_length_, Smi::New(list_length));
}
// In-object offset of the length slot of stored list values. Persisted
// biased by kHeapObjectTag so the value fits in an int8_t.
intptr_t Field::guarded_list_length_in_object_offset() const {
  return raw_ptr()->guarded_list_length_in_object_offset_ + kHeapObjectTag;
}
void Field::set_guarded_list_length_in_object_offset(
    intptr_t list_length_offset) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(IsOriginal());
  StoreNonPointer(&raw_ptr()->guarded_list_length_in_object_offset_,
                  static_cast<int8_t>(list_length_offset - kHeapObjectTag));
  // Verify the biased int8_t round-trips the original offset.
  ASSERT(guarded_list_length_in_object_offset() == list_length_offset);
}
const char* Field::ToCString() const {
if (IsNull()) {
return "Field: null";
}
const char* kF0 = is_static() ? " static" : "";
const char* kF1 = is_final() ? " final" : "";
const char* kF2 = is_const() ? " const" : "";
const char* field_name = String::Handle(name()).ToCString();
const Class& cls = Class::Handle(Owner());
const char* cls_name = String::Handle(cls.Name()).ToCString();
return OS::SCreate(Thread::Current()->zone(), "Field <%s.%s>:%s%s%s",
cls_name, field_name, kF0, kF1, kF2);
}
// Build a closure object that gets (or sets) the contents of a static
// field f and cache the closure in a newly created static field named #f
// (or ##f in the setter case: the code below prepends a second '#' rather
// than appending '=').
RawInstance* Field::AccessorClosure(bool make_setter) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(is_static());
  const Class& field_owner = Class::Handle(zone, Owner());
  String& closure_name = String::Handle(zone, this->name());
  closure_name = Symbols::FromConcat(thread, Symbols::HashMark(), closure_name);
  if (make_setter) {
    // Second '#' distinguishes the setter cache field from the getter's.
    closure_name =
        Symbols::FromConcat(thread, Symbols::HashMark(), closure_name);
  }
  Field& closure_field = Field::Handle(zone);
  closure_field = field_owner.LookupStaticField(closure_name);
  if (!closure_field.IsNull()) {
    // Cache hit: return the previously created closure.
    ASSERT(closure_field.is_static());
    const Instance& closure =
        Instance::Handle(zone, closure_field.StaticValue());
    ASSERT(!closure.IsNull());
    ASSERT(closure.IsClosure());
    return closure.raw();
  }
  // This is the first time a closure for this field is requested.
  // Create the closure and a new static field in which it is stored.
  const char* field_name = String::Handle(zone, name()).ToCString();
  String& expr_src = String::Handle(zone);
  if (make_setter) {
    expr_src = String::NewFormatted("(%s_) { return %s = %s_; }", field_name,
                                    field_name, field_name);
  } else {
    expr_src = String::NewFormatted("() { return %s; }", field_name);
  }
  Object& result =
      Object::Handle(zone, field_owner.Evaluate(expr_src, Object::empty_array(),
                                                Object::empty_array()));
  ASSERT(result.IsInstance());
  // The caller may expect the closure to be allocated in old space. Copy
  // the result here, since Object::Clone() is a private method.
  result = Object::Clone(result, Heap::kOld);
  closure_field = Field::New(closure_name,
                             true,   // is_static
                             true,   // is_final
                             true,   // is_const
                             false,  // is_reflectable
                             field_owner, Object::dynamic_type(),
                             this->token_pos(), this->end_token_pos());
  closure_field.SetStaticValue(Instance::Cast(result), true);
  field_owner.AddField(closure_field);
  return Instance::RawCast(result.raw());
}
// Convenience wrappers over AccessorClosure().
RawInstance* Field::GetterClosure() const {
  return AccessorClosure(false);
}
RawInstance* Field::SetterClosure() const {
  return AccessorClosure(true);
}
// Array of optimized Code objects that depend on this field's guards.
RawArray* Field::dependent_code() const {
  return raw_ptr()->dependent_code_;
}
void Field::set_dependent_code(const Array& array) const {
  ASSERT(IsOriginal());
  StorePointer(&raw_ptr()->dependent_code_, array.raw());
}
// Weak collection of optimized Code objects that embed assumptions about a
// field's guarded state. Used to find and disable that code when a guard
// is violated.
class FieldDependentArray : public WeakCodeReferences {
 public:
  explicit FieldDependentArray(const Field& field)
      : WeakCodeReferences(Array::Handle(field.dependent_code())),
        field_(field) {}
  // Invoked when the backing array is grown/compacted.
  virtual void UpdateArrayTo(const Array& value) {
    field_.set_dependent_code(value);
  }
  virtual void ReportDeoptimization(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print("Deoptimizing %s because guard on field %s failed.\n",
                function.ToFullyQualifiedCString(), field_.ToCString());
    }
  }
  virtual void ReportSwitchingCode(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print(
          "Switching '%s' to unoptimized code because guard"
          " on field '%s' was violated.\n",
          function.ToFullyQualifiedCString(), field_.ToCString());
    }
  }

 private:
  const Field& field_;
  DISALLOW_COPY_AND_ASSIGN(FieldDependentArray);
};
// Registers optimized |code| as depending on this field's guard state.
void Field::RegisterDependentCode(const Code& code) const {
  ASSERT(IsOriginal());
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  ASSERT(code.is_optimized());
  FieldDependentArray a(*this);
  a.Register(code);
}
// Disables (deoptimizes) all code registered as dependent on this field.
void Field::DeoptimizeDependentCode() const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(IsOriginal());
  FieldDependentArray a(*this);
  if (FLAG_trace_deoptimization && a.HasCodes()) {
    THR_Print("Deopt for field guard (field %s)\n", ToCString());
  }
  a.DisableCode();
}
// True if both fields carry identical guard feedback (cid, nullability,
// list length, unboxing candidacy and exactness state). Used to compare a
// field against its background-compilation copy.
bool Field::IsConsistentWith(const Field& other) const {
  return (raw_ptr()->guarded_cid_ == other.raw_ptr()->guarded_cid_) &&
         (raw_ptr()->is_nullable_ == other.raw_ptr()->is_nullable_) &&
         (raw_ptr()->guarded_list_length_ ==
          other.raw_ptr()->guarded_list_length_) &&
         (is_unboxing_candidate() == other.is_unboxing_candidate()) &&
         (static_type_exactness_state().Encode() ==
          other.static_type_exactness_state().Encode());
}
// True if this static field still holds the uninitialized sentinel (its
// initializer has not run). Must not be observed mid-initialization.
bool Field::IsUninitialized() const {
  const Instance& value = Instance::Handle(raw_ptr()->value_.static_value_);
  ASSERT(value.raw() != Object::transition_sentinel().raw());
  return value.raw() == Object::sentinel().raw();
}
// Installs a precompiled initializer function into the initializer slot.
void Field::SetPrecompiledInitializer(const Function& initializer) const {
  ASSERT(IsOriginal());
  StorePointer(&raw_ptr()->initializer_.precompiled_, initializer.raw());
}
// The initializer slot is a union; it holds a precompiled initializer
// exactly when it contains a (heap-allocated) Function.
bool Field::HasPrecompiledInitializer() const {
  return raw_ptr()->initializer_.precompiled_->IsHeapObject() &&
         raw_ptr()->initializer_.precompiled_->IsFunction();
}
// Runs this static field's initializer. Uses a two-sentinel protocol:
// `sentinel` means uninitialized; `transition_sentinel` is installed while
// the initializer runs so that re-entry is detected as a cyclic
// initialization error. Propagates (throws) any error from the initializer.
void Field::EvaluateInitializer() const {
  ASSERT(IsOriginal());
  ASSERT(is_static());
  if (StaticValue() == Object::sentinel().raw()) {
    // Mark as in-progress before evaluating so cycles are detectable.
    SetStaticValue(Object::transition_sentinel());
    const Object& value =
        Object::Handle(Compiler::EvaluateStaticInitializer(*this));
    if (value.IsError()) {
      // Reset to null so a later read does not observe the transition
      // sentinel, then rethrow.
      SetStaticValue(Object::null_instance());
      Exceptions::PropagateError(Error::Cast(value));
      UNREACHABLE();
    }
    ASSERT(value.IsNull() || value.IsInstance());
    SetStaticValue(value.IsNull() ? Instance::null_instance()
                                  : Instance::Cast(value));
    return;
  } else if (StaticValue() == Object::transition_sentinel().raw()) {
    // Re-entered while initializing: cyclic initialization.
    const Array& ctor_args = Array::Handle(Array::New(1));
    const String& field_name = String::Handle(name());
    ctor_args.SetAt(0, field_name);
    Exceptions::ThrowByType(Exceptions::kCyclicInitializationError, ctor_args);
    UNREACHABLE();
    return;
  }
  UNREACHABLE();
}
// Returns the length of |value| when it is a fixed-length list type
// (TypedData or Array); all other values report kNoFixedLength.
static intptr_t GetListLength(const Object& value) {
  if (value.IsTypedData()) {
    return TypedData::Cast(value).Length();
  }
  if (value.IsArray()) {
    return Array::Cast(value).Length();
  }
  // Growable arrays have variable length; external typed data and typed
  // data views are not tracked yet (see TODOs in GetListLengthOffset).
  return Field::kNoFixedLength;
}
// Returns the in-object offset of the length slot for fixed-length list
// class ids (TypedData, Array, ImmutableArray); kUnknownLengthOffset for
// everything else.
static intptr_t GetListLengthOffset(intptr_t cid) {
  if (RawObject::IsTypedDataClassId(cid)) {
    return TypedData::length_offset();
  }
  if (cid == kArrayCid || cid == kImmutableArrayCid) {
    return Array::length_offset();
  }
  // Growable arrays have variable length; external typed data and typed
  // data views are not supported yet (TODO(johnmccutchan): enable them).
  return Field::kUnknownLengthOffset;
}
// Returns a zone-allocated human-readable summary of this field's guard
// state: "<?>" = no store seen, "<*>" = untracked/dynamic, otherwise the
// guarded class plus nullability, optional list length and exactness info.
const char* Field::GuardedPropertiesAsCString() const {
  if (guarded_cid() == kIllegalCid) {
    return "<?>";
  } else if (guarded_cid() == kDynamicCid) {
    ASSERT(!static_type_exactness_state().IsExactOrUninitialized());
    return "<*>";
  }
  Zone* zone = Thread::Current()->zone();
  const char* exactness = "";
  if (static_type_exactness_state().IsTracking()) {
    exactness =
        zone->PrintToString(" {%s}", static_type_exactness_state().ToCString());
  }
  const Class& cls =
      Class::Handle(Isolate::Current()->class_table()->At(guarded_cid()));
  const char* class_name = String::Handle(cls.Name()).ToCString();
  // List length is only tracked for non-nullable final list-typed fields.
  if (RawObject::IsBuiltinListClassId(guarded_cid()) && !is_nullable() &&
      is_final()) {
    ASSERT(guarded_list_length() != kUnknownFixedLength);
    if (guarded_list_length() == kNoFixedLength) {
      return zone->PrintToString("<%s [*]%s>", class_name, exactness);
    } else {
      return zone->PrintToString(
          "<%s [%" Pd " @%" Pd "]%s>", class_name, guarded_list_length(),
          guarded_list_length_in_object_offset(), exactness);
    }
  }
  return zone->PrintToString("<%s %s%s>",
                             is_nullable() ? "nullable" : "not-nullable",
                             class_name, exactness);
}
// Derives the cached in-object length-slot offset from the guarded cid when
// a fixed list length is being tracked; otherwise resets it to unknown.
void Field::InitializeGuardedListLengthInObjectOffset() const {
  ASSERT(IsOriginal());
  if (needs_length_check() &&
      (guarded_list_length() != Field::kUnknownFixedLength)) {
    const intptr_t offset = GetListLengthOffset(guarded_cid());
    set_guarded_list_length_in_object_offset(offset);
    ASSERT(offset != Field::kUnknownLengthOffset);
  } else {
    set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset);
  }
}
// Updates the guarded class id / nullability / list length feedback for a
// store of |value|. Returns true if the guard state changed in a way that
// invalidates previously collected assumptions (dependent code must be
// deoptimized), false if the store matched the existing feedback.
bool Field::UpdateGuardedCidAndLength(const Object& value) const {
  ASSERT(IsOriginal());
  const intptr_t cid = value.GetClassId();
  if (guarded_cid() == kIllegalCid) {
    // Field is assigned first time.
    set_guarded_cid(cid);
    set_is_nullable(cid == kNullCid);
    // Start tracking length if needed.
    ASSERT((guarded_list_length() == Field::kUnknownFixedLength) ||
           (guarded_list_length() == Field::kNoFixedLength));
    if (needs_length_check()) {
      ASSERT(guarded_list_length() == Field::kUnknownFixedLength);
      set_guarded_list_length(GetListLength(value));
      InitializeGuardedListLengthInObjectOffset();
    }
    if (FLAG_trace_field_guards) {
      THR_Print(" => %s\n", GuardedPropertiesAsCString());
    }
    // First store never invalidates: no code depended on feedback yet.
    return false;
  }
  if ((cid == guarded_cid()) || ((cid == kNullCid) && is_nullable())) {
    // Class id of the assigned value matches expected class id and nullability.
    // If we are tracking length check if it has matches.
    if (needs_length_check() &&
        (guarded_list_length() != GetListLength(value))) {
      ASSERT(guarded_list_length() != Field::kUnknownFixedLength);
      set_guarded_list_length(Field::kNoFixedLength);
      set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset);
      return true;
    }
    // Everything matches.
    return false;
  }
  if ((cid == kNullCid) && !is_nullable()) {
    // Assigning null value to a non-nullable field makes it nullable.
    set_is_nullable(true);
  } else if ((cid != kNullCid) && (guarded_cid() == kNullCid)) {
    // Assigning non-null value to a field that previously contained only null
    // turns it into a nullable field with the given class id.
    ASSERT(is_nullable());
    set_guarded_cid(cid);
  } else {
    // Give up on tracking class id of values contained in this field.
    ASSERT(guarded_cid() != cid);
    set_guarded_cid(kDynamicCid);
    set_is_nullable(true);
  }
  // If we were tracking length drop collected feedback.
  if (needs_length_check()) {
    ASSERT(guarded_list_length() != Field::kUnknownFixedLength);
    set_guarded_list_length(Field::kNoFixedLength);
    set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset);
  }
  // Expected class id or nullability of the field changed.
  return true;
}
// Given the type G<T0, ..., Tn> and class C<U0, ..., Un> find path to C at G.
// This path can be used to compute type arguments of C at G.
//
// Note: we are relying on the restriction that the same class can only occur
// once among the supertypes.
//
// Returns true and fills |path| with the chain of supertypes from |cls| up
// to |type|'s class; returns false (with |path| unchanged on balance) when
// no instantiation exists.
static bool FindInstantiationOf(const Type& type,
                                const Class& cls,
                                GrowableArray<const AbstractType*>* path,
                                bool consider_only_super_classes) {
  if (type.type_class() == cls.raw()) {
    return true;  // Found instantiation.
  }
  // Depth-first search through the superclass chain first...
  Class& cls2 = Class::Handle();
  AbstractType& super_type = AbstractType::Handle();
  super_type = cls.super_type();
  if (!super_type.IsNull() && !super_type.IsObjectType()) {
    cls2 = super_type.type_class();
    path->Add(&super_type);
    if (FindInstantiationOf(type, cls2, path, consider_only_super_classes)) {
      return true;  // Found instantiation.
    }
    path->RemoveLast();
  }
  // ...then, unless restricted to classes, through implemented interfaces.
  if (!consider_only_super_classes) {
    Array& super_interfaces = Array::Handle(cls.interfaces());
    for (intptr_t i = 0; i < super_interfaces.Length(); i++) {
      super_type ^= super_interfaces.At(i);
      cls2 = super_type.type_class();
      path->Add(&super_type);
      if (FindInstantiationOf(type, cls2, path,
                              /*consider_only_super_classes=*/false)) {
        return true;  // Found instantiation.
      }
      path->RemoveLast();
    }
  }
  return false;  // Not found.
}
// Builds the "trivially exact" state for |cls| when its type-arguments
// field offset is representable in the compact encoding; otherwise the
// field falls back to not-exact.
static StaticTypeExactnessState TrivialTypeExactnessFor(const Class& cls) {
  const intptr_t offset = cls.type_arguments_field_offset();
  ASSERT(offset != Class::kNoTypeArguments);
  if (!StaticTypeExactnessState::CanRepresentAsTriviallyExact(offset)) {
    return StaticTypeExactnessState::NotExact();
  }
  return StaticTypeExactnessState::TriviallyExact(offset);
}
// Like TypeArguments::ToCString(), but safe for a null vector.
static const char* SafeTypeArgumentsToCString(const TypeArguments& args) {
  if (args.raw() == TypeArguments::null()) {
    return "<null>";
  }
  return args.ToCString();
}
// Computes the exactness state for a store of |value| into a field of
// declared type |static_type|: whether the value's runtime type arguments
// provably match the field's static type arguments (trivially, or via an
// exact super class/interface), or whether exactness tracking must give up.
StaticTypeExactnessState StaticTypeExactnessState::Compute(
    const Type& static_type,
    const Instance& value,
    bool print_trace /* = false */) {
  const TypeArguments& static_type_args =
      TypeArguments::Handle(static_type.arguments());
  TypeArguments& args = TypeArguments::Handle();
  ASSERT(static_type.IsFinalized());
  const Class& cls = Class::Handle(value.clazz());
  GrowableArray<const AbstractType*> path(10);
  bool is_super_class = true;
  if (!FindInstantiationOf(static_type, cls, &path,
                           /*consider_only_super_classes=*/true)) {
    is_super_class = false;
    bool found_super_interface = FindInstantiationOf(
        static_type, cls, &path, /*consider_only_super_classes=*/false);
    ASSERT(found_super_interface);
  }
  // Trivial case: field has type G<T0, ..., Tn> and value has type
  // G<U0, ..., Un>. Check if type arguments match.
  if (path.is_empty()) {
    ASSERT(cls.raw() == static_type.type_class());
    args = value.GetTypeArguments();
    // TODO(dartbug.com/34170) Evaluate if comparing relevant subvectors (that
    // disregards superclass own arguments) improves precision of the
    // tracking.
    if (args.raw() == static_type_args.raw()) {
      return TrivialTypeExactnessFor(cls);
    }
    if (print_trace) {
      THR_Print(" expected %s got %s type arguments\n",
                SafeTypeArgumentsToCString(static_type_args),
                SafeTypeArgumentsToCString(args));
    }
    return StaticTypeExactnessState::NotExact();
  }
  // Value has type C<U0, ..., Un> and field has type G<T0, ..., Tn> and G != C.
  // Compute C<X0, ..., Xn> at G (Xi are free type arguments).
  // Path array contains a chain of immediate supertypes S0 <: S1 <: ... Sn,
  // such that S0 is an immediate supertype of C and Sn is G<...>.
  // Each Si might depend on type parameters of the previous supertype S{i-1}.
  // To compute C<X0, ..., Xn> at G we walk the chain backwards and
  // instantiate Si using type parameters of S{i-1} which gives us a type
  // depending on type parameters of S{i-2}.
  Error& error = Error::Handle();
  AbstractType& type = AbstractType::Handle(path.Last()->raw());
  for (intptr_t i = path.length() - 2; (i >= 0) && !type.IsInstantiated();
       i--) {
    args = path[i]->arguments();
    type = type.InstantiateFrom(
        args, TypeArguments::null_type_arguments(), kAllFree, &error,
        /*instantiation_trail=*/nullptr, /*bound_trail=*/nullptr, Heap::kNew);
  }
  if (type.IsInstantiated()) {
    // C<X0, ..., Xn> at G is fully instantiated and does not depend on
    // Xi. In this case just check if type arguments match.
    args = type.arguments();
    if (args.Equals(static_type_args)) {
      return is_super_class ? StaticTypeExactnessState::HasExactSuperClass()
                            : StaticTypeExactnessState::HasExactSuperType();
    }
    if (print_trace) {
      THR_Print(" expected %s got %s type arguments\n",
                SafeTypeArgumentsToCString(static_type_args),
                SafeTypeArgumentsToCString(args));
    }
    return StaticTypeExactnessState::NotExact();
  }
  // The most complicated case: C<X0, ..., Xn> at G depends on
  // Xi values. To compare type arguments we would need to instantiate
  // it fully from value's type arguments and compare with <U0, ..., Un>.
  // However this would complicate fast path in the native code. To avoid this
  // complication we would optimize for the trivial case: we check if
  // C<X0, ..., Xn> at G is exactly G<X0, ..., Xn> which means we can simply
  // compare value's type arguments (<T0, ..., Tn>) to field's type arguments
  // (<U0, ..., Un>) to establish if field type is exact.
  ASSERT(cls.IsGeneric());
  const intptr_t num_type_params = cls.NumTypeParameters();
  bool trivial_case =
      (num_type_params ==
       Class::Handle(static_type.type_class()).NumTypeParameters()) &&
      (value.GetTypeArguments() == static_type.arguments());
  if (!trivial_case && FLAG_trace_field_guards) {
    THR_Print("Not a simple case: %" Pd " vs %" Pd
              " type parameters, %s vs %s type arguments\n",
              num_type_params,
              Class::Handle(static_type.type_class()).NumTypeParameters(),
              SafeTypeArgumentsToCString(
                  TypeArguments::Handle(value.GetTypeArguments())),
              SafeTypeArgumentsToCString(static_type_args));
  }
  AbstractType& type_arg = AbstractType::Handle();
  args = type.arguments();
  for (intptr_t i = 0; (i < num_type_params) && trivial_case; i++) {
    type_arg = args.TypeAt(i);
    if (!type_arg.IsTypeParameter() ||
        (TypeParameter::Cast(type_arg).index() != i)) {
      if (FLAG_trace_field_guards) {
        THR_Print("  => encountered %s at index % " Pd "\n",
                  type_arg.ToCString(), i);
      }
      trivial_case = false;
    }
  }
  return trivial_case ? TrivialTypeExactnessFor(cls)
                      : StaticTypeExactnessState::NotExact();
}
// Human-readable name of this exactness state (zone-allocated when it
// embeds the type-arguments offset).
const char* StaticTypeExactnessState::ToCString() const {
  if (!IsTracking()) {
    return "not-tracking";
  }
  if (!IsExactOrUninitialized()) {
    return "not-exact";
  }
  if (IsTriviallyExact()) {
    return Thread::Current()->zone()->PrintToString(
        "trivially-exact(%" Pd ")", GetTypeArgumentsOffsetInWords());
  }
  if (IsHasExactSuperType()) {
    return "has-exact-super-type";
  }
  if (IsHasExactSuperClass()) {
    return "has-exact-super-class";
  }
  ASSERT(IsUninitialized());
  return "uninitialized-exactness";
}
// Updates the static-type exactness feedback for a store of |value|.
// Returns true when the state changed (dependent code must be invalidated).
bool Field::UpdateGuardedExactnessState(const Object& value) const {
  if (!static_type_exactness_state().IsExactOrUninitialized()) {
    // Nothing to update.
    return false;
  }
  if (guarded_cid() == kDynamicCid) {
    if (FLAG_trace_field_guards) {
      THR_Print(
          " => switching off exactness tracking because guarded cid is "
          "dynamic\n");
    }
    set_static_type_exactness_state(StaticTypeExactnessState::NotExact());
    return true;  // Invalidate.
  }
  // If we are storing null into a field or we have an exact super type
  // then there is nothing to do.
  if (value.IsNull() || static_type_exactness_state().IsHasExactSuperType() ||
      static_type_exactness_state().IsHasExactSuperClass()) {
    return false;
  }
  // If we are storing a non-null value into a field that is considered
  // to be trivially exact then we need to check if value has an appropriate
  // type.
  ASSERT(guarded_cid() != kNullCid);
  const Type& field_type = Type::Cast(AbstractType::Handle(type()));
  const TypeArguments& field_type_args =
      TypeArguments::Handle(field_type.arguments());
  const Instance& instance = Instance::Cast(value);
  TypeArguments& args = TypeArguments::Handle();
  if (static_type_exactness_state().IsTriviallyExact()) {
    // Trivially exact: value's own type arguments must be identical to the
    // field's static type arguments.
    args = instance.GetTypeArguments();
    if (args.raw() == field_type_args.raw()) {
      return false;
    }
    if (FLAG_trace_field_guards) {
      THR_Print(" expected %s got %s type arguments\n",
                field_type_args.ToCString(), args.ToCString());
    }
    set_static_type_exactness_state(StaticTypeExactnessState::NotExact());
    return true;
  }
  // First store with exactness still uninitialized: compute it now.
  ASSERT(static_type_exactness_state().IsUninitialized());
  set_static_type_exactness_state(StaticTypeExactnessState::Compute(
      field_type, instance, FLAG_trace_field_guards));
  return true;
}
// Records a store of |value| into this field: folds the value into the
// guarded cid/length and exactness feedback and deoptimizes dependent
// optimized code whenever the guard state changed.
void Field::RecordStore(const Object& value) const {
  ASSERT(IsOriginal());
  if (!Isolate::Current()->use_field_guards()) {
    return;
  }
  // Fast out: guards already disabled, or a null store into a field that is
  // already known to be nullable.
  if ((guarded_cid() == kDynamicCid) ||
      (is_nullable() && value.raw() == Object::null())) {
    return;
  }
  if (FLAG_trace_field_guards) {
    THR_Print("Store %s %s <- %s\n", ToCString(), GuardedPropertiesAsCString(),
              value.ToCString());
  }
  // Both updates must run unconditionally: each one records feedback even
  // when the other already requested invalidation.
  const bool cid_or_length_changed = UpdateGuardedCidAndLength(value);
  const bool exactness_changed = UpdateGuardedExactnessState(value);
  if (cid_or_length_changed || exactness_changed) {
    if (FLAG_trace_field_guards) {
      THR_Print(" => %s\n", GuardedPropertiesAsCString());
    }
    DeoptimizeDependentCode();
  }
}
// Resets all guard feedback for this field to the most conservative state
// (dynamic, nullable, no length, not exact) and deoptimizes dependent code.
void Field::ForceDynamicGuardedCidAndLength() const {
  // Assume nothing about this field.
  set_is_unboxing_candidate(false);
  set_guarded_cid(kDynamicCid);
  set_is_nullable(true);
  set_guarded_list_length(Field::kNoFixedLength);
  set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset);
  if (static_type_exactness_state().IsTracking()) {
    set_static_type_exactness_state(StaticTypeExactnessState::NotExact());
  }
  // Drop any code that relied on the above assumptions.
  DeoptimizeDependentCode();
}
// Simple Script accessors.
bool Script::HasSource() const {
  return raw_ptr()->source_ != String::null();
}
RawString* Script::Source() const {
  return raw_ptr()->source_;
}
void Script::set_compile_time_constants(const Array& value) const {
  StorePointer(&raw_ptr()->compile_time_constants_, value.raw());
}
void Script::set_kernel_program_info(const KernelProgramInfo& info) const {
  StorePointer(&raw_ptr()->kernel_program_info_, info.raw());
}
void Script::set_kernel_script_index(const intptr_t kernel_script_index) const {
  StoreNonPointer(&raw_ptr()->kernel_script_index_, kernel_script_index);
}
// String offset table of the kernel program this script belongs to.
RawTypedData* Script::kernel_string_offsets() const {
  KernelProgramInfo& program_info =
      KernelProgramInfo::Handle(kernel_program_info());
  ASSERT(!program_info.IsNull());
  return program_info.string_offsets();
}
// Builds the debugger's line-number table for this (kernel) script: a flat
// array of runs, each run being [null, line#, tokenPos, column, tokenPos,
// column, ...], derived from the line-starts table and the sorted
// debug-positions array.
RawGrowableObjectArray* Script::GenerateLineNumberArray() const {
  Zone* zone = Thread::Current()->zone();
  const GrowableObjectArray& info =
      GrowableObjectArray::Handle(zone, GrowableObjectArray::New());
  const Object& line_separator = Object::Handle(zone);
  if (kind() == RawScript::kKernelTag) {
    const TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
    if (line_starts_data.IsNull()) {
      // Scripts in the AOT snapshot do not have a line starts array.
      // A well-formed line number array has a leading null.
      info.Add(line_separator);  // New line.
      return info.raw();
    }
#if !defined(DART_PRECOMPILED_RUNTIME)
    Smi& value = Smi::Handle(zone);
    intptr_t line_count = line_starts_data.Length();
    ASSERT(line_count > 0);
    const Array& debug_positions_array = Array::Handle(debug_positions());
    intptr_t token_count = debug_positions_array.Length();
    int token_index = 0;
    kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
    // Line starts are delta-encoded; accumulate absolute positions.
    intptr_t previous_start = 0;
    for (int line_index = 0; line_index < line_count; ++line_index) {
      intptr_t start = previous_start + line_starts_reader.DeltaAt(line_index);
      // Output the rest of the tokens if we have no next line.
      intptr_t end = TokenPosition::kMaxSourcePos;
      if (line_index + 1 < line_count) {
        end = start + line_starts_reader.DeltaAt(line_index + 1);
      }
      bool first = true;
      // Consume all debug positions that fall on this line.
      while (token_index < token_count) {
        value ^= debug_positions_array.At(token_index);
        intptr_t debug_position = value.Value();
        if (debug_position >= end) break;
        if (first) {
          info.Add(line_separator);          // New line.
          value = Smi::New(line_index + 1);  // Line number.
          info.Add(value);
          first = false;
        }
        value ^= debug_positions_array.At(token_index);
        info.Add(value);                              // Token position.
        value = Smi::New(debug_position - start + 1);  // Column.
        info.Add(value);
        ++token_index;
      }
      previous_start = start;
    }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    return info.raw();
  }
  UNREACHABLE();
  return GrowableObjectArray::null();
}
// Returns a static string naming this script's kind tag (for debugging
// and service-protocol output).
const char* Script::GetKindAsCString() const {
  switch (kind()) {
    case RawScript::kScriptTag:
      return "script";
    case RawScript::kLibraryTag:
      return "library";
    case RawScript::kSourceTag:
      return "source";
    case RawScript::kPatchTag:
      return "patch";
    case RawScript::kEvaluateTag:
      return "evaluate";
    case RawScript::kKernelTag:
      return "kernel";
    default:
      UNIMPLEMENTED();
  }
  UNREACHABLE();
  return NULL;
}
// Simple pointer-slot setters for Script.
void Script::set_url(const String& value) const {
  StorePointer(&raw_ptr()->url_, value.raw());
}
void Script::set_resolved_url(const String& value) const {
  StorePointer(&raw_ptr()->resolved_url_, value.raw());
}
void Script::set_source(const String& value) const {
  StorePointer(&raw_ptr()->source_, value.raw());
}
void Script::set_line_starts(const TypedData& value) const {
  StorePointer(&raw_ptr()->line_starts_, value.raw());
}
void Script::set_debug_positions(const Array& value) const {
  StorePointer(&raw_ptr()->debug_positions_, value.raw());
}
void Script::set_yield_positions(const Array& value) const {
  StorePointer(&raw_ptr()->yield_positions_, value.raw());
}
// Yield positions, computed lazily for kernel scripts on first access.
RawArray* Script::yield_positions() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  Array& yields = Array::Handle(raw_ptr()->yield_positions_);
  if (yields.IsNull() && kind() == RawScript::kKernelTag) {
    // This is created lazily. Now we need it.
    kernel::CollectTokenPositionsFor(*this);
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  return raw_ptr()->yield_positions_;
}
RawTypedData* Script::line_starts() const {
  return raw_ptr()->line_starts_;
}
// Debug positions, computed lazily for kernel scripts on first access.
RawArray* Script::debug_positions() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  Array& debug_positions_array = Array::Handle(raw_ptr()->debug_positions_);
  if (debug_positions_array.IsNull() && kind() == RawScript::kKernelTag) {
    // This is created lazily. Now we need it.
    kernel::CollectTokenPositionsFor(*this);
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  return raw_ptr()->debug_positions_;
}
void Script::set_kind(RawScript::Kind value) const {
  StoreNonPointer(&raw_ptr()->kind_, value);
}
void Script::set_load_timestamp(int64_t value) const {
  StoreNonPointer(&raw_ptr()->load_timestamp_, value);
}
// Records the line/column offset at which this script's text begins
// (both must be non-negative; used when a script is embedded at an offset
// within a larger source).
void Script::SetLocationOffset(intptr_t line_offset,
                               intptr_t col_offset) const {
  ASSERT(line_offset >= 0);
  ASSERT(col_offset >= 0);
  StoreNonPointer(&raw_ptr()->line_offset_, line_offset);
  StoreNonPointer(&raw_ptr()->col_offset_, col_offset);
}
// Specialized for AOT compilation, which does this lookup for every token
// position that could be part of a stack trace.
//
// Returns the 1-based line number containing |target_token_pos|, or 0 when
// the position has no source or line information is unavailable.
intptr_t Script::GetTokenLineUsingLineStarts(
    TokenPosition target_token_pos) const {
  if (target_token_pos.IsNoSource()) {
    return 0;
  }
  Zone* zone = Thread::Current()->zone();
  TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
  if (line_starts_data.IsNull()) {
    // Only kernel scripts may be missing a (lazily created) line-starts
    // table; any other kind reaching here is a bug.
    ASSERT(kind() != RawScript::kKernelTag);
    UNREACHABLE();
  }
  if (kind() == RawScript::kKernelTag) {
#if !defined(DART_PRECOMPILED_RUNTIME)
    kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
    return line_starts_reader.LineNumberForPosition(target_token_pos.value());
#else
    return 0;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  } else {
    ASSERT(line_starts_data.Length() > 0);
    intptr_t offset = target_token_pos.Pos();
    intptr_t min = 0;
    intptr_t max = line_starts_data.Length() - 1;
    // Binary search to find the line containing this offset.
    while (min < max) {
      // Use intptr_t (not int) for the index so the arithmetic matches the
      // intptr_t bounds and cannot narrow on 64-bit targets.
      const intptr_t midpoint = (max - min + 1) / 2 + min;
      // Entries are int32 values; GetInt32 takes a byte offset, hence * 4.
      int32_t token_pos = line_starts_data.GetInt32(midpoint * 4);
      if (token_pos > offset) {
        max = midpoint - 1;
      } else {
        min = midpoint;
      }
    }
    return min + 1;  // Line numbers start at 1.
  }
}
// Computes the 1-based |line| (required) and optionally |column| and
// |token_len| for |token_pos|. Only kernel scripts are supported; any other
// kind is unreachable. When line-starts data is absent (AOT snapshot),
// reports line/column -1 and token_len 1.
void Script::GetTokenLocation(TokenPosition token_pos,
                              intptr_t* line,
                              intptr_t* column,
                              intptr_t* token_len) const {
  ASSERT(line != NULL);
  Zone* zone = Thread::Current()->zone();
  if (kind() == RawScript::kKernelTag) {
    const TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
    if (line_starts_data.IsNull()) {
      // Scripts in the AOT snapshot do not have a line starts array.
      *line = -1;
      if (column != NULL) {
        *column = -1;
      }
      if (token_len != NULL) {
        *token_len = 1;
      }
      return;
    }
#if !defined(DART_PRECOMPILED_RUNTIME)
    ASSERT(line_starts_data.Length() > 0);
    kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
    line_starts_reader.LocationForPosition(token_pos.value(), line, column);
    if (token_len != NULL) {
      // We don't explicitly save this data: Load the source
      // and find it from there.
      const String& source = String::Handle(zone, Source());
      intptr_t offset = token_pos.value();
      *token_len = 1;
      // If the token starts an identifier, extend token_len over the
      // identifier's characters; otherwise leave it at 1.
      if (offset < source.Length() &&
          Scanner::IsIdentStartChar(source.CharAt(offset))) {
        for (intptr_t i = offset + 1;
             i < source.Length() && Scanner::IsIdentChar(source.CharAt(i));
             ++i) {
          ++*token_len;
        }
      }
    }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    return;
  }
  UNREACHABLE();
}
// Computes the first and last token positions on |line_number|.
// Only kernel scripts are supported; reports kNoSource for both bounds when
// line-starts data or source text is unavailable (AOT snapshot).
void Script::TokenRangeAtLine(intptr_t line_number,
                              TokenPosition* first_token_index,
                              TokenPosition* last_token_index) const {
  ASSERT(first_token_index != NULL && last_token_index != NULL);
  ASSERT(line_number > 0);
  if (kind() == RawScript::kKernelTag) {
    const TypedData& line_starts_data = TypedData::Handle(line_starts());
    const String& source = String::Handle(Source());
    if (line_starts_data.IsNull() || source.IsNull()) {
      // Scripts in the AOT snapshot do not have a line starts array.
      *first_token_index = TokenPosition::kNoSource;
      *last_token_index = TokenPosition::kNoSource;
      return;
    }
#if !defined(DART_PRECOMPILED_RUNTIME)
    kernel::KernelLineStartsReader line_starts_reader(
        line_starts_data, Thread::Current()->zone());
    line_starts_reader.TokenRangeAtLine(source.Length(), line_number,
                                        first_token_index, last_token_index);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    return;
  }
  UNREACHABLE();
}
// Returns the text of |line_number| (1-based, adjusted by line_offset())
// with the trailing line terminator stripped. Never returns null: yields
// "<optimized out>" when source is absent (AOT) and the empty string when
// the line does not exist.
RawString* Script::GetLine(intptr_t line_number, Heap::Space space) const {
  const String& src = String::Handle(Source());
  if (src.IsNull()) {
    ASSERT(Dart::vm_snapshot_kind() == Snapshot::kFullAOT);
    return Symbols::OptimizedOut().raw();
  }
  intptr_t relative_line_number = line_number - line_offset();
  intptr_t current_line = 1;
  intptr_t line_start_idx = -1;
  intptr_t last_char_idx = -1;
  // Scan forward counting line terminators until the requested line has been
  // fully traversed.
  for (intptr_t ix = 0;
       (ix < src.Length()) && (current_line <= relative_line_number); ix++) {
    if ((current_line == relative_line_number) && (line_start_idx < 0)) {
      line_start_idx = ix;  // First character of the requested line.
    }
    if (src.CharAt(ix) == '\n') {
      current_line++;
    } else if (src.CharAt(ix) == '\r') {
      // A '\r' only counts as a line break when not part of a "\r\n" pair
      // (the '\n' of the pair is counted instead).
      if ((ix + 1 != src.Length()) && (src.CharAt(ix + 1) != '\n')) {
        current_line++;
      }
    } else {
      last_char_idx = ix;  // Last non-terminator character seen so far.
    }
  }
  // Guarantee that returned string is never NULL.
  if (line_start_idx >= 0) {
    return String::SubString(src, line_start_idx,
                             last_char_idx - line_start_idx + 1, space);
  } else {
    return Symbols::Empty().raw();
  }
}
// Returns the source text between token positions |from| and |to| by
// translating both to line/column and delegating to the line/column
// overload below.
RawString* Script::GetSnippet(TokenPosition from, TokenPosition to) const {
  intptr_t from_line;
  intptr_t from_column;
  intptr_t to_line;
  intptr_t to_column;
  GetTokenLocation(from, &from_line, &from_column);
  GetTokenLocation(to, &to_line, &to_column);
  return GetSnippet(from_line, from_column, to_line, to_column);
}
// Returns the source text from (from_line, from_column) up to but not
// including (to_line, to_column). Returns "<optimized out>" when source is
// absent, and null when either endpoint is never reached during the scan.
RawString* Script::GetSnippet(intptr_t from_line,
                              intptr_t from_column,
                              intptr_t to_line,
                              intptr_t to_column) const {
  const String& src = String::Handle(Source());
  if (src.IsNull()) {
    return Symbols::OptimizedOut().raw();
  }
  intptr_t length = src.Length();
  intptr_t line = 1 + line_offset();
  intptr_t column = 1;
  intptr_t scan_position = 0;
  intptr_t snippet_start = -1;
  intptr_t snippet_end = -1;
  // The column offset only applies to the first line of the script.
  if (from_line - line_offset() == 1) {
    column += col_offset();
  }
  while (scan_position != length) {
    if (snippet_start == -1) {
      if ((line == from_line) && (column == from_column)) {
        snippet_start = scan_position;
      }
    }
    char c = src.CharAt(scan_position);
    if (c == '\n') {
      // Column is reset to 0 here and becomes 1 after the unconditional
      // increment at the bottom of the loop.
      line++;
      column = 0;
    } else if (c == '\r') {
      line++;
      column = 0;
      // Treat "\r\n" as a single line terminator.
      if ((scan_position + 1 != length) &&
          (src.CharAt(scan_position + 1) == '\n')) {
        scan_position++;
      }
    }
    scan_position++;
    column++;
    if ((line == to_line) && (column == to_column)) {
      snippet_end = scan_position;
      break;
    }
  }
  String& snippet = String::Handle();
  if ((snippet_start != -1) && (snippet_end != -1)) {
    snippet =
        String::SubString(src, snippet_start, snippet_end - snippet_start);
  }
  return snippet.raw();
}
// Allocates an uninitialized Script in old space.
RawScript* Script::New() {
  ASSERT(Object::script_class() != Class::null());
  RawObject* raw =
      Object::Allocate(Script::kClassId, Script::InstanceSize(), Heap::kOld);
  return reinterpret_cast<RawScript*>(raw);
}
// Creates a new Script whose resolved URL equals its URL.
RawScript* Script::New(const String& url,
                       const String& source,
                       RawScript::Kind kind) {
  return Script::New(url, url, source, kind);
}
// Creates and initializes a new Script. URLs are canonicalized to symbols;
// the load timestamp is zeroed when --remove_script_timestamps_for_test is
// set to keep snapshots deterministic.
RawScript* Script::New(const String& url,
                       const String& resolved_url,
                       const String& source,
                       RawScript::Kind kind) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Script& result = Script::Handle(zone, Script::New());
  result.set_url(String::Handle(zone, Symbols::New(thread, url)));
  result.set_resolved_url(
      String::Handle(zone, Symbols::New(thread, resolved_url)));
  result.set_source(source);
  result.SetLocationOffset(0, 0);
  result.set_kind(kind);
  result.set_kernel_script_index(0);
  result.set_load_timestamp(
      FLAG_remove_script_timestamps_for_test ? 0 : OS::GetCurrentTimeMillis());
  return result.raw();
}
// Returns a zone-allocated debug description: "Script(<url>)".
const char* Script::ToCString() const {
  const String& name = String::Handle(url());
  return OS::SCreate(Thread::Current()->zone(), "Script(%s)", name.ToCString());
}
// Returns the library whose loaded-scripts list contains this script, or
// null if none does. Linear search over all libraries in the isolate.
RawLibrary* Script::FindLibrary() const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  const GrowableObjectArray& libs =
      GrowableObjectArray::Handle(zone, isolate->object_store()->libraries());
  Library& lib = Library::Handle(zone);
  Array& scripts = Array::Handle(zone);
  for (intptr_t i = 0; i < libs.Length(); i++) {
    lib ^= libs.At(i);
    scripts = lib.LoadedScripts();
    for (intptr_t j = 0; j < scripts.Length(); j++) {
      // Identity comparison: scripts are not canonicalized by content.
      if (scripts.At(j) == raw()) {
        return lib.raw();
      }
    }
  }
  return Library::null();
}
// Iterates the non-null entries of a library's dictionary hash table.
DictionaryIterator::DictionaryIterator(const Library& library)
    : array_(Array::Handle(library.dictionary())),
      // Last element in array is a Smi indicating the number of entries used.
      size_(Array::Handle(library.dictionary()).Length() - 1),
      next_ix_(0) {
  MoveToNextObject();
}
// Returns the entry at the current position and advances past any null
// slots to the next occupied one. Must only be called when HasNext().
RawObject* DictionaryIterator::GetNext() {
  ASSERT(HasNext());
  // Use intptr_t: next_ix_ is an intptr_t-ranged index; int would narrow.
  const intptr_t ix = next_ix_++;
  MoveToNextObject();
  ASSERT(array_.At(ix) != Object::null());
  return array_.At(ix);
}
// Advances next_ix_ to the next non-null dictionary slot. The trailing
// used-count Smi guarantees the scan terminates even when HasNext() is
// about to become false.
void DictionaryIterator::MoveToNextObject() {
  Object& obj = Object::Handle(array_.At(next_ix_));
  while (obj.IsNull() && HasNext()) {
    next_ix_++;
    obj = array_.At(next_ix_);
  }
}
// Iterates the classes in a library's dictionary; when |kind| is
// kIteratePrivate the toplevel class is yielded as the final element.
ClassDictionaryIterator::ClassDictionaryIterator(const Library& library,
                                                 IterationKind kind)
    : DictionaryIterator(library),
      toplevel_class_(Class::Handle((kind == kIteratePrivate)
                                        ? library.toplevel_class()
                                        : Class::null())) {
  MoveToNextClass();
}
// Returns the next class. Dictionary classes are exhausted first; the
// toplevel class (if any) is returned last and then cleared so HasNext()
// becomes false.
RawClass* ClassDictionaryIterator::GetNextClass() {
  ASSERT(HasNext());
  Class& cls = Class::Handle();
  if (next_ix_ < size_) {
    // Use intptr_t: next_ix_ is an intptr_t-ranged index; int would narrow.
    const intptr_t ix = next_ix_++;
    cls ^= array_.At(ix);
    MoveToNextClass();
    return cls.raw();
  }
  ASSERT(!toplevel_class_.IsNull());
  cls = toplevel_class_.raw();
  toplevel_class_ = Class::null();
  return cls.raw();
}
// Advances next_ix_ to the next slot containing a Class (skipping
// functions, fields, prefixes and empty slots).
void ClassDictionaryIterator::MoveToNextClass() {
  Object& obj = Object::Handle();
  while (next_ix_ < size_) {
    obj = array_.At(next_ix_);
    if (obj.IsClass()) {
      return;
    }
    next_ix_++;
  }
}
// Iterates the LibraryPrefix entries of a library's dictionary.
LibraryPrefixIterator::LibraryPrefixIterator(const Library& library)
    : DictionaryIterator(library) {
  Advance();
}
// Returns the prefix at the current position and advances to the next one.
// Must only be called when HasNext().
RawLibraryPrefix* LibraryPrefixIterator::GetNext() {
  ASSERT(HasNext());
  // Use intptr_t: next_ix_ is an intptr_t-ranged index; int would narrow.
  const intptr_t ix = next_ix_++;
  Object& obj = Object::Handle(array_.At(ix));
  Advance();
  return LibraryPrefix::Cast(obj).raw();
}
// Advances next_ix_ to the next slot containing a LibraryPrefix.
void LibraryPrefixIterator::Advance() {
  Object& obj = Object::Handle(array_.At(next_ix_));
  while (!obj.IsLibraryPrefix() && HasNext()) {
    next_ix_++;
    obj = array_.At(next_ix_);
  }
}
// Reports a fatal error when a library exceeds the 16-bit import-count
// limit (see Library::set_num_imports). Does not return.
static void ReportTooManyImports(const Library& lib) {
  const String& url = String::Handle(lib.url());
  Report::MessageF(Report::kError, Script::Handle(lib.LookupScript(url)),
                   TokenPosition::kNoSource, Report::AtLocation,
                   "too many imports in library '%s'", url.ToCString());
  UNREACHABLE();
}
// Returns true if this library's URL uses the "dart:" scheme (public or
// private core libraries).
bool Library::IsAnyCoreLibrary() const {
  String& url_str = Thread::Current()->StringHandle();
  url_str = url();
  return url_str.StartsWith(Symbols::DartScheme()) ||
         url_str.StartsWith(Symbols::DartSchemePrivate());
}
// Sets the import count; the field is 16 bits wide, so values that do not
// fit trigger a fatal "too many imports" error.
void Library::set_num_imports(intptr_t value) const {
  if (!Utils::IsUint(16, value)) {
    ReportTooManyImports(*this);
  }
  StoreNonPointer(&raw_ptr()->num_imports_, value);
}
// Sets the library name; must already be canonicalized to a symbol.
void Library::set_name(const String& name) const {
  ASSERT(name.IsSymbol());
  StorePointer(&raw_ptr()->name_, name.raw());
}
// Sets the library URL.
void Library::set_url(const String& name) const {
  StorePointer(&raw_ptr()->url_, name.raw());
}
// Sets the kernel binary backing this library.
void Library::set_kernel_data(const ExternalTypedData& data) const {
  StorePointer(&raw_ptr()->kernel_data_, data.raw());
}
// Sets the library name exactly once, before the library finishes loading.
void Library::SetName(const String& name) const {
  // Only set name once.
  ASSERT(!Loaded());
  set_name(name);
}
// Transitions load state: (allocated | requested) -> in-progress.
void Library::SetLoadInProgress() const {
  // Must not already be in the process of being loaded.
  ASSERT(raw_ptr()->load_state_ <= RawLibrary::kLoadRequested);
  StoreNonPointer(&raw_ptr()->load_state_, RawLibrary::kLoadInProgress);
}
// Transitions load state: allocated -> requested.
void Library::SetLoadRequested() const {
  // Must not be already loaded.
  ASSERT(raw_ptr()->load_state_ == RawLibrary::kAllocated);
  StoreNonPointer(&raw_ptr()->load_state_, RawLibrary::kLoadRequested);
}
// Transitions load state: (in-progress | requested) -> loaded.
void Library::SetLoaded() const {
  // Should not be already loaded or just allocated.
  ASSERT(LoadInProgress() || LoadRequested());
  StoreNonPointer(&raw_ptr()->load_state_, RawLibrary::kLoaded);
}
// Marks the library as failed to load and records the error object.
void Library::SetLoadError(const Instance& error) const {
  // Should not be already successfully loaded or just allocated.
  ASSERT(LoadInProgress() || LoadRequested() || LoadFailed());
  StoreNonPointer(&raw_ptr()->load_state_, RawLibrary::kLoadError);
  StorePointer(&raw_ptr()->load_error_, error.raw());
}
// Traits for looking up Libraries by url in a hash set.
class LibraryUrlTraits {
 public:
  static const char* Name() { return "LibraryUrlTraits"; }
  static bool ReportStats() { return false; }
  // Called when growing the table.
  static bool IsMatch(const Object& a, const Object& b) {
    ASSERT(a.IsLibrary() && b.IsLibrary());
    // Library objects are always canonical.
    return a.raw() == b.raw();
  }
  static uword Hash(const Object& key) { return Library::Cast(key).UrlHash(); }
};
// Set of libraries already visited by TransitiveLoadError (cycle guard).
typedef UnorderedHashSet<LibraryUrlTraits> LibraryLoadErrorSet;
// Returns this library's load error, or the first load error found by
// recursively visiting its imports; null when none. A per-isolate set of
// visited libraries prevents revisiting (and infinite recursion on import
// cycles).
RawInstance* Library::TransitiveLoadError() const {
  if (LoadError() != Instance::null()) {
    return LoadError();
  }
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  Zone* zone = thread->zone();
  ObjectStore* object_store = isolate->object_store();
  LibraryLoadErrorSet set(object_store->library_load_error_table());
  bool present = false;
  if (set.GetOrNull(*this, &present) != Object::null()) {
    // Already visited: no error was found via this library before.
    object_store->set_library_load_error_table(set.Release());
    return Instance::null();
  }
  // Ensure we don't repeatedly visit the same library again.
  set.Insert(*this);
  object_store->set_library_load_error_table(set.Release());
  intptr_t num_imp = num_imports();
  Library& lib = Library::Handle(zone);
  Instance& error = Instance::Handle(zone);
  for (intptr_t i = 0; i < num_imp; i++) {
    HANDLESCOPE(thread);
    lib = ImportLibraryAt(i);
    error = lib.TransitiveLoadError();
    if (!error.IsNull()) {
      break;
    }
  }
  return error.raw();
}
// Registers a patch class with this library. The class must not already be
// registered under the same name.
void Library::AddPatchClass(const Class& cls) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(cls.is_patch());
  ASSERT(GetPatchClass(String::Handle(cls.Name())) == Class::null());
  const GrowableObjectArray& patch_classes =
      GrowableObjectArray::Handle(this->patch_classes());
  patch_classes.Add(cls);
}
// Returns the patch class registered under |name|, or null. The list may
// also contain Script entries left behind by RemovePatchClass; those are
// skipped by the IsClass() check.
RawClass* Library::GetPatchClass(const String& name) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  const GrowableObjectArray& patch_classes =
      GrowableObjectArray::Handle(this->patch_classes());
  Object& obj = Object::Handle();
  for (intptr_t i = 0; i < patch_classes.Length(); i++) {
    obj = patch_classes.At(i);
    if (obj.IsClass() &&
        (Class::Cast(obj).Name() == name.raw())) {  // Names are canonicalized.
      return Class::RawCast(obj.raw());
    }
  }
  return Class::null();
}
// Removes a patch class from this library, replacing its slot with the
// class's script rather than deleting it outright.
void Library::RemovePatchClass(const Class& cls) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(cls.is_patch());
  const GrowableObjectArray& patch_classes =
      GrowableObjectArray::Handle(this->patch_classes());
  const intptr_t num_classes = patch_classes.Length();
  intptr_t i = 0;
  while (i < num_classes) {
    if (cls.raw() == patch_classes.At(i)) break;
    i++;
  }
  if (i == num_classes) return;  // Not registered; nothing to do.
  // Replace the entry with the script. We keep the script so that
  // Library::LoadedScripts() can find it without having to iterate
  // over the members of each class.
  ASSERT(i < num_classes);  // We must have found a class.
  const Script& patch_script = Script::Handle(cls.script());
  patch_classes.SetAt(i, patch_script);
}
// Builds the metadata-field symbol for a class: "@<class-name>".
static RawString* MakeClassMetaName(Thread* thread,
                                    Zone* zone,
                                    const Class& cls) {
  return Symbols::FromConcat(thread, Symbols::At(),
                             String::Handle(zone, cls.Name()));
}
// Builds the metadata-field symbol for a field:
// "@<origin-class>@<field-name>".
static RawString* MakeFieldMetaName(Thread* thread,
                                    Zone* zone,
                                    const Field& field) {
  const String& cname = String::Handle(
      zone,
      MakeClassMetaName(thread, zone, Class::Handle(zone, field.Origin())));
  GrowableHandlePtrArray<const String> pieces(zone, 3);
  pieces.Add(cname);
  pieces.Add(Symbols::At());
  pieces.Add(String::Handle(field.name()));
  return Symbols::FromConcatAll(thread, pieces);
}
// Builds the metadata-field symbol for a function:
// "@<origin-class>@<qualified-function-name>".
static RawString* MakeFunctionMetaName(Thread* thread,
                                       Zone* zone,
                                       const Function& func) {
  const String& cname = String::Handle(
      zone,
      MakeClassMetaName(thread, zone, Class::Handle(zone, func.origin())));
  GrowableHandlePtrArray<const String> pieces(zone, 3);
  pieces.Add(cname);
  pieces.Add(Symbols::At());
  pieces.Add(String::Handle(func.QualifiedScrubbedName()));
  return Symbols::FromConcatAll(thread, pieces);
}
// Builds the metadata-field symbol for a type parameter:
// "@<parameterized-class>@<parameter-name>".
static RawString* MakeTypeParameterMetaName(Thread* thread,
                                            Zone* zone,
                                            const TypeParameter& param) {
  const String& cname = String::Handle(
      zone,
      MakeClassMetaName(thread, zone,
                        Class::Handle(zone, param.parameterized_class())));
  GrowableHandlePtrArray<const String> pieces(zone, 3);
  pieces.Add(cname);
  pieces.Add(Symbols::At());
  pieces.Add(String::Handle(param.name()));
  return Symbols::FromConcatAll(thread, pieces);
}
// Records metadata for |owner| by creating a hidden, non-reflectable
// top-level field named |name|. Its value starts as the empty array, a
// sentinel meaning "not yet evaluated" (see GetMetadata); the kernel
// offset locates the annotation for lazy evaluation.
void Library::AddMetadata(const Object& owner,
                          const String& name,
                          TokenPosition token_pos,
                          intptr_t kernel_offset) const {
  Thread* thread = Thread::Current();
  ASSERT(thread->IsMutatorThread());
  Zone* zone = thread->zone();
  const String& metaname = String::Handle(zone, Symbols::New(thread, name));
  const Field& field =
      Field::Handle(zone, Field::NewTopLevel(metaname,
                                             false,  // is_final
                                             false,  // is_const
                                             owner, token_pos, token_pos));
  field.SetFieldType(Object::dynamic_type());
  field.set_is_reflectable(false);
  field.SetStaticValue(Array::empty_array(), true);
  field.set_kernel_offset(kernel_offset);
  GrowableObjectArray& metadata =
      GrowableObjectArray::Handle(zone, this->metadata());
  metadata.Add(field, Heap::kOld);
}
// Records class-level metadata under the "@<class>" name.
void Library::AddClassMetadata(const Class& cls,
                               const Object& tl_owner,
                               TokenPosition token_pos,
                               intptr_t kernel_offset) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  // We use the toplevel class as the owner of a class's metadata field because
  // a class's metadata is in scope of the library, not the class.
  AddMetadata(tl_owner,
              String::Handle(zone, MakeClassMetaName(thread, zone, cls)),
              token_pos, kernel_offset);
}
// Records field-level metadata under the "@<class>@<field>" name.
void Library::AddFieldMetadata(const Field& field,
                               TokenPosition token_pos,
                               intptr_t kernel_offset) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  AddMetadata(Object::Handle(zone, field.RawOwner()),
              String::Handle(zone, MakeFieldMetaName(thread, zone, field)),
              token_pos, kernel_offset);
}
// Records function-level metadata under the "@<class>@<function>" name.
void Library::AddFunctionMetadata(const Function& func,
                                  TokenPosition token_pos,
                                  intptr_t kernel_offset) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  AddMetadata(Object::Handle(zone, func.RawOwner()),
              String::Handle(zone, MakeFunctionMetaName(thread, zone, func)),
              token_pos, kernel_offset);
}
// Records type-parameter metadata; no kernel offset is passed here, so the
// declared default for AddMetadata's kernel_offset applies.
void Library::AddTypeParameterMetadata(const TypeParameter& param,
                                       TokenPosition token_pos) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  AddMetadata(
      Class::Handle(zone, param.parameterized_class()),
      String::Handle(zone, MakeTypeParameterMetaName(thread, zone, param)),
      token_pos);
}
// Records library-level metadata under the reserved "top-level" name.
void Library::AddLibraryMetadata(const Object& tl_owner,
                                 TokenPosition token_pos,
                                 intptr_t kernel_offset) const {
  AddMetadata(tl_owner, Symbols::TopLevel(), token_pos, kernel_offset);
}
// Dispatches to the appropriate metadata-name builder for |obj|'s kind.
// Only classes, fields, functions, libraries and type parameters can carry
// metadata; anything else is unimplemented.
RawString* Library::MakeMetadataName(const Object& obj) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  if (obj.IsClass()) {
    return MakeClassMetaName(thread, zone, Class::Cast(obj));
  } else if (obj.IsField()) {
    return MakeFieldMetaName(thread, zone, Field::Cast(obj));
  } else if (obj.IsFunction()) {
    return MakeFunctionMetaName(thread, zone, Function::Cast(obj));
  } else if (obj.IsLibrary()) {
    return Symbols::TopLevel().raw();
  } else if (obj.IsTypeParameter()) {
    return MakeTypeParameterMetaName(thread, zone, TypeParameter::Cast(obj));
  }
  UNIMPLEMENTED();
  return String::null();
}
// Linear search of this library's metadata fields for one named |metaname|;
// returns null when absent.
RawField* Library::GetMetadataField(const String& metaname) const {
  const GrowableObjectArray& metadata =
      GrowableObjectArray::Handle(this->metadata());
  Field& entry = Field::Handle();
  String& entryname = String::Handle();
  intptr_t num_entries = metadata.Length();
  for (intptr_t i = 0; i < num_entries; i++) {
    entry ^= metadata.At(i);
    entryname = entry.name();
    if (entryname.Equals(metaname)) {
      return entry.raw();
    }
  }
  return Field::null();
}
// Copies |from_fun|'s metadata (if any) in |from_library| onto |to_fun| in
// this library, reusing the original token position and kernel offset.
void Library::CloneMetadataFrom(const Library& from_library,
                                const Function& from_fun,
                                const Function& to_fun) const {
  const String& metaname = String::Handle(MakeMetadataName(from_fun));
  const Field& from_field =
      Field::Handle(from_library.GetMetadataField(metaname));
  if (!from_field.IsNull()) {
    AddFunctionMetadata(to_fun, from_field.token_pos(),
                        from_field.kernel_offset());
  }
}
// Returns the evaluated metadata annotations for |obj| as an array.
// Returns null for object kinds that cannot carry metadata, and the empty
// array when none is recorded. Lazily evaluates the annotations from kernel
// on first access and caches the result in the metadata field's static
// value (the empty array acts as the "not yet evaluated" sentinel).
RawObject* Library::GetMetadata(const Object& obj) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  return Object::empty_array().raw();
#else
  if (!obj.IsClass() && !obj.IsField() && !obj.IsFunction() &&
      !obj.IsLibrary() && !obj.IsTypeParameter()) {
    return Object::null();
  }
  const String& metaname = String::Handle(MakeMetadataName(obj));
  Field& field = Field::Handle(GetMetadataField(metaname));
  if (field.IsNull()) {
    // There is no metadata for this object.
    return Object::empty_array().raw();
  }
  Object& metadata = Object::Handle();
  metadata = field.StaticValue();
  if (field.StaticValue() == Object::empty_array().raw()) {
    if (field.kernel_offset() > 0) {
      metadata = kernel::EvaluateMetadata(
          field, /* is_annotations_offset = */ obj.IsLibrary());
    } else {
      UNREACHABLE();
    }
    if (metadata.IsArray()) {
      // Cache the evaluated result; must not equal the sentinel.
      ASSERT(Array::Cast(metadata).raw() != Object::empty_array().raw());
      field.SetStaticValue(Array::Cast(metadata), true);
    }
  }
  return metadata.raw();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
static bool ShouldBePrivate(const String& name) {
return (name.Length() >= 1 && name.CharAt(0) == '_') ||
(name.Length() >= 5 &&
(name.CharAt(4) == '_' &&
(name.CharAt(0) == 'g' || name.CharAt(0) == 's') &&
name.CharAt(1) == 'e' && name.CharAt(2) == 't' &&
name.CharAt(3) == ':'));
}
// Resolves |name| in this library's scope: first the local dictionary
// (direct name, then getter/setter-mangled forms), then imported libraries
// for non-private names. Results other than direct local hits are memoized
// in the resolved-names cache (including null "not found" results).
RawObject* Library::ResolveName(const String& name) const {
  Object& obj = Object::Handle();
  if (FLAG_use_lib_cache && LookupResolvedNamesCache(name, &obj)) {
    return obj.raw();
  }
  obj = LookupLocalObject(name);
  if (!obj.IsNull()) {
    // Names that are in this library's dictionary and are unmangled
    // are not cached. This reduces the size of the cache.
    return obj.raw();
  }
  String& accessor_name = String::Handle(Field::LookupGetterSymbol(name));
  if (!accessor_name.IsNull()) {
    obj = LookupLocalObject(accessor_name);
  }
  if (obj.IsNull()) {
    accessor_name = Field::LookupSetterSymbol(name);
    if (!accessor_name.IsNull()) {
      obj = LookupLocalObject(accessor_name);
    }
    // Private names are never visible through imports.
    if (obj.IsNull() && !ShouldBePrivate(name)) {
      obj = LookupImportedObject(name);
    }
  }
  AddToResolvedNamesCache(name, obj);
  return obj.raw();
}
// Traits for hash maps keyed by string content (not identity).
class StringEqualsTraits {
 public:
  static const char* Name() { return "StringEqualsTraits"; }
  static bool ReportStats() { return false; }
  static bool IsMatch(const Object& a, const Object& b) {
    return String::Cast(a).Equals(String::Cast(b));
  }
  static uword Hash(const Object& obj) { return String::Cast(obj).Hash(); }
};
// Map from name strings to resolved objects (or null for "not found").
typedef UnorderedHashMap<StringEqualsTraits> ResolvedNamesMap;
// Returns true if the name is found in the cache, false no cache hit.
// obj is set to the cached entry. It may be null, indicating that the
// name does not resolve to anything in this library.
bool Library::LookupResolvedNamesCache(const String& name, Object* obj) const {
  if (resolved_names() == Array::null()) {
    return false;
  }
  ResolvedNamesMap cache(resolved_names());
  bool present = false;
  *obj = cache.GetOrNull(name, &present);
// Mutator compiler thread may add entries and therefore
// change 'resolved_names()' while running a background compilation;
// ASSERT that 'resolved_names()' has not changed only in mutator.
#if defined(DEBUG)
  if (Thread::Current()->IsMutatorThread()) {
    ASSERT(cache.Release().raw() == resolved_names());
  } else {
    // Release must be called in debug mode.
    cache.Release();
  }
#endif
  return present;
}
// Add a name to the resolved name cache. This name resolves to the
// given object in this library scope. obj may be null, which means
// the name does not resolve to anything in this library scope.
// No-op when the cache is disabled or during background compilation
// (only the mutator may mutate the cache).
void Library::AddToResolvedNamesCache(const String& name,
                                      const Object& obj) const {
  if (!FLAG_use_lib_cache || Compiler::IsBackgroundCompilation()) {
    return;
  }
  if (resolved_names() == Array::null()) {
    InitResolvedNamesCache();
  }
  ResolvedNamesMap cache(resolved_names());
  cache.UpdateOrInsert(name, obj);
  // The insert may have grown the table; store the (possibly new) backing
  // array back into the library.
  StorePointer(&raw_ptr()->resolved_names_, cache.Release().raw());
}
// Like LookupResolvedNamesCache, but for names resolved through this
// library's re-exports. Returns true on cache hit; *obj may be null,
// meaning the name is known not to be exported.
bool Library::LookupExportedNamesCache(const String& name, Object* obj) const {
  ASSERT(FLAG_use_exp_cache);
  if (exported_names() == Array::null()) {
    return false;
  }
  ResolvedNamesMap cache(exported_names());
  bool present = false;
  *obj = cache.GetOrNull(name, &present);
// Mutator compiler thread may add entries and therefore
// change 'exported_names()' while running a background compilation;
// do not ASSERT that 'exported_names()' has not changed.
#if defined(DEBUG)
  if (Thread::Current()->IsMutatorThread()) {
    ASSERT(cache.Release().raw() == exported_names());
  } else {
    // Release must be called in debug mode.
    cache.Release();
  }
#endif
  return present;
}
// Memoizes an export-lookup result (obj may be null = "not exported").
// No-op when the cache is disabled or during background compilation.
void Library::AddToExportedNamesCache(const String& name,
                                      const Object& obj) const {
  if (!FLAG_use_exp_cache || Compiler::IsBackgroundCompilation()) {
    return;
  }
  if (exported_names() == Array::null()) {
    InitExportedNamesCache();
  }
  ResolvedNamesMap cache(exported_names());
  cache.UpdateOrInsert(name, obj);
  // The insert may have grown the table; store the (possibly new) backing
  // array back into the library.
  StorePointer(&raw_ptr()->exported_names_, cache.Release().raw());
}
// Invalidates cached lookups of |name|: clears this library's resolved-name
// cache if it holds the name, and clears the exported-name cache of every
// library that holds it.
void Library::InvalidateResolvedName(const String& name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Object& entry = Object::Handle(zone);
  if (FLAG_use_lib_cache && LookupResolvedNamesCache(name, &entry)) {
    // TODO(koda): Support deleted sentinel in snapshots and remove only 'name'.
    ClearResolvedNamesCache();
  }
  if (!FLAG_use_exp_cache) {
    return;
  }
  // When a new name is added to a library, we need to invalidate all
  // caches that contain an entry for this name. If the name was previously
  // looked up but could not be resolved, the cache contains a null entry.
  GrowableObjectArray& libs = GrowableObjectArray::Handle(
      zone, thread->isolate()->object_store()->libraries());
  Library& lib = Library::Handle(zone);
  intptr_t num_libs = libs.Length();
  for (intptr_t i = 0; i < num_libs; i++) {
    lib ^= libs.At(i);
    if (lib.LookupExportedNamesCache(name, &entry)) {
      lib.ClearExportedNamesCache();
    }
  }
}
// Invalidate all exported names caches in the isolate.
void Library::InvalidateExportedNamesCaches() {
  GrowableObjectArray& libs = GrowableObjectArray::Handle(
      Isolate::Current()->object_store()->libraries());
  Library& lib = Library::Handle();
  intptr_t num_libs = libs.Length();
  for (intptr_t i = 0; i < num_libs; i++) {
    lib ^= libs.At(i);
    lib.ClearExportedNamesCache();
  }
}
// Rebuilds the library dictionary with |new_dict_size| slots (plus one
// trailing used-count Smi), re-inserting every entry by open addressing
// with linear probing, then installs the new array.
void Library::RehashDictionary(const Array& old_dict,
                               intptr_t new_dict_size) const {
  intptr_t old_dict_size = old_dict.Length() - 1;
  const Array& new_dict =
      Array::Handle(Array::New(new_dict_size + 1, Heap::kOld));
  // Rehash all elements from the original dictionary
  // to the newly allocated array.
  // Note: the handle is allocated via Class::Handle() but referenced as an
  // Object&, since entries may be functions, fields, prefixes, etc.
  Object& entry = Class::Handle();
  String& entry_name = String::Handle();
  Object& new_entry = Object::Handle();
  intptr_t used = 0;
  for (intptr_t i = 0; i < old_dict_size; i++) {
    entry = old_dict.At(i);
    if (!entry.IsNull()) {
      entry_name = entry.DictionaryName();
      ASSERT(!entry_name.IsNull());
      const intptr_t hash = entry_name.Hash();
      intptr_t index = hash % new_dict_size;
      new_entry = new_dict.At(index);
      // Linear probe for a free slot.
      while (!new_entry.IsNull()) {
        index = (index + 1) % new_dict_size;  // Move to next element.
        new_entry = new_dict.At(index);
      }
      new_dict.SetAt(index, entry);
      used++;
    }
  }
  // Set used count.
  ASSERT(used < new_dict_size);  // Need at least one empty slot.
  new_entry = Smi::New(used);
  new_dict.SetAt(new_dict_size, new_entry);
  // Remember the new dictionary now.
  StorePointer(&raw_ptr()->dictionary_, new_dict.raw());
}
// Inserts |obj| into the library dictionary under |name| (which must not
// already be present). Grows the table when it exceeds 75% occupancy, and
// drops the loaded-scripts cache since the new member may come from a new
// script.
void Library::AddObject(const Object& obj, const String& name) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(obj.IsClass() || obj.IsFunction() || obj.IsField() ||
         obj.IsLibraryPrefix());
  ASSERT(name.Equals(String::Handle(obj.DictionaryName())));
  ASSERT(LookupLocalObject(name) == Object::null());
  const Array& dict = Array::Handle(dictionary());
  intptr_t dict_size = dict.Length() - 1;
  intptr_t index = name.Hash() % dict_size;
  Object& entry = Object::Handle();
  entry = dict.At(index);
  // An empty spot will be found because we keep the hash set at most 75% full.
  while (!entry.IsNull()) {
    index = (index + 1) % dict_size;
    entry = dict.At(index);
  }
  // Insert the object at the empty slot.
  dict.SetAt(index, obj);
  // One more element added.
  intptr_t used_elements = Smi::Value(Smi::RawCast(dict.At(dict_size))) + 1;
  const Smi& used = Smi::Handle(Smi::New(used_elements));
  dict.SetAt(dict_size, used);  // Update used count.
  // Rehash if symbol_table is 75% full.
  if (used_elements > ((dict_size / 4) * 3)) {
    // TODO(iposva): Avoid exponential growth.
    RehashDictionary(dict, 2 * dict_size);
  }
  // Invalidate the cache of loaded scripts.
  if (loaded_scripts() != Array::null()) {
    StorePointer(&raw_ptr()->loaded_scripts_, Array::null());
  }
}
// Lookup a name in the library's re-export namespace.
// This lookup can occur from two different threads: background compiler and
// mutator thread.
//
// |trail| records the indices of libraries on the current lookup path so
// export cycles can be detected (a negative entry marks a cycle); results
// found on a cycle are not cached.
RawObject* Library::LookupReExport(const String& name,
                                   ZoneGrowableArray<intptr_t>* trail) const {
  if (!HasExports()) {
    return Object::null();
  }
  if (trail == NULL) {
    trail = new ZoneGrowableArray<intptr_t>();
  }
  Object& obj = Object::Handle();
  if (FLAG_use_exp_cache && LookupExportedNamesCache(name, &obj)) {
    return obj.raw();
  }
  const intptr_t lib_id = this->index();
  ASSERT(lib_id >= 0);  // We use -1 to indicate that a cycle was found.
  trail->Add(lib_id);
  const Array& exports = Array::Handle(this->exports());
  Namespace& ns = Namespace::Handle();
  // Use intptr_t for the loop index: exports.Length() is intptr_t and the
  // file consistently uses intptr_t indices; int would narrow.
  for (intptr_t i = 0; i < exports.Length(); i++) {
    ns ^= exports.At(i);
    obj = ns.Lookup(name, trail);
    if (!obj.IsNull()) {
      // The Lookup call above may return a setter x= when we are looking
      // for the name x. Make sure we only return when a matching name
      // is found.
      String& obj_name = String::Handle(obj.DictionaryName());
      if (Field::IsSetterName(obj_name) == Field::IsSetterName(name)) {
        break;
      }
    }
  }
  bool in_cycle = (trail->RemoveLast() < 0);
  if (FLAG_use_exp_cache && !in_cycle && !Compiler::IsBackgroundCompilation()) {
    AddToExportedNamesCache(name, obj);
  }
  return obj.raw();
}
// Looks up |name| in the library dictionary via linear probing.
// On hit, returns the entry with *index at its slot; on miss, returns null
// with *index at the empty slot where the name would be inserted.
RawObject* Library::LookupEntry(const String& name, intptr_t* index) const {
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_OBJECT_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& dict = thread->ArrayHandle();
  dict ^= dictionary();
  intptr_t dict_size = dict.Length() - 1;
  *index = name.Hash() % dict_size;
  Object& entry = thread->ObjectHandle();
  String& entry_name = thread->StringHandle();
  entry = dict.At(*index);
  // Search the entry in the hash set.
  while (!entry.IsNull()) {
    entry_name = entry.DictionaryName();
    ASSERT(!entry_name.IsNull());
    if (entry_name.Equals(name)) {
      return entry.raw();
    }
    *index = (*index + 1) % dict_size;
    entry = dict.At(*index);
  }
  return Object::null();
}
// Replaces the existing dictionary entry for |name| with |obj|.
// The name must already be present (asserted), so LookupEntry is guaranteed
// to report its slot.
void Library::ReplaceObject(const Object& obj, const String& name) const {
  ASSERT(!Compiler::IsBackgroundCompilation());
  ASSERT(obj.IsClass() || obj.IsFunction() || obj.IsField());
  ASSERT(LookupLocalObject(name) != Object::null());
  intptr_t index;
  LookupEntry(name, &index);
  // The value is guaranteed to be found.
  const Array& dict = Array::Handle(dictionary());
  dict.SetAt(index, obj);
}
// Adds |cls| to this library's dictionary, back-links the class to the
// library, and invalidates any cached resolutions of the class name.
void Library::AddClass(const Class& cls) const {
  ASSERT(!Compiler::IsBackgroundCompilation());
  const String& class_name = String::Handle(cls.Name());
  AddObject(cls, class_name);
  // Link class to this library.
  cls.set_library(*this);
  InvalidateResolvedName(class_name);
}
static void AddScriptIfUnique(const GrowableObjectArray& scripts,
const Script& candidate) {
if (candidate.IsNull()) {
return;
}
Script& script_obj = Script::Handle();
for (int i = 0; i < scripts.Length(); i++) {
script_obj ^= scripts.At(i);
if (script_obj.raw() == candidate.raw()) {
// We already have a reference to this script.
return;
}
}
// Add script to the list of scripts.
scripts.Add(candidate);
}
// Returns the deduplicated array of scripts contributing to this library.
// Computed lazily by walking the dictionary, patch classes, and the
// toplevel class; the result is cached in loaded_scripts_ (and invalidated
// by AddObject when new members are added).
RawArray* Library::LoadedScripts() const {
  ASSERT(Thread::Current()->IsMutatorThread());
  // We compute the list of loaded scripts lazily. The result is
  // cached in loaded_scripts_.
  if (loaded_scripts() == Array::null()) {
    // Iterate over the library dictionary and collect all scripts.
    const GrowableObjectArray& scripts =
        GrowableObjectArray::Handle(GrowableObjectArray::New(8));
    Object& entry = Object::Handle();
    Class& cls = Class::Handle();
    Script& owner_script = Script::Handle();
    DictionaryIterator it(*this);
    while (it.HasNext()) {
      entry = it.GetNext();
      if (entry.IsClass()) {
        owner_script = Class::Cast(entry).script();
      } else if (entry.IsFunction()) {
        owner_script = Function::Cast(entry).script();
      } else if (entry.IsField()) {
        owner_script = Field::Cast(entry).Script();
      } else {
        // Other entry kinds (e.g. library prefixes) carry no script.
        continue;
      }
      AddScriptIfUnique(scripts, owner_script);
    }
    // Add all scripts from patch classes.
    // Note: patch_classes() may also hold bare Script entries left by
    // RemovePatchClass.
    GrowableObjectArray& patches = GrowableObjectArray::Handle(patch_classes());
    for (intptr_t i = 0; i < patches.Length(); i++) {
      entry = patches.At(i);
      if (entry.IsClass()) {
        owner_script = Class::Cast(entry).script();
      } else {
        ASSERT(entry.IsScript());
        owner_script = Script::Cast(entry).raw();
      }
      AddScriptIfUnique(scripts, owner_script);
    }
    cls ^= toplevel_class();
    if (!cls.IsNull()) {
      owner_script = cls.script();
      AddScriptIfUnique(scripts, owner_script);
      // Special case: Scripts that only contain external top-level functions
      // are not included above, but can be referenced through a library's
      // anonymous classes. Example: dart-core:identical.dart.
      Function& func = Function::Handle();
      Array& functions = Array::Handle(cls.functions());
      for (intptr_t j = 0; j < functions.Length(); j++) {
        func ^= functions.At(j);
        if (func.is_external()) {
          owner_script = func.script();
          AddScriptIfUnique(scripts, owner_script);
        }
      }
    }
    // Create the array of scripts and cache it in loaded_scripts_.
    const Array& scripts_array = Array::Handle(Array::MakeFixedLength(scripts));
    StorePointer(&raw_ptr()->loaded_scripts_, scripts_array.raw());
  }
  return loaded_scripts();
}
// TODO(hausner): we might want to add a script dictionary to the
// library class to make this lookup faster.
RawScript* Library::LookupScript(const String& url,
                                 bool useResolvedUri /* = false */) const {
  // Finds a loaded script whose (resolved) URL is exactly `url`, or whose
  // URL ends in `url` with the suffix aligned on a '/' path boundary.
  const intptr_t url_length = url.Length();
  if (url_length == 0) {
    return Script::null();
  }
  const Array& scripts = Array::Handle(LoadedScripts());
  Script& script = Script::Handle();
  String& script_url = String::Handle();
  const intptr_t num_scripts = scripts.Length();
  for (int i = 0; i < num_scripts; i++) {
    script ^= scripts.At(i);
    if (!useResolvedUri) {
      script_url = script.url();
    } else {
      script_url = script.resolved_url();
    }
    // start_idx is where `url` would have to begin inside script_url for a
    // suffix match; 0 means the lengths are equal (possible exact match).
    const intptr_t start_idx = script_url.Length() - url_length;
    if ((start_idx == 0) && url.Equals(script_url)) {
      return script.raw();
    } else if (start_idx > 0) {
      // If we do a suffix match, only match if the partial path
      // starts at or immediately after the path separator.
      if (((url.CharAt(0) == '/') ||
           (script_url.CharAt(start_idx - 1) == '/')) &&
          url.Equals(script_url, start_idx, url_length)) {
        return script.raw();
      }
    }
  }
  return Script::null();
}
RawObject* Library::LookupLocalObject(const String& name) const {
  // Thin wrapper over LookupEntry that discards the slot index.
  intptr_t unused_index;
  return LookupEntry(name, &unused_index);
}
RawObject* Library::LookupLocalOrReExportObject(const String& name) const {
  // Prefer a local, non-prefix definition; otherwise fall back to names
  // this library re-exports.
  intptr_t unused_index;
  const Object& local = Object::Handle(LookupEntry(name, &unused_index));
  if (local.IsNull() || local.IsLibraryPrefix()) {
    return LookupReExport(name);
  }
  return local.raw();
}
RawField* Library::LookupFieldAllowPrivate(const String& name) const {
  // Field lookup in this library and its imports, also matching private
  // (key-mangled) names declared locally.
  const Object& result = Object::Handle(LookupObjectAllowPrivate(name));
  return result.IsField() ? Field::Cast(result).raw() : Field::null();
}
RawField* Library::LookupLocalField(const String& name) const {
  // Field lookup restricted to this library, allowing private names.
  const Object& result = Object::Handle(LookupLocalObjectAllowPrivate(name));
  return result.IsField() ? Field::Cast(result).raw() : Field::null();
}
RawFunction* Library::LookupFunctionAllowPrivate(const String& name) const {
  // Function lookup in this library and its imports, also matching private
  // (key-mangled) names declared locally.
  const Object& result = Object::Handle(LookupObjectAllowPrivate(name));
  return result.IsFunction() ? Function::Cast(result).raw() : Function::null();
}
RawFunction* Library::LookupLocalFunction(const String& name) const {
  // Function lookup restricted to this library, allowing private names.
  const Object& result = Object::Handle(LookupLocalObjectAllowPrivate(name));
  return result.IsFunction() ? Function::Cast(result).raw() : Function::null();
}
RawObject* Library::LookupLocalObjectAllowPrivate(const String& name) const {
  // Looks up `name` in this library only. If the plain name misses and the
  // name is private, retries with this library's private key appended.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Object& result = Object::Handle(zone, LookupLocalObject(name));
  if (!result.IsNull() || !ShouldBePrivate(name)) {
    return result.raw();
  }
  const String& mangled = String::Handle(zone, PrivateName(name));
  result = LookupLocalObject(mangled);
  return result.raw();
}
RawObject* Library::LookupObjectAllowPrivate(const String& name) const {
  // Search order: this library first (private names allowed), then imported
  // libraries. Private names are never resolved across library boundaries.
  const Object& local = Object::Handle(LookupLocalObjectAllowPrivate(name));
  if (!local.IsNull()) {
    return local.raw();
  }
  if (ShouldBePrivate(name)) {
    return Object::null();
  }
  return LookupImportedObject(name);
}
RawObject* Library::LookupImportedObject(const String& name) const {
  // Looks up `name` in the namespaces imported by this library.
  // Shadowing rules: declarations exported from dart:* system libraries are
  // hidden by non-system declarations; a setter found first may be replaced
  // by a later plain/getter match. If two distinct, equally visible
  // declarations match, null is returned (ambiguous reference).
  Object& obj = Object::Handle();
  Namespace& import = Namespace::Handle();
  Library& import_lib = Library::Handle();
  String& import_lib_url = String::Handle();
  String& first_import_lib_url = String::Handle();
  Object& found_obj = Object::Handle();
  String& found_obj_name = String::Handle();
  ASSERT(!ShouldBePrivate(name));
  for (intptr_t i = 0; i < num_imports(); i++) {
    import ^= ImportAt(i);
    obj = import.Lookup(name);
    if (!obj.IsNull()) {
      import_lib = import.library();
      import_lib_url = import_lib.url();
      if (found_obj.raw() != obj.raw()) {
        if (first_import_lib_url.IsNull() ||
            first_import_lib_url.StartsWith(Symbols::DartScheme())) {
          // This is the first object we found, or the
          // previously found object is exported from a Dart
          // system library. The newly found object hides the one
          // from the Dart library.
          first_import_lib_url = import_lib.url();
          found_obj = obj.raw();
          found_obj_name = obj.DictionaryName();
        } else if (import_lib_url.StartsWith(Symbols::DartScheme())) {
          // The newly found object is exported from a Dart system
          // library. It is hidden by the previously found object.
          // We continue to search.
        } else if (Field::IsSetterName(found_obj_name) &&
                   !Field::IsSetterName(name)) {
          // We are looking for an unmangled name or a getter, but
          // the first object we found is a setter. Replace the first
          // object with the one we just found.
          first_import_lib_url = import_lib.url();
          found_obj = obj.raw();
          found_obj_name = found_obj.DictionaryName();
        } else {
          // We found two different objects with the same name.
          // Note that we need to compare the names again because
          // looking up an unmangled name can return a getter or a
          // setter. A getter name is the same as the unmangled name,
          // but a setter name is different from an unmangled name or a
          // getter name.
          if (Field::IsGetterName(found_obj_name)) {
            found_obj_name = Field::NameFromGetter(found_obj_name);
          }
          String& second_obj_name = String::Handle(obj.DictionaryName());
          if (Field::IsGetterName(second_obj_name)) {
            second_obj_name = Field::NameFromGetter(second_obj_name);
          }
          if (found_obj_name.Equals(second_obj_name)) {
            // Genuinely ambiguous: report "not found".
            return Object::null();
          }
        }
      }
    }
  }
  return found_obj.raw();
}
RawClass* Library::LookupClass(const String& name) const {
  // Resolves `name` through the full lookup path (local + imports) and
  // returns it only if it denotes a class.
  const Object& result = Object::Handle(ResolveName(name));
  return result.IsClass() ? Class::Cast(result).raw() : Class::null();
}
RawClass* Library::LookupLocalClass(const String& name) const {
  // Class lookup restricted to this library's own dictionary.
  const Object& result = Object::Handle(LookupLocalObject(name));
  return result.IsClass() ? Class::Cast(result).raw() : Class::null();
}
RawClass* Library::LookupClassAllowPrivate(const String& name) const {
  // Try the regular lookup first, which covers this library and the top
  // level scope of every imported library.
  Zone* zone = Thread::Current()->zone();
  const Class& cls = Class::Handle(zone, LookupClass(name));
  if (!cls.IsNull()) {
    return cls.raw();
  }
  // For a private name, retry with this library's key appended. Private
  // classes of imported libraries are intentionally not visible.
  if (ShouldBePrivate(name)) {
    const String& mangled = String::Handle(zone, PrivateName(name));
    const Object& local = Object::Handle(LookupLocalObject(mangled));
    if (local.IsClass()) {
      return Class::Cast(local).raw();
    }
  }
  return Class::null();
}
// Mixin applications can have multiple private keys from different libraries.
RawClass* Library::SlowLookupClassAllowMultiPartPrivate(
    const String& name) const {
  // Linear scan over the dictionary, comparing class names while ignoring
  // embedded private-key suffixes.
  const Array& entries = Array::Handle(dictionary());
  Object& entry = Object::Handle();
  String& entry_name = String::Handle();
  const intptr_t num_entries = entries.Length();
  for (intptr_t i = 0; i < num_entries; i++) {
    entry = entries.At(i);
    if (!entry.IsClass()) {
      continue;
    }
    entry_name = Class::Cast(entry).Name();
    // Warning: EqualsIgnoringPrivateKey is not a symmetric comparison.
    if (String::EqualsIgnoringPrivateKey(entry_name, name)) {
      return Class::Cast(entry).raw();
    }
  }
  return Class::null();
}
RawLibraryPrefix* Library::LookupLocalLibraryPrefix(const String& name) const {
  // Prefix lookup restricted to this library's own dictionary.
  const Object& result = Object::Handle(LookupLocalObject(name));
  return result.IsLibraryPrefix() ? LibraryPrefix::Cast(result).raw()
                                  : LibraryPrefix::null();
}
void Library::set_toplevel_class(const Class& value) const {
  // The top-level class can be set only once per library.
  ASSERT(raw_ptr()->toplevel_class_ == Class::null());
  StorePointer(&raw_ptr()->toplevel_class_, value.raw());
}
void Library::set_metadata(const GrowableObjectArray& value) const {
  // Replaces this library's metadata list (write-barriered store).
  StorePointer(&raw_ptr()->metadata_, value.raw());
}
RawLibrary* Library::ImportLibraryAt(intptr_t index) const {
  // Returns the library of the index-th import, or null when out of range.
  const Namespace& ns = Namespace::Handle(ImportAt(index));
  return ns.IsNull() ? Library::null() : ns.library();
}
RawNamespace* Library::ImportAt(intptr_t index) const {
  // Bounds-checked access into the imports array.
  if (index < 0 || index >= num_imports()) {
    return Namespace::null();
  }
  const Array& import_list = Array::Handle(imports());
  return Namespace::RawCast(import_list.At(index));
}
bool Library::ImportsCorelib() const {
Zone* zone = Thread::Current()->zone();
Library& imported = Library::Handle(zone);
intptr_t count = num_imports();
for (int i = 0; i < count; i++) {
imported = ImportLibraryAt(i);
if (imported.IsCoreLibrary()) {
return true;
}
}
LibraryPrefix& prefix = LibraryPrefix::Handle(zone);
LibraryPrefixIterator it(*this);
while (it.HasNext()) {
prefix = it.GetNext();
count = prefix.num_imports();
for (int i = 0; i < count; i++) {
imported = prefix.GetLibrary(i);
if (imported.IsCoreLibrary()) {
return true;
}
}
}
return false;
}
void Library::DropDependenciesAndCaches() const {
  // Resets this library's import/export graph and clears every cache derived
  // from it: name-resolution caches and the loaded-scripts list.
  StorePointer(&raw_ptr()->imports_, Object::empty_array().raw());
  StorePointer(&raw_ptr()->exports_, Object::empty_array().raw());
  StoreNonPointer(&raw_ptr()->num_imports_, 0);
  StorePointer(&raw_ptr()->resolved_names_, Array::null());
  StorePointer(&raw_ptr()->exported_names_, Array::null());
  StorePointer(&raw_ptr()->loaded_scripts_, Array::null());
}
void Library::AddImport(const Namespace& ns) const {
  // Appends `ns` to the imports list. The backing array keeps spare
  // capacity; it grows only when full.
  Array& imports = Array::Handle(this->imports());
  intptr_t capacity = imports.Length();
  if (num_imports() == capacity) {
    // Grow by a fixed increment plus 25% of the current capacity.
    capacity = capacity + kImportsCapacityIncrement + (capacity >> 2);
    imports = Array::Grow(imports, capacity);
    StorePointer(&raw_ptr()->imports_, imports.raw());
  }
  intptr_t index = num_imports();
  imports.SetAt(index, ns);
  set_num_imports(index + 1);
}
// Convenience function to determine whether the export list is
// non-empty.
bool Library::HasExports() const {
  // AddExport() replaces the canonical empty array on first use, so an
  // identity comparison against it is sufficient.
  return exports() != Object::empty_array().raw();
}
// We add one namespace at a time to the exports array and don't
// pre-allocate any unused capacity. The assumption is that
// re-exports are quite rare.
void Library::AddExport(const Namespace& ns) const {
  // Grow by exactly one slot, publish the new array, then store the entry.
  Array& exports = Array::Handle(this->exports());
  intptr_t num_exports = exports.Length();
  exports = Array::Grow(exports, num_exports + 1);
  StorePointer(&raw_ptr()->exports_, exports.raw());
  exports.SetAt(num_exports, ns);
}
// Allocates a library dictionary with `initial_size` hash slots.
static RawArray* NewDictionary(intptr_t initial_size) {
  // One extra slot is allocated at the end: it holds a Smi counting the
  // number of slots currently in use.
  const Array& result =
      Array::Handle(Array::New(initial_size + 1, Heap::kOld));
  result.SetAt(initial_size, Smi::Handle(Smi::New(0)));
  return result.raw();
}
void Library::InitResolvedNamesCache() const {
  // Creates a fresh (empty) name-resolution cache with 64 entries.
  ASSERT(Thread::Current()->IsMutatorThread());
  StorePointer(&raw_ptr()->resolved_names_,
               HashTables::New<ResolvedNamesMap>(64));
}
void Library::ClearResolvedNamesCache() const {
  // Drops the name-resolution cache entirely; it is rebuilt on demand.
  ASSERT(Thread::Current()->IsMutatorThread());
  StorePointer(&raw_ptr()->resolved_names_, Array::null());
}
void Library::InitExportedNamesCache() const {
  // Creates a fresh (empty) cache for names resolved through exports.
  StorePointer(&raw_ptr()->exported_names_,
               HashTables::New<ResolvedNamesMap>(16));
}
void Library::ClearExportedNamesCache() const {
  // Drops the exported-names cache; it is rebuilt on demand.
  StorePointer(&raw_ptr()->exported_names_, Array::null());
}
void Library::InitClassDictionary() const {
  // Installs an empty dictionary for this library's top-level declarations.
  // TODO(iposva): Find reasonable initial size.
  const int kInitialElementCount = 16;
  StorePointer(&raw_ptr()->dictionary_, NewDictionary(kInitialElementCount));
}
void Library::InitImportList() const {
  // Installs an empty imports array with initial capacity and resets the
  // in-use count.
  const Array& imports =
      Array::Handle(Array::New(kInitialImportsCapacity, Heap::kOld));
  StorePointer(&raw_ptr()->imports_, imports.raw());
  StoreNonPointer(&raw_ptr()->num_imports_, 0);
}
RawLibrary* Library::New() {
  // Allocates an uninitialized Library object in old space. Callers must
  // initialize every field (see NewLibraryHelper).
  ASSERT(Object::library_class() != Class::null());
  RawObject* raw =
      Object::Allocate(Library::kClassId, Library::InstanceSize(), Heap::kOld);
  return reinterpret_cast<RawLibrary*>(raw);
}
RawLibrary* Library::NewLibraryHelper(const String& url, bool import_core_lib) {
  // Allocates and fully initializes a new library for `url`, optionally
  // adding an implicit import of dart:core. Does NOT register the library
  // (see Register()).
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(thread->IsMutatorThread());
  // Force the url to have a hash code.
  url.Hash();
  const bool dart_scheme = url.StartsWith(Symbols::DartScheme());
  const bool dart_private_scheme =
      dart_scheme && url.StartsWith(Symbols::DartSchemePrivate());
  const Library& result = Library::Handle(zone, Library::New());
  // Initialize every field of the freshly allocated object before it can be
  // observed; stores go through the write barrier.
  result.StorePointer(&result.raw_ptr()->name_, Symbols::Empty().raw());
  result.StorePointer(&result.raw_ptr()->url_, url.raw());
  result.StorePointer(&result.raw_ptr()->resolved_names_, Array::null());
  result.StorePointer(&result.raw_ptr()->exported_names_, Array::null());
  result.StorePointer(&result.raw_ptr()->dictionary_,
                      Object::empty_array().raw());
  result.StorePointer(&result.raw_ptr()->metadata_,
                      GrowableObjectArray::New(4, Heap::kOld));
  result.StorePointer(&result.raw_ptr()->toplevel_class_, Class::null());
  result.StorePointer(
      &result.raw_ptr()->patch_classes_,
      GrowableObjectArray::New(Object::empty_array(), Heap::kOld));
  result.StorePointer(&result.raw_ptr()->imports_, Object::empty_array().raw());
  result.StorePointer(&result.raw_ptr()->exports_, Object::empty_array().raw());
  result.StorePointer(&result.raw_ptr()->loaded_scripts_, Array::null());
  result.StorePointer(&result.raw_ptr()->load_error_, Instance::null());
  result.set_native_entry_resolver(NULL);
  result.set_native_entry_symbol_resolver(NULL);
  result.set_is_in_fullsnapshot(false);
  result.StoreNonPointer(&result.raw_ptr()->corelib_imported_, true);
  if (dart_private_scheme) {
    // Never debug dart:_ libraries.
    result.set_debuggable(false);
  } else if (dart_scheme) {
    // Only debug dart: libraries if we have been requested to show invisible
    // frames.
    result.set_debuggable(FLAG_show_invisible_frames);
  } else {
    // Default to debuggable for all other libraries.
    result.set_debuggable(true);
  }
  result.set_is_dart_scheme(dart_scheme);
  // -1 means "no kernel data" / "not yet registered" for offset and index.
  result.set_kernel_offset(-1);
  result.StoreNonPointer(&result.raw_ptr()->load_state_,
                         RawLibrary::kAllocated);
  result.StoreNonPointer(&result.raw_ptr()->index_, -1);
  result.InitClassDictionary();
  result.InitImportList();
  result.AllocatePrivateKey();
  if (import_core_lib) {
    const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
    ASSERT(!core_lib.IsNull());
    const Namespace& ns = Namespace::Handle(
        zone,
        Namespace::New(core_lib, Object::null_array(), Object::null_array()));
    result.AddImport(ns);
  }
  return result.raw();
}
RawLibrary* Library::New(const String& url) {
  // Creates a library that does not implicitly import dart:core.
  return NewLibraryHelper(url, false);
}
void Library::InitCoreLibrary(Isolate* isolate) {
  // Creates and registers dart:core for `isolate` and records it in the
  // object store. The root library starts out as a null handle.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const String& core_lib_url = Symbols::DartCore();
  const Library& core_lib =
      Library::Handle(zone, Library::NewLibraryHelper(core_lib_url, false));
  core_lib.SetLoadRequested();
  core_lib.Register(thread);
  isolate->object_store()->set_bootstrap_library(ObjectStore::kCore, core_lib);
  isolate->object_store()->set_root_library(Library::Handle());
  // Hook up predefined classes without setting their library pointers. These
  // classes are coming from the VM isolate, and are shared between multiple
  // isolates so setting their library pointers would be wrong.
  const Class& cls = Class::Handle(zone, Object::dynamic_class());
  core_lib.AddObject(cls, String::Handle(zone, cls.Name()));
}
// Invoke the function, or noSuchMethod if it is null.
static RawObject* InvokeInstanceFunction(
    const Instance& receiver,
    const Function& function,
    const String& target_name,
    const Array& args,
    const Array& args_descriptor_array,
    bool respect_reflectable,
    const TypeArguments& instantiator_type_args) {
  // Note "args" is already the internal arguments with the receiver as the
  // first element.
  ArgumentsDescriptor args_descriptor(args_descriptor_array);
  // Fall back to noSuchMethod when the function is missing, the arguments
  // don't fit its signature, or reflection rules forbid the call.
  if (function.IsNull() || !function.AreValidArguments(args_descriptor, NULL) ||
      (respect_reflectable && !function.is_reflectable())) {
    return DartEntry::InvokeNoSuchMethod(receiver, target_name, args,
                                         args_descriptor_array);
  }
  // Dynamic type-check of the actual arguments; returns an Error on failure.
  RawObject* type_error = function.DoArgumentTypesMatch(args, args_descriptor,
                                                        instantiator_type_args);
  if (type_error != Error::null()) {
    return type_error;
  }
  return DartEntry::InvokeFunction(function, args, args_descriptor_array);
}
RawObject* Library::InvokeGetter(const String& getter_name,
                                 bool throw_nsm_if_absent,
                                 bool respect_reflectable) const {
  // Reads the top-level getter or field named `getter_name`. A plain method
  // of that name is implicitly closurized. When nothing suitable is found:
  // throws NoSuchMethod if throw_nsm_if_absent, otherwise returns the
  // sentinel object (which callers must not leak into Dart code).
  Object& obj = Object::Handle(LookupLocalOrReExportObject(getter_name));
  Function& getter = Function::Handle();
  if (obj.IsField()) {
    const Field& field = Field::Cast(obj);
    if (!field.IsUninitialized()) {
      return field.StaticValue();
    }
    // An uninitialized field was found. Check for a getter in the field's
    // owner class.
    const Class& klass = Class::Handle(field.Owner());
    const String& internal_getter_name =
        String::Handle(Field::GetterName(getter_name));
    getter = klass.LookupStaticFunction(internal_getter_name);
  } else {
    // No field found. Check for a getter in the lib.
    const String& internal_getter_name =
        String::Handle(Field::GetterName(getter_name));
    obj = LookupLocalOrReExportObject(internal_getter_name);
    if (obj.IsFunction()) {
      getter = Function::Cast(obj).raw();
    } else {
      obj = LookupLocalOrReExportObject(getter_name);
      if (obj.IsFunction()) {
        // Looking for a getter but found a regular method: closurize it.
        const Function& closure_function =
            Function::Handle(Function::Cast(obj).ImplicitClosureFunction());
        return closure_function.ImplicitStaticClosure();
      }
    }
  }
  if (getter.IsNull() || (respect_reflectable && !getter.is_reflectable())) {
    if (throw_nsm_if_absent) {
      return ThrowNoSuchMethod(
          AbstractType::Handle(Class::Handle(toplevel_class()).RareType()),
          getter_name, Object::null_array(), Object::null_array(),
          InvocationMirror::kTopLevel, InvocationMirror::kGetter);
    }
    // Fall through case: Indicate that we didn't find any function or field
    // using a special null instance. This is different from a field being null.
    // Callers make sure that this null does not leak into Dartland.
    return Object::sentinel().raw();
  }
  // Invoke the getter and return the result.
  return DartEntry::InvokeFunction(getter, Object::empty_array());
}
RawObject* Library::InvokeSetter(const String& setter_name,
                                 const Instance& value,
                                 bool respect_reflectable) const {
  // Assigns `value` to the top-level field or setter named `setter_name`,
  // dynamically type-checking `value` against the declared field/parameter
  // type. Throws TypeError on a failed check and NoSuchMethod for missing,
  // final, or non-reflectable targets. Returns `value` on a field store.
  Object& obj = Object::Handle(LookupLocalOrReExportObject(setter_name));
  const String& internal_setter_name =
      String::Handle(Field::SetterName(setter_name));
  AbstractType& setter_type = AbstractType::Handle();
  AbstractType& argument_type = AbstractType::Handle(value.GetType(Heap::kOld));
  if (obj.IsField()) {
    const Field& field = Field::Cast(obj);
    setter_type ^= field.type();
    // Null is assignable to any type here; dynamic accepts everything.
    if (!argument_type.IsNullType() && !setter_type.IsDynamicType() &&
        !value.IsInstanceOf(setter_type, Object::null_type_arguments(),
                            Object::null_type_arguments(), NULL)) {
      return ThrowTypeError(field.token_pos(), value, setter_type, setter_name);
    }
    if (field.is_final() || (respect_reflectable && !field.is_reflectable())) {
      const int kNumArgs = 1;
      const Array& args = Array::Handle(Array::New(kNumArgs));
      args.SetAt(0, value);
      return ThrowNoSuchMethod(
          AbstractType::Handle(Class::Handle(toplevel_class()).RareType()),
          internal_setter_name, args, Object::null_array(),
          InvocationMirror::kTopLevel, InvocationMirror::kSetter);
    }
    field.SetStaticValue(value);
    return value.raw();
  }
  // No field: look for an explicit setter function.
  Function& setter = Function::Handle();
  obj = LookupLocalOrReExportObject(internal_setter_name);
  if (obj.IsFunction()) {
    setter ^= obj.raw();
  }
  const int kNumArgs = 1;
  const Array& args = Array::Handle(Array::New(kNumArgs));
  args.SetAt(0, value);
  if (setter.IsNull() || (respect_reflectable && !setter.is_reflectable())) {
    return ThrowNoSuchMethod(
        AbstractType::Handle(Class::Handle(toplevel_class()).RareType()),
        internal_setter_name, args, Object::null_array(),
        InvocationMirror::kTopLevel, InvocationMirror::kSetter);
  }
  // Type-check `value` against the setter's sole parameter type.
  setter_type ^= setter.ParameterTypeAt(0);
  if (!argument_type.IsNullType() && !setter_type.IsDynamicType() &&
      !value.IsInstanceOf(setter_type, Object::null_type_arguments(),
                          Object::null_type_arguments(), NULL)) {
    return ThrowTypeError(setter.token_pos(), value, setter_type, setter_name);
  }
  return DartEntry::InvokeFunction(setter, args);
}
RawObject* Library::Invoke(const String& function_name,
                           const Array& args,
                           const Array& arg_names,
                           bool respect_reflectable) const {
  // Invokes the top-level function `function_name` with `args`/`arg_names`.
  // If no function is found, tries a getter and invokes its result as a
  // closure. Throws NoSuchMethod when neither path succeeds or the
  // arguments don't match.
  // TODO(regis): Support invocation of generic functions with type arguments.
  const int kTypeArgsLen = 0;
  Function& function = Function::Handle();
  Object& obj = Object::Handle(LookupLocalOrReExportObject(function_name));
  if (obj.IsFunction()) {
    function ^= obj.raw();
  }
  if (function.IsNull()) {
    // Didn't find a method: try to find a getter and invoke call on its result.
    const Object& getter_result =
        Object::Handle(InvokeGetter(function_name, false));
    if (getter_result.raw() != Object::sentinel().raw()) {
      // Make room for the closure (receiver) in arguments.
      intptr_t numArgs = args.Length();
      const Array& call_args = Array::Handle(Array::New(numArgs + 1));
      Object& temp = Object::Handle();
      for (int i = 0; i < numArgs; i++) {
        temp = args.At(i);
        call_args.SetAt(i + 1, temp);
      }
      call_args.SetAt(0, getter_result);
      const Array& call_args_descriptor_array =
          Array::Handle(ArgumentsDescriptor::New(
              kTypeArgsLen, call_args.Length(), arg_names));
      // Call closure.
      return DartEntry::InvokeClosure(call_args, call_args_descriptor_array);
    }
  }
  const Array& args_descriptor_array = Array::Handle(
      ArgumentsDescriptor::New(kTypeArgsLen, args.Length(), arg_names));
  ArgumentsDescriptor args_descriptor(args_descriptor_array);
  const TypeArguments& type_args = Object::null_type_arguments();
  if (function.IsNull() || !function.AreValidArguments(args_descriptor, NULL) ||
      (respect_reflectable && !function.is_reflectable())) {
    return ThrowNoSuchMethod(
        AbstractType::Handle(Class::Handle(toplevel_class()).RareType()),
        function_name, args, arg_names, InvocationMirror::kTopLevel,
        InvocationMirror::kMethod);
  }
  // Dynamic type-check of the actual arguments before the real call.
  RawObject* type_error =
      function.DoArgumentTypesMatch(args, args_descriptor, type_args);
  if (type_error != Error::null()) {
    return type_error;
  }
  return DartEntry::InvokeFunction(function, args, args_descriptor_array);
}
RawObject* Library::Evaluate(const String& expr,
                             const Array& param_names,
                             const Array& param_values) const {
  // Convenience overload: evaluate with no type parameters.
  return Evaluate(expr, param_names, param_values, Array::empty_array(),
                  TypeArguments::null_type_arguments());
}
RawObject* Library::Evaluate(const String& expr,
                             const Array& param_names,
                             const Array& param_values,
                             const Array& type_param_names,
                             const TypeArguments& type_param_values) const {
  // Evaluates `expr` as if it were a static function of this library's
  // top-level class. Not valid for kernel-backed libraries when expression
  // compilation is delegated to the kernel service.
  ASSERT(kernel_data() == ExternalTypedData::null() ||
         !FLAG_enable_kernel_expression_compilation);
  // Evaluate the expression as a static function of the toplevel class.
  Class& top_level_class = Class::Handle(toplevel_class());
  ASSERT(top_level_class.is_finalized());
  return top_level_class.Evaluate(expr, param_names, param_values);
}
RawObject* Library::EvaluateCompiledExpression(
    const uint8_t* kernel_bytes,
    intptr_t kernel_length,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) const {
  // Runs an already-compiled expression (kernel blob) in the scope of this
  // library; the empty class name selects the top-level scope.
  return EvaluateCompiledExpressionHelper(
      kernel_bytes, kernel_length, type_definitions, String::Handle(url()),
      String::Handle(), arguments, type_arguments);
}
void Library::InitNativeWrappersLibrary(Isolate* isolate, bool is_kernel) {
  // Creates the dart:nativewrappers library and populates it with the
  // NativeFieldWrapperClass1..4 classes (one per native-field count).
  static const int kNumNativeWrappersClasses = 4;
  // The name buffer below appends a single digit, so the count must stay
  // in [1, 9].
  COMPILE_ASSERT((kNumNativeWrappersClasses > 0) &&
                 (kNumNativeWrappersClasses < 10));
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const String& native_flds_lib_url = Symbols::DartNativeWrappers();
  const Library& native_flds_lib = Library::Handle(
      zone, Library::NewLibraryHelper(native_flds_lib_url, false));
  const String& native_flds_lib_name = Symbols::DartNativeWrappersLibName();
  native_flds_lib.SetName(native_flds_lib_name);
  native_flds_lib.SetLoadRequested();
  native_flds_lib.Register(thread);
  native_flds_lib.SetLoadInProgress();
  isolate->object_store()->set_native_wrappers_library(native_flds_lib);
  static const char* const kNativeWrappersClass = "NativeFieldWrapperClass";
  // Buffer length = class-name prefix + one digit + NUL terminator.
  static const int kNameLength = 25;
  ASSERT(kNameLength == (strlen(kNativeWrappersClass) + 1 + 1));
  char name_buffer[kNameLength];
  String& cls_name = String::Handle(zone);
  for (int fld_cnt = 1; fld_cnt <= kNumNativeWrappersClasses; fld_cnt++) {
    Utils::SNPrint(name_buffer, kNameLength, "%s%d", kNativeWrappersClass,
                   fld_cnt);
    cls_name = Symbols::New(thread, name_buffer);
    Class::NewNativeWrapper(native_flds_lib, cls_name, fld_cnt);
  }
  // NOTE: If we bootstrap from a Kernel IR file we want to generate the
  // synthetic constructors for the native wrapper classes. We leave this up to
  // the [KernelLoader] who will take care of it later.
  if (!is_kernel) {
    native_flds_lib.SetLoaded();
  }
}
// LibraryLookupSet maps URIs to libraries.
class LibraryLookupTraits {
 public:
  static const char* Name() { return "LibraryLookupTraits"; }
  static bool ReportStats() { return false; }

  // Keys are URL strings; equality is full string comparison.
  static bool IsMatch(const Object& a, const Object& b) {
    const String& a_str = String::Cast(a);
    const String& b_str = String::Cast(b);
    // Callers are expected to have pre-hashed both keys.
    ASSERT(a_str.HasHash() && b_str.HasHash());
    return a_str.Equals(b_str);
  }

  static uword Hash(const Object& key) { return String::Cast(key).Hash(); }

  static RawObject* NewKey(const String& str) { return str.raw(); }
};
typedef UnorderedHashMap<LibraryLookupTraits> LibraryLookupMap;
// Loads the expression-evaluation function from a kernel blob and invokes
// it with `arguments` (prefixed by `type_arguments` when generic).
// Returns the invocation result, or an Error/ApiError on failure.
static RawObject* EvaluateCompiledExpressionHelper(
    const uint8_t* kernel_bytes,
    intptr_t kernel_length,
    const Array& type_definitions,
    const String& library_url,
    const String& klass,
    const Array& arguments,
    const TypeArguments& type_arguments) {
#if defined(DART_PRECOMPILED_RUNTIME)
  // AOT runtime ships no compiler, so evaluation is unsupported.
  const String& error_str = String::Handle(
      String::New("Expression evaluation not available in precompiled mode."));
  return ApiError::New(error_str);
#else
  kernel::Program* kernel_pgm =
      kernel::Program::ReadFromBuffer(kernel_bytes, kernel_length);
  if (kernel_pgm == NULL) {
    return ApiError::New(String::Handle(
        String::New("Kernel isolate returned ill-formed kernel.")));
  }
  kernel::KernelLoader loader(kernel_pgm);
  const Object& result = Object::Handle(
      loader.LoadExpressionEvaluationFunction(library_url, klass));
  // The program is only needed for loading; free it before invoking.
  delete kernel_pgm;
  kernel_pgm = NULL;
  if (result.IsError()) return result.raw();
  const Function& callee = Function::Cast(result);
  // type_arguments is null if all type arguments are dynamic.
  if (type_definitions.Length() == 0 || type_arguments.IsNull()) {
    return DartEntry::InvokeFunction(callee, arguments);
  }
  // Generic case: pass the type-argument vector as the first argument.
  intptr_t num_type_args = type_arguments.Length();
  Array& real_arguments = Array::Handle(Array::New(arguments.Length() + 1));
  real_arguments.SetAt(0, type_arguments);
  Object& arg = Object::Handle();
  for (intptr_t i = 0; i < arguments.Length(); ++i) {
    arg = arguments.At(i);
    real_arguments.SetAt(i + 1, arg);
  }
  const Array& args_desc = Array::Handle(
      ArgumentsDescriptor::New(num_type_args, arguments.Length()));
  return DartEntry::InvokeFunction(callee, real_arguments, args_desc);
#endif
}
// Returns the library with the given url in the current isolate, or
// Library::null() if it has not been registered.
RawLibrary* Library::LookupLibrary(Thread* thread, const String& url) {
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  ObjectStore* object_store = isolate->object_store();
  // Make sure the URL string has an associated hash code
  // to speed up the repeated equality checks.
  url.Hash();
  // Use the libraries map to lookup the library by URL.
  Library& lib = Library::Handle(zone);
  if (object_store->libraries_map() == Array::null()) {
    // No library has been registered yet.
    return Library::null();
  } else {
    LibraryLookupMap map(object_store->libraries_map());
    lib ^= map.GetOrNull(url);
    // The lookup must not have modified (grown/rehashed) the backing array.
    ASSERT(map.Release().raw() == object_store->libraries_map());
  }
  return lib.raw();
}
RawError* Library::Patch(const Script& script) const {
  // Applies a patch script to this library by compiling it; returns the
  // compilation error, if any.
  ASSERT(script.kind() == RawScript::kPatchTag);
  return Compiler::Compile(*this, script);
}
bool Library::IsPrivate(const String& name) {
if (ShouldBePrivate(name)) return true;
// Factory names: List._fromLiteral.
for (intptr_t i = 1; i < name.Length() - 1; i++) {
if (name.CharAt(i) == '.') {
if (name.CharAt(i + 1) == '_') {
return true;
}
}
}
return false;
}
// Create a private key for this library. It is based on the hash of the
// library URI and the sequence number of the library to guarantee unique
// private keys without having to verify.
void Library::AllocatePrivateKey() const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  if (FLAG_support_reload && isolate->IsReloading()) {
    // When reloading, we need to make sure we use the original private key
    // if this library previously existed.
    IsolateReloadContext* reload_context = isolate->reload_context();
    const String& original_key =
        String::Handle(reload_context->FindLibraryPrivateKey(*this));
    if (!original_key.IsNull()) {
      StorePointer(&raw_ptr()->private_key_, original_key.raw());
      return;
    }
  }
#endif  // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  // Format of the private key is:
  // "<kPrivateKeySeparator><sequence number><6 digits of the url hash>".
  const intptr_t hash_mask = 0x7FFFF;
  const String& url = String::Handle(zone, this->url());
  intptr_t hash_value = url.Hash() & hash_mask;
  // The sequence number is the number of libraries registered so far.
  const GrowableObjectArray& libs =
      GrowableObjectArray::Handle(zone, isolate->object_store()->libraries());
  intptr_t sequence_value = libs.Length();
  char private_key[32];
  Utils::SNPrint(private_key, sizeof(private_key), "%c%" Pd "%06" Pd "",
                 kPrivateKeySeparator, sequence_value, hash_value);
  const String& key =
      String::Handle(zone, String::New(private_key, Heap::kOld));
  key.Hash();  // This string may end up in the VM isolate.
  StorePointer(&raw_ptr()->private_key_, key.raw());
}
const String& Library::PrivateCoreLibName(const String& member) {
  // Mangles `member` with dart:core's private key; returned as a zone
  // handle so the reference outlives the current handle scope.
  const Library& core_lib = Library::Handle(Library::CoreLibrary());
  const String& private_name = String::ZoneHandle(core_lib.PrivateName(member));
  return private_name;
}
RawClass* Library::LookupCoreClass(const String& class_name) {
  // Looks up `class_name` in dart:core, mangling private identifiers with
  // the core library's private key first.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
  String& name = String::Handle(zone, class_name.raw());
  if (class_name.CharAt(0) == kPrivateIdentifierStart) {
    // Private identifiers are mangled on a per library basis.
    name = Symbols::FromConcat(thread, name,
                               String::Handle(zone, core_lib.private_key()));
  }
  return core_lib.LookupClass(name);
}
// Cannot handle qualified names properly as it only appends private key to
// the end (e.g. _Alfa.foo -> _Alfa.foo@...).
RawString* Library::PrivateName(const String& name) const {
  // Mangles `name` by appending this library's private key; the result is
  // interned as a symbol.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(IsPrivate(name));
  // ASSERT(strchr(name, '@') == NULL);
  String& str = String::Handle(zone);
  str = name.raw();
  str = Symbols::FromConcat(thread, str,
                            String::Handle(zone, this->private_key()));
  return str.raw();
}
RawLibrary* Library::GetLibrary(intptr_t index) {
  // Returns the library registered at `index` in the current isolate's
  // library list, or null when the index is out of range.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const GrowableObjectArray& libs = GrowableObjectArray::Handle(
      zone, thread->isolate()->object_store()->libraries());
  ASSERT(!libs.IsNull());
  if ((index < 0) || (index >= libs.Length())) {
    return Library::null();
  }
  Library& result = Library::Handle(zone);
  result ^= libs.At(index);
  return result.raw();
}
// Adds this library to the isolate's registry: appends it to the library
// list (recording its index on the library) and inserts it into the
// URL -> library lookup map. The library must not already be registered.
void Library::Register(Thread* thread) const {
Zone* zone = thread->zone();
Isolate* isolate = thread->isolate();
ObjectStore* object_store = isolate->object_store();
// A library is "registered" in two places:
// - A growable array mapping from index to library.
const String& lib_url = String::Handle(zone, url());
ASSERT(Library::LookupLibrary(thread, lib_url) == Library::null());
ASSERT(lib_url.HasHash());
GrowableObjectArray& libs =
GrowableObjectArray::Handle(zone, object_store->libraries());
ASSERT(!libs.IsNull());
set_index(libs.Length());
libs.Add(*this);
// - A map from URL string to library.
// Lazily create the lookup map on the first registration.
if (object_store->libraries_map() == Array::null()) {
LibraryLookupMap map(HashTables::New<LibraryLookupMap>(16, Heap::kOld));
object_store->set_libraries_map(map.Release());
}
LibraryLookupMap map(object_store->libraries_map());
bool present = map.UpdateOrInsert(lib_url, *this);
ASSERT(!present);
// Release() returns the (possibly rehashed) backing array; store it back so
// the object store sees any growth.
object_store->set_libraries_map(map.Release());
}
// Installs |libs| as the isolate's library list and rebuilds the
// URL -> library lookup map in the object store from scratch.
void Library::RegisterLibraries(Thread* thread,
                                const GrowableObjectArray& libs) {
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  LibraryLookupMap url_map(HashTables::New<LibraryLookupMap>(16, Heap::kOld));
  Library& library = Library::Handle(zone);
  String& url = String::Handle(zone);
  const intptr_t count = libs.Length();
  for (intptr_t i = 0; i < count; i++) {
    library ^= libs.At(i);
    url = library.url();
    url_map.InsertNewOrGetValue(url, library);
  }
  // Now remember these in the isolate's object store.
  isolate->object_store()->set_libraries(libs);
  isolate->object_store()->set_libraries_map(url_map.Release());
}
// Shorthand accessors for the well-known libraries cached in the current
// isolate's object store.
RawLibrary* Library::AsyncLibrary() {
return Isolate::Current()->object_store()->async_library();
}
RawLibrary* Library::ConvertLibrary() {
return Isolate::Current()->object_store()->convert_library();
}
RawLibrary* Library::CoreLibrary() {
return Isolate::Current()->object_store()->core_library();
}
RawLibrary* Library::CollectionLibrary() {
return Isolate::Current()->object_store()->collection_library();
}
RawLibrary* Library::DeveloperLibrary() {
return Isolate::Current()->object_store()->developer_library();
}
RawLibrary* Library::InternalLibrary() {
return Isolate::Current()->object_store()->_internal_library();
}
RawLibrary* Library::IsolateLibrary() {
return Isolate::Current()->object_store()->isolate_library();
}
RawLibrary* Library::MathLibrary() {
return Isolate::Current()->object_store()->math_library();
}
// dart:mirrors is unavailable in the precompiled runtime.
#if !defined(DART_PRECOMPILED_RUNTIME)
RawLibrary* Library::MirrorsLibrary() {
return Isolate::Current()->object_store()->mirrors_library();
}
#endif
RawLibrary* Library::NativeWrappersLibrary() {
return Isolate::Current()->object_store()->native_wrappers_library();
}
RawLibrary* Library::ProfilerLibrary() {
return Isolate::Current()->object_store()->profiler_library();
}
RawLibrary* Library::TypedDataLibrary() {
return Isolate::Current()->object_store()->typed_data_library();
}
RawLibrary* Library::VMServiceLibrary() {
return Isolate::Current()->object_store()->_vmservice_library();
}
const char* Library::ToCString() const {
const String& name = String::Handle(url());
return OS::SCreate(Thread::Current()->zone(), "Library:'%s'",
name.ToCString());
}
// Returns the library imported under this prefix at position |index|, or
// null when |index| is out of range.
RawLibrary* LibraryPrefix::GetLibrary(int index) const {
  // Both bounds must hold. The previous check used '||', which let every
  // non-negative index through and read past the populated portion of the
  // imports array (cf. Library::GetLibrary, which checks the conjunction).
  if ((index >= 0) && (index < num_imports())) {
    const Array& imports = Array::Handle(this->imports());
    Namespace& import = Namespace::Handle();
    import ^= imports.At(index);
    return import.library();
  }
  return Library::null();
}
// Returns the first transitive load error of the libraries imported through
// this prefix, or null if none. A temporary load-error set is installed in
// the object store for the duration of the walk (consulted by
// TransitiveLoadError) and cleared before returning.
RawInstance* LibraryPrefix::LoadError() const {
Thread* thread = Thread::Current();
Isolate* isolate = thread->isolate();
Zone* zone = thread->zone();
ObjectStore* object_store = isolate->object_store();
GrowableObjectArray& libs =
GrowableObjectArray::Handle(zone, object_store->libraries());
ASSERT(!libs.IsNull());
LibraryLoadErrorSet set(HashTables::New<LibraryLoadErrorSet>(libs.Length()));
object_store->set_library_load_error_table(set.Release());
Library& lib = Library::Handle(zone);
Instance& error = Instance::Handle(zone);
for (int32_t i = 0; i < num_imports(); i++) {
lib = GetLibrary(i);
ASSERT(!lib.IsNull());
// Fresh handle scope per import keeps handle usage bounded during the walk.
HANDLESCOPE(thread);
error = lib.TransitiveLoadError();
if (!error.IsNull()) {
break;
}
}
// Tear down the temporary table regardless of the outcome.
object_store->set_library_load_error_table(Object::empty_array());
return error.raw();
}
bool LibraryPrefix::ContainsLibrary(const Library& library) const {
int32_t num_current_imports = num_imports();
if (num_current_imports > 0) {
Library& lib = Library::Handle();
const String& url = String::Handle(library.url());
String& lib_url = String::Handle();
for (int32_t i = 0; i < num_current_imports; i++) {
lib = GetLibrary(i);
ASSERT(!lib.IsNull());
lib_url = lib.url();
if (url.Equals(lib_url)) {
return true;
}
}
}
return false;
}
void LibraryPrefix::AddImport(const Namespace& import) const {
intptr_t num_current_imports = num_imports();
// Prefixes with deferred libraries can only contain one library.
ASSERT((num_current_imports == 0) || !is_deferred_load());
// The library needs to be added to the list.
Array& imports = Array::Handle(this->imports());
const intptr_t length = (imports.IsNull()) ? 0 : imports.Length();
// Grow the list if it is full.
if (num_current_imports >= length) {
const intptr_t new_length = length + kIncrementSize + (length >> 2);
imports = Array::Grow(imports, new_length, Heap::kOld);
set_imports(imports);
}
imports.SetAt(num_current_imports, import);
set_num_imports(num_current_imports + 1);
}
// Resolves |name| across all namespaces imported under this prefix.
// Shadowing rules: an object from a non-dart: library hides one exported
// from a dart: system library; a getter/plain name supersedes a setter; two
// genuinely distinct objects with the same plain name are ambiguous and
// resolve to null. Deferred prefixes that are not yet loaded resolve to null.
RawObject* LibraryPrefix::LookupObject(const String& name) const {
if (!is_loaded() && !FLAG_load_deferred_eagerly) {
return Object::null();
}
Array& imports = Array::Handle(this->imports());
Object& obj = Object::Handle();
Namespace& import = Namespace::Handle();
Library& import_lib = Library::Handle();
String& import_lib_url = String::Handle();
String& first_import_lib_url = String::Handle();
Object& found_obj = Object::Handle();
String& found_obj_name = String::Handle();
for (intptr_t i = 0; i < num_imports(); i++) {
import ^= imports.At(i);
obj = import.Lookup(name);
if (!obj.IsNull()) {
import_lib = import.library();
import_lib_url = import_lib.url();
if (found_obj.raw() != obj.raw()) {
if (first_import_lib_url.IsNull() ||
first_import_lib_url.StartsWith(Symbols::DartScheme())) {
// This is the first object we found, or the
// previously found object is exported from a Dart
// system library. The newly found object hides the one
// from the Dart library.
first_import_lib_url = import_lib.url();
found_obj = obj.raw();
found_obj_name = found_obj.DictionaryName();
} else if (import_lib_url.StartsWith(Symbols::DartScheme())) {
// The newly found object is exported from a Dart system
// library. It is hidden by the previously found object.
// We continue to search.
} else if (Field::IsSetterName(found_obj_name) &&
!Field::IsSetterName(name)) {
// We are looking for an unmangled name or a getter, but
// the first object we found is a setter. Replace the first
// object with the one we just found.
first_import_lib_url = import_lib.url();
found_obj = obj.raw();
found_obj_name = found_obj.DictionaryName();
} else {
// We found two different objects with the same name.
// Note that we need to compare the names again because
// looking up an unmangled name can return a getter or a
// setter. A getter name is the same as the unmangled name,
// but a setter name is different from an unmangled name or a
// getter name.
if (Field::IsGetterName(found_obj_name)) {
found_obj_name = Field::NameFromGetter(found_obj_name);
}
String& second_obj_name = String::Handle(obj.DictionaryName());
if (Field::IsGetterName(second_obj_name)) {
second_obj_name = Field::NameFromGetter(second_obj_name);
}
if (found_obj_name.Equals(second_obj_name)) {
// Same plain name from two different libraries: ambiguous.
return Object::null();
}
}
}
}
}
return found_obj.raw();
}
// Resolves |class_name| through this prefix; returns null unless the
// resolved object is a class.
RawClass* LibraryPrefix::LookupClass(const String& class_name) const {
  const Object& result = Object::Handle(LookupObject(class_name));
  if (!result.IsClass()) {
    return Class::null();
  }
  return Class::Cast(result).raw();
}
// Marks this (deferred) prefix as loaded.
void LibraryPrefix::set_is_loaded() const {
StoreNonPointer(&raw_ptr()->is_loaded_, true);
}
// Ensures the deferred library behind this prefix is loaded or that a load
// request is in flight. Returns true when the prefix is (now) loaded and
// false while a load request is still outstanding or has failed.
bool LibraryPrefix::LoadLibrary() const {
// Non-deferred prefixes are loaded.
ASSERT(is_deferred_load() || is_loaded());
if (is_loaded()) {
return true; // Load request has already completed.
}
ASSERT(is_deferred_load());
ASSERT(num_imports() == 1);
if (Dart::vm_snapshot_kind() == Snapshot::kFullAOT) {
// The library list was tree-shaken away.
this->set_is_loaded();
return true;
}
// This is a prefix for a deferred library. If the library is not loaded
// yet and isn't being loaded, call the library tag handler to schedule
// loading. Once all outstanding load requests have completed, the embedder
// will call the core library to:
// - invalidate dependent code of this prefix;
// - mark this prefixes as loaded;
// - complete the future associated with this prefix.
const Library& deferred_lib = Library::Handle(GetLibrary(0));
if (deferred_lib.Loaded()) {
this->set_is_loaded();
return true;
} else if (deferred_lib.LoadNotStarted()) {
Thread* thread = Thread::Current();
Isolate* isolate = thread->isolate();
Zone* zone = thread->zone();
deferred_lib.SetLoadRequested();
const GrowableObjectArray& pending_deferred_loads =
GrowableObjectArray::Handle(
isolate->object_store()->pending_deferred_loads());
pending_deferred_loads.Add(deferred_lib);
const String& lib_url = String::Handle(zone, deferred_lib.url());
Dart_LibraryTagHandler handler = isolate->library_tag_handler();
Object& obj = Object::Handle(zone);
{
// The embedder's tag handler runs in native code; transition out of VM
// state and open an API scope for the Dart_Handles passed to it.
TransitionVMToNative transition(thread);
Api::Scope api_scope(thread);
obj = Api::UnwrapHandle(handler(Dart_kImportTag,
Api::NewHandle(thread, importer()),
Api::NewHandle(thread, lib_url.raw())));
}
if (obj.IsError()) {
Exceptions::PropagateError(Error::Cast(obj));
}
} else {
// Another load request is in flight or previously failed.
ASSERT(deferred_lib.LoadRequested() || deferred_lib.LoadFailed());
}
return false; // Load request not yet completed.
}
// Code objects compiled against this prefix while its deferred library was
// not yet loaded; they are invalidated once the library loads.
RawArray* LibraryPrefix::dependent_code() const {
return raw_ptr()->dependent_code_;
}
void LibraryPrefix::set_dependent_code(const Array& array) const {
StorePointer(&raw_ptr()->dependent_code_, array.raw());
}
// WeakCodeReferences adapter that stores the dependent-code array on a
// LibraryPrefix and reports when that code is disabled after the prefix's
// deferred library loads.
class PrefixDependentArray : public WeakCodeReferences {
public:
explicit PrefixDependentArray(const LibraryPrefix& prefix)
: WeakCodeReferences(Array::Handle(prefix.dependent_code())),
prefix_(prefix) {}
// Persists the (possibly grown) dependent-code array on the prefix.
virtual void UpdateArrayTo(const Array& value) {
prefix_.set_dependent_code(value);
}
virtual void ReportDeoptimization(const Code& code) {
// This gets called when the code object is on the stack
// while nuking code that depends on a prefix. We don't expect
// this to happen, so make sure we die loudly if we find
// ourselves here.
UNIMPLEMENTED();
}
// Traces each code object being disabled, when tracing flags are on.
virtual void ReportSwitchingCode(const Code& code) {
if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
THR_Print("Prefix '%s': disabling %s code for %s function '%s'\n",
String::Handle(prefix_.name()).ToCString(),
code.is_optimized() ? "optimized" : "unoptimized",
code.IsDisabled() ? "'patched'" : "'unpatched'",
Function::Handle(code.function()).ToCString());
}
}
private:
const LibraryPrefix& prefix_;
DISALLOW_COPY_AND_ASSIGN(PrefixDependentArray);
};
// Records |code| as depending on this deferred prefix so it can be disabled
// when the library actually loads.
void LibraryPrefix::RegisterDependentCode(const Code& code) const {
ASSERT(is_deferred_load());
// In background compilation, a library can be loaded while we are compiling.
// The generated code will be rejected in that case,
ASSERT(!is_loaded() || Compiler::IsBackgroundCompilation());
PrefixDependentArray a(*this);
a.Register(code);
}
// Disables all code registered against this prefix and marks the prefix
// loaded. Called once the deferred library has finished loading.
void LibraryPrefix::InvalidateDependentCode() const {
PrefixDependentArray a(*this);
if (FLAG_trace_deoptimization && a.HasCodes()) {
THR_Print("Deopt for lazy load (prefix %s)\n", ToCString());
}
a.DisableCode();
set_is_loaded();
}
// Allocates an uninitialized LibraryPrefix in old space.
RawLibraryPrefix* LibraryPrefix::New() {
  return reinterpret_cast<RawLibraryPrefix*>(Object::Allocate(
      LibraryPrefix::kClassId, LibraryPrefix::InstanceSize(), Heap::kOld));
}
// Allocates and initializes a prefix named |name| inside |importer|, seeded
// with the single namespace |import|. A deferred prefix starts out
// not-loaded; a non-deferred one is considered loaded immediately.
RawLibraryPrefix* LibraryPrefix::New(const String& name,
const Namespace& import,
bool deferred_load,
const Library& importer) {
const LibraryPrefix& result = LibraryPrefix::Handle(LibraryPrefix::New());
result.set_name(name);
result.set_num_imports(0);
result.set_importer(importer);
result.StoreNonPointer(&result.raw_ptr()->is_deferred_load_, deferred_load);
result.StoreNonPointer(&result.raw_ptr()->is_loaded_, !deferred_load);
result.set_imports(Array::Handle(Array::New(kInitialSize)));
result.AddImport(import);
return result.raw();
}
// Sets the prefix's name; prefix names are always interned symbols.
void LibraryPrefix::set_name(const String& value) const {
ASSERT(value.IsSymbol());
StorePointer(&raw_ptr()->name_, value.raw());
}
void LibraryPrefix::set_imports(const Array& value) const {
StorePointer(&raw_ptr()->imports_, value.raw());
}
// Sets the import count; the field is 16 bits wide, so overflow reports an
// error against the importing library.
void LibraryPrefix::set_num_imports(intptr_t value) const {
if (!Utils::IsUint(16, value)) {
ReportTooManyImports(Library::Handle(importer()));
}
StoreNonPointer(&raw_ptr()->num_imports_, value);
}
void LibraryPrefix::set_importer(const Library& value) const {
StorePointer(&raw_ptr()->importer_, value.raw());
}
const char* LibraryPrefix::ToCString() const {
const String& prefix = String::Handle(name());
return OS::SCreate(Thread::Current()->zone(), "LibraryPrefix:'%s'",
prefix.ToCString());
}
void Namespace::set_metadata_field(const Field& value) const {
StorePointer(&raw_ptr()->metadata_field_, value.raw());
}
// Attaches a placeholder top-level field to this namespace that carries its
// metadata annotations; the annotations themselves are evaluated lazily from
// the kernel data at |kernel_offset| (see GetMetadata).
void Namespace::AddMetadata(const Object& owner,
TokenPosition token_pos,
intptr_t kernel_offset) {
ASSERT(Field::Handle(metadata_field()).IsNull());
Field& field = Field::Handle(Field::NewTopLevel(Symbols::TopLevel(),
false, // is_final
false, // is_const
owner, token_pos, token_pos));
field.set_is_reflectable(false);
field.SetFieldType(Object::dynamic_type());
// empty_array acts as the "not yet evaluated" sentinel.
field.SetStaticValue(Array::empty_array(), true);
field.set_kernel_offset(kernel_offset);
set_metadata_field(field);
}
// Returns this namespace's metadata annotations as an array, evaluating them
// from kernel on first access and caching the result on the metadata field.
// The precompiled runtime keeps no metadata and always returns an empty
// array.
RawObject* Namespace::GetMetadata() const {
#if defined(DART_PRECOMPILED_RUNTIME)
return Object::empty_array().raw();
#else
Field& field = Field::Handle(metadata_field());
if (field.IsNull()) {
// There is no metadata for this object.
return Object::empty_array().raw();
}
Object& metadata = Object::Handle();
metadata = field.StaticValue();
// empty_array is the "not yet evaluated" sentinel set by AddMetadata.
if (field.StaticValue() == Object::empty_array().raw()) {
if (field.kernel_offset() > 0) {
metadata =
kernel::EvaluateMetadata(field, /* is_annotations_offset = */ true);
} else {
UNREACHABLE();
}
if (metadata.IsArray()) {
// Cache the evaluated annotations so later reads skip re-evaluation.
ASSERT(Array::Cast(metadata).raw() != Object::empty_array().raw());
field.SetStaticValue(Array::Cast(metadata), true);
}
}
return metadata.raw();
#endif // defined(DART_PRECOMPILED_RUNTIME)
}
const char* Namespace::ToCString() const {
const Library& lib = Library::Handle(library());
return OS::SCreate(Thread::Current()->zone(), "Namespace for library '%s'",
lib.ToCString());
}
// Returns true if |name| is filtered out by this namespace's show/hide
// combinators. Getter and setter names are matched by their plain name.
bool Namespace::HidesName(const String& name) const {
// Quick check for common case with no combinators.
if (hide_names() == show_names()) {
ASSERT(hide_names() == Array::null());
return false;
}
const String* plain_name = &name;
// NOTE: String::Handle() yields a zone-allocated handle, so taking its
// address here is safe beyond this statement.
if (Field::IsGetterName(name)) {
plain_name = &String::Handle(Field::NameFromGetter(name));
} else if (Field::IsSetterName(name)) {
plain_name = &String::Handle(Field::NameFromSetter(name));
}
// Check whether the name is in the list of explicitly hidden names.
if (hide_names() != Array::null()) {
const Array& names = Array::Handle(hide_names());
String& hidden = String::Handle();
intptr_t num_names = names.Length();
for (intptr_t i = 0; i < num_names; i++) {
hidden ^= names.At(i);
if (plain_name->Equals(hidden)) {
return true;
}
}
}
// The name is not explicitly hidden. Now check whether it is in the
// list of explicitly visible names, if there is one.
if (show_names() != Array::null()) {
const Array& names = Array::Handle(show_names());
String& shown = String::Handle();
intptr_t num_names = names.Length();
for (intptr_t i = 0; i < num_names; i++) {
shown ^= names.At(i);
if (plain_name->Equals(shown)) {
return false;
}
}
// There is a list of visible names. The name we're looking for is not
// contained in the list, so it is hidden.
return true;
}
// The name is not filtered out.
return false;
}
// Look up object with given name in library and filter out hidden
// names. Also look up getters and setters. |trail| records library indices
// visited while chasing re-exports so export cycles terminate.
RawObject* Namespace::Lookup(const String& name,
ZoneGrowableArray<intptr_t>* trail) const {
Zone* zone = Thread::Current()->zone();
const Library& lib = Library::Handle(zone, library());
if (trail != NULL) {
// Look for cycle in reexport graph.
for (int i = 0; i < trail->length(); i++) {
if (trail->At(i) == lib.index()) {
// Cycle found: invalidate the trail entries past the repeat point.
for (int j = i + 1; j < trail->length(); j++) {
(*trail)[j] = -1;
}
return Object::null();
}
}
}
intptr_t ignore = 0;
// Lookup the name in the library's symbols.
Object& obj = Object::Handle(zone, lib.LookupEntry(name, &ignore));
// For a plain name, also try the corresponding getter and setter entries.
if (!Field::IsGetterName(name) && !Field::IsSetterName(name) &&
(obj.IsNull() || obj.IsLibraryPrefix())) {
String& accessor_name = String::Handle(zone);
accessor_name ^= Field::LookupGetterSymbol(name);
if (!accessor_name.IsNull()) {
obj = lib.LookupEntry(accessor_name, &ignore);
}
if (obj.IsNull()) {
accessor_name ^= Field::LookupSetterSymbol(name);
if (!accessor_name.IsNull()) {
obj = lib.LookupEntry(accessor_name, &ignore);
}
}
}
// Library prefixes are not exported.
if (obj.IsNull() || obj.IsLibraryPrefix()) {
// Lookup in the re-exported symbols.
obj = lib.LookupReExport(name, trail);
if (obj.IsNull() && !Field::IsSetterName(name)) {
// LookupReExport() only returns objects that match the given name.
// If there is no field/func/getter, try finding a setter.
const String& setter_name =
String::Handle(zone, Field::LookupSetterSymbol(name));
if (!setter_name.IsNull()) {
obj = lib.LookupReExport(setter_name, trail);
}
}
}
if (obj.IsNull() || HidesName(name) || obj.IsLibraryPrefix()) {
return Object::null();
}
return obj.raw();
}
// Allocates an uninitialized Namespace in old space.
RawNamespace* Namespace::New() {
  ASSERT(Object::namespace_class() != Class::null());
  return reinterpret_cast<RawNamespace*>(Object::Allocate(
      Namespace::kClassId, Namespace::InstanceSize(), Heap::kOld));
}
// Creates a namespace over |library| with optional show/hide combinator
// lists; a null list means "no combinator", never an empty list.
RawNamespace* Namespace::New(const Library& library,
const Array& show_names,
const Array& hide_names) {
ASSERT(show_names.IsNull() || (show_names.Length() > 0));
ASSERT(hide_names.IsNull() || (hide_names.Length() > 0));
const Namespace& result = Namespace::Handle(Namespace::New());
result.StorePointer(&result.raw_ptr()->library_, library.raw());
result.StorePointer(&result.raw_ptr()->show_names_, show_names.raw());
result.StorePointer(&result.raw_ptr()->hide_names_, hide_names.raw());
return result.raw();
}
// Allocates an uninitialized KernelProgramInfo in old space.
RawKernelProgramInfo* KernelProgramInfo::New() {
  return reinterpret_cast<RawKernelProgramInfo*>(
      Object::Allocate(KernelProgramInfo::kClassId,
                       KernelProgramInfo::InstanceSize(), Heap::kOld));
}
// Creates a KernelProgramInfo wiring together the kernel binary's component
// tables (strings, canonical names, metadata, constants) plus the script
// list and the library/class lookup caches.
RawKernelProgramInfo* KernelProgramInfo::New(
const TypedData& string_offsets,
const ExternalTypedData& string_data,
const TypedData& canonical_names,
const ExternalTypedData& metadata_payloads,
const ExternalTypedData& metadata_mappings,
const ExternalTypedData& constants_table,
const Array& scripts,
const Array& libraries_cache,
const Array& classes_cache) {
const KernelProgramInfo& info =
KernelProgramInfo::Handle(KernelProgramInfo::New());
info.StorePointer(&info.raw_ptr()->string_offsets_, string_offsets.raw());
info.StorePointer(&info.raw_ptr()->string_data_, string_data.raw());
info.StorePointer(&info.raw_ptr()->canonical_names_, canonical_names.raw());
info.StorePointer(&info.raw_ptr()->metadata_payloads_,
metadata_payloads.raw());
info.StorePointer(&info.raw_ptr()->metadata_mappings_,
metadata_mappings.raw());
info.StorePointer(&info.raw_ptr()->scripts_, scripts.raw());
info.StorePointer(&info.raw_ptr()->constants_table_, constants_table.raw());
info.StorePointer(&info.raw_ptr()->libraries_cache_, libraries_cache.raw());
info.StorePointer(&info.raw_ptr()->classes_cache_, classes_cache.raw());
return info.raw();
}
const char* KernelProgramInfo::ToCString() const {
return OS::SCreate(Thread::Current()->zone(), "[KernelProgramInfo]");
}
// Returns the script at |index| in this component's script list.
RawScript* KernelProgramInfo::ScriptAt(intptr_t index) const {
  const Array& all_scripts = Array::Handle(scripts());
  return Script::RawCast(all_scripts.At(index));
}
// Simple pointer-field setters for KernelProgramInfo.
void KernelProgramInfo::set_constants(const Array& constants) const {
StorePointer(&raw_ptr()->constants_, constants.raw());
}
void KernelProgramInfo::set_constants_table(
const ExternalTypedData& value) const {
StorePointer(&raw_ptr()->constants_table_, value.raw());
}
void KernelProgramInfo::set_potential_natives(
const GrowableObjectArray& candidates) const {
StorePointer(&raw_ptr()->potential_natives_, candidates.raw());
}
void KernelProgramInfo::set_potential_pragma_functions(
const GrowableObjectArray& candidates) const {
StorePointer(&raw_ptr()->potential_pragma_functions_, candidates.raw());
}
void KernelProgramInfo::set_libraries_cache(const Array& cache) const {
StorePointer(&raw_ptr()->libraries_cache_, cache.raw());
}
// Hash map keyed by Smi (kernel canonical-name index).
typedef UnorderedHashMap<SmiTraits> IntHashMap;
// Returns the library cached under |name_index|, or null when absent.
// Uses the thread's reusable handles to avoid zone allocation.
RawLibrary* KernelProgramInfo::LookupLibrary(Thread* thread,
const Smi& name_index) const {
REUSABLE_ARRAY_HANDLESCOPE(thread);
REUSABLE_LIBRARY_HANDLESCOPE(thread);
REUSABLE_OBJECT_HANDLESCOPE(thread);
REUSABLE_SMI_HANDLESCOPE(thread);
Array& data = thread->ArrayHandle();
Library& result = thread->LibraryHandle();
Object& key = thread->ObjectHandle();
Smi& value = thread->SmiHandle();
{
data ^= libraries_cache();
ASSERT(!data.IsNull());
IntHashMap table(&key, &value, &data);
result ^= table.GetOrNull(name_index);
// Release without storing back: lookup does not modify the table.
table.Release();
}
return result.raw();
}
// Returns the library cached under |name_index|, inserting |lib| (while
// holding the cache mutex) when no entry exists yet. The first probe runs
// without the mutex so the common hit path stays lock-free.
RawLibrary* KernelProgramInfo::InsertLibrary(Thread* thread,
const Smi& name_index,
const Library& lib) const {
REUSABLE_ARRAY_HANDLESCOPE(thread);
REUSABLE_LIBRARY_HANDLESCOPE(thread);
REUSABLE_OBJECT_HANDLESCOPE(thread);
REUSABLE_SMI_HANDLESCOPE(thread);
Array& data = thread->ArrayHandle();
Library& result = thread->LibraryHandle();
Object& key = thread->ObjectHandle();
Smi& value = thread->SmiHandle();
{
data ^= libraries_cache();
ASSERT(!data.IsNull());
IntHashMap table(&key, &value, &data);
result ^= table.GetOrNull(name_index);
table.Release();
}
if (result.IsNull()) {
Isolate* isolate = thread->isolate();
SafepointMutexLocker ml(isolate->kernel_data_lib_cache_mutex());
// Re-read the cache under the lock: another thread may have inserted.
data ^= libraries_cache();
ASSERT(!data.IsNull());
IntHashMap table(&key, &value, &data);
result ^= table.InsertOrGetValue(name_index, lib);
set_libraries_cache(table.Release());
}
return result.raw();
}
void KernelProgramInfo::set_classes_cache(const Array& cache) const {
StorePointer(&raw_ptr()->classes_cache_, cache.raw());
}
// Returns the class cached under |name_index|, or null when absent.
// Uses the thread's reusable handles to avoid zone allocation.
RawClass* KernelProgramInfo::LookupClass(Thread* thread,
const Smi& name_index) const {
REUSABLE_ARRAY_HANDLESCOPE(thread);
REUSABLE_CLASS_HANDLESCOPE(thread);
REUSABLE_OBJECT_HANDLESCOPE(thread);
REUSABLE_SMI_HANDLESCOPE(thread);
Array& data = thread->ArrayHandle();
Class& result = thread->ClassHandle();
Object& key = thread->ObjectHandle();
Smi& value = thread->SmiHandle();
{
data ^= classes_cache();
ASSERT(!data.IsNull());
IntHashMap table(&key, &value, &data);
result ^= table.GetOrNull(name_index);
// Release without storing back: lookup does not modify the table.
table.Release();
}
return result.raw();
}
// Returns the class cached under |name_index|, inserting |klass| (while
// holding the cache mutex) when no entry exists yet. The first probe runs
// without the mutex so the common hit path stays lock-free.
RawClass* KernelProgramInfo::InsertClass(Thread* thread,
const Smi& name_index,
const Class& klass) const {
REUSABLE_ARRAY_HANDLESCOPE(thread);
REUSABLE_CLASS_HANDLESCOPE(thread);
REUSABLE_OBJECT_HANDLESCOPE(thread);
REUSABLE_SMI_HANDLESCOPE(thread);
Array& data = thread->ArrayHandle();
Class& result = thread->ClassHandle();
Object& key = thread->ObjectHandle();
Smi& value = thread->SmiHandle();
{
data ^= classes_cache();
ASSERT(!data.IsNull());
IntHashMap table(&key, &value, &data);
result ^= table.GetOrNull(name_index);
table.Release();
}
if (result.IsNull()) {
Isolate* isolate = thread->isolate();
SafepointMutexLocker ml(isolate->kernel_data_class_cache_mutex());
// Re-read the cache under the lock: another thread may have inserted.
data ^= classes_cache();
ASSERT(!data.IsNull());
IntHashMap table(&key, &value, &data);
result ^= table.InsertOrGetValue(name_index, klass);
set_classes_cache(table.Release());
}
return result.raw();
}
// Finalizes every class in every registered library and compiles all of
// their functions, then compiles any closure functions accumulated in the
// object store. Returns the first error encountered, or null on success.
// When |ignore_error| is true, errors are skipped and work continues.
RawError* Library::CompileAll(bool ignore_error /* = false */) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Error& error = Error::Handle(zone);
  const GrowableObjectArray& libs = GrowableObjectArray::Handle(
      Isolate::Current()->object_store()->libraries());
  Library& lib = Library::Handle(zone);
  Class& cls = Class::Handle(zone);
  // Index with intptr_t: GrowableObjectArray::Length() returns intptr_t and
  // an 'int' index could truncate on 64-bit targets.
  for (intptr_t i = 0; i < libs.Length(); i++) {
    lib ^= libs.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();
      error = cls.EnsureIsFinalized(thread);
      if (!error.IsNull()) {
        if (ignore_error) continue;
        return error.raw();
      }
      error = Compiler::CompileAllFunctions(cls);
      if (!error.IsNull()) {
        if (ignore_error) continue;
        return error.raw();
      }
    }
  }
  // Inner functions get added to the closures array. As part of compilation
  // more closures can be added to the end of the array. Compile all the
  // closures until we have reached the end of the "worklist".
  Object& result = Object::Handle(zone);
  const GrowableObjectArray& closures = GrowableObjectArray::Handle(
      zone, Isolate::Current()->object_store()->closure_functions());
  Function& func = Function::Handle(zone);
  for (intptr_t i = 0; i < closures.Length(); i++) {
    func ^= closures.At(i);
    if (!func.HasCode()) {
      result = Compiler::CompileFunction(thread, func);
      if (result.IsError()) {
        if (ignore_error) continue;
        return Error::Cast(result).raw();
      }
    }
  }
  return Error::null();
}
#if !defined(DART_PRECOMPILED_RUNTIME)
// Reads bytecode for all functions of every class in every registered
// library, then for all accumulated closure functions. Must run on the
// mutator thread. Returns the first error encountered, or null.
RawError* Library::ReadAllBytecode() {
Thread* thread = Thread::Current();
ASSERT(thread->IsMutatorThread());
Zone* zone = thread->zone();
Error& error = Error::Handle(zone);
const GrowableObjectArray& libs = GrowableObjectArray::Handle(
Isolate::Current()->object_store()->libraries());
Library& lib = Library::Handle(zone);
Class& cls = Class::Handle(zone);
for (int i = 0; i < libs.Length(); i++) {
lib ^= libs.At(i);
ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
while (it.HasNext()) {
cls = it.GetNextClass();
error = cls.EnsureIsFinalized(thread);
if (!error.IsNull()) {
return error.raw();
}
error = Compiler::ReadAllBytecode(cls);
if (!error.IsNull()) {
return error.raw();
}
}
}
// Inner functions get added to the closures array. As part of compilation
// more closures can be added to the end of the array. Compile all the
// closures until we have reached the end of the "worklist".
const GrowableObjectArray& closures = GrowableObjectArray::Handle(
zone, Isolate::Current()->object_store()->closure_functions());
Function& func = Function::Handle(zone);
for (int i = 0; i < closures.Length(); i++) {
func ^= closures.At(i);
if (func.IsBytecodeAllowed(zone) && !func.HasBytecode()) {
RawError* error =
kernel::BytecodeReader::ReadFunctionBytecode(thread, func);
if (error != Error::null()) {
return error;
}
}
}
return Error::null();
}
#endif // !defined(DART_PRECOMPILED_RUNTIME)
// Return Function::null() if function does not exist in libs.
// |class_name| == "::" selects top-level functions; a |function_name|
// beginning with '.' is treated as a named constructor and prefixed with the
// class name.
RawFunction* Library::GetFunction(const GrowableArray<Library*>& libs,
const char* class_name,
const char* function_name) {
Thread* thread = Thread::Current();
Zone* zone = thread->zone();
Function& func = Function::Handle(zone);
String& class_str = String::Handle(zone);
String& func_str = String::Handle(zone);
Class& cls = Class::Handle(zone);
for (intptr_t l = 0; l < libs.length(); l++) {
const Library& lib = *libs[l];
if (strcmp(class_name, "::") == 0) {
func_str = Symbols::New(thread, function_name);
func = lib.LookupFunctionAllowPrivate(func_str);
} else {
class_str = String::New(class_name);
cls = lib.LookupClassAllowPrivate(class_str);
if (!cls.IsNull()) {
func_str = String::New(function_name);
if (function_name[0] == '.') {
func_str = String::Concat(class_str, func_str);
}
func = cls.LookupFunctionAllowPrivate(func_str);
}
}
if (!func.IsNull()) {
return func.raw();
}
}
return Function::null();
}
// Returns a closure for the top-level function |name| in this library (also
// checking re-exports). If only a getter named |name| exists, invokes it and
// returns its result instead. Returns Closure::null() when nothing matches.
RawObject* Library::GetFunctionClosure(const String& name) const {
Thread* thread = Thread::Current();
Zone* zone = thread->zone();
Function& func = Function::Handle(zone, LookupFunctionAllowPrivate(name));
if (func.IsNull()) {
// Check whether the function is reexported into the library.
const Object& obj = Object::Handle(zone, LookupReExport(name));
if (obj.IsFunction()) {
func ^= obj.raw();
} else {
// Check if there is a getter of 'name', in which case invoke it
// and return the result.
const String& getter_name = String::Handle(zone, Field::GetterName(name));
func = LookupFunctionAllowPrivate(getter_name);
if (func.IsNull()) {
return Closure::null();
}
// Invoke the getter and return the result.
return DartEntry::InvokeFunction(func, Object::empty_array());
}
}
func = func.ImplicitClosureFunction();
return func.ImplicitStaticClosure();
}
#if defined(DART_NO_SNAPSHOT) && !defined(PRODUCT)
// Verifies the source fingerprints of all recognized/intrinsified functions
// and factories against the checked-in expected values, searching the
// library sets listed below. FATALs if any function is missing or any
// fingerprint mismatches.
void Library::CheckFunctionFingerprints() {
GrowableArray<Library*> all_libs;
Function& func = Function::Handle();
bool has_errors = false;
#define CHECK_FINGERPRINTS(class_name, function_name, dest, fp) \
func = GetFunction(all_libs, #class_name, #function_name); \
if (func.IsNull()) { \
has_errors = true; \
OS::PrintErr("Function not found %s.%s\n", #class_name, #function_name); \
} else { \
CHECK_FINGERPRINT3(func, class_name, function_name, dest, fp); \
}
#define CHECK_FINGERPRINTS2(class_name, function_name, dest, type, fp) \
CHECK_FINGERPRINTS(class_name, function_name, dest, fp)
all_libs.Add(&Library::ZoneHandle(Library::CoreLibrary()));
CORE_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);
CORE_INTEGER_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);
// The "other recognized" and inline white/black lists may reference
// functions from any of these libraries, so search them all.
all_libs.Add(&Library::ZoneHandle(Library::MathLibrary()));
all_libs.Add(&Library::ZoneHandle(Library::TypedDataLibrary()));
all_libs.Add(&Library::ZoneHandle(Library::CollectionLibrary()));
all_libs.Add(&Library::ZoneHandle(Library::InternalLibrary()));
OTHER_RECOGNIZED_LIST(CHECK_FINGERPRINTS2);
INLINE_WHITE_LIST(CHECK_FINGERPRINTS);
INLINE_BLACK_LIST(CHECK_FINGERPRINTS);
POLYMORPHIC_TARGET_LIST(CHECK_FINGERPRINTS);
// The remaining lists are each scoped to a single library.
all_libs.Clear();
all_libs.Add(&Library::ZoneHandle(Library::DeveloperLibrary()));
DEVELOPER_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);
all_libs.Clear();
all_libs.Add(&Library::ZoneHandle(Library::MathLibrary()));
MATH_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);
all_libs.Clear();
all_libs.Add(&Library::ZoneHandle(Library::TypedDataLibrary()));
TYPED_DATA_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);
#undef CHECK_FINGERPRINTS
#undef CHECK_FINGERPRINTS2
#define CHECK_FACTORY_FINGERPRINTS(symbol, class_name, factory_name, cid, fp) \
func = GetFunction(all_libs, #class_name, #factory_name); \
if (func.IsNull()) { \
has_errors = true; \
OS::PrintErr("Function not found %s.%s\n", #class_name, #factory_name); \
} else { \
CHECK_FINGERPRINT2(func, symbol, cid, fp); \
}
all_libs.Add(&Library::ZoneHandle(Library::CoreLibrary()));
RECOGNIZED_LIST_FACTORY_LIST(CHECK_FACTORY_FINGERPRINTS);
#undef CHECK_FACTORY_FINGERPRINTS
if (has_errors) {
FATAL("Fingerprint mismatch.");
}
}
#endif // defined(DART_NO_SNAPSHOT) && !defined(PRODUCT).
// Allocates an Instructions object of |size| payload bytes in code space.
// |size| must already have been validated against kMaxElements by callers.
RawInstructions* Instructions::New(intptr_t size,
bool has_single_entry_point,
uword unchecked_entrypoint_pc_offset) {
ASSERT(size >= 0);
ASSERT(Object::instructions_class() != Class::null());
if (size < 0 || size > kMaxElements) {
// This should be caught before we reach here.
FATAL1("Fatal error in Instructions::New: invalid size %" Pd "\n", size);
}
Instructions& result = Instructions::Handle();
{
uword aligned_size = Instructions::InstanceSize(size);
RawObject* raw =
Object::Allocate(Instructions::kClassId, aligned_size, Heap::kCode);
// Initialize header fields without allowing a GC to observe the object in
// a partially-constructed state.
NoSafepointScope no_safepoint;
result ^= raw;
result.SetSize(size);
result.SetHasSingleEntryPoint(has_single_entry_point);
result.set_stats(nullptr);
result.set_unchecked_entrypoint_pc_offset(unchecked_entrypoint_pc_offset);
}
return result.raw();
}
// Instructions carry no printable identity; return a static description.
const char* Instructions::ToCString() const {
return "Instructions";
}
// Encode integer |value| in SLEB128 format and store into |data|.
static void EncodeSLEB128(GrowableArray<uint8_t>* data, intptr_t value) {
  while (true) {
    uint8_t byte = value & 0x7f;
    value >>= 7;
    // Encoding stops once the remaining value is pure sign extension and the
    // sign bit (0x40) of the current byte agrees with it.
    const bool sign_bit_clear = (byte & 0x40) == 0;
    const bool is_last = (value == 0 && sign_bit_clear) ||
                         (value == static_cast<intptr_t>(-1) &&
                          !sign_bit_clear);
    if (!is_last) {
      byte |= 0x80;  // Continuation bit: more bytes follow.
    }
    data->Add(byte);
    if (is_last) {
      return;
    }
  }
}
// Decode integer in SLEB128 format from |data| and update |byte_index|.
static intptr_t DecodeSLEB128(const uint8_t* data,
const intptr_t data_length,
intptr_t* byte_index) {
ASSERT(*byte_index < data_length);
uword shift = 0;
intptr_t value = 0;
uint8_t part = 0;
do {
part = data[(*byte_index)++];
value |= static_cast<intptr_t>(part & 0x7f) << shift;
shift += 7;
} while ((part & 0x80) != 0);
if ((shift < (sizeof(value) * 8)) && ((part & 0x40) != 0)) {
value |= static_cast<intptr_t>(kUwordMax << shift);
}
return value;
}
// Encode integer in SLEB128 format, appending the bytes to |data|.
void PcDescriptors::EncodeInteger(GrowableArray<uint8_t>* data,
                                  intptr_t value) {
  // Plain call: the callee is void, so `return EncodeSLEB128(...)` (as the
  // code previously read) returned a void expression — legal but confusing.
  EncodeSLEB128(data, value);
}
// Decode the SLEB128-encoded integer at |*byte_index| and advance
// |byte_index| to the start of the next one.
intptr_t PcDescriptors::DecodeInteger(intptr_t* byte_index) const {
  // Hold off the GC while we keep a raw interior pointer into this object.
  NoSafepointScope no_safepoint;
  const uint8_t* bytes = raw_ptr()->data();
  return DecodeSLEB128(bytes, Length(), byte_index);
}
// Allocates a new ObjectPool with |len| entries in old space. Every entry
// starts as a patchable immediate; callers install the real values/types.
RawObjectPool* ObjectPool::New(intptr_t len) {
  ASSERT(Object::object_pool_class() != Class::null());
  if (len < 0 || len > kMaxElements) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in ObjectPool::New: invalid length %" Pd "\n", len);
  }
  ObjectPool& result = ObjectPool::Handle();
  {
    uword size = ObjectPool::InstanceSize(len);
    RawObject* raw = Object::Allocate(ObjectPool::kClassId, size, Heap::kOld);
    // No GC between allocation and initialization of length/entry types.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(len);
    for (intptr_t i = 0; i < len; i++) {
      result.SetTypeAt(i, ObjectPool::kImmediate, ObjectPool::kPatchable);
    }
  }
  return result.raw();
}
// Short debug description: just the pool length.
const char* ObjectPool::ToCString() const {
  return Thread::Current()->zone()->PrintToString("ObjectPool len:%" Pd,
                                                  Length());
}
void ObjectPool::DebugPrint() const {
THR_Print("Object Pool: 0x%" Px "{\n", reinterpret_cast<uword>(raw()));
for (intptr_t i = 0; i < Length(); i++) {
intptr_t offset = OffsetFromIndex(i);
THR_Print(" %" Pd " PP+0x%" Px ": ", i, offset);
if ((TypeAt(i) == kTaggedObject) || (TypeAt(i) == kNativeEntryData)) {
RawObject* obj = ObjectAt(i);
THR_Print("0x%" Px " %s (obj)\n", reinterpret_cast<uword>(obj),
Object::Handle(obj).ToCString());
} else if (TypeAt(i) == kNativeFunction) {
THR_Print("0x%" Px " (native function)\n", RawValueAt(i));
} else if (TypeAt(i) == kNativeFunctionWrapper) {
THR_Print("0x%" Px " (native function wrapper)\n", RawValueAt(i));
} else {
THR_Print("0x%" Px " (raw)\n", RawValueAt(i));
}
}
THR_Print("}\n");
}
// Length in bytes of the SLEB128-encoded descriptor stream (not the number
// of descriptor entries).
intptr_t PcDescriptors::Length() const {
  return raw_ptr()->length_;
}
// Stores the byte length of the encoded stream. Non-pointer field, so no
// write barrier is required.
void PcDescriptors::SetLength(intptr_t value) const {
  StoreNonPointer(&raw_ptr()->length_, value);
}
// Copies the delta-encoded descriptor bytes into this object's payload.
// The object must already be sized to hold them (see PcDescriptors::New).
void PcDescriptors::CopyData(GrowableArray<uint8_t>* delta_encoded_data) {
  // Raw interior pointer below — keep the GC out until the copy is done.
  NoSafepointScope no_safepoint;
  uint8_t* dest = UnsafeMutableNonPointer(&raw_ptr()->data()[0]);
  const intptr_t n = delta_encoded_data->length();
  for (intptr_t i = 0; i < n; ++i) {
    dest[i] = (*delta_encoded_data)[i];
  }
}
// Allocates a PcDescriptors object sized for |data| and copies the
// SLEB128-delta-encoded bytes into it.
RawPcDescriptors* PcDescriptors::New(GrowableArray<uint8_t>* data) {
  ASSERT(Object::pc_descriptors_class() != Class::null());
  Thread* thread = Thread::Current();
  PcDescriptors& result = PcDescriptors::Handle(thread->zone());
  {
    uword size = PcDescriptors::InstanceSize(data->length());
    RawObject* raw =
        Object::Allocate(PcDescriptors::kClassId, size, Heap::kOld);
    // No GC between allocation and full initialization.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(data->length());
    result.CopyData(data);
  }
  return result.raw();
}
// Allocates a PcDescriptors object with room for |length| encoded bytes.
// The payload is left uninitialized; the caller fills it in afterwards.
RawPcDescriptors* PcDescriptors::New(intptr_t length) {
  ASSERT(Object::pc_descriptors_class() != Class::null());
  Thread* thread = Thread::Current();
  PcDescriptors& result = PcDescriptors::Handle(thread->zone());
  {
    uword size = PcDescriptors::InstanceSize(length);
    RawObject* raw =
        Object::Allocate(PcDescriptors::kClassId, size, Heap::kOld);
    // No GC between allocation and header initialization.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(length);
  }
  return result.raw();
}
// Returns a human-readable name for |kind|. The literals are padded so that
// columns in PcDescriptors::ToCString() output line up; do not trim them.
const char* PcDescriptors::KindAsStr(RawPcDescriptors::Kind kind) {
  switch (kind) {
    case RawPcDescriptors::kDeopt:
      return "deopt ";
    case RawPcDescriptors::kIcCall:
      return "ic-call ";
    case RawPcDescriptors::kUnoptStaticCall:
      return "unopt-call ";
    case RawPcDescriptors::kRuntimeCall:
      return "runtime-call ";
    case RawPcDescriptors::kOsrEntry:
      return "osr-entry ";
    case RawPcDescriptors::kRewind:
      return "rewind ";
    case RawPcDescriptors::kOther:
      return "other ";
    case RawPcDescriptors::kAnyKind:
      // kAnyKind is a query mask for iteration, never a stored kind.
      UNREACHABLE();
      break;
  }
  UNREACHABLE();
  return "";
}
// Prints the column header matching the rows produced by ToCString().
void PcDescriptors::PrintHeaderString() {
  // Width of a full hex address: 4 bits per hex digit, plus 2 for "0x".
  const int pc_width = (kBitsPerWord / 4) + 2;
  // "*" in the format string reads the field width from the argument list.
  THR_Print("%-*s\tkind \tdeopt-id\ttok-ix\ttry-ix\n", pc_width, "pc");
}
// Renders every descriptor as one line of text in a zone-allocated buffer.
// Uses the classic two-pass snprintf pattern: measure, allocate, format.
const char* PcDescriptors::ToCString() const {
  // "*" in a printf format specifier tells it to read the field width from
  // the printf argument list.
#define FORMAT "%#-*" Px "\t%s\t%" Pd "\t\t%s\t%" Pd "\n"
  if (Length() == 0) {
    return "empty PcDescriptors\n";
  }
  // 4 bits per hex digit.
  const int addr_width = kBitsPerWord / 4;
  // Pass 1: compute the buffer size required. SNPrint with a NULL buffer
  // returns the character count that would be written (excluding the NUL).
  intptr_t len = 1;  // Trailing '\0'.
  {
    Iterator iter(*this, RawPcDescriptors::kAnyKind);
    while (iter.MoveNext()) {
      len += Utils::SNPrint(NULL, 0, FORMAT, addr_width, iter.PcOffset(),
                            KindAsStr(iter.Kind()), iter.DeoptId(),
                            iter.TokenPos().ToCString(), iter.TryIndex());
    }
  }
  // Allocate the buffer.
  char* buffer = Thread::Current()->zone()->Alloc<char>(len);
  // Pass 2: format the same rows into the buffer.
  intptr_t index = 0;
  Iterator iter(*this, RawPcDescriptors::kAnyKind);
  while (iter.MoveNext()) {
    index +=
        Utils::SNPrint((buffer + index), (len - index), FORMAT, addr_width,
                       iter.PcOffset(), KindAsStr(iter.Kind()), iter.DeoptId(),
                       iter.TokenPos().ToCString(), iter.TryIndex());
  }
  return buffer;
#undef FORMAT
}
// Verify assumptions (in debug mode only).
// - No two deopt descriptors have the same deoptimization id.
// - No two ic-call descriptors have the same deoptimization id (type feedback).
// A function without unique ids is marked as non-optimizable (e.g., because of
// finally blocks).
void PcDescriptors::Verify(const Function& function) const {
#if defined(DEBUG)
  // Only check ids for unoptimized code that is optimizable.
  if (!function.IsOptimizable()) {
    return;
  }
  // Pass 1: find the largest deopt id so the bit vectors below can be sized
  // to hold every id seen in the stream.
  intptr_t max_deopt_id = 0;
  Iterator max_iter(*this,
                    RawPcDescriptors::kDeopt | RawPcDescriptors::kIcCall);
  while (max_iter.MoveNext()) {
    if (max_iter.DeoptId() > max_deopt_id) {
      max_deopt_id = max_iter.DeoptId();
    }
  }
  // Pass 2: record each id in a per-kind set and assert it was not seen
  // before within that kind.
  Zone* zone = Thread::Current()->zone();
  BitVector* deopt_ids = new (zone) BitVector(zone, max_deopt_id + 1);
  BitVector* iccall_ids = new (zone) BitVector(zone, max_deopt_id + 1);
  Iterator iter(*this, RawPcDescriptors::kDeopt | RawPcDescriptors::kIcCall);
  while (iter.MoveNext()) {
    // 'deopt_id' is set for kDeopt and kIcCall and must be unique for one kind.
    if (DeoptId::IsDeoptAfter(iter.DeoptId())) {
      // TODO(vegorov): some instructions contain multiple calls and have
      // multiple "after" targets recorded. Right now it is benign but might
      // lead to issues in the future. Fix that and enable verification.
      continue;
    }
    if (iter.Kind() == RawPcDescriptors::kDeopt) {
      ASSERT(!deopt_ids->Contains(iter.DeoptId()));
      deopt_ids->Add(iter.DeoptId());
    } else {
      ASSERT(!iccall_ids->Contains(iter.DeoptId()));
      iccall_ids->Add(iter.DeoptId());
    }
  }
#endif  // DEBUG
}
// Stores the payload length. Non-pointer field, so no write barrier needed.
void CodeSourceMap::SetLength(intptr_t value) const {
  StoreNonPointer(&raw_ptr()->length_, value);
}
// Allocates a CodeSourceMap with room for |length| payload bytes in old
// space. The payload is left for the caller to fill in.
RawCodeSourceMap* CodeSourceMap::New(intptr_t length) {
  ASSERT(Object::code_source_map_class() != Class::null());
  Thread* thread = Thread::Current();
  CodeSourceMap& result = CodeSourceMap::Handle(thread->zone());
  {
    uword size = CodeSourceMap::InstanceSize(length);
    RawObject* raw =
        Object::Allocate(CodeSourceMap::kClassId, size, Heap::kOld);
    // No GC between allocation and header initialization.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(length);
  }
  return result.raw();
}
// The encoded map is not human-readable; report just the class tag.
const char* CodeSourceMap::ToCString() const {
  return "CodeSourceMap";
}
// Reads the bit at |bit_index| from the packed payload.
bool StackMap::GetBit(intptr_t bit_index) const {
  ASSERT(InRange(bit_index));
  const int byte_index = bit_index >> kBitsPerByteLog2;
  const uint8_t mask = 1U << (bit_index & (kBitsPerByte - 1));
  return (raw_ptr()->data()[byte_index] & mask) != 0;
}
// Writes |value| into the bit at |bit_index| of the packed payload.
void StackMap::SetBit(intptr_t bit_index, bool value) const {
  ASSERT(InRange(bit_index));
  const int byte_index = bit_index >> kBitsPerByteLog2;
  const uint8_t mask = 1U << (bit_index & (kBitsPerByte - 1));
  // Raw interior pointer below — keep the GC out while we hold it.
  NoSafepointScope no_safepoint;
  uint8_t* byte_addr = UnsafeMutableNonPointer(&raw_ptr()->data()[byte_index]);
  if (value) {
    *byte_addr |= mask;
  } else {
    *byte_addr &= ~mask;
  }
}
// Allocates a StackMap in old space holding |bmap->Length()| bits copied
// from |bmap|. |pc_offset| ties the map to a PC within its Code object;
// |slow_path_bit_count| records extra bits used on slow paths.
RawStackMap* StackMap::New(intptr_t pc_offset,
                           BitmapBuilder* bmap,
                           intptr_t slow_path_bit_count) {
  ASSERT(Object::stackmap_class() != Class::null());
  ASSERT(bmap != NULL);
  StackMap& result = StackMap::Handle();
  // Guard against integer overflow of the instance size computation.
  intptr_t length = bmap->Length();
  intptr_t payload_size = Utils::RoundUp(length, kBitsPerByte) / kBitsPerByte;
  if ((length < 0) || (length > kMaxUint16) ||
      (payload_size > kMaxLengthInBytes)) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in StackMap::New: invalid length %" Pd "\n", length);
  }
  if ((slow_path_bit_count < 0) || (slow_path_bit_count > kMaxUint16)) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in StackMap::New: invalid slow_path_bit_count %" Pd
           "\n",
           slow_path_bit_count);
  }
  {
    // StackMap data objects are associated with a code object, allocate them
    // in old generation.
    RawObject* raw = Object::Allocate(
        StackMap::kClassId, StackMap::InstanceSize(length), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(length);
  }
  ASSERT(pc_offset >= 0);
  result.SetPcOffset(pc_offset);
  if (payload_size > 0) {
    // Ensure leftover bits are deterministic.
    result.raw()->ptr()->data()[payload_size - 1] = 0;
  }
  for (intptr_t i = 0; i < length; ++i) {
    result.SetBit(i, bmap->Get(i));
  }
  result.SetSlowPathBitCount(slow_path_bit_count);
  return result.raw();
}
// Allocates a StackMap in old space with |length| bits of (uninitialized)
// payload; the caller sets the bits afterwards. Validation mirrors the
// BitmapBuilder overload above.
RawStackMap* StackMap::New(intptr_t length,
                           intptr_t slow_path_bit_count,
                           intptr_t pc_offset) {
  ASSERT(Object::stackmap_class() != Class::null());
  StackMap& result = StackMap::Handle();
  // Guard against integer overflow of the instance size computation.
  intptr_t payload_size = Utils::RoundUp(length, kBitsPerByte) / kBitsPerByte;
  if ((length < 0) || (length > kMaxUint16) ||
      (payload_size > kMaxLengthInBytes)) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in StackMap::New: invalid length %" Pd "\n", length);
  }
  if ((slow_path_bit_count < 0) || (slow_path_bit_count > kMaxUint16)) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in StackMap::New: invalid slow_path_bit_count %" Pd
           "\n",
           slow_path_bit_count);
  }
  {
    // StackMap data objects are associated with a code object, allocate them
    // in old generation.
    RawObject* raw = Object::Allocate(
        StackMap::kClassId, StackMap::InstanceSize(length), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(length);
  }
  ASSERT(pc_offset >= 0);
  result.SetPcOffset(pc_offset);
  result.SetSlowPathBitCount(slow_path_bit_count);
  return result.raw();
}
// Renders "PCOFFSET: bits" with one '0'/'1' character per tracked slot.
const char* StackMap::ToCString() const {
#define FORMAT "%#05x: "
  if (IsNull()) {
    return "{null}";
  } else {
    // +1 accounts for the terminating NUL.
    intptr_t fixed_length = Utils::SNPrint(NULL, 0, FORMAT, PcOffset()) + 1;
    Thread* thread = Thread::Current();
    // Guard against integer overflow in the computation of alloc_size.
    //
    // TODO(kmillikin): We could just truncate the string if someone
    // tries to print a 2 billion plus entry stackmap.
    if (Length() > (kIntptrMax - fixed_length)) {
      FATAL1("Length() is unexpectedly large (%" Pd ")", Length());
    }
    intptr_t alloc_size = fixed_length + Length();
    char* chars = thread->zone()->Alloc<char>(alloc_size);
    intptr_t index = Utils::SNPrint(chars, alloc_size, FORMAT, PcOffset());
    for (intptr_t i = 0; i < Length(); i++) {
      chars[index++] = IsObject(i) ? '1' : '0';
    }
    // index == alloc_size - 1 here, so this write is in bounds.
    chars[index] = '\0';
    return chars;
  }
#undef FORMAT
}
// Returns the name of the variable at |var_index|. The slot must already
// contain a String (checked in debug mode).
RawString* LocalVarDescriptors::GetName(intptr_t var_index) const {
  ASSERT(var_index < Length());
  ASSERT(Object::Handle(*raw()->nameAddrAt(var_index)).IsString());
  return *raw()->nameAddrAt(var_index);
}
// Stores |name| and |info| into slot |var_index|. The name is a pointer
// field (written with a barrier); the VarInfo struct is copied by value.
void LocalVarDescriptors::SetVar(intptr_t var_index,
                                 const String& name,
                                 RawLocalVarDescriptors::VarInfo* info) const {
  ASSERT(var_index < Length());
  ASSERT(!name.IsNull());
  StorePointer(raw()->nameAddrAt(var_index), name.raw());
  raw()->data()[var_index] = *info;
}
// Copies the VarInfo struct for slot |var_index| into |*info|.
void LocalVarDescriptors::GetInfo(intptr_t var_index,
                                  RawLocalVarDescriptors::VarInfo* info) const {
  ASSERT(var_index < Length());
  *info = raw()->data()[var_index];
}
// Formats one variable-info entry into |buffer| (or, with a NULL buffer and
// len 0, just measures the space needed). Returns the character count that
// was (or would be) written, excluding the terminating NUL.
static int PrintVarInfo(char* buffer,
                        int len,
                        intptr_t i,
                        const String& var_name,
                        const RawLocalVarDescriptors::VarInfo& info) {
  const RawLocalVarDescriptors::VarInfoKind kind = info.kind();
  const int32_t index = info.index();
  if (kind == RawLocalVarDescriptors::kContextLevel) {
    // For context-level entries |index| holds the level itself and there is
    // no variable name to print.
    return Utils::SNPrint(buffer, len,
                          "%2" Pd
                          " %-13s level=%-3d"
                          " begin=%-3d end=%d\n",
                          i, LocalVarDescriptors::KindToCString(kind), index,
                          static_cast<int>(info.begin_pos.value()),
                          static_cast<int>(info.end_pos.value()));
  } else if (kind == RawLocalVarDescriptors::kContextVar) {
    // Context variables: |scope_id| is printed as the level, |index| is the
    // slot within the context.
    return Utils::SNPrint(
        buffer, len,
        "%2" Pd
        " %-13s level=%-3d index=%-3d"
        " begin=%-3d end=%-3d name=%s\n",
        i, LocalVarDescriptors::KindToCString(kind), info.scope_id, index,
        static_cast<int>(info.begin_pos.Pos()),
        static_cast<int>(info.end_pos.Pos()), var_name.ToCString());
  } else {
    // Remaining kinds print |scope_id| labeled as a scope.
    return Utils::SNPrint(
        buffer, len,
        "%2" Pd
        " %-13s scope=%-3d index=%-3d"
        " begin=%-3d end=%-3d name=%s\n",
        i, LocalVarDescriptors::KindToCString(kind), info.scope_id, index,
        static_cast<int>(info.begin_pos.Pos()),
        static_cast<int>(info.end_pos.Pos()), var_name.ToCString());
  }
}
// Renders every variable entry as one line of text in a zone-allocated
// buffer, using the two-pass measure-then-format pattern.
const char* LocalVarDescriptors::ToCString() const {
  if (IsNull()) {
    return "LocalVarDescriptors: null";
  }
  if (Length() == 0) {
    return "empty LocalVarDescriptors";
  }
  // Pass 1: measure the required buffer size.
  intptr_t len = 1;  // Trailing '\0'.
  String& var_name = String::Handle();
  for (intptr_t i = 0; i < Length(); i++) {
    RawLocalVarDescriptors::VarInfo info;
    var_name = GetName(i);
    GetInfo(i, &info);
    len += PrintVarInfo(NULL, 0, i, var_name, info);
  }
  // NOTE(review): |len| already counts the terminator, so len + 1
  // over-allocates by one byte; harmless.
  char* buffer = Thread::Current()->zone()->Alloc<char>(len + 1);
  buffer[0] = '\0';
  intptr_t num_chars = 0;
  // Pass 2: format each entry into the buffer.
  for (intptr_t i = 0; i < Length(); i++) {
    RawLocalVarDescriptors::VarInfo info;
    var_name = GetName(i);
    GetInfo(i, &info);
    num_chars += PrintVarInfo((buffer + num_chars), (len - num_chars), i,
                              var_name, info);
  }
  return buffer;
}
// Human-readable tag for a variable-info kind, used by PrintVarInfo().
const char* LocalVarDescriptors::KindToCString(
    RawLocalVarDescriptors::VarInfoKind kind) {
  switch (kind) {
    case RawLocalVarDescriptors::kSavedCurrentContext:
      return "CurrentCtx";
    case RawLocalVarDescriptors::kContextLevel:
      return "ContextLevel";
    case RawLocalVarDescriptors::kContextVar:
      return "ContextVar";
    case RawLocalVarDescriptors::kStackVar:
      return "StackVar";
    default:
      UNIMPLEMENTED();
      return NULL;
  }
}
RawLocalVarDescriptors* LocalVarDescriptors::New(intptr_t num_variables) {
ASSERT(Object::var_descriptors_class() != Class::null());
if (num_variables < 0 || num_variables > kMaxElements) {
// This should be caught before we reach here.
FATAL2(