| // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| // for details. All rights reserved. Use of this source code is governed by a |
| // BSD-style license that can be found in the LICENSE file. |
| |
| #include "vm/object.h" |
| |
| #include <memory> |
| |
| #include "include/dart_api.h" |
| #include "lib/stacktrace.h" |
| #include "platform/assert.h" |
| #include "platform/text_buffer.h" |
| #include "platform/unaligned.h" |
| #include "platform/unicode.h" |
| #include "vm/bit_vector.h" |
| #include "vm/bootstrap.h" |
| #include "vm/canonical_tables.h" |
| #include "vm/class_finalizer.h" |
| #include "vm/closure_functions_cache.h" |
| #include "vm/code_comments.h" |
| #include "vm/code_descriptors.h" |
| #include "vm/code_observers.h" |
| #include "vm/compiler/assembler/disassembler.h" |
| #include "vm/compiler/jit/compiler.h" |
| #include "vm/compiler/runtime_api.h" |
| #include "vm/cpu.h" |
| #include "vm/dart.h" |
| #include "vm/dart_api_state.h" |
| #include "vm/dart_entry.h" |
| #include "vm/datastream.h" |
| #include "vm/debugger.h" |
| #include "vm/deopt_instructions.h" |
| #include "vm/double_conversion.h" |
| #include "vm/elf.h" |
| #include "vm/exceptions.h" |
| #include "vm/growable_array.h" |
| #include "vm/hash.h" |
| #include "vm/hash_table.h" |
| #include "vm/heap/become.h" |
| #include "vm/heap/heap.h" |
| #include "vm/heap/weak_code.h" |
| #include "vm/image_snapshot.h" |
| #include "vm/isolate_reload.h" |
| #include "vm/kernel.h" |
| #include "vm/kernel_binary.h" |
| #include "vm/kernel_isolate.h" |
| #include "vm/kernel_loader.h" |
| #include "vm/native_symbol.h" |
| #include "vm/object_graph.h" |
| #include "vm/object_store.h" |
| #include "vm/parser.h" |
| #include "vm/profiler.h" |
| #include "vm/regexp.h" |
| #include "vm/resolver.h" |
| #include "vm/reusable_handles.h" |
| #include "vm/runtime_entry.h" |
| #include "vm/scopes.h" |
| #include "vm/stack_frame.h" |
| #include "vm/stub_code.h" |
| #include "vm/symbols.h" |
| #include "vm/tags.h" |
| #include "vm/thread_registry.h" |
| #include "vm/timeline.h" |
| #include "vm/type_testing_stubs.h" |
| #include "vm/zone_text_buffer.h" |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| #include "vm/compiler/aot/precompiler.h" |
| #include "vm/compiler/assembler/assembler.h" |
| #include "vm/compiler/backend/code_statistics.h" |
| #include "vm/compiler/compiler_state.h" |
| #include "vm/compiler/frontend/kernel_fingerprints.h" |
| #include "vm/compiler/frontend/kernel_translation_helper.h" |
| #include "vm/compiler/intrinsifier.h" |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| |
| namespace dart { |
| |
| DEFINE_FLAG(uint64_t, |
| huge_method_cutoff_in_code_size, |
| 200000, |
| "Huge method cutoff in unoptimized code size (in bytes)."); |
| DEFINE_FLAG( |
| bool, |
| show_internal_names, |
| false, |
| "Show names of internal classes (e.g. \"OneByteString\") in error messages " |
| "instead of showing the corresponding interface names (e.g. \"String\"). " |
| "Also show legacy nullability in type names."); |
| DEFINE_FLAG(bool, use_lib_cache, false, "Use library name cache"); |
| DEFINE_FLAG(bool, use_exp_cache, false, "Use library exported name cache"); |
| |
| DEFINE_FLAG(bool, |
| remove_script_timestamps_for_test, |
| false, |
| "Remove script timestamps to allow for deterministic testing."); |
| |
| DECLARE_FLAG(bool, dual_map_code); |
| DECLARE_FLAG(bool, intrinsify); |
| DECLARE_FLAG(bool, trace_deoptimization); |
| DECLARE_FLAG(bool, trace_deoptimization_verbose); |
| DECLARE_FLAG(bool, trace_reload); |
| DECLARE_FLAG(bool, write_protect_code); |
| DECLARE_FLAG(bool, precompiled_mode); |
| DECLARE_FLAG(int, max_polymorphic_checks); |
| |
| static const char* const kGetterPrefix = "get:"; |
| static const intptr_t kGetterPrefixLength = strlen(kGetterPrefix); |
| static const char* const kSetterPrefix = "set:"; |
| static const intptr_t kSetterPrefixLength = strlen(kSetterPrefix); |
| static const char* const kInitPrefix = "init:"; |
| static const intptr_t kInitPrefixLength = strlen(kInitPrefix); |
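| // For example, the accessors for a field 'foo' are named "get:foo" and |
| // "set:foo", and its initializer function "init:foo". |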
| |
| // A cache of preinitialized, VM-heap-allocated empty IC data entry arrays. |
| ArrayPtr ICData::cached_icdata_arrays_[kCachedICDataArrayCount]; |
| // A preinitialized, VM-heap-allocated empty subtype test entry array. |
| ArrayPtr SubtypeTestCache::cached_array_; |
| |
| cpp_vtable Object::builtin_vtables_[kNumPredefinedCids] = {}; |
| |
| // These are initialized to a value that will force an illegal memory access |
| // if they are used. |
| #if defined(RAW_NULL) |
| #error RAW_NULL should not be defined. |
| #endif |
| #define RAW_NULL static_cast<uword>(kHeapObjectTag) |
| |
| #define CHECK_ERROR(error) \ |
| { \ |
| ErrorPtr err = (error); \ |
| if (err != Error::null()) { \ |
| return err; \ |
| } \ |
| } |
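| // A hypothetical usage sketch of CHECK_ERROR: propagate a pending error to |
| // the caller, otherwise fall through. |
| // |
| //   ErrorPtr EnsureFinalizedOrError(const Class& cls, Thread* thread) { |
| //     CHECK_ERROR(cls.EnsureIsFinalized(thread)); |
| //     return Error::null(); |
| //   } |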
| |
| #define DEFINE_SHARED_READONLY_HANDLE(Type, name) \ |
| Type* Object::name##_ = nullptr; |
| SHARED_READONLY_HANDLES_LIST(DEFINE_SHARED_READONLY_HANDLE) |
| #undef DEFINE_SHARED_READONLY_HANDLE |
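| // For a list entry such as (Array, empty_array), the macro above expands to |
| //   Array* Object::empty_array_ = nullptr; |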
| |
| ObjectPtr Object::null_ = static_cast<ObjectPtr>(RAW_NULL); |
| BoolPtr Object::true_ = static_cast<BoolPtr>(RAW_NULL); |
| BoolPtr Object::false_ = static_cast<BoolPtr>(RAW_NULL); |
| ClassPtr Object::class_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::dynamic_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::void_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::type_parameters_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::type_arguments_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::patch_class_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::function_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::closure_data_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::ffi_trampoline_data_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::field_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::script_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::library_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::namespace_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::kernel_program_info_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::code_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::instructions_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::instructions_section_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::instructions_table_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::object_pool_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::pc_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::code_source_map_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::compressed_stackmaps_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::var_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::exception_handlers_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::context_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::context_scope_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::sentinel_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::singletargetcache_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::unlinkedcall_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::monomorphicsmiablecall_class_ = |
| static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::icdata_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::megamorphic_cache_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::subtypetestcache_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::loadingunit_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::api_error_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::language_error_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::unhandled_exception_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::unwind_error_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::weak_serialization_reference_class_ = |
| static_cast<ClassPtr>(RAW_NULL); |
| |
| const double MegamorphicCache::kLoadFactor = 0.50; |
| |
| static void AppendSubString(BaseTextBuffer* buffer, |
| const char* name, |
| intptr_t start_pos, |
| intptr_t len) { |
| buffer->Printf("%.*s", static_cast<int>(len), &name[start_pos]); |
| } |
| |
| #if defined(DART_PRECOMPILER) |
| #define PRECOMPILER_WSR_FIELD_DEFINITION(Class, Type, Name) \ |
| Type##Ptr Class::Name() const { \ |
| return Type::RawCast(WeakSerializationReference::Unwrap(untag()->Name())); \ |
| } |
| #else |
| #define PRECOMPILER_WSR_FIELD_DEFINITION(Class, Type, Name) \ |
| void Class::set_##Name(const Type& value) const { \ |
| untag()->set_##Name(value.ptr()); \ |
| } |
| #endif |
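| // For the instantiation below, the DART_PRECOMPILER branch expands to a |
| // getter that unwraps a possible WeakSerializationReference: |
| // |
| //   FunctionPtr ClosureData::parent_function() const { |
| //     return Function::RawCast( |
| //         WeakSerializationReference::Unwrap(untag()->parent_function())); |
| //   } |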
| |
| PRECOMPILER_WSR_FIELD_DEFINITION(ClosureData, Function, parent_function) |
| |
| #undef PRECOMPILER_WSR_FIELD_DEFINITION |
| |
| // Remove private keys, but retain getter/setter/constructor/mixin manglings. |
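| // For example (a sketch of the observable behavior): |
| // |
| //   _MyClass@12345           -> _MyClass |
| //   get:_field@12345         -> get:_field |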
| StringPtr String::RemovePrivateKey(const String& name) { |
| ASSERT(name.IsOneByteString()); |
| GrowableArray<uint8_t> without_key(name.Length()); |
| intptr_t i = 0; |
| while (i < name.Length()) { |
| while (i < name.Length()) { |
| uint8_t c = name.CharAt(i++); |
| if (c == '@') break; |
| without_key.Add(c); |
| } |
| while (i < name.Length()) { |
| uint8_t c = name.CharAt(i); |
| if ((c < '0') || (c > '9')) break; |
| i++; |
| } |
| } |
| |
| return String::FromLatin1(without_key.data(), without_key.length()); |
| } |
| |
| // Takes a VM-internal name and makes it suitable for an external user. |
| // |
| // Examples: |
| // |
| // Internal getter and setter prefixes are changed: |
| // |
| // get:foo -> foo |
| // set:foo -> foo= |
| // |
| // Private name mangling is removed, possibly multiple times: |
| // |
| // _ReceivePortImpl@709387912 -> _ReceivePortImpl |
| // _ReceivePortImpl@709387912._internal@709387912 -> |
| // _ReceivePortImpl._internal |
| // _C@6328321&_E@6328321&_F@6328321 -> _C&_E&_F |
| // |
| // The trailing . on the default constructor name is dropped: |
| // |
| // List. -> List |
| // |
| // And so forth: |
| // |
| // get:foo@6328321 -> foo |
| // _MyClass@6328321. -> _MyClass |
| // _MyClass@6328321.named -> _MyClass.named |
| // |
| // For extension methods, the following demangling is done: |
| // ext|func -> ext.func (instance extension method) |
| // ext|get#prop -> ext.prop (instance extension getter) |
| // ext|set#prop -> ext.prop= (instance extension setter) |
| // ext|sfunc -> ext.sfunc (static extension method) |
| // get:ext|sprop -> ext.sprop (static extension getter) |
| // set:ext|sprop -> ext.sprop= (static extension setter) |
| // |
| const char* String::ScrubName(const String& name, bool is_extension) { |
| Thread* thread = Thread::Current(); |
| NoSafepointScope no_safepoint(thread); |
| Zone* zone = thread->zone(); |
| ZoneTextBuffer printer(zone); |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| if (name.Equals(Symbols::TopLevel())) { |
| // Name of invisible top-level class. |
| return ""; |
| } |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| |
| const char* cname = name.ToCString(); |
| ASSERT(strlen(cname) == static_cast<size_t>(name.Length())); |
| const intptr_t name_len = name.Length(); |
| // First remove all private name mangling, and if 'is_extension' is true, |
| // substitute the first '|' character with '.'. |
| intptr_t start_pos = 0; |
| intptr_t sum_segment_len = 0; |
| for (intptr_t i = 0; i < name_len; i++) { |
| if ((cname[i] == '@') && ((i + 1) < name_len) && (cname[i + 1] >= '0') && |
| (cname[i + 1] <= '9')) { |
| // Append the current segment to the unmangled name. |
| const intptr_t segment_len = i - start_pos; |
| sum_segment_len += segment_len; |
| AppendSubString(&printer, cname, start_pos, segment_len); |
| // Advance until past the name mangling. The private keys are only |
| // numbers, so we skip until the first non-number. |
| i++; // Skip the '@'. |
| while ((i < name.Length()) && (name.CharAt(i) >= '0') && |
| (name.CharAt(i) <= '9')) { |
| i++; |
| } |
| start_pos = i; |
| i--; // Account for for-loop increment. |
| } else if (is_extension && cname[i] == '|') { |
| // Append the current segment to the unmangled name. |
| const intptr_t segment_len = i - start_pos; |
| AppendSubString(&printer, cname, start_pos, segment_len); |
| // Append the '.' character (replaces '|' with '.'). |
| AppendSubString(&printer, ".", 0, 1); |
| start_pos = i + 1; |
| // Account for length of segments added so far. |
| sum_segment_len += (segment_len + 1); |
| } |
| } |
| |
| const char* unmangled_name = nullptr; |
| if (start_pos == 0) { |
| // No name unmangling needed, reuse the name that was passed in. |
| unmangled_name = cname; |
| sum_segment_len = name_len; |
| } else if (name.Length() != start_pos) { |
| // Append the last segment. |
| const intptr_t segment_len = name.Length() - start_pos; |
| sum_segment_len += segment_len; |
| AppendSubString(&printer, cname, start_pos, segment_len); |
| } |
| if (unmangled_name == nullptr) { |
| // Merge unmangled_segments. |
| unmangled_name = printer.buffer(); |
| } |
| |
| printer.Clear(); |
| intptr_t start = 0; |
| intptr_t len = sum_segment_len; |
| bool is_setter = false; |
| if (is_extension) { |
| // First scan till we see the '.' character. |
| for (intptr_t i = 0; i < len; i++) { |
| if (unmangled_name[i] == '.') { |
| intptr_t slen = i + 1; |
| intptr_t plen = slen - start; |
| AppendSubString(&printer, unmangled_name, start, plen); |
| unmangled_name += slen; |
| len -= slen; |
| break; |
| } else if (unmangled_name[i] == ':') { |
| if (start != 0) { |
| // Reset and break. |
| start = 0; |
| is_setter = false; |
| break; |
| } |
| if (unmangled_name[0] == 's') { |
| is_setter = true; |
| } |
| start = i + 1; |
| } |
| } |
| } |
| intptr_t dot_pos = -1; // Position of '.' in the name, if any. |
| start = 0; |
| for (intptr_t i = start; i < len; i++) { |
| if (unmangled_name[i] == ':' || |
| (is_extension && unmangled_name[i] == '#')) { |
| if (start != 0) { |
| // Reset and break. |
| start = 0; |
| dot_pos = -1; |
| break; |
| } |
| ASSERT(start == 0); // Only one : is possible in getters or setters. |
| if (unmangled_name[0] == 's') { |
| ASSERT(!is_setter); |
| is_setter = true; |
| } |
| start = i + 1; |
| } else if (unmangled_name[i] == '.') { |
| if (dot_pos != -1) { |
| // Reset and break. |
| start = 0; |
| dot_pos = -1; |
| break; |
| } |
| ASSERT(dot_pos == -1); // Only one dot is supported. |
| dot_pos = i; |
| } |
| } |
| |
| if (!is_extension && (start == 0) && (dot_pos == -1)) { |
| // This unmangled_name is fine as it is. |
| return unmangled_name; |
| } |
| |
| // Drop the trailing dot if needed. |
| intptr_t end = ((dot_pos + 1) == len) ? dot_pos : len; |
| |
| intptr_t substr_len = end - start; |
| AppendSubString(&printer, unmangled_name, start, substr_len); |
| if (is_setter) { |
| const char* equals = Symbols::Equals().ToCString(); |
| const intptr_t equals_len = strlen(equals); |
| AppendSubString(&printer, equals, 0, equals_len); |
| } |
| |
| return printer.buffer(); |
| } |
| |
| StringPtr String::ScrubNameRetainPrivate(const String& name, |
| bool is_extension) { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| intptr_t len = name.Length(); |
| intptr_t start = 0; |
| intptr_t at_pos = -1; // Position of '@' in the name, if any. |
| bool is_setter = false; |
| |
| String& result = String::Handle(); |
| |
| // If this is an extension name, strip out the leading prefix, e.g. |
| // "ext|func" would strip out "ext|". |
| if (is_extension) { |
| // First scan till we see the '|' character. |
| for (intptr_t i = 0; i < len; i++) { |
| if (name.CharAt(i) == '|') { |
| result = String::SubString(name, start, (i - start)); |
| result = String::Concat(result, Symbols::Dot()); |
| start = i + 1; |
| break; |
| } else if (name.CharAt(i) == ':') { |
| if (start != 0) { |
| // Reset and break. |
| start = 0; |
| is_setter = false; |
| break; |
| } |
| if (name.CharAt(0) == 's') { |
| is_setter = true; |
| } |
| start = i + 1; |
| } |
| } |
| } |
| |
| for (intptr_t i = start; i < len; i++) { |
| if (name.CharAt(i) == ':' || (is_extension && name.CharAt(i) == '#')) { |
| // Only one : is possible in getters or setters. |
| ASSERT(is_extension || start == 0); |
| if (name.CharAt(start) == 's') { |
| is_setter = true; |
| } |
| start = i + 1; |
| } else if (name.CharAt(i) == '@') { |
| // Setters should have only one @ so we know where to put the =. |
| ASSERT(!is_setter || (at_pos == -1)); |
| at_pos = i; |
| } |
| } |
| |
| if (start == 0) { |
| // The name is fine as it is. |
| return name.ptr(); |
| } |
| |
| if (is_extension) { |
| const String& fname = |
| String::Handle(String::SubString(name, start, (len - start))); |
| result = String::Concat(result, fname); |
| } else { |
| result = String::SubString(name, start, (len - start)); |
| } |
| |
| if (is_setter) { |
| // Setters need to end with '='. |
| if (at_pos == -1) { |
| return String::Concat(result, Symbols::Equals()); |
| } else { |
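| // Note: 'at_pos' indexes into 'name', while 'result' has the 4-character |
| // "set:" prefix stripped, hence the 'at_pos - 4' below. |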
| const String& pre_at = |
| String::Handle(String::SubString(result, 0, at_pos - 4)); |
| const String& post_at = |
| String::Handle(String::SubString(name, at_pos, len - at_pos)); |
| result = String::Concat(pre_at, Symbols::Equals()); |
| result = String::Concat(result, post_at); |
| } |
| } |
| |
| return result.ptr(); |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| return name.ptr(); // In AOT, return argument unchanged. |
| } |
| |
| template <typename type> |
| static bool IsSpecialCharacter(type value) { |
| return ((value == '"') || (value == '\n') || (value == '\f') || |
| (value == '\b') || (value == '\t') || (value == '\v') || |
| (value == '\r') || (value == '\\') || (value == '$')); |
| } |
| |
| static inline bool IsAsciiNonprintable(int32_t c) { |
| return ((0 <= c) && (c < 32)) || (c == 127); |
| } |
| |
| static int32_t EscapeOverhead(int32_t c) { |
| if (IsSpecialCharacter(c)) { |
| return 1; // 1 additional byte for the backslash. |
| } else if (IsAsciiNonprintable(c)) { |
| return 3; // 3 additional bytes to encode c as \x00. |
| } |
| return 0; |
| } |
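| // For example, EscapeOverhead('\n') is 1 ('\n' is written as "\\n"), while |
| // EscapeOverhead(0x01) is 3 (0x01 is written as "\\x01"). |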
| |
| template <typename type> |
| static type SpecialCharacter(type value) { |
| if (value == '"') { |
| return '"'; |
| } else if (value == '\n') { |
| return 'n'; |
| } else if (value == '\f') { |
| return 'f'; |
| } else if (value == '\b') { |
| return 'b'; |
| } else if (value == '\t') { |
| return 't'; |
| } else if (value == '\v') { |
| return 'v'; |
| } else if (value == '\r') { |
| return 'r'; |
| } else if (value == '\\') { |
| return '\\'; |
| } else if (value == '$') { |
| return '$'; |
| } |
| UNREACHABLE(); |
| return '\0'; |
| } |
| |
| void Object::InitNullAndBool(IsolateGroup* isolate_group) { |
| // Should only be run by the vm isolate. |
| ASSERT(isolate_group == Dart::vm_isolate_group()); |
| auto heap = isolate_group->heap(); |
| |
| // TODO(iposva): NoSafepointScope needs to be added here. |
| ASSERT(class_class() == null_); |
| |
| // Allocate and initialize the null instance. |
| // 'null_' must be the first object allocated as it is used in allocation to |
| // clear the object. |
| { |
| uword address = heap->Allocate(Instance::InstanceSize(), Heap::kOld); |
| null_ = static_cast<InstancePtr>(address + kHeapObjectTag); |
| // The call below uses 'null_' to initialize itself. |
| InitializeObject(address, kNullCid, Instance::InstanceSize(), |
| Instance::ContainsCompressedPointers()); |
| null_->untag()->SetCanonical(); |
| } |
| |
| // Allocate and initialize the bool instances. |
| // These must be allocated such that at kBoolValueBitPosition, the address |
| // of true is 0 and the address of false is 1, and their addresses are |
| // otherwise identical. |
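| // As the asserts below check, this layout means a bool's value (and its |
| // negation) can be derived from a single address bit, e.g.: |
| //   address(false_) == address(true_) ^ kBoolValueMask  // a sketch |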
| { |
| // Allocate a dummy bool object to give true the desired alignment. |
| uword address = heap->Allocate(Bool::InstanceSize(), Heap::kOld); |
| InitializeObject(address, kBoolCid, Bool::InstanceSize(), |
| Bool::ContainsCompressedPointers()); |
| static_cast<BoolPtr>(address + kHeapObjectTag)->untag()->value_ = false; |
| } |
| { |
| // Allocate true. |
| uword address = heap->Allocate(Bool::InstanceSize(), Heap::kOld); |
| true_ = static_cast<BoolPtr>(address + kHeapObjectTag); |
| InitializeObject(address, kBoolCid, Bool::InstanceSize(), |
| Bool::ContainsCompressedPointers()); |
| true_->untag()->value_ = true; |
| true_->untag()->SetCanonical(); |
| } |
| { |
| // Allocate false. |
| uword address = heap->Allocate(Bool::InstanceSize(), Heap::kOld); |
| false_ = static_cast<BoolPtr>(address + kHeapObjectTag); |
| InitializeObject(address, kBoolCid, Bool::InstanceSize(), |
| Bool::ContainsCompressedPointers()); |
| false_->untag()->value_ = false; |
| false_->untag()->SetCanonical(); |
| } |
| |
| // Check that the objects have been allocated at appropriate addresses. |
| ASSERT(static_cast<uword>(true_) == |
| static_cast<uword>(null_) + kTrueOffsetFromNull); |
| ASSERT(static_cast<uword>(false_) == |
| static_cast<uword>(null_) + kFalseOffsetFromNull); |
| ASSERT((static_cast<uword>(true_) & kBoolValueMask) == 0); |
| ASSERT((static_cast<uword>(false_) & kBoolValueMask) != 0); |
| ASSERT(static_cast<uword>(false_) == |
| (static_cast<uword>(true_) | kBoolValueMask)); |
| ASSERT((static_cast<uword>(null_) & kBoolVsNullMask) == 0); |
| ASSERT((static_cast<uword>(true_) & kBoolVsNullMask) != 0); |
| ASSERT((static_cast<uword>(false_) & kBoolVsNullMask) != 0); |
| } |
| |
| void Object::InitVtables() { |
| { |
| Object fake_handle; |
| builtin_vtables_[kObjectCid] = fake_handle.vtable(); |
| } |
| |
| #define INIT_VTABLE(clazz) \ |
| { \ |
| clazz fake_handle; \ |
| builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \ |
| } |
| CLASS_LIST_NO_OBJECT_NOR_STRING_NOR_ARRAY(INIT_VTABLE) |
| #undef INIT_VTABLE |
| |
| #define INIT_VTABLE(clazz) \ |
| { \ |
| Array fake_handle; \ |
| builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \ |
| } |
| CLASS_LIST_ARRAYS(INIT_VTABLE) |
| #undef INIT_VTABLE |
| |
| #define INIT_VTABLE(clazz) \ |
| { \ |
| String fake_handle; \ |
| builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \ |
| } |
| CLASS_LIST_STRINGS(INIT_VTABLE) |
| #undef INIT_VTABLE |
| |
| { |
| Instance fake_handle; |
| builtin_vtables_[kFfiNativeTypeCid] = fake_handle.vtable(); |
| } |
| |
| #define INIT_VTABLE(clazz) \ |
| { \ |
| Instance fake_handle; \ |
| builtin_vtables_[kFfi##clazz##Cid] = fake_handle.vtable(); \ |
| } |
| CLASS_LIST_FFI_TYPE_MARKER(INIT_VTABLE) |
| #undef INIT_VTABLE |
| |
| { |
| Instance fake_handle; |
| builtin_vtables_[kFfiNativeFunctionCid] = fake_handle.vtable(); |
| } |
| |
| { |
| Pointer fake_handle; |
| builtin_vtables_[kFfiPointerCid] = fake_handle.vtable(); |
| } |
| |
| { |
| DynamicLibrary fake_handle; |
| builtin_vtables_[kFfiDynamicLibraryCid] = fake_handle.vtable(); |
| } |
| |
| #define INIT_VTABLE(clazz) \ |
| { \ |
| TypedData fake_internal_handle; \ |
| builtin_vtables_[kTypedData##clazz##Cid] = fake_internal_handle.vtable(); \ |
| TypedDataView fake_view_handle; \ |
| builtin_vtables_[kTypedData##clazz##ViewCid] = fake_view_handle.vtable(); \ |
| ExternalTypedData fake_external_handle; \ |
| builtin_vtables_[kExternalTypedData##clazz##Cid] = \ |
| fake_external_handle.vtable(); \ |
| } |
| CLASS_LIST_TYPED_DATA(INIT_VTABLE) |
| #undef INIT_VTABLE |
| |
| { |
| TypedDataView fake_handle; |
| builtin_vtables_[kByteDataViewCid] = fake_handle.vtable(); |
| } |
| |
| { |
| Instance fake_handle; |
| builtin_vtables_[kByteBufferCid] = fake_handle.vtable(); |
| builtin_vtables_[kNullCid] = fake_handle.vtable(); |
| builtin_vtables_[kDynamicCid] = fake_handle.vtable(); |
| builtin_vtables_[kVoidCid] = fake_handle.vtable(); |
| builtin_vtables_[kNeverCid] = fake_handle.vtable(); |
| } |
| } |
| |
| void Object::Init(IsolateGroup* isolate_group) { |
| // Should only be run by the vm isolate. |
| ASSERT(isolate_group == Dart::vm_isolate_group()); |
| Heap* heap = isolate_group->heap(); |
| Thread* thread = Thread::Current(); |
| ASSERT(thread != nullptr); |
| // Ensure lock checks in setters are happy. |
| SafepointWriteRwLocker ml(thread, isolate_group->program_lock()); |
| |
| InitVtables(); |
| |
| // Allocate the read-only object handles here. |
| #define INITIALIZE_SHARED_READONLY_HANDLE(Type, name) \ |
| name##_ = Type::ReadOnlyHandle(); |
| SHARED_READONLY_HANDLES_LIST(INITIALIZE_SHARED_READONLY_HANDLE) |
| #undef INITIALIZE_SHARED_READONLY_HANDLE |
| |
| *null_object_ = Object::null(); |
| *null_class_ = Class::null(); |
| *null_array_ = Array::null(); |
| *null_string_ = String::null(); |
| *null_instance_ = Instance::null(); |
| *null_function_ = Function::null(); |
| *null_function_type_ = FunctionType::null(); |
| *null_type_arguments_ = TypeArguments::null(); |
| *empty_type_arguments_ = TypeArguments::null(); |
| *null_abstract_type_ = AbstractType::null(); |
| *null_compressed_stackmaps_ = CompressedStackMaps::null(); |
| *bool_true_ = true_; |
| *bool_false_ = false_; |
| |
| // Initialize the empty and zero array handles to null_ so that we can check |
| // whether the empty and zero arrays were allocated (RAW_NULL is not |
| // available). |
| *empty_array_ = Array::null(); |
| *zero_array_ = Array::null(); |
| |
| Class& cls = Class::Handle(); |
| |
| // Allocate and initialize the class class. |
| { |
| intptr_t size = Class::InstanceSize(); |
| uword address = heap->Allocate(size, Heap::kOld); |
| class_class_ = static_cast<ClassPtr>(address + kHeapObjectTag); |
| InitializeObject(address, Class::kClassId, size, |
| Class::ContainsCompressedPointers()); |
| |
| Class fake; |
| // Initialization from Class::New<Class>. |
| // Directly set ptr_ to break a circular dependency: SetRaw will attempt to |
| // look up the class class in the class table, where it is not registered |
| // yet. |
| cls.ptr_ = class_class_; |
| ASSERT(builtin_vtables_[kClassCid] == fake.vtable()); |
| cls.set_instance_size( |
| Class::InstanceSize(), |
| compiler::target::RoundedAllocationSize(RTN::Class::InstanceSize())); |
| const intptr_t host_next_field_offset = Class::NextFieldOffset(); |
| const intptr_t target_next_field_offset = RTN::Class::NextFieldOffset(); |
| cls.set_next_field_offset(host_next_field_offset, target_next_field_offset); |
| cls.set_id(Class::kClassId); |
| cls.set_state_bits(0); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| cls.set_type_arguments_field_offset_in_words(Class::kNoTypeArguments, |
| RTN::Class::kNoTypeArguments); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_num_native_fields(0); |
| cls.InitEmptyFields(); |
| isolate_group->class_table()->Register(cls); |
| } |
| |
| // Allocate and initialize the null class. |
| cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group); |
| cls.set_num_type_arguments_unsafe(0); |
| isolate_group->object_store()->set_null_class(cls); |
| |
| // Allocate and initialize Never class. |
| cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| isolate_group->object_store()->set_never_class(cls); |
| |
| // Allocate and initialize the free list element class. |
| cls = Class::New<FreeListElement::FakeInstance, |
| RTN::FreeListElement::FakeInstance>(kFreeListElement, |
| isolate_group); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| |
| // Allocate and initialize the forwarding corpse class. |
| cls = Class::New<ForwardingCorpse::FakeInstance, |
| RTN::ForwardingCorpse::FakeInstance>(kForwardingCorpse, |
| isolate_group); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| |
| // Allocate and initialize Sentinel class. |
| cls = Class::New<Sentinel, RTN::Sentinel>(isolate_group); |
| sentinel_class_ = cls.ptr(); |
| |
| // Allocate and initialize the sentinel values. |
| { |
| *sentinel_ ^= Sentinel::New(); |
| *transition_sentinel_ ^= Sentinel::New(); |
| } |
| |
| // Allocate and initialize optimizing compiler constants. |
| { |
| *unknown_constant_ ^= Sentinel::New(); |
| *non_constant_ ^= Sentinel::New(); |
| } |
| |
| // Allocate the remaining VM internal classes. |
| cls = Class::New<TypeParameters, RTN::TypeParameters>(isolate_group); |
| type_parameters_class_ = cls.ptr(); |
| |
| cls = Class::New<TypeArguments, RTN::TypeArguments>(isolate_group); |
| type_arguments_class_ = cls.ptr(); |
| |
| cls = Class::New<PatchClass, RTN::PatchClass>(isolate_group); |
| patch_class_class_ = cls.ptr(); |
| |
| cls = Class::New<Function, RTN::Function>(isolate_group); |
| function_class_ = cls.ptr(); |
| |
| cls = Class::New<ClosureData, RTN::ClosureData>(isolate_group); |
| closure_data_class_ = cls.ptr(); |
| |
| cls = Class::New<FfiTrampolineData, RTN::FfiTrampolineData>(isolate_group); |
| ffi_trampoline_data_class_ = cls.ptr(); |
| |
| cls = Class::New<Field, RTN::Field>(isolate_group); |
| field_class_ = cls.ptr(); |
| |
| cls = Class::New<Script, RTN::Script>(isolate_group); |
| script_class_ = cls.ptr(); |
| |
| cls = Class::New<Library, RTN::Library>(isolate_group); |
| library_class_ = cls.ptr(); |
| |
| cls = Class::New<Namespace, RTN::Namespace>(isolate_group); |
| namespace_class_ = cls.ptr(); |
| |
| cls = Class::New<KernelProgramInfo, RTN::KernelProgramInfo>(isolate_group); |
| kernel_program_info_class_ = cls.ptr(); |
| |
| cls = Class::New<Code, RTN::Code>(isolate_group); |
| code_class_ = cls.ptr(); |
| |
| cls = Class::New<Instructions, RTN::Instructions>(isolate_group); |
| instructions_class_ = cls.ptr(); |
| |
| cls = |
| Class::New<InstructionsSection, RTN::InstructionsSection>(isolate_group); |
| instructions_section_class_ = cls.ptr(); |
| |
| cls = Class::New<InstructionsTable, RTN::InstructionsTable>(isolate_group); |
| instructions_table_class_ = cls.ptr(); |
| |
| cls = Class::New<ObjectPool, RTN::ObjectPool>(isolate_group); |
| object_pool_class_ = cls.ptr(); |
| |
| cls = Class::New<PcDescriptors, RTN::PcDescriptors>(isolate_group); |
| pc_descriptors_class_ = cls.ptr(); |
| |
| cls = Class::New<CodeSourceMap, RTN::CodeSourceMap>(isolate_group); |
| code_source_map_class_ = cls.ptr(); |
| |
| cls = |
| Class::New<CompressedStackMaps, RTN::CompressedStackMaps>(isolate_group); |
| compressed_stackmaps_class_ = cls.ptr(); |
| |
| cls = |
| Class::New<LocalVarDescriptors, RTN::LocalVarDescriptors>(isolate_group); |
| var_descriptors_class_ = cls.ptr(); |
| |
| cls = Class::New<ExceptionHandlers, RTN::ExceptionHandlers>(isolate_group); |
| exception_handlers_class_ = cls.ptr(); |
| |
| cls = Class::New<Context, RTN::Context>(isolate_group); |
| context_class_ = cls.ptr(); |
| |
| cls = Class::New<ContextScope, RTN::ContextScope>(isolate_group); |
| context_scope_class_ = cls.ptr(); |
| |
| cls = Class::New<SingleTargetCache, RTN::SingleTargetCache>(isolate_group); |
| singletargetcache_class_ = cls.ptr(); |
| |
| cls = Class::New<UnlinkedCall, RTN::UnlinkedCall>(isolate_group); |
| unlinkedcall_class_ = cls.ptr(); |
| |
| cls = Class::New<MonomorphicSmiableCall, RTN::MonomorphicSmiableCall>( |
| isolate_group); |
| monomorphicsmiablecall_class_ = cls.ptr(); |
| |
| cls = Class::New<ICData, RTN::ICData>(isolate_group); |
| icdata_class_ = cls.ptr(); |
| |
| cls = Class::New<MegamorphicCache, RTN::MegamorphicCache>(isolate_group); |
| megamorphic_cache_class_ = cls.ptr(); |
| |
| cls = Class::New<SubtypeTestCache, RTN::SubtypeTestCache>(isolate_group); |
| subtypetestcache_class_ = cls.ptr(); |
| |
| cls = Class::New<LoadingUnit, RTN::LoadingUnit>(isolate_group); |
| loadingunit_class_ = cls.ptr(); |
| |
| cls = Class::New<ApiError, RTN::ApiError>(isolate_group); |
| api_error_class_ = cls.ptr(); |
| |
| cls = Class::New<LanguageError, RTN::LanguageError>(isolate_group); |
| language_error_class_ = cls.ptr(); |
| |
| cls = Class::New<UnhandledException, RTN::UnhandledException>(isolate_group); |
| unhandled_exception_class_ = cls.ptr(); |
| |
| cls = Class::New<UnwindError, RTN::UnwindError>(isolate_group); |
| unwind_error_class_ = cls.ptr(); |
| |
| cls = Class::New<WeakSerializationReference, RTN::WeakSerializationReference>( |
| isolate_group); |
| weak_serialization_reference_class_ = cls.ptr(); |
| |
| ASSERT(class_class() != null_); |
| |
| // Pre-allocate classes in the vm isolate so that we can, for example, |
| // create a symbol table and populate it with some frequently used strings |
| // as symbols. |
| cls = Class::New<Array, RTN::Array>(isolate_group); |
| isolate_group->object_store()->set_array_class(cls); |
| cls.set_type_arguments_field_offset(Array::type_arguments_offset(), |
| RTN::Array::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group); |
| isolate_group->object_store()->set_immutable_array_class(cls); |
| cls.set_type_arguments_field_offset(Array::type_arguments_offset(), |
| RTN::Array::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| cls = |
| Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(isolate_group); |
| isolate_group->object_store()->set_growable_object_array_class(cls); |
| cls.set_type_arguments_field_offset( |
| GrowableObjectArray::type_arguments_offset(), |
| RTN::GrowableObjectArray::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| cls = Class::NewStringClass(kOneByteStringCid, isolate_group); |
| isolate_group->object_store()->set_one_byte_string_class(cls); |
| cls = Class::NewStringClass(kTwoByteStringCid, isolate_group); |
| isolate_group->object_store()->set_two_byte_string_class(cls); |
| cls = Class::New<Mint, RTN::Mint>(isolate_group); |
| isolate_group->object_store()->set_mint_class(cls); |
| cls = Class::New<Double, RTN::Double>(isolate_group); |
| isolate_group->object_store()->set_double_class(cls); |
| cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group); |
| isolate_group->object_store()->set_float32x4_class(cls); |
| cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group); |
| isolate_group->object_store()->set_float64x2_class(cls); |
| cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group); |
| isolate_group->object_store()->set_int32x4_class(cls); |
| |
| // Ensure that class kExternalTypedDataUint8ArrayCid is registered as we |
| // need it when reading in the token stream of bootstrap classes in the VM |
| // isolate. |
| Class::NewExternalTypedDataClass(kExternalTypedDataUint8ArrayCid, |
| isolate_group); |
| |
| // Needed for object pools of VM isolate stubs. |
| Class::NewTypedDataClass(kTypedDataInt8ArrayCid, isolate_group); |
| |
| // Allocate and initialize the empty_array instance. |
| { |
| uword address = heap->Allocate(Array::InstanceSize(0), Heap::kOld); |
| InitializeObject(address, kImmutableArrayCid, Array::InstanceSize(0), |
| Array::ContainsCompressedPointers()); |
| Array::initializeHandle(empty_array_, |
| static_cast<ArrayPtr>(address + kHeapObjectTag)); |
| empty_array_->untag()->set_length(Smi::New(0)); |
| empty_array_->SetCanonical(); |
| } |
| |
| Smi& smi = Smi::Handle(); |
| // Allocate and initialize the zero_array instance. |
| { |
| uword address = heap->Allocate(Array::InstanceSize(1), Heap::kOld); |
| InitializeObject(address, kImmutableArrayCid, Array::InstanceSize(1), |
| Array::ContainsCompressedPointers()); |
| Array::initializeHandle(zero_array_, |
| static_cast<ArrayPtr>(address + kHeapObjectTag)); |
| zero_array_->untag()->set_length(Smi::New(1)); |
| smi = Smi::New(0); |
| zero_array_->SetAt(0, smi); |
| zero_array_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the canonical empty context scope object. |
| { |
| uword address = heap->Allocate(ContextScope::InstanceSize(0), Heap::kOld); |
| InitializeObject(address, kContextScopeCid, ContextScope::InstanceSize(0), |
| ContextScope::ContainsCompressedPointers()); |
| ContextScope::initializeHandle( |
| empty_context_scope_, |
| static_cast<ContextScopePtr>(address + kHeapObjectTag)); |
| empty_context_scope_->StoreNonPointer( |
| &empty_context_scope_->untag()->num_variables_, 0); |
| empty_context_scope_->StoreNonPointer( |
| &empty_context_scope_->untag()->is_implicit_, true); |
| empty_context_scope_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the canonical empty object pool object. |
| { |
| uword address = heap->Allocate(ObjectPool::InstanceSize(0), Heap::kOld); |
| InitializeObject(address, kObjectPoolCid, ObjectPool::InstanceSize(0), |
| ObjectPool::ContainsCompressedPointers()); |
| ObjectPool::initializeHandle( |
| empty_object_pool_, |
| static_cast<ObjectPoolPtr>(address + kHeapObjectTag)); |
| empty_object_pool_->StoreNonPointer(&empty_object_pool_->untag()->length_, |
| 0); |
| empty_object_pool_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the empty_compressed_stackmaps instance. |
| { |
| const intptr_t instance_size = CompressedStackMaps::InstanceSize(0); |
| uword address = heap->Allocate(instance_size, Heap::kOld); |
| InitializeObject(address, kCompressedStackMapsCid, instance_size, |
| CompressedStackMaps::ContainsCompressedPointers()); |
| CompressedStackMaps::initializeHandle( |
| empty_compressed_stackmaps_, |
| static_cast<CompressedStackMapsPtr>(address + kHeapObjectTag)); |
| empty_compressed_stackmaps_->StoreNonPointer( |
| &empty_compressed_stackmaps_->untag()->flags_and_size_, 0); |
| empty_compressed_stackmaps_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the empty_descriptors instance. |
| { |
| uword address = heap->Allocate(PcDescriptors::InstanceSize(0), Heap::kOld); |
| InitializeObject(address, kPcDescriptorsCid, PcDescriptors::InstanceSize(0), |
| PcDescriptors::ContainsCompressedPointers()); |
| PcDescriptors::initializeHandle( |
| empty_descriptors_, |
| static_cast<PcDescriptorsPtr>(address + kHeapObjectTag)); |
| empty_descriptors_->StoreNonPointer(&empty_descriptors_->untag()->length_, |
| 0); |
| empty_descriptors_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the canonical empty variable descriptor object. |
| { |
| uword address = |
| heap->Allocate(LocalVarDescriptors::InstanceSize(0), Heap::kOld); |
| InitializeObject(address, kLocalVarDescriptorsCid, |
| LocalVarDescriptors::InstanceSize(0), |
| LocalVarDescriptors::ContainsCompressedPointers()); |
| LocalVarDescriptors::initializeHandle( |
| empty_var_descriptors_, |
| static_cast<LocalVarDescriptorsPtr>(address + kHeapObjectTag)); |
| empty_var_descriptors_->StoreNonPointer( |
| &empty_var_descriptors_->untag()->num_entries_, 0); |
| empty_var_descriptors_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the canonical empty exception handler info object. |
| // The vast majority of functions do not contain an exception handler and |
| // can share this canonical descriptor. |
| { |
| uword address = |
| heap->Allocate(ExceptionHandlers::InstanceSize(0), Heap::kOld); |
| InitializeObject(address, kExceptionHandlersCid, |
| ExceptionHandlers::InstanceSize(0), |
| ExceptionHandlers::ContainsCompressedPointers()); |
| ExceptionHandlers::initializeHandle( |
| empty_exception_handlers_, |
| static_cast<ExceptionHandlersPtr>(address + kHeapObjectTag)); |
| empty_exception_handlers_->StoreNonPointer( |
| &empty_exception_handlers_->untag()->num_entries_, 0); |
| empty_exception_handlers_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the canonical empty type arguments object. |
| { |
| uword address = heap->Allocate(TypeArguments::InstanceSize(0), Heap::kOld); |
| InitializeObject(address, kTypeArgumentsCid, TypeArguments::InstanceSize(0), |
| TypeArguments::ContainsCompressedPointers()); |
| TypeArguments::initializeHandle( |
| empty_type_arguments_, |
| static_cast<TypeArgumentsPtr>(address + kHeapObjectTag)); |
| empty_type_arguments_->untag()->set_length(Smi::New(0)); |
| empty_type_arguments_->untag()->set_hash(Smi::New(0)); |
| empty_type_arguments_->ComputeHash(); |
| empty_type_arguments_->SetCanonical(); |
| } |
| |
| // The VM isolate snapshot object table is initialized to an empty array |
| // as we do not have any VM isolate snapshot at this time. |
| *vm_isolate_snapshot_object_table_ = Object::empty_array().ptr(); |
| |
| cls = Class::New<Instance, RTN::Instance>(kDynamicCid, isolate_group); |
| cls.set_is_abstract(); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| dynamic_class_ = cls.ptr(); |
| |
| cls = Class::New<Instance, RTN::Instance>(kVoidCid, isolate_group); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| void_class_ = cls.ptr(); |
| |
| cls = Class::New<Type, RTN::Type>(isolate_group); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| |
| cls = Class::New<FunctionType, RTN::FunctionType>(isolate_group); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| |
| cls = dynamic_class_; |
| *dynamic_type_ = |
| Type::New(cls, Object::null_type_arguments(), Nullability::kNullable); |
| dynamic_type_->SetIsFinalized(); |
| dynamic_type_->ComputeHash(); |
| dynamic_type_->SetCanonical(); |
| |
| cls = void_class_; |
| *void_type_ = |
| Type::New(cls, Object::null_type_arguments(), Nullability::kNullable); |
| void_type_->SetIsFinalized(); |
| void_type_->ComputeHash(); |
| void_type_->SetCanonical(); |
| |
| // Since TypeArguments objects are passed as function arguments, make them |
| // behave as Dart instances, although they are just VM objects. |
| // Note that we cannot set the super type to ObjectType, which does not live |
| // in the vm isolate. See special handling in Class::SuperClass(). |
| cls = type_arguments_class_; |
| cls.set_interfaces(Object::empty_array()); |
| cls.SetFields(Object::empty_array()); |
| cls.SetFunctions(Object::empty_array()); |
| |
| cls = Class::New<Bool, RTN::Bool>(isolate_group); |
| isolate_group->object_store()->set_bool_class(cls); |
| |
| *smi_illegal_cid_ = Smi::New(kIllegalCid); |
| *smi_zero_ = Smi::New(0); |
| |
| String& error_str = String::Handle(); |
| error_str = String::New( |
| "Internal Dart data pointers have been acquired, please release them " |
| "using Dart_TypedDataReleaseData.", |
| Heap::kOld); |
| *typed_data_acquire_error_ = ApiError::New(error_str, Heap::kOld); |
| error_str = String::New("SnapshotWriter Error", Heap::kOld); |
| *snapshot_writer_error_ = |
| LanguageError::New(error_str, Report::kError, Heap::kOld); |
| error_str = String::New("Branch offset overflow", Heap::kOld); |
| *branch_offset_error_ = |
| LanguageError::New(error_str, Report::kBailout, Heap::kOld); |
| error_str = String::New("Speculative inlining failed", Heap::kOld); |
| *speculative_inlining_error_ = |
| LanguageError::New(error_str, Report::kBailout, Heap::kOld); |
| error_str = String::New("Background Compilation Failed", Heap::kOld); |
| *background_compilation_error_ = |
| LanguageError::New(error_str, Report::kBailout, Heap::kOld); |
| error_str = String::New("Out of memory", Heap::kOld); |
| *out_of_memory_error_ = |
| LanguageError::New(error_str, Report::kBailout, Heap::kOld); |
| |
| // Allocate the parameter arrays for method extractor types and names. |
| *extractor_parameter_types_ = Array::New(1, Heap::kOld); |
| extractor_parameter_types_->SetAt(0, Object::dynamic_type()); |
| *extractor_parameter_names_ = Array::New(1, Heap::kOld); |
| // Fill in extractor_parameter_names_ later, after symbols are initialized |
| // (in Object::FinalizeVMIsolate). The extractor_parameter_names_ object |
| // needs to be created earlier, as the VM isolate snapshot reader references |
| // it before Object::FinalizeVMIsolate. |
| |
| // Some thread fields need to be reinitialized, as null constants were not |
| // initialized until now. |
| thread->ClearStickyError(); |
| thread->clear_pending_functions(); |
| |
| ASSERT(!null_object_->IsSmi()); |
| ASSERT(!null_class_->IsSmi()); |
| ASSERT(null_class_->IsClass()); |
| ASSERT(!null_array_->IsSmi()); |
| ASSERT(null_array_->IsArray()); |
| ASSERT(!null_string_->IsSmi()); |
| ASSERT(null_string_->IsString()); |
| ASSERT(!null_instance_->IsSmi()); |
| ASSERT(null_instance_->IsInstance()); |
| ASSERT(!null_function_->IsSmi()); |
| ASSERT(null_function_->IsFunction()); |
| ASSERT(!null_function_type_->IsSmi()); |
| ASSERT(null_function_type_->IsFunctionType()); |
| ASSERT(!null_type_arguments_->IsSmi()); |
| ASSERT(null_type_arguments_->IsTypeArguments()); |
| ASSERT(!null_compressed_stackmaps_->IsSmi()); |
| ASSERT(null_compressed_stackmaps_->IsCompressedStackMaps()); |
| ASSERT(!empty_array_->IsSmi()); |
| ASSERT(empty_array_->IsArray()); |
| ASSERT(!zero_array_->IsSmi()); |
| ASSERT(zero_array_->IsArray()); |
| ASSERT(!empty_type_arguments_->IsSmi()); |
| ASSERT(empty_type_arguments_->IsTypeArguments()); |
| ASSERT(!empty_context_scope_->IsSmi()); |
| ASSERT(empty_context_scope_->IsContextScope()); |
| ASSERT(!empty_compressed_stackmaps_->IsSmi()); |
| ASSERT(empty_compressed_stackmaps_->IsCompressedStackMaps()); |
| ASSERT(!empty_descriptors_->IsSmi()); |
| ASSERT(empty_descriptors_->IsPcDescriptors()); |
| ASSERT(!empty_var_descriptors_->IsSmi()); |
| ASSERT(empty_var_descriptors_->IsLocalVarDescriptors()); |
| ASSERT(!empty_exception_handlers_->IsSmi()); |
| ASSERT(empty_exception_handlers_->IsExceptionHandlers()); |
| ASSERT(!sentinel_->IsSmi()); |
| ASSERT(sentinel_->IsSentinel()); |
| ASSERT(!transition_sentinel_->IsSmi()); |
| ASSERT(transition_sentinel_->IsSentinel()); |
| ASSERT(!unknown_constant_->IsSmi()); |
| ASSERT(unknown_constant_->IsSentinel()); |
| ASSERT(!non_constant_->IsSmi()); |
| ASSERT(non_constant_->IsSentinel()); |
| ASSERT(!bool_true_->IsSmi()); |
| ASSERT(bool_true_->IsBool()); |
| ASSERT(!bool_false_->IsSmi()); |
| ASSERT(bool_false_->IsBool()); |
| ASSERT(smi_illegal_cid_->IsSmi()); |
| ASSERT(smi_zero_->IsSmi()); |
| ASSERT(!typed_data_acquire_error_->IsSmi()); |
| ASSERT(typed_data_acquire_error_->IsApiError()); |
| ASSERT(!snapshot_writer_error_->IsSmi()); |
| ASSERT(snapshot_writer_error_->IsLanguageError()); |
| ASSERT(!branch_offset_error_->IsSmi()); |
| ASSERT(branch_offset_error_->IsLanguageError()); |
| ASSERT(!speculative_inlining_error_->IsSmi()); |
| ASSERT(speculative_inlining_error_->IsLanguageError()); |
| ASSERT(!background_compilation_error_->IsSmi()); |
| ASSERT(background_compilation_error_->IsLanguageError()); |
| ASSERT(!out_of_memory_error_->IsSmi()); |
| ASSERT(out_of_memory_error_->IsLanguageError()); |
| ASSERT(!vm_isolate_snapshot_object_table_->IsSmi()); |
| ASSERT(vm_isolate_snapshot_object_table_->IsArray()); |
| ASSERT(!extractor_parameter_types_->IsSmi()); |
| ASSERT(extractor_parameter_types_->IsArray()); |
| ASSERT(!extractor_parameter_names_->IsSmi()); |
| ASSERT(extractor_parameter_names_->IsArray()); |
| } |
| |
| void Object::FinishInit(IsolateGroup* isolate_group) { |
| // The type testing stubs we initialize in AbstractType objects for the |
| // canonical type of kDynamicCid/kVoidCid need to be set in this |
| // method, which is called after StubCode::InitOnce(). |
| Code& code = Code::Handle(); |
| |
| code = TypeTestingStubGenerator::DefaultCodeForType(*dynamic_type_); |
| dynamic_type_->SetTypeTestingStub(code); |
| |
| code = TypeTestingStubGenerator::DefaultCodeForType(*void_type_); |
| void_type_->SetTypeTestingStub(code); |
| } |
| |
| void Object::Cleanup() { |
| null_ = static_cast<ObjectPtr>(RAW_NULL); |
| true_ = static_cast<BoolPtr>(RAW_NULL); |
| false_ = static_cast<BoolPtr>(RAW_NULL); |
| class_class_ = static_cast<ClassPtr>(RAW_NULL); |
| dynamic_class_ = static_cast<ClassPtr>(RAW_NULL); |
| void_class_ = static_cast<ClassPtr>(RAW_NULL); |
| type_parameters_class_ = static_cast<ClassPtr>(RAW_NULL); |
| type_arguments_class_ = static_cast<ClassPtr>(RAW_NULL); |
| patch_class_class_ = static_cast<ClassPtr>(RAW_NULL); |
| function_class_ = static_cast<ClassPtr>(RAW_NULL); |
| closure_data_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ffi_trampoline_data_class_ = static_cast<ClassPtr>(RAW_NULL); |
| field_class_ = static_cast<ClassPtr>(RAW_NULL); |
| script_class_ = static_cast<ClassPtr>(RAW_NULL); |
| library_class_ = static_cast<ClassPtr>(RAW_NULL); |
| namespace_class_ = static_cast<ClassPtr>(RAW_NULL); |
| kernel_program_info_class_ = static_cast<ClassPtr>(RAW_NULL); |
| code_class_ = static_cast<ClassPtr>(RAW_NULL); |
| instructions_class_ = static_cast<ClassPtr>(RAW_NULL); |
| instructions_section_class_ = static_cast<ClassPtr>(RAW_NULL); |
| instructions_table_class_ = static_cast<ClassPtr>(RAW_NULL); |
| object_pool_class_ = static_cast<ClassPtr>(RAW_NULL); |
| pc_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL); |
| code_source_map_class_ = static_cast<ClassPtr>(RAW_NULL); |
| compressed_stackmaps_class_ = static_cast<ClassPtr>(RAW_NULL); |
| var_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL); |
| exception_handlers_class_ = static_cast<ClassPtr>(RAW_NULL); |
| context_class_ = static_cast<ClassPtr>(RAW_NULL); |
| context_scope_class_ = static_cast<ClassPtr>(RAW_NULL); |
| singletargetcache_class_ = static_cast<ClassPtr>(RAW_NULL); |
| unlinkedcall_class_ = static_cast<ClassPtr>(RAW_NULL); |
| monomorphicsmiablecall_class_ = static_cast<ClassPtr>(RAW_NULL); |
| icdata_class_ = static_cast<ClassPtr>(RAW_NULL); |
| megamorphic_cache_class_ = static_cast<ClassPtr>(RAW_NULL); |
| subtypetestcache_class_ = static_cast<ClassPtr>(RAW_NULL); |
| loadingunit_class_ = static_cast<ClassPtr>(RAW_NULL); |
| api_error_class_ = static_cast<ClassPtr>(RAW_NULL); |
| language_error_class_ = static_cast<ClassPtr>(RAW_NULL); |
| unhandled_exception_class_ = static_cast<ClassPtr>(RAW_NULL); |
| unwind_error_class_ = static_cast<ClassPtr>(RAW_NULL); |
| } |
| |
| // An object visitor which will mark all visited objects. This is used to |
| // premark all objects in the vm_isolate_ heap. It also precalculates hash |
| // codes so that we can get the identity hash code of objects in the |
| // read-only VM isolate. |
| class FinalizeVMIsolateVisitor : public ObjectVisitor { |
| public: |
| FinalizeVMIsolateVisitor() |
| #if defined(HASH_IN_OBJECT_HEADER) |
| : counter_(1337) |
| #endif |
| { |
| } |
| |
| void VisitObject(ObjectPtr obj) { |
| // Free list elements should never be marked. |
| ASSERT(!obj->untag()->IsMarked()); |
| // No forwarding corpses in the VM isolate. |
| ASSERT(!obj->IsForwardingCorpse()); |
| if (!obj->IsFreeListElement()) { |
| obj->untag()->SetMarkBitUnsynchronized(); |
| Object::FinalizeReadOnlyObject(obj); |
| #if defined(HASH_IN_OBJECT_HEADER) |
| // These objects end up in the read-only VM isolate which is shared |
| // between isolates, so we have to prepopulate them with identity hash |
| // codes, since we can't add hash codes later. |
| if (Object::GetCachedHash(obj) == 0) { |
| // Some classes have identity hash codes that depend on their contents, |
| // not on the individual object. |
| ASSERT(!obj->IsStringInstance()); |
| if (!obj->IsMint() && !obj->IsDouble() && !obj->IsRawNull() && |
| !obj->IsBool()) { |
| counter_ += 2011; // The year Dart was announced and a prime. |
| counter_ &= 0x3fffffff; |
| if (counter_ == 0) counter_++; |
| Object::SetCachedHashIfNotSet(obj, counter_); |
| } |
| } |
| #endif |
| } |
| } |
| |
| private: |
| #if defined(HASH_IN_OBJECT_HEADER) |
| int32_t counter_; |
| #endif |
| }; |
| |
| #define SET_CLASS_NAME(class_name, name) \ |
| cls = class_name##_class(); \ |
| cls.set_name(Symbols::name()); |
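| // For example, SET_CLASS_NAME(class, Class) expands to: |
| //   cls = class_class(); |
| //   cls.set_name(Symbols::Class()); |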
| |
| void Object::FinalizeVMIsolate(IsolateGroup* isolate_group) { |
| // Should only be run by the vm isolate. |
| ASSERT(isolate_group == Dart::vm_isolate_group()); |
| |
| // Finish the initialization of extractor_parameter_names_, which was |
| // started in Object::Init(). |
| extractor_parameter_names_->SetAt(0, Symbols::This()); |
| |
| // Set up names for all VM singleton classes. |
| Class& cls = Class::Handle(); |
| |
| SET_CLASS_NAME(class, Class); |
| SET_CLASS_NAME(dynamic, Dynamic); |
| SET_CLASS_NAME(void, Void); |
| SET_CLASS_NAME(type_parameters, TypeParameters); |
| SET_CLASS_NAME(type_arguments, TypeArguments); |
| SET_CLASS_NAME(patch_class, PatchClass); |
| SET_CLASS_NAME(function, Function); |
| SET_CLASS_NAME(closure_data, ClosureData); |
| SET_CLASS_NAME(ffi_trampoline_data, FfiTrampolineData); |
| SET_CLASS_NAME(field, Field); |
| SET_CLASS_NAME(script, Script); |
| SET_CLASS_NAME(library, LibraryClass); |
| SET_CLASS_NAME(namespace, Namespace); |
| SET_CLASS_NAME(kernel_program_info, KernelProgramInfo); |
| SET_CLASS_NAME(weak_serialization_reference, WeakSerializationReference); |
| SET_CLASS_NAME(code, Code); |
| SET_CLASS_NAME(instructions, Instructions); |
| SET_CLASS_NAME(instructions_section, InstructionsSection); |
| SET_CLASS_NAME(instructions_table, InstructionsTable); |
| SET_CLASS_NAME(object_pool, ObjectPool); |
| SET_CLASS_NAME(code_source_map, CodeSourceMap); |
| SET_CLASS_NAME(pc_descriptors, PcDescriptors); |
| SET_CLASS_NAME(compressed_stackmaps, CompressedStackMaps); |
| SET_CLASS_NAME(var_descriptors, LocalVarDescriptors); |
| SET_CLASS_NAME(exception_handlers, ExceptionHandlers); |
| SET_CLASS_NAME(context, Context); |
| SET_CLASS_NAME(context_scope, ContextScope); |
| SET_CLASS_NAME(sentinel, Sentinel); |
| SET_CLASS_NAME(singletargetcache, SingleTargetCache); |
| SET_CLASS_NAME(unlinkedcall, UnlinkedCall); |
| SET_CLASS_NAME(monomorphicsmiablecall, MonomorphicSmiableCall); |
| SET_CLASS_NAME(icdata, ICData); |
| SET_CLASS_NAME(megamorphic_cache, MegamorphicCache); |
| SET_CLASS_NAME(subtypetestcache, SubtypeTestCache); |
| SET_CLASS_NAME(loadingunit, LoadingUnit); |
| SET_CLASS_NAME(api_error, ApiError); |
| SET_CLASS_NAME(language_error, LanguageError); |
| SET_CLASS_NAME(unhandled_exception, UnhandledException); |
| SET_CLASS_NAME(unwind_error, UnwindError); |
| |
| // Set up names for classes which are also pre-allocated in the vm isolate. |
| cls = isolate_group->object_store()->array_class(); |
| cls.set_name(Symbols::_List()); |
| cls = isolate_group->object_store()->one_byte_string_class(); |
| cls.set_name(Symbols::OneByteString()); |
| cls = isolate_group->object_store()->never_class(); |
| cls.set_name(Symbols::Never()); |
| |
| // Set up names for the pseudo-classes for free list elements and forwarding |
| // corpses. Mainly this makes VM debugging easier. |
| cls = isolate_group->class_table()->At(kFreeListElement); |
| cls.set_name(Symbols::FreeListElement()); |
| cls = isolate_group->class_table()->At(kForwardingCorpse); |
| cls.set_name(Symbols::ForwardingCorpse()); |
| |
| #if defined(DART_PRECOMPILER) |
| const auto& function = |
| Function::Handle(StubCode::UnknownDartCode().function()); |
| function.set_name(Symbols::OptimizedOut()); |
| #endif // defined(DART_PRECOMPILER) |
| |
| { |
| ASSERT(isolate_group == Dart::vm_isolate_group()); |
| Thread* thread = Thread::Current(); |
| WritableVMIsolateScope scope(thread); |
| HeapIterationScope iteration(thread); |
| FinalizeVMIsolateVisitor premarker; |
| ASSERT(isolate_group->heap()->UsedInWords(Heap::kNew) == 0); |
| iteration.IterateOldObjectsNoImagePages(&premarker); |
| // Make the VM isolate read-only again after setting all objects as marked. |
| // Note objects in image pages are already pre-marked. |
| } |
| } |
| |
| void Object::FinalizeReadOnlyObject(ObjectPtr object) { |
| NoSafepointScope no_safepoint; |
| intptr_t cid = object->GetClassId(); |
| if (cid == kOneByteStringCid) { |
| OneByteStringPtr str = static_cast<OneByteStringPtr>(object); |
| if (String::GetCachedHash(str) == 0) { |
| intptr_t hash = String::Hash(str); |
| String::SetCachedHashIfNotSet(str, hash); |
| } |
| intptr_t size = OneByteString::UnroundedSize(str); |
| ASSERT(size <= str->untag()->HeapSize()); |
| memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(str) + size), 0, |
| str->untag()->HeapSize() - size); |
| } else if (cid == kTwoByteStringCid) { |
| TwoByteStringPtr str = static_cast<TwoByteStringPtr>(object); |
| if (String::GetCachedHash(str) == 0) { |
| intptr_t hash = String::Hash(str); |
| String::SetCachedHashIfNotSet(str, hash); |
| } |
| ASSERT(String::GetCachedHash(str) != 0); |
| intptr_t size = TwoByteString::UnroundedSize(str); |
| ASSERT(size <= str->untag()->HeapSize()); |
| memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(str) + size), 0, |
| str->untag()->HeapSize() - size); |
| } else if (cid == kExternalOneByteStringCid) { |
| ExternalOneByteStringPtr str = |
| static_cast<ExternalOneByteStringPtr>(object); |
| if (String::GetCachedHash(str) == 0) { |
| intptr_t hash = String::Hash(str); |
| String::SetCachedHashIfNotSet(str, hash); |
| } |
| } else if (cid == kExternalTwoByteStringCid) { |
| ExternalTwoByteStringPtr str = |
| static_cast<ExternalTwoByteStringPtr>(object); |
| if (String::GetCachedHash(str) == 0) { |
| intptr_t hash = String::Hash(str); |
| String::SetCachedHashIfNotSet(str, hash); |
| } |
| } else if (cid == kCodeSourceMapCid) { |
| CodeSourceMapPtr map = CodeSourceMap::RawCast(object); |
| intptr_t size = CodeSourceMap::UnroundedSize(map); |
| ASSERT(size <= map->untag()->HeapSize()); |
| memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(map) + size), 0, |
| map->untag()->HeapSize() - size); |
| } else if (cid == kCompressedStackMapsCid) { |
| CompressedStackMapsPtr maps = CompressedStackMaps::RawCast(object); |
| intptr_t size = CompressedStackMaps::UnroundedSize(maps); |
| ASSERT(size <= maps->untag()->HeapSize()); |
| memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(maps) + size), 0, |
| maps->untag()->HeapSize() - size); |
| } else if (cid == kPcDescriptorsCid) { |
| PcDescriptorsPtr desc = PcDescriptors::RawCast(object); |
| intptr_t size = PcDescriptors::UnroundedSize(desc); |
| ASSERT(size <= desc->untag()->HeapSize()); |
| memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(desc) + size), 0, |
| desc->untag()->HeapSize() - size); |
| } |
| } |
| |
| void Object::set_vm_isolate_snapshot_object_table(const Array& table) { |
| ASSERT(Isolate::Current() == Dart::vm_isolate()); |
| *vm_isolate_snapshot_object_table_ = table.ptr(); |
| } |
| |
| // Make the unused space in an object whose type has been transformed safe |
| // for traversing during GC. |
| // The unused part of the transformed object is marked as a |
| // TypedDataInt8Array object. |
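| // |
| // For example (illustrative sizes): shrinking a 64-byte object down to a |
| // used size of 32 bytes leaves 32 bytes of leftover space, which is |
| // overwritten with a TypedDataInt8Array header whose length is |
| // 32 - TypedData::InstanceSize(0), so the GC can walk over it. |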
| void Object::MakeUnusedSpaceTraversable(const Object& obj, |
| intptr_t original_size, |
| intptr_t used_size) { |
| ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0); |
| ASSERT(!obj.IsNull()); |
| ASSERT(original_size >= used_size); |
| if (original_size > used_size) { |
| intptr_t leftover_size = original_size - used_size; |
| |
| uword addr = UntaggedObject::ToAddr(obj.ptr()) + used_size; |
| if (leftover_size >= TypedData::InstanceSize(0)) { |
| // Update the leftover space as a TypedDataInt8Array object. |
| TypedDataPtr raw = |
| static_cast<TypedDataPtr>(UntaggedObject::FromAddr(addr)); |
| uword new_tags = |
| UntaggedObject::ClassIdTag::update(kTypedDataInt8ArrayCid, 0); |
| new_tags = UntaggedObject::SizeTag::update(leftover_size, new_tags); |
| const bool is_old = obj.ptr()->IsOldObject(); |
| new_tags = UntaggedObject::OldBit::update(is_old, new_tags); |
| new_tags = UntaggedObject::OldAndNotMarkedBit::update(is_old, new_tags); |
| new_tags = |
| UntaggedObject::OldAndNotRememberedBit::update(is_old, new_tags); |
| new_tags = UntaggedObject::NewBit::update(!is_old, new_tags); |
| // On architectures with a relaxed memory model, the concurrent marker may |
| // observe the write of the filler object's header before observing the |
| // new array length, and so treat it as a pointer. Ensure it is a Smi so |
| // the marker won't dereference it. |
| ASSERT((new_tags & kSmiTagMask) == kSmiTag); |
| raw->untag()->tags_ = new_tags; |
| |
| intptr_t leftover_len = (leftover_size - TypedData::InstanceSize(0)); |
| ASSERT(TypedData::InstanceSize(leftover_len) == leftover_size); |
| raw->untag()->set_length(Smi::New(leftover_len)); |
| raw->untag()->RecomputeDataField(); |
| } else { |
| // Update the leftover space as a basic object. |
| ASSERT(leftover_size == Object::InstanceSize()); |
| ObjectPtr raw = static_cast<ObjectPtr>(UntaggedObject::FromAddr(addr)); |
| uword new_tags = UntaggedObject::ClassIdTag::update(kInstanceCid, 0); |
| new_tags = UntaggedObject::SizeTag::update(leftover_size, new_tags); |
| const bool is_old = obj.ptr()->IsOldObject(); |
| new_tags = UntaggedObject::OldBit::update(is_old, new_tags); |
| new_tags = UntaggedObject::OldAndNotMarkedBit::update(is_old, new_tags); |
| new_tags = |
| UntaggedObject::OldAndNotRememberedBit::update(is_old, new_tags); |
| new_tags = UntaggedObject::NewBit::update(!is_old, new_tags); |
| // As above, ensure the filler object's header is a Smi so that a |
| // concurrent marker on architectures with a relaxed memory model won't |
| // treat it as a pointer and dereference it. |
| ASSERT((new_tags & kSmiTagMask) == kSmiTag); |
| raw->untag()->tags_ = new_tags; |
| } |
| } |
| } |
| |
| void Object::VerifyBuiltinVtables() { |
| #if defined(DEBUG) |
| ASSERT(builtin_vtables_[kIllegalCid] == 0); |
| ASSERT(builtin_vtables_[kFreeListElement] == 0); |
| ASSERT(builtin_vtables_[kForwardingCorpse] == 0); |
| ClassTable* table = IsolateGroup::Current()->class_table(); |
| for (intptr_t cid = kObjectCid; cid < kNumPredefinedCids; cid++) { |
| if (table->HasValidClassAt(cid)) { |
| ASSERT(builtin_vtables_[cid] != 0); |
| } |
| } |
| #endif |
| } |
| |
| void Object::RegisterClass(const Class& cls, |
| const String& name, |
| const Library& lib) { |
| ASSERT(name.Length() > 0); |
| ASSERT(name.CharAt(0) != '_'); |
| cls.set_name(name); |
| lib.AddClass(cls); |
| } |
| |
| void Object::RegisterPrivateClass(const Class& cls, |
| const String& public_class_name, |
| const Library& lib) { |
| ASSERT(public_class_name.Length() > 0); |
| ASSERT(public_class_name.CharAt(0) == '_'); |
| String& str = String::Handle(); |
| str = lib.PrivateName(public_class_name); |
| cls.set_name(str); |
| lib.AddClass(cls); |
| } |
| |
| // Initialize a new isolate from source or from a snapshot. |
| // |
| // There are three possibilities: |
| // 1. Running a Kernel binary. This function will bootstrap from the |
| // kernel binary. |
| // 2. There is no vm snapshot. This function will bootstrap from source. |
| // 3. There is a vm snapshot. The caller should initialize from the snapshot. |
| // |
| // A non-NULL kernel_buffer argument indicates (1). |
| // A NULL kernel_buffer indicates (2) or (3). |
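| // Cases (2) and (3) are distinguished via Dart::vm_snapshot_kind() below. |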
| ErrorPtr Object::Init(IsolateGroup* isolate_group, |
| const uint8_t* kernel_buffer, |
| intptr_t kernel_buffer_size) { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| ASSERT(isolate_group == thread->isolate_group()); |
| TIMELINE_DURATION(thread, Isolate, "Object::Init"); |
| |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| const bool bootstrapping = false; |
| #else |
| const bool is_kernel = (kernel_buffer != NULL); |
| const bool bootstrapping = |
| (Dart::vm_snapshot_kind() == Snapshot::kNone) || is_kernel; |
| #endif // defined(DART_PRECOMPILED_RUNTIME) |
| |
| if (bootstrapping) { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| // Object::Init version when we are bootstrapping from source or from a |
| // Kernel binary. |
| // This will initialize isolate group object_store, shared by all isolates |
| // running in the isolate group. |
| ObjectStore* object_store = isolate_group->object_store(); |
| SafepointWriteRwLocker ml(thread, isolate_group->program_lock()); |
| |
| Class& cls = Class::Handle(zone); |
| Type& type = Type::Handle(zone); |
| Array& array = Array::Handle(zone); |
| Library& lib = Library::Handle(zone); |
| TypeArguments& type_args = TypeArguments::Handle(zone); |
| |
| // All RawArray fields will be initialized to an empty array, therefore |
| // initialize array class first. |
| cls = Class::New<Array, RTN::Array>(isolate_group); |
| ASSERT(object_store->array_class() == Class::null()); |
| object_store->set_array_class(cls); |
| |
| // VM classes that are parameterized (Array, ImmutableArray, |
| // GrowableObjectArray, and LinkedHashMap) are also pre-finalized, so |
| // CalculateFieldOffsets() is not called for them. We therefore need to set |
| // the offset of their type_arguments_ field, which is explicitly declared |
| // in their respective Raw* classes. |
| cls.set_type_arguments_field_offset(Array::type_arguments_offset(), |
| RTN::Array::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| |
| // Set up the growable object array class (this has to be done after the |
| // array class is set up, as one of its fields is an array object). |
| cls = Class::New<GrowableObjectArray, RTN::GrowableObjectArray>( |
| isolate_group); |
| object_store->set_growable_object_array_class(cls); |
| cls.set_type_arguments_field_offset( |
| GrowableObjectArray::type_arguments_offset(), |
| RTN::GrowableObjectArray::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| |
| // Initialize hash set for canonical types. |
| const intptr_t kInitialCanonicalTypeSize = 16; |
| array = HashTables::New<CanonicalTypeSet>(kInitialCanonicalTypeSize, |
| Heap::kOld); |
| object_store->set_canonical_types(array); |
| |
| // Initialize hash set for canonical function types. |
| const intptr_t kInitialCanonicalFunctionTypeSize = 16; |
| array = HashTables::New<CanonicalFunctionTypeSet>( |
| kInitialCanonicalFunctionTypeSize, Heap::kOld); |
| object_store->set_canonical_function_types(array); |
| |
| // Initialize hash set for canonical type parameters. |
| const intptr_t kInitialCanonicalTypeParameterSize = 4; |
| array = HashTables::New<CanonicalTypeParameterSet>( |
| kInitialCanonicalTypeParameterSize, Heap::kOld); |
| object_store->set_canonical_type_parameters(array); |
| |
| // Initialize hash set for canonical_type_arguments_. |
| const intptr_t kInitialCanonicalTypeArgumentsSize = 4; |
| array = HashTables::New<CanonicalTypeArgumentsSet>( |
| kInitialCanonicalTypeArgumentsSize, Heap::kOld); |
| object_store->set_canonical_type_arguments(array); |
| |
| // Set up the type classes and the library prefix class early in the |
| // process. |
| const Class& type_cls = |
| Class::Handle(zone, Class::New<Type, RTN::Type>(isolate_group)); |
| const Class& function_type_cls = Class::Handle( |
| zone, Class::New<FunctionType, RTN::FunctionType>(isolate_group)); |
| const Class& type_ref_cls = |
| Class::Handle(zone, Class::New<TypeRef, RTN::TypeRef>(isolate_group)); |
| const Class& type_parameter_cls = Class::Handle( |
| zone, Class::New<TypeParameter, RTN::TypeParameter>(isolate_group)); |
| const Class& library_prefix_cls = Class::Handle( |
| zone, Class::New<LibraryPrefix, RTN::LibraryPrefix>(isolate_group)); |
| |
| // Pre-allocate the OneByteString class needed by the symbol table. |
| cls = Class::NewStringClass(kOneByteStringCid, isolate_group); |
| object_store->set_one_byte_string_class(cls); |
| |
| // Pre-allocate the TwoByteString class needed by the symbol table. |
| cls = Class::NewStringClass(kTwoByteStringCid, isolate_group); |
| object_store->set_two_byte_string_class(cls); |
| |
| // Set up the symbol table for the symbols created in the isolate. |
| Symbols::SetupSymbolTable(isolate_group); |
| |
| // Set up the libraries array before initializing the core library. |
| const GrowableObjectArray& libraries = |
| GrowableObjectArray::Handle(zone, GrowableObjectArray::New(Heap::kOld)); |
| object_store->set_libraries(libraries); |
| |
| // Pre-register the core library. |
| Library::InitCoreLibrary(isolate_group); |
| |
| // Basic infrastructure has been set up; initialize the class dictionary. |
| const Library& core_lib = Library::Handle(zone, Library::CoreLibrary()); |
| ASSERT(!core_lib.IsNull()); |
| |
| const GrowableObjectArray& pending_classes = |
| GrowableObjectArray::Handle(zone, GrowableObjectArray::New()); |
| object_store->set_pending_classes(pending_classes); |
| |
| // Now that the symbol table is initialized and the core dictionary as |
| // well as the core implementation dictionary have been set up, preallocate |
| // the remaining classes and register them by name in the dictionaries. |
| String& name = String::Handle(zone); |
| cls = object_store->array_class(); // Was allocated above. |
| RegisterPrivateClass(cls, Symbols::_List(), core_lib); |
| pending_classes.Add(cls); |
| // We cannot use NewNonParameterizedType(), because Array is |
| // parameterized. Warning: class _List has not been patched yet. Its |
| // declared number of type parameters is still 0. It will become 1 after |
| // patching. The array type allocated below represents the raw type _List |
| // and not _List<E> as we could expect. Use with caution. |
| type = Type::New(Class::Handle(zone, cls.ptr()), |
| TypeArguments::Handle(zone), Nullability::kNonNullable); |
| type.SetIsFinalized(); |
| type ^= type.Canonicalize(thread, nullptr); |
| object_store->set_array_type(type); |
| type = type.ToNullability(Nullability::kLegacy, Heap::kOld); |
| object_store->set_legacy_array_type(type); |
| type = type.ToNullability(Nullability::kNonNullable, Heap::kOld); |
| object_store->set_non_nullable_array_type(type); |
| |
| cls = object_store->growable_object_array_class(); // Was allocated above. |
| RegisterPrivateClass(cls, Symbols::_GrowableList(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group); |
| object_store->set_immutable_array_class(cls); |
| cls.set_type_arguments_field_offset(Array::type_arguments_offset(), |
| RTN::Array::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| ASSERT(object_store->immutable_array_class() != |
| object_store->array_class()); |
| cls.set_is_prefinalized(); |
| RegisterPrivateClass(cls, Symbols::_ImmutableList(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = object_store->one_byte_string_class(); // Was allocated above. |
| RegisterPrivateClass(cls, Symbols::OneByteString(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = object_store->two_byte_string_class(); // Was allocated above. |
| RegisterPrivateClass(cls, Symbols::TwoByteString(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::NewStringClass(kExternalOneByteStringCid, isolate_group); |
| object_store->set_external_one_byte_string_class(cls); |
| RegisterPrivateClass(cls, Symbols::ExternalOneByteString(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::NewStringClass(kExternalTwoByteStringCid, isolate_group); |
| object_store->set_external_two_byte_string_class(cls); |
| RegisterPrivateClass(cls, Symbols::ExternalTwoByteString(), core_lib); |
| pending_classes.Add(cls); |
| |
| // Pre-register the isolate library so the native class implementations can |
| // be hooked up before compiling it. |
| Library& isolate_lib = Library::Handle( |
| zone, Library::LookupLibrary(thread, Symbols::DartIsolate())); |
| if (isolate_lib.IsNull()) { |
| isolate_lib = Library::NewLibraryHelper(Symbols::DartIsolate(), true); |
| isolate_lib.SetLoadRequested(); |
| isolate_lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kIsolate, isolate_lib); |
| ASSERT(!isolate_lib.IsNull()); |
| ASSERT(isolate_lib.ptr() == Library::IsolateLibrary()); |
| |
| cls = Class::New<Capability, RTN::Capability>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_CapabilityImpl(), isolate_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<ReceivePort, RTN::ReceivePort>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_RawReceivePortImpl(), isolate_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<SendPort, RTN::SendPort>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_SendPortImpl(), isolate_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<TransferableTypedData, RTN::TransferableTypedData>( |
| isolate_group); |
| RegisterPrivateClass(cls, Symbols::_TransferableTypedDataImpl(), |
| isolate_lib); |
| pending_classes.Add(cls); |
| |
| const Class& stacktrace_cls = Class::Handle( |
| zone, Class::New<StackTrace, RTN::StackTrace>(isolate_group)); |
| RegisterPrivateClass(stacktrace_cls, Symbols::_StackTrace(), core_lib); |
| pending_classes.Add(stacktrace_cls); |
| // Super type set below, after Object is allocated. |
| |
| cls = Class::New<RegExp, RTN::RegExp>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_RegExp(), core_lib); |
| pending_classes.Add(cls); |
| |
| // Initialize the base interfaces used by the core VM classes. |
| |
| // Allocate and initialize the pre-allocated classes in the core library. |
| // The script and token index of these pre-allocated classes are set up |
| // when the corelib script is compiled. |
| cls = Class::New<Instance, RTN::Instance>(kInstanceCid, isolate_group); |
| object_store->set_object_class(cls); |
| cls.set_name(Symbols::Object()); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| cls.set_is_const(); |
| core_lib.AddClass(cls); |
| pending_classes.Add(cls); |
| type = Type::NewNonParameterizedType(cls); |
| ASSERT(type.IsCanonical()); |
| object_store->set_object_type(type); |
| type = type.ToNullability(Nullability::kLegacy, Heap::kOld); |
| ASSERT(type.IsCanonical()); |
| object_store->set_legacy_object_type(type); |
| type = type.ToNullability(Nullability::kNonNullable, Heap::kOld); |
| ASSERT(type.IsCanonical()); |
| object_store->set_non_nullable_object_type(type); |
| type = type.ToNullability(Nullability::kNullable, Heap::kOld); |
| ASSERT(type.IsCanonical()); |
| object_store->set_nullable_object_type(type); |
| |
| cls = Class::New<Bool, RTN::Bool>(isolate_group); |
| object_store->set_bool_class(cls); |
| RegisterClass(cls, Symbols::Bool(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group); |
| object_store->set_null_class(cls); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| RegisterClass(cls, Symbols::Null(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| cls.set_name(Symbols::Never()); |
| object_store->set_never_class(cls); |
| |
| ASSERT(!library_prefix_cls.IsNull()); |
| RegisterPrivateClass(library_prefix_cls, Symbols::_LibraryPrefix(), |
| core_lib); |
| pending_classes.Add(library_prefix_cls); |
| |
| RegisterPrivateClass(type_cls, Symbols::_Type(), core_lib); |
| pending_classes.Add(type_cls); |
| |
| RegisterPrivateClass(function_type_cls, Symbols::_FunctionType(), core_lib); |
| pending_classes.Add(function_type_cls); |
| |
| RegisterPrivateClass(type_ref_cls, Symbols::_TypeRef(), core_lib); |
| pending_classes.Add(type_ref_cls); |
| |
| RegisterPrivateClass(type_parameter_cls, Symbols::_TypeParameter(), |
| core_lib); |
| pending_classes.Add(type_parameter_cls); |
| |
| cls = Class::New<Integer, RTN::Integer>(isolate_group); |
| object_store->set_integer_implementation_class(cls); |
| RegisterPrivateClass(cls, Symbols::_IntegerImplementation(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Smi, RTN::Smi>(isolate_group); |
| object_store->set_smi_class(cls); |
| RegisterPrivateClass(cls, Symbols::_Smi(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Mint, RTN::Mint>(isolate_group); |
| object_store->set_mint_class(cls); |
| RegisterPrivateClass(cls, Symbols::_Mint(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Double, RTN::Double>(isolate_group); |
| object_store->set_double_class(cls); |
| RegisterPrivateClass(cls, Symbols::_Double(), core_lib); |
| pending_classes.Add(cls); |
| |
| // Class that represents the Dart class _Closure and C++ class Closure. |
| cls = Class::New<Closure, RTN::Closure>(isolate_group); |
| object_store->set_closure_class(cls); |
| RegisterPrivateClass(cls, Symbols::_Closure(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<WeakProperty, RTN::WeakProperty>(isolate_group); |
| object_store->set_weak_property_class(cls); |
| RegisterPrivateClass(cls, Symbols::_WeakProperty(), core_lib); |
| |
| // Pre-register the mirrors library so we can place the vm class |
| // MirrorReference there rather than the core library. |
| lib = Library::LookupLibrary(thread, Symbols::DartMirrors()); |
| if (lib.IsNull()) { |
| lib = Library::NewLibraryHelper(Symbols::DartMirrors(), true); |
| lib.SetLoadRequested(); |
| lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kMirrors, lib); |
| ASSERT(!lib.IsNull()); |
| ASSERT(lib.ptr() == Library::MirrorsLibrary()); |
| |
| cls = Class::New<MirrorReference, RTN::MirrorReference>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_MirrorReference(), lib); |
| |
| // Pre-register the collection library so we can place the vm class |
| // LinkedHashMap there rather than the core library. |
| lib = Library::LookupLibrary(thread, Symbols::DartCollection()); |
| if (lib.IsNull()) { |
| lib = Library::NewLibraryHelper(Symbols::DartCollection(), true); |
| lib.SetLoadRequested(); |
| lib.Register(thread); |
| } |
| |
| object_store->set_bootstrap_library(ObjectStore::kCollection, lib); |
| ASSERT(!lib.IsNull()); |
| ASSERT(lib.ptr() == Library::CollectionLibrary()); |
| cls = Class::New<LinkedHashMap, RTN::LinkedHashMap>(isolate_group); |
| object_store->set_linked_hash_map_class(cls); |
| cls.set_type_arguments_field_offset( |
| LinkedHashMap::type_arguments_offset(), |
| RTN::LinkedHashMap::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(2); |
| RegisterPrivateClass(cls, Symbols::_LinkedHashMap(), lib); |
| pending_classes.Add(cls); |
| |
| // Pre-register the async library so we can place the vm class |
| // FutureOr there rather than the core library. |
| lib = Library::LookupLibrary(thread, Symbols::DartAsync()); |
| if (lib.IsNull()) { |
| lib = Library::NewLibraryHelper(Symbols::DartAsync(), true); |
| lib.SetLoadRequested(); |
| lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kAsync, lib); |
| ASSERT(!lib.IsNull()); |
| ASSERT(lib.ptr() == Library::AsyncLibrary()); |
| cls = Class::New<FutureOr, RTN::FutureOr>(isolate_group); |
| cls.set_type_arguments_field_offset(FutureOr::type_arguments_offset(), |
| RTN::FutureOr::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| RegisterClass(cls, Symbols::FutureOr(), lib); |
| pending_classes.Add(cls); |
| |
| // Pre-register the developer library so we can place the vm class |
| // UserTag there rather than the core library. |
| lib = Library::LookupLibrary(thread, Symbols::DartDeveloper()); |
| if (lib.IsNull()) { |
| lib = Library::NewLibraryHelper(Symbols::DartDeveloper(), true); |
| lib.SetLoadRequested(); |
| lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kDeveloper, lib); |
| ASSERT(!lib.IsNull()); |
| ASSERT(lib.ptr() == Library::DeveloperLibrary()); |
| cls = Class::New<UserTag, RTN::UserTag>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_UserTag(), lib); |
| pending_classes.Add(cls); |
| |
| // Set up some default native field classes, which can be extended for |
| // specifying native fields in Dart classes. |
| Library::InitNativeWrappersLibrary(isolate_group, is_kernel); |
| ASSERT(object_store->native_wrappers_library() != Library::null()); |
| |
| // Pre-register the typed_data library so the native class implementations |
| // can be hooked up before compiling it. |
| lib = Library::LookupLibrary(thread, Symbols::DartTypedData()); |
| if (lib.IsNull()) { |
| lib = Library::NewLibraryHelper(Symbols::DartTypedData(), true); |
| lib.SetLoadRequested(); |
| lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kTypedData, lib); |
| ASSERT(!lib.IsNull()); |
| ASSERT(lib.ptr() == Library::TypedDataLibrary()); |
| #define REGISTER_TYPED_DATA_CLASS(clazz) \ |
| cls = Class::NewTypedDataClass(kTypedData##clazz##ArrayCid, isolate_group); \ |
| RegisterPrivateClass(cls, Symbols::_##clazz##List(), lib); |
| |
| DART_CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_CLASS); |
| #undef REGISTER_TYPED_DATA_CLASS |
| #define REGISTER_TYPED_DATA_VIEW_CLASS(clazz) \ |
| cls = \ |
| Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid, isolate_group); \ |
| RegisterPrivateClass(cls, Symbols::_##clazz##View(), lib); \ |
| pending_classes.Add(cls); |
| |
| CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_VIEW_CLASS); |
| |
| cls = Class::NewTypedDataViewClass(kByteDataViewCid, isolate_group); |
| RegisterPrivateClass(cls, Symbols::_ByteDataView(), lib); |
| pending_classes.Add(cls); |
| |
| #undef REGISTER_TYPED_DATA_VIEW_CLASS |
| #define REGISTER_EXT_TYPED_DATA_CLASS(clazz) \ |
| cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid, \ |
| isolate_group); \ |
| RegisterPrivateClass(cls, Symbols::_External##clazz(), lib); |
| |
| cls = Class::New<Instance, RTN::Instance>(kByteBufferCid, isolate_group, |
| /*register_class=*/false); |
| cls.set_instance_size(0, 0); |
| cls.set_next_field_offset(-kWordSize, -compiler::target::kWordSize); |
| isolate_group->class_table()->Register(cls); |
| RegisterPrivateClass(cls, Symbols::_ByteBuffer(), lib); |
| pending_classes.Add(cls); |
| |
| CLASS_LIST_TYPED_DATA(REGISTER_EXT_TYPED_DATA_CLASS); |
| #undef REGISTER_EXT_TYPED_DATA_CLASS |
| // Register Float32x4, Int32x4, and Float64x2 in the object store. |
| cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_Float32x4(), lib); |
| pending_classes.Add(cls); |
| object_store->set_float32x4_class(cls); |
| |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| RegisterClass(cls, Symbols::Float32x4(), lib); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_float32x4_type(type); |
| |
| cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_Int32x4(), lib); |
| pending_classes.Add(cls); |
| object_store->set_int32x4_class(cls); |
| |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| RegisterClass(cls, Symbols::Int32x4(), lib); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_int32x4_type(type); |
| |
| cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_Float64x2(), lib); |
| pending_classes.Add(cls); |
| object_store->set_float64x2_class(cls); |
| |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| RegisterClass(cls, Symbols::Float64x2(), lib); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_float64x2_type(type); |
| |
| // Set the super type of class StackTrace to the Object type so that the |
| // 'toString' method is inherited. |
| type = object_store->object_type(); |
| stacktrace_cls.set_super_type(type); |
| |
| // Abstract class that represents the Dart class Type. |
| // Note that this class is implemented by Dart class _AbstractType. |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| RegisterClass(cls, Symbols::Type(), core_lib); |
| pending_classes.Add(cls); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_type_type(type); |
| |
| // Abstract class that represents the Dart class Function. |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| RegisterClass(cls, Symbols::Function(), core_lib); |
| pending_classes.Add(cls); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_function_type(type); |
| type = type.ToNullability(Nullability::kLegacy, Heap::kOld); |
| object_store->set_legacy_function_type(type); |
| type = type.ToNullability(Nullability::kNonNullable, Heap::kOld); |
| object_store->set_non_nullable_function_type(type); |
| |
| cls = Class::New<Number, RTN::Number>(isolate_group); |
| RegisterClass(cls, Symbols::Number(), core_lib); |
| pending_classes.Add(cls); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_number_type(type); |
| type = type.ToNullability(Nullability::kLegacy, Heap::kOld); |
| object_store->set_legacy_number_type(type); |
| type = type.ToNullability(Nullability::kNonNullable, Heap::kOld); |
| object_store->set_non_nullable_number_type(type); |
| |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| RegisterClass(cls, Symbols::Int(), core_lib); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| pending_classes.Add(cls); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_int_type(type); |
| type = type.ToNullability(Nullability::kLegacy, Heap::kOld); |
| object_store->set_legacy_int_type(type); |
| type = type.ToNullability(Nullability::kNonNullable, Heap::kOld); |
| object_store->set_non_nullable_int_type(type); |
| type = type.ToNullability(Nullability::kNullable, Heap::kOld); |
| object_store->set_nullable_int_type(type); |
| |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| RegisterClass(cls, Symbols::Double(), core_lib); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| pending_classes.Add(cls); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_double_type(type); |
| type = type.ToNullability(Nullability::kLegacy, Heap::kOld); |
| object_store->set_legacy_double_type(type); |
| type = type.ToNullability(Nullability::kNonNullable, Heap::kOld); |
| object_store->set_non_nullable_double_type(type); |
| type = type.ToNullability(Nullability::kNullable, Heap::kOld); |
| object_store->set_nullable_double_type(type); |
| |
| name = Symbols::_String().ptr(); |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| RegisterClass(cls, name, core_lib); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| pending_classes.Add(cls); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_string_type(type); |
| type = type.ToNullability(Nullability::kLegacy, Heap::kOld); |
| object_store->set_legacy_string_type(type); |
| type = type.ToNullability(Nullability::kNonNullable, Heap::kOld); |
| object_store->set_non_nullable_string_type(type); |
| |
| cls = object_store->bool_class(); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_bool_type(type); |
| type = type.ToNullability(Nullability::kLegacy, Heap::kOld); |
| object_store->set_legacy_bool_type(type); |
| type = type.ToNullability(Nullability::kNonNullable, Heap::kOld); |
| object_store->set_non_nullable_bool_type(type); |
| |
| cls = object_store->smi_class(); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_smi_type(type); |
| type = type.ToNullability(Nullability::kLegacy, Heap::kOld); |
| object_store->set_legacy_smi_type(type); |
| type = type.ToNullability(Nullability::kNonNullable, Heap::kOld); |
| object_store->set_non_nullable_smi_type(type); |
| |
| cls = object_store->mint_class(); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_mint_type(type); |
| type = type.ToNullability(Nullability::kLegacy, Heap::kOld); |
| object_store->set_legacy_mint_type(type); |
| type = type.ToNullability(Nullability::kNonNullable, Heap::kOld); |
| object_store->set_non_nullable_mint_type(type); |
| |
| // The classes 'void' and 'dynamic' are phony classes to make type checking |
| // more regular; they live in the VM isolate. The class 'void' is not |
| // registered in the class dictionary because its name is a reserved word. |
| // The class 'dynamic' is registered in the class dictionary because its |
| // name is a built-in identifier (this is wrong). The corresponding types |
| // are stored in the object store. |
| cls = object_store->null_class(); |
| type = |
| Type::New(cls, Object::null_type_arguments(), Nullability::kNullable); |
| type.SetIsFinalized(); |
| type ^= type.Canonicalize(thread, nullptr); |
| object_store->set_null_type(type); |
| cls.set_declaration_type(type); |
| ASSERT(type.IsNullable()); |
| |
| // Consider removing when/if Null becomes an ordinary class. |
| type = object_store->object_type(); |
| cls.set_super_type(type); |
| |
| cls = object_store->never_class(); |
| type = Type::New(cls, Object::null_type_arguments(), |
| Nullability::kNonNullable); |
| type.SetIsFinalized(); |
| type ^= type.Canonicalize(thread, nullptr); |
| object_store->set_never_type(type); |
| |
| // Create and cache commonly used type arguments <int>, <double>, |
| // <String>, <String, dynamic> and <String, String>. |
| type_args = TypeArguments::New(1); |
| type = object_store->int_type(); |
| type_args.SetTypeAt(0, type); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_int(type_args); |
| type_args = TypeArguments::New(1); |
| type = object_store->legacy_int_type(); |
| type_args.SetTypeAt(0, type); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_legacy_int(type_args); |
| type_args = TypeArguments::New(1); |
| type = object_store->non_nullable_int_type(); |
| type_args.SetTypeAt(0, type); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_non_nullable_int(type_args); |
| |
| type_args = TypeArguments::New(1); |
| type = object_store->double_type(); |
| type_args.SetTypeAt(0, type); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_double(type_args); |
| type_args = TypeArguments::New(1); |
| type = object_store->legacy_double_type(); |
| type_args.SetTypeAt(0, type); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_legacy_double(type_args); |
| type_args = TypeArguments::New(1); |
| type = object_store->non_nullable_double_type(); |
| type_args.SetTypeAt(0, type); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_non_nullable_double(type_args); |
| |
| type_args = TypeArguments::New(1); |
| type = object_store->string_type(); |
| type_args.SetTypeAt(0, type); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_string(type_args); |
| type_args = TypeArguments::New(1); |
| type = object_store->legacy_string_type(); |
| type_args.SetTypeAt(0, type); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_legacy_string(type_args); |
| type_args = TypeArguments::New(1); |
| type = object_store->non_nullable_string_type(); |
| type_args.SetTypeAt(0, type); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_non_nullable_string(type_args); |
| |
| type_args = TypeArguments::New(2); |
| type = object_store->string_type(); |
| type_args.SetTypeAt(0, type); |
| type_args.SetTypeAt(1, Object::dynamic_type()); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_string_dynamic(type_args); |
| type_args = TypeArguments::New(2); |
| type = object_store->legacy_string_type(); |
| type_args.SetTypeAt(0, type); |
| type_args.SetTypeAt(1, Object::dynamic_type()); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_legacy_string_dynamic(type_args); |
| type_args = TypeArguments::New(2); |
| type = object_store->non_nullable_string_type(); |
| type_args.SetTypeAt(0, type); |
| type_args.SetTypeAt(1, Object::dynamic_type()); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_non_nullable_string_dynamic(type_args); |
| |
| type_args = TypeArguments::New(2); |
| type = object_store->string_type(); |
| type_args.SetTypeAt(0, type); |
| type_args.SetTypeAt(1, type); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_string_string(type_args); |
| type_args = TypeArguments::New(2); |
| type = object_store->legacy_string_type(); |
| type_args.SetTypeAt(0, type); |
| type_args.SetTypeAt(1, type); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_legacy_string_legacy_string(type_args); |
| type_args = TypeArguments::New(2); |
| type = object_store->non_nullable_string_type(); |
| type_args.SetTypeAt(0, type); |
| type_args.SetTypeAt(1, type); |
| type_args = type_args.Canonicalize(thread, nullptr); |
| object_store->set_type_argument_non_nullable_string_non_nullable_string( |
| type_args); |
| |
| lib = Library::LookupLibrary(thread, Symbols::DartFfi()); |
| if (lib.IsNull()) { |
| lib = Library::NewLibraryHelper(Symbols::DartFfi(), true); |
| lib.SetLoadRequested(); |
| lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kFfi, lib); |
| |
| cls = Class::New<Instance, RTN::Instance>(kFfiNativeTypeCid, isolate_group); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| pending_classes.Add(cls); |
| object_store->set_ffi_native_type_class(cls); |
| RegisterClass(cls, Symbols::FfiNativeType(), lib); |
| |
| #define REGISTER_FFI_TYPE_MARKER(clazz) \ |
| cls = Class::New<Instance, RTN::Instance>(kFfi##clazz##Cid, isolate_group); \ |
| cls.set_num_type_arguments_unsafe(0); \ |
| cls.set_is_prefinalized(); \ |
| pending_classes.Add(cls); \ |
| RegisterClass(cls, Symbols::Ffi##clazz(), lib); |
| CLASS_LIST_FFI_TYPE_MARKER(REGISTER_FFI_TYPE_MARKER); |
| #undef REGISTER_FFI_TYPE_MARKER |
| |
| cls = Class::New<Instance, RTN::Instance>(kFfiNativeFunctionCid, |
| isolate_group); |
| cls.set_type_arguments_field_offset(Pointer::type_arguments_offset(), |
| RTN::Pointer::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| cls.set_is_prefinalized(); |
| pending_classes.Add(cls); |
| RegisterClass(cls, Symbols::FfiNativeFunction(), lib); |
| |
| cls = Class::NewPointerClass(kFfiPointerCid, isolate_group); |
| object_store->set_ffi_pointer_class(cls); |
| pending_classes.Add(cls); |
| RegisterClass(cls, Symbols::FfiPointer(), lib); |
| |
| cls = Class::New<DynamicLibrary, RTN::DynamicLibrary>(kFfiDynamicLibraryCid, |
| isolate_group); |
| cls.set_instance_size(DynamicLibrary::InstanceSize(), |
| compiler::target::RoundedAllocationSize( |
| RTN::DynamicLibrary::InstanceSize())); |
| cls.set_is_prefinalized(); |
| pending_classes.Add(cls); |
| RegisterClass(cls, Symbols::FfiDynamicLibrary(), lib); |
| |
| // Finish the initialization by compiling the bootstrap scripts containing |
| // the base interfaces and the implementation of the internal classes. |
| const Error& error = Error::Handle( |
| zone, Bootstrap::DoBootstrapping(kernel_buffer, kernel_buffer_size)); |
| if (!error.IsNull()) { |
| return error.ptr(); |
| } |
| |
| isolate_group->class_table()->CopySizesFromClassObjects(); |
| |
| ClassFinalizer::VerifyBootstrapClasses(); |
| |
| // Set up the intrinsic state of all functions (core, math and typed data). |
| compiler::Intrinsifier::InitializeState(); |
| |
| // Adds static const fields (class ids) to the class 'ClassID'. |
| lib = Library::LookupLibrary(thread, Symbols::DartInternal()); |
| ASSERT(!lib.IsNull()); |
| cls = lib.LookupClassAllowPrivate(Symbols::ClassID()); |
| ASSERT(!cls.IsNull()); |
| const bool injected = cls.InjectCIDFields(); |
| ASSERT(injected); |
| |
| isolate_group->object_store()->InitKnownObjects(); |
| |
| // Set up recognized state of all functions (core, math and typed data). |
| MethodRecognizer::InitializeState(); |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| } else { |
| // Object::Init version when we are running in a version of dart that has a |
| // full snapshot linked in and an isolate is initialized using the full |
| // snapshot. |
| ObjectStore* object_store = isolate_group->object_store(); |
| SafepointWriteRwLocker ml(thread, isolate_group->program_lock()); |
| |
| Class& cls = Class::Handle(zone); |
| |
| // Set up empty classes in the object store; these will get initialized |
| // correctly when we read from the snapshot. This is done to allow |
| // bootstrapping of reading classes from the snapshot. Some classes are not |
| // stored in the object store. Yet we still need to create their Class |
| // objects so that they get put into the class_table (as a side effect of |
| // Class::New()). |
| cls = Class::New<Instance, RTN::Instance>(kInstanceCid, isolate_group); |
| object_store->set_object_class(cls); |
| |
| cls = Class::New<LibraryPrefix, RTN::LibraryPrefix>(isolate_group); |
| cls = Class::New<Type, RTN::Type>(isolate_group); |
| cls = Class::New<FunctionType, RTN::FunctionType>(isolate_group); |
| cls = Class::New<TypeRef, RTN::TypeRef>(isolate_group); |
| cls = Class::New<TypeParameter, RTN::TypeParameter>(isolate_group); |
| |
| cls = Class::New<Array, RTN::Array>(isolate_group); |
| object_store->set_array_class(cls); |
| |
| cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group); |
| object_store->set_immutable_array_class(cls); |
| |
| cls = Class::New<GrowableObjectArray, RTN::GrowableObjectArray>( |
| isolate_group); |
| object_store->set_growable_object_array_class(cls); |
| |
| cls = Class::New<LinkedHashMap, RTN::LinkedHashMap>(isolate_group); |
| object_store->set_linked_hash_map_class(cls); |
| |
| cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group); |
| object_store->set_float32x4_class(cls); |
| |
| cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group); |
| object_store->set_int32x4_class(cls); |
| |
| cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group); |
| object_store->set_float64x2_class(cls); |
| |
| #define REGISTER_TYPED_DATA_CLASS(clazz) \ |
| cls = Class::NewTypedDataClass(kTypedData##clazz##Cid, isolate_group); |
| CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_CLASS); |
| #undef REGISTER_TYPED_DATA_CLASS |
| #define REGISTER_TYPED_DATA_VIEW_CLASS(clazz) \ |
| cls = Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid, isolate_group); |
| CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_VIEW_CLASS); |
| #undef REGISTER_TYPED_DATA_VIEW_CLASS |
| cls = Class::NewTypedDataViewClass(kByteDataViewCid, isolate_group); |
| #define REGISTER_EXT_TYPED_DATA_CLASS(clazz) \ |
| cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid, \ |
| isolate_group); |
| CLASS_LIST_TYPED_DATA(REGISTER_EXT_TYPED_DATA_CLASS); |
| #undef REGISTER_EXT_TYPED_DATA_CLASS |
| |
| cls = Class::New<Instance, RTN::Instance>(kFfiNativeTypeCid, isolate_group); |
| object_store->set_ffi_native_type_class(cls); |
| |
| #define REGISTER_FFI_CLASS(clazz) \ |
| cls = Class::New<Instance, RTN::Instance>(kFfi##clazz##Cid, isolate_group); |
| CLASS_LIST_FFI_TYPE_MARKER(REGISTER_FFI_CLASS); |
| #undef REGISTER_FFI_CLASS |
| |
| cls = Class::New<Instance, RTN::Instance>(kFfiNativeFunctionCid, |
| isolate_group); |
| |
| cls = Class::NewPointerClass(kFfiPointerCid, isolate_group); |
| object_store->set_ffi_pointer_class(cls); |
| |
| cls = Class::New<DynamicLibrary, RTN::DynamicLibrary>(kFfiDynamicLibraryCid, |
| isolate_group); |
| |
| cls = Class::New<Instance, RTN::Instance>(kByteBufferCid, isolate_group, |
| /*register_class=*/false); |
| cls.set_instance_size_in_words(0, 0); |
| isolate_group->class_table()->Register(cls); |
| |
| cls = Class::New<Integer, RTN::Integer>(isolate_group); |
| object_store->set_integer_implementation_class(cls); |
| |
| cls = Class::New<Smi, RTN::Smi>(isolate_group); |
| object_store->set_smi_class(cls); |
| |
| cls = Class::New<Mint, RTN::Mint>(isolate_group); |
| object_store->set_mint_class(cls); |
| |
| cls = Class::New<Double, RTN::Double>(isolate_group); |
| object_store->set_double_class(cls); |
| |
| cls = Class::New<Closure, RTN::Closure>(isolate_group); |
| object_store->set_closure_class(cls); |
| |
| cls = Class::NewStringClass(kOneByteStringCid, isolate_group); |
| object_store->set_one_byte_string_class(cls); |
| |
| cls = Class::NewStringClass(kTwoByteStringCid, isolate_group); |
| object_store->set_two_byte_string_class(cls); |
| |
| cls = Class::NewStringClass(kExternalOneByteStringCid, isolate_group); |
| object_store->set_external_one_byte_string_class(cls); |
| |
| cls = Class::NewStringClass(kExternalTwoByteStringCid, isolate_group); |
| object_store->set_external_two_byte_string_class(cls); |
| |
| cls = Class::New<Bool, RTN::Bool>(isolate_group); |
| object_store->set_bool_class(cls); |
| |
| cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group); |
| object_store->set_null_class(cls); |
| |
| cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group); |
| object_store->set_never_class(cls); |
| |
| cls = Class::New<Capability, RTN::Capability>(isolate_group); |
| cls = Class::New<ReceivePort, RTN::ReceivePort>(isolate_group); |
| cls = Class::New<SendPort, RTN::SendPort>(isolate_group); |
| cls = Class::New<StackTrace, RTN::StackTrace>(isolate_group); |
| cls = Class::New<RegExp, RTN::RegExp>(isolate_group); |
| cls = Class::New<Number, RTN::Number>(isolate_group); |
| |
| cls = Class::New<WeakProperty, RTN::WeakProperty>(isolate_group); |
| object_store->set_weak_property_class(cls); |
| |
| cls = Class::New<MirrorReference, RTN::MirrorReference>(isolate_group); |
| cls = Class::New<UserTag, RTN::UserTag>(isolate_group); |
| cls = Class::New<FutureOr, RTN::FutureOr>(isolate_group); |
| cls = Class::New<TransferableTypedData, RTN::TransferableTypedData>( |
| isolate_group); |
| } |
| return Error::null(); |
| } |
| |
| #if defined(DEBUG) |
| bool Object::InVMIsolateHeap() const { |
| if (FLAG_verify_handles && ptr()->untag()->InVMIsolateHeap()) { |
| Heap* vm_isolate_heap = Dart::vm_isolate_group()->heap(); |
| uword addr = UntaggedObject::ToAddr(ptr()); |
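| // With write-protected code, instructions may be referenced through an |
| // executable alias that the heap does not track; translate to the |
| // writable alias before re-checking containment. |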
| if (!vm_isolate_heap->Contains(addr)) { |
| ASSERT(FLAG_write_protect_code); |
| addr = UntaggedObject::ToAddr(OldPage::ToWritable(ptr())); |
| ASSERT(vm_isolate_heap->Contains(addr)); |
| } |
| } |
| return ptr()->untag()->InVMIsolateHeap(); |
| } |
| #endif // DEBUG |
| |
| void Object::Print() const { |
| THR_Print("%s\n", ToCString()); |
| } |
| |
| StringPtr Object::DictionaryName() const { |
| return String::null(); |
| } |
| |
| void Object::InitializeObject(uword address, |
| intptr_t class_id, |
| intptr_t size, |
| bool compressed) { |
| // Note: we skip the header word here to avoid a racy read in the concurrent |
| // marker from observing the null object when it reads into a heap page |
| // allocated after marking started. |
| uword cur = address + sizeof(UntaggedObject); |
| uword end = address + size; |
| if (class_id == kInstructionsCid) { |
| compiler::target::uword initial_value = kBreakInstructionFiller; |
| while (cur < end) { |
| *reinterpret_cast<compiler::target::uword*>(cur) = initial_value; |
| cur += compiler::target::kWordSize; |
| } |
| } else { |
| uword initial_value; |
| bool needs_init; |
| if (IsTypedDataBaseClassId(class_id)) { |
| initial_value = 0; |
| // If the size is greater than both kNewAllocatableSize and |
| // kAllocatablePageSize, the object must have been allocated to a new |
| // large page, which must already have been zero initialized by the OS. |
| #if defined(DART_COMPRESSED_POINTERS) |
| needs_init = true; |
| #else |
| needs_init = Heap::IsAllocatableInNewSpace(size) || |
| Heap::IsAllocatableViaFreeLists(size); |
| #endif |
| } else { |
| initial_value = static_cast<uword>(null_); |
| #if defined(DART_COMPRESSED_POINTERS) |
| if (compressed) { |
| initial_value &= 0xFFFFFFFF; |
| initial_value |= initial_value << 32; |
| } |
| #endif |
| needs_init = true; |
| } |
| if (needs_init) { |
| while (cur < end) { |
| *reinterpret_cast<uword*>(cur) = initial_value; |
| cur += kWordSize; |
| } |
| } else { |
| // Check that MemorySanitizer understands this is initialized. |
| MSAN_CHECK_INITIALIZED(reinterpret_cast<void*>(address), size); |
| #if defined(DEBUG) |
| while (cur < end) { |
| ASSERT(*reinterpret_cast<uword*>(cur) == initial_value); |
| cur += kWordSize; |
| } |
| #endif |
| } |
| } |
| uword tags = 0; |
| ASSERT(class_id != kIllegalCid); |
| tags = UntaggedObject::ClassIdTag::update(class_id, tags); |
| tags = UntaggedObject::SizeTag::update(size, tags); |
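| // The low alignment bit of the address encodes the space: new-space and |
| // old-space allocations are offset by one word within the double-word |
| // object alignment. |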
| const bool is_old = |
| (address & kNewObjectAlignmentOffset) == kOldObjectAlignmentOffset; |
| tags = UntaggedObject::OldBit::update(is_old, tags); |
| tags = UntaggedObject::OldAndNotMarkedBit::update(is_old, tags); |
| tags = UntaggedObject::OldAndNotRememberedBit::update(is_old, tags); |
| tags = UntaggedObject::NewBit::update(!is_old, tags); |
| #if defined(HASH_IN_OBJECT_HEADER) |
| tags = UntaggedObject::HashTag::update(0, tags); |
| #endif |
| reinterpret_cast<UntaggedObject*>(address)->tags_ = tags; |
| } |
| |
| void Object::CheckHandle() const { |
| #if defined(DEBUG) |
| if (ptr_ != Object::null()) { |
| intptr_t cid = ptr_->GetClassIdMayBeSmi(); |
| if (cid >= kNumPredefinedCids) { |
| cid = kInstanceCid; |
| } |
| ASSERT(vtable() == builtin_vtables_[cid]); |
| if (FLAG_verify_handles && ptr_->IsHeapObject()) { |
| Heap* isolate_heap = IsolateGroup::Current()->heap(); |
| if (!isolate_heap->new_space()->scavenging()) { |
| Heap* vm_isolate_heap = Dart::vm_isolate_group()->heap(); |
| uword addr = UntaggedObject::ToAddr(ptr_); |
| if (!isolate_heap->Contains(addr) && !vm_isolate_heap->Contains(addr)) { |
| ASSERT(FLAG_write_protect_code); |
| addr = UntaggedObject::ToAddr(OldPage::ToWritable(ptr_)); |
| ASSERT(isolate_heap->Contains(addr) || |
| vm_isolate_heap->Contains(addr)); |
| } |
| } |
| } |
| } |
| #endif |
| } |
| |
| ObjectPtr Object::Allocate(intptr_t cls_id, |
| intptr_t size, |
| Heap::Space space, |
| bool compressed) { |
| ASSERT(Utils::IsAligned(size, kObjectAlignment)); |
| Thread* thread = Thread::Current(); |
| ASSERT(thread->execution_state() == Thread::kThreadInVM); |
| ASSERT(thread->no_safepoint_scope_depth() == 0); |
| ASSERT(thread->no_callback_scope_depth() == 0); |
| Heap* heap = thread->heap(); |
| |
| uword address = heap->Allocate(size, space); |
| if (UNLIKELY(address == 0)) { |
| // SuspendLongJumpScope during Dart entry ensures that if a longjmp base is |
| // available, it is the innermost error handler, so check for a longjmp base |
| // before checking for an exit frame. |
| if (thread->long_jump_base() != nullptr) { |
| Report::LongJump(Object::out_of_memory_error()); |
| UNREACHABLE(); |
| } else if (thread->top_exit_frame_info() != 0) { |
| // Use the preallocated out of memory exception to avoid calling |
| // into dart code or allocating any code. |
| const Instance& exception = Instance::Handle( |
| thread->isolate_group()->object_store()->out_of_memory()); |
| Exceptions::Throw(thread, exception); |
| UNREACHABLE(); |
| } else { |
| // Nowhere to propagate an exception to. |
| OUT_OF_MEMORY(); |
| } |
| } |
| NoSafepointScope no_safepoint; |
| ObjectPtr raw_obj; |
| InitializeObject(address, cls_id, size, compressed); |
| raw_obj = static_cast<ObjectPtr>(address + kHeapObjectTag); |
| ASSERT(cls_id == UntaggedObject::ClassIdTag::decode(raw_obj->untag()->tags_)); |
| if (raw_obj->IsOldObject() && UNLIKELY(thread->is_marking())) { |
| // Black allocation. Prevents a data race between the mutator and |
| // concurrent marker on ARM and ARM64 (the marker may observe a |
| // publishing store of this object before the stores that initialize its |
| // slots), and helps the collection to finish sooner. |
| // release: Setting the mark bit must not be ordered after a publishing |
| // store of this object. Compare Scavenger::ScavengePointer. |
| raw_obj->untag()->SetMarkBitRelease(); |
| heap->old_space()->AllocateBlack(size); |
| } |
| #ifndef PRODUCT |
| auto class_table = thread->isolate_group()->shared_class_table(); |
| if (class_table->TraceAllocationFor(cls_id)) { |
| uint32_t hash = |
| HeapSnapshotWriter::GetHeapSnapshotIdentityHash(thread, raw_obj); |
| Profiler::SampleAllocation(thread, cls_id, hash); |
| } |
| #endif // !PRODUCT |
| return raw_obj; |
| } |
| |
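| // Performs the pointer-store barrier checks (remembered set and marking) |
| // for every slot of an old-space object whose contents were filled in |
| // bulk without barriers, e.g. the clone created by Object::Clone below. |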
| class WriteBarrierUpdateVisitor : public ObjectPointerVisitor { |
| public: |
| explicit WriteBarrierUpdateVisitor(Thread* thread, ObjectPtr obj) |
| : ObjectPointerVisitor(thread->isolate_group()), |
| thread_(thread), |
| old_obj_(obj) { |
| ASSERT(old_obj_->IsOldObject()); |
| } |
| |
| void VisitPointers(ObjectPtr* from, ObjectPtr* to) { |
| if (old_obj_->IsArray()) { |
| for (ObjectPtr* slot = from; slot <= to; ++slot) { |
| ObjectPtr value = *slot; |
| if (value->IsHeapObject()) { |
| old_obj_->untag()->CheckArrayPointerStore(slot, value, thread_); |
| } |
| } |
| } else { |
| for (ObjectPtr* slot = from; slot <= to; ++slot) { |
| ObjectPtr value = *slot; |
| if (value->IsHeapObject()) { |
| old_obj_->untag()->CheckHeapPointerStore(value, thread_); |
| } |
| } |
| } |
| } |
| |
| void VisitCompressedPointers(uword heap_base, |
| CompressedObjectPtr* from, |
| CompressedObjectPtr* to) { |
| if (old_obj_->IsArray()) { |
| for (CompressedObjectPtr* slot = from; slot <= to; ++slot) { |
| ObjectPtr value = slot->Decompress(heap_base); |
| if (value->IsHeapObject()) { |
| old_obj_->untag()->CheckArrayPointerStore(slot, value, thread_); |
| } |
| } |
| } else { |
| for (CompressedObjectPtr* slot = from; slot <= to; ++slot) { |
| ObjectPtr value = slot->Decompress(heap_base); |
| if (value->IsHeapObject()) { |
| old_obj_->untag()->CheckHeapPointerStore(value, thread_); |
| } |
| } |
| } |
| } |
| |
| private: |
| Thread* thread_; |
| ObjectPtr old_obj_; |
| |
| DISALLOW_COPY_AND_ASSIGN(WriteBarrierUpdateVisitor); |
| }; |
| |
| bool Object::IsReadOnlyHandle() const { |
| return Dart::IsReadOnlyHandle(reinterpret_cast<uword>(this)); |
| } |
| |
| bool Object::IsNotTemporaryScopedHandle() const { |
| return (IsZoneHandle() || IsReadOnlyHandle()); |
| } |
| |
| ObjectPtr Object::Clone(const Object& orig, Heap::Space space) { |
| const Class& cls = Class::Handle(orig.clazz()); |
| intptr_t size = orig.ptr()->untag()->HeapSize(); |
| ObjectPtr raw_clone = |
| Object::Allocate(cls.id(), size, space, cls.HasCompressedPointers()); |
| NoSafepointScope no_safepoint; |
| // Copy the body of the original into the clone. |
| uword orig_addr = UntaggedObject::ToAddr(orig.ptr()); |
| uword clone_addr = UntaggedObject::ToAddr(raw_clone); |
| static const intptr_t kHeaderSizeInBytes = sizeof(UntaggedObject); |
| memmove(reinterpret_cast<uint8_t*>(clone_addr + kHeaderSizeInBytes), |
| reinterpret_cast<uint8_t*>(orig_addr + kHeaderSizeInBytes), |
| size - kHeaderSizeInBytes); |
| |
| // Add clone to store buffer, if needed. |
| if (!raw_clone->IsOldObject()) { |
| // No need to remember an object in new space. |
| return raw_clone; |
| } |
| WriteBarrierUpdateVisitor visitor(Thread::Current(), raw_clone); |
| raw_clone->untag()->VisitPointers(&visitor); |
| return raw_clone; |
| } |
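| |
| // Illustrative use of Object::Clone (hypothetical caller): |
| // |
| //   const Object& copy = Object::Handle(Object::Clone(orig, Heap::kOld)); |
| // |
| // The write-barrier visitor pass above ensures an old-space clone is |
| // correctly remembered and marked even though its body was filled with a |
| // raw memmove. |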
| |
| bool Class::HasCompressedPointers() const { |
| const intptr_t cid = id(); |
| switch (cid) { |
| // Only a couple of FFI cids correspond to actual Dart classes, so they're |
| // explicitly listed here. |
| case kFfiPointerCid: |
| return Pointer::ContainsCompressedPointers(); |
| case kFfiDynamicLibraryCid: |
| return DynamicLibrary::ContainsCompressedPointers(); |
| case kByteBufferCid: |
| return ByteBuffer::ContainsCompressedPointers(); |
| #define HANDLE_CASE(clazz) \ |
| case k##clazz##Cid: \ |
| return dart::clazz::ContainsCompressedPointers(); |
| CLASS_LIST(HANDLE_CASE) |
| #undef HANDLE_CASE |
| #define HANDLE_CASE(clazz) \ |
| case kTypedData##clazz##Cid: \ |
| return dart::TypedData::ContainsCompressedPointers(); \ |
| case kTypedData##clazz##ViewCid: \ |
| return dart::TypedDataView::ContainsCompressedPointers(); \ |
| case kExternalTypedData##clazz##Cid: \ |
| return dart::ExternalTypedData::ContainsCompressedPointers(); |
| CLASS_LIST_TYPED_DATA(HANDLE_CASE) |
| #undef HANDLE_CASE |
| default: |
| if (cid >= kNumPredefinedCids) { |
| return dart::Instance::ContainsCompressedPointers(); |
| } |
| } |
| FATAL("Unsupported class for compressed pointers translation: %s (id=%" Pd |
| ", kNumPredefinedCids=%" Pd ")\n", |
| ToCString(), cid, kNumPredefinedCids); |
| return false; |
| } |
| |
| StringPtr Class::Name() const { |
| return untag()->name(); |
| } |
| |
| StringPtr Class::ScrubbedName() const { |
| return Symbols::New(Thread::Current(), ScrubbedNameCString()); |
| } |
| |
| const char* Class::ScrubbedNameCString() const { |
| return String::ScrubName(String::Handle(Name())); |
| } |
| |
| StringPtr Class::UserVisibleName() const { |
| #if !defined(PRODUCT) |
| ASSERT(untag()->user_name() != String::null()); |
| return untag()->user_name(); |
| #endif // !defined(PRODUCT) |
| // No caching in PRODUCT, regenerate. |
| return Symbols::New(Thread::Current(), GenerateUserVisibleName()); |
| } |
| |
| const char* Class::UserVisibleNameCString() const { |
| #if !defined(PRODUCT) |
| ASSERT(untag()->user_name() != String::null()); |
| return String::Handle(untag()->user_name()).ToCString(); |
| #endif // !defined(PRODUCT) |
| return GenerateUserVisibleName(); // No caching in PRODUCT, regenerate. |
| } |
| |
| const char* Class::NameCString(NameVisibility name_visibility) const { |
| switch (name_visibility) { |
| case Object::kInternalName: |
| return String::Handle(Name()).ToCString(); |
| case Object::kScrubbedName: |
| return ScrubbedNameCString(); |
| case Object::kUserVisibleName: |
| return UserVisibleNameCString(); |
| default: |
| UNREACHABLE(); |
| return nullptr; |
| } |
| } |
| |
| ClassPtr Class::Mixin() const { |
| if (is_transformed_mixin_application()) { |
| const Array& interfaces = Array::Handle(this->interfaces()); |
| const Type& mixin_type = |
| Type::Handle(Type::RawCast(interfaces.At(interfaces.Length() - 1))); |
| return mixin_type.type_class(); |
| } |
| return ptr(); |
| } |
| |
| NNBDMode Class::nnbd_mode() const { |
| return Library::Handle(library()).nnbd_mode(); |
| } |
| |
| bool Class::IsInFullSnapshot() const { |
| NoSafepointScope no_safepoint; |
| return UntaggedLibrary::InFullSnapshotBit::decode( |
| untag()->library()->untag()->flags_); |
| } |
| |
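| // Returns the "rare type" of this class: its type instantiated with a null |
| // (raw) type argument vector. It is used, for example, as the receiver type |
| // when throwing a NoSuchMethodError for static members (see Class::Invoke |
| // and friends below). |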
| AbstractTypePtr Class::RareType() const { |
| if (!IsGeneric() && !IsClosureClass()) { |
| return DeclarationType(); |
| } |
| ASSERT(is_declaration_loaded()); |
| const Type& type = Type::Handle(Type::New( |
| *this, Object::null_type_arguments(), Nullability::kNonNullable)); |
| return ClassFinalizer::FinalizeType(type); |
| } |
| |
| template <class FakeObject, class TargetFakeObject> |
| ClassPtr Class::New(IsolateGroup* isolate_group, bool register_class) { |
| ASSERT(Object::class_class() != Class::null()); |
| Class& result = Class::Handle(); |
| { |
| ObjectPtr raw = |
| Object::Allocate(Class::kClassId, Class::InstanceSize(), Heap::kOld, |
| Class::ContainsCompressedPointers()); |
| NoSafepointScope no_safepoint; |
| result ^= raw; |
| } |
| Object::VerifyBuiltinVtable<FakeObject>(FakeObject::kClassId); |
| NOT_IN_PRECOMPILED(result.set_token_pos(TokenPosition::kNoSource)); |
| NOT_IN_PRECOMPILED(result.set_end_token_pos(TokenPosition::kNoSource)); |
| result.set_instance_size(FakeObject::InstanceSize(), |
| compiler::target::RoundedAllocationSize( |
| TargetFakeObject::InstanceSize())); |
| result.set_type_arguments_field_offset_in_words(kNoTypeArguments, |
| RTN::Class::kNoTypeArguments); |
| const intptr_t host_next_field_offset = FakeObject::NextFieldOffset(); |
| const intptr_t target_next_field_offset = TargetFakeObject::NextFieldOffset(); |
| result.set_next_field_offset(host_next_field_offset, |
| target_next_field_offset); |
| COMPILE_ASSERT((FakeObject::kClassId != kInstanceCid)); |
| result.set_id(FakeObject::kClassId); |
| result.set_num_type_arguments_unsafe(0); |
| result.set_num_native_fields(0); |
| result.set_state_bits(0); |
| if ((FakeObject::kClassId < kInstanceCid) || |
| (FakeObject::kClassId == kTypeArgumentsCid)) { |
| // VM internal classes are done. There is no finalization needed or |
| // possible in this case. |
| result.set_is_declaration_loaded(); |
| result.set_is_type_finalized(); |
| result.set_is_allocate_finalized(); |
| } else if (FakeObject::kClassId != kClosureCid) { |
| // VM backed classes are almost ready: run checks and resolve class |
| // references, but do not recompute size. |
| result.set_is_prefinalized(); |
| } |
| NOT_IN_PRECOMPILED(result.set_kernel_offset(0)); |
| result.InitEmptyFields(); |
| if (register_class) { |
| isolate_group->class_table()->Register(result); |
| } |
| return result.ptr(); |
| } |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| static void ReportTooManyTypeArguments(const Class& cls) { |
| Report::MessageF(Report::kError, Script::Handle(cls.script()), |
| cls.token_pos(), Report::AtLocation, |
| "too many type parameters declared in class '%s' or in its " |
| "super classes", |
| String::Handle(cls.Name()).ToCString()); |
| UNREACHABLE(); |
| } |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| |
| void Class::set_num_type_arguments(intptr_t value) const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| if (!Utils::IsInt(16, value)) { |
| ReportTooManyTypeArguments(*this); |
| } |
| // We allow concurrent calculation of the number of type arguments. If two |
|   // threads perform this operation, it doesn't matter which one wins. |
| DEBUG_ONLY(intptr_t old_value = num_type_arguments()); |
| DEBUG_ASSERT(old_value == kUnknownNumTypeArguments || old_value == value); |
| StoreNonPointer<int16_t, int16_t, std::memory_order_relaxed>( |
| &untag()->num_type_arguments_, value); |
| #endif // defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
| void Class::set_num_type_arguments_unsafe(intptr_t value) const { |
| StoreNonPointer(&untag()->num_type_arguments_, value); |
| } |
| |
| void Class::set_has_pragma(bool value) const { |
| set_state_bits(HasPragmaBit::update(value, state_bits())); |
| } |
| |
| // Initialize class fields of type Array with the empty array. |
| void Class::InitEmptyFields() { |
| if (Object::empty_array().ptr() == Array::null()) { |
| // The empty array has not been initialized yet. |
| return; |
| } |
| untag()->set_interfaces(Object::empty_array().ptr()); |
| untag()->set_constants(Object::null_array().ptr()); |
| set_functions(Object::empty_array()); |
| set_fields(Object::empty_array()); |
| set_invocation_dispatcher_cache(Object::empty_array()); |
| } |
| |
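| // Returns an array mapping instance field offsets (in compressed words) to |
| // the corresponding Field objects; slots that do not start a field remain |
| // null. |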
| ArrayPtr Class::OffsetToFieldMap(bool original_classes) const { |
| if (untag()->offset_in_words_to_field() == Array::null()) { |
| ASSERT(is_finalized()); |
| const intptr_t length = untag()->host_instance_size_in_words_; |
| const Array& array = Array::Handle(Array::New(length, Heap::kOld)); |
| Class& cls = Class::Handle(this->ptr()); |
| Array& fields = Array::Handle(); |
| Field& f = Field::Handle(); |
| while (!cls.IsNull()) { |
| fields = cls.fields(); |
| for (intptr_t i = 0; i < fields.Length(); ++i) { |
| f ^= fields.At(i); |
| if (f.is_instance()) { |
| array.SetAt(f.HostOffset() >> kCompressedWordSizeLog2, f); |
| } |
| } |
| cls = cls.SuperClass(original_classes); |
| } |
| untag()->set_offset_in_words_to_field(array.ptr()); |
| } |
| return untag()->offset_in_words_to_field(); |
| } |
| |
| bool Class::HasInstanceFields() const { |
| const Array& field_array = Array::Handle(fields()); |
| Field& field = Field::Handle(); |
| for (intptr_t i = 0; i < field_array.Length(); ++i) { |
| field ^= field_array.At(i); |
| if (!field.is_static()) { |
| return true; |
| } |
| } |
| return false; |
| } |
| |
| class FunctionName { |
| public: |
| FunctionName(const String& name, String* tmp_string) |
| : name_(name), tmp_string_(tmp_string) {} |
| bool Matches(const Function& function) const { |
| if (name_.IsSymbol()) { |
| return name_.ptr() == function.name(); |
| } else { |
| *tmp_string_ = function.name(); |
| return name_.Equals(*tmp_string_); |
| } |
| } |
| intptr_t Hash() const { return name_.Hash(); } |
| |
| private: |
| const String& name_; |
| String* tmp_string_; |
| }; |
| |
| // Traits for looking up Functions by name. |
| class ClassFunctionsTraits { |
| public: |
| static const char* Name() { return "ClassFunctionsTraits"; } |
| static bool ReportStats() { return false; } |
| |
| // Called when growing the table. |
| static bool IsMatch(const Object& a, const Object& b) { |
| ASSERT(a.IsFunction() && b.IsFunction()); |
| // Function objects are always canonical. |
| return a.ptr() == b.ptr(); |
| } |
| static bool IsMatch(const FunctionName& name, const Object& obj) { |
| return name.Matches(Function::Cast(obj)); |
| } |
| static uword Hash(const Object& key) { |
| return String::HashRawSymbol(Function::Cast(key).name()); |
| } |
| static uword Hash(const FunctionName& name) { return name.Hash(); } |
| }; |
| typedef UnorderedHashSet<ClassFunctionsTraits> ClassFunctionsSet; |
| |
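| // Installs [value] as this class's function array. Once the array reaches |
| // kFunctionLookupHashTreshold entries, a name-keyed hash set is maintained |
| // alongside it so that name lookups avoid a linear scan. |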
| void Class::SetFunctions(const Array& value) const { |
| ASSERT(!value.IsNull()); |
| const intptr_t len = value.Length(); |
| #if defined(DEBUG) |
| Thread* thread = Thread::Current(); |
| ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter()); |
| if (is_finalized()) { |
| Function& function = Function::Handle(); |
| FunctionType& signature = FunctionType::Handle(); |
| for (intptr_t i = 0; i < len; ++i) { |
| function ^= value.At(i); |
| signature = function.signature(); |
| ASSERT(signature.IsFinalized()); |
| } |
| } |
| #endif |
| set_functions(value); |
| if (len >= kFunctionLookupHashTreshold) { |
| ClassFunctionsSet set(HashTables::New<ClassFunctionsSet>(len, Heap::kOld)); |
| Function& func = Function::Handle(); |
| for (intptr_t i = 0; i < len; ++i) { |
| func ^= value.At(i); |
| // Verify that all the functions in the array have this class as owner. |
| ASSERT(func.Owner() == ptr()); |
| set.Insert(func); |
| } |
| untag()->set_functions_hash_table(set.Release().ptr()); |
| } else { |
| untag()->set_functions_hash_table(Array::null()); |
| } |
| } |
| |
| void Class::AddFunction(const Function& function) const { |
| #if defined(DEBUG) |
| Thread* thread = Thread::Current(); |
| ASSERT(thread->IsMutatorThread()); |
| ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter()); |
| ASSERT(!is_finalized() || |
| FunctionType::Handle(function.signature()).IsFinalized()); |
| #endif |
| const Array& arr = Array::Handle(functions()); |
| const Array& new_array = |
| Array::Handle(Array::Grow(arr, arr.Length() + 1, Heap::kOld)); |
| new_array.SetAt(arr.Length(), function); |
| set_functions(new_array); |
| // Add to hash table, if any. |
| const intptr_t new_len = new_array.Length(); |
| if (new_len == kFunctionLookupHashTreshold) { |
| // Transition to using hash table. |
| SetFunctions(new_array); |
| } else if (new_len > kFunctionLookupHashTreshold) { |
| ClassFunctionsSet set(untag()->functions_hash_table()); |
| set.Insert(function); |
| untag()->set_functions_hash_table(set.Release().ptr()); |
| } |
| } |
| |
| FunctionPtr Class::FunctionFromIndex(intptr_t idx) const { |
| const Array& funcs = Array::Handle(current_functions()); |
| if ((idx < 0) || (idx >= funcs.Length())) { |
| return Function::null(); |
| } |
| Function& func = Function::Handle(); |
| func ^= funcs.At(idx); |
| ASSERT(!func.IsNull()); |
| return func.ptr(); |
| } |
| |
| FunctionPtr Class::ImplicitClosureFunctionFromIndex(intptr_t idx) const { |
| const Array& funcs = Array::Handle(current_functions()); |
| if ((idx < 0) || (idx >= funcs.Length())) { |
| return Function::null(); |
| } |
| Function& func = Function::Handle(); |
| func ^= funcs.At(idx); |
| ASSERT(!func.IsNull()); |
| if (!func.HasImplicitClosureFunction()) { |
| return Function::null(); |
| } |
| const Function& closure_func = |
| Function::Handle(func.ImplicitClosureFunction()); |
| ASSERT(!closure_func.IsNull()); |
| return closure_func.ptr(); |
| } |
| |
| intptr_t Class::FindImplicitClosureFunctionIndex(const Function& needle) const { |
| Thread* thread = Thread::Current(); |
| if (EnsureIsFinalized(thread) != Error::null()) { |
| return -1; |
| } |
| REUSABLE_ARRAY_HANDLESCOPE(thread); |
| REUSABLE_FUNCTION_HANDLESCOPE(thread); |
| Array& funcs = thread->ArrayHandle(); |
| Function& function = thread->FunctionHandle(); |
| funcs = current_functions(); |
| ASSERT(!funcs.IsNull()); |
| Function& implicit_closure = Function::Handle(thread->zone()); |
| const intptr_t len = funcs.Length(); |
| for (intptr_t i = 0; i < len; i++) { |
| function ^= funcs.At(i); |
| implicit_closure = function.implicit_closure_function(); |
| if (implicit_closure.IsNull()) { |
| // Skip non-implicit closure functions. |
| continue; |
| } |
| if (needle.ptr() == implicit_closure.ptr()) { |
| return i; |
| } |
| } |
| // No function found. |
| return -1; |
| } |
| |
| intptr_t Class::FindInvocationDispatcherFunctionIndex( |
| const Function& needle) const { |
| Thread* thread = Thread::Current(); |
| if (EnsureIsFinalized(thread) != Error::null()) { |
| return -1; |
| } |
| REUSABLE_ARRAY_HANDLESCOPE(thread); |
| REUSABLE_OBJECT_HANDLESCOPE(thread); |
| Array& funcs = thread->ArrayHandle(); |
| Object& object = thread->ObjectHandle(); |
| funcs = invocation_dispatcher_cache(); |
| ASSERT(!funcs.IsNull()); |
| const intptr_t len = funcs.Length(); |
| for (intptr_t i = 0; i < len; i++) { |
| object = funcs.At(i); |
| // The invocation_dispatcher_cache is a table with some entries that |
| // are functions. |
| if (object.IsFunction()) { |
| if (Function::Cast(object).ptr() == needle.ptr()) { |
| return i; |
| } |
| } |
| } |
| // No function found. |
| return -1; |
| } |
| |
| FunctionPtr Class::InvocationDispatcherFunctionFromIndex(intptr_t idx) const { |
| Thread* thread = Thread::Current(); |
| REUSABLE_ARRAY_HANDLESCOPE(thread); |
| REUSABLE_OBJECT_HANDLESCOPE(thread); |
| Array& dispatcher_cache = thread->ArrayHandle(); |
| Object& object = thread->ObjectHandle(); |
| dispatcher_cache = invocation_dispatcher_cache(); |
| object = dispatcher_cache.At(idx); |
| if (!object.IsFunction()) { |
| return Function::null(); |
| } |
| return Function::Cast(object).ptr(); |
| } |
| |
| void Class::set_state_bits(intptr_t bits) const { |
| StoreNonPointer<uint32_t, uint32_t, std::memory_order_release>( |
| &untag()->state_bits_, static_cast<uint32_t>(bits)); |
| } |
| |
| void Class::set_library(const Library& value) const { |
| untag()->set_library(value.ptr()); |
| } |
| |
| void Class::set_type_parameters(const TypeParameters& value) const { |
| ASSERT((num_type_arguments() == kUnknownNumTypeArguments) || |
| is_prefinalized()); |
| untag()->set_type_parameters(value.ptr()); |
| } |
| |
| void Class::set_functions(const Array& value) const { |
| // Ensure all writes to the [Function]s are visible by the time the array |
| // is visible. |
| untag()->set_functions<std::memory_order_release>(value.ptr()); |
| } |
| |
| void Class::set_fields(const Array& value) const { |
| // Ensure all writes to the [Field]s are visible by the time the array |
| // is visible. |
| untag()->set_fields<std::memory_order_release>(value.ptr()); |
| } |
| |
| void Class::set_invocation_dispatcher_cache(const Array& cache) const { |
| // Ensure all writes to the cache are visible by the time the array |
| // is visible. |
| untag()->set_invocation_dispatcher_cache<std::memory_order_release>( |
| cache.ptr()); |
| } |
| |
| intptr_t Class::NumTypeParameters(Thread* thread) const { |
| if (!is_declaration_loaded()) { |
| ASSERT(is_prefinalized()); |
| const intptr_t cid = id(); |
| if ((cid == kArrayCid) || (cid == kImmutableArrayCid) || |
| (cid == kGrowableObjectArrayCid)) { |
| return 1; // List's type parameter may not have been parsed yet. |
| } |
| return 0; |
| } |
| if (type_parameters() == TypeParameters::null()) { |
| return 0; |
| } |
| REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread); |
| TypeParameters& type_params = thread->TypeParametersHandle(); |
| type_params = type_parameters(); |
| return type_params.Length(); |
| } |
| |
| intptr_t Class::ComputeNumTypeArguments() const { |
| ASSERT(is_declaration_loaded()); |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| auto isolate_group = thread->isolate_group(); |
| const intptr_t num_type_params = NumTypeParameters(); |
| |
| if ((super_type() == AbstractType::null()) || |
| (super_type() == isolate_group->object_store()->object_type())) { |
| return num_type_params; |
| } |
| |
| const auto& sup_type = AbstractType::Handle(zone, super_type()); |
| ASSERT(sup_type.IsType()); |
| |
| const auto& sup_class = Class::Handle(zone, sup_type.type_class()); |
| const intptr_t sup_class_num_type_args = sup_class.NumTypeArguments(); |
| if (num_type_params == 0) { |
| return sup_class_num_type_args; |
| } |
| |
| const auto& sup_type_args = TypeArguments::Handle(zone, sup_type.arguments()); |
| if (sup_type_args.IsNull()) { |
| // The super type is raw or the super class is non generic. |
| // In either case, overlapping is not possible. |
| return sup_class_num_type_args + num_type_params; |
| } |
| |
| const intptr_t sup_type_args_length = sup_type_args.Length(); |
| // At this point, the super type may or may not be finalized. In either case, |
| // the result of this function must remain the same. |
|   // The number of super type arguments may increase when the super type is |
| // finalized, but the last [sup_type_args_length] type arguments will not be |
| // modified by finalization, only shifted to higher indices in the vector. |
|   // The super type may not even be resolved yet; resolution is not required |
|   // here, since we only check for matching type parameters, which are |
|   // resolved by default. |
| // Determine the maximum overlap of a prefix of the vector consisting of the |
| // type parameters of this class with a suffix of the vector consisting of the |
| // type arguments of the super type of this class. |
| // The number of own type arguments of this class is the number of its type |
| // parameters minus the number of type arguments in the overlap. |
| // Attempt to overlap the whole vector of type parameters; reduce the size |
| // of the vector (keeping the first type parameter) until it fits or until |
| // its size is zero. |
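|   // For illustration, given |
|   //   class A<S, T> { } |
|   //   class B<T> extends A<int, T> { } |
|   // the type parameters of B are [T] and the super type arguments are |
|   // <int, T>. The prefix [T] of B's type parameters overlaps the suffix [T] |
|   // of the super type arguments, so that slot is shared and |
|   // NumTypeArguments(B) == 2 + 1 - 1 == 2. |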
| auto& sup_type_arg = AbstractType::Handle(zone); |
| for (intptr_t num_overlapping_type_args = |
| (num_type_params < sup_type_args_length) ? num_type_params |
| : sup_type_args_length; |
| num_overlapping_type_args > 0; num_overlapping_type_args--) { |
| intptr_t i = 0; |
| for (; i < num_overlapping_type_args; i++) { |
| sup_type_arg = sup_type_args.TypeAt(sup_type_args_length - |
| num_overlapping_type_args + i); |
| ASSERT(!sup_type_arg.IsNull()); |
| if (!sup_type_arg.IsTypeParameter()) break; |
| // The only type parameters appearing in the type arguments of the super |
| // type are those declared by this class. Their finalized indices depend |
| // on the number of type arguments being computed here. Therefore, they |
| // cannot possibly be finalized yet. |
| ASSERT(!TypeParameter::Cast(sup_type_arg).IsFinalized()); |
| if (TypeParameter::Cast(sup_type_arg).index() != i || |
| TypeParameter::Cast(sup_type_arg).IsNullable()) { |
| break; |
| } |
| } |
| if (i == num_overlapping_type_args) { |
| // Overlap found. |
| return sup_class_num_type_args + num_type_params - |
| num_overlapping_type_args; |
| } |
| } |
| // No overlap found. |
| return sup_class_num_type_args + num_type_params; |
| } |
| |
| intptr_t Class::NumTypeArguments() const { |
| // Return cached value if already calculated. |
| intptr_t num_type_args = num_type_arguments(); |
| if (num_type_args != kUnknownNumTypeArguments) { |
| return num_type_args; |
| } |
| |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| return 0; |
| #else |
| num_type_args = ComputeNumTypeArguments(); |
| ASSERT(num_type_args != kUnknownNumTypeArguments); |
| set_num_type_arguments(num_type_args); |
| return num_type_args; |
| #endif // defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
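| // Returns the default type arguments of this class's type parameters, i.e. |
| // the vector used when the class is instantiated without explicit type |
| // arguments ("instantiate to bounds"). |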
| TypeArgumentsPtr Class::InstantiateToBounds(Thread* thread) const { |
| const auto& type_params = |
| TypeParameters::Handle(thread->zone(), type_parameters()); |
| if (type_params.IsNull()) { |
| return Object::empty_type_arguments().ptr(); |
| } |
| return type_params.defaults(); |
| } |
| |
| ClassPtr Class::SuperClass(bool original_classes) const { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| auto isolate_group = thread->isolate_group(); |
| if (super_type() == AbstractType::null()) { |
| if (id() == kTypeArgumentsCid) { |
| // Pretend TypeArguments objects are Dart instances. |
| return isolate_group->class_table()->At(kInstanceCid); |
| } |
| return Class::null(); |
| } |
| const AbstractType& sup_type = AbstractType::Handle(zone, super_type()); |
| const intptr_t type_class_id = sup_type.type_class_id(); |
| if (original_classes) { |
| return isolate_group->GetClassForHeapWalkAt(type_class_id); |
| } else { |
| return isolate_group->class_table()->At(type_class_id); |
| } |
| } |
| |
| void Class::set_super_type(const AbstractType& value) const { |
| ASSERT(value.IsNull() || (value.IsType() && !value.IsDynamicType())); |
| untag()->set_super_type(value.ptr()); |
| } |
| |
| TypeParameterPtr Class::TypeParameterAt(intptr_t index, |
| Nullability nullability) const { |
| ASSERT(index >= 0 && index < NumTypeParameters()); |
| const TypeParameters& type_params = TypeParameters::Handle(type_parameters()); |
| const TypeArguments& bounds = TypeArguments::Handle(type_params.bounds()); |
| const AbstractType& bound = AbstractType::Handle( |
| bounds.IsNull() ? Type::DynamicType() : bounds.TypeAt(index)); |
| TypeParameter& type_param = TypeParameter::Handle( |
| TypeParameter::New(*this, 0, index, bound, nullability)); |
| if (is_type_finalized()) { |
| type_param ^= ClassFinalizer::FinalizeType(type_param); |
| } |
| return type_param.ptr(); |
| } |
| |
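| // Lays out the instance fields of this class, assigning host and target |
| // offsets (which may differ when cross-compiling to a different word size) |
| // and, in precompiled mode, recording in the returned bitmap which words |
| // hold unboxed (non-pointer) data. |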
| UnboxedFieldBitmap Class::CalculateFieldOffsets() const { |
| Array& flds = Array::Handle(fields()); |
| const Class& super = Class::Handle(SuperClass()); |
| intptr_t host_offset = 0; |
| UnboxedFieldBitmap host_bitmap{}; |
|   // Target offsets might differ if the word sizes are different. |
| intptr_t target_offset = 0; |
| intptr_t host_type_args_field_offset = kNoTypeArguments; |
| intptr_t target_type_args_field_offset = RTN::Class::kNoTypeArguments; |
| if (super.IsNull()) { |
| host_offset = Instance::NextFieldOffset(); |
| target_offset = RTN::Instance::NextFieldOffset(); |
| ASSERT(host_offset > 0); |
| ASSERT(target_offset > 0); |
| } else { |
| ASSERT(super.is_finalized() || super.is_prefinalized()); |
| host_type_args_field_offset = super.host_type_arguments_field_offset(); |
| target_type_args_field_offset = super.target_type_arguments_field_offset(); |
| host_offset = super.host_next_field_offset(); |
| ASSERT(host_offset > 0); |
| target_offset = super.target_next_field_offset(); |
| ASSERT(target_offset > 0); |
| // We should never call CalculateFieldOffsets for native wrapper |
| // classes, assert this. |
| ASSERT(num_native_fields() == 0); |
| set_num_native_fields(super.num_native_fields()); |
| |
| if (FLAG_precompiled_mode) { |
| host_bitmap = |
| IsolateGroup::Current()->shared_class_table()->GetUnboxedFieldsMapAt( |
| super.id()); |
| } |
| } |
|   // If the super class is parameterized, use the same type_arguments field; |
| // otherwise, if this class is the first in the super chain to be |
| // parameterized, introduce a new type_arguments field. |
| if (host_type_args_field_offset == kNoTypeArguments) { |
| ASSERT(target_type_args_field_offset == RTN::Class::kNoTypeArguments); |
| if (IsGeneric()) { |
| // The instance needs a type_arguments field. |
| host_type_args_field_offset = host_offset; |
| target_type_args_field_offset = target_offset; |
| host_offset += kCompressedWordSize; |
| target_offset += compiler::target::kCompressedWordSize; |
| } |
| } else { |
| ASSERT(target_type_args_field_offset != RTN::Class::kNoTypeArguments); |
| } |
| |
| set_type_arguments_field_offset(host_type_args_field_offset, |
| target_type_args_field_offset); |
| ASSERT(host_offset > 0); |
| ASSERT(target_offset > 0); |
| Field& field = Field::Handle(); |
| const intptr_t len = flds.Length(); |
| for (intptr_t i = 0; i < len; i++) { |
| field ^= flds.At(i); |
| // Offset is computed only for instance fields. |
| if (!field.is_static()) { |
| ASSERT(field.HostOffset() == 0); |
| ASSERT(field.TargetOffset() == 0); |
| field.SetOffset(host_offset, target_offset); |
| |
| if (FLAG_precompiled_mode && field.is_unboxing_candidate()) { |
| intptr_t field_size; |
| switch (field.guarded_cid()) { |
| case kDoubleCid: |
| field_size = sizeof(UntaggedDouble::value_); |
| break; |
| case kFloat32x4Cid: |
| field_size = sizeof(UntaggedFloat32x4::value_); |
| break; |
| case kFloat64x2Cid: |
| field_size = sizeof(UntaggedFloat64x2::value_); |
| break; |
| default: |
| if (field.is_non_nullable_integer()) { |
| field_size = sizeof(UntaggedMint::value_); |
| } else { |
| UNREACHABLE(); |
| field_size = 0; |
| } |
| break; |
| } |
| |
| const intptr_t host_num_words = field_size / kCompressedWordSize; |
| const intptr_t host_next_offset = host_offset + field_size; |
| const intptr_t host_next_position = |
| host_next_offset / kCompressedWordSize; |
| |
| const intptr_t target_next_offset = target_offset + field_size; |
| const intptr_t target_next_position = |
| target_next_offset / compiler::target::kCompressedWordSize; |
| |
|         // The bitmap has a fixed length. Check whether the offset position is |
|         // smaller than its length; if it is not, the field should be boxed. |
| if (host_next_position <= UnboxedFieldBitmap::Length() && |
| target_next_position <= UnboxedFieldBitmap::Length()) { |
| for (intptr_t j = 0; j < host_num_words; j++) { |
| // Activate the respective bit in the bitmap, indicating that the |
|             // content is not a pointer. |
| host_bitmap.Set(host_offset / kCompressedWordSize); |
| host_offset += kCompressedWordSize; |
| } |
| |
| ASSERT(host_offset == host_next_offset); |
| target_offset = target_next_offset; |
| } else { |
|           // Make the field boxed. |
| field.set_is_unboxing_candidate(false); |
| host_offset += kCompressedWordSize; |
| target_offset += compiler::target::kCompressedWordSize; |
| } |
| } else { |
| host_offset += kCompressedWordSize; |
| target_offset += compiler::target::kCompressedWordSize; |
| } |
| } |
| } |
| set_instance_size(RoundedAllocationSize(host_offset), |
| compiler::target::RoundedAllocationSize(target_offset)); |
| set_next_field_offset(host_offset, target_offset); |
| |
| return host_bitmap; |
| } |
| |
| void Class::AddInvocationDispatcher(const String& target_name, |
| const Array& args_desc, |
| const Function& dispatcher) const { |
| auto thread = Thread::Current(); |
| ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter()); |
| |
| auto zone = thread->zone(); |
| auto& cache = Array::Handle(zone, invocation_dispatcher_cache()); |
| InvocationDispatcherTable dispatchers(cache); |
| intptr_t i = 0; |
| for (auto dispatcher : dispatchers) { |
| if (dispatcher.Get<kInvocationDispatcherName>() == String::null()) { |
| break; |
| } |
| i++; |
| } |
| if (i == dispatchers.Length()) { |
| const intptr_t new_len = |
| cache.Length() == 0 |
| ? static_cast<intptr_t>(Class::kInvocationDispatcherEntrySize) |
| : cache.Length() * 2; |
| cache = Array::Grow(cache, new_len); |
| set_invocation_dispatcher_cache(cache); |
| } |
| // Ensure all stores are visible at the point the name is visible. |
| auto entry = dispatchers[i]; |
| entry.Set<Class::kInvocationDispatcherArgsDesc>(args_desc); |
| entry.Set<Class::kInvocationDispatcherFunction>(dispatcher); |
| entry.Set<Class::kInvocationDispatcherName, std::memory_order_release>( |
| target_name); |
| } |
| |
| FunctionPtr Class::GetInvocationDispatcher(const String& target_name, |
| const Array& args_desc, |
| UntaggedFunction::Kind kind, |
| bool create_if_absent) const { |
| ASSERT(kind == UntaggedFunction::kNoSuchMethodDispatcher || |
| kind == UntaggedFunction::kInvokeFieldDispatcher || |
| kind == UntaggedFunction::kDynamicInvocationForwarder); |
| auto thread = Thread::Current(); |
| auto Z = thread->zone(); |
| auto& function = Function::Handle(Z); |
| auto& name = String::Handle(Z); |
| auto& desc = Array::Handle(Z); |
| auto& cache = Array::Handle(Z); |
| |
| auto find_entry = [&]() { |
| cache = invocation_dispatcher_cache(); |
| ASSERT(!cache.IsNull()); |
| InvocationDispatcherTable dispatchers(cache); |
| for (auto dispatcher : dispatchers) { |
| // Ensure all loads are done after loading the name. |
| name = dispatcher.Get<Class::kInvocationDispatcherName, |
| std::memory_order_acquire>(); |
| if (name.IsNull()) break; // Reached last entry. |
| if (!name.Equals(target_name)) continue; |
| desc = dispatcher.Get<Class::kInvocationDispatcherArgsDesc>(); |
| if (desc.ptr() != args_desc.ptr()) continue; |
| function = dispatcher.Get<Class::kInvocationDispatcherFunction>(); |
| if (function.kind() == kind) { |
| return function.ptr(); |
| } |
| } |
| return Function::null(); |
| }; |
| |
| // First we'll try to find it without using locks. |
| function = find_entry(); |
| if (!function.IsNull() || !create_if_absent) { |
| return function.ptr(); |
| } |
| |
| // If we failed to find it and possibly need to create it, use a write lock. |
| SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| |
| // Try to find it again & return if it was added in the meantime. |
| function = find_entry(); |
| if (!function.IsNull()) return function.ptr(); |
| |
| // Otherwise create it & add it. |
| function = CreateInvocationDispatcher(target_name, args_desc, kind); |
| AddInvocationDispatcher(target_name, args_desc, function); |
| return function.ptr(); |
| } |
| |
| FunctionPtr Class::CreateInvocationDispatcher( |
| const String& target_name, |
| const Array& args_desc, |
| UntaggedFunction::Kind kind) const { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| FunctionType& signature = FunctionType::Handle(zone, FunctionType::New()); |
| Function& invocation = Function::Handle( |
| zone, Function::New( |
| signature, |
| String::Handle(zone, Symbols::New(thread, target_name)), kind, |
| false, // Not static. |
| false, // Not const. |
| false, // Not abstract. |
| false, // Not external. |
| false, // Not native. |
| *this, TokenPosition::kMinSource)); |
| ArgumentsDescriptor desc(args_desc); |
| if (desc.TypeArgsLen() > 0) { |
| // Make dispatcher function generic, since type arguments are passed. |
| invocation.SetNumTypeParameters(desc.TypeArgsLen()); |
| } |
| |
| invocation.set_num_fixed_parameters(desc.PositionalCount()); |
| invocation.SetNumOptionalParameters(desc.NamedCount(), |
| false); // Not positional. |
| signature.set_parameter_types( |
| Array::Handle(zone, Array::New(desc.Count(), Heap::kOld))); |
| signature.CreateNameArrayIncludingFlags(Heap::kOld); |
| // Receiver. |
| signature.SetParameterTypeAt(0, Object::dynamic_type()); |
| signature.SetParameterNameAt(0, Symbols::This()); |
| // Remaining positional parameters. |
| for (intptr_t i = 1; i < desc.PositionalCount(); i++) { |
| signature.SetParameterTypeAt(i, Object::dynamic_type()); |
| char name[64]; |
| Utils::SNPrint(name, 64, ":p%" Pd, i); |
| signature.SetParameterNameAt( |
| i, String::Handle(zone, Symbols::New(thread, name))); |
| } |
| |
| // Named parameters. |
| for (intptr_t i = 0; i < desc.NamedCount(); i++) { |
| const intptr_t param_index = desc.PositionAt(i); |
| const auto& param_name = String::Handle(zone, desc.NameAt(i)); |
| signature.SetParameterTypeAt(param_index, Object::dynamic_type()); |
| signature.SetParameterNameAt(param_index, param_name); |
| } |
| signature.FinalizeNameArrays(invocation); |
| signature.set_result_type(Object::dynamic_type()); |
| invocation.set_is_debuggable(false); |
| invocation.set_is_visible(false); |
| invocation.set_is_reflectable(false); |
| invocation.set_saved_args_desc(args_desc); |
| |
| signature ^= ClassFinalizer::FinalizeType(signature); |
| invocation.set_signature(signature); |
| |
| return invocation.ptr(); |
| } |
| |
| // Method extractors are used to create implicit closures from methods. |
| // When an expression obj.M is evaluated for the first time, and the receiver |
| // obj does not have a getter called M but has a method called M, an extractor |
| // is created and injected as a getter (under the name get:M) into the class |
| // owning method M. |
| FunctionPtr Function::CreateMethodExtractor(const String& getter_name) const { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| ASSERT(Field::IsGetterName(getter_name)); |
| const Function& closure_function = |
| Function::Handle(zone, ImplicitClosureFunction()); |
| |
| const Class& owner = Class::Handle(zone, closure_function.Owner()); |
| FunctionType& signature = FunctionType::Handle(zone, FunctionType::New()); |
| const Function& extractor = Function::Handle( |
| zone, |
| Function::New(signature, |
| String::Handle(zone, Symbols::New(thread, getter_name)), |
| UntaggedFunction::kMethodExtractor, |
| false, // Not static. |
| false, // Not const. |
| is_abstract(), |
| false, // Not external. |
| false, // Not native. |
| owner, TokenPosition::kMethodExtractor)); |
| |
| // Initialize signature: receiver is a single fixed parameter. |
| const intptr_t kNumParameters = 1; |
| extractor.set_num_fixed_parameters(kNumParameters); |
| extractor.SetNumOptionalParameters(0, false); |
| signature.set_parameter_types(Object::extractor_parameter_types()); |
| signature.set_parameter_names(Object::extractor_parameter_names()); |
| extractor.SetParameterNamesFrom(signature); |
| signature.set_result_type(Object::dynamic_type()); |
| |
| extractor.InheritKernelOffsetFrom(*this); |
| |
| extractor.set_extracted_method_closure(closure_function); |
| extractor.set_is_debuggable(false); |
| extractor.set_is_visible(false); |
| |
| signature ^= ClassFinalizer::FinalizeType(signature); |
| extractor.set_signature(signature); |
| |
| owner.AddFunction(extractor); |
| |
| return extractor.ptr(); |
| } |
| |
| FunctionPtr Function::GetMethodExtractor(const String& getter_name) const { |
| ASSERT(Field::IsGetterName(getter_name)); |
| const Function& closure_function = |
| Function::Handle(ImplicitClosureFunction()); |
| const Class& owner = Class::Handle(closure_function.Owner()); |
| Thread* thread = Thread::Current(); |
| if (owner.EnsureIsFinalized(thread) != Error::null()) { |
| return Function::null(); |
| } |
| IsolateGroup* group = thread->isolate_group(); |
| Function& result = Function::Handle( |
| Resolver::ResolveDynamicFunction(thread->zone(), owner, getter_name)); |
| if (result.IsNull()) { |
| SafepointWriteRwLocker ml(thread, group->program_lock()); |
| result = owner.LookupDynamicFunctionUnsafe(getter_name); |
| if (result.IsNull()) { |
| result = CreateMethodExtractor(getter_name); |
| } |
| } |
| ASSERT(result.kind() == UntaggedFunction::kMethodExtractor); |
| return result.ptr(); |
| } |
| |
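| // Scans the metadata of [obj] for @pragma annotations whose 'name' field |
| // equals [pragma_name]. On a match, the annotation's 'options' field is |
| // returned through [options]; with [multiple], a GrowableObjectArray of all |
| // matching options is returned instead. With [only_core], only annotations |
| // in core libraries are considered. |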
| bool Library::FindPragma(Thread* T, |
| bool only_core, |
| const Object& obj, |
| const String& pragma_name, |
| bool multiple, |
| Object* options) { |
| auto IG = T->isolate_group(); |
| auto Z = T->zone(); |
| auto& lib = Library::Handle(Z); |
| |
| if (obj.IsClass()) { |
| auto& klass = Class::Cast(obj); |
| if (!klass.has_pragma()) return false; |
| lib = klass.library(); |
| } else if (obj.IsFunction()) { |
| auto& function = Function::Cast(obj); |
| if (!function.has_pragma()) return false; |
| lib = Class::Handle(Z, function.Owner()).library(); |
| } else if (obj.IsField()) { |
| auto& field = Field::Cast(obj); |
| if (!field.has_pragma()) return false; |
| lib = Class::Handle(Z, field.Owner()).library(); |
| } else { |
| UNREACHABLE(); |
| } |
| |
| if (only_core && !lib.IsAnyCoreLibrary()) { |
| return false; |
| } |
| |
| Object& metadata_obj = Object::Handle(Z, lib.GetMetadata(obj)); |
| if (metadata_obj.IsUnwindError()) { |
| Report::LongJump(UnwindError::Cast(metadata_obj)); |
| } |
| |
| // If there is a compile-time error while evaluating the metadata, we will |
| // simply claim there was no @pragma annotation. |
| if (metadata_obj.IsNull() || metadata_obj.IsLanguageError()) { |
| return false; |
| } |
| ASSERT(metadata_obj.IsArray()); |
| |
| auto& metadata = Array::Cast(metadata_obj); |
| auto& pragma_class = Class::Handle(Z, IG->object_store()->pragma_class()); |
| auto& pragma_name_field = |
| Field::Handle(Z, pragma_class.LookupField(Symbols::name())); |
| auto& pragma_options_field = |
| Field::Handle(Z, pragma_class.LookupField(Symbols::options())); |
| |
| auto& pragma = Object::Handle(Z); |
| bool found = false; |
| auto& options_value = Object::Handle(Z); |
| auto& results = GrowableObjectArray::Handle(Z); |
| if (multiple) { |
| ASSERT(options != nullptr); |
| results ^= GrowableObjectArray::New(1); |
| } |
| for (intptr_t i = 0; i < metadata.Length(); ++i) { |
| pragma = metadata.At(i); |
| if (pragma.clazz() != pragma_class.ptr() || |
| Instance::Cast(pragma).GetField(pragma_name_field) != |
| pragma_name.ptr()) { |
| continue; |
| } |
| options_value = Instance::Cast(pragma).GetField(pragma_options_field); |
| found = true; |
| if (multiple) { |
| results.Add(options_value); |
| continue; |
| } |
| if (options != nullptr) { |
| *options = options_value.ptr(); |
| } |
| return true; |
| } |
| |
| if (found && options != nullptr) { |
| *options = results.ptr(); |
| } |
| return found; |
| } |
| |
| bool Function::IsDynamicInvocationForwarderName(const String& name) { |
| return IsDynamicInvocationForwarderName(name.ptr()); |
| } |
| |
| bool Function::IsDynamicInvocationForwarderName(StringPtr name) { |
| return String::StartsWith(name, Symbols::DynamicPrefix().ptr()); |
| } |
| |
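| // Strips the "dyn:" prefix; e.g., demangling "dyn:call" yields "call". |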
| StringPtr Function::DemangleDynamicInvocationForwarderName(const String& name) { |
| const intptr_t kDynamicPrefixLength = 4; // "dyn:" |
| ASSERT(Symbols::DynamicPrefix().Length() == kDynamicPrefixLength); |
| return Symbols::New(Thread::Current(), name, kDynamicPrefixLength, |
| name.Length() - kDynamicPrefixLength); |
| } |
| |
| StringPtr Function::CreateDynamicInvocationForwarderName(const String& name) { |
| return Symbols::FromConcat(Thread::Current(), Symbols::DynamicPrefix(), name); |
| } |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| FunctionPtr Function::CreateDynamicInvocationForwarder( |
| const String& mangled_name) const { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| |
| Function& forwarder = Function::Handle(zone); |
| forwarder ^= Object::Clone(*this, Heap::kOld); |
| |
| forwarder.reset_unboxed_parameters_and_return(); |
| |
| forwarder.set_name(mangled_name); |
| forwarder.set_is_native(false); |
| // TODO(dartbug.com/37737): Currently, we intentionally keep the recognized |
| // kind when creating the dynamic invocation forwarder. |
| forwarder.set_kind(UntaggedFunction::kDynamicInvocationForwarder); |
| forwarder.set_is_debuggable(false); |
| |
| // TODO(vegorov) for error reporting reasons it is better to make this |
| // function visible and instead use a TailCall to invoke the target. |
|   // Our TailCall instruction is not ready for such usage, though: it blocks |
|   // inlining and can't take Functions, only Code objects. |
| forwarder.set_is_visible(false); |
| |
| forwarder.ClearICDataArray(); |
| forwarder.ClearCode(); |
| forwarder.set_usage_counter(0); |
| forwarder.set_deoptimization_counter(0); |
| forwarder.set_optimized_instruction_count(0); |
| forwarder.set_inlining_depth(0); |
| forwarder.set_optimized_call_site_count(0); |
| |
| forwarder.InheritKernelOffsetFrom(*this); |
| |
| const Array& checks = Array::Handle(zone, Array::New(1)); |
| checks.SetAt(0, *this); |
| forwarder.SetForwardingChecks(checks); |
| |
| return forwarder.ptr(); |
| } |
| |
| FunctionPtr Function::GetDynamicInvocationForwarder( |
| const String& mangled_name, |
| bool allow_add /*=true*/) const { |
| ASSERT(IsDynamicInvocationForwarderName(mangled_name)); |
| auto thread = Thread::Current(); |
| auto zone = thread->zone(); |
| const Class& owner = Class::Handle(zone, Owner()); |
| Function& result = Function::Handle(zone); |
| |
| // First we'll try to find it without using locks. |
| result = owner.GetInvocationDispatcher( |
| mangled_name, Array::null_array(), |
| UntaggedFunction::kDynamicInvocationForwarder, |
| /*create_if_absent=*/false); |
| if (!result.IsNull()) return result.ptr(); |
| |
| const bool needs_dyn_forwarder = |
| kernel::NeedsDynamicInvocationForwarder(*this); |
| if (!allow_add) { |
| return needs_dyn_forwarder ? Function::null() : ptr(); |
| } |
| |
| // If we failed to find it and possibly need to create it, use a write lock. |
| SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| |
|   // Try to find it again & return if it was added in the meantime. |
| result = owner.GetInvocationDispatcher( |
| mangled_name, Array::null_array(), |
| UntaggedFunction::kDynamicInvocationForwarder, |
| /*create_if_absent=*/false); |
| if (!result.IsNull()) return result.ptr(); |
| |
| // Otherwise create it & add it. |
| result = needs_dyn_forwarder ? CreateDynamicInvocationForwarder(mangled_name) |
| : ptr(); |
| owner.AddInvocationDispatcher(mangled_name, Array::null_array(), result); |
| |
| return result.ptr(); |
| } |
| |
| #endif |
| |
| bool AbstractType::InstantiateAndTestSubtype( |
| AbstractType* subtype, |
| AbstractType* supertype, |
| const TypeArguments& instantiator_type_args, |
| const TypeArguments& function_type_args) { |
| if (!subtype->IsInstantiated()) { |
| *subtype = subtype->InstantiateFrom( |
| instantiator_type_args, function_type_args, kAllFree, Heap::kOld); |
| } |
| if (!supertype->IsInstantiated()) { |
| *supertype = supertype->InstantiateFrom( |
| instantiator_type_args, function_type_args, kAllFree, Heap::kOld); |
| } |
| return subtype->IsSubtypeOf(*supertype, Heap::kOld); |
| } |
| |
| ArrayPtr Class::invocation_dispatcher_cache() const { |
| return untag()->invocation_dispatcher_cache<std::memory_order_acquire>(); |
| } |
| |
| void Class::Finalize() const { |
| auto thread = Thread::Current(); |
| auto isolate_group = thread->isolate_group(); |
|   ASSERT(!isolate_group->all_classes_finalized()); |
| ASSERT(!is_finalized()); |
| // Prefinalized classes have a VM internal representation and no Dart fields. |
| // Their instance size is precomputed and field offsets are known. |
| if (!is_prefinalized()) { |
| // Compute offsets of instance fields, instance size and bitmap for unboxed |
| // fields. |
| const auto host_bitmap = CalculateFieldOffsets(); |
| if (ptr() == isolate_group->class_table()->At(id())) { |
| // Sets the new size in the class table. |
| isolate_group->class_table()->SetAt(id(), ptr()); |
| if (FLAG_precompiled_mode && !ClassTable::IsTopLevelCid(id())) { |
| isolate_group->shared_class_table()->SetUnboxedFieldsMapAt(id(), |
| host_bitmap); |
| } |
| } |
| } |
| |
| #if defined(DEBUG) |
| if (is_const()) { |
| // Double-check that all fields are final (CFE should guarantee that if it |
| // marks the class as having a constant constructor). |
| auto Z = thread->zone(); |
| const auto& super_class = Class::Handle(Z, SuperClass()); |
| ASSERT(super_class.IsNull() || super_class.is_const()); |
| const auto& fields = Array::Handle(Z, this->fields()); |
| auto& field = Field::Handle(Z); |
| for (intptr_t i = 0; i < fields.Length(); ++i) { |
| field ^= fields.At(i); |
| ASSERT(field.is_static() || field.is_final()); |
| } |
| } |
| #endif |
| |
| set_is_finalized(); |
| } |
| |
| #if defined(DEBUG) |
| static bool IsMutatorOrAtDeoptSafepoint() { |
| Thread* thread = Thread::Current(); |
| return thread->IsMutatorThread() || |
| thread->IsAtSafepoint(SafepointLevel::kGCAndDeopt); |
| } |
| #endif |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| |
| class CHACodeArray : public WeakCodeReferences { |
| public: |
| explicit CHACodeArray(const Class& cls) |
| : WeakCodeReferences(Array::Handle(cls.dependent_code())), cls_(cls) {} |
| |
| virtual void UpdateArrayTo(const Array& value) { |
| // TODO(fschneider): Fails for classes in the VM isolate. |
| cls_.set_dependent_code(value); |
| } |
| |
| virtual void ReportDeoptimization(const Code& code) { |
| if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) { |
| Function& function = Function::Handle(code.function()); |
| THR_Print("Deoptimizing %s because CHA optimized (%s).\n", |
| function.ToFullyQualifiedCString(), cls_.ToCString()); |
| } |
| } |
| |
| virtual void ReportSwitchingCode(const Code& code) { |
| if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) { |
| Function& function = Function::Handle(code.function()); |
| THR_Print( |
| "Switching %s to unoptimized code because CHA invalid" |
| " (%s)\n", |
| function.ToFullyQualifiedCString(), cls_.ToCString()); |
| } |
| } |
| |
| private: |
| const Class& cls_; |
| DISALLOW_COPY_AND_ASSIGN(CHACodeArray); |
| }; |
| |
| void Class::RegisterCHACode(const Code& code) { |
| if (FLAG_trace_cha) { |
| THR_Print("RegisterCHACode '%s' depends on class '%s'\n", |
| Function::Handle(code.function()).ToQualifiedCString(), |
| ToCString()); |
| } |
| DEBUG_ASSERT(IsMutatorOrAtDeoptSafepoint()); |
| ASSERT(code.is_optimized()); |
| CHACodeArray a(*this); |
| a.Register(code); |
| } |
| |
| void Class::DisableCHAOptimizedCode(const Class& subclass) { |
| DEBUG_ASSERT( |
| IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| CHACodeArray a(*this); |
| if (FLAG_trace_deoptimization && a.HasCodes()) { |
| if (subclass.IsNull()) { |
| THR_Print("Deopt for CHA (all)\n"); |
| } else { |
| THR_Print("Deopt for CHA (new subclass %s)\n", subclass.ToCString()); |
| } |
| } |
| a.DisableCode(); |
| } |
| |
| void Class::DisableAllCHAOptimizedCode() { |
| DisableCHAOptimizedCode(Class::Handle()); |
| } |
| |
| ArrayPtr Class::dependent_code() const { |
| DEBUG_ASSERT( |
| IsolateGroup::Current()->program_lock()->IsCurrentThreadReader()); |
| return untag()->dependent_code(); |
| } |
| |
| void Class::set_dependent_code(const Array& array) const { |
| DEBUG_ASSERT( |
| IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| untag()->set_dependent_code(array.ptr()); |
| } |
| |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| |
| bool Class::TraceAllocation(IsolateGroup* isolate_group) const { |
| #ifndef PRODUCT |
| auto class_table = isolate_group->shared_class_table(); |
| return class_table->TraceAllocationFor(id()); |
| #else |
| return false; |
| #endif |
| } |
| |
| void Class::SetTraceAllocation(bool trace_allocation) const { |
| #ifndef PRODUCT |
| auto isolate_group = IsolateGroup::Current(); |
| const bool changed = trace_allocation != this->TraceAllocation(isolate_group); |
| if (changed) { |
| auto class_table = isolate_group->shared_class_table(); |
| class_table->SetTraceAllocationFor(id(), trace_allocation); |
| DisableAllocationStub(); |
| } |
| #else |
| UNREACHABLE(); |
| #endif |
| } |
| |
| // Conventions: |
| // * For throwing a NSM in a class klass we use its runtime type as receiver, |
| // i.e., klass.RareType(). |
| // * For throwing a NSM in a library, we just pass the null instance as |
| // receiver. |
| static ObjectPtr ThrowNoSuchMethod(const Instance& receiver, |
| const String& function_name, |
| const Array& arguments, |
| const Array& argument_names, |
| const InvocationMirror::Level level, |
| const InvocationMirror::Kind kind) { |
| const Smi& invocation_type = |
| Smi::Handle(Smi::New(InvocationMirror::EncodeType(level, kind))); |
| |
| const Array& args = Array::Handle(Array::New(7)); |
| args.SetAt(0, receiver); |
| args.SetAt(1, function_name); |
| args.SetAt(2, invocation_type); |
| args.SetAt(3, Object::smi_zero()); // Type arguments length. |
| args.SetAt(4, Object::null_type_arguments()); |
| args.SetAt(5, arguments); |
| args.SetAt(6, argument_names); |
| |
| const Library& libcore = Library::Handle(Library::CoreLibrary()); |
| const Class& cls = |
| Class::Handle(libcore.LookupClass(Symbols::NoSuchMethodError())); |
| ASSERT(!cls.IsNull()); |
| const auto& error = cls.EnsureIsFinalized(Thread::Current()); |
| ASSERT(error == Error::null()); |
| const Function& throwNew = |
| Function::Handle(cls.LookupFunctionAllowPrivate(Symbols::ThrowNew())); |
| return DartEntry::InvokeFunction(throwNew, args); |
| } |
| |
| static ObjectPtr ThrowTypeError(const TokenPosition token_pos, |
| const Instance& src_value, |
| const AbstractType& dst_type, |
| const String& dst_name) { |
| const Array& args = Array::Handle(Array::New(4)); |
| const Smi& pos = Smi::Handle(Smi::New(token_pos.Serialize())); |
| args.SetAt(0, pos); |
| args.SetAt(1, src_value); |
| args.SetAt(2, dst_type); |
| args.SetAt(3, dst_name); |
| |
| const Library& libcore = Library::Handle(Library::CoreLibrary()); |
| const Class& cls = |
| Class::Handle(libcore.LookupClassAllowPrivate(Symbols::TypeError())); |
| const auto& error = cls.EnsureIsFinalized(Thread::Current()); |
| ASSERT(error == Error::null()); |
| const Function& throwNew = |
| Function::Handle(cls.LookupFunctionAllowPrivate(Symbols::ThrowNew())); |
| return DartEntry::InvokeFunction(throwNew, args); |
| } |
| |
| ObjectPtr Class::InvokeGetter(const String& getter_name, |
| bool throw_nsm_if_absent, |
| bool respect_reflectable, |
| bool check_is_entrypoint) const { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| |
| CHECK_ERROR(EnsureIsFinalized(thread)); |
| |
| // Note static fields do not have implicit getters. |
| const Field& field = Field::Handle(zone, LookupStaticField(getter_name)); |
| |
| if (!field.IsNull() && check_is_entrypoint) { |
| CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kGetterOnly)); |
| } |
| |
| if (field.IsNull() || field.IsUninitialized()) { |
| const String& internal_getter_name = |
| String::Handle(zone, Field::GetterName(getter_name)); |
| Function& getter = |
| Function::Handle(zone, LookupStaticFunction(internal_getter_name)); |
| |
| if (field.IsNull() && !getter.IsNull() && check_is_entrypoint) { |
| CHECK_ERROR(getter.VerifyCallEntryPoint()); |
| } |
| |
| if (getter.IsNull() || (respect_reflectable && !getter.is_reflectable())) { |
| if (getter.IsNull()) { |
| getter = LookupStaticFunction(getter_name); |
| if (!getter.IsNull()) { |
| if (check_is_entrypoint) { |
| CHECK_ERROR(getter.VerifyClosurizedEntryPoint()); |
| } |
| if (getter.SafeToClosurize()) { |
| // Looking for a getter but found a regular method: closurize it. |
| const Function& closure_function = |
| Function::Handle(zone, getter.ImplicitClosureFunction()); |
| return closure_function.ImplicitStaticClosure(); |
| } |
| } |
| } |
| if (throw_nsm_if_absent) { |
| return ThrowNoSuchMethod( |
| AbstractType::Handle(zone, RareType()), getter_name, |
| Object::null_array(), Object::null_array(), |
| InvocationMirror::kStatic, InvocationMirror::kGetter); |
| } |
|       // Fall-through case: indicate that we didn't find any function or field |
|       // by returning a special sentinel instance. This is different from a |
|       // field being null. Callers make sure that this sentinel does not leak |
|       // into Dartland. |
| return Object::sentinel().ptr(); |
| } |
| |
| // Invoke the getter and return the result. |
| return DartEntry::InvokeFunction(getter, Object::empty_array()); |
| } |
| |
| return field.StaticValue(); |
| } |
| |
| ObjectPtr Class::InvokeSetter(const String& setter_name, |
| const Instance& value, |
| bool respect_reflectable, |
| bool check_is_entrypoint) const { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| |
| CHECK_ERROR(EnsureIsFinalized(thread)); |
| |
| // Check for real fields and user-defined setters. |
| const Field& field = Field::Handle(zone, LookupStaticField(setter_name)); |
| const String& internal_setter_name = |
| String::Handle(zone, Field::SetterName(setter_name)); |
| |
| if (!field.IsNull() && check_is_entrypoint) { |
| CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kSetterOnly)); |
| } |
| |
| AbstractType& parameter_type = AbstractType::Handle(zone); |
| if (field.IsNull()) { |
| const Function& setter = |
| Function::Handle(zone, LookupStaticFunction(internal_setter_name)); |
| if (!setter.IsNull() && check_is_entrypoint) { |
| CHECK_ERROR(setter.VerifyCallEntryPoint()); |
| } |
| const int kNumArgs = 1; |
| const Array& args = Array::Handle(zone, Array::New(kNumArgs)); |
| args.SetAt(0, value); |
| if (setter.IsNull() || (respect_reflectable && !setter.is_reflectable())) { |
| return ThrowNoSuchMethod(AbstractType::Handle(zone, RareType()), |
| internal_setter_name, args, Object::null_array(), |
| InvocationMirror::kStatic, |
| InvocationMirror::kSetter); |
| } |
| parameter_type = setter.ParameterTypeAt(0); |
| if (!value.RuntimeTypeIsSubtypeOf(parameter_type, |
| Object::null_type_arguments(), |
| Object::null_type_arguments())) { |
| const String& argument_name = |
| String::Handle(zone, setter.ParameterNameAt(0)); |
| return ThrowTypeError(setter.token_pos(), value, parameter_type, |
| argument_name); |
| } |
| // Invoke the setter and return the result. |
| return DartEntry::InvokeFunction(setter, args); |
| } |
| |
| if (field.is_final() || (respect_reflectable && !field.is_reflectable())) { |
| const int kNumArgs = 1; |
| const Array& args = Array::Handle(zone, Array::New(kNumArgs)); |
| args.SetAt(0, value); |
| return ThrowNoSuchMethod(AbstractType::Handle(zone, RareType()), |
| internal_setter_name, args, Object::null_array(), |
| InvocationMirror::kStatic, |
| InvocationMirror::kSetter); |
| } |
| |
| parameter_type = field.type(); |
| if (!value.RuntimeTypeIsSubtypeOf(parameter_type, |
| Object::null_type_arguments(), |
| Object::null_type_arguments())) { |
| const String& argument_name = String::Handle(zone, field.name()); |
| return ThrowTypeError(field.token_pos(), value, parameter_type, |
| argument_name); |
| } |
| field.SetStaticValue(value); |
| return value.ptr(); |
| } |
| |
| // Creates a new array of boxed arguments suitable for invoking the callable |
| // from the original boxed arguments for a static call. The caller is |
| // responsible for building a matching arguments descriptor for the new |
| // arguments (see Class::Invoke). |
| // |
| // Assumes [arg_names] are consistent with [static_args_descriptor]. |
| static ArrayPtr CreateCallableArgumentsFromStatic( |
| Zone* zone, |
| const Instance& receiver, |
| const Array& static_args, |
| const Array& arg_names, |
| const ArgumentsDescriptor& static_args_descriptor) { |
| const intptr_t num_static_type_args = static_args_descriptor.TypeArgsLen(); |
| const intptr_t num_static_args = static_args_descriptor.Count(); |
| // Double check that the static args descriptor expects boxed arguments |
| // and the static args descriptor is consistent with the static arguments. |
| ASSERT_EQUAL(static_args_descriptor.Size(), num_static_args); |
| ASSERT_EQUAL(static_args.Length(), |
| num_static_args + (num_static_type_args > 0 ? 1 : 0)); |
| // Add an additional slot to store the callable as the receiver. |
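|   // Argument array layouts (the type argument vector slot is only present |
|   // when type arguments are passed): |
|   //   static_args:   [ TAV?, a0, a1, ... ] |
|   //   callable_args: [ TAV?, receiver, a0, a1, ... ] |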
| const auto& callable_args = |
| Array::Handle(zone, Array::New(static_args.Length() + 1)); |
| const intptr_t first_arg_index = static_args_descriptor.FirstArgIndex(); |
| auto& temp = Object::Handle(zone); |
| // Copy the static args into the corresponding slots of the callable args. |
| if (num_static_type_args > 0) { |
| temp = static_args.At(0); |
| callable_args.SetAt(0, temp); |
| } |
| for (intptr_t i = first_arg_index; i < static_args.Length(); i++) { |
| temp = static_args.At(i); |
| callable_args.SetAt(i + 1, temp); |
| } |
| // Set the receiver slot in the callable args. |
| callable_args.SetAt(first_arg_index, receiver); |
| return callable_args.ptr(); |
| } |
| |
| ObjectPtr Class::Invoke(const String& function_name, |
| const Array& args, |
| const Array& arg_names, |
| bool respect_reflectable, |
| bool check_is_entrypoint) const { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| CHECK_ERROR(EnsureIsFinalized(thread)); |
| |
| // We don't pass any explicit type arguments, which will be understood as |
| // using dynamic for any function type arguments by lower layers. |
| const int kTypeArgsLen = 0; |
| const Array& args_descriptor_array = Array::Handle( |
| zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(), |
| arg_names, Heap::kNew)); |
| ArgumentsDescriptor args_descriptor(args_descriptor_array); |
| |
| Function& function = |
| Function::Handle(zone, LookupStaticFunction(function_name)); |
| |
| if (!function.IsNull() && check_is_entrypoint) { |
| CHECK_ERROR(function.VerifyCallEntryPoint()); |
| } |
| |
| if (function.IsNull()) { |
| // Didn't find a method: try to find a getter and invoke call on its result. |
| const Object& getter_result = Object::Handle( |
| zone, InvokeGetter(function_name, false, respect_reflectable, |
| check_is_entrypoint)); |
| if (getter_result.ptr() != Object::sentinel().ptr()) { |
| if (check_is_entrypoint) { |
| CHECK_ERROR(EntryPointFieldInvocationError(function_name)); |
| } |
| const auto& call_args_descriptor_array = Array::Handle( |
| zone, ArgumentsDescriptor::NewBoxed(args_descriptor.TypeArgsLen(), |
| args_descriptor.Count() + 1, |
| arg_names, Heap::kNew)); |
| const auto& call_args = Array::Handle( |
| zone, |
| CreateCallableArgumentsFromStatic(zone, Instance::Cast(getter_result), |
| args, arg_names, args_descriptor)); |
| return DartEntry::InvokeClosure(thread, call_args, |
| call_args_descriptor_array); |
| } |
| } |
| |
| if (function.IsNull() || |
| !function.AreValidArguments(args_descriptor, nullptr) || |
| (respect_reflectable && !function.is_reflectable())) { |
| return ThrowNoSuchMethod( |
| AbstractType::Handle(zone, RareType()), function_name, args, arg_names, |
| InvocationMirror::kStatic, InvocationMirror::kMethod); |
| } |
  // This is a static function, so we pass an empty instantiator type
  // arguments vector.
| ASSERT(function.is_static()); |
| ObjectPtr type_error = function.DoArgumentTypesMatch( |
| args, args_descriptor, Object::empty_type_arguments()); |
| if (type_error != Error::null()) { |
| return type_error; |
| } |
| return DartEntry::InvokeFunction(function, args, args_descriptor_array); |
| } |
| |
| static ObjectPtr EvaluateCompiledExpressionHelper( |
| const ExternalTypedData& kernel_buffer, |
| const Array& type_definitions, |
| const String& library_url, |
| const String& klass, |
| const Array& arguments, |
| const TypeArguments& type_arguments); |
| |
| ObjectPtr Class::EvaluateCompiledExpression( |
| const ExternalTypedData& kernel_buffer, |
| const Array& type_definitions, |
| const Array& arguments, |
| const TypeArguments& type_arguments) const { |
| ASSERT(Thread::Current()->IsMutatorThread()); |
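  // Expression evaluation is only supported on regular Dart instances, not on
  // VM-internal objects.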
| if (id() < kInstanceCid || id() == kTypeArgumentsCid) { |
| const Instance& exception = Instance::Handle(String::New( |
| "Expressions can be evaluated only with regular Dart instances")); |
| const Instance& stacktrace = Instance::Handle(); |
| return UnhandledException::New(exception, stacktrace); |
| } |
| |
| return EvaluateCompiledExpressionHelper( |
| kernel_buffer, type_definitions, |
| String::Handle(Library::Handle(library()).url()), |
| IsTopLevel() ? String::Handle() : String::Handle(UserVisibleName()), |
| arguments, type_arguments); |
| } |
| |
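// Checks that the class declaration has been loaded and aborts otherwise. In
// the precompiled runtime all declarations are loaded ahead of time, so this
// check can never fail there.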
| void Class::EnsureDeclarationLoaded() const { |
| if (!is_declaration_loaded()) { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| FATAL1("Unable to use class %s which is not loaded yet.", ToCString()); |
| #endif |
| } |
| } |
| |
// Ensure that top-level parsing of the class has been done.
| ErrorPtr Class::EnsureIsFinalized(Thread* thread) const { |
| ASSERT(!IsNull()); |
| if (is_finalized()) { |
| return Error::null(); |
| } |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| return Error::null(); |
| #else |
| SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| if (is_finalized()) { |
| return Error::null(); |
| } |
| LeaveCompilerScope ncs(thread); |
  ASSERT(thread != nullptr);
| const Error& error = |
| Error::Handle(thread->zone(), ClassFinalizer::LoadClassMembers(*this)); |
| if (!error.IsNull()) { |
| ASSERT(thread == Thread::Current()); |
    if (thread->long_jump_base() != nullptr) {
| Report::LongJump(error); |
| UNREACHABLE(); |
| } |
| } |
| return error.ptr(); |
| #endif // defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
// Ensure that code outdated by the finalized class is cleaned up and that new
// instances of this class are ready to be allocated.
| ErrorPtr Class::EnsureIsAllocateFinalized(Thread* thread) const { |
| ASSERT(!IsNull()); |
| if (is_allocate_finalized()) { |
| return Error::null(); |
| } |
| SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| if (is_allocate_finalized()) { |
| return Error::null(); |
| } |
  ASSERT(thread != nullptr);
| Error& error = Error::Handle(thread->zone(), EnsureIsFinalized(thread)); |
| if (!error.IsNull()) { |
| ASSERT(thread == Thread::Current()); |
    if (thread->long_jump_base() != nullptr) {
| Report::LongJump(error); |
| UNREACHABLE(); |
| } |
| } |
  // The class may have been allocate-finalized recursively during
  // EnsureIsFinalized.
| if (is_allocate_finalized()) { |
| return Error::null(); |
| } |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| error ^= ClassFinalizer::AllocateFinalizeClass(*this); |
| #endif // defined(DART_PRECOMPILED_RUNTIME) |
| return error.ptr(); |
| } |
| |
| void Class::SetFields(const Array& value) const { |
| ASSERT(!value.IsNull()); |
| #if defined(DEBUG) |
| Thread* thread = Thread::Current(); |
| ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter()); |
| // Verify that all the fields in the array have this class as owner. |
| Field& field = Field::Handle(); |
| intptr_t len = value.Length(); |
| for (intptr_t i = 0; i < len; i++) { |
| field ^= value.At(i); |
| ASSERT(field.IsOriginal()); |
| ASSERT(field.Owner() == ptr()); |
| } |
| #endif |
  // The values of static fields are already initialized to null.
| set_fields(value); |
| } |
| |
| void Class::AddField(const Field& field) const { |
| #if defined(DEBUG) |
| Thread* thread = Thread::Current(); |
| ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter()); |
| #endif |
| const Array& arr = Array::Handle(fields()); |
| const Array& new_arr = Array::Handle(Array::Grow(arr, arr.Length() + 1)); |
| new_arr.SetAt(arr.Length(), field); |
| SetFields(new_arr); |
| } |
| |
| void Class::AddFields(const GrowableArray<const Field*>& new_fields) const { |
| #if defined(DEBUG) |
| Thread* thread = Thread::Current(); |
| ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter()); |
| #endif |
| const intptr_t num_new_fields = new_fields.length(); |
| if (num_new_fields == 0) return; |
| const Array& arr = Array::Handle(fields()); |
| const intptr_t num_old_fields = arr.Length(); |
| const Array& new_arr = Array::Handle( |
| Array::Grow(arr, num_old_fields + num_new_fields, Heap::kOld)); |
| for (intptr_t i = 0; i < num_new_fields; i++) { |
| new_arr.SetAt(i + num_old_fields, *new_fields.At(i)); |
| } |
| SetFields(new_arr); |
| } |
| |
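// If this class is the 'ClassID' class of dart:_internal, eagerly creates its
// static 'cid...' constant fields (for example, a field named "cidArray"
// holding kArrayCid) and returns true. Returns false otherwise.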
| bool Class::InjectCIDFields() const { |
| if (library() != Library::InternalLibrary() || |
| Name() != Symbols::ClassID().ptr()) { |
| return false; |
| } |
| |
| auto thread = Thread::Current(); |
| auto isolate_group = thread->isolate_group(); |
| auto zone = thread->zone(); |
| Field& field = Field::Handle(zone); |
| Smi& value = Smi::Handle(zone); |
| String& field_name = String::Handle(zone); |
| |
| static const struct { |
| const char* const field_name; |
| const intptr_t cid; |
| } cid_fields[] = { |
| #define CLASS_LIST_WITH_NULL(V) \ |
| V(Null) \ |
| CLASS_LIST_NO_OBJECT(V) |
| #define ADD_SET_FIELD(clazz) {"cid" #clazz, k##clazz##Cid}, |
| CLASS_LIST_WITH_NULL(ADD_SET_FIELD) |
| #undef ADD_SET_FIELD |
| #define ADD_SET_FIELD(clazz) {"cid" #clazz "View", kTypedData##clazz##ViewCid}, |
| CLASS_LIST_TYPED_DATA(ADD_SET_FIELD) |
| #undef ADD_SET_FIELD |
| #define ADD_SET_FIELD(clazz) {"cid" #clazz, kTypedData##clazz##Cid}, |
| CLASS_LIST_TYPED_DATA(ADD_SET_FIELD) |
| #undef ADD_SET_FIELD |
| #define ADD_SET_FIELD(clazz) \ |
| {"cidExternal" #clazz, kExternalTypedData##clazz##Cid}, |
| CLASS_LIST_TYPED_DATA(ADD_SET_FIELD) |
| #undef ADD_SET_FIELD |
| #undef CLASS_LIST_WITH_NULL |
| }; |
| |
| const AbstractType& field_type = Type::Handle(zone, Type::IntType()); |
| for (size_t i = 0; i < ARRAY_SIZE(cid_fields); i++) { |
| field_name = Symbols::New(thread, cid_fields[i].field_name); |
| field = Field::New(field_name, /* is_static = */ true, |
| /* is_final = */ false, |
| /* is_const = */ true, |
| /* is_reflectable = */ false, |
| /* is_late = */ false, *this, field_type, |
| TokenPosition::kMinSource, TokenPosition::kMinSource); |
| value = Smi::New(cid_fields[i].cid); |
| isolate_group->RegisterStaticField(field, value); |
| AddField(field); |
| } |
| |
| return true; |
| } |
| |
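// Allocates and partially initializes a new Class. The FakeInstance and
// TargetFakeInstance template parameters supply the host and target
// (cross-compilation) instance layouts used to seed the instance size and the
// next field offset.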
| template <class FakeInstance, class TargetFakeInstance> |
| ClassPtr Class::NewCommon(intptr_t index) { |
| ASSERT(Object::class_class() != Class::null()); |
| Class& result = Class::Handle(); |
| { |
| ObjectPtr raw = |
| Object::Allocate(Class::kClassId, Class::InstanceSize(), Heap::kOld, |
| Class::ContainsCompressedPointers()); |
| NoSafepointScope no_safepoint; |
| result ^= raw; |
| } |
| // Here kIllegalCid means not-yet-assigned. |
| Object::VerifyBuiltinVtable<FakeInstance>(index == kIllegalCid ? kInstanceCid |
| : index); |
| NOT_IN_PRECOMPILED(result.set_token_pos(TokenPosition::kNoSource)); |
| NOT_IN_PRECOMPILED(result.set_end_token_pos(TokenPosition::kNoSource)); |
| const intptr_t host_instance_size = FakeInstance::InstanceSize(); |
| const intptr_t target_instance_size = compiler::target::RoundedAllocationSize( |
| TargetFakeInstance::InstanceSize()); |
| result.set_instance_size(host_instance_size, target_instance_size); |
| result.set_type_arguments_field_offset_in_words(kNoTypeArguments, |
| RTN::Class::kNoTypeArguments); |
| const intptr_t host_next_field_offset = FakeInstance::NextFieldOffset(); |
| const intptr_t target_next_field_offset = |
| TargetFakeInstance::NextFieldOffset(); |
| result.set_next_field_offset(host_next_field_offset, |
| target_next_field_offset); |
| result.set_id(index); |
| result.set_num_type_arguments_unsafe(kUnknownNumTypeArguments); |
| result.set_num_native_fields(0); |
| result.set_state_bits(0); |
| NOT_IN_PRECOMPILED(result.set_kernel_offset(0)); |
| result.InitEmptyFields(); |
| return result.ptr(); |
| } |
| |
| template <class FakeInstance, class TargetFakeInstance> |
| ClassPtr Class::New(intptr_t index, |
| IsolateGroup* isolate_group, |
| bool register_class, |
| bool is_abstract) { |
| Class& result = |
| Class::Handle(NewCommon<FakeInstance, TargetFakeInstance>(index)); |
| if (is_abstract) { |
| result.set_is_abstract(); |
| } |
| if (register_class) { |
| isolate_group->class_table()->Register(result); |
| } |
| return result.ptr(); |
| } |
| |
| ClassPtr Class::New(const Library& lib, |
| const String& name, |
| const Script& script, |
| TokenPosition token_pos, |
| bool register_class) { |
| Class& result = |
| Class::Handle(NewCommon<Instance, RTN::Instance>(kIllegalCid)); |
| result.set_library(lib); |
| result.set_name(name); |
| result.set_script(script); |
| NOT_IN_PRECOMPILED(result.set_token_pos(token_pos)); |
| |
  // The size is initialized to 0. Once the class is finalized, the class
  // finalizer will set the correct size.
| ASSERT(!result.is_finalized() && !result.is_prefinalized()); |
| result.set_instance_size_in_words(0, 0); |
| |
| if (register_class) { |
| IsolateGroup::Current()->RegisterClass(result); |
| } |
| return result.ptr(); |
| } |
| |
| ClassPtr Class::NewInstanceClass() { |
| return Class::New<Instance, RTN::Instance>(kIllegalCid, |
| IsolateGroup::Current()); |
| } |
| |
| ClassPtr Class::NewNativeWrapper(const Library& library, |
| const String& name, |
| int field_count) { |
| Class& cls = Class::Handle(library.LookupClass(name)); |
| if (cls.IsNull()) { |
| cls = New(library, name, Script::Handle(), TokenPosition::kNoSource); |
| cls.SetFields(Object::empty_array()); |
| cls.SetFunctions(Object::empty_array()); |
| // Set super class to Object. |
| cls.set_super_type(Type::Handle(Type::ObjectType())); |
    // Compute the instance size. The first word contains a pointer to a
    // properly sized typed array once the first native field has been set.
| const intptr_t host_instance_size = sizeof(UntaggedInstance) + kWordSize; |
| #if defined(DART_PRECOMPILER) |
| const intptr_t target_instance_size = |
| compiler::target::Instance::InstanceSize() + |
| compiler::target::kWordSize; |
| #else |
| const intptr_t target_instance_size = |
| sizeof(UntaggedInstance) + compiler::target::kWordSize; |
| #endif |
| cls.set_instance_size( |
| RoundedAllocationSize(host_instance_size), |
| compiler::target::RoundedAllocationSize(target_instance_size)); |
| cls.set_next_field_offset(host_instance_size, target_instance_size); |
| cls.set_num_native_fields(field_count); |
| cls.set_is_allocate_finalized(); |
| // The signature of the constructor yet to be added to this class will have |
| // to be finalized explicitly, since the class is prematurely marked as |
| // 'is_allocate_finalized' and finalization of member types will not occur. |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| cls.set_is_synthesized_class(); |
| library.AddClass(cls); |
| return cls.ptr(); |
| } else { |
| return Class::null(); |
| } |
| } |
| |
| ClassPtr Class::NewStringClass(intptr_t class_id, IsolateGroup* isolate_group) { |
| intptr_t host_instance_size, target_instance_size; |
| if (class_id == kOneByteStringCid) { |
| host_instance_size = OneByteString::InstanceSize(); |
| target_instance_size = compiler::target::RoundedAllocationSize( |
| RTN::OneByteString::InstanceSize()); |
| } else if (class_id == kTwoByteStringCid) { |
| host_instance_size = TwoByteString::InstanceSize(); |
| target_instance_size = compiler::target::RoundedAllocationSize( |
| RTN::TwoByteString::InstanceSize()); |
| } else if (class_id == kExternalOneByteStringCid) { |
| host_instance_size = ExternalOneByteString::InstanceSize(); |
| target_instance_size = compiler::target::RoundedAllocationSize( |
| RTN::ExternalOneByteString::InstanceSize()); |
| } else { |
| ASSERT(class_id == kExternalTwoByteStringCid); |
| host_instance_size = ExternalTwoByteString::InstanceSize(); |
| target_instance_size = compiler::target::RoundedAllocationSize( |
| RTN::ExternalTwoByteString::InstanceSize()); |
| } |
| Class& result = Class::Handle(New<String, RTN::String>( |
| class_id, isolate_group, /*register_class=*/false)); |
| result.set_instance_size(host_instance_size, target_instance_size); |
| |
| const intptr_t host_next_field_offset = String::NextFieldOffset(); |
| const intptr_t target_next_field_offset = RTN::String::NextFieldOffset(); |
| result.set_next_field_offset(host_next_field_offset, |
| target_next_field_offset); |
| result.set_is_prefinalized(); |
| isolate_group->class_table()->Register(result); |
| return result.ptr(); |
| } |
| |
| ClassPtr Class::NewTypedDataClass(intptr_t class_id, |
| IsolateGroup* isolate_group) { |
| ASSERT(IsTypedDataClassId(class_id)); |
| const intptr_t host_instance_size = TypedData::InstanceSize(); |
| const intptr_t target_instance_size = |
| compiler::target::RoundedAllocationSize(RTN::TypedData::InstanceSize()); |
| Class& result = Class::Handle(New<TypedData, RTN::TypedData>( |
| class_id, isolate_group, /*register_class=*/false)); |
| result.set_instance_size(host_instance_size, target_instance_size); |
| |
| const intptr_t host_next_field_offset = TypedData::NextFieldOffset(); |
| const intptr_t target_next_field_offset = RTN::TypedData::NextFieldOffset(); |
| result.set_next_field_offset(host_next_field_offset, |
| target_next_field_offset); |
| result.set_is_prefinalized(); |
| isolate_group->class_table()->Register(result); |
| return result.ptr(); |
| } |
| |
| ClassPtr Class::NewTypedDataViewClass(intptr_t class_id, |
| IsolateGroup* isolate_group) { |
| ASSERT(IsTypedDataViewClassId(class_id)); |
| const intptr_t host_instance_size = TypedDataView::InstanceSize(); |
| const intptr_t target_instance_size = compiler::target::RoundedAllocationSize( |
| RTN::TypedDataView::InstanceSize()); |
| Class& result = Class::Handle(New<TypedDataView, RTN::TypedDataView>( |
| class_id, isolate_group, /*register_class=*/false)); |
| result.set_instance_size(host_instance_size, target_instance_size); |
| |
| const intptr_t host_next_field_offset = TypedDataView::NextFieldOffset(); |
| const intptr_t target_next_field_offset = |
| RTN::TypedDataView::NextFieldOffset(); |
| result.set_next_field_offset(host_next_field_offset, |
| target_next_field_offset); |
| result.set_is_prefinalized(); |
| isolate_group->class_table()->Register(result); |
| return result.ptr(); |
| } |
| |
| ClassPtr Class::NewExternalTypedDataClass(intptr_t class_id, |
| IsolateGroup* isolate_group) { |
| ASSERT(IsExternalTypedDataClassId(class_id)); |
| const intptr_t host_instance_size = ExternalTypedData::InstanceSize(); |
| const intptr_t target_instance_size = compiler::target::RoundedAllocationSize( |
| RTN::ExternalTypedData::InstanceSize()); |
| Class& result = Class::Handle(New<ExternalTypedData, RTN::ExternalTypedData>( |
| class_id, isolate_group, /*register_class=*/false)); |
| |
| const intptr_t host_next_field_offset = ExternalTypedData::NextFieldOffset(); |
| const intptr_t target_next_field_offset = |
| RTN::ExternalTypedData::NextFieldOffset(); |
| result.set_instance_size(host_instance_size, target_instance_size); |
| result.set_next_field_offset(host_next_field_offset, |
| target_next_field_offset); |
| result.set_is_prefinalized(); |
| isolate_group->class_table()->Register(result); |
| return result.ptr(); |
| } |
| |
| ClassPtr Class::NewPointerClass(intptr_t class_id, |
| IsolateGroup* isolate_group) { |
| ASSERT(IsFfiPointerClassId(class_id)); |
| intptr_t host_instance_size = Pointer::InstanceSize(); |
| intptr_t target_instance_size = |
| compiler::target::RoundedAllocationSize(RTN::Pointer::InstanceSize()); |
| Class& result = Class::Handle(New<Pointer, RTN::Pointer>( |
| class_id, isolate_group, /*register_class=*/false)); |
| result.set_instance_size(host_instance_size, target_instance_size); |
| result.set_type_arguments_field_offset(Pointer::type_arguments_offset(), |
| RTN::Pointer::type_arguments_offset()); |
| |
| const intptr_t host_next_field_offset = Pointer::NextFieldOffset(); |
| const intptr_t target_next_field_offset = RTN::Pointer::NextFieldOffset(); |
| |
| result.set_next_field_offset(host_next_field_offset, |
| target_next_field_offset); |
| result.set_is_prefinalized(); |
| isolate_group->class_table()->Register(result); |
| return result.ptr(); |
| } |
| |
| void Class::set_name(const String& value) const { |
| ASSERT(untag()->name() == String::null()); |
| ASSERT(value.IsSymbol()); |
| untag()->set_name(value.ptr()); |
| #if !defined(PRODUCT) |
| if (untag()->user_name() == String::null()) { |
| // TODO(johnmccutchan): Eagerly set user name for VM isolate classes, |
| // lazily set user name for the other classes. |
| // Generate and set user_name. |
| const String& user_name = String::Handle( |
| Symbols::New(Thread::Current(), GenerateUserVisibleName())); |
| set_user_name(user_name); |
| } |
| #endif // !defined(PRODUCT) |
| } |
| |
| #if !defined(PRODUCT) |
| void Class::set_user_name(const String& value) const { |
| untag()->set_user_name(value.ptr()); |
| } |
| #endif // !defined(PRODUCT) |
| |
| const char* Class::GenerateUserVisibleName() const { |
| if (FLAG_show_internal_names) { |
| return String::Handle(Name()).ToCString(); |
| } |
| switch (id()) { |
| case kFloat32x4Cid: |
| return Symbols::Float32x4().ToCString(); |
| case kInt32x4Cid: |
| return Symbols::Int32x4().ToCString(); |
| case kTypedDataInt8ArrayCid: |
| case kExternalTypedDataInt8ArrayCid: |
| return Symbols::Int8List().ToCString(); |
| case kTypedDataUint8ArrayCid: |
| case kExternalTypedDataUint8ArrayCid: |
| return Symbols::Uint8List().ToCString(); |
| case kTypedDataUint8ClampedArrayCid: |
| case kExternalTypedDataUint8ClampedArrayCid: |
| return Symbols::Uint8ClampedList().ToCString(); |
| case kTypedDataInt16ArrayCid: |
| case kExternalTypedDataInt16ArrayCid: |
| return Symbols::Int16List().ToCString(); |
| case kTypedDataUint16ArrayCid: |
| case kExternalTypedDataUint16ArrayCid: |
| return Symbols::Uint16List().ToCString(); |
| case kTypedDataInt32ArrayCid: |
| case kExternalTypedDataInt32ArrayCid: |
| return Symbols::Int32List().ToCString(); |
| case kTypedDataUint32ArrayCid: |
| case kExternalTypedDataUint32ArrayCid: |
| return Symbols::Uint32List().ToCString(); |
| case kTypedDataInt64ArrayCid: |
| case kExternalTypedDataInt64ArrayCid: |
| return Symbols::Int64List().ToCString(); |
| case kTypedDataUint64ArrayCid: |
| case kExternalTypedDataUint64ArrayCid: |
| return Symbols::Uint64List().ToCString(); |
| case kTypedDataInt32x4ArrayCid: |
| case kExternalTypedDataInt32x4ArrayCid: |
| return Symbols::Int32x4List().ToCString(); |
| case kTypedDataFloat32x4ArrayCid: |
| case kExternalTypedDataFloat32x4ArrayCid: |
| return Symbols::Float32x4List().ToCString(); |
| case kTypedDataFloat64x2ArrayCid: |
| case kExternalTypedDataFloat64x2ArrayCid: |
| return Symbols::Float64x2List().ToCString(); |
| case kTypedDataFloat32ArrayCid: |
| case kExternalTypedDataFloat32ArrayCid: |
| return Symbols::Float32List().ToCString(); |
| case kTypedDataFloat64ArrayCid: |
| case kExternalTypedDataFloat64ArrayCid: |
| return Symbols::Float64List().ToCString(); |
| |
| case kFfiPointerCid: |
| return Symbols::FfiPointer().ToCString(); |
| case kFfiDynamicLibraryCid: |
| return Symbols::FfiDynamicLibrary().ToCString(); |
| |
| #if !defined(PRODUCT) |
| case kNullCid: |
| return Symbols::Null().ToCString(); |
| case kDynamicCid: |
| return Symbols::Dynamic().ToCString(); |
| case kVoidCid: |
| return Symbols::Void().ToCString(); |
| case kNeverCid: |
| return Symbols::Never().ToCString(); |
| case kClassCid: |
| return Symbols::Class().ToCString(); |
| case kTypeParametersCid: |
| return Symbols::TypeParameters().ToCString(); |
| case kTypeArgumentsCid: |
| return Symbols::TypeArguments().ToCString(); |
| case kPatchClassCid: |
| return Symbols::PatchClass().ToCString(); |
| case kFunctionCid: |
| return Symbols::Function().ToCString(); |
| case kClosureDataCid: |
| return Symbols::ClosureData().ToCString(); |
| case kFfiTrampolineDataCid: |
| return Symbols::FfiTrampolineData().ToCString(); |
| case kFieldCid: |
| return Symbols::Field().ToCString(); |
| case kScriptCid: |
| return Symbols::Script().ToCString(); |
| case kLibraryCid: |
| return Symbols::Library().ToCString(); |
| case kLibraryPrefixCid: |
| return Symbols::LibraryPrefix().ToCString(); |
| case kNamespaceCid: |
| return Symbols::Namespace().ToCString(); |
| case kKernelProgramInfoCid: |
| return Symbols::KernelProgramInfo().ToCString(); |
| case kWeakSerializationReferenceCid: |
| return Symbols::WeakSerializationReference().ToCString(); |
| case kCodeCid: |
| return Symbols::Code().ToCString(); |
| case kInstructionsCid: |
| return Symbols::Instructions().ToCString(); |
| case kInstructionsSectionCid: |
| return Symbols::InstructionsSection().ToCString(); |
| case kInstructionsTableCid: |
| return Symbols::InstructionsTable().ToCString(); |
| case kObjectPoolCid: |
| return Symbols::ObjectPool().ToCString(); |
| case kCodeSourceMapCid: |
| return Symbols::CodeSourceMap().ToCString(); |
| case kPcDescriptorsCid: |
| return Symbols::PcDescriptors().ToCString(); |
| case kCompressedStackMapsCid: |
| return Symbols::CompressedStackMaps().ToCString(); |
| case kLocalVarDescriptorsCid: |
| return Symbols::LocalVarDescriptors().ToCString(); |
| case kExceptionHandlersCid: |
| return Symbols::ExceptionHandlers().ToCString(); |
| case kContextCid: |
| return Symbols::Context().ToCString(); |
| case kContextScopeCid: |
| return Symbols::ContextScope().ToCString(); |
| case kSentinelCid: |
| return Symbols::Sentinel().ToCString(); |
| case kSingleTargetCacheCid: |
| return Symbols::SingleTargetCache().ToCString(); |
| case kICDataCid: |
| return Symbols::ICData().ToCString(); |
| case kMegamorphicCacheCid: |
| return Symbols::MegamorphicCache().ToCString(); |
| case kSubtypeTestCacheCid: |
| return Symbols::SubtypeTestCache().ToCString(); |
| case kLoadingUnitCid: |
| return Symbols::LoadingUnit().ToCString(); |
| case kApiErrorCid: |
| return Symbols::ApiError().ToCString(); |
| case kLanguageErrorCid: |
| return Symbols::LanguageError().ToCString(); |
| case kUnhandledExceptionCid: |
| return Symbols::UnhandledException().ToCString(); |
| case kUnwindErrorCid: |
| return Symbols::UnwindError().ToCString(); |
| case kIntegerCid: |
| case kSmiCid: |
| case kMintCid: |
| return Symbols::Int().ToCString(); |
| case kDoubleCid: |
| return Symbols::Double().ToCString(); |
| case kOneByteStringCid: |
| case kTwoByteStringCid: |
| case kExternalOneByteStringCid: |
| case kExternalTwoByteStringCid: |
| return Symbols::_String().ToCString(); |
| case kArrayCid: |
| case kImmutableArrayCid: |
| case kGrowableObjectArrayCid: |
| return Symbols::List().ToCString(); |
| #endif // !defined(PRODUCT) |
| } |
| String& name = String::Handle(Name()); |
| name = Symbols::New(Thread::Current(), String::ScrubName(name)); |
| if (name.ptr() == Symbols::FutureImpl().ptr() && |
| library() == Library::AsyncLibrary()) { |
| return Symbols::Future().ToCString(); |
| } |
| return name.ToCString(); |
| } |
| |
| void Class::set_script(const Script& value) const { |
| untag()->set_script(value.ptr()); |
| } |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| void Class::set_token_pos(TokenPosition token_pos) const { |
| ASSERT(!token_pos.IsClassifying()); |
| StoreNonPointer(&untag()->token_pos_, token_pos); |
| } |
| |
| void Class::set_end_token_pos(TokenPosition token_pos) const { |
| ASSERT(!token_pos.IsClassifying()); |
| StoreNonPointer(&untag()->end_token_pos_, token_pos); |
| } |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| |
| int32_t Class::SourceFingerprint() const { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| return kernel::KernelSourceFingerprintHelper::CalculateClassFingerprint( |
| *this); |
| #else |
| return 0; |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
| void Class::set_is_implemented() const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| set_is_implemented_unsafe(); |
| } |
| |
| void Class::set_is_implemented_unsafe() const { |
| set_state_bits(ImplementedBit::update(true, state_bits())); |
| } |
| |
| void Class::set_is_abstract() const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| set_state_bits(AbstractBit::update(true, state_bits())); |
| } |
| |
| void Class::set_is_declaration_loaded() const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| set_is_declaration_loaded_unsafe(); |
| } |
| |
| void Class::set_is_declaration_loaded_unsafe() const { |
| ASSERT(!is_declaration_loaded()); |
| set_state_bits(ClassLoadingBits::update(UntaggedClass::kDeclarationLoaded, |
| state_bits())); |
| } |
| |
| void Class::set_is_type_finalized() const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| ASSERT(is_declaration_loaded()); |
| ASSERT(!is_type_finalized()); |
| set_state_bits( |
| ClassLoadingBits::update(UntaggedClass::kTypeFinalized, state_bits())); |
| } |
| |
| void Class::set_is_synthesized_class() const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| set_is_synthesized_class_unsafe(); |
| } |
| |
| void Class::set_is_synthesized_class_unsafe() const { |
| set_state_bits(SynthesizedClassBit::update(true, state_bits())); |
| } |
| |
| void Class::set_is_enum_class() const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| set_state_bits(EnumBit::update(true, state_bits())); |
| } |
| |
| void Class::set_is_const() const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| set_state_bits(ConstBit::update(true, state_bits())); |
| } |
| |
| void Class::set_is_transformed_mixin_application() const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| set_state_bits(TransformedMixinApplicationBit::update(true, state_bits())); |
| } |
| |
| void Class::set_is_fields_marked_nullable() const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| set_state_bits(FieldsMarkedNullableBit::update(true, state_bits())); |
| } |
| |
| void Class::set_is_allocated(bool value) const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| set_is_allocated_unsafe(value); |
| } |
| |
| void Class::set_is_allocated_unsafe(bool value) const { |
| set_state_bits(IsAllocatedBit::update(value, state_bits())); |
| } |
| |
| void Class::set_is_loaded(bool value) const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| set_state_bits(IsLoadedBit::update(value, state_bits())); |
| } |
| |
| void Class::set_is_finalized() const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| ASSERT(!is_finalized()); |
| set_state_bits( |
| ClassFinalizedBits::update(UntaggedClass::kFinalized, state_bits())); |
| } |
| |
| void Class::set_is_allocate_finalized() const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| ASSERT(!is_allocate_finalized()); |
| set_state_bits(ClassFinalizedBits::update(UntaggedClass::kAllocateFinalized, |
| state_bits())); |
| } |
| |
| void Class::set_is_prefinalized() const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| ASSERT(!is_finalized()); |
| set_state_bits( |
| ClassFinalizedBits::update(UntaggedClass::kPreFinalized, state_bits())); |
| } |
| |
| void Class::set_interfaces(const Array& value) const { |
| ASSERT(!value.IsNull()); |
| untag()->set_interfaces(value.ptr()); |
| } |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| |
| void Class::AddDirectImplementor(const Class& implementor, |
| bool is_mixin) const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| ASSERT(is_implemented()); |
| ASSERT(!implementor.IsNull()); |
| GrowableObjectArray& direct_implementors = |
| GrowableObjectArray::Handle(untag()->direct_implementors()); |
| if (direct_implementors.IsNull()) { |
| direct_implementors = GrowableObjectArray::New(4, Heap::kOld); |
| untag()->set_direct_implementors(direct_implementors.ptr()); |
| } |
| #if defined(DEBUG) |
| // Verify that the same class is not added twice. |
  // The only exception is mixins: when a mixin application is transformed,
  // the mixin is added to the end of the interfaces list and may be
  // duplicated:
| // class X = A with B implements B; |
| // This is rare and harmless. |
| if (!is_mixin) { |
| for (intptr_t i = 0; i < direct_implementors.Length(); i++) { |
| ASSERT(direct_implementors.At(i) != implementor.ptr()); |
| } |
| } |
| #endif |
| direct_implementors.Add(implementor, Heap::kOld); |
| } |
| |
| void Class::set_direct_implementors( |
| const GrowableObjectArray& implementors) const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| untag()->set_direct_implementors(implementors.ptr()); |
| } |
| |
| void Class::AddDirectSubclass(const Class& subclass) const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| ASSERT(!subclass.IsNull()); |
| ASSERT(subclass.SuperClass() == ptr()); |
| // Do not keep track of the direct subclasses of class Object. |
| ASSERT(!IsObjectClass()); |
| GrowableObjectArray& direct_subclasses = |
| GrowableObjectArray::Handle(untag()->direct_subclasses()); |
| if (direct_subclasses.IsNull()) { |
| direct_subclasses = GrowableObjectArray::New(4, Heap::kOld); |
| untag()->set_direct_subclasses(direct_subclasses.ptr()); |
| } |
| #if defined(DEBUG) |
| // Verify that the same class is not added twice. |
| for (intptr_t i = 0; i < direct_subclasses.Length(); i++) { |
| ASSERT(direct_subclasses.At(i) != subclass.ptr()); |
| } |
| #endif |
| direct_subclasses.Add(subclass, Heap::kOld); |
| } |
| |
| void Class::set_direct_subclasses(const GrowableObjectArray& subclasses) const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| untag()->set_direct_subclasses(subclasses.ptr()); |
| } |
| |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| |
| ArrayPtr Class::constants() const { |
| return untag()->constants(); |
| } |
| |
| void Class::set_constants(const Array& value) const { |
| untag()->set_constants(value.ptr()); |
| } |
| |
| void Class::set_declaration_type(const Type& value) const { |
| ASSERT(id() != kDynamicCid && id() != kVoidCid); |
| ASSERT(!value.IsNull() && value.IsCanonical() && value.IsOld()); |
| ASSERT((declaration_type() == Object::null()) || |
| (declaration_type() == value.ptr())); // Set during own finalization. |
| // Since DeclarationType is used as the runtime type of instances of a |
| // non-generic class, its nullability must be kNonNullable. |
| // The exception is DeclarationType of Null which is kNullable. |
| ASSERT(value.type_class_id() != kNullCid || value.IsNullable()); |
| ASSERT(value.type_class_id() == kNullCid || value.IsNonNullable()); |
| untag()->set_declaration_type<std::memory_order_release>(value.ptr()); |
| } |
| |
| TypePtr Class::DeclarationType() const { |
| ASSERT(is_declaration_loaded()); |
| if (IsNullClass()) { |
| return Type::NullType(); |
| } |
| if (IsDynamicClass()) { |
| return Type::DynamicType(); |
| } |
| if (IsVoidClass()) { |
| return Type::VoidType(); |
| } |
| if (declaration_type() != Type::null()) { |
| return declaration_type(); |
| } |
| { |
| auto thread = Thread::Current(); |
| SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock()); |
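    // Re-check under the program lock (double-checked locking): another thread
    // may have computed and cached the declaration type in the meantime.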
| if (declaration_type() != Type::null()) { |
| return declaration_type(); |
| } |
| // For efficiency, the runtimeType intrinsic returns the type cached by |
| // DeclarationType without checking its nullability. Therefore, we |
| // consistently cache the kNonNullable version of the type. |
| // The exception is type Null which is stored as kNullable. |
| TypeArguments& type_args = TypeArguments::Handle(); |
| const intptr_t num_type_params = NumTypeParameters(); |
| if (num_type_params > 0) { |
| type_args = TypeArguments::New(num_type_params); |
| TypeParameter& type_param = TypeParameter::Handle(); |
| for (intptr_t i = 0; i < num_type_params; i++) { |
| type_param = TypeParameterAt(i); |
| type_args.SetTypeAt(i, type_param); |
| } |
| } |
| Type& type = |
| Type::Handle(Type::New(*this, type_args, Nullability::kNonNullable)); |
| type ^= ClassFinalizer::FinalizeType(type); |
| set_declaration_type(type); |
| return type.ptr(); |
| } |
| } |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| void Class::set_allocation_stub(const Code& value) const { |
  // Never clear the stub as it may still be a target, but it will be GC'd if
  // not referenced.
| ASSERT(!value.IsNull()); |
| ASSERT(untag()->allocation_stub() == Code::null()); |
| untag()->set_allocation_stub(value.ptr()); |
| } |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| |
| void Class::DisableAllocationStub() const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| { |
| const Code& existing_stub = Code::Handle(allocation_stub()); |
| if (existing_stub.IsNull()) { |
| return; |
| } |
| } |
| auto thread = Thread::Current(); |
| SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock()); |
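  // Re-load the stub under the program lock: another thread may have disabled
  // it since the unlocked check above.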
| const Code& existing_stub = Code::Handle(allocation_stub()); |
| if (existing_stub.IsNull()) { |
| return; |
| } |
| ASSERT(!existing_stub.IsDisabled()); |
| // Change the stub so that the next caller will regenerate the stub. |
| existing_stub.DisableStubCode(); |
| // Disassociate the existing stub from class. |
| untag()->set_allocation_stub(Code::null()); |
| #endif // defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
| bool Class::IsDartFunctionClass() const { |
| return ptr() == Type::Handle(Type::DartFunctionType()).type_class(); |
| } |
| |
| bool Class::IsFutureClass() const { |
  // Looking up future_class in the object store would not work, because this
  // function is called during class finalization, before the object store
  // field has been initialized by InitKnownObjects().
| return (Name() == Symbols::Future().ptr()) && |
| (library() == Library::AsyncLibrary()); |
| } |
| |
| // Checks if type T0 is a subtype of type T1. |
| // Type T0 is specified by class 'cls' parameterized with 'type_arguments' and |
| // by 'nullability', and type T1 is specified by 'other' and must have a type |
| // class. |
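// For example, this is the check behind queries such as
// List<int> <: Iterable<num>, including the FutureOr and nullability rules
// spelled out below.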
| bool Class::IsSubtypeOf(const Class& cls, |
| const TypeArguments& type_arguments, |
| Nullability nullability, |
| const AbstractType& other, |
| Heap::Space space, |
| TrailPtr trail) { |
| // This function does not support Null, Never, dynamic, or void as type T0. |
| classid_t this_cid = cls.id(); |
| ASSERT(this_cid != kNullCid && this_cid != kNeverCid && |
| this_cid != kDynamicCid && this_cid != kVoidCid); |
| // Type T1 must have a type class (e.g. not a type param or a function type). |
| ASSERT(other.HasTypeClass()); |
| const classid_t other_cid = other.type_class_id(); |
| if (other_cid == kDynamicCid || other_cid == kVoidCid) { |
| return true; |
| } |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| auto isolate_group = thread->isolate_group(); |
| // Nullability of left and right hand sides is verified in strong mode only. |
| const bool verified_nullability = |
| !isolate_group->use_strict_null_safety_checks() || |
| nullability != Nullability::kNullable || !other.IsNonNullable(); |
| |
| // Right Object. |
| if (other_cid == kObjectCid) { |
| return verified_nullability; |
| } |
| const Class& other_class = Class::Handle(zone, other.type_class()); |
| const TypeArguments& other_type_arguments = |
| TypeArguments::Handle(zone, other.arguments()); |
  // Use the 'this_class' object as if it were the receiver of this method,
  // but instead of recursing, reset it to the super class and loop.
| Class& this_class = Class::Handle(zone, cls.ptr()); |
| while (true) { |
| // Apply additional subtyping rules if T0 or T1 are 'FutureOr'. |
| |
| // Left FutureOr: |
| // if T0 is FutureOr<S0> then: |
| // T0 <: T1 iff Future<S0> <: T1 and S0 <: T1 |
| if (this_cid == kFutureOrCid) { |
| // Check Future<S0> <: T1. |
| ObjectStore* object_store = IsolateGroup::Current()->object_store(); |
| const Class& future_class = |
| Class::Handle(zone, object_store->future_class()); |
| ASSERT(!future_class.IsNull() && future_class.NumTypeParameters() == 1 && |
| this_class.NumTypeParameters() == 1); |
| ASSERT(type_arguments.IsNull() || type_arguments.Length() >= 1); |
| if (Class::IsSubtypeOf(future_class, type_arguments, |
| Nullability::kNonNullable, other, space, trail)) { |
| // Check S0 <: T1. |
| const AbstractType& type_arg = |
| AbstractType::Handle(zone, type_arguments.TypeAtNullSafe(0)); |
| if (type_arg.IsSubtypeOf(other, space, trail)) { |
| return verified_nullability; |
| } |
| } |
| } |
| |
| // Right FutureOr: |
| // if T1 is FutureOr<S1> then: |
| // T0 <: T1 iff any of the following hold: |
| // either T0 <: Future<S1> |
| // or T0 <: S1 |
| // or T0 is X0 and X0 has bound S0 and S0 <: T1 (checked elsewhere) |
| if (other_cid == kFutureOrCid) { |
| const AbstractType& other_type_arg = |
| AbstractType::Handle(zone, other_type_arguments.TypeAtNullSafe(0)); |
| // Check if S1 is a top type. |
| if (other_type_arg.IsTopTypeForSubtyping()) { |
| return true; |
| } |
| // Check T0 <: Future<S1> when T0 is Future<S0>. |
| if (this_class.IsFutureClass()) { |
| const AbstractType& type_arg = |
| AbstractType::Handle(zone, type_arguments.TypeAtNullSafe(0)); |
| // If T0 is Future<S0>, then T0 <: Future<S1>, iff S0 <: S1. |
| if (type_arg.IsSubtypeOf(other_type_arg, space, trail)) { |
| if (verified_nullability) { |
| return true; |
| } |
| } |
| } |
      // Checking T0 <: Future<S1> when T0 is FutureOr<S0> was already done
      // above by the left FutureOr rule.
| // Check T0 <: S1. |
| if (other_type_arg.HasTypeClass() && |
| Class::IsSubtypeOf(this_class, type_arguments, nullability, |
| other_type_arg, space, trail)) { |
| return true; |
| } |
| } |
| |
| // Left nullable: |
| // if T0 is S0? then: |
| // T0 <: T1 iff S0 <: T1 and Null <: T1 |
| if (!verified_nullability) { |
| return false; |
| } |
| |
| // Check for reflexivity. |
| if (this_class.ptr() == other_class.ptr()) { |
| const intptr_t num_type_params = this_class.NumTypeParameters(); |
| if (num_type_params == 0) { |
| return true; |
| } |
| const intptr_t num_type_args = this_class.NumTypeArguments(); |
| const intptr_t from_index = num_type_args - num_type_params; |
| // Since we do not truncate the type argument vector of a subclass (see |
| // below), we only check a subvector of the proper length. |
| // Check for covariance. |
| if (other_type_arguments.IsNull()) { |
| return true; |
| } |
| AbstractType& type = AbstractType::Handle(zone); |
| AbstractType& other_type = AbstractType::Handle(zone); |
| for (intptr_t i = 0; i < num_type_params; ++i) { |
| type = type_arguments.TypeAtNullSafe(from_index + i); |
| other_type = other_type_arguments.TypeAt(from_index + i); |
| ASSERT(!type.IsNull() && !other_type.IsNull()); |
| if (!type.IsSubtypeOf(other_type, space, trail)) { |
| return false; |
| } |
| } |
| return true; |
| } |
| // Check for 'direct super type' specified in the implements clause |
| // and check for transitivity at the same time. |
| Array& interfaces = Array::Handle(zone, this_class.interfaces()); |
| AbstractType& interface = AbstractType::Handle(zone); |
| Class& interface_class = Class::Handle(zone); |
| TypeArguments& interface_args = TypeArguments::Handle(zone); |
| for (intptr_t i = 0; i < interfaces.Length(); i++) { |
| interface ^= interfaces.At(i); |
| ASSERT(interface.IsFinalized()); |
| interface_class = interface.type_class(); |
| interface_args = interface.arguments(); |
| if (!interface_args.IsNull() && !interface_args.IsInstantiated()) { |
| // This type class implements an interface that is parameterized with |
| // generic type(s), e.g. it implements List<T>. |
| // The uninstantiated type T must be instantiated using the type |
| // parameters of this type before performing the type test. |
| // The type arguments of this type that are referred to by the type |
| // parameters of the interface are at the end of the type vector, |
| // after the type arguments of the super type of this type. |
| // The index of the type parameters is adjusted upon finalization. |
| interface_args = interface_args.InstantiateFrom( |
| type_arguments, Object::null_type_arguments(), kNoneFree, space); |
| } |
| // In Dart 2, implementing Function has no meaning. |
| // TODO(regis): Can we encounter and skip Object as well? |
| if (interface_class.IsDartFunctionClass()) { |
| continue; |
| } |
| // No need to pass the trail as cycles are not possible via interfaces. |
| if (Class::IsSubtypeOf(interface_class, interface_args, |
| Nullability::kNonNullable, other, space)) { |
| return true; |
| } |
| } |
| // "Recurse" up the class hierarchy until we have reached the top. |
| this_class = this_class.SuperClass(); |
| if (this_class.IsNull()) { |
| return false; |
| } |
| this_cid = this_class.id(); |
| } |
| UNREACHABLE(); |
| return false; |
| } |
| |
| bool Class::IsTopLevel() const { |
| return Name() == Symbols::TopLevel().ptr(); |
| } |
| |
| bool Class::IsPrivate() const { |
| return Library::IsPrivate(String::Handle(Name())); |
| } |
| |
| FunctionPtr Class::LookupDynamicFunctionUnsafe(const String& name) const { |
| return LookupFunctionReadLocked(name, kInstance); |
| } |
| |
| FunctionPtr Class::LookupDynamicFunctionAllowPrivate(const String& name) const { |
| return LookupFunctionAllowPrivate(name, kInstance); |
| } |
| |
| FunctionPtr Class::LookupStaticFunction(const String& name) const { |
| Thread* thread = Thread::Current(); |
| SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| return LookupFunctionReadLocked(name, kStatic); |
| } |
| |
| FunctionPtr Class::LookupStaticFunctionAllowPrivate(const String& name) const { |
| return LookupFunctionAllowPrivate(name, kStatic); |
| } |
| |
| FunctionPtr Class::LookupConstructor(const String& name) const { |
| Thread* thread = Thread::Current(); |
| SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| return LookupFunctionReadLocked(name, kConstructor); |
| } |
| |
| FunctionPtr Class::LookupConstructorAllowPrivate(const String& name) const { |
| return LookupFunctionAllowPrivate(name, kConstructor); |
| } |
| |
| FunctionPtr Class::LookupFactory(const String& name) const { |
| Thread* thread = Thread::Current(); |
| SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| return LookupFunctionReadLocked(name, kFactory); |
| } |
| |
| FunctionPtr Class::LookupFactoryAllowPrivate(const String& name) const { |
| return LookupFunctionAllowPrivate(name, kFactory); |
| } |
| |
| FunctionPtr Class::LookupFunctionAllowPrivate(const String& name) const { |
| return LookupFunctionAllowPrivate(name, kAny); |
| } |
| |
| FunctionPtr Class::LookupFunctionReadLocked(const String& name) const { |
| return LookupFunctionReadLocked(name, kAny); |
| } |
| |
// Returns true if 'name' is the concatenation of 'prefix' and 'accessor_name'.
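// For example, with the getter prefix "get:" and accessor_name "foo", the
// name "get:foo" matches.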
| static bool MatchesAccessorName(const String& name, |
| const char* prefix, |
| intptr_t prefix_length, |
| const String& accessor_name) { |
| intptr_t name_len = name.Length(); |
| intptr_t accessor_name_len = accessor_name.Length(); |
| |
| if (name_len != (accessor_name_len + prefix_length)) { |
| return false; |
| } |
| for (intptr_t i = 0; i < prefix_length; i++) { |
| if (name.CharAt(i) != prefix[i]) { |
| return false; |
| } |
| } |
| for (intptr_t i = 0, j = prefix_length; i < accessor_name_len; i++, j++) { |
| if (name.CharAt(j) != accessor_name.CharAt(i)) { |
| return false; |
| } |
| } |
| return true; |
| } |
| |
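// Filters a looked-up function by member kind: returns 'func' if it matches
// 'kind' (e.g. a generative constructor for kConstructor), and
// Function::null() otherwise.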
| FunctionPtr Class::CheckFunctionType(const Function& func, MemberKind kind) { |
| if ((kind == kInstance) || (kind == kInstanceAllowAbstract)) { |
| if (func.IsDynamicFunction(kind == kInstanceAllowAbstract)) { |
| return func.ptr(); |
| } |
| } else if (kind == kStatic) { |
| if (func.IsStaticFunction()) { |
| return func.ptr(); |
| } |
| } else if (kind == kConstructor) { |
| if (func.IsGenerativeConstructor()) { |
| ASSERT(!func.is_static()); |
| return func.ptr(); |
| } |
| } else if (kind == kFactory) { |
| if (func.IsFactory()) { |
| ASSERT(func.is_static()); |
| return func.ptr(); |
| } |
| } else if (kind == kAny) { |
| return func.ptr(); |
| } |
| return Function::null(); |
| } |
| |
| FunctionPtr Class::LookupFunctionReadLocked(const String& name, |
| MemberKind kind) const { |
| ASSERT(!IsNull()); |
| Thread* thread = Thread::Current(); |
| RELEASE_ASSERT(is_finalized()); |
  // The caller must already hold the program_lock, because this method can be
  // invoked with either a ReadRwLock or a WriteRwLock.
| #if defined(DEBUG) |
| ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadReader()); |
| #endif |
| REUSABLE_ARRAY_HANDLESCOPE(thread); |
| REUSABLE_FUNCTION_HANDLESCOPE(thread); |
| Array& funcs = thread->ArrayHandle(); |
| funcs = functions(); |
| ASSERT(!funcs.IsNull()); |
| const intptr_t len = funcs.Length(); |
| Function& function = thread->FunctionHandle(); |
| if (len >= kFunctionLookupHashTreshold) { |
    // TODO(dartbug.com/36097): We currently require a read lock in the
    // resolver to avoid read-write races on this hash table.
    // If we want to increase resolver speed by avoiding the need for a read
    // lock, we could make this hash table lock-free for readers.
| const Array& hash_table = |
| Array::Handle(thread->zone(), untag()->functions_hash_table()); |
| if (!hash_table.IsNull()) { |
| ClassFunctionsSet set(hash_table.ptr()); |
| REUSABLE_STRING_HANDLESCOPE(thread); |
| function ^= set.GetOrNull(FunctionName(name, &(thread->StringHandle()))); |
| // No mutations. |
| ASSERT(set.Release().ptr() == hash_table.ptr()); |
| return function.IsNull() ? Function::null() |
| : CheckFunctionType(function, kind); |
| } |
| } |
| if (name.IsSymbol()) { |
| // Quick Symbol compare. |
| NoSafepointScope no_safepoint; |
| for (intptr_t i = 0; i < len; i++) { |
| function ^= funcs.At(i); |
| if (function.name() == name.ptr()) { |
| return CheckFunctionType(function, kind); |
| } |
| } |
| } else { |
| REUSABLE_STRING_HANDLESCOPE(thread); |
| String& function_name = thread->StringHandle(); |
| for (intptr_t i = 0; i < len; i++) { |
| function ^= funcs.At(i); |
| function_name = function.name(); |
| if (function_name.Equals(name)) { |
| return CheckFunctionType(function, kind); |
| } |
| } |
| } |
| // No function found. |
| return Function::null(); |
| } |
| |
| FunctionPtr Class::LookupFunctionAllowPrivate(const String& name, |
| MemberKind kind) const { |
| ASSERT(!IsNull()); |
| Thread* thread = Thread::Current(); |
| RELEASE_ASSERT(is_finalized()); |
| SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| REUSABLE_ARRAY_HANDLESCOPE(thread); |
| REUSABLE_FUNCTION_HANDLESCOPE(thread); |
| REUSABLE_STRING_HANDLESCOPE(thread); |
| Array& funcs = thread->ArrayHandle(); |
| funcs = current_functions(); |
| ASSERT(!funcs.IsNull()); |
| const intptr_t len = funcs.Length(); |
| Function& function = thread->FunctionHandle(); |
| String& function_name = thread->StringHandle(); |
| for (intptr_t i = 0; i < len; i++) { |
| function ^= funcs.At(i); |
| function_name = function.name(); |
| if (String::EqualsIgnoringPrivateKey(function_name, name)) { |
| return CheckFunctionType(function, kind); |
| } |
| } |
| // No function found. |
| return Function::null(); |
| } |
| |
| FunctionPtr Class::LookupGetterFunction(const String& name) const { |
| return LookupAccessorFunction(kGetterPrefix, kGetterPrefixLength, name); |
| } |
| |
| FunctionPtr Class::LookupSetterFunction(const String& name) const { |
| return LookupAccessorFunction(kSetterPrefix, kSetterPrefixLength, name); |
| } |
| |
| FunctionPtr Class::LookupAccessorFunction(const char* prefix, |
| intptr_t prefix_length, |
| const String& name) const { |
| ASSERT(!IsNull()); |
| Thread* thread = Thread::Current(); |
| if (EnsureIsFinalized(thread) != Error::null()) { |
| return Function::null(); |
| } |
| REUSABLE_ARRAY_HANDLESCOPE(thread); |
| REUSABLE_FUNCTION_HANDLESCOPE(thread); |
| REUSABLE_STRING_HANDLESCOPE(thread); |
| Array& funcs = thread->ArrayHandle(); |
| funcs = current_functions(); |
| intptr_t len = funcs.Length(); |
| Function& function = thread->FunctionHandle(); |
| String& function_name = thread->StringHandle(); |
| for (intptr_t i = 0; i < len; i++) { |
| function ^= funcs.At(i); |
| function_name = function.name(); |
| if (MatchesAccessorName(function_name, prefix, prefix_length, name)) { |
| return function.ptr(); |
| } |
| } |
| |
| // No function found. |
| return Function::null(); |
| } |
| |
| FieldPtr Class::LookupInstanceField(const String& name) const { |
| return LookupField(name, kInstance); |
| } |
| |
| FieldPtr Class::LookupStaticField(const String& name) const { |
| return LookupField(name, kStatic); |
| } |
| |
| FieldPtr Class::LookupField(const String& name) const { |
| return LookupField(name, kAny); |
| } |
| |
| FieldPtr Class::LookupField(const String& name, MemberKind kind) const { |
| ASSERT(!IsNull()); |
| Thread* thread = Thread::Current(); |
| if (EnsureIsFinalized(thread) != Error::null()) { |
| return Field::null(); |
| } |
| REUSABLE_ARRAY_HANDLESCOPE(thread); |
| REUSABLE_FIELD_HANDLESCOPE(thread); |
| REUSABLE_STRING_HANDLESCOPE(thread); |
| Array& flds = thread->ArrayHandle(); |
| flds = fields(); |
| ASSERT(!flds.IsNull()); |
| intptr_t len = flds.Length(); |
| Field& field = thread->FieldHandle(); |
| if (name.IsSymbol()) { |
| // Use fast raw pointer string compare for symbols. |
| for (intptr_t i = 0; i < len; i++) { |
| field ^= flds.At(i); |
| if (name.ptr() == field.name()) { |
| if (kind == kInstance) { |
| return field.is_static() ? Field::null() : field.ptr(); |
| } else if (kind == kStatic) { |
| return field.is_static() ? field.ptr() : Field::null(); |
| } |
| ASSERT(kind == kAny); |
| return field.ptr(); |
| } |
| } |
| } else { |
| String& field_name = thread->StringHandle(); |
| for (intptr_t i = 0; i < len; i++) { |
| field ^= flds.At(i); |
| field_name = field.name(); |
| if (name.Equals(field_name)) { |
| if (kind == kInstance) { |
| return field.is_static() ? Field::null() : field.ptr(); |
| } else if (kind == kStatic) { |
| return field.is_static() ? field.ptr() : Field::null(); |
| } |
| ASSERT(kind == kAny); |
| return field.ptr(); |
| } |
| } |
| } |
| return Field::null(); |
| } |
| |
| FieldPtr Class::LookupFieldAllowPrivate(const String& name, |
| bool instance_only) const { |
| ASSERT(!IsNull()); |
| // Use slow string compare, ignoring privacy name mangling. |
| Thread* thread = Thread::Current(); |
| if (EnsureIsFinalized(thread) != Error::null()) { |
| return Field::null(); |
| } |
| REUSABLE_ARRAY_HANDLESCOPE(thread); |
| REUSABLE_FIELD_HANDLESCOPE(thread); |
| REUSABLE_STRING_HANDLESCOPE(thread); |
| Array& flds = thread->ArrayHandle(); |
| flds = fields(); |
| ASSERT(!flds.IsNull()); |
| intptr_t len = flds.Length(); |
| Field& field = thread->FieldHandle(); |
| String& field_name = thread->StringHandle(); |
| for (intptr_t i = 0; i < len; i++) { |
| field ^= flds.At(i); |
| field_name = field.name(); |
| if (field.is_static() && instance_only) { |
| // If we only care about instance fields, skip statics. |
| continue; |
| } |
| if (String::EqualsIgnoringPrivateKey(field_name, name)) { |
| return field.ptr(); |
| } |
| } |
| return Field::null(); |
| } |
| |
| FieldPtr Class::LookupInstanceFieldAllowPrivate(const String& name) const { |
| Field& field = Field::Handle(LookupFieldAllowPrivate(name, true)); |
| if (!field.IsNull() && !field.is_static()) { |
| return field.ptr(); |
| } |
| return Field::null(); |
| } |
| |
| FieldPtr Class::LookupStaticFieldAllowPrivate(const String& name) const { |
| Field& field = Field::Handle(LookupFieldAllowPrivate(name)); |
| if (!field.IsNull() && field.is_static()) { |
| return field.ptr(); |
| } |
| return Field::null(); |
| } |
| |
| const char* Class::ToCString() const { |
| NoSafepointScope no_safepoint; |
| const Library& lib = Library::Handle(library()); |
| const char* library_name = lib.IsNull() ? "" : lib.ToCString(); |
| const char* class_name = String::Handle(Name()).ToCString(); |
| return OS::SCreate(Thread::Current()->zone(), "%s Class: %s", library_name, |
| class_name); |
| } |
| |
| // Thomas Wang, Integer Hash Functions. |
| // https://gist.github.com/badboy/6267743 |
| // "64 bit to 32 bit Hash Functions" |
| static uword Hash64To32(uint64_t v) { |
| v = ~v + (v << 18); |
| v = v ^ (v >> 31); |
| v = v * 21; |
| v = v ^ (v >> 11); |
| v = v + (v << 6); |
| v = v ^ (v >> 22); |
| return static_cast<uint32_t>(v); |
| } |
| |
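// Key for canonicalizing Double instances by the bit pattern of their value;
// the 64-bit pattern is hashed via Hash64To32 above.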
| class CanonicalDoubleKey { |
| public: |
| explicit CanonicalDoubleKey(const Double& key) |
| : key_(&key), value_(key.value()) {} |
  explicit CanonicalDoubleKey(const double value)
      : key_(nullptr), value_(value) {}
| bool Matches(const Double& obj) const { |
| return obj.BitwiseEqualsToDouble(value_); |
| } |
| uword Hash() const { return Hash(value_); } |
| static uword Hash(double value) { |
| return Hash64To32(bit_cast<uint64_t>(value)); |
| } |
| |
| const Double* key_; |
| const double value_; |
| |
| private: |
| DISALLOW_ALLOCATION(); |
| }; |
| |
| class CanonicalMintKey { |
| public: |
| explicit CanonicalMintKey(const Mint& key) |
| : key_(&key), value_(key.value()) {} |
  explicit CanonicalMintKey(const int64_t value)
      : key_(nullptr), value_(value) {}
| bool Matches(const Mint& obj) const { return obj.value() == value_; } |
| uword Hash() const { return Hash(value_); } |
| static uword Hash(int64_t value) { |
| return Hash64To32(bit_cast<uint64_t>(value)); |
| } |
| |
| const Mint* key_; |
| const int64_t value_; |
| |
| private: |
| DISALLOW_ALLOCATION(); |
| }; |
| |
| // Traits for looking up Canonical numbers based on a hash of the value. |
| template <typename ObjectType, typename KeyType> |
| class CanonicalNumberTraits { |
| public: |
| static const char* Name() { return "CanonicalNumberTraits"; } |
| static bool ReportStats() { return false; } |
| |
| // Called when growing the table. |
| static bool IsMatch(const Object& a, const Object& b) { |
| return a.ptr() == b.ptr(); |
| } |
| static bool IsMatch(const KeyType& a, const Object& b) { |
| return a.Matches(ObjectType::Cast(b)); |
| } |
| static uword Hash(const Object& key) { |
| return KeyType::Hash(ObjectType::Cast(key).value()); |
| } |
| static uword Hash(const KeyType& key) { return key.Hash(); } |
| static ObjectPtr NewKey(const KeyType& obj) { |
    if (obj.key_ != nullptr) {
      return obj.key_->ptr();
    } else {
      UNIMPLEMENTED();
      return Object::null();
| } |
| } |
| }; |
| typedef UnorderedHashSet<CanonicalNumberTraits<Double, CanonicalDoubleKey> > |
| CanonicalDoubleSet; |
| typedef UnorderedHashSet<CanonicalNumberTraits<Mint, CanonicalMintKey> > |
| CanonicalMintSet; |
| |
| // Returns an instance of Double or Double::null(). |
| DoublePtr Class::LookupCanonicalDouble(Zone* zone, double value) const { |
| ASSERT(this->ptr() == |
| IsolateGroup::Current()->object_store()->double_class()); |
| if (this->constants() == Array::null()) return Double::null(); |
| |
| Double& canonical_value = Double::Handle(zone); |
| CanonicalDoubleSet constants(zone, this->constants()); |
| canonical_value ^= constants.GetOrNull(CanonicalDoubleKey(value)); |
| this->set_constants(constants.Release()); |
| return canonical_value.ptr(); |
| } |
| |
| // Returns an instance of Mint or Mint::null(). |
| MintPtr Class::LookupCanonicalMint(Zone* zone, int64_t value) const { |
| ASSERT(this->ptr() == IsolateGroup::Current()->object_store()->mint_class()); |
| if (this->constants() == Array::null()) return Mint::null(); |
| |
| Mint& canonical_value = Mint::Handle(zone); |
| CanonicalMintSet constants(zone, this->constants()); |
| canonical_value ^= constants.GetOrNull(CanonicalMintKey(value)); |
| this->set_constants(constants.Release()); |
| return canonical_value.ptr(); |
| } |
| |
| class CanonicalInstanceKey { |
| public: |
| explicit CanonicalInstanceKey(const Instance& key) : key_(key) { |
| ASSERT(!(key.IsString() || key.IsInteger() || key.IsAbstractType())); |
| } |
| bool Matches(const Instance& obj) const { |
| ASSERT(!(obj.IsString() || obj.IsInteger() || obj.IsAbstractType())); |
| if (key_.CanonicalizeEquals(obj)) { |
| ASSERT(obj.IsCanonical()); |
| return true; |
| } |
| return false; |
| } |
| uword Hash() const { return key_.CanonicalizeHash(); } |
| const Instance& key_; |
| |
| private: |
| DISALLOW_ALLOCATION(); |
| }; |
| |
| // Traits for looking up Canonical Instances based on a hash of the fields. |
| class CanonicalInstanceTraits { |
| public: |
| static const char* Name() { return "CanonicalInstanceTraits"; } |
| static bool ReportStats() { return false; } |
| |
| // Called when growing the table. |
| static bool IsMatch(const Object& a, const Object& b) { |
| ASSERT(!(a.IsString() || a.IsInteger() || a.IsAbstractType())); |
| ASSERT(!(b.IsString() || b.IsInteger() || b.IsAbstractType())); |
| return a.ptr() == b.ptr(); |
| } |
| static bool IsMatch(const CanonicalInstanceKey& a, const Object& b) { |
| return a.Matches(Instance::Cast(b)); |
| } |
| static uword Hash(const Object& key) { |
| ASSERT(!(key.IsString() || key.IsNumber() || key.IsAbstractType())); |
| ASSERT(key.IsInstance()); |
| return Instance::Cast(key).CanonicalizeHash(); |
| } |
| static uword Hash(const CanonicalInstanceKey& key) { return key.Hash(); } |
| static ObjectPtr NewKey(const CanonicalInstanceKey& obj) { |
| return obj.key_.ptr(); |
| } |
| }; |
| typedef UnorderedHashSet<CanonicalInstanceTraits> CanonicalInstancesSet; |
| |
| InstancePtr Class::LookupCanonicalInstance(Zone* zone, |
| const Instance& value) const { |
| ASSERT(this->ptr() == value.clazz()); |
| ASSERT(is_finalized() || is_prefinalized()); |
| Instance& canonical_value = Instance::Handle(zone); |
| if (this->constants() != Array::null()) { |
| CanonicalInstancesSet constants(zone, this->constants()); |
| canonical_value ^= constants.GetOrNull(CanonicalInstanceKey(value)); |
| this->set_constants(constants.Release()); |
| } |
| return canonical_value.ptr(); |
| } |
| |
| InstancePtr Class::InsertCanonicalConstant(Zone* zone, |
| const Instance& constant) const { |
| ASSERT(this->ptr() == constant.clazz()); |
| Instance& canonical_value = Instance::Handle(zone); |
| if (this->constants() == Array::null()) { |
| CanonicalInstancesSet constants( |
| HashTables::New<CanonicalInstancesSet>(128, Heap::kOld)); |
| canonical_value ^= constants.InsertNewOrGet(CanonicalInstanceKey(constant)); |
| this->set_constants(constants.Release()); |
| } else { |
| CanonicalInstancesSet constants(Thread::Current()->zone(), |
| this->constants()); |
| canonical_value ^= constants.InsertNewOrGet(CanonicalInstanceKey(constant)); |
| this->set_constants(constants.Release()); |
| } |
| return canonical_value.ptr(); |
| } |
| |
| void Class::InsertCanonicalDouble(Zone* zone, const Double& constant) const { |
| if (this->constants() == Array::null()) { |
| this->set_constants(Array::Handle( |
| zone, HashTables::New<CanonicalDoubleSet>(128, Heap::kOld))); |
| } |
| CanonicalDoubleSet constants(zone, this->constants()); |
| constants.InsertNewOrGet(CanonicalDoubleKey(constant)); |
| this->set_constants(constants.Release()); |
| } |
| |
| void Class::InsertCanonicalMint(Zone* zone, const Mint& constant) const { |
| if (this->constants() == Array::null()) { |
| this->set_constants(Array::Handle( |
| zone, HashTables::New<CanonicalMintSet>(128, Heap::kOld))); |
| } |
| CanonicalMintSet constants(zone, this->constants()); |
| constants.InsertNewOrGet(CanonicalMintKey(constant)); |
| this->set_constants(constants.Release()); |
| } |
| |
| void Class::RehashConstants(Zone* zone) const { |
| intptr_t cid = id(); |
| if ((cid == kMintCid) || (cid == kDoubleCid)) { |
    // Constants are stored as a plain list or in a hashset with a stable
    // hashcode, which depends only on the actual value of the constant.
| return; |
| } |
| |
| const Array& old_constants = Array::Handle(zone, constants()); |
| if (old_constants.IsNull()) return; |
| |
| set_constants(Object::null_array()); |
| |
| CanonicalInstancesSet set(zone, old_constants.ptr()); |
| Instance& constant = Instance::Handle(zone); |
| CanonicalInstancesSet::Iterator it(&set); |
| while (it.MoveNext()) { |
| constant ^= set.GetKey(it.Current()); |
| ASSERT(!constant.IsNull()); |
    // Shape changes lose the canonical bit because they may result in merging
    // constants. E.g., [x1, y1], [x1, y2] -> [x1].
| DEBUG_ASSERT(constant.IsCanonical() || |
| IsolateGroup::Current()->HasAttemptedReload()); |
| InsertCanonicalConstant(zone, constant); |
| } |
| set.Release(); |
| } |
| |
| bool Class::RequireCanonicalTypeErasureOfConstants(Zone* zone) const { |
| const intptr_t num_type_params = NumTypeParameters(); |
| const intptr_t num_type_args = NumTypeArguments(); |
| const intptr_t from_index = num_type_args - num_type_params; |
| Instance& constant = Instance::Handle(zone); |
| TypeArguments& type_arguments = TypeArguments::Handle(zone); |
| CanonicalInstancesSet set(zone, constants()); |
| CanonicalInstancesSet::Iterator it(&set); |
| bool result = false; |
| while (it.MoveNext()) { |
| constant ^= set.GetKey(it.Current()); |
| ASSERT(!constant.IsNull()); |
| ASSERT(!constant.IsTypeArguments()); |
| ASSERT(!constant.IsType()); |
| type_arguments = constant.GetTypeArguments(); |
| if (type_arguments.RequireConstCanonicalTypeErasure(zone, from_index, |
| num_type_params)) { |
| result = true; |
| break; |
| } |
| } |
| set.Release(); |
| return result; |
| } |
| |
| intptr_t TypeParameters::Length() const { |
| if (IsNull() || untag()->names() == Array::null()) return 0; |
| return Smi::Value(untag()->names()->untag()->length()); |
| } |
| |
| void TypeParameters::set_names(const Array& value) const { |
| ASSERT(!value.IsNull()); |
| untag()->set_names(value.ptr()); |
| } |
| |
| StringPtr TypeParameters::NameAt(intptr_t index) const { |
| const Array& names_array = Array::Handle(names()); |
| return String::RawCast(names_array.At(index)); |
| } |
| |
| void TypeParameters::SetNameAt(intptr_t index, const String& value) const { |
| const Array& names_array = Array::Handle(names()); |
| names_array.SetAt(index, value); |
| } |
| |
| void TypeParameters::set_flags(const Array& value) const { |
| untag()->set_flags(value.ptr()); |
| } |
| |
| void TypeParameters::set_bounds(const TypeArguments& value) const { |
| // A null value represents a vector of dynamic. |
| untag()->set_bounds(value.ptr()); |
| } |
| |
| AbstractTypePtr TypeParameters::BoundAt(intptr_t index) const { |
| const TypeArguments& upper_bounds = TypeArguments::Handle(bounds()); |
| return upper_bounds.IsNull() ? Type::DynamicType() |
| : upper_bounds.TypeAt(index); |
| } |
| |
| void TypeParameters::SetBoundAt(intptr_t index, |
| const AbstractType& value) const { |
| const TypeArguments& upper_bounds = TypeArguments::Handle(bounds()); |
| upper_bounds.SetTypeAt(index, value); |
| } |
| |
| bool TypeParameters::AllDynamicBounds() const { |
| return bounds() == TypeArguments::null(); |
| } |
| |
| void TypeParameters::set_defaults(const TypeArguments& value) const { |
| // The null value represents a vector of dynamic. |
| untag()->set_defaults(value.ptr()); |
| } |
| |
| AbstractTypePtr TypeParameters::DefaultAt(intptr_t index) const { |
| const TypeArguments& default_type_args = TypeArguments::Handle(defaults()); |
| return default_type_args.IsNull() ? Type::DynamicType() |
| : default_type_args.TypeAt(index); |
| } |
| |
| void TypeParameters::SetDefaultAt(intptr_t index, |
| const AbstractType& value) const { |
| const TypeArguments& default_type_args = TypeArguments::Handle(defaults()); |
| default_type_args.SetTypeAt(index, value); |
| } |
| |
| bool TypeParameters::AllDynamicDefaults() const { |
| return defaults() == TypeArguments::null(); |
| } |
| |
| void TypeParameters::AllocateFlags(Heap::Space space) const { |
| const intptr_t len = (Length() + kFlagsPerSmiMask) >> kFlagsPerSmiShift; |
| const Array& flags_array = Array::Handle(Array::New(len, space)); |
| // Initialize flags to 0. |
| const Smi& zero = Smi::Handle(Smi::New(0)); |
| for (intptr_t i = 0; i < len; i++) { |
| flags_array.SetAt(i, zero); |
| } |
| set_flags(flags_array); |
| } |
| |
| void TypeParameters::OptimizeFlags() const { |
| if (untag()->flags() == Array::null()) return; // Already optimized. |
| const intptr_t len = (Length() + kFlagsPerSmiMask) >> kFlagsPerSmiShift; |
| const Array& flags_array = Array::Handle(flags()); |
| const Smi& zero = Smi::Handle(Smi::New(0)); |
| for (intptr_t i = 0; i < len; i++) { |
| if (flags_array.At(i) != zero.ptr()) return; |
| } |
| set_flags(Object::null_array()); |
| } |
| |
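// Flags are bit-packed into an array of Smis: flag i is stored in the Smi at
// array index (i >> kFlagsPerSmiShift), at bit (i & kFlagsPerSmiMask).
// For illustration, assuming 32 flags per Smi, flag 40 lives at bit 8 of the
// Smi at index 1.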
| bool TypeParameters::IsGenericCovariantImplAt(intptr_t index) const { |
| if (untag()->flags() == Array::null()) return false; |
| const intptr_t flag = Smi::Value( |
| Smi::RawCast(Array::Handle(flags()).At(index >> kFlagsPerSmiShift))); |
  return (flag & (1 << (index & kFlagsPerSmiMask))) != 0;
| } |
| |
| void TypeParameters::SetIsGenericCovariantImplAt(intptr_t index, |
| bool value) const { |
| const Array& flg = Array::Handle(flags()); |
| intptr_t flag = Smi::Value(Smi::RawCast(flg.At(index >> kFlagsPerSmiShift))); |
| if (value) { |
    flag |= 1 << (index & kFlagsPerSmiMask);
  } else {
    flag &= ~(1 << (index & kFlagsPerSmiMask));
| } |
| flg.SetAt(index >> kFlagsPerSmiShift, Smi::Handle(Smi::New(flag))); |
| } |
| |
| void TypeParameters::Print(Thread* thread, |
| Zone* zone, |
| bool are_class_type_parameters, |
| intptr_t base, |
| NameVisibility name_visibility, |
| BaseTextBuffer* printer) const { |
| String& name = String::Handle(zone); |
| AbstractType& type = AbstractType::Handle(zone); |
| const intptr_t num_type_params = Length(); |
| for (intptr_t i = 0; i < num_type_params; i++) { |
| if (are_class_type_parameters) { |
| name = NameAt(i); |
| printer->AddString(name.ToCString()); |
| } else { |
| printer->AddString(TypeParameter::CanonicalNameCString( |
| are_class_type_parameters, base, base + i)); |
| } |
| if (FLAG_show_internal_names || !AllDynamicBounds()) { |
| type = BoundAt(i); |
| // Do not print default bound or non-nullable Object bound in weak mode. |
| if (!type.IsNull() && |
| (FLAG_show_internal_names || !type.IsObjectType() || |
| (thread->isolate_group()->null_safety() && type.IsNonNullable()))) { |
| printer->AddString(" extends "); |
| type.PrintName(name_visibility, printer); |
| if (FLAG_show_internal_names && !AllDynamicDefaults()) { |
| type = DefaultAt(i); |
| if (!type.IsNull() && |
| (FLAG_show_internal_names || !type.IsDynamicType())) { |
| printer->AddString(" defaults to "); |
| type.PrintName(name_visibility, printer); |
| } |
| } |
| } |
| } |
| if (i != num_type_params - 1) { |
| printer->AddString(", "); |
| } |
| } |
| } |
| |
| const char* TypeParameters::ToCString() const { |
| if (IsNull()) { |
| return "TypeParameters: null"; |
| } |
| auto thread = Thread::Current(); |
| auto zone = thread->zone(); |
| ZoneTextBuffer buffer(zone); |
| buffer.AddString("TypeParameters: "); |
| Print(thread, zone, true, 0, kInternalName, &buffer); |
| return buffer.buffer(); |
| } |
| |
| TypeParametersPtr TypeParameters::New(Heap::Space space) { |
| ASSERT(Object::type_parameters_class() != Class::null()); |
| ObjectPtr ptr = |
| Object::Allocate(TypeParameters::kClassId, TypeParameters::InstanceSize(), |
| space, TypeParameters::ContainsCompressedPointers()); |
| return static_cast<TypeParametersPtr>(ptr); |
| } |
| |
| TypeParametersPtr TypeParameters::New(intptr_t count, Heap::Space space) { |
| const TypeParameters& result = |
| TypeParameters::Handle(TypeParameters::New(space)); |
| // Create an [ Array ] of [ String ] objects to represent the names. |
| // Create a [ TypeArguments ] vector representing the bounds. |
| // Create a [ TypeArguments ] vector representing the defaults. |
  // Create an [ Array ] of [ Smi ] objects to represent the flags.
| const Array& names_array = Array::Handle(Array::New(count, space)); |
| result.set_names(names_array); |
| TypeArguments& type_args = TypeArguments::Handle(); |
| type_args = TypeArguments::New(count, Heap::kNew); // Will get canonicalized. |
| result.set_bounds(type_args); |
| type_args = TypeArguments::New(count, Heap::kNew); // Will get canonicalized. |
| result.set_defaults(type_args); |
| result.AllocateFlags(space); // Will get optimized. |
| return result.ptr(); |
| } |
| |
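// The nullability of a type argument vector is cached in a Smi as a bit
// vector: each type argument contributes kNullabilityBitsPerType bits that
// encode nullable, non-nullable, or legacy. Vectors longer than
// kNullabilityMaxTypes do not fit and are simply cached as 0.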
| intptr_t TypeArguments::ComputeNullability() const { |
| if (IsNull()) return 0; |
| const intptr_t num_types = Length(); |
| intptr_t result = 0; |
| if (num_types <= kNullabilityMaxTypes) { |
| AbstractType& type = AbstractType::Handle(); |
| for (intptr_t i = 0; i < num_types; i++) { |
| type = TypeAt(i); |
| intptr_t type_bits = 0; |
| if (!type.IsNull() && !type.IsNullTypeRef()) { |
| switch (type.nullability()) { |
| case Nullability::kNullable: |
| type_bits = kNullableBits; |
| break; |
| case Nullability::kNonNullable: |
| type_bits = kNonNullableBits; |
| break; |
| case Nullability::kLegacy: |
| type_bits = kLegacyBits; |
| break; |
| default: |
| UNREACHABLE(); |
| } |
| } |
| result |= (type_bits << (i * kNullabilityBitsPerType)); |
| } |
| } |
| set_nullability(result); |
| return result; |
| } |
| |
| void TypeArguments::set_nullability(intptr_t value) const { |
| untag()->set_nullability(Smi::New(value)); |
| } |
| |
| uword TypeArguments::HashForRange(intptr_t from_index, intptr_t len) const { |
| if (IsNull()) return kAllDynamicHash; |
| if (IsRaw(from_index, len)) return kAllDynamicHash; |
| uint32_t result = 0; |
| AbstractType& type = AbstractType::Handle(); |
| for (intptr_t i = 0; i < len; i++) { |
| type = TypeAt(from_index + i); |
| // The hash may be calculated during type finalization (for debugging |
| // purposes only) while a type argument is still temporarily null. |
| if (type.IsNull() || type.IsNullTypeRef()) { |
| return 0; // Do not cache hash, since it will still change. |
| } |
| if (type.IsTypeRef()) { |
| // Unwrapping the TypeRef here cannot lead to infinite recursion, because |
| // traversal during hash computation stops at the TypeRef. Indeed, |
| // unwrapping the TypeRef does not always remove it completely, but may |
| // only rotate the cycle. The same TypeRef can be encountered when calling |
| // type.Hash() below after traversing the whole cycle. The class id of the |
| // referenced type is used and the traversal stops. |
| // By dereferencing the TypeRef, we maximize the information reflected by |
| // the hash value. Two equal vectors may have some of their type arguments |
| // 'oriented' differently, i.e. pointing to identical (TypeRef containing) |
| // cyclic type graphs, but to two different nodes in the cycle, thereby |
| // breaking the hash computation earlier for one vector and yielding two |
| // different hash values for identical type graphs. |
| type = TypeRef::Cast(type).type(); |
| } |
| result = CombineHashes(result, type.Hash()); |
| } |
| result = FinalizeHash(result, kHashBits); |
| return result; |
| } |
| |
| uword TypeArguments::ComputeHash() const { |
| if (IsNull()) return kAllDynamicHash; |
| const uword result = HashForRange(0, Length()); |
| if (result != 0) { |
| SetHash(result); |
| } |
| return result; |
| } |
| |
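// Illustrative example (not from the source): prepending other = <int, bool>
// (other_length = 2) to this = <T> with total_length = 3 yields the vector
// <int, bool, T>, which is then canonicalized.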
| TypeArgumentsPtr TypeArguments::Prepend(Zone* zone, |
| const TypeArguments& other, |
| intptr_t other_length, |
| intptr_t total_length) const { |
| if (other_length == 0) { |
| ASSERT(IsCanonical()); |
| return ptr(); |
| } else if (other_length == total_length) { |
| ASSERT(other.IsCanonical()); |
| return other.ptr(); |
| } else if (IsNull() && other.IsNull()) { |
| return TypeArguments::null(); |
| } |
| const TypeArguments& result = |
| TypeArguments::Handle(zone, TypeArguments::New(total_length, Heap::kNew)); |
| AbstractType& type = AbstractType::Handle(zone); |
| for (intptr_t i = 0; i < other_length; i++) { |
| type = other.IsNull() ? Type::DynamicType() : other.TypeAt(i); |
| result.SetTypeAt(i, type); |
| } |
| for (intptr_t i = other_length; i < total_length; i++) { |
| type = IsNull() ? Type::DynamicType() : TypeAt(i - other_length); |
| result.SetTypeAt(i, type); |
| } |
| return result.Canonicalize(Thread::Current(), nullptr); |
| } |
| |
| TypeArgumentsPtr TypeArguments::ConcatenateTypeParameters( |
| Zone* zone, |
| const TypeArguments& other) const { |
| ASSERT(!IsNull() && !other.IsNull()); |
| const intptr_t this_len = Length(); |
| const intptr_t other_len = other.Length(); |
| const auto& result = TypeArguments::Handle( |
| zone, TypeArguments::New(this_len + other_len, Heap::kNew)); |
| auto& type = AbstractType::Handle(zone); |
| for (intptr_t i = 0; i < this_len; ++i) { |
| type = TypeAt(i); |
| result.SetTypeAt(i, type); |
| } |
| for (intptr_t i = 0; i < other_len; ++i) { |
| type = other.TypeAt(i); |
| result.SetTypeAt(this_len + i, type); |
| } |
| return result.ptr(); |
| } |
| |
| StringPtr TypeArguments::Name() const { |
| Thread* thread = Thread::Current(); |
| ZoneTextBuffer printer(thread->zone()); |
| PrintSubvectorName(0, Length(), kInternalName, &printer); |
| return Symbols::New(thread, printer.buffer()); |
| } |
| |
| StringPtr TypeArguments::UserVisibleName() const { |
| Thread* thread = Thread::Current(); |
| ZoneTextBuffer printer(thread->zone()); |
| PrintSubvectorName(0, Length(), kUserVisibleName, &printer); |
| return Symbols::New(thread, printer.buffer()); |
| } |
| |
| void TypeArguments::PrintSubvectorName(intptr_t from_index, |
| intptr_t len, |
| NameVisibility name_visibility, |
| BaseTextBuffer* printer) const { |
| printer->AddString("<"); |
| AbstractType& type = AbstractType::Handle(); |
| for (intptr_t i = 0; i < len; i++) { |
| if (from_index + i < Length()) { |
| type = TypeAt(from_index + i); |
| if (type.IsNull()) { |
| printer->AddString("null"); // Unfinalized vector. |
| } else { |
| type.PrintName(name_visibility, printer); |
| } |
| } else { |
| printer->AddString("dynamic"); |
| } |
| if (i < len - 1) { |
| printer->AddString(", "); |
| } |
| } |
| printer->AddString(">"); |
| } |
| |
| void TypeArguments::PrintTo(BaseTextBuffer* buffer) const { |
| buffer->AddString("TypeArguments: "); |
| if (IsNull()) { |
| return buffer->AddString("null"); |
| } |
| buffer->Printf("(H%" Px ")", Smi::Value(untag()->hash())); |
| auto& type_at = AbstractType::Handle(); |
| for (intptr_t i = 0; i < Length(); i++) { |
| type_at = TypeAt(i); |
| buffer->Printf(" [%s]", type_at.IsNull() ? "null" : type_at.ToCString()); |
| } |
| } |
| |
| bool TypeArguments::IsSubvectorEquivalent(const TypeArguments& other, |
| intptr_t from_index, |
| intptr_t len, |
| TypeEquality kind, |
| TrailPtr trail) const { |
| if (this->ptr() == other.ptr()) { |
| return true; |
| } |
| if (kind == TypeEquality::kCanonical) { |
| if (IsNull() || other.IsNull()) { |
| return false; |
| } |
| if (Length() != other.Length()) { |
| return false; |
| } |
| } |
| AbstractType& type = AbstractType::Handle(); |
| AbstractType& other_type = AbstractType::Handle(); |
| for (intptr_t i = from_index; i < from_index + len; i++) { |
| type = IsNull() ? Type::DynamicType() : TypeAt(i); |
| other_type = other.IsNull() ? Type::DynamicType() : other.TypeAt(i); |
| // Still unfinalized vectors should not be considered equivalent. |
| if (type.IsNull() || !type.IsEquivalent(other_type, kind, trail)) { |
| return false; |
| } |
| } |
| return true; |
| } |
| |
| bool TypeArguments::IsRecursive(TrailPtr trail) const { |
| if (IsNull()) return false; |
| const intptr_t num_types = Length(); |
| AbstractType& type = AbstractType::Handle(); |
| for (intptr_t i = 0; i < num_types; i++) { |
| type = TypeAt(i); |
| // If this type argument is null, the type parameterized with this type |
| // argument is still being finalized and is definitely recursive. The null |
| // type argument will be replaced by a non-null type before the type is |
| // marked as finalized. |
| if (type.IsNull() || type.IsRecursive(trail)) { |
| return true; |
| } |
| } |
| return false; |
| } |
| |
| bool TypeArguments::RequireConstCanonicalTypeErasure(Zone* zone, |
| intptr_t from_index, |
| intptr_t len, |
| TrailPtr trail) const { |
| if (IsNull()) return false; |
| ASSERT(Length() >= (from_index + len)); |
| AbstractType& type = AbstractType::Handle(zone); |
| for (intptr_t i = 0; i < len; i++) { |
| type = TypeAt(from_index + i); |
| if (type.IsNonNullable() || |
| (type.IsNullable() && |
| type.RequireConstCanonicalTypeErasure(zone, trail))) { |
| // It is not possible for a legacy type to have non-nullable type |
| // arguments or for a legacy function type to have non-nullable type in |
| // its signature. |
| return true; |
| } |
| } |
| return false; |
| } |
| |
| bool TypeArguments::IsDynamicTypes(bool raw_instantiated, |
| intptr_t from_index, |
| intptr_t len) const { |
| ASSERT(Length() >= (from_index + len)); |
| AbstractType& type = AbstractType::Handle(); |
| Class& type_class = Class::Handle(); |
| for (intptr_t i = 0; i < len; i++) { |
| type = TypeAt(from_index + i); |
| if (type.IsNull()) { |
| return false; |
| } |
| if (!type.HasTypeClass()) { |
| if (raw_instantiated && type.IsTypeParameter()) { |
| // An uninstantiated type parameter is equivalent to dynamic. |
| continue; |
| } |
| return false; |
| } |
| type_class = type.type_class(); |
| if (!type_class.IsDynamicClass()) { |
| return false; |
| } |
| } |
| return true; |
| } |
| |
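// The instantiations cache accessed below is a flat, growable array of
// entries of Instantiation::kSizeInWords words each, holding (instantiator
// type args, function type args, instantiated type args) triples, terminated
// by a kNoInstantiator sentinel in the instantiator slot.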
| bool TypeArguments::HasInstantiations() const { |
| const Array& prior_instantiations = Array::Handle(instantiations()); |
| ASSERT(prior_instantiations.Length() > 0); // Always at least a sentinel. |
| return prior_instantiations.Length() > 1; |
| } |
| |
| intptr_t TypeArguments::NumInstantiations() const { |
| const Array& prior_instantiations = Array::Handle(instantiations()); |
| ASSERT(prior_instantiations.Length() > 0); // Always at least a sentinel. |
| intptr_t num = 0; |
| intptr_t i = 0; |
| while (prior_instantiations.At(i) != |
| Smi::New(TypeArguments::kNoInstantiator)) { |
| i += TypeArguments::Instantiation::kSizeInWords; |
| num++; |
| } |
| return num; |
| } |
| |
| ArrayPtr TypeArguments::instantiations() const { |
| // We rely on the fact that any loads from the array are dependent loads and |
| // avoid the load-acquire barrier here. |
| return untag()->instantiations(); |
| } |
| |
| void TypeArguments::set_instantiations(const Array& value) const { |
| // We have to ensure that initializing stores to the array are available |
| // when releasing the pointer to the array pointer. |
| // => We have to use store-release here. |
| ASSERT(!value.IsNull()); |
| untag()->set_instantiations<std::memory_order_release>(value.ptr()); |
| } |
| |
| bool TypeArguments::HasCount(intptr_t count) const { |
| if (IsNull()) { |
| return true; |
| } |
| return Length() == count; |
| } |
| |
| intptr_t TypeArguments::Length() const { |
| if (IsNull()) { |
| return 0; |
| } |
| return Smi::Value(untag()->length()); |
| } |
| |
| intptr_t TypeArguments::nullability() const { |
| if (IsNull()) { |
| return 0; |
| } |
| return Smi::Value(untag()->nullability()); |
| } |
| |
| AbstractTypePtr TypeArguments::TypeAt(intptr_t index) const { |
| ASSERT(!IsNull()); |
| ASSERT((index >= 0) && (index < Length())); |
| return untag()->element(index); |
| } |
| |
| AbstractTypePtr TypeArguments::TypeAtNullSafe(intptr_t index) const { |
| if (IsNull()) { |
    // A null vector represents an infinite list of dynamic types.
| return Type::dynamic_type().ptr(); |
| } |
| ASSERT((index >= 0) && (index < Length())); |
| return TypeAt(index); |
| } |
| |
| void TypeArguments::SetTypeAt(intptr_t index, const AbstractType& value) const { |
| ASSERT(!IsCanonical()); |
| ASSERT((index >= 0) && (index < Length())); |
| return untag()->set_element(index, value.ptr()); |
| } |
| |
| bool TypeArguments::IsSubvectorInstantiated(intptr_t from_index, |
| intptr_t len, |
| Genericity genericity, |
| intptr_t num_free_fun_type_params, |
| TrailPtr trail) const { |
| ASSERT(!IsNull()); |
| AbstractType& type = AbstractType::Handle(); |
| for (intptr_t i = 0; i < len; i++) { |
| type = TypeAt(from_index + i); |
| // If this type argument T is null, the type A containing T in its flattened |
| // type argument vector V is recursive and is still being finalized. |
| // T is the type argument of a super type of A. T is being instantiated |
| // during finalization of V, which is also the instantiator. T depends |
| // solely on the type parameters of A and will be replaced by a non-null |
| // type before A is marked as finalized. |
| if (!type.IsNull() && |
| !type.IsInstantiated(genericity, num_free_fun_type_params, trail)) { |
| return false; |
| } |
| } |
| return true; |
| } |
| |
| bool TypeArguments::IsUninstantiatedIdentity() const { |
| AbstractType& type = AbstractType::Handle(); |
| const intptr_t num_types = Length(); |
| for (intptr_t i = 0; i < num_types; i++) { |
| type = TypeAt(i); |
| if (type.IsNull()) { |
| return false; // Still unfinalized, too early to tell. |
| } |
| if (!type.IsTypeParameter()) { |
| return false; |
| } |
| const TypeParameter& type_param = TypeParameter::Cast(type); |
| ASSERT(type_param.IsFinalized()); |
| if ((type_param.index() != i) || type_param.IsFunctionTypeParameter()) { |
| return false; |
| } |
| // Instantiating nullable and legacy type parameters may change |
    // nullability of a type, so a type argument vector containing such type
    // parameters cannot be substituted with instantiator type arguments.
| if (type_param.IsNullable() || type_param.IsLegacy()) { |
| return false; |
| } |
| } |
| return true; |
| // Note that it is not necessary to verify at runtime that the instantiator |
| // type vector is long enough, since this uninstantiated vector contains as |
| // many different type parameters as it is long. |
| } |
| |
| // Return true if this uninstantiated type argument vector, once instantiated |
| // at runtime, is a prefix of the type argument vector of its instantiator. |
| // A runtime check may be required, as indicated by with_runtime_check. |
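// Illustrative example (not from the source): inside class B<T>, the
// finalized vector <T> of a type such as A<T> holds B's type parameter at
// index 0, so B's instantiator type argument vector can simply be reused.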
| bool TypeArguments::CanShareInstantiatorTypeArguments( |
| const Class& instantiator_class, |
| bool* with_runtime_check) const { |
| ASSERT(!IsInstantiated()); |
| if (with_runtime_check != nullptr) { |
| *with_runtime_check = false; |
| } |
| const intptr_t num_type_args = Length(); |
| const intptr_t num_instantiator_type_args = |
| instantiator_class.NumTypeArguments(); |
| if (num_type_args > num_instantiator_type_args) { |
| // This vector cannot be a prefix of a shorter vector. |
| return false; |
| } |
| const intptr_t num_instantiator_type_params = |
| instantiator_class.NumTypeParameters(); |
| const intptr_t first_type_param_offset = |
| num_instantiator_type_args - num_instantiator_type_params; |
| // At compile time, the type argument vector of the instantiator consists of |
| // the type argument vector of its super type, which may refer to the type |
| // parameters of the instantiator class, followed by (or overlapping partially |
| // or fully with) the type parameters of the instantiator class in declaration |
| // order. |
| // In other words, the only variables are the type parameters of the |
| // instantiator class. |
| // This uninstantiated type argument vector is also expressed in terms of the |
| // type parameters of the instantiator class. Therefore, in order to be a |
  // prefix once instantiated at runtime, every one of its type arguments must
  // be equal to the type argument of the instantiator vector at the same
  // index.
| |
| // As a first requirement, the last num_instantiator_type_params type |
| // arguments of this type argument vector must refer to the corresponding type |
| // parameters of the instantiator class. |
| AbstractType& type_arg = AbstractType::Handle(); |
| for (intptr_t i = first_type_param_offset; i < num_type_args; i++) { |
| type_arg = TypeAt(i); |
| if (!type_arg.IsTypeParameter()) { |
| return false; |
| } |
| const TypeParameter& type_param = TypeParameter::Cast(type_arg); |
| ASSERT(type_param.IsFinalized()); |
| if ((type_param.index() != i) || type_param.IsFunctionTypeParameter()) { |
| return false; |
| } |
| // Instantiating nullable and legacy type parameters may change nullability |
    // of a type, so a type argument vector containing such type parameters
| // cannot be substituted with instantiator type arguments, unless we check |
| // at runtime the nullability of the first 1 or 2 type arguments of the |
| // instantiator. |
| // Note that the presence of non-overlapping super type arguments (i.e. |
| // first_type_param_offset > 0) will prevent this optimization. |
| if (type_param.IsNullable() || type_param.IsLegacy()) { |
| if (with_runtime_check == nullptr || i >= kNullabilityMaxTypes) { |
| return false; |
| } |
| *with_runtime_check = true; |
| } |
| } |
| // As a second requirement, the type arguments corresponding to the super type |
| // must be identical. Overlapping ones have already been checked starting at |
| // first_type_param_offset. |
| if (first_type_param_offset == 0) { |
| return true; |
| } |
| AbstractType& super_type = |
| AbstractType::Handle(instantiator_class.super_type()); |
| const TypeArguments& super_type_args = |
| TypeArguments::Handle(super_type.arguments()); |
| if (super_type_args.IsNull()) { |
| ASSERT(!IsUninstantiatedIdentity()); |
| return false; |
| } |
| AbstractType& super_type_arg = AbstractType::Handle(); |
| for (intptr_t i = 0; (i < first_type_param_offset) && (i < num_type_args); |
| i++) { |
| type_arg = TypeAt(i); |
| super_type_arg = super_type_args.TypeAt(i); |
| if (!type_arg.Equals(super_type_arg)) { |
| ASSERT(!IsUninstantiatedIdentity()); |
| return false; |
| } |
| } |
| return true; |
| } |
| |
| // Return true if this uninstantiated type argument vector, once instantiated |
| // at runtime, is a prefix of the enclosing function type arguments. |
| // A runtime check may be required, as indicated by with_runtime_check. |
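// Illustrative example (not from the source): inside a generic function f<T>,
// the vector <T> of a type such as List<T> holds f's type parameter at index
// 0, so the enclosing function type argument vector can be reused directly.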
| bool TypeArguments::CanShareFunctionTypeArguments( |
| const Function& function, |
| bool* with_runtime_check) const { |
| ASSERT(!IsInstantiated()); |
| if (with_runtime_check != nullptr) { |
| *with_runtime_check = false; |
| } |
| const intptr_t num_type_args = Length(); |
| const intptr_t num_parent_type_args = function.NumParentTypeArguments(); |
| const intptr_t num_function_type_params = function.NumTypeParameters(); |
| const intptr_t num_function_type_args = |
| num_parent_type_args + num_function_type_params; |
| if (num_type_args > num_function_type_args) { |
| // This vector cannot be a prefix of a shorter vector. |
| return false; |
| } |
| AbstractType& type_arg = AbstractType::Handle(); |
| for (intptr_t i = 0; i < num_type_args; i++) { |
| type_arg = TypeAt(i); |
| if (!type_arg.IsTypeParameter()) { |
| return false; |
| } |
| const TypeParameter& type_param = TypeParameter::Cast(type_arg); |
| ASSERT(type_param.IsFinalized()); |
| if ((type_param.index() != i) || !type_param.IsFunctionTypeParameter()) { |
| return false; |
| } |
| // Instantiating nullable and legacy type parameters may change nullability |
    // of a type, so a type argument vector containing such type parameters
| // cannot be substituted with the enclosing function type arguments, unless |
| // we check at runtime the nullability of the first 1 or 2 type arguments of |
| // the enclosing function type arguments. |
| if (type_param.IsNullable() || type_param.IsLegacy()) { |
| if (with_runtime_check == nullptr || i >= kNullabilityMaxTypes) { |
| return false; |
| } |
| *with_runtime_check = true; |
| } |
| } |
| return true; |
| } |
| |
| bool TypeArguments::IsFinalized() const { |
| ASSERT(!IsNull()); |
| AbstractType& type = AbstractType::Handle(); |
| const intptr_t num_types = Length(); |
| for (intptr_t i = 0; i < num_types; i++) { |
| type = TypeAt(i); |
| if (!type.IsFinalized()) { |
| return false; |
| } |
| } |
| return true; |
| } |
| |
| TypeArgumentsPtr TypeArguments::InstantiateFrom( |
| const TypeArguments& instantiator_type_arguments, |
| const TypeArguments& function_type_arguments, |
| intptr_t num_free_fun_type_params, |
| Heap::Space space, |
| TrailPtr trail) const { |
| ASSERT(!IsInstantiated(kAny, num_free_fun_type_params)); |
| if ((instantiator_type_arguments.IsNull() || |
| instantiator_type_arguments.Length() == Length()) && |
| IsUninstantiatedIdentity()) { |
| return instantiator_type_arguments.ptr(); |
| } |
| const intptr_t num_types = Length(); |
| TypeArguments& instantiated_array = |
| TypeArguments::Handle(TypeArguments::New(num_types, space)); |
| AbstractType& type = AbstractType::Handle(); |
| for (intptr_t i = 0; i < num_types; i++) { |
| type = TypeAt(i); |
| // If this type argument T is null, the type A containing T in its flattened |
| // type argument vector V is recursive and is still being finalized. |
| // T is the type argument of a super type of A. T is being instantiated |
| // during finalization of V, which is also the instantiator. T depends |
| // solely on the type parameters of A and will be replaced by a non-null |
| // type before A is marked as finalized. |
| if (!type.IsNull() && |
| !type.IsInstantiated(kAny, num_free_fun_type_params)) { |
| type = type.InstantiateFrom(instantiator_type_arguments, |
| function_type_arguments, |
| num_free_fun_type_params, space, trail); |
| // A returned null type indicates a failed instantiation in dead code that |
| // must be propagated up to the caller, the optimizing compiler. |
| if (type.IsNull()) { |
| return Object::empty_type_arguments().ptr(); |
| } |
| } |
| instantiated_array.SetTypeAt(i, type); |
| } |
| return instantiated_array.ptr(); |
| } |
| |
| TypeArgumentsPtr TypeArguments::InstantiateAndCanonicalizeFrom( |
| const TypeArguments& instantiator_type_arguments, |
| const TypeArguments& function_type_arguments) const { |
| auto thread = Thread::Current(); |
| auto zone = thread->zone(); |
| SafepointMutexLocker ml( |
| thread->isolate_group()->type_arguments_canonicalization_mutex()); |
| |
| ASSERT(!IsInstantiated()); |
| ASSERT(instantiator_type_arguments.IsNull() || |
| instantiator_type_arguments.IsCanonical()); |
| ASSERT(function_type_arguments.IsNull() || |
| function_type_arguments.IsCanonical()); |
| // Lookup instantiators and if found, return instantiated result. |
| Array& prior_instantiations = Array::Handle(zone, instantiations()); |
| ASSERT(!prior_instantiations.IsNull() && prior_instantiations.IsArray()); |
| // The instantiations cache is initialized with Object::zero_array() and is |
| // therefore guaranteed to contain kNoInstantiator. No length check needed. |
| ASSERT(prior_instantiations.Length() > 0); // Always at least a sentinel. |
| intptr_t index = 0; |
| while (true) { |
| if ((prior_instantiations.At( |
| index + |
| TypeArguments::Instantiation::kInstantiatorTypeArgsIndex) == |
| instantiator_type_arguments.ptr()) && |
| (prior_instantiations.At( |
| index + TypeArguments::Instantiation::kFunctionTypeArgsIndex) == |
| function_type_arguments.ptr())) { |
| return TypeArguments::RawCast(prior_instantiations.At( |
| index + TypeArguments::Instantiation::kInstantiatedTypeArgsIndex)); |
| } |
| if (prior_instantiations.At(index) == |
| Smi::New(TypeArguments::kNoInstantiator)) { |
| break; |
| } |
| index += TypeArguments::Instantiation::kSizeInWords; |
| } |
| // Cache lookup failed. Instantiate the type arguments. |
| TypeArguments& result = TypeArguments::Handle(zone); |
| result = InstantiateFrom(instantiator_type_arguments, function_type_arguments, |
| kAllFree, Heap::kOld); |
| // Canonicalize type arguments. |
| result = result.Canonicalize(thread, nullptr); |
| // InstantiateAndCanonicalizeFrom is not reentrant. It cannot have been called |
| // indirectly, so the prior_instantiations array cannot have grown. |
| ASSERT(prior_instantiations.ptr() == instantiations()); |
| // Add instantiator and function type args and result to instantiations array. |
| intptr_t length = prior_instantiations.Length(); |
| if ((index + TypeArguments::Instantiation::kSizeInWords) >= length) { |
| // TODO(regis): Should we limit the number of cached instantiations? |
| // Grow the instantiations array by about 50%, but at least by 1. |
| // The initial array is Object::zero_array() of length 1. |
| intptr_t entries = |
| (length - 1) / TypeArguments::Instantiation::kSizeInWords; |
| intptr_t new_entries = entries + (entries >> 1) + 1; |
| length = new_entries * TypeArguments::Instantiation::kSizeInWords + 1; |
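    // E.g., starting from Object::zero_array(), the entry counts grow as
    // 0, 1, 2, 4, 7, 11, ... (roughly 50% per step).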
| prior_instantiations = |
| Array::Grow(prior_instantiations, length, Heap::kOld); |
| set_instantiations(prior_instantiations); |
| ASSERT((index + TypeArguments::Instantiation::kSizeInWords) < length); |
| } |
| |
| // Set sentinel marker at next position. |
| prior_instantiations.SetAt( |
| index + TypeArguments::Instantiation::kSizeInWords + |
| TypeArguments::Instantiation::kInstantiatorTypeArgsIndex, |
| Smi::Handle(zone, Smi::New(TypeArguments::kNoInstantiator))); |
| |
| prior_instantiations.SetAt( |
| index + TypeArguments::Instantiation::kFunctionTypeArgsIndex, |
| function_type_arguments); |
| prior_instantiations.SetAt( |
| index + TypeArguments::Instantiation::kInstantiatedTypeArgsIndex, result); |
| |
  // We now let any concurrently running mutator thread see the new entry by
  // using a store-release barrier.
| ASSERT( |
| prior_instantiations.At( |
| index + TypeArguments::Instantiation::kInstantiatorTypeArgsIndex) == |
| Smi::New(TypeArguments::kNoInstantiator)); |
| prior_instantiations.SetAtRelease( |
| index + TypeArguments::Instantiation::kInstantiatorTypeArgsIndex, |
| instantiator_type_arguments); |
| return result.ptr(); |
| } |
| |
| TypeArgumentsPtr TypeArguments::New(intptr_t len, Heap::Space space) { |
| if (len < 0 || len > kMaxElements) { |
| // This should be caught before we reach here. |
| FATAL1("Fatal error in TypeArguments::New: invalid len %" Pd "\n", len); |
| } |
| TypeArguments& result = TypeArguments::Handle(); |
| { |
| ObjectPtr raw = Object::Allocate( |
| TypeArguments::kClassId, TypeArguments::InstanceSize(len), space, |
| TypeArguments::ContainsCompressedPointers()); |
| NoSafepointScope no_safepoint; |
| result ^= raw; |
| // Length must be set before we start storing into the array. |
| result.SetLength(len); |
| result.SetHash(0); |
| result.set_nullability(0); |
| } |
| // The zero array should have been initialized. |
| ASSERT(Object::zero_array().ptr() != Array::null()); |
| COMPILE_ASSERT(TypeArguments::kNoInstantiator == 0); |
| result.set_instantiations(Object::zero_array()); |
| return result.ptr(); |
| } |
| |
| void TypeArguments::SetLength(intptr_t value) const { |
| ASSERT(!IsCanonical()); |
| // This is only safe because we create a new Smi, which does not cause |
| // heap allocation. |
| untag()->set_length(Smi::New(value)); |
| } |
| |
| TypeArgumentsPtr TypeArguments::Canonicalize(Thread* thread, |
| TrailPtr trail) const { |
| if (IsNull() || IsCanonical()) { |
| ASSERT(IsOld()); |
| return this->ptr(); |
| } |
| const intptr_t num_types = Length(); |
| if (num_types == 0) { |
| return TypeArguments::empty_type_arguments().ptr(); |
| } else if (IsRaw(0, num_types)) { |
| return TypeArguments::null(); |
| } |
| Zone* zone = thread->zone(); |
| auto isolate_group = thread->isolate_group(); |
| ObjectStore* object_store = isolate_group->object_store(); |
| TypeArguments& result = TypeArguments::Handle(zone); |
| { |
| SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex()); |
| CanonicalTypeArgumentsSet table(zone, |
| object_store->canonical_type_arguments()); |
| result ^= table.GetOrNull(CanonicalTypeArgumentsKey(*this)); |
| object_store->set_canonical_type_arguments(table.Release()); |
| } |
| if (result.IsNull()) { |
| // Canonicalize each type argument. |
| AbstractType& type_arg = AbstractType::Handle(zone); |
| for (intptr_t i = 0; i < num_types; i++) { |
| type_arg = TypeAt(i); |
| type_arg = type_arg.Canonicalize(thread, trail); |
| if (IsCanonical()) { |
| // Canonicalizing this type_arg canonicalized this type. |
| ASSERT(IsRecursive()); |
| return this->ptr(); |
| } |
| SetTypeAt(i, type_arg); |
| } |
| // Canonicalization of a type argument of a recursive type argument vector |
| // may change the hash of the vector, so invalidate. |
| if (IsRecursive()) { |
| SetHash(0); |
| } |
| SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex()); |
| CanonicalTypeArgumentsSet table(zone, |
| object_store->canonical_type_arguments()); |
| // Since we canonicalized some type arguments above we need to lookup |
| // in the table again to make sure we don't already have an equivalent |
| // canonical entry. |
| result ^= table.GetOrNull(CanonicalTypeArgumentsKey(*this)); |
| if (result.IsNull()) { |
| // Make sure we have an old space object and add it to the table. |
| if (this->IsNew()) { |
| result ^= Object::Clone(*this, Heap::kOld); |
| } else { |
| result = this->ptr(); |
| } |
| ASSERT(result.IsOld()); |
| result.ComputeNullability(); |
| result.SetCanonical(); // Mark object as being canonical. |
| // Now add this TypeArgument into the canonical list of type arguments. |
| bool present = table.Insert(result); |
| ASSERT(!present); |
| } |
| object_store->set_canonical_type_arguments(table.Release()); |
| } |
| ASSERT(result.Equals(*this)); |
| ASSERT(!result.IsNull()); |
| ASSERT(result.IsTypeArguments()); |
| ASSERT(result.IsCanonical()); |
| return result.ptr(); |
| } |
| |
| void TypeArguments::EnumerateURIs(URIs* uris) const { |
| if (IsNull()) { |
| return; |
| } |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| AbstractType& type = AbstractType::Handle(zone); |
| const intptr_t num_types = Length(); |
| for (intptr_t i = 0; i < num_types; i++) { |
| type = TypeAt(i); |
| type.EnumerateURIs(uris); |
| } |
| } |
| |
| const char* TypeArguments::ToCString() const { |
| if (IsNull()) { |
| return "TypeArguments: null"; // Optimizing the frequent case. |
| } |
| ZoneTextBuffer buffer(Thread::Current()->zone()); |
| PrintTo(&buffer); |
| return buffer.buffer(); |
| } |
| |
| const char* PatchClass::ToCString() const { |
| const Class& cls = Class::Handle(patched_class()); |
| const char* cls_name = cls.ToCString(); |
| return OS::SCreate(Thread::Current()->zone(), "PatchClass for %s", cls_name); |
| } |
| |
| PatchClassPtr PatchClass::New(const Class& patched_class, |
| const Class& origin_class) { |
| const PatchClass& result = PatchClass::Handle(PatchClass::New()); |
| result.set_patched_class(patched_class); |
| result.set_origin_class(origin_class); |
| result.set_script(Script::Handle(origin_class.script())); |
| result.set_library_kernel_offset(-1); |
| return result.ptr(); |
| } |
| |
| PatchClassPtr PatchClass::New(const Class& patched_class, |
| const Script& script) { |
| const PatchClass& result = PatchClass::Handle(PatchClass::New()); |
| result.set_patched_class(patched_class); |
| result.set_origin_class(patched_class); |
| result.set_script(script); |
| result.set_library_kernel_offset(-1); |
| return result.ptr(); |
| } |
| |
| PatchClassPtr PatchClass::New() { |
| ASSERT(Object::patch_class_class() != Class::null()); |
| ObjectPtr raw = |
| Object::Allocate(PatchClass::kClassId, PatchClass::InstanceSize(), |
| Heap::kOld, PatchClass::ContainsCompressedPointers()); |
| return static_cast<PatchClassPtr>(raw); |
| } |
| |
| void PatchClass::set_patched_class(const Class& value) const { |
| untag()->set_patched_class(value.ptr()); |
| } |
| |
| void PatchClass::set_origin_class(const Class& value) const { |
| untag()->set_origin_class(value.ptr()); |
| } |
| |
| void PatchClass::set_script(const Script& value) const { |
| untag()->set_script(value.ptr()); |
| } |
| |
| void PatchClass::set_library_kernel_data(const ExternalTypedData& data) const { |
| untag()->set_library_kernel_data(data.ptr()); |
| } |
| |
| uword Function::Hash() const { |
| const uword hash = String::HashRawSymbol(name()); |
| if (untag()->owner()->IsClass()) { |
| return hash ^ Class::RawCast(untag()->owner())->untag()->id(); |
| } |
| return hash; |
| } |
| |
| bool Function::HasBreakpoint() const { |
| #if defined(PRODUCT) |
| return false; |
| #else |
| auto thread = Thread::Current(); |
| return thread->isolate_group()->debugger()->HasBreakpoint(thread, *this); |
| #endif |
| } |
| |
| void Function::InstallOptimizedCode(const Code& code) const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| // We may not have previous code if FLAG_precompile is set. |
| // Hot-reload may have already disabled the current code. |
| if (HasCode() && !Code::Handle(CurrentCode()).IsDisabled()) { |
| Code::Handle(CurrentCode()).DisableDartCode(); |
| } |
| AttachCode(code); |
| } |
| |
| void Function::SetInstructions(const Code& value) const { |
| // Ensure that nobody is executing this function when we install it. |
| if (untag()->code() != Code::null() && HasCode()) { |
| GcSafepointOperationScope safepoint(Thread::Current()); |
| SetInstructionsSafe(value); |
| } else { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| SetInstructionsSafe(value); |
| } |
| } |
| |
| void Function::SetInstructionsSafe(const Code& value) const { |
| untag()->set_code(value.ptr()); |
| StoreNonPointer(&untag()->entry_point_, value.EntryPoint()); |
| StoreNonPointer(&untag()->unchecked_entry_point_, |
| value.UncheckedEntryPoint()); |
| } |
| |
| void Function::AttachCode(const Code& value) const { |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| // Finish setting up code before activating it. |
| value.set_owner(*this); |
| SetInstructions(value); |
| ASSERT(Function::Handle(value.function()).IsNull() || |
| (value.function() == this->ptr())); |
| } |
| |
| bool Function::HasCode() const { |
| NoSafepointScope no_safepoint; |
| ASSERT(untag()->code() != Code::null()); |
| return untag()->code() != StubCode::LazyCompile().ptr(); |
| } |
| |
| bool Function::HasCode(FunctionPtr function) { |
| NoSafepointScope no_safepoint; |
| ASSERT(function->untag()->code() != Code::null()); |
| return function->untag()->code() != StubCode::LazyCompile().ptr(); |
| } |
| |
| void Function::ClearCode() const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| untag()->set_unoptimized_code(Code::null()); |
| SetInstructions(StubCode::LazyCompile()); |
| #endif // defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
| void Function::ClearCodeSafe() const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| untag()->set_unoptimized_code(Code::null()); |
| |
| SetInstructionsSafe(StubCode::LazyCompile()); |
| #endif // defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
| void Function::EnsureHasCompiledUnoptimizedCode() const { |
| ASSERT(!ForceOptimize()); |
| Thread* thread = Thread::Current(); |
| ASSERT(thread->IsMutatorThread()); |
| // TODO(35224): DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame()); |
| Zone* zone = thread->zone(); |
| |
| const Error& error = |
| Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, *this)); |
| if (!error.IsNull()) { |
| Exceptions::PropagateError(error); |
| } |
| } |
| |
| void Function::SwitchToUnoptimizedCode() const { |
| ASSERT(HasOptimizedCode()); |
| Thread* thread = Thread::Current(); |
| DEBUG_ASSERT( |
| thread->isolate_group()->program_lock()->IsCurrentThreadWriter()); |
| Zone* zone = thread->zone(); |
| // TODO(35224): DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame()); |
| const Code& current_code = Code::Handle(zone, CurrentCode()); |
| |
| if (FLAG_trace_deoptimization_verbose) { |
| THR_Print("Disabling optimized code: '%s' entry: %#" Px "\n", |
| ToFullyQualifiedCString(), current_code.EntryPoint()); |
| } |
| current_code.DisableDartCode(); |
| const Error& error = |
| Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, *this)); |
| if (!error.IsNull()) { |
| Exceptions::PropagateError(error); |
| } |
| const Code& unopt_code = Code::Handle(zone, unoptimized_code()); |
| unopt_code.Enable(); |
| AttachCode(unopt_code); |
| } |
| |
| void Function::SwitchToLazyCompiledUnoptimizedCode() const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| if (!HasOptimizedCode()) { |
| return; |
| } |
| |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| ASSERT(thread->IsMutatorThread()); |
| |
| const Code& current_code = Code::Handle(zone, CurrentCode()); |
| TIR_Print("Disabling optimized code for %s\n", ToCString()); |
| current_code.DisableDartCode(); |
| |
| const Code& unopt_code = Code::Handle(zone, unoptimized_code()); |
| if (unopt_code.IsNull()) { |
| // Set the lazy compile stub code. |
| TIR_Print("Switched to lazy compile stub for %s\n", ToCString()); |
| SetInstructions(StubCode::LazyCompile()); |
| return; |
| } |
| |
| TIR_Print("Switched to unoptimized code for %s\n", ToCString()); |
| |
| AttachCode(unopt_code); |
| unopt_code.Enable(); |
| #endif |
| } |
| |
| void Function::set_unoptimized_code(const Code& value) const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| DEBUG_ASSERT(IsMutatorOrAtDeoptSafepoint()); |
| ASSERT(value.IsNull() || !value.is_optimized()); |
| untag()->set_unoptimized_code(value.ptr()); |
| #endif |
| } |
| |
| ContextScopePtr Function::context_scope() const { |
| if (IsClosureFunction()) { |
| const Object& obj = Object::Handle(untag()->data()); |
| ASSERT(!obj.IsNull()); |
| return ClosureData::Cast(obj).context_scope(); |
| } |
| return ContextScope::null(); |
| } |
| |
| void Function::set_context_scope(const ContextScope& value) const { |
| if (IsClosureFunction()) { |
| const Object& obj = Object::Handle(untag()->data()); |
| ASSERT(!obj.IsNull()); |
| ClosureData::Cast(obj).set_context_scope(value); |
| return; |
| } |
| UNREACHABLE(); |
| } |
| |
| ClosurePtr Function::implicit_static_closure() const { |
| if (IsImplicitStaticClosureFunction()) { |
| const Object& obj = Object::Handle(untag()->data()); |
| ASSERT(!obj.IsNull()); |
| return ClosureData::Cast(obj).implicit_static_closure(); |
| } |
| return Closure::null(); |
| } |
| |
| void Function::set_implicit_static_closure(const Closure& closure) const { |
| if (IsImplicitStaticClosureFunction()) { |
| const Object& obj = Object::Handle(untag()->data()); |
| ASSERT(!obj.IsNull()); |
| ClosureData::Cast(obj).set_implicit_static_closure(closure); |
| return; |
| } |
| UNREACHABLE(); |
| } |
| |
| ScriptPtr Function::eval_script() const { |
| const Object& obj = Object::Handle(untag()->data()); |
| if (obj.IsScript()) { |
| return Script::Cast(obj).ptr(); |
| } |
| return Script::null(); |
| } |
| |
| void Function::set_eval_script(const Script& script) const { |
| ASSERT(token_pos() == TokenPosition::kMinSource); |
| ASSERT(untag()->data() == Object::null()); |
| set_data(script); |
| } |
| |
| FunctionPtr Function::extracted_method_closure() const { |
| ASSERT(kind() == UntaggedFunction::kMethodExtractor); |
| const Object& obj = Object::Handle(untag()->data()); |
| ASSERT(obj.IsFunction()); |
| return Function::Cast(obj).ptr(); |
| } |
| |
| void Function::set_extracted_method_closure(const Function& value) const { |
| ASSERT(kind() == UntaggedFunction::kMethodExtractor); |
| ASSERT(untag()->data() == Object::null()); |
| set_data(value); |
| } |
| |
| ArrayPtr Function::saved_args_desc() const { |
| ASSERT(kind() == UntaggedFunction::kNoSuchMethodDispatcher || |
| kind() == UntaggedFunction::kInvokeFieldDispatcher); |
| const Object& obj = Object::Handle(untag()->data()); |
| ASSERT(obj.IsArray()); |
| return Array::Cast(obj).ptr(); |
| } |
| |
| void Function::set_saved_args_desc(const Array& value) const { |
| ASSERT(kind() == UntaggedFunction::kNoSuchMethodDispatcher || |
| kind() == UntaggedFunction::kInvokeFieldDispatcher); |
| ASSERT(untag()->data() == Object::null()); |
| set_data(value); |
| } |
| |
| FieldPtr Function::accessor_field() const { |
| ASSERT(kind() == UntaggedFunction::kImplicitGetter || |
| kind() == UntaggedFunction::kImplicitSetter || |
| kind() == UntaggedFunction::kImplicitStaticGetter || |
| kind() == UntaggedFunction::kFieldInitializer); |
| return Field::RawCast(untag()->data()); |
| } |
| |
| void Function::set_accessor_field(const Field& value) const { |
| ASSERT(kind() == UntaggedFunction::kImplicitGetter || |
| kind() == UntaggedFunction::kImplicitSetter || |
| kind() == UntaggedFunction::kImplicitStaticGetter || |
| kind() == UntaggedFunction::kFieldInitializer); |
| // Top level classes may be finalized multiple times. |
| ASSERT(untag()->data() == Object::null() || untag()->data() == value.ptr()); |
| set_data(value); |
| } |
| |
| FunctionPtr Function::parent_function() const { |
| if (!IsClosureFunction()) return Function::null(); |
| Object& obj = Object::Handle(untag()->data()); |
| ASSERT(!obj.IsNull()); |
| return ClosureData::Cast(obj).parent_function(); |
| } |
| |
| void Function::set_parent_function(const Function& value) const { |
| ASSERT(IsClosureFunction()); |
| const Object& obj = Object::Handle(untag()->data()); |
| ASSERT(!obj.IsNull()); |
| ClosureData::Cast(obj).set_parent_function(value); |
| } |
| |
| TypeArgumentsPtr Function::InstantiateToBounds( |
| Thread* thread, |
| DefaultTypeArgumentsKind* kind_out) const { |
| if (type_parameters() == TypeParameters::null()) { |
| if (kind_out != nullptr) { |
| *kind_out = DefaultTypeArgumentsKind::kIsInstantiated; |
| } |
| return Object::empty_type_arguments().ptr(); |
| } |
| auto& type_params = TypeParameters::Handle(thread->zone(), type_parameters()); |
| auto& result = TypeArguments::Handle(thread->zone(), type_params.defaults()); |
| if (kind_out != nullptr) { |
| if (IsClosureFunction()) { |
| *kind_out = default_type_arguments_kind(); |
| } else { |
| // We just return is/is not instantiated if the value isn't cached, as |
      // the other checks may cost more at runtime than simply performing the
      // instantiation.
| *kind_out = result.IsNull() || result.IsInstantiated() |
| ? DefaultTypeArgumentsKind::kIsInstantiated |
| : DefaultTypeArgumentsKind::kNeedsInstantiation; |
| } |
| } |
| return result.ptr(); |
| } |
| |
| Function::DefaultTypeArgumentsKind Function::default_type_arguments_kind() |
| const { |
| if (!IsClosureFunction()) { |
| UNREACHABLE(); |
| } |
| const auto& closure_data = ClosureData::Handle(ClosureData::RawCast(data())); |
| ASSERT(!closure_data.IsNull()); |
| return closure_data.default_type_arguments_kind(); |
| } |
| |
| void Function::set_default_type_arguments_kind( |
| Function::DefaultTypeArgumentsKind value) const { |
| if (!IsClosureFunction()) { |
| UNREACHABLE(); |
| } |
| const auto& closure_data = ClosureData::Handle(ClosureData::RawCast(data())); |
| ASSERT(!closure_data.IsNull()); |
| closure_data.set_default_type_arguments_kind(value); |
| } |
| |
| Function::DefaultTypeArgumentsKind Function::DefaultTypeArgumentsKindFor( |
| const TypeArguments& value) const { |
| if (value.IsNull() || value.IsInstantiated()) { |
| return DefaultTypeArgumentsKind::kIsInstantiated; |
| } |
| if (value.CanShareFunctionTypeArguments(*this)) { |
| return DefaultTypeArgumentsKind::kSharesFunctionTypeArguments; |
| } |
| const auto& cls = Class::Handle(Owner()); |
| if (value.CanShareInstantiatorTypeArguments(cls)) { |
| return DefaultTypeArgumentsKind::kSharesInstantiatorTypeArguments; |
| } |
| return DefaultTypeArgumentsKind::kNeedsInstantiation; |
| } |
| |
| // Enclosing outermost function of this local function. |
| FunctionPtr Function::GetOutermostFunction() const { |
| FunctionPtr parent = parent_function(); |
| if (parent == Object::null()) { |
| return ptr(); |
| } |
| Function& function = Function::Handle(); |
| do { |
| function = parent; |
| parent = function.parent_function(); |
| } while (parent != Object::null()); |
| return function.ptr(); |
| } |
| |
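// For native functions, data() holds an Array whose element 1 caches the
// implicit closure function; implicit_closure_function reads it with acquire
// semantics and set_implicit_closure_function writes it with release
// semantics.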
| FunctionPtr Function::implicit_closure_function() const { |
| if (IsClosureFunction() || IsFactory() || IsDispatcherOrImplicitAccessor() || |
| IsFieldInitializer() || IsFfiTrampoline()) { |
| return Function::null(); |
| } |
| const Object& obj = Object::Handle(data()); |
| ASSERT(obj.IsNull() || obj.IsScript() || obj.IsFunction() || obj.IsArray()); |
| if (obj.IsNull() || obj.IsScript()) { |
| return Function::null(); |
| } |
| if (obj.IsFunction()) { |
| return Function::Cast(obj).ptr(); |
| } |
| ASSERT(is_native()); |
| ASSERT(obj.IsArray()); |
| const Object& res = Object::Handle(Array::Cast(obj).AtAcquire(1)); |
| return res.IsNull() ? Function::null() : Function::Cast(res).ptr(); |
| } |
| |
| void Function::set_implicit_closure_function(const Function& value) const { |
| DEBUG_ASSERT( |
| IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| ASSERT(!IsClosureFunction()); |
| const Object& old_data = Object::Handle(data()); |
| if (is_native()) { |
| ASSERT(old_data.IsArray()); |
| ASSERT((Array::Cast(old_data).AtAcquire(1) == Object::null()) || |
| value.IsNull()); |
| Array::Cast(old_data).SetAtRelease(1, value); |
| } else { |
| // Maybe this function will turn into a native later on :-/ |
| if (old_data.IsArray()) { |
| ASSERT((Array::Cast(old_data).AtAcquire(1) == Object::null()) || |
| value.IsNull()); |
| Array::Cast(old_data).SetAtRelease(1, value); |
| } else { |
| ASSERT(old_data.IsNull() || value.IsNull()); |
| set_data(value); |
| } |
| } |
| } |
| |
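// The following accessors delegate to the FfiTrampolineData object stored in
// the data field of an FFI trampoline function.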
| void Function::SetFfiCSignature(const FunctionType& sig) const { |
| ASSERT(IsFfiTrampoline()); |
| const Object& obj = Object::Handle(data()); |
| ASSERT(!obj.IsNull()); |
| FfiTrampolineData::Cast(obj).set_c_signature(sig); |
| } |
| |
| FunctionTypePtr Function::FfiCSignature() const { |
| ASSERT(IsFfiTrampoline()); |
| const Object& obj = Object::Handle(data()); |
| ASSERT(!obj.IsNull()); |
| return FfiTrampolineData::Cast(obj).c_signature(); |
| } |
| |
| bool Function::FfiCSignatureContainsHandles() const { |
| ASSERT(IsFfiTrampoline()); |
| const FunctionType& c_signature = FunctionType::Handle(FfiCSignature()); |
| const intptr_t num_params = c_signature.num_fixed_parameters(); |
| for (intptr_t i = 0; i < num_params; i++) { |
| const bool is_handle = |
| AbstractType::Handle(c_signature.ParameterTypeAt(i)).type_class_id() == |
| kFfiHandleCid; |
| if (is_handle) { |
| return true; |
| } |
| } |
| return AbstractType::Handle(c_signature.result_type()).type_class_id() == |
| kFfiHandleCid; |
| } |
| |
| bool Function::FfiCSignatureReturnsStruct() const { |
| ASSERT(IsFfiTrampoline()); |
| const FunctionType& c_signature = FunctionType::Handle(FfiCSignature()); |
| const auto& return_type = AbstractType::Handle(c_signature.result_type()); |
| const bool predefined = IsFfiTypeClassId(return_type.type_class_id()); |
| return !predefined; |
| } |
| |
| int32_t Function::FfiCallbackId() const { |
| ASSERT(IsFfiTrampoline()); |
| const Object& obj = Object::Handle(data()); |
| ASSERT(!obj.IsNull()); |
| return FfiTrampolineData::Cast(obj).callback_id(); |
| } |
| |
| void Function::SetFfiCallbackId(int32_t value) const { |
| ASSERT(IsFfiTrampoline()); |
| const Object& obj = Object::Handle(data()); |
| ASSERT(!obj.IsNull()); |
| FfiTrampolineData::Cast(obj).set_callback_id(value); |
| } |
| |
| bool Function::FfiIsLeaf() const { |
| ASSERT(IsFfiTrampoline()); |
| const Object& obj = Object::Handle(untag()->data()); |
| ASSERT(!obj.IsNull()); |
| return FfiTrampolineData::Cast(obj).is_leaf(); |
| } |
| |
| void Function::SetFfiIsLeaf(bool is_leaf) const { |
| ASSERT(IsFfiTrampoline()); |
| const Object& obj = Object::Handle(untag()->data()); |
| ASSERT(!obj.IsNull()); |
| FfiTrampolineData::Cast(obj).set_is_leaf(is_leaf); |
| } |
| |
| FunctionPtr Function::FfiCallbackTarget() const { |
| ASSERT(IsFfiTrampoline()); |
| const Object& obj = Object::Handle(data()); |
| ASSERT(!obj.IsNull()); |
| return FfiTrampolineData::Cast(obj).callback_target(); |
| } |
| |
| void Function::SetFfiCallbackTarget(const Function& target) const { |
| ASSERT(IsFfiTrampoline()); |
| const Object& obj = Object::Handle(data()); |
| ASSERT(!obj.IsNull()); |
| FfiTrampolineData::Cast(obj).set_callback_target(target); |
| } |
| |
| InstancePtr Function::FfiCallbackExceptionalReturn() const { |
| ASSERT(IsFfiTrampoline()); |
| const Object& obj = Object::Handle(data()); |
| ASSERT(!obj.IsNull()); |
| return FfiTrampolineData::Cast(obj).callback_exceptional_return(); |
| } |
| |
| void Function::SetFfiCallbackExceptionalReturn(const Instance& value) const { |
| ASSERT(IsFfiTrampoline()); |
| const Object& obj = Object::Handle(data()); |
| ASSERT(!obj.IsNull()); |
| FfiTrampolineData::Cast(obj).set_callback_exceptional_return(value); |
| } |
| |
| const char* Function::KindToCString(UntaggedFunction::Kind kind) { |
| return UntaggedFunction::KindToCString(kind); |
| } |
| |
| FunctionPtr Function::ForwardingTarget() const { |
| ASSERT(kind() == UntaggedFunction::kDynamicInvocationForwarder); |
| Array& checks = Array::Handle(); |
| checks ^= data(); |
| return Function::RawCast(checks.At(0)); |
| } |
| |
| void Function::SetForwardingChecks(const Array& checks) const { |
| ASSERT(kind() == UntaggedFunction::kDynamicInvocationForwarder); |
| ASSERT(checks.Length() >= 1); |
| ASSERT(Object::Handle(checks.At(0)).IsFunction()); |
| set_data(checks); |
| } |
| |
// This field is heavily overloaded:
//   kernel eval function:    Array[0] = Script
//                            Array[1] = Kernel data
//                            Array[2] = Kernel offset of enclosing library
//   method extractor:        Function extracted closure function
//   implicit getter:         Field
//   implicit setter:         Field
//   implicit static getter:  Field
//   field initializer:       Field
//   noSuchMethod dispatcher: Array arguments descriptor
//   invoke-field dispatcher: Array arguments descriptor
//   closure function:        ClosureData
//   irregexp function:       Array[0] = RegExp
//                            Array[1] = Smi string specialization cid
//   native function:         Array[0] = String native name
//                            Array[1] = Function implicit closure function
//   regular function:        Function for implicit closure function
//   ffi trampoline function: FfiTrampolineData (Dart->C)
//   dyn inv forwarder:       Array[0] = Function target
//                            Array[1] = TypeArguments default type args
| void Function::set_data(const Object& value) const { |
| untag()->set_data<std::memory_order_release>(value.ptr()); |
| } |
| |
| void Function::set_name(const String& value) const { |
| ASSERT(value.IsSymbol()); |
| untag()->set_name(value.ptr()); |
| } |
| |
| void Function::set_owner(const Object& value) const { |
| ASSERT(!value.IsNull()); |
| untag()->set_owner(value.ptr()); |
| } |
| |
| RegExpPtr Function::regexp() const { |
| ASSERT(kind() == UntaggedFunction::kIrregexpFunction); |
| const Array& pair = Array::Cast(Object::Handle(data())); |
| return RegExp::RawCast(pair.At(0)); |
| } |
| |
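// Bit layout of the Smi stored in slot 1 of an irregexp function's data
// array: bit 0 holds the sticky-specialization flag and the following bits
// hold the string specialization class id.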
| class StickySpecialization : public BitField<intptr_t, bool, 0, 1> {}; |
| class StringSpecializationCid |
| : public BitField<intptr_t, intptr_t, 1, UntaggedObject::kClassIdTagSize> { |
| }; |
| |
| intptr_t Function::string_specialization_cid() const { |
| ASSERT(kind() == UntaggedFunction::kIrregexpFunction); |
| const Array& pair = Array::Cast(Object::Handle(data())); |
| return StringSpecializationCid::decode(Smi::Value(Smi::RawCast(pair.At(1)))); |
| } |
| |
| bool Function::is_sticky_specialization() const { |
| ASSERT(kind() == UntaggedFunction::kIrregexpFunction); |
| const Array& pair = Array::Cast(Object::Handle(data())); |
| return StickySpecialization::decode(Smi::Value(Smi::RawCast(pair.At(1)))); |
| } |
| |
| void Function::SetRegExpData(const RegExp& regexp, |
| intptr_t string_specialization_cid, |
| bool sticky) const { |
| ASSERT(kind() == UntaggedFunction::kIrregexpFunction); |
| ASSERT(IsStringClassId(string_specialization_cid)); |
| ASSERT(data() == Object::null()); |
| const Array& pair = Array::Handle(Array::New(2, Heap::kOld)); |
| pair.SetAt(0, regexp); |
| pair.SetAt(1, Smi::Handle(Smi::New(StickySpecialization::encode(sticky) | |
| StringSpecializationCid::encode( |
| string_specialization_cid)))); |
| set_data(pair); |
| } |
| |
| StringPtr Function::native_name() const { |
| ASSERT(is_native()); |
| const Object& obj = Object::Handle(data()); |
| ASSERT(obj.IsArray()); |
| return String::RawCast(Array::Cast(obj).At(0)); |
| } |
| |
| void Function::set_native_name(const String& value) const { |
| Zone* zone = Thread::Current()->zone(); |
| ASSERT(is_native()); |
| |
  // Because kernel needs to read in the constant table before the annotation
  // data is available, we don't know at function creation time whether the
  // function is native or not.
| // |
| // Reading the constant table can cause a static function to get an implicit |
| // closure function. |
| // |
| // We therefore handle both cases. |
| const Object& old_data = Object::Handle(zone, data()); |
| ASSERT(old_data.IsNull() || |
| (old_data.IsFunction() && |
| Function::Handle(zone, Function::RawCast(old_data.ptr())) |
| .IsImplicitClosureFunction())); |
| |
| const Array& pair = Array::Handle(zone, Array::New(2, Heap::kOld)); |
| pair.SetAt(0, value); |
| pair.SetAt(1, old_data); // will be the implicit closure function if needed. |
| set_data(pair); |
| } |
| |
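// Sets the signature and, for generic closure functions, refreshes the
// cached default-type-arguments kind derived from the signature's defaults.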
| void Function::set_signature(const FunctionType& value) const { |
| // Signature may be reset to null in aot to save space. |
| untag()->set_signature(value.ptr()); |
| if (!value.IsNull()) { |
| ASSERT(NumImplicitParameters() == value.num_implicit_parameters()); |
| if (IsClosureFunction() && value.IsGeneric()) { |
| const TypeParameters& type_params = |
| TypeParameters::Handle(value.type_parameters()); |
| const TypeArguments& defaults = |
| TypeArguments::Handle(type_params.defaults()); |
| auto kind = DefaultTypeArgumentsKindFor(defaults); |
| ASSERT(kind != DefaultTypeArgumentsKind::kInvalid); |
| set_default_type_arguments_kind(kind); |
| } |
| } |
| } |
| |
| TypeParameterPtr FunctionType::TypeParameterAt(intptr_t index, |
| Nullability nullability) const { |
| ASSERT(index >= 0 && index < NumTypeParameters()); |
| const TypeParameters& type_params = TypeParameters::Handle(type_parameters()); |
| const AbstractType& bound = AbstractType::Handle(type_params.BoundAt(index)); |
| TypeParameter& type_param = TypeParameter::Handle( |
| TypeParameter::New(Object::null_class(), NumParentTypeArguments(), |
| NumParentTypeArguments() + index, bound, nullability)); |
| if (IsFinalized()) { |
| type_param ^= ClassFinalizer::FinalizeType(type_param); |
| } |
| return type_param.ptr(); |
| } |
| |
| void FunctionType::set_result_type(const AbstractType& value) const { |
| ASSERT(!value.IsNull()); |
| untag()->set_result_type(value.ptr()); |
| } |
| |
| AbstractTypePtr Function::ParameterTypeAt(intptr_t index) const { |
| const Array& parameter_types = |
| Array::Handle(untag()->signature()->untag()->parameter_types()); |
| return AbstractType::RawCast(parameter_types.At(index)); |
| } |
| |
| AbstractTypePtr FunctionType::ParameterTypeAt(intptr_t index) const { |
| const Array& parameter_types = Array::Handle(untag()->parameter_types()); |
| return AbstractType::RawCast(parameter_types.At(index)); |
| } |
| |
| void FunctionType::SetParameterTypeAt(intptr_t index, |
| const AbstractType& value) const { |
| ASSERT(!value.IsNull()); |
| const Array& parameter_types = Array::Handle(untag()->parameter_types()); |
| parameter_types.SetAt(index, value); |
| } |
| |
| void Function::set_parameter_types(const Array& value) const { |
| ASSERT(value.IsNull() || value.Length() > 0); |
| untag()->signature()->untag()->set_parameter_types(value.ptr()); |
| } |
| |
| void FunctionType::set_parameter_types(const Array& value) const { |
| ASSERT(value.IsNull() || value.Length() > 0); |
| untag()->set_parameter_types(value.ptr()); |
| } |
| |
| StringPtr Function::ParameterNameAt(intptr_t index) const { |
| const Array& parameter_names = Array::Handle(untag()->parameter_names()); |
| return String::RawCast(parameter_names.At(index)); |
| } |
| |
| void Function::SetParameterNamesFrom(const FunctionType& signature) const { |
| untag()->set_parameter_names(signature.parameter_names()); |
| } |
| |
| StringPtr FunctionType::ParameterNameAt(intptr_t index) const { |
| const Array& parameter_names = Array::Handle(untag()->parameter_names()); |
| return String::RawCast(parameter_names.At(index)); |
| } |
| |
| void FunctionType::SetParameterNameAt(intptr_t index, |
| const String& value) const { |
| ASSERT(!value.IsNull() && value.IsSymbol()); |
| const Array& parameter_names = Array::Handle(untag()->parameter_names()); |
| parameter_names.SetAt(index, value); |
| } |
| |
| void Function::set_parameter_names(const Array& value) const { |
| ASSERT(value.IsNull() || value.Length() > 0); |
| untag()->set_parameter_names(value.ptr()); |
| } |
| |
| void FunctionType::set_parameter_names(const Array& value) const { |
| ASSERT(value.IsNull() || value.Length() > 0); |
| untag()->set_parameter_names(value.ptr()); |
| } |
| |
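// Allocates the parameter names array, adding trailing Smi slots to hold
// parameter flags when the signature has optional named parameters.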
| void FunctionType::CreateNameArrayIncludingFlags(Heap::Space space) const { |
| // Currently, we only store flags for named parameters that are required. |
| const intptr_t num_parameters = NumParameters(); |
| if (num_parameters == 0) return; |
| intptr_t num_total_slots = num_parameters; |
| if (HasOptionalNamedParameters()) { |
| const intptr_t last_index = (NumOptionalNamedParameters() - 1) / |
| compiler::target::kNumParameterFlagsPerElement; |
| const intptr_t num_flag_slots = last_index + 1; |
| num_total_slots += num_flag_slots; |
| } |
| auto& array = Array::Handle(Array::New(num_total_slots, space)); |
| if (num_total_slots > num_parameters) { |
| // Set flag slots to Smi 0 before handing off. |
| auto& empty_flags_smi = Smi::Handle(Smi::New(0)); |
| for (intptr_t i = num_parameters; i < num_total_slots; i++) { |
| array.SetAt(i, empty_flags_smi); |
| } |
| } |
| set_parameter_names(array); |
| } |
| |
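// Maps a named parameter index to the index of the flag slot holding its
// 'required' bit and stores the corresponding bit mask in 'flag_mask'.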
| intptr_t FunctionType::GetRequiredFlagIndex(intptr_t index, |
| intptr_t* flag_mask) const { |
| // If these calculations change, also change |
| // FlowGraphBuilder::BuildClosureCallHasRequiredNamedArgumentsCheck. |
| ASSERT(flag_mask != nullptr); |
| ASSERT(index >= num_fixed_parameters()); |
| index -= num_fixed_parameters(); |
| *flag_mask = (1 << compiler::target::kRequiredNamedParameterFlag) |
| << ((static_cast<uintptr_t>(index) % |
| compiler::target::kNumParameterFlagsPerElement) * |
| compiler::target::kNumParameterFlags); |
| return NumParameters() + |
| index / compiler::target::kNumParameterFlagsPerElement; |
| } |
| |
| bool Function::HasRequiredNamedParameters() const { |
| const FunctionType& sig = FunctionType::Handle(signature()); |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| if (sig.IsNull()) { |
| // Signature is not dropped in aot when any named parameter is required. |
| return false; |
| } |
| #else |
| ASSERT(!sig.IsNull()); |
| #endif |
| const Array& parameter_names = Array::Handle(sig.parameter_names()); |
| if (parameter_names.IsNull()) { |
| return false; |
| } |
| return parameter_names.Length() > NumParameters(); |
| } |
| |
| bool Function::IsRequiredAt(intptr_t index) const { |
| if (index < num_fixed_parameters() + NumOptionalPositionalParameters()) { |
| return false; |
| } |
| const FunctionType& sig = FunctionType::Handle(signature()); |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| if (sig.IsNull()) { |
| // Signature is not dropped in aot when any named parameter is required. |
| return false; |
| } |
| #else |
| ASSERT(!sig.IsNull()); |
| #endif |
| return sig.IsRequiredAt(index); |
| } |
| |
| bool FunctionType::IsRequiredAt(intptr_t index) const { |
| if (index < num_fixed_parameters() + NumOptionalPositionalParameters()) { |
| return false; |
| } |
| intptr_t flag_mask; |
| const intptr_t flag_index = GetRequiredFlagIndex(index, &flag_mask); |
| const Array& parameter_names = Array::Handle(untag()->parameter_names()); |
| if (flag_index >= parameter_names.Length()) { |
| return false; |
| } |
| const intptr_t flags = |
| Smi::Value(Smi::RawCast(parameter_names.At(flag_index))); |
| return (flags & flag_mask) != 0; |
| } |
| |
| void FunctionType::SetIsRequiredAt(intptr_t index) const { |
| intptr_t flag_mask; |
| const intptr_t flag_index = GetRequiredFlagIndex(index, &flag_mask); |
| const Array& parameter_names = Array::Handle(untag()->parameter_names()); |
| ASSERT(flag_index < parameter_names.Length()); |
| const intptr_t flags = |
| Smi::Value(Smi::RawCast(parameter_names.At(flag_index))); |
| parameter_names.SetAt(flag_index, Smi::Handle(Smi::New(flags | flag_mask))); |
| } |
| |
| void FunctionType::TruncateUnusedParameterFlags() const { |
| const intptr_t num_params = NumParameters(); |
| if (num_params == 0) return; |
| const Array& parameter_names = Array::Handle(untag()->parameter_names()); |
| if (parameter_names.Length() == num_params) { |
| // No flag slots to truncate. |
| return; |
| } |
| // Truncate the parameter names array to remove unused flags from the end. |
| intptr_t last_used = parameter_names.Length() - 1; |
| for (; last_used >= num_params; --last_used) { |
| if (Smi::Value(Smi::RawCast(parameter_names.At(last_used))) != 0) { |
| break; |
| } |
| } |
| parameter_names.Truncate(last_used + 1); |
| } |
| |
| void FunctionType::FinalizeNameArrays(const Function& function) const { |
| TruncateUnusedParameterFlags(); |
| if (!function.IsNull()) { |
| function.SetParameterNamesFrom(*this); |
| // Unless the function is a dispatcher, its number of type parameters |
| // must match the number of type parameters in its signature. |
| ASSERT(function.kind() == UntaggedFunction::kNoSuchMethodDispatcher || |
| function.kind() == UntaggedFunction::kInvokeFieldDispatcher || |
| function.kind() == UntaggedFunction::kDynamicInvocationForwarder || |
| function.NumTypeParameters() == NumTypeParameters()); |
| } |
| } |
| |
| void FunctionType::set_type_parameters(const TypeParameters& value) const { |
| untag()->set_type_parameters(value.ptr()); |
| } |
| |
| static void ReportTooManyTypeParameters(const Function& function) { |
| Report::MessageF(Report::kError, Script::Handle(), TokenPosition::kNoSource, |
| Report::AtLocation, |
| "too many type parameters declared in function '%s'", |
| function.UserVisibleNameCString()); |
| UNREACHABLE(); |
| } |
| |
| static void ReportTooManyTypeParameters(const FunctionType& sig) { |
| Report::MessageF(Report::kError, Script::Handle(), TokenPosition::kNoSource, |
| Report::AtLocation, |
| "too many type parameters declared in signature '%s' or in " |
| "its enclosing signatures", |
| sig.ToUserVisibleCString()); |
| UNREACHABLE(); |
| } |
| |
| void FunctionType::SetNumParentTypeArguments(intptr_t value) const { |
| ASSERT(value >= 0); |
| if (!Utils::IsUint(UntaggedFunctionType::kMaxParentTypeArgumentsBits, |
| value)) { |
| ReportTooManyTypeParameters(*this); |
| } |
| const uint32_t* original = &untag()->packed_fields_; |
| StoreNonPointer(original, |
| UntaggedFunctionType::PackedNumParentTypeArguments::update( |
| value, *original)); |
| } |
| |
| void Function::SetNumTypeParameters(intptr_t value) const { |
| ASSERT(value >= 0); |
| if (!Utils::IsUint(UntaggedFunction::kMaxTypeParametersBits, value)) { |
| ReportTooManyTypeParameters(*this); |
| } |
| const uint32_t* original = &untag()->packed_fields_; |
| StoreNonPointer(original, UntaggedFunction::PackedNumTypeParameters::update( |
| value, *original)); |
| } |
| |
| intptr_t FunctionType::NumTypeParameters(Thread* thread) const { |
| if (type_parameters() == TypeParameters::null()) { |
| return 0; |
| } |
| REUSABLE_TYPE_PARAMETERS_HANDLESCOPE(thread); |
| TypeParameters& type_params = thread->TypeParametersHandle(); |
| type_params = type_parameters(); |
| // We require null to represent a non-generic signature. |
| ASSERT(type_params.Length() != 0); |
| return type_params.Length(); |
| } |
| |
| intptr_t Function::NumParentTypeArguments() const { |
| // Don't allocate handle in cases where we know it is 0. |
| if (!IsClosureFunction()) return 0; |
| return FunctionType::Handle(signature()).NumParentTypeArguments(); |
| } |
| |
| TypeParameterPtr Function::TypeParameterAt(intptr_t index, |
| Nullability nullability) const { |
| const FunctionType& sig = FunctionType::Handle(signature()); |
| return sig.TypeParameterAt(index, nullability); |
| } |
| |
| void Function::set_kind(UntaggedFunction::Kind value) const { |
| untag()->kind_tag_.Update<KindBits>(value); |
| } |
| |
| void Function::set_modifier(UntaggedFunction::AsyncModifier value) const { |
| untag()->kind_tag_.Update<ModifierBits>(value); |
| } |
| |
| void Function::set_recognized_kind(MethodRecognizer::Kind value) const { |
| // Prevent multiple settings of kind. |
| ASSERT((value == MethodRecognizer::kUnknown) || !IsRecognized()); |
| untag()->kind_tag_.Update<RecognizedBits>(value); |
| } |
| |
| void Function::set_token_pos(TokenPosition token_pos) const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| ASSERT(!token_pos.IsClassifying() || IsMethodExtractor()); |
| StoreNonPointer(&untag()->token_pos_, token_pos); |
| #endif |
| } |
| |
| void Function::set_kind_tag(uint32_t value) const { |
| untag()->kind_tag_ = value; |
| } |
| |
| void Function::set_packed_fields(uint32_t packed_fields) const { |
| StoreNonPointer(&untag()->packed_fields_, packed_fields); |
| } |
| |
| bool Function::IsOptimizable() const { |
| if (FLAG_precompiled_mode) { |
| return true; |
| } |
| if (ForceOptimize()) return true; |
| if (is_native()) { |
| // Native methods don't need to be optimized. |
| return false; |
| } |
| if (is_optimizable() && (script() != Script::null()) && |
| SourceSize() < FLAG_huge_method_cutoff_in_tokens) { |
| // Additional check needed for implicit getters. |
| return (unoptimized_code() == Object::null()) || |
| (Code::Handle(unoptimized_code()).Size() < |
| FLAG_huge_method_cutoff_in_code_size); |
| } |
| return false; |
| } |
| |
| void Function::SetIsOptimizable(bool value) const { |
| ASSERT(!is_native()); |
| set_is_optimizable(value); |
| if (!value) { |
| set_is_inlinable(false); |
| set_usage_counter(INT32_MIN); |
| } |
| } |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| bool Function::CanBeInlined() const { |
| // Our force-optimized functions cannot deoptimize to an unoptimized frame. |
| // If the instructions of the force-optimized function body get moved via |
  // code motion, we might attempt to deoptimize a frame where the force-
| // optimized function has only partially finished. Since force-optimized |
| // functions cannot deoptimize to unoptimized frames we prevent them from |
| // being inlined (for now). |
| if (ForceOptimize()) { |
| if (IsFfiTrampoline()) { |
| // The CallSiteInliner::InlineCall asserts in PrepareGraphs that |
| // GraphEntryInstr::SuccessorCount() == 1, but FFI trampoline has two |
| // entries (a normal and a catch entry). |
| return false; |
| } |
| return CompilerState::Current().is_aot(); |
| } |
| |
| if (HasBreakpoint()) { |
| return false; |
| } |
| |
| return is_inlinable() && !is_external() && !is_generated_body(); |
| } |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| |
| intptr_t Function::NumParameters() const { |
| return num_fixed_parameters() + NumOptionalParameters(); |
| } |
| |
| intptr_t Function::NumImplicitParameters() const { |
| const UntaggedFunction::Kind k = kind(); |
| if (k == UntaggedFunction::kConstructor) { |
| // Type arguments for factory; instance for generative constructor. |
| return 1; |
| } |
| if ((k == UntaggedFunction::kClosureFunction) || |
| (k == UntaggedFunction::kImplicitClosureFunction) || |
| (k == UntaggedFunction::kFfiTrampoline)) { |
| return 1; // Closure object. |
| } |
| if (!is_static()) { |
| // Closure functions defined inside instance (i.e. non-static) functions are |
| // marked as non-static, but they do not have a receiver. |
| // Closures are handled above. |
| ASSERT((k != UntaggedFunction::kClosureFunction) && |
| (k != UntaggedFunction::kImplicitClosureFunction)); |
| return 1; // Receiver. |
| } |
| return 0; // No implicit parameters. |
| } |
| |
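// Checks the given argument counts against this function's signature. On a
// mismatch, writes a user-readable message into 'error_message' if one is
// requested and returns false.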
| bool Function::AreValidArgumentCounts(intptr_t num_type_arguments, |
| intptr_t num_arguments, |
| intptr_t num_named_arguments, |
| String* error_message) const { |
| if ((num_type_arguments != 0) && |
| (num_type_arguments != NumTypeParameters())) { |
    if (error_message != nullptr) {
| const intptr_t kMessageBufferSize = 64; |
| char message_buffer[kMessageBufferSize]; |
| Utils::SNPrint(message_buffer, kMessageBufferSize, |
| "%" Pd " type arguments passed, but %" Pd " expected", |
| num_type_arguments, NumTypeParameters()); |
| // Allocate in old space because it can be invoked in background |
| // optimizing compilation. |
| *error_message = String::New(message_buffer, Heap::kOld); |
| } |
| return false; // Too many type arguments. |
| } |
| if (num_named_arguments > NumOptionalNamedParameters()) { |
    if (error_message != nullptr) {
| const intptr_t kMessageBufferSize = 64; |
| char message_buffer[kMessageBufferSize]; |
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd " named arguments passed, at most %" Pd
                     " expected",
                     num_named_arguments, NumOptionalNamedParameters());
| // Allocate in old space because it can be invoked in background |
| // optimizing compilation. |
| *error_message = String::New(message_buffer, Heap::kOld); |
| } |
| return false; // Too many named arguments. |
| } |
| const intptr_t num_pos_args = num_arguments - num_named_arguments; |
| const intptr_t num_opt_pos_params = NumOptionalPositionalParameters(); |
| const intptr_t num_pos_params = num_fixed_parameters() + num_opt_pos_params; |
| if (num_pos_args > num_pos_params) { |
    if (error_message != nullptr) {
| const intptr_t kMessageBufferSize = 64; |
| char message_buffer[kMessageBufferSize]; |
      // Hide implicit parameters from the user.
| const intptr_t num_hidden_params = NumImplicitParameters(); |
| Utils::SNPrint(message_buffer, kMessageBufferSize, |
| "%" Pd "%s passed, %s%" Pd " expected", |
| num_pos_args - num_hidden_params, |
| num_opt_pos_params > 0 ? " positional" : "", |
| num_opt_pos_params > 0 ? "at most " : "", |
| num_pos_params - num_hidden_params); |
| // Allocate in old space because it can be invoked in background |
| // optimizing compilation. |
| *error_message = String::New(message_buffer, Heap::kOld); |
| } |
| return false; // Too many fixed and/or positional arguments. |
| } |
| if (num_pos_args < num_fixed_parameters()) { |
    if (error_message != nullptr) {
| const intptr_t kMessageBufferSize = 64; |
| char message_buffer[kMessageBufferSize]; |
      // Hide implicit parameters from the user.
| const intptr_t num_hidden_params = NumImplicitParameters(); |
| Utils::SNPrint(message_buffer, kMessageBufferSize, |
| "%" Pd "%s passed, %s%" Pd " expected", |
| num_pos_args - num_hidden_params, |
| num_opt_pos_params > 0 ? " positional" : "", |
| num_opt_pos_params > 0 ? "at least " : "", |
| num_fixed_parameters() - num_hidden_params); |
| // Allocate in old space because it can be invoked in background |
| // optimizing compilation. |
| *error_message = String::New(message_buffer, Heap::kOld); |
| } |
| return false; // Too few fixed and/or positional arguments. |
| } |
| return true; |
| } |
| |
| bool Function::AreValidArguments(intptr_t num_type_arguments, |
| intptr_t num_arguments, |
| const Array& argument_names, |
| String* error_message) const { |
| const Array& args_desc_array = Array::Handle(ArgumentsDescriptor::NewBoxed( |
| num_type_arguments, num_arguments, argument_names, Heap::kNew)); |
| ArgumentsDescriptor args_desc(args_desc_array); |
| return AreValidArguments(args_desc, error_message); |
| } |
| |
| bool Function::AreValidArguments(const ArgumentsDescriptor& args_desc, |
| String* error_message) const { |
| const intptr_t num_type_arguments = args_desc.TypeArgsLen(); |
| const intptr_t num_arguments = args_desc.Count(); |
| const intptr_t num_named_arguments = args_desc.NamedCount(); |
| |
| if (!AreValidArgumentCounts(num_type_arguments, num_arguments, |
| num_named_arguments, error_message)) { |
| return false; |
| } |
| // Verify that all argument names are valid parameter names. |
| Thread* thread = Thread::Current(); |
| auto isolate_group = thread->isolate_group(); |
| Zone* zone = thread->zone(); |
| String& argument_name = String::Handle(zone); |
| String& parameter_name = String::Handle(zone); |
| const intptr_t num_positional_args = num_arguments - num_named_arguments; |
| const intptr_t num_parameters = NumParameters(); |
| for (intptr_t i = 0; i < num_named_arguments; i++) { |
| argument_name = args_desc.NameAt(i); |
| ASSERT(argument_name.IsSymbol()); |
| bool found = false; |
| for (intptr_t j = num_positional_args; j < num_parameters; j++) { |
| parameter_name = ParameterNameAt(j); |
| ASSERT(parameter_name.IsSymbol()); |
| if (argument_name.Equals(parameter_name)) { |
| found = true; |
| break; |
| } |
| } |
| if (!found) { |
| if (error_message != nullptr) { |
| const intptr_t kMessageBufferSize = 64; |
| char message_buffer[kMessageBufferSize]; |
| Utils::SNPrint(message_buffer, kMessageBufferSize, |
| "no optional formal parameter named '%s'", |
| argument_name.ToCString()); |
| *error_message = String::New(message_buffer); |
| } |
| return false; |
| } |
| } |
| if (isolate_group->use_strict_null_safety_checks()) { |
| // Verify that all required named parameters are filled. |
| for (intptr_t j = num_parameters - NumOptionalNamedParameters(); |
| j < num_parameters; j++) { |
| if (IsRequiredAt(j)) { |
| parameter_name = ParameterNameAt(j); |
| ASSERT(parameter_name.IsSymbol()); |
| bool found = false; |
| for (intptr_t i = 0; i < num_named_arguments; i++) { |
| argument_name = args_desc.NameAt(i); |
| ASSERT(argument_name.IsSymbol()); |
| if (argument_name.Equals(parameter_name)) { |
| found = true; |
| break; |
| } |
| } |
| if (!found) { |
| if (error_message != nullptr) { |
| const intptr_t kMessageBufferSize = 64; |
| char message_buffer[kMessageBufferSize]; |
| Utils::SNPrint(message_buffer, kMessageBufferSize, |
| "missing required named parameter '%s'", |
| parameter_name.ToCString()); |
| *error_message = String::New(message_buffer); |
| } |
| return false; |
| } |
| } |
| } |
| } |
| return true; |
| } |
| |
// Retrieves the function type arguments, if any. These could be explicitly
// passed type arguments from the arguments array, delayed type arguments in
// closures, or instantiated bounds for the type parameters if no other
// source of function type arguments is found.
| static TypeArgumentsPtr RetrieveFunctionTypeArguments( |
| Thread* thread, |
| Zone* zone, |
| const Function& function, |
| const Instance& receiver, |
| const TypeArguments& instantiator_type_args, |
| const Array& args, |
| const ArgumentsDescriptor& args_desc) { |
| ASSERT(!function.IsNull()); |
| |
| const intptr_t kNumCurrentTypeArgs = function.NumTypeParameters(); |
| const intptr_t kNumParentTypeArgs = function.NumParentTypeArguments(); |
| const intptr_t kNumTypeArgs = kNumCurrentTypeArgs + kNumParentTypeArgs; |
| // Non-generic functions don't receive type arguments. |
| if (kNumTypeArgs == 0) return Object::empty_type_arguments().ptr(); |
  // Closure functions require the receiver to be provided and to be a closure.
| ASSERT(!function.IsClosureFunction() || receiver.IsClosure()); |
| |
| // Only closure functions should have possibly generic parents. |
| ASSERT(function.IsClosureFunction() || kNumParentTypeArgs == 0); |
| const auto& parent_type_args = |
| function.IsClosureFunction() |
| ? TypeArguments::Handle( |
| zone, Closure::Cast(receiver).function_type_arguments()) |
| : Object::empty_type_arguments(); |
  // If parent type arguments are not provided, we don't try to instantiate
  // the parent type parameters to their bounds, nor do we check any
  // closed-over type arguments against the parent type parameter bounds
  // (they have already been type checked).
| if (kNumCurrentTypeArgs == 0) return parent_type_args.ptr(); |
| |
| auto& function_type_args = TypeArguments::Handle(zone); |
| // First check for delayed type arguments before using either provided or |
| // default type arguments. |
| bool has_delayed_type_args = false; |
| if (function.IsClosureFunction()) { |
| const auto& closure = Closure::Cast(receiver); |
| function_type_args = closure.delayed_type_arguments(); |
| has_delayed_type_args = |
| function_type_args.ptr() != Object::empty_type_arguments().ptr(); |
| } |
| |
| if (args_desc.TypeArgsLen() > 0) { |
| // We should never end up here when the receiver is a closure with delayed |
| // type arguments unless this dynamically called closure function was |
| // retrieved directly from the closure instead of going through |
| // DartEntry::ResolveCallable, which appropriately checks for this case. |
| ASSERT(!has_delayed_type_args); |
| function_type_args ^= args.At(0); |
| } else if (!has_delayed_type_args) { |
| // We have no explicitly provided function type arguments, so instantiate |
| // the type parameters to bounds or replace as appropriate. |
| Function::DefaultTypeArgumentsKind kind; |
| function_type_args = function.InstantiateToBounds(thread, &kind); |
| switch (kind) { |
| case Function::DefaultTypeArgumentsKind::kInvalid: |
| // We shouldn't hit the invalid case. |
| UNREACHABLE(); |
| break; |
| case Function::DefaultTypeArgumentsKind::kIsInstantiated: |
| // Nothing left to do. |
| break; |
| case Function::DefaultTypeArgumentsKind::kNeedsInstantiation: |
| function_type_args = function_type_args.InstantiateAndCanonicalizeFrom( |
| instantiator_type_args, parent_type_args); |
| break; |
| case Function::DefaultTypeArgumentsKind::kSharesInstantiatorTypeArguments: |
| function_type_args = instantiator_type_args.ptr(); |
| break; |
| case Function::DefaultTypeArgumentsKind::kSharesFunctionTypeArguments: |
| function_type_args = parent_type_args.ptr(); |
| break; |
| } |
| } |
| |
| return function_type_args.Prepend(zone, parent_type_args, kNumParentTypeArgs, |
| kNumTypeArgs); |
| } |
| |
| // Retrieves the instantiator type arguments, if any, from the receiver. |
| static TypeArgumentsPtr RetrieveInstantiatorTypeArguments( |
| Zone* zone, |
| const Function& function, |
| const Instance& receiver) { |
| if (function.IsClosureFunction()) { |
| ASSERT(receiver.IsClosure()); |
| const auto& closure = Closure::Cast(receiver); |
| return closure.instantiator_type_arguments(); |
| } |
| if (!receiver.IsNull()) { |
| const auto& cls = Class::Handle(zone, receiver.clazz()); |
| if (cls.NumTypeArguments() > 0) { |
| return receiver.GetTypeArguments(); |
| } |
| } |
| return Object::empty_type_arguments().ptr(); |
| } |
| |
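// The DoArgumentTypesMatch overloads below successively fill in missing type
// argument vectors (instantiator, then function) before the final overload
// performs the actual bounds and argument type checks.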
| ObjectPtr Function::DoArgumentTypesMatch( |
| const Array& args, |
| const ArgumentsDescriptor& args_desc) const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| if (signature() == FunctionType::null()) { |
| // Precompiler deleted signature because of missing entry point pragma. |
| return EntryPointMemberInvocationError(*this); |
| } |
| #endif |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| |
| auto& receiver = Instance::Handle(zone); |
| if (IsClosureFunction() || HasThisParameter()) { |
| receiver ^= args.At(args_desc.FirstArgIndex()); |
| } |
| const auto& instantiator_type_arguments = TypeArguments::Handle( |
| zone, RetrieveInstantiatorTypeArguments(zone, *this, receiver)); |
| return Function::DoArgumentTypesMatch(args, args_desc, |
| instantiator_type_arguments); |
| } |
| |
| ObjectPtr Function::DoArgumentTypesMatch( |
| const Array& args, |
| const ArgumentsDescriptor& args_desc, |
| const TypeArguments& instantiator_type_arguments) const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| if (signature() == FunctionType::null()) { |
| // Precompiler deleted signature because of missing entry point pragma. |
| return EntryPointMemberInvocationError(*this); |
| } |
| #endif |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| |
| auto& receiver = Instance::Handle(zone); |
| if (IsClosureFunction() || HasThisParameter()) { |
| receiver ^= args.At(args_desc.FirstArgIndex()); |
| } |
| |
| const auto& function_type_arguments = TypeArguments::Handle( |
| zone, RetrieveFunctionTypeArguments(thread, zone, *this, receiver, |
| instantiator_type_arguments, args, |
| args_desc)); |
| return Function::DoArgumentTypesMatch( |
| args, args_desc, instantiator_type_arguments, function_type_arguments); |
| } |
| |
| ObjectPtr Function::DoArgumentTypesMatch( |
| const Array& args, |
| const ArgumentsDescriptor& args_desc, |
| const TypeArguments& instantiator_type_arguments, |
| const TypeArguments& function_type_arguments) const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| if (signature() == FunctionType::null()) { |
| // Precompiler deleted signature because of missing entry point pragma. |
| return EntryPointMemberInvocationError(*this); |
| } |
| #endif |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| |
| // Perform any non-covariant bounds checks on the provided function type |
| // arguments to make sure they are appropriate subtypes of the bounds. |
| const intptr_t kNumLocalTypeArgs = NumTypeParameters(); |
| if (kNumLocalTypeArgs > 0) { |
| const intptr_t kNumParentTypeArgs = NumParentTypeArguments(); |
| ASSERT(function_type_arguments.HasCount(kNumParentTypeArgs + |
| kNumLocalTypeArgs)); |
| const auto& params = TypeParameters::Handle(zone, type_parameters()); |
| // No checks are needed if all bounds are dynamic. |
| if (!params.AllDynamicBounds()) { |
| auto& param = AbstractType::Handle(zone); |
| auto& bound = AbstractType::Handle(zone); |
| for (intptr_t i = 0; i < kNumLocalTypeArgs; i++) { |
| bound = params.BoundAt(i); |
| // Only perform non-covariant checks where the bound is not |
| // the top type. |
| if (params.IsGenericCovariantImplAt(i) || |
| bound.IsTopTypeForSubtyping()) { |
| continue; |
| } |
| param = TypeParameterAt(i); |
| if (!AbstractType::InstantiateAndTestSubtype( |
| ¶m, &bound, instantiator_type_arguments, |
| function_type_arguments)) { |
| const auto& names = Array::Handle(zone, params.names()); |
| auto& name = String::Handle(zone); |
| name ^= names.At(i); |
| return Error::RawCast( |
| ThrowTypeError(token_pos(), param, bound, name)); |
| } |
| } |
| } |
| } else { |
| ASSERT(function_type_arguments.HasCount(NumParentTypeArguments())); |
| } |
| |
| AbstractType& type = AbstractType::Handle(zone); |
| Instance& argument = Instance::Handle(zone); |
| |
| auto check_argument = [](const Instance& argument, const AbstractType& type, |
| const TypeArguments& instantiator_type_args, |
| const TypeArguments& function_type_args) -> bool { |
| // If the argument type is the top type, no need to check. |
| if (type.IsTopTypeForSubtyping()) return true; |
| if (argument.IsNull()) { |
| return Instance::NullIsAssignableTo(type, instantiator_type_args, |
| function_type_args); |
| } |
| return argument.IsAssignableTo(type, instantiator_type_args, |
| function_type_args); |
| }; |
| |
| // Check types of the provided arguments against the expected parameter types. |
| const intptr_t arg_offset = args_desc.FirstArgIndex(); |
| // Only check explicit arguments. |
| const intptr_t arg_start = arg_offset + NumImplicitParameters(); |
| const intptr_t end_positional_args = arg_offset + args_desc.PositionalCount(); |
| for (intptr_t arg_index = arg_start; arg_index < end_positional_args; |
| ++arg_index) { |
| argument ^= args.At(arg_index); |
| // Adjust for type arguments when they're present. |
| const intptr_t param_index = arg_index - arg_offset; |
| type = ParameterTypeAt(param_index); |
| if (!check_argument(argument, type, instantiator_type_arguments, |
| function_type_arguments)) { |
| auto& name = String::Handle(zone, ParameterNameAt(param_index)); |
| if (!type.IsInstantiated()) { |
| type = |
| type.InstantiateFrom(instantiator_type_arguments, |
| function_type_arguments, kAllFree, Heap::kNew); |
| } |
| return ThrowTypeError(token_pos(), argument, type, name); |
| } |
| } |
| |
| const intptr_t num_named_arguments = args_desc.NamedCount(); |
| if (num_named_arguments == 0) { |
| return Error::null(); |
| } |
| |
| const int num_parameters = NumParameters(); |
| const int num_fixed_params = num_fixed_parameters(); |
| |
| String& argument_name = String::Handle(zone); |
| String& parameter_name = String::Handle(zone); |
| |
| // Check types of named arguments against expected parameter type. |
| for (intptr_t named_index = 0; named_index < num_named_arguments; |
| named_index++) { |
| argument_name = args_desc.NameAt(named_index); |
| ASSERT(argument_name.IsSymbol()); |
| argument ^= args.At(arg_offset + args_desc.PositionAt(named_index)); |
| |
| // Try to find the named parameter that matches the provided argument. |
| // Even when annotated with @required, named parameters are still stored |
| // as if they were optional and so come after the fixed parameters. |
| // Currently O(n^2) as there's no guarantee from either the CFE or the |
| // VM that named parameters and named arguments are sorted in the same way. |
| intptr_t param_index = num_fixed_params; |
| for (; param_index < num_parameters; param_index++) { |
| parameter_name = ParameterNameAt(param_index); |
| ASSERT(parameter_name.IsSymbol()); |
| |
| if (!parameter_name.Equals(argument_name)) continue; |
| |
| type = ParameterTypeAt(param_index); |
| if (!check_argument(argument, type, instantiator_type_arguments, |
| function_type_arguments)) { |
| auto& name = String::Handle(zone, ParameterNameAt(param_index)); |
| if (!type.IsInstantiated()) { |
| type = type.InstantiateFrom(instantiator_type_arguments, |
| function_type_arguments, kAllFree, |
| Heap::kNew); |
| } |
| return ThrowTypeError(token_pos(), argument, type, name); |
| } |
| break; |
| } |
    // Should only fail if AreValidArguments returns a false positive.
| ASSERT(param_index < num_parameters); |
| } |
| return Error::null(); |
| } |
| |
// Helper that allocates a C string buffer in the zone, prints the fully
// qualified name of a function into it, and replaces ':' with '_' to make
// sure the constructed name is a valid C++ identifier for debugging purposes.
// Sets 'chars' to the allocated buffer and returns the number of characters
// written.
| |
| enum QualifiedFunctionLibKind { |
| kQualifiedFunctionLibKindLibUrl, |
| kQualifiedFunctionLibKindLibName |
| }; |
| |
| static intptr_t ConstructFunctionFullyQualifiedCString( |
| const Function& function, |
| char** chars, |
| intptr_t reserve_len, |
| bool with_lib, |
| QualifiedFunctionLibKind lib_kind) { |
| Zone* zone = Thread::Current()->zone(); |
| const char* name = String::Handle(zone, function.name()).ToCString(); |
| const char* function_format = (reserve_len == 0) ? "%s" : "%s_"; |
| reserve_len += Utils::SNPrint(NULL, 0, function_format, name); |
| const Function& parent = Function::Handle(zone, function.parent_function()); |
| intptr_t written = 0; |
| if (parent.IsNull()) { |
| const Class& function_class = Class::Handle(zone, function.Owner()); |
| ASSERT(!function_class.IsNull()); |
| const char* class_name = |
| String::Handle(zone, function_class.Name()).ToCString(); |
| ASSERT(class_name != NULL); |
| const char* library_name = NULL; |
| const char* lib_class_format = NULL; |
| if (with_lib) { |
| const Library& library = Library::Handle(zone, function_class.library()); |
| ASSERT(!library.IsNull()); |
| switch (lib_kind) { |
| case kQualifiedFunctionLibKindLibUrl: |
| library_name = String::Handle(zone, library.url()).ToCString(); |
| break; |
| case kQualifiedFunctionLibKindLibName: |
| library_name = String::Handle(zone, library.name()).ToCString(); |
| break; |
| default: |
| UNREACHABLE(); |
| } |
| ASSERT(library_name != NULL); |
| lib_class_format = (library_name[0] == '\0') ? "%s%s_" : "%s_%s_"; |
| } else { |
| library_name = ""; |
| lib_class_format = "%s%s."; |
| } |
| reserve_len += |
| Utils::SNPrint(NULL, 0, lib_class_format, library_name, class_name); |
| ASSERT(chars != NULL); |
| *chars = zone->Alloc<char>(reserve_len + 1); |
| written = Utils::SNPrint(*chars, reserve_len + 1, lib_class_format, |
| library_name, class_name); |
| } else { |
| written = ConstructFunctionFullyQualifiedCString(parent, chars, reserve_len, |
| with_lib, lib_kind); |
| } |
| ASSERT(*chars != NULL); |
| char* next = *chars + written; |
| written += Utils::SNPrint(next, reserve_len + 1, function_format, name); |
| // Replace ":" with "_". |
| while (true) { |
| next = strchr(next, ':'); |
| if (next == NULL) break; |
| *next = '_'; |
| } |
| return written; |
| } |
| |
| const char* Function::ToFullyQualifiedCString() const { |
| char* chars = NULL; |
| ConstructFunctionFullyQualifiedCString(*this, &chars, 0, true, |
| kQualifiedFunctionLibKindLibUrl); |
| return chars; |
| } |
| |
| const char* Function::ToLibNamePrefixedQualifiedCString() const { |
| char* chars = NULL; |
| ConstructFunctionFullyQualifiedCString(*this, &chars, 0, true, |
| kQualifiedFunctionLibKindLibName); |
| return chars; |
| } |
| |
| const char* Function::ToQualifiedCString() const { |
| char* chars = NULL; |
| ConstructFunctionFullyQualifiedCString(*this, &chars, 0, false, |
| kQualifiedFunctionLibKindLibUrl); |
| return chars; |
| } |
| |
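// Returns a copy of this signature with free type variables substituted from
// the given type argument vectors. A null result indicates a failed
// instantiation in dead code, which the caller must handle.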
| AbstractTypePtr FunctionType::InstantiateFrom( |
| const TypeArguments& instantiator_type_arguments, |
| const TypeArguments& function_type_arguments, |
| intptr_t num_free_fun_type_params, |
| Heap::Space space, |
| TrailPtr trail) const { |
| ASSERT(IsFinalized() || IsBeingFinalized()); |
| Zone* zone = Thread::Current()->zone(); |
| const intptr_t num_parent_type_args = NumParentTypeArguments(); |
| bool delete_type_parameters = false; |
| if (num_free_fun_type_params == kCurrentAndEnclosingFree) { |
| // See the comment on kCurrentAndEnclosingFree to understand why we don't |
| // adjust 'num_free_fun_type_params' downward in this case. |
| num_free_fun_type_params = kAllFree; |
| delete_type_parameters = true; |
| } else { |
| ASSERT(!IsInstantiated(kAny, num_free_fun_type_params)); |
| // We only consider the function type parameters declared by the parents |
| // of this signature function as free. |
| if (num_parent_type_args < num_free_fun_type_params) { |
| num_free_fun_type_params = num_parent_type_args; |
| } |
| } |
| |
| // The number of parent type parameters that remain uninstantiated. |
| const intptr_t remaining_parent_type_params = |
| num_free_fun_type_params < num_parent_type_args |
| ? num_parent_type_args - num_free_fun_type_params |
| : 0; |
| FunctionType& sig = FunctionType::Handle( |
| FunctionType::New(remaining_parent_type_params, nullability(), space)); |
| AbstractType& type = AbstractType::Handle(zone); |
| |
| // Copy the type parameters and instantiate their bounds and defaults. |
| if (!delete_type_parameters) { |
| const TypeParameters& type_params = |
| TypeParameters::Handle(zone, type_parameters()); |
| if (!type_params.IsNull()) { |
| const TypeParameters& sig_type_params = |
| TypeParameters::Handle(zone, TypeParameters::New()); |
      // The names are ignored in a signature; however, the names array must
      // still be set, as its length defines the number of type parameters.
| sig_type_params.set_names(Array::Handle(zone, type_params.names())); |
| sig_type_params.set_flags(Array::Handle(zone, type_params.flags())); |
| TypeArguments& type_args = TypeArguments::Handle(zone); |
| type_args = type_params.bounds(); |
| if (!type_args.IsNull() && |
| !type_args.IsInstantiated(kAny, num_free_fun_type_params)) { |
| type_args = type_args.InstantiateFrom( |
| instantiator_type_arguments, function_type_arguments, |
| num_free_fun_type_params, space, trail); |
| } |
| sig_type_params.set_bounds(type_args); |
| type_args = type_params.defaults(); |
| if (!type_args.IsNull() && |
| !type_args.IsInstantiated(kAny, num_free_fun_type_params)) { |
| type_args = type_args.InstantiateFrom( |
| instantiator_type_arguments, function_type_arguments, |
| num_free_fun_type_params, space, trail); |
| } |
| sig_type_params.set_defaults(type_args); |
| sig.set_type_parameters(sig_type_params); |
| } |
| } |
| |
| type = result_type(); |
| if (!type.IsInstantiated(kAny, num_free_fun_type_params)) { |
| type = type.InstantiateFrom(instantiator_type_arguments, |
| function_type_arguments, |
| num_free_fun_type_params, space, trail); |
| // A returned null type indicates a failed instantiation in dead code that |
| // must be propagated up to the caller, the optimizing compiler. |
| if (type.IsNull()) { |
| return FunctionType::null(); |
| } |
| } |
| sig.set_result_type(type); |
| const intptr_t num_params = NumParameters(); |
| sig.set_num_implicit_parameters(num_implicit_parameters()); |
| sig.set_num_fixed_parameters(num_fixed_parameters()); |
| sig.SetNumOptionalParameters(NumOptionalParameters(), |
| HasOptionalPositionalParameters()); |
| sig.set_parameter_types(Array::Handle(Array::New(num_params, space))); |
| for (intptr_t i = 0; i < num_params; i++) { |
| type = ParameterTypeAt(i); |
| if (!type.IsInstantiated(kAny, num_free_fun_type_params)) { |
| type = type.InstantiateFrom(instantiator_type_arguments, |
| function_type_arguments, |
| num_free_fun_type_params, space, trail); |
| // A returned null type indicates a failed instantiation in dead code that |
| // must be propagated up to the caller, the optimizing compiler. |
| if (type.IsNull()) { |
| return FunctionType::null(); |
| } |
| } |
| sig.SetParameterTypeAt(i, type); |
| } |
| sig.set_parameter_names(Array::Handle(zone, parameter_names())); |
| |
| if (delete_type_parameters) { |
| ASSERT(sig.IsInstantiated(kFunctions)); |
| } |
| |
| if (IsFinalized()) { |
| sig.SetIsFinalized(); |
| } else { |
| if (IsBeingFinalized()) { |
| sig.SetIsBeingFinalized(); |
| } |
| } |
| |
| // Canonicalization is not part of instantiation. |
| return sig.ptr(); |
| } |
| |
| // Checks if the type of the specified parameter of this signature is a |
| // supertype of the type of the specified parameter of the other signature |
| // (i.e. check parameter contravariance). |
| // Note that types marked as covariant are already dealt with in the front-end. |
| bool FunctionType::IsContravariantParameter(intptr_t parameter_position, |
| const FunctionType& other, |
| intptr_t other_parameter_position, |
| Heap::Space space) const { |
| const AbstractType& param_type = |
| AbstractType::Handle(ParameterTypeAt(parameter_position)); |
| if (param_type.IsTopTypeForSubtyping()) { |
| return true; |
| } |
| const AbstractType& other_param_type = |
| AbstractType::Handle(other.ParameterTypeAt(other_parameter_position)); |
| return other_param_type.IsSubtypeOf(param_type, space); |
| } |
| |
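// In a subtype test, corresponding bounds must be mutual subtypes; for other
// kinds of equality, bounds (and, for canonical equality, default type
// arguments) must be pairwise equivalent.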
| bool FunctionType::HasSameTypeParametersAndBounds(const FunctionType& other, |
| TypeEquality kind, |
| TrailPtr trail) const { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| |
| const intptr_t num_type_params = NumTypeParameters(thread); |
| if (num_type_params != other.NumTypeParameters(thread)) { |
| return false; |
| } |
| if (num_type_params > 0) { |
| const TypeParameters& type_params = |
| TypeParameters::Handle(zone, type_parameters()); |
| ASSERT(!type_params.IsNull()); |
| const TypeParameters& other_type_params = |
| TypeParameters::Handle(zone, other.type_parameters()); |
| ASSERT(!other_type_params.IsNull()); |
| if (kind == TypeEquality::kInSubtypeTest) { |
| if (!type_params.AllDynamicBounds() || |
| !other_type_params.AllDynamicBounds()) { |
| AbstractType& bound = AbstractType::Handle(zone); |
| AbstractType& other_bound = AbstractType::Handle(zone); |
| for (intptr_t i = 0; i < num_type_params; i++) { |
| bound = type_params.BoundAt(i); |
| other_bound = other_type_params.BoundAt(i); |
| // Bounds that are mutual subtypes are considered equal. |
| if (!bound.IsSubtypeOf(other_bound, Heap::kOld) || |
| !other_bound.IsSubtypeOf(bound, Heap::kOld)) { |
| return false; |
| } |
| } |
| } |
| } else { |
| if (NumParentTypeArguments() != other.NumParentTypeArguments()) { |
| return false; |
| } |
| const TypeArguments& bounds = |
| TypeArguments::Handle(zone, type_params.bounds()); |
| const TypeArguments& other_bounds = |
| TypeArguments::Handle(zone, other_type_params.bounds()); |
| if (!bounds.IsEquivalent(other_bounds, kind, trail)) { |
| return false; |
| } |
| if (kind == TypeEquality::kCanonical) { |
| // Compare default arguments. |
| const TypeArguments& defaults = |
| TypeArguments::Handle(zone, type_params.defaults()); |
| const TypeArguments& other_defaults = |
| TypeArguments::Handle(zone, other_type_params.defaults()); |
| if (defaults.IsNull()) { |
| if (!other_defaults.IsNull()) { |
| return false; |
| } |
| } else if (!defaults.IsEquivalent(other_defaults, kind, trail)) { |
| return false; |
| } |
| } |
| } |
| // Compare flags (IsGenericCovariantImpl). |
| if (!Array::Equals(type_params.flags(), other_type_params.flags())) { |
| return false; |
| } |
| } |
| return true; |
| } |
| |
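// Checks the function subtype rule: compatible parameter counts and names,
// contravariant parameter types, a covariant result type, and, under strict
// null safety, matching required named parameters.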
| bool FunctionType::IsSubtypeOf(const FunctionType& other, |
| Heap::Space space) const { |
| const intptr_t num_fixed_params = num_fixed_parameters(); |
| const intptr_t num_opt_pos_params = NumOptionalPositionalParameters(); |
| const intptr_t num_opt_named_params = NumOptionalNamedParameters(); |
| const intptr_t other_num_fixed_params = other.num_fixed_parameters(); |
| const intptr_t other_num_opt_pos_params = |
| other.NumOptionalPositionalParameters(); |
| const intptr_t other_num_opt_named_params = |
| other.NumOptionalNamedParameters(); |
  // To be a subtype, this signature must require the same number of arguments
  // or fewer, and accept the same number of arguments or more. We can ignore
  // implicit parameters.
| const intptr_t num_ignored_params = num_implicit_parameters(); |
| const intptr_t other_num_ignored_params = other.num_implicit_parameters(); |
| if (((num_fixed_params - num_ignored_params) > |
| (other_num_fixed_params - other_num_ignored_params)) || |
| ((num_fixed_params - num_ignored_params + num_opt_pos_params) < |
| (other_num_fixed_params - other_num_ignored_params + |
| other_num_opt_pos_params)) || |
| (num_opt_named_params < other_num_opt_named_params)) { |
| return false; |
| } |
| // Check the type parameters and bounds of generic functions. |
| if (!HasSameTypeParametersAndBounds(other, TypeEquality::kInSubtypeTest)) { |
| return false; |
| } |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| auto isolate_group = thread->isolate_group(); |
| // Check the result type. |
| const AbstractType& other_res_type = |
| AbstractType::Handle(zone, other.result_type()); |
| // 'void Function()' is a subtype of 'Object Function()'. |
| if (!other_res_type.IsTopTypeForSubtyping()) { |
| const AbstractType& res_type = AbstractType::Handle(zone, result_type()); |
| if (!res_type.IsSubtypeOf(other_res_type, space)) { |
| return false; |
| } |
| } |
| // Check the types of fixed and optional positional parameters. |
| for (intptr_t i = 0; i < (other_num_fixed_params - other_num_ignored_params + |
| other_num_opt_pos_params); |
| i++) { |
| if (!IsContravariantParameter(i + num_ignored_params, other, |
| i + other_num_ignored_params, space)) { |
| return false; |
| } |
| } |
| // Check that for each optional named parameter of type T of the other |
| // function type, there exists an optional named parameter of this function |
| // type with an identical name and with a type S that is a supertype of T. |
| // Note that SetParameterNameAt() guarantees that names are symbols, so we |
| // can compare their raw pointers. |
| const int num_params = num_fixed_params + num_opt_named_params; |
| const int other_num_params = |
| other_num_fixed_params + other_num_opt_named_params; |
| bool found_param_name; |
| String& other_param_name = String::Handle(zone); |
| for (intptr_t i = other_num_fixed_params; i < other_num_params; i++) { |
| other_param_name = other.ParameterNameAt(i); |
| ASSERT(other_param_name.IsSymbol()); |
| found_param_name = false; |
| for (intptr_t j = num_fixed_params; j < num_params; j++) { |
| ASSERT(String::Handle(zone, ParameterNameAt(j)).IsSymbol()); |
| if (ParameterNameAt(j) == other_param_name.ptr()) { |
| found_param_name = true; |
| if (!IsContravariantParameter(j, other, i, space)) { |
| return false; |
| } |
| break; |
| } |
| } |
| if (!found_param_name) { |
| return false; |
| } |
| } |
| if (isolate_group->use_strict_null_safety_checks()) { |
| // Check that for each required named parameter in this function, there's a |
| // corresponding required named parameter in the other function. |
| String& param_name = other_param_name; |
| for (intptr_t j = num_params - num_opt_named_params; j < num_params; j++) { |
| if (IsRequiredAt(j)) { |
| param_name = ParameterNameAt(j); |
| ASSERT(param_name.IsSymbol()); |
| bool found = false; |
| for (intptr_t i = other_num_fixed_params; i < other_num_params; i++) { |
| ASSERT(String::Handle(zone, other.ParameterNameAt(i)).IsSymbol()); |
| if (other.ParameterNameAt(i) == param_name.ptr()) { |
| found = true; |
| if (!other.IsRequiredAt(i)) { |
| return false; |
| } |
| } |
| } |
| if (!found) { |
| return false; |
| } |
| } |
| } |
| } |
| return true; |
| } |
| |
| // The compiler generates an implicit constructor if a class definition |
| // does not contain an explicit constructor or factory. The implicit |
| // constructor has the same token position as the owner class. |
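// For example, 'class A {}' gets an implicit 'A()' constructor whose start
// and end token positions both equal the position of the class declaration,
// which is what the check below relies on.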
| bool Function::IsImplicitConstructor() const { |
| return IsGenerativeConstructor() && (token_pos() == end_token_pos()); |
| } |
| |
| bool Function::IsImplicitStaticClosureFunction(FunctionPtr func) { |
| NoSafepointScope no_safepoint; |
| uint32_t kind_tag = func->untag()->kind_tag_.load(std::memory_order_relaxed); |
| return (KindBits::decode(kind_tag) == |
| UntaggedFunction::kImplicitClosureFunction) && |
| StaticBit::decode(kind_tag); |
| } |
| |
| FunctionPtr Function::New(Heap::Space space) { |
| ASSERT(Object::function_class() != Class::null()); |
| ObjectPtr raw = |
| Object::Allocate(Function::kClassId, Function::InstanceSize(), space, |
| Function::ContainsCompressedPointers()); |
| return static_cast<FunctionPtr>(raw); |
| } |
| |
| FunctionPtr Function::New(const FunctionType& signature, |
| const String& name, |
| UntaggedFunction::Kind kind, |
| bool is_static, |
| bool is_const, |
| bool is_abstract, |
| bool is_external, |
| bool is_native, |
| const Object& owner, |
| TokenPosition token_pos, |
| Heap::Space space) { |
| ASSERT(!owner.IsNull()); |
| const Function& result = Function::Handle(Function::New(space)); |
result.set_kind_tag(0);  // Ensure determinism of uninitialized bits.
result.set_packed_fields(0);
result.set_name(name);
| result.set_kind(kind); |
| result.set_recognized_kind(MethodRecognizer::kUnknown); |
| result.set_modifier(UntaggedFunction::kNoModifier); |
| result.set_is_static(is_static); |
| result.set_is_const(is_const); |
| result.set_is_abstract(is_abstract); |
| result.set_is_external(is_external); |
| result.set_is_native(is_native); |
| result.set_is_reflectable(true); // Will be computed later. |
| result.set_is_visible(true); // Will be computed later. |
| result.set_is_debuggable(true); // Will be computed later. |
| result.set_is_intrinsic(false); |
| result.set_is_generated_body(false); |
| result.set_has_pragma(false); |
| result.set_is_polymorphic_target(false); |
| result.set_is_synthetic(false); |
| NOT_IN_PRECOMPILED(result.set_state_bits(0)); |
| result.set_owner(owner); |
| NOT_IN_PRECOMPILED(result.set_token_pos(token_pos)); |
| NOT_IN_PRECOMPILED(result.set_end_token_pos(token_pos)); |
| NOT_IN_PRECOMPILED(result.set_usage_counter(0)); |
| NOT_IN_PRECOMPILED(result.set_deoptimization_counter(0)); |
| NOT_IN_PRECOMPILED(result.set_optimized_instruction_count(0)); |
| NOT_IN_PRECOMPILED(result.set_optimized_call_site_count(0)); |
| NOT_IN_PRECOMPILED(result.set_inlining_depth(0)); |
| NOT_IN_PRECOMPILED(result.set_kernel_offset(0)); |
result.set_is_optimizable(!is_native);
| result.set_is_inlinable(true); |
| result.reset_unboxed_parameters_and_return(); |
| result.SetInstructionsSafe(StubCode::LazyCompile()); |
| if (kind == UntaggedFunction::kClosureFunction || |
| kind == UntaggedFunction::kImplicitClosureFunction) { |
| ASSERT(space == Heap::kOld); |
| const ClosureData& data = ClosureData::Handle(ClosureData::New()); |
| result.set_data(data); |
| } else if (kind == UntaggedFunction::kFfiTrampoline) { |
| const FfiTrampolineData& data = |
| FfiTrampolineData::Handle(FfiTrampolineData::New()); |
| result.set_data(data); |
| } else { |
| // Functions other than signature functions have no reason to be allocated |
| // in new space. |
| ASSERT(space == Heap::kOld); |
| } |
| |
| // Force-optimized functions are not debuggable because they cannot |
| // deoptimize. |
| if (result.ForceOptimize()) { |
| result.set_is_debuggable(false); |
| } |
| if (!signature.IsNull()) { |
| signature.set_num_implicit_parameters(result.NumImplicitParameters()); |
| result.set_signature(signature); |
| } else { |
| ASSERT(kind == UntaggedFunction::kFfiTrampoline); |
| } |
| return result.ptr(); |
| } |
| |
| FunctionPtr Function::NewClosureFunctionWithKind(UntaggedFunction::Kind kind, |
| const String& name, |
| const Function& parent, |
| TokenPosition token_pos, |
| const Object& owner) { |
| ASSERT((kind == UntaggedFunction::kClosureFunction) || |
| (kind == UntaggedFunction::kImplicitClosureFunction)); |
| ASSERT(!parent.IsNull()); |
| ASSERT(!owner.IsNull()); |
| const FunctionType& signature = FunctionType::Handle(FunctionType::New( |
| kind == UntaggedFunction::kClosureFunction ? parent.NumTypeArguments() |
| : 0)); |
| const Function& result = Function::Handle( |
| Function::New(signature, name, kind, |
| /* is_static = */ parent.is_static(), |
| /* is_const = */ false, |
| /* is_abstract = */ false, |
| /* is_external = */ false, |
| /* is_native = */ false, owner, token_pos)); |
| result.set_parent_function(parent); |
| return result.ptr(); |
| } |
| |
| FunctionPtr Function::NewClosureFunction(const String& name, |
| const Function& parent, |
| TokenPosition token_pos) { |
| // Use the owner defining the parent function and not the class containing it. |
| const Object& parent_owner = Object::Handle(parent.RawOwner()); |
| return NewClosureFunctionWithKind(UntaggedFunction::kClosureFunction, name, |
| parent, token_pos, parent_owner); |
| } |
| |
| FunctionPtr Function::NewImplicitClosureFunction(const String& name, |
| const Function& parent, |
| TokenPosition token_pos) { |
| // Use the owner defining the parent function and not the class containing it. |
| const Object& parent_owner = Object::Handle(parent.RawOwner()); |
| return NewClosureFunctionWithKind(UntaggedFunction::kImplicitClosureFunction, |
| name, parent, token_pos, parent_owner); |
| } |
| |
| bool Function::SafeToClosurize() const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| return HasImplicitClosureFunction(); |
| #else |
| return true; |
| #endif |
| } |
| |
| bool Function::IsDynamicClosureCallDispatcher(Thread* thread) const { |
| if (!IsInvokeFieldDispatcher()) return false; |
| if (thread->isolate_group()->object_store()->closure_class() != Owner()) { |
| return false; |
| } |
| const auto& handle = String::Handle(thread->zone(), name()); |
| return handle.Equals(Symbols::DynamicCall()); |
| } |
| |
| FunctionPtr Function::ImplicitClosureFunction() const { |
| // Return the existing implicit closure function if any. |
| if (implicit_closure_function() != Function::null()) { |
| return implicit_closure_function(); |
| } |
| |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| // In AOT mode all implicit closures are pre-created. |
| FATAL("Cannot create implicit closure in AOT!"); |
| return Function::null(); |
| #else |
| ASSERT(!IsClosureFunction()); |
| Thread* thread = Thread::Current(); |
| SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| |
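// Re-check under the program lock: another thread may have won the race and
// already installed the implicit closure function.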
| if (implicit_closure_function() != Function::null()) { |
| return implicit_closure_function(); |
| } |
| |
| // Create closure function. |
| Zone* zone = thread->zone(); |
| const String& closure_name = String::Handle(zone, name()); |
| const Function& closure_function = Function::Handle( |
| zone, NewImplicitClosureFunction(closure_name, *this, token_pos())); |
| |
| // Set closure function's context scope. |
| if (is_static()) { |
| closure_function.set_context_scope(Object::empty_context_scope()); |
| } else { |
| const ContextScope& context_scope = ContextScope::Handle( |
| zone, LocalScope::CreateImplicitClosureScope(*this)); |
| closure_function.set_context_scope(context_scope); |
| } |
| |
| FunctionType& closure_signature = |
| FunctionType::Handle(zone, closure_function.signature()); |
| |
| // Set closure function's type parameters. |
| // This function cannot be local, therefore it has no generic parent. |
| // Its implicit closure function therefore has no generic parent function |
| // either. That is why it is safe to simply copy the type parameters. |
| closure_signature.set_type_parameters( |
| TypeParameters::Handle(zone, type_parameters())); |
| closure_function.SetNumTypeParameters(NumTypeParameters()); |
| |
// Set the closure function's result type to this function's result type.
| closure_signature.set_result_type(AbstractType::Handle(zone, result_type())); |
| |
// Set the closure function's end token to this function's end token.
| closure_function.set_end_token_pos(end_token_pos()); |
| |
| // The closurized method stub just calls into the original method and should |
| // therefore be skipped by the debugger and in stack traces. |
| closure_function.set_is_debuggable(false); |
| closure_function.set_is_visible(false); |
| |
// Set the closure function's formal parameters to this function's formal
// parameters, removing the receiver if this is an instance method and
// adding the closure object as the first parameter.
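// For example (illustrative): tearing off 'int m(int x)' from an instance
// produces a closure function whose first, implicit parameter is the
// closure object itself, roughly 'int Function(<closure>, int x)'
// (parameter names illustrative).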
| const int kClosure = 1; |
| const int has_receiver = is_static() ? 0 : 1; |
| const int num_fixed_params = kClosure - has_receiver + num_fixed_parameters(); |
| const int num_opt_params = NumOptionalParameters(); |
| const bool has_opt_pos_params = HasOptionalPositionalParameters(); |
| const int num_params = num_fixed_params + num_opt_params; |
| closure_function.set_num_fixed_parameters(num_fixed_params); |
| closure_function.SetNumOptionalParameters(num_opt_params, has_opt_pos_params); |
| closure_signature.set_parameter_types( |
| Array::Handle(zone, Array::New(num_params, Heap::kOld))); |
| closure_signature.CreateNameArrayIncludingFlags(Heap::kOld); |
| AbstractType& param_type = AbstractType::Handle(zone); |
| String& param_name = String::Handle(zone); |
| // Add implicit closure object parameter. |
| param_type = Type::DynamicType(); |
| closure_signature.SetParameterTypeAt(0, param_type); |
| closure_signature.SetParameterNameAt(0, Symbols::ClosureParameter()); |
| for (int i = kClosure; i < num_params; i++) { |
| param_type = ParameterTypeAt(has_receiver - kClosure + i); |
| closure_signature.SetParameterTypeAt(i, param_type); |
| param_name = ParameterNameAt(has_receiver - kClosure + i); |
| closure_signature.SetParameterNameAt(i, param_name); |
| if (IsRequiredAt(has_receiver - kClosure + i)) { |
| closure_signature.SetIsRequiredAt(i); |
| } |
| } |
| closure_signature.FinalizeNameArrays(closure_function); |
| closure_function.InheritKernelOffsetFrom(*this); |
| |
| // Change covariant parameter types to either Object? for an opted-in implicit |
| // closure or to Object* for a legacy implicit closure. |
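// For example (illustrative): tearing off 'void add(covariant int x)' in an
// opted-in library yields a closure signature 'void Function(Object? x)';
// the actual check against 'int' still happens when the underlying method
// is invoked.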
| if (!is_static()) { |
| BitVector is_covariant(zone, NumParameters()); |
| BitVector is_generic_covariant_impl(zone, NumParameters()); |
| kernel::ReadParameterCovariance(*this, &is_covariant, |
| &is_generic_covariant_impl); |
| |
| Type& object_type = Type::Handle(zone, Type::ObjectType()); |
| ObjectStore* object_store = IsolateGroup::Current()->object_store(); |
| object_type = nnbd_mode() == NNBDMode::kOptedInLib |
| ? object_store->nullable_object_type() |
| : object_store->legacy_object_type(); |
| ASSERT(object_type.IsCanonical()); |
| for (intptr_t i = kClosure; i < num_params; ++i) { |
| const intptr_t original_param_index = has_receiver - kClosure + i; |
| if (is_covariant.Contains(original_param_index) || |
| is_generic_covariant_impl.Contains(original_param_index)) { |
| closure_signature.SetParameterTypeAt(i, object_type); |
| } |
| } |
| } |
| ASSERT(!closure_signature.IsFinalized()); |
| closure_signature ^= ClassFinalizer::FinalizeType(closure_signature); |
| closure_function.set_signature(closure_signature); |
| set_implicit_closure_function(closure_function); |
| ASSERT(closure_function.IsImplicitClosureFunction()); |
| return closure_function.ptr(); |
| #endif // defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
| void Function::DropUncompiledImplicitClosureFunction() const { |
| if (implicit_closure_function() != Function::null()) { |
| const Function& func = Function::Handle(implicit_closure_function()); |
| if (!func.HasCode()) { |
| set_implicit_closure_function(Function::Handle()); |
| } |
| } |
| } |
| |
| StringPtr Function::InternalSignature() const { |
| Thread* thread = Thread::Current(); |
| ZoneTextBuffer printer(thread->zone()); |
| const FunctionType& sig = FunctionType::Handle(signature()); |
| sig.Print(kInternalName, &printer); |
| return Symbols::New(thread, printer.buffer()); |
| } |
| |
| StringPtr Function::UserVisibleSignature() const { |
| Thread* thread = Thread::Current(); |
| ZoneTextBuffer printer(thread->zone()); |
| const FunctionType& sig = FunctionType::Handle(signature()); |
| sig.Print(kUserVisibleName, &printer); |
| return Symbols::New(thread, printer.buffer()); |
| } |
| |
| void FunctionType::PrintParameters(Thread* thread, |
| Zone* zone, |
| NameVisibility name_visibility, |
| BaseTextBuffer* printer) const { |
| AbstractType& param_type = AbstractType::Handle(zone); |
| const intptr_t num_params = NumParameters(); |
| const intptr_t num_fixed_params = num_fixed_parameters(); |
| const intptr_t num_opt_pos_params = NumOptionalPositionalParameters(); |
| const intptr_t num_opt_named_params = NumOptionalNamedParameters(); |
| const intptr_t num_opt_params = num_opt_pos_params + num_opt_named_params; |
| ASSERT((num_fixed_params + num_opt_params) == num_params); |
| intptr_t i = 0; |
| if (name_visibility == kUserVisibleName) { |
| // Hide implicit parameters. |
| i = num_implicit_parameters(); |
| } |
| String& name = String::Handle(zone); |
| while (i < num_fixed_params) { |
| param_type = ParameterTypeAt(i); |
| ASSERT(!param_type.IsNull()); |
| param_type.PrintName(name_visibility, printer); |
| if (i != (num_params - 1)) { |
| printer->AddString(", "); |
| } |
| i++; |
| } |
| if (num_opt_params > 0) { |
| if (num_opt_pos_params > 0) { |
| printer->AddString("["); |
| } else { |
| printer->AddString("{"); |
| } |
| for (intptr_t i = num_fixed_params; i < num_params; i++) { |
| if (num_opt_named_params > 0 && IsRequiredAt(i)) { |
| printer->AddString("required "); |
| } |
| param_type = ParameterTypeAt(i); |
| ASSERT(!param_type.IsNull()); |
| param_type.PrintName(name_visibility, printer); |
| // The parameter name of an optional positional parameter does not need |
| // to be part of the signature, since it is not used. |
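// For example (illustrative): a signature with one fixed and one optional
// positional parameter prints as '(int, [double])', while named parameters
// print as '(int, {required String name})'.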
| if (num_opt_named_params > 0) { |
| name = ParameterNameAt(i); |
| printer->AddString(" "); |
| printer->AddString(name.ToCString()); |
| } |
| if (i != (num_params - 1)) { |
| printer->AddString(", "); |
| } |
| } |
| if (num_opt_pos_params > 0) { |
| printer->AddString("]"); |
| } else { |
| printer->AddString("}"); |
| } |
| } |
| } |
| |
| ClosurePtr Function::ImplicitStaticClosure() const { |
| ASSERT(IsImplicitStaticClosureFunction()); |
| if (implicit_static_closure() != Closure::null()) { |
| return implicit_static_closure(); |
| } |
| |
| auto thread = Thread::Current(); |
| SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| |
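// Re-check under the program lock: another thread may have created and
// cached the closure while we were waiting to acquire the lock.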
| if (implicit_static_closure() != Closure::null()) { |
| return implicit_static_closure(); |
| } |
| |
| Zone* zone = thread->zone(); |
| const auto& null_context = Context::Handle(zone); |
| const auto& closure = |
| Closure::Handle(zone, Closure::New(Object::null_type_arguments(), |
| Object::null_type_arguments(), *this, |
| null_context, Heap::kOld)); |
| set_implicit_static_closure(closure); |
| return implicit_static_closure(); |
| } |
| |
| ClosurePtr Function::ImplicitInstanceClosure(const Instance& receiver) const { |
| ASSERT(IsImplicitClosureFunction()); |
| Zone* zone = Thread::Current()->zone(); |
| const Context& context = Context::Handle(zone, Context::New(1)); |
| context.SetAt(0, receiver); |
| TypeArguments& instantiator_type_arguments = TypeArguments::Handle(zone); |
| if (!HasInstantiatedSignature(kCurrentClass)) { |
| instantiator_type_arguments = receiver.GetTypeArguments(); |
| } |
| ASSERT(!HasGenericParent()); // No generic parent function. |
| return Closure::New(instantiator_type_arguments, |
| Object::null_type_arguments(), *this, context); |
| } |
| |
| FunctionPtr Function::ImplicitClosureTarget(Zone* zone) const { |
| const auto& parent = Function::Handle(zone, parent_function()); |
| const auto& func_name = String::Handle(zone, parent.name()); |
| const auto& owner = Class::Handle(zone, parent.Owner()); |
| Thread* thread = Thread::Current(); |
| const auto& error = owner.EnsureIsFinalized(thread); |
| ASSERT(error == Error::null()); |
| auto& target = |
| Function::Handle(zone, Resolver::ResolveFunction(zone, owner, func_name)); |
| |
| if (!target.IsNull() && (target.ptr() != parent.ptr())) { |
| DEBUG_ASSERT(IsolateGroup::Current()->HasAttemptedReload()); |
| if ((target.is_static() != parent.is_static()) || |
| (target.kind() != parent.kind())) { |
| target = Function::null(); |
| } |
| } |
| |
| return target.ptr(); |
| } |
| |
| intptr_t Function::ComputeClosureHash() const { |
| ASSERT(IsClosureFunction()); |
| const Class& cls = Class::Handle(Owner()); |
| uintptr_t result = String::Handle(name()).Hash(); |
| result += String::Handle(InternalSignature()).Hash(); |
| result += String::Handle(cls.Name()).Hash(); |
| return result; |
| } |
| |
| void FunctionType::Print(NameVisibility name_visibility, |
| BaseTextBuffer* printer) const { |
| if (IsNull()) { |
| printer->AddString("null"); // Signature optimized out in precompiler. |
| return; |
| } |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| const TypeParameters& type_params = |
| TypeParameters::Handle(zone, type_parameters()); |
| if (!type_params.IsNull()) { |
| printer->AddString("<"); |
| const intptr_t base = NumParentTypeArguments(); |
| const bool kIsClassTypeParameter = false; |
| // Type parameter names are meaningless after canonicalization. |
| type_params.Print(thread, zone, kIsClassTypeParameter, base, |
| name_visibility, printer); |
| printer->AddString(">"); |
| } |
| printer->AddString("("); |
| PrintParameters(thread, zone, name_visibility, printer); |
| printer->AddString(") => "); |
| const AbstractType& res_type = AbstractType::Handle(zone, result_type()); |
| if (!res_type.IsNull()) { |
| res_type.PrintName(name_visibility, printer); |
| } else { |
| printer->AddString("null"); |
| } |
| } |
| |
| bool Function::HasInstantiatedSignature(Genericity genericity, |
| intptr_t num_free_fun_type_params, |
| TrailPtr trail) const { |
| return FunctionType::Handle(signature()) |
| .IsInstantiated(genericity, num_free_fun_type_params, trail); |
| } |
| |
| bool FunctionType::IsInstantiated(Genericity genericity, |
| intptr_t num_free_fun_type_params, |
| TrailPtr trail) const { |
| if (num_free_fun_type_params == kCurrentAndEnclosingFree) { |
| num_free_fun_type_params = kAllFree; |
| } else if (genericity != kCurrentClass) { |
| const intptr_t num_parent_type_args = NumParentTypeArguments(); |
| if (num_parent_type_args > 0 && num_free_fun_type_params > 0) { |
| // The number of parent type arguments is cached in the FunctionType, so |
| // we can't consider any FunctionType with free parent type arguments as |
| // fully instantiated. Instead, the FunctionType must be instantiated to |
| // reduce the number of parent type arguments, even if they're unused in |
| // its component types. |
| return false; |
| } |
| // Don't consider local function type parameters as free. |
| if (num_free_fun_type_params > num_parent_type_args) { |
| num_free_fun_type_params = num_parent_type_args; |
| } |
| } |
| AbstractType& type = AbstractType::Handle(result_type()); |
| if (!type.IsInstantiated(genericity, num_free_fun_type_params, trail)) { |
| return false; |
| } |
| const intptr_t num_parameters = NumParameters(); |
| for (intptr_t i = 0; i < num_parameters; i++) { |
| type = ParameterTypeAt(i); |
| if (!type.IsInstantiated(genericity, num_free_fun_type_params, trail)) { |
| return false; |
| } |
| } |
| const intptr_t num_type_params = NumTypeParameters(); |
| if (num_type_params > 0) { |
| TypeParameters& type_params = TypeParameters::Handle(type_parameters()); |
| if (!type_params.AllDynamicBounds()) { |
| for (intptr_t i = 0; i < type_params.Length(); ++i) { |
| type = type_params.BoundAt(i); |
| if (!type.IsInstantiated(genericity, num_free_fun_type_params, trail)) { |
| return false; |
| } |
| } |
| } |
| } |
| return true; |
| } |
| |
| ClassPtr Function::Owner() const { |
| ASSERT(untag()->owner() != Object::null()); |
| if (untag()->owner()->IsClass()) { |
| return Class::RawCast(untag()->owner()); |
| } |
| const Object& obj = Object::Handle(untag()->owner()); |
| ASSERT(obj.IsPatchClass()); |
| return PatchClass::Cast(obj).patched_class(); |
| } |
| |
| ClassPtr Function::origin() const { |
| ASSERT(untag()->owner() != Object::null()); |
| if (untag()->owner()->IsClass()) { |
| return Class::RawCast(untag()->owner()); |
| } |
| const Object& obj = Object::Handle(untag()->owner()); |
| ASSERT(obj.IsPatchClass()); |
| return PatchClass::Cast(obj).origin_class(); |
| } |
| |
| void Function::InheritKernelOffsetFrom(const Function& src) const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| StoreNonPointer(&untag()->kernel_offset_, src.untag()->kernel_offset_); |
| #endif |
| } |
| |
| void Function::InheritKernelOffsetFrom(const Field& src) const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| set_kernel_offset(src.kernel_offset()); |
| #endif |
| } |
| |
| void Function::SetKernelDataAndScript(const Script& script, |
| const ExternalTypedData& data, |
| intptr_t offset) const { |
| Array& data_field = Array::Handle(Array::New(3)); |
| data_field.SetAt(0, script); |
| data_field.SetAt(1, data); |
| data_field.SetAt(2, Smi::Handle(Smi::New(offset))); |
| set_data(data_field); |
| } |
| |
| ScriptPtr Function::script() const { |
| // NOTE(turnidge): If you update this function, you probably want to |
| // update Class::PatchFieldsAndFunctions() at the same time. |
| const Object& data = Object::Handle(this->data()); |
| if (IsDynamicInvocationForwarder()) { |
| const auto& forwarding_target = Function::Handle(ForwardingTarget()); |
| return forwarding_target.script(); |
| } |
| if (IsImplicitGetterOrSetter()) { |
| const auto& field = Field::Handle(accessor_field()); |
| return field.Script(); |
| } |
| if (data.IsArray()) { |
| Object& script = Object::Handle(Array::Cast(data).At(0)); |
| if (script.IsScript()) { |
| return Script::Cast(script).ptr(); |
| } |
| } |
| if (token_pos() == TokenPosition::kMinSource) { |
| // Testing for position 0 is an optimization that relies on temporary |
| // eval functions having token position 0. |
| const Script& script = Script::Handle(eval_script()); |
| if (!script.IsNull()) { |
| return script.ptr(); |
| } |
| } |
| const Object& obj = Object::Handle(untag()->owner()); |
| if (obj.IsPatchClass()) { |
| return PatchClass::Cast(obj).script(); |
| } |
| if (IsClosureFunction()) { |
| const Function& function = Function::Handle(parent_function()); |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| if (function.IsNull()) return Script::null(); |
| #endif |
| return function.script(); |
| } |
| ASSERT(obj.IsClass()); |
| return Class::Cast(obj).script(); |
| } |
| |
| ExternalTypedDataPtr Function::KernelData() const { |
| Object& data = Object::Handle(this->data()); |
| if (data.IsArray()) { |
| Object& script = Object::Handle(Array::Cast(data).At(0)); |
| if (script.IsScript()) { |
| return ExternalTypedData::RawCast(Array::Cast(data).At(1)); |
| } |
| } |
| if (IsClosureFunction()) { |
| Function& parent = Function::Handle(parent_function()); |
| ASSERT(!parent.IsNull()); |
| return parent.KernelData(); |
| } |
| |
| const Object& obj = Object::Handle(untag()->owner()); |
| if (obj.IsClass()) { |
| Library& lib = Library::Handle(Class::Cast(obj).library()); |
| return lib.kernel_data(); |
| } |
| ASSERT(obj.IsPatchClass()); |
| return PatchClass::Cast(obj).library_kernel_data(); |
| } |
| |
| intptr_t Function::KernelDataProgramOffset() const { |
| if (IsNoSuchMethodDispatcher() || IsInvokeFieldDispatcher() || |
| IsFfiTrampoline()) { |
| return 0; |
| } |
| Object& data = Object::Handle(this->data()); |
| if (data.IsArray()) { |
| Object& script = Object::Handle(Array::Cast(data).At(0)); |
| if (script.IsScript()) { |
| return Smi::Value(Smi::RawCast(Array::Cast(data).At(2))); |
| } |
| } |
| if (IsClosureFunction()) { |
| Function& parent = Function::Handle(parent_function()); |
| ASSERT(!parent.IsNull()); |
| return parent.KernelDataProgramOffset(); |
| } |
| |
| const Object& obj = Object::Handle(untag()->owner()); |
| if (obj.IsClass()) { |
| Library& lib = Library::Handle(Class::Cast(obj).library()); |
| return lib.kernel_offset(); |
| } |
| ASSERT(obj.IsPatchClass()); |
| return PatchClass::Cast(obj).library_kernel_offset(); |
| } |
| |
| bool Function::HasOptimizedCode() const { |
| return HasCode() && Code::Handle(CurrentCode()).is_optimized(); |
| } |
| |
| const char* Function::NameCString(NameVisibility name_visibility) const { |
| switch (name_visibility) { |
| case kInternalName: |
| return String::Handle(name()).ToCString(); |
| case kScrubbedName: |
| case kUserVisibleName: |
| return UserVisibleNameCString(); |
| } |
| UNREACHABLE(); |
| return nullptr; |
| } |
| |
| const char* Function::UserVisibleNameCString() const { |
| if (FLAG_show_internal_names) { |
| return String::Handle(name()).ToCString(); |
| } |
| return String::ScrubName(String::Handle(name()), is_extension_member()); |
| } |
| |
| StringPtr Function::UserVisibleName() const { |
| if (FLAG_show_internal_names) { |
| return name(); |
| } |
| return Symbols::New( |
| Thread::Current(), |
| String::ScrubName(String::Handle(name()), is_extension_member())); |
| } |
| |
| StringPtr Function::QualifiedScrubbedName() const { |
| Thread* thread = Thread::Current(); |
| ZoneTextBuffer printer(thread->zone()); |
| PrintName(NameFormattingParams(kScrubbedName), &printer); |
| return Symbols::New(thread, printer.buffer()); |
| } |
| |
| StringPtr Function::QualifiedUserVisibleName() const { |
| Thread* thread = Thread::Current(); |
| ZoneTextBuffer printer(thread->zone()); |
| PrintName(NameFormattingParams(kUserVisibleName), &printer); |
| return Symbols::New(thread, printer.buffer()); |
| } |
| |
| const char* Function::QualifiedUserVisibleNameCString() const { |
| Thread* thread = Thread::Current(); |
| ZoneTextBuffer printer(thread->zone()); |
| PrintName(NameFormattingParams(kUserVisibleName), &printer); |
| return printer.buffer(); |
| } |
| |
| static void FunctionPrintNameHelper(const Function& fun, |
| const NameFormattingParams& params, |
| BaseTextBuffer* printer) { |
| if (fun.IsNonImplicitClosureFunction()) { |
| if (params.include_parent_name) { |
| const auto& parent = Function::Handle(fun.parent_function()); |
| if (parent.IsNull()) { |
| printer->AddString(Symbols::OptimizedOut().ToCString()); |
| } else { |
| parent.PrintName(params, printer); |
| } |
| // A function's scrubbed name and its user visible name are identical. |
| printer->AddString("."); |
| } |
| if (params.disambiguate_names && |
| fun.name() == Symbols::AnonymousClosure().ptr()) { |
| printer->Printf("<anonymous closure @%" Pd ">", fun.token_pos().Pos()); |
| } else { |
| printer->AddString(fun.NameCString(params.name_visibility)); |
| } |
| return; |
| } |
| if (params.disambiguate_names) { |
| if (fun.IsInvokeFieldDispatcher()) { |
| printer->AddString("[invoke-field] "); |
| } |
| if (fun.IsNoSuchMethodDispatcher()) { |
| printer->AddString("[no-such-method] "); |
| } |
| if (fun.IsImplicitClosureFunction()) { |
| printer->AddString("[tear-off] "); |
| } |
| if (fun.IsMethodExtractor()) { |
| printer->AddString("[tear-off-extractor] "); |
| } |
| } |
| |
| if (fun.kind() == UntaggedFunction::kConstructor) { |
| printer->AddString("new "); |
| } else if (params.include_class_name) { |
| const Class& cls = Class::Handle(fun.Owner()); |
| if (!cls.IsTopLevel()) { |
| const Class& mixin = Class::Handle(cls.Mixin()); |
| printer->AddString(params.name_visibility == Object::kUserVisibleName |
| ? mixin.UserVisibleNameCString() |
| : cls.NameCString(params.name_visibility)); |
| printer->AddString("."); |
| } |
| } |
| |
| printer->AddString(fun.NameCString(params.name_visibility)); |
| |
| // Dispatchers that are created with an arguments descriptor need both the |
| // name and the saved arguments descriptor to disambiguate. |
| if (params.disambiguate_names && fun.HasSavedArgumentsDescriptor()) { |
| const auto& args_desc_array = Array::Handle(fun.saved_args_desc()); |
| const ArgumentsDescriptor args_desc(args_desc_array); |
| args_desc.PrintTo(printer); |
| } |
| } |
| |
| void Function::PrintName(const NameFormattingParams& params, |
| BaseTextBuffer* printer) const { |
| if (!IsLocalFunction()) { |
| FunctionPrintNameHelper(*this, params, printer); |
| return; |
| } |
| auto& fun = Function::Handle(ptr()); |
| intptr_t fun_depth = 0; |
| // If |this| is a generated body closure, start with the closest |
| // non-generated parent function. |
| while (fun.is_generated_body()) { |
| fun = fun.parent_function(); |
| fun_depth++; |
| } |
| FunctionPrintNameHelper(fun, params, printer); |
| // If we skipped generated bodies then append a suffix to the end. |
| if (fun_depth > 0 && params.disambiguate_names) { |
| printer->AddString("{body"); |
| if (fun_depth > 1) { |
| printer->Printf(" depth %" Pd "", fun_depth); |
| } |
| printer->AddString("}"); |
| } |
| } |
| |
| StringPtr Function::GetSource() const { |
| if (IsImplicitConstructor() || is_synthetic()) { |
| // We may need to handle more cases when the restrictions on mixins are |
| // relaxed. In particular we might start associating some source with the |
| // forwarding constructors when it becomes possible to specify a particular |
| // constructor from the mixin to use. |
| return String::null(); |
| } |
| Zone* zone = Thread::Current()->zone(); |
| const Script& func_script = Script::Handle(zone, script()); |
| |
| intptr_t from_line, from_col; |
| if (!func_script.GetTokenLocation(token_pos(), &from_line, &from_col)) { |
| return String::null(); |
| } |
| intptr_t to_line, to_col; |
| if (!func_script.GetTokenLocation(end_token_pos(), &to_line, &to_col)) { |
| return String::null(); |
| } |
| intptr_t to_length = func_script.GetTokenLength(end_token_pos()); |
| if (to_length < 0) { |
| return String::null(); |
| } |
| |
| if (to_length == 1) { |
| // Handle special cases for end tokens of closures (where we exclude the |
| // last token): |
| // (1) "foo(() => null, bar);": End token is `,', but we don't print it. |
| // (2) "foo(() => null);": End token is ')`, but we don't print it. |
| // (3) "var foo = () => null;": End token is `;', but in this case the |
| // token semicolon belongs to the assignment so we skip it. |
| const String& src = String::Handle(func_script.Source()); |
| if (src.IsNull() || src.Length() == 0) { |
| return Symbols::OptimizedOut().ptr(); |
| } |
| uint16_t end_char = src.CharAt(end_token_pos().Pos()); |
| if ((end_char == ',') || // Case 1. |
| (end_char == ')') || // Case 2. |
| (end_char == ';' && String::Handle(zone, name()) |
| .Equals("<anonymous closure>"))) { // Case 3. |
| to_length = 0; |
| } |
| } |
| |
| return func_script.GetSnippet(from_line, from_col, to_line, |
| to_col + to_length); |
| } |
| |
// Construct the fingerprint from the token stream. The token stream also
// includes the arguments.
| int32_t Function::SourceFingerprint() const { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| return kernel::KernelSourceFingerprintHelper::CalculateFunctionFingerprint( |
| *this); |
| #else |
| return 0; |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
| void Function::SaveICDataMap( |
| const ZoneGrowableArray<const ICData*>& deopt_id_to_ic_data, |
| const Array& edge_counters_array) const { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| // Compute number of ICData objects to save. |
| // Store edge counter array in the first slot. |
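// The resulting array layout is (a summary of the code below):
//   [0]:           the edge counters array,
//   [1..count-1]:  ICData objects, in ascending deopt id order.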
| intptr_t count = 1; |
| for (intptr_t i = 0; i < deopt_id_to_ic_data.length(); i++) { |
if (deopt_id_to_ic_data[i] != nullptr) {
| count++; |
| } |
| } |
| const Array& array = Array::Handle(Array::New(count, Heap::kOld)); |
| count = 1; |
| for (intptr_t i = 0; i < deopt_id_to_ic_data.length(); i++) { |
if (deopt_id_to_ic_data[i] != nullptr) {
| ASSERT(i == deopt_id_to_ic_data[i]->deopt_id()); |
| array.SetAt(count++, *deopt_id_to_ic_data[i]); |
| } |
| } |
| array.SetAt(0, edge_counters_array); |
| set_ic_data_array(array); |
| #else // DART_PRECOMPILED_RUNTIME |
| UNREACHABLE(); |
| #endif // DART_PRECOMPILED_RUNTIME |
| } |
| |
| void Function::RestoreICDataMap( |
| ZoneGrowableArray<const ICData*>* deopt_id_to_ic_data, |
| bool clone_ic_data) const { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| if (FLAG_force_clone_compiler_objects) { |
| clone_ic_data = true; |
| } |
| ASSERT(deopt_id_to_ic_data->is_empty()); |
| Zone* zone = Thread::Current()->zone(); |
| const Array& saved_ic_data = Array::Handle(zone, ic_data_array()); |
| if (saved_ic_data.IsNull()) { |
// This can happen with not-yet-compiled unoptimized code or with
// force-optimized functions.
| return; |
| } |
| const intptr_t saved_length = saved_ic_data.Length(); |
| ASSERT(saved_length > 0); |
| if (saved_length > 1) { |
| const intptr_t restored_length = |
| ICData::Cast(Object::Handle(zone, saved_ic_data.At(saved_length - 1))) |
| .deopt_id() + |
| 1; |
| deopt_id_to_ic_data->SetLength(restored_length); |
| for (intptr_t i = 0; i < restored_length; i++) { |
(*deopt_id_to_ic_data)[i] = nullptr;
| } |
| for (intptr_t i = 1; i < saved_length; i++) { |
| ICData& ic_data = ICData::ZoneHandle(zone); |
| ic_data ^= saved_ic_data.At(i); |
| if (clone_ic_data) { |
| const ICData& original_ic_data = ICData::Handle(zone, ic_data.ptr()); |
| ic_data = ICData::Clone(ic_data); |
| ic_data.SetOriginal(original_ic_data); |
| } |
| ASSERT(deopt_id_to_ic_data->At(ic_data.deopt_id()) == nullptr); |
| (*deopt_id_to_ic_data)[ic_data.deopt_id()] = &ic_data; |
| } |
| } |
| #else // DART_PRECOMPILED_RUNTIME |
| UNREACHABLE(); |
| #endif // DART_PRECOMPILED_RUNTIME |
| } |
| |
| void Function::set_ic_data_array(const Array& value) const { |
| untag()->set_ic_data_array<std::memory_order_release>(value.ptr()); |
| } |
| |
| ArrayPtr Function::ic_data_array() const { |
| return untag()->ic_data_array<std::memory_order_acquire>(); |
| } |
| |
| void Function::ClearICDataArray() const { |
| set_ic_data_array(Array::null_array()); |
| } |
| |
| ICDataPtr Function::FindICData(intptr_t deopt_id) const { |
| const Array& array = Array::Handle(ic_data_array()); |
| ICData& ic_data = ICData::Handle(); |
| for (intptr_t i = 1; i < array.Length(); i++) { |
| ic_data ^= array.At(i); |
| if (ic_data.deopt_id() == deopt_id) { |
| return ic_data.ptr(); |
| } |
| } |
| return ICData::null(); |
| } |
| |
| void Function::SetDeoptReasonForAll(intptr_t deopt_id, |
| ICData::DeoptReasonId reason) { |
| const Array& array = Array::Handle(ic_data_array()); |
| ICData& ic_data = ICData::Handle(); |
| for (intptr_t i = 1; i < array.Length(); i++) { |
| ic_data ^= array.At(i); |
| if (ic_data.deopt_id() == deopt_id) { |
| ic_data.AddDeoptReason(reason); |
| } |
| } |
| } |
| |
| bool Function::CheckSourceFingerprint(int32_t fp, const char* kind) const { |
| #if !defined(DEBUG) |
return true;  // Only check in debug mode.
| #endif |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| // Check that the function is marked as recognized via the vm:recognized |
| // pragma. This is so that optimizations that change the signature will know |
| // not to touch it. |
| if (kind != nullptr && !MethodRecognizer::IsMarkedAsRecognized(*this, kind)) { |
| OS::PrintErr( |
| "Recognized method %s should be marked with: " |
| "@pragma(\"vm:recognized\", \"%s\")\n", |
| ToQualifiedCString(), kind); |
| return false; |
| } |
| #endif |
| |
| if (IsolateGroup::Current()->obfuscate() || FLAG_precompiled_mode || |
| (Dart::vm_snapshot_kind() != Snapshot::kNone)) { |
| return true; // The kernel structure has been altered, skip checking. |
| } |
| |
| if (SourceFingerprint() != fp) { |
| // This output can be copied into a file, then used with sed |
| // to replace the old values. |
| // sed -i.bak -f /tmp/newkeys \ |
| // runtime/vm/compiler/recognized_methods_list.h |
| THR_Print("s/0x%08x/0x%08x/\n", fp, SourceFingerprint()); |
| return false; |
| } |
| return true; |
| } |
| |
| CodePtr Function::EnsureHasCode() const { |
| if (HasCode()) return CurrentCode(); |
| Thread* thread = Thread::Current(); |
| ASSERT(thread->IsMutatorThread()); |
| DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame()); |
| Zone* zone = thread->zone(); |
| const Object& result = |
| Object::Handle(zone, Compiler::CompileFunction(thread, *this)); |
| if (result.IsError()) { |
| if (result.IsLanguageError()) { |
| Exceptions::ThrowCompileTimeError(LanguageError::Cast(result)); |
| UNREACHABLE(); |
| } |
| Exceptions::PropagateError(Error::Cast(result)); |
| UNREACHABLE(); |
| } |
| // Compiling in unoptimized mode should never fail if there are no errors. |
| ASSERT(HasCode()); |
| ASSERT(ForceOptimize() || unoptimized_code() == result.ptr()); |
| return CurrentCode(); |
| } |
| |
| bool Function::NeedsMonomorphicCheckedEntry(Zone* zone) const { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| if (!IsDynamicFunction()) { |
| return false; |
| } |
| |
| // For functions which need an args descriptor the switchable call sites will |
| // transition directly to calling via a stub (and therefore never call the |
| // monomorphic entry). |
| // |
| // See runtime_entry.cc:DEFINE_RUNTIME_ENTRY(UnlinkedCall) |
| if (PrologueNeedsArgumentsDescriptor()) { |
| return false; |
| } |
| |
| // All dyn:* forwarders are called via SwitchableCalls and all except the ones |
| // with `PrologueNeedsArgumentsDescriptor()` transition into monomorphic |
| // state. |
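// (Such forwarders carry mangled names, e.g. 'dyn:foo' for a method 'foo'.)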
| if (Function::IsDynamicInvocationForwarderName(name())) { |
| return true; |
| } |
| |
| // If table dispatch is disabled, all instance calls use switchable calls. |
| if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions && |
| FLAG_use_table_dispatch)) { |
| return true; |
| } |
| |
// We need the monomorphic checked entry only if there are dynamic callers
// and we did not create a dyn:* forwarder for this function.
| return HasDynamicCallers(zone) && |
| !kernel::NeedsDynamicInvocationForwarder(*this); |
| #else |
| UNREACHABLE(); |
| return true; |
| #endif |
| } |
| |
| bool Function::HasDynamicCallers(Zone* zone) const { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| // Issue(dartbug.com/42719): |
// Right now the metadata of _Closure.call says there are no dynamic
// callers, even though there can be. To be conservative, we return true.
| if ((name() == Symbols::GetCall().ptr() || name() == Symbols::Call().ptr()) && |
| Class::IsClosureClass(Owner())) { |
| return true; |
| } |
| |
| // Use the results of TFA to determine whether this function is ever |
| // called dynamically, i.e. using switchable calls. |
| kernel::ProcedureAttributesMetadata metadata; |
| metadata = kernel::ProcedureAttributesOf(*this, zone); |
| if (IsGetterFunction() || IsImplicitGetterFunction() || IsMethodExtractor()) { |
// A dynamic method call through a field/getter involves a dynamic call of
// the field/getter.
| return metadata.getter_called_dynamically || |
| metadata.method_or_setter_called_dynamically; |
| } else { |
| return metadata.method_or_setter_called_dynamically; |
| } |
| #else |
| UNREACHABLE(); |
| return true; |
| #endif |
| } |
| |
| bool Function::PrologueNeedsArgumentsDescriptor() const { |
| // These functions have a saved compile-time arguments descriptor that is |
| // used in lieu of the runtime arguments descriptor in generated IL. |
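// (For example, invoke-field and noSuchMethod dispatchers are each created
// for one specific arguments descriptor.)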
| if (HasSavedArgumentsDescriptor()) { |
| return false; |
| } |
// The prologues of these functions need to examine the arguments descriptor
// for various purposes.
| return IsGeneric() || HasOptionalParameters(); |
| } |
| |
| bool Function::MayHaveUncheckedEntryPoint() const { |
| return FLAG_enable_multiple_entrypoints && |
| (NeedsTypeArgumentTypeChecks() || NeedsArgumentTypeChecks()); |
| } |
| |
| intptr_t Function::SourceSize() const { |
| const TokenPosition& start = token_pos(); |
| const TokenPosition& end = end_token_pos(); |
| if (!end.IsReal() || start.IsNoSource() || start.IsClassifying()) { |
| // No source information, so just return 0. |
| return 0; |
| } |
| if (start.IsSynthetic()) { |
// Try to approximate the source size using the parent's source size.
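// For example (illustrative): if the parent spans positions [10, 100] and
// this synthetic function ends at 60, we report 90 - (100 - 60) = 50, i.e.
// the span from the parent's start position to our end position.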
| const auto& parent = Function::Handle(parent_function()); |
| ASSERT(!parent.IsNull()); |
| const intptr_t parent_size = parent.SourceSize(); |
| if (parent_size == 0) { |
| return parent_size; |
| } |
| // Parent must have a real ending position. |
| return parent_size - (parent.end_token_pos().Pos() - end.Pos()); |
| } |
| return end.Pos() - start.Pos(); |
| } |
| |
| const char* Function::ToCString() const { |
| if (IsNull()) { |
| return "Function: null"; |
| } |
| Zone* zone = Thread::Current()->zone(); |
| ZoneTextBuffer buffer(zone); |
| buffer.Printf("Function '%s':", String::Handle(zone, name()).ToCString()); |
| if (is_static()) { |
| buffer.AddString(" static"); |
| } |
| if (is_abstract()) { |
| buffer.AddString(" abstract"); |
| } |
| switch (kind()) { |
| case UntaggedFunction::kRegularFunction: |
| case UntaggedFunction::kClosureFunction: |
| case UntaggedFunction::kImplicitClosureFunction: |
| case UntaggedFunction::kGetterFunction: |
| case UntaggedFunction::kSetterFunction: |
| break; |
| case UntaggedFunction::kConstructor: |
| buffer.AddString(is_static() ? " factory" : " constructor"); |
| break; |
| case UntaggedFunction::kImplicitGetter: |
| buffer.AddString(" getter"); |
| break; |
| case UntaggedFunction::kImplicitSetter: |
| buffer.AddString(" setter"); |
| break; |
| case UntaggedFunction::kImplicitStaticGetter: |
| buffer.AddString(" static-getter"); |
| break; |
| case UntaggedFunction::kFieldInitializer: |
| buffer.AddString(" field-initializer"); |
| break; |
| case UntaggedFunction::kMethodExtractor: |
| buffer.AddString(" method-extractor"); |
| break; |
| case UntaggedFunction::kNoSuchMethodDispatcher: |
| buffer.AddString(" no-such-method-dispatcher"); |
| break; |
| case UntaggedFunction::kDynamicInvocationForwarder: |
| buffer.AddString(" dynamic-invocation-forwarder"); |
| break; |
| case UntaggedFunction::kInvokeFieldDispatcher: |
| buffer.AddString(" invoke-field-dispatcher"); |
| break; |
| case UntaggedFunction::kIrregexpFunction: |
| buffer.AddString(" irregexp-function"); |
| break; |
| case UntaggedFunction::kFfiTrampoline: |
| buffer.AddString(" ffi-trampoline-function"); |
| break; |
| default: |
| UNREACHABLE(); |
| } |
| if (HasSavedArgumentsDescriptor()) { |
| const auto& args_desc_array = Array::Handle(zone, saved_args_desc()); |
| const ArgumentsDescriptor args_desc(args_desc_array); |
| buffer.AddChar('['); |
| args_desc.PrintTo(&buffer); |
| buffer.AddChar(']'); |
| } |
| if (is_const()) { |
| buffer.AddString(" const"); |
| } |
| buffer.AddChar('.'); |
| return buffer.buffer(); |
| } |
| |
| void FunctionType::set_packed_fields(uint32_t packed_fields) const { |
| StoreNonPointer(&untag()->packed_fields_, packed_fields); |
| } |
| |
| intptr_t FunctionType::NumParameters() const { |
| return num_fixed_parameters() + NumOptionalParameters(); |
| } |
| |
| void FunctionType::set_num_implicit_parameters(intptr_t value) const { |
| ASSERT(value >= 0); |
| ASSERT( |
| Utils::IsUint(UntaggedFunctionType::kMaxImplicitParametersBits, value)); |
| const uint32_t* original = &untag()->packed_fields_; |
| StoreNonPointer(original, |
| UntaggedFunctionType::PackedNumImplicitParameters::update( |
| value, *original)); |
| } |
| |
| ClosureData::DefaultTypeArgumentsKind ClosureData::default_type_arguments_kind() |
| const { |
| return LoadNonPointer(&untag()->default_type_arguments_kind_); |
| } |
| |
| void ClosureData::set_default_type_arguments_kind( |
| DefaultTypeArgumentsKind value) const { |
| StoreNonPointer(&untag()->default_type_arguments_kind_, value); |
| } |
| |
| ClosureDataPtr ClosureData::New() { |
| ASSERT(Object::closure_data_class() != Class::null()); |
| ObjectPtr raw = |
| Object::Allocate(ClosureData::kClassId, ClosureData::InstanceSize(), |
| Heap::kOld, ClosureData::ContainsCompressedPointers()); |
| return static_cast<ClosureDataPtr>(raw); |
| } |
| |
| const char* ClosureData::ToCString() const { |
| if (IsNull()) { |
| return "ClosureData: null"; |
| } |
| auto const zone = Thread::Current()->zone(); |
| ZoneTextBuffer buffer(zone); |
| buffer.Printf("ClosureData: context_scope: 0x%" Px "", |
| static_cast<uword>(context_scope())); |
| buffer.AddString(" parent_function: "); |
| if (parent_function() == Object::null()) { |
| buffer.AddString("null"); |
| } else { |
| buffer.AddString(Object::Handle(parent_function()).ToCString()); |
| } |
| buffer.Printf(" implicit_static_closure: 0x%" Px "", |
| static_cast<uword>(implicit_static_closure())); |
| return buffer.buffer(); |
| } |
| |
| void Function::set_num_fixed_parameters(intptr_t value) const { |
| ASSERT(value >= 0); |
| ASSERT(Utils::IsUint(UntaggedFunction::kMaxFixedParametersBits, value)); |
| const uint32_t* original = &untag()->packed_fields_; |
| StoreNonPointer(original, UntaggedFunction::PackedNumFixedParameters::update( |
| value, *original)); |
| // Also store in signature. |
| FunctionType::Handle(signature()).set_num_fixed_parameters(value); |
| } |
| |
| void FunctionType::set_num_fixed_parameters(intptr_t value) const { |
| ASSERT(value >= 0); |
| ASSERT(Utils::IsUint(UntaggedFunctionType::kMaxFixedParametersBits, value)); |
| const uint32_t* original = &untag()->packed_fields_; |
| StoreNonPointer( |
| original, |
| UntaggedFunctionType::PackedNumFixedParameters::update(value, *original)); |
| } |
| |
| void Function::SetNumOptionalParameters(intptr_t value, |
| bool are_optional_positional) const { |
| ASSERT(Utils::IsUint(UntaggedFunction::kMaxOptionalParametersBits, value)); |
| uint32_t packed_fields = untag()->packed_fields_; |
| packed_fields = UntaggedFunction::PackedHasNamedOptionalParameters::update( |
| (value > 0) && !are_optional_positional, packed_fields); |
| packed_fields = UntaggedFunction::PackedNumOptionalParameters::update( |
| value, packed_fields); |
| StoreNonPointer(&untag()->packed_fields_, packed_fields); |
| // Also store in signature. |
| FunctionType::Handle(signature()) |
| .SetNumOptionalParameters(value, are_optional_positional); |
| } |
| |
| void FfiTrampolineData::set_callback_target(const Function& value) const { |
| untag()->set_callback_target(value.ptr()); |
| } |
| |
| void FunctionType::SetNumOptionalParameters( |
| intptr_t value, |
| bool are_optional_positional) const { |
| ASSERT( |
| Utils::IsUint(UntaggedFunctionType::kMaxOptionalParametersBits, value)); |
| uint32_t packed_fields = untag()->packed_fields_; |
| packed_fields = |
| UntaggedFunctionType::PackedHasNamedOptionalParameters::update( |
| (value > 0) && !are_optional_positional, packed_fields); |
| packed_fields = UntaggedFunctionType::PackedNumOptionalParameters::update( |
| value, packed_fields); |
| StoreNonPointer(&untag()->packed_fields_, packed_fields); |
| } |
| |
| FunctionTypePtr FunctionType::New(Heap::Space space) { |
| ObjectPtr raw = |
| Object::Allocate(FunctionType::kClassId, FunctionType::InstanceSize(), |
| space, FunctionType::ContainsCompressedPointers()); |
| return static_cast<FunctionTypePtr>(raw); |
| } |
| |
| FunctionTypePtr FunctionType::New(intptr_t num_parent_type_arguments, |
| Nullability nullability, |
| Heap::Space space) { |
| Zone* Z = Thread::Current()->zone(); |
| const FunctionType& result = |
| FunctionType::Handle(Z, FunctionType::New(space)); |
| result.set_packed_fields(0); |
| result.SetNumParentTypeArguments(num_parent_type_arguments); |
| result.set_num_fixed_parameters(0); |
| result.SetNumOptionalParameters(0, false); |
| result.set_nullability(nullability); |
| result.SetHash(0); |
| result.StoreNonPointer(&result.untag()->type_state_, |
| UntaggedType::kAllocated); |
| result.SetTypeTestingStub( |
| Code::Handle(Z, TypeTestingStubGenerator::DefaultCodeForType(result))); |
| return result.ptr(); |
| } |
| |
| void FunctionType::set_type_state(uint8_t state) const { |
| ASSERT(state <= UntaggedFunctionType::kFinalizedUninstantiated); |
| StoreNonPointer(&untag()->type_state_, state); |
| } |
| |
| const char* FunctionType::ToUserVisibleCString() const { |
| Zone* zone = Thread::Current()->zone(); |
| ZoneTextBuffer printer(zone); |
| Print(kUserVisibleName, &printer); |
| return printer.buffer(); |
| } |
| |
| StringPtr FunctionType::ToUserVisibleString() const { |
| Thread* thread = Thread::Current(); |
| ZoneTextBuffer printer(thread->zone()); |
| Print(kUserVisibleName, &printer); |
| return Symbols::New(thread, printer.buffer()); |
| } |
| |
| const char* FunctionType::ToCString() const { |
| if (IsNull()) { |
| return "FunctionType: null"; |
| } |
| Zone* zone = Thread::Current()->zone(); |
| ZoneTextBuffer printer(zone); |
| const char* suffix = NullabilitySuffix(kInternalName); |
| if (suffix[0] != '\0') { |
| printer.AddString("("); |
| } |
| Print(kInternalName, &printer); |
| if (suffix[0] != '\0') { |
| printer.AddString(")"); |
| printer.AddString(suffix); |
| } |
| return printer.buffer(); |
| } |
| |
| void ClosureData::set_context_scope(const ContextScope& value) const { |
| untag()->set_context_scope(value.ptr()); |
| } |
| |
| void ClosureData::set_implicit_static_closure(const Closure& closure) const { |
| ASSERT(!closure.IsNull()); |
| ASSERT(untag()->closure() == Closure::null()); |
| untag()->set_closure<std::memory_order_release>(closure.ptr()); |
| } |
| |
| void FfiTrampolineData::set_c_signature(const FunctionType& value) const { |
| untag()->set_c_signature(value.ptr()); |
| } |
| |
| void FfiTrampolineData::set_callback_id(int32_t callback_id) const { |
| StoreNonPointer(&untag()->callback_id_, callback_id); |
| } |
| |
| void FfiTrampolineData::set_is_leaf(bool is_leaf) const { |
| StoreNonPointer(&untag()->is_leaf_, is_leaf); |
| } |
| |
| void FfiTrampolineData::set_callback_exceptional_return( |
| const Instance& value) const { |
| untag()->set_callback_exceptional_return(value.ptr()); |
| } |
| |
| FfiTrampolineDataPtr FfiTrampolineData::New() { |
| ASSERT(Object::ffi_trampoline_data_class() != Class::null()); |
| ObjectPtr raw = Object::Allocate( |
| FfiTrampolineData::kClassId, FfiTrampolineData::InstanceSize(), |
| Heap::kOld, FfiTrampolineData::ContainsCompressedPointers()); |
| FfiTrampolineDataPtr data = static_cast<FfiTrampolineDataPtr>(raw); |
| data->untag()->callback_id_ = 0; |
| data->untag()->is_leaf_ = false; |
| return data; |
| } |
| |
| const char* FfiTrampolineData::ToCString() const { |
| const FunctionType& c_sig = FunctionType::Handle(c_signature()); |
| return OS::SCreate(Thread::Current()->zone(), |
| "TrampolineData: c_signature=%s", |
| c_sig.ToUserVisibleCString()); |
| } |
| |
| FieldPtr Field::CloneFromOriginal() const { |
| return this->Clone(*this); |
| } |
| |
| FieldPtr Field::Original() const { |
| if (IsNull()) { |
| return Field::null(); |
| } |
| Object& obj = Object::Handle(untag()->owner()); |
| if (obj.IsField()) { |
| return Field::RawCast(obj.ptr()); |
| } else { |
| return this->ptr(); |
| } |
| } |
| |
| const Object* Field::CloneForUnboxed(const Object& value) const { |
| if (is_unboxing_candidate() && !is_nullable()) { |
| switch (guarded_cid()) { |
| case kDoubleCid: |
| case kFloat32x4Cid: |
| case kFloat64x2Cid: |
| return &Object::Handle(Object::Clone(value, Heap::kNew)); |
| default: |
| // Not a supported unboxed field type. |
| return &value; |
| } |
| } |
| return &value; |
| } |
| |
| void Field::DisableFieldUnboxing() const { |
| ASSERT(!IsOriginal()); |
| const Field& original = Field::Handle(Original()); |
| if (!original.is_unboxing_candidate()) { |
| return; |
| } |
| auto thread = Thread::Current(); |
| SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| if (!original.is_unboxing_candidate()) { |
| return; |
| } |
| |
// Ensure that existing, about-to-be-disabled code does not keep running
// while we update the field's properties: such code might write into the
// now-boxed field assuming it still holds an unboxed (reusable box) value.
| thread->isolate_group()->RunWithStoppedMutators([&]() { |
| original.set_is_unboxing_candidate(false); |
| set_is_unboxing_candidate(false); |
| original.DeoptimizeDependentCode(); |
| }); |
| } |
| |
| intptr_t Field::guarded_cid() const { |
| #if defined(DEBUG) |
| // This assertion ensures that the cid seen by the background compiler is |
| // consistent. So the assertion passes if the field is a clone. It also |
| // passes if the field is static, because we don't use field guards on |
| // static fields. It also passes if we're compiling unoptimized |
| // code (in which case the caller might get different answers if it obtains |
| // the guarded cid multiple times). |
| Thread* thread = Thread::Current(); |
| ASSERT(!thread->IsInsideCompiler() || |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| ((CompilerState::Current().should_clone_fields() == !IsOriginal())) || |
| #endif |
| is_static()); |
| #endif |
| return LoadNonPointer<ClassIdTagType, std::memory_order_relaxed>( |
| &untag()->guarded_cid_); |
| } |
| |
| bool Field::is_nullable() const { |
| #if defined(DEBUG) |
| // Same assert as guarded_cid(), because is_nullable() also needs to be |
| // consistent for the background compiler. |
| Thread* thread = Thread::Current(); |
| ASSERT(!thread->IsInsideCompiler() || |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| ((CompilerState::Current().should_clone_fields() == !IsOriginal())) || |
| #endif |
| is_static()); |
| #endif |
| return is_nullable_unsafe(); |
| } |
| |
| void Field::SetOriginal(const Field& value) const { |
| ASSERT(value.IsOriginal()); |
| ASSERT(!value.IsNull()); |
| untag()->set_owner(static_cast<ObjectPtr>(value.ptr())); |
| } |
| |
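// Accessor names are derived by prefixing the field name: a field 'foo' has
// the getter name "get:foo" and the setter name "set:foo" (the exact prefix
// strings come from Symbols::GetterPrefix() and Symbols::SetterPrefix()).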
| StringPtr Field::GetterName(const String& field_name) { |
| return String::Concat(Symbols::GetterPrefix(), field_name); |
| } |
| |
| StringPtr Field::GetterSymbol(const String& field_name) { |
| return Symbols::FromGet(Thread::Current(), field_name); |
| } |
| |
| StringPtr Field::LookupGetterSymbol(const String& field_name) { |
| return Symbols::LookupFromGet(Thread::Current(), field_name); |
| } |
| |
| StringPtr Field::SetterName(const String& field_name) { |
| return String::Concat(Symbols::SetterPrefix(), field_name); |
| } |
| |
| StringPtr Field::SetterSymbol(const String& field_name) { |
| return Symbols::FromSet(Thread::Current(), field_name); |
| } |
| |
| StringPtr Field::LookupSetterSymbol(const String& field_name) { |
| return Symbols::LookupFromSet(Thread::Current(), field_name); |
| } |
| |
| StringPtr Field::NameFromGetter(const String& getter_name) { |
| return Symbols::New(Thread::Current(), getter_name, kGetterPrefixLength, |
| getter_name.Length() - kGetterPrefixLength); |
| } |
| |
| StringPtr Field::NameFromSetter(const String& setter_name) { |
| return Symbols::New(Thread::Current(), setter_name, kSetterPrefixLength, |
| setter_name.Length() - kSetterPrefixLength); |
| } |
| |
| StringPtr Field::NameFromInit(const String& init_name) { |
| return Symbols::New(Thread::Current(), init_name, kInitPrefixLength, |
| init_name.Length() - kInitPrefixLength); |
| } |
| |
| bool Field::IsGetterName(const String& function_name) { |
| return function_name.StartsWith(Symbols::GetterPrefix()); |
| } |
| |
| bool Field::IsSetterName(const String& function_name) { |
| return function_name.StartsWith(Symbols::SetterPrefix()); |
| } |
| |
| bool Field::IsInitName(const String& function_name) { |
| return function_name.StartsWith(Symbols::InitPrefix()); |
| } |
| |
| void Field::set_name(const String& value) const { |
| ASSERT(value.IsSymbol()); |
| ASSERT(IsOriginal()); |
| untag()->set_name(value.ptr()); |
| } |
| |
| ObjectPtr Field::RawOwner() const { |
| if (IsOriginal()) { |
| return untag()->owner(); |
| } else { |
| const Field& field = Field::Handle(Original()); |
| ASSERT(field.IsOriginal()); |
| ASSERT(!Object::Handle(field.untag()->owner()).IsField()); |
| return field.untag()->owner(); |
| } |
| } |
| |
| ClassPtr Field::Owner() const { |
| const Field& field = Field::Handle(Original()); |
| ASSERT(field.IsOriginal()); |
| const Object& obj = Object::Handle(field.untag()->owner()); |
| if (obj.IsClass()) { |
| return Class::Cast(obj).ptr(); |
| } |
| ASSERT(obj.IsPatchClass()); |
| return PatchClass::Cast(obj).patched_class(); |
| } |
| |
| ClassPtr Field::Origin() const { |
| const Field& field = Field::Handle(Original()); |
| ASSERT(field.IsOriginal()); |
| const Object& obj = Object::Handle(field.untag()->owner()); |
| if (obj.IsClass()) { |
| return Class::Cast(obj).ptr(); |
| } |
| ASSERT(obj.IsPatchClass()); |
| return PatchClass::Cast(obj).origin_class(); |
| } |
| |
| ScriptPtr Field::Script() const { |
| // NOTE(turnidge): If you update this function, you probably want to |
| // update Class::PatchFieldsAndFunctions() at the same time. |
| const Field& field = Field::Handle(Original()); |
| ASSERT(field.IsOriginal()); |
| const Object& obj = Object::Handle(field.untag()->owner()); |
| if (obj.IsClass()) { |
| return Class::Cast(obj).script(); |
| } |
| ASSERT(obj.IsPatchClass()); |
| return PatchClass::Cast(obj).script(); |
| } |
| |
| ExternalTypedDataPtr Field::KernelData() const { |
| const Object& obj = Object::Handle(this->untag()->owner()); |
| // During background JIT compilation field objects are copied, |
| // and the copy points to the original field via its owner field. |
| if (obj.IsField()) { |
| return Field::Cast(obj).KernelData(); |
| } else if (obj.IsClass()) { |
| Library& library = Library::Handle(Class::Cast(obj).library()); |
| return library.kernel_data(); |
| } |
| ASSERT(obj.IsPatchClass()); |
| return PatchClass::Cast(obj).library_kernel_data(); |
| } |
| |
| void Field::InheritKernelOffsetFrom(const Field& src) const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| StoreNonPointer(&untag()->kernel_offset_, src.untag()->kernel_offset_); |
| #endif |
| } |
| |
| intptr_t Field::KernelDataProgramOffset() const { |
| const Object& obj = Object::Handle(untag()->owner()); |
| // During background JIT compilation field objects are copied, |
| // and the copy points to the original field via its owner field. |
| if (obj.IsField()) { |
| return Field::Cast(obj).KernelDataProgramOffset(); |
| } else if (obj.IsClass()) { |
| Library& lib = Library::Handle(Class::Cast(obj).library()); |
| return lib.kernel_offset(); |
| } |
| ASSERT(obj.IsPatchClass()); |
| return PatchClass::Cast(obj).library_kernel_offset(); |
| } |
| |
| void Field::SetFieldTypeSafe(const AbstractType& value) const { |
| ASSERT(IsOriginal()); |
| ASSERT(!value.IsNull()); |
| if (value.ptr() != type()) { |
| untag()->set_type(value.ptr()); |
| } |
| } |
| |
| // Called at finalization time. |
| void Field::SetFieldType(const AbstractType& value) const { |
| DEBUG_ASSERT( |
| IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| SetFieldTypeSafe(value); |
| } |
| |
| FieldPtr Field::New() { |
| ASSERT(Object::field_class() != Class::null()); |
| ObjectPtr raw = |
| Object::Allocate(Field::kClassId, Field::InstanceSize(), Heap::kOld, |
| Field::ContainsCompressedPointers()); |
| return static_cast<FieldPtr>(raw); |
| } |
| |
| void Field::InitializeNew(const Field& result, |
| const String& name, |
| bool is_static, |
| bool is_final, |
| bool is_const, |
| bool is_reflectable, |
| bool is_late, |
| const Object& owner, |
| TokenPosition token_pos, |
| TokenPosition end_token_pos) { |
| result.set_kind_bits(0); |
| result.set_name(name); |
| result.set_is_static(is_static); |
| if (is_static) { |
| result.set_field_id_unsafe(-1); |
| } else { |
| result.SetOffset(0, 0); |
| } |
| result.set_is_final(is_final); |
| result.set_is_const(is_const); |
| result.set_is_reflectable(is_reflectable); |
| result.set_is_late(is_late); |
| result.set_is_double_initialized_unsafe(false); |
| result.set_owner(owner); |
| result.set_token_pos(token_pos); |
| result.set_end_token_pos(end_token_pos); |
| result.set_has_nontrivial_initializer_unsafe(false); |
| result.set_has_initializer_unsafe(false); |
| if (FLAG_precompiled_mode) { |
| // May be updated by KernelLoader::ReadInferredType. |
| result.set_is_unboxing_candidate_unsafe(false); |
| } else { |
| result.set_is_unboxing_candidate_unsafe(!is_final && !is_late && |
| !is_static); |
| } |
| result.set_initializer_changed_after_initialization(false); |
| NOT_IN_PRECOMPILED(result.set_kernel_offset(0)); |
| result.set_has_pragma(false); |
| result.set_static_type_exactness_state( |
| StaticTypeExactnessState::NotTracking()); |
| auto isolate_group = IsolateGroup::Current(); |
| |
| // Use field guards if they are enabled and the isolate has never reloaded. |
| // TODO(johnmccutchan): The reload case assumes the worst case (everything is |
| // dynamic and possibly null). Attempt to relax this later. |
| #if defined(PRODUCT) |
| const bool use_guarded_cid = |
| FLAG_precompiled_mode || isolate_group->use_field_guards(); |
| #else |
| const bool use_guarded_cid = |
| FLAG_precompiled_mode || (isolate_group->use_field_guards() && |
| !isolate_group->HasAttemptedReload()); |
| #endif // defined(PRODUCT) |
| result.set_guarded_cid_unsafe(use_guarded_cid ? kIllegalCid : kDynamicCid); |
| result.set_is_nullable_unsafe(!use_guarded_cid); |
| result.set_guarded_list_length_in_object_offset_unsafe( |
| Field::kUnknownLengthOffset); |
| // Presently, we only attempt to remember the list length for final fields. |
| if (is_final && use_guarded_cid) { |
| result.set_guarded_list_length_unsafe(Field::kUnknownFixedLength); |
| } else { |
| result.set_guarded_list_length_unsafe(Field::kNoFixedLength); |
| } |
| } |
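| |
| // Informal note on the initial guard state chosen above: with field guards |
| // enabled, a new field starts out "unknown" (guarded_cid_ == kIllegalCid, |
| // non-nullable, list length untracked) and the first store narrows the |
| // guard to a concrete class id. With guards disabled, the field starts |
| // directly in the fully dynamic state (kDynamicCid, nullable), so no |
| // compiled code ever depends on its guard. |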
| |
| FieldPtr Field::New(const String& name, |
| bool is_static, |
| bool is_final, |
| bool is_const, |
| bool is_reflectable, |
| bool is_late, |
| const Object& owner, |
| const AbstractType& type, |
| TokenPosition token_pos, |
| TokenPosition end_token_pos) { |
| ASSERT(!owner.IsNull()); |
| const Field& result = Field::Handle(Field::New()); |
| InitializeNew(result, name, is_static, is_final, is_const, is_reflectable, |
| is_late, owner, token_pos, end_token_pos); |
| result.SetFieldTypeSafe(type); |
| return result.ptr(); |
| } |
| |
| FieldPtr Field::NewTopLevel(const String& name, |
| bool is_final, |
| bool is_const, |
| bool is_late, |
| const Object& owner, |
| TokenPosition token_pos, |
| TokenPosition end_token_pos) { |
| ASSERT(!owner.IsNull()); |
| const Field& result = Field::Handle(Field::New()); |
| InitializeNew(result, name, /*is_static=*/true, is_final, is_const, |
| /*is_reflectable=*/true, is_late, owner, token_pos, |
| end_token_pos); |
| return result.ptr(); |
| } |
| |
| FieldPtr Field::Clone(const Field& original) const { |
| if (original.IsNull()) { |
| return Field::null(); |
| } |
| ASSERT(original.IsOriginal()); |
| Field& clone = Field::Handle(); |
| clone ^= Object::Clone(*this, Heap::kOld); |
| clone.SetOriginal(original); |
| clone.InheritKernelOffsetFrom(original); |
| return clone.ptr(); |
| } |
| |
| int32_t Field::SourceFingerprint() const { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| return kernel::KernelSourceFingerprintHelper::CalculateFieldFingerprint( |
| *this); |
| #else |
| return 0; |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
| StringPtr Field::InitializingExpression() const { |
| UNREACHABLE(); |
| return String::null(); |
| } |
| |
| const char* Field::UserVisibleNameCString() const { |
| NoSafepointScope no_safepoint; |
| if (FLAG_show_internal_names) { |
| return String::Handle(name()).ToCString(); |
| } |
| return String::ScrubName(String::Handle(name()), is_extension_member()); |
| } |
| |
| StringPtr Field::UserVisibleName() const { |
| if (FLAG_show_internal_names) { |
| return name(); |
| } |
| return Symbols::New( |
| Thread::Current(), |
| String::ScrubName(String::Handle(name()), is_extension_member())); |
| } |
| |
| intptr_t Field::guarded_list_length() const { |
| return Smi::Value(untag()->guarded_list_length()); |
| } |
| |
| void Field::set_guarded_list_length_unsafe(intptr_t list_length) const { |
| ASSERT(IsOriginal()); |
| untag()->set_guarded_list_length(Smi::New(list_length)); |
| } |
| |
| intptr_t Field::guarded_list_length_in_object_offset() const { |
| return untag()->guarded_list_length_in_object_offset_ + kHeapObjectTag; |
| } |
| |
| void Field::set_guarded_list_length_in_object_offset_unsafe( |
| intptr_t list_length_offset) const { |
| ASSERT(IsOriginal()); |
| StoreNonPointer(&untag()->guarded_list_length_in_object_offset_, |
| static_cast<int8_t>(list_length_offset - kHeapObjectTag)); |
| ASSERT(guarded_list_length_in_object_offset() == list_length_offset); |
| } |
| |
| bool Field::NeedsSetter() const { |
| // Late fields always need a setter, unless they're static and non-final, or |
| // final with an initializer. |
| if (is_late()) { |
| if (is_static() && !is_final()) { |
| return false; |
| } |
| if (is_final() && has_initializer()) { |
| return false; |
| } |
| return true; |
| } |
| |
| // Non-late static fields never need a setter. |
| if (is_static()) { |
| return false; |
| } |
| |
| // Otherwise, the field only needs a setter if it isn't final. |
| return !is_final(); |
| } |
| |
| bool Field::NeedsGetter() const { |
| // All instance fields need a getter. |
| if (!is_static()) return true; |
| |
| // Static fields also need a getter if they have a non-trivial initializer, |
| // because the field must be initialized lazily. |
| if (has_nontrivial_initializer()) return true; |
| |
| // Static late fields with no initializer also need a getter, to check |
| // whether they have been initialized. |
| return is_late() && !has_initializer(); |
| } |
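| |
| // A few worked examples of the two predicates above (informal; the |
| // predicates themselves are authoritative): |
| //   instance `int x;`                    getter: yes  setter: yes |
| //   instance `final int x;`              getter: yes  setter: no |
| //   `static int? x;` (trivial init)      getter: no   setter: no |
| //   `static int x = f();` (nontrivial)   getter: yes  setter: no |
| //   instance `late final int x;`         getter: yes  setter: yes |
| //   instance `late final int x = f();`   getter: yes  setter: no |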
| |
| const char* Field::ToCString() const { |
| NoSafepointScope no_safepoint; |
| if (IsNull()) { |
| return "Field: null"; |
| } |
| const char* kF0 = is_static() ? " static" : ""; |
| const char* kF1 = is_late() ? " late" : ""; |
| const char* kF2 = is_final() ? " final" : ""; |
| const char* kF3 = is_const() ? " const" : ""; |
| const char* field_name = String::Handle(name()).ToCString(); |
| const Class& cls = Class::Handle(Owner()); |
| const char* cls_name = String::Handle(cls.Name()).ToCString(); |
| return OS::SCreate(Thread::Current()->zone(), "Field <%s.%s>:%s%s%s%s", |
| cls_name, field_name, kF0, kF1, kF2, kF3); |
| } |
| |
| // Return a closure object that gets (or sets) the contents of a static |
| // field f. The closure is cached in a static field named #f (##f in the |
| // case of a setter); the lookup below expects that field to exist already |
| // and to hold the closure. |
| InstancePtr Field::AccessorClosure(bool make_setter) const { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| ASSERT(is_static()); |
| const Class& field_owner = Class::Handle(zone, Owner()); |
| |
| String& closure_name = String::Handle(zone, this->name()); |
| closure_name = Symbols::FromConcat(thread, Symbols::HashMark(), closure_name); |
| if (make_setter) { |
| closure_name = |
| Symbols::FromConcat(thread, Symbols::HashMark(), closure_name); |
| } |
| |
| Field& closure_field = Field::Handle(zone); |
| closure_field = field_owner.LookupStaticField(closure_name); |
| if (!closure_field.IsNull()) { |
| ASSERT(closure_field.is_static()); |
| const Instance& closure = |
| Instance::Handle(zone, Instance::RawCast(closure_field.StaticValue())); |
| ASSERT(!closure.IsNull()); |
| ASSERT(closure.IsClosure()); |
| return closure.ptr(); |
| } |
| |
| UNREACHABLE(); |
| return Instance::null(); |
| } |
| |
| InstancePtr Field::GetterClosure() const { |
| return AccessorClosure(false); |
| } |
| |
| InstancePtr Field::SetterClosure() const { |
| return AccessorClosure(true); |
| } |
| |
| ArrayPtr Field::dependent_code() const { |
| DEBUG_ASSERT( |
| IsolateGroup::Current()->program_lock()->IsCurrentThreadReader()); |
| return untag()->dependent_code(); |
| } |
| |
| void Field::set_dependent_code(const Array& array) const { |
| ASSERT(IsOriginal()); |
| DEBUG_ASSERT( |
| IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| untag()->set_dependent_code(array.ptr()); |
| } |
| |
| class FieldDependentArray : public WeakCodeReferences { |
| public: |
| explicit FieldDependentArray(const Field& field) |
| : WeakCodeReferences(Array::Handle(field.dependent_code())), |
| field_(field) {} |
| |
| virtual void UpdateArrayTo(const Array& value) { |
| field_.set_dependent_code(value); |
| } |
| |
| virtual void ReportDeoptimization(const Code& code) { |
| if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) { |
| Function& function = Function::Handle(code.function()); |
| THR_Print("Deoptimizing %s because guard on field %s failed.\n", |
| function.ToFullyQualifiedCString(), field_.ToCString()); |
| } |
| } |
| |
| virtual void ReportSwitchingCode(const Code& code) { |
| if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) { |
| Function& function = Function::Handle(code.function()); |
| THR_Print( |
| "Switching '%s' to unoptimized code because guard" |
| " on field '%s' was violated.\n", |
| function.ToFullyQualifiedCString(), field_.ToCString()); |
| } |
| } |
| |
| private: |
| const Field& field_; |
| DISALLOW_COPY_AND_ASSIGN(FieldDependentArray); |
| }; |
| |
| void Field::RegisterDependentCode(const Code& code) const { |
| ASSERT(IsOriginal()); |
| DEBUG_ASSERT(IsMutatorOrAtDeoptSafepoint()); |
| ASSERT(code.is_optimized()); |
| FieldDependentArray a(*this); |
| a.Register(code); |
| } |
| |
| void Field::DeoptimizeDependentCode() const { |
| DEBUG_ASSERT( |
| IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| ASSERT(IsOriginal()); |
| FieldDependentArray a(*this); |
| if (FLAG_trace_deoptimization && a.HasCodes()) { |
| THR_Print("Deopt for field guard (field %s)\n", ToCString()); |
| } |
| a.DisableCode(); |
| } |
| |
| bool Field::IsConsistentWith(const Field& other) const { |
| return (untag()->guarded_cid_ == other.untag()->guarded_cid_) && |
| (untag()->is_nullable_ == other.untag()->is_nullable_) && |
| (untag()->guarded_list_length() == |
| other.untag()->guarded_list_length()) && |
| (is_unboxing_candidate() == other.is_unboxing_candidate()) && |
| (static_type_exactness_state().Encode() == |
| other.static_type_exactness_state().Encode()); |
| } |
| |
| bool Field::IsUninitialized() const { |
| Thread* thread = Thread::Current(); |
| const FieldTable* field_table = thread->isolate()->field_table(); |
| const ObjectPtr raw_value = field_table->At(field_id()); |
| ASSERT(raw_value != Object::transition_sentinel().ptr()); |
| return raw_value == Object::sentinel().ptr(); |
| } |
| |
| FunctionPtr Field::EnsureInitializerFunction() const { |
| ASSERT(has_nontrivial_initializer()); |
| ASSERT(IsOriginal()); |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| Function& initializer = Function::Handle(zone, InitializerFunction()); |
| if (initializer.IsNull()) { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| SafepointMutexLocker ml( |
| thread->isolate_group()->initializer_functions_mutex()); |
| // Double check after grabbing the lock. |
| initializer = InitializerFunction(); |
| if (initializer.IsNull()) { |
| initializer = kernel::CreateFieldInitializerFunction(thread, zone, *this); |
| } |
| #endif |
| } |
| return initializer.ptr(); |
| } |
| |
| void Field::SetInitializerFunction(const Function& initializer) const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| ASSERT(IsOriginal()); |
| ASSERT(IsolateGroup::Current() |
| ->initializer_functions_mutex() |
| ->IsOwnedByCurrentThread()); |
| // We have to ensure that all stores into the initializer function object |
| // happen before releasing the pointer to the initializer as it may be |
| // accessed without grabbing the lock. |
| untag()->set_initializer_function<std::memory_order_release>( |
| initializer.ptr()); |
| #endif |
| } |
| |
| bool Field::HasInitializerFunction() const { |
| return untag()->initializer_function() != Function::null(); |
| } |
| |
| ErrorPtr Field::InitializeInstance(const Instance& instance) const { |
| ASSERT(IsOriginal()); |
| ASSERT(is_instance()); |
| ASSERT(instance.GetField(*this) == Object::sentinel().ptr()); |
| Object& value = Object::Handle(); |
| |
| if (has_nontrivial_initializer()) { |
| const Function& initializer = Function::Handle(EnsureInitializerFunction()); |
| const Array& args = Array::Handle(Array::New(1)); |
| args.SetAt(0, instance); |
| value = DartEntry::InvokeFunction(initializer, args); |
| if (!value.IsNull() && value.IsError()) { |
| return Error::Cast(value).ptr(); |
| } |
| } else { |
| if (is_late() && !has_initializer()) { |
| Exceptions::ThrowLateFieldNotInitialized(String::Handle(name())); |
| UNREACHABLE(); |
| } |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| // Our trivial initializer is `null`. Any non-`null` initializer is |
| // non-trivial (see `KernelLoader::CheckForInitializer()`). |
| value = Object::null(); |
| #endif |
| } |
| ASSERT(value.IsNull() || value.IsInstance()); |
| if (is_late() && is_final() && |
| (instance.GetField(*this) != Object::sentinel().ptr())) { |
| Exceptions::ThrowLateFieldAssignedDuringInitialization( |
| String::Handle(name())); |
| UNREACHABLE(); |
| } |
| instance.SetField(*this, value); |
| return Error::null(); |
| } |
| |
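| // Informal sketch of the static-field initialization protocol used below: |
| // an uninitialized static slot holds Object::sentinel(). For non-late |
| // fields the slot is flipped to Object::transition_sentinel() while the |
| // initializer runs, so a re-entrant read detects and reports cyclic |
| // initialization; on success the computed value replaces the marker. Late |
| // fields skip the transition marker; late final fields instead re-check |
| // the slot before committing so that a store made during initialization |
| // is reported. |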
| ErrorPtr Field::InitializeStatic() const { |
| ASSERT(IsOriginal()); |
| ASSERT(is_static()); |
| if (StaticValue() == Object::sentinel().ptr()) { |
| auto& value = Object::Handle(); |
| if (is_late()) { |
| if (!has_initializer()) { |
| Exceptions::ThrowLateFieldNotInitialized(String::Handle(name())); |
| UNREACHABLE(); |
| } |
| value = EvaluateInitializer(); |
| if (value.IsError()) { |
| return Error::Cast(value).ptr(); |
| } |
| if (is_final() && (StaticValue() != Object::sentinel().ptr())) { |
| Exceptions::ThrowLateFieldAssignedDuringInitialization( |
| String::Handle(name())); |
| UNREACHABLE(); |
| } |
| } else { |
| SetStaticValue(Object::transition_sentinel()); |
| value = EvaluateInitializer(); |
| if (value.IsError()) { |
| SetStaticValue(Object::null_instance()); |
| return Error::Cast(value).ptr(); |
| } |
| } |
| ASSERT(value.IsNull() || value.IsInstance()); |
| SetStaticValue(value.IsNull() ? Instance::null_instance() |
| : Instance::Cast(value)); |
| return Error::null(); |
| } else if (StaticValue() == Object::transition_sentinel().ptr()) { |
| ASSERT(!is_late()); |
| const Array& ctor_args = Array::Handle(Array::New(1)); |
| const String& field_name = String::Handle(name()); |
| ctor_args.SetAt(0, field_name); |
| Exceptions::ThrowByType(Exceptions::kCyclicInitializationError, ctor_args); |
| UNREACHABLE(); |
| } |
| return Error::null(); |
| } |
| |
| ObjectPtr Field::StaticConstFieldValue() const { |
| ASSERT(is_static() && is_const()); |
| |
| auto thread = Thread::Current(); |
| auto zone = thread->zone(); |
| auto initial_field_table = thread->isolate_group()->initial_field_table(); |
| |
| // We can safely cache the value of the static const field in the initial |
| // field table. |
| auto& value = Object::Handle(zone, initial_field_table->At(field_id())); |
| if (value.ptr() == Object::sentinel().ptr()) { |
| ASSERT(has_initializer()); |
| value = EvaluateInitializer(); |
| if (!value.IsError()) { |
| ASSERT(value.IsNull() || value.IsInstance()); |
| SetStaticConstFieldValue(value.IsNull() ? Instance::null_instance() |
| : Instance::Cast(value)); |
| } |
| } |
| return value.ptr(); |
| } |
| |
| void Field::SetStaticConstFieldValue(const Instance& value, |
| bool assert_initializing_store) const { |
| auto thread = Thread::Current(); |
| auto initial_field_table = thread->isolate_group()->initial_field_table(); |
| |
| SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| ASSERT(initial_field_table->At(field_id()) == Object::sentinel().ptr() || |
| initial_field_table->At(field_id()) == value.ptr() || |
| !assert_initializing_store); |
| initial_field_table->SetAt(field_id(), value.IsNull() |
| ? Instance::null_instance().ptr() |
| : Instance::Cast(value).ptr()); |
| } |
| |
| ObjectPtr Field::EvaluateInitializer() const { |
| Thread* const thread = Thread::Current(); |
| ASSERT(thread->IsMutatorThread()); |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| if (is_static() && is_const()) { |
| return kernel::EvaluateStaticConstFieldInitializer(*this); |
| } |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| |
| NoOOBMessageScope no_msg_scope(thread); |
| NoReloadScope no_reload_scope(thread); |
| const Function& initializer = Function::Handle(EnsureInitializerFunction()); |
| return DartEntry::InvokeFunction(initializer, Object::empty_array()); |
| } |
| |
| static intptr_t GetListLength(const Object& value) { |
| if (value.IsTypedDataBase()) { |
| return TypedDataBase::Cast(value).Length(); |
| } else if (value.IsArray()) { |
| return Array::Cast(value).Length(); |
| } else if (value.IsGrowableObjectArray()) { |
| // List length is variable. |
| return Field::kNoFixedLength; |
| } |
| return Field::kNoFixedLength; |
| } |
| |
| static intptr_t GetListLengthOffset(intptr_t cid) { |
| if (IsTypedDataClassId(cid) || IsTypedDataViewClassId(cid) || |
| IsExternalTypedDataClassId(cid)) { |
| return TypedData::length_offset(); |
| } else if (cid == kArrayCid || cid == kImmutableArrayCid) { |
| return Array::length_offset(); |
| } else if (cid == kGrowableObjectArrayCid) { |
| // List length is variable. |
| return Field::kUnknownLengthOffset; |
| } |
| return Field::kUnknownLengthOffset; |
| } |
| |
| const char* Field::GuardedPropertiesAsCString() const { |
| if (guarded_cid() == kIllegalCid) { |
| return "<?>"; |
| } else if (guarded_cid() == kDynamicCid) { |
| ASSERT(!static_type_exactness_state().IsExactOrUninitialized()); |
| return "<*>"; |
| } |
| |
| Zone* zone = Thread::Current()->zone(); |
| |
| const char* exactness = ""; |
| if (static_type_exactness_state().IsTracking()) { |
| exactness = |
| zone->PrintToString(" {%s}", static_type_exactness_state().ToCString()); |
| } |
| |
| const Class& cls = |
| Class::Handle(IsolateGroup::Current()->class_table()->At(guarded_cid())); |
| const char* class_name = String::Handle(cls.Name()).ToCString(); |
| |
| if (IsBuiltinListClassId(guarded_cid()) && !is_nullable() && is_final()) { |
| ASSERT(guarded_list_length() != kUnknownFixedLength); |
| if (guarded_list_length() == kNoFixedLength) { |
| return zone->PrintToString("<%s [*]%s>", class_name, exactness); |
| } else { |
| return zone->PrintToString( |
| "<%s [%" Pd " @%" Pd "]%s>", class_name, guarded_list_length(), |
| guarded_list_length_in_object_offset(), exactness); |
| } |
| } |
| |
| return zone->PrintToString("<%s %s%s>", |
| is_nullable() ? "nullable" : "not-nullable", |
| class_name, exactness); |
| } |
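| |
| // Sample outputs of the function above (illustrative): "<?>" for a field |
| // that was never assigned, "<*>" for a fully dynamic one, strings like |
| // "<not-nullable _Smi>" once a single class id has been observed, and |
| // strings like "<_List [3 @8]>" when a fixed list length (with its |
| // in-object offset) is also being tracked. |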
| |
| void Field::InitializeGuardedListLengthInObjectOffset(bool unsafe) const { |
| auto setter = unsafe ? &Field::set_guarded_list_length_in_object_offset_unsafe |
| : &Field::set_guarded_list_length_in_object_offset; |
| ASSERT(IsOriginal()); |
| if (needs_length_check() && |
| (guarded_list_length() != Field::kUnknownFixedLength)) { |
| const intptr_t offset = GetListLengthOffset(guarded_cid()); |
| (this->*setter)(offset); |
| ASSERT(offset != Field::kUnknownLengthOffset); |
| } else { |
| (this->*setter)(Field::kUnknownLengthOffset); |
| } |
| } |
| |
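| // The transitions implemented below form a small lattice (informal |
| // sketch): |
| // |
| //   kIllegalCid (never assigned) |
| //     --first store of a value with class id C--> |
| //   guard C (nullable iff C == kNullCid), later widened to "C or null" |
| //   by a null store |
| //     --store of a value with class id D, D != C and D != kNullCid--> |
| //   kDynamicCid, nullable (tracking abandoned) |
| // |
| // The return value reports whether the guard changed, i.e. whether code |
| // compiled against the old guard has to be deoptimized. |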
| bool Field::UpdateGuardedCidAndLength(const Object& value) const { |
| ASSERT(IsOriginal()); |
| const intptr_t cid = value.GetClassId(); |
| |
| if (guarded_cid() == kIllegalCid) { |
| // Field is assigned first time. |
| set_guarded_cid(cid); |
| set_is_nullable(cid == kNullCid); |
| |
| // Start tracking length if needed. |
| ASSERT((guarded_list_length() == Field::kUnknownFixedLength) || |
| (guarded_list_length() == Field::kNoFixedLength)); |
| if (needs_length_check()) { |
| ASSERT(guarded_list_length() == Field::kUnknownFixedLength); |
| set_guarded_list_length(GetListLength(value)); |
| InitializeGuardedListLengthInObjectOffset(); |
| } |
| |
| if (FLAG_trace_field_guards) { |
| THR_Print(" => %s\n", GuardedPropertiesAsCString()); |
| } |
| |
| return false; |
| } |
| |
| if ((cid == guarded_cid()) || ((cid == kNullCid) && is_nullable())) { |
| // Class id of the assigned value matches expected class id and nullability. |
| |
| // If we are tracking the length, check whether it still matches. |
| if (needs_length_check() && |
| (guarded_list_length() != GetListLength(value))) { |
| ASSERT(guarded_list_length() != Field::kUnknownFixedLength); |
| set_guarded_list_length(Field::kNoFixedLength); |
| set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset); |
| return true; |
| } |
| |
| // Everything matches. |
| return false; |
| } |
| |
| if ((cid == kNullCid) && !is_nullable()) { |
| // Assigning null value to a non-nullable field makes it nullable. |
| set_is_nullable(true); |
| } else if ((cid != kNullCid) && (guarded_cid() == kNullCid)) { |
| // Assigning non-null value to a field that previously contained only null |
| // turns it into a nullable field with the given class id. |
| ASSERT(is_nullable()); |
| set_guarded_cid(cid); |
| } else { |
| // Give up on tracking class id of values contained in this field. |
| ASSERT(guarded_cid() != cid); |
| set_guarded_cid(kDynamicCid); |
| set_is_nullable(true); |
| } |
| |
| // If we were tracking the length, drop the collected feedback. |
| if (needs_length_check()) { |
| ASSERT(guarded_list_length() != Field::kUnknownFixedLength); |
| set_guarded_list_length(Field::kNoFixedLength); |
| set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset); |
| } |
| |
| // Expected class id or nullability of the field changed. |
| return true; |
| } |
| |
| // Given the type G<T0, ..., Tn> and class C<U0, ..., Un> find path to C at G. |
| // This path can be used to compute type arguments of C at G. |
| // |
| // Note: we are relying on the restriction that the same class can only occur |
| // once among the supertypes. |
| static bool FindInstantiationOf(const Type& type, |
| const Class& cls, |
| GrowableArray<const AbstractType*>* path, |
| bool consider_only_super_classes) { |
| if (type.type_class() == cls.ptr()) { |
| return true; // Found instantiation. |
| } |
| |
| Class& cls2 = Class::Handle(); |
| AbstractType& super_type = AbstractType::Handle(); |
| super_type = cls.super_type(); |
| if (!super_type.IsNull() && !super_type.IsObjectType()) { |
| cls2 = super_type.type_class(); |
| path->Add(&super_type); |
| if (FindInstantiationOf(type, cls2, path, consider_only_super_classes)) { |
| return true; // Found instantiation. |
| } |
| path->RemoveLast(); |
| } |
| |
| if (!consider_only_super_classes) { |
| Array& super_interfaces = Array::Handle(cls.interfaces()); |
| for (intptr_t i = 0; i < super_interfaces.Length(); i++) { |
| super_type ^= super_interfaces.At(i); |
| cls2 = super_type.type_class(); |
| path->Add(&super_type); |
| if (FindInstantiationOf(type, cls2, path, |
| /*consider_only_super_classes=*/false)) { |
| return true; // Found instantiation. |
| } |
| path->RemoveLast(); |
| } |
| } |
| |
| return false; // Not found. |
| } |
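| |
| // Illustrative example for the function above (hypothetical classes): |
| // given |
| //   class B<T> extends A<List<T>> {} |
| // FindInstantiationOf(type A<int>, class B, path) succeeds and leaves |
| // path = [A<List<T>>], the chain of supertypes leading from B to A. |
| // Instantiating that chain against B's actual type arguments then yields |
| // the type arguments of A at B. |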
| |
| void Field::SetStaticValue(const Object& value) const { |
| auto thread = Thread::Current(); |
| ASSERT(thread->IsMutatorThread()); |
| ASSERT(value.IsNull() || value.IsSentinel() || value.IsInstance()); |
| |
| ASSERT(is_static()); // Valid only for static dart fields. |
| const intptr_t id = field_id(); |
| ASSERT(id >= 0); |
| |
| SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| thread->isolate()->field_table()->SetAt(id, value.ptr()); |
| } |
| |
| static StaticTypeExactnessState TrivialTypeExactnessFor(const Class& cls) { |
| const intptr_t type_arguments_offset = cls.host_type_arguments_field_offset(); |
| ASSERT(type_arguments_offset != Class::kNoTypeArguments); |
| if (StaticTypeExactnessState::CanRepresentAsTriviallyExact( |
| type_arguments_offset / kCompressedWordSize)) { |
| return StaticTypeExactnessState::TriviallyExact(type_arguments_offset / |
| kCompressedWordSize); |
| } else { |
| return StaticTypeExactnessState::NotExact(); |
| } |
| } |
| |
| static const char* SafeTypeArgumentsToCString(const TypeArguments& args) { |
| return (args.ptr() == TypeArguments::null()) ? "<null>" : args.ToCString(); |
| } |
| |
| StaticTypeExactnessState StaticTypeExactnessState::Compute( |
| const Type& static_type, |
| const Instance& value, |
| bool print_trace /* = false */) { |
| ASSERT(!value.IsNull()); // Should be handled by the caller. |
| ASSERT(value.ptr() != Object::sentinel().ptr()); |
| ASSERT(value.ptr() != Object::transition_sentinel().ptr()); |
| |
| const TypeArguments& static_type_args = |
| TypeArguments::Handle(static_type.arguments()); |
| |
| TypeArguments& args = TypeArguments::Handle(); |
| |
| ASSERT(static_type.IsFinalized()); |
| const Class& cls = Class::Handle(value.clazz()); |
| GrowableArray<const AbstractType*> path(10); |
| |
| bool is_super_class = true; |
| if (!FindInstantiationOf(static_type, cls, &path, |
| /*consider_only_super_classes=*/true)) { |
| is_super_class = false; |
| bool found_super_interface = FindInstantiationOf( |
| static_type, cls, &path, /*consider_only_super_classes=*/false); |
| ASSERT(found_super_interface); |
| } |
| |
| // Trivial case: field has type G<T0, ..., Tn> and value has type |
| // G<U0, ..., Un>. Check if type arguments match. |
| if (path.is_empty()) { |
| ASSERT(cls.ptr() == static_type.type_class()); |
| args = value.GetTypeArguments(); |
| // TODO(dartbug.com/34170) Evaluate if comparing relevant subvectors (that |
| // disregards superclass own arguments) improves precision of the |
| // tracking. |
| if (args.ptr() == static_type_args.ptr()) { |
| return TrivialTypeExactnessFor(cls); |
| } |
| |
| if (print_trace) { |
| THR_Print(" expected %s got %s type arguments\n", |
| SafeTypeArgumentsToCString(static_type_args), |
| SafeTypeArgumentsToCString(args)); |
| } |
| return StaticTypeExactnessState::NotExact(); |
| } |
| |
| // Value has type C<U0, ..., Un> and field has type G<T0, ..., Tn> and G != C. |
| // Compute C<X0, ..., Xn> at G (Xi are free type arguments). |
| // Path array contains a chain of immediate supertypes S0 <: S1 <: ... Sn, |
| // such that S0 is an immediate supertype of C and Sn is G<...>. |
| // Each Si might depend on type parameters of the previous supertype S{i-1}. |
| // To compute C<X0, ..., Xn> at G we walk the chain backwards and |
| // instantiate Si using type parameters of S{i-1} which gives us a type |
| // depending on type parameters of S{i-2}. |
| AbstractType& type = AbstractType::Handle(path.Last()->ptr()); |
| for (intptr_t i = path.length() - 2; (i >= 0) && !type.IsInstantiated(); |
| i--) { |
| args = path[i]->arguments(); |
| type = type.InstantiateFrom(args, TypeArguments::null_type_arguments(), |
| kAllFree, Heap::kNew); |
| } |
| |
| if (type.IsInstantiated()) { |
| // C<X0, ..., Xn> at G is fully instantiated and does not depend on |
| // Xi. In this case just check if type arguments match. |
| args = type.arguments(); |
| if (args.Equals(static_type_args)) { |
| return is_super_class ? StaticTypeExactnessState::HasExactSuperClass() |
| : StaticTypeExactnessState::HasExactSuperType(); |
| } |
| |
| if (print_trace) { |
| THR_Print(" expected %s got %s type arguments\n", |
| SafeTypeArgumentsToCString(static_type_args), |
| SafeTypeArgumentsToCString(args)); |
| } |
| |
| return StaticTypeExactnessState::NotExact(); |
| } |
| |
| // The most complicated case: C<X0, ..., Xn> at G depends on the Xi values. |
| // To compare type arguments we would need to instantiate it fully from the |
| // value's type arguments and compare the result with <T0, ..., Tn>. |
| // However, this would complicate the fast path in the native code. To avoid |
| // this complication we optimize for the trivial case: we check if |
| // C<X0, ..., Xn> at G is exactly G<X0, ..., Xn>, which means we can simply |
| // compare the value's type arguments (<U0, ..., Un>) to the field's type |
| // arguments (<T0, ..., Tn>) to establish whether the field type is exact. |
| ASSERT(cls.IsGeneric()); |
| const intptr_t num_type_params = cls.NumTypeParameters(); |
| bool trivial_case = |
| (num_type_params == |
| Class::Handle(static_type.type_class()).NumTypeParameters()) && |
| (value.GetTypeArguments() == static_type.arguments()); |
| if (!trivial_case && FLAG_trace_field_guards) { |
| THR_Print("Not a simple case: %" Pd " vs %" Pd |
| " type parameters, %s vs %s type arguments\n", |
| num_type_params, |
| Class::Handle(static_type.type_class()).NumTypeParameters(), |
| SafeTypeArgumentsToCString( |
| TypeArguments::Handle(value.GetTypeArguments())), |
| SafeTypeArgumentsToCString(static_type_args)); |
| } |
| |
| AbstractType& type_arg = AbstractType::Handle(); |
| args = type.arguments(); |
| for (intptr_t i = 0; (i < num_type_params) && trivial_case; i++) { |
| type_arg = args.TypeAt(i); |
| if (!type_arg.IsTypeParameter() || |
| (TypeParameter::Cast(type_arg).index() != i)) { |
| if (FLAG_trace_field_guards) { |
| THR_Print(" => encountered %s at index %" Pd "\n", |
| type_arg.ToCString(), i); |
| } |
| trivial_case = false; |
| } |
| } |
| |
| return trivial_case ? TrivialTypeExactnessFor(cls) |
| : StaticTypeExactnessState::NotExact(); |
| } |
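| |
| // Informal summary of the outcomes above: TriviallyExact(offset) when the |
| // value's own type-argument vector can be compared directly against the |
| // field's static type arguments, HasExactSuperType()/HasExactSuperClass() |
| // when a fully instantiated supertype matches them exactly, and NotExact() |
| // in every other case. |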
| |
| const char* StaticTypeExactnessState::ToCString() const { |
| if (!IsTracking()) { |
| return "not-tracking"; |
| } else if (!IsExactOrUninitialized()) { |
| return "not-exact"; |
| } else if (IsTriviallyExact()) { |
| return Thread::Current()->zone()->PrintToString( |
| "trivially-exact(%hhu)", GetTypeArgumentsOffsetInWords()); |
| } else if (IsHasExactSuperType()) { |
| return "has-exact-super-type"; |
| } else if (IsHasExactSuperClass()) { |
| return "has-exact-super-class"; |
| } else { |
| ASSERT(IsUninitialized()); |
| return "uninitialized-exactness"; |
| } |
| } |
| |
| bool Field::UpdateGuardedExactnessState(const Object& value) const { |
| if (!static_type_exactness_state().IsExactOrUninitialized()) { |
| // Nothing to update. |
| return false; |
| } |
| |
| if (guarded_cid() == kDynamicCid) { |
| if (FLAG_trace_field_guards) { |
| THR_Print( |
| " => switching off exactness tracking because guarded cid is " |
| "dynamic\n"); |
| } |
| set_static_type_exactness_state(StaticTypeExactnessState::NotExact()); |
| return true; // Invalidate. |
| } |
| |
| // If we are storing null into a field or we have an exact super type |
| // then there is nothing to do. |
| if (value.IsNull() || static_type_exactness_state().IsHasExactSuperType() || |
| static_type_exactness_state().IsHasExactSuperClass()) { |
| return false; |
| } |
| |
| // If we are storing a non-null value into a field that is considered |
| // to be trivially exact then we need to check if value has an appropriate |
| // type. |
| ASSERT(guarded_cid() != kNullCid); |
| |
| const Type& field_type = Type::Cast(AbstractType::Handle(type())); |
| const TypeArguments& field_type_args = |
| TypeArguments::Handle(field_type.arguments()); |
| |
| const Instance& instance = Instance::Cast(value); |
| TypeArguments& args = TypeArguments::Handle(); |
| if (static_type_exactness_state().IsTriviallyExact()) { |
| args = instance.GetTypeArguments(); |
| if (args.ptr() == field_type_args.ptr()) { |
| return false; |
| } |
| |
| if (FLAG_trace_field_guards) { |
| THR_Print(" expected %s got %s type arguments\n", |
| field_type_args.ToCString(), args.ToCString()); |
| } |
| |
| set_static_type_exactness_state(StaticTypeExactnessState::NotExact()); |
| return true; |
| } |
| |
| ASSERT(static_type_exactness_state().IsUninitialized()); |
| set_static_type_exactness_state(StaticTypeExactnessState::Compute( |
| field_type, instance, FLAG_trace_field_guards)); |
| return true; |
| } |
| |
| void Field::RecordStore(const Object& value) const { |
| ASSERT(IsOriginal()); |
| if (!IsolateGroup::Current()->use_field_guards()) { |
| return; |
| } |
| |
| // We should never try to record a sentinel. |
| ASSERT(value.ptr() != Object::sentinel().ptr()); |
| |
| Thread* const thread = Thread::Current(); |
| SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| if ((guarded_cid() == kDynamicCid) || |
| (is_nullable() && value.ptr() == Object::null())) { |
| // Nothing to do: the field is not guarded or we are storing null into |
| // a nullable field. |
| return; |
| } |
| |
| if (FLAG_trace_field_guards) { |
| THR_Print("Store %s %s <- %s\n", ToCString(), GuardedPropertiesAsCString(), |
| value.ToCString()); |
| } |
| |
| bool invalidate = false; |
| if (UpdateGuardedCidAndLength(value)) { |
| invalidate = true; |
| } |
| if (UpdateGuardedExactnessState(value)) { |
| invalidate = true; |
| } |
| |
| if (invalidate) { |
| if (FLAG_trace_field_guards) { |
| THR_Print(" => %s\n", GuardedPropertiesAsCString()); |
| } |
| |
| DeoptimizeDependentCode(); |
| } |
| } |
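| |
| // Behavioral note (a summary, not an exhaustive caller list): RecordStore |
| // is the guard-updating hook invoked when a value is stored into a guarded |
| // field; it is what triggers DeoptimizeDependentCode() in response to a |
| // store that violates the current guard. |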
| |
| void Field::ForceDynamicGuardedCidAndLength() const { |
| // Assume nothing about this field. |
| set_is_unboxing_candidate(false); |
| set_guarded_cid(kDynamicCid); |
| set_is_nullable(true); |
| set_guarded_list_length(Field::kNoFixedLength); |
| set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset); |
| if (static_type_exactness_state().IsTracking()) { |
| set_static_type_exactness_state(StaticTypeExactnessState::NotExact()); |
| } |
| // Drop any code that relied on the above assumptions. |
| DeoptimizeDependentCode(); |
| } |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| void Field::set_type_test_cache(const SubtypeTestCache& cache) const { |
| untag()->set_type_test_cache(cache.ptr()); |
| } |
| #endif |
| |
| StringPtr Script::resolved_url() const { |
| #if defined(DART_PRECOMPILER) |
| return String::RawCast( |
| WeakSerializationReference::Unwrap(untag()->resolved_url())); |
| #else |
| return untag()->resolved_url(); |
| #endif |
| } |
| |
| bool Script::HasSource() const { |
| return untag()->source() != String::null(); |
| } |
| |
| StringPtr Script::Source() const { |
| return untag()->source(); |
| } |
| |
| bool Script::IsPartOfDartColonLibrary() const { |
| const String& script_url = String::Handle(url()); |
| return (script_url.StartsWith(Symbols::DartScheme()) || |
| script_url.StartsWith(Symbols::DartSchemePrivate())); |
| } |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| void Script::LoadSourceFromKernel(const uint8_t* kernel_buffer, |
| intptr_t kernel_buffer_len) const { |
| String& uri = String::Handle(resolved_url()); |
| String& source = String::Handle(kernel::KernelLoader::FindSourceForScript( |
| kernel_buffer, kernel_buffer_len, uri)); |
| set_source(source); |
| } |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| |
| void Script::set_kernel_program_info(const KernelProgramInfo& info) const { |
| untag()->set_kernel_program_info(info.ptr()); |
| } |
| |
| void Script::set_kernel_script_index(const intptr_t kernel_script_index) const { |
| StoreNonPointer(&untag()->kernel_script_index_, kernel_script_index); |
| } |
| |
| TypedDataPtr Script::kernel_string_offsets() const { |
| KernelProgramInfo& program_info = |
| KernelProgramInfo::Handle(kernel_program_info()); |
| ASSERT(!program_info.IsNull()); |
| return program_info.string_offsets(); |
| } |
| |
| void Script::LookupSourceAndLineStarts(Zone* zone) const { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| if (!IsLazyLookupSourceAndLineStarts()) { |
| return; |
| } |
| const String& uri = String::Handle(zone, resolved_url()); |
| ASSERT(uri.IsSymbol()); |
| if (uri.Length() > 0) { |
| // Entry included only to provide URI - actual source should already exist |
| // in the VM, so try to find it. |
| Library& lib = Library::Handle(zone); |
| Script& script = Script::Handle(zone); |
| const GrowableObjectArray& libs = GrowableObjectArray::Handle( |
| zone, IsolateGroup::Current()->object_store()->libraries()); |
| for (intptr_t i = 0; i < libs.Length(); i++) { |
| lib ^= libs.At(i); |
| script = lib.LookupScript(uri, /* useResolvedUri = */ true); |
| if (!script.IsNull()) { |
| const auto& source = String::Handle(zone, script.Source()); |
| const auto& starts = TypedData::Handle(zone, script.line_starts()); |
| if (!source.IsNull() || !starts.IsNull()) { |
| set_source(source); |
| set_line_starts(starts); |
| break; |
| } |
| } |
| } |
| } |
| SetLazyLookupSourceAndLineStarts(false); |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
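| // The array built below follows a token-position-table layout (informal |
| // sketch): for every source line that has debug positions it appends a |
| // null separator, the 1-based line number, and then (tokenPos, column) |
| // pairs for each token on that line. |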
| GrowableObjectArrayPtr Script::GenerateLineNumberArray() const { |
| Zone* zone = Thread::Current()->zone(); |
| const GrowableObjectArray& info = |
| GrowableObjectArray::Handle(zone, GrowableObjectArray::New()); |
| const Object& line_separator = Object::Handle(zone); |
| LookupSourceAndLineStarts(zone); |
| if (line_starts() == TypedData::null()) { |
| // Scripts in the AOT snapshot do not have a line starts array. |
| // A well-formed line number array has a leading null. |
| info.Add(line_separator); // New line. |
| return info.ptr(); |
| } |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| Smi& value = Smi::Handle(zone); |
| const TypedData& line_starts_data = TypedData::Handle(zone, line_starts()); |
| intptr_t line_count = line_starts_data.Length(); |
| const Array& debug_positions_array = Array::Handle(debug_positions()); |
| intptr_t token_count = debug_positions_array.Length(); |
| int token_index = 0; |
| |
| kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone); |
| intptr_t previous_start = 0; |
| for (int line_index = 0; line_index < line_count; ++line_index) { |
| intptr_t start = previous_start + line_starts_reader.DeltaAt(line_index); |
| // Output the rest of the tokens if we have no next line. |
| intptr_t end = TokenPosition::kMaxSourcePos; |
| if (line_index + 1 < line_count) { |
| end = start + line_starts_reader.DeltaAt(line_index + 1); |
| } |
| bool first = true; |
| while (token_index < token_count) { |
| value ^= debug_positions_array.At(token_index); |
| intptr_t debug_position = value.Value(); |
| if (debug_position >= end) break; |
| |
| if (first) { |
| info.Add(line_separator); // New line. |
| value = Smi::New(line_index + 1); // Line number. |
| info.Add(value); |
| first = false; |
| } |
| |
| value ^= debug_positions_array.At(token_index); |
| info.Add(value); // Token position. |
| value = Smi::New(debug_position - start + 1); // Column. |
| info.Add(value); |
| ++token_index; |
| } |
| previous_start = start; |
| } |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| return info.ptr(); |
| } |
| |
| TokenPosition Script::MaxPosition() const { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| if (HasCachedMaxPosition()) { |
| return TokenPosition::Deserialize( |
| UntaggedScript::CachedMaxPositionBitField::decode( |
| untag()->flags_and_max_position_)); |
| } |
| auto const zone = Thread::Current()->zone(); |
| LookupSourceAndLineStarts(zone); |
| if (!HasCachedMaxPosition() && line_starts() != TypedData::null()) { |
| const auto& starts = TypedData::Handle(zone, line_starts()); |
| kernel::KernelLineStartsReader reader(starts, zone); |
| const intptr_t max_position = reader.MaxPosition(); |
| SetCachedMaxPosition(max_position); |
| SetHasCachedMaxPosition(true); |
| return TokenPosition::Deserialize(max_position); |
| } |
| #endif |
| return TokenPosition::kNoSource; |
| } |
| |
| void Script::set_url(const String& value) const { |
| untag()->set_url(value.ptr()); |
| } |
| |
| void Script::set_resolved_url(const String& value) const { |
| untag()->set_resolved_url(value.ptr()); |
| } |
| |
| void Script::set_source(const String& value) const { |
| untag()->set_source(value.ptr()); |
| } |
| |
| void Script::set_line_starts(const TypedData& value) const { |
| untag()->set_line_starts(value.ptr()); |
| } |
| |
| #if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME) |
| void Script::set_constant_coverage(const ExternalTypedData& value) const { |
| untag()->set_constant_coverage(value.ptr()); |
| } |
| |
| ExternalTypedDataPtr Script::constant_coverage() const { |
| return untag()->constant_coverage(); |
| } |
| #endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME) |
| |
| void Script::set_debug_positions(const Array& value) const { |
| untag()->set_debug_positions(value.ptr()); |
| } |
| |
| TypedDataPtr Script::line_starts() const { |
| return untag()->line_starts(); |
| } |
| |
| ArrayPtr Script::debug_positions() const { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| Array& debug_positions_array = Array::Handle(untag()->debug_positions()); |
| if (debug_positions_array.IsNull()) { |
| // This is created lazily. Now we need it. |
| kernel::CollectTokenPositionsFor(*this); |
| } |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| return untag()->debug_positions(); |
| } |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| void Script::SetLazyLookupSourceAndLineStarts(bool value) const { |
| StoreNonPointer(&untag()->flags_and_max_position_, |
| UntaggedScript::LazyLookupSourceAndLineStartsBit::update( |
| value, untag()->flags_and_max_position_)); |
| } |
| |
| bool Script::IsLazyLookupSourceAndLineStarts() const { |
| return UntaggedScript::LazyLookupSourceAndLineStartsBit::decode( |
| untag()->flags_and_max_position_); |
| } |
| |
| bool Script::HasCachedMaxPosition() const { |
| return UntaggedScript::HasCachedMaxPositionBit::decode( |
| untag()->flags_and_max_position_); |
| } |
| |
| void Script::SetHasCachedMaxPosition(bool value) const { |
| StoreNonPointer(&untag()->flags_and_max_position_, |
| UntaggedScript::HasCachedMaxPositionBit::update( |
| value, untag()->flags_and_max_position_)); |
| } |
| |
| void Script::SetCachedMaxPosition(intptr_t value) const { |
| StoreNonPointer(&untag()->flags_and_max_position_, |
| UntaggedScript::CachedMaxPositionBitField::update( |
| value, untag()->flags_and_max_position_)); |
| } |
| #endif |
| |
| void Script::set_load_timestamp(int64_t value) const { |
| StoreNonPointer(&untag()->load_timestamp_, value); |
| } |
| |
| bool Script::IsValidTokenPosition(TokenPosition token_pos) const { |
| const TokenPosition& max_position = MaxPosition(); |
| // We may end up with scripts that have the empty string as a source file |
| // in testing and the like, so allow any token position when the max position |
| // is 0 as well as when it is kNoSource. |
| return !max_position.IsReal() || !token_pos.IsReal() || |
| max_position.Pos() == 0 || token_pos <= max_position; |
| } |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| static bool IsLetter(int32_t c) { |
| return (('A' <= c) && (c <= 'Z')) || (('a' <= c) && (c <= 'z')); |
| } |
| |
| static bool IsDecimalDigit(int32_t c) { |
| return '0' <= c && c <= '9'; |
| } |
| |
| static bool IsIdentStartChar(int32_t c) { |
| return IsLetter(c) || (c == '_') || (c == '$'); |
| } |
| |
| static bool IsIdentChar(int32_t c) { |
| return IsLetter(c) || IsDecimalDigit(c) || (c == '_') || (c == '$'); |
| } |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| |
| bool Script::GetTokenLocation(const TokenPosition& token_pos, |
| intptr_t* line, |
| intptr_t* column) const { |
| ASSERT(line != nullptr); |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| // Scripts in the AOT snapshot do not have a line starts array. |
| return false; |
| #else |
| if (!token_pos.IsReal()) return false; |
| |
| auto const zone = Thread::Current()->zone(); |
| LookupSourceAndLineStarts(zone); |
| const TypedData& line_starts_data = TypedData::Handle(zone, line_starts()); |
| if (line_starts_data.IsNull()) return false; |
| kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone); |
| return line_starts_reader.LocationForPosition(token_pos.Pos(), line, column); |
| #endif // defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
| intptr_t Script::GetTokenLength(const TokenPosition& token_pos) const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| // Scripts in the AOT snapshot do not have their source. |
| return -1; |
| #else |
| if (!HasSource() || !token_pos.IsReal()) return -1; |
| auto const zone = Thread::Current()->zone(); |
| LookupSourceAndLineStarts(zone); |
| // We don't explicitly save this data: load the source and find it there. |
| const String& source = String::Handle(zone, Source()); |
| const intptr_t start = token_pos.Pos(); |
| if (start >= source.Length()) return -1; // Can't determine token_len. |
| intptr_t end = start; |
| if (IsIdentStartChar(source.CharAt(end++))) { |
| for (; end < source.Length(); ++end) { |
| if (!IsIdentChar(source.CharAt(end))) break; |
| } |
| } |
| return end - start; |
| #endif |
| } |
| |
| bool Script::TokenRangeAtLine(intptr_t line_number, |
| TokenPosition* first_token_index, |
| TokenPosition* last_token_index) const { |
| ASSERT(first_token_index != nullptr && last_token_index != nullptr); |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| // Scripts in the AOT snapshot do not have a line starts array. |
| return false; |
| #else |
| // Line numbers are 1-indexed. |
| if (line_number <= 0) return false; |
| Zone* zone = Thread::Current()->zone(); |
| LookupSourceAndLineStarts(zone); |
| const TypedData& line_starts_data = TypedData::Handle(zone, line_starts()); |
| kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone); |
| if (!line_starts_reader.TokenRangeAtLine(line_number, first_token_index, |
| last_token_index)) { |
| return false; |
| } |
| #if defined(DEBUG) |
| intptr_t source_length; |
| if (!HasSource()) { |
| Smi& value = Smi::Handle(zone); |
| const Array& debug_positions_array = Array::Handle(zone, debug_positions()); |
| value ^= debug_positions_array.At(debug_positions_array.Length() - 1); |
| source_length = value.Value(); |
| } else { |
| const String& source = String::Handle(zone, Source()); |
| source_length = source.Length(); |
| } |
| ASSERT(last_token_index->Serialize() <= source_length); |
| #endif |
| return true; |
| #endif // defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
| // Returns the index in the given source string for the given (1-based) absolute |
| // line and column numbers. The line and column offsets are used to calculate |
| // the absolute line and column number for the starting index in the source. |
| // |
| // If the given line number is outside the range of lines represented by the |
| // source, the given column number is invalid for the given line, or a |
| // negative starting index is given, a negative value is returned to |
| // indicate failure. |
| static intptr_t GetRelativeSourceIndex(const String& src, |
| intptr_t line, |
| intptr_t line_offset = 0, |
| intptr_t column = 1, |
| intptr_t column_offset = 0, |
| intptr_t starting_index = 0) { |
| if (starting_index < 0 || line < 1 || column < 1 || line <= line_offset || |
| (line == line_offset + 1 && column <= column_offset)) { |
| return -1; |
| } |
| intptr_t len = src.Length(); |
| intptr_t current_line = line_offset + 1; |
| intptr_t current_index = starting_index; |
| for (; current_index < len; current_index++) { |
| if (current_line == line) { |
| break; |
| } |
| const uint16_t c = src.CharAt(current_index); |
| if (c == '\n' || c == '\r') { |
| current_line++; |
| } |
| if (c == '\r' && current_index + 1 < len && |
| src.CharAt(current_index + 1) == '\n') { |
| // \r\n is treated as a single line terminator. |
| current_index++; |
| } |
| } |
| if (current_line != line) { |
| return -1; |
| } |
| // Only adjust with column offset when still on the first line. |
| intptr_t current_column = 1 + (line == line_offset + 1 ? column_offset : 0); |
| for (; current_index < len; current_index++, current_column++) { |
| if (current_column == column) { |
| return current_index; |
| } |
| const uint16_t c = src.CharAt(current_index); |
| if (c == '\n' || c == '\r') { |
| break; |
| } |
| } |
| // Check for a column value representing the source's end. |
| if (current_column == column) { |
| return current_index; |
| } |
| return -1; |
| } |
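| |
| // Worked example (illustrative): for src = "ab\ncd", |
| //   GetRelativeSourceIndex(src, /*line=*/2, /*line_offset=*/0, |
| //                          /*column=*/2) |
| // scans past the '\n' at index 2, reaches line 2, and returns index 4 |
| // (the 'd'). An out-of-range line or column yields -1. |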
| |
| StringPtr Script::GetLine(intptr_t line_number, Heap::Space space) const { |
| if (!HasSource()) { |
| return Symbols::OptimizedOut().ptr(); |
| } |
| const String& src = String::Handle(Source()); |
| const intptr_t start = |
| GetRelativeSourceIndex(src, line_number, line_offset()); |
| if (start < 0) { |
| return Symbols::Empty().ptr(); |
| } |
| intptr_t end = start; |
| for (; end < src.Length(); end++) { |
| const uint16_t c = src.CharAt(end); |
| if (c == '\n' || c == '\r') { |
| break; |
| } |
| } |
| return String::SubString(src, start, end - start, space); |
| } |
| |
| StringPtr Script::GetSnippet(intptr_t from_line, |
| intptr_t from_column, |
| intptr_t to_line, |
| intptr_t to_column) const { |
| if (!HasSource()) { |
| return Symbols::OptimizedOut().ptr(); |
| } |
| const String& src = String::Handle(Source()); |
| const intptr_t start = GetRelativeSourceIndex(src, from_line, line_offset(), |
| from_column, col_offset()); |
| // Lines and columns are 1-based, so we need to subtract one to get offsets. |
| const intptr_t end = GetRelativeSourceIndex( |
| src, to_line, from_line - 1, to_column, from_column - 1, start); |
| // Only need to check end, because a negative start results in a negative end. |
| if (end < 0) { |
| return String::null(); |
| } |
| return String::SubString(src, start, end - start); |
| } |
| |
| ScriptPtr Script::New() { |
| ASSERT(Object::script_class() != Class::null()); |
| ObjectPtr raw = |
| Object::Allocate(Script::kClassId, Script::InstanceSize(), Heap::kOld, |
| Script::ContainsCompressedPointers()); |
| return static_cast<ScriptPtr>(raw); |
| } |
| |
| ScriptPtr Script::New(const String& url, const String& source) { |
| return Script::New(url, url, source); |
| } |
| |
| ScriptPtr Script::New(const String& url, |
| const String& resolved_url, |
| const String& source) { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| const Script& result = Script::Handle(zone, Script::New()); |
| result.set_url(String::Handle(zone, Symbols::New(thread, url))); |
| result.set_resolved_url( |
| String::Handle(zone, Symbols::New(thread, resolved_url))); |
| result.set_source(source); |
| NOT_IN_PRECOMPILED(result.SetLazyLookupSourceAndLineStarts(false)); |
| NOT_IN_PRECOMPILED(result.SetHasCachedMaxPosition(false)); |
| result.set_kernel_script_index(0); |
| result.set_load_timestamp( |
| FLAG_remove_script_timestamps_for_test ? 0 : OS::GetCurrentTimeMillis()); |
| return result.ptr(); |
| } |
| |
| const char* Script::ToCString() const { |
| const String& name = String::Handle(url()); |
| return OS::SCreate(Thread::Current()->zone(), "Script(%s)", name.ToCString()); |
| } |
| |
| LibraryPtr Script::FindLibrary() const { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| auto isolate_group = thread->isolate_group(); |
| const GrowableObjectArray& libs = GrowableObjectArray::Handle( |
| zone, isolate_group->object_store()->libraries()); |
| Library& lib = Library::Handle(zone); |
| Array& scripts = Array::Handle(zone); |
| for (intptr_t i = 0; i < libs.Length(); i++) { |
| lib ^= libs.At(i); |
| scripts = lib.LoadedScripts(); |
| for (intptr_t j = 0; j < scripts.Length(); j++) { |
| if (scripts.At(j) == ptr()) { |
| return lib.ptr(); |
| } |
| } |
| } |
| return Library::null(); |
| } |
| |
| DictionaryIterator::DictionaryIterator(const Library& library) |
| : array_(Array::Handle(library.dictionary())), |
| // Last element in array is a Smi indicating the number of entries used. |
| size_(Array::Handle(library.dictionary()).Length() - 1), |
| next_ix_(0) { |
| MoveToNextObject(); |
| } |
| |
| ObjectPtr DictionaryIterator::GetNext() { |
| ASSERT(HasNext()); |
| int ix = next_ix_++; |
| MoveToNextObject(); |
| ASSERT(array_.At(ix) != Object::null()); |
| return array_.At(ix); |
| } |
| |
| void DictionaryIterator::MoveToNextObject() { |
| Object& obj = Object::Handle(array_.At(next_ix_)); |
| while (obj.IsNull() && HasNext()) { |
| next_ix_++; |
| obj = array_.At(next_ix_); |
| } |
| } |
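| |
| // Note on the bounds above: the dictionary array ends with a Smi holding |
| // the used-entry count (see the constructor), so array_.At(next_ix_) is |
| // safe even when next_ix_ reaches size_: it reads that non-null Smi and |
| // the skip loop terminates. |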
| |
| ClassDictionaryIterator::ClassDictionaryIterator(const Library& library, |
| IterationKind kind) |
| : DictionaryIterator(library), |
| toplevel_class_(Class::Handle((kind == kIteratePrivate) |
| ? library.toplevel_class() |
| : Class::null())) { |
| MoveToNextClass(); |
| } |
| |
| ClassPtr ClassDictionaryIterator::GetNextClass() { |
| ASSERT(HasNext()); |
| Class& cls = Class::Handle(); |
| if (next_ix_ < size_) { |
| int ix = next_ix_++; |
| cls ^= array_.At(ix); |
| MoveToNextClass(); |
| return cls.ptr(); |
| } |
| ASSERT(!toplevel_class_.IsNull()); |
| cls = toplevel_class_.ptr(); |
| toplevel_class_ = Class::null(); |
| return cls.ptr(); |
| } |
| |
| void ClassDictionaryIterator::MoveToNextClass() { |
| Object& obj = Object::Handle(); |
| while (next_ix_ < size_) { |
| obj = array_.At(next_ix_); |
| if (obj.IsClass()) { |
| return; |
| } |
| next_ix_++; |
| } |
| } |
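| |
| // Iteration-order note: dictionary classes are yielded first; with |
| // kIteratePrivate the library's top-level class is yielded last, once the |
| // dictionary portion is exhausted (see GetNextClass above). |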
| |
| static void ReportTooManyImports(const Library& lib) { |
| const String& url = String::Handle(lib.url()); |
| Report::MessageF(Report::kError, Script::Handle(lib.LookupScript(url)), |
| TokenPosition::kNoSource, Report::AtLocation, |
| "too many imports in library '%s'", url.ToCString()); |
| UNREACHABLE(); |
| } |
| |
| bool Library::IsAnyCoreLibrary() const { |
| String& url_str = Thread::Current()->StringHandle(); |
| url_str = url(); |
| return url_str.StartsWith(Symbols::DartScheme()) || |
| url_str.StartsWith(Symbols::DartSchemePrivate()); |
| } |
| |
| void Library::set_num_imports(intptr_t value) const { |
| if (!Utils::IsUint(16, value)) { |
| ReportTooManyImports(*this); |
| } |
| StoreNonPointer(&untag()->num_imports_, value); |
| } |
| |
| void Library::set_name(const String& name) const { |
| ASSERT(name.IsSymbol()); |
| untag()->set_name(name.ptr()); |
| } |
| |
| void Library::set_url(const String& name) const { |
| untag()->set_url(name.ptr()); |
| } |
| |
| void Library::set_kernel_data(const ExternalTypedData& data) const { |
| untag()->set_kernel_data(data.ptr()); |
| } |
| |
| void Library::set_loading_unit(const LoadingUnit& value) const { |
| untag()->set_loading_unit(value.ptr()); |
| } |
| |
| void Library::SetName(const String& name) const { |
| // Only set name once. |
| ASSERT(!Loaded()); |
| set_name(name); |
| } |
| |
| void Library::SetLoadInProgress() const { |
| // Must not already be in the process of being loaded. |
| ASSERT(untag()->load_state_ <= UntaggedLibrary::kLoadRequested); |
| StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoadInProgress); |
| } |
| |
| void Library::SetLoadRequested() const { |
  // Must not yet have been requested or loaded.
| ASSERT(untag()->load_state_ == UntaggedLibrary::kAllocated); |
| StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoadRequested); |
| } |
| |
| void Library::SetLoaded() const { |
  // Must be load-requested or load-in-progress, i.e. neither already
  // loaded nor merely allocated.
| ASSERT(LoadInProgress() || LoadRequested()); |
| StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoaded); |
| } |
| |
| void Library::AddMetadata(const Object& declaration, |
| intptr_t kernel_offset) const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| Thread* thread = Thread::Current(); |
| ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter()); |
| |
| MetadataMap map(metadata()); |
| map.UpdateOrInsert(declaration, Smi::Handle(Smi::New(kernel_offset))); |
| set_metadata(map.Release()); |
| #endif // defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
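// Returns the evaluated metadata annotations of 'declaration'. Metadata is
// recorded lazily: AddMetadata() stores only a Smi kernel offset, and the
// first GetMetadata() call evaluates the annotations from the kernel binary
// and, under the program lock, caches the resulting Array back into the map
// (re-checking that another thread did not get there first). In the
// precompiled runtime this always returns the empty array.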
| ObjectPtr Library::GetMetadata(const Object& declaration) const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| return Object::empty_array().ptr(); |
| #else |
| RELEASE_ASSERT(declaration.IsClass() || declaration.IsField() || |
| declaration.IsFunction() || declaration.IsLibrary() || |
| declaration.IsTypeParameter() || declaration.IsNamespace()); |
| |
| auto thread = Thread::Current(); |
| auto zone = thread->zone(); |
| |
| if (declaration.IsLibrary()) { |
    // Ensure the top-level class is loaded, as it may contain the
    // library's annotations.
| const auto& cls = Class::Handle(zone, toplevel_class()); |
| if (!cls.IsNull()) { |
| cls.EnsureDeclarationLoaded(); |
| } |
| } |
| Object& value = Object::Handle(zone); |
| { |
| SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| MetadataMap map(metadata()); |
| value = map.GetOrNull(declaration); |
| set_metadata(map.Release()); |
| } |
| if (value.IsNull()) { |
| // There is no metadata for this object. |
| return Object::empty_array().ptr(); |
| } |
| if (!value.IsSmi()) { |
| // Metadata is already evaluated. |
| ASSERT(value.IsArray()); |
| return value.ptr(); |
| } |
| const auto& smi_value = Smi::Cast(value); |
| intptr_t kernel_offset = smi_value.Value(); |
| ASSERT(kernel_offset > 0); |
| const auto& evaluated_value = Object::Handle( |
| zone, kernel::EvaluateMetadata( |
| *this, kernel_offset, |
| /* is_annotations_offset = */ declaration.IsLibrary() || |
| declaration.IsNamespace())); |
| if (evaluated_value.IsArray() || evaluated_value.IsNull()) { |
| ASSERT(evaluated_value.ptr() != Object::empty_array().ptr()); |
| SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock()); |
| MetadataMap map(metadata()); |
| if (map.GetOrNull(declaration) == smi_value.ptr()) { |
| map.UpdateOrInsert(declaration, evaluated_value); |
| } else { |
| ASSERT(map.GetOrNull(declaration) == evaluated_value.ptr()); |
| } |
| set_metadata(map.Release()); |
| } |
| return evaluated_value.ptr(); |
| #endif // defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
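// Returns whether 'name' is a private name: either it starts with '_'
// (e.g. "_foo"), or it is a mangled accessor name with a private base name,
// i.e. "get:_foo" or "set:_foo" (the checks below spell out 'g'/'s', "et:",
// and the '_' at index 4).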
| static bool ShouldBePrivate(const String& name) { |
| return (name.Length() >= 1 && name.CharAt(0) == '_') || |
| (name.Length() >= 5 && |
| (name.CharAt(4) == '_' && |
| (name.CharAt(0) == 'g' || name.CharAt(0) == 's') && |
| name.CharAt(1) == 'e' && name.CharAt(2) == 't' && |
| name.CharAt(3) == ':')); |
| } |
| |
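// Resolves 'name' in this library's scope. Lookup order: the resolved-names
// cache (if enabled), the library's own dictionary, the mangled getter and
// setter forms of the name, and, for non-private names, the imported
// libraries. Plain local hits are returned without being cached; all other
// outcomes (including a failed lookup) are recorded in the cache.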
| ObjectPtr Library::ResolveName(const String& name) const { |
| Object& obj = Object::Handle(); |
| if (FLAG_use_lib_cache && LookupResolvedNamesCache(name, &obj)) { |
| return obj.ptr(); |
| } |
| EnsureTopLevelClassIsFinalized(); |
| obj = LookupLocalObject(name); |
| if (!obj.IsNull()) { |
| // Names that are in this library's dictionary and are unmangled |
| // are not cached. This reduces the size of the cache. |
| return obj.ptr(); |
| } |
| String& accessor_name = String::Handle(Field::LookupGetterSymbol(name)); |
| if (!accessor_name.IsNull()) { |
| obj = LookupLocalObject(accessor_name); |
| } |
| if (obj.IsNull()) { |
| accessor_name = Field::LookupSetterSymbol(name); |
| if (!accessor_name.IsNull()) { |
| obj = LookupLocalObject(accessor_name); |
| } |
| if (obj.IsNull() && !ShouldBePrivate(name)) { |
| obj = LookupImportedObject(name); |
| } |
| } |
| AddToResolvedNamesCache(name, obj); |
| return obj.ptr(); |
| } |
| |
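// Hash-table traits keying on String contents; used for the resolved-names
// and exported-names caches below.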
| class StringEqualsTraits { |
| public: |
| static const char* Name() { return "StringEqualsTraits"; } |
| static bool ReportStats() { return false; } |
| |
| static bool IsMatch(const Object& a, const Object& b) { |
| return String::Cast(a).Equals(String::Cast(b)); |
| } |
| static uword Hash(const Object& obj) { return String::Cast(obj).Hash(); } |
| }; |
| typedef UnorderedHashMap<StringEqualsTraits> ResolvedNamesMap; |
| |
// Returns true if the name is found in the cache, false on a cache miss.
// 'obj' is set to the cached entry; it may be null, indicating that the
// name does not resolve to anything in this library.
| bool Library::LookupResolvedNamesCache(const String& name, Object* obj) const { |
| if (resolved_names() == Array::null()) { |
| return false; |
| } |
| ResolvedNamesMap cache(resolved_names()); |
| bool present = false; |
| *obj = cache.GetOrNull(name, &present); |
  // The mutator thread may add entries, and therefore change
  // 'resolved_names()', while a background compilation is running; only
  // ASSERT that 'resolved_names()' has not changed when on the mutator.
| #if defined(DEBUG) |
| if (Thread::Current()->IsMutatorThread()) { |
| ASSERT(cache.Release().ptr() == resolved_names()); |
| } else { |
| // Release must be called in debug mode. |
| cache.Release(); |
| } |
| #endif |
| return present; |
| } |
| |
| // Add a name to the resolved name cache. This name resolves to the |
| // given object in this library scope. obj may be null, which means |
| // the name does not resolve to anything in this library scope. |
| void Library::AddToResolvedNamesCache(const String& name, |
| const Object& obj) const { |
| if (!FLAG_use_lib_cache || Compiler::IsBackgroundCompilation()) { |
| return; |
| } |
| if (resolved_names() == Array::null()) { |
| InitResolvedNamesCache(); |
| } |
| ResolvedNamesMap cache(resolved_names()); |
| cache.UpdateOrInsert(name, obj); |
| untag()->set_resolved_names(cache.Release().ptr()); |
| } |
| |
| bool Library::LookupExportedNamesCache(const String& name, Object* obj) const { |
| ASSERT(FLAG_use_exp_cache); |
| if (exported_names() == Array::null()) { |
| return false; |
| } |
| ResolvedNamesMap cache(exported_names()); |
| bool present = false; |
| *obj = cache.GetOrNull(name, &present); |
  // The mutator thread may add entries, and therefore change
  // 'exported_names()', while a background compilation is running;
  // do not ASSERT that 'exported_names()' has not changed.
| #if defined(DEBUG) |
| if (Thread::Current()->IsMutatorThread()) { |
| ASSERT(cache.Release().ptr() == exported_names()); |
| } else { |
| // Release must be called in debug mode. |
| cache.Release(); |
| } |
| #endif |
| return present; |
| } |
| |
| void Library::AddToExportedNamesCache(const String& name, |
| const Object& obj) const { |
| if (!FLAG_use_exp_cache || Compiler::IsBackgroundCompilation()) { |
| return; |
| } |
| if (exported_names() == Array::null()) { |
| InitExportedNamesCache(); |
| } |
| ResolvedNamesMap cache(exported_names()); |
| cache.UpdateOrInsert(name, obj); |
| untag()->set_exported_names(cache.Release().ptr()); |
| } |
| |
| void Library::InvalidateResolvedName(const String& name) const { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| Object& entry = Object::Handle(zone); |
| if (FLAG_use_lib_cache && LookupResolvedNamesCache(name, &entry)) { |
| // TODO(koda): Support deleted sentinel in snapshots and remove only 'name'. |
| ClearResolvedNamesCache(); |
| } |
| if (!FLAG_use_exp_cache) { |
| return; |
| } |
| // When a new name is added to a library, we need to invalidate all |
| // caches that contain an entry for this name. If the name was previously |
| // looked up but could not be resolved, the cache contains a null entry. |
| GrowableObjectArray& libs = GrowableObjectArray::Handle( |
| zone, thread->isolate_group()->object_store()->libraries()); |
| Library& lib = Library::Handle(zone); |
| intptr_t num_libs = libs.Length(); |
| for (intptr_t i = 0; i < num_libs; i++) { |
| lib ^= libs.At(i); |
| if (lib.LookupExportedNamesCache(name, &entry)) { |
| lib.ClearExportedNamesCache(); |
| } |
| } |
| } |
| |
| // Invalidate all exported names caches in the isolate. |
| void Library::InvalidateExportedNamesCaches() { |
| GrowableObjectArray& libs = GrowableObjectArray::Handle( |
| IsolateGroup::Current()->object_store()->libraries()); |
| Library& lib = Library::Handle(); |
| intptr_t num_libs = libs.Length(); |
| for (intptr_t i = 0; i < num_libs; i++) { |
| lib ^= libs.At(i); |
| lib.ClearExportedNamesCache(); |
| } |
| } |
| |
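// Re-inserts all entries of 'old_dict' into a freshly allocated dictionary
// with 'new_dict_size' data slots (plus one trailing slot for the used-entry
// count). Insertion uses open addressing: start at hash % size and probe
// linearly until a null slot is found.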
| void Library::RehashDictionary(const Array& old_dict, |
| intptr_t new_dict_size) const { |
| intptr_t old_dict_size = old_dict.Length() - 1; |
| const Array& new_dict = |
| Array::Handle(Array::New(new_dict_size + 1, Heap::kOld)); |
| // Rehash all elements from the original dictionary |
| // to the newly allocated array. |
  Object& entry = Object::Handle();
| String& entry_name = String::Handle(); |
| Object& new_entry = Object::Handle(); |
| intptr_t used = 0; |
| for (intptr_t i = 0; i < old_dict_size; i++) { |
| entry = old_dict.At(i); |
| if (!entry.IsNull()) { |
| entry_name = entry.DictionaryName(); |
| ASSERT(!entry_name.IsNull()); |
| const intptr_t hash = entry_name.Hash(); |
| intptr_t index = hash % new_dict_size; |
| new_entry = new_dict.At(index); |
| while (!new_entry.IsNull()) { |
| index = (index + 1) % new_dict_size; // Move to next element. |
| new_entry = new_dict.At(index); |
| } |
| new_dict.SetAt(index, entry); |
| used++; |
| } |
| } |
| // Set used count. |
| ASSERT(used < new_dict_size); // Need at least one empty slot. |
| new_entry = Smi::New(used); |
| new_dict.SetAt(new_dict_size, new_entry); |
| // Remember the new dictionary now. |
| untag()->set_dictionary(new_dict.ptr()); |
| } |
| |
| void Library::AddObject(const Object& obj, const String& name) const { |
| ASSERT(Thread::Current()->IsMutatorThread()); |
| ASSERT(obj.IsClass() || obj.IsFunction() || obj.IsField() || |
| obj.IsLibraryPrefix()); |
| ASSERT(name.Equals(String::Handle(obj.DictionaryName()))); |
| ASSERT(LookupLocalObject(name) == Object::null()); |
| const Array& dict = Array::Handle(dictionary()); |
| intptr_t dict_size = dict.Length() - 1; |
| intptr_t index = name.Hash() % dict_size; |
| |
| Object& entry = Object::Handle(); |
| entry = dict.At(index); |
| // An empty spot will be found because we keep the hash set at most 75% full. |
| while (!entry.IsNull()) { |
| index = (index + 1) % dict_size; |
| entry = dict.At(index); |
| } |
| |
| // Insert the object at the empty slot. |
| dict.SetAt(index, obj); |
| // One more element added. |
| intptr_t used_elements = Smi::Value(Smi::RawCast(dict.At(dict_size))) + 1; |
| const Smi& used = Smi::Handle(Smi::New(used_elements)); |
| dict.SetAt(dict_size, used); // Update used count. |
| |
  // Rehash if the dictionary is more than 75% full.
| if (used_elements > ((dict_size / 4) * 3)) { |
| // TODO(iposva): Avoid exponential growth. |
| RehashDictionary(dict, 2 * dict_size); |
| } |
| |
| // Invalidate the cache of loaded scripts. |
| if (loaded_scripts() != Array::null()) { |
| untag()->set_loaded_scripts(Array::null()); |
| } |
| } |
| |
// Look up a name in the library's re-export namespace.
| // This lookup can occur from two different threads: background compiler and |
| // mutator thread. |
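// The 'trail' collects the indices of libraries visited along the export
// chain; a -1 entry (see the RemoveLast() < 0 check below) indicates that a
// cycle was detected, presumably flagged inside Namespace::Lookup, in which
// case the result is not cached.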
| ObjectPtr Library::LookupReExport(const String& name, |
| ZoneGrowableArray<intptr_t>* trail) const { |
| if (!HasExports()) { |
| return Object::null(); |
| } |
| |
  if (trail == nullptr) {
| trail = new ZoneGrowableArray<intptr_t>(); |
| } |
| Object& obj = Object::Handle(); |
| if (FLAG_use_exp_cache && LookupExportedNamesCache(name, &obj)) { |
| return obj.ptr(); |
| } |
| |
| const intptr_t lib_id = this->index(); |
| ASSERT(lib_id >= 0); // We use -1 to indicate that a cycle was found. |
| trail->Add(lib_id); |
| const Array& exports = Array::Handle(this->exports()); |
| Namespace& ns = Namespace::Handle(); |
| for (int i = 0; i < exports.Length(); i++) { |
| ns ^= exports.At(i); |
| obj = ns.Lookup(name, trail); |
| if (!obj.IsNull()) { |
| // The Lookup call above may return a setter x= when we are looking |
| // for the name x. Make sure we only return when a matching name |
| // is found. |
| String& obj_name = String::Handle(obj.DictionaryName()); |
| if (Field::IsSetterName(obj_name) == Field::IsSetterName(name)) { |
| break; |
| } |
| } |
| } |
| bool in_cycle = (trail->RemoveLast() < 0); |
| if (FLAG_use_exp_cache && !in_cycle && !Compiler::IsBackgroundCompilation()) { |
| AddToExportedNamesCache(name, obj); |
| } |
| return obj.ptr(); |
| } |
| |
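// Looks up 'name' in the library dictionary using linear probing and returns
// the matching entry, or null if there is none. '*index' is set to the slot
// at which the search ended: the entry's slot on a hit, or the first empty
// slot on a miss.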
| ObjectPtr Library::LookupEntry(const String& name, intptr_t* index) const { |
| ASSERT(!IsNull()); |
| Thread* thread = Thread::Current(); |
| REUSABLE_ARRAY_HANDLESCOPE(thread); |
| REUSABLE_OBJECT_HANDLESCOPE(thread); |
| REUSABLE_STRING_HANDLESCOPE(thread); |
| Array& dict = thread->ArrayHandle(); |
| dict = dictionary(); |
| intptr_t dict_size = dict.Length() - 1; |
| *index = name.Hash() % dict_size; |
| Object& entry = thread->ObjectHandle(); |
| String& entry_name = thread->StringHandle(); |
| entry = dict.At(*index); |
| // Search the entry in the hash set. |
| while (!entry.IsNull()) { |
| entry_name = entry.DictionaryName(); |
| ASSERT(!entry_name.IsNull()); |
| if (entry_name.Equals(name)) { |
| return entry.ptr(); |
| } |
| *index = (*index + 1) % dict_size; |
| entry = dict.At(*index); |
| } |
| return Object::null(); |
| } |
| |
| void Library::AddClass(const Class& cls) const { |
| ASSERT(!Compiler::IsBackgroundCompilation()); |
| const String& class_name = String::Handle(cls.Name()); |
| AddObject(cls, class_name); |
| // Link class to this library. |
| cls.set_library(*this); |
| InvalidateResolvedName(class_name); |
| } |
| |
| static void AddScriptIfUnique(const GrowableObjectArray& scripts, |
| const Script& candidate) { |
| if (candidate.IsNull()) { |
| return; |
| } |
| Script& script_obj = Script::Handle(); |
| |
| for (int i = 0; i < scripts.Length(); i++) { |
| script_obj ^= scripts.At(i); |
| if (script_obj.ptr() == candidate.ptr()) { |
| // We already have a reference to this script. |
| return; |
| } |
| } |
| // Add script to the list of scripts. |
| scripts.Add(candidate); |
| } |
| |
| ArrayPtr Library::LoadedScripts() const { |
| ASSERT(Thread::Current()->IsMutatorThread()); |
| // We compute the list of loaded scripts lazily. The result is |
| // cached in loaded_scripts_. |
| if (loaded_scripts() == Array::null()) { |
| // TODO(jensj): This can be cleaned up. |
| // It really should just return the content of `used_scripts`, and there |
| // should be no need to do the O(n) call to `AddScriptIfUnique` per script. |
| |
| // Iterate over the library dictionary and collect all scripts. |
| const GrowableObjectArray& scripts = |
| GrowableObjectArray::Handle(GrowableObjectArray::New(8)); |
| Object& entry = Object::Handle(); |
| Class& cls = Class::Handle(); |
| Script& owner_script = Script::Handle(); |
| DictionaryIterator it(*this); |
| while (it.HasNext()) { |
| entry = it.GetNext(); |
| if (entry.IsClass()) { |
| owner_script = Class::Cast(entry).script(); |
| } else if (entry.IsFunction()) { |
| owner_script = Function::Cast(entry).script(); |
| } else if (entry.IsField()) { |
| owner_script = Field::Cast(entry).Script(); |
| } else { |
| continue; |
| } |
| AddScriptIfUnique(scripts, owner_script); |
| } |
| |
| // Add all scripts from patch classes. |
| GrowableObjectArray& patches = GrowableObjectArray::Handle(used_scripts()); |
| for (intptr_t i = 0; i < patches.Length(); i++) { |
| entry = patches.At(i); |
| if (entry.IsClass()) { |
| owner_script = Class::Cast(entry).script(); |
| } else { |
| ASSERT(entry.IsScript()); |
| owner_script = Script::Cast(entry).ptr(); |
| } |
| AddScriptIfUnique(scripts, owner_script); |
| } |
| |
| cls = toplevel_class(); |
| if (!cls.IsNull()) { |
| owner_script = cls.script(); |
| AddScriptIfUnique(scripts, owner_script); |
| // Special case: Scripts that only contain external top-level functions |
| // are not included above, but can be referenced through a library's |
| // anonymous classes. Example: dart-core:identical.dart. |
| Function& func = Function::Handle(); |
| Array& functions = Array::Handle(cls.current_functions()); |
| for (intptr_t j = 0; j < functions.Length(); j++) { |
| func ^= functions.At(j); |
| if (func.is_external()) { |
| owner_script = func.script(); |
| AddScriptIfUnique(scripts, owner_script); |
| } |
| } |
| } |
| |
| // Create the array of scripts and cache it in loaded_scripts_. |
| const Array& scripts_array = Array::Handle(Array::MakeFixedLength(scripts)); |
| untag()->set_loaded_scripts(scripts_array.ptr()); |
| } |
| return loaded_scripts(); |
| } |
| |
| // TODO(hausner): we might want to add a script dictionary to the |
| // library class to make this lookup faster. |
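// Matches either the whole URL or a suffix that begins at a path separator;
// e.g. (hypothetical) looking up "lib/foo.dart" matches a script whose url
// is "package:pkg/lib/foo.dart".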
| ScriptPtr Library::LookupScript(const String& url, |
| bool useResolvedUri /* = false */) const { |
| const intptr_t url_length = url.Length(); |
| if (url_length == 0) { |
| return Script::null(); |
| } |
| const Array& scripts = Array::Handle(LoadedScripts()); |
| Script& script = Script::Handle(); |
| String& script_url = String::Handle(); |
| const intptr_t num_scripts = scripts.Length(); |
| for (int i = 0; i < num_scripts; i++) { |
| script ^= scripts.At(i); |
| if (useResolvedUri) { |
      // Used for URLs with 'org-dartlang-sdk:' or 'file:' schemes.
| script_url = script.resolved_url(); |
| } else { |
      // Used for URLs with 'dart:', 'package:', or 'file:' schemes.
| script_url = script.url(); |
| } |
| const intptr_t start_idx = script_url.Length() - url_length; |
| if ((start_idx == 0) && url.Equals(script_url)) { |
| return script.ptr(); |
| } else if (start_idx > 0) { |
| // If we do a suffix match, only match if the partial path |
| // starts at or immediately after the path separator. |
| if (((url.CharAt(0) == '/') || |
| (script_url.CharAt(start_idx - 1) == '/')) && |
| url.Equals(script_url, start_idx, url_length)) { |
| return script.ptr(); |
| } |
| } |
| } |
| return Script::null(); |
| } |
| |
| void Library::EnsureTopLevelClassIsFinalized() const { |
| if (toplevel_class() == Object::null()) { |
| return; |
| } |
| Thread* thread = Thread::Current(); |
| const Class& cls = Class::Handle(thread->zone(), toplevel_class()); |
| if (cls.is_finalized()) { |
| return; |
| } |
| const Error& error = |
| Error::Handle(thread->zone(), cls.EnsureIsFinalized(thread)); |
| if (!error.IsNull()) { |
| Exceptions::PropagateError(error); |
| } |
| } |
| |
| ObjectPtr Library::LookupLocalObject(const String& name) const { |
| intptr_t index; |
| return LookupEntry(name, &index); |
| } |
| |
| ObjectPtr Library::LookupLocalOrReExportObject(const String& name) const { |
| intptr_t index; |
| EnsureTopLevelClassIsFinalized(); |
| const Object& result = Object::Handle(LookupEntry(name, &index)); |
| if (!result.IsNull() && !result.IsLibraryPrefix()) { |
| return result.ptr(); |
| } |
| return LookupReExport(name); |
| } |
| |
| FieldPtr Library::LookupFieldAllowPrivate(const String& name) const { |
| EnsureTopLevelClassIsFinalized(); |
| Object& obj = Object::Handle(LookupObjectAllowPrivate(name)); |
| if (obj.IsField()) { |
| return Field::Cast(obj).ptr(); |
| } |
| return Field::null(); |
| } |
| |
| FieldPtr Library::LookupLocalField(const String& name) const { |
| EnsureTopLevelClassIsFinalized(); |
| Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name)); |
| if (obj.IsField()) { |
| return Field::Cast(obj).ptr(); |
| } |
| return Field::null(); |
| } |
| |
| FunctionPtr Library::LookupFunctionAllowPrivate(const String& name) const { |
| EnsureTopLevelClassIsFinalized(); |
| Object& obj = Object::Handle(LookupObjectAllowPrivate(name)); |
| if (obj.IsFunction()) { |
| return Function::Cast(obj).ptr(); |
| } |
| return Function::null(); |
| } |
| |
| FunctionPtr Library::LookupLocalFunction(const String& name) const { |
| EnsureTopLevelClassIsFinalized(); |
| Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name)); |
| if (obj.IsFunction()) { |
| return Function::Cast(obj).ptr(); |
| } |
| return Function::null(); |
| } |
| |
| ObjectPtr Library::LookupLocalObjectAllowPrivate(const String& name) const { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| Object& obj = Object::Handle(zone, Object::null()); |
| obj = LookupLocalObject(name); |
| if (obj.IsNull() && ShouldBePrivate(name)) { |
| String& private_name = String::Handle(zone, PrivateName(name)); |
| obj = LookupLocalObject(private_name); |
| } |
| return obj.ptr(); |
| } |
| |
| ObjectPtr Library::LookupObjectAllowPrivate(const String& name) const { |
| // First check if name is found in the local scope of the library. |
| Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name)); |
| if (!obj.IsNull()) { |
| return obj.ptr(); |
| } |
| |
| // Do not look up private names in imported libraries. |
| if (ShouldBePrivate(name)) { |
| return Object::null(); |
| } |
| |
| // Now check if name is found in any imported libs. |
| return LookupImportedObject(name); |
| } |
| |
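// Searches this library's imports for 'name'. The shadowing rules are
// spelled out inline below: a declaration from a non-"dart:" library hides
// one exported by a "dart:" system library, a setter does not hide the
// plain name being looked up, and two genuinely distinct declarations with
// the same name make the lookup fail with null.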
| ObjectPtr Library::LookupImportedObject(const String& name) const { |
| Object& obj = Object::Handle(); |
| Namespace& import = Namespace::Handle(); |
| Library& import_lib = Library::Handle(); |
| String& import_lib_url = String::Handle(); |
| String& first_import_lib_url = String::Handle(); |
| Object& found_obj = Object::Handle(); |
| String& found_obj_name = String::Handle(); |
| ASSERT(!ShouldBePrivate(name)); |
| for (intptr_t i = 0; i < num_imports(); i++) { |
| import = ImportAt(i); |
| obj = import.Lookup(name); |
| if (!obj.IsNull()) { |
| import_lib = import.target(); |
| import_lib_url = import_lib.url(); |
| if (found_obj.ptr() != obj.ptr()) { |
| if (first_import_lib_url.IsNull() || |
| first_import_lib_url.StartsWith(Symbols::DartScheme())) { |
| // This is the first object we found, or the |
| // previously found object is exported from a Dart |
| // system library. The newly found object hides the one |
| // from the Dart library. |
| first_import_lib_url = import_lib.url(); |
| found_obj = obj.ptr(); |
| found_obj_name = obj.DictionaryName(); |
| } else if (import_lib_url.StartsWith(Symbols::DartScheme())) { |
| // The newly found object is exported from a Dart system |
| // library. It is hidden by the previously found object. |
| // We continue to search. |
| } else if (Field::IsSetterName(found_obj_name) && |
| !Field::IsSetterName(name)) { |
| // We are looking for an unmangled name or a getter, but |
| // the first object we found is a setter. Replace the first |
| // object with the one we just found. |
| first_import_lib_url = import_lib.url(); |
| found_obj = obj.ptr(); |
| found_obj_name = found_obj.DictionaryName(); |
| } else { |
| // We found two different objects with the same name. |
| // Note that we need to compare the names again because |
| // looking up an unmangled name can return a getter or a |
| // setter. A getter name is the same as the unmangled name, |
| // but a setter name is different from an unmangled name or a |
| // getter name. |
| if (Field::IsGetterName(found_obj_name)) { |
| found_obj_name = Field::NameFromGetter(found_obj_name); |
| } |
| String& second_obj_name = String::Handle(obj.DictionaryName()); |
| if (Field::IsGetterName(second_obj_name)) { |
| second_obj_name = Field::NameFromGetter(second_obj_name); |
| } |
| if (found_obj_name.Equals(second_obj_name)) { |
| return Object::null(); |
| } |
| } |
| } |
| } |
| } |
| return found_obj.ptr(); |
| } |
| |
| ClassPtr Library::LookupClass(const String& name) const { |
| Object& obj = Object::Handle(LookupLocalObject(name)); |
| if (obj.IsNull() && !ShouldBePrivate(name)) { |
| obj = LookupImportedObject(name); |
| } |
| if (obj.IsClass()) { |
| return Class::Cast(obj).ptr(); |
| } |
| return Class::null(); |
| } |
| |
| ClassPtr Library::LookupLocalClass(const String& name) const { |
| Object& obj = Object::Handle(LookupLocalObject(name)); |
| if (obj.IsClass()) { |
| return Class::Cast(obj).ptr(); |
| } |
| return Class::null(); |
| } |
| |
| ClassPtr Library::LookupClassAllowPrivate(const String& name) const { |
| // See if the class is available in this library or in the top level |
| // scope of any imported library. |
| Zone* zone = Thread::Current()->zone(); |
| const Class& cls = Class::Handle(zone, LookupClass(name)); |
| if (!cls.IsNull()) { |
| return cls.ptr(); |
| } |
| |
| // Now try to lookup the class using its private name, but only in |
| // this library (not in imported libraries). |
| if (ShouldBePrivate(name)) { |
| String& private_name = String::Handle(zone, PrivateName(name)); |
| const Object& obj = Object::Handle(LookupLocalObject(private_name)); |
| if (obj.IsClass()) { |
| return Class::Cast(obj).ptr(); |
| } |
| } |
| return Class::null(); |
| } |
| |
| // Mixin applications can have multiple private keys from different libraries. |
| ClassPtr Library::SlowLookupClassAllowMultiPartPrivate( |
| const String& name) const { |
| Array& dict = Array::Handle(dictionary()); |
| Object& entry = Object::Handle(); |
| String& cls_name = String::Handle(); |
| for (intptr_t i = 0; i < dict.Length(); i++) { |
| entry = dict.At(i); |
| if (entry.IsClass()) { |
| cls_name = Class::Cast(entry).Name(); |
| // Warning: comparison is not symmetric. |
| if (String::EqualsIgnoringPrivateKey(cls_name, name)) { |
| return Class::Cast(entry).ptr(); |
| } |
| } |
| } |
| return Class::null(); |
| } |
| |
| LibraryPrefixPtr Library::LookupLocalLibraryPrefix(const String& name) const { |
| const Object& obj = Object::Handle(LookupLocalObject(name)); |
| if (obj.IsLibraryPrefix()) { |
| return LibraryPrefix::Cast(obj).ptr(); |
| } |
| return LibraryPrefix::null(); |
| } |
| |
| void Library::set_toplevel_class(const Class& value) const { |
| ASSERT(untag()->toplevel_class() == Class::null()); |
| untag()->set_toplevel_class(value.ptr()); |
| } |
| |
| void Library::set_dependencies(const Array& deps) const { |
| untag()->set_dependencies(deps.ptr()); |
| } |
| |
| void Library::set_metadata(const Array& value) const { |
| if (untag()->metadata() != value.ptr()) { |
| DEBUG_ASSERT( |
| IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter()); |
| untag()->set_metadata(value.ptr()); |
| } |
| } |
| |
| LibraryPtr Library::ImportLibraryAt(intptr_t index) const { |
| Namespace& import = Namespace::Handle(ImportAt(index)); |
| if (import.IsNull()) { |
| return Library::null(); |
| } |
| return import.target(); |
| } |
| |
| NamespacePtr Library::ImportAt(intptr_t index) const { |
| if ((index < 0) || index >= num_imports()) { |
| return Namespace::null(); |
| } |
| const Array& import_list = Array::Handle(imports()); |
| return Namespace::RawCast(import_list.At(index)); |
| } |
| |
| void Library::DropDependenciesAndCaches() const { |
| // We need to preserve the "dart-ext:" imports because they are used by |
| // Loader::ReloadNativeExtensions(). |
| intptr_t native_import_count = 0; |
| Array& imports = Array::Handle(untag()->imports()); |
| Namespace& ns = Namespace::Handle(); |
| Library& lib = Library::Handle(); |
| String& url = String::Handle(); |
| for (int i = 0; i < imports.Length(); ++i) { |
| ns = Namespace::RawCast(imports.At(i)); |
| if (ns.IsNull()) continue; |
| lib = ns.target(); |
| url = lib.url(); |
| if (url.StartsWith(Symbols::DartExtensionScheme())) { |
| native_import_count++; |
| } |
| } |
| Array& new_imports = |
| Array::Handle(Array::New(native_import_count, Heap::kOld)); |
| for (int i = 0, j = 0; i < imports.Length(); ++i) { |
| ns = Namespace::RawCast(imports.At(i)); |
| if (ns.IsNull()) continue; |
| lib = ns.target(); |
| url = lib.url(); |
| if (url.StartsWith(Symbols::DartExtensionScheme())) { |
| new_imports.SetAt(j++, ns); |
| } |
| } |
| |
| untag()->set_imports(new_imports.ptr()); |
| untag()->set_exports(Object::empty_array().ptr()); |
| StoreNonPointer(&untag()->num_imports_, 0); |
| untag()->set_resolved_names(Array::null()); |
| untag()->set_exported_names(Array::null()); |
| untag()->set_loaded_scripts(Array::null()); |
| untag()->set_dependencies(Array::null()); |
| } |
| |
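// The imports array grows by a fixed increment plus a quarter of the current
// capacity, amortizing repeated AddImport calls.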
| void Library::AddImport(const Namespace& ns) const { |
| Array& imports = Array::Handle(this->imports()); |
| intptr_t capacity = imports.Length(); |
| if (num_imports() == capacity) { |
| capacity = capacity + kImportsCapacityIncrement + (capacity >> 2); |
| imports = Array::Grow(imports, capacity); |
| untag()->set_imports(imports.ptr()); |
| } |
| intptr_t index = num_imports(); |
| imports.SetAt(index, ns); |
| set_num_imports(index + 1); |
| } |
| |
| // Convenience function to determine whether the export list is |
| // non-empty. |
| bool Library::HasExports() const { |
| return exports() != Object::empty_array().ptr(); |
| } |
| |
| // We add one namespace at a time to the exports array and don't |
| // pre-allocate any unused capacity. The assumption is that |
| // re-exports are quite rare. |
| void Library::AddExport(const Namespace& ns) const { |
| Array& exports = Array::Handle(this->exports()); |
| intptr_t num_exports = exports.Length(); |
| exports = Array::Grow(exports, num_exports + 1); |
| untag()->set_exports(exports.ptr()); |
| exports.SetAt(num_exports, ns); |
| } |
| |
| static ArrayPtr NewDictionary(intptr_t initial_size) { |
| const Array& dict = Array::Handle(Array::New(initial_size + 1, Heap::kOld)); |
  // The last element of the dictionary specifies the number of in-use slots.
| dict.SetAt(initial_size, Object::smi_zero()); |
| return dict.ptr(); |
| } |
| |
| void Library::InitResolvedNamesCache() const { |
| Thread* thread = Thread::Current(); |
| ASSERT(thread->IsMutatorThread()); |
| REUSABLE_FUNCTION_HANDLESCOPE(thread); |
| Array& cache = thread->ArrayHandle(); |
| cache = HashTables::New<ResolvedNamesMap>(64); |
| untag()->set_resolved_names(cache.ptr()); |
| } |
| |
| void Library::ClearResolvedNamesCache() const { |
| ASSERT(Thread::Current()->IsMutatorThread()); |
| untag()->set_resolved_names(Array::null()); |
| } |
| |
| void Library::InitExportedNamesCache() const { |
| Thread* thread = Thread::Current(); |
| ASSERT(thread->IsMutatorThread()); |
| REUSABLE_FUNCTION_HANDLESCOPE(thread); |
| Array& cache = thread->ArrayHandle(); |
| cache = HashTables::New<ResolvedNamesMap>(16); |
| untag()->set_exported_names(cache.ptr()); |
| } |
| |
| void Library::ClearExportedNamesCache() const { |
| untag()->set_exported_names(Array::null()); |
| } |
| |
| void Library::InitClassDictionary() const { |
| Thread* thread = Thread::Current(); |
| ASSERT(thread->IsMutatorThread()); |
| REUSABLE_FUNCTION_HANDLESCOPE(thread); |
| Array& dictionary = thread->ArrayHandle(); |
| // TODO(iposva): Find reasonable initial size. |
| const int kInitialElementCount = 16; |
| dictionary = NewDictionary(kInitialElementCount); |
| untag()->set_dictionary(dictionary.ptr()); |
| } |
| |
| void Library::InitImportList() const { |
| const Array& imports = |
| Array::Handle(Array::New(kInitialImportsCapacity, Heap::kOld)); |
| untag()->set_imports(imports.ptr()); |
| StoreNonPointer(&untag()->num_imports_, 0); |
| } |
| |
| LibraryPtr Library::New() { |
| ASSERT(Object::library_class() != Class::null()); |
| ObjectPtr raw = |
| Object::Allocate(Library::kClassId, Library::InstanceSize(), Heap::kOld, |
                       Library::ContainsCompressedPointers());
| return static_cast<LibraryPtr>(raw); |
| } |
| |
| LibraryPtr Library::NewLibraryHelper(const String& url, bool import_core_lib) { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| ASSERT(thread->IsMutatorThread()); |
| // Force the url to have a hash code. |
| url.Hash(); |
| const bool dart_scheme = url.StartsWith(Symbols::DartScheme()); |
| const Library& result = Library::Handle(zone, Library::New()); |
| result.untag()->set_name(Symbols::Empty().ptr()); |
| result.untag()->set_url(url.ptr()); |
| result.untag()->set_resolved_names(Array::null()); |
| result.untag()->set_exported_names(Array::null()); |
| result.untag()->set_dictionary(Object::empty_array().ptr()); |
| Array& array = Array::Handle(zone); |
| array = HashTables::New<MetadataMap>(4, Heap::kOld); |
| result.untag()->set_metadata(array.ptr()); |
| result.untag()->set_toplevel_class(Class::null()); |
| GrowableObjectArray& list = GrowableObjectArray::Handle(zone); |
| list = GrowableObjectArray::New(Object::empty_array(), Heap::kOld); |
| result.untag()->set_used_scripts(list.ptr()); |
| result.untag()->set_imports(Object::empty_array().ptr()); |
| result.untag()->set_exports(Object::empty_array().ptr()); |
| result.untag()->set_loaded_scripts(Array::null()); |
  result.set_native_entry_resolver(nullptr);
  result.set_native_entry_symbol_resolver(nullptr);
| result.set_ffi_native_resolver(nullptr); |
| result.set_flags(0); |
| result.set_is_in_fullsnapshot(false); |
| result.set_is_nnbd(false); |
| if (dart_scheme) { |
| // Only debug dart: libraries if we have been requested to show invisible |
| // frames. |
| result.set_debuggable(FLAG_show_invisible_frames); |
| } else { |
| // Default to debuggable for all other libraries. |
| result.set_debuggable(true); |
| } |
| result.set_is_dart_scheme(dart_scheme); |
| NOT_IN_PRECOMPILED(result.set_kernel_offset(0)); |
| result.StoreNonPointer(&result.untag()->load_state_, |
| UntaggedLibrary::kAllocated); |
| result.StoreNonPointer(&result.untag()->index_, -1); |
| result.InitClassDictionary(); |
| result.InitImportList(); |
| result.AllocatePrivateKey(); |
| if (import_core_lib) { |
| const Library& core_lib = Library::Handle(zone, Library::CoreLibrary()); |
| ASSERT(!core_lib.IsNull()); |
| const Namespace& ns = |
| Namespace::Handle(zone, Namespace::New(core_lib, Object::null_array(), |
| Object::null_array(), result)); |
| result.AddImport(ns); |
| } |
| return result.ptr(); |
| } |
| |
| LibraryPtr Library::New(const String& url) { |
| return NewLibraryHelper(url, false); |
| } |
| |
| void Library::set_flags(uint8_t flags) const { |
| StoreNonPointer(&untag()->flags_, flags); |
| } |
| |
| void Library::InitCoreLibrary(IsolateGroup* isolate_group) { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| const String& core_lib_url = Symbols::DartCore(); |
| const Library& core_lib = |
| Library::Handle(zone, Library::NewLibraryHelper(core_lib_url, false)); |
| core_lib.SetLoadRequested(); |
| core_lib.Register(thread); |
| isolate_group->object_store()->set_bootstrap_library(ObjectStore::kCore, |
| core_lib); |
| isolate_group->object_store()->set_root_library(Library::Handle()); |
| } |
| |
| // Invoke the function, or noSuchMethod if it is null. |
| static ObjectPtr InvokeInstanceFunction( |
| Thread* thread, |
| const Instance& receiver, |
| const Function& function, |
| const String& target_name, |
| const Array& args, |
| const Array& args_descriptor_array, |
| bool respect_reflectable, |
| const TypeArguments& instantiator_type_args) { |
| // Note "args" is already the internal arguments with the receiver as the |
| // first element. |
| ArgumentsDescriptor args_descriptor(args_descriptor_array); |
| if (function.IsNull() || |
| !function.AreValidArguments(args_descriptor, nullptr) || |
| (respect_reflectable && !function.is_reflectable())) { |
| return DartEntry::InvokeNoSuchMethod(thread, receiver, target_name, args, |
| args_descriptor_array); |
| } |
| ObjectPtr type_error = function.DoArgumentTypesMatch(args, args_descriptor, |
| instantiator_type_args); |
| if (type_error != Error::null()) { |
| return type_error; |
| } |
| return DartEntry::InvokeFunction(function, args, args_descriptor_array); |
| } |
| |
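// Looks up and invokes the top-level getter 'getter_name'. A field's value
// is returned directly when initialized; a regular function found under the
// plain name is implicitly closurized. If nothing is found, either a
// NoSuchMethodError is thrown or Object::sentinel() (which must not leak
// into Dart code) is returned, depending on 'throw_nsm_if_absent'.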
| ObjectPtr Library::InvokeGetter(const String& getter_name, |
| bool throw_nsm_if_absent, |
| bool respect_reflectable, |
| bool check_is_entrypoint) const { |
| Object& obj = Object::Handle(LookupLocalOrReExportObject(getter_name)); |
| Function& getter = Function::Handle(); |
| if (obj.IsField()) { |
| const Field& field = Field::Cast(obj); |
| if (check_is_entrypoint) { |
| CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kGetterOnly)); |
| } |
| if (!field.IsUninitialized()) { |
| return field.StaticValue(); |
| } |
| // An uninitialized field was found. Check for a getter in the field's |
| // owner class. |
| const Class& klass = Class::Handle(field.Owner()); |
| const String& internal_getter_name = |
| String::Handle(Field::GetterName(getter_name)); |
| getter = klass.LookupStaticFunction(internal_getter_name); |
| } else { |
| // No field found. Check for a getter in the lib. |
| const String& internal_getter_name = |
| String::Handle(Field::GetterName(getter_name)); |
| obj = LookupLocalOrReExportObject(internal_getter_name); |
| if (obj.IsFunction()) { |
| getter = Function::Cast(obj).ptr(); |
| if (check_is_entrypoint) { |
| CHECK_ERROR(getter.VerifyCallEntryPoint()); |
| } |
| } else { |
| obj = LookupLocalOrReExportObject(getter_name); |
| // Normally static top-level methods cannot be closurized through the |
| // native API even if they are marked as entry-points, with the one |
| // exception of "main". |
| if (obj.IsFunction() && check_is_entrypoint) { |
| if (!getter_name.Equals(String::Handle(String::New("main"))) || |
| ptr() != IsolateGroup::Current()->object_store()->root_library()) { |
| CHECK_ERROR(Function::Cast(obj).VerifyClosurizedEntryPoint()); |
| } |
| } |
| if (obj.IsFunction() && Function::Cast(obj).SafeToClosurize()) { |
| // Looking for a getter but found a regular method: closurize it. |
| const Function& closure_function = |
| Function::Handle(Function::Cast(obj).ImplicitClosureFunction()); |
| return closure_function.ImplicitStaticClosure(); |
| } |
| } |
| } |
| |
| if (getter.IsNull() || (respect_reflectable && !getter.is_reflectable())) { |
| if (throw_nsm_if_absent) { |
| return ThrowNoSuchMethod( |
| AbstractType::Handle(Class::Handle(toplevel_class()).RareType()), |
| getter_name, Object::null_array(), Object::null_array(), |
| InvocationMirror::kTopLevel, InvocationMirror::kGetter); |
| } |
| |
    // Fall-through case: indicate that we didn't find any function or field
    // using a special sentinel instance. This is different from a field
    // being null. Callers make sure that this sentinel does not leak into
    // Dartland.
| return Object::sentinel().ptr(); |
| } |
| |
| // Invoke the getter and return the result. |
| return DartEntry::InvokeFunction(getter, Object::empty_array()); |
| } |
| |
| ObjectPtr Library::InvokeSetter(const String& setter_name, |
| const Instance& value, |
| bool respect_reflectable, |
| bool check_is_entrypoint) const { |
| Object& obj = Object::Handle(LookupLocalOrReExportObject(setter_name)); |
| const String& internal_setter_name = |
| String::Handle(Field::SetterName(setter_name)); |
| AbstractType& setter_type = AbstractType::Handle(); |
| AbstractType& argument_type = AbstractType::Handle(value.GetType(Heap::kOld)); |
| if (obj.IsField()) { |
| const Field& field = Field::Cast(obj); |
| if (check_is_entrypoint) { |
| CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kSetterOnly)); |
| } |
| setter_type = field.type(); |
| if (!argument_type.IsNullType() && !setter_type.IsDynamicType() && |
| !value.IsInstanceOf(setter_type, Object::null_type_arguments(), |
| Object::null_type_arguments())) { |
| return ThrowTypeError(field.token_pos(), value, setter_type, setter_name); |
| } |
| if (field.is_final() || (respect_reflectable && !field.is_reflectable())) { |
| const int kNumArgs = 1; |
| const Array& args = Array::Handle(Array::New(kNumArgs)); |
| args.SetAt(0, value); |
| |
| return ThrowNoSuchMethod( |
| AbstractType::Handle(Class::Handle(toplevel_class()).RareType()), |
| internal_setter_name, args, Object::null_array(), |
| InvocationMirror::kTopLevel, InvocationMirror::kSetter); |
| } |
| field.SetStaticValue(value); |
| return value.ptr(); |
| } |
| |
| Function& setter = Function::Handle(); |
| obj = LookupLocalOrReExportObject(internal_setter_name); |
| if (obj.IsFunction()) { |
| setter ^= obj.ptr(); |
| } |
| |
| if (!setter.IsNull() && check_is_entrypoint) { |
| CHECK_ERROR(setter.VerifyCallEntryPoint()); |
| } |
| |
| const int kNumArgs = 1; |
| const Array& args = Array::Handle(Array::New(kNumArgs)); |
| args.SetAt(0, value); |
| if (setter.IsNull() || (respect_reflectable && !setter.is_reflectable())) { |
| return ThrowNoSuchMethod( |
| AbstractType::Handle(Class::Handle(toplevel_class()).RareType()), |
| internal_setter_name, args, Object::null_array(), |
| InvocationMirror::kTopLevel, InvocationMirror::kSetter); |
| } |
| |
| setter_type = setter.ParameterTypeAt(0); |
| if (!argument_type.IsNullType() && !setter_type.IsDynamicType() && |
| !value.IsInstanceOf(setter_type, Object::null_type_arguments(), |
| Object::null_type_arguments())) { |
| return ThrowTypeError(setter.token_pos(), value, setter_type, setter_name); |
| } |
| |
| return DartEntry::InvokeFunction(setter, args); |
| } |
| |
| ObjectPtr Library::Invoke(const String& function_name, |
| const Array& args, |
| const Array& arg_names, |
| bool respect_reflectable, |
| bool check_is_entrypoint) const { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| |
  // We don't pass any explicit type arguments; lower layers interpret this
  // as using dynamic for any function type arguments.
| const int kTypeArgsLen = 0; |
| const Array& args_descriptor_array = Array::Handle( |
| zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(), |
| arg_names, Heap::kNew)); |
| ArgumentsDescriptor args_descriptor(args_descriptor_array); |
| |
| auto& function = Function::Handle(zone); |
| auto& result = |
| Object::Handle(zone, LookupLocalOrReExportObject(function_name)); |
| if (result.IsFunction()) { |
| function ^= result.ptr(); |
| } |
| |
| if (!function.IsNull() && check_is_entrypoint) { |
| CHECK_ERROR(function.VerifyCallEntryPoint()); |
| } |
| |
| if (function.IsNull()) { |
    // No method found: try to find a getter and invoke 'call' on its result.
| const Object& getter_result = Object::Handle( |
| zone, InvokeGetter(function_name, false, respect_reflectable, |
| check_is_entrypoint)); |
| if (getter_result.ptr() != Object::sentinel().ptr()) { |
| if (check_is_entrypoint) { |
| CHECK_ERROR(EntryPointFieldInvocationError(function_name)); |
| } |
| const auto& call_args_descriptor_array = Array::Handle( |
| zone, ArgumentsDescriptor::NewBoxed(args_descriptor.TypeArgsLen(), |
| args_descriptor.Count() + 1, |
| arg_names, Heap::kNew)); |
|