| // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| // for details. All rights reserved. Use of this source code is governed by a |
| // BSD-style license that can be found in the LICENSE file. |
| |
| #include "vm/object.h" |
| |
| #include <memory> |
| |
| #include "compiler/method_recognizer.h" |
| #include "include/dart_api.h" |
| #include "lib/integers.h" |
| #include "lib/stacktrace.h" |
| #include "platform/assert.h" |
| #include "platform/text_buffer.h" |
| #include "platform/unaligned.h" |
| #include "platform/unicode.h" |
| #include "vm/bit_vector.h" |
| #include "vm/bootstrap.h" |
| #include "vm/bytecode_reader.h" |
| #include "vm/canonical_tables.h" |
| #include "vm/class_finalizer.h" |
| #include "vm/class_id.h" |
| #include "vm/closure_functions_cache.h" |
| #include "vm/code_comments.h" |
| #include "vm/code_descriptors.h" |
| #include "vm/code_observers.h" |
| #include "vm/compiler/assembler/disassembler.h" |
| #include "vm/compiler/assembler/disassembler_kbc.h" |
| #include "vm/compiler/jit/compiler.h" |
| #include "vm/compiler/runtime_api.h" |
| #include "vm/cpu.h" |
| #include "vm/dart.h" |
| #include "vm/dart_api_state.h" |
| #include "vm/dart_entry.h" |
| #include "vm/datastream.h" |
| #include "vm/debugger.h" |
| #include "vm/deopt_instructions.h" |
| #include "vm/double_conversion.h" |
| #include "vm/elf.h" |
| #include "vm/exceptions.h" |
| #include "vm/growable_array.h" |
| #include "vm/hash.h" |
| #include "vm/hash_table.h" |
| #include "vm/heap/become.h" |
| #include "vm/heap/heap.h" |
| #include "vm/heap/sampler.h" |
| #include "vm/heap/weak_code.h" |
| #include "vm/image_snapshot.h" |
| #include "vm/isolate_reload.h" |
| #include "vm/kernel.h" |
| #include "vm/kernel_binary.h" |
| #include "vm/kernel_isolate.h" |
| #include "vm/kernel_loader.h" |
| #include "vm/log.h" |
| #include "vm/native_symbol.h" |
| #include "vm/object_graph.h" |
| #include "vm/object_store.h" |
| #include "vm/os.h" |
| #include "vm/parser.h" |
| #include "vm/profiler.h" |
| #include "vm/regexp/regexp.h" |
| #include "vm/resolver.h" |
| #include "vm/reusable_handles.h" |
| #include "vm/reverse_pc_lookup_cache.h" |
| #include "vm/runtime_entry.h" |
| #include "vm/scopes.h" |
| #include "vm/stack_frame.h" |
| #include "vm/stub_code.h" |
| #include "vm/symbols.h" |
| #include "vm/tags.h" |
| #include "vm/thread_registry.h" |
| #include "vm/timeline.h" |
| #include "vm/type_testing_stubs.h" |
| #include "vm/zone_text_buffer.h" |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| #include "vm/compiler/aot/precompiler.h" |
| #include "vm/compiler/assembler/assembler.h" |
| #include "vm/compiler/backend/code_statistics.h" |
| #include "vm/compiler/compiler_state.h" |
| #include "vm/compiler/frontend/kernel_fingerprints.h" |
| #include "vm/compiler/frontend/kernel_translation_helper.h" |
| #include "vm/compiler/intrinsifier.h" |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| |
| namespace dart { |
| |
| DEFINE_FLAG(uint64_t, |
| huge_method_cutoff_in_code_size, |
| 200000, |
| "Huge method cutoff in unoptimized code size (in bytes)."); |
| DEFINE_FLAG( |
| bool, |
| show_internal_names, |
| false, |
| "Show names of internal classes (e.g. \"OneByteString\") in error messages " |
| "instead of showing the corresponding interface names (e.g. \"String\"). " |
| "Also show legacy nullability in type names."); |
| |
| DEFINE_FLAG(bool, |
| remove_script_timestamps_for_test, |
| false, |
| "Remove script timestamps to allow for deterministic testing."); |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| DEFINE_FLAG(bool, use_register_cc, true, "Use register calling conventions"); |
| #endif |
| |
| DECLARE_FLAG(bool, intrinsify); |
| DECLARE_FLAG(bool, trace_deoptimization); |
| DECLARE_FLAG(bool, trace_deoptimization_verbose); |
| DECLARE_FLAG(bool, trace_reload); |
| DECLARE_FLAG(bool, write_protect_code); |
| DECLARE_FLAG(bool, precompiled_mode); |
| DECLARE_FLAG(int, max_polymorphic_checks); |
| |
| static const char* const kGetterPrefix = "get:"; |
| static const intptr_t kGetterPrefixLength = strlen(kGetterPrefix); |
| static const char* const kSetterPrefix = "set:"; |
| static const intptr_t kSetterPrefixLength = strlen(kSetterPrefix); |
| static const char* const kInitPrefix = "init:"; |
| static const intptr_t kInitPrefixLength = strlen(kInitPrefix); |
| |
// A cache of VM-heap-allocated, preinitialized empty IC data entry arrays.
| ArrayPtr ICData::cached_icdata_arrays_[kCachedICDataArrayCount]; |
| |
| cpp_vtable Object::builtin_vtables_[kNumPredefinedCids] = {}; |
| |
| // These are initialized to a value that will force an illegal memory access if |
| // they are being used. |
| #if defined(RAW_NULL) |
| #error RAW_NULL should not be defined. |
| #endif |
| #define RAW_NULL static_cast<uword>(kHeapObjectTag) |
| |
| #define CHECK_ERROR(error) \ |
| { \ |
| ErrorPtr err = (error); \ |
| if (err != Error::null()) { \ |
| return err; \ |
| } \ |
| } |
| |
| #define DEFINE_SHARED_READONLY_HANDLE(Type, name) \ |
| Type* Object::name##_ = nullptr; |
| SHARED_READONLY_HANDLES_LIST(DEFINE_SHARED_READONLY_HANDLE) |
| #undef DEFINE_SHARED_READONLY_HANDLE |
| |
| ObjectPtr Object::null_ = static_cast<ObjectPtr>(RAW_NULL); |
| BoolPtr Object::true_ = static_cast<BoolPtr>(RAW_NULL); |
| BoolPtr Object::false_ = static_cast<BoolPtr>(RAW_NULL); |
| ClassPtr Object::class_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::dynamic_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::void_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::type_parameters_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::type_arguments_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::patch_class_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::function_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::closure_data_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::ffi_trampoline_data_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::field_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::script_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::library_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::namespace_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::kernel_program_info_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::code_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::instructions_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::instructions_section_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::instructions_table_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::object_pool_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::pc_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::code_source_map_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::compressed_stackmaps_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::var_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::exception_handlers_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::context_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::context_scope_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::bytecode_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::sentinel_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::singletargetcache_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::unlinkedcall_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::monomorphicsmiablecall_class_ = |
| static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::icdata_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::megamorphic_cache_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::subtypetestcache_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::loadingunit_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::api_error_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::language_error_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::unhandled_exception_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::unwind_error_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::weak_serialization_reference_class_ = |
| static_cast<ClassPtr>(RAW_NULL); |
| ClassPtr Object::weak_array_class_ = static_cast<ClassPtr>(RAW_NULL); |
| |
| static void AppendSubString(BaseTextBuffer* buffer, |
| const char* name, |
| intptr_t start_pos, |
| intptr_t len) { |
| buffer->Printf("%.*s", static_cast<int>(len), &name[start_pos]); |
| } |
| |
| // Used to define setters and getters for untagged object fields that are |
| // defined with the WSR_COMPRESSED_POINTER_FIELD macro. See |
| // PRECOMPILER_WSR_FIELD_DECLARATION in object.h for more information. |
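// For example, in a precompiler build,
//   PRECOMPILER_WSR_FIELD_DEFINITION(ClosureData, Function, parent_function)
// expands to a getter that unwraps a potential WeakSerializationReference:
//
//   FunctionPtr ClosureData::parent_function() const {
//     return Function::RawCast(
//         WeakSerializationReference::Unwrap(untag()->parent_function()));
//   }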
| #if defined(DART_PRECOMPILER) |
| #define PRECOMPILER_WSR_FIELD_DEFINITION(Class, Type, Name) \ |
| Type##Ptr Class::Name() const { \ |
| return Type::RawCast(WeakSerializationReference::Unwrap(untag()->Name())); \ |
| } |
| #else |
| #define PRECOMPILER_WSR_FIELD_DEFINITION(Class, Type, Name) \ |
| void Class::set_##Name(const Type& value) const { \ |
| untag()->set_##Name(value.ptr()); \ |
| } |
| #endif |
| |
| PRECOMPILER_WSR_FIELD_DEFINITION(ClosureData, Function, parent_function) |
| PRECOMPILER_WSR_FIELD_DEFINITION(Function, FunctionType, signature) |
| |
| #undef PRECOMPILER_WSR_FIELD_DEFINITION |
| |
| #if defined(_MSC_VER) |
| #define TRACE_TYPE_CHECKS_VERBOSE(format, ...) \ |
| if (FLAG_trace_type_checks_verbose) { \ |
| OS::PrintErr(format, __VA_ARGS__); \ |
| } |
| #else |
| #define TRACE_TYPE_CHECKS_VERBOSE(format, ...) \ |
| if (FLAG_trace_type_checks_verbose) { \ |
| OS::PrintErr(format, ##__VA_ARGS__); \ |
| } |
| #endif |
| |
| // Remove private keys, but retain getter/setter/constructor/mixin manglings. |
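// For example, '_MyClass@6328321.foo@6328321' becomes '_MyClass.foo', while
// prefixes such as 'get:' and 'set:' are left in place.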
| StringPtr String::RemovePrivateKey(const String& name) { |
| ASSERT(name.IsOneByteString()); |
| GrowableArray<uint8_t> without_key(name.Length()); |
| intptr_t i = 0; |
| while (i < name.Length()) { |
| while (i < name.Length()) { |
| uint8_t c = name.CharAt(i++); |
| if (c == '@') break; |
| without_key.Add(c); |
| } |
| while (i < name.Length()) { |
| uint8_t c = name.CharAt(i); |
| if ((c < '0') || (c > '9')) break; |
| i++; |
| } |
| } |
| |
| return String::FromLatin1(without_key.data(), without_key.length()); |
| } |
| |
// Takes a VM-internal name and makes it suitable for an external user.
| // |
| // Examples: |
| // |
| // Internal getter and setter prefixes are changed: |
| // |
| // get:foo -> foo |
| // set:foo -> foo= |
| // |
| // Private name mangling is removed, possibly multiple times: |
| // |
| // _ReceivePortImpl@709387912 -> _ReceivePortImpl |
| // _ReceivePortImpl@709387912._internal@709387912 -> |
| // _ReceivePortImpl._internal |
| // _C@6328321&_E@6328321&_F@6328321 -> _C&_E&_F |
| // |
| // The trailing . on the default constructor name is dropped: |
| // |
| // List. -> List |
| // |
| // And so forth: |
| // |
| // get:foo@6328321 -> foo |
| // _MyClass@6328321. -> _MyClass |
| // _MyClass@6328321.named -> _MyClass.named |
| // |
// For extension methods, the following demangling is done:
| // ext|func -> ext.func (instance extension method) |
| // ext|get#prop -> ext.prop (instance extension getter) |
| // ext|set#prop -> ext.prop= (instance extension setter) |
| // ext|sfunc -> ext.sfunc (static extension method) |
| // get:ext|sprop -> ext.sprop (static extension getter) |
| // set:ext|sprop -> ext.sprop= (static extension setter) |
| // |
| const char* String::ScrubName(const String& name, bool is_extension) { |
| Thread* thread = Thread::Current(); |
| NoSafepointScope no_safepoint(thread); |
| Zone* zone = thread->zone(); |
| ZoneTextBuffer printer(zone); |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| if (name.Equals(Symbols::TopLevel())) { |
| // Name of invisible top-level class. |
| return ""; |
| } |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| |
| const char* cname = name.ToCString(); |
| ASSERT(strlen(cname) == static_cast<size_t>(name.Length())); |
| const intptr_t name_len = name.Length(); |
| // First remove all private name mangling and if 'is_extension' is true |
| // substitute the first '|' character with '.'. |
| intptr_t start_pos = 0; |
| intptr_t sum_segment_len = 0; |
| for (intptr_t i = 0; i < name_len; i++) { |
| if ((cname[i] == '@') && ((i + 1) < name_len) && (cname[i + 1] >= '0') && |
| (cname[i + 1] <= '9')) { |
| // Append the current segment to the unmangled name. |
| const intptr_t segment_len = i - start_pos; |
| sum_segment_len += segment_len; |
| AppendSubString(&printer, cname, start_pos, segment_len); |
| // Advance until past the name mangling. The private keys are only |
| // numbers so we skip until the first non-number. |
| i++; // Skip the '@'. |
| while ((i < name.Length()) && (name.CharAt(i) >= '0') && |
| (name.CharAt(i) <= '9')) { |
| i++; |
| } |
| start_pos = i; |
| i--; // Account for for-loop increment. |
| } else if (is_extension && cname[i] == '|') { |
| // Append the current segment to the unmangled name. |
| const intptr_t segment_len = i - start_pos; |
| AppendSubString(&printer, cname, start_pos, segment_len); |
| // Append the '.' character (replaces '|' with '.'). |
| AppendSubString(&printer, ".", 0, 1); |
| start_pos = i + 1; |
| // Account for length of segments added so far. |
| sum_segment_len += (segment_len + 1); |
| } |
| } |
| |
| const char* unmangled_name = nullptr; |
| if (start_pos == 0) { |
| // No name unmangling needed, reuse the name that was passed in. |
| unmangled_name = cname; |
| sum_segment_len = name_len; |
| } else if (name.Length() != start_pos) { |
| // Append the last segment. |
| const intptr_t segment_len = name.Length() - start_pos; |
| sum_segment_len += segment_len; |
| AppendSubString(&printer, cname, start_pos, segment_len); |
| } |
| if (unmangled_name == nullptr) { |
| // Merge unmangled_segments. |
| unmangled_name = printer.buffer(); |
| } |
| |
| printer.Clear(); |
| intptr_t start = 0; |
| intptr_t len = sum_segment_len; |
| bool is_setter = false; |
| if (is_extension) { |
| // First scan till we see the '.' character. |
| for (intptr_t i = 0; i < len; i++) { |
| if (unmangled_name[i] == '.') { |
| intptr_t slen = i + 1; |
| intptr_t plen = slen - start; |
| AppendSubString(&printer, unmangled_name, start, plen); |
| unmangled_name += slen; |
| len -= slen; |
| break; |
| } else if (unmangled_name[i] == ':') { |
| if (start != 0) { |
| // Reset and break. |
| start = 0; |
| is_setter = false; |
| break; |
| } |
| if (unmangled_name[0] == 's') { |
| is_setter = true; |
| } |
| start = i + 1; |
| } |
| } |
| } |
| intptr_t dot_pos = -1; // Position of '.' in the name, if any. |
| start = 0; |
| for (intptr_t i = start; i < len; i++) { |
| if (unmangled_name[i] == ':' || |
| (is_extension && unmangled_name[i] == '#')) { |
| if (start != 0) { |
| // Reset and break. |
| start = 0; |
| dot_pos = -1; |
| break; |
| } |
| ASSERT(start == 0); // Only one : is possible in getters or setters. |
| if (unmangled_name[0] == 's') { |
| ASSERT(!is_setter); |
| is_setter = true; |
| } |
| start = i + 1; |
| } else if (unmangled_name[i] == '.') { |
| if (dot_pos != -1) { |
| // Reset and break. |
| start = 0; |
| dot_pos = -1; |
| break; |
| } |
| ASSERT(dot_pos == -1); // Only one dot is supported. |
| dot_pos = i; |
| } |
| } |
| |
| if (!is_extension && (start == 0) && (dot_pos == -1)) { |
| // This unmangled_name is fine as it is. |
| return unmangled_name; |
| } |
| |
| // Drop the trailing dot if needed. |
| intptr_t end = ((dot_pos + 1) == len) ? dot_pos : len; |
| |
| intptr_t substr_len = end - start; |
| AppendSubString(&printer, unmangled_name, start, substr_len); |
| if (is_setter) { |
| const char* equals = Symbols::Equals().ToCString(); |
| const intptr_t equals_len = strlen(equals); |
| AppendSubString(&printer, equals, 0, equals_len); |
| } |
| |
| return printer.buffer(); |
| } |
| |
| StringPtr String::ScrubNameRetainPrivate(const String& name, |
| bool is_extension) { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
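  // Unlike ScrubName, private key suffixes ('@<digits>') are retained in the
  // result. For example (as traced from the code below, with illustrative
  // private keys):
  //   get:foo@123  ->  foo@123
  //   set:foo@123  ->  foo=@123
  //   ext|fn@123   ->  ext.fn@123   (when 'is_extension' is true)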
| intptr_t len = name.Length(); |
| intptr_t start = 0; |
| intptr_t at_pos = -1; // Position of '@' in the name, if any. |
| bool is_setter = false; |
| |
| String& result = String::Handle(); |
| |
  // If extension, strip out the leading prefix, e.g. 'ext|func' would strip
  // out 'ext|'.
| if (is_extension) { |
| // First scan till we see the '|' character. |
| for (intptr_t i = 0; i < len; i++) { |
| if (name.CharAt(i) == '|') { |
| result = String::SubString(name, start, (i - start)); |
| result = String::Concat(result, Symbols::Dot()); |
| start = i + 1; |
| break; |
| } else if (name.CharAt(i) == ':') { |
| if (start != 0) { |
| // Reset and break. |
| start = 0; |
| is_setter = false; |
| break; |
| } |
| if (name.CharAt(0) == 's') { |
| is_setter = true; |
| } |
| start = i + 1; |
| } |
| } |
| } |
| |
| for (intptr_t i = start; i < len; i++) { |
| if (name.CharAt(i) == ':' || (is_extension && name.CharAt(i) == '#')) { |
| // Only one : is possible in getters or setters. |
| ASSERT(is_extension || start == 0); |
| if (name.CharAt(start) == 's') { |
| is_setter = true; |
| } |
| start = i + 1; |
| } else if (name.CharAt(i) == '@') { |
| // Setters should have only one @ so we know where to put the =. |
| ASSERT(!is_setter || (at_pos == -1)); |
| at_pos = i; |
| } |
| } |
| |
| if (start == 0) { |
| // This unmangled_name is fine as it is. |
| return name.ptr(); |
| } |
| |
| if (is_extension) { |
| const String& fname = |
| String::Handle(String::SubString(name, start, (len - start))); |
| result = String::Concat(result, fname); |
| } else { |
| result = String::SubString(name, start, (len - start)); |
| } |
| |
| if (is_setter) { |
| // Setters need to end with '='. |
| if (at_pos == -1) { |
| return String::Concat(result, Symbols::Equals()); |
| } else { |
| const String& pre_at = |
| String::Handle(String::SubString(result, 0, at_pos - 4)); |
| const String& post_at = |
| String::Handle(String::SubString(name, at_pos, len - at_pos)); |
| result = String::Concat(pre_at, Symbols::Equals()); |
| result = String::Concat(result, post_at); |
| } |
| } |
| |
| return result.ptr(); |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| return name.ptr(); // In AOT, return argument unchanged. |
| } |
| |
| template <typename type> |
| static bool IsSpecialCharacter(type value) { |
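  // These are the characters that are escaped with a single leading backslash
  // when a string is printed as a quoted literal; EscapeOverhead() and
  // SpecialCharacter() below rely on this same set.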
| return ((value == '"') || (value == '\n') || (value == '\f') || |
| (value == '\b') || (value == '\t') || (value == '\v') || |
| (value == '\r') || (value == '\\') || (value == '$')); |
| } |
| |
| static inline bool IsAsciiNonprintable(int32_t c) { |
| return ((0 <= c) && (c < 32)) || (c == 127); |
| } |
| |
| static int32_t EscapeOverhead(int32_t c) { |
| if (IsSpecialCharacter(c)) { |
| return 1; // 1 additional byte for the backslash. |
| } else if (IsAsciiNonprintable(c)) { |
| return 3; // 3 additional bytes to encode c as \x00. |
| } |
| return 0; |
| } |
| |
| template <typename type> |
| static type SpecialCharacter(type value) { |
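  // Returns the letter that follows the backslash in the escape sequence for
  // 'value', e.g. SpecialCharacter('\n') == 'n'. The backslash itself is
  // accounted for separately (see EscapeOverhead above).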
| if (value == '"') { |
| return '"'; |
| } else if (value == '\n') { |
| return 'n'; |
| } else if (value == '\f') { |
| return 'f'; |
| } else if (value == '\b') { |
| return 'b'; |
| } else if (value == '\t') { |
| return 't'; |
| } else if (value == '\v') { |
| return 'v'; |
| } else if (value == '\r') { |
| return 'r'; |
| } else if (value == '\\') { |
| return '\\'; |
| } else if (value == '$') { |
| return '$'; |
| } |
| UNREACHABLE(); |
| return '\0'; |
| } |
| |
| #if defined(DART_DYNAMIC_MODULES) |
| static BytecodePtr CreateVMInternalBytecode(KernelBytecode::Opcode opcode) { |
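  // Wraps the canned VM-internal instruction sequence for 'opcode' in a
  // Bytecode object with an empty object pool, empty PC descriptors and
  // empty exception handlers.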
| const KBCInstr* instructions = nullptr; |
| intptr_t instructions_size = 0; |
| |
| KernelBytecode::GetVMInternalBytecodeInstructions(opcode, &instructions, |
| &instructions_size); |
| |
| const auto& bytecode = Bytecode::Handle( |
| Bytecode::New(reinterpret_cast<uword>(instructions), instructions_size, |
| -1, TypedDataBase::Handle(), Object::empty_object_pool())); |
| bytecode.set_pc_descriptors(Object::empty_descriptors()); |
| bytecode.set_exception_handlers(Object::empty_exception_handlers()); |
| return bytecode.ptr(); |
| } |
| #endif // defined(DART_DYNAMIC_MODULES) |
| |
| void Object::InitNullAndBool(IsolateGroup* isolate_group) { |
| // Should only be run by the vm isolate. |
| ASSERT(isolate_group == Dart::vm_isolate_group()); |
| Thread* thread = Thread::Current(); |
| auto heap = isolate_group->heap(); |
| |
| // TODO(iposva): NoSafepointScope needs to be added here. |
| ASSERT(class_class() == null_); |
| |
| // Allocate and initialize the null instance. |
| // 'null_' must be the first object allocated as it is used in allocation to |
| // clear the pointer fields of objects. |
| { |
| uword address = |
| heap->Allocate(thread, Instance::InstanceSize(), Heap::kOld); |
| null_ = static_cast<InstancePtr>(address + kHeapObjectTag); |
| InitializeObjectVariant<Instance>(address, kNullCid); |
| null_->untag()->SetCanonical(); |
| } |
| |
| // Allocate and initialize the bool instances. |
  // These must be allocated such that the bit at kBoolValueBitPosition is 0
  // in the address of true and 1 in the address of false, and the two
  // addresses are otherwise identical.
| { |
| // Allocate a dummy bool object to give true the desired alignment. |
| uword address = heap->Allocate(thread, Bool::InstanceSize(), Heap::kOld); |
| InitializeObject<Bool>(address); |
| static_cast<BoolPtr>(address + kHeapObjectTag)->untag()->value_ = false; |
| } |
| { |
| // Allocate true. |
| uword address = heap->Allocate(thread, Bool::InstanceSize(), Heap::kOld); |
| true_ = static_cast<BoolPtr>(address + kHeapObjectTag); |
| InitializeObject<Bool>(address); |
| true_->untag()->value_ = true; |
| true_->untag()->SetCanonical(); |
| } |
| { |
| // Allocate false. |
| uword address = heap->Allocate(thread, Bool::InstanceSize(), Heap::kOld); |
| false_ = static_cast<BoolPtr>(address + kHeapObjectTag); |
| InitializeObject<Bool>(address); |
| false_->untag()->value_ = false; |
| false_->untag()->SetCanonical(); |
| } |
| |
| // Check that the objects have been allocated at appropriate addresses. |
| ASSERT(static_cast<uword>(true_) == |
| static_cast<uword>(null_) + kTrueOffsetFromNull); |
| ASSERT(static_cast<uword>(false_) == |
| static_cast<uword>(null_) + kFalseOffsetFromNull); |
| ASSERT((static_cast<uword>(true_) & kBoolValueMask) == 0); |
| ASSERT((static_cast<uword>(false_) & kBoolValueMask) != 0); |
| ASSERT(static_cast<uword>(false_) == |
| (static_cast<uword>(true_) | kBoolValueMask)); |
| ASSERT((static_cast<uword>(null_) & kBoolVsNullMask) == 0); |
| ASSERT((static_cast<uword>(true_) & kBoolVsNullMask) != 0); |
| ASSERT((static_cast<uword>(false_) & kBoolVsNullMask) != 0); |
| } |
| |
| void Object::InitVtables() { |
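  // Each entry of builtin_vtables_ receives the C++ vtable pointer of the
  // handle class used for the corresponding class id. A temporary fake handle
  // of the right type is constructed solely to read its vtable.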
| { |
| Object fake_handle; |
| builtin_vtables_[kObjectCid] = fake_handle.vtable(); |
| } |
| |
| #define INIT_VTABLE(clazz) \ |
| { \ |
| clazz fake_handle; \ |
| builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \ |
| } |
| CLASS_LIST_NO_OBJECT_NOR_STRING_NOR_ARRAY_NOR_MAP(INIT_VTABLE) |
| INIT_VTABLE(GrowableObjectArray) |
| #undef INIT_VTABLE |
| |
| #define INIT_VTABLE(clazz) \ |
| { \ |
| Map fake_handle; \ |
| builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \ |
| } |
| CLASS_LIST_MAPS(INIT_VTABLE) |
| #undef INIT_VTABLE |
| |
| #define INIT_VTABLE(clazz) \ |
| { \ |
| Set fake_handle; \ |
| builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \ |
| } |
| CLASS_LIST_SETS(INIT_VTABLE) |
| #undef INIT_VTABLE |
| |
| #define INIT_VTABLE(clazz) \ |
| { \ |
| Array fake_handle; \ |
| builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \ |
| } |
| CLASS_LIST_FIXED_LENGTH_ARRAYS(INIT_VTABLE) |
| #undef INIT_VTABLE |
| |
| #define INIT_VTABLE(clazz) \ |
| { \ |
| String fake_handle; \ |
| builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \ |
| } |
| CLASS_LIST_STRINGS(INIT_VTABLE) |
| #undef INIT_VTABLE |
| |
| { |
| Instance fake_handle; |
| builtin_vtables_[kFfiNativeTypeCid] = fake_handle.vtable(); |
| } |
| |
| #define INIT_VTABLE(clazz) \ |
| { \ |
| Instance fake_handle; \ |
| builtin_vtables_[kFfi##clazz##Cid] = fake_handle.vtable(); \ |
| } |
| CLASS_LIST_FFI_TYPE_MARKER(INIT_VTABLE) |
| #undef INIT_VTABLE |
| |
| { |
| Instance fake_handle; |
| builtin_vtables_[kFfiNativeFunctionCid] = fake_handle.vtable(); |
| } |
| |
| { |
| Pointer fake_handle; |
| builtin_vtables_[kPointerCid] = fake_handle.vtable(); |
| } |
| |
| { |
| DynamicLibrary fake_handle; |
| builtin_vtables_[kDynamicLibraryCid] = fake_handle.vtable(); |
| } |
| |
| #define INIT_VTABLE(clazz) \ |
| { \ |
| TypedData fake_internal_handle; \ |
| builtin_vtables_[kTypedData##clazz##Cid] = fake_internal_handle.vtable(); \ |
| TypedDataView fake_view_handle; \ |
| builtin_vtables_[kTypedData##clazz##ViewCid] = fake_view_handle.vtable(); \ |
| builtin_vtables_[kUnmodifiableTypedData##clazz##ViewCid] = \ |
| fake_view_handle.vtable(); \ |
| ExternalTypedData fake_external_handle; \ |
| builtin_vtables_[kExternalTypedData##clazz##Cid] = \ |
| fake_external_handle.vtable(); \ |
| } |
| CLASS_LIST_TYPED_DATA(INIT_VTABLE) |
| #undef INIT_VTABLE |
| |
| { |
| TypedDataView fake_handle; |
| builtin_vtables_[kByteDataViewCid] = fake_handle.vtable(); |
| builtin_vtables_[kUnmodifiableByteDataViewCid] = fake_handle.vtable(); |
| } |
| |
| { |
| Instance fake_handle; |
| builtin_vtables_[kByteBufferCid] = fake_handle.vtable(); |
| builtin_vtables_[kNullCid] = fake_handle.vtable(); |
| builtin_vtables_[kDynamicCid] = fake_handle.vtable(); |
| builtin_vtables_[kVoidCid] = fake_handle.vtable(); |
| builtin_vtables_[kNeverCid] = fake_handle.vtable(); |
| } |
| } |
| |
| void Object::Init(IsolateGroup* isolate_group) { |
| // Should only be run by the vm isolate. |
| ASSERT(isolate_group == Dart::vm_isolate_group()); |
| Heap* heap = isolate_group->heap(); |
| Thread* thread = Thread::Current(); |
| ASSERT(thread != nullptr); |
| // Ensure lock checks in setters are happy. |
| SafepointWriteRwLocker ml(thread, isolate_group->program_lock()); |
| |
| InitVtables(); |
| |
| // Allocate the read only object handles here. |
| #define INITIALIZE_SHARED_READONLY_HANDLE(Type, name) \ |
| name##_ = Type::ReadOnlyHandle(); |
| SHARED_READONLY_HANDLES_LIST(INITIALIZE_SHARED_READONLY_HANDLE) |
| #undef INITIALIZE_SHARED_READONLY_HANDLE |
| |
| *null_object_ = Object::null(); |
| *null_class_ = Class::null(); |
| *null_array_ = Array::null(); |
| *null_string_ = String::null(); |
| *null_instance_ = Instance::null(); |
| *null_function_ = Function::null(); |
| *null_function_type_ = FunctionType::null(); |
| *null_record_type_ = RecordType::null(); |
| *null_type_arguments_ = TypeArguments::null(); |
| *null_closure_ = Closure::null(); |
| *empty_type_arguments_ = TypeArguments::null(); |
| *null_abstract_type_ = AbstractType::null(); |
| *null_compressed_stackmaps_ = CompressedStackMaps::null(); |
| *bool_true_ = true_; |
| *bool_false_ = false_; |
| |
  // Initialize the empty array and empty cache array handles to null_ in
  // order to be able to check whether they have been allocated yet
  // (RAW_NULL is not available).
| *empty_array_ = Array::null(); |
| *empty_instantiations_cache_array_ = Array::null(); |
| *empty_subtype_test_cache_array_ = Array::null(); |
| |
| Class& cls = Class::Handle(); |
| |
| // Allocate and initialize the class class. |
| { |
| intptr_t size = Class::InstanceSize(); |
| uword address = heap->Allocate(thread, size, Heap::kOld); |
| class_class_ = static_cast<ClassPtr>(address + kHeapObjectTag); |
| InitializeObject<Class>(address); |
| |
| Class fake; |
| // Initialization from Class::New<Class>. |
| // Directly set ptr_ to break a circular dependency: SetRaw will attempt |
| // to lookup class class in the class table where it is not registered yet. |
| cls.ptr_ = class_class_; |
| ASSERT(builtin_vtables_[kClassCid] == fake.vtable()); |
| cls.set_instance_size( |
| Class::InstanceSize(), |
| compiler::target::RoundedAllocationSize(RTN::Class::InstanceSize())); |
| const intptr_t host_next_field_offset = Class::NextFieldOffset(); |
| const intptr_t target_next_field_offset = RTN::Class::NextFieldOffset(); |
| cls.set_next_field_offset(host_next_field_offset, target_next_field_offset); |
| cls.set_id(Class::kClassId); |
| cls.set_state_bits(0); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| cls.set_type_arguments_field_offset_in_words(Class::kNoTypeArguments, |
| RTN::Class::kNoTypeArguments); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_num_native_fields(0); |
| cls.InitEmptyFields(); |
| isolate_group->class_table()->Register(cls); |
| } |
| |
| // Allocate and initialize the null class. |
| cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group); |
| cls.set_num_type_arguments_unsafe(0); |
| isolate_group->object_store()->set_null_class(cls); |
| |
| // Allocate and initialize Never class. |
| cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| isolate_group->object_store()->set_never_class(cls); |
| |
| // Allocate and initialize the free list element class. |
| cls = Class::New<FreeListElement::FakeInstance, |
| RTN::FreeListElement::FakeInstance>(kFreeListElement, |
| isolate_group); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| |
| // Allocate and initialize the forwarding corpse class. |
| cls = Class::New<ForwardingCorpse::FakeInstance, |
| RTN::ForwardingCorpse::FakeInstance>(kForwardingCorpse, |
| isolate_group); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| |
| // Allocate and initialize Sentinel class. |
| cls = Class::New<Sentinel, RTN::Sentinel>(isolate_group); |
| sentinel_class_ = cls.ptr(); |
| |
| // Allocate and initialize the sentinel values. |
| { |
| *sentinel_ ^= Sentinel::New(); |
| } |
| |
| // Allocate and initialize optimizing compiler constants. |
| { |
| *unknown_constant_ ^= Sentinel::New(); |
| *non_constant_ ^= Sentinel::New(); |
| *optimized_out_ ^= Sentinel::New(); |
| } |
| |
| // Allocate the remaining VM internal classes. |
| cls = Class::New<TypeParameters, RTN::TypeParameters>(isolate_group); |
| type_parameters_class_ = cls.ptr(); |
| |
| cls = Class::New<TypeArguments, RTN::TypeArguments>(isolate_group); |
| type_arguments_class_ = cls.ptr(); |
| |
| cls = Class::New<PatchClass, RTN::PatchClass>(isolate_group); |
| patch_class_class_ = cls.ptr(); |
| |
| cls = Class::New<Function, RTN::Function>(isolate_group); |
| function_class_ = cls.ptr(); |
| |
| cls = Class::New<ClosureData, RTN::ClosureData>(isolate_group); |
| closure_data_class_ = cls.ptr(); |
| |
| cls = Class::New<FfiTrampolineData, RTN::FfiTrampolineData>(isolate_group); |
| ffi_trampoline_data_class_ = cls.ptr(); |
| |
| cls = Class::New<Field, RTN::Field>(isolate_group); |
| field_class_ = cls.ptr(); |
| |
| cls = Class::New<Script, RTN::Script>(isolate_group); |
| script_class_ = cls.ptr(); |
| |
| cls = Class::New<Library, RTN::Library>(isolate_group); |
| library_class_ = cls.ptr(); |
| |
| cls = Class::New<Namespace, RTN::Namespace>(isolate_group); |
| namespace_class_ = cls.ptr(); |
| |
| cls = Class::New<KernelProgramInfo, RTN::KernelProgramInfo>(isolate_group); |
| kernel_program_info_class_ = cls.ptr(); |
| |
| cls = Class::New<Code, RTN::Code>(isolate_group); |
| code_class_ = cls.ptr(); |
| |
| cls = Class::New<Instructions, RTN::Instructions>(isolate_group); |
| instructions_class_ = cls.ptr(); |
| |
| cls = |
| Class::New<InstructionsSection, RTN::InstructionsSection>(isolate_group); |
| instructions_section_class_ = cls.ptr(); |
| |
| cls = Class::New<InstructionsTable, RTN::InstructionsTable>(isolate_group); |
| instructions_table_class_ = cls.ptr(); |
| |
| cls = Class::New<ObjectPool, RTN::ObjectPool>(isolate_group); |
| object_pool_class_ = cls.ptr(); |
| |
| cls = Class::New<PcDescriptors, RTN::PcDescriptors>(isolate_group); |
| pc_descriptors_class_ = cls.ptr(); |
| |
| cls = Class::New<CodeSourceMap, RTN::CodeSourceMap>(isolate_group); |
| code_source_map_class_ = cls.ptr(); |
| |
| cls = |
| Class::New<CompressedStackMaps, RTN::CompressedStackMaps>(isolate_group); |
| compressed_stackmaps_class_ = cls.ptr(); |
| |
| cls = |
| Class::New<LocalVarDescriptors, RTN::LocalVarDescriptors>(isolate_group); |
| var_descriptors_class_ = cls.ptr(); |
| |
| cls = Class::New<ExceptionHandlers, RTN::ExceptionHandlers>(isolate_group); |
| exception_handlers_class_ = cls.ptr(); |
| |
| cls = Class::New<Context, RTN::Context>(isolate_group); |
| context_class_ = cls.ptr(); |
| |
| cls = Class::New<ContextScope, RTN::ContextScope>(isolate_group); |
| context_scope_class_ = cls.ptr(); |
| |
| cls = Class::New<Bytecode, RTN::Bytecode>(isolate_group); |
| bytecode_class_ = cls.ptr(); |
| |
| cls = Class::New<SingleTargetCache, RTN::SingleTargetCache>(isolate_group); |
| singletargetcache_class_ = cls.ptr(); |
| |
| cls = Class::New<UnlinkedCall, RTN::UnlinkedCall>(isolate_group); |
| unlinkedcall_class_ = cls.ptr(); |
| |
| cls = Class::New<MonomorphicSmiableCall, RTN::MonomorphicSmiableCall>( |
| isolate_group); |
| monomorphicsmiablecall_class_ = cls.ptr(); |
| |
| cls = Class::New<ICData, RTN::ICData>(isolate_group); |
| icdata_class_ = cls.ptr(); |
| |
| cls = Class::New<MegamorphicCache, RTN::MegamorphicCache>(isolate_group); |
| megamorphic_cache_class_ = cls.ptr(); |
| |
| cls = Class::New<SubtypeTestCache, RTN::SubtypeTestCache>(isolate_group); |
| subtypetestcache_class_ = cls.ptr(); |
| |
| cls = Class::New<LoadingUnit, RTN::LoadingUnit>(isolate_group); |
| loadingunit_class_ = cls.ptr(); |
| |
| cls = Class::New<ApiError, RTN::ApiError>(isolate_group); |
| api_error_class_ = cls.ptr(); |
| |
| cls = Class::New<LanguageError, RTN::LanguageError>(isolate_group); |
| language_error_class_ = cls.ptr(); |
| |
| cls = Class::New<UnhandledException, RTN::UnhandledException>(isolate_group); |
| unhandled_exception_class_ = cls.ptr(); |
| |
| cls = Class::New<UnwindError, RTN::UnwindError>(isolate_group); |
| unwind_error_class_ = cls.ptr(); |
| |
| cls = Class::New<WeakSerializationReference, RTN::WeakSerializationReference>( |
| isolate_group); |
| weak_serialization_reference_class_ = cls.ptr(); |
| |
| cls = Class::New<WeakArray, RTN::WeakArray>(isolate_group); |
| weak_array_class_ = cls.ptr(); |
| |
| ASSERT(class_class() != null_); |
| |
| // Pre-allocate classes in the vm isolate so that we can for example create a |
| // symbol table and populate it with some frequently used strings as symbols. |
| cls = Class::New<Array, RTN::Array>(isolate_group); |
| isolate_group->object_store()->set_array_class(cls); |
| cls.set_type_arguments_field_offset(Array::type_arguments_offset(), |
| RTN::Array::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group); |
| isolate_group->object_store()->set_immutable_array_class(cls); |
| cls.set_type_arguments_field_offset(Array::type_arguments_offset(), |
| RTN::Array::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| // In order to be able to canonicalize arguments descriptors early. |
| cls.set_is_prefinalized(); |
| cls = |
| Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(isolate_group); |
| isolate_group->object_store()->set_growable_object_array_class(cls); |
| cls.set_type_arguments_field_offset( |
| GrowableObjectArray::type_arguments_offset(), |
| RTN::GrowableObjectArray::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| cls = Class::NewStringClass(kOneByteStringCid, isolate_group); |
| isolate_group->object_store()->set_one_byte_string_class(cls); |
| cls = Class::NewStringClass(kTwoByteStringCid, isolate_group); |
| isolate_group->object_store()->set_two_byte_string_class(cls); |
| cls = Class::New<Mint, RTN::Mint>(isolate_group); |
| isolate_group->object_store()->set_mint_class(cls); |
| cls = Class::New<Double, RTN::Double>(isolate_group); |
| isolate_group->object_store()->set_double_class(cls); |
| cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group); |
| isolate_group->object_store()->set_float32x4_class(cls); |
| cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group); |
| isolate_group->object_store()->set_float64x2_class(cls); |
| cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group); |
| isolate_group->object_store()->set_int32x4_class(cls); |
| |
| // Ensure that class kExternalTypedDataUint8ArrayCid is registered as we |
| // need it when reading in the token stream of bootstrap classes in the VM |
| // isolate. |
| Class::NewExternalTypedDataClass(kExternalTypedDataUint8ArrayCid, |
| isolate_group); |
| |
| // Needed for object pools of VM isolate stubs. |
| Class::NewTypedDataClass(kTypedDataInt8ArrayCid, isolate_group); |
| |
| // Allocate and initialize the empty_array instance. |
| { |
| uword address = heap->Allocate(thread, Array::InstanceSize(0), Heap::kOld); |
| InitializeObjectVariant<Array>(address, kImmutableArrayCid, 0); |
| Array::initializeHandle(empty_array_, |
| static_cast<ArrayPtr>(address + kHeapObjectTag)); |
| empty_array_->untag()->set_length(Smi::New(0)); |
| empty_array_->SetCanonical(); |
| } |
| |
| Smi& smi = Smi::Handle(); |
| // Allocate and initialize the empty instantiations cache array instance, |
| // which contains metadata as the first element and a sentinel value |
| // at the start of the first entry. |
| { |
| const intptr_t array_size = |
| static_cast<intptr_t>(TypeArguments::Cache::kHeaderSize) + |
| static_cast<intptr_t>(TypeArguments::Cache::kEntrySize); |
| uword address = |
| heap->Allocate(thread, Array::InstanceSize(array_size), Heap::kOld); |
| InitializeObjectVariant<Array>(address, kImmutableArrayCid, array_size); |
| Array::initializeHandle(empty_instantiations_cache_array_, |
| static_cast<ArrayPtr>(address + kHeapObjectTag)); |
| empty_instantiations_cache_array_->untag()->set_length( |
| Smi::New(array_size)); |
| // The empty cache has no occupied entries and is not a hash-based cache. |
| smi = Smi::New(0); |
| empty_instantiations_cache_array_->SetAt( |
| TypeArguments::Cache::kMetadataIndex, smi); |
| // Make the first (and only) entry unoccupied by setting its first element |
| // to the sentinel value. |
| smi = TypeArguments::Cache::Sentinel(); |
| InstantiationsCacheTable table(*empty_instantiations_cache_array_); |
| table.At(0).Set<TypeArguments::Cache::kSentinelIndex>(smi); |
| // The other contents of the array are immaterial. |
| empty_instantiations_cache_array_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the empty subtype test cache array instance, |
| // which contains a single unoccupied entry. |
| { |
| const intptr_t array_size = SubtypeTestCache::kTestEntryLength; |
| uword address = |
| heap->Allocate(thread, Array::InstanceSize(array_size), Heap::kOld); |
| InitializeObjectVariant<Array>(address, kImmutableArrayCid, array_size); |
| Array::initializeHandle(empty_subtype_test_cache_array_, |
| static_cast<ArrayPtr>(address + kHeapObjectTag)); |
| empty_subtype_test_cache_array_->untag()->set_length(Smi::New(array_size)); |
| // Make the first (and only) entry unoccupied by setting its first element |
| // to the null value. |
| SubtypeTestCacheTable table(*empty_subtype_test_cache_array_); |
| table.At(0).Set<SubtypeTestCache::kInstanceCidOrSignature>( |
| Object::null_object()); |
| // The other contents of the array are immaterial. |
| empty_subtype_test_cache_array_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the canonical empty context scope object. |
| { |
| uword address = |
| heap->Allocate(thread, ContextScope::InstanceSize(0), Heap::kOld); |
| InitializeObject<ContextScope>(address, 0); |
| ContextScope::initializeHandle( |
| empty_context_scope_, |
| static_cast<ContextScopePtr>(address + kHeapObjectTag)); |
| empty_context_scope_->StoreNonPointer( |
| &empty_context_scope_->untag()->num_variables_, 0); |
| empty_context_scope_->StoreNonPointer( |
| &empty_context_scope_->untag()->is_implicit_, true); |
| empty_context_scope_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the canonical empty object pool object. |
| { |
| uword address = |
| heap->Allocate(thread, ObjectPool::InstanceSize(0), Heap::kOld); |
| InitializeObject<ObjectPool>(address, 0); |
| ObjectPool::initializeHandle( |
| empty_object_pool_, |
| static_cast<ObjectPoolPtr>(address + kHeapObjectTag)); |
| empty_object_pool_->StoreNonPointer(&empty_object_pool_->untag()->length_, |
| 0); |
| empty_object_pool_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the empty_compressed_stackmaps instance. |
| { |
| const intptr_t instance_size = CompressedStackMaps::InstanceSize(0); |
| uword address = heap->Allocate(thread, instance_size, Heap::kOld); |
| InitializeObject<CompressedStackMaps>(address, 0); |
| CompressedStackMaps::initializeHandle( |
| empty_compressed_stackmaps_, |
| static_cast<CompressedStackMapsPtr>(address + kHeapObjectTag)); |
| empty_compressed_stackmaps_->untag()->payload()->set_flags_and_size(0); |
| empty_compressed_stackmaps_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the empty_descriptors instance. |
| { |
| uword address = |
| heap->Allocate(thread, PcDescriptors::InstanceSize(0), Heap::kOld); |
| InitializeObject<PcDescriptors>(address, 0); |
| PcDescriptors::initializeHandle( |
| empty_descriptors_, |
| static_cast<PcDescriptorsPtr>(address + kHeapObjectTag)); |
| empty_descriptors_->StoreNonPointer(&empty_descriptors_->untag()->length_, |
| 0); |
| empty_descriptors_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the canonical empty variable descriptor object. |
| { |
| uword address = heap->Allocate(thread, LocalVarDescriptors::InstanceSize(0), |
| Heap::kOld); |
| InitializeObject<LocalVarDescriptors>(address, 0); |
| LocalVarDescriptors::initializeHandle( |
| empty_var_descriptors_, |
| static_cast<LocalVarDescriptorsPtr>(address + kHeapObjectTag)); |
| empty_var_descriptors_->StoreNonPointer( |
| &empty_var_descriptors_->untag()->num_entries_, 0); |
| empty_var_descriptors_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the canonical empty exception handler info object. |
| // The vast majority of all functions do not contain an exception handler |
| // and can share this canonical descriptor. |
| { |
| uword address = |
| heap->Allocate(thread, ExceptionHandlers::InstanceSize(0), Heap::kOld); |
| InitializeObject<ExceptionHandlers>(address, 0); |
| ExceptionHandlers::initializeHandle( |
| empty_exception_handlers_, |
| static_cast<ExceptionHandlersPtr>(address + kHeapObjectTag)); |
| empty_exception_handlers_->StoreNonPointer( |
| &empty_exception_handlers_->untag()->packed_fields_, 0); |
| empty_exception_handlers_->SetCanonical(); |
| } |
| |
| // Empty exception handlers for async/async* functions. |
| { |
| uword address = |
| heap->Allocate(thread, ExceptionHandlers::InstanceSize(0), Heap::kOld); |
| InitializeObject<ExceptionHandlers>(address, 0); |
| ExceptionHandlers::initializeHandle( |
| empty_async_exception_handlers_, |
| static_cast<ExceptionHandlersPtr>(address + kHeapObjectTag)); |
| empty_async_exception_handlers_->StoreNonPointer( |
| &empty_async_exception_handlers_->untag()->packed_fields_, |
| UntaggedExceptionHandlers::AsyncHandlerBit::update(true, 0)); |
| empty_async_exception_handlers_->SetCanonical(); |
| } |
| |
| // Allocate and initialize the canonical empty type arguments object. |
| { |
| uword address = |
| heap->Allocate(thread, TypeArguments::InstanceSize(0), Heap::kOld); |
| InitializeObject<TypeArguments>(address, 0); |
| TypeArguments::initializeHandle( |
| empty_type_arguments_, |
| static_cast<TypeArgumentsPtr>(address + kHeapObjectTag)); |
| empty_type_arguments_->untag()->set_length(Smi::New(0)); |
| empty_type_arguments_->untag()->set_hash(Smi::New(0)); |
| empty_type_arguments_->ComputeHash(); |
| empty_type_arguments_->SetCanonical(); |
| } |
| |
| // The VM isolate snapshot object table is initialized to an empty array |
| // as we do not have any VM isolate snapshot at this time. |
| *vm_isolate_snapshot_object_table_ = Object::empty_array().ptr(); |
| |
| cls = Class::New<Instance, RTN::Instance>(kDynamicCid, isolate_group); |
| cls.set_is_abstract(); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| dynamic_class_ = cls.ptr(); |
| |
| cls = Class::New<Instance, RTN::Instance>(kVoidCid, isolate_group); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| void_class_ = cls.ptr(); |
| |
| cls = Class::New<Type, RTN::Type>(isolate_group); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| |
| cls = Class::New<FunctionType, RTN::FunctionType>(isolate_group); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| |
| cls = Class::New<RecordType, RTN::RecordType>(isolate_group); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| |
| cls = dynamic_class_; |
| *dynamic_type_ = |
| Type::New(cls, Object::null_type_arguments(), Nullability::kNullable); |
| dynamic_type_->SetIsFinalized(); |
| dynamic_type_->ComputeHash(); |
| dynamic_type_->SetCanonical(); |
| |
| cls = void_class_; |
| *void_type_ = |
| Type::New(cls, Object::null_type_arguments(), Nullability::kNullable); |
| void_type_->SetIsFinalized(); |
| void_type_->ComputeHash(); |
| void_type_->SetCanonical(); |
| |
| // Since TypeArguments objects are passed as function arguments, make them |
| // behave as Dart instances, although they are just VM objects. |
| // Note that we cannot set the super type to ObjectType, which does not live |
| // in the vm isolate. See special handling in Class::SuperClass(). |
| cls = type_arguments_class_; |
| cls.set_interfaces(Object::empty_array()); |
| cls.SetFields(Object::empty_array()); |
| cls.SetFunctions(Object::empty_array()); |
| |
| cls = Class::New<Bool, RTN::Bool>(isolate_group); |
| isolate_group->object_store()->set_bool_class(cls); |
| |
| *smi_illegal_cid_ = Smi::New(kIllegalCid); |
| *smi_zero_ = Smi::New(0); |
| |
| String& error_str = String::Handle(); |
| error_str = String::New( |
| "Callbacks into the Dart VM are currently prohibited. Either there are " |
| "outstanding pointers from Dart_TypedDataAcquireData that have not been " |
| "released with Dart_TypedDataReleaseData, or a finalizer is running.", |
| Heap::kOld); |
| *no_callbacks_error_ = ApiError::New(error_str, Heap::kOld); |
| error_str = String::New( |
| "No api calls are allowed while unwind is in progress", Heap::kOld); |
| *unwind_in_progress_error_ = UnwindError::New(error_str, Heap::kOld); |
| error_str = String::New("SnapshotWriter Error", Heap::kOld); |
| *snapshot_writer_error_ = |
| LanguageError::New(error_str, Report::kError, Heap::kOld); |
| error_str = String::New("Branch offset overflow", Heap::kOld); |
| *branch_offset_error_ = |
| LanguageError::New(error_str, Report::kBailout, Heap::kOld); |
| error_str = String::New("Background Compilation Failed", Heap::kOld); |
| *background_compilation_error_ = |
| LanguageError::New(error_str, Report::kBailout, Heap::kOld); |
| error_str = String::New("No debuggable code where breakpoint was requested", |
| Heap::kOld); |
| *no_debuggable_code_error_ = |
| LanguageError::New(error_str, Report::kError, Heap::kOld); |
| error_str = String::New("Out of memory", Heap::kOld); |
| *out_of_memory_error_ = |
| LanguageError::New(error_str, Report::kError, Heap::kOld); |
| |
| // Allocate the parameter types and names for synthetic getters. |
| *synthetic_getter_parameter_types_ = Array::New(1, Heap::kOld); |
| synthetic_getter_parameter_types_->SetAt(0, Object::dynamic_type()); |
| *synthetic_getter_parameter_names_ = Array::New(1, Heap::kOld); |
| // Fill in synthetic_getter_parameter_names_ later, after symbols are |
| // initialized (in Object::FinalizeVMIsolate). |
| // synthetic_getter_parameter_names_ object needs to be created earlier as |
| // VM isolate snapshot reader references it before Object::FinalizeVMIsolate. |
| |
| #if defined(DART_DYNAMIC_MODULES) |
| *implicit_getter_bytecode_ = |
| CreateVMInternalBytecode(KernelBytecode::kVMInternal_ImplicitGetter); |
| |
| *implicit_setter_bytecode_ = |
| CreateVMInternalBytecode(KernelBytecode::kVMInternal_ImplicitSetter); |
| |
| *implicit_static_getter_bytecode_ = CreateVMInternalBytecode( |
| KernelBytecode::kVMInternal_ImplicitStaticGetter); |
| |
| *implicit_static_setter_bytecode_ = CreateVMInternalBytecode( |
| KernelBytecode::kVMInternal_ImplicitStaticSetter); |
| |
| *method_extractor_bytecode_ = |
| CreateVMInternalBytecode(KernelBytecode::kVMInternal_MethodExtractor); |
| |
| *invoke_closure_bytecode_ = |
| CreateVMInternalBytecode(KernelBytecode::kVMInternal_InvokeClosure); |
| |
| *invoke_field_bytecode_ = |
| CreateVMInternalBytecode(KernelBytecode::kVMInternal_InvokeField); |
| |
| *nsm_dispatcher_bytecode_ = CreateVMInternalBytecode( |
| KernelBytecode::kVMInternal_NoSuchMethodDispatcher); |
| |
| *dynamic_invocation_forwarder_bytecode_ = CreateVMInternalBytecode( |
| KernelBytecode::kVMInternal_ForwardDynamicInvocation); |
| |
| *implicit_static_closure_bytecode_ = CreateVMInternalBytecode( |
| KernelBytecode::kVMInternal_ImplicitStaticClosure); |
| |
| *implicit_instance_closure_bytecode_ = CreateVMInternalBytecode( |
| KernelBytecode::kVMInternal_ImplicitInstanceClosure); |
| |
| *implicit_constructor_closure_bytecode_ = CreateVMInternalBytecode( |
| KernelBytecode::kVMInternal_ImplicitConstructorClosure); |
| #endif // defined(DART_DYNAMIC_MODULES) |
| |
  // Some thread fields need to be reinitialized, as the null constants were
  // not initialized until now.
| thread->ClearStickyError(); |
| |
| ASSERT(!null_object_->IsSmi()); |
| ASSERT(!null_class_->IsSmi()); |
| ASSERT(null_class_->IsClass()); |
| ASSERT(!null_array_->IsSmi()); |
| ASSERT(null_array_->IsArray()); |
| ASSERT(!null_string_->IsSmi()); |
| ASSERT(null_string_->IsString()); |
| ASSERT(!null_instance_->IsSmi()); |
| ASSERT(null_instance_->IsInstance()); |
| ASSERT(!null_function_->IsSmi()); |
| ASSERT(null_function_->IsFunction()); |
| ASSERT(!null_function_type_->IsSmi()); |
| ASSERT(null_function_type_->IsFunctionType()); |
| ASSERT(!null_record_type_->IsSmi()); |
| ASSERT(null_record_type_->IsRecordType()); |
| ASSERT(!null_type_arguments_->IsSmi()); |
| ASSERT(null_type_arguments_->IsTypeArguments()); |
| ASSERT(!null_compressed_stackmaps_->IsSmi()); |
| ASSERT(null_compressed_stackmaps_->IsCompressedStackMaps()); |
| ASSERT(!empty_array_->IsSmi()); |
| ASSERT(empty_array_->IsArray()); |
| ASSERT(!empty_instantiations_cache_array_->IsSmi()); |
| ASSERT(empty_instantiations_cache_array_->IsArray()); |
| ASSERT(!empty_subtype_test_cache_array_->IsSmi()); |
| ASSERT(empty_subtype_test_cache_array_->IsArray()); |
| ASSERT(!empty_type_arguments_->IsSmi()); |
| ASSERT(empty_type_arguments_->IsTypeArguments()); |
| ASSERT(!empty_context_scope_->IsSmi()); |
| ASSERT(empty_context_scope_->IsContextScope()); |
| ASSERT(!empty_compressed_stackmaps_->IsSmi()); |
| ASSERT(empty_compressed_stackmaps_->IsCompressedStackMaps()); |
| ASSERT(!empty_descriptors_->IsSmi()); |
| ASSERT(empty_descriptors_->IsPcDescriptors()); |
| ASSERT(!empty_var_descriptors_->IsSmi()); |
| ASSERT(empty_var_descriptors_->IsLocalVarDescriptors()); |
| ASSERT(!empty_exception_handlers_->IsSmi()); |
| ASSERT(empty_exception_handlers_->IsExceptionHandlers()); |
| ASSERT(!empty_async_exception_handlers_->IsSmi()); |
| ASSERT(empty_async_exception_handlers_->IsExceptionHandlers()); |
| ASSERT(!sentinel_->IsSmi()); |
| ASSERT(sentinel_->IsSentinel()); |
| ASSERT(!unknown_constant_->IsSmi()); |
| ASSERT(unknown_constant_->IsSentinel()); |
| ASSERT(!non_constant_->IsSmi()); |
| ASSERT(non_constant_->IsSentinel()); |
| ASSERT(!optimized_out_->IsSmi()); |
| ASSERT(optimized_out_->IsSentinel()); |
| ASSERT(!bool_true_->IsSmi()); |
| ASSERT(bool_true_->IsBool()); |
| ASSERT(!bool_false_->IsSmi()); |
| ASSERT(bool_false_->IsBool()); |
| ASSERT(smi_illegal_cid_->IsSmi()); |
| ASSERT(smi_zero_->IsSmi()); |
| ASSERT(!no_callbacks_error_->IsSmi()); |
| ASSERT(no_callbacks_error_->IsApiError()); |
| ASSERT(!unwind_in_progress_error_->IsSmi()); |
| ASSERT(unwind_in_progress_error_->IsUnwindError()); |
| ASSERT(!snapshot_writer_error_->IsSmi()); |
| ASSERT(snapshot_writer_error_->IsLanguageError()); |
| ASSERT(!branch_offset_error_->IsSmi()); |
| ASSERT(branch_offset_error_->IsLanguageError()); |
| ASSERT(!background_compilation_error_->IsSmi()); |
| ASSERT(background_compilation_error_->IsLanguageError()); |
| ASSERT(!out_of_memory_error_->IsSmi()); |
| ASSERT(out_of_memory_error_->IsLanguageError()); |
| ASSERT(!vm_isolate_snapshot_object_table_->IsSmi()); |
| ASSERT(vm_isolate_snapshot_object_table_->IsArray()); |
| ASSERT(!synthetic_getter_parameter_types_->IsSmi()); |
| ASSERT(synthetic_getter_parameter_types_->IsArray()); |
| ASSERT(!synthetic_getter_parameter_names_->IsSmi()); |
| ASSERT(synthetic_getter_parameter_names_->IsArray()); |
| ASSERT(!implicit_getter_bytecode_->IsSmi()); |
| ASSERT(implicit_getter_bytecode_->IsBytecode()); |
| ASSERT(!implicit_setter_bytecode_->IsSmi()); |
| ASSERT(implicit_setter_bytecode_->IsBytecode()); |
| ASSERT(!implicit_static_getter_bytecode_->IsSmi()); |
| ASSERT(implicit_static_getter_bytecode_->IsBytecode()); |
| ASSERT(!implicit_static_setter_bytecode_->IsSmi()); |
| ASSERT(implicit_static_setter_bytecode_->IsBytecode()); |
| ASSERT(!method_extractor_bytecode_->IsSmi()); |
| ASSERT(method_extractor_bytecode_->IsBytecode()); |
| ASSERT(!invoke_closure_bytecode_->IsSmi()); |
| ASSERT(invoke_closure_bytecode_->IsBytecode()); |
| ASSERT(!invoke_field_bytecode_->IsSmi()); |
| ASSERT(invoke_field_bytecode_->IsBytecode()); |
| ASSERT(!nsm_dispatcher_bytecode_->IsSmi()); |
| ASSERT(nsm_dispatcher_bytecode_->IsBytecode()); |
| ASSERT(!dynamic_invocation_forwarder_bytecode_->IsSmi()); |
| ASSERT(dynamic_invocation_forwarder_bytecode_->IsBytecode()); |
| ASSERT(!implicit_static_closure_bytecode_->IsSmi()); |
| ASSERT(implicit_static_closure_bytecode_->IsBytecode()); |
| ASSERT(!implicit_instance_closure_bytecode_->IsSmi()); |
| ASSERT(implicit_instance_closure_bytecode_->IsBytecode()); |
| ASSERT(!implicit_constructor_closure_bytecode_->IsSmi()); |
| ASSERT(implicit_constructor_closure_bytecode_->IsBytecode()); |
| } |
| |
| void Object::FinishInit(IsolateGroup* isolate_group) { |
| // The type testing stubs we initialize in AbstractType objects for the |
| // canonical type of kDynamicCid/kVoidCid need to be set in this |
| // method, which is called after StubCode::InitOnce(). |
| Code& code = Code::Handle(); |
| |
| code = TypeTestingStubGenerator::DefaultCodeForType(*dynamic_type_); |
| dynamic_type_->InitializeTypeTestingStubNonAtomic(code); |
| |
| code = TypeTestingStubGenerator::DefaultCodeForType(*void_type_); |
| void_type_->InitializeTypeTestingStubNonAtomic(code); |
| } |
| |
| void Object::Cleanup() { |
| null_ = static_cast<ObjectPtr>(RAW_NULL); |
| true_ = static_cast<BoolPtr>(RAW_NULL); |
| false_ = static_cast<BoolPtr>(RAW_NULL); |
| class_class_ = static_cast<ClassPtr>(RAW_NULL); |
| dynamic_class_ = static_cast<ClassPtr>(RAW_NULL); |
| void_class_ = static_cast<ClassPtr>(RAW_NULL); |
| type_parameters_class_ = static_cast<ClassPtr>(RAW_NULL); |
| type_arguments_class_ = static_cast<ClassPtr>(RAW_NULL); |
| patch_class_class_ = static_cast<ClassPtr>(RAW_NULL); |
| function_class_ = static_cast<ClassPtr>(RAW_NULL); |
| closure_data_class_ = static_cast<ClassPtr>(RAW_NULL); |
| ffi_trampoline_data_class_ = static_cast<ClassPtr>(RAW_NULL); |
| field_class_ = static_cast<ClassPtr>(RAW_NULL); |
| script_class_ = static_cast<ClassPtr>(RAW_NULL); |
| library_class_ = static_cast<ClassPtr>(RAW_NULL); |
| namespace_class_ = static_cast<ClassPtr>(RAW_NULL); |
| kernel_program_info_class_ = static_cast<ClassPtr>(RAW_NULL); |
| code_class_ = static_cast<ClassPtr>(RAW_NULL); |
| instructions_class_ = static_cast<ClassPtr>(RAW_NULL); |
| instructions_section_class_ = static_cast<ClassPtr>(RAW_NULL); |
| instructions_table_class_ = static_cast<ClassPtr>(RAW_NULL); |
| object_pool_class_ = static_cast<ClassPtr>(RAW_NULL); |
| pc_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL); |
| code_source_map_class_ = static_cast<ClassPtr>(RAW_NULL); |
| compressed_stackmaps_class_ = static_cast<ClassPtr>(RAW_NULL); |
| var_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL); |
| exception_handlers_class_ = static_cast<ClassPtr>(RAW_NULL); |
| context_class_ = static_cast<ClassPtr>(RAW_NULL); |
| context_scope_class_ = static_cast<ClassPtr>(RAW_NULL); |
| bytecode_class_ = static_cast<ClassPtr>(RAW_NULL); |
| singletargetcache_class_ = static_cast<ClassPtr>(RAW_NULL); |
| unlinkedcall_class_ = static_cast<ClassPtr>(RAW_NULL); |
| monomorphicsmiablecall_class_ = static_cast<ClassPtr>(RAW_NULL); |
| icdata_class_ = static_cast<ClassPtr>(RAW_NULL); |
| megamorphic_cache_class_ = static_cast<ClassPtr>(RAW_NULL); |
| subtypetestcache_class_ = static_cast<ClassPtr>(RAW_NULL); |
| loadingunit_class_ = static_cast<ClassPtr>(RAW_NULL); |
| api_error_class_ = static_cast<ClassPtr>(RAW_NULL); |
| language_error_class_ = static_cast<ClassPtr>(RAW_NULL); |
| unhandled_exception_class_ = static_cast<ClassPtr>(RAW_NULL); |
| unwind_error_class_ = static_cast<ClassPtr>(RAW_NULL); |
| } |
| |
| // An object visitor which marks all visited objects. This is used to premark |
| // all objects in the vm_isolate_ heap. It also precalculates hash codes so |
| // that we can get the identity hash code of objects in the read-only VM |
| // isolate. |
| class FinalizeVMIsolateVisitor : public ObjectVisitor { |
| public: |
| FinalizeVMIsolateVisitor() |
| #if defined(HASH_IN_OBJECT_HEADER) |
| : counter_(1337) |
| #endif |
| { |
| } |
| |
| void VisitObject(ObjectPtr obj) { |
| // Free list elements should never be marked. |
| ASSERT(!obj->untag()->IsMarked()); |
| // No forwarding corpses in the VM isolate. |
| ASSERT(!obj->IsForwardingCorpse()); |
| if (!obj->IsFreeListElement()) { |
| obj->untag()->SetMarkBitUnsynchronized(); |
| Object::FinalizeReadOnlyObject(obj); |
| #if defined(HASH_IN_OBJECT_HEADER) |
| // These objects end up in the read-only VM isolate which is shared |
| // between isolates, so we have to prepopulate them with identity hash |
| // codes, since we can't add hash codes later. |
| if (Object::GetCachedHash(obj) == 0) { |
| // Some classes have identity hash codes that depend on their contents |
| // rather than being assigned per object. |
| ASSERT(!obj->IsStringInstance()); |
| if (obj == Object::null()) { |
| Object::SetCachedHashIfNotSet(obj, kNullIdentityHash); |
| } else if (obj == Object::bool_true().ptr()) { |
| Object::SetCachedHashIfNotSet(obj, kTrueIdentityHash); |
| } else if (obj == Object::bool_false().ptr()) { |
| Object::SetCachedHashIfNotSet(obj, kFalseIdentityHash); |
| } else if (!obj->IsMint() && !obj->IsDouble()) { |
| counter_ += 2011; // The year Dart was announced and a prime. |
| counter_ &= 0x3fffffff; |
| if (counter_ == 0) counter_++; |
| Object::SetCachedHashIfNotSet(obj, counter_); |
| } |
| } |
| #endif |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| if (obj->IsClass()) { |
| // Won't be able to update read-only VM isolate classes if implementors |
| // are discovered later. We use kVoidCid instead of kDynamicCid here to |
| // be able to distinguish read-only VM isolate classes during reload. |
| // See ProgramReloadContext::RestoreClassHierarchyInvariants. |
| static_cast<ClassPtr>(obj)->untag()->implementor_cid_ = kVoidCid; |
| } |
| #endif |
| } |
| } |
| |
| private: |
| #if defined(HASH_IN_OBJECT_HEADER) |
| int32_t counter_; |
| #endif |
| }; |
| |
| #define SET_CLASS_NAME(class_name, name) \ |
| cls = class_name##_class(); \ |
| cls.set_name(Symbols::name()); |
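| // For example, SET_CLASS_NAME(class, Class) expands to: |
| //   cls = class_class(); |
| //   cls.set_name(Symbols::Class()); |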
| |
| void Object::FinalizeVMIsolate(IsolateGroup* isolate_group) { |
| // Should only be run by the vm isolate. |
| ASSERT(isolate_group == Dart::vm_isolate_group()); |
| |
| // Finish initialization of synthetic_getter_parameter_names_, which was |
| // started in Object::InitOnce(). |
| synthetic_getter_parameter_names_->SetAt(0, Symbols::This()); |
| |
| // Set up names for all VM singleton classes. |
| Class& cls = Class::Handle(); |
| |
| SET_CLASS_NAME(class, Class); |
| SET_CLASS_NAME(dynamic, Dynamic); |
| SET_CLASS_NAME(void, Void); |
| SET_CLASS_NAME(type_parameters, TypeParameters); |
| SET_CLASS_NAME(type_arguments, TypeArguments); |
| SET_CLASS_NAME(patch_class, PatchClass); |
| SET_CLASS_NAME(function, Function); |
| SET_CLASS_NAME(closure_data, ClosureData); |
| SET_CLASS_NAME(ffi_trampoline_data, FfiTrampolineData); |
| SET_CLASS_NAME(field, Field); |
| SET_CLASS_NAME(script, Script); |
| SET_CLASS_NAME(library, LibraryClass); |
| SET_CLASS_NAME(namespace, Namespace); |
| SET_CLASS_NAME(kernel_program_info, KernelProgramInfo); |
| SET_CLASS_NAME(weak_serialization_reference, WeakSerializationReference); |
| SET_CLASS_NAME(weak_array, WeakArray); |
| SET_CLASS_NAME(code, Code); |
| SET_CLASS_NAME(instructions, Instructions); |
| SET_CLASS_NAME(instructions_section, InstructionsSection); |
| SET_CLASS_NAME(instructions_table, InstructionsTable); |
| SET_CLASS_NAME(object_pool, ObjectPool); |
| SET_CLASS_NAME(code_source_map, CodeSourceMap); |
| SET_CLASS_NAME(pc_descriptors, PcDescriptors); |
| SET_CLASS_NAME(compressed_stackmaps, CompressedStackMaps); |
| SET_CLASS_NAME(var_descriptors, LocalVarDescriptors); |
| SET_CLASS_NAME(exception_handlers, ExceptionHandlers); |
| SET_CLASS_NAME(context, Context); |
| SET_CLASS_NAME(context_scope, ContextScope); |
| SET_CLASS_NAME(bytecode, Bytecode); |
| SET_CLASS_NAME(sentinel, Sentinel); |
| SET_CLASS_NAME(singletargetcache, SingleTargetCache); |
| SET_CLASS_NAME(unlinkedcall, UnlinkedCall); |
| SET_CLASS_NAME(monomorphicsmiablecall, MonomorphicSmiableCall); |
| SET_CLASS_NAME(icdata, ICData); |
| SET_CLASS_NAME(megamorphic_cache, MegamorphicCache); |
| SET_CLASS_NAME(subtypetestcache, SubtypeTestCache); |
| SET_CLASS_NAME(loadingunit, LoadingUnit); |
| SET_CLASS_NAME(api_error, ApiError); |
| SET_CLASS_NAME(language_error, LanguageError); |
| SET_CLASS_NAME(unhandled_exception, UnhandledException); |
| SET_CLASS_NAME(unwind_error, UnwindError); |
| |
| // Set up names for classes which are also pre-allocated in the vm isolate. |
| cls = isolate_group->object_store()->array_class(); |
| cls.set_name(Symbols::_List()); |
| cls = isolate_group->object_store()->one_byte_string_class(); |
| cls.set_name(Symbols::OneByteString()); |
| cls = isolate_group->object_store()->never_class(); |
| cls.set_name(Symbols::Never()); |
| |
| // Set up names for the pseudo-classes for free list elements and forwarding |
| // corpses. Mainly this makes VM debugging easier. |
| cls = isolate_group->class_table()->At(kFreeListElement); |
| cls.set_name(Symbols::FreeListElement()); |
| cls = isolate_group->class_table()->At(kForwardingCorpse); |
| cls.set_name(Symbols::ForwardingCorpse()); |
| |
| #if defined(DART_PRECOMPILER) |
| const auto& function = |
| Function::Handle(StubCode::UnknownDartCode().function()); |
| function.set_name(Symbols::OptimizedOut()); |
| #endif // defined(DART_PRECOMPILER) |
| |
| { |
| ASSERT(isolate_group == Dart::vm_isolate_group()); |
| Thread* thread = Thread::Current(); |
| WritableVMIsolateScope scope(thread); |
| HeapIterationScope iteration(thread); |
| FinalizeVMIsolateVisitor premarker; |
| ASSERT(isolate_group->heap()->UsedInWords(Heap::kNew) == 0); |
| iteration.IterateOldObjectsNoImagePages(&premarker); |
| // Make the VM isolate read-only again after setting all objects as marked. |
| // Note objects in image pages are already pre-marked. |
| } |
| } |
| |
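| // Prepares an object for the read-only VM isolate heap: string hash codes |
| // are computed up front (they cannot be filled in lazily once the object is |
| // read-only), and the padding between an object's unrounded size and its |
| // rounded heap size is zeroed so that no stray heap bytes remain in the |
| // trailing padding. |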
| void Object::FinalizeReadOnlyObject(ObjectPtr object) { |
| NoSafepointScope no_safepoint; |
| intptr_t cid = object->GetClassIdOfHeapObject(); |
| if (cid == kOneByteStringCid) { |
| OneByteStringPtr str = static_cast<OneByteStringPtr>(object); |
| if (String::GetCachedHash(str) == 0) { |
| intptr_t hash = String::Hash(str); |
| String::SetCachedHashIfNotSet(str, hash); |
| } |
| intptr_t size = OneByteString::UnroundedSize(str); |
| ASSERT(size <= str->untag()->HeapSize()); |
| memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(str) + size), 0, |
| str->untag()->HeapSize() - size); |
| } else if (cid == kTwoByteStringCid) { |
| TwoByteStringPtr str = static_cast<TwoByteStringPtr>(object); |
| if (String::GetCachedHash(str) == 0) { |
| intptr_t hash = String::Hash(str); |
| String::SetCachedHashIfNotSet(str, hash); |
| } |
| ASSERT(String::GetCachedHash(str) != 0); |
| intptr_t size = TwoByteString::UnroundedSize(str); |
| ASSERT(size <= str->untag()->HeapSize()); |
| memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(str) + size), 0, |
| str->untag()->HeapSize() - size); |
| } else if (cid == kCodeSourceMapCid) { |
| CodeSourceMapPtr map = CodeSourceMap::RawCast(object); |
| intptr_t size = CodeSourceMap::UnroundedSize(map); |
| ASSERT(size <= map->untag()->HeapSize()); |
| memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(map) + size), 0, |
| map->untag()->HeapSize() - size); |
| } else if (cid == kCompressedStackMapsCid) { |
| CompressedStackMapsPtr maps = CompressedStackMaps::RawCast(object); |
| intptr_t size = CompressedStackMaps::UnroundedSize(maps); |
| ASSERT(size <= maps->untag()->HeapSize()); |
| memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(maps) + size), 0, |
| maps->untag()->HeapSize() - size); |
| } else if (cid == kPcDescriptorsCid) { |
| PcDescriptorsPtr desc = PcDescriptors::RawCast(object); |
| intptr_t size = PcDescriptors::UnroundedSize(desc); |
| ASSERT(size <= desc->untag()->HeapSize()); |
| memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(desc) + size), 0, |
| desc->untag()->HeapSize() - size); |
| } |
| } |
| |
| void Object::set_vm_isolate_snapshot_object_table(const Array& table) { |
| ASSERT(Isolate::Current() == Dart::vm_isolate()); |
| *vm_isolate_snapshot_object_table_ = table.ptr(); |
| } |
| |
| // Make unused space in an object whose type has been transformed safe |
| // for traversing during GC. |
| // The unused part of the transformed object is marked as a FreeListElement |
| // object that is not inserted into the freelist. |
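| // This is used, for example, when a variable-length object such as an Array |
| // is truncated in place and its tail must remain walkable by the GC. |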
| void Object::MakeUnusedSpaceTraversable(const Object& obj, |
| intptr_t original_size, |
| intptr_t used_size) { |
| ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0); |
| ASSERT(!obj.IsNull()); |
| ASSERT(original_size >= used_size); |
| if (original_size > used_size) { |
| intptr_t leftover_size = original_size - used_size; |
| uword addr = UntaggedObject::ToAddr(obj.ptr()) + used_size; |
| if (obj.ptr()->IsNewObject()) { |
| FreeListElement::AsElementNew(addr, leftover_size); |
| } else { |
| FreeListElement::AsElement(addr, leftover_size); |
| } |
| // On architectures with a relaxed memory model, the concurrent marker may |
| // observe the write of the filler object's header before observing the |
| // new array length, and so treat it as a pointer. Ensure it is a Smi so |
| // the marker won't dereference it. |
| ASSERT((*reinterpret_cast<uword*>(addr) & kSmiTagMask) == kSmiTag); |
| ASSERT((*reinterpret_cast<uword*>(addr + kWordSize) & kSmiTagMask) == |
| kSmiTag); |
| } |
| } |
| |
| void Object::VerifyBuiltinVtables() { |
| #if defined(DEBUG) |
| ASSERT(builtin_vtables_[kIllegalCid] == 0); |
| ASSERT(builtin_vtables_[kFreeListElement] == 0); |
| ASSERT(builtin_vtables_[kForwardingCorpse] == 0); |
| ClassTable* table = IsolateGroup::Current()->class_table(); |
| for (intptr_t cid = kObjectCid; cid < kNumPredefinedCids; cid++) { |
| if (table->HasValidClassAt(cid)) { |
| ASSERT(builtin_vtables_[cid] != 0); |
| } |
| } |
| #endif |
| } |
| |
| void Object::RegisterClass(const Class& cls, |
| const String& name, |
| const Library& lib) { |
| ASSERT(name.Length() > 0); |
| ASSERT(name.CharAt(0) != '_'); |
| cls.set_name(name); |
| lib.AddClass(cls); |
| } |
| |
| void Object::RegisterPrivateClass(const Class& cls, |
| const String& public_class_name, |
| const Library& lib) { |
| ASSERT(public_class_name.Length() > 0); |
| ASSERT(public_class_name.CharAt(0) == '_'); |
| String& str = String::Handle(); |
| str = lib.PrivateName(public_class_name); |
| cls.set_name(str); |
| lib.AddClass(cls); |
| } |
| |
| // Initialize a new isolate from source or from a snapshot. |
| // |
| // There are three possibilities: |
| // 1. Running a Kernel binary. This function will bootstrap from the KERNEL |
| // file. |
| // 2. There is no vm snapshot. This function will bootstrap from source. |
| // 3. There is a vm snapshot. The caller should initialize from the snapshot. |
| // |
| // A non-null kernel argument indicates (1). |
| // A nullptr kernel indicates (2) or (3). |
| ErrorPtr Object::Init(IsolateGroup* isolate_group, |
| const uint8_t* kernel_buffer, |
| intptr_t kernel_buffer_size) { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| ASSERT(isolate_group == thread->isolate_group()); |
| TIMELINE_DURATION(thread, Isolate, "Object::Init"); |
| |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| const bool bootstrapping = false; |
| #else |
| const bool is_kernel = (kernel_buffer != nullptr); |
| const bool bootstrapping = |
| (Dart::vm_snapshot_kind() == Snapshot::kNone) || is_kernel; |
| #endif // defined(DART_PRECOMPILED_RUNTIME). |
| |
| if (bootstrapping) { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| // Object::Init version when we are bootstrapping from source or from a |
| // Kernel binary. |
| // This will initialize isolate group object_store, shared by all isolates |
| // running in the isolate group. |
| ObjectStore* object_store = isolate_group->object_store(); |
| SafepointWriteRwLocker ml(thread, isolate_group->program_lock()); |
| |
| Class& cls = Class::Handle(zone); |
| Type& type = Type::Handle(zone); |
| Array& array = Array::Handle(zone); |
| WeakArray& weak_array = WeakArray::Handle(zone); |
| Library& lib = Library::Handle(zone); |
| TypeArguments& type_args = TypeArguments::Handle(zone); |
| |
| // All RawArray fields will be initialized to an empty array; therefore |
| // initialize the array class first. |
| cls = Class::New<Array, RTN::Array>(isolate_group); |
| ASSERT(object_store->array_class() == Class::null()); |
| object_store->set_array_class(cls); |
| |
| // VM classes that are parameterized (Array, ImmutableArray, |
| // GrowableObjectArray, Map, ConstMap, Set, ConstSet) are also |
| // pre-finalized, so CalculateFieldOffsets() is never called on them. We |
| // therefore need to set the offset of their type_arguments_ field here; it |
| // is explicitly declared in their respective Raw* classes. |
| cls.set_type_arguments_field_offset(Array::type_arguments_offset(), |
| RTN::Array::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| |
| // Set up the growable object array class (has to be done after the array |
| // class is set up, as one of its fields is an array object). |
| cls = Class::New<GrowableObjectArray, RTN::GrowableObjectArray>( |
| isolate_group); |
| object_store->set_growable_object_array_class(cls); |
| cls.set_type_arguments_field_offset( |
| GrowableObjectArray::type_arguments_offset(), |
| RTN::GrowableObjectArray::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| |
| // Initialize hash set for regexp_table_. |
| const intptr_t kInitialCanonicalRegExpSize = 4; |
| weak_array = HashTables::New<CanonicalRegExpSet>( |
| kInitialCanonicalRegExpSize, Heap::kOld); |
| object_store->set_regexp_table(weak_array); |
| |
| // Initialize hash set for canonical types. |
| const intptr_t kInitialCanonicalTypeSize = 16; |
| array = HashTables::New<CanonicalTypeSet>(kInitialCanonicalTypeSize, |
| Heap::kOld); |
| object_store->set_canonical_types(array); |
| |
| // Initialize hash set for canonical function types. |
| const intptr_t kInitialCanonicalFunctionTypeSize = 16; |
| array = HashTables::New<CanonicalFunctionTypeSet>( |
| kInitialCanonicalFunctionTypeSize, Heap::kOld); |
| object_store->set_canonical_function_types(array); |
| |
| // Initialize hash set for canonical record types. |
| const intptr_t kInitialCanonicalRecordTypeSize = 16; |
| array = HashTables::New<CanonicalRecordTypeSet>( |
| kInitialCanonicalRecordTypeSize, Heap::kOld); |
| object_store->set_canonical_record_types(array); |
| |
| // Initialize hash set for canonical type parameters. |
| const intptr_t kInitialCanonicalTypeParameterSize = 4; |
| array = HashTables::New<CanonicalTypeParameterSet>( |
| kInitialCanonicalTypeParameterSize, Heap::kOld); |
| object_store->set_canonical_type_parameters(array); |
| |
| // Initialize hash set for canonical_type_arguments_. |
| const intptr_t kInitialCanonicalTypeArgumentsSize = 4; |
| array = HashTables::New<CanonicalTypeArgumentsSet>( |
| kInitialCanonicalTypeArgumentsSize, Heap::kOld); |
| object_store->set_canonical_type_arguments(array); |
| |
| // Setup type class early in the process. |
| const Class& type_cls = |
| Class::Handle(zone, Class::New<Type, RTN::Type>(isolate_group)); |
| const Class& function_type_cls = Class::Handle( |
| zone, Class::New<FunctionType, RTN::FunctionType>(isolate_group)); |
| const Class& record_type_cls = Class::Handle( |
| zone, Class::New<RecordType, RTN::RecordType>(isolate_group)); |
| const Class& type_parameter_cls = Class::Handle( |
| zone, Class::New<TypeParameter, RTN::TypeParameter>(isolate_group)); |
| const Class& library_prefix_cls = Class::Handle( |
| zone, Class::New<LibraryPrefix, RTN::LibraryPrefix>(isolate_group)); |
| |
| // Pre-allocate the OneByteString class needed by the symbol table. |
| cls = Class::NewStringClass(kOneByteStringCid, isolate_group); |
| object_store->set_one_byte_string_class(cls); |
| |
| // Pre-allocate the TwoByteString class needed by the symbol table. |
| cls = Class::NewStringClass(kTwoByteStringCid, isolate_group); |
| object_store->set_two_byte_string_class(cls); |
| |
| // Setup the symbol table for the symbols created in the isolate. |
| Symbols::SetupSymbolTable(isolate_group); |
| |
| // Set up the libraries array before initializing the core library. |
| const GrowableObjectArray& libraries = |
| GrowableObjectArray::Handle(zone, GrowableObjectArray::New(Heap::kOld)); |
| object_store->set_libraries(libraries); |
| |
| // Pre-register the core library. |
| Library::InitCoreLibrary(isolate_group); |
| |
| // Basic infrastructure has been set up; initialize the class dictionary. |
| const Library& core_lib = Library::Handle(zone, Library::CoreLibrary()); |
| ASSERT(!core_lib.IsNull()); |
| |
| const GrowableObjectArray& pending_classes = |
| GrowableObjectArray::Handle(zone, GrowableObjectArray::New()); |
| object_store->set_pending_classes(pending_classes); |
| |
| // Now that the symbol table is initialized and the core dictionary as well |
| // as the core implementation dictionary have been set up, preallocate the |
| // remaining classes and register them by name in the dictionaries. |
| String& name = String::Handle(zone); |
| cls = object_store->array_class(); // Was allocated above. |
| RegisterPrivateClass(cls, Symbols::_List(), core_lib); |
| pending_classes.Add(cls); |
| // We cannot use NewNonParameterizedType(), because Array is |
| // parameterized. Warning: class _List has not been patched yet. Its |
| // declared number of type parameters is still 0. It will become 1 after |
| // patching. The array type allocated below represents the raw type _List |
| // and not _List<E> as we could expect. Use with caution. |
| type = Type::New(Class::Handle(zone, cls.ptr()), |
| Object::null_type_arguments(), Nullability::kNonNullable); |
| type.SetIsFinalized(); |
| type ^= type.Canonicalize(thread); |
| object_store->set_array_type(type); |
| |
| cls = object_store->growable_object_array_class(); // Was allocated above. |
| RegisterPrivateClass(cls, Symbols::_GrowableList(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group); |
| object_store->set_immutable_array_class(cls); |
| cls.set_type_arguments_field_offset(Array::type_arguments_offset(), |
| RTN::Array::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| ASSERT(object_store->immutable_array_class() != |
| object_store->array_class()); |
| cls.set_is_prefinalized(); |
| RegisterPrivateClass(cls, Symbols::_ImmutableList(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = object_store->one_byte_string_class(); // Was allocated above. |
| RegisterPrivateClass(cls, Symbols::OneByteString(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = object_store->two_byte_string_class(); // Was allocated above. |
| RegisterPrivateClass(cls, Symbols::TwoByteString(), core_lib); |
| pending_classes.Add(cls); |
| |
| // Pre-register the isolate library so the native class implementations can |
| // be hooked up before compiling it. |
| Library& isolate_lib = Library::Handle( |
| zone, Library::LookupLibrary(thread, Symbols::DartIsolate())); |
| if (isolate_lib.IsNull()) { |
| isolate_lib = Library::NewLibraryHelper(Symbols::DartIsolate(), true); |
| isolate_lib.SetLoadRequested(); |
| isolate_lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kIsolate, isolate_lib); |
| ASSERT(!isolate_lib.IsNull()); |
| ASSERT(isolate_lib.ptr() == Library::IsolateLibrary()); |
| |
| cls = Class::New<Capability, RTN::Capability>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_Capability(), isolate_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<ReceivePort, RTN::ReceivePort>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_RawReceivePort(), isolate_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<SendPort, RTN::SendPort>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_SendPort(), isolate_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<TransferableTypedData, RTN::TransferableTypedData>( |
| isolate_group); |
| RegisterPrivateClass(cls, Symbols::_TransferableTypedDataImpl(), |
| isolate_lib); |
| pending_classes.Add(cls); |
| |
| const Class& stacktrace_cls = Class::Handle( |
| zone, Class::New<StackTrace, RTN::StackTrace>(isolate_group)); |
| RegisterPrivateClass(stacktrace_cls, Symbols::_StackTrace(), core_lib); |
| pending_classes.Add(stacktrace_cls); |
| // Super type set below, after Object is allocated. |
| |
| cls = Class::New<RegExp, RTN::RegExp>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_RegExp(), core_lib); |
| pending_classes.Add(cls); |
| |
| // Initialize the base interfaces used by the core VM classes. |
| |
| // Allocate and initialize the pre-allocated classes in the core library. |
| // The script and token index of these pre-allocated classes are set up when |
| // the corelib script is compiled. |
| cls = Class::New<Instance, RTN::Instance>(kInstanceCid, isolate_group); |
| object_store->set_object_class(cls); |
| cls.set_name(Symbols::Object()); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| cls.set_is_const(); |
| core_lib.AddClass(cls); |
| pending_classes.Add(cls); |
| type = Type::NewNonParameterizedType(cls); |
| ASSERT(type.IsCanonical()); |
| object_store->set_object_type(type); |
| type = type.ToNullability(Nullability::kNonNullable, Heap::kOld); |
| ASSERT(type.IsCanonical()); |
| object_store->set_non_nullable_object_type(type); |
| type = type.ToNullability(Nullability::kNullable, Heap::kOld); |
| ASSERT(type.IsCanonical()); |
| object_store->set_nullable_object_type(type); |
| |
| cls = Class::New<Bool, RTN::Bool>(isolate_group); |
| object_store->set_bool_class(cls); |
| RegisterClass(cls, Symbols::Bool(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group); |
| object_store->set_null_class(cls); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| RegisterClass(cls, Symbols::Null(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_allocate_finalized(); |
| cls.set_is_declaration_loaded(); |
| cls.set_is_type_finalized(); |
| cls.set_name(Symbols::Never()); |
| object_store->set_never_class(cls); |
| |
| ASSERT(!library_prefix_cls.IsNull()); |
| RegisterPrivateClass(library_prefix_cls, Symbols::_LibraryPrefix(), |
| core_lib); |
| pending_classes.Add(library_prefix_cls); |
| |
| RegisterPrivateClass(type_cls, Symbols::_Type(), core_lib); |
| pending_classes.Add(type_cls); |
| |
| RegisterPrivateClass(function_type_cls, Symbols::_FunctionType(), core_lib); |
| pending_classes.Add(function_type_cls); |
| |
| RegisterPrivateClass(record_type_cls, Symbols::_RecordType(), core_lib); |
| pending_classes.Add(record_type_cls); |
| |
| RegisterPrivateClass(type_parameter_cls, Symbols::_TypeParameter(), |
| core_lib); |
| pending_classes.Add(type_parameter_cls); |
| |
| cls = Class::New<Integer, RTN::Integer>(isolate_group); |
| object_store->set_integer_implementation_class(cls); |
| RegisterPrivateClass(cls, Symbols::_IntegerImplementation(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Smi, RTN::Smi>(isolate_group); |
| object_store->set_smi_class(cls); |
| RegisterPrivateClass(cls, Symbols::_Smi(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Mint, RTN::Mint>(isolate_group); |
| object_store->set_mint_class(cls); |
| RegisterPrivateClass(cls, Symbols::_Mint(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Double, RTN::Double>(isolate_group); |
| object_store->set_double_class(cls); |
| RegisterPrivateClass(cls, Symbols::_Double(), core_lib); |
| pending_classes.Add(cls); |
| |
| // Class that represents the Dart class _Closure and C++ class Closure. |
| cls = Class::New<Closure, RTN::Closure>(isolate_group); |
| object_store->set_closure_class(cls); |
| RegisterPrivateClass(cls, Symbols::_Closure(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Record, RTN::Record>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_Record(), core_lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<WeakProperty, RTN::WeakProperty>(isolate_group); |
| object_store->set_weak_property_class(cls); |
| RegisterPrivateClass(cls, Symbols::_WeakProperty(), core_lib); |
| |
| cls = Class::New<WeakReference, RTN::WeakReference>(isolate_group); |
| cls.set_type_arguments_field_offset( |
| WeakReference::type_arguments_offset(), |
| RTN::WeakReference::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| object_store->set_weak_reference_class(cls); |
| RegisterPrivateClass(cls, Symbols::_WeakReference(), core_lib); |
| |
| // Pre-register the mirrors library so we can place the vm class |
| // MirrorReference there rather than the core library. |
| lib = Library::LookupLibrary(thread, Symbols::DartMirrors()); |
| if (lib.IsNull()) { |
| lib = Library::NewLibraryHelper(Symbols::DartMirrors(), true); |
| lib.SetLoadRequested(); |
| lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kMirrors, lib); |
| ASSERT(!lib.IsNull()); |
| ASSERT(lib.ptr() == Library::MirrorsLibrary()); |
| |
| cls = Class::New<MirrorReference, RTN::MirrorReference>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_MirrorReference(), lib); |
| |
| // Pre-register the dart:_compact_hash library so that we can place the |
| // collection classes (_Map, _ConstMap, _Set, _ConstSet) there. |
| lib = Library::LookupLibrary(thread, Symbols::DartCompactHash()); |
| if (lib.IsNull()) { |
| lib = Library::NewLibraryHelper(Symbols::DartCompactHash(), true); |
| lib.SetLoadRequested(); |
| lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kCompactHash, lib); |
| |
| ASSERT(!lib.IsNull()); |
| ASSERT(lib.ptr() == Library::CompactHashLibrary()); |
| cls = Class::New<Map, RTN::Map>(isolate_group); |
| object_store->set_map_impl_class(cls); |
| cls.set_type_arguments_field_offset(Map::type_arguments_offset(), |
| RTN::Map::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(2); |
| RegisterPrivateClass(cls, Symbols::_Map(), lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Map, RTN::Map>(kConstMapCid, isolate_group); |
| object_store->set_const_map_impl_class(cls); |
| cls.set_type_arguments_field_offset(Map::type_arguments_offset(), |
| RTN::Map::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(2); |
| cls.set_is_prefinalized(); |
| RegisterPrivateClass(cls, Symbols::_ConstMap(), lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Set, RTN::Set>(isolate_group); |
| object_store->set_set_impl_class(cls); |
| cls.set_type_arguments_field_offset(Set::type_arguments_offset(), |
| RTN::Set::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| RegisterPrivateClass(cls, Symbols::_Set(), lib); |
| pending_classes.Add(cls); |
| |
| cls = Class::New<Set, RTN::Set>(kConstSetCid, isolate_group); |
| object_store->set_const_set_impl_class(cls); |
| cls.set_type_arguments_field_offset(Set::type_arguments_offset(), |
| RTN::Set::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| cls.set_is_prefinalized(); |
| RegisterPrivateClass(cls, Symbols::_ConstSet(), lib); |
| pending_classes.Add(cls); |
| |
| // Pre-register the collection library. |
| lib = Library::LookupLibrary(thread, Symbols::DartCollection()); |
| if (lib.IsNull()) { |
| lib = Library::NewLibraryHelper(Symbols::DartCollection(), true); |
| lib.SetLoadRequested(); |
| lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kCollection, lib); |
| |
| // Pre-register the async library so we can place the vm class |
| // FutureOr there rather than the core library. |
| lib = Library::LookupLibrary(thread, Symbols::DartAsync()); |
| if (lib.IsNull()) { |
| lib = Library::NewLibraryHelper(Symbols::DartAsync(), true); |
| lib.SetLoadRequested(); |
| lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kAsync, lib); |
| ASSERT(!lib.IsNull()); |
| ASSERT(lib.ptr() == Library::AsyncLibrary()); |
| cls = Class::New<FutureOr, RTN::FutureOr>(isolate_group); |
| cls.set_type_arguments_field_offset(FutureOr::type_arguments_offset(), |
| RTN::FutureOr::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| RegisterClass(cls, Symbols::FutureOr(), lib); |
| pending_classes.Add(cls); |
| object_store->set_future_or_class(cls); |
| |
| cls = Class::New<SuspendState, RTN::SuspendState>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_SuspendState(), lib); |
| pending_classes.Add(cls); |
| |
| // Pre-register the developer library so we can place the vm class |
| // UserTag there rather than the core library. |
| lib = Library::LookupLibrary(thread, Symbols::DartDeveloper()); |
| if (lib.IsNull()) { |
| lib = Library::NewLibraryHelper(Symbols::DartDeveloper(), true); |
| lib.SetLoadRequested(); |
| lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kDeveloper, lib); |
| ASSERT(!lib.IsNull()); |
| ASSERT(lib.ptr() == Library::DeveloperLibrary()); |
| cls = Class::New<UserTag, RTN::UserTag>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_UserTag(), lib); |
| pending_classes.Add(cls); |
| |
| // Set up some default native field classes which can be extended for |
| // specifying native fields in Dart classes. |
| Library::InitNativeWrappersLibrary(isolate_group, is_kernel); |
| ASSERT(object_store->native_wrappers_library() != Library::null()); |
| |
| // Pre-register the typed_data library so the native class implementations |
| // can be hooked up before compiling it. |
| lib = Library::LookupLibrary(thread, Symbols::DartTypedData()); |
| if (lib.IsNull()) { |
| lib = Library::NewLibraryHelper(Symbols::DartTypedData(), true); |
| lib.SetLoadRequested(); |
| lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kTypedData, lib); |
| ASSERT(!lib.IsNull()); |
| ASSERT(lib.ptr() == Library::TypedDataLibrary()); |
| #define REGISTER_TYPED_DATA_CLASS(clazz) \ |
| cls = Class::NewTypedDataClass(kTypedData##clazz##ArrayCid, isolate_group); \ |
| RegisterPrivateClass(cls, Symbols::_##clazz##List(), lib); |
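| // e.g. for the Int8 entry of DART_CLASS_LIST_TYPED_DATA this expands to: |
| //   cls = Class::NewTypedDataClass(kTypedDataInt8ArrayCid, isolate_group); |
| //   RegisterPrivateClass(cls, Symbols::_Int8List(), lib); |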
| |
| DART_CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_CLASS); |
| #undef REGISTER_TYPED_DATA_CLASS |
| #define REGISTER_TYPED_DATA_VIEW_CLASS(clazz) \ |
| cls = \ |
| Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid, isolate_group); \ |
| RegisterPrivateClass(cls, Symbols::_##clazz##View(), lib); \ |
| pending_classes.Add(cls); \ |
| cls = Class::NewUnmodifiableTypedDataViewClass( \ |
| kUnmodifiableTypedData##clazz##ViewCid, isolate_group); \ |
| RegisterPrivateClass(cls, Symbols::_Unmodifiable##clazz##View(), lib); \ |
| pending_classes.Add(cls); |
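| // e.g. for the Int8Array entry of CLASS_LIST_TYPED_DATA this registers both |
| // _Int8ArrayView (kTypedDataInt8ArrayViewCid) and its unmodifiable |
| // counterpart _UnmodifiableInt8ArrayView. |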
| |
| CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_VIEW_CLASS); |
| |
| cls = Class::NewTypedDataViewClass(kByteDataViewCid, isolate_group); |
| RegisterPrivateClass(cls, Symbols::_ByteDataView(), lib); |
| pending_classes.Add(cls); |
| cls = Class::NewUnmodifiableTypedDataViewClass(kUnmodifiableByteDataViewCid, |
| isolate_group); |
| RegisterPrivateClass(cls, Symbols::_UnmodifiableByteDataView(), lib); |
| pending_classes.Add(cls); |
| |
| #undef REGISTER_TYPED_DATA_VIEW_CLASS |
| #define REGISTER_EXT_TYPED_DATA_CLASS(clazz) \ |
| cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid, \ |
| isolate_group); \ |
| RegisterPrivateClass(cls, Symbols::_External##clazz(), lib); |
| |
| cls = Class::New<Instance, RTN::Instance>(kByteBufferCid, isolate_group, |
| /*register_class=*/false); |
| cls.set_instance_size(0, 0); |
| cls.set_next_field_offset(-kWordSize, -compiler::target::kWordSize); |
| isolate_group->class_table()->Register(cls); |
| RegisterPrivateClass(cls, Symbols::_ByteBuffer(), lib); |
| pending_classes.Add(cls); |
| |
| CLASS_LIST_TYPED_DATA(REGISTER_EXT_TYPED_DATA_CLASS); |
| #undef REGISTER_EXT_TYPED_DATA_CLASS |
| // Register Float32x4, Int32x4, and Float64x2 in the object store. |
| cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_Float32x4(), lib); |
| pending_classes.Add(cls); |
| object_store->set_float32x4_class(cls); |
| |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| RegisterClass(cls, Symbols::Float32x4(), lib); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_float32x4_type(type); |
| |
| cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_Int32x4(), lib); |
| pending_classes.Add(cls); |
| object_store->set_int32x4_class(cls); |
| |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| RegisterClass(cls, Symbols::Int32x4(), lib); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_int32x4_type(type); |
| |
| cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group); |
| RegisterPrivateClass(cls, Symbols::_Float64x2(), lib); |
| pending_classes.Add(cls); |
| object_store->set_float64x2_class(cls); |
| |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| RegisterClass(cls, Symbols::Float64x2(), lib); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_float64x2_type(type); |
| |
| // Set the super type of class StackTrace to Object type so that the |
| // 'toString' method is implemented. |
| type = object_store->object_type(); |
| stacktrace_cls.set_super_type(type); |
| |
| // Abstract class that represents the Dart class Type. |
| // Note that this class is implemented by Dart class _AbstractType. |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| RegisterClass(cls, Symbols::Type(), core_lib); |
| pending_classes.Add(cls); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_type_type(type); |
| |
| // Abstract class that represents the Dart class Function. |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| RegisterClass(cls, Symbols::Function(), core_lib); |
| pending_classes.Add(cls); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_function_type(type); |
| |
| // Abstract class that represents the Dart class Record. |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| RegisterClass(cls, Symbols::Record(), core_lib); |
| pending_classes.Add(cls); |
| object_store->set_record_class(cls); |
| |
| cls = Class::New<Number, RTN::Number>(isolate_group); |
| RegisterClass(cls, Symbols::Number(), core_lib); |
| pending_classes.Add(cls); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_number_type(type); |
| type = type.ToNullability(Nullability::kNullable, Heap::kOld); |
| object_store->set_nullable_number_type(type); |
| |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| RegisterClass(cls, Symbols::Int(), core_lib); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| pending_classes.Add(cls); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_int_type(type); |
| type = type.ToNullability(Nullability::kNonNullable, Heap::kOld); |
| object_store->set_non_nullable_int_type(type); |
| type = type.ToNullability(Nullability::kNullable, Heap::kOld); |
| object_store->set_nullable_int_type(type); |
| |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| RegisterClass(cls, Symbols::Double(), core_lib); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| pending_classes.Add(cls); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_double_type(type); |
| type = type.ToNullability(Nullability::kNullable, Heap::kOld); |
| object_store->set_nullable_double_type(type); |
| |
| name = Symbols::_String().ptr(); |
| cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group, |
| /*register_class=*/true, |
| /*is_abstract=*/true); |
| RegisterClass(cls, name, core_lib); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| pending_classes.Add(cls); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_string_type(type); |
| |
| cls = object_store->bool_class(); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_bool_type(type); |
| |
| cls = object_store->smi_class(); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_smi_type(type); |
| |
| cls = object_store->mint_class(); |
| type = Type::NewNonParameterizedType(cls); |
| object_store->set_mint_type(type); |
| |
| // The classes 'void' and 'dynamic' are phony classes to make type checking |
| // more regular; they live in the VM isolate. The class 'void' is not |
| // registered in the class dictionary because its name is a reserved word. |
| // The class 'dynamic' is registered in the class dictionary because its |
| // name is a built-in identifier (this is wrong). The corresponding types |
| // are stored in the object store. |
| cls = object_store->null_class(); |
| type = |
| Type::New(cls, Object::null_type_arguments(), Nullability::kNullable); |
| type.SetIsFinalized(); |
| type ^= type.Canonicalize(thread); |
| object_store->set_null_type(type); |
| cls.set_declaration_type(type); |
| ASSERT(type.IsNullable()); |
| |
| // Consider removing when/if Null becomes an ordinary class. |
| type = object_store->object_type(); |
| cls.set_super_type(type); |
| |
| cls = object_store->never_class(); |
| type = Type::New(cls, Object::null_type_arguments(), |
| Nullability::kNonNullable); |
| type.SetIsFinalized(); |
| type ^= type.Canonicalize(thread); |
| object_store->set_never_type(type); |
| type_args = TypeArguments::New(1); |
| type_args.SetTypeAt(0, type); |
| type_args = type_args.Canonicalize(thread); |
| object_store->set_type_argument_never(type_args); |
| |
| // Create and cache commonly used type arguments <int>, <double>, |
| // <String>, <String, dynamic> and <String, String>. |
| type_args = TypeArguments::New(1); |
| type = object_store->int_type(); |
| type_args.SetTypeAt(0, type); |
| type_args = type_args.Canonicalize(thread); |
| object_store->set_type_argument_int(type_args); |
| |
| type_args = TypeArguments::New(1); |
| type = object_store->double_type(); |
| type_args.SetTypeAt(0, type); |
| type_args = type_args.Canonicalize(thread); |
| object_store->set_type_argument_double(type_args); |
| |
| type_args = TypeArguments::New(1); |
| type = object_store->string_type(); |
| type_args.SetTypeAt(0, type); |
| type_args = type_args.Canonicalize(thread); |
| object_store->set_type_argument_string(type_args); |
| |
| type_args = TypeArguments::New(2); |
| type = object_store->string_type(); |
| type_args.SetTypeAt(0, type); |
| type_args.SetTypeAt(1, Object::dynamic_type()); |
| type_args = type_args.Canonicalize(thread); |
| object_store->set_type_argument_string_dynamic(type_args); |
| |
| type_args = TypeArguments::New(2); |
| type = object_store->string_type(); |
| type_args.SetTypeAt(0, type); |
| type_args.SetTypeAt(1, type); |
| type_args = type_args.Canonicalize(thread); |
| object_store->set_type_argument_string_string(type_args); |
| |
| lib = Library::LookupLibrary(thread, Symbols::DartFfi()); |
| if (lib.IsNull()) { |
| lib = Library::NewLibraryHelper(Symbols::DartFfi(), true); |
| lib.SetLoadRequested(); |
| lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kFfi, lib); |
| |
| cls = Class::New<Instance, RTN::Instance>(kFfiNativeTypeCid, isolate_group); |
| cls.set_num_type_arguments_unsafe(0); |
| cls.set_is_prefinalized(); |
| pending_classes.Add(cls); |
| object_store->set_ffi_native_type_class(cls); |
| RegisterClass(cls, Symbols::FfiNativeType(), lib); |
| |
| #define REGISTER_FFI_TYPE_MARKER(clazz) \ |
| cls = Class::New<Instance, RTN::Instance>(kFfi##clazz##Cid, isolate_group); \ |
| cls.set_num_type_arguments_unsafe(0); \ |
| cls.set_is_prefinalized(); \ |
| pending_classes.Add(cls); \ |
| RegisterClass(cls, Symbols::Ffi##clazz(), lib); |
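| // e.g. for the Int8 entry of CLASS_LIST_FFI_TYPE_MARKER this registers the |
| // marker class for kFfiInt8Cid under the name Symbols::FfiInt8(). |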
| CLASS_LIST_FFI_TYPE_MARKER(REGISTER_FFI_TYPE_MARKER); |
| #undef REGISTER_FFI_TYPE_MARKER |
| |
| cls = Class::New<Instance, RTN::Instance>(kFfiNativeFunctionCid, |
| isolate_group); |
| cls.set_type_arguments_field_offset(Instance::NextFieldOffset(), |
| RTN::Instance::NextFieldOffset()); |
| cls.set_num_type_arguments_unsafe(1); |
| cls.set_is_prefinalized(); |
| pending_classes.Add(cls); |
| RegisterClass(cls, Symbols::FfiNativeFunction(), lib); |
| |
| cls = Class::NewPointerClass(kPointerCid, isolate_group); |
| object_store->set_ffi_pointer_class(cls); |
| pending_classes.Add(cls); |
| RegisterClass(cls, Symbols::FfiPointer(), lib); |
| |
| cls = Class::New<DynamicLibrary, RTN::DynamicLibrary>(kDynamicLibraryCid, |
| isolate_group); |
| cls.set_instance_size(DynamicLibrary::InstanceSize(), |
| compiler::target::RoundedAllocationSize( |
| RTN::DynamicLibrary::InstanceSize())); |
| cls.set_is_prefinalized(); |
| pending_classes.Add(cls); |
| RegisterClass(cls, Symbols::FfiDynamicLibrary(), lib); |
| |
| cls = Class::New<NativeFinalizer, RTN::NativeFinalizer>(isolate_group); |
| object_store->set_native_finalizer_class(cls); |
| RegisterPrivateClass(cls, Symbols::_NativeFinalizer(), lib); |
| |
| cls = Class::New<Finalizer, RTN::Finalizer>(isolate_group); |
| cls.set_type_arguments_field_offset( |
| Finalizer::type_arguments_offset(), |
| RTN::Finalizer::type_arguments_offset()); |
| cls.set_num_type_arguments_unsafe(1); |
| object_store->set_finalizer_class(cls); |
| pending_classes.Add(cls); |
| RegisterPrivateClass(cls, Symbols::_FinalizerImpl(), core_lib); |
| |
| // Pre-register the internal library so we can place the vm class |
| // FinalizerEntry there rather than the core library. |
| lib = Library::LookupLibrary(thread, Symbols::DartInternal()); |
| if (lib.IsNull()) { |
| lib = Library::NewLibraryHelper(Symbols::DartInternal(), true); |
| lib.SetLoadRequested(); |
| lib.Register(thread); |
| } |
| object_store->set_bootstrap_library(ObjectStore::kInternal, lib); |
| ASSERT(!lib.IsNull()); |
| ASSERT(lib.ptr() == Library::InternalLibrary()); |
| |
| cls = Class::New<FinalizerEntry, RTN::FinalizerEntry>(isolate_group); |
| object_store->set_finalizer_entry_class(cls); |
| pending_classes.Add(cls); |
| RegisterClass(cls, Symbols::FinalizerEntry(), lib); |
| |
| // Finish the initialization by compiling the bootstrap scripts containing |
| // the base interfaces and the implementation of the internal classes. |
| const Error& error = Error::Handle( |
| zone, Bootstrap::DoBootstrapping(kernel_buffer, kernel_buffer_size)); |
| if (!error.IsNull()) { |
| return error.ptr(); |
| } |
| |
| isolate_group->class_table()->CopySizesFromClassObjects(); |
| |
| ClassFinalizer::VerifyBootstrapClasses(); |
| |
| // Adds static const fields (class ids) to the class 'ClassID'. |
| lib = Library::LookupLibrary(thread, Symbols::DartInternal()); |
| ASSERT(!lib.IsNull()); |
| cls = lib.LookupClassAllowPrivate(Symbols::ClassID()); |
| ASSERT(!cls.IsNull()); |
| const bool injected = cls.InjectCIDFields(); |
| ASSERT(injected); |
| |
| // Set up recognized state of all functions (core, math and typed data). |
| MethodRecognizer::InitializeState(); |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| } else { |
| // Object::Init version used when we are running in a version of Dart that |
| // has a full snapshot linked in and the isolate is initialized using the |
| // full snapshot. |
| ObjectStore* object_store = isolate_group->object_store(); |
| SafepointWriteRwLocker ml(thread, isolate_group->program_lock()); |
| |
| Class& cls = Class::Handle(zone); |
| |
| // Set up empty classes in the object store; these will get initialized |
| // correctly when we read from the snapshot. This is done to allow |
| // bootstrapping of reading classes from the snapshot. Some classes are not |
| // stored in the object store, yet we still need to create their Class |
| // objects so that they get put into the class_table (as a side effect of |
| // Class::New()). |
| cls = Class::New<Instance, RTN::Instance>(kInstanceCid, isolate_group); |
| object_store->set_object_class(cls); |
| |
| cls = Class::New<LibraryPrefix, RTN::LibraryPrefix>(isolate_group); |
| cls = Class::New<Type, RTN::Type>(isolate_group); |
| cls = Class::New<FunctionType, RTN::FunctionType>(isolate_group); |
| cls = Class::New<RecordType, RTN::RecordType>(isolate_group); |
| cls = Class::New<TypeParameter, RTN::TypeParameter>(isolate_group); |
| |
| cls = Class::New<Array, RTN::Array>(isolate_group); |
| object_store->set_array_class(cls); |
| |
| cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group); |
| object_store->set_immutable_array_class(cls); |
| |
| cls = Class::New<GrowableObjectArray, RTN::GrowableObjectArray>( |
| isolate_group); |
| object_store->set_growable_object_array_class(cls); |
| |
| cls = Class::New<Map, RTN::Map>(isolate_group); |
| object_store->set_map_impl_class(cls); |
| |
| cls = Class::New<Map, RTN::Map>(kConstMapCid, isolate_group); |
| object_store->set_const_map_impl_class(cls); |
| |
| cls = Class::New<Set, RTN::Set>(isolate_group); |
| object_store->set_set_impl_class(cls); |
| |
| cls = Class::New<Set, RTN::Set>(kConstSetCid, isolate_group); |
| object_store->set_const_set_impl_class(cls); |
| |
| cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group); |
| object_store->set_float32x4_class(cls); |
| |
| cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group); |
| object_store->set_int32x4_class(cls); |
| |
| cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group); |
| object_store->set_float64x2_class(cls); |
| |
| #define REGISTER_TYPED_DATA_CLASS(clazz) \ |
| cls = Class::NewTypedDataClass(kTypedData##clazz##Cid, isolate_group); |
| CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_CLASS); |
| #undef REGISTER_TYPED_DATA_CLASS |
| #define REGISTER_TYPED_DATA_VIEW_CLASS(clazz) \ |
| cls = \ |
| Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid, isolate_group); \ |
| cls = Class::NewUnmodifiableTypedDataViewClass( \ |
| kUnmodifiableTypedData##clazz##ViewCid, isolate_group); |
| CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_VIEW_CLASS); |
| #undef REGISTER_TYPED_DATA_VIEW_CLASS |
| cls = Class::NewTypedDataViewClass(kByteDataViewCid, isolate_group); |
| cls = Class::NewUnmodifiableTypedDataViewClass(kUnmodifiableByteDataViewCid, |
| isolate_group); |
| #define REGISTER_EXT_TYPED_DATA_CLASS(clazz) \ |
| cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid, \ |
| isolate_group); |
| CLASS_LIST_TYPED_DATA(REGISTER_EXT_TYPED_DATA_CLASS); |
| #undef REGISTER_EXT_TYPED_DATA_CLASS |
| |
| cls = Class::New<Instance, RTN::Instance>(kFfiNativeTypeCid, isolate_group); |
| object_store->set_ffi_native_type_class(cls); |
| |
| #define REGISTER_FFI_CLASS(clazz) \ |
| cls = Class::New<Instance, RTN::Instance>(kFfi##clazz##Cid, isolate_group); |
| CLASS_LIST_FFI_TYPE_MARKER(REGISTER_FFI_CLASS); |
| #undef REGISTER_FFI_CLASS |
| |
| cls = Class::New<Instance, RTN::Instance>(kFfiNativeFunctionCid, |
| isolate_group); |
| |
| cls = Class::NewPointerClass(kPointerCid, isolate_group); |
| object_store->set_ffi_pointer_class(cls); |
| |
| cls = Class::New<DynamicLibrary, RTN::DynamicLibrary>(kDynamicLibraryCid, |
| isolate_group); |
| |
| cls = Class::New<Instance, RTN::Instance>(kByteBufferCid, isolate_group, |
| /*register_class=*/false); |
| cls.set_instance_size_in_words(0, 0); |
| isolate_group->class_table()->Register(cls); |
| |
| cls = Class::New<Integer, RTN::Integer>(isolate_group); |
| object_store->set_integer_implementation_class(cls); |
| |
| cls = Class::New<Smi, RTN::Smi>(isolate_group); |
| object_store->set_smi_class(cls); |
| |
| cls = Class::New<Mint, RTN::Mint>(isolate_group); |
| object_store->set_mint_class(cls); |
| |
| cls = Class::New<Double, RTN::Double>(isolate_group); |
| object_store->set_double_class(cls); |
| |
| cls = Class::New<Closure, RTN::Closure>(isolate_group); |
| object_store->set_closure_class(cls); |
| |
| cls = Class::New<Record, RTN::Record>(isolate_group); |
| |
| cls = Class::NewStringClass(kOneByteStringCid, isolate_group); |
| object_store->set_one_byte_string_class(cls); |
| |
| cls = Class::NewStringClass(kTwoByteStringCid, isolate_group); |
| object_store->set_two_byte_string_class(cls); |
| |
| cls = Class::New<Bool, RTN::Bool>(isolate_group); |
| object_store->set_bool_class(cls); |
| |
| cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group); |
| object_store->set_null_class(cls); |
| |
| cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group); |
| object_store->set_never_class(cls); |
| |
| cls = Class::New<Capability, RTN::Capability>(isolate_group); |
| cls = Class::New<ReceivePort, RTN::ReceivePort>(isolate_group); |
| cls = Class::New<SendPort, RTN::SendPort>(isolate_group); |
| cls = Class::New<StackTrace, RTN::StackTrace>(isolate_group); |
| cls = Class::New<SuspendState, RTN::SuspendState>(isolate_group); |
| cls = Class::New<RegExp, RTN::RegExp>(isolate_group); |
| cls = Class::New<Number, RTN::Number>(isolate_group); |
| |
| cls = Class::New<WeakProperty, RTN::WeakProperty>(isolate_group); |
| object_store->set_weak_property_class(cls); |
| cls = Class::New<WeakReference, RTN::WeakReference>(isolate_group); |
| object_store->set_weak_reference_class(cls); |
| cls = Class::New<Finalizer, RTN::Finalizer>(isolate_group); |
| object_store->set_finalizer_class(cls); |
| cls = Class::New<NativeFinalizer, RTN::NativeFinalizer>(isolate_group); |
| object_store->set_native_finalizer_class(cls); |
| cls = Class::New<FinalizerEntry, RTN::FinalizerEntry>(isolate_group); |
| object_store->set_finalizer_entry_class(cls); |
| |
| cls = Class::New<MirrorReference, RTN::MirrorReference>(isolate_group); |
| c
|