| // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| // for details. All rights reserved. Use of this source code is governed by a |
| // BSD-style license that can be found in the LICENSE file. |
| |
| #ifndef RUNTIME_VM_OBJECT_H_ |
| #define RUNTIME_VM_OBJECT_H_ |
| |
| #include <tuple> |
| |
| #include "include/dart_api.h" |
| #include "platform/assert.h" |
| #include "platform/utils.h" |
| #include "vm/bitmap.h" |
| #include "vm/compiler/method_recognizer.h" |
| #include "vm/dart.h" |
| #include "vm/flags.h" |
| #include "vm/globals.h" |
| #include "vm/growable_array.h" |
| #include "vm/handles.h" |
| #include "vm/heap/heap.h" |
| #include "vm/isolate.h" |
| #include "vm/json_stream.h" |
| #include "vm/os.h" |
| #include "vm/raw_object.h" |
| #include "vm/report.h" |
| #include "vm/tags.h" |
| #include "vm/thread.h" |
| #include "vm/token_position.h" |
| |
| namespace dart { |
| |
| // Forward declarations. |
| namespace kernel { |
| class Program; |
| class TreeNode; |
| } // namespace kernel |
| |
| #define DEFINE_FORWARD_DECLARATION(clazz) class clazz; |
| CLASS_LIST(DEFINE_FORWARD_DECLARATION) |
| #undef DEFINE_FORWARD_DECLARATION |
| class Api; |
| class ArgumentsDescriptor; |
| class Assembler; |
| class Closure; |
| class Code; |
| class DeoptInstr; |
| class DisassemblyFormatter; |
| class FinalizablePersistentHandle; |
| class FlowGraphCompiler; |
| class HierarchyInfo; |
| class LocalScope; |
| class CodeStatistics; |
| |
| #define REUSABLE_FORWARD_DECLARATION(name) class Reusable##name##HandleScope; |
| REUSABLE_HANDLE_LIST(REUSABLE_FORWARD_DECLARATION) |
| #undef REUSABLE_FORWARD_DECLARATION |
| |
| class Symbols; |
| |
// In DEBUG builds, CHECK_HANDLE() expands to a call to Object::CheckHandle()
// to validate the handle after a raw-pointer assignment; in release builds it
// expands to nothing.
#if defined(DEBUG)
#define CHECK_HANDLE() CheckHandle();
#else
#define CHECK_HANDLE()
#endif
| |
// Injects the shared handle machinery into an Object subclass 'object' whose
// direct superclass is 'super':
//  - raw()/RawCast()/null() accessors typed as Raw##object*.
//  - Handle()/ZoneHandle()/ReadOnlyHandle() allocators (scoped, zone, and
//    read-only storage respectively) plus Checked* variants that FATAL if the
//    raw pointer is not of the expected class.
//  - Cast() for checked down-reinterpretation of an existing handle.
//  - initializeHandle(), which installs the class-specific C++ vtable for
//    non-null pointers via SetRaw(), and for null installs the vtable copied
//    from a default-constructed 'object' (so virtual dispatch on a null
//    handle still reaches 'object' methods).
//  - Deleted/blocked allocation and copy operations: handles must only live
//    in handle storage, never on the C++ heap.
#define BASE_OBJECT_IMPLEMENTATION(object, super)                              \
 public: /* NOLINT */                                                          \
  using RawObjectType = Raw##object;                                           \
  Raw##object* raw() const { return reinterpret_cast<Raw##object*>(raw_); }    \
  bool Is##object() const { return true; }                                     \
  static object& Handle(Zone* zone, Raw##object* raw_ptr) {                    \
    object* obj = reinterpret_cast<object*>(VMHandles::AllocateHandle(zone));  \
    initializeHandle(obj, raw_ptr);                                            \
    return *obj;                                                               \
  }                                                                            \
  static object& Handle() {                                                    \
    return Handle(Thread::Current()->zone(), object::null());                  \
  }                                                                            \
  static object& Handle(Zone* zone) { return Handle(zone, object::null()); }   \
  static object& Handle(Raw##object* raw_ptr) {                                \
    return Handle(Thread::Current()->zone(), raw_ptr);                         \
  }                                                                            \
  static object& CheckedHandle(Zone* zone, RawObject* raw_ptr) {               \
    object* obj = reinterpret_cast<object*>(VMHandles::AllocateHandle(zone));  \
    initializeHandle(obj, raw_ptr);                                            \
    if (!obj->Is##object()) {                                                  \
      FATAL2("Handle check failed: saw %s expected %s", obj->ToCString(),      \
             #object);                                                         \
    }                                                                          \
    return *obj;                                                               \
  }                                                                            \
  static object& ZoneHandle(Zone* zone, Raw##object* raw_ptr) {                \
    object* obj =                                                              \
        reinterpret_cast<object*>(VMHandles::AllocateZoneHandle(zone));        \
    initializeHandle(obj, raw_ptr);                                            \
    return *obj;                                                               \
  }                                                                            \
  static object* ReadOnlyHandle() {                                            \
    object* obj = reinterpret_cast<object*>(Dart::AllocateReadOnlyHandle());   \
    initializeHandle(obj, object::null());                                     \
    return obj;                                                                \
  }                                                                            \
  static object& ZoneHandle(Zone* zone) {                                      \
    return ZoneHandle(zone, object::null());                                   \
  }                                                                            \
  static object& ZoneHandle() {                                                \
    return ZoneHandle(Thread::Current()->zone(), object::null());              \
  }                                                                            \
  static object& ZoneHandle(Raw##object* raw_ptr) {                            \
    return ZoneHandle(Thread::Current()->zone(), raw_ptr);                     \
  }                                                                            \
  static object& CheckedZoneHandle(Zone* zone, RawObject* raw_ptr) {           \
    object* obj =                                                              \
        reinterpret_cast<object*>(VMHandles::AllocateZoneHandle(zone));        \
    initializeHandle(obj, raw_ptr);                                            \
    if (!obj->Is##object()) {                                                  \
      FATAL2("Handle check failed: saw %s expected %s", obj->ToCString(),      \
             #object);                                                         \
    }                                                                          \
    return *obj;                                                               \
  }                                                                            \
  static object& CheckedZoneHandle(RawObject* raw_ptr) {                       \
    return CheckedZoneHandle(Thread::Current()->zone(), raw_ptr);              \
  }                                                                            \
  /* T::Cast cannot be applied to a null Object, because the object vtable */  \
  /* is not setup for type T, although some methods are supposed to work */    \
  /* with null, for example Instance::Equals(). */                             \
  static const object& Cast(const Object& obj) {                               \
    ASSERT(obj.Is##object());                                                  \
    return reinterpret_cast<const object&>(obj);                               \
  }                                                                            \
  static Raw##object* RawCast(RawObject* raw) {                                \
    ASSERT(Object::Handle(raw).IsNull() || Object::Handle(raw).Is##object());  \
    return reinterpret_cast<Raw##object*>(raw);                                \
  }                                                                            \
  static Raw##object* null() {                                                 \
    return reinterpret_cast<Raw##object*>(Object::null());                     \
  }                                                                            \
  virtual const char* ToCString() const;                                       \
  static const ClassId kClassId = k##object##Cid;                              \
                                                                               \
 private: /* NOLINT */                                                         \
  /* Initialize the handle based on the raw_ptr in the presence of null. */    \
  static void initializeHandle(object* obj, RawObject* raw_ptr) {              \
    if (raw_ptr != Object::null()) {                                           \
      obj->SetRaw(raw_ptr);                                                    \
    } else {                                                                   \
      obj->raw_ = Object::null();                                              \
      object fake_object;                                                      \
      obj->set_vtable(fake_object.vtable());                                   \
    }                                                                          \
  }                                                                            \
  /* Disallow allocation, copy constructors and override super assignment. */  \
 public: /* NOLINT */                                                          \
  void operator delete(void* pointer) { UNREACHABLE(); }                       \
                                                                               \
 private: /* NOLINT */                                                         \
  void* operator new(size_t size);                                             \
  object(const object& value);                                                 \
  void operator=(Raw##super* value);                                           \
  void operator=(const object& value);                                         \
  void operator=(const super& value);
| |
| // Conditionally include object_service.cc functionality in the vtable to avoid |
| // link errors like the following: |
| // |
| // object.o:(.rodata._ZTVN4....E[_ZTVN4...E]+0x278): |
| // undefined reference to |
| // `dart::Instance::PrintSharedInstanceJSON(dart::JSONObject*, bool) const'. |
| // |
// In PRODUCT builds the service-protocol printing hooks are omitted entirely;
// only the 'protected:' access-specifier remains so the macro can still be
// followed by member declarations.
#ifndef PRODUCT
#define OBJECT_SERVICE_SUPPORT(object)                                         \
 protected: /* NOLINT */                                                       \
  /* Object is printed as JSON into stream. If ref is true only a header */    \
  /* with an object id is printed. If ref is false the object is fully */      \
  /* printed. */                                                               \
  virtual void PrintJSONImpl(JSONStream* stream, bool ref) const;              \
  virtual const char* JSONType() const { return "" #object; }
#else
#define OBJECT_SERVICE_SUPPORT(object) protected: /* NOLINT */
#endif  // !PRODUCT
| |
// Declares the snapshot deserialization entry point for 'object' and makes
// SnapshotReader a friend so it may access the handle's private internals.
#define SNAPSHOT_READER_SUPPORT(object)                                        \
  static Raw##object* ReadFrom(SnapshotReader* reader, intptr_t object_id,     \
                               intptr_t tags, Snapshot::Kind,                  \
                               bool as_reference);                             \
  friend class SnapshotReader;
| |
// Handle boilerplate for extendable (non-final) object types. operator= takes
// an exactly-typed raw pointer; operator^= accepts any RawObject* and asserts
// (post-assignment) that it is null or of the expected class.
#define OBJECT_IMPLEMENTATION(object, super)                                   \
 public: /* NOLINT */                                                          \
  void operator=(Raw##object* value) { initializeHandle(this, value); }        \
  void operator^=(RawObject* value) {                                          \
    initializeHandle(this, value);                                             \
    ASSERT(IsNull() || Is##object());                                          \
  }                                                                            \
                                                                               \
 protected: /* NOLINT */                                                       \
  object() : super() {}                                                        \
  BASE_OBJECT_IMPLEMENTATION(object, super)                                    \
  OBJECT_SERVICE_SUPPORT(object)
| |
// OBJECT_IMPLEMENTATION plus raw_ptr(), which dereferences the (asserted
// non-null) raw pointer to reach the untagged heap object, and snapshot
// reading support.
#define HEAP_OBJECT_IMPLEMENTATION(object, super)                              \
  OBJECT_IMPLEMENTATION(object, super);                                        \
  const Raw##object* raw_ptr() const {                                         \
    ASSERT(raw() != null());                                                   \
    return raw()->ptr();                                                       \
  }                                                                            \
  SNAPSHOT_READER_SUPPORT(object)                                              \
  friend class StackFrame;                                                     \
  friend class Thread;
| |
| // This macro is used to denote types that do not have a sub-type. |
// Unlike OBJECT_IMPLEMENTATION, the assignment operators here store the raw
// pointer directly without going through initializeHandle (the vtable is left
// untouched); CHECK_HANDLE() validates the result in DEBUG builds only.
// NextFieldOffset() returning -kWordSize marks the class as not extendable by
// Dart code. 'rettype' is the type returned by the snapshot reader, which may
// differ from 'object' (see MINT_OBJECT_IMPLEMENTATION).
#define FINAL_HEAP_OBJECT_IMPLEMENTATION_HELPER(object, rettype, super)        \
 public: /* NOLINT */                                                          \
  void operator=(Raw##object* value) {                                         \
    raw_ = value;                                                              \
    CHECK_HANDLE();                                                            \
  }                                                                            \
  void operator^=(RawObject* value) {                                          \
    raw_ = value;                                                              \
    CHECK_HANDLE();                                                            \
  }                                                                            \
                                                                               \
 private: /* NOLINT */                                                         \
  object() : super() {}                                                        \
  BASE_OBJECT_IMPLEMENTATION(object, super)                                    \
  OBJECT_SERVICE_SUPPORT(object)                                               \
  const Raw##object* raw_ptr() const {                                         \
    ASSERT(raw() != null());                                                   \
    return raw()->ptr();                                                       \
  }                                                                            \
  static intptr_t NextFieldOffset() { return -kWordSize; }                     \
  SNAPSHOT_READER_SUPPORT(rettype)                                             \
  friend class StackFrame;                                                     \
  friend class Thread;
| |
// Common case of the helper above: the snapshot reader returns the same type
// as the handle class itself.
#define FINAL_HEAP_OBJECT_IMPLEMENTATION(object, super)                        \
  FINAL_HEAP_OBJECT_IMPLEMENTATION_HELPER(object, object, super)
| |
// Variant that lets the snapshot reader return 'rettype' rather than 'object'
// (used where deserialization may produce a different representation).
#define MINT_OBJECT_IMPLEMENTATION(object, rettype, super)                     \
  FINAL_HEAP_OBJECT_IMPLEMENTATION_HELPER(object, rettype, super)
| |
// Object is the root of the VM's handle hierarchy. A handle is a small C++
// object holding a single raw heap pointer (raw_); its C++ vtable pointer is
// assumed to live at offset 0 of the handle (see vtable_address()), which is
// what lets initializeHandle/SetRaw swap in class-specific vtables at runtime.
// Handles live in scoped/zone/read-only handle storage, never on the C++ heap.
class Object {
 public:
  using RawObjectType = RawObject;
  static RawObject* RawCast(RawObject* obj) { return obj; }

  virtual ~Object() {}

  RawObject* raw() const { return raw_; }
  void operator=(RawObject* value) { initializeHandle(this, value); }

  // Compare-and-swap on the object's header tag word. Return value is
  // whatever AtomicOperations::CompareAndSwapUint32 reports -- presumably the
  // previously stored tags; confirm against its definition.
  uint32_t CompareAndSwapTags(uint32_t old_tags, uint32_t new_tags) const {
    return AtomicOperations::CompareAndSwapUint32(&raw()->ptr()->tags_,
                                                  old_tags, new_tags);
  }
  bool IsCanonical() const { return raw()->IsCanonical(); }
  void SetCanonical() const { raw()->SetCanonical(); }
  void ClearCanonical() const { raw()->ClearCanonical(); }
  // Smis are immediate (non-heap) values, so their class id cannot be read
  // from a header and is returned directly.
  intptr_t GetClassId() const {
    return !raw()->IsHeapObject() ? static_cast<intptr_t>(kSmiCid)
                                  : raw()->GetClassId();
  }
  inline RawClass* clazz() const;
  static intptr_t tags_offset() { return OFFSET_OF(RawObject, tags_); }

// Class testers: each subclass overrides only its own Is##clazz() to return
// true (see BASE_OBJECT_IMPLEMENTATION).
#define DEFINE_CLASS_TESTER(clazz)                                             \
  virtual bool Is##clazz() const { return false; }
  CLASS_LIST_FOR_HANDLES(DEFINE_CLASS_TESTER);
#undef DEFINE_CLASS_TESTER

  bool IsNull() const { return raw_ == null_; }

  // Matches Object.toString on instances (except String::ToCString, bug 20583).
  virtual const char* ToCString() const {
    if (IsNull()) {
      return "null";
    } else {
      return "Object";
    }
  }

#ifndef PRODUCT
  void PrintJSON(JSONStream* stream, bool ref = true) const;
  virtual void PrintJSONImpl(JSONStream* stream, bool ref) const;
  virtual const char* JSONType() const { return IsNull() ? "null" : "Object"; }
#endif

  // Returns the name that is used to identify an object in the
  // namespace dictionary.
  // Object::DictionaryName() returns String::null(). Only subclasses
  // of Object that need to be entered in the library and library prefix
  // namespaces need to provide an implementation.
  virtual RawString* DictionaryName() const;

  bool IsNew() const { return raw()->IsNewObject(); }
  bool IsOld() const { return raw()->IsOldObject(); }
#if defined(DEBUG)
  bool InVMHeap() const;
#else
  bool InVMHeap() const { return raw()->IsVMHeapObject(); }
#endif  // DEBUG

  // Print the object on stdout for debugging.
  void Print() const;

  // The handle's own address identifies which handle area it was allocated in.
  bool IsZoneHandle() const {
    return VMHandles::IsZoneHandle(reinterpret_cast<uword>(this));
  }

  bool IsReadOnlyHandle() const;

  bool IsNotTemporaryScopedHandle() const;

  static Object& Handle(Zone* zone, RawObject* raw_ptr) {
    Object* obj = reinterpret_cast<Object*>(VMHandles::AllocateHandle(zone));
    initializeHandle(obj, raw_ptr);
    return *obj;
  }
  static Object* ReadOnlyHandle() {
    Object* obj = reinterpret_cast<Object*>(Dart::AllocateReadOnlyHandle());
    initializeHandle(obj, Object::null());
    return obj;
  }

  static Object& Handle() { return Handle(Thread::Current()->zone(), null_); }

  static Object& Handle(Zone* zone) { return Handle(zone, null_); }

  static Object& Handle(RawObject* raw_ptr) {
    return Handle(Thread::Current()->zone(), raw_ptr);
  }

  static Object& ZoneHandle(Zone* zone, RawObject* raw_ptr) {
    Object* obj =
        reinterpret_cast<Object*>(VMHandles::AllocateZoneHandle(zone));
    initializeHandle(obj, raw_ptr);
    return *obj;
  }

  static Object& ZoneHandle() {
    return ZoneHandle(Thread::Current()->zone(), null_);
  }

  static Object& ZoneHandle(RawObject* raw_ptr) {
    return ZoneHandle(Thread::Current()->zone(), raw_ptr);
  }

  static RawObject* null() { return null_; }

#if defined(HASH_IN_OBJECT_HEADER)
  // Identity-hash cache stored directly in the object header (only on
  // configurations that define HASH_IN_OBJECT_HEADER).
  static uint32_t GetCachedHash(const RawObject* obj) {
    return obj->ptr()->hash_;
  }

  static void SetCachedHash(RawObject* obj, uint32_t hash) {
    obj->ptr()->hash_ = hash;
  }
#endif

  // The list below enumerates read-only handles for singleton
  // objects that are shared between the different isolates.
  //
  // - sentinel is a value that cannot be produced by Dart code. It can be used
  // to mark special values, for example to distinguish "uninitialized" fields.
  // - transition_sentinel is a value marking that we are transitioning from
  // sentinel, e.g., computing a field value. Used to detect circular
  // initialization.
  // - unknown_constant and non_constant are optimizing compiler's constant
  // propagation constants.
#define SHARED_READONLY_HANDLES_LIST(V)                                        \
  V(Object, null_object)                                                       \
  V(Array, null_array)                                                         \
  V(String, null_string)                                                       \
  V(Instance, null_instance)                                                   \
  V(Function, null_function)                                                   \
  V(TypeArguments, null_type_arguments)                                        \
  V(TypeArguments, empty_type_arguments)                                       \
  V(Array, empty_array)                                                        \
  V(Array, zero_array)                                                         \
  V(ContextScope, empty_context_scope)                                         \
  V(ObjectPool, empty_object_pool)                                             \
  V(PcDescriptors, empty_descriptors)                                          \
  V(LocalVarDescriptors, empty_var_descriptors)                                \
  V(ExceptionHandlers, empty_exception_handlers)                               \
  V(Array, extractor_parameter_types)                                          \
  V(Array, extractor_parameter_names)                                          \
  V(Instance, sentinel)                                                        \
  V(Instance, transition_sentinel)                                             \
  V(Instance, unknown_constant)                                                \
  V(Instance, non_constant)                                                    \
  V(Bool, bool_true)                                                           \
  V(Bool, bool_false)                                                          \
  V(Smi, smi_illegal_cid)                                                      \
  V(LanguageError, snapshot_writer_error)                                      \
  V(LanguageError, branch_offset_error)                                        \
  V(LanguageError, speculative_inlining_error)                                 \
  V(LanguageError, background_compilation_error)                               \
  V(Array, vm_isolate_snapshot_object_table)                                   \
  V(Type, dynamic_type)                                                        \
  V(Type, void_type)                                                           \
  V(AbstractType, null_abstract_type)

#define DEFINE_SHARED_READONLY_HANDLE_GETTER(Type, name)                       \
  static const Type& name() {                                                  \
    ASSERT(name##_ != nullptr);                                                \
    return *name##_;                                                           \
  }
  SHARED_READONLY_HANDLES_LIST(DEFINE_SHARED_READONLY_HANDLE_GETTER)
#undef DEFINE_SHARED_READONLY_HANDLE_GETTER

  static void set_vm_isolate_snapshot_object_table(const Array& table);

  // Raw classes of the VM-internal object kinds, cached in static fields
  // (see the matching static members in the private section below).
  static RawClass* class_class() { return class_class_; }
  static RawClass* dynamic_class() { return dynamic_class_; }
  static RawClass* void_class() { return void_class_; }
  static RawClass* type_arguments_class() { return type_arguments_class_; }
  static RawClass* patch_class_class() { return patch_class_class_; }
  static RawClass* function_class() { return function_class_; }
  static RawClass* closure_data_class() { return closure_data_class_; }
  static RawClass* signature_data_class() { return signature_data_class_; }
  static RawClass* redirection_data_class() { return redirection_data_class_; }
  static RawClass* field_class() { return field_class_; }
  static RawClass* script_class() { return script_class_; }
  static RawClass* library_class() { return library_class_; }
  static RawClass* namespace_class() { return namespace_class_; }
  static RawClass* kernel_program_info_class() {
    return kernel_program_info_class_;
  }
  static RawClass* code_class() { return code_class_; }
  static RawClass* bytecode_class() { return bytecode_class_; }
  static RawClass* instructions_class() { return instructions_class_; }
  static RawClass* object_pool_class() { return object_pool_class_; }
  static RawClass* pc_descriptors_class() { return pc_descriptors_class_; }
  static RawClass* code_source_map_class() { return code_source_map_class_; }
  static RawClass* stackmap_class() { return stackmap_class_; }
  static RawClass* var_descriptors_class() { return var_descriptors_class_; }
  static RawClass* exception_handlers_class() {
    return exception_handlers_class_;
  }
  static RawClass* deopt_info_class() { return deopt_info_class_; }
  static RawClass* context_class() { return context_class_; }
  static RawClass* context_scope_class() { return context_scope_class_; }
  static RawClass* api_error_class() { return api_error_class_; }
  static RawClass* language_error_class() { return language_error_class_; }
  static RawClass* unhandled_exception_class() {
    return unhandled_exception_class_;
  }
  static RawClass* unwind_error_class() { return unwind_error_class_; }
  static RawClass* singletargetcache_class() {
    return singletargetcache_class_;
  }
  static RawClass* unlinkedcall_class() { return unlinkedcall_class_; }
  static RawClass* icdata_class() { return icdata_class_; }
  static RawClass* megamorphic_cache_class() {
    return megamorphic_cache_class_;
  }
  static RawClass* subtypetestcache_class() { return subtypetestcache_class_; }

  // Initialize the VM isolate.
  static void InitNull(Isolate* isolate);
  static void Init(Isolate* isolate);
  static void FinishInit(Isolate* isolate);
  static void FinalizeVMIsolate(Isolate* isolate);
  static void FinalizeReadOnlyObject(RawObject* object);

  static void Cleanup();

  // Initialize a new isolate either from a Kernel IR, from source, or from a
  // snapshot.
  static RawError* Init(Isolate* isolate,
                        const uint8_t* kernel_buffer,
                        intptr_t kernel_buffer_size);

  static void MakeUnusedSpaceTraversable(const Object& obj,
                                         intptr_t original_size,
                                         intptr_t used_size);

  static intptr_t InstanceSize() {
    return RoundedAllocationSize(sizeof(RawObject));
  }

  static void VerifyBuiltinVtables();

  static const ClassId kClassId = kObjectCid;

  // Different kinds of type tests.
  enum TypeTestKind { kIsSubtypeOf = 0, kIsMoreSpecificThan };

  // Different kinds of name visibility.
  enum NameVisibility {
    // Internal names are the true names of classes, fields,
    // etc. inside the vm. These names include privacy suffixes,
    // getter prefixes, and trailing dots on unnamed constructors.
    //
    // The names of core implementation classes (like _OneByteString)
    // are preserved as well.
    //
    // e.g.
    //   private getter             -> get:foo@6be832b
    //   private constructor        -> _MyClass@6b3832b.
    //   private named constructor  -> _MyClass@6b3832b.named
    //   core impl class name shown -> _OneByteString
    kInternalName = 0,

    // Scrubbed names drop privacy suffixes, getter prefixes, and
    // trailing dots on unnamed constructors. These names are used in
    // the vm service.
    //
    // e.g.
    //   get:foo@6be832b        -> foo
    //   _MyClass@6b3832b.      -> _MyClass
    //   _MyClass@6b3832b.named -> _MyClass.named
    //   _OneByteString         -> _OneByteString (not remapped)
    kScrubbedName,

    // User visible names are appropriate for reporting type errors
    // directly to programmers. The names have been scrubbed and
    // the names of core implementation classes are remapped to their
    // public interface names.
    //
    // e.g.
    //   get:foo@6be832b        -> foo
    //   _MyClass@6b3832b.      -> _MyClass
    //   _MyClass@6b3832b.named -> _MyClass.named
    //   _OneByteString         -> String (remapped)
    kUserVisibleName
  };

 protected:
  // Used for extracting the C++ vtable during bringup.
  Object() : raw_(null_) {}

  uword raw_value() const { return reinterpret_cast<uword>(raw()); }

  inline void SetRaw(RawObject* value);
  void CheckHandle() const;

  // The handle's C++ vtable pointer is bit-copied out of / written into the
  // first word of the handle itself (see vtable_address()).
  cpp_vtable vtable() const { return bit_copy<cpp_vtable>(*this); }
  void set_vtable(cpp_vtable value) { *vtable_address() = value; }

  static RawObject* Allocate(intptr_t cls_id, intptr_t size, Heap::Space space);

  static intptr_t RoundedAllocationSize(intptr_t size) {
    return Utils::RoundUp(size, kObjectAlignment);
  }

  bool Contains(uword addr) const { return raw()->Contains(addr); }

  // Start of field mutator guards.
  //
  // All writes to heap objects should ultimately pass through one of the
  // methods below or their counterparts in RawObject, to ensure that the
  // write barrier is correctly applied.

  template <typename type, MemoryOrder order = MemoryOrder::kRelaxed>
  void StorePointer(type const* addr, type value) const {
    raw()->StorePointer<type, order>(addr, value);
  }

  // Use for storing into an explicitly Smi-typed field of an object
  // (i.e., both the previous and new value are Smis).
  void StoreSmi(RawSmi* const* addr, RawSmi* value) const {
    raw()->StoreSmi(addr, value);
  }

  template <typename FieldType>
  void StoreSimd128(const FieldType* addr, simd128_value_t value) const {
    ASSERT(Contains(reinterpret_cast<uword>(addr)));
    value.writeTo(const_cast<FieldType*>(addr));
  }

  // Needs two template arguments to allow assigning enums to fixed-size ints.
  template <typename FieldType, typename ValueType>
  void StoreNonPointer(const FieldType* addr, ValueType value) const {
    // Can't use Contains, as it uses tags_, which is set through this method.
    ASSERT(reinterpret_cast<uword>(addr) >= RawObject::ToAddr(raw()));
    *const_cast<FieldType*>(addr) = value;
  }

  // Provides non-const access to non-pointer fields within the object. Such
  // access does not need a write barrier, but it is *not* GC-safe, since the
  // object might move, hence must be fully contained within a NoSafepointScope.
  template <typename FieldType>
  FieldType* UnsafeMutableNonPointer(const FieldType* addr) const {
    // Allow pointers at the end of variable-length data, and disallow pointers
    // within the header word.
    ASSERT(Contains(reinterpret_cast<uword>(addr) - 1) &&
           Contains(reinterpret_cast<uword>(addr) - kWordSize));
    // At least check that there is a NoSafepointScope and hope it's big enough.
    ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
    return const_cast<FieldType*>(addr);
  }

// Fail at link time if StoreNonPointer or UnsafeMutableNonPointer is
// instantiated with an object pointer type.
#define STORE_NON_POINTER_ILLEGAL_TYPE(type)                                   \
  template <typename ValueType>                                                \
  void StoreNonPointer(Raw##type* const* addr, ValueType value) const {        \
    UnimplementedMethod();                                                     \
  }                                                                            \
  Raw##type** UnsafeMutableNonPointer(Raw##type* const* addr) const {          \
    UnimplementedMethod();                                                     \
    return NULL;                                                               \
  }

  CLASS_LIST(STORE_NON_POINTER_ILLEGAL_TYPE);
  void UnimplementedMethod() const;
#undef STORE_NON_POINTER_ILLEGAL_TYPE

  // Allocate an object and copy the body of 'orig'.
  static RawObject* Clone(const Object& orig, Heap::Space space);

  // End of field mutator guards.

  RawObject* raw_;  // The raw object reference.

 protected:
  void AddCommonObjectProperties(JSONObject* jsobj,
                                 const char* protocol_type,
                                 bool ref) const;

 private:
  static intptr_t NextFieldOffset() {
    // Indicates this class cannot be extended by dart code.
    return -kWordSize;
  }

  static void InitializeObject(uword address,
                               intptr_t id,
                               intptr_t size,
                               bool is_vm_object);

  static void RegisterClass(const Class& cls,
                            const String& name,
                            const Library& lib);
  static void RegisterPrivateClass(const Class& cls,
                                   const String& name,
                                   const Library& lib);

  /* Initialize the handle based on the raw_ptr in the presence of null. */
  static void initializeHandle(Object* obj, RawObject* raw_ptr) {
    if (raw_ptr != Object::null()) {
      obj->SetRaw(raw_ptr);
    } else {
      obj->raw_ = Object::null();
      Object fake_object;
      obj->set_vtable(fake_object.vtable());
    }
  }

  // Treats the handle's own address as the location of its vtable pointer
  // (the vtable pointer is assumed to be the first word of the handle).
  cpp_vtable* vtable_address() const {
    uword vtable_addr = reinterpret_cast<uword>(this);
    return reinterpret_cast<cpp_vtable*>(vtable_addr);
  }

  static cpp_vtable handle_vtable_;
  static cpp_vtable builtin_vtables_[kNumPredefinedCids];

  // The static values below are singletons shared between the different
  // isolates. They are all allocated in the non-GC'd Dart::vm_isolate_.
  static RawObject* null_;

  static RawClass* class_class_;             // Class of the Class vm object.
  static RawClass* dynamic_class_;           // Class of the 'dynamic' type.
  static RawClass* void_class_;              // Class of the 'void' type.
  static RawClass* type_arguments_class_;  // Class of TypeArguments vm object.
  static RawClass* patch_class_class_;     // Class of the PatchClass vm object.
  static RawClass* function_class_;        // Class of the Function vm object.
  static RawClass* closure_data_class_;    // Class of ClosureData vm obj.
  static RawClass* signature_data_class_;  // Class of SignatureData vm obj.
  static RawClass* redirection_data_class_;  // Class of RedirectionData vm obj.
  static RawClass* field_class_;             // Class of the Field vm object.
  static RawClass* script_class_;            // Class of the Script vm object.
  static RawClass* library_class_;           // Class of the Library vm object.
  static RawClass* namespace_class_;         // Class of Namespace vm object.
  static RawClass* kernel_program_info_class_;  // Class of KernelProgramInfo vm
                                                // object.
  static RawClass* code_class_;              // Class of the Code vm object.
  static RawClass* bytecode_class_;          // Class of the Bytecode vm object.
  static RawClass* instructions_class_;  // Class of the Instructions vm object.
  static RawClass* object_pool_class_;   // Class of the ObjectPool vm object.
  static RawClass* pc_descriptors_class_;   // Class of PcDescriptors vm object.
  static RawClass* code_source_map_class_;  // Class of CodeSourceMap vm object.
  static RawClass* stackmap_class_;         // Class of StackMap vm object.
  static RawClass* var_descriptors_class_;  // Class of LocalVarDescriptors.
  static RawClass* exception_handlers_class_;  // Class of ExceptionHandlers.
  static RawClass* deopt_info_class_;          // Class of DeoptInfo.
  static RawClass* context_class_;        // Class of the Context vm object.
  static RawClass* context_scope_class_;  // Class of ContextScope vm object.
  static RawClass* singletargetcache_class_;  // Class of SingleTargetCache.
  static RawClass* unlinkedcall_class_;       // Class of UnlinkedCall.
  static RawClass* icdata_class_;             // Class of ICData.
  static RawClass* megamorphic_cache_class_;  // Class of MegamorphiCache.
  static RawClass* subtypetestcache_class_;   // Class of SubtypeTestCache.
  static RawClass* api_error_class_;          // Class of ApiError.
  static RawClass* language_error_class_;     // Class of LanguageError.
  static RawClass* unhandled_exception_class_;  // Class of UnhandledException.
  static RawClass* unwind_error_class_;         // Class of UnwindError.

#define DECLARE_SHARED_READONLY_HANDLE(Type, name) static Type* name##_;
  SHARED_READONLY_HANDLES_LIST(DECLARE_SHARED_READONLY_HANDLE)
#undef DECLARE_SHARED_READONLY_HANDLE

  friend void ClassTable::Register(const Class& cls);
  friend void RawObject::Validate(Isolate* isolate) const;
  friend class Closure;
  friend class SnapshotReader;
  friend class InstanceDeserializationCluster;
  friend class OneByteString;
  friend class TwoByteString;
  friend class ExternalOneByteString;
  friend class ExternalTwoByteString;
  friend class Thread;

#define REUSABLE_FRIEND_DECLARATION(name)                                      \
  friend class Reusable##name##HandleScope;
  REUSABLE_HANDLE_LIST(REUSABLE_FRIEND_DECLARATION)
#undef REUSABLE_FRIEND_DECLARATION

  DISALLOW_ALLOCATION();
  DISALLOW_COPY_AND_ASSIGN(Object);
};
| |
| class PassiveObject : public Object { |
| public: |
| void operator=(RawObject* value) { raw_ = value; } |
| void operator^=(RawObject* value) { raw_ = value; } |
| |
| static PassiveObject& Handle(Zone* zone, RawObject* raw_ptr) { |
| PassiveObject* obj = |
| reinterpret_cast<PassiveObject*>(VMHandles::AllocateHandle(zone)); |
| obj->raw_ = raw_ptr; |
| obj->set_vtable(0); |
| return *obj; |
| } |
| static PassiveObject& Handle(RawObject* raw_ptr) { |
| return Handle(Thread::Current()->zone(), raw_ptr); |
| } |
| static PassiveObject& Handle() { |
| return Handle(Thread::Current()->zone(), Object::null()); |
| } |
| static PassiveObject& Handle(Zone* zone) { |
| return Handle(zone, Object::null()); |
| } |
| static PassiveObject& ZoneHandle(Zone* zone, RawObject* raw_ptr) { |
| PassiveObject* obj = |
| reinterpret_cast<PassiveObject*>(VMHandles::AllocateZoneHandle(zone)); |
| obj->raw_ = raw_ptr; |
| obj->set_vtable(0); |
| return *obj; |
| } |
| static PassiveObject& ZoneHandle(RawObject* raw_ptr) { |
| return ZoneHandle(Thread::Current()->zone(), raw_ptr); |
| } |
| static PassiveObject& ZoneHandle() { |
| return ZoneHandle(Thread::Current()->zone(), Object::null()); |
| } |
| static PassiveObject& ZoneHandle(Zone* zone) { |
| return ZoneHandle(zone, Object::null()); |
| } |
| |
| private: |
| PassiveObject() : Object() {} |
| DISALLOW_ALLOCATION(); |
| DISALLOW_COPY_AND_ASSIGN(PassiveObject); |
| }; |
| |
// A Trail is a zone-allocated growable array of AbstractType handles;
// presumably used to track types already visited during recursive type
// operations (TODO(review): confirm against callers elsewhere in the VM).
typedef ZoneGrowableHandlePtrArray<const AbstractType> Trail;
typedef ZoneGrowableHandlePtrArray<const AbstractType>* TrailPtr;

// A URIs array contains triplets of strings.
// The first string in the triplet is a type name (usually a class).
// The second string in the triplet is the URI of the type.
// The third string in the triplet is "print" if the triplet should be printed.
typedef ZoneGrowableHandlePtrArray<const String> URIs;
| |
| class Class : public Object { |
| public: |
| enum InvocationDispatcherEntry { |
| kInvocationDispatcherName, |
| kInvocationDispatcherArgsDesc, |
| kInvocationDispatcherFunction, |
| kInvocationDispatcherEntrySize, |
| }; |
| |
| intptr_t instance_size() const { |
| ASSERT(is_finalized() || is_prefinalized()); |
| return (raw_ptr()->instance_size_in_words_ * kWordSize); |
| } |
| static intptr_t instance_size(RawClass* clazz) { |
| return (clazz->ptr()->instance_size_in_words_ * kWordSize); |
| } |
| void set_instance_size(intptr_t value_in_bytes) const { |
| ASSERT(kWordSize != 0); |
| set_instance_size_in_words(value_in_bytes / kWordSize); |
| } |
| void set_instance_size_in_words(intptr_t value) const { |
| ASSERT(Utils::IsAligned((value * kWordSize), kObjectAlignment)); |
| StoreNonPointer(&raw_ptr()->instance_size_in_words_, value); |
| } |
| |
| intptr_t next_field_offset() const { |
| return raw_ptr()->next_field_offset_in_words_ * kWordSize; |
| } |
| void set_next_field_offset(intptr_t value_in_bytes) const { |
| ASSERT(kWordSize != 0); |
| set_next_field_offset_in_words(value_in_bytes / kWordSize); |
| } |
| void set_next_field_offset_in_words(intptr_t value) const { |
| ASSERT((value == -1) || |
| (Utils::IsAligned((value * kWordSize), kObjectAlignment) && |
| (value == raw_ptr()->instance_size_in_words_)) || |
| (!Utils::IsAligned((value * kWordSize), kObjectAlignment) && |
| ((value + 1) == raw_ptr()->instance_size_in_words_))); |
| StoreNonPointer(&raw_ptr()->next_field_offset_in_words_, value); |
| } |
| |
| cpp_vtable handle_vtable() const { return raw_ptr()->handle_vtable_; } |
| void set_handle_vtable(cpp_vtable value) const { |
| StoreNonPointer(&raw_ptr()->handle_vtable_, value); |
| } |
| |
| static bool is_valid_id(intptr_t value) { |
| return RawObject::ClassIdTag::is_valid(value); |
| } |
| intptr_t id() const { return raw_ptr()->id_; } |
| void set_id(intptr_t value) const { |
| ASSERT(is_valid_id(value)); |
| StoreNonPointer(&raw_ptr()->id_, value); |
| } |
| static intptr_t id_offset() { return OFFSET_OF(RawClass, id_); } |
| |
  // --- Naming and source information ---------------------------------------

  RawString* Name() const;
  RawString* ScrubbedName() const;
  RawString* UserVisibleName() const;
  bool IsInFullSnapshot() const;

  // Name used when this class is entered into a library dictionary.
  virtual RawString* DictionaryName() const { return Name(); }

  RawScript* script() const { return raw_ptr()->script_; }
  void set_script(const Script& value) const;

  // Position of the class declaration in its script.
  TokenPosition token_pos() const { return raw_ptr()->token_pos_; }
  void set_token_pos(TokenPosition value) const;

  TokenPosition ComputeEndTokenPos() const;

  int32_t SourceFingerprint() const;

  // This class represents a typedef if the signature function is not null.
  RawFunction* signature_function() const {
    return raw_ptr()->signature_function_;
  }
  void set_signature_function(const Function& value) const;

  // Return the Type with type parameters declared by this class filled in with
  // dynamic and type parameters declared in superclasses filled in as declared
  // in superclass clauses.
  RawAbstractType* RareType() const;

  // Return the Type whose arguments are the type parameters declared by this
  // class preceded by the type arguments declared for superclasses, etc.
  // e.g. given
  // class B<T, S>
  // class C<R> extends B<R, int>
  // C.DeclarationType() --> C [R, int, R]
  RawAbstractType* DeclarationType() const;

  RawLibrary* library() const { return raw_ptr()->library_; }
  void set_library(const Library& value) const;

  // --- Type parameters -----------------------------------------------------

  // The type parameters (and their bounds) are specified as an array of
  // TypeParameter.
  RawTypeArguments* type_parameters() const {
    return raw_ptr()->type_parameters_;
  }
  void set_type_parameters(const TypeArguments& value) const;
  intptr_t NumTypeParameters(Thread* thread) const;
  intptr_t NumTypeParameters() const {
    return NumTypeParameters(Thread::Current());
  }
  static intptr_t type_parameters_offset() {
    return OFFSET_OF(RawClass, type_parameters_);
  }

  // Return a TypeParameter if the type_name is a type parameter of this class.
  // Return null otherwise.
  RawTypeParameter* LookupTypeParameter(const String& type_name) const;

  // The type argument vector is flattened and includes the type arguments of
  // the super class.
  intptr_t NumTypeArguments() const;

  // Return the number of type arguments that are specific to this class, i.e.
  // not overlapping with the type arguments of the super class of this class.
  intptr_t NumOwnTypeArguments() const;

  // Return true if this class declares type parameters.
  bool IsGeneric() const { return NumTypeParameters(Thread::Current()) > 0; }
| |
  // If this class is parameterized, each instance has a type_arguments field.
  static const intptr_t kNoTypeArguments = -1;
  intptr_t type_arguments_field_offset() const {
    ASSERT(is_type_finalized() || is_prefinalized());
    // The sentinel is stored directly in the words field, so it must be
    // checked before scaling by kWordSize.
    if (raw_ptr()->type_arguments_field_offset_in_words_ == kNoTypeArguments) {
      return kNoTypeArguments;
    }
    return raw_ptr()->type_arguments_field_offset_in_words_ * kWordSize;
  }
  void set_type_arguments_field_offset(intptr_t value_in_bytes) const {
    intptr_t value;
    if (value_in_bytes == kNoTypeArguments) {
      // Preserve the sentinel unscaled.
      value = kNoTypeArguments;
    } else {
      ASSERT(kWordSize != 0);
      value = value_in_bytes / kWordSize;
    }
    set_type_arguments_field_offset_in_words(value);
  }
  void set_type_arguments_field_offset_in_words(intptr_t value) const {
    StoreNonPointer(&raw_ptr()->type_arguments_field_offset_in_words_, value);
  }
  static intptr_t type_arguments_field_offset_in_words_offset() {
    return OFFSET_OF(RawClass, type_arguments_field_offset_in_words_);
  }

  // Returns the cached canonical type of this class, i.e. the canonical type
  // whose type class is this class and whose type arguments are the
  // uninstantiated type parameters declared by this class if it is generic,
  // e.g. Map<K, V>.
  // Returns Type::null() if the canonical type is not cached yet.
  RawType* CanonicalType() const;

  // Caches the canonical type of this class.
  void SetCanonicalType(const Type& type) const;

  static intptr_t canonical_type_offset() {
    return OFFSET_OF(RawClass, canonical_type_);
  }

  // The super type of this class, Object type if not explicitly specified.
  // Note that the super type may be bounded, as in this example:
  // class C<T> extends S<T> { }; class S<T extends num> { };
  RawAbstractType* super_type() const { return raw_ptr()->super_type_; }
  void set_super_type(const AbstractType& value) const;
  static intptr_t super_type_offset() {
    return OFFSET_OF(RawClass, super_type_);
  }
| |
  // Asserts that the class of the super type has been resolved.
  // |original_classes| only has an effect when reloading. If true and we
  // are reloading, it will prefer the original classes to the replacement
  // classes.
  RawClass* SuperClass(bool original_classes = false) const;

  RawType* mixin() const { return raw_ptr()->mixin_; }
  void set_mixin(const Type& value) const;

  // Note this returns false for mixin application aliases.
  bool IsMixinApplication() const;

  RawClass* GetPatchClass() const;

  // Interfaces is an array of Types.
  RawArray* interfaces() const { return raw_ptr()->interfaces_; }
  void set_interfaces(const Array& value) const;
  static intptr_t interfaces_offset() {
    return OFFSET_OF(RawClass, interfaces_);
  }

  // Returns the list of classes directly implementing this class.
  // (Used by CHA to invalidate dependent code; see RegisterCHACode below.)
  RawGrowableObjectArray* direct_implementors() const {
    return raw_ptr()->direct_implementors_;
  }
  void AddDirectImplementor(const Class& subclass) const;
  void ClearDirectImplementors() const;

  // Returns the list of classes having this class as direct superclass.
  RawGrowableObjectArray* direct_subclasses() const {
    return raw_ptr()->direct_subclasses_;
  }
  void AddDirectSubclass(const Class& subclass) const;
  void ClearDirectSubclasses() const;

  // --- Well-known class predicates (identified by class id) ----------------

  // Check if this class represents the class of null.
  bool IsNullClass() const { return id() == kNullCid; }

  // Check if this class represents the 'dynamic' class.
  bool IsDynamicClass() const { return id() == kDynamicCid; }

  // Check if this class represents the 'void' class.
  bool IsVoidClass() const { return id() == kVoidCid; }

  // Check if this class represents the 'Object' class.
  bool IsObjectClass() const { return id() == kInstanceCid; }

  // Check if this class represents the 'Function' class.
  bool IsDartFunctionClass() const;

  // Check if this class represents the 'Future' class.
  bool IsFutureClass() const;

  // Check if this class represents the 'FutureOr' class.
  bool IsFutureOrClass() const;

  // Check if this class represents the 'Closure' class.
  bool IsClosureClass() const { return id() == kClosureCid; }
  static bool IsClosureClass(RawClass* cls) {
    // Reads the raw object directly, so GC must not move it underneath us.
    NoSafepointScope no_safepoint;
    return cls->ptr()->id_ == kClosureCid;
  }

  // Check if this class represents a typedef class.
  bool IsTypedefClass() const { return signature_function() != Object::null(); }

  static bool IsInFullSnapshot(RawClass* cls) {
    NoSafepointScope no_safepoint;
    return cls->ptr()->library_->ptr()->is_in_fullsnapshot_;
  }
| |
  // Check the subtype relationship.
  // Thin wrapper over TypeTest (see private section) with kIsSubtypeOf.
  bool IsSubtypeOf(const TypeArguments& type_arguments,
                   const Class& other,
                   const TypeArguments& other_type_arguments,
                   Error* bound_error,
                   TrailPtr bound_trail,
                   Heap::Space space) const {
    return TypeTest(kIsSubtypeOf, type_arguments, other, other_type_arguments,
                    bound_error, bound_trail, space);
  }

  // Check the 'more specific' relationship.
  // Thin wrapper over TypeTest with kIsMoreSpecificThan.
  bool IsMoreSpecificThan(const TypeArguments& type_arguments,
                          const Class& other,
                          const TypeArguments& other_type_arguments,
                          Error* bound_error,
                          TrailPtr bound_trail,
                          Heap::Space space) const {
    return TypeTest(kIsMoreSpecificThan, type_arguments, other,
                    other_type_arguments, bound_error, bound_trail, space);
  }

  // Check if this is the top level class.
  bool IsTopLevel() const;

  bool IsPrivate() const;

  // --- Members -------------------------------------------------------------

  // Returns an array of instance and static fields defined by this class.
  RawArray* fields() const { return raw_ptr()->fields_; }
  void SetFields(const Array& value) const;
  void AddField(const Field& field) const;
  void AddFields(const GrowableArray<const Field*>& fields) const;

  void InjectCIDFields() const;

  // Returns an array of all instance fields of this class and its superclasses
  // indexed by offset in words.
  // |original_classes| only has an effect when reloading. If true and we
  // are reloading, it will prefer the original classes to the replacement
  // classes.
  RawArray* OffsetToFieldMap(bool original_classes = false) const;

  // Returns true if non-static fields are defined.
  bool HasInstanceFields() const;

  // TODO(koda): Unite w/ hash table.
  RawArray* functions() const { return raw_ptr()->functions_; }
  void SetFunctions(const Array& value) const;
  void AddFunction(const Function& function) const;
  void RemoveFunction(const Function& function) const;
  RawFunction* FunctionFromIndex(intptr_t idx) const;
  intptr_t FindImplicitClosureFunctionIndex(const Function& needle) const;
  RawFunction* ImplicitClosureFunctionFromIndex(intptr_t idx) const;

  // --- Member lookup -------------------------------------------------------
  // The 'AllowPrivate' variants also match library-private names.

  RawFunction* LookupDynamicFunction(const String& name) const;
  RawFunction* LookupDynamicFunctionAllowAbstract(const String& name) const;
  RawFunction* LookupDynamicFunctionAllowPrivate(const String& name) const;
  RawFunction* LookupStaticFunction(const String& name) const;
  RawFunction* LookupStaticFunctionAllowPrivate(const String& name) const;
  RawFunction* LookupConstructor(const String& name) const;
  RawFunction* LookupConstructorAllowPrivate(const String& name) const;
  RawFunction* LookupFactory(const String& name) const;
  RawFunction* LookupFactoryAllowPrivate(const String& name) const;
  RawFunction* LookupFunction(const String& name) const;
  RawFunction* LookupFunctionAllowPrivate(const String& name) const;
  RawFunction* LookupGetterFunction(const String& name) const;
  RawFunction* LookupSetterFunction(const String& name) const;
  RawField* LookupInstanceField(const String& name) const;
  RawField* LookupStaticField(const String& name) const;
  RawField* LookupField(const String& name) const;
  RawField* LookupFieldAllowPrivate(const String& name,
                                    bool instance_only = false) const;
  RawField* LookupInstanceFieldAllowPrivate(const String& name) const;
  RawField* LookupStaticFieldAllowPrivate(const String& name) const;

  RawLibraryPrefix* LookupLibraryPrefix(const String& name) const;

  // --- Canonical constants -------------------------------------------------

  RawDouble* LookupCanonicalDouble(Zone* zone, double value) const;
  RawMint* LookupCanonicalMint(Zone* zone, int64_t value) const;

  // The methods above are more efficient than this generic one.
  RawInstance* LookupCanonicalInstance(Zone* zone, const Instance& value) const;

  RawInstance* InsertCanonicalConstant(Zone* zone,
                                       const Instance& constant) const;
  void InsertCanonicalDouble(Zone* zone, const Double& constant) const;
  void InsertCanonicalMint(Zone* zone, const Mint& constant) const;

  void RehashConstants(Zone* zone) const;
| |
  // Allocation size of the Class object itself (not of its instances).
  static intptr_t InstanceSize() {
    return RoundedAllocationSize(sizeof(RawClass));
  }

  // --- State bits ----------------------------------------------------------
  // Each getter decodes a flag from state_bits_; see the StateBits enum in
  // the private section for the underlying bit layout.

  bool is_implemented() const {
    return ImplementedBit::decode(raw_ptr()->state_bits_);
  }
  void set_is_implemented() const;

  bool is_abstract() const {
    return AbstractBit::decode(raw_ptr()->state_bits_);
  }
  void set_is_abstract() const;

  bool is_type_finalized() const {
    return TypeFinalizedBit::decode(raw_ptr()->state_bits_);
  }
  void set_is_type_finalized() const;

  bool is_patch() const { return PatchBit::decode(raw_ptr()->state_bits_); }
  void set_is_patch() const;

  bool is_synthesized_class() const {
    return SynthesizedClassBit::decode(raw_ptr()->state_bits_);
  }
  void set_is_synthesized_class() const;

  bool is_enum_class() const { return EnumBit::decode(raw_ptr()->state_bits_); }
  void set_is_enum_class() const;

  // Finalization state is a multi-bit field (ClassFinalizedBits), not a
  // single flag; the three predicates below test its possible values.
  bool is_finalized() const {
    return ClassFinalizedBits::decode(raw_ptr()->state_bits_) ==
           RawClass::kFinalized;
  }
  void set_is_finalized() const;

  bool is_prefinalized() const {
    return ClassFinalizedBits::decode(raw_ptr()->state_bits_) ==
           RawClass::kPreFinalized;
  }

  void set_is_prefinalized() const;

  bool is_refinalize_after_patch() const {
    return ClassFinalizedBits::decode(raw_ptr()->state_bits_) ==
           RawClass::kRefinalizeAfterPatch;
  }

  void SetRefinalizeAfterPatch() const;
  void ResetFinalization() const;

  bool is_marked_for_parsing() const {
    return MarkedForParsingBit::decode(raw_ptr()->state_bits_);
  }
  void set_is_marked_for_parsing() const;
  void reset_is_marked_for_parsing() const;

  bool is_const() const { return ConstBit::decode(raw_ptr()->state_bits_); }
  void set_is_const() const;

  bool is_mixin_app_alias() const {
    return MixinAppAliasBit::decode(raw_ptr()->state_bits_);
  }
  void set_is_mixin_app_alias() const;

  bool is_mixin_type_applied() const {
    return MixinTypeAppliedBit::decode(raw_ptr()->state_bits_);
  }
  void set_is_mixin_type_applied() const;

  // Tests if this is a mixin application class which was desugared
  // to a normal class by kernel mixin transformation
  // (pkg/kernel/lib/transformations/mixin_full_resolution.dart).
  //
  // In such case, its mixed-in type was pulled into the end of
  // interfaces list.
  bool is_transformed_mixin_application() const {
    return TransformedMixinApplicationBit::decode(raw_ptr()->state_bits_);
  }
  void set_is_transformed_mixin_application() const;

  bool is_fields_marked_nullable() const {
    return FieldsMarkedNullableBit::decode(raw_ptr()->state_bits_);
  }
  void set_is_fields_marked_nullable() const;

  bool is_cycle_free() const {
    return CycleFreeBit::decode(raw_ptr()->state_bits_);
  }
  void set_is_cycle_free() const;

  bool is_allocated() const {
    return IsAllocatedBit::decode(raw_ptr()->state_bits_);
  }
  void set_is_allocated(bool value) const;

  // --- Misc ----------------------------------------------------------------

  uint16_t num_native_fields() const { return raw_ptr()->num_native_fields_; }
  void set_num_native_fields(uint16_t value) const {
    StoreNonPointer(&raw_ptr()->num_native_fields_, value);
  }

  RawCode* allocation_stub() const { return raw_ptr()->allocation_stub_; }
  void set_allocation_stub(const Code& value) const;

  // Offset of this class's declaration in the kernel binary, or -1 in the
  // precompiled runtime (which carries no kernel metadata).
  intptr_t kernel_offset() const {
#if defined(DART_PRECOMPILED_RUNTIME)
    return -1;
#else
    return raw_ptr()->kernel_offset_;
#endif
  }

  void set_kernel_offset(intptr_t offset) const {
    // No-op in the precompiled runtime.
    NOT_IN_PRECOMPILED(StoreNonPointer(&raw_ptr()->kernel_offset_, offset));
  }

  void DisableAllocationStub() const;

  RawArray* constants() const;
  void set_constants(const Array& value) const;
| |
  // --- Invocation dispatchers ----------------------------------------------

  intptr_t FindInvocationDispatcherFunctionIndex(const Function& needle) const;
  RawFunction* InvocationDispatcherFunctionFromIndex(intptr_t idx) const;

  RawFunction* GetInvocationDispatcher(const String& target_name,
                                       const Array& args_desc,
                                       RawFunction::Kind kind,
                                       bool create_if_absent) const;

  void Finalize() const;

  // Apply given patch class to this class.
  // Return true on success, or false and error otherwise.
  bool ApplyPatch(const Class& patch, Error* error) const;

  // --- Reflective invocation -----------------------------------------------

  RawObject* Invoke(const String& selector,
                    const Array& arguments,
                    const Array& argument_names,
                    bool respect_reflectable = true) const;
  RawObject* InvokeGetter(const String& selector,
                          bool throw_nsm_if_absent,
                          bool respect_reflectable = true) const;
  RawObject* InvokeSetter(const String& selector,
                          const Instance& argument,
                          bool respect_reflectable = true) const;

  // Evaluate the given expression as if it appeared in a static method of this
  // class and return the resulting value, or an error object if evaluating the
  // expression fails. The method has the formal (type) parameters given in
  // (type_)param_names, and is invoked with the (type)argument values given in
  // (type_)param_values.
  RawObject* EvaluateCompiledExpression(
      const uint8_t* kernel_bytes,
      intptr_t kernel_length,
      const Array& type_definitions,
      const Array& param_values,
      const TypeArguments& type_param_values) const;

  RawError* EnsureIsFinalized(Thread* thread) const;

  // --- Allocation ----------------------------------------------------------

  // Allocate a class used for VM internal objects.
  template <class FakeObject>
  static RawClass* New();

  // Allocate instance classes.
  static RawClass* New(const Library& lib,
                       const String& name,
                       const Script& script,
                       TokenPosition token_pos,
                       bool register_class = true);
  static RawClass* NewNativeWrapper(const Library& library,
                                    const String& name,
                                    int num_fields);

  // Allocate the raw string classes.
  static RawClass* NewStringClass(intptr_t class_id);

  // Allocate the raw TypedData classes.
  static RawClass* NewTypedDataClass(intptr_t class_id);

  // Allocate the raw TypedDataView classes.
  static RawClass* NewTypedDataViewClass(intptr_t class_id);

  // Allocate the raw ExternalTypedData classes.
  static RawClass* NewExternalTypedDataClass(intptr_t class_id);

  // --- Class-hierarchy analysis (CHA) --------------------------------------

  // Register code that has used CHA for optimization.
  // TODO(srdjan): Also register kind of CHA optimization (e.g.: leaf class,
  // leaf method, ...).
  void RegisterCHACode(const Code& code);

  void DisableCHAOptimizedCode(const Class& subclass);

  void DisableAllCHAOptimizedCode();

  void DisableCHAImplementorUsers() { DisableAllCHAOptimizedCode(); }

  // Return the list of code objects that were compiled using CHA of this class.
  // These code objects will be invalidated if new subclasses of this class
  // are finalized.
  RawArray* dependent_code() const { return raw_ptr()->dependent_code_; }
  void set_dependent_code(const Array& array) const;

  bool TraceAllocation(Isolate* isolate) const;
  void SetTraceAllocation(bool trace_allocation) const;

  // --- Hot reload ----------------------------------------------------------

  bool ValidatePostFinalizePatch(const Class& orig_class, Error* error) const;
  void ReplaceEnum(const Class& old_enum) const;
  void CopyStaticFieldValues(const Class& old_cls) const;
  void PatchFieldsAndFunctions() const;
  void MigrateImplicitStaticClosures(IsolateReloadContext* context,
                                     const Class& new_cls) const;
  void CopyCanonicalConstants(const Class& old_cls) const;
  void CopyCanonicalType(const Class& old_cls) const;
  void CheckReload(const Class& replacement,
                   IsolateReloadContext* context) const;

  void AddInvocationDispatcher(const String& target_name,
                               const Array& args_desc,
                               const Function& dispatcher) const;
| |
 private:
  bool CanReloadFinalized(const Class& replacement,
                          IsolateReloadContext* context) const;
  bool CanReloadPreFinalized(const Class& replacement,
                             IsolateReloadContext* context) const;

  // Tells whether instances need morphing for reload.
  bool RequiresInstanceMorphing(const Class& replacement) const;

  template <class FakeObject>
  static RawClass* NewCommon(intptr_t index);

  // Member-kind filter used by the private lookup helpers below.
  enum MemberKind {
    kAny = 0,
    kStatic,
    kInstance,
    kInstanceAllowAbstract,
    kConstructor,
    kFactory,
  };
  // Bit positions within the 16-bit state_bits_ field (see the BitField
  // classes below). ClassFinalizedBits occupies two bits (positions 3-4).
  enum StateBits {
    kConstBit = 0,
    kImplementedBit = 1,
    kTypeFinalizedBit = 2,
    kClassFinalizedPos = 3,
    kClassFinalizedSize = 2,
    kAbstractBit = kClassFinalizedPos + kClassFinalizedSize,  // = 5
    kPatchBit = 6,
    kSynthesizedClassBit = 7,
    kMarkedForParsingBit = 8,
    kMixinAppAliasBit = 9,
    kMixinTypeAppliedBit = 10,
    kFieldsMarkedNullableBit = 11,
    kCycleFreeBit = 12,
    kEnumBit = 13,
    kTransformedMixinApplicationBit = 14,
    kIsAllocatedBit = 15,
  };
  class ConstBit : public BitField<uint16_t, bool, kConstBit, 1> {};
  class ImplementedBit : public BitField<uint16_t, bool, kImplementedBit, 1> {};
  class TypeFinalizedBit
      : public BitField<uint16_t, bool, kTypeFinalizedBit, 1> {};
  class ClassFinalizedBits : public BitField<uint16_t,
                                             RawClass::ClassFinalizedState,
                                             kClassFinalizedPos,
                                             kClassFinalizedSize> {};
  class AbstractBit : public BitField<uint16_t, bool, kAbstractBit, 1> {};
  class PatchBit : public BitField<uint16_t, bool, kPatchBit, 1> {};
  class SynthesizedClassBit
      : public BitField<uint16_t, bool, kSynthesizedClassBit, 1> {};
  class MarkedForParsingBit
      : public BitField<uint16_t, bool, kMarkedForParsingBit, 1> {};
  class MixinAppAliasBit
      : public BitField<uint16_t, bool, kMixinAppAliasBit, 1> {};
  class MixinTypeAppliedBit
      : public BitField<uint16_t, bool, kMixinTypeAppliedBit, 1> {};
  class FieldsMarkedNullableBit
      : public BitField<uint16_t, bool, kFieldsMarkedNullableBit, 1> {};
  class CycleFreeBit : public BitField<uint16_t, bool, kCycleFreeBit, 1> {};
  class EnumBit : public BitField<uint16_t, bool, kEnumBit, 1> {};
  class TransformedMixinApplicationBit
      : public BitField<uint16_t, bool, kTransformedMixinApplicationBit, 1> {};
  class IsAllocatedBit : public BitField<uint16_t, bool, kIsAllocatedBit, 1> {};

  void set_name(const String& value) const;
  void set_user_name(const String& value) const;
  RawString* GenerateUserVisibleName() const;
  void set_state_bits(intptr_t bits) const;

  void set_canonical_type(const Type& value) const;
  RawType* canonical_type() const;

  RawArray* invocation_dispatcher_cache() const;
  void set_invocation_dispatcher_cache(const Array& cache) const;
  RawFunction* CreateInvocationDispatcher(const String& target_name,
                                          const Array& args_desc,
                                          RawFunction::Kind kind) const;

  void CalculateFieldOffsets() const;

  // functions_hash_table is in use iff there are at least this many functions.
  // NOTE: 'Treshold' is a historical misspelling kept because the identifier
  // is referenced elsewhere.
  static const intptr_t kFunctionLookupHashTreshold = 16;

  // Layout of the combined has_pragma / num_own_type_arguments 16-bit field.
  enum HasPragmaAndNumOwnTypeArgumentsBits {
    kHasPragmaBit = 0,
    kNumOwnTypeArgumentsPos = 1,
    kNumOwnTypeArgumentsSize = 15
  };

  class HasPragmaBit : public BitField<uint16_t, bool, kHasPragmaBit, 1> {};
  class NumOwnTypeArguments : public BitField<uint16_t,
                                              uint16_t,
                                              kNumOwnTypeArgumentsPos,
                                              kNumOwnTypeArgumentsSize> {};

  // Initial value for the cached number of type arguments.
  // All ones in the 15-bit NumOwnTypeArguments field acts as the sentinel.
  static const intptr_t kUnknownNumTypeArguments =
      (1U << kNumOwnTypeArgumentsSize) - 1;

  int16_t num_type_arguments() const { return raw_ptr()->num_type_arguments_; }
  void set_num_type_arguments(intptr_t value) const;
  static intptr_t num_type_arguments_offset() {
    return OFFSET_OF(RawClass, num_type_arguments_);
  }
| |
 public:
  // Whether the class declaration carries a vm-recognized pragma annotation.
  bool has_pragma() const {
    return HasPragmaBit::decode(
        raw_ptr()->has_pragma_and_num_own_type_arguments_);
  }
  void set_has_pragma(bool has_pragma) const;

 private:
  // Cached count of type arguments specific to this class, packed into the
  // same 16-bit field as the has_pragma bit.
  uint16_t num_own_type_arguments() const {
    return NumOwnTypeArguments::decode(
        raw_ptr()->has_pragma_and_num_own_type_arguments_);
  }
  void set_num_own_type_arguments(intptr_t value) const;

  // Sets the combined field wholesale (both the pragma bit and the count).
  void set_has_pragma_and_num_own_type_arguments(uint16_t value) const;
| |
  // Assigns empty array to all raw class array fields.
  void InitEmptyFields();

  static RawFunction* CheckFunctionType(const Function& func, MemberKind kind);
  RawFunction* LookupFunction(const String& name, MemberKind kind) const;
  RawFunction* LookupFunctionAllowPrivate(const String& name,
                                          MemberKind kind) const;
  RawField* LookupField(const String& name, MemberKind kind) const;

  RawFunction* LookupAccessorFunction(const char* prefix,
                                      intptr_t prefix_length,
                                      const String& name) const;

  // Allocate an instance class which has a VM implementation.
  template <class FakeInstance>
  static RawClass* New(intptr_t id);

  // Helper that calls 'Class::New<Instance>(kIllegalCid)'.
  static RawClass* NewInstanceClass();

  // Check the subtype or 'more specific' relationship.
  // Backing implementation for the public IsSubtypeOf/IsMoreSpecificThan.
  bool TypeTest(TypeTestKind test_kind,
                const TypeArguments& type_arguments,
                const Class& other,
                const TypeArguments& other_type_arguments,
                Error* bound_error,
                TrailPtr bound_trail,
                Heap::Space space) const;

  // Returns true if the type specified by this class and type_arguments is a
  // subtype of FutureOr<T> specified by other class and other_type_arguments.
  // Returns false if other class is not a FutureOr.
  bool FutureOrTypeTest(Zone* zone,
                        const TypeArguments& type_arguments,
                        const Class& other,
                        const TypeArguments& other_type_arguments,
                        Error* bound_error,
                        TrailPtr bound_trail,
                        Heap::Space space) const;

  static bool TypeTestNonRecursive(const Class& cls,
                                   TypeTestKind test_kind,
                                   const TypeArguments& type_arguments,
                                   const Class& other,
                                   const TypeArguments& other_type_arguments,
                                   Error* bound_error,
                                   TrailPtr bound_trail,
                                   Heap::Space space);

  FINAL_HEAP_OBJECT_IMPLEMENTATION(Class, Object);
  friend class AbstractType;
  friend class Instance;
  friend class Object;
  friend class Type;
  friend class Intrinsifier;
  friend class ClassFunctionVisitor;
};
| |
// Classification of type genericity according to type parameter owners.
// Callers use this to select which owners' type parameters are considered
// when querying a type's genericity.
enum Genericity {
  kAny,           // Consider type params of current class and functions.
  kCurrentClass,  // Consider type params of current class only.
  kFunctions,     // Consider type params of current and parent functions.
};
| |
// Associates a patched class with the origin class and script supplying its
// patch members, along with the kernel data and offset of the originating
// library (unavailable in the precompiled runtime).
class PatchClass : public Object {
 public:
  RawClass* patched_class() const { return raw_ptr()->patched_class_; }
  RawClass* origin_class() const { return raw_ptr()->origin_class_; }
  RawScript* script() const { return raw_ptr()->script_; }
  RawExternalTypedData* library_kernel_data() const {
    return raw_ptr()->library_kernel_data_;
  }
  void set_library_kernel_data(const ExternalTypedData& data) const;

  // Offset of the originating library in the kernel binary, or -1 in the
  // precompiled runtime.
  intptr_t library_kernel_offset() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
    return raw_ptr()->library_kernel_offset_;
#else
    return -1;
#endif
  }
  void set_library_kernel_offset(intptr_t offset) const {
    // No-op in the precompiled runtime.
    NOT_IN_PRECOMPILED(
        StoreNonPointer(&raw_ptr()->library_kernel_offset_, offset));
  }

  static intptr_t InstanceSize() {
    return RoundedAllocationSize(sizeof(RawPatchClass));
  }
  // Delegates to the patched class (raw access; GC must not move objects).
  static bool IsInFullSnapshot(RawPatchClass* cls) {
    NoSafepointScope no_safepoint;
    return Class::IsInFullSnapshot(cls->ptr()->patched_class_);
  }

  static RawPatchClass* New(const Class& patched_class,
                            const Class& origin_class);

  static RawPatchClass* New(const Class& patched_class, const Script& source);

 private:
  void set_patched_class(const Class& value) const;
  void set_origin_class(const Class& value) const;
  void set_script(const Script& value) const;

  static RawPatchClass* New();

  FINAL_HEAP_OBJECT_IMPLEMENTATION(PatchClass, Object);
  friend class Class;
};
| |
// Cache recording a single call target (Code and its entry point) together
// with a [lower_limit, upper_limit] validity range — presumably a class-id
// range for which the target applies (NOTE(review): confirm against users
// of lower_limit/upper_limit). Offsets are exposed for generated-code use.
class SingleTargetCache : public Object {
 public:
  RawCode* target() const { return raw_ptr()->target_; }
  void set_target(const Code& target) const;
  static intptr_t target_offset() {
    return OFFSET_OF(RawSingleTargetCache, target_);
  }

// Generates getter/setter/offset accessors for a non-pointer field of
// RawSingleTargetCache.
#define DEFINE_NON_POINTER_FIELD_ACCESSORS(type, name)                         \
  type name() const { return raw_ptr()->name##_; }                            \
  void set_##name(type value) const {                                         \
    StoreNonPointer(&raw_ptr()->name##_, value);                              \
  }                                                                           \
  static intptr_t name##_offset() {                                           \
    return OFFSET_OF(RawSingleTargetCache, name##_);                          \
  }

  DEFINE_NON_POINTER_FIELD_ACCESSORS(uword, entry_point);
  DEFINE_NON_POINTER_FIELD_ACCESSORS(intptr_t, lower_limit);
  DEFINE_NON_POINTER_FIELD_ACCESSORS(intptr_t, upper_limit);
#undef DEFINE_NON_POINTER_FIELD_ACCESSORS

  static intptr_t InstanceSize() {
    return RoundedAllocationSize(sizeof(RawSingleTargetCache));
  }

  static RawSingleTargetCache* New();

 private:
  FINAL_HEAP_OBJECT_IMPLEMENTATION(SingleTargetCache, Object);
  friend class Class;
};
| |
// Holds the data needed to link a not-yet-linked call site: the selector
// (target name) and the arguments descriptor of the call.
class UnlinkedCall : public Object {
 public:
  RawString* target_name() const { return raw_ptr()->target_name_; }
  void set_target_name(const String& target_name) const;
  RawArray* args_descriptor() const { return raw_ptr()->args_descriptor_; }
  void set_args_descriptor(const Array& args_descriptor) const;

  static intptr_t InstanceSize() {
    return RoundedAllocationSize(sizeof(RawUnlinkedCall));
  }

  static RawUnlinkedCall* New();

 private:
  FINAL_HEAP_OBJECT_IMPLEMENTATION(UnlinkedCall, Object);
  friend class Class;
};
| |
| // Representation of a state of runtime tracking of static type exactness for |
| // a particular location in the program (e.g. exactness of type annotation |
| // on a field). |
| // |
| // Given the static type G<T0, ..., Tn> we say that it is exact iff any |
| // values that can be observed at this location has runtime type T such that |
| // type arguments of T at G are exactly <T0, ..., Tn>. |
| // |
| // Currently we only support tracking for locations that are also known |
| // to be monomorphic with respect to the actual class of the values it contains. |
| // |
| // Important: locations should never switch from tracked (kIsTriviallyExact, |
| // kHasExactSuperType, kHasExactSuperClass, kNotExact) to not tracked |
| // (kNotTracking) or the other way around because that would affect unoptimized |
| // graphs generated by graph builder and skew deopt ids. |
| class StaticTypeExactnessState final { |
| public: |
| // Values stored in the location with static type G<T0, ..., Tn> are all |
| // instances of C<T0, ..., Tn> and C<U0, ..., Un> at G has type parameters |
| // <U0, ..., Un>. |
| // |
| // For trivially exact types we can simply compare type argument |
| // vectors as pointers to check exactness. That's why we represent |
| // trivially exact locations as offset in words to the type arguments of |
| // class C. All other states are represented as non-positive values. |
| // |
| // Note: we are ignoring the type argument vector sharing optimization for |
| // now. |
| static inline StaticTypeExactnessState TriviallyExact( |
| intptr_t type_arguments_offset) { |
| ASSERT((type_arguments_offset > 0) && |
| Utils::IsAligned(type_arguments_offset, kWordSize) && |
| Utils::IsInt(8, type_arguments_offset / kWordSize)); |
| return StaticTypeExactnessState(type_arguments_offset / kWordSize); |
| } |
| |
| static inline bool CanRepresentAsTriviallyExact( |
| intptr_t type_arguments_offset) { |
| return Utils::IsInt(8, type_arguments_offset / kWordSize); |
| } |
| |
| // Values stored in the location with static type G<T0, ..., Tn> are all |
| // instances of class C<...> and C<U0, ..., Un> at G has type |
| // parameters <T0, ..., Tn> for any <U0, ..., Un> - that is C<...> has a |
| // supertype G<T0, ..., Tn>. |
| // |
| // For such locations we can simply check if the value stored |
| // is an instance of an expected class and we don't have to look at |
| // type arguments carried by the instance. |
| // |
| // We distinguish situations where we know that G is a superclass of C from |
| // situations where G might be superinterface of C - because in the first |
| // type arguments of G give us constant prefix of type arguments of C. |
| static inline StaticTypeExactnessState HasExactSuperType() { |
| return StaticTypeExactnessState(kHasExactSuperType); |
| } |
| |
| static inline StaticTypeExactnessState HasExactSuperClass() { |
| return StaticTypeExactnessState(kHasExactSuperClass); |
| } |
| |
| // Values stored in the location don't fall under either kIsTriviallyExact |
| // or kHasExactSuperType categories. |
| // |
| // Note: that does not imply that static type annotation is not exact |
| // according to a broader definition, e.g. location might simply be |
| // polymorphic and store instances of multiple different types. |
| // However for simplicity we don't track such cases yet. |
| static inline StaticTypeExactnessState NotExact() { |
| return StaticTypeExactnessState(kNotExact); |
| } |
| |
| // The location does not track exactness of its static type at runtime. |
| static inline StaticTypeExactnessState NotTracking() { |
| return StaticTypeExactnessState(kNotTracking); |
| } |
| |
| static inline StaticTypeExactnessState Unitialized() { |
| return StaticTypeExactnessState(kUninitialized); |
| } |
| |
| static StaticTypeExactnessState Compute(const Type& static_type, |
| const Instance& value, |
| bool print_trace = false); |
| |
| bool IsTracking() const { return value_ != kNotTracking; } |
| bool IsUninitialized() const { return value_ == kUninitialized; } |
| bool IsHasExactSuperClass() const { return value_ == kHasExactSuperClass; } |
| bool IsHasExactSuperType() const { return value_ == kHasExactSuperType; } |
| bool IsTriviallyExact() const { return value_ > kUninitialized; } |
| bool NeedsFieldGuard() const { return value_ >= kUninitialized; } |
| bool IsExactOrUninitialized() const { return value_ > kNotExact; } |
| bool IsExact() const { |
| return IsTriviallyExact() || IsHasExactSuperType() || |
| IsHasExactSuperClass(); |
| } |
| |
| const char* ToCString() const; |
| |
| StaticTypeExactnessState CollapseSuperTypeExactness() const { |
| return IsHasExactSuperClass() ? HasExactSuperType() : *this; |
| } |
| |
| static inline StaticTypeExactnessState Decode(int8_t value) { |
| return StaticTypeExactnessState(value); |
| } |
| |
| int8_t Encode() const { return value_; } |
| intptr_t GetTypeArgumentsOffsetInWords() const { |
| ASSERT(IsTriviallyExact()); |
| return value_; |
| } |
| |
| static constexpr int8_t kUninitialized = 0; |
| |
| private: |
| static constexpr int8_t kNotTracking = -4; |
| static constexpr int8_t kNotExact = -3; |
| static constexpr int8_t kHasExactSuperType = -2; |
| static constexpr int8_t kHasExactSuperClass = -1; |
| |
| explicit StaticTypeExactnessState(int8_t value) : value_(value) {} |
| |
| int8_t value_; |
| |
| DISALLOW_ALLOCATION(); |
| }; |
| |
// Object holding information about an IC: test classes and their
// corresponding targets. The owner of the ICData can be either the function
// or the original ICData object. In case of background compilation we
// copy the ICData in a child object, thus freezing it during background
// compilation. Code may contain only original ICData objects.
class ICData : public Object {
 public:
  // The function that owns this IC (resolved through Original(), see the
  // class comment about background-compilation copies).
  RawFunction* Owner() const;

  // The original ICData if this object is a background-compilation copy,
  // otherwise this object itself.
  RawICData* Original() const;

  void SetOriginal(const ICData& value) const;

  bool IsOriginal() const { return Original() == this->raw(); }

  // Name of the method/operator invoked at this call site.
  RawString* target_name() const { return raw_ptr()->target_name_; }

  RawArray* arguments_descriptor() const { return raw_ptr()->args_descriptor_; }

  intptr_t NumArgsTested() const;

  intptr_t TypeArgsLen() const;

  intptr_t CountWithTypeArgs() const;

  intptr_t CountWithoutTypeArgs() const;

  // Deopt id of the call site. Not available in the precompiled (AOT)
  // runtime, where no deoptimization information is kept.
  intptr_t deopt_id() const {
#if defined(DART_PRECOMPILED_RUNTIME)
    UNREACHABLE();
    return -1;
#else
    return raw_ptr()->deopt_id_;
#endif
  }

  bool IsImmutable() const;

#if !defined(DART_PRECOMPILED_RUNTIME)
  // Static receiver type used for exactness tracking; Object::null() when
  // exactness is not tracked for this call site.
  RawAbstractType* StaticReceiverType() const {
    return raw_ptr()->static_receiver_type_;
  }
  void SetStaticReceiverType(const AbstractType& type) const;
  bool IsTrackingExactness() const {
    return StaticReceiverType() != Object::null();
  }
#else
  bool IsTrackingExactness() const { return false; }
#endif

  void Reset(Zone* zone) const;
  void ResetSwitchable(Zone* zone) const;

  // Note: only deopts with reasons before Unknown in this list are recorded in
  // the ICData. All other reasons are used purely for informational messages
  // printed during deoptimization itself.
#define DEOPT_REASONS(V)                                                       \
  V(BinarySmiOp)                                                               \
  V(BinaryInt64Op)                                                             \
  V(DoubleToSmi)                                                               \
  V(CheckSmi)                                                                  \
  V(CheckClass)                                                                \
  V(Unknown)                                                                   \
  V(PolymorphicInstanceCallTestFail)                                           \
  V(UnaryInt64Op)                                                              \
  V(BinaryDoubleOp)                                                            \
  V(UnaryOp)                                                                   \
  V(UnboxInteger)                                                              \
  V(CheckArrayBound)                                                           \
  V(AtCall)                                                                    \
  V(GuardField)                                                                \
  V(TestCids)                                                                  \
  V(NumReasons)

  enum DeoptReasonId {
#define DEFINE_ENUM_LIST(name) kDeopt##name,
    DEOPT_REASONS(DEFINE_ENUM_LIST)
#undef DEFINE_ENUM_LIST
  };

  static const intptr_t kLastRecordedDeoptReason = kDeoptUnknown - 1;

  enum DeoptFlags {
    // Deoptimization is caused by an optimistically hoisted instruction.
    kHoisted = 1 << 0,

    // Deoptimization is caused by an optimistically generalized bounds check.
    kGeneralized = 1 << 1
  };

  bool HasDeoptReasons() const { return DeoptReasons() != 0; }
  uint32_t DeoptReasons() const;
  void SetDeoptReasons(uint32_t reasons) const;

  bool HasDeoptReason(ICData::DeoptReasonId reason) const;
  void AddDeoptReason(ICData::DeoptReasonId reason) const;

  // Call site classification that is helpful for hot-reload. Call sites with
  // different `RebindRule` have to be rebound differently.
  enum RebindRule {
    kInstance,
    kNoRebind,
    kNSMDispatch,
    kOptimized,
    kStatic,
    kSuper,
    kNumRebindRules,
  };
  RebindRule rebind_rule() const;
  void set_rebind_rule(uint32_t rebind_rule) const;

  // The length of the array. This includes all sentinel entries including
  // the final one.
  intptr_t Length() const;

  // Takes O(result) time!
  intptr_t NumberOfChecks() const;

  // Discounts any checks with usage of zero.
  // Takes O(result) time!
  intptr_t NumberOfUsedChecks() const;

  // Takes O(n) time!
  bool NumberOfChecksIs(intptr_t n) const;

  static intptr_t InstanceSize() {
    return RoundedAllocationSize(sizeof(RawICData));
  }

  // Offsets into RawICData, used by generated code to access the raw object
  // directly.
  static intptr_t target_name_offset() {
    return OFFSET_OF(RawICData, target_name_);
  }

  static intptr_t state_bits_offset() {
    return OFFSET_OF(RawICData, state_bits_);
  }

  static intptr_t NumArgsTestedShift() { return kNumArgsTestedPos; }

  static intptr_t NumArgsTestedMask() {
    return ((1 << kNumArgsTestedSize) - 1) << kNumArgsTestedPos;
  }

  static intptr_t arguments_descriptor_offset() {
    return OFFSET_OF(RawICData, args_descriptor_);
  }

  static intptr_t ic_data_offset() { return OFFSET_OF(RawICData, ic_data_); }

  static intptr_t owner_offset() { return OFFSET_OF(RawICData, owner_); }

#if !defined(DART_PRECOMPILED_RUNTIME)
  static intptr_t static_receiver_type_offset() {
    return OFFSET_OF(RawICData, static_receiver_type_);
  }
#endif

  // Replaces entry |index| with the sentinel.
  void WriteSentinelAt(intptr_t index) const;

  // Clears the count for entry |index|.
  void ClearCountAt(intptr_t index) const;

  // Clear all entries with the sentinel value (but will preserve initial
  // smi smi checks).
  void ClearWithSentinel() const;

  // Clear all entries with the sentinel value and reset the first entry
  // with the dummy target entry.
  void ClearAndSetStaticTarget(const Function& func) const;

  // Returns the first index that should be used for a new entry. Will
  // grow the array if necessary.
  RawArray* FindFreeIndex(intptr_t* index) const;

  void DebugDump() const;

  // Returns true if this is a two arg smi operation.
  bool AddSmiSmiCheckForFastSmiStubs() const;

  // Used for unoptimized static calls when no class-ids are checked.
  void AddTarget(const Function& target) const;

  // Adding checks.

  // Adds one more class test to ICData. Length of 'classes' must be equal to
  // the number of arguments tested. Use only for num_args_tested > 1.
  void AddCheck(const GrowableArray<intptr_t>& class_ids,
                const Function& target,
                intptr_t count = 1) const;

  // NOTE(review): the parameter looks like an entry index despite being named
  // |count| - confirm against the definition before relying on either reading.
  StaticTypeExactnessState GetExactnessAt(intptr_t count) const;

  // Adds sorted so that Smi is the first class-id. Use only for
  // num_args_tested == 1.
  void AddReceiverCheck(intptr_t receiver_class_id,
                        const Function& target,
                        intptr_t count = 1,
                        StaticTypeExactnessState exactness =
                            StaticTypeExactnessState::NotTracking()) const;

  // Does entry |index| contain the sentinel value?
  bool IsSentinelAt(intptr_t index) const;

  // Retrieving checks.

  void GetCheckAt(intptr_t index,
                  GrowableArray<intptr_t>* class_ids,
                  Function* target) const;
  void GetClassIdsAt(intptr_t index, GrowableArray<intptr_t>* class_ids) const;

  // Only for 'num_args_checked == 1'.
  void GetOneClassCheckAt(intptr_t index,
                          intptr_t* class_id,
                          Function* target) const;
  // Only for 'num_args_checked == 1'.
  intptr_t GetCidAt(intptr_t index) const;

  intptr_t GetReceiverClassIdAt(intptr_t index) const;
  intptr_t GetClassIdAt(intptr_t index, intptr_t arg_nr) const;

  RawFunction* GetTargetAt(intptr_t index) const;
  RawFunction* GetTargetForReceiverClassId(intptr_t class_id,
                                           intptr_t* count_return) const;

  RawObject* GetTargetOrCodeAt(intptr_t index) const;
  void SetCodeAt(intptr_t index, const Code& value) const;
  void SetEntryPointAt(intptr_t index, const Smi& value) const;

  void IncrementCountAt(intptr_t index, intptr_t value) const;
  void SetCountAt(intptr_t index, intptr_t value) const;
  intptr_t GetCountAt(intptr_t index) const;
  intptr_t AggregateCount() const;

  // Returns this->raw() if num_args_tested == 1 and arg_nr == 1, otherwise
  // returns a new ICData object containing only unique arg_nr checks.
  // Returns only used entries.
  RawICData* AsUnaryClassChecksForArgNr(intptr_t arg_nr) const;
  RawICData* AsUnaryClassChecks() const {
    return AsUnaryClassChecksForArgNr(0);
  }
  RawICData* AsUnaryClassChecksForCid(intptr_t cid,
                                      const Function& target) const;

  // Returns ICData with aggregated receiver count, sorted by highest count.
  // Smi not first!! (the convention for ICData used in code generation is that
  // Smi check is first)
  // Used for printing and optimizations.
  RawICData* AsUnaryClassChecksSortedByCount() const;

  // Consider only used entries.
  bool AllTargetsHaveSameOwner(intptr_t owner_cid) const;
  bool AllReceiversAreNumbers() const;
  bool HasOneTarget() const;
  bool HasReceiverClassId(intptr_t class_id) const;

  // Note: passing non-null receiver_type enables exactness tracking for
  // the receiver type. Receiver type is expected to be a fully
  // instantiated generic (but not a FutureOr).
  // See StaticTypeExactnessState for more information.
  static RawICData* New(
      const Function& owner,
      const String& target_name,
      const Array& arguments_descriptor,
      intptr_t deopt_id,
      intptr_t num_args_tested,
      RebindRule rebind_rule,
      const AbstractType& receiver_type = Object::null_abstract_type());
  static RawICData* NewFrom(const ICData& from, intptr_t num_args_tested);

  // Generates a new ICData with descriptor and data array copied (deep clone).
  static RawICData* Clone(const ICData& from);

  static intptr_t TestEntryLengthFor(intptr_t num_args,
                                     bool tracking_exactness);

  // Per-entry slot indices relative to the start of an entry: the checked
  // class ids come first, followed by target/code, count/entry-point and
  // (when tracked) exactness.
  static intptr_t TargetIndexFor(intptr_t num_args) { return num_args; }
  static intptr_t CodeIndexFor(intptr_t num_args) { return num_args; }

  static intptr_t CountIndexFor(intptr_t num_args) { return (num_args + 1); }
  static intptr_t EntryPointIndexFor(intptr_t num_args) {
    return (num_args + 1);
  }
  static intptr_t ExactnessOffsetFor(intptr_t num_args) {
    return (num_args + 2);
  }

  bool IsUsedAt(intptr_t i) const;

  void GetUsedCidsForTwoArgs(GrowableArray<intptr_t>* first,
                             GrowableArray<intptr_t>* second) const;

  void PrintToJSONArray(const JSONArray& jsarray,
                        TokenPosition token_pos) const;

  // Initialize the preallocated empty ICData entry arrays.
  static void Init();

  // Clear the preallocated empty ICData entry arrays.
  static void Cleanup();

  // We cache ICData with 0, 1, 2 arguments tested without exactness
  // tracking and with 1 argument tested with exactness tracking.
  enum {
    kCachedICDataZeroArgTestedWithoutExactnessTrackingIdx = 0,
    kCachedICDataMaxArgsTestedWithoutExactnessTracking = 2,
    kCachedICDataOneArgWithExactnessTrackingIdx =
        kCachedICDataZeroArgTestedWithoutExactnessTrackingIdx +
        kCachedICDataMaxArgsTestedWithoutExactnessTracking + 1,
    kCachedICDataArrayCount = kCachedICDataOneArgWithExactnessTrackingIdx + 1,
  };

#if defined(TAG_IC_DATA)
  // Debug-only tag describing what kind of call this IC belongs to.
  using Tag = RawICData::Tag;
  void set_tag(Tag value) const;
  Tag tag() const { return raw_ptr()->tag_; }
#endif

  bool is_static_call() const;

 private:
  static RawICData* New();

  // The backing entry array. Loaded with acquire semantics; presumably
  // paired with a releasing store when the array is replaced/grown - confirm
  // in set_ic_data_array's definition.
  RawArray* ic_data() const {
    return AtomicOperations::LoadAcquire(&raw_ptr()->ic_data_);
  }

  void set_owner(const Function& value) const;
  void set_target_name(const String& value) const;
  void set_arguments_descriptor(const Array& value) const;
  void set_deopt_id(intptr_t value) const;
  void SetNumArgsTested(intptr_t value) const;
  void set_ic_data_array(const Array& value) const;
  void set_state_bits(uint32_t bits) const;

  bool ValidateInterceptor(const Function& target) const;

  // Bit layout of the packed state_bits_ field: num-args-tested, recorded
  // deopt reasons (one bit each) and the rebind rule.
  enum {
    kNumArgsTestedPos = 0,
    kNumArgsTestedSize = 2,
    kDeoptReasonPos = kNumArgsTestedPos + kNumArgsTestedSize,
    kDeoptReasonSize = kLastRecordedDeoptReason + 1,
    kRebindRulePos = kDeoptReasonPos + kDeoptReasonSize,
    kRebindRuleSize = 3
  };

  COMPILE_ASSERT(kNumRebindRules <= (1 << kRebindRuleSize));

  class NumArgsTestedBits : public BitField<uint32_t,
                                            uint32_t,
                                            kNumArgsTestedPos,
                                            kNumArgsTestedSize> {};
  class DeoptReasonBits : public BitField<uint32_t,
                                          uint32_t,
                                          ICData::kDeoptReasonPos,
                                          ICData::kDeoptReasonSize> {};
  class RebindRuleBits : public BitField<uint32_t,
                                         uint32_t,
                                         ICData::kRebindRulePos,
                                         ICData::kRebindRuleSize> {};
#if defined(DEBUG)
  // Used in asserts to verify that a check is not added twice.
  bool HasCheck(const GrowableArray<intptr_t>& cids) const;
#endif  // DEBUG

  intptr_t TestEntryLength() const;
  static RawArray* NewNonCachedEmptyICDataArray(intptr_t num_args_tested,
                                                bool tracking_exactness);
  static RawArray* CachedEmptyICDataArray(intptr_t num_args_tested,
                                          bool tracking_exactness);
  static RawICData* NewDescriptor(Zone* zone,
                                  const Function& owner,
                                  const String& target_name,
                                  const Array& arguments_descriptor,
                                  intptr_t deopt_id,
                                  intptr_t num_args_tested,
                                  RebindRule rebind_rule,
                                  const AbstractType& receiver_type);

  static void WriteSentinel(const Array& data, intptr_t test_entry_length);

  // A cache of VM heap allocated preinitialized empty ic data entry arrays.
  static RawArray* cached_icdata_arrays_[kCachedICDataArrayCount];

  FINAL_HEAP_OBJECT_IMPLEMENTATION(ICData, Object);
  friend class Class;
  friend class ICDataTestTask;
  friend class Interpreter;
  friend class SnapshotWriter;
  friend class Serializer;
  friend class Deserializer;
};
| |
// Often used constants for number of free function type parameters
// (values passed as |num_free_fun_type_params|, e.g. to
// Function::HasInstantiatedSignature below).
enum {
  // No function type parameters are considered free.
  kNoneFree = 0,

  // 'kCurrentAndEnclosingFree' is used when partially applying a signature
  // function to a set of type arguments. It indicates that the set of type
  // parameters declared by the current function and enclosing functions should
  // be considered free, and the current function type parameters should be
  // substituted as well.
  //
  // For instance, if the signature "<T>(T, R) => T" is instantiated with
  // function type arguments [int, String] and kCurrentAndEnclosingFree is
  // supplied, the result of the instantiation will be "(String, int) => int".
  kCurrentAndEnclosingFree = kMaxInt32 - 1,

  // Only parameters declared by enclosing functions are free.
  // NOTE(review): the constant's name suggests *all* function type parameters
  // are free here, which contradicts the sentence above - verify against the
  // uses of num_free_fun_type_params before relying on either reading.
  kAllFree = kMaxInt32,
};
| |
| class Function : public Object { |
| public: |
| RawString* name() const { return raw_ptr()->name_; } |
| RawString* UserVisibleName() const; // Same as scrubbed name. |
| RawString* QualifiedScrubbedName() const { |
| return QualifiedName(kScrubbedName); |
| } |
| RawString* QualifiedUserVisibleName() const { |
| return QualifiedName(kUserVisibleName); |
| } |
| virtual RawString* DictionaryName() const { return name(); } |
| |
| RawString* GetSource() const; |
| |
| // Return the type of this function's signature. It may not be canonical yet. |
| // For example, if this function has a signature of the form |
| // '(T, [B, C]) => R', where 'T' and 'R' are type parameters of the |
| // owner class of this function, then its signature type is a parameterized |
| // function type with uninstantiated type arguments 'T' and 'R' as elements of |
| // its type argument vector. |
| RawType* SignatureType() const; |
| RawType* ExistingSignatureType() const; |
| |
| // Update the signature type (with a canonical version). |
| void SetSignatureType(const Type& value) const; |
| |
| // Return a new function with instantiated result and parameter types. |
| RawFunction* InstantiateSignatureFrom( |
| const TypeArguments& instantiator_type_arguments, |
| const TypeArguments& function_type_arguments, |
| intptr_t num_free_fun_type_params, |
| Heap::Space space) const; |
| |
| // Build a string of the form '<T>(T, {B b, C c}) => R' representing the |
| // internal signature of the given function. In this example, T is a type |
| // parameter of this function and R is a type parameter of class C, the owner |
| // of the function. B and C are not type parameters. |
| RawString* Signature() const { return BuildSignature(kInternalName); } |
| |
| // Build a string of the form '<T>(T, {B b, C c}) => R' representing the |
| // user visible signature of the given function. In this example, T is a type |
| // parameter of this function and R is a type parameter of class C, the owner |
| // of the function. B and C are not type parameters. |
| // Implicit parameters are hidden. |
| RawString* UserVisibleSignature() const { |
| return BuildSignature(kUserVisibleName); |
| } |
| |
| // Returns true if the signature of this function is instantiated, i.e. if it |
| // does not involve generic parameter types or generic result type. |
| // Note that function type parameters declared by this function do not make |
| // its signature uninstantiated, only type parameters declared by parent |
| // generic functions or class type parameters. |
| bool HasInstantiatedSignature(Genericity genericity = kAny, |
| intptr_t num_free_fun_type_params = kAllFree, |
| TrailPtr trail = NULL) const; |
| |
| // Reloading support: |
| void Reparent(const Class& new_cls) const; |
| void ZeroEdgeCounters() const; |
| |
| RawClass* Owner() const; |
| void set_owner(const Object& value) const; |
| RawClass* origin() const; |
| RawScript* script() const; |
| RawObject* RawOwner() const { return raw_ptr()->owner_; } |
| |
| RawRegExp* regexp() const; |
| intptr_t string_specialization_cid() const; |
| bool is_sticky_specialization() const; |
| void SetRegExpData(const RegExp& regexp, |
| intptr_t string_specialization_cid, |
| bool sticky) const; |
| |
| RawString* native_name() const; |
| void set_native_name(const String& name) const; |
| |
| RawAbstractType* result_type() const { return raw_ptr()->result_type_; } |
| void set_result_type(const AbstractType& value) const; |
| |
| // The parameters, starting with NumImplicitParameters() parameters which are |
| // only visible to the VM, but not to Dart users. |
| // Note that type checks exclude implicit parameters. |
| RawAbstractType* ParameterTypeAt(intptr_t index) const; |
| void SetParameterTypeAt(intptr_t index, const AbstractType& value) const; |
| RawArray* parameter_types() const { return raw_ptr()->parameter_types_; } |
| void set_parameter_types(const Array& value) const; |
| |
| // Parameter names are valid for all valid parameter indices, and are not |
| // limited to named optional parameters. |
| RawString* ParameterNameAt(intptr_t index) const; |
| void SetParameterNameAt(intptr_t index, const String& value) const; |
| RawArray* parameter_names() const { return raw_ptr()->parameter_names_; } |
| void set_parameter_names(const Array& value) const; |
| |
| // The type parameters (and their bounds) are specified as an array of |
| // TypeParameter. |
| RawTypeArguments* type_parameters() const { |
| return raw_ptr()->type_parameters_; |
| } |
| void set_type_parameters(const TypeArguments& value) const; |
| intptr_t NumTypeParameters(Thread* thread) const; |
| intptr_t NumTypeParameters() const { |
| return NumTypeParameters(Thread::Current()); |
| } |
| |
| // Returns true if this function has the same number of type parameters with |
| // equal bounds as the other function. Type parameter names are ignored. |
| bool HasSameTypeParametersAndBounds(const Function& other) const; |
| |
| // Return the number of type parameters declared in parent generic functions. |
| intptr_t NumParentTypeParameters() const; |
| |
| // Print the signature type of this function and of all of its parents. |
| void PrintSignatureTypes() const; |
| |
| // Return a TypeParameter if the type_name is a type parameter of this |
| // function or of one of its parent functions. |
| // Unless NULL, adjust function_level accordingly (in and out parameter). |
| // Return null otherwise. |
| RawTypeParameter* LookupTypeParameter(const String& type_name, |
| intptr_t* function_level) const; |
| |
| // Return true if this function declares type parameters. |
| bool IsGeneric() const { return NumTypeParameters(Thread::Current()) > 0; } |
| |
| // Return true if any parent function of this function is generic. |
| bool HasGenericParent() const; |
| |
| // Not thread-safe; must be called in the main thread. |
| // Sets function's code and code's function. |
| void InstallOptimizedCode(const Code& code) const; |
| void AttachCode(const Code& value) const; |
| void SetInstructions(const Code& value) const; |
| void ClearCode() const; |
| |
| // Disables optimized code and switches to unoptimized code. |
| void SwitchToUnoptimizedCode() const; |
| |
| // Ensures that the function has code. If there is no code it compiles the |
| // unoptimized version of the code. If the code contains errors, it calls |
| // Exceptions::PropagateError and does not return. Normally returns the |
| // current code, whether it is optimized or unoptimized. |
| RawCode* EnsureHasCode() const; |
| |
| // Disables optimized code and switches to unoptimized code (or the lazy |
| // compilation stub). |
| void SwitchToLazyCompiledUnoptimizedCode() const; |
| |
| // Compiles unoptimized code (if necessary) and attaches it to the function. |
| void EnsureHasCompiledUnoptimizedCode() const; |
| |
| // Return the most recently compiled and installed code for this function. |
| // It is not the only Code object that points to this function. |
| RawCode* CurrentCode() const { return raw_ptr()->code_; } |
| |
| RawCode* unoptimized_code() const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| return static_cast<RawCode*>(Object::null()); |
| #else |
| return raw_ptr()->unoptimized_code_; |
| #endif |
| } |
| void set_unoptimized_code(const Code& value) const; |
| bool HasCode() const; |
| static bool HasCode(RawFunction* function); |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| static bool HasBytecode(RawFunction* function); |
| #endif |
| |
| static intptr_t code_offset() { return OFFSET_OF(RawFunction, code_); } |
| |
| static intptr_t entry_point_offset() { |
| return OFFSET_OF(RawFunction, entry_point_); |
| } |
| |
| static intptr_t unchecked_entry_point_offset() { |
| return OFFSET_OF(RawFunction, unchecked_entry_point_); |
| } |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| bool IsBytecodeAllowed(Zone* zone) const; |
| void AttachBytecode(const Bytecode& bytecode) const; |
| RawBytecode* bytecode() const { return raw_ptr()->bytecode_; } |
| bool HasBytecode() const; |
| #endif |
| |
| virtual intptr_t Hash() const; |
| |
| // Returns true if there is at least one debugger breakpoint |
| // set in this function. |
| bool HasBreakpoint() const; |
| |
| RawContextScope* context_scope() const; |
| void set_context_scope(const ContextScope& value) const; |
| |
| // Enclosing function of this local function. |
| RawFunction* parent_function() const; |
| |
| // Enclosing outermost function of this local function. |
| RawFunction* GetOutermostFunction() const; |
| |
| void set_extracted_method_closure(const Function& function) const; |
| RawFunction* extracted_method_closure() const; |
| |
| void set_saved_args_desc(const Array& array) const; |
| RawArray* saved_args_desc() const; |
| |
| void set_accessor_field(const Field& value) const; |
| RawField* accessor_field() const; |
| |
| bool IsMethodExtractor() const { |
| return kind() == RawFunction::kMethodExtractor; |
| } |
| |
| bool IsNoSuchMethodDispatcher() const { |
| return kind() == RawFunction::kNoSuchMethodDispatcher; |
| } |
| |
| bool IsInvokeFieldDispatcher() const { |
| return kind() == RawFunction::kInvokeFieldDispatcher; |
| } |
| |
| bool IsDynamicInvocationForwader() const { |
| return kind() == RawFunction::kDynamicInvocationForwarder; |
| } |
| |
| bool IsImplicitGetterOrSetter() const { |
| return kind() == RawFunction::kImplicitGetter || |
| kind() == RawFunction::kImplicitSetter || |
| kind() == RawFunction::kImplicitStaticFinalGetter; |
| } |
| |
| // Returns true iff an implicit closure function has been created |
| // for this function. |
| bool HasImplicitClosureFunction() const { |
| return implicit_closure_function() != null(); |
| } |
| |
| // Returns the closure function implicitly created for this function. If none |
| // exists yet, create one and remember it. Implicit closure functions are |
| // used in VM Closure instances that represent results of tear-off operations. |
| RawFunction* ImplicitClosureFunction() const; |
| void DropUncompiledImplicitClosureFunction() const; |
| |
| // Return the closure implicitly created for this function. |
| // If none exists yet, create one and remember it. |
| RawInstance* ImplicitStaticClosure() const; |
| |
| RawInstance* ImplicitInstanceClosure(const Instance& receiver) const; |
| |
| intptr_t ComputeClosureHash() const; |
| |
| // Redirection information for a redirecting factory. |
| bool IsRedirectingFactory() const; |
| RawType* RedirectionType() const; |
| void SetRedirectionType(const Type& type) const; |
| RawString* RedirectionIdentifier() const; |
| void SetRedirectionIdentifier(const String& identifier) const; |
| RawFunction* RedirectionTarget() const; |
| void SetRedirectionTarget(const Function& target) const; |
| |
| RawFunction::Kind kind() const { |
| return KindBits::decode(raw_ptr()->kind_tag_); |
| } |
| static RawFunction::Kind kind(RawFunction* function) { |
| return KindBits::decode(function->ptr()->kind_tag_); |
| } |
| |
| RawFunction::AsyncModifier modifier() const { |
| return ModifierBits::decode(raw_ptr()->kind_tag_); |
| } |
| |
| static const char* KindToCString(RawFunction::Kind kind); |
| |
| bool IsGenerativeConstructor() const { |
| return (kind() == RawFunction::kConstructor) && !is_static(); |
| } |
| bool IsImplicitConstructor() const; |
| bool IsFactory() const { |
| return (kind() == RawFunction::kConstructor) && is_static(); |
| } |
| bool IsDynamicFunction(bool allow_abstract = false) const { |
| if (is_static() || (!allow_abstract && is_abstract())) { |
| return false; |
| } |
| switch (kind()) { |
| case RawFunction::kRegularFunction: |
| case RawFunction::kGetterFunction: |
| case RawFunction::kSetterFunction: |
| case RawFunction::kImplicitGetter: |
| case RawFunction::kImplicitSetter: |
| case RawFunction::kMethodExtractor: |
| case RawFunction::kNoSuchMethodDispatcher: |
| case RawFunction::kInvokeFieldDispatcher: |
| case RawFunction::kDynamicInvocationForwarder: |
| return true; |
| case RawFunction::kClosureFunction: |
| case RawFunction::kImplicitClosureFunction: |
| case RawFunction::kSignatureFunction: |
| case RawFunction::kConstructor: |
| case RawFunction::kImplicitStaticFinalGetter: |
| case RawFunction::kIrregexpFunction: |
| return false; |
| default: |
| UNREACHABLE(); |
| return false; |
| } |
| } |
| bool IsStaticFunction() const { |
| if (!is_static()) { |
| return false; |
| } |
| switch (kind()) { |
| case RawFunction::kRegularFunction: |
| case RawFunction::kGetterFunction: |
| case RawFunction::kSetterFunction: |
| case RawFunction::kImplicitGetter: |
| case RawFunction::kImplicitSetter: |
| case RawFunction::kImplicitStaticFinalGetter: |
| case RawFunction::kIrregexpFunction: |
| return true; |
| case RawFunction::kClosureFunction: |
| case RawFunction::kImplicitClosureFunction: |
| case RawFunction::kSignatureFunction: |
| case RawFunction::kConstructor: |
| case RawFunction::kMethodExtractor: |
| case RawFunction::kNoSuchMethodDispatcher: |
| case RawFunction::kInvokeFieldDispatcher: |
| case RawFunction::kDynamicInvocationForwarder: |
| return false; |
| default: |
| UNREACHABLE(); |
| return false; |
| } |
| } |
| bool IsInFactoryScope() const; |
| |
| bool NeedsArgumentTypeChecks(Isolate* I) const { |
| if (!I->should_emit_strong_mode_checks()) { |
| return false; |
| } |
| return IsClosureFunction() || |
| !(is_static() || (kind() == RawFunction::kConstructor)); |
| } |
| |
| bool MayHaveUncheckedEntryPoint(Isolate* I) const; |
| |
| TokenPosition token_pos() const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| return TokenPosition(); |
| #else |
| return raw_ptr()->token_pos_; |
| #endif |
| } |
| void set_token_pos(TokenPosition value) const; |
| |
| TokenPosition end_token_pos() const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| return TokenPosition(); |
| #else |
| return raw_ptr()->end_token_pos_; |
| #endif |
| } |
| void set_end_token_pos(TokenPosition value) const { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| UNREACHABLE(); |
| #else |
| StoreNonPointer(&raw_ptr()->end_token_pos_, value); |
| #endif |
| } |
| |
| intptr_t num_fixed_parameters() const { |
| return RawFunction::PackedNumFixedParameters::decode( |
| raw_ptr()->packed_fields_); |
| } |
| void set_num_fixed_parameters(intptr_t value) const; |
| |
| uint32_t packed_fields() const { return raw_ptr()->packed_fields_; } |
| void set_packed_fields(uint32_t packed_fields) const; |
| |
| bool HasOptionalParameters() const { |
| return RawFunction::PackedNumOptionalParameters::decode( |
| raw_ptr()->packed_fields_) > 0; |
| } |
| bool HasOptionalNamedParameters() const { |
| return HasOptionalParameters() && |
| RawFunction::PackedHasNamedOptionalParameters::decode( |
| raw_ptr()->packed_fields_); |
| } |
| bool HasOptionalPositionalParameters() const { |
| return HasOptionalParameters() && !HasOptionalNamedParameters(); |
| } |
| intptr_t NumOptionalParameters() const { |
| return RawFunction::PackedNumOptionalParameters::decode( |
| raw_ptr()->packed_fields_); |
| } |
| void SetNumOptionalParameters(intptr_t num_optional_parameters, |
| bool are_optional_positional) const; |
| |
| intptr_t NumOptionalPositionalParameters() const { |
| return HasOptionalPositionalParameters() ? NumOptionalParameters() : 0; |
| } |
| |
| intptr_t NumOptionalNamedParameters() const { |
| return HasOptionalNamedParameters() ? NumOptionalParameters() : 0; |
| } |
| |
  // Total number of declared parameters; defined out of line.
  intptr_t NumParameters() const;

  // Number of implicit parameters (presumably the receiver/closure
  // argument -- see the out-of-line definition for the exact rules).
  intptr_t NumImplicitParameters() const;
| |
  // Accessors for the JIT-only counter fields named by
  // JIT_FUNCTION_COUNTERS. In the precompiled runtime those fields do not
  // exist on RawFunction: offsets and setters are UNREACHABLE and getters
  // return 0.
#if defined(DART_PRECOMPILED_RUNTIME)
#define DEFINE_GETTERS_AND_SETTERS(return_type, type, name)                    \
  static intptr_t name##_offset() {                                            \
    UNREACHABLE();                                                             \
    return 0;                                                                  \
  }                                                                            \
  return_type name() const { return 0; }                                       \
                                                                               \
  void set_##name(type value) const { UNREACHABLE(); }
#else
  // JIT: plain field accessors; counters are non-pointer data, hence
  // StoreNonPointer (no write barrier).
#define DEFINE_GETTERS_AND_SETTERS(return_type, type, name)                    \
  static intptr_t name##_offset() { return OFFSET_OF(RawFunction, name##_); }  \
  return_type name() const { return raw_ptr()->name##_; }                      \
                                                                               \
  void set_##name(type value) const {                                          \
    StoreNonPointer(&raw_ptr()->name##_, value);                               \
  }
#endif

  JIT_FUNCTION_COUNTERS(DEFINE_GETTERS_AND_SETTERS)

#undef DEFINE_GETTERS_AND_SETTERS
| |
| static const intptr_t kMaxInstructionCount = (1 << 16) - 1; |
| |
| void SetOptimizedInstructionCountClamped(uintptr_t value) const { |
| if (value > kMaxInstructionCount) value = kMaxInstructionCount; |
| set_optimized_instruction_count(value); |
| } |
| |
| void SetOptimizedCallSiteCountClamped(uintptr_t value) const { |
| if (value > kMaxInstructionCount) value = kMaxInstructionCount; |
| set_optimized_call_site_count(value); |
| } |
| |
  // Attaches the kernel data backing this function: the owning script,
  // the kernel blob, and this function's offset within the program.
  void SetKernelDataAndScript(const Script& script,
                              const ExternalTypedData& data,
                              intptr_t offset);

  // Offset of this function within the kernel program blob.
  intptr_t KernelDataProgramOffset() const;

  // The kernel blob backing this function, if any.
  RawExternalTypedData* KernelData() const;

  // Whether the optimizing compiler may process this function.
  bool IsOptimizable() const;
  void SetIsOptimizable(bool value) const;

  // Whether this function may be inlined into callers.
  bool CanBeInlined() const;
| |
  // The MethodRecognizer id of this function, decoded from the kind tag
  // bits; MethodRecognizer::kUnknown when the function is not recognized.
  MethodRecognizer::Kind recognized_kind() const {
    return RecognizedBits::decode(raw_ptr()->kind_tag_);
  }
  void set_recognized_kind(MethodRecognizer::Kind value) const;

  // Whether this function is special-cased ("recognized") by the compiler.
  bool IsRecognized() const {
    return recognized_kind() != MethodRecognizer::kUnknown;
  }
| |
  // Whether an optimized version of this function currently exists;
  // defined out of line.
  bool HasOptimizedCode() const;

  // Whether the function is ready for compiler optimizations.
  bool ShouldCompilerOptimize() const;

  // Returns true if the argument counts are valid for calling this function.
  // Otherwise, it returns false and the reason (if error_message is not NULL).
  bool AreValidArgumentCounts(intptr_t num_type_arguments,
                              intptr_t num_arguments,
                              intptr_t num_named_arguments,
                              String* error_message) const;

  // Returns a TypeError if the provided arguments don't match the function
  // parameter types, NULL otherwise. Assumes AreValidArguments is called first.
  RawObject* DoArgumentTypesMatch(
      const Array& args,
      const ArgumentsDescriptor& arg_names,
      const TypeArguments& instantiator_type_args) const;

  // Returns true if the type argument count, total argument count and the names
  // of optional arguments are valid for calling this function.
  // Otherwise, it returns false and the reason (if error_message is not NULL).
  bool AreValidArguments(intptr_t num_type_arguments,
                         intptr_t num_arguments,
                         const Array& argument_names,
                         String* error_message) const;
  bool AreValidArguments(const ArgumentsDescriptor& args_desc,
                         String* error_message) const;

  // Fully qualified name uniquely identifying the function under gdb and during
  // ast printing. The special ':' character, if present, is replaced by '_'.
  const char* ToFullyQualifiedCString() const;

  // Variant including the library name prefix; see the out-of-line
  // definition for the exact format.
  const char* ToLibNamePrefixedQualifiedCString() const;

  // Qualified name; see the out-of-line definition for the exact format.
  const char* ToQualifiedCString() const;
| |
  // Returns true if the type of this function is a subtype of the type of
  // the other function.
  bool IsSubtypeOf(const Function& other,
                   Error* bound_error,
                   TrailPtr bound_trail,
                   Heap::Space space) const {
    return TypeTest(kIsSubtypeOf, other, bound_error, bound_trail, space);
  }

  // Returns true if the type of this function is more specific than the type of
  // the other function.
  bool IsMoreSpecificThan(const Function& other,
                          Error* bound_error,
                          TrailPtr bound_trail,
                          Heap::Space space) const {
    return TypeTest(kIsMoreSpecificThan, other, bound_error, bound_trail,
                    space);
  }

  // Check the subtype or 'more specific' relationship; shared
  // implementation behind IsSubtypeOf() and IsMoreSpecificThan().
  bool TypeTest(TypeTestKind test_kind,
                const Function& other,
                Error* bound_error,
                TrailPtr bound_trail,
                Heap::Space space) const;
| |
| bool IsDispatcherOrImplicitAccessor() const { |
| switch (kind()) { |
| case RawFunction::kImplicitGetter: |
| case RawFunction::kImplicitSetter: |
| case RawFunction::kNoSuchMethodDispatcher: |
| case RawFunction::kInvokeFieldDispatcher: |
| case RawFunction::kDynamicInvocationForwarder: |
| return true; |
| default: |
| return false; |
| } |
| } |
| |
| // Returns true if this function represents an explicit getter function. |
| bool IsGetterFunction() const { |
| return kind() == RawFunction::kGetterFunction; |
| } |
| |
| // Returns true if this function represents an implicit getter function. |
| bool IsImplicitGetterFunction() const { |
| return kind() == RawFunction::kImplicitGetter; |
| } |
| |
| // Returns true if this function represents an explicit setter function. |
| bool IsSetterFunction() const { |
| return kind() == RawFunction::kSetterFunction; |
| } |
| |
| // Returns true if this function represents an implicit setter function. |
| bool IsImplicitSetterFunction() const { |
| return kind() == RawFunction::kImplicitSetter; |
| } |
| |
| // Returns true if this function represents an implicit static field |
| // initializer function. |
| bool IsImplicitStaticFieldInitializer() const { |
| return kind() == RawFunction::kImplicitStaticFinalGetter; |
| } |
| |
| // Returns true if this function represents a (possibly implicit) closure |
| // function. |
| bool IsClosureFunction() const { |
| RawFunction::Kind k = kind(); |
| return (k == RawFunction::kClosureFunction) || |
| (k == RawFunction::kImplicitClosureFunction); |
| } |
| |
| // Returns true if this function represents a generated irregexp function. |
| bool IsIrregexpFunction() const { |
| return kind() == RawFunction::kIrregexpFunction; |
| } |
| |
| // Returns true if this function represents an implicit closure function. |
| bool IsImplicitClosureFunction() const { |
| return kind() == RawFunction::kImplicitClosureFunction; |
| } |
| |
| // Returns true if this function represents a non implicit closure function. |
| bool IsNonImplicitClosureFunction() const { |
| return IsClosureFunction() && !IsImplicitClosureFunction(); |
| } |
| |
| // Returns true if this function represents an implicit static closure |
| // function. |
| bool IsImplicitStaticClosureFunction() const { |
| return IsImplicitClosureFunction() && is_static(); |
| } |
| static bool IsImplicitStaticClosureFunction(RawFunction* func); |
| |
| // Returns true if this function represents an implicit instance closure |
| // function. |
| bool IsImplicitInstanceClosureFunction() const { |
| return IsImplicitClosureFunction() && !is_static(); |
| } |
| |
|