// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#ifndef RUNTIME_VM_OBJECT_H_
#define RUNTIME_VM_OBJECT_H_
#include "include/dart_api.h"
#include "platform/assert.h"
#include "platform/utils.h"
#include "vm/json_stream.h"
#include "vm/bitmap.h"
#include "vm/dart.h"
#include "vm/flags.h"
#include "vm/globals.h"
#include "vm/growable_array.h"
#include "vm/handles.h"
#include "vm/heap.h"
#include "vm/isolate.h"
#include "vm/method_recognizer.h"
#include "vm/os.h"
#include "vm/raw_object.h"
#include "vm/report.h"
#include "vm/scanner.h"
#include "vm/tags.h"
#include "vm/thread.h"
#include "vm/token_position.h"
namespace dart {
// Forward declarations.
namespace kernel {
class Program;
class TreeNode;
}
#define DEFINE_FORWARD_DECLARATION(clazz) class clazz;
CLASS_LIST(DEFINE_FORWARD_DECLARATION)
#undef DEFINE_FORWARD_DECLARATION
class Api;
class ArgumentsDescriptor;
class Assembler;
class Closure;
class Code;
class DisassemblyFormatter;
class DeoptInstr;
class FinalizablePersistentHandle;
class LocalScope;
#define REUSABLE_FORWARD_DECLARATION(name) class Reusable##name##HandleScope;
REUSABLE_HANDLE_LIST(REUSABLE_FORWARD_DECLARATION)
#undef REUSABLE_FORWARD_DECLARATION
class Symbols;
#if defined(DEBUG)
#define CHECK_HANDLE() CheckHandle();
#else
#define CHECK_HANDLE()
#endif
#define BASE_OBJECT_IMPLEMENTATION(object, super) \
public: /* NOLINT */ \
Raw##object* raw() const { return reinterpret_cast<Raw##object*>(raw_); } \
bool Is##object() const { return true; } \
static object& Handle(Zone* zone, Raw##object* raw_ptr) { \
object* obj = reinterpret_cast<object*>(VMHandles::AllocateHandle(zone)); \
initializeHandle(obj, raw_ptr); \
return *obj; \
} \
static object& Handle() { \
return Handle(Thread::Current()->zone(), object::null()); \
} \
static object& Handle(Zone* zone) { return Handle(zone, object::null()); } \
static object& Handle(Raw##object* raw_ptr) { \
return Handle(Thread::Current()->zone(), raw_ptr); \
} \
static object& CheckedHandle(Zone* zone, RawObject* raw_ptr) { \
object* obj = reinterpret_cast<object*>(VMHandles::AllocateHandle(zone)); \
initializeHandle(obj, raw_ptr); \
if (!obj->Is##object()) { \
FATAL2("Handle check failed: saw %s expected %s", obj->ToCString(), \
#object); \
} \
return *obj; \
} \
static object& CheckedHandle(RawObject* raw_ptr) { \
return CheckedHandle(Thread::Current()->zone(), raw_ptr); \
} \
static object& ZoneHandle(Zone* zone, Raw##object* raw_ptr) { \
object* obj = \
reinterpret_cast<object*>(VMHandles::AllocateZoneHandle(zone)); \
initializeHandle(obj, raw_ptr); \
return *obj; \
} \
static object* ReadOnlyHandle() { \
object* obj = reinterpret_cast<object*>(Dart::AllocateReadOnlyHandle()); \
initializeHandle(obj, object::null()); \
return obj; \
} \
static object& ZoneHandle(Zone* zone) { \
return ZoneHandle(zone, object::null()); \
} \
static object& ZoneHandle() { \
return ZoneHandle(Thread::Current()->zone(), object::null()); \
} \
static object& ZoneHandle(Raw##object* raw_ptr) { \
return ZoneHandle(Thread::Current()->zone(), raw_ptr); \
} \
static object& CheckedZoneHandle(Zone* zone, RawObject* raw_ptr) { \
object* obj = \
reinterpret_cast<object*>(VMHandles::AllocateZoneHandle(zone)); \
initializeHandle(obj, raw_ptr); \
if (!obj->Is##object()) { \
FATAL2("Handle check failed: saw %s expected %s", obj->ToCString(), \
#object); \
} \
return *obj; \
} \
static object& CheckedZoneHandle(RawObject* raw_ptr) { \
return CheckedZoneHandle(Thread::Current()->zone(), raw_ptr); \
} \
/* T::Cast cannot be applied to a null Object, because the object vtable */ \
/* is not set up for type T, although some methods are supposed to work */ \
/* with null, for example Instance::Equals(). */ \
static const object& Cast(const Object& obj) { \
ASSERT(obj.Is##object()); \
return reinterpret_cast<const object&>(obj); \
} \
static Raw##object* RawCast(RawObject* raw) { \
ASSERT(Object::Handle(raw).Is##object()); \
return reinterpret_cast<Raw##object*>(raw); \
} \
static Raw##object* null() { \
return reinterpret_cast<Raw##object*>(Object::null()); \
} \
virtual const char* ToCString() const; \
static const ClassId kClassId = k##object##Cid; \
\
private: /* NOLINT */ \
/* Initialize the handle based on the raw_ptr in the presence of null. */ \
static void initializeHandle(object* obj, RawObject* raw_ptr) { \
if (raw_ptr != Object::null()) { \
obj->SetRaw(raw_ptr); \
} else { \
obj->raw_ = Object::null(); \
object fake_object; \
obj->set_vtable(fake_object.vtable()); \
} \
} \
/* Disallow allocation, copy constructors and override super assignment. */ \
public: /* NOLINT */ \
void operator delete(void* pointer) { UNREACHABLE(); } \
\
private: /* NOLINT */ \
void* operator new(size_t size); \
object(const object& value); \
void operator=(Raw##super* value); \
void operator=(const object& value); \
void operator=(const super& value);
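// Illustrative use of the handle allocators declared by the macro above (a
// sketch only; 'cls' and 'some_raw_object' stand in for whatever the caller
// actually has in scope):
//
//   Zone* zone = Thread::Current()->zone();
//   const String& name = String::Handle(zone, cls.Name());  // scoped handle
//   String& result = String::ZoneHandle(zone);  // survives the current scope
//   result ^= some_raw_object;  // checked assignment, see operator^= below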
// Conditionally include object_service.cc functionality in the vtable to avoid
// link errors like the following:
//
// object.o:(.rodata._ZTVN4....E[_ZTVN4...E]+0x278):
// undefined reference to
// `dart::Instance::PrintSharedInstanceJSON(dart::JSONObject*, bool) const'.
//
#ifndef PRODUCT
#define OBJECT_SERVICE_SUPPORT(object) \
protected: /* NOLINT */ \
/* Object is printed as JSON into stream. If ref is true only a header */ \
/* with an object id is printed. If ref is false the object is fully */ \
/* printed. */ \
virtual void PrintJSONImpl(JSONStream* stream, bool ref) const; \
virtual const char* JSONType() const { return "" #object; }
#else
#define OBJECT_SERVICE_SUPPORT(object) protected: /* NOLINT */
#endif // !PRODUCT
#define SNAPSHOT_READER_SUPPORT(object) \
static Raw##object* ReadFrom(SnapshotReader* reader, intptr_t object_id, \
intptr_t tags, Snapshot::Kind, \
bool as_reference); \
friend class SnapshotReader;
#define OBJECT_IMPLEMENTATION(object, super) \
public: /* NOLINT */ \
void operator=(Raw##object* value) { initializeHandle(this, value); } \
void operator^=(RawObject* value) { \
initializeHandle(this, value); \
ASSERT(IsNull() || Is##object()); \
} \
\
protected: /* NOLINT */ \
object() : super() {} \
BASE_OBJECT_IMPLEMENTATION(object, super) \
OBJECT_SERVICE_SUPPORT(object)
#define HEAP_OBJECT_IMPLEMENTATION(object, super) \
OBJECT_IMPLEMENTATION(object, super); \
const Raw##object* raw_ptr() const { \
ASSERT(raw() != null()); \
return raw()->ptr(); \
} \
SNAPSHOT_READER_SUPPORT(object) \
friend class StackFrame; \
friend class Thread;
// This macro is used to denote types that do not have a sub-type.
#define FINAL_HEAP_OBJECT_IMPLEMENTATION_HELPER(object, rettype, super) \
public: /* NOLINT */ \
void operator=(Raw##object* value) { \
raw_ = value; \
CHECK_HANDLE(); \
} \
void operator^=(RawObject* value) { \
raw_ = value; \
CHECK_HANDLE(); \
} \
\
private: /* NOLINT */ \
object() : super() {} \
BASE_OBJECT_IMPLEMENTATION(object, super) \
OBJECT_SERVICE_SUPPORT(object) \
const Raw##object* raw_ptr() const { \
ASSERT(raw() != null()); \
return raw()->ptr(); \
} \
static intptr_t NextFieldOffset() { return -kWordSize; } \
SNAPSHOT_READER_SUPPORT(rettype) \
friend class StackFrame; \
friend class Thread;
#define FINAL_HEAP_OBJECT_IMPLEMENTATION(object, super) \
FINAL_HEAP_OBJECT_IMPLEMENTATION_HELPER(object, object, super)
#define MINT_OBJECT_IMPLEMENTATION(object, rettype, super) \
FINAL_HEAP_OBJECT_IMPLEMENTATION_HELPER(object, rettype, super)
class Object {
public:
virtual ~Object() {}
RawObject* raw() const { return raw_; }
void operator=(RawObject* value) { initializeHandle(this, value); }
uword CompareAndSwapTags(uword old_tags, uword new_tags) const {
return AtomicOperations::CompareAndSwapWord(&raw()->ptr()->tags_, old_tags,
new_tags);
}
bool IsCanonical() const { return raw()->IsCanonical(); }
void SetCanonical() const { raw()->SetCanonical(); }
void ClearCanonical() const { raw()->ClearCanonical(); }
intptr_t GetClassId() const {
return !raw()->IsHeapObject() ? static_cast<intptr_t>(kSmiCid)
: raw()->GetClassId();
}
inline RawClass* clazz() const;
static intptr_t tags_offset() { return OFFSET_OF(RawObject, tags_); }
// Class testers.
#define DEFINE_CLASS_TESTER(clazz) \
virtual bool Is##clazz() const { return false; }
CLASS_LIST_FOR_HANDLES(DEFINE_CLASS_TESTER);
#undef DEFINE_CLASS_TESTER
bool IsNull() const { return raw_ == null_; }
// Matches Object.toString on instances (except String::ToCString, bug 20583).
virtual const char* ToCString() const {
if (IsNull()) {
return "null";
} else {
return "Object";
}
}
#ifndef PRODUCT
void PrintJSON(JSONStream* stream, bool ref = true) const;
virtual void PrintJSONImpl(JSONStream* stream, bool ref) const;
virtual const char* JSONType() const { return IsNull() ? "null" : "Object"; }
#endif
// Returns the name that is used to identify an object in the
// namespace dictionary.
// Object::DictionaryName() returns String::null(). Only subclasses
// of Object that need to be entered in the library and library prefix
// namespaces need to provide an implementation.
virtual RawString* DictionaryName() const;
bool IsNew() const { return raw()->IsNewObject(); }
bool IsOld() const { return raw()->IsOldObject(); }
#if defined(DEBUG)
bool InVMHeap() const;
#else
bool InVMHeap() const { return raw()->IsVMHeapObject(); }
#endif // DEBUG
// Print the object on stdout for debugging.
void Print() const;
bool IsZoneHandle() const {
return VMHandles::IsZoneHandle(reinterpret_cast<uword>(this));
}
bool IsReadOnlyHandle() const;
bool IsNotTemporaryScopedHandle() const;
static Object& Handle(Zone* zone, RawObject* raw_ptr) {
Object* obj = reinterpret_cast<Object*>(VMHandles::AllocateHandle(zone));
initializeHandle(obj, raw_ptr);
return *obj;
}
static Object* ReadOnlyHandle() {
Object* obj = reinterpret_cast<Object*>(Dart::AllocateReadOnlyHandle());
initializeHandle(obj, Object::null());
return obj;
}
static Object& Handle() { return Handle(Thread::Current()->zone(), null_); }
static Object& Handle(Zone* zone) { return Handle(zone, null_); }
static Object& Handle(RawObject* raw_ptr) {
return Handle(Thread::Current()->zone(), raw_ptr);
}
static Object& ZoneHandle(Zone* zone, RawObject* raw_ptr) {
Object* obj =
reinterpret_cast<Object*>(VMHandles::AllocateZoneHandle(zone));
initializeHandle(obj, raw_ptr);
return *obj;
}
static Object& ZoneHandle() {
return ZoneHandle(Thread::Current()->zone(), null_);
}
static Object& ZoneHandle(RawObject* raw_ptr) {
return ZoneHandle(Thread::Current()->zone(), raw_ptr);
}
static RawObject* null() { return null_; }
static const Object& null_object() {
ASSERT(null_object_ != NULL);
return *null_object_;
}
static const Array& null_array() {
ASSERT(null_array_ != NULL);
return *null_array_;
}
static const String& null_string() {
ASSERT(null_string_ != NULL);
return *null_string_;
}
static const Instance& null_instance() {
ASSERT(null_instance_ != NULL);
return *null_instance_;
}
static const TypeArguments& null_type_arguments() {
ASSERT(null_type_arguments_ != NULL);
return *null_type_arguments_;
}
static const Array& empty_array() {
ASSERT(empty_array_ != NULL);
return *empty_array_;
}
static const Array& zero_array() {
ASSERT(zero_array_ != NULL);
return *zero_array_;
}
static const ContextScope& empty_context_scope() {
ASSERT(empty_context_scope_ != NULL);
return *empty_context_scope_;
}
static const ObjectPool& empty_object_pool() {
ASSERT(empty_object_pool_ != NULL);
return *empty_object_pool_;
}
static const PcDescriptors& empty_descriptors() {
ASSERT(empty_descriptors_ != NULL);
return *empty_descriptors_;
}
static const LocalVarDescriptors& empty_var_descriptors() {
ASSERT(empty_var_descriptors_ != NULL);
return *empty_var_descriptors_;
}
static const ExceptionHandlers& empty_exception_handlers() {
ASSERT(empty_exception_handlers_ != NULL);
return *empty_exception_handlers_;
}
static const Array& extractor_parameter_types() {
ASSERT(extractor_parameter_types_ != NULL);
return *extractor_parameter_types_;
}
static const Array& extractor_parameter_names() {
ASSERT(extractor_parameter_names_ != NULL);
return *extractor_parameter_names_;
}
// The sentinel is a value that cannot be produced by Dart code.
// It can be used to mark special values, for example to distinguish
// "uninitialized" fields.
static const Instance& sentinel() {
ASSERT(sentinel_ != NULL);
return *sentinel_;
}
// Value marking that we are transitioning from sentinel, e.g., computing
// a field value. Used to detect circular initialization.
static const Instance& transition_sentinel() {
ASSERT(transition_sentinel_ != NULL);
return *transition_sentinel_;
}
// Compiler's constant propagation constants.
static const Instance& unknown_constant() {
ASSERT(unknown_constant_ != NULL);
return *unknown_constant_;
}
static const Instance& non_constant() {
ASSERT(non_constant_ != NULL);
return *non_constant_;
}
static const Bool& bool_true() {
ASSERT(bool_true_ != NULL);
return *bool_true_;
}
static const Bool& bool_false() {
ASSERT(bool_false_ != NULL);
return *bool_false_;
}
static const Smi& smi_illegal_cid() {
ASSERT(smi_illegal_cid_ != NULL);
return *smi_illegal_cid_;
}
static const LanguageError& snapshot_writer_error() {
ASSERT(snapshot_writer_error_ != NULL);
return *snapshot_writer_error_;
}
static const LanguageError& branch_offset_error() {
ASSERT(branch_offset_error_ != NULL);
return *branch_offset_error_;
}
static const LanguageError& speculative_inlining_error() {
ASSERT(speculative_inlining_error_ != NULL);
return *speculative_inlining_error_;
}
static const LanguageError& background_compilation_error() {
ASSERT(background_compilation_error_ != NULL);
return *background_compilation_error_;
}
static const Array& vm_isolate_snapshot_object_table() {
ASSERT(vm_isolate_snapshot_object_table_ != NULL);
return *vm_isolate_snapshot_object_table_;
}
static const Type& dynamic_type() {
ASSERT(dynamic_type_ != NULL);
return *dynamic_type_;
}
static const Type& void_type() {
ASSERT(void_type_ != NULL);
return *void_type_;
}
static void set_vm_isolate_snapshot_object_table(const Array& table);
static RawClass* class_class() { return class_class_; }
static RawClass* dynamic_class() { return dynamic_class_; }
static RawClass* void_class() { return void_class_; }
static RawClass* unresolved_class_class() { return unresolved_class_class_; }
static RawClass* type_arguments_class() { return type_arguments_class_; }
static RawClass* patch_class_class() { return patch_class_class_; }
static RawClass* function_class() { return function_class_; }
static RawClass* closure_data_class() { return closure_data_class_; }
static RawClass* signature_data_class() { return signature_data_class_; }
static RawClass* redirection_data_class() { return redirection_data_class_; }
static RawClass* field_class() { return field_class_; }
static RawClass* literal_token_class() { return literal_token_class_; }
static RawClass* token_stream_class() { return token_stream_class_; }
static RawClass* script_class() { return script_class_; }
static RawClass* library_class() { return library_class_; }
static RawClass* namespace_class() { return namespace_class_; }
static RawClass* code_class() { return code_class_; }
static RawClass* instructions_class() { return instructions_class_; }
static RawClass* object_pool_class() { return object_pool_class_; }
static RawClass* pc_descriptors_class() { return pc_descriptors_class_; }
static RawClass* code_source_map_class() { return code_source_map_class_; }
static RawClass* stackmap_class() { return stackmap_class_; }
static RawClass* var_descriptors_class() { return var_descriptors_class_; }
static RawClass* exception_handlers_class() {
return exception_handlers_class_;
}
static RawClass* deopt_info_class() { return deopt_info_class_; }
static RawClass* context_class() { return context_class_; }
static RawClass* context_scope_class() { return context_scope_class_; }
static RawClass* api_error_class() { return api_error_class_; }
static RawClass* language_error_class() { return language_error_class_; }
static RawClass* unhandled_exception_class() {
return unhandled_exception_class_;
}
static RawClass* unwind_error_class() { return unwind_error_class_; }
static RawClass* singletargetcache_class() {
return singletargetcache_class_;
}
static RawClass* unlinkedcall_class() { return unlinkedcall_class_; }
static RawClass* icdata_class() { return icdata_class_; }
static RawClass* megamorphic_cache_class() {
return megamorphic_cache_class_;
}
static RawClass* subtypetestcache_class() { return subtypetestcache_class_; }
// Initialize the VM isolate.
static void InitNull(Isolate* isolate);
static void InitOnce(Isolate* isolate);
static void FinalizeVMIsolate(Isolate* isolate);
// Initialize a new isolate either from a Kernel IR, from source, or from a
// snapshot.
static RawError* Init(Isolate* isolate, kernel::Program* program);
static void MakeUnusedSpaceTraversable(const Object& obj,
intptr_t original_size,
intptr_t used_size);
static intptr_t InstanceSize() {
return RoundedAllocationSize(sizeof(RawObject));
}
static void VerifyBuiltinVtables();
static const ClassId kClassId = kObjectCid;
// Different kinds of type tests.
enum TypeTestKind { kIsSubtypeOf = 0, kIsMoreSpecificThan };
// Different kinds of name visibility.
enum NameVisibility {
// Internal names are the true names of classes, fields,
// etc. inside the vm. These names include privacy suffixes,
// getter prefixes, and trailing dots on unnamed constructors.
//
// The names of core implementation classes (like _OneByteString)
// are preserved as well.
//
// e.g.
// private getter -> get:foo@6be832b
// private constructor -> _MyClass@6b3832b.
// private named constructor -> _MyClass@6b3832b.named
// core impl class name shown -> _OneByteString
kInternalName = 0,
// Scrubbed names drop privacy suffixes, getter prefixes, and
// trailing dots on unnamed constructors. These names are used in
// the vm service.
//
// e.g.
// get:foo@6be832b -> foo
// _MyClass@6b3832b. -> _MyClass
// _MyClass@6b3832b.named -> _MyClass.named
// _OneByteString -> _OneByteString (not remapped)
kScrubbedName,
// User visible names are appropriate for reporting type errors
// directly to programmers. The names have been scrubbed and
// the names of core implementation classes are remapped to their
// public interface names.
//
// e.g.
// get:foo@6be832b -> foo
// _MyClass@6b3832b. -> _MyClass
// _MyClass@6b3832b.named -> _MyClass.named
// _OneByteString -> String (remapped)
kUserVisibleName
};
protected:
// Used for extracting the C++ vtable during bringup.
Object() : raw_(null_) {}
uword raw_value() const { return reinterpret_cast<uword>(raw()); }
inline void SetRaw(RawObject* value);
void CheckHandle() const;
cpp_vtable vtable() const { return bit_copy<cpp_vtable>(*this); }
void set_vtable(cpp_vtable value) { *vtable_address() = value; }
static RawObject* Allocate(intptr_t cls_id, intptr_t size, Heap::Space space);
static intptr_t RoundedAllocationSize(intptr_t size) {
return Utils::RoundUp(size, kObjectAlignment);
}
bool Contains(uword addr) const { return raw()->Contains(addr); }
// Start of field mutator guards.
//
// All writes to heap objects should ultimately pass through one of the
// methods below or their counterparts in RawObject, to ensure that the
// write barrier is correctly applied.
template <typename type>
void StorePointer(type const* addr, type value) const {
raw()->StorePointer(addr, value);
}
// Store a range of pointers [from, from + count) into [to, to + count).
// TODO(koda): Use this to fix Object::Clone's broken store buffer logic.
void StorePointers(RawObject* const* to,
RawObject* const* from,
intptr_t count) {
ASSERT(Contains(reinterpret_cast<uword>(to)));
if (raw()->IsNewObject()) {
memmove(const_cast<RawObject**>(to), from, count * kWordSize);
} else {
for (intptr_t i = 0; i < count; ++i) {
StorePointer(&to[i], from[i]);
}
}
}
// Use for storing into an explicitly Smi-typed field of an object
// (i.e., both the previous and new value are Smis).
void StoreSmi(RawSmi* const* addr, RawSmi* value) const {
raw()->StoreSmi(addr, value);
}
template <typename FieldType>
void StoreSimd128(const FieldType* addr, simd128_value_t value) const {
ASSERT(Contains(reinterpret_cast<uword>(addr)));
value.writeTo(const_cast<FieldType*>(addr));
}
// Needs two template arguments to allow assigning enums to fixed-size ints.
template <typename FieldType, typename ValueType>
void StoreNonPointer(const FieldType* addr, ValueType value) const {
// Can't use Contains, as it uses tags_, which is set through this method.
ASSERT(reinterpret_cast<uword>(addr) >= RawObject::ToAddr(raw()));
*const_cast<FieldType*>(addr) = value;
}
// Provides non-const access to non-pointer fields within the object. Such
// access does not need a write barrier, but it is *not* GC-safe, since the
// object might move, hence must be fully contained within a NoSafepointScope.
template <typename FieldType>
FieldType* UnsafeMutableNonPointer(const FieldType* addr) const {
// Allow pointers at the end of variable-length data, and disallow pointers
// within the header word.
ASSERT(Contains(reinterpret_cast<uword>(addr) - 1) &&
Contains(reinterpret_cast<uword>(addr) - kWordSize));
// At least check that there is a NoSafepointScope and hope it's big enough.
ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
return const_cast<FieldType*>(addr);
}
// Fail at link time if StoreNonPointer or UnsafeMutableNonPointer is
// instantiated with an object pointer type.
#define STORE_NON_POINTER_ILLEGAL_TYPE(type) \
template <typename ValueType> \
void StoreNonPointer(Raw##type* const* addr, ValueType value) const { \
UnimplementedMethod(); \
} \
Raw##type** UnsafeMutableNonPointer(Raw##type* const* addr) const { \
UnimplementedMethod(); \
return NULL; \
}
CLASS_LIST(STORE_NON_POINTER_ILLEGAL_TYPE);
void UnimplementedMethod() const;
#undef STORE_NON_POINTER_ILLEGAL_TYPE
// Allocate an object and copy the body of 'orig'.
static RawObject* Clone(const Object& orig, Heap::Space space);
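// Illustrative use of the mutator guards above from a subclass setter (a
// sketch; 'SomeClass' and its raw fields 'name_' and 'id_' are hypothetical):
//
//   void SomeClass::set_name(const String& value) const {
//     StorePointer(&raw_ptr()->name_, value.raw());  // write barrier applied
//   }
//   void SomeClass::set_id(intptr_t value) const {
//     StoreNonPointer(&raw_ptr()->id_, value);  // no write barrier needed
//   }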
// End of field mutator guards.
RawObject* raw_; // The raw object reference.
protected:
void AddCommonObjectProperties(JSONObject* jsobj,
const char* protocol_type,
bool ref) const;
private:
static intptr_t NextFieldOffset() {
// Indicates this class cannot be extended by dart code.
return -kWordSize;
}
static void InitializeObject(uword address,
intptr_t id,
intptr_t size,
bool is_vm_object);
static void RegisterClass(const Class& cls,
const String& name,
const Library& lib);
static void RegisterPrivateClass(const Class& cls,
const String& name,
const Library& lib);
/* Initialize the handle based on the raw_ptr in the presence of null. */
static void initializeHandle(Object* obj, RawObject* raw_ptr) {
if (raw_ptr != Object::null()) {
obj->SetRaw(raw_ptr);
} else {
obj->raw_ = Object::null();
Object fake_object;
obj->set_vtable(fake_object.vtable());
}
}
cpp_vtable* vtable_address() const {
uword vtable_addr = reinterpret_cast<uword>(this);
return reinterpret_cast<cpp_vtable*>(vtable_addr);
}
static cpp_vtable handle_vtable_;
static cpp_vtable builtin_vtables_[kNumPredefinedCids];
// The static values below are singletons shared between the different
// isolates. They are all allocated in the non-GC'd Dart::vm_isolate_.
static RawObject* null_;
static RawClass* class_class_; // Class of the Class vm object.
static RawClass* dynamic_class_; // Class of the 'dynamic' type.
static RawClass* void_class_; // Class of the 'void' type.
static RawClass* unresolved_class_class_; // Class of UnresolvedClass.
static RawClass* type_arguments_class_; // Class of TypeArguments vm object.
static RawClass* patch_class_class_; // Class of the PatchClass vm object.
static RawClass* function_class_; // Class of the Function vm object.
static RawClass* closure_data_class_; // Class of ClosureData vm obj.
static RawClass* signature_data_class_; // Class of SignatureData vm obj.
static RawClass* redirection_data_class_; // Class of RedirectionData vm obj.
static RawClass* field_class_; // Class of the Field vm object.
static RawClass* literal_token_class_; // Class of LiteralToken vm object.
static RawClass* token_stream_class_; // Class of the TokenStream vm object.
static RawClass* script_class_; // Class of the Script vm object.
static RawClass* library_class_; // Class of the Library vm object.
static RawClass* namespace_class_; // Class of Namespace vm object.
static RawClass* code_class_; // Class of the Code vm object.
static RawClass* instructions_class_; // Class of the Instructions vm object.
static RawClass* object_pool_class_; // Class of the ObjectPool vm object.
static RawClass* pc_descriptors_class_; // Class of PcDescriptors vm object.
static RawClass* code_source_map_class_; // Class of CodeSourceMap vm object.
static RawClass* stackmap_class_; // Class of StackMap vm object.
static RawClass* var_descriptors_class_; // Class of LocalVarDescriptors.
static RawClass* exception_handlers_class_; // Class of ExceptionHandlers.
static RawClass* deopt_info_class_; // Class of DeoptInfo.
static RawClass* context_class_; // Class of the Context vm object.
static RawClass* context_scope_class_; // Class of ContextScope vm object.
static RawClass* singletargetcache_class_; // Class of SingleTargetCache.
static RawClass* unlinkedcall_class_; // Class of UnlinkedCall.
static RawClass* icdata_class_; // Class of ICData.
static RawClass* megamorphic_cache_class_; // Class of MegamorphicCache.
static RawClass* subtypetestcache_class_; // Class of SubtypeTestCache.
static RawClass* api_error_class_; // Class of ApiError.
static RawClass* language_error_class_; // Class of LanguageError.
static RawClass* unhandled_exception_class_; // Class of UnhandledException.
static RawClass* unwind_error_class_; // Class of UnwindError.
// The static values below are read-only handle pointers for singleton
// objects that are shared between the different isolates.
static Object* null_object_;
static Array* null_array_;
static String* null_string_;
static Instance* null_instance_;
static TypeArguments* null_type_arguments_;
static Array* empty_array_;
static Array* zero_array_;
static ContextScope* empty_context_scope_;
static ObjectPool* empty_object_pool_;
static PcDescriptors* empty_descriptors_;
static LocalVarDescriptors* empty_var_descriptors_;
static ExceptionHandlers* empty_exception_handlers_;
static Array* extractor_parameter_types_;
static Array* extractor_parameter_names_;
static Instance* sentinel_;
static Instance* transition_sentinel_;
static Instance* unknown_constant_;
static Instance* non_constant_;
static Bool* bool_true_;
static Bool* bool_false_;
static Smi* smi_illegal_cid_;
static LanguageError* snapshot_writer_error_;
static LanguageError* branch_offset_error_;
static LanguageError* speculative_inlining_error_;
static LanguageError* background_compilation_error_;
static Array* vm_isolate_snapshot_object_table_;
static Type* dynamic_type_;
static Type* void_type_;
friend void ClassTable::Register(const Class& cls);
friend void RawObject::Validate(Isolate* isolate) const;
friend class Closure;
friend class SnapshotReader;
friend class InstanceDeserializationCluster;
friend class OneByteString;
friend class TwoByteString;
friend class ExternalOneByteString;
friend class ExternalTwoByteString;
friend class Thread;
#define REUSABLE_FRIEND_DECLARATION(name) \
friend class Reusable##name##HandleScope;
REUSABLE_HANDLE_LIST(REUSABLE_FRIEND_DECLARATION)
#undef REUSABLE_FRIEND_DECLARATION
DISALLOW_ALLOCATION();
DISALLOW_COPY_AND_ASSIGN(Object);
};
class PassiveObject : public Object {
public:
void operator=(RawObject* value) { raw_ = value; }
void operator^=(RawObject* value) { raw_ = value; }
static PassiveObject& Handle(Zone* zone, RawObject* raw_ptr) {
PassiveObject* obj =
reinterpret_cast<PassiveObject*>(VMHandles::AllocateHandle(zone));
obj->raw_ = raw_ptr;
obj->set_vtable(0);
return *obj;
}
static PassiveObject& Handle(RawObject* raw_ptr) {
return Handle(Thread::Current()->zone(), raw_ptr);
}
static PassiveObject& Handle() {
return Handle(Thread::Current()->zone(), Object::null());
}
static PassiveObject& Handle(Zone* zone) {
return Handle(zone, Object::null());
}
static PassiveObject& ZoneHandle(Zone* zone, RawObject* raw_ptr) {
PassiveObject* obj =
reinterpret_cast<PassiveObject*>(VMHandles::AllocateZoneHandle(zone));
obj->raw_ = raw_ptr;
obj->set_vtable(0);
return *obj;
}
static PassiveObject& ZoneHandle(RawObject* raw_ptr) {
return ZoneHandle(Thread::Current()->zone(), raw_ptr);
}
static PassiveObject& ZoneHandle() {
return ZoneHandle(Thread::Current()->zone(), Object::null());
}
static PassiveObject& ZoneHandle(Zone* zone) {
return ZoneHandle(zone, Object::null());
}
private:
PassiveObject() : Object() {}
DISALLOW_ALLOCATION();
DISALLOW_COPY_AND_ASSIGN(PassiveObject);
};
typedef ZoneGrowableHandlePtrArray<const AbstractType> Trail;
typedef ZoneGrowableHandlePtrArray<const AbstractType>* TrailPtr;
class Class : public Object {
public:
intptr_t instance_size() const {
ASSERT(is_finalized() || is_prefinalized());
return (raw_ptr()->instance_size_in_words_ * kWordSize);
}
void set_instance_size(intptr_t value_in_bytes) const {
ASSERT(kWordSize != 0);
set_instance_size_in_words(value_in_bytes / kWordSize);
}
void set_instance_size_in_words(intptr_t value) const {
ASSERT(Utils::IsAligned((value * kWordSize), kObjectAlignment));
StoreNonPointer(&raw_ptr()->instance_size_in_words_, value);
}
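// Sketch of the byte/word conversion above: on a 64-bit build (kWordSize ==
// 8), set_instance_size(32) stores instance_size_in_words_ == 4, and
// instance_size() then returns 4 * 8 == 32.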
intptr_t next_field_offset() const {
return raw_ptr()->next_field_offset_in_words_ * kWordSize;
}
void set_next_field_offset(intptr_t value_in_bytes) const {
ASSERT(kWordSize != 0);
set_next_field_offset_in_words(value_in_bytes / kWordSize);
}
void set_next_field_offset_in_words(intptr_t value) const {
ASSERT((value == -1) ||
(Utils::IsAligned((value * kWordSize), kObjectAlignment) &&
(value == raw_ptr()->instance_size_in_words_)) ||
(!Utils::IsAligned((value * kWordSize), kObjectAlignment) &&
((value + 1) == raw_ptr()->instance_size_in_words_)));
StoreNonPointer(&raw_ptr()->next_field_offset_in_words_, value);
}
cpp_vtable handle_vtable() const { return raw_ptr()->handle_vtable_; }
void set_handle_vtable(cpp_vtable value) const {
StoreNonPointer(&raw_ptr()->handle_vtable_, value);
}
static bool is_valid_id(intptr_t value) {
return RawObject::ClassIdTag::is_valid(value);
}
intptr_t id() const { return raw_ptr()->id_; }
void set_id(intptr_t value) const {
ASSERT(is_valid_id(value));
StoreNonPointer(&raw_ptr()->id_, value);
}
RawString* Name() const;
RawString* ScrubbedName() const;
RawString* UserVisibleName() const;
bool IsInFullSnapshot() const;
virtual RawString* DictionaryName() const { return Name(); }
RawScript* script() const { return raw_ptr()->script_; }
void set_script(const Script& value) const;
TokenPosition token_pos() const { return raw_ptr()->token_pos_; }
void set_token_pos(TokenPosition value) const;
TokenPosition ComputeEndTokenPos() const;
// This class represents a typedef if the signature function is not null.
RawFunction* signature_function() const {
return raw_ptr()->signature_function_;
}
void set_signature_function(const Function& value) const;
// Return the Type with type parameters declared by this class filled in with
// dynamic and type parameters declared in superclasses filled in as declared
// in superclass clauses.
RawAbstractType* RareType() const;
// Return the Type whose arguments are the type parameters declared by this
// class preceded by the type arguments declared for superclasses, etc.
// e.g. given
// class B<T, S>
// class C<R> extends B<R, int>
// C.DeclarationType() --> C [R, int, R]
RawAbstractType* DeclarationType() const;
RawLibrary* library() const { return raw_ptr()->library_; }
void set_library(const Library& value) const;
// The type parameters (and their bounds) are specified as an array of
// TypeParameter.
RawTypeArguments* type_parameters() const {
return raw_ptr()->type_parameters_;
}
void set_type_parameters(const TypeArguments& value) const;
intptr_t NumTypeParameters(Thread* thread) const;
intptr_t NumTypeParameters() const {
return NumTypeParameters(Thread::Current());
}
static intptr_t type_parameters_offset() {
return OFFSET_OF(RawClass, type_parameters_);
}
// Return a TypeParameter if the type_name is a type parameter of this class.
// Return null otherwise.
RawTypeParameter* LookupTypeParameter(const String& type_name) const;
// The type argument vector is flattened and includes the type arguments of
// the super class.
intptr_t NumTypeArguments() const;
// Return the number of type arguments that are specific to this class, i.e.
// not overlapping with the type arguments of the super class of this class.
intptr_t NumOwnTypeArguments() const;
// Return true if this class declares type parameters.
bool IsGeneric() const { return NumTypeParameters(Thread::Current()) > 0; }
// If this class is parameterized, each instance has a type_arguments field.
static const intptr_t kNoTypeArguments = -1;
intptr_t type_arguments_field_offset() const {
ASSERT(is_type_finalized() || is_prefinalized());
if (raw_ptr()->type_arguments_field_offset_in_words_ == kNoTypeArguments) {
return kNoTypeArguments;
}
return raw_ptr()->type_arguments_field_offset_in_words_ * kWordSize;
}
void set_type_arguments_field_offset(intptr_t value_in_bytes) const {
intptr_t value;
if (value_in_bytes == kNoTypeArguments) {
value = kNoTypeArguments;
} else {
ASSERT(kWordSize != 0);
value = value_in_bytes / kWordSize;
}
set_type_arguments_field_offset_in_words(value);
}
void set_type_arguments_field_offset_in_words(intptr_t value) const {
StoreNonPointer(&raw_ptr()->type_arguments_field_offset_in_words_, value);
}
static intptr_t type_arguments_field_offset_in_words_offset() {
return OFFSET_OF(RawClass, type_arguments_field_offset_in_words_);
}
// Returns the cached canonical type of this class, i.e. the canonical type
// whose type class is this class and whose type arguments are the
// uninstantiated type parameters declared by this class if it is generic,
// e.g. Map<K, V>.
// Returns Type::null() if the canonical type is not cached yet.
RawType* CanonicalType() const;
// Caches the canonical type of this class.
void SetCanonicalType(const Type& type) const;
static intptr_t canonical_type_offset() {
return OFFSET_OF(RawClass, canonical_type_);
}
// The super type of this class, Object type if not explicitly specified.
// Note that the super type may be bounded, as in this example:
// class C<T> extends S<T> { }; class S<T extends num> { };
RawAbstractType* super_type() const { return raw_ptr()->super_type_; }
void set_super_type(const AbstractType& value) const;
static intptr_t super_type_offset() {
return OFFSET_OF(RawClass, super_type_);
}
// Asserts that the class of the super type has been resolved.
// |original_classes| only has an effect when reloading. If true and we
// are reloading, it will prefer the original classes to the replacement
// classes.
RawClass* SuperClass(bool original_classes = false) const;
RawType* mixin() const { return raw_ptr()->mixin_; }
void set_mixin(const Type& value) const;
// Note this returns false for mixin application aliases.
bool IsMixinApplication() const;
RawClass* GetPatchClass() const;
// Interfaces is an array of Types.
RawArray* interfaces() const { return raw_ptr()->interfaces_; }
void set_interfaces(const Array& value) const;
static intptr_t interfaces_offset() {
return OFFSET_OF(RawClass, interfaces_);
}
// Returns the list of classes having this class as direct superclass.
RawGrowableObjectArray* direct_subclasses() const {
return raw_ptr()->direct_subclasses_;
}
void AddDirectSubclass(const Class& subclass) const;
void ClearDirectSubclasses() const;
// Check if this class represents the class of null.
bool IsNullClass() const { return id() == kNullCid; }
// Check if this class represents the 'dynamic' class.
bool IsDynamicClass() const { return id() == kDynamicCid; }
// Check if this class represents the 'void' class.
bool IsVoidClass() const { return id() == kVoidCid; }
// Check if this class represents the 'Object' class.
bool IsObjectClass() const { return id() == kInstanceCid; }
// Check if this class represents the 'Function' class.
bool IsDartFunctionClass() const;
// Check if this class represents the 'Closure' class.
bool IsClosureClass() const { return id() == kClosureCid; }
static bool IsClosureClass(RawClass* cls) {
NoSafepointScope no_safepoint;
return cls->ptr()->id_ == kClosureCid;
}
// Check if this class represents a typedef class.
bool IsTypedefClass() const { return signature_function() != Object::null(); }
static bool IsInFullSnapshot(RawClass* cls) {
NoSafepointScope no_safepoint;
return cls->ptr()->library_->ptr()->is_in_fullsnapshot_;
}
// Check the subtype relationship.
bool IsSubtypeOf(const TypeArguments& type_arguments,
const Class& other,
const TypeArguments& other_type_arguments,
Error* bound_error,
TrailPtr bound_trail,
Heap::Space space) const {
return TypeTest(kIsSubtypeOf, type_arguments, other, other_type_arguments,
bound_error, bound_trail, space);
}
// Check the 'more specific' relationship.
bool IsMoreSpecificThan(const TypeArguments& type_arguments,
const Class& other,
const TypeArguments& other_type_arguments,
Error* bound_error,
TrailPtr bound_trail,
Heap::Space space) const {
return TypeTest(kIsMoreSpecificThan, type_arguments, other,
other_type_arguments, bound_error, bound_trail, space);
}
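// Illustrative subtype query using the testers above (a sketch; the classes
// and type argument vectors named here are hypothetical, and 'zone' is
// assumed to be in scope):
//
//   Error& bound_error = Error::Handle(zone);
//   const bool is_subtype =
//       list_cls.IsSubtypeOf(list_type_args, iterable_cls, iterable_type_args,
//                            &bound_error, NULL, Heap::kNew);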
// Check if this is the top level class.
bool IsTopLevel() const;
bool IsPrivate() const;
// Returns an array of instance and static fields defined by this class.
RawArray* fields() const { return raw_ptr()->fields_; }
void SetFields(const Array& value) const;
void AddField(const Field& field) const;
void AddFields(const GrowableArray<const Field*>& fields) const;
void InjectCIDFields() const;
// Returns an array of all instance fields of this class and its superclasses
// indexed by offset in words.
// |original_classes| only has an effect when reloading. If true and we
// are reloading, it will prefer the original classes to the replacement
// classes.
RawArray* OffsetToFieldMap(bool original_classes = false) const;
// Returns true if non-static fields are defined.
bool HasInstanceFields() const;
// TODO(koda): Unite w/ hash table.
RawArray* functions() const { return raw_ptr()->functions_; }
void SetFunctions(const Array& value) const;
void AddFunction(const Function& function) const;
void RemoveFunction(const Function& function) const;
RawFunction* FunctionFromIndex(intptr_t idx) const;
intptr_t FindImplicitClosureFunctionIndex(const Function& needle) const;
RawFunction* ImplicitClosureFunctionFromIndex(intptr_t idx) const;
RawFunction* LookupDynamicFunction(const String& name) const;
RawFunction* LookupDynamicFunctionAllowAbstract(const String& name) const;
RawFunction* LookupDynamicFunctionAllowPrivate(const String& name) const;
RawFunction* LookupStaticFunction(const String& name) const;
RawFunction* LookupStaticFunctionAllowPrivate(const String& name) const;
RawFunction* LookupConstructor(const String& name) const;
RawFunction* LookupConstructorAllowPrivate(const String& name) const;
RawFunction* LookupFactory(const String& name) const;
RawFunction* LookupFactoryAllowPrivate(const String& name) const;
RawFunction* LookupFunction(const String& name) const;
RawFunction* LookupFunctionAllowPrivate(const String& name) const;
RawFunction* LookupGetterFunction(const String& name) const;
RawFunction* LookupSetterFunction(const String& name) const;
RawFunction* LookupCallFunctionForTypeTest() const;
RawField* LookupInstanceField(const String& name) const;
RawField* LookupStaticField(const String& name) const;
RawField* LookupField(const String& name) const;
RawField* LookupFieldAllowPrivate(const String& name,
bool instance_only = false) const;
RawField* LookupInstanceFieldAllowPrivate(const String& name) const;
RawField* LookupStaticFieldAllowPrivate(const String& name) const;
RawLibraryPrefix* LookupLibraryPrefix(const String& name) const;
// Returns an instance of Double or Double::null().
// 'index' points to either:
// - the constants_list_ position of the found element, or
// - the constants_list_ position where a new canonical value can be inserted.
RawDouble* LookupCanonicalDouble(Zone* zone,
double value,
intptr_t* index) const;
RawMint* LookupCanonicalMint(Zone* zone,
int64_t value,
intptr_t* index) const;
RawBigint* LookupCanonicalBigint(Zone* zone,
const Bigint& value,
intptr_t* index) const;
// The methods above are more efficient than this generic one.
RawInstance* LookupCanonicalInstance(Zone* zone, const Instance& value) const;
RawInstance* InsertCanonicalConstant(Zone* zone,
const Instance& constant) const;
void InsertCanonicalNumber(Zone* zone,
intptr_t index,
const Number& constant) const;
void RehashConstants(Zone* zone) const;
static intptr_t InstanceSize() {
return RoundedAllocationSize(sizeof(RawClass));
}
bool is_implemented() const {
return ImplementedBit::decode(raw_ptr()->state_bits_);
}
void set_is_implemented() const;
bool is_abstract() const {
return AbstractBit::decode(raw_ptr()->state_bits_);
}
void set_is_abstract() const;
bool is_type_finalized() const {
return TypeFinalizedBit::decode(raw_ptr()->state_bits_);
}
void set_is_type_finalized() const;
bool is_patch() const { return PatchBit::decode(raw_ptr()->state_bits_); }
void set_is_patch() const;
bool is_synthesized_class() const {
return SynthesizedClassBit::decode(raw_ptr()->state_bits_);
}
void set_is_synthesized_class() const;
bool is_enum_class() const { return EnumBit::decode(raw_ptr()->state_bits_); }
void set_is_enum_class() const;
bool is_finalized() const {
return ClassFinalizedBits::decode(raw_ptr()->state_bits_) ==
RawClass::kFinalized;
}
void set_is_finalized() const;
bool is_prefinalized() const {
return ClassFinalizedBits::decode(raw_ptr()->state_bits_) ==
RawClass::kPreFinalized;
}
void set_is_prefinalized() const;
bool is_refinalize_after_patch() const {
return ClassFinalizedBits::decode(raw_ptr()->state_bits_) ==
RawClass::kRefinalizeAfterPatch;
}
void SetRefinalizeAfterPatch() const;
void ResetFinalization() const;
bool is_marked_for_parsing() const {
return MarkedForParsingBit::decode(raw_ptr()->state_bits_);
}
void set_is_marked_for_parsing() const;
void reset_is_marked_for_parsing() const;
bool is_const() const { return ConstBit::decode(raw_ptr()->state_bits_); }
void set_is_const() const;
bool is_mixin_app_alias() const {
return MixinAppAliasBit::decode(raw_ptr()->state_bits_);
}
void set_is_mixin_app_alias() const;
bool is_mixin_type_applied() const {
return MixinTypeAppliedBit::decode(raw_ptr()->state_bits_);
}
void set_is_mixin_type_applied() const;
bool is_fields_marked_nullable() const {
return FieldsMarkedNullableBit::decode(raw_ptr()->state_bits_);
}
void set_is_fields_marked_nullable() const;
bool is_cycle_free() const {
return CycleFreeBit::decode(raw_ptr()->state_bits_);
}
void set_is_cycle_free() const;
bool is_allocated() const {
return IsAllocatedBit::decode(raw_ptr()->state_bits_);
}
void set_is_allocated(bool value) const;
uint16_t num_native_fields() const { return raw_ptr()->num_native_fields_; }
void set_num_native_fields(uint16_t value) const {
StoreNonPointer(&raw_ptr()->num_native_fields_, value);
}
RawCode* allocation_stub() const { return raw_ptr()->allocation_stub_; }
void set_allocation_stub(const Code& value) const;
void DisableAllocationStub() const;
RawArray* constants() const;
void set_constants(const Array& value) const;
intptr_t FindInvocationDispatcherFunctionIndex(const Function& needle) const;
RawFunction* InvocationDispatcherFunctionFromIndex(intptr_t idx) const;
RawFunction* GetInvocationDispatcher(const String& target_name,
const Array& args_desc,
RawFunction::Kind kind,
bool create_if_absent) const;
void Finalize() const;
// Apply given patch class to this class.
// Return true on success, or false (with 'error' set) otherwise.
bool ApplyPatch(const Class& patch, Error* error) const;
// Evaluate the given expression as if it appeared in a static
// method of this class and return the resulting value, or an
// error object if evaluating the expression fails. The method has
// the formal parameters given in param_names, and is invoked with
// the argument values given in param_values.
RawObject* Evaluate(const String& expr,
const Array& param_names,
const Array& param_values) const;
RawError* EnsureIsFinalized(Thread* thread) const;
// Allocate a class used for VM internal objects.
template <class FakeObject>
static RawClass* New();
// Allocate instance classes.
static RawClass* New(const Library& lib,
const String& name,
const Script& script,
TokenPosition token_pos);
static RawClass* NewNativeWrapper(const Library& library,
const String& name,
int num_fields);
// Allocate the raw string classes.
static RawClass* NewStringClass(intptr_t class_id);
// Allocate the raw TypedData classes.
static RawClass* NewTypedDataClass(intptr_t class_id);
// Allocate the raw TypedDataView classes.
static RawClass* NewTypedDataViewClass(intptr_t class_id);
// Allocate the raw ExternalTypedData classes.
static RawClass* NewExternalTypedDataClass(intptr_t class_id);
// Register code that has used CHA for optimization.
// TODO(srdjan): Also register kind of CHA optimization (e.g.: leaf class,
// leaf method, ...).
void RegisterCHACode(const Code& code);
void DisableCHAOptimizedCode(const Class& subclass);
void DisableAllCHAOptimizedCode();
// Return the list of code objects that were compiled using CHA of this class.
// These code objects will be invalidated if new subclasses of this class
// are finalized.
RawArray* dependent_code() const { return raw_ptr()->dependent_code_; }
void set_dependent_code(const Array& array) const;
bool TraceAllocation(Isolate* isolate) const;
void SetTraceAllocation(bool trace_allocation) const;
bool ValidatePostFinalizePatch(const Class& orig_class, Error* error) const;
void ReplaceEnum(const Class& old_enum) const;
void CopyStaticFieldValues(const Class& old_cls) const;
void PatchFieldsAndFunctions() const;
void MigrateImplicitStaticClosures(IsolateReloadContext* context,
const Class& new_cls) const;
void CopyCanonicalConstants(const Class& old_cls) const;
void CopyCanonicalType(const Class& old_cls) const;
void CheckReload(const Class& replacement,
IsolateReloadContext* context) const;
private:
bool CanReloadFinalized(const Class& replacement,
IsolateReloadContext* context) const;
bool CanReloadPreFinalized(const Class& replacement,
IsolateReloadContext* context) const;
// Tells whether instances need morphing for reload.
bool RequiresInstanceMorphing(const Class& replacement) const;
template <class FakeObject>
static RawClass* NewCommon(intptr_t index);
enum MemberKind {
kAny = 0,
kStatic,
kInstance,
kInstanceAllowAbstract,
kConstructor,
kFactory,
};
enum StateBits {
kConstBit = 0,
kImplementedBit = 1,
kTypeFinalizedBit = 2,
kClassFinalizedPos = 3,
kClassFinalizedSize = 2,
kAbstractBit = kClassFinalizedPos + kClassFinalizedSize, // = 5
kPatchBit = 6,
kSynthesizedClassBit = 7,
kMarkedForParsingBit = 8,
kMixinAppAliasBit = 9,
kMixinTypeAppliedBit = 10,
kFieldsMarkedNullableBit = 11,
kCycleFreeBit = 12,
kEnumBit = 13,
kIsAllocatedBit = 15,
};
class ConstBit : public BitField<uint16_t, bool, kConstBit, 1> {};
class ImplementedBit : public BitField<uint16_t, bool, kImplementedBit, 1> {};
class TypeFinalizedBit
: public BitField<uint16_t, bool, kTypeFinalizedBit, 1> {};
class ClassFinalizedBits : public BitField<uint16_t,
RawClass::ClassFinalizedState,
kClassFinalizedPos,
kClassFinalizedSize> {};
class AbstractBit : public BitField<uint16_t, bool, kAbstractBit, 1> {};
class PatchBit : public BitField<uint16_t, bool, kPatchBit, 1> {};
class SynthesizedClassBit
: public BitField<uint16_t, bool, kSynthesizedClassBit, 1> {};
class MarkedForParsingBit
: public BitField<uint16_t, bool, kMarkedForParsingBit, 1> {};
class MixinAppAliasBit
: public BitField<uint16_t, bool, kMixinAppAliasBit, 1> {};
class MixinTypeAppliedBit
: public BitField<uint16_t, bool, kMixinTypeAppliedBit, 1> {};
class FieldsMarkedNullableBit
: public BitField<uint16_t, bool, kFieldsMarkedNullableBit, 1> {};
class CycleFreeBit : public BitField<uint16_t, bool, kCycleFreeBit, 1> {};
class EnumBit : public BitField<uint16_t, bool, kEnumBit, 1> {};
class IsAllocatedBit : public BitField<uint16_t, bool, kIsAllocatedBit, 1> {};
void set_name(const String& value) const;
void set_user_name(const String& value) const;
RawString* GenerateUserVisibleName() const;
void set_state_bits(intptr_t bits) const;
void set_canonical_type(const Type& value) const;
RawType* canonical_type() const;
RawArray* invocation_dispatcher_cache() const;
void set_invocation_dispatcher_cache(const Array& cache) const;
RawFunction* CreateInvocationDispatcher(const String& target_name,
const Array& args_desc,
RawFunction::Kind kind) const;
void CalculateFieldOffsets() const;
// functions_hash_table is in use iff there are at least this many functions.
static const intptr_t kFunctionLookupHashTreshold = 16;
// Initial value for the cached number of type arguments.
static const intptr_t kUnknownNumTypeArguments = -1;
int16_t num_type_arguments() const { return raw_ptr()->num_type_arguments_; }
void set_num_type_arguments(intptr_t value) const;
static intptr_t num_type_arguments_offset() {
return OFFSET_OF(RawClass, num_type_arguments_);
}
int16_t num_own_type_arguments() const {
return raw_ptr()->num_own_type_arguments_;
}
void set_num_own_type_arguments(intptr_t value) const;
// Assigns empty array to all raw class array fields.
void InitEmptyFields();
static RawFunction* CheckFunctionType(const Function& func, MemberKind kind);
RawFunction* LookupFunction(const String& name, MemberKind kind) const;
RawFunction* LookupFunctionAllowPrivate(const String& name,
MemberKind kind) const;
RawField* LookupField(const String& name, MemberKind kind) const;
RawFunction* LookupAccessorFunction(const char* prefix,
intptr_t prefix_length,
const String& name) const;
// Allocate an instance class which has a VM implementation.
template <class FakeInstance>
static RawClass* New(intptr_t id);
// Helper that calls 'Class::New<Instance>(kIllegalCid)'.
static RawClass* NewInstanceClass();
// Check the subtype or 'more specific' relationship.
bool TypeTest(TypeTestKind test_kind,
const TypeArguments& type_arguments,
const Class& other,
const TypeArguments& other_type_arguments,
Error* bound_error,
TrailPtr bound_trail,
Heap::Space space) const;
static bool TypeTestNonRecursive(const Class& cls,
TypeTestKind test_kind,
const TypeArguments& type_arguments,
const Class& other,
const TypeArguments& other_type_arguments,
Error* bound_error,
TrailPtr bound_trail,
Heap::Space space);
FINAL_HEAP_OBJECT_IMPLEMENTATION(Class, Object);
friend class AbstractType;
friend class Instance;
friend class Object;
friend class Type;
friend class Intrinsifier;
friend class ProgramVisitor;
};
// An UnresolvedClass stores an unresolved name that will be resolved to a
// class after all classes have been loaded and finalized.
class UnresolvedClass : public Object {
public:
RawObject* library_or_library_prefix() const {
return raw_ptr()->library_or_library_prefix_;
}
RawString* ident() const { return raw_ptr()->ident_; }
TokenPosition token_pos() const { return raw_ptr()->token_pos_; }
RawString* Name() const;
static intptr_t InstanceSize() {
return RoundedAllocationSize(sizeof(RawUnresolvedClass));
}
static RawUnresolvedClass* New(const Object& library_prefix,
const String& ident,
TokenPosition token_pos);
private:
void set_library_or_library_prefix(const Object& library_prefix) const;
void set_ident(const String& ident) const;
void set_token_pos(TokenPosition token_pos) const;
static RawUnresolvedClass* New();
FINAL_HEAP_OBJECT_IMPLEMENTATION(UnresolvedClass, Object);
friend class Class;
};
// Classification of type genericity according to type parameter owners.
enum Genericity {
kAny, // Consider type params of class and functions.
kClass, // Consider type params of class only.
kFunctions, // Consider type params of current and parent functions.
kCurrentFunction, // Consider type params of current function only.
kParentFunctions // Consider type params of parent functions only.
};
// A TypeArguments is an array of AbstractType.
class TypeArguments : public Object {
public:
// We use 30 bits for the hash code so hashes in a snapshot taken on a
// 64-bit architecture stay in Smi range when loaded on a 32-bit
// architecture.
static const intptr_t kHashBits = 30;
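// (A non-negative 30-bit hash always fits in a Smi, whose payload is 31
// signed bits on 32-bit targets.)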
intptr_t Length() const;
RawAbstractType* TypeAt(intptr_t index) const;
static intptr_t type_at_offset(intptr_t index) {
return OFFSET_OF_RETURNED_VALUE(RawTypeArguments, types) +
index * kWordSize;
}
void SetTypeAt(intptr_t index, const AbstractType& value) const;
// The name of this type argument vector, e.g. "<T, dynamic, List<T>, Smi>".
RawString* Name() const { return SubvectorName(0, Length(), kInternalName); }
// The name of this type argument vector, e.g. "<T, dynamic, List<T>, int>".
// Names of internal classes are mapped to their public interfaces.
RawString* UserVisibleName() const {
return SubvectorName(0, Length(), kUserVisibleName);
}
// Check if the subvector of length 'len' starting at 'from_index' of this
// type argument vector consists solely of DynamicType.
bool IsRaw(intptr_t from_index, intptr_t len) const {
return IsDynamicTypes(false, from_index, len);
}
// Check if this type argument vector would consist solely of DynamicType if
// it was instantiated from a raw (null) instantiator, i.e. consider each type
// parameter as it would be first instantiated from a vector of dynamic types.
// Consider only a prefix of length 'len'.
bool IsRawInstantiatedRaw(intptr_t len) const {
return IsDynamicTypes(true, 0, len);
}
// Check the subtype relationship, considering only a subvector of length
// 'len' starting at 'from_index'.
bool IsSubtypeOf(const TypeArguments& other,
intptr_t from_index,
intptr_t len,
Error* bound_error,
TrailPtr bound_trail,
Heap::Space space) const {
return TypeTest(kIsSubtypeOf, other, from_index, len, bound_error,
bound_trail, space);
}
// Check the 'more specific' relationship, considering only a subvector of
// length 'len' starting at 'from_index'.
bool IsMoreSpecificThan(const TypeArguments& other,
intptr_t from_index,
intptr_t len,
Error* bound_error,
TrailPtr bound_trail,
Heap::Space space) const {
return TypeTest(kIsMoreSpecificThan, other, from_index, len, bound_error,
bound_trail, space);
}
// Check if the vectors are equal (they may be null).
bool Equals(const TypeArguments& other) const {
return IsSubvectorEquivalent(other, 0, IsNull() ? 0 : Length());
}
bool IsEquivalent(const TypeArguments& other, TrailPtr trail = NULL) const {
return IsSubvectorEquivalent(other, 0, IsNull() ? 0 : Length(), trail);
}
bool IsSubvectorEquivalent(const TypeArguments& other,
intptr_t from_index,
intptr_t len,
TrailPtr trail = NULL) const;
// Check if the vector is instantiated (it must not be null).
bool IsInstantiated(Genericity genericity = kAny,
TrailPtr trail = NULL) const {
return IsSubvectorInstantiated(0, Length(), genericity, trail);
}
bool IsSubvectorInstantiated(intptr_t from_index,
intptr_t len,
Genericity genericity = kAny,
TrailPtr trail = NULL) const;
bool IsUninstantiatedIdentity() const;
bool CanShareInstantiatorTypeArguments(const Class& instantiator_class) const;
// Return true if all types of this vector are respectively, resolved,
// finalized, or bounded.
bool IsResolved() const;
bool IsFinalized() const;
bool IsBounded() const;
// Return true if this vector contains a recursive type argument.
bool IsRecursive() const;
// Clone this type argument vector and clone all unfinalized type arguments.
// Finalized type arguments are shared.
RawTypeArguments* CloneUnfinalized() const;
// Clone this type argument vector and clone all uninstantiated type
// arguments, changing the class owner of type parameters.
// Instantiated type arguments are shared.
RawTypeArguments* CloneUninstantiated(const Class& new_owner,
TrailPtr trail = NULL) const;
// Canonicalize only if instantiated, otherwise returns 'this'.
RawTypeArguments* Canonicalize(TrailPtr trail = NULL) const;
// Returns a formatted list of occurring type arguments with their URI.
RawString* EnumerateURIs() const;
// Return 'this' if this type argument vector is instantiated, i.e. if it does
// not refer to type parameters. Otherwise, return a new type argument vector
// where each reference to a type parameter is replaced with the corresponding
// type of the instantiator type argument vector.
// If bound_error is not NULL, it may be set to reflect a bound error.
RawTypeArguments* InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error,
TrailPtr instantiation_trail,
TrailPtr bound_trail,
Heap::Space space) const;
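// Example: instantiating the vector <T, List<T>> from an instantiator vector
// <int> yields <int, List<int>> (illustrative only).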
// Runtime instantiation with canonicalization. Not to be used during type
// finalization at compile time.
RawTypeArguments* InstantiateAndCanonicalizeFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error) const;
// Return true if this type argument vector has cached instantiations.
bool HasInstantiations() const;
// Return the number of cached instantiations for this type argument vector.
intptr_t NumInstantiations() const;
static intptr_t instantiations_offset() {
return OFFSET_OF(RawTypeArguments, instantiations_);
}
static const intptr_t kBytesPerElement = kWordSize;
static const intptr_t kMaxElements = kSmiMax / kBytesPerElement;
static intptr_t InstanceSize() {
ASSERT(sizeof(RawTypeArguments) ==
OFFSET_OF_RETURNED_VALUE(RawTypeArguments, types));
return 0;
}
static intptr_t InstanceSize(intptr_t len) {
// Ensure that the variable-length types() data does not contribute to the
// object size, which consists of 3 fields: instantiations_, length_ and hash_.
ASSERT(sizeof(RawTypeArguments) ==
(sizeof(RawObject) + (kNumFields * kWordSize)));
ASSERT(0 <= len && len <= kMaxElements);
return RoundedAllocationSize(sizeof(RawTypeArguments) +
(len * kBytesPerElement));
}
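// For example (a sketch assuming a 64-bit target where kWordSize == 8),
// InstanceSize(2) evaluates to
//   RoundedAllocationSize(sizeof(RawTypeArguments) + 2 * 8)
// i.e. the fixed part (object header plus the kNumFields word-sized fields)
// plus one word per element, rounded up to the allocation granularity.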
intptr_t Hash() const;
static RawTypeArguments* New(intptr_t len, Heap::Space space = Heap::kOld);
private:
intptr_t ComputeHash() const;
void SetHash(intptr_t value) const;
// Check if the subvector of length 'len' starting at 'from_index' of this
// type argument vector consists solely of DynamicType.
// If raw_instantiated is true, consider each type parameter to be first
// instantiated from a vector of dynamic types.
bool IsDynamicTypes(bool raw_instantiated,
intptr_t from_index,
intptr_t len) const;
// Check the subtype or 'more specific' relationship, considering only a
// subvector of length 'len' starting at 'from_index'.
bool TypeTest(TypeTestKind test_kind,
const TypeArguments& other,
intptr_t from_index,
intptr_t len,
Error* bound_error,
TrailPtr bound_trail,
Heap::Space space) const;
// Return the internal or public name of a subvector of this type argument
// vector, e.g. "<T, dynamic, List<T>, int>".
RawString* SubvectorName(intptr_t from_index,
intptr_t len,
NameVisibility name_visibility) const;
RawArray* instantiations() const;
void set_instantiations(const Array& value) const;
RawAbstractType* const* TypeAddr(intptr_t index) const;
void SetLength(intptr_t value) const;
// Number of fields in the raw object is 3: instantiations_, length_ and hash_.
static const int kNumFields = 3;
FINAL_HEAP_OBJECT_IMPLEMENTATION(TypeArguments, Object);
friend class AbstractType;
friend class Class;
friend class ClearTypeHashVisitor;
};
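// A minimal usage sketch (hypothetical, not part of the VM sources) of the
// TypeArguments instantiation API declared above; 'zone',
// 'uninstantiated_args' and 'instantiator_args' are assumed to be supplied
// by the caller:
//
//   TypeArguments& args =
//       TypeArguments::Handle(zone, uninstantiated_args.raw());
//   if (!args.IsInstantiated()) {
//     Error& bound_error = Error::Handle(zone);
//     args = args.InstantiateAndCanonicalizeFrom(instantiator_args,
//                                                &bound_error);
//   }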
class PatchClass : public Object {
public:
RawClass* patched_class() const { return raw_ptr()->patched_class_; }
RawClass* origin_class() const { return raw_ptr()->origin_class_; }
RawScript* script() const { return raw_ptr()->script_; }
static intptr_t InstanceSize() {
return RoundedAllocationSize(sizeof(RawPatchClass));
}
static bool IsInFullSnapshot(RawPatchClass* cls) {
NoSafepointScope no_safepoint;
return Class::IsInFullSnapshot(cls->ptr()->patched_class_);
}
static RawPatchClass* New(const Class& patched_class,
const Class& origin_class);
static RawPatchClass* New(const Class& patched_class, const Script& source);
private:
void set_patched_class(const Class& value) const;
void set_origin_class(const Class& value) const;
void set_script(const Script& value) const;
static RawPatchClass* New();
FINAL_HEAP_OBJECT_IMPLEMENTATION(PatchClass, Object);
friend class Class;
};
class SingleTargetCache : public Object {
public:
RawCode* target() const { return raw_ptr()->target_; }
void set_target(const Code& target) const;
static intptr_t target_offset() {
return OFFSET_OF(RawSingleTargetCache, target_);
}
#define DEFINE_NON_POINTER_FIELD_ACCESSORS(type, name) \
type name() const { return raw_ptr()->name##_; } \
void set_##name(type value) const { \
StoreNonPointer(&raw_ptr()->name##_, value); \
} \
static intptr_t name##_offset() { \
return OFFSET_OF(RawSingleTargetCache, name##_); \
}
DEFINE_NON_POINTER_FIELD_ACCESSORS(uword, entry_point);
DEFINE_NON_POINTER_FIELD_ACCESSORS(intptr_t, lower_limit);
DEFINE_NON_POINTER_FIELD_ACCESSORS(intptr_t, upper_limit);
#undef DEFINE_NON_POINTER_FIELD_ACCESSORS
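// For instance, the expansion of the macro above for 'entry_point' declares:
//   uword entry_point() const;
//   void set_entry_point(uword value) const;
//   static intptr_t entry_point_offset();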
static intptr_t InstanceSize() {
return RoundedAllocationSize(sizeof(RawSingleTargetCache));
}
static RawSingleTargetCache* New();
private:
FINAL_HEAP_OBJECT_IMPLEMENTATION(SingleTargetCache, Object);
friend class Class;
};
class UnlinkedCall : public Object {
public:
RawString* target_name() const { return raw_ptr()->target_name_; }
void set_target_name(const String& target_name) const;
RawArray* args_descriptor() const { return raw_ptr()->args_descriptor_; }
void set_args_descriptor(const Array& args_descriptor) const;
static intptr_t InstanceSize() {
return RoundedAllocationSize(sizeof(RawUnlinkedCall));
}
static RawUnlinkedCall* New();
private:
FINAL_HEAP_OBJECT_IMPLEMENTATION(UnlinkedCall, Object);
friend class Class;
};
// Object holding information about an IC: test classes and their
// corresponding targets. The owner of the ICData can be either the function
// or the original ICData object. In the case of background compilation we
// copy the ICData into a child object, thus freezing it for the duration of
// the background compilation. Code may contain only original ICData objects.
class ICData : public Object {
public:
RawFunction* Owner() const;
RawICData* Original() const;
void SetOriginal(const ICData& value) const;
bool IsOriginal() const { return Original() == this->raw(); }
RawString* target_name() const { return raw_ptr()->target_name_; }
RawArray* arguments_descriptor() const { return raw_ptr()->args_descriptor_; }
intptr_t NumArgsTested() const;
intptr_t deopt_id() const {
#if defined(DART_PRECOMPILED_RUNTIME)
UNREACHABLE();
return -1;
#else
return raw_ptr()->deopt_id_;
#endif
}
bool IsImmutable() const;
void Reset(Zone* zone) const;
void ResetSwitchable(Zone* zone) const;
// Note: only deopts with reasons before Unknown in this list are recorded in
// the ICData. All other reasons are used purely for informational messages
// printed during deoptimization itself.
#define DEOPT_REASONS(V) \
V(BinarySmiOp) \
V(BinaryMintOp) \
V(DoubleToSmi) \
V(CheckSmi) \
V(Unknown) \
V(PolymorphicInstanceCallTestFail) \
V(UnaryMintOp) \
V(BinaryDoubleOp) \
V(UnaryOp) \
V(UnboxInteger) \
V(CheckClass) \
V(CheckArrayBound) \
V(AtCall) \
V(GuardField) \
V(TestCids) \
V(NumReasons)
enum DeoptReasonId {
#define DEFINE_ENUM_LIST(name) kDeopt##name,
DEOPT_REASONS(DEFINE_ENUM_LIST)
#undef DEFINE_ENUM_LIST
};
static const intptr_t kLastRecordedDeoptReason = kDeoptUnknown - 1;
enum DeoptFlags {
// Deoptimization is caused by an optimistically hoisted instruction.
kHoisted = 1 << 0,
// Deoptimization is caused by an optimistically generalized bounds check.
kGeneralized = 1 << 1
};
bool HasDeoptReasons() const { return DeoptReasons() != 0; }
uint32_t DeoptReasons() const;
void SetDeoptReasons(uint32_t reasons) const;
bool HasDeoptReason(ICData::DeoptReasonId reason) const;
void AddDeoptReason(ICData::DeoptReasonId reason) const;
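// A hedged usage sketch (the 'ic_data' handle is assumed to exist) using
// only the deopt-reason accessors declared above:
//
//   ic_data.AddDeoptReason(ICData::kDeoptCheckSmi);
//   ASSERT(ic_data.HasDeoptReason(ICData::kDeoptCheckSmi));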
// The length of the array. This includes all sentinel entries including
// the final one.
intptr_t Length() const;
// Takes O(result) time!
intptr_t NumberOfChecks() const;
// Discounts any checks with usage of zero.
// Takes O(result) time!
intptr_t NumberOfUsedChecks() const;
// Takes O(n) time!
bool NumberOfChecksIs(intptr_t n) const;
static intptr_t InstanceSize() {
return RoundedAllocationSize(sizeof(RawICData));
}
static intptr_t target_name_offset() {
return OFFSET_OF(RawICData, target_name_);
}
static intptr_t state_bits_offset() {
return OFFSET_OF(RawICData, state_bits_);
}
static intptr_t NumArgsTestedShift() { return kNumArgsTestedPos; }
static intptr_t NumArgsTestedMask() {
return ((1 << kNumArgsTestedSize) - 1) << kNumArgsTestedPos;
}
static intptr_t arguments_descriptor_offset() {
return OFFSET_OF(RawICData, args_descriptor_);
}
static intptr_t ic_data_offset() { return OFFSET_OF(RawICData, ic_data_); }
static intptr_t owner_offset() { return OFFSET_OF(RawICData, owner_); }
// Replaces entry |index| with the sentinel.
void WriteSentinelAt(intptr_t index) const;
// Clears the count for entry |index|.
void ClearCountAt(intptr_t index) const;
// Clear all entries with the sentinel value (but preserve the initial
// Smi/Smi checks).
void ClearWithSentinel() const;
// Clear all entries with the sentinel value and reset the first entry
// with the dummy target entry.
void ClearAndSetStaticTarget(const Function& func) const;
// Returns the first index that should be used for a new entry. Will
// grow the array if necessary.
RawArray* FindFreeIndex(intptr_t* index) const;
void DebugDump() const;
// Returns true if this is a two-argument Smi operation.
bool AddSmiSmiCheckForFastSmiStubs() const;
// Used for unoptimized static calls when no class-ids are checked.
void AddTarget(const Function& target) const;
// Adding checks.
// Adds one more class test to ICData. Length of 'class_ids' must be equal
// to the number of arguments tested. Use only for num_args_tested > 1.
void AddCheck(const GrowableArray<intptr_t>& class_ids,
const Function& target,
intptr_t count = 1) const;
// Adds a check, sorted so that Smi is the first class-id. Use only for
// num_args_tested == 1.
void AddReceiverCheck(intptr_t receiver_class_id,
const Function& target,
intptr_t count = 1) const;
// Does entry |index| contain the sentinel value?
bool IsSentinelAt(intptr_t index) const;
// Retrieving checks.
void GetCheckAt(intptr_t index,
GrowableArray<intptr_t>* class_ids,
Function* target) const;
void GetClassIdsAt(intptr_t index, GrowableArray<intptr_t>* class_ids) const;
// Only for 'num_args_checked == 1'.
void GetOneClassCheckAt(intptr_t index,
intptr_t* class_id,
Function* target) const;
// Only for 'num_args_checked == 1'.
intptr_t GetCidAt(intptr_t index) const;
intptr_t GetReceiverClassIdAt(intptr_t index) const;
intptr_t GetClassIdAt(intptr_t index, intptr_t arg_nr) const;
RawFunction* GetTargetAt(intptr_t index) const;
RawFunction* GetTargetForReceiverClassId(intptr_t class_id,
intptr_t* count_return) const;
RawObject* GetTargetOrCodeAt(intptr_t index) const;
void SetCodeAt(intptr_t index, const Code& value) const;
void SetEntryPointAt(intptr_t index, const Smi& value) const;
void IncrementCountAt(intptr_t index, intptr_t value) const;
void SetCountAt(intptr_t index, intptr_t value) const;
intptr_t GetCountAt(intptr_t index) const;
intptr_t AggregateCount() const;
// Returns this->raw() if num_args_tested == 1 and arg_nr == 1, otherwise
// returns a new ICData object containing only unique arg_nr checks.
// Returns only used entries.
RawICData* AsUnaryClassChecksForArgNr(intptr_t arg_nr) const;
RawICData* AsUnaryClassChecks() const {
return AsUnaryClassChecksForArgNr(0);
}
RawICData* AsUnaryClassChecksForCid(intptr_t cid,
const Function& target) const;
// Returns ICData with aggregated receiver count, sorted by highest count.
// Note that the Smi check is not necessarily first, unlike the convention
// for ICData used in code generation (where the Smi check comes first).
// Used for printing and optimizations.
RawICData* AsUnaryClassChecksSortedByCount() const;
// Consider only used entries.
bool AllTargetsHaveSameOwner(intptr_t owner_cid) const;
bool AllReceiversAreNumbers() const;
bool HasOneTarget() const;
bool HasOnlyDispatcherOrImplicitAccessorTargets() const;
bool HasReceiverClassId(intptr_t class_id) const;
static RawICData* New(const Function& owner,
const String& target_name,
const Array& arguments_descriptor,
intptr_t deopt_id,
intptr_t num_args_tested,
bool is_static_call);
static RawICData* NewFrom(const ICData& from, intptr_t num_args_tested);
// Generates a new ICData with descriptor and data array copied (deep clone).
static RawICData* Clone(const ICData& from);
static intptr_t TestEntryLengthFor(intptr_t num_args);
static intptr_t TargetIndexFor(intptr_t num_args) { return num_args; }
static intptr_t CodeIndexFor(intptr_t num_args) { return num_args; }
static intptr_t CountIndexFor(intptr_t num_args) { return (num_args + 1); }
static intptr_t EntryPointIndexFor(intptr_t num_args) {
return (num_args + 1);
}
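// For example, with num_args == 1 a test entry in the backing array is laid
// out as [class_id, target (or code), count (or entry point)], since
// TargetIndexFor(1) == CodeIndexFor(1) == 1 and
// CountIndexFor(1) == EntryPointIndexFor(1) == 2; TestEntryLengthFor() gives
// the full per-entry length.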
bool IsUsedAt(intptr_t i) const;
void GetUsedCidsForTwoArgs(GrowableArray<intptr_t>* first,
GrowableArray<intptr_t>* second) const;
void PrintToJSONArray(const JSONArray& jsarray,
TokenPosition token_pos) const;
// Initialize the preallocated empty ICData entry arrays.
static void InitOnce();
enum { kCachedICDataArrayCount = 4 };
#if defined(TAG_IC_DATA)
void set_tag(intptr_t value) const;
intptr_t tag() const { return raw_ptr()->tag_; }
#endif
void SetIsStaticCall(bool static_call) const;
bool is_static_call() const;
private:
static RawICData* New();
RawArray* ic_data() const { return raw_ptr()->ic_data_; }
void set_owner(const Function& value) const;
void set_target_name(const String& value) const;
void set_arguments_descriptor(const Array& value) const;
void set_deopt_id(intptr_t value) const;
void SetNumArgsTested(intptr_t value) const;
void set_ic_data_array(const Array& value) const;
void set_state_bits(uint32_t bits) const;
bool ValidateInterceptor(const Function& target) const;
enum {
kNumArgsTestedPos = 0,
kNumArgsTestedSize = 2,
kDeoptReasonPos = kNumArgsTestedPos + kNumArgsTestedSize,
kDeoptReasonSize = kLastRecordedDeoptReason + 1,
kStaticCallPos = kDeoptReasonPos + kDeoptReasonSize,
kStaticCallSize = 1,
};
class NumArgsTestedBits : public BitField<uint32_t,
uint32_t,
kNumArgsTestedPos,
kNumArgsTestedSize> {};
class DeoptReasonBits : public BitField<uint32_t,
uint32_t,
ICData::kDeoptReasonPos,
ICData::kDeoptReasonSize> {};
class StaticCallBit : public BitField<uint32_t,
bool,
ICData::kStaticCallPos,
ICData::kStaticCallSize> {};
#if defined(DEBUG)
// Used in asserts to verify that a check is not added twice.
bool HasCheck(const GrowableArray<intptr_t>& cids) const;
#endif // DEBUG
intptr_t TestEntryLength() const;
static RawArray* NewNonCachedEmptyICDataArray(intptr_t num_args_tested);
static RawArray* CachedEmptyICDataArray(intptr_t num_args_tested);
static RawICData* NewDescriptor(Zone* zone,
const Function& owner,
const String& target_name,
const Array& arguments_descriptor,
intptr_t deopt_id,
intptr_t num_args_tested,
bool is_static_call);
static void WriteSentinel(const Array& data, intptr_t test_entry_length);
// A cache of VM-heap-allocated, preinitialized, empty ICData entry arrays.
static RawArray* cached_icdata_arrays_[kCachedICDataArrayCount];
FINAL_HEAP_OBJECT_IMPLEMENTATION(ICData, Object);
friend class Class;
friend class SnapshotWriter;
friend class Serializer;
friend class Deserializer;
};
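// A minimal sketch (hypothetical, not part of the VM sources) of walking the
// recorded checks of a unary ICData via the accessors declared above; 'zone'
// and 'ic_data_raw' are assumed to be supplied by the caller:
//
//   const ICData& ic_data = ICData::Handle(zone, ic_data_raw);
//   Function& target = Function::Handle(zone);
//   const intptr_t len = ic_data.NumberOfChecks();
//   for (intptr_t i = 0; i < len; i++) {
//     if (!ic_data.IsUsedAt(i)) continue;
//     const intptr_t cid = ic_data.GetReceiverClassIdAt(i);
//     target = ic_data.GetTargetAt(i);
//     const intptr_t count = ic_data.GetCountAt(i);
//     // ... cid, target and count drive dispatch and inlining decisions.
//   }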
class Function : public Object {
public:
RawString* name() const { return raw_ptr()->name_; }
RawString* UserVisibleName() const; // Same as scrubbed name.
RawString* QualifiedScrubbedName() const {
return QualifiedName(kScrubbedName);
}
RawString* QualifiedUserVisibleName() const {
return QualifiedName(kUserVisibleName);
}
virtual RawString* DictionaryName() const { return name(); }
RawString* GetSource() const;
// Return the type of this function's signature. It may not be canonical yet.
// For example, if this function has a signature of the form
// '(T, [B, C]) => R', where 'T' and 'R' are type parameters of the
// owner class of this function, then its signature type is a parameterized
// function type with uninstantiated type arguments 'T' and 'R' as elements of
// its type argument vector.
RawType* SignatureType() const;
// Update the signature type (with a canonical version).
void SetSignatureType(const Type& value) const;
// Build a string of the form 'C<T, R>(T, {B b, C c}) => R' representing the
// internal signature of the given function. In this example, T and R are
// type parameters of class C, the owner of the function.
RawString* Signature() const {
const bool instantiate = false;
return BuildSignature(instantiate, kInternalName,
Object::null_type_arguments());
}
// Build a string of the form '(T, {B b, C c}) => R' representing the
// user visible signature of the given function. In this example, T and R are
// type parameters of class C, the owner of the function, also called the
// scope class of the function type.
// Implicit parameters are hidden, as well as the prefix denoting the
// scope class and its type parameters.
RawString* UserVisibleSignature() const {
const bool instantiate = false;
return BuildSignature(instantiate, kUserVisibleName,
Object::null_type_arguments());
}
// Build a string of the form '(A, {B b, C c}) => D' representing the
// signature of the given function, where all generic types (e.g. '<T, R>' in
// 'C<T, R>(T, {B b, C c}) => R') are instantiated using the given
// instantiator type argument vector of a C instance (e.g. '<A, D>').
RawString* InstantiatedSignatureFrom(const TypeArguments& instantiator,
NameVisibility name_visibility) const {
const bool instantiate = true;
return BuildSignature(instantiate, name_visibility, instantiator);
}
// Returns true if the signature of this function is instantiated, i.e. if it
// does not involve generic parameter types or generic result type.
bool HasInstantiatedSignature() const;
// Build a string of the form 'T, {B b, C c}' representing the user
// visible formal parameters of the function.
RawString* UserVisibleFormalParameters() const;
// Reloading support:
void Reparent(const Class& new_cls) const;
void ZeroEdgeCounters() const;
RawClass* Owner() const;
RawClass* origin() const;
RawScript* script() const;
RawObject* RawOwner() const { return raw_ptr()->owner_; }
RawRegExp* regexp() const;
intptr_t string_specialization_cid() const;
bool is_sticky_specialization() const;
void SetRegExpData(const RegExp& regexp,
intptr_t string_specialization_cid,
bool sticky) const;
RawString* native_name() const;
void set_native_name(const String& name) const;
RawAbstractType* result_type() const { return raw_ptr()->result_type_; }
void set_result_type(const AbstractType& value) const;
RawAbstractType* ParameterTypeAt(intptr_t index) const;
void SetParameterTypeAt(intptr_t index, const AbstractType& value) const;
RawArray* parameter_types() const { return raw_ptr()->parameter_types_; }
void set_parameter_types(const Array& value) const;
// Parameter names are valid for all valid parameter indices, and are not
// limited to named optional parameters.
RawString* ParameterNameAt(intptr_t index) const;
void SetParameterNameAt(intptr_t index, const String& value) const;
RawArray* parameter_names() const { return raw_ptr()->parameter_names_; }
void set_parameter_names(const Array& value) const;
// The type parameters (and their bounds) are specified as an array of
// TypeParameter.
RawTypeArguments* type_parameters() const {
return raw_ptr()->type_parameters_;
}
void set_type_parameters(const TypeArguments& value) const;
intptr_t NumTypeParameters(Thread* thread) const;
intptr_t NumTypeParameters() const {
return NumTypeParameters(Thread::Current());
}
// Return a TypeParameter if the type_name is a type parameter of this
// function or of one of its parent functions; return null otherwise.
// If function_level is not NULL, it is adjusted accordingly (it is an
// in/out parameter).
RawTypeParameter* LookupTypeParameter(const String& type_name,
intptr_t* function_level) const;
// Return true if this function declares type parameters.
bool IsGeneric() const { return NumTypeParameters(Thread::Current()) > 0; }
// Return true if any parent function of this function is generic.
bool HasGenericParent() const;
// Not thread-safe; must be called in the main thread.
// Sets function's code and code's function.
void InstallOptimizedCode(const Code& code, bool is_osr) const;
void AttachCode(const Code& value) const;
void SetInstructions(const Code& value) const;
void ClearCode() const;
// Disables optimized code and switches to unoptimized code.
void SwitchToUnoptimizedCode() const;
// Disables optimized code and switches to unoptimized code (or the lazy
// compilation stub).
void SwitchToLazyCompiledUnoptimizedCode() const;
// Compiles unoptimized code (if necessary) and attaches it to the function.
void EnsureHasCompiledUnoptimizedCode() const;
// Return the most recently compiled and installed code for this function.
// It is not the only Code object that points to this function.
RawCode* CurrentCode() const { return raw_ptr()->code_; }
RawCode* unoptimized_code() const {
#if defined(DART_PRECOMPILED_RUNTIME)
return static_cast<RawCode*>(Object::null());
#else
return raw_ptr()->unoptimized_code_;
#endif
}
void set_unoptimized_code(const Code& value) const;
bool HasCode() const;
static intptr_t code_offset() { return OFFSET_OF(RawFunction, code_); }
static intptr_t entry_point_offset() {
return OFFSET_OF(RawFunction, entry_point_);
}
virtual intptr_t Hash() const;
// Returns true if there is at least one debugger breakpoint
// set in this function.
bool HasBreakpoint() const;
RawContextScope* context_scope() const;
void set_context_scope(const ContextScope& value) const;
RawField* LookupImplicitGetterSetterField() const;
// Enclosing function of this local function.
RawFunction* parent_function() const;
void set_extracted_method_closure(const Function& function) const;
RawFunction* extracted_method_closure() const;
void set_saved_args_desc(const Array& array) const;
RawArray* saved_args_desc() const;
bool IsMethodExtractor() const {
return kind() == RawFunction::kMethodExtractor;
}
bool IsNoSuchMethodDispatcher() const {
return kind() == RawFunction::kNoSuchMethodDispatcher;
}
bool IsInvokeFieldDispatcher() const {
return kind() == RawFunction::kInvokeFieldDispatcher;
}
// Returns true iff an implicit closure function has been created
// for this function.
bool HasImplicitClosureFunction() const {
return implicit_closure_function() != null();
}
// Return the closure function implicitly created for this function.
// If none exists yet, create one and remember it.
RawFunction* ImplicitClosureFunction() const;
void DropUncompiledImplicitClosureFunction() const;
// Return the closure implicitly created for this function.
// If none exists yet, create one and remember it.
RawInstance* ImplicitStaticClosure() const;
RawInstance* ImplicitInstanceClosure(const Instance& receiver) const;
RawSmi* GetClosureHashCode() const;
// Redirection information for a redirecting factory.
bool IsRedirectingFactory() const;
RawType* RedirectionType() const;
void SetRedirectionType(const Type& type) const;
RawString* RedirectionIdentifier() const;
void SetRedirectionIdentifier(const String& identifier) const;
RawFunction* RedirectionTarget() const;
void SetRedirectionTarget(const Function& target) const;
RawFunction::Kind kind() const {
return KindBits::decode(raw_ptr()->kind_tag_);
}
RawFunction::AsyncModifier modifier() const {
return ModifierBits::decode(raw_ptr()->kind_tag_);
}
static const char* KindToCString(RawFunction::Kind kind);
bool IsGenerativeConstructor() const {
return (kind() == RawFunction::kConstructor) && !is_static();
}
bool IsImplicitConstructor() const;
bool IsFactory() const {
return (kind() == RawFunction::kConstructor) && is_static();
}
bool IsDynamicFunction(bool allow_abstract = false) const {
if (is_static() || (!allow_abstract && is_abstract())) {
return false;
}
switch (kind()) {
case RawFunction::kRegularFunction:
case RawFunction::kGetterFunction:
case RawFunction::kSetterFunction:
case RawFunction::kImplicitGetter:
case RawFunction::kImplicitSetter:
case RawFunction::kMethodExtractor:
case RawFunction::kNoSuchMethodDispatcher:
case RawFunction::kInvokeFieldDispatcher:
return true;
case RawFunction::kClosureFunction:
case RawFunction::kConstructor:
case RawFunction::kImplicitStaticFinalGetter:
case RawFunction::kIrregexpFunction:
return false;
default:
UNREACHABLE();
return false;
}
}
bool IsStaticFunction() const {
if (!is_static()) {
return false;
}
switch (kind()) {
case RawFunction::kRegularFunction:
case RawFunction::kGetterFunction:
case RawFunction::kSetterFunction:
case RawFunction::kImplicitGetter:
case RawFunction::kImplicitSetter:
case RawFunction::kImplicitStaticFinalGetter:
case RawFunction::kIrregexpFunction:
return true;
case RawFunction::kClosureFunction:
case RawFunction::kConstructor:
return false;
default:
UNREACHABLE();
return false;
}
}
bool IsInFactoryScope() const;
TokenPosition token_pos() const {
#if defined(DART_PRECOMPILED_RUNTIME)
return TokenPosition();
#else
return raw_ptr()->token_pos_;
#endif
}
void set_token_pos(TokenPosition value) const;
TokenPosition end_token_pos() const {
#if defined(DART_PRECOMPILED_RUNTIME)
return TokenPosition();
#else
return raw_ptr()->end_token_pos_;
#endif
}
void set_end_token_pos(TokenPosition value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
UNREACHABLE();
#else
StoreNonPointer(&raw_ptr()->end_token_pos_, value);
#endif
}
intptr_t num_fixed_parameters() const {
return raw_ptr()->num_fixed_parameters_;
}
void set_num_fixed_parameters(intptr_t value) const;
bool HasOptionalParameters() const {
return raw_ptr()->num_optional_parameters_ != 0;
}
bool HasOptionalPositionalParameters() const {
return raw_ptr()->num_optional_parameters_ > 0;
}
bool HasOptionalNamedParameters() const {
return raw_ptr()->num_optional_parameters_ < 0;
}
intptr_t NumOptionalParameters() const {
const intptr_t num_opt_params = raw_ptr()->num_optional_parameters_;
return (num_opt_params >= 0) ? num_opt_params : -num_opt_params;
}
void SetNumOptionalParameters(intptr_t num_optional_parameters,
bool are_optional_positional) const;
intptr_t NumOptionalPositionalParameters() const {
const intptr_t num_opt_params = raw_ptr()->num_optional_parameters_;
return (num_opt_params > 0) ? num_opt_params : 0;
}
intptr_t NumOptionalNamedParameters() const {
const intptr_t num_opt_params = raw_ptr()->num_optional_parameters_;
return (num_opt_params < 0) ? -num_opt_params : 0;
}
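// Encoding note derived from the accessors above: num_optional_parameters_
// is positive for optional positional parameters and negative for optional
// named parameters. For example, a stored value of 3 means 3 optional
// positional parameters, -2 means 2 optional named parameters, and 0 means
// no optional parameters at all.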
intptr_t NumParameters() const;
intptr_t NumImplicitParameters() const;
static intptr_t usage_counter_offset() {
#if defined(DART_PRECOMPILED_RUNTIME)
UNREACHABLE();
return 0;
#else
return OFFSET_OF(RawFunction, usage_counter_);
#endif
}
intptr_t usage_counter() const {
#if defined(DART_PRECOMPILED_RUNTIME)
return 0;
#else
return raw_ptr()->usage_counter_;
#endif
}
void set_usage_counter(intptr_t value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
UNREACHABLE();
#else
// TODO(Srdjan): Assert that this is thread-safe, i.e., only
// set from mutator-thread or while at a safepoint (e.g., during marking).
StoreNonPointer(&raw_ptr()->usage_counter_, value);
#endif
}
int8_t deoptimization_counter() const {
#if defined(DART_PRECOMPILED_RUNTIME)
return 0;
#else
return raw_ptr()->deoptimization_counter_;
#endif
}
void set_deoptimization_counter(int8_t value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
UNREACHABLE();
#else
ASSERT(value >= 0);
StoreNonPointer(&raw_ptr()->deoptimization_counter_, value);
#endif
}
static const intptr_t kMaxInstructionCount = (1 << 16) - 1;
intptr_t optimized_instruction_count() const {
#if defined(DART_PRECOMPILED_RUNTIME)
UNREACHABLE();
return 0;
#else
return raw_ptr()->optimized_instruction_count_;
#endif
}
void set_optimized_instruction_count(intptr_t value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
UNREACHABLE();
#else
ASSERT(value >= 0);
if (value > kMaxInstructionCount) {
value = kMaxInstructionCount;
}
StoreNonPointer(&raw_ptr()->optimized_instruction_count_,
static_cast<uint16_t>(value));
#endif
}
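// For example, set_optimized_instruction_count(100000) clamps and stores
// kMaxInstructionCount (65535), since the underlying field is a uint16_t.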
intptr_t optimized_call_site_count() const {
#if defined(DART_PRECOMPILED_RUNTIME)
return 0;
#else
return raw_ptr()->optimized_call_site_count_;
#endif
}
void set_optimized_call_site_count(intptr_t value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
UNREACHABLE();
#else
ASSERT(value >= 0);
if (value > kMaxInstructionCount) {
value = kMaxInstructionCount;
}
StoreNonPointer(&raw_ptr()->optimized_call_site_count_,
static_cast<uint16_t>(value));
#endif
}
void* kernel_function() const {
#if defined(DART_PRECOMPILED_RUNTIME)
return NULL;
#else
return raw_ptr()->kernel_function_;
#endif
}
void set_kernel_function(void* kernel_function) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
StoreNonPointer(&raw_ptr()->kernel_function_, kernel_function);
#endif
}
bool IsOptimizable() const;
bool IsNativeAutoSetupScope() const;
void SetIsOptimizable(bool value) const;
void SetIsNativeAutoSetupScope(bool value) const;
bool CanBeInlined() const;
MethodRecognizer::Kind recognized_kind() const {
return RecognizedBits::decode(raw_ptr()->kind_tag_);
}
void set_recognized_kind(MethodRecognizer::Kind value) const;
bool IsRecognized() const {
return recognized_kind() != MethodRecognizer::kUnknown;
}
bool HasOptimizedCode() const;
// Returns true if the argument counts are valid for calling this function.
// Otherwise, returns false and sets the reason in 'error_message' (if it is
// not NULL).
bool AreValidArgumentCounts(intptr_t num_arguments,
intptr_t num_named_arguments,
String* error_message) const;
// Returns true if the total argument count and the names of optional
// arguments are valid for calling this function.
// Otherwise, returns false and sets the reason in 'error_message' (if it is
// not NULL).
bool AreValidArguments(intptr_t num_arguments,
const Array& argument_names,
String* error_message) const;
bool AreValidArguments(const ArgumentsDescriptor& args_desc,
String* error_message) const;
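// A hedged usage sketch (names are illustrative; 'zone', 'function' and
// 'args_desc' are assumed to exist) of validating a call site with the
// checks declared above:
//
//   String& error_message = String::Handle(zone);
//   if (!function.AreValidArguments(args_desc, &error_message)) {
//     // Report or throw using 'error_message'.
//   }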
// Fully qualified name uniquely identifying the function under gdb and
// during AST printing. The special ':' character, if present, is replaced
// by '_'.
const char* ToFullyQualifiedCString() const;
const char* ToLibNamePrefixedQualifiedCString() const;
const char* ToQualifiedCString() const;
// Returns true if this function's parameters are compatible with the
// parameters of the other function, such that this function can override
// the other function.
bool HasCompatibleParametersWith(const Function& other,
Error* bound_error) const;
// Returns true if the type of this function is a subtype of the type of
// the other function.
bool IsSubtypeOf(const TypeArguments& type_arguments,
const Function& other,
const TypeArguments& other_type_arguments,
Error* bound_error,
Heap::Space space) const {
return TypeTest(kIsSubtypeOf, type_arguments, other, other_type_arguments,
bound_error, space);
}
// Returns true if the type of this function is more specific than the type of
// the other function.
bool IsMoreSpecificThan(const TypeArguments& type_arguments,
const Function& other,
const TypeArguments& other_type_arguments,
Error* bound_error,
Heap::Space space) const {
return TypeTest(kIsMoreSpecificThan, type_arguments, other,
other_type_arguments, bound_error, space);
}
// Check the subtype or 'more specific' relationship.
bool TypeTest(TypeTestKind test_kind,
const TypeArguments& type_arguments,
const Function& other,
const TypeArguments& other_type_arguments,
Error* bound_error,
Heap::Space space) const;
bool IsDispatcherOrImplicitAccessor() const {
switch (kind()) {
case RawFunction::kImplicitGetter:
case RawFunction::kImplicitSetter:
case RawFunction::kNoSuchMethodDispatcher:
case RawFunction::kInvokeFieldDispatcher:
return true;
default:
return false;
}
}
// Returns true if this function represents an explicit getter function.
bool IsGetterFunction() const {
return kind() == RawFunction::kGetterFunction;
}
// Returns true if this function represents an implicit getter function.
bool IsImplicitGetterFunction() const {
return kind() == RawFunction::kImplicitGetter;
}
// Returns true if this function represents an explicit setter function.
bool IsSetterFunction() const {
return kind() == RawFunction::kSetterFunction;
}
// Returns true if this function represents an implicit setter function.
bool IsImplicitSetterFunction() const {
return kind() == RawFunction::kImplicitSetter;
}
// Returns true if this function represents a (possibly implicit) closure
// function.
bool IsClosureFunction() const {
return kind() == RawFunction::kClosureFunction;
}
// Returns true if this function represents a generated irregexp function.
bool IsIrregexpFunction() const {
return kind() == RawFunction::kIrregexpFunction;
}
// Returns true if this function represents an implicit closure function.
bool IsImplicitClosureFunction() const;
// Returns true if this function represents a non-implicit closure function.
bool IsNonImplicitClosureFunction() const {
return IsClosureFunction() && !IsImplicitClosureFunction();
}
// Returns true if this function represents an implicit static closure
// function.
bool IsImplicitStaticClosureFunction() const {
return is_static() && IsImplicitClosureFunction();
}
static bool IsImplicitStaticClosureFunction(RawFunction* func);
// Returns true if this function represents an implicit instance closure
// function.
bool IsImplicitInstanceClosureFunction() const {
return !is_static() && IsImplicitClosureFunction();
}
bool IsConstructorClosureFunction() const;
// Returns true if this function represents a local function.
bool IsLocalFunction() const { return parent_function() != Function::null(); }
// Returns true if this function represents a signature function without code.
bool IsSignatureFunction() const {
return kind() == RawFunction::kSignatureFunction;
}
static bool IsSignatureFunction(RawFunction* function) {
NoSafepointScope no_safepoint;
return KindBits::decode(function->ptr()->kind_tag_) ==
RawFunction::kSignatureFunction;
}
bool IsAsyncFunction() const {