| // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file |
| // for details. All rights reserved. Use of this source code is governed by a |
| // BSD-style license that can be found in the LICENSE file. |
| |
| #include "vm/clustered_snapshot.h" |
| |
| #include "platform/assert.h" |
| #include "vm/bootstrap.h" |
| #include "vm/class_finalizer.h" |
| #include "vm/dart.h" |
| #include "vm/dart_entry.h" |
| #include "vm/exceptions.h" |
| #include "vm/heap.h" |
| #include "vm/lockers.h" |
| #include "vm/longjump.h" |
| #include "vm/native_entry.h" |
| #include "vm/object.h" |
| #include "vm/object_store.h" |
| #include "vm/stub_code.h" |
| #include "vm/symbols.h" |
| #include "vm/timeline.h" |
| #include "vm/version.h" |
| |
| namespace dart { |
| |
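| // Allocates uninitialized storage in old space for a deserialized object, |
| // growing the heap as needed and aborting if memory is exhausted. Callers |
| // must initialize the header (Deserializer::InitializeHeader) and all fields. |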
| static RawObject* AllocateUninitialized(PageSpace* old_space, intptr_t size) { |
| ASSERT(Utils::IsAligned(size, kObjectAlignment)); |
| uword address = |
| old_space->TryAllocateDataBumpLocked(size, PageSpace::kForceGrowth); |
| if (address == 0) { |
| OUT_OF_MEMORY(); |
| } |
| return reinterpret_cast<RawObject*>(address + kHeapObjectTag); |
| } |
| |
| |
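| // Sets up the tag word of a freshly allocated object: class id, heap size, |
| // whether the object lives in the VM isolate's heap, and whether it is |
| // canonical. |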
| void Deserializer::InitializeHeader(RawObject* raw, |
| intptr_t class_id, |
| intptr_t size, |
| bool is_vm_isolate, |
| bool is_canonical) { |
| ASSERT(Utils::IsAligned(size, kObjectAlignment)); |
| uword tags = 0; |
| tags = RawObject::ClassIdTag::update(class_id, tags); |
| tags = RawObject::SizeTag::update(size, tags); |
| tags = RawObject::VMHeapObjectTag::update(is_vm_isolate, tags); |
| tags = RawObject::CanonicalObjectTag::update(is_canonical, tags); |
| raw->ptr()->tags_ = tags; |
| } |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
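| // A serialization cluster handles all objects with a given class id. |
| // Trace() collects an object and pushes its outgoing references for further |
| // discovery, WriteAlloc() emits what is needed to allocate (or locate) each |
| // object and assigns its reference id, and WriteFill() emits the objects' |
| // contents. |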
| class ClassSerializationCluster : public SerializationCluster { |
| public: |
| explicit ClassSerializationCluster(intptr_t num_cids) |
| : predefined_(kNumPredefinedCids), objects_(num_cids) {} |
| virtual ~ClassSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawClass* cls = Class::RawCast(object); |
| intptr_t class_id = cls->ptr()->id_; |
| |
| if (class_id < kNumPredefinedCids) { |
| // These classes are allocated by Object::Init or Object::InitOnce, so the |
| // deserializer must find them in the class table instead of allocating |
| // them. |
| predefined_.Add(cls); |
| } else { |
| objects_.Add(cls); |
| } |
| |
| RawObject** from = cls->from(); |
| RawObject** to = cls->to_snapshot(s->kind()); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kClassCid); |
| intptr_t count = predefined_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawClass* cls = predefined_[i]; |
| intptr_t class_id = cls->ptr()->id_; |
| s->WriteCid(class_id); |
| s->AssignRef(cls); |
| } |
| count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawClass* cls = objects_[i]; |
| s->AssignRef(cls); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = predefined_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| WriteClass(s, predefined_[i]); |
| } |
| count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| WriteClass(s, objects_[i]); |
| } |
| } |
| |
| void WriteClass(Serializer* s, RawClass* cls) { |
| Snapshot::Kind kind = s->kind(); |
| RawObject** from = cls->from(); |
| RawObject** to = cls->to_snapshot(kind); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| intptr_t class_id = cls->ptr()->id_; |
| if (class_id == kIllegalCid) { |
| FATAL1("Attempting to serialize class with illegal cid: %s\n", |
| Class::Handle(cls).ToCString()); |
| } |
| s->WriteCid(class_id); |
| s->Write<int32_t>(cls->ptr()->instance_size_in_words_); |
| s->Write<int32_t>(cls->ptr()->next_field_offset_in_words_); |
| s->Write<int32_t>(cls->ptr()->type_arguments_field_offset_in_words_); |
| s->Write<uint16_t>(cls->ptr()->num_type_arguments_); |
| s->Write<uint16_t>(cls->ptr()->num_own_type_arguments_); |
| s->Write<uint16_t>(cls->ptr()->num_native_fields_); |
| s->WriteTokenPosition(cls->ptr()->token_pos_); |
| s->Write<uint16_t>(cls->ptr()->state_bits_); |
| } |
| |
| private: |
| GrowableArray<RawClass*> predefined_; |
| GrowableArray<RawClass*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
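| // Deserialization clusters mirror the serializer: ReadAlloc() allocates (or |
| // looks up) each object and assigns reference ids in the same order as the |
| // serializer, ReadFill() reads the objects' contents, and PostLoad() runs |
| // fix-ups that require handles, such as rehashing constants here. |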
| class ClassDeserializationCluster : public DeserializationCluster { |
| public: |
| ClassDeserializationCluster() {} |
| virtual ~ClassDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| predefined_start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| ClassTable* table = d->isolate()->class_table(); |
| for (intptr_t i = 0; i < count; i++) { |
| intptr_t class_id = d->ReadCid(); |
| ASSERT(table->HasValidClassAt(class_id)); |
| RawClass* cls = table->At(class_id); |
| ASSERT(cls != NULL); |
| d->AssignRef(cls); |
| } |
| predefined_stop_index_ = d->next_index(); |
| |
| start_index_ = d->next_index(); |
| count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef(AllocateUninitialized(old_space, Class::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| Snapshot::Kind kind = d->kind(); |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| ClassTable* table = d->isolate()->class_table(); |
| |
| for (intptr_t id = predefined_start_index_; id < predefined_stop_index_; |
| id++) { |
| RawClass* cls = reinterpret_cast<RawClass*>(d->Ref(id)); |
| RawObject** from = cls->from(); |
| RawObject** to_snapshot = cls->to_snapshot(kind); |
| for (RawObject** p = from; p <= to_snapshot; p++) { |
| *p = d->ReadRef(); |
| } |
| |
| intptr_t class_id = d->ReadCid(); |
| cls->ptr()->id_ = class_id; |
| if (!RawObject::IsInternalVMdefinedClassId(class_id)) { |
| cls->ptr()->instance_size_in_words_ = d->Read<int32_t>(); |
| cls->ptr()->next_field_offset_in_words_ = d->Read<int32_t>(); |
| } else { |
| d->Read<int32_t>(); // Skip. |
| d->Read<int32_t>(); // Skip. |
| } |
| cls->ptr()->type_arguments_field_offset_in_words_ = d->Read<int32_t>(); |
| cls->ptr()->num_type_arguments_ = d->Read<uint16_t>(); |
| cls->ptr()->num_own_type_arguments_ = d->Read<uint16_t>(); |
| cls->ptr()->num_native_fields_ = d->Read<uint16_t>(); |
| cls->ptr()->token_pos_ = d->ReadTokenPosition(); |
| cls->ptr()->state_bits_ = d->Read<uint16_t>(); |
| } |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawClass* cls = reinterpret_cast<RawClass*>(d->Ref(id)); |
| Deserializer::InitializeHeader(cls, kClassCid, Class::InstanceSize(), |
| is_vm_object); |
| RawObject** from = cls->from(); |
| RawObject** to_snapshot = cls->to_snapshot(kind); |
| RawObject** to = cls->to(); |
| for (RawObject** p = from; p <= to_snapshot; p++) { |
| *p = d->ReadRef(); |
| } |
| for (RawObject** p = to_snapshot + 1; p <= to; p++) { |
| *p = Object::null(); |
| } |
| |
| intptr_t class_id = d->ReadCid(); |
| |
| ASSERT(class_id >= kNumPredefinedCids); |
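| // Instances of application classes are manipulated through Instance |
| // handles, so copy the Instance handle vtable. |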
| Instance fake; |
| cls->ptr()->handle_vtable_ = fake.vtable(); |
| |
| cls->ptr()->id_ = class_id; |
| cls->ptr()->instance_size_in_words_ = d->Read<int32_t>(); |
| cls->ptr()->next_field_offset_in_words_ = d->Read<int32_t>(); |
| cls->ptr()->type_arguments_field_offset_in_words_ = d->Read<int32_t>(); |
| cls->ptr()->num_type_arguments_ = d->Read<uint16_t>(); |
| cls->ptr()->num_own_type_arguments_ = d->Read<uint16_t>(); |
| cls->ptr()->num_native_fields_ = d->Read<uint16_t>(); |
| cls->ptr()->token_pos_ = d->ReadTokenPosition(); |
| cls->ptr()->state_bits_ = d->Read<uint16_t>(); |
| |
| table->AllocateIndex(class_id); |
| table->SetAt(class_id, cls); |
| } |
| } |
| |
| void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { |
| NOT_IN_PRODUCT(TimelineDurationScope tds( |
| Thread::Current(), Timeline::GetIsolateStream(), "PostLoadClass")); |
| |
| Class& cls = Class::Handle(zone); |
| for (intptr_t i = predefined_start_index_; i < predefined_stop_index_; |
| i++) { |
| cls ^= refs.At(i); |
| cls.RehashConstants(zone); |
| } |
| for (intptr_t i = start_index_; i < stop_index_; i++) { |
| cls ^= refs.At(i); |
| cls.RehashConstants(zone); |
| } |
| } |
| |
| private: |
| intptr_t predefined_start_index_; |
| intptr_t predefined_stop_index_; |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class UnresolvedClassSerializationCluster : public SerializationCluster { |
| public: |
| UnresolvedClassSerializationCluster() {} |
| virtual ~UnresolvedClassSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawUnresolvedClass* cls = UnresolvedClass::RawCast(object); |
| objects_.Add(cls); |
| |
| RawObject** from = cls->from(); |
| RawObject** to = cls->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kUnresolvedClassCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawUnresolvedClass* cls = objects_[i]; |
| s->AssignRef(cls); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawUnresolvedClass* cls = objects_[i]; |
| RawObject** from = cls->from(); |
| RawObject** to = cls->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| s->WriteTokenPosition(cls->ptr()->token_pos_); |
| } |
| } |
| |
| private: |
| GrowableArray<RawUnresolvedClass*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class UnresolvedClassDeserializationCluster : public DeserializationCluster { |
| public: |
| UnresolvedClassDeserializationCluster() {} |
| virtual ~UnresolvedClassDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef( |
| AllocateUninitialized(old_space, UnresolvedClass::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawUnresolvedClass* cls = |
| reinterpret_cast<RawUnresolvedClass*>(d->Ref(id)); |
| Deserializer::InitializeHeader(cls, kUnresolvedClassCid, |
| UnresolvedClass::InstanceSize(), |
| is_vm_object); |
| RawObject** from = cls->from(); |
| RawObject** to = cls->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| *p = d->ReadRef(); |
| } |
| cls->ptr()->token_pos_ = d->ReadTokenPosition(); |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class TypeArgumentsSerializationCluster : public SerializationCluster { |
| public: |
| TypeArgumentsSerializationCluster() {} |
| virtual ~TypeArgumentsSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawTypeArguments* type_args = TypeArguments::RawCast(object); |
| objects_.Add(type_args); |
| |
| s->Push(type_args->ptr()->instantiations_); |
| intptr_t length = Smi::Value(type_args->ptr()->length_); |
| for (intptr_t i = 0; i < length; i++) { |
| s->Push(type_args->ptr()->types()[i]); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kTypeArgumentsCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawTypeArguments* type_args = objects_[i]; |
| intptr_t length = Smi::Value(type_args->ptr()->length_); |
| s->Write<int32_t>(length); |
| s->AssignRef(type_args); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawTypeArguments* type_args = objects_[i]; |
| intptr_t length = Smi::Value(type_args->ptr()->length_); |
| s->Write<int32_t>(length); |
| s->Write<bool>(type_args->IsCanonical()); |
| intptr_t hash = Smi::Value(type_args->ptr()->hash_); |
| s->Write<int32_t>(hash); |
| s->WriteRef(type_args->ptr()->instantiations_); |
| for (intptr_t j = 0; j < length; j++) { |
| s->WriteRef(type_args->ptr()->types()[j]); |
| } |
| } |
| } |
| |
| private: |
| GrowableArray<RawTypeArguments*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class TypeArgumentsDeserializationCluster : public DeserializationCluster { |
| public: |
| TypeArgumentsDeserializationCluster() {} |
| virtual ~TypeArgumentsDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| intptr_t length = d->Read<int32_t>(); |
| d->AssignRef(AllocateUninitialized(old_space, |
| TypeArguments::InstanceSize(length))); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawTypeArguments* type_args = |
| reinterpret_cast<RawTypeArguments*>(d->Ref(id)); |
| intptr_t length = d->Read<int32_t>(); |
| bool is_canonical = d->Read<bool>(); |
| Deserializer::InitializeHeader(type_args, kTypeArgumentsCid, |
| TypeArguments::InstanceSize(length), |
| is_vm_object, is_canonical); |
| type_args->ptr()->length_ = Smi::New(length); |
| type_args->ptr()->hash_ = Smi::New(d->Read<int32_t>()); |
| type_args->ptr()->instantiations_ = |
| reinterpret_cast<RawArray*>(d->ReadRef()); |
| for (intptr_t j = 0; j < length; j++) { |
| type_args->ptr()->types()[j] = |
| reinterpret_cast<RawAbstractType*>(d->ReadRef()); |
| } |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class PatchClassSerializationCluster : public SerializationCluster { |
| public: |
| PatchClassSerializationCluster() {} |
| virtual ~PatchClassSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawPatchClass* cls = PatchClass::RawCast(object); |
| objects_.Add(cls); |
| |
| RawObject** from = cls->from(); |
| RawObject** to = cls->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kPatchClassCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawPatchClass* cls = objects_[i]; |
| s->AssignRef(cls); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawPatchClass* cls = objects_[i]; |
| RawObject** from = cls->from(); |
| RawObject** to = cls->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| } |
| } |
| |
| private: |
| GrowableArray<RawPatchClass*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class PatchClassDeserializationCluster : public DeserializationCluster { |
| public: |
| PatchClassDeserializationCluster() {} |
| virtual ~PatchClassDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef( |
| AllocateUninitialized(old_space, PatchClass::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawPatchClass* cls = reinterpret_cast<RawPatchClass*>(d->Ref(id)); |
| Deserializer::InitializeHeader(cls, kPatchClassCid, |
| PatchClass::InstanceSize(), is_vm_object); |
| RawObject** from = cls->from(); |
| RawObject** to = cls->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| *p = d->ReadRef(); |
| } |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
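| // The object fields up to Function::to_snapshot() are serialized for every |
| // snapshot kind; code-related references are added only for app snapshots: |
| // the code for AOT, plus the unoptimized code and IC data array for app-JIT. |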
| class FunctionSerializationCluster : public SerializationCluster { |
| public: |
| FunctionSerializationCluster() {} |
| virtual ~FunctionSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawFunction* func = Function::RawCast(object); |
| objects_.Add(func); |
| |
| RawObject** from = func->from(); |
| RawObject** to = func->to_snapshot(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| if (s->kind() == Snapshot::kAppAOT) { |
| s->Push(func->ptr()->code_); |
| } else if (s->kind() == Snapshot::kAppJIT) { |
| NOT_IN_PRECOMPILED(s->Push(func->ptr()->unoptimized_code_)); |
| s->Push(func->ptr()->code_); |
| s->Push(func->ptr()->ic_data_array_); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kFunctionCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawFunction* func = objects_[i]; |
| s->AssignRef(func); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| Snapshot::Kind kind = s->kind(); |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawFunction* func = objects_[i]; |
| RawObject** from = func->from(); |
| RawObject** to = func->to_snapshot(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| if (kind == Snapshot::kAppAOT) { |
| s->WriteRef(func->ptr()->code_); |
| } else if (s->kind() == Snapshot::kAppJIT) { |
| NOT_IN_PRECOMPILED(s->WriteRef(func->ptr()->unoptimized_code_)); |
| s->WriteRef(func->ptr()->code_); |
| s->WriteRef(func->ptr()->ic_data_array_); |
| } |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| if (kind != Snapshot::kAppAOT) { |
| s->WriteTokenPosition(func->ptr()->token_pos_); |
| s->WriteTokenPosition(func->ptr()->end_token_pos_); |
| } |
| #endif |
| s->Write<int16_t>(func->ptr()->num_fixed_parameters_); |
| s->Write<int16_t>(func->ptr()->num_optional_parameters_); |
| s->Write<uint32_t>(func->ptr()->kind_tag_); |
| if (kind == Snapshot::kAppAOT) { |
| // Omit fields used to support de/reoptimization. |
| } else if (!Snapshot::IncludesCode(kind)) { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| bool is_optimized = Code::IsOptimized(func->ptr()->code_); |
| if (is_optimized) { |
| s->Write<int32_t>(FLAG_optimization_counter_threshold); |
| } else { |
| s->Write<int32_t>(0); |
| } |
| #endif |
| } |
| } |
| } |
| |
| private: |
| GrowableArray<RawFunction*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
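| // Fields omitted from the snapshot are reset here. PostLoad() then installs |
| // entry points from the deserialized code (AOT), re-attaches usable code |
| // (app-JIT), or clears code and IC data so functions are compiled lazily |
| // (other snapshot kinds). |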
| class FunctionDeserializationCluster : public DeserializationCluster { |
| public: |
| FunctionDeserializationCluster() {} |
| virtual ~FunctionDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef(AllocateUninitialized(old_space, Function::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| Snapshot::Kind kind = d->kind(); |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawFunction* func = reinterpret_cast<RawFunction*>(d->Ref(id)); |
| Deserializer::InitializeHeader(func, kFunctionCid, |
| Function::InstanceSize(), is_vm_object); |
| RawObject** from = func->from(); |
| RawObject** to_snapshot = func->to_snapshot(); |
| RawObject** to = func->to(); |
| for (RawObject** p = from; p <= to_snapshot; p++) { |
| *p = d->ReadRef(); |
| } |
| for (RawObject** p = to_snapshot + 1; p <= to; p++) { |
| *p = Object::null(); |
| } |
| if (kind == Snapshot::kAppAOT) { |
| func->ptr()->code_ = reinterpret_cast<RawCode*>(d->ReadRef()); |
| } else if (kind == Snapshot::kAppJIT) { |
| NOT_IN_PRECOMPILED(func->ptr()->unoptimized_code_ = |
| reinterpret_cast<RawCode*>(d->ReadRef())); |
| func->ptr()->code_ = reinterpret_cast<RawCode*>(d->ReadRef()); |
| func->ptr()->ic_data_array_ = reinterpret_cast<RawArray*>(d->ReadRef()); |
| } |
| |
| #if defined(DEBUG) |
| func->ptr()->entry_point_ = 0; |
| #endif |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| if (kind != Snapshot::kAppAOT) { |
| func->ptr()->token_pos_ = d->ReadTokenPosition(); |
| func->ptr()->end_token_pos_ = d->ReadTokenPosition(); |
| } |
| #endif |
| func->ptr()->num_fixed_parameters_ = d->Read<int16_t>(); |
| func->ptr()->num_optional_parameters_ = d->Read<int16_t>(); |
| func->ptr()->kind_tag_ = d->Read<uint32_t>(); |
| if (kind == Snapshot::kAppAOT) { |
| // Omit fields used to support de/reoptimization. |
| } else { |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| if (Snapshot::IncludesCode(kind)) { |
| func->ptr()->usage_counter_ = 0; |
| } else { |
| func->ptr()->usage_counter_ = d->Read<int32_t>(); |
| } |
| func->ptr()->deoptimization_counter_ = 0; |
| func->ptr()->optimized_instruction_count_ = 0; |
| func->ptr()->optimized_call_site_count_ = 0; |
| #endif |
| } |
| } |
| } |
| |
| void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { |
| NOT_IN_PRODUCT(TimelineDurationScope tds( |
| Thread::Current(), Timeline::GetIsolateStream(), "PostLoadFunction")); |
| |
| if (kind == Snapshot::kAppAOT) { |
| Function& func = Function::Handle(zone); |
| for (intptr_t i = start_index_; i < stop_index_; i++) { |
| func ^= refs.At(i); |
| ASSERT(func.raw()->ptr()->code_->IsCode()); |
| uword entry_point = func.raw()->ptr()->code_->ptr()->entry_point_; |
| ASSERT(entry_point != 0); |
| func.raw()->ptr()->entry_point_ = entry_point; |
| } |
| } else if (kind == Snapshot::kAppJIT) { |
| Function& func = Function::Handle(zone); |
| Code& code = Code::Handle(zone); |
| for (intptr_t i = start_index_; i < stop_index_; i++) { |
| func ^= refs.At(i); |
| code ^= func.CurrentCode(); |
| if (func.HasCode() && !code.IsDisabled()) { |
| func.SetInstructions(code); |
| func.set_was_compiled(true); |
| } else { |
| func.ClearCode(); |
| func.set_was_compiled(false); |
| } |
| } |
| } else { |
| Function& func = Function::Handle(zone); |
| for (intptr_t i = start_index_; i < stop_index_; i++) { |
| func ^= refs.At(i); |
| func.ClearICDataArray(); |
| func.ClearCode(); |
| func.set_was_compiled(false); |
| } |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class ClosureDataSerializationCluster : public SerializationCluster { |
| public: |
| ClosureDataSerializationCluster() {} |
| virtual ~ClosureDataSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawClosureData* data = ClosureData::RawCast(object); |
| objects_.Add(data); |
| |
| if (s->kind() != Snapshot::kAppAOT) { |
| s->Push(data->ptr()->context_scope_); |
| } |
| s->Push(data->ptr()->parent_function_); |
| s->Push(data->ptr()->signature_type_); |
| s->Push(data->ptr()->closure_); |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kClosureDataCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawClosureData* data = objects_[i]; |
| s->AssignRef(data); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawClosureData* data = objects_[i]; |
| if (s->kind() != Snapshot::kAppAOT) { |
| s->WriteRef(data->ptr()->context_scope_); |
| } |
| s->WriteRef(data->ptr()->parent_function_); |
| s->WriteRef(data->ptr()->signature_type_); |
| s->WriteRef(data->ptr()->closure_); |
| } |
| } |
| |
| private: |
| GrowableArray<RawClosureData*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class ClosureDataDeserializationCluster : public DeserializationCluster { |
| public: |
| ClosureDataDeserializationCluster() {} |
| virtual ~ClosureDataDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef( |
| AllocateUninitialized(old_space, ClosureData::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawClosureData* data = reinterpret_cast<RawClosureData*>(d->Ref(id)); |
| Deserializer::InitializeHeader(data, kClosureDataCid, |
| ClosureData::InstanceSize(), is_vm_object); |
| if (d->kind() == Snapshot::kAppAOT) { |
| data->ptr()->context_scope_ = ContextScope::null(); |
| } else { |
| data->ptr()->context_scope_ = |
| static_cast<RawContextScope*>(d->ReadRef()); |
| } |
| data->ptr()->parent_function_ = static_cast<RawFunction*>(d->ReadRef()); |
| data->ptr()->signature_type_ = static_cast<RawType*>(d->ReadRef()); |
| data->ptr()->closure_ = static_cast<RawInstance*>(d->ReadRef()); |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class SignatureDataSerializationCluster : public SerializationCluster { |
| public: |
| SignatureDataSerializationCluster() {} |
| virtual ~SignatureDataSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawSignatureData* data = SignatureData::RawCast(object); |
| objects_.Add(data); |
| |
| RawObject** from = data->from(); |
| RawObject** to = data->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kSignatureDataCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawSignatureData* data = objects_[i]; |
| s->AssignRef(data); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawSignatureData* data = objects_[i]; |
| RawObject** from = data->from(); |
| RawObject** to = data->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| } |
| } |
| |
| private: |
| GrowableArray<RawSignatureData*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class SignatureDataDeserializationCluster : public DeserializationCluster { |
| public: |
| SignatureDataDeserializationCluster() {} |
| virtual ~SignatureDataDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef( |
| AllocateUninitialized(old_space, SignatureData::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawSignatureData* data = reinterpret_cast<RawSignatureData*>(d->Ref(id)); |
| Deserializer::InitializeHeader( |
| data, kSignatureDataCid, SignatureData::InstanceSize(), is_vm_object); |
| RawObject** from = data->from(); |
| RawObject** to = data->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| *p = d->ReadRef(); |
| } |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class RedirectionDataSerializationCluster : public SerializationCluster { |
| public: |
| RedirectionDataSerializationCluster() {} |
| virtual ~RedirectionDataSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawRedirectionData* data = RedirectionData::RawCast(object); |
| objects_.Add(data); |
| |
| RawObject** from = data->from(); |
| RawObject** to = data->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kRedirectionDataCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawRedirectionData* data = objects_[i]; |
| s->AssignRef(data); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawRedirectionData* data = objects_[i]; |
| RawObject** from = data->from(); |
| RawObject** to = data->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| } |
| } |
| |
| private: |
| GrowableArray<RawRedirectionData*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class RedirectionDataDeserializationCluster : public DeserializationCluster { |
| public: |
| RedirectionDataDeserializationCluster() {} |
| virtual ~RedirectionDataDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef( |
| AllocateUninitialized(old_space, RedirectionData::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawRedirectionData* data = |
| reinterpret_cast<RawRedirectionData*>(d->Ref(id)); |
| Deserializer::InitializeHeader(data, kRedirectionDataCid, |
| RedirectionData::InstanceSize(), |
| is_vm_object); |
| RawObject** from = data->from(); |
| RawObject** to = data->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| *p = d->ReadRef(); |
| } |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class FieldSerializationCluster : public SerializationCluster { |
| public: |
| FieldSerializationCluster() {} |
| virtual ~FieldSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawField* field = Field::RawCast(object); |
| objects_.Add(field); |
| |
| Snapshot::Kind kind = s->kind(); |
| |
| s->Push(field->ptr()->name_); |
| s->Push(field->ptr()->owner_); |
| s->Push(field->ptr()->type_); |
| // Write out the initial static value or field offset. |
| if (Field::StaticBit::decode(field->ptr()->kind_bits_)) { |
| if (kind == Snapshot::kAppAOT) { |
| // For precompiled static fields, the value was already reset and |
| // initializer_ now contains a Function. |
| s->Push(field->ptr()->value_.static_value_); |
| } else if (Field::ConstBit::decode(field->ptr()->kind_bits_)) { |
| // Do not reset const fields. |
| s->Push(field->ptr()->value_.static_value_); |
| } else { |
| // Otherwise, for static fields we write out the initial static value. |
| s->Push(field->ptr()->initializer_.saved_value_); |
| } |
| } else { |
| s->Push(field->ptr()->value_.offset_); |
| } |
| // Write out the initializer function or saved initial value. |
| if (kind == Snapshot::kAppAOT) { |
| s->Push(field->ptr()->initializer_.precompiled_); |
| } else { |
| s->Push(field->ptr()->initializer_.saved_value_); |
| } |
| if (kind != Snapshot::kAppAOT) { |
| // Write out the guarded list length. |
| s->Push(field->ptr()->guarded_list_length_); |
| } |
| if (kind == Snapshot::kAppJIT) { |
| s->Push(field->ptr()->dependent_code_); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kFieldCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawField* field = objects_[i]; |
| s->AssignRef(field); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| Snapshot::Kind kind = s->kind(); |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawField* field = objects_[i]; |
| |
| s->WriteRef(field->ptr()->name_); |
| s->WriteRef(field->ptr()->owner_); |
| s->WriteRef(field->ptr()->type_); |
| // Write out the initial static value or field offset. |
| if (Field::StaticBit::decode(field->ptr()->kind_bits_)) { |
| if (kind == Snapshot::kAppAOT) { |
| // For precompiled static fields, the value was already reset and |
| // initializer_ now contains a Function. |
| s->WriteRef(field->ptr()->value_.static_value_); |
| } else if (Field::ConstBit::decode(field->ptr()->kind_bits_)) { |
| // Do not reset const fields. |
| s->WriteRef(field->ptr()->value_.static_value_); |
| } else { |
| // Otherwise, for static fields we write out the initial static value. |
| s->WriteRef(field->ptr()->initializer_.saved_value_); |
| } |
| } else { |
| s->WriteRef(field->ptr()->value_.offset_); |
| } |
| // Write out the initializer function or saved initial value. |
| if (kind == Snapshot::kAppAOT) { |
| s->WriteRef(field->ptr()->initializer_.precompiled_); |
| } else { |
| s->WriteRef(field->ptr()->initializer_.saved_value_); |
| } |
| if (kind != Snapshot::kAppAOT) { |
| // Write out the guarded list length. |
| s->WriteRef(field->ptr()->guarded_list_length_); |
| } |
| if (kind == Snapshot::kAppJIT) { |
| s->WriteRef(field->ptr()->dependent_code_); |
| } |
| |
| if (kind != Snapshot::kAppAOT) { |
| s->WriteTokenPosition(field->ptr()->token_pos_); |
| s->WriteCid(field->ptr()->guarded_cid_); |
| s->WriteCid(field->ptr()->is_nullable_); |
| } |
| s->Write<uint8_t>(field->ptr()->kind_bits_); |
| } |
| } |
| |
| private: |
| GrowableArray<RawField*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
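| // PostLoad() either resets the guards to their most general state when |
| // field guards are disabled, or recomputes the in-object offset used for |
| // guarded list lengths. |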
| class FieldDeserializationCluster : public DeserializationCluster { |
| public: |
| FieldDeserializationCluster() {} |
| virtual ~FieldDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef(AllocateUninitialized(old_space, Field::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| Snapshot::Kind kind = d->kind(); |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawField* field = reinterpret_cast<RawField*>(d->Ref(id)); |
| Deserializer::InitializeHeader(field, kFieldCid, Field::InstanceSize(), |
| is_vm_object); |
| RawObject** from = field->from(); |
| RawObject** to_snapshot = field->to_snapshot(kind); |
| RawObject** to = field->to(); |
| for (RawObject** p = from; p <= to_snapshot; p++) { |
| *p = d->ReadRef(); |
| } |
| for (RawObject** p = to_snapshot + 1; p <= to; p++) { |
| *p = Object::null(); |
| } |
| |
| if (kind != Snapshot::kAppAOT) { |
| field->ptr()->token_pos_ = d->ReadTokenPosition(); |
| field->ptr()->guarded_cid_ = d->ReadCid(); |
| field->ptr()->is_nullable_ = d->ReadCid(); |
| } |
| field->ptr()->kind_bits_ = d->Read<uint8_t>(); |
| } |
| } |
| |
| void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { |
| NOT_IN_PRODUCT(TimelineDurationScope tds( |
| Thread::Current(), Timeline::GetIsolateStream(), "PostLoadField")); |
| |
| Field& field = Field::Handle(zone); |
| if (!Isolate::Current()->use_field_guards()) { |
| for (intptr_t i = start_index_; i < stop_index_; i++) { |
| field ^= refs.At(i); |
| field.set_guarded_cid(kDynamicCid); |
| field.set_is_nullable(true); |
| field.set_guarded_list_length(Field::kNoFixedLength); |
| field.set_guarded_list_length_in_object_offset( |
| Field::kUnknownLengthOffset); |
| } |
| } else { |
| for (intptr_t i = start_index_; i < stop_index_; i++) { |
| field ^= refs.At(i); |
| field.InitializeGuardedListLengthInObjectOffset(); |
| } |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class LiteralTokenSerializationCluster : public SerializationCluster { |
| public: |
| LiteralTokenSerializationCluster() {} |
| virtual ~LiteralTokenSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawLiteralToken* token = LiteralToken::RawCast(object); |
| objects_.Add(token); |
| |
| RawObject** from = token->from(); |
| RawObject** to = token->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kLiteralTokenCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawLiteralToken* token = objects_[i]; |
| s->AssignRef(token); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawLiteralToken* token = objects_[i]; |
| RawObject** from = token->from(); |
| RawObject** to = token->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| s->Write<int32_t>(token->ptr()->kind_); |
| } |
| } |
| |
| private: |
| GrowableArray<RawLiteralToken*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class LiteralTokenDeserializationCluster : public DeserializationCluster { |
| public: |
| LiteralTokenDeserializationCluster() {} |
| virtual ~LiteralTokenDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef( |
| AllocateUninitialized(old_space, LiteralToken::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawLiteralToken* token = reinterpret_cast<RawLiteralToken*>(d->Ref(id)); |
| Deserializer::InitializeHeader( |
| token, kLiteralTokenCid, LiteralToken::InstanceSize(), is_vm_object); |
| RawObject** from = token->from(); |
| RawObject** to = token->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| *p = d->ReadRef(); |
| } |
| token->ptr()->kind_ = static_cast<Token::Kind>(d->Read<int32_t>()); |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class TokenStreamSerializationCluster : public SerializationCluster { |
| public: |
| TokenStreamSerializationCluster() {} |
| virtual ~TokenStreamSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawTokenStream* stream = TokenStream::RawCast(object); |
| objects_.Add(stream); |
| |
| RawObject** from = stream->from(); |
| RawObject** to = stream->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kTokenStreamCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawTokenStream* stream = objects_[i]; |
| s->AssignRef(stream); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawTokenStream* stream = objects_[i]; |
| RawObject** from = stream->from(); |
| RawObject** to = stream->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| } |
| } |
| |
| private: |
| GrowableArray<RawTokenStream*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class TokenStreamDeserializationCluster : public DeserializationCluster { |
| public: |
| TokenStreamDeserializationCluster() {} |
| virtual ~TokenStreamDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef( |
| AllocateUninitialized(old_space, TokenStream::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawTokenStream* stream = reinterpret_cast<RawTokenStream*>(d->Ref(id)); |
| Deserializer::InitializeHeader(stream, kTokenStreamCid, |
| TokenStream::InstanceSize(), is_vm_object); |
| RawObject** from = stream->from(); |
| RawObject** to = stream->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| *p = d->ReadRef(); |
| } |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class ScriptSerializationCluster : public SerializationCluster { |
| public: |
| ScriptSerializationCluster() {} |
| virtual ~ScriptSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawScript* script = Script::RawCast(object); |
| objects_.Add(script); |
| |
| RawObject** from = script->from(); |
| RawObject** to = script->to_snapshot(s->kind()); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kScriptCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawScript* script = objects_[i]; |
| s->AssignRef(script); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| Snapshot::Kind kind = s->kind(); |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawScript* script = objects_[i]; |
| RawObject** from = script->from(); |
| RawObject** to = script->to_snapshot(kind); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| |
| s->Write<int32_t>(script->ptr()->line_offset_); |
| s->Write<int32_t>(script->ptr()->col_offset_); |
| s->Write<int8_t>(script->ptr()->kind_); |
| } |
| } |
| |
| private: |
| GrowableArray<RawScript*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class ScriptDeserializationCluster : public DeserializationCluster { |
| public: |
| ScriptDeserializationCluster() {} |
| virtual ~ScriptDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef(AllocateUninitialized(old_space, Script::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| Snapshot::Kind kind = d->kind(); |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawScript* script = reinterpret_cast<RawScript*>(d->Ref(id)); |
| Deserializer::InitializeHeader(script, kScriptCid, Script::InstanceSize(), |
| is_vm_object); |
| RawObject** from = script->from(); |
| RawObject** to_snapshot = script->to_snapshot(kind); |
| RawObject** to = script->to(); |
| for (RawObject** p = from; p <= to_snapshot; p++) { |
| *p = d->ReadRef(); |
| } |
| for (RawObject** p = to_snapshot + 1; p <= to; p++) { |
| *p = Object::null(); |
| } |
| |
| script->ptr()->line_offset_ = d->Read<int32_t>(); |
| script->ptr()->col_offset_ = d->Read<int32_t>(); |
| script->ptr()->kind_ = d->Read<int8_t>(); |
| script->ptr()->load_timestamp_ = 0; |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class LibrarySerializationCluster : public SerializationCluster { |
| public: |
| LibrarySerializationCluster() {} |
| virtual ~LibrarySerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawLibrary* lib = Library::RawCast(object); |
| objects_.Add(lib); |
| |
| RawObject** from = lib->from(); |
| RawObject** to = lib->to_snapshot(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kLibraryCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawLibrary* lib = objects_[i]; |
| s->AssignRef(lib); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawLibrary* lib = objects_[i]; |
| RawObject** from = lib->from(); |
| RawObject** to = lib->to_snapshot(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| |
| s->Write<int32_t>(lib->ptr()->index_); |
| s->Write<uint16_t>(lib->ptr()->num_imports_); |
| s->Write<int8_t>(lib->ptr()->load_state_); |
| s->Write<bool>(lib->ptr()->corelib_imported_); |
| s->Write<bool>(lib->ptr()->is_dart_scheme_); |
| s->Write<bool>(lib->ptr()->debuggable_); |
| } |
| } |
| |
| private: |
| GrowableArray<RawLibrary*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
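| // Native entry resolvers are not part of the snapshot and are reset to |
| // NULL. Each deserialized library is marked as coming from a full snapshot, |
| // and its resolved-names cache is recreated in PostLoad(). |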
| class LibraryDeserializationCluster : public DeserializationCluster { |
| public: |
| LibraryDeserializationCluster() {} |
| virtual ~LibraryDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef(AllocateUninitialized(old_space, Library::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawLibrary* lib = reinterpret_cast<RawLibrary*>(d->Ref(id)); |
| Deserializer::InitializeHeader(lib, kLibraryCid, Library::InstanceSize(), |
| is_vm_object); |
| RawObject** from = lib->from(); |
| RawObject** to_snapshot = lib->to_snapshot(); |
| RawObject** to = lib->to(); |
| for (RawObject** p = from; p <= to_snapshot; p++) { |
| *p = d->ReadRef(); |
| } |
| for (RawObject** p = to_snapshot + 1; p <= to; p++) { |
| *p = Object::null(); |
| } |
| |
| lib->ptr()->native_entry_resolver_ = NULL; |
| lib->ptr()->native_entry_symbol_resolver_ = NULL; |
| lib->ptr()->index_ = d->Read<int32_t>(); |
| lib->ptr()->num_imports_ = d->Read<uint16_t>(); |
| lib->ptr()->load_state_ = d->Read<int8_t>(); |
| lib->ptr()->corelib_imported_ = d->Read<bool>(); |
| lib->ptr()->is_dart_scheme_ = d->Read<bool>(); |
| lib->ptr()->debuggable_ = d->Read<bool>(); |
| lib->ptr()->is_in_fullsnapshot_ = true; |
| } |
| } |
| |
| void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { |
| // TODO(rmacnak): This is surprisingly slow, roughly 20% of deserialization |
| // time for the JIT. Maybe make the lookups happy with a null? |
| |
| NOT_IN_PRODUCT(TimelineDurationScope tds( |
| Thread::Current(), Timeline::GetIsolateStream(), "PostLoadLibrary")); |
| |
| Library& lib = Library::Handle(zone); |
| for (intptr_t i = start_index_; i < stop_index_; i++) { |
| lib ^= refs.At(i); |
| const intptr_t kInitialNameCacheSize = 64; |
| lib.InitResolvedNamesCache(kInitialNameCacheSize); |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class NamespaceSerializationCluster : public SerializationCluster { |
| public: |
| NamespaceSerializationCluster() {} |
| virtual ~NamespaceSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawNamespace* ns = Namespace::RawCast(object); |
| objects_.Add(ns); |
| |
| RawObject** from = ns->from(); |
| RawObject** to = ns->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kNamespaceCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawNamespace* ns = objects_[i]; |
| s->AssignRef(ns); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawNamespace* ns = objects_[i]; |
| RawObject** from = ns->from(); |
| RawObject** to = ns->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| } |
| } |
| |
| private: |
| GrowableArray<RawNamespace*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class NamespaceDeserializationCluster : public DeserializationCluster { |
| public: |
| NamespaceDeserializationCluster() {} |
| virtual ~NamespaceDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef(AllocateUninitialized(old_space, Namespace::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawNamespace* ns = reinterpret_cast<RawNamespace*>(d->Ref(id)); |
| Deserializer::InitializeHeader(ns, kNamespaceCid, |
| Namespace::InstanceSize(), is_vm_object); |
| RawObject** from = ns->from(); |
| RawObject** to = ns->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| *p = d->ReadRef(); |
| } |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
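| // Instructions are not written by this cluster; they live in the separately |
| // emitted instructions image. Each Code object records the text offset of |
| // its instructions (and, for app-JIT, of its active instructions) along |
| // with its remaining fields. |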
| class CodeSerializationCluster : public SerializationCluster { |
| public: |
| CodeSerializationCluster() {} |
| virtual ~CodeSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawCode* code = Code::RawCast(object); |
| objects_.Add(code); |
| |
| s->Push(code->ptr()->object_pool_); |
| s->Push(code->ptr()->owner_); |
| s->Push(code->ptr()->exception_handlers_); |
| s->Push(code->ptr()->pc_descriptors_); |
| #if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER) |
| s->Push(code->ptr()->catch_entry_.catch_entry_state_maps_); |
| #else |
| s->Push(code->ptr()->catch_entry_.variables_); |
| #endif |
| s->Push(code->ptr()->stackmaps_); |
| if (!FLAG_dwarf_stack_traces) { |
| s->Push(code->ptr()->inlined_id_to_function_); |
| s->Push(code->ptr()->code_source_map_); |
| } |
| if (s->kind() != Snapshot::kAppAOT) { |
| s->Push(code->ptr()->await_token_positions_); |
| } |
| |
| if (s->kind() == Snapshot::kAppJIT) { |
| s->Push(code->ptr()->deopt_info_array_); |
| s->Push(code->ptr()->static_calls_target_table_); |
| NOT_IN_PRODUCT(s->Push(code->ptr()->return_address_metadata_)); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kCodeCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawCode* code = objects_[i]; |
| s->AssignRef(code); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| Snapshot::Kind kind = s->kind(); |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawCode* code = objects_[i]; |
| |
| intptr_t pointer_offsets_length = |
| Code::PtrOffBits::decode(code->ptr()->state_bits_); |
| if (pointer_offsets_length != 0) { |
| FATAL("Cannot serialize code with embedded pointers"); |
| } |
| if (kind == Snapshot::kAppAOT) { |
| // No disabled code in precompilation. |
| NOT_IN_PRECOMPILED(ASSERT(code->ptr()->instructions_ == |
| code->ptr()->active_instructions_)); |
| } |
| |
| RawInstructions* instr = code->ptr()->instructions_; |
| int32_t text_offset = s->GetTextOffset(instr, code); |
| s->Write<int32_t>(text_offset); |
| if (s->kind() == Snapshot::kAppJIT) { |
| // TODO(rmacnak): Fix references to disabled code before serializing. |
| if (code->ptr()->active_instructions_ != code->ptr()->instructions_) { |
| instr = code->ptr()->active_instructions_; |
| text_offset = s->GetTextOffset(instr, code); |
| } |
| s->Write<int32_t>(text_offset); |
| } |
| |
| s->WriteRef(code->ptr()->object_pool_); |
| s->WriteRef(code->ptr()->owner_); |
| s->WriteRef(code->ptr()->exception_handlers_); |
| s->WriteRef(code->ptr()->pc_descriptors_); |
| #if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER) |
| s->WriteRef(code->ptr()->catch_entry_.catch_entry_state_maps_); |
| #else |
| s->WriteRef(code->ptr()->catch_entry_.variables_); |
| #endif |
| s->WriteRef(code->ptr()->stackmaps_); |
| if (FLAG_dwarf_stack_traces) { |
| s->WriteRef(Array::null()); |
| s->WriteRef(CodeSourceMap::null()); |
| } else { |
| s->WriteRef(code->ptr()->inlined_id_to_function_); |
| s->WriteRef(code->ptr()->code_source_map_); |
| } |
| if (s->kind() != Snapshot::kAppAOT) { |
| s->WriteRef(code->ptr()->await_token_positions_); |
| } |
| if (s->kind() == Snapshot::kAppJIT) { |
| s->WriteRef(code->ptr()->deopt_info_array_); |
| s->WriteRef(code->ptr()->static_calls_target_table_); |
| NOT_IN_PRODUCT(s->WriteRef(code->ptr()->return_address_metadata_)); |
| } |
| |
| s->Write<int32_t>(code->ptr()->state_bits_); |
| } |
| } |
| |
| private: |
| GrowableArray<RawCode*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
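| // Resolves the recorded text offsets back to RawInstructions and derives |
| // the entry points from them before filling in the remaining Code fields. |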
| class CodeDeserializationCluster : public DeserializationCluster { |
| public: |
| CodeDeserializationCluster() {} |
| virtual ~CodeDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef(AllocateUninitialized(old_space, Code::InstanceSize(0))); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawCode* code = reinterpret_cast<RawCode*>(d->Ref(id)); |
| Deserializer::InitializeHeader(code, kCodeCid, Code::InstanceSize(0), |
| is_vm_object); |
| |
| int32_t text_offset = d->Read<int32_t>(); |
| RawInstructions* instr = d->GetInstructionsAt(text_offset); |
| |
| code->ptr()->entry_point_ = Instructions::UncheckedEntryPoint(instr); |
| code->ptr()->checked_entry_point_ = |
| Instructions::CheckedEntryPoint(instr); |
| NOT_IN_PRECOMPILED(code->ptr()->active_instructions_ = instr); |
| code->ptr()->instructions_ = instr; |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| if (d->kind() == Snapshot::kAppJIT) { |
| int32_t text_offset = d->Read<int32_t>(); |
| RawInstructions* instr = d->GetInstructionsAt(text_offset); |
| code->ptr()->active_instructions_ = instr; |
| code->ptr()->entry_point_ = Instructions::UncheckedEntryPoint(instr); |
| code->ptr()->checked_entry_point_ = |
| Instructions::CheckedEntryPoint(instr); |
| } |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| code->ptr()->object_pool_ = |
| reinterpret_cast<RawObjectPool*>(d->ReadRef()); |
| code->ptr()->owner_ = d->ReadRef(); |
| code->ptr()->exception_handlers_ = |
| reinterpret_cast<RawExceptionHandlers*>(d->ReadRef()); |
| code->ptr()->pc_descriptors_ = |
| reinterpret_cast<RawPcDescriptors*>(d->ReadRef()); |
| #if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER) |
| code->ptr()->catch_entry_.catch_entry_state_maps_ = |
| reinterpret_cast<RawTypedData*>(d->ReadRef()); |
| #else |
| code->ptr()->catch_entry_.variables_ = |
| reinterpret_cast<RawSmi*>(d->ReadRef()); |
| #endif |
| code->ptr()->stackmaps_ = reinterpret_cast<RawArray*>(d->ReadRef()); |
| code->ptr()->inlined_id_to_function_ = |
| reinterpret_cast<RawArray*>(d->ReadRef()); |
| code->ptr()->code_source_map_ = |
| reinterpret_cast<RawCodeSourceMap*>(d->ReadRef()); |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| code->ptr()->await_token_positions_ = |
| reinterpret_cast<RawArray*>(d->ReadRef()); |
| |
| if (d->kind() == Snapshot::kAppJIT) { |
| code->ptr()->deopt_info_array_ = |
| reinterpret_cast<RawArray*>(d->ReadRef()); |
| code->ptr()->static_calls_target_table_ = |
| reinterpret_cast<RawArray*>(d->ReadRef()); |
| #if defined(PRODUCT) |
| code->ptr()->return_address_metadata_ = Object::null(); |
| #else |
| code->ptr()->return_address_metadata_ = d->ReadRef(); |
| #endif |
| } else { |
| code->ptr()->deopt_info_array_ = Array::null(); |
| code->ptr()->static_calls_target_table_ = Array::null(); |
| code->ptr()->return_address_metadata_ = Object::null(); |
| } |
| |
| code->ptr()->var_descriptors_ = LocalVarDescriptors::null(); |
| code->ptr()->comments_ = Array::null(); |
| |
| code->ptr()->compile_timestamp_ = 0; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| code->ptr()->state_bits_ = d->Read<int32_t>(); |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
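| // Pool entries are written according to their type from the info array: |
| // tagged objects as references and immediates as raw values. Native entries |
| // are not written; they are relinked lazily at load time. |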
| class ObjectPoolSerializationCluster : public SerializationCluster { |
| public: |
| ObjectPoolSerializationCluster() {} |
| virtual ~ObjectPoolSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawObjectPool* pool = ObjectPool::RawCast(object); |
| objects_.Add(pool); |
| |
| intptr_t length = pool->ptr()->length_; |
| RawTypedData* info_array = pool->ptr()->info_array_; |
| |
| for (intptr_t i = 0; i < length; i++) { |
| ObjectPool::EntryType entry_type = |
| static_cast<ObjectPool::EntryType>(info_array->ptr()->data()[i]); |
| if (entry_type == ObjectPool::kTaggedObject) { |
| s->Push(pool->ptr()->data()[i].raw_obj_); |
| } |
| } |
| |
| // TODO(rmacnak): Allocate the object pool and its info array together. |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kObjectPoolCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawObjectPool* pool = objects_[i]; |
| intptr_t length = pool->ptr()->length_; |
| s->Write<int32_t>(length); |
| s->AssignRef(pool); |
| } |
| } |
| |
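| // Fill layout per pool: the length, then for each entry a type byte |
| // followed by an object reference (kTaggedObject), a raw immediate |
| // (kImmediate), or nothing (kNativeEntry, relinked lazily at load time). |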
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawObjectPool* pool = objects_[i]; |
| RawTypedData* info_array = pool->ptr()->info_array_; |
| intptr_t length = pool->ptr()->length_; |
| s->Write<int32_t>(length); |
| for (intptr_t j = 0; j < length; j++) { |
| ObjectPool::EntryType entry_type = |
| static_cast<ObjectPool::EntryType>(info_array->ptr()->data()[j]); |
| s->Write<int8_t>(entry_type); |
| RawObjectPool::Entry& entry = pool->ptr()->data()[j]; |
| switch (entry_type) { |
| case ObjectPool::kTaggedObject: { |
| #if !defined(TARGET_ARCH_DBC) |
| if (entry.raw_obj_ == |
| StubCode::CallNativeCFunction_entry()->code()) { |
| // Natives can run while precompiling, becoming linked and |
| // switching their stub. Reset to the initial stub used for |
| // lazy-linking. |
| s->WriteRef(StubCode::CallBootstrapCFunction_entry()->code()); |
| break; |
| } |
| #endif |
| s->WriteRef(entry.raw_obj_); |
| break; |
| } |
| case ObjectPool::kImmediate: { |
| s->Write<intptr_t>(entry.raw_value_); |
| break; |
| } |
| case ObjectPool::kNativeEntry: { |
| // Write nothing. Will initialize with the lazy link entry. |
| #if defined(TARGET_ARCH_DBC) |
| UNREACHABLE(); // DBC does not support lazy native call linking. |
| #endif |
| break; |
| } |
| default: |
| UNREACHABLE(); |
| } |
| } |
| } |
| } |
| |
| private: |
| GrowableArray<RawObjectPool*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
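| // Rebuilds object pools in two phases: ReadAlloc reserves a pool of the |
| // recorded length for each object, and ReadFill populates the entries and |
| // reconstructs the per-entry type info array. |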
| class ObjectPoolDeserializationCluster : public DeserializationCluster { |
| public: |
| ObjectPoolDeserializationCluster() {} |
| virtual ~ObjectPoolDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| intptr_t length = d->Read<int32_t>(); |
| d->AssignRef( |
| AllocateUninitialized(old_space, ObjectPool::InstanceSize(length))); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| PageSpace* old_space = d->heap()->old_space(); |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| intptr_t length = d->Read<int32_t>(); |
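| // The pool's parallel Uint8 info array records each entry's type, |
| // distinguishing tagged object pointers from raw immediates and native |
| // entries. |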
| RawTypedData* info_array = reinterpret_cast<RawTypedData*>( |
| AllocateUninitialized(old_space, TypedData::InstanceSize(length))); |
| Deserializer::InitializeHeader(info_array, kTypedDataUint8ArrayCid, |
| TypedData::InstanceSize(length), |
| is_vm_object); |
| info_array->ptr()->length_ = Smi::New(length); |
| RawObjectPool* pool = reinterpret_cast<RawObjectPool*>(d->Ref(id)); |
| Deserializer::InitializeHeader( |
| pool, kObjectPoolCid, ObjectPool::InstanceSize(length), is_vm_object); |
| pool->ptr()->length_ = length; |
| pool->ptr()->info_array_ = info_array; |
| for (intptr_t j = 0; j < length; j++) { |
| ObjectPool::EntryType entry_type = |
| static_cast<ObjectPool::EntryType>(d->Read<int8_t>()); |
| info_array->ptr()->data()[j] = entry_type; |
| RawObjectPool::Entry& entry = pool->ptr()->data()[j]; |
| switch (entry_type) { |
| case ObjectPool::kTaggedObject: |
| entry.raw_obj_ = d->ReadRef(); |
| break; |
| case ObjectPool::kImmediate: |
| entry.raw_value_ = d->Read<intptr_t>(); |
| break; |
| case ObjectPool::kNativeEntry: { |
| #if !defined(TARGET_ARCH_DBC) |
| // Read nothing. Initialize with the lazy link entry. |
| uword new_entry = NativeEntry::LinkNativeCallEntry(); |
| entry.raw_value_ = static_cast<intptr_t>(new_entry); |
| #else |
| UNREACHABLE(); // DBC does not support lazy native call linking. |
| #endif |
| break; |
| } |
| default: |
| UNREACHABLE(); |
| } |
| } |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| // PcDescriptors, StackMaps, OneByteStrings, and TwoByteStrings are |
| // emitted into the snapshot's read-only data section. |
| class RODataSerializationCluster : public SerializationCluster { |
| public: |
| explicit RODataSerializationCluster(intptr_t cid) : cid_(cid) {} |
| virtual ~RODataSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| objects_.Add(object); |
| |
| // A string's hash must already be computed when we write it because it |
| // will be loaded into read-only memory. |
| if (cid_ == kOneByteStringCid) { |
| RawOneByteString* str = static_cast<RawOneByteString*>(object); |
| if (str->ptr()->hash_ == Smi::New(0)) { |
| intptr_t hash = |
| String::Hash(str->ptr()->data(), Smi::Value(str->ptr()->length_)); |
| str->ptr()->hash_ = Smi::New(hash); |
| } |
| ASSERT(str->ptr()->hash_ != Smi::New(0)); |
| } else if (cid_ == kTwoByteStringCid) { |
| RawTwoByteString* str = static_cast<RawTwoByteString*>(object); |
| if (str->ptr()->hash_ == Smi::New(0)) { |
| intptr_t hash = String::Hash(str->ptr()->data(), |
| Smi::Value(str->ptr()->length_)); |
| str->ptr()->hash_ = Smi::New(hash); |
| } |
| ASSERT(str->ptr()->hash_ != Smi::New(0)); |
| } |
| } |
| |
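| // The objects themselves live in the snapshot's read-only data section; |
| // the cluster records only each object's offset into that section, so |
| // WriteFill has nothing left to do. |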
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(cid_); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawObject* object = objects_[i]; |
| int32_t rodata_offset = s->GetRODataOffset(object); |
| s->Write<int32_t>(rodata_offset); |
| s->AssignRef(object); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| // No-op. |
| } |
| |
| private: |
| const intptr_t cid_; |
| GrowableArray<RawObject*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class RODataDeserializationCluster : public DeserializationCluster { |
| public: |
| RODataDeserializationCluster() {} |
| virtual ~RODataDeserializationCluster() {} |
| |
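| // RO data objects already exist in the mapped read-only section, so no |
| // heap allocation or fill is needed; references are registered by offset. |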
| void ReadAlloc(Deserializer* d) { |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| int32_t rodata_offset = d->Read<int32_t>(); |
| d->AssignRef(d->GetObjectAt(rodata_offset)); |
| } |
| } |
| |
| void ReadFill(Deserializer* d) { |
| // No-op. |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class ExceptionHandlersSerializationCluster : public SerializationCluster { |
| public: |
| ExceptionHandlersSerializationCluster() {} |
| virtual ~ExceptionHandlersSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawExceptionHandlers* handlers = ExceptionHandlers::RawCast(object); |
| objects_.Add(handlers); |
| |
| s->Push(handlers->ptr()->handled_types_data_); |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kExceptionHandlersCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawExceptionHandlers* handlers = objects_[i]; |
| intptr_t length = handlers->ptr()->num_entries_; |
| s->Write<int32_t>(length); |
| s->AssignRef(handlers); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawExceptionHandlers* handlers = objects_[i]; |
| intptr_t length = handlers->ptr()->num_entries_; |
| s->Write<int32_t>(length); |
| s->WriteRef(handlers->ptr()->handled_types_data_); |
| |
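| // The fixed-size ExceptionHandlerInfo records contain no object pointers, |
| // so they are copied as raw bytes. |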
| uint8_t* data = reinterpret_cast<uint8_t*>(handlers->ptr()->data()); |
| intptr_t length_in_bytes = length * sizeof(ExceptionHandlerInfo); |
| s->WriteBytes(data, length_in_bytes); |
| } |
| } |
| |
| private: |
| GrowableArray<RawExceptionHandlers*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class ExceptionHandlersDeserializationCluster : public DeserializationCluster { |
| public: |
| ExceptionHandlersDeserializationCluster() {} |
| virtual ~ExceptionHandlersDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| intptr_t length = d->Read<int32_t>(); |
| d->AssignRef(AllocateUninitialized( |
| old_space, ExceptionHandlers::InstanceSize(length))); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawExceptionHandlers* handlers = |
| reinterpret_cast<RawExceptionHandlers*>(d->Ref(id)); |
| intptr_t length = d->Read<int32_t>(); |
| Deserializer::InitializeHeader(handlers, kExceptionHandlersCid, |
| ExceptionHandlers::InstanceSize(length), |
| is_vm_object); |
| handlers->ptr()->num_entries_ = length; |
| handlers->ptr()->handled_types_data_ = |
| reinterpret_cast<RawArray*>(d->ReadRef()); |
| |
| uint8_t* data = reinterpret_cast<uint8_t*>(handlers->ptr()->data()); |
| intptr_t length_in_bytes = length * sizeof(ExceptionHandlerInfo); |
| d->ReadBytes(data, length_in_bytes); |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class ContextSerializationCluster : public SerializationCluster { |
| public: |
| ContextSerializationCluster() {} |
| virtual ~ContextSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawContext* context = Context::RawCast(object); |
| objects_.Add(context); |
| |
| s->Push(context->ptr()->parent_); |
| intptr_t length = context->ptr()->num_variables_; |
| for (intptr_t i = 0; i < length; i++) { |
| s->Push(context->ptr()->data()[i]); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kContextCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawContext* context = objects_[i]; |
| intptr_t length = context->ptr()->num_variables_; |
| s->Write<int32_t>(length); |
| s->AssignRef(context); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawContext* context = objects_[i]; |
| intptr_t length = context->ptr()->num_variables_; |
| s->Write<int32_t>(length); |
| s->WriteRef(context->ptr()->parent_); |
| for (intptr_t j = 0; j < length; j++) { |
| s->WriteRef(context->ptr()->data()[j]); |
| } |
| } |
| } |
| |
| private: |
| GrowableArray<RawContext*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class ContextDeserializationCluster : public DeserializationCluster { |
| public: |
| ContextDeserializationCluster() {} |
| virtual ~ContextDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| intptr_t length = d->Read<int32_t>(); |
| d->AssignRef( |
| AllocateUninitialized(old_space, Context::InstanceSize(length))); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawContext* context = reinterpret_cast<RawContext*>(d->Ref(id)); |
| intptr_t length = d->Read<int32_t>(); |
| Deserializer::InitializeHeader( |
| context, kContextCid, Context::InstanceSize(length), is_vm_object); |
| context->ptr()->num_variables_ = length; |
| context->ptr()->parent_ = reinterpret_cast<RawContext*>(d->ReadRef()); |
| for (intptr_t j = 0; j < length; j++) { |
| context->ptr()->data()[j] = d->ReadRef(); |
| } |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class ContextScopeSerializationCluster : public SerializationCluster { |
| public: |
| ContextScopeSerializationCluster() {} |
| virtual ~ContextScopeSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawContextScope* scope = ContextScope::RawCast(object); |
| objects_.Add(scope); |
| |
| intptr_t length = scope->ptr()->num_variables_; |
| RawObject** from = scope->from(); |
| RawObject** to = scope->to(length); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kContextScopeCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawContextScope* scope = objects_[i]; |
| intptr_t length = scope->ptr()->num_variables_; |
| s->Write<int32_t>(length); |
| s->AssignRef(scope); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawContextScope* scope = objects_[i]; |
| intptr_t length = scope->ptr()->num_variables_; |
| s->Write<int32_t>(length); |
| s->Write<bool>(scope->ptr()->is_implicit_); |
| RawObject** from = scope->from(); |
| RawObject** to = scope->to(length); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| } |
| } |
| |
| private: |
| GrowableArray<RawContextScope*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class ContextScopeDeserializationCluster : public DeserializationCluster { |
| public: |
| ContextScopeDeserializationCluster() {} |
| virtual ~ContextScopeDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| intptr_t length = d->Read<int32_t>(); |
| d->AssignRef( |
| AllocateUninitialized(old_space, ContextScope::InstanceSize(length))); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawContextScope* scope = reinterpret_cast<RawContextScope*>(d->Ref(id)); |
| intptr_t length = d->Read<int32_t>(); |
| Deserializer::InitializeHeader(scope, kContextScopeCid, |
| ContextScope::InstanceSize(length), |
| is_vm_object); |
| scope->ptr()->num_variables_ = length; |
| scope->ptr()->is_implicit_ = d->Read<bool>(); |
| RawObject** from = scope->from(); |
| RawObject** to = scope->to(length); |
| for (RawObject** p = from; p <= to; p++) { |
| *p = d->ReadRef(); |
| } |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class UnlinkedCallSerializationCluster : public SerializationCluster { |
| public: |
| UnlinkedCallSerializationCluster() {} |
| virtual ~UnlinkedCallSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawUnlinkedCall* unlinked = UnlinkedCall::RawCast(object); |
| objects_.Add(unlinked); |
| |
| RawObject** from = unlinked->from(); |
| RawObject** to = unlinked->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kUnlinkedCallCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawUnlinkedCall* unlinked = objects_[i]; |
| s->AssignRef(unlinked); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawUnlinkedCall* unlinked = objects_[i]; |
| RawObject** from = unlinked->from(); |
| RawObject** to = unlinked->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| } |
| } |
| |
| private: |
| GrowableArray<RawUnlinkedCall*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class UnlinkedCallDeserializationCluster : public DeserializationCluster { |
| public: |
| UnlinkedCallDeserializationCluster() {} |
| virtual ~UnlinkedCallDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef( |
| AllocateUninitialized(old_space, UnlinkedCall::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawUnlinkedCall* unlinked = |
| reinterpret_cast<RawUnlinkedCall*>(d->Ref(id)); |
| Deserializer::InitializeHeader(unlinked, kUnlinkedCallCid, |
| UnlinkedCall::InstanceSize(), |
| is_vm_object); |
| RawObject** from = unlinked->from(); |
| RawObject** to = unlinked->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| *p = d->ReadRef(); |
| } |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class ICDataSerializationCluster : public SerializationCluster { |
| public: |
| ICDataSerializationCluster() {} |
| virtual ~ICDataSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawICData* ic = ICData::RawCast(object); |
| objects_.Add(ic); |
| |
| RawObject** from = ic->from(); |
| RawObject** to = ic->to_snapshot(s->kind()); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kICDataCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawICData* ic = objects_[i]; |
| s->AssignRef(ic); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| Snapshot::Kind kind = s->kind(); |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawICData* ic = objects_[i]; |
| RawObject** from = ic->from(); |
| RawObject** to = ic->to_snapshot(kind); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
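| // Deopt ids are JIT-only metadata and are omitted from AOT snapshots. |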
| if (kind != Snapshot::kAppAOT) { |
| NOT_IN_PRECOMPILED(s->Write<int32_t>(ic->ptr()->deopt_id_)); |
| } |
| s->Write<uint32_t>(ic->ptr()->state_bits_); |
| #if defined(TAG_IC_DATA) |
| s->Write<int32_t>(ic->ptr()->tag_); |
| #endif |
| } |
| } |
| |
| private: |
| GrowableArray<RawICData*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class ICDataDeserializationCluster : public DeserializationCluster { |
| public: |
| ICDataDeserializationCluster() {} |
| virtual ~ICDataDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef(AllocateUninitialized(old_space, ICData::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| Snapshot::Kind kind = d->kind(); |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawICData* ic = reinterpret_cast<RawICData*>(d->Ref(id)); |
| Deserializer::InitializeHeader(ic, kICDataCid, ICData::InstanceSize(), |
| is_vm_object); |
| RawObject** from = ic->from(); |
| RawObject** to_snapshot = ic->to_snapshot(kind); |
| RawObject** to = ic->to(); |
| for (RawObject** p = from; p <= to_snapshot; p++) { |
| *p = d->ReadRef(); |
| } |
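| // Fields beyond the snapshot boundary for this kind are not serialized; |
| // reset them to null. |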
| for (RawObject** p = to_snapshot + 1; p <= to; p++) { |
| *p = Object::null(); |
| } |
| NOT_IN_PRECOMPILED(ic->ptr()->deopt_id_ = d->Read<int32_t>()); |
| ic->ptr()->state_bits_ = d->Read<uint32_t>(); |
| #if defined(TAG_IC_DATA) |
| ic->ptr()->tag_ = d->Read<int32_t>(); |
| #endif |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class MegamorphicCacheSerializationCluster : public SerializationCluster { |
| public: |
| MegamorphicCacheSerializationCluster() {} |
| virtual ~MegamorphicCacheSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawMegamorphicCache* cache = MegamorphicCache::RawCast(object); |
| objects_.Add(cache); |
| |
| RawObject** from = cache->from(); |
| RawObject** to = cache->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kMegamorphicCacheCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawMegamorphicCache* cache = objects_[i]; |
| s->AssignRef(cache); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawMegamorphicCache* cache = objects_[i]; |
| RawObject** from = cache->from(); |
| RawObject** to = cache->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| s->Write<int32_t>(cache->ptr()->filled_entry_count_); |
| } |
| } |
| |
| private: |
| GrowableArray<RawMegamorphicCache*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class MegamorphicCacheDeserializationCluster : public DeserializationCluster { |
| public: |
| MegamorphicCacheDeserializationCluster() {} |
| virtual ~MegamorphicCacheDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef( |
| AllocateUninitialized(old_space, MegamorphicCache::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawMegamorphicCache* cache = |
| reinterpret_cast<RawMegamorphicCache*>(d->Ref(id)); |
| Deserializer::InitializeHeader(cache, kMegamorphicCacheCid, |
| MegamorphicCache::InstanceSize(), |
| is_vm_object); |
| RawObject** from = cache->from(); |
| RawObject** to = cache->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| *p = d->ReadRef(); |
| } |
| cache->ptr()->filled_entry_count_ = d->Read<int32_t>(); |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
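| // A SubtypeTestCache holds only a reference to its backing cache array; |
| // the array itself is traced and serialized like any other array. |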
| class SubtypeTestCacheSerializationCluster : public SerializationCluster { |
| public: |
| SubtypeTestCacheSerializationCluster() {} |
| virtual ~SubtypeTestCacheSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawSubtypeTestCache* cache = SubtypeTestCache::RawCast(object); |
| objects_.Add(cache); |
| s->Push(cache->ptr()->cache_); |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kSubtypeTestCacheCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawSubtypeTestCache* cache = objects_[i]; |
| s->AssignRef(cache); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawSubtypeTestCache* cache = objects_[i]; |
| s->WriteRef(cache->ptr()->cache_); |
| } |
| } |
| |
| private: |
| GrowableArray<RawSubtypeTestCache*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class SubtypeTestCacheDeserializationCluster : public DeserializationCluster { |
| public: |
| SubtypeTestCacheDeserializationCluster() {} |
| virtual ~SubtypeTestCacheDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef( |
| AllocateUninitialized(old_space, SubtypeTestCache::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawSubtypeTestCache* cache = |
| reinterpret_cast<RawSubtypeTestCache*>(d->Ref(id)); |
| Deserializer::InitializeHeader(cache, kSubtypeTestCacheCid, |
| SubtypeTestCache::InstanceSize(), |
| is_vm_object); |
| cache->ptr()->cache_ = reinterpret_cast<RawArray*>(d->ReadRef()); |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class LanguageErrorSerializationCluster : public SerializationCluster { |
| public: |
| LanguageErrorSerializationCluster() {} |
| virtual ~LanguageErrorSerializationCluster() {} |
| |
| void Trace(Serializer* s, RawObject* object) { |
| RawLanguageError* error = LanguageError::RawCast(object); |
| objects_.Add(error); |
| |
| RawObject** from = error->from(); |
| RawObject** to = error->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->Push(*p); |
| } |
| } |
| |
| void WriteAlloc(Serializer* s) { |
| s->WriteCid(kLanguageErrorCid); |
| intptr_t count = objects_.length(); |
| s->Write<int32_t>(count); |
| for (intptr_t i = 0; i < count; i++) { |
| RawLanguageError* error = objects_[i]; |
| s->AssignRef(error); |
| } |
| } |
| |
| void WriteFill(Serializer* s) { |
| intptr_t count = objects_.length(); |
| for (intptr_t i = 0; i < count; i++) { |
| RawLanguageError* error = objects_[i]; |
| RawObject** from = error->from(); |
| RawObject** to = error->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| s->WriteRef(*p); |
| } |
| s->WriteTokenPosition(error->ptr()->token_pos_); |
| s->Write<bool>(error->ptr()->report_after_token_); |
| s->Write<int8_t>(error->ptr()->kind_); |
| } |
| } |
| |
| private: |
| GrowableArray<RawLanguageError*> objects_; |
| }; |
| #endif // !DART_PRECOMPILED_RUNTIME |
| |
| |
| class LanguageErrorDeserializationCluster : public DeserializationCluster { |
| public: |
| LanguageErrorDeserializationCluster() {} |
| virtual ~LanguageErrorDeserializationCluster() {} |
| |
| void ReadAlloc(Deserializer* d) { |
| start_index_ = d->next_index(); |
| PageSpace* old_space = d->heap()->old_space(); |
| intptr_t count = d->Read<int32_t>(); |
| for (intptr_t i = 0; i < count; i++) { |
| d->AssignRef( |
| AllocateUninitialized(old_space, LanguageError::InstanceSize())); |
| } |
| stop_index_ = d->next_index(); |
| } |
| |
| void ReadFill(Deserializer* d) { |
| bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| |
| for (intptr_t id = start_index_; id < stop_index_; id++) { |
| RawLanguageError* error = reinterpret_cast<RawLanguageError*>(d->Ref(id)); |
| Deserializer::InitializeHeader(error, kLanguageErrorCid, |
| LanguageError::InstanceSize(), |
| is_vm_object); |
| RawObject** from = error->from(); |
| RawObject** to = error->to(); |
| for (RawObject** p = from; p <= to; p++) { |
| *p = d->ReadRef(); |
| } |
| error->ptr()->token_pos_ = d->ReadTokenPosition(); |
| error->ptr()->report_after_token_ = d->Read<bool>(); |
| error->ptr()->kind_ = d->Read<int8_t>(); |
| } |
| } |
| }; |
| |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| class UnhandledExceptionSerializationCluster : public SerializationCluster { |
| public: |
| UnhandledExceptionSerializationCluster() {} |
| virtual ~UnhandledExceptionSerializationCluster() {} |
| |
| void Trace(Serializer* |