// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#include "vm/clustered_snapshot.h"
#include "platform/assert.h"
#include "vm/bootstrap.h"
#include "vm/compiler/backend/code_statistics.h"
#include "vm/dart.h"
#include "vm/heap/heap.h"
#include "vm/image_snapshot.h"
#include "vm/native_entry.h"
#include "vm/object.h"
#include "vm/object_store.h"
#include "vm/program_visitor.h"
#include "vm/stub_code.h"
#include "vm/symbols.h"
#include "vm/timeline.h"
#include "vm/version.h"
#define LOG_SECTION_BOUNDARIES false
namespace dart {
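// Allocates uninitialized old-space storage for an object that is about to be
// deserialized. The caller must initialize the header afterwards (see
// Deserializer::InitializeHeader). Growth is forced rather than triggering a
// collection, since the heap may hold partially initialized objects while a
// snapshot is being read.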
static RawObject* AllocateUninitialized(PageSpace* old_space, intptr_t size) {
ASSERT(Utils::IsAligned(size, kObjectAlignment));
uword address =
old_space->TryAllocateDataBumpLocked(size, PageSpace::kForceGrowth);
if (address == 0) {
OUT_OF_MEMORY();
}
return RawObject::FromAddr(address);
}
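// Type testing stubs are carried only by full JIT and full AOT snapshots.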
static bool SnapshotContainsTypeTestingStubs(Snapshot::Kind kind) {
return kind == Snapshot::kFullAOT || kind == Snapshot::kFullJIT;
}
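// Writes the tag word (and, when it is stored in the header, the hash field)
// of a freshly allocated object. Snapshot objects are always allocated in old
// space, so the old/marked/remembered bits are preset accordingly.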
void Deserializer::InitializeHeader(RawObject* raw,
intptr_t class_id,
intptr_t size,
bool is_vm_isolate,
bool is_canonical) {
ASSERT(Utils::IsAligned(size, kObjectAlignment));
uint32_t tags = 0;
tags = RawObject::ClassIdTag::update(class_id, tags);
tags = RawObject::SizeTag::update(size, tags);
tags = RawObject::VMHeapObjectTag::update(is_vm_isolate, tags);
tags = RawObject::CanonicalObjectTag::update(is_canonical, tags);
tags = RawObject::OldBit::update(true, tags);
tags = RawObject::OldAndNotMarkedBit::update(true, tags);
tags = RawObject::OldAndNotRememberedBit::update(true, tags);
tags = RawObject::NewBit::update(false, tags);
raw->ptr()->tags_ = tags;
#if defined(HASH_IN_OBJECT_HEADER)
raw->ptr()->hash_ = 0;
#endif
}
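// The WriteAndMeasure* wrappers delegate to WriteAlloc/WriteFill and record
// per-cluster statistics: bytes written and references assigned.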
void SerializationCluster::WriteAndMeasureAlloc(Serializer* serializer) {
if (LOG_SECTION_BOUNDARIES) {
OS::PrintErr("Data + %" Px ": Alloc %s\n", serializer->bytes_written(),
name_);
}
intptr_t start_size = serializer->bytes_written() + serializer->GetDataSize();
intptr_t start_objects = serializer->next_ref_index();
WriteAlloc(serializer);
intptr_t stop_size = serializer->bytes_written() + serializer->GetDataSize();
intptr_t stop_objects = serializer->next_ref_index();
size_ += (stop_size - start_size);
num_objects_ += (stop_objects - start_objects);
}
void SerializationCluster::WriteAndMeasureFill(Serializer* serializer) {
if (LOG_SECTION_BOUNDARIES) {
OS::PrintErr("Data + %" Px ": Fill %s\n", serializer->bytes_written(),
name_);
}
intptr_t start = serializer->bytes_written();
WriteFill(serializer);
intptr_t stop = serializer->bytes_written();
size_ += (stop - start);
}
#if !defined(DART_PRECOMPILED_RUNTIME)
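// Serialization clusters group objects by class id. For each cluster, Trace()
// records an object and pushes its outgoing references onto the serializer's
// work list, WriteAlloc() writes whatever is needed to allocate (or locate)
// each object so that reference ids can be assigned, and WriteFill() writes
// the objects' contents. Deserialization clusters mirror this with
// ReadAlloc()/ReadFill() and an optional PostLoad() pass.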
class ClassSerializationCluster : public SerializationCluster {
public:
explicit ClassSerializationCluster(intptr_t num_cids)
: SerializationCluster("Class"),
predefined_(kNumPredefinedCids),
objects_(num_cids) {}
virtual ~ClassSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawClass* cls = Class::RawCast(object);
intptr_t class_id = cls->ptr()->id_;
if (class_id < kNumPredefinedCids) {
// These classes are allocated by Object::Init or Object::InitOnce, so the
// deserializer must find them in the class table instead of allocating
// them.
predefined_.Add(cls);
} else {
objects_.Add(cls);
}
RawObject** from = cls->from();
RawObject** to = cls->to_snapshot(s->kind());
for (RawObject** p = from; p <= to; p++) {
s->Push(*p);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kClassCid);
intptr_t count = predefined_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawClass* cls = predefined_[i];
intptr_t class_id = cls->ptr()->id_;
s->WriteCid(class_id);
s->AssignRef(cls);
}
count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawClass* cls = objects_[i];
s->AssignRef(cls);
}
}
void WriteFill(Serializer* s) {
intptr_t count = predefined_.length();
for (intptr_t i = 0; i < count; i++) {
WriteClass(s, predefined_[i]);
}
count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
WriteClass(s, objects_[i]);
}
}
void WriteClass(Serializer* s, RawClass* cls) {
Snapshot::Kind kind = s->kind();
RawObject** from = cls->from();
RawObject** to = cls->to_snapshot(kind);
for (RawObject** p = from; p <= to; p++) {
s->WriteRef(*p);
}
intptr_t class_id = cls->ptr()->id_;
if (class_id == kIllegalCid) {
s->UnexpectedObject(cls, "Class with illegal cid");
}
s->WriteCid(class_id);
if (kind != Snapshot::kFullAOT) {
s->Write<int32_t>(cls->ptr()->kernel_offset_);
}
s->Write<int32_t>(cls->ptr()->instance_size_in_words_);
s->Write<int32_t>(cls->ptr()->next_field_offset_in_words_);
s->Write<int32_t>(cls->ptr()->type_arguments_field_offset_in_words_);
s->Write<uint16_t>(cls->ptr()->num_type_arguments_);
s->Write<uint16_t>(cls->ptr()->has_pragma_and_num_own_type_arguments_);
s->Write<uint16_t>(cls->ptr()->num_native_fields_);
s->WriteTokenPosition(cls->ptr()->token_pos_);
s->Write<uint16_t>(cls->ptr()->state_bits_);
}
private:
GrowableArray<RawClass*> predefined_;
GrowableArray<RawClass*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
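// Predefined classes already exist in the class table (they were set up by
// Object::Init), so they are looked up by class id rather than allocated;
// the remaining classes are allocated here and registered in the table during
// ReadFill().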
class ClassDeserializationCluster : public DeserializationCluster {
public:
ClassDeserializationCluster() {}
virtual ~ClassDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
predefined_start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
ClassTable* table = d->isolate()->class_table();
for (intptr_t i = 0; i < count; i++) {
intptr_t class_id = d->ReadCid();
ASSERT(table->HasValidClassAt(class_id));
RawClass* cls = table->At(class_id);
ASSERT(cls != NULL);
d->AssignRef(cls);
}
predefined_stop_index_ = d->next_index();
start_index_ = d->next_index();
count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(AllocateUninitialized(old_space, Class::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
Snapshot::Kind kind = d->kind();
bool is_vm_object = d->isolate() == Dart::vm_isolate();
ClassTable* table = d->isolate()->class_table();
for (intptr_t id = predefined_start_index_; id < predefined_stop_index_;
id++) {
RawClass* cls = reinterpret_cast<RawClass*>(d->Ref(id));
RawObject** from = cls->from();
RawObject** to_snapshot = cls->to_snapshot(kind);
for (RawObject** p = from; p <= to_snapshot; p++) {
*p = d->ReadRef();
}
intptr_t class_id = d->ReadCid();
cls->ptr()->id_ = class_id;
#if !defined(DART_PRECOMPILED_RUNTIME)
if (kind != Snapshot::kFullAOT) {
cls->ptr()->kernel_offset_ = d->Read<int32_t>();
}
#endif
if (!RawObject::IsInternalVMdefinedClassId(class_id)) {
cls->ptr()->instance_size_in_words_ = d->Read<int32_t>();
cls->ptr()->next_field_offset_in_words_ = d->Read<int32_t>();
} else {
d->Read<int32_t>(); // Skip.
d->Read<int32_t>(); // Skip.
}
cls->ptr()->type_arguments_field_offset_in_words_ = d->Read<int32_t>();
cls->ptr()->num_type_arguments_ = d->Read<uint16_t>();
cls->ptr()->has_pragma_and_num_own_type_arguments_ = d->Read<uint16_t>();
cls->ptr()->num_native_fields_ = d->Read<uint16_t>();
cls->ptr()->token_pos_ = d->ReadTokenPosition();
cls->ptr()->state_bits_ = d->Read<uint16_t>();
}
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawClass* cls = reinterpret_cast<RawClass*>(d->Ref(id));
Deserializer::InitializeHeader(cls, kClassCid, Class::InstanceSize(),
is_vm_object);
RawObject** from = cls->from();
RawObject** to_snapshot = cls->to_snapshot(kind);
RawObject** to = cls->to();
for (RawObject** p = from; p <= to_snapshot; p++) {
*p = d->ReadRef();
}
for (RawObject** p = to_snapshot + 1; p <= to; p++) {
*p = Object::null();
}
intptr_t class_id = d->ReadCid();
ASSERT(class_id >= kNumPredefinedCids);
Instance fake;
cls->ptr()->handle_vtable_ = fake.vtable();
cls->ptr()->id_ = class_id;
#if !defined(DART_PRECOMPILED_RUNTIME)
if (kind != Snapshot::kFullAOT) {
cls->ptr()->kernel_offset_ = d->Read<int32_t>();
}
#endif
cls->ptr()->instance_size_in_words_ = d->Read<int32_t>();
cls->ptr()->next_field_offset_in_words_ = d->Read<int32_t>();
cls->ptr()->type_arguments_field_offset_in_words_ = d->Read<int32_t>();
cls->ptr()->num_type_arguments_ = d->Read<uint16_t>();
cls->ptr()->has_pragma_and_num_own_type_arguments_ = d->Read<uint16_t>();
cls->ptr()->num_native_fields_ = d->Read<uint16_t>();
cls->ptr()->token_pos_ = d->ReadTokenPosition();
cls->ptr()->state_bits_ = d->Read<uint16_t>();
table->AllocateIndex(class_id);
table->SetAt(class_id, cls);
}
}
private:
intptr_t predefined_start_index_;
intptr_t predefined_stop_index_;
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class UnresolvedClassSerializationCluster : public SerializationCluster {
public:
UnresolvedClassSerializationCluster()
: SerializationCluster("UnresolvedClass") {}
virtual ~UnresolvedClassSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawUnresolvedClass* cls = UnresolvedClass::RawCast(object);
objects_.Add(cls);
RawObject** from = cls->from();
RawObject** to = cls->to();
for (RawObject** p = from; p <= to; p++) {
s->Push(*p);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kUnresolvedClassCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawUnresolvedClass* cls = objects_[i];
s->AssignRef(cls);
}
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawUnresolvedClass* cls = objects_[i];
RawObject** from = cls->from();
RawObject** to = cls->to();
for (RawObject** p = from; p <= to; p++) {
s->WriteRef(*p);
}
s->WriteTokenPosition(cls->ptr()->token_pos_);
}
}
private:
GrowableArray<RawUnresolvedClass*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class UnresolvedClassDeserializationCluster : public DeserializationCluster {
public:
UnresolvedClassDeserializationCluster() {}
virtual ~UnresolvedClassDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(
AllocateUninitialized(old_space, UnresolvedClass::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawUnresolvedClass* cls =
reinterpret_cast<RawUnresolvedClass*>(d->Ref(id));
Deserializer::InitializeHeader(cls, kUnresolvedClassCid,
UnresolvedClass::InstanceSize(),
is_vm_object);
RawObject** from = cls->from();
RawObject** to = cls->to();
for (RawObject** p = from; p <= to; p++) {
*p = d->ReadRef();
}
cls->ptr()->token_pos_ = d->ReadTokenPosition();
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class TypeArgumentsSerializationCluster : public SerializationCluster {
public:
TypeArgumentsSerializationCluster() : SerializationCluster("TypeArguments") {}
virtual ~TypeArgumentsSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawTypeArguments* type_args = TypeArguments::RawCast(object);
objects_.Add(type_args);
s->Push(type_args->ptr()->instantiations_);
intptr_t length = Smi::Value(type_args->ptr()->length_);
for (intptr_t i = 0; i < length; i++) {
s->Push(type_args->ptr()->types()[i]);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kTypeArgumentsCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawTypeArguments* type_args = objects_[i];
intptr_t length = Smi::Value(type_args->ptr()->length_);
s->WriteUnsigned(length);
s->AssignRef(type_args);
}
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawTypeArguments* type_args = objects_[i];
intptr_t length = Smi::Value(type_args->ptr()->length_);
s->WriteUnsigned(length);
s->Write<bool>(type_args->IsCanonical());
intptr_t hash = Smi::Value(type_args->ptr()->hash_);
s->Write<int32_t>(hash);
s->WriteRef(type_args->ptr()->instantiations_);
for (intptr_t j = 0; j < length; j++) {
s->WriteRef(type_args->ptr()->types()[j]);
}
}
}
private:
GrowableArray<RawTypeArguments*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class TypeArgumentsDeserializationCluster : public DeserializationCluster {
public:
TypeArgumentsDeserializationCluster() {}
virtual ~TypeArgumentsDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
intptr_t length = d->ReadUnsigned();
d->AssignRef(AllocateUninitialized(old_space,
TypeArguments::InstanceSize(length)));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawTypeArguments* type_args =
reinterpret_cast<RawTypeArguments*>(d->Ref(id));
intptr_t length = d->ReadUnsigned();
bool is_canonical = d->Read<bool>();
Deserializer::InitializeHeader(type_args, kTypeArgumentsCid,
TypeArguments::InstanceSize(length),
is_vm_object, is_canonical);
type_args->ptr()->length_ = Smi::New(length);
type_args->ptr()->hash_ = Smi::New(d->Read<int32_t>());
type_args->ptr()->instantiations_ =
reinterpret_cast<RawArray*>(d->ReadRef());
for (intptr_t j = 0; j < length; j++) {
type_args->ptr()->types()[j] =
reinterpret_cast<RawAbstractType*>(d->ReadRef());
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class PatchClassSerializationCluster : public SerializationCluster {
public:
PatchClassSerializationCluster() : SerializationCluster("PatchClass") {}
virtual ~PatchClassSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawPatchClass* cls = PatchClass::RawCast(object);
objects_.Add(cls);
RawObject** from = cls->from();
RawObject** to = cls->to_snapshot(s->kind());
for (RawObject** p = from; p <= to; p++) {
s->Push(*p);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kPatchClassCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawPatchClass* cls = objects_[i];
s->AssignRef(cls);
}
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawPatchClass* cls = objects_[i];
RawObject** from = cls->from();
RawObject** to = cls->to_snapshot(s->kind());
for (RawObject** p = from; p <= to; p++) {
s->WriteRef(*p);
}
if (s->kind() != Snapshot::kFullAOT) {
s->Write<int32_t>(cls->ptr()->library_kernel_offset_);
}
}
}
private:
GrowableArray<RawPatchClass*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class PatchClassDeserializationCluster : public DeserializationCluster {
public:
PatchClassDeserializationCluster() {}
virtual ~PatchClassDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(
AllocateUninitialized(old_space, PatchClass::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawPatchClass* cls = reinterpret_cast<RawPatchClass*>(d->Ref(id));
Deserializer::InitializeHeader(cls, kPatchClassCid,
PatchClass::InstanceSize(), is_vm_object);
RawObject** from = cls->from();
RawObject** to_snapshot = cls->to_snapshot(d->kind());
RawObject** to = cls->to();
for (RawObject** p = from; p <= to_snapshot; p++) {
*p = d->ReadRef();
}
for (RawObject** p = to_snapshot + 1; p <= to; p++) {
*p = Object::null();
}
#if !defined(DART_PRECOMPILED_RUNTIME)
if (d->kind() != Snapshot::kFullAOT) {
cls->ptr()->library_kernel_offset_ = d->Read<int32_t>();
}
#endif
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class FunctionSerializationCluster : public SerializationCluster {
public:
FunctionSerializationCluster() : SerializationCluster("Function") {}
virtual ~FunctionSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawFunction* func = Function::RawCast(object);
objects_.Add(func);
RawObject** from = func->from();
RawObject** to = func->to_snapshot(s->kind());
for (RawObject** p = from; p <= to; p++) {
s->Push(*p);
}
if (s->kind() == Snapshot::kFullAOT) {
s->Push(func->ptr()->code_);
} else if (s->kind() == Snapshot::kFullJIT) {
NOT_IN_PRECOMPILED(s->Push(func->ptr()->unoptimized_code_));
NOT_IN_PRECOMPILED(s->Push(func->ptr()->bytecode_));
s->Push(func->ptr()->code_);
s->Push(func->ptr()->ic_data_array_);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kFunctionCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawFunction* func = objects_[i];
s->AssignRef(func);
}
}
void WriteFill(Serializer* s) {
Snapshot::Kind kind = s->kind();
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawFunction* func = objects_[i];
RawObject** from = func->from();
RawObject** to = func->to_snapshot(s->kind());
for (RawObject** p = from; p <= to; p++) {
s->WriteRef(*p);
}
if (kind == Snapshot::kFullAOT) {
s->WriteRef(func->ptr()->code_);
} else if (s->kind() == Snapshot::kFullJIT) {
NOT_IN_PRECOMPILED(s->WriteRef(func->ptr()->unoptimized_code_));
NOT_IN_PRECOMPILED(s->WriteRef(func->ptr()->bytecode_));
s->WriteRef(func->ptr()->code_);
s->WriteRef(func->ptr()->ic_data_array_);
}
#if !defined(DART_PRECOMPILED_RUNTIME)
if (kind != Snapshot::kFullAOT) {
s->WriteTokenPosition(func->ptr()->token_pos_);
s->WriteTokenPosition(func->ptr()->end_token_pos_);
s->Write<int32_t>(func->ptr()->kernel_offset_);
}
#endif
s->Write<uint32_t>(func->ptr()->packed_fields_);
s->Write<uint32_t>(func->ptr()->kind_tag_);
}
}
private:
GrowableArray<RawFunction*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class FunctionDeserializationCluster : public DeserializationCluster {
public:
FunctionDeserializationCluster() {}
virtual ~FunctionDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(AllocateUninitialized(old_space, Function::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
Snapshot::Kind kind = d->kind();
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawFunction* func = reinterpret_cast<RawFunction*>(d->Ref(id));
Deserializer::InitializeHeader(func, kFunctionCid,
Function::InstanceSize(), is_vm_object);
RawObject** from = func->from();
RawObject** to_snapshot = func->to_snapshot(d->kind());
RawObject** to = func->to();
for (RawObject** p = from; p <= to_snapshot; p++) {
*p = d->ReadRef();
}
for (RawObject** p = to_snapshot + 1; p <= to; p++) {
*p = Object::null();
}
if (kind == Snapshot::kFullAOT) {
func->ptr()->code_ = reinterpret_cast<RawCode*>(d->ReadRef());
} else if (kind == Snapshot::kFullJIT) {
NOT_IN_PRECOMPILED(func->ptr()->unoptimized_code_ =
reinterpret_cast<RawCode*>(d->ReadRef()));
NOT_IN_PRECOMPILED(func->ptr()->bytecode_ =
reinterpret_cast<RawCode*>(d->ReadRef()));
func->ptr()->code_ = reinterpret_cast<RawCode*>(d->ReadRef());
func->ptr()->ic_data_array_ = reinterpret_cast<RawArray*>(d->ReadRef());
}
#if defined(DEBUG)
func->ptr()->entry_point_ = 0;
func->ptr()->unchecked_entry_point_ = 0;
#endif
#if !defined(DART_PRECOMPILED_RUNTIME)
if (kind != Snapshot::kFullAOT) {
func->ptr()->token_pos_ = d->ReadTokenPosition();
func->ptr()->end_token_pos_ = d->ReadTokenPosition();
func->ptr()->kernel_offset_ = d->Read<int32_t>();
}
#endif
func->ptr()->packed_fields_ = d->Read<uint32_t>();
func->ptr()->kind_tag_ = d->Read<uint32_t>();
if (kind == Snapshot::kFullAOT) {
// Omit fields used to support de/reoptimization.
} else {
#if !defined(DART_PRECOMPILED_RUNTIME)
func->ptr()->usage_counter_ = 0;
func->ptr()->optimized_instruction_count_ = 0;
func->ptr()->optimized_call_site_count_ = 0;
func->ptr()->deoptimization_counter_ = 0;
func->ptr()->state_bits_ = 0;
func->ptr()->inlining_depth_ = 0;
#endif
}
}
}
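// Once all references are in place, fix up each function's entry points:
// AOT snapshots take them directly from the attached Code, JIT snapshots
// reuse deserialized code where it is still usable, and everything else
// falls back to the lazy-compile (or interpreter) stub.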
void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
NOT_IN_PRODUCT(TimelineDurationScope tds(
Thread::Current(), Timeline::GetIsolateStream(), "PostLoadFunction"));
if (kind == Snapshot::kFullAOT) {
Function& func = Function::Handle(zone);
for (intptr_t i = start_index_; i < stop_index_; i++) {
func ^= refs.At(i);
ASSERT(func.raw()->ptr()->code_->IsCode());
uword entry_point = func.raw()->ptr()->code_->ptr()->entry_point_;
ASSERT(entry_point != 0);
func.raw()->ptr()->entry_point_ = entry_point;
uword unchecked_entry_point =
func.raw()->ptr()->code_->ptr()->unchecked_entry_point_;
ASSERT(unchecked_entry_point != 0);
func.raw()->ptr()->unchecked_entry_point_ = unchecked_entry_point;
}
} else if (kind == Snapshot::kFullJIT) {
Function& func = Function::Handle(zone);
Code& code = Code::Handle(zone);
for (intptr_t i = start_index_; i < stop_index_; i++) {
func ^= refs.At(i);
code ^= func.CurrentCode();
if (func.HasCode() && !code.IsDisabled()) {
func.SetInstructions(code); // Set entrypoint.
func.SetWasCompiled(true);
#if !defined(DART_PRECOMPILED_RUNTIME)
} else if (FLAG_enable_interpreter && func.HasBytecode()) {
// Set the code entry_point to InterpretCall stub.
func.SetInstructions(
Code::Handle(StubCode::InterpretCall_entry()->code()));
} else if (FLAG_use_bytecode_compiler && func.HasBytecode()) {
func.SetInstructions(
Code::Handle(StubCode::LazyCompile_entry()->code()));
#endif // !defined(DART_PRECOMPILED_RUNTIME)
} else {
func.ClearCode(); // Set code and entrypoint to lazy compile stub.
}
}
} else {
Function& func = Function::Handle(zone);
for (intptr_t i = start_index_; i < stop_index_; i++) {
func ^= refs.At(i);
func.ClearCode(); // Set code and entrypoint to lazy compile stub.
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class ClosureDataSerializationCluster : public SerializationCluster {
public:
ClosureDataSerializationCluster() : SerializationCluster("ClosureData") {}
virtual ~ClosureDataSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawClosureData* data = ClosureData::RawCast(object);
objects_.Add(data);
if (s->kind() != Snapshot::kFullAOT) {
s->Push(data->ptr()->context_scope_);
}
s->Push(data->ptr()->parent_function_);
s->Push(data->ptr()->signature_type_);
s->Push(data->ptr()->closure_);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kClosureDataCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawClosureData* data = objects_[i];
s->AssignRef(data);
}
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawClosureData* data = objects_[i];
if (s->kind() != Snapshot::kFullAOT) {
s->WriteRef(data->ptr()->context_scope_);
}
s->WriteRef(data->ptr()->parent_function_);
s->WriteRef(data->ptr()->signature_type_);
s->WriteRef(data->ptr()->closure_);
}
}
private:
GrowableArray<RawClosureData*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class ClosureDataDeserializationCluster : public DeserializationCluster {
public:
ClosureDataDeserializationCluster() {}
virtual ~ClosureDataDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(
AllocateUninitialized(old_space, ClosureData::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawClosureData* data = reinterpret_cast<RawClosureData*>(d->Ref(id));
Deserializer::InitializeHeader(data, kClosureDataCid,
ClosureData::InstanceSize(), is_vm_object);
if (d->kind() == Snapshot::kFullAOT) {
data->ptr()->context_scope_ = ContextScope::null();
} else {
data->ptr()->context_scope_ =
static_cast<RawContextScope*>(d->ReadRef());
}
data->ptr()->parent_function_ = static_cast<RawFunction*>(d->ReadRef());
data->ptr()->signature_type_ = static_cast<RawType*>(d->ReadRef());
data->ptr()->closure_ = static_cast<RawInstance*>(d->ReadRef());
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class SignatureDataSerializationCluster : public SerializationCluster {
public:
SignatureDataSerializationCluster() : SerializationCluster("SignatureData") {}
virtual ~SignatureDataSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawSignatureData* data = SignatureData::RawCast(object);
objects_.Add(data);
RawObject** from = data->from();
RawObject** to = data->to();
for (RawObject** p = from; p <= to; p++) {
s->Push(*p);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kSignatureDataCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawSignatureData* data = objects_[i];
s->AssignRef(data);
}
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawSignatureData* data = objects_[i];
RawObject** from = data->from();
RawObject** to = data->to();
for (RawObject** p = from; p <= to; p++) {
s->WriteRef(*p);
}
}
}
private:
GrowableArray<RawSignatureData*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class SignatureDataDeserializationCluster : public DeserializationCluster {
public:
SignatureDataDeserializationCluster() {}
virtual ~SignatureDataDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(
AllocateUninitialized(old_space, SignatureData::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawSignatureData* data = reinterpret_cast<RawSignatureData*>(d->Ref(id));
Deserializer::InitializeHeader(
data, kSignatureDataCid, SignatureData::InstanceSize(), is_vm_object);
RawObject** from = data->from();
RawObject** to = data->to();
for (RawObject** p = from; p <= to; p++) {
*p = d->ReadRef();
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class RedirectionDataSerializationCluster : public SerializationCluster {
public:
RedirectionDataSerializationCluster()
: SerializationCluster("RedirectionData") {}
virtual ~RedirectionDataSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawRedirectionData* data = RedirectionData::RawCast(object);
objects_.Add(data);
RawObject** from = data->from();
RawObject** to = data->to();
for (RawObject** p = from; p <= to; p++) {
s->Push(*p);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kRedirectionDataCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawRedirectionData* data = objects_[i];
s->AssignRef(data);
}
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawRedirectionData* data = objects_[i];
RawObject** from = data->from();
RawObject** to = data->to();
for (RawObject** p = from; p <= to; p++) {
s->WriteRef(*p);
}
}
}
private:
GrowableArray<RawRedirectionData*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class RedirectionDataDeserializationCluster : public DeserializationCluster {
public:
RedirectionDataDeserializationCluster() {}
virtual ~RedirectionDataDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(
AllocateUninitialized(old_space, RedirectionData::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawRedirectionData* data =
reinterpret_cast<RawRedirectionData*>(d->Ref(id));
Deserializer::InitializeHeader(data, kRedirectionDataCid,
RedirectionData::InstanceSize(),
is_vm_object);
RawObject** from = data->from();
RawObject** to = data->to();
for (RawObject** p = from; p <= to; p++) {
*p = d->ReadRef();
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class FieldSerializationCluster : public SerializationCluster {
public:
FieldSerializationCluster() : SerializationCluster("Field") {}
virtual ~FieldSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawField* field = Field::RawCast(object);
objects_.Add(field);
Snapshot::Kind kind = s->kind();
s->Push(field->ptr()->name_);
s->Push(field->ptr()->owner_);
s->Push(field->ptr()->type_);
// Write out the initial static value or field offset.
if (Field::StaticBit::decode(field->ptr()->kind_bits_)) {
if (kind == Snapshot::kFullAOT) {
// For precompiled static fields, the value was already reset and
// initializer_ now contains a Function.
s->Push(field->ptr()->value_.static_value_);
} else if (Field::ConstBit::decode(field->ptr()->kind_bits_)) {
// Do not reset const fields.
s->Push(field->ptr()->value_.static_value_);
} else {
// Otherwise, for static fields we write out the initial static value.
s->Push(field->ptr()->initializer_.saved_value_);
}
} else {
s->Push(field->ptr()->value_.offset_);
}
// Write out the initializer function or saved initial value.
if (kind == Snapshot::kFullAOT) {
s->Push(field->ptr()->initializer_.precompiled_);
} else {
s->Push(field->ptr()->initializer_.saved_value_);
}
if (kind != Snapshot::kFullAOT) {
// Write out the guarded list length.
s->Push(field->ptr()->guarded_list_length_);
}
if (kind == Snapshot::kFullJIT) {
s->Push(field->ptr()->dependent_code_);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kFieldCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawField* field = objects_[i];
s->AssignRef(field);
}
}
void WriteFill(Serializer* s) {
Snapshot::Kind kind = s->kind();
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawField* field = objects_[i];
s->WriteRef(field->ptr()->name_);
s->WriteRef(field->ptr()->owner_);
s->WriteRef(field->ptr()->type_);
// Write out the initial static value or field offset.
if (Field::StaticBit::decode(field->ptr()->kind_bits_)) {
if (kind == Snapshot::kFullAOT) {
// For precompiled static fields, the value was already reset and
// initializer_ now contains a Function.
s->WriteRef(field->ptr()->value_.static_value_);
} else if (Field::ConstBit::decode(field->ptr()->kind_bits_)) {
// Do not reset const fields.
s->WriteRef(field->ptr()->value_.static_value_);
} else {
// Otherwise, for static fields we write out the initial static value.
s->WriteRef(field->ptr()->initializer_.saved_value_);
}
} else {
s->WriteRef(field->ptr()->value_.offset_);
}
// Write out the initializer function or saved initial value.
if (kind == Snapshot::kFullAOT) {
s->WriteRef(field->ptr()->initializer_.precompiled_);
} else {
s->WriteRef(field->ptr()->initializer_.saved_value_);
}
if (kind != Snapshot::kFullAOT) {
// Write out the guarded list length.
s->WriteRef(field->ptr()->guarded_list_length_);
}
if (kind == Snapshot::kFullJIT) {
s->WriteRef(field->ptr()->dependent_code_);
}
if (kind != Snapshot::kFullAOT) {
s->WriteTokenPosition(field->ptr()->token_pos_);
s->WriteTokenPosition(field->ptr()->end_token_pos_);
s->WriteCid(field->ptr()->guarded_cid_);
s->WriteCid(field->ptr()->is_nullable_);
s->Write<int8_t>(field->ptr()->static_type_exactness_state_);
#if !defined(DART_PRECOMPILED_RUNTIME)
s->Write<int32_t>(field->ptr()->kernel_offset_);
#endif
}
s->Write<uint8_t>(field->ptr()->kind_bits_);
}
}
private:
GrowableArray<RawField*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class FieldDeserializationCluster : public DeserializationCluster {
public:
FieldDeserializationCluster() {}
virtual ~FieldDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(AllocateUninitialized(old_space, Field::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
Snapshot::Kind kind = d->kind();
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawField* field = reinterpret_cast<RawField*>(d->Ref(id));
Deserializer::InitializeHeader(field, kFieldCid, Field::InstanceSize(),
is_vm_object);
RawObject** from = field->from();
RawObject** to_snapshot = field->to_snapshot(kind);
RawObject** to = field->to();
for (RawObject** p = from; p <= to_snapshot; p++) {
*p = d->ReadRef();
}
for (RawObject** p = to_snapshot + 1; p <= to; p++) {
*p = Object::null();
}
if (kind != Snapshot::kFullAOT) {
field->ptr()->token_pos_ = d->ReadTokenPosition();
field->ptr()->end_token_pos_ = d->ReadTokenPosition();
field->ptr()->guarded_cid_ = d->ReadCid();
field->ptr()->is_nullable_ = d->ReadCid();
field->ptr()->static_type_exactness_state_ = d->Read<int8_t>();
#if !defined(DART_PRECOMPILED_RUNTIME)
field->ptr()->kernel_offset_ = d->Read<int32_t>();
#endif
}
field->ptr()->kind_bits_ = d->Read<uint8_t>();
}
}
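// With field guards disabled, widen every guard so it accepts any value;
// otherwise recompute the cached in-object offset used by list-length guards.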
void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
NOT_IN_PRODUCT(TimelineDurationScope tds(
Thread::Current(), Timeline::GetIsolateStream(), "PostLoadField"));
Field& field = Field::Handle(zone);
if (!Isolate::Current()->use_field_guards()) {
for (intptr_t i = start_index_; i < stop_index_; i++) {
field ^= refs.At(i);
field.set_guarded_cid(kDynamicCid);
field.set_is_nullable(true);
field.set_guarded_list_length(Field::kNoFixedLength);
field.set_guarded_list_length_in_object_offset(
Field::kUnknownLengthOffset);
field.set_static_type_exactness_state(
StaticTypeExactnessState::NotTracking());
}
} else {
for (intptr_t i = start_index_; i < stop_index_; i++) {
field ^= refs.At(i);
field.InitializeGuardedListLengthInObjectOffset();
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class ScriptSerializationCluster : public SerializationCluster {
public:
ScriptSerializationCluster() : SerializationCluster("Script") {}
virtual ~ScriptSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawScript* script = Script::RawCast(object);
objects_.Add(script);
RawObject** from = script->from();
RawObject** to = script->to_snapshot(s->kind());
for (RawObject** p = from; p <= to; p++) {
s->Push(*p);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kScriptCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawScript* script = objects_[i];
s->AssignRef(script);
}
}
void WriteFill(Serializer* s) {
Snapshot::Kind kind = s->kind();
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawScript* script = objects_[i];
RawObject** from = script->from();
RawObject** to = script->to_snapshot(kind);
for (RawObject** p = from; p <= to; p++) {
s->WriteRef(*p);
}
s->Write<int32_t>(script->ptr()->line_offset_);
s->Write<int32_t>(script->ptr()->col_offset_);
s->Write<int8_t>(script->ptr()->kind_);
s->Write<int32_t>(script->ptr()->kernel_script_index_);
}
}
private:
GrowableArray<RawScript*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class ScriptDeserializationCluster : public DeserializationCluster {
public:
ScriptDeserializationCluster() {}
virtual ~ScriptDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(AllocateUninitialized(old_space, Script::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
Snapshot::Kind kind = d->kind();
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawScript* script = reinterpret_cast<RawScript*>(d->Ref(id));
Deserializer::InitializeHeader(script, kScriptCid, Script::InstanceSize(),
is_vm_object);
RawObject** from = script->from();
RawObject** to_snapshot = script->to_snapshot(kind);
RawObject** to = script->to();
for (RawObject** p = from; p <= to_snapshot; p++) {
*p = d->ReadRef();
}
for (RawObject** p = to_snapshot + 1; p <= to; p++) {
*p = Object::null();
}
script->ptr()->line_offset_ = d->Read<int32_t>();
script->ptr()->col_offset_ = d->Read<int32_t>();
script->ptr()->kind_ = d->Read<int8_t>();
script->ptr()->kernel_script_index_ = d->Read<int32_t>();
script->ptr()->load_timestamp_ = 0;
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class LibrarySerializationCluster : public SerializationCluster {
public:
LibrarySerializationCluster() : SerializationCluster("Library") {}
virtual ~LibrarySerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawLibrary* lib = Library::RawCast(object);
objects_.Add(lib);
RawObject** from = lib->from();
RawObject** to = lib->to_snapshot(s->kind());
for (RawObject** p = from; p <= to; p++) {
s->Push(*p);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kLibraryCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawLibrary* lib = objects_[i];
s->AssignRef(lib);
}
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawLibrary* lib = objects_[i];
RawObject** from = lib->from();
RawObject** to = lib->to_snapshot(s->kind());
for (RawObject** p = from; p <= to; p++) {
s->WriteRef(*p);
}
s->Write<int32_t>(lib->ptr()->index_);
s->Write<uint16_t>(lib->ptr()->num_imports_);
s->Write<int8_t>(lib->ptr()->load_state_);
s->Write<bool>(lib->ptr()->corelib_imported_);
s->Write<bool>(lib->ptr()->is_dart_scheme_);
s->Write<bool>(lib->ptr()->debuggable_);
if (s->kind() != Snapshot::kFullAOT) {
s->Write<int32_t>(lib->ptr()->kernel_offset_);
}
}
}
private:
GrowableArray<RawLibrary*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class LibraryDeserializationCluster : public DeserializationCluster {
public:
LibraryDeserializationCluster() {}
virtual ~LibraryDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(AllocateUninitialized(old_space, Library::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawLibrary* lib = reinterpret_cast<RawLibrary*>(d->Ref(id));
Deserializer::InitializeHeader(lib, kLibraryCid, Library::InstanceSize(),
is_vm_object);
RawObject** from = lib->from();
RawObject** to_snapshot = lib->to_snapshot(d->kind());
RawObject** to = lib->to();
for (RawObject** p = from; p <= to_snapshot; p++) {
*p = d->ReadRef();
}
for (RawObject** p = to_snapshot + 1; p <= to; p++) {
*p = Object::null();
}
lib->ptr()->native_entry_resolver_ = NULL;
lib->ptr()->native_entry_symbol_resolver_ = NULL;
lib->ptr()->index_ = d->Read<int32_t>();
lib->ptr()->num_imports_ = d->Read<uint16_t>();
lib->ptr()->load_state_ = d->Read<int8_t>();
lib->ptr()->corelib_imported_ = d->Read<bool>();
lib->ptr()->is_dart_scheme_ = d->Read<bool>();
lib->ptr()->debuggable_ = d->Read<bool>();
lib->ptr()->is_in_fullsnapshot_ = true;
#if !defined(DART_PRECOMPILED_RUNTIME)
if (d->kind() != Snapshot::kFullAOT) {
lib->ptr()->kernel_offset_ = d->Read<int32_t>();
}
#endif
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class NamespaceSerializationCluster : public SerializationCluster {
public:
NamespaceSerializationCluster() : SerializationCluster("Namespace") {}
virtual ~NamespaceSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawNamespace* ns = Namespace::RawCast(object);
objects_.Add(ns);
RawObject** from = ns->from();
RawObject** to = ns->to();
for (RawObject** p = from; p <= to; p++) {
s->Push(*p);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kNamespaceCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawNamespace* ns = objects_[i];
s->AssignRef(ns);
}
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawNamespace* ns = objects_[i];
RawObject** from = ns->from();
RawObject** to = ns->to();
for (RawObject** p = from; p <= to; p++) {
s->WriteRef(*p);
}
}
}
private:
GrowableArray<RawNamespace*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class NamespaceDeserializationCluster : public DeserializationCluster {
public:
NamespaceDeserializationCluster() {}
virtual ~NamespaceDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(AllocateUninitialized(old_space, Namespace::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawNamespace* ns = reinterpret_cast<RawNamespace*>(d->Ref(id));
Deserializer::InitializeHeader(ns, kNamespaceCid,
Namespace::InstanceSize(), is_vm_object);
RawObject** from = ns->from();
RawObject** to = ns->to();
for (RawObject** p = from; p <= to; p++) {
*p = d->ReadRef();
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
// KernelProgramInfo objects are not written into a full AOT snapshot.
class KernelProgramInfoSerializationCluster : public SerializationCluster {
public:
KernelProgramInfoSerializationCluster()
: SerializationCluster("KernelProgramInfo") {}
virtual ~KernelProgramInfoSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawKernelProgramInfo* info = KernelProgramInfo::RawCast(object);
objects_.Add(info);
RawObject** from = info->from();
RawObject** to = info->to_snapshot(s->kind());
for (RawObject** p = from; p <= to; p++) {
s->Push(*p);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kKernelProgramInfoCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawKernelProgramInfo* info = objects_[i];
s->AssignRef(info);
}
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawKernelProgramInfo* info = objects_[i];
RawObject** from = info->from();
RawObject** to = info->to_snapshot(s->kind());
for (RawObject** p = from; p <= to; p++) {
s->WriteRef(*p);
}
}
}
private:
GrowableArray<RawKernelProgramInfo*> objects_;
};
// Since KernelProgramInfo objects are not written into full AOT snapshots,
// one will never need to read them from a full AOT snapshot.
class KernelProgramInfoDeserializationCluster : public DeserializationCluster {
public:
KernelProgramInfoDeserializationCluster() {}
virtual ~KernelProgramInfoDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(
AllocateUninitialized(old_space, KernelProgramInfo::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawKernelProgramInfo* info =
reinterpret_cast<RawKernelProgramInfo*>(d->Ref(id));
Deserializer::InitializeHeader(info, kKernelProgramInfoCid,
KernelProgramInfo::InstanceSize(),
is_vm_object);
RawObject** from = info->from();
RawObject** to = info->to_snapshot(d->kind());
RawObject** end = info->to();
for (RawObject** p = from; p <= to; p++) {
*p = d->ReadRef();
}
for (RawObject** p = to + 1; p <= end; p++) {
*p = Object::null();
}
}
}
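// The library and class lookup caches are not serialized; allocate fresh,
// empty hash maps for them after load.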
void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
Array& array_ = Array::Handle(zone);
KernelProgramInfo& info_ = KernelProgramInfo::Handle(zone);
for (intptr_t id = start_index_; id < stop_index_; id++) {
info_ ^= refs.At(id);
array_ = HashTables::New<UnorderedHashMap<SmiTraits>>(16, Heap::kOld);
info_.set_libraries_cache(array_);
array_ = HashTables::New<UnorderedHashMap<SmiTraits>>(16, Heap::kOld);
info_.set_classes_cache(array_);
}
}
};
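// Code objects route their machine code through WriteInstructions, which
// records it in the instructions image rather than inline in the cluster
// stream.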
class CodeSerializationCluster : public SerializationCluster {
public:
CodeSerializationCluster() : SerializationCluster("Code") {}
virtual ~CodeSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawCode* code = Code::RawCast(object);
objects_.Add(code);
s->Push(code->ptr()->object_pool_);
s->Push(code->ptr()->owner_);
s->Push(code->ptr()->exception_handlers_);
s->Push(code->ptr()->pc_descriptors_);
#if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER)
s->Push(code->ptr()->catch_entry_.catch_entry_moves_maps_);
#else
s->Push(code->ptr()->catch_entry_.variables_);
#endif
s->Push(code->ptr()->stackmaps_);
if (!FLAG_dwarf_stack_traces) {
s->Push(code->ptr()->inlined_id_to_function_);
s->Push(code->ptr()->code_source_map_);
}
if (s->kind() == Snapshot::kFullJIT) {
s->Push(code->ptr()->deopt_info_array_);
s->Push(code->ptr()->static_calls_target_table_);
}
NOT_IN_PRODUCT(s->Push(code->ptr()->await_token_positions_));
NOT_IN_PRODUCT(s->Push(code->ptr()->return_address_metadata_));
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kCodeCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawCode* code = objects_[i];
s->AssignRef(code);
}
}
void WriteFill(Serializer* s) {
Snapshot::Kind kind = s->kind();
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawCode* code = objects_[i];
intptr_t pointer_offsets_length =
Code::PtrOffBits::decode(code->ptr()->state_bits_);
if (pointer_offsets_length != 0) {
FATAL("Cannot serialize code with embedded pointers");
}
if (kind == Snapshot::kFullAOT) {
if (code->ptr()->instructions_ != code->ptr()->active_instructions_) {
// Disabled code is fatal in AOT since we cannot recompile.
s->UnexpectedObject(code, "Disabled code");
}
}
s->WriteInstructions(code->ptr()->instructions_, code);
if (s->kind() == Snapshot::kFullJIT) {
// TODO(rmacnak): Fix references to disabled code before serializing.
// For now, we may write the FixCallersTarget or equivalent stub. This
// will cause a fixup if this code is called.
s->WriteInstructions(code->ptr()->active_instructions_, code);
}
s->WriteRef(code->ptr()->object_pool_);
s->WriteRef(code->ptr()->owner_);
s->WriteRef(code->ptr()->exception_handlers_);
s->WriteRef(code->ptr()->pc_descriptors_);
#if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER)
s->WriteRef(code->ptr()->catch_entry_.catch_entry_moves_maps_);
#else
s->WriteRef(code->ptr()->catch_entry_.variables_);
#endif
s->WriteRef(code->ptr()->stackmaps_);
if (FLAG_dwarf_stack_traces) {
s->WriteRef(Array::null());
s->WriteRef(CodeSourceMap::null());
} else {
s->WriteRef(code->ptr()->inlined_id_to_function_);
s->WriteRef(code->ptr()->code_source_map_);
}
if (s->kind() == Snapshot::kFullJIT) {
s->WriteRef(code->ptr()->deopt_info_array_);
s->WriteRef(code->ptr()->static_calls_target_table_);
}
NOT_IN_PRODUCT(s->WriteRef(code->ptr()->await_token_positions_));
NOT_IN_PRODUCT(s->WriteRef(code->ptr()->return_address_metadata_));
s->Write<int32_t>(code->ptr()->state_bits_);
}
}
private:
GrowableArray<RawCode*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class CodeDeserializationCluster : public DeserializationCluster {
public:
CodeDeserializationCluster() {}
virtual ~CodeDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(AllocateUninitialized(old_space, Code::InstanceSize(0)));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawCode* code = reinterpret_cast<RawCode*>(d->Ref(id));
Deserializer::InitializeHeader(code, kCodeCid, Code::InstanceSize(0),
is_vm_object);
RawInstructions* instr = d->ReadInstructions();
code->ptr()->entry_point_ = Instructions::EntryPoint(instr);
code->ptr()->monomorphic_entry_point_ =
Instructions::MonomorphicEntryPoint(instr);
NOT_IN_PRECOMPILED(code->ptr()->active_instructions_ = instr);
code->ptr()->instructions_ = instr;
code->ptr()->unchecked_entry_point_ =
Instructions::UncheckedEntryPoint(instr);
#if !defined(DART_PRECOMPILED_RUNTIME)
if (d->kind() == Snapshot::kFullJIT) {
RawInstructions* instr = d->ReadInstructions();
code->ptr()->active_instructions_ = instr;
code->ptr()->entry_point_ = Instructions::EntryPoint(instr);
code->ptr()->monomorphic_entry_point_ =
Instructions::MonomorphicEntryPoint(instr);
code->ptr()->unchecked_entry_point_ =
Instructions::UncheckedEntryPoint(instr);
}
#endif // !DART_PRECOMPILED_RUNTIME
code->ptr()->object_pool_ =
reinterpret_cast<RawObjectPool*>(d->ReadRef());
code->ptr()->owner_ = d->ReadRef();
code->ptr()->exception_handlers_ =
reinterpret_cast<RawExceptionHandlers*>(d->ReadRef());
code->ptr()->pc_descriptors_ =
reinterpret_cast<RawPcDescriptors*>(d->ReadRef());
#if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER)
code->ptr()->catch_entry_.catch_entry_moves_maps_ =
reinterpret_cast<RawTypedData*>(d->ReadRef());
#else
code->ptr()->catch_entry_.variables_ =
reinterpret_cast<RawSmi*>(d->ReadRef());
#endif
code->ptr()->stackmaps_ = reinterpret_cast<RawArray*>(d->ReadRef());
code->ptr()->inlined_id_to_function_ =
reinterpret_cast<RawArray*>(d->ReadRef());
code->ptr()->code_source_map_ =
reinterpret_cast<RawCodeSourceMap*>(d->ReadRef());
#if !defined(DART_PRECOMPILED_RUNTIME)
if (d->kind() == Snapshot::kFullJIT) {
code->ptr()->deopt_info_array_ =
reinterpret_cast<RawArray*>(d->ReadRef());
code->ptr()->static_calls_target_table_ =
reinterpret_cast<RawArray*>(d->ReadRef());
}
#endif // !DART_PRECOMPILED_RUNTIME
#if !defined(PRODUCT)
code->ptr()->await_token_positions_ =
reinterpret_cast<RawArray*>(d->ReadRef());
code->ptr()->return_address_metadata_ = d->ReadRef();
code->ptr()->var_descriptors_ = LocalVarDescriptors::null();
code->ptr()->comments_ = Array::null();
code->ptr()->compile_timestamp_ = 0;
#endif
code->ptr()->state_bits_ = d->Read<int32_t>();
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class ObjectPoolSerializationCluster : public SerializationCluster {
public:
ObjectPoolSerializationCluster() : SerializationCluster("ObjectPool") {}
virtual ~ObjectPoolSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawObjectPool* pool = ObjectPool::RawCast(object);
objects_.Add(pool);
intptr_t length = pool->ptr()->length_;
uint8_t* entry_bits = pool->ptr()->entry_bits();
for (intptr_t i = 0; i < length; i++) {
auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
if ((entry_type == ObjectPool::kTaggedObject) ||
(entry_type == ObjectPool::kNativeEntryData)) {
s->Push(pool->ptr()->data()[i].raw_obj_);
}
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kObjectPoolCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawObjectPool* pool = objects_[i];
intptr_t length = pool->ptr()->length_;
s->WriteUnsigned(length);
s->AssignRef(pool);
}
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawObjectPool* pool = objects_[i];
intptr_t length = pool->ptr()->length_;
s->WriteUnsigned(length);
uint8_t* entry_bits = pool->ptr()->entry_bits();
for (intptr_t j = 0; j < length; j++) {
s->Write<uint8_t>(entry_bits[j]);
RawObjectPool::Entry& entry = pool->ptr()->data()[j];
switch (ObjectPool::TypeBits::decode(entry_bits[j])) {
case ObjectPool::kTaggedObject: {
#if !defined(TARGET_ARCH_DBC)
if ((entry.raw_obj_ ==
StubCode::CallNoScopeNative_entry()->code()) ||
(entry.raw_obj_ ==
StubCode::CallAutoScopeNative_entry()->code())) {
// Natives can run while precompiling, becoming linked and
// switching their stub. Reset to the initial stub used for
// lazy-linking.
s->WriteRef(StubCode::CallBootstrapNative_entry()->code());
break;
}
#endif
s->WriteRef(entry.raw_obj_);
break;
}
case ObjectPool::kImmediate: {
s->Write<intptr_t>(entry.raw_value_);
break;
}
case ObjectPool::kNativeEntryData: {
RawObject* raw = entry.raw_obj_;
RawTypedData* raw_data = reinterpret_cast<RawTypedData*>(raw);
// kNativeEntryData object pool entries are for linking natives for
// the interpreter. Before writing these entries into the snapshot,
// we need to unlink them by nulling out the 'trampoline' and
// 'native_function' fields.
NativeEntryData::Payload* payload =
NativeEntryData::FromTypedArray(raw_data);
if (payload->kind == MethodRecognizer::kUnknown) {
payload->trampoline = NULL;
payload->native_function = NULL;
}
s->WriteRef(raw);
break;
}
case ObjectPool::kNativeFunction:
case ObjectPool::kNativeFunctionWrapper: {
// Write nothing. Will initialize with the lazy link entry.
break;
}
default:
UNREACHABLE();
}
}
}
}
private:
GrowableArray<RawObjectPool*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class ObjectPoolDeserializationCluster : public DeserializationCluster {
public:
ObjectPoolDeserializationCluster() {}
virtual ~ObjectPoolDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
intptr_t length = d->ReadUnsigned();
d->AssignRef(
AllocateUninitialized(old_space, ObjectPool::InstanceSize(length)));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id += 1) {
intptr_t length = d->ReadUnsigned();
RawObjectPool* pool = reinterpret_cast<RawObjectPool*>(d->Ref(id + 0));
Deserializer::InitializeHeader(
pool, kObjectPoolCid, ObjectPool::InstanceSize(length), is_vm_object);
pool->ptr()->length_ = length;
for (intptr_t j = 0; j < length; j++) {
const uint8_t entry_bits = d->Read<uint8_t>();
pool->ptr()->entry_bits()[j] = entry_bits;
RawObjectPool::Entry& entry = pool->ptr()->data()[j];
switch (ObjectPool::TypeBits::decode(entry_bits)) {
case ObjectPool::kNativeEntryData:
case ObjectPool::kTaggedObject:
entry.raw_obj_ = d->ReadRef();
break;
case ObjectPool::kImmediate:
entry.raw_value_ = d->Read<intptr_t>();
break;
case ObjectPool::kNativeFunction: {
// Read nothing. Initialize with the lazy link entry.
uword new_entry = NativeEntry::LinkNativeCallEntry();
entry.raw_value_ = static_cast<intptr_t>(new_entry);
break;
}
#if defined(TARGET_ARCH_DBC)
case ObjectPool::kNativeFunctionWrapper: {
// Read nothing. Initialize with the lazy link entry.
uword new_entry = NativeEntry::BootstrapNativeCallWrapperEntry();
entry.raw_value_ = static_cast<intptr_t>(new_entry);
break;
}
#endif
default:
UNREACHABLE();
}
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
// PcDescriptor, StackMap, OneByteString, TwoByteString
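// These objects live in the read-only data image; the cluster only records
// each object's offset into that image, so WriteFill() has nothing to do.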
class RODataSerializationCluster : public SerializationCluster {
public:
RODataSerializationCluster(const char* name, intptr_t cid)
: SerializationCluster(name), cid_(cid) {}
virtual ~RODataSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
// A string's hash must already be computed when we write it because it
// will be loaded into read-only memory. Extra bytes due to allocation
// rounding need to be deterministically set for reliable deduplication in
// shared images.
if (object->IsVMHeapObject()) {
// This object is already read-only.
} else {
Object::FinalizeReadOnlyObject(object);
}
uint32_t ignored;
if (s->GetSharedDataOffset(object, &ignored)) {
shared_objects_.Add(object);
} else {
objects_.Add(object);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(cid_);
intptr_t count = shared_objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawObject* object = shared_objects_[i];
uint32_t offset;
if (!s->GetSharedDataOffset(object, &offset)) {
UNREACHABLE();
}
s->WriteUnsigned(offset);
s->AssignRef(object);
}
count = objects_.length();
s->WriteUnsigned(count);
uint32_t running_offset = 0;
for (intptr_t i = 0; i < count; i++) {
RawObject* object = objects_[i];
uint32_t offset = s->GetDataOffset(object);
ASSERT(Utils::IsAligned(offset, kObjectAlignment));
ASSERT(offset > running_offset);
s->WriteUnsigned((offset - running_offset) >> kObjectAlignmentLog2);
running_offset = offset;
s->AssignRef(object);
}
}
void WriteFill(Serializer* s) {
// No-op.
}
private:
const intptr_t cid_;
GrowableArray<RawObject*> objects_;
GrowableArray<RawObject*> shared_objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
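// The read side resolves each ref directly to an object in the shared image
// or the snapshot's data image using the offsets written above, so there is
// nothing to fill in.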
class RODataDeserializationCluster : public DeserializationCluster {
public:
RODataDeserializationCluster() {}
virtual ~RODataDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
uint32_t offset = d->ReadUnsigned();
d->AssignRef(d->GetSharedObjectAt(offset));
}
count = d->ReadUnsigned();
uint32_t running_offset = 0;
for (intptr_t i = 0; i < count; i++) {
running_offset += d->ReadUnsigned() << kObjectAlignmentLog2;
d->AssignRef(d->GetObjectAt(running_offset));
}
}
void ReadFill(Deserializer* d) {
// No-op.
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
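// ExceptionHandlers wire format: the entry count is written in both the
// alloc and fill phases; the fill phase then writes a ref to the
// handled-types array followed by the fixed-size fields of each
// ExceptionHandlerInfo.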
class ExceptionHandlersSerializationCluster : public SerializationCluster {
public:
ExceptionHandlersSerializationCluster()
: SerializationCluster("ExceptionHandlers") {}
virtual ~ExceptionHandlersSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawExceptionHandlers* handlers = ExceptionHandlers::RawCast(object);
objects_.Add(handlers);
s->Push(handlers->ptr()->handled_types_data_);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kExceptionHandlersCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawExceptionHandlers* handlers = objects_[i];
intptr_t length = handlers->ptr()->num_entries_;
s->WriteUnsigned(length);
s->AssignRef(handlers);
}
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawExceptionHandlers* handlers = objects_[i];
intptr_t length = handlers->ptr()->num_entries_;
s->WriteUnsigned(length);
s->WriteRef(handlers->ptr()->handled_types_data_);
for (intptr_t j = 0; j < length; j++) {
const ExceptionHandlerInfo& info = handlers->ptr()->data()[j];
s->Write<uint32_t>(info.handler_pc_offset);
s->Write<int16_t>(info.outer_try_index);
s->Write<int8_t>(info.needs_stacktrace);
s->Write<int8_t>(info.has_catch_all);
s->Write<int8_t>(info.is_generated);
}
}
}
private:
GrowableArray<RawExceptionHandlers*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
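// Rebuilds each ExceptionHandlers object from the layout written above,
// sizing the allocation from the length recorded in the alloc phase.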
class ExceptionHandlersDeserializationCluster : public DeserializationCluster {
public:
ExceptionHandlersDeserializationCluster() {}
virtual ~ExceptionHandlersDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
intptr_t length = d->ReadUnsigned();
d->AssignRef(AllocateUninitialized(
old_space, ExceptionHandlers::InstanceSize(length)));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawExceptionHandlers* handlers =
reinterpret_cast<RawExceptionHandlers*>(d->Ref(id));
intptr_t length = d->ReadUnsigned();
Deserializer::InitializeHeader(handlers, kExceptionHandlersCid,
ExceptionHandlers::InstanceSize(length),
is_vm_object);
handlers->ptr()->num_entries_ = length;
handlers->ptr()->handled_types_data_ =
reinterpret_cast<RawArray*>(d->ReadRef());
for (intptr_t j = 0; j < length; j++) {
ExceptionHandlerInfo& info = handlers->ptr()->data()[j];
info.handler_pc_offset = d->Read<uint32_t>();
info.outer_try_index = d->Read<int16_t>();
info.needs_stacktrace = d->Read<int8_t>();
info.has_catch_all = d->Read<int8_t>();
info.is_generated = d->Read<int8_t>();
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
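// Contexts are variable-length: the alloc phase records the variable count,
// and the fill phase writes the parent ref followed by one ref per variable
// slot.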
class ContextSerializationCluster : public SerializationCluster {
public:
ContextSerializationCluster() : SerializationCluster("Context") {}
virtual ~ContextSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawContext* context = Context::RawCast(object);
objects_.Add(context);
s->Push(context->ptr()->parent_);
intptr_t length = context->ptr()->num_variables_;
for (intptr_t i = 0; i < length; i++) {
s->Push(context->ptr()->data()[i]);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kContextCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawContext* context = objects_[i];
intptr_t length = context->ptr()->num_variables_;
s->WriteUnsigned(length);
s->AssignRef(context);
}
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawContext* context = objects_[i];
intptr_t length = context->ptr()->num_variables_;
s->WriteUnsigned(length);
s->WriteRef(context->ptr()->parent_);
for (intptr_t j = 0; j < length; j++) {
s->WriteRef(context->ptr()->data()[j]);
}
}
}
private:
GrowableArray<RawContext*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class ContextDeserializationCluster : public DeserializationCluster {
public:
ContextDeserializationCluster() {}
virtual ~ContextDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
intptr_t length = d->ReadUnsigned();
d->AssignRef(
AllocateUninitialized(old_space, Context::InstanceSize(length)));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawContext* context = reinterpret_cast<RawContext*>(d->Ref(id));
intptr_t length = d->ReadUnsigned();
Deserializer::InitializeHeader(
context, kContextCid, Context::InstanceSize(length), is_vm_object);
context->ptr()->num_variables_ = length;
context->ptr()->parent_ = reinterpret_cast<RawContext*>(d->ReadRef());
for (intptr_t j = 0; j < length; j++) {
context->ptr()->data()[j] = d->ReadRef();
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
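// ContextScopes are also variable-length; the fill phase writes the
// is_implicit_ flag and then the pointer fields covered by from()/to(length).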
class ContextScopeSerializationCluster : public SerializationCluster {
public:
ContextScopeSerializationCluster() : SerializationCluster("ContextScope") {}
virtual ~ContextScopeSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawContextScope* scope = ContextScope::RawCast(object);
objects_.Add(scope);
intptr_t length = scope->ptr()->num_variables_;
RawObject** from = scope->from();
RawObject** to = scope->to(length);
for (RawObject** p = from; p <= to; p++) {
s->Push(*p);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kContextScopeCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawContextScope* scope = objects_[i];
intptr_t length = scope->ptr()->num_variables_;
s->WriteUnsigned(length);
s->AssignRef(scope);
}
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawContextScope* scope = objects_[i];
intptr_t length = scope->ptr()->num_variables_;
s->WriteUnsigned(length);
s->Write<bool>(scope->ptr()->is_implicit_);
RawObject** from = scope->from();
RawObject** to = scope->to(length);
for (RawObject** p = from; p <= to; p++) {
s->WriteRef(*p);
}
}
}
private:
GrowableArray<RawContextScope*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class ContextScopeDeserializationCluster : public DeserializationCluster {
public:
ContextScopeDeserializationCluster() {}
virtual ~ContextScopeDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
intptr_t length = d->ReadUnsigned();
d->AssignRef(
AllocateUninitialized(old_space, ContextScope::InstanceSize(length)));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawContextScope* scope = reinterpret_cast<RawContextScope*>(d->Ref(id));
intptr_t length = d->ReadUnsigned();
Deserializer::InitializeHeader(scope, kContextScopeCid,
ContextScope::InstanceSize(length),
is_vm_object);
scope->ptr()->num_variables_ = length;
scope->ptr()->is_implicit_ = d->Read<bool>();
RawObject** from = scope->from();
RawObject** to = scope->to(length);
for (RawObject** p = from; p <= to; p++) {
*p = d->ReadRef();
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
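// UnlinkedCall objects are fixed-size, so the alloc phase only assigns refs;
// the fill phase writes the pointer fields in from()/to() order.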
class UnlinkedCallSerializationCluster : public SerializationCluster {
public:
UnlinkedCallSerializationCluster() : SerializationCluster("UnlinkedCall") {}
virtual ~UnlinkedCallSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawUnlinkedCall* unlinked = UnlinkedCall::RawCast(object);
objects_.Add(unlinked);
RawObject** from = unlinked->from();
RawObject** to = unlinked->to();
for (RawObject** p = from; p <= to; p++) {
s->Push(*p);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kUnlinkedCallCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawUnlinkedCall* unlinked = objects_[i];
s->AssignRef(unlinked);
}
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawUnlinkedCall* unlinked = objects_[i];
RawObject** from = unlinked->from();
RawObject** to = unlinked->to();
for (RawObject** p = from; p <= to; p++) {
s->WriteRef(*p);
}
}
}
private:
GrowableArray<RawUnlinkedCall*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class UnlinkedCallDeserializationCluster : public DeserializationCluster {
public:
UnlinkedCallDeserializationCluster() {}
virtual ~UnlinkedCallDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(
AllocateUninitialized(old_space, UnlinkedCall::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawUnlinkedCall* unlinked =
reinterpret_cast<RawUnlinkedCall*>(d->Ref(id));
Deserializer::InitializeHeader(unlinked, kUnlinkedCallCid,
UnlinkedCall::InstanceSize(),
is_vm_object);
RawObject** from = unlinked->from();
RawObject** to = unlinked->to();
for (RawObject** p = from; p <= to; p++) {
*p = d->ReadRef();
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
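// ICData serialization depends on the snapshot kind: only pointer fields up
// to to_snapshot(kind) are written, and deopt ids are omitted from full-AOT
// snapshots.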
class ICDataSerializationCluster : public SerializationCluster {
public:
ICDataSerializationCluster() : SerializationCluster("ICData") {}
virtual ~ICDataSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
RawICData* ic = ICData::RawCast(object);
objects_.Add(ic);
RawObject** from = ic->from();
RawObject** to = ic->to_snapshot(s->kind());
for (RawObject** p = from; p <= to; p++) {
s->Push(*p);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kICDataCid);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
RawICData* ic = objects_[i];
s->AssignRef(ic);
}
}
void WriteFill(Serializer* s) {
Snapshot::Kind kind = s->kind();
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
RawICData* ic = objects_[i];
RawObject** from = ic->from();
RawObject** to = ic->to_snapshot(kind);
for (RawObject** p = from; p <= to; p++) {
s->WriteRef(*p);
}
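// Deopt ids are only meaningful for JIT execution and are omitted from
// full-AOT snapshots.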
if (kind != Snapshot::kFullAOT) {
NOT_IN_PRECOMPILED(s->Write<int32_t>(ic->ptr()->deopt_id_));
}
s->Write<uint32_t>(ic->ptr()->state_bits_);
#if defined(TAG_IC_DATA)
s->Write<int32_t>(static_cast<int32_t>(ic->ptr()->tag_));
#endif
}
}
private:
GrowableArray<RawICData*> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class ICDataDeserializationCluster : public DeserializationCluster {
public:
ICDataDeserializationCluster() {}
virtual ~ICDataDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(AllocateUninitialized(old_space, ICData::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d) {
Snapshot::Kind kind = d->kind();
bool is_vm_object = d->isolate() == Dart::vm_isolate();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawICData* ic = reinterpret_cast<RawICData*>(d->Ref(id));
Deserializer::InitializeHeader(ic, kICDataCid, ICData::InstanceSize(),
is_vm_object);
RawObject** from = ic->from();
RawObject** to_snapshot = ic->to_snapshot(kind);
RawObject** to = ic->to();
for (RawObject** p = from; p <= to_snapshot; p++) {
*p = d->ReadRef();
}
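// Pointer fields beyond this snapshot kind's cutoff were not serialized;
// clear them to null.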
for (RawObject** p = to_snapshot + 1; p <= to; p++) {
*p = Object::null();
}