// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#include <memory>
#include "vm/clustered_snapshot.h"
#include "platform/assert.h"
#include "vm/bootstrap.h"
#include "vm/bss_relocs.h"
#include "vm/canonical_tables.h"
#include "vm/class_id.h"
#include "vm/code_observers.h"
#include "vm/compiler/api/print_filter.h"
#include "vm/compiler/assembler/disassembler.h"
#include "vm/dart.h"
#include "vm/dispatch_table.h"
#include "vm/flag_list.h"
#include "vm/growable_array.h"
#include "vm/heap/heap.h"
#include "vm/image_snapshot.h"
#include "vm/native_entry.h"
#include "vm/object.h"
#include "vm/object_store.h"
#include "vm/program_visitor.h"
#include "vm/stub_code.h"
#include "vm/symbols.h"
#include "vm/timeline.h"
#include "vm/version.h"
#include "vm/zone_text_buffer.h"
#if !defined(DART_PRECOMPILED_RUNTIME)
#include "vm/compiler/backend/code_statistics.h"
#include "vm/compiler/backend/il_printer.h"
#include "vm/compiler/relocation.h"
#endif // !defined(DART_PRECOMPILED_RUNTIME)
namespace dart {
#if !defined(DART_PRECOMPILED_RUNTIME)
DEFINE_FLAG(bool,
print_cluster_information,
false,
"Print information about clusters written to snapshot");
#endif
#if defined(DART_PRECOMPILER)
DEFINE_FLAG(charp,
write_v8_snapshot_profile_to,
NULL,
"Write a snapshot profile in V8 format to a file.");
#endif // defined(DART_PRECOMPILER)
#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
static void RelocateCodeObjects(
bool is_vm,
GrowableArray<CodePtr>* code_objects,
GrowableArray<ImageWriterCommand>* image_writer_commands) {
auto thread = Thread::Current();
auto isolate_group =
is_vm ? Dart::vm_isolate()->group() : thread->isolate_group();
WritableCodePages writable_code_pages(thread, isolate_group);
CodeRelocator::Relocate(thread, code_objects, image_writer_commands, is_vm);
}
class CodePtrKeyValueTrait {
public:
// Typedefs needed for the DirectChainedHashMap template.
typedef const CodePtr Key;
typedef const CodePtr Value;
typedef CodePtr Pair;
static Key KeyOf(Pair kv) { return kv; }
static Value ValueOf(Pair kv) { return kv; }
static inline intptr_t Hashcode(Key key) {
return static_cast<intptr_t>(key);
}
static inline bool IsKeyEqual(Pair pair, Key key) { return pair == key; }
};
typedef DirectChainedHashMap<CodePtrKeyValueTrait> RawCodeSet;
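// Illustrative use only (a sketch, assuming the usual DirectChainedHashMap
// Insert/HasKey API): a RawCodeSet can deduplicate Code pointers while
// walking the program, e.g.
//   RawCodeSet visited;
//   if (!visited.HasKey(code)) visited.Insert(code);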
#endif // defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
static ObjectPtr AllocateUninitialized(PageSpace* old_space, intptr_t size) {
ASSERT(Utils::IsAligned(size, kObjectAlignment));
uword address = old_space->TryAllocateDataBumpLocked(size);
if (address == 0) {
OUT_OF_MEMORY();
}
return UntaggedObject::FromAddr(address);
}
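// Initializes the header word of a freshly allocated snapshot object. All
// snapshot objects are allocated in old space, so the old-space bits are set
// (old, not marked, not remembered) and the new-space bit is cleared.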
void Deserializer::InitializeHeader(ObjectPtr raw,
intptr_t class_id,
intptr_t size,
bool is_canonical) {
ASSERT(Utils::IsAligned(size, kObjectAlignment));
uword tags = 0;
tags = UntaggedObject::ClassIdTag::update(class_id, tags);
tags = UntaggedObject::SizeTag::update(size, tags);
tags = UntaggedObject::CanonicalBit::update(is_canonical, tags);
tags = UntaggedObject::OldBit::update(true, tags);
tags = UntaggedObject::OldAndNotMarkedBit::update(true, tags);
tags = UntaggedObject::OldAndNotRememberedBit::update(true, tags);
tags = UntaggedObject::NewBit::update(false, tags);
raw->untag()->tags_ = tags;
}
#if !defined(DART_PRECOMPILED_RUNTIME)
void SerializationCluster::WriteAndMeasureAlloc(Serializer* serializer) {
intptr_t start_size = serializer->bytes_written();
intptr_t start_data = serializer->GetDataSize();
intptr_t start_objects = serializer->next_ref_index();
WriteAlloc(serializer);
intptr_t stop_size = serializer->bytes_written();
intptr_t stop_data = serializer->GetDataSize();
intptr_t stop_objects = serializer->next_ref_index();
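// With --print_cluster_information the three prints below form one line per
// cluster, e.g. (illustrative values only):
//   Snapshot 0x120 (64), Data 0x0 (0): Alloc Class (12)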
if (FLAG_print_cluster_information) {
OS::PrintErr("Snapshot 0x%" Pp " (%" Pd "), ", start_size,
stop_size - start_size);
OS::PrintErr("Data 0x%" Pp " (%" Pd "): ", start_data,
stop_data - start_data);
OS::PrintErr("Alloc %s (%" Pd ")\n", name(), stop_objects - start_objects);
}
size_ += (stop_size - start_size) + (stop_data - start_data);
num_objects_ += (stop_objects - start_objects);
target_memory_size_ += num_objects_ * target_instance_size_;
}
void SerializationCluster::WriteAndMeasureFill(Serializer* serializer) {
intptr_t start = serializer->bytes_written();
WriteFill(serializer);
intptr_t stop = serializer->bytes_written();
if (FLAG_print_cluster_information) {
OS::PrintErr("Snapshot 0x%" Pp " (%" Pd "): Fill %s\n", start, stop - start,
name());
}
size_ += (stop - start);
}
static UnboxedFieldBitmap CalculateTargetUnboxedFieldsBitmap(
Serializer* s,
intptr_t class_id) {
const auto unboxed_fields_bitmap_host =
s->isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(class_id);
UnboxedFieldBitmap unboxed_fields_bitmap;
if (unboxed_fields_bitmap_host.IsEmpty() ||
kWordSize == compiler::target::kWordSize) {
unboxed_fields_bitmap = unboxed_fields_bitmap_host;
} else {
ASSERT(kWordSize == 8 && compiler::target::kWordSize == 4);
// A new bitmap is built if the target and host word sizes differ.
unboxed_fields_bitmap.Reset();
intptr_t target_i = 0, host_i = 0;
while (host_i < UnboxedFieldBitmap::Length()) {
// Each unboxed field has a fixed size in bytes, so the number of words
// it occupies doubles when compiling from a 64-bit host to a 32-bit target.
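// For example (sketch): a host bitmap with bits {0, 2} set (0b0101) becomes
// a target bitmap with bits {0, 1, 3, 4} set (0b11011): each unboxed host
// word expands to two target words, while each pointer word stays one word.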
if (unboxed_fields_bitmap_host.Get(host_i++)) {
unboxed_fields_bitmap.Set(target_i++);
unboxed_fields_bitmap.Set(target_i++);
} else {
// For object pointers, the field always occupies a single word.
target_i++;
}
}
}
return unboxed_fields_bitmap;
}
class ClassSerializationCluster : public SerializationCluster {
public:
explicit ClassSerializationCluster(intptr_t num_cids)
: SerializationCluster("Class", compiler::target::Class::InstanceSize()),
predefined_(kNumPredefinedCids),
objects_(num_cids) {}
~ClassSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
ClassPtr cls = Class::RawCast(object);
intptr_t class_id = cls->untag()->id_;
if (class_id == kIllegalCid) {
// Classes expected to be dropped by the precompiler should not be traced.
s->UnexpectedObject(cls, "Class with illegal cid");
}
if (class_id < kNumPredefinedCids) {
// These classes are allocated by Object::Init or Object::InitOnce, so the
// deserializer must find them in the class table instead of allocating
// them.
predefined_.Add(cls);
} else {
objects_.Add(cls);
}
PushFromTo(cls);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kClassCid);
intptr_t count = predefined_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
ClassPtr cls = predefined_[i];
s->AssignRef(cls);
AutoTraceObject(cls);
intptr_t class_id = cls->untag()->id_;
s->WriteCid(class_id);
}
count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
ClassPtr cls = objects_[i];
s->AssignRef(cls);
}
}
void WriteFill(Serializer* s) {
intptr_t count = predefined_.length();
for (intptr_t i = 0; i < count; i++) {
WriteClass(s, predefined_[i]);
}
count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
WriteClass(s, objects_[i]);
}
}
private:
void WriteClass(Serializer* s, ClassPtr cls) {
AutoTraceObjectName(cls, cls->untag()->name_);
WriteFromTo(cls);
intptr_t class_id = cls->untag()->id_;
if (class_id == kIllegalCid) {
s->UnexpectedObject(cls, "Class with illegal cid");
}
s->WriteCid(class_id);
if (s->kind() == Snapshot::kFullCore &&
RequireCanonicalTypeErasureOfConstants(cls)) {
s->UnexpectedObject(cls, "Class with non mode agnostic constants");
}
if (s->kind() != Snapshot::kFullAOT) {
s->Write<uint32_t>(cls->untag()->kernel_offset_);
}
s->Write<int32_t>(Class::target_instance_size_in_words(cls));
s->Write<int32_t>(Class::target_next_field_offset_in_words(cls));
s->Write<int32_t>(Class::target_type_arguments_field_offset_in_words(cls));
s->Write<int16_t>(cls->untag()->num_type_arguments_);
s->Write<uint16_t>(cls->untag()->num_native_fields_);
s->WriteTokenPosition(cls->untag()->token_pos_);
s->WriteTokenPosition(cls->untag()->end_token_pos_);
s->Write<uint32_t>(cls->untag()->state_bits_);
// In AOT, the bitmap of unboxed fields is also serialized.
if (FLAG_precompiled_mode && !ClassTable::IsTopLevelCid(class_id)) {
s->WriteUnsigned64(
CalculateTargetUnboxedFieldsBitmap(s, class_id).Value());
}
}
GrowableArray<ClassPtr> predefined_;
GrowableArray<ClassPtr> objects_;
bool RequireCanonicalTypeErasureOfConstants(ClassPtr cls) {
// Do not generate a core snapshot containing constants that would require
// a canonical erasure of their types if loaded in an isolate running in
// unsound nullability mode.
if (cls->untag()->host_type_arguments_field_offset_in_words_ ==
Class::kNoTypeArguments ||
cls->untag()->constants_ == Array::null()) {
return false;
}
Zone* zone = Thread::Current()->zone();
const Class& clazz = Class::Handle(zone, cls);
return clazz.RequireCanonicalTypeErasureOfConstants(zone);
}
};
#endif // !DART_PRECOMPILED_RUNTIME
class ClassDeserializationCluster : public DeserializationCluster {
public:
ClassDeserializationCluster() : DeserializationCluster("Class") {}
~ClassDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
predefined_start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
intptr_t count = d->ReadUnsigned();
ClassTable* table = d->isolate_group()->class_table();
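// Predefined classes were already allocated by Object::Init, so they are
// looked up in the class table by cid rather than allocated here.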
for (intptr_t i = 0; i < count; i++) {
intptr_t class_id = d->ReadCid();
ASSERT(table->HasValidClassAt(class_id));
ClassPtr cls = table->At(class_id);
ASSERT(cls != nullptr);
d->AssignRef(cls);
}
predefined_stop_index_ = d->next_index();
start_index_ = d->next_index();
count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(AllocateUninitialized(old_space, Class::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ClassTable* table = d->isolate_group()->class_table();
for (intptr_t id = predefined_start_index_; id < predefined_stop_index_;
id++) {
ClassPtr cls = static_cast<ClassPtr>(d->Ref(id));
ReadFromTo(cls);
intptr_t class_id = d->ReadCid();
cls->untag()->id_ = class_id;
#if !defined(DART_PRECOMPILED_RUNTIME)
if (d->kind() != Snapshot::kFullAOT) {
cls->untag()->kernel_offset_ = d->Read<uint32_t>();
}
#endif
if (!IsInternalVMdefinedClassId(class_id)) {
cls->untag()->host_instance_size_in_words_ = d->Read<int32_t>();
cls->untag()->host_next_field_offset_in_words_ = d->Read<int32_t>();
#if !defined(DART_PRECOMPILED_RUNTIME)
// Only one pair is serialized. The target fields only exist when
// DART_PRECOMPILED_RUNTIME is not defined.
cls->untag()->target_instance_size_in_words_ =
cls->untag()->host_instance_size_in_words_;
cls->untag()->target_next_field_offset_in_words_ =
cls->untag()->host_next_field_offset_in_words_;
#endif // !defined(DART_PRECOMPILED_RUNTIME)
} else {
d->Read<int32_t>(); // Skip.
d->Read<int32_t>(); // Skip.
}
cls->untag()->host_type_arguments_field_offset_in_words_ =
d->Read<int32_t>();
#if !defined(DART_PRECOMPILED_RUNTIME)
cls->untag()->target_type_arguments_field_offset_in_words_ =
cls->untag()->host_type_arguments_field_offset_in_words_;
#endif // !defined(DART_PRECOMPILED_RUNTIME)
cls->untag()->num_type_arguments_ = d->Read<int16_t>();
cls->untag()->num_native_fields_ = d->Read<uint16_t>();
cls->untag()->token_pos_ = d->ReadTokenPosition();
cls->untag()->end_token_pos_ = d->ReadTokenPosition();
cls->untag()->state_bits_ = d->Read<uint32_t>();
if (FLAG_precompiled_mode) {
d->ReadUnsigned64(); // Skip unboxed fields bitmap.
}
}
auto shared_class_table = d->isolate_group()->shared_class_table();
for (intptr_t id = start_index_; id < stop_index_; id++) {
ClassPtr cls = static_cast<ClassPtr>(d->Ref(id));
Deserializer::InitializeHeader(cls, kClassCid, Class::InstanceSize());
ReadFromTo(cls);
intptr_t class_id = d->ReadCid();
ASSERT(class_id >= kNumPredefinedCids);
cls->untag()->id_ = class_id;
#if !defined(DART_PRECOMPILED_RUNTIME)
if (d->kind() != Snapshot::kFullAOT) {
cls->untag()->kernel_offset_ = d->Read<uint32_t>();
}
#endif
cls->untag()->host_instance_size_in_words_ = d->Read<int32_t>();
cls->untag()->host_next_field_offset_in_words_ = d->Read<int32_t>();
cls->untag()->host_type_arguments_field_offset_in_words_ =
d->Read<int32_t>();
#if !defined(DART_PRECOMPILED_RUNTIME)
cls->untag()->target_instance_size_in_words_ =
cls->untag()->host_instance_size_in_words_;
cls->untag()->target_next_field_offset_in_words_ =
cls->untag()->host_next_field_offset_in_words_;
cls->untag()->target_type_arguments_field_offset_in_words_ =
cls->untag()->host_type_arguments_field_offset_in_words_;
#endif // !defined(DART_PRECOMPILED_RUNTIME)
cls->untag()->num_type_arguments_ = d->Read<int16_t>();
cls->untag()->num_native_fields_ = d->Read<uint16_t>();
cls->untag()->token_pos_ = d->ReadTokenPosition();
cls->untag()->end_token_pos_ = d->ReadTokenPosition();
cls->untag()->state_bits_ = d->Read<uint32_t>();
table->AllocateIndex(class_id);
table->SetAt(class_id, cls);
if (FLAG_precompiled_mode && !ClassTable::IsTopLevelCid(class_id)) {
const UnboxedFieldBitmap unboxed_fields_map(d->ReadUnsigned64());
shared_class_table->SetUnboxedFieldsMapAt(class_id, unboxed_fields_map);
}
}
}
private:
intptr_t predefined_start_index_;
intptr_t predefined_stop_index_;
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class TypeArgumentsSerializationCluster : public SerializationCluster {
public:
TypeArgumentsSerializationCluster() : SerializationCluster("TypeArguments") {}
~TypeArgumentsSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
TypeArgumentsPtr type_args = TypeArguments::RawCast(object);
objects_.Add(type_args);
s->Push(type_args->untag()->instantiations_);
const intptr_t length = Smi::Value(type_args->untag()->length_);
for (intptr_t i = 0; i < length; i++) {
s->Push(type_args->untag()->types()[i]);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kTypeArgumentsCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
TypeArgumentsPtr type_args = objects_[i];
s->AssignRef(type_args);
AutoTraceObject(type_args);
const intptr_t length = Smi::Value(type_args->untag()->length_);
s->WriteUnsigned(length);
target_memory_size_ +=
compiler::target::TypeArguments::InstanceSize(length);
}
}
void WriteFill(Serializer* s) {
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
TypeArgumentsPtr type_args = objects_[i];
AutoTraceObject(type_args);
const intptr_t length = Smi::Value(type_args->untag()->length_);
s->WriteUnsigned(length);
intptr_t hash = Smi::Value(type_args->untag()->hash_);
s->Write<int32_t>(hash);
const intptr_t nullability = Smi::Value(type_args->untag()->nullability_);
s->WriteUnsigned(nullability);
WriteField(type_args, instantiations_);
for (intptr_t j = 0; j < length; j++) {
s->WriteElementRef(type_args->untag()->types()[j], j);
}
}
}
private:
GrowableArray<TypeArgumentsPtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class TypeArgumentsDeserializationCluster : public DeserializationCluster {
public:
TypeArgumentsDeserializationCluster()
: DeserializationCluster("TypeArguments") {}
~TypeArgumentsDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
const intptr_t length = d->ReadUnsigned();
d->AssignRef(AllocateUninitialized(old_space,
TypeArguments::InstanceSize(length)));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
for (intptr_t id = start_index_; id < stop_index_; id++) {
TypeArgumentsPtr type_args = static_cast<TypeArgumentsPtr>(d->Ref(id));
const intptr_t length = d->ReadUnsigned();
Deserializer::InitializeHeader(type_args, kTypeArgumentsCid,
TypeArguments::InstanceSize(length),
stamp_canonical);
type_args->untag()->length_ = Smi::New(length);
type_args->untag()->hash_ = Smi::New(d->Read<int32_t>());
type_args->untag()->nullability_ = Smi::New(d->ReadUnsigned());
type_args->untag()->instantiations_ = static_cast<ArrayPtr>(d->ReadRef());
for (intptr_t j = 0; j < length; j++) {
type_args->untag()->types()[j] =
static_cast<AbstractTypePtr>(d->ReadRef());
}
}
}
void PostLoad(Deserializer* d, const Array& refs, bool canonicalize) {
if (canonicalize) {
Thread* thread = Thread::Current();
TypeArguments& type_arg = TypeArguments::Handle(d->zone());
for (intptr_t i = start_index_; i < stop_index_; i++) {
type_arg ^= refs.At(i);
type_arg = type_arg.Canonicalize(thread, nullptr);
refs.SetAt(i, type_arg);
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class PatchClassSerializationCluster : public SerializationCluster {
public:
PatchClassSerializationCluster()
: SerializationCluster("PatchClass",
compiler::target::PatchClass::InstanceSize()) {}
~PatchClassSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
PatchClassPtr cls = PatchClass::RawCast(object);
objects_.Add(cls);
PushFromTo(cls);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kPatchClassCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
PatchClassPtr cls = objects_[i];
s->AssignRef(cls);
}
}
void WriteFill(Serializer* s) {
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
PatchClassPtr cls = objects_[i];
AutoTraceObject(cls);
WriteFromTo(cls);
if (s->kind() != Snapshot::kFullAOT) {
s->Write<int32_t>(cls->untag()->library_kernel_offset_);
}
}
}
private:
GrowableArray<PatchClassPtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class PatchClassDeserializationCluster : public DeserializationCluster {
public:
PatchClassDeserializationCluster() : DeserializationCluster("PatchClass") {}
~PatchClassDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(
AllocateUninitialized(old_space, PatchClass::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
for (intptr_t id = start_index_; id < stop_index_; id++) {
PatchClassPtr cls = static_cast<PatchClassPtr>(d->Ref(id));
Deserializer::InitializeHeader(cls, kPatchClassCid,
PatchClass::InstanceSize());
ReadFromTo(cls);
#if !defined(DART_PRECOMPILED_RUNTIME)
if (d->kind() != Snapshot::kFullAOT) {
cls->untag()->library_kernel_offset_ = d->Read<int32_t>();
}
#endif
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class FunctionSerializationCluster : public SerializationCluster {
public:
FunctionSerializationCluster()
: SerializationCluster("Function",
compiler::target::Function::InstanceSize()) {}
~FunctionSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
Snapshot::Kind kind = s->kind();
FunctionPtr func = Function::RawCast(object);
objects_.Add(func);
PushFromTo(func);
if (kind == Snapshot::kFullAOT) {
s->Push(func->untag()->code_);
} else if (kind == Snapshot::kFullJIT) {
NOT_IN_PRECOMPILED(s->Push(func->untag()->unoptimized_code_));
s->Push(func->untag()->code_);
s->Push(func->untag()->ic_data_array_);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kFunctionCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
FunctionPtr func = objects_[i];
s->AssignRef(func);
}
}
void WriteFill(Serializer* s) {
Snapshot::Kind kind = s->kind();
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
FunctionPtr func = objects_[i];
AutoTraceObjectName(func, MakeDisambiguatedFunctionName(s, func));
WriteFromTo(func);
if (kind == Snapshot::kFullAOT) {
WriteField(func, code_);
} else if (s->kind() == Snapshot::kFullJIT) {
NOT_IN_PRECOMPILED(WriteField(func, unoptimized_code_));
WriteField(func, code_);
WriteField(func, ic_data_array_);
}
if (kind != Snapshot::kFullAOT) {
s->WriteTokenPosition(func->untag()->token_pos_);
s->WriteTokenPosition(func->untag()->end_token_pos_);
s->Write<uint32_t>(func->untag()->kernel_offset_);
}
s->Write<uint32_t>(func->untag()->packed_fields_);
s->Write<uint32_t>(func->untag()->kind_tag_);
}
}
static const char* MakeDisambiguatedFunctionName(Serializer* s,
FunctionPtr f) {
if (s->profile_writer() == nullptr) {
return nullptr;
}
REUSABLE_FUNCTION_HANDLESCOPE(s->thread());
Function& fun = reused_function_handle.Handle();
fun = f;
ZoneTextBuffer printer(s->thread()->zone());
fun.PrintName(NameFormattingParams::DisambiguatedUnqualified(
Object::NameVisibility::kInternalName),
&printer);
return printer.buffer();
}
private:
GrowableArray<FunctionPtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class FunctionDeserializationCluster : public DeserializationCluster {
public:
FunctionDeserializationCluster() : DeserializationCluster("Function") {}
~FunctionDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(AllocateUninitialized(old_space, Function::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
Snapshot::Kind kind = d->kind();
for (intptr_t id = start_index_; id < stop_index_; id++) {
FunctionPtr func = static_cast<FunctionPtr>(d->Ref(id));
Deserializer::InitializeHeader(func, kFunctionCid,
Function::InstanceSize());
ReadFromTo(func);
if (kind == Snapshot::kFullAOT) {
func->untag()->code_ = static_cast<CodePtr>(d->ReadRef());
} else if (kind == Snapshot::kFullJIT) {
NOT_IN_PRECOMPILED(func->untag()->unoptimized_code_ =
static_cast<CodePtr>(d->ReadRef()));
func->untag()->code_ = static_cast<CodePtr>(d->ReadRef());
func->untag()->ic_data_array_ = static_cast<ArrayPtr>(d->ReadRef());
}
#if defined(DEBUG)
func->untag()->entry_point_ = 0;
func->untag()->unchecked_entry_point_ = 0;
#endif
#if !defined(DART_PRECOMPILED_RUNTIME)
if (kind != Snapshot::kFullAOT) {
func->untag()->token_pos_ = d->ReadTokenPosition();
func->untag()->end_token_pos_ = d->ReadTokenPosition();
func->untag()->kernel_offset_ = d->Read<uint32_t>();
}
func->untag()->unboxed_parameters_info_.Reset();
#endif
func->untag()->packed_fields_ = d->Read<uint32_t>();
func->untag()->kind_tag_ = d->Read<uint32_t>();
if (kind == Snapshot::kFullAOT) {
// Omit fields used to support de/reoptimization.
} else {
#if !defined(DART_PRECOMPILED_RUNTIME)
func->untag()->usage_counter_ = 0;
func->untag()->optimized_instruction_count_ = 0;
func->untag()->optimized_call_site_count_ = 0;
func->untag()->deoptimization_counter_ = 0;
func->untag()->state_bits_ = 0;
func->untag()->inlining_depth_ = 0;
#endif
}
}
}
void PostLoad(Deserializer* d, const Array& refs, bool canonicalize) {
if (d->kind() == Snapshot::kFullAOT) {
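// In AOT every function already has code, so the entry points cached in
// the Function object can be copied directly from its Code object.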
Function& func = Function::Handle(d->zone());
for (intptr_t i = start_index_; i < stop_index_; i++) {
func ^= refs.At(i);
ASSERT(func.ptr()->untag()->code_->IsCode());
uword entry_point = func.ptr()->untag()->code_->untag()->entry_point_;
ASSERT(entry_point != 0);
func.ptr()->untag()->entry_point_ = entry_point;
uword unchecked_entry_point =
func.ptr()->untag()->code_->untag()->unchecked_entry_point_;
ASSERT(unchecked_entry_point != 0);
func.ptr()->untag()->unchecked_entry_point_ = unchecked_entry_point;
}
} else if (d->kind() == Snapshot::kFullJIT) {
Function& func = Function::Handle(d->zone());
Code& code = Code::Handle(d->zone());
for (intptr_t i = start_index_; i < stop_index_; i++) {
func ^= refs.At(i);
code = func.CurrentCode();
if (func.HasCode() && !code.IsDisabled()) {
func.SetInstructionsSafe(code); // Set entrypoint.
func.SetWasCompiled(true);
} else {
func.ClearCodeSafe(); // Set code and entrypoint to lazy compile stub
}
}
} else {
Function& func = Function::Handle(d->zone());
for (intptr_t i = start_index_; i < stop_index_; i++) {
func ^= refs.At(i);
func.ClearCodeSafe(); // Set code and entrypoint to lazy compile stub.
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class ClosureDataSerializationCluster : public SerializationCluster {
public:
ClosureDataSerializationCluster()
: SerializationCluster("ClosureData",
compiler::target::ClosureData::InstanceSize()) {}
~ClosureDataSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
ClosureDataPtr data = ClosureData::RawCast(object);
objects_.Add(data);
if (s->kind() != Snapshot::kFullAOT) {
s->Push(data->untag()->context_scope_);
}
s->Push(data->untag()->parent_function_);
s->Push(data->untag()->closure_);
s->Push(data->untag()->default_type_arguments_);
s->Push(data->untag()->default_type_arguments_info_);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kClosureDataCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
ClosureDataPtr data = objects_[i];
s->AssignRef(data);
}
}
void WriteFill(Serializer* s) {
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
ClosureDataPtr data = objects_[i];
AutoTraceObject(data);
if (s->kind() != Snapshot::kFullAOT) {
WriteField(data, context_scope_);
}
WriteField(data, parent_function_);
WriteField(data, closure_);
WriteField(data, default_type_arguments_);
WriteField(data, default_type_arguments_info_);
}
}
private:
GrowableArray<ClosureDataPtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class ClosureDataDeserializationCluster : public DeserializationCluster {
public:
ClosureDataDeserializationCluster() : DeserializationCluster("ClosureData") {}
~ClosureDataDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(
AllocateUninitialized(old_space, ClosureData::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
for (intptr_t id = start_index_; id < stop_index_; id++) {
ClosureDataPtr data = static_cast<ClosureDataPtr>(d->Ref(id));
Deserializer::InitializeHeader(data, kClosureDataCid,
ClosureData::InstanceSize());
if (d->kind() == Snapshot::kFullAOT) {
data->untag()->context_scope_ = ContextScope::null();
} else {
data->untag()->context_scope_ =
static_cast<ContextScopePtr>(d->ReadRef());
}
data->untag()->parent_function_ = static_cast<FunctionPtr>(d->ReadRef());
data->untag()->closure_ = static_cast<InstancePtr>(d->ReadRef());
data->untag()->default_type_arguments_ =
static_cast<TypeArgumentsPtr>(d->ReadRef());
data->untag()->default_type_arguments_info_ =
static_cast<SmiPtr>(d->ReadRef());
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class FfiTrampolineDataSerializationCluster : public SerializationCluster {
public:
FfiTrampolineDataSerializationCluster()
: SerializationCluster(
"FfiTrampolineData",
compiler::target::FfiTrampolineData::InstanceSize()) {}
~FfiTrampolineDataSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
FfiTrampolineDataPtr data = FfiTrampolineData::RawCast(object);
objects_.Add(data);
PushFromTo(data);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kFfiTrampolineDataCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
s->AssignRef(objects_[i]);
}
}
void WriteFill(Serializer* s) {
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
FfiTrampolineDataPtr const data = objects_[i];
AutoTraceObject(data);
WriteFromTo(data);
if (s->kind() == Snapshot::kFullAOT) {
s->WriteUnsigned(data->untag()->callback_id_);
} else {
// FFI callbacks can only be written to AOT snapshots.
ASSERT(data->untag()->callback_target_ == Object::null());
}
}
}
private:
GrowableArray<FfiTrampolineDataPtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class FfiTrampolineDataDeserializationCluster : public DeserializationCluster {
public:
FfiTrampolineDataDeserializationCluster()
: DeserializationCluster("FfiTrampolineData") {}
~FfiTrampolineDataDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(
AllocateUninitialized(old_space, FfiTrampolineData::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
for (intptr_t id = start_index_; id < stop_index_; id++) {
FfiTrampolineDataPtr data = static_cast<FfiTrampolineDataPtr>(d->Ref(id));
Deserializer::InitializeHeader(data, kFfiTrampolineDataCid,
FfiTrampolineData::InstanceSize());
ReadFromTo(data);
data->untag()->callback_id_ =
d->kind() == Snapshot::kFullAOT ? d->ReadUnsigned() : 0;
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class FieldSerializationCluster : public SerializationCluster {
public:
FieldSerializationCluster()
: SerializationCluster("Field", compiler::target::Field::InstanceSize()) {
}
~FieldSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
FieldPtr field = Field::RawCast(object);
objects_.Add(field);
Snapshot::Kind kind = s->kind();
s->Push(field->untag()->name_);
s->Push(field->untag()->owner_);
s->Push(field->untag()->type_);
// Write out the initializer function
s->Push(field->untag()->initializer_function_);
if (kind != Snapshot::kFullAOT) {
s->Push(field->untag()->guarded_list_length_);
}
if (kind == Snapshot::kFullJIT) {
s->Push(field->untag()->dependent_code_);
}
// Write out either the initial static value or field offset.
if (Field::StaticBit::decode(field->untag()->kind_bits_)) {
const intptr_t field_id =
Smi::Value(field->untag()->host_offset_or_field_id_);
s->Push(s->initial_field_table()->At(field_id));
} else {
s->Push(Smi::New(Field::TargetOffsetOf(field)));
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kFieldCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
FieldPtr field = objects_[i];
s->AssignRef(field);
}
}
void WriteFill(Serializer* s) {
Snapshot::Kind kind = s->kind();
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
FieldPtr field = objects_[i];
AutoTraceObjectName(field, field->untag()->name_);
WriteField(field, name_);
WriteField(field, owner_);
WriteField(field, type_);
// Write out the initializer function and initial value if not in AOT.
WriteField(field, initializer_function_);
if (kind != Snapshot::kFullAOT) {
WriteField(field, guarded_list_length_);
}
if (kind == Snapshot::kFullJIT) {
WriteField(field, dependent_code_);
}
if (kind != Snapshot::kFullAOT) {
s->WriteTokenPosition(field->untag()->token_pos_);
s->WriteTokenPosition(field->untag()->end_token_pos_);
s->WriteCid(field->untag()->guarded_cid_);
s->WriteCid(field->untag()->is_nullable_);
s->Write<int8_t>(field->untag()->static_type_exactness_state_);
s->Write<uint32_t>(field->untag()->kernel_offset_);
}
s->Write<uint16_t>(field->untag()->kind_bits_);
// Write out either the initial static value or field offset.
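// For a static field, host_offset_or_field_id_ is an index into the initial
// field table holding the field's initial value; for an instance field, it
// is the field's offset within the instance.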
if (Field::StaticBit::decode(field->untag()->kind_bits_)) {
const intptr_t field_id =
Smi::Value(field->untag()->host_offset_or_field_id_);
WriteFieldValue("static value", s->initial_field_table()->At(field_id));
s->WriteUnsigned(field_id);
} else {
WriteFieldValue("offset", Smi::New(Field::TargetOffsetOf(field)));
}
}
}
private:
GrowableArray<FieldPtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class FieldDeserializationCluster : public DeserializationCluster {
public:
FieldDeserializationCluster() : DeserializationCluster("Field") {}
~FieldDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(AllocateUninitialized(old_space, Field::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
Snapshot::Kind kind = d->kind();
for (intptr_t id = start_index_; id < stop_index_; id++) {
FieldPtr field = static_cast<FieldPtr>(d->Ref(id));
Deserializer::InitializeHeader(field, kFieldCid, Field::InstanceSize());
ReadFromTo(field);
if (kind != Snapshot::kFullAOT) {
field->untag()->guarded_list_length_ =
static_cast<SmiPtr>(d->ReadRef());
}
if (kind == Snapshot::kFullJIT) {
field->untag()->dependent_code_ = static_cast<ArrayPtr>(d->ReadRef());
}
if (kind != Snapshot::kFullAOT) {
field->untag()->token_pos_ = d->ReadTokenPosition();
field->untag()->end_token_pos_ = d->ReadTokenPosition();
field->untag()->guarded_cid_ = d->ReadCid();
field->untag()->is_nullable_ = d->ReadCid();
const int8_t static_type_exactness_state = d->Read<int8_t>();
#if defined(TARGET_ARCH_X64)
field->untag()->static_type_exactness_state_ =
static_type_exactness_state;
#else
// We might produce core snapshots using an X64 VM and then consume them
// in an IA32 or ARM VM, in which case we simply ignore the static type
// exactness state written into the snapshot because non-X64 builds don't
// have this feature enabled.
// TODO(dartbug.com/34170) Support other architectures.
USE(static_type_exactness_state);
field->untag()->static_type_exactness_state_ =
StaticTypeExactnessState::NotTracking().Encode();
#endif // defined(TARGET_ARCH_X64)
#if !defined(DART_PRECOMPILED_RUNTIME)
field->untag()->kernel_offset_ = d->Read<uint32_t>();
#endif
}
field->untag()->kind_bits_ = d->Read<uint16_t>();
ObjectPtr value_or_offset = d->ReadRef();
if (Field::StaticBit::decode(field->untag()->kind_bits_)) {
const intptr_t field_id = d->ReadUnsigned();
d->initial_field_table()->SetAt(
field_id, static_cast<InstancePtr>(value_or_offset));
field->untag()->host_offset_or_field_id_ = Smi::New(field_id);
} else {
field->untag()->host_offset_or_field_id_ =
Smi::RawCast(value_or_offset);
#if !defined(DART_PRECOMPILED_RUNTIME)
field->untag()->target_offset_ =
Smi::Value(field->untag()->host_offset_or_field_id_);
#endif // !defined(DART_PRECOMPILED_RUNTIME)
}
}
}
void PostLoad(Deserializer* d, const Array& refs, bool canonicalize) {
Field& field = Field::Handle(d->zone());
if (!IsolateGroup::Current()->use_field_guards()) {
for (intptr_t i = start_index_; i < stop_index_; i++) {
field ^= refs.At(i);
field.set_guarded_cid_unsafe(kDynamicCid);
field.set_is_nullable_unsafe(true);
field.set_guarded_list_length_unsafe(Field::kNoFixedLength);
field.set_guarded_list_length_in_object_offset_unsafe(
Field::kUnknownLengthOffset);
field.set_static_type_exactness_state(
StaticTypeExactnessState::NotTracking());
}
} else {
for (intptr_t i = start_index_; i < stop_index_; i++) {
field ^= refs.At(i);
field.InitializeGuardedListLengthInObjectOffset(/*unsafe=*/true);
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class ScriptSerializationCluster : public SerializationCluster {
public:
ScriptSerializationCluster()
: SerializationCluster("Script",
compiler::target::Script::InstanceSize()) {}
~ScriptSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
ScriptPtr script = Script::RawCast(object);
objects_.Add(script);
PushFromTo(script);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kScriptCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
ScriptPtr script = objects_[i];
s->AssignRef(script);
}
}
void WriteFill(Serializer* s) {
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
ScriptPtr script = objects_[i];
AutoTraceObjectName(script, script->untag()->url_);
WriteFromTo(script);
s->Write<int32_t>(script->untag()->line_offset_);
s->Write<int32_t>(script->untag()->col_offset_);
if (s->kind() != Snapshot::kFullAOT) {
// Clear out the max position cache in snapshots to ensure no
// differences in the snapshot due to triggering caching vs. not.
int32_t written_flags =
UntaggedScript::CachedMaxPositionBitField::update(
0, script->untag()->flags_and_max_position_);
written_flags = UntaggedScript::HasCachedMaxPositionBit::update(
false, written_flags);
s->Write<int32_t>(written_flags);
}
s->Write<int32_t>(script->untag()->kernel_script_index_);
}
}
private:
GrowableArray<ScriptPtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class ScriptDeserializationCluster : public DeserializationCluster {
public:
ScriptDeserializationCluster() : DeserializationCluster("Script") {}
~ScriptDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(AllocateUninitialized(old_space, Script::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
for (intptr_t id = start_index_; id < stop_index_; id++) {
ScriptPtr script = static_cast<ScriptPtr>(d->Ref(id));
Deserializer::InitializeHeader(script, kScriptCid,
Script::InstanceSize());
ReadFromTo(script);
script->untag()->line_offset_ = d->Read<int32_t>();
script->untag()->col_offset_ = d->Read<int32_t>();
#if !defined(DART_PRECOMPILED_RUNTIME)
script->untag()->flags_and_max_position_ = d->Read<int32_t>();
#endif
script->untag()->kernel_script_index_ = d->Read<int32_t>();
script->untag()->load_timestamp_ = 0;
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class LibrarySerializationCluster : public SerializationCluster {
public:
LibrarySerializationCluster()
: SerializationCluster("Library",
compiler::target::Library::InstanceSize()) {}
~LibrarySerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
LibraryPtr lib = Library::RawCast(object);
objects_.Add(lib);
PushFromTo(lib);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kLibraryCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
LibraryPtr lib = objects_[i];
s->AssignRef(lib);
}
}
void WriteFill(Serializer* s) {
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
LibraryPtr lib = objects_[i];
AutoTraceObjectName(lib, lib->untag()->url_);
WriteFromTo(lib);
s->Write<int32_t>(lib->untag()->index_);
s->Write<uint16_t>(lib->untag()->num_imports_);
s->Write<int8_t>(lib->untag()->load_state_);
s->Write<uint8_t>(lib->untag()->flags_);
if (s->kind() != Snapshot::kFullAOT) {
s->Write<uint32_t>(lib->untag()->kernel_offset_);
}
}
}
private:
GrowableArray<LibraryPtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class LibraryDeserializationCluster : public DeserializationCluster {
public:
LibraryDeserializationCluster() : DeserializationCluster("Library") {}
~LibraryDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(AllocateUninitialized(old_space, Library::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
for (intptr_t id = start_index_; id < stop_index_; id++) {
LibraryPtr lib = static_cast<LibraryPtr>(d->Ref(id));
Deserializer::InitializeHeader(lib, kLibraryCid, Library::InstanceSize());
ReadFromTo(lib);
lib->untag()->native_entry_resolver_ = NULL;
lib->untag()->native_entry_symbol_resolver_ = NULL;
lib->untag()->index_ = d->Read<int32_t>();
lib->untag()->num_imports_ = d->Read<uint16_t>();
lib->untag()->load_state_ = d->Read<int8_t>();
lib->untag()->flags_ =
UntaggedLibrary::InFullSnapshotBit::update(true, d->Read<uint8_t>());
#if !defined(DART_PRECOMPILED_RUNTIME)
if (d->kind() != Snapshot::kFullAOT) {
lib->untag()->kernel_offset_ = d->Read<uint32_t>();
}
#endif
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class NamespaceSerializationCluster : public SerializationCluster {
public:
NamespaceSerializationCluster()
: SerializationCluster("Namespace",
compiler::target::Namespace::InstanceSize()) {}
~NamespaceSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
NamespacePtr ns = Namespace::RawCast(object);
objects_.Add(ns);
PushFromTo(ns);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kNamespaceCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
NamespacePtr ns = objects_[i];
s->AssignRef(ns);
}
}
void WriteFill(Serializer* s) {
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
NamespacePtr ns = objects_[i];
AutoTraceObject(ns);
WriteFromTo(ns);
}
}
private:
GrowableArray<NamespacePtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class NamespaceDeserializationCluster : public DeserializationCluster {
public:
NamespaceDeserializationCluster() : DeserializationCluster("Namespace") {}
~NamespaceDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(AllocateUninitialized(old_space, Namespace::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
for (intptr_t id = start_index_; id < stop_index_; id++) {
NamespacePtr ns = static_cast<NamespacePtr>(d->Ref(id));
Deserializer::InitializeHeader(ns, kNamespaceCid,
Namespace::InstanceSize());
ReadFromTo(ns);
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
// KernelProgramInfo objects are not written into a full AOT snapshot.
class KernelProgramInfoSerializationCluster : public SerializationCluster {
public:
KernelProgramInfoSerializationCluster()
: SerializationCluster(
"KernelProgramInfo",
compiler::target::KernelProgramInfo::InstanceSize()) {}
~KernelProgramInfoSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
KernelProgramInfoPtr info = KernelProgramInfo::RawCast(object);
objects_.Add(info);
PushFromTo(info);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kKernelProgramInfoCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
KernelProgramInfoPtr info = objects_[i];
s->AssignRef(info);
}
}
void WriteFill(Serializer* s) {
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
KernelProgramInfoPtr info = objects_[i];
AutoTraceObject(info);
WriteFromTo(info);
s->Write<uint32_t>(info->untag()->kernel_binary_version_);
}
}
private:
GrowableArray<KernelProgramInfoPtr> objects_;
};
// Since KernelProgramInfo objects are not written into full AOT snapshots,
// one will never need to read them from a full AOT snapshot.
class KernelProgramInfoDeserializationCluster : public DeserializationCluster {
public:
KernelProgramInfoDeserializationCluster()
: DeserializationCluster("KernelProgramInfo") {}
~KernelProgramInfoDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(
AllocateUninitialized(old_space, KernelProgramInfo::InstanceSize()));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
for (intptr_t id = start_index_; id < stop_index_; id++) {
KernelProgramInfoPtr info = static_cast<KernelProgramInfoPtr>(d->Ref(id));
Deserializer::InitializeHeader(info, kKernelProgramInfoCid,
KernelProgramInfo::InstanceSize());
ReadFromTo(info);
info->untag()->kernel_binary_version_ = d->Read<uint32_t>();
}
}
void PostLoad(Deserializer* d, const Array& refs, bool canonicalize) {
Array& array = Array::Handle(d->zone());
KernelProgramInfo& info = KernelProgramInfo::Handle(d->zone());
for (intptr_t id = start_index_; id < stop_index_; id++) {
info ^= refs.At(id);
array = HashTables::New<UnorderedHashMap<SmiTraits>>(16, Heap::kOld);
info.set_libraries_cache(array);
array = HashTables::New<UnorderedHashMap<SmiTraits>>(16, Heap::kOld);
info.set_classes_cache(array);
}
}
};
class CodeSerializationCluster : public SerializationCluster {
public:
explicit CodeSerializationCluster(Heap* heap)
: SerializationCluster("Code", compiler::target::Code::InstanceSize()),
array_(Array::Handle()) {}
~CodeSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
CodePtr code = Code::RawCast(object);
if (s->InCurrentLoadingUnit(code, /*record*/ true)) {
objects_.Add(code);
}
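// In a full AOT snapshot with bare instructions the object pool is treated
// as weak (see ObjectPoolSerializationCluster), so the pool's tagged entries
// are pushed here to keep them reachable from the Code that uses them.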
if (s->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions) {
ObjectPoolPtr pool = code->untag()->object_pool_;
if ((pool != ObjectPool::null()) && s->InCurrentLoadingUnit(code)) {
const intptr_t length = pool->untag()->length_;
uint8_t* entry_bits = pool->untag()->entry_bits();
for (intptr_t i = 0; i < length; i++) {
auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
if (entry_type == ObjectPool::EntryType::kTaggedObject) {
s->Push(pool->untag()->data()[i].raw_obj_);
}
}
}
} else {
if (s->InCurrentLoadingUnit(code->untag()->object_pool_)) {
s->Push(code->untag()->object_pool_);
}
}
s->Push(code->untag()->owner_);
s->Push(code->untag()->exception_handlers_);
s->Push(code->untag()->pc_descriptors_);
s->Push(code->untag()->catch_entry_);
if (s->InCurrentLoadingUnit(code->untag()->compressed_stackmaps_)) {
s->Push(code->untag()->compressed_stackmaps_);
}
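// With DWARF stack traces enabled in precompiled mode, the inlining metadata
// (inlined_id_to_function_ and code_source_map_) is not written out (see
// WriteFill below), so it is not traced here either.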
if (!FLAG_precompiled_mode || !FLAG_dwarf_stack_traces_mode) {
s->Push(code->untag()->inlined_id_to_function_);
if (s->InCurrentLoadingUnit(code->untag()->code_source_map_)) {
s->Push(code->untag()->code_source_map_);
}
}
if (s->kind() == Snapshot::kFullJIT) {
s->Push(code->untag()->deopt_info_array_);
s->Push(code->untag()->static_calls_target_table_);
} else if (s->kind() == Snapshot::kFullAOT) {
#if defined(DART_PRECOMPILER)
auto const calls_array = code->untag()->static_calls_target_table_;
if (calls_array != Array::null()) {
// Some Code entries in the static calls target table may only be
// reachable from here, so push those Code objects.
array_ = calls_array;
for (auto entry : StaticCallsTable(array_)) {
auto kind = Code::KindField::decode(
Smi::Value(entry.Get<Code::kSCallTableKindAndOffset>()));
switch (kind) {
case Code::kCallViaCode:
// Code object in the pool.
continue;
case Code::kPcRelativeTTSCall:
// The TTS will be reachable through the type object, which itself is
// in the pool.
continue;
case Code::kPcRelativeCall:
case Code::kPcRelativeTailCall:
auto destination = entry.Get<Code::kSCallTableCodeOrTypeTarget>();
ASSERT(destination->IsHeapObject() && destination->IsCode());
s->Push(destination);
}
}
}
#else
UNREACHABLE();
#endif
}
#if !defined(PRODUCT)
s->Push(code->untag()->return_address_metadata_);
if (FLAG_code_comments) {
s->Push(code->untag()->comments_);
}
#endif
}
struct CodeOrderInfo {
CodePtr code;
intptr_t order;
intptr_t original_index;
};
// We sort code objects in such a way that code objects with the same
// instructions are grouped together. To make sorting more stable between
// similar programs we also sort them further by their original indices -
// this helps to stabilize the output of --print-instructions-sizes-to, which
// uses the name of the first code object (among those pointing to the same
// instructions object).
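// Example (sketch): for codes [A, B, C] where A and C share instructions,
// the assigned orders are A:1, B:2, C:1, so the sorted result is [A, C, B]
// (A precedes C because its original index is smaller).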
static int CompareCodeOrderInfo(CodeOrderInfo const* a,
CodeOrderInfo const* b) {
if (a->order < b->order) return -1;
if (a->order > b->order) return 1;
if (a->original_index < b->original_index) return -1;
if (a->original_index > b->original_index) return 1;
return 0;
}
static void Insert(GrowableArray<CodeOrderInfo>* order_list,
IntMap<intptr_t>* order_map,
CodePtr code,
intptr_t original_index) {
InstructionsPtr instr = code->untag()->instructions_;
intptr_t key = static_cast<intptr_t>(instr);
intptr_t order;
if (order_map->HasKey(key)) {
order = order_map->Lookup(key);
} else {
order = order_list->length() + 1;
order_map->Insert(key, order);
}
CodeOrderInfo info;
info.code = code;
info.order = order;
info.original_index = original_index;
order_list->Add(info);
}
static void Sort(GrowableArray<CodePtr>* codes) {
GrowableArray<CodeOrderInfo> order_list;
IntMap<intptr_t> order_map;
for (intptr_t i = 0; i < codes->length(); i++) {
Insert(&order_list, &order_map, (*codes)[i], i);
}
order_list.Sort(CompareCodeOrderInfo);
ASSERT(order_list.length() == codes->length());
for (intptr_t i = 0; i < order_list.length(); i++) {
(*codes)[i] = order_list[i].code;
}
}
static void Sort(GrowableArray<Code*>* codes) {
GrowableArray<CodeOrderInfo> order_list;
IntMap<intptr_t> order_map;
for (intptr_t i = 0; i < codes->length(); i++) {
Insert(&order_list, &order_map, (*codes)[i]->ptr(), i);
}
order_list.Sort(CompareCodeOrderInfo);
ASSERT(order_list.length() == codes->length());
for (intptr_t i = 0; i < order_list.length(); i++) {
*(*codes)[i] = order_list[i].code;
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kCodeCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
CodePtr code = objects_[i];
s->AssignRef(code);
}
const intptr_t deferred_count = deferred_objects_.length();
s->WriteUnsigned(deferred_count);
for (intptr_t i = 0; i < deferred_count; i++) {
CodePtr code = deferred_objects_[i];
s->AssignRef(code);
}
}
void WriteFill(Serializer* s) {
Snapshot::Kind kind = s->kind();
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
CodePtr code = objects_[i];
WriteFill(s, kind, code, false);
}
const intptr_t deferred_count = deferred_objects_.length();
for (intptr_t i = 0; i < deferred_count; i++) {
CodePtr code = deferred_objects_[i];
WriteFill(s, kind, code, true);
}
}
void WriteFill(Serializer* s,
Snapshot::Kind kind,
CodePtr code,
bool deferred) {
AutoTraceObjectName(code, MakeDisambiguatedCodeName(s, code));
intptr_t pointer_offsets_length =
Code::PtrOffBits::decode(code->untag()->state_bits_);
if (pointer_offsets_length != 0) {
FATAL("Cannot serialize code with embedded pointers");
}
if (kind == Snapshot::kFullAOT && Code::IsDisabled(code)) {
// Disabled code is fatal in AOT since we cannot recompile.
s->UnexpectedObject(code, "Disabled code");
}
s->WriteInstructions(code->untag()->instructions_,
code->untag()->unchecked_offset_, code, deferred);
if (kind == Snapshot::kFullJIT) {
// TODO(rmacnak): Fix references to disabled code before serializing.
// For now, we may write the FixCallersTarget or equivalent stub. This
// will cause a fixup if this code is called.
const uint32_t active_unchecked_offset =
code->untag()->unchecked_entry_point_ - code->untag()->entry_point_;
s->WriteInstructions(code->untag()->active_instructions_,
active_unchecked_offset, code, deferred);
}
if (s->InCurrentLoadingUnit(code->untag()->compressed_stackmaps_)) {
WriteField(code, compressed_stackmaps_);
} else {
WriteFieldValue(compressed_stackmaps_, CompressedStackMaps::null());
}
s->Write<int32_t>(code->untag()->state_bits_);
#if defined(DART_PRECOMPILER)
if (FLAG_write_v8_snapshot_profile_to != nullptr) {
// If we are writing a V8 snapshot profile, attribute references that go
// through the object pool and static calls to the code object itself.
if (kind == Snapshot::kFullAOT && FLAG_use_bare_instructions &&
code->untag()->object_pool_ != ObjectPool::null()) {
ObjectPoolPtr pool = code->untag()->object_pool_;
for (intptr_t i = 0; i < pool->untag()->length_; i++) {
uint8_t bits = pool->untag()->entry_bits()[i];
if (ObjectPool::TypeBits::decode(bits) ==
ObjectPool::EntryType::kTaggedObject) {
s->AttributeElementRef(pool->untag()->data()[i].raw_obj_, i);
}
}
}
if (code->untag()->static_calls_target_table_ != Array::null()) {
array_ = code->untag()->static_calls_target_table_;
intptr_t index = code->untag()->object_pool_ != ObjectPool::null()
? code->untag()->object_pool_->untag()->length_
: 0;
for (auto entry : StaticCallsTable(array_)) {
auto kind = Code::KindField::decode(
Smi::Value(entry.Get<Code::kSCallTableKindAndOffset>()));
switch (kind) {
case Code::kCallViaCode:
// Code object in the pool.
continue;
case Code::kPcRelativeTTSCall:
// The TTS will be reachable through the type object, which itself is
// in the pool.
continue;
case Code::kPcRelativeCall:
case Code::kPcRelativeTailCall:
auto destination = entry.Get<Code::kSCallTableCodeOrTypeTarget>();
ASSERT(destination->IsHeapObject() && destination->IsCode());
s->AttributeElementRef(destination, index++);
}
}
}
}
#endif // defined(DART_PRECOMPILER)
if (Code::IsDiscarded(code)) {
// Only write instructions, compressed stackmaps and state bits
// for the discarded Code objects.
ASSERT(kind == Snapshot::kFullAOT && FLAG_use_bare_instructions &&
FLAG_dwarf_stack_traces_mode && !FLAG_retain_code_objects);
return;
}
// No need to write the object pool out if we are producing a full AOT
// snapshot with bare instructions.
if (!(kind == Snapshot::kFullAOT && FLAG_use_bare_instructions)) {
if (s->InCurrentLoadingUnit(code->untag()->object_pool_)) {
WriteField(code, object_pool_);
} else {
WriteFieldValue(object_pool_, ObjectPool::null());
}
}
WriteField(code, owner_);
WriteField(code, exception_handlers_);
WriteField(code, pc_descriptors_);
WriteField(code, catch_entry_);
if (FLAG_precompiled_mode && FLAG_dwarf_stack_traces_mode) {
WriteFieldValue(inlined_id_to_function_, Array::null());
WriteFieldValue(code_source_map_, CodeSourceMap::null());
} else {
WriteField(code, inlined_id_to_function_);
if (s->InCurrentLoadingUnit(code->untag()->code_source_map_)) {
WriteField(code, code_source_map_);
} else {
WriteFieldValue(code_source_map_, CodeSourceMap::null());
}
}
if (kind == Snapshot::kFullJIT) {
WriteField(code, deopt_info_array_);
WriteField(code, static_calls_target_table_);
}
#if !defined(PRODUCT)
WriteField(code, return_address_metadata_);
if (FLAG_code_comments) {
WriteField(code, comments_);
}
#endif
}
GrowableArray<CodePtr>* objects() { return &objects_; }
GrowableArray<CodePtr>* deferred_objects() { return &deferred_objects_; }
// Some code objects would have their owners dropped from the snapshot,
// which makes it impossible to recover the program structure when
// analysing a snapshot profile. To facilitate analysis of snapshot profiles
// we include artificial nodes representing such dropped owners in the
// profile.
void WriteDroppedOwnersIntoProfile(Serializer* s) {
ASSERT(s->profile_writer() != nullptr);
for (auto code : objects_) {
ObjectPtr owner =
WeakSerializationReference::Unwrap(code->untag()->owner_);
if (s->CreateArtificalNodeIfNeeded(owner) || Code::IsDiscarded(code)) {
AutoTraceObject(code);
s->AttributePropertyRef(owner, ":owner_",
/*permit_artificial_ref=*/true);
}
}
}
private:
static const char* MakeDisambiguatedCodeName(Serializer* s, CodePtr c) {
if (s->profile_writer() == nullptr) {
return nullptr;
}
REUSABLE_CODE_HANDLESCOPE(s->thread());
Code& code = reused_code_handle.Handle();
code = c;
return code.QualifiedName(
NameFormattingParams::DisambiguatedWithoutClassName(
Object::NameVisibility::kInternalName));
}
GrowableArray<CodePtr> objects_;
GrowableArray<CodePtr> deferred_objects_;
Array& array_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class CodeDeserializationCluster : public DeserializationCluster {
public:
CodeDeserializationCluster() : DeserializationCluster("Code") {}
~CodeDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
PageSpace* old_space = d->heap()->old_space();
start_index_ = d->next_index();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
auto code = AllocateUninitialized(old_space, Code::InstanceSize(0));
d->AssignRef(code);
}
stop_index_ = d->next_index();
deferred_start_index_ = d->next_index();
const intptr_t deferred_count = d->ReadUnsigned();
for (intptr_t i = 0; i < deferred_count; i++) {
auto code = AllocateUninitialized(old_space, Code::InstanceSize(0));
d->AssignRef(code);
}
deferred_stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
for (intptr_t id = start_index_; id < stop_index_; id++) {
ReadFill(d, id, false);
}
for (intptr_t id = deferred_start_index_; id < deferred_stop_index_; id++) {
ReadFill(d, id, true);
}
}
void ReadFill(Deserializer* d, intptr_t id, bool deferred) {
auto const code = static_cast<CodePtr>(d->Ref(id));
Deserializer::InitializeHeader(code, kCodeCid, Code::InstanceSize(0));
d->ReadInstructions(code, deferred);
code->untag()->compressed_stackmaps_ =
static_cast<CompressedStackMapsPtr>(d->ReadRef());
code->untag()->state_bits_ = d->Read<int32_t>();
#if defined(DART_PRECOMPILED_RUNTIME)
if (Code::IsDiscarded(code)) {
code->untag()->owner_ = Smi::New(kFunctionCid);
return;
}
#else
ASSERT(!Code::IsDiscarded(code));
#endif // defined(DART_PRECOMPILED_RUNTIME)
// There would be a single global pool if this is a full AOT snapshot
// with bare instructions.
if (!(d->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions)) {
code->untag()->object_pool_ = static_cast<ObjectPoolPtr>(d->ReadRef());
} else {
code->untag()->object_pool_ = ObjectPool::null();
}
code->untag()->owner_ = d->ReadRef();
code->untag()->exception_handlers_ =
static_cast<ExceptionHandlersPtr>(d->ReadRef());
code->untag()->pc_descriptors_ =
static_cast<PcDescriptorsPtr>(d->ReadRef());
code->untag()->catch_entry_ = d->ReadRef();
code->untag()->inlined_id_to_function_ =
static_cast<ArrayPtr>(d->ReadRef());
code->untag()->code_source_map_ =
static_cast<CodeSourceMapPtr>(d->ReadRef());
#if !defined(DART_PRECOMPILED_RUNTIME)
if (d->kind() == Snapshot::kFullJIT) {
code->untag()->deopt_info_array_ = static_cast<ArrayPtr>(d->ReadRef());
code->untag()->static_calls_target_table_ =
static_cast<ArrayPtr>(d->ReadRef());
}
#endif // !DART_PRECOMPILED_RUNTIME
#if !defined(PRODUCT)
code->untag()->return_address_metadata_ = d->ReadRef();
code->untag()->var_descriptors_ = LocalVarDescriptors::null();
code->untag()->comments_ = FLAG_code_comments
? static_cast<ArrayPtr>(d->ReadRef())
: Array::null();
code->untag()->compile_timestamp_ = 0;
#endif
}
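// After all Code objects are filled: finish instruction bookkeeping, notify
// code observers (JIT, non-product only) and, when the relevant flags are
// set, disassemble the newly loaded code.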
void PostLoad(Deserializer* d, const Array& refs, bool canonicalize) {
d->EndInstructions(refs, start_index_, stop_index_);
#if !defined(PRODUCT)
if (!CodeObservers::AreActive() && !FLAG_support_disassembler) return;
#endif
Code& code = Code::Handle(d->zone());
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
Object& owner = Object::Handle(d->zone());
#endif
for (intptr_t id = start_index_; id < stop_index_; id++) {
code ^= refs.At(id);
#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(PRODUCT)
if (CodeObservers::AreActive()) {
Code::NotifyCodeObservers(code, code.is_optimized());
}
#endif
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
owner = code.owner();
if (owner.IsFunction()) {
if ((FLAG_disassemble ||
(code.is_optimized() && FLAG_disassemble_optimized)) &&
compiler::PrintFilter::ShouldPrint(Function::Cast(owner))) {
Disassembler::DisassembleCode(Function::Cast(owner), code,
code.is_optimized());
}
} else if (FLAG_disassemble_stubs) {
Disassembler::DisassembleStub(code.Name(), code);
}
#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
}
}
private:
intptr_t deferred_start_index_;
intptr_t deferred_stop_index_;
};
#if !defined(DART_PRECOMPILED_RUNTIME)
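// Serializes ObjectPool objects. In a bare-instructions full-AOT snapshot the
// pool is treated as weak: entries whose targets were not otherwise reached
// are written as null rather than forcing the target into the snapshot.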
class ObjectPoolSerializationCluster : public SerializationCluster {
public:
ObjectPoolSerializationCluster() : SerializationCluster("ObjectPool") {}
~ObjectPoolSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
ObjectPoolPtr pool = ObjectPool::RawCast(object);
objects_.Add(pool);
if (s->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions) {
// Treat pool as weak.
} else {
const intptr_t length = pool->untag()->length_;
uint8_t* entry_bits = pool->untag()->entry_bits();
for (intptr_t i = 0; i < length; i++) {
auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
if (entry_type == ObjectPool::EntryType::kTaggedObject) {
s->Push(pool->untag()->data()[i].raw_obj_);
}
}
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kObjectPoolCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
ObjectPoolPtr pool = objects_[i];
s->AssignRef(pool);
AutoTraceObject(pool);
const intptr_t length = pool->untag()->length_;
s->WriteUnsigned(length);
target_memory_size_ += compiler::target::ObjectPool::InstanceSize(length);
}
}
void WriteFill(Serializer* s) {
bool weak = s->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions;
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
ObjectPoolPtr pool = objects_[i];
AutoTraceObject(pool);
const intptr_t length = pool->untag()->length_;
s->WriteUnsigned(length);
uint8_t* entry_bits = pool->untag()->entry_bits();
for (intptr_t j = 0; j < length; j++) {
s->Write<uint8_t>(entry_bits[j]);
UntaggedObjectPool::Entry& entry = pool->untag()->data()[j];
switch (ObjectPool::TypeBits::decode(entry_bits[j])) {
case ObjectPool::EntryType::kTaggedObject: {
if ((entry.raw_obj_ == StubCode::CallNoScopeNative().ptr()) ||
(entry.raw_obj_ == StubCode::CallAutoScopeNative().ptr())) {
// Natives can run while precompiling, becoming linked and
// switching their stub. Reset to the initial stub used for
// lazy-linking.
s->WriteElementRef(StubCode::CallBootstrapNative().ptr(), j);
break;
}
if (weak && !s->HasRef(entry.raw_obj_)) {
// Any value will do, but null has the shortest id.
s->WriteElementRef(Object::null(), j);
} else {
s->WriteElementRef(entry.raw_obj_, j);
}
break;
}
case ObjectPool::EntryType::kImmediate: {
s->Write<intptr_t>(entry.raw_value_);
break;
}
case ObjectPool::EntryType::kNativeFunction:
case ObjectPool::EntryType::kNativeFunctionWrapper: {
// Write nothing. Will initialize with the lazy link entry.
break;
}
default:
UNREACHABLE();
}
}
}
}
private:
GrowableArray<ObjectPoolPtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
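// Reads ObjectPool objects back. The stream mirrors the writer above: the
// alloc section carries one length per pool; the fill section repeats the
// length and then, for each entry, a byte of entry bits followed by an object
// ref (kTaggedObject), a raw immediate (kImmediate), or nothing for native
// entries, which are re-initialized to the lazy-link entry point.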
class ObjectPoolDeserializationCluster : public DeserializationCluster {
public:
ObjectPoolDeserializationCluster() : DeserializationCluster("ObjectPool") {}
~ObjectPoolDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
const intptr_t length = d->ReadUnsigned();
d->AssignRef(
AllocateUninitialized(old_space, ObjectPool::InstanceSize(length)));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
fill_position_ = d->position();
for (intptr_t id = start_index_; id < stop_index_; id++) {
const intptr_t length = d->ReadUnsigned();
ObjectPoolPtr pool = static_cast<ObjectPoolPtr>(d->Ref(id));
Deserializer::InitializeHeader(pool, kObjectPoolCid,
ObjectPool::InstanceSize(length));
pool->untag()->length_ = length;
for (intptr_t j = 0; j < length; j++) {
const uint8_t entry_bits = d->Read<uint8_t>();
pool->untag()->entry_bits()[j] = entry_bits;
UntaggedObjectPool::Entry& entry = pool->untag()->data()[j];
switch (ObjectPool::TypeBits::decode(entry_bits)) {
case ObjectPool::EntryType::kTaggedObject:
entry.raw_obj_ = d->ReadRef();
break;
case ObjectPool::EntryType::kImmediate:
entry.raw_value_ = d->Read<intptr_t>();
break;
case ObjectPool::EntryType::kNativeFunction: {
// Read nothing. Initialize with the lazy link entry.
uword new_entry = NativeEntry::LinkNativeCallEntry();
entry.raw_value_ = static_cast<intptr_t>(new_entry);
break;
}
default:
UNREACHABLE();
}
}
}
}
void PostLoad(Deserializer* d, const Array& refs, bool canonicalize) {
if (d->is_non_root_unit()) {
// If this is a non-root unit, some pool entries that should be canonical
// may have been replaced with other objects during canonicalization.
intptr_t restore_position = d->position();
d->set_position(fill_position_);
ObjectPool& pool = ObjectPool::Handle();
Object& entry = Object::Handle();
for (intptr_t id = start_index_; id < stop_index_; id++) {
pool ^= refs.At(id);
const intptr_t length = d->ReadUnsigned();
for (intptr_t j = 0; j < length; j++) {
const uint8_t entry_bits = d->Read<uint8_t>();
switch (ObjectPool::TypeBits::decode(entry_bits)) {
case ObjectPool::EntryType::kTaggedObject:
entry = refs.At(d->ReadUnsigned());
pool.SetObjectAt(j, entry);
break;
case ObjectPool::EntryType::kImmediate:
d->Read<intptr_t>();
break;
case ObjectPool::EntryType::kNativeFunction: {
// Read nothing.
break;
}
default:
UNREACHABLE();
}
}
}
d->set_position(restore_position);
}
}
private:
intptr_t fill_position_ = 0;
};
#if defined(DART_PRECOMPILER)
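// WeakSerializationReferences are not written into the snapshot themselves.
// ForwardWeakRefs points each reference at the object id of its target when
// the target was reached, or at its replacement otherwise, so other clusters
// serialize the forwarded id instead.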
class WeakSerializationReferenceSerializationCluster
: public SerializationCluster {
public:
WeakSerializationReferenceSerializationCluster()
: SerializationCluster(
"WeakSerializationReference",
compiler::target::WeakSerializationReference::InstanceSize()) {}
~WeakSerializationReferenceSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
ASSERT(s->kind() == Snapshot::kFullAOT);
WeakSerializationReferencePtr weak =
WeakSerializationReference::RawCast(object);
objects_.Add(weak);
}
intptr_t FinalizeWeak(Serializer* s) { return objects_.length(); }
void WriteAlloc(Serializer* s) {
s->WriteCid(kWeakSerializationReferenceCid);
}
void ForwardWeakRefs(Serializer* s) {
Heap* heap = s->heap();
for (intptr_t i = 0; i < objects_.length(); i++) {
WeakSerializationReferencePtr weak = objects_[i];
intptr_t id = heap->GetObjectId(weak->untag()->target());
if (id == kUnreachableReference) {
id = heap->GetObjectId(weak->untag()->replacement());
ASSERT(id != kUnreachableReference);
}
ASSERT(IsAllocatedReference(id));
heap->SetObjectId(weak, id);
}
}
void WriteFill(Serializer* s) {}
private:
GrowableArray<WeakSerializationReferencePtr> objects_;
};
#endif
#if defined(DART_PRECOMPILED_RUNTIME)
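// Nothing is allocated or filled on the read side: weak serialization
// references were fully forwarded when the snapshot was written.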
class WeakSerializationReferenceDeserializationCluster
: public DeserializationCluster {
public:
WeakSerializationReferenceDeserializationCluster()
: DeserializationCluster("WeakSerializationReference") {}
~WeakSerializationReferenceDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {}
void ReadFill(Deserializer* d, bool stamp_canonical) {}
};
#endif
#if !defined(DART_PRECOMPILED_RUNTIME)
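// PcDescriptors are written as an opaque payload: a length in the alloc
// section and the raw encoded byte stream in the fill section.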
class PcDescriptorsSerializationCluster : public SerializationCluster {
public:
PcDescriptorsSerializationCluster() : SerializationCluster("PcDescriptors") {}
~PcDescriptorsSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
PcDescriptorsPtr desc = PcDescriptors::RawCast(object);
objects_.Add(desc);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kPcDescriptorsCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
PcDescriptorsPtr desc = objects_[i];
s->AssignRef(desc);
AutoTraceObject(desc);
const intptr_t length = desc->untag()->length_;
s->WriteUnsigned(length);
target_memory_size_ +=
compiler::target::PcDescriptors::InstanceSize(length);
}
}
void WriteFill(Serializer* s) {
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
PcDescriptorsPtr desc = objects_[i];
AutoTraceObject(desc);
const intptr_t length = desc->untag()->length_;
s->WriteUnsigned(length);
uint8_t* cdata = reinterpret_cast<uint8_t*>(desc->untag()->data());
s->WriteBytes(cdata, length);
}
}
private:
GrowableArray<PcDescriptorsPtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class PcDescriptorsDeserializationCluster : public DeserializationCluster {
public:
PcDescriptorsDeserializationCluster()
: DeserializationCluster("PcDescriptors") {}
~PcDescriptorsDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
const intptr_t length = d->ReadUnsigned();
d->AssignRef(AllocateUninitialized(old_space,
PcDescriptors::InstanceSize(length)));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
for (intptr_t id = start_index_; id < stop_index_; id++) {
const intptr_t length = d->ReadUnsigned();
PcDescriptorsPtr desc = static_cast<PcDescriptorsPtr>(d->Ref(id));
Deserializer::InitializeHeader(desc, kPcDescriptorsCid,
PcDescriptors::InstanceSize(length));
desc->untag()->length_ = length;
uint8_t* cdata = reinterpret_cast<uint8_t*>(desc->untag()->data());
d->ReadBytes(cdata, length);
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
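// CodeSourceMaps use the same raw-bytes layout as PcDescriptors above.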
class CodeSourceMapSerializationCluster : public SerializationCluster {
public:
CodeSourceMapSerializationCluster() : SerializationCluster("CodeSourceMap") {}
~CodeSourceMapSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
CodeSourceMapPtr map = CodeSourceMap::RawCast(object);
objects_.Add(map);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kCodeSourceMapCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
CodeSourceMapPtr map = objects_[i];
s->AssignRef(map);
AutoTraceObject(map);
const intptr_t length = map->untag()->length_;
s->WriteUnsigned(length);
target_memory_size_ +=
compiler::target::CodeSourceMap::InstanceSize(length);
}
}
void WriteFill(Serializer* s) {
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
CodeSourceMapPtr map = objects_[i];
AutoTraceObject(map);
const intptr_t length = map->untag()->length_;
s->WriteUnsigned(length);
uint8_t* cdata = reinterpret_cast<uint8_t*>(map->untag()->data());
s->WriteBytes(cdata, length);
}
}
private:
GrowableArray<CodeSourceMapPtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class CodeSourceMapDeserializationCluster : public DeserializationCluster {
public:
CodeSourceMapDeserializationCluster()
: DeserializationCluster("CodeSourceMap") {}
~CodeSourceMapDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool is_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
const intptr_t length = d->ReadUnsigned();
d->AssignRef(AllocateUninitialized(old_space,
CodeSourceMap::InstanceSize(length)));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool is_canonical) {
for (intptr_t id = start_index_; id < stop_index_; id++) {
const intptr_t length = d->ReadUnsigned();
CodeSourceMapPtr map = static_cast<CodeSourceMapPtr>(d->Ref(id));
Deserializer::InitializeHeader(map, kCodeSourceMapCid,
CodeSourceMap::InstanceSize(length));
map->untag()->length_ = length;
uint8_t* cdata = reinterpret_cast<uint8_t*>(map->untag()->data());
d->ReadBytes(cdata, length);
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
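// CompressedStackMaps keep their payload length inside flags_and_size_, so
// the alloc section writes just the decoded length (for sizing) while the
// fill section writes the full flags_and_size_ word followed by the raw
// bytes.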
class CompressedStackMapsSerializationCluster : public SerializationCluster {
public:
CompressedStackMapsSerializationCluster()
: SerializationCluster("CompressedStackMaps") {}
~CompressedStackMapsSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
CompressedStackMapsPtr desc = CompressedStackMaps::RawCast(object);
objects_.Add(desc);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kCompressedStackMapsCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
CompressedStackMapsPtr map = objects_[i];
s->AssignRef(map);
AutoTraceObject(map);
const intptr_t length = UntaggedCompressedStackMaps::SizeField::decode(
map->untag()->flags_and_size_);
s->WriteUnsigned(length);
target_memory_size_ +=
compiler::target::CompressedStackMaps::InstanceSize(length);
}
}
void WriteFill(Serializer* s) {
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
CompressedStackMapsPtr map = objects_[i];
AutoTraceObject(map);
s->WriteUnsigned(map->untag()->flags_and_size_);
const intptr_t length = UntaggedCompressedStackMaps::SizeField::decode(
map->untag()->flags_and_size_);
uint8_t* cdata = reinterpret_cast<uint8_t*>(map->untag()->data());
s->WriteBytes(cdata, length);
}
}
private:
GrowableArray<CompressedStackMapsPtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class CompressedStackMapsDeserializationCluster
: public DeserializationCluster {
public:
CompressedStackMapsDeserializationCluster()
: DeserializationCluster("CompressedStackMaps") {}
~CompressedStackMapsDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool is_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
const intptr_t length = d->ReadUnsigned();
d->AssignRef(AllocateUninitialized(
old_space, CompressedStackMaps::InstanceSize(length)));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool is_canonical) {
for (intptr_t id = start_index_; id < stop_index_; id++) {
const intptr_t flags_and_size = d->ReadUnsigned();
const intptr_t length =
UntaggedCompressedStackMaps::SizeField::decode(flags_and_size);
CompressedStackMapsPtr map =
static_cast<CompressedStackMapsPtr>(d->Ref(id));
Deserializer::InitializeHeader(map, kCompressedStackMapsCid,
CompressedStackMaps::InstanceSize(length));
map->untag()->flags_and_size_ = flags_and_size;
uint8_t* cdata = reinterpret_cast<uint8_t*>(map->untag()->data());
d->ReadBytes(cdata, length);
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(DART_COMPRESSED_POINTERS)
// PcDescriptor, CompressedStackMaps, OneByteString, TwoByteString
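// These objects are not carried in the cluster payload; they live in the
// read-only data image and are referenced here only by their image offsets,
// written as deltas between consecutive objects.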
class RODataSerializationCluster : public SerializationCluster {
public:
RODataSerializationCluster(Zone* zone, const char* type, intptr_t cid)
: SerializationCluster(ImageWriter::TagObjectTypeAsReadOnly(zone, type)),
cid_(cid),
objects_(),
type_(type) {}
~RODataSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
// A string's hash must already be computed when we write it because it
// will be loaded into read-only memory. Extra bytes due to allocation
// rounding need to be deterministically set for reliable deduplication in
// shared images.
if (object->untag()->InVMIsolateHeap() ||
s->heap()->old_space()->IsObjectFromImagePages(object)) {
// This object is already read-only.
} else {
Object::FinalizeReadOnlyObject(object);
}
objects_.Add(object);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(cid_);
intptr_t count = objects_.length();
s->WriteUnsigned(count);
uint32_t running_offset = 0;
for (intptr_t i = 0; i < count; i++) {
ObjectPtr object = objects_[i];
s->AssignRef(object);
if (cid_ == kOneByteStringCid || cid_ == kTwoByteStringCid) {
s->TraceStartWritingObject(type_, object, String::RawCast(object));
} else {
s->TraceStartWritingObject(type_, object, nullptr);
}
uint32_t offset = s->GetDataOffset(object);
s->TraceDataOffset(offset);
ASSERT(Utils::IsAligned(
offset, compiler::target::ObjectAlignment::kObjectAlignment));
ASSERT(offset > running_offset);
s->WriteUnsigned((offset - running_offset) >>
compiler::target::ObjectAlignment::kObjectAlignmentLog2);
running_offset = offset;
s->TraceEndWritingObject();
}
}
void WriteFill(Serializer* s) {
// No-op.
}
private:
const intptr_t cid_;
GrowableArray<ObjectPtr> objects_;
const char* const type_;
};
#endif // !DART_PRECOMPILED_RUNTIME && !DART_COMPRESSED_POINTERS
#if !defined(DART_COMPRESSED_POINTERS)
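// Recovers RO data objects by accumulating the same offset deltas and
// resolving each offset against the data image; no per-object payload is
// read.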
class RODataDeserializationCluster : public DeserializationCluster {
public:
explicit RODataDeserializationCluster(intptr_t cid)
: DeserializationCluster("ROData"), cid_(cid) {}
~RODataDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
intptr_t count = d->ReadUnsigned();
uint32_t running_offset = 0;
for (intptr_t i = 0; i < count; i++) {
running_offset += d->ReadUnsigned() << kObjectAlignmentLog2;
d->AssignRef(d->GetObjectAt(running_offset));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
// No-op.
}
void PostLoad(Deserializer* d, const Array& refs, bool canonicalize) {
if (canonicalize) {
FATAL("Cannot recanonicalize RO objects.");
}
}
private:
const intptr_t cid_;
};
#endif // !DART_COMPRESSED_POINTERS
#if !defined(DART_PRECOMPILED_RUNTIME)
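// ExceptionHandlers are written as a ref to the handled-types array plus the
// fixed-width fields of each ExceptionHandlerInfo entry.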
class ExceptionHandlersSerializationCluster : public SerializationCluster {
public:
ExceptionHandlersSerializationCluster()
: SerializationCluster("ExceptionHandlers") {}
~ExceptionHandlersSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
ExceptionHandlersPtr handlers = ExceptionHandlers::RawCast(object);
objects_.Add(handlers);
s->Push(handlers->untag()->handled_types_data_);
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kExceptionHandlersCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
ExceptionHandlersPtr handlers = objects_[i];
s->AssignRef(handlers);
AutoTraceObject(handlers);
const intptr_t length = handlers->untag()->num_entries_;
s->WriteUnsigned(length);
target_memory_size_ +=
compiler::target::ExceptionHandlers::InstanceSize(length);
}
}
void WriteFill(Serializer* s) {
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
ExceptionHandlersPtr handlers = objects_[i];
AutoTraceObject(handlers);
const intptr_t length = handlers->untag()->num_entries_;
s->WriteUnsigned(length);
WriteField(handlers, handled_types_data_);
for (intptr_t j = 0; j < length; j++) {
const ExceptionHandlerInfo& info = handlers->untag()->data()[j];
s->Write<uint32_t>(info.handler_pc_offset);
s->Write<int16_t>(info.outer_try_index);
s->Write<int8_t>(info.needs_stacktrace);
s->Write<int8_t>(info.has_catch_all);
s->Write<int8_t>(info.is_generated);
}
}
}
private:
GrowableArray<ExceptionHandlersPtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class ExceptionHandlersDeserializationCluster : public DeserializationCluster {
public:
ExceptionHandlersDeserializationCluster()
: DeserializationCluster("ExceptionHandlers") {}
~ExceptionHandlersDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
const intptr_t length = d->ReadUnsigned();
d->AssignRef(AllocateUninitialized(
old_space, ExceptionHandlers::InstanceSize(length)));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
for (intptr_t id = start_index_; id < stop_index_; id++) {
ExceptionHandlersPtr handlers =
static_cast<ExceptionHandlersPtr>(d->Ref(id));
const intptr_t length = d->ReadUnsigned();
Deserializer::InitializeHeader(handlers, kExceptionHandlersCid,
ExceptionHandlers::InstanceSize(length));
handlers->untag()->num_entries_ = length;
handlers->untag()->handled_types_data_ =
static_cast<ArrayPtr>(d->ReadRef());
for (intptr_t j = 0; j < length; j++) {
ExceptionHandlerInfo& info = handlers->untag()->data()[j];
info.handler_pc_offset = d->Read<uint32_t>();
info.outer_try_index = d->Read<int16_t>();
info.needs_stacktrace = d->Read<int8_t>();
info.has_catch_all = d->Read<int8_t>();
info.is_generated = d->Read<int8_t>();
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
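// Contexts are written as their parent ref followed by one ref per variable
// slot.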
class ContextSerializationCluster : public SerializationCluster {
public:
ContextSerializationCluster() : SerializationCluster("Context") {}
~ContextSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
ContextPtr context = Context::RawCast(object);
objects_.Add(context);
s->Push(context->untag()->parent_);
const intptr_t length = context->untag()->num_variables_;
for (intptr_t i = 0; i < length; i++) {
s->Push(context->untag()->data()[i]);
}
}
void WriteAlloc(Serializer* s) {
s->WriteCid(kContextCid);
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
ContextPtr context = objects_[i];
s->AssignRef(context);
AutoTraceObject(context);
const intptr_t length = context->untag()->num_variables_;
s->WriteUnsigned(length);
target_memory_size_ += compiler::target::Context::InstanceSize(length);
}
}
void WriteFill(Serializer* s) {
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
ContextPtr context = objects_[i];
AutoTraceObject(context);
const intptr_t length = context->untag()->num_variables_;
s->WriteUnsigned(length);
WriteField(context, parent_);
for (intptr_t j = 0; j < length; j++) {
s->WriteElementRef(context->untag()->data()[j], j);
}
}
}
private:
GrowableArray<ContextPtr> objects_;
};
#endif // !DART_PRECOMPILED_RUNTIME
class ContextDeserializationCluster : public DeserializationCluster {
public:
ContextDeserializationCluster() : DeserializationCluster("Context") {}
~ContextDeserializationCluster() {}
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
start_index_ = d->next_index();
PageSpace* old_space = d->heap()->old_space();
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
const intptr_t length = d->ReadUnsigned();
d->AssignRef(
AllocateUninitialized(old_space, Context::InstanceSize(length)));
}
stop_index_ = d->next_index();
}
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
for (intptr_t id = start_index_; id < stop_index_; id++) {
ContextPtr context = static_cast<ContextPtr>(d->Ref(id));
const intptr_t length = d->ReadUnsigned();
Deserializer::InitializeHeader(context, kContextCid,
Context::InstanceSize(length));
context->untag()->num_variables_ = length;