[vm] Free the readonly header bit so it can be reused for other purposes.
This is a reland of https://dart-review.googlesource.com/c/sdk/+/97340, rebased on top of the removal of the GraphMarked bit, which was not compatible with this ReadOnly->InVMIsolateHeap change (because write pages are not covered by the Contains check).
Change-Id: I34c6421afb4baeafa5a449787020dab9fa800d05
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/97545
Reviewed-by: Ryan Macnak <rmacnak@google.com>
Commit-Queue: Alexander Aprelev <aam@google.com>
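In essence (a minimal sketch, not the exact SDK code, reusing the existing runtime/vm declarations): rather than recording read-only status in a per-object ReadOnlyBit in the header tags, the bit is reclaimed into ReservedBits and read-only status is derived from where the object lives. Objects in the VM isolate heap are detected with a heap containment check, while objects on image pages from AppJIT/AppAOT snapshots are detected by walking the page space's image-page list. The helper IsReadOnlyForSnapshot below is hypothetical and only illustrates how the snapshot writer combines the two checks.

// Minimal sketch of the placement-based checks; IsReadOnlyForSnapshot is a
// hypothetical helper shown only to illustrate how the two checks combine.
bool RawObject::InVMIsolateHeap() const {
  // Permanently read-only objects live in the shared VM isolate heap.
  return Dart::vm_isolate()->heap()->Contains(ToAddr(this));
}

bool PageSpace::IsObjectFromImagePages(RawObject* object) {
  // Image pages (AppJIT/AppAOT) are mapped read-only but are not part of the
  // VM isolate heap, so the page space tracks them in a separate list.
  uword addr = RawObject::ToAddr(object);
  for (HeapPage* page = image_pages_; page != nullptr; page = page->next()) {
    if (page->Contains(addr)) return true;
  }
  return false;
}

static bool IsReadOnlyForSnapshot(Serializer* s, RawObject* object) {
  // Hypothetical: combines both placement checks the way the snapshot
  // writer does when deciding whether an object is already read-only.
  return object->InVMIsolateHeap() ||
         s->isolate()->heap()->old_space()->IsObjectFromImagePages(object);
}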
diff --git a/runtime/vm/clustered_snapshot.cc b/runtime/vm/clustered_snapshot.cc
index a849f2d..f460574 100644
--- a/runtime/vm/clustered_snapshot.cc
+++ b/runtime/vm/clustered_snapshot.cc
@@ -76,7 +76,6 @@
uint32_t tags = 0;
tags = RawObject::ClassIdTag::update(class_id, tags);
tags = RawObject::SizeTag::update(size, tags);
- tags = RawObject::ReadOnlyBit::update(false, tags);
tags = RawObject::CanonicalBit::update(is_canonical, tags);
tags = RawObject::OldBit::update(true, tags);
tags = RawObject::OldAndNotMarkedBit::update(true, tags);
@@ -1761,7 +1760,8 @@
// will be loaded into read-only memory. Extra bytes due to allocation
// rounding need to be deterministically set for reliable deduplication in
// shared images.
- if (object->IsReadOnly()) {
+ if (object->InVMIsolateHeap() ||
+ s->isolate()->heap()->old_space()->IsObjectFromImagePages(object)) {
// This object is already read-only.
} else {
Object::FinalizeReadOnlyObject(object);
diff --git a/runtime/vm/compiler/aot/precompiler.cc b/runtime/vm/compiler/aot/precompiler.cc
index 31b9c6b..4289942 100644
--- a/runtime/vm/compiler/aot/precompiler.cc
+++ b/runtime/vm/compiler/aot/precompiler.cc
@@ -1506,7 +1506,7 @@
for (intptr_t i = 0; i < types.length(); i++) {
const AbstractType& type = types.At(i);
- if (type.IsReadOnly()) {
+ if (type.InVMIsolateHeap()) {
// The only important types in the vm isolate are "dynamic"/"void", which
// will get their optimized top-type testing stub installed at creation.
continue;
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc b/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
index 16dddf6..47ba86e 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
@@ -33,7 +33,7 @@
const auto& stub =
Code::ZoneHandle(object_store->write_barrier_wrappers_stub());
- if (!stub.IsReadOnly()) {
+ if (!stub.InVMIsolateHeap()) {
assembler_->generate_invoke_write_barrier_wrapper_ =
[&](Condition condition, Register reg) {
const intptr_t offset_into_target =
@@ -46,7 +46,7 @@
const auto& array_stub =
Code::ZoneHandle(object_store->array_write_barrier_stub());
- if (!array_stub.IsReadOnly()) {
+ if (!array_stub.InVMIsolateHeap()) {
assembler_->generate_invoke_array_write_barrier_ =
[&](Condition condition) {
AddPcRelativeCallStubTarget(array_stub);
@@ -952,7 +952,7 @@
RawPcDescriptors::Kind kind,
LocationSummary* locs) {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
- !stub.IsReadOnly()) {
+ !stub.InVMIsolateHeap()) {
AddPcRelativeCallStubTarget(stub);
__ GenerateUnRelocatedPcRelativeCall();
EmitCallsiteMetadata(token_pos, DeoptId::kNone, kind, locs);
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc b/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
index d01b918..fe52d41 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
@@ -32,7 +32,7 @@
const auto& stub =
Code::ZoneHandle(object_store->write_barrier_wrappers_stub());
- if (!stub.IsReadOnly()) {
+ if (!stub.InVMIsolateHeap()) {
assembler_->generate_invoke_write_barrier_wrapper_ = [&](Register reg) {
const intptr_t offset_into_target =
Thread::WriteBarrierWrappersOffsetForRegister(reg);
@@ -43,7 +43,7 @@
const auto& array_stub =
Code::ZoneHandle(object_store->array_write_barrier_stub());
- if (!array_stub.IsReadOnly()) {
+ if (!array_stub.InVMIsolateHeap()) {
assembler_->generate_invoke_array_write_barrier_ = [&]() {
AddPcRelativeCallStubTarget(array_stub);
assembler_->GenerateUnRelocatedPcRelativeCall();
@@ -945,7 +945,7 @@
RawPcDescriptors::Kind kind,
LocationSummary* locs) {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
- !stub.IsReadOnly()) {
+ !stub.InVMIsolateHeap()) {
AddPcRelativeCallStubTarget(stub);
__ GenerateUnRelocatedPcRelativeCall();
EmitCallsiteMetadata(token_pos, DeoptId::kNone, kind, locs);
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc b/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
index e5c711e..1a1a007 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
@@ -31,7 +31,7 @@
const auto& stub =
Code::ZoneHandle(object_store->write_barrier_wrappers_stub());
- if (!stub.IsReadOnly()) {
+ if (!stub.InVMIsolateHeap()) {
assembler_->generate_invoke_write_barrier_wrapper_ = [&](Register reg) {
const intptr_t offset_into_target =
Thread::WriteBarrierWrappersOffsetForRegister(reg);
@@ -42,7 +42,7 @@
const auto& array_stub =
Code::ZoneHandle(object_store->array_write_barrier_stub());
- if (!array_stub.IsReadOnly()) {
+ if (!array_stub.InVMIsolateHeap()) {
assembler_->generate_invoke_array_write_barrier_ = [&]() {
AddPcRelativeCallStubTarget(array_stub);
assembler_->GenerateUnRelocatedPcRelativeCall();
@@ -941,7 +941,7 @@
RawPcDescriptors::Kind kind,
LocationSummary* locs) {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
- !stub.IsReadOnly()) {
+ !stub.InVMIsolateHeap()) {
AddPcRelativeCallStubTarget(stub);
__ GenerateUnRelocatedPcRelativeCall();
EmitCallsiteMetadata(token_pos, DeoptId::kNone, kind, locs);
diff --git a/runtime/vm/compiler/backend/il.cc b/runtime/vm/compiler/backend/il.cc
index 4d35d2f..10b0419 100644
--- a/runtime/vm/compiler/backend/il.cc
+++ b/runtime/vm/compiler/backend/il.cc
@@ -261,7 +261,7 @@
bool use_subtype_test,
bool include_abstract,
bool exclude_null) {
- if (dst_klass.IsReadOnly()) {
+ if (dst_klass.InVMIsolateHeap()) {
BuildRangesFor(table, ranges, dst_klass, use_subtype_test, include_abstract,
exclude_null);
return;
diff --git a/runtime/vm/compiler/backend/il.h b/runtime/vm/compiler/backend/il.h
index 7164da81..f972d31 100644
--- a/runtime/vm/compiler/backend/il.h
+++ b/runtime/vm/compiler/backend/il.h
@@ -3121,7 +3121,7 @@
argument_names_(argument_names),
arguments_(arguments),
token_pos_(token_pos) {
- ASSERT(argument_names.IsZoneHandle() || argument_names.IsReadOnly());
+ ASSERT(argument_names.IsZoneHandle() || argument_names.InVMIsolateHeap());
}
RawString* Selector() {
diff --git a/runtime/vm/compiler/backend/il_arm.cc b/runtime/vm/compiler/backend/il_arm.cc
index 5babbfe..7a5d964 100644
--- a/runtime/vm/compiler/backend/il_arm.cc
+++ b/runtime/vm/compiler/backend/il_arm.cc
@@ -3087,7 +3087,7 @@
: object_store->stack_overflow_stub_without_fpu_regs_stub());
const bool using_shared_stub = locs()->call_on_shared_slow_path();
if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
- using_shared_stub && !stub.IsReadOnly()) {
+ using_shared_stub && !stub.InVMIsolateHeap()) {
compiler->AddPcRelativeCallStubTarget(stub);
__ GenerateUnRelocatedPcRelativeCall(LS);
@@ -5796,7 +5796,7 @@
: object_store->null_error_stub_without_fpu_regs_stub());
const bool using_shared_stub = locs()->call_on_shared_slow_path();
if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
- using_shared_stub && !stub.IsReadOnly()) {
+ using_shared_stub && !stub.InVMIsolateHeap()) {
compiler->AddPcRelativeCallStubTarget(stub);
__ GenerateUnRelocatedPcRelativeCall(EQUAL);
diff --git a/runtime/vm/compiler/backend/il_arm64.cc b/runtime/vm/compiler/backend/il_arm64.cc
index 13a4e1a..7868cad 100644
--- a/runtime/vm/compiler/backend/il_arm64.cc
+++ b/runtime/vm/compiler/backend/il_arm64.cc
@@ -2826,7 +2826,7 @@
: object_store->stack_overflow_stub_without_fpu_regs_stub());
if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
- using_shared_stub && !stub.IsReadOnly()) {
+ using_shared_stub && !stub.InVMIsolateHeap()) {
compiler->AddPcRelativeCallStubTarget(stub);
__ GenerateUnRelocatedPcRelativeCall();
@@ -5104,7 +5104,7 @@
live_fpu_regs ? object_store->null_error_stub_with_fpu_regs_stub()
: object_store->null_error_stub_without_fpu_regs_stub());
if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
- using_shared_stub && !stub.IsReadOnly()) {
+ using_shared_stub && !stub.InVMIsolateHeap()) {
compiler->AddPcRelativeCallStubTarget(stub);
compiler->assembler()->GenerateUnRelocatedPcRelativeCall();
return;
diff --git a/runtime/vm/compiler/cha.cc b/runtime/vm/compiler/cha.cc
index 7856c72..53e2480 100644
--- a/runtime/vm/compiler/cha.cc
+++ b/runtime/vm/compiler/cha.cc
@@ -39,7 +39,7 @@
// read-only.
// TODO(fschneider): Enable tracking of CHA dependent code for VM heap
// classes.
- if (cls.IsReadOnly()) return true;
+ if (cls.InVMIsolateHeap()) return true;
if (cls.IsObjectClass()) {
// Class Object has subclasses, although we do not keep track of them.
@@ -58,7 +58,7 @@
bool CHA::ConcreteSubclasses(const Class& cls,
GrowableArray<intptr_t>* class_ids) {
- if (cls.IsReadOnly()) return false;
+ if (cls.InVMIsolateHeap()) return false;
if (cls.IsObjectClass()) return false;
if (!cls.is_abstract()) {
@@ -87,7 +87,7 @@
// read-only.
// TODO(fschneider): Enable tracking of CHA dependent code for VM heap
// classes.
- if (cls.IsReadOnly()) return true;
+ if (cls.InVMIsolateHeap()) return true;
return cls.is_implemented();
}
@@ -129,7 +129,7 @@
// read-only.
// TODO(fschneider): Enable tracking of CHA dependent code for VM heap
// classes.
- if (cls.IsReadOnly()) return true;
+ if (cls.InVMIsolateHeap()) return true;
// Subclasses of Object are not tracked by CHA. Safely assume that overrides
// exist.
diff --git a/runtime/vm/compiler/frontend/constant_evaluator.cc b/runtime/vm/compiler/frontend/constant_evaluator.cc
index 74eb4cf..eaa3025 100644
--- a/runtime/vm/compiler/frontend/constant_evaluator.cc
+++ b/runtime/vm/compiler/frontend/constant_evaluator.cc
@@ -1045,7 +1045,7 @@
}
bool is_present = false;
- ASSERT(!script_.IsReadOnly());
+ ASSERT(!script_.InVMIsolateHeap());
if (script_.compile_time_constants() == Array::null()) {
return false;
}
@@ -1075,7 +1075,7 @@
return;
}
const intptr_t kInitialConstMapSize = 16;
- ASSERT(!script_.IsReadOnly());
+ ASSERT(!script_.InVMIsolateHeap());
if (script_.compile_time_constants() == Array::null()) {
const Array& array = Array::Handle(
HashTables::New<KernelConstantsMap>(kInitialConstMapSize, Heap::kNew));
diff --git a/runtime/vm/compiler/runtime_api.cc b/runtime/vm/compiler/runtime_api.cc
index ce66591..2406639 100644
--- a/runtime/vm/compiler/runtime_api.cc
+++ b/runtime/vm/compiler/runtime_api.cc
@@ -769,7 +769,7 @@
}
bool CanEmbedAsRawPointerInGeneratedCode(const dart::Object& obj) {
- return obj.IsSmi() || obj.IsReadOnly();
+ return obj.IsSmi() || obj.InVMIsolateHeap();
}
word ToRawPointer(const dart::Object& a) {
diff --git a/runtime/vm/dart_api_impl.cc b/runtime/vm/dart_api_impl.cc
index 0c51e1a..26de550 100644
--- a/runtime/vm/dart_api_impl.cc
+++ b/runtime/vm/dart_api_impl.cc
@@ -492,7 +492,7 @@
}
static Dart_Handle InitNewReadOnlyApiHandle(RawObject* raw) {
- ASSERT(raw->IsReadOnly());
+ ASSERT(raw->InVMIsolateHeap());
LocalHandle* ref = Dart::AllocateReadOnlyApiHandle();
ref->set_raw(raw);
return ref->apiHandle();
diff --git a/runtime/vm/hash_table.h b/runtime/vm/hash_table.h
index b84e35f..cd05fde 100644
--- a/runtime/vm/hash_table.h
+++ b/runtime/vm/hash_table.h
@@ -302,7 +302,7 @@
}
void UpdateCollisions(intptr_t collisions) const {
if (KeyTraits::ReportStats()) {
- if (data_->raw()->IsReadOnly()) {
+ if (data_->raw()->InVMIsolateHeap()) {
return;
}
AdjustSmiValueAt(kNumProbesIndex, collisions + 1);
diff --git a/runtime/vm/heap/become.cc b/runtime/vm/heap/become.cc
index 9cda40a..d6feee1 100644
--- a/runtime/vm/heap/become.cc
+++ b/runtime/vm/heap/become.cc
@@ -193,14 +193,14 @@
OS::PrintErr("BEFORE ADDRESS: %p\n", before_obj);
OS::PrintErr("BEFORE IS HEAP OBJECT: %s",
before_obj->IsHeapObject() ? "YES" : "NO");
- OS::PrintErr("BEFORE IS READ ONLY OBJECT: %s",
- before_obj->IsReadOnly() ? "YES" : "NO");
+ OS::PrintErr("BEFORE IN VMISOLATE HEAP OBJECT: %s",
+ before_obj->InVMIsolateHeap() ? "YES" : "NO");
OS::PrintErr("AFTER ADDRESS: %p\n", after_obj);
OS::PrintErr("AFTER IS HEAP OBJECT: %s",
after_obj->IsHeapObject() ? "YES" : "NO");
- OS::PrintErr("AFTER IS READ ONLY OBJECT: %s",
- after_obj->IsReadOnly() ? "YES" : "NO");
+ OS::PrintErr("AFTER IN VMISOLATE HEAP OBJECT: %s",
+ after_obj->InVMIsolateHeap() ? "YES" : "NO");
if (before_obj->IsHeapObject()) {
OS::PrintErr("BEFORE OBJECT CLASS ID=%" Pd "\n", before_obj->GetClassId());
@@ -240,7 +240,7 @@
CrashDump(before_obj, after_obj);
FATAL("become: Cannot become immediates");
}
- if (before_obj->IsReadOnly()) {
+ if (before_obj->InVMIsolateHeap()) {
CrashDump(before_obj, after_obj);
FATAL("become: Cannot forward VM heap objects");
}
diff --git a/runtime/vm/heap/pages.cc b/runtime/vm/heap/pages.cc
index 366a2e8..5ef2454 100644
--- a/runtime/vm/heap/pages.cc
+++ b/runtime/vm/heap/pages.cc
@@ -1371,6 +1371,18 @@
image_pages_ = page;
}
+bool PageSpace::IsObjectFromImagePages(dart::RawObject* object) {
+ uword object_addr = RawObject::ToAddr(object);
+ HeapPage* image_page = image_pages_;
+ while (image_page != nullptr) {
+ if (image_page->Contains(object_addr)) {
+ return true;
+ }
+ image_page = image_page->next();
+ }
+ return false;
+}
+
PageSpaceController::PageSpaceController(Heap* heap,
int heap_growth_ratio,
int heap_growth_max,
diff --git a/runtime/vm/heap/pages.h b/runtime/vm/heap/pages.h
index d7381d7..8844ee7 100644
--- a/runtime/vm/heap/pages.h
+++ b/runtime/vm/heap/pages.h
@@ -445,6 +445,8 @@
enable_concurrent_mark_ = enable_concurrent_mark;
}
+ bool IsObjectFromImagePages(RawObject* object);
+
private:
// Ids for time and data records in Heap::GCStats.
enum {
diff --git a/runtime/vm/image_snapshot.cc b/runtime/vm/image_snapshot.cc
index 37b9f7d..34ede80 100644
--- a/runtime/vm/image_snapshot.cc
+++ b/runtime/vm/image_snapshot.cc
@@ -336,7 +336,6 @@
// Write object header with the mark and read-only bits set.
uword marked_tags = obj.raw()->ptr()->tags_;
- marked_tags = RawObject::ReadOnlyBit::update(true, marked_tags);
marked_tags = RawObject::OldBit::update(true, marked_tags);
marked_tags = RawObject::OldAndNotMarkedBit::update(false, marked_tags);
marked_tags = RawObject::OldAndNotRememberedBit::update(true, marked_tags);
@@ -487,7 +486,6 @@
// Write Instructions with the mark and read-only bits set.
uword marked_tags = insns.raw_ptr()->tags_;
- marked_tags = RawObject::ReadOnlyBit::update(true, marked_tags);
marked_tags = RawObject::OldBit::update(true, marked_tags);
marked_tags = RawObject::OldAndNotMarkedBit::update(false, marked_tags);
marked_tags =
@@ -737,7 +735,6 @@
// Write Instructions with the mark and read-only bits set.
uword marked_tags = insns.raw_ptr()->tags_;
- marked_tags = RawObject::ReadOnlyBit::update(true, marked_tags);
marked_tags = RawObject::OldBit::update(true, marked_tags);
marked_tags = RawObject::OldAndNotMarkedBit::update(false, marked_tags);
marked_tags = RawObject::OldAndNotRememberedBit::update(true, marked_tags);
diff --git a/runtime/vm/message.cc b/runtime/vm/message.cc
index 7af0251..985c5b5 100644
--- a/runtime/vm/message.cc
+++ b/runtime/vm/message.cc
@@ -40,7 +40,7 @@
snapshot_length_(0),
finalizable_data_(NULL),
priority_(priority) {
- ASSERT(!raw_obj->IsHeapObject() || raw_obj->IsReadOnly());
+ ASSERT(!raw_obj->IsHeapObject() || raw_obj->InVMIsolateHeap());
ASSERT((priority == kNormalPriority) ||
(delivery_failure_port == kIllegalPort));
ASSERT(IsRaw());
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index 1b0924a..3d3fc57 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -1034,7 +1034,6 @@
ASSERT(!obj->IsForwardingCorpse());
if (!obj->IsFreeListElement()) {
obj->SetMarkBitUnsynchronized();
- obj->SetReadOnlyUnsynchronized();
Object::FinalizeReadOnlyObject(obj);
#if defined(HASH_IN_OBJECT_HEADER)
// These objects end up in the read-only VM isolate which is shared
@@ -1224,7 +1223,6 @@
reinterpret_cast<RawTypedData*>(RawObject::FromAddr(addr));
uword new_tags = RawObject::ClassIdTag::update(kTypedDataInt8ArrayCid, 0);
new_tags = RawObject::SizeTag::update(leftover_size, new_tags);
- new_tags = RawObject::ReadOnlyBit::update(false, new_tags);
const bool is_old = obj.raw()->IsOldObject();
new_tags = RawObject::OldBit::update(is_old, new_tags);
new_tags = RawObject::OldAndNotMarkedBit::update(is_old, new_tags);
@@ -1255,7 +1253,6 @@
RawObject* raw = reinterpret_cast<RawObject*>(RawObject::FromAddr(addr));
uword new_tags = RawObject::ClassIdTag::update(kInstanceCid, 0);
new_tags = RawObject::SizeTag::update(leftover_size, new_tags);
- new_tags = RawObject::ReadOnlyBit::update(false, new_tags);
const bool is_old = obj.raw()->IsOldObject();
new_tags = RawObject::OldBit::update(is_old, new_tags);
new_tags = RawObject::OldAndNotMarkedBit::update(is_old, new_tags);
@@ -2040,8 +2037,8 @@
}
#if defined(DEBUG)
-bool Object::IsReadOnly() const {
- if (FLAG_verify_handles && raw()->IsReadOnly()) {
+bool Object::InVMIsolateHeap() const {
+ if (FLAG_verify_handles && raw()->InVMIsolateHeap()) {
Heap* vm_isolate_heap = Dart::vm_isolate()->heap();
uword addr = RawObject::ToAddr(raw());
if (!vm_isolate_heap->Contains(addr)) {
@@ -2050,7 +2047,7 @@
ASSERT(vm_isolate_heap->Contains(addr));
}
}
- return raw()->IsReadOnly();
+ return raw()->InVMIsolateHeap();
}
#endif // DEBUG
@@ -2076,7 +2073,6 @@
ASSERT(class_id != kIllegalCid);
tags = RawObject::ClassIdTag::update(class_id, tags);
tags = RawObject::SizeTag::update(size, tags);
- tags = RawObject::ReadOnlyBit::update(false, tags);
const bool is_old =
(address & kNewObjectAlignmentOffset) == kOldObjectAlignmentOffset;
tags = RawObject::OldBit::update(is_old, tags);
@@ -5681,7 +5677,7 @@
ASSERT(FLAG_enable_interpreter || FLAG_use_bytecode_compiler);
ASSERT(!value.IsNull());
// Finish setting up code before activating it.
- if (!value.IsReadOnly()) {
+ if (!value.InVMIsolateHeap()) {
value.set_function(*this);
}
StorePointer(&raw_ptr()->bytecode_, value.raw());
@@ -16098,7 +16094,7 @@
return result.raw();
}
if (IsNew()) {
- ASSERT((isolate == Dart::vm_isolate()) || !IsReadOnly());
+ ASSERT((isolate == Dart::vm_isolate()) || !InVMIsolateHeap());
// Create a canonical object in old space.
result ^= Object::Clone(*this, Heap::kOld);
} else {
@@ -17443,7 +17439,7 @@
ASSERT(!IsFunctionType());
Type& type = Type::Handle(zone, cls.declaration_type());
if (type.IsNull()) {
- ASSERT(!cls.raw()->IsReadOnly() || (isolate == Dart::vm_isolate()));
+ ASSERT(!cls.raw()->InVMIsolateHeap() || (isolate == Dart::vm_isolate()));
// Canonicalize the type arguments of the supertype, if any.
TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
type_args = type_args.Canonicalize(trail);
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index 88ebd78..250aa7b 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -307,9 +307,9 @@
bool IsNew() const { return raw()->IsNewObject(); }
bool IsOld() const { return raw()->IsOldObject(); }
#if defined(DEBUG)
- bool IsReadOnly() const;
+ bool InVMIsolateHeap() const;
#else
- bool IsReadOnly() const { return raw()->IsReadOnly(); }
+ bool InVMIsolateHeap() const { return raw()->InVMIsolateHeap(); }
#endif // DEBUG
// Print the object on stdout for debugging.
diff --git a/runtime/vm/object_graph.cc b/runtime/vm/object_graph.cc
index 71f74b6..32d9b09 100644
--- a/runtime/vm/object_graph.cc
+++ b/runtime/vm/object_graph.cc
@@ -44,7 +44,7 @@
Heap* heap = isolate()->heap();
for (RawObject** current = first; current <= last; ++current) {
if ((*current)->IsHeapObject() &&
- !(*current)->IsReadOnly() &&
+ !(*current)->InVMIsolateHeap() &&
heap->GetObjectId(*current) == 0) { // not visited yet
if (!include_vm_objects_ && !IsUserClass((*current)->GetClassId())) {
continue;
@@ -513,7 +513,7 @@
virtual void VisitPointers(RawObject** first, RawObject** last) {
for (RawObject** current = first; current <= last; ++current) {
RawObject* object = *current;
- if (!object->IsHeapObject() || object->IsReadOnly()) {
+ if (!object->IsHeapObject() || object->InVMIsolateHeap()) {
// Ignore smis and objects in the VM isolate for now.
// TODO(koda): To track which field each pointer corresponds to,
// we'll need to encode which fields were omitted here.
diff --git a/runtime/vm/parser.h b/runtime/vm/parser.h
index 25dc2b6..efbb8f9 100644
--- a/runtime/vm/parser.h
+++ b/runtime/vm/parser.h
@@ -84,7 +84,7 @@
#if defined(DEBUG)
if (list == NULL) return;
for (intptr_t i = 0; i < list->length(); i++) {
- ASSERT(list->At(i)->IsZoneHandle() || list->At(i)->IsReadOnly());
+ ASSERT(list->At(i)->IsZoneHandle() || list->At(i)->InVMIsolateHeap());
}
#endif
}
diff --git a/runtime/vm/program_visitor.cc b/runtime/vm/program_visitor.cc
index 15457bf..dfc48dd 100644
--- a/runtime/vm/program_visitor.cc
+++ b/runtime/vm/program_visitor.cc
@@ -320,7 +320,7 @@
void Visit(const Function& function) {
bytecode_ = function.bytecode();
- if (!bytecode_.IsNull() && !bytecode_.IsReadOnly()) {
+ if (!bytecode_.IsNull() && !bytecode_.InVMIsolateHeap()) {
pc_descriptor_ = bytecode_.pc_descriptors();
if (!pc_descriptor_.IsNull()) {
pc_descriptor_ = DedupPcDescriptor(pc_descriptor_);
@@ -654,7 +654,7 @@
if (!function.IsSignatureFunction() &&
!function.IsClosureFunction() &&
(function.name() != Symbols::Call().raw()) &&
- !list_.IsReadOnly()) {
+ !list_.InVMIsolateHeap()) {
// Parameter types not needed for function type tests.
for (intptr_t i = 0; i < list_.Length(); i++) {
list_.SetAt(i, Object::dynamic_type());
@@ -669,7 +669,8 @@
if (!list_.IsNull()) {
// Preserve parameter names in case of recompilation for the JIT.
if (FLAG_precompiled_mode) {
- if (!function.HasOptionalNamedParameters() && !list_.IsReadOnly()) {
+ if (!function.HasOptionalNamedParameters() &&
+ !list_.InVMIsolateHeap()) {
// Parameter names not needed for resolution.
for (intptr_t i = 0; i < list_.Length(); i++) {
list_.SetAt(i, Symbols::OptimizedOut());
@@ -682,7 +683,7 @@
}
RawArray* DedupList(const Array& list) {
- if (list.IsReadOnly()) {
+ if (list.InVMIsolateHeap()) {
// Avoid using read-only VM objects for de-duplication.
return list.raw();
}
diff --git a/runtime/vm/raw_object.cc b/runtime/vm/raw_object.cc
index 1e933eb..70f1817 100644
--- a/runtime/vm/raw_object.cc
+++ b/runtime/vm/raw_object.cc
@@ -15,6 +15,10 @@
namespace dart {
+bool RawObject::InVMIsolateHeap() const {
+ return Dart::vm_isolate()->heap()->Contains(ToAddr(this));
+}
+
void RawObject::Validate(Isolate* isolate) const {
if (Object::void_class_ == reinterpret_cast<RawClass*>(kHeapObjectTag)) {
// Validation relies on properly initialized class classes. Skip if the
diff --git a/runtime/vm/raw_object.h b/runtime/vm/raw_object.h
index a130ae2..ecf3ce8 100644
--- a/runtime/vm/raw_object.h
+++ b/runtime/vm/raw_object.h
@@ -121,10 +121,10 @@
kOldBit = 3, // Incremental barrier source.
kOldAndNotRememberedBit = 4, // Generational barrier source.
kCanonicalBit = 5,
- kReadOnlyBit = 6,
- kReservedBit = 7,
+ kReservedTagPos = 6,
+ kReservedTagSize = 2,
- kSizeTagPos = 8,
+ kSizeTagPos = kReservedTagPos + kReservedTagSize, // = 8
kSizeTagSize = 8,
kClassIdTagPos = kSizeTagPos + kSizeTagSize, // = 16
kClassIdTagSize = 16,
@@ -192,15 +192,15 @@
class CanonicalBit : public BitField<uint32_t, bool, kCanonicalBit, 1> {};
- class ReservedBit : public BitField<uint32_t, bool, kReservedBit, 1> {};
-
- class ReadOnlyBit : public BitField<uint32_t, bool, kReadOnlyBit, 1> {};
-
class OldBit : public BitField<uint32_t, bool, kOldBit, 1> {};
class OldAndNotRememberedBit
: public BitField<uint32_t, bool, kOldAndNotRememberedBit, 1> {};
+ class ReservedBits
+ : public BitField<uint32_t, intptr_t, kReservedTagPos, kReservedTagSize> {
+ };
+
bool IsWellFormed() const {
uword value = reinterpret_cast<uword>(this);
return (value & kSmiTagMask) == 0 ||
@@ -283,13 +283,7 @@
void SetCanonical() { UpdateTagBit<CanonicalBit>(true); }
void ClearCanonical() { UpdateTagBit<CanonicalBit>(false); }
- // Objects in the VM-isolate's heap or on an image page from an AppJIT or
- // AppAOT snapshot are permanently read-only. They may never be modified
- // again. In particular, they cannot be marked.
- bool IsReadOnly() const { return ReadOnlyBit::decode(ptr()->tags_); }
- void SetReadOnlyUnsynchronized() {
- ptr()->tags_ = ReadOnlyBit::update(true, ptr()->tags_);
- }
+ bool InVMIsolateHeap() const;
// Support for GC remembered bit.
bool IsRemembered() const {
@@ -453,8 +447,6 @@
return reinterpret_cast<uword>(raw_obj->ptr());
}
- static bool IsReadOnly(intptr_t value) { return ReadOnlyBit::decode(value); }
-
static bool IsCanonical(intptr_t value) {
return CanonicalBit::decode(value);
}
diff --git a/runtime/vm/snapshot.cc b/runtime/vm/snapshot.cc
index 58b38e3..5937efe 100644
--- a/runtime/vm/snapshot.cc
+++ b/runtime/vm/snapshot.cc
@@ -1025,7 +1025,7 @@
// Now check if it is an object from the VM isolate. These objects are shared
// by all isolates.
- if (rawobj->IsReadOnly() && HandleVMIsolateObject(rawobj)) {
+ if (rawobj->InVMIsolateHeap() && HandleVMIsolateObject(rawobj)) {
return true;
}
diff --git a/runtime/vm/thread.cc b/runtime/vm/thread.cc
index b204514..2238c34 100644
--- a/runtime/vm/thread.cc
+++ b/runtime/vm/thread.cc
@@ -740,7 +740,7 @@
// [object] is in fact a [Code] object.
if (object.IsCode()) {
#define COMPUTE_OFFSET(type_name, member_name, expr, default_init_value) \
- ASSERT((expr)->IsReadOnly()); \
+ ASSERT((expr)->InVMIsolateHeap()); \
if (object.raw() == expr) { \
return Thread::member_name##offset(); \
}