[vm/concurrency] Move background compiler related state from `Isolate` to `IsolateGroup`
As part of making the compiler independent of `Isolate` we have to move
various state from `Isolate` to `IsolateGroup`. This CL moves background
compiler and related state.
One background compiler will be shared for all isolates within a group.
Issue https://github.com/dart-lang/sdk/issues/36097
Fixes https://github.com/dart-lang/sdk/issues/44938
TEST=Mainly refactoring, stress tests for --enable-isolate-groups with JIT will come in the future.
Change-Id: I66e863651d0b37c431ce1864e505b5413850cbde
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/183401
Reviewed-by: Ryan Macnak <rmacnak@google.com>
Reviewed-by: Alexander Aprelev <aam@google.com>
Commit-Queue: Martin Kustermann <kustermann@google.com>
diff --git a/runtime/observatory/tests/service/get_vm_timeline_rpc_test.dart b/runtime/observatory/tests/service/get_vm_timeline_rpc_test.dart
index 269b771..80f8741 100644
--- a/runtime/observatory/tests/service/get_vm_timeline_rpc_test.dart
+++ b/runtime/observatory/tests/service/get_vm_timeline_rpc_test.dart
@@ -111,7 +111,7 @@
Map arguments = event['args'];
expect(arguments, isA<Map>());
expect(arguments['isolateGroupId'], isA<String>());
- if (event['cat'] != 'GC') {
+ if (!const ['GC', 'Compiler', 'CompilerVerbose'].contains(event['cat'])) {
expect(arguments['isolateId'], isA<String>());
}
}
diff --git a/runtime/observatory_2/tests/service_2/get_vm_timeline_rpc_test.dart b/runtime/observatory_2/tests/service_2/get_vm_timeline_rpc_test.dart
index 8dc5899..1be667e 100644
--- a/runtime/observatory_2/tests/service_2/get_vm_timeline_rpc_test.dart
+++ b/runtime/observatory_2/tests/service_2/get_vm_timeline_rpc_test.dart
@@ -111,7 +111,7 @@
Map arguments = event['args'];
expect(arguments, isA<Map>());
expect(arguments['isolateGroupId'], isA<String>());
- if (event['cat'] != 'GC') {
+ if (!const ['GC', 'Compiler', 'CompilerVerbose'].contains(event['cat'])) {
expect(arguments['isolateId'], isA<String>());
}
}
diff --git a/runtime/vm/class_finalizer.cc b/runtime/vm/class_finalizer.cc
index 0dfec5c..95ae6e3 100644
--- a/runtime/vm/class_finalizer.cc
+++ b/runtime/vm/class_finalizer.cc
@@ -1309,7 +1309,6 @@
void ClassFinalizer::SortClasses() {
auto T = Thread::Current();
auto Z = T->zone();
- auto I = T->isolate();
auto IG = T->isolate_group();
// Prevent background compiler from adding deferred classes or canonicalizing
@@ -1384,11 +1383,11 @@
ASSERT(next_new_cid == num_cids);
RemapClassIds(old_to_new_cid.get());
RehashTypes(); // Types use cid's as part of their hashes.
- I->RehashConstants(); // Const objects use cid's as part of their hashes.
+ IG->RehashConstants(); // Const objects use cid's as part of their hashes.
// Ensure any newly spawned isolate will apply this permutation map right
// after kernel loading.
- I->group()->source()->cid_permutation_map = std::move(old_to_new_cid);
+ IG->source()->cid_permutation_map = std::move(old_to_new_cid);
}
class CidRewriteVisitor : public ObjectVisitor {
@@ -1551,7 +1550,6 @@
void ClassFinalizer::RehashTypes() {
auto T = Thread::Current();
auto Z = T->zone();
- auto I = T->isolate();
auto IG = T->isolate_group();
// Clear all cached hash values.
@@ -1635,7 +1633,7 @@
// The canonical constant tables use canonical hashcodes which can change
// due to cid-renumbering.
- I->RehashConstants();
+ IG->RehashConstants();
dict_size = Utils::RoundUpToPowerOfTwo(typeargs.Length() * 4 / 3);
CanonicalTypeArgumentsSet typeargs_table(
diff --git a/runtime/vm/clustered_snapshot.cc b/runtime/vm/clustered_snapshot.cc
index 45bced2..897e24a 100644
--- a/runtime/vm/clustered_snapshot.cc
+++ b/runtime/vm/clustered_snapshot.cc
@@ -6963,7 +6963,7 @@
#if defined(DEBUG)
isolate()->ValidateClassTable();
- isolate()->ValidateConstants();
+ isolate_group()->ValidateConstants();
#endif // DEBUG
#if defined(DART_PRECOMPILER)
diff --git a/runtime/vm/compiler/backend/constant_propagator.h b/runtime/vm/compiler/backend/constant_propagator.h
index 36368d0..f2a6929 100644
--- a/runtime/vm/compiler/backend/constant_propagator.h
+++ b/runtime/vm/compiler/backend/constant_propagator.h
@@ -88,8 +88,6 @@
// thus should not be stored anywhere.
PhiInfo* GetPhiInfo(PhiInstr* phi);
- Isolate* isolate() const { return graph_->isolate(); }
-
FlowGraph* graph_;
// Sentinels for unknown constant and non-constant values.
diff --git a/runtime/vm/compiler/backend/flow_graph.h b/runtime/vm/compiler/backend/flow_graph.h
index 8a8bb32..09e3caf 100644
--- a/runtime/vm/compiler/backend/flow_graph.h
+++ b/runtime/vm/compiler/backend/flow_graph.h
@@ -238,7 +238,6 @@
Thread* thread() const { return thread_; }
Zone* zone() const { return thread()->zone(); }
- Isolate* isolate() const { return thread()->isolate(); }
IsolateGroup* isolate_group() const { return thread()->isolate_group(); }
intptr_t max_block_id() const { return max_block_id_; }
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler.cc b/runtime/vm/compiler/backend/flow_graph_compiler.cc
index 7005c7b..6fac0ad 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler.cc
@@ -300,9 +300,9 @@
bool FlowGraphCompiler::ForceSlowPathForStackOverflow() const {
#if !defined(PRODUCT)
- if ((FLAG_stacktrace_every > 0) || (FLAG_deoptimize_every > 0) ||
- (FLAG_gc_every > 0) ||
- (isolate()->reload_every_n_stack_overflow_checks() > 0)) {
+ if (FLAG_stacktrace_every > 0 || FLAG_deoptimize_every > 0 ||
+ FLAG_gc_every > 0 ||
+ (isolate_group()->reload_every_n_stack_overflow_checks() > 0)) {
if (!IsolateGroup::IsSystemIsolateGroup(isolate_group())) {
return true;
}
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler.h b/runtime/vm/compiler/backend/flow_graph_compiler.h
index 7a145d1..f3f422e 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler.h
+++ b/runtime/vm/compiler/backend/flow_graph_compiler.h
@@ -945,7 +945,6 @@
}
Thread* thread() const { return thread_; }
- Isolate* isolate() const { return thread_->isolate(); }
IsolateGroup* isolate_group() const { return thread_->isolate_group(); }
Zone* zone() const { return zone_; }
diff --git a/runtime/vm/compiler/backend/inliner.cc b/runtime/vm/compiler/backend/inliner.cc
index acb987e..459aec0 100644
--- a/runtime/vm/compiler/backend/inliner.cc
+++ b/runtime/vm/compiler/backend/inliner.cc
@@ -83,7 +83,6 @@
// Quick access to the current zone.
#define Z (zone())
-#define I (isolate())
#define TRACE_INLINING(statement) \
do { \
@@ -608,7 +607,7 @@
TargetEntryInstr* BuildDecisionGraph();
- Isolate* isolate() const;
+ IsolateGroup* isolate_group() const;
Zone* zone() const;
intptr_t AllocateBlockId() const;
inline bool trace_inlining() const;
@@ -762,7 +761,6 @@
FlowGraph* caller_graph() const { return caller_graph_; }
Thread* thread() const { return caller_graph_->thread(); }
- Isolate* isolate() const { return caller_graph_->isolate(); }
Zone* zone() const { return caller_graph_->zone(); }
bool trace_inlining() const { return inliner_->trace_inlining(); }
@@ -1663,8 +1661,8 @@
exit_collector_(new (Z) InlineExitCollector(owner->caller_graph(), call)),
caller_function_(caller_function) {}
-Isolate* PolymorphicInliner::isolate() const {
- return owner_->caller_graph()->isolate();
+IsolateGroup* PolymorphicInliner::isolate_group() const {
+ return owner_->caller_graph()->isolate_group();
}
Zone* PolymorphicInliner::zone() const {
diff --git a/runtime/vm/compiler/backend/redundancy_elimination.h b/runtime/vm/compiler/backend/redundancy_elimination.h
index ab09270..877223b 100644
--- a/runtime/vm/compiler/backend/redundancy_elimination.h
+++ b/runtime/vm/compiler/backend/redundancy_elimination.h
@@ -72,7 +72,6 @@
void EliminateAllocation(Definition* alloc);
- Isolate* isolate() const { return flow_graph_->isolate(); }
Zone* zone() const { return flow_graph_->zone(); }
FlowGraph* flow_graph_;
diff --git a/runtime/vm/compiler/call_specializer.h b/runtime/vm/compiler/call_specializer.h
index ccad4cb..c0ad62d 100644
--- a/runtime/vm/compiler/call_specializer.h
+++ b/runtime/vm/compiler/call_specializer.h
@@ -74,7 +74,6 @@
protected:
Thread* thread() const { return flow_graph_->thread(); }
- Isolate* isolate() const { return flow_graph_->isolate(); }
IsolateGroup* isolate_group() const { return flow_graph_->isolate_group(); }
Zone* zone() const { return flow_graph_->zone(); }
const Function& function() const { return flow_graph_->function(); }
diff --git a/runtime/vm/compiler/frontend/flow_graph_builder.h b/runtime/vm/compiler/frontend/flow_graph_builder.h
index f2d5364..25d74d6 100644
--- a/runtime/vm/compiler/frontend/flow_graph_builder.h
+++ b/runtime/vm/compiler/frontend/flow_graph_builder.h
@@ -73,7 +73,6 @@
Instruction** last_instruction,
intptr_t try_index);
- Isolate* isolate() const { return caller_graph_->isolate(); }
Zone* zone() const { return caller_graph_->zone(); }
FlowGraph* caller_graph_;
diff --git a/runtime/vm/compiler/jit/compiler.cc b/runtime/vm/compiler/jit/compiler.cc
index eeb7cd7..92c90ce 100644
--- a/runtime/vm/compiler/jit/compiler.cc
+++ b/runtime/vm/compiler/jit/compiler.cc
@@ -323,6 +323,7 @@
intptr_t osr_id() const { return osr_id_; }
Thread* thread() const { return thread_; }
Isolate* isolate() const { return thread_->isolate(); }
+ IsolateGroup* isolate_group() const { return thread_->isolate_group(); }
CodePtr FinalizeCompilation(compiler::Assembler* assembler,
FlowGraphCompiler* graph_compiler,
FlowGraph* flow_graph);
@@ -1110,8 +1111,8 @@
DISALLOW_COPY_AND_ASSIGN(BackgroundCompilerTask);
};
-BackgroundCompiler::BackgroundCompiler(Isolate* isolate)
- : isolate_(isolate),
+BackgroundCompiler::BackgroundCompiler(IsolateGroup* isolate_group)
+ : isolate_group_(isolate_group),
queue_monitor_(),
function_queue_(new BackgroundCompilationQueue()),
done_monitor_(),
@@ -1125,10 +1126,11 @@
}
void BackgroundCompiler::Run() {
- while (running_) {
+ while (true) {
// Maybe something is already in the queue, check first before waiting
// to be notified.
- bool result = Thread::EnterIsolateAsHelper(isolate_, Thread::kCompilerTask);
+ bool result = Thread::EnterIsolateGroupAsHelper(
+ isolate_group_, Thread::kCompilerTask, /*bypass_safepoint=*/false);
ASSERT(result);
{
Thread* thread = Thread::Current();
@@ -1173,13 +1175,16 @@
}
}
}
- Thread::ExitIsolateAsHelper();
+ Thread::ExitIsolateGroupAsHelper(/*bypass_safepoint=*/false);
{
// Wait to be notified when the work queue is not empty.
MonitorLocker ml(&queue_monitor_);
while (function_queue()->IsEmpty() && running_) {
ml.Wait();
}
+ if (!running_) {
+ break;
+ }
}
} // while running
@@ -1187,7 +1192,7 @@
// Notify that the thread is done.
MonitorLocker ml_done(&done_monitor_);
done_ = true;
- ml_done.Notify();
+ ml_done.NotifyAll();
}
}
@@ -1229,7 +1234,7 @@
void BackgroundCompiler::Stop() {
Thread* thread = Thread::Current();
- ASSERT(thread->IsMutatorThread());
+ ASSERT(thread->isolate() == nullptr || thread->IsMutatorThread());
ASSERT(!thread->IsAtSafepoint());
SafepointMonitorLocker ml_done(&done_monitor_);
diff --git a/runtime/vm/compiler/jit/compiler.h b/runtime/vm/compiler/jit/compiler.h
index 5ed55be..456c70b 100644
--- a/runtime/vm/compiler/jit/compiler.h
+++ b/runtime/vm/compiler/jit/compiler.h
@@ -119,12 +119,11 @@
// No OSR compilation in the background compiler.
class BackgroundCompiler {
public:
- explicit BackgroundCompiler(Isolate* isolate);
+ explicit BackgroundCompiler(IsolateGroup* isolate_group);
virtual ~BackgroundCompiler();
- static void Stop(Isolate* isolate) {
- ASSERT(Thread::Current()->IsMutatorThread());
- isolate->background_compiler()->Stop();
+ static void Stop(IsolateGroup* isolate_group) {
+ isolate_group->background_compiler()->Stop();
}
// Enqueues a function to be compiled in the background.
@@ -148,7 +147,7 @@
void Disable();
bool IsRunning() { return !done_; }
- Isolate* isolate_;
+ IsolateGroup* isolate_group_;
Monitor queue_monitor_; // Controls access to the queue.
BackgroundCompilationQueue* function_queue_;
@@ -165,13 +164,15 @@
class NoBackgroundCompilerScope : public StackResource {
public:
explicit NoBackgroundCompilerScope(Thread* thread)
- : StackResource(thread), isolate_(thread->isolate()) {
- isolate_->background_compiler()->Disable();
+ : StackResource(thread), isolate_group_(thread->isolate_group()) {
+ isolate_group_->background_compiler()->Disable();
}
- ~NoBackgroundCompilerScope() { isolate_->background_compiler()->Enable(); }
+ ~NoBackgroundCompilerScope() {
+ isolate_group_->background_compiler()->Enable();
+ }
private:
- Isolate* isolate_;
+ IsolateGroup* isolate_group_;
};
} // namespace dart
diff --git a/runtime/vm/compiler_test.cc b/runtime/vm/compiler_test.cc
index d5d9a5e..49df77f 100644
--- a/runtime/vm/compiler_test.cc
+++ b/runtime/vm/compiler_test.cc
@@ -88,8 +88,8 @@
// Constant in product mode.
FLAG_background_compilation = true;
#endif
- Isolate* isolate = thread->isolate();
- isolate->background_compiler()->EnqueueCompilation(func);
+ auto isolate_group = thread->isolate_group();
+ isolate_group->background_compiler()->EnqueueCompilation(func);
Monitor* m = new Monitor();
{
SafepointMonitorLocker ml(m);
diff --git a/runtime/vm/dart_api_impl.cc b/runtime/vm/dart_api_impl.cc
index 96d9b1b..21d315c 100644
--- a/runtime/vm/dart_api_impl.cc
+++ b/runtime/vm/dart_api_impl.cc
@@ -1435,7 +1435,7 @@
DART_EXPORT void Dart_ShutdownIsolate() {
Thread* T = Thread::Current();
- Isolate* I = T->isolate();
+ auto I = T->isolate();
CHECK_ISOLATE(I);
// The Thread structure is disassociated from the isolate, we do the
@@ -1461,7 +1461,7 @@
StackZone zone(T);
HandleScope handle_scope(T);
#if defined(DEBUG)
- I->ValidateConstants();
+ T->isolate_group()->ValidateConstants();
#endif
Dart::RunShutdownCallback();
}
diff --git a/runtime/vm/debugger.cc b/runtime/vm/debugger.cc
index 61be4a2..acf1abd 100644
--- a/runtime/vm/debugger.cc
+++ b/runtime/vm/debugger.cc
@@ -3506,9 +3506,23 @@
}
bool Debugger::IsDebugging(Thread* thread, const Function& func) {
- Debugger* debugger = thread->isolate()->debugger();
- return debugger->IsStepping() ||
- debugger->HasBreakpoint(func, thread->zone());
+ // TODO(dartbug.com/36097): We might need to adjust this once we start adding
+ // debugging support to --enable-isolate-groups.
+ auto isolate_group = thread->isolate_group();
+
+ bool has_breakpoint = false;
+ bool is_single_stepping = false;
+ isolate_group->ForEachIsolate(
+ [&](Isolate* isolate) {
+ if (isolate->debugger()->IsStepping()) {
+ is_single_stepping = true;
+ }
+ if (isolate->debugger()->HasBreakpoint(func, thread->zone())) {
+ has_breakpoint = true;
+ }
+ },
+ thread->IsAtSafepoint());
+ return has_breakpoint || is_single_stepping;
}
void Debugger::SignalPausedEvent(ActivationFrame* top_frame, Breakpoint* bpt) {
diff --git a/runtime/vm/isolate.cc b/runtime/vm/isolate.cc
index 5fff56a..3799f2c 100644
--- a/runtime/vm/isolate.cc
+++ b/runtime/vm/isolate.cc
@@ -342,6 +342,7 @@
random_(),
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
last_reload_timestamp_(OS::GetCurrentTimeMillis()),
+ reload_every_n_stack_overflow_checks_(FLAG_reload_every),
#endif
source_(std::move(source)),
api_state_(new ApiState()),
@@ -353,6 +354,9 @@
heap_(nullptr),
saved_unlinked_calls_(Array::null()),
initial_field_table_(new FieldTable(/*isolate=*/nullptr)),
+#if !defined(DART_PRECOMPILED_RUNTIME)
+ background_compiler_(new BackgroundCompiler(this)),
+#endif
symbols_lock_(new SafepointRwLock()),
type_canonicalization_mutex_(
NOT_IN_PRODUCT("IsolateGroup::type_canonicalization_mutex_")),
@@ -850,11 +854,15 @@
NoOOBMessageScope::NoOOBMessageScope(Thread* thread)
: ThreadStackResource(thread) {
- thread->DeferOOBMessageInterrupts();
+ if (thread->isolate() != nullptr) {
+ thread->DeferOOBMessageInterrupts();
+ }
}
NoOOBMessageScope::~NoOOBMessageScope() {
- thread()->RestoreOOBMessageInterrupts();
+ if (thread()->isolate() != nullptr) {
+ thread()->RestoreOOBMessageInterrupts();
+ }
}
NoReloadScope::NoReloadScope(IsolateGroup* isolate_group, Thread* thread)
@@ -934,7 +942,7 @@
}
}
-void Isolate::RehashConstants() {
+void IsolateGroup::RehashConstants() {
Thread* thread = Thread::Current();
StackZone stack_zone(thread);
Zone* zone = stack_zone.GetZone();
@@ -942,10 +950,10 @@
thread->heap()->ResetCanonicalHashTable();
Class& cls = Class::Handle(zone);
- intptr_t top = group()->class_table()->NumCids();
+ intptr_t top = class_table()->NumCids();
for (intptr_t cid = kInstanceCid; cid < top; cid++) {
- if (!group()->class_table()->IsValidIndex(cid) ||
- !group()->class_table()->HasValidClassAt(cid)) {
+ if (!class_table()->IsValidIndex(cid) ||
+ !class_table()->HasValidClassAt(cid)) {
continue;
}
if ((cid == kTypeArgumentsCid) || IsStringClassId(cid)) {
@@ -953,27 +961,27 @@
// that aren't based on address.
continue;
}
- cls = group()->class_table()->At(cid);
+ cls = class_table()->At(cid);
cls.RehashConstants(zone);
}
}
#if defined(DEBUG)
-void Isolate::ValidateConstants() {
+void IsolateGroup::ValidateConstants() {
if (FLAG_precompiled_mode) {
// TODO(27003)
return;
}
// Issue(https://dartbug.com/44862): Figure out why hot-reload causes
// existence of non-canonical constants.
- if (group()->HasAttemptedReload()) {
+ if (HasAttemptedReload()) {
return;
}
// Verify that all canonical instances are correctly setup in the
// corresponding canonical tables.
NoBackgroundCompilerScope no_bg_compiler(Thread::Current());
- group()->heap()->CollectAllGarbage();
+ heap()->CollectAllGarbage();
Thread* thread = Thread::Current();
HeapIterationScope iteration(thread);
VerifyCanonicalVisitor check_canonical(thread);
@@ -1705,7 +1713,6 @@
metric_##variable##_(),
ISOLATE_METRIC_LIST(ISOLATE_METRIC_CONSTRUCTORS)
#undef ISOLATE_METRIC_CONSTRUCTORS
- reload_every_n_stack_overflow_checks_(FLAG_reload_every),
#endif // !defined(PRODUCT)
start_time_micros_(OS::GetCurrentMonotonicMicros()),
on_shutdown_callback_(Isolate::ShutdownCallback()),
@@ -1734,8 +1741,6 @@
"which violates the Dart standard.\n"
" See dartbug.com/30524 for more information.\n");
}
-
- NOT_IN_PRECOMPILED(background_compiler_ = new BackgroundCompiler(this));
}
#undef REUSABLE_HANDLE_SCOPE_INIT
@@ -1747,9 +1752,6 @@
// RELEASE_ASSERT(program_reload_context_ == NULL);
#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
- delete background_compiler_;
- background_compiler_ = nullptr;
-
#if !defined(PRODUCT)
delete debugger_;
debugger_ = nullptr;
@@ -1845,6 +1847,10 @@
result->set_pause_capability(result->random()->NextUInt64());
result->set_terminate_capability(result->random()->NextUInt64());
+#if !defined(PRODUCT)
+ result->debugger_ = new Debugger(result);
+#endif
+
// Now we register the isolate in the group. From this point on any GC would
// traverse the isolate roots (before this point, the roots are only pointing
// to vm-isolate objects, e.g. null)
@@ -1860,9 +1866,6 @@
#endif // !defined(DART_PRECOMPILED_RUNTIME)
}
-#if !defined(PRODUCT)
- result->debugger_ = new Debugger(result);
-#endif
if (FLAG_trace_isolates) {
if (name_prefix == nullptr || strcmp(name_prefix, "vm-isolate") != 0) {
OS::PrintErr(
@@ -2424,12 +2427,13 @@
}
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
-void Isolate::MaybeIncreaseReloadEveryNStackOverflowChecks() {
+void IsolateGroup::MaybeIncreaseReloadEveryNStackOverflowChecks() {
if (FLAG_reload_every_back_off) {
if (reload_every_n_stack_overflow_checks_ < 5000) {
reload_every_n_stack_overflow_checks_ += 99;
} else {
- reload_every_n_stack_overflow_checks_ *= 2;
+ const auto old_value = reload_every_n_stack_overflow_checks_;
+ reload_every_n_stack_overflow_checks_ = old_value * 2;
}
// Cap the value.
if (reload_every_n_stack_overflow_checks_ > 1000000) {
@@ -2449,12 +2453,8 @@
}
void Isolate::Shutdown() {
- ASSERT(this == Isolate::Current());
- NOT_IN_PRECOMPILED(BackgroundCompiler::Stop(this));
- NOT_IN_PRECOMPILED(delete background_compiler_);
- background_compiler_ = nullptr;
-
Thread* thread = Thread::Current();
+ ASSERT(this == thread->isolate());
// Don't allow anymore dart code to execution on this isolate.
thread->ClearStackLimit();
@@ -2535,6 +2535,15 @@
const bool shutdown_group =
isolate_group->UnregisterIsolateDecrementCount(isolate);
if (shutdown_group) {
+#if !defined(DART_PRECOMPILED_RUNTIME)
+ if (!is_vm_isolate) {
+ Thread::EnterIsolateGroupAsHelper(isolate_group, Thread::kUnknownTask,
+ /*bypass_safepoint=*/false);
+ BackgroundCompiler::Stop(isolate_group);
+ Thread::ExitIsolateGroupAsHelper(/*bypass_safepoint=*/false);
+ }
+#endif // !defined(DART_PRECOMPILED_RUNTIME)
+
// The "vm-isolate" does not have a thread pool.
ASSERT(is_vm_isolate == (isolate_group->thread_pool() == nullptr));
if (is_vm_isolate ||
@@ -2612,10 +2621,6 @@
reinterpret_cast<ObjectPtr*>(®istered_service_extension_handlers_));
#endif // !defined(PRODUCT)
- if (background_compiler() != nullptr) {
- background_compiler()->VisitPointers(visitor);
- }
-
#if !defined(PRODUCT)
// Visit objects in the debugger.
if (debugger() != nullptr) {
@@ -2789,6 +2794,8 @@
// is guarded with a monitor. This means that we can visit it only
// when at safepoint or the field_list_mutex_ lock has been taken.
visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&boxed_field_list_));
+
+ NOT_IN_PRECOMPILED(background_compiler()->VisitPointers(visitor));
}
void IsolateGroup::VisitStackPointers(ObjectPointerVisitor* visitor,
diff --git a/runtime/vm/isolate.h b/runtime/vm/isolate.h
index fefa565..1ec4245 100644
--- a/runtime/vm/isolate.h
+++ b/runtime/vm/isolate.h
@@ -341,6 +341,11 @@
Dart_IsolateFlags api_flags);
~IsolateGroup();
+ void RehashConstants();
+#if defined(DEBUG)
+ void ValidateConstants();
+#endif
+
IsolateGroupSource* source() const { return source_.get(); }
std::shared_ptr<IsolateGroupSource> shareable_source() const {
return source_;
@@ -352,6 +357,14 @@
Heap* heap() const { return heap_.get(); }
+ BackgroundCompiler* background_compiler() const {
+#if defined(DART_PRECOMPILED_RUNTIME)
+ return nullptr;
+#else
+ return background_compiler_.get();
+#endif
+ }
+
IdleTimeHandler* idle_time_handler() { return &idle_time_handler_; }
// Returns true if this is the first isolate registered.
@@ -488,6 +501,10 @@
isolate_group_flags_ =
HasAttemptedReloadBit::update(value, isolate_group_flags_);
}
+ void MaybeIncreaseReloadEveryNStackOverflowChecks();
+ intptr_t reload_every_n_stack_overflow_checks() const {
+ return reload_every_n_stack_overflow_checks_;
+ }
#else
bool HasAttemptedReload() const { return false; }
#endif // !defined(DART_PRECOMPILED_RUNTIME)
@@ -827,6 +844,8 @@
std::shared_ptr<IsolateGroupReloadContext> group_reload_context_;
RelaxedAtomic<intptr_t> no_reload_scope_depth_ =
0; // we can only reload when this is 0.
+ // Per-isolate-group copy of FLAG_reload_every.
+ RelaxedAtomic<intptr_t> reload_every_n_stack_overflow_checks_;
#endif
#define ISOLATE_METRIC_VARIABLE(type, variable, name, unit) \
@@ -865,6 +884,8 @@
std::shared_ptr<FieldTable> initial_field_table_;
uint32_t isolate_group_flags_ = 0;
+ NOT_IN_PRECOMPILED(std::unique_ptr<BackgroundCompiler> background_compiler_);
+
std::unique_ptr<SafepointRwLock> symbols_lock_;
Mutex type_canonicalization_mutex_;
Mutex type_arguments_canonicalization_mutex_;
@@ -962,11 +983,6 @@
void ValidateClassTable();
#endif
- void RehashConstants();
-#if defined(DEBUG)
- void ValidateConstants();
-#endif
-
ThreadRegistry* thread_registry() const { return group()->thread_registry(); }
SafepointHandler* safepoint_handler() const {
@@ -1225,10 +1241,6 @@
deopt_context_ = value;
}
- BackgroundCompiler* background_compiler() const {
- return background_compiler_;
- }
-
intptr_t BlockClassFinalization() {
ASSERT(defer_finalization_count_ >= 0);
return defer_finalization_count_++;
@@ -1425,20 +1437,12 @@
return IsolateGroup::IsSystemIsolateGroup(isolate->group());
}
-#if !defined(PRODUCT)
- intptr_t reload_every_n_stack_overflow_checks() const {
- return reload_every_n_stack_overflow_checks_;
- }
-#endif // !defined(PRODUCT)
-
HandlerInfoCache* handler_info_cache() { return &handler_info_cache_; }
CatchEntryMovesCache* catch_entry_moves_cache() {
return &catch_entry_moves_cache_;
}
- void MaybeIncreaseReloadEveryNStackOverflowChecks();
-
// The weak table used in the snapshot writer for the purpose of fast message
// sending.
WeakTable* forward_table_new() { return forward_table_new_.get(); }
@@ -1589,8 +1593,6 @@
uint32_t isolate_flags_ = 0;
- BackgroundCompiler* background_compiler_ = nullptr;
-
// Fields that aren't needed in a product build go here with boolean flags at
// the top.
#if !defined(PRODUCT)
@@ -1618,8 +1620,6 @@
ISOLATE_METRIC_LIST(ISOLATE_METRIC_VARIABLE);
#undef ISOLATE_METRIC_VARIABLE
- // Per-isolate copy of FLAG_reload_every.
- intptr_t reload_every_n_stack_overflow_checks_;
ProgramReloadContext* program_reload_context_ = nullptr;
// Ring buffer of objects assigned an id.
ObjectIdRing* object_id_ring_ = nullptr;
diff --git a/runtime/vm/isolate_reload.cc b/runtime/vm/isolate_reload.cc
index f5b4498..2a13446 100644
--- a/runtime/vm/isolate_reload.cc
+++ b/runtime/vm/isolate_reload.cc
@@ -664,11 +664,7 @@
[&](Isolate* isolate) { number_of_isolates++; });
// Disable the background compiler while we are performing the reload.
- ForEachIsolate([&](Isolate* isolate) {
- // TODO(dartbug.com/36097): Once the BG compiler moves from Isolate to
- // IsolateGroup this scope should cover most of this function.
- NoBackgroundCompilerScope stop_bg_compiler(isolate->mutator_thread());
- });
+ NoBackgroundCompilerScope stop_bg_compiler(thread);
// Wait for any concurrent marking tasks to finish and turn off the
// concurrent marker during reload as we might be allocating new instances
@@ -1705,11 +1701,11 @@
// content may have changed from fields being added or removed.
{
TIMELINE_SCOPE(RehashConstants);
- I->RehashConstants();
+ IG->RehashConstants();
}
#ifdef DEBUG
- I->ValidateConstants();
+ IG->ValidateConstants();
#endif
if (FLAG_identity_reload) {
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index 4b38596..740793b 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -2614,7 +2614,7 @@
class WriteBarrierUpdateVisitor : public ObjectPointerVisitor {
public:
explicit WriteBarrierUpdateVisitor(Thread* thread, ObjectPtr obj)
- : ObjectPointerVisitor(thread->isolate()->group()),
+ : ObjectPointerVisitor(thread->isolate_group()),
thread_(thread),
old_obj_(obj) {
ASSERT(old_obj_->IsOldObject());
@@ -6695,8 +6695,19 @@
#if defined(PRODUCT)
return false;
#else
- Thread* thread = Thread::Current();
- return thread->isolate()->debugger()->HasBreakpoint(*this, thread->zone());
+ // TODO(dartbug.com/36097): We might need to adjust this once we start adding
+ // debugging support to --enable-isolate-groups.
+ auto thread = Thread::Current();
+ auto zone = thread->zone();
+ auto isolate_group = thread->isolate_group();
+
+ bool has_breakpoint = false;
+ isolate_group->ForEachIsolate([&](Isolate* isolate) {
+ if (isolate->debugger()->HasBreakpoint(*this, zone)) {
+ has_breakpoint = true;
+ }
+ });
+ return has_breakpoint;
#endif
}
@@ -7742,12 +7753,9 @@
return CompilerState::Current().is_aot();
}
-#if !defined(PRODUCT)
- Thread* thread = Thread::Current();
- if (thread->isolate()->debugger()->HasBreakpoint(*this, thread->zone())) {
+ if (HasBreakpoint()) {
return false;
}
-#endif // !defined(PRODUCT)
return is_inlinable() && !is_external() && !is_generated_body();
}
@@ -10312,8 +10320,7 @@
result.set_has_pragma(false);
result.set_static_type_exactness_state(
StaticTypeExactnessState::NotTracking());
- auto isolate = Isolate::Current();
- auto isolate_group = isolate->group();
+ auto isolate_group = IsolateGroup::Current();
// Use field guards if they are enabled and the isolate has never reloaded.
// TODO(johnmccutchan): The reload case assumes the worst case (everything is
@@ -16255,7 +16262,17 @@
#if defined(PRODUCT)
return false;
#else
- return Isolate::Current()->debugger()->HasBreakpoint(*this);
+ // TODO(dartbug.com/36097): We might need to adjust this once we start adding
+ // debugging support to --enable-isolate-groups.
+ auto isolate_group = Thread::Current()->isolate_group();
+
+ bool has_breakpoint = false;
+ isolate_group->ForEachIsolate([&](Isolate* isolate) {
+ if (isolate->debugger()->HasBreakpoint(*this)) {
+ has_breakpoint = true;
+ }
+ });
+ return has_breakpoint;
#endif
}
diff --git a/runtime/vm/runtime_entry.cc b/runtime/vm/runtime_entry.cc
index 2c78a12..e70b4fe 100644
--- a/runtime/vm/runtime_entry.cc
+++ b/runtime/vm/runtime_entry.cc
@@ -2481,7 +2481,7 @@
bool do_reload = false;
bool do_gc = false;
const intptr_t isolate_reload_every =
- isolate->reload_every_n_stack_overflow_checks();
+ isolate->group()->reload_every_n_stack_overflow_checks();
if ((FLAG_deoptimize_every > 0) || (FLAG_stacktrace_every > 0) ||
(FLAG_gc_every > 0) || (isolate_reload_every > 0)) {
if (!Isolate::IsSystemIsolate(isolate)) {
@@ -2548,7 +2548,7 @@
if (do_reload) {
JSONStream js;
// Maybe adjust the rate of future reloads.
- isolate->MaybeIncreaseReloadEveryNStackOverflowChecks();
+ isolate->group()->MaybeIncreaseReloadEveryNStackOverflowChecks();
const char* script_uri;
{
@@ -2773,15 +2773,15 @@
ASSERT(function.HasCode());
if (Compiler::CanOptimizeFunction(thread, function)) {
+ auto isolate_group = thread->isolate_group();
if (FLAG_background_compilation) {
if (function.is_background_optimizable() &&
- isolate->background_compiler()->EnqueueCompilation(function)) {
+ isolate_group->background_compiler()->EnqueueCompilation(function)) {
// Reduce the chance of triggering a compilation while the function is
// being compiled in the background. INT32_MIN should ensure that it
// takes long time to trigger a compilation.
// Note that the background compilation queue rejects duplicate entries.
function.SetUsageCounter(INT32_MIN);
-
// Continue in the same code.
arguments.SetReturn(function);
return;