[vm/compiler] Move reorder_blocks onto the graph.
Make the FlowGraph constructor compute whether we are planning to
reorder blocks before code generation. This makes the state of the
graph at the end of the compilation clearer.
This is just a refactoring without any functional changes.
TEST=ci
Change-Id: Ieefb02237cc1ebd69d5d2b217bdc8ebbfdbf15c7
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/358446
Commit-Queue: Slava Egorov <vegorov@google.com>
Reviewed-by: Alexander Markov <alexmarkov@google.com>
diff --git a/runtime/vm/compiler/aot/precompiler.cc b/runtime/vm/compiler/aot/precompiler.cc
index 6bface4..5b124fe 100644
--- a/runtime/vm/compiler/aot/precompiler.cc
+++ b/runtime/vm/compiler/aot/precompiler.cc
@@ -3516,8 +3516,6 @@
CompilerPassState pass_state(thread(), flow_graph, &speculative_policy,
precompiler_);
- pass_state.reorder_blocks =
- FlowGraph::ShouldReorderBlocks(function, optimized());
if (function.ForceOptimize()) {
ASSERT(optimized());
diff --git a/runtime/vm/compiler/backend/block_scheduler.cc b/runtime/vm/compiler/backend/block_scheduler.cc
index 3ecb67a..2065f58 100644
--- a/runtime/vm/compiler/backend/block_scheduler.cc
+++ b/runtime/vm/compiler/backend/block_scheduler.cc
@@ -156,6 +156,10 @@
}
void BlockScheduler::ReorderBlocks(FlowGraph* flow_graph) {
+ if (!flow_graph->should_reorder_blocks()) {
+ return;
+ }
+
if (CompilerState::Current().is_aot()) {
ReorderBlocksAOT(flow_graph);
} else {
@@ -164,10 +168,6 @@
}
void BlockScheduler::ReorderBlocksJIT(FlowGraph* flow_graph) {
- if (!FLAG_reorder_basic_blocks) {
- return;
- }
-
// Add every block to a chain of length 1 and compute a list of edges
// sorted by weight.
intptr_t block_count = flow_graph->preorder().length();
@@ -219,10 +219,10 @@
// Ensure the checked entry remains first to avoid needing another offset on
// Instructions, compare Code::EntryPointOf.
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
- flow_graph->CodegenBlockOrder(true)->Add(graph_entry);
+ flow_graph->CodegenBlockOrder()->Add(graph_entry);
FunctionEntryInstr* checked_entry = graph_entry->normal_entry();
if (checked_entry != nullptr) {
- flow_graph->CodegenBlockOrder(true)->Add(checked_entry);
+ flow_graph->CodegenBlockOrder()->Add(checked_entry);
}
// Build a new block order. Emit each chain when its first block occurs
// in the original reverse postorder ordering (which gives a topological
@@ -231,7 +231,7 @@
if (chains[i]->first->block == flow_graph->postorder()[i]) {
for (Link* link = chains[i]->first; link != nullptr; link = link->next) {
if ((link->block != checked_entry) && (link->block != graph_entry)) {
- flow_graph->CodegenBlockOrder(true)->Add(link->block);
+ flow_graph->CodegenBlockOrder()->Add(link->block);
}
}
}
@@ -241,10 +241,6 @@
// Moves blocks ending in a throw/rethrow, as well as any block post-dominated
// by such a throwing block, to the end.
void BlockScheduler::ReorderBlocksAOT(FlowGraph* flow_graph) {
- if (!FLAG_reorder_basic_blocks) {
- return;
- }
-
auto& reverse_postorder = flow_graph->reverse_postorder();
const intptr_t block_count = reverse_postorder.length();
GrowableArray<bool> is_terminating(block_count);
@@ -283,7 +279,7 @@
// Emit code in reverse postorder but move any throwing blocks (except the
// function entry, which needs to come first) to the very end.
- auto codegen_order = flow_graph->CodegenBlockOrder(true);
+ auto codegen_order = flow_graph->CodegenBlockOrder();
for (intptr_t i = 0; i < block_count; ++i) {
auto block = reverse_postorder[i];
const intptr_t preorder_nr = block->preorder_number();
diff --git a/runtime/vm/compiler/backend/flow_graph.cc b/runtime/vm/compiler/backend/flow_graph.cc
index 7303b02..47effb2 100644
--- a/runtime/vm/compiler/backend/flow_graph.cc
+++ b/runtime/vm/compiler/backend/flow_graph.cc
@@ -34,10 +34,17 @@
// Quick access to the current zone.
#define Z (zone())
+static bool ShouldReorderBlocks(const Function& function,
+ FlowGraph::CompilationMode mode) {
+ return (mode == FlowGraph::CompilationMode::kOptimized) &&
+ FLAG_reorder_basic_blocks && !function.IsFfiCallbackTrampoline();
+}
+
FlowGraph::FlowGraph(const ParsedFunction& parsed_function,
GraphEntryInstr* graph_entry,
intptr_t max_block_id,
- PrologueInfo prologue_info)
+ PrologueInfo prologue_info,
+ CompilationMode compilation_mode)
: thread_(Thread::Current()),
parent_(),
current_ssa_temp_index_(0),
@@ -56,6 +63,8 @@
constant_null_(nullptr),
constant_dead_(nullptr),
licm_allowed_(true),
+ should_reorder_blocks_(
+ ShouldReorderBlocks(parsed_function.function(), compilation_mode)),
prologue_info_(prologue_info),
loop_hierarchy_(nullptr),
loop_invariant_loads_(nullptr),
@@ -152,16 +161,14 @@
iterator->RemoveCurrentFromGraph();
}
-bool FlowGraph::ShouldReorderBlocks(const Function& function,
- bool is_optimized) {
- return is_optimized && FLAG_reorder_basic_blocks &&
- !function.is_intrinsic() && !function.IsFfiCallbackTrampoline();
+GrowableArray<BlockEntryInstr*>* FlowGraph::CodegenBlockOrder() {
+ return should_reorder_blocks() ? &optimized_block_order_
+ : &reverse_postorder_;
}
-GrowableArray<BlockEntryInstr*>* FlowGraph::CodegenBlockOrder(
- bool is_optimized) {
- return ShouldReorderBlocks(function(), is_optimized) ? &optimized_block_order_
- : &reverse_postorder_;
+const GrowableArray<BlockEntryInstr*>* FlowGraph::CodegenBlockOrder() const {
+ return should_reorder_blocks() ? &optimized_block_order_
+ : &reverse_postorder_;
}
ConstantInstr* FlowGraph::GetExistingConstant(
diff --git a/runtime/vm/compiler/backend/flow_graph.h b/runtime/vm/compiler/backend/flow_graph.h
index 9d32fde..50fade6 100644
--- a/runtime/vm/compiler/backend/flow_graph.h
+++ b/runtime/vm/compiler/backend/flow_graph.h
@@ -113,10 +113,17 @@
// Class to encapsulate the construction and manipulation of the flow graph.
class FlowGraph : public ZoneAllocated {
public:
+ enum class CompilationMode {
+ kUnoptimized,
+ kOptimized,
+ kIntrinsic,
+ };
+
FlowGraph(const ParsedFunction& parsed_function,
GraphEntryInstr* graph_entry,
intptr_t max_block_id,
- PrologueInfo prologue_info);
+ PrologueInfo prologue_info,
+ CompilationMode compilation_mode);
// Function properties.
const ParsedFunction& parsed_function() const { return parsed_function_; }
@@ -203,8 +210,8 @@
const GrowableArray<BlockEntryInstr*>& optimized_block_order() const {
return optimized_block_order_;
}
- static bool ShouldReorderBlocks(const Function& function, bool is_optimized);
- GrowableArray<BlockEntryInstr*>* CodegenBlockOrder(bool is_optimized);
+ GrowableArray<BlockEntryInstr*>* CodegenBlockOrder();
+ const GrowableArray<BlockEntryInstr*>* CodegenBlockOrder() const;
// Iterators.
BlockIterator reverse_postorder_iterator() const {
@@ -491,6 +498,8 @@
return compiler_pass_filters_;
}
+ bool should_reorder_blocks() const { return should_reorder_blocks_; }
+
//
// High-level utilities.
//
@@ -562,6 +571,11 @@
static intptr_t ComputeArgumentsSizeInWords(const Function& function,
intptr_t arguments_count);
+ static constexpr CompilationMode CompilationModeFrom(bool is_optimizing) {
+ return is_optimizing ? CompilationMode::kOptimized
+ : CompilationMode::kUnoptimized;
+ }
+
private:
friend class FlowGraphCompiler; // TODO(ajcbik): restructure
friend class FlowGraphChecker;
@@ -678,6 +692,7 @@
bool licm_allowed_;
bool unmatched_representations_allowed_ = true;
bool huge_method_ = false;
+ const bool should_reorder_blocks_;
const PrologueInfo prologue_info_;
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler.cc b/runtime/vm/compiler/backend/flow_graph_compiler.cc
index b06a133..ec74508 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler.cc
@@ -142,7 +142,7 @@
assembler_(assembler),
parsed_function_(parsed_function),
flow_graph_(*flow_graph),
- block_order_(*flow_graph->CodegenBlockOrder(is_optimizing)),
+ block_order_(*flow_graph->CodegenBlockOrder()),
current_block_(nullptr),
exception_handlers_list_(nullptr),
pc_descriptors_list_(nullptr),
diff --git a/runtime/vm/compiler/backend/il_serializer.cc b/runtime/vm/compiler/backend/il_serializer.cc
index a34c42a..d3ea751 100644
--- a/runtime/vm/compiler/backend/il_serializer.cc
+++ b/runtime/vm/compiler/backend/il_serializer.cc
@@ -743,8 +743,9 @@
instr->ReadExtra(this);
}
- FlowGraph* flow_graph = new (Z)
- FlowGraph(parsed_function(), graph_entry_, max_block_id, prologue_info);
+ FlowGraph* flow_graph =
+ new (Z) FlowGraph(parsed_function(), graph_entry_, max_block_id,
+ prologue_info, FlowGraph::CompilationMode::kOptimized);
flow_graph->set_current_ssa_temp_index(current_ssa_temp_index);
flow_graph->CreateCommonConstants();
flow_graph->disallow_licm();
@@ -754,7 +755,7 @@
{
const intptr_t num_blocks = Read<intptr_t>();
if (num_blocks != 0) {
- auto* codegen_block_order = flow_graph->CodegenBlockOrder(true);
+ auto* codegen_block_order = flow_graph->CodegenBlockOrder();
ASSERT(codegen_block_order == &flow_graph->optimized_block_order());
for (intptr_t i = 0; i < num_blocks; ++i) {
codegen_block_order->Add(ReadRef<BlockEntryInstr*>());
diff --git a/runtime/vm/compiler/backend/il_test_helper.cc b/runtime/vm/compiler/backend/il_test_helper.cc
index c768ed2..4c6ea83 100644
--- a/runtime/vm/compiler/backend/il_test_helper.cc
+++ b/runtime/vm/compiler/backend/il_test_helper.cc
@@ -140,15 +140,12 @@
flow_graph_->PopulateWithICData(function_);
}
- const bool reorder_blocks =
- FlowGraph::ShouldReorderBlocks(function_, optimized);
- if (mode_ == CompilerPass::kJIT && reorder_blocks) {
+ if (mode_ == CompilerPass::kJIT && flow_graph_->should_reorder_blocks()) {
BlockScheduler::AssignEdgeWeights(flow_graph_);
}
pass_state_ =
new CompilerPassState(thread, flow_graph_, speculative_policy_.get());
- pass_state_->reorder_blocks = reorder_blocks;
if (optimized) {
JitCallSpecializer jit_call_specializer(flow_graph_,
diff --git a/runtime/vm/compiler/backend/il_test_helper.h b/runtime/vm/compiler/backend/il_test_helper.h
index 64747ff..b507236 100644
--- a/runtime/vm/compiler/backend/il_test_helper.h
+++ b/runtime/vm/compiler/backend/il_test_helper.h
@@ -272,7 +272,9 @@
public:
explicit FlowGraphBuilderHelper(intptr_t num_parameters = 0)
: state_(CompilerState::Current()),
- flow_graph_(MakeDummyGraph(Thread::Current(), num_parameters)) {
+ flow_graph_(MakeDummyGraph(Thread::Current(),
+ num_parameters,
+ state_.is_optimizing())) {
flow_graph_.CreateCommonConstants();
}
@@ -375,7 +377,9 @@
FlowGraph* flow_graph() { return &flow_graph_; }
private:
- static FlowGraph& MakeDummyGraph(Thread* thread, intptr_t num_parameters) {
+ static FlowGraph& MakeDummyGraph(Thread* thread,
+ intptr_t num_parameters,
+ bool is_optimizing) {
const FunctionType& signature =
FunctionType::ZoneHandle(FunctionType::New());
signature.set_num_fixed_parameters(num_parameters);
@@ -403,7 +407,8 @@
new FunctionEntryInstr(graph_entry, block_id, kInvalidTryIndex,
CompilerState::Current().GetNextDeoptId()));
return *new FlowGraph(*parsed_function, graph_entry, block_id,
- PrologueInfo{-1, -1});
+ PrologueInfo{-1, -1},
+ FlowGraph::CompilationModeFrom(is_optimizing));
}
CompilerState& state_;
diff --git a/runtime/vm/compiler/compiler_pass.cc b/runtime/vm/compiler/compiler_pass.cc
index 73fde3d..90ced05 100644
--- a/runtime/vm/compiler/compiler_pass.cc
+++ b/runtime/vm/compiler/compiler_pass.cc
@@ -60,7 +60,6 @@
sinking(nullptr),
call_specializer(nullptr),
speculative_policy(speculative_policy),
- reorder_blocks(false),
sticky_flags(0),
flow_graph_(flow_graph) {
// Top scope function is at inlining id 0.
@@ -571,9 +570,7 @@
});
COMPILER_PASS(ReorderBlocks, {
- if (state->reorder_blocks) {
- BlockScheduler::ReorderBlocks(flow_graph);
- }
+ BlockScheduler::ReorderBlocks(flow_graph);
// This is the last compiler pass.
// Test that round-trip IL serialization works before generating code.
diff --git a/runtime/vm/compiler/compiler_pass.h b/runtime/vm/compiler/compiler_pass.h
index 5bcf368..c65d802 100644
--- a/runtime/vm/compiler/compiler_pass.h
+++ b/runtime/vm/compiler/compiler_pass.h
@@ -94,8 +94,6 @@
SpeculativeInliningPolicy* speculative_policy;
- bool reorder_blocks;
-
intptr_t sticky_flags;
FlowGraphCompiler* graph_compiler = nullptr;
diff --git a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
index 9e3d0bc..abdf1c0 100644
--- a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
+++ b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
@@ -69,8 +69,10 @@
if (B->IsCompiledForOsr()) {
B->graph_entry_->RelinkToOsrEntry(Z, B->last_used_block_id_ + 1);
}
- return new (Z) FlowGraph(*parsed_function(), B->graph_entry_,
- B->last_used_block_id_, prologue_info);
+ return new (Z) FlowGraph(
+ *parsed_function(), B->graph_entry_, B->last_used_block_id_,
+ prologue_info,
+ FlowGraph::CompilationModeFrom(flow_graph_builder_->optimizing_));
}
void StreamingFlowGraphBuilder::SetupDefaultParameterValues() {
@@ -892,9 +894,10 @@
graph_entry->RelinkToOsrEntry(Z,
flow_graph_builder_->last_used_block_id_ + 1);
}
- return new (Z)
- FlowGraph(*parsed_function(), graph_entry,
- flow_graph_builder_->last_used_block_id_, prologue_info);
+ return new (Z) FlowGraph(
+ *parsed_function(), graph_entry, flow_graph_builder_->last_used_block_id_,
+ prologue_info,
+ FlowGraph::CompilationModeFrom(flow_graph_builder_->optimizing_));
}
FlowGraph* StreamingFlowGraphBuilder::BuildGraph() {
diff --git a/runtime/vm/compiler/frontend/kernel_to_il.cc b/runtime/vm/compiler/frontend/kernel_to_il.cc
index 3b4efd5..280bf3c 100644
--- a/runtime/vm/compiler/frontend/kernel_to_il.cc
+++ b/runtime/vm/compiler/frontend/kernel_to_il.cc
@@ -1855,8 +1855,9 @@
Return(TokenPosition::kNoSource, /* omit_result_type_check = */ true);
}
- return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
- prologue_info);
+ return new (Z)
+ FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
+ prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
}
Fragment FlowGraphBuilder::BuildTypedDataViewFactoryConstructor(
@@ -2505,8 +2506,9 @@
// There is no prologue code for a method extractor.
PrologueInfo prologue_info(-1, -1);
- return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
- prologue_info);
+ return new (Z)
+ FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
+ prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
}
FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodDispatcher(
@@ -2604,8 +2606,9 @@
/* argument_count = */ 2, ICData::kNSMDispatch);
body += Return(TokenPosition::kNoSource);
- return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
- prologue_info);
+ return new (Z)
+ FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
+ prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
}
FlowGraph* FlowGraphBuilder::BuildGraphOfRecordFieldGetter(
@@ -2752,8 +2755,9 @@
// There is no prologue code for a record field getter.
PrologueInfo prologue_info(-1, -1);
- return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
- prologue_info);
+ return new (Z)
+ FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
+ prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
}
// Information used by the various dynamic closure call fragment builders.
@@ -3679,8 +3683,9 @@
body += Return(TokenPosition::kNoSource);
- return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
- prologue_info);
+ return new (Z)
+ FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
+ prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
}
FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodForwarder(
@@ -3949,8 +3954,9 @@
}
body += Return(TokenPosition::kNoSource);
- return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
- prologue_info);
+ return new (Z)
+ FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
+ prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
}
Fragment FlowGraphBuilder::BuildDefaultTypeHandling(const Function& function) {
@@ -4202,8 +4208,9 @@
// Return the result.
closure += Return(function.end_token_pos());
- return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
- prologue_info);
+ return new (Z)
+ FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
+ prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
}
FlowGraph* FlowGraphBuilder::BuildGraphOfFieldAccessor(
@@ -4315,8 +4322,9 @@
body += Return(TokenPosition::kNoSource);
PrologueInfo prologue_info(-1, -1);
- return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
- prologue_info);
+ return new (Z)
+ FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
+ prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
}
FlowGraph* FlowGraphBuilder::BuildGraphOfDynamicInvocationForwarder(
@@ -4412,8 +4420,9 @@
if (IsCompiledForOsr()) {
graph_entry_->RelinkToOsrEntry(Z, last_used_block_id_ + 1);
}
- return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
- prologue_info);
+ return new (Z)
+ FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
+ prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
}
void FlowGraphBuilder::SetConstantRangeOfCurrentDefinition(
@@ -5668,8 +5677,9 @@
--catch_depth_;
PrologueInfo prologue_info(-1, -1);
- return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
- prologue_info);
+ return new (Z)
+ FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
+ prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
}
FlowGraph* FlowGraphBuilder::BuildGraphOfAsyncFfiCallback(
@@ -5741,8 +5751,9 @@
--catch_depth_;
PrologueInfo prologue_info(-1, -1);
- return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
- prologue_info);
+ return new (Z)
+ FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
+ prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
}
void FlowGraphBuilder::SetCurrentTryCatchBlock(TryCatchBlock* try_catch_block) {
diff --git a/runtime/vm/compiler/graph_intrinsifier.cc b/runtime/vm/compiler/graph_intrinsifier.cc
index f19fe2d..7a5f384 100644
--- a/runtime/vm/compiler/graph_intrinsifier.cc
+++ b/runtime/vm/compiler/graph_intrinsifier.cc
@@ -81,7 +81,8 @@
CompilerState::Current().GetNextDeoptId()));
FlowGraph* graph =
- new FlowGraph(parsed_function, graph_entry, block_id, prologue_info);
+ new FlowGraph(parsed_function, graph_entry, block_id, prologue_info,
+ FlowGraph::CompilationMode::kIntrinsic);
compiler->set_intrinsic_flow_graph(*graph);
const Function& function = parsed_function.function();
diff --git a/runtime/vm/compiler/jit/compiler.cc b/runtime/vm/compiler/jit/compiler.cc
index 46b07a2..5d229c9 100644
--- a/runtime/vm/compiler/jit/compiler.cc
+++ b/runtime/vm/compiler/jit/compiler.cc
@@ -194,8 +194,9 @@
result.graph_entry->RelinkToOsrEntry(zone, result.num_blocks);
}
PrologueInfo prologue_info(-1, -1);
- return new (zone) FlowGraph(*parsed_function, result.graph_entry,
- result.num_blocks, prologue_info);
+ return new (zone)
+ FlowGraph(*parsed_function, result.graph_entry, result.num_blocks,
+ prologue_info, FlowGraph::CompilationModeFrom(optimized));
}
CompilationPipeline* CompilationPipeline::New(Zone* zone,
@@ -557,16 +558,13 @@
FlowGraphPrinter::PrintGraph("Unoptimized Compilation", flow_graph);
}
- const bool reorder_blocks =
- FlowGraph::ShouldReorderBlocks(function, optimized());
- if (reorder_blocks) {
+ if (flow_graph->should_reorder_blocks()) {
TIMELINE_DURATION(thread(), CompilerVerbose,
"BlockScheduler::AssignEdgeWeights");
BlockScheduler::AssignEdgeWeights(flow_graph);
}
CompilerPassState pass_state(thread(), flow_graph, &speculative_policy);
- pass_state.reorder_blocks = reorder_blocks;
if (function.ForceOptimize()) {
ASSERT(optimized());