Reland "[VM] Use IR for code in [CatchEntryInstr]s to populate captured exception/stacktrace variables"
This fixes an issue that occurred when a program was loaded via dill,
a function containing a try-catch was optimized, and the
exception/stacktrace variables were captured.
Change-Id: Icb8ea5f3557080f8274f7db2af09e33154820e5b
Reviewed-on: https://dart-review.googlesource.com/55721
Commit-Queue: Martin Kustermann <kustermann@google.com>
Reviewed-by: Vyacheslav Egorov <vegorov@google.com>
diff --git a/runtime/observatory/tests/service/service_kernel.status b/runtime/observatory/tests/service/service_kernel.status
index 4253ba1..8003fb4 100644
--- a/runtime/observatory/tests/service/service_kernel.status
+++ b/runtime/observatory/tests/service/service_kernel.status
@@ -30,7 +30,6 @@
complex_reload_test: RuntimeError
[ $arch == simdbc64 && $compiler == dartk ]
-coverage_optimized_function_test: Crash # Please triage
get_cpu_profile_timeline_rpc_test: Pass, RuntimeError # http://dartbug.com/31794
pause_on_unhandled_async_exceptions_test: RuntimeError, Timeout # Issue 31765
diff --git a/runtime/platform/growable_array.h b/runtime/platform/growable_array.h
index 32ffcc4..f6c0598 100644
--- a/runtime/platform/growable_array.h
+++ b/runtime/platform/growable_array.h
@@ -70,6 +70,16 @@
}
}
+ void EnsureLength(intptr_t new_length, const T& default_value) {
+ const intptr_t old_length = length_;
+ if (old_length < new_length) {
+ Resize(new_length);
+ for (intptr_t i = old_length; i < new_length; ++i) {
+ (*this)[i] = default_value;
+ }
+ }
+ }
+
const T& At(intptr_t index) const { return operator[](index); }
T& Last() const {
diff --git a/runtime/vm/compiler/backend/flow_graph.cc b/runtime/vm/compiler/backend/flow_graph.cc
index 5411f90..4cc0e2e 100644
--- a/runtime/vm/compiler/backend/flow_graph.cc
+++ b/runtime/vm/compiler/backend/flow_graph.cc
@@ -1081,10 +1081,34 @@
}
}
}
- } else if (block_entry->IsCatchBlockEntry()) {
+ } else if (CatchBlockEntryInstr* catch_entry =
+ block_entry->AsCatchBlockEntry()) {
+ const intptr_t raw_exception_var_envindex =
+ catch_entry->raw_exception_var() != nullptr
+ ? catch_entry->raw_exception_var()->BitIndexIn(
+ num_direct_parameters_)
+ : -1;
+ const intptr_t raw_stacktrace_var_envindex =
+ catch_entry->raw_stacktrace_var() != nullptr
+ ? catch_entry->raw_stacktrace_var()->BitIndexIn(
+ num_direct_parameters_)
+ : -1;
+
// Add real definitions for all locals and parameters.
for (intptr_t i = 0; i < env->length(); ++i) {
- ParameterInstr* param = new (zone()) ParameterInstr(i, block_entry);
+ // Replace usages of the raw exception/stacktrace variables with
+ // [SpecialParameterInstr]s.
+ Definition* param = nullptr;
+ if (raw_exception_var_envindex == i) {
+ param = new SpecialParameterInstr(SpecialParameterInstr::kException,
+ Thread::kNoDeoptId);
+ } else if (raw_stacktrace_var_envindex == i) {
+ param = new SpecialParameterInstr(SpecialParameterInstr::kStackTrace,
+ Thread::kNoDeoptId);
+ } else {
+ param = new (zone()) ParameterInstr(i, block_entry);
+ }
+
param->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp.
(*env)[i] = param;
block_entry->AsCatchBlockEntry()->initial_definitions()->Add(param);
@@ -1106,6 +1130,30 @@
// Attach environment to the block entry.
AttachEnvironment(block_entry, env);
+#if defined(TARGET_ARCH_DBC)
+ // On DBC the exception/stacktrace variables are in special registers when
+ // entering the catch block. The only usage of those special registers is
+ // within the catch block. A possible lazy-deopt at the beginning of the
+ // catch does not need to move those around, since the registers will be
+ // up-to-date when arriving in the unoptimized code and unoptimized code will
+ // take care of moving them to appropriate slots.
+ if (CatchBlockEntryInstr* catch_entry = block_entry->AsCatchBlockEntry()) {
+ Environment* deopt_env = catch_entry->env();
+ const LocalVariable* raw_exception_var = catch_entry->raw_exception_var();
+ const LocalVariable* raw_stacktrace_var = catch_entry->raw_stacktrace_var();
+ if (raw_exception_var != nullptr) {
+ Value* value = deopt_env->ValueAt(
+ raw_exception_var->BitIndexIn(num_direct_parameters_));
+ value->BindToEnvironment(constant_null());
+ }
+ if (raw_stacktrace_var != nullptr) {
+ Value* value = deopt_env->ValueAt(
+ raw_stacktrace_var->BitIndexIn(num_direct_parameters_));
+ value->BindToEnvironment(constant_null());
+ }
+ }
+#endif // defined(TARGET_ARCH_DBC)
+
// 2. Process normal instructions.
for (ForwardInstructionIterator it(block_entry); !it.Done(); it.Advance()) {
Instruction* current = it.Current();
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_dbc.cc b/runtime/vm/compiler/backend/flow_graph_compiler_dbc.cc
index 0e9f214..557f69a 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_dbc.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_dbc.cc
@@ -17,6 +17,7 @@
#include "vm/instructions.h"
#include "vm/object_store.h"
#include "vm/parser.h"
+#include "vm/simulator.h"
#include "vm/stack_frame.h"
#include "vm/stub_code.h"
#include "vm/symbols.h"
@@ -383,6 +384,12 @@
} else if (source.IsArgsDescRegister()) {
ASSERT(destination.IsRegister());
__ LoadArgDescriptorOpt(destination.reg());
+ } else if (source.IsExceptionRegister()) {
+ ASSERT(destination.IsRegister());
+ __ MoveSpecial(destination.reg(), Simulator::kExceptionSpecialIndex);
+ } else if (source.IsStackTraceRegister()) {
+ ASSERT(destination.IsRegister());
+ __ MoveSpecial(destination.reg(), Simulator::kStackTraceSpecialIndex);
} else if (source.IsConstant() && destination.IsRegister()) {
if (source.constant_instruction()->representation() == kUnboxedDouble) {
const Register result = destination.reg();
diff --git a/runtime/vm/compiler/backend/il.h b/runtime/vm/compiler/backend/il.h
index a957568..9b33e12 100644
--- a/runtime/vm/compiler/backend/il.h
+++ b/runtime/vm/compiler/backend/il.h
@@ -1641,7 +1641,8 @@
const LocalVariable& stacktrace_var,
bool needs_stacktrace,
intptr_t deopt_id,
- bool should_restore_closure_context = false)
+ const LocalVariable* raw_exception_var,
+ const LocalVariable* raw_stacktrace_var)
: BlockEntryInstr(block_id, try_index, deopt_id),
graph_entry_(graph_entry),
predecessor_(NULL),
@@ -1649,8 +1650,9 @@
catch_try_index_(catch_try_index),
exception_var_(exception_var),
stacktrace_var_(stacktrace_var),
+ raw_exception_var_(raw_exception_var),
+ raw_stacktrace_var_(raw_stacktrace_var),
needs_stacktrace_(needs_stacktrace),
- should_restore_closure_context_(should_restore_closure_context),
handler_token_pos_(handler_token_pos),
is_generated_(is_generated) {}
@@ -1669,6 +1671,11 @@
const LocalVariable& exception_var() const { return exception_var_; }
const LocalVariable& stacktrace_var() const { return stacktrace_var_; }
+ const LocalVariable* raw_exception_var() const { return raw_exception_var_; }
+ const LocalVariable* raw_stacktrace_var() const {
+ return raw_stacktrace_var_;
+ }
+
bool needs_stacktrace() const { return needs_stacktrace_; }
bool is_generated() const { return is_generated_; }
@@ -1692,12 +1699,6 @@
predecessor_ = predecessor;
}
- bool should_restore_closure_context() const {
- ASSERT(exception_var_.is_captured() == stacktrace_var_.is_captured());
- ASSERT(!exception_var_.is_captured() || should_restore_closure_context_);
- return should_restore_closure_context_;
- }
-
GraphEntryInstr* graph_entry_;
BlockEntryInstr* predecessor_;
const Array& catch_handler_types_;
@@ -1705,8 +1706,9 @@
GrowableArray<Definition*> initial_definitions_;
const LocalVariable& exception_var_;
const LocalVariable& stacktrace_var_;
+ const LocalVariable* raw_exception_var_;
+ const LocalVariable* raw_stacktrace_var_;
const bool needs_stacktrace_;
- const bool should_restore_closure_context_;
TokenPosition handler_token_pos_;
bool is_generated_;
@@ -2998,7 +3000,13 @@
// the type arguments of a generic function or an arguments descriptor.
class SpecialParameterInstr : public TemplateDefinition<0, NoThrow> {
public:
- enum SpecialParameterKind { kContext, kTypeArgs, kArgDescriptor };
+ enum SpecialParameterKind {
+ kContext,
+ kTypeArgs,
+ kArgDescriptor,
+ kException,
+ kStackTrace
+ };
SpecialParameterInstr(SpecialParameterKind kind, intptr_t deopt_id)
: TemplateDefinition(deopt_id), kind_(kind) {}
@@ -3027,6 +3035,10 @@
return "kTypeArgs";
case kArgDescriptor:
return "kArgDescriptor";
+ case kException:
+ return "kException";
+ case kStackTrace:
+ return "kStackTrace";
}
UNREACHABLE();
return NULL;
diff --git a/runtime/vm/compiler/backend/il_arm.cc b/runtime/vm/compiler/backend/il_arm.cc
index 9b5729c..3d7b61d 100644
--- a/runtime/vm/compiler/backend/il_arm.cc
+++ b/runtime/vm/compiler/backend/il_arm.cc
@@ -2888,38 +2888,15 @@
ASSERT(fp_sp_dist <= 0);
__ AddImmediate(SP, FP, fp_sp_dist);
- // Auxiliary variables introduced by the try catch can be captured if we are
- // inside a function with yield/resume points. In this case we first need
- // to restore the context to match the context at entry into the closure.
- if (should_restore_closure_context()) {
- const ParsedFunction& parsed_function = compiler->parsed_function();
- ASSERT(parsed_function.function().IsClosureFunction());
- LocalScope* scope = parsed_function.node_sequence()->scope();
-
- LocalVariable* closure_parameter = scope->VariableAt(0);
- ASSERT(!closure_parameter->is_captured());
- __ LoadFromOffset(kWord, R6, FP, closure_parameter->index() * kWordSize);
- __ LoadFieldFromOffset(kWord, R6, R6, Closure::context_offset());
-
- const intptr_t context_index =
- parsed_function.current_context_var()->index();
- __ StoreToOffset(kWord, R6, FP, context_index * kWordSize);
- }
-
- // Initialize exception and stack trace variables.
- if (exception_var().is_captured()) {
- ASSERT(stacktrace_var().is_captured());
- __ StoreIntoObjectOffset(R6,
- Context::variable_offset(exception_var().index()),
- kExceptionObjectReg);
- __ StoreIntoObjectOffset(R6,
- Context::variable_offset(stacktrace_var().index()),
- kStackTraceObjectReg);
- } else {
- __ StoreToOffset(kWord, kExceptionObjectReg, FP,
- exception_var().index() * kWordSize);
- __ StoreToOffset(kWord, kStackTraceObjectReg, FP,
- stacktrace_var().index() * kWordSize);
+ if (!compiler->is_optimizing()) {
+ if (raw_exception_var_ != nullptr) {
+ __ StoreToOffset(kWord, kExceptionObjectReg, FP,
+ raw_exception_var_->index() * kWordSize);
+ }
+ if (raw_stacktrace_var_ != nullptr) {
+ __ StoreToOffset(kWord, kStackTraceObjectReg, FP,
+ raw_stacktrace_var_->index() * kWordSize);
+ }
}
}
diff --git a/runtime/vm/compiler/backend/il_arm64.cc b/runtime/vm/compiler/backend/il_arm64.cc
index 9c0a698..9a9bc82 100644
--- a/runtime/vm/compiler/backend/il_arm64.cc
+++ b/runtime/vm/compiler/backend/il_arm64.cc
@@ -2637,40 +2637,15 @@
ASSERT(fp_sp_dist <= 0);
__ AddImmediate(SP, FP, fp_sp_dist);
- // Auxiliary variables introduced by the try catch can be captured if we are
- // inside a function with yield/resume points. In this case we first need
- // to restore the context to match the context at entry into the closure.
- if (should_restore_closure_context()) {
- const ParsedFunction& parsed_function = compiler->parsed_function();
- ASSERT(parsed_function.function().IsClosureFunction());
- LocalScope* scope = parsed_function.node_sequence()->scope();
-
- LocalVariable* closure_parameter = scope->VariableAt(0);
- ASSERT(!closure_parameter->is_captured());
- __ LoadFromOffset(R28, FP, closure_parameter->index() * kWordSize);
- __ LoadFieldFromOffset(R28, R28, Closure::context_offset());
-
- const intptr_t context_index =
- parsed_function.current_context_var()->index();
- __ StoreToOffset(R28, FP, context_index * kWordSize);
- }
-
- // Initialize exception and stack trace variables.
- if (exception_var().is_captured()) {
- ASSERT(stacktrace_var().is_captured());
- __ StoreIntoObjectOffset(R28,
- Context::variable_offset(exception_var().index()),
- kExceptionObjectReg);
- __ StoreIntoObjectOffset(R28,
- Context::variable_offset(stacktrace_var().index()),
- kStackTraceObjectReg);
- } else {
- // Restore stack and initialize the two exception variables:
- // exception and stack trace variables.
- __ StoreToOffset(kExceptionObjectReg, FP,
- exception_var().index() * kWordSize);
- __ StoreToOffset(kStackTraceObjectReg, FP,
- stacktrace_var().index() * kWordSize);
+ if (!compiler->is_optimizing()) {
+ if (raw_exception_var_ != nullptr) {
+ __ StoreToOffset(kExceptionObjectReg, FP,
+ raw_exception_var_->index() * kWordSize);
+ }
+ if (raw_stacktrace_var_ != nullptr) {
+ __ StoreToOffset(kStackTraceObjectReg, FP,
+ raw_stacktrace_var_->index() * kWordSize);
+ }
}
}
diff --git a/runtime/vm/compiler/backend/il_dbc.cc b/runtime/vm/compiler/backend/il_dbc.cc
index 093c340..c00e4e9 100644
--- a/runtime/vm/compiler/backend/il_dbc.cc
+++ b/runtime/vm/compiler/backend/il_dbc.cc
@@ -1184,69 +1184,18 @@
if (HasParallelMove()) {
compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
}
+ __ SetFrame(compiler->StackSize());
- Register context_reg = kNoRegister;
-
- // Auxiliary variables introduced by the try catch can be captured if we are
- // inside a function with yield/resume points. In this case we first need
- // to restore the context to match the context at entry into the closure.
- if (should_restore_closure_context()) {
- const ParsedFunction& parsed_function = compiler->parsed_function();
-
- ASSERT(parsed_function.function().IsClosureFunction());
- LocalScope* scope = parsed_function.node_sequence()->scope();
-
- LocalVariable* closure_parameter = scope->VariableAt(0);
- ASSERT(!closure_parameter->is_captured());
-
- const LocalVariable& current_context_var =
- *parsed_function.current_context_var();
-
- context_reg = compiler->is_optimizing()
- ? compiler->CatchEntryRegForVariable(current_context_var)
- : LocalVarIndex(0, current_context_var.index());
-
- Register closure_reg;
- if (closure_parameter->index() > 0) {
- __ Move(context_reg, LocalVarIndex(0, closure_parameter->index()));
- closure_reg = context_reg;
- } else {
- closure_reg = LocalVarIndex(0, closure_parameter->index());
- }
-
- __ LoadField(context_reg, closure_reg,
- Closure::context_offset() / kWordSize);
- }
-
- if (exception_var().is_captured()) {
- ASSERT(stacktrace_var().is_captured());
- ASSERT(context_reg != kNoRegister);
- // This will be SP[1] register so we are free to use it as a temporary.
- const Register temp = compiler->StackSize();
- __ MoveSpecial(temp, Simulator::kExceptionSpecialIndex);
- __ StoreField(context_reg,
- Context::variable_offset(exception_var().index()) / kWordSize,
- temp);
- __ MoveSpecial(temp, Simulator::kStackTraceSpecialIndex);
- __ StoreField(
- context_reg,
- Context::variable_offset(stacktrace_var().index()) / kWordSize, temp);
- } else {
- if (compiler->is_optimizing()) {
- const intptr_t exception_reg =
- compiler->CatchEntryRegForVariable(exception_var());
- const intptr_t stacktrace_reg =
- compiler->CatchEntryRegForVariable(stacktrace_var());
- __ MoveSpecial(exception_reg, Simulator::kExceptionSpecialIndex);
- __ MoveSpecial(stacktrace_reg, Simulator::kStackTraceSpecialIndex);
- } else {
- __ MoveSpecial(LocalVarIndex(0, exception_var().index()),
+ if (!compiler->is_optimizing()) {
+ if (raw_exception_var_ != nullptr) {
+ __ MoveSpecial(LocalVarIndex(0, raw_exception_var_->index()),
Simulator::kExceptionSpecialIndex);
- __ MoveSpecial(LocalVarIndex(0, stacktrace_var().index()),
+ }
+ if (raw_stacktrace_var_ != nullptr) {
+ __ MoveSpecial(LocalVarIndex(0, raw_stacktrace_var_->index()),
Simulator::kStackTraceSpecialIndex);
}
}
- __ SetFrame(compiler->StackSize());
}
EMIT_NATIVE_CODE(Throw, 0, Location::NoLocation(), LocationSummary::kCall) {
diff --git a/runtime/vm/compiler/backend/il_ia32.cc b/runtime/vm/compiler/backend/il_ia32.cc
index c92b517..e8ec6e3 100644
--- a/runtime/vm/compiler/backend/il_ia32.cc
+++ b/runtime/vm/compiler/backend/il_ia32.cc
@@ -2496,51 +2496,15 @@
ASSERT(fp_sp_dist <= 0);
__ leal(ESP, Address(EBP, fp_sp_dist));
- // Auxiliary variables introduced by the try catch can be captured if we are
- // inside a function with yield/resume points. In this case we first need
- // to restore the context to match the context at entry into the closure.
- if (should_restore_closure_context()) {
- const ParsedFunction& parsed_function = compiler->parsed_function();
- ASSERT(parsed_function.function().IsClosureFunction());
- LocalScope* scope = parsed_function.node_sequence()->scope();
-
- LocalVariable* closure_parameter = scope->VariableAt(0);
- ASSERT(!closure_parameter->is_captured());
- __ movl(EDI, Address(EBP, closure_parameter->index() * kWordSize));
- __ movl(EDI, FieldAddress(EDI, Closure::context_offset()));
-
-#ifdef DEBUG
- Label ok;
- __ LoadClassId(EBX, EDI);
- __ cmpl(EBX, Immediate(kContextCid));
- __ j(EQUAL, &ok, Assembler::kNearJump);
- __ Stop("Incorrect context at entry");
- __ Bind(&ok);
-#endif
-
- const intptr_t context_index =
- parsed_function.current_context_var()->index();
- __ movl(Address(EBP, context_index * kWordSize), EDI);
- }
-
- // Initialize exception and stack trace variables.
- if (exception_var().is_captured()) {
- ASSERT(stacktrace_var().is_captured());
- __ StoreIntoObject(
- EDI,
- FieldAddress(EDI, Context::variable_offset(exception_var().index())),
- kExceptionObjectReg);
- __ StoreIntoObject(
- EDI,
- FieldAddress(EDI, Context::variable_offset(stacktrace_var().index())),
- kStackTraceObjectReg);
- } else {
- // Restore stack and initialize the two exception variables:
- // exception and stack trace variables.
- __ movl(Address(EBP, exception_var().index() * kWordSize),
- kExceptionObjectReg);
- __ movl(Address(EBP, stacktrace_var().index() * kWordSize),
- kStackTraceObjectReg);
+ if (!compiler->is_optimizing()) {
+ if (raw_exception_var_ != nullptr) {
+ __ movl(Address(EBP, raw_exception_var_->index() * kWordSize),
+ kExceptionObjectReg);
+ }
+ if (raw_stacktrace_var_ != nullptr) {
+ __ movl(Address(EBP, raw_stacktrace_var_->index() * kWordSize),
+ kStackTraceObjectReg);
+ }
}
}
diff --git a/runtime/vm/compiler/backend/il_x64.cc b/runtime/vm/compiler/backend/il_x64.cc
index b44d650..cbbd640 100644
--- a/runtime/vm/compiler/backend/il_x64.cc
+++ b/runtime/vm/compiler/backend/il_x64.cc
@@ -2571,49 +2571,15 @@
ASSERT(fp_sp_dist <= 0);
__ leaq(RSP, Address(RBP, fp_sp_dist));
- // Auxiliary variables introduced by the try catch can be captured if we are
- // inside a function with yield/resume points. In this case we first need
- // to restore the context to match the context at entry into the closure.
- if (should_restore_closure_context()) {
- const ParsedFunction& parsed_function = compiler->parsed_function();
- ASSERT(parsed_function.function().IsClosureFunction());
- LocalScope* scope = parsed_function.node_sequence()->scope();
-
- LocalVariable* closure_parameter = scope->VariableAt(0);
- ASSERT(!closure_parameter->is_captured());
- __ movq(R12, Address(RBP, closure_parameter->index() * kWordSize));
- __ movq(R12, FieldAddress(R12, Closure::context_offset()));
-
-#ifdef DEBUG
- Label ok;
- __ LoadClassId(RBX, R12);
- __ cmpq(RBX, Immediate(kContextCid));
- __ j(EQUAL, &ok, Assembler::kNearJump);
- __ Stop("Incorrect context at entry");
- __ Bind(&ok);
-#endif
-
- const intptr_t context_index =
- parsed_function.current_context_var()->index();
- __ movq(Address(RBP, context_index * kWordSize), R12);
- }
-
- // Initialize exception and stack trace variables.
- if (exception_var().is_captured()) {
- ASSERT(stacktrace_var().is_captured());
- __ StoreIntoObject(
- R12,
- FieldAddress(R12, Context::variable_offset(exception_var().index())),
- kExceptionObjectReg);
- __ StoreIntoObject(
- R12,
- FieldAddress(R12, Context::variable_offset(stacktrace_var().index())),
- kStackTraceObjectReg);
- } else {
- __ movq(Address(RBP, exception_var().index() * kWordSize),
- kExceptionObjectReg);
- __ movq(Address(RBP, stacktrace_var().index() * kWordSize),
- kStackTraceObjectReg);
+ if (!compiler->is_optimizing()) {
+ if (raw_exception_var_ != nullptr) {
+ __ movq(Address(RBP, raw_exception_var_->index() * kWordSize),
+ kExceptionObjectReg);
+ }
+ if (raw_stacktrace_var_ != nullptr) {
+ __ movq(Address(RBP, raw_stacktrace_var_->index() * kWordSize),
+ kStackTraceObjectReg);
+ }
}
}
diff --git a/runtime/vm/compiler/backend/linearscan.cc b/runtime/vm/compiler/backend/linearscan.cc
index 10d2c09..f96a09d 100644
--- a/runtime/vm/compiler/backend/linearscan.cc
+++ b/runtime/vm/compiler/backend/linearscan.cc
@@ -595,23 +595,6 @@
range->DefineAt(catch_entry->start_pos()); // Defined at block entry.
ProcessInitialDefinition(defn, range, catch_entry);
}
- // Block the two fixed registers used by CatchBlockEntryInstr from the
- // block start to until the end of the instruction so that they are
- // preserved.
- intptr_t start = catch_entry->start_pos();
-#if !defined(TARGET_ARCH_DBC)
- const Register exception_reg = kExceptionObjectReg;
- const Register stacktrace_reg = kStackTraceObjectReg;
-#else
- const intptr_t exception_reg =
- LocalVarIndex(0, catch_entry->exception_var().index());
- const intptr_t stacktrace_reg =
- LocalVarIndex(0, catch_entry->stacktrace_var().index());
-#endif
- BlockLocation(Location::RegisterLocation(exception_reg), start,
- ToInstructionEnd(start));
- BlockLocation(Location::RegisterLocation(stacktrace_reg), start,
- ToInstructionEnd(start));
}
}
@@ -639,11 +622,42 @@
void FlowGraphAllocator::ProcessInitialDefinition(Definition* defn,
LiveRange* range,
BlockEntryInstr* block) {
-#if defined(TARGET_ARCH_DBC)
+ // Save the range end because it may change below.
+ const intptr_t range_end = range->End();
+
+ // TODO(31956): Clean up this code and factor common functionality out.
+ // Consider also making a separate [ProcessInitialDefinition] for
+ // [CatchBlockEntry]'s.
if (block->IsCatchBlockEntry()) {
- if (defn->IsParameter()) {
+ if (SpecialParameterInstr* param = defn->AsSpecialParameter()) {
+ Location loc;
+ switch (param->kind()) {
+ case SpecialParameterInstr::kException:
+ loc = Location::ExceptionLocation();
+ break;
+ case SpecialParameterInstr::kStackTrace:
+ loc = Location::StackTraceLocation();
+ break;
+ default:
+ UNREACHABLE();
+ }
+ range->set_assigned_location(loc);
+ AssignSafepoints(defn, range);
+ range->finger()->Initialize(range);
+ SplitInitialDefinitionAt(range, block->lifetime_position() + 1);
+ ConvertAllUses(range);
+
+ // On non-DBC we'll have exception/stacktrace in a register and need to
+ // ensure this register is not available for register allocation during
+ // the [CatchBlockEntry] to ensure it's not overwritten.
+ if (loc.IsRegister()) {
+ BlockLocation(loc, block->lifetime_position(),
+ block->lifetime_position() + 1);
+ }
+ return;
+#if defined(TARGET_ARCH_DBC)
+ } else if (ParameterInstr* param = defn->AsParameter()) {
// This must be in sync with FlowGraphCompiler::CatchEntryRegForVariable.
- ParameterInstr* param = defn->AsParameter();
intptr_t slot_index = param->index();
AssignSafepoints(defn, range);
range->finger()->Initialize(range);
@@ -652,8 +666,8 @@
SplitInitialDefinitionAt(range, block->lifetime_position() + 2);
ConvertAllUses(range);
BlockLocation(Location::RegisterLocation(slot_index), 0, kMaxPosition);
- } else {
- ConstantInstr* constant = defn->AsConstant();
+ return;
+ } else if (ConstantInstr* constant = defn->AsConstant()) {
ASSERT(constant != NULL);
range->set_assigned_location(Location::Constant(constant));
range->set_spill_slot(Location::Constant(constant));
@@ -668,13 +682,11 @@
CompleteRange(tail, Location::kRegister);
}
ConvertAllUses(range);
+ return;
+#endif // defined(TARGET_ARCH_DBC)
}
- return;
}
-#endif
- // Save the range end because it may change below.
- intptr_t range_end = range->End();
if (defn->IsParameter()) {
ParameterInstr* param = defn->AsParameter();
intptr_t slot_index = param->index();
@@ -705,16 +717,14 @@
Location loc;
#if defined(TARGET_ARCH_DBC)
loc = Location::ArgumentsDescriptorLocation();
- range->set_assigned_location(loc);
#else
loc = Location::RegisterLocation(ARGS_DESC_REG);
- range->set_assigned_location(loc);
#endif // defined(TARGET_ARCH_DBC)
+ range->set_assigned_location(loc);
if (loc.IsRegister()) {
AssignSafepoints(defn, range);
if (range->End() > kNormalEntryPos) {
- LiveRange* tail = range->SplitAt(kNormalEntryPos);
- CompleteRange(tail, Location::kRegister);
+ SplitInitialDefinitionAt(range, kNormalEntryPos);
}
ConvertAllUses(range);
return;
@@ -1963,7 +1973,15 @@
}
}
+ while (idx > spill_slots_.length()) {
+ spill_slots_.Add(kMaxPosition);
+ quad_spill_slots_.Add(false);
+ untagged_spill_slots_.Add(false);
+ }
+
if (idx == spill_slots_.length()) {
+ idx = spill_slots_.length();
+
// No free spill slot found. Allocate a new one.
spill_slots_.Add(0);
quad_spill_slots_.Add(need_quad);
diff --git a/runtime/vm/compiler/backend/locations.cc b/runtime/vm/compiler/backend/locations.cc
index a01abb2..44c88ee 100644
--- a/runtime/vm/compiler/backend/locations.cc
+++ b/runtime/vm/compiler/backend/locations.cc
@@ -180,8 +180,17 @@
}
UNREACHABLE();
#if TARGET_ARCH_DBC
- case kArgsDescRegister:
- return "ArgDesc";
+ case kSpecialDbcRegister:
+ switch (payload()) {
+ case kArgsDescriptorReg:
+ return "ArgDescReg";
+ case kExceptionReg:
+ return "ExceptionReg";
+ case kStackTraceReg:
+ return "StackTraceReg";
+ default:
+ UNREACHABLE();
+ }
#endif
default:
if (IsConstant()) {
diff --git a/runtime/vm/compiler/backend/locations.h b/runtime/vm/compiler/backend/locations.h
index f6ffbab..fd4d576 100644
--- a/runtime/vm/compiler/backend/locations.h
+++ b/runtime/vm/compiler/backend/locations.h
@@ -62,6 +62,14 @@
static const uword kLocationTagMask = 0x3;
public:
+#if defined(TARGET_ARCH_DBC)
+ enum SpecialDbcRegister{
+ kArgsDescriptorReg,
+ kExceptionReg,
+ kStackTraceReg,
+ };
+#endif
+
// Constant payload can overlap with kind field so Kind values
// have to be chosen in a way that their last 2 bits are never
// the same as kConstantTag or kPairLocationTag.
@@ -99,9 +107,9 @@
kFpuRegister = 12,
#ifdef TARGET_ARCH_DBC
- // We use this to signify a special `Location` where the arguments
- // descriptor can be found on DBC.
- kArgsDescRegister = 15,
+ // We use this to signify a special `Location` where the different
+ // [SpecialDbcRegister]s can be found on DBC.
+ kSpecialDbcRegister = 15,
#endif
};
@@ -130,8 +138,9 @@
COMPILE_ASSERT((kFpuRegister & kLocationTagMask) != kPairLocationTag);
#ifdef TARGET_ARCH_DBC
- COMPILE_ASSERT((kArgsDescRegister & kLocationTagMask) != kConstantTag);
- COMPILE_ASSERT((kArgsDescRegister & kLocationTagMask) != kPairLocationTag);
+ COMPILE_ASSERT((kSpecialDbcRegister & kLocationTagMask) != kConstantTag);
+ COMPILE_ASSERT((kSpecialDbcRegister & kLocationTagMask) !=
+ kPairLocationTag);
#endif
// Verify tags and tagmask.
@@ -247,12 +256,44 @@
bool IsFpuRegister() const { return kind() == kFpuRegister; }
-#ifdef TARGET_ARCH_DBC
static Location ArgumentsDescriptorLocation() {
- return Location(kArgsDescRegister, 0);
+#ifdef TARGET_ARCH_DBC
+ return Location(kSpecialDbcRegister, kArgsDescriptorReg);
+#else
+ return Location::RegisterLocation(ARGS_DESC_REG);
+#endif
}
- bool IsArgsDescRegister() const { return kind() == kArgsDescRegister; }
+ static Location ExceptionLocation() {
+#ifdef TARGET_ARCH_DBC
+ return Location(kSpecialDbcRegister, kExceptionReg);
+#else
+ return Location::RegisterLocation(kExceptionObjectReg);
+#endif
+ }
+
+ static Location StackTraceLocation() {
+#ifdef TARGET_ARCH_DBC
+ return Location(kSpecialDbcRegister, kStackTraceReg);
+#else
+ return Location::RegisterLocation(kStackTraceObjectReg);
+#endif
+ }
+
+#ifdef TARGET_ARCH_DBC
+ bool IsArgsDescRegister() const {
+ return IsSpecialDbcRegister(kArgsDescriptorReg);
+ }
+ bool IsExceptionRegister() const {
+ return IsSpecialDbcRegister(kExceptionReg);
+ }
+ bool IsStackTraceRegister() const {
+ return IsSpecialDbcRegister(kStackTraceReg);
+ }
+
+ bool IsSpecialDbcRegister(SpecialDbcRegister reg) const {
+ return kind() == kSpecialDbcRegister && payload() == reg;
+ }
#endif
FpuRegister fpu_reg() const {
diff --git a/runtime/vm/compiler/backend/redundancy_elimination.cc b/runtime/vm/compiler/backend/redundancy_elimination.cc
index 2623846..449ace0 100644
--- a/runtime/vm/compiler/backend/redundancy_elimination.cc
+++ b/runtime/vm/compiler/backend/redundancy_elimination.cc
@@ -3157,12 +3157,9 @@
// not tracked in the environment anyway.
const intptr_t parameter_count = flow_graph->num_direct_parameters();
- if (!catch_entry->exception_var().is_captured()) {
- cdefs[catch_entry->exception_var().BitIndexIn(parameter_count)] = NULL;
- }
- if (!catch_entry->stacktrace_var().is_captured()) {
- cdefs[catch_entry->stacktrace_var().BitIndexIn(parameter_count)] = NULL;
- }
+ cdefs[catch_entry->raw_exception_var()->BitIndexIn(parameter_count)] = NULL;
+ cdefs[catch_entry->raw_stacktrace_var()->BitIndexIn(parameter_count)] =
+ NULL;
for (BlockIterator block_it = flow_graph->reverse_postorder_iterator();
!block_it.Done(); block_it.Advance()) {
diff --git a/runtime/vm/compiler/backend/type_propagator.cc b/runtime/vm/compiler/backend/type_propagator.cc
index 6b9c5bb..1f0f476 100644
--- a/runtime/vm/compiler/backend/type_propagator.cc
+++ b/runtime/vm/compiler/backend/type_propagator.cc
@@ -1051,6 +1051,13 @@
return CompileType::FromCid(kTypeArgumentsCid);
case kArgDescriptor:
return CompileType::FromCid(kImmutableArrayCid);
+ case kException:
+ return CompileType(CompileType::kNonNullable, kDynamicCid,
+ &Object::dynamic_type());
+ case kStackTrace:
+ // We cannot use [kStackTraceCid] here because any kind of object can be
+ // used as a stack trace via `new Future.error(..., <obj>)` :-/
+ return CompileType::Dynamic();
}
UNREACHABLE();
return CompileType::Dynamic();
diff --git a/runtime/vm/compiler/frontend/flow_graph_builder.cc b/runtime/vm/compiler/frontend/flow_graph_builder.cc
index c26f4a6..aa65f19 100644
--- a/runtime/vm/compiler/frontend/flow_graph_builder.cc
+++ b/runtime/vm/compiler/frontend/flow_graph_builder.cc
@@ -4116,7 +4116,8 @@
owner()->AllocateBlockId(), catch_handler_index, owner()->graph_entry(),
catch_block->handler_types(), try_handler_index,
catch_block->exception_var(), catch_block->stacktrace_var(),
- catch_block->needs_stacktrace(), owner()->GetNextDeoptId());
+ catch_block->needs_stacktrace(), owner()->GetNextDeoptId(),
+ &catch_block->exception_var(), &catch_block->stacktrace_var());
owner()->AddCatchEntry(catch_entry);
AppendFragment(catch_entry, for_catch);
@@ -4162,7 +4163,8 @@
owner()->AllocateBlockId(), original_handler_index,
owner()->graph_entry(), types, catch_handler_index,
catch_block->exception_var(), catch_block->stacktrace_var(),
- catch_block->needs_stacktrace(), owner()->GetNextDeoptId());
+ catch_block->needs_stacktrace(), owner()->GetNextDeoptId(),
+ &catch_block->exception_var(), &catch_block->stacktrace_var());
owner()->AddCatchEntry(finally_entry);
AppendFragment(finally_entry, for_finally);
}
diff --git a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
index 4c13e12..95b1c04 100644
--- a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
+++ b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
@@ -2155,6 +2155,9 @@
ExitScope(builder_->reader_.min_position(),
builder_->reader_.max_position());
}
+
+ FinalizeCatchVariables();
+
--depth_.catch_;
return;
}
@@ -2172,6 +2175,8 @@
VisitStatement(); // read finalizer.
+ FinalizeCatchVariables();
+
--depth_.catch_;
return;
}
@@ -2596,6 +2601,30 @@
variables->Add(v);
}
+void StreamingScopeBuilder::FinalizeExceptionVariable(
+ GrowableArray<LocalVariable*>* variables,
+ GrowableArray<LocalVariable*>* raw_variables,
+ const String& symbol,
+ intptr_t nesting_depth) {
+ // No need to create variables for try/catch-statements inside
+ // nested functions.
+ if (depth_.function_ > 0) return;
+
+ LocalVariable* variable = (*variables)[nesting_depth - 1];
+ LocalVariable* raw_variable;
+ if (variable->is_captured()) {
+ raw_variable =
+ new LocalVariable(TokenPosition::kNoSource, TokenPosition::kNoSource,
+ symbol, AbstractType::dynamic_type());
+ const bool ok = scope_->AddVariable(raw_variable);
+ ASSERT(ok);
+ } else {
+ raw_variable = variable;
+ }
+ raw_variables->EnsureLength(nesting_depth, nullptr);
+ (*raw_variables)[nesting_depth - 1] = raw_variable;
+}
+
void StreamingScopeBuilder::AddTryVariables() {
AddExceptionVariable(&result_->catch_context_variables,
":saved_try_context_var", depth_.try_);
@@ -2608,6 +2637,16 @@
depth_.catch_);
}
+void StreamingScopeBuilder::FinalizeCatchVariables() {
+ const intptr_t unique_id = result_->raw_variable_counter_++;
+ FinalizeExceptionVariable(
+ &result_->exception_variables, &result_->raw_exception_variables,
+ GenerateName(":raw_exception", unique_id), depth_.catch_);
+ FinalizeExceptionVariable(
+ &result_->stack_trace_variables, &result_->raw_stack_trace_variables,
+ GenerateName(":raw_stacktrace", unique_id), depth_.catch_);
+}
+
void StreamingScopeBuilder::AddIteratorVariable() {
if (depth_.function_ > 0) return;
if (result_->iterator_variables.length() >= depth_.for_in_) return;
diff --git a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
index 4b71c8f..15a54cc 100644
--- a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
+++ b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
@@ -822,8 +822,14 @@
const char* prefix,
intptr_t nesting_depth);
+ void FinalizeExceptionVariable(GrowableArray<LocalVariable*>* variables,
+ GrowableArray<LocalVariable*>* raw_variables,
+ const String& symbol,
+ intptr_t nesting_depth);
+
void AddTryVariables();
void AddCatchVariables();
+ void FinalizeCatchVariables();
void AddIteratorVariable();
void AddSwitchVariable();
diff --git a/runtime/vm/compiler/frontend/kernel_to_il.cc b/runtime/vm/compiler/frontend/kernel_to_il.cc
index ee05a9c..8470128 100644
--- a/runtime/vm/compiler/frontend/kernel_to_il.cc
+++ b/runtime/vm/compiler/frontend/kernel_to_il.cc
@@ -1178,19 +1178,54 @@
intptr_t handler_index,
bool needs_stacktrace,
bool is_synthesized) {
- ASSERT(CurrentException()->is_captured() ==
- CurrentStackTrace()->is_captured());
- const bool should_restore_closure_context =
- CurrentException()->is_captured() || CurrentCatchContext()->is_captured();
+ LocalVariable* exception_var = CurrentException();
+ LocalVariable* stacktrace_var = CurrentStackTrace();
+ LocalVariable* raw_exception_var = CurrentRawException();
+ LocalVariable* raw_stacktrace_var = CurrentRawStackTrace();
+
CatchBlockEntryInstr* entry = new (Z) CatchBlockEntryInstr(
TokenPosition::kNoSource, // Token position of catch block.
is_synthesized, // whether catch block was synthesized by FE compiler
AllocateBlockId(), CurrentTryIndex(), graph_entry_, handler_types,
- handler_index, *CurrentException(), *CurrentStackTrace(),
- needs_stacktrace, GetNextDeoptId(), should_restore_closure_context);
+ handler_index, *exception_var, *stacktrace_var, needs_stacktrace,
+ GetNextDeoptId(), raw_exception_var, raw_stacktrace_var);
graph_entry_->AddCatchEntry(entry);
+
Fragment instructions(entry);
+ // Auxiliary variables introduced by the try catch can be captured if we are
+ // inside a function with yield/resume points. In this case we first need
+ // to restore the context to match the context at entry into the closure.
+ const bool should_restore_closure_context =
+ CurrentException()->is_captured() || CurrentCatchContext()->is_captured();
+ LocalVariable* context_variable = parsed_function_->current_context_var();
+ if (should_restore_closure_context) {
+ ASSERT(parsed_function_->function().IsClosureFunction());
+ LocalScope* scope = parsed_function_->node_sequence()->scope();
+
+ LocalVariable* closure_parameter = scope->VariableAt(0);
+ ASSERT(!closure_parameter->is_captured());
+ instructions += LoadLocal(closure_parameter);
+ instructions += LoadField(Closure::context_offset());
+ instructions += StoreLocal(TokenPosition::kNoSource, context_variable);
+ instructions += Drop();
+ }
+
+ if (exception_var->is_captured()) {
+ instructions += LoadLocal(context_variable);
+ instructions += LoadLocal(raw_exception_var);
+ instructions +=
+ StoreInstanceField(TokenPosition::kNoSource,
+ Context::variable_offset(exception_var->index()));
+ }
+ if (stacktrace_var->is_captured()) {
+ instructions += LoadLocal(context_variable);
+ instructions += LoadLocal(raw_stacktrace_var);
+ instructions +=
+ StoreInstanceField(TokenPosition::kNoSource,
+ Context::variable_offset(stacktrace_var->index()));
+ }
+
// :saved_try_context_var can be captured in the context of
// of the closure, in this case CatchBlockEntryInstr restores
// :current_context_var to point to closure context in the
diff --git a/runtime/vm/compiler/frontend/kernel_to_il.h b/runtime/vm/compiler/frontend/kernel_to_il.h
index 17ca4fa..0181499 100644
--- a/runtime/vm/compiler/frontend/kernel_to_il.h
+++ b/runtime/vm/compiler/frontend/kernel_to_il.h
@@ -485,7 +485,8 @@
finally_return_variable(NULL),
setter_value(NULL),
yield_jump_variable(NULL),
- yield_context_variable(NULL) {}
+ yield_context_variable(NULL),
+ raw_variable_counter_(0) {}
IntMap<LocalVariable*> locals;
IntMap<LocalScope*> scopes;
@@ -522,6 +523,12 @@
GrowableArray<LocalVariable*> stack_trace_variables;
GrowableArray<LocalVariable*> catch_context_variables;
+ // These are used to access the raw exception/stacktrace variables (and are
+ // used to put them into the captured variables in the context).
+ GrowableArray<LocalVariable*> raw_exception_variables;
+ GrowableArray<LocalVariable*> raw_stack_trace_variables;
+ intptr_t raw_variable_counter_;
+
// For-in iterators, one per for-in nesting level.
GrowableArray<LocalVariable*> iterator_variables;
};
@@ -871,6 +878,12 @@
LocalVariable* CurrentStackTrace() {
return scopes_->stack_trace_variables[catch_depth_ - 1];
}
+ LocalVariable* CurrentRawException() {
+ return scopes_->raw_exception_variables[catch_depth_ - 1];
+ }
+ LocalVariable* CurrentRawStackTrace() {
+ return scopes_->raw_stack_trace_variables[catch_depth_ - 1];
+ }
LocalVariable* CurrentCatchContext() {
return scopes_->catch_context_variables[try_depth_];
}
diff --git a/tests/language_2/vm/regress_31946_test.dart b/tests/language_2/vm/regress_31946_test.dart
new file mode 100644
index 0000000..19554a0
--- /dev/null
+++ b/tests/language_2/vm/regress_31946_test.dart
@@ -0,0 +1,32 @@
+// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// VMOptions=--no_background_compilation --optimization_counter_threshold=10
+
+import 'dart:async';
+
+import 'package:expect/expect.dart';
+
+var root = [];
+var tests = <dynamic>[
+ () async {},
+ () async {
+ await new Future.value();
+ root.singleWhere((f) => f.name == 'optimizedFunction');
+ },
+];
+
+main(args) async {
+ for (int i = 0; i < 100; ++i) {
+ int exceptions = 0;
+ for (final test in tests) {
+ try {
+ await test();
+ } on StateError {
+ exceptions++;
+ }
+ }
+ Expect.isTrue(exceptions == 1);
+ }
+}