blob: 3669dc254f61d0d6a4c267d8d0fdf2cb40068179 [file] [log] [blame]
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#include "vm/debugger.h"
#include "include/dart_api.h"
#include "vm/code_descriptors.h"
#include "vm/code_patcher.h"
#include "vm/compiler/api/deopt_id.h"
#include "vm/compiler/assembler/disassembler.h"
#include "vm/compiler/assembler/disassembler_kbc.h"
#include "vm/compiler/jit/compiler.h"
#include "vm/dart_entry.h"
#include "vm/flags.h"
#include "vm/globals.h"
#include "vm/interpreter.h"
#include "vm/isolate_reload.h"
#include "vm/json_stream.h"
#include "vm/kernel.h"
#include "vm/longjump.h"
#include "vm/message_handler.h"
#include "vm/object.h"
#include "vm/object_store.h"
#include "vm/os.h"
#include "vm/parser.h"
#include "vm/port.h"
#include "vm/runtime_entry.h"
#include "vm/service.h"
#include "vm/service_event.h"
#include "vm/service_isolate.h"
#include "vm/stack_frame.h"
#include "vm/stack_trace.h"
#include "vm/stub_code.h"
#include "vm/symbols.h"
#include "vm/thread_interrupter.h"
#include "vm/timeline.h"
#include "vm/token_position.h"
#include "vm/visitor.h"
#if !defined(DART_PRECOMPILED_RUNTIME)
#include "vm/compiler/frontend/bytecode_reader.h"
#include "vm/deopt_instructions.h"
#endif // !defined(DART_PRECOMPILED_RUNTIME)
namespace dart {
// Debugger-owned flags.
DEFINE_FLAG(bool,
            trace_debugger_stacktrace,
            false,
            "Trace debugger stacktrace collection");
DEFINE_FLAG(bool, trace_rewind, false, "Trace frame rewind");
DEFINE_FLAG(bool, verbose_debug, false, "Verbose debugger messages");
// Flags defined elsewhere in the VM that this file consults.
DECLARE_FLAG(bool, enable_interpreter);
DECLARE_FLAG(bool, trace_deoptimization);
DECLARE_FLAG(bool, warn_on_pause_with_no_debugger);
#ifndef PRODUCT
// Create an unresolved breakpoint in given token range and script.
// The location is tied to a real source position but not yet bound
// to a specific function/code offset (function_ is null).
BreakpointLocation::BreakpointLocation(const Script& script,
                                       TokenPosition token_pos,
                                       TokenPosition end_token_pos,
                                       intptr_t requested_line_number,
                                       intptr_t requested_column_number)
    : script_(script.raw()),
      url_(script.url()),
      token_pos_(token_pos),
      end_token_pos_(end_token_pos),
      next_(NULL),
      conditions_(NULL),
      requested_line_number_(requested_line_number),
      requested_column_number_(requested_column_number),
      function_(Function::null()),
      // Resolved positions in bytecode/code are filled in by SetResolved().
      bytecode_token_pos_(TokenPosition::kNoSource),
      code_token_pos_(TokenPosition::kNoSource) {
  ASSERT(!script.IsNull());
  ASSERT(token_pos_.IsReal());
}
// Create a latent breakpoint at given url and line number.
// Latent locations have no script/token position yet (the script may
// not be loaded); they are identified by url + requested line/column.
BreakpointLocation::BreakpointLocation(const String& url,
                                       intptr_t requested_line_number,
                                       intptr_t requested_column_number)
    : script_(Script::null()),
      url_(url.raw()),
      token_pos_(TokenPosition::kNoSource),
      end_token_pos_(TokenPosition::kNoSource),
      next_(NULL),
      conditions_(NULL),
      requested_line_number_(requested_line_number),
      requested_column_number_(requested_column_number),
      function_(Function::null()),
      bytecode_token_pos_(TokenPosition::kNoSource),
      code_token_pos_(TokenPosition::kNoSource) {
  ASSERT(requested_line_number_ >= 0);
}
BreakpointLocation::~BreakpointLocation() {
  // Dispose of every breakpoint chained off this location.
  for (Breakpoint* bpt = breakpoints(); bpt != NULL;) {
    Breakpoint* next = bpt->next();
    delete bpt;
    bpt = next;
  }
}
// A location counts as enabled as soon as at least one breakpoint
// is attached to it.
bool BreakpointLocation::AnyEnabled() const {
  return breakpoints() != NULL;
}
// Bind this (previously unresolved) location to a concrete function and
// exact token position, recording whether the resolution happened against
// bytecode or compiled code.
void BreakpointLocation::SetResolved(bool in_bytecode,
                                     const Function& func,
                                     TokenPosition token_pos) {
  ASSERT(!IsLatent());
  ASSERT(func.script() == script_);
  // The resolved position must fall within the function's token range.
  ASSERT((func.token_pos() <= token_pos) &&
         (token_pos <= func.end_token_pos()));
  ASSERT(func.is_debuggable());
  function_ = func.raw();
  // Collapse the token range to the single resolved position.
  token_pos_ = token_pos;
  end_token_pos_ = token_pos;
  if (in_bytecode) {
    bytecode_token_pos_ = token_pos;
  } else {
    code_token_pos_ = token_pos;
  }
}
// Returned resolved pos is either in code or in bytecode.
void BreakpointLocation::GetCodeLocation(Script* script,
                                         TokenPosition* pos) const {
  if (!IsLatent()) {
    *script = this->script();
    *pos = token_pos_;
    return;
  }
  // Latent locations have no script or source position yet.
  *script = Script::null();
  *pos = TokenPosition::kNoSource;
}
// Re-point this breakpoint at a new location.
void Breakpoint::set_bpt_location(BreakpointLocation* new_bpt_location) {
  // Only latent breakpoints can be moved.
  ASSERT((new_bpt_location == NULL) || bpt_location_->IsLatent());
  bpt_location_ = new_bpt_location;
}
// GC support: keep the raw closure field of this breakpoint alive/updated.
void Breakpoint::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&closure_));
}
// GC support: visit the raw object fields of this location and of every
// conditional breakpoint chained off it.
void BreakpointLocation::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&script_));
  visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&url_));
  visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&function_));
  for (Breakpoint* bpt = conditions_; bpt != NULL; bpt = bpt->next()) {
    bpt->VisitObjectPointers(visitor);
  }
}
// Serialize this breakpoint as a service-protocol "Breakpoint" object.
void Breakpoint::PrintJSON(JSONStream* stream) {
  JSONObject jsobj(stream);
  jsobj.AddProperty("type", "Breakpoint");
  jsobj.AddFixedServiceId("breakpoints/%" Pd "", id());
  jsobj.AddProperty("breakpointNumber", id());
  // Only emit the flag when true to keep the payload small.
  if (is_synthetic_async()) {
    jsobj.AddProperty("isSyntheticAsyncContinuation", is_synthetic_async());
  }
  jsobj.AddProperty("resolved", bpt_location_->IsResolved());
  // Resolved locations have exact positions; unresolved ones only a range.
  if (bpt_location_->IsResolved()) {
    jsobj.AddLocation(bpt_location_);
  } else {
    jsobj.AddUnresolvedLocation(bpt_location_);
  }
}
// GC support: keep the code, bytecode and saved-instruction fields alive.
void CodeBreakpoint::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&code_));
  visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&bytecode_));
  visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&saved_value_));
}
// Activation frame for a compiled-code Dart frame at the given pc/fp/sp.
// |deopt_frame|/|deopt_frame_offset| carry materialized values when the
// frame was deoptimized.
ActivationFrame::ActivationFrame(uword pc,
                                 uword fp,
                                 uword sp,
                                 const Code& code,
                                 const Array& deopt_frame,
                                 intptr_t deopt_frame_offset,
                                 ActivationFrame::Kind kind)
    : pc_(pc),
      fp_(fp),
      sp_(sp),
      ctx_(Context::ZoneHandle()),
      code_(Code::ZoneHandle(code.raw())),
      bytecode_(Bytecode::ZoneHandle()),
      function_(Function::ZoneHandle(code.function())),
      live_frame_((kind == kRegular) || (kind == kAsyncActivation)),
      token_pos_initialized_(false),
      token_pos_(TokenPosition::kNoSource),
      try_index_(-1),
      deopt_id_(DeoptId::kNone),
      line_number_(-1),
      column_number_(-1),
      context_level_(-1),
      deopt_frame_(Array::ZoneHandle(deopt_frame.raw())),
      deopt_frame_offset_(deopt_frame_offset),
      kind_(kind),
      vars_initialized_(false),
      var_descriptors_(LocalVarDescriptors::ZoneHandle()),
      desc_indices_(8),
      pc_desc_(PcDescriptors::ZoneHandle()) {
  ASSERT(!function_.IsNull());
}
#if !defined(DART_PRECOMPILED_RUNTIME)
// Activation frame for an interpreted (bytecode) Dart frame.
ActivationFrame::ActivationFrame(uword pc,
                                 uword fp,
                                 uword sp,
                                 const Bytecode& bytecode,
                                 ActivationFrame::Kind kind)
    : pc_(pc),
      fp_(fp),
      sp_(sp),
      ctx_(Context::ZoneHandle()),
      code_(Code::ZoneHandle()),
      bytecode_(Bytecode::ZoneHandle(bytecode.raw())),
      function_(Function::ZoneHandle(bytecode.function())),
      live_frame_((kind == kRegular) || (kind == kAsyncActivation)),
      token_pos_initialized_(false),
      token_pos_(TokenPosition::kNoSource),
      try_index_(-1),
      deopt_id_(DeoptId::kNone),
      line_number_(-1),
      column_number_(-1),
      context_level_(-1),
      deopt_frame_(Array::ZoneHandle()),
      deopt_frame_offset_(0),
      kind_(kind),
      vars_initialized_(false),
      var_descriptors_(LocalVarDescriptors::ZoneHandle()),
      desc_indices_(8),
      pc_desc_(PcDescriptors::ZoneHandle()) {
  // The frame of a bytecode stub has a null function. It may be encountered
  // when single stepping.
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
// Placeholder activation frame with no pc/fp/sp (e.g. an async suspension
// marker); only kRegular kinds are considered live.
ActivationFrame::ActivationFrame(Kind kind)
    : pc_(0),
      fp_(0),
      sp_(0),
      ctx_(Context::ZoneHandle()),
      code_(Code::ZoneHandle()),
      bytecode_(Bytecode::ZoneHandle()),
      function_(Function::ZoneHandle()),
      live_frame_(kind == kRegular),
      token_pos_initialized_(false),
      token_pos_(TokenPosition::kNoSource),
      try_index_(-1),
      deopt_id_(DeoptId::kNone),
      line_number_(-1),
      column_number_(-1),
      context_level_(-1),
      deopt_frame_(Array::ZoneHandle()),
      deopt_frame_offset_(0),
      kind_(kind),
      vars_initialized_(false),
      var_descriptors_(LocalVarDescriptors::ZoneHandle()),
      desc_indices_(8),
      pc_desc_(PcDescriptors::ZoneHandle()) {}
// Non-live activation frame reconstructed from a suspended async closure:
// the function, code/bytecode and captured context are pulled from the
// closure itself rather than from a stack frame.
ActivationFrame::ActivationFrame(const Closure& async_activation)
    : pc_(0),
      fp_(0),
      sp_(0),
      ctx_(Context::ZoneHandle()),
      code_(Code::ZoneHandle()),
      bytecode_(Bytecode::ZoneHandle()),
      function_(Function::ZoneHandle()),
      live_frame_(false),
      token_pos_initialized_(false),
      token_pos_(TokenPosition::kNoSource),
      try_index_(-1),
      deopt_id_(DeoptId::kNone),
      line_number_(-1),
      column_number_(-1),
      context_level_(-1),
      deopt_frame_(Array::ZoneHandle()),
      deopt_frame_offset_(0),
      kind_(kAsyncActivation),
      vars_initialized_(false),
      var_descriptors_(LocalVarDescriptors::ZoneHandle()),
      desc_indices_(8),
      pc_desc_(PcDescriptors::ZoneHandle()) {
  // Extract the function and the code from the asynchronous activation.
  function_ = async_activation.function();
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Prefer bytecode when the function has no compiled code.
  if (!function_.HasCode() && function_.HasBytecode()) {
    bytecode_ = function_.bytecode();
  }
#endif
  if (bytecode_.IsNull()) {
    // Force-optimize functions should not be debuggable.
    ASSERT(!function_.ForceOptimize());
    function_.EnsureHasCompiledUnoptimizedCode();
    code_ = function_.unoptimized_code();
  }
  ctx_ = async_activation.context();
  ASSERT(fp_ == 0);
  ASSERT(!ctx_.IsNull());
}
// Isolate lifecycle events are only wanted for non-internal isolates
// when a service client listens on the isolate stream.
bool Debugger::NeedsIsolateEvents() {
  if (Isolate::IsVMInternalIsolate(isolate_)) {
    return false;
  }
  return Service::isolate_stream.enabled();
}
// Debug events are needed when a service client listens on the debug
// stream, or when warn-on-pause is requested via flag.
bool Debugger::NeedsDebugEvents() {
  ASSERT(!Isolate::IsVMInternalIsolate(isolate_));
  return FLAG_warn_on_pause_with_no_debugger || Service::debug_stream.enabled();
}
// Forward a non-pause service event to the service layer.
void Debugger::InvokeEventHandler(ServiceEvent* event) {
  ASSERT(!event->IsPause());  // For pause events, call Pause instead.
  Service::HandleEvent(event);
}
// Pause the isolate in response to an interrupt request.
ErrorPtr Debugger::PauseInterrupted() {
  return PauseRequest(ServiceEvent::kPauseInterrupted);
}
// Pause the isolate after a service request asked for it.
ErrorPtr Debugger::PausePostRequest() {
  return PauseRequest(ServiceEvent::kPausePostRequest);
}
// Common implementation for externally requested pauses: collect and cache
// stack traces, enter the pause loop, then honor any stepping request made
// while paused. Returns a sticky error (e.g. an unwind) if one occurred.
ErrorPtr Debugger::PauseRequest(ServiceEvent::EventKind kind) {
  if (ignore_breakpoints_ || IsPaused()) {
    // We don't let the isolate get interrupted if we are already
    // paused or ignoring breakpoints.
    return Thread::Current()->StealStickyError();
  }
  ServiceEvent event(isolate_, kind);
  DebuggerStackTrace* trace = CollectStackTrace();
  if (trace->Length() > 0) {
    event.set_top_frame(trace->FrameAt(0));
  }
  CacheStackTraces(trace, CollectAsyncCausalStackTrace(),
                   CollectAwaiterReturnStackTrace());
  // Default resume behavior unless the client requests stepping.
  resume_action_ = kContinue;
  Pause(&event);
  HandleSteppingRequest(trace);
  ClearCachedStackTraces();
  // If any error occurred while in the debug message loop, return it here.
  NoSafepointScope no_safepoint;
  ErrorPtr error = Thread::Current()->StealStickyError();
  ASSERT((error == Error::null()) || error->IsUnwindError());
  return error;
}
// Notify the service of a breakpoint add/remove/resolve, if anyone listens.
void Debugger::SendBreakpointEvent(ServiceEvent::EventKind kind,
                                   Breakpoint* bpt) {
  if (!NeedsDebugEvents()) {
    return;
  }
  // TODO(turnidge): Currently we send single-shot breakpoint events
  // to the vm service. Do we want to change this?
  ServiceEvent event(isolate_, kind);
  event.set_breakpoint(bpt);
  InvokeEventHandler(&event);
}
// Prepend |bpt| to this location's breakpoint list, sync the location with
// the debugger's code patching state, and announce the addition.
void BreakpointLocation::AddBreakpoint(Breakpoint* bpt, Debugger* dbg) {
  bpt->set_next(breakpoints());
  set_breakpoints(bpt);
  dbg->SyncBreakpointLocation(this);
  dbg->SendBreakpointEvent(ServiceEvent::kBreakpointAdded, bpt);
}
// Return the existing repeated breakpoint at this location, or create one.
Breakpoint* BreakpointLocation::AddRepeated(Debugger* dbg) {
  for (Breakpoint* bpt = breakpoints(); bpt != NULL; bpt = bpt->next()) {
    if (bpt->IsRepeated()) {
      return bpt;
    }
  }
  // None found: create, mark, and register a fresh one.
  Breakpoint* bpt = new Breakpoint(dbg->nextId(), this);
  bpt->SetIsRepeated();
  AddBreakpoint(bpt, dbg);
  return bpt;
}
// Return the existing single-shot breakpoint at this location, or create one.
Breakpoint* BreakpointLocation::AddSingleShot(Debugger* dbg) {
  for (Breakpoint* bpt = breakpoints(); bpt != NULL; bpt = bpt->next()) {
    if (bpt->IsSingleShot()) {
      return bpt;
    }
  }
  // None found: create, mark, and register a fresh one.
  Breakpoint* bpt = new Breakpoint(dbg->nextId(), this);
  bpt->SetIsSingleShot();
  AddBreakpoint(bpt, dbg);
  return bpt;
}
// Return (or create) a breakpoint that only fires for |closure|.
Breakpoint* BreakpointLocation::AddPerClosure(Debugger* dbg,
                                              const Instance& closure,
                                              bool for_over_await) {
  // Do not reuse existing breakpoints for stepping over await clauses.
  // A second async step-over command will set a new breakpoint before
  // the existing one gets deleted when first async step-over resumes.
  if (!for_over_await) {
    for (Breakpoint* bpt = breakpoints(); bpt != NULL; bpt = bpt->next()) {
      if (bpt->IsPerClosure() && (bpt->closure() == closure.raw())) {
        return bpt;
      }
    }
  }
  Breakpoint* bpt = new Breakpoint(dbg->nextId(), this);
  bpt->SetIsPerClosure(closure);
  bpt->set_is_synthetic_async(for_over_await);
  AddBreakpoint(bpt, dbg);
  return bpt;
}
// Render a function name as "Class.name", or just "name" for top-level
// functions. The result is allocated in the current thread's zone.
const char* Debugger::QualifiedFunctionName(const Function& func) {
  const String& func_name = String::Handle(func.name());
  Class& func_class = Class::Handle(func.Owner());
  String& class_name = String::Handle(func_class.Name());
  const bool is_top_level = func_class.IsTopLevel();
  return OS::SCreate(Thread::Current()->zone(), "%s%s%s",
                     is_top_level ? "" : class_name.ToCString(),
                     is_top_level ? "" : ".", func_name.ToCString());
}
// Returns true if the function |func| overlaps the token range
// [|token_pos|, |end_token_pos|] in |script|.
static bool FunctionOverlaps(const Function& func,
                             const Script& script,
                             TokenPosition token_pos,
                             TokenPosition end_token_pos) {
  TokenPosition func_start = func.token_pos();
  // Either the range's start lies inside the function, or the function's
  // start lies inside the range.
  if (((func_start <= token_pos) && (token_pos <= func.end_token_pos())) ||
      ((token_pos <= func_start) && (func_start <= end_token_pos))) {
    // Check script equality second because it allocates
    // handles as a side effect.
    return func.script() == script.raw();
  }
  return false;
}
// Returns true for compiler-synthesized functions that have no user-visible
// source, and thus should not be surfaced to the debugger user.
static bool IsImplicitFunction(const Function& func) {
  switch (func.kind()) {
    case FunctionLayout::kImplicitGetter:
    case FunctionLayout::kImplicitSetter:
    case FunctionLayout::kImplicitStaticGetter:
    case FunctionLayout::kFieldInitializer:
    case FunctionLayout::kMethodExtractor:
    case FunctionLayout::kNoSuchMethodDispatcher:
    case FunctionLayout::kInvokeFieldDispatcher:
    case FunctionLayout::kIrregexpFunction:
      return true;
    default:
      break;
  }
  // A function with an empty token range (e.g. an implicit constructor)
  // is also considered implicit.
  return func.token_pos() == func.end_token_pos();
}
// Returns true if a breakpoint (user-set or code) applies to |func|.
bool Debugger::HasBreakpoint(const Function& func, Zone* zone) {
  if (!func.HasCode() && !func.HasBytecode()) {
    // If the function is not compiled yet, just check whether there
    // is a user-defined breakpoint that falls into the token
    // range of the function. This may be a false positive: the breakpoint
    // might be inside a local closure.
    Script& script = Script::Handle(zone);
    for (BreakpointLocation* sbpt = breakpoint_locations_; sbpt != NULL;
         sbpt = sbpt->next_) {
      script = sbpt->script();
      if (FunctionOverlaps(func, script, sbpt->token_pos(),
                           sbpt->end_token_pos())) {
        return true;
      }
    }
    return false;
  }
  // Compiled case: look for a code breakpoint bound to this function.
  for (CodeBreakpoint* cbpt = code_breakpoints_; cbpt != NULL;
       cbpt = cbpt->next_) {
    if (func.raw() == cbpt->function()) {
      return true;
    }
  }
  return false;
}
// Returns true if any code breakpoint is installed in |code|.
bool Debugger::HasBreakpoint(const Code& code) {
  for (CodeBreakpoint* cbpt = code_breakpoints_; cbpt != NULL;
       cbpt = cbpt->next_) {
    if (code.raw() == cbpt->code_) {
      return true;
    }
  }
  return false;
}
// Emit all breakpoints — both regular and latent — into |jsarr|.
void Debugger::PrintBreakpointsToJSONArray(JSONArray* jsarr) const {
  PrintBreakpointsListToJSONArray(breakpoint_locations_, jsarr);
  PrintBreakpointsListToJSONArray(latent_locations_, jsarr);
}
// Emit every breakpoint of every location in the list starting at |sbpt|.
void Debugger::PrintBreakpointsListToJSONArray(BreakpointLocation* sbpt,
                                               JSONArray* jsarr) const {
  for (; sbpt != NULL; sbpt = sbpt->next_) {
    for (Breakpoint* bpt = sbpt->breakpoints(); bpt != NULL;
         bpt = bpt->next()) {
      jsarr->AddValue(bpt);
    }
  }
}
// Serialize debugger settings (currently only exception-pause mode).
void Debugger::PrintSettingsToJSONObject(JSONObject* jsobj) const {
  // This won't cut it when we support filtering by class, etc.
  switch (GetExceptionPauseInfo()) {
    case kNoPauseOnExceptions:
      jsobj->AddProperty("_exceptions", "none");
      break;
    case kPauseOnAllExceptions:
      jsobj->AddProperty("_exceptions", "all");
      break;
    case kPauseOnUnhandledExceptions:
      jsobj->AddProperty("_exceptions", "unhandled");
      break;
    default:
      UNREACHABLE();
  }
}
// If the current top Dart frame is interpreted, returns the fp of the caller
// in compiled code that invoked the interpreter, or 0 if not found.
// If the current top Dart frame is compiled, returns the fp of the caller in
// interpreted bytecode that invoked compiled code, or ULONG_MAX if not found.
// Returning compiled code fp 0 (or bytecode fp ULONG_MAX) as fp value insures
// that the fp will compare as a callee of any valid frame pointer of the same
// mode (compiled code or bytecode).
static uword CrossCallerFp() {
  StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames,
                              Thread::Current(),
                              StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame;
  // Find the topmost Dart frame to determine the current execution mode.
  do {
    frame = iterator.NextFrame();
    RELEASE_ASSERT(frame != nullptr);
  } while (!frame->IsDartFrame());
  const bool top_is_interpreted = frame->is_interpreted();
  // Walk down until the first Dart frame in the *other* mode.
  do {
    frame = iterator.NextFrame();
    if (frame == nullptr) {
      // No mode crossing found; return the sentinel for the other mode.
      return top_is_interpreted ? 0 : ULONG_MAX;
    }
    if (!frame->IsDartFrame()) {
      continue;
    }
  } while (top_is_interpreted == frame->is_interpreted());
  return frame->fp();
}
// Order this frame against |other_fp|, which may belong to the other
// execution mode (compiled vs. interpreted). Frame pointers of different
// modes are not directly comparable, so mode crossings go through
// CrossCallerFp().
ActivationFrame::Relation ActivationFrame::CompareTo(
    uword other_fp,
    bool other_is_interpreted) const {
  if (fp() == other_fp) {
    ASSERT(IsInterpreted() == other_is_interpreted);
    return kSelf;
  }
  if (IsInterpreted()) {
    if (!other_is_interpreted) {
      // Instead of fp(), use the fp of the compiled frame that called into the
      // interpreter (CrossCallerFp).
      // Note that if CrossCallerFp == other_fp, it must compare as a caller.
      return IsCalleeFrameOf(other_fp, CrossCallerFp()) ? kCallee : kCaller;
    }
    return IsBytecodeCalleeFrameOf(other_fp, fp()) ? kCallee : kCaller;
  }
  if (other_is_interpreted) {
    // Instead of fp(), use the fp of the interpreted frame that called into
    // compiled code (CrossCallerFp).
    // Note that if CrossCallerFp == other_fp, it must compare as a caller.
    return IsBytecodeCalleeFrameOf(other_fp, CrossCallerFp()) ? kCallee
                                                              : kCaller;
  }
  return IsCalleeFrameOf(other_fp, fp()) ? kCallee : kCaller;
}
// Qualified name of this frame's function as a new String.
StringPtr ActivationFrame::QualifiedFunctionName() {
  return String::New(Debugger::QualifiedFunctionName(function()));
}
// Url of the script defining this frame's function.
StringPtr ActivationFrame::SourceUrl() {
  return Script::Handle(SourceScript()).url();
}
// Script that defines this frame's function.
ScriptPtr ActivationFrame::SourceScript() {
  return function().script();
}
// Library owning this frame's function (via its origin class).
LibraryPtr ActivationFrame::Library() {
  const Class& cls = Class::Handle(function().origin());
  return cls.library();
}
// Lazily cache this frame's PcDescriptors (compiled frames only).
void ActivationFrame::GetPcDescriptors() {
  ASSERT(!IsInterpreted());  // We need to set try_index_ simultaneously.
  if (pc_desc_.IsNull()) {
    pc_desc_ = code().pc_descriptors();
    ASSERT(!pc_desc_.IsNull());
  }
}
// If not token_pos_initialized_, compute token_pos_, try_index_ and,
// if not IsInterpreted(), also compute deopt_id_.
TokenPosition ActivationFrame::TokenPos() {
  if (!token_pos_initialized_) {
    token_pos_initialized_ = true;
    if (IsInterpreted()) {
      // Bytecode frames carry their own pc -> token/try-index maps.
      token_pos_ = bytecode().GetTokenIndexOfPC(pc_);
      try_index_ = bytecode().GetTryIndexAtPc(pc_);
      return token_pos_;
    }
    token_pos_ = TokenPosition::kNoSource;
    GetPcDescriptors();
    // Find the descriptor whose pc offset matches this frame's pc exactly.
    PcDescriptors::Iterator iter(pc_desc_, PcDescriptorsLayout::kAnyKind);
    const uword pc_offset = pc_ - code().PayloadStart();
    while (iter.MoveNext()) {
      if (iter.PcOffset() == pc_offset) {
        try_index_ = iter.TryIndex();
        token_pos_ = iter.TokenPos();
        deopt_id_ = iter.DeoptId();
        break;
      }
    }
  }
  return token_pos_;
}
// Try-index at this frame's pc; computed lazily via TokenPos().
intptr_t ActivationFrame::TryIndex() {
  if (!token_pos_initialized_) {
    TokenPos();  // Side effect: computes token_pos_initialized_, try_index_.
  }
  return try_index_;
}
// Deopt id at this frame's pc (compiled frames only); lazily computed.
intptr_t ActivationFrame::DeoptId() {
  ASSERT(!IsInterpreted());
  if (!token_pos_initialized_) {
    TokenPos();  // Side effect: computes token_pos_initialized_, try_index_.
  }
  return deopt_id_;
}
intptr_t ActivationFrame::LineNumber() {
  // Compute line number lazily since it causes scanning of the script.
  if ((line_number_ < 0) && TokenPos().IsSourcePosition()) {
    const TokenPosition token_pos = TokenPos().SourcePosition();
    const Script& script = Script::Handle(SourceScript());
    script.GetTokenLocation(token_pos, &line_number_, NULL);
  }
  return line_number_;
}
intptr_t ActivationFrame::ColumnNumber() {
  // Compute column number lazily since it causes scanning of the script.
  if ((column_number_ < 0) && TokenPos().IsSourcePosition()) {
    const TokenPosition token_pos = TokenPos().SourcePosition();
    const Script& script = Script::Handle(SourceScript());
    // Columns require the full source text; line-only info is not enough.
    if (script.HasSource()) {
      script.GetTokenLocation(token_pos, &line_number_, &column_number_);
    } else {
      column_number_ = -1;
    }
  }
  return column_number_;
}
// Lazily cache the local-variable descriptors for this frame, compiling
// unoptimized code first if necessary (compiled frames only).
void ActivationFrame::GetVarDescriptors() {
  if (var_descriptors_.IsNull()) {
    if (IsInterpreted()) {
      var_descriptors_ = bytecode().GetLocalVarDescriptors();
      ASSERT(!var_descriptors_.IsNull());
      return;
    }
    Code& unoptimized_code = Code::Handle(function().unoptimized_code());
    if (unoptimized_code.IsNull()) {
      // Var descriptors only exist on unoptimized code; produce it on demand.
      Thread* thread = Thread::Current();
      Zone* zone = thread->zone();
      const Error& error = Error::Handle(
          zone, Compiler::EnsureUnoptimizedCode(thread, function()));
      if (!error.IsNull()) {
        Exceptions::PropagateError(error);
      }
      unoptimized_code = function().unoptimized_code();
    }
    ASSERT(!unoptimized_code.IsNull());
    var_descriptors_ = unoptimized_code.GetLocalVarDescriptors();
    ASSERT(!var_descriptors_.IsNull());
  }
}
bool ActivationFrame::IsDebuggable() const {
  // When stepping in bytecode stub, function is null.
  return !function().IsNull() && Debugger::IsDebuggable(function());
}
// Fatal diagnostic path: dump everything known about this frame (function,
// pc, disassembly, full stack) and abort the VM. Called when descriptor
// data is inconsistent with the frame state.
void ActivationFrame::PrintDescriptorsError(const char* message) {
  OS::PrintErr("Bad descriptors: %s\n", message);
  OS::PrintErr("function %s\n", function().ToQualifiedCString());
  OS::PrintErr("pc_ %" Px "\n", pc_);
  OS::PrintErr("deopt_id_ %" Px "\n", deopt_id_);
  OS::PrintErr("context_level_ %" Px "\n", context_level_);
  OS::PrintErr("token_pos_ %s\n", token_pos_.ToCString());
  if (function().is_declared_in_bytecode()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
    KernelBytecodeDisassembler::Disassemble(function());
#else
    UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  }
  if (!IsInterpreted()) {
    DisassembleToStdout formatter;
    code().Disassemble(&formatter);
    PcDescriptors::Handle(code().pc_descriptors()).Print();
  }
  // Dump the whole stack to aid post-mortem debugging.
  StackFrameIterator frames(ValidationPolicy::kDontValidateFrames,
                            Thread::Current(),
                            StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame = frames.NextFrame();
  while (frame != NULL) {
    OS::PrintErr("%s\n", frame->ToCString());
    frame = frames.NextFrame();
  }
  OS::Abort();
}
// Calculate the context level at the current pc of the frame.
// Bytecode frames read it from the bytecode's local-variables info;
// compiled frames read it from the var descriptors keyed by deopt id.
// The result is cached in context_level_.
intptr_t ActivationFrame::ContextLevel() {
  ASSERT(live_frame_);
  const Context& ctx = GetSavedCurrentContext();
  if (context_level_ < 0 && !ctx.IsNull()) {
    if (IsInterpreted()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
      Thread* thread = Thread::Current();
      Zone* zone = thread->zone();
      const auto& bytecode = Bytecode::Handle(zone, function_.bytecode());
      if (!bytecode.HasLocalVariablesInfo()) {
        PrintDescriptorsError("Missing local variables info");
      }
      intptr_t pc_offset = pc_ - bytecode.PayloadStart();
      // Look for innermost scope, i.e. with the highest context level.
      // Since scopes are ordered by StartPC(), the last scope which includes
      // pc_offset will be the innermost one.
      kernel::BytecodeLocalVariablesIterator local_vars(zone, bytecode);
      while (local_vars.MoveNext()) {
        if (local_vars.Kind() ==
            kernel::BytecodeLocalVariablesIterator::kScope) {
          if (local_vars.StartPC() > pc_offset) {
            break;
          }
          if (pc_offset <= local_vars.EndPC()) {
            ASSERT(context_level_ <= local_vars.ContextLevel());
            context_level_ = local_vars.ContextLevel();
          }
        }
      }
      if (context_level_ < 0) {
        PrintDescriptorsError("Missing context level in local variables info");
      }
#else
      UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    } else {
      ASSERT(!code_.is_optimized());
      GetVarDescriptors();
      intptr_t deopt_id = DeoptId();
      if (deopt_id == DeoptId::kNone) {
        PrintDescriptorsError("Missing deopt id");
      }
      // Find the kContextLevel entry whose deopt-id range covers this pc.
      intptr_t var_desc_len = var_descriptors_.Length();
      bool found = false;
      for (intptr_t cur_idx = 0; cur_idx < var_desc_len; cur_idx++) {
        LocalVarDescriptorsLayout::VarInfo var_info;
        var_descriptors_.GetInfo(cur_idx, &var_info);
        const int8_t kind = var_info.kind();
        if ((kind == LocalVarDescriptorsLayout::kContextLevel) &&
            (deopt_id >= var_info.begin_pos.value()) &&
            (deopt_id <= var_info.end_pos.value())) {
          context_level_ = var_info.index();
          found = true;
          break;
        }
      }
      if (!found) {
        PrintDescriptorsError("Missing context level in var descriptors");
      }
      ASSERT(context_level_ >= 0);
    }
  }
  return context_level_;
}
// Look up an async-machinery variable (e.g. :async_completer) by name in
// this async closure frame. Returns Object::null() if the frame is not an
// async closure or the variable is not found.
ObjectPtr ActivationFrame::GetAsyncContextVariable(const String& name) {
  if (!function_.IsAsyncClosure() && !function_.IsAsyncGenClosure()) {
    return Object::null();
  }
  GetVarDescriptors();
  intptr_t var_ctxt_level = -1;
  intptr_t ctxt_slot = -1;
  intptr_t var_desc_len = var_descriptors_.Length();
  for (intptr_t i = 0; i < var_desc_len; i++) {
    LocalVarDescriptorsLayout::VarInfo var_info;
    var_descriptors_.GetInfo(i, &var_info);
    if (var_descriptors_.GetName(i) == name.raw()) {
      const int8_t kind = var_info.kind();
      if (!live_frame_) {
        ASSERT(kind == LocalVarDescriptorsLayout::kContextVar);
      }
      const auto variable_index = VariableIndex(var_info.index());
      if (kind == LocalVarDescriptorsLayout::kStackVar) {
        return GetStackVar(variable_index);
      } else {
        ASSERT(kind == LocalVarDescriptorsLayout::kContextVar);
        // Variable descriptors constructed from bytecode have all variables of
        // enclosing functions, even shadowed by the current function.
        // Pick the variable with the highest context level.
        if (var_info.scope_id > var_ctxt_level) {
          var_ctxt_level = var_info.scope_id;
          ctxt_slot = variable_index.value();
        }
      }
    }
  }
  if (var_ctxt_level >= 0) {
    if (!live_frame_) {
      ASSERT(!ctx_.IsNull());
      // Compiled code uses relative context levels, i.e. the frame context
      // level is always 0 on entry.
      // Bytecode uses absolute context levels, i.e. the frame context level
      // on entry must be calculated.
      const intptr_t frame_ctx_level =
          function().is_declared_in_bytecode() ? ctx_.GetLevel() : 0;
      return GetRelativeContextVar(var_ctxt_level, ctxt_slot, frame_ctx_level);
    }
    return GetContextVar(var_ctxt_level, ctxt_slot);
  }
  return Object::null();
}
// The :async_completer variable of an async function frame, or null.
ObjectPtr ActivationFrame::GetAsyncCompleter() {
  return GetAsyncContextVariable(Symbols::AsyncCompleter());
}
// Given an async function's completer, invoke its `future` getter and read
// the future's _awaiter field. Returns null if the completer is not fully
// initialized yet.
ObjectPtr ActivationFrame::GetAsyncCompleterAwaiter(const Object& completer) {
  DEBUG_ASSERT(Thread::Current()->TopErrorHandlerIsExitFrame());
  Object& future = Object::Handle();
  const Class& completer_cls = Class::Handle(completer.clazz());
  ASSERT(!completer_cls.IsNull());
  const Function& future_getter = Function::Handle(
      completer_cls.LookupGetterFunction(Symbols::CompleterFuture()));
  ASSERT(!future_getter.IsNull());
  const Array& args = Array::Handle(Array::New(1));
  args.SetAt(0, Instance::Cast(completer));
  // Calls back into Dart code to evaluate the getter.
  future = DartEntry::InvokeFunction(future_getter, args);
  if (future.IsError()) {
    Exceptions::PropagateError(Error::Cast(future));
  }
  if (future.IsNull()) {
    // The completer object may not be fully initialized yet.
    return Object::null();
  }
  const Class& future_cls = Class::Handle(future.clazz());
  ASSERT(!future_cls.IsNull());
  const Field& awaiter_field = Field::Handle(
      future_cls.LookupInstanceFieldAllowPrivate(Symbols::_Awaiter()));
  ASSERT(!awaiter_field.IsNull());
  return Instance::Cast(future).GetField(awaiter_field);
}
// The :controller_stream variable of an async* frame, or null.
ObjectPtr ActivationFrame::GetAsyncStreamControllerStream() {
  return GetAsyncContextVariable(Symbols::ControllerStream());
}
// Read the _awaiter field of an async* controller stream. The field lives
// on the stream implementation class, hence the SuperClass() hop.
ObjectPtr ActivationFrame::GetAsyncStreamControllerStreamAwaiter(
    const Object& stream) {
  const Class& stream_cls = Class::Handle(stream.clazz());
  ASSERT(!stream_cls.IsNull());
  const Class& stream_impl_cls = Class::Handle(stream_cls.SuperClass());
  const Field& awaiter_field = Field::Handle(
      stream_impl_cls.LookupInstanceFieldAllowPrivate(Symbols::_Awaiter()));
  ASSERT(!awaiter_field.IsNull());
  return Instance::Cast(stream).GetField(awaiter_field);
}
// Awaiter of this async frame: try the async* stream controller first,
// then the async completer. Returns null if neither is present.
ObjectPtr ActivationFrame::GetAsyncAwaiter() {
  const Object& async_stream_controller_stream =
      Object::Handle(GetAsyncStreamControllerStream());
  if (!async_stream_controller_stream.IsNull()) {
    return GetAsyncStreamControllerStreamAwaiter(
        async_stream_controller_stream);
  }
  const Object& completer = Object::Handle(GetAsyncCompleter());
  if (!completer.IsNull()) {
    return GetAsyncCompleterAwaiter(completer);
  }
  return Object::null();
}
// The :async_stack_trace variable of an async frame, or null.
ObjectPtr ActivationFrame::GetCausalStack() {
  return GetAsyncContextVariable(Symbols::AsyncStackTraceVar());
}
// Returns true if this frame has an active try block whose handler would
// catch |exc_obj|, walking outward through nested try blocks.
bool ActivationFrame::HandlesException(const Instance& exc_obj) {
  if ((kind_ == kAsyncSuspensionMarker) || (kind_ == kAsyncCausal)) {
    // These frames are historical.
    return false;
  }
  intptr_t try_index = TryIndex();
  if (try_index < 0) {
    return false;
  }
  ExceptionHandlers& handlers = ExceptionHandlers::Handle();
  Array& handled_types = Array::Handle();
  AbstractType& type = Type::Handle();
  const bool is_async =
      function().IsAsyncClosure() || function().IsAsyncGenClosure();
  if (IsInterpreted()) {
    handlers = bytecode().exception_handlers();
  } else {
    handlers = code().exception_handlers();
  }
  ASSERT(!handlers.IsNull());
  intptr_t num_handlers_checked = 0;
  // Walk from the innermost try block outward.
  while (try_index != kInvalidTryIndex) {
    // Detect circles in the exception handler data.
    num_handlers_checked++;
    ASSERT(num_handlers_checked <= handlers.num_entries());
    // Only consider user written handlers for async methods.
    if (!is_async || !handlers.IsGenerated(try_index)) {
      handled_types = handlers.GetHandledTypes(try_index);
      const intptr_t num_types = handled_types.Length();
      for (intptr_t k = 0; k < num_types; k++) {
        type ^= handled_types.At(k);
        ASSERT(!type.IsNull());
        // Uninstantiated types are not added to ExceptionHandlers data.
        ASSERT(type.IsInstantiated());
        if (type.IsDynamicType()) {
          return true;
        }
        if (exc_obj.IsInstanceOf(type, Object::null_type_arguments(),
                                 Object::null_type_arguments())) {
          return true;
        }
      }
    }
    try_index = handlers.OuterTryIndex(try_index);
  }
  return false;
}
// Read the :await_jump_var context variable (the index of the yield point
// at which this async frame is suspended), or -1 if not found.
intptr_t ActivationFrame::GetAwaitJumpVariable() {
  GetVarDescriptors();
  intptr_t var_ctxt_level = -1;
  intptr_t ctxt_slot = -1;
  intptr_t var_desc_len = var_descriptors_.Length();
  intptr_t await_jump_var = -1;
  for (intptr_t i = 0; i < var_desc_len; i++) {
    LocalVarDescriptorsLayout::VarInfo var_info;
    var_descriptors_.GetInfo(i, &var_info);
    const int8_t kind = var_info.kind();
    if (var_descriptors_.GetName(i) == Symbols::AwaitJumpVar().raw()) {
      ASSERT(kind == LocalVarDescriptorsLayout::kContextVar);
      ASSERT(!ctx_.IsNull());
      // Variable descriptors constructed from bytecode have all variables of
      // enclosing functions, even shadowed by the current function.
      // Pick the :await_jump_var variable with the highest context level.
      if (var_info.scope_id > var_ctxt_level) {
        var_ctxt_level = var_info.scope_id;
        ctxt_slot = var_info.index();
      }
    }
  }
  if (var_ctxt_level >= 0) {
    Object& await_jump_index = Object::Handle(ctx_.At(ctxt_slot));
    ASSERT(await_jump_index.IsSmi());
    await_jump_var = Smi::Cast(await_jump_index).Value();
  }
  return await_jump_var;
}
void ActivationFrame::ExtractTokenPositionFromAsyncClosure() {
  // Attempt to determine the token pos and try index from the async closure.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(function_.IsAsyncGenClosure() || function_.IsAsyncClosure());
  // This should only be called on frames that aren't active on the stack.
  ASSERT(fp() == 0);
  if (function_.is_declared_in_bytecode()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
    const auto& bytecode = Bytecode::Handle(zone, function_.bytecode());
    if (!bytecode.HasSourcePositions()) {
      return;
    }
    const intptr_t await_jump_var = GetAwaitJumpVariable();
    if (await_jump_var < 0) {
      return;
    }
    // Yield points are counted from 1 (0 is reserved for normal entry).
    intptr_t yield_point_index = 1;
    // Scan the source-position table for the yield point whose ordinal
    // matches the suspended :await_jump_var value.
    kernel::BytecodeSourcePositionsIterator iter(zone, bytecode);
    while (iter.MoveNext()) {
      if (iter.IsYieldPoint()) {
        if (yield_point_index == await_jump_var) {
          token_pos_ = iter.TokenPos();
          token_pos_initialized_ = true;
          // The try index is taken at the instruction after the yield.
          const uword return_address =
              KernelBytecode::Next(bytecode.PayloadStart() + iter.PcOffset());
          try_index_ = bytecode.GetTryIndexAtPc(return_address);
          return;
        }
        ++yield_point_index;
      }
    }
    return;
#else
    UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  }
  ASSERT(!IsInterpreted());
  const intptr_t await_jump_var = GetAwaitJumpVariable();
  if (await_jump_var < 0) {
    return;
  }
  // Compiled code: find the pc descriptor tagged with this yield index.
  const auto& pc_descriptors =
      PcDescriptors::Handle(zone, code().pc_descriptors());
  ASSERT(!pc_descriptors.IsNull());
  PcDescriptors::Iterator it(pc_descriptors, PcDescriptorsLayout::kOther);
  while (it.MoveNext()) {
    if (it.YieldIndex() == await_jump_var) {
      try_index_ = it.TryIndex();
      token_pos_ = it.TokenPos();
      token_pos_initialized_ = true;
      return;
    }
  }
}
bool ActivationFrame::IsAsyncMachinery() const {
if (function_.IsNull()) {
ASSERT(IsInterpreted()); // This frame is a bytecode stub frame.
return false;
}
Isolate* isolate = Isolate::Current();
if (function_.raw() == isolate->object_store()->complete_on_async_return()) {
// We are completing an async function's completer.
return true;
}
if (function_.Owner() ==
isolate->object_store()->async_star_stream_controller()) {
// We are inside the async* stream controller code.
return true;
}
return false;
}
// Get the saved current context of this activation.
// The result is cached in ctx_; returns the null Context if no saved-context
// slot exists or the context was optimized away.
const Context& ActivationFrame::GetSavedCurrentContext() {
  if (!ctx_.IsNull()) return ctx_;  // Already computed and cached.
  GetVarDescriptors();
  intptr_t var_desc_len = var_descriptors_.Length();
  Object& obj = Object::Handle();
  // Scan the variable descriptors for the saved-current-context slot.
  for (intptr_t i = 0; i < var_desc_len; i++) {
    LocalVarDescriptorsLayout::VarInfo var_info;
    var_descriptors_.GetInfo(i, &var_info);
    const int8_t kind = var_info.kind();
    if (kind == LocalVarDescriptorsLayout::kSavedCurrentContext) {
      if (FLAG_trace_debugger_stacktrace) {
        OS::PrintErr("\tFound saved current ctx at index %d\n",
                     var_info.index());
      }
      const auto variable_index = VariableIndex(var_info.index());
      obj = GetStackVar(variable_index);
      if (obj.IsClosure()) {
        // The slot holds the closure being invoked via an invoke-field
        // dispatcher; unwrap the closure's context.
        ASSERT(function().name() == Symbols::Call().raw());
        ASSERT(function().IsInvokeFieldDispatcher());
        // Closure.call frames.
        ctx_ = Closure::Cast(obj).context();
      } else if (obj.IsContext()) {
        ctx_ = Context::Cast(obj).raw();
      } else {
        // Context was optimized away (or simply absent).
        ASSERT(obj.IsNull() || obj.raw() == Symbols::OptimizedOut().raw());
        ctx_ = Context::null();
      }
      return ctx_;
    }
  }
  // No saved-context slot found; ctx_ remains null.
  return ctx_;
}
// Returns the async operation closure (passed as the first parameter) when
// this frame is the VM's async-operation wrapper; otherwise returns null.
ObjectPtr ActivationFrame::GetAsyncOperation() {
  const bool is_async_op_frame =
      function().name() == Symbols::AsyncOperation().raw();
  return is_async_op_frame ? GetParameter(0) : Object::null();
}
// Returns the first (innermost) frame on this stack trace whose function can
// handle |exc_obj|, or nullptr when no frame handles the exception.
ActivationFrame* DebuggerStackTrace::GetHandlerFrame(
    const Instance& exc_obj) const {
  for (intptr_t frame_index = 0; frame_index < Length(); frame_index++) {
    ActivationFrame* frame = FrameAt(frame_index);
    if (FLAG_trace_debugger_stacktrace) {
      OS::PrintErr("GetHandlerFrame: #%04" Pd " %s", frame_index,
                   frame->ToCString());
    }
    if (frame->HandlesException(exc_obj)) {
      return frame;
    }
  }
  // No handler found: the exception is unhandled on this stack.
  // (nullptr instead of NULL, consistent with modern usage in this file.)
  return nullptr;
}
// Computes desc_indices_, the list of variable-descriptor indices for the
// variables visible at this frame's current token position, resolving name
// shadowing between nested scopes. Idempotent (guarded by vars_initialized_).
void ActivationFrame::GetDescIndices() {
  if (vars_initialized_) {
    return;
  }
  GetVarDescriptors();
  TokenPosition activation_token_pos = TokenPos();
  if (!activation_token_pos.IsDebugPause() || !live_frame_) {
    // We don't have a token position for this frame, so can't determine
    // which variables are visible.
    vars_initialized_ = true;
    return;
  }
  GrowableArray<String*> var_names(8);
  intptr_t var_desc_len = var_descriptors_.Length();
  for (intptr_t cur_idx = 0; cur_idx < var_desc_len; cur_idx++) {
    // var_names and desc_indices_ are grown in lockstep below.
    ASSERT(var_names.length() == desc_indices_.length());
    LocalVarDescriptorsLayout::VarInfo var_info;
    var_descriptors_.GetInfo(cur_idx, &var_info);
    const int8_t kind = var_info.kind();
    // Only stack and context variables are user-visible.
    if ((kind != LocalVarDescriptorsLayout::kStackVar) &&
        (kind != LocalVarDescriptorsLayout::kContextVar)) {
      continue;
    }
    if ((var_info.begin_pos <= activation_token_pos) &&
        (activation_token_pos <= var_info.end_pos)) {
      if ((kind == LocalVarDescriptorsLayout::kContextVar) &&
          (ContextLevel() < var_info.scope_id)) {
        // The variable is textually in scope but the context level
        // at the activation frame's PC is lower than the context
        // level of the variable. The context containing the variable
        // has already been removed from the chain. This can happen when we
        // break at a return statement, since the contexts get discarded
        // before the debugger gets called.
        continue;
      }
      // The current variable is textually in scope. Now check whether
      // there is another local variable with the same name that shadows
      // or is shadowed by this variable.
      String& var_name = String::Handle(var_descriptors_.GetName(cur_idx));
      intptr_t indices_len = desc_indices_.length();
      bool name_match_found = false;
      for (intptr_t i = 0; i < indices_len; i++) {
        if (var_name.Equals(*var_names[i])) {
          // Found two local variables with the same name. Now determine
          // which one is shadowed.
          name_match_found = true;
          LocalVarDescriptorsLayout::VarInfo i_var_info;
          var_descriptors_.GetInfo(desc_indices_[i], &i_var_info);
          if (i_var_info.begin_pos < var_info.begin_pos) {
            // The variable we found earlier is in an outer scope
            // and is shadowed by the current variable. Replace the
            // descriptor index of the previously found variable
            // with the descriptor index of the current variable.
            desc_indices_[i] = cur_idx;
          } else {
            // The variable we found earlier is in an inner scope
            // and shadows the current variable. Skip the current
            // variable. (Nothing to do.)
          }
          break;  // Stop looking for name matches.
        }
      }
      if (!name_match_found) {
        // No duplicate name found. Add the current descriptor index to the
        // list of visible variables.
        desc_indices_.Add(cur_idx);
        var_names.Add(&var_name);
      }
    }
  }
  vars_initialized_ = true;
}
// Returns the number of variables visible at this frame's current location.
intptr_t ActivationFrame::NumLocalVariables() {
  GetDescIndices();  // Lazily computes desc_indices_.
  return desc_indices_.length();
}
// Reads a raw object pointer stored at the given frame/stack address.
DART_FORCE_INLINE static ObjectPtr GetVariableValue(uword addr) {
  return *reinterpret_cast<ObjectPtr*>(addr);
}
// Caution: GetParameter only works for fixed parameters.
// Reads the value of fixed parameter |index|, handling the different frame
// layouts of interpreted vs. compiled code and of functions with vs. without
// optional parameters.
ObjectPtr ActivationFrame::GetParameter(intptr_t index) {
  intptr_t num_parameters = function().num_fixed_parameters();
  ASSERT(0 <= index && index < num_parameters);
  if (IsInterpreted()) {
    if (function().NumOptionalParameters() > 0) {
      // Note that we do not access optional but only fixed parameters, hence
      // we do not need to replicate the logic of IndexFor() in bytecode reader.
      return GetVariableValue(fp() + index * kWordSize);
    } else {
      // Fixed-only: parameters sit below the FP, indexed from the end.
      return GetVariableValue(
          fp() - (kKBCParamEndSlotFromFp + num_parameters - index) * kWordSize);
    }
  }
  if (function().NumOptionalParameters() > 0) {
    // If the function has optional parameters, the first positional parameter
    // can be in a number of places in the caller's frame depending on how many
    // were actually supplied at the call site, but they are copied to a fixed
    // place in the callee's frame.
    return GetVariableValue(LocalVarAddress(
        fp(), runtime_frame_layout.FrameSlotForVariableIndex(-index)));
  } else {
    // Fixed-only compiled code: parameters live in the caller's frame.
    intptr_t reverse_index = num_parameters - index;
    return GetVariableValue(ParamAddress(fp(), reverse_index));
  }
}
// Returns the closure object of a closure-function frame; the closure is
// always passed as the first fixed parameter.
ObjectPtr ActivationFrame::GetClosure() {
  ASSERT(function().IsClosureFunction());
  return GetParameter(0);
}
// Reads a stack variable's value, either directly from the frame or — for
// optimized frames — from the materialized deoptimization frame.
ObjectPtr ActivationFrame::GetStackVar(VariableIndex variable_index) {
  if (IsInterpreted()) {
    intptr_t slot_index = -variable_index.value();
    if (slot_index < 0) {
      slot_index -= kKBCParamEndSlotFromFp;  // Accessing a parameter.
    }
    return GetVariableValue(fp() + slot_index * kWordSize);
  }
  const intptr_t slot_index =
      runtime_frame_layout.FrameSlotForVariableIndex(variable_index.value());
  if (deopt_frame_.IsNull()) {
    return GetVariableValue(LocalVarAddress(fp(), slot_index));
  } else {
    // Optimized frame: read from the materialized deopt frame instead.
    return deopt_frame_.At(LocalVarIndex(deopt_frame_offset_, slot_index));
  }
}
bool ActivationFrame::IsRewindable() const {
if (deopt_frame_.IsNull()) {
return true;
}
// TODO(turnidge): This is conservative. It looks at all values in
// the deopt_frame_ even though some of them may correspond to other
// inlined frames.
Object& obj = Object::Handle();
for (int i = 0; i < deopt_frame_.Length(); i++) {
obj = deopt_frame_.At(i);
if (obj.raw() == Symbols::OptimizedOut().raw()) {
return false;
}
}
return true;
}
// Dumps extensive diagnostic state when a context-variable lookup finds
// inconsistent context levels: the mismatched slot/levels, the current
// frame, the saved context's contents, the debugger stack trace, and all
// raw stack frames.
void ActivationFrame::PrintContextMismatchError(intptr_t ctx_slot,
                                                intptr_t frame_ctx_level,
                                                intptr_t var_ctx_level) {
  OS::PrintErr(
      "-------------------------\n"
      "Encountered context mismatch\n"
      "\tctx_slot: %" Pd
      "\n"
      "\tframe_ctx_level: %" Pd
      "\n"
      "\tvar_ctx_level: %" Pd "\n\n",
      ctx_slot, frame_ctx_level, var_ctx_level);
  OS::PrintErr(
      "-------------------------\n"
      "Current frame:\n%s\n",
      this->ToCString());
  OS::PrintErr(
      "-------------------------\n"
      "Context contents:\n");
  const Context& ctx = GetSavedCurrentContext();
  ctx.Dump(8);
  OS::PrintErr(
      "-------------------------\n"
      "Debugger stack trace...\n\n");
  DebuggerStackTrace* stack = Isolate::Current()->debugger()->StackTrace();
  intptr_t num_frames = stack->Length();
  for (intptr_t i = 0; i < num_frames; i++) {
    ActivationFrame* frame = stack->FrameAt(i);
    OS::PrintErr("#%04" Pd " %s", i, frame->ToCString());
  }
  OS::PrintErr(
      "-------------------------\n"
      "All frames...\n\n");
  StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames,
                              Thread::Current(),
                              StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame = iterator.NextFrame();
  intptr_t num = 0;
  // nullptr (was NULL) and redundant double parentheses removed.
  while (frame != nullptr) {
    OS::PrintErr("#%04" Pd " %s\n", num++, frame->ToCString());
    frame = iterator.NextFrame();
  }
}
// Retrieves the i-th visible variable's name, its declaration and visibility
// token positions, and its current value. All out-parameters must be
// non-null; |i| indexes into desc_indices_ (see GetDescIndices()).
void ActivationFrame::VariableAt(intptr_t i,
                                 String* name,
                                 TokenPosition* declaration_token_pos,
                                 TokenPosition* visible_start_token_pos,
                                 TokenPosition* visible_end_token_pos,
                                 Object* value) {
  GetDescIndices();
  ASSERT(i < desc_indices_.length());
  intptr_t desc_index = desc_indices_[i];
  ASSERT(name != NULL);
  *name = var_descriptors_.GetName(desc_index);
  LocalVarDescriptorsLayout::VarInfo var_info;
  var_descriptors_.GetInfo(desc_index, &var_info);
  ASSERT(declaration_token_pos != NULL);
  *declaration_token_pos = var_info.declaration_pos;
  ASSERT(visible_start_token_pos != NULL);
  *visible_start_token_pos = var_info.begin_pos;
  ASSERT(visible_end_token_pos != NULL);
  *visible_end_token_pos = var_info.end_pos;
  ASSERT(value != NULL);
  const int8_t kind = var_info.kind();
  const auto variable_index = VariableIndex(var_info.index());
  if (kind == LocalVarDescriptorsLayout::kStackVar) {
    *value = GetStackVar(variable_index);
  } else {
    // Context variables additionally need the variable's context level in
    // order to walk up the context chain.
    ASSERT(kind == LocalVarDescriptorsLayout::kContextVar);
    *value = GetContextVar(var_info.scope_id, variable_index.value());
  }
}
// Looks up a context variable for this activation, anchored at the context
// level in effect at the frame's current PC/token position.
ObjectPtr ActivationFrame::GetContextVar(intptr_t var_ctx_level,
                                         intptr_t ctx_slot) {
  // Delegate to the relative lookup with this frame's context level.
  return GetRelativeContextVar(var_ctx_level, ctx_slot, ContextLevel());
}
// Reads the context variable at |ctx_slot| in the context that sits
// |frame_ctx_level - var_ctx_level| levels up the parent chain from the
// frame's saved context. Prints a diagnostic (and asserts in debug mode)
// when the recorded levels are inconsistent with the actual context chain.
ObjectPtr ActivationFrame::GetRelativeContextVar(intptr_t var_ctx_level,
                                                 intptr_t ctx_slot,
                                                 intptr_t frame_ctx_level) {
  const Context& ctx = GetSavedCurrentContext();
  // It's possible that ctx was optimized out as no locals were captured by the
  // context. See issue #38182.
  if (ctx.IsNull()) {
    return Symbols::OptimizedOut().raw();
  }
  intptr_t level_diff = frame_ctx_level - var_ctx_level;
  if (level_diff == 0) {
    // Variable lives in the frame's own context.
    if ((ctx_slot < 0) || (ctx_slot >= ctx.num_variables())) {
      PrintContextMismatchError(ctx_slot, frame_ctx_level, var_ctx_level);
    }
    ASSERT((ctx_slot >= 0) && (ctx_slot < ctx.num_variables()));
    return ctx.At(ctx_slot);
  } else if (level_diff > 0) {
    // Variable lives in an enclosing context; walk up the parent chain.
    Context& var_ctx = Context::Handle(ctx.raw());
    while (level_diff > 0 && !var_ctx.IsNull()) {
      level_diff--;
      var_ctx = var_ctx.parent();
    }
    if (var_ctx.IsNull() || (ctx_slot < 0) ||
        (ctx_slot >= var_ctx.num_variables())) {
      PrintContextMismatchError(ctx_slot, frame_ctx_level, var_ctx_level);
    }
    ASSERT(!var_ctx.IsNull());
    ASSERT((ctx_slot >= 0) && (ctx_slot < var_ctx.num_variables()));
    return var_ctx.At(ctx_slot);
  } else {
    // A variable at a higher context level than the frame should not occur.
    PrintContextMismatchError(ctx_slot, frame_ctx_level, var_ctx_level);
    return Object::null();
  }
}
// Returns a flattened array [name0, value0, name1, value1, ...] holding all
// variables visible in this frame.
ArrayPtr ActivationFrame::GetLocalVariables() {
  GetDescIndices();
  const intptr_t num_variables = desc_indices_.length();
  String& name = String::Handle();
  Object& var_value = Instance::Handle();
  const Array& pairs = Array::Handle(Array::New(2 * num_variables));
  for (intptr_t v = 0; v < num_variables; v++) {
    TokenPosition unused;
    VariableAt(v, &name, &unused, &unused, &unused, &var_value);
    pairs.SetAt(2 * v, name);
    pairs.SetAt((2 * v) + 1, var_value);
  }
  return pairs.raw();
}
// Returns the value of 'this' in this frame, or the OptimizedOut sentinel
// when the receiver is not available.
ObjectPtr ActivationFrame::GetReceiver() {
  GetDescIndices();
  const intptr_t count = desc_indices_.length();
  String& name = String::Handle();
  Instance& receiver = Instance::Handle();
  for (intptr_t v = 0; v < count; v++) {
    TokenPosition unused;
    VariableAt(v, &name, &unused, &unused, &unused, &receiver);
    if (name.Equals(Symbols::This())) {
      return receiver.raw();
    }
  }
  return Symbols::OptimizedOut().raw();
}
static bool IsSyntheticVariableName(const String& var_name) {
return (var_name.Length() >= 1) && (var_name.CharAt(0) == ':');
}
static bool IsPrivateVariableName(const String& var_name) {
return (var_name.Length() >= 1) && (var_name.CharAt(0) == '_');
}
// Evaluates a previously compiled expression (kernel in |kernel_buffer|) in
// the scope of this frame: statically on the owning class for static
// functions, otherwise against the frame's receiver instance.
ObjectPtr ActivationFrame::EvaluateCompiledExpression(
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) {
  if (function().is_static()) {
    const Class& cls = Class::Handle(function().Owner());
    return cls.EvaluateCompiledExpression(kernel_buffer, type_definitions,
                                          arguments, type_arguments);
  } else {
    const Object& receiver = Object::Handle(GetReceiver());
    const Class& method_cls = Class::Handle(function().origin());
    ASSERT(receiver.IsInstance() || receiver.IsNull());
    // Release-mode guard mirroring the assert above (e.g. receiver may be
    // the OptimizedOut sentinel).
    if (!(receiver.IsInstance() || receiver.IsNull())) {
      return Object::null();
    }
    const Instance& inst = Instance::Cast(receiver);
    return inst.EvaluateCompiledExpression(
        method_cls, kernel_buffer, type_definitions, arguments, type_arguments);
  }
}
// Collects this frame's visible locals into |param_names|/|param_values|
// (skipping 'this', synthetic variables, and names already bound in the
// incoming scope). For generic functions, also fills |type_params_names| in
// grandparent-to-child order and returns the frame's function type
// arguments; returns null TypeArguments otherwise.
TypeArgumentsPtr ActivationFrame::BuildParameters(
    const GrowableObjectArray& param_names,
    const GrowableObjectArray& param_values,
    const GrowableObjectArray& type_params_names) {
  GetDescIndices();
  bool type_arguments_available = false;
  String& name = String::Handle();
  String& existing_name = String::Handle();
  Object& value = Instance::Handle();
  TypeArguments& type_arguments = TypeArguments::Handle();
  intptr_t num_variables = desc_indices_.length();
  for (intptr_t i = 0; i < num_variables; i++) {
    TokenPosition ignore;
    VariableAt(i, &name, &ignore, &ignore, &ignore, &value);
    if (name.Equals(Symbols::FunctionTypeArgumentsVar())) {
      // Synthetic variable holding the function's runtime type arguments.
      type_arguments_available = true;
      type_arguments ^= value.raw();
    } else if (!name.Equals(Symbols::This()) &&
               !IsSyntheticVariableName(name)) {
      if (IsPrivateVariableName(name)) {
        // Strip the library-private suffix so the name is usable from the
        // compiled expression.
        name = Symbols::New(Thread::Current(), String::ScrubName(name));
      }
      bool conflict = false;
      for (intptr_t j = 0; j < param_names.Length(); j++) {
        existing_name ^= param_names.At(j);
        if (name.Equals(existing_name)) {
          conflict = true;
          break;
        }
      }
      // If local has the same name as a binding in the incoming scope, prefer
      // the one from the incoming scope, since it is logically a child scope
      // of the activation's current scope.
      if (!conflict) {
        param_names.Add(name);
        param_values.Add(value);
      }
    }
  }
  if ((function().IsGeneric() || function().HasGenericParent()) &&
      type_arguments_available) {
    intptr_t num_vars =
        function().NumTypeParameters() + function().NumParentTypeParameters();
    type_params_names.Grow(num_vars);
    type_params_names.SetLength(num_vars);
    TypeArguments& type_params = TypeArguments::Handle();
    TypeParameter& type_param = TypeParameter::Handle();
    Function& current = Function::Handle(function().raw());
    intptr_t mapping_offset = num_vars;
    // Walk from the innermost function outwards, filling names back-to-front.
    for (intptr_t i = 0; !current.IsNull(); i += current.NumTypeParameters(),
                  current = current.parent_function()) {
      type_params = current.type_parameters();
      intptr_t size = current.NumTypeParameters();
      ASSERT(mapping_offset >= size);
      mapping_offset -= size;
      for (intptr_t j = 0; j < size; ++j) {
        type_param = TypeParameter::RawCast(type_params.TypeAt(j));
        name = type_param.name();
        // Write the names in backwards in terms of chain of functions.
        // But keep the order of names within the same function. so they
        // match up with the order of the types in 'type_arguments'.
        // Index: 0 1 2 3 ...
        //   |Names in Grandparent| |Names in Parent| ..|Names in Child|
        type_params_names.SetAt(mapping_offset + j, name);
      }
    }
    if (!type_arguments.IsNull()) {
      if (type_arguments.Length() == 0) {
        for (intptr_t i = 0; i < num_vars; ++i) {
          type_arguments.SetTypeAt(i, Object::dynamic_type());
        }
      }
      ASSERT(type_arguments.Length() == num_vars);
    }
  }
  return type_arguments.raw();
}
// Produces a human-readable multi-line description of this frame for
// debugging output; the string is allocated in the current zone.
const char* ActivationFrame::ToCString() {
  if (function().IsNull()) {
    // Stub or marker frame: there is no function to describe.
    return Thread::Current()->zone()->PrintToString("[ Frame kind: %s]\n",
                                                    KindToCString(kind_));
  }
  const String& url = String::Handle(SourceUrl());
  intptr_t line = LineNumber();
  const char* func_name = function().ToFullyQualifiedCString();
  if (live_frame_) {
    // Live frames additionally have valid pc/fp/sp and a context level.
    return Thread::Current()->zone()->PrintToString(
        "[ Frame pc(0x%" Px " %s offset:0x%" Px ") fp(0x%" Px ") sp(0x%" Px
        ")\n"
        "\tfunction = %s\n"
        "\turl = %s\n"
        "\tline = %" Pd
        "\n"
        "\tcontext = %s\n"
        "\tcontext level = %" Pd " ]\n",
        pc(), IsInterpreted() ? "bytecode" : "code",
        pc() - (IsInterpreted() ? bytecode().PayloadStart()
                                : code().PayloadStart()),
        fp(), sp(), func_name, url.ToCString(), line, ctx_.ToCString(),
        ContextLevel());
  } else {
    return Thread::Current()->zone()->PrintToString(
        "[ Frame %s function = %s\n"
        "\turl = %s\n"
        "\tline = %" Pd
        "\n"
        "\tcontext = %s]\n",
        IsInterpreted() ? "bytecode" : "code", func_name, url.ToCString(), line,
        ctx_.ToCString());
  }
}
// Dispatches JSON printing based on the frame kind.
void ActivationFrame::PrintToJSONObject(JSONObject* jsobj) {
  switch (kind_) {
    case kRegular:
    case kAsyncActivation:
      PrintToJSONObjectRegular(jsobj);
      break;
    case kAsyncCausal:
      PrintToJSONObjectAsyncCausal(jsobj);
      break;
    case kAsyncSuspensionMarker:
      PrintToJSONObjectAsyncSuspensionMarker(jsobj);
      break;
    default:
      UNIMPLEMENTED();
  }
}
// Prints a regular (or async-activation) frame as a service-protocol Frame
// object: location, function, code, and all non-synthetic bound variables.
void ActivationFrame::PrintToJSONObjectRegular(JSONObject* jsobj) {
  const Script& script = Script::Handle(SourceScript());
  jsobj->AddProperty("type", "Frame");
  jsobj->AddProperty("kind", KindToCString(kind_));
  const TokenPosition pos = TokenPos().SourcePosition();
  jsobj->AddLocation(script, pos);
  jsobj->AddProperty("function", function());
  if (IsInterpreted()) {
    jsobj->AddProperty("code", bytecode());
  } else {
    jsobj->AddProperty("code", code());
  }
  {
    JSONArray jsvars(jsobj, "vars");
    const int num_vars = NumLocalVariables();
    for (intptr_t v = 0; v < num_vars; v++) {
      String& var_name = String::Handle();
      Instance& var_value = Instance::Handle();
      TokenPosition declaration_token_pos;
      TokenPosition visible_start_token_pos;
      TokenPosition visible_end_token_pos;
      VariableAt(v, &var_name, &declaration_token_pos, &visible_start_token_pos,
                 &visible_end_token_pos, &var_value);
      // VM-internal (':'-prefixed) variables are not reported to clients.
      if (!IsSyntheticVariableName(var_name)) {
        JSONObject jsvar(&jsvars);
        jsvar.AddProperty("type", "BoundVariable");
        const char* scrubbed_var_name = String::ScrubName(var_name);
        jsvar.AddProperty("name", scrubbed_var_name);
        jsvar.AddProperty("value", var_value);
        // Where was the variable declared?
        jsvar.AddProperty("declarationTokenPos", declaration_token_pos);
        // When the variable becomes visible to the scope.
        jsvar.AddProperty("scopeStartTokenPos", visible_start_token_pos);
        // When the variable stops being visible to the scope.
        jsvar.AddProperty("scopeEndTokenPos", visible_end_token_pos);
      }
    }
  }
}
// Prints an async-causal frame: location, function, and code, but no
// variables (these frames are not live on the stack).
void ActivationFrame::PrintToJSONObjectAsyncCausal(JSONObject* jsobj) {
  jsobj->AddProperty("type", "Frame");
  jsobj->AddProperty("kind", KindToCString(kind_));
  const Script& script = Script::Handle(SourceScript());
  const TokenPosition pos = TokenPos().SourcePosition();
  jsobj->AddLocation(script, pos);
  jsobj->AddProperty("function", function());
  if (IsInterpreted()) {
    jsobj->AddProperty("code", bytecode());
  } else {
    jsobj->AddProperty("code", code());
  }
}
// Prints the marker pseudo-frame that separates synchronous portions of an
// asynchronous stack trace.
void ActivationFrame::PrintToJSONObjectAsyncSuspensionMarker(
    JSONObject* jsobj) {
  jsobj->AddProperty("type", "Frame");
  jsobj->AddProperty("kind", KindToCString(kind_));
  jsobj->AddProperty("marker", "AsynchronousSuspension");
}
// A function's frames are surfaced to the user when the function is visible,
// unless --show_invisible_frames forces every frame to be shown.
static bool IsFunctionVisible(const Function& function) {
  if (FLAG_show_invisible_frames) {
    return true;
  }
  return function.is_visible();
}
// Appends |frame| to the trace, dropping frames of invisible functions.
void DebuggerStackTrace::AddActivation(ActivationFrame* frame) {
  if (!IsFunctionVisible(frame->function())) {
    return;
  }
  trace_.Add(frame);
}
// Appends an async-suspension marker pseudo-frame to the trace.
void DebuggerStackTrace::AddMarker(ActivationFrame::Kind marker) {
  ASSERT(marker == ActivationFrame::kAsyncSuspensionMarker);
  trace_.Add(new ActivationFrame(marker));
}
// Appends a non-live async-causal frame for compiled |code| at |pc|.
void DebuggerStackTrace::AddAsyncCausalFrame(uword pc, const Code& code) {
  trace_.Add(new ActivationFrame(pc, 0, 0, code, Array::Handle(), 0,
                                 ActivationFrame::kAsyncCausal));
}
#if !defined(DART_PRECOMPILED_RUNTIME)
// Appends a non-live async-causal frame for interpreted |bytecode| at |pc|.
void DebuggerStackTrace::AddAsyncCausalFrame(uword pc,
                                             const Bytecode& bytecode) {
  trace_.Add(
      new ActivationFrame(pc, 0, 0, bytecode, ActivationFrame::kAsyncCausal));
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
// PC descriptor kinds at which it is safe to patch a breakpoint call into
// compiled code.
const uint8_t kSafepointKind = PcDescriptorsLayout::kIcCall |
                               PcDescriptorsLayout::kUnoptStaticCall |
                               PcDescriptorsLayout::kRuntimeCall;
// Creates a breakpoint in compiled |code| at |pc|. The breakpoint starts out
// disabled; Enable() patches the code.
CodeBreakpoint::CodeBreakpoint(const Code& code,
                               TokenPosition token_pos,
                               uword pc,
                               PcDescriptorsLayout::Kind kind)
    : code_(code.raw()),
      bytecode_(Bytecode::null()),
      token_pos_(token_pos),
      pc_(pc),
      line_number_(-1),  // Computed lazily by LineNumber().
      is_enabled_(false),
      bpt_location_(NULL),
      next_(NULL),
      breakpoint_kind_(kind),
      saved_value_(Code::null()) {
  ASSERT(!code.IsNull());
  ASSERT(token_pos_.IsReal());
  ASSERT(pc_ != 0);
  // Only safepoint call sites can be patched with a breakpoint.
  ASSERT((breakpoint_kind_ & kSafepointKind) != 0);
}
// Creates a breakpoint in interpreted |bytecode| at |pc|. Only valid when
// the interpreter is enabled; the breakpoint starts out disabled.
CodeBreakpoint::CodeBreakpoint(const Bytecode& bytecode,
                               TokenPosition token_pos,
                               uword pc)
    : code_(Code::null()),
      bytecode_(bytecode.raw()),
      token_pos_(token_pos),
      pc_(pc),
      line_number_(-1),  // Computed lazily by LineNumber().
      is_enabled_(false),
      bpt_location_(NULL),
      next_(NULL),
      breakpoint_kind_(PcDescriptorsLayout::kAnyKind),
      saved_value_(Code::null()) {
  ASSERT(!bytecode.IsNull());
  ASSERT(FLAG_enable_interpreter);
  ASSERT(token_pos_.IsReal());
  ASSERT(pc_ != 0);
}
// A code breakpoint must be disabled (code unpatched) before destruction.
CodeBreakpoint::~CodeBreakpoint() {
  // Make sure we don't leave patched code behind.
  ASSERT(!IsEnabled());
  // Poison the data so we catch use after free errors.
#ifdef DEBUG
  code_ = Code::null();
  bytecode_ = Bytecode::null();
  pc_ = 0ul;
  bpt_location_ = NULL;
  next_ = NULL;
  breakpoint_kind_ = PcDescriptorsLayout::kOther;
#endif
}
// Returns the function containing this breakpoint (from bytecode when
// interpreted, otherwise from compiled code).
FunctionPtr CodeBreakpoint::function() const {
  if (IsInterpreted()) {
    ASSERT(Bytecode::Handle(bytecode_).function() != Function::null());
    return Bytecode::Handle(bytecode_).function();
  } else {
    return Code::Handle(code_).function();
  }
}
// Returns the script containing this breakpoint.
ScriptPtr CodeBreakpoint::SourceCode() {
  const Function& func = Function::Handle(this->function());
  return func.script();
}
// Returns the URL of the script containing this breakpoint.
StringPtr CodeBreakpoint::SourceUrl() {
  const Script& script = Script::Handle(SourceCode());
  return script.url();
}
// Returns the source line of this breakpoint.
intptr_t CodeBreakpoint::LineNumber() {
  // Compute line number lazily since it causes scanning of the script.
  if (line_number_ < 0) {
    const Script& script = Script::Handle(SourceCode());
    script.GetTokenLocation(token_pos_, &line_number_, NULL);
  }
  return line_number_;
}
// Activates the breakpoint by patching the code (or setting a bytecode
// breakpoint when interpreted). Idempotent.
void CodeBreakpoint::Enable() {
  if (!is_enabled_) {
    if (IsInterpreted()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
      SetBytecodeBreakpoint();
#else
      UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    } else {
      PatchCode();
    }
  }
  ASSERT(is_enabled_);
}
// Deactivates the breakpoint, restoring the original code. Idempotent.
void CodeBreakpoint::Disable() {
  if (is_enabled_) {
    if (IsInterpreted()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
      UnsetBytecodeBreakpoint();
#else
      UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    } else {
      RestoreCode();
    }
  }
  ASSERT(!is_enabled_);
}
// Constructs a debugger for |isolate| with no breakpoints, no cached stack
// traces, no stepping state, and pause-on-exception disabled.
Debugger::Debugger(Isolate* isolate)
    : isolate_(isolate),
      next_id_(1),
      latent_locations_(NULL),
      breakpoint_locations_(NULL),
      code_breakpoints_(NULL),
      resume_action_(kContinue),
      resume_frame_index_(-1),
      post_deopt_frame_index_(-1),
      ignore_breakpoints_(false),
      pause_event_(NULL),
      stack_trace_(NULL),
      async_causal_stack_trace_(NULL),
      awaiter_stack_trace_(NULL),
      stepping_fp_(0),
      interpreted_stepping_(false),
      last_stepping_fp_(0),
      last_stepping_pos_(TokenPosition::kNoSource),
      async_stepping_fp_(0),
      interpreted_async_stepping_(false),
      top_frame_awaiter_(Object::null()),
      skip_next_step_(false),
      needs_breakpoint_cleanup_(false),
      synthetic_async_breakpoint_(NULL),
      exc_pause_info_(kNoPauseOnExceptions) {}
// The debugger must have been shut down (see Shutdown()) before destruction:
// all breakpoints and cached stack traces must already be released.
Debugger::~Debugger() {
  ASSERT(!IsPaused());
  ASSERT(latent_locations_ == NULL);
  ASSERT(breakpoint_locations_ == NULL);
  ASSERT(code_breakpoints_ == NULL);
  ASSERT(stack_trace_ == NULL);
  ASSERT(async_causal_stack_trace_ == NULL);
  ASSERT(synthetic_async_breakpoint_ == NULL);
}
// Tears down all debugger state for the isolate: deletes breakpoint and
// latent locations, disables and deletes code breakpoints (so no patched
// code is left behind), then signals isolate exit to service listeners.
void Debugger::Shutdown() {
  // TODO(johnmccutchan): Do not create a debugger for isolates that don't need
  // them. Then, assert here that isolate_ is not one of those isolates.
  if (Isolate::IsVMInternalIsolate(isolate_)) {
    return;
  }
  // nullptr used throughout (was NULL) for consistency with modern usage
  // elsewhere in this file.
  while (breakpoint_locations_ != nullptr) {
    BreakpointLocation* loc = breakpoint_locations_;
    breakpoint_locations_ = breakpoint_locations_->next();
    delete loc;
  }
  while (latent_locations_ != nullptr) {
    BreakpointLocation* loc = latent_locations_;
    latent_locations_ = latent_locations_->next();
    delete loc;
  }
  while (code_breakpoints_ != nullptr) {
    CodeBreakpoint* cbpt = code_breakpoints_;
    code_breakpoints_ = code_breakpoints_->next();
    // Unpatch the code before deleting the breakpoint.
    cbpt->Disable();
    delete cbpt;
  }
  if (NeedsIsolateEvents()) {
    ServiceEvent event(isolate_, ServiceEvent::kIsolateExit);
    InvokeEventHandler(&event);
  }
}
// No-op: the debugger takes no action when the isolate becomes runnable.
void Debugger::OnIsolateRunnable() {}
// Implements "step over await": sets a one-shot breakpoint on the closure
// that resumes after the current async suspension point. Returns false (and
// sets |*error| when provided) if the isolate is not paused at an async jump
// or the breakpoint cannot be set.
bool Debugger::SetupStepOverAsyncSuspension(const char** error) {
  ActivationFrame* top_frame = TopDartFrame();
  if (!IsAtAsyncJump(top_frame)) {
    // Not at an async operation.
    if (error != nullptr) {
      *error = "Isolate must be paused at an async suspension point";
    }
    return false;
  }
  Object& closure = Object::Handle(top_frame->GetAsyncOperation());
  ASSERT(!closure.IsNull());
  ASSERT(closure.IsInstance());
  ASSERT(Instance::Cast(closure).IsClosure());
  Breakpoint* bpt = SetBreakpointAtActivation(Instance::Cast(closure), true);
  // nullptr (was NULL) — this function already used nullptr above.
  if (bpt == nullptr) {
    // Unable to set the breakpoint.
    if (error != nullptr) {
      *error = "Unable to set breakpoint at async suspension point";
    }
    return false;
  }
  return true;
}
// Records how execution should resume after a pause. For kStepRewind the
// target frame is validated first; for kStepOverAsyncSuspension the needed
// breakpoint is installed. Returns false (and sets |*error| when provided)
// on failure.
bool Debugger::SetResumeAction(ResumeAction action,
                               intptr_t frame_index,
                               const char** error) {
  if (error != nullptr) {
    // nullptr (was NULL), matching the guard condition above.
    *error = nullptr;
  }
  resume_frame_index_ = -1;
  switch (action) {
    case kStepInto:
    case kStepOver:
    case kStepOut:
    case kContinue:
      resume_action_ = action;
      return true;
    case kStepRewind:
      if (!CanRewindFrame(frame_index, error)) {
        return false;
      }
      resume_action_ = kStepRewind;
      resume_frame_index_ = frame_index;
      return true;
    case kStepOverAsyncSuspension:
      return SetupStepOverAsyncSuspension(error);
    default:
      UNREACHABLE();
      return false;
  }
}
// Deoptimize all functions in the isolate.
// TODO(hausner): Actually we only need to deoptimize those functions
// that inline the function that contains the newly created breakpoint.
// We currently don't have this info so we deoptimize all functions.
void Debugger::DeoptimizeWorld() {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  BackgroundCompiler::Stop(isolate_);
  if (FLAG_trace_deoptimization) {
    THR_Print("Deopt for debugger\n");
  }
  isolate_->set_has_attempted_stepping(true);
  DeoptimizeFunctionsOnStack();
  // Iterate over all classes, deoptimize functions.
  // TODO(hausner): Could possibly be combined with RemoveOptimizedCode()
  const ClassTable& class_table = *isolate_->class_table();
  Zone* zone = Thread::Current()->zone();
  CallSiteResetter resetter(zone);
  Class& cls = Class::Handle(zone);
  Array& functions = Array::Handle(zone);
  GrowableObjectArray& closures = GrowableObjectArray::Handle(zone);
  Function& function = Function::Handle(zone);
  Code& code = Code::Handle(zone);
  const intptr_t num_classes = class_table.NumCids();
  const intptr_t num_tlc_classes = class_table.NumTopLevelCids();
  // Regular class ids come first, followed by top-level class ids.
  for (intptr_t i = 1; i < num_classes + num_tlc_classes; i++) {
    const classid_t cid =
        i < num_classes ? i : ClassTable::CidFromTopLevelIndex(i - num_classes);
    if (class_table.HasValidClassAt(cid)) {
      cls = class_table.At(cid);
      // Disable optimized functions.
      functions = cls.functions();
      if (!functions.IsNull()) {
        intptr_t num_functions = functions.Length();
        for (intptr_t pos = 0; pos < num_functions; pos++) {
          function ^= functions.At(pos);
          ASSERT(!function.IsNull());
          // Force-optimized functions don't have unoptimized code and can't
          // deoptimize. Their optimized codes are still valid.
          if (function.ForceOptimize()) {
            ASSERT(!function.HasImplicitClosureFunction());
            continue;
          }
          if (function.HasOptimizedCode()) {
            function.SwitchToUnoptimizedCode();
          }
          code = function.unoptimized_code();
          if (!code.IsNull()) {
            resetter.ResetSwitchableCalls(code);
          }
          // Also disable any optimized implicit closure functions.
          if (function.HasImplicitClosureFunction()) {
            function = function.ImplicitClosureFunction();
            if (function.HasOptimizedCode()) {
              function.SwitchToUnoptimizedCode();
            }
            code = function.unoptimized_code();
            if (!code.IsNull()) {
              resetter.ResetSwitchableCalls(code);
            }
          }
        }
      }
    }
  }
  // Disable optimized closure functions.
  closures = isolate_->object_store()->closure_functions();
  const intptr_t num_closures = closures.Length();
  for (intptr_t pos = 0; pos < num_closures; pos++) {
    function ^= closures.At(pos);
    ASSERT(!function.IsNull());
    if (function.HasOptimizedCode()) {
      function.SwitchToUnoptimizedCode();
    }
    code = function.unoptimized_code();
    if (!code.IsNull()) {
      resetter.ResetSwitchableCalls(code);
    }
  }
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
// Propagates the single-stepping flag to the isolate and, when one exists,
// to the interpreter's debugging flag.
void Debugger::NotifySingleStepping(bool value) const {
  isolate_->set_single_step(value);
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Do not call Interpreter::Current(), which may allocate an interpreter.
  Interpreter* interpreter = Thread::Current()->interpreter();
  if (interpreter != nullptr) {
    // Do not reset is_debugging to false if bytecode debug breaks are enabled.
    interpreter->set_is_debugging(value || HasEnabledBytecodeBreakpoints());
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
// Creates an ActivationFrame for a compiled-code stack frame; |deopt_frame|
// and |deopt_frame_offset| describe the materialized state of optimized
// (possibly inlined) frames.
ActivationFrame* Debugger::CollectDartFrame(Isolate* isolate,
                                            uword pc,
                                            StackFrame* frame,
                                            const Code& code,
                                            const Array& deopt_frame,
                                            intptr_t deopt_frame_offset,
                                            ActivationFrame::Kind kind) {
  ASSERT(code.ContainsInstructionAt(pc));
  ActivationFrame* activation =
      new ActivationFrame(pc, frame->fp(), frame->sp(), code, deopt_frame,
                          deopt_frame_offset, kind);
  if (FLAG_trace_debugger_stacktrace) {
    const Context& ctx = activation->GetSavedCurrentContext();
    OS::PrintErr("\tUsing saved context: %s\n", ctx.ToCString());
    OS::PrintErr("\tLine number: %" Pd "\n", activation->LineNumber());
  }
  return activation;
}
#if !defined(DART_PRECOMPILED_RUNTIME)
// Creates an ActivationFrame for an interpreted (bytecode) stack frame.
ActivationFrame* Debugger::CollectDartFrame(Isolate* isolate,
                                            uword pc,
                                            StackFrame* frame,
                                            const Bytecode& bytecode,
                                            ActivationFrame::Kind kind) {
  ASSERT(bytecode.ContainsInstructionAt(pc));
  ActivationFrame* activation =
      new ActivationFrame(pc, frame->fp(), frame->sp(), bytecode, kind);
  if (FLAG_trace_debugger_stacktrace) {
    const Context& ctx = activation->GetSavedCurrentContext();
    OS::PrintErr("\tUsing saved context: %s\n", ctx.ToCString());
    OS::PrintErr("\tLine number: %" Pd "\n", activation->LineNumber());
  }
  return activation;
}
// Materializes the state of an optimized |frame| into an array so that
// variable values of its (possibly inlined) frames can be inspected.
ArrayPtr Debugger::DeoptimizeToArray(Thread* thread,
                                     StackFrame* frame,
                                     const Code& code) {
  ASSERT(code.is_optimized() && !code.is_force_optimized());
  Isolate* isolate = thread->isolate();
  // Create the DeoptContext for this deoptimization.
  DeoptContext* deopt_context =
      new DeoptContext(frame, code, DeoptContext::kDestIsAllocated, NULL, NULL,
                       true, false /* deoptimizing_code */);
  isolate->set_deopt_context(deopt_context);
  deopt_context->FillDestFrame();
  deopt_context->MaterializeDeferredObjects();
  const Array& dest_frame =
      Array::Handle(thread->zone(), deopt_context->DestFrameAsArray());
  isolate->set_deopt_context(NULL);
  delete deopt_context;
  return dest_frame.raw();
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
// Walks the current Dart stack and builds a DebuggerStackTrace, expanding
// inlined frames of optimized code and skipping bytecode stub frames.
DebuggerStackTrace* Debugger::CollectStackTrace() {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  DebuggerStackTrace* stack_trace = new DebuggerStackTrace(8);
  StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames,
                              Thread::Current(),
                              StackFrameIterator::kNoCrossThreadIteration);
  Code& code = Code::Handle(zone);
#if !defined(DART_PRECOMPILED_RUNTIME)
  Bytecode& bytecode = Bytecode::Handle(zone);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  Code& inlined_code = Code::Handle(zone);
  Array& deopt_frame = Array::Handle(zone);
  for (StackFrame* frame = iterator.NextFrame(); frame != NULL;
       frame = iterator.NextFrame()) {
    ASSERT(frame->IsValid());
    if (FLAG_trace_debugger_stacktrace) {
      OS::PrintErr("CollectStackTrace: visiting frame:\n\t%s\n",
                   frame->ToCString());
    }
    if (frame->IsDartFrame()) {
      if (frame->is_interpreted()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
        bytecode = frame->LookupDartBytecode();
        if (bytecode.function() == Function::null()) {
          continue;  // Skip bytecode stub frame.
        }
        stack_trace->AddActivation(
            CollectDartFrame(isolate, frame->pc(), frame, bytecode));
#else
        UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
      } else {
        // Compiled code; may expand into several frames when inlined.
        code = frame->LookupDartCode();
        AppendCodeFrames(thread, isolate, zone, stack_trace, frame, &code,
                         &inlined_code, &deopt_frame);
      }
    }
  }
  return stack_trace;
}
// Appends activation frames for |frame| (a compiled-code Dart frame) to
// |stack_trace|. For optimized code, the frame is deoptimized into an array
// of values and each function inlined at the current pc becomes its own
// activation; force-optimized frames are skipped entirely since they are
// not debuggable. Unoptimized code yields a single activation with no
// deopt frame. |code|, |inlined_code| and |deopt_frame| are caller-owned
// reusable handles.
void Debugger::AppendCodeFrames(Thread* thread,
                                Isolate* isolate,
                                Zone* zone,
                                DebuggerStackTrace* stack_trace,
                                StackFrame* frame,
                                Code* code,
                                Code* inlined_code,
                                Array* deopt_frame) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (code->is_optimized()) {
    if (code->is_force_optimized()) {
      if (FLAG_trace_debugger_stacktrace) {
        const Function& function = Function::Handle(zone, code->function());
        ASSERT(!function.IsNull());
        OS::PrintErr(
            "CollectStackTrace: skipping force-optimized function: %s\n",
            function.ToFullyQualifiedCString());
      }
      return;  // Skip frame of force-optimized (and non-debuggable) function.
    }
    // TODO(rmacnak): Use CodeSourceMap
    *deopt_frame = DeoptimizeToArray(thread, frame, *code);
    // Walk the inlining chain at the frame's pc, innermost first.
    for (InlinedFunctionsIterator it(*code, frame->pc()); !it.Done();
         it.Advance()) {
      *inlined_code = it.code();
      if (FLAG_trace_debugger_stacktrace) {
        const Function& function = Function::Handle(zone, it.function());
        ASSERT(!function.IsNull());
        OS::PrintErr("CollectStackTrace: visiting inlined function: %s\n",
                     function.ToFullyQualifiedCString());
      }
      intptr_t deopt_frame_offset = it.GetDeoptFpOffset();
      stack_trace->AddActivation(CollectDartFrame(isolate, it.pc(), frame,
                                                  *inlined_code, *deopt_frame,
                                                  deopt_frame_offset));
    }
    return;
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  stack_trace->AddActivation(CollectDartFrame(isolate, frame->pc(), frame,
                                              *code, Object::null_array(), 0));
}
// Collects a stack trace combining the live synchronous frames that lead up
// to the active async function with the eagerly recorded asynchronous causal
// frames (a historical record of how this async function was activated).
// Returns NULL if causal async stacks are disabled or no async function is
// active; delegates to the lazy scheme when FLAG_lazy_async_stacks is set.
DebuggerStackTrace* Debugger::CollectAsyncCausalStackTrace() {
  if (FLAG_lazy_async_stacks) {
    return CollectAsyncLazyStackTrace();
  }
  if (!FLAG_causal_async_stacks) {
    return NULL;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  DebuggerStackTrace* stack_trace = new DebuggerStackTrace(8);
  // Reusable handles for the frame walks below.
  Object& code_obj = Object::Handle(zone);
  Code& code = Code::Handle(zone);
#if !defined(DART_PRECOMPILED_RUNTIME)
  Bytecode& bytecode = Bytecode::Handle(zone);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  Smi& offset = Smi::Handle();
  Code& inlined_code = Code::Handle(zone);
  Array& deopt_frame = Array::Handle(zone);
  Function& async_function = Function::Handle(zone);
  class StackTrace& async_stack_trace = StackTrace::Handle(zone);
  Array& async_code_array = Array::Handle(zone);
  Array& async_pc_offset_array = Array::Handle(zone);
  // Extract the eagerly recorded async stack from the current thread.
  StackTraceUtils::ExtractAsyncStackTraceInfo(
      thread, &async_function, &async_stack_trace, &async_code_array,
      &async_pc_offset_array);
  if (async_function.IsNull()) {
    return NULL;
  }
  bool sync_async_end = false;
  intptr_t synchronous_stack_trace_length =
      StackTraceUtils::CountFrames(thread, 0, async_function, &sync_async_end);
  // Append the top frames from the synchronous stack trace, up until the active
  // asynchronous function. We truncate the remainder of the synchronous
  // stack trace because it contains activations that are part of the
  // asynchronous dispatch mechanisms.
  StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames,
                              Thread::Current(),
                              StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame = iterator.NextFrame();
  while (synchronous_stack_trace_length > 0) {
    ASSERT(frame != NULL);
    if (frame->IsDartFrame()) {
      if (frame->is_interpreted()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
        bytecode = frame->LookupDartBytecode();
        if (bytecode.function() == Function::null()) {
          // Skip bytecode stub frame. Advance the iterator before
          // continuing: the frame advance sits at the bottom of this
          // while-loop, so a bare 'continue' here would revisit the same
          // frame forever.
          frame = iterator.NextFrame();
          continue;
        }
        stack_trace->AddActivation(
            CollectDartFrame(isolate, frame->pc(), frame, bytecode));
#else
        UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
      } else {
        code = frame->LookupDartCode();
        AppendCodeFrames(thread, isolate, zone, stack_trace, frame, &code,
                         &inlined_code, &deopt_frame);
      }
      synchronous_stack_trace_length--;
    }
    frame = iterator.NextFrame();
  }
  // Now we append the asynchronous causal stack trace. These are not active
  // frames but a historical record of how this asynchronous function was
  // activated.
  intptr_t frame_skip =
      sync_async_end ? StackTrace::kSyncAsyncCroppedFrames : 0;
  while (!async_stack_trace.IsNull()) {
    for (intptr_t i = frame_skip; i < async_stack_trace.Length(); i++) {
      code_obj = async_stack_trace.CodeAtFrame(i);
      if (code_obj.IsNull()) {
        break;
      }
      if (code_obj.raw() == StubCode::AsynchronousGapMarker().raw()) {
        stack_trace->AddMarker(ActivationFrame::kAsyncSuspensionMarker);
        // The frame immediately below the asynchronous gap marker is
        // identical to the frame above the marker. Skip the frame to enhance
        // the readability of the trace.
        i++;
      } else {
        offset = Smi::RawCast(async_stack_trace.PcOffsetAtFrame(i));
#if !defined(DART_PRECOMPILED_RUNTIME)
        if (code_obj.IsBytecode()) {
          bytecode ^= code_obj.raw();
          uword pc = bytecode.PayloadStart() + offset.Value();
          stack_trace->AddAsyncCausalFrame(pc, bytecode);
          continue;
        }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
        code ^= code_obj.raw();
        uword pc = code.PayloadStart() + offset.Value();
        if (code.is_optimized()) {
          // Expand inlined functions so each appears as its own frame.
          for (InlinedFunctionsIterator it(code, pc); !it.Done();
               it.Advance()) {
            inlined_code = it.code();
            stack_trace->AddAsyncCausalFrame(it.pc(), inlined_code);
          }
        } else {
          stack_trace->AddAsyncCausalFrame(pc, code);
        }
      }
    }
    // Follow the link.
    frame_skip = async_stack_trace.skip_sync_start_in_parent_stack()
                     ? StackTrace::kSyncAsyncCroppedFrames
                     : 0;
    async_stack_trace = async_stack_trace.async_link();
  }
  return stack_trace;
}
// Collects a stack trace using the lazy async-stack recording scheme:
// live synchronous frames are appended directly via the |on_sync_frame|
// callback while awaiter frames are reconstructed afterwards from the
// code/pc-offset arrays filled in by StackTraceUtils::CollectFramesLazy.
// Returns nullptr if the stack has no asynchronous portion.
DebuggerStackTrace* Debugger::CollectAsyncLazyStackTrace() {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  // Reusable handles shared between the sync-frame callback and the
  // async-frame loop below.
  Code& code = Code::Handle(zone);
  Code& inlined_code = Code::Handle(zone);
  Smi& offset = Smi::Handle();
  Array& deopt_frame = Array::Handle(zone);
#if !defined(DART_PRECOMPILED_RUNTIME)
  Bytecode& bytecode = Bytecode::Handle(zone);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  constexpr intptr_t kDefaultStackAllocation = 8;
  auto stack_trace = new DebuggerStackTrace(kDefaultStackAllocation);
  // Invoked by CollectFramesLazy for each live synchronous Dart frame.
  std::function<void(StackFrame*)> on_sync_frame = [&](StackFrame* frame) {
    if (frame->is_interpreted()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
      bytecode = frame->LookupDartBytecode();
      stack_trace->AddActivation(
          CollectDartFrame(isolate, frame->pc(), frame, bytecode));
#else
      UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    } else {
      code = frame->LookupDartCode();
      AppendCodeFrames(thread, isolate, zone, stack_trace, frame, &code,
                       &inlined_code, &deopt_frame);
    }
  };
  const auto& code_array = GrowableObjectArray::ZoneHandle(
      zone, GrowableObjectArray::New(kDefaultStackAllocation));
  const auto& pc_offset_array = GrowableObjectArray::ZoneHandle(
      zone, GrowableObjectArray::New(kDefaultStackAllocation));
  bool has_async = false;
  StackTraceUtils::CollectFramesLazy(thread, code_array, pc_offset_array,
                                     /*skip_frames=*/0, &on_sync_frame,
                                     &has_async);
  if (!has_async) {
    return nullptr;
  }
  // Entries before stack_trace->Length() correspond to the synchronous
  // frames already added by the callback; the remainder are awaiter frames.
  const intptr_t length = code_array.Length();
  for (intptr_t i = stack_trace->Length(); i < length; ++i) {
    code ^= code_array.At(i);
    offset ^= pc_offset_array.At(i);
    if (code.raw() == StubCode::AsynchronousGapMarker().raw()) {
      stack_trace->AddMarker(ActivationFrame::kAsyncSuspensionMarker);
    } else {
      const uword absolute_pc = code.PayloadStart() + offset.Value();
      stack_trace->AddAsyncCausalFrame(absolute_pc, code);
    }
  }
  return stack_trace;
}
// Collects an "awaiter return" stack trace: the live frames down to the
// innermost async function, then the chain of awaiter closures that will be
// resumed when it completes, then the recorded causal frames. While walking
// the live stack it navigates through the frames of async functions that
// were called synchronously (Closure_call / AsyncAwaitCompleter_start),
// tracked via skip_sync_async_frames_count. Returns NULL if the stack
// contains no async function or the async debugger is disabled.
DebuggerStackTrace* Debugger::CollectAwaiterReturnStackTrace() {
#if defined(DART_PRECOMPILED_RUNTIME)
  // Causal async stacks are not supported in the AOT runtime.
  ASSERT(!FLAG_async_debugger);
  return NULL;
#else
  if (!FLAG_async_debugger) {
    return NULL;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  DebuggerStackTrace* stack_trace = new DebuggerStackTrace(8);
  StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames,
                              Thread::Current(),
                              StackFrameIterator::kNoCrossThreadIteration);
  // Reusable handles for the walks below.
  Object& code_object = Object::Handle(zone);
  Code& code = Code::Handle(zone);
  Bytecode& bytecode = Bytecode::Handle(zone);
  Smi& offset = Smi::Handle(zone);
  Function& function = Function::Handle(zone);
  Code& inlined_code = Code::Handle(zone);
  Closure& async_activation = Closure::Handle(zone);
  Object& next_async_activation = Object::Handle(zone);
  Array& deopt_frame = Array::Handle(zone);
  class StackTrace& async_stack_trace = StackTrace::Handle(zone);
  bool stack_has_async_function = false;
  // Number of frames we are trying to skip that form "sync async" entry.
  int skip_sync_async_frames_count = -1;
  String& function_name = String::Handle(zone);
  for (StackFrame* frame = iterator.NextFrame(); frame != NULL;
       frame = iterator.NextFrame()) {
    ASSERT(frame->IsValid());
    if (FLAG_trace_debugger_stacktrace) {
      OS::PrintErr("CollectAwaiterReturnStackTrace: visiting frame:\n\t%s\n",
                   frame->ToCString());
    }
    if (frame->IsDartFrame()) {
      if (frame->is_interpreted()) {
        // Interpreted (bytecode) frame.
        bytecode = frame->LookupDartBytecode();
        function = bytecode.function();
        if (function.IsNull()) {
          continue;  // Skip bytecode stub frame.
        }
        if (skip_sync_async_frames_count > 0) {
          function_name = function.QualifiedScrubbedName();
          if (!StackTraceUtils::CheckAndSkipAsync(&skip_sync_async_frames_count,
                                                  function_name)) {
            // Unexpected function in synchronous call of async function.
            break;
          }
        }
        if (function.IsAsyncClosure() || function.IsAsyncGenClosure()) {
          ActivationFrame* activation =
              CollectDartFrame(isolate, frame->pc(), frame, bytecode,
                               ActivationFrame::kAsyncActivation);
          ASSERT(activation != NULL);
          stack_trace->AddActivation(activation);
          stack_has_async_function = true;
          // Grab the awaiter.
          async_activation ^= activation->GetAsyncAwaiter();
          async_stack_trace ^= activation->GetCausalStack();
          // Interpreted bytecode does not invoke _ClosureCall().
          // Skip _AsyncAwaitCompleterStart() only.
          skip_sync_async_frames_count = 1;
        } else {
          stack_trace->AddActivation(
              CollectDartFrame(isolate, frame->pc(), frame, bytecode));
        }
      } else {
        // Compiled-code frame.
        code = frame->LookupDartCode();
        if (code.is_optimized()) {
          if (code.is_force_optimized()) {
            if (FLAG_trace_debugger_stacktrace) {
              function = code.function();
              ASSERT(!function.IsNull());
              OS::PrintErr(
                  "CollectAwaiterReturnStackTrace: "
                  "skipping force-optimized function: %s\n",
                  function.ToFullyQualifiedCString());
            }
            // Skip frame of force-optimized (and non-debuggable) function.
            continue;
          }
          deopt_frame = DeoptimizeToArray(thread, frame, code);
          bool found_async_awaiter = false;
          bool abort_attempt_to_navigate_through_sync_async = false;
          // Walk the inlining chain at the frame's pc, innermost first.
          for (InlinedFunctionsIterator it(code, frame->pc()); !it.Done();
               it.Advance()) {
            inlined_code = it.code();
            function = it.function();
            if (skip_sync_async_frames_count > 0) {
              function_name ^= function.QualifiedScrubbedName();
              if (!StackTraceUtils::CheckAndSkipAsync(
                      &skip_sync_async_frames_count, function_name)) {
                // Unexpected function in sync async call
                skip_sync_async_frames_count = -1;
                abort_attempt_to_navigate_through_sync_async = true;
                break;
              }
            }
            if (FLAG_trace_debugger_stacktrace) {
              ASSERT(!function.IsNull());
              OS::PrintErr(
                  "CollectAwaiterReturnStackTrace: "
                  "visiting inlined function: %s\n ",
                  function.ToFullyQualifiedCString());
            }
            intptr_t deopt_frame_offset = it.GetDeoptFpOffset();
            if (function.IsAsyncClosure() || function.IsAsyncGenClosure()) {
              ActivationFrame* activation = CollectDartFrame(
                  isolate, it.pc(), frame, inlined_code, deopt_frame,
                  deopt_frame_offset, ActivationFrame::kAsyncActivation);
              ASSERT(activation != NULL);
              stack_trace->AddActivation(activation);
              stack_has_async_function = true;
              // Grab the awaiter.
              async_activation ^= activation->GetAsyncAwaiter();
              found_async_awaiter = true;
              // async function might have been called synchronously, in which
              // case we need to keep going down the stack.
              // To determine how we are called we peek few more frames further
              // expecting to see Closure_call followed by
              // AsyncAwaitCompleter_start.
              // If we are able to see those functions we continue going down
              // the stack, if we are not, we break out of the loop as we are
              // not interested in exploring rest of the stack - there is only
              // dart-internal code left.
              skip_sync_async_frames_count = 2;
            } else {
              stack_trace->AddActivation(
                  CollectDartFrame(isolate, it.pc(), frame, inlined_code,
                                   deopt_frame, deopt_frame_offset));
            }
          }
          // Break out of outer loop.
          if (found_async_awaiter ||
              abort_attempt_to_navigate_through_sync_async) {
            break;
          }
        } else {
          // Unoptimized compiled-code frame.
          function = code.function();
          if (skip_sync_async_frames_count > 0) {
            function_name ^= function.QualifiedScrubbedName();
            if (!StackTraceUtils::CheckAndSkipAsync(
                    &skip_sync_async_frames_count, function_name)) {
              // Unexpected function in synchronous call of async function.
              break;
            }
          }
          if (function.IsAsyncClosure() || function.IsAsyncGenClosure()) {
            ActivationFrame* activation = CollectDartFrame(
                isolate, frame->pc(), frame, code, Object::null_array(), 0,
                ActivationFrame::kAsyncActivation);
            ASSERT(activation != NULL);
            stack_trace->AddActivation(activation);
            stack_has_async_function = true;
            // Grab the awaiter.
            async_activation ^= activation->GetAsyncAwaiter();
            async_stack_trace ^= activation->GetCausalStack();
            // see comment regarding skipping frames of async functions called
            // synchronously above.
            skip_sync_async_frames_count = 2;
          } else {
            stack_trace->AddActivation(CollectDartFrame(
                isolate, frame->pc(), frame, code, Object::null_array(), 0));
          }
        }
      }
    }
  }
  // If the stack doesn't have any async functions on it, return NULL.
  if (!stack_has_async_function) {
    return NULL;
  }
  // Append the awaiter return call stack.
  while (!async_activation.IsNull()) {
    ActivationFrame* activation = new (zone) ActivationFrame(async_activation);
    activation->ExtractTokenPositionFromAsyncClosure();
    stack_trace->AddActivation(activation);
    if (FLAG_trace_debugger_stacktrace) {
      OS::PrintErr(
          "CollectAwaiterReturnStackTrace: visiting awaiter return "
          "closures:\n\t%s\n",
          activation->function().ToFullyQualifiedCString());
    }
    next_async_activation = activation->GetAsyncAwaiter();
    if (next_async_activation.IsNull()) {
      // No more awaiters. Extract the causal stack trace (if it exists).
      async_stack_trace ^= activation->GetCausalStack();
      break;
    }
    async_activation = Closure::RawCast(next_async_activation.raw());
  }
  // Now we append the asynchronous causal stack trace. These are not active
  // frames but a historical record of how this asynchronous function was
  // activated.
  while (!async_stack_trace.IsNull()) {
    for (intptr_t i = 0; i < async_stack_trace.Length(); i++) {
      if (async_stack_trace.CodeAtFrame(i) == Code::null()) {
        // Incomplete OutOfMemory/StackOverflow trace OR array padding.
        break;
      }
      if (async_stack_trace.CodeAtFrame(i) ==
          StubCode::AsynchronousGapMarker().raw()) {
        stack_trace->AddMarker(ActivationFrame::kAsyncSuspensionMarker);
        // The frame immediately below the asynchronous gap marker is
        // identical to the frame above the marker. Skip the frame to enhance
        // the readability of the trace.
        i++;
      } else {
        code_object = async_stack_trace.CodeAtFrame(i);
        offset = Smi::RawCast(async_stack_trace.PcOffsetAtFrame(i));
        if (code_object.IsBytecode()) {
          bytecode ^= code_object.raw();
          if (FLAG_trace_debugger_stacktrace) {
            OS::PrintErr("CollectAwaiterReturnStackTrace: visiting frame %" Pd
                         " in async causal stack trace:\n\t%s\n",
                         i,
                         Function::Handle(bytecode.function())
                             .ToFullyQualifiedCString());
          }
          uword pc = bytecode.PayloadStart() + offset.Value();
          stack_trace->AddAsyncCausalFrame(pc, bytecode);
        } else {
          code ^= code_object.raw();
          if (FLAG_trace_debugger_stacktrace) {
            OS::PrintErr(
                "CollectAwaiterReturnStackTrace: visiting frame %" Pd
                " in async causal stack trace:\n\t%s\n",
                i, Function::Handle(code.function()).ToFullyQualifiedCString());
          }
          uword pc = code.PayloadStart() + offset.Value();
          if (code.is_optimized()) {
            // Expand inlined functions so each appears as its own frame.
            for (InlinedFunctionsIterator it(code, pc); !it.Done();
                 it.Advance()) {
              inlined_code = it.code();
              stack_trace->AddAsyncCausalFrame(it.pc(), inlined_code);
            }
          } else {
            stack_trace->AddAsyncCausalFrame(pc, code);
          }
        }
      }
    }
    // Follow the link.
    async_stack_trace = async_stack_trace.async_link();
  }
  return stack_trace;
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
// Returns an activation for the topmost Dart frame on the current stack,
// skipping any non-Dart (stub/entry/exit) frames above it. The stack is
// required to contain at least one Dart frame (RELEASE_ASSERT otherwise).
ActivationFrame* Debugger::TopDartFrame() const {
  StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames,
                              Thread::Current(),
                              StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame;
  while (true) {
    frame = iterator.NextFrame();
    RELEASE_ASSERT(frame != nullptr);
    if (!frame->IsDartFrame()) {
      continue;
    }
#if !defined(DART_PRECOMPILED_RUNTIME)
    if (frame->is_interpreted()) {
      Bytecode& bytecode = Bytecode::Handle(frame->LookupDartBytecode());
      // Note that we do not skip bytecode stub frame (with a null function),
      // so that we can ignore a single stepping breakpoint in such a frame.
      // A bytecode stub contains a VM internal bytecode followed by a
      // ReturnTOS bytecode. The single step on the ReturnTOS bytecode
      // needs to be skipped.
      ActivationFrame* activation =
          new ActivationFrame(frame->pc(), frame->fp(), frame->sp(), bytecode);
      return activation;
    }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    Code& code = Code::Handle(frame->LookupDartCode());
    ActivationFrame* activation = new ActivationFrame(
        frame->pc(), frame->fp(), frame->sp(), code, Object::null_array(), 0);
    return activation;
  }
}
// Returns the stack trace cached at the current pause, if any; otherwise
// collects a fresh one from the live stack.
DebuggerStackTrace* Debugger::StackTrace() {
  if (stack_trace_ != NULL) {
    return stack_trace_;
  }
  return CollectStackTrace();
}
// Always collects a fresh stack trace from the live stack, ignoring any
// trace cached at a pause.
DebuggerStackTrace* Debugger::CurrentStackTrace() {
  return CollectStackTrace();
}
// Returns the async causal stack trace cached at the current pause, if any;
// otherwise collects a fresh one.
DebuggerStackTrace* Debugger::AsyncCausalStackTrace() {
  if (async_causal_stack_trace_ != NULL) {
    return async_causal_stack_trace_;
  }
  return CollectAsyncCausalStackTrace();
}
// Always collects a fresh async causal stack trace, ignoring any trace
// cached at a pause.
DebuggerStackTrace* Debugger::CurrentAsyncCausalStackTrace() {
  return CollectAsyncCausalStackTrace();
}
// Returns the awaiter-return stack trace cached at the current pause, if
// any; otherwise collects a fresh one.
DebuggerStackTrace* Debugger::AwaiterStackTrace() {
  if (awaiter_stack_trace_ != NULL) {
    return awaiter_stack_trace_;
  }
  return CollectAwaiterReturnStackTrace();
}
// Always collects a fresh awaiter-return stack trace, ignoring any trace
// cached at a pause.
DebuggerStackTrace* Debugger::CurrentAwaiterStackTrace() {
  return CollectAwaiterReturnStackTrace();
}
// Builds a DebuggerStackTrace from a Dart StackTrace object (e.g. the trace
// attached to an exception). These frames are not live on the machine
// stack, so fp/sp are 0 and there is no deopt frame. Frames of invisible
// functions and empty (padding) slots are omitted; optimized frames are
// expanded into their inlined functions when the trace requests it.
DebuggerStackTrace* Debugger::StackTraceFrom(const class StackTrace& ex_trace) {
  DebuggerStackTrace* stack_trace = new DebuggerStackTrace(8);
  Function& function = Function::Handle();
  Object& code_object = Object::Handle();
  Code& code = Code::Handle();
#if !defined(DART_PRECOMPILED_RUNTIME)
  Bytecode& bytecode = Bytecode::Handle();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  // Not live frames: no frame/stack pointers and no deopt info.
  const uword fp = 0;
  const uword sp = 0;
  const Array& deopt_frame = Array::Handle();
  const intptr_t deopt_frame_offset = -1;
  for (intptr_t i = 0; i < ex_trace.Length(); i++) {
    code_object = ex_trace.CodeAtFrame(i);
    // Pre-allocated StackTraces may include empty slots, either (a) to indicate
    // where frames were omitted in the case a stack has more frames than the
    // pre-allocated trace (such as a stack overflow) or (b) because a stack has
    // fewer frames that the pre-allocated trace (such as memory exhaustion with
    // a shallow stack).
    if (!code_object.IsNull()) {
      if (code_object.IsBytecode()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
        bytecode ^= code_object.raw();
        function = bytecode.function();
        // Skip bytecode stub frames and frames with invisible function.
        if (!function.IsNull() && function.is_visible()) {
          ASSERT(function.raw() == bytecode.function());
          uword pc =
              bytecode.PayloadStart() + Smi::Value(ex_trace.PcOffsetAtFrame(i));
          ActivationFrame* activation =
              new ActivationFrame(pc, fp, sp, bytecode);
          stack_trace->AddActivation(activation);
        }
#else
        UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
      } else {
        code ^= code_object.raw();
        ASSERT(code.IsFunctionCode());
        function = code.function();
        if (function.is_visible()) {
          ASSERT(function.raw() == code.function());
          uword pc =
              code.PayloadStart() + Smi::Value(ex_trace.PcOffsetAtFrame(i));
          if (code.is_optimized() && ex_trace.expand_inlined()) {
            // Traverse inlined frames.
            for (InlinedFunctionsIterator it(code, pc); !it.Done();
                 it.Advance()) {
              function = it.function();
              code = it.code();
              ASSERT(function.raw() == code.function());
              // Shadows the outer |pc| with the inlined frame's pc.
              uword pc = it.pc();
              ASSERT(pc != 0);
              ASSERT(code.PayloadStart() <= pc);
              ASSERT(pc < (code.PayloadStart() + code.Size()));
              ActivationFrame* activation = new ActivationFrame(
                  pc, fp, sp, code, deopt_frame, deopt_frame_offset);
              stack_trace->AddActivation(activation);
            }
          } else {
            ActivationFrame* activation = new ActivationFrame(
                pc, fp, sp, code, deopt_frame, deopt_frame_offset);
            stack_trace->AddActivation(activation);
          }
        }
      }
    }
  }
  return stack_trace;
}
// Sets the debugger's pause-on-exception policy. Only the three enumerated
// policies asserted below are valid.
void Debugger::SetExceptionPauseInfo(Dart_ExceptionPauseInfo pause_info) {
  ASSERT((pause_info == kNoPauseOnExceptions) ||
         (pause_info == kPauseOnUnhandledExceptions) ||
         (pause_info == kPauseOnAllExceptions));
  exc_pause_info_ = pause_info;
}
// Returns the current pause-on-exception policy.
Dart_ExceptionPauseInfo Debugger::GetExceptionPauseInfo() const {
  return exc_pause_info_;
}
bool Debugger::ShouldPauseOnException(DebuggerStackTrace* stack_trace,
const Instance& exception) {
if (exc_pause_info_ == kNoPauseOnExceptions) {
return false;
}
if (exc_pause_info_ == kPauseOnAllExceptions) {
return true;
}
ASSERT(exc_pause_info_ == kPauseOnUnhandledExceptions);
// Exceptions coming from invalid token positions should be skipped
ActivationFrame* top_frame = stack_trace->FrameAt(0);
if (!top_frame->TokenPos().IsReal() && top_frame->TryIndex() != -1) {
return false;
}
ActivationFrame* handler_frame = stack_trace->GetHandlerFrame(exception);
if