| // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| // for details. All rights reserved. Use of this source code is governed by a |
| // BSD-style license that can be found in the LICENSE file. |
| |
| #include "vm/debugger.h" |
| |
| #include "include/dart_api.h" |
| |
| #include "vm/closure_functions_cache.h" |
| #include "vm/code_descriptors.h" |
| #include "vm/code_patcher.h" |
| #include "vm/compiler/api/deopt_id.h" |
| #include "vm/compiler/assembler/disassembler.h" |
| #include "vm/compiler/jit/compiler.h" |
| #include "vm/dart_entry.h" |
| #include "vm/flags.h" |
| #include "vm/globals.h" |
| #include "vm/isolate_reload.h" |
| #include "vm/json_stream.h" |
| #include "vm/kernel.h" |
| #include "vm/longjump.h" |
| #include "vm/message_handler.h" |
| #include "vm/object.h" |
| #include "vm/object_store.h" |
| #include "vm/os.h" |
| #include "vm/parser.h" |
| #include "vm/port.h" |
| #include "vm/runtime_entry.h" |
| #include "vm/service.h" |
| #include "vm/service_event.h" |
| #include "vm/service_isolate.h" |
| #include "vm/stack_frame.h" |
| #include "vm/stack_trace.h" |
| #include "vm/stub_code.h" |
| #include "vm/symbols.h" |
| #include "vm/thread_interrupter.h" |
| #include "vm/timeline.h" |
| #include "vm/token_position.h" |
| #include "vm/visitor.h" |
| |
| #if !defined(DART_PRECOMPILED_RUNTIME) |
| #include "vm/deopt_instructions.h" |
| #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| |
| namespace dart { |
| |
// Debugger tracing / verbosity flags defined by this file.
DEFINE_FLAG(bool,
            trace_debugger_stacktrace,
            false,
            "Trace debugger stacktrace collection");
DEFINE_FLAG(bool, trace_rewind, false, "Trace frame rewind");
DEFINE_FLAG(bool, verbose_debug, false, "Verbose debugger messages");

// Flags defined elsewhere in the VM that this file consults.
DECLARE_FLAG(bool, trace_deoptimization);
DECLARE_FLAG(bool, warn_on_pause_with_no_debugger);
| |
| #ifndef PRODUCT |
| |
// Create an unresolved breakpoint in given token range and script.
// |scripts| must be non-empty; the URL of the first script is taken as the
// canonical URL for this location. The line number is computed lazily on
// first request (see line_number()).
BreakpointLocation::BreakpointLocation(
    Debugger* debugger,
    const GrowableHandlePtrArray<const Script>& scripts,
    TokenPosition token_pos,
    TokenPosition end_token_pos,
    intptr_t requested_line_number,
    intptr_t requested_column_number)
    : debugger_(debugger),
      scripts_(MallocGrowableArray<ScriptPtr>(scripts.length())),
      url_(scripts.At(0).url()),
      line_number_lock_(new SafepointRwLock()),
      line_number_(-1),  // lazily computed
      token_pos_(token_pos),
      end_token_pos_(end_token_pos),
      next_(NULL),
      conditions_(NULL),
      requested_line_number_(requested_line_number),
      requested_column_number_(requested_column_number),
      code_token_pos_(TokenPosition::kNoSource) {
  ASSERT(scripts.length() > 0);
  ASSERT(token_pos.IsReal());
  // Store raw script pointers; they are kept alive via VisitObjectPointers.
  for (intptr_t i = 0; i < scripts.length(); ++i) {
    scripts_.Add(scripts.At(i).ptr());
  }
}
| |
// Create a latent breakpoint at given url and line number.
// A latent location has no scripts and no token positions yet; it is
// resolved later when a matching script gets loaded.
BreakpointLocation::BreakpointLocation(Debugger* debugger,
                                       const String& url,
                                       intptr_t requested_line_number,
                                       intptr_t requested_column_number)
    : debugger_(debugger),
      scripts_(MallocGrowableArray<ScriptPtr>(0)),
      url_(url.ptr()),
      line_number_lock_(new SafepointRwLock()),
      line_number_(-1),  // lazily computed
      token_pos_(TokenPosition::kNoSource),
      end_token_pos_(TokenPosition::kNoSource),
      next_(NULL),
      conditions_(NULL),
      requested_line_number_(requested_line_number),
      requested_column_number_(requested_column_number),
      code_token_pos_(TokenPosition::kNoSource) {
  ASSERT(requested_line_number_ >= 0);
}
| |
| BreakpointLocation::~BreakpointLocation() { |
| Breakpoint* bpt = breakpoints(); |
| while (bpt != NULL) { |
| Breakpoint* temp = bpt; |
| bpt = bpt->next(); |
| delete temp; |
| } |
| } |
| |
| bool BreakpointLocation::AnyEnabled() const { |
| Breakpoint* bpt = breakpoints(); |
| while (bpt != nullptr) { |
| if (bpt->is_enabled()) { |
| return true; |
| } |
| bpt = bpt->next(); |
| } |
| return false; |
| } |
| |
// Marks this location as resolved: pins the token range to the single
// position |token_pos| inside |func| and records it as the code position.
// The location must not be latent and |func| must be debuggable.
void BreakpointLocation::SetResolved(const Function& func,
                                     TokenPosition token_pos) {
#if defined(DEBUG)
  // The function must come from the same script this location refers to.
  const Script& func_script = Script::Handle(func.script());
  const String& func_url = String::Handle(func_script.url());
  const String& script_url = String::Handle(url_);
  ASSERT(script_url.Equals(func_url));
#endif  // defined(DEBUG)
  ASSERT(!IsLatent());
  ASSERT(func.is_generated_body() ||
         token_pos.IsWithin(func.token_pos(), func.end_token_pos()));
  ASSERT(func.is_debuggable());
  token_pos_.store(token_pos);
  end_token_pos_.store(token_pos);
  code_token_pos_ = token_pos;
}
| |
| void BreakpointLocation::GetCodeLocation(Script* script, |
| TokenPosition* pos) const { |
| if (IsLatent()) { |
| *script = Script::null(); |
| *pos = TokenPosition::kNoSource; |
| } else { |
| *script = this->script(); |
| *pos = token_pos_; |
| } |
| } |
| |
intptr_t BreakpointLocation::line_number() {
  // Compute line number lazily since it causes scanning of the script.
  // Fast path: read lock only, return the cached value if already computed.
  {
    SafepointReadRwLocker sl(Thread::Current(), line_number_lock());
    if (line_number_ >= 0) {
      return line_number_;
    }
  }
  // Slow path: take the write lock and re-check, since another thread may
  // have computed the value between releasing the read lock and here.
  SafepointWriteRwLocker sl(Thread::Current(), line_number_lock());
  if (line_number_ < 0) {
    Script::Handle(script()).GetTokenLocation(token_pos(), &line_number_);
  }
  return line_number_;
}
| |
| void Breakpoint::set_bpt_location(BreakpointLocation* new_bpt_location) { |
| // Only latent breakpoints can be moved. |
| ASSERT((new_bpt_location == NULL) || bpt_location_->IsLatent()); |
| bpt_location_ = new_bpt_location; |
| } |
| |
// GC support: reports the raw object pointer (the per-closure target, if
// any) held by this breakpoint so the GC can update it.
void Breakpoint::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&closure_));
}
| |
| void BreakpointLocation::VisitObjectPointers(ObjectPointerVisitor* visitor) { |
| for (intptr_t i = 0; i < scripts_.length(); ++i) { |
| visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&scripts_.data()[i])); |
| } |
| visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&url_)); |
| |
| Breakpoint* bpt = conditions_; |
| while (bpt != NULL) { |
| bpt->VisitObjectPointers(visitor); |
| bpt = bpt->next(); |
| } |
| } |
| |
// Serializes this breakpoint as a service-protocol "Breakpoint" object.
void Breakpoint::PrintJSON(JSONStream* stream) {
  JSONObject jsobj(stream);
  jsobj.AddProperty("type", "Breakpoint");

  jsobj.AddFixedServiceId("breakpoints/%" Pd "", id());
  jsobj.AddProperty("enabled", enabled_);
  jsobj.AddProperty("breakpointNumber", id());
  // Only emitted when true, to keep ordinary breakpoints compact.
  if (is_synthetic_async()) {
    jsobj.AddProperty("isSyntheticAsyncContinuation", is_synthetic_async());
  }
  jsobj.AddProperty("resolved", bpt_location_->IsResolved());
  // Resolved locations have an exact script position; unresolved ones only
  // carry the requested url/line/column.
  if (bpt_location_->IsResolved()) {
    jsobj.AddLocation(bpt_location_);
  } else {
    jsobj.AddUnresolvedLocation(bpt_location_);
  }
}
| |
// GC support: reports the raw pointers to the instrumented code object and
// the saved original value replaced by the breakpoint patch.
void CodeBreakpoint::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&code_));
  visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&saved_value_));
}
| |
| const char* CodeBreakpoint::ToCString() const { |
| if (breakpoint_locations_.length() == 0) { |
| return "unlinked breakpoint"; |
| } |
| |
| char buffer[1024]; |
| BufferFormatter f(buffer, sizeof(buffer)); |
| // Pick the first, all other should have same script/line number. |
| BreakpointLocation* breakpoint_location = breakpoint_locations_.At(0); |
| String& source_url = String::Handle(breakpoint_location->url()); |
| intptr_t line_number = breakpoint_location->line_number(); |
| |
| f.Printf("breakpoint at %s:%" Pd, source_url.ToCString(), line_number); |
| return Thread::Current()->zone()->MakeCopyOfString(buffer); |
| } |
| |
// Constructs an activation frame for a frame found on the stack: |pc|,
// |fp| and |sp| locate it, |code| is the code executing there, and
// |deopt_frame|/|deopt_frame_offset| carry materialized values when the
// frame belongs to deoptimized/optimized code. Token position, try index,
// line/column and context level are all computed lazily (initialized to
// their "unknown" sentinels here).
ActivationFrame::ActivationFrame(uword pc,
                                 uword fp,
                                 uword sp,
                                 const Code& code,
                                 const Array& deopt_frame,
                                 intptr_t deopt_frame_offset,
                                 ActivationFrame::Kind kind)
    : pc_(pc),
      fp_(fp),
      sp_(sp),
      ctx_(Context::ZoneHandle()),
      code_(Code::ZoneHandle(code.ptr())),
      function_(Function::ZoneHandle(code.function())),
      live_frame_((kind == kRegular) || (kind == kAsyncActivation)),
      token_pos_initialized_(false),
      token_pos_(TokenPosition::kNoSource),
      try_index_(-1),
      deopt_id_(DeoptId::kNone),
      line_number_(-1),
      column_number_(-1),
      context_level_(-1),
      deopt_frame_(Array::ZoneHandle(deopt_frame.ptr())),
      deopt_frame_offset_(deopt_frame_offset),
      kind_(kind),
      vars_initialized_(false),
      var_descriptors_(LocalVarDescriptors::ZoneHandle()),
      desc_indices_(8),
      pc_desc_(PcDescriptors::ZoneHandle()) {
  ASSERT(!function_.IsNull());
}
| |
// Constructs a placeholder frame of the given |kind| with no pc/fp/sp and
// no code or function (used e.g. for async suspension markers). Only
// kRegular placeholders are considered live.
ActivationFrame::ActivationFrame(Kind kind)
    : pc_(0),
      fp_(0),
      sp_(0),
      ctx_(Context::ZoneHandle()),
      code_(Code::ZoneHandle()),
      function_(Function::ZoneHandle()),
      live_frame_(kind == kRegular),
      token_pos_initialized_(false),
      token_pos_(TokenPosition::kNoSource),
      try_index_(-1),
      deopt_id_(DeoptId::kNone),
      line_number_(-1),
      column_number_(-1),
      context_level_(-1),
      deopt_frame_(Array::ZoneHandle()),
      deopt_frame_offset_(0),
      kind_(kind),
      vars_initialized_(false),
      var_descriptors_(LocalVarDescriptors::ZoneHandle()),
      desc_indices_(8),
      pc_desc_(PcDescriptors::ZoneHandle()) {}
| |
// Constructs a non-live kAsyncActivation frame from a suspended async
// closure: the frame's function, (unoptimized) code and context are taken
// from |async_activation| rather than from the machine stack.
ActivationFrame::ActivationFrame(const Closure& async_activation)
    : pc_(0),
      fp_(0),
      sp_(0),
      ctx_(Context::ZoneHandle()),
      code_(Code::ZoneHandle()),
      function_(Function::ZoneHandle()),
      live_frame_(false),
      token_pos_initialized_(false),
      token_pos_(TokenPosition::kNoSource),
      try_index_(-1),
      deopt_id_(DeoptId::kNone),
      line_number_(-1),
      column_number_(-1),
      context_level_(-1),
      deopt_frame_(Array::ZoneHandle()),
      deopt_frame_offset_(0),
      kind_(kAsyncActivation),
      vars_initialized_(false),
      var_descriptors_(LocalVarDescriptors::ZoneHandle()),
      desc_indices_(8),
      pc_desc_(PcDescriptors::ZoneHandle()) {
  // Extract the function and the code from the asynchronous activation.
  function_ = async_activation.function();
  // Force-optimize functions should not be debuggable.
  ASSERT(!function_.ForceOptimize());
  function_.EnsureHasCompiledUnoptimizedCode();
  code_ = function_.unoptimized_code();
  ctx_ = async_activation.context();
  ASSERT(fp_ == 0);
  ASSERT(!ctx_.IsNull());
}
| |
| bool Debugger::NeedsIsolateEvents() { |
| return !Isolate::IsSystemIsolate(isolate_) && |
| Service::isolate_stream.enabled(); |
| } |
| |
| bool Debugger::NeedsDebugEvents() { |
| ASSERT(!Isolate::IsSystemIsolate(isolate_)); |
| return FLAG_warn_on_pause_with_no_debugger || Service::debug_stream.enabled(); |
| } |
| |
// Forwards a (non-pause) service event to the service event handler.
static void InvokeEventHandler(ServiceEvent* event) {
  ASSERT(!event->IsPause());  // For pause events, call Pause instead.
  Service::HandleEvent(event, /*enter_safepoint*/ false);
}
| |
// Pauses the isolate in response to an interrupt request.
ErrorPtr Debugger::PauseInterrupted() {
  return PauseRequest(ServiceEvent::kPauseInterrupted);
}
| |
// Pauses the isolate after a service "pause" request has been posted.
ErrorPtr Debugger::PausePostRequest() {
  return PauseRequest(ServiceEvent::kPausePostRequest);
}
| |
// Common implementation for externally requested pauses: collects the
// current stack traces, pauses the isolate (entering the debug message
// loop), then handles any stepping request set while paused. Returns any
// sticky error (expected to be an UnwindError, if any) accumulated while
// paused.
ErrorPtr Debugger::PauseRequest(ServiceEvent::EventKind kind) {
  if (ignore_breakpoints_ || IsPaused()) {
    // We don't let the isolate get interrupted if we are already
    // paused or ignoring breakpoints.
    return Thread::Current()->StealStickyError();
  }
  ServiceEvent event(isolate_, kind);
  DebuggerStackTrace* trace = DebuggerStackTrace::Collect();
  if (trace->Length() > 0) {
    event.set_top_frame(trace->FrameAt(0));
  }
  // Cache sync, async-causal and awaiter traces for the duration of the
  // pause so service requests see a consistent snapshot.
  CacheStackTraces(trace, DebuggerStackTrace::CollectAsyncCausal(),
                   DebuggerStackTrace::CollectAwaiterReturn());
  set_resume_action(kContinue);
  Pause(&event);  // Blocks in the debug message loop until resumed.
  HandleSteppingRequest(trace);
  ClearCachedStackTraces();

  // If any error occurred while in the debug message loop, return it here.
  NoSafepointScope no_safepoint;
  ErrorPtr error = Thread::Current()->StealStickyError();
  ASSERT((error == Error::null()) || error->IsUnwindError());
  return error;
}
| |
| void Debugger::SendBreakpointEvent(ServiceEvent::EventKind kind, |
| Breakpoint* bpt) { |
| if (NeedsDebugEvents()) { |
| // TODO(turnidge): Currently we send single-shot breakpoint events |
| // to the vm service. Do we want to change this? |
| ServiceEvent event(isolate_, kind); |
| event.set_breakpoint(bpt); |
| InvokeEventHandler(&event); |
| } |
| } |
| |
// Links |bpt| at the head of this location's breakpoint list, enables it,
// synchronizes the code breakpoints across the isolate group, and notifies
// service clients that a breakpoint was added.
void BreakpointLocation::AddBreakpoint(Breakpoint* bpt, Debugger* dbg) {
  bpt->set_next(breakpoints());
  set_breakpoints(bpt);
  bpt->Enable();
  dbg->group_debugger()->SyncBreakpointLocation(this);
  dbg->SendBreakpointEvent(ServiceEvent::kBreakpointAdded, bpt);
}
| |
| Breakpoint* BreakpointLocation::AddRepeated(Debugger* dbg) { |
| Breakpoint* bpt = breakpoints(); |
| while (bpt != NULL) { |
| if (bpt->IsRepeated()) break; |
| bpt = bpt->next(); |
| } |
| if (bpt == NULL) { |
| bpt = new Breakpoint(dbg->nextId(), this); |
| bpt->SetIsRepeated(); |
| AddBreakpoint(bpt, dbg); |
| } |
| return bpt; |
| } |
| |
| Breakpoint* BreakpointLocation::AddSingleShot(Debugger* dbg) { |
| Breakpoint* bpt = breakpoints(); |
| while (bpt != NULL) { |
| if (bpt->IsSingleShot()) break; |
| bpt = bpt->next(); |
| } |
| if (bpt == NULL) { |
| bpt = new Breakpoint(dbg->nextId(), this); |
| bpt->SetIsSingleShot(); |
| AddBreakpoint(bpt, dbg); |
| } |
| return bpt; |
| } |
| |
| Breakpoint* BreakpointLocation::AddPerClosure(Debugger* dbg, |
| const Instance& closure, |
| bool for_over_await) { |
| Breakpoint* bpt = NULL; |
| // Do not reuse existing breakpoints for stepping over await clauses. |
| // A second async step-over command will set a new breakpoint before |
| // the existing one gets deleted when first async step-over resumes. |
| if (!for_over_await) { |
| bpt = breakpoints(); |
| while (bpt != NULL) { |
| if (bpt->IsPerClosure() && (bpt->closure() == closure.ptr())) break; |
| bpt = bpt->next(); |
| } |
| } |
| if (bpt == NULL) { |
| bpt = new Breakpoint(dbg->nextId(), this); |
| bpt->SetIsPerClosure(closure); |
| bpt->set_is_synthetic_async(for_over_await); |
| AddBreakpoint(bpt, dbg); |
| } |
| return bpt; |
| } |
| |
| static const char* QualifiedFunctionName(const Function& func) { |
| const String& func_name = String::Handle(func.name()); |
| Class& func_class = Class::Handle(func.Owner()); |
| String& class_name = String::Handle(func_class.Name()); |
| |
| return OS::SCreate(Thread::Current()->zone(), "%s%s%s", |
| func_class.IsTopLevel() ? "" : class_name.ToCString(), |
| func_class.IsTopLevel() ? "" : ".", func_name.ToCString()); |
| } |
| |
// Returns true if the function |func| overlaps the token range
// [|token_pos|, |end_token_pos|] in |script|.
// Two ranges overlap iff either range's start lies within the other, which
// is what the two IsWithin checks below test.
static bool FunctionOverlaps(const Function& func,
                             const String& script_url,
                             TokenPosition token_pos,
                             TokenPosition end_token_pos) {
  const TokenPosition& func_start = func.token_pos();
  if (token_pos.IsWithin(func_start, func.end_token_pos()) ||
      func_start.IsWithin(token_pos, end_token_pos)) {
    // Check script equality last because it allocates handles as a side effect.
    Script& func_script = Script::Handle(func.script());
    String& url = String::Handle(func_script.url());
    return script_url.Equals(url);
  }
  return false;
}
| |
// Returns true if |func| is compiler-synthesized (implicit accessors,
// dispatchers, extractors, etc.) rather than user-written code the
// debugger should treat as a breakpoint target.
static bool IsImplicitFunction(const Function& func) {
  switch (func.kind()) {
    case UntaggedFunction::kImplicitGetter:
    case UntaggedFunction::kImplicitSetter:
    case UntaggedFunction::kImplicitStaticGetter:
    case UntaggedFunction::kFieldInitializer:
    case UntaggedFunction::kMethodExtractor:
    case UntaggedFunction::kNoSuchMethodDispatcher:
    case UntaggedFunction::kInvokeFieldDispatcher:
    case UntaggedFunction::kIrregexpFunction:
      return true;
    default:
      // An empty token range means there is no user-written source for the
      // function body.
      if (func.token_pos() == func.end_token_pos()) {
        // |func| could be an implicit constructor for example.
        return true;
      }
  }
  return false;
}
| |
| bool GroupDebugger::HasCodeBreakpointInFunction(const Function& func) { |
| auto thread = Thread::Current(); |
| return RunUnderReadLockIfNeeded(thread, code_breakpoints_lock(), [&]() { |
| CodeBreakpoint* cbpt = code_breakpoints_; |
| while (cbpt != NULL) { |
| if (func.ptr() == cbpt->function()) { |
| return true; |
| } |
| cbpt = cbpt->next_; |
| } |
| return false; |
| }); |
| } |
| |
| bool GroupDebugger::HasBreakpointInCode(const Code& code) { |
| auto thread = Thread::Current(); |
| return RunUnderReadLockIfNeeded(thread, code_breakpoints_lock(), [&]() { |
| CodeBreakpoint* cbpt = code_breakpoints_; |
| while (cbpt != NULL) { |
| if (code.ptr() == cbpt->code_) { |
| return true; |
| } |
| cbpt = cbpt->next_; |
| } |
| return false; |
| }); |
| } |
| |
// Emits all breakpoints — both those with known locations and latent
// (not-yet-resolved) ones — into |jsarr|.
void Debugger::PrintBreakpointsToJSONArray(JSONArray* jsarr) const {
  PrintBreakpointsListToJSONArray(breakpoint_locations_, jsarr);
  PrintBreakpointsListToJSONArray(latent_locations_, jsarr);
}
| |
| void Debugger::PrintBreakpointsListToJSONArray(BreakpointLocation* sbpt, |
| JSONArray* jsarr) const { |
| while (sbpt != NULL) { |
| Breakpoint* bpt = sbpt->breakpoints(); |
| while (bpt != NULL) { |
| jsarr->AddValue(bpt); |
| bpt = bpt->next(); |
| } |
| sbpt = sbpt->next_; |
| } |
| } |
| |
// Emits the debugger's current exception-pause setting as the private
// "_exceptions" property on |jsobj|.
void Debugger::PrintSettingsToJSONObject(JSONObject* jsobj) const {
  // This won't cut it when we support filtering by class, etc.
  switch (GetExceptionPauseInfo()) {
    case kNoPauseOnExceptions:
      jsobj->AddProperty("_exceptions", "none");
      break;
    case kPauseOnAllExceptions:
      jsobj->AddProperty("_exceptions", "all");
      break;
    case kPauseOnUnhandledExceptions:
      jsobj->AddProperty("_exceptions", "unhandled");
      break;
    default:
      UNREACHABLE();
  }
}
| |
| ActivationFrame::Relation ActivationFrame::CompareTo(uword other_fp) const { |
| if (fp() == other_fp) { |
| return kSelf; |
| } |
| return IsCalleeFrameOf(other_fp, fp()) ? kCallee : kCaller; |
| } |
| |
// Returns "Class.function" (or just "function" for top-level functions)
// as a new String.
StringPtr ActivationFrame::QualifiedFunctionName() {
  return String::New(::dart::QualifiedFunctionName(function()));
}
| |
// Returns the URL of the script containing this frame's function.
StringPtr ActivationFrame::SourceUrl() {
  const Script& script = Script::Handle(SourceScript());
  return script.url();
}
| |
// Returns the script containing this frame's function.
ScriptPtr ActivationFrame::SourceScript() {
  return function().script();
}
| |
// Returns the library of the class this frame's function originates from.
LibraryPtr ActivationFrame::Library() {
  const Class& cls = Class::Handle(function().origin());
  return cls.library();
}
| |
| void ActivationFrame::GetPcDescriptors() { |
| if (pc_desc_.IsNull()) { |
| pc_desc_ = code().pc_descriptors(); |
| ASSERT(!pc_desc_.IsNull()); |
| } |
| } |
| |
// If not token_pos_initialized_, compute token_pos_, try_index_ and
// deopt_id_ by scanning the pc descriptors for an entry whose pc offset
// matches this frame's pc. If no entry matches, token_pos_ stays
// kNoSource (and try_index_/deopt_id_ keep their "unknown" sentinels).
TokenPosition ActivationFrame::TokenPos() {
  if (!token_pos_initialized_) {
    token_pos_initialized_ = true;
    token_pos_ = TokenPosition::kNoSource;
    GetPcDescriptors();
    PcDescriptors::Iterator iter(pc_desc_, UntaggedPcDescriptors::kAnyKind);
    const uword pc_offset = pc_ - code().PayloadStart();
    while (iter.MoveNext()) {
      if (iter.PcOffset() == pc_offset) {
        try_index_ = iter.TryIndex();
        token_pos_ = iter.TokenPos();
        deopt_id_ = iter.DeoptId();
        break;
      }
    }
  }
  return token_pos_;
}
| |
// Returns the try index at this frame's pc (-1 when the pc has no
// matching pc-descriptor entry).
intptr_t ActivationFrame::TryIndex() {
  if (!token_pos_initialized_) {
    TokenPos();  // Side effect: computes token_pos_initialized_, try_index_.
  }
  return try_index_;
}
| |
// Returns the deopt id at this frame's pc (DeoptId::kNone when the pc has
// no matching pc-descriptor entry).
intptr_t ActivationFrame::DeoptId() {
  if (!token_pos_initialized_) {
    TokenPos();  // Side effect: computes token_pos_initialized_, deopt_id_.
  }
  return deopt_id_;
}
| |
// Returns this frame's 1-based source line (-1 when no real token position
// is available).
intptr_t ActivationFrame::LineNumber() {
  // Compute line number lazily since it causes scanning of the script.
  const TokenPosition& token_pos = TokenPos();
  if ((line_number_ < 0) && token_pos.IsReal()) {
    const Script& script = Script::Handle(SourceScript());
    // Fills both line_number_ and column_number_ in one lookup.
    script.GetTokenLocation(token_pos, &line_number_, &column_number_);
  }
  return line_number_;
}
| |
// Returns this frame's 1-based source column (-1 when no real token
// position is available).
intptr_t ActivationFrame::ColumnNumber() {
  // Compute column number lazily since it causes scanning of the script.
  const TokenPosition& token_pos = TokenPos();
  if ((column_number_ < 0) && token_pos.IsReal()) {
    const Script& script = Script::Handle(SourceScript());
    // Fills both line_number_ and column_number_ in one lookup.
    script.GetTokenLocation(token_pos, &line_number_, &column_number_);
  }
  return column_number_;
}
| |
// Lazily fetches the local variable descriptors for this frame's function,
// compiling unoptimized code first if it does not exist yet. Propagates
// (does not return from) any compilation error.
void ActivationFrame::GetVarDescriptors() {
  if (var_descriptors_.IsNull()) {
    Code& unoptimized_code = Code::Handle(function().unoptimized_code());
    if (unoptimized_code.IsNull()) {
      Thread* thread = Thread::Current();
      Zone* zone = thread->zone();
      const Error& error = Error::Handle(
          zone, Compiler::EnsureUnoptimizedCode(thread, function()));
      if (!error.IsNull()) {
        Exceptions::PropagateError(error);
      }
      unoptimized_code = function().unoptimized_code();
    }
    ASSERT(!unoptimized_code.IsNull());
    var_descriptors_ = unoptimized_code.GetLocalVarDescriptors();
    ASSERT(!var_descriptors_.IsNull());
  }
}
| |
// Returns true if the debugger may stop in this frame's function.
bool ActivationFrame::IsDebuggable() const {
  ASSERT(!function().IsNull());
  return Debugger::IsDebuggable(function());
}
| |
| void ActivationFrame::PrintDescriptorsError(const char* message) { |
| OS::PrintErr("Bad descriptors: %s\n", message); |
| OS::PrintErr("function %s\n", function().ToQualifiedCString()); |
| OS::PrintErr("pc_ %" Px "\n", pc_); |
| OS::PrintErr("deopt_id_ %" Px "\n", deopt_id_); |
| OS::PrintErr("context_level_ %" Px "\n", context_level_); |
| OS::PrintErr("token_pos_ %s\n", token_pos_.ToCString()); |
| { |
| DisassembleToStdout formatter; |
| code().Disassemble(&formatter); |
| PcDescriptors::Handle(code().pc_descriptors()).Print(); |
| } |
| StackFrameIterator frames(ValidationPolicy::kDontValidateFrames, |
| Thread::Current(), |
| StackFrameIterator::kNoCrossThreadIteration); |
| StackFrame* frame = frames.NextFrame(); |
| while (frame != NULL) { |
| OS::PrintErr("%s\n", frame->ToCString()); |
| frame = frames.NextFrame(); |
| } |
| OS::Abort(); |
| } |
| |
// Calculate the context level at the current pc of the frame.
// The result is cached in context_level_. Aborts (via
// PrintDescriptorsError) when the var descriptors lack the needed entry.
intptr_t ActivationFrame::ContextLevel() {
  ASSERT(live_frame_);
  const Context& ctx = GetSavedCurrentContext();
  if (context_level_ < 0 && !ctx.IsNull()) {
    ASSERT(!code_.is_optimized());
    GetVarDescriptors();
    intptr_t deopt_id = DeoptId();
    if (deopt_id == DeoptId::kNone) {
      PrintDescriptorsError("Missing deopt id");
    }
    intptr_t var_desc_len = var_descriptors_.Length();
    bool found = false;
    // We store the deopt ids as real token positions.
    const auto to_compare = TokenPosition::Deserialize(deopt_id);
    // Find the kContextLevel entry whose range covers the current deopt id.
    for (intptr_t cur_idx = 0; cur_idx < var_desc_len; cur_idx++) {
      UntaggedLocalVarDescriptors::VarInfo var_info;
      var_descriptors_.GetInfo(cur_idx, &var_info);
      const int8_t kind = var_info.kind();
      if ((kind == UntaggedLocalVarDescriptors::kContextLevel) &&
          to_compare.IsWithin(var_info.begin_pos, var_info.end_pos)) {
        context_level_ = var_info.index();
        found = true;
        break;
      }
    }
    if (!found) {
      PrintDescriptorsError("Missing context level in var descriptors");
    }
    ASSERT(context_level_ >= 0);
  }
  return context_level_;
}
| |
// Looks up the variable |name| in this async-closure frame: returns its
// value from the stack or from the context chain. Returns Object::null()
// when the frame is not an async closure or the variable is not found.
ObjectPtr ActivationFrame::GetAsyncContextVariable(const String& name) {
  if (!function_.IsAsyncClosure() && !function_.IsAsyncGenClosure()) {
    return Object::null();
  }
  GetVarDescriptors();
  intptr_t var_ctxt_level = -1;
  intptr_t ctxt_slot = -1;
  intptr_t var_desc_len = var_descriptors_.Length();
  for (intptr_t i = 0; i < var_desc_len; i++) {
    UntaggedLocalVarDescriptors::VarInfo var_info;
    var_descriptors_.GetInfo(i, &var_info);
    if (var_descriptors_.GetName(i) == name.ptr()) {
      const int8_t kind = var_info.kind();
      // Non-live frames only have their context; stack slots are gone.
      if (!live_frame_) {
        ASSERT(kind == UntaggedLocalVarDescriptors::kContextVar);
      }
      const auto variable_index = VariableIndex(var_info.index());
      if (kind == UntaggedLocalVarDescriptors::kStackVar) {
        return GetStackVar(variable_index);
      } else {
        ASSERT(kind == UntaggedLocalVarDescriptors::kContextVar);
        // Defer the context lookup until after the loop.
        var_ctxt_level = var_info.scope_id;
        ctxt_slot = variable_index.value();
        break;
      }
    }
  }
  if (var_ctxt_level >= 0) {
    if (!live_frame_) {
      ASSERT(!ctx_.IsNull());
      // Compiled code uses relative context levels, i.e. the frame context
      // level is always 0 on entry.
      const intptr_t frame_ctx_level = 0;
      return GetRelativeContextVar(var_ctxt_level, ctxt_slot, frame_ctx_level);
    }
    return GetContextVar(var_ctxt_level, ctxt_slot);
  }
  return Object::null();
}
| |
// For an async(-gen) closure frame that is live on the stack, returns the
// closure that awaits this frame's completion (found via
// |caller_closure_finder|), or Object::null() when there is none.
ObjectPtr ActivationFrame::GetAsyncAwaiter(
    CallerClosureFinder* caller_closure_finder) {
  if (fp() != 0 && !function_.IsNull() &&
      (function_.IsAsyncClosure() || function_.IsAsyncGenClosure())) {
    // Look up caller's closure on the stack.
    ObjectPtr* last_caller_obj = reinterpret_cast<ObjectPtr*>(GetCallerSp());
    Closure& closure = Closure::Handle();
    closure = StackTraceUtils::FindClosureInFrame(last_caller_obj, function_);

    if (!closure.IsNull() && caller_closure_finder->IsRunningAsync(closure)) {
      closure = caller_closure_finder->FindCaller(closure);
      return closure.ptr();
    }
  }

  return Object::null();
}
| |
// Returns true if this frame would catch |exc_obj|: either a user-written
// try/catch whose handled types match, or (for async frames) an indirect
// handler attached via Future.catchError.
bool ActivationFrame::HandlesException(const Instance& exc_obj) {
  if ((kind_ == kAsyncSuspensionMarker) || (kind_ == kAsyncCausal)) {
    // These frames are historical.
    return false;
  }
  intptr_t try_index = TryIndex();
  if (try_index < 0) {
    return false;
  }
  ExceptionHandlers& handlers = ExceptionHandlers::Handle();
  Array& handled_types = Array::Handle();
  AbstractType& type = Type::Handle();
  const bool is_async =
      function().IsAsyncClosure() || function().IsAsyncGenClosure();
  handlers = code().exception_handlers();
  ASSERT(!handlers.IsNull());
  intptr_t num_handlers_checked = 0;
  // Walk outward through the enclosing try blocks.
  while (try_index != kInvalidTryIndex) {
    // Detect circles in the exception handler data.
    num_handlers_checked++;
    ASSERT(num_handlers_checked <= handlers.num_entries());
    // Only consider user written handlers and ignore synthesized try/catch in
    // async methods as well as synthetic try/catch hiding inside try/finally.
    if (!handlers.IsGenerated(try_index)) {
      handled_types = handlers.GetHandledTypes(try_index);
      const intptr_t num_types = handled_types.Length();
      for (intptr_t k = 0; k < num_types; k++) {
        type ^= handled_types.At(k);
        ASSERT(!type.IsNull());
        // Uninstantiated types are not added to ExceptionHandlers data.
        ASSERT(type.IsInstantiated());
        if (type.IsDynamicType()) {
          // 'catch' without an 'on' clause handles everything.
          return true;
        }
        if (exc_obj.IsInstanceOf(type, Object::null_type_arguments(),
                                 Object::null_type_arguments())) {
          return true;
        }
      }
    }
    try_index = handlers.OuterTryIndex(try_index);
  }
  // Async functions might have indirect exception handlers in the form of
  // `Future.catchError`. Check the Closure's _FutureListeners.
  if (fp() != 0 && is_async) {
    CallerClosureFinder caller_closure_finder(Thread::Current()->zone());
    ObjectPtr* last_caller_obj = reinterpret_cast<ObjectPtr*>(GetCallerSp());
    Closure& closure = Closure::Handle(
        StackTraceUtils::FindClosureInFrame(last_caller_obj, function()));
    if (!caller_closure_finder.IsRunningAsync(closure)) {
      return false;
    }
    Object& futureOrListener =
        Object::Handle(caller_closure_finder.GetAsyncFuture(closure));
    futureOrListener =
        caller_closure_finder.GetFutureFutureListener(futureOrListener);
    return caller_closure_finder.HasCatchError(futureOrListener);
  }

  return false;
}
| |
// Reads the :await_jump_var context variable of this suspended async frame
// (the resume point index). Returns -1 when the variable is not present.
intptr_t ActivationFrame::GetAwaitJumpVariable() {
  GetVarDescriptors();
  intptr_t var_ctxt_level = -1;
  intptr_t ctxt_slot = -1;
  intptr_t var_desc_len = var_descriptors_.Length();
  intptr_t await_jump_var = -1;
  for (intptr_t i = 0; i < var_desc_len; i++) {
    UntaggedLocalVarDescriptors::VarInfo var_info;
    var_descriptors_.GetInfo(i, &var_info);
    const int8_t kind = var_info.kind();
    if (var_descriptors_.GetName(i) == Symbols::AwaitJumpVar().ptr()) {
      // The await-jump variable is always captured in the context.
      ASSERT(kind == UntaggedLocalVarDescriptors::kContextVar);
      ASSERT(!ctx_.IsNull());
      var_ctxt_level = var_info.scope_id;
      ctxt_slot = var_info.index();
      break;
    }
  }
  if (var_ctxt_level >= 0) {
    Object& await_jump_index = Object::Handle(ctx_.At(ctxt_slot));
    ASSERT(await_jump_index.IsSmi());
    await_jump_var = Smi::Cast(await_jump_index).Value();
  }
  return await_jump_var;
}
| |
// For a suspended (non-live) async frame, derives token_pos_ and
// try_index_ from the closure's :await_jump_var by matching it against the
// yield indices in the pc descriptors. Leaves the fields untouched when
// the variable or a matching descriptor entry is absent.
void ActivationFrame::ExtractTokenPositionFromAsyncClosure() {
  // Attempt to determine the token pos and try index from the async closure.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  ASSERT(function_.IsAsyncGenClosure() || function_.IsAsyncClosure());
  // This should only be called on frames that aren't active on the stack.
  ASSERT(fp() == 0);

  const intptr_t await_jump_var = GetAwaitJumpVariable();
  if (await_jump_var < 0) {
    return;
  }

  const auto& pc_descriptors =
      PcDescriptors::Handle(zone, code().pc_descriptors());
  ASSERT(!pc_descriptors.IsNull());
  PcDescriptors::Iterator it(pc_descriptors, UntaggedPcDescriptors::kOther);
  while (it.MoveNext()) {
    if (it.YieldIndex() == await_jump_var) {
      try_index_ = it.TryIndex();
      token_pos_ = it.TokenPos();
      token_pos_initialized_ = true;
      return;
    }
  }
}
| |
// Returns true if this frame belongs to the VM's internal async plumbing
// (completer completion helpers, async* stream controller) rather than
// user code.
bool ActivationFrame::IsAsyncMachinery() const {
  ASSERT(!function_.IsNull());
  auto isolate_group = IsolateGroup::Current();
  if (function_.ptr() ==
          isolate_group->object_store()->complete_on_async_return() ||
      function_.ptr() == isolate_group->object_store()
                             ->complete_with_no_future_on_async_return()) {
    // We are completing an async function's completer.
    return true;
  }
  if (function_.Owner() ==
      isolate_group->object_store()->async_star_stream_controller()) {
    // We are inside the async* stream controller code.
    return true;
  }
  return false;
}
| |
// Get the saved current context of this activation.
// Looks up the kSavedCurrentContext slot in the var descriptors; the slot
// may hold a Context, a Closure (for Closure.call dispatcher frames, whose
// context is used instead), or null/optimized-out. The result is cached in
// ctx_ and may be Context::null().
const Context& ActivationFrame::GetSavedCurrentContext() {
  if (!ctx_.IsNull()) return ctx_;
  GetVarDescriptors();
  intptr_t var_desc_len = var_descriptors_.Length();
  Object& obj = Object::Handle();
  for (intptr_t i = 0; i < var_desc_len; i++) {
    UntaggedLocalVarDescriptors::VarInfo var_info;
    var_descriptors_.GetInfo(i, &var_info);
    const int8_t kind = var_info.kind();
    if (kind == UntaggedLocalVarDescriptors::kSavedCurrentContext) {
      if (FLAG_trace_debugger_stacktrace) {
        OS::PrintErr("\tFound saved current ctx at index %d\n",
                     var_info.index());
      }
      const auto variable_index = VariableIndex(var_info.index());
      obj = GetStackVar(variable_index);
      if (obj.IsClosure()) {
        ASSERT(function().name() == Symbols::Call().ptr());
        ASSERT(function().IsInvokeFieldDispatcher());
        // Closure.call frames.
        ctx_ = Closure::Cast(obj).context();
      } else if (obj.IsContext()) {
        ctx_ = Context::Cast(obj).ptr();
      } else {
        ASSERT(obj.IsNull() || obj.ptr() == Symbols::OptimizedOut().ptr());
        ctx_ = Context::null();
      }
      return ctx_;
    }
  }
  return ctx_;
}
| |
| ObjectPtr ActivationFrame::GetAsyncOperation() { |
| if (function().name() == Symbols::AsyncOperation().ptr()) { |
| return GetParameter(0); |
| } |
| return Object::null(); |
| } |
| |
| ActivationFrame* DebuggerStackTrace::GetHandlerFrame( |
| const Instance& exc_obj) const { |
| for (intptr_t frame_index = 0; frame_index < Length(); frame_index++) { |
| ActivationFrame* frame = FrameAt(frame_index); |
| if (FLAG_trace_debugger_stacktrace) { |
| OS::PrintErr("GetHandlerFrame: #%04" Pd " %s", frame_index, |
| frame->ToCString()); |
| } |
| if (frame->HandlesException(exc_obj)) { |
| return frame; |
| } |
| } |
| return NULL; |
| } |
| |
// Populates desc_indices_ with the var-descriptor index of every local
// variable visible at this frame's current token position, resolving
// shadowing between same-named variables. Idempotent: guarded by
// vars_initialized_.
void ActivationFrame::GetDescIndices() {
  if (vars_initialized_) {
    return;
  }
  GetVarDescriptors();

  TokenPosition activation_token_pos = TokenPos();
  if (!activation_token_pos.IsDebugPause() || !live_frame_) {
    // We don't have a token position for this frame, so can't determine
    // which variables are visible.
    vars_initialized_ = true;
    return;
  }

  GrowableArray<String*> var_names(8);
  intptr_t var_desc_len = var_descriptors_.Length();
  for (intptr_t cur_idx = 0; cur_idx < var_desc_len; cur_idx++) {
    // var_names and desc_indices_ grow in lock-step: entry i of one
    // corresponds to entry i of the other.
    ASSERT(var_names.length() == desc_indices_.length());
    UntaggedLocalVarDescriptors::VarInfo var_info;
    var_descriptors_.GetInfo(cur_idx, &var_info);
    const int8_t kind = var_info.kind();
    // Only stack and context variables are user-visible locals.
    if ((kind != UntaggedLocalVarDescriptors::kStackVar) &&
        (kind != UntaggedLocalVarDescriptors::kContextVar)) {
      continue;
    }
    if (!activation_token_pos.IsWithin(var_info.begin_pos, var_info.end_pos)) {
      continue;
    }
    if ((kind == UntaggedLocalVarDescriptors::kContextVar) &&
        (ContextLevel() < var_info.scope_id)) {
      // The variable is textually in scope but the context level
      // at the activation frame's PC is lower than the context
      // level of the variable. The context containing the variable
      // has already been removed from the chain. This can happen when we
      // break at a return statement, since the contexts get discarded
      // before the debugger gets called.
      continue;
    }
    // The current variable is textually in scope. Now check whether
    // there is another local variable with the same name that shadows
    // or is shadowed by this variable.
    String& var_name = String::Handle(var_descriptors_.GetName(cur_idx));
    intptr_t indices_len = desc_indices_.length();
    bool name_match_found = false;
    for (intptr_t i = 0; i < indices_len; i++) {
      if (var_name.Equals(*var_names[i])) {
        // Found two local variables with the same name. Now determine
        // which one is shadowed.
        name_match_found = true;
        UntaggedLocalVarDescriptors::VarInfo i_var_info;
        var_descriptors_.GetInfo(desc_indices_[i], &i_var_info);
        if (i_var_info.begin_pos < var_info.begin_pos) {
          // The variable we found earlier is in an outer scope
          // and is shadowed by the current variable. Replace the
          // descriptor index of the previously found variable
          // with the descriptor index of the current variable.
          desc_indices_[i] = cur_idx;
        } else {
          // The variable we found earlier is in an inner scope
          // and shadows the current variable. Skip the current
          // variable. (Nothing to do.)
        }
        break;  // Stop looking for name matches.
      }
    }
    if (!name_match_found) {
      // No duplicate name found. Add the current descriptor index to the
      // list of visible variables.
      desc_indices_.Add(cur_idx);
      var_names.Add(&var_name);
    }
  }
  vars_initialized_ = true;
}
| |
| intptr_t ActivationFrame::NumLocalVariables() { |
| GetDescIndices(); |
| return desc_indices_.length(); |
| } |
| |
// Reads a Dart object pointer directly from the given (stack) address.
// The caller is responsible for passing an address that holds a valid
// tagged object pointer.
DART_FORCE_INLINE static ObjectPtr GetVariableValue(uword addr) {
  return *reinterpret_cast<ObjectPtr*>(addr);
}
| |
// Caution: GetParameter only works for fixed parameters.
// Returns the value of fixed parameter |index|, or Object::null() when this
// frame is not active on the stack.
ObjectPtr ActivationFrame::GetParameter(intptr_t index) {
  intptr_t num_parameters = function().num_fixed_parameters();
  ASSERT(0 <= index && index < num_parameters);

  // fp will be a nullptr if the frame isn't active on the stack.
  if (fp() == 0) {
    return Object::null();
  }

  if (function().NumOptionalParameters() > 0) {
    // If the function has optional parameters, the first positional parameter
    // can be in a number of places in the caller's frame depending on how many
    // were actually supplied at the call site, but they are copied to a fixed
    // place in the callee's frame.

    return GetVariableValue(LocalVarAddress(
        fp(), runtime_frame_layout.FrameSlotForVariableIndex(-index)));
  } else {
    // All parameters are fixed: read the argument straight out of the
    // caller's frame, counting back from the last pushed argument.
    intptr_t reverse_index = num_parameters - index;
    return GetVariableValue(ParamAddress(fp(), reverse_index));
  }
}
| |
| ClosurePtr ActivationFrame::GetClosure() { |
| ASSERT(function().IsClosureFunction()); |
| Object& param = Object::Handle(GetParameter(0)); |
| ASSERT(param.IsInstance()); |
| ASSERT(Instance::Cast(param).IsClosure()); |
| return Closure::Cast(param).ptr(); |
| } |
| |
// Reads the stack variable at |variable_index|. For frames materialized
// from optimized code, the values live in deopt_frame_ instead of on the
// real stack.
ObjectPtr ActivationFrame::GetStackVar(VariableIndex variable_index) {
  const intptr_t slot_index =
      runtime_frame_layout.FrameSlotForVariableIndex(variable_index.value());
  if (deopt_frame_.IsNull()) {
    // Live unoptimized frame: read directly off the stack.
    return GetVariableValue(LocalVarAddress(fp(), slot_index));
  } else {
    // Deoptimized frame: values were materialized into deopt_frame_.
    return deopt_frame_.At(LocalVarIndex(deopt_frame_offset_, slot_index));
  }
}
| |
| bool ActivationFrame::IsRewindable() const { |
| if (deopt_frame_.IsNull()) { |
| return true; |
| } |
| // TODO(turnidge): This is conservative. It looks at all values in |
| // the deopt_frame_ even though some of them may correspond to other |
| // inlined frames. |
| Object& obj = Object::Handle(); |
| for (int i = 0; i < deopt_frame_.Length(); i++) { |
| obj = deopt_frame_.At(i); |
| if (obj.ptr() == Symbols::OptimizedOut().ptr()) { |
| return false; |
| } |
| } |
| return true; |
| } |
| |
| void ActivationFrame::PrintContextMismatchError(intptr_t ctx_slot, |
| intptr_t frame_ctx_level, |
| intptr_t var_ctx_level) { |
| OS::PrintErr( |
| "-------------------------\n" |
| "Encountered context mismatch\n" |
| "\tctx_slot: %" Pd |
| "\n" |
| "\tframe_ctx_level: %" Pd |
| "\n" |
| "\tvar_ctx_level: %" Pd "\n\n", |
| ctx_slot, frame_ctx_level, var_ctx_level); |
| |
| OS::PrintErr( |
| "-------------------------\n" |
| "Current frame:\n%s\n", |
| this->ToCString()); |
| |
| OS::PrintErr( |
| "-------------------------\n" |
| "Context contents:\n"); |
| const Context& ctx = GetSavedCurrentContext(); |
| ctx.Dump(8); |
| |
| OS::PrintErr( |
| "-------------------------\n" |
| "Debugger stack trace...\n\n"); |
| DebuggerStackTrace* stack = Isolate::Current()->debugger()->StackTrace(); |
| intptr_t num_frames = stack->Length(); |
| for (intptr_t i = 0; i < num_frames; i++) { |
| ActivationFrame* frame = stack->FrameAt(i); |
| OS::PrintErr("#%04" Pd " %s", i, frame->ToCString()); |
| } |
| |
| OS::PrintErr( |
| "-------------------------\n" |
| "All frames...\n\n"); |
| StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, |
| Thread::Current(), |
| StackFrameIterator::kNoCrossThreadIteration); |
| StackFrame* frame = iterator.NextFrame(); |
| intptr_t num = 0; |
| while ((frame != NULL)) { |
| OS::PrintErr("#%04" Pd " %s\n", num++, frame->ToCString()); |
| frame = iterator.NextFrame(); |
| } |
| } |
| |
// Fills in the name, declaration position, visibility range and current
// value of the |i|-th visible local variable, where |i| indexes into
// desc_indices_ as established by GetDescIndices(). All out-parameters
// must be non-null and are always written.
void ActivationFrame::VariableAt(intptr_t i,
                                 String* name,
                                 TokenPosition* declaration_token_pos,
                                 TokenPosition* visible_start_token_pos,
                                 TokenPosition* visible_end_token_pos,
                                 Object* value) {
  GetDescIndices();
  ASSERT(i < desc_indices_.length());
  intptr_t desc_index = desc_indices_[i];
  ASSERT(name != NULL);

  *name = var_descriptors_.GetName(desc_index);

  UntaggedLocalVarDescriptors::VarInfo var_info;
  var_descriptors_.GetInfo(desc_index, &var_info);
  ASSERT(declaration_token_pos != NULL);
  *declaration_token_pos = var_info.declaration_pos;
  ASSERT(visible_start_token_pos != NULL);
  *visible_start_token_pos = var_info.begin_pos;
  ASSERT(visible_end_token_pos != NULL);
  *visible_end_token_pos = var_info.end_pos;
  ASSERT(value != NULL);
  const int8_t kind = var_info.kind();
  const auto variable_index = VariableIndex(var_info.index());
  if (kind == UntaggedLocalVarDescriptors::kStackVar) {
    // Stack-allocated variable: read straight from the frame.
    *value = GetStackVar(variable_index);
  } else {
    // Context-allocated variable: walk the context chain to find it.
    ASSERT(kind == UntaggedLocalVarDescriptors::kContextVar);
    *value = GetContextVar(var_info.scope_id, variable_index.value());
  }
}
| |
| ObjectPtr ActivationFrame::GetContextVar(intptr_t var_ctx_level, |
| intptr_t ctx_slot) { |
| // The context level at the PC/token index of this activation frame. |
| intptr_t frame_ctx_level = ContextLevel(); |
| |
| return GetRelativeContextVar(var_ctx_level, ctx_slot, frame_ctx_level); |
| } |
| |
// Reads the context variable at |ctx_slot| of the context at
// |var_ctx_level|, walking up (frame_ctx_level - var_ctx_level) parent
// links from the frame's saved context. Returns the OptimizedOut sentinel
// when the context itself was optimized out, and null on a level mismatch
// (after dumping diagnostics).
ObjectPtr ActivationFrame::GetRelativeContextVar(intptr_t var_ctx_level,
                                                 intptr_t ctx_slot,
                                                 intptr_t frame_ctx_level) {
  const Context& ctx = GetSavedCurrentContext();

  // It's possible that ctx was optimized out as no locals were captured by the
  // context. See issue #38182.
  if (ctx.IsNull()) {
    return Symbols::OptimizedOut().ptr();
  }

  intptr_t level_diff = frame_ctx_level - var_ctx_level;
  if (level_diff == 0) {
    // The variable lives in the frame's own context.
    if ((ctx_slot < 0) || (ctx_slot >= ctx.num_variables())) {
      PrintContextMismatchError(ctx_slot, frame_ctx_level, var_ctx_level);
    }
    ASSERT((ctx_slot >= 0) && (ctx_slot < ctx.num_variables()));
    return ctx.At(ctx_slot);
  } else if (level_diff > 0) {
    // The variable lives in an enclosing context; follow parent links.
    Context& var_ctx = Context::Handle(ctx.ptr());
    while (level_diff > 0 && !var_ctx.IsNull()) {
      level_diff--;
      var_ctx = var_ctx.parent();
    }
    if (var_ctx.IsNull() || (ctx_slot < 0) ||
        (ctx_slot >= var_ctx.num_variables())) {
      PrintContextMismatchError(ctx_slot, frame_ctx_level, var_ctx_level);
    }
    ASSERT(!var_ctx.IsNull());
    ASSERT((ctx_slot >= 0) && (ctx_slot < var_ctx.num_variables()));
    return var_ctx.At(ctx_slot);
  } else {
    // The variable's context level is deeper than the frame's: invalid.
    PrintContextMismatchError(ctx_slot, frame_ctx_level, var_ctx_level);
    return Object::null();
  }
}
| |
| ArrayPtr ActivationFrame::GetLocalVariables() { |
| GetDescIndices(); |
| intptr_t num_variables = desc_indices_.length(); |
| String& var_name = String::Handle(); |
| Object& value = Instance::Handle(); |
| const Array& list = Array::Handle(Array::New(2 * num_variables)); |
| for (intptr_t i = 0; i < num_variables; i++) { |
| TokenPosition ignore = TokenPosition::kNoSource; |
| VariableAt(i, &var_name, &ignore, &ignore, &ignore, &value); |
| list.SetAt(2 * i, var_name); |
| list.SetAt((2 * i) + 1, value); |
| } |
| return list.ptr(); |
| } |
| |
| ObjectPtr ActivationFrame::GetReceiver() { |
| GetDescIndices(); |
| intptr_t num_variables = desc_indices_.length(); |
| String& var_name = String::Handle(); |
| Instance& value = Instance::Handle(); |
| for (intptr_t i = 0; i < num_variables; i++) { |
| TokenPosition ignore = TokenPosition::kNoSource; |
| VariableAt(i, &var_name, &ignore, &ignore, &ignore, &value); |
| if (var_name.Equals(Symbols::This())) { |
| return value.ptr(); |
| } |
| } |
| return Symbols::OptimizedOut().ptr(); |
| } |
| |
| static bool IsSyntheticVariableName(const String& var_name) { |
| return (var_name.Length() >= 1) && (var_name.CharAt(0) == ':'); |
| } |
| |
| static bool IsPrivateVariableName(const String& var_name) { |
| return (var_name.Length() >= 1) && (var_name.CharAt(0) == '_'); |
| } |
| |
// Evaluates a compiled kernel expression in the context of this frame.
// Static functions evaluate against the owning class; instance methods
// evaluate against this frame's receiver.
ObjectPtr ActivationFrame::EvaluateCompiledExpression(
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) {
  if (function().is_static()) {
    const Class& cls = Class::Handle(function().Owner());
    return cls.EvaluateCompiledExpression(kernel_buffer, type_definitions,
                                          arguments, type_arguments);
  } else {
    const Object& receiver = Object::Handle(GetReceiver());
    const Class& method_cls = Class::Handle(function().origin());
    ASSERT(receiver.IsInstance() || receiver.IsNull());
    // The ASSERT documents the expectation; this release-mode guard bails
    // out with null rather than evaluating against an unexpected receiver.
    if (!(receiver.IsInstance() || receiver.IsNull())) {
      return Object::null();
    }
    const Instance& inst = Instance::Cast(receiver);
    return inst.EvaluateCompiledExpression(
        method_cls, kernel_buffer, type_definitions, arguments, type_arguments);
  }
}
| |
// Collects this frame's visible locals into |param_names|/|param_values|
// (skipping 'this' and synthetic variables, un-mangling private names) and,
// for generic functions, fills the type-parameter names/bounds/defaults
// arrays. Returns the captured function type arguments, or null if they
// were not found among the locals.
TypeArgumentsPtr ActivationFrame::BuildParameters(
    const GrowableObjectArray& param_names,
    const GrowableObjectArray& param_values,
    const GrowableObjectArray& type_params_names,
    const GrowableObjectArray& type_params_bounds,
    const GrowableObjectArray& type_params_defaults) {
  GetDescIndices();
  bool type_arguments_available = false;
  String& name = String::Handle();
  String& existing_name = String::Handle();
  Object& value = Instance::Handle();
  TypeArguments& type_arguments = TypeArguments::Handle();
  intptr_t num_variables = desc_indices_.length();
  for (intptr_t i = 0; i < num_variables; i++) {
    TokenPosition ignore = TokenPosition::kNoSource;
    VariableAt(i, &name, &ignore, &ignore, &ignore, &value);
    if (name.Equals(Symbols::FunctionTypeArgumentsVar())) {
      // This local holds the frame's captured type arguments vector.
      type_arguments_available = true;
      type_arguments ^= value.ptr();
    } else if (!name.Equals(Symbols::This()) &&
               !IsSyntheticVariableName(name)) {
      if (IsPrivateVariableName(name)) {
        name = Symbols::New(Thread::Current(), String::ScrubName(name));
      }
      bool conflict = false;
      for (intptr_t j = 0; j < param_names.Length(); j++) {
        existing_name ^= param_names.At(j);
        if (name.Equals(existing_name)) {
          conflict = true;
          break;
        }
      }
      // If local has the same name as a binding in the incoming scope, prefer
      // the one from the incoming scope, since it is logically a child scope
      // of the activation's current scope.
      if (!conflict) {
        param_names.Add(name);
        param_values.Add(value);
      }
    }
  }

  if ((function().IsGeneric() || function().HasGenericParent()) &&
      type_arguments_available) {
    intptr_t num_vars = function().NumTypeArguments();
    type_params_names.Grow(num_vars);
    type_params_names.SetLength(num_vars);
    type_params_bounds.Grow(num_vars);
    type_params_bounds.SetLength(num_vars);
    type_params_defaults.Grow(num_vars);
    type_params_defaults.SetLength(num_vars);
    AbstractType& bound = AbstractType::Handle();
    AbstractType& defaultType = AbstractType::Handle();
    TypeParameters& type_params = TypeParameters::Handle();
    Function& current = Function::Handle(function().ptr());
    intptr_t mapping_offset = num_vars;
    // Walk from the innermost function outwards, filling the arrays from
    // the back so ancestors' type parameters end up first.
    for (; !current.IsNull(); current = current.parent_function()) {
      type_params = current.type_parameters();
      if (type_params.IsNull()) continue;
      intptr_t size = current.NumTypeParameters();
      ASSERT(size > 0 && type_params.Length() == size);
      ASSERT(mapping_offset >= size);
      mapping_offset -= size;
      for (intptr_t j = 0; j < size; ++j) {
        name = type_params.NameAt(j);
        bound = type_params.BoundAt(j);
        defaultType = type_params.DefaultAt(j);
        // Write the names in backwards in terms of chain of functions.
        // But keep the order of names within the same function. so they
        // match up with the order of the types in 'type_arguments'.
        // Index:0 1 2 3 ...
        // |Names in Grandparent| |Names in Parent| ..|Names in Child|
        type_params_names.SetAt(mapping_offset + j, name);
        type_params_bounds.SetAt(mapping_offset + j, bound);
        type_params_defaults.SetAt(mapping_offset + j, defaultType);
      }
    }
    if (!type_arguments.IsNull()) {
      if (type_arguments.Length() == 0) {
        // An empty vector stands for "all dynamic"; expand it explicitly.
        for (intptr_t i = 0; i < num_vars; ++i) {
          type_arguments.SetTypeAt(i, Object::dynamic_type());
        }
      }
      ASSERT(type_arguments.Length() == num_vars);
    }
  }

  return type_arguments.ptr();
}
| |
// Builds a human-readable, zone-allocated description of this frame for
// tracing and diagnostic output. The format differs depending on whether
// the frame is live on the stack (has pc/fp/sp) or not.
const char* ActivationFrame::ToCString() {
  if (function().IsNull()) {
    // Marker frames (e.g. async suspension markers) have no function.
    return Thread::Current()->zone()->PrintToString("[ Frame kind: %s]\n",
                                                    KindToCString(kind_));
  }
  const String& url = String::Handle(SourceUrl());
  intptr_t line = LineNumber();
  const char* func_name = function().ToFullyQualifiedCString();
  if (live_frame_) {
    return Thread::Current()->zone()->PrintToString(
        "[ Frame pc(0x%" Px " code offset:0x%" Px ") fp(0x%" Px ") sp(0x%" Px
        ")\n"
        "\tfunction = %s\n"
        "\turl = %s\n"
        "\tline = %" Pd
        "\n"
        "\tcontext = %s\n"
        "\tcontext level = %" Pd " ]\n",
        pc(), pc() - code().PayloadStart(), fp(), sp(), func_name,
        url.ToCString(), line, ctx_.ToCString(), ContextLevel());
  } else {
    return Thread::Current()->zone()->PrintToString(
        "[ Frame code function = %s\n"
        "\turl = %s\n"
        "\tline = %" Pd
        "\n"
        "\tcontext = %s]\n",
        func_name, url.ToCString(), line, ctx_.ToCString());
  }
}
| |
| void ActivationFrame::PrintToJSONObject(JSONObject* jsobj) { |
| if (kind_ == kRegular || kind_ == kAsyncActivation) { |
| PrintToJSONObjectRegular(jsobj); |
| } else if (kind_ == kAsyncCausal) { |
| PrintToJSONObjectAsyncCausal(jsobj); |
| } else if (kind_ == kAsyncSuspensionMarker) { |
| PrintToJSONObjectAsyncSuspensionMarker(jsobj); |
| } else { |
| UNIMPLEMENTED(); |
| } |
| } |
| |
| void ActivationFrame::PrintToJSONObjectRegular(JSONObject* jsobj) { |
| const Script& script = Script::Handle(SourceScript()); |
| jsobj->AddProperty("type", "Frame"); |
| jsobj->AddProperty("kind", KindToCString(kind_)); |
| const TokenPosition& pos = TokenPos(); |
| jsobj->AddLocation(script, pos); |
| jsobj->AddProperty("function", function()); |
| jsobj->AddProperty("code", code()); |
| { |
| JSONArray jsvars(jsobj, "vars"); |
| const int num_vars = NumLocalVariables(); |
| for (intptr_t v = 0; v < num_vars; v++) { |
| String& var_name = String::Handle(); |
| Instance& var_value = Instance::Handle(); |
| TokenPosition declaration_token_pos = TokenPosition::kNoSource; |
| TokenPosition visible_start_token_pos = TokenPosition::kNoSource; |
| TokenPosition visible_end_token_pos = TokenPosition::kNoSource; |
| VariableAt(v, &var_name, &declaration_token_pos, &visible_start_token_pos, |
| &visible_end_token_pos, &var_value); |
| if (!IsSyntheticVariableName(var_name)) { |
| JSONObject jsvar(&jsvars); |
| jsvar.AddProperty("type", "BoundVariable"); |
| const char* scrubbed_var_name = String::ScrubName(var_name); |
| jsvar.AddProperty("name", scrubbed_var_name); |
| jsvar.AddProperty("value", var_value); |
| // Where was the variable declared? |
| jsvar.AddProperty("declarationTokenPos", declaration_token_pos); |
| // When the variable becomes visible to the scope. |
| jsvar.AddProperty("scopeStartTokenPos", visible_start_token_pos); |
| // When the variable stops being visible to the scope. |
| jsvar.AddProperty("scopeEndTokenPos", visible_end_token_pos); |
| } |
| } |
| } |
| } |
| |
| void ActivationFrame::PrintToJSONObjectAsyncCausal(JSONObject* jsobj) { |
| jsobj->AddProperty("type", "Frame"); |
| jsobj->AddProperty("kind", KindToCString(kind_)); |
| const Script& script = Script::Handle(SourceScript()); |
| const TokenPosition& pos = TokenPos(); |
| jsobj->AddLocation(script, pos); |
| jsobj->AddProperty("function", function()); |
| jsobj->AddProperty("code", code()); |
| } |
| |
// Prints an async suspension marker frame. Such frames have no code or
// source location; they only mark the boundary between the synchronous
// portions of an asynchronous stack.
void ActivationFrame::PrintToJSONObjectAsyncSuspensionMarker(
    JSONObject* jsobj) {
  jsobj->AddProperty("type", "Frame");
  jsobj->AddProperty("kind", KindToCString(kind_));
  jsobj->AddProperty("marker", "AsynchronousSuspension");
}
| |
| static bool IsFunctionVisible(const Function& function) { |
| return FLAG_show_invisible_frames || function.is_visible(); |
| } |
| |
| void DebuggerStackTrace::AddActivation(ActivationFrame* frame) { |
| if (IsFunctionVisible(frame->function())) { |
| trace_.Add(frame); |
| } |
| } |
| |
| void DebuggerStackTrace::AddMarker(ActivationFrame::Kind marker) { |
| ASSERT(marker == ActivationFrame::kAsyncSuspensionMarker); |
| trace_.Add(new ActivationFrame(marker)); |
| } |
| |
// Appends a synthetic async-causal frame for |pc| in |code|. Such frames
// are not live on the C++ stack, so fp/sp are zero and there is no deopt
// frame.
void DebuggerStackTrace::AddAsyncCausalFrame(uword pc, const Code& code) {
  trace_.Add(new ActivationFrame(pc, 0, 0, code, Array::Handle(), 0,
                                 ActivationFrame::kAsyncCausal));
}
| |
// PC descriptor kinds at which a code breakpoint may be installed: the
// call sites that are safe patch points for the debugger.
const uint8_t kSafepointKind = UntaggedPcDescriptors::kIcCall |
                               UntaggedPcDescriptors::kUnoptStaticCall |
                               UntaggedPcDescriptors::kRuntimeCall;
| |
| CodeBreakpoint::CodeBreakpoint(const Code& code, |
| BreakpointLocation* breakpoint_location, |
| uword pc, |
| UntaggedPcDescriptors::Kind kind) |
| : code_(code.ptr()), |
| pc_(pc), |
| enabled_count_(0), |
| next_(NULL), |
| breakpoint_kind_(kind), |
| saved_value_(Code::null()) { |
| ASSERT(!code.IsNull()); |
| ASSERT(pc_ != 0); |
| ASSERT((breakpoint_kind_ & kSafepointKind) != 0); |
| AddBreakpointLocation(breakpoint_location); |
| ASSERT(breakpoint_location->token_pos().IsReal()); |
| } |
| |
// Destroys a code breakpoint. The breakpoint must already be disabled so
// that the patched instruction has been restored.
CodeBreakpoint::~CodeBreakpoint() {
  // Make sure we don't leave patched code behind.
  ASSERT(!IsEnabled());
// Poison the data so we catch use after free errors.
#ifdef DEBUG
  code_ = Code::null();
  pc_ = 0ul;
  next_ = NULL;
  breakpoint_kind_ = UntaggedPcDescriptors::kOther;
#endif
}
| |
| void CodeBreakpoint::Enable() { |
| if (enabled_count_ == 0) { |
| PatchCode(); |
| } |
| ++enabled_count_; |
| } |
| |
| void CodeBreakpoint::Disable() { |
| if (enabled_count_ == 1) { |
| RestoreCode(); |
| } |
| --enabled_count_; |
| } |
| |
| bool CodeBreakpoint::HasBreakpointLocation( |
| BreakpointLocation* breakpoint_location) { |
| for (intptr_t i = 0; i < breakpoint_locations_.length(); i++) { |
| if (breakpoint_locations_[i] == breakpoint_location) { |
| return true; |
| } |
| } |
| return false; |
| } |
| |
| bool CodeBreakpoint::FindAndDeleteBreakpointLocation( |
| BreakpointLocation* breakpoint_location) { |
| for (intptr_t i = 0; i < breakpoint_locations_.length(); i++) { |
| if (breakpoint_locations_[i] == breakpoint_location) { |
| breakpoint_locations_.EraseAt(i); |
| return true; |
| } |
| } |
| return false; |
| } |
| |
| BreakpointLocation* CodeBreakpoint::FindBreakpointForDebugger( |
| Debugger* debugger) { |
| for (intptr_t i = 0; i < breakpoint_locations_.length(); i++) { |
| if (breakpoint_locations_[i]->debugger() == debugger) { |
| return breakpoint_locations_[i]; |
| } |
| } |
| return nullptr; |
| } |
| |
// Shared debugger state for an isolate group: code breakpoints and the
// locks guarding breakpoint locations and the single-stepping set.
GroupDebugger::GroupDebugger(IsolateGroup* isolate_group)
    : isolate_group_(isolate_group),
      code_breakpoints_lock_(new SafepointRwLock()),
      code_breakpoints_(nullptr),
      breakpoint_locations_lock_(new SafepointRwLock()),
      single_stepping_set_lock_(new SafepointRwLock()),
      needs_breakpoint_cleanup_(false) {}
| |
| GroupDebugger::~GroupDebugger() { |
| while (code_breakpoints_ != nullptr) { |
| CodeBreakpoint* cbpt = code_breakpoints_; |
| code_breakpoints_ = code_breakpoints_->next(); |
| ASSERT(!cbpt->IsEnabled()); |
| delete cbpt; |
| } |
| } |
| |
| Debugger::Debugger(Isolate* isolate) |
| : isolate_(isolate), |
| next_id_(1), |
| latent_locations_(NULL), |
| breakpoint_locations_(NULL), |
| resume_action_(kContinue), |
| resume_frame_index_(-1), |
| post_deopt_frame_index_(-1), |
| ignore_breakpoints_(false), |
| pause_event_(NULL), |
| stack_trace_(NULL), |
| async_causal_stack_trace_(NULL), |
| awaiter_stack_trace_(NULL), |
| stepping_fp_(0), |
| last_stepping_fp_(0), |
| last_stepping_pos_(TokenPosition::kNoSource), |
| async_stepping_fp_(0), |
| top_frame_awaiter_(Object::null()), |
| skip_next_step_(false), |
| synthetic_async_breakpoint_(NULL), |
| exc_pause_info_(kNoPauseOnExceptions) {} |
| |
// The debugger must have been shut down (see Shutdown()) before it is
// destroyed: no pause in progress and all breakpoint state released.
Debugger::~Debugger() {
  ASSERT(!IsPaused());
  ASSERT(latent_locations_ == NULL);
  ASSERT(breakpoint_locations_ == NULL);
  ASSERT(stack_trace_ == NULL);
  ASSERT(async_causal_stack_trace_ == NULL);
  ASSERT(synthetic_async_breakpoint_ == NULL);
}
| |
// Releases all breakpoint state for this isolate's debugger and, when
// service events are enabled, reports the isolate's exit. No-op for
// system isolates.
void Debugger::Shutdown() {
  // TODO(johnmccutchan): Do not create a debugger for isolates that don't need
  // them. Then, assert here that isolate_ is not one of those isolates.
  if (Isolate::IsSystemIsolate(isolate_)) {
    return;
  }
  {
    // Hold the group-wide locations lock while unlinking, since code
    // breakpoints are shared across the isolate group.
    SafepointWriteRwLocker sl(Thread::Current(),
                              group_debugger()->breakpoint_locations_lock());
    while (breakpoint_locations_ != nullptr) {
      BreakpointLocation* loc = breakpoint_locations_;
      group_debugger()->UnlinkCodeBreakpoints(loc);
      group_debugger()->UnregisterBreakpointLocation(loc);
      breakpoint_locations_ = breakpoint_locations_->next();
      delete loc;
    }
    while (latent_locations_ != nullptr) {
      BreakpointLocation* loc = latent_locations_;
      group_debugger()->UnlinkCodeBreakpoints(loc);
      group_debugger()->UnregisterBreakpointLocation(loc);
      latent_locations_ = latent_locations_->next();
      delete loc;
    }
  }
  if (NeedsIsolateEvents()) {
    ServiceEvent event(isolate_, ServiceEvent::kIsolateExit);
    InvokeEventHandler(&event);
  }
}
| |
// Forward declarations for file-local helpers defined later in this file.
static ActivationFrame* TopDartFrame();
static bool IsAtAsyncJump(ActivationFrame* top_frame);
| |
| bool Debugger::SetupStepOverAsyncSuspension(const char** error) { |
| ActivationFrame* top_frame = TopDartFrame(); |
| if (!IsAtAsyncJump(top_frame)) { |
| // Not at an async operation. |
| if (error != nullptr) { |
| *error = "Isolate must be paused at an async suspension point"; |
| } |
| return false; |
| } |
| Object& closure = Object::Handle(top_frame->GetAsyncOperation()); |
| ASSERT(!closure.IsNull()); |
| ASSERT(closure.IsInstance()); |
| ASSERT(Instance::Cast(closure).IsClosure()); |
| Breakpoint* bpt = SetBreakpointAtActivation(Instance::Cast(closure), true); |
| if (bpt == NULL) { |
| // Unable to set the breakpoint. |
| if (error != nullptr) { |
| *error = "Unable to set breakpoint at async suspension point"; |
| } |
| return false; |
| } |
| return true; |
| } |
| |
// Forward declaration; defined later in this file.
static bool CanRewindFrame(intptr_t frame_index, const char** error);
| |
| bool Debugger::SetResumeAction(ResumeAction action, |
| intptr_t frame_index, |
| const char** error) { |
| if (error != nullptr) { |
| *error = NULL; |
| } |
| resume_frame_index_ = -1; |
| switch (action) { |
| case kStepInto: |
| case kStepOver: |
| case kStepOut: |
| case kContinue: |
| set_resume_action(action); |
| return true; |
| case kStepRewind: |
| if (!CanRewindFrame(frame_index, error)) { |
| return false; |
| } |
| set_resume_action(kStepRewind); |
| resume_frame_index_ = frame_index; |
| return true; |
| case kStepOverAsyncSuspension: |
| return SetupStepOverAsyncSuspension(error); |
| default: |
| UNREACHABLE(); |
| return false; |
| } |
| } |
| |
// Deoptimize all functions in the isolate.
// TODO(hausner): Actually we only need to deoptimize those functions
// that inline the function that contains the newly created breakpoint.
// We currently don't have this info so we deoptimize all functions.
void Debugger::DeoptimizeWorld() {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  NoBackgroundCompilerScope no_bg_compiler(Thread::Current());
  if (FLAG_trace_deoptimization) {
    THR_Print("Deopt for debugger\n");
  }
  isolate_->set_has_attempted_stepping(true);

  DeoptimizeFunctionsOnStack();

  // Iterate over all classes, deoptimize functions.
  // TODO(hausner): Could possibly be combined with RemoveOptimizedCode()
  const ClassTable& class_table = *isolate_->group()->class_table();
  auto thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  auto zone = thread->zone();
  CallSiteResetter resetter(zone);
  Class& cls = Class::Handle(zone);
  Array& functions = Array::Handle(zone);
  Function& function = Function::Handle(zone);
  Code& code = Code::Handle(zone);

  const intptr_t num_classes = class_table.NumCids();
  const intptr_t num_tlc_classes = class_table.NumTopLevelCids();
  // TODO(dartbug.com/36097): Need to stop other mutators running in same IG
  // before deoptimizing the world.
  SafepointWriteRwLocker ml(thread, isolate_group->program_lock());
  // Walk both regular and top-level class ids.
  for (intptr_t i = 1; i < num_classes + num_tlc_classes; i++) {
    const classid_t cid =
        i < num_classes ? i : ClassTable::CidFromTopLevelIndex(i - num_classes);
    if (class_table.HasValidClassAt(cid)) {
      cls = class_table.At(cid);

      // Disable optimized functions.
      functions = cls.functions();
      if (!functions.IsNull()) {
        intptr_t num_functions = functions.Length();
        for (intptr_t pos = 0; pos < num_functions; pos++) {
          function ^= functions.At(pos);
          ASSERT(!function.IsNull());
          // Force-optimized functions don't have unoptimized code and can't
          // deoptimize. Their optimized codes are still valid.
          if (function.ForceOptimize()) {
            ASSERT(!function.HasImplicitClosureFunction());
            continue;
          }
          if (function.HasOptimizedCode()) {
            function.SwitchToUnoptimizedCode();
          }
          code = function.unoptimized_code();
          if (!code.IsNull()) {
            resetter.ResetSwitchableCalls(code);
          }
          // Also disable any optimized implicit closure functions.
          if (function.HasImplicitClosureFunction()) {
            function = function.ImplicitClosureFunction();
            if (function.HasOptimizedCode()) {
              function.SwitchToUnoptimizedCode();
            }
            code = function.unoptimized_code();
            if (!code.IsNull()) {
              resetter.ResetSwitchableCalls(code);
            }
          }
        }
      }
    }
  }

  // Disable optimized closure functions.
  ClosureFunctionsCache::ForAllClosureFunctions([&](const Function& function) {
    if (function.HasOptimizedCode()) {
      function.SwitchToUnoptimizedCode();
    }
    code = function.unoptimized_code();
    if (!code.IsNull()) {
      resetter.ResetSwitchableCalls(code);
    }
    return true;  // Continue iteration.
  });
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
| |
// Turns single-stepping on or off for this debugger's isolate.
void Debugger::NotifySingleStepping(bool value) const {
  isolate_->set_single_step(value);
}
| |
// Builds an ActivationFrame for a Dart stack frame at |pc| in |code|.
// |deopt_frame|/|deopt_frame_offset| carry the materialized values when
// the frame came from optimized code (null array/0 otherwise).
static ActivationFrame* CollectDartFrame(
    Isolate* isolate,
    uword pc,
    StackFrame* frame,
    const Code& code,
    const Array& deopt_frame,
    intptr_t deopt_frame_offset,
    ActivationFrame::Kind kind = ActivationFrame::kRegular) {
  ASSERT(code.ContainsInstructionAt(pc));
  ActivationFrame* activation =
      new ActivationFrame(pc, frame->fp(), frame->sp(), code, deopt_frame,
                          deopt_frame_offset, kind);
  if (FLAG_trace_debugger_stacktrace) {
    const Context& ctx = activation->GetSavedCurrentContext();
    OS::PrintErr("\tUsing saved context: %s\n", ctx.ToCString());
    OS::PrintErr("\tLine number: %" Pd "\n", activation->LineNumber());
  }
  return activation;
}
| |
#if !defined(DART_PRECOMPILED_RUNTIME)
// Materializes the values of an optimized |frame| into a flat array by
// running the deoptimization machinery without actually deoptimizing the
// code. The isolate's deopt context is set only for the duration of the
// call.
static ArrayPtr DeoptimizeToArray(Thread* thread,
                                  StackFrame* frame,
                                  const Code& code) {
  ASSERT(code.is_optimized() && !code.is_force_optimized());
  Isolate* isolate = thread->isolate();
  // Create the DeoptContext for this deoptimization.
  DeoptContext* deopt_context =
      new DeoptContext(frame, code, DeoptContext::kDestIsAllocated, NULL, NULL,
                       true, false /* deoptimizing_code */);
  isolate->set_deopt_context(deopt_context);

  deopt_context->FillDestFrame();
  deopt_context->MaterializeDeferredObjects();
  const Array& dest_frame =
      Array::Handle(thread->zone(), deopt_context->DestFrameAsArray());

  isolate->set_deopt_context(NULL);
  delete deopt_context;

  return dest_frame.ptr();
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
| |
| DebuggerStackTrace* DebuggerStackTrace::Collect() { |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| Isolate* isolate = thread->isolate(); |
| DebuggerStackTrace* stack_trace = new DebuggerStackTrace(8); |
| StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, |
| Thread::Current(), |
| StackFrameIterator::kNoCrossThreadIteration); |
| Code& code = Code::Handle(zone); |
| Code& inlined_code = Code::Handle(zone); |
| Array& deopt_frame = Array::Handle(zone); |
| |
| for (StackFrame* frame = iterator.NextFrame(); frame != NULL; |
| frame = iterator.NextFrame()) { |
| ASSERT(frame->IsValid()); |
| if (FLAG_trace_debugger_stacktrace) { |
| OS::PrintErr("CollectStackTrace: visiting frame:\n\t%s\n", |
| frame->ToCString()); |
| } |
| if (frame->IsDartFrame()) { |
| code = frame->LookupDartCode(); |
| stack_trace->AppendCodeFrames(thread, isolate, zone, frame, &code, |
| &inlined_code, &deopt_frame); |
| } |
| } |
| return stack_trace; |
| } |
| |
// Appends at least one stack frame. Multiple frames will be appended
// if |code| at the frame's pc contains inlined functions.
//
// |code|, |inlined_code| and |deopt_frame| are caller-provided reusable
// handles; on entry |code| holds the frame's code. Force-optimized frames
// are skipped entirely (they carry no deopt info and are non-debuggable).
void DebuggerStackTrace::AppendCodeFrames(Thread* thread,
                                          Isolate* isolate,
                                          Zone* zone,
                                          StackFrame* frame,
                                          Code* code,
                                          Code* inlined_code,
                                          Array* deopt_frame) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (code->is_optimized()) {
    if (code->is_force_optimized()) {
      if (FLAG_trace_debugger_stacktrace) {
        const Function& function = Function::Handle(zone, code->function());
        ASSERT(!function.IsNull());
        OS::PrintErr(
            "CollectStackTrace: skipping force-optimized function: %s\n",
            function.ToFullyQualifiedCString());
      }
      return;  // Skip frame of force-optimized (and non-debuggable) function.
    }
    // Unpack the optimized frame so locals of each inlined function can be
    // read via the per-function deopt frame offset below.
    // TODO(rmacnak): Use CodeSourceMap
    *deopt_frame = DeoptimizeToArray(thread, frame, *code);
    // Iterate innermost-to-outer inlined functions at this pc, appending
    // one activation per inlined function.
    for (InlinedFunctionsIterator it(*code, frame->pc()); !it.Done();
         it.Advance()) {
      *inlined_code = it.code();
      if (FLAG_trace_debugger_stacktrace) {
        const Function& function = Function::Handle(zone, it.function());
        ASSERT(!function.IsNull());
        OS::PrintErr("CollectStackTrace: visiting inlined function: %s\n",
                     function.ToFullyQualifiedCString());
      }
      intptr_t deopt_frame_offset = it.GetDeoptFpOffset();
      AddActivation(CollectDartFrame(isolate, it.pc(), frame, *inlined_code,
                                     *deopt_frame, deopt_frame_offset));
    }
    return;
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  // Unoptimized (or AOT) frame: a single activation with no deopt frame.
  AddActivation(CollectDartFrame(isolate, frame->pc(), frame, *code,
                                 Object::null_array(), 0));
}
| |
| DebuggerStackTrace* DebuggerStackTrace::CollectAsyncCausal() { |
| if (FLAG_lazy_async_stacks) { |
| return CollectAsyncLazy(); |
| } |
| return nullptr; |
| } |
| |
// Builds the async-causal stack trace from the lazily-linked async frames.
// Sync frames on top of the stack are appended (and un-inlined) via
// AppendCodeFrames; everything after the first AsynchronousGapMarker stub
// is treated as async and appended as async-causal frames.
// Returns nullptr when the stack contains no async portion at all.
DebuggerStackTrace* DebuggerStackTrace::CollectAsyncLazy() {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();

  // Reusable handles for the loops below.
  Code& code = Code::Handle(zone);
  Code& inlined_code = Code::Handle(zone);
  Array& deopt_frame = Array::Handle(zone);
  Function& function = Function::Handle(zone);

  constexpr intptr_t kDefaultStackAllocation = 8;
  auto stack_trace = new DebuggerStackTrace(kDefaultStackAllocation);

  // CollectFramesLazy fills |code_array| / |pc_offset_array| with the full
  // (sync + async) frame sequence and calls |on_sync_frame| for each sync
  // frame it visits.
  const auto& code_array = GrowableObjectArray::ZoneHandle(
      zone, GrowableObjectArray::New(kDefaultStackAllocation));
  GrowableArray<uword> pc_offset_array(kDefaultStackAllocation);
  bool has_async = false;

  std::function<void(StackFrame*)> on_sync_frame = [&](StackFrame* frame) {
    code = frame->LookupDartCode();
    stack_trace->AppendCodeFrames(thread, isolate, zone, frame, &code,
                                  &inlined_code, &deopt_frame);
  };

  StackTraceUtils::CollectFramesLazy(thread, code_array, &pc_offset_array,
                                     /*skip_frames=*/0, &on_sync_frame,
                                     &has_async);

  // If the entire stack is sync, return no (async) trace.
  if (!has_async) {
    return nullptr;
  }

  const intptr_t length = code_array.Length();
  bool async_frames = false;
  bool skip_next_gap_marker = false;
  for (intptr_t i = 0; i < length; ++i) {
    code ^= code_array.At(i);
    if (code.ptr() == StubCode::AsynchronousGapMarker().ptr()) {
      // A gap marker separates async activations; it is suppressed when the
      // preceding frame was invisible (see skip_next_gap_marker below).
      if (!skip_next_gap_marker) {
        stack_trace->AddMarker(ActivationFrame::kAsyncSuspensionMarker);
      }
      skip_next_gap_marker = false;

      // Once we reach a gap, the rest is async.
      async_frames = true;
      continue;
    }

    // Skip the sync frames since they've been added (and un-inlined) above.
    if (!async_frames) {
      continue;
    }

    if (!code.IsFunctionCode()) {
      continue;
    }

    // Skip invisible function frames.
    function ^= code.function();
    if (!function.is_visible()) {
      skip_next_gap_marker = true;
      continue;
    }

    // Translate the stored pc offset back to an absolute pc for the frame.
    const uword pc_offset = pc_offset_array[i];
    const uword absolute_pc = code.PayloadStart() + pc_offset;
    stack_trace->AddAsyncCausalFrame(absolute_pc, code);
  }

  return stack_trace;
}
| |
| DebuggerStackTrace* DebuggerStackTrace::CollectAwaiterReturn() { |
| #if defined(DART_PRECOMPILED_RUNTIME) |
| // AOT does not support debugging. |
| ASSERT(!FLAG_async_debugger); |
| return nullptr; |
| #else |
| if (!FLAG_async_debugger) { |
| return nullptr; |
| } |
| |
| Thread* thread = Thread::Current(); |
| Zone* zone = thread->zone(); |
| Isolate* isolate = thread->isolate(); |
| DebuggerStackTrace* stack_trace = new DebuggerStackTrace(8); |
| |
| StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, |
| Thread::Current(), |
| StackFrameIterator::kNoCrossThreadIteration); |
| |
| Code& code = Code::Handle(zone); |
| Function& function = Function::Handle(zone); |
| Code& inlined_code = Code::Handle(zone); |
| Closure& async_activation = Closure::Handle(zone); |
| Array& deopt_frame = Array::Handle(zone); |
| bool stack_has_async_function = false; |
| Closure& closure = Closure::Handle(); |
| |
| CallerClosureFinder caller_closure_finder(zone); |
| |
| for (StackFrame* frame = iterator.NextFrame(); frame != nullptr; |
| frame = iterator.NextFrame()) { |
| ASSERT(frame->IsValid()); |
| if (FLAG_trace_debugger_stacktrace) { |
| OS::PrintErr("CollectAwaiterReturnStackTrace: visiting frame:\n\t%s\n", |
| frame->ToCString()); |
| } |
| |
| if (!frame->IsDartFrame()) { |
| continue; |
| } |
| |
| code = frame->LookupDartCode(); |
| |
| // Simple frame. Just add the one. |
| if (!code.is_optimized()) { |
| function = code.function(); |
| if (function.IsAsyncClosure() || function.IsAsyncGenClosure()) { |
| ActivationFrame* activation = CollectDartFrame( |
| isolate, frame->pc(), frame, code, Object::null_array(), 0, |
| ActivationFrame::kAsyncActivation); |
| ASSERT(activation != nullptr); |
| stack_trace->AddActivation(activation); |
| stack_has_async_function = true; |
| // Grab the awaiter. |
| async_activation ^= activation->GetAsyncAwaiter(&caller_closure_finder); |
| // Bail if we've reach the end of sync execution stack. |
| ObjectPtr* last_caller_obj = |
| reinterpret_cast<ObjectPtr*>(frame->GetCallerSp()); |
| closure = |
| StackTraceUtils::FindClosureInFrame(last_caller_obj, function); |
| if (caller_closure_finder.IsRunningAsync(closure)) { |
| break; |
| } |
| } else { |
| stack_trace->AddActivation(CollectDartFrame( |
| isolate, frame->pc(), frame, code, Object::null_array(), 0)); |
| } |
| |
| continue; |
| } |
| |
| if (code.is_force_optimized()) { |
| if (FLAG_trace_debugger_stacktrace) { |
| function = code.function(); |
| ASSERT(!function.IsNull()); |
| OS::PrintErr( |
| "CollectAwaiterReturnStackTrace: " |
| "skipping force-optimized function: %s\n", |
| function.ToFullyQualifiedCString()); |
| } |
| // Skip frame of force-optimized (and non-debuggable) function. |
| continue; |
| } |
| |
| deopt_frame = DeoptimizeToArray(thread, frame, code); |
| bool found_async_awaiter = false; |
| for (InlinedFunctionsIterator it(code, frame->pc()); !it.Done(); |
| it.Advance()) { |
| inlined_code = it.code(); |
| function = it.function(); |
| |
| if (FLAG_trace_debugger_stacktrace) { |
| ASSERT(!function.IsNull()); |
| OS::PrintErr( |
| "CollectAwaiterReturnStackTrace: " |
| "visiting inlined function: %s\n ", |
| function.ToFullyQualifiedCString()); |
| } |
| |
| intptr_t deopt_frame_offset = it.GetDeoptFpOffset(); |
| if (function.IsAsyncClosure() || function.IsAsyncGenClosure()) { |
| ActivationFrame* activation = CollectDartFrame( |
| isolate, it.pc(), frame, inlined_code, deopt_frame, |
| deopt_frame_offset, ActivationFrame::kAsyncActivation); |
| ASSERT(activation != NULL); |
| stack_trace->AddActivation(activation); |
| stack_has_async_function = true; |
| // Grab the awaiter. |
| async_activation ^= activation->GetAsyncAwaiter(&caller_closure_finder); |
| found_async_awaiter = true; |
| } else { |
| stack_trace->AddActivation(CollectDartFrame(isolate, it.pc(), frame, |
| inlined_code, deopt_frame, |
| deopt_frame_offset)); |
| } |
| } |
| |
| // Break out of outer loop. |
| if (found_async_awaiter) { |
| break; |
| } |
| } |
| |
| // If the stack doesn't have any async functions on it, return nullptr. |
| if (!stack_has_async_function) { |
| return nullptr; |
| } |
| |
| // Append the awaiter return call stack. |
| while (!async_activation.IsNull() && |
| async_activation.context() != Object::null()) { |
| ActivationFrame* activation = new (zone) ActivationFrame(async_activation); |
| if (activation->function().IsAsyncClosure() || |
| activation->function().IsAsyncGenClosure()) { |
| activation->ExtractTokenPositionFromAsyncClosure(); |
| } |
| stack_trace->AddActivation(activation); |
| if (FLAG_trace_debugger_stacktrace) { |
| OS::PrintErr( |
| "CollectAwaiterReturnStackTrace: visiting awaiter return " |
| "closures:\n\t%s\n", |
| activation->function().ToFullyQualifiedCString()); |
| } |
| async_activation = caller_closure_finder.FindCaller(async_activation); |
| } |
| |
| return stack_trace; |
| #endif // defined(DART_PRECOMPILED_RUNTIME) |
| } |
| |
| static ActivationFrame* TopDartFrame() { |
| StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, |
| Thread::Current(), |
| StackFrameIterator::kNoCrossThreadIteration); |
| StackFrame* frame; |
| while (true) { |
| frame = iterator.NextFrame(); |
| RELEASE_ASSERT(frame != nullptr); |
| if (!frame->IsDartFrame()) { |
| continue; |
| } |
| Code& code = Code::Handle(frame->LookupDartCode()); |
| ActivationFrame* activation = new ActivationFrame( |
| frame->pc(), frame->fp(), frame->sp(), code, Object::null_array(), 0); |
| return activation; |
| } |
| } |
| |
| DebuggerStackTrace* Debugger::StackTrace() { |
| return (stack_trace_ != NULL) ? stack_trace_ : DebuggerStackTrace::Collect(); |
| } |
| |
| DebuggerStackTrace* Debugger::AsyncCausalStackTrace() { |
| return (async_causal_stack_trace_ != NULL) |
| ? async_causal_stack_trace_ |
| : DebuggerStackTrace::CollectAsyncCausal(); |
| } |
| |
| DebuggerStackTrace* Debugger::AwaiterStackTrace() { |
| return (awaiter_stack_trace_ != NULL) |
| ? awaiter_stack_trace_ |
| : DebuggerStackTrace::CollectAwaiterReturn(); |
| } |
| |
| DebuggerStackTrace* DebuggerStackTrace::From(const class StackTrace& ex_trace) { |
| DebuggerStackTrace* stack_trace = new DebuggerStackTrace(8); |
| Function& function = Function::Handle(); |
| Object& code_object = Object::Handle(); |
| Code& code = Code::Handle(); |
| |
| const uword fp = 0; |
| const uword sp = 0; |
| const Array& deopt_frame = Array::Handle(); |
| const intptr_t deopt_frame_offset = -1; |
| |
| for (intptr_t i = 0; i < ex_trace.Length(); i++) { |
| code_object = ex_trace.CodeAtFrame(i); |
| // Pre-allocated StackTraces may include empty slots, either (a) to indicate |
| // where frames were omitted in the case a stack has more frames than the |
| // pre-allocated trace (such as a stack overflow) or (b) because a stack has |
| // fewer frames that the pre-allocated trace (such as memory exhaustion with |
| // a shallow stack). |
| if (!code_object.IsNull()) { |
| code ^= code_object.ptr(); |
| ASSERT(code.IsFunctionCode()); |
| function = code.function(); |
| if (function.is_visible()) { |
| ASSERT(function.ptr() == code.function()); |
| uword pc = code.PayloadStart() + ex_trace.PcOffsetAtFrame(i); |
| if (code.is_optimized() && ex_trace.expand_inlined()) { |
| // Traverse inlined frames. |
| for (InlinedFunctionsIterator it(code, pc); !it.Done(); |
| it.Advance()) { |
| function = it.function(); |
| code = it.code(); |
| ASSERT(function.ptr() == code.function()); |
| uword pc = it.pc(); |
| ASSERT(pc != 0); |
| ASSERT(code.PayloadStart() <= pc); |
| ASSERT(pc < (code.PayloadStart() + code.Size())); |
| |
| ActivationFrame* activation = new ActivationFrame( |
| pc, fp, sp, code, deopt_frame, deopt_frame_offset); |
| stack_trace->AddActivation(activation); |
| } |
| } else { |
| ActivationFrame* activation = new ActivationFrame( |
| pc, fp, sp, code, deopt_frame, deopt_frame_offset); |
| stack_trace->AddActivation(activation); |
| } |
| } |
| } |
| } |
| return stack_trace; |
| } |
| |
| void Debugger::SetExceptionPauseInfo(Dart_ExceptionPauseInfo pause_info) { |
| ASSERT((pause_info == kNoPauseOnExceptions) || |
| (pause_info == kPauseOnUnhandledExceptions) || |
| (pause_info == kPauseOnAllExceptions)); |
| exc_pause_info_ = pause_info; |
| } |
| |
// Returns the current exception-pause policy set via SetExceptionPauseInfo.
Dart_ExceptionPauseInfo Debugger::GetExceptionPauseInfo() const {
  return exc_pause_info_;
}
| |
| bool Debugger::ShouldPauseOnException(DebuggerStackTrace* stack_trace, |
| const Instance& exception) { |
| if (exc_pause_info_ == kNoPauseOnExceptions) { |
| return false; |
| } |
| if (exc_pause_info_ == kPauseOnAllExceptions) { |
| return true; |
| } |
| ASSERT(exc_pause_info_ == kPauseOnUnhandledExceptions); |
| // Exceptions coming from invalid token positions should be skipped |
| ActivationFrame* top_frame = stack_trace->FrameAt(0); |
| if (!top_frame->TokenPos().IsReal() && top_frame->TryIndex() != -1) { |
| return false; |
| } |
| ActivationFrame* handler_frame = stack_trace->GetHandlerFrame(exception); |
| if (handler_frame == nullptr) { |
| // Did not find an exception handler that catches this exception. |
| // Note that this check is not precise, since we can't check |
| // uninstantiated types, i.e. types containing type parameters. |
| // Thus, we may report an exception as unhandled when in fact |
| // it will be caught once we unwind the stack. |
| return true; |
| } |
| |
| auto& handler_function = Function::Handle(handler_frame->function().ptr()); |
| // If the handler function is an synthetic inner function, we need to look for |
| // the annotations on the outer function. |
| if (handler_function.IsAsyncClosure()) { |
| // async :async_op |
| handler_function = handler_function.parent_function(); |
| } else if (handler_frame->function().IsAsyncGenClosure()) { |
| // async* :async_op |
| handler_function = handler_function.parent_function(); |
| } else if (handler_frame->function().IsSyncGenClosure()) { |
| // sync* :sync_op + :sync_op_gen |
| handler_function = handler_function.parent_function(); |
| handler_function = handler_function.parent_function(); |
| } |
| |
| // If handler_frame's function is annotated with |
| // @pragma('vm:notify-debugger-on-exception'), we specifically want to notify |
| // the debugger of this otherwise ignored exception. |
| if (Library::FindPragma(Thread::Current(), /*only_core=*/false, |
| handler_function, |
| Symbols::vm_notify_debugger_on_exception())) { |
| return true; |
| } |
| return false; |
| } |
| |
| void Debugger::PauseException(const Instance& exc) { |
| if (FLAG_stress_async_stacks) { |
| DebuggerStackTrace::CollectAwaiterReturn(); |
| } |
| // We ignore this exception event when the VM is executing code invoked |
| // by the debugger to evaluate variables values, when we see a nested |
| // breakpoint or exception event, or if the debugger is not |
| // interested in exception events. |
| if (ignore_breakpoints_ || IsPaused() || |
| (exc_pause_info_ == kNoPauseOnExceptions)) { |
| return; |
| } |
| DebuggerStackTrace* awaiter_stack_trace = |
| DebuggerStackTrace::CollectAwaiterReturn(); |
| DebuggerStackTrace* stack_trace = DebuggerStackTrace::Collect(); |
| if (awaiter_stack_trace != NULL) { |
| if (!ShouldPauseOnException(awaiter_stack_trace, exc)) { |
| return; |
| } |
| } else { |
| if (!ShouldPauseOnException(stack_trace, exc)) { |
| return; |
| } |
| } |
| ServiceEvent event(isolate_, ServiceEvent::kPauseException); |
| event.set_exception(&exc); |
| if (stack_trace->Length() > 0) { |
| event.set_top_frame(stack_trace->FrameAt(0)); |
| } |
| CacheStackTraces(stack_trace, DebuggerStackTrace::CollectAsyncCausal(), |
| DebuggerStackTrace::CollectAwaiterReturn()); |
| Pause(&event); |
| HandleSteppingRequest(stack_trace_); // we may get a rewind request |
| ClearCachedStackTraces(); |
| } |
| |
// Helper to refine the resolved token pos (first pass of
// ResolveBreakpointPos). Considers safepoint candidate |pos| and updates
// the running best fit (*best_fit_pos / *best_line / *best_column /
// *best_token_pos) in place when |pos| is a better match for the requested
// line/column.
static void RefineBreakpointPos(const Script& script,
                                TokenPosition pos,
                                TokenPosition next_closest_token_position,
                                TokenPosition requested_token_pos,
                                TokenPosition last_token_pos,
                                intptr_t requested_column,
                                TokenPosition exact_token_pos,
                                TokenPosition* best_fit_pos,
                                intptr_t* best_column,
                                intptr_t* best_line,
                                TokenPosition* best_token_pos) {
  intptr_t token_start_column = -1;
  intptr_t token_line = -1;
  if (requested_column >= 0) {
    TokenPosition ignored = TokenPosition::kNoSource;
    TokenPosition end_of_line_pos = TokenPosition::kNoSource;
    script.GetTokenLocation(pos, &token_line, &token_start_column);
    script.TokenRangeAtLine(token_line, &ignored, &end_of_line_pos);
    // The candidate token extends to the next safepoint or end of line,
    // whichever comes first.
    TokenPosition token_end_pos =
        TokenPosition::Min(next_closest_token_position, end_of_line_pos);

    // Reject the candidate when it ends before the exact requested position
    // (the requested column lies past this token), or when it starts at a
    // higher column than the current best.
    if ((token_end_pos.IsReal() && exact_token_pos.IsReal() &&
         (token_end_pos < exact_token_pos)) ||
        (token_start_column > *best_column)) {
      // Prefer the token with the lowest column number compatible
      // with the requested column.
      return;
    }
  }

  // Prefer the lowest (first) token pos.
  if (pos < *best_fit_pos) {
    *best_fit_pos = pos;
    *best_line = token_line;
    *best_column = token_start_column;
    // best_token_pos should only be real when the column number is specified.
    if (requested_column >= 0 && exact_token_pos.IsReal()) {
      *best_token_pos = TokenPosition::Deserialize(
          exact_token_pos.Pos() - (requested_column - *best_column));
    }
  }
}
| |
| // Returns the best fit token position for a breakpoint. |
| // |
| // Takes a range of tokens [requested_token_pos, last_token_pos] and |
| // an optional column (requested_column). The range of tokens usually |
| // represents one line of the program text, but can represent a larger |
| // range on recursive calls. |
| // |
| // The best fit is found in two passes. |
| // |
| // The first pass finds a candidate token which: |
| // |
| // - is a safepoint, |
| // - has the lowest column number compatible with the requested column |
| // if a column has been specified, |
| // and: |
| // - has the lowest token position number which satisfies the above. |
| // |
| // When we consider a column number, we look for the token which |
| // intersects the desired column. For example: |
| // |
| // 1 2 3 |
| // 12345678901234567890 0 |
| // |
| // var x = function(function(y)); |
| // ^ |
| // |
// If we request a breakpoint at column 14, the lowest column number
// compatible with that would be column 11 (beginning of the
// 'function' token) in the example above.
| // |
| // Once this candidate token from the first pass is found, we then |
| // have a second pass which considers only those tokens on the same |
| // line as the candidate token. |
| // |
| // The second pass finds a best fit token which: |
| // |
| // - is a safepoint, |
| // - has the same column number as the candidate token (perhaps |
| // more than one token has the same column number), |
| // and: |
| // - has the lowest code address in the generated code. |
| // |
| // We prefer the lowest compiled code address, because this tends to |
| // select the first subexpression on a line. For example in a line |
| // with nested function calls f(g(x)), the call to g() will have a |
| // lower compiled code address than the call to f(). |
| // |
| // If no best fit token can be found, the search is expanded, |
| // searching through the rest of the current function by calling this |
| // function recursively. |
| // |
| // TODO(turnidge): Given that we usually call this function with a |
| // token range restricted to a single line, this could be a one-pass |
| // algorithm, which would be simpler. I believe that it only needs |
| // two passes to support the recursive try-the-whole-function case. |
| // Rewrite this later, once there are more tests in place. |
// See the extensive comment above for the two-pass algorithm this
// implements. |func| must be compiled (unoptimized code present), since the
// search runs over its PcDescriptors safepoints.
static TokenPosition ResolveBreakpointPos(const Function& func,
                                          TokenPosition requested_token_pos,
                                          TokenPosition last_token_pos,
                                          intptr_t requested_column,
                                          TokenPosition exact_token_pos) {
  ASSERT(!func.HasOptimizedCode());

  // Clamp the requested range to the function's own token range.
  requested_token_pos =
      TokenPosition::Max(requested_token_pos, func.token_pos());
  last_token_pos = TokenPosition::Min(last_token_pos, func.end_token_pos());

  Zone* zone = Thread::Current()->zone();
  Script& script = Script::Handle(zone, func.script());
  Code& code = Code::Handle(zone);
  PcDescriptors& desc = PcDescriptors::Handle(zone);
  ASSERT(func.HasCode());
  code = func.unoptimized_code();
  ASSERT(!code.IsNull());
  desc = code.pc_descriptors();

  // First pass: find the safe point which is closest to the beginning
  // of the given token range.
  TokenPosition best_fit_pos = TokenPosition::kMaxSource;
  intptr_t best_column = INT_MAX;
  intptr_t best_line = INT_MAX;
  // best_token_pos is only set to a real position if a real exact_token_pos
  // and a column number are provided.
  TokenPosition best_token_pos = TokenPosition::kNoSource;

  PcDescriptors::Iterator iter(desc, kSafepointKind);
  while (iter.MoveNext()) {
    const TokenPosition& pos = iter.TokenPos();
    if (pos.IsSynthetic() && pos == requested_token_pos) {
      // if there's a safepoint for a synthetic function start and the start
      // was requested, we're done.
      return pos;
    }
    if (!pos.IsWithin(requested_token_pos, last_token_pos)) {
      // Token is not in the target range.
      continue;
    }
    TokenPosition next_closest_token_position = TokenPosition::kMaxSource;
    if (requested_column >= 0) {
      // Find next closest safepoint
      PcDescriptors::Iterator iter2(desc, kSafepointKind);
      while (iter2.MoveNext()) {
        const TokenPosition& next = iter2.TokenPos();
        if (!next.IsReal()) continue;
        if ((pos < next) && (next < next_closest_token_position)) {
          next_closest_token_position = next;
        }
      }
    }
    RefineBreakpointPos(script, pos, next_closest_token_position,
                        requested_token_pos, last_token_pos, requested_column,
                        exact_token_pos, &best_fit_pos, &best_column,
                        &best_line, &best_token_pos);
  }

  // Second pass (if we found a safe point in the first pass). Find
  // the token on the line which is at the best fit column (if column
  // was specified) and has the lowest code address.
  if (best_fit_pos != TokenPosition::kMaxSource) {
    ASSERT(best_fit_pos.IsReal());
    const Script& script = Script::Handle(zone, func.script());
    const TokenPosition begin_pos = best_fit_pos;

    TokenPosition end_of_line_pos = TokenPosition::kNoSource;
    if (best_line < 0) {
      // The first pass only computed the line when a column was requested;
      // compute it now from the best fit position.
      script.GetTokenLocation(begin_pos, &best_line);
    }
    ASSERT(best_line > 0);
    TokenPosition ignored = TokenPosition::kNoSource;
    script.TokenRangeAtLine(best_line, &ignored, &end_of_line_pos);
    end_of_line_pos = TokenPosition::Max(end_of_line_pos, begin_pos);

    uword lowest_pc_offset = kUwordMax;
    PcDescriptors::Iterator iter(desc, kSafepointKind);
    while (iter.MoveNext()) {
      const TokenPosition& pos = iter.TokenPos();
      if (best_token_pos.IsReal()) {
        if (pos != best_token_pos) {
          // Not a match for the requested column.
          continue;
        }
      } else if (!pos.IsWithin(begin_pos, end_of_line_pos)) {
        // Token is not on same line as best fit.
        continue;
      }

      // Prefer the lowest pc offset.
      if (iter.PcOffset() < lowest_pc_offset) {
        lowest_pc_offset = iter.PcOffset();
        best_fit_pos = pos;
      }
    }
    return best_fit_pos;
  }

  // We didn't find a safe point in the given token range. Try and
  // find a safe point in the remaining source code of the function.
  // Since we have moved to the next line of the function, we no
  // longer are requesting a specific column number.
  if (last_token_pos < func.end_token_pos()) {
    return ResolveBreakpointPos(func, last_token_pos, func.end_token_pos(),
                                -1 /* no column */, TokenPosition::kNoSource);
  }
  return TokenPosition::kNoSource;
}
| |
// Resolves this breakpoint location against the newly compiled
// |target_function|, if not already resolved. Returns true on success and
// notifies the service of every breakpoint at this location; returns false
// when no debug-pause position could be found in the requested range.
bool BreakpointLocation::EnsureIsResolved(const Function& target_function,
                                          TokenPosition exact_token_pos) {
  if (IsResolved()) {
    return true;
  }

  // Resolve source breakpoint in the newly compiled function.
  TokenPosition resolved_pos =
      ResolveBreakpointPos(target_function, token_pos(), end_token_pos(),
                           requested_column_number(), exact_token_pos);
  if (!resolved_pos.IsDebugPause()) {
    if (FLAG_verbose_debug) {
      OS::PrintErr("Failed resolving breakpoint for function '%s'\n",
                   target_function.ToFullyQualifiedCString());
    }
    return false;
  }
  // Remember the requested range for logging before SetResolved
  // overwrites it.
  TokenPosition requested_pos = token_pos();
  TokenPosition requested_end_pos = end_token_pos();
  SetResolved(target_function, resolved_pos);
  // Announce the resolution for every breakpoint registered at this
  // location.
  Breakpoint* breakpoint = breakpoints();
  while (breakpoint != nullptr) {
    if (FLAG_verbose_debug) {
      OS::PrintErr("Resolved breakpoint %" Pd
                   " to pos %s, function '%s' (requested range %s-%s, "
                   "requested col %" Pd ")\n",
                   breakpoint->id(), token_pos().ToCString(),
                   target_function.ToFullyQualifiedCString(),
                   requested_pos.ToCString(), requested_end_pos.ToCString(),
                   requested_column_number());
    }
    debugger()->SendBreakpointEvent(ServiceEvent::kBreakpointResolved,
                                    breakpoint);
    breakpoint = breakpoint->next();
  }

  return true;
}
| |
| void GroupDebugger::MakeCodeBreakpointAt(const Function& func, |
| BreakpointLocation* loc) { |
| ASSERT(loc->token_pos().IsReal()); |
| ASSERT((loc != NULL) && loc->IsResolved()); |
| ASSERT(!func.HasOptimizedCode()); |
| ASSERT(func.HasCode()); |
| Code& code = Code::Handle(func.unoptimized_code()); |
| ASSERT(!code.IsNull()); |
| PcDescriptors& desc = PcDescriptors::Handle(code.pc_descriptors()); |
| uword lowest_pc_offset = kUwordMax; |
| UntaggedPcDescriptors::Kind lowest_kind = UntaggedPcDescriptors::kAnyKind; |
| // Find the safe point with the lowest compiled code address |
| // that maps to the token position of the source breakpoint. |
| PcDescriptors::Iterator iter(desc, kSafepointKind); |
| while (iter.MoveNext()) { |
| if (iter.TokenPos() == loc->token_pos_) { |
| if (iter.PcOffset() < lowest_pc_offset) { |
| lowest_pc_offset = iter.PcOffset(); |
| lowest_kind = iter.Kind(); |
| } |
| } |
| } |
| if (lowest_pc_offset == kUwordMax) { |
| return; |
| } |
| |
| uword lowest_pc = code.PayloadStart() + lowest_pc_offset; |
| SafepointWriteRwLocker sl(Thread::Current(), code_breakpoints_lock()); |
| CodeBreakpoint* code_bpt = GetCodeBreakpoint(lowest_pc); |
| if (code_bpt == nullptr) { |
| // No code breakpoint for this code exists; create one. |
| code_bpt = new CodeBreakpoint(code, loc, lowest_pc, lowest_kind); |
| if (FLAG_verbose_debug) { |
| OS::PrintErr("Setting code breakpoint at pos %s pc %#" Px " offset %#" Px |
| "\n", |
| loc->token_pos().ToCString(), lowest_pc, |
| lowest_pc - code.PayloadStart()); |
| } |
| RegisterCodeBreakpoint(code_bpt); |
| } else { |
| if (FLAG_verbose_debug) { |
| OS::PrintErr( |
| "Adding location to existing code breakpoint at pos %s pc %#" Px |
| " offset %#" Px "\n", |
| loc->token_pos().ToCString(), lowest_pc, |
| lowest_pc - code.PayloadStart()); |
| } |
| if (!code_bpt->HasBreakpointLocation(loc)) { |
| code_bpt->AddBreakpointLocation(loc); |
| } |
| } |
| if (loc->AnyEnabled()) { |
| code_bpt->Enable(); |
| } |
| } |
| |
// Collects into |code_function_list| every compiled, debuggable function
// whose token range is exactly [start_pos, end_pos] in one of |scripts|.
// Searches both the closure-functions cache and all finalized classes
// (including implicit closure functions of matching class members).
void Debugger::FindCompiledFunctions(
    const GrowableHandlePtrArray<const Script>& scripts,
    TokenPosition start_pos,
    TokenPosition end_pos,
    GrowableObjectArray* code_function_list) {
  auto thread = Thread::Current();
  auto zone = thread->zone();
  Script& script = Script::Handle(zone);
  for (intptr_t i = 0; i < scripts.length(); ++i) {
    script = scripts.At(i).ptr();
    // Closure functions are not reachable through the class table, so they
    // are searched via the dedicated cache.
    ClosureFunctionsCache::ForAllClosureFunctions(
        [&](const Function& function) {
          ASSERT(!function.IsNull());
          if ((function.token_pos() == start_pos) &&
              (function.end_token_pos() == end_pos) &&
              (function.script() == script.ptr())) {
            if (function.is_debuggable() && function.HasCode()) {
              code_function_list->Add(function);
            }
            ASSERT(!function.HasImplicitClosureFunction());
          }
          return true;  // Continue iteration.
        });

    Class& cls = Class::Handle(zone);
    Function& function = Function::Handle(zone);
    Array& functions = Array::Handle(zone);

    const ClassTable& class_table = *isolate_->group()->class_table();
    const intptr_t num_classes = class_table.NumCids();
    const intptr_t num_tlc_classes = class_table.NumTopLevelCids();
    // Iterate ordinary cids first, then top-level cids (mapped back from
    // their index past num_classes).
    for (intptr_t i = 1; i < num_classes + num_tlc_classes; i++) {
      const classid_t cid =
          i < num_classes ? i
                          : ClassTable::CidFromTopLevelIndex(i - num_classes);
      if (class_table.HasValidClassAt(cid)) {
        cls = class_table.At(cid);
        // If the class is not finalized, e.g. if it hasn't been parsed
        // yet entirely, we can ignore it. If it contains a function with
        // an unresolved breakpoint, we will detect it if and when the
        // function gets compiled.
        if (!cls.is_finalized()) {
          continue;
        }
        // Note: we need to check the functions of this class even if
        // the class is defined in a different 'script'. There could
        // be mixin functions from the given script in this class.
        functions = cls.current_functions();
        if (!functions.IsNull()) {
          const intptr_t num_functions = functions.Length();
          for (intptr_t pos = 0; pos < num_functions; pos++) {
            function ^= functions.At(pos);
            ASSERT(!function.IsNull());
            bool function_added = false;
            if (function.is_debuggable() && function.HasCode() &&
                function.token_pos() == start_pos &&
                function.end_token_pos() == end_pos &&
                function.script() == script.ptr()) {
              code_function_list->Add(function);
              function_added = true;
            }
            // A matching member's implicit closure (tear-off) shares its
            // token range, so add it too when compiled.
            if (function_added && function.HasImplicitClosureFunction()) {
              function = function.ImplicitClosureFunction();
              if (function.is_debuggable() && function.HasCode()) {
                code_function_list->Add(function);
              }
            }
          }
        }
      }
    }
  }
}
| |
| static void UpdateBestFit(Function* best_fit, const Function& func) { |
| if (best_fit->IsNull()) { |
| *best_fit = func.ptr(); |
| } else if ((best_fit->token_pos().IsSynthetic() || |
| func.token_pos().IsSynthetic() || |
| (best_fit->token_pos() < func.token_pos())) && |
| (func.end_token_pos() <= best_fit->end_token_pos())) { |
| *best_fit = func.ptr(); |
| } |
| } |
| |
| // Returns true if a best fit is found. A best fit can either be a function |
| // or a field. If it is a function, then the best fit function is returned |
| // in |best_fit|. If a best fit is a field, it means that a latent |
| // breakpoint can be set in the range |token_pos| to |last_token_pos|. |
| bool Debugger::FindBestFit(const Script& script, |
| TokenPosition token_pos, |
| TokenPosition last_token_pos, |
| Function* best_fit) { |
| auto thread = Thread::Current(); |
| auto isolate_group = thread->isolate_group(); |
| Zone* zone = thread->zone(); |
| Class& cls = Class::Handle(zone); |
| |
| // A single script can belong to several libraries because of mixins. |
| // Go through all libraries and for each that contains the script, try to find |
| // a fit there. |
| // Return the first fit found, but if a library doesn't contain a fit, |
| // process the next one. |
| const GrowableObjectArray& libs = GrowableObjectArray::Handle( |
| zone, isolate_group->object_store()->libraries()); |
| Library& lib = Library::Handle(zone); |
| for (int i = 0; i < libs.Length(); i++) { |
| lib ^= libs.At(i); |
| ASSERT(!lib.IsNull()); |
| const Array& scripts = Array::Handle(zone, lib.LoadedScripts()); |
| bool lib_has_script = false; |
| for (intptr_t j = 0; j < scripts.Length(); j++) { |
| if (scripts.At(j) == script.ptr()) { |
| lib_has_script = true; |
| break; |
| } |
| } |
| if (!lib_has_script) { |
| continue; |
| } |
| |
| if (!lib.IsDebuggable()) { |
| if (FLAG_verbose_debug) { |
| OS::PrintErr("Library '%s' has been marked as non-debuggable\n", |
| lib.ToCString()); |
| } |
| continue; |
| } |
| |
| const String& script_url = String::Handle(zone, script.url()); |
| ClosureFunctionsCache::ForAllClosureFunctions([&](const Function& fun) { |
| if (FunctionOverlaps(fun, script_url, token_pos, last_token_pos)) { |
| // Select the inner most closure. |
| UpdateBestFit(best_fit, fun); |
| } |
| return true; // Continue iteration |
| }); |
| |
| if (!best_fit->IsNull()) { |
| // The inner most closure found will be the best fit. Going |
| // over class functions below will not help in any further |
| // narrowing. |
| return true; |
| } |
| |
| Array& functions = Array::Handle(zone); |
| Function& function = Function::Handle(zone); |
| Array& fields = Array::Handle(zone); |
| Field& field = Field::Handle(zone); |
| Error& error = Error::Handle(zone); |
| |
| const ClassTable& class_table = *isolate_->group()->class_table(); |
|
|