Version 2.12.0-167.0.dev

Merge commit 'd017b8eee8e89545cfd1f23b88519faecff7e2f0' into 'dev'
diff --git a/.dart_tool/package_config.json b/.dart_tool/package_config.json
index 0ab3d98..d95fe07 100644
--- a/.dart_tool/package_config.json
+++ b/.dart_tool/package_config.json
@@ -11,7 +11,7 @@
     "constraint, update this by running tools/generate_package_config.dart."
   ],
   "configVersion": 2,
-  "generated": "2020-12-03T14:26:48.568312",
+  "generated": "2020-12-16T11:30:30.799202",
   "generator": "tools/generate_package_config.dart",
   "packages": [
     {
@@ -740,7 +740,7 @@
       "name": "watcher",
       "rootUri": "../third_party/pkg/watcher",
       "packageUri": "lib/",
-      "languageVersion": "2.2"
+      "languageVersion": "2.12"
     },
     {
       "name": "web_components",
diff --git a/DEPS b/DEPS
index 012166d..e2f8f86 100644
--- a/DEPS
+++ b/DEPS
@@ -161,7 +161,7 @@
   "typed_data_tag": "f94fc57b8e8c0e4fe4ff6cfd8290b94af52d3719",
   "usage_tag": "16fbfd90c58f16e016a295a880bc722d2547d2c9",
   "vector_math_rev": "0c9f5d68c047813a6dcdeb88ba7a42daddf25025",
-  "watcher_rev": "64e254eba16f56d41f10d72c0b1cb24e130e1f8b",
+  "watcher_rev": "1fb0a84acd8d195103f10aba03ba6bd6fdb424e5",
   "webdriver_rev": "5a8d6805d9cf8a3cbb4fcd64849b538b7491e50e",
   "web_components_rev": "8f57dac273412a7172c8ade6f361b407e2e4ed02",
   "web_socket_channel_rev": "490061ef0e22d3c8460ad2802f9948219365ad6b",
diff --git a/runtime/platform/utils.h b/runtime/platform/utils.h
index 12bb667..eecab24 100644
--- a/runtime/platform/utils.h
+++ b/runtime/platform/utils.h
@@ -361,15 +361,17 @@
     return ((-0x20000000000000LL <= value) && (value <= 0x20000000000000LL));
   }
 
+  static constexpr uword NBitMaskUnsafe(uint32_t n) {
+    static_assert((sizeof(uword) * kBitsPerByte) == kBitsPerWord,
+                  "Unexpected uword size");
+    return n == kBitsPerWord ? std::numeric_limits<uword>::max()
+                             : (static_cast<uword>(1) << n) - 1;
+  }
+
   // The lowest n bits are 1, the others are 0.
   static uword NBitMask(uint32_t n) {
     ASSERT(n <= kBitsPerWord);
-    if (n == kBitsPerWord) {
-      static_assert((sizeof(uword) * kBitsPerByte) == kBitsPerWord,
-                            "Unexpected uword size");
-      return std::numeric_limits<uword>::max();
-    }
-    return (static_cast<uword>(1) << n) - 1;
+    return NBitMaskUnsafe(n);
   }
 
   static word SignedNBitMask(uint32_t n) {
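
The NBitMask change above splits out a constexpr NBitMaskUnsafe so the mask can
be computed in constant expressions (it is used for kMaxArgValue in
code_descriptors.h below) while still special-casing n == kBitsPerWord, because
shifting a value by its full bit width is undefined behavior in C++. A minimal
standalone sketch of the same pattern, with uword, kBitsPerByte, and
kBitsPerWord redefined locally rather than taken from the VM headers:

#include <cassert>
#include <cstdint>
#include <limits>

using uword = uintptr_t;
constexpr uint32_t kBitsPerByte = 8;
constexpr uint32_t kBitsPerWord = sizeof(uword) * kBitsPerByte;

// constexpr-friendly variant: no assert, so callers must guarantee that
// n <= kBitsPerWord holds.
constexpr uword NBitMaskUnsafe(uint32_t n) {
  // Shifting a uword by kBitsPerWord would be undefined behavior, so the
  // full-width mask is produced explicitly instead.
  return n == kBitsPerWord ? std::numeric_limits<uword>::max()
                           : (static_cast<uword>(1) << n) - 1;
}

// Runtime-checked wrapper, mirroring the NBitMask/NBitMaskUnsafe split above.
inline uword NBitMask(uint32_t n) {
  assert(n <= kBitsPerWord);
  return NBitMaskUnsafe(n);
}

static_assert(NBitMaskUnsafe(3) == 0x7u, "low three bits set");
static_assert(NBitMaskUnsafe(kBitsPerWord) == ~static_cast<uword>(0),
              "full-width mask with no undefined shift");
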
diff --git a/runtime/vm/bitfield.h b/runtime/vm/bitfield.h
index c033379..1eaa94d 100644
--- a/runtime/vm/bitfield.h
+++ b/runtime/vm/bitfield.h
@@ -14,7 +14,10 @@
 
 // BitField is a template for encoding and decoding a value of type T
 // inside a storage of type S.
-template <typename S, typename T, int position, int size>
+template <typename S,
+          typename T,
+          int position,
+          int size = (sizeof(S) * kBitsPerByte) - position>
 class BitField {
  public:
   typedef T Type;
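
The added default for the size parameter makes a BitField cover every bit of
the storage type from position upward, which is how ArgField in
code_descriptors.h below claims the 29 bits above the 3-bit OpField. A hedged
sketch of the idea using a simplified stand-in class (SimpleBitField and its
members are illustrative, not the VM's BitField implementation):

#include <cstdint>
#include <type_traits>

constexpr int kBitsPerByte = 8;

// Simplified stand-in for the VM's BitField: when size is omitted, the field
// spans every bit of S from position up to the top of the storage type.
template <typename S,
          typename T,
          int position,
          int size = (sizeof(S) * kBitsPerByte) - position>
class SimpleBitField {
  using U = typename std::make_unsigned<S>::type;

 public:
  static constexpr U mask() { return (U{1} << size) - U{1}; }
  static constexpr S encode(T value) {
    return static_cast<S>((static_cast<U>(value) & mask()) << position);
  }
  static constexpr T decode(S storage) {
    return static_cast<T>((static_cast<U>(storage) >> position) & mask());
  }
  static constexpr int kNextBit = position + size;
};

// A 3-bit opcode in the low bits; the argument field takes the defaulted size
// and so fills the remaining 29 bits of the int32_t storage.
using OpField = SimpleBitField<int32_t, uint8_t, 0, 3>;
using ArgField = SimpleBitField<int32_t, int32_t, OpField::kNextBit>;

static_assert(ArgField::kNextBit == 32, "defaulted size fills the word");
static_assert(OpField::decode(OpField::encode(5)) == 5, "round trip");
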
diff --git a/runtime/vm/clustered_snapshot.cc b/runtime/vm/clustered_snapshot.cc
index 992db17..a4ecb71 100644
--- a/runtime/vm/clustered_snapshot.cc
+++ b/runtime/vm/clustered_snapshot.cc
@@ -1184,7 +1184,15 @@
       WriteFromTo(script);
       s->Write<int32_t>(script->ptr()->line_offset_);
       s->Write<int32_t>(script->ptr()->col_offset_);
-      s->Write<uint8_t>(script->ptr()->flags_);
+      if (s->kind() != Snapshot::kFullAOT) {
+        // Clear the cached max position when writing snapshots so that the
+        // snapshot contents do not depend on whether the cache was populated.
+        int32_t written_flags = ScriptLayout::CachedMaxPositionBitField::update(
+            0, script->ptr()->flags_and_max_position_);
+        written_flags =
+            ScriptLayout::HasCachedMaxPositionBit::update(false, written_flags);
+        s->Write<int32_t>(written_flags);
+      }
       s->Write<int32_t>(script->ptr()->kernel_script_index_);
     }
   }
@@ -1217,7 +1225,9 @@
       ReadFromTo(script);
       script->ptr()->line_offset_ = d->Read<int32_t>();
       script->ptr()->col_offset_ = d->Read<int32_t>();
-      script->ptr()->flags_ = d->Read<uint8_t>();
+#if !defined(DART_PRECOMPILED_RUNTIME)
+      script->ptr()->flags_and_max_position_ = d->Read<int32_t>();
+#endif
       script->ptr()->kernel_script_index_ = d->Read<int32_t>();
       script->ptr()->load_timestamp_ = 0;
     }
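
The snapshot writer now clears the cached-max-position state before serializing
flags_and_max_position_, so the written bytes do not depend on whether the
cache happened to be filled during compilation. A standalone sketch of that
determinism pattern with a made-up field layout (the mask below is
illustrative, not ScriptLayout's actual encoding):

#include <cstdint>

// Hypothetical layout: the low byte holds persistent script flags, one bit
// above it marks whether a maximum token position has been cached, and the
// remaining bits hold the cached value itself.
constexpr int32_t kPersistentFlagsMask = 0xFF;

int32_t FlagsForSnapshot(int32_t flags_and_max_position) {
  // Keep only the persistent flags; drop the "has cached max position" bit and
  // the cached value so that identical programs always serialize to identical
  // snapshot bytes, whether or not the cache was ever populated.
  return flags_and_max_position & kPersistentFlagsMask;
}

The diff above achieves the same effect with ScriptLayout's bit field helpers
(CachedMaxPositionBitField::update and HasCachedMaxPositionBit::update) rather
than a raw mask.
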
diff --git a/runtime/vm/code_descriptors.cc b/runtime/vm/code_descriptors.cc
index c52c525..6a75f04 100644
--- a/runtime/vm/code_descriptors.cc
+++ b/runtime/vm/code_descriptors.cc
@@ -6,7 +6,9 @@
 
 #include "platform/utils.h"
 #include "vm/compiler/api/deopt_id.h"
+#include "vm/flags.h"
 #include "vm/log.h"
+#include "vm/object.h"
 #include "vm/object_store.h"
 #include "vm/zone_text_buffer.h"
 
@@ -31,7 +33,7 @@
 void DescriptorList::AddDescriptor(PcDescriptorsLayout::Kind kind,
                                    intptr_t pc_offset,
                                    intptr_t deopt_id,
-                                   TokenPosition token_pos,
+                                   const TokenPosition token_pos,
                                    intptr_t try_index,
                                    intptr_t yield_index) {
   // yield index 0 is reserved for normal entry.
@@ -68,8 +70,7 @@
                 function_.token_pos().ToCString(),
                 function_.end_token_pos().ToCString());
         }
-        intptr_t line;
-        if (!script_.IsNull() && !script_.GetTokenLocation(token_pos, &line)) {
+        if (!script_.IsNull() && !script_.IsValidTokenPosition(token_pos)) {
           FATAL("Token position %s for PC descriptor %s at offset 0x%" Px
                 " invalid for script %s of function %s",
                 token_pos.ToCString(), PcDescriptorsLayout::KindToCString(kind),
@@ -236,6 +237,51 @@
 }
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
 
+uint8_t CodeSourceMapOps::Read(ReadStream* stream,
+                               int32_t* arg1,
+                               int32_t* arg2) {
+  ASSERT(stream != nullptr && arg1 != nullptr);
+  const int32_t n = stream->Read<int32_t>();
+  const uint8_t op = OpField::decode(n);
+  *arg1 = ArgField::decode(n);
+  if (*arg1 > kMaxArgValue) {
+    *arg1 |= kSignBits;
+  }
+#if defined(DART_PRECOMPILER)
+  // The special handling for non-symbolic stack trace mode only needs to
+  // happen in the precompiler, because those CSMs are not serialized in
+  // precompiled snapshots.
+  if (op == kChangePosition && FLAG_dwarf_stack_traces_mode) {
+    const int32_t m = stream->Read<int32_t>();
+    if (arg2 != nullptr) {
+      *arg2 = m;
+    }
+  }
+#endif
+  return op;
+}
+
+void CodeSourceMapOps::Write(BaseWriteStream* stream,
+                             uint8_t op,
+                             int32_t arg1,
+                             int32_t arg2) {
+  ASSERT(stream != nullptr);
+  ASSERT(arg1 >= kMinArgValue && arg1 <= kMaxArgValue);
+  if (arg1 < 0) {
+    arg1 &= ~kSignBits;
+  }
+  const int32_t n = OpField::encode(op) | ArgField::encode(arg1);
+  stream->Write(n);
+#if defined(DART_PRECOMPILER)
+  if (op == kChangePosition && FLAG_dwarf_stack_traces_mode) {
+    // For non-symbolic stack traces, the CodeSourceMaps are not serialized,
+    // so we need not worry about increasing snapshot size by including more
+    // information here.
+    stream->Write(arg2);
+  }
+#endif
+}
+
 const TokenPosition& CodeSourceMapBuilder::kInitialPosition =
     TokenPosition::kDartCodePrologue;
 
@@ -245,7 +291,8 @@
     const GrowableArray<intptr_t>& caller_inline_id,
     const GrowableArray<TokenPosition>& inline_id_to_token_pos,
     const GrowableArray<const Function*>& inline_id_to_function)
-    : buffered_pc_offset_(0),
+    : zone_(zone),
+      buffered_pc_offset_(0),
       buffered_inline_id_stack_(),
       buffered_token_pos_stack_(),
       written_pc_offset_(0),
@@ -256,7 +303,7 @@
       inline_id_to_function_(inline_id_to_function),
       inlined_functions_(
           GrowableObjectArray::Handle(GrowableObjectArray::New(Heap::kOld))),
-      script_(Script::Handle(zone)),
+      script_(Script::Handle(zone, Script::null())),
       stream_(zone, 64),
       stack_traces_only_(stack_traces_only) {
   buffered_inline_id_stack_.Add(0);
@@ -266,64 +313,70 @@
 }
 
 void CodeSourceMapBuilder::FlushBuffer() {
-  FlushBufferStack();
-  FlushBufferPosition();
-  FlushBufferPC();
-}
-
-void CodeSourceMapBuilder::FlushBufferStack() {
-  for (intptr_t i = buffered_inline_id_stack_.length() - 1; i >= 0; i--) {
-    intptr_t buffered_id = buffered_inline_id_stack_[i];
-    if (i < written_inline_id_stack_.length()) {
-      intptr_t written_id = written_inline_id_stack_[i];
+  // 1. Flush the inlining stack.
+  //
+  // The top-most position where the buffered and written stack match.
+  intptr_t common_index;
+  for (common_index = buffered_inline_id_stack_.length() - 1; common_index >= 0;
+       common_index--) {
+    intptr_t buffered_id = buffered_inline_id_stack_[common_index];
+    if (common_index < written_inline_id_stack_.length()) {
+      intptr_t written_id = written_inline_id_stack_[common_index];
       if (buffered_id == written_id) {
-        // i is the top-most position where the buffered and written stack
-        // match.
-        while (written_inline_id_stack_.length() > i + 1) {
-          WritePop();
-        }
-        for (intptr_t j = i + 1; j < buffered_inline_id_stack_.length(); j++) {
-          TokenPosition buffered_pos = buffered_token_pos_stack_[j - 1];
-          TokenPosition written_pos = written_token_pos_stack_[j - 1];
-          if (buffered_pos != written_pos) {
-            WriteChangePosition(buffered_pos);
-          }
-          WritePush(buffered_inline_id_stack_[j]);
-        }
-        return;
+        break;
       }
     }
   }
-  UNREACHABLE();
-}
+  if (common_index < 0) {
+    // The base, which is the root function, should _always_ match.
+    UNREACHABLE();
+  }
+  while (written_inline_id_stack_.length() > common_index + 1) {
+    WritePop();
+  }
+  for (intptr_t j = common_index + 1; j < buffered_inline_id_stack_.length();
+       j++) {
+    const auto& buffered_pos = buffered_token_pos_stack_[j - 1];
+    const auto& written_pos = written_token_pos_stack_[j - 1];
+    if (buffered_pos != written_pos) {
+      WriteChangePosition(buffered_pos);
+    }
+    WritePush(buffered_inline_id_stack_[j]);
+  }
 
-void CodeSourceMapBuilder::FlushBufferPosition() {
-  ASSERT(buffered_token_pos_stack_.length() ==
-         written_token_pos_stack_.length());
+  ASSERT_EQUAL(buffered_token_pos_stack_.length(),
+               written_token_pos_stack_.length());
 
+  // 2. Flush the current token position.
   intptr_t top = buffered_token_pos_stack_.length() - 1;
-  TokenPosition buffered_pos = buffered_token_pos_stack_[top];
-  TokenPosition written_pos = written_token_pos_stack_[top];
+  const auto& buffered_pos = buffered_token_pos_stack_[top];
+  const auto& written_pos = written_token_pos_stack_[top];
   if (buffered_pos != written_pos) {
     WriteChangePosition(buffered_pos);
   }
-}
 
-void CodeSourceMapBuilder::FlushBufferPC() {
+  // 3. Flush the current PC offset.
   if (buffered_pc_offset_ != written_pc_offset_) {
     WriteAdvancePC(buffered_pc_offset_ - written_pc_offset_);
   }
 }
 
-void CodeSourceMapBuilder::StartInliningInterval(int32_t pc_offset,
-                                                 intptr_t inline_id) {
-  if (buffered_inline_id_stack_.Last() == inline_id) {
+void CodeSourceMapBuilder::StartInliningInterval(
+    int32_t pc_offset,
+    const InstructionSource& source) {
+  if (!source.token_pos.IsReal() && !source.token_pos.IsSynthetic()) {
+    // Only record inlining intervals for token positions that might need
+    // to be checked against the appropriate function and/or script.
+    return;
+  }
+
+  if (buffered_inline_id_stack_.Last() == source.inlining_id) {
     // No change in function stack.
     return;
   }
-  if (inline_id == -1) {
-    // We're missing an inlining ID for some reason: for now, just assume it
-    // should have the current inlining ID.
+
+  if (source.inlining_id < 0) {
+    // Inlining ID is unset for this source, so assume the current inlining ID.
     return;
   }
 
@@ -335,7 +388,7 @@
   // stack.
 
   // Pop to a common ancestor.
-  intptr_t common_parent = inline_id;
+  intptr_t common_parent = source.inlining_id;
   while (!IsOnBufferedStack(common_parent)) {
     common_parent = caller_inline_id_[common_parent];
   }
@@ -345,69 +398,64 @@
 
   // Push to the new top-of-stack function.
   GrowableArray<intptr_t> to_push;
-  intptr_t id = inline_id;
-  while (id != common_parent) {
+  for (intptr_t id = source.inlining_id; id != common_parent;
+       id = caller_inline_id_[id]) {
     to_push.Add(id);
-    id = caller_inline_id_[id];
   }
   for (intptr_t i = to_push.length() - 1; i >= 0; i--) {
     intptr_t callee_id = to_push[i];
-    TokenPosition call_token = TokenPosition::kNoSource;
-    if (callee_id != 0) {
-      // TODO(rmacnak): Should make this array line up with the others.
-      call_token = inline_id_to_token_pos_[callee_id - 1];
-    } else {
-      UNREACHABLE();
-    }
-
-    // Report caller as at the position of the call.
-    BufferChangePosition(call_token);
-
+    // We should never push the root function or its "caller".
+    ASSERT(callee_id > 0);
+    BufferChangePosition(inline_id_to_token_pos_[callee_id - 1]);
     BufferPush(callee_id);
   }
+  if (FLAG_check_token_positions) {
+    // Only update the cached script_ on inlining interval changes, since it's
+    // a non-trivial computation.
+    script_ = inline_id_to_function_[source.inlining_id]->script();
+  }
 }
 
-void CodeSourceMapBuilder::BeginCodeSourceRange(int32_t pc_offset,
-                                                intptr_t inline_id,
-                                                const TokenPosition& pos) {
-  if (pos.IsReal() || pos.IsSynthetic()) {
-    // Only record inlining intervals for token positions that might need
-    // to be checked against the appropriate function and/or script.
-    StartInliningInterval(pc_offset, inline_id);
-  }
+void CodeSourceMapBuilder::BeginCodeSourceRange(
+    int32_t pc_offset,
+    const InstructionSource& source) {
+  StartInliningInterval(pc_offset, source);
 }
 
 void CodeSourceMapBuilder::EndCodeSourceRange(int32_t pc_offset,
-                                              const TokenPosition& pos) {
+                                              const InstructionSource& source) {
   if (pc_offset == buffered_pc_offset_) {
     return;  // Empty intermediate instruction.
   }
-  if (pos != buffered_token_pos_stack_.Last()) {
+  StartInliningInterval(pc_offset, source);
+  if (source.token_pos != buffered_token_pos_stack_.Last()) {
     if (!stack_traces_only_) {
       FlushBuffer();
     }
-    BufferChangePosition(pos);
+    BufferChangePosition(source.token_pos);
   }
   BufferAdvancePC(pc_offset - buffered_pc_offset_);
 }
 
 void CodeSourceMapBuilder::NoteDescriptor(PcDescriptorsLayout::Kind kind,
                                           int32_t pc_offset,
-                                          TokenPosition pos) {
+                                          const InstructionSource& source) {
   const uint8_t kCanThrow =
       PcDescriptorsLayout::kIcCall | PcDescriptorsLayout::kUnoptStaticCall |
       PcDescriptorsLayout::kRuntimeCall | PcDescriptorsLayout::kOther;
   if ((kind & kCanThrow) != 0) {
-    BufferChangePosition(pos);
+    StartInliningInterval(pc_offset, source);
+    BufferChangePosition(source.token_pos);
     BufferAdvancePC(pc_offset - buffered_pc_offset_);
     FlushBuffer();
   }
 }
 
 void CodeSourceMapBuilder::NoteNullCheck(int32_t pc_offset,
-                                         TokenPosition pos,
+                                         const InstructionSource& source,
                                          intptr_t name_index) {
-  BufferChangePosition(pos);
+  StartInliningInterval(pc_offset, source);
+  BufferChangePosition(source.token_pos);
   BufferAdvancePC(pc_offset - buffered_pc_offset_);
   FlushBuffer();
   WriteNullCheck(name_index);
@@ -425,6 +473,17 @@
   return inlined_functions_.Length() - 1;
 }
 
+TokenPosition CodeSourceMapBuilder::RootPosition(
+    const InstructionSource& source) {
+  if (source.inlining_id <= 0) return source.token_pos;
+
+  intptr_t id = source.inlining_id;
+  while (caller_inline_id_[id] != 0) {
+    id = caller_inline_id_[id];
+  }
+  return inline_id_to_token_pos_[id - 1];
+}
+
 ArrayPtr CodeSourceMapBuilder::InliningIdToFunction() {
   if (inlined_functions_.Length() == 0) {
     return Object::empty_array().raw();
@@ -437,7 +496,7 @@
     FlushBuffer();
   }
   intptr_t length = stream_.bytes_written();
-  const CodeSourceMap& map = CodeSourceMap::Handle(CodeSourceMap::New(length));
+  const auto& map = CodeSourceMap::Handle(zone_, CodeSourceMap::New(length));
   NoSafepointScope no_safepoint;
   memmove(map.Data(), stream_.buffer(), length);
   return map.raw();
@@ -447,7 +506,8 @@
   if (FLAG_check_token_positions && pos.IsReal()) {
     const intptr_t inline_id = buffered_inline_id_stack_.Last();
     const auto& function = *inline_id_to_function_[inline_id];
-    if (!pos.IsWithin(function.token_pos(), function.end_token_pos())) {
+    if (function.end_token_pos().IsReal() &&
+        !pos.IsWithin(function.token_pos(), function.end_token_pos())) {
       TextBuffer buffer(256);
       buffer.Printf("Token position %s is invalid for function %s (%s, %s)",
                     pos.ToCString(), function.ToFullyQualifiedCString(),
@@ -460,14 +520,13 @@
       FATAL("%s", buffer.buffer());
     }
     script_ = function.script();
-    intptr_t line;
-    if (!script_.IsNull() && !script_.GetTokenLocation(pos, &line)) {
+    if (!script_.IsNull() && !script_.IsValidTokenPosition(pos)) {
       TextBuffer buffer(256);
       buffer.Printf("Token position %s is invalid for script %s of function %s",
                     pos.ToCString(), script_.ToCString(),
                     function.ToFullyQualifiedCString());
-      if (inline_id > 0) {
-        buffer.Printf(" while compiling function %s",
+      if (inline_id != 0) {
+        buffer.Printf(" inlined into function %s",
                       inline_id_to_function_[0]->ToFullyQualifiedCString());
       }
       FATAL("%s", buffer.buffer());
@@ -476,11 +535,12 @@
   buffered_token_pos_stack_.Last() = pos;
 }
 
-void CodeSourceMapBuilder::WriteChangePosition(TokenPosition pos) {
-  stream_.Write<uint8_t>(kChangePosition);
-  intptr_t position_or_line = pos.Serialize();
-#if defined(DART_PRECOMPILER)
+void CodeSourceMapBuilder::WriteChangePosition(const TokenPosition pos) {
+  const TokenPosition& last_written = written_token_pos_stack_.Last();
+  intptr_t position_or_line =
+      Utils::SubWithWrapAround(pos.Serialize(), last_written.Serialize());
   intptr_t column = TokenPosition::kNoSource.Serialize();
+#if defined(DART_PRECOMPILER)
   if (FLAG_precompiled_mode) {
     // Don't use the raw position value directly in precompiled mode. Instead,
     // use the value of kNoSource as a fallback when no line or column
@@ -490,17 +550,14 @@
     ASSERT(inline_id < inline_id_to_function_.length());
     script_ = inline_id_to_function_[inline_id]->script();
     script_.GetTokenLocation(pos, &position_or_line, &column);
+    intptr_t old_line = TokenPosition::kNoSource.Serialize();
+    script_.GetTokenLocation(last_written, &old_line);
+    position_or_line =
+        Utils::SubWithWrapAround<int32_t>(position_or_line, old_line);
   }
 #endif
-  stream_.Write<int32_t>(position_or_line);
-#if defined(DART_PRECOMPILER)
-  // For non-symbolic stack traces, the CodeSourceMaps are not serialized,
-  // so we need not worry about increasing snapshot size by including more
-  // information here.
-  if (FLAG_dwarf_stack_traces_mode) {
-    stream_.Write<int32_t>(column);
-  }
-#endif
+  CodeSourceMapOps::Write(&stream_, CodeSourceMapOps::kChangePosition,
+                          position_or_line, column);
   written_token_pos_stack_.Last() = pos;
 }
 
@@ -519,29 +576,31 @@
   token_positions->Add(InitialPosition());
 
   while (stream.PendingBytes() > 0) {
-    uint8_t opcode = stream.Read<uint8_t>();
+    int32_t arg;
+    const uint8_t opcode = CodeSourceMapOps::Read(&stream, &arg);
     switch (opcode) {
-      case CodeSourceMapBuilder::kChangePosition: {
+      case CodeSourceMapOps::kChangePosition: {
+        const TokenPosition& old_token =
+            (*token_positions)[token_positions->length() - 1];
         (*token_positions)[token_positions->length() - 1] =
-            ReadPosition(&stream);
+            TokenPosition::Deserialize(
+                Utils::AddWithWrapAround(arg, old_token.Serialize()));
         break;
       }
-      case CodeSourceMapBuilder::kAdvancePC: {
-        int32_t delta = stream.Read<int32_t>();
-        current_pc_offset += delta;
+      case CodeSourceMapOps::kAdvancePC: {
+        current_pc_offset += arg;
         if (current_pc_offset > pc_offset) {
           return;
         }
         break;
       }
-      case CodeSourceMapBuilder::kPushFunction: {
-        int32_t func = stream.Read<int32_t>();
+      case CodeSourceMapOps::kPushFunction: {
         function_stack->Add(
-            &Function::Handle(Function::RawCast(functions_.At(func))));
+            &Function::Handle(Function::RawCast(functions_.At(arg))));
         token_positions->Add(InitialPosition());
         break;
       }
-      case CodeSourceMapBuilder::kPopFunction: {
+      case CodeSourceMapOps::kPopFunction: {
         // We never pop the root function.
         ASSERT(function_stack->length() > 1);
         ASSERT(token_positions->length() > 1);
@@ -549,8 +608,7 @@
         token_positions->RemoveLast();
         break;
       }
-      case CodeSourceMapBuilder::kNullCheck: {
-        stream.Read<int32_t>();
+      case CodeSourceMapOps::kNullCheck: {
         break;
       }
       default:
@@ -580,38 +638,35 @@
   function_stack.Add(0);
 
   while (stream.PendingBytes() > 0) {
-    uint8_t opcode = stream.Read<uint8_t>();
+    int32_t arg;
+    const uint8_t opcode = CodeSourceMapOps::Read(&stream, &arg);
     switch (opcode) {
-      case CodeSourceMapBuilder::kChangePosition: {
-        ReadPosition(&stream);
+      case CodeSourceMapOps::kChangePosition: {
         break;
       }
-      case CodeSourceMapBuilder::kAdvancePC: {
-        int32_t delta = stream.Read<int32_t>();
+      case CodeSourceMapOps::kAdvancePC: {
         // Format: [start, end, inline functions...]
         JSONArray inline_interval(&inline_intervals);
         inline_interval.AddValue(static_cast<intptr_t>(current_pc_offset));
         inline_interval.AddValue(
-            static_cast<intptr_t>(current_pc_offset + delta - 1));
+            static_cast<intptr_t>(current_pc_offset + arg - 1));
         for (intptr_t i = 0; i < function_stack.length(); i++) {
           inline_interval.AddValue(function_stack[i]);
         }
-        current_pc_offset += delta;
+        current_pc_offset += arg;
         break;
       }
-      case CodeSourceMapBuilder::kPushFunction: {
-        int32_t func = stream.Read<int32_t>();
-        function_stack.Add(func);
+      case CodeSourceMapOps::kPushFunction: {
+        function_stack.Add(arg);
         break;
       }
-      case CodeSourceMapBuilder::kPopFunction: {
+      case CodeSourceMapOps::kPopFunction: {
         // We never pop the root function.
         ASSERT(function_stack.length() > 1);
         function_stack.RemoveLast();
         break;
       }
-      case CodeSourceMapBuilder::kNullCheck: {
-        stream.Read<int32_t>();
+      case CodeSourceMapOps::kNullCheck: {
         break;
       }
       default:
@@ -633,37 +688,34 @@
   THR_Print("Inline intervals for function '%s' {\n",
             root_.ToFullyQualifiedCString());
   while (stream.PendingBytes() > 0) {
-    uint8_t opcode = stream.Read<uint8_t>();
+    int32_t arg;
+    const uint8_t opcode = CodeSourceMapOps::Read(&stream, &arg);
     switch (opcode) {
-      case CodeSourceMapBuilder::kChangePosition: {
-        ReadPosition(&stream);
+      case CodeSourceMapOps::kChangePosition: {
         break;
       }
-      case CodeSourceMapBuilder::kAdvancePC: {
-        int32_t delta = stream.Read<int32_t>();
+      case CodeSourceMapOps::kAdvancePC: {
         THR_Print("%" Px "-%" Px ": ", start + current_pc_offset,
-                  start + current_pc_offset + delta - 1);
+                  start + current_pc_offset + arg - 1);
         for (intptr_t i = 0; i < function_stack.length(); i++) {
           THR_Print("%s ", function_stack[i]->ToCString());
         }
         THR_Print("\n");
-        current_pc_offset += delta;
+        current_pc_offset += arg;
         break;
       }
-      case CodeSourceMapBuilder::kPushFunction: {
-        int32_t func = stream.Read<int32_t>();
+      case CodeSourceMapOps::kPushFunction: {
         function_stack.Add(
-            &Function::Handle(Function::RawCast(functions_.At(func))));
+            &Function::Handle(Function::RawCast(functions_.At(arg))));
         break;
       }
-      case CodeSourceMapBuilder::kPopFunction: {
+      case CodeSourceMapOps::kPopFunction: {
         // We never pop the root function.
         ASSERT(function_stack.length() > 1);
         function_stack.RemoveLast();
         break;
       }
-      case CodeSourceMapBuilder::kNullCheck: {
-        stream.Read<int32_t>();
+      case CodeSourceMapOps::kNullCheck: {
         break;
       }
       default:
@@ -687,32 +739,35 @@
   THR_Print("Source positions for function '%s' {\n",
             root_.ToFullyQualifiedCString());
   while (stream.PendingBytes() > 0) {
-    uint8_t opcode = stream.Read<uint8_t>();
+    int32_t arg;
+    const uint8_t opcode = CodeSourceMapOps::Read(&stream, &arg);
     switch (opcode) {
-      case CodeSourceMapBuilder::kChangePosition: {
-        token_positions[token_positions.length() - 1] = ReadPosition(&stream);
+      case CodeSourceMapOps::kChangePosition: {
+        const TokenPosition& old_token =
+            token_positions[token_positions.length() - 1];
+        token_positions[token_positions.length() - 1] =
+            TokenPosition::Deserialize(
+                Utils::AddWithWrapAround(arg, old_token.Serialize()));
         break;
       }
-      case CodeSourceMapBuilder::kAdvancePC: {
-        int32_t delta = stream.Read<int32_t>();
+      case CodeSourceMapOps::kAdvancePC: {
         THR_Print("%" Px "-%" Px ": ", start + current_pc_offset,
-                  start + current_pc_offset + delta - 1);
+                  start + current_pc_offset + arg - 1);
         for (intptr_t i = 0; i < function_stack.length(); i++) {
           THR_Print("%s@%s", function_stack[i]->ToCString(),
                     token_positions[i].ToCString());
         }
         THR_Print("\n");
-        current_pc_offset += delta;
+        current_pc_offset += arg;
         break;
       }
-      case CodeSourceMapBuilder::kPushFunction: {
-        int32_t func = stream.Read<int32_t>();
+      case CodeSourceMapOps::kPushFunction: {
         function_stack.Add(
-            &Function::Handle(Function::RawCast(functions_.At(func))));
+            &Function::Handle(Function::RawCast(functions_.At(arg))));
         token_positions.Add(InitialPosition());
         break;
       }
-      case CodeSourceMapBuilder::kPopFunction: {
+      case CodeSourceMapOps::kPopFunction: {
         // We never pop the root function.
         ASSERT(function_stack.length() > 1);
         ASSERT(token_positions.length() > 1);
@@ -720,11 +775,9 @@
         token_positions.RemoveLast();
         break;
       }
-      case CodeSourceMapBuilder::kNullCheck: {
-        const intptr_t name_index = stream.Read<int32_t>();
-        THR_Print("%" Px "-%" Px ": null check PP#%" Pd "\n",
-                  start + current_pc_offset, start + current_pc_offset,
-                  name_index);
+      case CodeSourceMapOps::kNullCheck: {
+        THR_Print("%" Px "-%" Px ": null check PP#%" Pd32 "\n",
+                  start + current_pc_offset, start + current_pc_offset, arg);
         break;
       }
       default:
@@ -741,29 +794,26 @@
   int32_t current_pc_offset = 0;
 
   while (stream.PendingBytes() > 0) {
-    uint8_t opcode = stream.Read<uint8_t>();
+    int32_t arg;
+    const uint8_t opcode = CodeSourceMapOps::Read(&stream, &arg);
     switch (opcode) {
-      case CodeSourceMapBuilder::kChangePosition: {
-        ReadPosition(&stream);
+      case CodeSourceMapOps::kChangePosition: {
         break;
       }
-      case CodeSourceMapBuilder::kAdvancePC: {
-        int32_t delta = stream.Read<int32_t>();
-        current_pc_offset += delta;
+      case CodeSourceMapOps::kAdvancePC: {
+        current_pc_offset += arg;
         RELEASE_ASSERT(current_pc_offset <= pc_offset);
         break;
       }
-      case CodeSourceMapBuilder::kPushFunction: {
-        stream.Read<int32_t>();
+      case CodeSourceMapOps::kPushFunction: {
         break;
       }
-      case CodeSourceMapBuilder::kPopFunction: {
+      case CodeSourceMapOps::kPopFunction: {
         break;
       }
-      case CodeSourceMapBuilder::kNullCheck: {
-        const int32_t name_index = stream.Read<int32_t>();
+      case CodeSourceMapOps::kNullCheck: {
         if (current_pc_offset == pc_offset) {
-          return name_index;
+          return arg;
         }
         break;
       }
@@ -776,18 +826,4 @@
   return -1;
 }
 
-TokenPosition CodeSourceMapReader::ReadPosition(ReadStream* stream) {
-  const TokenPosition line =
-      TokenPosition::Deserialize(stream->Read<int32_t>());
-#if defined(DART_PRECOMPILER)
-  // The special handling for non-symbolic stack trace mode only needs to
-  // happen in the precompiler, because those CSMs are not serialized in
-  // precompiled snapshots.
-  if (FLAG_dwarf_stack_traces_mode) {
-    stream->Read<int32_t>();  // Discard the column information.
-  }
-#endif
-  return line;
-}
-
 }  // namespace dart
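
With this change, WriteChangePosition emits each token position as a delta
against the previously written position, using wrap-around arithmetic so the
subtraction stays well defined even for negative sentinel values, and the
readers above reverse it with a wrap-around add. A minimal standalone sketch of
the encode/decode pair, assuming 32-bit serialized positions; SubWithWrapAround
and AddWithWrapAround here are simplified local stand-ins for the Utils
helpers:

#include <cassert>
#include <cstdint>

// Wrap-around arithmetic on int32_t: do the math on the unsigned
// representation to avoid signed overflow, then convert back (two's complement
// on all supported platforms).
int32_t SubWithWrapAround(int32_t a, int32_t b) {
  return static_cast<int32_t>(static_cast<uint32_t>(a) -
                              static_cast<uint32_t>(b));
}

int32_t AddWithWrapAround(int32_t a, int32_t b) {
  return static_cast<int32_t>(static_cast<uint32_t>(a) +
                              static_cast<uint32_t>(b));
}

int main() {
  // Writer side: emit the new position as a delta against the last written
  // position (which starts out at a sentinel such as kNoSource).
  int32_t last_written = -1;
  int32_t new_position = 1042;
  int32_t delta = SubWithWrapAround(new_position, last_written);

  // Reader side: fold the delta into the running position to recover the
  // absolute value.
  int32_t last_read = -1;
  int32_t decoded = AddWithWrapAround(delta, last_read);
  assert(decoded == new_position);
  return 0;
}

In precompiled mode the same delta scheme is applied to line numbers instead of
raw serialized positions, which is why WriteChangePosition also looks up the
line of the previously written position before subtracting.
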
diff --git a/runtime/vm/code_descriptors.h b/runtime/vm/code_descriptors.h
index 014b900..5356c50 100644
--- a/runtime/vm/code_descriptors.h
+++ b/runtime/vm/code_descriptors.h
@@ -9,8 +9,8 @@
 #include "vm/globals.h"
 #include "vm/growable_array.h"
 #include "vm/log.h"
-#include "vm/object.h"
 #include "vm/runtime_entry.h"
+#include "vm/token_position.h"
 
 namespace dart {
 
@@ -164,6 +164,56 @@
 };
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
 
+// Instructions carry two pieces of information needed to get accurate source
+// locations: the token position and the inlining id. The inlining id tells us
+// which function, and thus which script, the instruction belongs to, and the
+// token position, when real, gives the instruction's location within that
+// script's source.
+//
+// We therefore bundle the two pieces of information in InstructionSource
+// structs whenever they are copied or retrieved, to lower the likelihood that
+// a token position is used without its corresponding inlining id.
+struct InstructionSource {
+  // Treat an instruction source without inlining id information as unset.
+  InstructionSource() : InstructionSource(TokenPosition::kNoSource) {}
+  explicit InstructionSource(TokenPosition pos) : InstructionSource(pos, -1) {}
+  InstructionSource(TokenPosition pos, intptr_t id)
+      : token_pos(pos), inlining_id(id) {}
+
+  const TokenPosition token_pos;
+  const intptr_t inlining_id;
+
+  DISALLOW_ALLOCATION();
+};
+
+struct CodeSourceMapOps : AllStatic {
+  static const uint8_t kChangePosition = 0;
+  static const uint8_t kAdvancePC = 1;
+  static const uint8_t kPushFunction = 2;
+  static const uint8_t kPopFunction = 3;
+  static const uint8_t kNullCheck = 4;
+
+  static uint8_t Read(ReadStream* stream,
+                      int32_t* arg1,
+                      int32_t* arg2 = nullptr);
+
+  static void Write(BaseWriteStream* stream,
+                    uint8_t op,
+                    int32_t arg1 = 0,
+                    int32_t arg2 = 0);
+
+ private:
+  static constexpr intptr_t kOpBits = 3;
+
+  using OpField = BitField<int32_t, uint8_t, 0, kOpBits>;
+  using ArgField = BitField<int32_t, int32_t, OpField::kNextBit>;
+
+  static constexpr int32_t kMaxArgValue =
+      Utils::NBitMaskUnsafe(ArgField::bitsize() - 1);
+  static constexpr int32_t kMinArgValue = ~kMaxArgValue;
+  static constexpr int32_t kSignBits = static_cast<uint32_t>(kMinArgValue) << 1;
+};
+
 // A CodeSourceMap maps from pc offsets to a stack of inlined functions and
 // their positions. This is encoded as a little bytecode that pushes and pops
 // functions and changes the top function's position as the PC advances.
@@ -188,21 +238,18 @@
   // since it is the most common.
   static const TokenPosition& kInitialPosition;
 
-  static const uint8_t kChangePosition = 0;
-  static const uint8_t kAdvancePC = 1;
-  static const uint8_t kPushFunction = 2;
-  static const uint8_t kPopFunction = 3;
-  static const uint8_t kNullCheck = 4;
-
-  void BeginCodeSourceRange(int32_t pc_offset,
-                            intptr_t inline_id,
-                            const TokenPosition& token_pos);
-  void EndCodeSourceRange(int32_t pc_offset, const TokenPosition& token_pos);
+  void BeginCodeSourceRange(int32_t pc_offset, const InstructionSource& source);
+  void EndCodeSourceRange(int32_t pc_offset, const InstructionSource& source);
   void NoteDescriptor(PcDescriptorsLayout::Kind kind,
                       int32_t pc_offset,
-                      TokenPosition pos);
-  void NoteNullCheck(int32_t pc_offset, TokenPosition pos, intptr_t name_index);
+                      const InstructionSource& source);
+  void NoteNullCheck(int32_t pc_offset,
+                     const InstructionSource& source,
+                     intptr_t name_index);
 
+  // If source is from an inlined call, returns the token position of the
+  // original call in the root function, otherwise the source's token position.
+  TokenPosition RootPosition(const InstructionSource& source);
   ArrayPtr InliningIdToFunction();
   CodeSourceMapPtr Finalize();
 
@@ -212,14 +259,14 @@
 
  private:
   intptr_t GetFunctionId(intptr_t inline_id);
-  void StartInliningInterval(int32_t pc_offset, intptr_t inline_id);
+  void StartInliningInterval(int32_t pc_offset,
+                             const InstructionSource& source);
 
   void BufferChangePosition(TokenPosition pos);
   void WriteChangePosition(TokenPosition pos);
   void BufferAdvancePC(int32_t distance) { buffered_pc_offset_ += distance; }
   void WriteAdvancePC(int32_t distance) {
-    stream_.Write<uint8_t>(kAdvancePC);
-    stream_.Write<int32_t>(distance);
+    CodeSourceMapOps::Write(&stream_, CodeSourceMapOps::kAdvancePC, distance);
     written_pc_offset_ += distance;
   }
   void BufferPush(intptr_t inline_id) {
@@ -227,8 +274,8 @@
     buffered_token_pos_stack_.Add(kInitialPosition);
   }
   void WritePush(intptr_t inline_id) {
-    stream_.Write<uint8_t>(kPushFunction);
-    stream_.Write<int32_t>(GetFunctionId(inline_id));
+    CodeSourceMapOps::Write(&stream_, CodeSourceMapOps::kPushFunction,
+                            GetFunctionId(inline_id));
     written_inline_id_stack_.Add(inline_id);
     written_token_pos_stack_.Add(kInitialPosition);
   }
@@ -237,19 +284,15 @@
     buffered_token_pos_stack_.RemoveLast();
   }
   void WritePop() {
-    stream_.Write<uint8_t>(kPopFunction);
+    CodeSourceMapOps::Write(&stream_, CodeSourceMapOps::kPopFunction);
     written_inline_id_stack_.RemoveLast();
     written_token_pos_stack_.RemoveLast();
   }
   void WriteNullCheck(int32_t name_index) {
-    stream_.Write<uint8_t>(kNullCheck);
-    stream_.Write<int32_t>(name_index);
+    CodeSourceMapOps::Write(&stream_, CodeSourceMapOps::kNullCheck, name_index);
   }
 
   void FlushBuffer();
-  void FlushBufferStack();
-  void FlushBufferPosition();
-  void FlushBufferPC();
 
   bool IsOnBufferedStack(intptr_t inline_id) {
     for (intptr_t i = 0; i < buffered_inline_id_stack_.length(); i++) {
@@ -258,6 +301,7 @@
     return false;
   }
 
+  Zone* const zone_;
   intptr_t buffered_pc_offset_;
   GrowableArray<intptr_t> buffered_inline_id_stack_;
   GrowableArray<TokenPosition> buffered_token_pos_stack_;
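
CodeSourceMapOps packs each opcode and its signed argument into one int32_t: a
3-bit OpField in the low bits and a 29-bit ArgField above it. Write masks off
the redundant high sign bits of a negative argument so it fits the field, and
Read restores them by OR-ing kSignBits back in whenever the decoded value
exceeds kMaxArgValue. A hedged standalone round-trip sketch with the constants
recomputed locally (Encode and Decode are illustrative helpers, not the VM
functions):

#include <cassert>
#include <cstdint>

constexpr int kOpBits = 3;
constexpr int kArgBits = 32 - kOpBits;  // 29 bits for the argument field.
constexpr int32_t kMaxArgValue = (1 << (kArgBits - 1)) - 1;  // 2^28 - 1
constexpr int32_t kMinArgValue = ~kMaxArgValue;              // -(2^28)
constexpr int32_t kSignBits =
    static_cast<int32_t>(static_cast<uint32_t>(kMinArgValue) << 1);

int32_t Encode(uint8_t op, int32_t arg) {
  assert(arg >= kMinArgValue && arg <= kMaxArgValue);
  // A negative argument repeats its sign in the bits that do not fit the
  // field, so drop them before packing.
  if (arg < 0) arg &= ~kSignBits;
  return static_cast<int32_t>(static_cast<uint32_t>(op) |
                              (static_cast<uint32_t>(arg) << kOpBits));
}

void Decode(int32_t n, uint8_t* op, int32_t* arg) {
  *op = static_cast<uint8_t>(n & ((1 << kOpBits) - 1));
  *arg = static_cast<int32_t>(static_cast<uint32_t>(n) >> kOpBits);
  // Restore the dropped sign bits for negative arguments.
  if (*arg > kMaxArgValue) *arg |= kSignBits;
}

int main() {
  uint8_t op;
  int32_t arg;
  Decode(Encode(/*op=*/1, /*arg=*/-42), &op, &arg);
  assert(op == 1 && arg == -42);
  return 0;
}
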
diff --git a/runtime/vm/compiler/aot/aot_call_specializer.cc b/runtime/vm/compiler/aot/aot_call_specializer.cc
index 6493a81..ddfc55b 100644
--- a/runtime/vm/compiler/aot/aot_call_specializer.cc
+++ b/runtime/vm/compiler/aot/aot_call_specializer.cc
@@ -203,7 +203,7 @@
     args->Add(right->ArgumentValueAt(0)->CopyWithType(Z));
     const intptr_t kTypeArgsLen = 0;
     StaticCallInstr* static_call = new (Z) StaticCallInstr(
-        call->token_pos(), have_same_runtime_type, kTypeArgsLen,
+        call->source(), have_same_runtime_type, kTypeArgsLen,
         Object::null_array(),  // argument_names
         args, call->deopt_id(), call->CallCount(), ICData::kOptimized);
     static_call->SetResultType(Z, CompileType::FromCid(kBoolCid));
@@ -311,7 +311,7 @@
     Definition* conversion = NULL;
 
     if (input->Type()->ToNullableCid() == kSmiCid) {
-      conversion = new (Z) SmiToDoubleInstr(input, call->token_pos());
+      conversion = new (Z) SmiToDoubleInstr(input, call->source());
     } else if (FlowGraphCompiler::SupportsUnboxedInt64() &&
                FlowGraphCompiler::CanConvertInt64ToDouble()) {
       conversion = new (Z) Int64ToDoubleInstr(input, DeoptId::kNone,
@@ -512,7 +512,7 @@
         // We prefer equality compare, since it doesn't require boxing.
         if (!can_use_equality_compare && can_use_strict_compare) {
           replacement = new (Z) StrictCompareInstr(
-              instr->token_pos(),
+              instr->source(),
               (op_kind == Token::kEQ) ? Token::kEQ_STRICT : Token::kNE_STRICT,
               left_value->CopyWithType(Z), right_value->CopyWithType(Z),
               /*needs_number_check=*/false, DeoptId::kNone);
@@ -522,7 +522,7 @@
         if (supports_unboxed_int) {
           if (can_use_equality_compare) {
             replacement = new (Z) EqualityCompareInstr(
-                instr->token_pos(), op_kind, left_value->CopyWithType(Z),
+                instr->source(), op_kind, left_value->CopyWithType(Z),
                 right_value->CopyWithType(Z), kMintCid, DeoptId::kNone,
                 Instruction::kNotSpeculative);
             break;
@@ -530,7 +530,7 @@
             left_value = PrepareStaticOpInput(left_value, kMintCid, instr);
             right_value = PrepareStaticOpInput(right_value, kMintCid, instr);
             replacement = new (Z) RelationalOpInstr(
-                instr->token_pos(), op_kind, left_value, right_value, kMintCid,
+                instr->source(), op_kind, left_value, right_value, kMintCid,
                 DeoptId::kNone, Instruction::kNotSpeculative);
             break;
           } else {
@@ -672,7 +672,7 @@
           left_value = PrepareStaticOpInput(left_value, kDoubleCid, instr);
           right_value = PrepareStaticOpInput(right_value, kDoubleCid, instr);
           replacement = new (Z) EqualityCompareInstr(
-              instr->token_pos(), op_kind, left_value, right_value, kDoubleCid,
+              instr->source(), op_kind, left_value, right_value, kDoubleCid,
               DeoptId::kNone, Instruction::kNotSpeculative);
           break;
         }
@@ -688,7 +688,7 @@
         left_value = PrepareStaticOpInput(left_value, kDoubleCid, instr);
         right_value = PrepareStaticOpInput(right_value, kDoubleCid, instr);
         replacement = new (Z) RelationalOpInstr(
-            instr->token_pos(), op_kind, left_value, right_value, kDoubleCid,
+            instr->source(), op_kind, left_value, right_value, kDoubleCid,
             DeoptId::kNone, Instruction::kNotSpeculative);
         break;
       }
@@ -706,8 +706,8 @@
         left_value = PrepareStaticOpInput(left_value, kDoubleCid, instr);
         right_value = PrepareStaticOpInput(right_value, kDoubleCid, instr);
         replacement = new (Z) BinaryDoubleOpInstr(
-            op_kind, left_value, right_value, DeoptId::kNone,
-            instr->token_pos(), Instruction::kNotSpeculative);
+            op_kind, left_value, right_value, DeoptId::kNone, instr->source(),
+            Instruction::kNotSpeculative);
         break;
       }
 
@@ -903,7 +903,7 @@
       }
       if (is_object_eq) {
         auto* replacement = new (Z) StrictCompareInstr(
-            instr->token_pos(),
+            instr->source(),
             (instr->token_kind() == Token::kEQ) ? Token::kEQ_STRICT
                                                 : Token::kNE_STRICT,
             instr->ArgumentValueAt(0)->CopyWithType(Z),
@@ -1098,12 +1098,12 @@
 
   InputsArray* get_arguments = new (Z) InputsArray(Z, 1);
   get_arguments->Add(call->ArgumentValueAt(receiver_idx)->CopyWithType(Z));
-  InstanceCallInstr* invoke_get = new (Z) InstanceCallInstr(
-      call->token_pos(), getter_name, Token::kGET, get_arguments,
-      /*type_args_len=*/0,
-      /*argument_names=*/Object::empty_array(),
-      /*checked_argument_count=*/1,
-      thread()->compiler_state().GetNextDeoptId());
+  InstanceCallInstr* invoke_get = new (Z)
+      InstanceCallInstr(call->source(), getter_name, Token::kGET, get_arguments,
+                        /*type_args_len=*/0,
+                        /*argument_names=*/Object::empty_array(),
+                        /*checked_argument_count=*/1,
+                        thread()->compiler_state().GetNextDeoptId());
 
   // Arguments to the .call() are the same as arguments to the
   // original call (including type arguments), but receiver
@@ -1118,7 +1118,7 @@
   }
 
   InstanceCallInstr* invoke_call = new (Z) InstanceCallInstr(
-      call->token_pos(), Symbols::Call(), Token::kILLEGAL, call_arguments,
+      call->source(), Symbols::Call(), Token::kILLEGAL, call_arguments,
       call->type_args_len(), call->argument_names(),
       /*checked_argument_count=*/1,
       thread()->compiler_state().GetNextDeoptId());
@@ -1204,7 +1204,7 @@
 
   if (lower_limit == upper_limit) {
     StrictCompareInstr* check_cid = new (Z)
-        StrictCompareInstr(call->token_pos(), Token::kEQ_STRICT,
+        StrictCompareInstr(call->source(), Token::kEQ_STRICT,
                            new (Z) Value(left_cid), new (Z) Value(lower_cid),
                            /* number_check = */ false, DeoptId::kNone);
     ReplaceCall(call, check_cid);
@@ -1229,7 +1229,7 @@
 
   const intptr_t kTypeArgsLen = 0;
   StaticCallInstr* new_call = new (Z) StaticCallInstr(
-      call->token_pos(), target, kTypeArgsLen,
+      call->source(), target, kTypeArgsLen,
       Object::null_array(),  // argument_names
       args, call->deopt_id(), call->CallCount(), ICData::kOptimized);
   Environment* copy =
diff --git a/runtime/vm/compiler/backend/block_builder.h b/runtime/vm/compiler/backend/block_builder.h
index 99d3764..f5854b1 100644
--- a/runtime/vm/compiler/backend/block_builder.h
+++ b/runtime/vm/compiler/backend/block_builder.h
@@ -20,6 +20,8 @@
  public:
   BlockBuilder(FlowGraph* flow_graph, BlockEntryInstr* entry)
       : flow_graph_(flow_graph),
+        source_(InstructionSource(flow_graph_->function().token_pos(),
+                                  flow_graph->inlining_id())),
         entry_(entry),
         current_(entry),
         dummy_env_(
@@ -63,7 +65,7 @@
     const auto& function = flow_graph_->function();
     const auto representation = FlowGraph::ReturnRepresentationOf(function);
     ReturnInstr* instr = new ReturnInstr(
-        TokenPos(), value, CompilerState::Current().GetNextDeoptId(),
+        Source(), value, CompilerState::Current().GetNextDeoptId(),
         PcDescriptorsLayout::kInvalidYieldIndex, representation);
     AddInstruction(instr);
     entry_->set_last_instruction(instr);
@@ -88,7 +90,8 @@
                            with_frame ? FPREG : SPREG));
   }
 
-  TokenPosition TokenPos() { return flow_graph_->function().token_pos(); }
+  TokenPosition TokenPos() const { return source_.token_pos; }
+  const InstructionSource& Source() const { return source_; }
 
   Definition* AddNullDefinition() {
     return flow_graph_->GetConstant(Object::ZoneHandle());
@@ -158,7 +161,8 @@
     }
   }
 
-  FlowGraph* flow_graph_;
+  FlowGraph* const flow_graph_;
+  const InstructionSource source_;
   BlockEntryInstr* entry_;
   Instruction* current_;
   Environment* dummy_env_;
diff --git a/runtime/vm/compiler/backend/constant_propagator_test.cc b/runtime/vm/compiler/backend/constant_propagator_test.cc
index b4a9ebe..3eb3218 100644
--- a/runtime/vm/compiler/backend/constant_propagator_test.cc
+++ b/runtime/vm/compiler/backend/constant_propagator_test.cc
@@ -53,15 +53,14 @@
     BlockBuilder builder(H.flow_graph(), b2);
     v1 = H.Phi(b2, {{b1, v0}, {b3, FlowGraphBuilderHelper::kPhiSelfReference}});
     builder.AddPhi(v1);
-    auto v2 = builder.AddDefinition(new EqualityCompareInstr(
-        TokenPosition::kNoSource, Token::kEQ, new Value(v1), new Value(v0),
-        kSmiCid, S.GetNextDeoptId()));
-    builder.AddBranch(
-        new StrictCompareInstr(
-            TokenPosition::kNoSource, Token::kEQ_STRICT, new Value(v2),
-            new Value(H.flow_graph()->GetConstant(Bool::True())),
-            /*needs_number_check=*/false, S.GetNextDeoptId()),
-        b4, b3);
+    auto v2 = builder.AddDefinition(
+        new EqualityCompareInstr(InstructionSource(), Token::kEQ, new Value(v1),
+                                 new Value(v0), kSmiCid, S.GetNextDeoptId()));
+    builder.AddBranch(new StrictCompareInstr(
+                          InstructionSource(), Token::kEQ_STRICT, new Value(v2),
+                          new Value(H.flow_graph()->GetConstant(Bool::True())),
+                          /*needs_number_check=*/false, S.GetNextDeoptId()),
+                      b4, b3);
   }
 
   {
@@ -158,12 +157,12 @@
     BlockBuilder builder(H.flow_graph(), b1);
     auto v0 = builder.AddParameter(/*index=*/0, /*param_offset=*/0,
                                    /*with_frame=*/true, kTagged);
-    builder.AddBranch(new StrictCompareInstr(
-                          TokenPosition::kNoSource, Token::kEQ_STRICT,
-                          new Value(H.IntConstant(1)),
-                          new Value(redundant_phi ? H.IntConstant(1) : v0),
-                          /*needs_number_check=*/false, S.GetNextDeoptId()),
-                      b2, b3);
+    builder.AddBranch(
+        new StrictCompareInstr(
+            InstructionSource(), Token::kEQ_STRICT, new Value(H.IntConstant(1)),
+            new Value(redundant_phi ? H.IntConstant(1) : v0),
+            /*needs_number_check=*/false, S.GetNextDeoptId()),
+        b2, b3);
   }
 
   {
diff --git a/runtime/vm/compiler/backend/flow_graph.cc b/runtime/vm/compiler/backend/flow_graph.cc
index 0b7acfc..7e10988 100644
--- a/runtime/vm/compiler/backend/flow_graph.cc
+++ b/runtime/vm/compiler/backend/flow_graph.cc
@@ -585,13 +585,13 @@
 Instruction* FlowGraph::CreateCheckClass(Definition* to_check,
                                          const Cids& cids,
                                          intptr_t deopt_id,
-                                         TokenPosition token_pos) {
+                                         const InstructionSource& source) {
   if (cids.IsMonomorphic() && cids.MonomorphicReceiverCid() == kSmiCid) {
     return new (zone())
-        CheckSmiInstr(new (zone()) Value(to_check), deopt_id, token_pos);
+        CheckSmiInstr(new (zone()) Value(to_check), deopt_id, source);
   }
   return new (zone())
-      CheckClassInstr(new (zone()) Value(to_check), deopt_id, cids, token_pos);
+      CheckClassInstr(new (zone()) Value(to_check), deopt_id, cids, source);
 }
 
 Definition* FlowGraph::CreateCheckBound(Definition* length,
@@ -612,7 +612,7 @@
 
   Definition* load_type_args = new (zone()) LoadFieldInstr(
       call->Receiver()->CopyWithType(),
-      Slot::GetTypeArgumentsSlotFor(thread(), cls), call->token_pos());
+      Slot::GetTypeArgumentsSlotFor(thread(), cls), call->source());
   InsertBefore(call, load_type_args, call->env(), FlowGraph::kValue);
 
   const AbstractType& type =
@@ -620,7 +620,7 @@
   ASSERT(!type.IsNull());
   const TypeArguments& args = TypeArguments::Handle(zone(), type.arguments());
   Instruction* guard = new (zone()) CheckConditionInstr(
-      new StrictCompareInstr(call->token_pos(), Token::kEQ_STRICT,
+      new StrictCompareInstr(call->source(), Token::kEQ_STRICT,
                              new (zone()) Value(load_type_args),
                              new (zone()) Value(GetConstant(args)),
                              /*needs_number_check=*/false, call->deopt_id()),
@@ -2742,7 +2742,7 @@
   PhiInstr* phi =
       AddPhi(mid_point, condition.oper2, GetConstant(Bool::False()));
   StrictCompareInstr* circuit = new (zone()) StrictCompareInstr(
-      inherit->token_pos(), Token::kEQ_STRICT, new (zone()) Value(phi),
+      inherit->source(), Token::kEQ_STRICT, new (zone()) Value(phi),
       new (zone()) Value(GetConstant(Bool::True())), false,
       DeoptId::kNone);  // don't inherit
 
diff --git a/runtime/vm/compiler/backend/flow_graph.h b/runtime/vm/compiler/backend/flow_graph.h
index a288e47..6f0c5b9 100644
--- a/runtime/vm/compiler/backend/flow_graph.h
+++ b/runtime/vm/compiler/backend/flow_graph.h
@@ -211,7 +211,7 @@
   Instruction* CreateCheckClass(Definition* to_check,
                                 const Cids& cids,
                                 intptr_t deopt_id,
-                                TokenPosition token_pos);
+                                const InstructionSource& source);
 
   Definition* CreateCheckBound(Definition* length,
                                Definition* index,
diff --git a/runtime/vm/compiler/backend/flow_graph_checker.cc b/runtime/vm/compiler/backend/flow_graph_checker.cc
index e6f5b05..67c01d8 100644
--- a/runtime/vm/compiler/backend/flow_graph_checker.cc
+++ b/runtime/vm/compiler/backend/flow_graph_checker.cc
@@ -283,7 +283,8 @@
       ASSERT(instruction->has_inlining_id());
       const intptr_t inlining_id = instruction->inlining_id();
       const auto& function = *inline_id_to_function_[inlining_id];
-      if (!pos.IsWithin(function.token_pos(), function.end_token_pos())) {
+      if (function.end_token_pos().IsReal() &&
+          !pos.IsWithin(function.token_pos(), function.end_token_pos())) {
         TextBuffer buffer(256);
         buffer.Printf("Token position %s is invalid for function %s (%s, %s)",
                       pos.ToCString(), function.ToFullyQualifiedCString(),
@@ -296,9 +297,7 @@
         FATAL("%s", buffer.buffer());
       }
       script_ = function.script();
-      intptr_t line;
-      if (!script_.IsNull() && pos.IsReal() &&
-          !script_.GetTokenLocation(pos, &line)) {
+      if (!script_.IsNull() && !script_.IsValidTokenPosition(pos)) {
         TextBuffer buffer(256);
         buffer.Printf(
             "Token position %s is invalid for script %s of function %s",
diff --git a/runtime/vm/compiler/backend/flow_graph_checker.h b/runtime/vm/compiler/backend/flow_graph_checker.h
index 175fedd..411b1fe 100644
--- a/runtime/vm/compiler/backend/flow_graph_checker.h
+++ b/runtime/vm/compiler/backend/flow_graph_checker.h
@@ -32,9 +32,8 @@
   // Constructs graph checker. The checker uses some custom-made
   // visitation to perform additional checks, and uses the
   // FlowGraphVisitor structure for anything else.
-  explicit FlowGraphChecker(
-      FlowGraph* flow_graph,
-      const GrowableArray<const Function*>& inline_id_to_function)
+  FlowGraphChecker(FlowGraph* flow_graph,
+                   const GrowableArray<const Function*>& inline_id_to_function)
       : FlowGraphVisitor(flow_graph->preorder()),
         flow_graph_(flow_graph),
         inline_id_to_function_(inline_id_to_function),
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler.cc b/runtime/vm/compiler/backend/flow_graph_compiler.cc
index 8ba00e0..521f371 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler.cc
@@ -191,6 +191,10 @@
 #else
   const bool stack_traces_only = false;
 #endif
+  // Make sure the root function is registered at inlining id 0.
+  ASSERT(inline_id_to_function.length() >= 1);
+  ASSERT(inline_id_to_function[0]->raw() ==
+         flow_graph->parsed_function().function().raw());
   code_source_map_builder_ = new (zone_)
       CodeSourceMapBuilder(zone_, stack_traces_only, caller_inline_id,
                            inline_id_to_token_pos, inline_id_to_function);
@@ -287,7 +291,7 @@
 void FlowGraphCompiler::InsertBSSRelocation(BSS::Relocation reloc) {
   const intptr_t offset = assembler()->InsertAlignedRelocation(reloc);
   AddDescriptor(PcDescriptorsLayout::kBSSRelocation, /*pc_offset=*/offset,
-                /*deopt_id=*/DeoptId::kNone, TokenPosition::kNoSource,
+                /*deopt_id=*/DeoptId::kNone, InstructionSource(),
                 /*try_index=*/-1);
 }
 
@@ -488,12 +492,12 @@
 #endif  // defined(DART_PRECOMPILER) || defined(DART_PRECOMPILED_RUNTIME)
 }
 
-void FlowGraphCompiler::EmitCallsiteMetadata(TokenPosition token_pos,
+void FlowGraphCompiler::EmitCallsiteMetadata(const InstructionSource& source,
                                              intptr_t deopt_id,
                                              PcDescriptorsLayout::Kind kind,
                                              LocationSummary* locs,
                                              Environment* env) {
-  AddCurrentDescriptor(kind, deopt_id, token_pos);
+  AddCurrentDescriptor(kind, deopt_id, source);
   RecordSafepoint(locs);
   RecordCatchEntryMoves(env);
   if ((deopt_id != DeoptId::kNone) && !FLAG_precompiled_mode) {
@@ -505,16 +509,16 @@
     } else {
       // Add deoptimization continuation point after the call and before the
       // arguments are removed.
-      AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after,
-                           token_pos);
+      AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after, source);
     }
   }
 }
 
-void FlowGraphCompiler::EmitYieldPositionMetadata(TokenPosition token_pos,
-                                                  intptr_t yield_index) {
+void FlowGraphCompiler::EmitYieldPositionMetadata(
+    const InstructionSource& source,
+    intptr_t yield_index) {
   AddDescriptor(PcDescriptorsLayout::kOther, assembler()->CodeSize(),
-                DeoptId::kNone, token_pos, CurrentTryIndex(), yield_index);
+                DeoptId::kNone, source, CurrentTryIndex(), yield_index);
 }
 
 void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) {
@@ -524,24 +528,28 @@
       // PcDescriptor corresponding to their deopt id. GotoInstr records its
       // own so that it can control the placement.
       AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, instr->deopt_id(),
-                           instr->token_pos());
+                           instr->source());
     }
     AllocateRegistersLocally(instr);
   }
 }
 
 void FlowGraphCompiler::EmitSourceLine(Instruction* instr) {
-  if (!instr->token_pos().IsReal() || (instr->env() == NULL)) {
+  if (!instr->token_pos().IsReal()) {
     return;
   }
-  const Script& script =
-      Script::Handle(zone(), instr->env()->function().script());
-  intptr_t line_nr, column_nr;
-  if (script.GetTokenLocation(instr->token_pos(), &line_nr, &column_nr)) {
+  const InstructionSource& source = instr->source();
+  const intptr_t inlining_id = source.inlining_id < 0 ? 0 : source.inlining_id;
+  const Function& function =
+      *code_source_map_builder_->inline_id_to_function()[inlining_id];
+  ASSERT(instr->env() == nullptr ||
+         instr->env()->function().raw() == function.raw());
+  const auto& script = Script::Handle(zone(), function.script());
+  intptr_t line_nr;
+  if (script.GetTokenLocation(source.token_pos, &line_nr)) {
     const String& line = String::Handle(zone(), script.GetLine(line_nr));
     assembler()->Comment("Line %" Pd " in '%s':\n           %s", line_nr,
-                         instr->env()->function().ToFullyQualifiedCString(),
-                         line.ToCString());
+                         function.ToFullyQualifiedCString(), line.ToCString());
   }
 }
 
@@ -618,7 +626,7 @@
       }
     }
 
-    BeginCodeSourceRange(entry->inlining_id(), entry->token_pos());
+    BeginCodeSourceRange(entry->source());
     ASSERT(pending_deoptimization_env_ == NULL);
     pending_deoptimization_env_ = entry->env();
     set_current_instruction(entry);
@@ -627,7 +635,7 @@
     StatsEnd(entry);
     set_current_instruction(nullptr);
     pending_deoptimization_env_ = NULL;
-    EndCodeSourceRange(entry->token_pos());
+    EndCodeSourceRange(entry->source());
 
     if (skip_body_compilation()) {
       ASSERT(entry == flow_graph().graph_entry()->normal_entry());
@@ -650,7 +658,7 @@
       if (instr->IsParallelMove()) {
         parallel_move_resolver_.EmitNativeCode(instr->AsParallelMove());
       } else {
-        BeginCodeSourceRange(instr->inlining_id(), instr->token_pos());
+        BeginCodeSourceRange(instr->source());
         EmitInstructionPrologue(instr);
         ASSERT(pending_deoptimization_env_ == NULL);
         pending_deoptimization_env_ = instr->env();
@@ -664,7 +672,7 @@
         } else {
           EmitInstructionEpilogue(instr);
         }
-        EndCodeSourceRange(instr->token_pos());
+        EndCodeSourceRange(instr->source());
       }
 
 #if defined(DEBUG)
@@ -755,24 +763,25 @@
 #endif  // defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
     set_current_instruction(slow_path->instruction());
     SpecialStatsBegin(stats_tag);
-    BeginCodeSourceRange(slow_path->instruction()->inlining_id(),
-                         slow_path->instruction()->token_pos());
+    BeginCodeSourceRange(slow_path->instruction()->source());
     DEBUG_ONLY(current_instruction_ = slow_path->instruction());
     slow_path->GenerateCode(this);
     DEBUG_ONLY(current_instruction_ = nullptr);
-    EndCodeSourceRange(slow_path->instruction()->token_pos());
+    EndCodeSourceRange(slow_path->instruction()->source());
     SpecialStatsEnd(stats_tag);
     set_current_instruction(nullptr);
   }
+  // All code generated by deferred deopt info is treated as in the root
+  // function.
+  const InstructionSource deopt_source(TokenPosition::kDeferredDeoptInfo,
+                                       /*inlining_id=*/0);
   for (intptr_t i = 0; i < deopt_infos_.length(); i++) {
-    // Code generated from CompilerDeoptInfo objects is considered in the
-    // root function.
-    BeginCodeSourceRange(/*inline_id=*/0, TokenPosition::kDeferredDeoptInfo);
+    BeginCodeSourceRange(deopt_source);
 #if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
     assembler()->set_lr_state(lr_state);
 #endif  // defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
     deopt_infos_[i]->GenerateCode(this, i);
-    EndCodeSourceRange(TokenPosition::kDeferredDeoptInfo);
+    EndCodeSourceRange(deopt_source);
   }
 }
 
@@ -794,25 +803,28 @@
 void FlowGraphCompiler::AddDescriptor(PcDescriptorsLayout::Kind kind,
                                       intptr_t pc_offset,
                                       intptr_t deopt_id,
-                                      TokenPosition token_pos,
+                                      const InstructionSource& source,
                                       intptr_t try_index,
                                       intptr_t yield_index) {
-  code_source_map_builder_->NoteDescriptor(kind, pc_offset, token_pos);
+  code_source_map_builder_->NoteDescriptor(kind, pc_offset, source);
   // Don't emit deopt-descriptors in AOT mode.
   if (FLAG_precompiled_mode && (kind == PcDescriptorsLayout::kDeopt)) return;
-  pc_descriptors_list_->AddDescriptor(kind, pc_offset, deopt_id, token_pos,
+  // Use the token position of the original call in the root function if source
+  // has an inlining id.
+  const auto& root_pos = code_source_map_builder_->RootPosition(source);
+  pc_descriptors_list_->AddDescriptor(kind, pc_offset, deopt_id, root_pos,
                                       try_index, yield_index);
 }
 
 // Uses current pc position and try-index.
 void FlowGraphCompiler::AddCurrentDescriptor(PcDescriptorsLayout::Kind kind,
                                              intptr_t deopt_id,
-                                             TokenPosition token_pos) {
-  AddDescriptor(kind, assembler()->CodeSize(), deopt_id, token_pos,
+                                             const InstructionSource& source) {
+  AddDescriptor(kind, assembler()->CodeSize(), deopt_id, source,
                 CurrentTryIndex());
 }
 
-void FlowGraphCompiler::AddNullCheck(TokenPosition token_pos,
+void FlowGraphCompiler::AddNullCheck(const InstructionSource& source,
                                      const String& name) {
 #if defined(DART_PRECOMPILER)
   // If we are generating an AOT snapshot and have DWARF stack traces enabled,
@@ -823,7 +835,7 @@
 #endif
   const intptr_t name_index =
       assembler()->object_pool_builder().FindObject(name);
-  code_source_map_builder_->NoteNullCheck(assembler()->CodeSize(), token_pos,
+  code_source_map_builder_->NoteNullCheck(assembler()->CodeSize(), source,
                                           name_index);
 }
 
@@ -1375,14 +1387,14 @@
   return complete;
 }
 
-void FlowGraphCompiler::GenerateStubCall(TokenPosition token_pos,
+void FlowGraphCompiler::GenerateStubCall(const InstructionSource& source,
                                          const Code& stub,
                                          PcDescriptorsLayout::Kind kind,
                                          LocationSummary* locs,
                                          intptr_t deopt_id,
                                          Environment* env) {
   EmitCallToStub(stub);
-  EmitCallsiteMetadata(token_pos, deopt_id, kind, locs, env);
+  EmitCallsiteMetadata(source, deopt_id, kind, locs, env);
 }
 
 static const Code& StubEntryFor(const ICData& ic_data, bool optimized) {
@@ -1414,7 +1426,7 @@
 }
 
 void FlowGraphCompiler::GenerateInstanceCall(intptr_t deopt_id,
-                                             TokenPosition token_pos,
+                                             const InstructionSource& source,
                                              LocationSummary* locs,
                                              const ICData& ic_data_in,
                                              Code::EntryKind entry_kind,
@@ -1422,7 +1434,7 @@
   ICData& ic_data = ICData::ZoneHandle(ic_data_in.Original());
   if (FLAG_precompiled_mode) {
     ic_data = ic_data.AsUnaryClassChecks();
-    EmitInstanceCallAOT(ic_data, deopt_id, token_pos, locs, entry_kind,
+    EmitInstanceCallAOT(ic_data, deopt_id, source, locs, entry_kind,
                         receiver_can_be_smi);
     return;
   }
@@ -1432,22 +1444,22 @@
     // function entry.
     ASSERT(may_reoptimize() || flow_graph().IsCompiledForOsr());
     EmitOptimizedInstanceCall(StubEntryFor(ic_data, /*optimized=*/true),
-                              ic_data, deopt_id, token_pos, locs, entry_kind);
+                              ic_data, deopt_id, source, locs, entry_kind);
     return;
   }
 
   if (is_optimizing()) {
-    EmitMegamorphicInstanceCall(ic_data_in, deopt_id, token_pos, locs,
+    EmitMegamorphicInstanceCall(ic_data_in, deopt_id, source, locs,
                                 kInvalidTryIndex);
     return;
   }
 
   EmitInstanceCallJIT(StubEntryFor(ic_data, /*optimized=*/false), ic_data,
-                      deopt_id, token_pos, locs, entry_kind);
+                      deopt_id, source, locs, entry_kind);
 }
 
 void FlowGraphCompiler::GenerateStaticCall(intptr_t deopt_id,
-                                           TokenPosition token_pos,
+                                           const InstructionSource& source,
                                            const Function& function,
                                            ArgumentsInfo args_info,
                                            LocationSummary* locs,
@@ -1468,7 +1480,7 @@
   // optimized static calls.
   if (is_optimizing() && (!ForcedOptimization() || FLAG_precompiled_mode)) {
     EmitOptimizedStaticCall(function, arguments_descriptor,
-                            args_info.size_with_type_args, deopt_id, token_pos,
+                            args_info.size_with_type_args, deopt_id, source,
                             locs, entry_kind);
   } else {
     ICData& call_ic_data = ICData::ZoneHandle(zone(), ic_data.raw());
@@ -1480,9 +1492,9 @@
               ->raw();
       call_ic_data = call_ic_data.Original();
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRewind, deopt_id, token_pos);
-    EmitUnoptimizedStaticCall(args_info.size_with_type_args, deopt_id,
-                              token_pos, locs, call_ic_data, entry_kind);
+    AddCurrentDescriptor(PcDescriptorsLayout::kRewind, deopt_id, source);
+    EmitUnoptimizedStaticCall(args_info.size_with_type_args, deopt_id, source,
+                              locs, call_ic_data, entry_kind);
   }
 }
 
@@ -1687,22 +1699,23 @@
 }
 
 ParallelMoveResolver::ParallelMoveResolver(FlowGraphCompiler* compiler)
-    : compiler_(compiler), moves_(32), inlining_id_(-1) {}
+    : compiler_(compiler), moves_(32) {}
 
 void ParallelMoveResolver::EmitNativeCode(ParallelMoveInstr* parallel_move) {
   ASSERT(moves_.is_empty());
-  inlining_id_ = parallel_move->inlining_id();
 
   // Build up a worklist of moves.
   BuildInitialMoveList(parallel_move);
 
+  const InstructionSource& move_source = InstructionSource(
+      TokenPosition::kParallelMove, parallel_move->inlining_id());
   for (int i = 0; i < moves_.length(); ++i) {
     const MoveOperands& move = *moves_[i];
     // Skip constants to perform them last.  They don't block other moves
     // and skipping such moves with register destinations keeps those
     // registers free for the whole algorithm.
     if (!move.IsEliminated() && !move.src().IsConstant()) {
-      PerformMove(i);
+      PerformMove(move_source, i);
     }
   }
 
@@ -1711,15 +1724,13 @@
     const MoveOperands& move = *moves_[i];
     if (!move.IsEliminated()) {
       ASSERT(move.src().IsConstant());
-      compiler_->BeginCodeSourceRange(inlining_id_,
-                                      TokenPosition::kParallelMove);
+      compiler_->BeginCodeSourceRange(move_source);
       EmitMove(i);
-      compiler_->EndCodeSourceRange(TokenPosition::kParallelMove);
+      compiler_->EndCodeSourceRange(move_source);
     }
   }
 
   moves_.Clear();
-  inlining_id_ = -1;
 }
 
 void ParallelMoveResolver::BuildInitialMoveList(
@@ -1734,7 +1745,8 @@
   }
 }
 
-void ParallelMoveResolver::PerformMove(int index) {
+void ParallelMoveResolver::PerformMove(const InstructionSource& source,
+                                       int index) {
   // Each call to this function performs a move and deletes it from the move
   // graph.  We first recursively perform any move blocking this one.  We
   // mark a move as "pending" on entry to PerformMove in order to detect
@@ -1767,7 +1779,7 @@
       // only a single incoming edge to an operand, this move must also be
       // involved in the same cycle.  In that case, the blocking move will
       // be created but will be "pending" when we return from PerformMove.
-      PerformMove(i);
+      PerformMove(source, i);
     }
   }
 
@@ -1789,18 +1801,17 @@
     const MoveOperands& other_move = *moves_[i];
     if (other_move.Blocks(destination)) {
       ASSERT(other_move.IsPending());
-      compiler_->BeginCodeSourceRange(inlining_id_,
-                                      TokenPosition::kParallelMove);
+      compiler_->BeginCodeSourceRange(source);
       EmitSwap(index);
-      compiler_->EndCodeSourceRange(TokenPosition::kParallelMove);
+      compiler_->EndCodeSourceRange(source);
       return;
     }
   }
 
   // This move is not blocked.
-  compiler_->BeginCodeSourceRange(inlining_id_, TokenPosition::kParallelMove);
+  compiler_->BeginCodeSourceRange(source);
   EmitMove(index);
-  compiler_->EndCodeSourceRange(TokenPosition::kParallelMove);
+  compiler_->EndCodeSourceRange(source);
 }
 
 void ParallelMoveResolver::EmitMove(int index) {
@@ -2075,15 +2086,13 @@
   }
 }
 
-void FlowGraphCompiler::BeginCodeSourceRange(intptr_t inline_id,
-                                             const TokenPosition& token_pos) {
+void FlowGraphCompiler::BeginCodeSourceRange(const InstructionSource& source) {
   code_source_map_builder_->BeginCodeSourceRange(assembler()->CodeSize(),
-                                                 inline_id, token_pos);
+                                                 source);
 }
 
-void FlowGraphCompiler::EndCodeSourceRange(const TokenPosition& token_pos) {
-  code_source_map_builder_->EndCodeSourceRange(assembler()->CodeSize(),
-                                               token_pos);
+void FlowGraphCompiler::EndCodeSourceRange(const InstructionSource& source) {
+  code_source_map_builder_->EndCodeSourceRange(assembler()->CodeSize(), source);
 }
 
 const CallTargets* FlowGraphCompiler::ResolveCallTargetsForReceiverCid(
@@ -2139,7 +2148,7 @@
     const CallTargets& targets,
     ArgumentsInfo args_info,
     intptr_t deopt_id,
-    TokenPosition token_pos,
+    const InstructionSource& source,
     LocationSummary* locs,
     bool complete,
     intptr_t total_ic_calls,
@@ -2152,7 +2161,7 @@
     EmitTestAndCall(targets, call->function_name(), args_info,
                     deopt,  // No cid match.
                     &ok,    // Found cid.
-                    deopt_id, token_pos, locs, complete, total_ic_calls,
+                    deopt_id, source, locs, complete, total_ic_calls,
                     call->entry_kind());
     assembler()->Bind(&ok);
   } else {
@@ -2161,13 +2170,13 @@
       EmitTestAndCall(targets, call->function_name(), args_info,
                       NULL,  // No cid match.
                       &ok,   // Found cid.
-                      deopt_id, token_pos, locs, true, total_ic_calls,
+                      deopt_id, source, locs, true, total_ic_calls,
                       call->entry_kind());
       assembler()->Bind(&ok);
     } else {
       const ICData& unary_checks =
           ICData::ZoneHandle(zone(), call->ic_data()->AsUnaryClassChecks());
-      EmitInstanceCallAOT(unary_checks, deopt_id, token_pos, locs,
+      EmitInstanceCallAOT(unary_checks, deopt_id, source, locs,
                           call->entry_kind(), receiver_can_be_smi);
     }
   }
@@ -2192,7 +2201,7 @@
                                         compiler::Label* failed,
                                         compiler::Label* match_found,
                                         intptr_t deopt_id,
-                                        TokenPosition token_index,
+                                        const InstructionSource& source_index,
                                         LocationSummary* locs,
                                         bool complete,
                                         intptr_t total_ic_calls,
@@ -2244,7 +2253,7 @@
     // Do not use the code from the function, but let the code be patched so
     // that we can record the outgoing edges to other code.
     const Function& function = *targets.TargetAt(smi_case)->target;
-    GenerateStaticDartCall(deopt_id, token_index, PcDescriptorsLayout::kOther,
+    GenerateStaticDartCall(deopt_id, source_index, PcDescriptorsLayout::kOther,
                            locs, function, entry_kind);
     __ Drop(args_info.size_with_type_args);
     if (match_found != NULL) {
@@ -2294,7 +2303,7 @@
     // Do not use the code from the function, but let the code be patched so
     // that we can record the outgoing edges to other code.
     const Function& function = *targets.TargetAt(i)->target;
-    GenerateStaticDartCall(deopt_id, token_index, PcDescriptorsLayout::kOther,
+    GenerateStaticDartCall(deopt_id, source_index, PcDescriptorsLayout::kOther,
                            locs, function, entry_kind);
     __ Drop(args_info.size_with_type_args);
     if (!is_last_check || add_megamorphic_call) {
@@ -2305,7 +2314,7 @@
   if (add_megamorphic_call) {
     int try_index = kInvalidTryIndex;
     EmitMegamorphicInstanceCall(function_name, arguments_descriptor, deopt_id,
-                                token_index, locs, try_index);
+                                source_index, locs, try_index);
   }
 }
 
@@ -2393,7 +2402,7 @@
 //
 // See [GenerateInlineInstanceof] for calling convention.
 SubtypeTestCachePtr FlowGraphCompiler::GenerateFunctionTypeTest(
-    TokenPosition token_pos,
+    const InstructionSource& source,
     const AbstractType& type,
     compiler::Label* is_instance_lbl,
     compiler::Label* is_not_instance_lbl) {
@@ -2424,14 +2433,14 @@
 // may fall through to it. Otherwise, this inline code will jump to the label
 // is_instance or to the label is_not_instance.
 SubtypeTestCachePtr FlowGraphCompiler::GenerateInlineInstanceof(
-    TokenPosition token_pos,
+    const InstructionSource& source,
     const AbstractType& type,
     compiler::Label* is_instance_lbl,
     compiler::Label* is_not_instance_lbl) {
   __ Comment("InlineInstanceof");
 
   if (type.IsFunctionType()) {
-    return GenerateFunctionTypeTest(token_pos, type, is_instance_lbl,
+    return GenerateFunctionTypeTest(source, type, is_instance_lbl,
                                     is_not_instance_lbl);
   }
 
@@ -2442,21 +2451,21 @@
     // a parameterized class.
     if (type_class.NumTypeArguments() > 0) {
       return GenerateInstantiatedTypeWithArgumentsTest(
-          token_pos, type, is_instance_lbl, is_not_instance_lbl);
+          source, type, is_instance_lbl, is_not_instance_lbl);
       // Fall through to runtime call.
     }
     const bool has_fall_through = GenerateInstantiatedTypeNoArgumentsTest(
-        token_pos, type, is_instance_lbl, is_not_instance_lbl);
+        source, type, is_instance_lbl, is_not_instance_lbl);
     if (has_fall_through) {
       // If test non-conclusive so far, try the inlined type-test cache.
       // 'type' is known at compile time.
       return GenerateSubtype1TestCacheLookup(
-          token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
+          source, type_class, is_instance_lbl, is_not_instance_lbl);
     } else {
       return SubtypeTestCache::null();
     }
   }
-  return GenerateUninstantiatedTypeTest(token_pos, type, is_instance_lbl,
+  return GenerateUninstantiatedTypeTest(source, type, is_instance_lbl,
                                         is_not_instance_lbl);
 }
 
@@ -2482,7 +2491,7 @@
 //
 // See [GenerateInlineInstanceof] for calling convention.
 SubtypeTestCachePtr FlowGraphCompiler::GenerateSubtype1TestCacheLookup(
-    TokenPosition token_pos,
+    const InstructionSource& source,
     const Class& type_class,
     compiler::Label* is_instance_lbl,
     compiler::Label* is_not_instance_lbl) {
@@ -2540,7 +2549,7 @@
 // See [GenerateInlineInstanceof] for calling convention.
 SubtypeTestCachePtr
 FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest(
-    TokenPosition token_pos,
+    const InstructionSource& source,
     const AbstractType& type,
     compiler::Label* is_instance_lbl,
     compiler::Label* is_not_instance_lbl) {
@@ -2574,8 +2583,8 @@
     if (IsListClass(type_class)) {
       GenerateListTypeCheck(kScratchReg, is_instance_lbl);
     }
-    return GenerateSubtype1TestCacheLookup(
-        token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
+    return GenerateSubtype1TestCacheLookup(source, type_class, is_instance_lbl,
+                                           is_not_instance_lbl);
   }
   // If one type argument only, check if type argument is a top type.
   if (type_arguments.Length() == 1) {
@@ -2584,7 +2593,7 @@
     if (tp_argument.IsTopTypeForSubtyping()) {
       // Instance class test only necessary.
       return GenerateSubtype1TestCacheLookup(
-          token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
+          source, type_class, is_instance_lbl, is_not_instance_lbl);
     }
   }
 
@@ -2604,7 +2613,7 @@
 //
 // Uses kScratchReg, so this implementation cannot be shared with IA32.
 bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest(
-    TokenPosition token_pos,
+    const InstructionSource& source,
     const AbstractType& type,
     compiler::Label* is_instance_lbl,
     compiler::Label* is_not_instance_lbl) {
@@ -2662,7 +2671,7 @@
 //
 // See [GenerateInlineInstanceof] for calling convention.
 SubtypeTestCachePtr FlowGraphCompiler::GenerateUninstantiatedTypeTest(
-    TokenPosition token_pos,
+    const InstructionSource& source,
     const AbstractType& type,
     compiler::Label* is_instance_lbl,
     compiler::Label* is_not_instance_lbl) {
@@ -2745,7 +2754,7 @@
 // - kFunctionTypeArgumentsReg: function type arguments or raw_null.
 // Returns:
 // - true or false in kInstanceOfResultReg.
-void FlowGraphCompiler::GenerateInstanceOf(TokenPosition token_pos,
+void FlowGraphCompiler::GenerateInstanceOf(const InstructionSource& source,
                                            intptr_t deopt_id,
                                            const AbstractType& type,
                                            LocationSummary* locs) {
@@ -2774,7 +2783,7 @@
   // kInstanceReg, kInstantiatorTypeArgumentsReg, and kFunctionTypeArgumentsReg
   // are preserved across the call.
   test_cache =
-      GenerateInlineInstanceof(token_pos, type, &is_instance, &is_not_instance);
+      GenerateInlineInstanceof(source, type, &is_instance, &is_not_instance);
 
   // test_cache is null if there is no fall-through.
   compiler::Label done;
@@ -2782,7 +2791,7 @@
     // Generate Runtime call.
     __ LoadUniqueObject(TypeTestABI::kDstTypeReg, type);
     __ LoadUniqueObject(TypeTestABI::kSubtypeTestCacheReg, test_cache);
-    GenerateStubCall(token_pos, StubCode::InstanceOf(),
+    GenerateStubCall(source, StubCode::InstanceOf(),
                      /*kind=*/PcDescriptorsLayout::kOther, locs, deopt_id);
     __ Jump(&done, compiler::Assembler::kNearJump);
   }
@@ -2843,12 +2852,13 @@
 //
 // Performance notes: positive checks must be quick, negative checks can be slow
 // as they throw an exception.
-void FlowGraphCompiler::GenerateAssertAssignable(CompileType* receiver_type,
-                                                 TokenPosition token_pos,
-                                                 intptr_t deopt_id,
-                                                 const String& dst_name,
-                                                 LocationSummary* locs) {
-  ASSERT(!token_pos.IsClassifying());
+void FlowGraphCompiler::GenerateAssertAssignable(
+    CompileType* receiver_type,
+    const InstructionSource& source,
+    intptr_t deopt_id,
+    const String& dst_name,
+    LocationSummary* locs) {
+  ASSERT(!source.token_pos.IsClassifying());
   ASSERT(CheckAssertAssignableTypeTestingABILocations(*locs));
 
   // Non-null if we have a constant destination type.
@@ -2878,14 +2888,14 @@
     }
   }
 
-  GenerateTTSCall(token_pos, deopt_id, type_reg, dst_type, dst_name, locs);
+  GenerateTTSCall(source, deopt_id, type_reg, dst_type, dst_name, locs);
   __ Bind(&done);
 }
 
 // Generates a call to the type testing stub for the type in [reg_with_type].
 // Provide a non-null [dst_type] and [dst_name] if they are known at compile
 // time.
-void FlowGraphCompiler::GenerateTTSCall(TokenPosition token_pos,
+void FlowGraphCompiler::GenerateTTSCall(const InstructionSource& source,
                                         intptr_t deopt_id,
                                         Register reg_with_type,
                                         const AbstractType& dst_type,
@@ -2917,7 +2927,7 @@
   } else {
     GenerateIndirectTTSCall(assembler(), reg_with_type, sub_type_cache_index);
   }
-  EmitCallsiteMetadata(token_pos, deopt_id, PcDescriptorsLayout::kOther, locs);
+  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kOther, locs);
 }
 
 // Optimize assignable type check by adding inlined tests for:
@@ -3160,7 +3170,7 @@
   const intptr_t deopt_id = instruction()->deopt_id();
   compiler->AddDescriptor(PcDescriptorsLayout::kOther,
                           compiler->assembler()->CodeSize(), deopt_id,
-                          instruction()->token_pos(), try_index_);
+                          instruction()->source(), try_index_);
   AddMetadataForRuntimeCall(compiler);
   compiler->RecordSafepoint(locs, num_args);
   if (!FLAG_precompiled_mode || (try_index_ != kInvalidTryIndex) ||
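Aside for readers following this refactor: judging only from how the new parameter is constructed and read in the hunks above (InstructionSource(TokenPosition, inlining_id), source.token_pos, source.inlining_id), the type appears to bundle a token position with an inlining id. A minimal sketch of that shape, assuming exactly those two fields; the real definition lives elsewhere in the SDK and may differ:

    // Sketch only -- inferred from usage in this diff, not the SDK definition.
    // TokenPosition and intptr_t come from the VM's existing headers.
    // A token position paired with the inlining id of the function that owns
    // it; callers above treat a negative inlining id as "root function"
    // (see EmitSourceLine, which clamps it to 0).
    struct InstructionSource {
      InstructionSource(TokenPosition pos, intptr_t id)
          : token_pos(pos), inlining_id(id) {}
      const TokenPosition token_pos;
      const intptr_t inlining_id;
    };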
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler.h b/runtime/vm/compiler/backend/flow_graph_compiler.h
index 13d9fda..69db3f6 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler.h
+++ b/runtime/vm/compiler/backend/flow_graph_compiler.h
@@ -134,7 +134,7 @@
 
   // Perform the move at the moves_ index in question (possibly requiring
   // other moves to satisfy dependencies).
-  void PerformMove(int index);
+  void PerformMove(const InstructionSource& source, int index);
 
   // Emit a move and remove it from the move graph.
   void EmitMove(int index);
@@ -162,8 +162,6 @@
 
   // List of moves not yet resolved.
   GrowableArray<MoveOperands*> moves_;
-  // Inlining id for the current instruction.
-  intptr_t inlining_id_;
 };
 
 // Used for describing a deoptimization point after call (lazy deoptimization).
@@ -591,7 +589,7 @@
       const LocationSummary& locs);
 
   void GenerateAssertAssignable(CompileType* receiver_type,
-                                TokenPosition token_pos,
+                                const InstructionSource& source,
                                 intptr_t deopt_id,
                                 const String& dst_name,
                                 LocationSummary* locs);
@@ -601,7 +599,7 @@
                                                const AbstractType& dst_type,
                                                compiler::Label* done);
 
-  void GenerateTTSCall(TokenPosition token_pos,
+  void GenerateTTSCall(const InstructionSource& source,
                        intptr_t deopt_id,
                        Register reg_with_type,
                        const AbstractType& dst_type,
@@ -613,26 +611,26 @@
                                       intptr_t sub_type_cache_index);
 #endif
 
-  void GenerateRuntimeCall(TokenPosition token_pos,
+  void GenerateRuntimeCall(const InstructionSource& source,
                            intptr_t deopt_id,
                            const RuntimeEntry& entry,
                            intptr_t argument_count,
                            LocationSummary* locs);
 
-  void GenerateStubCall(TokenPosition token_pos,
+  void GenerateStubCall(const InstructionSource& source,
                         const Code& stub,
                         PcDescriptorsLayout::Kind kind,
                         LocationSummary* locs,
                         intptr_t deopt_id = DeoptId::kNone,
                         Environment* env = nullptr);
 
-  void GeneratePatchableCall(TokenPosition token_pos,
+  void GeneratePatchableCall(const InstructionSource& source,
                              const Code& stub,
                              PcDescriptorsLayout::Kind kind,
                              LocationSummary* locs);
 
   void GenerateDartCall(intptr_t deopt_id,
-                        TokenPosition token_pos,
+                        const InstructionSource& source,
                         const Code& stub,
                         PcDescriptorsLayout::Kind kind,
                         LocationSummary* locs,
@@ -640,19 +638,19 @@
 
   void GenerateStaticDartCall(
       intptr_t deopt_id,
-      TokenPosition token_pos,
+      const InstructionSource& source,
       PcDescriptorsLayout::Kind kind,
       LocationSummary* locs,
       const Function& target,
       Code::EntryKind entry_kind = Code::EntryKind::kNormal);
 
-  void GenerateInstanceOf(TokenPosition token_pos,
+  void GenerateInstanceOf(const InstructionSource& source,
                           intptr_t deopt_id,
                           const AbstractType& type,
                           LocationSummary* locs);
 
   void GenerateInstanceCall(intptr_t deopt_id,
-                            TokenPosition token_pos,
+                            const InstructionSource& source,
                             LocationSummary* locs,
                             const ICData& ic_data,
                             Code::EntryKind entry_kind,
@@ -660,7 +658,7 @@
 
   void GenerateStaticCall(
       intptr_t deopt_id,
-      TokenPosition token_pos,
+      const InstructionSource& source,
       const Function& function,
       ArgumentsInfo args_info,
       LocationSummary* locs,
@@ -704,14 +702,14 @@
       const Code& stub,
       const ICData& ic_data,
       intptr_t deopt_id,
-      TokenPosition token_pos,
+      const InstructionSource& source,
       LocationSummary* locs,
       Code::EntryKind entry_kind = Code::EntryKind::kNormal);
 
   void EmitInstanceCallJIT(const Code& stub,
                            const ICData& ic_data,
                            intptr_t deopt_id,
-                           TokenPosition token_pos,
+                           const InstructionSource& source,
                            LocationSummary* locs,
                            Code::EntryKind entry_kind);
 
@@ -719,7 +717,7 @@
                                    const CallTargets& targets,
                                    ArgumentsInfo args_info,
                                    intptr_t deopt_id,
-                                   TokenPosition token_pos,
+                                   const InstructionSource& source,
                                    LocationSummary* locs,
                                    bool complete,
                                    intptr_t total_call_count,
@@ -727,14 +725,14 @@
 
   void EmitMegamorphicInstanceCall(const ICData& icdata,
                                    intptr_t deopt_id,
-                                   TokenPosition token_pos,
+                                   const InstructionSource& source,
                                    LocationSummary* locs,
                                    intptr_t try_index,
                                    intptr_t slow_path_argument_count = 0) {
     const String& name = String::Handle(icdata.target_name());
     const Array& arguments_descriptor =
         Array::Handle(icdata.arguments_descriptor());
-    EmitMegamorphicInstanceCall(name, arguments_descriptor, deopt_id, token_pos,
+    EmitMegamorphicInstanceCall(name, arguments_descriptor, deopt_id, source,
                                 locs, try_index);
   }
 
@@ -742,7 +740,7 @@
   void EmitMegamorphicInstanceCall(const String& function_name,
                                    const Array& arguments_descriptor,
                                    intptr_t deopt_id,
-                                   TokenPosition token_pos,
+                                   const InstructionSource& source,
                                    LocationSummary* locs,
                                    intptr_t try_index,
                                    intptr_t slow_path_argument_count = 0);
@@ -750,7 +748,7 @@
   void EmitInstanceCallAOT(
       const ICData& ic_data,
       intptr_t deopt_id,
-      TokenPosition token_pos,
+      const InstructionSource& source,
       LocationSummary* locs,
       Code::EntryKind entry_kind = Code::EntryKind::kNormal,
       bool receiver_can_be_smi = true);
@@ -761,7 +759,7 @@
                        compiler::Label* failed,
                        compiler::Label* match_found,
                        intptr_t deopt_id,
-                       TokenPosition token_index,
+                       const InstructionSource& source_index,
                        LocationSummary* locs,
                        bool complete,
                        intptr_t total_ic_calls,
@@ -774,12 +772,12 @@
   Condition EmitEqualityRegConstCompare(Register reg,
                                         const Object& obj,
                                         bool needs_number_check,
-                                        TokenPosition token_pos,
+                                        const InstructionSource& source,
                                         intptr_t deopt_id);
   Condition EmitEqualityRegRegCompare(Register left,
                                       Register right,
                                       bool needs_number_check,
-                                      TokenPosition token_pos,
+                                      const InstructionSource& source,
                                       intptr_t deopt_id);
   Condition EmitBoolTest(Register value, BranchLabels labels, bool invert);
 
@@ -802,13 +800,14 @@
   //
   // If [env] is not `nullptr` it will be used instead of the
   // `pending_deoptimization_env`.
-  void EmitCallsiteMetadata(TokenPosition token_pos,
+  void EmitCallsiteMetadata(const InstructionSource& source,
                             intptr_t deopt_id,
                             PcDescriptorsLayout::Kind kind,
                             LocationSummary* locs,
                             Environment* env = nullptr);
 
-  void EmitYieldPositionMetadata(TokenPosition token_pos, intptr_t yield_index);
+  void EmitYieldPositionMetadata(const InstructionSource& source,
+                                 intptr_t yield_index);
 
   void EmitComment(Instruction* instr);
 
@@ -844,17 +843,17 @@
   void SetNeedsStackTrace(intptr_t try_index);
   void AddCurrentDescriptor(PcDescriptorsLayout::Kind kind,
                             intptr_t deopt_id,
-                            TokenPosition token_pos);
+                            const InstructionSource& source);
   void AddDescriptor(
       PcDescriptorsLayout::Kind kind,
       intptr_t pc_offset,
       intptr_t deopt_id,
-      TokenPosition token_pos,
+      const InstructionSource& source,
       intptr_t try_index,
       intptr_t yield_index = PcDescriptorsLayout::kInvalidYieldIndex);
 
   // Add NullCheck information for the current PC.
-  void AddNullCheck(TokenPosition token_pos, const String& name);
+  void AddNullCheck(const InstructionSource& source, const String& name);
 
   void RecordSafepoint(LocationSummary* locs,
                        intptr_t slow_path_argument_count = 0);
@@ -956,8 +955,8 @@
 
   ArrayPtr InliningIdToFunction() const;
 
-  void BeginCodeSourceRange(intptr_t inline_id, const TokenPosition& token_pos);
-  void EndCodeSourceRange(const TokenPosition& token_pos);
+  void BeginCodeSourceRange(const InstructionSource& source);
+  void EndCodeSourceRange(const InstructionSource& source);
 
   static bool LookupMethodFor(int class_id,
                               const String& name,
@@ -1017,14 +1016,14 @@
       const Array& arguments_descriptor,
       intptr_t size_with_type_args,
       intptr_t deopt_id,
-      TokenPosition token_pos,
+      const InstructionSource& source,
       LocationSummary* locs,
       Code::EntryKind entry_kind = Code::EntryKind::kNormal);
 
   void EmitUnoptimizedStaticCall(
       intptr_t size_with_type_args,
       intptr_t deopt_id,
-      TokenPosition token_pos,
+      const InstructionSource& source,
       LocationSummary* locs,
       const ICData& ic_data,
       Code::EntryKind entry_kind = Code::EntryKind::kNormal);
@@ -1052,37 +1051,37 @@
                      compiler::Label* is_not_instance_lbl);
 
   SubtypeTestCachePtr GenerateInlineInstanceof(
-      TokenPosition token_pos,
+      const InstructionSource& source,
       const AbstractType& type,
       compiler::Label* is_instance_lbl,
       compiler::Label* is_not_instance_lbl);
 
   SubtypeTestCachePtr GenerateInstantiatedTypeWithArgumentsTest(
-      TokenPosition token_pos,
+      const InstructionSource& source,
       const AbstractType& dst_type,
       compiler::Label* is_instance_lbl,
       compiler::Label* is_not_instance_lbl);
 
   bool GenerateInstantiatedTypeNoArgumentsTest(
-      TokenPosition token_pos,
+      const InstructionSource& source,
       const AbstractType& dst_type,
       compiler::Label* is_instance_lbl,
       compiler::Label* is_not_instance_lbl);
 
   SubtypeTestCachePtr GenerateUninstantiatedTypeTest(
-      TokenPosition token_pos,
+      const InstructionSource& source,
       const AbstractType& dst_type,
       compiler::Label* is_instance_lbl,
       compiler::Label* is_not_instance_label);
 
   SubtypeTestCachePtr GenerateFunctionTypeTest(
-      TokenPosition token_pos,
+      const InstructionSource& source,
       const AbstractType& dst_type,
       compiler::Label* is_instance_lbl,
       compiler::Label* is_not_instance_label);
 
   SubtypeTestCachePtr GenerateSubtype1TestCacheLookup(
-      TokenPosition token_pos,
+      const InstructionSource& source,
       const Class& type_class,
       compiler::Label* is_instance_lbl,
       compiler::Label* is_not_instance_lbl);
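Taken together, the header changes above mean every call-emission and type-test helper is now keyed by an InstructionSource rather than a bare TokenPosition. A hedged sketch of what a typical call site looks like after the migration; "CallInstrLike" is a hypothetical stand-in for any IL instruction that exposes source() and deopt_id(), and the helper and enum names are the ones declared above:

    // Illustration only: thread the instruction's full source (token position
    // plus inlining id) into the stub-call helper instead of the old bare
    // token_pos. "CallInstrLike" and "EmitViaStub" are hypothetical names.
    void EmitViaStub(FlowGraphCompiler* compiler,
                     CallInstrLike* instr,
                     const Code& stub,
                     LocationSummary* locs) {
      // Before this commit: compiler->GenerateStubCall(instr->token_pos(), ...);
      compiler->GenerateStubCall(instr->source(), stub,
                                 PcDescriptorsLayout::kOther, locs,
                                 instr->deopt_id());
    }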
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc b/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
index d1a3a1c..2459797 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
@@ -329,9 +329,11 @@
   }
 }
 
+static const InstructionSource kPrologueSource(TokenPosition::kDartCodePrologue,
+                                               /*inlining_id=*/0);
+
 void FlowGraphCompiler::EmitPrologue() {
-  // Prologue is in the root function.
-  BeginCodeSourceRange(/*inlining_id=*/0, TokenPosition::kDartCodePrologue);
+  BeginCodeSourceRange(kPrologueSource);
 
   EmitFrameEntry();
   ASSERT(assembler()->constant_pool_allowed());
@@ -358,7 +360,7 @@
     }
   }
 
-  EndCodeSourceRange(TokenPosition::kDartCodePrologue);
+  EndCodeSourceRange(kPrologueSource);
 }
 
 // Input parameters:
@@ -422,27 +424,27 @@
   }
 }
 
-void FlowGraphCompiler::GeneratePatchableCall(TokenPosition token_pos,
+void FlowGraphCompiler::GeneratePatchableCall(const InstructionSource& source,
                                               const Code& stub,
                                               PcDescriptorsLayout::Kind kind,
                                               LocationSummary* locs) {
   __ BranchLinkPatchable(stub);
-  EmitCallsiteMetadata(token_pos, DeoptId::kNone, kind, locs);
+  EmitCallsiteMetadata(source, DeoptId::kNone, kind, locs);
 }
 
 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
-                                         TokenPosition token_pos,
+                                         const InstructionSource& source,
                                          const Code& stub,
                                          PcDescriptorsLayout::Kind kind,
                                          LocationSummary* locs,
                                          Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
   __ BranchLinkPatchable(stub, entry_kind);
-  EmitCallsiteMetadata(token_pos, deopt_id, kind, locs);
+  EmitCallsiteMetadata(source, deopt_id, kind, locs);
 }
 
 void FlowGraphCompiler::GenerateStaticDartCall(intptr_t deopt_id,
-                                               TokenPosition token_pos,
+                                               const InstructionSource& source,
                                                PcDescriptorsLayout::Kind kind,
                                                LocationSummary* locs,
                                                const Function& target,
@@ -451,7 +453,7 @@
   if (CanPcRelativeCall(target)) {
     __ GenerateUnRelocatedPcRelativeCall();
     AddPcRelativeCallTarget(target, entry_kind);
-    EmitCallsiteMetadata(token_pos, deopt_id, kind, locs);
+    EmitCallsiteMetadata(source, deopt_id, kind, locs);
   } else {
     ASSERT(is_optimizing());
     // Call sites to the same target can share object pool entries. These
@@ -460,18 +462,18 @@
     // instead.
     const auto& stub = StubCode::CallStaticFunction();
     __ BranchLinkWithEquivalence(stub, target, entry_kind);
-    EmitCallsiteMetadata(token_pos, deopt_id, kind, locs);
+    EmitCallsiteMetadata(source, deopt_id, kind, locs);
     AddStaticCallTarget(target, entry_kind);
   }
 }
 
-void FlowGraphCompiler::GenerateRuntimeCall(TokenPosition token_pos,
+void FlowGraphCompiler::GenerateRuntimeCall(const InstructionSource& source,
                                             intptr_t deopt_id,
                                             const RuntimeEntry& entry,
                                             intptr_t argument_count,
                                             LocationSummary* locs) {
   __ CallRuntime(entry, argument_count);
-  EmitCallsiteMetadata(token_pos, deopt_id, PcDescriptorsLayout::kOther, locs);
+  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kOther, locs);
 }
 
 void FlowGraphCompiler::EmitEdgeCounter(intptr_t edge_id) {
@@ -498,12 +500,13 @@
 #endif  // DEBUG
 }
 
-void FlowGraphCompiler::EmitOptimizedInstanceCall(const Code& stub,
-                                                  const ICData& ic_data,
-                                                  intptr_t deopt_id,
-                                                  TokenPosition token_pos,
-                                                  LocationSummary* locs,
-                                                  Code::EntryKind entry_kind) {
+void FlowGraphCompiler::EmitOptimizedInstanceCall(
+    const Code& stub,
+    const ICData& ic_data,
+    intptr_t deopt_id,
+    const InstructionSource& source,
+    LocationSummary* locs,
+    Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
   ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0);
   // Each ICData propagated from unoptimized to optimized code contains the
@@ -516,15 +519,15 @@
   __ LoadObject(R8, parsed_function().function());
   __ LoadFromOffset(R0, SP, (ic_data.SizeWithoutTypeArgs() - 1) * kWordSize);
   __ LoadUniqueObject(R9, ic_data);
-  GenerateDartCall(deopt_id, token_pos, stub, PcDescriptorsLayout::kIcCall,
-                   locs, entry_kind);
+  GenerateDartCall(deopt_id, source, stub, PcDescriptorsLayout::kIcCall, locs,
+                   entry_kind);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
 
 void FlowGraphCompiler::EmitInstanceCallJIT(const Code& stub,
                                             const ICData& ic_data,
                                             intptr_t deopt_id,
-                                            TokenPosition token_pos,
+                                            const InstructionSource& source,
                                             LocationSummary* locs,
                                             Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
@@ -539,7 +542,7 @@
           ? Code::entry_point_offset(Code::EntryKind::kMonomorphic)
           : Code::entry_point_offset(Code::EntryKind::kMonomorphicUnchecked);
   __ Call(compiler::FieldAddress(CODE_REG, entry_point_offset));
-  EmitCallsiteMetadata(token_pos, deopt_id, PcDescriptorsLayout::kIcCall, locs);
+  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kIcCall, locs);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
 
@@ -547,7 +550,7 @@
     const String& name,
     const Array& arguments_descriptor,
     intptr_t deopt_id,
-    TokenPosition token_pos,
+    const InstructionSource& source,
     LocationSummary* locs,
     intptr_t try_index,
     intptr_t slow_path_argument_count) {
@@ -594,18 +597,15 @@
       try_index = CurrentTryIndex();
     }
     AddDescriptor(PcDescriptorsLayout::kOther, assembler()->CodeSize(),
-                  DeoptId::kNone, token_pos, try_index);
+                  DeoptId::kNone, source, try_index);
   } else if (is_optimizing()) {
-    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, source);
     AddDeoptIndexAtCall(deopt_id_after);
   } else {
-    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, source);
     // Add deoptimization continuation point after the call and before the
     // arguments are removed.
-    AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after, source);
   }
   RecordCatchEntryMoves(pending_deoptimization_env_, try_index);
   __ Drop(args_desc.SizeWithTypeArgs());
@@ -613,7 +613,7 @@
 
 void FlowGraphCompiler::EmitInstanceCallAOT(const ICData& ic_data,
                                             intptr_t deopt_id,
-                                            TokenPosition token_pos,
+                                            const InstructionSource& source,
                                             LocationSummary* locs,
                                             Code::EntryKind entry_kind,
                                             bool receiver_can_be_smi) {
@@ -652,22 +652,23 @@
   __ LoadUniqueObject(R9, data);
   CLOBBERS_LR(__ blx(LR));
 
-  EmitCallsiteMetadata(token_pos, DeoptId::kNone, PcDescriptorsLayout::kOther,
+  EmitCallsiteMetadata(source, DeoptId::kNone, PcDescriptorsLayout::kOther,
                        locs);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
 
-void FlowGraphCompiler::EmitUnoptimizedStaticCall(intptr_t size_with_type_args,
-                                                  intptr_t deopt_id,
-                                                  TokenPosition token_pos,
-                                                  LocationSummary* locs,
-                                                  const ICData& ic_data,
-                                                  Code::EntryKind entry_kind) {
+void FlowGraphCompiler::EmitUnoptimizedStaticCall(
+    intptr_t size_with_type_args,
+    intptr_t deopt_id,
+    const InstructionSource& source,
+    LocationSummary* locs,
+    const ICData& ic_data,
+    Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
   const Code& stub =
       StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
   __ LoadObject(R9, ic_data);
-  GenerateDartCall(deopt_id, token_pos, stub,
+  GenerateDartCall(deopt_id, source, stub,
                    PcDescriptorsLayout::kUnoptStaticCall, locs, entry_kind);
   __ Drop(size_with_type_args);
 }
@@ -677,7 +678,7 @@
     const Array& arguments_descriptor,
     intptr_t size_with_type_args,
     intptr_t deopt_id,
-    TokenPosition token_pos,
+    const InstructionSource& source,
     LocationSummary* locs,
     Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
@@ -691,7 +692,7 @@
   }
   // Do not use the code from the function, but let the code be patched so that
   // we can record the outgoing edges to other code.
-  GenerateStaticDartCall(deopt_id, token_pos, PcDescriptorsLayout::kOther, locs,
+  GenerateStaticDartCall(deopt_id, source, PcDescriptorsLayout::kOther, locs,
                          function, entry_kind);
   __ Drop(size_with_type_args);
 }
@@ -729,7 +730,7 @@
     Register reg,
     const Object& obj,
     bool needs_number_check,
-    TokenPosition token_pos,
+    const InstructionSource& source,
     intptr_t deopt_id) {
   if (needs_number_check) {
     ASSERT(!obj.IsMint() && !obj.IsDouble());
@@ -740,8 +741,7 @@
     } else {
       __ BranchLinkPatchable(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmp, we need Z computed).
     __ Drop(1);   // Discard constant.
     __ Pop(reg);  // Restore 'reg'.
@@ -751,11 +751,12 @@
   return EQ;
 }
 
-Condition FlowGraphCompiler::EmitEqualityRegRegCompare(Register left,
-                                                       Register right,
-                                                       bool needs_number_check,
-                                                       TokenPosition token_pos,
-                                                       intptr_t deopt_id) {
+Condition FlowGraphCompiler::EmitEqualityRegRegCompare(
+    Register left,
+    Register right,
+    bool needs_number_check,
+    const InstructionSource& source,
+    intptr_t deopt_id) {
   if (needs_number_check) {
     __ Push(left);
     __ Push(right);
@@ -764,8 +765,7 @@
     } else {
       __ BranchLinkPatchable(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmp, we need Z computed).
     __ Pop(right);
     __ Pop(left);
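The ARM64 file below repeats the same mechanical substitution, so only the underlying pattern is worth calling out: code emitted on behalf of an instruction that may have been inlined now wraps its synthetic token position together with that instruction's inlining id before opening a code source range, as the prologue and parallel-move changes do. A minimal sketch of that pattern; the helper name is hypothetical, while the calls themselves (InstructionSource, inlining_id(), BeginCodeSourceRange, EndCodeSourceRange) are the ones appearing in this commit:

    // Sketch of the attribution pattern: a synthetic position paired with the
    // owning instruction's inlining id, so the code source map still points
    // into the right (possibly inlined) function.
    // "EmitSyntheticRangeFor" is a hypothetical helper name.
    void EmitSyntheticRangeFor(FlowGraphCompiler* compiler, Instruction* instr) {
      const InstructionSource synthetic_source(TokenPosition::kParallelMove,
                                               instr->inlining_id());
      compiler->BeginCodeSourceRange(synthetic_source);
      // ... emit code on behalf of |instr| here ...
      compiler->EndCodeSourceRange(synthetic_source);
    }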
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc b/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
index a4d8403..2ccf7b4 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
@@ -321,9 +321,11 @@
   }
 }
 
+static const InstructionSource kPrologueSource(TokenPosition::kDartCodePrologue,
+                                               /*inlining_id=*/0);
+
 void FlowGraphCompiler::EmitPrologue() {
-  // Prologue is in the root function.
-  BeginCodeSourceRange(/*inlining_id=*/0, TokenPosition::kDartCodePrologue);
+  BeginCodeSourceRange(kPrologueSource);
 
   EmitFrameEntry();
   ASSERT(assembler()->constant_pool_allowed());
@@ -350,7 +352,7 @@
     }
   }
 
-  EndCodeSourceRange(TokenPosition::kDartCodePrologue);
+  EndCodeSourceRange(kPrologueSource);
 }
 
 // Input parameters:
@@ -415,27 +417,27 @@
   }
 }
 
-void FlowGraphCompiler::GeneratePatchableCall(TokenPosition token_pos,
+void FlowGraphCompiler::GeneratePatchableCall(const InstructionSource& source,
                                               const Code& stub,
                                               PcDescriptorsLayout::Kind kind,
                                               LocationSummary* locs) {
   __ BranchLinkPatchable(stub);
-  EmitCallsiteMetadata(token_pos, DeoptId::kNone, kind, locs);
+  EmitCallsiteMetadata(source, DeoptId::kNone, kind, locs);
 }
 
 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
-                                         TokenPosition token_pos,
+                                         const InstructionSource& source,
                                          const Code& stub,
                                          PcDescriptorsLayout::Kind kind,
                                          LocationSummary* locs,
                                          Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
   __ BranchLinkPatchable(stub, entry_kind);
-  EmitCallsiteMetadata(token_pos, deopt_id, kind, locs);
+  EmitCallsiteMetadata(source, deopt_id, kind, locs);
 }
 
 void FlowGraphCompiler::GenerateStaticDartCall(intptr_t deopt_id,
-                                               TokenPosition token_pos,
+                                               const InstructionSource& source,
                                                PcDescriptorsLayout::Kind kind,
                                                LocationSummary* locs,
                                                const Function& target,
@@ -444,7 +446,7 @@
   if (CanPcRelativeCall(target)) {
     __ GenerateUnRelocatedPcRelativeCall();
     AddPcRelativeCallTarget(target, entry_kind);
-    EmitCallsiteMetadata(token_pos, deopt_id, kind, locs);
+    EmitCallsiteMetadata(source, deopt_id, kind, locs);
   } else {
     // Call sites to the same target can share object pool entries. These
     // call sites are never patched for breakpoints: the function is deoptimized
@@ -453,18 +455,18 @@
     ASSERT(is_optimizing());
     const auto& stub = StubCode::CallStaticFunction();
     __ BranchLinkWithEquivalence(stub, target, entry_kind);
-    EmitCallsiteMetadata(token_pos, deopt_id, kind, locs);
+    EmitCallsiteMetadata(source, deopt_id, kind, locs);
     AddStaticCallTarget(target, entry_kind);
   }
 }
 
-void FlowGraphCompiler::GenerateRuntimeCall(TokenPosition token_pos,
+void FlowGraphCompiler::GenerateRuntimeCall(const InstructionSource& source,
                                             intptr_t deopt_id,
                                             const RuntimeEntry& entry,
                                             intptr_t argument_count,
                                             LocationSummary* locs) {
   __ CallRuntime(entry, argument_count);
-  EmitCallsiteMetadata(token_pos, deopt_id, PcDescriptorsLayout::kOther, locs);
+  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kOther, locs);
 }
 
 void FlowGraphCompiler::EmitEdgeCounter(intptr_t edge_id) {
@@ -482,12 +484,13 @@
   __ StoreFieldToOffset(TMP, R0, Array::element_offset(edge_id));
 }
 
-void FlowGraphCompiler::EmitOptimizedInstanceCall(const Code& stub,
-                                                  const ICData& ic_data,
-                                                  intptr_t deopt_id,
-                                                  TokenPosition token_pos,
-                                                  LocationSummary* locs,
-                                                  Code::EntryKind entry_kind) {
+void FlowGraphCompiler::EmitOptimizedInstanceCall(
+    const Code& stub,
+    const ICData& ic_data,
+    intptr_t deopt_id,
+    const InstructionSource& source,
+    LocationSummary* locs,
+    Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
   ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0);
   // Each ICData propagated from unoptimized to optimized code contains the
@@ -500,15 +503,15 @@
   __ LoadObject(R6, parsed_function().function());
   __ LoadFromOffset(R0, SP, (ic_data.SizeWithoutTypeArgs() - 1) * kWordSize);
   __ LoadUniqueObject(R5, ic_data);
-  GenerateDartCall(deopt_id, token_pos, stub, PcDescriptorsLayout::kIcCall,
-                   locs, entry_kind);
+  GenerateDartCall(deopt_id, source, stub, PcDescriptorsLayout::kIcCall, locs,
+                   entry_kind);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
 
 void FlowGraphCompiler::EmitInstanceCallJIT(const Code& stub,
                                             const ICData& ic_data,
                                             intptr_t deopt_id,
-                                            TokenPosition token_pos,
+                                            const InstructionSource& source,
                                             LocationSummary* locs,
                                             Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
@@ -529,7 +532,7 @@
           ? Code::entry_point_offset(Code::EntryKind::kMonomorphic)
           : Code::entry_point_offset(Code::EntryKind::kMonomorphicUnchecked);
   __ Call(compiler::FieldAddress(CODE_REG, entry_point_offset));
-  EmitCallsiteMetadata(token_pos, deopt_id, PcDescriptorsLayout::kIcCall, locs);
+  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kIcCall, locs);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
 
@@ -537,7 +540,7 @@
     const String& name,
     const Array& arguments_descriptor,
     intptr_t deopt_id,
-    TokenPosition token_pos,
+    const InstructionSource& source,
     LocationSummary* locs,
     intptr_t try_index,
     intptr_t slow_path_argument_count) {
@@ -580,18 +583,15 @@
       try_index = CurrentTryIndex();
     }
     AddDescriptor(PcDescriptorsLayout::kOther, assembler()->CodeSize(),
-                  DeoptId::kNone, token_pos, try_index);
+                  DeoptId::kNone, source, try_index);
   } else if (is_optimizing()) {
-    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, source);
     AddDeoptIndexAtCall(deopt_id_after);
   } else {
-    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, source);
     // Add deoptimization continuation point after the call and before the
     // arguments are removed.
-    AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after, source);
   }
   RecordCatchEntryMoves(pending_deoptimization_env_, try_index);
   __ Drop(args_desc.SizeWithTypeArgs());
@@ -599,7 +599,7 @@
 
 void FlowGraphCompiler::EmitInstanceCallAOT(const ICData& ic_data,
                                             intptr_t deopt_id,
-                                            TokenPosition token_pos,
+                                            const InstructionSource& source,
                                             LocationSummary* locs,
                                             Code::EntryKind entry_kind,
                                             bool receiver_can_be_smi) {
@@ -645,22 +645,23 @@
   }
   CLOBBERS_LR(__ blr(LR));
 
-  EmitCallsiteMetadata(token_pos, DeoptId::kNone, PcDescriptorsLayout::kOther,
+  EmitCallsiteMetadata(source, DeoptId::kNone, PcDescriptorsLayout::kOther,
                        locs);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
 
-void FlowGraphCompiler::EmitUnoptimizedStaticCall(intptr_t size_with_type_args,
-                                                  intptr_t deopt_id,
-                                                  TokenPosition token_pos,
-                                                  LocationSummary* locs,
-                                                  const ICData& ic_data,
-                                                  Code::EntryKind entry_kind) {
+void FlowGraphCompiler::EmitUnoptimizedStaticCall(
+    intptr_t size_with_type_args,
+    intptr_t deopt_id,
+    const InstructionSource& source,
+    LocationSummary* locs,
+    const ICData& ic_data,
+    Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
   const Code& stub =
       StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
   __ LoadObject(R5, ic_data);
-  GenerateDartCall(deopt_id, token_pos, stub,
+  GenerateDartCall(deopt_id, source, stub,
                    PcDescriptorsLayout::kUnoptStaticCall, locs, entry_kind);
   __ Drop(size_with_type_args);
 }
@@ -670,7 +671,7 @@
     const Array& arguments_descriptor,
     intptr_t size_with_type_args,
     intptr_t deopt_id,
-    TokenPosition token_pos,
+    const InstructionSource& source,
     LocationSummary* locs,
     Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
@@ -684,7 +685,7 @@
   }
   // Do not use the code from the function, but let the code be patched so that
   // we can record the outgoing edges to other code.
-  GenerateStaticDartCall(deopt_id, token_pos, PcDescriptorsLayout::kOther, locs,
+  GenerateStaticDartCall(deopt_id, source, PcDescriptorsLayout::kOther, locs,
                          function, entry_kind);
   __ Drop(size_with_type_args);
 }
@@ -708,7 +709,7 @@
     Register reg,
     const Object& obj,
     bool needs_number_check,
-    TokenPosition token_pos,
+    const InstructionSource& source,
     intptr_t deopt_id) {
   if (needs_number_check) {
     ASSERT(!obj.IsMint() && !obj.IsDouble());
@@ -719,8 +720,7 @@
     } else {
       __ BranchLinkPatchable(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmp, we need Z computed).
     // Discard constant.
     // Restore 'reg'.
@@ -731,11 +731,12 @@
   return EQ;
 }
 
-Condition FlowGraphCompiler::EmitEqualityRegRegCompare(Register left,
-                                                       Register right,
-                                                       bool needs_number_check,
-                                                       TokenPosition token_pos,
-                                                       intptr_t deopt_id) {
+Condition FlowGraphCompiler::EmitEqualityRegRegCompare(
+    Register left,
+    Register right,
+    bool needs_number_check,
+    const InstructionSource& source,
+    intptr_t deopt_id) {
   if (needs_number_check) {
     __ PushPair(right, left);
     if (is_optimizing()) {
@@ -743,8 +744,7 @@
     } else {
       __ BranchLinkPatchable(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmp, we need Z computed).
     __ PopPair(right, left);
   } else {
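
The preceding hunks are representative of the whole change across the architecture backends: every call emitter that used to take a bare TokenPosition now takes a const InstructionSource&, so the inlining id reaches EmitCallsiteMetadata and AddCurrentDescriptor together with the token position. The following is a minimal standalone sketch of that bundling, not the VM's actual definitions; TokenPosition is reduced to a plain integer and the metadata emitter is a stand-in.

#include <cstdint>
#include <cstdio>

// Stand-in for the VM's TokenPosition (the real type is a tagged class).
using TokenPosition = int32_t;
constexpr TokenPosition kNoSourcePos = -1;

// Minimal model of an InstructionSource-style aggregate: a token position
// plus the id of the (possibly inlined) function it belongs to.
struct InstructionSource {
  constexpr InstructionSource() : token_pos(kNoSourcePos), inlining_id(-1) {}
  constexpr explicit InstructionSource(TokenPosition pos, intptr_t id = -1)
      : token_pos(pos), inlining_id(id) {}

  TokenPosition token_pos;
  intptr_t inlining_id;
};

// Stand-in for a metadata emitter such as EmitCallsiteMetadata: both pieces
// of source information arrive through a single const-reference parameter.
void EmitCallsiteMetadata(const InstructionSource& source, intptr_t deopt_id) {
  std::printf("deopt_id=%ld token_pos=%ld inlining_id=%ld\n",
              static_cast<long>(deopt_id),
              static_cast<long>(source.token_pos),
              static_cast<long>(source.inlining_id));
}

int main() {
  const InstructionSource source(/*pos=*/42, /*id=*/3);
  EmitCallsiteMetadata(source, /*deopt_id=*/7);
  return 0;
}

Passing the aggregate by const reference keeps the call signatures about as cheap as the old single-parameter form while leaving room to extend the source description later.
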
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc b/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc
index 63bad46..ecc4cae 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc
@@ -263,7 +263,7 @@
 // - ECX: function type arguments or raw_null.
 // Returns:
 // - true or false in EAX.
-void FlowGraphCompiler::GenerateInstanceOf(TokenPosition token_pos,
+void FlowGraphCompiler::GenerateInstanceOf(const InstructionSource& source,
                                            intptr_t deopt_id,
                                            const AbstractType& type,
                                            LocationSummary* locs) {
@@ -291,7 +291,7 @@
   // Generate inline instanceof test.
   SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone());
   test_cache =
-      GenerateInlineInstanceof(token_pos, type, &is_instance, &is_not_instance);
+      GenerateInlineInstanceof(source, type, &is_instance, &is_not_instance);
 
   // test_cache is null if there is no fall-through.
   compiler::Label done;
@@ -305,7 +305,7 @@
     // Can reuse kInstanceReg as scratch here since it was pushed above.
     __ LoadObject(TypeTestABI::kInstanceReg, test_cache);
     __ pushl(TypeTestABI::kInstanceReg);
-    GenerateRuntimeCall(token_pos, deopt_id, kInstanceofRuntimeEntry, 5, locs);
+    GenerateRuntimeCall(source, deopt_id, kInstanceofRuntimeEntry, 5, locs);
     // Pop the parameters supplied to the runtime entry. The result of the
     // instanceof runtime call will be left as the result of the operation.
     __ Drop(5);
@@ -334,12 +334,13 @@
 // - object in EAX for successful assignable check (or throws TypeError).
 // Performance notes: positive checks must be quick, negative checks can be slow
 // as they throw an exception.
-void FlowGraphCompiler::GenerateAssertAssignable(CompileType* receiver_type,
-                                                 TokenPosition token_pos,
-                                                 intptr_t deopt_id,
-                                                 const String& dst_name,
-                                                 LocationSummary* locs) {
-  ASSERT(!token_pos.IsClassifying());
+void FlowGraphCompiler::GenerateAssertAssignable(
+    CompileType* receiver_type,
+    const InstructionSource& source,
+    intptr_t deopt_id,
+    const String& dst_name,
+    LocationSummary* locs) {
+  ASSERT(!source.token_pos.IsClassifying());
   ASSERT(CheckAssertAssignableTypeTestingABILocations(*locs));
 
   const auto& dst_type =
@@ -360,7 +361,7 @@
     // kDstTypeReg should already contain the destination type.
     const bool null_safety =
         Isolate::Current()->use_strict_null_safety_checks();
-    GenerateStubCall(token_pos,
+    GenerateStubCall(source,
                      null_safety ? StubCode::TypeIsTopTypeForSubtypingNullSafe()
                                  : StubCode::TypeIsTopTypeForSubtyping(),
                      PcDescriptorsLayout::kOther, locs, deopt_id);
@@ -368,7 +369,7 @@
     __ BranchIfZero(TypeTestABI::kSubtypeTestCacheReg, &is_assignable,
                     compiler::Assembler::kNearJump);
 
-    GenerateStubCall(token_pos,
+    GenerateStubCall(source,
                      null_safety ? StubCode::NullIsAssignableToTypeNullSafe()
                                  : StubCode::NullIsAssignableToType(),
                      PcDescriptorsLayout::kOther, locs, deopt_id);
@@ -389,7 +390,7 @@
     }
 
     // Generate inline type check, linking to runtime call if not assignable.
-    test_cache = GenerateInlineInstanceof(token_pos, dst_type, &is_assignable,
+    test_cache = GenerateInlineInstanceof(source, dst_type, &is_assignable,
                                           &runtime_call);
   }
 
@@ -409,7 +410,7 @@
   __ LoadObject(TypeTestABI::kInstanceReg, test_cache);
   __ pushl(TypeTestABI::kInstanceReg);
   __ PushObject(Smi::ZoneHandle(zone(), Smi::New(kTypeCheckFromInline)));
-  GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 7, locs);
+  GenerateRuntimeCall(source, deopt_id, kTypeCheckRuntimeEntry, 7, locs);
   // Pop the parameters supplied to the runtime entry. The result of the
   // type check runtime call is the checked value.
   __ Drop(7);
@@ -474,9 +475,11 @@
   }
 }
 
+static const InstructionSource kPrologueSource(TokenPosition::kDartCodePrologue,
+                                               /*inlining_id=*/0);
+
 void FlowGraphCompiler::EmitPrologue() {
-  // Prologue is in the root function.
-  BeginCodeSourceRange(/*inlining_id=*/0, TokenPosition::kDartCodePrologue);
+  BeginCodeSourceRange(kPrologueSource);
 
   EmitFrameEntry();
 
@@ -504,7 +507,7 @@
     }
   }
 
-  EndCodeSourceRange(TokenPosition::kDartCodePrologue);
+  EndCodeSourceRange(kPrologueSource);
 }
 
 void FlowGraphCompiler::CompileGraph() {
@@ -531,18 +534,18 @@
 }
 
 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
-                                         TokenPosition token_pos,
+                                         const InstructionSource& source,
                                          const Code& stub,
                                          PcDescriptorsLayout::Kind kind,
                                          LocationSummary* locs,
                                          Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
   __ Call(stub, /*moveable_target=*/false, entry_kind);
-  EmitCallsiteMetadata(token_pos, deopt_id, kind, locs);
+  EmitCallsiteMetadata(source, deopt_id, kind, locs);
 }
 
 void FlowGraphCompiler::GenerateStaticDartCall(intptr_t deopt_id,
-                                               TokenPosition token_pos,
+                                               const InstructionSource& source,
                                                PcDescriptorsLayout::Kind kind,
                                                LocationSummary* locs,
                                                const Function& target,
@@ -550,30 +553,31 @@
   ASSERT(CanCallDart());
   const auto& stub = StubCode::CallStaticFunction();
   __ Call(stub, /*movable_target=*/true, entry_kind);
-  EmitCallsiteMetadata(token_pos, deopt_id, kind, locs);
+  EmitCallsiteMetadata(source, deopt_id, kind, locs);
   AddStaticCallTarget(target, entry_kind);
 }
 
-void FlowGraphCompiler::GenerateRuntimeCall(TokenPosition token_pos,
+void FlowGraphCompiler::GenerateRuntimeCall(const InstructionSource& source,
                                             intptr_t deopt_id,
                                             const RuntimeEntry& entry,
                                             intptr_t argument_count,
                                             LocationSummary* locs) {
   __ CallRuntime(entry, argument_count);
-  EmitCallsiteMetadata(token_pos, deopt_id, PcDescriptorsLayout::kOther, locs);
+  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kOther, locs);
 }
 
-void FlowGraphCompiler::EmitUnoptimizedStaticCall(intptr_t size_with_type_args,
-                                                  intptr_t deopt_id,
-                                                  TokenPosition token_pos,
-                                                  LocationSummary* locs,
-                                                  const ICData& ic_data,
-                                                  Code::EntryKind entry_kind) {
+void FlowGraphCompiler::EmitUnoptimizedStaticCall(
+    intptr_t size_with_type_args,
+    intptr_t deopt_id,
+    const InstructionSource& source,
+    LocationSummary* locs,
+    const ICData& ic_data,
+    Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
   const Code& stub =
       StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
   __ LoadObject(ECX, ic_data);
-  GenerateDartCall(deopt_id, token_pos, stub,
+  GenerateDartCall(deopt_id, source, stub,
                    PcDescriptorsLayout::kUnoptStaticCall, locs, entry_kind);
   __ Drop(size_with_type_args);
 }
@@ -591,12 +595,13 @@
       compiler::FieldAddress(EAX, Array::element_offset(edge_id)), 1);
 }
 
-void FlowGraphCompiler::EmitOptimizedInstanceCall(const Code& stub,
-                                                  const ICData& ic_data,
-                                                  intptr_t deopt_id,
-                                                  TokenPosition token_pos,
-                                                  LocationSummary* locs,
-                                                  Code::EntryKind entry_kind) {
+void FlowGraphCompiler::EmitOptimizedInstanceCall(
+    const Code& stub,
+    const ICData& ic_data,
+    intptr_t deopt_id,
+    const InstructionSource& source,
+    LocationSummary* locs,
+    Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
   ASSERT(Array::Handle(ic_data.arguments_descriptor()).Length() > 0);
   // Each ICData propagated from unoptimized to optimized code contains the
@@ -610,15 +615,15 @@
   __ movl(EBX, compiler::Address(
                    ESP, (ic_data.SizeWithoutTypeArgs() - 1) * kWordSize));
   __ LoadObject(ECX, ic_data);
-  GenerateDartCall(deopt_id, token_pos, stub, PcDescriptorsLayout::kIcCall,
-                   locs, entry_kind);
+  GenerateDartCall(deopt_id, source, stub, PcDescriptorsLayout::kIcCall, locs,
+                   entry_kind);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
 
 void FlowGraphCompiler::EmitInstanceCallJIT(const Code& stub,
                                             const ICData& ic_data,
                                             intptr_t deopt_id,
-                                            TokenPosition token_pos,
+                                            const InstructionSource& source,
                                             LocationSummary* locs,
                                             Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
@@ -635,7 +640,7 @@
           ? Code::entry_point_offset(Code::EntryKind::kMonomorphic)
           : Code::entry_point_offset(Code::EntryKind::kMonomorphicUnchecked);
   __ call(compiler::FieldAddress(CODE_REG, entry_point_offset));
-  EmitCallsiteMetadata(token_pos, deopt_id, PcDescriptorsLayout::kIcCall, locs);
+  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kIcCall, locs);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
 
@@ -643,7 +648,7 @@
     const String& name,
     const Array& arguments_descriptor,
     intptr_t deopt_id,
-    TokenPosition token_pos,
+    const InstructionSource& source,
     LocationSummary* locs,
     intptr_t try_index,
     intptr_t slow_path_argument_count) {
@@ -662,7 +667,7 @@
   __ call(compiler::FieldAddress(
       CODE_REG, Code::entry_point_offset(Code::EntryKind::kMonomorphic)));
 
-  AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, token_pos);
+  AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, source);
   RecordSafepoint(locs, slow_path_argument_count);
   const intptr_t deopt_id_after = DeoptId::ToDeoptAfter(deopt_id);
   // Precompilation not implemented on ia32 platform.
@@ -672,8 +677,7 @@
   } else {
     // Add deoptimization continuation point after the call and before the
     // arguments are removed.
-    AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after, source);
   }
   RecordCatchEntryMoves(pending_deoptimization_env_, try_index);
   __ Drop(args_desc.SizeWithTypeArgs());
@@ -681,7 +685,7 @@
 
 void FlowGraphCompiler::EmitInstanceCallAOT(const ICData& ic_data,
                                             intptr_t deopt_id,
-                                            TokenPosition token_pos,
+                                            const InstructionSource& source,
                                             LocationSummary* locs,
                                             Code::EntryKind entry_kind,
                                             bool receiver_can_be_smi) {
@@ -694,7 +698,7 @@
     const Array& arguments_descriptor,
     intptr_t size_with_type_args,
     intptr_t deopt_id,
-    TokenPosition token_pos,
+    const InstructionSource& source,
     LocationSummary* locs,
     Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
@@ -705,7 +709,7 @@
   }
   // Do not use the code from the function, but let the code be patched so that
   // we can record the outgoing edges to other code.
-  GenerateStaticDartCall(deopt_id, token_pos, PcDescriptorsLayout::kOther, locs,
+  GenerateStaticDartCall(deopt_id, source, PcDescriptorsLayout::kOther, locs,
                          function, entry_kind);
   __ Drop(size_with_type_args);
 }
@@ -722,7 +726,7 @@
     Register reg,
     const Object& obj,
     bool needs_number_check,
-    TokenPosition token_pos,
+    const InstructionSource& source,
     intptr_t deopt_id) {
   ASSERT(!needs_number_check || (!obj.IsMint() && !obj.IsDouble()));
 
@@ -740,8 +744,7 @@
     } else {
       __ Call(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmpl, we need ZF computed).
     __ popl(reg);  // Discard constant.
     __ popl(reg);  // Restore 'reg'.
@@ -751,11 +754,12 @@
   return EQUAL;
 }
 
-Condition FlowGraphCompiler::EmitEqualityRegRegCompare(Register left,
-                                                       Register right,
-                                                       bool needs_number_check,
-                                                       TokenPosition token_pos,
-                                                       intptr_t deopt_id) {
+Condition FlowGraphCompiler::EmitEqualityRegRegCompare(
+    Register left,
+    Register right,
+    bool needs_number_check,
+    const InstructionSource& source,
+    intptr_t deopt_id) {
   if (needs_number_check) {
     __ pushl(left);
     __ pushl(right);
@@ -764,8 +768,7 @@
     } else {
       __ Call(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmpl, we need ZF computed).
     __ popl(right);
     __ popl(left);
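
In the ia32 prologue above (and again in the x64 prologue below), the separate inlining id and TokenPosition::kDartCodePrologue arguments are replaced by one file-local kPrologueSource constant passed to both BeginCodeSourceRange and EndCodeSourceRange, so the two ends of the range cannot drift apart. A rough standalone illustration of that pattern follows; the CodeSourceRangeRecorder class and the sentinel value are hypothetical stand-ins for the compiler's bookkeeping.

#include <cstdint>
#include <cstdio>
#include <vector>

// Simplified source descriptor: a token position plus an inlining id.
struct InstructionSource {
  constexpr InstructionSource(int32_t pos, intptr_t id)
      : token_pos(pos), inlining_id(id) {}
  int32_t token_pos;
  intptr_t inlining_id;
};

// Hypothetical stand-in for the compiler's code-source-range bookkeeping.
class CodeSourceRangeRecorder {
 public:
  void BeginCodeSourceRange(const InstructionSource& source, intptr_t pc) {
    open_.push_back({source, pc});
  }
  void EndCodeSourceRange(const InstructionSource& source, intptr_t pc) {
    const Open open = open_.back();
    open_.pop_back();
    std::printf("range [%ld, %ld) pos=%ld inlining_id=%ld\n",
                static_cast<long>(open.pc), static_cast<long>(pc),
                static_cast<long>(source.token_pos),
                static_cast<long>(source.inlining_id));
  }

 private:
  struct Open {
    InstructionSource source;
    intptr_t pc;
  };
  std::vector<Open> open_;
};

// One file-local constant describes the prologue, mirroring kPrologueSource.
constexpr int32_t kDartCodePrologue = -3;  // illustrative sentinel value
constexpr InstructionSource kPrologueSource(kDartCodePrologue,
                                            /*inlining_id=*/0);

int main() {
  CodeSourceRangeRecorder recorder;
  // The same constant is handed to both ends of the range.
  recorder.BeginCodeSourceRange(kPrologueSource, /*pc=*/0);
  // ... frame entry and parameter handling would be emitted here ...
  recorder.EndCodeSourceRange(kPrologueSource, /*pc=*/48);
  return 0;
}
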
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc b/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
index 01d836c..2f8db78 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
@@ -330,9 +330,11 @@
   }
 }
 
+static const InstructionSource kPrologueSource(TokenPosition::kDartCodePrologue,
+                                               /*inlining_id=*/0);
+
 void FlowGraphCompiler::EmitPrologue() {
-  // Prologue is in the root function.
-  BeginCodeSourceRange(/*inlining_id=*/0, TokenPosition::kDartCodePrologue);
+  BeginCodeSourceRange(kPrologueSource);
 
   EmitFrameEntry();
   ASSERT(assembler()->constant_pool_allowed());
@@ -359,7 +361,7 @@
     }
   }
 
-  EndCodeSourceRange(TokenPosition::kDartCodePrologue);
+  EndCodeSourceRange(kPrologueSource);
 }
 
 void FlowGraphCompiler::CompileGraph() {
@@ -418,27 +420,27 @@
   }
 }
 
-void FlowGraphCompiler::GeneratePatchableCall(TokenPosition token_pos,
+void FlowGraphCompiler::GeneratePatchableCall(const InstructionSource& source,
                                               const Code& stub,
                                               PcDescriptorsLayout::Kind kind,
                                               LocationSummary* locs) {
   __ CallPatchable(stub);
-  EmitCallsiteMetadata(token_pos, DeoptId::kNone, kind, locs);
+  EmitCallsiteMetadata(source, DeoptId::kNone, kind, locs);
 }
 
 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
-                                         TokenPosition token_pos,
+                                         const InstructionSource& source,
                                          const Code& stub,
                                          PcDescriptorsLayout::Kind kind,
                                          LocationSummary* locs,
                                          Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
   __ CallPatchable(stub, entry_kind);
-  EmitCallsiteMetadata(token_pos, deopt_id, kind, locs);
+  EmitCallsiteMetadata(source, deopt_id, kind, locs);
 }
 
 void FlowGraphCompiler::GenerateStaticDartCall(intptr_t deopt_id,
-                                               TokenPosition token_pos,
+                                               const InstructionSource& source,
                                                PcDescriptorsLayout::Kind kind,
                                                LocationSummary* locs,
                                                const Function& target,
@@ -448,7 +450,7 @@
   if (CanPcRelativeCall(target)) {
     __ GenerateUnRelocatedPcRelativeCall();
     AddPcRelativeCallTarget(target, entry_kind);
-    EmitCallsiteMetadata(token_pos, deopt_id, kind, locs);
+    EmitCallsiteMetadata(source, deopt_id, kind, locs);
   } else {
     // Call sites to the same target can share object pool entries. These
     // call sites are never patched for breakpoints: the function is deoptimized
@@ -456,31 +458,32 @@
     // instead.
     const auto& stub_entry = StubCode::CallStaticFunction();
     __ CallWithEquivalence(stub_entry, target, entry_kind);
-    EmitCallsiteMetadata(token_pos, deopt_id, kind, locs);
+    EmitCallsiteMetadata(source, deopt_id, kind, locs);
     AddStaticCallTarget(target, entry_kind);
   }
 }
 
-void FlowGraphCompiler::GenerateRuntimeCall(TokenPosition token_pos,
+void FlowGraphCompiler::GenerateRuntimeCall(const InstructionSource& source,
                                             intptr_t deopt_id,
                                             const RuntimeEntry& entry,
                                             intptr_t argument_count,
                                             LocationSummary* locs) {
   __ CallRuntime(entry, argument_count);
-  EmitCallsiteMetadata(token_pos, deopt_id, PcDescriptorsLayout::kOther, locs);
+  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kOther, locs);
 }
 
-void FlowGraphCompiler::EmitUnoptimizedStaticCall(intptr_t size_with_type_args,
-                                                  intptr_t deopt_id,
-                                                  TokenPosition token_pos,
-                                                  LocationSummary* locs,
-                                                  const ICData& ic_data,
-                                                  Code::EntryKind entry_kind) {
+void FlowGraphCompiler::EmitUnoptimizedStaticCall(
+    intptr_t size_with_type_args,
+    intptr_t deopt_id,
+    const InstructionSource& source,
+    LocationSummary* locs,
+    const ICData& ic_data,
+    Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
   const Code& stub =
       StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
   __ LoadObject(RBX, ic_data);
-  GenerateDartCall(deopt_id, token_pos, stub,
+  GenerateDartCall(deopt_id, source, stub,
                    PcDescriptorsLayout::kUnoptStaticCall, locs, entry_kind);
   __ Drop(size_with_type_args, RCX);
 }
@@ -499,12 +502,13 @@
       compiler::FieldAddress(RAX, Array::element_offset(edge_id)), 1);
 }
 
-void FlowGraphCompiler::EmitOptimizedInstanceCall(const Code& stub,
-                                                  const ICData& ic_data,
-                                                  intptr_t deopt_id,
-                                                  TokenPosition token_pos,
-                                                  LocationSummary* locs,
-                                                  Code::EntryKind entry_kind) {
+void FlowGraphCompiler::EmitOptimizedInstanceCall(
+    const Code& stub,
+    const ICData& ic_data,
+    intptr_t deopt_id,
+    const InstructionSource& source,
+    LocationSummary* locs,
+    Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
   ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0);
   // Each ICData propagated from unoptimized to optimized code contains the
@@ -518,15 +522,15 @@
   __ movq(RDX, compiler::Address(
                    RSP, (ic_data.SizeWithoutTypeArgs() - 1) * kWordSize));
   __ LoadUniqueObject(RBX, ic_data);
-  GenerateDartCall(deopt_id, token_pos, stub, PcDescriptorsLayout::kIcCall,
-                   locs, entry_kind);
+  GenerateDartCall(deopt_id, source, stub, PcDescriptorsLayout::kIcCall, locs,
+                   entry_kind);
   __ Drop(ic_data.SizeWithTypeArgs(), RCX);
 }
 
 void FlowGraphCompiler::EmitInstanceCallJIT(const Code& stub,
                                             const ICData& ic_data,
                                             intptr_t deopt_id,
-                                            TokenPosition token_pos,
+                                            const InstructionSource& source,
                                             LocationSummary* locs,
                                             Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
@@ -543,7 +547,7 @@
           ? Code::entry_point_offset(Code::EntryKind::kMonomorphic)
           : Code::entry_point_offset(Code::EntryKind::kMonomorphicUnchecked);
   __ call(compiler::FieldAddress(CODE_REG, entry_point_offset));
-  EmitCallsiteMetadata(token_pos, deopt_id, PcDescriptorsLayout::kIcCall, locs);
+  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kIcCall, locs);
   __ Drop(ic_data.SizeWithTypeArgs(), RCX);
 }
 
@@ -551,7 +555,7 @@
     const String& name,
     const Array& arguments_descriptor,
     intptr_t deopt_id,
-    TokenPosition token_pos,
+    const InstructionSource& source,
     LocationSummary* locs,
     intptr_t try_index,
     intptr_t slow_path_argument_count) {
@@ -595,18 +599,15 @@
       try_index = CurrentTryIndex();
     }
     AddDescriptor(PcDescriptorsLayout::kOther, assembler()->CodeSize(),
-                  DeoptId::kNone, token_pos, try_index);
+                  DeoptId::kNone, source, try_index);
   } else if (is_optimizing()) {
-    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, source);
     AddDeoptIndexAtCall(deopt_id_after);
   } else {
-    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, source);
     // Add deoptimization continuation point after the call and before the
     // arguments are removed.
-    AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after, source);
   }
   RecordCatchEntryMoves(pending_deoptimization_env_, try_index);
   __ Drop(args_desc.SizeWithTypeArgs(), RCX);
@@ -614,7 +615,7 @@
 
 void FlowGraphCompiler::EmitInstanceCallAOT(const ICData& ic_data,
                                             intptr_t deopt_id,
-                                            TokenPosition token_pos,
+                                            const InstructionSource& source,
                                             LocationSummary* locs,
                                             Code::EntryKind entry_kind,
                                             bool receiver_can_be_smi) {
@@ -649,7 +650,7 @@
   __ LoadUniqueObject(RBX, data);
   __ call(RCX);
 
-  EmitCallsiteMetadata(token_pos, deopt_id, PcDescriptorsLayout::kOther, locs);
+  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kOther, locs);
   __ Drop(ic_data.SizeWithTypeArgs(), RCX);
 }
 
@@ -658,7 +659,7 @@
     const Array& arguments_descriptor,
     intptr_t size_with_type_args,
     intptr_t deopt_id,
-    TokenPosition token_pos,
+    const InstructionSource& source,
     LocationSummary* locs,
     Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
@@ -672,7 +673,7 @@
   }
   // Do not use the code from the function, but let the code be patched so that
   // we can record the outgoing edges to other code.
-  GenerateStaticDartCall(deopt_id, token_pos, PcDescriptorsLayout::kOther, locs,
+  GenerateStaticDartCall(deopt_id, source, PcDescriptorsLayout::kOther, locs,
                          function, entry_kind);
   __ Drop(size_with_type_args, RCX);
 }
@@ -698,7 +699,7 @@
     Register reg,
     const Object& obj,
     bool needs_number_check,
-    TokenPosition token_pos,
+    const InstructionSource& source,
     intptr_t deopt_id) {
   ASSERT(!needs_number_check || (!obj.IsMint() && !obj.IsDouble()));
 
@@ -716,8 +717,7 @@
     } else {
       __ CallPatchable(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmpq, we need ZF computed).
     __ popq(reg);  // Discard constant.
     __ popq(reg);  // Restore 'reg'.
@@ -727,11 +727,12 @@
   return EQUAL;
 }
 
-Condition FlowGraphCompiler::EmitEqualityRegRegCompare(Register left,
-                                                       Register right,
-                                                       bool needs_number_check,
-                                                       TokenPosition token_pos,
-                                                       intptr_t deopt_id) {
+Condition FlowGraphCompiler::EmitEqualityRegRegCompare(
+    Register left,
+    Register right,
+    bool needs_number_check,
+    const InstructionSource& source,
+    intptr_t deopt_id) {
   if (needs_number_check) {
     __ pushq(left);
     __ pushq(right);
@@ -740,8 +741,7 @@
     } else {
       __ CallPatchable(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmpq, we need ZF computed).
     __ popq(right);
     __ popq(left);
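
In the x64 GenerateStaticDartCall hunk above, both the pc-relative path and the patchable-stub path now feed the same source argument into EmitCallsiteMetadata. The sketch below illustrates only that shape: two call strategies, one metadata record per call site. The predicate, the target type, and the printouts are illustrative, not the VM's API.

#include <cstdint>
#include <cstdio>
#include <string>

// Simplified source descriptor: token position plus inlining id.
struct InstructionSource {
  int32_t token_pos;
  intptr_t inlining_id;
};

// Stand-in for the shared metadata sink used by every call emitter.
void EmitCallsiteMetadata(const InstructionSource& source, intptr_t deopt_id,
                          const char* kind) {
  std::printf("%s call: deopt_id=%ld pos=%ld inlining_id=%ld\n", kind,
              static_cast<long>(deopt_id),
              static_cast<long>(source.token_pos),
              static_cast<long>(source.inlining_id));
}

// Illustrative predicate; in the VM this depends on whether the target is
// reachable with a pc-relative call.
bool CanPcRelativeCall(const std::string& target) {
  return target.size() < 8;
}

// Both branches emit different call sequences but feed the same source into
// the metadata emitter exactly once per call site.
void GenerateStaticDartCall(intptr_t deopt_id, const InstructionSource& source,
                            const std::string& target) {
  if (CanPcRelativeCall(target)) {
    // ... an un-relocated pc-relative call would be emitted here ...
    EmitCallsiteMetadata(source, deopt_id, "pc-relative");
  } else {
    // ... a patchable call through a shared stub would be emitted here ...
    EmitCallsiteMetadata(source, deopt_id, "patchable");
  }
}

int main() {
  GenerateStaticDartCall(/*deopt_id=*/11, InstructionSource{10, 0}, "print");
  GenerateStaticDartCall(/*deopt_id=*/12, InstructionSource{20, 2},
                         "aMuchLongerTarget");
  return 0;
}
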
diff --git a/runtime/vm/compiler/backend/il.cc b/runtime/vm/compiler/backend/il.cc
index bc5a039..938488f 100644
--- a/runtime/vm/compiler/backend/il.cc
+++ b/runtime/vm/compiler/backend/il.cc
@@ -566,8 +566,6 @@
 }
 #endif  // DEBUG
 
-Definition::Definition(intptr_t deopt_id) : Instruction(deopt_id) {}
-
 // A value in the constant propagation lattice.
 //    - non-constant sentinel
 //    - a constant (any non-sentinel value)
@@ -886,12 +884,12 @@
 CheckClassInstr::CheckClassInstr(Value* value,
                                  intptr_t deopt_id,
                                  const Cids& cids,
-                                 TokenPosition token_pos)
-    : TemplateInstruction(deopt_id),
+                                 const InstructionSource& source)
+    : TemplateInstruction(source, deopt_id),
       cids_(cids),
       licm_hoisted_(false),
       is_bit_test_(IsCompactCidRange(cids)),
-      token_pos_(token_pos) {
+      token_pos_(source.token_pos) {
   // Expected useful check data.
   const intptr_t number_of_checks = cids.length();
   ASSERT(number_of_checks > 0);
@@ -997,9 +995,9 @@
 }
 
 AllocateUninitializedContextInstr::AllocateUninitializedContextInstr(
-    TokenPosition token_pos,
+    const InstructionSource& source,
     intptr_t num_context_variables)
-    : TemplateAllocation(token_pos),
+    : TemplateAllocation(source),
       num_context_variables_(num_context_variables) {
   // This instruction is not used in AOT for code size reasons.
   ASSERT(!CompilerState::Current().is_aot());
@@ -1021,7 +1019,7 @@
 void AllocateTypedDataInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   const Code& stub = Code::ZoneHandle(
       compiler->zone(), StubCode::GetAllocationStubForTypedData(class_id()));
-  compiler->GenerateStubCall(token_pos(), stub, PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
                              locs());
 }
 
@@ -1156,8 +1154,9 @@
   return this;
 }
 
-ConstantInstr::ConstantInstr(const Object& value, TokenPosition token_pos)
-    : value_(value), token_pos_(token_pos) {
+ConstantInstr::ConstantInstr(const Object& value,
+                             const InstructionSource& source)
+    : TemplateDefinition(source), value_(value), token_pos_(source.token_pos) {
   // Check that the value is not an incorrect Integer representation.
   ASSERT(!value.IsMint() || !Smi::IsValid(Mint::Cast(value).AsInt64Value()));
   // Check that clones of fields are not stored as constants.
@@ -2462,11 +2461,11 @@
     Definition* replacement = NULL;
     if (Token::IsRelationalOperator(kind())) {
       replacement = new RelationalOpInstr(
-          token_pos(), kind(), left()->CopyWithType(), right()->CopyWithType(),
+          source(), kind(), left()->CopyWithType(), right()->CopyWithType(),
           op_cid, DeoptId::kNone, speculative_mode);
     } else if (Token::IsEqualityOperator(kind())) {
       replacement = new EqualityCompareInstr(
-          token_pos(), kind(), left()->CopyWithType(), right()->CopyWithType(),
+          source(), kind(), left()->CopyWithType(), right()->CopyWithType(),
           op_cid, DeoptId::kNone, speculative_mode);
     }
     if (replacement != NULL) {
@@ -3604,7 +3603,7 @@
         THR_Print("Merging test smi v%" Pd "\n", bit_and->ssa_temp_index());
       }
       TestSmiInstr* test = new TestSmiInstr(
-          comparison()->token_pos(),
+          comparison()->source(),
           negate ? Token::NegateComparison(comparison()->kind())
                  : comparison()->kind(),
           bit_and->left()->Copy(zone), bit_and->right()->Copy(zone));
@@ -3663,12 +3662,12 @@
   return this;
 }
 
-TestCidsInstr::TestCidsInstr(TokenPosition token_pos,
+TestCidsInstr::TestCidsInstr(const InstructionSource& source,
                              Token::Kind kind,
                              Value* value,
                              const ZoneGrowableArray<intptr_t>& cid_results,
                              intptr_t deopt_id)
-    : TemplateComparison(token_pos, kind, deopt_id),
+    : TemplateComparison(source, kind, deopt_id),
       cid_results_(cid_results),
       licm_hoisted_(false) {
   ASSERT((kind == Token::kIS) || (kind == Token::kISNOT));
@@ -4067,7 +4066,7 @@
   __ Bind(compiler->GetJumpLabel(this));
   if (!compiler->is_optimizing()) {
     compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, GetDeoptId(),
-                                   TokenPosition::kNoSource);
+                                   InstructionSource());
   }
   if (HasParallelMove()) {
     compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
@@ -4094,7 +4093,7 @@
     // uniformity with ARM, where we can reuse pattern matching code that
     // matches backwards from the end of the pattern.
     compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, GetDeoptId(),
-                                   TokenPosition::kNoSource);
+                                   InstructionSource());
   }
   if (HasParallelMove()) {
     if (compiler::Assembler::EmittingComments()) {
@@ -4168,7 +4167,7 @@
     // uniformity with ARM, where we can reuse pattern matching code that
     // matches backwards from the end of the pattern.
     compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, GetDeoptId(),
-                                   TokenPosition::kNoSource);
+                                   InstructionSource());
   }
   if (HasParallelMove()) {
     if (compiler::Assembler::EmittingComments()) {
@@ -4339,7 +4338,7 @@
     auto object_store = compiler->isolate()->object_store();
     const auto& init_static_field_stub = Code::ZoneHandle(
         compiler->zone(), object_store->init_static_field_stub());
-    compiler->GenerateStubCall(token_pos(), init_static_field_stub,
+    compiler->GenerateStubCall(source(), init_static_field_stub,
                                /*kind=*/PcDescriptorsLayout::kOther, locs(),
                                deopt_id());
     __ Bind(&no_call);
@@ -4399,7 +4398,7 @@
   // Instruction inputs are popped from the stack at this point,
   // so deoptimization environment has to be adjusted.
   // This adjustment is done in FlowGraph::AttachEnvironment.
-  compiler->GenerateStubCall(token_pos(), stub,
+  compiler->GenerateStubCall(source(), stub,
                              /*kind=*/PcDescriptorsLayout::kOther, locs(),
                              deopt_id());
   __ Bind(&no_call);
@@ -4419,7 +4418,7 @@
   const auto& throw_stub =
       Code::ZoneHandle(compiler->zone(), object_store->throw_stub());
 
-  compiler->GenerateStubCall(token_pos(), throw_stub,
+  compiler->GenerateStubCall(source(), throw_stub,
                              /*kind=*/PcDescriptorsLayout::kOther, locs(),
                              deopt_id());
   // Issue(dartbug.com/41353): Right now we have to emit an extra breakpoint
@@ -4447,7 +4446,7 @@
       Code::ZoneHandle(compiler->zone(), object_store->re_throw_stub());
 
   compiler->SetNeedsStackTrace(catch_try_index());
-  compiler->GenerateStubCall(token_pos(), re_throw_stub,
+  compiler->GenerateStubCall(source(), re_throw_stub,
                              /*kind=*/PcDescriptorsLayout::kOther, locs(),
                              deopt_id());
   // Issue(dartbug.com/41353): Right now we have to emit an extra breakpoint
@@ -4481,7 +4480,7 @@
   compiler::Label done;
   __ CompareObject(AssertBooleanABI::kObjectReg, Object::null_instance());
   __ BranchIf(NOT_EQUAL, &done);
-  compiler->GenerateStubCall(token_pos(), assert_boolean_stub,
+  compiler->GenerateStubCall(source(), assert_boolean_stub,
                              /*kind=*/PcDescriptorsLayout::kOther, locs(),
                              deopt_id());
   __ Bind(&done);
@@ -4689,13 +4688,13 @@
   __ Drop(num_temps());
 }
 
-StrictCompareInstr::StrictCompareInstr(TokenPosition token_pos,
+StrictCompareInstr::StrictCompareInstr(const InstructionSource& source,
                                        Token::Kind kind,
                                        Value* left,
                                        Value* right,
                                        bool needs_number_check,
                                        intptr_t deopt_id)
-    : TemplateComparison(token_pos, kind, deopt_id),
+    : TemplateComparison(source, kind, deopt_id),
       needs_number_check_(needs_number_check) {
   ASSERT((kind == Token::kEQ_STRICT) || (kind == Token::kNE_STRICT));
   SetInputAt(0, left);
@@ -4724,7 +4723,7 @@
                                                    right.constant());
   } else {
     true_condition = compiler->EmitEqualityRegRegCompare(
-        left.reg(), right.reg(), needs_number_check(), token_pos(), deopt_id());
+        left.reg(), right.reg(), needs_number_check(), source(), deopt_id());
   }
   return true_condition != kInvalidCondition && (kind() != Token::kEQ_STRICT)
              ? InvertCondition(true_condition)
@@ -4896,19 +4895,19 @@
     if (ic_data()->NumberOfUsedChecks() > 0) {
       const ICData& unary_ic_data =
           ICData::ZoneHandle(zone, ic_data()->AsUnaryClassChecks());
-      compiler->GenerateInstanceCall(deopt_id(), token_pos(), locs(),
+      compiler->GenerateInstanceCall(deopt_id(), source(), locs(),
                                      unary_ic_data, entry_kind(),
                                      !receiver_is_not_smi());
     } else {
       // Call was not visited yet, use original ICData in order to populate it.
-      compiler->GenerateInstanceCall(deopt_id(), token_pos(), locs(),
+      compiler->GenerateInstanceCall(deopt_id(), source(), locs(),
                                      *call_ic_data, entry_kind(),
                                      !receiver_is_not_smi());
     }
   } else {
     // Unoptimized code.
     compiler->AddCurrentDescriptor(PcDescriptorsLayout::kRewind, deopt_id(),
-                                   token_pos());
+                                   source());
 
     // If the ICData contains a (Smi, Smi, <binary-smi-op-target>) stub already
     // we will call the specialized IC Stub that works as a normal IC Stub but
@@ -4928,10 +4927,10 @@
     if (use_specialized_smi_ic_stub) {
       ASSERT(ArgumentCount() == 2);
       compiler->EmitInstanceCallJIT(specialized_binary_smi_ic_stub,
-                                    *call_ic_data, deopt_id(), token_pos(),
-                                    locs(), entry_kind());
+                                    *call_ic_data, deopt_id(), source(), locs(),
+                                    entry_kind());
     } else {
-      compiler->GenerateInstanceCall(deopt_id(), token_pos(), locs(),
+      compiler->GenerateInstanceCall(deopt_id(), source(), locs(),
                                      *call_ic_data, entry_kind(),
                                      !receiver_is_not_smi());
     }
@@ -5019,12 +5018,9 @@
     args->Add(call->ArgumentValueAt(i)->CopyWithType());
   }
   args->Add(cid);
-  auto dispatch_table_call = new (zone) DispatchTableCallInstr(
-      call->token_pos(), interface_target, selector, args,
-      call->type_args_len(), call->argument_names());
-  if (call->has_inlining_id()) {
-    dispatch_table_call->set_inlining_id(call->inlining_id());
-  }
+  auto dispatch_table_call = new (zone)
+      DispatchTableCallInstr(call->source(), interface_target, selector, args,
+                             call->type_args_len(), call->argument_names());
   return dispatch_table_call;
 }
 
@@ -5038,14 +5034,14 @@
   const Register cid_reg = locs()->in(0).reg();
   compiler->EmitDispatchTableCall(cid_reg, selector()->offset,
                                   arguments_descriptor);
-  compiler->EmitCallsiteMetadata(token_pos(), DeoptId::kNone,
+  compiler->EmitCallsiteMetadata(source(), DeoptId::kNone,
                                  PcDescriptorsLayout::kOther, locs());
   if (selector()->called_on_null && !selector()->on_null_interface) {
     Value* receiver = ArgumentValueAt(FirstArgIndex());
     if (receiver->Type()->is_nullable()) {
       const String& function_name =
           String::ZoneHandle(interface_target().name());
-      compiler->AddNullCheck(token_pos(), function_name);
+      compiler->AddNullCheck(source(), function_name);
     }
   }
   __ Drop(ArgumentsSize());
@@ -5166,7 +5162,7 @@
                           argument_names());
   UpdateReceiverSminess(compiler->zone());
   compiler->EmitPolymorphicInstanceCall(
-      this, targets(), args_info, deopt_id(), token_pos(), locs(), complete(),
+      this, targets(), args_info, deopt_id(), source(), locs(), complete(),
       total_call_count(), !receiver_is_not_smi());
 }
 
@@ -5319,7 +5315,7 @@
   }
   ArgumentsInfo args_info(type_args_len(), ArgumentCount(), ArgumentsSize(),
                           argument_names());
-  compiler->GenerateStaticCall(deopt_id(), token_pos(), function(), args_info,
+  compiler->GenerateStaticCall(deopt_id(), source(), function(), args_info,
                                locs(), *call_ic_data, rebind_rule_,
                                entry_kind());
   if (function().IsFactory()) {
@@ -5348,7 +5344,7 @@
 }
 
 void AssertAssignableInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  compiler->GenerateAssertAssignable(value()->Type(), token_pos(), deopt_id(),
+  compiler->GenerateAssertAssignable(value()->Type(), source(), deopt_id(),
                                      dst_name(), locs());
   ASSERT(locs()->in(kInstancePos).reg() == locs()->out(0).reg());
 }
@@ -5381,12 +5377,12 @@
   __ PushRegister(AssertSubtypeABI::kSubTypeReg);
   __ PushRegister(AssertSubtypeABI::kSuperTypeReg);
   __ PushRegister(AssertSubtypeABI::kDstNameReg);
-  compiler->GenerateRuntimeCall(token_pos(), deopt_id(),
-                                kSubtypeCheckRuntimeEntry, 5, locs());
+  compiler->GenerateRuntimeCall(source(), deopt_id(), kSubtypeCheckRuntimeEntry,
+                                5, locs());
 
   __ Drop(5);
 #else
-  compiler->GenerateStubCall(token_pos(), StubCode::AssertSubtype(),
+  compiler->GenerateStubCall(source(), StubCode::AssertSubtype(),
                              PcDescriptorsLayout::kOther, locs());
 #endif
 }
@@ -5499,7 +5495,7 @@
 
 void CheckNullInstr::AddMetadataForRuntimeCall(CheckNullInstr* check_null,
                                                FlowGraphCompiler* compiler) {
-  compiler->AddNullCheck(check_null->token_pos(), check_null->function_name());
+  compiler->AddNullCheck(check_null->source(), check_null->function_name());
 }
 
 void RangeErrorSlowPath::EmitSharedStubCall(FlowGraphCompiler* compiler,
@@ -5703,31 +5699,31 @@
 
 ComparisonInstr* EqualityCompareInstr::CopyWithNewOperands(Value* new_left,
                                                            Value* new_right) {
-  return new EqualityCompareInstr(token_pos(), kind(), new_left, new_right,
+  return new EqualityCompareInstr(source(), kind(), new_left, new_right,
                                   operation_cid(), deopt_id());
 }
 
 ComparisonInstr* RelationalOpInstr::CopyWithNewOperands(Value* new_left,
                                                         Value* new_right) {
-  return new RelationalOpInstr(token_pos(), kind(), new_left, new_right,
+  return new RelationalOpInstr(source(), kind(), new_left, new_right,
                                operation_cid(), deopt_id(),
                                SpeculativeModeOfInputs());
 }
 
 ComparisonInstr* StrictCompareInstr::CopyWithNewOperands(Value* new_left,
                                                          Value* new_right) {
-  return new StrictCompareInstr(token_pos(), kind(), new_left, new_right,
+  return new StrictCompareInstr(source(), kind(), new_left, new_right,
                                 needs_number_check(), DeoptId::kNone);
 }
 
 ComparisonInstr* TestSmiInstr::CopyWithNewOperands(Value* new_left,
                                                    Value* new_right) {
-  return new TestSmiInstr(token_pos(), kind(), new_left, new_right);
+  return new TestSmiInstr(source(), kind(), new_left, new_right);
 }
 
 ComparisonInstr* TestCidsInstr::CopyWithNewOperands(Value* new_left,
                                                     Value* new_right) {
-  return new TestCidsInstr(token_pos(), kind(), new_left, cid_results(),
+  return new TestCidsInstr(source(), kind(), new_left, cid_results(),
                            deopt_id());
 }
 
@@ -5994,14 +5990,14 @@
                                    intptr_t class_id,
                                    AlignmentType alignment,
                                    intptr_t deopt_id,
-                                   TokenPosition token_pos,
+                                   const InstructionSource& source,
                                    CompileType* result_type)
-    : TemplateDefinition(deopt_id),
+    : TemplateDefinition(source, deopt_id),
       index_unboxed_(index_unboxed),
       index_scale_(index_scale),
       class_id_(class_id),
       alignment_(StrengthenAlignment(class_id, alignment)),
-      token_pos_(token_pos),
+      token_pos_(source.token_pos),
       result_type_(result_type) {
   SetInputAt(0, array);
   SetInputAt(1, index);
@@ -6015,7 +6011,7 @@
       auto load = new (Z) LoadIndexedInstr(
           array()->CopyWithType(Z), box->value()->CopyWithType(Z),
           /*index_unboxed=*/true, index_scale(), class_id(), alignment_,
-          GetDeoptId(), token_pos(), result_type_);
+          GetDeoptId(), source(), result_type_);
       flow_graph->InsertBefore(this, load, env(), FlowGraph::kValue);
       return load;
     }
@@ -6032,15 +6028,15 @@
                                      intptr_t class_id,
                                      AlignmentType alignment,
                                      intptr_t deopt_id,
-                                     TokenPosition token_pos,
+                                     const InstructionSource& source,
                                      SpeculativeMode speculative_mode)
-    : TemplateInstruction(deopt_id),
+    : TemplateInstruction(source, deopt_id),
       emit_store_barrier_(emit_store_barrier),
       index_unboxed_(index_unboxed),
       index_scale_(index_scale),
       class_id_(class_id),
       alignment_(StrengthenAlignment(class_id, alignment)),
-      token_pos_(token_pos),
+      token_pos_(source.token_pos),
       speculative_mode_(speculative_mode) {
   SetInputAt(kArrayPos, array);
   SetInputAt(kIndexPos, index);
@@ -6056,7 +6052,7 @@
           array()->CopyWithType(Z), box->value()->CopyWithType(Z),
           value()->CopyWithType(Z), emit_store_barrier_,
           /*index_unboxed=*/true, index_scale(), class_id(), alignment_,
-          GetDeoptId(), token_pos(), speculative_mode_);
+          GetDeoptId(), source(), speculative_mode_);
       flow_graph->InsertBefore(this, store, env(), FlowGraph::kEffect);
       return nullptr;
     }
@@ -6072,11 +6068,11 @@
     ZoneGrowableArray<Value*>* inputs,
     intptr_t deopt_id,
     MethodRecognizer::Kind recognized_kind,
-    TokenPosition token_pos)
-    : PureDefinition(deopt_id),
+    const InstructionSource& source)
+    : PureDefinition(source, deopt_id),
       inputs_(inputs),
       recognized_kind_(recognized_kind),
-      token_pos_(token_pos) {
+      token_pos_(source.token_pos) {
   ASSERT(inputs_->length() == ArgumentCountFor(recognized_kind_));
   for (intptr_t i = 0; i < inputs_->length(); ++i) {
     ASSERT((*inputs)[i] != NULL);
@@ -6202,6 +6198,9 @@
   NativeFunction native_function = NativeEntry::ResolveNative(
       library, native_name(), num_params, &auto_setup_scope);
   if (native_function == NULL) {
+    if (has_inlining_id()) {
+      UNIMPLEMENTED();
+    }
     Report::MessageF(Report::kError, Script::Handle(function().script()),
                      function().token_pos(), Report::AtLocation,
                      "native function '%s' (%" Pd " arguments) cannot be found",
diff --git a/runtime/vm/compiler/backend/il.h b/runtime/vm/compiler/backend/il.h
index 4c26d9d..9b39ae5 100644
--- a/runtime/vm/compiler/backend/il.h
+++ b/runtime/vm/compiler/backend/il.h
@@ -790,13 +790,19 @@
     kNotSpeculative
   };
 
-  explicit Instruction(intptr_t deopt_id = DeoptId::kNone)
+  // If the source has the inlining ID of the root function, then don't set
+  // the inlining ID to that; instead, treat it as unset.
+  explicit Instruction(const InstructionSource& source,
+                       intptr_t deopt_id = DeoptId::kNone)
       : deopt_id_(deopt_id),
         previous_(NULL),
         next_(NULL),
         env_(NULL),
         locs_(NULL),
-        inlining_id_(-1) {}
+        inlining_id_(source.inlining_id) {}
+
+  explicit Instruction(intptr_t deopt_id = DeoptId::kNone)
+      : Instruction(InstructionSource(), deopt_id) {}
 
   virtual ~Instruction() {}
 
@@ -817,6 +823,11 @@
 
   virtual TokenPosition token_pos() const { return TokenPosition::kNoSource; }
 
+  // Returns the source information for this instruction.
+  InstructionSource source() const {
+    return InstructionSource(token_pos(), inlining_id());
+  }
+
   virtual intptr_t InputCount() const = 0;
   virtual Value* InputAt(intptr_t i) const = 0;
   void SetInputAt(intptr_t i, Value* value) {
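
The two il.h hunks above add the core of the change to the Instruction base class: a constructor that takes an InstructionSource (with a delegating default), plus a source() accessor that rebuilds the pair from the virtual token_pos() and the stored inlining id. A simplified, self-contained sketch of that shape, with the VM types reduced to integers:

#include <cstdint>
#include <cstdio>

// Stand-in for the VM's TokenPosition.
using TokenPosition = int32_t;

struct InstructionSource {
  constexpr InstructionSource(TokenPosition pos = -1, intptr_t id = -1)
      : token_pos(pos), inlining_id(id) {}
  TokenPosition token_pos;
  intptr_t inlining_id;
};

class Instruction {
 public:
  explicit Instruction(const InstructionSource& source, intptr_t deopt_id = -1)
      : deopt_id_(deopt_id), inlining_id_(source.inlining_id) {}
  explicit Instruction(intptr_t deopt_id = -1)
      : Instruction(InstructionSource(), deopt_id) {}
  virtual ~Instruction() = default;

  virtual TokenPosition token_pos() const { return -1; }
  intptr_t inlining_id() const { return inlining_id_; }

  // Rebuilds the source information on demand, mirroring the new
  // Instruction::source() accessor.
  InstructionSource source() const {
    return InstructionSource(token_pos(), inlining_id());
  }

 private:
  intptr_t deopt_id_;
  intptr_t inlining_id_;
};

// An instruction with a real position still caches it and overrides
// token_pos(), so source() round-trips both pieces of information.
class PositionedInstr : public Instruction {
 public:
  PositionedInstr(const InstructionSource& source, intptr_t deopt_id)
      : Instruction(source, deopt_id), token_pos_(source.token_pos) {}
  TokenPosition token_pos() const override { return token_pos_; }

 private:
  TokenPosition token_pos_;
};

int main() {
  PositionedInstr instr(InstructionSource(/*pos=*/99, /*id=*/4),
                        /*deopt_id=*/8);
  const InstructionSource s = instr.source();
  std::printf("pos=%ld inlining_id=%ld\n", static_cast<long>(s.token_pos),
              static_cast<long>(s.inlining_id));
  return 0;
}

This round-trip is what lets the il.cc call sites above switch from passing token_pos() to passing source(), and it is why the DispatchTableCallInstr hunk no longer copies the inlining id by hand.
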
@@ -1077,13 +1088,13 @@
   // Get the block entry for this instruction.
   virtual BlockEntryInstr* GetBlock();
 
-  intptr_t inlining_id() const { return inlining_id_; }
-  void set_inlining_id(intptr_t value) {
+  virtual intptr_t inlining_id() const { return inlining_id_; }
+  virtual void set_inlining_id(intptr_t value) {
     ASSERT(value >= 0);
-    ASSERT_EQUAL(inlining_id_, -1);
+    ASSERT(!has_inlining_id() || inlining_id_ == value);
     inlining_id_ = value;
   }
-  bool has_inlining_id() const { return inlining_id_ >= 0; }
+  virtual bool has_inlining_id() const { return inlining_id_ >= 0; }
 
   // Returns a hash code for use with hash maps.
   virtual intptr_t Hashcode() const;
@@ -1215,6 +1226,8 @@
 class PureInstruction : public Instruction {
  public:
   explicit PureInstruction(intptr_t deopt_id) : Instruction(deopt_id) {}
+  explicit PureInstruction(const InstructionSource& source, intptr_t deopt_id)
+      : Instruction(source, deopt_id) {}
 
   virtual bool AllowsCSE() const { return true; }
   virtual bool HasUnknownSideEffects() const { return false; }
@@ -1251,6 +1264,11 @@
   explicit TemplateInstruction(intptr_t deopt_id = DeoptId::kNone)
       : CSETrait<Instruction, PureInstruction>::Base(deopt_id), inputs_() {}
 
+  TemplateInstruction(const InstructionSource& source,
+                      intptr_t deopt_id = DeoptId::kNone)
+      : CSETrait<Instruction, PureInstruction>::Base(source, deopt_id),
+        inputs_() {}
+
   virtual intptr_t InputCount() const { return N; }
   virtual Value* InputAt(intptr_t i) const { return inputs_[i]; }
 
@@ -2196,7 +2214,12 @@
 // Abstract super-class of all instructions that define a value (Bind, Phi).
 class Definition : public Instruction {
  public:
-  explicit Definition(intptr_t deopt_id = DeoptId::kNone);
+  explicit Definition(intptr_t deopt_id = DeoptId::kNone)
+      : Instruction(deopt_id) {}
+
+  explicit Definition(const InstructionSource& source,
+                      intptr_t deopt_id = DeoptId::kNone)
+      : Instruction(source, deopt_id) {}
 
   // Overridden by definitions that have call counts.
   virtual intptr_t CallCount() const { return -1; }
@@ -2408,6 +2431,8 @@
 class PureDefinition : public Definition {
  public:
   explicit PureDefinition(intptr_t deopt_id) : Definition(deopt_id) {}
+  explicit PureDefinition(const InstructionSource& source, intptr_t deopt_id)
+      : Definition(source, deopt_id) {}
 
   virtual bool AllowsCSE() const { return true; }
   virtual bool HasUnknownSideEffects() const { return false; }
@@ -2420,6 +2445,10 @@
  public:
   explicit TemplateDefinition(intptr_t deopt_id = DeoptId::kNone)
       : CSETrait<Definition, PureDefinition>::Base(deopt_id), inputs_() {}
+  TemplateDefinition(const InstructionSource& source,
+                     intptr_t deopt_id = DeoptId::kNone)
+      : CSETrait<Definition, PureDefinition>::Base(source, deopt_id),
+        inputs_() {}
 
   virtual intptr_t InputCount() const { return N; }
   virtual Value* InputAt(intptr_t i) const { return inputs_[i]; }
@@ -2940,13 +2969,13 @@
  public:
   // The [yield_index], if provided, will cause the instruction to emit extra
   // yield_index -> pc offset into the [PcDescriptors].
-  ReturnInstr(TokenPosition token_pos,
+  ReturnInstr(const InstructionSource& source,
               Value* value,
               intptr_t deopt_id,
               intptr_t yield_index = PcDescriptorsLayout::kInvalidYieldIndex,
               Representation representation = kTagged)
-      : TemplateInstruction(deopt_id),
-        token_pos_(token_pos),
+      : TemplateInstruction(source, deopt_id),
+        token_pos_(source.token_pos),
         yield_index_(yield_index),
         representation_(representation) {
     SetInputAt(0, value);
@@ -3001,11 +3030,11 @@
 // Represents a return from a Dart function into native code.
 class NativeReturnInstr : public ReturnInstr {
  public:
-  NativeReturnInstr(TokenPosition token_pos,
+  NativeReturnInstr(const InstructionSource& source,
                     Value* value,
                     const compiler::ffi::CallbackMarshaller& marshaller,
                     intptr_t deopt_id)
-      : ReturnInstr(token_pos, value, deopt_id), marshaller_(marshaller) {}
+      : ReturnInstr(source, value, deopt_id), marshaller_(marshaller) {}
 
   DECLARE_INSTRUCTION(NativeReturn)
 
@@ -3032,10 +3061,10 @@
 
 class ThrowInstr : public TemplateInstruction<1, Throws> {
  public:
-  explicit ThrowInstr(TokenPosition token_pos,
+  explicit ThrowInstr(const InstructionSource& source,
                       intptr_t deopt_id,
                       Value* exception)
-      : TemplateInstruction(deopt_id), token_pos_(token_pos) {
+      : TemplateInstruction(source, deopt_id), token_pos_(source.token_pos) {
     SetInputAt(0, exception);
   }
 
@@ -3060,13 +3089,13 @@
  public:
   // 'catch_try_index' can be kInvalidTryIndex if the
   // rethrow has been artificially generated by the parser.
-  ReThrowInstr(TokenPosition token_pos,
+  ReThrowInstr(const InstructionSource& source,
                intptr_t catch_try_index,
                intptr_t deopt_id,
                Value* exception,
                Value* stacktrace)
-      : TemplateInstruction(deopt_id),
-        token_pos_(token_pos),
+      : TemplateInstruction(source, deopt_id),
+        token_pos_(source.token_pos),
         catch_try_index_(catch_try_index) {
     SetInputAt(0, exception);
     SetInputAt(1, stacktrace);
@@ -3281,11 +3310,11 @@
   DEFINE_INSTRUCTION_TYPE_CHECK(Comparison)
 
  protected:
-  ComparisonInstr(TokenPosition token_pos,
+  ComparisonInstr(const InstructionSource& source,
                   Token::Kind kind,
                   intptr_t deopt_id = DeoptId::kNone)
-      : Definition(deopt_id),
-        token_pos_(token_pos),
+      : Definition(source, deopt_id),
+        token_pos_(source.token_pos),
         kind_(kind),
         operation_cid_(kIllegalCid) {}
 
@@ -3303,8 +3332,10 @@
   virtual bool HasUnknownSideEffects() const { return false; }
 
  protected:
-  PureComparison(TokenPosition token_pos, Token::Kind kind, intptr_t deopt_id)
-      : ComparisonInstr(token_pos, kind, deopt_id) {}
+  PureComparison(const InstructionSource& source,
+                 Token::Kind kind,
+                 intptr_t deopt_id)
+      : ComparisonInstr(source, kind, deopt_id) {}
 };
 
 template <intptr_t N,
@@ -3313,12 +3344,10 @@
 class TemplateComparison
     : public CSETrait<ComparisonInstr, PureComparison>::Base {
  public:
-  TemplateComparison(TokenPosition token_pos,
+  TemplateComparison(const InstructionSource& source,
                      Token::Kind kind,
                      intptr_t deopt_id = DeoptId::kNone)
-      : CSETrait<ComparisonInstr, PureComparison>::Base(token_pos,
-                                                        kind,
-                                                        deopt_id),
+      : CSETrait<ComparisonInstr, PureComparison>::Base(source, kind, deopt_id),
         inputs_() {}
 
   virtual intptr_t InputCount() const { return N; }
@@ -3360,6 +3389,13 @@
   Value* InputAt(intptr_t i) const { return comparison()->InputAt(i); }
 
   virtual TokenPosition token_pos() const { return comparison_->token_pos(); }
+  virtual intptr_t inlining_id() const { return comparison_->inlining_id(); }
+  virtual void set_inlining_id(intptr_t value) {
+    return comparison_->set_inlining_id(value);
+  }
+  virtual bool has_inlining_id() const {
+    return comparison_->has_inlining_id();
+  }
 
   virtual bool ComputeCanDeoptimize() const {
     return comparison()->ComputeCanDeoptimize();
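
The three accessors added above forward inlining-id queries to the embedded
comparison, matching the token_pos() forwarding on the line just before them,
presumably so the (token position, inlining id) pair stays consistent when the
wrapper reports positions on behalf of the comparison it owns. A condensed
sketch of that delegation (the wrapper's real name lies outside this hunk and
is shown here as a placeholder):

    class ComparisonWrapperInstr : public Definition {
     public:
      // Both halves of an InstructionSource are answered by the wrapped
      // comparison, never stored separately on the wrapper.
      virtual TokenPosition token_pos() const { return comparison_->token_pos(); }
      virtual intptr_t inlining_id() const { return comparison_->inlining_id(); }
      virtual void set_inlining_id(intptr_t value) {
        comparison_->set_inlining_id(value);
      }
      virtual bool has_inlining_id() const {
        return comparison_->has_inlining_id();
      }

     private:
      ComparisonInstr* comparison_;
    };
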
@@ -3532,8 +3568,9 @@
 
 class ConstantInstr : public TemplateDefinition<0, NoThrow, Pure> {
  public:
-  ConstantInstr(const Object& value,
-                TokenPosition token_pos = TokenPosition::kConstant);
+  explicit ConstantInstr(const Object& value)
+      : ConstantInstr(value, InstructionSource(TokenPosition::kConstant)) {}
+  ConstantInstr(const Object& value, const InstructionSource& source);
 
   DECLARE_INSTRUCTION(Constant)
   virtual CompileType ComputeType() const;
@@ -3601,14 +3638,14 @@
     kDstNamePos = 4,
   };
 
-  AssertSubtypeInstr(TokenPosition token_pos,
+  AssertSubtypeInstr(const InstructionSource& source,
                      Value* instantiator_type_arguments,
                      Value* function_type_arguments,
                      Value* sub_type,
                      Value* super_type,
                      Value* dst_name,
                      intptr_t deopt_id)
-      : TemplateInstruction(deopt_id), token_pos_(token_pos) {
+      : TemplateInstruction(source, deopt_id), token_pos_(source.token_pos) {
     SetInputAt(kInstantiatorTAVPos, instantiator_type_arguments);
     SetInputAt(kFunctionTAVPos, function_type_arguments);
     SetInputAt(kSubTypePos, sub_type);
@@ -3668,7 +3705,7 @@
     kFunctionTAVPos = 3,
   };
 
-  AssertAssignableInstr(TokenPosition token_pos,
+  AssertAssignableInstr(const InstructionSource& source,
                         Value* value,
                         Value* dst_type,
                         Value* instantiator_type_arguments,
@@ -3676,8 +3713,8 @@
                         const String& dst_name,
                         intptr_t deopt_id,
                         Kind kind = kUnknown)
-      : TemplateDefinition(deopt_id),
-        token_pos_(token_pos),
+      : TemplateDefinition(source, deopt_id),
+        token_pos_(source.token_pos),
         dst_name_(dst_name),
         kind_(kind) {
     ASSERT(!dst_name.IsNull());
@@ -3732,8 +3769,10 @@
 
 class AssertBooleanInstr : public TemplateDefinition<1, Throws, Pure> {
  public:
-  AssertBooleanInstr(TokenPosition token_pos, Value* value, intptr_t deopt_id)
-      : TemplateDefinition(deopt_id), token_pos_(token_pos) {
+  AssertBooleanInstr(const InstructionSource& source,
+                     Value* value,
+                     intptr_t deopt_id)
+      : TemplateDefinition(source, deopt_id), token_pos_(source.token_pos) {
     SetInputAt(0, value);
   }
 
@@ -3850,12 +3889,12 @@
                    intptr_t type_args_len,
                    const Array& argument_names,
                    InputsArray* inputs,
-                   TokenPosition token_pos)
-      : Definition(deopt_id),
+                   const InstructionSource& source)
+      : Definition(source, deopt_id),
         type_args_len_(type_args_len),
         argument_names_(argument_names),
         inputs_(inputs),
-        token_pos_(token_pos) {
+        token_pos_(source.token_pos) {
     ASSERT(argument_names.IsZoneHandle() || argument_names.InVMIsolateHeap());
     ASSERT(inputs_->length() >= kExtraInputs);
     for (intptr_t i = 0, n = inputs_->length(); i < n; ++i) {
@@ -3939,14 +3978,14 @@
   ClosureCallInstr(InputsArray* inputs,
                    intptr_t type_args_len,
                    const Array& argument_names,
-                   TokenPosition token_pos,
+                   const InstructionSource& source,
                    intptr_t deopt_id,
                    Code::EntryKind entry_kind = Code::EntryKind::kNormal)
       : TemplateDartCall(deopt_id,
                          type_args_len,
                          argument_names,
                          inputs,
-                         token_pos),
+                         source),
         entry_kind_(entry_kind) {}
 
   DECLARE_INSTRUCTION(ClosureCall)
@@ -3975,7 +4014,7 @@
 // (InstanceCallInstr, PolymorphicInstanceCallInstr).
 class InstanceCallBaseInstr : public TemplateDartCall<0> {
  public:
-  InstanceCallBaseInstr(TokenPosition token_pos,
+  InstanceCallBaseInstr(const InstructionSource& source,
                         const String& function_name,
                         Token::Kind token_kind,
                         InputsArray* arguments,
@@ -3989,7 +4028,7 @@
                          type_args_len,
                          argument_names,
                          arguments,
-                         token_pos),
+                         source),
         ic_data_(ic_data),
         function_name_(function_name),
         token_kind_(token_kind),
@@ -4115,7 +4154,7 @@
 class InstanceCallInstr : public InstanceCallBaseInstr {
  public:
   InstanceCallInstr(
-      TokenPosition token_pos,
+      const InstructionSource& source,
       const String& function_name,
       Token::Kind token_kind,
       InputsArray* arguments,
@@ -4127,7 +4166,7 @@
       const Function& interface_target = Function::null_function(),
       const Function& tearoff_interface_target = Function::null_function())
       : InstanceCallBaseInstr(
-            token_pos,
+            source,
             function_name,
             token_kind,
             arguments,
@@ -4140,7 +4179,7 @@
         checked_argument_count_(checked_argument_count) {}
 
   InstanceCallInstr(
-      TokenPosition token_pos,
+      const InstructionSource& source,
       const String& function_name,
       Token::Kind token_kind,
       InputsArray* arguments,
@@ -4150,7 +4189,7 @@
       intptr_t deopt_id,
       const Function& interface_target = Function::null_function(),
       const Function& tearoff_interface_target = Function::null_function())
-      : InstanceCallBaseInstr(token_pos,
+      : InstanceCallBaseInstr(source,
                               function_name,
                               token_kind,
                               arguments,
@@ -4212,13 +4251,10 @@
       args->Add(call->ArgumentValueAt(i)->CopyWithType(zone));
     }
     auto new_call = new (zone) PolymorphicInstanceCallInstr(
-        call->token_pos(), call->function_name(), call->token_kind(), args,
+        call->source(), call->function_name(), call->token_kind(), args,
         call->type_args_len(), call->argument_names(), call->ic_data(),
         call->deopt_id(), call->interface_target(),
         call->tearoff_interface_target(), targets, complete);
-    if (call->has_inlining_id()) {
-      new_call->set_inlining_id(call->inlining_id());
-    }
     new_call->set_result_type(call->result_type());
     new_call->set_entry_kind(call->entry_kind());
     new_call->set_has_unique_selector(call->has_unique_selector());
@@ -4259,7 +4295,7 @@
   ADD_EXTRA_INFO_TO_S_EXPRESSION_SUPPORT
 
  private:
-  PolymorphicInstanceCallInstr(TokenPosition token_pos,
+  PolymorphicInstanceCallInstr(const InstructionSource& source,
                                const String& function_name,
                                Token::Kind token_kind,
                                InputsArray* arguments,
@@ -4271,7 +4307,7 @@
                                const Function& tearoff_interface_target,
                                const CallTargets& targets,
                                bool complete)
-      : InstanceCallBaseInstr(token_pos,
+      : InstanceCallBaseInstr(source,
                               function_name,
                               token_kind,
                               arguments,
@@ -4301,7 +4337,7 @@
 // Takes untagged ClassId of the receiver as extra input.
 class DispatchTableCallInstr : public TemplateDartCall<1> {
  public:
-  DispatchTableCallInstr(TokenPosition token_pos,
+  DispatchTableCallInstr(const InstructionSource& source,
                          const Function& interface_target,
                          const compiler::TableSelector* selector,
                          InputsArray* arguments,
@@ -4311,7 +4347,7 @@
                          type_args_len,
                          argument_names,
                          arguments,
-                         token_pos),
+                         source),
         interface_target_(interface_target),
         selector_(selector) {
     ASSERT(selector != nullptr);
@@ -4373,7 +4409,7 @@
 
 class StrictCompareInstr : public TemplateComparison<2, NoThrow, Pure> {
  public:
-  StrictCompareInstr(TokenPosition token_pos,
+  StrictCompareInstr(const InstructionSource& source,
                      Token::Kind kind,
                      Value* left,
                      Value* right,
@@ -4420,11 +4456,11 @@
 // comparison pattern.
 class TestSmiInstr : public TemplateComparison<2, NoThrow, Pure> {
  public:
-  TestSmiInstr(TokenPosition token_pos,
+  TestSmiInstr(const InstructionSource& source,
                Token::Kind kind,
                Value* left,
                Value* right)
-      : TemplateComparison(token_pos, kind) {
+      : TemplateComparison(source, kind) {
     ASSERT(kind == Token::kEQ || kind == Token::kNE);
     SetInputAt(0, left);
     SetInputAt(1, right);
@@ -4455,7 +4491,7 @@
 // other results even in the no-deopt case.
 class TestCidsInstr : public TemplateComparison<1, NoThrow, Pure> {
  public:
-  TestCidsInstr(TokenPosition token_pos,
+  TestCidsInstr(const InstructionSource& source,
                 Token::Kind kind,
                 Value* value,
                 const ZoneGrowableArray<intptr_t>& cid_results,
@@ -4495,14 +4531,14 @@
 
 class EqualityCompareInstr : public TemplateComparison<2, NoThrow, Pure> {
  public:
-  EqualityCompareInstr(TokenPosition token_pos,
+  EqualityCompareInstr(const InstructionSource& source,
                        Token::Kind kind,
                        Value* left,
                        Value* right,
                        intptr_t cid,
                        intptr_t deopt_id,
                        SpeculativeMode speculative_mode = kGuardInputs)
-      : TemplateComparison(token_pos, kind, deopt_id),
+      : TemplateComparison(source, kind, deopt_id),
         speculative_mode_(speculative_mode) {
     ASSERT(Token::IsEqualityOperator(kind));
     SetInputAt(0, left);
@@ -4543,14 +4579,14 @@
 
 class RelationalOpInstr : public TemplateComparison<2, NoThrow, Pure> {
  public:
-  RelationalOpInstr(TokenPosition token_pos,
+  RelationalOpInstr(const InstructionSource& source,
                     Token::Kind kind,
                     Value* left,
                     Value* right,
                     intptr_t cid,
                     intptr_t deopt_id,
                     SpeculativeMode speculative_mode = kGuardInputs)
-      : TemplateComparison(token_pos, kind, deopt_id),
+      : TemplateComparison(source, kind, deopt_id),
         speculative_mode_(speculative_mode) {
     ASSERT(Token::IsRelationalOperator(kind));
     SetInputAt(0, left);
@@ -4674,7 +4710,7 @@
 
 class StaticCallInstr : public TemplateDartCall<0> {
  public:
-  StaticCallInstr(TokenPosition token_pos,
+  StaticCallInstr(const InstructionSource& source,
                   const Function& function,
                   intptr_t type_args_len,
                   const Array& argument_names,
@@ -4686,20 +4722,19 @@
                          type_args_len,
                          argument_names,
                          arguments,
-                         token_pos),
-        ic_data_(NULL),
+                         source),
+        ic_data_(GetICData(ic_data_array, deopt_id, /*is_static_call=*/true)),
         call_count_(0),
         function_(function),
         rebind_rule_(rebind_rule),
         result_type_(NULL),
         is_known_list_constructor_(false),
         identity_(AliasIdentity::Unknown()) {
-    ic_data_ = GetICData(ic_data_array, deopt_id, /*is_static_call=*/true);
     ASSERT(function.IsZoneHandle());
     ASSERT(!function.IsNull());
   }
 
-  StaticCallInstr(TokenPosition token_pos,
+  StaticCallInstr(const InstructionSource& source,
                   const Function& function,
                   intptr_t type_args_len,
                   const Array& argument_names,
@@ -4711,7 +4746,7 @@
                          type_args_len,
                          argument_names,
                          arguments,
-                         token_pos),
+                         source),
         ic_data_(NULL),
         call_count_(call_count),
         function_(function),
@@ -4735,16 +4770,12 @@
     for (intptr_t i = 0; i < call->ArgumentCount(); i++) {
       args->Add(call->ArgumentValueAt(i)->CopyWithType());
     }
-    StaticCallInstr* new_call = new (zone)
-        StaticCallInstr(call->token_pos(), target, call->type_args_len(),
-                        call->argument_names(), args, call->deopt_id(),
-                        call_count, ICData::kNoRebind);
+    StaticCallInstr* new_call = new (zone) StaticCallInstr(
+        call->source(), target, call->type_args_len(), call->argument_names(),
+        args, call->deopt_id(), call_count, ICData::kNoRebind);
     if (call->result_type() != NULL) {
       new_call->result_type_ = call->result_type();
     }
-    if (call->has_inlining_id()) {
-      new_call->set_inlining_id(call->inlining_id());
-    }
     new_call->set_entry_kind(call->entry_kind());
     return new_call;
   }
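
Both FromCall helpers above (PolymorphicInstanceCallInstr and StaticCallInstr)
drop their explicit has_inlining_id()/set_inlining_id() copy: the new call is
now built from call->source(), and an InstructionSource carries the inlining
id alongside the token position (see the two-argument constructor used in the
deserializer change below), so the manual copy became redundant. Condensed
before/after, using the names from the hunk above:

    // Before: position passed alone, inlining id copied by hand.
    StaticCallInstr* new_call = new (zone)
        StaticCallInstr(call->token_pos(), target, call->type_args_len(),
                        call->argument_names(), args, call->deopt_id(),
                        call_count, ICData::kNoRebind);
    if (call->has_inlining_id()) {
      new_call->set_inlining_id(call->inlining_id());
    }

    // After: call->source() hands over position and inlining id in one go.
    StaticCallInstr* new_call = new (zone) StaticCallInstr(
        call->source(), target, call->type_args_len(), call->argument_names(),
        args, call->deopt_id(), call_count, ICData::kNoRebind);
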
@@ -4858,8 +4889,11 @@
 
 class LoadLocalInstr : public TemplateDefinition<0, NoThrow> {
  public:
-  LoadLocalInstr(const LocalVariable& local, TokenPosition token_pos)
-      : local_(local), is_last_(false), token_pos_(token_pos) {}
+  LoadLocalInstr(const LocalVariable& local, const InstructionSource& source)
+      : TemplateDefinition(source),
+        local_(local),
+        is_last_(false),
+        token_pos_(source.token_pos) {}
 
   DECLARE_INSTRUCTION(LoadLocal)
   virtual CompileType ComputeType() const;
@@ -4980,8 +5014,12 @@
  public:
   StoreLocalInstr(const LocalVariable& local,
                   Value* value,
-                  TokenPosition token_pos)
-      : local_(local), is_dead_(false), is_last_(false), token_pos_(token_pos) {
+                  const InstructionSource& source)
+      : TemplateDefinition(source),
+        local_(local),
+        is_dead_(false),
+        is_last_(false),
+        token_pos_(source.token_pos) {
     SetInputAt(0, value);
   }
 
@@ -5023,20 +5061,16 @@
   NativeCallInstr(const String* name,
                   const Function* function,
                   bool link_lazily,
-                  TokenPosition position,
+                  const InstructionSource& source,
                   InputsArray* args)
-      : TemplateDartCall(DeoptId::kNone,
-                         0,
-                         Array::null_array(),
-                         args,
-                         position),
+      : TemplateDartCall(DeoptId::kNone, 0, Array::null_array(), args, source),
         native_name_(name),
         function_(function),
         native_c_function_(NULL),
         is_bootstrap_native_(false),
         is_auto_scope_(true),
         link_lazily_(link_lazily),
-        token_pos_(position) {
+        token_pos_(source.token_pos) {
     ASSERT(name->IsZoneHandle());
     ASSERT(function->IsZoneHandle());
   }
@@ -5241,11 +5275,11 @@
 
 class DebugStepCheckInstr : public TemplateInstruction<0, NoThrow> {
  public:
-  DebugStepCheckInstr(TokenPosition token_pos,
+  DebugStepCheckInstr(const InstructionSource& source,
                       PcDescriptorsLayout::Kind stub_kind,
                       intptr_t deopt_id)
-      : TemplateInstruction<0, NoThrow>(deopt_id),
-        token_pos_(token_pos),
+      : TemplateInstruction(source, deopt_id),
+        token_pos_(source.token_pos),
         stub_kind_(stub_kind) {}
 
   DECLARE_INSTRUCTION(DebugStepCheck)
@@ -5301,11 +5335,12 @@
                           Value* instance,
                           Value* value,
                           StoreBarrierType emit_store_barrier,
-                          TokenPosition token_pos,
+                          const InstructionSource& source,
                           Kind kind = Kind::kOther)
-      : slot_(slot),
+      : TemplateInstruction(source),
+        slot_(slot),
         emit_store_barrier_(emit_store_barrier),
-        token_pos_(token_pos),
+        token_pos_(source.token_pos),
         is_initialization_(kind == Kind::kInitializing) {
     SetInputAt(kInstancePos, instance);
     SetInputAt(kValuePos, value);
@@ -5316,14 +5351,14 @@
                           Value* instance,
                           Value* value,
                           StoreBarrierType emit_store_barrier,
-                          TokenPosition token_pos,
+                          const InstructionSource& source,
                           const ParsedFunction* parsed_function,
                           Kind kind = Kind::kOther)
       : StoreInstanceFieldInstr(Slot::Get(field, parsed_function),
                                 instance,
                                 value,
                                 emit_store_barrier,
-                                token_pos,
+                                source,
                                 kind) {}
 
   virtual SpeculativeMode SpeculativeModeOfInput(intptr_t index) const {
@@ -5495,12 +5530,12 @@
 class LoadStaticFieldInstr : public TemplateDefinition<0, Throws> {
  public:
   LoadStaticFieldInstr(const Field& field,
-                       TokenPosition token_pos,
+                       const InstructionSource& source,
                        bool calls_initializer = false,
                        intptr_t deopt_id = DeoptId::kNone)
-      : TemplateDefinition(deopt_id),
+      : TemplateDefinition(source, deopt_id),
         field_(field),
-        token_pos_(token_pos),
+        token_pos_(source.token_pos),
         calls_initializer_(calls_initializer) {
     ASSERT(!calls_initializer || (deopt_id != DeoptId::kNone));
   }
@@ -5544,8 +5579,10 @@
  public:
   StoreStaticFieldInstr(const Field& field,
                         Value* value,
-                        TokenPosition token_pos)
-      : field_(field), token_pos_(token_pos) {
+                        const InstructionSource& source)
+      : TemplateDefinition(source),
+        field_(field),
+        token_pos_(source.token_pos) {
     ASSERT(field.IsZoneHandle());
     SetInputAt(kValuePos, value);
     CheckField(field);
@@ -5596,7 +5633,7 @@
                    intptr_t class_id,
                    AlignmentType alignment,
                    intptr_t deopt_id,
-                   TokenPosition token_pos,
+                   const InstructionSource& source,
                    CompileType* result_type = nullptr);
 
   TokenPosition token_pos() const { return token_pos_; }
@@ -5668,9 +5705,10 @@
                      Value* index,
                      intptr_t element_count,
                      intptr_t class_id,
-                     TokenPosition token_pos)
-      : class_id_(class_id),
-        token_pos_(token_pos),
+                     const InstructionSource& source)
+      : TemplateDefinition(source),
+        class_id_(class_id),
+        token_pos_(source.token_pos),
         element_count_(element_count),
         representation_(kTagged) {
     ASSERT(element_count == 1 || element_count == 2 || element_count == 4);
@@ -5775,10 +5813,10 @@
 class StringInterpolateInstr : public TemplateDefinition<1, Throws> {
  public:
   StringInterpolateInstr(Value* value,
-                         TokenPosition token_pos,
+                         const InstructionSource& source,
                          intptr_t deopt_id)
-      : TemplateDefinition(deopt_id),
-        token_pos_(token_pos),
+      : TemplateDefinition(source, deopt_id),
+        token_pos_(source.token_pos),
         function_(Function::ZoneHandle()) {
     SetInputAt(0, value);
   }
@@ -5888,7 +5926,7 @@
                     intptr_t class_id,
                     AlignmentType alignment,
                     intptr_t deopt_id,
-                    TokenPosition token_pos,
+                    const InstructionSource& source,
                     SpeculativeMode speculative_mode = kGuardInputs);
   DECLARE_INSTRUCTION(StoreIndexed)
 
@@ -5984,13 +6022,15 @@
 
 class InstanceOfInstr : public TemplateDefinition<3, Throws> {
  public:
-  InstanceOfInstr(TokenPosition token_pos,
+  InstanceOfInstr(const InstructionSource& source,
                   Value* value,
                   Value* instantiator_type_arguments,
                   Value* function_type_arguments,
                   const AbstractType& type,
                   intptr_t deopt_id)
-      : TemplateDefinition(deopt_id), token_pos_(token_pos), type_(type) {
+      : TemplateDefinition(source, deopt_id),
+        token_pos_(source.token_pos),
+        type_(type) {
     ASSERT(!type.IsNull());
     SetInputAt(0, value);
     SetInputAt(1, instantiator_type_arguments);
@@ -6029,10 +6069,10 @@
 // either reside in new space or be in the store buffer.
 class AllocationInstr : public Definition {
  public:
-  explicit AllocationInstr(TokenPosition token_pos,
+  explicit AllocationInstr(const InstructionSource& source,
                            intptr_t deopt_id = DeoptId::kNone)
-      : Definition(deopt_id),
-        token_pos_(token_pos),
+      : Definition(source, deopt_id),
+        token_pos_(source.token_pos),
         identity_(AliasIdentity::Unknown()) {}
 
   virtual TokenPosition token_pos() const { return token_pos_; }
@@ -6056,9 +6096,9 @@
 template <intptr_t N, typename ThrowsTrait>
 class TemplateAllocation : public AllocationInstr {
  public:
-  explicit TemplateAllocation(TokenPosition token_pos,
+  explicit TemplateAllocation(const InstructionSource& source,
                               intptr_t deopt_id = DeoptId::kNone)
-      : AllocationInstr(token_pos, deopt_id), inputs_() {}
+      : AllocationInstr(source, deopt_id), inputs_() {}
 
   virtual intptr_t InputCount() const { return N; }
   virtual Value* InputAt(intptr_t i) const { return inputs_[i]; }
@@ -6077,10 +6117,10 @@
 
 class AllocateObjectInstr : public AllocationInstr {
  public:
-  AllocateObjectInstr(TokenPosition token_pos,
+  AllocateObjectInstr(const InstructionSource& source,
                       const Class& cls,
                       Value* type_arguments = nullptr)
-      : AllocationInstr(token_pos),
+      : AllocationInstr(source),
         cls_(cls),
         type_arguments_(type_arguments),
         closure_function_(Function::ZoneHandle()) {
@@ -6144,7 +6184,7 @@
 class AllocateUninitializedContextInstr
     : public TemplateAllocation<0, NoThrow> {
  public:
-  AllocateUninitializedContextInstr(TokenPosition token_pos,
+  AllocateUninitializedContextInstr(const InstructionSource& source,
                                     intptr_t num_context_variables);
 
   DECLARE_INSTRUCTION(AllocateUninitializedContext)
@@ -6256,8 +6296,9 @@
 
 class ArrayAllocationInstr : public AllocationInstr {
  public:
-  explicit ArrayAllocationInstr(TokenPosition token_pos, intptr_t deopt_id)
-      : AllocationInstr(token_pos, deopt_id) {}
+  explicit ArrayAllocationInstr(const InstructionSource& source,
+                                intptr_t deopt_id)
+      : AllocationInstr(source, deopt_id) {}
 
   virtual Value* num_elements() const = 0;
 
@@ -6277,8 +6318,9 @@
 template <intptr_t N, typename ThrowsTrait>
 class TemplateArrayAllocation : public ArrayAllocationInstr {
  public:
-  explicit TemplateArrayAllocation(TokenPosition token_pos, intptr_t deopt_id)
-      : ArrayAllocationInstr(token_pos, deopt_id), inputs_() {}
+  explicit TemplateArrayAllocation(const InstructionSource& source,
+                                   intptr_t deopt_id)
+      : ArrayAllocationInstr(source, deopt_id), inputs_() {}
 
   virtual intptr_t InputCount() const { return N; }
   virtual Value* InputAt(intptr_t i) const { return inputs_[i]; }
@@ -6294,11 +6336,11 @@
 
 class CreateArrayInstr : public TemplateArrayAllocation<2, Throws> {
  public:
-  CreateArrayInstr(TokenPosition token_pos,
+  CreateArrayInstr(const InstructionSource& source,
                    Value* element_type,
                    Value* num_elements,
                    intptr_t deopt_id)
-      : TemplateArrayAllocation(token_pos, deopt_id) {
+      : TemplateArrayAllocation(source, deopt_id) {
     SetInputAt(kElementTypePos, element_type);
     SetInputAt(kLengthPos, num_elements);
   }
@@ -6332,11 +6374,11 @@
 
 class AllocateTypedDataInstr : public TemplateArrayAllocation<1, Throws> {
  public:
-  AllocateTypedDataInstr(TokenPosition token_pos,
+  AllocateTypedDataInstr(const InstructionSource& source,
                          classid_t class_id,
                          Value* num_elements,
                          intptr_t deopt_id)
-      : TemplateArrayAllocation(token_pos, deopt_id), class_id_(class_id) {
+      : TemplateArrayAllocation(source, deopt_id), class_id_(class_id) {
     SetInputAt(kLengthPos, num_elements);
   }
 
@@ -6501,12 +6543,12 @@
  public:
   LoadFieldInstr(Value* instance,
                  const Slot& slot,
-                 TokenPosition token_pos,
+                 const InstructionSource& source,
                  bool calls_initializer = false,
                  intptr_t deopt_id = DeoptId::kNone)
-      : TemplateDefinition(deopt_id),
+      : TemplateDefinition(source, deopt_id),
         slot_(slot),
-        token_pos_(token_pos),
+        token_pos_(source.token_pos),
         calls_initializer_(calls_initializer),
         throw_exception_on_initialization_(false) {
     ASSERT(!calls_initializer || (deopt_id != DeoptId::kNone));
@@ -6609,12 +6651,14 @@
 
 class InstantiateTypeInstr : public TemplateDefinition<2, Throws> {
  public:
-  InstantiateTypeInstr(TokenPosition token_pos,
+  InstantiateTypeInstr(const InstructionSource& source,
                        const AbstractType& type,
                        Value* instantiator_type_arguments,
                        Value* function_type_arguments,
                        intptr_t deopt_id)
-      : TemplateDefinition(deopt_id), token_pos_(token_pos), type_(type) {
+      : TemplateDefinition(source, deopt_id),
+        token_pos_(source.token_pos),
+        type_(type) {
     ASSERT(type.IsZoneHandle() || type.IsReadOnlyHandle());
     SetInputAt(0, instantiator_type_arguments);
     SetInputAt(1, function_type_arguments);
@@ -6644,15 +6688,15 @@
 
 class InstantiateTypeArgumentsInstr : public TemplateDefinition<3, Throws> {
  public:
-  InstantiateTypeArgumentsInstr(TokenPosition token_pos,
+  InstantiateTypeArgumentsInstr(const InstructionSource& source,
                                 Value* instantiator_type_arguments,
                                 Value* function_type_arguments,
                                 Value* type_arguments,
                                 const Class& instantiator_class,
                                 const Function& function,
                                 intptr_t deopt_id)
-      : TemplateDefinition(deopt_id),
-        token_pos_(token_pos),
+      : TemplateDefinition(source, deopt_id),
+        token_pos_(source.token_pos),
         instantiator_class_(instantiator_class),
         function_(function) {
     ASSERT(instantiator_class.IsReadOnlyHandle() ||
@@ -6729,9 +6773,9 @@
 // for the given [context_variables].
 class AllocateContextInstr : public TemplateAllocation<0, NoThrow> {
  public:
-  AllocateContextInstr(TokenPosition token_pos,
+  AllocateContextInstr(const InstructionSource& source,
                        const ZoneGrowableArray<const Slot*>& context_slots)
-      : TemplateAllocation(token_pos), context_slots_(context_slots) {}
+      : TemplateAllocation(source), context_slots_(context_slots) {}
 
   DECLARE_INSTRUCTION(AllocateContext)
   virtual CompileType ComputeType() const;
@@ -6763,12 +6807,12 @@
 // it contains exactly the provided [context_variables].
 class CloneContextInstr : public TemplateDefinition<1, NoThrow> {
  public:
-  CloneContextInstr(TokenPosition token_pos,
+  CloneContextInstr(const InstructionSource& source,
                     Value* context_value,
                     const ZoneGrowableArray<const Slot*>& context_slots,
                     intptr_t deopt_id)
-      : TemplateDefinition(deopt_id),
-        token_pos_(token_pos),
+      : TemplateDefinition(source, deopt_id),
+        token_pos_(source.token_pos),
         context_slots_(context_slots) {
     SetInputAt(0, context_value);
   }
@@ -7338,11 +7382,11 @@
                       Value* left,
                       Value* right,
                       intptr_t deopt_id,
-                      TokenPosition token_pos,
+                      const InstructionSource& source,
                       SpeculativeMode speculative_mode = kGuardInputs)
-      : TemplateDefinition(deopt_id),
+      : TemplateDefinition(source, deopt_id),
         op_kind_(op_kind),
-        token_pos_(token_pos),
+        token_pos_(source.token_pos),
         speculative_mode_(speculative_mode) {
     SetInputAt(0, left);
     SetInputAt(1, right);
@@ -7400,8 +7444,8 @@
   DoubleTestOpInstr(MethodRecognizer::Kind op_kind,
                     Value* value,
                     intptr_t deopt_id,
-                    TokenPosition token_pos)
-      : TemplateComparison(token_pos, Token::kEQ, deopt_id), op_kind_(op_kind) {
+                    const InstructionSource& source)
+      : TemplateComparison(source, Token::kEQ, deopt_id), op_kind_(op_kind) {
     SetInputAt(0, value);
   }
 
@@ -7606,7 +7650,7 @@
                             Value* left,
                             Value* right,
                             TemplateDartCall<0>* call)
-      : TemplateComparison(call->token_pos(), op_kind, call->deopt_id()),
+      : TemplateComparison(call->source(), op_kind, call->deopt_id()),
         call_(call),
         is_negated_(false) {
     ASSERT(call->type_args_len() == 0);
@@ -8133,13 +8177,13 @@
     kOsrOnly,
   };
 
-  CheckStackOverflowInstr(TokenPosition token_pos,
+  CheckStackOverflowInstr(const InstructionSource& source,
                           intptr_t stack_depth,
                           intptr_t loop_depth,
                           intptr_t deopt_id,
                           Kind kind)
-      : TemplateInstruction(deopt_id),
-        token_pos_(token_pos),
+      : TemplateInstruction(source, deopt_id),
+        token_pos_(source.token_pos),
         stack_depth_(stack_depth),
         loop_depth_(loop_depth),
         kind_(kind) {
@@ -8180,8 +8224,8 @@
 // TODO(vegorov): remove this instruction in favor of Int32ToDouble.
 class SmiToDoubleInstr : public TemplateDefinition<1, NoThrow, Pure> {
  public:
-  SmiToDoubleInstr(Value* value, TokenPosition token_pos)
-      : token_pos_(token_pos) {
+  SmiToDoubleInstr(Value* value, const InstructionSource& source)
+      : TemplateDefinition(source), token_pos_(source.token_pos) {
     SetInputAt(0, value);
   }
 
@@ -8448,7 +8492,7 @@
   InvokeMathCFunctionInstr(ZoneGrowableArray<Value*>* inputs,
                            intptr_t deopt_id,
                            MethodRecognizer::Kind recognized_kind,
-                           TokenPosition token_pos);
+                           const InstructionSource& source);
 
   static intptr_t ArgumentCountFor(MethodRecognizer::Kind recognized_kind_);
 
@@ -8595,7 +8639,7 @@
   CheckClassInstr(Value* value,
                   intptr_t deopt_id,
                   const Cids& cids,
-                  TokenPosition token_pos);
+                  const InstructionSource& source);
 
   DECLARE_INSTRUCTION(CheckClass)
 
@@ -8654,9 +8698,11 @@
 
 class CheckSmiInstr : public TemplateInstruction<1, NoThrow, Pure> {
  public:
-  CheckSmiInstr(Value* value, intptr_t deopt_id, TokenPosition token_pos)
-      : TemplateInstruction(deopt_id),
-        token_pos_(token_pos),
+  CheckSmiInstr(Value* value,
+                intptr_t deopt_id,
+                const InstructionSource& source)
+      : TemplateInstruction(source, deopt_id),
+        token_pos_(source.token_pos),
         licm_hoisted_(false) {
     SetInputAt(0, value);
   }
@@ -8696,10 +8742,10 @@
   CheckNullInstr(Value* value,
                  const String& function_name,
                  intptr_t deopt_id,
-                 TokenPosition token_pos,
+                 const InstructionSource& source,
                  ExceptionType exception_type = kNoSuchMethod)
-      : TemplateDefinition(deopt_id),
-        token_pos_(token_pos),
+      : TemplateDefinition(source, deopt_id),
+        token_pos_(source.token_pos),
         function_name_(function_name),
         exception_type_(exception_type) {
     ASSERT(function_name.IsNotTemporaryScopedHandle());
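
The backend files that follow apply the same migration on the consumer side:
FlowGraphCompiler entry points that previously took a TokenPosition
(EmitCallsiteMetadata, EmitYieldPositionMetadata, GenerateStubCall,
GeneratePatchableCall, GenerateRuntimeCall, GenerateStaticCall,
GenerateInstanceOf, AddCurrentDescriptor, AddDescriptor,
EmitMegamorphicInstanceCall, EmitEqualityRegConstCompare) now take an
InstructionSource, instructions pass source() instead of token_pos(), and a
default-constructed InstructionSource() stands in for TokenPosition::kNoSource.
A minimal sketch of the emit-side pattern (the instruction name is a
placeholder; the argument lists follow the call sites in the hunks below):

    void SomeCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
      // ... code generation for the call itself ...
      // Call-site metadata now flows as an InstructionSource:
      compiler->EmitCallsiteMetadata(source(), deopt_id(),
                                     PcDescriptorsLayout::kOther, locs());
    }

    // When there is no meaningful source position, pass a default value:
    //   compiler->GenerateStubCall(InstructionSource(), stub,
    //                              PcDescriptorsLayout::kOther, locs);
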
diff --git a/runtime/vm/compiler/backend/il_arm.cc b/runtime/vm/compiler/backend/il_arm.cc
index 999f9a5..c4d4140 100644
--- a/runtime/vm/compiler/backend/il_arm.cc
+++ b/runtime/vm/compiler/backend/il_arm.cc
@@ -505,7 +505,7 @@
 #endif
   ASSERT(__ constant_pool_allowed());
   if (yield_index() != PcDescriptorsLayout::kInvalidYieldIndex) {
-    compiler->EmitYieldPositionMetadata(token_pos(), yield_index());
+    compiler->EmitYieldPositionMetadata(source(), yield_index());
   }
   __ LeaveDartFrameAndReturn();  // Disallows constant pool use.
   // This ReturnInstr may be emitted out of order by the optimizer. The next
@@ -624,7 +624,7 @@
     __ LoadImmediate(R9, 0);
   }
   __ blx(R2);
-  compiler->EmitCallsiteMetadata(token_pos(), deopt_id(),
+  compiler->EmitCallsiteMetadata(source(), deopt_id(),
                                  PcDescriptorsLayout::kOther, locs());
   __ Drop(argument_count);
 }
@@ -1269,10 +1269,10 @@
                          ? compiler::ObjectPoolBuilderEntry::kPatchable
                          : compiler::ObjectPoolBuilderEntry::kNotPatchable);
   if (link_lazily()) {
-    compiler->GeneratePatchableCall(token_pos(), *stub,
+    compiler->GeneratePatchableCall(source(), *stub,
                                     PcDescriptorsLayout::kOther, locs());
   } else {
-    compiler->GenerateStubCall(token_pos(), *stub, PcDescriptorsLayout::kOther,
+    compiler->GenerateStubCall(source(), *stub, PcDescriptorsLayout::kOther,
                                locs());
   }
   __ Pop(result);
@@ -1314,7 +1314,7 @@
   // For historical reasons, the PC on ARM points 8 bytes past the current
   // instruction. Therefore we emit the metadata here, 8 bytes (2 instructions)
   // after the original mov.
-  compiler->EmitCallsiteMetadata(TokenPosition::kNoSource, deopt_id(),
+  compiler->EmitCallsiteMetadata(InstructionSource(), deopt_id(),
                                  PcDescriptorsLayout::Kind::kOther, locs());
 
   // Update information in the thread object and enter a safepoint.
@@ -1595,9 +1595,8 @@
   const Array& kNoArgumentNames = Object::null_array();
   ArgumentsInfo args_info(kTypeArgsLen, kNumberOfArguments, kSizeOfArguments,
                           kNoArgumentNames);
-  compiler->GenerateStaticCall(deopt_id(), token_pos(), CallFunction(),
-                               args_info, locs(), ICData::Handle(),
-                               ICData::kStatic);
+  compiler->GenerateStaticCall(deopt_id(), source(), CallFunction(), args_info,
+                               locs(), ICData::Handle(), ICData::kStatic);
   ASSERT(locs()->out(0).reg() == R0);
 }
 
@@ -2620,7 +2619,7 @@
     locs->live_registers()->Remove(Location::RegisterLocation(result_));
 
     compiler->SaveLiveRegisters(locs);
-    compiler->GenerateStubCall(TokenPosition::kNoSource,  // No token position.
+    compiler->GenerateStubCall(InstructionSource(),  // No token position.
                                stub, PcDescriptorsLayout::kOther, locs);
     __ MoveRegister(result_, R0);
     compiler->RestoreLiveRegisters(locs);
@@ -3076,7 +3075,7 @@
   ASSERT(locs()->in(1).reg() == TypeTestABI::kInstantiatorTypeArgumentsReg);
   ASSERT(locs()->in(2).reg() == TypeTestABI::kFunctionTypeArgumentsReg);
 
-  compiler->GenerateInstanceOf(token_pos(), deopt_id(), type(), locs());
+  compiler->GenerateInstanceOf(source(), deopt_id(), type(), locs());
   ASSERT(locs()->out(0).reg() == R0);
 }
 
@@ -3180,7 +3179,7 @@
   auto object_store = compiler->isolate()->object_store();
   const auto& allocate_array_stub =
       Code::ZoneHandle(compiler->zone(), object_store->allocate_array_stub());
-  compiler->GenerateStubCall(token_pos(), allocate_array_stub,
+  compiler->GenerateStubCall(source(), allocate_array_stub,
                              PcDescriptorsLayout::kOther, locs(), deopt_id());
   __ Bind(&done);
   ASSERT(locs()->out(0).reg() == kResultReg);
@@ -3475,7 +3474,7 @@
                 "Should be ordered to push arguments with one instruction");
   __ PushList((1 << instantiator_type_args_reg) |
               (1 << function_type_args_reg));
-  compiler->GenerateRuntimeCall(token_pos(), deopt_id(),
+  compiler->GenerateRuntimeCall(source(), deopt_id(),
                                 kInstantiateTypeRuntimeEntry, 3, locs());
   __ Drop(3);          // Drop 2 type vectors, and uninstantiated type.
   __ Pop(result_reg);  // Pop instantiated type.
@@ -3539,8 +3538,8 @@
     __ b(&type_arguments_instantiated, EQ);
   }
   // Lookup cache in stub before calling runtime.
-  compiler->GenerateStubCall(token_pos(), GetStub(),
-                             PcDescriptorsLayout::kOther, locs());
+  compiler->GenerateStubCall(source(), GetStub(), PcDescriptorsLayout::kOther,
+                             locs());
   __ Bind(&type_arguments_instantiated);
 }
 
@@ -3579,8 +3578,7 @@
     const auto& allocate_context_stub = Code::ZoneHandle(
         compiler->zone(), object_store->allocate_context_stub());
     __ LoadImmediate(R1, instruction()->num_context_variables());
-    compiler->GenerateStubCall(instruction()->token_pos(),
-                               allocate_context_stub,
+    compiler->GenerateStubCall(instruction()->source(), allocate_context_stub,
                                PcDescriptorsLayout::kOther, locs);
     ASSERT(instruction()->locs()->out(0).reg() == R0);
     compiler->RestoreLiveRegisters(instruction()->locs());
@@ -3630,7 +3628,7 @@
   const auto& allocate_context_stub =
       Code::ZoneHandle(compiler->zone(), object_store->allocate_context_stub());
   __ LoadImmediate(R1, num_context_variables());
-  compiler->GenerateStubCall(token_pos(), allocate_context_stub,
+  compiler->GenerateStubCall(source(), allocate_context_stub,
                              PcDescriptorsLayout::kOther, locs());
 }
 
@@ -3652,7 +3650,7 @@
   auto object_store = compiler->isolate()->object_store();
   const auto& clone_context_stub =
       Code::ZoneHandle(compiler->zone(), object_store->clone_context_stub());
-  compiler->GenerateStubCall(token_pos(), clone_context_stub,
+  compiler->GenerateStubCall(source(), clone_context_stub,
                              /*kind=*/PcDescriptorsLayout::kOther, locs());
 }
 
@@ -3675,7 +3673,7 @@
       compiler->AddDeoptIndexAtCall(deopt_id);
     } else {
       compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id,
-                                     TokenPosition::kNoSource);
+                                     InstructionSource());
     }
   }
   if (HasParallelMove()) {
@@ -3760,11 +3758,11 @@
       compiler->RecordCatchEntryMoves();
       compiler->AddDescriptor(
           PcDescriptorsLayout::kOther, compiler->assembler()->CodeSize(),
-          instruction()->deopt_id(), instruction()->token_pos(),
+          instruction()->deopt_id(), instruction()->source(),
           compiler->CurrentTryIndex());
     } else {
       compiler->GenerateRuntimeCall(
-          instruction()->token_pos(), instruction()->deopt_id(),
+          instruction()->source(), instruction()->deopt_id(),
           kStackOverflowRuntimeEntry, kNumSlowPathArgs, instruction()->locs());
     }
 
@@ -3773,7 +3771,7 @@
       // In unoptimized code, record loop stack checks as possible OSR entries.
       compiler->AddCurrentDescriptor(PcDescriptorsLayout::kOsrEntry,
                                      instruction()->deopt_id(),
-                                     TokenPosition::kNoSource);
+                                     InstructionSource());
     }
     compiler->pending_deoptimization_env_ = NULL;
     if (!using_shared_stub) {
@@ -3812,7 +3810,7 @@
     // reflect live registers being saved in the shared spilling stubs (see
     // the stub above).
     auto extended_env = compiler->SlowPathEnvironmentFor(this, 0);
-    compiler->EmitCallsiteMetadata(token_pos(), deopt_id(),
+    compiler->EmitCallsiteMetadata(source(), deopt_id(),
                                    PcDescriptorsLayout::kOther, locs(),
                                    extended_env);
     return;
@@ -3972,7 +3970,7 @@
             /*type_args_len=*/0, /*num_arguments=*/2));
     compiler->EmitMegamorphicInstanceCall(
         selector, arguments_descriptor, instruction()->call()->deopt_id(),
-        instruction()->token_pos(), locs, try_index_, kNumSlowPathArgs);
+        instruction()->source(), locs, try_index_, kNumSlowPathArgs);
     __ mov(result, compiler::Operand(R0));
     compiler->RestoreLiveRegisters(locs);
     __ b(exit_label());
@@ -4121,7 +4119,7 @@
             /*type_args_len=*/0, /*num_arguments=*/2));
     compiler->EmitMegamorphicInstanceCall(
         selector, arguments_descriptor, instruction()->call()->deopt_id(),
-        instruction()->token_pos(), locs, try_index_, kNumSlowPathArgs);
+        instruction()->source(), locs, try_index_, kNumSlowPathArgs);
     __ mov(result, compiler::Operand(R0));
     compiler->RestoreLiveRegisters(locs);
     compiler->pending_deoptimization_env_ = nullptr;
@@ -5076,7 +5074,7 @@
     ASSERT(!locs()->live_registers()->ContainsRegister(
         AllocateMintABI::kResultReg));
     auto extended_env = compiler->SlowPathEnvironmentFor(this, 0);
-    compiler->GenerateStubCall(token_pos(), stub, PcDescriptorsLayout::kOther,
+    compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
                                locs(), DeoptId::kNone, extended_env);
   } else {
     BoxAllocationSlowPath::Allocate(compiler, this, compiler->mint_class(),
@@ -6122,7 +6120,7 @@
   const Array& kNoArgumentNames = Object::null_array();
   ArgumentsInfo args_info(kTypeArgsLen, kNumberOfArguments, kSizeOfArguments,
                           kNoArgumentNames);
-  compiler->GenerateStaticCall(deopt_id(), instance_call()->token_pos(), target,
+  compiler->GenerateStaticCall(deopt_id(), instance_call()->source(), target,
                                args_info, locs(), ICData::Handle(),
                                ICData::kStatic);
   __ Bind(&done);
@@ -6632,7 +6630,7 @@
     // reflect live registers being saved in the shared spilling stubs (see
     // the stub above).
     auto extended_env = compiler->SlowPathEnvironmentFor(this, 0);
-    compiler->EmitCallsiteMetadata(token_pos(), deopt_id(),
+    compiler->EmitCallsiteMetadata(source(), deopt_id(),
                                    PcDescriptorsLayout::kOther, locs(),
                                    extended_env);
     CheckNullInstr::AddMetadataForRuntimeCall(this, compiler);
@@ -7532,7 +7530,7 @@
     // Add a deoptimization descriptor for deoptimizing instructions that
     // may be inserted before this instruction.
     compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, GetDeoptId(),
-                                   TokenPosition::kNoSource);
+                                   InstructionSource());
   }
   if (HasParallelMove()) {
     compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
@@ -7622,7 +7620,7 @@
     Register reg,
     const Object& obj) {
   return compiler->EmitEqualityRegConstCompare(reg, obj, needs_number_check(),
-                                               token_pos(), deopt_id());
+                                               source(), deopt_id());
 }
 
 void ComparisonInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
@@ -7705,7 +7703,7 @@
   }
   const Code& stub = Code::ZoneHandle(
       compiler->zone(), StubCode::GetAllocationStubForClass(cls()));
-  compiler->GenerateStubCall(token_pos(), stub, PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
                              locs());
 }
 
@@ -7715,7 +7713,7 @@
 #else
   ASSERT(!compiler->is_optimizing());
   __ BranchLinkPatchable(StubCode::DebugStepCheck());
-  compiler->AddCurrentDescriptor(stub_kind_, deopt_id_, token_pos());
+  compiler->AddCurrentDescriptor(stub_kind_, deopt_id_, source());
   compiler->RecordSafepoint(locs());
 #endif
 }
diff --git a/runtime/vm/compiler/backend/il_arm64.cc b/runtime/vm/compiler/backend/il_arm64.cc
index 201633d..a3fe07a 100644
--- a/runtime/vm/compiler/backend/il_arm64.cc
+++ b/runtime/vm/compiler/backend/il_arm64.cc
@@ -413,7 +413,7 @@
 #endif
   ASSERT(__ constant_pool_allowed());
   if (yield_index() != PcDescriptorsLayout::kInvalidYieldIndex) {
-    compiler->EmitYieldPositionMetadata(token_pos(), yield_index());
+    compiler->EmitYieldPositionMetadata(source(), yield_index());
   }
   __ LeaveDartFrame();  // Disallows constant pool use.
   __ ret();
@@ -528,7 +528,7 @@
     __ LoadImmediate(R5, 0);
   }
   __ blr(R2);
-  compiler->EmitCallsiteMetadata(token_pos(), deopt_id(),
+  compiler->EmitCallsiteMetadata(source(), deopt_id(),
                                  PcDescriptorsLayout::kOther, locs());
   __ Drop(argument_count);
 }
@@ -1102,10 +1102,10 @@
                      link_lazily() ? ObjectPool::Patchability::kPatchable
                                    : ObjectPool::Patchability::kNotPatchable);
   if (link_lazily()) {
-    compiler->GeneratePatchableCall(token_pos(), *stub,
+    compiler->GeneratePatchableCall(source(), *stub,
                                     PcDescriptorsLayout::kOther, locs());
   } else {
-    compiler->GenerateStubCall(token_pos(), *stub, PcDescriptorsLayout::kOther,
+    compiler->GenerateStubCall(source(), *stub, PcDescriptorsLayout::kOther,
                                locs());
   }
   __ Pop(result);
@@ -1142,7 +1142,7 @@
   // ADR loads relative to itself, so add kInstrSize to point to the next
   // instruction.
   __ adr(temp, compiler::Immediate(Instr::kInstrSize));
-  compiler->EmitCallsiteMetadata(token_pos(), deopt_id(),
+  compiler->EmitCallsiteMetadata(source(), deopt_id(),
                                  PcDescriptorsLayout::Kind::kOther, locs());
 
   __ StoreToOffset(temp, FPREG, kSavedCallerPcSlotFromFp * kWordSize);
@@ -1441,9 +1441,8 @@
   const Array& kNoArgumentNames = Object::null_array();
   ArgumentsInfo args_info(kTypeArgsLen, kNumberOfArguments, kSizeOfArguments,
                           kNoArgumentNames);
-  compiler->GenerateStaticCall(deopt_id(), token_pos(), CallFunction(),
-                               args_info, locs(), ICData::Handle(),
-                               ICData::kStatic);
+  compiler->GenerateStaticCall(deopt_id(), source(), CallFunction(), args_info,
+                               locs(), ICData::Handle(), ICData::kStatic);
   ASSERT(locs()->out(0).reg() == R0);
 }
 
@@ -2309,7 +2308,7 @@
     locs->live_registers()->Remove(Location::RegisterLocation(result_));
 
     compiler->SaveLiveRegisters(locs);
-    compiler->GenerateStubCall(TokenPosition::kNoSource,  // No token position.
+    compiler->GenerateStubCall(InstructionSource(),  // No token position.
                                stub, PcDescriptorsLayout::kOther, locs);
     __ MoveRegister(result_, R0);
     compiler->RestoreLiveRegisters(locs);
@@ -2624,7 +2623,7 @@
   ASSERT(locs()->in(1).reg() == TypeTestABI::kInstantiatorTypeArgumentsReg);
   ASSERT(locs()->in(2).reg() == TypeTestABI::kFunctionTypeArgumentsReg);
 
-  compiler->GenerateInstanceOf(token_pos(), deopt_id(), type(), locs());
+  compiler->GenerateInstanceOf(source(), deopt_id(), type(), locs());
   ASSERT(locs()->out(0).reg() == R0);
 }
 
@@ -2727,7 +2726,7 @@
   auto object_store = compiler->isolate()->object_store();
   const auto& allocate_array_stub =
       Code::ZoneHandle(compiler->zone(), object_store->allocate_array_stub());
-  compiler->GenerateStubCall(token_pos(), allocate_array_stub,
+  compiler->GenerateStubCall(source(), allocate_array_stub,
                              PcDescriptorsLayout::kOther, locs(), deopt_id());
   ASSERT(locs()->out(0).reg() == kResultReg);
   __ Bind(&done);
@@ -2985,7 +2984,7 @@
   __ LoadObject(TMP, type());
   __ PushPair(TMP, NULL_REG);
   __ PushPair(function_type_args_reg, instantiator_type_args_reg);
-  compiler->GenerateRuntimeCall(token_pos(), deopt_id(),
+  compiler->GenerateRuntimeCall(source(), deopt_id(),
                                 kInstantiateTypeRuntimeEntry, 3, locs());
   __ Drop(3);          // Drop 2 type vectors, and uninstantiated type.
   __ Pop(result_reg);  // Pop instantiated type.
@@ -3052,8 +3051,8 @@
   }
   // Lookup cache in stub before calling runtime.
 
-  compiler->GenerateStubCall(token_pos(), GetStub(),
-                             PcDescriptorsLayout::kOther, locs());
+  compiler->GenerateStubCall(source(), GetStub(), PcDescriptorsLayout::kOther,
+                             locs());
   __ Bind(&type_arguments_instantiated);
 }
 
@@ -3093,8 +3092,7 @@
         compiler->zone(), object_store->allocate_context_stub());
 
     __ LoadImmediate(R1, instruction()->num_context_variables());
-    compiler->GenerateStubCall(instruction()->token_pos(),
-                               allocate_context_stub,
+    compiler->GenerateStubCall(instruction()->source(), allocate_context_stub,
                                PcDescriptorsLayout::kOther, locs);
     ASSERT(instruction()->locs()->out(0).reg() == R0);
     compiler->RestoreLiveRegisters(instruction()->locs());
@@ -3144,7 +3142,7 @@
   const auto& allocate_context_stub =
       Code::ZoneHandle(compiler->zone(), object_store->allocate_context_stub());
   __ LoadImmediate(R1, num_context_variables());
-  compiler->GenerateStubCall(token_pos(), allocate_context_stub,
+  compiler->GenerateStubCall(source(), allocate_context_stub,
                              PcDescriptorsLayout::kOther, locs());
 }
 
@@ -3166,7 +3164,7 @@
   auto object_store = compiler->isolate()->object_store();
   const auto& clone_context_stub =
       Code::ZoneHandle(compiler->zone(), object_store->clone_context_stub());
-  compiler->GenerateStubCall(token_pos(), clone_context_stub,
+  compiler->GenerateStubCall(source(), clone_context_stub,
                              /*kind=*/PcDescriptorsLayout::kOther, locs());
 }
 
@@ -3189,7 +3187,7 @@
       compiler->AddDeoptIndexAtCall(deopt_id);
     } else {
       compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id,
-                                     TokenPosition::kNoSource);
+                                     InstructionSource());
     }
   }
   if (HasParallelMove()) {
@@ -3285,11 +3283,11 @@
       compiler->RecordCatchEntryMoves();
       compiler->AddDescriptor(
           PcDescriptorsLayout::kOther, compiler->assembler()->CodeSize(),
-          instruction()->deopt_id(), instruction()->token_pos(),
+          instruction()->deopt_id(), instruction()->source(),
           compiler->CurrentTryIndex());
     } else {
       compiler->GenerateRuntimeCall(
-          instruction()->token_pos(), instruction()->deopt_id(),
+          instruction()->source(), instruction()->deopt_id(),
           kStackOverflowRuntimeEntry, kNumSlowPathArgs, locs);
     }
 
@@ -3298,7 +3296,7 @@
       // In unoptimized code, record loop stack checks as possible OSR entries.
       compiler->AddCurrentDescriptor(PcDescriptorsLayout::kOsrEntry,
                                      instruction()->deopt_id(),
-                                     TokenPosition::kNoSource);
+                                     InstructionSource());
     }
     compiler->pending_deoptimization_env_ = NULL;
     if (!using_shared_stub) {
@@ -3470,7 +3468,7 @@
             /*type_args_len=*/0, /*num_arguments=*/2));
     compiler->EmitMegamorphicInstanceCall(
         selector, arguments_descriptor, instruction()->call()->deopt_id(),
-        instruction()->token_pos(), locs, try_index_, kNumSlowPathArgs);
+        instruction()->source(), locs, try_index_, kNumSlowPathArgs);
     __ mov(result, R0);
     compiler->RestoreLiveRegisters(locs);
     __ b(exit_label());
@@ -3616,7 +3614,7 @@
             /*type_args_len=*/0, /*num_arguments=*/2));
     compiler->EmitMegamorphicInstanceCall(
         selector, arguments_descriptor, instruction()->call()->deopt_id(),
-        instruction()->token_pos(), locs, try_index_, kNumSlowPathArgs);
+        instruction()->source(), locs, try_index_, kNumSlowPathArgs);
     __ mov(result, R0);
     compiler->RestoreLiveRegisters(locs);
     compiler->pending_deoptimization_env_ = nullptr;
@@ -4295,7 +4293,7 @@
     ASSERT(!locs()->live_registers()->ContainsRegister(
         AllocateMintABI::kResultReg));
     auto extended_env = compiler->SlowPathEnvironmentFor(this, 0);
-    compiler->GenerateStubCall(token_pos(), stub, PcDescriptorsLayout::kOther,
+    compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
                                locs(), DeoptId::kNone, extended_env);
   } else {
     BoxAllocationSlowPath::Allocate(compiler, this, compiler->mint_class(), out,
@@ -5162,7 +5160,7 @@
   const Array& kNoArgumentNames = Object::null_array();
   ArgumentsInfo args_info(kTypeArgsLen, kNumberOfArguments, kSizeOfArguments,
                           kNoArgumentNames);
-  compiler->GenerateStaticCall(deopt_id(), instance_call()->token_pos(), target,
+  compiler->GenerateStaticCall(deopt_id(), instance_call()->source(), target,
                                args_info, locs(), ICData::Handle(),
                                ICData::kStatic);
   __ Bind(&done);
@@ -6570,7 +6568,7 @@
     // Add a deoptimization descriptor for deoptimizing instructions that
     // may be inserted before this instruction.
     compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, GetDeoptId(),
-                                   TokenPosition::kNoSource);
+                                   InstructionSource());
   }
   if (HasParallelMove()) {
     compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
@@ -6658,7 +6656,7 @@
     return kInvalidCondition;
   } else {
     return compiler->EmitEqualityRegConstCompare(reg, obj, needs_number_check(),
-                                                 token_pos(), deopt_id());
+                                                 source(), deopt_id());
   }
 }
 
@@ -6736,7 +6734,7 @@
   }
   const Code& stub = Code::ZoneHandle(
       compiler->zone(), StubCode::GetAllocationStubForClass(cls()));
-  compiler->GenerateStubCall(token_pos(), stub, PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
                              locs());
 }
 
@@ -6746,7 +6744,7 @@
 #else
   ASSERT(!compiler->is_optimizing());
   __ BranchLinkPatchable(StubCode::DebugStepCheck());
-  compiler->AddCurrentDescriptor(stub_kind_, deopt_id_, token_pos());
+  compiler->AddCurrentDescriptor(stub_kind_, deopt_id_, source());
   compiler->RecordSafepoint(locs());
 #endif
 }
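
Across the backend il_*.cc hunks above, every call-site-metadata emission switches from passing token_pos() to passing source(), and sites that previously passed TokenPosition::kNoSource now pass a default-constructed InstructionSource(). A minimal sketch of the accessor relationship those call sites appear to rely on, offered as an illustration rather than the SDK's actual implementation:

  // Assumed shape only: token_pos() and inlining_id() remain available on
  // Instruction, while source() bundles them for callers that need both.
  InstructionSource Instruction::source() const {
    return InstructionSource(token_pos(), inlining_id());
  }
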
diff --git a/runtime/vm/compiler/backend/il_deserializer.cc b/runtime/vm/compiler/backend/il_deserializer.cc
index 1639057..2dcf6a6 100644
--- a/runtime/vm/compiler/backend/il_deserializer.cc
+++ b/runtime/vm/compiler/backend/il_deserializer.cc
@@ -771,7 +771,12 @@
           CheckInteger(list->ExtraLookupValue("token_pos"))) {
     token_pos = TokenPosition::Deserialize(token_int->value());
   }
-  InstrInfo common_info = {deopt_id, token_pos};
+  intptr_t inlining_id = -1;
+  if (auto const inlining_int =
+          CheckInteger(list->ExtraLookupValue("inlining_id"))) {
+    inlining_id = inlining_int->value();
+  }
+  InstrInfo common_info = {deopt_id, InstructionSource(token_pos, inlining_id)};
 
   // Parse the environment before handling the instruction, as we may have
   // references to PushArguments and parsing the instruction may pop
@@ -860,7 +865,7 @@
   }
 
   auto const inst =
-      new (zone()) AllocateObjectInstr(info.token_pos, cls, type_arguments);
+      new (zone()) AllocateObjectInstr(info.source, cls, type_arguments);
 
   if (auto const closure_sexp = CheckTaggedList(
           sexp->ExtraLookupValue("closure_function"), "Function")) {
@@ -908,7 +913,7 @@
   }
 
   return new (zone())
-      AssertAssignableInstr(info.token_pos, val, dst_type, inst_type_args,
+      AssertAssignableInstr(info.source, val, dst_type, inst_type_args,
                             func_type_args, dst_name, info.deopt_id, kind);
 }
 
@@ -918,7 +923,7 @@
   auto const val = ParseValue(Retrieve(sexp, 1));
   if (val == nullptr) return nullptr;
 
-  return new (zone()) AssertBooleanInstr(info.token_pos, val, info.deopt_id);
+  return new (zone()) AssertBooleanInstr(info.source, val, info.deopt_id);
 }
 
 BooleanNegateInstr* FlowGraphDeserializer::DeserializeBooleanNegate(
@@ -974,7 +979,7 @@
   }
 
   return new (zone())
-      CheckNullInstr(val, func_name, info.deopt_id, info.token_pos);
+      CheckNullInstr(val, func_name, info.deopt_id, info.source);
 }
 
 CheckStackOverflowInstr* FlowGraphDeserializer::DeserializeCheckStackOverflow(
@@ -998,7 +1003,7 @@
     kind = CheckStackOverflowInstr::kOsrOnly;
   }
 
-  return new (zone()) CheckStackOverflowInstr(info.token_pos, stack_depth,
+  return new (zone()) CheckStackOverflowInstr(info.source, stack_depth,
                                               loop_depth, info.deopt_id, kind);
 }
 
@@ -1007,7 +1012,7 @@
     const InstrInfo& info) {
   Object& obj = Object::ZoneHandle(zone());
   if (!ParseDartValue(Retrieve(sexp, 1), &obj)) return nullptr;
-  return new (zone()) ConstantInstr(obj, info.token_pos);
+  return new (zone()) ConstantInstr(obj, info.source);
 }
 
 DebugStepCheckInstr* FlowGraphDeserializer::DeserializeDebugStepCheck(
@@ -1020,7 +1025,7 @@
       return nullptr;
     }
   }
-  return new (zone()) DebugStepCheckInstr(info.token_pos, kind, info.deopt_id);
+  return new (zone()) DebugStepCheckInstr(info.source, kind, info.deopt_id);
 }
 
 GotoInstr* FlowGraphDeserializer::DeserializeGoto(SExpList* sexp,
@@ -1076,7 +1081,7 @@
   }
 
   auto const inst = new (zone()) InstanceCallInstr(
-      info.token_pos, function_name, token_kind, call_info.inputs,
+      info.source, function_name, token_kind, call_info.inputs,
       call_info.type_args_len, call_info.argument_names, checked_arg_count,
       info.deopt_id, interface_target, tearoff_interface_target);
 
@@ -1118,7 +1123,7 @@
     calls_initializer = calls_initializer_sexp->value();
   }
 
-  return new (zone()) LoadFieldInstr(instance, *slot, info.token_pos,
+  return new (zone()) LoadFieldInstr(instance, *slot, info.source,
                                      calls_initializer, info.deopt_id);
 }
 
@@ -1146,7 +1151,7 @@
   if (!ParseCallInfo(sexp, &call_info)) return nullptr;
 
   return new (zone()) NativeCallInstr(&name, &function, link_lazily,
-                                      info.token_pos, call_info.inputs);
+                                      info.source, call_info.inputs);
 }
 
 ParameterInstr* FlowGraphDeserializer::DeserializeParameter(
@@ -1175,7 +1180,7 @@
                                                       const InstrInfo& info) {
   Value* val = ParseValue(Retrieve(list, 1));
   if (val == nullptr) return nullptr;
-  return new (zone()) ReturnInstr(info.token_pos, val, info.deopt_id);
+  return new (zone()) ReturnInstr(info.source, val, info.deopt_id);
 }
 
 SpecialParameterInstr* FlowGraphDeserializer::DeserializeSpecialParameter(
@@ -1219,10 +1224,9 @@
     }
   }
 
-  auto const inst = new (zone())
-      StaticCallInstr(info.token_pos, function, call_info.type_args_len,
-                      call_info.argument_names, call_info.inputs, info.deopt_id,
-                      call_count, rebind_rule);
+  auto const inst = new (zone()) StaticCallInstr(
+      info.source, function, call_info.type_args_len, call_info.argument_names,
+      call_info.inputs, info.deopt_id, call_count, rebind_rule);
 
   if (call_info.result_type != nullptr) {
     inst->SetResultType(zone(), *call_info.result_type);
@@ -1262,8 +1266,8 @@
     if (init_sexp->value()) kind = StoreInstanceFieldInstr::Kind::kInitializing;
   }
 
-  return new (zone()) StoreInstanceFieldInstr(
-      *slot, instance, value, barrier_type, info.token_pos, kind);
+  return new (zone()) StoreInstanceFieldInstr(*slot, instance, value,
+                                              barrier_type, info.source, kind);
 }
 
 StrictCompareInstr* FlowGraphDeserializer::DeserializeStrictCompare(
@@ -1285,7 +1289,7 @@
     needs_check = check_sexp->value();
   }
 
-  return new (zone()) StrictCompareInstr(info.token_pos, kind, left, right,
+  return new (zone()) StrictCompareInstr(info.source, kind, left, right,
                                          needs_check, info.deopt_id);
 }
 
@@ -1293,7 +1297,7 @@
                                                     const InstrInfo& info) {
   Value* exception = ParseValue(Retrieve(sexp, 1));
   if (exception == nullptr) return nullptr;
-  return new (zone()) ThrowInstr(info.token_pos, info.deopt_id, exception);
+  return new (zone()) ThrowInstr(info.source, info.deopt_id, exception);
 }
 
 bool FlowGraphDeserializer::ParseCallInfo(SExpList* call,
diff --git a/runtime/vm/compiler/backend/il_deserializer.h b/runtime/vm/compiler/backend/il_deserializer.h
index 3959d37..f1637dd 100644
--- a/runtime/vm/compiler/backend/il_deserializer.h
+++ b/runtime/vm/compiler/backend/il_deserializer.h
@@ -196,8 +196,8 @@
 #undef HANDLER_DECL
 
   struct InstrInfo {
-    intptr_t deopt_id;
-    TokenPosition token_pos;
+    const intptr_t deopt_id;
+    const InstructionSource source;
   };
 
   enum HandledInstruction {
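
InstrInfo now carries a bundled InstructionSource instead of a bare TokenPosition. A rough sketch of such a value type, inferred only from the uses visible in this diff (default construction meaning "no source", a (token_pos, inlining_id) constructor, public token_pos and inlining_id members, and -1 as the "not inlined" id); the SDK's real definition may differ:

  struct InstructionSource {
    InstructionSource() : InstructionSource(TokenPosition::kNoSource) {}
    InstructionSource(TokenPosition pos, intptr_t id = -1)
        : token_pos(pos), inlining_id(id) {}
    TokenPosition token_pos;
    intptr_t inlining_id;
  };
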
diff --git a/runtime/vm/compiler/backend/il_ia32.cc b/runtime/vm/compiler/backend/il_ia32.cc
index bd3bd78..fa15876 100644
--- a/runtime/vm/compiler/backend/il_ia32.cc
+++ b/runtime/vm/compiler/backend/il_ia32.cc
@@ -255,7 +255,7 @@
   __ Bind(&done);
 #endif
   if (yield_index() != PcDescriptorsLayout::kInvalidYieldIndex) {
-    compiler->EmitYieldPositionMetadata(token_pos(), yield_index());
+    compiler->EmitYieldPositionMetadata(source(), yield_index());
   }
   __ LeaveFrame();
   __ ret();
@@ -977,7 +977,7 @@
   const compiler::ExternalLabel label(
       reinterpret_cast<uword>(native_c_function()));
   __ movl(ECX, compiler::Immediate(label.address()));
-  compiler->GenerateStubCall(token_pos(), *stub, PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), *stub, PcDescriptorsLayout::kOther,
                              locs());
 
   __ popl(result);
@@ -1016,7 +1016,7 @@
   // PC-relative 'leaq' available, so we have to do a trick with 'call'.
   compiler::Label get_pc;
   __ call(&get_pc);
-  compiler->EmitCallsiteMetadata(TokenPosition::kNoSource, deopt_id(),
+  compiler->EmitCallsiteMetadata(InstructionSource(), deopt_id(),
                                  PcDescriptorsLayout::Kind::kOther, locs());
   __ Bind(&get_pc);
   __ popl(temp);
@@ -1239,9 +1239,8 @@
   const Array& kNoArgumentNames = Object::null_array();
   ArgumentsInfo args_info(kTypeArgsLen, kNumberOfArguments, kSizeOfArguments,
                           kNoArgumentNames);
-  compiler->GenerateStaticCall(deopt_id(), token_pos(), CallFunction(),
-                               args_info, locs(), ICData::Handle(),
-                               ICData::kStatic);
+  compiler->GenerateStaticCall(deopt_id(), source(), CallFunction(), args_info,
+                               locs(), ICData::Handle(), ICData::kStatic);
   ASSERT(locs()->out(0).reg() == EAX);
 }
 
@@ -2141,7 +2140,7 @@
     locs->live_registers()->Remove(Location::RegisterLocation(result_));
 
     compiler->SaveLiveRegisters(locs);
-    compiler->GenerateStubCall(TokenPosition::kNoSource, stub,
+    compiler->GenerateStubCall(InstructionSource(), stub,
                                PcDescriptorsLayout::kOther, locs);
     __ MoveRegister(result_, EAX);
     compiler->RestoreLiveRegisters(locs);
@@ -2437,7 +2436,7 @@
   ASSERT(locs()->in(1).reg() == TypeTestABI::kInstantiatorTypeArgumentsReg);
   ASSERT(locs()->in(2).reg() == TypeTestABI::kFunctionTypeArgumentsReg);
 
-  compiler->GenerateInstanceOf(token_pos(), deopt_id(), type(), locs());
+  compiler->GenerateInstanceOf(source(), deopt_id(), type(), locs());
   ASSERT(locs()->out(0).reg() == EAX);
 }
 
@@ -2534,7 +2533,7 @@
   auto object_store = compiler->isolate()->object_store();
   const auto& allocate_array_stub =
       Code::ZoneHandle(compiler->zone(), object_store->allocate_array_stub());
-  compiler->GenerateStubCall(token_pos(), allocate_array_stub,
+  compiler->GenerateStubCall(source(), allocate_array_stub,
                              PcDescriptorsLayout::kOther, locs(), deopt_id());
   __ Bind(&done);
   ASSERT(locs()->out(0).reg() == kResultReg);
@@ -2782,7 +2781,7 @@
   __ PushObject(type());
   __ pushl(instantiator_type_args_reg);  // Push instantiator type arguments.
   __ pushl(function_type_args_reg);      // Push function type arguments.
-  compiler->GenerateRuntimeCall(token_pos(), deopt_id(),
+  compiler->GenerateRuntimeCall(source(), deopt_id(),
                                 kInstantiateTypeRuntimeEntry, 3, locs());
   __ Drop(3);           // Drop 2 type vectors, and uninstantiated type.
   __ popl(result_reg);  // Pop instantiated type.
@@ -2848,8 +2847,8 @@
     __ Bind(&non_null_type_args);
   }
   // Lookup cache in stub before calling runtime.
-  compiler->GenerateStubCall(token_pos(), GetStub(),
-                             PcDescriptorsLayout::kOther, locs());
+  compiler->GenerateStubCall(source(), GetStub(), PcDescriptorsLayout::kOther,
+                             locs());
   __ Bind(&type_arguments_instantiated);
 }
 
@@ -2884,7 +2883,7 @@
     compiler->SaveLiveRegisters(locs);
 
     __ movl(EDX, compiler::Immediate(instruction()->num_context_variables()));
-    compiler->GenerateStubCall(instruction()->token_pos(),
+    compiler->GenerateStubCall(instruction()->source(),
                                StubCode::AllocateContext(),
                                PcDescriptorsLayout::kOther, locs);
     ASSERT(instruction()->locs()->out(0).reg() == EAX);
@@ -2933,7 +2932,7 @@
   ASSERT(locs()->out(0).reg() == EAX);
 
   __ movl(EDX, compiler::Immediate(num_context_variables()));
-  compiler->GenerateStubCall(token_pos(), StubCode::AllocateContext(),
+  compiler->GenerateStubCall(source(), StubCode::AllocateContext(),
                              PcDescriptorsLayout::kOther, locs());
 }
 
@@ -2952,7 +2951,7 @@
   ASSERT(locs()->in(0).reg() == ECX);
   ASSERT(locs()->out(0).reg() == EAX);
 
-  compiler->GenerateStubCall(token_pos(), StubCode::CloneContext(),
+  compiler->GenerateStubCall(source(), StubCode::CloneContext(),
                              /*kind=*/PcDescriptorsLayout::kOther, locs());
 }
 
@@ -2975,7 +2974,7 @@
       compiler->AddDeoptIndexAtCall(deopt_id);
     } else {
       compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id,
-                                     TokenPosition::kNoSource);
+                                     InstructionSource());
     }
   }
   if (HasParallelMove()) {
@@ -3042,7 +3041,7 @@
         instruction(), /*num_slow_path_args=*/0);
     compiler->pending_deoptimization_env_ = env;
     compiler->GenerateRuntimeCall(
-        instruction()->token_pos(), instruction()->deopt_id(),
+        instruction()->source(), instruction()->deopt_id(),
         kStackOverflowRuntimeEntry, 0, instruction()->locs());
 
     if (compiler->isolate()->use_osr() && !compiler->is_optimizing() &&
@@ -3050,7 +3049,7 @@
       // In unoptimized code, record loop stack checks as possible OSR entries.
       compiler->AddCurrentDescriptor(PcDescriptorsLayout::kOsrEntry,
                                      instruction()->deopt_id(),
-                                     TokenPosition::kNoSource);
+                                     InstructionSource());
     }
     compiler->pending_deoptimization_env_ = NULL;
     compiler->RestoreLiveRegisters(instruction()->locs());
@@ -5115,7 +5114,7 @@
   const Array& kNoArgumentNames = Object::null_array();
   ArgumentsInfo args_info(kTypeArgsLen, kNumberOfArguments, kSizeOfArguments,
                           kNoArgumentNames);
-  compiler->GenerateStaticCall(deopt_id(), instance_call()->token_pos(), target,
+  compiler->GenerateStaticCall(deopt_id(), instance_call()->source(), target,
                                args_info, locs(), ICData::Handle(),
                                ICData::kStatic);
   __ Bind(&done);
@@ -6450,7 +6449,7 @@
     // Add a deoptimization descriptor for deoptimizing instructions that
     // may be inserted before this instruction.
     compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, GetDeoptId(),
-                                   TokenPosition::kNoSource);
+                                   InstructionSource());
   }
   if (HasParallelMove()) {
     compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
@@ -6532,7 +6531,7 @@
     Register reg,
     const Object& obj) {
   return compiler->EmitEqualityRegConstCompare(reg, obj, needs_number_check(),
-                                               token_pos(), deopt_id());
+                                               source(), deopt_id());
 }
 
 // Detect pattern when one value is zero and another is a power of 2.
@@ -6636,7 +6635,7 @@
   // ECX: Smi 0 (no IC data; the lazy-compile stub expects a GC-safe value).
   __ xorl(ECX, ECX);
   __ call(EBX);
-  compiler->EmitCallsiteMetadata(token_pos(), deopt_id(),
+  compiler->EmitCallsiteMetadata(source(), deopt_id(),
                                  PcDescriptorsLayout::kOther, locs());
   __ Drop(argument_count);
 }
@@ -6686,7 +6685,7 @@
 void AllocateObjectInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   const Code& stub = Code::ZoneHandle(
       compiler->zone(), StubCode::GetAllocationStubForClass(cls()));
-  compiler->GenerateStubCall(token_pos(), stub, PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
                              locs());
 }
 
@@ -6696,7 +6695,7 @@
 #else
   ASSERT(!compiler->is_optimizing());
   __ Call(StubCode::DebugStepCheck());
-  compiler->AddCurrentDescriptor(stub_kind_, deopt_id_, token_pos());
+  compiler->AddCurrentDescriptor(stub_kind_, deopt_id_, source());
   compiler->RecordSafepoint(locs());
 #endif
 }
diff --git a/runtime/vm/compiler/backend/il_serializer.cc b/runtime/vm/compiler/backend/il_serializer.cc
index 979c410..eeff8fb 100644
--- a/runtime/vm/compiler/backend/il_serializer.cc
+++ b/runtime/vm/compiler/backend/il_serializer.cc
@@ -825,6 +825,9 @@
   if (!token_pos().IsNoSource()) {
     s->AddExtraInteger(sexp, "token_pos", token_pos().Serialize());
   }
+  if (has_inlining_id()) {
+    s->AddExtraInteger(sexp, "inlining_id", inlining_id());
+  }
 }
 
 SExpression* Range::ToSExpression(FlowGraphSerializer* s) {
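
Together with the il_deserializer.cc change earlier in this diff, this makes the inlining id round-trip through the S-expression form: the serializer emits an "inlining_id" extra only when one is set, and the deserializer falls back to -1 when the key is absent, so previously serialized graphs still parse. Schematically, an instruction's extra entries might then read something like the following (illustrative only; the exact printed syntax is whatever the serializer produces):

  (StaticCall ... { token_pos 1024, inlining_id 1 })
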
diff --git a/runtime/vm/compiler/backend/il_x64.cc b/runtime/vm/compiler/backend/il_x64.cc
index 01faa99..80ddf22 100644
--- a/runtime/vm/compiler/backend/il_x64.cc
+++ b/runtime/vm/compiler/backend/il_x64.cc
@@ -327,7 +327,7 @@
 #endif
   ASSERT(__ constant_pool_allowed());
   if (yield_index() != PcDescriptorsLayout::kInvalidYieldIndex) {
-    compiler->EmitYieldPositionMetadata(token_pos(), yield_index());
+    compiler->EmitYieldPositionMetadata(source(), yield_index());
   }
   __ LeaveDartFrame();  // Disallows constant pool use.
   __ ret();
@@ -1023,7 +1023,7 @@
     compiler::ExternalLabel label(NativeEntry::LinkNativeCallEntry());
     __ LoadNativeEntry(RBX, &label,
                        compiler::ObjectPoolBuilderEntry::kPatchable);
-    compiler->GeneratePatchableCall(token_pos(), *stub,
+    compiler->GeneratePatchableCall(source(), *stub,
                                     PcDescriptorsLayout::kOther, locs());
   } else {
     if (is_bootstrap_native()) {
@@ -1037,7 +1037,7 @@
         reinterpret_cast<uword>(native_c_function()));
     __ LoadNativeEntry(RBX, &label,
                        compiler::ObjectPoolBuilderEntry::kNotPatchable);
-    compiler->GenerateStubCall(token_pos(), *stub, PcDescriptorsLayout::kOther,
+    compiler->GenerateStubCall(source(), *stub, PcDescriptorsLayout::kOther,
                                locs());
   }
   __ popq(result);
@@ -1074,7 +1074,7 @@
   // instruction, so 'AddressRIPRelative' loads the address of the following
   // 'movq'.
   __ leaq(TMP, compiler::Address::AddressRIPRelative(0));
-  compiler->EmitCallsiteMetadata(TokenPosition::kNoSource, deopt_id(),
+  compiler->EmitCallsiteMetadata(InstructionSource(), deopt_id(),
                                  PcDescriptorsLayout::Kind::kOther, locs());
   __ movq(compiler::Address(FPREG, kSavedCallerPcSlotFromFp * kWordSize), TMP);
 
@@ -1318,9 +1318,8 @@
   const Array& kNoArgumentNames = Object::null_array();
   ArgumentsInfo args_info(kTypeArgsLen, kNumberOfArguments, kSizeOfArguments,
                           kNoArgumentNames);
-  compiler->GenerateStaticCall(deopt_id(), token_pos(), CallFunction(),
-                               args_info, locs(), ICData::Handle(),
-                               ICData::kStatic);
+  compiler->GenerateStaticCall(deopt_id(), source(), CallFunction(), args_info,
+                               locs(), ICData::Handle(), ICData::kStatic);
   ASSERT(locs()->out(0).reg() == RAX);
 }
 
@@ -1519,7 +1518,7 @@
     locs->live_registers()->Remove(Location::RegisterLocation(result_));
 
     compiler->SaveLiveRegisters(locs);
-    compiler->GenerateStubCall(TokenPosition::kNoSource,  // No token position.
+    compiler->GenerateStubCall(InstructionSource(),  // No source position.
                                stub, PcDescriptorsLayout::kOther, locs);
     __ MoveRegister(result_, RAX);
     compiler->RestoreLiveRegisters(locs);
@@ -2645,7 +2644,7 @@
   ASSERT(locs()->in(1).reg() == TypeTestABI::kInstantiatorTypeArgumentsReg);
   ASSERT(locs()->in(2).reg() == TypeTestABI::kFunctionTypeArgumentsReg);
 
-  compiler->GenerateInstanceOf(token_pos(), deopt_id(), type(), locs());
+  compiler->GenerateInstanceOf(source(), deopt_id(), type(), locs());
   ASSERT(locs()->out(0).reg() == RAX);
 }
 
@@ -2747,7 +2746,7 @@
   auto object_store = compiler->isolate()->object_store();
   const auto& allocate_array_stub =
       Code::ZoneHandle(compiler->zone(), object_store->allocate_array_stub());
-  compiler->GenerateStubCall(token_pos(), allocate_array_stub,
+  compiler->GenerateStubCall(source(), allocate_array_stub,
                              PcDescriptorsLayout::kOther, locs(), deopt_id());
   __ Bind(&done);
   ASSERT(locs()->out(0).reg() == kResultReg);
@@ -3015,7 +3014,7 @@
   __ PushObject(type());
   __ pushq(instantiator_type_args_reg);  // Push instantiator type arguments.
   __ pushq(function_type_args_reg);      // Push function type arguments.
-  compiler->GenerateRuntimeCall(token_pos(), deopt_id(),
+  compiler->GenerateRuntimeCall(source(), deopt_id(),
                                 kInstantiateTypeRuntimeEntry, 3, locs());
   __ Drop(3);           // Drop 2 type vectors, and uninstantiated type.
   __ popq(result_reg);  // Pop instantiated type.
@@ -3081,8 +3080,8 @@
     __ Bind(&non_null_type_args);
   }
   // Lookup cache in stub before calling runtime.
-  compiler->GenerateStubCall(token_pos(), GetStub(),
-                             PcDescriptorsLayout::kOther, locs());
+  compiler->GenerateStubCall(source(), GetStub(), PcDescriptorsLayout::kOther,
+                             locs());
   __ Bind(&type_arguments_instantiated);
 }
 
@@ -3122,8 +3121,7 @@
 
     __ LoadImmediate(
         R10, compiler::Immediate(instruction()->num_context_variables()));
-    compiler->GenerateStubCall(instruction()->token_pos(),
-                               allocate_context_stub,
+    compiler->GenerateStubCall(instruction()->source(), allocate_context_stub,
                                PcDescriptorsLayout::kOther, locs);
     ASSERT(instruction()->locs()->out(0).reg() == RAX);
     compiler->RestoreLiveRegisters(instruction()->locs());
@@ -3174,7 +3172,7 @@
       Code::ZoneHandle(compiler->zone(), object_store->allocate_context_stub());
 
   __ LoadImmediate(R10, compiler::Immediate(num_context_variables()));
-  compiler->GenerateStubCall(token_pos(), allocate_context_stub,
+  compiler->GenerateStubCall(source(), allocate_context_stub,
                              PcDescriptorsLayout::kOther, locs());
 }
 
@@ -3196,7 +3194,7 @@
   auto object_store = compiler->isolate()->object_store();
   const auto& clone_context_stub =
       Code::ZoneHandle(compiler->zone(), object_store->clone_context_stub());
-  compiler->GenerateStubCall(token_pos(), clone_context_stub,
+  compiler->GenerateStubCall(source(), clone_context_stub,
                              /*kind=*/PcDescriptorsLayout::kOther, locs());
 }
 
@@ -3219,7 +3217,7 @@
       compiler->AddDeoptIndexAtCall(deopt_id);
     } else {
       compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id,
-                                     TokenPosition::kNoSource);
+                                     InstructionSource());
     }
   }
   if (HasParallelMove()) {
@@ -3302,11 +3300,11 @@
       compiler->RecordCatchEntryMoves();
       compiler->AddDescriptor(
           PcDescriptorsLayout::kOther, compiler->assembler()->CodeSize(),
-          instruction()->deopt_id(), instruction()->token_pos(),
+          instruction()->deopt_id(), instruction()->source(),
           compiler->CurrentTryIndex());
     } else {
       compiler->GenerateRuntimeCall(
-          instruction()->token_pos(), instruction()->deopt_id(),
+          instruction()->source(), instruction()->deopt_id(),
           kStackOverflowRuntimeEntry, kNumSlowPathArgs, instruction()->locs());
     }
 
@@ -3315,7 +3313,7 @@
       // In unoptimized code, record loop stack checks as possible OSR entries.
       compiler->AddCurrentDescriptor(PcDescriptorsLayout::kOsrEntry,
                                      instruction()->deopt_id(),
-                                     TokenPosition::kNoSource);
+                                     InstructionSource());
     }
     compiler->pending_deoptimization_env_ = NULL;
     if (!using_shared_stub) {
@@ -3506,7 +3504,7 @@
             /*type_args_len=*/0, /*num_arguments=*/2));
     compiler->EmitMegamorphicInstanceCall(
         selector, arguments_descriptor, instruction()->call()->deopt_id(),
-        instruction()->token_pos(), locs, try_index_, kNumSlowPathArgs);
+        instruction()->source(), locs, try_index_, kNumSlowPathArgs);
     __ MoveRegister(result, RAX);
     compiler->RestoreLiveRegisters(locs);
     __ jmp(exit_label());
@@ -3681,7 +3679,7 @@
 
     compiler->EmitMegamorphicInstanceCall(
         selector, arguments_descriptor, instruction()->call()->deopt_id(),
-        instruction()->token_pos(), locs, try_index_, kNumSlowPathArgs);
+        instruction()->source(), locs, try_index_, kNumSlowPathArgs);
     __ MoveRegister(result, RAX);
     compiler->RestoreLiveRegisters(locs);
     compiler->pending_deoptimization_env_ = nullptr;
@@ -4566,7 +4564,7 @@
     ASSERT(!locs()->live_registers()->ContainsRegister(
         AllocateMintABI::kResultReg));
     auto extended_env = compiler->SlowPathEnvironmentFor(this, 0);
-    compiler->GenerateStubCall(token_pos(), stub, PcDescriptorsLayout::kOther,
+    compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
                                locs(), DeoptId::kNone, extended_env);
   } else {
     BoxAllocationSlowPath::Allocate(compiler, this, compiler->mint_class(), out,
@@ -5337,7 +5335,7 @@
   const Array& kNoArgumentNames = Object::null_array();
   ArgumentsInfo args_info(kTypeArgsLen, kNumberOfArguments, kSizeOfArguments,
                           kNoArgumentNames);
-  compiler->GenerateStaticCall(deopt_id(), instance_call()->token_pos(), target,
+  compiler->GenerateStaticCall(deopt_id(), instance_call()->source(), target,
                                args_info, locs(), ICData::Handle(),
                                ICData::kStatic);
   __ Bind(&done);
@@ -6851,7 +6849,7 @@
     // Add a deoptimization descriptor for deoptimizing instructions that
     // may be inserted before this instruction.
     compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, GetDeoptId(),
-                                   TokenPosition::kNoSource);
+                                   InstructionSource());
   }
   if (HasParallelMove()) {
     compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
@@ -6932,7 +6930,7 @@
     Register reg,
     const Object& obj) {
   return compiler->EmitEqualityRegConstCompare(reg, obj, needs_number_check(),
-                                               token_pos(), deopt_id());
+                                               source(), deopt_id());
 }
 
 LocationSummary* DispatchTableCallInstr::MakeLocationSummary(Zone* zone,
@@ -6980,7 +6978,7 @@
     __ xorq(RBX, RBX);
   }
   __ call(RCX);
-  compiler->EmitCallsiteMetadata(token_pos(), deopt_id(),
+  compiler->EmitCallsiteMetadata(source(), deopt_id(),
                                  PcDescriptorsLayout::kOther, locs());
   __ Drop(argument_count);
 }
@@ -7037,7 +7035,7 @@
   }
   const Code& stub = Code::ZoneHandle(
       compiler->zone(), StubCode::GetAllocationStubForClass(cls()));
-  compiler->GenerateStubCall(token_pos(), stub, PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
                              locs());
 }
 
@@ -7047,7 +7045,7 @@
 #else
   ASSERT(!compiler->is_optimizing());
   __ CallPatchable(StubCode::DebugStepCheck());
-  compiler->AddCurrentDescriptor(stub_kind_, deopt_id_, token_pos());
+  compiler->AddCurrentDescriptor(stub_kind_, deopt_id_, source());
   compiler->RecordSafepoint(locs());
 #endif
 }
diff --git a/runtime/vm/compiler/backend/inliner.cc b/runtime/vm/compiler/backend/inliner.cc
index 73ee62c..bbc0700 100644
--- a/runtime/vm/compiler/backend/inliner.cc
+++ b/runtime/vm/compiler/backend/inliner.cc
@@ -569,8 +569,7 @@
                   const Array& arguments_descriptor,
                   intptr_t first_arg_index,  // 1 if type args are passed.
                   GrowableArray<Value*>* arguments,
-                  const Function& caller,
-                  intptr_t caller_inlining_id)
+                  const Function& caller)
       : call(call),
         arguments_descriptor(arguments_descriptor),
         first_arg_index(first_arg_index),
@@ -578,8 +577,7 @@
         callee_graph(NULL),
         parameter_stubs(NULL),
         exit_collector(NULL),
-        caller(caller),
-        caller_inlining_id(caller_inlining_id) {}
+        caller(caller) {}
 
   Definition* call;
   const Array& arguments_descriptor;
@@ -589,7 +587,6 @@
   ZoneGrowableArray<Definition*>* parameter_stubs;
   InlineExitCollector* exit_collector;
   const Function& caller;
-  const intptr_t caller_inlining_id;
 };
 
 class CallSiteInliner;
@@ -598,8 +595,7 @@
  public:
   PolymorphicInliner(CallSiteInliner* owner,
                      PolymorphicInstanceCallInstr* call,
-                     const Function& caller_function,
-                     intptr_t caller_inlining_id);
+                     const Function& caller_function);
 
   bool Inline();
 
@@ -630,7 +626,6 @@
   InlineExitCollector* exit_collector_;
 
   const Function& caller_function_;
-  const intptr_t caller_inlining_id_;
 };
 
 static void ReplaceParameterStubs(Zone* zone,
@@ -713,7 +708,7 @@
           ASSERT(call_data->call->IsClosureCall());
           LoadFieldInstr* context_load = new (zone) LoadFieldInstr(
               new Value((*arguments)[first_arg_index]->definition()),
-              Slot::Closure_context(), call_data->call->token_pos());
+              Slot::Closure_context(), call_data->call->source());
           context_load->set_ssa_temp_index(
               caller_graph->alloc_ssa_temp_index());
           if (FlowGraph::NeedsPairLocation(context_load->representation())) {
@@ -1273,10 +1268,8 @@
         }
 
         FlowGraphInliner::SetInliningId(
-            callee_graph,
-            inliner_->NextInlineId(callee_graph->function(),
-                                   call_data->call->token_pos(),
-                                   call_data->caller_inlining_id));
+            callee_graph, inliner_->NextInlineId(callee_graph->function(),
+                                                 call_data->call->source()));
         TRACE_INLINING(THR_Print("     Success\n"));
         TRACE_INLINING(THR_Print(
             "       with reason %s, code size %" Pd ", call sites: %" Pd "\n",
@@ -1449,8 +1442,7 @@
       }
       InlinedCallData call_data(
           call, Array::ZoneHandle(Z, call->GetArgumentsDescriptor()),
-          call->FirstArgIndex(), &arguments, call_info[call_idx].caller(),
-          call_info[call_idx].caller_graph->inlining_id());
+          call->FirstArgIndex(), &arguments, call_info[call_idx].caller());
 
       // Under AOT, calls outside loops may pass our regular heuristics due
       // to a relatively high ratio. So, unless we are optimizing solely for
@@ -1514,10 +1506,9 @@
       }
       const Array& arguments_descriptor =
           Array::ZoneHandle(Z, call->GetArgumentsDescriptor());
-      InlinedCallData call_data(
-          call, arguments_descriptor, call->FirstArgIndex(), &arguments,
-          call_info[call_idx].caller(),
-          call_info[call_idx].caller_graph->inlining_id());
+      InlinedCallData call_data(call, arguments_descriptor,
+                                call->FirstArgIndex(), &arguments,
+                                call_info[call_idx].caller());
       if (TryInlining(target, call->argument_names(), &call_data, false)) {
         InlineCall(&call_data);
         inlined = true;
@@ -1541,9 +1532,7 @@
         continue;
       }
       const Function& cl = call_info[call_idx].caller();
-      intptr_t caller_inlining_id =
-          call_info[call_idx].caller_graph->inlining_id();
-      PolymorphicInliner inliner(this, call, cl, caller_inlining_id);
+      PolymorphicInliner inliner(this, call, cl);
       if (inliner.Inline()) inlined = true;
     }
     return inlined;
@@ -1663,8 +1652,7 @@
 
 PolymorphicInliner::PolymorphicInliner(CallSiteInliner* owner,
                                        PolymorphicInstanceCallInstr* call,
-                                       const Function& caller_function,
-                                       intptr_t caller_inlining_id)
+                                       const Function& caller_function)
     : owner_(owner),
       call_(call),
       num_variants_(call->NumberOfChecks()),
@@ -1673,8 +1661,7 @@
       non_inlined_variants_(new (zone()) CallTargets(zone())),
       inlined_entries_(num_variants_),
       exit_collector_(new (Z) InlineExitCollector(owner->caller_graph(), call)),
-      caller_function_(caller_function),
-      caller_inlining_id_(caller_inlining_id) {}
+      caller_function_(caller_function) {}
 
 Isolate* PolymorphicInliner::isolate() const {
   return owner_->caller_graph()->isolate();
@@ -1780,7 +1767,7 @@
   const Array& arguments_descriptor =
       Array::ZoneHandle(Z, call_->GetArgumentsDescriptor());
   InlinedCallData call_data(call_, arguments_descriptor, call_->FirstArgIndex(),
-                            &arguments, caller_function_, caller_inlining_id_);
+                            &arguments, caller_function_);
   Function& target = Function::ZoneHandle(zone(), target_info.target->raw());
   if (!owner_->TryInlining(target, call_->argument_names(), &call_data,
                            false)) {
@@ -1828,7 +1815,7 @@
   }
   if (FlowGraphInliner::TryInlineRecognizedMethod(
           owner_->caller_graph(), receiver_cid, target, call_, redefinition,
-          call_->token_pos(), call_->ic_data(), graph_entry, &entry, &last,
+          call_->source(), call_->ic_data(), graph_entry, &entry, &last,
           &result, owner_->inliner_->speculative_policy())) {
     // The empty Object constructor is the only case where the inlined body is
     // empty and there is no result.
@@ -1840,7 +1827,7 @@
     InlineExitCollector* exit_collector =
         new (Z) InlineExitCollector(owner_->caller_graph(), call_);
     ReturnInstr* return_result = new (Z)
-        ReturnInstr(call_->token_pos(), new (Z) Value(result), DeoptId::kNone);
+        ReturnInstr(call_->source(), new (Z) Value(result), DeoptId::kNone);
     owner_->caller_graph()->AppendTo(
         last, return_result,
         call_->env(),  // Return can become deoptimization target.
@@ -1962,7 +1949,7 @@
         ConstantInstr* cid_constant_end =
             owner_->caller_graph()->GetConstant(cid_end);
         RelationalOpInstr* compare_top = new RelationalOpInstr(
-            call_->token_pos(), Token::kLTE, new Value(load_cid),
+            call_->source(), Token::kLTE, new Value(load_cid),
             new Value(cid_constant_end), kSmiCid, call_->deopt_id());
         BranchInstr* branch_top = upper_limit_branch =
             new BranchInstr(compare_top, DeoptId::kNone);
@@ -1979,12 +1966,12 @@
         *branch_top->true_successor_address() = below_target;
 
         RelationalOpInstr* compare_bottom = new RelationalOpInstr(
-            call_->token_pos(), Token::kGTE, new Value(load_cid),
+            call_->source(), Token::kGTE, new Value(load_cid),
             new Value(cid_constant), kSmiCid, call_->deopt_id());
         branch = new BranchInstr(compare_bottom, DeoptId::kNone);
       } else {
         StrictCompareInstr* compare =
-            new StrictCompareInstr(call_->token_pos(), Token::kEQ_STRICT,
+            new StrictCompareInstr(call_->source(), Token::kEQ_STRICT,
                                    new Value(load_cid), new Value(cid_constant),
                                    /* number_check = */ false, DeoptId::kNone);
         branch = new BranchInstr(compare, DeoptId::kNone);
@@ -2093,7 +2080,7 @@
     fallback_call->InheritDeoptTarget(zone(), call_);
     fallback_call->set_total_call_count(call_->CallCount());
     ReturnInstr* fallback_return = new ReturnInstr(
-        call_->token_pos(), new Value(fallback_call), DeoptId::kNone);
+        call_->source(), new Value(fallback_call), DeoptId::kNone);
     fallback_return->InheritDeoptTargetAfter(owner_->caller_graph(), call_,
                                              fallback_call);
     AppendInstruction(AppendInstruction(cursor, fallback_call),
@@ -2250,20 +2237,18 @@
   *call_site_count = function.optimized_call_site_count();
 }
 
-// TODO(srdjan): This is only needed when disassembling and/or profiling.
-// Sets inlining id for all instructions of this flow-graph, as well for the
-// FlowGraph itself.
 void FlowGraphInliner::SetInliningId(FlowGraph* flow_graph,
                                      intptr_t inlining_id) {
   ASSERT(flow_graph->inlining_id() < 0);
   flow_graph->set_inlining_id(inlining_id);
+  // We only need to set the inlining ID on instructions that may have
+  // token positions, so there is no need to set it on blocks or internal
+  // definitions.
   for (BlockIterator block_it = flow_graph->postorder_iterator();
        !block_it.Done(); block_it.Advance()) {
     for (ForwardInstructionIterator it(block_it.Current()); !it.Done();
          it.Advance()) {
       Instruction* current = it.Current();
-      // Do not overwrite owner function.
-      ASSERT(!current->has_inlining_id());
       current->set_inlining_id(inlining_id);
     }
   }
@@ -2378,16 +2363,16 @@
 }
 
 intptr_t FlowGraphInliner::NextInlineId(const Function& function,
-                                        TokenPosition tp,
-                                        intptr_t parent_id) {
+                                        const InstructionSource& source) {
   const intptr_t id = inline_id_to_function_->length();
   // TODO(johnmccutchan): Do not allow IsNoSource once all nodes have proper
   // source positions.
-  ASSERT(tp.IsReal() || tp.IsSynthetic() || tp.IsNoSource());
+  ASSERT(source.token_pos.IsReal() || source.token_pos.IsSynthetic() ||
+         source.token_pos.IsNoSource());
   RELEASE_ASSERT(!function.IsNull());
   inline_id_to_function_->Add(&function);
-  inline_id_to_token_pos_->Add(tp);
-  caller_inline_id_->Add(parent_id);
+  inline_id_to_token_pos_->Add(source.token_pos);
+  caller_inline_id_->Add(source.inlining_id);
   // We always have one less token position than functions.
   ASSERT(inline_id_to_token_pos_->length() ==
          (inline_id_to_function_->length() - 1));
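
The bookkeeping itself is unchanged by taking an InstructionSource: the new id is the length of inline_id_to_function_ before the append, and the call site's token_pos and inlining_id go into the two parallel arrays. Read together with the assert, the outermost function is already registered without a token-position entry, which is where the off-by-one comes from. An illustrative state, inferred from that assert rather than stated anywhere in this diff:

  // After inlining g (called at P1 in the outermost function) and then
  // f (called at P2 inside g):
  //   inline_id_to_function_  = [outer, g, f]   // ids 0, 1, 2
  //   inline_id_to_token_pos_ = [P1, P2]        // one entry fewer, as asserted
  //   caller ids recorded for g and f: 0 and 1  // each call site's enclosing id
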
@@ -2426,7 +2411,7 @@
   // Insert array length load and bounds check.
   LoadFieldInstr* length = new (Z) LoadFieldInstr(
       new (Z) Value(*array), Slot::GetLengthFieldForArrayCid(array_cid),
-      call->token_pos());
+      call->source());
   *cursor = flow_graph->AppendTo(*cursor, length, NULL, FlowGraph::kValue);
   *index = flow_graph->CreateCheckBound(length, *index, call->deopt_id());
   *cursor =
@@ -2436,7 +2421,7 @@
     // Insert data elements load.
     LoadFieldInstr* elements = new (Z)
         LoadFieldInstr(new (Z) Value(*array), Slot::GrowableObjectArray_data(),
-                       call->token_pos());
+                       call->source());
     *cursor = flow_graph->AppendTo(*cursor, elements, NULL, FlowGraph::kValue);
     // Load the data from the backing store, which is a fixed-length array.
     *array = elements;
@@ -2491,7 +2476,7 @@
   intptr_t index_scale = compiler::target::Instance::ElementSizeFor(array_cid);
   LoadIndexedInstr* load = new (Z) LoadIndexedInstr(
       new (Z) Value(array), new (Z) Value(index), /*index_unboxed=*/false,
-      index_scale, array_cid, kAlignedAccess, deopt_id, call->token_pos(),
+      index_scale, array_cid, kAlignedAccess, deopt_id, call->source(),
       ResultType(call));
 
   *last = load;
@@ -2526,7 +2511,7 @@
                              const Function& target,
                              Instruction* call,
                              Definition* receiver,
-                             TokenPosition token_pos,
+                             const InstructionSource& source,
                              const Cids* value_check,
                              FlowGraphInliner::ExactnessInfo* exactness,
                              GraphEntryInstr* graph_entry,
@@ -2575,7 +2560,7 @@
             LoadFieldInstr(new (Z) Value(array),
                            Slot::GetTypeArgumentsSlotFor(flow_graph->thread(),
                                                          instantiator_class),
-                           call->token_pos());
+                           call->source());
         cursor = flow_graph->AppendTo(cursor, load_type_args, NULL,
                                       FlowGraph::kValue);
         type_args = load_type_args;
@@ -2637,7 +2622,7 @@
       auto const function_type_args = flow_graph->constant_null();
       auto const dst_type = flow_graph->GetConstant(value_type);
       AssertAssignableInstr* assert_value = new (Z) AssertAssignableInstr(
-          token_pos, new (Z) Value(stored_value), new (Z) Value(dst_type),
+          source, new (Z) Value(stored_value), new (Z) Value(dst_type),
           new (Z) Value(type_args), new (Z) Value(function_type_args),
           Symbols::Value(), call->deopt_id());
       cursor = flow_graph->AppendTo(cursor, assert_value, call->env(),
@@ -2671,7 +2656,7 @@
     // an unboxed double, an unboxed Float32x4, or unboxed Int32x4.
     needs_store_barrier = kNoStoreBarrier;
     Instruction* check = flow_graph->CreateCheckClass(
-        stored_value, *value_check, call->deopt_id(), call->token_pos());
+        stored_value, *value_check, call->deopt_id(), call->source());
     cursor =
         flow_graph->AppendTo(cursor, check, call->env(), FlowGraph::kEffect);
   }
@@ -2714,7 +2699,7 @@
   *last = new (Z) StoreIndexedInstr(
       new (Z) Value(array), new (Z) Value(index), new (Z) Value(stored_value),
       needs_store_barrier, /*index_unboxed=*/false, index_scale, array_cid,
-      kAlignedAccess, call->deopt_id(), call->token_pos());
+      kAlignedAccess, call->deopt_id(), call->source());
   flow_graph->AppendTo(cursor, *last, call->env(), FlowGraph::kEffect);
   // We need a return value to replace uses of the original definition. However,
   // the final instruction is a use of 'void operator[]=()', so we use null.
@@ -2743,7 +2728,7 @@
   // Arguments are checked. No need for class check.
   BinaryDoubleOpInstr* double_bin_op = new (Z)
       BinaryDoubleOpInstr(op_kind, new (Z) Value(left), new (Z) Value(right),
-                          call->deopt_id(), call->token_pos());
+                          call->deopt_id(), call->source());
   flow_graph->AppendTo(*entry, double_bin_op, call->env(), FlowGraph::kValue);
   *last = double_bin_op;
   *result = double_bin_op->AsDefinition();
@@ -2770,7 +2755,7 @@
   // Arguments are checked. No need for class check.
 
   DoubleTestOpInstr* double_test_op = new (Z) DoubleTestOpInstr(
-      kind, new (Z) Value(receiver), call->deopt_id(), call->token_pos());
+      kind, new (Z) Value(receiver), call->deopt_id(), call->source());
   flow_graph->AppendTo(*entry, double_test_op, call->env(), FlowGraph::kValue);
   *last = double_test_op;
   *result = double_test_op->AsDefinition();
@@ -2823,7 +2808,7 @@
   // This is an internal method, no need to check argument types.
   StoreInstanceFieldInstr* store = new (Z)
       StoreInstanceFieldInstr(field, new (Z) Value(array), new (Z) Value(value),
-                              store_barrier_type, call->token_pos());
+                              store_barrier_type, call->source());
   flow_graph->AppendTo(*entry, store, call->env(), FlowGraph::kEffect);
   *last = store;
   // We need a return value to replace uses of the original definition. However,
@@ -2917,7 +2902,7 @@
   LoadIndexedInstr* load = new (Z) LoadIndexedInstr(
       new (Z) Value(array), new (Z) Value(index),
       /*index_unboxed=*/false, /*index_scale=*/1, view_cid, kUnalignedAccess,
-      DeoptId::kNone, call->token_pos(), ResultType(call));
+      DeoptId::kNone, call->source(), ResultType(call));
   flow_graph->AppendTo(
       cursor, load, call->deopt_id() != DeoptId::kNone ? call->env() : nullptr,
       FlowGraph::kValue);
@@ -2942,7 +2927,7 @@
       new (Z) Value(array), new (Z) Value(index), new (Z) Value(stored_value),
       kNoStoreBarrier, /*index_unboxed=*/false,
       /*index_scale=*/1, view_cid, kUnalignedAccess, call->deopt_id(),
-      call->token_pos());
+      call->source());
 }
 
 static bool InlineByteArrayBaseStore(FlowGraph* flow_graph,
@@ -3044,7 +3029,7 @@
   // Handle value check.
   if (value_check != nullptr) {
     Instruction* check = flow_graph->CreateCheckClass(
-        stored_value, *value_check, call->deopt_id(), call->token_pos());
+        stored_value, *value_check, call->deopt_id(), call->source());
     cursor =
         flow_graph->AppendTo(cursor, check, call->env(), FlowGraph::kEffect);
   }
@@ -3053,7 +3038,7 @@
   if (needs_null_check) {
     String& name = String::ZoneHandle(Z, target.name());
     Instruction* check = new (Z) CheckNullInstr(
-        new (Z) Value(stored_value), name, call->deopt_id(), call->token_pos());
+        new (Z) Value(stored_value), name, call->deopt_id(), call->source());
     cursor =
         flow_graph->AppendTo(cursor, check, call->env(), FlowGraph::kEffect);
     // With an explicit null check, a non-speculative unbox suffices.
@@ -3153,9 +3138,8 @@
                                               Definition* str,
                                               Definition* index,
                                               Instruction* cursor) {
-  LoadFieldInstr* length = new (Z)
-      LoadFieldInstr(new (Z) Value(str), Slot::GetLengthFieldForArrayCid(cid),
-                     str->token_pos());
+  LoadFieldInstr* length = new (Z) LoadFieldInstr(
+      new (Z) Value(str), Slot::GetLengthFieldForArrayCid(cid), str->source());
   cursor = flow_graph->AppendTo(cursor, length, NULL, FlowGraph::kValue);
 
   // Bounds check.
@@ -3178,7 +3162,7 @@
   LoadIndexedInstr* load_indexed = new (Z) LoadIndexedInstr(
       new (Z) Value(str), new (Z) Value(index), /*index_unboxed=*/false,
       compiler::target::Instance::ElementSizeFor(cid), cid, kAlignedAccess,
-      DeoptId::kNone, call->token_pos());
+      DeoptId::kNone, call->source());
   cursor = flow_graph->AppendTo(cursor, load_indexed, NULL, FlowGraph::kValue);
 
   auto box = BoxInstr::Create(kUnboxedIntPtr, new Value(load_indexed));
@@ -3267,7 +3251,7 @@
   Definition* result = nullptr;
   if (FlowGraphInliner::TryInlineRecognizedMethod(
           flow_graph, receiver_cid, target, call,
-          call->Receiver()->definition(), call->token_pos(), call->ic_data(),
+          call->Receiver()->definition(), call->source(), call->ic_data(),
           /*graph_entry=*/nullptr, &entry, &last, &result, policy,
           &exactness_info)) {
     // The empty Object constructor is the only case where the inlined body is
@@ -3287,7 +3271,7 @@
       case FlowGraph::ToCheck::kCheckCid: {
         Instruction* check_class = flow_graph->CreateCheckClass(
             call->Receiver()->definition(), targets, call->deopt_id(),
-            call->token_pos());
+            call->source());
         flow_graph->InsertBefore(call, check_class, call->env(),
                                  FlowGraph::kEffect);
         break;
@@ -3295,7 +3279,7 @@
       case FlowGraph::ToCheck::kCheckNull: {
         Instruction* check_null = new (Z) CheckNullInstr(
             call->Receiver()->CopyWithType(Z), call->function_name(),
-            call->deopt_id(), call->token_pos());
+            call->deopt_id(), call->source());
         flow_graph->InsertBefore(call, check_null, call->env(),
                                  FlowGraph::kEffect);
         break;
@@ -3351,7 +3335,7 @@
   }
   if (FlowGraphInliner::TryInlineRecognizedMethod(
           flow_graph, receiver_cid, call->function(), call, receiver,
-          call->token_pos(), call->ic_data(), /*graph_entry=*/nullptr, &entry,
+          call->source(), call->ic_data(), /*graph_entry=*/nullptr, &entry,
           &last, &result, policy)) {
     // The empty Object constructor is the only case where the inlined body is
     // empty and there is no result.
@@ -3508,12 +3492,11 @@
         // inserted to be non-speculative.
         CheckNullInstr* check1 =
             new (Z) CheckNullInstr(new (Z) Value(receiver), Symbols::FirstArg(),
-                                   call->deopt_id(), call->token_pos());
+                                   call->deopt_id(), call->source());
 
-        CheckNullInstr* check2 = new (Z)
-            CheckNullInstr(new (Z) Value(call->ArgumentAt(1)),
-                           Symbols::SecondArg(), call->deopt_id(),
-                           call->token_pos(), CheckNullInstr::kArgumentError);
+        CheckNullInstr* check2 = new (Z) CheckNullInstr(
+            new (Z) Value(call->ArgumentAt(1)), Symbols::SecondArg(),
+            call->deopt_id(), call->source(), CheckNullInstr::kArgumentError);
 
         (*last)->SetInputAt(0, new (Z) Value(check1));
         (*last)->SetInputAt(1, new (Z) Value(check2));
@@ -3574,7 +3557,7 @@
         args->Add(new (Z) Value(call->ArgumentAt(i)));
       }
       *last = new (Z) InvokeMathCFunctionInstr(args, call->deopt_id(), kind,
-                                               call->token_pos());
+                                               call->source());
       break;
     }
   }
@@ -3667,7 +3650,7 @@
     const Function& target,
     Definition* call,
     Definition* receiver,
-    TokenPosition token_pos,
+    const InstructionSource& source,
     const ICData* ic_data,
     GraphEntryInstr* graph_entry,
     FunctionEntryInstr** entry,
@@ -3747,9 +3730,9 @@
     case MethodRecognizer::kGrowableArraySetIndexed:
     case MethodRecognizer::kObjectArraySetIndexedUnchecked:
     case MethodRecognizer::kGrowableArraySetIndexedUnchecked:
-      return InlineSetIndexed(flow_graph, kind, target, call, receiver,
-                              token_pos, /* value_check = */ NULL, exactness,
-                              graph_entry, entry, last, result);
+      return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
+                              /* value_check = */ NULL, exactness, graph_entry,
+                              entry, last, result);
     case MethodRecognizer::kInt8ArraySetIndexed:
     case MethodRecognizer::kUint8ArraySetIndexed:
     case MethodRecognizer::kUint8ClampedArraySetIndexed:
@@ -3763,53 +3746,53 @@
         return false;
       }
       Cids* value_check = Cids::CreateMonomorphic(Z, kSmiCid);
-      return InlineSetIndexed(flow_graph, kind, target, call, receiver,
-                              token_pos, value_check, exactness, graph_entry,
-                              entry, last, result);
+      return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
+                              value_check, exactness, graph_entry, entry, last,
+                              result);
     }
     case MethodRecognizer::kInt32ArraySetIndexed:
     case MethodRecognizer::kUint32ArraySetIndexed: {
       // A value check is not needed for Int32 and Uint32 arrays because they
       // implicitly contain unboxing instructions which check for the right type.
-      return InlineSetIndexed(flow_graph, kind, target, call, receiver,
-                              token_pos, /* value_check = */ NULL, exactness,
-                              graph_entry, entry, last, result);
+      return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
+                              /* value_check = */ NULL, exactness, graph_entry,
+                              entry, last, result);
     }
     case MethodRecognizer::kInt64ArraySetIndexed:
     case MethodRecognizer::kUint64ArraySetIndexed:
       if (!ShouldInlineInt64ArrayOps()) {
         return false;
       }
-      return InlineSetIndexed(flow_graph, kind, target, call, receiver,
-                              token_pos, /* value_check = */ NULL, exactness,
-                              graph_entry, entry, last, result);
+      return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
+                              /* value_check = */ NULL, exactness, graph_entry,
+                              entry, last, result);
     case MethodRecognizer::kFloat32ArraySetIndexed:
     case MethodRecognizer::kFloat64ArraySetIndexed: {
       if (!CanUnboxDouble()) {
         return false;
       }
       Cids* value_check = Cids::CreateMonomorphic(Z, kDoubleCid);
-      return InlineSetIndexed(flow_graph, kind, target, call, receiver,
-                              token_pos, value_check, exactness, graph_entry,
-                              entry, last, result);
+      return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
+                              value_check, exactness, graph_entry, entry, last,
+                              result);
     }
     case MethodRecognizer::kFloat32x4ArraySetIndexed: {
       if (!ShouldInlineSimd()) {
         return false;
       }
       Cids* value_check = Cids::CreateMonomorphic(Z, kFloat32x4Cid);
-      return InlineSetIndexed(flow_graph, kind, target, call, receiver,
-                              token_pos, value_check, exactness, graph_entry,
-                              entry, last, result);
+      return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
+                              value_check, exactness, graph_entry, entry, last,
+                              result);
     }
     case MethodRecognizer::kFloat64x2ArraySetIndexed: {
       if (!ShouldInlineSimd()) {
         return false;
       }
       Cids* value_check = Cids::CreateMonomorphic(Z, kFloat64x2Cid);
-      return InlineSetIndexed(flow_graph, kind, target, call, receiver,
-                              token_pos, value_check, exactness, graph_entry,
-                              entry, last, result);
+      return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
+                              value_check, exactness, graph_entry, entry, last,
+                              result);
     }
     case MethodRecognizer::kByteArrayBaseGetInt8:
       return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid,
@@ -4102,7 +4085,7 @@
           FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
                              call->GetBlock()->try_index(), DeoptId::kNone);
       (*entry)->InheritDeoptTarget(Z, call);
-      *last = new (Z) CreateArrayInstr(call->token_pos(), type, num_elements,
+      *last = new (Z) CreateArrayInstr(call->source(), type, num_elements,
                                        call->deopt_id());
       flow_graph->AppendTo(
           *entry, *last,
@@ -4122,8 +4105,8 @@
               FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
                                  call->GetBlock()->try_index(), DeoptId::kNone);
           (*entry)->InheritDeoptTarget(Z, call);
-          *last = new (Z) CreateArrayInstr(call->token_pos(), type,
-                                           num_elements, call->deopt_id());
+          *last = new (Z) CreateArrayInstr(call->source(), type, num_elements,
+                                           call->deopt_id());
           flow_graph->AppendTo(
               *entry, *last,
               call->deopt_id() != DeoptId::kNone ? call->env() : NULL,
@@ -4201,7 +4184,7 @@
       *last = new (Z) StoreIndexedInstr(
           new (Z) Value(str), new (Z) Value(index), new (Z) Value(value),
           kNoStoreBarrier, /*index_unboxed=*/false, index_scale, cid,
-          kAlignedAccess, call->deopt_id(), call->token_pos());
+          kAlignedAccess, call->deopt_id(), call->source());
       flow_graph->AppendTo(value, *last, env, FlowGraph::kEffect);
 
       // We need a return value to replace uses of the original definition.
diff --git a/runtime/vm/compiler/backend/inliner.h b/runtime/vm/compiler/backend/inliner.h
index 10a404c..404654b 100644
--- a/runtime/vm/compiler/backend/inliner.h
+++ b/runtime/vm/compiler/backend/inliner.h
@@ -25,6 +25,7 @@
 class ICData;
 class InstanceCallInstr;
 class Instruction;
+struct InstructionSource;
 class Precompiler;
 class StaticCallInstr;
 class TargetEntryInstr;
@@ -125,8 +126,7 @@
 
   FlowGraph* flow_graph() const { return flow_graph_; }
   intptr_t NextInlineId(const Function& function,
-                        TokenPosition tp,
-                        intptr_t caller_id);
+                        const InstructionSource& source);
 
   bool trace_inlining() const { return trace_inlining_; }
 
@@ -156,7 +156,7 @@
                                         const Function& target,
                                         Definition* call,
                                         Definition* receiver,
-                                        TokenPosition token_pos,
+                                        const InstructionSource& source,
                                         const ICData* ic_data,
                                         GraphEntryInstr* graph_entry,
                                         FunctionEntryInstr** entry,
diff --git a/runtime/vm/compiler/backend/redundancy_elimination.cc b/runtime/vm/compiler/backend/redundancy_elimination.cc
index c682aef..5661445 100644
--- a/runtime/vm/compiler/backend/redundancy_elimination.cc
+++ b/runtime/vm/compiler/backend/redundancy_elimination.cc
@@ -3553,10 +3553,10 @@
               flow_graph_->GetConstant(Smi::ZoneHandle(Z, Smi::New(index)))),
           /*index_unboxed=*/false,
           /*index_scale=*/compiler::target::Instance::ElementSizeFor(array_cid),
-          array_cid, kAlignedAccess, DeoptId::kNone, alloc->token_pos());
+          array_cid, kAlignedAccess, DeoptId::kNone, alloc->source());
     } else {
-      load = new (Z)
-          LoadFieldInstr(new (Z) Value(alloc), *slot, alloc->token_pos());
+      load =
+          new (Z) LoadFieldInstr(new (Z) Value(alloc), *slot, alloc->source());
     }
     flow_graph_->InsertBefore(load_point, load, nullptr, FlowGraph::kValue);
     values->Add(new (Z) Value(load));
diff --git a/runtime/vm/compiler/backend/redundancy_elimination_test.cc b/runtime/vm/compiler/backend/redundancy_elimination_test.cc
index 2a2fdfd..859888f 100644
--- a/runtime/vm/compiler/backend/redundancy_elimination_test.cc
+++ b/runtime/vm/compiler/backend/redundancy_elimination_test.cc
@@ -262,9 +262,9 @@
     BlockBuilder builder(H.flow_graph(), b1);
     auto& slot = Slot::Get(field, &H.flow_graph()->parsed_function());
     v0 = builder.AddDefinition(
-        new AllocateObjectInstr(TokenPosition::kNoSource, cls));
+        new AllocateObjectInstr(InstructionSource(), cls));
     v1 = builder.AddDefinition(
-        new LoadFieldInstr(new Value(v0), slot, TokenPosition::kNoSource));
+        new LoadFieldInstr(new Value(v0), slot, InstructionSource()));
     auto v2 = builder.AddDefinition(make_redefinition(&S, H.flow_graph(), v0));
     auto args = new InputsArray(2);
     args->Add(new Value(v1));
@@ -272,12 +272,12 @@
       args->Add(new Value(v2));
     }
     call = builder.AddInstruction(new StaticCallInstr(
-        TokenPosition::kNoSource, blackhole, 0, Array::empty_array(), args,
+        InstructionSource(), blackhole, 0, Array::empty_array(), args,
         S.GetNextDeoptId(), 0, ICData::RebindRule::kStatic));
     v4 = builder.AddDefinition(
-        new LoadFieldInstr(new Value(v2), slot, TokenPosition::kNoSource));
+        new LoadFieldInstr(new Value(v2), slot, InstructionSource()));
     ret = builder.AddInstruction(new ReturnInstr(
-        TokenPosition::kNoSource, new Value(v4), S.GetNextDeoptId()));
+        InstructionSource(), new Value(v4), S.GetNextDeoptId()));
   }
   H.FinishGraph();
   DominatorBasedCSE::Optimize(H.flow_graph());
@@ -311,7 +311,7 @@
                                  FlowGraph* flow_graph,
                                  Definition* defn) {
   return new CheckNullInstr(new Value(defn), String::ZoneHandle(),
-                            S->GetNextDeoptId(), TokenPosition::kNoSource);
+                            S->GetNextDeoptId(), InstructionSource());
 }
 
 static Definition* MakeRedefinition(CompilerState* S,
@@ -324,7 +324,7 @@
                                         FlowGraph* flow_graph,
                                         Definition* defn) {
   const auto& dst_type = AbstractType::ZoneHandle(Type::ObjectType());
-  return new AssertAssignableInstr(TokenPosition::kNoSource, new Value(defn),
+  return new AssertAssignableInstr(InstructionSource(), new Value(defn),
                                    new Value(flow_graph->GetConstant(dst_type)),
                                    new Value(flow_graph->constant_null()),
                                    new Value(flow_graph->constant_null()),
@@ -435,36 +435,36 @@
     BlockBuilder builder(H.flow_graph(), b1);
     auto& slot = Slot::Get(field, &H.flow_graph()->parsed_function());
     v0 = builder.AddDefinition(
-        new AllocateObjectInstr(TokenPosition::kNoSource, cls));
+        new AllocateObjectInstr(InstructionSource(), cls));
     v5 = builder.AddDefinition(
-        new AllocateObjectInstr(TokenPosition::kNoSource, cls));
+        new AllocateObjectInstr(InstructionSource(), cls));
     if (!make_host_escape) {
-      builder.AddInstruction(new StoreInstanceFieldInstr(
-          slot, new Value(v5), new Value(v0), kEmitStoreBarrier,
-          TokenPosition::kNoSource));
+      builder.AddInstruction(
+          new StoreInstanceFieldInstr(slot, new Value(v5), new Value(v0),
+                                      kEmitStoreBarrier, InstructionSource()));
     }
     v1 = builder.AddDefinition(
-        new LoadFieldInstr(new Value(v0), slot, TokenPosition::kNoSource));
+        new LoadFieldInstr(new Value(v0), slot, InstructionSource()));
     auto v2 = builder.AddDefinition(make_redefinition(&S, H.flow_graph(), v5));
     auto args = new InputsArray(2);
     args->Add(new Value(v1));
     if (make_it_escape) {
       auto v6 = builder.AddDefinition(
-          new LoadFieldInstr(new Value(v2), slot, TokenPosition::kNoSource));
+          new LoadFieldInstr(new Value(v2), slot, InstructionSource()));
       args->Add(new Value(v6));
     } else if (make_host_escape) {
-      builder.AddInstruction(new StoreInstanceFieldInstr(
-          slot, new Value(v2), new Value(v0), kEmitStoreBarrier,
-          TokenPosition::kNoSource));
+      builder.AddInstruction(
+          new StoreInstanceFieldInstr(slot, new Value(v2), new Value(v0),
+                                      kEmitStoreBarrier, InstructionSource()));
       args->Add(new Value(v5));
     }
     call = builder.AddInstruction(new StaticCallInstr(
-        TokenPosition::kNoSource, blackhole, 0, Array::empty_array(), args,
+        InstructionSource(), blackhole, 0, Array::empty_array(), args,
         S.GetNextDeoptId(), 0, ICData::RebindRule::kStatic));
     v4 = builder.AddDefinition(
-        new LoadFieldInstr(new Value(v0), slot, TokenPosition::kNoSource));
+        new LoadFieldInstr(new Value(v0), slot, InstructionSource()));
     ret = builder.AddInstruction(new ReturnInstr(
-        TokenPosition::kNoSource, new Value(v4), S.GetNextDeoptId()));
+        InstructionSource(), new Value(v4), S.GetNextDeoptId()));
   }
   H.FinishGraph();
   DominatorBasedCSE::Optimize(H.flow_graph());
@@ -599,7 +599,7 @@
 
     //   array <- StaticCall(...) {_Uint32List}
     array = builder.AddDefinition(new StaticCallInstr(
-        TokenPosition::kNoSource, function, 0, Array::empty_array(),
+        InstructionSource(), function, 0, Array::empty_array(),
         new InputsArray(), DeoptId::kNone, 0, ICData::kNoRebind));
     array->UpdateType(CompileType::FromCid(kTypedDataUint32ArrayCid));
     array->SetResultType(zone, CompileType::FromCid(kTypedDataUint32ArrayCid));
@@ -609,7 +609,7 @@
     v1 = builder.AddDefinition(new LoadIndexedInstr(
         new Value(array), new Value(vc0), /*index_unboxed=*/false, 1,
         kTypedDataUint32ArrayCid, kAlignedAccess, DeoptId::kNone,
-        TokenPosition::kNoSource));
+        InstructionSource()));
 
     //   v2 <- LoadUntagged(array)
     //   StoreIndexed(v2, index=0, value=42)
@@ -617,17 +617,17 @@
     store = builder.AddInstruction(new StoreIndexedInstr(
         new Value(v2), new Value(vc0), new Value(vc42), kNoStoreBarrier,
         /*index_unboxed=*/false, 1, kTypedDataUint32ArrayCid, kAlignedAccess,
-        DeoptId::kNone, TokenPosition::kNoSource));
+        DeoptId::kNone, InstructionSource()));
 
     //   v3 <- LoadIndexed(array)
     v3 = builder.AddDefinition(new LoadIndexedInstr(
         new Value(array), new Value(vc0), /*index_unboxed=*/false, 1,
         kTypedDataUint32ArrayCid, kAlignedAccess, DeoptId::kNone,
-        TokenPosition::kNoSource));
+        InstructionSource()));
 
     //   return v3
     ret = builder.AddInstruction(new ReturnInstr(
-        TokenPosition::kNoSource, new Value(v3), S.GetNextDeoptId()));
+        InstructionSource(), new Value(v3), S.GetNextDeoptId()));
   }
   H.FinishGraph();
 
diff --git a/runtime/vm/compiler/backend/type_propagator.cc b/runtime/vm/compiler/backend/type_propagator.cc
index 8d96744..e841d39 100644
--- a/runtime/vm/compiler/backend/type_propagator.cc
+++ b/runtime/vm/compiler/backend/type_propagator.cc
@@ -508,14 +508,14 @@
   Instruction* check_clone = NULL;
   if (check->IsCheckSmi()) {
     check_clone = new CheckSmiInstr(assert->value()->Copy(zone()),
-                                    assert->deopt_id(), check->token_pos());
+                                    assert->deopt_id(), check->source());
     check_clone->AsCheckSmi()->set_licm_hoisted(
         check->AsCheckSmi()->licm_hoisted());
   } else {
     ASSERT(check->IsCheckClass());
     check_clone =
         new CheckClassInstr(assert->value()->Copy(zone()), assert->deopt_id(),
-                            check->AsCheckClass()->cids(), check->token_pos());
+                            check->AsCheckClass()->cids(), check->source());
     check_clone->AsCheckClass()->set_licm_hoisted(
         check->AsCheckClass()->licm_hoisted());
   }
diff --git a/runtime/vm/compiler/backend/type_propagator_test.cc b/runtime/vm/compiler/backend/type_propagator_test.cc
index 24da405..5c30a19 100644
--- a/runtime/vm/compiler/backend/type_propagator_test.cc
+++ b/runtime/vm/compiler/backend/type_propagator_test.cc
@@ -61,7 +61,7 @@
     v0 = builder.AddParameter(0, 0, /*with_frame=*/true, kTagged);
     builder.AddBranch(
         new StrictCompareInstr(
-            TokenPosition::kNoSource, Token::kEQ_STRICT, new Value(v0),
+            InstructionSource(), Token::kEQ_STRICT, new Value(v0),
             new Value(H.flow_graph()->GetConstant(Object::Handle())),
             /*needs_number_check=*/false, S.GetNextDeoptId()),
         b2, b3);
@@ -128,7 +128,7 @@
     auto load_cid = builder.AddDefinition(new LoadClassIdInstr(new Value(v0)));
     builder.AddBranch(
         new StrictCompareInstr(
-            TokenPosition::kNoSource, Token::kEQ_STRICT, new Value(load_cid),
+            InstructionSource(), Token::kEQ_STRICT, new Value(load_cid),
             new Value(H.IntConstant(kDoubleCid)),
             /*needs_number_check=*/false, S.GetNextDeoptId()),
         b2, b3);
@@ -223,8 +223,8 @@
     BlockBuilder builder(H.flow_graph(), b1);
     v0 = builder.AddParameter(0, 0, /*with_frame=*/true, kTagged);
     builder.AddBranch(new StrictCompareInstr(
-                          TokenPosition::kNoSource, Token::kEQ_STRICT,
-                          new Value(v0), new Value(H.IntConstant(1)),
+                          InstructionSource(), Token::kEQ_STRICT, new Value(v0),
+                          new Value(H.IntConstant(1)),
                           /*needs_number_check=*/false, S.GetNextDeoptId()),
                       b2, b3);
   }
@@ -232,7 +232,7 @@
   {
     BlockBuilder builder(H.flow_graph(), b2);
     v2 = builder.AddDefinition(
-        new StaticCallInstr(TokenPosition::kNoSource, target_func,
+        new StaticCallInstr(InstructionSource(), target_func,
                             /*type_args_len=*/0,
                             /*argument_names=*/Array::empty_array(),
                             new InputsArray(0), S.GetNextDeoptId(),
@@ -258,7 +258,7 @@
   EXPECT_PROPERTY(v2->Type(), it.IsNullableInt());
   EXPECT_PROPERTY(v3->Type(), it.IsNullableInt());
 
-  auto v4 = new LoadStaticFieldInstr(field, TokenPosition::kNoSource);
+  auto v4 = new LoadStaticFieldInstr(field, InstructionSource());
   H.flow_graph()->InsertBefore(v2, v4, nullptr, FlowGraph::kValue);
   v2->ReplaceUsesWith(v4);
   v2->RemoveFromGraph();
@@ -313,8 +313,8 @@
     BlockBuilder builder(H.flow_graph(), b1);
     v0 = builder.AddParameter(0, 0, /*with_frame=*/true, kTagged);
     builder.AddBranch(new StrictCompareInstr(
-                          TokenPosition::kNoSource, Token::kEQ_STRICT,
-                          new Value(v0), new Value(H.IntConstant(1)),
+                          InstructionSource(), Token::kEQ_STRICT, new Value(v0),
+                          new Value(H.IntConstant(1)),
                           /*needs_number_check=*/false, S.GetNextDeoptId()),
                       b6, b2);
   }
@@ -322,8 +322,8 @@
   {
     BlockBuilder builder(H.flow_graph(), b2);
     builder.AddBranch(new StrictCompareInstr(
-                          TokenPosition::kNoSource, Token::kEQ_STRICT,
-                          new Value(v0), new Value(H.IntConstant(2)),
+                          InstructionSource(), Token::kEQ_STRICT, new Value(v0),
+                          new Value(H.IntConstant(2)),
                           /*needs_number_check=*/false, S.GetNextDeoptId()),
                       b3, b4);
   }
@@ -354,8 +354,8 @@
     BlockBuilder builder(H.flow_graph(), b7);
     v5 = H.Phi(b7, {{b5, v3}, {b6, H.DoubleConstant(1.0)}});
     builder.AddPhi(v5);
-    builder.AddInstruction(new ReturnInstr(TokenPosition::kNoSource,
-                                           new Value(v5), S.GetNextDeoptId()));
+    builder.AddInstruction(new ReturnInstr(InstructionSource(), new Value(v5),
+                                           S.GetNextDeoptId()));
   }
 
   H.FinishGraph();
diff --git a/runtime/vm/compiler/call_specializer.cc b/runtime/vm/compiler/call_specializer.cc
index c096c35..53014cc 100644
--- a/runtime/vm/compiler/call_specializer.cc
+++ b/runtime/vm/compiler/call_specializer.cc
@@ -232,7 +232,7 @@
   if (to_check->Type()->ToCid() != kSmiCid) {
     InsertBefore(insert_before,
                  new (Z) CheckSmiInstr(new (Z) Value(to_check), deopt_id,
-                                       insert_before->token_pos()),
+                                       insert_before->source()),
                  deopt_environment, FlowGraph::kEffect);
   }
 }
@@ -243,8 +243,8 @@
                                     Environment* deopt_environment,
                                     Instruction* insert_before) {
   // Type propagation has not run yet, we cannot eliminate the check.
-  Instruction* check = flow_graph_->CreateCheckClass(
-      to_check, cids, deopt_id, insert_before->token_pos());
+  Instruction* check = flow_graph_->CreateCheckClass(to_check, cids, deopt_id,
+                                                     insert_before->source());
   InsertBefore(insert_before, check, deopt_environment, FlowGraph::kEffect);
 }
 
@@ -264,7 +264,7 @@
   if (to_check->Type()->is_nullable()) {
     CheckNullInstr* check_null =
         new (Z) CheckNullInstr(to_check->CopyWithType(Z), function_name,
-                               deopt_id, insert_before->token_pos());
+                               deopt_id, insert_before->source());
     if (FLAG_trace_strong_mode_types) {
       THR_Print("[Strong mode] Inserted %s\n", check_null->ToCString());
     }
@@ -355,7 +355,7 @@
 
     // Comparing char-codes instead of strings.
     EqualityCompareInstr* comp =
-        new (Z) EqualityCompareInstr(call->token_pos(), op_kind, left_val,
+        new (Z) EqualityCompareInstr(call->source(), op_kind, left_val,
                                      right_val, kSmiCid, call->deopt_id());
     ReplaceCall(call, comp);
 
@@ -394,11 +394,11 @@
   } else if (binary_feedback.OperandsAre(kSmiCid)) {
     InsertBefore(call,
                  new (Z) CheckSmiInstr(new (Z) Value(left), call->deopt_id(),
-                                       call->token_pos()),
+                                       call->source()),
                  call->env(), FlowGraph::kEffect);
     InsertBefore(call,
                  new (Z) CheckSmiInstr(new (Z) Value(right), call->deopt_id(),
-                                       call->token_pos()),
+                                       call->source()),
                  call->env(), FlowGraph::kEffect);
     cid = kSmiCid;
   } else if (binary_feedback.OperandsAreSmiOrMint() &&
@@ -440,7 +440,7 @@
       if ((right_const != NULL && right_const->value().IsNull()) ||
           (left_const != NULL && left_const->value().IsNull())) {
         StrictCompareInstr* comp = new (Z)
-            StrictCompareInstr(call->token_pos(), Token::kEQ_STRICT,
+            StrictCompareInstr(call->source(), Token::kEQ_STRICT,
                                new (Z) Value(left), new (Z) Value(right),
                                /* number_check = */ false, DeoptId::kNone);
         ReplaceCall(call, comp);
@@ -450,9 +450,9 @@
     }
   }
   ASSERT(cid != kIllegalCid);
-  EqualityCompareInstr* comp = new (Z)
-      EqualityCompareInstr(call->token_pos(), op_kind, new (Z) Value(left),
-                           new (Z) Value(right), cid, call->deopt_id());
+  EqualityCompareInstr* comp =
+      new (Z) EqualityCompareInstr(call->source(), op_kind, new (Z) Value(left),
+                                   new (Z) Value(right), cid, call->deopt_id());
   ReplaceCall(call, comp);
   return true;
 }
@@ -470,11 +470,11 @@
   if (binary_feedback.OperandsAre(kSmiCid)) {
     InsertBefore(call,
                  new (Z) CheckSmiInstr(new (Z) Value(left), call->deopt_id(),
-                                       call->token_pos()),
+                                       call->source()),
                  call->env(), FlowGraph::kEffect);
     InsertBefore(call,
                  new (Z) CheckSmiInstr(new (Z) Value(right), call->deopt_id(),
-                                       call->token_pos()),
+                                       call->source()),
                  call->env(), FlowGraph::kEffect);
     cid = kSmiCid;
   } else if (binary_feedback.OperandsAreSmiOrMint() &&
@@ -503,7 +503,7 @@
   }
   ASSERT(cid != kIllegalCid);
   RelationalOpInstr* comp =
-      new (Z) RelationalOpInstr(call->token_pos(), op_kind, new (Z) Value(left),
+      new (Z) RelationalOpInstr(call->source(), op_kind, new (Z) Value(left),
                                 new (Z) Value(right), cid, call->deopt_id());
   ReplaceCall(call, comp);
   return true;
@@ -634,7 +634,7 @@
 
     BinaryDoubleOpInstr* double_bin_op = new (Z)
         BinaryDoubleOpInstr(op_kind, new (Z) Value(left), new (Z) Value(right),
-                            call->deopt_id(), call->token_pos());
+                            call->deopt_id(), call->source());
     ReplaceCall(call, double_bin_op);
   } else if (operands_type == kMintCid) {
     if (!FlowGraphCompiler::SupportsUnboxedInt64()) return false;
@@ -661,7 +661,7 @@
         // because the smi operation can still deoptimize.
         InsertBefore(call,
                      new (Z) CheckSmiInstr(new (Z) Value(left),
-                                           call->deopt_id(), call->token_pos()),
+                                           call->deopt_id(), call->source()),
                      call->env(), FlowGraph::kEffect);
         ConstantInstr* constant = flow_graph()->GetConstant(
             Smi::Handle(Z, Smi::New(Smi::Cast(obj).Value() - 1)));
@@ -708,7 +708,7 @@
   if (call->Targets().ReceiverIs(kSmiCid)) {
     InsertBefore(call,
                  new (Z) CheckSmiInstr(new (Z) Value(input), call->deopt_id(),
-                                       call->token_pos()),
+                                       call->source()),
                  call->env(), FlowGraph::kEffect);
     unary_op = new (Z)
         UnarySmiOpInstr(op_kind, new (Z) Value(input), call->deopt_id());
@@ -771,7 +771,7 @@
   const bool calls_initializer = field.NeedsInitializationCheckOnLoad();
   const Slot& slot = Slot::Get(field, &flow_graph()->parsed_function());
   LoadFieldInstr* load = new (Z) LoadFieldInstr(
-      new (Z) Value(receiver), slot, call->token_pos(), calls_initializer,
+      new (Z) Value(receiver), slot, call->source(), calls_initializer,
       calls_initializer ? call->deopt_id() : DeoptId::kNone);
 
   // Note that this is a case of LoadField -> InstanceCall lazy deopt.
@@ -897,10 +897,9 @@
       if (!dst_type.IsInstantiated()) {
         const Class& owner = Class::Handle(Z, field.Owner());
         if (owner.NumTypeArguments() > 0) {
-          instantiator_type_args = new (Z)
-              LoadFieldInstr(new (Z) Value(instr->ArgumentAt(0)),
-                             Slot::GetTypeArgumentsSlotFor(thread(), owner),
-                             instr->token_pos());
+          instantiator_type_args = new (Z) LoadFieldInstr(
+              new (Z) Value(instr->ArgumentAt(0)),
+              Slot::GetTypeArgumentsSlotFor(thread(), owner), instr->source());
           InsertBefore(instr, instantiator_type_args, instr->env(),
                        FlowGraph::kValue);
         }
@@ -909,7 +908,7 @@
       InsertBefore(
           instr,
           new (Z) AssertAssignableInstr(
-              instr->token_pos(), new (Z) Value(instr->ArgumentAt(1)),
+              instr->source(), new (Z) Value(instr->ArgumentAt(1)),
               new (Z) Value(flow_graph_->GetConstant(dst_type)),
               new (Z) Value(instantiator_type_args),
               new (Z) Value(function_type_args),
@@ -922,8 +921,8 @@
   ASSERT(instr->FirstArgIndex() == 0);
   StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr(
       field, new (Z) Value(instr->ArgumentAt(0)),
-      new (Z) Value(instr->ArgumentAt(1)), kEmitStoreBarrier,
-      instr->token_pos(), &flow_graph()->parsed_function());
+      new (Z) Value(instr->ArgumentAt(1)), kEmitStoreBarrier, instr->source(),
+      &flow_graph()->parsed_function());
 
   // Discard the environment from the original instruction because the store
   // can't deoptimize.
@@ -982,7 +981,7 @@
     args->Add(new (Z) Value(call->ArgumentAt(i)));
   }
   InvokeMathCFunctionInstr* invoke = new (Z) InvokeMathCFunctionInstr(
-      args, call->deopt_id(), recognized_kind, call->token_pos());
+      args, call->deopt_id(), recognized_kind, call->source());
   ReplaceCall(call, invoke);
 }
 
@@ -1004,7 +1003,7 @@
       AddReceiverCheck(call);
       ReplaceCall(call,
                   new (Z) SmiToDoubleInstr(new (Z) Value(call->ArgumentAt(0)),
-                                           call->token_pos()));
+                                           call->source()));
       return true;
     } else if ((receiver_cid == kMintCid) && CanConvertInt64ToDouble()) {
       AddReceiverCheck(call);
@@ -1235,7 +1234,7 @@
   if (type.IsNullType() || (type.IsNeverType() && type.IsLegacy()) ||
       left_value->Type()->IsSubtypeOf(type)) {
     Definition* replacement = new (Z) StrictCompareInstr(
-        call->token_pos(),
+        call->source(),
         (type.IsNullType() || (type.IsNeverType() && type.IsLegacy()))
             ? Token::kEQ_STRICT
             : Token::kNE_STRICT,
@@ -1286,7 +1285,7 @@
         flow_graph()->GetConstant(Smi::Handle(Z, Smi::New(type_cid)));
 
     StrictCompareInstr* check_cid = new (Z) StrictCompareInstr(
-        call->token_pos(), Token::kEQ_STRICT, new (Z) Value(left_cid),
+        call->source(), Token::kEQ_STRICT, new (Z) Value(left_cid),
         new (Z) Value(cid), /* number_check = */ false, DeoptId::kNone);
     ReplaceCall(call, check_cid);
     return;
@@ -1313,7 +1312,7 @@
           return;
         }
         TestCidsInstr* test_cids = new (Z) TestCidsInstr(
-            call->token_pos(), Token::kIS, new (Z) Value(left), *results,
+            call->source(), Token::kIS, new (Z) Value(left), *results,
             can_deopt ? call->deopt_id() : DeoptId::kNone);
         // Remove type.
         ReplaceCall(call, test_cids);
@@ -1332,7 +1331,7 @@
   }
 
   InstanceOfInstr* instance_of = new (Z) InstanceOfInstr(
-      call->token_pos(), new (Z) Value(left),
+      call->source(), new (Z) Value(left),
       new (Z) Value(instantiator_type_args), new (Z) Value(function_type_args),
       type, call->deopt_id());
   ReplaceCall(call, instance_of);
@@ -1388,7 +1387,7 @@
               Definition* arg = call->ArgumentAt(1);
               AddCheckSmi(arg, call->deopt_id(), call->env(), call);
               ReplaceCall(call, new (Z) SmiToDoubleInstr(new (Z) Value(arg),
-                                                         call->token_pos()));
+                                                         call->source()));
               return;
             } else if (binary_feedback.ArgumentIs(kMintCid) &&
                        CanConvertInt64ToDouble()) {
@@ -1675,7 +1674,7 @@
                                            Definition** value) {
   auto check =
       new (Z) CheckNullInstr(new (Z) Value(*value), Symbols::OptimizedOut(),
-                             call->deopt_id(), call->token_pos());
+                             call->deopt_id(), call->source());
   flow_graph_->InsertBefore(call, check, call->env(), FlowGraph::kValue);
 
   // Use data dependency as control dependency.
@@ -1686,7 +1685,7 @@
                                              Definition* array,
                                              Definition** index) {
   auto length = new (Z) LoadFieldInstr(
-      new (Z) Value(array), Slot::TypedDataBase_length(), call->token_pos());
+      new (Z) Value(array), Slot::TypedDataBase_length(), call->source());
   flow_graph_->InsertBefore(call, length, call->env(), FlowGraph::kValue);
 
   auto check = new (Z) GenericCheckBoundInstr(
@@ -1700,7 +1699,7 @@
 Definition* TypedDataSpecializer::AppendLoadLength(TemplateDartCall<0>* call,
                                                    Definition* array) {
   auto length = new (Z) LoadFieldInstr(
-      new (Z) Value(array), Slot::TypedDataBase_length(), call->token_pos());
+      new (Z) Value(array), Slot::TypedDataBase_length(), call->source());
   flow_graph_->InsertBefore(call, length, call->env(), FlowGraph::kValue);
   return length;
 }
@@ -1719,7 +1718,7 @@
 
   Definition* load = new (Z) LoadIndexedInstr(
       new (Z) Value(data), new (Z) Value(index), /*index_unboxed=*/false,
-      index_scale, cid, kAlignedAccess, DeoptId::kNone, call->token_pos());
+      index_scale, cid, kAlignedAccess, DeoptId::kNone, call->source());
   flow_graph_->InsertBefore(call, load, call->env(), FlowGraph::kValue);
 
   if (cid == kTypedDataFloat32ArrayCid) {
@@ -1798,7 +1797,7 @@
   auto store = new (Z) StoreIndexedInstr(
       new (Z) Value(data), new (Z) Value(index), new (Z) Value(value),
       kNoStoreBarrier, /*index_unboxed=*/false, index_scale, cid,
-      kAlignedAccess, DeoptId::kNone, call->token_pos(),
+      kAlignedAccess, DeoptId::kNone, call->source(),
       Instruction::kNotSpeculative);
   flow_graph_->InsertBefore(call, store, call->env(), FlowGraph::kEffect);
 }
diff --git a/runtime/vm/compiler/frontend/base_flow_graph_builder.cc b/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
index c6ae149..ea5dfb0 100644
--- a/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
+++ b/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
@@ -128,8 +128,9 @@
                                              bool number_check /* = false */) {
   Value* right = Pop();
   Value* left = Pop();
-  StrictCompareInstr* compare = new (Z) StrictCompareInstr(
-      position, kind, left, right, number_check, GetNextDeoptId());
+  StrictCompareInstr* compare =
+      new (Z) StrictCompareInstr(InstructionSource(position), kind, left, right,
+                                 number_check, GetNextDeoptId());
   Push(compare);
   return Fragment(compare);
 }
@@ -138,9 +139,8 @@
                                              bool number_check /* = false */) {
   Value* right = Pop();
   Value* left = Pop();
-  StrictCompareInstr* compare =
-      new (Z) StrictCompareInstr(TokenPosition::kNoSource, kind, left, right,
-                                 number_check, GetNextDeoptId());
+  StrictCompareInstr* compare = new (Z) StrictCompareInstr(
+      InstructionSource(), kind, left, right, number_check, GetNextDeoptId());
   Push(compare);
   return Fragment(compare);
 }
@@ -165,7 +165,7 @@
   Value* right_value = Pop();
   Value* left_value = Pop();
   StrictCompareInstr* compare = new (Z) StrictCompareInstr(
-      TokenPosition::kNoSource, negate ? Token::kNE_STRICT : Token::kEQ_STRICT,
+      InstructionSource(), negate ? Token::kNE_STRICT : Token::kEQ_STRICT,
       left_value, right_value, false, GetNextDeoptId());
   BranchInstr* branch = new (Z) BranchInstr(compare, GetNextDeoptId());
   *then_entry = *branch->true_successor_address() = BuildTargetEntry();
@@ -179,8 +179,8 @@
   Value* rhs = Pop();
   Value* lhs = Pop();
   StrictCompareInstr* compare =
-      new (Z) StrictCompareInstr(TokenPosition::kNoSource, Token::kEQ_STRICT,
-                                 lhs, rhs, false, GetNextDeoptId());
+      new (Z) StrictCompareInstr(InstructionSource(), Token::kEQ_STRICT, lhs,
+                                 rhs, false, GetNextDeoptId());
   BranchInstr* branch = new (Z) BranchInstr(compare, GetNextDeoptId());
   *then_entry = *branch->true_successor_address() = BuildTargetEntry();
   *otherwise_entry = *branch->false_successor_address() = BuildTargetEntry();
@@ -203,8 +203,9 @@
     ASSERT(!function.has_unboxed_return());
     representation = kTagged;
   }
-  ReturnInstr* return_instr = new (Z) ReturnInstr(
-      position, value, GetNextDeoptId(), yield_index, representation);
+  ReturnInstr* return_instr =
+      new (Z) ReturnInstr(InstructionSource(position), value, GetNextDeoptId(),
+                          yield_index, representation);
   if (exit_collector_ != nullptr) exit_collector_->AddExit(return_instr);
 
   instructions <<= return_instr;
@@ -216,7 +217,7 @@
                                                   intptr_t stack_depth,
                                                   intptr_t loop_depth) {
   return Fragment(new (Z) CheckStackOverflowInstr(
-      position, stack_depth, loop_depth, GetNextDeoptId(),
+      InstructionSource(position), stack_depth, loop_depth, GetNextDeoptId(),
       CheckStackOverflowInstr::kOsrAndPreemption));
 }
 
@@ -376,9 +377,9 @@
 
   // We use C behavior when dereferencing pointers, so we use aligned access in
   // all cases.
-  LoadIndexedInstr* instr = new (Z) LoadIndexedInstr(
-      array, index, index_unboxed, index_scale, class_id, kAlignedAccess,
-      DeoptId::kNone, TokenPosition::kNoSource);
+  LoadIndexedInstr* instr = new (Z)
+      LoadIndexedInstr(array, index, index_unboxed, index_scale, class_id,
+                       kAlignedAccess, DeoptId::kNone, InstructionSource());
   Push(instr);
   return Fragment(instr);
 }
@@ -469,7 +470,7 @@
 Fragment BaseFlowGraphBuilder::LoadNativeField(const Slot& native_field,
                                                bool calls_initializer) {
   LoadFieldInstr* load = new (Z) LoadFieldInstr(
-      Pop(), native_field, TokenPosition::kNoSource, calls_initializer,
+      Pop(), native_field, InstructionSource(), calls_initializer,
       calls_initializer ? GetNextDeoptId() : DeoptId::kNone);
   Push(load);
   return Fragment(load);
@@ -477,8 +478,7 @@
 
 Fragment BaseFlowGraphBuilder::LoadLocal(LocalVariable* variable) {
   ASSERT(!variable->is_captured());
-  LoadLocalInstr* load =
-      new (Z) LoadLocalInstr(*variable, TokenPosition::kNoSource);
+  LoadLocalInstr* load = new (Z) LoadLocalInstr(*variable, InstructionSource());
   Push(load);
   return Fragment(load);
 }
@@ -517,8 +517,9 @@
   if (value->BindsToConstant()) {
     emit_store_barrier = kNoStoreBarrier;
   }
-  StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr(
-      field, Pop(), value, emit_store_barrier, position, kind);
+  StoreInstanceFieldInstr* store =
+      new (Z) StoreInstanceFieldInstr(field, Pop(), value, emit_store_barrier,
+                                      InstructionSource(position), kind);
   return Fragment(store);
 }
 
@@ -534,7 +535,7 @@
 
   StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr(
       MayCloneField(Z, field), Pop(), value, emit_store_barrier,
-      TokenPosition::kNoSource, parsed_function_, kind);
+      InstructionSource(), parsed_function_, kind);
 
   return Fragment(store);
 }
@@ -578,7 +579,7 @@
 Fragment BaseFlowGraphBuilder::LoadStaticField(const Field& field,
                                                bool calls_initializer) {
   LoadStaticFieldInstr* load = new (Z) LoadStaticFieldInstr(
-      field, TokenPosition::kNoSource, calls_initializer,
+      field, InstructionSource(), calls_initializer,
       calls_initializer ? GetNextDeoptId() : DeoptId::kNone);
   Push(load);
   return Fragment(load);
@@ -615,8 +616,8 @@
 
 Fragment BaseFlowGraphBuilder::StoreStaticField(TokenPosition position,
                                                 const Field& field) {
-  return Fragment(
-      new (Z) StoreStaticFieldInstr(MayCloneField(Z, field), Pop(), position));
+  return Fragment(new (Z) StoreStaticFieldInstr(MayCloneField(Z, field), Pop(),
+                                                InstructionSource(position)));
 }
 
 Fragment BaseFlowGraphBuilder::StoreIndexed(classid_t class_id) {
@@ -629,7 +630,7 @@
       index, value, emit_store_barrier, /*index_unboxed=*/false,
 
       compiler::target::Instance::ElementSizeFor(class_id), class_id,
-      kAlignedAccess, DeoptId::kNone, TokenPosition::kNoSource);
+      kAlignedAccess, DeoptId::kNone, InstructionSource());
   return Fragment(store);
 }
 
@@ -644,7 +645,7 @@
   Value* c_pointer = Pop();
   StoreIndexedInstr* instr = new (Z) StoreIndexedInstr(
       c_pointer, index, value, kNoStoreBarrier, index_unboxed, index_scale,
-      class_id, alignment, DeoptId::kNone, TokenPosition::kNoSource,
+      class_id, alignment, DeoptId::kNone, InstructionSource(),
       Instruction::SpeculativeMode::kNotSpeculative);
   return Fragment(instr);
 }
@@ -667,7 +668,8 @@
                                              LocalVariable* variable) {
   ASSERT(!variable->is_captured());
   Value* value = Pop();
-  StoreLocalInstr* store = new (Z) StoreLocalInstr(*variable, value, position);
+  StoreLocalInstr* store =
+      new (Z) StoreLocalInstr(*variable, value, InstructionSource(position));
   Fragment instructions(store);
   Push(store);
   return instructions;
@@ -822,7 +824,7 @@
   Value* right = Pop();
   Value* left = Pop();
   RelationalOpInstr* instr = new (Z) RelationalOpInstr(
-      TokenPosition::kNoSource, kind, left, right, kSmiCid, GetNextDeoptId());
+      InstructionSource(), kind, left, right, kSmiCid, GetNextDeoptId());
   Push(instr);
   return Fragment(instr);
 }
@@ -881,8 +883,8 @@
 
 Fragment BaseFlowGraphBuilder::AssertBool(TokenPosition position) {
   Value* value = Pop();
-  AssertBooleanInstr* instr =
-      new (Z) AssertBooleanInstr(position, value, GetNextDeoptId());
+  AssertBooleanInstr* instr = new (Z)
+      AssertBooleanInstr(InstructionSource(position), value, GetNextDeoptId());
   Push(instr);
   return Fragment(instr);
 }
@@ -896,7 +898,7 @@
 Fragment BaseFlowGraphBuilder::AllocateContext(
     const ZoneGrowableArray<const Slot*>& context_slots) {
   AllocateContextInstr* allocate =
-      new (Z) AllocateContextInstr(TokenPosition::kNoSource, context_slots);
+      new (Z) AllocateContextInstr(InstructionSource(), context_slots);
   Push(allocate);
   return Fragment(allocate);
 }
@@ -905,7 +907,8 @@
     TokenPosition position,
     const Function& closure_function) {
   const Class& cls = Class::ZoneHandle(Z, I->object_store()->closure_class());
-  AllocateObjectInstr* allocate = new (Z) AllocateObjectInstr(position, cls);
+  AllocateObjectInstr* allocate =
+      new (Z) AllocateObjectInstr(InstructionSource(position), cls);
   allocate->set_closure_function(closure_function);
   Push(allocate);
   return Fragment(allocate);
@@ -914,7 +917,7 @@
 Fragment BaseFlowGraphBuilder::CreateArray() {
   Value* element_count = Pop();
   CreateArrayInstr* array =
-      new (Z) CreateArrayInstr(TokenPosition::kNoSource,
+      new (Z) CreateArrayInstr(InstructionSource(),
                                Pop(),  // Element type.
                                element_count, GetNextDeoptId());
   Push(array);
@@ -924,8 +927,8 @@
 Fragment BaseFlowGraphBuilder::AllocateTypedData(TokenPosition position,
                                                  classid_t class_id) {
   Value* num_elements = Pop();
-  auto* instr = new (Z) AllocateTypedDataInstr(position, class_id, num_elements,
-                                               GetNextDeoptId());
+  auto* instr = new (Z) AllocateTypedDataInstr(
+      InstructionSource(position), class_id, num_elements, GetNextDeoptId());
   Push(instr);
   return Fragment(instr);
 }
@@ -933,9 +936,9 @@
 Fragment BaseFlowGraphBuilder::InstantiateType(const AbstractType& type) {
   Value* function_type_args = Pop();
   Value* instantiator_type_args = Pop();
-  InstantiateTypeInstr* instr = new (Z) InstantiateTypeInstr(
-      TokenPosition::kNoSource, type, instantiator_type_args,
-      function_type_args, GetNextDeoptId());
+  InstantiateTypeInstr* instr = new (Z)
+      InstantiateTypeInstr(InstructionSource(), type, instantiator_type_args,
+                           function_type_args, GetNextDeoptId());
   Push(instr);
   return Fragment(instr);
 }
@@ -950,7 +953,7 @@
   Value* instantiator_type_args = Pop();
   const Class& instantiator_class = Class::ZoneHandle(Z, function_.Owner());
   InstantiateTypeArgumentsInstr* instr = new (Z) InstantiateTypeArgumentsInstr(
-      TokenPosition::kNoSource, instantiator_type_args, function_type_args,
+      InstructionSource(), instantiator_type_args, function_type_args,
       type_arguments, instantiator_class, function_, GetNextDeoptId());
   Push(instr);
   instructions += Fragment(instr);
@@ -964,7 +967,7 @@
   const Function& function = Object::null_function();
   const Class& instantiator_class = Class::ZoneHandle(Z);
   InstantiateTypeArgumentsInstr* instr = new (Z) InstantiateTypeArgumentsInstr(
-      TokenPosition::kNoSource, instantiator_type_args, function_type_args,
+      InstructionSource(), instantiator_type_args, function_type_args,
       type_arguments, instantiator_class, function, GetNextDeoptId());
   Push(instr);
   return Fragment(instr);
@@ -981,8 +984,8 @@
                                               intptr_t argument_count) {
   ASSERT((argument_count == 0) || (argument_count == 1));
   Value* type_arguments = (argument_count > 0) ? Pop() : nullptr;
-  AllocateObjectInstr* allocate =
-      new (Z) AllocateObjectInstr(position, klass, type_arguments);
+  AllocateObjectInstr* allocate = new (Z)
+      AllocateObjectInstr(InstructionSource(position), klass, type_arguments);
   Push(allocate);
   return Fragment(allocate);
 }
@@ -1044,8 +1047,9 @@
 #ifdef PRODUCT
   return Fragment();
 #else
-  return Fragment(new (Z) DebugStepCheckInstr(
-      position, PcDescriptorsLayout::kRuntimeCall, GetNextDeoptId()));
+  return Fragment(new (Z) DebugStepCheckInstr(InstructionSource(position),
+                                              PcDescriptorsLayout::kRuntimeCall,
+                                              GetNextDeoptId()));
 #endif
 }
 
@@ -1055,10 +1059,10 @@
                                          bool clear_the_temp /* = true */) {
   Fragment instructions = LoadLocal(receiver);
 
-  CheckNullInstr* check_null = new (Z)
-      CheckNullInstr(Pop(), function_name, GetNextDeoptId(), position,
-                     function_name.IsNull() ? CheckNullInstr::kCastError
-                                            : CheckNullInstr::kNoSuchMethod);
+  CheckNullInstr* check_null = new (Z) CheckNullInstr(
+      Pop(), function_name, GetNextDeoptId(), InstructionSource(position),
+      function_name.IsNull() ? CheckNullInstr::kCastError
+                             : CheckNullInstr::kNoSuchMethod);
 
   // Does not use the redefinition, no `Push(check_null)`.
   instructions <<= check_null;
@@ -1077,8 +1081,8 @@
 Fragment BaseFlowGraphBuilder::CheckNullOptimized(TokenPosition position,
                                                   const String& function_name) {
   Value* value = Pop();
-  CheckNullInstr* check_null =
-      new (Z) CheckNullInstr(value, function_name, GetNextDeoptId(), position);
+  CheckNullInstr* check_null = new (Z) CheckNullInstr(
+      value, function_name, GetNextDeoptId(), InstructionSource(position));
   Push(check_null);  // Use the redefinition.
   return Fragment(check_null);
 }
@@ -1156,18 +1160,19 @@
                                            bool is_statically_checked) {
   const intptr_t total_count = argument_count + (type_args_len > 0 ? 1 : 0) + 1;
   InputsArray* arguments = GetArguments(total_count);
-  ClosureCallInstr* call = new (Z) ClosureCallInstr(
-      arguments, type_args_len, argument_names, position, GetNextDeoptId(),
-      is_statically_checked ? Code::EntryKind::kUnchecked
-                            : Code::EntryKind::kNormal);
+  ClosureCallInstr* call = new (Z)
+      ClosureCallInstr(arguments, type_args_len, argument_names,
+                       InstructionSource(position), GetNextDeoptId(),
+                       is_statically_checked ? Code::EntryKind::kUnchecked
+                                             : Code::EntryKind::kNormal);
   Push(call);
   return Fragment(call);
 }
 
 Fragment BaseFlowGraphBuilder::StringInterpolate(TokenPosition position) {
   Value* array = Pop();
-  StringInterpolateInstr* interpolate =
-      new (Z) StringInterpolateInstr(array, position, GetNextDeoptId());
+  StringInterpolateInstr* interpolate = new (Z) StringInterpolateInstr(
+      array, InstructionSource(position), GetNextDeoptId());
   Push(interpolate);
   return Fragment(interpolate);
 }
@@ -1194,8 +1199,8 @@
   Value* value = Pop();
 
   AssertAssignableInstr* instr = new (Z) AssertAssignableInstr(
-      position, value, dst_type, instantiator_type_args, function_type_args,
-      dst_name, GetNextDeoptId(), kind);
+      InstructionSource(position), value, dst_type, instantiator_type_args,
+      function_type_args, dst_name, GetNextDeoptId(), kind);
   Push(instr);
 
   return Fragment(instr);
diff --git a/runtime/vm/compiler/frontend/flow_graph_builder.cc b/runtime/vm/compiler/frontend/flow_graph_builder.cc
index e09620b..5034b6d 100644
--- a/runtime/vm/compiler/frontend/flow_graph_builder.cc
+++ b/runtime/vm/compiler/frontend/flow_graph_builder.cc
@@ -260,7 +260,7 @@
 
     ConstantInstr* true_const = caller_graph_->GetConstant(Bool::True());
     BranchInstr* branch = new (Z) BranchInstr(
-        new (Z) StrictCompareInstr(TokenPosition::kNoSource, Token::kEQ_STRICT,
+        new (Z) StrictCompareInstr(InstructionSource(), Token::kEQ_STRICT,
                                    new (Z) Value(true_const),
                                    new (Z) Value(true_const), false,
                                    CompilerState::Current().GetNextDeoptId()),
diff --git a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
index b1ea0ea..74f4ecd 100644
--- a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
+++ b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
@@ -1939,9 +1939,9 @@
       Value* right_value = Pop();
       Value* left_value = Pop();
       StrictCompareInstr* compare = new (Z) StrictCompareInstr(
-          TokenPosition::kNoSource,
-          negate ? Token::kNE_STRICT : Token::kEQ_STRICT, left_value,
-          right_value, false, flow_graph_builder_->GetNextDeoptId());
+          InstructionSource(), negate ? Token::kNE_STRICT : Token::kEQ_STRICT,
+          left_value, right_value, false,
+          flow_graph_builder_->GetNextDeoptId());
       branch =
           new (Z) BranchInstr(compare, flow_graph_builder_->GetNextDeoptId());
       negate = false;
diff --git a/runtime/vm/compiler/frontend/kernel_to_il.cc b/runtime/vm/compiler/frontend/kernel_to_il.cc
index 350e06d..cdc4f00 100644
--- a/runtime/vm/compiler/frontend/kernel_to_il.cc
+++ b/runtime/vm/compiler/frontend/kernel_to_il.cc
@@ -326,7 +326,7 @@
   Fragment instructions = LoadLocal(context_variable);
 
   CloneContextInstr* clone_instruction = new (Z) CloneContextInstr(
-      TokenPosition::kNoSource, Pop(), context_slots, GetNextDeoptId());
+      InstructionSource(), Pop(), context_slots, GetNextDeoptId());
   instructions <<= clone_instruction;
   Push(clone_instruction);
 
@@ -352,8 +352,8 @@
   const intptr_t total_count = argument_count + (type_args_len > 0 ? 1 : 0);
   InputsArray* arguments = GetArguments(total_count);
   InstanceCallInstr* call = new (Z) InstanceCallInstr(
-      position, name, kind, arguments, type_args_len, argument_names,
-      checked_argument_count, ic_data_array_, GetNextDeoptId(),
+      InstructionSource(position), name, kind, arguments, type_args_len,
+      argument_names, checked_argument_count, ic_data_array_, GetNextDeoptId(),
       interface_target, tearoff_interface_target);
   if ((result_type != NULL) && !result_type->IsTrivial()) {
     call->SetResultType(Z, result_type->ToCompileType(Z));
@@ -401,9 +401,9 @@
 Fragment FlowGraphBuilder::ThrowException(TokenPosition position) {
   Fragment instructions;
   Value* exception = Pop();
-  instructions +=
-      Fragment(new (Z) ThrowInstr(position, GetNextDeoptId(), exception))
-          .closed();
+  instructions += Fragment(new (Z) ThrowInstr(InstructionSource(position),
+                                              GetNextDeoptId(), exception))
+                      .closed();
   // Use its side effect of leaving a constant on the stack (does not change
   // the graph).
   NullConstant();
@@ -416,10 +416,10 @@
   Fragment instructions;
   Value* stacktrace = Pop();
   Value* exception = Pop();
-  instructions +=
-      Fragment(new (Z) ReThrowInstr(position, catch_try_index, GetNextDeoptId(),
-                                    exception, stacktrace))
-          .closed();
+  instructions += Fragment(new (Z) ReThrowInstr(
+                               InstructionSource(position), catch_try_index,
+                               GetNextDeoptId(), exception, stacktrace))
+                      .closed();
   // Use its side effect of leaving a constant on the stack (does not change
   // the graph).
   NullConstant();
@@ -531,9 +531,9 @@
   const intptr_t num_args =
       function->NumParameters() + (function->IsGeneric() ? 1 : 0);
   InputsArray* arguments = GetArguments(num_args);
-  NativeCallInstr* call =
-      new (Z) NativeCallInstr(name, function, FLAG_link_natives_lazily,
-                              function->end_token_pos(), arguments);
+  NativeCallInstr* call = new (Z)
+      NativeCallInstr(name, function, FLAG_link_natives_lazily,
+                      InstructionSource(function->end_token_pos()), arguments);
   Push(call);
   return Fragment(call);
 }
@@ -594,9 +594,9 @@
                                       bool use_unchecked_entry) {
   const intptr_t total_count = argument_count + (type_args_count > 0 ? 1 : 0);
   InputsArray* arguments = GetArguments(total_count);
-  StaticCallInstr* call = new (Z)
-      StaticCallInstr(position, target, type_args_count, argument_names,
-                      arguments, ic_data_array_, GetNextDeoptId(), rebind_rule);
+  StaticCallInstr* call = new (Z) StaticCallInstr(
+      InstructionSource(position), target, type_args_count, argument_names,
+      arguments, ic_data_array_, GetNextDeoptId(), rebind_rule);
   SetResultTypeForStaticCall(call, target, argument_count, result_type);
   if (use_unchecked_entry) {
     call->set_entry_kind(Code::EntryKind::kUnchecked);
@@ -1755,9 +1755,9 @@
   Value* function_type_args = Pop();
   Value* instantiator_type_args = Pop();
 
-  AssertSubtypeInstr* instr = new (Z)
-      AssertSubtypeInstr(position, instantiator_type_args, function_type_args,
-                         sub_type, super_type, dst_name, GetNextDeoptId());
+  AssertSubtypeInstr* instr = new (Z) AssertSubtypeInstr(
+      InstructionSource(position), instantiator_type_args, function_type_args,
+      sub_type, super_type, dst_name, GetNextDeoptId());
   instructions += Fragment(instr);
 
   return instructions;
@@ -3727,8 +3727,8 @@
 
 Fragment FlowGraphBuilder::NativeReturn(
     const compiler::ffi::CallbackMarshaller& marshaller) {
-  auto* instr = new (Z) NativeReturnInstr(TokenPosition::kNoSource, Pop(),
-                                          marshaller, DeoptId::kNone);
+  auto* instr = new (Z)
+      NativeReturnInstr(InstructionSource(), Pop(), marshaller, DeoptId::kNone);
   return Fragment(instr).closed();
 }
 
diff --git a/runtime/vm/compiler/graph_intrinsifier.cc b/runtime/vm/compiler/graph_intrinsifier.cc
index 4935b0b..61cf909 100644
--- a/runtime/vm/compiler/graph_intrinsifier.cc
+++ b/runtime/vm/compiler/graph_intrinsifier.cc
@@ -170,8 +170,8 @@
                                     Definition* array,
                                     Definition* index,
                                     const Slot& length_field) {
-  Definition* length = builder->AddDefinition(new LoadFieldInstr(
-      new Value(array), length_field, TokenPosition::kNoSource));
+  Definition* length = builder->AddDefinition(
+      new LoadFieldInstr(new Value(array), length_field, InstructionSource()));
   // Note that the intrinsifier must always use deopting array bound
   // checks, because intrinsics currently don't support calls.
   Definition* safe_index = new CheckArrayBoundInstr(
@@ -247,7 +247,7 @@
   Definition* result = builder.AddDefinition(new LoadIndexedInstr(
       new Value(array), new Value(index), /*index_unboxed=*/false,
       /*index_scale=*/target::Instance::ElementSizeFor(array_cid), array_cid,
-      kAlignedAccess, DeoptId::kNone, builder.TokenPos()));
+      kAlignedAccess, DeoptId::kNone, builder.Source()));
 
   // We don't perform [RangeAnalysis] for graph intrinsics. To inform the
   // following boxing instruction about a more precise range we attach it here
@@ -355,7 +355,7 @@
       // maps to kUnboxedInt64. All other situations get away with
       // truncating even non-smi values.
       builder.AddInstruction(new CheckSmiInstr(new Value(value), DeoptId::kNone,
-                                               builder.TokenPos()));
+                                               builder.Source()));
       FALL_THROUGH;
 #endif
     case kTypedDataInt8ArrayCid:
@@ -410,7 +410,7 @@
       Zone* zone = flow_graph->zone();
       Cids* value_check = Cids::CreateMonomorphic(zone, value_check_cid);
       builder.AddInstruction(new CheckClassInstr(
-          new Value(value), DeoptId::kNone, *value_check, builder.TokenPos()));
+          new Value(value), DeoptId::kNone, *value_check, builder.Source()));
       value = builder.AddUnboxInstr(rep, new Value(value),
                                     /* is_checked = */ true);
       if (array_cid == kTypedDataFloat32ArrayCid) {
@@ -434,7 +434,7 @@
       new Value(array), new Value(index), new Value(value), kNoStoreBarrier,
       /*index_unboxed=*/false,
       /*index_scale=*/target::Instance::ElementSizeFor(array_cid), array_cid,
-      kAlignedAccess, DeoptId::kNone, builder.TokenPos()));
+      kAlignedAccess, DeoptId::kNone, builder.Source()));
   // Return null.
   Definition* null_def = builder.AddNullDefinition();
   builder.AddReturn(new Value(null_def));
@@ -573,7 +573,7 @@
   Definition* load = builder.AddDefinition(new LoadIndexedInstr(
       new Value(str), new Value(index), /*index_unboxed=*/false,
       target::Instance::ElementSizeFor(cid), cid, kAlignedAccess,
-      DeoptId::kNone, builder.TokenPos()));
+      DeoptId::kNone, builder.Source()));
 
   // We don't perform [RangeAnalysis] for graph intrinsics. To inform the
   // following boxing instruction about a more precise range we attach it here
@@ -631,7 +631,7 @@
   Cids* value_check = Cids::CreateMonomorphic(zone, cid);
   // Check argument. Receiver (left) is known to be a Float32x4.
   builder.AddInstruction(new CheckClassInstr(new Value(right), DeoptId::kNone,
-                                             *value_check, builder.TokenPos()));
+                                             *value_check, builder.Source()));
   Definition* left_simd = builder.AddUnboxInstr(rep, new Value(left),
                                                 /* is_checked = */ true);
 
@@ -737,7 +737,7 @@
   VerifyParameterIsBoxed(&builder, 0);
 
   Definition* length = builder.AddDefinition(
-      new LoadFieldInstr(new Value(array), field, builder.TokenPos()));
+      new LoadFieldInstr(new Value(array), field, builder.Source()));
 
   length = CreateUnboxedResultIfNeeded(&builder, length);
   builder.AddReturn(new Value(length));
@@ -781,9 +781,9 @@
   VerifyParameterIsBoxed(&builder, 0);
 
   Definition* backing_store = builder.AddDefinition(new LoadFieldInstr(
-      new Value(array), Slot::GrowableObjectArray_data(), builder.TokenPos()));
+      new Value(array), Slot::GrowableObjectArray_data(), builder.Source()));
   Definition* capacity = builder.AddDefinition(new LoadFieldInstr(
-      new Value(backing_store), Slot::Array_length(), builder.TokenPos()));
+      new Value(backing_store), Slot::Array_length(), builder.Source()));
   capacity = CreateUnboxedResultIfNeeded(&builder, capacity);
   builder.AddReturn(new Value(capacity));
   return true;
@@ -805,11 +805,11 @@
 
   Definition* backing_store = builder.AddDefinition(
       new LoadFieldInstr(new Value(growable_array),
-                         Slot::GrowableObjectArray_data(), builder.TokenPos()));
+                         Slot::GrowableObjectArray_data(), builder.Source()));
   Definition* result = builder.AddDefinition(new LoadIndexedInstr(
       new Value(backing_store), new Value(index), /*index_unboxed=*/false,
       /*index_scale=*/target::Instance::ElementSizeFor(kArrayCid), kArrayCid,
-      kAlignedAccess, DeoptId::kNone, builder.TokenPos()));
+      kAlignedAccess, DeoptId::kNone, builder.Source()));
   result = CreateUnboxedResultIfNeeded(&builder, result);
   builder.AddReturn(new Value(result));
   return true;
@@ -836,7 +836,7 @@
       new Value(array), new Value(index), new Value(value), kEmitStoreBarrier,
       /*index_unboxed=*/false,
       /*index_scale=*/target::Instance::ElementSizeFor(kArrayCid), kArrayCid,
-      kAlignedAccess, DeoptId::kNone, builder.TokenPos()));
+      kAlignedAccess, DeoptId::kNone, builder.Source()));
   // Return null.
   Definition* null_def = builder.AddNullDefinition();
   builder.AddReturn(new Value(null_def));
@@ -861,13 +861,13 @@
                            Slot::GrowableObjectArray_length());
 
   Definition* backing_store = builder.AddDefinition(new LoadFieldInstr(
-      new Value(array), Slot::GrowableObjectArray_data(), builder.TokenPos()));
+      new Value(array), Slot::GrowableObjectArray_data(), builder.Source()));
 
   builder.AddInstruction(new StoreIndexedInstr(
       new Value(backing_store), new Value(index), new Value(value),
       kEmitStoreBarrier, /*index_unboxed=*/false,
       /*index_scale=*/target::Instance::ElementSizeFor(kArrayCid), kArrayCid,
-      kAlignedAccess, DeoptId::kNone, builder.TokenPos()));
+      kAlignedAccess, DeoptId::kNone, builder.Source()));
   // Return null.
   Definition* null_def = builder.AddNullDefinition();
   builder.AddReturn(new Value(null_def));
@@ -888,11 +888,11 @@
 
   Cids* value_check = Cids::CreateMonomorphic(zone, kArrayCid);
   builder.AddInstruction(new CheckClassInstr(new Value(data), DeoptId::kNone,
-                                             *value_check, builder.TokenPos()));
+                                             *value_check, builder.Source()));
 
   builder.AddInstruction(new StoreInstanceFieldInstr(
       Slot::GrowableObjectArray_data(), new Value(growable_array),
-      new Value(data), kEmitStoreBarrier, builder.TokenPos()));
+      new Value(data), kEmitStoreBarrier, builder.Source()));
   // Return null.
   Definition* null_def = builder.AddNullDefinition();
   builder.AddReturn(new Value(null_def));
@@ -911,10 +911,10 @@
   VerifyParameterIsBoxed(&builder, 1);
 
   builder.AddInstruction(
-      new CheckSmiInstr(new Value(length), DeoptId::kNone, builder.TokenPos()));
+      new CheckSmiInstr(new Value(length), DeoptId::kNone, builder.Source()));
   builder.AddInstruction(new StoreInstanceFieldInstr(
       Slot::GrowableObjectArray_length(), new Value(growable_array),
-      new Value(length), kNoStoreBarrier, builder.TokenPos()));
+      new Value(length), kNoStoreBarrier, builder.Source()));
   Definition* null_def = builder.AddNullDefinition();
   builder.AddReturn(new Value(null_def));
   return true;
@@ -984,7 +984,7 @@
 
   Definition* unboxed_result =
       builder->AddDefinition(new InvokeMathCFunctionInstr(
-          args, DeoptId::kNone, kind, builder->TokenPos()));
+          args, DeoptId::kNone, kind, builder->Source()));
   Definition* result =
       CreateBoxedResultIfNeeded(builder, unboxed_result, kUnboxedDouble);
   builder->AddReturn(new Value(result));
@@ -1158,7 +1158,7 @@
   VerifyParameterIsBoxed(&builder, 0);
 
   Definition* field_value = builder.AddDefinition(new (zone) LoadFieldInstr(
-      new (zone) Value(receiver), slot, builder.TokenPos()));
+      new (zone) Value(receiver), slot, builder.Source()));
 
   // We only support cases where we do not have to create a box (whose
   // allocation could fail).
@@ -1220,7 +1220,7 @@
 
   builder.AddInstruction(new (zone) StoreInstanceFieldInstr(
       slot, new (zone) Value(receiver), new (zone) Value(value), barrier_mode,
-      builder.TokenPos()));
+      builder.Source()));
 
   builder.AddReturn(new (zone) Value(flow_graph->constant_null()));
   return true;
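
Editor's note: the intrinsifier hunks above replace builder.TokenPos() with builder.Source() at each instruction construction site, so instructions receive a combined source descriptor rather than a bare token position. The VM's actual InstructionSource type is defined elsewhere in the tree; the following is only a minimal, self-contained sketch with invented stand-in types, showing how a builder helper can bundle a token position with an inlining id into one value that call sites forward unchanged.

    #include <cstdint>
    #include <iostream>

    // Simplified stand-ins, not the VM's TokenPosition / InstructionSource.
    struct TokenPosition {
      int32_t value = -1;  // -1 plays the role of "no source" in this sketch.
    };

    struct InstructionSource {
      TokenPosition token_pos;   // where in the script the instruction came from
      int32_t inlining_id = -1;  // which inlined frame it belongs to, if any
    };

    // A builder that previously handed out a bare TokenPosition can instead
    // hand out a full source descriptor, so call sites stay one-argument.
    class SketchBuilder {
     public:
      SketchBuilder(TokenPosition pos, int32_t inlining_id)
          : pos_(pos), inlining_id_(inlining_id) {}

      TokenPosition TokenPos() const { return pos_; }
      InstructionSource Source() const {
        return InstructionSource{pos_, inlining_id_};
      }

     private:
      TokenPosition pos_;
      int32_t inlining_id_;
    };

    int main() {
      SketchBuilder builder(TokenPosition{42}, /*inlining_id=*/0);
      InstructionSource src = builder.Source();
      std::cout << "token=" << src.token_pos.value
                << " inlining_id=" << src.inlining_id << "\n";
      return 0;
    }
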
diff --git a/runtime/vm/compiler/jit/jit_call_specializer.cc b/runtime/vm/compiler/jit/jit_call_specializer.cc
index e596fa3..873174d 100644
--- a/runtime/vm/compiler/jit/jit_call_specializer.cc
+++ b/runtime/vm/compiler/jit/jit_call_specializer.cc
@@ -230,7 +230,7 @@
   ASSERT(alloc->IsAllocateContext() || alloc->IsCloneContext());
 
   AllocateUninitializedContextInstr* replacement =
-      new AllocateUninitializedContextInstr(alloc->token_pos(),
+      new AllocateUninitializedContextInstr(alloc->source(),
                                             context_variables.length());
   alloc->ReplaceWith(replacement, current_iterator());
 
@@ -240,7 +240,7 @@
   if (context_value != NULL) {
     LoadFieldInstr* load =
         new (Z) LoadFieldInstr(context_value->CopyWithType(Z),
-                               Slot::Context_parent(), alloc->token_pos());
+                               Slot::Context_parent(), alloc->source());
     flow_graph()->InsertAfter(cursor, load, NULL, FlowGraph::kValue);
     cursor = load;
     initial_value = new (Z) Value(load);
@@ -249,7 +249,7 @@
   }
   StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr(
       Slot::Context_parent(), new (Z) Value(replacement), initial_value,
-      kNoStoreBarrier, alloc->token_pos(),
+      kNoStoreBarrier, alloc->source(),
       StoreInstanceFieldInstr::Kind::kInitializing);
   flow_graph()->InsertAfter(cursor, store, nullptr, FlowGraph::kEffect);
   cursor = replacement;
@@ -257,7 +257,7 @@
   for (auto& slot : context_variables) {
     if (context_value != nullptr) {
       LoadFieldInstr* load = new (Z) LoadFieldInstr(
-          context_value->CopyWithType(Z), *slot, alloc->token_pos());
+          context_value->CopyWithType(Z), *slot, alloc->source());
       flow_graph()->InsertAfter(cursor, load, nullptr, FlowGraph::kValue);
       cursor = load;
       initial_value = new (Z) Value(load);
@@ -267,7 +267,7 @@
 
     store = new (Z) StoreInstanceFieldInstr(
         *slot, new (Z) Value(replacement), initial_value, kNoStoreBarrier,
-        alloc->token_pos(), StoreInstanceFieldInstr::Kind::kInitializing);
+        alloc->source(), StoreInstanceFieldInstr::Kind::kInitializing);
     flow_graph()->InsertAfter(cursor, store, nullptr, FlowGraph::kEffect);
     cursor = store;
   }
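
Editor's note: the specializer hunk replaces a context allocation with an uninitialized allocation followed by explicit initializing stores, and forwards alloc->source() instead of alloc->token_pos() to every replacement instruction. As a rough illustration of the shape of that lowering (these are not the VM's IR classes; all names below are invented for the sketch), the key point is that each replacement inherits the source information of the instruction it replaces.

    #include <cstdint>
    #include <iostream>
    #include <string>
    #include <vector>

    // Invented, simplified IR node for illustration only.
    struct Source {
      int32_t token_pos = -1;
      int32_t inlining_id = -1;
    };

    struct Instr {
      std::string op;
      Source source;  // every instruction carries its provenance
    };

    // Lower "AllocateContext(n)" into an uninitialized allocation plus one
    // initializing store per slot, reusing the original instruction's source
    // so debug info still points at the allocation site.
    std::vector<Instr> LowerAllocateContext(const Instr& alloc, int num_slots) {
      std::vector<Instr> out;
      out.push_back({"AllocateUninitializedContext", alloc.source});
      out.push_back({"StoreField parent", alloc.source});
      for (int i = 0; i < num_slots; i++) {
        out.push_back({"StoreField slot" + std::to_string(i), alloc.source});
      }
      return out;
    }

    int main() {
      Instr alloc{"AllocateContext", {/*token_pos=*/17, /*inlining_id=*/0}};
      for (const Instr& instr : LowerAllocateContext(alloc, 2)) {
        std::cout << instr.op << " @ token " << instr.source.token_pos << "\n";
      }
      return 0;
    }
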
diff --git a/runtime/vm/dwarf.cc b/runtime/vm/dwarf.cc
index cfafd48..ef08d8c 100644
--- a/runtime/vm/dwarf.cc
+++ b/runtime/vm/dwarf.cc
@@ -15,21 +15,19 @@
 
 class DwarfPosition {
  public:
-  // The DWARF standard uses 0 to denote missing line or column information.
-  DwarfPosition(intptr_t line, intptr_t column)
-      : line_(line > 0 ? line : 0), column_(column > 0 ? column : 0) {
+  DwarfPosition(int32_t line, int32_t column) : line_(line), column_(column) {
     // Should only have no line information if also no column information.
-    ASSERT(line_ > 0 || column_ == 0);
+    ASSERT(line_ > 0 || column_ <= 0);
   }
-  explicit DwarfPosition(intptr_t line) : DwarfPosition(line, 0) {}
-  constexpr DwarfPosition() : line_(0), column_(0) {}
+  explicit DwarfPosition(int32_t line) : DwarfPosition(line, -1) {}
+  constexpr DwarfPosition() : line_(-1), column_(-1) {}
 
-  intptr_t line() const { return line_; }
-  intptr_t column() const { return column_; }
+  int32_t line() const { return line_; }
+  int32_t column() const { return column_; }
 
  private:
-  intptr_t line_;
-  intptr_t column_;
+  int32_t line_;
+  int32_t column_;
 };
 
 static constexpr auto kNoDwarfPositionInfo = DwarfPosition();
@@ -392,15 +390,6 @@
   }
 }
 
-static DwarfPosition ReadPosition(ReadStream* stream) {
-  const intptr_t line = stream->Read<int32_t>();
-  if (!FLAG_dwarf_stack_traces_mode) {
-    return DwarfPosition(line);
-  }
-  const intptr_t column = stream->Read<int32_t>();
-  return DwarfPosition(line, column);
-}
-
 // Our state machine encodes position metadata such that we don't know the
 // end pc for an inlined function until it is popped, but DWARF DIEs encode
 // it where the function is pushed. We expand the state transitions into
@@ -431,21 +420,24 @@
   node_stack.Add(root_node);
 
   while (stream.PendingBytes() > 0) {
-    uint8_t opcode = stream.Read<uint8_t>();
+    int32_t arg1;
+    int32_t arg2 = -1;
+    const uint8_t opcode = CodeSourceMapOps::Read(&stream, &arg1, &arg2);
     switch (opcode) {
-      case CodeSourceMapBuilder::kChangePosition: {
-        token_positions[token_positions.length() - 1] = ReadPosition(&stream);
+      case CodeSourceMapOps::kChangePosition: {
+        const DwarfPosition& old_pos =
+            token_positions[token_positions.length() - 1];
+        token_positions[token_positions.length() - 1] =
+            DwarfPosition(Utils::AddWithWrapAround(old_pos.line(), arg1), arg2);
         break;
       }
-      case CodeSourceMapBuilder::kAdvancePC: {
-        int32_t delta = stream.Read<int32_t>();
-        current_pc_offset += delta;
+      case CodeSourceMapOps::kAdvancePC: {
+        current_pc_offset += arg1;
         break;
       }
-      case CodeSourceMapBuilder::kPushFunction: {
-        int32_t func = stream.Read<int32_t>();
+      case CodeSourceMapOps::kPushFunction: {
         const Function& child_func =
-            Function::ZoneHandle(zone_, Function::RawCast(functions.At(func)));
+            Function::ZoneHandle(zone_, Function::RawCast(functions.At(arg1)));
         InliningNode* child_node = new (zone_)
             InliningNode(child_func, token_positions.Last(), current_pc_offset);
         node_stack.Last()->AppendChild(child_node);
@@ -453,7 +445,7 @@
         token_positions.Add(kNoDwarfPositionInfo);
         break;
       }
-      case CodeSourceMapBuilder::kPopFunction: {
+      case CodeSourceMapOps::kPopFunction: {
         // We never pop the root function.
         ASSERT(node_stack.length() > 1);
         ASSERT(token_positions.length() > 1);
@@ -462,8 +454,7 @@
         token_positions.RemoveLast();
         break;
       }
-      case CodeSourceMapBuilder::kNullCheck: {
-        stream.Read<int32_t>();
+      case CodeSourceMapOps::kNullCheck: {
         break;
       }
       default:
@@ -499,10 +490,13 @@
   stream->OffsetFromSymbol(root_asm_name, node->end_pc_offset);
   // DW_AT_call_file
   stream->uleb128(file);
+
+  // The DWARF standard uses 0 to denote missing line or column information.
+
   // DW_AT_call_line
-  stream->uleb128(node->position.line());
+  stream->uleb128(node->position.line() < 0 ? 0 : node->position.line());
   // DW_at_call_column
-  stream->uleb128(node->position.column());
+  stream->uleb128(node->position.column() < 0 ? 0 : node->position.column());
 
   for (InliningNode* child = node->children_head; child != NULL;
        child = child->children_next) {
@@ -609,16 +603,20 @@
     token_positions.Add(kNoDwarfPositionInfo);
 
     while (code_map_stream.PendingBytes() > 0) {
-      uint8_t opcode = code_map_stream.Read<uint8_t>();
+      int32_t arg1;
+      int32_t arg2 = -1;
+      const uint8_t opcode =
+          CodeSourceMapOps::Read(&code_map_stream, &arg1, &arg2);
       switch (opcode) {
-        case CodeSourceMapBuilder::kChangePosition: {
-          token_positions[token_positions.length() - 1] =
-              ReadPosition(&code_map_stream);
+        case CodeSourceMapOps::kChangePosition: {
+          const DwarfPosition& old_pos =
+              token_positions[token_positions.length() - 1];
+          token_positions[token_positions.length() - 1] = DwarfPosition(
+              Utils::AddWithWrapAround(old_pos.line(), arg1), arg2);
           break;
         }
-        case CodeSourceMapBuilder::kAdvancePC: {
-          int32_t delta = code_map_stream.Read<int32_t>();
-          current_pc_offset += delta;
+        case CodeSourceMapOps::kAdvancePC: {
+          current_pc_offset += arg1;
 
           const Function& function = *(function_stack.Last());
           script = function.script();
@@ -632,8 +630,14 @@
           }
 
           // 2. Update LNP line.
-          const intptr_t line = token_positions.Last().line();
-          const intptr_t column = token_positions.Last().column();
+          // The DWARF standard uses 0 to denote missing line or column
+          // information.
+          const intptr_t line = token_positions.Last().line() < 0
+                                    ? 0
+                                    : token_positions.Last().line();
+          const intptr_t column = token_positions.Last().column() < 0
+                                      ? 0
+                                      : token_positions.Last().column();
           if (line != previous_line) {
             stream->u1(DW_LNS_advance_line);
             stream->sleb128(line - previous_line);
@@ -668,15 +672,14 @@
           previous_pc_offset = current_pc_offset;
           break;
         }
-        case CodeSourceMapBuilder::kPushFunction: {
-          int32_t func_index = code_map_stream.Read<int32_t>();
-          const Function& child_func = Function::Handle(
-              zone_, Function::RawCast(functions.At(func_index)));
+        case CodeSourceMapOps::kPushFunction: {
+          const Function& child_func =
+              Function::Handle(zone_, Function::RawCast(functions.At(arg1)));
           function_stack.Add(&child_func);
           token_positions.Add(kNoDwarfPositionInfo);
           break;
         }
-        case CodeSourceMapBuilder::kPopFunction: {
+        case CodeSourceMapOps::kPopFunction: {
           // We never pop the root function.
           ASSERT(function_stack.length() > 1);
           ASSERT(token_positions.length() > 1);
@@ -684,8 +687,7 @@
           token_positions.RemoveLast();
           break;
         }
-        case CodeSourceMapBuilder::kNullCheck: {
-          code_map_stream.Read<int32_t>();
+        case CodeSourceMapOps::kNullCheck: {
           break;
         }
         default:
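
Editor's note: in dwarf.cc the reader now obtains the opcode and its arguments in a single CodeSourceMapOps::Read call, line numbers are tracked as signed deltas applied with wrap-around addition, and the missing-position sentinel moves from 0 to -1. A negative value is clamped back to 0 only when the DWARF line or column is actually emitted, since DWARF reserves 0 for "unknown". Below is a minimal sketch of that decode-then-clamp flow; the change list and helper names are simplified assumptions, not the real CodeSourceMap encoding.

    #include <cstdint>
    #include <iostream>
    #include <utility>
    #include <vector>

    // Signed, wrap-around addition, standing in for Utils::AddWithWrapAround.
    static int32_t AddWithWrapAround(int32_t a, int32_t b) {
      return static_cast<int32_t>(static_cast<uint32_t>(a) +
                                  static_cast<uint32_t>(b));
    }

    struct Position {
      int32_t line = -1;    // -1 means "no line information"
      int32_t column = -1;  // -1 means "no column information"
    };

    // Clamp the internal -1 sentinel to DWARF's 0-means-missing convention.
    static uint32_t ToDwarf(int32_t value) { return value < 0 ? 0 : value; }

    int main() {
      Position pos;  // starts at the -1 sentinel, like kNoDwarfPositionInfo
      // Pretend these are decoded kChangePosition arguments: (line delta, column).
      const std::vector<std::pair<int32_t, int32_t>> changes = {
          {10, 3}, {2, -1}, {-5, 7}};
      for (const auto& [line_delta, column] : changes) {
        pos.line = AddWithWrapAround(pos.line, line_delta);
        pos.column = column;
        std::cout << "line=" << ToDwarf(pos.line)
                  << " column=" << ToDwarf(pos.column) << "\n";
      }
      return 0;
    }
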
diff --git a/runtime/vm/kernel.cc b/runtime/vm/kernel.cc
index dea993b..144b228 100644
--- a/runtime/vm/kernel.cc
+++ b/runtime/vm/kernel.cc
@@ -35,6 +35,15 @@
   }
 }
 
+int32_t KernelLineStartsReader::MaxPosition() const {
+  const intptr_t line_count = line_starts_data_.Length();
+  intptr_t current_start = 0;
+  for (intptr_t i = 0; i < line_count; i++) {
+    current_start += helper_->At(line_starts_data_, i);
+  }
+  return current_start;
+}
+
 bool KernelLineStartsReader::LocationForPosition(intptr_t position,
                                                  intptr_t* line,
                                                  intptr_t* col) const {
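
Editor's note: KernelLineStartsReader::MaxPosition above sums the delta-encoded line-starts table: each entry stores the distance from the previous line's start, so the running total after the last entry is the offset at which the final line begins, which bounds valid token positions. A standalone sketch of the same computation over a plain vector (assuming that delta encoding, and ignoring the actual TypedData accessors):

    #include <cstdint>
    #include <iostream>
    #include <vector>

    // line_starts[i] holds the delta from the start of line i-1 to the start
    // of line i (line_starts[0] is the absolute start of the first line).
    int32_t MaxPosition(const std::vector<int32_t>& line_starts) {
      int32_t current_start = 0;
      for (int32_t delta : line_starts) {
        current_start += delta;
      }
      return current_start;  // start offset of the last line
    }

    int main() {
      // Lines starting at absolute offsets 0, 12, 30 -> deltas 0, 12, 18.
      const std::vector<int32_t> deltas = {0, 12, 18};
      std::cout << MaxPosition(deltas) << "\n";  // prints 30
      return 0;
    }
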
diff --git a/runtime/vm/kernel.h b/runtime/vm/kernel.h
index b9a582f..f7c7dfb 100644
--- a/runtime/vm/kernel.h
+++ b/runtime/vm/kernel.h
@@ -140,6 +140,8 @@
     return helper_->At(line_starts_data_, index);
   }
 
+  int32_t MaxPosition() const;
+
   // Returns whether the given offset corresponds to a valid source offset
   // If it does, then *line and *column (if column is not nullptr) are set
   // to the line and column the token starts at.
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index 2a50ea2..5e33f16 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -11148,10 +11148,10 @@
       script = lib.LookupScript(uri, /* useResolvedUri = */ true);
       if (!script.IsNull()) {
         const auto& source = String::Handle(zone, script.Source());
-        const auto& line_starts = TypedData::Handle(zone, script.line_starts());
-        if (!source.IsNull() || !line_starts.IsNull()) {
+        const auto& starts = TypedData::Handle(zone, script.line_starts());
+        if (!source.IsNull() || !starts.IsNull()) {
           set_source(source);
-          set_line_starts(line_starts);
+          set_line_starts(starts);
           break;
         }
       }
@@ -11215,6 +11215,27 @@
   return info.raw();
 }
 
+TokenPosition Script::MaxPosition() const {
+#if !defined(DART_PRECOMPILED_RUNTIME)
+  if (HasCachedMaxPosition()) {
+    return TokenPosition::Deserialize(
+        ScriptLayout::CachedMaxPositionBitField::decode(
+            raw_ptr()->flags_and_max_position_));
+  }
+  auto const zone = Thread::Current()->zone();
+  LookupSourceAndLineStarts(zone);
+  if (!HasCachedMaxPosition() && line_starts() != TypedData::null()) {
+    const auto& starts = TypedData::Handle(zone, line_starts());
+    kernel::KernelLineStartsReader reader(starts, zone);
+    const intptr_t max_position = reader.MaxPosition();
+    SetCachedMaxPosition(max_position);
+    SetHasCachedMaxPosition(true);
+    return TokenPosition::Deserialize(max_position);
+  }
+#endif
+  return TokenPosition::kNoSource;
+}
+
 void Script::set_url(const String& value) const {
   raw_ptr()->set_url(value.raw());
 }
@@ -11260,20 +11281,36 @@
   return raw_ptr()->debug_positions();
 }
 
-void Script::set_flags(uint8_t value) const {
-  StoreNonPointer(&raw_ptr()->flags_, value);
-}
-
+#if !defined(DART_PRECOMPILED_RUNTIME)
 void Script::SetLazyLookupSourceAndLineStarts(bool value) const {
-  set_flags(ScriptLayout::LazyLookupSourceAndLineStartsBit::update(
-      value, raw_ptr()->flags_));
+  StoreNonPointer(&raw_ptr()->flags_and_max_position_,
+                  ScriptLayout::LazyLookupSourceAndLineStartsBit::update(
+                      value, raw_ptr()->flags_and_max_position_));
 }
 
 bool Script::IsLazyLookupSourceAndLineStarts() const {
   return ScriptLayout::LazyLookupSourceAndLineStartsBit::decode(
-      raw_ptr()->flags_);
+      raw_ptr()->flags_and_max_position_);
 }
 
+bool Script::HasCachedMaxPosition() const {
+  return ScriptLayout::HasCachedMaxPositionBit::decode(
+      raw_ptr()->flags_and_max_position_);
+}
+
+void Script::SetHasCachedMaxPosition(bool value) const {
+  StoreNonPointer(&raw_ptr()->flags_and_max_position_,
+                  ScriptLayout::HasCachedMaxPositionBit::update(
+                      value, raw_ptr()->flags_and_max_position_));
+}
+
+void Script::SetCachedMaxPosition(intptr_t value) const {
+  StoreNonPointer(&raw_ptr()->flags_and_max_position_,
+                  ScriptLayout::CachedMaxPositionBitField::update(
+                      value, raw_ptr()->flags_and_max_position_));
+}
+#endif
+
 void Script::set_load_timestamp(int64_t value) const {
   StoreNonPointer(&raw_ptr()->load_timestamp_, value);
 }
@@ -11286,6 +11323,15 @@
   StoreNonPointer(&raw_ptr()->col_offset_, col_offset);
 }
 
+bool Script::IsValidTokenPosition(TokenPosition token_pos) const {
+  const TokenPosition& max_position = MaxPosition();
+  // We may end up with scripts that have the empty string as a source file
+  // in testing and the like, so allow any token position when the max position
+  // is 0 as well as when it is kNoSource.
+  return !max_position.IsReal() || !token_pos.IsReal() ||
+         max_position.Pos() == 0 || token_pos <= max_position;
+}
+
 #if !defined(DART_PRECOMPILED_RUNTIME)
 static bool IsLetter(int32_t c) {
   return (('A' <= c) && (c <= 'Z')) || (('a' <= c) && (c <= 'z'));
@@ -11497,7 +11543,8 @@
       String::Handle(zone, Symbols::New(thread, resolved_url)));
   result.set_source(source);
   result.SetLocationOffset(0, 0);
-  result.set_flags(0);
+  NOT_IN_PRECOMPILED(result.SetLazyLookupSourceAndLineStarts(false));
+  NOT_IN_PRECOMPILED(result.SetHasCachedMaxPosition(false));
   result.set_kernel_script_index(0);
   result.set_load_timestamp(
       FLAG_remove_script_timestamps_for_test ? 0 : OS::GetCurrentTimeMillis());
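
Editor's note: object.cc folds the old one-byte flags_ into the wider flags_and_max_position_ word: two flag bits plus a cached max token position, computed lazily from the line-starts reader and written back through the bit field. IsValidTokenPosition then accepts a position unless both it and the cached maximum are real, the maximum is non-zero, and the position exceeds it. A compact sketch of that lazy-cache-plus-lenient-check logic, with the bit-field storage replaced by a plain optional and simplified names:

    #include <cstdint>
    #include <iostream>
    #include <optional>

    class ScriptSketch {
     public:
      explicit ScriptSketch(int32_t max_from_line_starts)
          : max_from_line_starts_(max_from_line_starts) {}

      // -1 plays the role of TokenPosition::kNoSource in this sketch.
      int32_t MaxPosition() const {
        if (!cached_max_position_.has_value()) {
          // Lazily compute once (here: pretend it comes from a line-starts
          // reader) and cache the result.
          cached_max_position_ = max_from_line_starts_;
        }
        return *cached_max_position_;
      }

      bool IsValidTokenPosition(int32_t token_pos) const {
        const int32_t max = MaxPosition();
        const bool max_is_real = max >= 0;
        const bool pos_is_real = token_pos >= 0;
        // Be lenient: only reject real positions past a real, non-zero maximum.
        return !max_is_real || !pos_is_real || max == 0 || token_pos <= max;
      }

     private:
      int32_t max_from_line_starts_;
      mutable std::optional<int32_t> cached_max_position_;
    };

    int main() {
      ScriptSketch script(/*max_from_line_starts=*/120);
      std::cout << script.IsValidTokenPosition(100) << " "   // 1: within range
                << script.IsValidTokenPosition(500) << " "   // 0: past the max
                << script.IsValidTokenPosition(-1) << "\n";  // 1: non-real pos
      return 0;
    }
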
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index d789af2..75da560 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -4504,6 +4504,9 @@
 
   intptr_t line_offset() const { return raw_ptr()->line_offset_; }
   intptr_t col_offset() const { return raw_ptr()->col_offset_; }
+  // Returns the max real token position for this script, or kNoSource
+  // if there is no line starts information.
+  TokenPosition MaxPosition() const;
 
   // The load time in milliseconds since epoch.
   int64_t load_timestamp() const { return raw_ptr()->load_timestamp_; }
@@ -4546,6 +4549,11 @@
 
   void SetLocationOffset(intptr_t line_offset, intptr_t col_offset) const;
 
+  // For real token positions when line starts are available, returns whether or
+  // not a GetTokenLocation call would succeed. Returns true for non-real token
+  // positions or if there is no line starts information.
+  bool IsValidTokenPosition(TokenPosition token_pos) const;
+
   // Returns whether a line and column could be computed for the given token
   // position and, if so, sets *line and *column (if not nullptr).
   bool GetTokenLocation(const TokenPosition& token_pos,
@@ -4576,15 +4584,20 @@
 #if !defined(DART_PRECOMPILED_RUNTIME)
   void LoadSourceFromKernel(const uint8_t* kernel_buffer,
                             intptr_t kernel_buffer_len) const;
+  bool IsLazyLookupSourceAndLineStarts() const;
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
 
-  void SetLazyLookupSourceAndLineStarts(bool value) const;
-  bool IsLazyLookupSourceAndLineStarts() const;
-
  private:
+#if !defined(DART_PRECOMPILED_RUNTIME)
+  bool HasCachedMaxPosition() const;
+
+  void SetLazyLookupSourceAndLineStarts(bool value) const;
+  void SetHasCachedMaxPosition(bool value) const;
+  void SetCachedMaxPosition(intptr_t value) const;
+#endif  // !defined(DART_PRECOMPILED_RUNTIME)
+
   void set_resolved_url(const String& value) const;
   void set_source(const String& value) const;
-  void set_flags(uint8_t value) const;
   void set_load_timestamp(int64_t value) const;
   ArrayPtr debug_positions() const;
 
diff --git a/runtime/vm/raw_object.h b/runtime/vm/raw_object.h
index fd85941..f1367db 100644
--- a/runtime/vm/raw_object.h
+++ b/runtime/vm/raw_object.h
@@ -1314,13 +1314,6 @@
 };
 
 class alignas(8) ScriptLayout : public ObjectLayout {
- public:
-  enum {
-    kLazyLookupSourceAndLineStartsPos = 0,
-    kLazyLookupSourceAndLineStartsSize = 1,
-  };
-
- private:
   RAW_HEAP_OBJECT_IMPLEMENTATION(Script);
 
   VISIT_FROM(ObjectPtr, url)
@@ -1354,13 +1347,23 @@
 
   int32_t line_offset_;
   int32_t col_offset_;
+#if !defined(DART_PRECOMPILED_RUNTIME)
+  int32_t flags_and_max_position_;
 
+ public:
   using LazyLookupSourceAndLineStartsBit =
-      BitField<uint8_t,
+      BitField<decltype(flags_and_max_position_), bool, 0, 1>;
+  using HasCachedMaxPositionBit =
+      BitField<decltype(flags_and_max_position_),
                bool,
-               kLazyLookupSourceAndLineStartsPos,
-               kLazyLookupSourceAndLineStartsSize>;
-  uint8_t flags_;
+               LazyLookupSourceAndLineStartsBit::kNextBit,
+               1>;
+  using CachedMaxPositionBitField = BitField<decltype(flags_and_max_position_),
+                                             intptr_t,
+                                             HasCachedMaxPositionBit::kNextBit>;
+
+ private:
+#endif
 
 #if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
   int64_t load_timestamp_;
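
Editor's note: the new ScriptLayout packs two single-bit flags and lets the cached-max-position field take whatever bits remain, chaining positions via kNextBit and omitting the size argument for the last field. Below is a minimal bit-field helper of my own (not the VM's BitField) that is just enough to show how that chaining and the defaulted remaining-bits size behave; fields in the example never span the full word, so the shifts stay in range.

    #include <cstdint>
    #include <iostream>

    constexpr int kBitsPerByte = 8;

    template <typename S,
              typename T,
              int position,
              int size = static_cast<int>(sizeof(S)) * kBitsPerByte - position>
    class BitFieldSketch {
     public:
      static constexpr int kNextBit = position + size;

      static constexpr S mask() {
        return ((static_cast<S>(1) << size) - 1) << position;
      }
      static constexpr S update(T value, S original) {
        return (original & ~mask()) |
               ((static_cast<S>(value) << position) & mask());
      }
      static constexpr T decode(S value) {
        return static_cast<T>((value & mask()) >> position);
      }
    };

    // Unsigned storage keeps the shifts well-defined in this sketch.
    using Flags = uint32_t;
    using LazyLookupBit = BitFieldSketch<Flags, bool, 0, 1>;
    using HasCachedMaxBit = BitFieldSketch<Flags, bool, LazyLookupBit::kNextBit, 1>;
    // No size argument: this field spans all remaining bits of the 32-bit word.
    using CachedMaxField = BitFieldSketch<Flags, int32_t, HasCachedMaxBit::kNextBit>;

    int main() {
      Flags flags = 0;
      flags = LazyLookupBit::update(true, flags);
      flags = HasCachedMaxBit::update(true, flags);
      flags = CachedMaxField::update(1234, flags);
      std::cout << LazyLookupBit::decode(flags) << " "
                << HasCachedMaxBit::decode(flags) << " "
                << CachedMaxField::decode(flags) << "\n";  // 1 1 1234
      return 0;
    }
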
diff --git a/runtime/vm/regexp_assembler_ir.cc b/runtime/vm/regexp_assembler_ir.cc
index 003ceb7..3c48384 100644
--- a/runtime/vm/regexp_assembler_ir.cc
+++ b/runtime/vm/regexp_assembler_ir.cc
@@ -243,7 +243,7 @@
   Value* type = Bind(new (Z) ConstantInstr(TypeArguments::ZoneHandle(
       Z, Isolate::Current()->object_store()->type_argument_int())));
   Value* length = Bind(Uint64Constant(saved_registers_count_));
-  Value* array = Bind(new (Z) CreateArrayInstr(TokenPosition::kNoSource, type,
+  Value* array = Bind(new (Z) CreateArrayInstr(InstructionSource(), type,
                                                length, GetNextDeoptId()));
   StoreLocal(result_, array);
 
@@ -267,7 +267,7 @@
 
   // Return true on success.
   AppendInstruction(new (Z) ReturnInstr(
-      TokenPosition::kNoSource, Bind(LoadLocal(result_)), GetNextDeoptId()));
+      InstructionSource(), Bind(LoadLocal(result_)), GetNextDeoptId()));
 }
 
 void IRRegExpMacroAssembler::GenerateExitBlock() {
@@ -276,7 +276,7 @@
 
   // Return false on failure.
   AppendInstruction(new (Z) ReturnInstr(
-      TokenPosition::kNoSource, Bind(LoadLocal(result_)), GetNextDeoptId()));
+      InstructionSource(), Bind(LoadLocal(result_)), GetNextDeoptId()));
 }
 
 void IRRegExpMacroAssembler::FinalizeRegistersArray() {
@@ -426,7 +426,7 @@
   Value* rhs_value = Bind(BoolConstant(true));
 
   return new (Z)
-      StrictCompareInstr(TokenPosition::kNoSource, strict_comparison, lhs_value,
+      StrictCompareInstr(InstructionSource(), strict_comparison, lhs_value,
                          rhs_value, true, GetNextDeoptId());
 }
 
@@ -472,8 +472,8 @@
     InputsArray* arguments,
     ICData::RebindRule rebind_rule) const {
   const intptr_t kTypeArgsLen = 0;
-  return new (Z) StaticCallInstr(TokenPosition::kNoSource, function,
-                                 kTypeArgsLen, Object::null_array(), arguments,
+  return new (Z) StaticCallInstr(InstructionSource(), function, kTypeArgsLen,
+                                 Object::null_array(), arguments,
                                  ic_data_array_, GetNextDeoptId(), rebind_rule);
 }
 
@@ -515,17 +515,17 @@
     InputsArray* arguments) const {
   const intptr_t kTypeArgsLen = 0;
   return new (Z) InstanceCallInstr(
-      TokenPosition::kNoSource, desc.name, desc.token_kind, arguments,
-      kTypeArgsLen, Object::null_array(), desc.checked_argument_count,
-      ic_data_array_, GetNextDeoptId());
+      InstructionSource(), desc.name, desc.token_kind, arguments, kTypeArgsLen,
+      Object::null_array(), desc.checked_argument_count, ic_data_array_,
+      GetNextDeoptId());
 }
 
 LoadLocalInstr* IRRegExpMacroAssembler::LoadLocal(LocalVariable* local) const {
-  return new (Z) LoadLocalInstr(*local, TokenPosition::kNoSource);
+  return new (Z) LoadLocalInstr(*local, InstructionSource());
 }
 
 void IRRegExpMacroAssembler::StoreLocal(LocalVariable* local, Value* value) {
-  Do(new (Z) StoreLocalInstr(*local, value, TokenPosition::kNoSource));
+  Do(new (Z) StoreLocalInstr(*local, value, InstructionSource()));
 }
 
 void IRRegExpMacroAssembler::set_current_instruction(Instruction* instruction) {
@@ -548,7 +548,7 @@
     return Bind(new (Z) ConstantInstr(*local.ConstValue()));
   }
   ASSERT(!local.is_captured());
-  return Bind(new (Z) LoadLocalInstr(local, TokenPosition::kNoSource));
+  return Bind(new (Z) LoadLocalInstr(local, InstructionSource()));
 }
 
 // In some cases, the V8 irregexp engine generates unreachable code by emitting
@@ -1691,7 +1691,7 @@
   // we set loop_depth to a non-zero value because this instruction does
   // not act as an OSR entry outside loops.
   AppendInstruction(new (Z) CheckStackOverflowInstr(
-      TokenPosition::kNoSource,
+      InstructionSource(),
       /*stack_depth=*/0,
       /*loop_depth=*/1, GetNextDeoptId(),
       is_backtrack ? CheckStackOverflowInstr::kOsrAndPreemption
@@ -1761,9 +1761,9 @@
   // Here pattern_val might be untagged so this must not trigger a GC.
   Value* index_val = BindLoadLocal(*index);
 
-  return Bind(new (Z) LoadCodeUnitsInstr(pattern_val, index_val, characters,
-                                         specialization_cid_,
-                                         TokenPosition::kNoSource));
+  return Bind(new (Z)
+                  LoadCodeUnitsInstr(pattern_val, index_val, characters,
+                                     specialization_cid_, InstructionSource()));
 }
 
 #undef __
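
Editor's note: the regexp assembler hunks swap the bare TokenPosition::kNoSource constant for a default-constructed InstructionSource() wherever an instruction is built without a concrete position. The point of the pattern is that a value-type wrapper can make "no position" its default state, so call sites stop naming the sentinel explicitly. A tiny illustration with made-up types (the real InstructionSource defaults may differ):

    #include <cstdint>
    #include <iostream>

    struct TokenPosition {
      static constexpr int32_t kNoSourceValue = -1;
      int32_t value = kNoSourceValue;
      bool IsNoSource() const { return value == kNoSourceValue; }
    };

    // Default construction already means "no source", so synthetic IR (like
    // the regexp assembler's) can just pass InstructionSourceSketch().
    struct InstructionSourceSketch {
      TokenPosition token_pos;   // defaults to the no-source sentinel
      int32_t inlining_id = -1;  // no inlining context either
    };

    struct ReturnInstrSketch {
      explicit ReturnInstrSketch(InstructionSourceSketch source)
          : source(source) {}
      InstructionSourceSketch source;
    };

    int main() {
      // Before: ReturnInstr(TokenPosition::kNoSource, ...)
      // After:  ReturnInstr(InstructionSource(), ...)
      ReturnInstrSketch ret{InstructionSourceSketch()};
      std::cout << std::boolalpha << ret.source.token_pos.IsNoSource() << "\n";
      return 0;
    }
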
diff --git a/tools/VERSION b/tools/VERSION
index 36ceaf1..f676ec5 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
 MAJOR 2
 MINOR 12
 PATCH 0
-PRERELEASE 166
+PRERELEASE 167
 PRERELEASE_PATCH 0
\ No newline at end of file