Reland "[vm] Fix V8 snapshot profile handling of the dispatch table."

This is a reland of 5909fd111d52e459b4c1885c5805849336df0f71

Does a large refactoring on the V8 snapshot profile writer
to clean things up, add more debugging support, and fix
the problems that surfaced during the original landing.

Other changes:

Changes Serializer::CreateArtificialNodeIfNeeded() to create
artificial nodes for Code objects and immutable arrays.

Fixes CodeSerializationCluster::Trace() to only push needed parts of
discarded code objects, instead of tracing them like full code objects.

Adds test cases to v8_snapshot_profile_writer_test that exercise
the following situations (both separately and together):

* Non-symbolic stack traces are enabled and code and function objects
  are dropped when not needed at runtime.

* Creation of the dispatch table is disabled.

TEST=vm/dart{,_2}/v8_snapshot_profile_writer_test

Original change's description:
> [vm] Fix V8 snapshot profile handling of the dispatch table.
>
> Fixes https://github.com/dart-lang/sdk/issues/45702.
>
> TEST=Tests listed in the issue above.
>
> Cq-Include-Trybots: luci.dart.try:vm-kernel-precomp-linux-debug-x64-try
> Change-Id: Ibf5e3ccf3828c01f9dda47de360314dabe8cb8a9
> Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/195272
> Reviewed-by: Daco Harkes <dacoharkes@google.com>
> Commit-Queue: Tess Strickland <sstrickl@google.com>

Change-Id: I8e7030267fe190079a8f68d00fe20bf7170e5719
Cq-Include-Trybots: luci.dart.try:vm-kernel-precomp-linux-debug-x64-try,vm-kernel-precomp-linux-product-x64-try,vm-kernel-precomp-mac-release-simarm64-try,vm-kernel-precomp-linux-debug-x64c-try,vm-kernel-precomp-obfuscate-linux-release-x64-try
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/195513
Reviewed-by: Daco Harkes <dacoharkes@google.com>
Commit-Queue: Tess Strickland <sstrickl@google.com>
diff --git a/pkg/expect/lib/expect.dart b/pkg/expect/lib/expect.dart
index e6cf6e5..9ad6e5b 100644
--- a/pkg/expect/lib/expect.dart
+++ b/pkg/expect/lib/expect.dart
@@ -169,6 +169,15 @@
   }
 
   /**
+   * Checks whether the Iterable [actual] is empty.
+   */
+  static void isEmpty(Iterable actual, [String reason = ""]) {
+    if (actual.isEmpty) return;
+    String msg = _getMessage(reason);
+    _fail("Expect.isEmpty(actual: <$actual>$msg) fails.");
+  }
+
+  /**
    * Checks whether the expected and actual values are identical
    * (using `identical`).
    */
diff --git a/runtime/platform/utils.cc b/runtime/platform/utils.cc
index 0799742..940e85a 100644
--- a/runtime/platform/utils.cc
+++ b/runtime/platform/utils.cc
@@ -50,35 +50,6 @@
 #endif
 }
 
-// TODO(koda): Compare to flsll call/intrinsic.
-int Utils::HighestBit(int64_t v) {
-  uint64_t x = static_cast<uint64_t>((v > 0) ? v : -v);
-  uint64_t t;
-  int r = 0;
-  if ((t = x >> 32) != 0) {
-    x = t;
-    r += 32;
-  }
-  if ((t = x >> 16) != 0) {
-    x = t;
-    r += 16;
-  }
-  if ((t = x >> 8) != 0) {
-    x = t;
-    r += 8;
-  }
-  if ((t = x >> 4) != 0) {
-    x = t;
-    r += 4;
-  }
-  if ((t = x >> 2) != 0) {
-    x = t;
-    r += 2;
-  }
-  if (x > 1) r += 1;
-  return r;
-}
-
 int Utils::CountLeadingZeros64(uint64_t x) {
 #if defined(ARCH_IS_32_BIT)
   const uint32_t x_hi = static_cast<uint32_t>(x >> 32);
diff --git a/runtime/platform/utils.h b/runtime/platform/utils.h
index a381505..a73d6d9 100644
--- a/runtime/platform/utils.h
+++ b/runtime/platform/utils.h
@@ -117,9 +117,36 @@
 #endif
   }
 
-  static int HighestBit(int64_t v);
+  // TODO(koda): Compare to flsll call/intrinsic.
+  static constexpr size_t HighestBit(int64_t v) {
+    uint64_t x = static_cast<uint64_t>((v > 0) ? v : -v);
+    uint64_t t = 0;
+    size_t r = 0;
+    if ((t = x >> 32) != 0) {
+      x = t;
+      r += 32;
+    }
+    if ((t = x >> 16) != 0) {
+      x = t;
+      r += 16;
+    }
+    if ((t = x >> 8) != 0) {
+      x = t;
+      r += 8;
+    }
+    if ((t = x >> 4) != 0) {
+      x = t;
+      r += 4;
+    }
+    if ((t = x >> 2) != 0) {
+      x = t;
+      r += 2;
+    }
+    if (x > 1) r += 1;
+    return r;
+  }
 
-  static int BitLength(int64_t value) {
+  static constexpr size_t BitLength(int64_t value) {
     // Flip bits if negative (-1 becomes 0).
     value ^= value >> (8 * sizeof(value) - 1);
     return (value == 0) ? 0 : (Utils::HighestBit(value) + 1);
diff --git a/runtime/tests/vm/dart/emit_aot_size_info_flag_test.dart b/runtime/tests/vm/dart/emit_aot_size_info_flag_test.dart
index 8c647eb0..229178f 100644
--- a/runtime/tests/vm/dart/emit_aot_size_info_flag_test.dart
+++ b/runtime/tests/vm/dart/emit_aot_size_info_flag_test.dart
@@ -9,6 +9,8 @@
 import 'package:expect/expect.dart';
 import 'package:path/path.dart' as path;
 
+import 'use_flag_test_helper.dart';
+
 main(List<String> args) async {
   if (!Platform.executable.endsWith("dart_precompiled_runtime")) {
     return; // Running in JIT: AOT binaries not available.
@@ -24,7 +26,7 @@
   final genSnapshot = path.join(buildDir, 'gen_snapshot');
   final aotRuntime = path.join(buildDir, 'dart_precompiled_runtime');
 
-  await withTempDir((String tempDir) async {
+  await withTempDir('emit_aot_size_info_flag', (String tempDir) async {
     final script = path.join(sdkDir, 'pkg/kernel/bin/dump.dart');
     final scriptDill = path.join(tempDir, 'kernel_dump.dill');
     final appHeapsnapshot = path.join(tempDir, 'app.heapsnapshot');
@@ -77,33 +79,3 @@
 Future<String> readFile(String file) {
   return new File(file).readAsString();
 }
-
-Future run(String executable, List<String> args) async {
-  print('Running $executable ${args.join(' ')}');
-
-  final result = await Process.run(executable, args);
-  final String stdout = result.stdout;
-  final String stderr = result.stderr;
-  if (stdout.isNotEmpty) {
-    print('stdout:');
-    print(stdout);
-  }
-  if (stderr.isNotEmpty) {
-    print('stderr:');
-    print(stderr);
-  }
-
-  if (result.exitCode != 0) {
-    throw 'Command failed with non-zero exit code (was ${result.exitCode})';
-  }
-}
-
-Future withTempDir(Future fun(String dir)) async {
-  final tempDir =
-      Directory.systemTemp.createTempSync('aot-size-info-flags-test');
-  try {
-    await fun(tempDir.path);
-  } finally {
-    tempDir.deleteSync(recursive: true);
-  }
-}
diff --git a/runtime/tests/vm/dart/v8_snapshot_profile_writer_test.dart b/runtime/tests/vm/dart/v8_snapshot_profile_writer_test.dart
index 0520b9d..aa7eac0 100644
--- a/runtime/tests/vm/dart/v8_snapshot_profile_writer_test.dart
+++ b/runtime/tests/vm/dart/v8_snapshot_profile_writer_test.dart
@@ -11,34 +11,61 @@
 
 import 'use_flag_test_helper.dart';
 
-test(
-    {required String dillPath,
-    required bool useAsm,
-    required bool useBare,
-    required bool stripFlag,
-    required bool stripUtil,
+// Used to ensure we don't have multiple equivalent calls to test.
+final _seenDescriptions = <String>{};
+
+Future<void> test(String dillPath,
+    {bool useAsm = false,
+    bool useBare = true,
+    bool forceDrops = false,
+    bool useDispatch = true,
+    bool stripUtil = false, // Note: forced if useAsm.
+    bool stripFlag = false, // Note: forced if !stripUtil (and thus !useAsm).
     bool disassemble = false}) async {
   // We don't assume forced disassembler support in Product mode, so skip any
   // disassembly test.
-  if (!const bool.fromEnvironment('dart.vm.product') && disassemble) return;
+  if (!const bool.fromEnvironment('dart.vm.product') && disassemble) {
+    return;
+  }
 
   // The assembler may add extra unnecessary information to the compiled
   // snapshot whether or not we generate DWARF information in the assembly, so
   // we force the use of a utility when generating assembly.
-  if (useAsm) Expect.isTrue(stripUtil);
+  if (useAsm) {
+    stripUtil = true;
+  }
 
   // We must strip the output in some way when generating ELF snapshots,
   // else the debugging information added will cause the test to fail.
-  if (!stripUtil) Expect.isTrue(stripFlag);
+  if (!stripUtil) {
+    stripFlag = true;
+  }
 
-  final tempDirPrefix = 'v8-snapshot-profile' +
-      (useAsm ? '-assembly' : '-elf') +
-      (useBare ? '-bare' : '-nonbare') +
-      (stripFlag ? '-intstrip' : '') +
-      (stripUtil ? '-extstrip' : '') +
-      (disassemble ? '-disassembled' : '');
+  final descriptionBuilder = StringBuffer()..write(useAsm ? 'assembly' : 'elf');
+  if (!useBare) {
+    descriptionBuilder.write('-nonbare');
+  }
+  if (forceDrops) {
+    descriptionBuilder.write('-dropped');
+  }
+  if (!useDispatch) {
+    descriptionBuilder.write('-nodispatch');
+  }
+  if (stripFlag) {
+    descriptionBuilder.write('-intstrip');
+  }
+  if (stripUtil) {
+    descriptionBuilder.write('-extstrip');
+  }
+  if (disassemble) {
+    descriptionBuilder.write('-disassembled');
+  }
 
-  await withTempDir(tempDirPrefix, (String tempDir) async {
+  final description = descriptionBuilder.toString();
+  Expect.isTrue(_seenDescriptions.add(description),
+      "test configuration $description would be run multiple times");
+
+  await withTempDir('v8-snapshot-profile-$description', (String tempDir) async {
     // Generate the snapshot profile.
     final profilePath = path.join(tempDir, 'profile.heapsnapshot');
     final snapshotPath = path.join(tempDir, 'test.snap');
@@ -46,6 +73,12 @@
       if (stripFlag) '--strip',
       useBare ? '--use-bare-instructions' : '--no-use-bare-instructions',
       "--write-v8-snapshot-profile-to=$profilePath",
+      if (forceDrops) ...[
+        '--dwarf-stack-traces',
+        '--no-retain-function-objects',
+        '--no-retain-code-objects'
+      ],
+      if (!useDispatch) '--no-use-table-dispatch',
       if (disassemble) '--disassemble',
       '--ignore-unrecognized-flags',
       dillPath,
@@ -77,6 +110,8 @@
       strippedPath = snapshotPath;
     }
 
+    print("Snapshot profile generated at $profilePath.");
+
     final profile =
         Snapshot.fromJson(jsonDecode(File(profilePath).readAsStringSync()));
 
@@ -84,39 +119,38 @@
     // reference to an some object but no other metadata about the object was
     // recorded. We should at least record the type for every object in the
     // graph (in some cases the shallow size can legitimately be 0, e.g. for
-    // "base objects").
+    // "base objects" not written to the snapshot or artificial nodes).
     for (final node in profile.nodes) {
-      Expect.notEquals("Unknown", node.type, "unknown node at ID ${node.id}");
+      Expect.notEquals("Unknown", node.type, "unknown node ${node}");
     }
 
+    final root = profile.nodeAt(0);
+    final reachable = <Node>{};
+
     // HeapSnapshotWorker.HeapSnapshot.calculateDistances (from HeapSnapshot.js)
-    // assumes that the root does not have more than one edge to any other node
+    // assumes that the graph root has at most one edge to any other node
     // (most likely an oversight).
-    final Set<int> roots = <int>{};
-    for (final edge in profile.nodeAt(0).edges) {
-      Expect.isTrue(roots.add(edge.target.index));
+    for (final edge in root.edges) {
+      Expect.isTrue(
+          reachable.add(edge.target),
+          "root\n\n$root\n\nhas multiple edges to node\n\n${edge.target}:\n\n"
+          "${root.edges.where((e) => e.target == edge.target).toList()}");
     }
 
-    // Check that all nodes are reachable from the root (index 0).
-    final Set<int> reachable = {0};
-    final dfs = <int>[0];
-    while (!dfs.isEmpty) {
-      final next = dfs.removeLast();
-      for (final edge in profile.nodeAt(next).edges) {
-        final target = edge.target;
-        if (!reachable.contains(target.index)) {
-          reachable.add(target.index);
-          dfs.add(target.index);
+    // Check that all other nodes are reachable from the root.
+    final stack = <Node>[...reachable];
+    while (!stack.isEmpty) {
+      final next = stack.removeLast();
+      for (final edge in next.edges) {
+        if (reachable.add(edge.target)) {
+          stack.add(edge.target);
         }
       }
     }
 
-    if (reachable.length != profile.nodeCount) {
-      for (final node in profile.nodes) {
-        Expect.isTrue(reachable.contains(node.index),
-            "unreachable node at ID ${node.id}");
-      }
-    }
+    final unreachable =
+        profile.nodes.skip(1).where((Node n) => !reachable.contains(n)).toSet();
+    Expect.isEmpty(unreachable);
 
     // Verify that the actual size of the snapshot is close to the sum of the
     // shallow sizes of all objects in the profile. They will not be exactly
@@ -124,25 +158,14 @@
     final actual = await File(strippedPath).length();
     final expected = profile.nodes.fold<int>(0, (size, n) => size + n.selfSize);
 
-    final bareUsed = useBare ? "bare" : "non-bare";
-    final fileType = useAsm ? "assembly" : "ELF";
-    String stripPrefix = "";
-    if (stripFlag && stripUtil) {
-      stripPrefix = "internally and externally stripped ";
-    } else if (stripFlag) {
-      stripPrefix = "internally stripped ";
-    } else if (stripUtil) {
-      stripPrefix = "externally stripped ";
-    }
-
     // See Elf::kPages in runtime/vm/elf.h.
-    final segmentAlignment = 16384;
+    final segmentAlignment = 16 * 1024;
     // Not every byte is accounted for by the snapshot profile, and data and
     // instruction segments are padded to an alignment boundary.
     final tolerance = 0.03 * actual + 2 * segmentAlignment;
 
-    Expect.approxEquals(expected, actual, tolerance,
-        "failed on $bareUsed $stripPrefix$fileType snapshot type.");
+    Expect.approxEquals(
+        expected, actual, tolerance, "failed on $description snapshot");
   });
 }
 
@@ -258,28 +281,22 @@
       _thisTestPath
     ]);
 
+    // Just as a reminder (these rules are applied in order inside test):
+    // If useAsm is true, then stripUtil is forced (as the assembler may add
+    // extra information that needs stripping).
+    // If stripUtil is false, then stripFlag is forced (as the output must be
+    // stripped in some way to remove DWARF information).
+
     // Test stripped ELF generation directly.
-    await test(
-        dillPath: dillPath,
-        stripFlag: true,
-        stripUtil: false,
-        useAsm: false,
-        useBare: false);
-    await test(
-        dillPath: dillPath,
-        stripFlag: true,
-        stripUtil: false,
-        useAsm: false,
-        useBare: true);
+    await test(dillPath);
+    await test(dillPath, useBare: false);
+    await test(dillPath, forceDrops: true);
+    await test(dillPath, forceDrops: true, useBare: false);
+    await test(dillPath, forceDrops: true, useDispatch: false);
+    await test(dillPath, forceDrops: true, useDispatch: false, useBare: false);
 
     // Regression test for dartbug.com/41149.
-    await test(
-        dillPath: dillPath,
-        stripFlag: true,
-        stripUtil: false,
-        useAsm: false,
-        useBare: false,
-        disassemble: true);
+    await test(dillPath, useBare: false, disassemble: true);
 
     // We neither generate assembly nor have a stripping utility on Windows.
     if (Platform.isWindows) {
@@ -292,18 +309,8 @@
       printSkip('ELF external stripping test');
     } else {
       // Test unstripped ELF generation that is then stripped externally.
-      await test(
-          dillPath: dillPath,
-          stripFlag: false,
-          stripUtil: true,
-          useAsm: false,
-          useBare: false);
-      await test(
-          dillPath: dillPath,
-          stripFlag: false,
-          stripUtil: true,
-          useAsm: false,
-          useBare: true);
+      await test(dillPath, stripUtil: true);
+      await test(dillPath, stripUtil: true, useBare: false);
     }
 
     // TODO(sstrickl): Currently we can't assemble for SIMARM64 on MacOSX.
@@ -314,31 +321,11 @@
       return;
     }
 
-    // Test stripped assembly generation that is then compiled and stripped.
-    await test(
-        dillPath: dillPath,
-        stripFlag: true,
-        stripUtil: true,
-        useAsm: true,
-        useBare: false);
-    await test(
-        dillPath: dillPath,
-        stripFlag: true,
-        stripUtil: true,
-        useAsm: true,
-        useBare: true);
     // Test unstripped assembly generation that is then compiled and stripped.
-    await test(
-        dillPath: dillPath,
-        stripFlag: false,
-        stripUtil: true,
-        useAsm: true,
-        useBare: false);
-    await test(
-        dillPath: dillPath,
-        stripFlag: false,
-        stripUtil: true,
-        useAsm: true,
-        useBare: true);
+    await test(dillPath, useAsm: true);
+    await test(dillPath, useAsm: true, useBare: false);
+    // Test stripped assembly generation that is then compiled and stripped.
+    await test(dillPath, useAsm: true, stripFlag: true);
+    await test(dillPath, useAsm: true, stripFlag: true, useBare: false);
   });
 }
diff --git a/runtime/tests/vm/dart_2/emit_aot_size_info_flag_test.dart b/runtime/tests/vm/dart_2/emit_aot_size_info_flag_test.dart
index 8c647eb0..229178f 100644
--- a/runtime/tests/vm/dart_2/emit_aot_size_info_flag_test.dart
+++ b/runtime/tests/vm/dart_2/emit_aot_size_info_flag_test.dart
@@ -9,6 +9,8 @@
 import 'package:expect/expect.dart';
 import 'package:path/path.dart' as path;
 
+import 'use_flag_test_helper.dart';
+
 main(List<String> args) async {
   if (!Platform.executable.endsWith("dart_precompiled_runtime")) {
     return; // Running in JIT: AOT binaries not available.
@@ -24,7 +26,7 @@
   final genSnapshot = path.join(buildDir, 'gen_snapshot');
   final aotRuntime = path.join(buildDir, 'dart_precompiled_runtime');
 
-  await withTempDir((String tempDir) async {
+  await withTempDir('emit_aot_size_info_flag', (String tempDir) async {
     final script = path.join(sdkDir, 'pkg/kernel/bin/dump.dart');
     final scriptDill = path.join(tempDir, 'kernel_dump.dill');
     final appHeapsnapshot = path.join(tempDir, 'app.heapsnapshot');
@@ -77,33 +79,3 @@
 Future<String> readFile(String file) {
   return new File(file).readAsString();
 }
-
-Future run(String executable, List<String> args) async {
-  print('Running $executable ${args.join(' ')}');
-
-  final result = await Process.run(executable, args);
-  final String stdout = result.stdout;
-  final String stderr = result.stderr;
-  if (stdout.isNotEmpty) {
-    print('stdout:');
-    print(stdout);
-  }
-  if (stderr.isNotEmpty) {
-    print('stderr:');
-    print(stderr);
-  }
-
-  if (result.exitCode != 0) {
-    throw 'Command failed with non-zero exit code (was ${result.exitCode})';
-  }
-}
-
-Future withTempDir(Future fun(String dir)) async {
-  final tempDir =
-      Directory.systemTemp.createTempSync('aot-size-info-flags-test');
-  try {
-    await fun(tempDir.path);
-  } finally {
-    tempDir.deleteSync(recursive: true);
-  }
-}
diff --git a/runtime/tests/vm/dart_2/v8_snapshot_profile_writer_test.dart b/runtime/tests/vm/dart_2/v8_snapshot_profile_writer_test.dart
index 3d88b62..7977a71 100644
--- a/runtime/tests/vm/dart_2/v8_snapshot_profile_writer_test.dart
+++ b/runtime/tests/vm/dart_2/v8_snapshot_profile_writer_test.dart
@@ -11,34 +11,61 @@
 
 import 'use_flag_test_helper.dart';
 
-test(
-    {String dillPath,
-    bool useAsm,
-    bool useBare,
-    bool stripFlag,
-    bool stripUtil,
+// Used to ensure we don't have multiple equivalent calls to test.
+final _seenDescriptions = <String>{};
+
+Future<void> test(String dillPath,
+    {bool useAsm = false,
+    bool useBare = true,
+    bool forceDrops = false,
+    bool useDispatch = true,
+    bool stripUtil = false, // Note: forced if useAsm.
+    bool stripFlag = false, // Note: forced if !stripUtil (and thus !useAsm).
     bool disassemble = false}) async {
   // We don't assume forced disassembler support in Product mode, so skip any
   // disassembly test.
-  if (!const bool.fromEnvironment('dart.vm.product') && disassemble) return;
+  if (!const bool.fromEnvironment('dart.vm.product') && disassemble) {
+    return;
+  }
 
   // The assembler may add extra unnecessary information to the compiled
   // snapshot whether or not we generate DWARF information in the assembly, so
   // we force the use of a utility when generating assembly.
-  if (useAsm) Expect.isTrue(stripUtil);
+  if (useAsm) {
+    stripUtil = true;
+  }
 
   // We must strip the output in some way when generating ELF snapshots,
   // else the debugging information added will cause the test to fail.
-  if (!stripUtil) Expect.isTrue(stripFlag);
+  if (!stripUtil) {
+    stripFlag = true;
+  }
 
-  final tempDirPrefix = 'v8-snapshot-profile' +
-      (useAsm ? '-assembly' : '-elf') +
-      (useBare ? '-bare' : '-nonbare') +
-      (stripFlag ? '-intstrip' : '') +
-      (stripUtil ? '-extstrip' : '') +
-      (disassemble ? '-disassembled' : '');
+  final descriptionBuilder = StringBuffer()..write(useAsm ? 'assembly' : 'elf');
+  if (!useBare) {
+    descriptionBuilder.write('-nonbare');
+  }
+  if (forceDrops) {
+    descriptionBuilder.write('-dropped');
+  }
+  if (!useDispatch) {
+    descriptionBuilder.write('-nodispatch');
+  }
+  if (stripFlag) {
+    descriptionBuilder.write('-intstrip');
+  }
+  if (stripUtil) {
+    descriptionBuilder.write('-extstrip');
+  }
+  if (disassemble) {
+    descriptionBuilder.write('-disassembled');
+  }
 
-  await withTempDir(tempDirPrefix, (String tempDir) async {
+  final description = descriptionBuilder.toString();
+  Expect.isTrue(_seenDescriptions.add(description),
+      "test configuration $description would be run multiple times");
+
+  await withTempDir('v8-snapshot-profile-$description', (String tempDir) async {
     // Generate the snapshot profile.
     final profilePath = path.join(tempDir, 'profile.heapsnapshot');
     final snapshotPath = path.join(tempDir, 'test.snap');
@@ -46,6 +73,12 @@
       if (stripFlag) '--strip',
       useBare ? '--use-bare-instructions' : '--no-use-bare-instructions',
       "--write-v8-snapshot-profile-to=$profilePath",
+      if (forceDrops) ...[
+        '--dwarf-stack-traces',
+        '--no-retain-function-objects',
+        '--no-retain-code-objects'
+      ],
+      if (!useDispatch) '--no-use-table-dispatch',
       if (disassemble) '--disassemble',
       '--ignore-unrecognized-flags',
       dillPath,
@@ -77,6 +110,8 @@
       strippedPath = snapshotPath;
     }
 
+    print("Snapshot profile generated at $profilePath.");
+
     final profile =
         Snapshot.fromJson(jsonDecode(File(profilePath).readAsStringSync()));
 
@@ -84,39 +119,38 @@
     // reference to an some object but no other metadata about the object was
     // recorded. We should at least record the type for every object in the
     // graph (in some cases the shallow size can legitimately be 0, e.g. for
-    // "base objects").
+    // "base objects" not written to the snapshot or artificial nodes).
     for (final node in profile.nodes) {
-      Expect.notEquals("Unknown", node.type, "unknown node at ID ${node.id}");
+      Expect.notEquals("Unknown", node.type, "unknown node ${node}");
     }
 
+    final root = profile.nodeAt(0);
+    final reachable = <Node>{};
+
     // HeapSnapshotWorker.HeapSnapshot.calculateDistances (from HeapSnapshot.js)
-    // assumes that the root does not have more than one edge to any other node
+    // assumes that the graph root has at most one edge to any other node
     // (most likely an oversight).
-    final Set<int> roots = <int>{};
-    for (final edge in profile.nodeAt(0).edges) {
-      Expect.isTrue(roots.add(edge.target.index));
+    for (final edge in root.edges) {
+      Expect.isTrue(
+          reachable.add(edge.target),
+          "root\n\n$root\n\nhas multiple edges to node\n\n${edge.target}:\n\n"
+          "${root.edges.where((e) => e.target == edge.target).toList()}");
     }
 
-    // Check that all nodes are reachable from the root (index 0).
-    final Set<int> reachable = {0};
-    final dfs = <int>[0];
-    while (!dfs.isEmpty) {
-      final next = dfs.removeLast();
-      for (final edge in profile.nodeAt(next).edges) {
-        final target = edge.target;
-        if (!reachable.contains(target.index)) {
-          reachable.add(target.index);
-          dfs.add(target.index);
+    // Check that all other nodes are reachable from the root.
+    final stack = <Node>[...reachable];
+    while (!stack.isEmpty) {
+      final next = stack.removeLast();
+      for (final edge in next.edges) {
+        if (reachable.add(edge.target)) {
+          stack.add(edge.target);
         }
       }
     }
 
-    if (reachable.length != profile.nodeCount) {
-      for (final node in profile.nodes) {
-        Expect.isTrue(reachable.contains(node.index),
-            "unreachable node at ID ${node.id}");
-      }
-    }
+    final unreachable =
+        profile.nodes.skip(1).where((Node n) => !reachable.contains(n)).toSet();
+    Expect.isEmpty(unreachable);
 
     // Verify that the actual size of the snapshot is close to the sum of the
     // shallow sizes of all objects in the profile. They will not be exactly
@@ -124,25 +158,14 @@
     final actual = await File(strippedPath).length();
     final expected = profile.nodes.fold<int>(0, (size, n) => size + n.selfSize);
 
-    final bareUsed = useBare ? "bare" : "non-bare";
-    final fileType = useAsm ? "assembly" : "ELF";
-    String stripPrefix = "";
-    if (stripFlag && stripUtil) {
-      stripPrefix = "internally and externally stripped ";
-    } else if (stripFlag) {
-      stripPrefix = "internally stripped ";
-    } else if (stripUtil) {
-      stripPrefix = "externally stripped ";
-    }
-
     // See Elf::kPages in runtime/vm/elf.h.
-    final segmentAlignment = 16384;
+    final segmentAlignment = 16 * 1024;
     // Not every byte is accounted for by the snapshot profile, and data and
     // instruction segments are padded to an alignment boundary.
     final tolerance = 0.03 * actual + 2 * segmentAlignment;
 
-    Expect.approxEquals(expected, actual, tolerance,
-        "failed on $bareUsed $stripPrefix$fileType snapshot type.");
+    Expect.approxEquals(
+        expected, actual, tolerance, "failed on $description snapshot");
   });
 }
 
@@ -254,28 +277,22 @@
       _thisTestPath
     ]);
 
+    // Just as a reminder (these rules are applied in order inside test):
+    // If useAsm is true, then stripUtil is forced (as the assembler may add
+    // extra information that needs stripping).
+    // If stripUtil is false, then stripFlag is forced (as the output must be
+    // stripped in some way to remove DWARF information).
+
     // Test stripped ELF generation directly.
-    await test(
-        dillPath: dillPath,
-        stripFlag: true,
-        stripUtil: false,
-        useAsm: false,
-        useBare: false);
-    await test(
-        dillPath: dillPath,
-        stripFlag: true,
-        stripUtil: false,
-        useAsm: false,
-        useBare: true);
+    await test(dillPath);
+    await test(dillPath, useBare: false);
+    await test(dillPath, forceDrops: true);
+    await test(dillPath, forceDrops: true, useBare: false);
+    await test(dillPath, forceDrops: true, useDispatch: false);
+    await test(dillPath, forceDrops: true, useDispatch: false, useBare: false);
 
     // Regression test for dartbug.com/41149.
-    await test(
-        dillPath: dillPath,
-        stripFlag: true,
-        stripUtil: false,
-        useAsm: false,
-        useBare: false,
-        disassemble: true);
+    await test(dillPath, useBare: false, disassemble: true);
 
     // We neither generate assembly nor have a stripping utility on Windows.
     if (Platform.isWindows) {
@@ -288,18 +305,8 @@
       printSkip('ELF external stripping test');
     } else {
       // Test unstripped ELF generation that is then stripped externally.
-      await test(
-          dillPath: dillPath,
-          stripFlag: false,
-          stripUtil: true,
-          useAsm: false,
-          useBare: false);
-      await test(
-          dillPath: dillPath,
-          stripFlag: false,
-          stripUtil: true,
-          useAsm: false,
-          useBare: true);
+      await test(dillPath, stripUtil: true);
+      await test(dillPath, stripUtil: true, useBare: false);
     }
 
     // TODO(sstrickl): Currently we can't assemble for SIMARM64 on MacOSX.
@@ -310,31 +317,11 @@
       return;
     }
 
-    // Test stripped assembly generation that is then compiled and stripped.
-    await test(
-        dillPath: dillPath,
-        stripFlag: true,
-        stripUtil: true,
-        useAsm: true,
-        useBare: false);
-    await test(
-        dillPath: dillPath,
-        stripFlag: true,
-        stripUtil: true,
-        useAsm: true,
-        useBare: true);
     // Test unstripped assembly generation that is then compiled and stripped.
-    await test(
-        dillPath: dillPath,
-        stripFlag: false,
-        stripUtil: true,
-        useAsm: true,
-        useBare: false);
-    await test(
-        dillPath: dillPath,
-        stripFlag: false,
-        stripUtil: true,
-        useAsm: true,
-        useBare: true);
+    await test(dillPath, useAsm: true);
+    await test(dillPath, useAsm: true, useBare: false);
+    // Test stripped assembly generation that is then compiled and stripped.
+    await test(dillPath, useAsm: true, stripFlag: true);
+    await test(dillPath, useAsm: true, stripFlag: true, useBare: false);
   });
 }
diff --git a/runtime/vm/clustered_snapshot.cc b/runtime/vm/clustered_snapshot.cc
index 89f9681..e75d799 100644
--- a/runtime/vm/clustered_snapshot.cc
+++ b/runtime/vm/clustered_snapshot.cc
@@ -28,6 +28,7 @@
 #include "vm/stub_code.h"
 #include "vm/symbols.h"
 #include "vm/timeline.h"
+#include "vm/v8_snapshot_writer.h"
 #include "vm/version.h"
 #include "vm/zone_text_buffer.h"
 
@@ -1747,19 +1748,6 @@
       }
     }
 
-    s->Push(code->untag()->owner_);
-    s->Push(code->untag()->exception_handlers_);
-    s->Push(code->untag()->pc_descriptors_);
-    s->Push(code->untag()->catch_entry_);
-    if (s->InCurrentLoadingUnit(code->untag()->compressed_stackmaps_)) {
-      s->Push(code->untag()->compressed_stackmaps_);
-    }
-    if (!FLAG_precompiled_mode || !FLAG_dwarf_stack_traces_mode) {
-      s->Push(code->untag()->inlined_id_to_function_);
-      if (s->InCurrentLoadingUnit(code->untag()->code_source_map_)) {
-        s->Push(code->untag()->code_source_map_);
-      }
-    }
     if (s->kind() == Snapshot::kFullJIT) {
       s->Push(code->untag()->deopt_info_array_);
       s->Push(code->untag()->static_calls_target_table_);
@@ -1793,6 +1781,29 @@
       UNREACHABLE();
 #endif
     }
+
+    if (s->InCurrentLoadingUnit(code->untag()->compressed_stackmaps_)) {
+      s->Push(code->untag()->compressed_stackmaps_);
+    }
+
+    if (Code::IsDiscarded(code)) {
+      ASSERT(s->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions &&
+             FLAG_dwarf_stack_traces_mode && !FLAG_retain_code_objects);
+      // Only object pool and static call table entries and the compressed
+      // stack maps should be pushed.
+      return;
+    }
+
+    s->Push(code->untag()->owner_);
+    s->Push(code->untag()->exception_handlers_);
+    s->Push(code->untag()->pc_descriptors_);
+    s->Push(code->untag()->catch_entry_);
+    if (!FLAG_precompiled_mode || !FLAG_dwarf_stack_traces_mode) {
+      s->Push(code->untag()->inlined_id_to_function_);
+      if (s->InCurrentLoadingUnit(code->untag()->code_source_map_)) {
+        s->Push(code->untag()->code_source_map_);
+      }
+    }
 #if !defined(PRODUCT)
     s->Push(code->untag()->return_address_metadata_);
     if (FLAG_code_comments) {
@@ -2020,7 +2031,6 @@
   GrowableArray<CodePtr>* objects() { return &objects_; }
   GrowableArray<CodePtr>* deferred_objects() { return &deferred_objects_; }
 
- private:
   static const char* MakeDisambiguatedCodeName(Serializer* s, CodePtr c) {
     if (s->profile_writer() == nullptr) {
       return nullptr;
@@ -2034,6 +2044,7 @@
             Object::NameVisibility::kInternalName));
   }
 
+ private:
   GrowableArray<CodePtr> objects_;
   GrowableArray<CodePtr> deferred_objects_;
   Array& array_;
@@ -5586,17 +5597,15 @@
     if (!should_write_symbols_ && s->profile_writer() != nullptr) {
       // If writing V8 snapshot profile create an artifical node representing
       // VM isolate symbol table.
+      ASSERT(!s->IsReachable(symbols_.ptr()));
       s->AssignArtificialRef(symbols_.ptr());
       const auto& symbols_snapshot_id = s->GetProfileId(symbols_.ptr());
-      s->profile_writer()->AddRoot(symbols_snapshot_id, "vm_symbols");
-      s->profile_writer()->SetObjectType(symbols_snapshot_id, "Symbols");
+      s->profile_writer()->SetObjectTypeAndName(symbols_snapshot_id, "Symbols",
+                                                "vm_symbols");
+      s->profile_writer()->AddRoot(symbols_snapshot_id);
       for (intptr_t i = 0; i < symbols_.Length(); i++) {
         s->profile_writer()->AttributeReferenceTo(
-            symbols_snapshot_id,
-            {
-                V8SnapshotProfileWriter::Reference::kElement,
-                {.offset = i},
-            },
+            symbols_snapshot_id, V8SnapshotProfileWriter::Reference::Element(i),
             s->GetProfileId(symbols_.At(i)));
       }
     }
@@ -6096,9 +6105,6 @@
   num_base_objects_++;
 
   if ((profile_writer_ != nullptr) && (type != nullptr)) {
-    if (name == nullptr) {
-      name = "<base object>";
-    }
     const auto& profile_id = GetProfileId(base_object);
     profile_writer_->SetObjectTypeAndName(profile_id, type, name);
     profile_writer_->AddRoot(profile_id);
@@ -6120,12 +6126,14 @@
 }
 
 intptr_t Serializer::AssignArtificialRef(ObjectPtr object) {
-  ASSERT(!object.IsHeapObject() || !object.IsInstructions());
-  ASSERT(heap_->GetObjectId(object) == kUnreachableReference);
   const intptr_t ref = -(next_ref_index_++);
   ASSERT(IsArtificialReference(ref));
-  heap_->SetObjectId(object, ref);
-  ASSERT(heap_->GetObjectId(object) == ref);
+  if (object != nullptr) {
+    ASSERT(!object.IsHeapObject() || !object.IsInstructions());
+    ASSERT(heap_->GetObjectId(object) == kUnreachableReference);
+    heap_->SetObjectId(object, ref);
+    ASSERT(heap_->GetObjectId(object) == ref);
+  }
   return ref;
 }
 
@@ -6141,12 +6149,16 @@
     ObjectPtr object) const {
   // Instructions are handled separately.
   ASSERT(!object->IsHeapObject() || !object->IsInstructions());
-  intptr_t heap_id = UnsafeRefId(object);
+  return GetProfileId(UnsafeRefId(object));
+}
+
+V8SnapshotProfileWriter::ObjectId Serializer::GetProfileId(
+    intptr_t heap_id) const {
   if (IsArtificialReference(heap_id)) {
-    return {V8SnapshotProfileWriter::kArtificial, -heap_id};
+    return {IdSpace::kArtificial, -heap_id};
   }
   ASSERT(IsAllocatedReference(heap_id));
-  return {V8SnapshotProfileWriter::kSnapshot, heap_id};
+  return {IdSpace::kSnapshot, heap_id};
 }
 
 void Serializer::AttributeReference(
@@ -6159,7 +6171,8 @@
   if (object->IsHeapObject() && object->IsWeakSerializationReference()) {
     const auto& wsr = WeakSerializationReference::RawCast(object);
     const auto& target = wsr->untag()->target();
-    if (!CreateArtificialNodeIfNeeded(wsr) && HasArtificialRef(target)) {
+    const bool wsr_reachable = !CreateArtificialNodeIfNeeded(wsr);
+    if (wsr_reachable && HasArtificialRef(target)) {
       // The target has artificial information used for snapshot analysis and
       // the replacement is part of the snapshot, so write information for both.
       const auto& replacement = wsr->untag()->replacement();
@@ -6168,12 +6181,11 @@
           GetProfileId(replacement));
       return;
     }
-    // Either the target of the WSR is strongly referenced or the WSR itself is
-    // unreachable, in which case it shares an artificial object ID with the
-    // target due to CreateArtificialNodeIfNeeded, so fall through.
-    ASSERT(HasRef(target) || HasArtificialRef(wsr));
-  } else if (object_currently_writing_.id_.first ==
-             V8SnapshotProfileWriter::kArtificial) {
+    // The replacement isn't used, as either the target is strongly referenced
+    // or the WSR itself is unreachable, so fall through to attributing a
+    // reference to the WSR (which shares a profile ID with the target).
+    ASSERT(GetProfileId(wsr) == GetProfileId(target));
+  } else if (object_currently_writing_.id_.IsArtificial()) {
     // We may need to recur when writing members of artificial nodes in
     // CreateArtificialNodeIfNeeded.
     CreateArtificialNodeIfNeeded(object);
@@ -6214,21 +6226,6 @@
     Serializer* s,
     const char* type,
     ObjectPtr obj,
-    StringPtr name) {
-  const char* name_str = nullptr;
-  if (name != nullptr) {
-    REUSABLE_STRING_HANDLESCOPE(s->thread());
-    String& str = reused_string_handle.Handle();
-    str = name;
-    name_str = str.ToCString();
-  }
-  return ReserveId(s, type, obj, name_str);
-}
-
-V8SnapshotProfileWriter::ObjectId Serializer::WritingObjectScope::ReserveId(
-    Serializer* s,
-    const char* type,
-    ObjectPtr obj,
     const char* name) {
   if (s->profile_writer_ == nullptr) {
     return V8SnapshotProfileWriter::kArtificialRootId;
@@ -6247,10 +6244,7 @@
       }
       case kOneByteStringCid:
       case kTwoByteStringCid: {
-        REUSABLE_STRING_HANDLESCOPE(s->thread());
-        String& str = reused_string_handle.Handle();
-        str = String::RawCast(obj);
-        name = str.ToCString();
+        name = String::ToCString(s->thread(), String::RawCast(obj));
         break;
       }
     }
@@ -6294,20 +6288,12 @@
     return false;
   }
   ASSERT_EQUAL(id, kUnreachableReference);
-  id = AssignArtificialRef(obj);
-
-  auto property = [](const char* name) -> V8SnapshotProfileWriter::Reference {
-    return {V8SnapshotProfileWriter::Reference::kProperty, {.name = name}};
-  };
-  auto element = [](intptr_t index) -> V8SnapshotProfileWriter::Reference {
-    return {V8SnapshotProfileWriter::Reference::kElement, {.offset = index}};
-  };
 
   const char* type = nullptr;
-  StringPtr name_string = nullptr;
   const char* name = nullptr;
   GrowableArray<std::pair<ObjectPtr, V8SnapshotProfileWriter::Reference>> links;
-  switch (obj->GetClassIdMayBeSmi()) {
+  const classid_t cid = obj->GetClassIdMayBeSmi();
+  switch (cid) {
     // For profiling static call target tables in AOT mode.
     case kSmiCid: {
       type = "Smi";
@@ -6324,31 +6310,43 @@
           auto const elem = pool->untag()->data()[i].raw_obj_;
           // Elements should be reachable from the global object pool.
           ASSERT(HasRef(elem));
-          links.Add({elem, element(i)});
+          links.Add({elem, V8SnapshotProfileWriter::Reference::Element(i)});
         }
       }
       break;
     }
-    // For profiling static call target tables in AOT mode.
+    // For profiling static call target tables and the dispatch table in AOT.
+    case kImmutableArrayCid:
     case kArrayCid: {
       type = "Array";
       auto const array = Array::RawCast(obj);
       for (intptr_t i = 0, n = Smi::Value(array->untag()->length()); i < n;
            i++) {
         ObjectPtr elem = array->untag()->data()[i];
-        links.Add({elem, element(i)});
+        links.Add({elem, V8SnapshotProfileWriter::Reference::Element(i)});
       }
       break;
     }
+    // For profiling the dispatch table.
+    case kCodeCid: {
+      type = "Code";
+      auto const code = Code::RawCast(obj);
+      name = CodeSerializationCluster::MakeDisambiguatedCodeName(this, code);
+      links.Add({code->untag()->owner(),
+                 V8SnapshotProfileWriter::Reference::Property("owner_")});
+      break;
+    }
     case kFunctionCid: {
       FunctionPtr func = static_cast<FunctionPtr>(obj);
       type = "Function";
       name = FunctionSerializationCluster::MakeDisambiguatedFunctionName(this,
                                                                          func);
-      links.Add({func->untag()->owner(), property("owner_")});
+      links.Add({func->untag()->owner(),
+                 V8SnapshotProfileWriter::Reference::Property("owner_")});
       ObjectPtr data = func->untag()->data();
       if (data->GetClassId() == kClosureDataCid) {
-        links.Add({func->untag()->data(), property("data_")});
+        links.Add(
+            {data, V8SnapshotProfileWriter::Reference::Property("data_")});
       }
       break;
     }
@@ -6356,40 +6354,37 @@
       auto data = static_cast<ClosureDataPtr>(obj);
       type = "ClosureData";
       links.Add(
-          {data->untag()->parent_function(), property("parent_function_")});
+          {data->untag()->parent_function(),
+           V8SnapshotProfileWriter::Reference::Property("parent_function_")});
       break;
     }
     case kClassCid: {
       ClassPtr cls = static_cast<ClassPtr>(obj);
       type = "Class";
-      name_string = cls->untag()->name();
-      links.Add({cls->untag()->library(), property("library_")});
+      name = String::ToCString(thread(), cls->untag()->name());
+      links.Add({cls->untag()->library(),
+                 V8SnapshotProfileWriter::Reference::Property("library_")});
       break;
     }
     case kPatchClassCid: {
       PatchClassPtr patch_cls = static_cast<PatchClassPtr>(obj);
       type = "PatchClass";
       links.Add(
-          {patch_cls->untag()->patched_class(), property("patched_class_")});
+          {patch_cls->untag()->patched_class(),
+           V8SnapshotProfileWriter::Reference::Property("patched_class_")});
       break;
     }
     case kLibraryCid: {
       LibraryPtr lib = static_cast<LibraryPtr>(obj);
       type = "Library";
-      name_string = lib->untag()->url();
+      name = String::ToCString(thread(), lib->untag()->url());
       break;
     }
     default:
-      UNREACHABLE();
+      FATAL("Request to create artificial node for object with cid %d", cid);
   }
 
-  if (name_string != nullptr) {
-    REUSABLE_STRING_HANDLESCOPE(thread());
-    String& str = reused_string_handle.Handle();
-    str = name_string;
-    name = str.ToCString();
-  }
-
+  id = AssignArtificialRef(obj);
   Serializer::WritingObjectScope scope(this, type, obj, name);
   for (const auto& link : links) {
     AttributeReference(link.first, link.second);
@@ -6705,17 +6700,13 @@
   const intptr_t offset = image_writer_->GetTextOffsetFor(instr, code);
 #if defined(DART_PRECOMPILER)
   if (profile_writer_ != nullptr) {
-    ASSERT(IsAllocatedReference(object_currently_writing_.id_.second));
-    const auto offset_space = vm_ ? V8SnapshotProfileWriter::kVmText
-                                  : V8SnapshotProfileWriter::kIsolateText;
-    const V8SnapshotProfileWriter::ObjectId to_object(offset_space, offset);
+    ASSERT(object_currently_writing_.id_ !=
+           V8SnapshotProfileWriter::kArtificialRootId);
+    const auto offset_space = vm_ ? IdSpace::kVmText : IdSpace::kIsolateText;
     profile_writer_->AttributeReferenceTo(
         object_currently_writing_.id_,
-        {
-            V8SnapshotProfileWriter::Reference::kProperty,
-            {.name = "<instructions>"},
-        },
-        to_object);
+        V8SnapshotProfileWriter::Reference::Property("<instructions>"),
+        {offset_space, offset});
   }
 
   if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
@@ -6745,22 +6736,16 @@
 }
 
 void Serializer::TraceDataOffset(uint32_t offset) {
-  if (profile_writer_ != nullptr) {
-    // ROData cannot be roots.
-    ASSERT(IsAllocatedReference(object_currently_writing_.id_.second));
-    auto offset_space = vm_ ? V8SnapshotProfileWriter::kVmData
-                            : V8SnapshotProfileWriter::kIsolateData;
-    V8SnapshotProfileWriter::ObjectId to_object = {offset_space, offset};
-    // TODO(sjindel): Give this edge a more appropriate type than element
-    // (internal, maybe?).
-    profile_writer_->AttributeReferenceTo(
-        object_currently_writing_.id_,
-        {
-            V8SnapshotProfileWriter::Reference::kElement,
-            {.offset = 0},
-        },
-        to_object);
-  }
+  if (profile_writer_ == nullptr) return;
+  // ROData cannot be roots.
+  ASSERT(object_currently_writing_.id_ !=
+         V8SnapshotProfileWriter::kArtificialRootId);
+  auto offset_space = vm_ ? IdSpace::kVmData : IdSpace::kIsolateData;
+  // TODO(sjindel): Give this edge a more appropriate type than element
+  // (internal, maybe?).
+  profile_writer_->AttributeReferenceTo(
+      object_currently_writing_.id_,
+      V8SnapshotProfileWriter::Reference::Element(0), {offset_space, offset});
 }
 
 uint32_t Serializer::GetDataOffset(ObjectPtr object) const {
@@ -7094,13 +7079,25 @@
 #if defined(DART_PRECOMPILER)
   if (kind() != Snapshot::kFullAOT) return;
 
-  AssignArtificialRef(entries.ptr());
-  const auto& dispatch_table_snapshot_id = GetProfileId(entries.ptr());
+  // Create an artificial node to which the bytes should be attributed. We
+  // don't attribute them to entries.ptr(), as we don't want to attribute the
+  // bytes for printing out a length of 0 to Object::null() when the dispatch
+  // table is empty.
+  const intptr_t profile_ref = AssignArtificialRef();
+  const auto& dispatch_table_profile_id = GetProfileId(profile_ref);
   if (profile_writer_ != nullptr) {
-    profile_writer_->AddRoot(dispatch_table_snapshot_id, "dispatch_table");
-    profile_writer_->SetObjectType(dispatch_table_snapshot_id, "DispatchTable");
+    profile_writer_->SetObjectTypeAndName(dispatch_table_profile_id,
+                                          "DispatchTable", "dispatch_table");
+    profile_writer_->AddRoot(dispatch_table_profile_id);
   }
-  WritingObjectScope scope(this, dispatch_table_snapshot_id);
+  WritingObjectScope scope(this, dispatch_table_profile_id);
+  if (profile_writer_ != nullptr) {
+    // We'll write the Array object as a property of the artificial dispatch
+    // table node, so Code objects otherwise unreferenced will have it as an
+    // ancestor.
+    CreateArtificialNodeIfNeeded(entries.ptr());
+    AttributePropertyRef(entries.ptr(), "<code entries>");
+  }
 
   const intptr_t bytes_before = bytes_written();
   const intptr_t table_length = entries.IsNull() ? 0 : entries.Length();
@@ -7190,23 +7187,6 @@
     Write(repeat_count);
   }
   dispatch_table_size_ = bytes_written() - bytes_before;
-
-  // If any bytes were written for the dispatch table, add the elements of
-  // the dispatch table in the profile.
-  if (profile_writer_ != nullptr && !entries.IsNull()) {
-    for (intptr_t i = 0; i < entries.Length(); i++) {
-      auto const code = Code::RawCast(entries.At(i));
-      if (code == Code::null()) continue;
-      profile_writer_->AttributeReferenceTo(
-          dispatch_table_snapshot_id,
-          {
-              V8SnapshotProfileWriter::Reference::kElement,
-              {.offset = i},
-          },
-          GetProfileId(code));
-    }
-  }
-
 #endif  // defined(DART_PRECOMPILER)
 }
 
diff --git a/runtime/vm/clustered_snapshot.h b/runtime/vm/clustered_snapshot.h
index ebaa3a4..a7a1e6a 100644
--- a/runtime/vm/clustered_snapshot.h
+++ b/runtime/vm/clustered_snapshot.h
@@ -17,7 +17,6 @@
 #include "vm/object.h"
 #include "vm/raw_object_fields.h"
 #include "vm/snapshot.h"
-#include "vm/v8_snapshot_writer.h"
 #include "vm/version.h"
 
 #if defined(DEBUG)
@@ -213,7 +212,7 @@
                      const char* type = nullptr,
                      const char* name = nullptr);
   intptr_t AssignRef(ObjectPtr object);
-  intptr_t AssignArtificialRef(ObjectPtr object);
+  intptr_t AssignArtificialRef(ObjectPtr object = nullptr);
 
   void Push(ObjectPtr object);
 
@@ -257,9 +256,13 @@
                        const char* type,
                        ObjectPtr object,
                        StringPtr name)
-        : WritingObjectScope(serializer,
-                             ReserveId(serializer, type, object, name),
-                             object) {}
+        : WritingObjectScope(
+              serializer,
+              ReserveId(serializer,
+                        type,
+                        object,
+                        String::ToCString(serializer->thread(), name)),
+              object) {}
 
     WritingObjectScope(Serializer* serializer,
                        const char* type,
@@ -284,11 +287,6 @@
     static V8SnapshotProfileWriter::ObjectId ReserveId(Serializer* serializer,
                                                        const char* type,
                                                        ObjectPtr object,
-                                                       StringPtr name);
-
-    static V8SnapshotProfileWriter::ObjectId ReserveId(Serializer* serializer,
-                                                       const char* type,
-                                                       ObjectPtr object,
                                                        const char* name);
 
    private:
@@ -317,6 +315,7 @@
   void Align(intptr_t alignment) { stream_->Align(alignment); }
 
   V8SnapshotProfileWriter::ObjectId GetProfileId(ObjectPtr object) const;
+  V8SnapshotProfileWriter::ObjectId GetProfileId(intptr_t ref) const;
 
   void WriteRootRef(ObjectPtr object, const char* name = nullptr) {
     intptr_t id = RefId(object);
@@ -332,8 +331,8 @@
                           const V8SnapshotProfileWriter::Reference& reference);
 
   void AttributeElementRef(ObjectPtr object, intptr_t index) {
-    AttributeReference(object, {V8SnapshotProfileWriter::Reference::kElement,
-                                {.offset = index}});
+    AttributeReference(object,
+                       V8SnapshotProfileWriter::Reference::Element(index));
   }
 
   void WriteElementRef(ObjectPtr object, intptr_t index) {
@@ -342,8 +341,8 @@
   }
 
   void AttributePropertyRef(ObjectPtr object, const char* property) {
-    AttributeReference(object, {V8SnapshotProfileWriter::Reference::kProperty,
-                                {.name = property}});
+    AttributeReference(object,
+                       V8SnapshotProfileWriter::Reference::Property(property));
   }
 
   void WritePropertyRef(ObjectPtr object, const char* property) {
diff --git a/runtime/vm/elf.cc b/runtime/vm/elf.cc
index b6abf76..7f17df5 100644
--- a/runtime/vm/elf.cc
+++ b/runtime/vm/elf.cc
@@ -535,8 +535,7 @@
         dynamic_(allocate),
         text_(zone, 128),
         text_indices_(zone) {
-    text_.AddChar('\0');
-    text_indices_.Insert({"", 1});
+    AddString("");
   }
 
   intptr_t FileSize() const { return text_.length(); }
@@ -549,11 +548,13 @@
 
   intptr_t AddString(const char* str) {
     ASSERT(str != nullptr);
-    if (auto const kv = text_indices_.Lookup(str)) return kv->value - 1;
+    if (auto const kv = text_indices_.Lookup(str)) {
+      return kv->value;
+    }
     intptr_t offset = text_.length();
     text_.AddString(str);
     text_.AddChar('\0');
-    text_indices_.Insert({str, offset + 1});
+    text_indices_.Insert({str, offset});
     return offset;
   }
 
@@ -562,13 +563,12 @@
     return text_.buffer() + index;
   }
   intptr_t Lookup(const char* str) const {
-    return text_indices_.LookupValue(str) - 1;
+    return text_indices_.LookupValue(str);
   }
 
   const bool dynamic_;
   ZoneTextBuffer text_;
-  // To avoid kNoValue for intptr_t (0), we store an index n as n + 1.
-  CStringMap<intptr_t> text_indices_;
+  CStringIntMap text_indices_;
 };
 
 class Symbol : public ZoneAllocated {
diff --git a/runtime/vm/hash.h b/runtime/vm/hash.h
index 7365500..955bb57 100644
--- a/runtime/vm/hash.h
+++ b/runtime/vm/hash.h
@@ -16,7 +16,7 @@
   return hash;
 }
 
-inline uint32_t FinalizeHash(uint32_t hash, intptr_t hashbits) {
+inline uint32_t FinalizeHash(uint32_t hash, intptr_t hashbits = kBitsPerInt32) {
   hash += hash << 3;
   hash ^= hash >> 11;  // Logical shift, unsigned hash.
   hash += hash << 15;
diff --git a/runtime/vm/hash_map.h b/runtime/vm/hash_map.h
index 2d6f6a0..3273f4c 100644
--- a/runtime/vm/hash_map.h
+++ b/runtime/vm/hash_map.h
@@ -542,47 +542,56 @@
   DISALLOW_COPY_AND_ASSIGN(ZoneCStringSet);
 };
 
-template <typename V>
-class CStringMapKeyValueTrait : public RawPointerKeyValueTrait<const char, V> {
- public:
-  typedef typename RawPointerKeyValueTrait<const char, V>::Key Key;
-  typedef typename RawPointerKeyValueTrait<const char, V>::Value Value;
-  typedef typename RawPointerKeyValueTrait<const char, V>::Pair Pair;
+struct CStringIntMapKeyValueTrait {
+  using Key = const char*;
+  using Value = intptr_t;
 
-  static uword Hash(Key key) {
+  static constexpr Value kNoValue = kIntptrMin;
+
+  struct Pair {
+    Key key;
+    Value value;
+    Pair() : key(nullptr), value(kNoValue) {}
+    Pair(const Key key, const Value& value) : key(key), value(value) {}
+    Pair(const Pair& other) : key(other.key), value(other.value) {}
+    Pair& operator=(const Pair&) = default;
+  };
+
+  static Key KeyOf(const Pair& pair) { return pair.key; }
+  static Value ValueOf(const Pair& pair) { return pair.value; }
+  static uword Hash(const Key& key) {
     ASSERT(key != nullptr);
     return Utils::StringHash(key, strlen(key));
   }
-  static bool IsKeyEqual(Pair kv, Key key) {
+  static bool IsKeyEqual(const Pair& kv, const Key& key) {
     ASSERT(kv.key != nullptr && key != nullptr);
     return kv.key == key || strcmp(kv.key, key) == 0;
   }
 };
 
-template <typename V, typename B, typename Allocator>
-class BaseCStringMap
-    : public BaseDirectChainedHashMap<CStringMapKeyValueTrait<V>,
+template <typename B, typename Allocator>
+class BaseCStringIntMap
+    : public BaseDirectChainedHashMap<CStringIntMapKeyValueTrait,
                                       B,
                                       Allocator> {
  public:
-  explicit BaseCStringMap(Allocator* allocator)
-      : BaseDirectChainedHashMap<CStringMapKeyValueTrait<V>, B, Allocator>(
+  explicit BaseCStringIntMap(Allocator* allocator)
+      : BaseDirectChainedHashMap<CStringIntMapKeyValueTrait, B, Allocator>(
             allocator) {}
 
  private:
-  DISALLOW_COPY_AND_ASSIGN(BaseCStringMap);
+  DISALLOW_COPY_AND_ASSIGN(BaseCStringIntMap);
 };
 
-template <typename V>
-class CStringMap : public BaseCStringMap<V, ValueObject, Zone> {
+class CStringIntMap : public BaseCStringIntMap<ValueObject, Zone> {
  public:
-  CStringMap()
-      : BaseCStringMap<V, ValueObject, Zone>(ThreadState::Current()->zone()) {}
-  explicit CStringMap(Zone* zone)
-      : BaseCStringMap<V, ValueObject, Zone>(zone) {}
+  CStringIntMap()
+      : BaseCStringIntMap<ValueObject, Zone>(ThreadState::Current()->zone()) {}
+  explicit CStringIntMap(Zone* zone)
+      : BaseCStringIntMap<ValueObject, Zone>(zone) {}
 
  private:
-  DISALLOW_COPY_AND_ASSIGN(CStringMap);
+  DISALLOW_COPY_AND_ASSIGN(CStringIntMap);
 };
 
 template <typename V>
@@ -653,7 +662,7 @@
   static Value ValueOf(Pair kv) { return kv; }
 
   static inline uword Hash(Key key) {
-    return Utils::WordHash(reinterpret_cast<intptr_t>(key));
+    return Utils::StringHash(reinterpret_cast<const char*>(&key), sizeof(key));
   }
 
   static inline bool IsKeyEqual(Pair pair, Key key) { return pair == key; }
diff --git a/runtime/vm/hash_map_test.cc b/runtime/vm/hash_map_test.cc
index 8b5b1ba..52fc97a 100644
--- a/runtime/vm/hash_map_test.cc
+++ b/runtime/vm/hash_map_test.cc
@@ -259,7 +259,7 @@
   EXPECT(set->IsEmpty());
 }
 
-TEST_CASE(CStringMap) {
+TEST_CASE(CStringIntMap) {
   const char* const kConst1 = "test";
   const char* const kConst2 = "test 2";
 
@@ -274,7 +274,7 @@
   const intptr_t i1 = 1;
   const intptr_t i2 = 2;
 
-  CStringMap<intptr_t> map;
+  CStringIntMap map;
   EXPECT(map.IsEmpty());
 
   map.Insert({str1, i1});
@@ -307,7 +307,7 @@
   free(str1);
 }
 
-TEST_CASE(CStringMapUpdate) {
+TEST_CASE(CStringIntMapUpdate) {
   const char* const kConst1 = "test";
   const char* const kConst2 = "test 2";
 
@@ -323,11 +323,11 @@
   EXPECT_STREQ(str1, str3);
   EXPECT_STREQ(str1, str4);
 
-  CStringMapKeyValueTrait<intptr_t>::Pair p1 = {str1, 1};
-  CStringMapKeyValueTrait<intptr_t>::Pair p2 = {str2, 2};
-  CStringMapKeyValueTrait<intptr_t>::Pair p3 = {str3, 3};
+  CStringIntMapKeyValueTrait::Pair p1{str1, 1};
+  CStringIntMapKeyValueTrait::Pair p2{str2, 2};
+  CStringIntMapKeyValueTrait::Pair p3{str3, 3};
 
-  CStringMap<intptr_t> map;
+  CStringIntMap map;
   EXPECT(map.IsEmpty());
 
   map.Update(p1);
diff --git a/runtime/vm/image_snapshot.cc b/runtime/vm/image_snapshot.cc
index 73cc3d6..344ae25 100644
--- a/runtime/vm/image_snapshot.cc
+++ b/runtime/vm/image_snapshot.cc
@@ -462,15 +462,13 @@
   // BSSsection in the text section as an initial InstructionsSection object.
   WriteBss(vm);
 
-  offset_space_ = vm ? V8SnapshotProfileWriter::kVmText
-                     : V8SnapshotProfileWriter::kIsolateText;
+  offset_space_ = vm ? IdSpace::kVmText : IdSpace::kIsolateText;
   WriteText(vm);
 
   // Append the direct-mapped RO data objects after the clustered snapshot
   // and then for ELF and assembly outputs, add appropriate sections with
   // that combined data.
-  offset_space_ = vm ? V8SnapshotProfileWriter::kVmData
-                     : V8SnapshotProfileWriter::kIsolateData;
+  offset_space_ = vm ? IdSpace::kVmData : IdSpace::kIsolateData;
   WriteROData(clustered_stream, vm);
 }
 
@@ -680,14 +678,10 @@
                                             instructions_symbol);
       profile_writer_->AttributeBytesTo(id,
                                         section_size - section_payload_length);
-      const intptr_t element_offset = id.second - parent_id.second;
+      const intptr_t element_offset = id.nonce() - parent_id.nonce();
       profile_writer_->AttributeReferenceTo(
           parent_id,
-          {
-              V8SnapshotProfileWriter::Reference::kElement,
-              {.offset = element_offset},
-          },
-          id);
+          V8SnapshotProfileWriter::Reference::Element(element_offset), id);
       // Later objects will have the InstructionsSection as a parent if in
       // bare instructions mode, otherwise the image.
       if (bare_instruction_payloads) {
@@ -718,7 +712,7 @@
             ? compiler::target::InstructionsSection::HeaderSize()
             : compiler::target::InstructionsSection::InstanceSize(0);
     text_offset += Align(section_contents_alignment, text_offset);
-    ASSERT_EQUAL(text_offset - id.second, expected_size);
+    ASSERT_EQUAL(text_offset - id.nonce(), expected_size);
   }
 #endif
 
@@ -729,7 +723,7 @@
   SnapshotTextObjectNamer namer(zone);
 #endif
 
-  ASSERT(offset_space_ != V8SnapshotProfileWriter::kSnapshot);
+  ASSERT(offset_space_ != IdSpace::kSnapshot);
   for (intptr_t i = 0; i < instructions_.length(); i++) {
     auto& data = instructions_[i];
     const bool is_trampoline = data.trampoline_bytes != nullptr;
@@ -748,14 +742,10 @@
                                           : SizeInSnapshot(data.insns_->ptr());
       profile_writer_->SetObjectTypeAndName(id, type, object_name);
       profile_writer_->AttributeBytesTo(id, size);
-      const intptr_t element_offset = id.second - parent_id.second;
+      const intptr_t element_offset = id.nonce() - parent_id.nonce();
       profile_writer_->AttributeReferenceTo(
           parent_id,
-          {
-              V8SnapshotProfileWriter::Reference::kElement,
-              {.offset = element_offset},
-          },
-          id);
+          V8SnapshotProfileWriter::Reference::Element(element_offset), id);
     }
 #endif
 
diff --git a/runtime/vm/image_snapshot.h b/runtime/vm/image_snapshot.h
index c234dd8..42f96e9 100644
--- a/runtime/vm/image_snapshot.h
+++ b/runtime/vm/image_snapshot.h
@@ -269,10 +269,10 @@
   void PrepareForSerialization(GrowableArray<ImageWriterCommand>* commands);
 
   bool IsROSpace() const {
-    return offset_space_ == V8SnapshotProfileWriter::kVmData ||
-           offset_space_ == V8SnapshotProfileWriter::kVmText ||
-           offset_space_ == V8SnapshotProfileWriter::kIsolateData ||
-           offset_space_ == V8SnapshotProfileWriter::kIsolateText;
+    return offset_space_ == IdSpace::kVmData ||
+           offset_space_ == IdSpace::kVmText ||
+           offset_space_ == IdSpace::kIsolateData ||
+           offset_space_ == IdSpace::kIsolateText;
   }
   int32_t GetTextOffsetFor(InstructionsPtr instructions, CodePtr code);
   uint32_t GetDataOffsetFor(ObjectPtr raw_object);
@@ -436,8 +436,7 @@
   GrowableArray<ObjectData> objects_;
   GrowableArray<InstructionsData> instructions_;
 
-  V8SnapshotProfileWriter::IdSpace offset_space_ =
-      V8SnapshotProfileWriter::kSnapshot;
+  IdSpace offset_space_ = IdSpace::kSnapshot;
   V8SnapshotProfileWriter* profile_writer_ = nullptr;
   const char* const image_type_;
   const char* const instructions_section_type_;
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index fb463cf..a00fa04 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -23096,6 +23096,14 @@
   Utf8::Encode(*this, reinterpret_cast<char*>(utf8_array), array_len);
 }
 
+const char* String::ToCString(Thread* thread, StringPtr ptr) {
+  if (ptr == nullptr) return nullptr;
+  REUSABLE_STRING_HANDLESCOPE(thread);
+  String& str = reused_string_handle.Handle();
+  str = ptr;
+  return str.ToCString();
+}
+
 static FinalizablePersistentHandle* AddFinalizer(const Object& referent,
                                                  void* peer,
                                                  Dart_HandleFinalizer callback,
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index 6657bf0..f43d2d0 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -9079,6 +9079,7 @@
 
   char* ToMallocCString() const;
   void ToUTF8(uint8_t* utf8_array, intptr_t array_len) const;
+  static const char* ToCString(Thread* thread, StringPtr ptr);
 
   // Creates a new String object from a C string that is assumed to contain
   // UTF-8 encoded characters and '\0' is considered a termination character.
diff --git a/runtime/vm/v8_snapshot_writer.cc b/runtime/vm/v8_snapshot_writer.cc
index c828b4f..351c831 100644
--- a/runtime/vm/v8_snapshot_writer.cc
+++ b/runtime/vm/v8_snapshot_writer.cc
@@ -10,109 +10,85 @@
 namespace dart {
 
 const V8SnapshotProfileWriter::ObjectId
-    V8SnapshotProfileWriter::kArtificialRootId{kArtificial, 0};
+    V8SnapshotProfileWriter::kArtificialRootId{IdSpace::kArtificial, 0};
 
 #if defined(DART_PRECOMPILER)
 
-static const char* ZoneString(Zone* Z, const char* str) {
-  return OS::SCreate(Z, "%s", str);
-}
-
 V8SnapshotProfileWriter::V8SnapshotProfileWriter(Zone* zone)
     : zone_(zone),
       node_types_(zone_),
       edge_types_(zone_),
       strings_(zone_),
       roots_(zone_) {
-  node_types_.Insert({"Unknown", kUnknown});
-  node_types_.Insert({"ArtificialRoot", kArtificialRoot});
+  intptr_t idx = edge_types_.Add("context");
+  ASSERT_EQUAL(idx, static_cast<intptr_t>(Edge::Type::kContext));
+  idx = edge_types_.Add("element");
+  ASSERT_EQUAL(idx, static_cast<intptr_t>(Edge::Type::kElement));
+  idx = edge_types_.Add("property");
+  ASSERT_EQUAL(idx, static_cast<intptr_t>(Edge::Type::kProperty));
+  idx = edge_types_.Add("internal");
+  ASSERT_EQUAL(idx, static_cast<intptr_t>(Edge::Type::kInternal));
 
-  edge_types_.Insert({"context", kContext});
-  edge_types_.Insert({"element", kElement});
-  edge_types_.Insert({"property", kProperty});
-  edge_types_.Insert({"internal", kInternal});
-
-  strings_.Insert({"<unknown>", kUnknownString});
-  strings_.Insert({"<artificial root>", kArtificialRootString});
-
-  nodes_.Insert(NodeInfo(zone_, kArtificialRoot, kArtificialRootString,
-                         kArtificialRootId, 0, 0));
+  SetObjectTypeAndName(kArtificialRootId, "ArtificialRoot",
+                       "<artificial root>");
 }
 
-void V8SnapshotProfileWriter::SetObjectTypeAndName(ObjectId object_id,
+void V8SnapshotProfileWriter::SetObjectTypeAndName(const ObjectId& object_id,
                                                    const char* type,
                                                    const char* name) {
   ASSERT(type != nullptr);
-
-  if (!node_types_.HasKey(type)) {
-    node_types_.Insert({ZoneString(zone_, type), node_types_.Size()});
-  }
-
-  intptr_t type_id = node_types_.LookupValue(type);
   NodeInfo* info = EnsureId(object_id);
-  ASSERT(info->type == kUnknown || info->type == type_id);
-  info->type = type_id;
-  if (name != nullptr) {
-    info->name = EnsureString(name);
-  } else {
-    info->name =
-        EnsureString(OS::SCreate(zone_, "Unnamed [%s] %s", type, "(nil)"));
+  const intptr_t type_index = node_types_.Add(type);
+  if (info->type != kInvalidString && info->type != type_index) {
+    FATAL("Attempting to assign mismatching type %s to node %s", type,
+          info->ToCString(zone_));
+  }
+  info->type = type_index;
+  // Don't overwrite any existing name.
+  if (info->name == kInvalidString) {
+    info->name = strings_.Add(name);
   }
 }
 
-void V8SnapshotProfileWriter::AttributeBytesTo(ObjectId object_id,
+void V8SnapshotProfileWriter::AttributeBytesTo(const ObjectId& object_id,
                                                size_t num_bytes) {
   EnsureId(object_id)->self_size += num_bytes;
 }
 
-V8SnapshotProfileWriter::ConstantEdgeType
-V8SnapshotProfileWriter::ReferenceTypeToEdgeType(Reference::Type type) {
-  switch (type) {
-    case Reference::kElement:
-      return ConstantEdgeType::kElement;
-    case Reference::kProperty:
-      return ConstantEdgeType::kProperty;
-  }
-}
-
-void V8SnapshotProfileWriter::AttributeReferenceTo(ObjectId from_object_id,
-                                                   Reference reference,
-                                                   ObjectId to_object_id) {
-  const bool is_element = reference.reference_type == Reference::kElement;
-  ASSERT(is_element ? reference.offset >= 0 : reference.name != nullptr);
-
+void V8SnapshotProfileWriter::AttributeReferenceTo(
+    const ObjectId& from_object_id,
+    const Reference& reference,
+    const ObjectId& to_object_id) {
+  ASSERT(reference.IsElement() ? reference.offset >= 0
+                               : reference.name != nullptr);
   EnsureId(to_object_id);
-  const Edge edge(ReferenceTypeToEdgeType(reference.reference_type),
-                  is_element ? reference.offset : EnsureString(reference.name));
+  const Edge edge(this, reference);
   EnsureId(from_object_id)->AddEdge(edge, to_object_id);
-  ++edge_count_;
 }
 
 void V8SnapshotProfileWriter::AttributeDroppedReferenceTo(
-    ObjectId from_object_id,
-    Reference reference,
-    ObjectId to_object_id,
-    ObjectId replacement_object_id) {
-  ASSERT(to_object_id.first == kArtificial);
-  ASSERT(replacement_object_id.first != kArtificial);
-
-  const bool is_element = reference.reference_type == Reference::kElement;
-  ASSERT(is_element ? reference.offset >= 0 : reference.name != nullptr);
+    const ObjectId& from_object_id,
+    const Reference& reference,
+    const ObjectId& to_object_id,
+    const ObjectId& replacement_object_id) {
+  ASSERT(to_object_id.IsArtificial());
+  ASSERT(!replacement_object_id.IsArtificial());
+  ASSERT(reference.IsElement() ? reference.offset >= 0
+                               : reference.name != nullptr);
 
   // The target node is added normally.
   AttributeReferenceTo(from_object_id, reference, to_object_id);
 
+  EnsureId(replacement_object_id);
   // Put the replacement node at an invalid offset or name that can still be
   // associated with the real one. For offsets, this is the negative offset.
   // For names, it's the name prefixed with ":replacement_".
-  EnsureId(replacement_object_id);
-  const Edge replacement_edge(
-      ReferenceTypeToEdgeType(reference.reference_type),
-      is_element ? -reference.offset
-                 : EnsureString(
-                       OS::SCreate(zone_, ":replacement_%s", reference.name)));
+  Reference replacement_reference =
+      reference.IsElement() ? Reference::Element(-reference.offset)
+                            : Reference::Property(OS::SCreate(
+                                  zone_, ":replacement_%s", reference.name));
+  const Edge replacement_edge(this, replacement_reference);
   EnsureId(from_object_id)->AddEdge(replacement_edge, replacement_object_id);
-  ++edge_count_;
 }
 
 bool V8SnapshotProfileWriter::HasId(const ObjectId& object_id) {
@@ -120,72 +96,195 @@
 }
 
 V8SnapshotProfileWriter::NodeInfo* V8SnapshotProfileWriter::EnsureId(
-    ObjectId object_id) {
+    const ObjectId& object_id) {
   if (!HasId(object_id)) {
-    nodes_.Insert(NodeInfo(zone_, kUnknown, kUnknownString, object_id, 0, -1));
+    nodes_.Insert(NodeInfo(this, object_id));
   }
   return nodes_.Lookup(object_id);
 }
 
-intptr_t V8SnapshotProfileWriter::EnsureString(const char* str) {
-  if (!strings_.HasKey(str)) {
-    strings_.Insert({ZoneString(zone_, str), strings_.Size()});
-    return strings_.Size() - 1;
+const char* V8SnapshotProfileWriter::NodeInfo::ToCString(Zone* zone) const {
+  JSONWriter writer;
+  WriteDebug(&writer);
+  return OS::SCreate(zone, "%s", writer.buffer()->buffer());
+}
+
+void V8SnapshotProfileWriter::NodeInfo::Write(JSONWriter* writer) const {
+  ASSERT(id.space() != IdSpace::kInvalid);
+  if (type == kInvalidString) {
+    FATAL("No type given for node %s", id.ToCString(profile_writer_->zone_));
   }
-  return strings_.LookupValue(str);
+  writer->PrintValue(type);
+  if (name != kInvalidString) {
+    writer->PrintValue(name);
+  } else {
+    ASSERT(profile_writer_ != nullptr);
+    // If we don't already have a name for the node, we lazily create a default
+    // one. This is safe since the strings table is written out after the nodes.
+    const intptr_t name = profile_writer_->strings_.AddFormatted(
+        "Unnamed [%s] (nil)", profile_writer_->node_types_.At(type));
+    writer->PrintValue(name);
+  }
+  id.Write(writer);
+  writer->PrintValue(self_size);
+  writer->PrintValue64(edges->Length());
 }
 
-intptr_t V8SnapshotProfileWriter::WriteNodeInfo(JSONWriter* writer,
-                                                const NodeInfo& info) {
-  writer->PrintValue(info.type);
-  writer->PrintValue(info.name);
-  writer->PrintValue(NodeIdFor(info.id));
-  writer->PrintValue(info.self_size);
-  writer->PrintValue64(info.edges->Length());
-  writer->PrintNewline();
-  return kNumNodeFields;
+void V8SnapshotProfileWriter::NodeInfo::WriteDebug(JSONWriter* writer) const {
+  writer->OpenObject();
+  if (type != kInvalidString) {
+    writer->PrintProperty("type", profile_writer_->node_types_.At(type));
+  }
+  if (name != kInvalidString) {
+    writer->PrintProperty("name", profile_writer_->strings_.At(name));
+  }
+  id.WriteDebug(writer, "id");
+  writer->PrintProperty("self_size", self_size);
+  edges->WriteDebug(writer, "edges");
+  writer->CloseObject();
 }
 
-void V8SnapshotProfileWriter::WriteEdgeInfo(JSONWriter* writer,
-                                            const Edge& info,
-                                            const ObjectId& target) {
-  writer->PrintValue64(info.first);
-  writer->PrintValue64(info.second);
-  writer->PrintValue64(nodes_.LookupValue(target).offset);
-  writer->PrintNewline();
+const char* V8SnapshotProfileWriter::ObjectId::ToCString(Zone* zone) const {
+  JSONWriter writer;
+  WriteDebug(&writer);
+  return OS::SCreate(zone, "%s", writer.buffer()->buffer());
 }
 
-void V8SnapshotProfileWriter::AddRoot(ObjectId object_id,
-                                      const char* name /*= nullptr*/) {
-  EnsureId(object_id);
+void V8SnapshotProfileWriter::ObjectId::Write(JSONWriter* writer,
+                                              const char* property) const {
+  if (property != nullptr) {
+    writer->PrintProperty64(property, encoded_);
+  } else {
+    writer->PrintValue64(encoded_);
+  }
+}
+
+void V8SnapshotProfileWriter::ObjectId::WriteDebug(JSONWriter* writer,
+                                                   const char* property) const {
+  writer->OpenObject(property);
+  writer->PrintProperty("space", IdSpaceToCString(space()));
+  writer->PrintProperty64("nonce", nonce());
+  writer->CloseObject();
+}
+
+const char* V8SnapshotProfileWriter::ObjectId::IdSpaceToCString(IdSpace space) {
+  switch (space) {
+    case IdSpace::kInvalid:
+      return "Invalid";
+    case IdSpace::kSnapshot:
+      return "Snapshot";
+    case IdSpace::kVmText:
+      return "VmText";
+    case IdSpace::kIsolateText:
+      return "IsolateText";
+    case IdSpace::kVmData:
+      return "VmData";
+    case IdSpace::kIsolateData:
+      return "IsolateData";
+    case IdSpace::kArtificial:
+      return "Artificial";
+  }
+}
+
+const char* V8SnapshotProfileWriter::EdgeMap::ToCString(Zone* zone) const {
+  JSONWriter writer;
+  WriteDebug(&writer);
+  return OS::SCreate(zone, "%s", writer.buffer()->buffer());
+}
+
+void V8SnapshotProfileWriter::EdgeMap::WriteDebug(JSONWriter* writer,
+                                                  const char* property) const {
+  writer->OpenArray(property);
+  auto edge_it = GetIterator();
+  while (auto const pair = edge_it.Next()) {
+    pair->edge.WriteDebug(writer, pair->target);
+  }
+  writer->CloseArray();
+}
+
+void V8SnapshotProfileWriter::Edge::Write(JSONWriter* writer,
+                                          const ObjectId& target_id) const {
+  ASSERT(type != Type::kInvalid);
+  writer->PrintValue64(static_cast<intptr_t>(type));
+  writer->PrintValue64(name_or_offset);
+  auto const target = profile_writer_->nodes_.LookupValue(target_id);
+  writer->PrintValue64(target.offset());
+}
+
+void V8SnapshotProfileWriter::Edge::WriteDebug(
+    JSONWriter* writer,
+    const ObjectId& target_id) const {
+  writer->OpenObject();
+  if (type != Type::kInvalid) {
+    writer->PrintProperty(
+        "type", profile_writer_->edge_types_.At(static_cast<intptr_t>(type)));
+  }
+  if (type == Type::kProperty) {
+    writer->PrintProperty("name", profile_writer_->strings_.At(name_or_offset));
+  } else {
+    writer->PrintProperty64("offset", name_or_offset);
+  }
+  auto const target = profile_writer_->nodes_.LookupValue(target_id);
+  target.id.WriteDebug(writer, "target");
+  writer->CloseObject();
+}
+
+void V8SnapshotProfileWriter::AddRoot(const ObjectId& object_id,
+                                      const char* name) {
   // HeapSnapshotWorker.HeapSnapshot.calculateDistances (from HeapSnapshot.js)
   // assumes that the root does not have more than one edge to any other node
   // (most likely an oversight).
   if (roots_.HasKey(object_id)) return;
+  roots_.Insert(object_id);
 
-  auto const info = NodeInfo(
-      zone_, 0, name != nullptr ? EnsureString(name) : -1, object_id, 0, 0);
-  roots_.Insert(info);
-  auto const root = EnsureId(kArtificialRootId);
-  root->AddEdge(info.name != -1 ? Edge(kProperty, info.name)
-                                : Edge(kInternal, root->edges->Length()),
+  auto const str_index = strings_.Add(name);
+  auto const root = nodes_.Lookup(kArtificialRootId);
+  ASSERT(root != nullptr);
+  root->AddEdge(str_index != kInvalidString
+                    ? Edge(this, Edge::Type::kProperty, str_index)
+                    : Edge(this, Edge::Type::kInternal, root->edges->Length()),
                 object_id);
 }
 
-void V8SnapshotProfileWriter::WriteStringsTable(
-    JSONWriter* writer,
-    const DirectChainedHashMap<StringToIntMapTraits>& map) {
-  const char** strings = zone_->Alloc<const char*>(map.Size());
-  StringToIntMapTraits::Pair* pair = nullptr;
-  auto it = map.GetIterator();
-  while ((pair = it.Next()) != nullptr) {
-    ASSERT(pair->value >= 0 && pair->value < map.Size());
-    strings[pair->value] = pair->key;
+intptr_t V8SnapshotProfileWriter::StringsTable::Add(const char* str) {
+  if (str == nullptr) return kInvalidString;
+  if (auto const kv = index_map_.Lookup(str)) {
+    return kv->value;
   }
-  for (intptr_t i = 0; i < map.Size(); ++i) {
-    writer->PrintValue(strings[i]);
+  const char* new_str = OS::SCreate(zone_, "%s", str);
+  const intptr_t index = strings_.length();
+  strings_.Add(new_str);
+  index_map_.Insert({new_str, index});
+  return index;
+}
+
+intptr_t V8SnapshotProfileWriter::StringsTable::AddFormatted(const char* fmt,
+                                                             ...) {
+  va_list args;
+  va_start(args, fmt);
+  const char* str = OS::VSCreate(zone_, fmt, args);
+  if (auto const kv = index_map_.Lookup(str)) {
+    return kv->value;
+  }
+  const intptr_t index = strings_.length();
+  strings_.Add(str);
+  index_map_.Insert({str, index});
+  return index;
+}
+
+const char* V8SnapshotProfileWriter::StringsTable::At(intptr_t index) const {
+  if (index > strings_.length()) return nullptr;
+  return strings_[index];
+}
+
+void V8SnapshotProfileWriter::StringsTable::Write(JSONWriter* writer,
+                                                  const char* property) const {
+  writer->OpenArray(property);
+  for (auto const str : strings_) {
+    writer->PrintValue(str);
     writer->PrintNewline();
   }
+  writer->CloseArray();
 }
 
 void V8SnapshotProfileWriter::Write(JSONWriter* writer) {
@@ -207,11 +306,7 @@
 
     {
       writer->OpenArray("node_types");
-      {
-        writer->OpenArray();
-        WriteStringsTable(writer, node_types_);
-        writer->CloseArray();
-      }
+      node_types_.Write(writer);
       writer->CloseArray();
     }
 
@@ -225,48 +320,59 @@
 
     {
       writer->OpenArray("edge_types");
-      {
-        writer->OpenArray();
-        WriteStringsTable(writer, edge_types_);
-        writer->CloseArray();
-      }
+      edge_types_.Write(writer);
       writer->CloseArray();
     }
 
     writer->CloseObject();
 
     writer->PrintProperty64("node_count", nodes_.Size());
-    writer->PrintProperty64("edge_count", edge_count_ + roots_.Size());
+    {
+      intptr_t edge_count = 0;
+      auto nodes_it = nodes_.GetIterator();
+      while (auto const info = nodes_it.Next()) {
+        // All nodes should have an edge map, though it may be empty.
+        ASSERT(info->edges != nullptr);
+        edge_count += info->edges->Length();
+      }
+      writer->PrintProperty64("edge_count", edge_count);
+    }
   }
   writer->CloseObject();
 
-  const auto& root = *nodes_.Lookup(kArtificialRootId);
-  auto nodes_it = nodes_.GetIterator();
-
   {
     writer->OpenArray("nodes");
     //  Always write the information for the artificial root first.
-    intptr_t offset = WriteNodeInfo(writer, root);
+    auto const root = nodes_.Lookup(kArtificialRootId);
+    ASSERT(root != nullptr);
+    intptr_t offset = 0;
+    root->set_offset(offset);
+    root->Write(writer);
+    offset += kNumNodeFields;
+    auto nodes_it = nodes_.GetIterator();
     for (auto entry = nodes_it.Next(); entry != nullptr;
          entry = nodes_it.Next()) {
       if (entry->id == kArtificialRootId) continue;
-      entry->offset = offset;
-      offset += WriteNodeInfo(writer, *entry);
+      entry->set_offset(offset);
+      entry->Write(writer);
+      offset += kNumNodeFields;
     }
     writer->CloseArray();
-    nodes_it.Reset();
   }
 
   {
     auto write_edges = [&](const NodeInfo& info) {
       auto edges_it = info.edges->GetIterator();
       while (auto const pair = edges_it.Next()) {
-        WriteEdgeInfo(writer, pair->edge, pair->target);
+        pair->edge.Write(writer, pair->target);
       }
     };
     writer->OpenArray("edges");
     //  Always write the information for the artificial root first.
-    write_edges(root);
+    auto const root = nodes_.Lookup(kArtificialRootId);
+    ASSERT(root != nullptr);
+    write_edges(*root);
+    auto nodes_it = nodes_.GetIterator();
     while (auto const entry = nodes_it.Next()) {
       if (entry->id == kArtificialRootId) continue;
       write_edges(*entry);
@@ -274,11 +380,9 @@
     writer->CloseArray();
   }
 
-  {
-    writer->OpenArray("strings");
-    WriteStringsTable(writer, strings_);
-    writer->CloseArray();
-  }
+  // Must happen after any calls to WriteNodeInfo, as those calls may add more
+  // strings.
+  strings_.Write(writer, "strings");
 
   writer->CloseObject();
 }
diff --git a/runtime/vm/v8_snapshot_writer.h b/runtime/vm/v8_snapshot_writer.h
index 09ff87c..63544c9 100644
--- a/runtime/vm/v8_snapshot_writer.h
+++ b/runtime/vm/v8_snapshot_writer.h
@@ -16,54 +16,71 @@
 
 namespace dart {
 
-struct StringToIntMapTraits {
-  typedef char const* Key;
-  typedef intptr_t Value;
-
-  struct Pair {
-    Key key;
-    Value value;
-    Pair() : key(nullptr), value(-1) {}
-    Pair(Key k, Value v) : key(k), value(v) {}
-  };
-
-  static Value ValueOf(Pair pair) { return pair.value; }
-
-  static Key KeyOf(Pair pair) { return pair.key; }
-
-  static uword Hash(Key key) { return String::Hash(key, strlen(key)); }
-
-  static bool IsKeyEqual(Pair x, Key y) { return strcmp(x.key, y) == 0; }
+enum class IdSpace : uint8_t {
+  kInvalid = 0,   // So default-constructed ObjectIds are invalid.
+  kSnapshot = 1,  // Can be VM or Isolate heap, they share ids.
+  kVmText = 2,
+  kIsolateText = 3,
+  kVmData = 4,
+  kIsolateData = 5,
+  kArtificial = 6,  // Artificial objects (e.g. the global root).
+  // Change ObjectId::kIdSpaceBits to use last entry if more are added.
 };
 
 class V8SnapshotProfileWriter : public ZoneAllocated {
  public:
-  enum IdSpace {
-    kSnapshot = 0,  // Can be VM or Isolate heap, they share ids.
-    kVmText = 1,
-    kIsolateText = 2,
-    kVmData = 3,
-    kIsolateData = 4,
-    kArtificial = 5,  // Artificial objects (e.g. the global root).
-    kIdSpaceBits = 3,
+  struct ObjectId {
+    ObjectId() : ObjectId(IdSpace::kInvalid, -1) {}
+    ObjectId(IdSpace space, int64_t nonce)
+        : encoded_((nonce << kIdSpaceBits) | static_cast<intptr_t>(space)) {
+      ASSERT(Utils::IsInt(kBitsPerInt64 - kIdSpaceBits, nonce));
+    }
+
+    inline bool operator!=(const ObjectId& other) const {
+      return encoded_ != other.encoded_;
+    }
+    inline bool operator==(const ObjectId& other) const {
+      return !(*this != other);
+    }
+
+    inline uword Hash() const { return Utils::WordHash(encoded_); }
+    inline int64_t nonce() const { return encoded_ >> kIdSpaceBits; }
+    inline IdSpace space() const {
+      return static_cast<IdSpace>(encoded_ & kIdSpaceMask);
+    }
+    inline bool IsArtificial() const { return space() == IdSpace::kArtificial; }
+
+    const char* ToCString(Zone* zone) const;
+    void Write(JSONWriter* writer, const char* property = nullptr) const;
+    void WriteDebug(JSONWriter* writer, const char* property = nullptr) const;
+
+   private:
+    static constexpr size_t kIdSpaceBits =
+        Utils::BitLength(static_cast<int64_t>(IdSpace::kArtificial));
+    static constexpr int64_t kIdSpaceMask = Utils::NBitMaskUnsafe(kIdSpaceBits);
+    static const char* IdSpaceToCString(IdSpace space);
+
+    int64_t encoded_;
   };
 
-  typedef std::pair<IdSpace, intptr_t> ObjectId;
-
   struct Reference {
-    enum Type {
+    enum class Type {
       kElement,
       kProperty,
-    } reference_type;
+    } type;
     union {
       intptr_t offset;   // kElement
       const char* name;  // kProperty
     };
-  };
 
-  enum ConstantStrings {
-    kUnknownString = 0,
-    kArtificialRootString = 1,
+    static Reference Element(intptr_t offset) {
+      return {Type::kElement, {.offset = offset}};
+    }
+    static Reference Property(const char* name) {
+      return {Type::kProperty, {.name = name}};
+    }
+
+    bool IsElement() const { return type == Type::kElement; }
   };
 
   static const ObjectId kArtificialRootId;
@@ -72,47 +89,41 @@
   explicit V8SnapshotProfileWriter(Zone* zone) {}
   virtual ~V8SnapshotProfileWriter() {}
 
-  void SetObjectType(ObjectId object_id, const char* type) {}
-  void SetObjectTypeAndName(ObjectId object_id,
+  void SetObjectTypeAndName(const ObjectId& object_id,
                             const char* type,
                             const char* name) {}
-  void AttributeBytesTo(ObjectId object_id, size_t num_bytes) {}
-  void AttributeReferenceTo(ObjectId from_object_id,
-                            Reference reference,
-                            ObjectId to_object_id) {}
-  void AttributeWeakReferenceTo(
-      ObjectId from_object_id,
-      Reference reference,
-      ObjectId to_object_id,
-      ObjectId replacement_object_id = kArtificialRootId) {}
-  void AddRoot(ObjectId object_id, const char* name = nullptr) {}
+  void AttributeBytesTo(const ObjectId& object_id, size_t num_bytes) {}
+  void AttributeReferenceTo(const ObjectId& from_object_id,
+                            const Reference& reference,
+                            const ObjectId& to_object_id) {}
+  void AttributeWeakReferenceTo(const ObjectId& from_object_id,
+                                const Reference& reference,
+                                const ObjectId& to_object_id,
+                                const ObjectId& replacement_object_id) {}
+  void AddRoot(const ObjectId& object_id, const char* name = nullptr) {}
   bool HasId(const ObjectId& object_id) { return false; }
 #else
   explicit V8SnapshotProfileWriter(Zone* zone);
   virtual ~V8SnapshotProfileWriter() {}
 
-  void SetObjectType(ObjectId object_id, const char* type) {
-    SetObjectTypeAndName(object_id, type, nullptr);
-  }
-
   // Records that the object referenced by 'object_id' has type 'type'. The
   // 'type' for all 'Instance's should be 'Instance', not the user-visible type
   // and use 'name' for the real type instead.
-  void SetObjectTypeAndName(ObjectId object_id,
+  void SetObjectTypeAndName(const ObjectId& object_id,
                             const char* type,
                             const char* name);
 
   // Charges 'num_bytes'-many bytes to 'object_id'. In a clustered snapshot,
   // objects can have their data spread across multiple sections, so this can be
   // called multiple times for the same object.
-  void AttributeBytesTo(ObjectId object_id, size_t num_bytes);
+  void AttributeBytesTo(const ObjectId& object_id, size_t num_bytes);
 
   // Records that a reference to the object with id 'to_object_id' was written
   // in order to serialize the object with id 'from_object_id'. This does not
   // affect the number of bytes charged to 'from_object_id'.
-  void AttributeReferenceTo(ObjectId from_object_id,
-                            Reference reference,
-                            ObjectId to_object_id);
+  void AttributeReferenceTo(const ObjectId& from_object_id,
+                            const Reference& reference,
+                            const ObjectId& to_object_id);
 
   // Records that a weak serialization reference to a dropped object
   // with id 'to_object_id' was written in order to serialize the object with id
@@ -120,14 +131,14 @@
   // 'replacement_object_id' is recorded as the replacement for the
   // dropped object in the snapshot. This does not affect the number of
   // bytes charged to 'from_object_id'.
-  void AttributeDroppedReferenceTo(ObjectId from_object_id,
-                                   Reference reference,
-                                   ObjectId to_object_id,
-                                   ObjectId replacement_object_id);
+  void AttributeDroppedReferenceTo(const ObjectId& from_object_id,
+                                   const Reference& reference,
+                                   const ObjectId& to_object_id,
+                                   const ObjectId& replacement_object_id);
 
-  // Marks an object as being a root in the graph. Used for analysis of the
-  // graph.
-  void AddRoot(ObjectId object_id, const char* name = nullptr);
+  // Marks an object as being a root in the graph. Used for analysis of
+  // the graph.
+  void AddRoot(const ObjectId& object_id, const char* name = nullptr);
 
   // Write to a file in the V8 Snapshot Profile (JSON/.heapsnapshot) format.
   void Write(const char* file);
@@ -137,17 +148,61 @@
   bool HasId(const ObjectId& object_id);
 
  private:
+  static constexpr intptr_t kInvalidString =
+      CStringIntMapKeyValueTrait::kNoValue;
   static constexpr intptr_t kNumNodeFields = 5;
   static constexpr intptr_t kNumEdgeFields = 3;
 
-  using Edge = std::pair<intptr_t, intptr_t>;
+  struct Edge {
+    enum class Type : intptr_t {
+      kInvalid = -1,
+      kContext = 0,
+      kElement = 1,
+      kProperty = 2,
+      kInternal = 3,
+      kHidden = 4,
+      kShortcut = 5,
+      kWeak = 6,
+      kExtra = 7,
+    };
+
+    Edge() : Edge(nullptr, Type::kInvalid, -1) {}
+    Edge(V8SnapshotProfileWriter* profile_writer, const Reference& reference)
+        : Edge(profile_writer,
+               reference.type == Reference::Type::kElement ? Type::kElement
+                                                           : Type::kProperty,
+               reference.type == Reference::Type::kElement
+                   ? reference.offset
+                   : profile_writer->strings_.Add(reference.name)) {}
+    Edge(V8SnapshotProfileWriter* profile_writer,
+         Type type,
+         intptr_t name_or_offset)
+        : type(type),
+          name_or_offset(name_or_offset),
+          profile_writer_(profile_writer) {}
+
+    inline bool operator!=(const Edge& other) {
+      return profile_writer_ != other.profile_writer_ || type != other.type ||
+             name_or_offset != other.name_or_offset;
+    }
+    inline bool operator==(const Edge& other) { return !(*this != other); }
+
+    void Write(JSONWriter* writer, const ObjectId& target_id) const;
+    void WriteDebug(JSONWriter* writer, const ObjectId& target_id) const;
+
+    Type type;
+    intptr_t name_or_offset;
+
+   private:
+    V8SnapshotProfileWriter* profile_writer_;
+  };
 
   struct EdgeToObjectIdMapTrait {
     using Key = Edge;
     using Value = ObjectId;
 
     struct Pair {
-      Pair() : edge{kContext, -1}, target(kArtificialRootId) {}
+      Pair() : edge{}, target(kArtificialRootId) {}
       Pair(Key key, Value value) : edge(key), target(value) {}
       Edge edge;
       ObjectId target;
@@ -156,73 +211,90 @@
     static Key KeyOf(Pair kv) { return kv.edge; }
     static Value ValueOf(Pair kv) { return kv.target; }
     static uword Hash(Key key) {
-      return FinalizeHash(CombineHashes(key.first, key.second), 30);
+      return FinalizeHash(
+          CombineHashes(static_cast<intptr_t>(key.type), key.name_or_offset));
     }
     static bool IsKeyEqual(Pair kv, Key key) { return kv.edge == key; }
   };
 
-  using EdgeMap = ZoneDirectChainedHashMap<EdgeToObjectIdMapTrait>;
+  struct EdgeMap : public ZoneDirectChainedHashMap<EdgeToObjectIdMapTrait> {
+    explicit EdgeMap(Zone* zone)
+        : ZoneDirectChainedHashMap<EdgeToObjectIdMapTrait>(zone) {}
+
+    const char* ToCString(Zone* zone) const;
+    void WriteDebug(JSONWriter* writer, const char* property = nullptr) const;
+  };
 
   struct NodeInfo {
-    intptr_t type = 0;
-    intptr_t name = 0;
-    ObjectId id;
-    intptr_t self_size = 0;
-    EdgeMap* edges = nullptr;
-    // Populated during serialization.
-    intptr_t offset = -1;
-    // 'trace_node_id' isn't supported.
-    // 'edge_count' is computed on-demand.
+    NodeInfo() {}
+    NodeInfo(V8SnapshotProfileWriter* profile_writer,
+             const ObjectId& id,
+             intptr_t type = kInvalidString,
+             intptr_t name = kInvalidString)
+        : id(id),
+          type(type),
+          name(name),
+          edges(new (profile_writer->zone_) EdgeMap(profile_writer->zone_)),
+          profile_writer_(profile_writer) {}
 
-    // Used for testing sentinel in the hashtable.
-    bool operator!=(const NodeInfo& other) { return id != other.id; }
-    bool operator==(const NodeInfo& other) { return !(*this != other); }
+    inline bool operator!=(const NodeInfo& other) {
+      return id != other.id || type != other.type || name != other.name ||
+             self_size != other.self_size || edges != other.edges ||
+             offset_ != other.offset_ ||
+             profile_writer_ != other.profile_writer_;
+    }
+    inline bool operator==(const NodeInfo& other) { return !(*this != other); }
 
     void AddEdge(const Edge& edge, const ObjectId& target) {
       edges->Insert({edge, target});
     }
     bool HasEdge(const Edge& edge) { return edges->HasKey(edge); }
 
-    // To allow NodeInfo to be used as the pair in ObjectIdToNodeInfoTraits.
-    NodeInfo() : id{kSnapshot, -1} {}
+    const char* ToCString(Zone* zone) const;
+    void Write(JSONWriter* writer) const;
+    void WriteDebug(JSONWriter* writer) const;
 
-    NodeInfo(Zone* zone,
-             intptr_t type,
-             intptr_t name,
-             const ObjectId& id,
-             intptr_t self_size,
-             intptr_t offset)
-        : type(type),
-          name(name),
-          id(id),
-          self_size(self_size),
-          edges(new (zone) EdgeMap(zone)),
-          offset(offset) {}
+    intptr_t offset() const { return offset_; }
+    void set_offset(intptr_t offset) {
+      ASSERT_EQUAL(offset_, -1);
+      offset_ = offset;
+    }
+
+    ObjectId id;
+    intptr_t type = kInvalidString;
+    intptr_t name = kInvalidString;
+    intptr_t self_size = 0;
+    EdgeMap* edges = nullptr;
+
+   private:
+    // Populated during serialization.
+    intptr_t offset_ = -1;
+    // 'trace_node_id' isn't supported.
+    // 'edge_count' is computed on-demand.
+
+    // Used for debugging prints and creating default names if none given.
+    V8SnapshotProfileWriter* profile_writer_ = nullptr;
   };
 
-  NodeInfo* EnsureId(ObjectId object_id);
-  static intptr_t NodeIdFor(ObjectId id) {
-    return (id.second << kIdSpaceBits) | id.first;
-  }
+  NodeInfo* EnsureId(const ObjectId& object_id);
+  void Write(JSONWriter* writer);
 
-  intptr_t EnsureString(const char* str);
+  // Class that encapsulates both an array of strings and a mapping from
+  // strings to their index in the array.
+  class StringsTable {
+   public:
+    explicit StringsTable(Zone* zone)
+        : zone_(zone), index_map_(zone), strings_(zone, 2) {}
 
-  enum ConstantEdgeType {
-    kContext = 0,
-    kElement = 1,
-    kProperty = 2,
-    kInternal = 3,
-    kHidden = 4,
-    kShortcut = 5,
-    kWeak = 6,
-    kExtra = 7,
-  };
+    intptr_t Add(const char* str);
+    intptr_t AddFormatted(const char* fmt, ...) PRINTF_ATTRIBUTE(2, 3);
+    const char* At(intptr_t index) const;
+    void Write(JSONWriter* writer, const char* property = nullptr) const;
 
-  static ConstantEdgeType ReferenceTypeToEdgeType(Reference::Type type);
-
-  enum ConstantNodeType {
-    kUnknown = 0,
-    kArtificialRoot = 1,
+   private:
+    Zone* zone_;
+    CStringIntMap index_map_;
+    GrowableArray<const char*> strings_;
   };
 
   struct ObjectIdToNodeInfoTraits {
@@ -234,31 +306,30 @@
 
     static Value ValueOf(const Pair& pair) { return pair; }
 
-    static uword Hash(Key key) { return Utils::WordHash(NodeIdFor(key)); }
+    static uword Hash(const Key& key) { return key.Hash(); }
 
-    static bool IsKeyEqual(const Pair& x, Key y) { return x.id == y; }
+    static bool IsKeyEqual(const Pair& x, const Key& y) { return x.id == y; }
+  };
+
+  struct ObjectIdSetKeyValueTrait {
+    using Pair = ObjectId;
+    using Key = Pair;
+    using Value = Pair;
+
+    static Key KeyOf(const Pair& pair) { return pair; }
+    static Value ValueOf(const Pair& pair) { return pair; }
+    static uword Hash(const Key& key) { return key.Hash(); }
+    static bool IsKeyEqual(const Pair& pair, const Key& key) {
+      return pair == key;
+    }
   };
 
   Zone* zone_;
-  void Write(JSONWriter* writer);
-  intptr_t WriteNodeInfo(JSONWriter* writer, const NodeInfo& info);
-  void WriteEdgeInfo(JSONWriter* writer,
-                     const Edge& info,
-                     const ObjectId& target);
-  void WriteStringsTable(JSONWriter* writer,
-                         const DirectChainedHashMap<StringToIntMapTraits>& map);
-
   DirectChainedHashMap<ObjectIdToNodeInfoTraits> nodes_;
-  DirectChainedHashMap<StringToIntMapTraits> node_types_;
-  DirectChainedHashMap<StringToIntMapTraits> edge_types_;
-  DirectChainedHashMap<StringToIntMapTraits> strings_;
-
-  // We don't have a zone-allocated hash set, so we just re-use the type for
-  // nodes_ even though we don't need to access the node info (and fill it with
-  // dummy values).
-  DirectChainedHashMap<ObjectIdToNodeInfoTraits> roots_;
-
-  size_t edge_count_ = 0;
+  StringsTable node_types_;
+  StringsTable edge_types_;
+  StringsTable strings_;
+  DirectChainedHashMap<ObjectIdSetKeyValueTrait> roots_;
 #endif
 };