Version 2.12.0-233.0.dev

Merge commit 'd4d3e3698c8bf2ede46b52438874890564f12113' into 'dev'
diff --git a/docs/process/breaking-changes.md b/docs/process/breaking-changes.md
index b7023bd..cee804e 100644
--- a/docs/process/breaking-changes.md
+++ b/docs/process/breaking-changes.md
@@ -96,8 +96,8 @@
 
 * Resolve the breaking change issue and make a note that the change has landed
 
-* Make a note in the [Dart SDK changelog](`changelog.md`) detailing the change.
-  This must be prefixed `** Breaking change:`.
+* Make a note in the [Dart SDK changelog](../../CHANGELOG.md) detailing the change.
+  This must be prefixed `**Breaking Change**`.
 
 * Reply to the original announcement email, and make a note that the change is
   being implemented.
diff --git a/pkg/analyzer/lib/src/dart/element/scope.dart b/pkg/analyzer/lib/src/dart/element/scope.dart
index 85085f8..2622bd1 100644
--- a/pkg/analyzer/lib/src/dart/element/scope.dart
+++ b/pkg/analyzer/lib/src/dart/element/scope.dart
@@ -115,46 +115,27 @@
     @required String prefix,
     @required String name,
   }) {
-    Iterable<NamespaceCombinator> getShowCombinators(
-        ImportElement importElement) {
-      return importElement.combinators.whereType<ShowElementCombinator>();
+    for (var importElement in _element.imports) {
+      if (importElement.prefix?.name == prefix &&
+          importElement.importedLibrary?.isSynthetic != false) {
+        var showCombinators = importElement.combinators
+            .whereType<ShowElementCombinator>()
+            .toList();
+        if (prefix != null && showCombinators.isEmpty) {
+          return true;
+        }
+        for (var combinator in showCombinators) {
+          if (combinator.shownNames.contains(name)) {
+            return true;
+          }
+        }
+      }
     }
 
-    if (prefix != null) {
-      for (var importElement in _element.imports) {
-        if (importElement.prefix?.name == prefix &&
-            importElement.importedLibrary?.isSynthetic != false) {
-          var showCombinators = getShowCombinators(importElement);
-          if (showCombinators.isEmpty) {
-            return true;
-          }
-          for (ShowElementCombinator combinator in showCombinators) {
-            if (combinator.shownNames.contains(name)) {
-              return true;
-            }
-          }
-        }
-      }
-    } else {
-      // TODO(scheglov) merge for(s).
-      for (var importElement in _element.imports) {
-        if (importElement.prefix == null &&
-            importElement.importedLibrary?.isSynthetic != false) {
-          for (ShowElementCombinator combinator
-              in getShowCombinators(importElement)) {
-            if (combinator.shownNames.contains(name)) {
-              return true;
-            }
-          }
-        }
-      }
-
-      if (name.startsWith(r'_$')) {
-        for (var partElement in _element.parts) {
-          if (partElement.isSynthetic &&
-              isGeneratedSource(partElement.source)) {
-            return true;
-          }
+    if (prefix == null && name.startsWith(r'_$')) {
+      for (var partElement in _element.parts) {
+        if (partElement.isSynthetic && isGeneratedSource(partElement.source)) {
+          return true;
         }
       }
     }
diff --git a/pkg/front_end/lib/src/fasta/kernel/inference_visitor.dart b/pkg/front_end/lib/src/fasta/kernel/inference_visitor.dart
index ebb0cce..36a1be3 100644
--- a/pkg/front_end/lib/src/fasta/kernel/inference_visitor.dart
+++ b/pkg/front_end/lib/src/fasta/kernel/inference_visitor.dart
@@ -6563,18 +6563,18 @@
     VariableDeclarationImpl variable = node.variable;
     DartType promotedType;
     DartType declaredOrInferredType = variable.lateType ?? variable.type;
-    if (inferrer.isNonNullableByDefault) {
+    if (isExtensionThis(variable)) {
+      // Don't promote the synthetic variable `#this` that we use to represent
+      // `this` inside extension methods.
+    } else if (inferrer.isNonNullableByDefault) {
       if (node.forNullGuardedAccess) {
         DartType nonNullableType = inferrer.computeNonNullable(variable.type);
         if (nonNullableType != variable.type) {
           promotedType = nonNullableType;
         }
       } else if (variable.isLocalFunction) {
         // Don't promote local functions.
-      } else if (isExtensionThis(variable)) {
-        // Don't promote the synthetic variable `#this` that we use to represent
-        // `this` inside extension methods.
       } else {
         promotedType = inferrer.flowAnalysis.variableRead(node, variable);
       }
     } else {
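
The change above moves the `isExtensionThis` check ahead of the NNBD check, so the synthetic `#this` variable is never promoted. A small Dart illustration (not part of this change) of the user-visible behavior this preserves:

```dart
extension NumInfo on num? {
  String describe() {
    if (this != null) {
      // Inside an extension, `this` is lowered to the synthetic `#this`
      // variable mentioned above; it is deliberately not promoted, so it is
      // still `num?` here and needs an explicit `!`.
      return 'value: ${this!.toStringAsFixed(1)}';
    }
    return 'no value';
  }
}

void main() {
  num? n = 3.14159;
  print(n.describe());              // value: 3.1
  print((null as num?).describe()); // no value
}
```
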
diff --git a/pkg/wasm/example/README.md b/pkg/wasm/example/README.md
new file mode 100644
index 0000000..c809dc66
--- /dev/null
+++ b/pkg/wasm/example/README.md
@@ -0,0 +1,34 @@
+# Example usage of package:wasm
+
+This example demonstrates how to use package:wasm to run a wasm build of the [Brotli compression library](https://github.com/google/brotli).
+
+### Running the example
+
+`dart brotli.dart lipsum.txt`
+
+This will compress lipsum.txt, report the compression ratio, then decompress it and verify that the result matches the input.
+
+### Generating wasm code
+
+libbrotli.wasm was built by cloning the [Brotli repo](https://github.com/google/brotli) and compiling it with [wasienv](https://github.com/wasienv/wasienv).
+
+There are several ways to build wasm code. The most important difference between the toolchains is how the wasm code they generate interacts with the OS. For very simple code this difference doesn't matter, but if the library does any sort of OS interaction, such as file I/O, or even allocating memory with malloc, it needs to go through either Emscripten or WASI for that interaction. package:wasm only supports WASI at the moment.
+
+To target WASI, one option is to use [wasi-libc](https://github.com/WebAssembly/wasi-libc) with a recent version of clang, passing `--target=wasm32-unknown-wasi` and pointing `--sysroot` at the wasi-libc sysroot.
+
+Another option is to build with [wasienv](https://github.com/wasienv/wasienv), a set of tools that is essentially an ergonomic wrapper around the clang + wasi-libc approach. This is how libbrotli.wasm was built:
+
+1. Install [wasienv](https://github.com/wasienv/wasienv) and clone the [Brotli repo](https://github.com/google/brotli).
+2. Compile every .c file in brotli/c/common/, dec/, and enc/ using wasicc:
+   `wasicc -c foo.c -o out/foo.o -I c/include`
+3. Link all the .o files together using wasild:
+   `wasild --no-entry --export=bar out/foo.o $wasienv_sysroot/lib/wasm32-wasi/libc.a`
+
+The `--no-entry` flag tells the linker to ignore the fact that there's no
+`main()` function, which is important for libraries. `--export=bar` exports the
+`bar()` function from the library so that package:wasm can find it; for
+libbrotli.wasm, every function in c/include/brotli/encode.h and decode.h was
+exported. Brotli uses functions from libc, so the wasm build of libc that
+ships with wasienv was also linked in. If any symbols are still undefined
+after linking, the `--allow-undefined` flag tells the linker to treat them as
+function imports, which can then be supplied from Dart code.
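
For a quick check that the `--export` flags worked, a minimal sketch (using only the package:wasm API that brotli.dart below also uses; illustration, not part of this change):

```dart
import "dart:io";
import "package:wasm/wasm.dart";

void main() {
  // Load libbrotli.wasm and print its imports and exports, to verify that
  // the functions exported at link time are actually visible.
  var data = File("libbrotli.wasm").readAsBytesSync();
  var module = WasmModule(data);
  print(module.describe());
}
```
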
diff --git a/pkg/wasm/example/brotli.dart b/pkg/wasm/example/brotli.dart
new file mode 100644
index 0000000..e9520e3
--- /dev/null
+++ b/pkg/wasm/example/brotli.dart
@@ -0,0 +1,82 @@
+// Copyright (c) 2021, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Example of using package:wasm to run a wasm build of the Brotli compression
+// library. Usage:
+// dart brotli.dart input.txt
+
+import "dart:io";
+import "dart:typed_data";
+import "package:wasm/wasm.dart";
+
+// Brotli compression parameters.
+const int kDefaultQuality = 11;
+const int kDefaultWindow = 22;
+const int kDefaultMode = 0;
+
+main(List<String> args) {
+  var brotliPath = Platform.script.resolve('libbrotli.wasm');
+  var moduleData = File(brotliPath.path).readAsBytesSync();
+  var module = WasmModule(moduleData);
+  print(module.describe());
+
+  var instance = module.instantiate().enableWasi().build();
+  var memory = instance.memory;
+  var compress = instance.lookupFunction("BrotliEncoderCompress");
+  var decompress = instance.lookupFunction("BrotliDecoderDecompress");
+
+  print("Loading ${args[0]}");
+  var inputData = File(args[0]).readAsBytesSync();
+  print("Input size: ${inputData.length} bytes");
+
+  // Grow the module's memory to get unused space to put our data.
+  // [initial memory][input data][output data][size][decoded data][size]
+  var inputPtr = memory.lengthInBytes;
+  memory.grow((3 * inputData.length / WasmMemory.kPageSizeInBytes).ceil());
+  var memoryView = memory.view;
+  var outputPtr = inputPtr + inputData.length;
+  var outSizePtr = outputPtr + inputData.length;
+  var decodedPtr = outSizePtr + 4;
+  var decSizePtr = decodedPtr + inputData.length;
+
+  memoryView.setRange(inputPtr, inputPtr + inputData.length, inputData);
+
+  var sizeBytes = ByteData(4);
+  sizeBytes.setUint32(0, inputData.length, Endian.host);
+  memoryView.setRange(
+      outSizePtr, outSizePtr + 4, sizeBytes.buffer.asUint8List());
+
+  print("\nCompressing...");
+  var status = compress(kDefaultQuality, kDefaultWindow, kDefaultMode,
+      inputData.length, inputPtr, outSizePtr, outputPtr);
+  print("Compression status: $status");
+
+  var compressedSize =
+      ByteData.sublistView(memoryView, outSizePtr, outSizePtr + 4)
+          .getUint32(0, Endian.host);
+  print("Compressed size: $compressedSize bytes");
+  var spaceSaving = 100 * (1 - compressedSize / inputData.length);
+  print("Space saving: ${spaceSaving.toStringAsFixed(2)}%");
+
+  var decSizeBytes = ByteData(4);
+  decSizeBytes.setUint32(0, inputData.length, Endian.host);
+  memoryView.setRange(
+      decSizePtr, decSizePtr + 4, decSizeBytes.buffer.asUint8List());
+
+  print("\nDecompressing...");
+  status = decompress(compressedSize, outputPtr, decSizePtr, decodedPtr);
+  print("Decompression status: $status");
+
+  var decompressedSize =
+      ByteData.sublistView(memoryView, decSizePtr, decSizePtr + 4)
+          .getUint32(0, Endian.host);
+  print("Decompressed size: $decompressedSize bytes");
+
+  print("\nVerifying decompression...");
+  assert(inputData.length == decompressedSize);
+  for (var i = 0; i < inputData.length; ++i) {
+    assert(inputData[i] == memoryView[decodedPtr + i]);
+  }
+  print("Decompression succeeded :)");
+}
diff --git a/pkg/wasm/example/libbrotli.wasm b/pkg/wasm/example/libbrotli.wasm
new file mode 100755
index 0000000..0f8ddb3
--- /dev/null
+++ b/pkg/wasm/example/libbrotli.wasm
Binary files differ
diff --git a/pkg/wasm/example/lipsum.txt b/pkg/wasm/example/lipsum.txt
new file mode 100644
index 0000000..f3b5ee0
--- /dev/null
+++ b/pkg/wasm/example/lipsum.txt
@@ -0,0 +1,10 @@
+Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nam feugiat efficitur est a sodales. Ut et odio at nunc fermentum ullamcorper nec in libero. Mauris et interdum elit. Integer in diam nec felis venenatis consectetur. Curabitur elementum egestas augue ac sollicitudin. Cras vulputate efficitur nulla. In blandit sapien ultrices maximus posuere.
+
+Suspendisse in mi at arcu scelerisque tincidunt. Nullam venenatis ex nunc, ut efficitur nulla vehicula eget. Fusce sagittis facilisis ligula nec semper. Nullam et semper ligula. Curabitur sollicitudin ultrices elit et sodales. In iaculis erat scelerisque ipsum scelerisque rutrum. Quisque sollicitudin dolor eu venenatis venenatis. Donec non varius lacus. Phasellus fermentum id mauris nec consequat. Curabitur ultrices, mauris ut scelerisque aliquam, neque augue elementum mi, at accumsan est massa vitae metus. Donec sit amet accumsan dolor, sed vehicula augue. Nunc augue ligula, faucibus tincidunt lorem sed, efficitur ullamcorper erat. Curabitur pellentesque auctor nisi. Cras placerat, massa quis scelerisque commodo, augue leo aliquam elit, sed tempor turpis neque sed nulla. Proin vulputate malesuada augue, quis finibus felis rutrum nec.
+
+Phasellus molestie, tellus eget hendrerit accumsan, diam nunc scelerisque nisi, quis aliquet augue lacus non diam. Nulla facilisi. Nulla est urna, egestas vel luctus nec, sagittis in risus. Mauris aliquam viverra massa vitae efficitur. Integer fringilla sollicitudin ex, et maximus sem gravida ultrices. Vestibulum nec sodales nulla. Cras dapibus maximus venenatis. Vivamus condimentum porttitor mollis. Aliquam congue eleifend condimentum. Donec sagittis bibendum gravida. Nulla condimentum viverra sapien, quis congue libero aliquet nec. Fusce et interdum nisi. Suspendisse at commodo eros. Mauris malesuada nisi in tortor accumsan iaculis. Nam hendrerit interdum magna, eu aliquam est varius eu. Nullam auctor ornare erat, sit amet maximus orci fringilla eu.
+
+Vivamus ullamcorper enim eget tellus lobortis mattis. Vivamus nec tincidunt ipsum. Quisque pharetra non neque non sagittis. Morbi ultrices massa nulla, ac eleifend nulla bibendum mollis. Donec in sodales massa, id luctus dolor. Pellentesque vel auctor tortor, eu imperdiet felis. Pellentesque eleifend eros ipsum, sagittis feugiat enim placerat at. Interdum et malesuada fames ac ante ipsum primis in faucibus. Aliquam lacinia pharetra est.
+
+Duis elit arcu, faucibus ac libero ut, auctor volutpat elit. Duis blandit quis felis at ultricies. Duis ac eros id velit pretium sagittis. Praesent eget orci porttitor, posuere purus ac, interdum eros. Nam augue velit, euismod nec lobortis vitae, rutrum ut libero. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean porttitor sem ante. Integer elit purus, sollicitudin sit amet est in, accumsan suscipit libero. Donec finibus metus scelerisque, mattis dui quis, suscipit turpis. Aliquam rhoncus leo ipsum, vel hendrerit ante porta sit amet. Donec dui nisi, bibendum non rutrum vel, ornare nec diam. Proin tristique ipsum eu pulvinar finibus. Duis pellentesque massa a condimentum elementum. Maecenas efficitur ac est in eleifend. Mauris sit amet lacus blandit, pulvinar ex in, commodo eros.
+
diff --git a/pkg/wasm/lib/src/module.dart b/pkg/wasm/lib/src/module.dart
index 62723d5..b06abb8 100644
--- a/pkg/wasm/lib/src/module.dart
+++ b/pkg/wasm/lib/src/module.dart
@@ -314,6 +314,9 @@
     _view[index] = value;
   }
 
+  /// Returns a Uint8List view into the memory.
+  Uint8List get view => _view;
+
   /// Grow the memory by deltaPages.
   void grow(int deltaPages) {
     var runtime = WasmRuntime();
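
The new `view` getter pairs with `grow`. A hedged usage sketch (illustration, not part of this change; it assumes growing may reallocate the backing buffer, which is why brotli.dart reads `memory.view` only after calling `grow`):

```dart
import "dart:typed_data";
import "package:wasm/wasm.dart";

/// Grows [memory] by [deltaPages] pages and returns a fresh view.
/// A previously cached view may refer to a stale buffer after growth,
/// so callers should re-read `view` after every `grow`.
Uint8List growAndView(WasmMemory memory, int deltaPages) {
  memory.grow(deltaPages);
  return memory.view;
}
```
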
diff --git a/runtime/lib/array.cc b/runtime/lib/array.cc
index d143a98..ec94448 100644
--- a/runtime/lib/array.cc
+++ b/runtime/lib/array.cc
@@ -85,7 +85,7 @@
     result.SetAt(i, temp);
   }
   result.MakeImmutable();
-  return result.raw();
+  return result.ptr();
 }
 
 }  // namespace dart
diff --git a/runtime/lib/bool.cc b/runtime/lib/bool.cc
index 7a4e416..c18a8c3 100644
--- a/runtime/lib/bool.cc
+++ b/runtime/lib/bool.cc
@@ -24,13 +24,13 @@
       String::Handle(Api::GetEnvironmentValue(thread, name));
   if (!env_value.IsNull()) {
     if (Symbols::True().Equals(env_value)) {
-      return Bool::True().raw();
+      return Bool::True().ptr();
     }
     if (Symbols::False().Equals(env_value)) {
-      return Bool::False().raw();
+      return Bool::False().ptr();
     }
   }
-  return default_value.raw();
+  return default_value.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Bool_hasEnvironment, 0, 2) {
@@ -39,9 +39,9 @@
   const String& env_value =
       String::Handle(Api::GetEnvironmentValue(thread, name));
   if (!env_value.IsNull()) {
-    return Bool::True().raw();
+    return Bool::True().ptr();
   }
-  return Bool::False().raw();
+  return Bool::False().ptr();
 }
 
 }  // namespace dart
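
These entry points back the const `bool.fromEnvironment` and `bool.hasEnvironment` constructors; the `raw()` to `ptr()` rename is behavior-preserving. For reference, the Dart-visible behavior (illustration, not part of this change):

```dart
// Run with: dart -DVERBOSE=true main.dart
const bool verbose = bool.fromEnvironment("VERBOSE"); // only "true"/"false" parse
const bool hasVerbose = bool.hasEnvironment("VERBOSE");

void main() {
  print("VERBOSE defined: $hasVerbose, value: $verbose");
}
```
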
diff --git a/runtime/lib/developer.cc b/runtime/lib/developer.cc
index fafd0ca..d45cecd 100644
--- a/runtime/lib/developer.cc
+++ b/runtime/lib/developer.cc
@@ -25,13 +25,13 @@
   GET_NATIVE_ARGUMENT(String, msg, arguments->NativeArgAt(1));
   Debugger* debugger = isolate->debugger();
   if (debugger == nullptr) {
-    return when.raw();
+    return when.ptr();
   }
   if (when.value()) {
     debugger->PauseDeveloper(msg);
   }
 #endif
-  return when.raw();
+  return when.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Developer_inspect, 0, 1) {
@@ -39,7 +39,7 @@
 #ifndef PRODUCT
   Service::SendInspectEvent(isolate, inspectee);
 #endif  // !PRODUCT
-  return inspectee.raw();
+  return inspectee.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Developer_log, 0, 8) {
diff --git a/runtime/lib/double.cc b/runtime/lib/double.cc
index 61b76a3..09136b7 100644
--- a/runtime/lib/double.cc
+++ b/runtime/lib/double.cc
@@ -134,13 +134,13 @@
     OS::PrintErr("Double_greaterThan %s > %s\n", left.ToCString(),
                  right.ToCString());
   }
-  return Bool::Get(result).raw();
+  return Bool::Get(result).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Double_greaterThanFromInteger, 0, 2) {
   const Double& right = Double::CheckedHandle(zone, arguments->NativeArgAt(0));
   GET_NON_NULL_NATIVE_ARGUMENT(Integer, left, arguments->NativeArgAt(1));
-  return Bool::Get(left.AsDoubleValue() > right.value()).raw();
+  return Bool::Get(left.AsDoubleValue() > right.value()).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Double_equal, 0, 2) {
@@ -151,13 +151,13 @@
     OS::PrintErr("Double_equal %s == %s\n", left.ToCString(),
                  right.ToCString());
   }
-  return Bool::Get(result).raw();
+  return Bool::Get(result).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Double_equalToInteger, 0, 2) {
   const Double& left = Double::CheckedHandle(zone, arguments->NativeArgAt(0));
   GET_NON_NULL_NATIVE_ARGUMENT(Integer, right, arguments->NativeArgAt(1));
-  return Bool::Get(left.value() == right.AsDoubleValue()).raw();
+  return Bool::Get(left.value() == right.AsDoubleValue()).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Double_round, 0, 1) {
@@ -264,19 +264,19 @@
 
 DEFINE_NATIVE_ENTRY(Double_getIsInfinite, 0, 1) {
   const Double& arg = Double::CheckedHandle(zone, arguments->NativeArgAt(0));
-  return Bool::Get(isinf(arg.value())).raw();
+  return Bool::Get(isinf(arg.value())).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Double_getIsNaN, 0, 1) {
   const Double& arg = Double::CheckedHandle(zone, arguments->NativeArgAt(0));
-  return Bool::Get(isnan(arg.value())).raw();
+  return Bool::Get(isnan(arg.value())).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Double_getIsNegative, 0, 1) {
   const Double& arg = Double::CheckedHandle(zone, arguments->NativeArgAt(0));
   // Include negative zero, infinity.
   double dval = arg.value();
-  return Bool::Get(signbit(dval) && !isnan(dval)).raw();
+  return Bool::Get(signbit(dval) && !isnan(dval)).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Double_flipSignBit, 0, 1) {
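
The predicates here are the natives behind `double.isNaN`, `double.isInfinite`, and `double.isNegative`; note that `Double_getIsNegative` is a signbit test that explicitly excludes NaN. From the Dart side (illustration, not part of this change):

```dart
void main() {
  print((0.0 / 0.0).isNaN);      // true
  print((1.0 / 0.0).isInfinite); // true
  print((-0.0).isNegative);      // true: signbit is set even though -0.0 == 0.0
  print(double.nan.isNegative);  // false: NaN is excluded by the !isnan check
}
```
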
diff --git a/runtime/lib/errors.cc b/runtime/lib/errors.cc
index 24d84ab..42c0acc 100644
--- a/runtime/lib/errors.cc
+++ b/runtime/lib/errors.cc
@@ -40,7 +40,7 @@
           return func.script();
         }
         ASSERT(!hit_assertion_error);
-        hit_assertion_error = (func.Owner() == assert_error_class.raw());
+        hit_assertion_error = (func.Owner() == assert_error_class.ptr());
         inlined_iterator.Advance();
       }
       continue;
@@ -52,7 +52,7 @@
       return func.script();
     }
     ASSERT(!hit_assertion_error);
-    hit_assertion_error = (func.Owner() == assert_error_class.raw());
+    hit_assertion_error = (func.Owner() == assert_error_class.ptr());
   }
   UNREACHABLE();
   return Script::null();
@@ -93,7 +93,7 @@
         script.GetSnippet(from_line, from_column, to_line, to_column);
   }
   if (condition_text.IsNull()) {
-    condition_text = Symbols::OptimizedOut().raw();
+    condition_text = Symbols::OptimizedOut().ptr();
   }
   args.SetAt(0, condition_text);
 
diff --git a/runtime/lib/ffi.cc b/runtime/lib/ffi.cc
index 967a4ef..d703d0b 100644
--- a/runtime/lib/ffi.cc
+++ b/runtime/lib/ffi.cc
@@ -110,7 +110,7 @@
   const Object& constructorResult =
       Object::Handle(DartEntry::InvokeFunction(constructor, args));
   ASSERT(!constructorResult.IsError());
-  return new_object.raw();
+  return new_object.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Ffi_loadStruct, 0, 2) {
@@ -269,7 +269,7 @@
                                  native_signature, func, exceptional_return)));
 
   // Because we have already set the return value.
-  return Object::sentinel().raw();
+  return Object::sentinel().ptr();
 #endif
 }
 
@@ -292,7 +292,7 @@
     Exceptions::PropagateError(Error::Cast(result));
   }
   ASSERT(result.IsCode());
-  code ^= result.raw();
+  code ^= result.ptr();
 #endif
 
   ASSERT(!code.IsNull());
diff --git a/runtime/lib/function.cc b/runtime/lib/function.cc
index 1ea506c..07e839f 100644
--- a/runtime/lib/function.cc
+++ b/runtime/lib/function.cc
@@ -27,7 +27,7 @@
   if (result.IsError()) {
     Exceptions::PropagateError(Error::Cast(result));
   }
-  return result.raw();
+  return result.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Closure_equals, 0, 2) {
@@ -39,8 +39,8 @@
   // name and owner (multiple function objects could exist for the same
   // function due to hot reload).
   // Objects of other closure kinds are unique, so use identity comparison.
-  if (receiver.raw() == other.raw()) {
-    return Bool::True().raw();
+  if (receiver.ptr() == other.ptr()) {
+    return Bool::True().ptr();
   }
   if (other.IsClosure()) {
     const Function& func_a = Function::Handle(zone, receiver.function());
@@ -54,15 +54,15 @@
         ObjectPtr receiver_a = context_a.At(0);
         ObjectPtr receiver_b = context_b.At(0);
         if ((receiver_a == receiver_b) &&
-            ((func_a.raw() == func_b.raw()) ||
+            ((func_a.ptr() == func_b.ptr()) ||
              ((func_a.name() == func_b.name()) &&
               (func_a.Owner() == func_b.Owner())))) {
-          return Bool::True().raw();
+          return Bool::True().ptr();
         }
       }
     }
   }
-  return Bool::False().raw();
+  return Bool::False().ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Closure_computeHash, 0, 1) {
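
`Closure_equals` defines `==` for closures: identical objects are equal, and instance-method tear-offs are equal when they have the same bound receiver and the same function (compared by name and owner to stay stable across hot reload). Observable from Dart (illustration, not part of this change):

```dart
class C {
  void m() {}
}

void main() {
  var c = C();
  print(c.m == c.m);   // true: same receiver, same function
  print(c.m == C().m); // false: different receivers
  void local() {}
  var f = local;
  print(f == f);       // true: identical closure object
}
```
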
diff --git a/runtime/lib/growable_array.cc b/runtime/lib/growable_array.cc
index 71d9f2a..1f8bd54 100644
--- a/runtime/lib/growable_array.cc
+++ b/runtime/lib/growable_array.cc
@@ -24,7 +24,7 @@
   const GrowableObjectArray& new_array =
       GrowableObjectArray::Handle(GrowableObjectArray::New(data));
   new_array.SetTypeArguments(type_arguments);
-  return new_array.raw();
+  return new_array.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(GrowableList_getIndexed, 0, 2) {
@@ -35,7 +35,7 @@
     Exceptions::ThrowRangeError("index", index, 0, array.Length() - 1);
   }
   const Instance& obj = Instance::CheckedHandle(zone, array.At(index.Value()));
-  return obj.raw();
+  return obj.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(GrowableList_setIndexed, 0, 3) {
@@ -89,7 +89,7 @@
 DEFINE_NATIVE_ENTRY(Internal_makeFixedListUnmodifiable, 0, 1) {
   GET_NON_NULL_NATIVE_ARGUMENT(Array, array, arguments->NativeArgAt(0));
   array.MakeImmutable();
-  return array.raw();
+  return array.ptr();
 }
 
 }  // namespace dart
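
`Internal_makeFixedListUnmodifiable` appears to be the VM primitive behind `List.unmodifiable` (an assumption from the entry point's name, not stated in this diff). The user-visible contract (illustration, not part of this change):

```dart
void main() {
  var list = List.unmodifiable([1, 2, 3]);
  print(list); // [1, 2, 3]
  try {
    list[0] = 9; // the backing array has been made immutable
  } on UnsupportedError catch (e) {
    print("write rejected: $e");
  }
}
```
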
diff --git a/runtime/lib/identical.cc b/runtime/lib/identical.cc
index c0d2b96..832ded4 100644
--- a/runtime/lib/identical.cc
+++ b/runtime/lib/identical.cc
@@ -12,7 +12,7 @@
   GET_NATIVE_ARGUMENT(Instance, a, arguments->NativeArgAt(0));
   GET_NATIVE_ARGUMENT(Instance, b, arguments->NativeArgAt(1));
   const bool is_identical = a.IsIdenticalTo(b);
-  return Bool::Get(is_identical).raw();
+  return Bool::Get(is_identical).ptr();
 }
 
 }  // namespace dart
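
This native backs dart:core's `identical`, which compares object identity rather than `==` (illustration, not part of this change):

```dart
class Point {
  final int x;
  const Point(this.x);
}

void main() {
  print(identical(const Point(1), const Point(1))); // true: consts are canonicalized
  print(identical(Point(1), Point(1)));             // false: two separate allocations
  var p = Point(2);
  print(identical(p, p));                           // true: same object
}
```
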
diff --git a/runtime/lib/integers.cc b/runtime/lib/integers.cc
index 1c89c6d..af4a2a7 100644
--- a/runtime/lib/integers.cc
+++ b/runtime/lib/integers.cc
@@ -143,7 +143,7 @@
     OS::PrintErr("Integer_greaterThanFromInteger %s > %s\n", left.ToCString(),
                  right.ToCString());
   }
-  return Bool::Get(left.CompareWith(right) == 1).raw();
+  return Bool::Get(left.CompareWith(right) == 1).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Integer_equalToInteger, 0, 2) {
@@ -155,7 +155,7 @@
     OS::PrintErr("Integer_equalToInteger %s == %s\n", left.ToCString(),
                  right.ToCString());
   }
-  return Bool::Get(left.CompareWith(right) == 0).raw();
+  return Bool::Get(left.CompareWith(right) == 0).ptr();
 }
 
 static IntegerPtr ParseInteger(const String& value) {
@@ -194,12 +194,12 @@
     const Integer& result = Integer::Handle(ParseInteger(env_value));
     if (!result.IsNull()) {
       if (result.IsSmi()) {
-        return result.raw();
+        return result.ptr();
       }
       return result.Canonicalize(thread);
     }
   }
-  return default_value.raw();
+  return default_value.ptr();
 }
 
 static IntegerPtr ShiftOperationHelper(Token::Kind kind,
diff --git a/runtime/lib/isolate.cc b/runtime/lib/isolate.cc
index 23352fc..b5d898e 100644
--- a/runtime/lib/isolate.cc
+++ b/runtime/lib/isolate.cc
@@ -39,7 +39,7 @@
 DEFINE_NATIVE_ENTRY(CapabilityImpl_equals, 0, 2) {
   GET_NON_NULL_NATIVE_ARGUMENT(Capability, recv, arguments->NativeArgAt(0));
   GET_NON_NULL_NATIVE_ARGUMENT(Capability, other, arguments->NativeArgAt(1));
-  return (recv.Id() == other.Id()) ? Bool::True().raw() : Bool::False().raw();
+  return (recv.Id() == other.Id()) ? Bool::True().ptr() : Bool::False().ptr();
 }
 
 DEFINE_NATIVE_ENTRY(CapabilityImpl_get_hashcode, 0, 1) {
@@ -107,9 +107,9 @@
   const Dart_Port destination_port_id = port.Id();
   const bool can_send_any_object = isolate->origin_id() == port.origin_id();
 
-  if (ApiObjectConverter::CanConvert(obj.raw())) {
+  if (ApiObjectConverter::CanConvert(obj.ptr())) {
     PortMap::PostMessage(
-        Message::New(destination_port_id, obj.raw(), Message::kNormalPriority));
+        Message::New(destination_port_id, obj.ptr(), Message::kNormalPriority));
   } else {
     MessageWriter writer(can_send_any_object);
     // TODO(turnidge): Throw an exception when the return value is false?
@@ -119,7 +119,7 @@
   return Object::null();
 }
 
-class ObjectPtrSetTraitsLayout {
+class UntaggedObjectPtrSetTraits {
  public:
   static bool ReportStats() { return false; }
   static const char* Name() { return "RawObjectPtrSetTraits"; }
@@ -146,7 +146,7 @@
    private:
     void VisitPointers(ObjectPtr* from, ObjectPtr* to) {
       for (ObjectPtr* raw = from; raw <= to; raw++) {
-        if (!(*raw)->IsHeapObject() || (*raw)->ptr()->IsCanonical()) {
+        if (!(*raw)->IsHeapObject() || (*raw)->untag()->IsCanonical()) {
           continue;
         }
         if (visited_->GetValueExclusive(*raw) == 1) {
@@ -160,8 +160,8 @@
     WeakTable* visited_;
     MallocGrowableArray<ObjectPtr>* const working_set_;
   };
-  if (!obj.raw()->IsHeapObject() || obj.raw()->ptr()->IsCanonical()) {
-    return obj.raw();
+  if (!obj.ptr()->IsHeapObject() || obj.ptr()->untag()->IsCanonical()) {
+    return obj.ptr();
   }
   ClassTable* class_table = isolate->group()->class_table();
 
@@ -184,8 +184,8 @@
 
     SendMessageValidator visitor(isolate->group(), visited.get(), &working_set);
 
-    visited->SetValueExclusive(obj.raw(), 1);
-    working_set.Add(obj.raw());
+    visited->SetValueExclusive(obj.ptr(), 1);
+    working_set.Add(obj.ptr());
 
     while (!working_set.is_empty() && !error_found) {
       ObjectPtr raw = working_set.RemoveLast();
@@ -225,13 +225,13 @@
           if (cid >= kNumPredefinedCids) {
             klass = class_table->At(cid);
             if (klass.num_native_fields() != 0) {
-              erroneous_nativewrapper_class = klass.raw();
+              erroneous_nativewrapper_class = klass.ptr();
               error_found = true;
               break;
             }
           }
       }
-      raw->ptr()->VisitPointers(&visitor);
+      raw->untag()->VisitPointers(&visitor);
     }
   }
   if (error_found) {
@@ -255,7 +255,7 @@
         zone, Exceptions::kArgumentValue, exception_message);
   }
   isolate->set_forward_table_new(nullptr);
-  return obj.raw();
+  return obj.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(SendPortImpl_sendAndExitInternal_, 0, 2) {
@@ -271,7 +271,7 @@
   GET_NON_NULL_NATIVE_ARGUMENT(Instance, obj, arguments->NativeArgAt(1));
 
   Object& validated_result = Object::Handle(zone);
-  Object& msg_obj = Object::Handle(zone, obj.raw());
+  Object& msg_obj = Object::Handle(zone, obj.ptr());
   validated_result = ValidateMessageObject(zone, isolate, msg_obj);
   if (validated_result.IsUnhandledException()) {
     Exceptions::PropagateError(Error::Cast(validated_result));
@@ -279,7 +279,7 @@
   }
   PersistentHandle* handle =
       isolate->group()->api_state()->AllocatePersistentHandle();
-  handle->set_raw(msg_obj);
+  handle->set_ptr(msg_obj);
   isolate->bequeath(std::unique_ptr<Bequest>(new Bequest(handle, port.Id())));
   // TODO(aam): Ensure there are no dart api calls after this point as we want
   // to ensure that validated message won't get tampered with.
@@ -472,7 +472,7 @@
       // Check whether main is reexported from the root library.
       const Object& obj = Object::Handle(zone, lib.LookupReExport(func_name));
       if (obj.IsFunction()) {
-        func ^= obj.raw();
+        func ^= obj.ptr();
       }
     }
     if (func.IsNull()) {
@@ -482,7 +482,7 @@
                     function_name(), script_url()));
       return LanguageError::New(msg);
     }
-    return func.raw();
+    return func.ptr();
   }
 
   // Lookup the to be spawned function for the Isolate.spawn implementation.
@@ -508,7 +508,7 @@
                     function_name(), library_url()));
       return LanguageError::New(msg);
     }
-    return func.raw();
+    return func.ptr();
   }
 
   const String& cls_name = String::Handle(zone, String::New(class_name()));
@@ -533,7 +533,7 @@
                   (library_url() != nullptr ? library_url() : script_url())));
     return LanguageError::New(msg);
   }
-  return func.raw();
+  return func.ptr();
 }
 
 static InstancePtr DeserializeMessage(Thread* thread, Message* message) {
@@ -547,7 +547,7 @@
     MessageSnapshotReader reader(message, thread);
     const Object& obj = Object::Handle(zone, reader.ReadObject());
     ASSERT(!obj.IsError());
-    return Instance::RawCast(obj.raw());
+    return Instance::RawCast(obj.ptr());
   }
 }
 
@@ -720,7 +720,7 @@
       return false;
     }
     ASSERT(result.IsFunction());
-    auto& func = Function::Handle(zone, Function::Cast(result).raw());
+    auto& func = Function::Handle(zone, Function::Cast(result).ptr());
     func = func.ImplicitClosureFunction();
     const auto& entrypoint_closure =
         Object::Handle(zone, func.ImplicitStaticClosure());
@@ -882,7 +882,7 @@
       result = String2UTF8(String::Cast(obj));
     } else if (obj.IsError()) {
       Error& error_obj = Error::Handle();
-      error_obj ^= obj.raw();
+      error_obj ^= obj.ptr();
       *error = zone->PrintToString("Unable to canonicalize uri '%s': %s",
                                    uri.ToCString(), error_obj.ToErrorCString());
     } else {
@@ -983,7 +983,7 @@
       1, Capability::Handle(Capability::New(isolate->pause_capability())));
   result.SetAt(
       2, Capability::Handle(Capability::New(isolate->terminate_capability())));
-  return result.raw();
+  return result.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Isolate_getCurrentRootUriStr, 0, 0) {
@@ -1045,7 +1045,7 @@
     array ^= growable_array.data();
     array_length = growable_array.Length();
   } else if (array_instance.IsArray()) {
-    array ^= Array::Cast(array_instance).raw();
+    array ^= Array::Cast(array_instance).ptr();
     array_length = array.Length();
   } else {
     Exceptions::ThrowArgumentError(array_instance);
@@ -1102,7 +1102,7 @@
   void* peer;
   {
     NoSafepointScope no_safepoint;
-    peer = thread->heap()->GetPeer(t.raw());
+    peer = thread->heap()->GetPeer(t.ptr());
     // Assume that object's Peer is only used to track transferrability state.
     ASSERT(peer != nullptr);
   }
@@ -1126,7 +1126,7 @@
                                    /* peer= */ data,
                                    &ExternalTypedDataFinalizer, length,
                                    /*auto_delete=*/true);
-  return typed_data.raw();
+  return typed_data.ptr();
 }
 
 }  // namespace dart
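
The peer lookup and `ExternalTypedDataFinalizer` hunks above service `TransferableTypedData`, which hands a byte buffer to another isolate without copying. The Dart-visible contract (illustration, not part of this change):

```dart
import "dart:isolate";
import "dart:typed_data";

void main() {
  var bytes = Uint8List.fromList([1, 2, 3, 4]);
  var transferable = TransferableTypedData.fromList([bytes]);
  // materialize() may be called exactly once; normally that happens in the
  // receiving isolate after the transferable is sent over a SendPort.
  var received = transferable.materialize().asUint8List();
  print(received); // [1, 2, 3, 4]
}
```
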
diff --git a/runtime/lib/math.cc b/runtime/lib/math.cc
index b1e7513..9346d1e 100644
--- a/runtime/lib/math.cc
+++ b/runtime/lib/math.cc
@@ -86,7 +86,7 @@
   const TypedData& array = TypedData::Cast(state_field_value);
   ASSERT(array.Length() == 2);
   ASSERT(array.ElementType() == kUint32ArrayElement);
-  return array.raw();
+  return array.ptr();
 }
 
 // Implements:
@@ -113,7 +113,7 @@
   result.SetUint32(0, static_cast<uint32_t>(seed));
   result.SetUint32(result.ElementSizeInBytes(),
                    static_cast<uint32_t>(seed >> 32));
-  return result.raw();
+  return result.ptr();
 }
 
 uint64_t mix64(uint64_t n) {
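
The helpers patched here create and seed the two-element Uint32 state array behind `dart:math`'s `Random`; seeding is deterministic, which is easy to check from Dart (illustration, not part of this change):

```dart
import "dart:math";

void main() {
  var a = Random(42);
  var b = Random(42);
  // Identical seeds produce identical sequences.
  print(List.generate(5, (_) => a.nextInt(100)));
  print(List.generate(5, (_) => b.nextInt(100)));
}
```
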
diff --git a/runtime/lib/mirrors.cc b/runtime/lib/mirrors.cc
index 2c0d827..d2d9a4f 100644
--- a/runtime/lib/mirrors.cc
+++ b/runtime/lib/mirrors.cc
@@ -38,7 +38,7 @@
   if (result.IsError()) {
     Exceptions::PropagateError(Error::Cast(result));
   }
-  return Instance::Cast(result).raw();
+  return Instance::Cast(result).ptr();
 }
 
 // Conventions:
@@ -140,7 +140,7 @@
       Exceptions::PropagateError(Error::Cast(result));
       UNREACHABLE();
     }
-    param_descriptor ^= result.raw();
+    param_descriptor ^= result.ptr();
     ASSERT(param_descriptor.Length() ==
            (Parser::kParameterEntrySize * non_implicit_param_count));
   }
@@ -149,7 +149,7 @@
   args.SetAt(2, owner_mirror);
 
   if (!has_extra_parameter_info) {
-    is_final = Bool::True().raw();
+    is_final = Bool::True().ptr();
     default_value = Object::null();
     metadata = Object::null();
   }
@@ -184,7 +184,7 @@
     results.SetAt(i, param);
   }
   results.MakeImmutable();
-  return results.raw();
+  return results.ptr();
 }
 
 static InstancePtr CreateTypeVariableMirror(const TypeParameter& param,
@@ -201,7 +201,7 @@
 static InstancePtr CreateTypeVariableList(const Class& cls) {
   const TypeArguments& args = TypeArguments::Handle(cls.type_parameters());
   if (args.IsNull()) {
-    return Object::empty_array().raw();
+    return Object::empty_array().ptr();
   }
   const Array& result = Array::Handle(Array::New(args.Length() * 2));
   TypeParameter& type = TypeParameter::Handle();
@@ -214,7 +214,7 @@
     result.SetAt(2 * i, name);
     result.SetAt(2 * i + 1, type);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 static InstancePtr CreateFunctionTypeMirror(const AbstractType& type) {
@@ -249,7 +249,7 @@
       (static_cast<intptr_t>(func.IsGetterFunction()) << Mirrors::kGetter);
   kind_flags |=
       (static_cast<intptr_t>(func.IsSetterFunction()) << Mirrors::kSetter);
-  bool is_ctor = (func.kind() == FunctionLayout::kConstructor);
+  bool is_ctor = (func.kind() == UntaggedFunction::kConstructor);
   kind_flags |= (static_cast<intptr_t>(is_ctor) << Mirrors::kConstructor);
   kind_flags |= (static_cast<intptr_t>(is_ctor && func.is_const())
                  << Mirrors::kConstCtor);
@@ -491,7 +491,7 @@
   while (entries.HasNext()) {
     entry = entries.GetNext();
     if (entry.IsLibraryPrefix()) {
-      prefix ^= entry.raw();
+      prefix ^= entry.ptr();
       ports = prefix.imports();
       for (intptr_t i = 0; i < ports.Length(); i++) {
         ns ^= ports.At(i);
@@ -506,7 +506,7 @@
     }
   }
 
-  return deps.raw();
+  return deps.ptr();
 }
 
 static InstancePtr CreateTypeMirror(const AbstractType& type) {
@@ -644,7 +644,7 @@
       library_mirrors.Add(library_mirror);
     }
   }
-  return library_mirrors.raw();
+  return library_mirrors.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(MirrorSystem_isolate, 0, 0) {
@@ -671,7 +671,7 @@
   // Canonicalize library URI.
   String& canonical_uri = String::Handle(zone);
   if (uri.StartsWith(Symbols::DartScheme())) {
-    canonical_uri = uri.raw();
+    canonical_uri = uri.ptr();
   } else {
     isolate->BlockClassFinalization();
     const Object& result = Object::Handle(
@@ -690,7 +690,7 @@
       ThrowLanguageError("library handler failed URI canonicalization");
     }
 
-    canonical_uri ^= result.raw();
+    canonical_uri ^= result.ptr();
   }
 
   // Return the existing library if it has already been loaded.
@@ -810,20 +810,20 @@
 
   Type& instantiated_type = Type::Handle(Type::New(clz, type_args_obj));
   instantiated_type ^= ClassFinalizer::FinalizeType(instantiated_type);
-  return instantiated_type.raw();
+  return instantiated_type.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Mirrors_mangleName, 0, 2) {
   GET_NON_NULL_NATIVE_ARGUMENT(String, name, arguments->NativeArgAt(0));
   GET_NON_NULL_NATIVE_ARGUMENT(MirrorReference, ref, arguments->NativeArgAt(1));
   const Library& lib = Library::Handle(ref.GetLibraryReferent());
-  return lib.IsPrivate(name) ? lib.PrivateName(name) : name.raw();
+  return lib.IsPrivate(name) ? lib.PrivateName(name) : name.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(MirrorReference_equals, 0, 2) {
   GET_NON_NULL_NATIVE_ARGUMENT(MirrorReference, a, arguments->NativeArgAt(0));
   GET_NON_NULL_NATIVE_ARGUMENT(MirrorReference, b, arguments->NativeArgAt(1));
-  return Bool::Get(a.referent() == b.referent()).raw();
+  return Bool::Get(a.referent() == b.referent()).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(DeclarationMirror_metadata, 0, 1) {
@@ -833,7 +833,7 @@
     const MirrorReference& decl_ref = MirrorReference::Cast(reflectee);
     decl = decl_ref.referent();
   } else if (reflectee.IsTypeParameter()) {
-    decl = reflectee.raw();
+    decl = reflectee.ptr();
   } else {
     UNREACHABLE();
   }
@@ -842,7 +842,7 @@
   Library& library = Library::Handle();
 
   if (decl.IsClass()) {
-    klass ^= decl.raw();
+    klass ^= decl.ptr();
     library = klass.library();
   } else if (decl.IsFunction()) {
     klass = Function::Cast(decl).origin();
@@ -851,19 +851,19 @@
     klass = Field::Cast(decl).Origin();
     library = klass.library();
   } else if (decl.IsLibrary()) {
-    library ^= decl.raw();
+    library ^= decl.ptr();
   } else if (decl.IsTypeParameter()) {
     // There is no reference from a canonical type parameter to its declaration.
-    return Object::empty_array().raw();
+    return Object::empty_array().ptr();
   } else {
-    return Object::empty_array().raw();
+    return Object::empty_array().ptr();
   }
 
   const Object& metadata = Object::Handle(library.GetMetadata(decl));
   if (metadata.IsError()) {
     Exceptions::PropagateError(Error::Cast(metadata));
   }
-  return metadata.raw();
+  return metadata.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(FunctionTypeMirror_call_method, 0, 2) {
@@ -913,7 +913,7 @@
           : type.type_class());
   const AbstractType& super_type = AbstractType::Handle(cls.super_type());
   ASSERT(super_type.IsNull() || super_type.IsFinalized());
-  return super_type.raw();
+  return super_type.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(ClassMirror_supertype_instantiated, 0, 1) {
@@ -964,7 +964,7 @@
     interfaces_inst.SetAt(i, interface);
   }
 
-  return interfaces_inst.raw();
+  return interfaces_inst.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(ClassMirror_mixin, 0, 1) {
@@ -980,7 +980,7 @@
     mixin_type ^= interfaces.At(interfaces.Length() - 1);
   }
   ASSERT(mixin_type.IsNull() || mixin_type.IsFinalized());
-  return mixin_type.raw();
+  return mixin_type.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(ClassMirror_mixin_instantiated, 0, 2) {
@@ -998,7 +998,7 @@
     mixin_type ^= interfaces.At(interfaces.Length() - 1);
   }
   if (mixin_type.IsNull()) {
-    return mixin_type.raw();
+    return mixin_type.ptr();
   }
 
   return InstantiateType(mixin_type, instantiator);
@@ -1040,16 +1040,16 @@
   for (intptr_t i = 0; i < num_functions; i++) {
     func ^= functions.At(i);
     if (func.is_reflectable() &&
-        (func.kind() == FunctionLayout::kRegularFunction ||
-         func.kind() == FunctionLayout::kGetterFunction ||
-         func.kind() == FunctionLayout::kSetterFunction)) {
+        (func.kind() == UntaggedFunction::kRegularFunction ||
+         func.kind() == UntaggedFunction::kGetterFunction ||
+         func.kind() == UntaggedFunction::kSetterFunction)) {
       member_mirror =
           CreateMethodMirror(func, owner_mirror, owner_instantiator);
       member_mirrors.Add(member_mirror);
     }
   }
 
-  return member_mirrors.raw();
+  return member_mirrors.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(ClassMirror_constructors, 0, 3) {
@@ -1075,14 +1075,15 @@
   Function& func = Function::Handle();
   for (intptr_t i = 0; i < num_functions; i++) {
     func ^= functions.At(i);
-    if (func.is_reflectable() && func.kind() == FunctionLayout::kConstructor) {
+    if (func.is_reflectable() &&
+        func.kind() == UntaggedFunction::kConstructor) {
       constructor_mirror =
           CreateMethodMirror(func, owner_mirror, owner_instantiator);
       constructor_mirrors.Add(constructor_mirror);
     }
   }
 
-  return constructor_mirrors.raw();
+  return constructor_mirrors.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(LibraryMirror_members, 0, 2) {
@@ -1128,9 +1129,9 @@
     } else if (entry.IsFunction()) {
       const Function& func = Function::Cast(entry);
       if (func.is_reflectable() &&
-          (func.kind() == FunctionLayout::kRegularFunction ||
-           func.kind() == FunctionLayout::kGetterFunction ||
-           func.kind() == FunctionLayout::kSetterFunction)) {
+          (func.kind() == UntaggedFunction::kRegularFunction ||
+           func.kind() == UntaggedFunction::kGetterFunction ||
+           func.kind() == UntaggedFunction::kSetterFunction)) {
         member_mirror =
             CreateMethodMirror(func, owner_mirror, AbstractType::Handle());
         member_mirrors.Add(member_mirror);
@@ -1138,7 +1139,7 @@
     }
   }
 
-  return member_mirrors.raw();
+  return member_mirrors.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(ClassMirror_type_variables, 0, 1) {
@@ -1162,7 +1163,7 @@
   const intptr_t num_params = cls.NumTypeParameters();
 
   if (num_params == 0) {
-    return Object::empty_array().raw();
+    return Object::empty_array().ptr();
   }
 
   const Array& result = Array::Handle(Array::New(num_params));
@@ -1174,12 +1175,12 @@
   // arguments have been provided, or all arguments are dynamic. Return a list
   // of typemirrors on dynamic in this case.
   if (args.IsNull()) {
-    arg_type = Object::dynamic_type().raw();
+    arg_type = Object::dynamic_type().ptr();
     type_mirror = CreateTypeMirror(arg_type);
     for (intptr_t i = 0; i < num_params; i++) {
       result.SetAt(i, type_mirror);
     }
-    return result.raw();
+    return result.ptr();
   }
 
   ASSERT(args.Length() >= num_params);
@@ -1189,7 +1190,7 @@
     type_mirror = CreateTypeMirror(arg_type);
     result.SetAt(i, type_mirror);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(TypeVariableMirror_owner, 0, 1) {
@@ -1261,7 +1262,7 @@
       // In the case of extension methods also we avoid handing out a reference
       // to the tear-off and instead get the parent function of the
       // anonymous closure.
-      function = parent.raw();
+      function = parent.ptr();
     }
 
     Type& instantiator = Type::Handle();
@@ -1343,20 +1344,20 @@
   // unnamed constructor for class 'A' is labeled 'A.'.
   // This convention prevents users from explicitly calling constructors.
   const String& klass_name = String::Handle(klass.Name());
-  String& external_constructor_name = String::Handle(klass_name.raw());
+  String& external_constructor_name = String::Handle(klass_name.ptr());
   String& internal_constructor_name =
       String::Handle(String::Concat(klass_name, Symbols::Dot()));
   if (!constructor_name.IsNull() && constructor_name.Length() > 0) {
     internal_constructor_name =
         String::Concat(internal_constructor_name, constructor_name);
-    external_constructor_name = internal_constructor_name.raw();
+    external_constructor_name = internal_constructor_name.ptr();
   }
 
   Function& lookup_constructor = Function::Handle(
       Resolver::ResolveFunction(zone, klass, internal_constructor_name));
 
   if (lookup_constructor.IsNull() ||
-      (lookup_constructor.kind() != FunctionLayout::kConstructor) ||
+      (lookup_constructor.kind() != UntaggedFunction::kConstructor) ||
       !lookup_constructor.is_reflectable()) {
     ThrowNoSuchMethod(AbstractType::Handle(klass.RareType()),
                       external_constructor_name, explicit_args, arg_names,
@@ -1384,7 +1385,7 @@
     type_arguments = rare_type.arguments();
   }
 
-  Class& redirected_klass = Class::Handle(klass.raw());
+  Class& redirected_klass = Class::Handle(klass.ptr());
   const intptr_t num_explicit_args = explicit_args.Length();
   const intptr_t num_implicit_args = 1;
   const Array& args =
@@ -1454,9 +1455,9 @@
   ASSERT(result.IsInstance() || result.IsNull());
 
   if (lookup_constructor.IsGenerativeConstructor()) {
-    return new_object.raw();
+    return new_object.ptr();
   } else {
-    return result.raw();
+    return result.ptr();
   }
 }
 
@@ -1554,7 +1555,7 @@
     const MirrorReference& decl_ref = MirrorReference::Cast(reflectee);
     decl = decl_ref.referent();
   } else if (reflectee.IsTypeParameter()) {
-    decl = reflectee.raw();
+    decl = reflectee.ptr();
   } else {
     UNREACHABLE();
   }
@@ -1585,7 +1586,7 @@
     return Instance::null();
   } else if (decl.IsLibrary()) {
     const Library& lib = Library::Cast(decl);
-    if (lib.raw() == Library::NativeWrappersLibrary()) {
+    if (lib.ptr() == Library::NativeWrappersLibrary()) {
       return Instance::null();  // No source.
     }
     const Array& scripts = Array::Handle(zone, lib.LoadedScripts());
@@ -1633,7 +1634,7 @@
 DEFINE_NATIVE_ENTRY(TypeMirror_subtypeTest, 0, 2) {
   GET_NON_NULL_NATIVE_ARGUMENT(AbstractType, a, arguments->NativeArgAt(0));
   GET_NON_NULL_NATIVE_ARGUMENT(AbstractType, b, arguments->NativeArgAt(1));
-  return Bool::Get(a.IsSubtypeOf(b, Heap::kNew)).raw();
+  return Bool::Get(a.IsSubtypeOf(b, Heap::kNew)).ptr();
 }
 
 #endif  // !DART_PRECOMPILED_RUNTIME
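
mirrors.cc implements `dart:mirrors`, which is why the file is guarded by `!DART_PRECOMPILED_RUNTIME`: reflective invocation is JIT-only. A small example exercising the method-mirror and invocation paths touched above (illustration, not part of this change):

```dart
import "dart:mirrors";

class Greeter {
  String greet(String who) => "Hello, $who!";
}

void main() {
  var mirror = reflect(Greeter());
  var result = mirror.invoke(#greet, ["world"]).reflectee;
  print(result); // Hello, world!
}
```
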
diff --git a/runtime/lib/object.cc b/runtime/lib/object.cc
index fae4910..103983e 100644
--- a/runtime/lib/object.cc
+++ b/runtime/lib/object.cc
@@ -54,7 +54,7 @@
   const Instance& instance =
       Instance::CheckedHandle(zone, arguments->NativeArgAt(0));
   Heap* heap = isolate->group()->heap();
-  heap->SetHash(instance.raw(), hash.Value());
+  heap->SetHash(instance.ptr(), hash.Value());
 #endif
   return Object::null();
 }
@@ -63,7 +63,7 @@
   const Instance& instance =
       Instance::CheckedHandle(zone, arguments->NativeArgAt(0));
   if (instance.IsString()) {
-    return instance.raw();
+    return instance.ptr();
   }
   if (instance.IsAbstractType()) {
     return AbstractType::Cast(instance).UserVisibleName();
@@ -98,13 +98,13 @@
 
   if (left_cid != right_cid) {
     if (IsIntegerClassId(left_cid)) {
-      return Bool::Get(IsIntegerClassId(right_cid)).raw();
+      return Bool::Get(IsIntegerClassId(right_cid)).ptr();
     } else if (IsStringClassId(left_cid)) {
-      return Bool::Get(IsStringClassId(right_cid)).raw();
+      return Bool::Get(IsStringClassId(right_cid)).ptr();
     } else if (IsTypeClassId(left_cid)) {
-      return Bool::Get(IsTypeClassId(right_cid)).raw();
+      return Bool::Get(IsTypeClassId(right_cid)).ptr();
     } else {
-      return Bool::False().raw();
+      return Bool::False().ptr();
     }
   }
 
@@ -117,15 +117,15 @@
         AbstractType::Handle(right.GetType(Heap::kNew));
     return Bool::Get(
                left_type.IsEquivalent(right_type, TypeEquality::kSyntactical))
-        .raw();
+        .ptr();
   }
 
   if (!cls.IsGeneric()) {
-    return Bool::True().raw();
+    return Bool::True().ptr();
   }
 
   if (left.GetTypeArguments() == right.GetTypeArguments()) {
-    return Bool::True().raw();
+    return Bool::True().ptr();
   }
   const TypeArguments& left_type_arguments =
       TypeArguments::Handle(left.GetTypeArguments());
@@ -136,7 +136,7 @@
   return Bool::Get(left_type_arguments.IsSubvectorEquivalent(
                        right_type_arguments, num_type_args - num_type_params,
                        num_type_params, TypeEquality::kSyntactical))
-      .raw();
+      .ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Object_instanceOf, 0, 4) {
@@ -161,7 +161,7 @@
     OS::PrintErr("  test type: %s\n",
                  String::Handle(zone, type.Name()).ToCString());
   }
-  return Bool::Get(is_instance_of).raw();
+  return Bool::Get(is_instance_of).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Object_simpleInstanceOf, 0, 2) {
@@ -175,7 +175,7 @@
   ASSERT(type.IsInstantiated());
   const bool is_instance_of = instance.IsInstanceOf(
       type, Object::null_type_arguments(), Object::null_type_arguments());
-  return Bool::Get(is_instance_of).raw();
+  return Bool::Get(is_instance_of).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(AbstractType_toString, 0, 1) {
@@ -196,10 +196,10 @@
   const Type& type = Type::CheckedHandle(zone, arguments->NativeArgAt(0));
   const Instance& other =
       Instance::CheckedHandle(zone, arguments->NativeArgAt(1));
-  if (type.raw() == other.raw()) {
-    return Bool::True().raw();
+  if (type.ptr() == other.ptr()) {
+    return Bool::True().ptr();
   }
-  return Bool::Get(type.IsEquivalent(other, TypeEquality::kSyntactical)).raw();
+  return Bool::Get(type.IsEquivalent(other, TypeEquality::kSyntactical)).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(FunctionType_getHashCode, 0, 1) {
@@ -216,16 +216,16 @@
       FunctionType::CheckedHandle(zone, arguments->NativeArgAt(0));
   const Instance& other =
       Instance::CheckedHandle(zone, arguments->NativeArgAt(1));
-  if (type.raw() == other.raw()) {
-    return Bool::True().raw();
+  if (type.ptr() == other.ptr()) {
+    return Bool::True().ptr();
   }
-  return Bool::Get(type.IsEquivalent(other, TypeEquality::kSyntactical)).raw();
+  return Bool::Get(type.IsEquivalent(other, TypeEquality::kSyntactical)).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(LibraryPrefix_isLoaded, 0, 1) {
   const LibraryPrefix& prefix =
       LibraryPrefix::CheckedHandle(zone, arguments->NativeArgAt(0));
-  return Bool::Get(prefix.is_loaded()).raw();
+  return Bool::Get(prefix.is_loaded()).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(LibraryPrefix_setLoaded, 0, 1) {
@@ -268,9 +268,9 @@
 
 DEFINE_NATIVE_ENTRY(Internal_inquireIs64Bit, 0, 0) {
 #if defined(ARCH_IS_64_BIT)
-  return Bool::True().raw();
+  return Bool::True().ptr();
 #else
-  return Bool::False().raw();
+  return Bool::False().ptr();
 #endif  // defined(ARCH_IS_64_BIT)
 }
 
@@ -293,7 +293,7 @@
                                      const TypeArguments& instance_type_args,
                                      const Class& interface_cls,
                                      TypeArguments* interface_type_args) {
-  Class& cur_cls = Class::Handle(zone, instance_cls.raw());
+  Class& cur_cls = Class::Handle(zone, instance_cls.ptr());
   // The following code is a specialization of Class::IsSubtypeOf().
   Array& interfaces = Array::Handle(zone);
   AbstractType& interface = AbstractType::Handle(zone);
@@ -301,8 +301,8 @@
   TypeArguments& cur_interface_type_args = TypeArguments::Handle(zone);
   while (true) {
     // Additional subtyping rules related to 'FutureOr' are not applied.
-    if (cur_cls.raw() == interface_cls.raw()) {
-      *interface_type_args = instance_type_args.raw();
+    if (cur_cls.ptr() == interface_cls.ptr()) {
+      *interface_type_args = instance_type_args.ptr();
       return true;
     }
     interfaces = cur_cls.interfaces();
@@ -415,7 +415,7 @@
     Exceptions::PropagateError(Error::Cast(result));
     UNREACHABLE();
   }
-  return result.raw();
+  return result.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Internal_prependTypeArguments, 0, 4) {
@@ -462,7 +462,7 @@
   for (intptr_t i = 0; i < bounds.Length(); ++i) {
     parameter ^= bounds.TypeAt(i);
     supertype = parameter.bound();
-    subtype = type_args_to_check.IsNull() ? Object::dynamic_type().raw()
+    subtype = type_args_to_check.IsNull() ? Object::dynamic_type().ptr()
                                           : type_args_to_check.TypeAt(i);
 
     ASSERT(!subtype.IsNull());
@@ -510,7 +510,7 @@
     }
   }
   type_list.MakeImmutable();
-  return type_list.raw();
+  return type_list.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(NoSuchMethodError_existingMethodSignature, 0, 3) {
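
`Object_haveSameRuntimeType` above is the fast path for `a.runtimeType == b.runtimeType`: integer, string, and type class ids are pooled, non-generic classes compare by class id alone, and generic classes compare their type arguments. From Dart (illustration, not part of this change):

```dart
void main() {
  print(1.runtimeType == 2.runtimeType);                // true: both int
  print("a".runtimeType == 1.5.runtimeType);            // false
  print(<int>[].runtimeType == <String>[].runtimeType); // false: type args differ
  print(<int>[].runtimeType == <int>[1].runtimeType);   // true
}
```
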
diff --git a/runtime/lib/profiler.cc b/runtime/lib/profiler.cc
index 3d6d84e..956597a 100644
--- a/runtime/lib/profiler.cc
+++ b/runtime/lib/profiler.cc
@@ -34,7 +34,7 @@
   }
   const UserTag& old = UserTag::Handle(zone, isolate->current_tag());
   self.MakeActive();
-  return old.raw();
+  return old.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(UserTag_defaultTag, 0, 0) {
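
The patched entry point returns the previously active tag so callers can restore it; this is the `UserTag.makeCurrent` contract in dart:developer (illustration, not part of this change):

```dart
import "dart:developer";

void main() {
  var tag = UserTag("rendering");
  var previous = tag.makeCurrent(); // returns the tag that was active before
  // ... profiled work here is attributed to "rendering" ...
  previous.makeCurrent();           // restore the old tag
  print(getCurrentTag().label);     // Default
}
```
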
diff --git a/runtime/lib/regexp.cc b/runtime/lib/regexp.cc
index 57aa72a..3e0e337 100644
--- a/runtime/lib/regexp.cc
+++ b/runtime/lib/regexp.cc
@@ -29,10 +29,10 @@
                                arguments->NativeArgAt(4));
   GET_NON_NULL_NATIVE_ARGUMENT(Instance, handle_dot_all,
                                arguments->NativeArgAt(5));
-  bool ignore_case = handle_case_sensitive.raw() != Bool::True().raw();
-  bool multi_line = handle_multi_line.raw() == Bool::True().raw();
-  bool unicode = handle_unicode.raw() == Bool::True().raw();
-  bool dot_all = handle_dot_all.raw() == Bool::True().raw();
+  bool ignore_case = handle_case_sensitive.ptr() != Bool::True().ptr();
+  bool multi_line = handle_multi_line.ptr() == Bool::True().ptr();
+  bool unicode = handle_unicode.ptr() == Bool::True().ptr();
+  bool dot_all = handle_dot_all.ptr() == Bool::True().ptr();
 
   RegExpFlags flags;
 
@@ -60,25 +60,25 @@
 DEFINE_NATIVE_ENTRY(RegExp_getIsMultiLine, 0, 1) {
   const RegExp& regexp = RegExp::CheckedHandle(zone, arguments->NativeArgAt(0));
   ASSERT(!regexp.IsNull());
-  return Bool::Get(regexp.flags().IsMultiLine()).raw();
+  return Bool::Get(regexp.flags().IsMultiLine()).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(RegExp_getIsUnicode, 0, 1) {
   const RegExp& regexp = RegExp::CheckedHandle(zone, arguments->NativeArgAt(0));
   ASSERT(!regexp.IsNull());
-  return Bool::Get(regexp.flags().IsUnicode()).raw();
+  return Bool::Get(regexp.flags().IsUnicode()).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(RegExp_getIsDotAll, 0, 1) {
   const RegExp& regexp = RegExp::CheckedHandle(zone, arguments->NativeArgAt(0));
   ASSERT(!regexp.IsNull());
-  return Bool::Get(regexp.flags().IsDotAll()).raw();
+  return Bool::Get(regexp.flags().IsDotAll()).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(RegExp_getIsCaseSensitive, 0, 1) {
   const RegExp& regexp = RegExp::CheckedHandle(zone, arguments->NativeArgAt(0));
   ASSERT(!regexp.IsNull());
-  return Bool::Get(!regexp.flags().IgnoreCase()).raw();
+  return Bool::Get(!regexp.flags().IgnoreCase()).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(RegExp_getGroupCount, 0, 1) {
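
These getters surface the RegExp flags to Dart; note `RegExp_getIsCaseSensitive` returns the negation of the internal ignore-case bit. From the Dart side (illustration, not part of this change):

```dart
void main() {
  var re = RegExp(r"^a.b$",
      multiLine: true, caseSensitive: false, dotAll: true);
  print(re.isMultiLine);      // true
  print(re.isCaseSensitive);  // false
  print(re.isDotAll);         // true
  print(re.hasMatch("A\nB")); // true: dotAll lets `.` match the newline
}
```
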
diff --git a/runtime/lib/simd128.cc b/runtime/lib/simd128.cc
index e5983b3..9adf5c3 100644
--- a/runtime/lib/simd128.cc
+++ b/runtime/lib/simd128.cc
@@ -550,25 +550,25 @@
 DEFINE_NATIVE_ENTRY(Int32x4_getFlagX, 0, 1) {
   GET_NON_NULL_NATIVE_ARGUMENT(Int32x4, self, arguments->NativeArgAt(0));
   int32_t value = self.x();
-  return Bool::Get(value != 0).raw();
+  return Bool::Get(value != 0).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Int32x4_getFlagY, 0, 1) {
   GET_NON_NULL_NATIVE_ARGUMENT(Int32x4, self, arguments->NativeArgAt(0));
   int32_t value = self.y();
-  return Bool::Get(value != 0).raw();
+  return Bool::Get(value != 0).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Int32x4_getFlagZ, 0, 1) {
   GET_NON_NULL_NATIVE_ARGUMENT(Int32x4, self, arguments->NativeArgAt(0));
   int32_t value = self.z();
-  return Bool::Get(value != 0).raw();
+  return Bool::Get(value != 0).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Int32x4_getFlagW, 0, 1) {
   GET_NON_NULL_NATIVE_ARGUMENT(Int32x4, self, arguments->NativeArgAt(0));
   int32_t value = self.w();
-  return Bool::Get(value != 0).raw();
+  return Bool::Get(value != 0).ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Int32x4_setFlagX, 0, 2) {
@@ -578,7 +578,7 @@
   int32_t _y = self.y();
   int32_t _z = self.z();
   int32_t _w = self.w();
-  _x = flagX.raw() == Bool::True().raw() ? 0xFFFFFFFF : 0x0;
+  _x = flagX.ptr() == Bool::True().ptr() ? 0xFFFFFFFF : 0x0;
   return Int32x4::New(_x, _y, _z, _w);
 }
 
@@ -589,7 +589,7 @@
   int32_t _y = self.y();
   int32_t _z = self.z();
   int32_t _w = self.w();
-  _y = flagY.raw() == Bool::True().raw() ? 0xFFFFFFFF : 0x0;
+  _y = flagY.ptr() == Bool::True().ptr() ? 0xFFFFFFFF : 0x0;
   return Int32x4::New(_x, _y, _z, _w);
 }
 
@@ -600,7 +600,7 @@
   int32_t _y = self.y();
   int32_t _z = self.z();
   int32_t _w = self.w();
-  _z = flagZ.raw() == Bool::True().raw() ? 0xFFFFFFFF : 0x0;
+  _z = flagZ.ptr() == Bool::True().ptr() ? 0xFFFFFFFF : 0x0;
   return Int32x4::New(_x, _y, _z, _w);
 }
 
@@ -611,7 +611,7 @@
   int32_t _y = self.y();
   int32_t _z = self.z();
   int32_t _w = self.w();
-  _w = flagW.raw() == Bool::True().raw() ? 0xFFFFFFFF : 0x0;
+  _w = flagW.ptr() == Bool::True().ptr() ? 0xFFFFFFFF : 0x0;
   return Int32x4::New(_x, _y, _z, _w);
 }
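
Beyond the rename, the Int32x4 setters above all follow the same lane-mask idiom: a Dart bool is turned into an all-ones or all-zeros 32-bit lane, and a new value is built with the one lane replaced. A self-contained sketch, with a plain struct standing in for the VM's Int32x4:

```cpp
#include <cassert>
#include <cstdint>

// Stand-in for the VM's Int32x4: four signed 32-bit lanes.
struct Int32x4 {
  int32_t x, y, z, w;
};

// Mirrors Int32x4_setFlagX above: true -> 0xFFFFFFFF, false -> 0x0 in the
// selected lane; the other three lanes are copied through unchanged.
Int32x4 SetFlagX(const Int32x4& self, bool flag_x) {
  int32_t _x = flag_x ? static_cast<int32_t>(0xFFFFFFFF) : 0x0;
  return Int32x4{_x, self.y, self.z, self.w};
}

int main() {
  Int32x4 v{0, 10, 20, 30};
  Int32x4 r = SetFlagX(v, true);
  assert(r.x == -1 && r.y == 10 && r.z == 20 && r.w == 30);
  return 0;
}
```
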
 
diff --git a/runtime/lib/string.cc b/runtime/lib/string.cc
index 631f3ad..4e1fc80 100644
--- a/runtime/lib/string.cc
+++ b/runtime/lib/string.cc
@@ -25,7 +25,7 @@
   if (!env_value.IsNull()) {
     return Symbols::New(thread, env_value);
   }
-  return default_value.raw();
+  return default_value.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(StringBase_createFromCodePoints, 0, 3) {
@@ -40,7 +40,7 @@
     a = growableArray.data();
     length = growableArray.Length();
   } else if (list.IsArray()) {
-    a = Array::Cast(list).raw();
+    a = Array::Cast(list).ptr();
     length = a.Length();
   } else {
     Exceptions::ThrowArgumentError(list);
@@ -242,7 +242,7 @@
   if (write_index < length) {
     Exceptions::ThrowArgumentError(matches_growable);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(OneByteString_substringUnchecked, 0, 3) {
@@ -283,7 +283,7 @@
   result.Add(str);
   result.SetTypeArguments(TypeArguments::Handle(
       zone, isolate->group()->object_store()->type_argument_string()));
-  return result.raw();
+  return result.ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Internal_allocateOneByteString, 0, 1) {
@@ -379,7 +379,7 @@
       intptr_t value = Smi::Value(static_cast<SmiPtr>(array.At(start + i)));
       OneByteString::SetCharAt(string, i, value);
     }
-    return string.raw();
+    return string.ptr();
   } else if (list.IsGrowableObjectArray()) {
     const GrowableObjectArray& array = GrowableObjectArray::Cast(list);
     if (end > array.Length()) {
@@ -392,7 +392,7 @@
       intptr_t value = Smi::Value(static_cast<SmiPtr>(array.At(start + i)));
       OneByteString::SetCharAt(string, i, value);
     }
-    return string.raw();
+    return string.ptr();
   }
   UNREACHABLE();
   return Object::null();
@@ -483,7 +483,7 @@
       intptr_t value = Smi::Value(static_cast<SmiPtr>(array.At(start + i)));
       TwoByteString::SetCharAt(string, i, value);
     }
-    return string.raw();
+    return string.ptr();
   } else if (list.IsGrowableObjectArray()) {
     const GrowableObjectArray& array = GrowableObjectArray::Cast(list);
     if (end > array.Length()) {
@@ -495,7 +495,7 @@
       intptr_t value = Smi::Value(static_cast<SmiPtr>(array.At(start + i)));
       TwoByteString::SetCharAt(string, i, value);
     }
-    return string.raw();
+    return string.ptr();
   }
   UNREACHABLE();
   return Object::null();
@@ -579,7 +579,7 @@
   Array& strings = Array::Handle();
   intptr_t length = -1;
   if (argument.IsArray()) {
-    strings ^= argument.raw();
+    strings ^= argument.ptr();
     length = strings.Length();
   } else if (argument.IsGrowableObjectArray()) {
     const GrowableObjectArray& g_array = GrowableObjectArray::Cast(argument);
@@ -619,7 +619,7 @@
 
   uint16_t* data_position = reinterpret_cast<uint16_t*>(codeUnits.DataAddr(0));
   String::Copy(result, 0, data_position, length_value);
-  return result.raw();
+  return result.ptr();
 }
 
 }  // namespace dart
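
The string natives above share one allocate-then-fill shape: size the result first, copy code units with SetCharAt in a loop, then return the pointer. A simplified, self-contained sketch of that loop (OneByteString below is an illustrative stand-in, not the VM class):

```cpp
#include <cstdint>
#include <iostream>
#include <vector>

// Stand-in for the VM's OneByteString: fixed length, byte-wide code units.
class OneByteString {
 public:
  explicit OneByteString(size_t length) : units_(length, 0) {}
  void SetCharAt(size_t i, uint8_t value) { units_[i] = value; }
  uint8_t CharAt(size_t i) const { return units_[i]; }
  size_t length() const { return units_.size(); }
 private:
  std::vector<uint8_t> units_;
};

// Mirrors the SetCharAt fill loops above: allocate the result up front,
// copy the [start, end) slice of code units, then hand the string back
// (the VM natives return string.ptr() at this point).
OneByteString FromCodeUnits(const std::vector<int>& list, size_t start,
                            size_t end) {
  OneByteString string(end - start);
  for (size_t i = 0; i < end - start; i++) {
    string.SetCharAt(i, static_cast<uint8_t>(list[start + i]));
  }
  return string;
}

int main() {
  OneByteString s = FromCodeUnits({72, 105, 33}, 0, 3);
  for (size_t i = 0; i < s.length(); i++) std::cout << char(s.CharAt(i));
  std::cout << "\n";  // prints: Hi!
  return 0;
}
```
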
diff --git a/runtime/lib/timeline.cc b/runtime/lib/timeline.cc
index 12c0c68..0b3816c 100644
--- a/runtime/lib/timeline.cc
+++ b/runtime/lib/timeline.cc
@@ -18,10 +18,10 @@
 DEFINE_NATIVE_ENTRY(Timeline_isDartStreamEnabled, 0, 0) {
 #if defined(SUPPORT_TIMELINE)
   if (Timeline::GetDartStream()->enabled()) {
-    return Bool::True().raw();
+    return Bool::True().ptr();
   }
 #endif
-  return Bool::False().raw();
+  return Bool::False().ptr();
 }
 
 DEFINE_NATIVE_ENTRY(Timeline_getNextAsyncId, 0, 0) {
diff --git a/runtime/lib/typed_data.cc b/runtime/lib/typed_data.cc
index d71d08e..28d9331 100644
--- a/runtime/lib/typed_data.cc
+++ b/runtime/lib/typed_data.cc
@@ -108,7 +108,7 @@
     TypedData::Copy<DstType, SrcType>(dst_array, dst_offset_in_bytes, src_array,
                                       src_offset_in_bytes, length_in_bytes);
   }
-  return Bool::True().raw();
+  return Bool::True().ptr();
 }
 
 static bool IsClamped(intptr_t cid) {
@@ -174,7 +174,7 @@
     }
   }
   UNREACHABLE();
-  return Bool::False().raw();
+  return Bool::False().ptr();
 }
 
 // Native methods for typed data allocation are recognized and implemented
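
The IsClamped(cid) helper above separates Uint8Clamped lists from plain ones; the observable difference is that clamped stores saturate to [0, 255] while ordinary Uint8 stores wrap modulo 256. An illustrative sketch of the two store semantics (not the VM's actual code path):

```cpp
#include <algorithm>
#include <cstdint>
#include <iostream>

// Plain Uint8List semantics: the store truncates, i.e. wraps mod 256.
uint8_t StoreWrapping(int64_t value) {
  return static_cast<uint8_t>(value);
}

// Uint8ClampedList semantics: the store saturates to the [0, 255] range.
uint8_t StoreClamped(int64_t value) {
  return static_cast<uint8_t>(std::clamp<int64_t>(value, 0, 255));
}

int main() {
  std::cout << int{StoreWrapping(300)} << "\n";  // 44  (wraps)
  std::cout << int{StoreClamped(300)} << "\n";   // 255 (saturates)
  std::cout << int{StoreClamped(-5)} << "\n";    // 0   (saturates)
  return 0;
}
```
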
diff --git a/runtime/lib/uri.cc b/runtime/lib/uri.cc
index 4f1ef81..af4c65d 100644
--- a/runtime/lib/uri.cc
+++ b/runtime/lib/uri.cc
@@ -10,9 +10,9 @@
 
 DEFINE_NATIVE_ENTRY(Uri_isWindowsPlatform, 0, 0) {
 #if defined(HOST_OS_WINDOWS)
-  return Bool::True().raw();
+  return Bool::True().ptr();
 #else
-  return Bool::False().raw();
+  return Bool::False().ptr();
 #endif
 }
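
Uri_isWindowsPlatform is resolved entirely at compile time: the preprocessor decides which canonical Bool the native returns. The same shape as a standalone sketch, with the standard _WIN32 macro standing in for the VM's HOST_OS_WINDOWS:

```cpp
#include <iostream>

bool IsWindowsPlatform() {
#if defined(_WIN32)  // stand-in for HOST_OS_WINDOWS in the hunk above
  return true;       // the native returns Bool::True().ptr() here
#else
  return false;      // ...and Bool::False().ptr() here
#endif
}

int main() {
  std::cout << std::boolalpha << IsWindowsPlatform() << "\n";
  return 0;
}
```
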
 
diff --git a/runtime/lib/vmservice.cc b/runtime/lib/vmservice.cc
index 8ffecd6..c42fa15 100644
--- a/runtime/lib/vmservice.cc
+++ b/runtime/lib/vmservice.cc
@@ -55,7 +55,7 @@
   // TODO(turnidge): Throw an exception when the return value is false?
   bool result = PortMap::PostMessage(
       writer.WriteMessage(message, sp.Id(), Message::kOOBPriority));
-  return Bool::Get(result).raw();
+  return Bool::Get(result).ptr();
 #else
   return Object::null();
 #endif
@@ -122,7 +122,7 @@
 #ifndef PRODUCT
   GET_NON_NULL_NATIVE_ARGUMENT(String, stream_id, arguments->NativeArgAt(0));
   bool result = Service::ListenStream(stream_id.ToCString());
-  return Bool::Get(result).raw();
+  return Bool::Get(result).ptr();
 #else
   return Object::null();
 #endif
@@ -323,7 +323,7 @@
 #ifndef PRODUCT
   GET_NON_NULL_NATIVE_ARGUMENT(TypedData, data, arguments->NativeArgAt(0));
   Api::Scope scope(thread);
-  Dart_Handle data_handle = Api::NewHandle(thread, data.raw());
+  Dart_Handle data_handle = Api::NewHandle(thread, data.ptr());
   Dart_Handle result_list;
   {
     TransitionVMToNative transition(thread);
@@ -368,7 +368,7 @@
       idx += 2;
     }
   }
-  return Api::UnwrapArrayHandle(thread->zone(), result_list).raw();
+  return Api::UnwrapArrayHandle(thread->zone(), result_list).ptr();
 #else
   return Object::null();
 #endif
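
The vmservice natives above wrap their bodies in #ifndef PRODUCT: product builds compile the service away and return null, while other builds do the work and box the boolean outcome via Bool::Get(result).ptr(). A sketch of that guard shape with stand-in types:

```cpp
#include <iostream>

// Uncomment to mimic a product build (stand-in for the VM's PRODUCT define).
// #define PRODUCT 1

// Three-way result standing in for Object::null() vs. a boxed Bool.
enum class NativeResult { kNull, kTrue, kFalse };

NativeResult ListenStream(bool stream_ok) {
#ifndef PRODUCT
  // Non-product: perform the operation and box its boolean outcome, as
  // Service::ListenStream + Bool::Get(result).ptr() do in the hunk above.
  return stream_ok ? NativeResult::kTrue : NativeResult::kFalse;
#else
  (void)stream_ok;
  return NativeResult::kNull;  // service natives are no-ops in product mode
#endif
}

int main() {
  std::cout << (ListenStream(true) == NativeResult::kTrue) << "\n";    // 1
  std::cout << (ListenStream(false) == NativeResult::kFalse) << "\n";  // 1
  return 0;
}
```
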
diff --git a/runtime/observatory/web/third_party/trace_viewer_full.html b/runtime/observatory/web/third_party/trace_viewer_full.html
index 49c4e55..daa87d0 100644
--- a/runtime/observatory/web/third_party/trace_viewer_full.html
+++ b/runtime/observatory/web/third_party/trace_viewer_full.html
@@ -4496,8 +4496,8 @@
 DocLinkBuilder.prototype={addAppVideo(name,videoId){this.docLinks.push({label:'Video Link',textContent:('Android Performance Patterns: '+name),href:'https://www.youtube.com/watch?list=PLWz5rJ2EKKc9CBxr3BVjPTPoDPLdPIFCE&v='+videoId});return this;},addDacRef(name,link){this.docLinks.push({label:'Doc Link',textContent:(name+' documentation'),href:'https://developer.android.com/reference/'+link});return this;},build(){return this.docLinks;}};function AndroidAuditor(model){Auditor.call(this,model);const helper=model.getOrCreateHelper(AndroidModelHelper);if(helper.apps.length||helper.surfaceFlinger){this.helper=helper;}}
 AndroidAuditor.viewAlphaAlertInfo_=new EventInfo('Inefficient View alpha usage','Setting an alpha between 0 and 1 has significant performance costs, if one of the fast alpha paths is not used.',new DocLinkBuilder().addAppVideo('Hidden Cost of Transparency','wIy8g8yNhNk').addDacRef('View#setAlpha()','android/view/View.html#setAlpha(float)').build());AndroidAuditor.saveLayerAlertInfo_=new EventInfo('Expensive rendering with Canvas#saveLayer()','Canvas#saveLayer() incurs extremely high rendering cost. They disrupt the rendering pipeline when drawn, forcing a flush of drawing content. Instead use View hardware layers, or static Bitmaps. This enables the offscreen buffers to be reused in between frames, and avoids the disruptive render target switch.',new DocLinkBuilder().addAppVideo('Hidden Cost of Transparency','wIy8g8yNhNk').addDacRef('Canvas#saveLayerAlpha()','android/graphics/Canvas.html#saveLayerAlpha(android.graphics.RectF, int, int)').build());AndroidAuditor.getSaveLayerAlerts_=function(frame){const badAlphaRegEx=/^(.+) alpha caused (unclipped )?saveLayer (\d+)x(\d+)$/;const saveLayerRegEx=/^(unclipped )?saveLayer (\d+)x(\d+)$/;const ret=[];const events=[];frame.associatedEvents.forEach(function(slice){const match=badAlphaRegEx.exec(slice.title);if(match){const args={'view name':match[1],'width':parseInt(match[3]),'height':parseInt(match[4])};ret.push(new Alert(AndroidAuditor.viewAlphaAlertInfo_,slice.start,[slice],args));}else if(saveLayerRegEx.test(slice.title)){events.push(slice);}},this);if(events.length>ret.length){const unclippedSeen=Statistics.sum(events,function(slice){return saveLayerRegEx.exec(slice.title)[1]?1:0;});const clippedSeen=events.length-unclippedSeen;const earliestStart=Statistics.min(events,function(slice){return slice.start;});const args={'Unclipped saveLayer count (especially bad!)':unclippedSeen,'Clipped saveLayer count':clippedSeen};events.push(frame);ret.push(new Alert(AndroidAuditor.saveLayerAlertInfo_,earliestStart,events,args));}
 return ret;};AndroidAuditor.pathAlertInfo_=new EventInfo('Path texture churn','Paths are drawn with a mask texture, so when a path is modified / newly drawn, that texture must be generated and uploaded to the GPU. Ensure that you cache paths between frames and do not unnecessarily call Path#reset(). You can cut down on this cost by sharing Path object instances between drawables/views.');AndroidAuditor.getPathAlert_=function(frame){const uploadRegEx=/^Generate Path Texture$/;const events=frame.associatedEvents.filter(function(event){return event.title==='Generate Path Texture';});const start=Statistics.min(events,getStart);const duration=Statistics.sum(events,getDuration);if(duration<3)return undefined;events.push(frame);return new Alert(AndroidAuditor.pathAlertInfo_,start,events,{'Time spent':new Scalar(timeDurationInMs,duration)});};AndroidAuditor.uploadAlertInfo_=new EventInfo('Expensive Bitmap uploads','Bitmaps that have been modified / newly drawn must be uploaded to the GPU. Since this is expensive if the total number of pixels uploaded is large, reduce the amount of Bitmap churn in this animation/context, per frame.');AndroidAuditor.getUploadAlert_=function(frame){const uploadRegEx=/^Upload (\d+)x(\d+) Texture$/;const events=[];let start=Number.POSITIVE_INFINITY;let duration=0;let pixelsUploaded=0;frame.associatedEvents.forEach(function(event){const match=uploadRegEx.exec(event.title);if(match){events.push(event);start=Math.min(start,event.start);duration+=event.duration;pixelsUploaded+=parseInt(match[1])*parseInt(match[2]);}});if(events.length===0||duration<3)return undefined;const mPixels=(pixelsUploaded/1000000).toFixed(2)+' million';const args={'Pixels uploaded':mPixels,'Time spent':new Scalar(timeDurationInMs,duration)};events.push(frame);return new Alert(AndroidAuditor.uploadAlertInfo_,start,events,args);};AndroidAuditor.ListViewInflateAlertInfo_=new EventInfo('Inflation during ListView recycling','ListView item recycling involved inflating views. Ensure your Adapter#getView() recycles the incoming View, instead of constructing a new one.');AndroidAuditor.ListViewBindAlertInfo_=new EventInfo('Inefficient ListView recycling/rebinding','ListView recycling taking too much time per frame. Ensure your Adapter#getView() binds data efficiently.');AndroidAuditor.getListViewAlert_=function(frame){const events=frame.associatedEvents.filter(function(event){return event.title==='obtainView'||event.title==='setupListItem';});const duration=Statistics.sum(events,getCpuDuration);if(events.length===0||duration<3)return undefined;let hasInflation=false;for(const event of events){if(event.findDescendentSlice('inflate')){hasInflation=true;}}
-const start=Statistics.min(events,getStart);const args={'Time spent':new Scalar(timeDurationInMs,duration)};args['ListView items '+(hasInflation?'inflated':'rebound')]=events.length/2;const eventInfo=hasInflation?AndroidAuditor.ListViewInflateAlertInfo_:AndroidAuditor.ListViewBindAlertInfo_;events.push(frame);return new Alert(eventInfo,start,events,args);};AndroidAuditor.measureLayoutAlertInfo_=new EventInfo('Expensive measure/layout pass','Measure/Layout took a significant time, contributing to jank. Avoid triggering layout during animations.',new DocLinkBuilder().addAppVideo('Invalidations, Layouts, and Performance','we6poP0kw6E').build());AndroidAuditor.getMeasureLayoutAlert_=function(frame){const events=frame.associatedEvents.filter(function(event){return event.title==='measure'||event.title==='layout';});const duration=Statistics.sum(events,getCpuDuration);if(events.length===0||duration<3)return undefined;const start=Statistics.min(events,getStart);events.push(frame);return new Alert(AndroidAuditor.measureLayoutAlertInfo_,start,events,{'Time spent':new Scalar(timeDurationInMs,duration)});};AndroidAuditor.viewDrawAlertInfo_=new EventInfo('Long View#draw()','Recording the drawing commands of invalidated Views took a long time. Avoid significant work in View or Drawable custom drawing, especially allocations or drawing to Bitmaps.',new DocLinkBuilder().addAppVideo('Invalidations, Layouts, and Performance','we6poP0kw6E').addAppVideo('Avoiding Allocations in onDraw()','HAK5acHQ53E').build());AndroidAuditor.getViewDrawAlert_=function(frame){let slice=undefined;for(const event of frame.associatedEvents){if(event.title==='getDisplayList'||event.title==='Record View#draw()'){slice=event;break;}}
-if(!slice||getCpuDuration(slice)<3)return undefined;return new Alert(AndroidAuditor.viewDrawAlertInfo_,slice.start,[slice,frame],{'Time spent':new Scalar(timeDurationInMs,getCpuDuration(slice))});};AndroidAuditor.blockingGcAlertInfo_=new EventInfo('Blocking Garbage Collection','Blocking GCs are caused by object churn, and made worse by having large numbers of objects in the heap. Avoid allocating objects during animations/scrolling, and recycle Bitmaps to avoid triggering garbage collection.',new DocLinkBuilder().addAppVideo('Garbage Collection in Android','pzfzz50W5Uo').addAppVideo('Avoiding Allocations in onDraw()','HAK5acHQ53E').build());AndroidAuditor.getBlockingGcAlert_=function(frame){const events=frame.associatedEvents.filter(function(event){return event.title==='DVM Suspend'||event.title==='GC: Wait For Concurrent';});const blockedDuration=Statistics.sum(events,getDuration);if(blockedDuration<3)return undefined;const start=Statistics.min(events,getStart);events.push(frame);return new Alert(AndroidAuditor.blockingGcAlertInfo_,start,events,{'Blocked duration':new Scalar(timeDurationInMs,blockedDuration)});};AndroidAuditor.lockContentionAlertInfo_=new EventInfo('Lock contention','UI thread lock contention is caused when another thread holds a lock that the UI thread is trying to use. UI thread progress is blocked until the lock is released. Inspect locking done within the UI thread, and ensure critical sections are short.');AndroidAuditor.getLockContentionAlert_=function(frame){const events=frame.associatedEvents.filter(function(event){return/^Lock Contention on /.test(event.title);});const blockedDuration=Statistics.sum(events,getDuration);if(blockedDuration<1)return undefined;const start=Statistics.min(events,getStart);events.push(frame);return new Alert(AndroidAuditor.lockContentionAlertInfo_,start,events,{'Blocked duration':new Scalar(timeDurationInMs,blockedDuration)});};AndroidAuditor.schedulingAlertInfo_=new EventInfo('Scheduling delay','Work to produce this frame was descheduled for several milliseconds, contributing to jank. Ensure that code on the UI thread doesn\'t block on work being done on other threads, and that background threads (doing e.g. network or bitmap loading) are running at android.os.Process#THREAD_PRIORITY_BACKGROUND or lower so they are less likely to interrupt the UI thread. These background threads should show up with a priority number of 130 or higher in the scheduling section under the Kernel process.');AndroidAuditor.getSchedulingAlert_=function(frame){let totalDuration=0;const totalStats={};for(const ttr of frame.threadTimeRanges){const stats=ttr.thread.getSchedulingStatsForRange(ttr.start,ttr.end);for(const[key,value]of Object.entries(stats)){if(!(key in totalStats)){totalStats[key]=0;}
+const start=Statistics.min(events,getStart);const args={'Time spent':new Scalar(timeDurationInMs,duration)};args['ListView items '+(hasInflation?'inflated':'rebound')]=events.length/2;const eventInfo=hasInflation?AndroidAuditor.ListViewInflateAlertInfo_:AndroidAuditor.ListViewBindAlertInfo_;events.push(frame);return new Alert(eventInfo,start,events,args);};AndroidAuditor.measureLayoutAlertInfo_=new EventInfo('Expensive measure/layout pass','Measure/Layout took a significant time, contributing to jank. Avoid triggering layout during animations.',new DocLinkBuilder().addAppVideo('Invalidations, Layouts, and Performance','we6poP0kw6E').build());AndroidAuditor.getMeasureLayoutAlert_=function(frame){const events=frame.associatedEvents.filter(function(event){return event.title==='measure'||event.title==='layout';});const duration=Statistics.sum(events,getCpuDuration);if(events.length===0||duration<3)return undefined;const start=Statistics.min(events,getStart);events.push(frame);return new Alert(AndroidAuditor.measureLayoutAlertInfo_,start,events,{'Time spent':new Scalar(timeDurationInMs,duration)});};AndroidAuditor.viewDrawAlertInfo_=new EventInfo('Long View#draw()','Recording the drawing commands of invalidated Views took a long time. Avoid significant work in View or Drawable custom drawing, especially allocations or drawing to Bitmaps.',new DocLinkBuilder().addAppVideo('Invalidations, Layouts, and Performance','we6poP0kw6E').addAppVideo('Avoiding Allocations in onDraw()','HAK5acHQ53E').build());AndroidAuditor.getViewDrawAlert_=function(frame){let slice=undefined;for(const event of frame.associatedEvents){if(event.title==='getDisplayList'||event.title==='Record View#draw()'){slice=event;break;}}
+if(!slice||getCpuDuration(slice)<3)return undefined;return new Alert(AndroidAuditor.viewDrawAlertInfo_,slice.start,[slice,frame],{'Time spent':new Scalar(timeDurationInMs,getCpuDuration(slice))});};AndroidAuditor.blockingGcAlertInfo_=new EventInfo('Blocking Garbage Collection','Blocking GCs are caused by object churn, and made worse by having large numbers of objects in the heap. Avoid allocating objects during animations/scrolling, and recycle Bitmaps to avoid triggering garbage collection.',new DocLinkBuilder().addAppVideo('Garbage Collection in Android','pzfzz50W5Uo').addAppVideo('Avoiding Allocations in onDraw()','HAK5acHQ53E').build());AndroidAuditor.getBlockingGcAlert_=function(frame){const events=frame.associatedEvents.filter(function(event){return event.title==='DVM Suspend'||event.title==='GC: Wait For Concurrent';});const blockedDuration=Statistics.sum(events,getDuration);if(blockedDuration<3)return undefined;const start=Statistics.min(events,getStart);events.push(frame);return new Alert(AndroidAuditor.blockingGcAlertInfo_,start,events,{'Blocked duration':new Scalar(timeDurationInMs,blockedDuration)});};AndroidAuditor.lockContentionAlertInfo_=new EventInfo('Lock contention','UI thread lock contention is caused when another thread holds a lock that the UI thread is trying to use. UI thread progress is blocked until the lock is released. Inspect locking done within the UI thread, and ensure critical sections are short.');AndroidAuditor.getLockContentionAlert_=function(frame){const events=frame.associatedEvents.filter(function(event){return/^Lock Contention on /.test(event.title);});const blockedDuration=Statistics.sum(events,getDuration);if(blockedDuration<1)return undefined;const start=Statistics.min(events,getStart);events.push(frame);return new Alert(AndroidAuditor.lockContentionAlertInfo_,start,events,{'Blocked duration':new Scalar(timeDurationInMs,blockedDuration)});};AndroidAuditor.schedulingAlertInfo_=new EventInfo('Scheduling delay','Work to produce this frame was descheduled for several milliseconds, contributing to jank. Ensure that code on the UI thread doesn\'t block on work being done on other threads, and that background threads (doing e.g. network or bitmap loading) are running at android.os.Process#THREAD_PRIORITY_BACKGROUND or lower so they are less likely to interrupt the UI thread. These background threads should show up with a priority number of 130 or higher in the scheduling section under the Kernel process.');AndroidAuditor.getSchedulingAlert_=function(frame){let totalDuration=0;const totalStats={};for(const ttr of frame.threadTimeRanges){const stats=ttr.thread.getSchedulingStatsForRange(ttr.start,ttr.end);for(const[key,value]of Object.entries(stats)){if(!(key in totalStats)){totalStats[key]=0;}
 totalStats[key]+=value;totalDuration+=value;}}
 if(!(SCHEDULING_STATE.RUNNING in totalStats)||totalDuration===0||totalDuration-totalStats[SCHEDULING_STATE.RUNNING]<3){return;}
 const args={};for(const[key,value]of Object.entries(totalStats)){let newKey=key;if(key===SCHEDULING_STATE.RUNNABLE){newKey='Not scheduled, but runnable';}else if(key===SCHEDULING_STATE.UNINTR_SLEEP){newKey='Blocking I/O delay';}
@@ -4511,7 +4511,7 @@
 if(/^hwuiTask/.test(thread.name)){thread.sortIndex=-1;}});},pushFramesAndJudgeJank_(){let badFramesObserved=0;let framesObserved=0;const surfaceFlinger=this.helper.surfaceFlinger;this.helper.apps.forEach(function(app){app.process.frames=app.getFrames();app.process.frames.forEach(function(frame){if(frame.totalDuration>EXPECTED_FRAME_TIME_MS*2){badFramesObserved+=2;frame.perfClass=FRAME_PERF_CLASS.TERRIBLE;}else if(frame.totalDuration>EXPECTED_FRAME_TIME_MS||frameMissedDeadline(frame)){badFramesObserved++;frame.perfClass=FRAME_PERF_CLASS.BAD;}else{frame.perfClass=FRAME_PERF_CLASS.GOOD;}});framesObserved+=app.process.frames.length;});if(framesObserved){const portionBad=badFramesObserved/framesObserved;if(portionBad>0.3){this.model.faviconHue='red';}else if(portionBad>0.05){this.model.faviconHue='yellow';}else{this.model.faviconHue='green';}}},pushEventInfo_(){const appAnnotator=new AppAnnotator();this.helper.apps.forEach(function(app){if(app.uiThread){appAnnotator.applyEventInfos(app.uiThread.sliceGroup);}
 if(app.renderThread){appAnnotator.applyEventInfos(app.renderThread.sliceGroup);}});},runAnnotate(){if(!this.helper)return;this.renameAndSort_();this.pushFramesAndJudgeJank_();this.pushEventInfo_();this.helper.iterateImportantSlices(function(slice){slice.important=true;});},runAudit(){if(!this.helper)return;const alerts=this.model.alerts;this.helper.apps.forEach(function(app){app.getFrames().forEach(function(frame){alerts.push.apply(alerts,AndroidAuditor.getSaveLayerAlerts_(frame));if(frame.perfClass===FRAME_PERF_CLASS.NEUTRAL||frame.perfClass===FRAME_PERF_CLASS.GOOD){return;}
 let alert=AndroidAuditor.getPathAlert_(frame);if(alert)alerts.push(alert);alert=AndroidAuditor.getUploadAlert_(frame);if(alert)alerts.push(alert);alert=AndroidAuditor.getListViewAlert_(frame);if(alert)alerts.push(alert);alert=AndroidAuditor.getMeasureLayoutAlert_(frame);if(alert)alerts.push(alert);alert=AndroidAuditor.getViewDrawAlert_(frame);if(alert)alerts.push(alert);alert=AndroidAuditor.getBlockingGcAlert_(frame);if(alert)alerts.push(alert);alert=AndroidAuditor.getLockContentionAlert_(frame);if(alert)alerts.push(alert);alert=AndroidAuditor.getSchedulingAlert_(frame);if(alert)alerts.push(alert);});},this);this.addRenderingInteractionRecords();this.addInputInteractionRecords();},addRenderingInteractionRecords(){const events=[];this.helper.apps.forEach(function(app){events.push.apply(events,app.getAnimationAsyncSlices());events.push.apply(events,app.getFrames());});const mergerFunction=function(events){const ir=new tr.model.um.ResponseExpectation(this.model,'Rendering',events[0].min,events[events.length-1].max-events[0].min);this.model.userModel.expectations.push(ir);}.bind(this);tr.b.math.mergeRanges(tr.b.math.convertEventsToRanges(events),30,mergerFunction);},addInputInteractionRecords(){const inputSamples=[];this.helper.apps.forEach(function(app){inputSamples.push.apply(inputSamples,app.getInputSamples());});const mergerFunction=function(events){const ir=new tr.model.um.ResponseExpectation(this.model,'Input',events[0].min,events[events.length-1].max-events[0].min);this.model.userModel.expectations.push(ir);}.bind(this);const inputRanges=inputSamples.map(function(sample){return tr.b.math.Range.fromExplicitRange(sample.timestamp,sample.timestamp);});tr.b.math.mergeRanges(inputRanges,30,mergerFunction);}};Auditor.register(AndroidAuditor);function AppAnnotator(){this.titleInfoLookup=new Map();this.titleParentLookup=new Map();this.build_();}
-AppAnnotator.prototype={build_(){const registerEventInfo=function(dict){this.titleInfoLookup.set(dict.title,new EventInfo(dict.title,dict.description,dict.docLinks));if(dict.parents){this.titleParentLookup.set(dict.title,dict.parents);}}.bind(this);registerEventInfo({title:'inflate',description:'Constructing a View hierarchy from pre-processed XML via LayoutInflater#layout. This includes constructing all of the View objects in the hierarchy, and applying styled attributes.'});registerEventInfo({title:'obtainView',description:'Adapter#getView() called to bind content to a recycled View that is being presented.'});registerEventInfo({title:'setupListItem',description:'Attached a newly-bound, recycled View to its parent ListView.'});registerEventInfo({title:'setupGridItem',description:'Attached a newly-bound, recycled View to its parent GridView.'});const choreographerLinks=new DocLinkBuilder().addDacRef('Choreographer','android/view/Choreographer.html').build();registerEventInfo({title:'Choreographer#doFrame',docLinks:choreographerLinks,description:'Choreographer executes frame callbacks for inputs, animations, and rendering traversals. When this work is done, a frame will be presented to the user.'});registerEventInfo({title:'input',parents:['Choreographer#doFrame'],docLinks:choreographerLinks,description:'Input callbacks are processed. This generally encompasses dispatching input to Views, as well as any work the Views do to process this input/gesture.'});registerEventInfo({title:'animation',parents:['Choreographer#doFrame'],docLinks:choreographerLinks,description:'Animation callbacks are processed. This is generally minimal work, as animations determine progress for the frame, and push new state to animated objects (such as setting View properties).'});registerEventInfo({title:'traversals',parents:['Choreographer#doFrame'],docLinks:choreographerLinks,description:'Primary draw traversals. This is the primary traversal of the View hierarchy, including layout and draw passes.'});const traversalParents=['Choreographer#doFrame','performTraversals'];const layoutLinks=new DocLinkBuilder().addDacRef('View#Layout','android/view/View.html#Layout').build();registerEventInfo({title:'performTraversals',description:'A drawing traversal of the View hierarchy, comprised of all layout and drawing needed to produce the frame.'});registerEventInfo({title:'measure',parents:traversalParents,docLinks:layoutLinks,description:'First of two phases in view hierarchy layout. Views are asked to size themselves according to constraints supplied by their parent. Some ViewGroups may measure a child more than once to help satisfy their own constraints. Nesting ViewGroups that measure children more than once can lead to excessive and repeated work.'});registerEventInfo({title:'layout',parents:traversalParents,docLinks:layoutLinks,description:'Second of two phases in view hierarchy layout, repositioning content and child Views into their new locations.'});const drawString='Draw pass over the View hierarchy. Every invalidated View will have its drawing commands recorded. On Android versions prior to Lollipop, this would also include the issuing of draw commands to the GPU. Starting with Lollipop, it only includes the recording of commands, and syncing that information to the RenderThread.';registerEventInfo({title:'draw',parents:traversalParents,description:drawString});const recordString='Every invalidated View\'s drawing commands are recorded. 
Each will have View#draw() called, and is passed a Canvas that will record and store its drawing commands until it is next invalidated/rerecorded.';registerEventInfo({title:'getDisplayList',parents:['draw'],description:recordString});registerEventInfo({title:'Record View#draw()',parents:['draw'],description:recordString});registerEventInfo({title:'drawDisplayList',parents:['draw'],description:'Execution of recorded draw commands to generate a frame. This represents the actual formation and issuing of drawing commands to the GPU. On Android L and higher devices, this work is done on a dedicated RenderThread, instead of on the UI Thread.'});registerEventInfo({title:'DrawFrame',description:'RenderThread portion of the standard UI/RenderThread split frame. This represents the actual formation and issuing of drawing commands to the GPU.'});registerEventInfo({title:'doFrame',description:'RenderThread animation frame. Represents drawing work done by the RenderThread on a frame where the UI thread did not produce new drawing content.'});registerEventInfo({title:'syncFrameState',description:'Sync stage between the UI thread and the RenderThread, where the UI thread hands off a frame (including information about modified Views). Time in this method primarily consists of uploading modified Bitmaps to the GPU. After this sync is completed, the UI thread is unblocked, and the RenderThread starts to render the frame.'});registerEventInfo({title:'flush drawing commands',description:'Issuing the now complete drawing commands to the GPU.'});registerEventInfo({title:'eglSwapBuffers',description:'Complete GPU rendering of the frame.'});registerEventInfo({title:'RV Scroll',description:'RecyclerView is calculating a scroll. If there are too many of these in Systrace, some Views inside RecyclerView might be causing it. Try to avoid using EditText, focusable views or handle them with care.'});registerEventInfo({title:'RV OnLayout',description:'OnLayout has been called by the View system. If this shows up too many times in Systrace, make sure the children of RecyclerView do not update themselves directly. This will cause a full re-layout but when it happens via the Adapter notifyItemChanged, RecyclerView can avoid full layout calculation.'});registerEventInfo({title:'RV FullInvalidate',description:'NotifyDataSetChanged or equal has been called. If this is taking a long time, try sending granular notify adapter changes instead of just calling notifyDataSetChanged or setAdapter / swapAdapter. Adding stable ids to your adapter might help.'});registerEventInfo({title:'RV PartialInvalidate',description:'RecyclerView is rebinding a View. If this is taking a lot of time, consider optimizing your layout or make sure you are not doing extra operations in onBindViewHolder call.'});registerEventInfo({title:'RV OnBindView',description:'RecyclerView is rebinding a View. If this is taking a lot of time, consider optimizing your layout or make sure you are not doing extra operations in onBindViewHolder call.'});registerEventInfo({title:'RV CreateView',description:'RecyclerView is creating a new View. If too many of these are present: 1) There might be a problem in Recycling (e.g. custom Animations that set transient state and prevent recycling or ItemAnimator not implementing the contract properly. See Adapter#onFailedToRecycleView(ViewHolder). 2) There may be too many item view types. Try merging them. 3) There might be too many itemChange animations and not enough space in RecyclerPool. 
Try increasing your pool size and item cache size.'});registerEventInfo({title:'eglSwapBuffers',description:'The CPU has finished producing drawing commands, and is flushing drawing work to the GPU, and posting that buffer to the consumer (which is often SurfaceFlinger window composition). Once this is completed, the GPU can produce the frame content without any involvement from the CPU.'});},applyEventInfosRecursive_(parentNames,slice){const checkExpectedParentNames=function(expectedParentNames){if(!expectedParentNames)return true;return expectedParentNames.some(function(name){return parentNames.has(name);});};if(this.titleInfoLookup.has(slice.title)){if(checkExpectedParentNames(this.titleParentLookup.get(slice.title))){slice.info=this.titleInfoLookup.get(slice.title);}}
+AppAnnotator.prototype={build_(){const registerEventInfo=function(dict){this.titleInfoLookup.set(dict.title,new EventInfo(dict.title,dict.description,dict.docLinks));if(dict.parents){this.titleParentLookup.set(dict.title,dict.parents);}}.bind(this);registerEventInfo({title:'inflate',description:'Constructing a View hierarchy from pre-processed XML via LayoutInflater#layout. This includes constructing all of the View objects in the hierarchy, and applying styled attributes.'});registerEventInfo({title:'obtainView',description:'Adapter#getView() called to bind content to a recycled View that is being presented.'});registerEventInfo({title:'setupListItem',description:'Attached a newly-bound, recycled View to its parent ListView.'});registerEventInfo({title:'setupGridItem',description:'Attached a newly-bound, recycled View to its parent GridView.'});const choreographerLinks=new DocLinkBuilder().addDacRef('Choreographer','android/view/Choreographer.html').build();registerEventInfo({title:'Choreographer#doFrame',docLinks:choreographerLinks,description:'Choreographer executes frame callbacks for inputs, animations, and rendering traversals. When this work is done, a frame will be presented to the user.'});registerEventInfo({title:'input',parents:['Choreographer#doFrame'],docLinks:choreographerLinks,description:'Input callbacks are processed. This generally encompasses dispatching input to Views, as well as any work the Views do to process this input/gesture.'});registerEventInfo({title:'animation',parents:['Choreographer#doFrame'],docLinks:choreographerLinks,description:'Animation callbacks are processed. This is generally minimal work, as animations determine progress for the frame, and push new state to animated objects (such as setting View properties).'});registerEventInfo({title:'traversals',parents:['Choreographer#doFrame'],docLinks:choreographerLinks,description:'Primary draw traversals. This is the primary traversal of the View hierarchy, including layout and draw passes.'});const traversalParents=['Choreographer#doFrame','performTraversals'];const layoutLinks=new DocLinkBuilder().addDacRef('View#Layout','android/view/View.html#Layout').build();registerEventInfo({title:'performTraversals',description:'A drawing traversal of the View hierarchy, comprised of all layout and drawing needed to produce the frame.'});registerEventInfo({title:'measure',parents:traversalParents,docLinks:layoutLinks,description:'First of two phases in view hierarchy layout. Views are asked to size themselves according to constraints supplied by their parent. Some ViewGroups may measure a child more than once to help satisfy their own constraints. Nesting ViewGroups that measure children more than once can lead to excessive and repeated work.'});registerEventInfo({title:'layout',parents:traversalParents,docLinks:layoutLinks,description:'Second of two phases in view hierarchy layout, repositioning content and child Views into their new locations.'});const drawString='Draw pass over the View hierarchy. Every invalidated View will have its drawing commands recorded. On Android versions prior to Lollipop, this would also include the issuing of draw commands to the GPU. Starting with Lollipop, it only includes the recording of commands, and syncing that information to the RenderThread.';registerEventInfo({title:'draw',parents:traversalParents,description:drawString});const recordString='Every invalidated View\'s drawing commands are recorded. 
Each will have View#draw() called, and is passed a Canvas that will record and store its drawing commands until it is next invalidated/rerecorded.';registerEventInfo({title:'getDisplayList',parents:['draw'],description:recordString});registerEventInfo({title:'Record View#draw()',parents:['draw'],description:recordString});registerEventInfo({title:'drawDisplayList',parents:['draw'],description:'Execution of recorded draw commands to generate a frame. This represents the actual formation and issuing of drawing commands to the GPU. On Android L and higher devices, this work is done on a dedicated RenderThread, instead of on the UI Thread.'});registerEventInfo({title:'DrawFrame',description:'RenderThread portion of the standard UI/RenderThread split frame. This represents the actual formation and issuing of drawing commands to the GPU.'});registerEventInfo({title:'doFrame',description:'RenderThread animation frame. Represents drawing work done by the RenderThread on a frame where the UI thread did not produce new drawing content.'});registerEventInfo({title:'syncFrameState',description:'Sync stage between the UI thread and the RenderThread, where the UI thread hands off a frame (including information about modified Views). Time in this method primarily consists of uploading modified Bitmaps to the GPU. After this sync is completed, the UI thread is unblocked, and the RenderThread starts to render the frame.'});registerEventInfo({title:'flush drawing commands',description:'Issuing the now complete drawing commands to the GPU.'});registerEventInfo({title:'eglSwapBuffers',description:'Complete GPU rendering of the frame.'});registerEventInfo({title:'RV Scroll',description:'RecyclerView is calculating a scroll. If there are too many of these in Systrace, some Views inside RecyclerView might be causing it. Try to avoid using EditText, focusable views or handle them with care.'});registerEventInfo({title:'RV OnLayout',description:'OnLayout has been called by the View system. If this shows up too many times in Systrace, make sure the children of RecyclerView do not update themselves directly. This will cause a full re-layout but when it happens via the Adapter notifyItemChanged, RecyclerView can avoid full layout calculation.'});registerEventInfo({title:'RV FullInvalidate',description:'NotifyDataSetChanged or equal has been called. If this is taking a long time, try sending granular notify adapter changes instead of just calling notifyDataSetChanged or setAdapter / swapAdapter. Adding stable ids to your adapter might help.'});registerEventInfo({title:'RV PartialInvalidate',description:'RecyclerView is rebinding a View. If this is taking a lot of time, consider optimizing your layout or make sure you are not doing extra operations in onBindViewHolder call.'});registerEventInfo({title:'RV OnBindView',description:'RecyclerView is rebinding a View. If this is taking a lot of time, consider optimizing your layout or make sure you are not doing extra operations in onBindViewHolder call.'});registerEventInfo({title:'RV CreateView',description:'RecyclerView is creating a new View. If too many of these are present: 1) There might be a problem in Recycling (e.g. custom Animations that set transient state and prevent recycling or ItemAnimator not implementing the contract properly. See Adapter#onFailedToRecycleView(ViewHolder). 2) There may be too many item view types. Try merging them. 3) There might be too many itemChange animations and not enough space in RecyclerPool. 
Try increasing your pool size and item cache size.'});registerEventInfo({title:'eglSwapBuffers',description:'The CPU has finished producing drawing commands, and is flushing drawing work to the GPU, and posting that buffer to the consumer (which is often SurfaceFlinger window composition). Once this is completed, the GPU can produce the frame content without any involvement from the CPU.'});},applyEventInfosRecursive_(parentNames,slice){const checkExpectedParentNames=function(expectedParentNames){if(!expectedParentNames)return true;return expectedParentNames.some(function(name){return parentNames.has(name);});};if(this.titleInfoLookup.has(slice.title)){if(checkExpectedParentNames(this.titleParentLookup.get(slice.title))){slice.info=this.titleInfoLookup.get(slice.title);}}
 if(slice.subSlices.length>0){if(!parentNames.has(slice.title)){parentNames.set(slice.title,0);}
 parentNames.set(slice.title,parentNames.get(slice.title)+1);slice.subSlices.forEach(function(subSlice){this.applyEventInfosRecursive_(parentNames,subSlice);},this);parentNames.set(slice.title,parentNames.get(slice.title)-1);if(parentNames.get(slice.title)===0){delete parentNames[slice.title];}}},applyEventInfos(sliceGroup){sliceGroup.topLevelSlices.forEach(function(slice){this.applyEventInfosRecursive_(new Map(),slice);},this);}};return{AndroidAuditor,};});'use strict';tr.exportTo('tr.model',function(){function ObjectSnapshot(objectInstance,ts,args){tr.model.Event.call(this);this.objectInstance=objectInstance;this.ts=ts;this.args=args;}
 ObjectSnapshot.prototype={__proto__:tr.model.Event.prototype,preInitialize(){},initialize(){},referencedAt(item,object,field){},addBoundsToRange(range){range.addValue(this.ts);},get userFriendlyName(){return'Snapshot of '+this.objectInstance.userFriendlyName+' @ '+
@@ -5573,7 +5573,7 @@
 class ProfilingDictionaryReader{constructor(opt_metadata,opt_maps,opt_groups,opt_parent){this.metadata=opt_metadata||new Map();this.maps=opt_maps||new Map();this.groups=opt_groups||new Map();this.parent_=opt_parent||undefined;this.inflated_=undefined;this.raw_=undefined;this.boundGetString_=this.getString.bind(this);this.deferenceStrings_=o=>deferenceStrings(this.boundGetString_,o);}
 static empty(){return new ProfilingDictionaryReader();}
 get parent(){return this.parent_;}
-get raw(){if(this.raw_)return this.raw_;this.raw_={};for(const[name,group]of this.groups.entries()){this.raw_[name]=group;}
+get raw(){if(this.raw_)return this.raw_;this.raw_={};for(const[name,group]of this.groups.entries()){this.raw_[name]=group;}
 return this.raw_;}
 get inflated(){if(this.inflated_)return this.inflated_;this.inflated_={};for(const[name,group]of this.groups.entries()){this.inflated_[name]=this.inflateGroup(group);}
 return this.inflated_;}
@@ -7559,11 +7559,11 @@
 function drawProjectedQuadToContext(passNumber,quad,p1,p2,p3,p4,ctx,quadCanvas){if(passNumber===0){drawProjectedQuadBackgroundToContext(quad,p1,p2,p3,p4,ctx,quadCanvas);}else if(passNumber===1){drawProjectedQuadOutlineToContext(quad,p1,p2,p3,p4,ctx,quadCanvas);}else if(passNumber===2){drawProjectedQuadSelectionOutlineToContext(quad,p1,p2,p3,p4,ctx,quadCanvas);}else{throw new Error('Invalid pass number');}}
 const tmpP1=vec3.create();const tmpP2=vec3.create();const tmpP3=vec3.create();const tmpP4=vec3.create();function transformAndProcessQuads(matrix,viewport,quads,numPasses,handleQuadFunc,opt_arg1,opt_arg2){for(let passNumber=0;passNumber<numPasses;passNumber++){for(let i=0;i<quads.length;i++){const quad=quads[i];transform(tmpP1,quad.p1,matrix,viewport);transform(tmpP2,quad.p2,matrix,viewport);transform(tmpP3,quad.p3,matrix,viewport);transform(tmpP4,quad.p4,matrix,viewport);handleQuadFunc(passNumber,quad,tmpP1,tmpP2,tmpP3,tmpP4,opt_arg1,opt_arg2);}}}
 const QuadStackView=tr.ui.b.define('quad-stack-view');QuadStackView.prototype={__proto__:HTMLDivElement.prototype,decorate(){this.className='quad-stack-view';this.style.display='flex';this.style.position='relative';const node=tr.ui.b.instantiateTemplate('#quad-stack-view-template',THIS_DOC);Polymer.dom(this).appendChild(node);this.updateHeaderVisibility_();const header=Polymer.dom(this).querySelector('#header');header.style.position='absolute';header.style.fontSize='70%';header.style.top='10px';header.style.left='10px';header.style.right='150px';const scroller=Polymer.dom(this).querySelector('#canvas-scroller');scroller.style.flexGrow=1;scroller.style.flexShrink=1;scroller.style.flexBasis='auto';scroller.style.minWidth=0;scroller.style.minHeight=0;scroller.style.overflow='auto';this.canvas_=Polymer.dom(this).querySelector('#canvas');this.chromeImages_={left:Polymer.dom(this).querySelector('#chrome-left'),mid:Polymer.dom(this).querySelector('#chrome-mid'),right:Polymer.dom(this).querySelector('#chrome-right')};const stackingDistanceSlider=Polymer.dom(this).querySelector('#stacking-distance-slider');stackingDistanceSlider.style.position='absolute';stackingDistanceSlider.style.fontSize='70%';stackingDistanceSlider.style.top='10px';stackingDistanceSlider.style.right='10px';stackingDistanceSlider.value=tr.b.Settings.get('quadStackView.stackingDistance',45);stackingDistanceSlider.addEventListener('change',this.onStackingDistanceChange_.bind(this));stackingDistanceSlider.addEventListener('input',this.onStackingDistanceChange_.bind(this));this.trackMouse_();this.camera_=new tr.ui.b.Camera(this.mouseModeSelector_);this.camera_.addEventListener('renderrequired',this.onRenderRequired_.bind(this));this.cameraWasReset_=false;this.camera_.canvas=this.canvas_;this.viewportRect_=tr.b.math.Rect.fromXYWH(0,0,0,0);this.pixelRatio_=window.devicePixelRatio||1;},updateHeaderVisibility_(){if(this.headerText){Polymer.dom(this).querySelector('#header').style.display='';}else{Polymer.dom(this).querySelector('#header').style.display='none';}},get headerText(){return Polymer.dom(this).querySelector('#header').textContent;},set headerText(headerText){Polymer.dom(this).querySelector('#header').textContent=headerText;this.updateHeaderVisibility_();},onStackingDistanceChange_(e){tr.b.Settings.set('quadStackView.stackingDistance',this.stackingDistance);this.scheduleRender();e.stopPropagation();},get stackingDistance(){return Polymer.dom(this).querySelector('#stacking-distance-slider').value;},get mouseModeSelector(){return this.mouseModeSelector_;},get camera(){return this.camera_;},set quads(q){this.quads_=q;this.scheduleRender();},set deviceRect(rect){if(!rect||rect.equalTo(this.deviceRect_))return;this.deviceRect_=rect;this.camera_.deviceRect=rect;this.chromeQuad_=undefined;},resize(){if(!this.offsetParent)return true;const width=parseInt(window.getComputedStyle(this.offsetParent).width);const height=parseInt(window.getComputedStyle(this.offsetParent).height);const rect=tr.b.math.Rect.fromXYWH(0,0,width,height);if(rect.equalTo(this.viewportRect_))return false;this.viewportRect_=rect;this.canvas_.style.width=width+'px';this.canvas_.style.height=height+'px';this.canvas_.width=this.pixelRatio_*width;this.canvas_.height=this.pixelRatio_*height;if(!this.cameraWasReset_){this.camera_.resetCamera();this.cameraWasReset_=true;}
-return true;},readyToDraw(){if(!this.chromeImages_.left.src){let leftContent=window.getComputedStyle(this.chromeImages_.left).backgroundImage;leftContent=tr.ui.b.extractUrlString(leftContent);let midContent=window.getComputedStyle(this.chromeImages_.mid).backgroundImage;midContent=tr.ui.b.extractUrlString(midContent);let rightContent=window.getComputedStyle(this.chromeImages_.right).backgroundImage;rightContent=tr.ui.b.extractUrlString(rightContent);this.chromeImages_.left.src=leftContent;this.chromeImages_.mid.src=midContent;this.chromeImages_.right.src=rightContent;}
+return true;},readyToDraw(){if(!this.chromeImages_.left.src){let leftContent=window.getComputedStyle(this.chromeImages_.left).backgroundImage;leftContent=tr.ui.b.extractUrlString(leftContent);let midContent=window.getComputedStyle(this.chromeImages_.mid).backgroundImage;midContent=tr.ui.b.extractUrlString(midContent);let rightContent=window.getComputedStyle(this.chromeImages_.right).backgroundImage;rightContent=tr.ui.b.extractUrlString(rightContent);this.chromeImages_.left.src=leftContent;this.chromeImages_.mid.src=midContent;this.chromeImages_.right.src=rightContent;}
 return(this.chromeImages_.left.height>0)&&(this.chromeImages_.mid.height>0)&&(this.chromeImages_.right.height>0);},get chromeQuad(){if(this.chromeQuad_)return this.chromeQuad_;const chromeCanvas=document.createElement('canvas');const offsetY=this.chromeImages_.left.height;chromeCanvas.width=this.deviceRect_.width;chromeCanvas.height=this.deviceRect_.height+offsetY;const leftWidth=this.chromeImages_.left.width;const midWidth=this.chromeImages_.mid.width;const rightWidth=this.chromeImages_.right.width;const chromeCtx=chromeCanvas.getContext('2d');chromeCtx.drawImage(this.chromeImages_.left,0,0);chromeCtx.save();chromeCtx.translate(leftWidth,0);const s=(this.deviceRect_.width-leftWidth-rightWidth)/midWidth;chromeCtx.scale(s,1);chromeCtx.drawImage(this.chromeImages_.mid,0,0);chromeCtx.restore();chromeCtx.drawImage(this.chromeImages_.right,leftWidth+s*midWidth,0);const chromeRect=tr.b.math.Rect.fromXYWH(this.deviceRect_.x,this.deviceRect_.y-offsetY,this.deviceRect_.width,this.deviceRect_.height+offsetY);const chromeQuad=tr.b.math.Quad.fromRect(chromeRect);chromeQuad.stackingGroupId=this.maxStackingGroupId_+1;chromeQuad.imageData=chromeCtx.getImageData(0,0,chromeCanvas.width,chromeCanvas.height);chromeQuad.shadowOffset=[0,0];chromeQuad.shadowBlur=5;chromeQuad.borderWidth=3;this.chromeQuad_=chromeQuad;return this.chromeQuad_;},scheduleRender(){if(this.redrawScheduled_)return false;this.redrawScheduled_=true;tr.b.requestAnimationFrame(this.render,this);},onRenderRequired_(e){this.scheduleRender();},stackTransformAndProcessQuads_(numPasses,handleQuadFunc,includeChromeQuad,opt_arg1,opt_arg2){const mv=this.camera_.modelViewMatrix;const p=this.camera_.projectionMatrix;const viewport=tr.b.math.Rect.fromXYWH(0,0,this.canvas_.width,this.canvas_.height);const quadStacks=[];for(let i=0;i<this.quads_.length;++i){const quad=this.quads_[i];const stackingId=quad.stackingGroupId||0;while(stackingId>=quadStacks.length){quadStacks.push([]);}
 quadStacks[stackingId].push(quad);}
 const mvp=mat4.create();this.maxStackingGroupId_=quadStacks.length;const effectiveStackingDistance=this.stackingDistance*this.camera_.stackingDistanceDampening;mat4.multiply(mvp,p,mv);for(let i=0;i<quadStacks.length;++i){transformAndProcessQuads(mvp,viewport,quadStacks[i],numPasses,handleQuadFunc,opt_arg1,opt_arg2);mat4.translate(mv,mv,[0,0,effectiveStackingDistance]);mat4.multiply(mvp,p,mv);}
-if(includeChromeQuad&&this.deviceRect_){transformAndProcessQuads(mvp,viewport,[this.chromeQuad],numPasses,drawProjectedQuadToContext,opt_arg1,opt_arg2);}},render(){this.redrawScheduled_=false;if(!this.readyToDraw()){setTimeout(this.scheduleRender.bind(this),constants.IMAGE_LOAD_RETRY_TIME_MS);return;}
+if(includeChromeQuad&&this.deviceRect_){transformAndProcessQuads(mvp,viewport,[this.chromeQuad],numPasses,drawProjectedQuadToContext,opt_arg1,opt_arg2);}},render(){this.redrawScheduled_=false;if(!this.readyToDraw()){setTimeout(this.scheduleRender.bind(this),constants.IMAGE_LOAD_RETRY_TIME_MS);return;}
 if(!this.quads_)return;const canvasCtx=this.canvas_.getContext('2d');if(!this.resize()){canvasCtx.clearRect(0,0,this.canvas_.width,this.canvas_.height);}
 const quadCanvas=document.createElement('canvas');this.stackTransformAndProcessQuads_(3,drawProjectedQuadToContext,true,canvasCtx,quadCanvas);quadCanvas.width=0;},trackMouse_(){this.mouseModeSelector_=document.createElement('tr-ui-b-mouse-mode-selector');this.mouseModeSelector_.targetElement=this.canvas_;this.mouseModeSelector_.supportedModeMask=tr.ui.b.MOUSE_SELECTOR_MODE.SELECTION|tr.ui.b.MOUSE_SELECTOR_MODE.PANSCAN|tr.ui.b.MOUSE_SELECTOR_MODE.ZOOM|tr.ui.b.MOUSE_SELECTOR_MODE.ROTATE;this.mouseModeSelector_.mode=tr.ui.b.MOUSE_SELECTOR_MODE.PANSCAN;this.mouseModeSelector_.pos={x:0,y:100};Polymer.dom(this).appendChild(this.mouseModeSelector_);this.mouseModeSelector_.settingsKey='quadStackView.mouseModeSelector';this.mouseModeSelector_.setModifierForAlternateMode(tr.ui.b.MOUSE_SELECTOR_MODE.ROTATE,tr.ui.b.MODIFIER.SHIFT);this.mouseModeSelector_.setModifierForAlternateMode(tr.ui.b.MOUSE_SELECTOR_MODE.PANSCAN,tr.ui.b.MODIFIER.SPACE);this.mouseModeSelector_.setModifierForAlternateMode(tr.ui.b.MOUSE_SELECTOR_MODE.ZOOM,tr.ui.b.MODIFIER.CMD_OR_CTRL);this.mouseModeSelector_.addEventListener('updateselection',this.onSelectionUpdate_.bind(this));this.mouseModeSelector_.addEventListener('endselection',this.onSelectionUpdate_.bind(this));},extractRelativeMousePosition_(e){const br=this.canvas_.getBoundingClientRect();return[this.pixelRatio_*(e.clientX-this.canvas_.offsetLeft-br.left),this.pixelRatio_*(e.clientY-this.canvas_.offsetTop-br.top)];},onSelectionUpdate_(e){const mousePos=this.extractRelativeMousePosition_(e);const res=[];function handleQuad(passNumber,quad,p1,p2,p3,p4){if(tr.b.math.pointInImplicitQuad(mousePos,p1,p2,p3,p4)){res.push(quad);}}
 this.stackTransformAndProcessQuads_(1,handleQuad,false);e=new tr.b.Event('selectionchange');e.quads=res;this.dispatchEvent(e);}};return{QuadStackView,};});'use strict';tr.exportTo('tr.ui.e.chrome.cc',function(){const ColorScheme=tr.b.ColorScheme;const THIS_DOC=document.currentScript.ownerDocument;const TILE_HEATMAP_TYPE={};TILE_HEATMAP_TYPE.NONE='none';TILE_HEATMAP_TYPE.SCHEDULED_PRIORITY='scheduledPriority';TILE_HEATMAP_TYPE.USING_GPU_MEMORY='usingGpuMemory';const cc=tr.ui.e.chrome.cc;function createTileRectsSelectorBaseOptions(){return[{label:'None',value:'none'},{label:'Coverage Rects',value:'coverage'}];}
@@ -7636,7 +7636,7 @@
 tr.b.dispatchSimpleEvent(this,'selection-change');},createPictureBtn_(pictures){if(!(pictures instanceof Array)){pictures=[pictures];}
 const link=document.createElement('tr-ui-a-analysis-link');link.selection=function(){const layeredPicture=new tr.e.cc.LayeredPicture(pictures);const snapshot=new tr.e.cc.PictureSnapshot(layeredPicture);snapshot.picture=layeredPicture;const selection=new tr.model.EventSet();selection.push(snapshot);return selection;};Polymer.dom(link).textContent='View in Picture Debugger';return link;},onRequestSelectionChangeFromAnalysisEl_(e){if(!(e.selection instanceof tr.ui.e.chrome.cc.Selection)){return;}
 e.stopPropagation();this.selection=e.selection;},get extraHighlightsByLayerId(){return this.layerTreeQuadStackView_.extraHighlightsByLayerId;},set extraHighlightsByLayerId(extraHighlightsByLayerId){this.layerTreeQuadStackView_.extraHighlightsByLayerId=extraHighlightsByLayerId;}};return{LayerView,};});'use strict';tr.exportTo('tr.ui.e.chrome.cc',function(){const LayerTreeHostImplSnapshotView=tr.ui.b.define('tr-ui-e-chrome-cc-layer-tree-host-impl-snapshot-view',tr.ui.analysis.ObjectSnapshotView);LayerTreeHostImplSnapshotView.prototype={__proto__:tr.ui.analysis.ObjectSnapshotView.prototype,decorate(){Polymer.dom(this).classList.add('tr-ui-e-chrome-cc-lthi-s-view');this.style.display='flex';this.style.flexDirection='row';this.style.flexGrow=1;this.style.flexShrink=1;this.style.flexBasis='auto';this.style.minWidth=0;this.selection_=undefined;this.layerPicker_=new tr.ui.e.chrome.cc.LayerPicker();this.layerPicker_.style.flexGrow=0;this.layerPicker_.style.flexShrink=0;this.layerPicker_.style.flexBasis='auto';this.layerPicker_.style.minWidth='200px';this.layerPicker_.addEventListener('selection-change',this.onLayerPickerSelectionChanged_.bind(this));this.layerView_=new tr.ui.e.chrome.cc.LayerView();this.layerView_.addEventListener('selection-change',this.onLayerViewSelectionChanged_.bind(this));this.layerView_.style.flexGrow=1;this.layerView_.style.flexShrink=1;this.layerView_.style.flexBasis='auto';this.layerView_.style.minWidth=0;this.dragHandle_=document.createElement('tr-ui-b-drag-handle');this.dragHandle_.style.flexGrow=0;this.dragHandle_.style.flexShrink=0;this.dragHandle_.style.flexBasis='auto';this.dragHandle_.horizontal=false;this.dragHandle_.target=this.layerPicker_;Polymer.dom(this).appendChild(this.layerPicker_);Polymer.dom(this).appendChild(this.dragHandle_);Polymer.dom(this).appendChild(this.layerView_);this.onLayerViewSelectionChanged_();this.onLayerPickerSelectionChanged_();},get objectSnapshot(){return this.objectSnapshot_;},set objectSnapshot(objectSnapshot){this.objectSnapshot_=objectSnapshot;const lthi=this.objectSnapshot;let layerTreeImpl;if(lthi){layerTreeImpl=lthi.getTree(this.layerPicker_.whichTree);}
-this.layerPicker_.lthiSnapshot=lthi;this.layerView_.layerTreeImpl=layerTreeImpl;this.layerView_.regenerateContent();if(!this.selection_)return;this.selection=this.selection_.findEquivalent(lthi);},get selection(){return this.selection_;},set selection(selection){if(this.selection_===selection)return;this.selection_=selection;this.layerPicker_.selection=selection;this.layerView_.selection=selection;tr.b.dispatchSimpleEvent(this,'cc-selection-change');},onLayerPickerSelectionChanged_(){this.selection_=this.layerPicker_.selection;this.layerView_.selection=this.selection;this.layerView_.layerTreeImpl=this.layerPicker_.layerTreeImpl;this.layerView_.isRenderPassQuads=this.layerPicker_.isRenderPassQuads;this.layerView_.regenerateContent();tr.b.dispatchSimpleEvent(this,'cc-selection-change');},onLayerViewSelectionChanged_(){this.selection_=this.layerView_.selection;this.layerPicker_.selection=this.selection;tr.b.dispatchSimpleEvent(this,'cc-selection-change');},get extraHighlightsByLayerId(){return this.layerView_.extraHighlightsByLayerId;},set extraHighlightsByLayerId(extraHighlightsByLayerId){this.layerView_.extraHighlightsByLayerId=extraHighlightsByLayerId;}};tr.ui.analysis.ObjectSnapshotView.register(LayerTreeHostImplSnapshotView,{typeName:'cc::LayerTreeHostImpl'});return{LayerTreeHostImplSnapshotView,};});'use strict';tr.exportTo('tr.ui.e.chrome.cc',function(){const OPS_TIMING_ITERATIONS=3;const CHART_PADDING_LEFT=65;const CHART_PADDING_RIGHT=40;const AXIS_PADDING_LEFT=60;const AXIS_PADDING_RIGHT=35;const AXIS_PADDING_TOP=25;const AXIS_PADDING_BOTTOM=45;const AXIS_LABEL_PADDING=5;const AXIS_TICK_SIZE=10;const LABEL_PADDING=5;const LABEL_INTERLEAVE_OFFSET=15;const BAR_PADDING=5;const VERTICAL_TICKS=5;const HUE_CHAR_CODE_ADJUSTMENT=5.7;const PictureOpsChartSummaryView=tr.ui.b.define('tr-ui-e-chrome-cc-picture-ops-chart-summary-view');PictureOpsChartSummaryView.prototype={__proto__:HTMLDivElement.prototype,decorate(){this.style.flexGrow=0;this.style.flexShrink=0;this.style.flexBasis='auto';this.style.fontSize=0;this.style.margin=0;this.style.minHeight='200px';this.style.minWidth='200px';this.style.overflow='hidden';this.style.padding=0;this.picture_=undefined;this.pictureDataProcessed_=false;this.chartScale_=window.devicePixelRatio;this.chart_=document.createElement('canvas');this.chartCtx_=this.chart_.getContext('2d');Polymer.dom(this).appendChild(this.chart_);this.opsTimingData_=[];this.chartWidth_=0;this.chartHeight_=0;this.requiresRedraw_=true;this.currentBarMouseOverTarget_=null;this.chart_.addEventListener('mousemove',this.onMouseMove_.bind(this));try{new ResizeObserver(this.onResize_.bind(this)).observe(this);}catch(e){}},get requiresRedraw(){return this.requiresRedraw_;},set requiresRedraw(requiresRedraw){this.requiresRedraw_=requiresRedraw;},get picture(){return this.picture_;},set picture(picture){this.picture_=picture;this.pictureDataProcessed_=false;if(Polymer.dom(this).classList.contains('hidden'))return;this.processPictureData_();this.requiresRedraw=true;this.updateChartContents();},hide(){Polymer.dom(this).classList.add('hidden');this.style.display='none';},show(){Polymer.dom(this).classList.remove('hidden');this.style.display='';if(!this.pictureDataProcessed_){this.processPictureData_();}
+this.layerPicker_.lthiSnapshot=lthi;this.layerView_.layerTreeImpl=layerTreeImpl;this.layerView_.regenerateContent();if(!this.selection_)return;this.selection=this.selection_.findEquivalent(lthi);},get selection(){return this.selection_;},set selection(selection){if(this.selection_===selection)return;this.selection_=selection;this.layerPicker_.selection=selection;this.layerView_.selection=selection;tr.b.dispatchSimpleEvent(this,'cc-selection-change');},onLayerPickerSelectionChanged_(){this.selection_=this.layerPicker_.selection;this.layerView_.selection=this.selection;this.layerView_.layerTreeImpl=this.layerPicker_.layerTreeImpl;this.layerView_.isRenderPassQuads=this.layerPicker_.isRenderPassQuads;this.layerView_.regenerateContent();tr.b.dispatchSimpleEvent(this,'cc-selection-change');},onLayerViewSelectionChanged_(){this.selection_=this.layerView_.selection;this.layerPicker_.selection=this.selection;tr.b.dispatchSimpleEvent(this,'cc-selection-change');},get extraHighlightsByLayerId(){return this.layerView_.extraHighlightsByLayerId;},set extraHighlightsByLayerId(extraHighlightsByLayerId){this.layerView_.extraHighlightsByLayerId=extraHighlightsByLayerId;}};tr.ui.analysis.ObjectSnapshotView.register(LayerTreeHostImplSnapshotView,{typeName:'cc::LayerTreeHostImpl'});return{LayerTreeHostImplSnapshotView,};});'use strict';tr.exportTo('tr.ui.e.chrome.cc',function(){const OPS_TIMING_ITERATIONS=3;const CHART_PADDING_LEFT=65;const CHART_PADDING_RIGHT=40;const AXIS_PADDING_LEFT=60;const AXIS_PADDING_RIGHT=35;const AXIS_PADDING_TOP=25;const AXIS_PADDING_BOTTOM=45;const AXIS_LABEL_PADDING=5;const AXIS_TICK_SIZE=10;const LABEL_PADDING=5;const LABEL_INTERLEAVE_OFFSET=15;const BAR_PADDING=5;const VERTICAL_TICKS=5;const HUE_CHAR_CODE_ADJUSTMENT=5.7;const PictureOpsChartSummaryView=tr.ui.b.define('tr-ui-e-chrome-cc-picture-ops-chart-summary-view');PictureOpsChartSummaryView.prototype={__proto__:HTMLDivElement.prototype,decorate(){this.style.flexGrow=0;this.style.flexShrink=0;this.style.flexBasis='auto';this.style.fontSize=0;this.style.margin=0;this.style.minHeight='200px';this.style.minWidth='200px';this.style.overflow='hidden';this.style.padding=0;this.picture_=undefined;this.pictureDataProcessed_=false;this.chartScale_=window.devicePixelRatio;this.chart_=document.createElement('canvas');this.chartCtx_=this.chart_.getContext('2d');Polymer.dom(this).appendChild(this.chart_);this.opsTimingData_=[];this.chartWidth_=0;this.chartHeight_=0;this.requiresRedraw_=true;this.currentBarMouseOverTarget_=null;this.chart_.addEventListener('mousemove',this.onMouseMove_.bind(this));try{new ResizeObserver(this.onResize_.bind(this)).observe(this);}catch(e){}},get requiresRedraw(){return this.requiresRedraw_;},set requiresRedraw(requiresRedraw){this.requiresRedraw_=requiresRedraw;},get picture(){return this.picture_;},set picture(picture){this.picture_=picture;this.pictureDataProcessed_=false;if(Polymer.dom(this).classList.contains('hidden'))return;this.processPictureData_();this.requiresRedraw=true;this.updateChartContents();},hide(){Polymer.dom(this).classList.add('hidden');this.style.display='none';},show(){Polymer.dom(this).classList.remove('hidden');this.style.display='';if(!this.pictureDataProcessed_){this.processPictureData_();}
 this.requiresRedraw=true;this.updateChartContents();},onMouseMove_(e){const lastBarMouseOverTarget=this.currentBarMouseOverTarget_;this.currentBarMouseOverTarget_=null;const x=e.offsetX;const y=e.offsetY;const chartLeft=CHART_PADDING_LEFT;const chartRight=this.chartWidth_-CHART_PADDING_RIGHT;const chartTop=AXIS_PADDING_TOP;const chartBottom=this.chartHeight_-AXIS_PADDING_BOTTOM;const chartInnerWidth=chartRight-chartLeft;if(x>chartLeft&&x<chartRight&&y>chartTop&&y<chartBottom){this.currentBarMouseOverTarget_=Math.floor((x-chartLeft)/chartInnerWidth*this.opsTimingData_.length);this.currentBarMouseOverTarget_=tr.b.math.clamp(this.currentBarMouseOverTarget_,0,this.opsTimingData_.length-1);}
 if(this.currentBarMouseOverTarget_===lastBarMouseOverTarget)return;this.drawChartContents_();},onResize_(){this.requiresRedraw=true;this.updateChartContents();},updateChartContents(){if(this.requiresRedraw){this.updateChartDimensions_();}
 this.drawChartContents_();},updateChartDimensions_(){this.chartWidth_=this.offsetWidth;this.chartHeight_=this.offsetHeight;this.chart_.width=this.chartWidth_*this.chartScale_;this.chart_.height=this.chartHeight_*this.chartScale_;this.chart_.style.width=this.chartWidth_+'px';this.chart_.style.height=this.chartHeight_+'px';this.chartCtx_.scale(this.chartScale_,this.chartScale_);},processPictureData_(){this.resetOpsTimingData_();this.pictureDataProcessed_=true;if(!this.picture_)return;let ops=this.picture_.getOps();if(!ops)return;ops=this.picture_.tagOpsWithTimings(ops);if(ops[0].cmd_time===undefined)return;this.collapseOpsToTimingBuckets_(ops);},drawChartContents_(){this.clearChartContents_();if(this.opsTimingData_.length===0){this.showNoTimingDataMessage_();return;}
diff --git a/runtime/observatory_2/web/third_party/trace_viewer_full.html b/runtime/observatory_2/web/third_party/trace_viewer_full.html
index 49c4e55..daa87d0 100644
--- a/runtime/observatory_2/web/third_party/trace_viewer_full.html
+++ b/runtime/observatory_2/web/third_party/trace_viewer_full.html
@@ -4496,8 +4496,8 @@
 DocLinkBuilder.prototype={addAppVideo(name,videoId){this.docLinks.push({label:'Video Link',textContent:('Android Performance Patterns: '+name),href:'https://www.youtube.com/watch?list=PLWz5rJ2EKKc9CBxr3BVjPTPoDPLdPIFCE&v='+videoId});return this;},addDacRef(name,link){this.docLinks.push({label:'Doc Link',textContent:(name+' documentation'),href:'https://developer.android.com/reference/'+link});return this;},build(){return this.docLinks;}};function AndroidAuditor(model){Auditor.call(this,model);const helper=model.getOrCreateHelper(AndroidModelHelper);if(helper.apps.length||helper.surfaceFlinger){this.helper=helper;}}
 AndroidAuditor.viewAlphaAlertInfo_=new EventInfo('Inefficient View alpha usage','Setting an alpha between 0 and 1 has significant performance costs, if one of the fast alpha paths is not used.',new DocLinkBuilder().addAppVideo('Hidden Cost of Transparency','wIy8g8yNhNk').addDacRef('View#setAlpha()','android/view/View.html#setAlpha(float)').build());AndroidAuditor.saveLayerAlertInfo_=new EventInfo('Expensive rendering with Canvas#saveLayer()','Canvas#saveLayer() incurs extremely high rendering cost. They disrupt the rendering pipeline when drawn, forcing a flush of drawing content. Instead use View hardware layers, or static Bitmaps. This enables the offscreen buffers to be reused in between frames, and avoids the disruptive render target switch.',new DocLinkBuilder().addAppVideo('Hidden Cost of Transparency','wIy8g8yNhNk').addDacRef('Canvas#saveLayerAlpha()','android/graphics/Canvas.html#saveLayerAlpha(android.graphics.RectF, int, int)').build());AndroidAuditor.getSaveLayerAlerts_=function(frame){const badAlphaRegEx=/^(.+) alpha caused (unclipped )?saveLayer (\d+)x(\d+)$/;const saveLayerRegEx=/^(unclipped )?saveLayer (\d+)x(\d+)$/;const ret=[];const events=[];frame.associatedEvents.forEach(function(slice){const match=badAlphaRegEx.exec(slice.title);if(match){const args={'view name':match[1],'width':parseInt(match[3]),'height':parseInt(match[4])};ret.push(new Alert(AndroidAuditor.viewAlphaAlertInfo_,slice.start,[slice],args));}else if(saveLayerRegEx.test(slice.title)){events.push(slice);}},this);if(events.length>ret.length){const unclippedSeen=Statistics.sum(events,function(slice){return saveLayerRegEx.exec(slice.title)[1]?1:0;});const clippedSeen=events.length-unclippedSeen;const earliestStart=Statistics.min(events,function(slice){return slice.start;});const args={'Unclipped saveLayer count (especially bad!)':unclippedSeen,'Clipped saveLayer count':clippedSeen};events.push(frame);ret.push(new Alert(AndroidAuditor.saveLayerAlertInfo_,earliestStart,events,args));}
 return ret;};AndroidAuditor.pathAlertInfo_=new EventInfo('Path texture churn','Paths are drawn with a mask texture, so when a path is modified / newly drawn, that texture must be generated and uploaded to the GPU. Ensure that you cache paths between frames and do not unnecessarily call Path#reset(). You can cut down on this cost by sharing Path object instances between drawables/views.');AndroidAuditor.getPathAlert_=function(frame){const uploadRegEx=/^Generate Path Texture$/;const events=frame.associatedEvents.filter(function(event){return event.title==='Generate Path Texture';});const start=Statistics.min(events,getStart);const duration=Statistics.sum(events,getDuration);if(duration<3)return undefined;events.push(frame);return new Alert(AndroidAuditor.pathAlertInfo_,start,events,{'Time spent':new Scalar(timeDurationInMs,duration)});};AndroidAuditor.uploadAlertInfo_=new EventInfo('Expensive Bitmap uploads','Bitmaps that have been modified / newly drawn must be uploaded to the GPU. Since this is expensive if the total number of pixels uploaded is large, reduce the amount of Bitmap churn in this animation/context, per frame.');AndroidAuditor.getUploadAlert_=function(frame){const uploadRegEx=/^Upload (\d+)x(\d+) Texture$/;const events=[];let start=Number.POSITIVE_INFINITY;let duration=0;let pixelsUploaded=0;frame.associatedEvents.forEach(function(event){const match=uploadRegEx.exec(event.title);if(match){events.push(event);start=Math.min(start,event.start);duration+=event.duration;pixelsUploaded+=parseInt(match[1])*parseInt(match[2]);}});if(events.length===0||duration<3)return undefined;const mPixels=(pixelsUploaded/1000000).toFixed(2)+' million';const args={'Pixels uploaded':mPixels,'Time spent':new Scalar(timeDurationInMs,duration)};events.push(frame);return new Alert(AndroidAuditor.uploadAlertInfo_,start,events,args);};AndroidAuditor.ListViewInflateAlertInfo_=new EventInfo('Inflation during ListView recycling','ListView item recycling involved inflating views. Ensure your Adapter#getView() recycles the incoming View, instead of constructing a new one.');AndroidAuditor.ListViewBindAlertInfo_=new EventInfo('Inefficient ListView recycling/rebinding','ListView recycling taking too much time per frame. Ensure your Adapter#getView() binds data efficiently.');AndroidAuditor.getListViewAlert_=function(frame){const events=frame.associatedEvents.filter(function(event){return event.title==='obtainView'||event.title==='setupListItem';});const duration=Statistics.sum(events,getCpuDuration);if(events.length===0||duration<3)return undefined;let hasInflation=false;for(const event of events){if(event.findDescendentSlice('inflate')){hasInflation=true;}}
-const start=Statistics.min(events,getStart);const args={'Time spent':new Scalar(timeDurationInMs,duration)};args['ListView items '+(hasInflation?'inflated':'rebound')]=events.length/2;const eventInfo=hasInflation?AndroidAuditor.ListViewInflateAlertInfo_:AndroidAuditor.ListViewBindAlertInfo_;events.push(frame);return new Alert(eventInfo,start,events,args);};AndroidAuditor.measureLayoutAlertInfo_=new EventInfo('Expensive measure/layout pass','Measure/Layout took a significant time, contributing to jank. Avoid triggering layout during animations.',new DocLinkBuilder().addAppVideo('Invalidations, Layouts, and Performance','we6poP0kw6E').build());AndroidAuditor.getMeasureLayoutAlert_=function(frame){const events=frame.associatedEvents.filter(function(event){return event.title==='measure'||event.title==='layout';});const duration=Statistics.sum(events,getCpuDuration);if(events.length===0||duration<3)return undefined;const start=Statistics.min(events,getStart);events.push(frame);return new Alert(AndroidAuditor.measureLayoutAlertInfo_,start,events,{'Time spent':new Scalar(timeDurationInMs,duration)});};AndroidAuditor.viewDrawAlertInfo_=new EventInfo('Long View#draw()','Recording the drawing commands of invalidated Views took a long time. Avoid significant work in View or Drawable custom drawing, especially allocations or drawing to Bitmaps.',new DocLinkBuilder().addAppVideo('Invalidations, Layouts, and Performance','we6poP0kw6E').addAppVideo('Avoiding Allocations in onDraw()','HAK5acHQ53E').build());AndroidAuditor.getViewDrawAlert_=function(frame){let slice=undefined;for(const event of frame.associatedEvents){if(event.title==='getDisplayList'||event.title==='Record View#draw()'){slice=event;break;}}
-if(!slice||getCpuDuration(slice)<3)return undefined;return new Alert(AndroidAuditor.viewDrawAlertInfo_,slice.start,[slice,frame],{'Time spent':new Scalar(timeDurationInMs,getCpuDuration(slice))});};AndroidAuditor.blockingGcAlertInfo_=new EventInfo('Blocking Garbage Collection','Blocking GCs are caused by object churn, and made worse by having large numbers of objects in the heap. Avoid allocating objects during animations/scrolling, and recycle Bitmaps to avoid triggering garbage collection.',new DocLinkBuilder().addAppVideo('Garbage Collection in Android','pzfzz50W5Uo').addAppVideo('Avoiding Allocations in onDraw()','HAK5acHQ53E').build());AndroidAuditor.getBlockingGcAlert_=function(frame){const events=frame.associatedEvents.filter(function(event){return event.title==='DVM Suspend'||event.title==='GC: Wait For Concurrent';});const blockedDuration=Statistics.sum(events,getDuration);if(blockedDuration<3)return undefined;const start=Statistics.min(events,getStart);events.push(frame);return new Alert(AndroidAuditor.blockingGcAlertInfo_,start,events,{'Blocked duration':new Scalar(timeDurationInMs,blockedDuration)});};AndroidAuditor.lockContentionAlertInfo_=new EventInfo('Lock contention','UI thread lock contention is caused when another thread holds a lock that the UI thread is trying to use. UI thread progress is blocked until the lock is released. Inspect locking done within the UI thread, and ensure critical sections are short.');AndroidAuditor.getLockContentionAlert_=function(frame){const events=frame.associatedEvents.filter(function(event){return/^Lock Contention on /.test(event.title);});const blockedDuration=Statistics.sum(events,getDuration);if(blockedDuration<1)return undefined;const start=Statistics.min(events,getStart);events.push(frame);return new Alert(AndroidAuditor.lockContentionAlertInfo_,start,events,{'Blocked duration':new Scalar(timeDurationInMs,blockedDuration)});};AndroidAuditor.schedulingAlertInfo_=new EventInfo('Scheduling delay','Work to produce this frame was descheduled for several milliseconds, contributing to jank. Ensure that code on the UI thread doesn\'t block on work being done on other threads, and that background threads (doing e.g. network or bitmap loading) are running at android.os.Process#THREAD_PRIORITY_BACKGROUND or lower so they are less likely to interrupt the UI thread. These background threads should show up with a priority number of 130 or higher in the scheduling section under the Kernel process.');AndroidAuditor.getSchedulingAlert_=function(frame){let totalDuration=0;const totalStats={};for(const ttr of frame.threadTimeRanges){const stats=ttr.thread.getSchedulingStatsForRange(ttr.start,ttr.end);for(const[key,value]of Object.entries(stats)){if(!(key in totalStats)){totalStats[key]=0;}
+const start=Statistics.min(events,getStart);const args={'Time spent':new Scalar(timeDurationInMs,duration)};args['ListView items '+(hasInflation?'inflated':'rebound')]=events.length/2;const eventInfo=hasInflation?AndroidAuditor.ListViewInflateAlertInfo_:AndroidAuditor.ListViewBindAlertInfo_;events.push(frame);return new Alert(eventInfo,start,events,args);};AndroidAuditor.measureLayoutAlertInfo_=new EventInfo('Expensive measure/layout pass','Measure/Layout took a significant time, contributing to jank. Avoid triggering layout during animations.',new DocLinkBuilder().addAppVideo('Invalidations, Layouts, and Performance','we6poP0kw6E').build());AndroidAuditor.getMeasureLayoutAlert_=function(frame){const events=frame.associatedEvents.filter(function(event){return event.title==='measure'||event.title==='layout';});const duration=Statistics.sum(events,getCpuDuration);if(events.length===0||duration<3)return undefined;const start=Statistics.min(events,getStart);events.push(frame);return new Alert(AndroidAuditor.measureLayoutAlertInfo_,start,events,{'Time spent':new Scalar(timeDurationInMs,duration)});};AndroidAuditor.viewDrawAlertInfo_=new EventInfo('Long View#draw()','Recording the drawing commands of invalidated Views took a long time. Avoid significant work in View or Drawable custom drawing, especially allocations or drawing to Bitmaps.',new DocLinkBuilder().addAppVideo('Invalidations, Layouts, and Performance','we6poP0kw6E').addAppVideo('Avoiding Allocations in onDraw()','HAK5acHQ53E').build());AndroidAuditor.getViewDrawAlert_=function(frame){let slice=undefined;for(const event of frame.associatedEvents){if(event.title==='getDisplayList'||event.title==='Record View#draw()'){slice=event;break;}}
+if(!slice||getCpuDuration(slice)<3)return undefined;return new Alert(AndroidAuditor.viewDrawAlertInfo_,slice.start,[slice,frame],{'Time spent':new Scalar(timeDurationInMs,getCpuDuration(slice))});};AndroidAuditor.blockingGcAlertInfo_=new EventInfo('Blocking Garbage Collection','Blocking GCs are caused by object churn, and made worse by having large numbers of objects in the heap. Avoid allocating objects during animations/scrolling, and recycle Bitmaps to avoid triggering garbage collection.',new DocLinkBuilder().addAppVideo('Garbage Collection in Android','pzfzz50W5Uo').addAppVideo('Avoiding Allocations in onDraw()','HAK5acHQ53E').build());AndroidAuditor.getBlockingGcAlert_=function(frame){const events=frame.associatedEvents.filter(function(event){return event.title==='DVM Suspend'||event.title==='GC: Wait For Concurrent';});const blockedDuration=Statistics.sum(events,getDuration);if(blockedDuration<3)return undefined;const start=Statistics.min(events,getStart);events.push(frame);return new Alert(AndroidAuditor.blockingGcAlertInfo_,start,events,{'Blocked duration':new Scalar(timeDurationInMs,blockedDuration)});};AndroidAuditor.lockContentionAlertInfo_=new EventInfo('Lock contention','UI thread lock contention is caused when another thread holds a lock that the UI thread is trying to use. UI thread progress is blocked until the lock is released. Inspect locking done within the UI thread, and ensure critical sections are short.');AndroidAuditor.getLockContentionAlert_=function(frame){const events=frame.associatedEvents.filter(function(event){return/^Lock Contention on /.test(event.title);});const blockedDuration=Statistics.sum(events,getDuration);if(blockedDuration<1)return undefined;const start=Statistics.min(events,getStart);events.push(frame);return new Alert(AndroidAuditor.lockContentionAlertInfo_,start,events,{'Blocked duration':new Scalar(timeDurationInMs,blockedDuration)});};AndroidAuditor.schedulingAlertInfo_=new EventInfo('Scheduling delay','Work to produce this frame was descheduled for several milliseconds, contributing to jank. Ensure that code on the UI thread doesn\'t block on work being done on other threads, and that background threads (doing e.g. network or bitmap loading) are running at android.os.Process#THREAD_PRIORITY_BACKGROUND or lower so they are less likely to interrupt the UI thread. These background threads should show up with a priority number of 130 or higher in the scheduling section under the Kernel process.');AndroidAuditor.getSchedulingAlert_=function(frame){let totalDuration=0;const totalStats={};for(const ttr of frame.threadTimeRanges){const stats=ttr.thread.getSchedulingStatsForRange(ttr.start,ttr.end);for(const[key,value]of Object.entries(stats)){if(!(key in totalStats)){totalStats[key]=0;}
 totalStats[key]+=value;totalDuration+=value;}}
 if(!(SCHEDULING_STATE.RUNNING in totalStats)||totalDuration===0||totalDuration-totalStats[SCHEDULING_STATE.RUNNING]<3){return;}
 const args={};for(const[key,value]of Object.entries(totalStats)){let newKey=key;if(key===SCHEDULING_STATE.RUNNABLE){newKey='Not scheduled, but runnable';}else if(key===SCHEDULING_STATE.UNINTR_SLEEP){newKey='Blocking I/O delay';}
@@ -4511,7 +4511,7 @@
 if(/^hwuiTask/.test(thread.name)){thread.sortIndex=-1;}});},pushFramesAndJudgeJank_(){let badFramesObserved=0;let framesObserved=0;const surfaceFlinger=this.helper.surfaceFlinger;this.helper.apps.forEach(function(app){app.process.frames=app.getFrames();app.process.frames.forEach(function(frame){if(frame.totalDuration>EXPECTED_FRAME_TIME_MS*2){badFramesObserved+=2;frame.perfClass=FRAME_PERF_CLASS.TERRIBLE;}else if(frame.totalDuration>EXPECTED_FRAME_TIME_MS||frameMissedDeadline(frame)){badFramesObserved++;frame.perfClass=FRAME_PERF_CLASS.BAD;}else{frame.perfClass=FRAME_PERF_CLASS.GOOD;}});framesObserved+=app.process.frames.length;});if(framesObserved){const portionBad=badFramesObserved/framesObserved;if(portionBad>0.3){this.model.faviconHue='red';}else if(portionBad>0.05){this.model.faviconHue='yellow';}else{this.model.faviconHue='green';}}},pushEventInfo_(){const appAnnotator=new AppAnnotator();this.helper.apps.forEach(function(app){if(app.uiThread){appAnnotator.applyEventInfos(app.uiThread.sliceGroup);}
 if(app.renderThread){appAnnotator.applyEventInfos(app.renderThread.sliceGroup);}});},runAnnotate(){if(!this.helper)return;this.renameAndSort_();this.pushFramesAndJudgeJank_();this.pushEventInfo_();this.helper.iterateImportantSlices(function(slice){slice.important=true;});},runAudit(){if(!this.helper)return;const alerts=this.model.alerts;this.helper.apps.forEach(function(app){app.getFrames().forEach(function(frame){alerts.push.apply(alerts,AndroidAuditor.getSaveLayerAlerts_(frame));if(frame.perfClass===FRAME_PERF_CLASS.NEUTRAL||frame.perfClass===FRAME_PERF_CLASS.GOOD){return;}
 let alert=AndroidAuditor.getPathAlert_(frame);if(alert)alerts.push(alert);alert=AndroidAuditor.getUploadAlert_(frame);if(alert)alerts.push(alert);alert=AndroidAuditor.getListViewAlert_(frame);if(alert)alerts.push(alert);alert=AndroidAuditor.getMeasureLayoutAlert_(frame);if(alert)alerts.push(alert);alert=AndroidAuditor.getViewDrawAlert_(frame);if(alert)alerts.push(alert);alert=AndroidAuditor.getBlockingGcAlert_(frame);if(alert)alerts.push(alert);alert=AndroidAuditor.getLockContentionAlert_(frame);if(alert)alerts.push(alert);alert=AndroidAuditor.getSchedulingAlert_(frame);if(alert)alerts.push(alert);});},this);this.addRenderingInteractionRecords();this.addInputInteractionRecords();},addRenderingInteractionRecords(){const events=[];this.helper.apps.forEach(function(app){events.push.apply(events,app.getAnimationAsyncSlices());events.push.apply(events,app.getFrames());});const mergerFunction=function(events){const ir=new tr.model.um.ResponseExpectation(this.model,'Rendering',events[0].min,events[events.length-1].max-events[0].min);this.model.userModel.expectations.push(ir);}.bind(this);tr.b.math.mergeRanges(tr.b.math.convertEventsToRanges(events),30,mergerFunction);},addInputInteractionRecords(){const inputSamples=[];this.helper.apps.forEach(function(app){inputSamples.push.apply(inputSamples,app.getInputSamples());});const mergerFunction=function(events){const ir=new tr.model.um.ResponseExpectation(this.model,'Input',events[0].min,events[events.length-1].max-events[0].min);this.model.userModel.expectations.push(ir);}.bind(this);const inputRanges=inputSamples.map(function(sample){return tr.b.math.Range.fromExplicitRange(sample.timestamp,sample.timestamp);});tr.b.math.mergeRanges(inputRanges,30,mergerFunction);}};Auditor.register(AndroidAuditor);function AppAnnotator(){this.titleInfoLookup=new Map();this.titleParentLookup=new Map();this.build_();}
-AppAnnotator.prototype={build_(){const registerEventInfo=function(dict){this.titleInfoLookup.set(dict.title,new EventInfo(dict.title,dict.description,dict.docLinks));if(dict.parents){this.titleParentLookup.set(dict.title,dict.parents);}}.bind(this);registerEventInfo({title:'inflate',description:'Constructing a View hierarchy from pre-processed XML via LayoutInflater#layout. This includes constructing all of the View objects in the hierarchy, and applying styled attributes.'});registerEventInfo({title:'obtainView',description:'Adapter#getView() called to bind content to a recycled View that is being presented.'});registerEventInfo({title:'setupListItem',description:'Attached a newly-bound, recycled View to its parent ListView.'});registerEventInfo({title:'setupGridItem',description:'Attached a newly-bound, recycled View to its parent GridView.'});const choreographerLinks=new DocLinkBuilder().addDacRef('Choreographer','android/view/Choreographer.html').build();registerEventInfo({title:'Choreographer#doFrame',docLinks:choreographerLinks,description:'Choreographer executes frame callbacks for inputs, animations, and rendering traversals. When this work is done, a frame will be presented to the user.'});registerEventInfo({title:'input',parents:['Choreographer#doFrame'],docLinks:choreographerLinks,description:'Input callbacks are processed. This generally encompasses dispatching input to Views, as well as any work the Views do to process this input/gesture.'});registerEventInfo({title:'animation',parents:['Choreographer#doFrame'],docLinks:choreographerLinks,description:'Animation callbacks are processed. This is generally minimal work, as animations determine progress for the frame, and push new state to animated objects (such as setting View properties).'});registerEventInfo({title:'traversals',parents:['Choreographer#doFrame'],docLinks:choreographerLinks,description:'Primary draw traversals. This is the primary traversal of the View hierarchy, including layout and draw passes.'});const traversalParents=['Choreographer#doFrame','performTraversals'];const layoutLinks=new DocLinkBuilder().addDacRef('View#Layout','android/view/View.html#Layout').build();registerEventInfo({title:'performTraversals',description:'A drawing traversal of the View hierarchy, comprised of all layout and drawing needed to produce the frame.'});registerEventInfo({title:'measure',parents:traversalParents,docLinks:layoutLinks,description:'First of two phases in view hierarchy layout. Views are asked to size themselves according to constraints supplied by their parent. Some ViewGroups may measure a child more than once to help satisfy their own constraints. Nesting ViewGroups that measure children more than once can lead to excessive and repeated work.'});registerEventInfo({title:'layout',parents:traversalParents,docLinks:layoutLinks,description:'Second of two phases in view hierarchy layout, repositioning content and child Views into their new locations.'});const drawString='Draw pass over the View hierarchy. Every invalidated View will have its drawing commands recorded. On Android versions prior to Lollipop, this would also include the issuing of draw commands to the GPU. Starting with Lollipop, it only includes the recording of commands, and syncing that information to the RenderThread.';registerEventInfo({title:'draw',parents:traversalParents,description:drawString});const recordString='Every invalidated View\'s drawing commands are recorded. Each will have View#draw() called, and is passed a Canvas that will record and store its drawing commands until it is next invalidated/rerecorded.';registerEventInfo({title:'getDisplayList',parents:['draw'],description:recordString});registerEventInfo({title:'Record View#draw()',parents:['draw'],description:recordString});registerEventInfo({title:'drawDisplayList',parents:['draw'],description:'Execution of recorded draw commands to generate a frame. This represents the actual formation and issuing of drawing commands to the GPU. On Android L and higher devices, this work is done on a dedicated RenderThread, instead of on the UI Thread.'});registerEventInfo({title:'DrawFrame',description:'RenderThread portion of the standard UI/RenderThread split frame. This represents the actual formation and issuing of drawing commands to the GPU.'});registerEventInfo({title:'doFrame',description:'RenderThread animation frame. Represents drawing work done by the RenderThread on a frame where the UI thread did not produce new drawing content.'});registerEventInfo({title:'syncFrameState',description:'Sync stage between the UI thread and the RenderThread, where the UI thread hands off a frame (including information about modified Views). Time in this method primarily consists of uploading modified Bitmaps to the GPU. After this sync is completed, the UI thread is unblocked, and the RenderThread starts to render the frame.'});registerEventInfo({title:'flush drawing commands',description:'Issuing the now complete drawing commands to the GPU.'});registerEventInfo({title:'eglSwapBuffers',description:'Complete GPU rendering of the frame.'});registerEventInfo({title:'RV Scroll',description:'RecyclerView is calculating a scroll. If there are too many of these in Systrace, some Views inside RecyclerView might be causing it. Try to avoid using EditText, focusable views or handle them with care.'});registerEventInfo({title:'RV OnLayout',description:'OnLayout has been called by the View system. If this shows up too many times in Systrace, make sure the children of RecyclerView do not update themselves directly. This will cause a full re-layout but when it happens via the Adapter notifyItemChanged, RecyclerView can avoid full layout calculation.'});registerEventInfo({title:'RV FullInvalidate',description:'NotifyDataSetChanged or equal has been called. If this is taking a long time, try sending granular notify adapter changes instead of just calling notifyDataSetChanged or setAdapter / swapAdapter. Adding stable ids to your adapter might help.'});registerEventInfo({title:'RV PartialInvalidate',description:'RecyclerView is rebinding a View. If this is taking a lot of time, consider optimizing your layout or make sure you are not doing extra operations in onBindViewHolder call.'});registerEventInfo({title:'RV OnBindView',description:'RecyclerView is rebinding a View. If this is taking a lot of time, consider optimizing your layout or make sure you are not doing extra operations in onBindViewHolder call.'});registerEventInfo({title:'RV CreateView',description:'RecyclerView is creating a new View. If too many of these are present: 1) There might be a problem in Recycling (e.g. custom Animations that set transient state and prevent recycling or ItemAnimator not implementing the contract properly. See Adapter#onFailedToRecycleView(ViewHolder). 2) There may be too many item view types. Try merging them. 3) There might be too many itemChange animations and not enough space in RecyclerPool. Try increasing your pool size and item cache size.'});registerEventInfo({title:'eglSwapBuffers',description:'The CPU has finished producing drawing commands, and is flushing drawing work to the GPU, and posting that buffer to the consumer (which is often SurfaceFlinger window composition). Once this is completed, the GPU can produce the frame content without any involvement from the CPU.'});},applyEventInfosRecursive_(parentNames,slice){const checkExpectedParentNames=function(expectedParentNames){if(!expectedParentNames)return true;return expectedParentNames.some(function(name){return parentNames.has(name);});};if(this.titleInfoLookup.has(slice.title)){if(checkExpectedParentNames(this.titleParentLookup.get(slice.title))){slice.info=this.titleInfoLookup.get(slice.title);}}
+AppAnnotator.prototype={build_(){const registerEventInfo=function(dict){this.titleInfoLookup.set(dict.title,new EventInfo(dict.title,dict.description,dict.docLinks));if(dict.parents){this.titleParentLookup.set(dict.title,dict.parents);}}.bind(this);registerEventInfo({title:'inflate',description:'Constructing a View hierarchy from pre-processed XML via LayoutInflater#layout. This includes constructing all of the View objects in the hierarchy, and applying styled attributes.'});registerEventInfo({title:'obtainView',description:'Adapter#getView() called to bind content to a recycled View that is being presented.'});registerEventInfo({title:'setupListItem',description:'Attached a newly-bound, recycled View to its parent ListView.'});registerEventInfo({title:'setupGridItem',description:'Attached a newly-bound, recycled View to its parent GridView.'});const choreographerLinks=new DocLinkBuilder().addDacRef('Choreographer','android/view/Choreographer.html').build();registerEventInfo({title:'Choreographer#doFrame',docLinks:choreographerLinks,description:'Choreographer executes frame callbacks for inputs, animations, and rendering traversals. When this work is done, a frame will be presented to the user.'});registerEventInfo({title:'input',parents:['Choreographer#doFrame'],docLinks:choreographerLinks,description:'Input callbacks are processed. This generally encompasses dispatching input to Views, as well as any work the Views do to process this input/gesture.'});registerEventInfo({title:'animation',parents:['Choreographer#doFrame'],docLinks:choreographerLinks,description:'Animation callbacks are processed. This is generally minimal work, as animations determine progress for the frame, and push new state to animated objects (such as setting View properties).'});registerEventInfo({title:'traversals',parents:['Choreographer#doFrame'],docLinks:choreographerLinks,description:'Primary draw traversals. This is the primary traversal of the View hierarchy, including layout and draw passes.'});const traversalParents=['Choreographer#doFrame','performTraversals'];const layoutLinks=new DocLinkBuilder().addDacRef('View#Layout','android/view/View.html#Layout').build();registerEventInfo({title:'performTraversals',description:'A drawing traversal of the View hierarchy, comprised of all layout and drawing needed to produce the frame.'});registerEventInfo({title:'measure',parents:traversalParents,docLinks:layoutLinks,description:'First of two phases in view hierarchy layout. Views are asked to size themselves according to constraints supplied by their parent. Some ViewGroups may measure a child more than once to help satisfy their own constraints. Nesting ViewGroups that measure children more than once can lead to excessive and repeated work.'});registerEventInfo({title:'layout',parents:traversalParents,docLinks:layoutLinks,description:'Second of two phases in view hierarchy layout, repositioning content and child Views into their new locations.'});const drawString='Draw pass over the View hierarchy. Every invalidated View will have its drawing commands recorded. On Android versions prior to Lollipop, this would also include the issuing of draw commands to the GPU. Starting with Lollipop, it only includes the recording of commands, and syncing that information to the RenderThread.';registerEventInfo({title:'draw',parents:traversalParents,description:drawString});const recordString='Every invalidated View\'s drawing commands are recorded. Each will have View#draw() called, and is passed a Canvas that will record and store its drawing commands until it is next invalidated/rerecorded.';registerEventInfo({title:'getDisplayList',parents:['draw'],description:recordString});registerEventInfo({title:'Record View#draw()',parents:['draw'],description:recordString});registerEventInfo({title:'drawDisplayList',parents:['draw'],description:'Execution of recorded draw commands to generate a frame. This represents the actual formation and issuing of drawing commands to the GPU. On Android L and higher devices, this work is done on a dedicated RenderThread, instead of on the UI Thread.'});registerEventInfo({title:'DrawFrame',description:'RenderThread portion of the standard UI/RenderThread split frame. This represents the actual formation and issuing of drawing commands to the GPU.'});registerEventInfo({title:'doFrame',description:'RenderThread animation frame. Represents drawing work done by the RenderThread on a frame where the UI thread did not produce new drawing content.'});registerEventInfo({title:'syncFrameState',description:'Sync stage between the UI thread and the RenderThread, where the UI thread hands off a frame (including information about modified Views). Time in this method primarily consists of uploading modified Bitmaps to the GPU. After this sync is completed, the UI thread is unblocked, and the RenderThread starts to render the frame.'});registerEventInfo({title:'flush drawing commands',description:'Issuing the now complete drawing commands to the GPU.'});registerEventInfo({title:'eglSwapBuffers',description:'Complete GPU rendering of the frame.'});registerEventInfo({title:'RV Scroll',description:'RecyclerView is calculating a scroll. If there are too many of these in Systrace, some Views inside RecyclerView might be causing it. Try to avoid using EditText, focusable views or handle them with care.'});registerEventInfo({title:'RV OnLayout',description:'OnLayout has been called by the View system. If this shows up too many times in Systrace, make sure the children of RecyclerView do not update themselves directly. This will cause a full re-layout but when it happens via the Adapter notifyItemChanged, RecyclerView can avoid full layout calculation.'});registerEventInfo({title:'RV FullInvalidate',description:'NotifyDataSetChanged or equal has been called. If this is taking a long time, try sending granular notify adapter changes instead of just calling notifyDataSetChanged or setAdapter / swapAdapter. Adding stable ids to your adapter might help.'});registerEventInfo({title:'RV PartialInvalidate',description:'RecyclerView is rebinding a View. If this is taking a lot of time, consider optimizing your layout or make sure you are not doing extra operations in onBindViewHolder call.'});registerEventInfo({title:'RV OnBindView',description:'RecyclerView is rebinding a View. If this is taking a lot of time, consider optimizing your layout or make sure you are not doing extra operations in onBindViewHolder call.'});registerEventInfo({title:'RV CreateView',description:'RecyclerView is creating a new View. If too many of these are present: 1) There might be a problem in Recycling (e.g. custom Animations that set transient state and prevent recycling or ItemAnimator not implementing the contract properly. See Adapter#onFailedToRecycleView(ViewHolder). 2) There may be too many item view types. Try merging them. 3) There might be too many itemChange animations and not enough space in RecyclerPool. Try increasing your pool size and item cache size.'});registerEventInfo({title:'eglSwapBuffers',description:'The CPU has finished producing drawing commands, and is flushing drawing work to the GPU, and posting that buffer to the consumer (which is often SurfaceFlinger window composition). Once this is completed, the GPU can produce the frame content without any involvement from the CPU.'});},applyEventInfosRecursive_(parentNames,slice){const checkExpectedParentNames=function(expectedParentNames){if(!expectedParentNames)return true;return expectedParentNames.some(function(name){return parentNames.has(name);});};if(this.titleInfoLookup.has(slice.title)){if(checkExpectedParentNames(this.titleParentLookup.get(slice.title))){slice.info=this.titleInfoLookup.get(slice.title);}}
 if(slice.subSlices.length>0){if(!parentNames.has(slice.title)){parentNames.set(slice.title,0);}
 parentNames.set(slice.title,parentNames.get(slice.title)+1);slice.subSlices.forEach(function(subSlice){this.applyEventInfosRecursive_(parentNames,subSlice);},this);parentNames.set(slice.title,parentNames.get(slice.title)-1);if(parentNames.get(slice.title)===0){delete parentNames[slice.title];}}},applyEventInfos(sliceGroup){sliceGroup.topLevelSlices.forEach(function(slice){this.applyEventInfosRecursive_(new Map(),slice);},this);}};return{AndroidAuditor,};});'use strict';tr.exportTo('tr.model',function(){function ObjectSnapshot(objectInstance,ts,args){tr.model.Event.call(this);this.objectInstance=objectInstance;this.ts=ts;this.args=args;}
 ObjectSnapshot.prototype={__proto__:tr.model.Event.prototype,preInitialize(){},initialize(){},referencedAt(item,object,field){},addBoundsToRange(range){range.addValue(this.ts);},get userFriendlyName(){return'Snapshot of '+this.objectInstance.userFriendlyName+' @ '+
@@ -5573,7 +5573,7 @@
 class ProfilingDictionaryReader{constructor(opt_metadata,opt_maps,opt_groups,opt_parent){this.metadata=opt_metadata||new Map();this.maps=opt_maps||new Map();this.groups=opt_groups||new Map();this.parent_=opt_parent||undefined;this.inflated_=undefined;this.raw_=undefined;this.boundGetString_=this.getString.bind(this);this.deferenceStrings_=o=>deferenceStrings(this.boundGetString_,o);}
 static empty(){return new ProfilingDictionaryReader();}
 get parent(){return this.parent_;}
-get raw(){if(this.raw_)return this.raw_;this.raw_={};for(const[name,group]of this.groups.entries()){this.raw_[name]=group;}
+get raw(){if(this.raw_)return this.raw_;this.raw_={};for(const[name,group]of this.groups.entries()){this.raw_[name]=group;}
 return this.raw_;}
 get inflated(){if(this.inflated_)return this.inflated_;this.inflated_={};for(const[name,group]of this.groups.entries()){this.inflated_[name]=this.inflateGroup(group);}
 return this.inflated_;}
@@ -7559,11 +7559,11 @@
 function drawProjectedQuadToContext(passNumber,quad,p1,p2,p3,p4,ctx,quadCanvas){if(passNumber===0){drawProjectedQuadBackgroundToContext(quad,p1,p2,p3,p4,ctx,quadCanvas);}else if(passNumber===1){drawProjectedQuadOutlineToContext(quad,p1,p2,p3,p4,ctx,quadCanvas);}else if(passNumber===2){drawProjectedQuadSelectionOutlineToContext(quad,p1,p2,p3,p4,ctx,quadCanvas);}else{throw new Error('Invalid pass number');}}
 const tmpP1=vec3.create();const tmpP2=vec3.create();const tmpP3=vec3.create();const tmpP4=vec3.create();function transformAndProcessQuads(matrix,viewport,quads,numPasses,handleQuadFunc,opt_arg1,opt_arg2){for(let passNumber=0;passNumber<numPasses;passNumber++){for(let i=0;i<quads.length;i++){const quad=quads[i];transform(tmpP1,quad.p1,matrix,viewport);transform(tmpP2,quad.p2,matrix,viewport);transform(tmpP3,quad.p3,matrix,viewport);transform(tmpP4,quad.p4,matrix,viewport);handleQuadFunc(passNumber,quad,tmpP1,tmpP2,tmpP3,tmpP4,opt_arg1,opt_arg2);}}}
 const QuadStackView=tr.ui.b.define('quad-stack-view');QuadStackView.prototype={__proto__:HTMLDivElement.prototype,decorate(){this.className='quad-stack-view';this.style.display='flex';this.style.position='relative';const node=tr.ui.b.instantiateTemplate('#quad-stack-view-template',THIS_DOC);Polymer.dom(this).appendChild(node);this.updateHeaderVisibility_();const header=Polymer.dom(this).querySelector('#header');header.style.position='absolute';header.style.fontSize='70%';header.style.top='10px';header.style.left='10px';header.style.right='150px';const scroller=Polymer.dom(this).querySelector('#canvas-scroller');scroller.style.flexGrow=1;scroller.style.flexShrink=1;scroller.style.flexBasis='auto';scroller.style.minWidth=0;scroller.style.minHeight=0;scroller.style.overflow='auto';this.canvas_=Polymer.dom(this).querySelector('#canvas');this.chromeImages_={left:Polymer.dom(this).querySelector('#chrome-left'),mid:Polymer.dom(this).querySelector('#chrome-mid'),right:Polymer.dom(this).querySelector('#chrome-right')};const stackingDistanceSlider=Polymer.dom(this).querySelector('#stacking-distance-slider');stackingDistanceSlider.style.position='absolute';stackingDistanceSlider.style.fontSize='70%';stackingDistanceSlider.style.top='10px';stackingDistanceSlider.style.right='10px';stackingDistanceSlider.value=tr.b.Settings.get('quadStackView.stackingDistance',45);stackingDistanceSlider.addEventListener('change',this.onStackingDistanceChange_.bind(this));stackingDistanceSlider.addEventListener('input',this.onStackingDistanceChange_.bind(this));this.trackMouse_();this.camera_=new tr.ui.b.Camera(this.mouseModeSelector_);this.camera_.addEventListener('renderrequired',this.onRenderRequired_.bind(this));this.cameraWasReset_=false;this.camera_.canvas=this.canvas_;this.viewportRect_=tr.b.math.Rect.fromXYWH(0,0,0,0);this.pixelRatio_=window.devicePixelRatio||1;},updateHeaderVisibility_(){if(this.headerText){Polymer.dom(this).querySelector('#header').style.display='';}else{Polymer.dom(this).querySelector('#header').style.display='none';}},get headerText(){return Polymer.dom(this).querySelector('#header').textContent;},set headerText(headerText){Polymer.dom(this).querySelector('#header').textContent=headerText;this.updateHeaderVisibility_();},onStackingDistanceChange_(e){tr.b.Settings.set('quadStackView.stackingDistance',this.stackingDistance);this.scheduleRender();e.stopPropagation();},get stackingDistance(){return Polymer.dom(this).querySelector('#stacking-distance-slider').value;},get mouseModeSelector(){return this.mouseModeSelector_;},get camera(){return this.camera_;},set quads(q){this.quads_=q;this.scheduleRender();},set deviceRect(rect){if(!rect||rect.equalTo(this.deviceRect_))return;this.deviceRect_=rect;this.camera_.deviceRect=rect;this.chromeQuad_=undefined;},resize(){if(!this.offsetParent)return true;const width=parseInt(window.getComputedStyle(this.offsetParent).width);const height=parseInt(window.getComputedStyle(this.offsetParent).height);const rect=tr.b.math.Rect.fromXYWH(0,0,width,height);if(rect.equalTo(this.viewportRect_))return false;this.viewportRect_=rect;this.canvas_.style.width=width+'px';this.canvas_.style.height=height+'px';this.canvas_.width=this.pixelRatio_*width;this.canvas_.height=this.pixelRatio_*height;if(!this.cameraWasReset_){this.camera_.resetCamera();this.cameraWasReset_=true;}
-return true;},readyToDraw(){if(!this.chromeImages_.left.src){let leftContent=window.getComputedStyle(this.chromeImages_.left).backgroundImage;leftContent=tr.ui.b.extractUrlString(leftContent);let midContent=window.getComputedStyle(this.chromeImages_.mid).backgroundImage;midContent=tr.ui.b.extractUrlString(midContent);let rightContent=window.getComputedStyle(this.chromeImages_.right).backgroundImage;rightContent=tr.ui.b.extractUrlString(rightContent);this.chromeImages_.left.src=leftContent;this.chromeImages_.mid.src=midContent;this.chromeImages_.right.src=rightContent;}
+return true;},readyToDraw(){if(!this.chromeImages_.left.src){let leftContent=window.getComputedStyle(this.chromeImages_.left).backgroundImage;leftContent=tr.ui.b.extractUrlString(leftContent);let midContent=window.getComputedStyle(this.chromeImages_.mid).backgroundImage;midContent=tr.ui.b.extractUrlString(midContent);let rightContent=window.getComputedStyle(this.chromeImages_.right).backgroundImage;rightContent=tr.ui.b.extractUrlString(rightContent);this.chromeImages_.left.src=leftContent;this.chromeImages_.mid.src=midContent;this.chromeImages_.right.src=rightContent;}
 return(this.chromeImages_.left.height>0)&&(this.chromeImages_.mid.height>0)&&(this.chromeImages_.right.height>0);},get chromeQuad(){if(this.chromeQuad_)return this.chromeQuad_;const chromeCanvas=document.createElement('canvas');const offsetY=this.chromeImages_.left.height;chromeCanvas.width=this.deviceRect_.width;chromeCanvas.height=this.deviceRect_.height+offsetY;const leftWidth=this.chromeImages_.left.width;const midWidth=this.chromeImages_.mid.width;const rightWidth=this.chromeImages_.right.width;const chromeCtx=chromeCanvas.getContext('2d');chromeCtx.drawImage(this.chromeImages_.left,0,0);chromeCtx.save();chromeCtx.translate(leftWidth,0);const s=(this.deviceRect_.width-leftWidth-rightWidth)/midWidth;chromeCtx.scale(s,1);chromeCtx.drawImage(this.chromeImages_.mid,0,0);chromeCtx.restore();chromeCtx.drawImage(this.chromeImages_.right,leftWidth+s*midWidth,0);const chromeRect=tr.b.math.Rect.fromXYWH(this.deviceRect_.x,this.deviceRect_.y-offsetY,this.deviceRect_.width,this.deviceRect_.height+offsetY);const chromeQuad=tr.b.math.Quad.fromRect(chromeRect);chromeQuad.stackingGroupId=this.maxStackingGroupId_+1;chromeQuad.imageData=chromeCtx.getImageData(0,0,chromeCanvas.width,chromeCanvas.height);chromeQuad.shadowOffset=[0,0];chromeQuad.shadowBlur=5;chromeQuad.borderWidth=3;this.chromeQuad_=chromeQuad;return this.chromeQuad_;},scheduleRender(){if(this.redrawScheduled_)return false;this.redrawScheduled_=true;tr.b.requestAnimationFrame(this.render,this);},onRenderRequired_(e){this.scheduleRender();},stackTransformAndProcessQuads_(numPasses,handleQuadFunc,includeChromeQuad,opt_arg1,opt_arg2){const mv=this.camera_.modelViewMatrix;const p=this.camera_.projectionMatrix;const viewport=tr.b.math.Rect.fromXYWH(0,0,this.canvas_.width,this.canvas_.height);const quadStacks=[];for(let i=0;i<this.quads_.length;++i){const quad=this.quads_[i];const stackingId=quad.stackingGroupId||0;while(stackingId>=quadStacks.length){quadStacks.push([]);}
 quadStacks[stackingId].push(quad);}
 const mvp=mat4.create();this.maxStackingGroupId_=quadStacks.length;const effectiveStackingDistance=this.stackingDistance*this.camera_.stackingDistanceDampening;mat4.multiply(mvp,p,mv);for(let i=0;i<quadStacks.length;++i){transformAndProcessQuads(mvp,viewport,quadStacks[i],numPasses,handleQuadFunc,opt_arg1,opt_arg2);mat4.translate(mv,mv,[0,0,effectiveStackingDistance]);mat4.multiply(mvp,p,mv);}
-if(includeChromeQuad&&this.deviceRect_){transformAndProcessQuads(mvp,viewport,[this.chromeQuad],numPasses,drawProjectedQuadToContext,opt_arg1,opt_arg2);}},render(){this.redrawScheduled_=false;if(!this.readyToDraw()){setTimeout(this.scheduleRender.bind(this),constants.IMAGE_LOAD_RETRY_TIME_MS);return;}
+if(includeChromeQuad&&this.deviceRect_){transformAndProcessQuads(mvp,viewport,[this.chromeQuad],numPasses,drawProjectedQuadToContext,opt_arg1,opt_arg2);}},render(){this.redrawScheduled_=false;if(!this.readyToDraw()){setTimeout(this.scheduleRender.bind(this),constants.IMAGE_LOAD_RETRY_TIME_MS);return;}
 if(!this.quads_)return;const canvasCtx=this.canvas_.getContext('2d');if(!this.resize()){canvasCtx.clearRect(0,0,this.canvas_.width,this.canvas_.height);}
 const quadCanvas=document.createElement('canvas');this.stackTransformAndProcessQuads_(3,drawProjectedQuadToContext,true,canvasCtx,quadCanvas);quadCanvas.width=0;},trackMouse_(){this.mouseModeSelector_=document.createElement('tr-ui-b-mouse-mode-selector');this.mouseModeSelector_.targetElement=this.canvas_;this.mouseModeSelector_.supportedModeMask=tr.ui.b.MOUSE_SELECTOR_MODE.SELECTION|tr.ui.b.MOUSE_SELECTOR_MODE.PANSCAN|tr.ui.b.MOUSE_SELECTOR_MODE.ZOOM|tr.ui.b.MOUSE_SELECTOR_MODE.ROTATE;this.mouseModeSelector_.mode=tr.ui.b.MOUSE_SELECTOR_MODE.PANSCAN;this.mouseModeSelector_.pos={x:0,y:100};Polymer.dom(this).appendChild(this.mouseModeSelector_);this.mouseModeSelector_.settingsKey='quadStackView.mouseModeSelector';this.mouseModeSelector_.setModifierForAlternateMode(tr.ui.b.MOUSE_SELECTOR_MODE.ROTATE,tr.ui.b.MODIFIER.SHIFT);this.mouseModeSelector_.setModifierForAlternateMode(tr.ui.b.MOUSE_SELECTOR_MODE.PANSCAN,tr.ui.b.MODIFIER.SPACE);this.mouseModeSelector_.setModifierForAlternateMode(tr.ui.b.MOUSE_SELECTOR_MODE.ZOOM,tr.ui.b.MODIFIER.CMD_OR_CTRL);this.mouseModeSelector_.addEventListener('updateselection',this.onSelectionUpdate_.bind(this));this.mouseModeSelector_.addEventListener('endselection',this.onSelectionUpdate_.bind(this));},extractRelativeMousePosition_(e){const br=this.canvas_.getBoundingClientRect();return[this.pixelRatio_*(e.clientX-this.canvas_.offsetLeft-br.left),this.pixelRatio_*(e.clientY-this.canvas_.offsetTop-br.top)];},onSelectionUpdate_(e){const mousePos=this.extractRelativeMousePosition_(e);const res=[];function handleQuad(passNumber,quad,p1,p2,p3,p4){if(tr.b.math.pointInImplicitQuad(mousePos,p1,p2,p3,p4)){res.push(quad);}}
 this.stackTransformAndProcessQuads_(1,handleQuad,false);e=new tr.b.Event('selectionchange');e.quads=res;this.dispatchEvent(e);}};return{QuadStackView,};});'use strict';tr.exportTo('tr.ui.e.chrome.cc',function(){const ColorScheme=tr.b.ColorScheme;const THIS_DOC=document.currentScript.ownerDocument;const TILE_HEATMAP_TYPE={};TILE_HEATMAP_TYPE.NONE='none';TILE_HEATMAP_TYPE.SCHEDULED_PRIORITY='scheduledPriority';TILE_HEATMAP_TYPE.USING_GPU_MEMORY='usingGpuMemory';const cc=tr.ui.e.chrome.cc;function createTileRectsSelectorBaseOptions(){return[{label:'None',value:'none'},{label:'Coverage Rects',value:'coverage'}];}
@@ -7636,7 +7636,7 @@
 tr.b.dispatchSimpleEvent(this,'selection-change');},createPictureBtn_(pictures){if(!(pictures instanceof Array)){pictures=[pictures];}
 const link=document.createElement('tr-ui-a-analysis-link');link.selection=function(){const layeredPicture=new tr.e.cc.LayeredPicture(pictures);const snapshot=new tr.e.cc.PictureSnapshot(layeredPicture);snapshot.picture=layeredPicture;const selection=new tr.model.EventSet();selection.push(snapshot);return selection;};Polymer.dom(link).textContent='View in Picture Debugger';return link;},onRequestSelectionChangeFromAnalysisEl_(e){if(!(e.selection instanceof tr.ui.e.chrome.cc.Selection)){return;}
 e.stopPropagation();this.selection=e.selection;},get extraHighlightsByLayerId(){return this.layerTreeQuadStackView_.extraHighlightsByLayerId;},set extraHighlightsByLayerId(extraHighlightsByLayerId){this.layerTreeQuadStackView_.extraHighlightsByLayerId=extraHighlightsByLayerId;}};return{LayerView,};});'use strict';tr.exportTo('tr.ui.e.chrome.cc',function(){const LayerTreeHostImplSnapshotView=tr.ui.b.define('tr-ui-e-chrome-cc-layer-tree-host-impl-snapshot-view',tr.ui.analysis.ObjectSnapshotView);LayerTreeHostImplSnapshotView.prototype={__proto__:tr.ui.analysis.ObjectSnapshotView.prototype,decorate(){Polymer.dom(this).classList.add('tr-ui-e-chrome-cc-lthi-s-view');this.style.display='flex';this.style.flexDirection='row';this.style.flexGrow=1;this.style.flexShrink=1;this.style.flexBasis='auto';this.style.minWidth=0;this.selection_=undefined;this.layerPicker_=new tr.ui.e.chrome.cc.LayerPicker();this.layerPicker_.style.flexGrow=0;this.layerPicker_.style.flexShrink=0;this.layerPicker_.style.flexBasis='auto';this.layerPicker_.style.minWidth='200px';this.layerPicker_.addEventListener('selection-change',this.onLayerPickerSelectionChanged_.bind(this));this.layerView_=new tr.ui.e.chrome.cc.LayerView();this.layerView_.addEventListener('selection-change',this.onLayerViewSelectionChanged_.bind(this));this.layerView_.style.flexGrow=1;this.layerView_.style.flexShrink=1;this.layerView_.style.flexBasis='auto';this.layerView_.style.minWidth=0;this.dragHandle_=document.createElement('tr-ui-b-drag-handle');this.dragHandle_.style.flexGrow=0;this.dragHandle_.style.flexShrink=0;this.dragHandle_.style.flexBasis='auto';this.dragHandle_.horizontal=false;this.dragHandle_.target=this.layerPicker_;Polymer.dom(this).appendChild(this.layerPicker_);Polymer.dom(this).appendChild(this.dragHandle_);Polymer.dom(this).appendChild(this.layerView_);this.onLayerViewSelectionChanged_();this.onLayerPickerSelectionChanged_();},get objectSnapshot(){return this.objectSnapshot_;},set objectSnapshot(objectSnapshot){this.objectSnapshot_=objectSnapshot;const lthi=this.objectSnapshot;let layerTreeImpl;if(lthi){layerTreeImpl=lthi.getTree(this.layerPicker_.whichTree);}
-this.layerPicker_.lthiSnapshot=lthi;this.layerView_.layerTreeImpl=layerTreeImpl;this.layerView_.regenerateContent();if(!this.selection_)return;this.selection=this.selection_.findEquivalent(lthi);},get selection(){return this.selection_;},set selection(selection){if(this.selection_===selection)return;this.selection_=selection;this.layerPicker_.selection=selection;this.layerView_.selection=selection;tr.b.dispatchSimpleEvent(this,'cc-selection-change');},onLayerPickerSelectionChanged_(){this.selection_=this.layerPicker_.selection;this.layerView_.selection=this.selection;this.layerView_.layerTreeImpl=this.layerPicker_.layerTreeImpl;this.layerView_.isRenderPassQuads=this.layerPicker_.isRenderPassQuads;this.layerView_.regenerateContent();tr.b.dispatchSimpleEvent(this,'cc-selection-change');},onLayerViewSelectionChanged_(){this.selection_=this.layerView_.selection;this.layerPicker_.selection=this.selection;tr.b.dispatchSimpleEvent(this,'cc-selection-change');},get extraHighlightsByLayerId(){return this.layerView_.extraHighlightsByLayerId;},set extraHighlightsByLayerId(extraHighlightsByLayerId){this.layerView_.extraHighlightsByLayerId=extraHighlightsByLayerId;}};tr.ui.analysis.ObjectSnapshotView.register(LayerTreeHostImplSnapshotView,{typeName:'cc::LayerTreeHostImpl'});return{LayerTreeHostImplSnapshotView,};});'use strict';tr.exportTo('tr.ui.e.chrome.cc',function(){const OPS_TIMING_ITERATIONS=3;const CHART_PADDING_LEFT=65;const CHART_PADDING_RIGHT=40;const AXIS_PADDING_LEFT=60;const AXIS_PADDING_RIGHT=35;const AXIS_PADDING_TOP=25;const AXIS_PADDING_BOTTOM=45;const AXIS_LABEL_PADDING=5;const AXIS_TICK_SIZE=10;const LABEL_PADDING=5;const LABEL_INTERLEAVE_OFFSET=15;const BAR_PADDING=5;const VERTICAL_TICKS=5;const HUE_CHAR_CODE_ADJUSTMENT=5.7;const PictureOpsChartSummaryView=tr.ui.b.define('tr-ui-e-chrome-cc-picture-ops-chart-summary-view');PictureOpsChartSummaryView.prototype={__proto__:HTMLDivElement.prototype,decorate(){this.style.flexGrow=0;this.style.flexShrink=0;this.style.flexBasis='auto';this.style.fontSize=0;this.style.margin=0;this.style.minHeight='200px';this.style.minWidth='200px';this.style.overflow='hidden';this.style.padding=0;this.picture_=undefined;this.pictureDataProcessed_=false;this.chartScale_=window.devicePixelRatio;this.chart_=document.createElement('canvas');this.chartCtx_=this.chart_.getContext('2d');Polymer.dom(this).appendChild(this.chart_);this.opsTimingData_=[];this.chartWidth_=0;this.chartHeight_=0;this.requiresRedraw_=true;this.currentBarMouseOverTarget_=null;this.chart_.addEventListener('mousemove',this.onMouseMove_.bind(this));try{new ResizeObserver(this.onResize_.bind(this)).observe(this);}catch(e){}},get requiresRedraw(){return this.requiresRedraw_;},set requiresRedraw(requiresRedraw){this.requiresRedraw_=requiresRedraw;},get picture(){return this.picture_;},set picture(picture){this.picture_=picture;this.pictureDataProcessed_=false;if(Polymer.dom(this).classList.contains('hidden'))return;this.processPictureData_();this.requiresRedraw=true;this.updateChartContents();},hide(){Polymer.dom(this).classList.add('hidden');this.style.display='none';},show(){Polymer.dom(this).classList.remove('hidden');this.style.display='';if(!this.pictureDataProcessed_){this.processPictureData_();}
+this.layerPicker_.lthiSnapshot=lthi;this.layerView_.layerTreeImpl=layerTreeImpl;this.layerView_.regenerateContent();if(!this.selection_)return;this.selection=this.selection_.findEquivalent(lthi);},get selection(){return this.selection_;},set selection(selection){if(this.selection_===selection)return;this.selection_=selection;this.layerPicker_.selection=selection;this.layerView_.selection=selection;tr.b.dispatchSimpleEvent(this,'cc-selection-change');},onLayerPickerSelectionChanged_(){this.selection_=this.layerPicker_.selection;this.layerView_.selection=this.selection;this.layerView_.layerTreeImpl=this.layerPicker_.layerTreeImpl;this.layerView_.isRenderPassQuads=this.layerPicker_.isRenderPassQuads;this.layerView_.regenerateContent();tr.b.dispatchSimpleEvent(this,'cc-selection-change');},onLayerViewSelectionChanged_(){this.selection_=this.layerView_.selection;this.layerPicker_.selection=this.selection;tr.b.dispatchSimpleEvent(this,'cc-selection-change');},get extraHighlightsByLayerId(){return this.layerView_.extraHighlightsByLayerId;},set extraHighlightsByLayerId(extraHighlightsByLayerId){this.layerView_.extraHighlightsByLayerId=extraHighlightsByLayerId;}};tr.ui.analysis.ObjectSnapshotView.register(LayerTreeHostImplSnapshotView,{typeName:'cc::LayerTreeHostImpl'});return{LayerTreeHostImplSnapshotView,};});'use strict';tr.exportTo('tr.ui.e.chrome.cc',function(){const OPS_TIMING_ITERATIONS=3;const CHART_PADDING_LEFT=65;const CHART_PADDING_RIGHT=40;const AXIS_PADDING_LEFT=60;const AXIS_PADDING_RIGHT=35;const AXIS_PADDING_TOP=25;const AXIS_PADDING_BOTTOM=45;const AXIS_LABEL_PADDING=5;const AXIS_TICK_SIZE=10;const LABEL_PADDING=5;const LABEL_INTERLEAVE_OFFSET=15;const BAR_PADDING=5;const VERTICAL_TICKS=5;const HUE_CHAR_CODE_ADJUSTMENT=5.7;const PictureOpsChartSummaryView=tr.ui.b.define('tr-ui-e-chrome-cc-picture-ops-chart-summary-view');PictureOpsChartSummaryView.prototype={__proto__:HTMLDivElement.prototype,decorate(){this.style.flexGrow=0;this.style.flexShrink=0;this.style.flexBasis='auto';this.style.fontSize=0;this.style.margin=0;this.style.minHeight='200px';this.style.minWidth='200px';this.style.overflow='hidden';this.style.padding=0;this.picture_=undefined;this.pictureDataProcessed_=false;this.chartScale_=window.devicePixelRatio;this.chart_=document.createElement('canvas');this.chartCtx_=this.chart_.getContext('2d');Polymer.dom(this).appendChild(this.chart_);this.opsTimingData_=[];this.chartWidth_=0;this.chartHeight_=0;this.requiresRedraw_=true;this.currentBarMouseOverTarget_=null;this.chart_.addEventListener('mousemove',this.onMouseMove_.bind(this));try{new ResizeObserver(this.onResize_.bind(this)).observe(this);}catch(e){}},get requiresRedraw(){return this.requiresRedraw_;},set requiresRedraw(requiresRedraw){this.requiresRedraw_=requiresRedraw;},get picture(){return this.picture_;},set picture(picture){this.picture_=picture;this.pictureDataProcessed_=false;if(Polymer.dom(this).classList.contains('hidden'))return;this.processPictureData_();this.requiresRedraw=true;this.updateChartContents();},hide(){Polymer.dom(this).classList.add('hidden');this.style.display='none';},show(){Polymer.dom(this).classList.remove('hidden');this.style.display='';if(!this.pictureDataProcessed_){this.processPictureData_();}
 this.requiresRedraw=true;this.updateChartContents();},onMouseMove_(e){const lastBarMouseOverTarget=this.currentBarMouseOverTarget_;this.currentBarMouseOverTarget_=null;const x=e.offsetX;const y=e.offsetY;const chartLeft=CHART_PADDING_LEFT;const chartRight=this.chartWidth_-CHART_PADDING_RIGHT;const chartTop=AXIS_PADDING_TOP;const chartBottom=this.chartHeight_-AXIS_PADDING_BOTTOM;const chartInnerWidth=chartRight-chartLeft;if(x>chartLeft&&x<chartRight&&y>chartTop&&y<chartBottom){this.currentBarMouseOverTarget_=Math.floor((x-chartLeft)/chartInnerWidth*this.opsTimingData_.length);this.currentBarMouseOverTarget_=tr.b.math.clamp(this.currentBarMouseOverTarget_,0,this.opsTimingData_.length-1);}
 if(this.currentBarMouseOverTarget_===lastBarMouseOverTarget)return;this.drawChartContents_();},onResize_(){this.requiresRedraw=true;this.updateChartContents();},updateChartContents(){if(this.requiresRedraw){this.updateChartDimensions_();}
 this.drawChartContents_();},updateChartDimensions_(){this.chartWidth_=this.offsetWidth;this.chartHeight_=this.offsetHeight;this.chart_.width=this.chartWidth_*this.chartScale_;this.chart_.height=this.chartHeight_*this.chartScale_;this.chart_.style.width=this.chartWidth_+'px';this.chart_.style.height=this.chartHeight_+'px';this.chartCtx_.scale(this.chartScale_,this.chartScale_);},processPictureData_(){this.resetOpsTimingData_();this.pictureDataProcessed_=true;if(!this.picture_)return;let ops=this.picture_.getOps();if(!ops)return;ops=this.picture_.tagOpsWithTimings(ops);if(ops[0].cmd_time===undefined)return;this.collapseOpsToTimingBuckets_(ops);},drawChartContents_(){this.clearChartContents_();if(this.opsTimingData_.length===0){this.showNoTimingDataMessage_();return;}
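
The minified trace-viewer hunk above shows how fragile a repo-wide raw -> ptr rename is: a substring-based substitution also hits identifiers that merely contain "raw" (requiresRedraw, for one), silently splitting a getter from its setter and call sites. Matching the whole call token avoids that; a minimal, purely illustrative sketch using only the C++ standard library:

    #include <iostream>
    #include <regex>
    #include <string>

    int main() {
      // Illustrative input only: one real .raw() call site and one
      // identifier that merely contains the substring "raw".
      std::string src = "return result.raw(); view.redraw();";
      // \b requires a word boundary before "raw", so "redraw()" survives.
      std::regex call(R"(\braw\(\))");
      std::cout << std::regex_replace(src, call, "ptr()") << "\n";
      // Prints: return result.ptr(); view.redraw();
    }
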
diff --git a/runtime/vm/bootstrap.cc b/runtime/vm/bootstrap.cc
index 4662ff3..5e3ddd7 100644
--- a/runtime/vm/bootstrap.cc
+++ b/runtime/vm/bootstrap.cc
@@ -133,7 +133,7 @@
     const Object& result = Object::Handle(zone, loader.LoadProgram());
     program.reset();
     if (result.IsError()) {
-      return Error::Cast(result).raw();
+      return Error::Cast(result).ptr();
     }
 
     // The builtin library should be registered with the VM.
@@ -169,7 +169,7 @@
     ObjectStore::BootstrapLibraryId id = bootstrap_libraries[i].index;
     uri = Symbols::New(thread, bootstrap_libraries[i].uri);
     lib = isolate_group->object_store()->bootstrap_library(id);
-    ASSERT(lib.raw() == Library::LookupLibrary(thread, uri));
+    ASSERT(lib.ptr() == Library::LookupLibrary(thread, uri));
     if (lib.IsNull()) {
       lib = Library::NewLibraryHelper(uri, false);
       lib.SetLoadRequested();
diff --git a/runtime/vm/canonical_tables.cc b/runtime/vm/canonical_tables.cc
index 1fb5e8c..13c9470 100644
--- a/runtime/vm/canonical_tables.cc
+++ b/runtime/vm/canonical_tables.cc
@@ -52,7 +52,7 @@
     return TypeParameter::Cast(a).parameterized_class_id() ==
            TypeParameter::Cast(b).parameterized_class_id();
   }
-  return a.raw() == b.raw();
+  return a.ptr() == b.ptr();
 }
 
 uword MetadataMapTraits::Hash(const Object& key) {
diff --git a/runtime/vm/canonical_tables.h b/runtime/vm/canonical_tables.h
index 8c44802..004b07f 100644
--- a/runtime/vm/canonical_tables.h
+++ b/runtime/vm/canonical_tables.h
@@ -21,7 +21,7 @@
     String& result = String::Handle(StringFrom(data_, len_, Heap::kOld));
     result.SetCanonical();
     result.SetHash(hash_);
-    return result.raw();
+    return result.ptr();
   }
   bool Equals(const String& other) const {
     ASSERT(other.HasHash());
@@ -167,7 +167,7 @@
   }
   static uword Hash(const CanonicalTypeKey& key) { return key.Hash(); }
   static ObjectPtr NewKey(const CanonicalTypeKey& obj) {
-    return obj.key_.raw();
+    return obj.key_.ptr();
   }
 };
 typedef UnorderedHashSet<CanonicalTypeTraits> CanonicalTypeSet;
@@ -206,7 +206,7 @@
   }
   static uword Hash(const CanonicalFunctionTypeKey& key) { return key.Hash(); }
   static ObjectPtr NewKey(const CanonicalFunctionTypeKey& obj) {
-    return obj.key_.raw();
+    return obj.key_.ptr();
   }
 };
 typedef UnorderedHashSet<CanonicalFunctionTypeTraits> CanonicalFunctionTypeSet;
@@ -245,7 +245,7 @@
   }
   static uword Hash(const CanonicalTypeParameterKey& key) { return key.Hash(); }
   static ObjectPtr NewKey(const CanonicalTypeParameterKey& obj) {
-    return obj.key_.raw();
+    return obj.key_.ptr();
   }
 };
 typedef UnorderedHashSet<CanonicalTypeParameterTraits>
@@ -287,7 +287,7 @@
   }
   static uword Hash(const CanonicalTypeArgumentsKey& key) { return key.Hash(); }
   static ObjectPtr NewKey(const CanonicalTypeArgumentsKey& obj) {
-    return obj.key_.raw();
+    return obj.key_.ptr();
   }
 };
 typedef UnorderedHashSet<CanonicalTypeArgumentsTraits>
diff --git a/runtime/vm/class_finalizer.cc b/runtime/vm/class_finalizer.cc
index f054bb9..b6ee854 100644
--- a/runtime/vm/class_finalizer.cc
+++ b/runtime/vm/class_finalizer.cc
@@ -84,7 +84,7 @@
 static void CollectFinalizedSuperClasses(
     const Class& cls_,
     GrowableArray<intptr_t>* finalized_super_classes) {
-  Class& cls = Class::Handle(cls_.raw());
+  Class& cls = Class::Handle(cls_.ptr());
   AbstractType& super_type = Type::Handle();
   super_type = cls.super_type();
   if (!super_type.IsNull()) {
@@ -117,7 +117,7 @@
     ScopedHandle<Class> current_class(&class_handles_);
     ScopedHandle<AbstractType> type(&type_handles_);
 
-    *current_class = klass.raw();
+    *current_class = klass.ptr();
     while (true) {
       // We don't care about top types.
       const intptr_t cid = current_class->id();
@@ -361,8 +361,8 @@
                 String::Handle(pending_type.Name()).ToCString(),
                 pending_type.ToCString());
     }
-    if ((pending_type.raw() != type.raw()) && pending_type.IsType() &&
-        (pending_type.type_class() == type_cls.raw())) {
+    if ((pending_type.ptr() != type.ptr()) && pending_type.IsType() &&
+        (pending_type.type_class() == type_cls.ptr())) {
       pending_arguments = pending_type.arguments();
       // By using TypeEquality::kInSubtypeTest, we throw a wider net than
       // using canonical or syntactical equality and may reject more
@@ -589,7 +589,7 @@
           // While finalizing D<T>, the super type arg D<T> (a typeref) gets
           // instantiated from vector [T], yielding itself.
           if (super_type_arg.IsTypeRef() &&
-              (super_type_arg.arguments() == arguments.raw())) {
+              (super_type_arg.arguments() == arguments.ptr())) {
             ASSERT(super_type_arg.IsBeingFinalized());
             arguments.SetTypeAt(i, super_type_arg);
             continue;
@@ -607,7 +607,7 @@
               unfinalized_type = TypeRef::Cast(super_type_arg).type();
             } else {
               ASSERT(super_type_arg.IsType());
-              unfinalized_type = super_type_arg.raw();
+              unfinalized_type = super_type_arg.ptr();
             }
             if (FLAG_trace_type_finalization) {
               THR_Print("Instantiated unfinalized '%s': '%s'\n",
@@ -660,19 +660,19 @@
         !type.IsBeingFinalized()) {
       return type.Canonicalize(Thread::Current(), nullptr);
     }
-    return type.raw();
+    return type.ptr();
   }
 
   if (type.IsTypeRef()) {
     // The referenced type will be finalized later by the code that set the
     // is_being_finalized mark bit.
-    return type.raw();
+    return type.ptr();
   }
 
   if (type.IsTypeParameter() && type.IsBeingFinalized()) {
     // The base and index have already been adjusted, but the bound referring
     // back to the type parameter is still being finalized.
-    return type.raw();
+    return type.ptr();
   }
 
   // Recursive types must be processed in FinalizeTypeArguments() and cannot be
@@ -732,7 +732,7 @@
     if (finalization >= kCanonicalize) {
       return type_parameter.Canonicalize(thread, nullptr);
     }
-    return type_parameter.raw();
+    return type_parameter.ptr();
   }
 
   // If the type is a function type, we also need to finalize the types in its
@@ -781,11 +781,11 @@
           AbstractType::Handle(zone, type.Canonicalize(thread, nullptr));
       THR_Print("Done canonicalizing type '%s'\n",
                 String::Handle(zone, canonical_type.Name()).ToCString());
-      return canonical_type.raw();
+      return canonical_type.ptr();
     }
     return type.Canonicalize(thread, nullptr);
   } else {
-    return type.raw();
+    return type.ptr();
   }
 }
 
@@ -804,7 +804,7 @@
     for (intptr_t i = 0; i < num_type_params; i++) {
       type_param ^= type_params.TypeAt(i);
       finalized_type ^= FinalizeType(type_param, kFinalize, pending_types);
-      if (type_param.raw() != finalized_type.raw()) {
+      if (type_param.ptr() != finalized_type.ptr()) {
         type_params.SetTypeAt(i, TypeParameter::Cast(finalized_type));
       }
     }
@@ -813,7 +813,7 @@
   // Finalize result type.
   type = signature.result_type();
   finalized_type = FinalizeType(type, kFinalize, pending_types);
-  if (finalized_type.raw() != type.raw()) {
+  if (finalized_type.ptr() != type.ptr()) {
     signature.set_result_type(finalized_type);
   }
   // Finalize formal parameter types.
@@ -821,7 +821,7 @@
   for (intptr_t i = 0; i < num_parameters; i++) {
     type = signature.ParameterTypeAt(i);
     finalized_type = FinalizeType(type, kFinalize, pending_types);
-    if (type.raw() != finalized_type.raw()) {
+    if (type.ptr() != finalized_type.ptr()) {
       signature.SetParameterTypeAt(i, finalized_type);
     }
   }
@@ -835,7 +835,7 @@
   if (finalization >= kCanonicalize) {
     return signature.Canonicalize(Thread::Current(), nullptr);
   }
-  return signature.raw();
+  return signature.ptr();
 }
 
 #if defined(TARGET_ARCH_X64)
@@ -930,7 +930,7 @@
     return;
   }
 
-  Class& cls = Class::Handle(zone, iface.raw());
+  Class& cls = Class::Handle(zone, iface.ptr());
   AbstractType& type = AbstractType::Handle(zone);
 
   while (!cls.is_implemented()) {
@@ -967,7 +967,7 @@
   }
   // Finalize type parameters before finalizing the super type.
   FinalizeTypeParameters(cls, kFinalize);
-  ASSERT(super_class.raw() == cls.SuperClass());  // Not modified.
+  ASSERT(super_class.ptr() == cls.SuperClass());  // Not modified.
   ASSERT(super_class.IsNull() || super_class.is_type_finalized());
   FinalizeTypeParameters(cls, kCanonicalize);
   // Finalize super type.
@@ -1189,7 +1189,7 @@
   for (intptr_t i = 0; i < fields.Length(); i++) {
     field = Field::RawCast(fields.At(i));
     if (!field.is_static() || !field.is_const() ||
-        (sentinel.raw() == field.raw())) {
+        (sentinel.ptr() == field.ptr())) {
       continue;
     }
     // Hot-reload expects the static const fields to be evaluated when
@@ -1405,25 +1405,25 @@
   void VisitObject(ObjectPtr obj) {
     if (obj->IsClass()) {
       ClassPtr cls = Class::RawCast(obj);
-      const classid_t old_cid = cls->ptr()->id_;
+      const classid_t old_cid = cls->untag()->id_;
       if (ClassTable::IsTopLevelCid(old_cid)) {
         // We don't remap cids of top level classes.
         return;
       }
-      cls->ptr()->id_ = Map(old_cid);
+      cls->untag()->id_ = Map(old_cid);
     } else if (obj->IsField()) {
       FieldPtr field = Field::RawCast(obj);
-      field->ptr()->guarded_cid_ = Map(field->ptr()->guarded_cid_);
-      field->ptr()->is_nullable_ = Map(field->ptr()->is_nullable_);
+      field->untag()->guarded_cid_ = Map(field->untag()->guarded_cid_);
+      field->untag()->is_nullable_ = Map(field->untag()->is_nullable_);
     } else if (obj->IsTypeParameter()) {
       TypeParameterPtr param = TypeParameter::RawCast(obj);
-      param->ptr()->parameterized_class_id_ =
-          Map(param->ptr()->parameterized_class_id_);
+      param->untag()->parameterized_class_id_ =
+          Map(param->untag()->parameterized_class_id_);
     } else if (obj->IsType()) {
       TypePtr type = Type::RawCast(obj);
-      ObjectPtr id = type->ptr()->type_class_id_;
+      ObjectPtr id = type->untag()->type_class_id_;
       if (!id->IsHeapObject()) {
-        type->ptr()->type_class_id_ =
+        type->untag()->type_class_id_ =
             Smi::New(Map(Smi::Value(Smi::RawCast(id))));
       }
     } else {
@@ -1432,7 +1432,7 @@
       if (old_cid != new_cid) {
         // Don't touch objects that are unchanged. In particular, Instructions,
         // which are write-protected.
-        obj->ptr()->SetClassIdUnsynchronized(new_cid);
+        obj->untag()->SetClassIdUnsynchronized(new_cid);
       }
     }
   }
@@ -1490,13 +1490,13 @@
 // In the Dart VM heap the following instances directly use cids for the
 // computation of canonical hash codes:
 //
-//    * TypePtr (due to TypeLayout::type_class_id_)
-//    * TypeParameterPtr (due to TypeParameterLayout::parameterized_class_id_)
+//    * TypePtr (due to UntaggedType::type_class_id_)
+//    * TypeParameterPtr (due to UntaggedTypeParameter::parameterized_class_id_)
 //
 // The following instances use cids for the computation of canonical hash codes
 // indirectly:
 //
-//    * TypeRefPtr (due to TypeRefLayout::type_->type_class_id)
+//    * TypeRefPtr (due to UntaggedTypeRef::type_->type_class_id)
 //    * TypePtr (due to type arguments)
 //    * FunctionTypePtr (due to the result and parameter types)
 //    * TypeArgumentsPtr (due to type references)
@@ -1505,17 +1505,17 @@
 //
 // Caching of the canonical hash codes happens for:
 //
-//    * TypeLayout::hash_
-//    * FunctionTypeLayout::hash_
-//    * TypeParameterLayout::hash_
-//    * TypeArgumentsLayout::hash_
-//    * RawInstance (weak table)
-//    * RawArray (weak table)
+//    * UntaggedType::hash_
+//    * UntaggedFunctionType::hash_
+//    * UntaggedTypeParameter::hash_
+//    * UntaggedTypeArguments::hash_
+//    * InstancePtr (weak table)
+//    * ArrayPtr (weak table)
 //
 // No caching of canonical hash codes (i.e. it gets re-computed every time)
 // happens for:
 //
-//    * TypeRefPtr (computed via TypeRefLayout::type_->type_class_id)
+//    * TypeRefPtr (computed via UntaggedTypeRef::type_->type_class_id)
 //
 // Usages of canonical hash codes are:
 //
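
The comment block above explains that several canonical hash codes mix in class ids, directly or transitively; once VisitObject remaps cids, any hash cached before the remap no longer matches a freshly computed one and must be dropped or recomputed. A worked sketch with an illustrative combiner (not the VM's actual hash function):

    #include <cstdint>
    #include <iostream>

    // Illustrative combiner; the VM's real canonical hash differs.
    uint32_t Combine(uint32_t hash, uint32_t value) {
      hash += value;
      hash += hash << 10;
      hash ^= hash >> 6;
      return hash;
    }

    int main() {
      const uint32_t type_args_hash = 0x1234;  // assumed, for illustration
      uint32_t cached = Combine(type_args_hash, /*cid=*/57);  // before remap
      uint32_t fresh = Combine(type_args_hash, /*cid=*/91);   // after remap
      // The cached value is stale: a lookup keyed on it would miss.
      std::cout << (cached == fresh ? "hit" : "stale") << "\n";  // stale
    }
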
diff --git a/runtime/vm/class_finalizer_test.cc b/runtime/vm/class_finalizer_test.cc
index 62c85a1..d51c4cf 100644
--- a/runtime/vm/class_finalizer_test.cc
+++ b/runtime/vm/class_finalizer_test.cc
@@ -20,7 +20,7 @@
   SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
   cls.SetFunctions(Object::empty_array());
   cls.SetFields(Object::empty_array());
-  return cls.raw();
+  return cls.ptr();
 }
 
 ISOLATE_UNIT_TEST_CASE(ClassFinalizer) {
diff --git a/runtime/vm/class_id.h b/runtime/vm/class_id.h
index 741fc58..5fb3b61 100644
--- a/runtime/vm/class_id.h
+++ b/runtime/vm/class_id.h
@@ -14,7 +14,7 @@
 
 namespace dart {
 
-// Size of the class-id part of the object header. See ObjectLayout.
+// Size of the class-id part of the object header. See UntaggedObject.
 typedef uint16_t ClassIdTagType;
 
 #define CLASS_LIST_NO_OBJECT_NOR_STRING_NOR_ARRAY(V)                           \
diff --git a/runtime/vm/class_table.cc b/runtime/vm/class_table.cc
index b182553..a2a1b66 100644
--- a/runtime/vm/class_table.cc
+++ b/runtime/vm/class_table.cc
@@ -160,7 +160,7 @@
   // parallel to [ClassTable].
 
   const intptr_t instance_size =
-      cls.is_abstract() ? 0 : Class::host_instance_size(cls.raw());
+      cls.is_abstract() ? 0 : Class::host_instance_size(cls.ptr());
 
   const intptr_t expected_cid =
       shared_class_table_->Register(cid, instance_size);
@@ -168,7 +168,7 @@
   if (cid != kIllegalCid) {
     ASSERT(cid > 0 && cid < kNumPredefinedCids && cid < top_);
     ASSERT(table_.load()[cid] == nullptr);
-    table_.load()[cid] = cls.raw();
+    table_.load()[cid] = cls.ptr();
   } else {
     if (top_ == capacity_) {
       const intptr_t new_capacity = capacity_ + kCapacityIncrement;
@@ -176,7 +176,7 @@
     }
     ASSERT(top_ < capacity_);
     cls.set_id(top_);
-    table_.load()[top_] = cls.raw();
+    table_.load()[top_] = cls.ptr();
     top_++;  // Increment next index.
   }
   ASSERT(expected_cid == cls.id());
@@ -200,7 +200,7 @@
   }
   ASSERT(tlc_top_ < tlc_capacity_);
   cls.set_id(ClassTable::CidFromTopLevelIndex(tlc_top_));
-  tlc_table_.load()[tlc_top_] = cls.raw();
+  tlc_table_.load()[tlc_top_] = cls.ptr();
   tlc_top_++;  // Increment next index.
 }
 
@@ -496,7 +496,7 @@
       continue;
     }
     cls = At(i);
-    if (cls.raw() != nullptr) {
+    if (cls.ptr() != nullptr) {
       name = cls.Name();
       OS::PrintErr("%" Pd ": %s\n", i, name.ToCString());
     }
diff --git a/runtime/vm/closure_functions_cache.cc b/runtime/vm/closure_functions_cache.cc
index a85a15b..e70d6ce 100644
--- a/runtime/vm/closure_functions_cache.cc
+++ b/runtime/vm/closure_functions_cache.cc
@@ -34,8 +34,8 @@
   intptr_t num_closures = closures.Length();
   for (intptr_t i = 0; i < num_closures; i++) {
     closure ^= closures.At(i);
-    if (closure.token_pos() == token_pos && closure.Owner() == owner.raw()) {
-      return closure.raw();
+    if (closure.token_pos() == token_pos && closure.Owner() == owner.ptr()) {
+      return closure.ptr();
     }
   }
   return Function::null();
@@ -66,8 +66,8 @@
   for (intptr_t i = 0; i < num_closures; i++) {
     closure ^= closures.At(i);
     if (closure.token_pos() == token_pos &&
-        closure.parent_function() == parent.raw()) {
-      return closure.raw();
+        closure.parent_function() == parent.ptr()) {
+      return closure.ptr();
     }
   }
   return Function::null();
@@ -101,7 +101,7 @@
       GrowableObjectArray::Handle(zone, object_store->closure_functions());
   intptr_t num_closures = closures_array.Length();
   for (intptr_t i = 0; i < num_closures; i++) {
-    if (closures_array.At(i) == needle.raw()) {
+    if (closures_array.At(i) == needle.ptr()) {
       return i;
     }
   }
@@ -136,15 +136,15 @@
   auto& entry = Function::Handle(zone);
   for (intptr_t i = (closures.Length() - 1); i >= 0; i--) {
     entry ^= closures.At(i);
-    if (entry.parent_function() == outer.raw()) {
+    if (entry.parent_function() == outer.ptr()) {
 #if defined(DEBUG)
       auto& other = Function::Handle(zone);
       for (intptr_t j = i - 1; j >= 0; j--) {
         other ^= closures.At(j);
-        ASSERT(other.parent_function() != outer.raw());
+        ASSERT(other.parent_function() != outer.ptr());
       }
 #endif
-      return entry.raw();
+      return entry.ptr();
     }
   }
   return Function::null();
diff --git a/runtime/vm/clustered_snapshot.cc b/runtime/vm/clustered_snapshot.cc
index 59f8696..e221513 100644
--- a/runtime/vm/clustered_snapshot.cc
+++ b/runtime/vm/clustered_snapshot.cc
@@ -91,7 +91,7 @@
   if (address == 0) {
     OUT_OF_MEMORY();
   }
-  return ObjectLayout::FromAddr(address);
+  return UntaggedObject::FromAddr(address);
 }
 
 void Deserializer::InitializeHeader(ObjectPtr raw,
@@ -100,14 +100,14 @@
                                     bool is_canonical) {
   ASSERT(Utils::IsAligned(size, kObjectAlignment));
   uword tags = 0;
-  tags = ObjectLayout::ClassIdTag::update(class_id, tags);
-  tags = ObjectLayout::SizeTag::update(size, tags);
-  tags = ObjectLayout::CanonicalBit::update(is_canonical, tags);
-  tags = ObjectLayout::OldBit::update(true, tags);
-  tags = ObjectLayout::OldAndNotMarkedBit::update(true, tags);
-  tags = ObjectLayout::OldAndNotRememberedBit::update(true, tags);
-  tags = ObjectLayout::NewBit::update(false, tags);
-  raw->ptr()->tags_ = tags;
+  tags = UntaggedObject::ClassIdTag::update(class_id, tags);
+  tags = UntaggedObject::SizeTag::update(size, tags);
+  tags = UntaggedObject::CanonicalBit::update(is_canonical, tags);
+  tags = UntaggedObject::OldBit::update(true, tags);
+  tags = UntaggedObject::OldAndNotMarkedBit::update(true, tags);
+  tags = UntaggedObject::OldAndNotRememberedBit::update(true, tags);
+  tags = UntaggedObject::NewBit::update(false, tags);
+  raw->untag()->tags_ = tags;
 }
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
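
InitializeHeader above composes the object header by threading a single tags word through a chain of BitField updates, each of which rewrites only its own (position, size) slice. A minimal sketch of that pattern, with invented field layouts rather than the VM's actual BitField implementation:

    #include <cstdint>
    #include <iostream>

    // Each tag owns a fixed slice of one word; update() rewrites only
    // that slice and leaves the rest of the word untouched.
    template <typename T, int kPos, int kSize>
    struct BitField {
      static constexpr uintptr_t kMask = (uintptr_t{1} << kSize) - 1;
      static uintptr_t update(T value, uintptr_t tags) {
        return (tags & ~(kMask << kPos)) |
               ((static_cast<uintptr_t>(value) & kMask) << kPos);
      }
      static T decode(uintptr_t tags) {
        return static_cast<T>((tags >> kPos) & kMask);
      }
    };

    using ClassIdTag = BitField<uint16_t, /*pos=*/0, /*size=*/16>;
    using CanonicalBit = BitField<bool, /*pos=*/16, /*size=*/1>;

    int main() {
      uintptr_t tags = 0;
      tags = ClassIdTag::update(42, tags);
      tags = CanonicalBit::update(true, tags);
      std::cout << ClassIdTag::decode(tags) << " "
                << CanonicalBit::decode(tags) << "\n";  // 42 1
    }
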
@@ -185,7 +185,7 @@
 
   void Trace(Serializer* s, ObjectPtr object) {
     ClassPtr cls = Class::RawCast(object);
-    intptr_t class_id = cls->ptr()->id_;
+    intptr_t class_id = cls->untag()->id_;
 
     if (class_id == kIllegalCid) {
       // Classes expected to be dropped by the precompiler should not be traced.
@@ -211,7 +211,7 @@
       ClassPtr cls = predefined_[i];
       s->AssignRef(cls);
       AutoTraceObject(cls);
-      intptr_t class_id = cls->ptr()->id_;
+      intptr_t class_id = cls->untag()->id_;
       s->WriteCid(class_id);
     }
     count = objects_.length();
@@ -235,9 +235,9 @@
 
  private:
   void WriteClass(Serializer* s, ClassPtr cls) {
-    AutoTraceObjectName(cls, cls->ptr()->name_);
+    AutoTraceObjectName(cls, cls->untag()->name_);
     WriteFromTo(cls);
-    intptr_t class_id = cls->ptr()->id_;
+    intptr_t class_id = cls->untag()->id_;
     if (class_id == kIllegalCid) {
       s->UnexpectedObject(cls, "Class with illegal cid");
     }
@@ -247,16 +247,16 @@
       s->UnexpectedObject(cls, "Class with non mode agnostic constants");
     }
     if (s->kind() != Snapshot::kFullAOT) {
-      s->Write<uint32_t>(cls->ptr()->kernel_offset_);
+      s->Write<uint32_t>(cls->untag()->kernel_offset_);
     }
     s->Write<int32_t>(Class::target_instance_size_in_words(cls));
     s->Write<int32_t>(Class::target_next_field_offset_in_words(cls));
     s->Write<int32_t>(Class::target_type_arguments_field_offset_in_words(cls));
-    s->Write<int16_t>(cls->ptr()->num_type_arguments_);
-    s->Write<uint16_t>(cls->ptr()->num_native_fields_);
-    s->WriteTokenPosition(cls->ptr()->token_pos_);
-    s->WriteTokenPosition(cls->ptr()->end_token_pos_);
-    s->Write<uint32_t>(cls->ptr()->state_bits_);
+    s->Write<int16_t>(cls->untag()->num_type_arguments_);
+    s->Write<uint16_t>(cls->untag()->num_native_fields_);
+    s->WriteTokenPosition(cls->untag()->token_pos_);
+    s->WriteTokenPosition(cls->untag()->end_token_pos_);
+    s->Write<uint32_t>(cls->untag()->state_bits_);
 
     // In AOT, the bitmap of unboxed fields should also be serialized
     if (FLAG_precompiled_mode && !ClassTable::IsTopLevelCid(class_id)) {
@@ -272,9 +272,9 @@
     // Do not generate a core snapshot containing constants that would require
     // a canonical erasure of their types if loaded in an isolate running in
     // unsound nullability mode.
-    if (cls->ptr()->host_type_arguments_field_offset_in_words_ ==
+    if (cls->untag()->host_type_arguments_field_offset_in_words_ ==
             Class::kNoTypeArguments ||
-        cls->ptr()->constants_ == Array::null()) {
+        cls->untag()->constants_ == Array::null()) {
       return false;
     }
     Zone* zone = Thread::Current()->zone();
@@ -319,38 +319,38 @@
       ClassPtr cls = static_cast<ClassPtr>(d->Ref(id));
       ReadFromTo(cls);
       intptr_t class_id = d->ReadCid();
-      cls->ptr()->id_ = class_id;
+      cls->untag()->id_ = class_id;
 #if !defined(DART_PRECOMPILED_RUNTIME)
       if (d->kind() != Snapshot::kFullAOT) {
-        cls->ptr()->kernel_offset_ = d->Read<uint32_t>();
+        cls->untag()->kernel_offset_ = d->Read<uint32_t>();
       }
 #endif
       if (!IsInternalVMdefinedClassId(class_id)) {
-        cls->ptr()->host_instance_size_in_words_ = d->Read<int32_t>();
-        cls->ptr()->host_next_field_offset_in_words_ = d->Read<int32_t>();
+        cls->untag()->host_instance_size_in_words_ = d->Read<int32_t>();
+        cls->untag()->host_next_field_offset_in_words_ = d->Read<int32_t>();
 #if !defined(DART_PRECOMPILED_RUNTIME)
         // Only one pair is serialized. The target field only exists when
         // DART_PRECOMPILED_RUNTIME is not defined
-        cls->ptr()->target_instance_size_in_words_ =
-            cls->ptr()->host_instance_size_in_words_;
-        cls->ptr()->target_next_field_offset_in_words_ =
-            cls->ptr()->host_next_field_offset_in_words_;
+        cls->untag()->target_instance_size_in_words_ =
+            cls->untag()->host_instance_size_in_words_;
+        cls->untag()->target_next_field_offset_in_words_ =
+            cls->untag()->host_next_field_offset_in_words_;
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
       } else {
         d->Read<int32_t>();  // Skip.
         d->Read<int32_t>();  // Skip.
       }
-      cls->ptr()->host_type_arguments_field_offset_in_words_ =
+      cls->untag()->host_type_arguments_field_offset_in_words_ =
           d->Read<int32_t>();
 #if !defined(DART_PRECOMPILED_RUNTIME)
-      cls->ptr()->target_type_arguments_field_offset_in_words_ =
-          cls->ptr()->host_type_arguments_field_offset_in_words_;
+      cls->untag()->target_type_arguments_field_offset_in_words_ =
+          cls->untag()->host_type_arguments_field_offset_in_words_;
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
-      cls->ptr()->num_type_arguments_ = d->Read<int16_t>();
-      cls->ptr()->num_native_fields_ = d->Read<uint16_t>();
-      cls->ptr()->token_pos_ = d->ReadTokenPosition();
-      cls->ptr()->end_token_pos_ = d->ReadTokenPosition();
-      cls->ptr()->state_bits_ = d->Read<uint32_t>();
+      cls->untag()->num_type_arguments_ = d->Read<int16_t>();
+      cls->untag()->num_native_fields_ = d->Read<uint16_t>();
+      cls->untag()->token_pos_ = d->ReadTokenPosition();
+      cls->untag()->end_token_pos_ = d->ReadTokenPosition();
+      cls->untag()->state_bits_ = d->Read<uint32_t>();
 
       if (FLAG_precompiled_mode) {
         d->ReadUnsigned64();  // Skip unboxed fields bitmap.
@@ -365,30 +365,30 @@
 
       intptr_t class_id = d->ReadCid();
       ASSERT(class_id >= kNumPredefinedCids);
-      cls->ptr()->id_ = class_id;
+      cls->untag()->id_ = class_id;
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
       if (d->kind() != Snapshot::kFullAOT) {
-        cls->ptr()->kernel_offset_ = d->Read<uint32_t>();
+        cls->untag()->kernel_offset_ = d->Read<uint32_t>();
       }
 #endif
-      cls->ptr()->host_instance_size_in_words_ = d->Read<int32_t>();
-      cls->ptr()->host_next_field_offset_in_words_ = d->Read<int32_t>();
-      cls->ptr()->host_type_arguments_field_offset_in_words_ =
+      cls->untag()->host_instance_size_in_words_ = d->Read<int32_t>();
+      cls->untag()->host_next_field_offset_in_words_ = d->Read<int32_t>();
+      cls->untag()->host_type_arguments_field_offset_in_words_ =
           d->Read<int32_t>();
 #if !defined(DART_PRECOMPILED_RUNTIME)
-      cls->ptr()->target_instance_size_in_words_ =
-          cls->ptr()->host_instance_size_in_words_;
-      cls->ptr()->target_next_field_offset_in_words_ =
-          cls->ptr()->host_next_field_offset_in_words_;
-      cls->ptr()->target_type_arguments_field_offset_in_words_ =
-          cls->ptr()->host_type_arguments_field_offset_in_words_;
+      cls->untag()->target_instance_size_in_words_ =
+          cls->untag()->host_instance_size_in_words_;
+      cls->untag()->target_next_field_offset_in_words_ =
+          cls->untag()->host_next_field_offset_in_words_;
+      cls->untag()->target_type_arguments_field_offset_in_words_ =
+          cls->untag()->host_type_arguments_field_offset_in_words_;
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
-      cls->ptr()->num_type_arguments_ = d->Read<int16_t>();
-      cls->ptr()->num_native_fields_ = d->Read<uint16_t>();
-      cls->ptr()->token_pos_ = d->ReadTokenPosition();
-      cls->ptr()->end_token_pos_ = d->ReadTokenPosition();
-      cls->ptr()->state_bits_ = d->Read<uint32_t>();
+      cls->untag()->num_type_arguments_ = d->Read<int16_t>();
+      cls->untag()->num_native_fields_ = d->Read<uint16_t>();
+      cls->untag()->token_pos_ = d->ReadTokenPosition();
+      cls->untag()->end_token_pos_ = d->ReadTokenPosition();
+      cls->untag()->state_bits_ = d->Read<uint32_t>();
 
       table->AllocateIndex(class_id);
       table->SetAt(class_id, cls);
@@ -415,10 +415,10 @@
     TypeArgumentsPtr type_args = TypeArguments::RawCast(object);
     objects_.Add(type_args);
 
-    s->Push(type_args->ptr()->instantiations_);
-    const intptr_t length = Smi::Value(type_args->ptr()->length_);
+    s->Push(type_args->untag()->instantiations_);
+    const intptr_t length = Smi::Value(type_args->untag()->length_);
     for (intptr_t i = 0; i < length; i++) {
-      s->Push(type_args->ptr()->types()[i]);
+      s->Push(type_args->untag()->types()[i]);
     }
   }
 
@@ -430,7 +430,7 @@
       TypeArgumentsPtr type_args = objects_[i];
       s->AssignRef(type_args);
       AutoTraceObject(type_args);
-      const intptr_t length = Smi::Value(type_args->ptr()->length_);
+      const intptr_t length = Smi::Value(type_args->untag()->length_);
       s->WriteUnsigned(length);
     }
   }
@@ -440,15 +440,15 @@
     for (intptr_t i = 0; i < count; i++) {
       TypeArgumentsPtr type_args = objects_[i];
       AutoTraceObject(type_args);
-      const intptr_t length = Smi::Value(type_args->ptr()->length_);
+      const intptr_t length = Smi::Value(type_args->untag()->length_);
       s->WriteUnsigned(length);
-      intptr_t hash = Smi::Value(type_args->ptr()->hash_);
+      intptr_t hash = Smi::Value(type_args->untag()->hash_);
       s->Write<int32_t>(hash);
-      const intptr_t nullability = Smi::Value(type_args->ptr()->nullability_);
+      const intptr_t nullability = Smi::Value(type_args->untag()->nullability_);
       s->WriteUnsigned(nullability);
       WriteField(type_args, instantiations_);
       for (intptr_t j = 0; j < length; j++) {
-        s->WriteElementRef(type_args->ptr()->types()[j], j);
+        s->WriteElementRef(type_args->untag()->types()[j], j);
       }
     }
   }
@@ -483,12 +483,12 @@
       Deserializer::InitializeHeader(type_args, kTypeArgumentsCid,
                                      TypeArguments::InstanceSize(length),
                                      stamp_canonical);
-      type_args->ptr()->length_ = Smi::New(length);
-      type_args->ptr()->hash_ = Smi::New(d->Read<int32_t>());
-      type_args->ptr()->nullability_ = Smi::New(d->ReadUnsigned());
-      type_args->ptr()->instantiations_ = static_cast<ArrayPtr>(d->ReadRef());
+      type_args->untag()->length_ = Smi::New(length);
+      type_args->untag()->hash_ = Smi::New(d->Read<int32_t>());
+      type_args->untag()->nullability_ = Smi::New(d->ReadUnsigned());
+      type_args->untag()->instantiations_ = static_cast<ArrayPtr>(d->ReadRef());
       for (intptr_t j = 0; j < length; j++) {
-        type_args->ptr()->types()[j] =
+        type_args->untag()->types()[j] =
             static_cast<AbstractTypePtr>(d->ReadRef());
       }
     }
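
The TypeArguments cluster above follows the clustered-snapshot shape visible throughout this file: Trace discovers reachable objects, WriteAlloc emits just enough (counts and lengths) for the reader to pre-allocate every object and assign refs, and WriteFill/ReadFill then stream the fields, which may reference any pre-assigned ref. A toy sketch of that alloc/fill split, with invented types standing in for the real clusters:

    #include <cstdio>
    #include <vector>

    struct Vec { std::vector<int> elems; };  // invented stand-in object

    // Pass 1: lengths only, so the reader can allocate every object
    // (and assign it a ref) before any field is read.
    void WriteAlloc(FILE* out, const std::vector<Vec>& objs) {
      fprintf(out, "%zu\n", objs.size());
      for (const Vec& v : objs) fprintf(out, "%zu\n", v.elems.size());
    }

    // Pass 2: contents; every object already has an identity by now,
    // so fields could safely reference objects filled later.
    void WriteFill(FILE* out, const std::vector<Vec>& objs) {
      for (const Vec& v : objs)
        for (int e : v.elems) fprintf(out, "%d\n", e);
    }

    int main() {
      std::vector<Vec> objs = {{{1, 2}}, {{3}}};
      WriteAlloc(stdout, objs);
      WriteFill(stdout, objs);
    }
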
@@ -536,7 +536,7 @@
       AutoTraceObject(cls);
       WriteFromTo(cls);
       if (s->kind() != Snapshot::kFullAOT) {
-        s->Write<int32_t>(cls->ptr()->library_kernel_offset_);
+        s->Write<int32_t>(cls->untag()->library_kernel_offset_);
       }
     }
   }
@@ -571,7 +571,7 @@
       ReadFromTo(cls);
 #if !defined(DART_PRECOMPILED_RUNTIME)
       if (d->kind() != Snapshot::kFullAOT) {
-        cls->ptr()->library_kernel_offset_ = d->Read<int32_t>();
+        cls->untag()->library_kernel_offset_ = d->Read<int32_t>();
       }
 #endif
     }
@@ -591,11 +591,11 @@
 
     PushFromTo(func);
     if (kind == Snapshot::kFullAOT) {
-      s->Push(func->ptr()->code_);
+      s->Push(func->untag()->code_);
     } else if (kind == Snapshot::kFullJIT) {
-      NOT_IN_PRECOMPILED(s->Push(func->ptr()->unoptimized_code_));
-      s->Push(func->ptr()->code_);
-      s->Push(func->ptr()->ic_data_array_);
+      NOT_IN_PRECOMPILED(s->Push(func->untag()->unoptimized_code_));
+      s->Push(func->untag()->code_);
+      s->Push(func->untag()->ic_data_array_);
     }
   }
 
@@ -625,13 +625,13 @@
       }
 
       if (kind != Snapshot::kFullAOT) {
-        s->WriteTokenPosition(func->ptr()->token_pos_);
-        s->WriteTokenPosition(func->ptr()->end_token_pos_);
-        s->Write<uint32_t>(func->ptr()->kernel_offset_);
+        s->WriteTokenPosition(func->untag()->token_pos_);
+        s->WriteTokenPosition(func->untag()->end_token_pos_);
+        s->Write<uint32_t>(func->untag()->kernel_offset_);
       }
 
-      s->Write<uint32_t>(func->ptr()->packed_fields_);
-      s->Write<uint32_t>(func->ptr()->kind_tag_);
+      s->Write<uint32_t>(func->untag()->packed_fields_);
+      s->Write<uint32_t>(func->untag()->kind_tag_);
     }
   }
 
@@ -682,39 +682,39 @@
       ReadFromTo(func);
 
       if (kind == Snapshot::kFullAOT) {
-        func->ptr()->code_ = static_cast<CodePtr>(d->ReadRef());
+        func->untag()->code_ = static_cast<CodePtr>(d->ReadRef());
       } else if (kind == Snapshot::kFullJIT) {
-        NOT_IN_PRECOMPILED(func->ptr()->unoptimized_code_ =
+        NOT_IN_PRECOMPILED(func->untag()->unoptimized_code_ =
                                static_cast<CodePtr>(d->ReadRef()));
-        func->ptr()->code_ = static_cast<CodePtr>(d->ReadRef());
-        func->ptr()->ic_data_array_ = static_cast<ArrayPtr>(d->ReadRef());
+        func->untag()->code_ = static_cast<CodePtr>(d->ReadRef());
+        func->untag()->ic_data_array_ = static_cast<ArrayPtr>(d->ReadRef());
       }
 
 #if defined(DEBUG)
-      func->ptr()->entry_point_ = 0;
-      func->ptr()->unchecked_entry_point_ = 0;
+      func->untag()->entry_point_ = 0;
+      func->untag()->unchecked_entry_point_ = 0;
 #endif
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
       if (kind != Snapshot::kFullAOT) {
-        func->ptr()->token_pos_ = d->ReadTokenPosition();
-        func->ptr()->end_token_pos_ = d->ReadTokenPosition();
-        func->ptr()->kernel_offset_ = d->Read<uint32_t>();
+        func->untag()->token_pos_ = d->ReadTokenPosition();
+        func->untag()->end_token_pos_ = d->ReadTokenPosition();
+        func->untag()->kernel_offset_ = d->Read<uint32_t>();
       }
-      func->ptr()->unboxed_parameters_info_.Reset();
+      func->untag()->unboxed_parameters_info_.Reset();
 #endif
-      func->ptr()->packed_fields_ = d->Read<uint32_t>();
-      func->ptr()->kind_tag_ = d->Read<uint32_t>();
+      func->untag()->packed_fields_ = d->Read<uint32_t>();
+      func->untag()->kind_tag_ = d->Read<uint32_t>();
       if (kind == Snapshot::kFullAOT) {
         // Omit fields used to support de/reoptimization.
       } else {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-        func->ptr()->usage_counter_ = 0;
-        func->ptr()->optimized_instruction_count_ = 0;
-        func->ptr()->optimized_call_site_count_ = 0;
-        func->ptr()->deoptimization_counter_ = 0;
-        func->ptr()->state_bits_ = 0;
-        func->ptr()->inlining_depth_ = 0;
+        func->untag()->usage_counter_ = 0;
+        func->untag()->optimized_instruction_count_ = 0;
+        func->untag()->optimized_call_site_count_ = 0;
+        func->untag()->deoptimization_counter_ = 0;
+        func->untag()->state_bits_ = 0;
+        func->untag()->inlining_depth_ = 0;
 #endif
       }
     }
@@ -725,14 +725,14 @@
       Function& func = Function::Handle(d->zone());
       for (intptr_t i = start_index_; i < stop_index_; i++) {
         func ^= refs.At(i);
-        ASSERT(func.raw()->ptr()->code_->IsCode());
-        uword entry_point = func.raw()->ptr()->code_->ptr()->entry_point_;
+        ASSERT(func.ptr()->untag()->code_->IsCode());
+        uword entry_point = func.ptr()->untag()->code_->untag()->entry_point_;
         ASSERT(entry_point != 0);
-        func.raw()->ptr()->entry_point_ = entry_point;
+        func.ptr()->untag()->entry_point_ = entry_point;
         uword unchecked_entry_point =
-            func.raw()->ptr()->code_->ptr()->unchecked_entry_point_;
+            func.ptr()->untag()->code_->untag()->unchecked_entry_point_;
         ASSERT(unchecked_entry_point != 0);
-        func.raw()->ptr()->unchecked_entry_point_ = unchecked_entry_point;
+        func.ptr()->untag()->unchecked_entry_point_ = unchecked_entry_point;
       }
     } else if (d->kind() == Snapshot::kFullJIT) {
       Function& func = Function::Handle(d->zone());
@@ -768,12 +768,12 @@
     objects_.Add(data);
 
     if (s->kind() != Snapshot::kFullAOT) {
-      s->Push(data->ptr()->context_scope_);
+      s->Push(data->untag()->context_scope_);
     }
-    s->Push(data->ptr()->parent_function_);
-    s->Push(data->ptr()->closure_);
-    s->Push(data->ptr()->default_type_arguments_);
-    s->Push(data->ptr()->default_type_arguments_info_);
+    s->Push(data->untag()->parent_function_);
+    s->Push(data->untag()->closure_);
+    s->Push(data->untag()->default_type_arguments_);
+    s->Push(data->untag()->default_type_arguments_info_);
   }
 
   void WriteAlloc(Serializer* s) {
@@ -829,16 +829,16 @@
       Deserializer::InitializeHeader(data, kClosureDataCid,
                                      ClosureData::InstanceSize());
       if (d->kind() == Snapshot::kFullAOT) {
-        data->ptr()->context_scope_ = ContextScope::null();
+        data->untag()->context_scope_ = ContextScope::null();
       } else {
-        data->ptr()->context_scope_ =
+        data->untag()->context_scope_ =
             static_cast<ContextScopePtr>(d->ReadRef());
       }
-      data->ptr()->parent_function_ = static_cast<FunctionPtr>(d->ReadRef());
-      data->ptr()->closure_ = static_cast<InstancePtr>(d->ReadRef());
-      data->ptr()->default_type_arguments_ =
+      data->untag()->parent_function_ = static_cast<FunctionPtr>(d->ReadRef());
+      data->untag()->closure_ = static_cast<InstancePtr>(d->ReadRef());
+      data->untag()->default_type_arguments_ =
           static_cast<TypeArgumentsPtr>(d->ReadRef());
-      data->ptr()->default_type_arguments_info_ =
+      data->untag()->default_type_arguments_info_ =
           static_cast<SmiPtr>(d->ReadRef());
     }
   }
@@ -874,10 +874,10 @@
       WriteFromTo(data);
 
       if (s->kind() == Snapshot::kFullAOT) {
-        s->WriteUnsigned(data->ptr()->callback_id_);
+        s->WriteUnsigned(data->untag()->callback_id_);
       } else {
         // FFI callbacks can only be written to AOT snapshots.
-        ASSERT(data->ptr()->callback_target_ == Object::null());
+        ASSERT(data->untag()->callback_target_ == Object::null());
       }
     }
   }
@@ -911,7 +911,7 @@
       Deserializer::InitializeHeader(data, kFfiTrampolineDataCid,
                                      FfiTrampolineData::InstanceSize());
       ReadFromTo(data);
-      data->ptr()->callback_id_ =
+      data->untag()->callback_id_ =
           d->kind() == Snapshot::kFullAOT ? d->ReadUnsigned() : 0;
     }
   }
@@ -929,22 +929,22 @@
 
     Snapshot::Kind kind = s->kind();
 
-    s->Push(field->ptr()->name_);
-    s->Push(field->ptr()->owner_);
-    s->Push(field->ptr()->type_);
+    s->Push(field->untag()->name_);
+    s->Push(field->untag()->owner_);
+    s->Push(field->untag()->type_);
     // Write out the initializer function
-    s->Push(field->ptr()->initializer_function_);
+    s->Push(field->untag()->initializer_function_);
 
     if (kind != Snapshot::kFullAOT) {
-      s->Push(field->ptr()->guarded_list_length_);
+      s->Push(field->untag()->guarded_list_length_);
     }
     if (kind == Snapshot::kFullJIT) {
-      s->Push(field->ptr()->dependent_code_);
+      s->Push(field->untag()->dependent_code_);
     }
     // Write out either the initial static value or field offset.
-    if (Field::StaticBit::decode(field->ptr()->kind_bits_)) {
+    if (Field::StaticBit::decode(field->untag()->kind_bits_)) {
       const intptr_t field_id =
-          Smi::Value(field->ptr()->host_offset_or_field_id_);
+          Smi::Value(field->untag()->host_offset_or_field_id_);
       s->Push(s->initial_field_table()->At(field_id));
     } else {
       s->Push(Smi::New(Field::TargetOffsetOf(field)));
@@ -966,7 +966,7 @@
     const intptr_t count = objects_.length();
     for (intptr_t i = 0; i < count; i++) {
       FieldPtr field = objects_[i];
-      AutoTraceObjectName(field, field->ptr()->name_);
+      AutoTraceObjectName(field, field->untag()->name_);
 
       WriteField(field, name_);
       WriteField(field, owner_);
@@ -981,19 +981,19 @@
       }
 
       if (kind != Snapshot::kFullAOT) {
-        s->WriteTokenPosition(field->ptr()->token_pos_);
-        s->WriteTokenPosition(field->ptr()->end_token_pos_);
-        s->WriteCid(field->ptr()->guarded_cid_);
-        s->WriteCid(field->ptr()->is_nullable_);
-        s->Write<int8_t>(field->ptr()->static_type_exactness_state_);
-        s->Write<uint32_t>(field->ptr()->kernel_offset_);
+        s->WriteTokenPosition(field->untag()->token_pos_);
+        s->WriteTokenPosition(field->untag()->end_token_pos_);
+        s->WriteCid(field->untag()->guarded_cid_);
+        s->WriteCid(field->untag()->is_nullable_);
+        s->Write<int8_t>(field->untag()->static_type_exactness_state_);
+        s->Write<uint32_t>(field->untag()->kernel_offset_);
       }
-      s->Write<uint16_t>(field->ptr()->kind_bits_);
+      s->Write<uint16_t>(field->untag()->kind_bits_);
 
       // Write out either the initial static value or field offset.
-      if (Field::StaticBit::decode(field->ptr()->kind_bits_)) {
+      if (Field::StaticBit::decode(field->untag()->kind_bits_)) {
         const intptr_t field_id =
-            Smi::Value(field->ptr()->host_offset_or_field_id_);
+            Smi::Value(field->untag()->host_offset_or_field_id_);
         WriteFieldValue("static value", s->initial_field_table()->At(field_id));
         s->WriteUnsigned(field_id);
       } else {
@@ -1031,34 +1031,36 @@
       Deserializer::InitializeHeader(field, kFieldCid, Field::InstanceSize());
       ReadFromTo(field);
       if (kind != Snapshot::kFullAOT) {
-        field->ptr()->guarded_list_length_ = static_cast<SmiPtr>(d->ReadRef());
+        field->untag()->guarded_list_length_ =
+            static_cast<SmiPtr>(d->ReadRef());
       }
       if (kind == Snapshot::kFullJIT) {
-        field->ptr()->dependent_code_ = static_cast<ArrayPtr>(d->ReadRef());
+        field->untag()->dependent_code_ = static_cast<ArrayPtr>(d->ReadRef());
       }
       if (kind != Snapshot::kFullAOT) {
-        field->ptr()->token_pos_ = d->ReadTokenPosition();
-        field->ptr()->end_token_pos_ = d->ReadTokenPosition();
-        field->ptr()->guarded_cid_ = d->ReadCid();
-        field->ptr()->is_nullable_ = d->ReadCid();
-        field->ptr()->static_type_exactness_state_ = d->Read<int8_t>();
+        field->untag()->token_pos_ = d->ReadTokenPosition();
+        field->untag()->end_token_pos_ = d->ReadTokenPosition();
+        field->untag()->guarded_cid_ = d->ReadCid();
+        field->untag()->is_nullable_ = d->ReadCid();
+        field->untag()->static_type_exactness_state_ = d->Read<int8_t>();
 #if !defined(DART_PRECOMPILED_RUNTIME)
-        field->ptr()->kernel_offset_ = d->Read<uint32_t>();
+        field->untag()->kernel_offset_ = d->Read<uint32_t>();
 #endif
       }
-      field->ptr()->kind_bits_ = d->Read<uint16_t>();
+      field->untag()->kind_bits_ = d->Read<uint16_t>();
 
       ObjectPtr value_or_offset = d->ReadRef();
-      if (Field::StaticBit::decode(field->ptr()->kind_bits_)) {
+      if (Field::StaticBit::decode(field->untag()->kind_bits_)) {
         const intptr_t field_id = d->ReadUnsigned();
         d->initial_field_table()->SetAt(
             field_id, static_cast<InstancePtr>(value_or_offset));
-        field->ptr()->host_offset_or_field_id_ = Smi::New(field_id);
+        field->untag()->host_offset_or_field_id_ = Smi::New(field_id);
       } else {
-        field->ptr()->host_offset_or_field_id_ = Smi::RawCast(value_or_offset);
+        field->untag()->host_offset_or_field_id_ =
+            Smi::RawCast(value_or_offset);
 #if !defined(DART_PRECOMPILED_RUNTIME)
-        field->ptr()->target_offset_ =
-            Smi::Value(field->ptr()->host_offset_or_field_id_);
+        field->untag()->target_offset_ =
+            Smi::Value(field->untag()->host_offset_or_field_id_);
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
       }
     }
@@ -1112,20 +1114,21 @@
     const intptr_t count = objects_.length();
     for (intptr_t i = 0; i < count; i++) {
       ScriptPtr script = objects_[i];
-      AutoTraceObjectName(script, script->ptr()->url_);
+      AutoTraceObjectName(script, script->untag()->url_);
       WriteFromTo(script);
-      s->Write<int32_t>(script->ptr()->line_offset_);
-      s->Write<int32_t>(script->ptr()->col_offset_);
+      s->Write<int32_t>(script->untag()->line_offset_);
+      s->Write<int32_t>(script->untag()->col_offset_);
       if (s->kind() != Snapshot::kFullAOT) {
         // Clear out the max position cache in snapshots to ensure no
         // differences in the snapshot due to triggering caching vs. not.
-        int32_t written_flags = ScriptLayout::CachedMaxPositionBitField::update(
-            0, script->ptr()->flags_and_max_position_);
-        written_flags =
-            ScriptLayout::HasCachedMaxPositionBit::update(false, written_flags);
+        int32_t written_flags =
+            UntaggedScript::CachedMaxPositionBitField::update(
+                0, script->untag()->flags_and_max_position_);
+        written_flags = UntaggedScript::HasCachedMaxPositionBit::update(
+            false, written_flags);
         s->Write<int32_t>(written_flags);
       }
-      s->Write<int32_t>(script->ptr()->kernel_script_index_);
+      s->Write<int32_t>(script->untag()->kernel_script_index_);
     }
   }
 
@@ -1156,13 +1159,13 @@
       Deserializer::InitializeHeader(script, kScriptCid,
                                      Script::InstanceSize());
       ReadFromTo(script);
-      script->ptr()->line_offset_ = d->Read<int32_t>();
-      script->ptr()->col_offset_ = d->Read<int32_t>();
+      script->untag()->line_offset_ = d->Read<int32_t>();
+      script->untag()->col_offset_ = d->Read<int32_t>();
 #if !defined(DART_PRECOMPILED_RUNTIME)
-      script->ptr()->flags_and_max_position_ = d->Read<int32_t>();
+      script->untag()->flags_and_max_position_ = d->Read<int32_t>();
 #endif
-      script->ptr()->kernel_script_index_ = d->Read<int32_t>();
-      script->ptr()->load_timestamp_ = 0;
+      script->untag()->kernel_script_index_ = d->Read<int32_t>();
+      script->untag()->load_timestamp_ = 0;
     }
   }
 };
@@ -1193,14 +1196,14 @@
     const intptr_t count = objects_.length();
     for (intptr_t i = 0; i < count; i++) {
       LibraryPtr lib = objects_[i];
-      AutoTraceObjectName(lib, lib->ptr()->url_);
+      AutoTraceObjectName(lib, lib->untag()->url_);
       WriteFromTo(lib);
-      s->Write<int32_t>(lib->ptr()->index_);
-      s->Write<uint16_t>(lib->ptr()->num_imports_);
-      s->Write<int8_t>(lib->ptr()->load_state_);
-      s->Write<uint8_t>(lib->ptr()->flags_);
+      s->Write<int32_t>(lib->untag()->index_);
+      s->Write<uint16_t>(lib->untag()->num_imports_);
+      s->Write<int8_t>(lib->untag()->load_state_);
+      s->Write<uint8_t>(lib->untag()->flags_);
       if (s->kind() != Snapshot::kFullAOT) {
-        s->Write<uint32_t>(lib->ptr()->kernel_offset_);
+        s->Write<uint32_t>(lib->untag()->kernel_offset_);
       }
     }
   }
@@ -1231,16 +1234,16 @@
       LibraryPtr lib = static_cast<LibraryPtr>(d->Ref(id));
       Deserializer::InitializeHeader(lib, kLibraryCid, Library::InstanceSize());
       ReadFromTo(lib);
-      lib->ptr()->native_entry_resolver_ = NULL;
-      lib->ptr()->native_entry_symbol_resolver_ = NULL;
-      lib->ptr()->index_ = d->Read<int32_t>();
-      lib->ptr()->num_imports_ = d->Read<uint16_t>();
-      lib->ptr()->load_state_ = d->Read<int8_t>();
-      lib->ptr()->flags_ =
-          LibraryLayout::InFullSnapshotBit::update(true, d->Read<uint8_t>());
+      lib->untag()->native_entry_resolver_ = NULL;
+      lib->untag()->native_entry_symbol_resolver_ = NULL;
+      lib->untag()->index_ = d->Read<int32_t>();
+      lib->untag()->num_imports_ = d->Read<uint16_t>();
+      lib->untag()->load_state_ = d->Read<int8_t>();
+      lib->untag()->flags_ =
+          UntaggedLibrary::InFullSnapshotBit::update(true, d->Read<uint8_t>());
 #if !defined(DART_PRECOMPILED_RUNTIME)
       if (d->kind() != Snapshot::kFullAOT) {
-        lib->ptr()->kernel_offset_ = d->Read<uint32_t>();
+        lib->untag()->kernel_offset_ = d->Read<uint32_t>();
       }
 #endif
     }
@@ -1339,7 +1342,7 @@
       KernelProgramInfoPtr info = objects_[i];
       AutoTraceObject(info);
       WriteFromTo(info);
-      s->Write<uint32_t>(info->ptr()->kernel_binary_version_);
+      s->Write<uint32_t>(info->untag()->kernel_binary_version_);
     }
   }
 
@@ -1373,7 +1376,7 @@
       Deserializer::InitializeHeader(info, kKernelProgramInfoCid,
                                      KernelProgramInfo::InstanceSize());
       ReadFromTo(info);
-      info->ptr()->kernel_binary_version_ = d->Read<uint32_t>();
+      info->untag()->kernel_binary_version_ = d->Read<uint32_t>();
     }
   }
 
@@ -1405,43 +1408,43 @@
 
     if (s->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions) {
       if (FLAG_retain_function_objects) {
-        ObjectPoolPtr pool = code->ptr()->object_pool_;
+        ObjectPoolPtr pool = code->untag()->object_pool_;
         if ((pool != ObjectPool::null()) && s->InCurrentLoadingUnit(code)) {
-          const intptr_t length = pool->ptr()->length_;
-          uint8_t* entry_bits = pool->ptr()->entry_bits();
+          const intptr_t length = pool->untag()->length_;
+          uint8_t* entry_bits = pool->untag()->entry_bits();
           for (intptr_t i = 0; i < length; i++) {
             auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
             if (entry_type == ObjectPool::EntryType::kTaggedObject) {
-              s->Push(pool->ptr()->data()[i].raw_obj_);
+              s->Push(pool->untag()->data()[i].raw_obj_);
             }
           }
         }
       }
     } else {
-      if (s->InCurrentLoadingUnit(code->ptr()->object_pool_)) {
-        s->Push(code->ptr()->object_pool_);
+      if (s->InCurrentLoadingUnit(code->untag()->object_pool_)) {
+        s->Push(code->untag()->object_pool_);
       }
     }
 
-    s->Push(code->ptr()->owner_);
-    s->Push(code->ptr()->exception_handlers_);
-    s->Push(code->ptr()->pc_descriptors_);
-    s->Push(code->ptr()->catch_entry_);
-    if (s->InCurrentLoadingUnit(code->ptr()->compressed_stackmaps_)) {
-      s->Push(code->ptr()->compressed_stackmaps_);
+    s->Push(code->untag()->owner_);
+    s->Push(code->untag()->exception_handlers_);
+    s->Push(code->untag()->pc_descriptors_);
+    s->Push(code->untag()->catch_entry_);
+    if (s->InCurrentLoadingUnit(code->untag()->compressed_stackmaps_)) {
+      s->Push(code->untag()->compressed_stackmaps_);
     }
     if (!FLAG_precompiled_mode || !FLAG_dwarf_stack_traces_mode) {
-      s->Push(code->ptr()->inlined_id_to_function_);
-      if (s->InCurrentLoadingUnit(code->ptr()->code_source_map_)) {
-        s->Push(code->ptr()->code_source_map_);
+      s->Push(code->untag()->inlined_id_to_function_);
+      if (s->InCurrentLoadingUnit(code->untag()->code_source_map_)) {
+        s->Push(code->untag()->code_source_map_);
       }
     }
     if (s->kind() == Snapshot::kFullJIT) {
-      s->Push(code->ptr()->deopt_info_array_);
-      s->Push(code->ptr()->static_calls_target_table_);
+      s->Push(code->untag()->deopt_info_array_);
+      s->Push(code->untag()->static_calls_target_table_);
     } else if (s->kind() == Snapshot::kFullAOT) {
 #if defined(DART_PRECOMPILER)
-      auto const calls_array = code->ptr()->static_calls_target_table_;
+      auto const calls_array = code->untag()->static_calls_target_table_;
       if (calls_array != Array::null()) {
         // Some Code entries in the static calls target table may only be
         // accessible via here, so push the Code objects.
@@ -1470,9 +1473,9 @@
 #endif
     }
 #if !defined(PRODUCT)
-    s->Push(code->ptr()->return_address_metadata_);
+    s->Push(code->untag()->return_address_metadata_);
     if (FLAG_code_comments) {
-      s->Push(code->ptr()->comments_);
+      s->Push(code->untag()->comments_);
     }
 #endif
   }
@@ -1492,7 +1495,7 @@
   static void Insert(GrowableArray<CodeOrderInfo>* order_list,
                      IntMap<intptr_t>* order_map,
                      CodePtr code) {
-    InstructionsPtr instr = code->ptr()->instructions_;
+    InstructionsPtr instr = code->untag()->instructions_;
     intptr_t key = static_cast<intptr_t>(instr);
     intptr_t order;
     if (order_map->HasKey(key)) {
@@ -1524,7 +1527,7 @@
     GrowableArray<CodeOrderInfo> order_list;
     IntMap<intptr_t> order_map;
     for (intptr_t i = 0; i < codes->length(); i++) {
-      Insert(&order_list, &order_map, (*codes)[i]->raw());
+      Insert(&order_list, &order_map, (*codes)[i]->ptr());
     }
     order_list.Sort(CompareCodeOrderInfo);
     ASSERT(order_list.length() == codes->length());
@@ -1570,7 +1573,7 @@
     AutoTraceObjectName(code, MakeDisambiguatedCodeName(s, code));
 
     intptr_t pointer_offsets_length =
-        Code::PtrOffBits::decode(code->ptr()->state_bits_);
+        Code::PtrOffBits::decode(code->untag()->state_bits_);
     if (pointer_offsets_length != 0) {
       FATAL("Cannot serialize code with embedded pointers");
     }
@@ -1579,38 +1582,38 @@
       s->UnexpectedObject(code, "Disabled code");
     }
 
-    s->WriteInstructions(code->ptr()->instructions_,
-                         code->ptr()->unchecked_offset_, code, deferred);
+    s->WriteInstructions(code->untag()->instructions_,
+                         code->untag()->unchecked_offset_, code, deferred);
     if (kind == Snapshot::kFullJIT) {
       // TODO(rmacnak): Fix references to disabled code before serializing.
       // For now, we may write the FixCallersTarget or equivalent stub. This
       // will cause a fixup if this code is called.
       const uint32_t active_unchecked_offset =
-          code->ptr()->unchecked_entry_point_ - code->ptr()->entry_point_;
-      s->WriteInstructions(code->ptr()->active_instructions_,
+          code->untag()->unchecked_entry_point_ - code->untag()->entry_point_;
+      s->WriteInstructions(code->untag()->active_instructions_,
                            active_unchecked_offset, code, deferred);
     }
 
     // No need to write the object pool out if we are producing a full AOT
     // snapshot with bare instructions.
     if (!(kind == Snapshot::kFullAOT && FLAG_use_bare_instructions)) {
-      if (s->InCurrentLoadingUnit(code->ptr()->object_pool_)) {
+      if (s->InCurrentLoadingUnit(code->untag()->object_pool_)) {
         WriteField(code, object_pool_);
       } else {
         WriteFieldValue(object_pool_, ObjectPool::null());
       }
 #if defined(DART_PRECOMPILER)
     } else if (FLAG_write_v8_snapshot_profile_to != nullptr &&
-               code->ptr()->object_pool_ != ObjectPool::null()) {
+               code->untag()->object_pool_ != ObjectPool::null()) {
       // If we are writing a V8 snapshot profile, then attribute references
       // going through the object pool to the code object itself.
-      ObjectPoolPtr pool = code->ptr()->object_pool_;
+      ObjectPoolPtr pool = code->untag()->object_pool_;
 
-      for (intptr_t i = 0; i < pool->ptr()->length_; i++) {
-        uint8_t bits = pool->ptr()->entry_bits()[i];
+      for (intptr_t i = 0; i < pool->untag()->length_; i++) {
+        uint8_t bits = pool->untag()->entry_bits()[i];
         if (ObjectPool::TypeBits::decode(bits) ==
             ObjectPool::EntryType::kTaggedObject) {
-          s->AttributeElementRef(pool->ptr()->data()[i].raw_obj_, i);
+          s->AttributeElementRef(pool->untag()->data()[i].raw_obj_, i);
         }
       }
 #endif  // defined(DART_PRECOMPILER)
@@ -1619,7 +1622,7 @@
     WriteField(code, exception_handlers_);
     WriteField(code, pc_descriptors_);
     WriteField(code, catch_entry_);
-    if (s->InCurrentLoadingUnit(code->ptr()->compressed_stackmaps_)) {
+    if (s->InCurrentLoadingUnit(code->untag()->compressed_stackmaps_)) {
       WriteField(code, compressed_stackmaps_);
     } else {
       WriteFieldValue(compressed_stackmaps_, CompressedStackMaps::null());
@@ -1629,7 +1632,7 @@
       WriteFieldValue(code_source_map_, CodeSourceMap::null());
     } else {
       WriteField(code, inlined_id_to_function_);
-      if (s->InCurrentLoadingUnit(code->ptr()->code_source_map_)) {
+      if (s->InCurrentLoadingUnit(code->untag()->code_source_map_)) {
         WriteField(code, code_source_map_);
       } else {
         WriteFieldValue(code_source_map_, CodeSourceMap::null());
@@ -1642,12 +1645,12 @@
 
 #if defined(DART_PRECOMPILER)
     if (FLAG_write_v8_snapshot_profile_to != nullptr &&
-        code->ptr()->static_calls_target_table_ != Array::null()) {
+        code->untag()->static_calls_target_table_ != Array::null()) {
       // If we are writing a V8 snapshot profile, then attribute references
       // going through static calls.
-      array_ = code->ptr()->static_calls_target_table_;
-      intptr_t index = code->ptr()->object_pool_ != ObjectPool::null()
-                           ? code->ptr()->object_pool_->ptr()->length_
+      array_ = code->untag()->static_calls_target_table_;
+      intptr_t index = code->untag()->object_pool_ != ObjectPool::null()
+                           ? code->untag()->object_pool_->untag()->length_
                            : 0;
       for (auto entry : StaticCallsTable(array_)) {
         auto kind = Code::KindField::decode(
@@ -1676,7 +1679,7 @@
       WriteField(code, comments_);
     }
 #endif
-    s->Write<int32_t>(code->ptr()->state_bits_);
+    s->Write<int32_t>(code->untag()->state_bits_);
   }
 
   GrowableArray<CodePtr>* objects() { return &objects_; }
@@ -1691,7 +1694,8 @@
     ASSERT(s->profile_writer() != nullptr);
 
     for (auto code : objects_) {
-      ObjectPtr owner = WeakSerializationReference::Unwrap(code->ptr()->owner_);
+      ObjectPtr owner =
+          WeakSerializationReference::Unwrap(code->untag()->owner_);
       if (s->CreateArtificalNodeIfNeeded(owner)) {
         AutoTraceObject(code);
         s->AttributePropertyRef(owner, ":owner_",
@@ -1762,38 +1766,41 @@
     // There would be a single global pool if this is a full AOT snapshot
     // with bare instructions.
     if (!(d->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions)) {
-      code->ptr()->object_pool_ = static_cast<ObjectPoolPtr>(d->ReadRef());
+      code->untag()->object_pool_ = static_cast<ObjectPoolPtr>(d->ReadRef());
     } else {
-      code->ptr()->object_pool_ = ObjectPool::null();
+      code->untag()->object_pool_ = ObjectPool::null();
     }
-    code->ptr()->owner_ = d->ReadRef();
-    code->ptr()->exception_handlers_ =
+    code->untag()->owner_ = d->ReadRef();
+    code->untag()->exception_handlers_ =
         static_cast<ExceptionHandlersPtr>(d->ReadRef());
-    code->ptr()->pc_descriptors_ = static_cast<PcDescriptorsPtr>(d->ReadRef());
-    code->ptr()->catch_entry_ = d->ReadRef();
-    code->ptr()->compressed_stackmaps_ =
+    code->untag()->pc_descriptors_ =
+        static_cast<PcDescriptorsPtr>(d->ReadRef());
+    code->untag()->catch_entry_ = d->ReadRef();
+    code->untag()->compressed_stackmaps_ =
         static_cast<CompressedStackMapsPtr>(d->ReadRef());
-    code->ptr()->inlined_id_to_function_ = static_cast<ArrayPtr>(d->ReadRef());
-    code->ptr()->code_source_map_ = static_cast<CodeSourceMapPtr>(d->ReadRef());
+    code->untag()->inlined_id_to_function_ =
+        static_cast<ArrayPtr>(d->ReadRef());
+    code->untag()->code_source_map_ =
+        static_cast<CodeSourceMapPtr>(d->ReadRef());
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
     if (d->kind() == Snapshot::kFullJIT) {
-      code->ptr()->deopt_info_array_ = static_cast<ArrayPtr>(d->ReadRef());
-      code->ptr()->static_calls_target_table_ =
+      code->untag()->deopt_info_array_ = static_cast<ArrayPtr>(d->ReadRef());
+      code->untag()->static_calls_target_table_ =
           static_cast<ArrayPtr>(d->ReadRef());
     }
 #endif  // !DART_PRECOMPILED_RUNTIME
 
 #if !defined(PRODUCT)
-    code->ptr()->return_address_metadata_ = d->ReadRef();
-    code->ptr()->var_descriptors_ = LocalVarDescriptors::null();
-    code->ptr()->comments_ = FLAG_code_comments
-                                 ? static_cast<ArrayPtr>(d->ReadRef())
-                                 : Array::null();
-    code->ptr()->compile_timestamp_ = 0;
+    code->untag()->return_address_metadata_ = d->ReadRef();
+    code->untag()->var_descriptors_ = LocalVarDescriptors::null();
+    code->untag()->comments_ = FLAG_code_comments
+                                   ? static_cast<ArrayPtr>(d->ReadRef())
+                                   : Array::null();
+    code->untag()->compile_timestamp_ = 0;
 #endif
 
-    code->ptr()->state_bits_ = d->Read<int32_t>();
+    code->untag()->state_bits_ = d->Read<int32_t>();
   }
 
   void PostLoad(Deserializer* d, const Array& refs, bool canonicalize) {
@@ -1848,12 +1855,12 @@
         FLAG_retain_function_objects) {
       // Treat pool as weak.
     } else {
-      const intptr_t length = pool->ptr()->length_;
-      uint8_t* entry_bits = pool->ptr()->entry_bits();
+      const intptr_t length = pool->untag()->length_;
+      uint8_t* entry_bits = pool->untag()->entry_bits();
       for (intptr_t i = 0; i < length; i++) {
         auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
         if (entry_type == ObjectPool::EntryType::kTaggedObject) {
-          s->Push(pool->ptr()->data()[i].raw_obj_);
+          s->Push(pool->untag()->data()[i].raw_obj_);
         }
       }
     }
@@ -1867,7 +1874,7 @@
       ObjectPoolPtr pool = objects_[i];
       s->AssignRef(pool);
       AutoTraceObject(pool);
-      const intptr_t length = pool->ptr()->length_;
+      const intptr_t length = pool->untag()->length_;
       s->WriteUnsigned(length);
     }
   }
@@ -1880,20 +1887,20 @@
     for (intptr_t i = 0; i < count; i++) {
       ObjectPoolPtr pool = objects_[i];
       AutoTraceObject(pool);
-      const intptr_t length = pool->ptr()->length_;
+      const intptr_t length = pool->untag()->length_;
       s->WriteUnsigned(length);
-      uint8_t* entry_bits = pool->ptr()->entry_bits();
+      uint8_t* entry_bits = pool->untag()->entry_bits();
       for (intptr_t j = 0; j < length; j++) {
         s->Write<uint8_t>(entry_bits[j]);
-        ObjectPoolLayout::Entry& entry = pool->ptr()->data()[j];
+        UntaggedObjectPool::Entry& entry = pool->untag()->data()[j];
         switch (ObjectPool::TypeBits::decode(entry_bits[j])) {
           case ObjectPool::EntryType::kTaggedObject: {
-            if ((entry.raw_obj_ == StubCode::CallNoScopeNative().raw()) ||
-                (entry.raw_obj_ == StubCode::CallAutoScopeNative().raw())) {
+            if ((entry.raw_obj_ == StubCode::CallNoScopeNative().ptr()) ||
+                (entry.raw_obj_ == StubCode::CallAutoScopeNative().ptr())) {
               // Natives can run while precompiling, becoming linked and
               // switching their stub. Reset to the initial stub used for
               // lazy-linking.
-              s->WriteElementRef(StubCode::CallBootstrapNative().raw(), j);
+              s->WriteElementRef(StubCode::CallBootstrapNative().ptr(), j);
               break;
             }
             if (weak && !s->HasRef(entry.raw_obj_)) {
@@ -1951,11 +1958,11 @@
       ObjectPoolPtr pool = static_cast<ObjectPoolPtr>(d->Ref(id + 0));
       Deserializer::InitializeHeader(pool, kObjectPoolCid,
                                      ObjectPool::InstanceSize(length));
-      pool->ptr()->length_ = length;
+      pool->untag()->length_ = length;
       for (intptr_t j = 0; j < length; j++) {
         const uint8_t entry_bits = d->Read<uint8_t>();
-        pool->ptr()->entry_bits()[j] = entry_bits;
-        ObjectPoolLayout::Entry& entry = pool->ptr()->data()[j];
+        pool->untag()->entry_bits()[j] = entry_bits;
+        UntaggedObjectPool::Entry& entry = pool->untag()->data()[j];
         switch (ObjectPool::TypeBits::decode(entry_bits)) {
           case ObjectPool::EntryType::kTaggedObject:
             entry.raw_obj_ = d->ReadRef();
@@ -2153,7 +2160,7 @@
       Deserializer::InitializeHeader(
           ref, kWeakSerializationReferenceCid,
           WeakSerializationReference::InstanceSize());
-      ref->ptr()->cid_ = d->ReadCid();
+      ref->untag()->cid_ = d->ReadCid();
     }
   }
 };
@@ -2178,7 +2185,7 @@
       PcDescriptorsPtr desc = objects_[i];
       s->AssignRef(desc);
       AutoTraceObject(desc);
-      const intptr_t length = desc->ptr()->length_;
+      const intptr_t length = desc->untag()->length_;
       s->WriteUnsigned(length);
     }
   }
@@ -2188,9 +2195,9 @@
     for (intptr_t i = 0; i < count; i++) {
       PcDescriptorsPtr desc = objects_[i];
       AutoTraceObject(desc);
-      const intptr_t length = desc->ptr()->length_;
+      const intptr_t length = desc->untag()->length_;
       s->WriteUnsigned(length);
-      uint8_t* cdata = reinterpret_cast<uint8_t*>(desc->ptr()->data());
+      uint8_t* cdata = reinterpret_cast<uint8_t*>(desc->untag()->data());
       s->WriteBytes(cdata, length);
     }
   }
@@ -2225,8 +2232,8 @@
       PcDescriptorsPtr desc = static_cast<PcDescriptorsPtr>(d->Ref(id));
       Deserializer::InitializeHeader(desc, kPcDescriptorsCid,
                                      PcDescriptors::InstanceSize(length));
-      desc->ptr()->length_ = length;
-      uint8_t* cdata = reinterpret_cast<uint8_t*>(desc->ptr()->data());
+      desc->untag()->length_ = length;
+      uint8_t* cdata = reinterpret_cast<uint8_t*>(desc->untag()->data());
       d->ReadBytes(cdata, length);
     }
   }
@@ -2248,7 +2255,7 @@
     // will be loaded into read-only memory. Extra bytes due to allocation
     // rounding need to be deterministically set for reliable deduplication in
     // shared images.
-    if (object->ptr()->InVMIsolateHeap() ||
+    if (object->untag()->InVMIsolateHeap() ||
         s->heap()->old_space()->IsObjectFromImagePages(object)) {
       // This object is already read-only.
     } else {
@@ -2325,7 +2332,7 @@
       for (intptr_t i = start_index_; i < stop_index_; i++) {
         str ^= refs.At(i);
         str2 ^= table.InsertOrGet(str);
-        if (str.raw() == str2.raw()) {
+        if (str.ptr() == str2.ptr()) {
           // str.SetCanonical();
         } else {
           FATAL("Lost canonicalization race");
@@ -2351,7 +2358,7 @@
     ExceptionHandlersPtr handlers = ExceptionHandlers::RawCast(object);
     objects_.Add(handlers);
 
-    s->Push(handlers->ptr()->handled_types_data_);
+    s->Push(handlers->untag()->handled_types_data_);
   }
 
   void WriteAlloc(Serializer* s) {
@@ -2362,7 +2369,7 @@
       ExceptionHandlersPtr handlers = objects_[i];
       s->AssignRef(handlers);
       AutoTraceObject(handlers);
-      const intptr_t length = handlers->ptr()->num_entries_;
+      const intptr_t length = handlers->untag()->num_entries_;
       s->WriteUnsigned(length);
     }
   }
@@ -2372,11 +2379,11 @@
     for (intptr_t i = 0; i < count; i++) {
       ExceptionHandlersPtr handlers = objects_[i];
       AutoTraceObject(handlers);
-      const intptr_t length = handlers->ptr()->num_entries_;
+      const intptr_t length = handlers->untag()->num_entries_;
       s->WriteUnsigned(length);
       WriteField(handlers, handled_types_data_);
       for (intptr_t j = 0; j < length; j++) {
-        const ExceptionHandlerInfo& info = handlers->ptr()->data()[j];
+        const ExceptionHandlerInfo& info = handlers->untag()->data()[j];
         s->Write<uint32_t>(info.handler_pc_offset);
         s->Write<int16_t>(info.outer_try_index);
         s->Write<int8_t>(info.needs_stacktrace);
@@ -2417,11 +2424,11 @@
       const intptr_t length = d->ReadUnsigned();
       Deserializer::InitializeHeader(handlers, kExceptionHandlersCid,
                                      ExceptionHandlers::InstanceSize(length));
-      handlers->ptr()->num_entries_ = length;
-      handlers->ptr()->handled_types_data_ =
+      handlers->untag()->num_entries_ = length;
+      handlers->untag()->handled_types_data_ =
           static_cast<ArrayPtr>(d->ReadRef());
       for (intptr_t j = 0; j < length; j++) {
-        ExceptionHandlerInfo& info = handlers->ptr()->data()[j];
+        ExceptionHandlerInfo& info = handlers->untag()->data()[j];
         info.handler_pc_offset = d->Read<uint32_t>();
         info.outer_try_index = d->Read<int16_t>();
         info.needs_stacktrace = d->Read<int8_t>();
@@ -2442,10 +2449,10 @@
     ContextPtr context = Context::RawCast(object);
     objects_.Add(context);
 
-    s->Push(context->ptr()->parent_);
-    const intptr_t length = context->ptr()->num_variables_;
+    s->Push(context->untag()->parent_);
+    const intptr_t length = context->untag()->num_variables_;
     for (intptr_t i = 0; i < length; i++) {
-      s->Push(context->ptr()->data()[i]);
+      s->Push(context->untag()->data()[i]);
     }
   }
 
@@ -2457,7 +2464,7 @@
       ContextPtr context = objects_[i];
       s->AssignRef(context);
       AutoTraceObject(context);
-      const intptr_t length = context->ptr()->num_variables_;
+      const intptr_t length = context->untag()->num_variables_;
       s->WriteUnsigned(length);
     }
   }
@@ -2467,11 +2474,11 @@
     for (intptr_t i = 0; i < count; i++) {
       ContextPtr context = objects_[i];
       AutoTraceObject(context);
-      const intptr_t length = context->ptr()->num_variables_;
+      const intptr_t length = context->untag()->num_variables_;
       s->WriteUnsigned(length);
       WriteField(context, parent_);
       for (intptr_t j = 0; j < length; j++) {
-        s->WriteElementRef(context->ptr()->data()[j], j);
+        s->WriteElementRef(context->untag()->data()[j], j);
       }
     }
   }
@@ -2505,10 +2512,10 @@
       const intptr_t length = d->ReadUnsigned();
       Deserializer::InitializeHeader(context, kContextCid,
                                      Context::InstanceSize(length));
-      context->ptr()->num_variables_ = length;
-      context->ptr()->parent_ = static_cast<ContextPtr>(d->ReadRef());
+      context->untag()->num_variables_ = length;
+      context->untag()->parent_ = static_cast<ContextPtr>(d->ReadRef());
       for (intptr_t j = 0; j < length; j++) {
-        context->ptr()->data()[j] = d->ReadRef();
+        context->untag()->data()[j] = d->ReadRef();
       }
     }
   }
@@ -2524,7 +2531,7 @@
     ContextScopePtr scope = ContextScope::RawCast(object);
     objects_.Add(scope);
 
-    const intptr_t length = scope->ptr()->num_variables_;
+    const intptr_t length = scope->untag()->num_variables_;
     PushFromTo(scope, length);
   }
 
@@ -2536,7 +2543,7 @@
       ContextScopePtr scope = objects_[i];
       s->AssignRef(scope);
       AutoTraceObject(scope);
-      const intptr_t length = scope->ptr()->num_variables_;
+      const intptr_t length = scope->untag()->num_variables_;
       s->WriteUnsigned(length);
     }
   }
@@ -2546,9 +2553,9 @@
     for (intptr_t i = 0; i < count; i++) {
       ContextScopePtr scope = objects_[i];
       AutoTraceObject(scope);
-      const intptr_t length = scope->ptr()->num_variables_;
+      const intptr_t length = scope->untag()->num_variables_;
       s->WriteUnsigned(length);
-      s->Write<bool>(scope->ptr()->is_implicit_);
+      s->Write<bool>(scope->untag()->is_implicit_);
       WriteFromTo(scope, length);
     }
   }
@@ -2583,8 +2590,8 @@
       const intptr_t length = d->ReadUnsigned();
       Deserializer::InitializeHeader(scope, kContextScopeCid,
                                      ContextScope::InstanceSize(length));
-      scope->ptr()->num_variables_ = length;
-      scope->ptr()->is_implicit_ = d->Read<bool>();
+      scope->untag()->num_variables_ = length;
+      scope->untag()->is_implicit_ = d->Read<bool>();
       ReadFromTo(scope, length);
     }
   }
@@ -2616,9 +2623,9 @@
     const intptr_t count = objects_.length();
     for (intptr_t i = 0; i < count; i++) {
       UnlinkedCallPtr unlinked = objects_[i];
-      AutoTraceObjectName(unlinked, unlinked->ptr()->target_name_);
+      AutoTraceObjectName(unlinked, unlinked->untag()->target_name_);
       WriteFromTo(unlinked);
-      s->Write<bool>(unlinked->ptr()->can_patch_to_monomorphic_);
+      s->Write<bool>(unlinked->untag()->can_patch_to_monomorphic_);
     }
   }
 
@@ -2651,7 +2658,7 @@
       Deserializer::InitializeHeader(unlinked, kUnlinkedCallCid,
                                      UnlinkedCall::InstanceSize());
       ReadFromTo(unlinked);
-      unlinked->ptr()->can_patch_to_monomorphic_ = d->Read<bool>();
+      unlinked->untag()->can_patch_to_monomorphic_ = d->Read<bool>();
     }
   }
 };
@@ -2683,12 +2690,12 @@
     const intptr_t count = objects_.length();
     for (intptr_t i = 0; i < count; i++) {
       ICDataPtr ic = objects_[i];
-      AutoTraceObjectName(ic, ic->ptr()->target_name_);
+      AutoTraceObjectName(ic, ic->untag()->target_name_);
       WriteFromTo(ic);
       if (kind != Snapshot::kFullAOT) {
-        NOT_IN_PRECOMPILED(s->Write<int32_t>(ic->ptr()->deopt_id_));
+        NOT_IN_PRECOMPILED(s->Write<int32_t>(ic->untag()->deopt_id_));
       }
-      s->Write<uint32_t>(ic->ptr()->state_bits_);
+      s->Write<uint32_t>(ic->untag()->state_bits_);
     }
   }
 
@@ -2718,8 +2725,8 @@
       ICDataPtr ic = static_cast<ICDataPtr>(d->Ref(id));
       Deserializer::InitializeHeader(ic, kICDataCid, ICData::InstanceSize());
       ReadFromTo(ic);
-      NOT_IN_PRECOMPILED(ic->ptr()->deopt_id_ = d->Read<int32_t>());
-      ic->ptr()->state_bits_ = d->Read<int32_t>();
+      NOT_IN_PRECOMPILED(ic->untag()->deopt_id_ = d->Read<int32_t>());
+      ic->untag()->state_bits_ = d->Read<int32_t>();
     }
   }
 };
@@ -2751,9 +2758,9 @@
     const intptr_t count = objects_.length();
     for (intptr_t i = 0; i < count; i++) {
       MegamorphicCachePtr cache = objects_[i];
-      AutoTraceObjectName(cache, cache->ptr()->target_name_);
+      AutoTraceObjectName(cache, cache->untag()->target_name_);
       WriteFromTo(cache);
-      s->Write<int32_t>(cache->ptr()->filled_entry_count_);
+      s->Write<int32_t>(cache->untag()->filled_entry_count_);
     }
   }
 
@@ -2786,7 +2793,7 @@
       Deserializer::InitializeHeader(cache, kMegamorphicCacheCid,
                                      MegamorphicCache::InstanceSize());
       ReadFromTo(cache);
-      cache->ptr()->filled_entry_count_ = d->Read<int32_t>();
+      cache->untag()->filled_entry_count_ = d->Read<int32_t>();
     }
   }
 
@@ -2827,7 +2834,7 @@
   void Trace(Serializer* s, ObjectPtr object) {
     SubtypeTestCachePtr cache = SubtypeTestCache::RawCast(object);
     objects_.Add(cache);
-    s->Push(cache->ptr()->cache_);
+    s->Push(cache->untag()->cache_);
   }
 
   void WriteAlloc(Serializer* s) {
@@ -2877,7 +2884,7 @@
       SubtypeTestCachePtr cache = static_cast<SubtypeTestCachePtr>(d->Ref(id));
       Deserializer::InitializeHeader(cache, kSubtypeTestCacheCid,
                                      SubtypeTestCache::InstanceSize());
-      cache->ptr()->cache_ = static_cast<ArrayPtr>(d->ReadRef());
+      cache->untag()->cache_ = static_cast<ArrayPtr>(d->ReadRef());
     }
   }
 };
@@ -2891,7 +2898,7 @@
   void Trace(Serializer* s, ObjectPtr object) {
     LoadingUnitPtr unit = LoadingUnit::RawCast(object);
     objects_.Add(unit);
-    s->Push(unit->ptr()->parent_);
+    s->Push(unit->untag()->parent_);
   }
 
   void WriteAlloc(Serializer* s) {
@@ -2910,7 +2917,7 @@
       LoadingUnitPtr unit = objects_[i];
       AutoTraceObject(unit);
       WriteField(unit, parent_);
-      s->Write<int32_t>(unit->ptr()->id_);
+      s->Write<int32_t>(unit->untag()->id_);
     }
   }
 
@@ -2941,11 +2948,11 @@
       LoadingUnitPtr unit = static_cast<LoadingUnitPtr>(d->Ref(id));
       Deserializer::InitializeHeader(unit, kLoadingUnitCid,
                                      LoadingUnit::InstanceSize());
-      unit->ptr()->parent_ = static_cast<LoadingUnitPtr>(d->ReadRef());
-      unit->ptr()->base_objects_ = Array::null();
-      unit->ptr()->id_ = d->Read<int32_t>();
-      unit->ptr()->loaded_ = false;
-      unit->ptr()->load_outstanding_ = false;
+      unit->untag()->parent_ = static_cast<LoadingUnitPtr>(d->ReadRef());
+      unit->untag()->base_objects_ = Array::null();
+      unit->untag()->id_ = d->Read<int32_t>();
+      unit->untag()->loaded_ = false;
+      unit->untag()->load_outstanding_ = false;
     }
   }
 };
@@ -2978,9 +2985,9 @@
       LanguageErrorPtr error = objects_[i];
       AutoTraceObject(error);
       WriteFromTo(error);
-      s->WriteTokenPosition(error->ptr()->token_pos_);
-      s->Write<bool>(error->ptr()->report_after_token_);
-      s->Write<int8_t>(error->ptr()->kind_);
+      s->WriteTokenPosition(error->untag()->token_pos_);
+      s->Write<bool>(error->untag()->report_after_token_);
+      s->Write<int8_t>(error->untag()->kind_);
     }
   }
 
@@ -3013,9 +3020,9 @@
       Deserializer::InitializeHeader(error, kLanguageErrorCid,
                                      LanguageError::InstanceSize());
       ReadFromTo(error);
-      error->ptr()->token_pos_ = d->ReadTokenPosition();
-      error->ptr()->report_after_token_ = d->Read<bool>();
-      error->ptr()->kind_ = d->Read<int8_t>();
+      error->untag()->token_pos_ = d->ReadTokenPosition();
+      error->untag()->report_after_token_ = d->Read<bool>();
+      error->untag()->kind_ = d->Read<int8_t>();
     }
   }
 };
@@ -3093,12 +3100,13 @@
       : SerializationCluster("Instance"), cid_(cid) {
     ClassPtr cls = IsolateGroup::Current()->class_table()->At(cid);
     host_next_field_offset_in_words_ =
-        cls->ptr()->host_next_field_offset_in_words_;
+        cls->untag()->host_next_field_offset_in_words_;
     ASSERT(host_next_field_offset_in_words_ > 0);
 #if !defined(DART_PRECOMPILED_RUNTIME)
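     // The target_* values describe object layout for the architecture the
     // snapshot is built for, which may differ from the host.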
     target_next_field_offset_in_words_ =
-        cls->ptr()->target_next_field_offset_in_words_;
-    target_instance_size_in_words_ = cls->ptr()->target_instance_size_in_words_;
+        cls->untag()->target_next_field_offset_in_words_;
+    target_instance_size_in_words_ =
+        cls->untag()->target_instance_size_in_words_;
     ASSERT(target_next_field_offset_in_words_ > 0);
     ASSERT(target_instance_size_in_words_ > 0);
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
@@ -3118,7 +3126,7 @@
       // Skips unboxed fields
       if (!unboxed_fields_bitmap.Get(offset / kWordSize)) {
         ObjectPtr raw_obj = *reinterpret_cast<ObjectPtr*>(
-            reinterpret_cast<uword>(instance->ptr()) + offset);
+            reinterpret_cast<uword>(instance->untag()) + offset);
         s->Push(raw_obj);
       }
       offset += kWordSize;
@@ -3160,11 +3168,11 @@
         if (unboxed_fields_bitmap.Get(offset / kWordSize)) {
           // Writes 32 bits of the unboxed value at a time
           const uword value = *reinterpret_cast<uword*>(
-              reinterpret_cast<uword>(instance->ptr()) + offset);
+              reinterpret_cast<uword>(instance->untag()) + offset);
           s->WriteWordWith32BitWrites(value);
         } else {
           ObjectPtr raw_obj = *reinterpret_cast<ObjectPtr*>(
-              reinterpret_cast<uword>(instance->ptr()) + offset);
+              reinterpret_cast<uword>(instance->untag()) + offset);
           s->WriteElementRef(raw_obj, offset);
         }
         offset += kWordSize;
@@ -3239,19 +3247,19 @@
       while (offset < next_field_offset) {
         if (unboxed_fields_bitmap.Get(offset / kWordSize)) {
           uword* p = reinterpret_cast<uword*>(
-              reinterpret_cast<uword>(instance->ptr()) + offset);
+              reinterpret_cast<uword>(instance->untag()) + offset);
           // Reads 32 bits of the unboxed value at a time
           *p = d->ReadWordWith32BitReads();
         } else {
           ObjectPtr* p = reinterpret_cast<ObjectPtr*>(
-              reinterpret_cast<uword>(instance->ptr()) + offset);
+              reinterpret_cast<uword>(instance->untag()) + offset);
           *p = d->ReadRef();
         }
         offset += kWordSize;
       }
       if (offset < instance_size) {
         ObjectPtr* p = reinterpret_cast<ObjectPtr*>(
-            reinterpret_cast<uword>(instance->ptr()) + offset);
+            reinterpret_cast<uword>(instance->untag()) + offset);
         *p = Object::null();
         offset += kWordSize;
       }
@@ -3293,8 +3301,8 @@
       LibraryPrefixPtr prefix = objects_[i];
       AutoTraceObject(prefix);
       WriteFromTo(prefix);
-      s->Write<uint16_t>(prefix->ptr()->num_imports_);
-      s->Write<bool>(prefix->ptr()->is_deferred_load_);
+      s->Write<uint16_t>(prefix->untag()->num_imports_);
+      s->Write<bool>(prefix->untag()->is_deferred_load_);
     }
   }
 
@@ -3327,9 +3335,9 @@
       Deserializer::InitializeHeader(prefix, kLibraryPrefixCid,
                                      LibraryPrefix::InstanceSize());
       ReadFromTo(prefix);
-      prefix->ptr()->num_imports_ = d->Read<uint16_t>();
-      prefix->ptr()->is_deferred_load_ = d->Read<bool>();
-      prefix->ptr()->is_loaded_ = !prefix->ptr()->is_deferred_load_;
+      prefix->untag()->num_imports_ = d->Read<uint16_t>();
+      prefix->untag()->is_deferred_load_ = d->Read<bool>();
+      prefix->untag()->is_loaded_ = !prefix->untag()->is_deferred_load_;
     }
   }
 };
@@ -3350,12 +3358,12 @@
 
     PushFromTo(type);
 
-    if (type->ptr()->type_class_id_->IsHeapObject()) {
+    if (type->untag()->type_class_id_->IsHeapObject()) {
       // Type class is still an unresolved class.
       UNREACHABLE();
     }
 
-    SmiPtr raw_type_class_id = Smi::RawCast(type->ptr()->type_class_id_);
+    SmiPtr raw_type_class_id = Smi::RawCast(type->untag()->type_class_id_);
     ClassPtr type_class =
         s->isolate_group()->class_table()->At(Smi::Value(raw_type_class_id));
     s->Push(type_class);
@@ -3382,15 +3390,16 @@
   void WriteType(Serializer* s, TypePtr type) {
     AutoTraceObject(type);
     WriteFromTo(type);
-    ASSERT(type->ptr()->type_state_ < (1 << TypeLayout::kTypeStateBitSize));
-    ASSERT(type->ptr()->nullability_ < (1 << kNullabilityBitSize));
-    static_assert(TypeLayout::kTypeStateBitSize + kNullabilityBitSize <=
+    ASSERT(type->untag()->type_state_ < (1 << UntaggedType::kTypeStateBitSize));
+    ASSERT(type->untag()->nullability_ < (1 << kNullabilityBitSize));
+    static_assert(UntaggedType::kTypeStateBitSize + kNullabilityBitSize <=
                       kBitsPerByte * sizeof(uint8_t),
                   "Cannot pack type_state_ and nullability_ into a uint8_t");
-    const uint8_t combined = (type->ptr()->type_state_ << kNullabilityBitSize) |
-                             type->ptr()->nullability_;
-    ASSERT_EQUAL(type->ptr()->type_state_, combined >> kNullabilityBitSize);
-    ASSERT_EQUAL(type->ptr()->nullability_, combined & kNullabilityBitMask);
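+    // Pack type_state_ into the high bits and nullability_ into the low
+    // kNullabilityBitSize bits of one byte.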
+    const uint8_t combined =
+        (type->untag()->type_state_ << kNullabilityBitSize) |
+        type->untag()->nullability_;
+    ASSERT_EQUAL(type->untag()->type_state_, combined >> kNullabilityBitSize);
+    ASSERT_EQUAL(type->untag()->nullability_, combined & kNullabilityBitMask);
     s->Write<uint8_t>(combined);
   }
 
@@ -3420,8 +3429,8 @@
                                      stamp_canonical);
       ReadFromTo(type);
       const uint8_t combined = d->Read<uint8_t>();
-      type->ptr()->type_state_ = combined >> kNullabilityBitSize;
-      type->ptr()->nullability_ = combined & kNullabilityBitMask;
+      type->untag()->type_state_ = combined >> kNullabilityBitSize;
+      type->untag()->nullability_ = combined & kNullabilityBitMask;
     }
   }
 
@@ -3488,18 +3497,20 @@
   void WriteFunctionType(Serializer* s, FunctionTypePtr type) {
     AutoTraceObject(type);
     WriteFromTo(type);
-    ASSERT(type->ptr()->type_state_ <
-           (1 << FunctionTypeLayout::kTypeStateBitSize));
-    ASSERT(type->ptr()->nullability_ < (1 << kNullabilityBitSize));
-    static_assert(FunctionTypeLayout::kTypeStateBitSize + kNullabilityBitSize <=
-                      kBitsPerByte * sizeof(uint8_t),
-                  "Cannot pack type_state_ and nullability_ into a uint8_t");
-    const uint8_t combined = (type->ptr()->type_state_ << kNullabilityBitSize) |
-                             type->ptr()->nullability_;
-    ASSERT_EQUAL(type->ptr()->type_state_, combined >> kNullabilityBitSize);
-    ASSERT_EQUAL(type->ptr()->nullability_, combined & kNullabilityBitMask);
+    ASSERT(type->untag()->type_state_ <
+           (1 << UntaggedFunctionType::kTypeStateBitSize));
+    ASSERT(type->untag()->nullability_ < (1 << kNullabilityBitSize));
+    static_assert(
+        UntaggedFunctionType::kTypeStateBitSize + kNullabilityBitSize <=
+            kBitsPerByte * sizeof(uint8_t),
+        "Cannot pack type_state_ and nullability_ into a uint8_t");
+    const uint8_t combined =
+        (type->untag()->type_state_ << kNullabilityBitSize) |
+        type->untag()->nullability_;
+    ASSERT_EQUAL(type->untag()->type_state_, combined >> kNullabilityBitSize);
+    ASSERT_EQUAL(type->untag()->nullability_, combined & kNullabilityBitMask);
     s->Write<uint8_t>(combined);
-    s->Write<uint32_t>(type->ptr()->packed_fields_);
+    s->Write<uint32_t>(type->untag()->packed_fields_);
   }
 
   GrowableArray<FunctionTypePtr> objects_;
@@ -3531,9 +3542,9 @@
                                      stamp_canonical);
       ReadFromTo(type);
       const uint8_t combined = d->Read<uint8_t>();
-      type->ptr()->type_state_ = combined >> kNullabilityBitSize;
-      type->ptr()->nullability_ = combined & kNullabilityBitMask;
-      type->ptr()->packed_fields_ = d->Read<uint32_t>();
+      type->untag()->type_state_ = combined >> kNullabilityBitSize;
+      type->untag()->nullability_ = combined & kNullabilityBitMask;
+      type->untag()->packed_fields_ = d->Read<uint32_t>();
     }
   }
 
@@ -3682,18 +3693,18 @@
   void WriteTypeParameter(Serializer* s, TypeParameterPtr type) {
     AutoTraceObject(type);
     WriteFromTo(type);
-    s->Write<int32_t>(type->ptr()->parameterized_class_id_);
-    s->Write<uint16_t>(type->ptr()->base_);
-    s->Write<uint16_t>(type->ptr()->index_);
-    ASSERT(type->ptr()->flags_ < (1 << TypeParameterLayout::kFlagsBitSize));
-    ASSERT(type->ptr()->nullability_ < (1 << kNullabilityBitSize));
-    static_assert(TypeParameterLayout::kFlagsBitSize + kNullabilityBitSize <=
+    s->Write<int32_t>(type->untag()->parameterized_class_id_);
+    s->Write<uint16_t>(type->untag()->base_);
+    s->Write<uint16_t>(type->untag()->index_);
+    ASSERT(type->untag()->flags_ < (1 << UntaggedTypeParameter::kFlagsBitSize));
+    ASSERT(type->untag()->nullability_ < (1 << kNullabilityBitSize));
+    static_assert(UntaggedTypeParameter::kFlagsBitSize + kNullabilityBitSize <=
                       kBitsPerByte * sizeof(uint8_t),
                   "Cannot pack flags_ and nullability_ into a uint8_t");
-    const uint8_t combined = (type->ptr()->flags_ << kNullabilityBitSize) |
-                             type->ptr()->nullability_;
-    ASSERT_EQUAL(type->ptr()->flags_, combined >> kNullabilityBitSize);
-    ASSERT_EQUAL(type->ptr()->nullability_, combined & kNullabilityBitMask);
+    const uint8_t combined = (type->untag()->flags_ << kNullabilityBitSize) |
+                             type->untag()->nullability_;
+    ASSERT_EQUAL(type->untag()->flags_, combined >> kNullabilityBitSize);
+    ASSERT_EQUAL(type->untag()->nullability_, combined & kNullabilityBitMask);
     s->Write<uint8_t>(combined);
   }
 
@@ -3725,12 +3736,12 @@
                                      TypeParameter::InstanceSize(),
                                      stamp_canonical);
       ReadFromTo(type);
-      type->ptr()->parameterized_class_id_ = d->Read<int32_t>();
-      type->ptr()->base_ = d->Read<uint16_t>();
-      type->ptr()->index_ = d->Read<uint16_t>();
+      type->untag()->parameterized_class_id_ = d->Read<int32_t>();
+      type->untag()->base_ = d->Read<uint16_t>();
+      type->untag()->index_ = d->Read<uint16_t>();
       const uint8_t combined = d->Read<uint8_t>();
-      type->ptr()->flags_ = combined >> kNullabilityBitSize;
-      type->ptr()->nullability_ = combined & kNullabilityBitMask;
+      type->untag()->flags_ = combined >> kNullabilityBitSize;
+      type->untag()->nullability_ = combined & kNullabilityBitMask;
     }
   }
 
@@ -3858,7 +3869,7 @@
       MintPtr mint = mints_[i];
       s->AssignRef(mint);
       AutoTraceObject(mint);
-      s->Write<int64_t>(mint->ptr()->value_);
+      s->Write<int64_t>(mint->untag()->value_);
     }
   }
 
@@ -3889,7 +3900,7 @@
             AllocateUninitialized(old_space, Mint::InstanceSize()));
         Deserializer::InitializeHeader(mint, kMintCid, Mint::InstanceSize(),
                                        stamp_canonical);
-        mint->ptr()->value_ = value;
+        mint->untag()->value_ = value;
         d->AssignRef(mint);
       }
     }
@@ -3946,7 +3957,7 @@
     for (intptr_t i = 0; i < count; i++) {
       DoublePtr dbl = objects_[i];
       AutoTraceObject(dbl);
-      s->Write<double>(dbl->ptr()->value_);
+      s->Write<double>(dbl->untag()->value_);
     }
   }
 
@@ -3975,7 +3986,7 @@
       DoublePtr dbl = static_cast<DoublePtr>(d->Ref(id));
       Deserializer::InitializeHeader(dbl, kDoubleCid, Double::InstanceSize(),
                                      stamp_canonical);
-      dbl->ptr()->value_ = d->Read<double>();
+      dbl->untag()->value_ = d->Read<double>();
     }
   }
 
@@ -4088,7 +4099,7 @@
       TypedDataPtr data = objects_[i];
       s->AssignRef(data);
       AutoTraceObject(data);
-      const intptr_t length = Smi::Value(data->ptr()->length_);
+      const intptr_t length = Smi::Value(data->untag()->length_);
       s->WriteUnsigned(length);
     }
   }
@@ -4099,9 +4110,9 @@
     for (intptr_t i = 0; i < count; i++) {
       TypedDataPtr data = objects_[i];
       AutoTraceObject(data);
-      const intptr_t length = Smi::Value(data->ptr()->length_);
+      const intptr_t length = Smi::Value(data->untag()->length_);
       s->WriteUnsigned(length);
-      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data());
+      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->untag()->data());
       s->WriteBytes(cdata, length * element_size);
     }
   }
@@ -4142,9 +4153,9 @@
       Deserializer::InitializeHeader(data, cid_,
                                      TypedData::InstanceSize(length_in_bytes),
                                      stamp_canonical);
-      data->ptr()->length_ = Smi::New(length);
-      data->ptr()->RecomputeDataField();
-      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data());
+      data->untag()->length_ = Smi::New(length);
+      data->untag()->RecomputeDataField();
+      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->untag()->data());
       d->ReadBytes(cdata, length_in_bytes);
     }
   }
@@ -4259,9 +4270,9 @@
     for (intptr_t i = 0; i < count; i++) {
       ExternalTypedDataPtr data = objects_[i];
       AutoTraceObject(data);
-      const intptr_t length = Smi::Value(data->ptr()->length_);
+      const intptr_t length = Smi::Value(data->untag()->length_);
       s->WriteUnsigned(length);
-      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data_);
+      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->untag()->data_);
       s->Align(ExternalTypedData::kDataSerializationAlignment);
       s->WriteBytes(cdata, length * element_size);
     }
@@ -4299,9 +4310,9 @@
       const intptr_t length = d->ReadUnsigned();
       Deserializer::InitializeHeader(data, cid_,
                                      ExternalTypedData::InstanceSize());
-      data->ptr()->length_ = Smi::New(length);
+      data->untag()->length_ = Smi::New(length);
       d->Align(ExternalTypedData::kDataSerializationAlignment);
-      data->ptr()->data_ = const_cast<uint8_t*>(d->CurrentBufferAddress());
+      data->untag()->data_ = const_cast<uint8_t*>(d->CurrentBufferAddress());
       d->Advance(length * element_size);
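       // The payload is not copied: data_ points directly into the snapshot
       // buffer, which is then skipped over.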
       // No finalizer / external size 0.
     }
@@ -4402,9 +4413,9 @@
       RegExpPtr regexp = objects_[i];
       AutoTraceObject(regexp);
       WriteFromTo(regexp);
-      s->Write<int32_t>(regexp->ptr()->num_one_byte_registers_);
-      s->Write<int32_t>(regexp->ptr()->num_two_byte_registers_);
-      s->Write<int8_t>(regexp->ptr()->type_flags_);
+      s->Write<int32_t>(regexp->untag()->num_one_byte_registers_);
+      s->Write<int32_t>(regexp->untag()->num_two_byte_registers_);
+      s->Write<int8_t>(regexp->untag()->type_flags_);
     }
   }
 
@@ -4435,9 +4446,9 @@
       Deserializer::InitializeHeader(regexp, kRegExpCid,
                                      RegExp::InstanceSize());
       ReadFromTo(regexp);
-      regexp->ptr()->num_one_byte_registers_ = d->Read<int32_t>();
-      regexp->ptr()->num_two_byte_registers_ = d->Read<int32_t>();
-      regexp->ptr()->type_flags_ = d->Read<int8_t>();
+      regexp->untag()->num_one_byte_registers_ = d->Read<int32_t>();
+      regexp->untag()->num_two_byte_registers_ = d->Read<int32_t>();
+      regexp->untag()->type_flags_ = d->Read<int8_t>();
     }
   }
 };
@@ -4502,7 +4513,7 @@
       Deserializer::InitializeHeader(property, kWeakPropertyCid,
                                      WeakProperty::InstanceSize());
       ReadFromTo(property);
-      property->ptr()->next_ = WeakProperty::null();
+      property->untag()->next_ = WeakProperty::null();
     }
   }
 };
@@ -4517,11 +4528,11 @@
     LinkedHashMapPtr map = LinkedHashMap::RawCast(object);
     objects_.Add(map);
 
-    s->Push(map->ptr()->type_arguments_);
+    s->Push(map->untag()->type_arguments_);
 
-    intptr_t used_data = Smi::Value(map->ptr()->used_data_);
-    ArrayPtr data_array = map->ptr()->data_;
-    ObjectPtr* data_elements = data_array->ptr()->data();
+    intptr_t used_data = Smi::Value(map->untag()->used_data_);
+    ArrayPtr data_array = map->untag()->data_;
+    ObjectPtr* data_elements = data_array->untag()->data();
     for (intptr_t i = 0; i < used_data; i += 2) {
       ObjectPtr key = data_elements[i];
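       // A deleted pair is marked by storing the backing array itself as the
       // key; such entries are skipped.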
       if (key != data_array) {
@@ -4550,15 +4561,15 @@
 
       WriteField(map, type_arguments_);
 
-      const intptr_t used_data = Smi::Value(map->ptr()->used_data_);
+      const intptr_t used_data = Smi::Value(map->untag()->used_data_);
       ASSERT((used_data & 1) == 0);  // Keys + values, so must be even.
-      const intptr_t deleted_keys = Smi::Value(map->ptr()->deleted_keys_);
+      const intptr_t deleted_keys = Smi::Value(map->untag()->deleted_keys_);
 
       // Write out the number of (not deleted) key/value pairs that will follow.
       s->Write<int32_t>((used_data >> 1) - deleted_keys);
 
-      ArrayPtr data_array = map->ptr()->data_;
-      ObjectPtr* data_elements = data_array->ptr()->data();
+      ArrayPtr data_array = map->untag()->data_;
+      ObjectPtr* data_elements = data_array->untag()->data();
       for (intptr_t i = 0; i < used_data; i += 2) {
         ObjectPtr key = data_elements[i];
         if (key != data_array) {
@@ -4602,7 +4613,8 @@
                                      LinkedHashMap::InstanceSize(),
                                      stamp_canonical);
 
-      map->ptr()->type_arguments_ = static_cast<TypeArgumentsPtr>(d->ReadRef());
+      map->untag()->type_arguments_ =
+          static_cast<TypeArgumentsPtr>(d->ReadRef());
 
       // TODO(rmacnak): Reserve ref ids and co-allocate in ReadAlloc.
       intptr_t pairs = d->Read<int32_t>();
@@ -4613,21 +4625,21 @@
 
       ArrayPtr data = static_cast<ArrayPtr>(
           AllocateUninitialized(old_space, Array::InstanceSize(data_size)));
-      data->ptr()->type_arguments_ = TypeArguments::null();
-      data->ptr()->length_ = Smi::New(data_size);
+      data->untag()->type_arguments_ = TypeArguments::null();
+      data->untag()->length_ = Smi::New(data_size);
       intptr_t i;
       for (i = 0; i < used_data; i++) {
-        data->ptr()->data()[i] = d->ReadRef();
+        data->untag()->data()[i] = d->ReadRef();
       }
       for (; i < data_size; i++) {
-        data->ptr()->data()[i] = Object::null();
+        data->untag()->data()[i] = Object::null();
       }
 
-      map->ptr()->index_ = TypedData::null();
-      map->ptr()->hash_mask_ = Smi::New(0);
-      map->ptr()->data_ = data;
-      map->ptr()->used_data_ = Smi::New(used_data);
-      map->ptr()->deleted_keys_ = Smi::New(0);
+      map->untag()->index_ = TypedData::null();
+      map->untag()->hash_mask_ = Smi::New(0);
+      map->untag()->data_ = data;
+      map->untag()->used_data_ = Smi::New(used_data);
+      map->untag()->deleted_keys_ = Smi::New(0);
     }
   }
 };
@@ -4643,10 +4655,10 @@
     ArrayPtr array = Array::RawCast(object);
     objects_.Add(array);
 
-    s->Push(array->ptr()->type_arguments_);
-    const intptr_t length = Smi::Value(array->ptr()->length_);
+    s->Push(array->untag()->type_arguments_);
+    const intptr_t length = Smi::Value(array->untag()->length_);
     for (intptr_t i = 0; i < length; i++) {
-      s->Push(array->ptr()->data()[i]);
+      s->Push(array->untag()->data()[i]);
     }
   }
 
@@ -4658,7 +4670,7 @@
       ArrayPtr array = objects_[i];
       s->AssignRef(array);
       AutoTraceObject(array);
-      const intptr_t length = Smi::Value(array->ptr()->length_);
+      const intptr_t length = Smi::Value(array->untag()->length_);
       s->WriteUnsigned(length);
     }
   }
@@ -4668,11 +4680,11 @@
     for (intptr_t i = 0; i < count; i++) {
       ArrayPtr array = objects_[i];
       AutoTraceObject(array);
-      const intptr_t length = Smi::Value(array->ptr()->length_);
+      const intptr_t length = Smi::Value(array->untag()->length_);
       s->WriteUnsigned(length);
       WriteField(array, type_arguments_);
       for (intptr_t j = 0; j < length; j++) {
-        s->WriteElementRef(array->ptr()->data()[j], j);
+        s->WriteElementRef(array->untag()->data()[j], j);
       }
     }
   }
@@ -4708,11 +4720,11 @@
       const intptr_t length = d->ReadUnsigned();
       Deserializer::InitializeHeader(array, cid_, Array::InstanceSize(length),
                                      stamp_canonical);
-      array->ptr()->type_arguments_ =
+      array->untag()->type_arguments_ =
           static_cast<TypeArgumentsPtr>(d->ReadRef());
-      array->ptr()->length_ = Smi::New(length);
+      array->untag()->length_ = Smi::New(length);
       for (intptr_t j = 0; j < length; j++) {
-        array->ptr()->data()[j] = d->ReadRef();
+        array->untag()->data()[j] = d->ReadRef();
       }
     }
   }
@@ -4740,7 +4752,7 @@
       OneByteStringPtr str = objects_[i];
       s->AssignRef(str);
       AutoTraceObject(str);
-      const intptr_t length = Smi::Value(str->ptr()->length_);
+      const intptr_t length = Smi::Value(str->untag()->length_);
       s->WriteUnsigned(length);
     }
   }
@@ -4750,10 +4762,10 @@
     for (intptr_t i = 0; i < count; i++) {
       OneByteStringPtr str = objects_[i];
       AutoTraceObject(str);
-      const intptr_t length = Smi::Value(str->ptr()->length_);
+      const intptr_t length = Smi::Value(str->untag()->length_);
       ASSERT(length <= compiler::target::kSmiMax);
       s->WriteUnsigned(length);
-      s->WriteBytes(str->ptr()->data(), length);
+      s->WriteBytes(str->untag()->data(), length);
     }
   }
 
@@ -4777,7 +4789,7 @@
       for (intptr_t i = start_index_; i < stop_index_; i++) {
         str ^= refs.At(i);
         str2 ^= table.InsertOrGet(str);
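         // InsertOrGet returns an equal string that was already in the table,
         // or str itself if it was newly inserted.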
-        if (str.raw() == str2.raw()) {
+        if (str.ptr() == str2.ptr()) {
           str.SetCanonical();
         } else {
           refs.SetAt(i, str2);
@@ -4814,11 +4826,11 @@
       Deserializer::InitializeHeader(str, kOneByteStringCid,
                                      OneByteString::InstanceSize(length),
                                      stamp_canonical);
-      str->ptr()->length_ = Smi::New(length);
+      str->untag()->length_ = Smi::New(length);
       StringHasher hasher;
       for (intptr_t j = 0; j < length; j++) {
         uint8_t code_unit = d->Read<uint8_t>();
-        str->ptr()->data()[j] = code_unit;
+        str->untag()->data()[j] = code_unit;
         hasher.Add(code_unit);
       }
       String::SetCachedHash(str, hasher.Finalize());
@@ -4845,7 +4857,7 @@
       TwoByteStringPtr str = objects_[i];
       s->AssignRef(str);
       AutoTraceObject(str);
-      const intptr_t length = Smi::Value(str->ptr()->length_);
+      const intptr_t length = Smi::Value(str->untag()->length_);
       s->WriteUnsigned(length);
     }
   }
@@ -4855,10 +4867,11 @@
     for (intptr_t i = 0; i < count; i++) {
       TwoByteStringPtr str = objects_[i];
       AutoTraceObject(str);
-      const intptr_t length = Smi::Value(str->ptr()->length_);
+      const intptr_t length = Smi::Value(str->untag()->length_);
       ASSERT(length <= (compiler::target::kSmiMax / 2));
       s->WriteUnsigned(length);
-      s->WriteBytes(reinterpret_cast<uint8_t*>(str->ptr()->data()), length * 2);
+      s->WriteBytes(reinterpret_cast<uint8_t*>(str->untag()->data()),
+                    length * 2);
     }
   }
 
@@ -4893,12 +4906,12 @@
       Deserializer::InitializeHeader(str, kTwoByteStringCid,
                                      TwoByteString::InstanceSize(length),
                                      stamp_canonical);
-      str->ptr()->length_ = Smi::New(length);
+      str->untag()->length_ = Smi::New(length);
       StringHasher hasher;
       for (intptr_t j = 0; j < length; j++) {
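         // Each code unit is stored as two bytes, least-significant first.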
         uint16_t code_unit = d->Read<uint8_t>();
         code_unit = code_unit | (d->Read<uint8_t>() << 8);
-        str->ptr()->data()[j] = code_unit;
+        str->untag()->data()[j] = code_unit;
         hasher.Add(code_unit);
       }
       String::SetCachedHash(str, hasher.Finalize());
@@ -4935,34 +4948,34 @@
     // written into the snapshot.
 
     s->AddBaseObject(Object::null(), "Null", "null");
-    s->AddBaseObject(Object::sentinel().raw(), "Null", "sentinel");
-    s->AddBaseObject(Object::transition_sentinel().raw(), "Null",
+    s->AddBaseObject(Object::sentinel().ptr(), "Null", "sentinel");
+    s->AddBaseObject(Object::transition_sentinel().ptr(), "Null",
                      "transition_sentinel");
-    s->AddBaseObject(Object::empty_array().raw(), "Array", "<empty_array>");
-    s->AddBaseObject(Object::zero_array().raw(), "Array", "<zero_array>");
-    s->AddBaseObject(Object::dynamic_type().raw(), "Type", "<dynamic type>");
-    s->AddBaseObject(Object::void_type().raw(), "Type", "<void type>");
-    s->AddBaseObject(Object::empty_type_arguments().raw(), "TypeArguments",
+    s->AddBaseObject(Object::empty_array().ptr(), "Array", "<empty_array>");
+    s->AddBaseObject(Object::zero_array().ptr(), "Array", "<zero_array>");
+    s->AddBaseObject(Object::dynamic_type().ptr(), "Type", "<dynamic type>");
+    s->AddBaseObject(Object::void_type().ptr(), "Type", "<void type>");
+    s->AddBaseObject(Object::empty_type_arguments().ptr(), "TypeArguments",
                      "[]");
-    s->AddBaseObject(Bool::True().raw(), "bool", "true");
-    s->AddBaseObject(Bool::False().raw(), "bool", "false");
-    ASSERT(Object::extractor_parameter_types().raw() != Object::null());
-    s->AddBaseObject(Object::extractor_parameter_types().raw(), "Array",
+    s->AddBaseObject(Bool::True().ptr(), "bool", "true");
+    s->AddBaseObject(Bool::False().ptr(), "bool", "false");
+    ASSERT(Object::extractor_parameter_types().ptr() != Object::null());
+    s->AddBaseObject(Object::extractor_parameter_types().ptr(), "Array",
                      "<extractor parameter types>");
-    ASSERT(Object::extractor_parameter_names().raw() != Object::null());
-    s->AddBaseObject(Object::extractor_parameter_names().raw(), "Array",
+    ASSERT(Object::extractor_parameter_names().ptr() != Object::null());
+    s->AddBaseObject(Object::extractor_parameter_names().ptr(), "Array",
                      "<extractor parameter names>");
-    s->AddBaseObject(Object::empty_context_scope().raw(), "ContextScope",
+    s->AddBaseObject(Object::empty_context_scope().ptr(), "ContextScope",
                      "<empty>");
-    s->AddBaseObject(Object::empty_object_pool().raw(), "ObjectPool",
+    s->AddBaseObject(Object::empty_object_pool().ptr(), "ObjectPool",
                      "<empty>");
-    s->AddBaseObject(Object::empty_compressed_stackmaps().raw(),
+    s->AddBaseObject(Object::empty_compressed_stackmaps().ptr(),
                      "CompressedStackMaps", "<empty>");
-    s->AddBaseObject(Object::empty_descriptors().raw(), "PcDescriptors",
+    s->AddBaseObject(Object::empty_descriptors().ptr(), "PcDescriptors",
                      "<empty>");
-    s->AddBaseObject(Object::empty_var_descriptors().raw(),
+    s->AddBaseObject(Object::empty_var_descriptors().ptr(),
                      "LocalVarDescriptors", "<empty>");
-    s->AddBaseObject(Object::empty_exception_handlers().raw(),
+    s->AddBaseObject(Object::empty_exception_handlers().ptr(),
                      "ExceptionHandlers", "<empty>");
 
     for (intptr_t i = 0; i < ArgumentsDescriptor::kCachedDescriptorCount; i++) {
@@ -4989,25 +5002,25 @@
 
     if (!Snapshot::IncludesCode(s->kind())) {
       for (intptr_t i = 0; i < StubCode::NumEntries(); i++) {
-        s->AddBaseObject(StubCode::EntryAt(i).raw(), "Code", "<stub code>");
+        s->AddBaseObject(StubCode::EntryAt(i).ptr(), "Code", "<stub code>");
       }
     }
   }
 
   void PushRoots(Serializer* s) {
-    s->Push(symbols_.raw());
+    s->Push(symbols_.ptr());
     if (Snapshot::IncludesCode(s->kind())) {
       for (intptr_t i = 0; i < StubCode::NumEntries(); i++) {
-        s->Push(StubCode::EntryAt(i).raw());
+        s->Push(StubCode::EntryAt(i).ptr());
       }
     }
   }
 
   void WriteRoots(Serializer* s) {
-    s->WriteRootRef(symbols_.raw(), "symbol-table");
+    s->WriteRootRef(symbols_.ptr(), "symbol-table");
     if (Snapshot::IncludesCode(s->kind())) {
       for (intptr_t i = 0; i < StubCode::NumEntries(); i++) {
-        s->WriteRootRef(StubCode::EntryAt(i).raw(),
+        s->WriteRootRef(StubCode::EntryAt(i).ptr(),
                         zone_->PrintToString("Stub:%s", StubCode::NameAt(i)));
       }
     }
@@ -5028,25 +5041,25 @@
     // written into the snapshot.
 
     d->AddBaseObject(Object::null());
-    d->AddBaseObject(Object::sentinel().raw());
-    d->AddBaseObject(Object::transition_sentinel().raw());
-    d->AddBaseObject(Object::empty_array().raw());
-    d->AddBaseObject(Object::zero_array().raw());
-    d->AddBaseObject(Object::dynamic_type().raw());
-    d->AddBaseObject(Object::void_type().raw());
-    d->AddBaseObject(Object::empty_type_arguments().raw());
-    d->AddBaseObject(Bool::True().raw());
-    d->AddBaseObject(Bool::False().raw());
-    ASSERT(Object::extractor_parameter_types().raw() != Object::null());
-    d->AddBaseObject(Object::extractor_parameter_types().raw());
-    ASSERT(Object::extractor_parameter_names().raw() != Object::null());
-    d->AddBaseObject(Object::extractor_parameter_names().raw());
-    d->AddBaseObject(Object::empty_context_scope().raw());
-    d->AddBaseObject(Object::empty_object_pool().raw());
-    d->AddBaseObject(Object::empty_compressed_stackmaps().raw());
-    d->AddBaseObject(Object::empty_descriptors().raw());
-    d->AddBaseObject(Object::empty_var_descriptors().raw());
-    d->AddBaseObject(Object::empty_exception_handlers().raw());
+    d->AddBaseObject(Object::sentinel().ptr());
+    d->AddBaseObject(Object::transition_sentinel().ptr());
+    d->AddBaseObject(Object::empty_array().ptr());
+    d->AddBaseObject(Object::zero_array().ptr());
+    d->AddBaseObject(Object::dynamic_type().ptr());
+    d->AddBaseObject(Object::void_type().ptr());
+    d->AddBaseObject(Object::empty_type_arguments().ptr());
+    d->AddBaseObject(Bool::True().ptr());
+    d->AddBaseObject(Bool::False().ptr());
+    ASSERT(Object::extractor_parameter_types().ptr() != Object::null());
+    d->AddBaseObject(Object::extractor_parameter_types().ptr());
+    ASSERT(Object::extractor_parameter_names().ptr() != Object::null());
+    d->AddBaseObject(Object::extractor_parameter_names().ptr());
+    d->AddBaseObject(Object::empty_context_scope().ptr());
+    d->AddBaseObject(Object::empty_object_pool().ptr());
+    d->AddBaseObject(Object::empty_compressed_stackmaps().ptr());
+    d->AddBaseObject(Object::empty_descriptors().ptr());
+    d->AddBaseObject(Object::empty_var_descriptors().ptr());
+    d->AddBaseObject(Object::empty_exception_handlers().ptr());
 
     for (intptr_t i = 0; i < ArgumentsDescriptor::kCachedDescriptorCount; i++) {
       d->AddBaseObject(ArgumentsDescriptor::cached_args_descriptors_[i]);
@@ -5069,7 +5082,7 @@
 
     if (!Snapshot::IncludesCode(d->kind())) {
       for (intptr_t i = 0; i < StubCode::NumEntries(); i++) {
-        d->AddBaseObject(StubCode::EntryAt(i).raw());
+        d->AddBaseObject(StubCode::EntryAt(i).ptr());
       }
     }
   }
@@ -5164,7 +5177,7 @@
     } else {
       // Base objects carried over from WriteVMSnapshot.
       for (intptr_t i = 0; i < base_objects_->length(); i++) {
-        s->AddBaseObject((*base_objects_)[i]->raw());
+        s->AddBaseObject((*base_objects_)[i]->ptr());
       }
     }
   }
@@ -5273,7 +5286,7 @@
   void AddBaseObjects(Serializer* s) {
     ZoneGrowableArray<Object*>* objects = unit_->parent()->objects();
     for (intptr_t i = 0; i < objects->length(); i++) {
-      s->AddBaseObject(objects->At(i)->raw());
+      s->AddBaseObject(objects->At(i)->ptr());
     }
   }
 
@@ -5282,26 +5295,26 @@
     for (intptr_t i = 0; i < num_deferred_objects; i++) {
       const Object* deferred_object = (*unit_->deferred_objects())[i];
       ASSERT(deferred_object->IsCode());
-      CodePtr code = static_cast<CodePtr>(deferred_object->raw());
+      CodePtr code = static_cast<CodePtr>(deferred_object->ptr());
       if (FLAG_use_bare_instructions) {
         if (FLAG_retain_function_objects) {
-          ObjectPoolPtr pool = code->ptr()->object_pool_;
+          ObjectPoolPtr pool = code->untag()->object_pool_;
           if (pool != ObjectPool::null()) {
-            const intptr_t length = pool->ptr()->length_;
-            uint8_t* entry_bits = pool->ptr()->entry_bits();
+            const intptr_t length = pool->untag()->length_;
+            uint8_t* entry_bits = pool->untag()->entry_bits();
             for (intptr_t i = 0; i < length; i++) {
               auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
               if (entry_type == ObjectPool::EntryType::kTaggedObject) {
-                s->Push(pool->ptr()->data()[i].raw_obj_);
+                s->Push(pool->untag()->data()[i].raw_obj_);
               }
             }
           }
         }
       } else {
-        s->Push(code->ptr()->object_pool_);
+        s->Push(code->untag()->object_pool_);
       }
-      s->Push(code->ptr()->compressed_stackmaps_);
-      s->Push(code->ptr()->code_source_map_);
+      s->Push(code->untag()->compressed_stackmaps_);
+      s->Push(code->untag()->code_source_map_);
     }
   }
 
@@ -5310,7 +5323,7 @@
     intptr_t start_index = 0;
     intptr_t num_deferred_objects = unit_->deferred_objects()->length();
     if (num_deferred_objects != 0) {
-      start_index = s->RefId(unit_->deferred_objects()->At(0)->raw());
+      start_index = s->RefId(unit_->deferred_objects()->At(0)->ptr());
       ASSERT(start_index > 0);
     }
     s->WriteUnsigned(start_index);
@@ -5318,30 +5331,30 @@
     for (intptr_t i = 0; i < num_deferred_objects; i++) {
       const Object* deferred_object = (*unit_->deferred_objects())[i];
       ASSERT(deferred_object->IsCode());
-      CodePtr code = static_cast<CodePtr>(deferred_object->raw());
+      CodePtr code = static_cast<CodePtr>(deferred_object->ptr());
       ASSERT(s->RefId(code) == (start_index + i));
-      s->WriteInstructions(code->ptr()->instructions_,
-                           code->ptr()->unchecked_offset_, code, false);
+      s->WriteInstructions(code->untag()->instructions_,
+                           code->untag()->unchecked_offset_, code, false);
       if (!FLAG_use_bare_instructions) {
-        s->WriteRootRef(code->ptr()->object_pool_, "deferred-code");
+        s->WriteRootRef(code->untag()->object_pool_, "deferred-code");
       }
-      s->WriteRootRef(code->ptr()->compressed_stackmaps_, "deferred-code");
-      s->WriteRootRef(code->ptr()->code_source_map_, "deferred-code");
+      s->WriteRootRef(code->untag()->compressed_stackmaps_, "deferred-code");
+      s->WriteRootRef(code->untag()->code_source_map_, "deferred-code");
     }
 
     if (FLAG_use_bare_instructions && FLAG_retain_function_objects) {
       ObjectPoolPtr pool =
           s->isolate_group()->object_store()->global_object_pool();
-      const intptr_t length = pool->ptr()->length_;
-      uint8_t* entry_bits = pool->ptr()->entry_bits();
+      const intptr_t length = pool->untag()->length_;
+      uint8_t* entry_bits = pool->untag()->entry_bits();
       intptr_t last_write = 0;
       for (intptr_t i = 0; i < length; i++) {
         auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
         if (entry_type == ObjectPool::EntryType::kTaggedObject) {
-          if (s->IsWritten(pool->ptr()->data()[i].raw_obj_)) {
+          if (s->IsWritten(pool->untag()->data()[i].raw_obj_)) {
             intptr_t skip = i - last_write;
             s->WriteUnsigned(skip);
-            s->WriteRootRef(pool->ptr()->data()[i].raw_obj_,
+            s->WriteRootRef(pool->untag()->data()[i].raw_obj_,
                             "deferred-literal");
             last_write = i;
           }
@@ -5375,35 +5388,35 @@
     for (intptr_t id = deferred_start_index_; id < deferred_stop_index_; id++) {
       CodePtr code = static_cast<CodePtr>(d->Ref(id));
       d->ReadInstructions(code, false);
-      if (code->ptr()->owner_->IsFunction()) {
-        FunctionPtr func = static_cast<FunctionPtr>(code->ptr()->owner_);
-        uword entry_point = code->ptr()->entry_point_;
+      if (code->untag()->owner_->IsFunction()) {
+        FunctionPtr func = static_cast<FunctionPtr>(code->untag()->owner_);
+        uword entry_point = code->untag()->entry_point_;
         ASSERT(entry_point != 0);
-        func->ptr()->entry_point_ = entry_point;
-        uword unchecked_entry_point = code->ptr()->unchecked_entry_point_;
+        func->untag()->entry_point_ = entry_point;
+        uword unchecked_entry_point = code->untag()->unchecked_entry_point_;
         ASSERT(unchecked_entry_point != 0);
-        func->ptr()->unchecked_entry_point_ = unchecked_entry_point;
+        func->untag()->unchecked_entry_point_ = unchecked_entry_point;
       }
       if (!FLAG_use_bare_instructions) {
-        code->ptr()->object_pool_ = static_cast<ObjectPoolPtr>(d->ReadRef());
+        code->untag()->object_pool_ = static_cast<ObjectPoolPtr>(d->ReadRef());
       }
-      code->ptr()->compressed_stackmaps_ =
+      code->untag()->compressed_stackmaps_ =
           static_cast<CompressedStackMapsPtr>(d->ReadRef());
-      code->ptr()->code_source_map_ =
+      code->untag()->code_source_map_ =
           static_cast<CodeSourceMapPtr>(d->ReadRef());
     }
 
     if (FLAG_use_bare_instructions && FLAG_retain_function_objects) {
       ObjectPoolPtr pool =
           d->isolate_group()->object_store()->global_object_pool();
-      const intptr_t length = pool->ptr()->length_;
-      uint8_t* entry_bits = pool->ptr()->entry_bits();
+      const intptr_t length = pool->untag()->length_;
+      uint8_t* entry_bits = pool->untag()->entry_bits();
       for (intptr_t i = d->ReadUnsigned(); i < length; i += d->ReadUnsigned()) {
         auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
         ASSERT(entry_type == ObjectPool::EntryType::kTaggedObject);
         // The existing entry will usually be null, but it might also be an
         // equivalent object that was duplicated in another loading unit.
-        pool->ptr()->data()[i].raw_obj_ = d->ReadRef();
+        pool->untag()->data()[i].raw_obj_ = d->ReadRef();
       }
     }
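
The read loop above, for (intptr_t i = d->ReadUnsigned(); i < length; i += d->ReadUnsigned()), mirrors the writer's skip = i - last_write bookkeeping a few hunks earlier: only the pool indices that were actually written are transmitted, each encoded as its distance from the previously written index. A standalone sketch of the same gap scheme, with plain vectors standing in for the VM's streams (names are illustrative):

    #include <cstdint>
    #include <vector>

    // Writer: emit each ascending index as a gap from the previous one;
    // the first gap is measured from index 0.
    void WriteGaps(std::vector<uint32_t>* out,
                   const std::vector<uint32_t>& indices) {
      uint32_t last = 0;
      for (uint32_t i : indices) {
        out->push_back(i - last);
        last = i;
      }
    }

    // Reader: accumulate gaps to recover the original ascending indices.
    std::vector<uint32_t> ReadGaps(const std::vector<uint32_t>& gaps) {
      std::vector<uint32_t> indices;
      uint32_t i = 0;
      for (uint32_t g : gaps) {
        i += g;
        indices.push_back(i);
      }
      return indices;
    }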
 
@@ -5612,28 +5625,28 @@
       name = FunctionSerializationCluster::MakeDisambiguatedFunctionName(this,
                                                                          func);
       owner_ref_name = "owner_";
-      owner = func->ptr()->owner_;
+      owner = func->untag()->owner_;
       break;
     }
     case kClassCid: {
       ClassPtr cls = static_cast<ClassPtr>(obj);
       type = "Class";
-      name_string = cls->ptr()->name_;
+      name_string = cls->untag()->name_;
       owner_ref_name = "library_";
-      owner = cls->ptr()->library_;
+      owner = cls->untag()->library_;
       break;
     }
     case kPatchClassCid: {
       PatchClassPtr patch_cls = static_cast<PatchClassPtr>(obj);
       type = "PatchClass";
       owner_ref_name = "patched_class_";
-      owner = patch_cls->ptr()->patched_class_;
+      owner = patch_cls->untag()->patched_class_;
       break;
     }
     case kLibraryCid: {
       LibraryPtr lib = static_cast<LibraryPtr>(obj);
       type = "Library";
-      name_string = lib->ptr()->url_;
+      name_string = lib->untag()->url_;
       break;
     }
     default:
@@ -5854,7 +5867,7 @@
       auto unit_objects = loading_units_->At(i)->deferred_objects();
       CodeSerializationCluster::Sort(unit_objects);
       for (intptr_t j = 0; j < unit_objects->length(); j++) {
-        cluster->deferred_objects()->Add(unit_objects->At(j)->raw());
+        cluster->deferred_objects()->Add(unit_objects->At(j)->ptr());
       }
     }
   }
@@ -5878,7 +5891,7 @@
       auto in =
           loading_units_->At(current_loading_unit_id_)->deferred_objects();
       for (intptr_t i = 0; i < in->length(); i++) {
-        code_objects.Add(in->At(i)->raw());
+        code_objects.Add(in->At(i)->ptr());
       }
     }
 
@@ -5997,7 +6010,7 @@
     is_canonical = true;
   } else {
     cid = object->GetClassId();
-    is_canonical = object->ptr()->IsCanonical();
+    is_canonical = object->untag()->IsCanonical();
   }
 
   SerializationCluster** cluster_ref =
@@ -6029,13 +6042,13 @@
   }
   Object& object = Object::Handle(raw_object);
   OS::PrintErr("Unexpected object (%s, %s): 0x%" Px " %s\n", message,
-               Snapshot::KindToCString(kind_), static_cast<uword>(object.raw()),
+               Snapshot::KindToCString(kind_), static_cast<uword>(object.ptr()),
                object.ToCString());
 #if defined(SNAPSHOT_BACKTRACE)
   while (!object.IsNull()) {
     object = ParentOf(object);
     OS::PrintErr("referenced by 0x%" Px " %s\n",
-                 static_cast<uword>(object.raw()), object.ToCString());
+                 static_cast<uword>(object.ptr()), object.ToCString());
   }
 #endif
   OS::Abort();
@@ -6044,8 +6057,8 @@
 #if defined(SNAPSHOT_BACKTRACE)
 ObjectPtr Serializer::ParentOf(const Object& object) {
   for (intptr_t i = 0; i < parent_pairs_.length(); i += 2) {
-    if (parent_pairs_[i]->raw() == object.raw()) {
-      return parent_pairs_[i + 1]->raw();
+    if (parent_pairs_[i]->ptr() == object.ptr()) {
+      return parent_pairs_[i + 1]->ptr();
     }
   }
   return Object::null();
@@ -6752,21 +6765,21 @@
 #if defined(DART_PRECOMPILED_RUNTIME)
     if (FLAG_use_bare_instructions) {
       uword entry_point = StubCode::NotLoaded().EntryPoint();
-      code->ptr()->entry_point_ = entry_point;
-      code->ptr()->unchecked_entry_point_ = entry_point;
-      code->ptr()->monomorphic_entry_point_ = entry_point;
-      code->ptr()->monomorphic_unchecked_entry_point_ = entry_point;
-      code->ptr()->instructions_length_ = 0;
+      code->untag()->entry_point_ = entry_point;
+      code->untag()->unchecked_entry_point_ = entry_point;
+      code->untag()->monomorphic_entry_point_ = entry_point;
+      code->untag()->monomorphic_unchecked_entry_point_ = entry_point;
+      code->untag()->instructions_length_ = 0;
       return;
     }
 #endif
     InstructionsPtr instr = StubCode::NotLoaded().instructions();
     uint32_t unchecked_offset = 0;
-    code->ptr()->instructions_ = instr;
+    code->untag()->instructions_ = instr;
 #if defined(DART_PRECOMPILED_RUNTIME)
-    code->ptr()->instructions_length_ = Instructions::Size(instr);
+    code->untag()->instructions_length_ = Instructions::Size(instr);
 #else
-    code->ptr()->unchecked_offset_ = unchecked_offset;
+    code->untag()->unchecked_offset_ = unchecked_offset;
 #endif
     Code::InitializeCachedEntryPointsFrom(code, instr, unchecked_offset);
     return;
@@ -6775,7 +6788,7 @@
 #if defined(DART_PRECOMPILED_RUNTIME)
   if (FLAG_use_bare_instructions) {
     // There are no serialized RawInstructions objects in this mode.
-    code->ptr()->instructions_ = Instructions::null();
+    code->untag()->instructions_ = Instructions::null();
     previous_text_offset_ += ReadUnsigned();
     const uword payload_start =
         image_reader_->GetBareInstructionsAt(previous_text_offset_);
@@ -6794,10 +6807,10 @@
     const uword monomorphic_entry_point =
         payload_start + monomorphic_entry_offset;
 
-    code->ptr()->entry_point_ = entry_point;
-    code->ptr()->unchecked_entry_point_ = entry_point + unchecked_offset;
-    code->ptr()->monomorphic_entry_point_ = monomorphic_entry_point;
-    code->ptr()->monomorphic_unchecked_entry_point_ =
+    code->untag()->entry_point_ = entry_point;
+    code->untag()->unchecked_entry_point_ = entry_point + unchecked_offset;
+    code->untag()->monomorphic_entry_point_ = monomorphic_entry_point;
+    code->untag()->monomorphic_unchecked_entry_point_ =
         monomorphic_entry_point + unchecked_offset;
     return;
   }
@@ -6805,17 +6818,17 @@
 
   InstructionsPtr instr = image_reader_->GetInstructionsAt(Read<uint32_t>());
   uint32_t unchecked_offset = ReadUnsigned();
-  code->ptr()->instructions_ = instr;
+  code->untag()->instructions_ = instr;
 #if defined(DART_PRECOMPILED_RUNTIME)
-  code->ptr()->instructions_length_ = Instructions::Size(instr);
+  code->untag()->instructions_length_ = Instructions::Size(instr);
 #else
-  code->ptr()->unchecked_offset_ = unchecked_offset;
+  code->untag()->unchecked_offset_ = unchecked_offset;
   if (kind() == Snapshot::kFullJIT) {
     const uint32_t active_offset = Read<uint32_t>();
     instr = image_reader_->GetInstructionsAt(active_offset);
     unchecked_offset = ReadUnsigned();
   }
-  code->ptr()->active_instructions_ = instr;
+  code->untag()->active_instructions_ = instr;
 #endif
   Code::InitializeCachedEntryPointsFrom(code, instr, unchecked_offset);
 }
@@ -6830,7 +6843,7 @@
       CodePtr code = static_cast<CodePtr>(refs.At(id));
       uword start = Code::PayloadStartOf(code);
       ASSERT(start <= previous_end);
-      code->ptr()->instructions_length_ = previous_end - start;
+      code->untag()->instructions_length_ = previous_end - start;
       previous_end = start;
     }
 
@@ -6906,7 +6919,7 @@
     // Must not perform any other type of allocation, which might trigger GC
     // while there are still uninitialized objects.
     NoSafepointScope no_safepoint;
-    refs_ = refs.raw();
+    refs_ = refs.ptr();
 
     roots->AddBaseObjects(this);
 
@@ -7074,11 +7087,11 @@
   ASSERT(object_store != NULL);
 
   // These type arguments must always be retained.
-  ASSERT(object_store->type_argument_int()->ptr()->IsCanonical());
-  ASSERT(object_store->type_argument_double()->ptr()->IsCanonical());
-  ASSERT(object_store->type_argument_string()->ptr()->IsCanonical());
-  ASSERT(object_store->type_argument_string_dynamic()->ptr()->IsCanonical());
-  ASSERT(object_store->type_argument_string_string()->ptr()->IsCanonical());
+  ASSERT(object_store->type_argument_int()->untag()->IsCanonical());
+  ASSERT(object_store->type_argument_double()->untag()->IsCanonical());
+  ASSERT(object_store->type_argument_string()->untag()->IsCanonical());
+  ASSERT(object_store->type_argument_string_dynamic()->untag()->IsCanonical());
+  ASSERT(object_store->type_argument_string_string()->untag()->IsCanonical());
 
   serializer.ReserveHeader();
   serializer.WriteVersionAndFeatures(false);
@@ -7485,13 +7498,13 @@
     for (intptr_t i = 0; i < pool.Length(); i++) {
       if (pool.TypeAt(i) == ObjectPool::EntryType::kTaggedObject) {
         entry = pool.ObjectAt(i);
-        if (entry.raw() == StubCode::SwitchableCallMiss().raw()) {
+        if (entry.ptr() == StubCode::SwitchableCallMiss().ptr()) {
           smi = Smi::FromAlignedAddress(
               StubCode::SwitchableCallMiss().MonomorphicEntryPoint());
           pool.SetTypeAt(i, ObjectPool::EntryType::kImmediate,
                          ObjectPool::Patchability::kPatchable);
           pool.SetObjectAt(i, smi);
-        } else if (entry.raw() == StubCode::MegamorphicCall().raw()) {
+        } else if (entry.ptr() == StubCode::MegamorphicCall().ptr()) {
           smi = Smi::FromAlignedAddress(
               StubCode::MegamorphicCall().MonomorphicEntryPoint());
           pool.SetTypeAt(i, ObjectPool::EntryType::kImmediate,
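
Every hunk above follows the same three-way rename that this change applies across the VM: handles now expose their tagged pointer via ptr() (formerly raw()); tagged pointers such as CodePtr expose the raw heap layout via untag() (formerly, confusingly, also called ptr()); and the layout classes themselves move from FooLayout/ObjectLayout to UntaggedFoo/UntaggedObject. A schematic of the three layers with simplified stand-in types, assuming the usual heap-object tag value of 1 (illustrative only, not the VM's real declarations):

    #include <cstdint>

    struct UntaggedFoo {   // was FooLayout: the raw in-memory field layout
      intptr_t some_field_;
    };

    struct FooPtr {        // a tagged pointer into the managed heap
      uintptr_t tagged_value_;
      UntaggedFoo* untag() const {  // was ptr(): strips the tag bits
        return reinterpret_cast<UntaggedFoo*>(tagged_value_ - 1);
      }
    };

    class Foo {            // a handle wrapping a tagged pointer
     public:
      FooPtr ptr() const { return value_; }  // was raw()

     private:
      FooPtr value_;
    };

So code->untag()->object_pool_ reads a field out of the raw layout, while handle.ptr() merely unwraps the handle; after the rename the two operations no longer share a name.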
diff --git a/runtime/vm/clustered_snapshot.h b/runtime/vm/clustered_snapshot.h
index 47a4594..d35b83a 100644
--- a/runtime/vm/clustered_snapshot.h
+++ b/runtime/vm/clustered_snapshot.h
@@ -327,18 +327,18 @@
 
   template <typename T, typename... P>
   void WriteFromTo(T obj, P&&... args) {
-    ObjectPtr* from = obj->ptr()->from();
-    ObjectPtr* to = obj->ptr()->to_snapshot(kind(), args...);
+    ObjectPtr* from = obj->untag()->from();
+    ObjectPtr* to = obj->untag()->to_snapshot(kind(), args...);
     for (ObjectPtr* p = from; p <= to; p++) {
-      WriteOffsetRef(*p, (p - reinterpret_cast<ObjectPtr*>(obj->ptr())) *
+      WriteOffsetRef(*p, (p - reinterpret_cast<ObjectPtr*>(obj->untag())) *
                              sizeof(ObjectPtr));
     }
   }
 
   template <typename T, typename... P>
   void PushFromTo(T obj, P&&... args) {
-    ObjectPtr* from = obj->ptr()->from();
-    ObjectPtr* to = obj->ptr()->to_snapshot(kind(), args...);
+    ObjectPtr* from = obj->untag()->from();
+    ObjectPtr* to = obj->untag()->to_snapshot(kind(), args...);
     for (ObjectPtr* p = from; p <= to; p++) {
       Push(*p);
     }
@@ -347,7 +347,7 @@
   void WriteTokenPosition(TokenPosition pos) { Write(pos.Serialize()); }
 
   void WriteCid(intptr_t cid) {
-    COMPILE_ASSERT(ObjectLayout::kClassIdTagSize <= 32);
+    COMPILE_ASSERT(UntaggedObject::kClassIdTagSize <= 32);
     Write<int32_t>(cid);
   }
 
@@ -487,7 +487,7 @@
 
 #define PushFromTo(obj, ...) s->PushFromTo(obj, ##__VA_ARGS__);
 
-#define WriteField(obj, field) s->WritePropertyRef(obj->ptr()->field, #field)
+#define WriteField(obj, field) s->WritePropertyRef(obj->untag()->field, #field)
 
 class SerializerWritingObjectScope {
  public:
@@ -598,23 +598,23 @@
 
   void AssignRef(ObjectPtr object) {
     ASSERT(next_ref_index_ <= num_objects_);
-    refs_->ptr()->data()[next_ref_index_] = object;
+    refs_->untag()->data()[next_ref_index_] = object;
     next_ref_index_++;
   }
 
   ObjectPtr Ref(intptr_t index) const {
     ASSERT(index > 0);
     ASSERT(index <= num_objects_);
-    return refs_->ptr()->data()[index];
+    return refs_->untag()->data()[index];
   }
 
   ObjectPtr ReadRef() { return Ref(ReadUnsigned()); }
 
   template <typename T, typename... P>
   void ReadFromTo(T obj, P&&... params) {
-    ObjectPtr* from = obj->ptr()->from();
-    ObjectPtr* to_snapshot = obj->ptr()->to_snapshot(kind(), params...);
-    ObjectPtr* to = obj->ptr()->to(params...);
+    ObjectPtr* from = obj->untag()->from();
+    ObjectPtr* to_snapshot = obj->untag()->to_snapshot(kind(), params...);
+    ObjectPtr* to = obj->untag()->to(params...);
     for (ObjectPtr* p = from; p <= to_snapshot; p++) {
       *p = ReadRef();
     }
@@ -632,7 +632,7 @@
   }
 
   intptr_t ReadCid() {
-    COMPILE_ASSERT(ObjectLayout::kClassIdTagSize <= 32);
+    COMPILE_ASSERT(UntaggedObject::kClassIdTagSize <= 32);
     return Read<int32_t>();
   }
 
diff --git a/runtime/vm/code_descriptors.cc b/runtime/vm/code_descriptors.cc
index 6a75f04..dd8bc1d 100644
--- a/runtime/vm/code_descriptors.cc
+++ b/runtime/vm/code_descriptors.cc
@@ -20,7 +20,7 @@
     : function_(Function::Handle(
           zone,
           FLAG_check_token_positions && (inline_id_to_function != nullptr)
-              ? inline_id_to_function->At(0)->raw()
+              ? inline_id_to_function->At(0)->ptr()
               : Function::null())),
       script_(Script::Handle(
           zone,
@@ -30,7 +30,7 @@
       prev_deopt_id(0),
       prev_token_pos(0) {}
 
-void DescriptorList::AddDescriptor(PcDescriptorsLayout::Kind kind,
+void DescriptorList::AddDescriptor(UntaggedPcDescriptors::Kind kind,
                                    intptr_t pc_offset,
                                    intptr_t deopt_id,
                                    const TokenPosition token_pos,
@@ -39,20 +39,20 @@
   // Yield index 0 is reserved for normal entry.
   RELEASE_ASSERT(yield_index != 0);
 
-  ASSERT((kind == PcDescriptorsLayout::kRuntimeCall) ||
-         (kind == PcDescriptorsLayout::kBSSRelocation) ||
-         (kind == PcDescriptorsLayout::kOther) ||
-         (yield_index != PcDescriptorsLayout::kInvalidYieldIndex) ||
+  ASSERT((kind == UntaggedPcDescriptors::kRuntimeCall) ||
+         (kind == UntaggedPcDescriptors::kBSSRelocation) ||
+         (kind == UntaggedPcDescriptors::kOther) ||
+         (yield_index != UntaggedPcDescriptors::kInvalidYieldIndex) ||
          (deopt_id != DeoptId::kNone));
 
   // When precompiling, we only use pc descriptors for exceptions,
   // relocations and yield indices.
   if (!FLAG_precompiled_mode || try_index != -1 ||
-      yield_index != PcDescriptorsLayout::kInvalidYieldIndex ||
-      kind == PcDescriptorsLayout::kBSSRelocation) {
+      yield_index != UntaggedPcDescriptors::kInvalidYieldIndex ||
+      kind == UntaggedPcDescriptors::kBSSRelocation) {
     const int32_t kind_and_metadata =
-        PcDescriptorsLayout::KindAndMetadata::Encode(kind, try_index,
-                                                     yield_index);
+        UntaggedPcDescriptors::KindAndMetadata::Encode(kind, try_index,
+                                                       yield_index);
 
     encoded_data_.WriteSLEB128(kind_and_metadata);
     encoded_data_.WriteSLEB128(pc_offset - prev_pc_offset);
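
Both WriteSLEB128 calls above append variable-length integers: the kind/metadata word and, crucially, the delta pc_offset - prev_pc_offset, so a stream of nearby PC offsets costs roughly one byte per entry. A minimal signed-LEB128 encoder, assuming a plain byte buffer in place of the VM's writer class:

    #include <cstdint>
    #include <vector>

    void WriteSLEB128(std::vector<uint8_t>* out, int32_t value) {
      while (true) {
        uint8_t byte = value & 0x7f;
        value >>= 7;  // arithmetic shift on all mainstream compilers
        // Stop once the rest of the value is pure sign extension of the
        // payload just emitted (bit 6 of the byte carries the sign).
        if ((value == 0 && (byte & 0x40) == 0) ||
            (value == -1 && (byte & 0x40) != 0)) {
          out->push_back(byte);
          return;
        }
        out->push_back(byte | 0x80);  // continuation bit: more bytes follow
      }
    }

Decoding reverses this by accumulating 7-bit groups and sign-extending from the final byte's bit 6.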
@@ -65,17 +65,18 @@
                                 function_.end_token_pos())) {
           FATAL("Token position %s for PC descriptor %s at offset 0x%" Px
                 " invalid for function %s (%s, %s)",
-                token_pos.ToCString(), PcDescriptorsLayout::KindToCString(kind),
-                pc_offset, function_.ToFullyQualifiedCString(),
+                token_pos.ToCString(),
+                UntaggedPcDescriptors::KindToCString(kind), pc_offset,
+                function_.ToFullyQualifiedCString(),
                 function_.token_pos().ToCString(),
                 function_.end_token_pos().ToCString());
         }
         if (!script_.IsNull() && !script_.IsValidTokenPosition(token_pos)) {
           FATAL("Token position %s for PC descriptor %s at offset 0x%" Px
                 " invalid for script %s of function %s",
-                token_pos.ToCString(), PcDescriptorsLayout::KindToCString(kind),
-                pc_offset, script_.ToCString(),
-                function_.ToFullyQualifiedCString());
+                token_pos.ToCString(),
+                UntaggedPcDescriptors::KindToCString(kind), pc_offset,
+                script_.ToCString(), function_.ToFullyQualifiedCString());
         }
       }
       const int32_t encoded_pos = token_pos.Serialize();
@@ -90,7 +91,7 @@
 
 PcDescriptorsPtr DescriptorList::FinalizePcDescriptors(uword entry_point) {
   if (encoded_data_.bytes_written() == 0) {
-    return Object::empty_descriptors().raw();
+    return Object::empty_descriptors().ptr();
   }
   return PcDescriptors::New(encoded_data_.buffer(),
                             encoded_data_.bytes_written());
@@ -114,7 +115,7 @@
 
 CompressedStackMapsPtr CompressedStackMapsBuilder::Finalize() const {
   if (encoded_bytes_.bytes_written() == 0) {
-    return Object::empty_compressed_stackmaps().raw();
+    return Object::empty_compressed_stackmaps().ptr();
   }
   return CompressedStackMaps::NewInlined(encoded_bytes_.buffer(),
                                          encoded_bytes_.bytes_written());
@@ -124,7 +125,7 @@
     uword entry_point) const {
   intptr_t num_handlers = Length();
   if (num_handlers == 0) {
-    return Object::empty_exception_handlers().raw();
+    return Object::empty_exception_handlers().ptr();
   }
   const ExceptionHandlers& handlers =
       ExceptionHandlers::Handle(ExceptionHandlers::New(num_handlers));
@@ -149,7 +150,7 @@
       handlers.SetHandledTypes(i, *list_[i].handler_types);
     }
   }
-  return handlers.raw();
+  return handlers.ptr();
 }
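
FinalizePcDescriptors, CompressedStackMapsBuilder::Finalize, and FinalizeExceptionHandlers all take the same early exit: when nothing was recorded they return one shared canonical empty object (Object::empty_descriptors() and friends) rather than allocating a fresh zero-length instance per call. The shape of that pattern, sketched with an invented Table type:

    #include <cstddef>

    struct Table {
      // One immutable instance shared by every empty result.
      static const Table& Empty() {
        static const Table kEmpty;  // created once, never mutated
        return kEmpty;
      }
    };

    const Table* Finalize(size_t num_entries) {
      if (num_entries == 0) {
        return &Table::Empty();  // no allocation on the common empty path
      }
      return new Table();  // otherwise build a real, populated table
    }

Beyond saving allocations, a single canonical empty object also lets the snapshotter register it once as a base object (see the AddBaseObject(Object::empty_descriptors().ptr()) call earlier), so empty metadata costs exactly one ref id in a snapshot.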
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
@@ -233,7 +234,7 @@
   for (intptr_t i = 0; i < stream_.bytes_written(); i++) {
     dest[i] = src[i];
   }
-  return td.raw();
+  return td.ptr();
 }
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
 
@@ -437,12 +438,12 @@
   BufferAdvancePC(pc_offset - buffered_pc_offset_);
 }
 
-void CodeSourceMapBuilder::NoteDescriptor(PcDescriptorsLayout::Kind kind,
+void CodeSourceMapBuilder::NoteDescriptor(UntaggedPcDescriptors::Kind kind,
                                           int32_t pc_offset,
                                           const InstructionSource& source) {
   const uint8_t kCanThrow =
-      PcDescriptorsLayout::kIcCall | PcDescriptorsLayout::kUnoptStaticCall |
-      PcDescriptorsLayout::kRuntimeCall | PcDescriptorsLayout::kOther;
+      UntaggedPcDescriptors::kIcCall | UntaggedPcDescriptors::kUnoptStaticCall |
+      UntaggedPcDescriptors::kRuntimeCall | UntaggedPcDescriptors::kOther;
   if ((kind & kCanThrow) != 0) {
     StartInliningInterval(pc_offset, source);
     BufferChangePosition(source.token_pos);
@@ -464,7 +465,7 @@
 intptr_t CodeSourceMapBuilder::GetFunctionId(intptr_t inline_id) {
   const Function& function = *inline_id_to_function_[inline_id];
   for (intptr_t i = 0; i < inlined_functions_.Length(); i++) {
-    if (inlined_functions_.At(i) == function.raw()) {
+    if (inlined_functions_.At(i) == function.ptr()) {
       return i;
     }
   }
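
The loop above is the lookup half of an interning scheme: inlined functions are compared by identity (function.ptr()) against the ones already collected, and the function id is simply the position in that list. A sketch of the complete idiom, under the assumption that the code following this hunk appends the function and returns the fresh index (that tail is outside the diff context):

    #include <cstddef>
    #include <vector>

    // Identity-based interning: return the existing slot, or append.
    template <typename T>
    size_t InternByIdentity(std::vector<const T*>* table, const T* value) {
      for (size_t i = 0; i < table->size(); ++i) {
        if ((*table)[i] == value) {
          return i;  // already interned: reuse its id
        }
      }
      table->push_back(value);
      return table->size() - 1;  // fresh id for a first occurrence
    }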
@@ -486,7 +487,7 @@
 
 ArrayPtr CodeSourceMapBuilder::InliningIdToFunction() {
   if (inlined_functions_.Length() == 0) {
-    return Object::empty_array().raw();
+    return Object::empty_array().ptr();
   }
   return Array::MakeFixedLength(inlined_functions_);
 }
@@ -499,7 +500,7 @@
   const auto& map = CodeSourceMap::Handle(zone_, CodeSourceMap::New(length));
   NoSafepointScope no_safepoint;
   memmove(map.Data(), stream_.buffer(), length);
-  return map.raw();
+  return map.ptr();
 }
 
 void CodeSourceMapBuilder::BufferChangePosition(TokenPosition pos) {
diff --git a/runtime/vm/code_descriptors.h b/runtime/vm/code_descriptors.h
index 5356c50..0d15c38 100644
--- a/runtime/vm/code_descriptors.h
+++ b/runtime/vm/code_descriptors.h
@@ -24,7 +24,7 @@
 
   ~DescriptorList() {}
 
-  void AddDescriptor(PcDescriptorsLayout::Kind kind,
+  void AddDescriptor(UntaggedPcDescriptors::Kind kind,
                      intptr_t pc_offset,
                      intptr_t deopt_id,
                      TokenPosition token_pos,
@@ -240,7 +240,7 @@
 
   void BeginCodeSourceRange(int32_t pc_offset, const InstructionSource& source);
   void EndCodeSourceRange(int32_t pc_offset, const InstructionSource& source);
-  void NoteDescriptor(PcDescriptorsLayout::Kind kind,
+  void NoteDescriptor(UntaggedPcDescriptors::Kind kind,
                       int32_t pc_offset,
                       const InstructionSource& source);
   void NoteNullCheck(int32_t pc_offset,
diff --git a/runtime/vm/code_descriptors_test.cc b/runtime/vm/code_descriptors_test.cc
index 2fd788b..e096382 100644
--- a/runtime/vm/code_descriptors_test.cc
+++ b/runtime/vm/code_descriptors_test.cc
@@ -99,7 +99,7 @@
       PcDescriptors::Handle(code.pc_descriptors());
   int call_count = 0;
   PcDescriptors::Iterator iter(descriptors,
-                               PcDescriptorsLayout::kUnoptStaticCall);
+                               UntaggedPcDescriptors::kUnoptStaticCall);
   CompressedStackMapsBuilder compressed_maps_builder(thread->zone());
   while (iter.MoveNext()) {
     compressed_maps_builder.AddEntry(iter.PcOffset(), stack_bitmap, 0);
@@ -145,7 +145,7 @@
 
   for (intptr_t i = 0; i < num_token_positions; i++) {
     const TokenPosition& tp = TokenPosition::Deserialize(token_positions[i]);
-    descriptors->AddDescriptor(PcDescriptorsLayout::kRuntimeCall, 0, 0, tp, 0,
+    descriptors->AddDescriptor(UntaggedPcDescriptors::kRuntimeCall, 0, 0, tp, 0,
                                1);
   }
 
@@ -154,7 +154,7 @@
 
   ASSERT(!finalized_descriptors.IsNull());
   PcDescriptors::Iterator it(finalized_descriptors,
-                             PcDescriptorsLayout::kRuntimeCall);
+                             UntaggedPcDescriptors::kRuntimeCall);
 
   intptr_t i = 0;
   while (it.MoveNext()) {
diff --git a/runtime/vm/code_patcher_arm.cc b/runtime/vm/code_patcher_arm.cc
index 38729b1..89de68a 100644
--- a/runtime/vm/code_patcher_arm.cc
+++ b/runtime/vm/code_patcher_arm.cc
@@ -73,7 +73,7 @@
   ICData& ic_data = ICData::Handle();
   ic_data ^= static_call.Data();
   if (ic_data_result != NULL) {
-    *ic_data_result = ic_data.raw();
+    *ic_data_result = ic_data.ptr();
   }
   return ic_data.GetTargetAt(0);
 }
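
The ic_data ^= static_call.Data() line is the handle machinery's checked assignment: ^= stores the value and, in debug builds, verifies that it actually has the handle's expected class, making it a runtime-checked down-cast rather than a blind one. A loose analogue using standard C++ RTTI in place of the VM's class-id check (types and names here are invented):

    #include <cassert>

    struct Object { virtual ~Object() = default; };
    struct ICData : Object {};

    // Rough stand-in for `typed_handle ^= some_object`.
    template <typename T>
    void CheckedAssign(T*& slot, Object* value) {
      // Debug-only type check, mirroring the handle's class-id assert.
      assert(value == nullptr || dynamic_cast<T*>(value) != nullptr);
      slot = static_cast<T*>(value);
    }

In release builds the check compiles away and the operation behaves like a plain assignment, which is why the pattern appears on hot paths throughout the patchers.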
diff --git a/runtime/vm/code_patcher_arm64.cc b/runtime/vm/code_patcher_arm64.cc
index f13fb80..a1d4c4d 100644
--- a/runtime/vm/code_patcher_arm64.cc
+++ b/runtime/vm/code_patcher_arm64.cc
@@ -109,7 +109,7 @@
   ICData& ic_data = ICData::Handle();
   ic_data ^= static_call.Data();
   if (ic_data_result != NULL) {
-    *ic_data_result = ic_data.raw();
+    *ic_data_result = ic_data.ptr();
   }
   return ic_data.GetTargetAt(0);
 }
diff --git a/runtime/vm/code_patcher_arm64_test.cc b/runtime/vm/code_patcher_arm64_test.cc
index 7d3ddc1..54fc1b9 100644
--- a/runtime/vm/code_patcher_arm64_test.cc
+++ b/runtime/vm/code_patcher_arm64_test.cc
@@ -30,7 +30,7 @@
       String::Handle(Symbols::New(thread, "callerFunction"));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   const Function& function = Function::Handle(Function::New(
-      signature, function_name, FunctionLayout::kRegularFunction, true, false,
+      signature, function_name, UntaggedFunction::kRegularFunction, true, false,
       false, false, false, owner_class, TokenPosition::kNoSource));
 
   const String& target_name = String::Handle(String::New("targetFunction"));
diff --git a/runtime/vm/code_patcher_arm_test.cc b/runtime/vm/code_patcher_arm_test.cc
index 6e30b8e..c9026d5 100644
--- a/runtime/vm/code_patcher_arm_test.cc
+++ b/runtime/vm/code_patcher_arm_test.cc
@@ -30,7 +30,7 @@
       String::Handle(Symbols::New(thread, "callerFunction"));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   const Function& function = Function::Handle(Function::New(
-      signature, function_name, FunctionLayout::kRegularFunction, true, false,
+      signature, function_name, UntaggedFunction::kRegularFunction, true, false,
       false, false, false, owner_class, TokenPosition::kNoSource));
 
   const String& target_name = String::Handle(String::New("targetFunction"));
diff --git a/runtime/vm/code_patcher_ia32.cc b/runtime/vm/code_patcher_ia32.cc
index 2c123dc..3104115 100644
--- a/runtime/vm/code_patcher_ia32.cc
+++ b/runtime/vm/code_patcher_ia32.cc
@@ -94,14 +94,14 @@
     return LoadUnaligned(reinterpret_cast<ObjectPtr*>(start_ + 1));
   }
   void set_data(const Object& data) const {
-    StoreUnaligned(reinterpret_cast<ObjectPtr*>(start_ + 1), data.raw());
+    StoreUnaligned(reinterpret_cast<ObjectPtr*>(start_ + 1), data.ptr());
   }
 
   CodePtr target() const {
     return LoadUnaligned(reinterpret_cast<CodePtr*>(start_ + 6));
   }
   void set_target(const Code& target) const {
-    StoreUnaligned(reinterpret_cast<CodePtr*>(start_ + 6), target.raw());
+    StoreUnaligned(reinterpret_cast<CodePtr*>(start_ + 6), target.ptr());
   }
 
  private:
@@ -147,7 +147,7 @@
 
   void set_target(const Code& target) const {
     uword* target_addr = reinterpret_cast<uword*>(start_ + 1);
-    uword imm = static_cast<uword>(target.raw());
+    uword imm = static_cast<uword>(target.ptr());
     *target_addr = imm;
     CPU::FlushICache(start_ + 1, sizeof(imm));
   }
@@ -238,7 +238,7 @@
   ICData& ic_data = ICData::Handle();
   ic_data ^= static_call.ic_data();
   if (ic_data_result != NULL) {
-    *ic_data_result = ic_data.raw();
+    *ic_data_result = ic_data.ptr();
   }
   return ic_data.GetTargetAt(0);
 }
diff --git a/runtime/vm/code_patcher_ia32_test.cc b/runtime/vm/code_patcher_ia32_test.cc
index ed7a48b..36c6ec5 100644
--- a/runtime/vm/code_patcher_ia32_test.cc
+++ b/runtime/vm/code_patcher_ia32_test.cc
@@ -30,7 +30,7 @@
       String::Handle(Symbols::New(thread, "callerFunction"));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   const Function& function = Function::Handle(Function::New(
-      signature, function_name, FunctionLayout::kRegularFunction, true, false,
+      signature, function_name, UntaggedFunction::kRegularFunction, true, false,
       false, false, false, owner_class, TokenPosition::kNoSource));
 
   const String& target_name = String::Handle(String::New("targetFunction"));
diff --git a/runtime/vm/code_patcher_x64.cc b/runtime/vm/code_patcher_x64.cc
index e918bfdd..c916789 100644
--- a/runtime/vm/code_patcher_x64.cc
+++ b/runtime/vm/code_patcher_x64.cc
@@ -78,7 +78,7 @@
   CodePtr target() const {
     Code& code = Code::Handle();
     code ^= object_pool_.ObjectAt(code_index_);
-    return code.raw();
+    return code.ptr();
   }
 
   void set_target(const Code& target) const {
@@ -197,7 +197,7 @@
   CodePtr Target() const {
     Code& code = Code::Handle();
     code ^= object_pool_.ObjectAt(code_index_);
-    return code.raw();
+    return code.ptr();
   }
 
   void SetTarget(const Code& target) const {
@@ -476,7 +476,7 @@
   ICData& ic_data = ICData::Handle();
   ic_data ^= static_call.ic_data();
   if (ic_data_result != NULL) {
-    *ic_data_result = ic_data.raw();
+    *ic_data_result = ic_data.ptr();
   }
   return ic_data.GetTargetAt(0);
 }
diff --git a/runtime/vm/code_patcher_x64_test.cc b/runtime/vm/code_patcher_x64_test.cc
index 2daf04c..e5666e3 100644
--- a/runtime/vm/code_patcher_x64_test.cc
+++ b/runtime/vm/code_patcher_x64_test.cc
@@ -30,7 +30,7 @@
       String::Handle(Symbols::New(thread, "callerFunction"));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   const Function& function = Function::Handle(Function::New(
-      signature, function_name, FunctionLayout::kRegularFunction, true, false,
+      signature, function_name, UntaggedFunction::kRegularFunction, true, false,
       false, false, false, owner_class, TokenPosition::kNoSource));
 
   const String& target_name = String::Handle(String::New("targetFunction"));
diff --git a/runtime/vm/compilation_trace.cc b/runtime/vm/compilation_trace.cc
index e1bc89d..3aa1561 100644
--- a/runtime/vm/compilation_trace.cc
+++ b/runtime/vm/compilation_trace.cc
@@ -105,7 +105,7 @@
     *newline = 0;
     error_ = CompileTriple(uri, cls_name, func_name);
     if (error_.IsError()) {
-      return error_.raw();
+      return error_.ptr();
     }
     cursor = newline + 1;
   }
@@ -125,10 +125,10 @@
     arguments_descriptor = ArgumentsDescriptor::NewBoxed(kTypeArgsLen, argc);
     dispatcher = closure_class.GetInvocationDispatcher(
         Symbols::Call(), arguments_descriptor,
-        FunctionLayout::kInvokeFieldDispatcher, true /* create_if_absent */);
+        UntaggedFunction::kInvokeFieldDispatcher, true /* create_if_absent */);
     error_ = CompileFunction(dispatcher);
     if (error_.IsError()) {
-      return error_.raw();
+      return error_.ptr();
     }
   }
 
@@ -141,7 +141,7 @@
     if (function2_.HasCode()) {
       result = CompileFunction(function_);
       if (result.IsError()) {
-        error_ = result.raw();
+        error_ = result.ptr();
         return false;  // Stop iteration.
       }
     }
@@ -239,7 +239,7 @@
                   function_name_.ToCString(),
                   Error::Cast(error_).ToErrorCString());
       }
-      return error_.raw();
+      return error_.ptr();
     }
 
     function_ = cls_.LookupFunctionAllowPrivate(function_name_);
@@ -265,7 +265,7 @@
                   class_name_.ToCString(), function_name_.ToCString(),
                   Error::Cast(error_).ToErrorCString());
             }
-            return error_.raw();
+            return error_.ptr();
           }
         }
       }
@@ -281,7 +281,7 @@
   }
 
   if (!field_.IsNull() && field_.is_const() && field_.is_static() &&
-      (field_.StaticValue() == Object::sentinel().raw())) {
+      (field_.StaticValue() == Object::sentinel().ptr())) {
     processed = true;
     error_ = field_.InitializeStatic();
     if (error_.IsError()) {
@@ -292,7 +292,7 @@
             field_.ToCString(), uri_.ToCString(), class_name_.ToCString(),
             function_name_.ToCString(), Error::Cast(error_).ToErrorCString());
       }
-      return error_.raw();
+      return error_.ptr();
     }
   }
 
@@ -306,7 +306,7 @@
                   function_name_.ToCString(),
                   Error::Cast(error_).ToErrorCString());
       }
-      return error_.raw();
+      return error_.ptr();
     }
     if (add_closure) {
       function_ = function_.ImplicitClosureFunction();
@@ -318,7 +318,7 @@
               uri_.ToCString(), class_name_.ToCString(),
               function_name_.ToCString(), Error::Cast(error_).ToErrorCString());
         }
-        return error_.raw();
+        return error_.ptr();
       }
     } else if (is_dyn) {
       function_name_ = function_.name();  // With private mangling.
@@ -334,7 +334,7 @@
               uri_.ToCString(), class_name_.ToCString(),
               function_name_.ToCString(), Error::Cast(error_).ToErrorCString());
         }
-        return error_.raw();
+        return error_.ptr();
       }
     }
   }
@@ -351,7 +351,7 @@
             uri_.ToCString(), class_name_.ToCString(),
             function_name_.ToCString(), Error::Cast(error_).ToErrorCString());
       }
-      return error_.raw();
+      return error_.ptr();
     }
   }
 
@@ -371,12 +371,12 @@
 
   error_ = Compiler::CompileFunction(thread_, function);
   if (error_.IsError()) {
-    return error_.raw();
+    return error_.ptr();
   }
 
   SpeculateInstanceCallTargets(function);
 
-  return error_.raw();
+  return error_.ptr();
 }
 
 // For instance calls, if the receiver's static type has one concrete
@@ -536,7 +536,7 @@
 
   call_sites_ = function.ic_data_array();
   if (call_sites_.IsNull()) {
-    call_sites_ = Object::empty_array().raw();  // Remove edge case.
+    call_sites_ = Object::empty_array().ptr();  // Remove edge case.
   }
 
   // First element is edge counters.
@@ -618,23 +618,23 @@
 
   error_ = CheckHeader();
   if (error_.IsError()) {
-    return error_.raw();
+    return error_.ptr();
   }
 
   error_ = LoadClasses();
   if (error_.IsError()) {
-    return error_.raw();
+    return error_.ptr();
   }
 
   error_ = LoadFields();
   if (error_.IsError()) {
-    return error_.raw();
+    return error_.ptr();
   }
 
   while (stream_->PendingBytes() > 0) {
     error_ = LoadFunction();
     if (error_.IsError()) {
-      return error_.raw();
+      return error_.ptr();
     }
   }
 
@@ -645,7 +645,7 @@
         (func_.usage_counter() >= FLAG_optimization_counter_threshold)) {
       error_ = Compiler::CompileOptimizedFunction(thread_, func_);
       if (error_.IsError()) {
-        return error_.raw();
+        return error_.ptr();
       }
     }
   }
@@ -741,7 +741,7 @@
     if (!skip && (num_fields > 0)) {
       error_ = cls_.EnsureIsFinalized(thread_);
       if (error_.IsError()) {
-        return error_.raw();
+        return error_.ptr();
       }
       fields_ = cls_.fields();
     }
@@ -809,14 +809,14 @@
   if (!cls_.IsNull()) {
     error_ = cls_.EnsureIsFinalized(thread_);
     if (error_.IsError()) {
-      return error_.raw();
+      return error_.ptr();
     }
   } else {
     skip = true;
   }
 
   func_name_ = ReadString();  // Without private mangling.
-  FunctionLayout::Kind kind = static_cast<FunctionLayout::Kind>(ReadInt());
+  UntaggedFunction::Kind kind = static_cast<UntaggedFunction::Kind>(ReadInt());
   const TokenPosition& token_pos = TokenPosition::Deserialize(ReadInt());
   intptr_t usage = ReadInt();
   intptr_t inlining_depth = ReadInt();
@@ -836,11 +836,11 @@
   if (!skip) {
     error_ = Compiler::CompileFunction(thread_, func_);
     if (error_.IsError()) {
-      return error_.raw();
+      return error_.ptr();
     }
     call_sites_ = func_.ic_data_array();
     if (call_sites_.IsNull()) {
-      call_sites_ = Object::empty_array().raw();  // Remove edge case.
+      call_sites_ = Object::empty_array().ptr();  // Remove edge case.
     }
     if (call_sites_.Length() != num_call_sites + 1) {
       skip = true;
@@ -928,7 +928,7 @@
   return Error::null();
 }
 
-FunctionPtr TypeFeedbackLoader::FindFunction(FunctionLayout::Kind kind,
+FunctionPtr TypeFeedbackLoader::FindFunction(UntaggedFunction::Kind kind,
                                              const TokenPosition& token_pos) {
   if (cls_name_.Equals(Symbols::TopLevel())) {
     func_ = lib_.LookupFunctionAllowPrivate(func_name_);
@@ -938,7 +938,7 @@
 
   if (!func_.IsNull()) {
     // Found regular method.
-  } else if (kind == FunctionLayout::kMethodExtractor) {
+  } else if (kind == UntaggedFunction::kMethodExtractor) {
     ASSERT(Field::IsGetterName(func_name_));
     // Without private mangling:
     String& name = String::Handle(zone_, Field::NameFromGetter(func_name_));
@@ -950,7 +950,7 @@
     } else {
       func_ = Function::null();
     }
-  } else if (kind == FunctionLayout::kDynamicInvocationForwarder) {
+  } else if (kind == UntaggedFunction::kDynamicInvocationForwarder) {
     // Without private mangling:
     String& name = String::Handle(
         zone_, Function::DemangleDynamicInvocationForwarderName(func_name_));
@@ -962,7 +962,7 @@
     } else {
       func_ = Function::null();
     }
-  } else if (kind == FunctionLayout::kClosureFunction) {
+  } else if (kind == UntaggedFunction::kClosureFunction) {
     // Note this lookup relies on parent functions appearing before child
     // functions in the serialized feedback, so the parent will have already
     // been compiled unoptimized and the child function created and added to
@@ -976,7 +976,7 @@
   }
 
   if (!func_.IsNull()) {
-    if (kind == FunctionLayout::kImplicitClosureFunction) {
+    if (kind == UntaggedFunction::kImplicitClosureFunction) {
       func_ = func_.ImplicitClosureFunction();
     }
     if (func_.is_abstract() || (func_.kind() != kind)) {
@@ -984,7 +984,7 @@
     }
   }
 
-  return func_.raw();
+  return func_.ptr();
 }
 
 ClassPtr TypeFeedbackLoader::ReadClassByName() {
@@ -1010,7 +1010,7 @@
       }
     }
   }
-  return cls_.raw();
+  return cls_.ptr();
 }
 
 StringPtr TypeFeedbackLoader::ReadString() {
diff --git a/runtime/vm/compilation_trace.h b/runtime/vm/compilation_trace.h
index 459ca27..aa4a6b7 100644
--- a/runtime/vm/compilation_trace.h
+++ b/runtime/vm/compilation_trace.h
@@ -102,7 +102,7 @@
   ObjectPtr LoadClasses();
   ObjectPtr LoadFields();
   ObjectPtr LoadFunction();
-  FunctionPtr FindFunction(FunctionLayout::Kind kind,
+  FunctionPtr FindFunction(UntaggedFunction::Kind kind,
                            const TokenPosition& token_pos);
 
   ClassPtr ReadClassByName();
diff --git a/runtime/vm/compiler/aot/aot_call_specializer.cc b/runtime/vm/compiler/aot/aot_call_specializer.cc
index 33bb8f0..4ea2220 100644
--- a/runtime/vm/compiler/aot/aot_call_specializer.cc
+++ b/runtime/vm/compiler/aot/aot_call_specializer.cc
@@ -56,7 +56,7 @@
       isolate_group->object_store()->unique_dynamic_targets());
   ASSERT(fname.IsSymbol());
   *function = functions_map.GetOrNull(fname);
-  ASSERT(functions_map.Release().raw() ==
+  ASSERT(functions_map.Release().ptr() ==
          isolate_group->object_store()->unique_dynamic_targets());
 }
 
@@ -155,7 +155,7 @@
     return false;
   }
 
-  if (call->function_name().raw() != Symbols::GetRuntimeType().raw()) {
+  if (call->function_name().ptr() != Symbols::GetRuntimeType().ptr()) {
     return false;
   }
 
@@ -165,7 +165,7 @@
   const Function& function =
       Function::Handle(Z, call->ResolveForReceiverClass(cls));
   ASSERT(!function.IsNull());
-  const Function& target = Function::ZoneHandle(Z, function.raw());
+  const Function& target = Function::ZoneHandle(Z, function.ptr());
   StaticCallInstr* static_call =
       StaticCallInstr::FromCall(Z, call, target, call->CallCount());
   // Since the result is either a Type or a FunctionType, we cannot pin it.
@@ -812,7 +812,7 @@
 
   if (has_one_target) {
     const Function& target = targets.FirstTarget();
-    FunctionLayout::Kind function_kind = target.kind();
+    UntaggedFunction::Kind function_kind = target.kind();
     if (flow_graph()->CheckForInstanceCall(instr, function_kind) ==
         FlowGraph::ToCheck::kNoCheck) {
       StaticCallInstr* call = StaticCallInstr::FromCall(
@@ -877,7 +877,7 @@
     const Function& function =
         Function::Handle(Z, instr->ResolveForReceiverClass(receiver_class));
     if (!function.IsNull()) {
-      const Function& target = Function::ZoneHandle(Z, function.raw());
+      const Function& target = Function::ZoneHandle(Z, function.ptr());
       StaticCallInstr* call =
           StaticCallInstr::FromCall(Z, instr, target, instr->CallCount());
       instr->ReplaceWith(call, current_iterator());
@@ -967,9 +967,9 @@
           // First, try to compute a single target for all subclasses.
           if (single_target.IsNull()) {
             ASSERT(i == 0);
-            single_target = target.raw();
+            single_target = target.ptr();
             continue;
-          } else if (single_target.raw() == target.raw()) {
+          } else if (single_target.ptr() == target.ptr()) {
             continue;
           }
 
@@ -992,12 +992,12 @@
           single_target = Function::null();
         }
 
-        ASSERT(ic_data.raw() != ICData::null());
-        ASSERT(single_target.raw() == Function::null());
+        ASSERT(ic_data.ptr() != ICData::null());
+        ASSERT(single_target.ptr() == Function::null());
         ic_data.AddReceiverCheck(cid, target);
       }
 
-      if (single_target.raw() != Function::null()) {
+      if (single_target.ptr() != Function::null()) {
         // If this is a getter or setter invocation try inlining it right away
         // instead of replacing it with a static call.
         if ((op_kind == Token::kGET) || (op_kind == Token::kSET)) {
@@ -1017,12 +1017,12 @@
 
         // We have computed that there is only a single target for this call
         // within the whole hierarchy. Replace InstanceCall with StaticCall.
-        const Function& target = Function::ZoneHandle(Z, single_target.raw());
+        const Function& target = Function::ZoneHandle(Z, single_target.ptr());
         StaticCallInstr* call =
             StaticCallInstr::FromCall(Z, instr, target, instr->CallCount());
         instr->ReplaceWith(call, current_iterator());
         return;
-      } else if ((ic_data.raw() != ICData::null()) &&
+      } else if ((ic_data.ptr() != ICData::null()) &&
                  !ic_data.NumberOfChecksIs(0)) {
         const CallTargets* targets = CallTargets::Create(Z, ic_data);
         ASSERT(!targets->is_empty());
@@ -1071,7 +1071,7 @@
 
   // Ignore callsites like f.call() for now. Those need to be handled
   // specially if f is a closure.
-  if (call->function_name().raw() == Symbols::Call().raw()) {
+  if (call->function_name().ptr() == Symbols::Call().ptr()) {
     return false;
   }
 
@@ -1089,7 +1089,7 @@
   ArgumentsDescriptor args_desc(args_desc_array);
   target = Resolver::ResolveDynamicForReceiverClass(
       receiver_class, getter_name, args_desc, /*allow_add=*/false);
-  if (target.raw() == Function::null() || target.IsMethodExtractor()) {
+  if (target.ptr() == Function::null() || target.IsMethodExtractor()) {
     return false;
   }
 
diff --git a/runtime/vm/compiler/aot/dispatch_table_generator.cc b/runtime/vm/compiler/aot/dispatch_table_generator.cc
index 415b1ef..2198ae7 100644
--- a/runtime/vm/compiler/aot/dispatch_table_generator.cc
+++ b/runtime/vm/compiler/aot/dispatch_table_generator.cc
@@ -444,7 +444,7 @@
   for (classid_t cid = kIllegalCid + 1; cid < num_classes_; cid++) {
     obj = classes_->At(cid);
     if (obj.IsClass()) {
-      klass = Class::RawCast(obj.raw());
+      klass = Class::RawCast(obj.ptr());
       functions = klass.current_functions();
       if (!functions.IsNull()) {
         for (intptr_t j = 0; j < functions.Length(); j++) {
@@ -489,7 +489,7 @@
     if (cid > kIllegalCid) {
       obj = classes_->At(cid);
       if (obj.IsClass()) {
-        klass = Class::RawCast(obj.raw());
+        klass = Class::RawCast(obj.ptr());
         concrete = !klass.is_abstract();
         klass = klass.SuperClass();
         if (!klass.IsNull()) {
@@ -558,7 +558,7 @@
   for (classid_t cid = kIllegalCid + 1; cid < num_classes_; cid++) {
     obj = classes_->At(cid);
     if (obj.IsClass()) {
-      klass = Class::RawCast(obj.raw());
+      klass = Class::RawCast(obj.ptr());
       GrowableArray<Interval>& subclasss_cid_ranges = cid_subclass_ranges[cid];
 
       functions = klass.current_functions();
@@ -572,7 +572,7 @@
             if (sid != SelectorMap::kInvalidSelectorId) {
               auto MakeIntervals = [&](const Function& function, int32_t sid) {
                 // A function handle that survives until the table is built.
-                auto& function_handle = Function::ZoneHandle(Z, function.raw());
+                auto& function_handle = Function::ZoneHandle(Z, function.ptr());
 
                 for (intptr_t i = 0; i < subclasss_cid_ranges.length(); i++) {
                   Interval& subclass_cid_range = subclasss_cid_ranges[i];
@@ -667,7 +667,7 @@
     table_rows_[i]->FillTable(classes_, entries);
   }
   entries.MakeImmutable();
-  return entries.raw();
+  return entries.ptr();
 }
 
 }  // namespace compiler
diff --git a/runtime/vm/compiler/aot/precompiler.cc b/runtime/vm/compiler/aot/precompiler.cc
index 6d0771d..dfa42c7 100644
--- a/runtime/vm/compiler/aot/precompiler.cc
+++ b/runtime/vm/compiler/aot/precompiler.cc
@@ -590,7 +590,7 @@
         if (!IsSent(field_name)) continue;
         // Create arguments descriptor with fixed parameters from
         // signature of field_type.
-        signature ^= field_type.raw();
+        signature ^= field_type.ptr();
         if (signature.IsGeneric()) continue;
         if (signature.HasOptionalParameters()) continue;
         if (FLAG_trace_precompiler) {
@@ -610,7 +610,8 @@
             if (subcls.is_allocated()) {
               // Add dispatcher to cls.
               dispatcher = subcls.GetInvocationDispatcher(
-                  field_name, args_desc, FunctionLayout::kInvokeFieldDispatcher,
+                  field_name, args_desc,
+                  UntaggedFunction::kInvokeFieldDispatcher,
                   /* create_if_absent = */ true);
               if (FLAG_trace_precompiler) {
                 THR_Print("Added invoke-field-dispatcher for %s to %s\n",
@@ -631,7 +632,7 @@
                                  : 0;
   RELEASE_ASSERT(!function.HasCode());
   // Ffi trampoline functions have no signature.
-  ASSERT(function.kind() == FunctionLayout::kFfiTrampoline ||
+  ASSERT(function.kind() == UntaggedFunction::kFfiTrampoline ||
          FunctionType::Handle(Z, function.signature()).IsFinalized());
 
   TracingScope tracing_scope(this);
@@ -719,8 +720,8 @@
 }
 
 static bool IsPotentialClosureCall(const String& selector) {
-  return selector.raw() == Symbols::Call().raw() ||
-         selector.raw() == Symbols::DynamicCall().raw();
+  return selector.ptr() == Symbols::Call().ptr() ||
+         selector.ptr() == Symbols::DynamicCall().ptr();
 }
 
 void Precompiler::AddCalleesOfHelper(const Object& entry,
@@ -770,7 +771,7 @@
 void Precompiler::AddTypesOf(const Class& cls) {
   if (cls.IsNull()) return;
   if (classes_to_retain_.HasKey(&cls)) return;
-  classes_to_retain_.Insert(&Class::ZoneHandle(Z, cls.raw()));
+  classes_to_retain_.Insert(&Class::ZoneHandle(Z, cls.ptr()));
 
   Array& interfaces = Array::Handle(Z, cls.interfaces());
   AbstractType& type = AbstractType::Handle(Z);
@@ -833,7 +834,7 @@
   if (abstype.IsTypeParameter()) {
     const auto& param = TypeParameter::Cast(abstype);
     if (typeparams_to_retain_.HasKey(&param)) return;
-    typeparams_to_retain_.Insert(&TypeParameter::ZoneHandle(Z, param.raw()));
+    typeparams_to_retain_.Insert(&TypeParameter::ZoneHandle(Z, param.ptr()));
 
     auto& type = AbstractType::Handle(Z, param.bound());
     AddType(type);
@@ -845,7 +846,7 @@
   if (abstype.IsFunctionType()) {
     if (functiontypes_to_retain_.HasKey(&FunctionType::Cast(abstype))) return;
     const FunctionType& signature =
-        FunctionType::ZoneHandle(Z, FunctionType::Cast(abstype).raw());
+        FunctionType::ZoneHandle(Z, FunctionType::Cast(abstype).ptr());
     functiontypes_to_retain_.Insert(&signature);
 
     AddTypeArguments(TypeArguments::Handle(Z, signature.type_parameters()));
@@ -861,7 +862,7 @@
   }
 
   if (types_to_retain_.HasKey(&abstype)) return;
-  types_to_retain_.Insert(&AbstractType::ZoneHandle(Z, abstype.raw()));
+  types_to_retain_.Insert(&AbstractType::ZoneHandle(Z, abstype.ptr()));
 
   if (abstype.IsType()) {
     const Type& type = Type::Cast(abstype);
@@ -880,7 +881,7 @@
   if (args.IsNull()) return;
 
   if (typeargs_to_retain_.HasKey(&args)) return;
-  typeargs_to_retain_.Insert(&TypeArguments::ZoneHandle(Z, args.raw()));
+  typeargs_to_retain_.Insert(&TypeArguments::ZoneHandle(Z, args.ptr()));
 
   AbstractType& arg = AbstractType::Handle(Z);
   for (intptr_t i = 0; i < args.Length(); i++) {
@@ -899,8 +900,8 @@
     return;
   }
 
-  if (instance.raw() == Object::sentinel().raw() ||
-      instance.raw() == Object::transition_sentinel().raw()) {
+  if (instance.ptr() == Object::sentinel().ptr() ||
+      instance.ptr() == Object::transition_sentinel().ptr()) {
     return;
   }
 
@@ -928,7 +929,7 @@
     const Library& target = Library::Handle(Z, prefix.GetLibrary(0));
     cls = target.toplevel_class();
     if (!classes_to_retain_.HasKey(&cls)) {
-      classes_to_retain_.Insert(&Class::ZoneHandle(Z, cls.raw()));
+      classes_to_retain_.Insert(&Class::ZoneHandle(Z, cls.ptr()));
     }
     return;
   }
@@ -943,7 +944,7 @@
   // Constants are canonicalized and we avoid repeated processing of them.
   if (consts_to_retain_.HasKey(&instance)) return;
 
-  consts_to_retain_.Insert(&Instance::ZoneHandle(Z, instance.raw()));
+  consts_to_retain_.Insert(&Instance::ZoneHandle(Z, instance.ptr()));
 
   if (cls.NumTypeArguments() > 0) {
     AddTypeArguments(TypeArguments::Handle(Z, instance.GetTypeArguments()));
@@ -972,7 +973,7 @@
   };
 
   ConstObjectVisitor visitor(this, IG);
-  instance.raw()->ptr()->VisitPointers(&visitor);
+  instance.ptr()->untag()->VisitPointers(&visitor);
 }
 
 void Precompiler::AddClosureCall(const String& call_selector,
@@ -982,7 +983,7 @@
   const Function& dispatcher =
       Function::Handle(Z, cache_class.GetInvocationDispatcher(
                               call_selector, arguments_descriptor,
-                              FunctionLayout::kInvokeFieldDispatcher,
+                              UntaggedFunction::kInvokeFieldDispatcher,
                               true /* create_if_absent */));
   AddFunction(dispatcher);
 }
@@ -994,15 +995,15 @@
 
   if (fields_to_retain_.HasKey(&field)) return;
 
-  fields_to_retain_.Insert(&Field::ZoneHandle(Z, field.raw()));
+  fields_to_retain_.Insert(&Field::ZoneHandle(Z, field.ptr()));
 
   if (field.is_static()) {
     const Object& value = Object::Handle(Z, field.StaticValue());
     // Should not be in the middle of initialization while precompiling.
-    ASSERT(value.raw() != Object::transition_sentinel().raw());
+    ASSERT(value.ptr() != Object::transition_sentinel().ptr());
 
-    if (value.raw() != Object::sentinel().raw() &&
-        value.raw() != Object::null()) {
+    if (value.ptr() != Object::sentinel().ptr() &&
+        value.ptr() != Object::null()) {
       ASSERT(value.IsInstance());
       AddConstObject(Instance::Cast(value));
     }
@@ -1030,15 +1031,15 @@
 
   // Resolver::ResolveDynamic uses.
   const auto& selector = String::Handle(Z, function.name());
-  if (selector.raw() == Symbols::toString().raw()) return true;
-  if (selector.raw() == Symbols::AssignIndexToken().raw()) return true;
-  if (selector.raw() == Symbols::IndexToken().raw()) return true;
-  if (selector.raw() == Symbols::hashCode().raw()) return true;
-  if (selector.raw() == Symbols::NoSuchMethod().raw()) return true;
-  if (selector.raw() == Symbols::EqualOperator().raw()) return true;
+  if (selector.ptr() == Symbols::toString().ptr()) return true;
+  if (selector.ptr() == Symbols::AssignIndexToken().ptr()) return true;
+  if (selector.ptr() == Symbols::IndexToken().ptr()) return true;
+  if (selector.ptr() == Symbols::hashCode().ptr()) return true;
+  if (selector.ptr() == Symbols::NoSuchMethod().ptr()) return true;
+  if (selector.ptr() == Symbols::EqualOperator().ptr()) return true;
 
   // Use the same check for _Closure.call as in stack_trace.{h|cc}.
-  if (selector.raw() == Symbols::Call().raw()) {
+  if (selector.ptr() == Symbols::Call().ptr()) {
     const auto& name = String::Handle(Z, function.QualifiedScrubbedName());
     if (name.Equals(Symbols::_ClosureCall())) return true;
   }
@@ -1084,7 +1085,7 @@
 
   ASSERT(!selector.IsNull());
   if (!IsSent(selector)) {
-    sent_selectors_.Insert(&String::ZoneHandle(Z, selector.raw()));
+    sent_selectors_.Insert(&String::ZoneHandle(Z, selector.ptr()));
     selector_count_++;
     changed_ = true;
 
@@ -1224,7 +1225,7 @@
 
           if ((type == EntryPointPragma::kAlways ||
                type == EntryPointPragma::kGetterOnly) &&
-              function.kind() != FunctionLayout::kConstructor &&
+              function.kind() != UntaggedFunction::kConstructor &&
               !function.IsSetterFunction()) {
             function2 = function.ImplicitClosureFunction();
             AddFunction(function2);
@@ -1234,29 +1235,29 @@
             AddInstantiatedClass(cls);
           }
         }
-        if (function.kind() == FunctionLayout::kImplicitGetter &&
+        if (function.kind() == UntaggedFunction::kImplicitGetter &&
             !implicit_getters.IsNull()) {
           for (intptr_t i = 0; i < implicit_getters.Length(); ++i) {
             field ^= implicit_getters.At(i);
-            if (function.accessor_field() == field.raw()) {
+            if (function.accessor_field() == field.ptr()) {
               AddFunction(function);
             }
           }
         }
-        if (function.kind() == FunctionLayout::kImplicitSetter &&
+        if (function.kind() == UntaggedFunction::kImplicitSetter &&
             !implicit_setters.IsNull()) {
           for (intptr_t i = 0; i < implicit_setters.Length(); ++i) {
             field ^= implicit_setters.At(i);
-            if (function.accessor_field() == field.raw()) {
+            if (function.accessor_field() == field.ptr()) {
               AddFunction(function);
             }
           }
         }
-        if (function.kind() == FunctionLayout::kImplicitStaticGetter &&
+        if (function.kind() == UntaggedFunction::kImplicitStaticGetter &&
             !implicit_static_getters.IsNull()) {
           for (intptr_t i = 0; i < implicit_static_getters.Length(); ++i) {
             field ^= implicit_static_getters.At(i);
-            if (function.accessor_field() == field.raw()) {
+            if (function.accessor_field() == field.ptr()) {
               AddFunction(function);
             }
           }
@@ -1312,7 +1313,7 @@
 
         // Handle the implicit call type conversions.
         if (Field::IsGetterName(selector) &&
-            (function.kind() != FunctionLayout::kMethodExtractor)) {
+            (function.kind() != UntaggedFunction::kMethodExtractor)) {
           // Call-through-getter.
           // Function is get:foo and somewhere foo (or dyn:foo) is called.
           // Note that we need to skip method extractors (which were potentially
@@ -1330,7 +1331,7 @@
             function2 = function.GetDynamicInvocationForwarder(selector2);
             AddFunction(function2);
           }
-        } else if (function.kind() == FunctionLayout::kRegularFunction) {
+        } else if (function.kind() == UntaggedFunction::kRegularFunction) {
           selector2 = Field::LookupGetterSymbol(selector);
           selector3 = String::null();
           if (!selector2.IsNull()) {
@@ -1355,18 +1356,18 @@
         }
 
         const bool is_getter =
-            function.kind() == FunctionLayout::kImplicitGetter ||
-            function.kind() == FunctionLayout::kGetterFunction;
+            function.kind() == UntaggedFunction::kImplicitGetter ||
+            function.kind() == UntaggedFunction::kGetterFunction;
         const bool is_setter =
-            function.kind() == FunctionLayout::kImplicitSetter ||
-            function.kind() == FunctionLayout::kSetterFunction;
+            function.kind() == UntaggedFunction::kImplicitSetter ||
+            function.kind() == UntaggedFunction::kSetterFunction;
         const bool is_regular =
-            function.kind() == FunctionLayout::kRegularFunction;
+            function.kind() == UntaggedFunction::kRegularFunction;
         if (is_getter || is_setter || is_regular) {
           selector2 = Function::CreateDynamicInvocationForwarderName(selector);
           if (IsSent(selector2)) {
-            if (function.kind() == FunctionLayout::kImplicitGetter ||
-                function.kind() == FunctionLayout::kImplicitSetter) {
+            if (function.kind() == UntaggedFunction::kImplicitGetter ||
+                function.kind() == UntaggedFunction::kImplicitSetter) {
               field = function.accessor_field();
               metadata = kernel::ProcedureAttributesOf(field, Z);
             } else if (!found_metadata) {
@@ -1401,7 +1402,7 @@
            String::Cast(a).Equals(String::Cast(b));
   }
   static uword Hash(const Object& obj) { return String::Cast(obj).Hash(); }
-  static ObjectPtr NewKey(const String& str) { return str.raw(); }
+  static ObjectPtr NewKey(const String& str) { return str.ptr(); }
 };
 
 typedef UnorderedHashMap<NameFunctionsTraits> Table;
@@ -1425,7 +1426,7 @@
                                      Function* dyn_function) {
   AddNameToFunctionsTable(zone, table, fname, function);
 
-  *dyn_function = function.raw();
+  *dyn_function = function.ptr();
   if (kernel::NeedsDynamicInvocationForwarder(function)) {
     *mangled_name = function.name();
     *mangled_name =
@@ -1513,12 +1514,12 @@
       // create lazily.
       // => We disable unique target optimization if the target belongs to the
       //    lazily created functions.
-      key_demangled = key.raw();
+      key_demangled = key.ptr();
       if (Function::IsDynamicInvocationForwarderName(key)) {
         key_demangled = Function::DemangleDynamicInvocationForwarderName(key);
       }
-      if (function.name() != key.raw() &&
-          function.name() != key_demangled.raw()) {
+      if (function.name() != key.ptr() &&
+          function.name() != key_demangled.ptr()) {
         continue;
       }
       functions_map.UpdateOrInsert(key, function);
@@ -1698,7 +1699,7 @@
           const uword pc = pc_offset + code.PayloadStart();
           CodePatcher::PatchStaticCallAt(pc, code, target_code_);
           if (append_to_pool) {
-            builder.AddObject(Object::ZoneHandle(target_code_.raw()));
+            builder.AddObject(Object::ZoneHandle(target_code_.ptr()));
           }
         }
         if (FLAG_trace_precompiler) {
@@ -1805,7 +1806,7 @@
         retained_functions.Add(Object::null_object());
         functions = Array::MakeFixedLength(retained_functions);
       } else {
-        functions = Object::empty_array().raw();
+        functions = Object::empty_array().ptr();
       }
       cls.set_invocation_dispatcher_cache(functions);
     }
@@ -1969,7 +1970,7 @@
   const intptr_t dict_size =
       Utils::RoundUpToPowerOfTwo(retained_types.Length() * 4 / 3);
   types_array = HashTables::New<CanonicalTypeSet>(dict_size, Heap::kOld);
-  CanonicalTypeSet types_table(Z, types_array.raw());
+  CanonicalTypeSet types_table(Z, types_array.ptr());
   bool present;
   for (intptr_t i = 0; i < retained_types.Length(); i++) {
     type ^= retained_types.At(i);
@@ -2008,7 +2009,7 @@
       Utils::RoundUpToPowerOfTwo(retained_types.Length() * 4 / 3);
   types_array =
       HashTables::New<CanonicalFunctionTypeSet>(dict_size, Heap::kOld);
-  CanonicalFunctionTypeSet types_table(Z, types_array.raw());
+  CanonicalFunctionTypeSet types_table(Z, types_array.ptr());
   bool present;
   for (intptr_t i = 0; i < retained_types.Length(); i++) {
     type ^= retained_types.At(i);
@@ -2050,7 +2051,7 @@
       Utils::RoundUpToPowerOfTwo(retained_typeparams.Length() * 4 / 3);
   typeparams_array =
       HashTables::New<CanonicalTypeParameterSet>(dict_size, Heap::kOld);
-  CanonicalTypeParameterSet typeparams_table(Z, typeparams_array.raw());
+  CanonicalTypeParameterSet typeparams_table(Z, typeparams_array.ptr());
   bool present;
   for (intptr_t i = 0; i < retained_typeparams.Length(); i++) {
     typeparam ^= retained_typeparams.At(i);
@@ -2089,7 +2090,7 @@
       Utils::RoundUpToPowerOfTwo(retained_typeargs.Length() * 4 / 3);
   typeargs_array =
       HashTables::New<CanonicalTypeArgumentsSet>(dict_size, Heap::kOld);
-  CanonicalTypeArgumentsSet typeargs_table(Z, typeargs_array.raw());
+  CanonicalTypeArgumentsSet typeargs_table(Z, typeargs_array.ptr());
   bool present;
   for (intptr_t i = 0; i < retained_typeargs.Length(); i++) {
     typeargs ^= retained_typeargs.At(i);
@@ -2259,7 +2260,7 @@
 
     lib.RehashDictionary(dict, used * 4 / 3 + 1);
     if (!(retain_root_library_caches_ &&
-          (lib.raw() == IG->object_store()->root_library()))) {
+          (lib.ptr() == IG->object_store()->root_library()))) {
       lib.DropDependenciesAndCaches();
     }
   }
@@ -2335,7 +2336,7 @@
     } else if (lib.is_dart_scheme()) {
       // The core libraries are referenced from the object store.
       retain = true;
-    } else if (lib.raw() == root_lib.raw()) {
+    } else if (lib.ptr() == root_lib.ptr()) {
       // The root library might have no surviving members if it only exports
       // main from another library. It will still be referenced from the object
       // store, so retain it.
@@ -2367,7 +2368,7 @@
   }
 
   Library::RegisterLibraries(T, retained_libraries);
-  libraries_ = retained_libraries.raw();
+  libraries_ = retained_libraries.ptr();
 }
 
 // Traits for the HashTable template.
@@ -2375,7 +2376,7 @@
   static uint32_t Hash(const Object& key) { return Code::Cast(key).Size(); }
   static const char* Name() { return "CodeKeyTraits"; }
   static bool IsMatch(const Object& x, const Object& y) {
-    return x.raw() == y.raw();
+    return x.ptr() == y.ptr();
   }
   static bool ReportStats() { return false; }
 };
@@ -2409,7 +2410,7 @@
     function ^= functions_to_retain_.GetKey(it.Current());
     if (!function.HasCode()) continue;
     code = function.CurrentCode();
-    if (!visited.ContainsKey(code)) return function.raw();
+    if (!visited.ContainsKey(code)) return function.ptr();
   }
   return Function::null();
 }
@@ -2739,13 +2740,13 @@
       // We bailed out or we encountered an error.
       const Error& error = Error::Handle(thread()->StealStickyError());
 
-      if (error.raw() == Object::branch_offset_error().raw()) {
+      if (error.ptr() == Object::branch_offset_error().ptr()) {
         // Compilation failed due to an out of range branch offset in the
         // assembler. We try again (done = false) with far branches enabled.
         done = false;
         ASSERT(!use_far_branches);
         use_far_branches = true;
-      } else if (error.raw() == Object::speculative_inlining_error().raw()) {
+      } else if (error.ptr() == Object::speculative_inlining_error().ptr()) {
         // The return value of setjmp is the deopt id of the check instruction
         // that caused the bailout.
         done = false;
@@ -2801,7 +2802,7 @@
     per_compile_timer.Start();
 
     ParsedFunction* parsed_function = new (zone)
-        ParsedFunction(thread, Function::ZoneHandle(zone, function.raw()));
+        ParsedFunction(thread, Function::ZoneHandle(zone, function.ptr()));
     if (trace_compiler) {
       THR_Print("Precompiling %sfunction: '%s' @ token %" Pd ", size %" Pd "\n",
                 (optimized ? "optimized " : ""),
@@ -2821,7 +2822,7 @@
       const Error& error = Error::Handle(thread->StealStickyError());
       ASSERT(error.IsLanguageError() &&
              LanguageError::Cast(error).kind() != Report::kBailout);
-      return error.raw();
+      return error.ptr();
     }
 
     per_compile_timer.Stop();
@@ -2851,7 +2852,7 @@
     // Precompilation may encounter compile-time errors.
     // Do not attempt to optimize functions that can cause errors.
     function.set_is_optimizable(false);
-    return error.raw();
+    return error.ptr();
   }
   UNREACHABLE();
   return Error::null();
@@ -3012,7 +3013,7 @@
     renamed_ = BuildRename(name, atomic);
     renames_.UpdateOrInsert(name, renamed_);
   }
-  return renamed_.raw();
+  return renamed_.ptr();
 }
 
 static const char* const kGetterPrefix = "get:";
@@ -3088,8 +3089,8 @@
                                      should_be_private ? "_" : "", name_);
     // Must check if our generated name clashes with something that will
     // have an identity renaming.
-  } while (renames_.GetOrNull(renamed_) == renamed_.raw());
-  return renamed_.raw();
+  } while (renames_.GetOrNull(renamed_) == renamed_.ptr());
+  return renamed_.ptr();
 }
 
 StringPtr Obfuscator::ObfuscationState::BuildRename(const String& name,
@@ -3143,7 +3144,7 @@
     } else if (is_setter) {
       return Symbols::FromSet(thread_, string_);
     }
-    return string_.raw();
+    return string_.ptr();
   } else {
     return NewAtomicRename(is_private);
   }
@@ -3161,19 +3162,19 @@
   const Array& renames = Array::Handle(
       thread->zone(), GetRenamesFromSavedState(obfuscation_state));
 
-  ObfuscationMap renames_map(renames.raw());
+  ObfuscationMap renames_map(renames.ptr());
   String& piece = String::Handle();
   for (intptr_t i = 0; i < pieces.Length(); i++) {
     piece ^= pieces.At(i);
     ASSERT(piece.IsSymbol());
 
     // Fast path: skip '.'
-    if (piece.raw() == Symbols::Dot().raw()) {
+    if (piece.ptr() == Symbols::Dot().ptr()) {
       continue;
     }
 
     // Fast path: check if piece has an identity obfuscation.
-    if (renames_map.GetOrNull(piece) == piece.raw()) {
+    if (renames_map.GetOrNull(piece) == piece.ptr()) {
       continue;
     }
 
@@ -3183,7 +3184,7 @@
     ObfuscationMap::Iterator it(&renames_map);
     while (it.MoveNext()) {
       const intptr_t entry = it.Current();
-      if (renames_map.GetPayload(entry, 0) == piece.raw()) {
+      if (renames_map.GetPayload(entry, 0) == piece.ptr()) {
         piece ^= renames_map.GetKey(entry);
         pieces.SetAt(i, piece);
         break;
@@ -3211,7 +3212,7 @@
 
   const Array& renames = Array::Handle(
       thread->zone(), GetRenamesFromSavedState(obfuscation_state));
-  ObfuscationMap renames_map(renames.raw());
+  ObfuscationMap renames_map(renames.ptr());
 
   const char** result = new const char*[renames_map.NumOccupied() * 2 + 1];
   intptr_t idx = 0;
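
A note on the retry hunks earlier in this file: the comments there describe a two-step fallback, where a branch-offset bailout re-runs compilation with far branches enabled, and a speculative-inlining bailout re-runs it with the failing speculation disabled. A minimal sketch of that control flow, with TryCompile as a hypothetical stand-in for the real compile step:

// Sketch only; TryCompile is hypothetical. The error singletons are the
// same ones compared via ptr() in the hunk above.
ErrorPtr CompileWithRetries() {
  bool done = false;
  bool use_far_branches = false;
  Error& error = Error::Handle();
  while (!done) {
    done = true;
    error = TryCompile(use_far_branches);
    if (error.ptr() == Object::branch_offset_error().ptr()) {
      done = false;  // Retry with far branches enabled.
      use_far_branches = true;
    } else if (error.ptr() == Object::speculative_inlining_error().ptr()) {
      done = false;  // Retry with this speculation disabled.
    }
  }
  return error.ptr();
}
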
diff --git a/runtime/vm/compiler/aot/precompiler.h b/runtime/vm/compiler/aot/precompiler.h
index 4c722f9..42406c4 100644
--- a/runtime/vm/compiler/aot/precompiler.h
+++ b/runtime/vm/compiler/aot/precompiler.h
@@ -62,7 +62,7 @@
   static inline intptr_t Hashcode(Key key) { return key->Hash(); }
 
   static inline bool IsKeyEqual(Pair pair, Key key) {
-    return pair->raw() == key->raw();
+    return pair->ptr() == key->ptr();
   }
 };
 
@@ -73,7 +73,7 @@
   static uint32_t Hash(const Object& key) { return Function::Cast(key).Hash(); }
   static const char* Name() { return "FunctionKeyTraits"; }
   static bool IsMatch(const Object& x, const Object& y) {
-    return x.raw() == y.raw();
+    return x.ptr() == y.ptr();
   }
   static bool ReportStats() { return false; }
 };
@@ -100,7 +100,7 @@
   }
 
   static inline bool IsKeyEqual(Pair pair, Key key) {
-    return pair->raw() == key->raw();
+    return pair->ptr() == key->ptr();
   }
 };
 
@@ -120,7 +120,7 @@
   static inline intptr_t Hashcode(Key key) { return key->token_pos().Hash(); }
 
   static inline bool IsKeyEqual(Pair pair, Key key) {
-    return pair->raw() == key->raw();
+    return pair->ptr() == key->ptr();
   }
 };
 
@@ -140,7 +140,7 @@
   static inline intptr_t Hashcode(Key key) { return key->Hash(); }
 
   static inline bool IsKeyEqual(Pair pair, Key key) {
-    return pair->raw() == key->raw();
+    return pair->ptr() == key->ptr();
   }
 };
 
@@ -160,7 +160,7 @@
   static inline intptr_t Hashcode(Key key) { return key->Hash(); }
 
   static inline bool IsKeyEqual(Pair pair, Key key) {
-    return pair->raw() == key->raw();
+    return pair->ptr() == key->ptr();
   }
 };
 
@@ -180,7 +180,7 @@
   static inline intptr_t Hashcode(Key key) { return key->Hash(); }
 
   static inline bool IsKeyEqual(Pair pair, Key key) {
-    return pair->raw() == key->raw();
+    return pair->ptr() == key->ptr();
   }
 };
 
@@ -200,7 +200,7 @@
   static inline intptr_t Hashcode(Key key) { return key->Hash(); }
 
   static inline bool IsKeyEqual(Pair pair, Key key) {
-    return pair->raw() == key->raw();
+    return pair->ptr() == key->ptr();
   }
 };
 
@@ -220,7 +220,7 @@
   static inline intptr_t Hashcode(Key key) { return key->GetClassId(); }
 
   static inline bool IsKeyEqual(Pair pair, Key key) {
-    return pair->raw() == key->raw();
+    return pair->ptr() == key->ptr();
   }
 };
 
@@ -405,7 +405,7 @@
   static bool ReportStats() { return false; }
 
   static bool IsMatch(const Object& a, const Object& b) {
-    return String::Cast(a).raw() == String::Cast(b).raw();
+    return String::Cast(a).ptr() == String::Cast(b).ptr();
   }
   static uword Hash(const Object& obj) { return String::Cast(obj).Hash(); }
 };
@@ -421,7 +421,7 @@
 
   // Only for non-descriptor lookup and table expansion.
   static bool IsMatch(const Object& a, const Object& b) {
-    return a.raw() == b.raw();
+    return a.ptr() == b.ptr();
   }
 
   static uword Hash(const Object& key) { return String::Cast(key).Hash(); }
@@ -467,7 +467,7 @@
   // input and it always preserves leading '_' even for atomic renames.
   StringPtr Rename(const String& name, bool atomic = false) {
     if (state_ == NULL) {
-      return name.raw();
+      return name.ptr();
     }
 
     return state_->RenameImpl(name, atomic);
@@ -502,13 +502,13 @@
   static ArrayPtr GetRenamesFromSavedState(const Array& saved_state) {
     Array& renames = Array::Handle();
     renames ^= saved_state.At(kSavedStateRenamesIndex);
-    return renames.raw();
+    return renames.ptr();
   }
 
   static StringPtr GetNameFromSavedState(const Array& saved_state) {
     String& name = String::Handle();
     name ^= saved_state.At(kSavedStateNameIndex);
-    return name.raw();
+    return name.ptr();
   }
 
   class ObfuscationState : public ZoneAllocated {
@@ -605,7 +605,7 @@
   ~Obfuscator() {}
 
   StringPtr Rename(const String& name, bool atomic = false) {
-    return name.raw();
+    return name.ptr();
   }
 
   void PreventRenaming(const String& name) {}
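
All of the trait structs touched in this header share one shape: hashing delegates to the object's own Hash(), and key equality is tagged-pointer identity, now spelled ptr() instead of raw(). A condensed sketch of that shape (ExampleKeyValueTrait is illustrative, not a struct from this file):

// Sketch only; mirrors the repeated traits above with hypothetical names.
class ExampleKeyValueTrait {
 public:
  typedef const Function* Key;
  typedef const Function* Value;
  typedef const Function* Pair;

  static Key KeyOf(Pair kv) { return kv; }
  static Value ValueOf(Pair kv) { return kv; }
  static inline intptr_t Hashcode(Key key) { return key->Hash(); }
  static inline bool IsKeyEqual(Pair pair, Key key) {
    return pair->ptr() == key->ptr();  // identity on tagged pointers
  }
};
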
diff --git a/runtime/vm/compiler/aot/precompiler_tracer.cc b/runtime/vm/compiler/aot/precompiler_tracer.cc
index 96dfbda..40f6f58 100644
--- a/runtime/vm/compiler/aot/precompiler_tracer.cc
+++ b/runtime/vm/compiler/aot/precompiler_tracer.cc
@@ -153,7 +153,7 @@
     } else if (obj.IsField()) {
       cls_ = Field::Cast(obj).Owner();
     }
-    if (cls_.raw() != Class::null()) {
+    if (cls_.ptr() != Class::null()) {
       InternEntity(cls_);
     }
   }
diff --git a/runtime/vm/compiler/aot/precompiler_tracer.h b/runtime/vm/compiler/aot/precompiler_tracer.h
index 3e315bd..70a9359 100644
--- a/runtime/vm/compiler/aot/precompiler_tracer.h
+++ b/runtime/vm/compiler/aot/precompiler_tracer.h
@@ -92,7 +92,7 @@
     static const char* Name() { return "EntityTableTraits"; }
 
     static bool IsMatch(const Object& a, const Object& b) {
-      return a.raw() == b.raw();
+      return a.ptr() == b.ptr();
     }
 
     static uword Hash(const Object& obj) {
diff --git a/runtime/vm/compiler/asm_intrinsifier_arm.cc b/runtime/vm/compiler/asm_intrinsifier_arm.cc
index 601d315..2bdbc86 100644
--- a/runtime/vm/compiler/asm_intrinsifier_arm.cc
+++ b/runtime/vm/compiler/asm_intrinsifier_arm.cc
@@ -1973,10 +1973,10 @@
   // R1: new object end address.
   // R2: allocation size.
   {
-    const intptr_t shift = target::ObjectLayout::kTagBitsSizeTagPos -
+    const intptr_t shift = target::UntaggedObject::kTagBitsSizeTagPos -
                            target::ObjectAlignment::kObjectAlignmentLog2;
 
-    __ CompareImmediate(R2, target::ObjectLayout::kSizeTagMaxSizeTag);
+    __ CompareImmediate(R2, target::UntaggedObject::kSizeTagMaxSizeTag);
     __ mov(R3, Operand(R2, LSL, shift), LS);
     __ mov(R3, Operand(0), HI);
 
diff --git a/runtime/vm/compiler/asm_intrinsifier_arm64.cc b/runtime/vm/compiler/asm_intrinsifier_arm64.cc
index fac06c9..80d2290 100644
--- a/runtime/vm/compiler/asm_intrinsifier_arm64.cc
+++ b/runtime/vm/compiler/asm_intrinsifier_arm64.cc
@@ -1776,7 +1776,7 @@
   // R0: Untagged address of header word (ldxr/stxr do not support offsets).
   __ sub(R0, R0, Operand(kHeapObjectTag));
   __ SmiUntag(R1);
-  __ LslImmediate(R1, R1, target::ObjectLayout::kHashTagPos);
+  __ LslImmediate(R1, R1, target::UntaggedObject::kHashTagPos);
   Label retry;
   __ Bind(&retry);
   __ ldxr(R2, R0, kEightBytes);
@@ -2008,7 +2008,7 @@
 
   // R1: Untagged address of header word (ldxr/stxr do not support offsets).
   __ sub(R1, R1, Operand(kHeapObjectTag));
-  __ LslImmediate(R0, R0, target::ObjectLayout::kHashTagPos);
+  __ LslImmediate(R0, R0, target::UntaggedObject::kHashTagPos);
   Label retry;
   __ Bind(&retry);
   __ ldxr(R2, R1, kEightBytes);
@@ -2016,7 +2016,7 @@
   __ stxr(R4, R2, R1, kEightBytes);
   __ cbnz(&retry, R4);
 
-  __ LsrImmediate(R0, R0, target::ObjectLayout::kHashTagPos);
+  __ LsrImmediate(R0, R0, target::UntaggedObject::kHashTagPos);
   __ SmiTag(R0);
   __ ret();
 }
@@ -2075,10 +2075,10 @@
   // R1: new object end address.
   // R2: allocation size.
   {
-    const intptr_t shift = target::ObjectLayout::kTagBitsSizeTagPos -
+    const intptr_t shift = target::UntaggedObject::kTagBitsSizeTagPos -
                            target::ObjectAlignment::kObjectAlignmentLog2;
 
-    __ CompareImmediate(R2, target::ObjectLayout::kSizeTagMaxSizeTag);
+    __ CompareImmediate(R2, target::UntaggedObject::kSizeTagMaxSizeTag);
     __ LslImmediate(R2, R2, shift);
     __ csel(R2, R2, ZR, LS);
 
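The ldxr/stxr sequences above install the identity hash into the object header without a lock: load-exclusive the tags word, OR the hash in at UntaggedObject::kHashTagPos, store-exclusive, and retry on contention. A rough C++ equivalent of that loop (the atomic type and helper name are illustrative, not runtime code):

#include <atomic>
#include <cstdint>

// Sketch only; models the ldxr/stxr retry loop with a CAS. hash_tag_pos
// corresponds to UntaggedObject::kHashTagPos in the hunks above.
void InstallIdentityHash(std::atomic<uint64_t>* tags, uint64_t hash,
                         int hash_tag_pos) {
  uint64_t old_tags = tags->load(std::memory_order_relaxed);
  uint64_t new_tags;
  do {
    new_tags = old_tags | (hash << hash_tag_pos);
  } while (!tags->compare_exchange_weak(old_tags, new_tags));
}
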
diff --git a/runtime/vm/compiler/asm_intrinsifier_ia32.cc b/runtime/vm/compiler/asm_intrinsifier_ia32.cc
index e97ea6e..89f488c 100644
--- a/runtime/vm/compiler/asm_intrinsifier_ia32.cc
+++ b/runtime/vm/compiler/asm_intrinsifier_ia32.cc
@@ -1992,9 +1992,9 @@
   // EDI: allocation size.
   {
     Label size_tag_overflow, done;
-    __ cmpl(EDI, Immediate(target::ObjectLayout::kSizeTagMaxSizeTag));
+    __ cmpl(EDI, Immediate(target::UntaggedObject::kSizeTagMaxSizeTag));
     __ j(ABOVE, &size_tag_overflow, Assembler::kNearJump);
-    __ shll(EDI, Immediate(target::ObjectLayout::kTagBitsSizeTagPos -
+    __ shll(EDI, Immediate(target::UntaggedObject::kTagBitsSizeTagPos -
                            target::ObjectAlignment::kObjectAlignmentLog2));
     __ jmp(&done, Assembler::kNearJump);
 
diff --git a/runtime/vm/compiler/asm_intrinsifier_x64.cc b/runtime/vm/compiler/asm_intrinsifier_x64.cc
index e4329c4..bcfbd8e 100644
--- a/runtime/vm/compiler/asm_intrinsifier_x64.cc
+++ b/runtime/vm/compiler/asm_intrinsifier_x64.cc
@@ -1715,7 +1715,7 @@
   __ movq(RAX, Address(RSP, +2 * target::kWordSize));  // Object.
   __ movq(RDX, Address(RSP, +1 * target::kWordSize));  // Value.
   __ SmiUntag(RDX);
-  __ shlq(RDX, Immediate(target::ObjectLayout::kHashTagPos));
+  __ shlq(RDX, Immediate(target::UntaggedObject::kHashTagPos));
   // lock+orq is an atomic read-modify-write.
   __ lock();
   __ orq(FieldAddress(RAX, target::Object::tags_offset()), RDX);
@@ -1953,11 +1953,11 @@
   __ j(NOT_EQUAL, &set_hash_code, Assembler::kNearJump);
   __ incq(RAX);
   __ Bind(&set_hash_code);
-  __ shlq(RAX, Immediate(target::ObjectLayout::kHashTagPos));
+  __ shlq(RAX, Immediate(target::UntaggedObject::kHashTagPos));
   // lock+orq is an atomic read-modify-write.
   __ lock();
   __ orq(FieldAddress(RBX, target::Object::tags_offset()), RAX);
-  __ sarq(RAX, Immediate(target::ObjectLayout::kHashTagPos));
+  __ sarq(RAX, Immediate(target::UntaggedObject::kHashTagPos));
   __ SmiTag(RAX);
   __ ret();
 }
@@ -2020,9 +2020,9 @@
   // RDI: allocation size.
   {
     Label size_tag_overflow, done;
-    __ cmpq(RDI, Immediate(target::ObjectLayout::kSizeTagMaxSizeTag));
+    __ cmpq(RDI, Immediate(target::UntaggedObject::kSizeTagMaxSizeTag));
     __ j(ABOVE, &size_tag_overflow, Assembler::kNearJump);
-    __ shlq(RDI, Immediate(target::ObjectLayout::kTagBitsSizeTagPos -
+    __ shlq(RDI, Immediate(target::UntaggedObject::kTagBitsSizeTagPos -
                            target::ObjectAlignment::kObjectAlignmentLog2));
     __ jmp(&done, Assembler::kNearJump);
 
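Each port in these intrinsifier files computes the header size tag the same way: shift the allocation size from its aligned form up to kTagBitsSizeTagPos, and encode 0 when the size exceeds kSizeTagMaxSizeTag (the overflow path taken by the ABOVE/HI branches). A scalar sketch of that register computation:

// Sketch only; scalar form of the size-tag computation above.
uword SizeTagForAllocation(uword size_in_bytes) {
  const intptr_t shift = target::UntaggedObject::kTagBitsSizeTagPos -
                         target::ObjectAlignment::kObjectAlignmentLog2;
  if (size_in_bytes > target::UntaggedObject::kSizeTagMaxSizeTag) {
    return 0;  // Too large to encode in the header tags.
  }
  return size_in_bytes << shift;
}
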
diff --git a/runtime/vm/compiler/assembler/assembler_arm.cc b/runtime/vm/compiler/assembler/assembler_arm.cc
index c28326d..4ba28d6 100644
--- a/runtime/vm/compiler/assembler/assembler_arm.cc
+++ b/runtime/vm/compiler/assembler/assembler_arm.cc
@@ -1739,7 +1739,7 @@
   //    in progress
   // If so, call the WriteBarrier stub, which will either add object to the
   // store buffer (case 1) or add value to the marking stack (case 2).
-  // Compare ObjectLayout::StorePointer.
+  // Compare UntaggedObject::StorePointer.
   Label done;
   if (can_be_smi == kValueCanBeSmi) {
     BranchIfSmi(value, &done);
@@ -1752,7 +1752,7 @@
     ldrb(TMP, FieldAddress(object, target::Object::tags_offset()));
     ldrb(LR, FieldAddress(value, target::Object::tags_offset()));
     and_(TMP, LR,
-         Operand(TMP, LSR, target::ObjectLayout::kBarrierOverlapShift));
+         Operand(TMP, LSR, target::UntaggedObject::kBarrierOverlapShift));
     ldr(LR, Address(THR, target::Thread::write_barrier_mask_offset()));
     tst(TMP, Operand(LR));
   });
@@ -1810,7 +1810,7 @@
   //    in progress
   // If so, call the WriteBarrier stub, which will either add object to the
   // store buffer (case 1) or add value to the marking stack (case 2).
-  // Compare ObjectLayout::StorePointer.
+  // Compare UntaggedObject::StorePointer.
   Label done;
   if (can_be_smi == kValueCanBeSmi) {
     BranchIfSmi(value, &done);
@@ -1824,7 +1824,7 @@
     ldrb(TMP, FieldAddress(object, target::Object::tags_offset()));
     ldrb(LR, FieldAddress(value, target::Object::tags_offset()));
     and_(TMP, LR,
-         Operand(TMP, LSR, target::ObjectLayout::kBarrierOverlapShift));
+         Operand(TMP, LSR, target::UntaggedObject::kBarrierOverlapShift));
     ldr(LR, Address(THR, target::Thread::write_barrier_mask_offset()));
     tst(TMP, Operand(LR));
   });
@@ -1867,7 +1867,7 @@
   StoreIntoObjectFilter(object, value, &done, kValueCanBeSmi, kJumpToNoUpdate);
 
   ldrb(TMP, FieldAddress(object, target::Object::tags_offset()));
-  tst(TMP, Operand(1 << target::ObjectLayout::kOldAndNotRememberedBit));
+  tst(TMP, Operand(1 << target::UntaggedObject::kOldAndNotRememberedBit));
   b(&done, ZERO);
 
   Stop("Store buffer update is required");
@@ -1990,29 +1990,29 @@
 }
 
 void Assembler::ExtractClassIdFromTags(Register result, Register tags) {
-  ASSERT(target::ObjectLayout::kClassIdTagPos == 16);
-  ASSERT(target::ObjectLayout::kClassIdTagSize == 16);
-  Lsr(result, tags, Operand(target::ObjectLayout::kClassIdTagPos), AL);
+  ASSERT(target::UntaggedObject::kClassIdTagPos == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagSize == 16);
+  Lsr(result, tags, Operand(target::UntaggedObject::kClassIdTagPos), AL);
 }
 
 void Assembler::ExtractInstanceSizeFromTags(Register result, Register tags) {
-  ASSERT(target::ObjectLayout::kSizeTagPos == 8);
-  ASSERT(target::ObjectLayout::kSizeTagSize == 8);
+  ASSERT(target::UntaggedObject::kSizeTagPos == 8);
+  ASSERT(target::UntaggedObject::kSizeTagSize == 8);
   Lsr(result, tags,
-      Operand(target::ObjectLayout::kSizeTagPos -
+      Operand(target::UntaggedObject::kSizeTagPos -
               target::ObjectAlignment::kObjectAlignmentLog2),
       AL);
   AndImmediate(result, result,
-               (Utils::NBitMask(target::ObjectLayout::kSizeTagSize)
+               (Utils::NBitMask(target::UntaggedObject::kSizeTagSize)
                 << target::ObjectAlignment::kObjectAlignmentLog2));
 }
 
 void Assembler::LoadClassId(Register result, Register object, Condition cond) {
-  ASSERT(target::ObjectLayout::kClassIdTagPos == 16);
-  ASSERT(target::ObjectLayout::kClassIdTagSize == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagPos == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagSize == 16);
   const intptr_t class_id_offset =
       target::Object::tags_offset() +
-      target::ObjectLayout::kClassIdTagPos / kBitsPerByte;
+      target::UntaggedObject::kClassIdTagPos / kBitsPerByte;
   ldrh(result, FieldAddress(object, class_id_offset), cond);
 }
 
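The barrier fast path above folds both commented cases into a single test: shift the target object's tags right by kBarrierOverlapShift, AND with the stored value's tags and with the thread's write-barrier mask, and call the stub only when the result is nonzero. As a predicate, a sketch under those assumptions (not the runtime's helper):

// Sketch only; the fast-path test emitted above. A nonzero result means
// either old-and-not-remembered target + new value (store buffer case)
// or old target + unmarked value during marking (marking stack case).
bool NeedsWriteBarrier(uword object_tags, uword value_tags,
                       uword write_barrier_mask) {
  const uword overlap =
      object_tags >> target::UntaggedObject::kBarrierOverlapShift;
  return (value_tags & overlap & write_barrier_mask) != 0;
}
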
diff --git a/runtime/vm/compiler/assembler/assembler_arm.h b/runtime/vm/compiler/assembler/assembler_arm.h
index c9d44f8..62a6414 100644
--- a/runtime/vm/compiler/assembler/assembler_arm.h
+++ b/runtime/vm/compiler/assembler/assembler_arm.h
@@ -1254,7 +1254,7 @@
   // before the code can be used.
   //
  // The necessary information for the "linker" (i.e. the relocation
-  // information) is stored in [CodeLayout::static_calls_target_table_]: an
+  // information) is stored in [UntaggedCode::static_calls_target_table_]: an
   // entry of the form
   //
   //   (Code::kPcRelativeCall & pc_offset, <target-code>, <target-function>)
diff --git a/runtime/vm/compiler/assembler/assembler_arm64.cc b/runtime/vm/compiler/assembler/assembler_arm64.cc
index 885229b65..29a0ec9 100644
--- a/runtime/vm/compiler/assembler/assembler_arm64.cc
+++ b/runtime/vm/compiler/assembler/assembler_arm64.cc
@@ -996,7 +996,7 @@
   //    in progress
   // If so, call the WriteBarrier stub, which will either add object to the
   // store buffer (case 1) or add value to the marking stack (case 2).
-  // Compare ObjectLayout::StorePointer.
+  // Compare UntaggedObject::StorePointer.
   Label done;
   if (can_be_smi == kValueCanBeSmi) {
     BranchIfSmi(value, &done);
@@ -1006,7 +1006,7 @@
   ldr(TMP2, FieldAddress(value, target::Object::tags_offset(), kByte),
       kUnsignedByte);
   and_(TMP, TMP2,
-       Operand(TMP, LSR, target::ObjectLayout::kBarrierOverlapShift));
+       Operand(TMP, LSR, target::UntaggedObject::kBarrierOverlapShift));
   tst(TMP, Operand(BARRIER_MASK));
   b(&done, ZERO);
 
@@ -1064,7 +1064,7 @@
   //    in progress
   // If so, call the WriteBarrier stub, which will either add object to the
   // store buffer (case 1) or add value to the marking stack (case 2).
-  // Compare ObjectLayout::StorePointer.
+  // Compare UntaggedObject::StorePointer.
   Label done;
   if (can_be_smi == kValueCanBeSmi) {
     BranchIfSmi(value, &done);
@@ -1074,7 +1074,7 @@
   ldr(TMP2, FieldAddress(value, target::Object::tags_offset(), kByte),
       kUnsignedByte);
   and_(TMP, TMP2,
-       Operand(TMP, LSR, target::ObjectLayout::kBarrierOverlapShift));
+       Operand(TMP, LSR, target::UntaggedObject::kBarrierOverlapShift));
   tst(TMP, Operand(BARRIER_MASK));
   b(&done, ZERO);
   if (spill_lr) {
@@ -1104,7 +1104,7 @@
 
   ldr(TMP, FieldAddress(object, target::Object::tags_offset(), kByte),
       kUnsignedByte);
-  tsti(TMP, Immediate(1 << target::ObjectLayout::kOldAndNotRememberedBit));
+  tsti(TMP, Immediate(1 << target::UntaggedObject::kOldAndNotRememberedBit));
   b(&done, ZERO);
 
   Stop("Store buffer update is required");
@@ -1156,25 +1156,26 @@
 }
 
 void Assembler::ExtractClassIdFromTags(Register result, Register tags) {
-  ASSERT(target::ObjectLayout::kClassIdTagPos == 16);
-  ASSERT(target::ObjectLayout::kClassIdTagSize == 16);
-  LsrImmediate(result, tags, target::ObjectLayout::kClassIdTagPos, kFourBytes);
+  ASSERT(target::UntaggedObject::kClassIdTagPos == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagSize == 16);
+  LsrImmediate(result, tags, target::UntaggedObject::kClassIdTagPos,
+               kFourBytes);
 }
 
 void Assembler::ExtractInstanceSizeFromTags(Register result, Register tags) {
-  ASSERT(target::ObjectLayout::kSizeTagPos == 8);
-  ASSERT(target::ObjectLayout::kSizeTagSize == 8);
-  ubfx(result, tags, target::ObjectLayout::kSizeTagPos,
-       target::ObjectLayout::kSizeTagSize);
+  ASSERT(target::UntaggedObject::kSizeTagPos == 8);
+  ASSERT(target::UntaggedObject::kSizeTagSize == 8);
+  ubfx(result, tags, target::UntaggedObject::kSizeTagPos,
+       target::UntaggedObject::kSizeTagSize);
   LslImmediate(result, result, target::ObjectAlignment::kObjectAlignmentLog2);
 }
 
 void Assembler::LoadClassId(Register result, Register object) {
-  ASSERT(target::ObjectLayout::kClassIdTagPos == 16);
-  ASSERT(target::ObjectLayout::kClassIdTagSize == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagPos == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagSize == 16);
   const intptr_t class_id_offset =
       target::Object::tags_offset() +
-      target::ObjectLayout::kClassIdTagPos / kBitsPerByte;
+      target::UntaggedObject::kClassIdTagPos / kBitsPerByte;
   LoadFromOffset(result, object, class_id_offset - kHeapObjectTag,
                  kUnsignedTwoBytes);
 }
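
LoadClassId above leans on a layout invariant asserted in each port: the class id occupies a 16-bit field starting at bit 16 of the tags word, so it can be read with one unsigned halfword load at a fixed byte offset from the tags. A sketch of the offset arithmetic:

// Sketch only; the offset arithmetic behind the single halfword load.
intptr_t ClassIdByteOffset() {
  // Asserted above: the class id field is bits [16, 32) of the tags word.
  return target::Object::tags_offset() +
         target::UntaggedObject::kClassIdTagPos / kBitsPerByte;  // +2 bytes
}
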
diff --git a/runtime/vm/compiler/assembler/assembler_arm64.h b/runtime/vm/compiler/assembler/assembler_arm64.h
index 660fe68..86a8b3d 100644
--- a/runtime/vm/compiler/assembler/assembler_arm64.h
+++ b/runtime/vm/compiler/assembler/assembler_arm64.h
@@ -1870,7 +1870,7 @@
   // the code can be used.
   //
  // The necessary information for the "linker" (i.e. the relocation
-  // information) is stored in [CodeLayout::static_calls_target_table_]: an
+  // information) is stored in [UntaggedCode::static_calls_target_table_]: an
   // entry of the form
   //
   //   (Code::kPcRelativeCall & pc_offset, <target-code>, <target-function>)
diff --git a/runtime/vm/compiler/assembler/assembler_arm64_test.cc b/runtime/vm/compiler/assembler/assembler_arm64_test.cc
index e469f8e..c6d8847 100644
--- a/runtime/vm/compiler/assembler/assembler_arm64_test.cc
+++ b/runtime/vm/compiler/assembler/assembler_arm64_test.cc
@@ -2643,7 +2643,7 @@
 }
 
 ASSEMBLER_TEST_RUN(CompareObjectNull, test) {
-  EXPECT_EQ(static_cast<uword>(Bool::True().raw()),
+  EXPECT_EQ(static_cast<uword>(Bool::True().ptr()),
             test->InvokeWithCodeAndThread<uword>());
 }
 
@@ -2657,7 +2657,7 @@
 }
 
 ASSEMBLER_TEST_RUN(LoadObjectTrue, test) {
-  EXPECT_EQ(static_cast<uword>(Bool::True().raw()),
+  EXPECT_EQ(static_cast<uword>(Bool::True().ptr()),
             test->InvokeWithCodeAndThread<uword>());
 }
 
@@ -2671,7 +2671,7 @@
 }
 
 ASSEMBLER_TEST_RUN(LoadObjectFalse, test) {
-  EXPECT_EQ(static_cast<uword>(Bool::False().raw()),
+  EXPECT_EQ(static_cast<uword>(Bool::False().ptr()),
             test->InvokeWithCodeAndThread<uword>());
 }
 
diff --git a/runtime/vm/compiler/assembler/assembler_ia32.cc b/runtime/vm/compiler/assembler/assembler_ia32.cc
index de9ce35..99a7a02 100644
--- a/runtime/vm/compiler/assembler/assembler_ia32.cc
+++ b/runtime/vm/compiler/assembler/assembler_ia32.cc
@@ -1990,7 +1990,7 @@
   StoreIntoObjectFilter(object, value, &done, kValueCanBeSmi, kJumpToNoUpdate);
 
   testb(FieldAddress(object, target::Object::tags_offset()),
-        Immediate(1 << target::ObjectLayout::kOldAndNotRememberedBit));
+        Immediate(1 << target::UntaggedObject::kOldAndNotRememberedBit));
   j(ZERO, &done, Assembler::kNearJump);
 
   Stop("Store buffer update is required");
@@ -2693,11 +2693,11 @@
 }
 
 void Assembler::LoadClassId(Register result, Register object) {
-  ASSERT(target::ObjectLayout::kClassIdTagPos == 16);
-  ASSERT(target::ObjectLayout::kClassIdTagSize == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagPos == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagSize == 16);
   const intptr_t class_id_offset =
       target::Object::tags_offset() +
-      target::ObjectLayout::kClassIdTagPos / kBitsPerByte;
+      target::UntaggedObject::kClassIdTagPos / kBitsPerByte;
   movzxw(result, FieldAddress(object, class_id_offset));
 }
 
@@ -2723,11 +2723,11 @@
                                      Register scratch,
                                      Label* is_smi) {
   ASSERT(kSmiTagShift == 1);
-  ASSERT(target::ObjectLayout::kClassIdTagPos == 16);
-  ASSERT(target::ObjectLayout::kClassIdTagSize == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagPos == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagSize == 16);
   const intptr_t class_id_offset =
       target::Object::tags_offset() +
-      target::ObjectLayout::kClassIdTagPos / kBitsPerByte;
+      target::UntaggedObject::kClassIdTagPos / kBitsPerByte;
 
   // Untag optimistically. Tag bit is shifted into the CARRY.
   SmiUntag(object);
@@ -2754,7 +2754,7 @@
   } else {
     ASSERT(result != object);
     static const intptr_t kSmiCidSource =
-        kSmiCid << target::ObjectLayout::kClassIdTagPos;
+        kSmiCid << target::UntaggedObject::kClassIdTagPos;
 
     // Make a dummy "Object" whose cid is kSmiCid.
     movl(result, Immediate(reinterpret_cast<int32_t>(&kSmiCidSource) + 1));
diff --git a/runtime/vm/compiler/assembler/assembler_test.cc b/runtime/vm/compiler/assembler/assembler_test.cc
index 02f6b59..8488eae 100644
--- a/runtime/vm/compiler/assembler/assembler_test.cc
+++ b/runtime/vm/compiler/assembler/assembler_test.cc
@@ -29,38 +29,38 @@
   Smi& smi = Smi::Handle();
   Thread* thread = Thread::Current();
 
-  EXPECT(old_array.raw() == grow_old_array.data());
-  EXPECT(!thread->StoreBufferContains(grow_old_array.raw()));
-  EXPECT(old_array.raw() == grow_new_array.data());
-  EXPECT(!thread->StoreBufferContains(grow_new_array.raw()));
+  EXPECT(old_array.ptr() == grow_old_array.data());
+  EXPECT(!thread->StoreBufferContains(grow_old_array.ptr()));
+  EXPECT(old_array.ptr() == grow_new_array.data());
+  EXPECT(!thread->StoreBufferContains(grow_new_array.ptr()));
 
   // Store Smis into the old object.
   for (int i = -128; i < 128; i++) {
     smi = Smi::New(i);
-    TEST_CODE(smi.raw(), grow_old_array.raw(), thread);
-    EXPECT(static_cast<ArrayPtr>(smi.raw()) == grow_old_array.data());
-    EXPECT(!thread->StoreBufferContains(grow_old_array.raw()));
+    TEST_CODE(smi.ptr(), grow_old_array.ptr(), thread);
+    EXPECT(static_cast<ArrayPtr>(smi.ptr()) == grow_old_array.data());
+    EXPECT(!thread->StoreBufferContains(grow_old_array.ptr()));
   }
 
   // Store an old object into the old object.
-  TEST_CODE(old_array.raw(), grow_old_array.raw(), thread);
-  EXPECT(old_array.raw() == grow_old_array.data());
-  EXPECT(!thread->StoreBufferContains(grow_old_array.raw()));
+  TEST_CODE(old_array.ptr(), grow_old_array.ptr(), thread);
+  EXPECT(old_array.ptr() == grow_old_array.data());
+  EXPECT(!thread->StoreBufferContains(grow_old_array.ptr()));
 
   // Store a new object into the old object.
-  TEST_CODE(new_array.raw(), grow_old_array.raw(), thread);
-  EXPECT(new_array.raw() == grow_old_array.data());
-  EXPECT(thread->StoreBufferContains(grow_old_array.raw()));
+  TEST_CODE(new_array.ptr(), grow_old_array.ptr(), thread);
+  EXPECT(new_array.ptr() == grow_old_array.data());
+  EXPECT(thread->StoreBufferContains(grow_old_array.ptr()));
 
   // Store a new object into the new object.
-  TEST_CODE(new_array.raw(), grow_new_array.raw(), thread);
-  EXPECT(new_array.raw() == grow_new_array.data());
-  EXPECT(!thread->StoreBufferContains(grow_new_array.raw()));
+  TEST_CODE(new_array.ptr(), grow_new_array.ptr(), thread);
+  EXPECT(new_array.ptr() == grow_new_array.data());
+  EXPECT(!thread->StoreBufferContains(grow_new_array.ptr()));
 
   // Store an old object into the new object.
-  TEST_CODE(old_array.raw(), grow_new_array.raw(), thread);
-  EXPECT(old_array.raw() == grow_new_array.data());
-  EXPECT(!thread->StoreBufferContains(grow_new_array.raw()));
+  TEST_CODE(old_array.ptr(), grow_new_array.ptr(), thread);
+  EXPECT(old_array.ptr() == grow_new_array.data());
+  EXPECT(!thread->StoreBufferContains(grow_new_array.ptr()));
 }
 
 }  // namespace dart
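
The EXPECTs above pin down the generational barrier contract: of all source/target combinations, only a store of a new-space heap object into an old-space object leaves the target in the thread's store buffer; Smi stores, old-into-old, new-into-new, and old-into-new stores do not. The rule, as a sketch:

// Sketch only; the invariant the assertions above check.
bool EndsUpInStoreBuffer(bool target_is_old, bool value_is_heap_object,
                         bool value_is_new) {
  return target_is_old && value_is_heap_object && value_is_new;
}
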
diff --git a/runtime/vm/compiler/assembler/assembler_x64.cc b/runtime/vm/compiler/assembler/assembler_x64.cc
index b4185a9..3e02ea9 100644
--- a/runtime/vm/compiler/assembler/assembler_x64.cc
+++ b/runtime/vm/compiler/assembler/assembler_x64.cc
@@ -1390,14 +1390,14 @@
   //    in progress
   // If so, call the WriteBarrier stub, which will either add object to the
   // store buffer (case 1) or add value to the marking stack (case 2).
-  // Compare ObjectLayout::StorePointer.
+  // Compare UntaggedObject::StorePointer.
   Label done;
   if (can_be_smi == kValueCanBeSmi) {
     testq(value, Immediate(kSmiTagMask));
     j(ZERO, &done, kNearJump);
   }
   movb(TMP, FieldAddress(object, target::Object::tags_offset()));
-  shrl(TMP, Immediate(target::ObjectLayout::kBarrierOverlapShift));
+  shrl(TMP, Immediate(target::UntaggedObject::kBarrierOverlapShift));
   andl(TMP, Address(THR, target::Thread::write_barrier_mask_offset()));
   testb(FieldAddress(value, target::Object::tags_offset()), TMP);
   j(ZERO, &done, kNearJump);
@@ -1442,14 +1442,14 @@
   //    in progress
   // If so, call the WriteBarrier stub, which will either add object to the
   // store buffer (case 1) or add value to the marking stack (case 2).
-  // Compare ObjectLayout::StorePointer.
+  // Compare UntaggedObject::StorePointer.
   Label done;
   if (can_be_smi == kValueCanBeSmi) {
     testq(value, Immediate(kSmiTagMask));
     j(ZERO, &done, kNearJump);
   }
   movb(TMP, FieldAddress(object, target::Object::tags_offset()));
-  shrl(TMP, Immediate(target::ObjectLayout::kBarrierOverlapShift));
+  shrl(TMP, Immediate(target::UntaggedObject::kBarrierOverlapShift));
   andl(TMP, Address(THR, target::Thread::write_barrier_mask_offset()));
   testb(FieldAddress(value, target::Object::tags_offset()), TMP);
   j(ZERO, &done, kNearJump);
@@ -1477,7 +1477,7 @@
   StoreIntoObjectFilter(object, value, &done, kValueCanBeSmi, kJumpToNoUpdate);
 
   testb(FieldAddress(object, target::Object::tags_offset()),
-        Immediate(1 << target::ObjectLayout::kOldAndNotRememberedBit));
+        Immediate(1 << target::UntaggedObject::kOldAndNotRememberedBit));
   j(ZERO, &done, Assembler::kNearJump);
 
   Stop("Store buffer update is required");
@@ -2160,29 +2160,29 @@
 }
 
 void Assembler::ExtractClassIdFromTags(Register result, Register tags) {
-  ASSERT(target::ObjectLayout::kClassIdTagPos == 16);
-  ASSERT(target::ObjectLayout::kClassIdTagSize == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagPos == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagSize == 16);
   movl(result, tags);
-  shrl(result, Immediate(target::ObjectLayout::kClassIdTagPos));
+  shrl(result, Immediate(target::UntaggedObject::kClassIdTagPos));
 }
 
 void Assembler::ExtractInstanceSizeFromTags(Register result, Register tags) {
-  ASSERT(target::ObjectLayout::kSizeTagPos == 8);
-  ASSERT(target::ObjectLayout::kSizeTagSize == 8);
+  ASSERT(target::UntaggedObject::kSizeTagPos == 8);
+  ASSERT(target::UntaggedObject::kSizeTagSize == 8);
   movzxw(result, tags);
-  shrl(result, Immediate(target::ObjectLayout::kSizeTagPos -
+  shrl(result, Immediate(target::UntaggedObject::kSizeTagPos -
                          target::ObjectAlignment::kObjectAlignmentLog2));
   AndImmediate(result,
-               Immediate(Utils::NBitMask(target::ObjectLayout::kSizeTagSize)
+               Immediate(Utils::NBitMask(target::UntaggedObject::kSizeTagSize)
                          << target::ObjectAlignment::kObjectAlignmentLog2));
 }
 
 void Assembler::LoadClassId(Register result, Register object) {
-  ASSERT(target::ObjectLayout::kClassIdTagPos == 16);
-  ASSERT(target::ObjectLayout::kClassIdTagSize == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagPos == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagSize == 16);
   const intptr_t class_id_offset =
       target::Object::tags_offset() +
-      target::ObjectLayout::kClassIdTagPos / kBitsPerByte;
+      target::UntaggedObject::kClassIdTagPos / kBitsPerByte;
   movzxw(result, FieldAddress(object, class_id_offset));
 }
 
@@ -2207,11 +2207,11 @@
                                      intptr_t class_id,
                                      Label* is_smi) {
   ASSERT(kSmiTagShift == 1);
-  ASSERT(target::ObjectLayout::kClassIdTagPos == 16);
-  ASSERT(target::ObjectLayout::kClassIdTagSize == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagPos == 16);
+  ASSERT(target::UntaggedObject::kClassIdTagSize == 16);
   const intptr_t class_id_offset =
       target::Object::tags_offset() +
-      target::ObjectLayout::kClassIdTagPos / kBitsPerByte;
+      target::UntaggedObject::kClassIdTagPos / kBitsPerByte;
 
   // Untag optimistically. Tag bit is shifted into the CARRY.
   SmiUntag(object);
diff --git a/runtime/vm/compiler/assembler/assembler_x64.h b/runtime/vm/compiler/assembler/assembler_x64.h
index eb7d69a..1295e25 100644
--- a/runtime/vm/compiler/assembler/assembler_x64.h
+++ b/runtime/vm/compiler/assembler/assembler_x64.h
@@ -1037,7 +1037,7 @@
   // before the code can be used.
   //
  // The necessary information for the "linker" (i.e. the relocation
-  // information) is stored in [CodeLayout::static_calls_target_table_]: an
+  // information) is stored in [UntaggedCode::static_calls_target_table_]: an
   // entry of the form
   //
   //   (Code::kPcRelativeCall & pc_offset, <target-code>, <target-function>)
diff --git a/runtime/vm/compiler/assembler/disassembler.cc b/runtime/vm/compiler/assembler/disassembler.cc
index 8df2bfc..fdbe05c 100644
--- a/runtime/vm/compiler/assembler/disassembler.cc
+++ b/runtime/vm/compiler/assembler/disassembler.cc
@@ -309,20 +309,20 @@
     String& var_name = String::Handle(zone);
     for (intptr_t i = 0; i < var_desc_length; i++) {
       var_name = var_descriptors.GetName(i);
-      LocalVarDescriptorsLayout::VarInfo var_info;
+      UntaggedLocalVarDescriptors::VarInfo var_info;
       var_descriptors.GetInfo(i, &var_info);
       const int8_t kind = var_info.kind();
-      if (kind == LocalVarDescriptorsLayout::kSavedCurrentContext) {
+      if (kind == UntaggedLocalVarDescriptors::kSavedCurrentContext) {
         THR_Print("  saved current CTX reg offset %d\n", var_info.index());
       } else {
-        if (kind == LocalVarDescriptorsLayout::kContextLevel) {
+        if (kind == UntaggedLocalVarDescriptors::kContextLevel) {
           THR_Print("  context level %d scope %d", var_info.index(),
                     var_info.scope_id);
-        } else if (kind == LocalVarDescriptorsLayout::kStackVar) {
+        } else if (kind == UntaggedLocalVarDescriptors::kStackVar) {
           THR_Print("  stack var '%s' offset %d", var_name.ToCString(),
                     var_info.index());
         } else {
-          ASSERT(kind == LocalVarDescriptorsLayout::kContextVar);
+          ASSERT(kind == UntaggedLocalVarDescriptors::kContextVar);
           THR_Print("  context var '%s' level %d offset %d",
                     var_name.ToCString(), var_info.scope_id, var_info.index());
         }
@@ -390,9 +390,9 @@
 
         dst_type = AbstractType::null();
         if (object.IsAbstractType()) {
-          dst_type = AbstractType::Cast(object).raw();
+          dst_type = AbstractType::Cast(object).ptr();
         } else if (object.IsCode()) {
-          code = Code::Cast(object).raw();
+          code = Code::Cast(object).ptr();
         }
 
         auto kind = Code::KindField::decode(kind_type_and_offset.Value());
diff --git a/runtime/vm/compiler/backend/block_builder.h b/runtime/vm/compiler/backend/block_builder.h
index f5854b1..ef51d8d 100644
--- a/runtime/vm/compiler/backend/block_builder.h
+++ b/runtime/vm/compiler/backend/block_builder.h
@@ -66,7 +66,7 @@
     const auto representation = FlowGraph::ReturnRepresentationOf(function);
     ReturnInstr* instr = new ReturnInstr(
         Source(), value, CompilerState::Current().GetNextDeoptId(),
-        PcDescriptorsLayout::kInvalidYieldIndex, representation);
+        UntaggedPcDescriptors::kInvalidYieldIndex, representation);
     AddInstruction(instr);
     entry_->set_last_instruction(instr);
     return instr;
diff --git a/runtime/vm/compiler/backend/constant_propagator.cc b/runtime/vm/compiler/backend/constant_propagator.cc
index 4cc16cf..427c0b9 100644
--- a/runtime/vm/compiler/backend/constant_propagator.cc
+++ b/runtime/vm/compiler/backend/constant_propagator.cc
@@ -68,12 +68,12 @@
   //
   // ASSERT(IsUnknown(definition->constant_value()) ||
   //        IsNonConstant(value) ||
-  //        (definition->constant_value().raw() == value.raw()));
+  //        (definition->constant_value().ptr() == value.ptr()));
   //
   // But the final disjunct is not true (e.g., mint or double constants are
   // heap-allocated and so not necessarily pointer-equal on each iteration).
-  if (definition->constant_value().raw() != value.raw()) {
-    definition->constant_value() = value.raw();
+  if (definition->constant_value().ptr() != value.ptr()) {
+    definition->constant_value() = value.ptr();
     if (definition->input_use_list() != NULL) {
       definition_worklist_.Add(definition);
     }
@@ -86,7 +86,7 @@
   // This should be kept in line with Identical_comparison (identical.cc)
   // (=> Instance::IsIdenticalTo in object.cc).
 
-  if (left.raw() == right.raw()) return true;
+  if (left.ptr() == right.ptr()) return true;
   if (left.GetClassId() != right.GetClassId()) return false;
   if (left.IsInteger()) {
     return Integer::Cast(left).Equals(Integer::Cast(right));
@@ -107,7 +107,7 @@
   // Join(unknown, X)      = X
   // Join(X, non-constant) = non-constant
   if (IsUnknown(*left) || IsNonConstant(right)) {
-    *left = right.raw();
+    *left = right.ptr();
     return;
   }
 
@@ -115,7 +115,7 @@
   if (IsIdenticalConstants(*left, right)) return;
 
   // Join(X, Y) = non-constant
-  *left = non_constant_.raw();
+  *left = non_constant_.ptr();
 }
 
 // --------------------------------------------------------------------------
@@ -254,7 +254,7 @@
       if (IsNonConstant(value)) {
         SetReachable(instr->true_successor());
         SetReachable(instr->false_successor());
-      } else if (value.raw() == Bool::True().raw()) {
+      } else if (value.ptr() == Bool::True().ptr()) {
         SetReachable(instr->true_successor());
       } else if (!IsUnknown(value)) {  // Any other constant.
         SetReachable(instr->false_successor());
@@ -875,7 +875,7 @@
     return;
   }
   if (value.IsBool()) {
-    bool val = value.raw() != Bool::True().raw();
+    bool val = value.ptr() != Bool::True().ptr();
     SetValue(instr, Bool::Get(val));
   } else {
     SetValue(instr, non_constant_);
@@ -908,7 +908,7 @@
       SetValue(instr, non_constant_);
     }
   } else if (IsConstant(value)) {
-    if (value.IsInstance() && (value.raw() != Object::sentinel().raw())) {
+    if (value.IsInstance() && (value.ptr() != Object::sentinel().ptr())) {
       const Instance& instance = Instance::Cast(value);
       if (instr->instantiator_type_arguments()->BindsToConstantNull() &&
           instr->function_type_arguments()->BindsToConstantNull()) {
@@ -1001,7 +1001,7 @@
     } else {
       Object& value = Object::Handle();
       if (instr->Evaluate(constant, &value)) {
-        SetValue(instr, Object::ZoneHandle(Z, value.raw()));
+        SetValue(instr, Object::ZoneHandle(Z, value.ptr()));
         return;
       }
     }
@@ -1021,7 +1021,7 @@
       return;
     }
     if (instantiator_type_args_obj.IsTypeArguments()) {
-      instantiator_type_args ^= instantiator_type_args_obj.raw();
+      instantiator_type_args ^= instantiator_type_args_obj.ptr();
     } else {
       SetValue(instr, non_constant_);
       return;
@@ -1035,7 +1035,7 @@
       return;
     }
     if (function_type_args_obj.IsTypeArguments()) {
-      function_type_args ^= function_type_args_obj.raw();
+      function_type_args ^= function_type_args_obj.ptr();
     } else {
       SetValue(instr, non_constant_);
       return;
@@ -1086,7 +1086,7 @@
       SetValue(instr, non_constant_);
       return;
     }
-    instantiator_type_args ^= instantiator_type_args_obj.raw();
+    instantiator_type_args ^= instantiator_type_args_obj.ptr();
     if (instr->CanShareInstantiatorTypeArguments()) {
       SetValue(instr, instantiator_type_args);
       return;
@@ -1105,7 +1105,7 @@
       SetValue(instr, non_constant_);
       return;
     }
-    function_type_args ^= function_type_args_obj.raw();
+    function_type_args ^= function_type_args_obj.ptr();
     if (instr->CanShareFunctionTypeArguments()) {
       SetValue(instr, function_type_args);
       return;
@@ -1148,7 +1148,7 @@
                                             binary_op->is_truncating(),
                                             binary_op->representation(), T));
     if (!result.IsNull()) {
-      SetValue(binary_op, Integer::ZoneHandle(Z, result.raw()));
+      SetValue(binary_op, Integer::ZoneHandle(Z, result.ptr()));
       return;
     }
   }
@@ -1218,7 +1218,7 @@
         Z, Evaluator::UnaryIntegerEvaluate(value, unary_op->op_kind(),
                                            unary_op->representation(), T));
     if (!result.IsNull()) {
-      SetValue(unary_op, Integer::ZoneHandle(Z, result.raw()));
+      SetValue(unary_op, Integer::ZoneHandle(Z, result.ptr()));
       return;
     }
   }
@@ -1712,7 +1712,7 @@
       THR_Print("Constant v%" Pd " = %s\n", defn->ssa_temp_index(),
                 defn->constant_value().ToCString());
     }
-    constant_value_ = defn->constant_value().raw();
+    constant_value_ = defn->constant_value().ptr();
     if ((constant_value_.IsString() || constant_value_.IsMint() ||
          constant_value_.IsDouble()) &&
         !constant_value_.IsCanonical()) {
diff --git a/runtime/vm/compiler/backend/constant_propagator.h b/runtime/vm/compiler/backend/constant_propagator.h
index d0dca7d..36368d0 100644
--- a/runtime/vm/compiler/backend/constant_propagator.h
+++ b/runtime/vm/compiler/backend/constant_propagator.h
@@ -36,7 +36,7 @@
   static void OptimizeBranches(FlowGraph* graph);
 
   // Used to initialize the abstract value of definitions.
-  static ObjectPtr Unknown() { return Object::unknown_constant().raw(); }
+  static ObjectPtr Unknown() { return Object::unknown_constant().ptr(); }
 
  private:
   void Analyze();
@@ -60,9 +60,9 @@
   // first one.
   void Join(Object* left, const Object& right);
 
-  bool IsUnknown(const Object& value) { return value.raw() == unknown_.raw(); }
+  bool IsUnknown(const Object& value) { return value.ptr() == unknown_.ptr(); }
   bool IsNonConstant(const Object& value) {
-    return value.raw() == non_constant_.raw();
+    return value.ptr() == non_constant_.ptr();
   }
   bool IsConstant(const Object& value) {
     return !IsNonConstant(value) && !IsUnknown(value);
diff --git a/runtime/vm/compiler/backend/evaluator.cc b/runtime/vm/compiler/backend/evaluator.cc
index ce4660c..5364b28 100644
--- a/runtime/vm/compiler/backend/evaluator.cc
+++ b/runtime/vm/compiler/backend/evaluator.cc
@@ -118,7 +118,7 @@
     result ^= result.Canonicalize(thread);
   }
 
-  return result.raw();
+  return result.ptr();
 }
 
 IntegerPtr Evaluator::UnaryIntegerEvaluate(const Object& value,
@@ -147,7 +147,7 @@
     result ^= result.Canonicalize(thread);
   }
 
-  return result.raw();
+  return result.ptr();
 }
 
 double Evaluator::EvaluateDoubleOp(const double left,
diff --git a/runtime/vm/compiler/backend/flow_graph.cc b/runtime/vm/compiler/backend/flow_graph.cc
index 683544b..5b8b058 100644
--- a/runtime/vm/compiler/backend/flow_graph.cc
+++ b/runtime/vm/compiler/backend/flow_graph.cc
@@ -192,7 +192,7 @@
   if (constant == nullptr) {
     // Otherwise, allocate and add it to the pool.
     constant =
-        new (zone()) ConstantInstr(Object::ZoneHandle(zone(), object.raw()));
+        new (zone()) ConstantInstr(Object::ZoneHandle(zone(), object.ptr()));
     constant->set_ssa_temp_index(alloc_ssa_temp_index());
     if (NeedsPairLocation(constant->representation())) {
       alloc_ssa_temp_index();
@@ -481,7 +481,7 @@
 
 FlowGraph::ToCheck FlowGraph::CheckForInstanceCall(
     InstanceCallInstr* call,
-    FunctionLayout::Kind kind) const {
+    UntaggedFunction::Kind kind) const {
   if (!FLAG_use_cha_deopt && !isolate()->all_classes_finalized()) {
     // Even if class or function are private, lazy class finalization
     // may later add overriding methods.
@@ -543,7 +543,7 @@
   }
 
   const String& method_name =
-      (kind == FunctionLayout::kMethodExtractor)
+      (kind == UntaggedFunction::kMethodExtractor)
           ? String::Handle(zone(), Field::NameFromGetter(call->function_name()))
           : call->function_name();
 
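The method_name computation above relies on the selector convention used throughout the precompiler hunks: a method extractor is reached through a getter selector, so the check strips the getter prefix before consulting the class hierarchy. A sketch with a literal prefix (Field::NameFromGetter is the real helper; the string handling below is illustrative only, and the prefix matches kGetterPrefix in precompiler.cc):

#include <cstring>
#include <string>

// Sketch only; "get:foo" -> "foo", mirroring Field::NameFromGetter.
std::string ExampleNameFromGetter(const std::string& selector) {
  static const char kGetterPrefix[] = "get:";
  if (selector.compare(0, strlen(kGetterPrefix), kGetterPrefix) == 0) {
    return selector.substr(strlen(kGetterPrefix));
  }
  return selector;
}
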
diff --git a/runtime/vm/compiler/backend/flow_graph.h b/runtime/vm/compiler/backend/flow_graph.h
index 1e60eea..8a8bb32 100644
--- a/runtime/vm/compiler/backend/flow_graph.h
+++ b/runtime/vm/compiler/backend/flow_graph.h
@@ -76,7 +76,7 @@
   }
 
   static inline bool IsKeyEqual(Pair kv, Key key) {
-    return kv->value().raw() == key.raw();
+    return kv->value().ptr() == key.ptr();
   }
 };
 
@@ -234,7 +234,7 @@
   // Return value indicates that the call needs no check at all,
   // just a null check, or a full class check.
   ToCheck CheckForInstanceCall(InstanceCallInstr* call,
-                               FunctionLayout::Kind kind) const;
+                               UntaggedFunction::Kind kind) const;
 
   Thread* thread() const { return thread_; }
   Zone* zone() const { return thread()->zone(); }
diff --git a/runtime/vm/compiler/backend/flow_graph_checker.cc b/runtime/vm/compiler/backend/flow_graph_checker.cc
index 67c01d8..0943943 100644
--- a/runtime/vm/compiler/backend/flow_graph_checker.cc
+++ b/runtime/vm/compiler/backend/flow_graph_checker.cc
@@ -24,8 +24,8 @@
 // succ/pred/block links are not maintained.
 static bool IsSpecialConstant(Definition* def) {
   if (auto c = def->AsConstant()) {
-    return c->value().raw() == Symbols::OptimizedOut().raw() ||
-           c->value().raw() == Object::ZoneHandle().raw();
+    return c->value().ptr() == Symbols::OptimizedOut().ptr() ||
+           c->value().ptr() == Object::ZoneHandle().ptr();
   }
   return false;
 }
@@ -141,8 +141,8 @@
                 ->OriginalDefinitionIgnoreBoxingAndConstraints();
         ASSERT((arg_def == env_def) ||
                (arg_def->IsConstant() && env_def->IsConstant() &&
-                arg_def->AsConstant()->value().raw() ==
-                    env_def->AsConstant()->value().raw()));
+                arg_def->AsConstant()->value().ptr() ==
+                    env_def->AsConstant()->value().ptr()));
       }
     }
   }
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler.cc b/runtime/vm/compiler/backend/flow_graph_compiler.cc
index 6108d29..9bd27a4 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler.cc
@@ -175,8 +175,8 @@
       pending_deoptimization_env_(NULL),
       deopt_id_to_ic_data_(deopt_id_to_ic_data),
       edge_counters_array_(Array::ZoneHandle()) {
-  ASSERT(flow_graph->parsed_function().function().raw() ==
-         parsed_function.function().raw());
+  ASSERT(flow_graph->parsed_function().function().ptr() ==
+         parsed_function.function().ptr());
   if (is_optimizing) {
     // No need to collect extra ICData objects created during compilation.
     deopt_id_to_ic_data_ = nullptr;
@@ -194,8 +194,8 @@
 #endif
   // Make sure that the function is at the position for inline_id 0.
   ASSERT(inline_id_to_function.length() >= 1);
-  ASSERT(inline_id_to_function[0]->raw() ==
-         flow_graph->parsed_function().function().raw());
+  ASSERT(inline_id_to_function[0]->ptr() ==
+         flow_graph->parsed_function().function().ptr());
   code_source_map_builder_ = new (zone_)
       CodeSourceMapBuilder(zone_, stack_traces_only, caller_inline_id,
                            inline_id_to_token_pos, inline_id_to_function);
@@ -273,7 +273,7 @@
     for (intptr_t i = 0; i < num_counters; ++i) {
       edge_counters.SetAt(i, Object::smi_zero());
     }
-    edge_counters_array_ = edge_counters.raw();
+    edge_counters_array_ = edge_counters.ptr();
   }
 }
 
@@ -292,7 +292,7 @@
 
 void FlowGraphCompiler::InsertBSSRelocation(BSS::Relocation reloc) {
   const intptr_t offset = assembler()->InsertAlignedRelocation(reloc);
-  AddDescriptor(PcDescriptorsLayout::kBSSRelocation, /*pc_offset=*/offset,
+  AddDescriptor(UntaggedPcDescriptors::kBSSRelocation, /*pc_offset=*/offset,
                 /*deopt_id=*/DeoptId::kNone, InstructionSource(),
                 /*try_index=*/-1);
 }
@@ -389,7 +389,7 @@
                                         intptr_t dst_index) {
   if (src.IsConstant()) {
     // Skip dead locations.
-    if (src.constant().raw() == Symbols::OptimizedOut().raw()) {
+    if (src.constant().ptr() == Symbols::OptimizedOut().ptr()) {
       return CatchEntryMove();
     }
     const intptr_t pool_index =
@@ -496,7 +496,7 @@
 
 void FlowGraphCompiler::EmitCallsiteMetadata(const InstructionSource& source,
                                              intptr_t deopt_id,
-                                             PcDescriptorsLayout::Kind kind,
+                                             UntaggedPcDescriptors::Kind kind,
                                              LocationSummary* locs,
                                              Environment* env) {
   AddCurrentDescriptor(kind, deopt_id, source);
@@ -511,7 +511,8 @@
     } else {
       // Add deoptimization continuation point after the call and before the
       // arguments are removed.
-      AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after, source);
+      AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id_after,
+                           source);
     }
   }
 }
@@ -519,7 +520,7 @@
 void FlowGraphCompiler::EmitYieldPositionMetadata(
     const InstructionSource& source,
     intptr_t yield_index) {
-  AddDescriptor(PcDescriptorsLayout::kOther, assembler()->CodeSize(),
+  AddDescriptor(UntaggedPcDescriptors::kOther, assembler()->CodeSize(),
                 DeoptId::kNone, source, CurrentTryIndex(), yield_index);
 }
 
@@ -529,7 +530,7 @@
       // Instructions that can be deoptimization targets need to record a
       // kDeopt PcDescriptor corresponding to their deopt id. GotoInstr
       // records its own so that it can control the placement.
-      AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, instr->deopt_id(),
+      AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, instr->deopt_id(),
                            instr->source());
     }
     AllocateRegistersLocally(instr);
@@ -545,7 +546,7 @@
   const Function& function =
       *code_source_map_builder_->inline_id_to_function()[inlining_id];
   ASSERT(instr->env() == nullptr ||
-         instr->env()->function().raw() == function.raw());
+         instr->env()->function().ptr() == function.ptr());
   const auto& script = Script::Handle(zone(), function.script());
   intptr_t line_nr;
   if (script.GetTokenLocation(source.token_pos, &line_nr)) {
@@ -802,7 +803,7 @@
   exception_handlers_list_->SetNeedsStackTrace(try_index);
 }
 
-void FlowGraphCompiler::AddDescriptor(PcDescriptorsLayout::Kind kind,
+void FlowGraphCompiler::AddDescriptor(UntaggedPcDescriptors::Kind kind,
                                       intptr_t pc_offset,
                                       intptr_t deopt_id,
                                       const InstructionSource& source,
@@ -810,7 +811,7 @@
                                       intptr_t yield_index) {
   code_source_map_builder_->NoteDescriptor(kind, pc_offset, source);
   // Don't emit deopt-descriptors in AOT mode.
-  if (FLAG_precompiled_mode && (kind == PcDescriptorsLayout::kDeopt)) return;
+  if (FLAG_precompiled_mode && (kind == UntaggedPcDescriptors::kDeopt)) return;
   // Use the token position of the original call in the root function if source
   // has an inlining id.
   const auto& root_pos = code_source_map_builder_->RootPosition(source);
@@ -819,7 +820,7 @@
 }
 
 // Uses current pc position and try-index.
-void FlowGraphCompiler::AddCurrentDescriptor(PcDescriptorsLayout::Kind kind,
+void FlowGraphCompiler::AddCurrentDescriptor(UntaggedPcDescriptors::Kind kind,
                                              intptr_t deopt_id,
                                              const InstructionSource& source) {
   AddDescriptor(kind, assembler()->CodeSize(), deopt_id, source,
@@ -1200,7 +1201,7 @@
 ArrayPtr FlowGraphCompiler::CreateDeoptInfo(compiler::Assembler* assembler) {
   // No deopt information if we precompile (no deoptimization allowed).
   if (FLAG_precompiled_mode) {
-    return Array::empty_array().raw();
+    return Array::empty_array().ptr();
   }
   // For functions with optional arguments, all incoming arguments are copied
   // to spill slots. The deoptimization environment does not track them.
@@ -1211,7 +1212,7 @@
 
   intptr_t deopt_info_table_size = DeoptTable::SizeFor(deopt_infos_.length());
   if (deopt_info_table_size == 0) {
-    return Object::empty_array().raw();
+    return Object::empty_array().ptr();
   } else {
     const Array& array =
         Array::Handle(Array::New(deopt_info_table_size, Heap::kOld));
@@ -1225,7 +1226,7 @@
           deopt_infos_[i]->reason(), deopt_infos_[i]->flags());
       DeoptTable::SetEntry(array, i, offset, info, reason_and_flags);
     }
-    return array.raw();
+    return array.ptr();
   }
 }
 
@@ -1255,8 +1256,8 @@
     // descriptor for IrregexpFunction.
     ASSERT(parsed_function().scope() == nullptr);
     var_descs = LocalVarDescriptors::New(1);
-    LocalVarDescriptorsLayout::VarInfo info;
-    info.set_kind(LocalVarDescriptorsLayout::kSavedCurrentContext);
+    UntaggedLocalVarDescriptors::VarInfo info;
+    info.set_kind(UntaggedLocalVarDescriptors::kSavedCurrentContext);
     info.scope_id = 0;
     info.begin_pos = TokenPosition::kMinSource;
     info.end_pos = TokenPosition::kMinSource;
@@ -1391,7 +1392,7 @@
 
 void FlowGraphCompiler::GenerateStubCall(const InstructionSource& source,
                                          const Code& stub,
-                                         PcDescriptorsLayout::Kind kind,
+                                         UntaggedPcDescriptors::Kind kind,
                                          LocationSummary* locs,
                                          intptr_t deopt_id,
                                          Environment* env) {
@@ -1485,16 +1486,16 @@
                             args_info.size_with_type_args, deopt_id, source,
                             locs, entry_kind);
   } else {
-    ICData& call_ic_data = ICData::ZoneHandle(zone(), ic_data.raw());
+    ICData& call_ic_data = ICData::ZoneHandle(zone(), ic_data.ptr());
     if (call_ic_data.IsNull()) {
       const intptr_t kNumArgsChecked = 0;
       call_ic_data =
           GetOrAddStaticCallICData(deopt_id, function, arguments_descriptor,
                                    kNumArgsChecked, rebind_rule)
-              ->raw();
+              ->ptr();
       call_ic_data = call_ic_data.Original();
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRewind, deopt_id, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kRewind, deopt_id, source);
     EmitUnoptimizedStaticCall(args_info.size_with_type_args, deopt_id, source,
                               locs, call_ic_data, entry_kind);
   }
@@ -1972,12 +1973,12 @@
       ((*deopt_id_to_ic_data_)[deopt_id] != NULL)) {
     const ICData* res = (*deopt_id_to_ic_data_)[deopt_id];
     ASSERT(res->deopt_id() == deopt_id);
-    ASSERT(res->target_name() == target_name.raw());
+    ASSERT(res->target_name() == target_name.ptr());
     ASSERT(res->NumArgsTested() == num_args_tested);
     ASSERT(res->TypeArgsLen() ==
            ArgumentsDescriptor(arguments_descriptor).TypeArgsLen());
     ASSERT(!res->is_static_call());
-    ASSERT(res->receivers_static_type() == receiver_type.raw());
+    ASSERT(res->receivers_static_type() == receiver_type.ptr());
     return res;
   }
 
@@ -2141,7 +2142,7 @@
       Function::Handle(zone, Resolver::ResolveDynamicForReceiverClass(
                                  cls, name, args_desc, allow_add));
   if (target_function.IsNull()) return false;
-  *fn_return = target_function.raw();
+  *fn_return = target_function.ptr();
   return true;
 }
 
@@ -2255,8 +2256,9 @@
     // Do not use the code from the function, but let the code be patched so
     // that we can record the outgoing edges to other code.
     const Function& function = *targets.TargetAt(smi_case)->target;
-    GenerateStaticDartCall(deopt_id, source_index, PcDescriptorsLayout::kOther,
-                           locs, function, entry_kind);
+    GenerateStaticDartCall(deopt_id, source_index,
+                           UntaggedPcDescriptors::kOther, locs, function,
+                           entry_kind);
     __ Drop(args_info.size_with_type_args);
     if (match_found != NULL) {
       __ Jump(match_found);
@@ -2305,8 +2307,9 @@
     // Do not use the code from the function, but let the code be patched so
     // that we can record the outgoing edges to other code.
     const Function& function = *targets.TargetAt(i)->target;
-    GenerateStaticDartCall(deopt_id, source_index, PcDescriptorsLayout::kOther,
-                           locs, function, entry_kind);
+    GenerateStaticDartCall(deopt_id, source_index,
+                           UntaggedPcDescriptors::kOther, locs, function,
+                           entry_kind);
     __ Drop(args_info.size_with_type_args);
     if (!is_last_check || add_megamorphic_call) {
       __ Jump(match_found);
@@ -2793,7 +2796,7 @@
     __ LoadUniqueObject(TypeTestABI::kDstTypeReg, type);
     __ LoadUniqueObject(TypeTestABI::kSubtypeTestCacheReg, test_cache);
     GenerateStubCall(source, StubCode::InstanceOf(),
-                     /*kind=*/PcDescriptorsLayout::kOther, locs, deopt_id);
+                     /*kind=*/UntaggedPcDescriptors::kOther, locs, deopt_id);
     __ Jump(&done, compiler::Assembler::kNearJump);
   }
   __ Bind(&is_not_instance);
@@ -2835,7 +2838,7 @@
   }
   GenerateBoolToJump(TypeTestABI::kSubtypeTestCacheResultReg, is_instance_lbl,
                      is_not_instance_lbl);
-  return type_test_cache.raw();
+  return type_test_cache.ptr();
 }
 
 // Generates an assignable check for a given object. Emits no code if the
@@ -2928,7 +2931,7 @@
   } else {
     GenerateIndirectTTSCall(assembler(), reg_with_type, sub_type_cache_index);
   }
-  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kOther, locs);
+  EmitCallsiteMetadata(source, deopt_id, UntaggedPcDescriptors::kOther, locs);
 }
 
 // Optimize assignable type check by adding inlined tests for:
@@ -3173,7 +3176,7 @@
     __ CallRuntime(runtime_entry_, num_args);
   }
   const intptr_t deopt_id = instruction()->deopt_id();
-  compiler->AddDescriptor(PcDescriptorsLayout::kOther,
+  compiler->AddDescriptor(UntaggedPcDescriptors::kOther,
                           compiler->assembler()->CodeSize(), deopt_id,
                           instruction()->source(), try_index_);
   AddMetadataForRuntimeCall(compiler);
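
The flow_graph_compiler.cc churn is almost entirely the enum scope rename: PcDescriptorsLayout::Kind becomes UntaggedPcDescriptors::Kind, with the enumerator names themselves unchanged. For orientation, a sketch of the shape implied by the call sites in this file; only enumerators that actually appear in this diff are listed, and their values and ordering here are invented:

```cpp
// Shape implied by the call sites above; enumerator values are invented,
// and the real value of kInvalidYieldIndex is not shown in this diff.
struct UntaggedPcDescriptors {
  enum Kind {
    kDeopt,            // deopt continuation point after a call
    kIcCall,           // instance call with an inline cache
    kUnoptStaticCall,  // static call in unoptimized code
    kRuntimeCall,      // call into the runtime
    kRewind,           // descriptor used when rewinding frames
    kBSSRelocation,    // BSS relocation entry
    kOther,
  };
  static const int kInvalidYieldIndex = -1;  // placeholder value
};
```
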
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler.h b/runtime/vm/compiler/backend/flow_graph_compiler.h
index f7390fa..7a145d1 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler.h
+++ b/runtime/vm/compiler/backend/flow_graph_compiler.h
@@ -619,27 +619,27 @@
 
   void GenerateStubCall(const InstructionSource& source,
                         const Code& stub,
-                        PcDescriptorsLayout::Kind kind,
+                        UntaggedPcDescriptors::Kind kind,
                         LocationSummary* locs,
                         intptr_t deopt_id = DeoptId::kNone,
                         Environment* env = nullptr);
 
   void GeneratePatchableCall(const InstructionSource& source,
                              const Code& stub,
-                             PcDescriptorsLayout::Kind kind,
+                             UntaggedPcDescriptors::Kind kind,
                              LocationSummary* locs);
 
   void GenerateDartCall(intptr_t deopt_id,
                         const InstructionSource& source,
                         const Code& stub,
-                        PcDescriptorsLayout::Kind kind,
+                        UntaggedPcDescriptors::Kind kind,
                         LocationSummary* locs,
                         Code::EntryKind entry_kind = Code::EntryKind::kNormal);
 
   void GenerateStaticDartCall(
       intptr_t deopt_id,
       const InstructionSource& source,
-      PcDescriptorsLayout::Kind kind,
+      UntaggedPcDescriptors::Kind kind,
       LocationSummary* locs,
       const Function& target,
       Code::EntryKind entry_kind = Code::EntryKind::kNormal);
@@ -802,7 +802,7 @@
   // `pending_deoptimization_env`.
   void EmitCallsiteMetadata(const InstructionSource& source,
                             intptr_t deopt_id,
-                            PcDescriptorsLayout::Kind kind,
+                            UntaggedPcDescriptors::Kind kind,
                             LocationSummary* locs,
                             Environment* env = nullptr);
 
@@ -841,16 +841,16 @@
                            const Array& handler_types,
                            bool needs_stacktrace);
   void SetNeedsStackTrace(intptr_t try_index);
-  void AddCurrentDescriptor(PcDescriptorsLayout::Kind kind,
+  void AddCurrentDescriptor(UntaggedPcDescriptors::Kind kind,
                             intptr_t deopt_id,
                             const InstructionSource& source);
   void AddDescriptor(
-      PcDescriptorsLayout::Kind kind,
+      UntaggedPcDescriptors::Kind kind,
       intptr_t pc_offset,
       intptr_t deopt_id,
       const InstructionSource& source,
       intptr_t try_index,
-      intptr_t yield_index = PcDescriptorsLayout::kInvalidYieldIndex);
+      intptr_t yield_index = UntaggedPcDescriptors::kInvalidYieldIndex);
 
   // Add NullCheck information for the current PC.
   void AddNullCheck(const InstructionSource& source, const String& name);
@@ -952,7 +952,7 @@
   void AddStubCallTarget(const Code& code);
   void AddDispatchTableCallTarget(const compiler::TableSelector* selector);
 
-  ArrayPtr edge_counters_array() const { return edge_counters_array_.raw(); }
+  ArrayPtr edge_counters_array() const { return edge_counters_array_.ptr(); }
 
   ArrayPtr InliningIdToFunction() const;
 
@@ -1134,7 +1134,7 @@
   void CompactBlocks();
 
   bool IsListClass(const Class& cls) const {
-    return cls.raw() == list_class_.raw();
+    return cls.ptr() == list_class_.ptr();
   }
 
   void EmitSourceLine(Instruction* instr);
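
One header-side idiom worth calling out: the compiler stores GC-safe handles as members (e.g. edge_counters_array_) but exposes them as tagged pointers, so the one-line accessors now end in .ptr(). A minimal sketch of that store-a-handle/return-a-pointer pattern, again with toy types:

```cpp
// Toy illustration of the accessor pattern; Handle and ArrayPtr are
// stand-ins for the real VM handle and tagged-pointer types.
struct Obj {};
using ArrayPtr = Obj*;

struct Handle {
  ArrayPtr p = nullptr;
  ArrayPtr ptr() const { return p; }
  Handle& operator=(ArrayPtr q) { p = q; return *this; }
};

class CompilerSketch {
 public:
  // Callers receive the tagged pointer; the handle keeps the array alive.
  ArrayPtr edge_counters_array() const { return edge_counters_array_.ptr(); }
  void set_edge_counters_array(ArrayPtr a) { edge_counters_array_ = a; }
 private:
  Handle edge_counters_array_;
};
```
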
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc b/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
index 3d45acb..d9a48c0 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
@@ -429,7 +429,7 @@
 
 void FlowGraphCompiler::GeneratePatchableCall(const InstructionSource& source,
                                               const Code& stub,
-                                              PcDescriptorsLayout::Kind kind,
+                                              UntaggedPcDescriptors::Kind kind,
                                               LocationSummary* locs) {
   __ BranchLinkPatchable(stub);
   EmitCallsiteMetadata(source, DeoptId::kNone, kind, locs);
@@ -438,7 +438,7 @@
 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
                                          const InstructionSource& source,
                                          const Code& stub,
-                                         PcDescriptorsLayout::Kind kind,
+                                         UntaggedPcDescriptors::Kind kind,
                                          LocationSummary* locs,
                                          Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
@@ -448,7 +448,7 @@
 
 void FlowGraphCompiler::GenerateStaticDartCall(intptr_t deopt_id,
                                                const InstructionSource& source,
-                                               PcDescriptorsLayout::Kind kind,
+                                               UntaggedPcDescriptors::Kind kind,
                                                LocationSummary* locs,
                                                const Function& target,
                                                Code::EntryKind entry_kind) {
@@ -476,7 +476,7 @@
                                             intptr_t argument_count,
                                             LocationSummary* locs) {
   __ CallRuntime(entry, argument_count);
-  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kOther, locs);
+  EmitCallsiteMetadata(source, deopt_id, UntaggedPcDescriptors::kOther, locs);
 }
 
 void FlowGraphCompiler::EmitEdgeCounter(intptr_t edge_id) {
@@ -522,7 +522,7 @@
   __ LoadObject(R8, parsed_function().function());
   __ LoadFromOffset(R0, SP, (ic_data.SizeWithoutTypeArgs() - 1) * kWordSize);
   __ LoadUniqueObject(R9, ic_data);
-  GenerateDartCall(deopt_id, source, stub, PcDescriptorsLayout::kIcCall, locs,
+  GenerateDartCall(deopt_id, source, stub, UntaggedPcDescriptors::kIcCall, locs,
                    entry_kind);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
@@ -545,7 +545,7 @@
           ? Code::entry_point_offset(Code::EntryKind::kMonomorphic)
           : Code::entry_point_offset(Code::EntryKind::kMonomorphicUnchecked);
   __ Call(compiler::FieldAddress(CODE_REG, entry_point_offset));
-  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kIcCall, locs);
+  EmitCallsiteMetadata(source, deopt_id, UntaggedPcDescriptors::kIcCall, locs);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
 
@@ -599,16 +599,16 @@
     if (try_index == kInvalidTryIndex) {
       try_index = CurrentTryIndex();
     }
-    AddDescriptor(PcDescriptorsLayout::kOther, assembler()->CodeSize(),
+    AddDescriptor(UntaggedPcDescriptors::kOther, assembler()->CodeSize(),
                   DeoptId::kNone, source, try_index);
   } else if (is_optimizing()) {
-    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kOther, DeoptId::kNone, source);
     AddDeoptIndexAtCall(deopt_id_after);
   } else {
-    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kOther, DeoptId::kNone, source);
     // Add deoptimization continuation point after the call and before the
     // arguments are removed.
-    AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id_after, source);
   }
   RecordCatchEntryMoves(pending_deoptimization_env_, try_index);
   __ Drop(args_desc.SizeWithTypeArgs());
@@ -655,7 +655,7 @@
   __ LoadUniqueObject(R9, data);
   CLOBBERS_LR(__ blx(LR));
 
-  EmitCallsiteMetadata(source, DeoptId::kNone, PcDescriptorsLayout::kOther,
+  EmitCallsiteMetadata(source, DeoptId::kNone, UntaggedPcDescriptors::kOther,
                        locs);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
@@ -672,7 +672,7 @@
       StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
   __ LoadObject(R9, ic_data);
   GenerateDartCall(deopt_id, source, stub,
-                   PcDescriptorsLayout::kUnoptStaticCall, locs, entry_kind);
+                   UntaggedPcDescriptors::kUnoptStaticCall, locs, entry_kind);
   __ Drop(size_with_type_args);
 }
 
@@ -695,7 +695,7 @@
   }
   // Do not use the code from the function, but let the code be patched so that
   // we can record the outgoing edges to other code.
-  GenerateStaticDartCall(deopt_id, source, PcDescriptorsLayout::kOther, locs,
+  GenerateStaticDartCall(deopt_id, source, UntaggedPcDescriptors::kOther, locs,
                          function, entry_kind);
   __ Drop(size_with_type_args);
 }
@@ -744,7 +744,7 @@
     } else {
       __ BranchLinkPatchable(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmp, we need Z computed).
     __ Drop(1);   // Discard constant.
     __ Pop(reg);  // Restore 'reg'.
@@ -768,7 +768,7 @@
     } else {
       __ BranchLinkPatchable(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmp, we need Z computed).
     __ Pop(right);
     __ Pop(left);
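
Each backend repeats the post-call bookkeeping shown in the hunk around AddDeoptIndexAtCall above: optimized code records a deopt index at the call site, while unoptimized code registers a kDeopt descriptor for the deopt id after the call, so execution can resume once the arguments are dropped. A condensed, stubbed-out sketch of that branch (the emitters here just log; only the control flow mirrors the diff):

```cpp
// Stubbed sketch of the per-backend post-call branch; kNoDeoptId stands in
// for DeoptId::kNone, and the emitters are fake.
#include <cstdio>

enum Kind { kOther, kDeopt };
static const int kNoDeoptId = -1;

static void AddCurrentDescriptor(Kind k, int deopt_id) {
  std::printf("descriptor kind=%d deopt_id=%d\n", k, deopt_id);
}
static void AddDeoptIndexAtCall(int deopt_id) {
  std::printf("deopt index at call: %d\n", deopt_id);
}

static void AfterCall(bool is_optimizing, int deopt_id_after) {
  AddCurrentDescriptor(kOther, kNoDeoptId);
  if (is_optimizing) {
    AddDeoptIndexAtCall(deopt_id_after);  // lazy-deopt metadata
  } else {
    // Continuation point after the call, before arguments are removed.
    AddCurrentDescriptor(kDeopt, deopt_id_after);
  }
}

int main() {
  AfterCall(/*is_optimizing=*/true, 42);
  AfterCall(/*is_optimizing=*/false, 42);
  return 0;
}
```
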
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc b/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
index 33d1808..40347c7 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
@@ -422,7 +422,7 @@
 
 void FlowGraphCompiler::GeneratePatchableCall(const InstructionSource& source,
                                               const Code& stub,
-                                              PcDescriptorsLayout::Kind kind,
+                                              UntaggedPcDescriptors::Kind kind,
                                               LocationSummary* locs) {
   __ BranchLinkPatchable(stub);
   EmitCallsiteMetadata(source, DeoptId::kNone, kind, locs);
@@ -431,7 +431,7 @@
 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
                                          const InstructionSource& source,
                                          const Code& stub,
-                                         PcDescriptorsLayout::Kind kind,
+                                         UntaggedPcDescriptors::Kind kind,
                                          LocationSummary* locs,
                                          Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
@@ -441,7 +441,7 @@
 
 void FlowGraphCompiler::GenerateStaticDartCall(intptr_t deopt_id,
                                                const InstructionSource& source,
-                                               PcDescriptorsLayout::Kind kind,
+                                               UntaggedPcDescriptors::Kind kind,
                                                LocationSummary* locs,
                                                const Function& target,
                                                Code::EntryKind entry_kind) {
@@ -469,7 +469,7 @@
                                             intptr_t argument_count,
                                             LocationSummary* locs) {
   __ CallRuntime(entry, argument_count);
-  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kOther, locs);
+  EmitCallsiteMetadata(source, deopt_id, UntaggedPcDescriptors::kOther, locs);
 }
 
 void FlowGraphCompiler::EmitEdgeCounter(intptr_t edge_id) {
@@ -506,7 +506,7 @@
   __ LoadObject(R6, parsed_function().function());
   __ LoadFromOffset(R0, SP, (ic_data.SizeWithoutTypeArgs() - 1) * kWordSize);
   __ LoadUniqueObject(R5, ic_data);
-  GenerateDartCall(deopt_id, source, stub, PcDescriptorsLayout::kIcCall, locs,
+  GenerateDartCall(deopt_id, source, stub, UntaggedPcDescriptors::kIcCall, locs,
                    entry_kind);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
@@ -535,7 +535,7 @@
           ? Code::entry_point_offset(Code::EntryKind::kMonomorphic)
           : Code::entry_point_offset(Code::EntryKind::kMonomorphicUnchecked);
   __ Call(compiler::FieldAddress(CODE_REG, entry_point_offset));
-  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kIcCall, locs);
+  EmitCallsiteMetadata(source, deopt_id, UntaggedPcDescriptors::kIcCall, locs);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
 
@@ -585,16 +585,16 @@
     if (try_index == kInvalidTryIndex) {
       try_index = CurrentTryIndex();
     }
-    AddDescriptor(PcDescriptorsLayout::kOther, assembler()->CodeSize(),
+    AddDescriptor(UntaggedPcDescriptors::kOther, assembler()->CodeSize(),
                   DeoptId::kNone, source, try_index);
   } else if (is_optimizing()) {
-    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kOther, DeoptId::kNone, source);
     AddDeoptIndexAtCall(deopt_id_after);
   } else {
-    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kOther, DeoptId::kNone, source);
     // Add deoptimization continuation point after the call and before the
     // arguments are removed.
-    AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id_after, source);
   }
   RecordCatchEntryMoves(pending_deoptimization_env_, try_index);
   __ Drop(args_desc.SizeWithTypeArgs());
@@ -648,7 +648,7 @@
   }
   CLOBBERS_LR(__ blr(LR));
 
-  EmitCallsiteMetadata(source, DeoptId::kNone, PcDescriptorsLayout::kOther,
+  EmitCallsiteMetadata(source, DeoptId::kNone, UntaggedPcDescriptors::kOther,
                        locs);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
@@ -665,7 +665,7 @@
       StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
   __ LoadObject(R5, ic_data);
   GenerateDartCall(deopt_id, source, stub,
-                   PcDescriptorsLayout::kUnoptStaticCall, locs, entry_kind);
+                   UntaggedPcDescriptors::kUnoptStaticCall, locs, entry_kind);
   __ Drop(size_with_type_args);
 }
 
@@ -688,7 +688,7 @@
   }
   // Do not use the code from the function, but let the code be patched so that
   // we can record the outgoing edges to other code.
-  GenerateStaticDartCall(deopt_id, source, PcDescriptorsLayout::kOther, locs,
+  GenerateStaticDartCall(deopt_id, source, UntaggedPcDescriptors::kOther, locs,
                          function, entry_kind);
   __ Drop(size_with_type_args);
 }
@@ -723,7 +723,7 @@
     } else {
       __ BranchLinkPatchable(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmp, we need Z computed).
     // Discard constant.
     // Restore 'reg'.
@@ -747,7 +747,7 @@
     } else {
       __ BranchLinkPatchable(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmp, we need Z computed).
     __ PopPair(right, left);
   } else {
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc b/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc
index 272dcd3..d89ec9a 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc
@@ -249,7 +249,7 @@
   __ Drop(1);
   GenerateBoolToJump(TypeTestABI::kSubtypeTestCacheResultReg, is_instance_lbl,
                      is_not_instance_lbl);
-  return type_test_cache.raw();
+  return type_test_cache.ptr();
 }
 
 // If instanceof type test cannot be performed successfully at compile time and
@@ -364,7 +364,7 @@
     GenerateStubCall(source,
                      null_safety ? StubCode::TypeIsTopTypeForSubtypingNullSafe()
                                  : StubCode::TypeIsTopTypeForSubtyping(),
-                     PcDescriptorsLayout::kOther, locs, deopt_id);
+                     UntaggedPcDescriptors::kOther, locs, deopt_id);
     // TypeTestABI::kSubtypeTestCacheReg is 0 if the type is a top type.
     __ BranchIfZero(TypeTestABI::kSubtypeTestCacheReg, &is_assignable,
                     compiler::Assembler::kNearJump);
@@ -372,7 +372,7 @@
     GenerateStubCall(source,
                      null_safety ? StubCode::NullIsAssignableToTypeNullSafe()
                                  : StubCode::NullIsAssignableToType(),
-                     PcDescriptorsLayout::kOther, locs, deopt_id);
+                     UntaggedPcDescriptors::kOther, locs, deopt_id);
     // TypeTestABI::kSubtypeTestCacheReg is 0 if the object is null and is
     // assignable.
     __ BranchIfZero(TypeTestABI::kSubtypeTestCacheReg, &is_assignable,
@@ -539,7 +539,7 @@
 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
                                          const InstructionSource& source,
                                          const Code& stub,
-                                         PcDescriptorsLayout::Kind kind,
+                                         UntaggedPcDescriptors::Kind kind,
                                          LocationSummary* locs,
                                          Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
@@ -549,7 +549,7 @@
 
 void FlowGraphCompiler::GenerateStaticDartCall(intptr_t deopt_id,
                                                const InstructionSource& source,
-                                               PcDescriptorsLayout::Kind kind,
+                                               UntaggedPcDescriptors::Kind kind,
                                                LocationSummary* locs,
                                                const Function& target,
                                                Code::EntryKind entry_kind) {
@@ -566,7 +566,7 @@
                                             intptr_t argument_count,
                                             LocationSummary* locs) {
   __ CallRuntime(entry, argument_count);
-  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kOther, locs);
+  EmitCallsiteMetadata(source, deopt_id, UntaggedPcDescriptors::kOther, locs);
 }
 
 void FlowGraphCompiler::EmitUnoptimizedStaticCall(
@@ -581,7 +581,7 @@
       StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
   __ LoadObject(ECX, ic_data);
   GenerateDartCall(deopt_id, source, stub,
-                   PcDescriptorsLayout::kUnoptStaticCall, locs, entry_kind);
+                   UntaggedPcDescriptors::kUnoptStaticCall, locs, entry_kind);
   __ Drop(size_with_type_args);
 }
 
@@ -618,7 +618,7 @@
   __ movl(EBX, compiler::Address(
                    ESP, (ic_data.SizeWithoutTypeArgs() - 1) * kWordSize));
   __ LoadObject(ECX, ic_data);
-  GenerateDartCall(deopt_id, source, stub, PcDescriptorsLayout::kIcCall, locs,
+  GenerateDartCall(deopt_id, source, stub, UntaggedPcDescriptors::kIcCall, locs,
                    entry_kind);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
@@ -643,7 +643,7 @@
           ? Code::entry_point_offset(Code::EntryKind::kMonomorphic)
           : Code::entry_point_offset(Code::EntryKind::kMonomorphicUnchecked);
   __ call(compiler::FieldAddress(CODE_REG, entry_point_offset));
-  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kIcCall, locs);
+  EmitCallsiteMetadata(source, deopt_id, UntaggedPcDescriptors::kIcCall, locs);
   __ Drop(ic_data.SizeWithTypeArgs());
 }
 
@@ -670,7 +670,7 @@
   __ call(compiler::FieldAddress(
       CODE_REG, Code::entry_point_offset(Code::EntryKind::kMonomorphic)));
 
-  AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, source);
+  AddCurrentDescriptor(UntaggedPcDescriptors::kOther, DeoptId::kNone, source);
   RecordSafepoint(locs, slow_path_argument_count);
   const intptr_t deopt_id_after = DeoptId::ToDeoptAfter(deopt_id);
   // Precompilation not implemented on ia32 platform.
@@ -680,7 +680,7 @@
   } else {
     // Add deoptimization continuation point after the call and before the
     // arguments are removed.
-    AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id_after, source);
   }
   RecordCatchEntryMoves(pending_deoptimization_env_, try_index);
   __ Drop(args_desc.SizeWithTypeArgs());
@@ -712,7 +712,7 @@
   }
   // Do not use the code from the function, but let the code be patched so that
   // we can record the outgoing edges to other code.
-  GenerateStaticDartCall(deopt_id, source, PcDescriptorsLayout::kOther, locs,
+  GenerateStaticDartCall(deopt_id, source, UntaggedPcDescriptors::kOther, locs,
                          function, entry_kind);
   __ Drop(size_with_type_args);
 }
@@ -747,7 +747,7 @@
     } else {
       __ Call(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmpl, we need ZF computed).
     __ popl(reg);  // Discard constant.
     __ popl(reg);  // Restore 'reg'.
@@ -771,7 +771,7 @@
     } else {
       __ Call(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmpl, we need ZF computed).
     __ popl(right);
     __ popl(left);
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc b/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
index 25444d0..89f2a24 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
@@ -425,7 +425,7 @@
 
 void FlowGraphCompiler::GeneratePatchableCall(const InstructionSource& source,
                                               const Code& stub,
-                                              PcDescriptorsLayout::Kind kind,
+                                              UntaggedPcDescriptors::Kind kind,
                                               LocationSummary* locs) {
   __ CallPatchable(stub);
   EmitCallsiteMetadata(source, DeoptId::kNone, kind, locs);
@@ -434,7 +434,7 @@
 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
                                          const InstructionSource& source,
                                          const Code& stub,
-                                         PcDescriptorsLayout::Kind kind,
+                                         UntaggedPcDescriptors::Kind kind,
                                          LocationSummary* locs,
                                          Code::EntryKind entry_kind) {
   ASSERT(CanCallDart());
@@ -444,7 +444,7 @@
 
 void FlowGraphCompiler::GenerateStaticDartCall(intptr_t deopt_id,
                                                const InstructionSource& source,
-                                               PcDescriptorsLayout::Kind kind,
+                                               UntaggedPcDescriptors::Kind kind,
                                                LocationSummary* locs,
                                                const Function& target,
                                                Code::EntryKind entry_kind) {
@@ -472,7 +472,7 @@
                                             intptr_t argument_count,
                                             LocationSummary* locs) {
   __ CallRuntime(entry, argument_count);
-  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kOther, locs);
+  EmitCallsiteMetadata(source, deopt_id, UntaggedPcDescriptors::kOther, locs);
 }
 
 void FlowGraphCompiler::EmitUnoptimizedStaticCall(
@@ -487,7 +487,7 @@
       StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
   __ LoadObject(RBX, ic_data);
   GenerateDartCall(deopt_id, source, stub,
-                   PcDescriptorsLayout::kUnoptStaticCall, locs, entry_kind);
+                   UntaggedPcDescriptors::kUnoptStaticCall, locs, entry_kind);
   __ Drop(size_with_type_args, RCX);
 }
 
@@ -525,7 +525,7 @@
   __ movq(RDX, compiler::Address(
                    RSP, (ic_data.SizeWithoutTypeArgs() - 1) * kWordSize));
   __ LoadUniqueObject(RBX, ic_data);
-  GenerateDartCall(deopt_id, source, stub, PcDescriptorsLayout::kIcCall, locs,
+  GenerateDartCall(deopt_id, source, stub, UntaggedPcDescriptors::kIcCall, locs,
                    entry_kind);
   __ Drop(ic_data.SizeWithTypeArgs(), RCX);
 }
@@ -550,7 +550,7 @@
           ? Code::entry_point_offset(Code::EntryKind::kMonomorphic)
           : Code::entry_point_offset(Code::EntryKind::kMonomorphicUnchecked);
   __ call(compiler::FieldAddress(CODE_REG, entry_point_offset));
-  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kIcCall, locs);
+  EmitCallsiteMetadata(source, deopt_id, UntaggedPcDescriptors::kIcCall, locs);
   __ Drop(ic_data.SizeWithTypeArgs(), RCX);
 }
 
@@ -601,16 +601,16 @@
     if (try_index == kInvalidTryIndex) {
       try_index = CurrentTryIndex();
     }
-    AddDescriptor(PcDescriptorsLayout::kOther, assembler()->CodeSize(),
+    AddDescriptor(UntaggedPcDescriptors::kOther, assembler()->CodeSize(),
                   DeoptId::kNone, source, try_index);
   } else if (is_optimizing()) {
-    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kOther, DeoptId::kNone, source);
     AddDeoptIndexAtCall(deopt_id_after);
   } else {
-    AddCurrentDescriptor(PcDescriptorsLayout::kOther, DeoptId::kNone, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kOther, DeoptId::kNone, source);
     // Add deoptimization continuation point after the call and before the
     // arguments are removed.
-    AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id_after, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id_after, source);
   }
   RecordCatchEntryMoves(pending_deoptimization_env_, try_index);
   __ Drop(args_desc.SizeWithTypeArgs(), RCX);
@@ -653,7 +653,7 @@
   __ LoadUniqueObject(RBX, data);
   __ call(RCX);
 
-  EmitCallsiteMetadata(source, deopt_id, PcDescriptorsLayout::kOther, locs);
+  EmitCallsiteMetadata(source, deopt_id, UntaggedPcDescriptors::kOther, locs);
   __ Drop(ic_data.SizeWithTypeArgs(), RCX);
 }
 
@@ -676,7 +676,7 @@
   }
   // Do not use the code from the function, but let the code be patched so that
   // we can record the outgoing edges to other code.
-  GenerateStaticDartCall(deopt_id, source, PcDescriptorsLayout::kOther, locs,
+  GenerateStaticDartCall(deopt_id, source, UntaggedPcDescriptors::kOther, locs,
                          function, entry_kind);
   __ Drop(size_with_type_args, RCX);
 }
@@ -720,7 +720,7 @@
     } else {
       __ CallPatchable(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmpq, we need ZF computed).
     __ popq(reg);  // Discard constant.
     __ popq(reg);  // Restore 'reg'.
@@ -744,7 +744,7 @@
     } else {
       __ CallPatchable(StubCode::UnoptimizedIdenticalWithNumberCheck());
     }
-    AddCurrentDescriptor(PcDescriptorsLayout::kRuntimeCall, deopt_id, source);
+    AddCurrentDescriptor(UntaggedPcDescriptors::kRuntimeCall, deopt_id, source);
     // Stub returns result in flags (result of a cmpq, we need ZF computed).
     __ popq(right);
     __ popq(left);
diff --git a/runtime/vm/compiler/backend/il.cc b/runtime/vm/compiler/backend/il.cc
index 318ee1f..a276c17 100644
--- a/runtime/vm/compiler/backend/il.cc
+++ b/runtime/vm/compiler/backend/il.cc
@@ -217,7 +217,7 @@
       test_succeeded = cls_type.IsSubtypeOf(dst_type, Heap::kNew);
     } else {
       while (!cls.IsObjectClass()) {
-        if (cls.raw() == klass.raw()) {
+        if (cls.ptr() == klass.ptr()) {
           test_succeeded = true;
           break;
         }
@@ -1018,7 +1018,7 @@
 void AllocateTypedDataInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   const Code& stub = Code::ZoneHandle(
       compiler->zone(), StubCode::GetAllocationStubForTypedData(class_id()));
-  compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), stub, UntaggedPcDescriptors::kOther,
                              locs());
 }
 
@@ -1058,15 +1058,15 @@
 }
 
 bool GuardFieldClassInstr::AttributesEqual(Instruction* other) const {
-  return field().raw() == other->AsGuardFieldClass()->field().raw();
+  return field().ptr() == other->AsGuardFieldClass()->field().ptr();
 }
 
 bool GuardFieldLengthInstr::AttributesEqual(Instruction* other) const {
-  return field().raw() == other->AsGuardFieldLength()->field().raw();
+  return field().ptr() == other->AsGuardFieldLength()->field().ptr();
 }
 
 bool GuardFieldTypeInstr::AttributesEqual(Instruction* other) const {
-  return field().raw() == other->AsGuardFieldType()->field().raw();
+  return field().ptr() == other->AsGuardFieldType()->field().ptr();
 }
 
 Instruction* AssertSubtypeInstr::Canonicalize(FlowGraph* flow_graph) {
@@ -1086,9 +1086,9 @@
             ? TypeArguments::null_type_arguments()
             : TypeArguments::Cast(function_type_arguments()->BoundConstant());
     auto& constant_sub_type = AbstractType::Handle(
-        Z, AbstractType::Cast(sub_type()->BoundConstant()).raw());
+        Z, AbstractType::Cast(sub_type()->BoundConstant()).ptr());
     auto& constant_super_type = AbstractType::Handle(
-        Z, AbstractType::Cast(super_type()->BoundConstant()).raw());
+        Z, AbstractType::Cast(super_type()->BoundConstant()).ptr());
 
     ASSERT(!constant_super_type.IsTypeRef());
     ASSERT(!constant_sub_type.IsTypeRef());
@@ -1132,13 +1132,13 @@
 
 bool LoadStaticFieldInstr::AttributesEqual(Instruction* other) const {
   ASSERT(IsFieldInitialized());
-  return field().raw() == other->AsLoadStaticField()->field().raw();
+  return field().ptr() == other->AsLoadStaticField()->field().ptr();
 }
 
 bool LoadStaticFieldInstr::IsFieldInitialized() const {
   const Field& field = this->field();
-  return (field.StaticValue() != Object::sentinel().raw()) &&
-         (field.StaticValue() != Object::transition_sentinel().raw());
+  return (field.StaticValue() != Object::sentinel().ptr()) &&
+         (field.StaticValue() != Object::transition_sentinel().ptr());
 }
 
 Definition* LoadStaticFieldInstr::Canonicalize(FlowGraph* flow_graph) {
@@ -1167,7 +1167,7 @@
   // values, and sentinel values are canonical by construction and so we skip
   // them here.
   if (!value.IsNull() && !value.IsSmi() && value.IsInstance() &&
-      !value.IsCanonical() && (value.raw() != Object::sentinel().raw())) {
+      !value.IsCanonical() && (value.ptr() != Object::sentinel().ptr())) {
     // The only allowed type for which IsCanonical() never answers true is
     // TypeParameter. (They are treated as canonical due to how they are
     // created, but there is no way to canonicalize a new TypeParameter
@@ -1200,7 +1200,7 @@
 bool ConstantInstr::AttributesEqual(Instruction* other) const {
   ConstantInstr* other_constant = other->AsConstant();
   ASSERT(other_constant != NULL);
-  return (value().raw() == other_constant->value().raw() &&
+  return (value().ptr() == other_constant->value().ptr() &&
           representation() == other_constant->representation());
 }
 
@@ -2880,10 +2880,10 @@
   // Check that the instance really has the field which we
   // are trying to load from.
   Class& cls = Class::Handle(instance.clazz());
-  while (cls.raw() != Class::null() && cls.raw() != field.Owner()) {
+  while (cls.ptr() != Class::null() && cls.ptr() != field.Owner()) {
     cls = cls.SuperClass();
   }
-  if (cls.raw() != field.Owner()) {
+  if (cls.ptr() != field.Owner()) {
     // Failed to find the field in class or its superclasses.
     return false;
   }
@@ -3070,7 +3070,7 @@
 
   if (instantiator_type_arguments()->BindsToConstant()) {
     const Object& val = instantiator_type_arguments()->BoundConstant();
-    instantiator_type_args = (val.raw() == TypeArguments::null())
+    instantiator_type_args = (val.ptr() == TypeArguments::null())
                                  ? &TypeArguments::null_type_arguments()
                                  : &TypeArguments::Cast(val);
   }
@@ -3078,7 +3078,7 @@
   if (function_type_arguments()->BindsToConstant()) {
     const Object& val = function_type_arguments()->BoundConstant();
     function_type_args =
-        (val.raw() == TypeArguments::null())
+        (val.ptr() == TypeArguments::null())
             ? &TypeArguments::null_type_arguments()
             : &TypeArguments::Cast(function_type_arguments()->BoundConstant());
   }
@@ -3463,10 +3463,10 @@
   PassiveObject& constant = PassiveObject::Handle();
   Value* other = NULL;
   if (compare->right()->BindsToConstant()) {
-    constant = compare->right()->BoundConstant().raw();
+    constant = compare->right()->BoundConstant().ptr();
     other = compare->left();
   } else if (compare->left()->BindsToConstant()) {
-    constant = compare->left()->BoundConstant().raw();
+    constant = compare->left()->BoundConstant().ptr();
     other = compare->right();
   } else {
     return compare;
@@ -3476,17 +3476,17 @@
   Definition* other_defn = other->definition();
   Token::Kind kind = compare->kind();
   // Handle e === true.
-  if ((kind == Token::kEQ_STRICT) && (constant.raw() == Bool::True().raw()) &&
+  if ((kind == Token::kEQ_STRICT) && (constant.ptr() == Bool::True().ptr()) &&
       can_merge) {
     return other_defn;
   }
   // Handle e !== false.
-  if ((kind == Token::kNE_STRICT) && (constant.raw() == Bool::False().raw()) &&
+  if ((kind == Token::kNE_STRICT) && (constant.ptr() == Bool::False().ptr()) &&
       can_merge) {
     return other_defn;
   }
   // Handle e !== true.
-  if ((kind == Token::kNE_STRICT) && (constant.raw() == Bool::True().raw()) &&
+  if ((kind == Token::kNE_STRICT) && (constant.ptr() == Bool::True().ptr()) &&
       other_defn->IsComparison() && can_merge &&
       other_defn->HasOnlyUse(other)) {
     ComparisonInstr* comp = other_defn->AsComparison();
@@ -3496,7 +3496,7 @@
     }
   }
   // Handle e === false.
-  if ((kind == Token::kEQ_STRICT) && (constant.raw() == Bool::False().raw()) &&
+  if ((kind == Token::kEQ_STRICT) && (constant.ptr() == Bool::False().ptr()) &&
       other_defn->IsComparison() && can_merge &&
       other_defn->HasOnlyUse(other)) {
     ComparisonInstr* comp = other_defn->AsComparison();
@@ -3902,7 +3902,7 @@
   CallTargets* targets = new (zone) CallTargets(zone);
   const intptr_t count = 1;
   targets->cid_ranges_.Add(new (zone) TargetInfo(
-      receiver_cid, receiver_cid, &Function::ZoneHandle(zone, target.raw()),
+      receiver_cid, receiver_cid, &Function::ZoneHandle(zone, target.ptr()),
       count, StaticTypeExactnessState::NotTracking()));
   return targets;
 }
@@ -3950,7 +3950,7 @@
       bool class_is_abstract = false;
       if (FlowGraphCompiler::LookupMethodFor(i, name, args_desc, &fn,
                                              &class_is_abstract) &&
-          fn.raw() == target.raw()) {
+          fn.ptr() == target.ptr()) {
         if (!class_is_abstract) {
           target_info->cid_start = i;
           target_info->exactness = StaticTypeExactnessState::NotTracking();
@@ -3980,7 +3980,7 @@
       bool class_is_abstract = false;
       if (FlowGraphCompiler::LookupMethodFor(i, name, args_desc, &fn,
                                              &class_is_abstract) &&
-          fn.raw() == target.raw()) {
+          fn.ptr() == target.ptr()) {
         cid_end_including_abstract = i;
         if (!class_is_abstract) {
           target_info->cid_end = i;
@@ -3997,7 +3997,7 @@
     if ((cid_end_including_abstract > target_info->cid_end) &&
         (idx < length - 1) &&
         ((cid_end_including_abstract + 1) == targets[idx + 1].cid_start) &&
-        (target.raw() == targets.TargetAt(idx + 1)->target->raw())) {
+        (target.ptr() == targets.TargetAt(idx + 1)->target->ptr())) {
       target_info->cid_end = cid_end_including_abstract;
       target_info->exactness = StaticTypeExactnessState::NotTracking();
     }
@@ -4019,7 +4019,7 @@
   for (int src = 1; src < length(); src++) {
     const Function& target = *TargetAt(dest)->target;
     if (TargetAt(dest)->cid_end + 1 >= TargetAt(src)->cid_start &&
-        target.raw() == TargetAt(src)->target->raw() &&
+        target.ptr() == TargetAt(src)->target->ptr() &&
         !target.is_polymorphic_target()) {
       TargetAt(dest)->cid_end = TargetAt(src)->cid_end;
       TargetAt(dest)->count += TargetAt(src)->count;
@@ -4066,7 +4066,7 @@
 void JoinEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   __ Bind(compiler->GetJumpLabel(this));
   if (!compiler->is_optimizing()) {
-    compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, GetDeoptId(),
+    compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, GetDeoptId(),
                                    InstructionSource());
   }
   if (HasParallelMove()) {
@@ -4093,7 +4093,7 @@
     // The deoptimization descriptor points after the edge counter code for
     // uniformity with ARM, where we can reuse pattern matching code that
     // matches backwards from the end of the pattern.
-    compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, GetDeoptId(),
+    compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, GetDeoptId(),
                                    InstructionSource());
   }
   if (HasParallelMove()) {
@@ -4167,7 +4167,7 @@
     // The deoptimization descriptor points after the edge counter code for
     // uniformity with ARM, where we can reuse pattern matching code that
     // matches backwards from the end of the pattern.
-    compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, GetDeoptId(),
+    compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, GetDeoptId(),
                                    InstructionSource());
   }
   if (HasParallelMove()) {
@@ -4340,7 +4340,7 @@
     const auto& init_static_field_stub = Code::ZoneHandle(
         compiler->zone(), object_store->init_static_field_stub());
     compiler->GenerateStubCall(source(), init_static_field_stub,
-                               /*kind=*/PcDescriptorsLayout::kOther, locs(),
+                               /*kind=*/UntaggedPcDescriptors::kOther, locs(),
                                deopt_id());
     __ Bind(&no_call);
   }
@@ -4400,7 +4400,7 @@
   // so the deoptimization environment has to be adjusted.
   // This adjustment is done in FlowGraph::AttachEnvironment.
   compiler->GenerateStubCall(source(), stub,
-                             /*kind=*/PcDescriptorsLayout::kOther, locs(),
+                             /*kind=*/UntaggedPcDescriptors::kOther, locs(),
                              deopt_id());
   __ Bind(&no_call);
 }
@@ -4420,7 +4420,7 @@
       Code::ZoneHandle(compiler->zone(), object_store->throw_stub());
 
   compiler->GenerateStubCall(source(), throw_stub,
-                             /*kind=*/PcDescriptorsLayout::kOther, locs(),
+                             /*kind=*/UntaggedPcDescriptors::kOther, locs(),
                              deopt_id());
   // Issue(dartbug.com/41353): Right now we have to emit an extra breakpoint
   // instruction: The ThrowInstr will terminate the current block. The very
@@ -4448,7 +4448,7 @@
 
   compiler->SetNeedsStackTrace(catch_try_index());
   compiler->GenerateStubCall(source(), re_throw_stub,
-                             /*kind=*/PcDescriptorsLayout::kOther, locs(),
+                             /*kind=*/UntaggedPcDescriptors::kOther, locs(),
                              deopt_id());
   // Issue(dartbug.com/41353): Right now we have to emit an extra breakpoint
   // instruction: The ThrowInstr will terminate the current block. The very
@@ -4482,7 +4482,7 @@
   __ CompareObject(AssertBooleanABI::kObjectReg, Object::null_instance());
   __ BranchIf(NOT_EQUAL, &done);
   compiler->GenerateStubCall(source(), assert_boolean_stub,
-                             /*kind=*/PcDescriptorsLayout::kOther, locs(),
+                             /*kind=*/UntaggedPcDescriptors::kOther, locs(),
                              deopt_id());
   __ Bind(&done);
 }
@@ -4781,11 +4781,11 @@
   }
   switch (kind) {
     case Token::kADD:
-      return StubCode::SmiAddInlineCache().raw();
+      return StubCode::SmiAddInlineCache().ptr();
     case Token::kLT:
-      return StubCode::SmiLessInlineCache().raw();
+      return StubCode::SmiLessInlineCache().ptr();
     case Token::kEQ:
-      return StubCode::SmiEqualInlineCache().raw();
+      return StubCode::SmiEqualInlineCache().ptr();
     default:
       return Code::null();
   }
@@ -4857,7 +4857,7 @@
     smi_op_target = Resolver::ResolveDynamicAnyArgs(zone, smi_class, demangled);
   }
 #endif
-  return smi_op_target.raw();
+  return smi_op_target.ptr();
 }
 
 void InstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
@@ -4882,14 +4882,14 @@
 
     AbstractType& receivers_static_type = AbstractType::Handle(zone);
     if (receivers_static_type_ != nullptr) {
-      receivers_static_type = receivers_static_type_->raw();
+      receivers_static_type = receivers_static_type_->ptr();
     }
 
     call_ic_data = compiler->GetOrAddInstanceCallICData(
         deopt_id(), function_name(), arguments_descriptor,
         checked_argument_count(), receivers_static_type, binary_smi_op_target);
   } else {
-    call_ic_data = &ICData::ZoneHandle(zone, ic_data()->raw());
+    call_ic_data = &ICData::ZoneHandle(zone, ic_data()->ptr());
   }
 
   if (compiler->is_optimizing() && HasICData()) {
@@ -4907,7 +4907,7 @@
     }
   } else {
     // Unoptimized code.
-    compiler->AddCurrentDescriptor(PcDescriptorsLayout::kRewind, deopt_id(),
+    compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kRewind, deopt_id(),
                                    source());
 
     // If the ICData contains a (Smi, Smi, <binary-smi-op-target>) stub already
@@ -4920,7 +4920,7 @@
       auto& target = Function::Handle();
       call_ic_data->GetCheckAt(0, &class_ids, &target);
       if (class_ids[0] == kSmiCid && class_ids[1] == kSmiCid &&
-          target.raw() == binary_smi_op_target.raw()) {
+          target.ptr() == binary_smi_op_target.ptr()) {
         use_specialized_smi_ic_stub = true;
       }
     }
@@ -5036,7 +5036,7 @@
   compiler->EmitDispatchTableCall(cid_reg, selector()->offset,
                                   arguments_descriptor);
   compiler->EmitCallsiteMetadata(source(), DeoptId::kNone,
-                                 PcDescriptorsLayout::kOther, locs());
+                                 UntaggedPcDescriptors::kOther, locs());
   if (selector()->called_on_null && !selector()->on_null_interface) {
     Value* receiver = ArgumentValueAt(FirstArgIndex());
     if (receiver->Type()->is_nullable()) {
@@ -5107,7 +5107,7 @@
 bool CallTargets::HasSingleTarget() const {
   if (length() == 0) return false;
   for (int i = 0; i < length(); i++) {
-    if (TargetAt(i)->target->raw() != TargetAt(0)->target->raw()) return false;
+    if (TargetAt(i)->target->ptr() != TargetAt(0)->target->ptr()) return false;
   }
   return true;
 }
@@ -5140,7 +5140,7 @@
   const intptr_t len = targets_.length();
   Function& target = Function::Handle();
   for (intptr_t i = 0; i < len; i++) {
-    target = targets_.TargetAt(i)->target->raw();
+    target = targets_.TargetAt(i)->target->ptr();
     if (!target.IsDispatcherOrImplicitAccessor()) {
       return false;
     }
@@ -5176,8 +5176,8 @@
 
   const intptr_t num_checks = targets.length();
   for (intptr_t i = 0; i < num_checks; i++) {
-    ASSERT(targets.TargetAt(i)->target->raw() ==
-           targets.TargetAt(0)->target->raw());
+    ASSERT(targets.TargetAt(i)->target->ptr() ==
+           targets.TargetAt(0)->target->ptr());
     const intptr_t start = targets[i].cid_start;
     const intptr_t end = targets[i].cid_end;
     for (intptr_t cid = start; cid <= end; cid++) {
@@ -5319,7 +5319,7 @@
         deopt_id(), function(), arguments_descriptor, num_args_checked,
         rebind_rule_);
   } else {
-    call_ic_data = &ICData::ZoneHandle(ic_data()->raw());
+    call_ic_data = &ICData::ZoneHandle(ic_data()->ptr());
   }
   ArgumentsInfo args_info(type_args_len(), ArgumentCount(), ArgumentsSize(),
                           argument_names());
@@ -5391,7 +5391,7 @@
   __ Drop(5);
 #else
   compiler->GenerateStubCall(source(), StubCode::AssertSubtype(),
-                             PcDescriptorsLayout::kOther, locs());
+                             UntaggedPcDescriptors::kOther, locs());
 #endif
 }
 
@@ -5902,7 +5902,7 @@
   //   v8 <- StringInterpolate(v2)
 
   // Don't compile-time fold when optimizing the interpolation function itself.
-  if (flow_graph->function().raw() == CallFunction().raw()) {
+  if (flow_graph->function().ptr() == CallFunction().ptr()) {
     return this;
   }
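The hunks above are the mechanical half of this commit: the handle accessor raw() becomes ptr(), and object identity stays tagged-pointer equality, now spelled a.ptr() == b.ptr(). A minimal sketch of the pattern with stand-in types (not the VM's real Object/Handle classes):

#include <stdint.h>

// Stand-in for a tagged pointer type such as ObjectPtr or FunctionPtr.
struct ObjectPtr {
  uintptr_t value;
};

// Stand-in for a GC-safe handle such as Object::Handle().
class Handle {
 public:
  explicit Handle(ObjectPtr p) : ptr_(p) {}
  ObjectPtr ptr() const { return ptr_; }  // previously spelled raw()

 private:
  ObjectPtr ptr_;
};

// Identity test in the new spelling, as in target->ptr() == other->ptr().
bool SameObject(const Handle& a, const Handle& b) {
  return a.ptr().value == b.ptr().value;
}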
 
diff --git a/runtime/vm/compiler/backend/il.h b/runtime/vm/compiler/backend/il.h
index 9b39ae5..169949e 100644
--- a/runtime/vm/compiler/backend/il.h
+++ b/runtime/vm/compiler/backend/il.h
@@ -2066,7 +2066,7 @@
                                   /*stack_depth=*/0),
         graph_entry_(graph_entry),
         predecessor_(NULL),
-        catch_handler_types_(Array::ZoneHandle(handler_types.raw())),
+        catch_handler_types_(Array::ZoneHandle(handler_types.ptr())),
         catch_try_index_(catch_try_index),
         exception_var_(exception_var),
         stacktrace_var_(stacktrace_var),
@@ -2972,7 +2972,7 @@
   ReturnInstr(const InstructionSource& source,
               Value* value,
               intptr_t deopt_id,
-              intptr_t yield_index = PcDescriptorsLayout::kInvalidYieldIndex,
+              intptr_t yield_index = UntaggedPcDescriptors::kInvalidYieldIndex,
               Representation representation = kTagged)
       : TemplateInstruction(source, deopt_id),
         token_pos_(source.token_pos),
@@ -5276,7 +5276,7 @@
 class DebugStepCheckInstr : public TemplateInstruction<0, NoThrow> {
  public:
   DebugStepCheckInstr(const InstructionSource& source,
-                      PcDescriptorsLayout::Kind stub_kind,
+                      UntaggedPcDescriptors::Kind stub_kind,
                       intptr_t deopt_id)
       : TemplateInstruction(source, deopt_id),
         token_pos_(source.token_pos),
@@ -5293,7 +5293,7 @@
 
  private:
   const TokenPosition token_pos_;
-  const PcDescriptorsLayout::Kind stub_kind_;
+  const UntaggedPcDescriptors::Kind stub_kind_;
 
   DISALLOW_COPY_AND_ASSIGN(DebugStepCheckInstr);
 };
@@ -5506,7 +5506,7 @@
 
 // For a field of static type G<T0, ..., Tn> and a stored value of runtime
 // type T checks that type arguments of T at G exactly match <T0, ..., Tn>
-// and updates guarded state (FieldLayout::static_type_exactness_state_)
+// and updates guarded state (UntaggedField::static_type_exactness_state_)
 // accordingly.
 //
 // See StaticTypeExactnessState for more information.
@@ -6138,7 +6138,7 @@
 
   const Function& closure_function() const { return closure_function_; }
   void set_closure_function(const Function& function) {
-    closure_function_ = function.raw();
+    closure_function_ = function.ptr();
   }
 
   virtual intptr_t InputCount() const {
@@ -9613,7 +9613,7 @@
   if (auto static_call = this->AsStaticCall()) {
     return static_call->function().name();
   } else if (auto instance_call = this->AsInstanceCall()) {
-    return instance_call->function_name().raw();
+    return instance_call->function_name().ptr();
   } else {
     UNREACHABLE();
   }
@@ -9621,7 +9621,7 @@
 
 inline bool Value::CanBe(const Object& value) {
   ConstantInstr* constant = definition()->AsConstant();
-  return (constant == nullptr) || constant->value().raw() == value.raw();
+  return (constant == nullptr) || constant->value().ptr() == value.ptr();
 }
 
 class SuccessorsIterable {
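The header picks up the parallel type rename: classes describing the raw in-heap layout move from *Layout to Untagged* (UntaggedObject, UntaggedField, UntaggedPcDescriptors), and the enums and constants nested in them move along, so call sites rename mechanically. A schematic stand-in showing the shape (member set taken from the kinds appearing in this diff; the constant's value is illustrative):

#include <stdint.h>

// Was PcDescriptorsLayout; describes the untagged (raw) object layout.
struct UntaggedPcDescriptors {
  enum Kind {
    kDeopt,     // deoptimization descriptor
    kRewind,    // rewind point for debugging
    kOsrEntry,  // on-stack-replacement entry in unoptimized code
    kOther,     // generic call-site metadata
    kAnyKind,   // wildcard used by the IL (de)serializer
  };
  static constexpr intptr_t kInvalidYieldIndex = -1;
};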
diff --git a/runtime/vm/compiler/backend/il_arm.cc b/runtime/vm/compiler/backend/il_arm.cc
index e354b26..694c7f9 100644
--- a/runtime/vm/compiler/backend/il_arm.cc
+++ b/runtime/vm/compiler/backend/il_arm.cc
@@ -504,7 +504,7 @@
   __ Bind(&stack_ok);
 #endif
   ASSERT(__ constant_pool_allowed());
-  if (yield_index() != PcDescriptorsLayout::kInvalidYieldIndex) {
+  if (yield_index() != UntaggedPcDescriptors::kInvalidYieldIndex) {
     compiler->EmitYieldPositionMetadata(source(), yield_index());
   }
   __ LeaveDartFrameAndReturn();  // Disallows constant pool use.
@@ -625,7 +625,7 @@
   }
   __ blx(R2);
   compiler->EmitCallsiteMetadata(source(), deopt_id(),
-                                 PcDescriptorsLayout::kOther, locs());
+                                 UntaggedPcDescriptors::kOther, locs());
   __ Drop(argument_count);
 }
 
@@ -1270,9 +1270,9 @@
                          : compiler::ObjectPoolBuilderEntry::kNotPatchable);
   if (link_lazily()) {
     compiler->GeneratePatchableCall(source(), *stub,
-                                    PcDescriptorsLayout::kOther, locs());
+                                    UntaggedPcDescriptors::kOther, locs());
   } else {
-    compiler->GenerateStubCall(source(), *stub, PcDescriptorsLayout::kOther,
+    compiler->GenerateStubCall(source(), *stub, UntaggedPcDescriptors::kOther,
                                locs());
   }
   __ Pop(result);
@@ -1315,7 +1315,7 @@
   // instruction. Therefore we emit the metadata here, 8 bytes (2 instructions)
   // after the original mov.
   compiler->EmitCallsiteMetadata(InstructionSource(), deopt_id(),
-                                 PcDescriptorsLayout::Kind::kOther, locs());
+                                 UntaggedPcDescriptors::Kind::kOther, locs());
 
   // Update information in the thread object and enter a safepoint.
   if (CanExecuteGeneratedCodeInSafepoint()) {
@@ -2359,9 +2359,9 @@
 }
 
 void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  ASSERT(compiler::target::ObjectLayout::kClassIdTagSize == 16);
-  ASSERT(sizeof(FieldLayout::guarded_cid_) == 2);
-  ASSERT(sizeof(FieldLayout::is_nullable_) == 2);
+  ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
+  ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
+  ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
 
   const intptr_t value_cid = value()->Type()->ToCid();
   const intptr_t field_cid = field().guarded_cid();
@@ -2620,7 +2620,7 @@
 
     compiler->SaveLiveRegisters(locs);
     compiler->GenerateStubCall(InstructionSource(),  // No token position.
-                               stub, PcDescriptorsLayout::kOther, locs);
+                               stub, UntaggedPcDescriptors::kOther, locs);
     __ MoveRegister(result_, R0);
     compiler->RestoreLiveRegisters(locs);
     __ b(exit_label());
@@ -2836,9 +2836,9 @@
 }
 
 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  ASSERT(compiler::target::ObjectLayout::kClassIdTagSize == 16);
-  ASSERT(sizeof(FieldLayout::guarded_cid_) == 2);
-  ASSERT(sizeof(FieldLayout::is_nullable_) == 2);
+  ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
+  ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
+  ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
 
   compiler::Label skip_store;
 
@@ -3127,7 +3127,7 @@
   // data area to be initialized.
   // R8: null
   if (num_elements > 0) {
-    const intptr_t array_size = instance_size - sizeof(ArrayLayout);
+    const intptr_t array_size = instance_size - sizeof(UntaggedArray);
     __ LoadObject(R8, Object::null_object());
     if (num_elements >= 2) {
       __ mov(R9, compiler::Operand(R8));
@@ -3137,7 +3137,7 @@
       __ LoadImmediate(R9, 0x1);
 #endif  // DEBUG
     }
-    __ AddImmediate(R6, R0, sizeof(ArrayLayout) - kHeapObjectTag);
+    __ AddImmediate(R6, R0, sizeof(UntaggedArray) - kHeapObjectTag);
     if (array_size < (kInlineArraySize * compiler::target::kWordSize)) {
       __ InitializeFieldsNoBarrierUnrolled(
           R0, R6, 0, num_elements * compiler::target::kWordSize, R8, R9);
@@ -3180,7 +3180,7 @@
   const auto& allocate_array_stub =
       Code::ZoneHandle(compiler->zone(), object_store->allocate_array_stub());
   compiler->GenerateStubCall(source(), allocate_array_stub,
-                             PcDescriptorsLayout::kOther, locs(), deopt_id());
+                             UntaggedPcDescriptors::kOther, locs(), deopt_id());
   __ Bind(&done);
   ASSERT(locs()->out(0).reg() == kResultReg);
 }
@@ -3266,9 +3266,9 @@
 }
 
 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  ASSERT(compiler::target::ObjectLayout::kClassIdTagSize == 16);
-  ASSERT(sizeof(FieldLayout::guarded_cid_) == 2);
-  ASSERT(sizeof(FieldLayout::is_nullable_) == 2);
+  ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
+  ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
+  ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
 
   const Register instance_reg = locs()->in(0).reg();
   if (slot().representation() != kTagged) {
@@ -3538,7 +3538,7 @@
     __ b(&type_arguments_instantiated, EQ);
   }
   // Lookup cache in stub before calling runtime.
-  compiler->GenerateStubCall(source(), GetStub(), PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), GetStub(), UntaggedPcDescriptors::kOther,
                              locs());
   __ Bind(&type_arguments_instantiated);
 }
@@ -3579,7 +3579,7 @@
         compiler->zone(), object_store->allocate_context_stub());
     __ LoadImmediate(R1, instruction()->num_context_variables());
     compiler->GenerateStubCall(instruction()->source(), allocate_context_stub,
-                               PcDescriptorsLayout::kOther, locs);
+                               UntaggedPcDescriptors::kOther, locs);
     ASSERT(instruction()->locs()->out(0).reg() == R0);
     compiler->RestoreLiveRegisters(instruction()->locs());
     __ b(exit_label());
@@ -3629,7 +3629,7 @@
       Code::ZoneHandle(compiler->zone(), object_store->allocate_context_stub());
   __ LoadImmediate(R1, num_context_variables());
   compiler->GenerateStubCall(source(), allocate_context_stub,
-                             PcDescriptorsLayout::kOther, locs());
+                             UntaggedPcDescriptors::kOther, locs());
 }
 
 LocationSummary* CloneContextInstr::MakeLocationSummary(Zone* zone,
@@ -3651,7 +3651,7 @@
   const auto& clone_context_stub =
       Code::ZoneHandle(compiler->zone(), object_store->clone_context_stub());
   compiler->GenerateStubCall(source(), clone_context_stub,
-                             /*kind=*/PcDescriptorsLayout::kOther, locs());
+                             /*kind=*/UntaggedPcDescriptors::kOther, locs());
 }
 
 LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone,
@@ -3672,7 +3672,7 @@
     if (compiler->is_optimizing()) {
       compiler->AddDeoptIndexAtCall(deopt_id);
     } else {
-      compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id,
+      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id,
                                      InstructionSource());
     }
   }
@@ -3757,7 +3757,7 @@
       compiler->RecordSafepoint(instruction()->locs(), kNumSlowPathArgs);
       compiler->RecordCatchEntryMoves();
       compiler->AddDescriptor(
-          PcDescriptorsLayout::kOther, compiler->assembler()->CodeSize(),
+          UntaggedPcDescriptors::kOther, compiler->assembler()->CodeSize(),
           instruction()->deopt_id(), instruction()->source(),
           compiler->CurrentTryIndex());
     } else {
@@ -3769,7 +3769,7 @@
     if (compiler->isolate_group()->use_osr() && !compiler->is_optimizing() &&
         instruction()->in_loop()) {
       // In unoptimized code, record loop stack checks as possible OSR entries.
-      compiler->AddCurrentDescriptor(PcDescriptorsLayout::kOsrEntry,
+      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kOsrEntry,
                                      instruction()->deopt_id(),
                                      InstructionSource());
     }
@@ -3811,7 +3811,7 @@
     // the stub above).
     auto extended_env = compiler->SlowPathEnvironmentFor(this, 0);
     compiler->EmitCallsiteMetadata(source(), deopt_id(),
-                                   PcDescriptorsLayout::kOther, locs(),
+                                   UntaggedPcDescriptors::kOther, locs(),
                                    extended_env);
     return;
   }
@@ -5010,10 +5010,10 @@
   auto object_store = IsolateGroup::Current()->object_store();
   const bool stubs_in_vm_isolate =
       object_store->allocate_mint_with_fpu_regs_stub()
-          ->ptr()
+          ->untag()
           ->InVMIsolateHeap() ||
       object_store->allocate_mint_without_fpu_regs_stub()
-          ->ptr()
+          ->untag()
           ->InVMIsolateHeap();
   const bool shared_slow_path_call = SlowPathSharingSupported(opt) &&
                                      FLAG_use_bare_instructions &&
@@ -5074,7 +5074,7 @@
     ASSERT(!locs()->live_registers()->ContainsRegister(
         AllocateMintABI::kResultReg));
     auto extended_env = compiler->SlowPathEnvironmentFor(this, 0);
-    compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
+    compiler->GenerateStubCall(source(), stub, UntaggedPcDescriptors::kOther,
                                locs(), DeoptId::kNone, extended_env);
   } else {
     BoxAllocationSlowPath::Allocate(compiler, this, compiler->mint_class(),
@@ -6631,7 +6631,7 @@
     // the stub above).
     auto extended_env = compiler->SlowPathEnvironmentFor(this, 0);
     compiler->EmitCallsiteMetadata(source(), deopt_id(),
-                                   PcDescriptorsLayout::kOther, locs(),
+                                   UntaggedPcDescriptors::kOther, locs(),
                                    extended_env);
     CheckNullInstr::AddMetadataForRuntimeCall(this, compiler);
     return;
@@ -7529,7 +7529,7 @@
     }
     // Add a deoptimization descriptor for deoptimizing instructions that
     // may be inserted before this instruction.
-    compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, GetDeoptId(),
+    compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, GetDeoptId(),
                                    InstructionSource());
   }
   if (HasParallelMove()) {
@@ -7703,7 +7703,7 @@
   }
   const Code& stub = Code::ZoneHandle(
       compiler->zone(), StubCode::GetAllocationStubForClass(cls()));
-  compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), stub, UntaggedPcDescriptors::kOther,
                              locs());
 }
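One change in this file is not a pure respelling: the stub-in-VM-isolate checks move from ->ptr() to ->untag(). After the rename, ptr() means the tagged pointer, so code that needs the raw object behind it strips the heap-object tag with untag(). A sketch under stand-in types (kHeapObjectTag assumed to be 1, as in the VM):

#include <stdint.h>

struct UntaggedCode {
  bool InVMIsolateHeap() const { return in_vm_heap_; }
  bool in_vm_heap_ = false;
};

// Stand-in tagged pointer: heap objects carry a set low bit.
struct CodePtr {
  uintptr_t tagged_;
  UntaggedCode* untag() const {  // reached via ->ptr() before this commit
    return reinterpret_cast<UntaggedCode*>(tagged_ - 1 /* kHeapObjectTag */);
  }
};

// Usage mirrors the diff: stub.untag()->InVMIsolateHeap().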
 
diff --git a/runtime/vm/compiler/backend/il_arm64.cc b/runtime/vm/compiler/backend/il_arm64.cc
index 7c54051..26ea233 100644
--- a/runtime/vm/compiler/backend/il_arm64.cc
+++ b/runtime/vm/compiler/backend/il_arm64.cc
@@ -412,7 +412,7 @@
   __ Bind(&stack_ok);
 #endif
   ASSERT(__ constant_pool_allowed());
-  if (yield_index() != PcDescriptorsLayout::kInvalidYieldIndex) {
+  if (yield_index() != UntaggedPcDescriptors::kInvalidYieldIndex) {
     compiler->EmitYieldPositionMetadata(source(), yield_index());
   }
   __ LeaveDartFrame();  // Disallows constant pool use.
@@ -529,7 +529,7 @@
   }
   __ blr(R2);
   compiler->EmitCallsiteMetadata(source(), deopt_id(),
-                                 PcDescriptorsLayout::kOther, locs());
+                                 UntaggedPcDescriptors::kOther, locs());
   __ Drop(argument_count);
 }
 
@@ -960,7 +960,7 @@
   Location right = locs()->in(1);
   if (right.IsConstant()) {
     ASSERT(right.constant().IsSmi());
-    const int64_t imm = static_cast<int64_t>(right.constant().raw());
+    const int64_t imm = static_cast<int64_t>(right.constant().ptr());
     __ TestImmediate(left, imm);
   } else {
     __ tst(left, compiler::Operand(right.reg()));
@@ -1103,9 +1103,9 @@
                                    : ObjectPool::Patchability::kNotPatchable);
   if (link_lazily()) {
     compiler->GeneratePatchableCall(source(), *stub,
-                                    PcDescriptorsLayout::kOther, locs());
+                                    UntaggedPcDescriptors::kOther, locs());
   } else {
-    compiler->GenerateStubCall(source(), *stub, PcDescriptorsLayout::kOther,
+    compiler->GenerateStubCall(source(), *stub, UntaggedPcDescriptors::kOther,
                                locs());
   }
   __ Pop(result);
@@ -1143,7 +1143,7 @@
   // instruction.
   __ adr(temp, compiler::Immediate(Instr::kInstrSize));
   compiler->EmitCallsiteMetadata(source(), deopt_id(),
-                                 PcDescriptorsLayout::Kind::kOther, locs());
+                                 UntaggedPcDescriptors::Kind::kOther, locs());
 
   __ StoreToOffset(temp, FPREG, kSavedCallerPcSlotFromFp * kWordSize);
 
@@ -2063,9 +2063,9 @@
 }
 
 void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  ASSERT(compiler::target::ObjectLayout::kClassIdTagSize == 16);
-  ASSERT(sizeof(FieldLayout::guarded_cid_) == 2);
-  ASSERT(sizeof(FieldLayout::is_nullable_) == 2);
+  ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
+  ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
+  ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
 
   const intptr_t value_cid = value()->Type()->ToCid();
   const intptr_t field_cid = field().guarded_cid();
@@ -2309,7 +2309,7 @@
 
     compiler->SaveLiveRegisters(locs);
     compiler->GenerateStubCall(InstructionSource(),  // No token position.
-                               stub, PcDescriptorsLayout::kOther, locs);
+                               stub, UntaggedPcDescriptors::kOther, locs);
     __ MoveRegister(result_, R0);
     compiler->RestoreLiveRegisters(locs);
     __ b(exit_label());
@@ -2395,9 +2395,9 @@
 }
 
 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  ASSERT(compiler::target::ObjectLayout::kClassIdTagSize == 16);
-  ASSERT(sizeof(FieldLayout::guarded_cid_) == 2);
-  ASSERT(sizeof(FieldLayout::is_nullable_) == 2);
+  ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
+  ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
+  ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
 
   compiler::Label skip_store;
 
@@ -2673,9 +2673,9 @@
   // data area to be initialized.
   // R6: null
   if (num_elements > 0) {
-    const intptr_t array_size = instance_size - sizeof(ArrayLayout);
+    const intptr_t array_size = instance_size - sizeof(UntaggedArray);
     __ LoadObject(R6, Object::null_object());
-    __ AddImmediate(R8, R0, sizeof(ArrayLayout) - kHeapObjectTag);
+    __ AddImmediate(R8, R0, sizeof(UntaggedArray) - kHeapObjectTag);
     if (array_size < (kInlineArraySize * kWordSize)) {
       intptr_t current_offset = 0;
       while (current_offset < array_size) {
@@ -2727,7 +2727,7 @@
   const auto& allocate_array_stub =
       Code::ZoneHandle(compiler->zone(), object_store->allocate_array_stub());
   compiler->GenerateStubCall(source(), allocate_array_stub,
-                             PcDescriptorsLayout::kOther, locs(), deopt_id());
+                             UntaggedPcDescriptors::kOther, locs(), deopt_id());
   ASSERT(locs()->out(0).reg() == kResultReg);
   __ Bind(&done);
 }
@@ -2804,9 +2804,9 @@
 }
 
 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  ASSERT(compiler::target::ObjectLayout::kClassIdTagSize == 16);
-  ASSERT(sizeof(FieldLayout::guarded_cid_) == 2);
-  ASSERT(sizeof(FieldLayout::is_nullable_) == 2);
+  ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
+  ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
+  ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
 
   const Register instance_reg = locs()->in(0).reg();
   if (slot().representation() != kTagged) {
@@ -3051,7 +3051,7 @@
   }
   // Lookup cache in stub before calling runtime.
 
-  compiler->GenerateStubCall(source(), GetStub(), PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), GetStub(), UntaggedPcDescriptors::kOther,
                              locs());
   __ Bind(&type_arguments_instantiated);
 }
@@ -3093,7 +3093,7 @@
 
     __ LoadImmediate(R1, instruction()->num_context_variables());
     compiler->GenerateStubCall(instruction()->source(), allocate_context_stub,
-                               PcDescriptorsLayout::kOther, locs);
+                               UntaggedPcDescriptors::kOther, locs);
     ASSERT(instruction()->locs()->out(0).reg() == R0);
     compiler->RestoreLiveRegisters(instruction()->locs());
     __ b(exit_label());
@@ -3143,7 +3143,7 @@
       Code::ZoneHandle(compiler->zone(), object_store->allocate_context_stub());
   __ LoadImmediate(R1, num_context_variables());
   compiler->GenerateStubCall(source(), allocate_context_stub,
-                             PcDescriptorsLayout::kOther, locs());
+                             UntaggedPcDescriptors::kOther, locs());
 }
 
 LocationSummary* CloneContextInstr::MakeLocationSummary(Zone* zone,
@@ -3165,7 +3165,7 @@
   const auto& clone_context_stub =
       Code::ZoneHandle(compiler->zone(), object_store->clone_context_stub());
   compiler->GenerateStubCall(source(), clone_context_stub,
-                             /*kind=*/PcDescriptorsLayout::kOther, locs());
+                             /*kind=*/UntaggedPcDescriptors::kOther, locs());
 }
 
 LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone,
@@ -3186,7 +3186,7 @@
     if (compiler->is_optimizing()) {
       compiler->AddDeoptIndexAtCall(deopt_id);
     } else {
-      compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id,
+      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id,
                                      InstructionSource());
     }
   }
@@ -3282,7 +3282,7 @@
       compiler->RecordSafepoint(locs, kNumSlowPathArgs);
       compiler->RecordCatchEntryMoves();
       compiler->AddDescriptor(
-          PcDescriptorsLayout::kOther, compiler->assembler()->CodeSize(),
+          UntaggedPcDescriptors::kOther, compiler->assembler()->CodeSize(),
           instruction()->deopt_id(), instruction()->source(),
           compiler->CurrentTryIndex());
     } else {
@@ -3294,7 +3294,7 @@
     if (compiler->isolate_group()->use_osr() && !compiler->is_optimizing() &&
         instruction()->in_loop()) {
       // In unoptimized code, record loop stack checks as possible OSR entries.
-      compiler->AddCurrentDescriptor(PcDescriptorsLayout::kOsrEntry,
+      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kOsrEntry,
                                      instruction()->deopt_id(),
                                      InstructionSource());
     }
@@ -3777,7 +3777,7 @@
   if (locs()->in(1).IsConstant()) {
     const Object& constant = locs()->in(1).constant();
     ASSERT(constant.IsSmi());
-    const int64_t imm = static_cast<int64_t>(constant.raw());
+    const int64_t imm = static_cast<int64_t>(constant.ptr());
     switch (op_kind()) {
       case Token::kADD: {
         if (deopt == NULL) {
@@ -4236,10 +4236,10 @@
   auto object_store = IsolateGroup::Current()->object_store();
   const bool stubs_in_vm_isolate =
       object_store->allocate_mint_with_fpu_regs_stub()
-          ->ptr()
+          ->untag()
           ->InVMIsolateHeap() ||
       object_store->allocate_mint_without_fpu_regs_stub()
-          ->ptr()
+          ->untag()
           ->InVMIsolateHeap();
   const bool shared_slow_path_call = SlowPathSharingSupported(opt) &&
                                      FLAG_use_bare_instructions &&
@@ -4293,7 +4293,7 @@
     ASSERT(!locs()->live_registers()->ContainsRegister(
         AllocateMintABI::kResultReg));
     auto extended_env = compiler->SlowPathEnvironmentFor(this, 0);
-    compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
+    compiler->GenerateStubCall(source(), stub, UntaggedPcDescriptors::kOther,
                                locs(), DeoptId::kNone, extended_env);
   } else {
     BoxAllocationSlowPath::Allocate(compiler, this, compiler->mint_class(), out,
@@ -5667,7 +5667,7 @@
   if (index_loc.IsConstant()) {
     const Register length = length_loc.reg();
     const Smi& index = Smi::Cast(index_loc.constant());
-    __ CompareImmediate(length, static_cast<int64_t>(index.raw()));
+    __ CompareImmediate(length, static_cast<int64_t>(index.ptr()));
     __ b(deopt, LS);
   } else if (length_loc.IsConstant()) {
     const Smi& length = Smi::Cast(length_loc.constant());
@@ -5679,7 +5679,7 @@
       __ tst(index, compiler::Operand(index));
       __ b(deopt, MI);
     } else {
-      __ CompareImmediate(index, static_cast<int64_t>(length.raw()));
+      __ CompareImmediate(index, static_cast<int64_t>(length.ptr()));
       __ b(deopt, CS);
     }
   } else {
@@ -6567,7 +6567,7 @@
     }
     // Add a deoptimization descriptor for deoptimizing instructions that
     // may be inserted before this instruction.
-    compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, GetDeoptId(),
+    compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, GetDeoptId(),
                                    InstructionSource());
   }
   if (HasParallelMove()) {
@@ -6734,7 +6734,7 @@
   }
   const Code& stub = Code::ZoneHandle(
       compiler->zone(), StubCode::GetAllocationStubForClass(cls()));
-  compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), stub, UntaggedPcDescriptors::kOther,
                              locs());
 }
 
diff --git a/runtime/vm/compiler/backend/il_deserializer.cc b/runtime/vm/compiler/backend/il_deserializer.cc
index 5650a15..eb7f1fa 100644
--- a/runtime/vm/compiler/backend/il_deserializer.cc
+++ b/runtime/vm/compiler/backend/il_deserializer.cc
@@ -1018,10 +1018,10 @@
 DebugStepCheckInstr* FlowGraphDeserializer::DeserializeDebugStepCheck(
     SExpList* sexp,
     const InstrInfo& info) {
-  auto kind = PcDescriptorsLayout::kAnyKind;
+  auto kind = UntaggedPcDescriptors::kAnyKind;
   if (auto const kind_sexp = CheckSymbol(Retrieve(sexp, "stub_kind"))) {
-    if (!PcDescriptorsLayout::ParseKind(kind_sexp->value(), &kind)) {
-      StoreError(kind_sexp, "not a valid PcDescriptorsLayout::Kind name");
+    if (!UntaggedPcDescriptors::ParseKind(kind_sexp->value(), &kind)) {
+      StoreError(kind_sexp, "not a valid UntaggedPcDescriptors::Kind name");
       return nullptr;
     }
   }
@@ -1466,7 +1466,7 @@
     // early if we parse one.
     if (sym->Equals("null")) return true;
     if (sym->Equals("sentinel")) {
-      *out = Object::sentinel().raw();
+      *out = Object::sentinel().ptr();
       return true;
     }
 
@@ -1478,7 +1478,7 @@
       StoreError(sym, "not a reference to a constant definition");
       return false;
     }
-    *out = val->BoundConstant().raw();
+    *out = val->BoundConstant().ptr();
     // Values used in constant definitions have already been canonicalized,
     // so just exit.
     return true;
@@ -1487,7 +1487,7 @@
   // Other instance values may need to be canonicalized, so do that before
   // returning.
   if (auto const b = sexp->AsBool()) {
-    *out = Bool::Get(b->value()).raw();
+    *out = Bool::Get(b->value()).ptr();
   } else if (auto const str = sexp->AsString()) {
     *out = String::New(str->value(), Heap::kOld);
   } else if (auto const i = sexp->AsInteger()) {
@@ -1651,13 +1651,13 @@
   auto& function = Function::Cast(*out);
   // Check the kind expected by the S-expression if one was specified.
   if (auto const kind_sexp = CheckSymbol(list->ExtraLookupValue("kind"))) {
-    FunctionLayout::Kind kind;
-    if (!FunctionLayout::ParseKind(kind_sexp->value(), &kind)) {
+    UntaggedFunction::Kind kind;
+    if (!UntaggedFunction::ParseKind(kind_sexp->value(), &kind)) {
       StoreError(kind_sexp, "unexpected function kind");
       return false;
     }
     if (function.kind() != kind) {
-      auto const kind_str = FunctionLayout::KindToCString(function.kind());
+      auto const kind_str = UntaggedFunction::KindToCString(function.kind());
       StoreError(list, "retrieved function has kind %s", kind_str);
       return false;
     }
@@ -1697,7 +1697,7 @@
   sig.set_parameter_types(parameter_types);
   sig.set_parameter_names(parameter_names);
   sig.set_packed_fields(packed_fields);
-  *out = sig.raw();
+  *out = sig.ptr();
   return true;
 }
 
@@ -1770,7 +1770,7 @@
       StoreError(list, "class for instance has non-final instance fields");
       return false;
     }
-    auto& fresh_handle = Field::Handle(zone(), instance_field_.raw());
+    auto& fresh_handle = Field::Handle(zone(), instance_field_.ptr());
     final_fields.Add(&fresh_handle);
   }
 
@@ -1850,7 +1850,7 @@
       StoreError(sexp, "reference to non-constant definition");
       return false;
     }
-    *out = val->BoundConstant().raw();
+    *out = val->BoundConstant().ptr();
     if (!out->IsType()) {
       StoreError(sexp, "expected Type constant");
       return false;
@@ -1940,7 +1940,7 @@
       StoreError(sexp, "reference to non-constant definition");
       return false;
     }
-    *out = val->BoundConstant().raw();
+    *out = val->BoundConstant().ptr();
     if (!out->IsTypeArguments()) {
       StoreError(sexp, "expected TypeArguments constant");
       return false;
@@ -2052,7 +2052,7 @@
       String::FromUTF8(reinterpret_cast<const uint8_t*>(name), lib_end - name);
   name_library_ = Library::LookupLibrary(thread(), tmp_string_);
   if (*lib_end == '\0') {
-    *obj = name_library_.raw();
+    *obj = name_library_.ptr();
     return true;
   }
   const char* const class_start = lib_end + 1;
@@ -2081,7 +2081,7 @@
     return false;
   }
   if (*class_end == '\0') {
-    *obj = name_class_.raw();
+    *obj = name_class_.ptr();
     return true;
   }
   if (*class_end == '.') {
@@ -2100,7 +2100,7 @@
                  empty_name ? "at top level" : name_class_.ToCString());
       return false;
     }
-    *obj = name_field_.raw();
+    *obj = name_field_.ptr();
     return true;
   }
   if (class_end[1] == '\0') {
@@ -2177,7 +2177,7 @@
     }
     func_start = func_end + 1;
   }
-  *obj = name_function_.raw();
+  *obj = name_function_.ptr();
   return true;
 }
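The deserializer round-trips descriptor and function kinds by name, which is why the ParseKind/KindToCString helpers rename together with their owning Untagged* classes. A toy version of that name-to-enum round trip (hypothetical three-entry table, not the VM's):

#include <string.h>

enum Kind { kDeopt, kOther, kAnyKind };

static const char* const kKindNames[] = {"Deopt", "Other", "AnyKind"};

const char* KindToCString(Kind kind) {
  return kKindNames[kind];
}

bool ParseKind(const char* name, Kind* out) {
  for (int i = 0; i < 3; i++) {
    if (strcmp(name, kKindNames[i]) == 0) {
      *out = static_cast<Kind>(i);
      return true;
    }
  }
  return false;  // caller reports "not a valid ... Kind name"
}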
 
diff --git a/runtime/vm/compiler/backend/il_ia32.cc b/runtime/vm/compiler/backend/il_ia32.cc
index 9f68daa..a78e47b 100644
--- a/runtime/vm/compiler/backend/il_ia32.cc
+++ b/runtime/vm/compiler/backend/il_ia32.cc
@@ -254,7 +254,7 @@
   __ int3();
   __ Bind(&done);
 #endif
-  if (yield_index() != PcDescriptorsLayout::kInvalidYieldIndex) {
+  if (yield_index() != UntaggedPcDescriptors::kInvalidYieldIndex) {
     compiler->EmitYieldPositionMetadata(source(), yield_index());
   }
   __ LeaveFrame();
@@ -448,7 +448,7 @@
     } else {
       if (compiler::Assembler::IsSafeSmi(value_) || value_.IsNull()) {
         __ movl(LocationToStackSlotAddress(destination),
-                compiler::Immediate(static_cast<int32_t>(value_.raw())));
+                compiler::Immediate(static_cast<int32_t>(value_.ptr())));
       } else {
         __ pushl(EAX);
         __ LoadObjectSafely(EAX, value_);
@@ -834,7 +834,7 @@
   Location right = locs()->in(1);
   if (right.IsConstant()) {
     ASSERT(right.constant().IsSmi());
-    const int32_t imm = static_cast<int32_t>(right.constant().raw());
+    const int32_t imm = static_cast<int32_t>(right.constant().ptr());
     __ testl(left, compiler::Immediate(imm));
   } else {
     __ testl(left, right.reg());
@@ -977,7 +977,7 @@
   const compiler::ExternalLabel label(
       reinterpret_cast<uword>(native_c_function()));
   __ movl(ECX, compiler::Immediate(label.address()));
-  compiler->GenerateStubCall(source(), *stub, PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), *stub, UntaggedPcDescriptors::kOther,
                              locs());
 
   __ popl(result);
@@ -1017,7 +1017,7 @@
   compiler::Label get_pc;
   __ call(&get_pc);
   compiler->EmitCallsiteMetadata(InstructionSource(), deopt_id(),
-                                 PcDescriptorsLayout::Kind::kOther, locs());
+                                 UntaggedPcDescriptors::Kind::kOther, locs());
   __ Bind(&get_pc);
   __ popl(temp);
   __ movl(compiler::Address(FPREG, kSavedCallerPcSlotFromFp * kWordSize), temp);
@@ -1891,9 +1891,9 @@
 }
 
 void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  ASSERT(compiler::target::ObjectLayout::kClassIdTagSize == 16);
-  ASSERT(sizeof(FieldLayout::guarded_cid_) == 2);
-  ASSERT(sizeof(FieldLayout::is_nullable_) == 2);
+  ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
+  ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
+  ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
 
   const intptr_t value_cid = value()->Type()->ToCid();
   const intptr_t field_cid = field().guarded_cid();
@@ -2141,7 +2141,7 @@
 
     compiler->SaveLiveRegisters(locs);
     compiler->GenerateStubCall(InstructionSource(), stub,
-                               PcDescriptorsLayout::kOther, locs);
+                               UntaggedPcDescriptors::kOther, locs);
     __ MoveRegister(result_, EAX);
     compiler->RestoreLiveRegisters(locs);
     __ jmp(exit_label());
@@ -2225,9 +2225,9 @@
 }
 
 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  ASSERT(compiler::target::ObjectLayout::kClassIdTagSize == 16);
-  ASSERT(sizeof(FieldLayout::guarded_cid_) == 2);
-  ASSERT(sizeof(FieldLayout::is_nullable_) == 2);
+  ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
+  ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
+  ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
 
   compiler::Label skip_store;
 
@@ -2487,10 +2487,10 @@
   // EDI: iterator which initially points to the start of the variable
   // data area to be initialized.
   if (num_elements > 0) {
-    const intptr_t array_size = instance_size - sizeof(ArrayLayout);
+    const intptr_t array_size = instance_size - sizeof(UntaggedArray);
     const compiler::Immediate& raw_null =
         compiler::Immediate(static_cast<intptr_t>(Object::null()));
-    __ leal(EDI, compiler::FieldAddress(EAX, sizeof(ArrayLayout)));
+    __ leal(EDI, compiler::FieldAddress(EAX, sizeof(UntaggedArray)));
     if (array_size < (kInlineArraySize * kWordSize)) {
       intptr_t current_offset = 0;
       __ movl(EBX, raw_null);
@@ -2534,7 +2534,7 @@
   const auto& allocate_array_stub =
       Code::ZoneHandle(compiler->zone(), object_store->allocate_array_stub());
   compiler->GenerateStubCall(source(), allocate_array_stub,
-                             PcDescriptorsLayout::kOther, locs(), deopt_id());
+                             UntaggedPcDescriptors::kOther, locs(), deopt_id());
   __ Bind(&done);
   ASSERT(locs()->out(0).reg() == kResultReg);
 }
@@ -2609,9 +2609,9 @@
 }
 
 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  ASSERT(compiler::target::ObjectLayout::kClassIdTagSize == 16);
-  ASSERT(sizeof(FieldLayout::guarded_cid_) == 2);
-  ASSERT(sizeof(FieldLayout::is_nullable_) == 2);
+  ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
+  ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
+  ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
 
   const Register instance_reg = locs()->in(0).reg();
   if (slot().representation() != kTagged) {
@@ -2847,7 +2847,7 @@
     __ Bind(&non_null_type_args);
   }
   // Lookup cache in stub before calling runtime.
-  compiler->GenerateStubCall(source(), GetStub(), PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), GetStub(), UntaggedPcDescriptors::kOther,
                              locs());
   __ Bind(&type_arguments_instantiated);
 }
@@ -2885,7 +2885,7 @@
     __ movl(EDX, compiler::Immediate(instruction()->num_context_variables()));
     compiler->GenerateStubCall(instruction()->source(),
                                StubCode::AllocateContext(),
-                               PcDescriptorsLayout::kOther, locs);
+                               UntaggedPcDescriptors::kOther, locs);
     ASSERT(instruction()->locs()->out(0).reg() == EAX);
     compiler->RestoreLiveRegisters(instruction()->locs());
     __ jmp(exit_label());
@@ -2933,7 +2933,7 @@
 
   __ movl(EDX, compiler::Immediate(num_context_variables()));
   compiler->GenerateStubCall(source(), StubCode::AllocateContext(),
-                             PcDescriptorsLayout::kOther, locs());
+                             UntaggedPcDescriptors::kOther, locs());
 }
 
 LocationSummary* CloneContextInstr::MakeLocationSummary(Zone* zone,
@@ -2952,7 +2952,7 @@
   ASSERT(locs()->out(0).reg() == EAX);
 
   compiler->GenerateStubCall(source(), StubCode::CloneContext(),
-                             /*kind=*/PcDescriptorsLayout::kOther, locs());
+                             /*kind=*/UntaggedPcDescriptors::kOther, locs());
 }
 
 LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone,
@@ -2973,7 +2973,7 @@
     if (compiler->is_optimizing()) {
       compiler->AddDeoptIndexAtCall(deopt_id);
     } else {
-      compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id,
+      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id,
                                      InstructionSource());
     }
   }
@@ -3047,7 +3047,7 @@
     if (compiler->isolate_group()->use_osr() && !compiler->is_optimizing() &&
         instruction()->in_loop()) {
       // In unoptimized code, record loop stack checks as possible OSR entries.
-      compiler->AddCurrentDescriptor(PcDescriptorsLayout::kOsrEntry,
+      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kOsrEntry,
                                      instruction()->deopt_id(),
                                      InstructionSource());
     }
@@ -5698,17 +5698,17 @@
       __ testl(index, index);
       __ j(NEGATIVE, deopt);
     } else {
-      __ cmpl(index, compiler::Immediate(static_cast<int32_t>(length.raw())));
+      __ cmpl(index, compiler::Immediate(static_cast<int32_t>(length.ptr())));
       __ j(ABOVE_EQUAL, deopt);
     }
   } else if (index_loc.IsConstant()) {
     const Smi& index = Smi::Cast(index_loc.constant());
     if (length_loc.IsStackSlot()) {
       const compiler::Address& length = LocationToStackSlotAddress(length_loc);
-      __ cmpl(length, compiler::Immediate(static_cast<int32_t>(index.raw())));
+      __ cmpl(length, compiler::Immediate(static_cast<int32_t>(index.ptr())));
     } else {
       Register length = length_loc.reg();
-      __ cmpl(length, compiler::Immediate(static_cast<int32_t>(index.raw())));
+      __ cmpl(length, compiler::Immediate(static_cast<int32_t>(index.ptr())));
     }
     __ j(BELOW_EQUAL, deopt);
   } else if (length_loc.IsStackSlot()) {
@@ -6448,7 +6448,7 @@
     }
     // Add a deoptimization descriptor for deoptimizing instructions that
     // may be inserted before this instruction.
-    compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, GetDeoptId(),
+    compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, GetDeoptId(),
                                    InstructionSource());
   }
   if (HasParallelMove()) {
@@ -6636,7 +6636,7 @@
   __ xorl(ECX, ECX);
   __ call(EBX);
   compiler->EmitCallsiteMetadata(source(), deopt_id(),
-                                 PcDescriptorsLayout::kOther, locs());
+                                 UntaggedPcDescriptors::kOther, locs());
   __ Drop(argument_count);
 }
 
@@ -6685,7 +6685,7 @@
 void AllocateObjectInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   const Code& stub = Code::ZoneHandle(
       compiler->zone(), StubCode::GetAllocationStubForClass(cls()));
-  compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), stub, UntaggedPcDescriptors::kOther,
                              locs());
 }
 
diff --git a/runtime/vm/compiler/backend/il_printer.cc b/runtime/vm/compiler/backend/il_printer.cc
index e971b18..79b93d5 100644
--- a/runtime/vm/compiler/backend/il_printer.cc
+++ b/runtime/vm/compiler/backend/il_printer.cc
@@ -127,7 +127,7 @@
     const CidRange& range = targets[i];
     const auto target_info = targets.TargetAt(i);
     const intptr_t count = target_info->count;
-    target = target_info->target->raw();
+    target = target_info->target->ptr();
     if (i > 0) {
       f->AddString(" | ");
     }
@@ -1059,7 +1059,7 @@
 
 void ReturnInstr::PrintOperandsTo(BaseTextBuffer* f) const {
   Instruction::PrintOperandsTo(f);
-  if (yield_index() != PcDescriptorsLayout::kInvalidYieldIndex) {
+  if (yield_index() != UntaggedPcDescriptors::kInvalidYieldIndex) {
     f->Printf(", yield_index = %" Pd "", yield_index());
   }
 }
@@ -1152,7 +1152,7 @@
   } else {
     const Object& owner = Object::Handle(code_.owner());
     if (owner.IsFunction()) {
-      name = Function::Handle(Function::RawCast(owner.raw()))
+      name = Function::Handle(Function::RawCast(owner.ptr()))
                  .ToFullyQualifiedCString();
     }
   }
diff --git a/runtime/vm/compiler/backend/il_serializer.cc b/runtime/vm/compiler/backend/il_serializer.cc
index 1d8e825..31fd469 100644
--- a/runtime/vm/compiler/backend/il_serializer.cc
+++ b/runtime/vm/compiler/backend/il_serializer.cc
@@ -435,7 +435,7 @@
     open_recursive_types_.Insert(hash, &t);
   }
   if (t.IsFunctionType()) {
-    const auto& sig = FunctionType::Handle(zone(), FunctionType::Cast(t).raw());
+    const auto& sig = FunctionType::Handle(zone(), FunctionType::Cast(t).ptr());
     AddSymbol(sexp, "FunctionType");
     function_type_args_ = sig.type_parameters();
     if (auto const ta_sexp = NonEmptyTypeArgumentsToSExp(function_type_args_)) {
@@ -584,9 +584,9 @@
       AddExtraSymbol(sexp, "native_name", tmp_string_.ToCString());
     }
   }
-  if (func.kind() != FunctionLayout::Kind::kRegularFunction ||
+  if (func.kind() != UntaggedFunction::Kind::kRegularFunction ||
       FLAG_verbose_flow_graph_serialization) {
-    AddExtraSymbol(sexp, "kind", FunctionLayout::KindToCString(func.kind()));
+    AddExtraSymbol(sexp, "kind", UntaggedFunction::KindToCString(func.kind()));
   }
   function_type_args_ = func.type_parameters();
   if (auto const ta_sexp = NonEmptyTypeArgumentsToSExp(function_type_args_)) {
@@ -676,7 +676,7 @@
   if (dartval.IsNull()) {
     return new (zone()) SExpSymbol("null");
   }
-  if (dartval.raw() == Object::sentinel().raw()) {
+  if (dartval.ptr() == Object::sentinel().ptr()) {
     return new (zone()) SExpSymbol("sentinel");
   }
   if (dartval.IsString()) {
@@ -1079,9 +1079,10 @@
     SExpList* sexp,
     FlowGraphSerializer* s) const {
   Instruction::AddExtraInfoToSExpression(sexp, s);
-  if (stub_kind_ != PcDescriptorsLayout::kAnyKind ||
+  if (stub_kind_ != UntaggedPcDescriptors::kAnyKind ||
       FLAG_verbose_flow_graph_serialization) {
-    auto const stub_kind_name = PcDescriptorsLayout::KindToCString(stub_kind_);
+    auto const stub_kind_name =
+        UntaggedPcDescriptors::KindToCString(stub_kind_);
     ASSERT(stub_kind_name != nullptr);
     s->AddExtraSymbol(sexp, "stub_kind", stub_kind_name);
   }
@@ -1259,8 +1260,8 @@
     }
   } else {
     if (interface_target().IsNull() ||
-        (function_name().raw() != interface_target().name() &&
-         function_name().raw() != tearoff_interface_target().name())) {
+        (function_name().ptr() != interface_target().name() &&
+         function_name().ptr() != tearoff_interface_target().name())) {
       s->AddExtraString(sexp, "function_name", function_name().ToCString());
     }
   }
diff --git a/runtime/vm/compiler/backend/il_serializer.h b/runtime/vm/compiler/backend/il_serializer.h
index 10ba537..fda5731 100644
--- a/runtime/vm/compiler/backend/il_serializer.h
+++ b/runtime/vm/compiler/backend/il_serializer.h
@@ -143,10 +143,10 @@
     static bool ReportStats() { return false; }
 
     static bool IsMatch(const Object& a, const Object& b) {
-      return a.raw() == b.raw();
+      return a.ptr() == b.ptr();
     }
     static uword Hash(const Object& obj) {
-      if (obj.IsSmi()) return static_cast<uword>(obj.raw());
+      if (obj.IsSmi()) return static_cast<uword>(obj.ptr());
       if (obj.IsInstance()) return Instance::Cast(obj).CanonicalizeHash();
       return obj.GetClassId();
     }
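This trait shows where ptr() equality pays off: two handles denote the same object exactly when their tagged pointers match, and a Smi can hash as its tagged value because the integer is encoded directly in the pointer bits. A stand-in trait in the same shape (simplified; the real fallback is CanonicalizeHash() and the class id, as above):

#include <stdint.h>

struct Obj {
  uintptr_t tagged_ptr;                                 // as from ptr()
  bool IsSmi() const { return (tagged_ptr & 1) == 0; }  // Smi tag bit clear
};

struct ObjectIdentityTrait {
  static bool IsMatch(const Obj& a, const Obj& b) {
    return a.tagged_ptr == b.tagged_ptr;                // a.ptr() == b.ptr()
  }
  static uintptr_t Hash(const Obj& obj) {
    if (obj.IsSmi()) return obj.tagged_ptr;  // tagged value doubles as hash
    return 0;  // placeholder for the instance/class-id fallback
  }
};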
diff --git a/runtime/vm/compiler/backend/il_test_helper.cc b/runtime/vm/compiler/backend/il_test_helper.cc
index a3f5664..f552aff 100644
--- a/runtime/vm/compiler/backend/il_test_helper.cc
+++ b/runtime/vm/compiler/backend/il_test_helper.cc
@@ -35,7 +35,7 @@
   auto& lib = Library::Handle();
   lib ^= Api::UnwrapHandle(api_lib);
   EXPECT(!lib.IsNull());
-  return lib.raw();
+  return lib.ptr();
 }
 
 FunctionPtr GetFunction(const Library& lib, const char* name) {
@@ -43,7 +43,7 @@
   const auto& func = Function::Handle(lib.LookupFunctionAllowPrivate(
       String::Handle(Symbols::New(thread, name))));
   EXPECT(!func.IsNull());
-  return func.raw();
+  return func.ptr();
 }
 
 ClassPtr GetClass(const Library& lib, const char* name) {
@@ -51,14 +51,14 @@
   const auto& cls = Class::Handle(
       lib.LookupClassAllowPrivate(String::Handle(Symbols::New(thread, name))));
   EXPECT(!cls.IsNull());
-  return cls.raw();
+  return cls.ptr();
 }
 
 TypeParameterPtr GetClassTypeParameter(const Class& klass, const char* name) {
   const auto& param = TypeParameter::Handle(
       klass.LookupTypeParameter(String::Handle(String::New(name))));
   EXPECT(!param.IsNull());
-  return param.raw();
+  return param.ptr();
 }
 
 TypeParameterPtr GetFunctionTypeParameter(const Function& fun,
@@ -67,12 +67,12 @@
   const auto& param = TypeParameter::Handle(
       fun.LookupTypeParameter(String::Handle(String::New(name)), &fun_level));
   EXPECT(!param.IsNull());
-  return param.raw();
+  return param.ptr();
 }
 
 ObjectPtr Invoke(const Library& lib, const char* name) {
   Thread* thread = Thread::Current();
-  Dart_Handle api_lib = Api::NewHandle(thread, lib.raw());
+  Dart_Handle api_lib = Api::NewHandle(thread, lib.ptr());
   Dart_Handle result;
   {
     TransitionVMToNative transition(thread);
@@ -97,7 +97,7 @@
   auto pipeline = CompilationPipeline::New(zone, function_);
 
   parsed_function_ = new (zone)
-      ParsedFunction(thread, Function::ZoneHandle(zone, function_.raw()));
+      ParsedFunction(thread, Function::ZoneHandle(zone, function_.ptr()));
   pipeline->ParseFunction(parsed_function_);
 
   // Extract type feedback before the graph is built, as the graph
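These helpers also show the return-type side of the convention: lookup functions return bare tagged-pointer types (LibraryPtr, FunctionPtr, ClassPtr, ...), and a handle produces one via ptr() on the way out. A minimal stand-in of that shape:

#include <stdint.h>

struct FunctionPtr {
  uintptr_t tagged_;
};

class Function {  // stand-in handle
 public:
  explicit Function(FunctionPtr p) : ptr_(p) {}
  FunctionPtr ptr() const { return ptr_; }  // was raw()

 private:
  FunctionPtr ptr_;
};

// Helpers hand back the raw tagged pointer, never the stack-local handle.
FunctionPtr LookupResult(const Function& handle) {
  return handle.ptr();
}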
diff --git a/runtime/vm/compiler/backend/il_test_helper.h b/runtime/vm/compiler/backend/il_test_helper.h
index aedac86..e34a5fe 100644
--- a/runtime/vm/compiler/backend/il_test_helper.h
+++ b/runtime/vm/compiler/backend/il_test_helper.h
@@ -317,7 +317,7 @@
         FunctionType::ZoneHandle(FunctionType::New());
     const Function& func = Function::ZoneHandle(Function::New(
         signature, String::Handle(Symbols::New(thread, "dummy")),
-        FunctionLayout::kRegularFunction,
+        UntaggedFunction::kRegularFunction,
         /*is_static=*/true,
         /*is_const=*/false,
         /*is_abstract=*/false,
diff --git a/runtime/vm/compiler/backend/il_x64.cc b/runtime/vm/compiler/backend/il_x64.cc
index b49f95c..d81e582 100644
--- a/runtime/vm/compiler/backend/il_x64.cc
+++ b/runtime/vm/compiler/backend/il_x64.cc
@@ -326,7 +326,7 @@
   __ Bind(&done);
 #endif
   ASSERT(__ constant_pool_allowed());
-  if (yield_index() != PcDescriptorsLayout::kInvalidYieldIndex) {
+  if (yield_index() != UntaggedPcDescriptors::kInvalidYieldIndex) {
     compiler->EmitYieldPositionMetadata(source(), yield_index());
   }
   __ LeaveDartFrame();  // Disallows constant pool use.
@@ -899,7 +899,7 @@
   Location right = locs()->in(1);
   if (right.IsConstant()) {
     ASSERT(right.constant().IsSmi());
-    const int64_t imm = static_cast<int64_t>(right.constant().raw());
+    const int64_t imm = static_cast<int64_t>(right.constant().ptr());
     __ TestImmediate(left_reg, compiler::Immediate(imm));
   } else {
     __ testq(left_reg, right.reg());
@@ -1024,7 +1024,7 @@
     __ LoadNativeEntry(RBX, &label,
                        compiler::ObjectPoolBuilderEntry::kPatchable);
     compiler->GeneratePatchableCall(source(), *stub,
-                                    PcDescriptorsLayout::kOther, locs());
+                                    UntaggedPcDescriptors::kOther, locs());
   } else {
     if (is_bootstrap_native()) {
       stub = &StubCode::CallBootstrapNative();
@@ -1037,7 +1037,7 @@
         reinterpret_cast<uword>(native_c_function()));
     __ LoadNativeEntry(RBX, &label,
                        compiler::ObjectPoolBuilderEntry::kNotPatchable);
-    compiler->GenerateStubCall(source(), *stub, PcDescriptorsLayout::kOther,
+    compiler->GenerateStubCall(source(), *stub, UntaggedPcDescriptors::kOther,
                                locs());
   }
   __ popq(result);
@@ -1075,7 +1075,7 @@
   // 'movq'.
   __ leaq(TMP, compiler::Address::AddressRIPRelative(0));
   compiler->EmitCallsiteMetadata(InstructionSource(), deopt_id(),
-                                 PcDescriptorsLayout::Kind::kOther, locs());
+                                 UntaggedPcDescriptors::Kind::kOther, locs());
   __ movq(compiler::Address(FPREG, kSavedCallerPcSlotFromFp * kWordSize), TMP);
 
   if (CanExecuteGeneratedCodeInSafepoint()) {
@@ -1519,7 +1519,7 @@
 
     compiler->SaveLiveRegisters(locs);
     compiler->GenerateStubCall(InstructionSource(),  // No token position.
-                               stub, PcDescriptorsLayout::kOther, locs);
+                               stub, UntaggedPcDescriptors::kOther, locs);
     __ MoveRegister(result_, RAX);
     compiler->RestoreLiveRegisters(locs);
     __ jmp(exit_label());
@@ -2044,9 +2044,9 @@
 }
 
 void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  ASSERT(compiler::target::ObjectLayout::kClassIdTagSize == 16);
-  ASSERT(sizeof(FieldLayout::guarded_cid_) == 2);
-  ASSERT(sizeof(FieldLayout::is_nullable_) == 2);
+  ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
+  ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
+  ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
 
   const intptr_t value_cid = value()->Type()->ToCid();
   const intptr_t field_cid = field().guarded_cid();
@@ -2404,9 +2404,9 @@
 }
 
 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  ASSERT(compiler::target::ObjectLayout::kClassIdTagSize == 16);
-  ASSERT(sizeof(FieldLayout::guarded_cid_) == 2);
-  ASSERT(sizeof(FieldLayout::is_nullable_) == 2);
+  ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
+  ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
+  ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
 
   compiler::Label skip_store;
 
@@ -2694,9 +2694,9 @@
   // RDI: iterator which initially points to the start of the variable
   // data area to be initialized.
   if (num_elements > 0) {
-    const intptr_t array_size = instance_size - sizeof(ArrayLayout);
+    const intptr_t array_size = instance_size - sizeof(UntaggedArray);
     __ LoadObject(R12, Object::null_object());
-    __ leaq(RDI, compiler::FieldAddress(RAX, sizeof(ArrayLayout)));
+    __ leaq(RDI, compiler::FieldAddress(RAX, sizeof(UntaggedArray)));
     if (array_size < (kInlineArraySize * kWordSize)) {
       intptr_t current_offset = 0;
       while (current_offset < array_size) {
@@ -2747,7 +2747,7 @@
   const auto& allocate_array_stub =
       Code::ZoneHandle(compiler->zone(), object_store->allocate_array_stub());
   compiler->GenerateStubCall(source(), allocate_array_stub,
-                             PcDescriptorsLayout::kOther, locs(), deopt_id());
+                             UntaggedPcDescriptors::kOther, locs(), deopt_id());
   __ Bind(&done);
   ASSERT(locs()->out(0).reg() == kResultReg);
 }
@@ -2826,9 +2826,9 @@
 }
 
 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  ASSERT(compiler::target::ObjectLayout::kClassIdTagSize == 16);
-  ASSERT(sizeof(FieldLayout::guarded_cid_) == 2);
-  ASSERT(sizeof(FieldLayout::is_nullable_) == 2);
+  ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
+  ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
+  ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
 
   const Register instance_reg = locs()->in(0).reg();
   if (slot().representation() != kTagged) {
@@ -3080,7 +3080,7 @@
     __ Bind(&non_null_type_args);
   }
   // Lookup cache in stub before calling runtime.
-  compiler->GenerateStubCall(source(), GetStub(), PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), GetStub(), UntaggedPcDescriptors::kOther,
                              locs());
   __ Bind(&type_arguments_instantiated);
 }
@@ -3122,7 +3122,7 @@
     __ LoadImmediate(
         R10, compiler::Immediate(instruction()->num_context_variables()));
     compiler->GenerateStubCall(instruction()->source(), allocate_context_stub,
-                               PcDescriptorsLayout::kOther, locs);
+                               UntaggedPcDescriptors::kOther, locs);
     ASSERT(instruction()->locs()->out(0).reg() == RAX);
     compiler->RestoreLiveRegisters(instruction()->locs());
     __ jmp(exit_label());
@@ -3173,7 +3173,7 @@
 
   __ LoadImmediate(R10, compiler::Immediate(num_context_variables()));
   compiler->GenerateStubCall(source(), allocate_context_stub,
-                             PcDescriptorsLayout::kOther, locs());
+                             UntaggedPcDescriptors::kOther, locs());
 }
 
 LocationSummary* CloneContextInstr::MakeLocationSummary(Zone* zone,
@@ -3195,7 +3195,7 @@
   const auto& clone_context_stub =
       Code::ZoneHandle(compiler->zone(), object_store->clone_context_stub());
   compiler->GenerateStubCall(source(), clone_context_stub,
-                             /*kind=*/PcDescriptorsLayout::kOther, locs());
+                             /*kind=*/UntaggedPcDescriptors::kOther, locs());
 }
 
 LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone,
@@ -3216,7 +3216,7 @@
     if (compiler->is_optimizing()) {
       compiler->AddDeoptIndexAtCall(deopt_id);
     } else {
-      compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, deopt_id,
+      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id,
                                      InstructionSource());
     }
   }
@@ -3299,7 +3299,7 @@
       compiler->RecordSafepoint(instruction()->locs(), kNumSlowPathArgs);
       compiler->RecordCatchEntryMoves();
       compiler->AddDescriptor(
-          PcDescriptorsLayout::kOther, compiler->assembler()->CodeSize(),
+          UntaggedPcDescriptors::kOther, compiler->assembler()->CodeSize(),
           instruction()->deopt_id(), instruction()->source(),
           compiler->CurrentTryIndex());
     } else {
@@ -3311,7 +3311,7 @@
     if (compiler->isolate_group()->use_osr() && !compiler->is_optimizing() &&
         instruction()->in_loop()) {
       // In unoptimized code, record loop stack checks as possible OSR entries.
-      compiler->AddCurrentDescriptor(PcDescriptorsLayout::kOsrEntry,
+      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kOsrEntry,
                                      instruction()->deopt_id(),
                                      InstructionSource());
     }
@@ -3788,7 +3788,7 @@
 
 static bool CanBeImmediate(const Object& constant) {
   return constant.IsSmi() &&
-         compiler::Immediate(static_cast<int64_t>(constant.raw())).is_int32();
+         compiler::Immediate(static_cast<int64_t>(constant.ptr())).is_int32();
 }
 
 static bool IsSmiValue(const Object& constant, intptr_t value) {
@@ -3897,7 +3897,7 @@
   if (locs()->in(1).IsConstant()) {
     const Object& constant = locs()->in(1).constant();
     ASSERT(constant.IsSmi());
-    const int64_t imm = static_cast<int64_t>(constant.raw());
+    const int64_t imm = static_cast<int64_t>(constant.ptr());
     switch (op_kind()) {
       case Token::kADD: {
         __ AddImmediate(left, compiler::Immediate(imm));
@@ -4507,10 +4507,10 @@
   auto object_store = IsolateGroup::Current()->object_store();
   const bool stubs_in_vm_isolate =
       object_store->allocate_mint_with_fpu_regs_stub()
-          ->ptr()
+          ->untag()
           ->InVMIsolateHeap() ||
       object_store->allocate_mint_without_fpu_regs_stub()
-          ->ptr()
+          ->untag()
           ->InVMIsolateHeap();
   const bool shared_slow_path_call = SlowPathSharingSupported(opt) &&
                                      FLAG_use_bare_instructions &&
@@ -4564,7 +4564,7 @@
     ASSERT(!locs()->live_registers()->ContainsRegister(
         AllocateMintABI::kResultReg));
     auto extended_env = compiler->SlowPathEnvironmentFor(this, 0);
-    compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
+    compiler->GenerateStubCall(source(), stub, UntaggedPcDescriptors::kOther,
                                locs(), DeoptId::kNone, extended_env);
   } else {
     BoxAllocationSlowPath::Allocate(compiler, this, compiler->mint_class(), out,
@@ -5960,7 +5960,7 @@
     Register length = length_loc.reg();
     const Smi& index = Smi::Cast(index_loc.constant());
     __ CompareImmediate(length,
-                        compiler::Immediate(static_cast<int64_t>(index.raw())));
+                        compiler::Immediate(static_cast<int64_t>(index.ptr())));
     __ j(BELOW_EQUAL, deopt);
   } else if (length_loc.IsConstant()) {
     const Smi& length = Smi::Cast(length_loc.constant());
@@ -5973,7 +5973,7 @@
       __ j(NEGATIVE, deopt);
     } else {
       __ CompareImmediate(
-          index, compiler::Immediate(static_cast<int64_t>(length.raw())));
+          index, compiler::Immediate(static_cast<int64_t>(length.ptr())));
       __ j(ABOVE_EQUAL, deopt);
     }
   } else {
@@ -6848,7 +6848,7 @@
     }
     // Add a deoptimization descriptor for deoptimizing instructions that
     // may be inserted before this instruction.
-    compiler->AddCurrentDescriptor(PcDescriptorsLayout::kDeopt, GetDeoptId(),
+    compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, GetDeoptId(),
                                    InstructionSource());
   }
   if (HasParallelMove()) {
@@ -6979,7 +6979,7 @@
   }
   __ call(RCX);
   compiler->EmitCallsiteMetadata(source(), deopt_id(),
-                                 PcDescriptorsLayout::kOther, locs());
+                                 UntaggedPcDescriptors::kOther, locs());
   __ Drop(argument_count);
 }
 
@@ -7035,7 +7035,7 @@
   }
   const Code& stub = Code::ZoneHandle(
       compiler->zone(), StubCode::GetAllocationStubForClass(cls()));
-  compiler->GenerateStubCall(source(), stub, PcDescriptorsLayout::kOther,
+  compiler->GenerateStubCall(source(), stub, UntaggedPcDescriptors::kOther,
                              locs());
 }
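
Every hunk in this section applies the same mechanical renames: the `*Layout` classes describing untagged object layouts become `Untagged*` (`PcDescriptorsLayout` -> `UntaggedPcDescriptors`, `FunctionLayout` -> `UntaggedFunction`), the handle accessor `raw()` becomes `ptr()`, and `->ptr()` on a tagged pointer becomes `->untag()`. A minimal stand-in sketch of the handle/tagged-pointer split these names describe; all types below are hypothetical, not the VM's real classes:

#include <cassert>

// Hypothetical stand-ins: a tagged pointer into the GC heap, and a handle
// wrapping it. Handle identity is compared through the tagged pointer, which
// is why every `a.raw() == b.raw()` in this patch becomes `a.ptr() == b.ptr()`.
struct ObjectPtr {
  void* tagged;
};
inline bool operator==(ObjectPtr a, ObjectPtr b) { return a.tagged == b.tagged; }

class Handle {
 public:
  explicit Handle(ObjectPtr p) : ptr_(p) {}
  ObjectPtr ptr() const { return ptr_; }  // the accessor renamed from raw()
 private:
  ObjectPtr ptr_;
};

int main() {
  ObjectPtr heap_object{reinterpret_cast<void*>(0x1000)};
  Handle a(heap_object);
  Handle b(heap_object);
  assert(a.ptr() == b.ptr());  // identity: both wrap the same tagged pointer
  return 0;
}
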
 
diff --git a/runtime/vm/compiler/backend/inliner.cc b/runtime/vm/compiler/backend/inliner.cc
index b991d39..ce2f680 100644
--- a/runtime/vm/compiler/backend/inliner.cc
+++ b/runtime/vm/compiler/backend/inliner.cc
@@ -111,7 +111,7 @@
 static bool IsCallRecursive(const Function& function, Definition* call) {
   Environment* env = call->env();
   while (env != NULL) {
-    if (function.raw() == env->function().raw()) {
+    if (function.ptr() == env->function().ptr()) {
       return true;
     }
     env = env->outer();
@@ -421,11 +421,11 @@
         if (current->IsPolymorphicInstanceCall()) {
           PolymorphicInstanceCallInstr* instance_call =
               current->AsPolymorphicInstanceCall();
-          target = instance_call->targets().FirstTarget().raw();
+          target = instance_call->targets().FirstTarget().ptr();
           call = instance_call;
         } else if (current->IsStaticCall()) {
           StaticCallInstr* static_call = current->AsStaticCall();
-          target = static_call->function().raw();
+          target = static_call->function().ptr();
           call = static_call;
         } else if (current->IsClosureCall()) {
           // TODO(srdjan): Add data for closure calls.
@@ -1281,7 +1281,7 @@
 
         if (error.IsLanguageError() &&
             (LanguageError::Cast(error).kind() == Report::kBailout)) {
-          if (error.raw() == Object::background_compilation_error().raw()) {
+          if (error.ptr() == Object::background_compilation_error().ptr()) {
             // Fall through to exit the compilation, and retry it later.
           } else {
             TRACE_INLINING(
@@ -1338,7 +1338,7 @@
         continue;
       }
       if ((info.inlined_depth == depth) &&
-          (info.caller->raw() == caller.raw()) &&
+          (info.caller->ptr() == caller.ptr()) &&
           !Contains(call_instructions_printed, info.call_instr->GetDeoptId())) {
         for (int t = 0; t < depth; t++) {
           THR_Print("  ");
@@ -1357,7 +1357,7 @@
         continue;
       }
       if ((info.inlined_depth == depth) &&
-          (info.caller->raw() == caller.raw()) &&
+          (info.caller->ptr() == caller.ptr()) &&
           !Contains(call_instructions_printed, info.call_instr->GetDeoptId())) {
         for (int t = 0; t < depth; t++) {
           THR_Print("  ");
@@ -1396,7 +1396,7 @@
     // TODO(zerny): Use a hash map for the cache.
     for (intptr_t i = 0; i < function_cache_.length(); ++i) {
       ParsedFunction* parsed_function = function_cache_[i];
-      if (parsed_function->function().raw() == function.raw()) {
+      if (parsed_function->function().ptr() == function.ptr()) {
         *in_cache = true;
         return parsed_function;
       }
@@ -1480,7 +1480,7 @@
           call->Receiver()->definition()->OriginalDefinition();
       if (AllocateObjectInstr* alloc = receiver->AsAllocateObject()) {
         if (!alloc->closure_function().IsNull()) {
-          target = alloc->closure_function().raw();
+          target = alloc->closure_function().ptr();
           ASSERT(alloc->cls().IsClosureClass());
         }
       } else if (ConstantInstr* constant = receiver->AsConstant()) {
@@ -1686,7 +1686,7 @@
 //   * JoinEntry: the inlined body is shared and this is a subsequent variant.
 bool PolymorphicInliner::CheckInlinedDuplicate(const Function& target) {
   for (intptr_t i = 0; i < inlined_variants_.length(); ++i) {
-    if ((target.raw() == inlined_variants_.TargetAt(i)->target->raw()) &&
+    if ((target.ptr() == inlined_variants_.TargetAt(i)->target->ptr()) &&
         !target.is_polymorphic_target()) {
       // The call target is shared with a previous inlined variant.  Share
       // the graph.  This requires a join block at the entry, and edge-split
@@ -1743,7 +1743,7 @@
 
 bool PolymorphicInliner::CheckNonInlinedDuplicate(const Function& target) {
   for (intptr_t i = 0; i < non_inlined_variants_->length(); ++i) {
-    if (target.raw() == non_inlined_variants_->TargetAt(i)->target->raw()) {
+    if (target.ptr() == non_inlined_variants_->TargetAt(i)->target->ptr()) {
       return true;
     }
   }
@@ -1768,7 +1768,7 @@
       Array::ZoneHandle(Z, call_->GetArgumentsDescriptor());
   InlinedCallData call_data(call_, arguments_descriptor, call_->FirstArgIndex(),
                             &arguments, caller_function_);
-  Function& target = Function::ZoneHandle(zone(), target_info.target->raw());
+  Function& target = Function::ZoneHandle(zone(), target_info.target->ptr());
   if (!owner_->TryInlining(target, call_->argument_names(), &call_data,
                            false)) {
     return false;
@@ -2257,10 +2257,10 @@
 // Use function name to determine if inlineable operator.
 // Add names as necessary.
 static bool IsInlineableOperator(const Function& function) {
-  return (function.name() == Symbols::IndexToken().raw()) ||
-         (function.name() == Symbols::AssignIndexToken().raw()) ||
-         (function.name() == Symbols::Plus().raw()) ||
-         (function.name() == Symbols::Minus().raw());
+  return (function.name() == Symbols::IndexToken().ptr()) ||
+         (function.name() == Symbols::AssignIndexToken().ptr()) ||
+         (function.name() == Symbols::Plus().ptr()) ||
+         (function.name() == Symbols::Minus().ptr());
 }
 
 bool FlowGraphInliner::FunctionHasPreferInlinePragma(const Function& function) {
@@ -2286,7 +2286,7 @@
   // replace them with inline FG before inlining introduces any superfluous
   // AssertAssignable instructions.
   if (function.IsDispatcherOrImplicitAccessor() &&
-      !(function.kind() == FunctionLayout::kDynamicInvocationForwarder &&
+      !(function.kind() == UntaggedFunction::kDynamicInvocationForwarder &&
         function.IsRecognized())) {
     // Smaller or same size as the call.
     return true;
@@ -2299,7 +2299,7 @@
 
   if (function.IsGetterFunction() || function.IsSetterFunction() ||
       IsInlineableOperator(function) ||
-      (function.kind() == FunctionLayout::kConstructor)) {
+      (function.kind() == UntaggedFunction::kConstructor)) {
     const intptr_t count = function.optimized_instruction_count();
     if ((count != 0) && (count < FLAG_inline_getters_setters_smaller_than)) {
       return true;
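
`IsInlineableOperator` above compares `function.name()` against canonical symbols by pointer: symbols are interned, one canonical object per spelling, so name equality reduces to pointer equality. A minimal sketch of that idiom with a hypothetical intern table standing in for the VM's Symbols:

#include <cassert>
#include <string>
#include <unordered_set>

// Hypothetical intern table: one canonical std::string per distinct spelling.
const std::string* Intern(const std::string& s) {
  static std::unordered_set<std::string> table;
  return &*table.insert(s).first;  // returns the existing copy on duplicates
}

// Interned names compare by pointer, mirroring
// `function.name() == Symbols::Plus().ptr()`.
bool IsInlineableOperator(const std::string* name) {
  return name == Intern("[]") || name == Intern("[]=") ||
         name == Intern("+") || name == Intern("-");
}

int main() {
  assert(IsInlineableOperator(Intern("+")));
  assert(!IsInlineableOperator(Intern("*")));
  return 0;
}
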
diff --git a/runtime/vm/compiler/backend/redundancy_elimination.cc b/runtime/vm/compiler/backend/redundancy_elimination.cc
index f623a31..573bc43 100644
--- a/runtime/vm/compiler/backend/redundancy_elimination.cc
+++ b/runtime/vm/compiler/backend/redundancy_elimination.cc
@@ -1756,7 +1756,7 @@
       value = store_static->value();
     }
     return value != nullptr && value->BindsToConstant() &&
-           (value->BoundConstant().raw() == Object::sentinel().raw());
+           (value->BoundConstant().ptr() == Object::sentinel().ptr());
   }
 
   // This optimization pass tries to get rid of lazy initializer calls in
diff --git a/runtime/vm/compiler/backend/slot.cc b/runtime/vm/compiler/backend/slot.cc
index bf23f54..5b814c5 100644
--- a/runtime/vm/compiler/backend/slot.cc
+++ b/runtime/vm/compiler/backend/slot.cc
@@ -364,8 +364,8 @@
     case Kind::kCapturedVariable:
       return (offset_in_bytes_ == other->offset_in_bytes_) &&
              (flags_ == other->flags_) &&
-             (DataAs<const String>()->raw() ==
-              other->DataAs<const String>()->raw());
+             (DataAs<const String>()->ptr() ==
+              other->DataAs<const String>()->ptr());
 
     case Kind::kDartField:
       return (offset_in_bytes_ == other->offset_in_bytes_) &&
diff --git a/runtime/vm/compiler/backend/slot.h b/runtime/vm/compiler/backend/slot.h
index d5e5c6c..1b554c5 100644
--- a/runtime/vm/compiler/backend/slot.h
+++ b/runtime/vm/compiler/backend/slot.h
@@ -52,17 +52,18 @@
 //   (i.e. initialized once at construction time and does not change after
 //   that) or like a non-final field.
 #define NULLABLE_BOXED_NATIVE_SLOTS_LIST(V)                                    \
-  V(Function, FunctionLayout, signature, FunctionType, FINAL)                  \
-  V(Context, ContextLayout, parent, Context, FINAL)                            \
-  V(Closure, ClosureLayout, instantiator_type_arguments, TypeArguments, FINAL) \
-  V(Closure, ClosureLayout, delayed_type_arguments, TypeArguments, FINAL)      \
-  V(Closure, ClosureLayout, function_type_arguments, TypeArguments, FINAL)     \
-  V(ClosureData, ClosureDataLayout, default_type_arguments, TypeArguments,     \
+  V(Function, UntaggedFunction, signature, FunctionType, FINAL)                \
+  V(Context, UntaggedContext, parent, Context, FINAL)                          \
+  V(Closure, UntaggedClosure, instantiator_type_arguments, TypeArguments,      \
     FINAL)                                                                     \
-  V(Type, TypeLayout, arguments, TypeArguments, FINAL)                         \
-  V(FunctionType, FunctionTypeLayout, type_parameters, TypeArguments, FINAL)   \
-  V(WeakProperty, WeakPropertyLayout, key, Dynamic, VAR)                       \
-  V(WeakProperty, WeakPropertyLayout, value, Dynamic, VAR)
+  V(Closure, UntaggedClosure, delayed_type_arguments, TypeArguments, FINAL)    \
+  V(Closure, UntaggedClosure, function_type_arguments, TypeArguments, FINAL)   \
+  V(ClosureData, UntaggedClosureData, default_type_arguments, TypeArguments,   \
+    FINAL)                                                                     \
+  V(Type, UntaggedType, arguments, TypeArguments, FINAL)                       \
+  V(FunctionType, UntaggedFunctionType, type_parameters, TypeArguments, FINAL) \
+  V(WeakProperty, UntaggedWeakProperty, key, Dynamic, VAR)                     \
+  V(WeakProperty, UntaggedWeakProperty, value, Dynamic, VAR)
 
 // The list of slots that correspond to non-nullable boxed fields of native
 // objects in the following format:
@@ -78,35 +79,35 @@
 //   (i.e. initialized once at construction time and does not change after
 //   that) or like a non-final field.
 #define NONNULLABLE_BOXED_NATIVE_SLOTS_LIST(V)                                 \
-  V(Array, ArrayLayout, length, Smi, FINAL)                                    \
-  V(Closure, ClosureLayout, function, Function, FINAL)                         \
-  V(Closure, ClosureLayout, context, Context, FINAL)                           \
-  V(Closure, ClosureLayout, hash, Context, VAR)                                \
-  V(ClosureData, ClosureDataLayout, default_type_arguments_info, Smi, FINAL)   \
-  V(Function, FunctionLayout, data, Dynamic, FINAL)                            \
-  V(Function, FunctionLayout, parameter_names, Array, FINAL)                   \
-  V(FunctionType, FunctionTypeLayout, parameter_types, Array, FINAL)           \
-  V(GrowableObjectArray, GrowableObjectArrayLayout, length, Smi, VAR)          \
-  V(GrowableObjectArray, GrowableObjectArrayLayout, data, Array, VAR)          \
-  V(TypedDataBase, TypedDataBaseLayout, length, Smi, FINAL)                    \
-  V(TypedDataView, TypedDataViewLayout, offset_in_bytes, Smi, FINAL)           \
-  V(TypedDataView, TypedDataViewLayout, data, Dynamic, FINAL)                  \
-  V(String, StringLayout, length, Smi, FINAL)                                  \
-  V(LinkedHashMap, LinkedHashMapLayout, index, TypedDataUint32Array, VAR)      \
-  V(LinkedHashMap, LinkedHashMapLayout, data, Array, VAR)                      \
-  V(LinkedHashMap, LinkedHashMapLayout, hash_mask, Smi, VAR)                   \
-  V(LinkedHashMap, LinkedHashMapLayout, used_data, Smi, VAR)                   \
-  V(LinkedHashMap, LinkedHashMapLayout, deleted_keys, Smi, VAR)                \
-  V(ArgumentsDescriptor, ArrayLayout, type_args_len, Smi, FINAL)               \
-  V(ArgumentsDescriptor, ArrayLayout, positional_count, Smi, FINAL)            \
-  V(ArgumentsDescriptor, ArrayLayout, count, Smi, FINAL)                       \
-  V(ArgumentsDescriptor, ArrayLayout, size, Smi, FINAL)                        \
-  V(PointerBase, PointerBaseLayout, data_field, Dynamic, FINAL)                \
-  V(TypeArguments, TypeArgumentsLayout, length, Smi, FINAL)                    \
-  V(TypeParameter, TypeParameterLayout, bound, Dynamic, FINAL)                 \
-  V(TypeParameter, TypeParameterLayout, name, Dynamic, FINAL)                  \
-  V(UnhandledException, UnhandledExceptionLayout, exception, Dynamic, FINAL)   \
-  V(UnhandledException, UnhandledExceptionLayout, stacktrace, Dynamic, FINAL)
+  V(Array, UntaggedArray, length, Smi, FINAL)                                  \
+  V(Closure, UntaggedClosure, function, Function, FINAL)                       \
+  V(Closure, UntaggedClosure, context, Context, FINAL)                         \
+  V(Closure, UntaggedClosure, hash, Context, VAR)                              \
+  V(ClosureData, UntaggedClosureData, default_type_arguments_info, Smi, FINAL) \
+  V(Function, UntaggedFunction, data, Dynamic, FINAL)                          \
+  V(Function, UntaggedFunction, parameter_names, Array, FINAL)                 \
+  V(FunctionType, UntaggedFunctionType, parameter_types, Array, FINAL)         \
+  V(GrowableObjectArray, UntaggedGrowableObjectArray, length, Smi, VAR)        \
+  V(GrowableObjectArray, UntaggedGrowableObjectArray, data, Array, VAR)        \
+  V(TypedDataBase, UntaggedTypedDataBase, length, Smi, FINAL)                  \
+  V(TypedDataView, UntaggedTypedDataView, offset_in_bytes, Smi, FINAL)         \
+  V(TypedDataView, UntaggedTypedDataView, data, Dynamic, FINAL)                \
+  V(String, UntaggedString, length, Smi, FINAL)                                \
+  V(LinkedHashMap, UntaggedLinkedHashMap, index, TypedDataUint32Array, VAR)    \
+  V(LinkedHashMap, UntaggedLinkedHashMap, data, Array, VAR)                    \
+  V(LinkedHashMap, UntaggedLinkedHashMap, hash_mask, Smi, VAR)                 \
+  V(LinkedHashMap, UntaggedLinkedHashMap, used_data, Smi, VAR)                 \
+  V(LinkedHashMap, UntaggedLinkedHashMap, deleted_keys, Smi, VAR)              \
+  V(ArgumentsDescriptor, UntaggedArray, type_args_len, Smi, FINAL)             \
+  V(ArgumentsDescriptor, UntaggedArray, positional_count, Smi, FINAL)          \
+  V(ArgumentsDescriptor, UntaggedArray, count, Smi, FINAL)                     \
+  V(ArgumentsDescriptor, UntaggedArray, size, Smi, FINAL)                      \
+  V(PointerBase, UntaggedPointerBase, data_field, Dynamic, FINAL)              \
+  V(TypeArguments, UntaggedTypeArguments, length, Smi, FINAL)                  \
+  V(TypeParameter, UntaggedTypeParameter, bound, Dynamic, FINAL)               \
+  V(TypeParameter, UntaggedTypeParameter, name, Dynamic, FINAL)                \
+  V(UnhandledException, UntaggedUnhandledException, exception, Dynamic, FINAL) \
+  V(UnhandledException, UntaggedUnhandledException, stacktrace, Dynamic, FINAL)
 
 // List of slots that correspond to unboxed fields of native objects in the
 // following format:
@@ -124,10 +125,10 @@
 //
 // Note: As the underlying field is unboxed, these slots cannot be nullable.
 #define UNBOXED_NATIVE_SLOTS_LIST(V)                                           \
-  V(Function, FunctionLayout, kind_tag, Uint32, FINAL)                         \
-  V(Function, FunctionLayout, packed_fields, Uint32, FINAL)                    \
-  V(FunctionType, FunctionTypeLayout, packed_fields, Uint32, FINAL)            \
-  V(TypeParameter, TypeParameterLayout, flags, Uint8, FINAL)
+  V(Function, UntaggedFunction, kind_tag, Uint32, FINAL)                       \
+  V(Function, UntaggedFunction, packed_fields, Uint32, FINAL)                  \
+  V(FunctionType, UntaggedFunctionType, packed_fields, Uint32, FINAL)          \
+  V(TypeParameter, UntaggedTypeParameter, flags, Uint8, FINAL)
 
 // For uses that do not need the exact_type (boxed) or representation (unboxed)
 // or whether a boxed native slot is nullable. (Generally, such users only need
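
The slot lists above are X-macros: each `V(Class, UntaggedClass, field, ...)` row is expanded several times with different definitions of `V` to generate enumerators, accessors, and so on, which is why these hunks only have to rename the second column. A small self-contained sketch of the pattern (demo names, not the real slot machinery):

#include <cstdio>

// Demo list in the same shape as the slot lists above.
#define DEMO_SLOTS_LIST(V)                                                     \
  V(Array, UntaggedArray, length)                                              \
  V(String, UntaggedString, length)

// Expansion 1: one enumerator per row.
enum class SlotKind {
#define DECLARE_KIND(Class, Untagged, Field) k##Class##_##Field,
  DEMO_SLOTS_LIST(DECLARE_KIND)
#undef DECLARE_KIND
};

// Expansion 2: one printable name per row.
static const char* const kSlotNames[] = {
#define DECLARE_NAME(Class, Untagged, Field) #Class "." #Field,
    DEMO_SLOTS_LIST(DECLARE_NAME)
#undef DECLARE_NAME
};

int main() {
  // Prints "Array.length" and "String.length".
  for (const char* name : kSlotNames) std::printf("%s\n", name);
  return 0;
}
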
diff --git a/runtime/vm/compiler/backend/slot_test.cc b/runtime/vm/compiler/backend/slot_test.cc
index 647e564..a7e95b4 100644
--- a/runtime/vm/compiler/backend/slot_test.cc
+++ b/runtime/vm/compiler/backend/slot_test.cc
@@ -49,7 +49,7 @@
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   const Function& dummy_function = Function::ZoneHandle(
       Function::New(signature, String::Handle(Symbols::New(thread, "foo")),
-                    FunctionLayout::kRegularFunction, false, false, false,
+                    UntaggedFunction::kRegularFunction, false, false, false,
                     false, false, dummy_class, TokenPosition::kMinSource));
 
   const Field& field = Field::Handle(
@@ -83,8 +83,8 @@
 
   // Check that the field was added (once) to the list of guarded fields.
   EXPECT_EQ(1, parsed_function->guarded_fields()->length());
-  EXPECT_EQ(parsed_function->guarded_fields()->At(0)->raw(),
-            field_clone_1.raw());
+  EXPECT_EQ(parsed_function->guarded_fields()->At(0)->ptr(),
+            field_clone_1.ptr());
 
   // Change the guarded state of the field to "unknown" (emulating concurrent
   // modification of the guarded state in the mutator) and create a new clone of

@@ -100,8 +100,8 @@
   const Slot& slot3 = Slot::Get(field_clone_3, parsed_function2);
   EXPECT_EQ(&slot1, &slot3);
   EXPECT_EQ(1, parsed_function2->guarded_fields()->length());
-  EXPECT_EQ(parsed_function2->guarded_fields()->At(0)->raw(),
-            field_clone_1.raw());
+  EXPECT_EQ(parsed_function2->guarded_fields()->At(0)->ptr(),
+            field_clone_1.ptr());
 }
 
 }  // namespace dart
diff --git a/runtime/vm/compiler/backend/type_propagator.cc b/runtime/vm/compiler/backend/type_propagator.cc
index a6fc022..3ae21f6 100644
--- a/runtime/vm/compiler/backend/type_propagator.cc
+++ b/runtime/vm/compiler/backend/type_propagator.cc
@@ -393,7 +393,7 @@
   } else if ((is_simple_instance_of || (instance_of != NULL)) &&
              comparison->InputAt(1)->BindsToConstant() &&
              comparison->InputAt(1)->BoundConstant().IsBool()) {
-    if (comparison->InputAt(1)->BoundConstant().raw() == Bool::False().raw()) {
+    if (comparison->InputAt(1)->BoundConstant().ptr() == Bool::False().ptr()) {
       negated = !negated;
     }
     BlockEntryInstr* true_successor =
@@ -863,7 +863,7 @@
     // *not* assignable to it (because int implements Comparable<num> and not
     // Comparable<int>).
     if (type.IsFutureOrType() ||
-        type.type_class() == CompilerState::Current().ComparableClass().raw()) {
+        type.type_class() == CompilerState::Current().ComparableClass().ptr()) {
       const auto& args = TypeArguments::Handle(Type::Cast(type).arguments());
       const auto& arg0 = AbstractType::Handle(args.TypeAt(0));
       return !recurse || CanPotentiallyBeSmi(arg0, /*recurse=*/true);
@@ -1146,7 +1146,7 @@
     // Do not trust static parameter type of 'operator ==' as it is a
     // non-nullable Object but VM handles comparison with null in
     // the callee, so 'operator ==' can take null as an argument.
-    if ((function.name() != Symbols::EqualOperator().raw()) &&
+    if ((function.name() != Symbols::EqualOperator().ptr()) &&
         (param->was_type_checked_by_caller() ||
          (is_unchecked_entry_param &&
           !param->is_explicit_covariant_parameter()))) {
@@ -1500,7 +1500,7 @@
 CompileType LoadFieldInstr::ComputeType() const {
   const AbstractType& field_type = slot().static_type();
   CompileType compile_type_cid = slot().ComputeCompileType();
-  if (field_type.raw() == AbstractType::null()) {
+  if (field_type.ptr() == AbstractType::null()) {
     return compile_type_cid;
   }
 
@@ -1749,7 +1749,7 @@
         AbstractType::Handle(TypeParameter::Cast(array_type).bound()));
   }
   if (!array_type.IsType()) {
-    return Object::dynamic_type().raw();
+    return Object::dynamic_type().ptr();
   }
   const intptr_t cid = array_type.type_class_id();
   if (cid == kGrowableObjectArrayCid || cid == kArrayCid ||
@@ -1759,7 +1759,7 @@
     const auto& type_args = TypeArguments::Handle(array_type.arguments());
     return type_args.TypeAtNullSafe(Array::kElementTypeTypeArgPos);
   }
-  return Object::dynamic_type().raw();
+  return Object::dynamic_type().ptr();
 }
 
 static AbstractTypePtr GetElementTypeFromArray(Value* array) {
@@ -1770,7 +1770,7 @@
     auto& elem_type = AbstractType::Handle(ExtractElementTypeFromArrayType(
         *(array->definition()->Type()->ToAbstractType())));
     if (!elem_type.IsDynamicType()) {
-      return elem_type.raw();
+      return elem_type.ptr();
     }
   }
   return ExtractElementTypeFromArrayType(*(array->Type()->ToAbstractType()));
diff --git a/runtime/vm/compiler/backend/type_propagator_test.cc b/runtime/vm/compiler/backend/type_propagator_test.cc
index c5415fa..a278009 100644
--- a/runtime/vm/compiler/backend/type_propagator_test.cc
+++ b/runtime/vm/compiler/backend/type_propagator_test.cc
@@ -172,7 +172,7 @@
   const FunctionType& signature = FunctionType::Handle(FunctionType::New());
   const Function& target_func = Function::ZoneHandle(Function::New(
       signature, String::Handle(Symbols::New(thread, "dummy2")),
-      FunctionLayout::kRegularFunction,
+      UntaggedFunction::kRegularFunction,
       /*is_static=*/true,
       /*is_const=*/false,
       /*is_abstract=*/false,
diff --git a/runtime/vm/compiler/backend/typed_data_aot_test.cc b/runtime/vm/compiler/backend/typed_data_aot_test.cc
index 26f6629..68bca62 100644
--- a/runtime/vm/compiler/backend/typed_data_aot_test.cc
+++ b/runtime/vm/compiler/backend/typed_data_aot_test.cc
@@ -135,10 +135,10 @@
       kMatchReturn,
   }));
 
-  EXPECT(length_call->Selector() == Symbols::GetLength().raw());
+  EXPECT(length_call->Selector() == Symbols::GetLength().ptr());
   EXPECT(pusharg1->InputAt(0)->definition()->IsParameter());
   EXPECT(pusharg2->InputAt(0)->definition()->IsParameter());
-  EXPECT(index_get_call->Selector() == Symbols::IndexToken().raw());
+  EXPECT(index_get_call->Selector() == Symbols::IndexToken().ptr());
 }
 
 // This test asserts that we are inlining get:length, [] and []= for all typed
diff --git a/runtime/vm/compiler/backend/yield_position_test.cc b/runtime/vm/compiler/backend/yield_position_test.cc
index 338595d..c28b700 100644
--- a/runtime/vm/compiler/backend/yield_position_test.cc
+++ b/runtime/vm/compiler/backend/yield_position_test.cc
@@ -27,7 +27,7 @@
     while (!it.Done()) {
       if (auto return_instr = it.Current()->AsReturn()) {
         if (return_instr->yield_index() !=
-            PcDescriptorsLayout::kInvalidYieldIndex) {
+            UntaggedPcDescriptors::kInvalidYieldIndex) {
           ASSERT(return_instr->yield_index() > 0);
           array->Add(
               Pair(return_instr->yield_index(), return_instr->token_pos()));
@@ -43,9 +43,9 @@
 static YieldPoints* GetYieldPointsFromCode(const Code& code) {
   auto array = new YieldPoints();
   const auto& pc_descriptor = PcDescriptors::Handle(code.pc_descriptors());
-  PcDescriptors::Iterator it(pc_descriptor, PcDescriptorsLayout::kOther);
+  PcDescriptors::Iterator it(pc_descriptor, UntaggedPcDescriptors::kOther);
   while (it.MoveNext()) {
-    if (it.YieldIndex() != PcDescriptorsLayout::kInvalidYieldIndex) {
+    if (it.YieldIndex() != UntaggedPcDescriptors::kInvalidYieldIndex) {
       array->Add(Pair(it.YieldIndex(), it.TokenPos()));
     }
   }
diff --git a/runtime/vm/compiler/call_specializer.cc b/runtime/vm/compiler/call_specializer.cc
index 7d815bf..2c4b6b0 100644
--- a/runtime/vm/compiler/call_specializer.cc
+++ b/runtime/vm/compiler/call_specializer.cc
@@ -744,8 +744,8 @@
     field = field.CloneFromOriginal();
   }
 
-  switch (flow_graph()->CheckForInstanceCall(call,
-                                             FunctionLayout::kImplicitGetter)) {
+  switch (flow_graph()->CheckForInstanceCall(
+      call, UntaggedFunction::kImplicitGetter)) {
     case FlowGraph::ToCheck::kCheckNull:
       AddCheckNull(call->Receiver(), call->function_name(), call->deopt_id(),
                    call->env(), call);
@@ -801,7 +801,7 @@
     return false;
   }
   const Function& target = targets.FirstTarget();
-  if (target.kind() != FunctionLayout::kImplicitSetter) {
+  if (target.kind() != UntaggedFunction::kImplicitSetter) {
     // Non-implicit setters are inlined like normal method calls.
     return false;
   }
@@ -811,8 +811,8 @@
     field = field.CloneFromOriginal();
   }
 
-  switch (flow_graph()->CheckForInstanceCall(instr,
-                                             FunctionLayout::kImplicitSetter)) {
+  switch (flow_graph()->CheckForInstanceCall(
+      instr, UntaggedFunction::kImplicitSetter)) {
     case FlowGraph::ToCheck::kCheckNull:
       AddCheckNull(instr->Receiver(), instr->function_name(), instr->deopt_id(),
                    instr->env(), instr);
@@ -962,7 +962,7 @@
     return false;
   }
   const Function& target = targets.FirstTarget();
-  if (target.kind() != FunctionLayout::kImplicitGetter) {
+  if (target.kind() != UntaggedFunction::kImplicitGetter) {
     // Non-implicit getters are inlined like normal methods by conventional
     // inlining in FlowGraphInliner.
     return false;
@@ -1126,14 +1126,14 @@
     results->Add(cls.id());
     results->Add(static_cast<intptr_t>(is_subtype));
     if (prev.IsNull()) {
-      prev = Bool::Get(is_subtype).raw();
+      prev = Bool::Get(is_subtype).ptr();
     } else {
       if (is_subtype != prev.value()) {
         results_differ = true;
       }
     }
   }
-  return results_differ ? Bool::null() : prev.raw();
+  return results_differ ? Bool::null() : prev.ptr();
 }
 
 // Returns true if checking against this type is a direct class id comparison.
@@ -1265,12 +1265,12 @@
     instantiator_type_args = flow_graph()->constant_null();
     function_type_args = flow_graph()->constant_null();
     ASSERT(call->MatchesCoreName(Symbols::_simpleInstanceOf()));
-    type = AbstractType::Cast(call->ArgumentAt(1)->AsConstant()->value()).raw();
+    type = AbstractType::Cast(call->ArgumentAt(1)->AsConstant()->value()).ptr();
   } else {
     ASSERT(call->ArgumentCount() == 4);
     instantiator_type_args = call->ArgumentAt(1);
     function_type_args = call->ArgumentAt(2);
-    type = AbstractType::Cast(call->ArgumentAt(3)->AsConstant()->value()).raw();
+    type = AbstractType::Cast(call->ArgumentAt(3)->AsConstant()->value()).ptr();
   }
 
   if (TryOptimizeInstanceOfUsingStaticTypes(call, type)) {
@@ -1561,10 +1561,10 @@
 }
 
 void TypedDataSpecializer::TryInlineCall(TemplateDartCall<0>* call) {
-  const bool is_length_getter = call->Selector() == Symbols::GetLength().raw();
-  const bool is_index_get = call->Selector() == Symbols::IndexToken().raw();
+  const bool is_length_getter = call->Selector() == Symbols::GetLength().ptr();
+  const bool is_index_get = call->Selector() == Symbols::IndexToken().ptr();
   const bool is_index_set =
-      call->Selector() == Symbols::AssignIndexToken().raw();
+      call->Selector() == Symbols::AssignIndexToken().ptr();
 
   if (is_length_getter || is_index_get || is_index_set) {
     EnsureIsInitialized();
diff --git a/runtime/vm/compiler/cha.cc b/runtime/vm/compiler/cha.cc
index a21714f..790c237 100644
--- a/runtime/vm/compiler/cha.cc
+++ b/runtime/vm/compiler/cha.cc
@@ -13,11 +13,11 @@
 
 void CHA::AddToGuardedClasses(const Class& cls, intptr_t subclass_count) {
   for (intptr_t i = 0; i < guarded_classes_.length(); i++) {
-    if (guarded_classes_[i].cls->raw() == cls.raw()) {
+    if (guarded_classes_[i].cls->ptr() == cls.ptr()) {
       return;
     }
   }
-  GuardedClassInfo info = {&Class::ZoneHandle(thread_->zone(), cls.raw()),
+  GuardedClassInfo info = {&Class::ZoneHandle(thread_->zone(), cls.ptr()),
                            subclass_count};
   guarded_classes_.Add(info);
   return;
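
`AddToGuardedClasses` stores the address of a `Class::ZoneHandle(...)` inside `GuardedClassInfo`. Zone handles are allocated in an arena tied to the compilation rather than on the C++ stack, so the stored pointer stays valid after the expression ends. A toy arena showing why such addresses may be retained (stand-in types only, not the VM's Zone):

#include <cassert>
#include <vector>

// Toy zone/arena: everything allocated in it lives until the zone dies, so
// pointers to zone objects can be kept in side structures, unlike the address
// of a C++ temporary.
class Zone {
 public:
  int* NewInt(int value) {
    storage_.push_back(new int(value));
    return storage_.back();
  }
  ~Zone() {
    for (int* p : storage_) delete p;
  }

 private:
  std::vector<int*> storage_;
};

int main() {
  Zone zone;
  std::vector<int*> guarded;           // stand-in for guarded_classes_
  guarded.push_back(zone.NewInt(42));  // safe: the int outlives this statement
  assert(*guarded[0] == 42);
  return 0;
}
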
diff --git a/runtime/vm/compiler/ffi/call.cc b/runtime/vm/compiler/ffi/call.cc
index 5dc219a..1735a69 100644
--- a/runtime/vm/compiler/ffi/call.cc
+++ b/runtime/vm/compiler/ffi/call.cc
@@ -23,7 +23,7 @@
   const Class& owner_class = Class::Handle(zone, lib.toplevel_class());
   FunctionType& signature = FunctionType::Handle(zone, FunctionType::New());
   Function& function = Function::Handle(
-      zone, Function::New(signature, name, FunctionLayout::kFfiTrampoline,
+      zone, Function::New(signature, name, UntaggedFunction::kFfiTrampoline,
                           /*is_static=*/true,
                           /*is_const=*/false,
                           /*is_abstract=*/false,
@@ -43,7 +43,7 @@
   const intptr_t num_params = dart_signature.num_fixed_parameters();
   for (intptr_t i = 0; i < num_params; ++i) {
     if (i == 0) {
-      name = Symbols::ClosureParameter().raw();
+      name = Symbols::ClosureParameter().ptr();
     } else {
       name = Symbols::NewFormatted(thread, ":ffi_param%" Pd, i);
     }
@@ -54,7 +54,7 @@
   signature ^= ClassFinalizer::FinalizeType(signature);
   function.set_signature(signature);
 
-  return function.raw();
+  return function.ptr();
 }
 
 }  // namespace ffi
diff --git a/runtime/vm/compiler/ffi/callback.cc b/runtime/vm/compiler/ffi/callback.cc
index 7e23851..c38f9d6 100644
--- a/runtime/vm/compiler/ffi/callback.cc
+++ b/runtime/vm/compiler/ffi/callback.cc
@@ -30,7 +30,7 @@
   const Class& owner_class = Class::Handle(zone, lib.toplevel_class());
   const Function& function =
       Function::Handle(zone, Function::New(Object::null_function_type(), name,
-                                           FunctionLayout::kFfiTrampoline,
+                                           UntaggedFunction::kFfiTrampoline,
                                            /*is_static=*/true,
                                            /*is_const=*/false,
                                            /*is_abstract=*/false,
@@ -60,7 +60,7 @@
     function.SetFfiCallbackExceptionalReturn(exceptional_return);
   }
 
-  return function.raw();
+  return function.ptr();
 }
 
 }  // namespace ffi
diff --git a/runtime/vm/compiler/frontend/base_flow_graph_builder.cc b/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
index 96e9e57..c931003 100644
--- a/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
+++ b/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
@@ -1046,9 +1046,9 @@
 #ifdef PRODUCT
   return Fragment();
 #else
-  return Fragment(new (Z) DebugStepCheckInstr(InstructionSource(position),
-                                              PcDescriptorsLayout::kRuntimeCall,
-                                              GetNextDeoptId()));
+  return Fragment(new (Z) DebugStepCheckInstr(
+      InstructionSource(position), UntaggedPcDescriptors::kRuntimeCall,
+      GetNextDeoptId()));
 #endif
 }
 
@@ -1108,7 +1108,7 @@
 Fragment BaseFlowGraphBuilder::BuildEntryPointsIntrospection() {
   if (!FLAG_enable_testing_pragmas) return Drop();
 
-  auto& function = Function::Handle(Z, parsed_function_->function().raw());
+  auto& function = Function::Handle(Z, parsed_function_->function().ptr());
 
   if (function.IsImplicitClosureFunction()) {
     const auto& parent = Function::Handle(Z, function.parent_function());
@@ -1125,7 +1125,7 @@
       options.IsNull() || !options.IsClosure()) {
     return Drop();
   }
-  auto& closure = Closure::ZoneHandle(Z, Closure::Cast(options).raw());
+  auto& closure = Closure::ZoneHandle(Z, Closure::Cast(options).ptr());
   LocalVariable* entry_point_num = MakeTemporary();
 
   auto& function_name = String::ZoneHandle(
diff --git a/runtime/vm/compiler/frontend/base_flow_graph_builder.h b/runtime/vm/compiler/frontend/base_flow_graph_builder.h
index 413fb19..50e198a 100644
--- a/runtime/vm/compiler/frontend/base_flow_graph_builder.h
+++ b/runtime/vm/compiler/frontend/base_flow_graph_builder.h
@@ -287,7 +287,7 @@
                                TargetEntryInstr** otherwise_entry);
   Fragment Return(
       TokenPosition position,
-      intptr_t yield_index = PcDescriptorsLayout::kInvalidYieldIndex);
+      intptr_t yield_index = UntaggedPcDescriptors::kInvalidYieldIndex);
   Fragment CheckStackOverflow(TokenPosition position,
                               intptr_t stack_depth,
                               intptr_t loop_depth);
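
`Fragment` in this header is the flow-graph builders' unit of composition: a chain of IL instructions that later hunks assemble step by step with `instructions += ...`. A toy model of that appending idiom (stand-in types; the real Fragment links Instruction objects rather than strings):

#include <string>
#include <vector>

// Stand-in Fragment: an ordered instruction list where += appends another
// fragment, mirroring `instructions += Constant(...); instructions += Drop();`.
struct Fragment {
  std::vector<std::string> instructions;

  Fragment& operator+=(const Fragment& other) {
    instructions.insert(instructions.end(), other.instructions.begin(),
                        other.instructions.end());
    return *this;
  }
};

Fragment Constant(const std::string& value) {
  return Fragment{{"Constant(" + value + ")"}};
}
Fragment Drop() { return Fragment{{"Drop()"}}; }

int main() {
  Fragment body;
  body += Constant("null");  // instructions accumulate in program order
  body += Drop();
  return body.instructions.size() == 2 ? 0 : 1;
}
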
diff --git a/runtime/vm/compiler/frontend/constant_reader.cc b/runtime/vm/compiler/frontend/constant_reader.cc
index af38fcf..f968c45 100644
--- a/runtime/vm/compiler/frontend/constant_reader.cc
+++ b/runtime/vm/compiler/frontend/constant_reader.cc
@@ -29,7 +29,7 @@
                     "Not a constant expression: unexpected kernel tag %s (%d)",
                     Reader::TagName(tag), tag);
   }
-  return result_.raw();
+  return result_.ptr();
 }
 
 InstancePtr ConstantReader::ReadConstantExpression() {
@@ -54,7 +54,7 @@
                     "Not a constant expression: unexpected kernel tag %s (%d)",
                     Reader::TagName(tag), tag);
   }
-  return result_.raw();
+  return result_.ptr();
 }
 
 ObjectPtr ConstantReader::ReadAnnotations() {
@@ -85,7 +85,7 @@
         H.thread()->isolate_group()->kernel_constants_mutex());
     KernelConstantsMap constant_map(H.info().constants());
     result_ ^= constant_map.GetOrNull(constant_offset);
-    ASSERT(constant_map.Release().raw() == H.info().constants());
+    ASSERT(constant_map.Release().ptr() == H.info().constants());
   }
 
   // On miss, evaluate, and insert value.
@@ -96,10 +96,10 @@
         H.thread()->isolate_group()->kernel_constants_mutex());
     KernelConstantsMap constant_map(H.info().constants());
     auto insert = constant_map.InsertNewOrGetValue(constant_offset, result_);
-    ASSERT(insert == result_.raw());
+    ASSERT(insert == result_.ptr());
     H.info().set_constants(constant_map.Release());  // update!
   }
-  return result_.raw();
+  return result_.ptr();
 }
 
 bool ConstantReader::IsInstanceConstant(intptr_t constant_offset,
@@ -111,7 +111,7 @@
   // Peek for an instance of the given clazz.
   if (reader.ReadByte() == kInstanceConstant) {
     const NameIndex index = reader.ReadCanonicalNameReference();
-    return H.LookupClassByKernelClass(index) == clazz.raw();
+    return H.LookupClassByKernelClass(index) == clazz.ptr();
   }
   return false;
 }
@@ -130,8 +130,8 @@
       instance = Instance::null();
       break;
     case kBoolConstant:
-      instance = reader.ReadByte() == 1 ? Object::bool_true().raw()
-                                        : Object::bool_false().raw();
+      instance = reader.ReadByte() == 1 ? Object::bool_true().ptr()
+                                        : Object::bool_false().ptr();
       break;
     case kIntConstant: {
       uint8_t payload = 0;
@@ -170,7 +170,7 @@
       instance = Double::New(reader.ReadDouble(), Heap::kOld);
       break;
     case kStringConstant:
-      instance = H.DartSymbolPlain(reader.ReadStringReference()).raw();
+      instance = H.DartSymbolPlain(reader.ReadStringReference()).ptr();
       break;
     case kSymbolConstant: {
       Library& library = Library::Handle(Z);
@@ -222,7 +222,7 @@
         constant = ReadConstant(entry_offset);
         array.SetAt(j, constant);
       }
-      instance = array.raw();
+      instance = array.ptr();
       break;
     }
     case kInstanceConstant: {
@@ -295,7 +295,7 @@
       }
       type_arguments = type_arguments.Canonicalize(Thread::Current(), nullptr);
       // Make a copy of the old closure, and set delayed type arguments.
-      Closure& closure = Closure::Handle(Z, Closure::RawCast(constant.raw()));
+      Closure& closure = Closure::Handle(Z, Closure::RawCast(constant.ptr()));
       Function& function = Function::Handle(Z, closure.function());
       const auto& type_arguments2 =
           TypeArguments::Handle(Z, closure.instantiator_type_arguments());
@@ -324,7 +324,7 @@
       // safety mode currently in use (sound or unsound) or migration state of
       // the declaring library (legacy or opted-in).
       TypeTranslator type_translator(&reader, this, active_class_, true);
-      instance = type_translator.BuildType().raw();
+      instance = type_translator.BuildType().ptr();
       break;
     }
     default:
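
The `ReadConstant` hunks above implement lock-guarded memoization: probe the constants map under the mutex, evaluate on a miss outside the lock, then insert under the mutex again and keep whichever value won the race (hence the `insert == result_.ptr()` assertion). A compact stand-in sketch of that shape, with toy names in place of `KernelConstantsMap` and `kernel_constants_mutex`:

#include <cassert>
#include <cstdint>
#include <map>
#include <mutex>

static std::mutex g_constants_mutex;
static std::map<int64_t, int64_t> g_constants;  // stand-in for the constants map

// Hypothetical "expensive" evaluation standing in for the real decoder.
static int64_t EvaluateConstant(int64_t offset) { return offset * 2; }

int64_t ReadConstant(int64_t offset) {
  {
    std::lock_guard<std::mutex> lock(g_constants_mutex);
    auto it = g_constants.find(offset);
    if (it != g_constants.end()) return it->second;  // cache hit
  }
  const int64_t value = EvaluateConstant(offset);  // evaluated outside the lock
  std::lock_guard<std::mutex> lock(g_constants_mutex);
  // On a racing insert the first writer wins, like InsertNewOrGetValue.
  return g_constants.emplace(offset, value).first->second;
}

int main() {
  assert(ReadConstant(21) == 42);
  assert(ReadConstant(21) == 42);  // second call is served from the cache
  return 0;
}
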
diff --git a/runtime/vm/compiler/frontend/constant_reader.h b/runtime/vm/compiler/frontend/constant_reader.h
index 85f230d..94e8b3b 100644
--- a/runtime/vm/compiler/frontend/constant_reader.h
+++ b/runtime/vm/compiler/frontend/constant_reader.h
@@ -59,7 +59,7 @@
     return (key1.Value() == key2.Value());
   }
   static bool IsMatch(const intptr_t key1, const Object& b) {
-    return KeyAsSmi(key1) == Smi::Cast(b).raw();
+    return KeyAsSmi(key1) == Smi::Cast(b).ptr();
   }
   static uword Hash(const Object& obj) {
     const Smi& key = Smi::Cast(obj);
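
This header hunk touches the map's traits struct: the hash map is parameterized over a small policy type that knows how to hash a stored entry and how to match a raw lookup key (an integer offset) against a stored, boxed key. A toy version of that policy idiom, using a linear probe for brevity (stand-in types only):

#include <cassert>
#include <cstdint>
#include <vector>

struct Entry { int64_t boxed_key; int value; };

struct ConstantMapTraits {
  // Mirrors IsMatch above: raw integer key versus stored (boxed) key.
  static bool IsMatch(int64_t raw_key, const Entry& e) {
    return raw_key == e.boxed_key;
  }
  static uint64_t Hash(const Entry& e) {
    return static_cast<uint64_t>(e.boxed_key);
  }
};

template <typename Traits>
const Entry* Lookup(const std::vector<Entry>& table, int64_t raw_key) {
  for (const Entry& e : table) {
    if (Traits::IsMatch(raw_key, e)) return &e;
  }
  return nullptr;
}

int main() {
  std::vector<Entry> table = {{100, 7}, {200, 9}};
  assert(Lookup<ConstantMapTraits>(table, 200)->value == 9);
  assert(Lookup<ConstantMapTraits>(table, 300) == nullptr);
  return 0;
}
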
diff --git a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
index c59eb92..fbced06 100644
--- a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
+++ b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
@@ -321,7 +321,7 @@
         if (class_field.is_late()) {
           if (!is_constructor_initialized) {
             instructions += BuildLateFieldInitializer(
-                Field::ZoneHandle(Z, class_field.raw()),
+                Field::ZoneHandle(Z, class_field.ptr()),
                 initializer_tag == kSomething);
           }
         } else if (initializer_tag == kSomething) {
@@ -330,7 +330,7 @@
           // value produced by the field initializer. However we still need to
           // execute it for its side effects.
           instructions += BuildFieldInitializer(
-              Field::ZoneHandle(Z, class_field.raw()),
+              Field::ZoneHandle(Z, class_field.ptr()),
               /*only_for_side_effects=*/is_constructor_initialized);
           ExitScope(field_offset);
         }
@@ -357,7 +357,7 @@
         case kFieldInitializer: {
           ReadCanonicalNameReference();
           instructions += BuildFieldInitializer(
-              Field::ZoneHandle(Z, initializer_fields[i]->raw()),
+              Field::ZoneHandle(Z, initializer_fields[i]->ptr()),
               /*only_for_side_effects=*/false);
           break;
         }
@@ -708,7 +708,7 @@
   // included in b).  So we just use the normal implementation in the body.
   Fragment body;
   if ((dart_function.NumParameters() == 2) &&
-      (dart_function.name() == Symbols::EqualOperator().raw()) &&
+      (dart_function.name() == Symbols::EqualOperator().ptr()) &&
       (dart_function.Owner() != IG->object_store()->object_class())) {
     TargetEntryInstr* null_entry;
     TargetEntryInstr* non_null_entry;
@@ -1011,40 +1011,40 @@
   ParseKernelASTFunction();
 
   switch (function.kind()) {
-    case FunctionLayout::kRegularFunction:
-    case FunctionLayout::kGetterFunction:
-    case FunctionLayout::kSetterFunction:
-    case FunctionLayout::kClosureFunction:
-    case FunctionLayout::kConstructor: {
+    case UntaggedFunction::kRegularFunction:
+    case UntaggedFunction::kGetterFunction:
+    case UntaggedFunction::kSetterFunction:
+    case UntaggedFunction::kClosureFunction:
+    case UntaggedFunction::kConstructor: {
       if (B->IsRecognizedMethodForFlowGraph(function)) {
         return B->BuildGraphOfRecognizedMethod(function);
       }
       return BuildGraphOfFunction(function.IsGenerativeConstructor());
     }
-    case FunctionLayout::kImplicitGetter:
-    case FunctionLayout::kImplicitStaticGetter:
-    case FunctionLayout::kImplicitSetter: {
+    case UntaggedFunction::kImplicitGetter:
+    case UntaggedFunction::kImplicitStaticGetter:
+    case UntaggedFunction::kImplicitSetter: {
       const Field& field = Field::Handle(Z, function.accessor_field());
       if (field.is_const() && field.IsUninitialized()) {
         EvaluateConstFieldValue(field);
       }
       return B->BuildGraphOfFieldAccessor(function);
     }
-    case FunctionLayout::kFieldInitializer:
+    case UntaggedFunction::kFieldInitializer:
       return BuildGraphOfFieldInitializer();
-    case FunctionLayout::kDynamicInvocationForwarder:
+    case UntaggedFunction::kDynamicInvocationForwarder:
       return B->BuildGraphOfDynamicInvocationForwarder(function);
-    case FunctionLayout::kMethodExtractor:
+    case UntaggedFunction::kMethodExtractor:
       return flow_graph_builder_->BuildGraphOfMethodExtractor(function);
-    case FunctionLayout::kNoSuchMethodDispatcher:
+    case UntaggedFunction::kNoSuchMethodDispatcher:
       return flow_graph_builder_->BuildGraphOfNoSuchMethodDispatcher(function);
-    case FunctionLayout::kInvokeFieldDispatcher:
+    case UntaggedFunction::kInvokeFieldDispatcher:
       return flow_graph_builder_->BuildGraphOfInvokeFieldDispatcher(function);
-    case FunctionLayout::kImplicitClosureFunction:
+    case UntaggedFunction::kImplicitClosureFunction:
       return flow_graph_builder_->BuildGraphOfImplicitClosureFunction(function);
-    case FunctionLayout::kFfiTrampoline:
+    case UntaggedFunction::kFfiTrampoline:
       return flow_graph_builder_->BuildGraphOfFfiTrampoline(function);
-    case FunctionLayout::kIrregexpFunction:
+    case UntaggedFunction::kIrregexpFunction:
       break;
   }
   UNREACHABLE();
@@ -1061,13 +1061,13 @@
 
   // Mark forwarding stubs.
   switch (function.kind()) {
-    case FunctionLayout::kRegularFunction:
-    case FunctionLayout::kImplicitClosureFunction:
-    case FunctionLayout::kGetterFunction:
-    case FunctionLayout::kSetterFunction:
-    case FunctionLayout::kClosureFunction:
-    case FunctionLayout::kConstructor:
-    case FunctionLayout::kDynamicInvocationForwarder:
+    case UntaggedFunction::kRegularFunction:
+    case UntaggedFunction::kImplicitClosureFunction:
+    case UntaggedFunction::kGetterFunction:
+    case UntaggedFunction::kSetterFunction:
+    case UntaggedFunction::kClosureFunction:
+    case UntaggedFunction::kConstructor:
+    case UntaggedFunction::kDynamicInvocationForwarder:
       ReadForwardingStubTarget(function);
       break;
     default:
@@ -1077,31 +1077,31 @@
   set_scopes(parsed_function()->EnsureKernelScopes());
 
   switch (function.kind()) {
-    case FunctionLayout::kRegularFunction:
-    case FunctionLayout::kGetterFunction:
-    case FunctionLayout::kSetterFunction:
-    case FunctionLayout::kClosureFunction:
-    case FunctionLayout::kConstructor:
-    case FunctionLayout::kImplicitClosureFunction:
+    case UntaggedFunction::kRegularFunction:
+    case UntaggedFunction::kGetterFunction:
+    case UntaggedFunction::kSetterFunction:
+    case UntaggedFunction::kClosureFunction:
+    case UntaggedFunction::kConstructor:
+    case UntaggedFunction::kImplicitClosureFunction:
       ReadUntilFunctionNode();
       SetupDefaultParameterValues();
       break;
-    case FunctionLayout::kImplicitGetter:
-    case FunctionLayout::kImplicitStaticGetter:
-    case FunctionLayout::kImplicitSetter:
-    case FunctionLayout::kFieldInitializer:
-    case FunctionLayout::kMethodExtractor:
-    case FunctionLayout::kNoSuchMethodDispatcher:
-    case FunctionLayout::kInvokeFieldDispatcher:
-    case FunctionLayout::kFfiTrampoline:
+    case UntaggedFunction::kImplicitGetter:
+    case UntaggedFunction::kImplicitStaticGetter:
+    case UntaggedFunction::kImplicitSetter:
+    case UntaggedFunction::kFieldInitializer:
+    case UntaggedFunction::kMethodExtractor:
+    case UntaggedFunction::kNoSuchMethodDispatcher:
+    case UntaggedFunction::kInvokeFieldDispatcher:
+    case UntaggedFunction::kFfiTrampoline:
       break;
-    case FunctionLayout::kDynamicInvocationForwarder:
+    case UntaggedFunction::kDynamicInvocationForwarder:
       if (PeekTag() != kField) {
         ReadUntilFunctionNode();
         SetupDefaultParameterValues();
       }
       break;
-    case FunctionLayout::kIrregexpFunction:
+    case UntaggedFunction::kIrregexpFunction:
       UNREACHABLE();
       break;
   }
@@ -2203,7 +2203,7 @@
     interface_target = &Function::ZoneHandle(
         Z,
         H.LookupMethodByMember(itarget_name, H.DartGetterName(itarget_name)));
-    ASSERT(getter_name.raw() == interface_target->name());
+    ASSERT(getter_name.ptr() == interface_target->name());
   } else if (!H.IsRoot(itarget_name) && H.IsMethod(itarget_name)) {
     tearoff_interface_target = &Function::ZoneHandle(
         Z,
@@ -2295,7 +2295,7 @@
     interface_target = &Function::ZoneHandle(
         Z,
         H.LookupMethodByMember(itarget_name, H.DartSetterName(itarget_name)));
-    ASSERT(setter_name.raw() == interface_target->name());
+    ASSERT(setter_name.ptr() == interface_target->name());
   }
 
   if (direct_call.check_receiver_for_null_) {
@@ -2345,7 +2345,7 @@
                                       Zone* zone,
                                       const Class& klass) {
   Function& nsm_function = Function::Handle(zone);
-  Class& iterate_klass = Class::Handle(zone, klass.raw());
+  Class& iterate_klass = Class::Handle(zone, klass.ptr());
   if (!iterate_klass.IsNull() &&
       iterate_klass.EnsureIsFinalized(thread) == Error::null()) {
     while (!iterate_klass.IsNull()) {
@@ -2387,13 +2387,13 @@
 
   // Push the arguments for allocating the invocation mirror:
   //   - the name.
-  instructions += Constant(String::ZoneHandle(Z, name.raw()));
+  instructions += Constant(String::ZoneHandle(Z, name.ptr()));
 
   //   - the arguments descriptor.
   const Array& args_descriptor =
       Array::Handle(Z, ArgumentsDescriptor::NewBoxed(
                            num_type_arguments, num_arguments, argument_names));
-  instructions += Constant(Array::ZoneHandle(Z, args_descriptor.raw()));
+  instructions += Constant(Array::ZoneHandle(Z, args_descriptor.ptr()));
 
   //   - an array containing the actual arguments.
   instructions += LoadLocal(actuals_array);
@@ -2474,7 +2474,7 @@
 
     Function& nsm_function = GetNoSuchMethodOrDie(thread(), Z, parent_klass);
     instructions +=
-        StaticCall(position, Function::ZoneHandle(Z, nsm_function.raw()),
+        StaticCall(position, Function::ZoneHandle(Z, nsm_function.ptr()),
                    /* argument_count = */ 2, ICData::kNSMDispatch);
     instructions += DropTempsPreserveTop(1);  // Drop array
   } else {
@@ -2484,7 +2484,7 @@
     instructions += LoadLocal(parsed_function()->receiver_var());
 
     instructions +=
-        StaticCall(position, Function::ZoneHandle(Z, function.raw()),
+        StaticCall(position, Function::ZoneHandle(Z, function.ptr()),
                    /* argument_count = */ 1, Array::null_array(),
                    ICData::kSuper, &result_type);
   }
@@ -2532,7 +2532,7 @@
 
     Function& nsm_function = GetNoSuchMethodOrDie(thread(), Z, klass);
     instructions +=
-        StaticCall(position, Function::ZoneHandle(Z, nsm_function.raw()),
+        StaticCall(position, Function::ZoneHandle(Z, nsm_function.ptr()),
                    /* argument_count = */ 2, ICData::kNSMDispatch);
     instructions += Drop();  // Drop result of NoSuchMethod invocation
     instructions += Drop();  // Drop array
@@ -2546,7 +2546,7 @@
     SkipInterfaceMemberNameReference();  // skip target_reference.
 
     instructions += StaticCall(
-        position, Function::ZoneHandle(Z, function.raw()),
+        position, Function::ZoneHandle(Z, function.ptr()),
         /* argument_count = */ 2, Array::null_array(), ICData::kSuper,
         /*result_type=*/nullptr, /*type_args_len=*/0,
         /*use_unchecked_entry=*/true);
@@ -2579,7 +2579,7 @@
       // during loading. See also Class::InjectCIDFields.
       ASSERT(Class::Handle(field.Owner()).library() ==
                  Library::InternalLibrary() &&
-             Class::Handle(field.Owner()).Name() == Symbols::ClassID().raw());
+             Class::Handle(field.Owner()).Name() == Symbols::ClassID().ptr());
       return Constant(Instance::ZoneHandle(
           Z, Instance::RawCast(field.StaticConstFieldValue())));
     } else {
@@ -2793,7 +2793,7 @@
     interface_target = &Function::ZoneHandle(
         Z, H.LookupMethodByMember(itarget_name,
                                   H.DartProcedureName(itarget_name)));
-    ASSERT(name.raw() == interface_target->name());
+    ASSERT(name.ptr() == interface_target->name());
     ASSERT(!interface_target->IsGetterFunction());
   }
 
@@ -2812,7 +2812,7 @@
   //     those cases require a dynamic invocation forwarder.
   const Function* direct_call_target = &direct_call.target_;
   if (H.IsRoot(itarget_name) &&
-      (name.raw() != Symbols::EqualOperator().raw())) {
+      (name.ptr() != Symbols::EqualOperator().ptr())) {
     mangled_name = &String::ZoneHandle(
         Z, Function::CreateDynamicInvocationForwarderName(name));
     if (!direct_call_target->IsNull()) {
@@ -2862,7 +2862,7 @@
   // used. We must guarantee this invariant because violation will lead to an
   // illegal IL once we replace x.[]=(...) with a sequence that does not
   // actually produce any value. See http://dartbug.com/29135 for more details.
-  if (name.raw() == Symbols::AssignIndexToken().raw()) {
+  if (name.ptr() == Symbols::AssignIndexToken().ptr()) {
     instructions += Drop();
     instructions += NullConstant();
   }
@@ -2973,7 +2973,7 @@
 
     Function& nsm_function = GetNoSuchMethodOrDie(thread(), Z, klass);
     instructions += StaticCall(TokenPosition::kNoSource,
-                               Function::ZoneHandle(Z, nsm_function.raw()),
+                               Function::ZoneHandle(Z, nsm_function.ptr()),
                                /* argument_count = */ 2, ICData::kNSMDispatch);
     instructions += DropTempsPreserveTop(1);  // Drop actuals_array temp.
     return instructions;
@@ -3002,7 +3002,7 @@
     ++argument_count;                             // include receiver
     SkipInterfaceMemberNameReference();           // interfaceTargetReference
     return instructions +
-           StaticCall(position, Function::ZoneHandle(Z, function.raw()),
+           StaticCall(position, Function::ZoneHandle(Z, function.ptr()),
                       argument_count, argument_names, ICData::kSuper,
                       &result_type, type_args_len,
                       /*use_unchecked_entry_point=*/true);
@@ -3042,11 +3042,11 @@
 
   const bool special_case_unchecked_cast =
       klass.IsTopLevel() && (klass.library() == Library::InternalLibrary()) &&
-      (target.name() == Symbols::UnsafeCast().raw());
+      (target.name() == Symbols::UnsafeCast().ptr());
 
   const bool special_case_identical =
       klass.IsTopLevel() && (klass.library() == Library::CoreLibrary()) &&
-      (target.name() == Symbols::Identical().raw());
+      (target.name() == Symbols::Identical().ptr());
 
   const bool special_case =
       special_case_identical || special_case_unchecked_cast;
@@ -3141,7 +3141,7 @@
 
   if (klass.NumTypeArguments() > 0) {
     if (!klass.IsGeneric()) {
-      Type& type = Type::ZoneHandle(Z, T.ReceiverType(klass).raw());
+      Type& type = Type::ZoneHandle(Z, T.ReceiverType(klass).ptr());
 
       // TODO(27590): Can we move this code into [ReceiverType]?
       type ^= ClassFinalizer::FinalizeType(type, ClassFinalizer::kFinalize);
@@ -4789,9 +4789,9 @@
     //
     LocalVariable* exception_var = parsed_function()->ParameterVariable(2);
     LocalVariable* stack_trace_var = parsed_function()->ParameterVariable(3);
-    ASSERT(exception_var->name().raw() == Symbols::ExceptionParameter().raw());
-    ASSERT(stack_trace_var->name().raw() ==
-           Symbols::StackTraceParameter().raw());
+    ASSERT(exception_var->name().ptr() == Symbols::ExceptionParameter().ptr());
+    ASSERT(stack_trace_var->name().ptr() ==
+           Symbols::StackTraceParameter().ptr());
 
     TargetEntryInstr* no_error;
     TargetEntryInstr* error;
@@ -4924,7 +4924,7 @@
         // NOTE: This is not TokenPosition in the general sense!
         if (!closure_owner_.IsNull()) {
           function = Function::NewClosureFunctionWithKind(
-              FunctionLayout::kClosureFunction, *name,
+              UntaggedFunction::kClosureFunction, *name,
               parsed_function()->function(), position, closure_owner_);
         } else {
           function = Function::NewClosureFunction(
@@ -4935,14 +4935,14 @@
                                    FunctionNodeHelper::kSync);
         switch (function_node_helper.dart_async_marker_) {
           case FunctionNodeHelper::kSyncStar:
-            function.set_modifier(FunctionLayout::kSyncGen);
+            function.set_modifier(UntaggedFunction::kSyncGen);
             break;
           case FunctionNodeHelper::kAsync:
-            function.set_modifier(FunctionLayout::kAsync);
+            function.set_modifier(UntaggedFunction::kAsync);
             function.set_is_inlinable(!FLAG_causal_async_stacks);
             break;
           case FunctionNodeHelper::kAsyncStar:
-            function.set_modifier(FunctionLayout::kAsyncGen);
+            function.set_modifier(UntaggedFunction::kAsyncGen);
             function.set_is_inlinable(!FLAG_causal_async_stacks);
             break;
           default:
diff --git a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
index 0718d9d..f469fc4 100644
--- a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
+++ b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
@@ -155,7 +155,7 @@
   Fragment LoadLocal(LocalVariable* variable);
   Fragment Return(
       TokenPosition position,
-      intptr_t yield_index = PcDescriptorsLayout::kInvalidYieldIndex);
+      intptr_t yield_index = UntaggedPcDescriptors::kInvalidYieldIndex);
   Fragment EvaluateAssertion();
   Fragment RethrowException(TokenPosition position, int catch_try_index);
   Fragment ThrowNoSuchMethodError(const Function& target);
diff --git a/runtime/vm/compiler/frontend/kernel_to_il.cc b/runtime/vm/compiler/frontend/kernel_to_il.cc
index 6588cc1..054b900 100644
--- a/runtime/vm/compiler/frontend/kernel_to_il.cc
+++ b/runtime/vm/compiler/frontend/kernel_to_il.cc
@@ -149,7 +149,7 @@
   if (scopes_ != nullptr && scopes_->type_arguments_variable != nullptr) {
 #ifdef DEBUG
     Function& function =
-        Function::Handle(Z, parsed_function_->function().raw());
+        Function::Handle(Z, parsed_function_->function().ptr());
     while (function.IsClosureFunction()) {
       function = function.parent_function();
     }
@@ -695,7 +695,7 @@
     level = InvocationMirror::Level::kTopLevel;
   } else {
     receiver = owner.RareType();
-    if (target.kind() == FunctionLayout::kConstructor) {
+    if (target.kind() == UntaggedFunction::kConstructor) {
       level = InvocationMirror::Level::kConstructor;
     } else {
       level = InvocationMirror::Level::kStatic;
@@ -1715,7 +1715,7 @@
     AssertAssignableInstr::Kind kind) {
   Fragment instructions;
 
-  instructions += Constant(AbstractType::ZoneHandle(dst_type.raw()));
+  instructions += Constant(AbstractType::ZoneHandle(dst_type.ptr()));
 
   if (!dst_type.IsInstantiated(kCurrentClass)) {
     instructions += LoadInstantiatorTypeArguments();
@@ -1741,9 +1741,9 @@
   Fragment instructions;
   instructions += LoadInstantiatorTypeArguments();
   instructions += LoadFunctionTypeArguments();
-  instructions += Constant(AbstractType::ZoneHandle(Z, sub_type_value.raw()));
-  instructions += Constant(AbstractType::ZoneHandle(Z, super_type_value.raw()));
-  instructions += Constant(String::ZoneHandle(Z, dst_name_value.raw()));
+  instructions += Constant(AbstractType::ZoneHandle(Z, sub_type_value.ptr()));
+  instructions += Constant(AbstractType::ZoneHandle(Z, super_type_value.ptr()));
+  instructions += Constant(String::ZoneHandle(Z, dst_name_value.ptr()));
   instructions += AssertSubtype(position);
   return instructions;
 }
@@ -1917,7 +1917,7 @@
     name = function.ParameterNameAt(num_fixed_params + i);
     names.SetAt(i, name);
   }
-  return names.raw();
+  return names.ptr();
 }
 
 Fragment FlowGraphBuilder::PushExplicitParameters(
@@ -2558,7 +2558,7 @@
   loop_body += LoadNativeField(Slot::TypeParameter_flags());
   loop_body += Box(kUnboxedUint8);
   loop_body += IntConstant(
-      TypeParameterLayout::GenericCovariantImplBit::mask_in_place());
+      UntaggedTypeParameter::GenericCovariantImplBit::mask_in_place());
   loop_body += SmiBinaryOp(Token::kBIT_AND);
   loop_body += IntConstant(0);
   TargetEntryInstr *is_noncovariant, *is_covariant;
@@ -2815,7 +2815,7 @@
   // Determine if this is `class Closure { get call => this; }`
   const Class& closure_class =
       Class::Handle(Z, IG->object_store()->closure_class());
-  const bool is_closure_call = (owner.raw() == closure_class.raw()) &&
+  const bool is_closure_call = (owner.ptr() == closure_class.ptr()) &&
                                field_name.Equals(Symbols::Call());
 
   graph_entry_ =
@@ -3366,7 +3366,7 @@
   // type check mode in this case.
   const auto& target = Function::Handle(
       Z, function.IsDynamicInvocationForwarder() ? function.ForwardingTarget()
-                                                 : function.raw());
+                                                 : function.ptr());
   ASSERT(target.IsImplicitGetterOrSetter());
 
   const bool is_method = !function.IsStaticFunction();
@@ -3439,7 +3439,7 @@
     if (value.IsError()) {
       Report::LongJump(Error::Cast(value));
     }
-    body += Constant(Instance::ZoneHandle(Z, Instance::RawCast(value.raw())));
+    body += Constant(Instance::ZoneHandle(Z, Instance::RawCast(value.ptr())));
   } else {
     // Static fields
     //  - with trivial initializer
@@ -3499,7 +3499,7 @@
 
   // Should never build a dynamic invocation forwarder for equality
   // operator.
-  ASSERT(function.name() != Symbols::EqualOperator().raw());
+  ASSERT(function.name() != Symbols::EqualOperator().ptr());
 
   // Even if the caller did not pass an argument vector we would still
   // call the target with instantiate-to-bounds type arguments.
@@ -3542,7 +3542,7 @@
   // used. We must guarantee this invariant because violation will lead to an
   // illegal IL once we replace x.[]=(...) with a sequence that does not
   // actually produce any value. See http://dartbug.com/29135 for more details.
-  if (name.raw() == Symbols::AssignIndexToken().raw()) {
+  if (name.ptr() == Symbols::AssignIndexToken().ptr()) {
     body += Drop();
     body += NullConstant();
   }
@@ -3685,7 +3685,7 @@
   code += LoadLocal(MakeTemporary());  // Duplicate handle pointer.
   code += ConvertUnboxedToUntagged(kUnboxedIntPtr);
   code += LoadLocal(object);
-  code += RawStoreField(compiler::target::LocalHandle::raw_offset());
+  code += RawStoreField(compiler::target::LocalHandle::ptr_offset());
 
   code += DropTempsPreserveTop(1);  // Drop object below handle.
   return code;
@@ -3694,7 +3694,7 @@
 Fragment FlowGraphBuilder::UnwrapHandle() {
   Fragment code;
   code += ConvertUnboxedToUntagged(kUnboxedIntPtr);
-  code += IntConstant(compiler::target::LocalHandle::raw_offset());
+  code += IntConstant(compiler::target::LocalHandle::ptr_offset());
   code += UnboxTruncate(kUnboxedIntPtr);
   code += LoadIndexed(kArrayCid, /*index_scale=*/1, /*index_unboxed=*/true);
   return code;
diff --git a/runtime/vm/compiler/frontend/kernel_to_il.h b/runtime/vm/compiler/frontend/kernel_to_il.h
index 5b6b303..c1dd552 100644
--- a/runtime/vm/compiler/frontend/kernel_to_il.h
+++ b/runtime/vm/compiler/frontend/kernel_to_il.h
@@ -203,7 +203,7 @@
   Fragment Return(
       TokenPosition position,
       bool omit_result_type_check = false,
-      intptr_t yield_index = PcDescriptorsLayout::kInvalidYieldIndex);
+      intptr_t yield_index = UntaggedPcDescriptors::kInvalidYieldIndex);
   void SetResultTypeForStaticCall(StaticCallInstr* call,
                                   const Function& target,
                                   intptr_t argument_count,
diff --git a/runtime/vm/compiler/frontend/kernel_translation_helper.cc b/runtime/vm/compiler/frontend/kernel_translation_helper.cc
index c3c6bc6..d7fe4f9 100644
--- a/runtime/vm/compiler/frontend/kernel_translation_helper.cc
+++ b/runtime/vm/compiler/frontend/kernel_translation_helper.cc
@@ -94,7 +94,7 @@
     funcs = GrowableObjectArray::New(16, Heap::kNew);
     info_.set_potential_pragma_functions(funcs);
   }
-  return funcs.raw();
+  return funcs.ptr();
 }
 
 void TranslationHelper::AddPotentialExtensionLibrary(const Library& library) {
@@ -109,53 +109,53 @@
   if (potential_extension_libraries_ != nullptr) {
     GrowableObjectArray* result = potential_extension_libraries_;
     potential_extension_libraries_ = nullptr;
-    return result->raw();
+    return result->ptr();
   }
   return GrowableObjectArray::null();
 }
 
 void TranslationHelper::SetStringOffsets(const TypedData& string_offsets) {
   ASSERT(string_offsets_.IsNull());
-  string_offsets_ = string_offsets.raw();
+  string_offsets_ = string_offsets.ptr();
 }
 
 void TranslationHelper::SetStringData(const ExternalTypedData& string_data) {
   ASSERT(string_data_.IsNull());
-  string_data_ = string_data.raw();
+  string_data_ = string_data.ptr();
 }
 
 void TranslationHelper::SetCanonicalNames(const TypedData& canonical_names) {
   ASSERT(canonical_names_.IsNull());
-  canonical_names_ = canonical_names.raw();
+  canonical_names_ = canonical_names.ptr();
 }
 
 void TranslationHelper::SetMetadataPayloads(
     const ExternalTypedData& metadata_payloads) {
   ASSERT(metadata_payloads_.IsNull());
   ASSERT(Utils::IsAligned(metadata_payloads.DataAddr(0), kWordSize));
-  metadata_payloads_ = metadata_payloads.raw();
+  metadata_payloads_ = metadata_payloads.ptr();
 }
 
 void TranslationHelper::SetMetadataMappings(
     const ExternalTypedData& metadata_mappings) {
   ASSERT(metadata_mappings_.IsNull());
-  metadata_mappings_ = metadata_mappings.raw();
+  metadata_mappings_ = metadata_mappings.ptr();
 }
 
 void TranslationHelper::SetConstants(const Array& constants) {
   ASSERT(constants_.IsNull() ||
          (constants.IsNull() || constants.Length() == 0));
-  constants_ = constants.raw();
+  constants_ = constants.ptr();
 }
 
 void TranslationHelper::SetConstantsTable(
     const ExternalTypedData& constants_table) {
   ASSERT(constants_table_.IsNull());
-  constants_table_ = constants_table.raw();
+  constants_table_ = constants_table.ptr();
 }
 
 void TranslationHelper::SetKernelProgramInfo(const KernelProgramInfo& info) {
-  info_ = info.raw();
+  info_ = info.ptr();
 }
 
 intptr_t TranslationHelper::StringOffset(StringIndex index) const {
@@ -340,7 +340,7 @@
 }
 
 InstancePtr TranslationHelper::Canonicalize(const Instance& instance) {
-  if (instance.IsNull()) return instance.raw();
+  if (instance.IsNull()) return instance.ptr();
 
   return instance.Canonicalize(thread());
 }
@@ -602,7 +602,7 @@
       Z, klass.LookupFieldAllowPrivate(
              DartSymbolObfuscate(CanonicalNameString(kernel_field))));
   CheckStaticLookup(field);
-  return field.raw();
+  return field.ptr();
 }
 
 FunctionPtr TranslationHelper::LookupStaticMethodByKernelProcedure(
@@ -618,7 +618,7 @@
     Function& function =
         Function::Handle(Z, library.LookupFunctionAllowPrivate(procedure_name));
     CheckStaticLookup(function);
-    return function.raw();
+    return function.ptr();
   } else {
     ASSERT(IsClass(enclosing));
     Class& klass = Class::Handle(Z, LookupClassByKernelClass(enclosing));
@@ -627,7 +627,7 @@
     Function& function = Function::ZoneHandle(
         Z, klass.LookupFunctionAllowPrivate(procedure_name));
     CheckStaticLookup(function);
-    return function.raw();
+    return function.ptr();
   }
 }
 
@@ -649,7 +649,7 @@
   Function& function = Function::Handle(
       Z, owner.LookupConstructorAllowPrivate(DartConstructorName(constructor)));
   CheckStaticLookup(function);
-  return function.raw();
+  return function.ptr();
 }
 
 FunctionPtr TranslationHelper::LookupConstructorByKernelConstructor(
@@ -686,13 +686,13 @@
 #endif
   CheckStaticLookup(function);
   ASSERT(!function.IsNull());
-  return function.raw();
+  return function.ptr();
 }
 
 FunctionPtr TranslationHelper::LookupDynamicFunction(const Class& klass,
                                                      const String& name) {
   // Search the superclass chain for the selector.
-  Class& iterate_klass = Class::Handle(Z, klass.raw());
+  Class& iterate_klass = Class::Handle(Z, klass.ptr());
   while (!iterate_klass.IsNull()) {
     FunctionPtr function =
         iterate_klass.LookupDynamicFunctionAllowPrivate(name);
@@ -2870,7 +2870,7 @@
 
   Function& f = Function::Handle(Z);
   TypeArguments& f_params = TypeArguments::Handle(Z);
-  for (f = innermost.raw(); f.parent_function() != Object::null();
+  for (f = innermost.ptr(); f.parent_function() != Object::null();
        f = f.parent_function()) {
     f_params = f.type_parameters();
     num_params += f_params.Length();
@@ -2881,7 +2881,7 @@
       TypeArguments::Handle(Z, TypeArguments::New(num_params));
 
   intptr_t index = num_params;
-  for (f = innermost.raw(); f.parent_function() != Object::null();
+  for (f = innermost.ptr(); f.parent_function() != Object::null();
        f = f.parent_function()) {
     f_params = f.type_parameters();
     for (intptr_t j = f_params.Length() - 1; j >= 0; --j) {
@@ -2943,7 +2943,7 @@
 
   // We return a new `ZoneHandle` here on purpose: The intermediate language
   // instructions do not make a copy of the handle, so we do it.
-  return AbstractType::ZoneHandle(Z, result_.raw());
+  return AbstractType::ZoneHandle(Z, result_.ptr());
 }
 
 AbstractType& TypeTranslator::BuildTypeWithoutFinalization() {
@@ -2954,7 +2954,7 @@
 
   // We return a new `ZoneHandle` here on purpose: The intermediate language
   // instructions do not make a copy of the handle, so we do it.
-  return AbstractType::ZoneHandle(Z, result_.raw());
+  return AbstractType::ZoneHandle(Z, result_.ptr());
 }
 
 void TypeTranslator::BuildTypeInternal() {
@@ -2962,10 +2962,10 @@
   switch (tag) {
     case kInvalidType:
     case kDynamicType:
-      result_ = Object::dynamic_type().raw();
+      result_ = Object::dynamic_type().ptr();
       break;
     case kVoidType:
-      result_ = Object::void_type().raw();
+      result_ = Object::void_type().ptr();
       break;
     case kNeverType: {
       Nullability nullability = helper_->ReadNullability();
@@ -3140,7 +3140,7 @@
     signature ^= ClassFinalizer::FinalizeType(signature);
   }
 
-  result_ = signature.raw();
+  result_ = signature.ptr();
 }
 
 void TypeTranslator::BuildTypeParameterType() {
@@ -3356,7 +3356,7 @@
         // Erase provided name and use a canonical one instead.
         name = Symbols::NewFormatted(H.thread(), "X%" Pd, offset + i);
       } else {
-        name = H.DartIdentifier(lib, helper.name_index_).raw();
+        name = H.DartIdentifier(lib, helper.name_index_).ptr();
       }
       parameter = TypeParameter::New(
           parameterized_class, offset, offset + i, name, null_bound,
@@ -3401,7 +3401,7 @@
       derived ^= active_class->derived_type_parameters->At(i);
       if (derived.bound() == AbstractType::null() &&
           ((!parameterized_class.IsNull() &&
-            derived.parameterized_class() == parameterized_class.raw()) ||
+            derived.parameterized_class() == parameterized_class.ptr()) ||
            (!parameterized_signature.IsNull() &&
             derived.parameterized_class() == Class::null() &&
             derived.index() >= offset &&
diff --git a/runtime/vm/compiler/frontend/kernel_translation_helper.h b/runtime/vm/compiler/frontend/kernel_translation_helper.h
index 044e034..9eaf327 100644
--- a/runtime/vm/compiler/frontend/kernel_translation_helper.h
+++ b/runtime/vm/compiler/frontend/kernel_translation_helper.h
@@ -196,7 +196,7 @@
 
   void SetExpressionEvaluationFunction(const Function& function) {
     ASSERT(expression_evaluation_function_ == nullptr);
-    expression_evaluation_function_ = &Function::Handle(zone_, function.raw());
+    expression_evaluation_function_ = &Function::Handle(zone_, function.ptr());
   }
   const Function& GetExpressionEvaluationFunction() {
     if (expression_evaluation_function_ == nullptr) {
@@ -207,11 +207,11 @@
   void SetExpressionEvaluationRealClass(const Class& real_class) {
     ASSERT(expression_evaluation_real_class_ == nullptr);
     ASSERT(!real_class.IsNull());
-    expression_evaluation_real_class_ = &Class::Handle(zone_, real_class.raw());
+    expression_evaluation_real_class_ = &Class::Handle(zone_, real_class.ptr());
   }
   ClassPtr GetExpressionEvaluationRealClass() {
     ASSERT(expression_evaluation_real_class_ != nullptr);
-    return expression_evaluation_real_class_->raw();
+    return expression_evaluation_real_class_->ptr();
   }
 
  private:
@@ -1316,12 +1316,12 @@
 
   bool MemberIsProcedure() {
     ASSERT(member != NULL);
-    FunctionLayout::Kind function_kind = member->kind();
-    return function_kind == FunctionLayout::kRegularFunction ||
-           function_kind == FunctionLayout::kGetterFunction ||
-           function_kind == FunctionLayout::kSetterFunction ||
-           function_kind == FunctionLayout::kMethodExtractor ||
-           function_kind == FunctionLayout::kDynamicInvocationForwarder ||
+    UntaggedFunction::Kind function_kind = member->kind();
+    return function_kind == UntaggedFunction::kRegularFunction ||
+           function_kind == UntaggedFunction::kGetterFunction ||
+           function_kind == UntaggedFunction::kSetterFunction ||
+           function_kind == UntaggedFunction::kMethodExtractor ||
+           function_kind == UntaggedFunction::kDynamicInvocationForwarder ||
            member->IsFactory();
   }
 
@@ -1346,7 +1346,7 @@
   void RecordDerivedTypeParameter(Zone* zone,
                                   const TypeParameter& original,
                                   const TypeParameter& derived) {
-    if (original.raw() != derived.raw() &&
+    if (original.ptr() != derived.ptr() &&
         original.bound() == AbstractType::null()) {
       if (derived_type_parameters == nullptr) {
         derived_type_parameters = &GrowableObjectArray::Handle(
diff --git a/runtime/vm/compiler/frontend/scope_builder.cc b/runtime/vm/compiler/frontend/scope_builder.cc
index 0d6d036..11f7de3 100644
--- a/runtime/vm/compiler/frontend/scope_builder.cc
+++ b/runtime/vm/compiler/frontend/scope_builder.cc
@@ -116,7 +116,7 @@
           function.kernel_offset());
 
   switch (function.kind()) {
-    case FunctionLayout::kImplicitClosureFunction: {
+    case UntaggedFunction::kImplicitClosureFunction: {
       const auto& parent = Function::Handle(Z, function.parent_function());
       const auto& target =
           Function::Handle(Z, function.ImplicitClosureTarget(Z));
@@ -129,11 +129,11 @@
       }
     }
       FALL_THROUGH;
-    case FunctionLayout::kClosureFunction:
-    case FunctionLayout::kRegularFunction:
-    case FunctionLayout::kGetterFunction:
-    case FunctionLayout::kSetterFunction:
-    case FunctionLayout::kConstructor: {
+    case UntaggedFunction::kClosureFunction:
+    case UntaggedFunction::kRegularFunction:
+    case UntaggedFunction::kGetterFunction:
+    case UntaggedFunction::kSetterFunction:
+    case UntaggedFunction::kConstructor: {
       const Tag tag = helper_.PeekTag();
       helper_.ReadUntilFunctionNode();
       function_node_helper.ReadUntilExcluding(
@@ -204,7 +204,7 @@
         ASSERT(
             Class::Handle(
                 AbstractType::Handle(function.ParameterTypeAt(1)).type_class())
-                .ScrubbedName() == Symbols::_SyncIterator().raw());
+                .ScrubbedName() == Symbols::_SyncIterator().ptr());
         type_check_mode = kTypeCheckForStaticFunction;
       } else if (function.is_static()) {
         // In static functions we don't check anything.
@@ -247,8 +247,8 @@
       }
       break;
     }
-    case FunctionLayout::kImplicitGetter:
-    case FunctionLayout::kImplicitSetter: {
+    case UntaggedFunction::kImplicitGetter:
+    case UntaggedFunction::kImplicitSetter: {
       ASSERT(helper_.PeekTag() == kField);
       const bool is_setter = function.IsImplicitSetterFunction();
       const bool is_method = !function.IsStaticFunction();
@@ -293,7 +293,7 @@
       }
       break;
     }
-    case FunctionLayout::kImplicitStaticGetter: {
+    case UntaggedFunction::kImplicitStaticGetter: {
       ASSERT(helper_.PeekTag() == kField);
       ASSERT(function.IsStaticFunction());
       // In addition to static field initializers, scopes/local variables
@@ -304,7 +304,7 @@
       }
       break;
     }
-    case FunctionLayout::kFieldInitializer: {
+    case UntaggedFunction::kFieldInitializer: {
       ASSERT(helper_.PeekTag() == kField);
       if (!function.is_static()) {
         Class& klass = Class::Handle(Z, function.Owner());
@@ -318,7 +318,7 @@
       VisitNode();
       break;
     }
-    case FunctionLayout::kDynamicInvocationForwarder: {
+    case UntaggedFunction::kDynamicInvocationForwarder: {
       const String& name = String::Handle(Z, function.name());
       ASSERT(Function::IsDynamicInvocationForwarderName(name));
 
@@ -366,7 +366,7 @@
           attrs);
       break;
     }
-    case FunctionLayout::kMethodExtractor: {
+    case UntaggedFunction::kMethodExtractor: {
       // Add a receiver parameter.  Though it is captured, we emit code to
       // explicitly copy it to a fixed offset in a freshly-allocated context
       // instead of using the generic code for regular functions.
@@ -380,7 +380,7 @@
       parsed_function_->set_receiver_var(variable);
       break;
     }
-    case FunctionLayout::kFfiTrampoline: {
+    case UntaggedFunction::kFfiTrampoline: {
       needs_expr_temp_ = true;
       // Callbacks and calls with handles need try/catch variables.
       if ((function.FfiCallbackTarget() != Function::null() ||
@@ -396,7 +396,7 @@
       }
       FALL_THROUGH;
     }
-    case FunctionLayout::kInvokeFieldDispatcher: {
+    case UntaggedFunction::kInvokeFieldDispatcher: {
       if (function.IsDynamicClosureCallDispatcher()) {
         auto const vars = parsed_function_->EnsureDynamicClosureCallVars();
         ASSERT(vars != nullptr);
@@ -409,19 +409,19 @@
       }
     }
       FALL_THROUGH;
-    case FunctionLayout::kNoSuchMethodDispatcher: {
+    case UntaggedFunction::kNoSuchMethodDispatcher: {
       for (intptr_t i = 0; i < function.NumParameters(); ++i) {
         LocalVariable* variable = MakeVariable(
             TokenPosition::kNoSource, TokenPosition::kNoSource,
             String::ZoneHandle(Z, function.ParameterNameAt(i)),
             AbstractType::ZoneHandle(Z, function.IsFfiTrampoline()
                                             ? function.ParameterTypeAt(i)
-                                            : Object::dynamic_type().raw()));
+                                            : Object::dynamic_type().ptr()));
         scope_->InsertParameterAt(i, variable);
       }
       break;
     }
-    case FunctionLayout::kIrregexpFunction:
+    case UntaggedFunction::kIrregexpFunction:
       UNREACHABLE();
   }
   if (needs_expr_temp_) {
@@ -1401,7 +1401,7 @@
 }
 
 void ScopeBuilder::VisitTypeParameterType() {
-  Function& function = Function::Handle(Z, parsed_function_->function().raw());
+  Function& function = Function::Handle(Z, parsed_function_->function().ptr());
   while (function.IsClosureFunction()) {
     function = function.parent_function();
   }
@@ -1537,7 +1537,7 @@
   if (helper.IsCovariant()) {
     variable->set_is_explicit_covariant_parameter();
   }
-  if (variable->name().raw() == Symbols::IteratorParameter().raw()) {
+  if (variable->name().ptr() == Symbols::IteratorParameter().ptr()) {
     variable->set_is_forced_stack();
   }
 
diff --git a/runtime/vm/compiler/intrinsifier.cc b/runtime/vm/compiler/intrinsifier.cc
index d95a65b..a8f9559 100644
--- a/runtime/vm/compiler/intrinsifier.cc
+++ b/runtime/vm/compiler/intrinsifier.cc
@@ -216,7 +216,7 @@
   };
 
   for (intptr_t i = 0; i < kNumLibs; i++) {
-    lib = intrinsics[i].library.raw();
+    lib = intrinsics[i].library.ptr();
     for (IntrinsicDesc* intrinsic = intrinsics[i].intrinsics;
          intrinsic->class_name != nullptr; intrinsic++) {
       func = Function::null();
diff --git a/runtime/vm/compiler/jit/compiler.cc b/runtime/vm/compiler/jit/compiler.cc
index 16ec8c5..06056b5 100644
--- a/runtime/vm/compiler/jit/compiler.cc
+++ b/runtime/vm/compiler/jit/compiler.cc
@@ -346,7 +346,7 @@
 
   // CreateDeoptInfo uses the object pool and needs to be done before
   // FinalizeCode.
-  Array& deopt_info_array = Array::Handle(zone, Object::empty_array().raw());
+  Array& deopt_info_array = Array::Handle(zone, Object::empty_array().ptr());
   deopt_info_array = graph_compiler->CreateDeoptInfo(assembler);
 
   // Allocates instruction object. Since this occurs only at safepoint,
@@ -426,7 +426,7 @@
         // OSR is not compiled in background.
         ASSERT(!Compiler::IsBackgroundCompilation());
       }
-      ASSERT(code.owner() == function.raw());
+      ASSERT(code.owner() == function.ptr());
     } else {
       code = Code::null();
     }
@@ -469,7 +469,7 @@
       function.SetUsageCounter(0);
     }
   }
-  return code.raw();
+  return code.ptr();
 }
 
 void CompileParsedFunctionHelper::CheckIfBackgroundCompilerIsBeingStopped(
@@ -663,13 +663,13 @@
       // We bailed out or we encountered an error.
       const Error& error = Error::Handle(thread()->StealStickyError());
 
-      if (error.raw() == Object::branch_offset_error().raw()) {
+      if (error.ptr() == Object::branch_offset_error().ptr()) {
         // Compilation failed due to an out of range branch offset in the
         // assembler. We try again (done = false) with far branches enabled.
         done = false;
         ASSERT(!use_far_branches);
         use_far_branches = true;
-      } else if (error.raw() == Object::speculative_inlining_error().raw()) {
+      } else if (error.ptr() == Object::speculative_inlining_error().ptr()) {
         // Can only happen with precompilation.
         UNREACHABLE();
       } else {
@@ -691,7 +691,7 @@
       }
     }
   }
-  return result->raw();
+  return result->ptr();
 }
 
 static ObjectPtr CompileFunctionHelper(CompilationPipeline* pipeline,
@@ -714,7 +714,7 @@
     per_compile_timer.Start();
 
     ParsedFunction* parsed_function = new (zone)
-        ParsedFunction(thread, Function::ZoneHandle(zone, function.raw()));
+        ParsedFunction(thread, Function::ZoneHandle(zone, function.ptr()));
     if (trace_compiler) {
       const intptr_t token_size = function.SourceSize();
       THR_Print("Compiling %s%sfunction %s: '%s' @ token %s, size %" Pd "\n",
@@ -747,7 +747,7 @@
 
         // We got an error during compilation.
         // If it was a bailout, then disable optimization.
-        if (error.raw() == Object::background_compilation_error().raw()) {
+        if (error.ptr() == Object::background_compilation_error().ptr()) {
           if (FLAG_trace_compiler) {
             THR_Print(
                 "--> disabling background optimizations for '%s' (will "
@@ -775,7 +775,7 @@
           // The background compiler does not execute Dart code or handle
           // isolate messages.
           ASSERT(!error.IsUnwindError());
-          return error.raw();
+          return error.ptr();
         }
       }
       if (optimized) {
@@ -794,7 +794,7 @@
           function.SetIsOptimizable(false);
           return Error::null();
         }
-        return error.raw();
+        return error.ptr();
       } else {
         ASSERT(!optimized);
         // The non-optimizing compiler can get an unhandled exception
@@ -803,7 +803,7 @@
         ASSERT(error.IsUnhandledException() || error.IsUnwindError() ||
                (error.IsLanguageError() &&
                 LanguageError::Cast(error).kind() != Report::kBailout));
-        return error.raw();
+        return error.ptr();
       }
       UNREACHABLE();
     }
@@ -817,14 +817,14 @@
                 code.Size(), per_compile_timer.TotalElapsedTime());
     }
 
-    return result.raw();
+    return result.ptr();
   } else {
     Thread* const thread = Thread::Current();
     StackZone stack_zone(thread);
     // We got an error during compilation or it is a bailout from background
     // compilation (e.g., during parsing with EnsureIsFinalized).
     const Error& error = Error::Handle(thread->StealStickyError());
-    if (error.raw() == Object::background_compilation_error().raw()) {
+    if (error.ptr() == Object::background_compilation_error().ptr()) {
       // Exit compilation, retry it later.
       if (FLAG_trace_bailout) {
         THR_Print("Aborted background compilation: %s\n",
@@ -834,7 +834,7 @@
     }
     // Do not attempt to optimize functions that can cause errors.
     function.set_is_optimizable(false);
-    return error.raw();
+    return error.ptr();
   }
   UNREACHABLE();
   return Object::null();
@@ -886,16 +886,16 @@
       CompileFunctionHelper(pipeline, function, false, /* not optimized */
                             kNoOSRDeoptId));
   if (result.IsError()) {
-    return Error::Cast(result).raw();
+    return Error::Cast(result).ptr();
   }
   // Since CompileFunctionHelper replaces the current code, re-attach the
   // original code if the function was already compiled.
-  if (!original_code.IsNull() && result.raw() == function.CurrentCode() &&
+  if (!original_code.IsNull() && result.ptr() == function.CurrentCode() &&
       !original_code.IsDisabled()) {
     function.AttachCode(original_code);
   }
   ASSERT(function.unoptimized_code() != Object::null());
-  ASSERT(function.unoptimized_code() == result.raw());
+  ASSERT(function.unoptimized_code() == result.ptr());
   if (FLAG_trace_compiler) {
     THR_Print("Ensure unoptimized code for %s\n", function.ToCString());
   }
@@ -939,7 +939,7 @@
   LongJumpScope jump;
   if (setjmp(*jump.Set()) == 0) {
     ParsedFunction* parsed_function =
-        new ParsedFunction(thread, Function::ZoneHandle(zone, function.raw()));
+        new ParsedFunction(thread, Function::ZoneHandle(zone, function.ptr()));
     ZoneGrowableArray<const ICData*>* ic_data_array =
         new ZoneGrowableArray<const ICData*>();
     ZoneGrowableArray<intptr_t>* context_level_array =
@@ -978,7 +978,7 @@
     if (!func.HasCode() && !func.is_abstract()) {
       result = CompileFunction(thread, func);
       if (result.IsError()) {
-        return Error::Cast(result).raw();
+        return Error::Cast(result).ptr();
       }
       ASSERT(!result.IsNull());
     }
@@ -1010,7 +1010,7 @@
 class QueueElement {
  public:
   explicit QueueElement(const Function& function)
-      : next_(NULL), function_(function.raw()) {}
+      : next_(NULL), function_(function.ptr()) {}
 
   virtual ~QueueElement() {
     next_ = NULL;
@@ -1023,7 +1023,9 @@
   QueueElement* next() const { return next_; }
 
   ObjectPtr function() const { return function_; }
-  ObjectPtr* function_ptr() { return reinterpret_cast<ObjectPtr*>(&function_); }
+  ObjectPtr* function_untag() {
+    return reinterpret_cast<ObjectPtr*>(&function_);
+  }
 
  private:
   QueueElement* next_;
@@ -1043,7 +1045,7 @@
     ASSERT(visitor != NULL);
     QueueElement* p = first_;
     while (p != NULL) {
-      visitor->VisitPointer(p->function_ptr());
+      visitor->VisitPointer(p->function_untag());
       p = p->next();
     }
   }
@@ -1088,7 +1090,7 @@
   bool ContainsObj(const Object& obj) const {
     QueueElement* p = first_;
     while (p != NULL) {
-      if (p->function() == obj.raw()) {
+      if (p->function() == obj.ptr()) {
         return true;
       }
       p = p->next();
diff --git a/runtime/vm/compiler/jit/compiler.h b/runtime/vm/compiler/jit/compiler.h
index 75d59521..2e2d563 100644
--- a/runtime/vm/compiler/jit/compiler.h
+++ b/runtime/vm/compiler/jit/compiler.h
@@ -79,8 +79,8 @@
   // Generates code for the given function without optimization and sets its
   // code field.
   //
-  // Returns the raw code object if compilation succeeds.  Otherwise returns a
-  // RawError.  Also installs the generated code on the function.
+  // Returns the raw code object if compilation succeeds.  Otherwise returns an
+  // ErrorPtr.  Also installs the generated code on the function.
   static ObjectPtr CompileFunction(Thread* thread, const Function& function);
 
   // Generates unoptimized code if not present, current code is unchanged.
diff --git a/runtime/vm/compiler/method_recognizer.cc b/runtime/vm/compiler/method_recognizer.cc
index ecd99a1..3abd57b 100644
--- a/runtime/vm/compiler/method_recognizer.cc
+++ b/runtime/vm/compiler/method_recognizer.cc
@@ -291,43 +291,43 @@
 }
 
 static Token::Kind RecognizeTokenKindHelper(const String& name) {
-  if (name.raw() == Symbols::Plus().raw()) {
+  if (name.ptr() == Symbols::Plus().ptr()) {
     return Token::kADD;
-  } else if (name.raw() == Symbols::Minus().raw()) {
+  } else if (name.ptr() == Symbols::Minus().ptr()) {
     return Token::kSUB;
-  } else if (name.raw() == Symbols::Star().raw()) {
+  } else if (name.ptr() == Symbols::Star().ptr()) {
     return Token::kMUL;
-  } else if (name.raw() == Symbols::Slash().raw()) {
+  } else if (name.ptr() == Symbols::Slash().ptr()) {
     return Token::kDIV;
-  } else if (name.raw() == Symbols::TruncDivOperator().raw()) {
+  } else if (name.ptr() == Symbols::TruncDivOperator().ptr()) {
     return Token::kTRUNCDIV;
-  } else if (name.raw() == Symbols::Percent().raw()) {
+  } else if (name.ptr() == Symbols::Percent().ptr()) {
     return Token::kMOD;
-  } else if (name.raw() == Symbols::BitOr().raw()) {
+  } else if (name.ptr() == Symbols::BitOr().ptr()) {
     return Token::kBIT_OR;
-  } else if (name.raw() == Symbols::Ampersand().raw()) {
+  } else if (name.ptr() == Symbols::Ampersand().ptr()) {
     return Token::kBIT_AND;
-  } else if (name.raw() == Symbols::Caret().raw()) {
+  } else if (name.ptr() == Symbols::Caret().ptr()) {
     return Token::kBIT_XOR;
-  } else if (name.raw() == Symbols::LeftShiftOperator().raw()) {
+  } else if (name.ptr() == Symbols::LeftShiftOperator().ptr()) {
     return Token::kSHL;
-  } else if (name.raw() == Symbols::RightShiftOperator().raw()) {
+  } else if (name.ptr() == Symbols::RightShiftOperator().ptr()) {
     return Token::kSHR;
-  } else if (name.raw() == Symbols::Tilde().raw()) {
+  } else if (name.ptr() == Symbols::Tilde().ptr()) {
     return Token::kBIT_NOT;
-  } else if (name.raw() == Symbols::UnaryMinus().raw()) {
+  } else if (name.ptr() == Symbols::UnaryMinus().ptr()) {
     return Token::kNEGATE;
-  } else if (name.raw() == Symbols::EqualOperator().raw()) {
+  } else if (name.ptr() == Symbols::EqualOperator().ptr()) {
     return Token::kEQ;
-  } else if (name.raw() == Symbols::Token(Token::kNE).raw()) {
+  } else if (name.ptr() == Symbols::Token(Token::kNE).ptr()) {
     return Token::kNE;
-  } else if (name.raw() == Symbols::LAngleBracket().raw()) {
+  } else if (name.ptr() == Symbols::LAngleBracket().ptr()) {
     return Token::kLT;
-  } else if (name.raw() == Symbols::RAngleBracket().raw()) {
+  } else if (name.ptr() == Symbols::RAngleBracket().ptr()) {
     return Token::kGT;
-  } else if (name.raw() == Symbols::LessEqualOperator().raw()) {
+  } else if (name.ptr() == Symbols::LessEqualOperator().ptr()) {
     return Token::kLTE;
-  } else if (name.raw() == Symbols::GreaterEqualOperator().raw()) {
+  } else if (name.ptr() == Symbols::GreaterEqualOperator().ptr()) {
     return Token::kGTE;
   } else if (Field::IsGetterName(name)) {
     return Token::kGET;
@@ -366,8 +366,8 @@
   ASSERT(factory.IsFactory());
   const Class& function_class = Class::Handle(factory.Owner());
   const Library& lib = Library::Handle(function_class.library());
-  ASSERT((lib.raw() == Library::CoreLibrary()) ||
-         (lib.raw() == Library::TypedDataLibrary()));
+  ASSERT((lib.ptr() == Library::CoreLibrary()) ||
+         (lib.ptr() == Library::TypedDataLibrary()));
   const String& factory_name = String::Handle(factory.name());
   for (intptr_t i = 0;
        factory_recognizer_list[i].symbol_id != Symbols::kIllegal; i++) {
@@ -393,11 +393,11 @@
     return kDynamicCid;
   }
 
-  if (owner.Name() == Symbols::List().raw()) {
-    if (function.name() == Symbols::ListFactory().raw()) {
+  if (owner.Name() == Symbols::List().ptr()) {
+    if (function.name() == Symbols::ListFactory().ptr()) {
       ASSERT(argument_count == 1 || argument_count == 2);
       return (argument_count == 1) ? kGrowableObjectArrayCid : kArrayCid;
-    } else if (function.name() == Symbols::ListFilledFactory().raw()) {
+    } else if (function.name() == Symbols::ListFilledFactory().ptr()) {
       ASSERT(argument_count == 3 || argument_count == 4);
       return (argument_count == 3) ? kArrayCid : kDynamicCid;
     }
diff --git a/runtime/vm/compiler/relocation.cc b/runtime/vm/compiler/relocation.cc
index 9e3c5c9..ff5ea61 100644
--- a/runtime/vm/compiler/relocation.cc
+++ b/runtime/vm/compiler/relocation.cc
@@ -53,7 +53,7 @@
     current_caller = (*code_objects_)[i];
 
     const intptr_t code_text_offset = next_text_offset_;
-    if (!AddInstructionsToText(current_caller.raw())) {
+    if (!AddInstructionsToText(current_caller.ptr())) {
       continue;
     }
 
@@ -280,8 +280,8 @@
     const intptr_t text_offset =
         code_text_offset + AdjustPayloadOffset(call_instruction_offset);
 
-    UnresolvedCall unresolved_call(code.raw(), call_instruction_offset,
-                                   text_offset, destination_.raw(),
+    UnresolvedCall unresolved_call(code.ptr(), call_instruction_offset,
+                                   text_offset, destination_.ptr(),
                                    offset_into_target, is_tail_call);
     if (!TryResolveBackwardsCall(&unresolved_call)) {
       EnqueueUnresolvedCall(new UnresolvedCall(unresolved_call));
@@ -427,7 +427,7 @@
   target_ = call.Get<Code::kSCallTableCodeOrTypeTarget>();
   if (target_.IsAbstractType()) {
     target_ = AbstractType::Cast(target_).type_test_stub();
-    destination_ = Code::Cast(target_).raw();
+    destination_ = Code::Cast(target_).ptr();
 
     // The AssertAssignableInstr will emit pc-relative calls to the TTS iff
     // dst_type is instantiated. If we happened to not install an optimized
@@ -442,22 +442,22 @@
     if (destination_.InVMIsolateHeap()) {
       auto object_store = thread_->isolate_group()->object_store();
 
-      if (destination_.raw() == StubCode::DefaultTypeTest().raw()) {
+      if (destination_.ptr() == StubCode::DefaultTypeTest().ptr()) {
         destination_ = object_store->default_tts_stub();
-      } else if (destination_.raw() ==
-                 StubCode::DefaultNullableTypeTest().raw()) {
+      } else if (destination_.ptr() ==
+                 StubCode::DefaultNullableTypeTest().ptr()) {
         destination_ = object_store->default_nullable_tts_stub();
-      } else if (destination_.raw() == StubCode::TopTypeTypeTest().raw()) {
+      } else if (destination_.ptr() == StubCode::TopTypeTypeTest().ptr()) {
         destination_ = object_store->top_type_tts_stub();
-      } else if (destination_.raw() == StubCode::UnreachableTypeTest().raw()) {
+      } else if (destination_.ptr() == StubCode::UnreachableTypeTest().ptr()) {
         destination_ = object_store->unreachable_tts_stub();
-      } else if (destination_.raw() == StubCode::SlowTypeTest().raw()) {
+      } else if (destination_.ptr() == StubCode::SlowTypeTest().ptr()) {
         destination_ = object_store->slow_tts_stub();
-      } else if (destination_.raw() ==
-                 StubCode::NullableTypeParameterTypeTest().raw()) {
+      } else if (destination_.ptr() ==
+                 StubCode::NullableTypeParameterTypeTest().ptr()) {
         destination_ = object_store->nullable_type_parameter_tts_stub();
-      } else if (destination_.raw() ==
-                 StubCode::TypeParameterTypeTest().raw()) {
+      } else if (destination_.ptr() ==
+                 StubCode::TypeParameterTypeTest().ptr()) {
         destination_ = object_store->type_parameter_tts_stub();
       } else {
         UNREACHABLE();
@@ -465,10 +465,10 @@
     }
   } else {
     ASSERT(target_.IsCode());
-    destination_ = Code::Cast(target_).raw();
+    destination_ = Code::Cast(target_).ptr();
   }
   ASSERT(!destination_.InVMIsolateHeap());
-  return destination_.raw();
+  return destination_.ptr();
 }
 
 void CodeRelocator::BuildTrampolinesForAlmostOutOfRangeCalls() {
diff --git a/runtime/vm/compiler/runtime_api.cc b/runtime/vm/compiler/runtime_api.cc
index 31b95b2..a13025f 100644
--- a/runtime/vm/compiler/runtime_api.cc
+++ b/runtime/vm/compiler/runtime_api.cc
@@ -55,7 +55,7 @@
   } else if (a.IsDouble() && b.IsDouble()) {
     return Double::Cast(a).value() == Double::Cast(b).value();
   }
-  return a.raw() == b.raw();
+  return a.ptr() == b.ptr();
 }
 
 bool IsEqualType(const AbstractType& a, const AbstractType& b) {
@@ -121,7 +121,7 @@
 }
 
 Object& NewZoneHandle(Zone* zone, const Object& obj) {
-  return Object::ZoneHandle(zone, obj.raw());
+  return Object::ZoneHandle(zone, obj.ptr());
 }
 
 const Object& NullObject() {
@@ -294,54 +294,56 @@
 }
 
 bool SizeFitsInSizeTag(uword instance_size) {
-  return dart::ObjectLayout::SizeTag::SizeFits(
+  return dart::UntaggedObject::SizeTag::SizeFits(
       TranslateOffsetInWordsToHost(instance_size));
 }
 
 uword MakeTagWordForNewSpaceObject(classid_t cid, uword instance_size) {
-  return dart::ObjectLayout::SizeTag::encode(
+  return dart::UntaggedObject::SizeTag::encode(
              TranslateOffsetInWordsToHost(instance_size)) |
-         dart::ObjectLayout::ClassIdTag::encode(cid) |
-         dart::ObjectLayout::NewBit::encode(true);
+         dart::UntaggedObject::ClassIdTag::encode(cid) |
+         dart::UntaggedObject::NewBit::encode(true);
 }
 
 word Object::tags_offset() {
   return 0;
 }
 
-const word ObjectLayout::kCardRememberedBit =
-    dart::ObjectLayout::kCardRememberedBit;
+const word UntaggedObject::kCardRememberedBit =
+    dart::UntaggedObject::kCardRememberedBit;
 
-const word ObjectLayout::kOldAndNotRememberedBit =
-    dart::ObjectLayout::kOldAndNotRememberedBit;
+const word UntaggedObject::kOldAndNotRememberedBit =
+    dart::UntaggedObject::kOldAndNotRememberedBit;
 
-const word ObjectLayout::kOldAndNotMarkedBit =
-    dart::ObjectLayout::kOldAndNotMarkedBit;
+const word UntaggedObject::kOldAndNotMarkedBit =
+    dart::UntaggedObject::kOldAndNotMarkedBit;
 
-const word ObjectLayout::kSizeTagPos = dart::ObjectLayout::kSizeTagPos;
+const word UntaggedObject::kSizeTagPos = dart::UntaggedObject::kSizeTagPos;
 
-const word ObjectLayout::kSizeTagSize = dart::ObjectLayout::kSizeTagSize;
+const word UntaggedObject::kSizeTagSize = dart::UntaggedObject::kSizeTagSize;
 
-const word ObjectLayout::kClassIdTagPos = dart::ObjectLayout::kClassIdTagPos;
+const word UntaggedObject::kClassIdTagPos =
+    dart::UntaggedObject::kClassIdTagPos;
 
-const word ObjectLayout::kClassIdTagSize = dart::ObjectLayout::kClassIdTagSize;
+const word UntaggedObject::kClassIdTagSize =
+    dart::UntaggedObject::kClassIdTagSize;
 
-const word ObjectLayout::kHashTagPos = dart::ObjectLayout::kHashTagPos;
+const word UntaggedObject::kHashTagPos = dart::UntaggedObject::kHashTagPos;
 
-const word ObjectLayout::kHashTagSize = dart::ObjectLayout::kHashTagSize;
+const word UntaggedObject::kHashTagSize = dart::UntaggedObject::kHashTagSize;
 
-const word ObjectLayout::kSizeTagMaxSizeTag =
-    dart::ObjectLayout::SizeTag::kMaxSizeTagInUnitsOfAlignment *
+const word UntaggedObject::kSizeTagMaxSizeTag =
+    dart::UntaggedObject::SizeTag::kMaxSizeTagInUnitsOfAlignment *
     ObjectAlignment::kObjectAlignment;
 
-const word ObjectLayout::kTagBitsSizeTagPos =
-    dart::ObjectLayout::TagBits::kSizeTagPos;
+const word UntaggedObject::kTagBitsSizeTagPos =
+    dart::UntaggedObject::TagBits::kSizeTagPos;
 
-const word AbstractTypeLayout::kTypeStateFinalizedInstantiated =
-    dart::AbstractTypeLayout::kFinalizedInstantiated;
+const word UntaggedAbstractType::kTypeStateFinalizedInstantiated =
+    dart::UntaggedAbstractType::kFinalizedInstantiated;
 
-const word ObjectLayout::kBarrierOverlapShift =
-    dart::ObjectLayout::kBarrierOverlapShift;
+const word UntaggedObject::kBarrierOverlapShift =
+    dart::UntaggedObject::kBarrierOverlapShift;
 
 bool IsTypedDataClassId(intptr_t cid) {
   return dart::IsTypedDataClassId(cid);
@@ -780,7 +782,7 @@
 
 word ToRawSmi(const dart::Object& a) {
   RELEASE_ASSERT(IsSmi(a));
-  return static_cast<word>(static_cast<intptr_t>(a.raw()));
+  return static_cast<word>(static_cast<intptr_t>(a.ptr()));
 }
 
 word ToRawSmi(intptr_t value) {
@@ -808,7 +810,7 @@
   static_assert(kHostWordSize == kWordSize,
                 "Can't embed raw pointers to runtime objects when host and "
                 "target word sizes are different");
-  return static_cast<word>(a.raw());
+  return static_cast<word>(a.ptr());
 }
 #endif  // defined(TARGET_ARCH_IA32)
 
diff --git a/runtime/vm/compiler/runtime_api.h b/runtime/vm/compiler/runtime_api.h
index 9af3244..d690ef4 100644
--- a/runtime/vm/compiler/runtime_api.h
+++ b/runtime/vm/compiler/runtime_api.h
@@ -40,7 +40,7 @@
 class Zone;
 
 #define DO(clazz)                                                              \
-  class clazz##Layout;                                                         \
+  class Untagged##clazz;                                                       \
   class clazz;
 CLASS_LIST_FOR_HANDLES(DO)
 #undef DO
@@ -336,7 +336,7 @@
   return Utils::RoundUp(size, kObjectAlignment);
 }
 // Information about frame_layout that compiler should be targeting.
 extern FrameLayout frame_layout;
 
 constexpr intptr_t kIntSpillFactor = sizeof(int64_t) / kWordSize;
 constexpr intptr_t kDoubleSpillFactor = sizeof(double) / kWordSize;
@@ -406,7 +406,7 @@
 // Currently we use the same names for classes, constants and getters to make
 // migration easier.
 
-class ObjectLayout : public AllStatic {
+class UntaggedObject : public AllStatic {
  public:
   static const word kCardRememberedBit;
   static const word kOldAndNotRememberedBit;
@@ -424,7 +424,7 @@
   static bool IsTypedDataClassId(intptr_t cid);
 };
 
-class AbstractTypeLayout : public AllStatic {
+class UntaggedAbstractType : public AllStatic {
  public:
   static const word kTypeStateFinalizedInstantiated;
 };
@@ -454,7 +454,7 @@
 
   static word super_type_offset();
 
-  // The offset of the ClassLayout::num_type_arguments_ field in bytes.
+  // The offset of the UntaggedClass::num_type_arguments_ field in bytes.
   static word num_type_arguments_offset();
 
   // The value used if no type arguments vector is present.
@@ -485,7 +485,7 @@
 
 class Instance : public AllStatic {
  public:
-  // Returns the offset to the first field of [RawInstance].
+  // Returns the offset to the first field of [UntaggedInstance].
   static word first_field_offset();
   static word DataOffsetFor(intptr_t cid);
   static word ElementSizeFor(intptr_t cid);
@@ -642,7 +643,7 @@
 
 class LocalHandle : public AllStatic {
  public:
-  static word raw_offset();
+  static word ptr_offset();
 };
 
 class Pointer : public PointerBase {
diff --git a/runtime/vm/compiler/runtime_offsets_extracted.h b/runtime/vm/compiler/runtime_offsets_extracted.h
index 30ae528..9694444 100644
--- a/runtime/vm/compiler/runtime_offsets_extracted.h
+++ b/runtime/vm/compiler/runtime_offsets_extracted.h
@@ -179,7 +179,7 @@
     LinkedHashMap_type_arguments_offset = 4;
 static constexpr dart::compiler::target::word LinkedHashMap_used_data_offset =
     20;
-static constexpr dart::compiler::target::word LocalHandle_raw_offset = 0;
+static constexpr dart::compiler::target::word LocalHandle_ptr_offset = 0;
 static constexpr dart::compiler::target::word
     MarkingStackBlock_pointers_offset = 8;
 static constexpr dart::compiler::target::word MarkingStackBlock_top_offset = 4;
@@ -704,7 +704,7 @@
     LinkedHashMap_type_arguments_offset = 8;
 static constexpr dart::compiler::target::word LinkedHashMap_used_data_offset =
     40;
-static constexpr dart::compiler::target::word LocalHandle_raw_offset = 0;
+static constexpr dart::compiler::target::word LocalHandle_ptr_offset = 0;
 static constexpr dart::compiler::target::word
     MarkingStackBlock_pointers_offset = 16;
 static constexpr dart::compiler::target::word MarkingStackBlock_top_offset = 8;
@@ -1233,7 +1233,7 @@
     LinkedHashMap_type_arguments_offset = 4;
 static constexpr dart::compiler::target::word LinkedHashMap_used_data_offset =
     20;
-static constexpr dart::compiler::target::word LocalHandle_raw_offset = 0;
+static constexpr dart::compiler::target::word LocalHandle_ptr_offset = 0;
 static constexpr dart::compiler::target::word
     MarkingStackBlock_pointers_offset = 8;
 static constexpr dart::compiler::target::word MarkingStackBlock_top_offset = 4;
@@ -1755,7 +1755,7 @@
     LinkedHashMap_type_arguments_offset = 8;
 static constexpr dart::compiler::target::word LinkedHashMap_used_data_offset =
     40;
-static constexpr dart::compiler::target::word LocalHandle_raw_offset = 0;
+static constexpr dart::compiler::target::word LocalHandle_ptr_offset = 0;
 static constexpr dart::compiler::target::word
     MarkingStackBlock_pointers_offset = 16;
 static constexpr dart::compiler::target::word MarkingStackBlock_top_offset = 8;
@@ -2284,7 +2284,7 @@
     LinkedHashMap_type_arguments_offset = 4;
 static constexpr dart::compiler::target::word LinkedHashMap_used_data_offset =
     20;
-static constexpr dart::compiler::target::word LocalHandle_raw_offset = 0;
+static constexpr dart::compiler::target::word LocalHandle_ptr_offset = 0;
 static constexpr dart::compiler::target::word
     MarkingStackBlock_pointers_offset = 8;
 static constexpr dart::compiler::target::word MarkingStackBlock_top_offset = 4;
@@ -2803,7 +2803,7 @@
     LinkedHashMap_type_arguments_offset = 8;
 static constexpr dart::compiler::target::word LinkedHashMap_used_data_offset =
     40;
-static constexpr dart::compiler::target::word LocalHandle_raw_offset = 0;
+static constexpr dart::compiler::target::word LocalHandle_ptr_offset = 0;
 static constexpr dart::compiler::target::word
     MarkingStackBlock_pointers_offset = 16;
 static constexpr dart::compiler::target::word MarkingStackBlock_top_offset = 8;
@@ -3326,7 +3326,7 @@
     LinkedHashMap_type_arguments_offset = 4;
 static constexpr dart::compiler::target::word LinkedHashMap_used_data_offset =
     20;
-static constexpr dart::compiler::target::word LocalHandle_raw_offset = 0;
+static constexpr dart::compiler::target::word LocalHandle_ptr_offset = 0;
 static constexpr dart::compiler::target::word
     MarkingStackBlock_pointers_offset = 8;
 static constexpr dart::compiler::target::word MarkingStackBlock_top_offset = 4;
@@ -3842,7 +3842,7 @@
     LinkedHashMap_type_arguments_offset = 8;
 static constexpr dart::compiler::target::word LinkedHashMap_used_data_offset =
     40;
-static constexpr dart::compiler::target::word LocalHandle_raw_offset = 0;
+static constexpr dart::compiler::target::word LocalHandle_ptr_offset = 0;
 static constexpr dart::compiler::target::word
     MarkingStackBlock_pointers_offset = 16;
 static constexpr dart::compiler::target::word MarkingStackBlock_top_offset = 8;
@@ -4382,7 +4382,7 @@
     AOT_LinkedHashMap_type_arguments_offset = 4;
 static constexpr dart::compiler::target::word
     AOT_LinkedHashMap_used_data_offset = 20;
-static constexpr dart::compiler::target::word AOT_LocalHandle_raw_offset = 0;
+static constexpr dart::compiler::target::word AOT_LocalHandle_ptr_offset = 0;
 static constexpr dart::compiler::target::word
     AOT_MarkingStackBlock_pointers_offset = 8;
 static constexpr dart::compiler::target::word AOT_MarkingStackBlock_top_offset =
@@ -4966,7 +4966,7 @@
     AOT_LinkedHashMap_type_arguments_offset = 8;
 static constexpr dart::compiler::target::word
     AOT_LinkedHashMap_used_data_offset = 40;
-static constexpr dart::compiler::target::word AOT_LocalHandle_raw_offset = 0;
+static constexpr dart::compiler::target::word AOT_LocalHandle_ptr_offset = 0;
 static constexpr dart::compiler::target::word
     AOT_MarkingStackBlock_pointers_offset = 16;
 static constexpr dart::compiler::target::word AOT_MarkingStackBlock_top_offset =
@@ -5556,7 +5556,7 @@
     AOT_LinkedHashMap_type_arguments_offset = 8;
 static constexpr dart::compiler::target::word
     AOT_LinkedHashMap_used_data_offset = 40;
-static constexpr dart::compiler::target::word AOT_LocalHandle_raw_offset = 0;
+static constexpr dart::compiler::target::word AOT_LocalHandle_ptr_offset = 0;
 static constexpr dart::compiler::target::word
     AOT_MarkingStackBlock_pointers_offset = 16;
 static constexpr dart::compiler::target::word AOT_MarkingStackBlock_top_offset =
@@ -6141,7 +6141,7 @@
     AOT_LinkedHashMap_type_arguments_offset = 4;
 static constexpr dart::compiler::target::word
     AOT_LinkedHashMap_used_data_offset = 20;
-static constexpr dart::compiler::target::word AOT_LocalHandle_raw_offset = 0;
+static constexpr dart::compiler::target::word AOT_LocalHandle_ptr_offset = 0;
 static constexpr dart::compiler::target::word
     AOT_MarkingStackBlock_pointers_offset = 8;
 static constexpr dart::compiler::target::word AOT_MarkingStackBlock_top_offset =
@@ -6718,7 +6718,7 @@
     AOT_LinkedHashMap_type_arguments_offset = 8;
 static constexpr dart::compiler::target::word
     AOT_LinkedHashMap_used_data_offset = 40;
-static constexpr dart::compiler::target::word AOT_LocalHandle_raw_offset = 0;
+static constexpr dart::compiler::target::word AOT_LocalHandle_ptr_offset = 0;
 static constexpr dart::compiler::target::word
     AOT_MarkingStackBlock_pointers_offset = 16;
 static constexpr dart::compiler::target::word AOT_MarkingStackBlock_top_offset =
@@ -7301,7 +7301,7 @@
     AOT_LinkedHashMap_type_arguments_offset = 8;
 static constexpr dart::compiler::target::word
     AOT_LinkedHashMap_used_data_offset = 40;
-static constexpr dart::compiler::target::word AOT_LocalHandle_raw_offset = 0;
+static constexpr dart::compiler::target::word AOT_LocalHandle_ptr_offset = 0;
 static constexpr dart::compiler::target::word
     AOT_MarkingStackBlock_pointers_offset = 16;
 static constexpr dart::compiler::target::word AOT_MarkingStackBlock_top_offset =
diff --git a/runtime/vm/compiler/runtime_offsets_list.h b/runtime/vm/compiler/runtime_offsets_list.h
index 86412d2..75c72ff 100644
--- a/runtime/vm/compiler/runtime_offsets_list.h
+++ b/runtime/vm/compiler/runtime_offsets_list.h
@@ -139,7 +139,7 @@
   FIELD(LinkedHashMap, index_offset)                                           \
   FIELD(LinkedHashMap, type_arguments_offset)                                  \
   FIELD(LinkedHashMap, used_data_offset)                                       \
-  FIELD(LocalHandle, raw_offset)                                               \
+  FIELD(LocalHandle, ptr_offset)                                               \
   FIELD(MarkingStackBlock, pointers_offset)                                    \
   FIELD(MarkingStackBlock, top_offset)                                         \
   FIELD(MegamorphicCache, buckets_offset)                                      \
@@ -302,86 +302,87 @@
       kNumberOfCpuRegisters - 1,                                               \
       [](Register reg) { return (kDartAvailableCpuRegs & (1 << reg)) != 0; })) \
                                                                                \
-  SIZEOF(AbstractType, InstanceSize, AbstractTypeLayout)                       \
-  SIZEOF(ApiError, InstanceSize, ApiErrorLayout)                               \
-  SIZEOF(Array, InstanceSize, ArrayLayout)                                     \
-  SIZEOF(Array, header_size, ArrayLayout)                                      \
-  SIZEOF(Bool, InstanceSize, BoolLayout)                                       \
-  SIZEOF(Capability, InstanceSize, CapabilityLayout)                           \
-  SIZEOF(Class, InstanceSize, ClassLayout)                                     \
-  SIZEOF(Closure, InstanceSize, ClosureLayout)                                 \
-  SIZEOF(ClosureData, InstanceSize, ClosureDataLayout)                         \
-  SIZEOF(Code, InstanceSize, CodeLayout)                                       \
-  SIZEOF(CodeSourceMap, HeaderSize, CodeSourceMapLayout)                       \
-  SIZEOF(CompressedStackMaps, HeaderSize, CompressedStackMapsLayout)           \
-  SIZEOF(Context, InstanceSize, ContextLayout)                                 \
-  SIZEOF(Context, header_size, ContextLayout)                                  \
-  SIZEOF(ContextScope, InstanceSize, ContextScopeLayout)                       \
-  SIZEOF(Double, InstanceSize, DoubleLayout)                                   \
-  SIZEOF(DynamicLibrary, InstanceSize, DynamicLibraryLayout)                   \
-  SIZEOF(ExceptionHandlers, InstanceSize, ExceptionHandlersLayout)             \
-  SIZEOF(ExternalOneByteString, InstanceSize, ExternalOneByteStringLayout)     \
-  SIZEOF(ExternalTwoByteString, InstanceSize, ExternalTwoByteStringLayout)     \
-  SIZEOF(ExternalTypedData, InstanceSize, ExternalTypedDataLayout)             \
-  SIZEOF(FfiTrampolineData, InstanceSize, FfiTrampolineDataLayout)             \
-  SIZEOF(Field, InstanceSize, FieldLayout)                                     \
-  SIZEOF(Float32x4, InstanceSize, Float32x4Layout)                             \
-  SIZEOF(Float64x2, InstanceSize, Float64x2Layout)                             \
-  SIZEOF(Function, InstanceSize, FunctionLayout)                               \
-  SIZEOF(FunctionType, InstanceSize, FunctionTypeLayout)                       \
-  SIZEOF(FutureOr, InstanceSize, FutureOrLayout)                               \
-  SIZEOF(GrowableObjectArray, InstanceSize, GrowableObjectArrayLayout)         \
-  SIZEOF(ICData, InstanceSize, ICDataLayout)                                   \
-  SIZEOF(Instance, InstanceSize, InstanceLayout)                               \
-  SIZEOF(Instructions, UnalignedHeaderSize, InstructionsLayout)                \
-  SIZEOF(InstructionsSection, UnalignedHeaderSize, InstructionsSectionLayout)  \
-  SIZEOF(Int32x4, InstanceSize, Int32x4Layout)                                 \
-  SIZEOF(Integer, InstanceSize, IntegerLayout)                                 \
-  SIZEOF(KernelProgramInfo, InstanceSize, KernelProgramInfoLayout)             \
-  SIZEOF(LanguageError, InstanceSize, LanguageErrorLayout)                     \
-  SIZEOF(Library, InstanceSize, LibraryLayout)                                 \
-  SIZEOF(LibraryPrefix, InstanceSize, LibraryPrefixLayout)                     \
-  SIZEOF(LinkedHashMap, InstanceSize, LinkedHashMapLayout)                     \
-  SIZEOF(LocalVarDescriptors, InstanceSize, LocalVarDescriptorsLayout)         \
-  SIZEOF(MegamorphicCache, InstanceSize, MegamorphicCacheLayout)               \
-  SIZEOF(Mint, InstanceSize, MintLayout)                                       \
-  SIZEOF(MirrorReference, InstanceSize, MirrorReferenceLayout)                 \
-  SIZEOF(MonomorphicSmiableCall, InstanceSize, MonomorphicSmiableCallLayout)   \
-  SIZEOF(Namespace, InstanceSize, NamespaceLayout)                             \
+  SIZEOF(AbstractType, InstanceSize, UntaggedAbstractType)                     \
+  SIZEOF(ApiError, InstanceSize, UntaggedApiError)                             \
+  SIZEOF(Array, InstanceSize, UntaggedArray)                                   \
+  SIZEOF(Array, header_size, UntaggedArray)                                    \
+  SIZEOF(Bool, InstanceSize, UntaggedBool)                                     \
+  SIZEOF(Capability, InstanceSize, UntaggedCapability)                         \
+  SIZEOF(Class, InstanceSize, UntaggedClass)                                   \
+  SIZEOF(Closure, InstanceSize, UntaggedClosure)                               \
+  SIZEOF(ClosureData, InstanceSize, UntaggedClosureData)                       \
+  SIZEOF(Code, InstanceSize, UntaggedCode)                                     \
+  SIZEOF(CodeSourceMap, HeaderSize, UntaggedCodeSourceMap)                     \
+  SIZEOF(CompressedStackMaps, HeaderSize, UntaggedCompressedStackMaps)         \
+  SIZEOF(Context, InstanceSize, UntaggedContext)                               \
+  SIZEOF(Context, header_size, UntaggedContext)                                \
+  SIZEOF(ContextScope, InstanceSize, UntaggedContextScope)                     \
+  SIZEOF(Double, InstanceSize, UntaggedDouble)                                 \
+  SIZEOF(DynamicLibrary, InstanceSize, UntaggedDynamicLibrary)                 \
+  SIZEOF(ExceptionHandlers, InstanceSize, UntaggedExceptionHandlers)           \
+  SIZEOF(ExternalOneByteString, InstanceSize, UntaggedExternalOneByteString)   \
+  SIZEOF(ExternalTwoByteString, InstanceSize, UntaggedExternalTwoByteString)   \
+  SIZEOF(ExternalTypedData, InstanceSize, UntaggedExternalTypedData)           \
+  SIZEOF(FfiTrampolineData, InstanceSize, UntaggedFfiTrampolineData)           \
+  SIZEOF(Field, InstanceSize, UntaggedField)                                   \
+  SIZEOF(Float32x4, InstanceSize, UntaggedFloat32x4)                           \
+  SIZEOF(Float64x2, InstanceSize, UntaggedFloat64x2)                           \
+  SIZEOF(Function, InstanceSize, UntaggedFunction)                             \
+  SIZEOF(FunctionType, InstanceSize, UntaggedFunctionType)                     \
+  SIZEOF(FutureOr, InstanceSize, UntaggedFutureOr)                             \
+  SIZEOF(GrowableObjectArray, InstanceSize, UntaggedGrowableObjectArray)       \
+  SIZEOF(ICData, InstanceSize, UntaggedICData)                                 \
+  SIZEOF(Instance, InstanceSize, UntaggedInstance)                             \
+  SIZEOF(Instructions, UnalignedHeaderSize, UntaggedInstructions)              \
+  SIZEOF(InstructionsSection, UnalignedHeaderSize,                             \
+         UntaggedInstructionsSection)                                          \
+  SIZEOF(Int32x4, InstanceSize, UntaggedInt32x4)                               \
+  SIZEOF(Integer, InstanceSize, UntaggedInteger)                               \
+  SIZEOF(KernelProgramInfo, InstanceSize, UntaggedKernelProgramInfo)           \
+  SIZEOF(LanguageError, InstanceSize, UntaggedLanguageError)                   \
+  SIZEOF(Library, InstanceSize, UntaggedLibrary)                               \
+  SIZEOF(LibraryPrefix, InstanceSize, UntaggedLibraryPrefix)                   \
+  SIZEOF(LinkedHashMap, InstanceSize, UntaggedLinkedHashMap)                   \
+  SIZEOF(LocalVarDescriptors, InstanceSize, UntaggedLocalVarDescriptors)       \
+  SIZEOF(MegamorphicCache, InstanceSize, UntaggedMegamorphicCache)             \
+  SIZEOF(Mint, InstanceSize, UntaggedMint)                                     \
+  SIZEOF(MirrorReference, InstanceSize, UntaggedMirrorReference)               \
+  SIZEOF(MonomorphicSmiableCall, InstanceSize, UntaggedMonomorphicSmiableCall) \
+  SIZEOF(Namespace, InstanceSize, UntaggedNamespace)                           \
   SIZEOF(NativeArguments, StructSize, NativeArguments)                         \
-  SIZEOF(Number, InstanceSize, NumberLayout)                                   \
-  SIZEOF(Object, InstanceSize, ObjectLayout)                                   \
-  SIZEOF(ObjectPool, InstanceSize, ObjectPoolLayout)                           \
-  SIZEOF(OneByteString, InstanceSize, OneByteStringLayout)                     \
-  SIZEOF(PatchClass, InstanceSize, PatchClassLayout)                           \
-  SIZEOF(PcDescriptors, HeaderSize, PcDescriptorsLayout)                       \
-  SIZEOF(Pointer, InstanceSize, PointerLayout)                                 \
-  SIZEOF(ReceivePort, InstanceSize, ReceivePortLayout)                         \
-  SIZEOF(RegExp, InstanceSize, RegExpLayout)                                   \
-  SIZEOF(Script, InstanceSize, ScriptLayout)                                   \
-  SIZEOF(SendPort, InstanceSize, SendPortLayout)                               \
-  SIZEOF(SingleTargetCache, InstanceSize, SingleTargetCacheLayout)             \
-  SIZEOF(Smi, InstanceSize, SmiLayout)                                         \
-  SIZEOF(StackTrace, InstanceSize, StackTraceLayout)                           \
-  SIZEOF(String, InstanceSize, StringLayout)                                   \
-  SIZEOF(SubtypeTestCache, InstanceSize, SubtypeTestCacheLayout)               \
-  SIZEOF(LoadingUnit, InstanceSize, LoadingUnitLayout)                         \
-  SIZEOF(TransferableTypedData, InstanceSize, TransferableTypedDataLayout)     \
-  SIZEOF(TwoByteString, InstanceSize, TwoByteStringLayout)                     \
-  SIZEOF(Type, InstanceSize, TypeLayout)                                       \
-  SIZEOF(TypeArguments, InstanceSize, TypeArgumentsLayout)                     \
-  SIZEOF(TypeParameter, InstanceSize, TypeParameterLayout)                     \
-  SIZEOF(TypeRef, InstanceSize, TypeRefLayout)                                 \
-  SIZEOF(TypedData, InstanceSize, TypedDataLayout)                             \
-  SIZEOF(TypedDataBase, InstanceSize, TypedDataBaseLayout)                     \
-  SIZEOF(TypedDataView, InstanceSize, TypedDataViewLayout)                     \
-  SIZEOF(UnhandledException, InstanceSize, UnhandledExceptionLayout)           \
-  SIZEOF(UnlinkedCall, InstanceSize, UnlinkedCallLayout)                       \
-  SIZEOF(UnwindError, InstanceSize, UnwindErrorLayout)                         \
-  SIZEOF(UserTag, InstanceSize, UserTagLayout)                                 \
-  SIZEOF(WeakProperty, InstanceSize, WeakPropertyLayout)                       \
+  SIZEOF(Number, InstanceSize, UntaggedNumber)                                 \
+  SIZEOF(Object, InstanceSize, UntaggedObject)                                 \
+  SIZEOF(ObjectPool, InstanceSize, UntaggedObjectPool)                         \
+  SIZEOF(OneByteString, InstanceSize, UntaggedOneByteString)                   \
+  SIZEOF(PatchClass, InstanceSize, UntaggedPatchClass)                         \
+  SIZEOF(PcDescriptors, HeaderSize, UntaggedPcDescriptors)                     \
+  SIZEOF(Pointer, InstanceSize, UntaggedPointer)                               \
+  SIZEOF(ReceivePort, InstanceSize, UntaggedReceivePort)                       \
+  SIZEOF(RegExp, InstanceSize, UntaggedRegExp)                                 \
+  SIZEOF(Script, InstanceSize, UntaggedScript)                                 \
+  SIZEOF(SendPort, InstanceSize, UntaggedSendPort)                             \
+  SIZEOF(SingleTargetCache, InstanceSize, UntaggedSingleTargetCache)           \
+  SIZEOF(Smi, InstanceSize, UntaggedSmi)                                       \
+  SIZEOF(StackTrace, InstanceSize, UntaggedStackTrace)                         \
+  SIZEOF(String, InstanceSize, UntaggedString)                                 \
+  SIZEOF(SubtypeTestCache, InstanceSize, UntaggedSubtypeTestCache)             \
+  SIZEOF(LoadingUnit, InstanceSize, UntaggedLoadingUnit)                       \
+  SIZEOF(TransferableTypedData, InstanceSize, UntaggedTransferableTypedData)   \
+  SIZEOF(TwoByteString, InstanceSize, UntaggedTwoByteString)                   \
+  SIZEOF(Type, InstanceSize, UntaggedType)                                     \
+  SIZEOF(TypeArguments, InstanceSize, UntaggedTypeArguments)                   \
+  SIZEOF(TypeParameter, InstanceSize, UntaggedTypeParameter)                   \
+  SIZEOF(TypeRef, InstanceSize, UntaggedTypeRef)                               \
+  SIZEOF(TypedData, InstanceSize, UntaggedTypedData)                           \
+  SIZEOF(TypedDataBase, InstanceSize, UntaggedTypedDataBase)                   \
+  SIZEOF(TypedDataView, InstanceSize, UntaggedTypedDataView)                   \
+  SIZEOF(UnhandledException, InstanceSize, UntaggedUnhandledException)         \
+  SIZEOF(UnlinkedCall, InstanceSize, UntaggedUnlinkedCall)                     \
+  SIZEOF(UnwindError, InstanceSize, UntaggedUnwindError)                       \
+  SIZEOF(UserTag, InstanceSize, UntaggedUserTag)                               \
+  SIZEOF(WeakProperty, InstanceSize, UntaggedWeakProperty)                     \
   SIZEOF(WeakSerializationReference, InstanceSize,                             \
-         WeakSerializationReferenceLayout)                                     \
+         UntaggedWeakSerializationReference)                                   \
   PAYLOAD_SIZEOF(CodeSourceMap, InstanceSize, HeaderSize)                      \
   PAYLOAD_SIZEOF(CompressedStackMaps, InstanceSize, HeaderSize)                \
   PAYLOAD_SIZEOF(InstructionsSection, InstanceSize, HeaderSize)                \
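
The SIZEOF/PAYLOAD_SIZEOF entries above form an X-macro list: each entry expands through whatever macro the including site supplies. A minimal, self-contained C++ sketch of the pattern follows; the demo types and the PRINT_SIZEOF consumer are hypothetical, not the VM's actual expansion (which feeds the compiler's target:: offset tables):

#include <cstdio>

#define DEMO_SIZEOF_LIST(SIZEOF)                                               \
  SIZEOF(Array, InstanceSize, UntaggedArray)                                   \
  SIZEOF(Double, InstanceSize, UntaggedDouble)

struct UntaggedArray { char header_[8]; };   // stand-in layout
struct UntaggedDouble { double value_; };    // stand-in layout

// Hypothetical consumer: emits "<Class>_<Method> = sizeof(<Untagged>)".
#define PRINT_SIZEOF(Class, Method, Untagged)                                  \
  std::printf(#Class "_" #Method " = %zu\n", sizeof(Untagged));

int main() {
  DEMO_SIZEOF_LIST(PRINT_SIZEOF)
  return 0;
}
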
diff --git a/runtime/vm/compiler/stub_code_compiler.cc b/runtime/vm/compiler/stub_code_compiler.cc
index 8a434fe..e08b548 100644
--- a/runtime/vm/compiler/stub_code_compiler.cc
+++ b/runtime/vm/compiler/stub_code_compiler.cc
@@ -619,7 +619,7 @@
                          target::Type::type_state_offset(), kByte);
   __ CompareImmediate(
       TypeTestABI::kScratchReg,
-      target::AbstractTypeLayout::kTypeStateFinalizedInstantiated);
+      target::UntaggedAbstractType::kTypeStateFinalizedInstantiated);
   __ BranchIf(NOT_EQUAL, &is_complex_case, Assembler::kNearJump);
 
   // This [Type] could be a FutureOr. Subtype2TestCache does not support Smi.
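
The compare above takes the fast path only for a type whose state is finalized-and-instantiated. A minimal sketch of the check, with illustrative enum values (the real constants live on UntaggedAbstractType):

#include <cstdint>

enum TypeState : int8_t {  // illustrative values only
  kTypeStateAllocated = 0,
  kTypeStateBeingFinalized = 1,
  kTypeStateFinalizedInstantiated = 2,
  kTypeStateFinalizedUninstantiated = 3,
};

bool TakesSimpleSubtypePath(int8_t type_state) {
  // Mirrors CompareImmediate + BranchIf(NOT_EQUAL, &is_complex_case).
  return type_state == kTypeStateFinalizedInstantiated;
}
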
diff --git a/runtime/vm/compiler/stub_code_compiler_arm.cc b/runtime/vm/compiler/stub_code_compiler_arm.cc
index 01e5a1f..0dfaa61 100644
--- a/runtime/vm/compiler/stub_code_compiler_arm.cc
+++ b/runtime/vm/compiler/stub_code_compiler_arm.cc
@@ -1058,10 +1058,10 @@
     // R3: new object end address.
     // R9: allocation size.
     {
-      const intptr_t shift = target::ObjectLayout::kTagBitsSizeTagPos -
+      const intptr_t shift = target::UntaggedObject::kTagBitsSizeTagPos -
                              target::ObjectAlignment::kObjectAlignmentLog2;
 
-      __ CompareImmediate(R9, target::ObjectLayout::kSizeTagMaxSizeTag);
+      __ CompareImmediate(R9, target::UntaggedObject::kSizeTagMaxSizeTag);
       __ mov(R8, Operand(R9, LSL, shift), LS);
       __ mov(R8, Operand(0), HI);
 
@@ -1305,7 +1305,7 @@
 // Input:
 //   R1: number of context variables.
 // Output:
-//   R0: new allocated RawContext object.
+//   R0: newly allocated Context object.
 // Clobbered:
 //   R2, R3, R8, R9
 static void GenerateAllocateContext(Assembler* assembler, Label* slow_case) {
@@ -1349,9 +1349,9 @@
   // R1: number of context variables.
   // R2: object size.
   // R3: next object start.
-  const intptr_t shift = target::ObjectLayout::kTagBitsSizeTagPos -
+  const intptr_t shift = target::UntaggedObject::kTagBitsSizeTagPos -
                          target::ObjectAlignment::kObjectAlignmentLog2;
-  __ CompareImmediate(R2, target::ObjectLayout::kSizeTagMaxSizeTag);
+  __ CompareImmediate(R2, target::UntaggedObject::kSizeTagMaxSizeTag);
   // If no size tag overflow, shift R2 left, else set R2 to zero.
   __ mov(R9, Operand(R2, LSL, shift), LS);
   __ mov(R9, Operand(0), HI);
@@ -1377,7 +1377,7 @@
 // Input:
 //   R1: number of context variables.
 // Output:
-//   R0: new allocated RawContext object.
+//   R0: newly allocated Context object.
 // Clobbered:
 //   Potentially any since it can go to runtime.
 void StubCodeCompiler::GenerateAllocateContextStub(Assembler* assembler) {
@@ -1438,7 +1438,7 @@
 // Input:
 //   R4: context variable to clone.
 // Output:
-//   R0: new allocated RawContext object.
+//   R0: newly allocated Context object.
 // Clobbered:
 //   Potentially any since it can go to runtime.
 void StubCodeCompiler::GenerateCloneContextStub(Assembler* assembler) {
@@ -1545,13 +1545,13 @@
 
   if (cards) {
     __ ldr(TMP, FieldAddress(R1, target::Object::tags_offset()));
-    __ tst(TMP, Operand(1 << target::ObjectLayout::kCardRememberedBit));
+    __ tst(TMP, Operand(1 << target::UntaggedObject::kCardRememberedBit));
     __ b(&remember_card, NOT_ZERO);
   } else {
 #if defined(DEBUG)
     Label ok;
     __ ldr(TMP, FieldAddress(R1, target::Object::tags_offset()));
-    __ tst(TMP, Operand(1 << target::ObjectLayout::kCardRememberedBit));
+    __ tst(TMP, Operand(1 << target::UntaggedObject::kCardRememberedBit));
     __ b(&ok, ZERO);
     __ Stop("Wrong barrier");
     __ Bind(&ok);
@@ -1568,7 +1568,7 @@
   Label retry;
   __ Bind(&retry);
   __ ldrex(R2, R3);
-  __ bic(R2, R2, Operand(1 << target::ObjectLayout::kOldAndNotRememberedBit));
+  __ bic(R2, R2, Operand(1 << target::UntaggedObject::kOldAndNotRememberedBit));
   __ strex(R4, R2, R3);
   __ cmp(R4, Operand(1));
   __ b(&retry, EQ);
@@ -1616,9 +1616,9 @@
   // R3: Untagged address of header word (ldrex/strex do not support offsets).
   __ Bind(&marking_retry);
   __ ldrex(R2, R3);
-  __ tst(R2, Operand(1 << target::ObjectLayout::kOldAndNotMarkedBit));
+  __ tst(R2, Operand(1 << target::UntaggedObject::kOldAndNotMarkedBit));
   __ b(&lost_race, ZERO);
-  __ bic(R2, R2, Operand(1 << target::ObjectLayout::kOldAndNotMarkedBit));
+  __ bic(R2, R2, Operand(1 << target::UntaggedObject::kOldAndNotMarkedBit));
   __ strex(R4, R2, R3);
   __ cmp(R4, Operand(1));
   __ b(&marking_retry, EQ);
@@ -3484,10 +3484,10 @@
   /* R1: new object end address. */
   /* R2: allocation size. */
   {
-    __ CompareImmediate(R2, target::ObjectLayout::kSizeTagMaxSizeTag);
+    __ CompareImmediate(R2, target::UntaggedObject::kSizeTagMaxSizeTag);
     __ mov(R3,
            Operand(R2, LSL,
-                   target::ObjectLayout::kTagBitsSizeTagPos -
+                   target::UntaggedObject::kTagBitsSizeTagPos -
                        target::ObjectAlignment::kObjectAlignmentLog2),
            LS);
     __ mov(R3, Operand(0), HI);
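
The CompareImmediate/LSL sequences above implement the header size-tag encoding: an aligned allocation size that fits in the tag bits is stored shifted into place, and anything larger stores zero so the GC reads the size from the object instead. A minimal C++ sketch with assumed constants (the real values vary by architecture):

#include <cstdint>

// Assumed constants, for illustration only.
constexpr uintptr_t kObjectAlignmentLog2 = 4;  // 16-byte alignment
constexpr uintptr_t kTagBitsSizeTagPos = 8;
constexpr uintptr_t kSizeTagBits = 8;
constexpr uintptr_t kSizeTagMaxSizeTag =
    ((uintptr_t{1} << kSizeTagBits) - 1) << kObjectAlignmentLog2;

uintptr_t SizeTagBits(uintptr_t allocation_size) {
  // allocation_size is a multiple of the object alignment, so its low
  // kObjectAlignmentLog2 bits are zero; shifting by
  // kTagBitsSizeTagPos - kObjectAlignmentLog2 lands the size (in
  // allocation units) at the tag position, as the LSL above does.
  const uintptr_t shift = kTagBitsSizeTagPos - kObjectAlignmentLog2;
  if (allocation_size > kSizeTagMaxSizeTag) {
    return 0;  // Overflow: tag of 0, GC falls back to the length field.
  }
  return allocation_size << shift;
}
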
diff --git a/runtime/vm/compiler/stub_code_compiler_arm64.cc b/runtime/vm/compiler/stub_code_compiler_arm64.cc
index e8d950d..2bbe95e 100644
--- a/runtime/vm/compiler/stub_code_compiler_arm64.cc
+++ b/runtime/vm/compiler/stub_code_compiler_arm64.cc
@@ -1189,9 +1189,9 @@
     // R2: array length as Smi.
     // R3: array size.
     // R7: new object end address.
-    const intptr_t shift = target::ObjectLayout::kTagBitsSizeTagPos -
+    const intptr_t shift = target::UntaggedObject::kTagBitsSizeTagPos -
                            target::ObjectAlignment::kObjectAlignmentLog2;
-    __ CompareImmediate(R3, target::ObjectLayout::kSizeTagMaxSizeTag);
+    __ CompareImmediate(R3, target::UntaggedObject::kSizeTagMaxSizeTag);
     // If no size tag overflow, shift R1 left, else set R1 to zero.
     __ LslImmediate(TMP, R3, shift);
     __ csel(R1, TMP, R1, LS);
@@ -1442,7 +1442,7 @@
 // Input:
 //   R1: number of context variables.
 // Output:
-//   R0: new allocated RawContext object.
+//   R0: newly allocated Context object.
 // Clobbered:
 //   R2, R3, R4, TMP
 static void GenerateAllocateContextSpaceStub(Assembler* assembler,
@@ -1485,9 +1485,9 @@
   // R0: new object.
   // R1: number of context variables.
   // R2: object size.
-  const intptr_t shift = target::ObjectLayout::kTagBitsSizeTagPos -
+  const intptr_t shift = target::UntaggedObject::kTagBitsSizeTagPos -
                          target::ObjectAlignment::kObjectAlignmentLog2;
-  __ CompareImmediate(R2, target::ObjectLayout::kSizeTagMaxSizeTag);
+  __ CompareImmediate(R2, target::UntaggedObject::kSizeTagMaxSizeTag);
   // If no size tag overflow, shift R2 left, else set R2 to zero.
   __ LslImmediate(TMP, R2, shift);
   __ csel(R2, TMP, R2, LS);
@@ -1512,7 +1512,7 @@
 // Input:
 //   R1: number of context variables.
 // Output:
-//   R0: new allocated RawContext object.
+//   R0: new allocated Context object.
 // Clobbered:
 //   R2, R3, R4, TMP
 void StubCodeCompiler::GenerateAllocateContextStub(Assembler* assembler) {
@@ -1576,7 +1576,7 @@
 // Input:
 //   R5: context variable to clone.
 // Output:
-//   R0: new allocated RawContext object.
+//   R0: newly allocated Context object.
 // Clobbered:
 //   R1, (R2), R3, R4, (TMP)
 void StubCodeCompiler::GenerateCloneContextStub(Assembler* assembler) {
@@ -1687,12 +1687,12 @@
 
   if (cards) {
     __ LoadFieldFromOffset(TMP, R1, target::Object::tags_offset(), kFourBytes);
-    __ tbnz(&remember_card, TMP, target::ObjectLayout::kCardRememberedBit);
+    __ tbnz(&remember_card, TMP, target::UntaggedObject::kCardRememberedBit);
   } else {
 #if defined(DEBUG)
     Label ok;
     __ LoadFieldFromOffset(TMP, R1, target::Object::tags_offset(), kFourBytes);
-    __ tbz(&ok, TMP, target::ObjectLayout::kCardRememberedBit);
+    __ tbz(&ok, TMP, target::UntaggedObject::kCardRememberedBit);
     __ Stop("Wrong barrier");
     __ Bind(&ok);
 #endif
@@ -1711,7 +1711,7 @@
   __ Bind(&retry);
   __ ldxr(R2, R3, kEightBytes);
   __ AndImmediate(R2, R2,
-                  ~(1 << target::ObjectLayout::kOldAndNotRememberedBit));
+                  ~(1 << target::UntaggedObject::kOldAndNotRememberedBit));
   __ stxr(R4, R2, R3, kEightBytes);
   __ cbnz(&retry, R4);
 
@@ -1761,8 +1761,8 @@
   // R3: Untagged address of header word (ldxr/stxr do not support offsets).
   __ Bind(&marking_retry);
   __ ldxr(R2, R3, kEightBytes);
-  __ tbz(&lost_race, R2, target::ObjectLayout::kOldAndNotMarkedBit);
-  __ AndImmediate(R2, R2, ~(1 << target::ObjectLayout::kOldAndNotMarkedBit));
+  __ tbz(&lost_race, R2, target::UntaggedObject::kOldAndNotMarkedBit);
+  __ AndImmediate(R2, R2, ~(1 << target::UntaggedObject::kOldAndNotMarkedBit));
   __ stxr(R4, R2, R3, kEightBytes);
   __ cbnz(&marking_retry, R4);
 
@@ -2777,7 +2777,7 @@
 // Used to check class and type arguments. Arguments passed in registers:
 //
 // Inputs (mostly from TypeTestABI struct):
-//   - kSubtypeTestCacheReg: SubtypeTestCacheLayout
+//   - kSubtypeTestCacheReg: UntaggedSubtypeTestCache
 //   - kInstanceReg: instance to test against.
 //   - kDstTypeReg: destination type (for n>=3).
 //   - kInstantiatorTypeArgumentsReg: instantiator type arguments (for n=5).
@@ -3615,9 +3615,9 @@
   /* R1: new object end address. */
   /* R2: allocation size. */
   {
-    __ CompareImmediate(R2, target::ObjectLayout::kSizeTagMaxSizeTag);
+    __ CompareImmediate(R2, target::UntaggedObject::kSizeTagMaxSizeTag);
     __ LslImmediate(R2, R2,
-                    target::ObjectLayout::kTagBitsSizeTagPos -
+                    target::UntaggedObject::kTagBitsSizeTagPos -
                         target::ObjectAlignment::kObjectAlignmentLog2);
     __ csel(R2, ZR, R2, HI);
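
The ldxr/stxr (and, on ARM32, ldrex/strex) retry loops above atomically clear a single header bit. A minimal C++ sketch of the same loop using a compare-exchange, with an assumed bit number:

#include <atomic>
#include <cstdint>

constexpr uint64_t kOldAndNotRememberedBit = 3;  // assumed, for illustration

void ClearRememberedBit(std::atomic<uint64_t>* tags) {
  uint64_t old_tags = tags->load(std::memory_order_relaxed);
  // compare_exchange_weak may fail spuriously, like stxr losing the
  // exclusive reservation; on failure it reloads old_tags and we retry,
  // mirroring the branch back to the ldxr.
  while (!tags->compare_exchange_weak(
      old_tags, old_tags & ~(uint64_t{1} << kOldAndNotRememberedBit),
      std::memory_order_relaxed)) {
  }
}
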
 
diff --git a/runtime/vm/compiler/stub_code_compiler_ia32.cc b/runtime/vm/compiler/stub_code_compiler_ia32.cc
index 65939b2..229be66 100644
--- a/runtime/vm/compiler/stub_code_compiler_ia32.cc
+++ b/runtime/vm/compiler/stub_code_compiler_ia32.cc
@@ -866,9 +866,9 @@
     {
       Label size_tag_overflow, done;
       __ movl(EDI, EBX);
-      __ cmpl(EDI, Immediate(target::ObjectLayout::kSizeTagMaxSizeTag));
+      __ cmpl(EDI, Immediate(target::UntaggedObject::kSizeTagMaxSizeTag));
       __ j(ABOVE, &size_tag_overflow, Assembler::kNearJump);
-      __ shll(EDI, Immediate(target::ObjectLayout::kTagBitsSizeTagPos -
+      __ shll(EDI, Immediate(target::UntaggedObject::kTagBitsSizeTagPos -
                              target::ObjectAlignment::kObjectAlignmentLog2));
       __ jmp(&done, Assembler::kNearJump);
 
@@ -1078,7 +1078,7 @@
 // Input:
 // EDX: number of context variables.
 // Output:
-// EAX: new allocated RawContext object.
+// EAX: newly allocated Context object.
 // Clobbered:
 // EBX
 static void GenerateAllocateContextSpaceStub(Assembler* assembler,
@@ -1128,9 +1128,9 @@
     Label size_tag_overflow, done;
     __ leal(EBX, Address(EDX, TIMES_4, fixed_size_plus_alignment_padding));
     __ andl(EBX, Immediate(-target::ObjectAlignment::kObjectAlignment));
-    __ cmpl(EBX, Immediate(target::ObjectLayout::kSizeTagMaxSizeTag));
+    __ cmpl(EBX, Immediate(target::UntaggedObject::kSizeTagMaxSizeTag));
     __ j(ABOVE, &size_tag_overflow, Assembler::kNearJump);
-    __ shll(EBX, Immediate(target::ObjectLayout::kTagBitsSizeTagPos -
+    __ shll(EBX, Immediate(target::UntaggedObject::kTagBitsSizeTagPos -
                            target::ObjectAlignment::kObjectAlignmentLog2));
     __ jmp(&done);
 
@@ -1157,7 +1157,7 @@
 // Input:
 // EDX: number of context variables.
 // Output:
-// EAX: new allocated RawContext object.
+// EAX: newly allocated Context object.
 // Clobbered:
 // EBX, EDX
 void StubCodeCompiler::GenerateAllocateContextStub(Assembler* assembler) {
@@ -1223,7 +1223,7 @@
 // Input:
 //   ECX: context variable.
 // Output:
-//   EAX: new allocated RawContext object.
+//   EAX: newly allocated Context object.
 // Clobbered:
 //   EBX, ECX, EDX
 void StubCodeCompiler::GenerateCloneContextStub(Assembler* assembler) {
@@ -1320,7 +1320,8 @@
   // Spilled: EAX, ECX
   // EDX: Address being stored
   __ movl(EAX, FieldAddress(EDX, target::Object::tags_offset()));
-  __ testl(EAX, Immediate(1 << target::ObjectLayout::kOldAndNotRememberedBit));
+  __ testl(EAX,
+           Immediate(1 << target::UntaggedObject::kOldAndNotRememberedBit));
   __ j(NOT_EQUAL, &add_to_buffer, Assembler::kNearJump);
   __ popl(ECX);
   __ popl(EAX);
@@ -1333,12 +1334,12 @@
 
   if (cards) {
     // Check if this object is using remembered cards.
-    __ testl(EAX, Immediate(1 << target::ObjectLayout::kCardRememberedBit));
+    __ testl(EAX, Immediate(1 << target::UntaggedObject::kCardRememberedBit));
     __ j(NOT_EQUAL, &remember_card, Assembler::kFarJump);  // Unlikely.
   } else {
 #if defined(DEBUG)
     Label ok;
-    __ testl(EAX, Immediate(1 << target::ObjectLayout::kCardRememberedBit));
+    __ testl(EAX, Immediate(1 << target::UntaggedObject::kCardRememberedBit));
     __ j(ZERO, &ok, Assembler::kFarJump);  // Unlikely.
     __ Stop("Wrong barrier");
     __ Bind(&ok);
@@ -1348,7 +1349,7 @@
   // lock+andl is an atomic read-modify-write.
   __ lock();
   __ andl(FieldAddress(EDX, target::Object::tags_offset()),
-          Immediate(~(1 << target::ObjectLayout::kOldAndNotRememberedBit)));
+          Immediate(~(1 << target::UntaggedObject::kOldAndNotRememberedBit)));
 
   // Load the StoreBuffer block out of the thread. Then load top_ out of the
   // StoreBufferBlock and add the address to the pointers_.
@@ -2991,9 +2992,9 @@
   /* EDI: allocation size. */
   {
     Label size_tag_overflow, done;
-    __ cmpl(EDI, Immediate(target::ObjectLayout::kSizeTagMaxSizeTag));
+    __ cmpl(EDI, Immediate(target::UntaggedObject::kSizeTagMaxSizeTag));
     __ j(ABOVE, &size_tag_overflow, Assembler::kNearJump);
-    __ shll(EDI, Immediate(target::ObjectLayout::kTagBitsSizeTagPos -
+    __ shll(EDI, Immediate(target::UntaggedObject::kTagBitsSizeTagPos -
                            target::ObjectAlignment::kObjectAlignmentLog2));
     __ jmp(&done, Assembler::kNearJump);
     __ Bind(&size_tag_overflow);
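
On ia32 the remembered-bit clear is a single lock-prefixed read-modify-write rather than a load-linked/store-conditional loop. A minimal sketch, bit number assumed:

#include <atomic>
#include <cstdint>

constexpr uint32_t kOldAndNotRememberedBit = 3;  // assumed, for illustration

void ClearRememberedBit(std::atomic<uint32_t>* tags) {
  // fetch_and with an unused result typically lowers to one lock-prefixed
  // AND instruction, matching the stub's lock(); andl(...) pair.
  tags->fetch_and(~(uint32_t{1} << kOldAndNotRememberedBit),
                  std::memory_order_relaxed);
}
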
diff --git a/runtime/vm/compiler/stub_code_compiler_x64.cc b/runtime/vm/compiler/stub_code_compiler_x64.cc
index fc4f6ac..175ed5d 100644
--- a/runtime/vm/compiler/stub_code_compiler_x64.cc
+++ b/runtime/vm/compiler/stub_code_compiler_x64.cc
@@ -1100,9 +1100,9 @@
     // RDI: allocation size.
     {
       Label size_tag_overflow, done;
-      __ cmpq(RDI, Immediate(target::ObjectLayout::kSizeTagMaxSizeTag));
+      __ cmpq(RDI, Immediate(target::UntaggedObject::kSizeTagMaxSizeTag));
       __ j(ABOVE, &size_tag_overflow, Assembler::kNearJump);
-      __ shlq(RDI, Immediate(target::ObjectLayout::kTagBitsSizeTagPos -
+      __ shlq(RDI, Immediate(target::UntaggedObject::kTagBitsSizeTagPos -
                              target::ObjectAlignment::kObjectAlignmentLog2));
       __ jmp(&done, Assembler::kNearJump);
 
@@ -1374,7 +1374,7 @@
 // Input:
 //   R10: number of context variables.
 // Output:
-//   RAX: new, uinitialised allocated RawContext object.
+//   RAX: newly allocated, uninitialised Context object.
 // Clobbered:
 //   R13
 static void GenerateAllocateContextSpaceStub(Assembler* assembler,
@@ -1420,9 +1420,9 @@
     Label size_tag_overflow, done;
     __ leaq(R13, Address(R10, TIMES_8, fixed_size_plus_alignment_padding));
     __ andq(R13, Immediate(-target::ObjectAlignment::kObjectAlignment));
-    __ cmpq(R13, Immediate(target::ObjectLayout::kSizeTagMaxSizeTag));
+    __ cmpq(R13, Immediate(target::UntaggedObject::kSizeTagMaxSizeTag));
     __ j(ABOVE, &size_tag_overflow, Assembler::kNearJump);
-    __ shlq(R13, Immediate(target::ObjectLayout::kTagBitsSizeTagPos -
+    __ shlq(R13, Immediate(target::UntaggedObject::kTagBitsSizeTagPos -
                            target::ObjectAlignment::kObjectAlignmentLog2));
     __ jmp(&done);
 
@@ -1449,7 +1449,7 @@
 // Input:
 //   R10: number of context variables.
 // Output:
-//   RAX: new allocated RawContext object.
+//   RAX: newly allocated Context object.
 // Clobbered:
 //   R9, R13
 void StubCodeCompiler::GenerateAllocateContextStub(Assembler* assembler) {
@@ -1517,7 +1517,7 @@
 // Input:
 //   R9: context to clone.
 // Output:
-//   RAX: new allocated RawContext object.
+//   RAX: newly allocated Context object.
 // Clobbered:
 //   R10, R13
 void StubCodeCompiler::GenerateCloneContextStub(Assembler* assembler) {
@@ -1621,13 +1621,13 @@
 
   if (cards) {
     __ movl(TMP, FieldAddress(RDX, target::Object::tags_offset()));
-    __ testl(TMP, Immediate(1 << target::ObjectLayout::kCardRememberedBit));
+    __ testl(TMP, Immediate(1 << target::UntaggedObject::kCardRememberedBit));
     __ j(NOT_ZERO, &remember_card, Assembler::kFarJump);
   } else {
 #if defined(DEBUG)
     Label ok;
     __ movl(TMP, FieldAddress(RDX, target::Object::tags_offset()));
-    __ testl(TMP, Immediate(1 << target::ObjectLayout::kCardRememberedBit));
+    __ testl(TMP, Immediate(1 << target::UntaggedObject::kCardRememberedBit));
     __ j(ZERO, &ok, Assembler::kFarJump);
     __ Stop("Wrong barrier");
     __ Bind(&ok);
@@ -1640,7 +1640,7 @@
   // lock+andq is an atomic read-modify-write.
   __ lock();
   __ andq(FieldAddress(RDX, target::Object::tags_offset()),
-          Immediate(~(1 << target::ObjectLayout::kOldAndNotRememberedBit)));
+          Immediate(~(1 << target::UntaggedObject::kOldAndNotRememberedBit)));
 
   // Save registers being destroyed.
   __ pushq(RAX);
@@ -1690,9 +1690,9 @@
   __ movq(RAX, FieldAddress(TMP, target::Object::tags_offset()));
   __ Bind(&retry);
   __ movq(RCX, RAX);
-  __ testq(RCX, Immediate(1 << target::ObjectLayout::kOldAndNotMarkedBit));
+  __ testq(RCX, Immediate(1 << target::UntaggedObject::kOldAndNotMarkedBit));
   __ j(ZERO, &lost_race);  // Marked by another thread.
-  __ andq(RCX, Immediate(~(1 << target::ObjectLayout::kOldAndNotMarkedBit)));
+  __ andq(RCX, Immediate(~(1 << target::UntaggedObject::kOldAndNotMarkedBit)));
   __ LockCmpxchgq(FieldAddress(TMP, target::Object::tags_offset()), RCX);
   __ j(NOT_EQUAL, &retry, Assembler::kNearJump);
 
@@ -2367,7 +2367,7 @@
     __ j(EQUAL, &call_target_function_through_unchecked_entry);
 
     // Check trivial exactness.
-    // Note: ICDataLayout::receivers_static_type_ is guaranteed to be not null
+    // Note: UntaggedICData::receivers_static_type_ is guaranteed to be not null
     // because we only emit calls to this stub when it is not null.
     __ movq(RCX,
             FieldAddress(RBX, target::ICData::receivers_static_type_offset()));
@@ -2731,7 +2731,7 @@
 // Used to check class and type arguments. Arguments passed in registers:
 //
 // Input registers (from TypeTestABI struct):
-//   - kSubtypeTestCacheReg: SubtypeTestCacheLayout
+//   - kSubtypeTestCacheReg: UntaggedSubtypeTestCache
 //   - kInstanceReg: instance to test against (must be preserved).
 //   - kDstTypeReg: destination type (for n>=3).
 //   - kInstantiatorTypeArgumentsReg : instantiator type arguments (for n>=5).
@@ -3567,9 +3567,9 @@
   /* R13: scratch register. */
   {
     Label size_tag_overflow, done;
-    __ cmpq(RDI, Immediate(target::ObjectLayout::kSizeTagMaxSizeTag));
+    __ cmpq(RDI, Immediate(target::UntaggedObject::kSizeTagMaxSizeTag));
     __ j(ABOVE, &size_tag_overflow, Assembler::kNearJump);
-    __ shlq(RDI, Immediate(target::ObjectLayout::kTagBitsSizeTagPos -
+    __ shlq(RDI, Immediate(target::UntaggedObject::kTagBitsSizeTagPos -
                            target::ObjectAlignment::kObjectAlignmentLog2));
     __ jmp(&done, Assembler::kNearJump);
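
The lock-cmpxchg loop in the marking barrier above either clears the old-and-not-marked bit or bails out when another thread already marked the object. A minimal sketch, bit number assumed:

#include <atomic>
#include <cstdint>

constexpr uint64_t kOldAndNotMarkedBit = 2;  // assumed, for illustration

// Returns true if this thread cleared the bit (won the race to mark the
// object), false if another thread marked it first.
bool TryAcquireMark(std::atomic<uint64_t>* tags) {
  uint64_t old_tags = tags->load(std::memory_order_relaxed);
  do {
    if ((old_tags & (uint64_t{1} << kOldAndNotMarkedBit)) == 0) {
      return false;  // Lost race: bit already cleared elsewhere.
    }
  } while (!tags->compare_exchange_weak(
      old_tags, old_tags & ~(uint64_t{1} << kOldAndNotMarkedBit),
      std::memory_order_relaxed));
  return true;
}
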
 
diff --git a/runtime/vm/compiler/write_barrier_elimination.cc b/runtime/vm/compiler/write_barrier_elimination.cc
index efc79e8..84b000f 100644
--- a/runtime/vm/compiler/write_barrier_elimination.cc
+++ b/runtime/vm/compiler/write_barrier_elimination.cc
@@ -351,9 +351,10 @@
 
 #define FOR_EACH_NATIVE_SLOT(class, underlying_type, field, __, ___)           \
   case Slot::Kind::k##class##_##field:                                         \
-    return std::is_base_of<InstanceLayout, underlying_type>::value ||          \
-           std::is_base_of<ContextLayout, underlying_type>::value ||           \
-           std::is_base_of<UnhandledExceptionLayout, underlying_type>::value;
+    return std::is_base_of<UntaggedInstance, underlying_type>::value ||        \
+           std::is_base_of<UntaggedContext, underlying_type>::value ||         \
+           std::is_base_of<UntaggedUnhandledException,                         \
+                           underlying_type>::value;
 
       NATIVE_SLOTS_LIST(FOR_EACH_NATIVE_SLOT)
 #undef FOR_EACH_NATIVE_SLOT
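
The rewritten FOR_EACH_NATIVE_SLOT case above decides per slot, at compile time, whether the slot's underlying untagged type derives from one of the listed bases. A minimal sketch of that dispatch with stand-in types and a hypothetical helper name:

#include <type_traits>

// Stand-in hierarchy; the real types are the Untagged* declarations.
struct UntaggedObject {};
struct UntaggedInstance : UntaggedObject {};
struct UntaggedContext : UntaggedObject {};
struct UntaggedClosure : UntaggedInstance {};
struct UntaggedCode : UntaggedObject {};

template <typename UnderlyingType>
constexpr bool MayNeedWriteBarrierCheck() {
  return std::is_base_of<UntaggedInstance, UnderlyingType>::value ||
         std::is_base_of<UntaggedContext, UnderlyingType>::value;
}

static_assert(MayNeedWriteBarrierCheck<UntaggedClosure>(),
              "closure slots derive from UntaggedInstance");
static_assert(!MayNeedWriteBarrierCheck<UntaggedCode>(),
              "code slots do not");
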
diff --git a/runtime/vm/dart.cc b/runtime/vm/dart.cc
index 3eab3b1..ce1f9bc 100644
--- a/runtime/vm/dart.cc
+++ b/runtime/vm/dart.cc
@@ -220,7 +220,7 @@
     return Utils::StrDup("--causal-async-stacks is deprecated!");
   }
 
-  FrameLayout::Init();
+  UntaggedFrame::Init();
 
   set_thread_exit_callback(thread_exit);
   SetFileCallbacks(file_open, file_read, file_write, file_close);
@@ -702,7 +702,7 @@
   Error& error = Error::Handle(T->zone());
   error = Object::Init(IG, kernel_buffer, kernel_buffer_size);
   if (!error.IsNull()) {
-    return error.raw();
+    return error.ptr();
   }
   if ((snapshot_data != NULL) && kernel_buffer == NULL) {
     // Read the snapshot and setup the initial state.
@@ -729,7 +729,7 @@
     FullSnapshotReader reader(snapshot, snapshot_instructions, T);
     const Error& error = Error::Handle(reader.ReadProgramSnapshot());
     if (!error.IsNull()) {
-      return error.raw();
+      return error.ptr();
     }
 
     {
@@ -900,7 +900,7 @@
         InitIsolateFromSnapshot(T, I, snapshot_data, snapshot_instructions,
                                 kernel_buffer, kernel_buffer_size));
     if (!error.IsNull()) {
-      return error.raw();
+      return error.ptr();
     }
   }
 
@@ -937,11 +937,11 @@
     Error& error = Error::Handle();
     error ^= IG->object_store()->PreallocateObjects();
     if (!error.IsNull()) {
-      return error.raw();
+      return error.ptr();
     }
     error ^= I->isolate_object_store()->PreallocateObjects();
     if (!error.IsNull()) {
-      return error.raw();
+      return error.ptr();
     }
   }
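
The raw()-to-ptr() changes here and below follow one naming scheme: handles expose ptr() (formerly raw()) for the tagged pointer, and tagged pointers expose untag() (formerly ptr()) for the Untagged* layout. An illustrative sketch, not the VM's declarations, assuming a heap-object tag of 1:

#include <cstdint>

struct UntaggedObject {
  uint32_t tags_;  // header word; real layout elided
};

class ObjectPtr {
 public:
  explicit ObjectPtr(uintptr_t tagged) : tagged_(tagged) {}
  // Formerly ptr(): strip the heap-object tag (assumed to be 1) to reach
  // the untagged header.
  UntaggedObject* untag() const {
    return reinterpret_cast<UntaggedObject*>(tagged_ - 1);
  }
 private:
  uintptr_t tagged_;
};

class Handle {
 public:
  ObjectPtr ptr() const { return ptr_; }           // formerly raw()
  void set_ptr(ObjectPtr value) { ptr_ = value; }  // formerly set_raw()
 private:
  ObjectPtr ptr_{0};
};
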
 
diff --git a/runtime/vm/dart_api_impl.cc b/runtime/vm/dart_api_impl.cc
index 7566ee6..35356fa 100644
--- a/runtime/vm/dart_api_impl.cc
+++ b/runtime/vm/dart_api_impl.cc
@@ -151,7 +151,7 @@
     if (Class::IsSubtypeOf(obj_class, Object::null_type_arguments(),
                            Nullability::kNonNullable, list_rare_type,
                            Heap::kNew)) {
-      return instance.raw();
+      return instance.ptr();
     }
   }
   return Instance::null();
@@ -168,7 +168,7 @@
     if (Class::IsSubtypeOf(obj_class, Object::null_type_arguments(),
                            Nullability::kNonNullable, map_rare_type,
                            Heap::kNew)) {
-      return instance.raw();
+      return instance.ptr();
     }
   }
   return Instance::null();
@@ -205,7 +205,7 @@
   obj = arguments->NativeArgAt(arg_index);
   if (IsStringClassId(obj.GetClassId())) {
     ASSERT(thread->api_top_scope() != NULL);
-    *str = Api::NewHandle(thread, obj.raw());
+    *str = Api::NewHandle(thread, obj.ptr());
     return true;
   }
   if (obj.IsNull()) {
@@ -331,7 +331,7 @@
   LocalHandles* local_handles = Api::TopScope(thread)->local_handles();
   ASSERT(local_handles != NULL);
   LocalHandle* ref = local_handles->AllocateHandle();
-  ref->set_raw(raw);
+  ref->set_ptr(raw);
   return ref->apiHandle();
 }
 
@@ -339,10 +339,10 @@
   if (raw == Object::null()) {
     return Null();
   }
-  if (raw == Bool::True().raw()) {
+  if (raw == Bool::True().ptr()) {
     return True();
   }
-  if (raw == Bool::False().raw()) {
+  if (raw == Bool::False().ptr()) {
     return False();
   }
   ASSERT(thread->execution_state() == Thread::kThreadInVM);
@@ -359,10 +359,10 @@
          thread->isolate()->group()->api_state()->IsActivePersistentHandle(
              reinterpret_cast<Dart_PersistentHandle>(object)) ||
          Dart::IsReadOnlyApiHandle(object));
-  ASSERT(FinalizablePersistentHandle::raw_offset() == 0 &&
-         PersistentHandle::raw_offset() == 0 && LocalHandle::raw_offset() == 0);
+  ASSERT(FinalizablePersistentHandle::ptr_offset() == 0 &&
+         PersistentHandle::ptr_offset() == 0 && LocalHandle::ptr_offset() == 0);
 #endif
-  return (reinterpret_cast<LocalHandle*>(object))->raw();
+  return (reinterpret_cast<LocalHandle*>(object))->ptr();
 }
 
 #define DEFINE_UNWRAP(type)                                                    \
@@ -460,7 +460,7 @@
   if (!error.IsError()) {
     error = UnhandledException::New(Instance::Cast(error), Instance::Handle());
   }
-  return Api::NewHandle(T, error.raw());
+  return Api::NewHandle(T, error.ptr());
 }
 
 Dart_Handle Api::AcquiredError(IsolateGroup* isolate_group) {
@@ -502,9 +502,9 @@
 }
 
 static Dart_Handle InitNewReadOnlyApiHandle(ObjectPtr raw) {
-  ASSERT(raw->ptr()->InVMIsolateHeap());
+  ASSERT(raw->untag()->InVMIsolateHeap());
   LocalHandle* ref = Dart::AllocateReadOnlyApiHandle();
-  ref->set_raw(raw);
+  ref->set_ptr(raw);
   return ref->apiHandle();
 }
 
@@ -516,16 +516,16 @@
   ASSERT(state != NULL);
 
   ASSERT(true_handle_ == NULL);
-  true_handle_ = InitNewReadOnlyApiHandle(Bool::True().raw());
+  true_handle_ = InitNewReadOnlyApiHandle(Bool::True().ptr());
 
   ASSERT(false_handle_ == NULL);
-  false_handle_ = InitNewReadOnlyApiHandle(Bool::False().raw());
+  false_handle_ = InitNewReadOnlyApiHandle(Bool::False().ptr());
 
   ASSERT(null_handle_ == NULL);
   null_handle_ = InitNewReadOnlyApiHandle(Object::null());
 
   ASSERT(empty_string_handle_ == NULL);
-  empty_string_handle_ = InitNewReadOnlyApiHandle(Symbols::Empty().raw());
+  empty_string_handle_ = InitNewReadOnlyApiHandle(Symbols::Empty().ptr());
 }
 
 void Api::Cleanup() {
@@ -547,7 +547,7 @@
   if (cid == kExternalOneByteStringCid) {
     ExternalOneByteStringPtr raw_string =
         static_cast<ExternalOneByteStringPtr>(raw_obj);
-    *peer = raw_string->ptr()->peer_;
+    *peer = raw_string->untag()->peer_;
     return true;
   }
   if (cid == kOneByteStringCid || cid == kTwoByteStringCid) {
@@ -558,7 +558,7 @@
   if (cid == kExternalTwoByteStringCid) {
     ExternalTwoByteStringPtr raw_string =
         static_cast<ExternalTwoByteStringPtr>(raw_obj);
-    *peer = raw_string->ptr()->peer_;
+    *peer = raw_string->untag()->peer_;
     return true;
   }
   return false;
@@ -572,11 +572,11 @@
     if (cid >= kNumPredefinedCids) {
       ASSERT(Instance::Cast(Object::Handle(raw_obj)).IsValidNativeIndex(0));
       TypedDataPtr native_fields = *reinterpret_cast<TypedDataPtr*>(
-          ObjectLayout::ToAddr(raw_obj) + sizeof(ObjectLayout));
+          UntaggedObject::ToAddr(raw_obj) + sizeof(UntaggedObject));
       if (native_fields == TypedData::null()) {
         *value = 0;
       } else {
-        *value = *bit_cast<intptr_t*, uint8_t*>(native_fields->ptr()->data());
+        *value = *bit_cast<intptr_t*, uint8_t*>(native_fields->untag()->data());
       }
       return true;
     }
@@ -592,7 +592,7 @@
   if (raw_obj->IsHeapObject()) {
     intptr_t cid = raw_obj->GetClassId();
     if (cid == kBoolCid) {
-      *value = (raw_obj == Object::bool_true().raw());
+      *value = (raw_obj == Object::bool_true().ptr());
       return true;
     }
     if (cid == kNullCid) {
@@ -611,7 +611,7 @@
   if (raw_obj->IsHeapObject()) {
     intptr_t cid = raw_obj->GetClassId();
     if (cid == kMintCid) {
-      *value = static_cast<MintPtr>(raw_obj)->ptr()->value_;
+      *value = static_cast<MintPtr>(raw_obj)->untag()->value_;
       return true;
     }
     return false;
@@ -628,12 +628,12 @@
   if (raw_obj->IsHeapObject()) {
     intptr_t cid = raw_obj->GetClassId();
     if (cid == kDoubleCid) {
-      *value = static_cast<DoublePtr>(raw_obj)->ptr()->value_;
+      *value = static_cast<DoublePtr>(raw_obj)->untag()->value_;
       return true;
     }
     if (cid == kMintCid) {
       *value =
-          static_cast<double>(static_cast<MintPtr>(raw_obj)->ptr()->value_);
+          static_cast<double>(static_cast<MintPtr>(raw_obj)->untag()->value_);
       return true;
     }
     return false;
@@ -653,29 +653,29 @@
                              ->isolate_group()
                              ->class_table()
                              ->At(cid)
-                             ->ptr()
+                             ->untag()
                              ->num_native_fields_;
   if (num_fields != class_num_fields) {
     // No native fields or mismatched native field count.
     return false;
   }
   TypedDataPtr native_fields = *reinterpret_cast<TypedDataPtr*>(
-      ObjectLayout::ToAddr(raw_obj) + sizeof(ObjectLayout));
+      UntaggedObject::ToAddr(raw_obj) + sizeof(UntaggedObject));
   if (native_fields == TypedData::null()) {
     // Native fields not initialized.
     memset(field_values, 0, (num_fields * sizeof(field_values[0])));
     return true;
   }
-  ASSERT(class_num_fields == Smi::Value(native_fields->ptr()->length()));
+  ASSERT(class_num_fields == Smi::Value(native_fields->untag()->length()));
   intptr_t* native_values =
-      reinterpret_cast<intptr_t*>(native_fields->ptr()->data());
+      reinterpret_cast<intptr_t*>(native_fields->untag()->data());
   memmove(field_values, native_values, (num_fields * sizeof(field_values[0])));
   return true;
 }
 
 void Api::SetWeakHandleReturnValue(NativeArguments* args,
                                    Dart_WeakPersistentHandle retval) {
-  args->SetReturnUnsafe(FinalizablePersistentHandle::Cast(retval)->raw());
+  args->SetReturnUnsafe(FinalizablePersistentHandle::Cast(retval)->ptr());
 }
 
 PersistentHandle* PersistentHandle::Cast(Dart_PersistentHandle handle) {
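
The native-field reads above recover a TypedData pointer stored directly after the untagged header. A minimal sketch with stand-in types (the real code uses UntaggedObject::ToAddr on a tagged pointer to obtain the start address):

#include <cstdint>

struct UntaggedObject { uint32_t tags_; };  // stand-in header
struct UntaggedTypedData;
using TypedDataPtr = UntaggedTypedData*;    // stand-in for the tagged type

TypedDataPtr GetNativeFieldsArray(uintptr_t object_start_addr) {
  // The native-fields TypedData pointer sits in the first slot after the
  // header, i.e. at start address + sizeof(UntaggedObject).
  return *reinterpret_cast<TypedDataPtr*>(object_start_addr +
                                          sizeof(UntaggedObject));
}
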
@@ -703,7 +703,7 @@
 void FinalizablePersistentHandle::Finalize(
     IsolateGroup* isolate_group,
     FinalizablePersistentHandle* handle) {
-  if (!handle->raw()->IsHeapObject()) {
+  if (!handle->ptr()->IsHeapObject()) {
     return;  // Free handle.
   }
   Dart_HandleFinalizer callback = handle->callback();
@@ -835,7 +835,7 @@
     const Object& excp = Object::Handle(Z, Api::UnwrapHandle(exception));
     obj = String::New(GetErrorString(T, excp));
   } else {
-    obj = Api::UnwrapInstanceHandle(Z, exception).raw();
+    obj = Api::UnwrapInstanceHandle(Z, exception).ptr();
     if (obj.IsNull()) {
       RETURN_TYPE_ERROR(Z, exception, Instance);
     }
@@ -868,7 +868,7 @@
     // that GC won't touch the raw error object before creating a valid
     // handle for it in the surviving zone.
     NoSafepointScope no_safepoint;
-    ErrorPtr raw_error = Api::UnwrapErrorHandle(thread->zone(), handle).raw();
+    ErrorPtr raw_error = Api::UnwrapErrorHandle(thread->zone(), handle).ptr();
     thread->UnwindScopes(thread->top_exit_frame_info());
     // Note that thread's zone is different here than at the beginning of this
     // function.
@@ -882,7 +882,7 @@
   DARTSCOPE(Thread::Current());
   const Object& obj = Object::Handle(Z, Api::UnwrapHandle(object));
   if (obj.IsString()) {
-    return Api::NewHandle(T, obj.raw());
+    return Api::NewHandle(T, obj.ptr());
   } else if (obj.IsInstance()) {
     CHECK_CALLBACK_STATE(T);
     const Instance& receiver = Instance::Cast(obj);
@@ -920,7 +920,7 @@
   TransitionNativeToVM transition(thread);
   NoSafepointScope no_safepoint_scope;
   PersistentHandle* ref = PersistentHandle::Cast(object);
-  return Api::NewHandle(thread, ref->raw());
+  return Api::NewHandle(thread, ref->ptr());
 }
 
 DART_EXPORT Dart_Handle
@@ -937,7 +937,7 @@
   if (weak_ref->IsFinalizedNotFreed()) {
     return Dart_Null();
   }
-  return Api::NewHandle(thread, weak_ref->raw());
+  return Api::NewHandle(thread, weak_ref->ptr());
 }
 
 static Dart_Handle HandleFromFinalizable(Dart_FinalizableHandle object) {
@@ -950,7 +950,7 @@
   NoSafepointScope no_safepoint_scope;
   FinalizablePersistentHandle* weak_ref =
       FinalizablePersistentHandle::Cast(object);
-  return Api::NewHandle(thread, weak_ref->raw());
+  return Api::NewHandle(thread, weak_ref->ptr());
 }
 
 DART_EXPORT Dart_PersistentHandle Dart_NewPersistentHandle(Dart_Handle object) {
@@ -960,7 +960,7 @@
   ASSERT(state != NULL);
   const Object& old_ref = Object::Handle(Z, Api::UnwrapHandle(object));
   PersistentHandle* new_ref = state->AllocatePersistentHandle();
-  new_ref->set_raw(old_ref);
+  new_ref->set_ptr(old_ref);
   return new_ref->apiHandle();
 }
 
@@ -973,7 +973,7 @@
   ASSERT(state->IsValidPersistentHandle(obj1));
   const Object& obj2_ref = Object::Handle(Z, Api::UnwrapHandle(obj2));
   PersistentHandle* obj1_ref = PersistentHandle::Cast(obj1);
-  obj1_ref->set_raw(obj2_ref);
+  obj1_ref->set_ptr(obj2_ref);
 }
 
 static Dart_WeakPersistentHandle AllocateWeakPersistentHandle(
@@ -982,7 +982,7 @@
     void* peer,
     intptr_t external_allocation_size,
     Dart_HandleFinalizer callback) {
-  if (!ref.raw()->IsHeapObject()) {
+  if (!ref.ptr()->IsHeapObject()) {
     return NULL;
   }
   FinalizablePersistentHandle* finalizable_ref =
@@ -1011,7 +1011,7 @@
     void* peer,
     intptr_t external_allocation_size,
     Dart_HandleFinalizer callback) {
-  if (!ref.raw()->IsHeapObject()) {
+  if (!ref.ptr()->IsHeapObject()) {
     return NULL;
   }
 
@@ -1739,15 +1739,15 @@
   NoSafepointScope no_safepoint_scope;
   const Error& error_handle = Api::UnwrapErrorHandle(Z, error);
   if ((isolate->sticky_error() != Error::null()) &&
-      (error_handle.raw() != Object::null())) {
+      (error_handle.ptr() != Object::null())) {
     FATAL1("%s expects there to be no sticky error.", CURRENT_FUNC);
   }
   if (!error_handle.IsUnhandledException() &&
-      (error_handle.raw() != Object::null())) {
+      (error_handle.ptr() != Object::null())) {
     FATAL1("%s expects the error to be an unhandled exception error or null.",
            CURRENT_FUNC);
   }
-  isolate->SetStickyError(error_handle.raw());
+  isolate->SetStickyError(error_handle.ptr());
 }
 
 DART_EXPORT bool Dart_HasStickyError() {
@@ -2039,7 +2039,7 @@
   Object& result =
       Object::Handle(Z, DartLibraryCalls::EnsureScheduleImmediate());
   if (result.IsError()) {
-    return Api::NewHandle(T, result.raw());
+    return Api::NewHandle(T, result.ptr());
   }
 
   // Drain the microtask queue. Propagate any errors to the entry frame.
@@ -2049,7 +2049,7 @@
     const Error* error;
     {
       NoSafepointScope no_safepoint;
-      ErrorPtr raw_error = Error::Cast(result).raw();
+      ErrorPtr raw_error = Error::Cast(result).ptr();
       T->UnwindScopes(T->top_exit_frame_info());
       error = &Error::Handle(T->zone(), raw_error);
     }
@@ -2249,7 +2249,7 @@
     *value = Bool::Cast(result).value();
     return Api::Success();
   } else if (result.IsError()) {
-    return Api::NewHandle(T, result.raw());
+    return Api::NewHandle(T, result.ptr());
   } else {
     return Api::NewError("Expected boolean result from ==");
   }
@@ -2564,7 +2564,7 @@
   if (library.IsNull()) {
     return Dart_Null();
   }
-  return Api::NewHandle(Thread::Current(), library.raw());
+  return Api::NewHandle(Thread::Current(), library.ptr());
 }
 
 // --- Numbers, Integers and Doubles ----
@@ -2771,7 +2771,7 @@
     return Api::NewError("function_name must refer to a static method.");
   }
 
-  if (func.kind() != FunctionLayout::kRegularFunction) {
+  if (func.kind() != UntaggedFunction::kRegularFunction) {
     return Api::NewError(
         "function_name must be the name of a regular function.");
   }
@@ -3046,7 +3046,7 @@
     ASSERT(*peer != NULL);
   } else {
     NoSafepointScope no_safepoint_scope;
-    *peer = thread->heap()->GetPeer(str.raw());
+    *peer = thread->heap()->GetPeer(str.ptr());
   }
   *char_size = str.CharSize();
   *str_len = str.Length();
@@ -3091,7 +3091,7 @@
         Z, TypeArgumentsForElementType(T->isolate_group()->object_store(),
                                        element_type_id)));
   }
-  return Api::NewHandle(T, arr.raw());
+  return Api::NewHandle(T, arr.ptr());
 }
 
 static bool CanTypeContainNull(const Type& type) {
@@ -3152,12 +3152,12 @@
   for (intptr_t i = 0; i < arr.Length(); ++i) {
     arr.SetAt(i, instance);
   }
-  return Api::NewHandle(T, arr.raw());
+  return Api::NewHandle(T, arr.ptr());
 }
 
 #define GET_LIST_LENGTH(zone, type, obj, len)                                  \
   type& array = type::Handle(zone);                                            \
-  array ^= obj.raw();                                                          \
+  array ^= obj.ptr();                                                          \
   *len = array.Length();                                                       \
   return Api::Success();
 
@@ -3216,7 +3216,7 @@
         "Length of List object is greater than the "
         "maximum value that 'len' parameter can hold");
   } else if (retval.IsError()) {
-    return Api::NewHandle(T, retval.raw());
+    return Api::NewHandle(T, retval.ptr());
   } else {
     return Api::NewError("Length of List object is not an integer");
   }
@@ -3391,10 +3391,10 @@
   String& dot_name = String::Handle(String::New("."));
   String& constr_name = String::Handle(String::Concat(class_name, dot_name));
   result = ResolveConstructor(CURRENT_FUNC, cls, class_name, constr_name, 1);
-  if (result.IsError()) return result.raw();
+  if (result.IsError()) return result.ptr();
   ASSERT(result.IsFunction());
   Function& constructor = Function::Handle(zone);
-  constructor ^= result.raw();
+  constructor ^= result.ptr();
   if (!constructor.IsGenerativeConstructor()) {
     const String& message = String::Handle(
         String::NewFormatted("%s: class '%s' is not a constructor.",
@@ -3407,7 +3407,7 @@
   args.SetAt(0, exception);
   args.SetAt(1, String::Handle(String::New(exception_message)));
   result = DartEntry::InvokeFunction(constructor, args);
-  if (result.IsError()) return result.raw();
+  if (result.IsError()) return result.ptr();
   ASSERT(result.IsNull());
 
   if (thread->top_exit_frame_info() == 0) {
@@ -3422,7 +3422,7 @@
   const Instance* saved_exception;
   {
     NoSafepointScope no_safepoint;
-    InstancePtr raw_exception = exception.raw();
+    InstancePtr raw_exception = exception.ptr();
     thread->UnwindScopes(thread->top_exit_frame_info());
     saved_exception = &Instance::Handle(raw_exception);
   }
@@ -3543,7 +3543,7 @@
         args.SetAt(1, intobj);
         result = DartEntry::InvokeFunction(function, args);
         if (result.IsError()) {
-          return Api::NewHandle(T, result.raw());
+          return Api::NewHandle(T, result.ptr());
         }
         if (!result.IsInteger()) {
           return Api::NewError(
@@ -3632,7 +3632,7 @@
         const Object& result =
             Object::Handle(Z, DartEntry::InvokeFunction(function, args));
         if (result.IsError()) {
-          return Api::NewHandle(T, result.raw());
+          return Api::NewHandle(T, result.ptr());
         }
       }
       return Api::Success();
@@ -3686,7 +3686,7 @@
     const Object& iterator = Object::Handle(
         Send0Arg(instance, String::Handle(Z, String::New("get:keys"))));
     if (!iterator.IsInstance()) {
-      return Api::NewHandle(T, iterator.raw());
+      return Api::NewHandle(T, iterator.ptr());
     }
     return Api::NewHandle(T, Send0Arg(Instance::Cast(iterator),
                                       String::Handle(String::New("toList"))));
@@ -3845,7 +3845,7 @@
   // Invoke the constructor and return the new object.
   result = DartEntry::InvokeFunction(factory, args);
   ASSERT(result.IsInstance() || result.IsNull() || result.IsError());
-  return Api::NewHandle(thread, result.raw());
+  return Api::NewHandle(thread, result.ptr());
 }
 
 static Dart_Handle NewTypedData(Thread* thread, intptr_t cid, intptr_t length) {
@@ -3867,7 +3867,7 @@
       Class::Handle(zone, thread->isolate_group()->class_table()->At(cid));
   auto& result = Object::Handle(zone, cls.EnsureIsAllocateFinalized(thread));
   if (result.IsError()) {
-    return Api::NewHandle(thread, result.raw());
+    return Api::NewHandle(thread, result.ptr());
   }
   result = ExternalTypedData::New(cid, reinterpret_cast<uint8_t*>(data), length,
                                   thread->heap()->SpaceForExternal(bytes));
@@ -3875,7 +3875,7 @@
     AllocateFinalizableHandle(thread, result, peer, external_allocation_size,
                               callback);
   }
-  return Api::NewHandle(thread, result.raw());
+  return Api::NewHandle(thread, result.ptr());
 }
 
 static Dart_Handle NewExternalByteData(Thread* thread,
@@ -3915,7 +3915,7 @@
   // Invoke the constructor and return the new object.
   result = DartEntry::InvokeFunction(factory, args);
   ASSERT(result.IsNull() || result.IsInstance() || result.IsError());
-  return Api::NewHandle(thread, result.raw());
+  return Api::NewHandle(thread, result.ptr());
 }
 
 DART_EXPORT Dart_Handle Dart_NewTypedData(Dart_TypedData_Type type,
@@ -4089,7 +4089,7 @@
   // Invoke the factory constructor and return the new object.
   result = DartEntry::InvokeFunction(factory, args);
   ASSERT(result.IsInstance() || result.IsNull() || result.IsError());
-  return Api::NewHandle(T, result.raw());
+  return Api::NewHandle(T, result.ptr());
 }
 
 // Structure to record acquired typed data for verification purposes.
@@ -4195,7 +4195,7 @@
     }
     const Object& obj = Object::Handle(Z, Api::UnwrapHandle(object));
     WeakTable* table = I->group()->api_state()->acquired_table();
-    intptr_t current = table->GetValue(obj.raw());
+    intptr_t current = table->GetValue(obj.ptr());
     if (current != 0) {
       return Api::NewError("Data was already acquired for this object.");
     }
@@ -4203,7 +4203,7 @@
     // data to remain in place, even though the API spec doesn't guarantee it.
     // TODO(koda/asiva): Make final decision and document it.
     AcquiredData* ad = new AcquiredData(data_tmp, size_in_bytes, !external);
-    table->SetValue(obj.raw(), reinterpret_cast<intptr_t>(ad));
+    table->SetValue(obj.ptr(), reinterpret_cast<intptr_t>(ad));
     data_tmp = ad->GetData();
   }
   *data = data_tmp;
@@ -4224,12 +4224,12 @@
   if (FLAG_verify_acquired_data) {
     const Object& obj = Object::Handle(Z, Api::UnwrapHandle(object));
     WeakTable* table = I->group()->api_state()->acquired_table();
-    intptr_t current = table->GetValue(obj.raw());
+    intptr_t current = table->GetValue(obj.ptr());
     if (current == 0) {
       return Api::NewError("Data was not acquired for this object.");
     }
     AcquiredData* ad = reinterpret_cast<AcquiredData*>(current);
-    table->SetValue(obj.raw(), 0);  // Delete entry from table.
+    table->SetValue(obj.ptr(), 0);  // Delete entry from table.
     delete ad;
   }
   return Api::Success();
@@ -4295,7 +4295,7 @@
   }
   ErrorPtr error = constructor.VerifyCallEntryPoint();
   if (error != Error::null()) return error;
-  return constructor.raw();
+  return constructor.ptr();
 }
 
 DART_EXPORT Dart_Handle Dart_New(Dart_Handle type,
@@ -4318,7 +4318,7 @@
     RETURN_TYPE_ERROR(Z, type, Type);
   }
   Type& type_obj = Type::Handle();
-  type_obj ^= unchecked_type.raw();
+  type_obj ^= unchecked_type.ptr();
   if (!type_obj.IsFinalized()) {
     return Api::NewError(
         "%s expects argument 'type' to be a fully resolved type.",
@@ -4336,7 +4336,7 @@
   String& dot_name = String::Handle(Z);
   result = Api::UnwrapHandle(constructor_name);
   if (result.IsNull()) {
-    dot_name = Symbols::Dot().raw();
+    dot_name = Symbols::Dot().ptr();
   } else if (result.IsString()) {
     dot_name = String::Concat(Symbols::Dot(), String::Cast(result));
   } else {
@@ -4349,11 +4349,11 @@
   result = ResolveConstructor("Dart_New", cls, base_constructor_name,
                               constr_name, number_of_arguments);
   if (result.IsError()) {
-    return Api::NewHandle(T, result.raw());
+    return Api::NewHandle(T, result.ptr());
   }
   ASSERT(result.IsFunction());
   Function& constructor = Function::Handle(Z);
-  constructor ^= result.raw();
+  constructor ^= result.ptr();
 
   Instance& new_object = Instance::Handle(Z);
   if (constructor.IsGenerativeConstructor()) {
@@ -4391,7 +4391,7 @@
     argument = Api::UnwrapHandle(arguments[i]);
     if (!argument.IsNull() && !argument.IsInstance()) {
       if (argument.IsError()) {
-        return Api::NewHandle(T, argument.raw());
+        return Api::NewHandle(T, argument.ptr());
       } else {
         return Api::NewError(
             "%s expects arguments[%d] to be an Instance handle.", CURRENT_FUNC,
@@ -4404,23 +4404,23 @@
   // Invoke the constructor and return the new object.
   result = DartEntry::InvokeFunction(constructor, args);
   if (result.IsError()) {
-    return Api::NewHandle(T, result.raw());
+    return Api::NewHandle(T, result.ptr());
   }
 
   if (constructor.IsGenerativeConstructor()) {
     ASSERT(result.IsNull());
   } else {
     ASSERT(result.IsNull() || result.IsInstance());
-    new_object ^= result.raw();
+    new_object ^= result.ptr();
   }
-  return Api::NewHandle(T, new_object.raw());
+  return Api::NewHandle(T, new_object.ptr());
 }
 
 static InstancePtr AllocateObject(Thread* thread, const Class& cls) {
   if (!cls.is_fields_marked_nullable()) {
     // Mark all fields as nullable.
     Zone* zone = thread->zone();
-    Class& iterate_cls = Class::Handle(zone, cls.raw());
+    Class& iterate_cls = Class::Handle(zone, cls.ptr());
     Field& field = Field::Handle(zone);
     Array& fields = Array::Handle(zone);
     while (!iterate_cls.IsNull()) {
@@ -4492,7 +4492,7 @@
   }
   const Instance& instance = Instance::Handle(Z, AllocateObject(T, cls));
   instance.SetNativeFields(num_native_fields, native_fields);
-  return Api::NewHandle(T, instance.raw());
+  return Api::NewHandle(T, instance.ptr());
 }
 
 static Dart_Handle SetupArguments(Thread* thread,
@@ -4509,7 +4509,7 @@
     if (!arg.IsNull() && !arg.IsInstance()) {
       *args = Array::null();
       if (arg.IsError()) {
-        return Api::NewHandle(thread, arg.raw());
+        return Api::NewHandle(thread, arg.ptr());
       } else {
         return Api::NewError(
             "%s expects arguments[%d] to be an Instance handle.", "Dart_Invoke",
@@ -4587,9 +4587,9 @@
       const Object& retval =
           Object::Handle(Z, DartEntry::InvokeFunction(constructor, args));
       if (retval.IsError()) {
-        result = Api::NewHandle(T, retval.raw());
+        result = Api::NewHandle(T, retval.ptr());
       } else {
-        result = Api::NewHandle(T, instance.raw());
+        result = Api::NewHandle(T, instance.ptr());
       }
     }
     return result;
@@ -4607,7 +4607,7 @@
   CHECK_CALLBACK_STATE(T);
 
   String& function_name =
-      String::Handle(Z, Api::UnwrapStringHandle(Z, name).raw());
+      String::Handle(Z, Api::UnwrapStringHandle(Z, name).ptr());
   if (function_name.IsNull()) {
     RETURN_TYPE_ERROR(Z, name, String);
   }
@@ -4652,7 +4652,7 @@
     // receiver is already resolved and finalized, hence it is not necessary
     // to check here.
     Instance& instance = Instance::Handle(Z);
-    instance ^= obj.raw();
+    instance ^= obj.ptr();
 
     // Setup args and check for malformed arguments in the arguments list.
     result = SetupArguments(T, number_of_arguments, arguments, 1, &args);
@@ -4730,7 +4730,7 @@
   CHECK_CALLBACK_STATE(T);
 
   String& field_name =
-      String::Handle(Z, Api::UnwrapStringHandle(Z, name).raw());
+      String::Handle(Z, Api::UnwrapStringHandle(Z, name).ptr());
   if (field_name.IsNull()) {
     RETURN_TYPE_ERROR(Z, name, String);
   }
@@ -4755,7 +4755,7 @@
                             respect_reflectable, check_is_entrypoint));
   } else if (obj.IsNull() || obj.IsInstance()) {
     Instance& instance = Instance::Handle(Z);
-    instance ^= obj.raw();
+    instance ^= obj.ptr();
     if (Library::IsPrivate(field_name)) {
       const Class& cls = Class::Handle(Z, instance.clazz());
       const Library& lib = Library::Handle(Z, cls.library());
@@ -4795,7 +4795,7 @@
   CHECK_CALLBACK_STATE(T);
 
   String& field_name =
-      String::Handle(Z, Api::UnwrapStringHandle(Z, name).raw());
+      String::Handle(Z, Api::UnwrapStringHandle(Z, name).ptr());
   if (field_name.IsNull()) {
     RETURN_TYPE_ERROR(Z, name, String);
   }
@@ -4806,7 +4806,7 @@
     RETURN_TYPE_ERROR(Z, value, Instance);
   }
   Instance& value_instance = Instance::Handle(Z);
-  value_instance ^= value_obj.raw();
+  value_instance ^= value_obj.ptr();
 
   const Object& obj = Object::Handle(Z, Api::UnwrapHandle(container));
   const bool respect_reflectable = false;
@@ -4831,7 +4831,7 @@
                             check_is_entrypoint));
   } else if (obj.IsNull() || obj.IsInstance()) {
     Instance& instance = Instance::Handle(Z);
-    instance ^= obj.raw();
+    instance ^= obj.ptr();
     if (Library::IsPrivate(field_name)) {
       const Class& cls = Class::Handle(Z, instance.clazz());
       const Library& lib = Library::Handle(Z, cls.library());
@@ -4893,7 +4893,7 @@
   {
     NoSafepointScope no_safepoint;
     InstancePtr raw_exception =
-        Api::UnwrapInstanceHandle(zone, exception).raw();
+        Api::UnwrapInstanceHandle(zone, exception).ptr();
     thread->UnwindScopes(thread->top_exit_frame_info());
     saved_exception = &Instance::Handle(raw_exception);
   }
@@ -4931,9 +4931,9 @@
   {
     NoSafepointScope no_safepoint;
     InstancePtr raw_exception =
-        Api::UnwrapInstanceHandle(zone, exception).raw();
+        Api::UnwrapInstanceHandle(zone, exception).ptr();
     StackTracePtr raw_stacktrace =
-        Api::UnwrapStackTraceHandle(zone, stacktrace).raw();
+        Api::UnwrapStackTraceHandle(zone, stacktrace).ptr();
     thread->UnwindScopes(thread->top_exit_frame_info());
     saved_exception = &Instance::Handle(raw_exception);
     saved_stacktrace = &StackTrace::Handle(raw_stacktrace);
@@ -5302,26 +5302,26 @@
     // "_", and the mirrors library, if it is not supported.
 
     if (!FLAG_enable_mirrors && name.Equals(Symbols::DartLibraryMirrors())) {
-      return Symbols::False().raw();
+      return Symbols::False().ptr();
     }
 
     if (!Api::IsFfiEnabled() && name.Equals(Symbols::DartLibraryFfi())) {
-      return Symbols::False().raw();
+      return Symbols::False().ptr();
     }
 
     if (name.Equals(Symbols::DartVMProduct())) {
 #ifdef PRODUCT
-      return Symbols::True().raw();
+      return Symbols::True().ptr();
 #else
-      return Symbols::False().raw();
+      return Symbols::False().ptr();
 #endif
     }
 
     if (name.Equals(Symbols::DartDeveloperTimeline())) {
 #ifdef SUPPORT_TIMELINE
-      return Symbols::True().raw();
+      return Symbols::True().ptr();
 #else
-      return Symbols::False().raw();
+      return Symbols::False().ptr();
 #endif
     }
 
@@ -5337,17 +5337,17 @@
         const Library& library =
             Library::Handle(Library::LookupLibrary(thread, dart_library_name));
         if (!library.IsNull()) {
-          return Symbols::True().raw();
+          return Symbols::True().ptr();
         }
       }
     }
    // Check for default VM-provided values that were not overridden on the
    // command line.
     if (Symbols::DartIsVM().Equals(name)) {
-      return Symbols::True().raw();
+      return Symbols::True().ptr();
     }
   }
-  return result.raw();
+  return result.ptr();
 }
 
 StringPtr Api::CallEnvironmentCallback(Thread* thread, const String& name) {
@@ -5355,7 +5355,7 @@
   Dart_EnvironmentCallback callback = isolate->environment_callback();
   if (callback != NULL) {
     Scope api_scope(thread);
-    Dart_Handle api_name = Api::NewHandle(thread, name.raw());
+    Dart_Handle api_name = Api::NewHandle(thread, name.ptr());
     Dart_Handle api_response;
     {
       TransitionVMToNative transition(thread);
@@ -5364,7 +5364,7 @@
     const Object& response =
         Object::Handle(thread->zone(), Api::UnwrapHandle(api_response));
     if (response.IsString()) {
-      return String::Cast(response).raw();
+      return String::Cast(response).ptr();
     } else if (response.IsError()) {
       Exceptions::ThrowArgumentError(
           String::Handle(String::New(Error::Cast(response).ToErrorCString())));
@@ -5493,7 +5493,7 @@
   program.reset();
 
   if (tmp.IsError()) {
-    return Api::NewHandle(T, tmp.raw());
+    return Api::NewHandle(T, tmp.ptr());
   }
 
   IG->source()->script_kernel_size = buffer_size;
@@ -5506,9 +5506,9 @@
     return Api::NewError("%s: The binary program does not contain 'main'.",
                          CURRENT_FUNC);
   }
-  library ^= tmp.raw();
+  library ^= tmp.ptr();
   IG->object_store()->set_root_library(library);
-  return Api::NewHandle(T, library.raw());
+  return Api::NewHandle(T, library.ptr());
 #endif  // defined(DART_PRECOMPILED_RUNTIME)
 }
 
@@ -5526,7 +5526,7 @@
   const Object& obj = Object::Handle(Z, Api::UnwrapHandle(library));
   if (obj.IsNull() || obj.IsLibrary()) {
     Library& lib = Library::Handle(Z);
-    lib ^= obj.raw();
+    lib ^= obj.ptr();
     T->isolate_group()->object_store()->set_root_library(lib);
     return library;
   }
@@ -5630,7 +5630,7 @@
     type ^= Type::New(cls, type_args_obj, nullability);
   }
   type ^= ClassFinalizer::FinalizeType(type);
-  return Api::NewHandle(T, type.raw());
+  return Api::NewHandle(T, type.ptr());
 }
 
 DART_EXPORT Dart_Handle Dart_GetType(Dart_Handle library,
@@ -5716,7 +5716,7 @@
   }
   const String& url = String::Handle(Z, lib.url());
   ASSERT(!url.IsNull());
-  return Api::NewHandle(T, url.raw());
+  return Api::NewHandle(T, url.ptr());
 }
 
 DART_EXPORT Dart_Handle Dart_LibraryResolvedUrl(Dart_Handle library) {
@@ -5731,7 +5731,7 @@
   ASSERT(!script.IsNull());
   const String& url = String::Handle(script.resolved_url());
   ASSERT(!url.IsNull());
-  return Api::NewHandle(T, url.raw());
+  return Api::NewHandle(T, url.ptr());
 }
 
 DART_EXPORT Dart_Handle Dart_GetLoadedLibraries() {
@@ -5750,7 +5750,7 @@
     ASSERT(!lib.IsNull());
     library_list.SetAt(i, lib);
   }
-  return Api::NewHandle(T, library_list.raw());
+  return Api::NewHandle(T, library_list.ptr());
 }
 
 DART_EXPORT Dart_Handle Dart_LookupLibrary(Dart_Handle url) {
@@ -5765,7 +5765,7 @@
     return Api::NewError("%s: library '%s' not found.", CURRENT_FUNC,
                          url_str.ToCString());
   } else {
-    return Api::NewHandle(T, library.raw());
+    return Api::NewHandle(T, library.ptr());
   }
 }
 
@@ -5817,7 +5817,7 @@
   IsolateGroupSource* source = Isolate::Current()->source();
   source->add_loaded_blob(Z, td);
 
-  return Api::NewHandle(T, result.raw());
+  return Api::NewHandle(T, result.ptr());
 #endif  // defined(DART_PRECOMPILED_RUNTIME)
 }
 
@@ -5949,7 +5949,7 @@
     FullSnapshotReader reader(snapshot, snapshot_instructions, T);
     const Error& error = Error::Handle(reader.ReadUnitSnapshot(unit));
     if (!error.IsNull()) {
-      return Api::NewHandle(T, error.raw());
+      return Api::NewHandle(T, error.ptr());
     }
 
     return Api::NewHandle(T, unit.CompleteLoad(String::Handle(), false));
@@ -6036,7 +6036,7 @@
   }
   {
     NoSafepointScope no_safepoint;
-    ObjectPtr raw_obj = obj.raw();
+    ObjectPtr raw_obj = obj.ptr();
     *peer = thread->heap()->GetPeer(raw_obj);
   }
   return Api::Success();
@@ -6056,7 +6056,7 @@
   }
   {
     NoSafepointScope no_safepoint;
-    ObjectPtr raw_obj = obj.raw();
+    ObjectPtr raw_obj = obj.ptr();
     thread->heap()->SetPeer(raw_obj, peer);
   }
   return Api::Success();
@@ -6467,7 +6467,7 @@
   const Object& error =
       Object::Handle(loader.CompileTrace(buffer, buffer_length));
   if (error.IsError()) {
-    return Api::NewHandle(T, Error::Cast(error).raw());
+    return Api::NewHandle(T, Error::Cast(error).ptr());
   }
   return Api::Success();
 #endif  // defined(DART_PRECOMPILED_RUNTIME)
@@ -6490,7 +6490,7 @@
   TypeFeedbackLoader loader(thread);
   const Object& error = Object::Handle(loader.LoadFeedback(&stream));
   if (error.IsError()) {
-    return Api::NewHandle(T, Error::Cast(error).raw());
+    return Api::NewHandle(T, Error::Cast(error).ptr());
   }
   return Api::Success();
 #endif  // defined(DART_PRECOMPILED_RUNTIME)
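
Every hunk above is the same mechanical rename: .raw() becomes .ptr() on VM handles, with no change in behavior. A minimal sketch of the invoke-and-wrap pattern these call sites share, using only names that appear in this diff (the helper function itself is hypothetical):

// Hypothetical helper: a VM Object& handle yields its tagged ObjectPtr via
// ptr(), and Api::NewHandle wraps error and success results alike; the
// embedder tells them apart with Dart_IsError().
static Dart_Handle InvokeAndWrap(Thread* T,
                                 const Function& function,
                                 const Array& args) {
  const Object& result =
      Object::Handle(DartEntry::InvokeFunction(function, args));
  return Api::NewHandle(T, result.ptr());
}
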
diff --git a/runtime/vm/dart_api_impl_test.cc b/runtime/vm/dart_api_impl_test.cc
index cb554c8..fc4d13a 100644
--- a/runtime/vm/dart_api_impl_test.cc
+++ b/runtime/vm/dart_api_impl_test.cc
@@ -930,7 +930,7 @@
   {
     TransitionNativeToVM transition(thread);
     const Type& null_type_obj = Api::UnwrapTypeHandle(zone, type);
-    EXPECT(null_type_obj.raw() == Type::NullType());
+    EXPECT(null_type_obj.ptr() == Type::NullType());
   }
 
   Dart_Handle instance = Dart_True();
@@ -940,7 +940,7 @@
   {
     TransitionNativeToVM transition(thread);
     const Type& bool_type_obj = Api::UnwrapTypeHandle(zone, type);
-    EXPECT(bool_type_obj.raw() == Type::BoolType());
+    EXPECT(bool_type_obj.ptr() == Type::BoolType());
   }
 
   // Errors propagate.
@@ -2942,7 +2942,7 @@
     HANDLESCOPE(thread);
     String& str1 = String::Handle();
     str1 = String::New("Test String");
-    Dart_Handle ref = Api::NewHandle(thread, str1.raw());
+    Dart_Handle ref = Api::NewHandle(thread, str1.ptr());
     String& str2 = String::Handle();
     str2 ^= Api::UnwrapHandle(ref);
     EXPECT(str1.Equals(str2));
@@ -2996,22 +2996,22 @@
     HANDLESCOPE(thread);
     for (int i = 0; i < 500; i++) {
       String& str = String::Handle();
-      str ^= PersistentHandle::Cast(handles[i])->raw();
+      str ^= PersistentHandle::Cast(handles[i])->ptr();
       EXPECT(str.Equals(kTestString1));
     }
     for (int i = 500; i < 1000; i++) {
       String& str = String::Handle();
-      str ^= PersistentHandle::Cast(handles[i])->raw();
+      str ^= PersistentHandle::Cast(handles[i])->ptr();
       EXPECT(str.Equals(kTestString2));
     }
     for (int i = 1000; i < 1500; i++) {
       String& str = String::Handle();
-      str ^= PersistentHandle::Cast(handles[i])->raw();
+      str ^= PersistentHandle::Cast(handles[i])->ptr();
       EXPECT(str.Equals(kTestString1));
     }
     for (int i = 1500; i < 2000; i++) {
       String& str = String::Handle();
-      str ^= PersistentHandle::Cast(handles[i])->raw();
+      str ^= PersistentHandle::Cast(handles[i])->ptr();
       EXPECT(str.Equals(kTestString2));
     }
   }
@@ -3071,7 +3071,7 @@
     TransitionNativeToVM transition(T);
     HANDLESCOPE(T);
     String& str = String::Handle();
-    str ^= PersistentHandle::Cast(obj)->raw();
+    str ^= PersistentHandle::Cast(obj)->ptr();
     EXPECT(str.Equals(kTestString1));
   }
 
@@ -3082,7 +3082,7 @@
     TransitionNativeToVM transition(T);
     HANDLESCOPE(T);
     String& str = String::Handle();
-    str ^= PersistentHandle::Cast(obj)->raw();
+    str ^= PersistentHandle::Cast(obj)->ptr();
     EXPECT(str.Equals(kTestString2));
   }
 
@@ -5094,7 +5094,7 @@
   // (1 + 2) * kWordSize + size of object header.
   // We check to make sure the instance size computed by the VM matches
   // our expectations.
-  intptr_t header_size = sizeof(ObjectLayout);
+  intptr_t header_size = sizeof(UntaggedObject);
   EXPECT_EQ(
       Utils::RoundUp(((1 + 2) * kWordSize) + header_size, kObjectAlignment),
       cls.host_instance_size());
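
The instance-size expectation above is plain arithmetic once the build constants are fixed. Purely for illustration (none of these sizes are guaranteed by the diff; they are assumed values for a 64-bit build):

// Assumed: kWordSize == 8, kObjectAlignment == 16, sizeof(UntaggedObject) == 16.
// Three words of fields: (1 + 2) * 8 == 24 bytes; 16 + 24 == 40 bytes,
// which rounds up to the next alignment boundary:
intptr_t expected = Utils::RoundUp((1 + 2) * 8 + 16, 16);  // == 48
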
diff --git a/runtime/vm/dart_api_message.cc b/runtime/vm/dart_api_message.cc
index 7c3e3de..73df245 100644
--- a/runtime/vm/dart_api_message.cc
+++ b/runtime/vm/dart_api_message.cc
@@ -195,7 +195,7 @@
   switch (cid) {
     case kOneByteStringCid: {
       OneByteStringPtr raw_str = static_cast<OneByteStringPtr>(raw);
-      const char* str = reinterpret_cast<const char*>(raw_str->ptr()->data());
+      const char* str = reinterpret_cast<const char*>(raw_str->untag()->data());
       ASSERT(str != NULL);
       Dart_CObject* object = NULL;
       for (intptr_t i = 0; i < vm_isolate_references_.length(); i++) {
@@ -331,10 +331,10 @@
 Dart_CObject* ApiMessageReader::CreateDartCObjectString(ObjectPtr raw) {
   ASSERT(IsOneByteStringClassId(raw->GetClassId()));
   OneByteStringPtr raw_str = static_cast<OneByteStringPtr>(raw);
-  intptr_t len = Smi::Value(raw_str->ptr()->length());
+  intptr_t len = Smi::Value(raw_str->untag()->length());
   Dart_CObject* object = AllocateDartCObjectString(len);
   char* p = object->value.as_string;
-  memmove(p, raw_str->ptr()->data(), len);
+  memmove(p, raw_str->untag()->data(), len);
   p[len] = '\0';
   return object;
 }
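
This hunk is the other half of the naming flip: on tagged pointers such as OneByteStringPtr, the old ->ptr() accessor (which stripped the tag to reach the raw object layout) is now ->untag(), freeing ptr() to mean "tagged pointer" on handles. Side by side, with raw as in the hunk above:

// handle.ptr()    : Object& handle   -> tagged ObjectPtr
// tagged->untag() : OneByteStringPtr -> untagged layout with fields
OneByteStringPtr raw_str = static_cast<OneByteStringPtr>(raw);
intptr_t len = Smi::Value(raw_str->untag()->length());  // length is Smi-encoded
const char* bytes = reinterpret_cast<const char*>(raw_str->untag()->data());
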
diff --git a/runtime/vm/dart_api_state.h b/runtime/vm/dart_api_state.h
index 1757cce..0741b90 100644
--- a/runtime/vm/dart_api_state.h
+++ b/runtime/vm/dart_api_state.h
@@ -127,9 +127,9 @@
 class LocalHandle {
  public:
   // Accessors.
-  ObjectPtr raw() const { return raw_; }
-  void set_raw(ObjectPtr raw) { raw_ = raw; }
-  static intptr_t raw_offset() { return OFFSET_OF(LocalHandle, raw_); }
+  ObjectPtr ptr() const { return ptr_; }
+  void set_ptr(ObjectPtr ptr) { ptr_ = ptr; }
+  static intptr_t ptr_offset() { return OFFSET_OF(LocalHandle, ptr_); }
 
   Dart_Handle apiHandle() { return reinterpret_cast<Dart_Handle>(this); }
 
@@ -137,7 +137,7 @@
   LocalHandle() {}
   ~LocalHandle() {}
 
-  ObjectPtr raw_;
+  ObjectPtr ptr_;
   DISALLOW_ALLOCATION();  // Allocated through AllocateHandle methods.
   DISALLOW_COPY_AND_ASSIGN(LocalHandle);
 };
@@ -151,16 +151,17 @@
 class PersistentHandle {
  public:
   // Accessors.
-  ObjectPtr raw() const { return raw_; }
-  void set_raw(ObjectPtr ref) { raw_ = ref; }
-  void set_raw(const LocalHandle& ref) { raw_ = ref.raw(); }
-  void set_raw(const Object& object) { raw_ = object.raw(); }
-  ObjectPtr* raw_addr() { return &raw_; }
+  ObjectPtr ptr() const { return ptr_; }
+  void set_ptr(ObjectPtr ref) { ptr_ = ref; }
+  void set_ptr(const LocalHandle& ref) { ptr_ = ref.ptr(); }
+  void set_ptr(const Object& object) { ptr_ = object.ptr(); }
+  ObjectPtr* raw_addr() { return &ptr_; }
+
   Dart_PersistentHandle apiHandle() {
     return reinterpret_cast<Dart_PersistentHandle>(this);
   }
 
-  static intptr_t raw_offset() { return OFFSET_OF(PersistentHandle, raw_); }
+  static intptr_t ptr_offset() { return OFFSET_OF(PersistentHandle, ptr_); }
 
   static PersistentHandle* Cast(Dart_PersistentHandle handle);
 
@@ -170,18 +171,18 @@
   PersistentHandle() {}
   ~PersistentHandle() {}
 
-  // Overload the raw_ field as a next pointer when adding freed
+  // Overload the ptr_ field as a next pointer when adding freed
   // handles to the free list.
   PersistentHandle* Next() {
-    return reinterpret_cast<PersistentHandle*>(static_cast<uword>(raw_));
+    return reinterpret_cast<PersistentHandle*>(static_cast<uword>(ptr_));
   }
   void SetNext(PersistentHandle* free_list) {
-    raw_ = static_cast<ObjectPtr>(reinterpret_cast<uword>(free_list));
-    ASSERT(!raw_->IsHeapObject());
+    ptr_ = static_cast<ObjectPtr>(reinterpret_cast<uword>(free_list));
+    ASSERT(!ptr_->IsHeapObject());
   }
   void FreeHandle(PersistentHandle* free_list) { SetNext(free_list); }
 
-  ObjectPtr raw_;
+  ObjectPtr ptr_;
   DISALLOW_ALLOCATION();  // Allocated through AllocateHandle methods.
   DISALLOW_COPY_AND_ASSIGN(PersistentHandle);
 };
@@ -198,10 +199,10 @@
                                           bool auto_delete);
 
   // Accessors.
-  ObjectPtr raw() const { return raw_; }
-  ObjectPtr* raw_addr() { return &raw_; }
-  static intptr_t raw_offset() {
-    return OFFSET_OF(FinalizablePersistentHandle, raw_);
+  ObjectPtr ptr() const { return ptr_; }
+  ObjectPtr* ptr_addr() { return &ptr_; }
+  static intptr_t ptr_offset() {
+    return OFFSET_OF(FinalizablePersistentHandle, ptr_);
   }
   void* peer() const { return peer_; }
   Dart_HandleFinalizer callback() const { return callback_; }
@@ -215,7 +216,7 @@
   bool auto_delete() const { return auto_delete_; }
 
   bool IsFinalizedNotFreed() const {
-    return raw_ == static_cast<ObjectPtr>(reinterpret_cast<uword>(this));
+    return ptr_ == static_cast<ObjectPtr>(reinterpret_cast<uword>(this));
   }
 
   intptr_t external_size() const {
@@ -287,21 +288,21 @@
   friend class FinalizablePersistentHandles;
 
   FinalizablePersistentHandle()
-      : raw_(nullptr), peer_(NULL), external_data_(0), callback_(NULL) {}
+      : ptr_(nullptr), peer_(NULL), external_data_(0), callback_(NULL) {}
   ~FinalizablePersistentHandle() {}
 
   static void Finalize(IsolateGroup* isolate_group,
                        FinalizablePersistentHandle* handle);
 
-  // Overload the raw_ field as a next pointer when adding freed
+  // Overload the ptr_ field as a next pointer when adding freed
   // handles to the free list.
   FinalizablePersistentHandle* Next() {
     return reinterpret_cast<FinalizablePersistentHandle*>(
-        static_cast<uword>(raw_));
+        static_cast<uword>(ptr_));
   }
   void SetNext(FinalizablePersistentHandle* free_list) {
-    raw_ = static_cast<ObjectPtr>(reinterpret_cast<uword>(free_list));
-    ASSERT(!raw_->IsHeapObject());
+    ptr_ = static_cast<ObjectPtr>(reinterpret_cast<uword>(free_list));
+    ASSERT(!ptr_->IsHeapObject());
   }
 
   void SetFinalizedNotFreed() {
@@ -315,16 +316,16 @@
   }
 
   void Clear() {
-    raw_ = Object::null();
+    ptr_ = Object::null();
     peer_ = nullptr;
     external_data_ = 0;
     callback_ = nullptr;
     auto_delete_ = false;
   }
 
-  void set_raw(ObjectPtr raw) { raw_ = raw; }
-  void set_raw(const LocalHandle& ref) { raw_ = ref.raw(); }
-  void set_raw(const Object& object) { raw_ = object.raw(); }
+  void set_ptr(ObjectPtr raw) { ptr_ = raw; }
+  void set_ptr(const LocalHandle& ref) { ptr_ = ref.ptr(); }
+  void set_ptr(const Object& object) { ptr_ = object.ptr(); }
 
   void set_peer(void* peer) { peer_ = peer; }
 
@@ -354,10 +355,10 @@
   // Returns the space to charge for the external size.
   Heap::Space SpaceForExternal() const {
     // Non-heap and VM-heap objects count as old space here.
-    return raw_->IsSmiOrOldObject() ? Heap::kOld : Heap::kNew;
+    return ptr_->IsSmiOrOldObject() ? Heap::kOld : Heap::kNew;
   }
 
-  ObjectPtr raw_;
+  ObjectPtr ptr_;
   void* peer_;
   uword external_data_;
   Dart_HandleFinalizer callback_;
@@ -483,7 +484,7 @@
     } else {
       handle = reinterpret_cast<PersistentHandle*>(AllocateScopedHandle());
     }
-    handle->set_raw(Object::null());
+    handle->set_ptr(Object::null());
     return handle;
   }
 
@@ -561,7 +562,7 @@
     if (free_list_ != NULL) {
       handle = free_list_;
       free_list_ = handle->Next();
-      handle->set_raw(Object::null());
+      handle->set_ptr(Object::null());
       return handle;
     }
 
@@ -828,7 +829,7 @@
     MutexLocker ml(&mutex_);
     if (acquired_error_ == nullptr) {
       acquired_error_ = persistent_handles_.AllocateHandle();
-      acquired_error_->set_raw(ApiError::typed_data_acquire_error());
+      acquired_error_->set_ptr(ApiError::typed_data_acquire_error());
     }
     return acquired_error_;
   }
@@ -873,7 +874,7 @@
   ApiState* state = isolate_group->api_state();
   ASSERT(state != NULL);
   FinalizablePersistentHandle* ref = state->AllocateWeakPersistentHandle();
-  ref->set_raw(object);
+  ref->set_ptr(object);
   ref->set_peer(peer);
   ref->set_callback(callback);
   ref->set_auto_delete(auto_delete);
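
One detail worth noting in dart_api_state.h: freed handles reuse the renamed ptr_ slot as their free-list link. The stored value is a reinterpret_cast of an aligned C++ pointer, so its low bit is clear, it looks like a Smi, ASSERT(!ptr_->IsHeapObject()) holds, and the GC never traces a dead handle. A hypothetical sketch of the push side (FreeHandle and SetNext are the members shown above; access control aside, this runs from friend classes in the VM):

static void FreeIntoList(PersistentHandle* handle,
                         PersistentHandle** free_list_head) {
  handle->FreeHandle(*free_list_head);  // stores the old head into ptr_
  *free_list_head = handle;             // the dead handle becomes the new head
}
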
diff --git a/runtime/vm/dart_entry.cc b/runtime/vm/dart_entry.cc
index 0d01009..4f201b3 100644
--- a/runtime/vm/dart_entry.cc
+++ b/runtime/vm/dart_entry.cc
@@ -134,7 +134,7 @@
     const Object& result =
         Object::Handle(zone, Compiler::CompileFunction(thread, function));
     if (result.IsError()) {
-      return Error::Cast(result).raw();
+      return Error::Cast(result).ptr();
     }
 
     // At this point we should have native code.
@@ -211,7 +211,7 @@
       }
 
       if (matches) {
-        return function.raw();
+        return function.ptr();
       }
     }
 
@@ -239,7 +239,7 @@
     const Object& getter_result = Object::Handle(
         zone, DartEntry::InvokeFunction(function, getter_arguments));
     if (getter_result.IsError()) {
-      return getter_result.raw();
+      return getter_result.ptr();
     }
     ASSERT(getter_result.IsNull() || getter_result.IsInstance());
 
@@ -299,11 +299,11 @@
   const Object& resolved_result = Object::Handle(
       zone, ResolveCallable(thread, arguments, arguments_descriptor));
   if (resolved_result.IsError()) {
-    return resolved_result.raw();
+    return resolved_result.ptr();
   }
 
   const auto& function =
-      Function::Handle(zone, Function::RawCast(resolved_result.raw()));
+      Function::Handle(zone, Function::RawCast(resolved_result.ptr()));
   return InvokeCallable(thread, function, arguments, arguments_descriptor);
 }
 
@@ -314,7 +314,7 @@
                                         const Array& arguments_descriptor) {
   auto const zone = thread->zone();
   const ArgumentsDescriptor args_desc(arguments_descriptor);
-  ASSERT(receiver.raw() == arguments.At(args_desc.FirstArgIndex()));
+  ASSERT(receiver.ptr() == arguments.At(args_desc.FirstArgIndex()));
   // Allocate an Invocation object.
   const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
 
@@ -389,7 +389,7 @@
       kFirstNamedEntryIndex + (index * kNamedEntrySize) + kNameOffset;
   String& result = String::Handle();
   result ^= array_.At(offset);
-  return result.raw();
+  return result.ptr();
 }
 
 intptr_t ArgumentsDescriptor::PositionAt(intptr_t index) const {
@@ -400,7 +400,7 @@
 
 bool ArgumentsDescriptor::MatchesNameAt(intptr_t index,
                                         const String& other) const {
-  return NameAt(index) == other.raw();
+  return NameAt(index) == other.ptr();
 }
 
 ArrayPtr ArgumentsDescriptor::GetArgumentNames() const {
@@ -420,7 +420,7 @@
     ASSERT(names.At(index) == Object::null());
     names.SetAt(index, name);
   }
-  return names.raw();
+  return names.ptr();
 }
 
 void ArgumentsDescriptor::PrintTo(BaseTextBuffer* buffer) const {
@@ -516,7 +516,7 @@
   descriptor.MakeImmutable();
   descriptor ^= descriptor.Canonicalize(thread);
   ASSERT(!descriptor.IsNull());
-  return descriptor.raw();
+  return descriptor.ptr();
 }
 
 ArrayPtr ArgumentsDescriptor::New(intptr_t type_args_len,
@@ -571,7 +571,7 @@
     descriptor ^= descriptor.Canonicalize(thread);
   }
   ASSERT(!descriptor.IsNull());
-  return descriptor.raw();
+  return descriptor.ptr();
 }
 
 void ArgumentsDescriptor::Init() {
@@ -615,9 +615,9 @@
       DartEntry::InvokeFunction(constructor, constructor_arguments));
   ASSERT(retval.IsNull() || retval.IsError());
   if (retval.IsError()) {
-    return retval.raw();
+    return retval.ptr();
   }
-  return exception_object.raw();
+  return exception_object.ptr();
 }
 
 ObjectPtr DartLibraryCalls::ToString(const Instance& receiver) {
@@ -636,7 +636,7 @@
   const Object& result =
       Object::Handle(DartEntry::InvokeFunction(function, args));
   ASSERT(result.IsInstance() || result.IsError());
-  return result.raw();
+  return result.ptr();
 }
 
 ObjectPtr DartLibraryCalls::HashCode(const Instance& receiver) {
@@ -652,7 +652,7 @@
   const Object& result =
       Object::Handle(DartEntry::InvokeFunction(function, args));
   ASSERT(result.IsInstance() || result.IsError());
-  return result.raw();
+  return result.ptr();
 }
 
 ObjectPtr DartLibraryCalls::Equals(const Instance& left,
@@ -671,10 +671,10 @@
   const Object& result =
       Object::Handle(DartEntry::InvokeFunction(function, args));
   ASSERT(result.IsInstance() || result.IsError());
-  return result.raw();
+  return result.ptr();
 }
 
-// On success, returns a RawInstance.  On failure, a RawError.
+// On success, returns an InstancePtr.  On failure, an ErrorPtr.
 ObjectPtr DartLibraryCalls::IdentityHashCode(const Instance& object) {
   const int kNumArguments = 1;
   Thread* thread = Thread::Current();
@@ -689,7 +689,7 @@
   const Object& result =
       Object::Handle(zone, DartEntry::InvokeFunction(function, args));
   ASSERT(result.IsInstance() || result.IsError());
-  return result.raw();
+  return result.ptr();
 }
 
 ObjectPtr DartLibraryCalls::LookupHandler(Dart_Port port_id) {
@@ -721,7 +721,7 @@
   args.SetAt(0, Integer::Handle(zone, Integer::New(port_id)));
   const Object& result =
       Object::Handle(zone, DartEntry::InvokeFunction(function, args));
-  return result.raw();
+  return result.ptr();
 }
 
 ObjectPtr DartLibraryCalls::LookupOpenPorts() {
@@ -746,7 +746,7 @@
   }
   const Object& result = Object::Handle(
       zone, DartEntry::InvokeFunction(function, Object::empty_array()));
-  return result.raw();
+  return result.ptr();
 }
 
 ObjectPtr DartLibraryCalls::HandleMessage(const Object& handler,
@@ -791,7 +791,7 @@
   const Object& result =
       Object::Handle(zone, DartEntry::InvokeFunction(function, args));
   ASSERT(result.IsNull() || result.IsError());
-  return result.raw();
+  return result.ptr();
 }
 
 ObjectPtr DartLibraryCalls::DrainMicrotaskQueue() {
@@ -804,7 +804,7 @@
   const Object& result = Object::Handle(
       zone, DartEntry::InvokeFunction(function, Object::empty_array()));
   ASSERT(result.IsNull() || result.IsError());
-  return result.raw();
+  return result.ptr();
 }
 
 ObjectPtr DartLibraryCalls::EnsureScheduleImmediate() {
@@ -818,7 +818,7 @@
   const Object& result = Object::Handle(
       zone, DartEntry::InvokeFunction(function, Object::empty_array()));
   ASSERT(result.IsNull() || result.IsError());
-  return result.raw();
+  return result.ptr();
 }
 
 ObjectPtr DartLibraryCalls::MapSetAt(const Instance& map,
@@ -837,7 +837,7 @@
   args.SetAt(2, value);
   const Object& result =
       Object::Handle(DartEntry::InvokeFunction(function, args));
-  return result.raw();
+  return result.ptr();
 }
 
 }  // namespace dart
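
A small point about MatchesNameAt above: it compares NameAt(index) == other.ptr(), i.e. pointer identity on StringPtr. That is only a valid equality test because argument names in a descriptor are canonical Symbols, so equal names are the same object. A hedged usage sketch (the helper is hypothetical; NamedCount is the descriptor's named-entry count accessor):

static bool HasNamedArgument(const ArgumentsDescriptor& desc,
                             const String& name) {
  for (intptr_t i = 0; i < desc.NamedCount(); i++) {
    if (desc.MatchesNameAt(i, name)) return true;  // identity, not deep compare
  }
  return false;
}
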
diff --git a/runtime/vm/dart_entry.h b/runtime/vm/dart_entry.h
index c85f683..cfd0c06 100644
--- a/runtime/vm/dart_entry.h
+++ b/runtime/vm/dart_entry.h
@@ -185,21 +185,21 @@
  public:
   // Invokes the specified instance function or static function.
   // The first argument of an instance function is the receiver.
-  // On success, returns a RawInstance.  On failure, a RawError.
+  // On success, returns an InstancePtr.  On failure, an ErrorPtr.
   // This is used when there is no type argument vector and
   // no named arguments in the call.
   static ObjectPtr InvokeFunction(const Function& function,
                                   const Array& arguments);
 
   // Invokes the specified code as if it was a Dart function.
-  // On success, returns a RawInstance.  On failure, a RawError.
+  // On success, returns an InstancePtr.  On failure, an ErrorPtr.
   static ObjectPtr InvokeCode(const Code& code,
                               const Array& arguments_descriptor,
                               const Array& arguments,
                               Thread* thread);
 
   // Invokes the specified instance, static, or closure function.
-  // On success, returns a RawInstance.  On failure, a RawError.
+  // On success, returns an InstancePtr.  On failure, an ErrorPtr.
   static ObjectPtr InvokeFunction(
       const Function& function,
       const Array& arguments,
@@ -210,7 +210,7 @@
   // object, performing any needed dynamic checks if the callable cannot receive
   // dynamic invocation.
   //
-  // On success, returns a RawInstance.  On failure, a RawError.
+  // On success, returns an InstancePtr.  On failure, an ErrorPtr.
   //
   // Used when an ArgumentsDescriptor is not required, that is, when there
   // are no type arguments or named arguments.
@@ -220,13 +220,13 @@
   // object, performing any needed dynamic checks if the callable cannot receive
   // dynamic invocation.
   //
-  // On success, returns a RawInstance.  On failure, a RawError.
+  // On success, returns an InstancePtr.  On failure, an ErrorPtr.
   static ObjectPtr InvokeClosure(Thread* thread,
                                  const Array& arguments,
                                  const Array& arguments_descriptor);
 
   // Invokes the noSuchMethod instance function on the receiver.
-  // On success, returns a RawInstance.  On failure, a RawError.
+  // On success, returns an InstancePtr.  On failure, an ErrorPtr.
   static ObjectPtr InvokeNoSuchMethod(Thread* thread,
                                       const Instance& receiver,
                                       const String& target_name,
@@ -241,7 +241,7 @@
   // callable or, if Function::null() is returned, an appropriate target for
   // invoking noSuchMethod.
   //
-  // On success, returns a RawFunction. On failure, a RawError.
+  // On success, returns a FunctionPtr. On failure, an ErrorPtr.
   static ObjectPtr ResolveCallable(Thread* thread,
                                    const Array& arguments,
                                    const Array& arguments_descriptor);
@@ -250,7 +250,7 @@
   // checks needed if the function cannot receive dynamic invocation. Helper
   // method used within InvokeClosure.
   //
-  // On success, returns a RawInstance. On failure, a RawError.
+  // On success, returns an InstancePtr. On failure, an ErrorPtr.
   static ObjectPtr InvokeCallable(Thread* thread,
                                   const Function& callable_function,
                                   const Array& arguments,
@@ -261,22 +261,22 @@
 // Each may return an exception object.
 class DartLibraryCalls : public AllStatic {
  public:
-  // On success, returns a RawInstance.  On failure, a RawError.
+  // On success, returns an InstancePtr. On failure, an ErrorPtr.
   static ObjectPtr InstanceCreate(const Library& library,
                                   const String& exception_name,
                                   const String& constructor_name,
                                   const Array& arguments);
 
-  // On success, returns a RawInstance.  On failure, a RawError.
+  // On success, returns an InstancePtr. On failure, an ErrorPtr.
   static ObjectPtr ToString(const Instance& receiver);
 
-  // On success, returns a RawInstance.  On failure, a RawError.
+  // On success, returns an InstancePtr. On failure, an ErrorPtr.
   static ObjectPtr HashCode(const Instance& receiver);
 
-  // On success, returns a RawInstance.  On failure, a RawError.
+  // On success, returns an InstancePtr. On failure, an ErrorPtr.
   static ObjectPtr Equals(const Instance& left, const Instance& right);
 
-  // On success, returns a RawInstance.  On failure, a RawError.
+  // On success, returns an InstancePtr. On failure, an ErrorPtr.
   static ObjectPtr IdentityHashCode(const Instance& object);
 
   // Returns the handler if one has been registered for this port id.
@@ -285,21 +285,21 @@
   // Returns a list of open ReceivePorts.
   static ObjectPtr LookupOpenPorts();
 
-  // Returns null on success, a RawError on failure.
+  // Returns null on success, an ErrorPtr on failure.
   static ObjectPtr HandleMessage(const Object& handler,
                                  const Instance& dart_message);
 
-  // Returns null on success, a RawError on failure.
+  // Returns null on success, an ErrorPtr on failure.
   static ObjectPtr DrainMicrotaskQueue();
 
   // Ensures that the isolate's _pendingImmediateCallback is set to
   // _startMicrotaskLoop from dart:async.
-  // Returns null on success, a RawError on failure.
+  // Returns null on success, an ErrorPtr on failure.
   static ObjectPtr EnsureScheduleImmediate();
 
   // map[key] = value;
   //
-  // Returns null on success, a RawError on failure.
+  // Returns null on success, an ErrorPtr on failure.
   static ObjectPtr MapSetAt(const Instance& map,
                             const Instance& key,
                             const Instance& value);
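
The comment updates above restate the VM's return convention: these entry points return ObjectPtr statically, and the dynamic type is either the result Instance (or null) or an Error. A caller-side sketch of consuming that convention, mirroring the call sites in dart_entry.cc earlier in this diff (receiver is an assumed local):

const Object& result = Object::Handle(DartLibraryCalls::ToString(receiver));
if (result.IsError()) {
  return Error::Cast(result).ptr();  // propagate the ErrorPtr upward
}
const Instance& text = Instance::Cast(result);  // safe only after the check
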
diff --git a/runtime/vm/debugger.cc b/runtime/vm/debugger.cc
index ce152a5..7f6775d 100644
--- a/runtime/vm/debugger.cc
+++ b/runtime/vm/debugger.cc
@@ -62,7 +62,7 @@
                                        TokenPosition end_token_pos,
                                        intptr_t requested_line_number,
                                        intptr_t requested_column_number)
-    : script_(script.raw()),
+    : script_(script.ptr()),
       url_(script.url()),
       token_pos_(token_pos),
       end_token_pos_(end_token_pos),
@@ -81,7 +81,7 @@
                                        intptr_t requested_line_number,
                                        intptr_t requested_column_number)
     : script_(Script::null()),
-      url_(url.raw()),
+      url_(url.ptr()),
       token_pos_(TokenPosition::kNoSource),
       end_token_pos_(TokenPosition::kNoSource),
       next_(NULL),
@@ -112,7 +112,7 @@
   ASSERT(func.script() == script_);
   ASSERT(token_pos.IsWithin(func.token_pos(), func.end_token_pos()));
   ASSERT(func.is_debuggable());
-  function_ = func.raw();
+  function_ = func.ptr();
   token_pos_ = token_pos;
   end_token_pos_ = token_pos;
   code_token_pos_ = token_pos;
@@ -184,7 +184,7 @@
       fp_(fp),
       sp_(sp),
       ctx_(Context::ZoneHandle()),
-      code_(Code::ZoneHandle(code.raw())),
+      code_(Code::ZoneHandle(code.ptr())),
       function_(Function::ZoneHandle(code.function())),
       live_frame_((kind == kRegular) || (kind == kAsyncActivation)),
       token_pos_initialized_(false),
@@ -194,7 +194,7 @@
       line_number_(-1),
       column_number_(-1),
       context_level_(-1),
-      deopt_frame_(Array::ZoneHandle(deopt_frame.raw())),
+      deopt_frame_(Array::ZoneHandle(deopt_frame.ptr())),
       deopt_frame_offset_(deopt_frame_offset),
       kind_(kind),
       vars_initialized_(false),
@@ -365,7 +365,7 @@
   if (!for_over_await) {
     bpt = breakpoints();
     while (bpt != NULL) {
-      if (bpt->IsPerClosure() && (bpt->closure() == closure.raw())) break;
+      if (bpt->IsPerClosure() && (bpt->closure() == closure.ptr())) break;
       bpt = bpt->next();
     }
   }
@@ -398,21 +398,21 @@
   if (token_pos.IsWithin(func_start, func.end_token_pos()) ||
       func_start.IsWithin(token_pos, end_token_pos)) {
     // Check script equality last because it allocates handles as a side effect.
-    return func.script() == script.raw();
+    return func.script() == script.ptr();
   }
   return false;
 }
 
 static bool IsImplicitFunction(const Function& func) {
   switch (func.kind()) {
-    case FunctionLayout::kImplicitGetter:
-    case FunctionLayout::kImplicitSetter:
-    case FunctionLayout::kImplicitStaticGetter:
-    case FunctionLayout::kFieldInitializer:
-    case FunctionLayout::kMethodExtractor:
-    case FunctionLayout::kNoSuchMethodDispatcher:
-    case FunctionLayout::kInvokeFieldDispatcher:
-    case FunctionLayout::kIrregexpFunction:
+    case UntaggedFunction::kImplicitGetter:
+    case UntaggedFunction::kImplicitSetter:
+    case UntaggedFunction::kImplicitStaticGetter:
+    case UntaggedFunction::kFieldInitializer:
+    case UntaggedFunction::kMethodExtractor:
+    case UntaggedFunction::kNoSuchMethodDispatcher:
+    case UntaggedFunction::kInvokeFieldDispatcher:
+    case UntaggedFunction::kIrregexpFunction:
       return true;
     default:
       if (func.token_pos() == func.end_token_pos()) {
@@ -443,7 +443,7 @@
   }
   CodeBreakpoint* cbpt = code_breakpoints_;
   while (cbpt != NULL) {
-    if (func.raw() == cbpt->function()) {
+    if (func.ptr() == cbpt->function()) {
       return true;
     }
     cbpt = cbpt->next_;
@@ -454,7 +454,7 @@
 bool Debugger::HasBreakpoint(const Code& code) {
   CodeBreakpoint* cbpt = code_breakpoints_;
   while (cbpt != NULL) {
-    if (code.raw() == cbpt->code_) {
+    if (code.ptr() == cbpt->code_) {
       return true;
     }
     cbpt = cbpt->next_;
@@ -535,7 +535,7 @@
     token_pos_initialized_ = true;
     token_pos_ = TokenPosition::kNoSource;
     GetPcDescriptors();
-    PcDescriptors::Iterator iter(pc_desc_, PcDescriptorsLayout::kAnyKind);
+    PcDescriptors::Iterator iter(pc_desc_, UntaggedPcDescriptors::kAnyKind);
     const uword pc_offset = pc_ - code().PayloadStart();
     while (iter.MoveNext()) {
       if (iter.PcOffset() == pc_offset) {
@@ -646,10 +646,10 @@
     // We store the deopt ids as real token positions.
     const auto to_compare = TokenPosition::Deserialize(deopt_id);
     for (intptr_t cur_idx = 0; cur_idx < var_desc_len; cur_idx++) {
-      LocalVarDescriptorsLayout::VarInfo var_info;
+      UntaggedLocalVarDescriptors::VarInfo var_info;
       var_descriptors_.GetInfo(cur_idx, &var_info);
       const int8_t kind = var_info.kind();
-      if ((kind == LocalVarDescriptorsLayout::kContextLevel) &&
+      if ((kind == UntaggedLocalVarDescriptors::kContextLevel) &&
           to_compare.IsWithin(var_info.begin_pos, var_info.end_pos)) {
         context_level_ = var_info.index();
         found = true;
@@ -673,18 +673,18 @@
   intptr_t ctxt_slot = -1;
   intptr_t var_desc_len = var_descriptors_.Length();
   for (intptr_t i = 0; i < var_desc_len; i++) {
-    LocalVarDescriptorsLayout::VarInfo var_info;
+    UntaggedLocalVarDescriptors::VarInfo var_info;
     var_descriptors_.GetInfo(i, &var_info);
-    if (var_descriptors_.GetName(i) == name.raw()) {
+    if (var_descriptors_.GetName(i) == name.ptr()) {
       const int8_t kind = var_info.kind();
       if (!live_frame_) {
-        ASSERT(kind == LocalVarDescriptorsLayout::kContextVar);
+        ASSERT(kind == UntaggedLocalVarDescriptors::kContextVar);
       }
       const auto variable_index = VariableIndex(var_info.index());
-      if (kind == LocalVarDescriptorsLayout::kStackVar) {
+      if (kind == UntaggedLocalVarDescriptors::kStackVar) {
         return GetStackVar(variable_index);
       } else {
-        ASSERT(kind == LocalVarDescriptorsLayout::kContextVar);
+        ASSERT(kind == UntaggedLocalVarDescriptors::kContextVar);
         var_ctxt_level = var_info.scope_id;
         ctxt_slot = variable_index.value();
         break;
@@ -720,7 +720,7 @@
 
     if (!closure.IsNull() && caller_closure_finder->IsRunningAsync(closure)) {
       closure = caller_closure_finder->FindCaller(closure);
-      return closure.raw();
+      return closure.ptr();
     }
   }
 
@@ -778,11 +778,11 @@
   intptr_t var_desc_len = var_descriptors_.Length();
   intptr_t await_jump_var = -1;
   for (intptr_t i = 0; i < var_desc_len; i++) {
-    LocalVarDescriptorsLayout::VarInfo var_info;
+    UntaggedLocalVarDescriptors::VarInfo var_info;
     var_descriptors_.GetInfo(i, &var_info);
     const int8_t kind = var_info.kind();
-    if (var_descriptors_.GetName(i) == Symbols::AwaitJumpVar().raw()) {
-      ASSERT(kind == LocalVarDescriptorsLayout::kContextVar);
+    if (var_descriptors_.GetName(i) == Symbols::AwaitJumpVar().ptr()) {
+      ASSERT(kind == UntaggedLocalVarDescriptors::kContextVar);
       ASSERT(!ctx_.IsNull());
       var_ctxt_level = var_info.scope_id;
       ctxt_slot = var_info.index();
@@ -814,7 +814,7 @@
   const auto& pc_descriptors =
       PcDescriptors::Handle(zone, code().pc_descriptors());
   ASSERT(!pc_descriptors.IsNull());
-  PcDescriptors::Iterator it(pc_descriptors, PcDescriptorsLayout::kOther);
+  PcDescriptors::Iterator it(pc_descriptors, UntaggedPcDescriptors::kOther);
   while (it.MoveNext()) {
     if (it.YieldIndex() == await_jump_var) {
       try_index_ = it.TryIndex();
@@ -828,7 +828,7 @@
 bool ActivationFrame::IsAsyncMachinery() const {
   ASSERT(!function_.IsNull());
   auto isolate_group = IsolateGroup::Current();
-  if (function_.raw() ==
+  if (function_.ptr() ==
       isolate_group->object_store()->complete_on_async_return()) {
     // We are completing an async function's completer.
     return true;
@@ -848,10 +848,10 @@
   intptr_t var_desc_len = var_descriptors_.Length();
   Object& obj = Object::Handle();
   for (intptr_t i = 0; i < var_desc_len; i++) {
-    LocalVarDescriptorsLayout::VarInfo var_info;
+    UntaggedLocalVarDescriptors::VarInfo var_info;
     var_descriptors_.GetInfo(i, &var_info);
     const int8_t kind = var_info.kind();
-    if (kind == LocalVarDescriptorsLayout::kSavedCurrentContext) {
+    if (kind == UntaggedLocalVarDescriptors::kSavedCurrentContext) {
       if (FLAG_trace_debugger_stacktrace) {
         OS::PrintErr("\tFound saved current ctx at index %d\n",
                      var_info.index());
@@ -859,14 +859,14 @@
       const auto variable_index = VariableIndex(var_info.index());
       obj = GetStackVar(variable_index);
       if (obj.IsClosure()) {
-        ASSERT(function().name() == Symbols::Call().raw());
+        ASSERT(function().name() == Symbols::Call().ptr());
         ASSERT(function().IsInvokeFieldDispatcher());
         // Closure.call frames.
         ctx_ = Closure::Cast(obj).context();
       } else if (obj.IsContext()) {
-        ctx_ = Context::Cast(obj).raw();
+        ctx_ = Context::Cast(obj).ptr();
       } else {
-        ASSERT(obj.IsNull() || obj.raw() == Symbols::OptimizedOut().raw());
+        ASSERT(obj.IsNull() || obj.ptr() == Symbols::OptimizedOut().ptr());
         ctx_ = Context::null();
       }
       return ctx_;
@@ -876,7 +876,7 @@
 }
 
 ObjectPtr ActivationFrame::GetAsyncOperation() {
-  if (function().name() == Symbols::AsyncOperation().raw()) {
+  if (function().name() == Symbols::AsyncOperation().ptr()) {
     return GetParameter(0);
   }
   return Object::null();
@@ -915,17 +915,17 @@
   intptr_t var_desc_len = var_descriptors_.Length();
   for (intptr_t cur_idx = 0; cur_idx < var_desc_len; cur_idx++) {
     ASSERT(var_names.length() == desc_indices_.length());
-    LocalVarDescriptorsLayout::VarInfo var_info;
+    UntaggedLocalVarDescriptors::VarInfo var_info;
     var_descriptors_.GetInfo(cur_idx, &var_info);
     const int8_t kind = var_info.kind();
-    if ((kind != LocalVarDescriptorsLayout::kStackVar) &&
-        (kind != LocalVarDescriptorsLayout::kContextVar)) {
+    if ((kind != UntaggedLocalVarDescriptors::kStackVar) &&
+        (kind != UntaggedLocalVarDescriptors::kContextVar)) {
       continue;
     }
     if (!activation_token_pos.IsWithin(var_info.begin_pos, var_info.end_pos)) {
       continue;
     }
-    if ((kind == LocalVarDescriptorsLayout::kContextVar) &&
+    if ((kind == UntaggedLocalVarDescriptors::kContextVar) &&
         (ContextLevel() < var_info.scope_id)) {
       // The variable is textually in scope but the context level
       // at the activation frame's PC is lower than the context
@@ -946,7 +946,7 @@
         // Found two local variables with the same name. Now determine
         // which one is shadowed.
         name_match_found = true;
-        LocalVarDescriptorsLayout::VarInfo i_var_info;
+        UntaggedLocalVarDescriptors::VarInfo i_var_info;
         var_descriptors_.GetInfo(desc_indices_[i], &i_var_info);
         if (i_var_info.begin_pos < var_info.begin_pos) {
           // The variable we found earlier is in an outer scope
@@ -1025,7 +1025,7 @@
   Object& obj = Object::Handle();
   for (int i = 0; i < deopt_frame_.Length(); i++) {
     obj = deopt_frame_.At(i);
-    if (obj.raw() == Symbols::OptimizedOut().raw()) {
+    if (obj.ptr() == Symbols::OptimizedOut().ptr()) {
       return false;
     }
   }
@@ -1093,7 +1093,7 @@
 
   *name = var_descriptors_.GetName(desc_index);
 
-  LocalVarDescriptorsLayout::VarInfo var_info;
+  UntaggedLocalVarDescriptors::VarInfo var_info;
   var_descriptors_.GetInfo(desc_index, &var_info);
   ASSERT(declaration_token_pos != NULL);
   *declaration_token_pos = var_info.declaration_pos;
@@ -1104,10 +1104,10 @@
   ASSERT(value != NULL);
   const int8_t kind = var_info.kind();
   const auto variable_index = VariableIndex(var_info.index());
-  if (kind == LocalVarDescriptorsLayout::kStackVar) {
+  if (kind == UntaggedLocalVarDescriptors::kStackVar) {
     *value = GetStackVar(variable_index);
   } else {
-    ASSERT(kind == LocalVarDescriptorsLayout::kContextVar);
+    ASSERT(kind == UntaggedLocalVarDescriptors::kContextVar);
     *value = GetContextVar(var_info.scope_id, variable_index.value());
   }
 }
@@ -1128,7 +1128,7 @@
   // It's possible that ctx was optimized out as no locals were captured by the
   // context. See issue #38182.
   if (ctx.IsNull()) {
-    return Symbols::OptimizedOut().raw();
+    return Symbols::OptimizedOut().ptr();
   }
 
   intptr_t level_diff = frame_ctx_level - var_ctx_level;
@@ -1139,7 +1139,7 @@
     ASSERT((ctx_slot >= 0) && (ctx_slot < ctx.num_variables()));
     return ctx.At(ctx_slot);
   } else if (level_diff > 0) {
-    Context& var_ctx = Context::Handle(ctx.raw());
+    Context& var_ctx = Context::Handle(ctx.ptr());
     while (level_diff > 0 && !var_ctx.IsNull()) {
       level_diff--;
       var_ctx = var_ctx.parent();
@@ -1169,7 +1169,7 @@
     list.SetAt(2 * i, var_name);
     list.SetAt((2 * i) + 1, value);
   }
-  return list.raw();
+  return list.ptr();
 }
 
 ObjectPtr ActivationFrame::GetReceiver() {
@@ -1181,10 +1181,10 @@
     TokenPosition ignore = TokenPosition::kNoSource;
     VariableAt(i, &var_name, &ignore, &ignore, &ignore, &value);
     if (var_name.Equals(Symbols::This())) {
-      return value.raw();
+      return value.ptr();
     }
   }
-  return Symbols::OptimizedOut().raw();
+  return Symbols::OptimizedOut().ptr();
 }
 
 static bool IsSyntheticVariableName(const String& var_name) {
@@ -1233,7 +1233,7 @@
     VariableAt(i, &name, &ignore, &ignore, &ignore, &value);
     if (name.Equals(Symbols::FunctionTypeArgumentsVar())) {
       type_arguments_available = true;
-      type_arguments ^= value.raw();
+      type_arguments ^= value.ptr();
     } else if (!name.Equals(Symbols::This()) &&
                !IsSyntheticVariableName(name)) {
       if (IsPrivateVariableName(name)) {
@@ -1264,7 +1264,7 @@
     type_params_names.SetLength(num_vars);
     TypeArguments& type_params = TypeArguments::Handle();
     TypeParameter& type_param = TypeParameter::Handle();
-    Function& current = Function::Handle(function().raw());
+    Function& current = Function::Handle(function().ptr());
     intptr_t mapping_offset = num_vars;
     for (intptr_t i = 0; !current.IsNull(); i += current.NumTypeParameters(),
                   current = current.parent_function()) {
@@ -1294,7 +1294,7 @@
     }
   }
 
-  return type_arguments.raw();
+  return type_arguments.ptr();
 }
 
 const char* ActivationFrame::ToCString() {
@@ -1413,15 +1413,15 @@
                                  ActivationFrame::kAsyncCausal));
 }
 
-const uint8_t kSafepointKind = PcDescriptorsLayout::kIcCall |
-                               PcDescriptorsLayout::kUnoptStaticCall |
-                               PcDescriptorsLayout::kRuntimeCall;
+const uint8_t kSafepointKind = UntaggedPcDescriptors::kIcCall |
+                               UntaggedPcDescriptors::kUnoptStaticCall |
+                               UntaggedPcDescriptors::kRuntimeCall;
 
 CodeBreakpoint::CodeBreakpoint(const Code& code,
                                TokenPosition token_pos,
                                uword pc,
-                               PcDescriptorsLayout::Kind kind)
-    : code_(code.raw()),
+                               UntaggedPcDescriptors::Kind kind)
+    : code_(code.ptr()),
       token_pos_(token_pos),
       pc_(pc),
       line_number_(-1),
@@ -1445,7 +1445,7 @@
   pc_ = 0ul;
   bpt_location_ = NULL;
   next_ = NULL;
-  breakpoint_kind_ = PcDescriptorsLayout::kOther;
+  breakpoint_kind_ = UntaggedPcDescriptors::kOther;
 #endif
 }
 
@@ -1733,7 +1733,7 @@
   isolate->set_deopt_context(NULL);
   delete deopt_context;
 
-  return dest_frame.raw();
+  return dest_frame.ptr();
 }
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
 
@@ -1858,7 +1858,7 @@
   for (intptr_t i = 0; i < length; ++i) {
     code ^= code_array.At(i);
 
-    if (code.raw() == StubCode::AsynchronousGapMarker().raw()) {
+    if (code.ptr() == StubCode::AsynchronousGapMarker().ptr()) {
       stack_trace->AddMarker(ActivationFrame::kAsyncSuspensionMarker);
       // Once we reach a gap, the rest is async.
       async_frames = true;
@@ -2042,7 +2042,7 @@
       break;
     }
 
-    async_activation = Closure::RawCast(next_async_activation.raw());
+    async_activation = Closure::RawCast(next_async_activation.ptr());
   }
 
   return stack_trace;
@@ -2112,11 +2112,11 @@
    // fewer frames than the pre-allocated trace (such as memory exhaustion with
     // a shallow stack).
     if (!code_object.IsNull()) {
-      code ^= code_object.raw();
+      code ^= code_object.ptr();
       ASSERT(code.IsFunctionCode());
       function = code.function();
       if (function.is_visible()) {
-        ASSERT(function.raw() == code.function());
+        ASSERT(function.ptr() == code.function());
         uword pc =
             code.PayloadStart() + Smi::Value(ex_trace.PcOffsetAtFrame(i));
         if (code.is_optimized() && ex_trace.expand_inlined()) {
@@ -2125,7 +2125,7 @@
                it.Advance()) {
             function = it.function();
             code = it.code();
-            ASSERT(function.raw() == code.function());
+            ASSERT(function.ptr() == code.function());
             uword pc = it.pc();
             ASSERT(pc != 0);
             ASSERT(code.PayloadStart() <= pc);
@@ -2446,7 +2446,7 @@
   ASSERT(!code.IsNull());
   PcDescriptors& desc = PcDescriptors::Handle(code.pc_descriptors());
   uword lowest_pc_offset = kUwordMax;
-  PcDescriptorsLayout::Kind lowest_kind = PcDescriptorsLayout::kAnyKind;
+  UntaggedPcDescriptors::Kind lowest_kind = UntaggedPcDescriptors::kAnyKind;
   // Find the safe point with the lowest compiled code address
   // that maps to the token position of the source breakpoint.
   PcDescriptors::Iterator iter(desc, kSafepointKind);
@@ -2492,7 +2492,7 @@
     ASSERT(!function.IsNull());
     if ((function.token_pos() == start_pos) &&
         (function.end_token_pos() == end_pos) &&
-        (function.script() == script.raw())) {
+        (function.script() == script.ptr())) {
       if (function.is_debuggable() && function.HasCode()) {
         code_function_list->Add(function);
       }
@@ -2533,7 +2533,7 @@
           if (function.is_debuggable() && function.HasCode() &&
               function.token_pos() == start_pos &&
               function.end_token_pos() == end_pos &&
-              function.script() == script.raw()) {
+              function.script() == script.ptr()) {
             code_function_list->Add(function);
             function_added = true;
           }
@@ -2551,12 +2551,12 @@
 
 static void UpdateBestFit(Function* best_fit, const Function& func) {
   if (best_fit->IsNull()) {
-    *best_fit = func.raw();
+    *best_fit = func.ptr();
   } else if ((best_fit->token_pos().IsSynthetic() ||
               func.token_pos().IsSynthetic() ||
               (best_fit->token_pos() < func.token_pos())) &&
              (func.end_token_pos() <= best_fit->end_token_pos())) {
-    *best_fit = func.raw();
+    *best_fit = func.ptr();
   }
 }
 
@@ -2587,7 +2587,7 @@
     const Array& scripts = Array::Handle(zone, lib.LoadedScripts());
     bool lib_has_script = false;
     for (intptr_t j = 0; j < scripts.Length(); j++) {
-      if (scripts.At(j) == script.raw()) {
+      if (scripts.At(j) == script.ptr()) {
         lib_has_script = true;
         break;
       }
@@ -2638,7 +2638,7 @@
       cls = class_table.At(cid);
       // This class is relevant to us only if it belongs to the
       // library to which |script| belongs.
-      if (cls.library() != lib.raw()) {
+      if (cls.library() != lib.ptr()) {
         continue;
       }
       // Parse class definition if not done yet.
@@ -2666,7 +2666,7 @@
             // present in the functions of a class. Hence, we can return
             // right away as looking through other functions of a class
             // will not narrow down to any inner function/closure.
-            *best_fit = function.raw();
+            *best_fit = function.ptr();
             return true;
           }
         }
@@ -2684,7 +2684,7 @@
           TokenPosition end = TokenPosition::kNoSource;
           field ^= fields.At(pos);
           ASSERT(!field.IsNull());
-          if (field.Script() != script.raw()) {
+          if (field.Script() != script.ptr()) {
             // The field should be defined in the script we want to set
             // the breakpoint in.
             continue;
@@ -2774,7 +2774,7 @@
     }
     // If func was not set (still Null), the best fit is a field.
   } else {
-    func = function.raw();
+    func = function.ptr();
     if (!func.token_pos().IsReal()) {
       return NULL;  // Missing source positions?
     }
@@ -2916,7 +2916,7 @@
     Breakpoint* bpt = loc->breakpoints();
     while (bpt != NULL) {
       if (bpt->IsPerClosure()) {
-        if (closure.raw() == bpt->closure()) {
+        if (closure.ptr() == bpt->closure()) {
           return bpt;
         }
       }
@@ -2979,8 +2979,8 @@
     script_for_lib = lib.LookupScript(script_url, !is_package);
     if (!script_for_lib.IsNull()) {
       if (script.IsNull()) {
-        script = script_for_lib.raw();
-      } else if (script.raw() != script_for_lib.raw()) {
+        script = script_for_lib.ptr();
+      } else if (script.ptr() != script_for_lib.ptr()) {
         if (FLAG_verbose_debug) {
           OS::PrintErr("Multiple scripts match url '%s'\n",
                        script_url.ToCString());
@@ -3315,13 +3315,14 @@
   uword pc_offset = return_address - code.PayloadStart();
   const PcDescriptors& descriptors =
       PcDescriptors::Handle(code.pc_descriptors());
-  PcDescriptors::Iterator iter(
-      descriptors, PcDescriptorsLayout::kRewind | PcDescriptorsLayout::kIcCall |
-                       PcDescriptorsLayout::kUnoptStaticCall);
+  PcDescriptors::Iterator iter(descriptors,
+                               UntaggedPcDescriptors::kRewind |
+                                   UntaggedPcDescriptors::kIcCall |
+                                   UntaggedPcDescriptors::kUnoptStaticCall);
   intptr_t rewind_deopt_id = -1;
   uword rewind_pc = 0;
   while (iter.MoveNext()) {
-    if (iter.Kind() == PcDescriptorsLayout::kRewind) {
+    if (iter.Kind() == UntaggedPcDescriptors::kRewind) {
       // Remember the last rewind so we don't need to iterate twice.
       rewind_pc = code.PayloadStart() + iter.PcOffset();
       rewind_deopt_id = iter.DeoptId();
@@ -3523,10 +3524,10 @@
       return false;
     }
     const TokenPosition looking_for = top_frame->TokenPos();
-    PcDescriptors::Iterator it(pc_descriptors, PcDescriptorsLayout::kOther);
+    PcDescriptors::Iterator it(pc_descriptors, UntaggedPcDescriptors::kOther);
     while (it.MoveNext()) {
       if (it.TokenPos() == looking_for &&
-          it.YieldIndex() != PcDescriptorsLayout::kInvalidYieldIndex) {
+          it.YieldIndex() != UntaggedPcDescriptors::kInvalidYieldIndex) {
         return true;
       }
     }
@@ -3766,7 +3767,7 @@
       Object& closure = Object::Handle(top_frame->GetClosure());
       ASSERT(closure.IsInstance());
       ASSERT(Instance::Cast(closure).IsClosure());
-      if (closure.raw() == bpt->closure()) {
+      if (closure.ptr() == bpt->closure()) {
         return bpt;
       }
     }
@@ -3834,12 +3835,12 @@
         (!closure_start.IsReal() || !func_start.IsReal() ||
          (closure_start > func_start)) &&
         token_pos.IsWithin(closure_start, closure_end) &&
-        (closure.script() == outer_origin.raw())) {
+        (closure.script() == outer_origin.ptr())) {
       UpdateBestFit(&best_fit, closure);
     }
     return true;  // Continue iteration.
   });
-  return best_fit.raw();
+  return best_fit.ptr();
 }
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
@@ -4222,7 +4223,7 @@
     TokenPosition code_token_pos) {
   BreakpointLocation* loc = breakpoint_locations_;
   while (loc != NULL) {
-    if (loc->script_ == script.raw() &&
+    if (loc->script_ == script.ptr() &&
         (!token_pos.IsReal() || (loc->token_pos_ == token_pos)) &&
         ((requested_line == -1) ||
          (loc->requested_line_number_ == requested_line)) &&
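
Most of the debugger.cc churn is the PcDescriptorsLayout -> UntaggedPcDescriptors rename; the iteration idiom is unchanged. Descriptor kinds are bit flags, which is why kSafepointKind above can OR three call kinds into a single mask. A sketch of the lookup loop this file uses when placing a code breakpoint (descriptors, code, and breakpoint_pos are assumed locals):

// Visit only descriptors whose kind is in the mask; PcOffset() is relative
// to the code payload's start address.
PcDescriptors::Iterator iter(descriptors, kSafepointKind);
while (iter.MoveNext()) {
  if (iter.TokenPos() == breakpoint_pos) {
    uword pc = code.PayloadStart() + iter.PcOffset();
    // ... the lowest such pc wins as the breakpoint address ...
  }
}
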
diff --git a/runtime/vm/debugger.h b/runtime/vm/debugger.h
index 0312163..653c38b 100644
--- a/runtime/vm/debugger.h
+++ b/runtime/vm/debugger.h
@@ -74,7 +74,7 @@
   void SetIsPerClosure(const Instance& closure) {
     ASSERT(kind_ == kNone);
     kind_ = kPerClosure;
-    closure_ = closure.raw();
+    closure_ = closure.ptr();
   }
 
   // Mark that this breakpoint is a result of a step OverAwait request.
@@ -192,7 +192,7 @@
   CodeBreakpoint(const Code& code,
                  TokenPosition token_pos,
                  uword pc,
-                 PcDescriptorsLayout::Kind kind);
+                 UntaggedPcDescriptors::Kind kind);
   ~CodeBreakpoint();
 
   FunctionPtr function() const;
@@ -230,7 +230,7 @@
   BreakpointLocation* bpt_location_;
   CodeBreakpoint* next_;
 
-  PcDescriptorsLayout::Kind breakpoint_kind_;
+  UntaggedPcDescriptors::Kind breakpoint_kind_;
   CodePtr saved_value_;
 
   friend class Debugger;
diff --git a/runtime/vm/debugger_api_impl_test.cc b/runtime/vm/debugger_api_impl_test.cc
index bcd63b0..7d9d0ab 100644
--- a/runtime/vm/debugger_api_impl_test.cc
+++ b/runtime/vm/debugger_api_impl_test.cc
@@ -35,7 +35,7 @@
       return Api::NewError("%s expects argument '%s' to be of type %s.",       \
                            CURRENT_FUNC, #param, #type);                       \
     }                                                                          \
-    var ^= tmp.raw();                                                          \
+    var ^= tmp.ptr();                                                          \
   } while (0)
 
 #define CHECK_AND_CAST(type, var, param)                                       \
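
The mechanical rename running through this patch swaps two accessors: on handles, `.raw()` becomes `.ptr()` and yields the tagged pointer, while on tagged pointers `->ptr()` becomes `->untag()` and yields the view of the object's fields. A minimal standalone sketch of that split, using invented stand-in types (`FooPtr`, `UntaggedFoo`, `FooHandle`) rather than the VM's real classes:

```cpp
// Toy model of the tagged/untagged split; kHeapObjectTag mirrors the VM's
// convention of marking heap pointers in their low bit.
#include <cassert>
#include <cstdint>

static constexpr uintptr_t kHeapObjectTag = 1;

struct UntaggedFoo {  // field view, like the Untagged* classes
  intptr_t field = 0;
};

struct FooPtr {  // tagged pointer, like ObjectPtr
  uintptr_t value;
  UntaggedFoo* untag() const {  // was ->ptr() before this patch
    return reinterpret_cast<UntaggedFoo*>(value - kHeapObjectTag);
  }
};

struct FooHandle {  // GC-safe handle, like Object&
  FooPtr ptr_;
  FooPtr ptr() const { return ptr_; }  // was .raw() before this patch
};

int main() {
  UntaggedFoo storage;
  FooPtr tagged{reinterpret_cast<uintptr_t>(&storage) + kHeapObjectTag};
  FooHandle handle{tagged};
  assert(handle.ptr().value == tagged.value);  // identity compares use ptr()
  handle.ptr().untag()->field = 42;            // field access goes via untag()
  assert(storage.field == 42);
  return 0;
}
```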
diff --git a/runtime/vm/debugger_arm.cc b/runtime/vm/debugger_arm.cc
index 9862592..7fe5605 100644
--- a/runtime/vm/debugger_arm.cc
+++ b/runtime/vm/debugger_arm.cc
@@ -23,14 +23,14 @@
   ASSERT(!is_enabled_);
   Code& stub_target = Code::Handle();
   switch (breakpoint_kind_) {
-    case PcDescriptorsLayout::kIcCall:
-      stub_target = StubCode::ICCallBreakpoint().raw();
+    case UntaggedPcDescriptors::kIcCall:
+      stub_target = StubCode::ICCallBreakpoint().ptr();
       break;
-    case PcDescriptorsLayout::kUnoptStaticCall:
-      stub_target = StubCode::UnoptStaticCallBreakpoint().raw();
+    case UntaggedPcDescriptors::kUnoptStaticCall:
+      stub_target = StubCode::UnoptStaticCallBreakpoint().ptr();
       break;
-    case PcDescriptorsLayout::kRuntimeCall:
-      stub_target = StubCode::RuntimeCallBreakpoint().raw();
+    case UntaggedPcDescriptors::kRuntimeCall:
+      stub_target = StubCode::RuntimeCallBreakpoint().ptr();
       break;
     default:
       UNREACHABLE();
@@ -45,9 +45,9 @@
   ASSERT(is_enabled_);
   const Code& code = Code::Handle(code_);
   switch (breakpoint_kind_) {
-    case PcDescriptorsLayout::kIcCall:
-    case PcDescriptorsLayout::kUnoptStaticCall:
-    case PcDescriptorsLayout::kRuntimeCall: {
+    case UntaggedPcDescriptors::kIcCall:
+    case UntaggedPcDescriptors::kUnoptStaticCall:
+    case UntaggedPcDescriptors::kRuntimeCall: {
       CodePatcher::PatchStaticCallAt(pc_, code, Code::Handle(saved_value_));
       break;
     }
diff --git a/runtime/vm/debugger_arm64.cc b/runtime/vm/debugger_arm64.cc
index dc132f8..d3f251b 100644
--- a/runtime/vm/debugger_arm64.cc
+++ b/runtime/vm/debugger_arm64.cc
@@ -23,20 +23,20 @@
   ASSERT(!is_enabled_);
   const Code& code = Code::Handle(code_);
   switch (breakpoint_kind_) {
-    case PcDescriptorsLayout::kIcCall: {
+    case UntaggedPcDescriptors::kIcCall: {
       Object& data = Object::Handle();
       saved_value_ = CodePatcher::GetInstanceCallAt(pc_, code, &data);
       CodePatcher::PatchInstanceCallAt(pc_, code, data,
                                        StubCode::ICCallBreakpoint());
       break;
     }
-    case PcDescriptorsLayout::kUnoptStaticCall: {
+    case UntaggedPcDescriptors::kUnoptStaticCall: {
       saved_value_ = CodePatcher::GetStaticCallTargetAt(pc_, code);
       CodePatcher::PatchPoolPointerCallAt(
           pc_, code, StubCode::UnoptStaticCallBreakpoint());
       break;
     }
-    case PcDescriptorsLayout::kRuntimeCall: {
+    case UntaggedPcDescriptors::kRuntimeCall: {
       saved_value_ = CodePatcher::GetStaticCallTargetAt(pc_, code);
       CodePatcher::PatchPoolPointerCallAt(pc_, code,
                                           StubCode::RuntimeCallBreakpoint());
@@ -52,15 +52,15 @@
   ASSERT(is_enabled_);
   const Code& code = Code::Handle(code_);
   switch (breakpoint_kind_) {
-    case PcDescriptorsLayout::kIcCall: {
+    case UntaggedPcDescriptors::kIcCall: {
       Object& data = Object::Handle();
       CodePatcher::GetInstanceCallAt(pc_, code, &data);
       CodePatcher::PatchInstanceCallAt(pc_, code, data,
                                        Code::Handle(saved_value_));
       break;
     }
-    case PcDescriptorsLayout::kUnoptStaticCall:
-    case PcDescriptorsLayout::kRuntimeCall: {
+    case UntaggedPcDescriptors::kUnoptStaticCall:
+    case UntaggedPcDescriptors::kRuntimeCall: {
       CodePatcher::PatchPoolPointerCallAt(pc_, code,
                                           Code::Handle(saved_value_));
       break;
diff --git a/runtime/vm/debugger_ia32.cc b/runtime/vm/debugger_ia32.cc
index d580561..f9de84f 100644
--- a/runtime/vm/debugger_ia32.cc
+++ b/runtime/vm/debugger_ia32.cc
@@ -33,17 +33,17 @@
   thread->isolate_group()->RunWithStoppedMutators([&]() {
     WritableInstructionsScope writable(instrs.PayloadStart(), instrs.Size());
     switch (breakpoint_kind_) {
-      case PcDescriptorsLayout::kIcCall: {
-        stub_target = StubCode::ICCallBreakpoint().raw();
+      case UntaggedPcDescriptors::kIcCall: {
+        stub_target = StubCode::ICCallBreakpoint().ptr();
         break;
       }
-      case PcDescriptorsLayout::kUnoptStaticCall: {
-        stub_target = StubCode::UnoptStaticCallBreakpoint().raw();
+      case UntaggedPcDescriptors::kUnoptStaticCall: {
+        stub_target = StubCode::UnoptStaticCallBreakpoint().ptr();
         break;
       }
-      case PcDescriptorsLayout::kRuntimeCall: {
+      case UntaggedPcDescriptors::kRuntimeCall: {
         saved_value_ = CodePatcher::GetStaticCallTargetAt(pc_, code);
-        stub_target = StubCode::RuntimeCallBreakpoint().raw();
+        stub_target = StubCode::RuntimeCallBreakpoint().ptr();
         break;
       }
       default:
@@ -64,9 +64,9 @@
   thread->isolate_group()->RunWithStoppedMutators([&]() {
     WritableInstructionsScope writable(instrs.PayloadStart(), instrs.Size());
     switch (breakpoint_kind_) {
-      case PcDescriptorsLayout::kIcCall:
-      case PcDescriptorsLayout::kUnoptStaticCall:
-      case PcDescriptorsLayout::kRuntimeCall: {
+      case UntaggedPcDescriptors::kIcCall:
+      case UntaggedPcDescriptors::kUnoptStaticCall:
+      case UntaggedPcDescriptors::kRuntimeCall: {
         CodePatcher::PatchStaticCallAt(pc_, code, Code::Handle(saved_value_));
         break;
       }
diff --git a/runtime/vm/debugger_x64.cc b/runtime/vm/debugger_x64.cc
index dfb07df..7a0e046 100644
--- a/runtime/vm/debugger_x64.cc
+++ b/runtime/vm/debugger_x64.cc
@@ -24,14 +24,14 @@
   ASSERT(!is_enabled_);
   Code& stub_target = Code::Handle();
   switch (breakpoint_kind_) {
-    case PcDescriptorsLayout::kIcCall:
-      stub_target = StubCode::ICCallBreakpoint().raw();
+    case UntaggedPcDescriptors::kIcCall:
+      stub_target = StubCode::ICCallBreakpoint().ptr();
       break;
-    case PcDescriptorsLayout::kUnoptStaticCall:
-      stub_target = StubCode::UnoptStaticCallBreakpoint().raw();
+    case UntaggedPcDescriptors::kUnoptStaticCall:
+      stub_target = StubCode::UnoptStaticCallBreakpoint().ptr();
       break;
-    case PcDescriptorsLayout::kRuntimeCall:
-      stub_target = StubCode::RuntimeCallBreakpoint().raw();
+    case UntaggedPcDescriptors::kRuntimeCall:
+      stub_target = StubCode::RuntimeCallBreakpoint().ptr();
       break;
     default:
       UNREACHABLE();
@@ -46,9 +46,9 @@
   ASSERT(is_enabled_);
   const Code& code = Code::Handle(code_);
   switch (breakpoint_kind_) {
-    case PcDescriptorsLayout::kIcCall:
-    case PcDescriptorsLayout::kUnoptStaticCall:
-    case PcDescriptorsLayout::kRuntimeCall: {
+    case UntaggedPcDescriptors::kIcCall:
+    case UntaggedPcDescriptors::kUnoptStaticCall:
+    case UntaggedPcDescriptors::kRuntimeCall: {
       CodePatcher::PatchPoolPointerCallAt(pc_, code,
                                           Code::Handle(saved_value_));
       break;
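
All four architecture-specific debugger files (`debugger_arm.cc`, `debugger_arm64.cc`, `debugger_ia32.cc`, `debugger_x64.cc`) follow the same enable/disable discipline that these hunks rename without changing: record the call target currently installed at the patch site, swap in the matching breakpoint stub, and write the saved target back on disable. A schematic stand-in with invented types; the real `CodePatcher` and `StubCode` interfaces are architecture-specific and not reproduced here:

```cpp
#include <cassert>
#include <cstdint>

// Stand-in for a patchable call site inside generated code.
struct PatchSite {
  uintptr_t target;
};

class ToyCodeBreakpoint {
 public:
  ToyCodeBreakpoint(PatchSite* site, uintptr_t stub)
      : site_(site), stub_(stub) {}

  void Enable() {
    assert(!is_enabled_);
    saved_value_ = site_->target;  // remember the original call target
    site_->target = stub_;         // redirect the site to the breakpoint stub
    is_enabled_ = true;
  }

  void Disable() {
    assert(is_enabled_);
    site_->target = saved_value_;  // put the original target back
    is_enabled_ = false;
  }

 private:
  PatchSite* site_;
  uintptr_t stub_;
  uintptr_t saved_value_ = 0;
  bool is_enabled_ = false;
};

int main() {
  PatchSite site{0x1000};
  ToyCodeBreakpoint bpt(&site, 0x2000);
  bpt.Enable();
  assert(site.target == 0x2000);
  bpt.Disable();
  assert(site.target == 0x1000);
  return 0;
}
```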
diff --git a/runtime/vm/deferred_objects.cc b/runtime/vm/deferred_objects.cc
index 4d52f84..eddfb5e 100644
--- a/runtime/vm/deferred_objects.cc
+++ b/runtime/vm/deferred_objects.cc
@@ -32,7 +32,7 @@
   ASSERT(!Smi::IsValid(value()));
   Mint& mint = Mint::Handle();
   mint ^= Integer::New(value());
-  *mint_slot = mint.raw();
+  *mint_slot = mint.ptr();
 
   if (FLAG_trace_deoptimization_verbose) {
     OS::PrintErr("materializing mint at %" Px ": %" Pd64 "\n",
@@ -46,10 +46,10 @@
   *float32x4_slot = raw_float32x4;
 
   if (FLAG_trace_deoptimization_verbose) {
-    float x = raw_float32x4->ptr()->x();
-    float y = raw_float32x4->ptr()->y();
-    float z = raw_float32x4->ptr()->z();
-    float w = raw_float32x4->ptr()->w();
+    float x = raw_float32x4->untag()->x();
+    float y = raw_float32x4->untag()->y();
+    float z = raw_float32x4->untag()->z();
+    float w = raw_float32x4->untag()->w();
     OS::PrintErr("materializing Float32x4 at %" Px ": %g,%g,%g,%g\n",
                  reinterpret_cast<uword>(slot()), x, y, z, w);
   }
@@ -61,8 +61,8 @@
   *float64x2_slot = raw_float64x2;
 
   if (FLAG_trace_deoptimization_verbose) {
-    double x = raw_float64x2->ptr()->x();
-    double y = raw_float64x2->ptr()->y();
+    double x = raw_float64x2->untag()->x();
+    double y = raw_float64x2->untag()->y();
     OS::PrintErr("materializing Float64x2 at %" Px ": %g,%g\n",
                  reinterpret_cast<uword>(slot()), x, y);
   }
@@ -74,10 +74,10 @@
   *int32x4_slot = raw_int32x4;
 
   if (FLAG_trace_deoptimization_verbose) {
-    uint32_t x = raw_int32x4->ptr()->x();
-    uint32_t y = raw_int32x4->ptr()->y();
-    uint32_t z = raw_int32x4->ptr()->z();
-    uint32_t w = raw_int32x4->ptr()->w();
+    uint32_t x = raw_int32x4->untag()->x();
+    uint32_t y = raw_int32x4->untag()->y();
+    uint32_t z = raw_int32x4->untag()->z();
+    uint32_t w = raw_int32x4->untag()->w();
     OS::PrintErr("materializing Int32x4 at %" Px ": %x,%x,%x,%x\n",
                  reinterpret_cast<uword>(slot()), x, y, z, w);
   }
@@ -107,7 +107,7 @@
   const Code& code = Code::Handle(zone, function.unoptimized_code());
 
   uword continue_at_pc =
-      code.GetPcForDeoptId(deopt_id_, PcDescriptorsLayout::kDeopt);
+      code.GetPcForDeoptId(deopt_id_, UntaggedPcDescriptors::kDeopt);
   if (continue_at_pc == 0) {
     FATAL2("Can't locate continuation PC for deoptid %" Pd " within %s\n",
            deopt_id_, function.ToFullyQualifiedCString());
@@ -120,7 +120,7 @@
                  reinterpret_cast<uword>(slot()), continue_at_pc);
   }
 
-  uword pc = code.GetPcForDeoptId(deopt_id_, PcDescriptorsLayout::kIcCall);
+  uword pc = code.GetPcForDeoptId(deopt_id_, UntaggedPcDescriptors::kIcCall);
   if (pc != 0) {
     // If the deoptimization happened at an IC call, update the IC data
     // to avoid repeated deoptimization at the same site next time around.
@@ -160,7 +160,7 @@
   const Code& code = Code::Handle(zone, function.unoptimized_code());
   ASSERT(!code.IsNull());
   ASSERT(function.HasCode());
-  *reinterpret_cast<ObjectPtr*>(dest_addr) = code.raw();
+  *reinterpret_cast<ObjectPtr*>(dest_addr) = code.ptr();
 
   if (FLAG_trace_deoptimization_verbose) {
     THR_Print("materializing pc marker at 0x%" Px ": %s, %s\n",
@@ -213,7 +213,7 @@
   if (object_ == NULL) {
     Create();
   }
-  return object_->raw();
+  return object_->ptr();
 }
 
 void DeferredObject::Create() {
diff --git a/runtime/vm/deopt_instructions.cc b/runtime/vm/deopt_instructions.cc
index f9e3662..0a63ce4 100644
--- a/runtime/vm/deopt_instructions.cc
+++ b/runtime/vm/deopt_instructions.cc
@@ -33,7 +33,7 @@
                            intptr_t* cpu_registers,
                            bool is_lazy_deopt,
                            bool deoptimizing_code)
-    : code_(code.raw()),
+    : code_(code.ptr()),
       object_pool_(code.GetObjectPool()),
       deopt_info_(TypedData::null()),
       dest_frame_is_allocated_(false),
@@ -64,7 +64,7 @@
   }
 #endif
   ASSERT(!deopt_info.IsNull());
-  deopt_info_ = deopt_info.raw();
+  deopt_info_ = deopt_info.ptr();
 
   const Function& function = Function::Handle(code.function());
 
@@ -405,7 +405,7 @@
     obj = static_cast<ObjectPtr>(dest_frame_[i]);
     dest_array.SetAt(i, obj);
   }
-  return dest_array.raw();
+  return dest_array.ptr();
 }
 
 // Deoptimization instruction creating return address using function and
@@ -475,7 +475,7 @@
   void Execute(DeoptContext* deopt_context, intptr_t* dest_addr) {
     const PassiveObject& obj = PassiveObject::Handle(
         deopt_context->zone(), deopt_context->ObjectAt(object_table_index_));
-    *reinterpret_cast<ObjectPtr*>(dest_addr) = obj.raw();
+    *reinterpret_cast<ObjectPtr*>(dest_addr) = obj.ptr();
   }
 
   CatchEntryMove ToCatchEntryMove(DeoptContext* deopt_context,
@@ -714,8 +714,8 @@
     if (function.IsNull()) {
       *reinterpret_cast<ObjectPtr*>(dest_addr) =
           deopt_context->is_lazy_deopt()
-              ? StubCode::DeoptimizeLazyFromReturn().raw()
-              : StubCode::Deoptimize().raw();
+              ? StubCode::DeoptimizeLazyFromReturn().ptr()
+              : StubCode::Deoptimize().ptr();
       return;
     }
 
@@ -725,7 +725,7 @@
     // materialization to maintain the invariant that Dart frames always have
     // a pc marker.
     *reinterpret_cast<ObjectPtr*>(dest_addr) =
-        StubCode::FrameAwaitingMaterialization().raw();
+        StubCode::FrameAwaitingMaterialization().ptr();
     deopt_context->DeferPcMarkerMaterialization(object_table_index_, dest_addr);
   }
 
@@ -892,7 +892,7 @@
   *code = function.unoptimized_code();
   ASSERT(!code->IsNull());
   uword res = code->GetPcForDeoptId(ret_address_instr->deopt_id(),
-                                    PcDescriptorsLayout::kDeopt);
+                                    UntaggedPcDescriptors::kDeopt);
   ASSERT(res != 0);
   return res;
 }
@@ -1294,7 +1294,7 @@
   frame_start_ = -1;
 
   ++current_info_number_;
-  return deopt_info.raw();
+  return deopt_info.ptr();
 }
 
 intptr_t DeoptTable::SizeFor(intptr_t length) {
diff --git a/runtime/vm/deopt_instructions.h b/runtime/vm/deopt_instructions.h
index dd896cc..cbd15c2 100644
--- a/runtime/vm/deopt_instructions.h
+++ b/runtime/vm/deopt_instructions.h
@@ -389,7 +389,7 @@
 
   RegisterSource(Kind kind, intptr_t index)
       : source_index_(KindField::encode(kind) |
-                      IndexFieldLayout::encode(index)) {}
+                      UntaggedIndexField::encode(index)) {}
 
   template <typename T>
   T Value(DeoptContext* context) const {
@@ -419,13 +419,15 @@
 
  private:
   class KindField : public BitField<intptr_t, intptr_t, 0, 1> {};
-  class IndexFieldLayout
+  class UntaggedIndexField
       : public BitField<intptr_t, intptr_t, 1, kBitsPerWord - 1> {};
 
   bool is_register() const {
     return KindField::decode(source_index_) == kRegister;
   }
-  intptr_t raw_index() const { return IndexFieldLayout::decode(source_index_); }
+  intptr_t raw_index() const {
+    return UntaggedIndexField::decode(source_index_);
+  }
 
   RegisterType reg() const { return static_cast<RegisterType>(raw_index()); }
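
`RegisterSource` packs a one-bit kind and an index into a single word via `BitField`, which is why the scripted `*Layout` → `Untagged*` rename touched `IndexFieldLayout` here even though no object layout is involved. A cut-down reimplementation of the encode/decode arithmetic, with widths shrunk so the shifts stay well-defined in this sketch (the VM's `BitField` template takes the stored and value types as extra parameters):

```cpp
#include <cassert>
#include <cstdint>

template <int position, int size>
struct BitField {
  static constexpr uintptr_t kMaxValue = (uintptr_t{1} << size) - 1;
  static uintptr_t encode(uintptr_t value) {
    assert(value <= kMaxValue);
    return value << position;
  }
  static uintptr_t decode(uintptr_t field) {
    return (field >> position) & kMaxValue;
  }
};

using KindField = BitField<0, 1>;    // register vs. stack slot
using IndexField = BitField<1, 15>;  // remaining bits hold the index

int main() {
  uintptr_t source_index = KindField::encode(1) | IndexField::encode(42);
  assert(KindField::decode(source_index) == 1);
  assert(IndexField::decode(source_index) == 42);
  return 0;
}
```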
 
diff --git a/runtime/vm/dwarf.cc b/runtime/vm/dwarf.cc
index 8df88dd..beee2d1 100644
--- a/runtime/vm/dwarf.cc
+++ b/runtime/vm/dwarf.cc
@@ -134,7 +134,7 @@
   }
 
   // Generate an appropriately zoned ZoneHandle for storing.
-  const auto& code = Code::ZoneHandle(zone_, orig_code.raw());
+  const auto& code = Code::ZoneHandle(zone_, orig_code.ptr());
   codes_.Add(&code);
   // Currently assumes the name has the same lifetime as the Zone of the
   // Dwarf object (which is currently true).  Otherwise, need to copy.
@@ -162,7 +162,7 @@
     return pair->index_;
   }
   intptr_t index = functions_.length();
-  const Function& zone_func = Function::ZoneHandle(zone_, function.raw());
+  const Function& zone_func = Function::ZoneHandle(zone_, function.ptr());
   function_to_index_.Insert(FunctionIndexPair(&zone_func, index));
   functions_.Add(&zone_func);
   const Script& script = Script::Handle(zone_, function.script());
@@ -178,7 +178,7 @@
   }
   // DWARF file numbers start from 1.
   intptr_t index = scripts_.length() + 1;
-  const Script& zone_script = Script::ZoneHandle(zone_, script.raw());
+  const Script& zone_script = Script::ZoneHandle(zone_, script.ptr());
   script_to_index_.Insert(ScriptIndexPair(&zone_script, index));
   scripts_.Add(&zone_script);
   return index;
diff --git a/runtime/vm/dwarf.h b/runtime/vm/dwarf.h
index ef72088..b66788e 100644
--- a/runtime/vm/dwarf.h
+++ b/runtime/vm/dwarf.h
@@ -32,7 +32,7 @@
   }
 
   static inline bool IsKeyEqual(Pair pair, Key key) {
-    return pair.script_->raw() == key->raw();
+    return pair.script_->ptr() == key->ptr();
   }
 
   ScriptIndexPair(const Script* s, intptr_t index) : script_(s), index_(index) {
@@ -63,7 +63,7 @@
   static inline intptr_t Hashcode(Key key) { return key->token_pos().Hash(); }
 
   static inline bool IsKeyEqual(Pair pair, Key key) {
-    return pair.function_->raw() == key->raw();
+    return pair.function_->ptr() == key->ptr();
   }
 
   FunctionIndexPair(const Function* f, intptr_t index)
@@ -116,7 +116,7 @@
 
   static inline bool IsKeyEqual(Pair pair, Key key) {
     // Code objects are always allocated in old space, so they don't move.
-    return pair.code->raw() == key->raw();
+    return pair.code->ptr() == key->ptr();
   }
 };
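
All of these index-pair traits key on pointer identity, now spelled `ptr()` equality, which is sound because the stored handles are zone-allocated and, as the comment notes for `Code`, the referenced objects do not move. The dwarf.cc hunks use the traits for simple intern tables; a toy version of that lookup-or-append shape, with a plain pointer standing in for the handle identity:

```cpp
#include <unordered_map>
#include <vector>

struct Function {  // stand-in for a VM handle
  const void* id;
};

class FunctionTable {
 public:
  // Returns a stable index for `fn`, assigning the next one on first sight.
  int LookupOrAdd(const Function& fn) {
    auto it = index_.find(fn.id);
    if (it != index_.end()) return it->second;
    int index = static_cast<int>(functions_.size());
    functions_.push_back(fn);  // the VM copies the handle into its zone here
    index_.emplace(fn.id, index);
    return index;
  }

 private:
  std::unordered_map<const void*, int> index_;
  std::vector<Function> functions_;
};

int main() {
  FunctionTable table;
  Function f1{reinterpret_cast<const void*>(0x10)};
  Function f2{reinterpret_cast<const void*>(0x20)};
  return table.LookupOrAdd(f1) == 0 && table.LookupOrAdd(f2) == 1 &&
                 table.LookupOrAdd(f1) == 0
             ? 0
             : 1;
}
```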
 
diff --git a/runtime/vm/exceptions.cc b/runtime/vm/exceptions.cc
index c0fc071..0390269 100644
--- a/runtime/vm/exceptions.cc
+++ b/runtime/vm/exceptions.cc
@@ -69,7 +69,7 @@
         cur_index_(0),
         dropped_frames_(0) {
     ASSERT(
-        stacktrace_.raw() ==
+        stacktrace_.ptr() ==
         Isolate::Current()->isolate_object_store()->preallocated_stack_trace());
   }
   ~PreallocatedStackTraceBuilder() {}
@@ -297,7 +297,7 @@
           UNREACHABLE();
       }
 
-      dst_values.Add(&Object::Handle(zone, value.raw()));
+      dst_values.Add(&Object::Handle(zone, value.ptr()));
     }
 
     {
@@ -305,7 +305,7 @@
 
       for (int j = 0; j < moves.count(); j++) {
         const CatchEntryMove& move = moves.At(j);
-        *TaggedSlotAt(fp, move.dest_slot()) = dst_values[j]->raw();
+        *TaggedSlotAt(fp, move.dest_slot()) = dst_values[j]->ptr();
       }
     }
   }
@@ -732,7 +732,7 @@
   Class& test_class = Class::Handle(zone, instance.clazz());
   AbstractType& type = AbstractType::Handle(zone, AbstractType::null());
   while (true) {
-    if (test_class.raw() == error_class.raw()) {
+    if (test_class.ptr() == error_class.ptr()) {
       return error_class.LookupInstanceFieldAllowPrivate(
           Symbols::_stackTrace());
     }
@@ -764,18 +764,18 @@
   // Do not notify debugger on stack overflow and out of memory exceptions.
   // The VM would crash when the debugger calls back into the VM to
   // get values of variables.
-  if (incoming_exception.raw() != object_store->out_of_memory() &&
-      incoming_exception.raw() != object_store->stack_overflow()) {
+  if (incoming_exception.ptr() != object_store->out_of_memory() &&
+      incoming_exception.ptr() != object_store->stack_overflow()) {
     isolate->debugger()->PauseException(incoming_exception);
   }
 #endif
   bool use_preallocated_stacktrace = false;
-  Instance& exception = Instance::Handle(zone, incoming_exception.raw());
+  Instance& exception = Instance::Handle(zone, incoming_exception.ptr());
   if (exception.IsNull()) {
     exception ^=
         Exceptions::Create(Exceptions::kNullThrown, Object::empty_array());
-  } else if (exception.raw() == object_store->out_of_memory() ||
-             exception.raw() == object_store->stack_overflow()) {
+  } else if (exception.ptr() == object_store->out_of_memory() ||
+             exception.ptr() == object_store->stack_overflow()) {
     use_preallocated_stacktrace = true;
   }
   // Find the exception handler and determine if the handler needs a
@@ -790,7 +790,7 @@
   if (use_preallocated_stacktrace) {
     if (handler_pc == 0) {
       // No Dart frame.
-      ASSERT(incoming_exception.raw() == object_store->out_of_memory());
+      ASSERT(incoming_exception.ptr() == object_store->out_of_memory());
       const UnhandledException& error = UnhandledException::Handle(
           zone,
           isolate->isolate_object_store()->preallocated_unhandled_exception());
@@ -800,7 +800,7 @@
     stacktrace = isolate->isolate_object_store()->preallocated_stack_trace();
     PreallocatedStackTraceBuilder frame_builder(stacktrace);
     ASSERT(existing_stacktrace.IsNull() ||
-           (existing_stacktrace.raw() == stacktrace.raw()));
+           (existing_stacktrace.ptr() == stacktrace.ptr()));
     ASSERT(existing_stacktrace.IsNull() || is_rethrow);
     if (handler_needs_stacktrace && existing_stacktrace.IsNull()) {
       BuildStackTrace(&frame_builder);
@@ -811,7 +811,7 @@
       // reverse is not necessarily true (e.g. Dart_PropagateError can cause
       // a rethrow to be called without an existing stacktrace.)
       ASSERT(is_rethrow);
-      stacktrace = existing_stacktrace.raw();
+      stacktrace = existing_stacktrace.ptr();
     } else {
       // Get stacktrace field of class Error to determine whether we have a
       // subclass of Error which carries around its stack trace.
@@ -855,7 +855,7 @@
     // the isolate etc.). This can happen in the compiler, which is not
     // allowed to allocate in new space, so we pass the kOld argument.
     const UnhandledException& unhandled_exception = UnhandledException::Handle(
-        zone, exception.raw() == object_store->out_of_memory()
+        zone, exception.ptr() == object_store->out_of_memory()
                   ? isolate->isolate_object_store()
                         ->preallocated_unhandled_exception()
                   : UnhandledException::New(exception, stacktrace, Heap::kOld));
@@ -910,13 +910,13 @@
   const Array& args = Array::Handle(zone, Array::New(4));
 
   ExceptionType exception_type =
-      (dst_name.raw() == Symbols::InTypeCast().raw()) ? kCast : kType;
+      (dst_name.ptr() == Symbols::InTypeCast().ptr()) ? kCast : kType;
 
   DartFrameIterator iterator(thread,
                              StackFrameIterator::kNoCrossThreadIteration);
   const Script& script = Script::Handle(zone, GetCallerScript(&iterator));
   const String& url = String::Handle(
-      zone, script.IsNull() ? Symbols::OptimizedOut().raw() : script.url());
+      zone, script.IsNull() ? Symbols::OptimizedOut().ptr() : script.url());
   intptr_t line = -1;
   intptr_t column = -1;
   if (!script.IsNull()) {
diff --git a/runtime/vm/exceptions.h b/runtime/vm/exceptions.h
index 20aef13..090615c 100644
--- a/runtime/vm/exceptions.h
+++ b/runtime/vm/exceptions.h
@@ -88,8 +88,8 @@
       const String& name);
   DART_NORETURN static void ThrowLateFieldNotInitialized(const String& name);
 
-  // Returns a RawInstance if the exception is successfully created,
-  // otherwise returns a RawError.
+  // Returns an InstancePtr if the exception is successfully created,
+  // otherwise returns an ErrorPtr.
   static ObjectPtr Create(ExceptionType type, const Array& arguments);
 
   // Returns RawUnhandledException that wraps exception of type [type] with
diff --git a/runtime/vm/field_table.cc b/runtime/vm/field_table.cc
index fe40d10..42aef79 100644
--- a/runtime/vm/field_table.cc
+++ b/runtime/vm/field_table.cc
@@ -68,7 +68,7 @@
     ASSERT(top_ < capacity_);
     ASSERT(expected_field_id == -1 || expected_field_id == top_);
     field.set_field_id(top_);
-    table_[top_] = Object::sentinel().raw();
+    table_[top_] = Object::sentinel().ptr();
 
     ++top_;
     return grown_backing_store;
@@ -79,7 +79,7 @@
   intptr_t reused_free = free_head_;
   free_head_ = Smi::Value(Smi::RawCast(table_[free_head_]));
   field.set_field_id(reused_free);
-  table_[reused_free] = Object::sentinel().raw();
+  table_[reused_free] = Object::sentinel().ptr();
   return false;
 }
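
These field_table.cc hunks show the table's two allocation paths: bump the top and install `Object::sentinel()`, or pop a previously freed slot off an intrusive free list whose links are Smi-encoded indices stored in the freed slots themselves. A toy model of that recycling scheme, with plain integers in place of tagged values:

```cpp
#include <cstdint>
#include <vector>

constexpr intptr_t kSentinel = -1;    // stands in for Object::sentinel()
constexpr intptr_t kNoFreeSlot = -2;  // empty free list

class ToyFieldTable {
 public:
  intptr_t Allocate() {
    if (free_head_ == kNoFreeSlot) {
      slots_.push_back(kSentinel);  // grow at the top
      return static_cast<intptr_t>(slots_.size()) - 1;
    }
    intptr_t reused = free_head_;
    free_head_ = slots_[reused];  // pop the intrusive free list
    slots_[reused] = kSentinel;   // mark as freshly allocated
    return reused;
  }

  void Free(intptr_t index) {
    slots_[index] = free_head_;  // freed slot stores the next free index
    free_head_ = index;
  }

 private:
  std::vector<intptr_t> slots_;
  intptr_t free_head_ = kNoFreeSlot;
};

int main() {
  ToyFieldTable table;
  intptr_t a = table.Allocate();  // 0
  intptr_t b = table.Allocate();  // 1
  table.Free(a);
  return table.Allocate() == a && b == 1 ? 0 : 1;  // freed slot is reused
}
```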
 
diff --git a/runtime/vm/frame_layout.h b/runtime/vm/frame_layout.h
index 4327050..d410e93 100644
--- a/runtime/vm/frame_layout.h
+++ b/runtime/vm/frame_layout.h
@@ -8,7 +8,7 @@
 #include "platform/assert.h"
 #include "platform/globals.h"
 
-// FrameLayout structure captures configuration specific properties of the
+// UntaggedFrame structure captures configuration specific properties of the
 // frame layout used by the runtime system and compiler.
 //
 // Runtime system uses runtime_frame_layout defined in stack_frame.h.
@@ -19,7 +19,7 @@
 // Forward declarations.
 class LocalVariable;
 
-struct FrameLayout {
+struct UntaggedFrame {
   // The offset (in words) from FP to the first object.
   int first_object_from_fp;
 
diff --git a/runtime/vm/growable_array.h b/runtime/vm/growable_array.h
index 85bfedc..84e378d 100644
--- a/runtime/vm/growable_array.h
+++ b/runtime/vm/growable_array.h
@@ -54,14 +54,14 @@
       : zone_(zone), array_(zone, initial_capacity) {}
 
   // Use unique zone handles to store objects.
-  void Add(const T& t) { array_.Add(&T::ZoneHandle(zone_, t.raw())); }
+  void Add(const T& t) { array_.Add(&T::ZoneHandle(zone_, t.ptr())); }
 
   T& operator[](intptr_t index) const { return *array_[index]; }
 
   const T& At(intptr_t index) const { return operator[](index); }
 
   void SetAt(intptr_t index, const T& t) {
-    array_[index] = &T::ZoneHandle(zone_, t.raw());
+    array_[index] = &T::ZoneHandle(zone_, t.ptr());
   }
 
   intptr_t length() const { return array_.length(); }
diff --git a/runtime/vm/growable_array_test.cc b/runtime/vm/growable_array_test.cc
index 7c0ee5d..91d0074 100644
--- a/runtime/vm/growable_array_test.cc
+++ b/runtime/vm/growable_array_test.cc
@@ -97,13 +97,13 @@
   GrowableHandlePtrArray<const String> test1(zone, 1);
   EXPECT_EQ(0, test1.length());
   test1.Add(Symbols::Int());
-  EXPECT(test1[0].raw() == Symbols::Int().raw());
+  EXPECT(test1[0].ptr() == Symbols::Int().ptr());
   EXPECT_EQ(1, test1.length());
 
   ZoneGrowableHandlePtrArray<const String>* test2 =
       new ZoneGrowableHandlePtrArray<const String>(zone, 1);
   test2->Add(Symbols::GetterPrefix());
-  EXPECT((*test2)[0].raw() == Symbols::GetterPrefix().raw());
+  EXPECT((*test2)[0].ptr() == Symbols::GetterPrefix().ptr());
   EXPECT_EQ(1, test2->length());
 }
 
diff --git a/runtime/vm/guard_field_test.cc b/runtime/vm/guard_field_test.cc
index 5c13efe..7645e46 100644
--- a/runtime/vm/guard_field_test.cc
+++ b/runtime/vm/guard_field_test.cc
@@ -24,7 +24,7 @@
   Field& field =
       Field::ZoneHandle(cls.LookupInstanceFieldAllowPrivate(fieldname));
   EXPECT(!field.IsNull());
-  return field.raw();
+  return field.ptr();
 }
 
 TEST_CASE(GuardFieldSimpleTest) {
diff --git a/runtime/vm/hash_table.h b/runtime/vm/hash_table.h
index b558abe..cb6d01d 100644
--- a/runtime/vm/hash_table.h
+++ b/runtime/vm/hash_table.h
@@ -49,7 +49,7 @@
 // If you *know* that no mutating operations were called, you can optimize:
 //  ...
 //  obj ^= cache.GetOrNull(name);
-//  ASSERT(cache.Release().raw() == get_foo_cache());
+//  ASSERT(cache.Release().ptr() == get_foo_cache());
 //
 // TODO(koda): When exposing these to Dart code, document and assert that
 // KeyTraits methods must not run Dart code (since the C++ code doesn't check
@@ -225,8 +225,8 @@
   // Sets the key of a previously unoccupied entry. This must not be the last
   // unoccupied entry.
   void InsertKey(intptr_t entry, const Object& key) const {
-    ASSERT(key.raw() != UnusedMarker().raw());
-    ASSERT(key.raw() != DeletedMarker().raw());
+    ASSERT(key.ptr() != UnusedMarker().ptr());
+    ASSERT(key.ptr() != DeletedMarker().ptr());
     ASSERT(!IsOccupied(entry));
     AdjustSmiValueAt(kOccupiedEntriesIndex, 1);
     if (IsDeleted(entry)) {
@@ -243,13 +243,13 @@
   const Object& DeletedMarker() const { return *data_; }
 
   bool IsUnused(intptr_t entry) const {
-    return InternalGetKey(entry) == UnusedMarker().raw();
+    return InternalGetKey(entry) == UnusedMarker().ptr();
   }
   bool IsOccupied(intptr_t entry) const {
     return !IsUnused(entry) && !IsDeleted(entry);
   }
   bool IsDeleted(intptr_t entry) const {
-    return InternalGetKey(entry) == DeletedMarker().raw();
+    return InternalGetKey(entry) == DeletedMarker().ptr();
   }
 
   ObjectPtr GetKey(intptr_t entry) const {
@@ -307,7 +307,7 @@
   }
   void UpdateCollisions(intptr_t collisions) const {
     if (KeyTraits::ReportStats()) {
-      if (data_->raw()->ptr()->InVMIsolateHeap()) {
+      if (data_->ptr()->untag()->InVMIsolateHeap()) {
         return;
       }
       AdjustSmiValueAt(kNumProbesIndex, collisions + 1);
@@ -447,14 +447,14 @@
         Thread::Current()->zone(),
         Array::New(Table::ArrayLengthForNumOccupied(initial_capacity), space));
     table.Initialize();
-    return table.Release().raw();
+    return table.Release().ptr();
   }
 
   template <typename Table>
   static ArrayPtr New(const Array& array) {
-    Table table(Thread::Current()->zone(), array.raw());
+    Table table(Thread::Current()->zone(), array.ptr());
     table.Initialize();
-    return table.Release().raw();
+    return table.Release().ptr();
   }
 
   // Clears 'to' and inserts all elements from 'from', in iteration order.
@@ -506,7 +506,7 @@
     Table new_table(New<Table>(new_capacity,  // Is rounded up to power of 2.
                                table.data_->IsOld() ? Heap::kOld : Heap::kNew));
     Copy(table, new_table);
-    *table.data_ = new_table.Release().raw();
+    *table.data_ = new_table.Release().ptr();
     NOT_IN_PRODUCT(table.UpdateGrowth(); table.PrintStats();)
   }
 
@@ -529,7 +529,7 @@
         }
       }
     }
-    return result.raw();
+    return result.ptr();
   }
 };
 
@@ -581,7 +581,7 @@
     if (!BaseIterTable::FindKeyOrDeletedOrUnused(key, &entry)) {
       BaseIterTable::InsertKey(entry, key);
       BaseIterTable::UpdatePayload(entry, 0, value_if_absent);
-      return value_if_absent.raw();
+      return value_if_absent.ptr();
     } else {
       return BaseIterTable::GetPayload(entry, 0);
     }
@@ -597,7 +597,7 @@
           BaseIterTable::BaseTable::Traits::NewKey(key);
       BaseIterTable::InsertKey(entry, BaseIterTable::KeyHandle());
       BaseIterTable::UpdatePayload(entry, 0, value_if_absent);
-      return value_if_absent.raw();
+      return value_if_absent.ptr();
     } else {
       return BaseIterTable::GetPayload(entry, 0);
     }
@@ -659,7 +659,7 @@
     intptr_t entry = -1;
     if (!BaseIterTable::FindKeyOrDeletedOrUnused(key, &entry)) {
       BaseIterTable::InsertKey(entry, key);
-      return key.raw();
+      return key.ptr();
     } else {
       return BaseIterTable::GetKey(entry);
     }
@@ -674,7 +674,7 @@
       BaseIterTable::KeyHandle() =
           BaseIterTable::BaseTable::Traits::NewKey(key);
       BaseIterTable::InsertKey(entry, BaseIterTable::KeyHandle());
-      return BaseIterTable::KeyHandle().raw();
+      return BaseIterTable::KeyHandle().ptr();
     } else {
       return BaseIterTable::GetKey(entry);
     }
@@ -725,8 +725,8 @@
     Object& entry = Object::Handle();
     for (intptr_t i = 0; i < this->data_->Length(); i++) {
       entry = this->data_->At(i);
-      if (entry.raw() == BaseSet::UnusedMarker().raw() ||
-          entry.raw() == BaseSet::DeletedMarker().raw() || entry.IsSmi()) {
+      if (entry.ptr() == BaseSet::UnusedMarker().ptr() ||
+          entry.ptr() == BaseSet::DeletedMarker().ptr() || entry.IsSmi()) {
         // empty, deleted, num_used/num_deleted
         OS::PrintErr("%" Pd ": %s\n", i, entry.ToCString());
       } else {
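
The `UnusedMarker()`/`DeletedMarker()` comparisons above, now done by `ptr()` identity, are the classic open-addressing sentinels: an unused slot terminates a probe chain, while a deleted slot must be probed past but may be reclaimed on insertion. A self-contained toy set showing that discipline; non-negative integer keys stand in for the VM objects, and the sketch assumes the table never fills completely (the VM rehashes before that can happen):

```cpp
#include <cstdint>
#include <vector>

// Sentinel values standing in for the VM's two distinguished marker objects.
enum Slot : intptr_t { kUnused = -1, kDeleted = -2 };

class ToySet {
 public:
  explicit ToySet(size_t capacity) : slots_(capacity, kUnused) {}

  // Returns true if the key was newly inserted. Keys must be >= 0.
  bool Insert(intptr_t key) {
    size_t i = Hash(key);
    size_t first_free = slots_.size();  // "no reusable slot seen yet"
    while (true) {
      intptr_t slot = slots_[i];
      if (slot == key) return false;  // already present
      if (slot == kDeleted && first_free == slots_.size()) {
        first_free = i;  // reusable, but keep probing in case key exists
      }
      if (slot == kUnused) {  // unused slot ends the probe chain
        slots_[first_free != slots_.size() ? first_free : i] = key;
        return true;
      }
      i = (i + 1) % slots_.size();
    }
  }

 private:
  size_t Hash(intptr_t key) const {
    return static_cast<size_t>(key) % slots_.size();
  }
  std::vector<intptr_t> slots_;
};

int main() {
  ToySet set(8);
  return set.Insert(3) && set.Insert(11) && !set.Insert(3) ? 0 : 1;
}
```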
diff --git a/runtime/vm/heap/become.cc b/runtime/vm/heap/become.cc
index 1a1415b..ea959e1 100644
--- a/runtime/vm/heap/become.cc
+++ b/runtime/vm/heap/become.cc
@@ -24,16 +24,16 @@
   ForwardingCorpse* result = reinterpret_cast<ForwardingCorpse*>(addr);
 
   uword tags = result->tags_;  // Carry-over any identity hash.
-  tags = ObjectLayout::SizeTag::update(size, tags);
-  tags = ObjectLayout::ClassIdTag::update(kForwardingCorpse, tags);
+  tags = UntaggedObject::SizeTag::update(size, tags);
+  tags = UntaggedObject::ClassIdTag::update(kForwardingCorpse, tags);
   bool is_old = (addr & kNewObjectAlignmentOffset) == kOldObjectAlignmentOffset;
-  tags = ObjectLayout::OldBit::update(is_old, tags);
-  tags = ObjectLayout::OldAndNotMarkedBit::update(is_old, tags);
-  tags = ObjectLayout::OldAndNotRememberedBit::update(is_old, tags);
-  tags = ObjectLayout::NewBit::update(!is_old, tags);
+  tags = UntaggedObject::OldBit::update(is_old, tags);
+  tags = UntaggedObject::OldAndNotMarkedBit::update(is_old, tags);
+  tags = UntaggedObject::OldAndNotRememberedBit::update(is_old, tags);
+  tags = UntaggedObject::NewBit::update(!is_old, tags);
 
   result->tags_ = tags;
-  if (size > ObjectLayout::SizeTag::kMaxSizeTag) {
+  if (size > UntaggedObject::SizeTag::kMaxSizeTag) {
     *result->SizeAddress() = size;
   }
   result->set_target(Object::null());
@@ -60,7 +60,7 @@
 }
 
 static void ForwardObjectTo(ObjectPtr before_obj, ObjectPtr after_obj) {
-  const intptr_t size_before = before_obj->ptr()->HeapSize();
+  const intptr_t size_before = before_obj->untag()->HeapSize();
 
   uword corpse_addr = static_cast<uword>(before_obj) - kHeapObjectTag;
   ForwardingCorpse* forwarder =
@@ -70,7 +70,7 @@
     FATAL("become: ForwardObjectTo failure.");
   }
   // Still need to be able to iterate over the forwarding corpse.
-  const intptr_t size_after = before_obj->ptr()->HeapSize();
+  const intptr_t size_after = before_obj->untag()->HeapSize();
   if (size_before != size_after) {
     FATAL("become: Before and after sizes do not match.");
   }
@@ -99,10 +99,10 @@
       }
       if (visiting_object_ == nullptr) {
         *p = new_target;
-      } else if (visiting_object_->ptr()->IsCardRemembered()) {
-        visiting_object_->ptr()->StoreArrayPointer(p, new_target, thread_);
+      } else if (visiting_object_->untag()->IsCardRemembered()) {
+        visiting_object_->untag()->StoreArrayPointer(p, new_target, thread_);
       } else {
-        visiting_object_->ptr()->StorePointer(p, new_target, thread_);
+        visiting_object_->untag()->StorePointer(p, new_target, thread_);
       }
     }
   }
@@ -111,10 +111,11 @@
     visiting_object_ = obj;
     // The incoming remembered bit may be unreliable. Clear it so we can
     // consistently reapply the barrier to all slots.
-    if ((obj != nullptr) && obj->IsOldObject() && obj->ptr()->IsRemembered()) {
+    if ((obj != nullptr) && obj->IsOldObject() &&
+        obj->untag()->IsRemembered()) {
       ASSERT(!obj->IsForwardingCorpse());
       ASSERT(!obj->IsFreeListElement());
-      obj->ptr()->ClearRememberedBit();
+      obj->untag()->ClearRememberedBit();
     }
   }
 
@@ -132,7 +133,7 @@
 
   virtual void VisitObject(ObjectPtr obj) {
     pointer_visitor_->VisitingObject(obj);
-    obj->ptr()->VisitPointers(pointer_visitor_);
+    obj->untag()->VisitPointers(pointer_visitor_);
   }
 
  private:
@@ -149,8 +150,8 @@
   virtual void VisitHandle(uword addr) {
     FinalizablePersistentHandle* handle =
         reinterpret_cast<FinalizablePersistentHandle*>(addr);
-    if (IsForwardingObject(handle->raw())) {
-      *handle->raw_addr() = GetForwardedObject(handle->raw());
+    if (IsForwardingObject(handle->ptr())) {
+      *handle->ptr_addr() = GetForwardedObject(handle->ptr());
     }
   }
 
@@ -191,7 +192,7 @@
 void Become::MakeDummyObject(const Instance& instance) {
   // Make the forward pointer point to itself.
   // This is needed to distinguish it from a real forwarding object.
-  ForwardObjectTo(instance.raw(), instance.raw());
+  ForwardObjectTo(instance.ptr(), instance.ptr());
 }
 
 static bool IsDummyObject(ObjectPtr object) {
@@ -206,13 +207,13 @@
   OS::PrintErr("BEFORE IS HEAP OBJECT: %s\n",
                before_obj->IsHeapObject() ? "YES" : "NO");
   OS::PrintErr("BEFORE IN VMISOLATE HEAP OBJECT: %s\n",
-               before_obj->ptr()->InVMIsolateHeap() ? "YES" : "NO");
+               before_obj->untag()->InVMIsolateHeap() ? "YES" : "NO");
 
   OS::PrintErr("AFTER ADDRESS: %#" Px "\n", static_cast<uword>(after_obj));
   OS::PrintErr("AFTER IS HEAP OBJECT: %s\n",
                after_obj->IsHeapObject() ? "YES" : "NO");
   OS::PrintErr("AFTER IN VMISOLATE HEAP OBJECT: %s\n",
-               after_obj->ptr()->InVMIsolateHeap() ? "YES" : "NO");
+               after_obj->untag()->InVMIsolateHeap() ? "YES" : "NO");
 
   if (before_obj->IsHeapObject()) {
     OS::PrintErr("BEFORE OBJECT CLASS ID=%" Pd "\n", before_obj->GetClassId());
@@ -251,7 +252,7 @@
       CrashDump(before_obj, after_obj);
       FATAL("become: Cannot become immediates");
     }
-    if (before_obj->ptr()->InVMIsolateHeap()) {
+    if (before_obj->untag()->InVMIsolateHeap()) {
       CrashDump(before_obj, after_obj);
       FATAL("become: Cannot forward VM heap objects");
     }
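
become.cc rewrites an object's header in place when turning it into a forwarding corpse: each property (size, class id, old/new GC bits) occupies its own bit range of the tag word and is rewritten with a `BitField`-style `update()`, leaving unrelated bits such as the identity hash untouched. A toy tag word with invented bit positions (the VM's real `SizeTag`/`ClassIdTag` layouts differ):

```cpp
#include <cstdint>

template <int position, int size>
struct Tag {
  static constexpr uintptr_t kMask = ((uintptr_t{1} << size) - 1) << position;
  // Overwrite only this field's bit range; everything else carries over.
  static uintptr_t update(uintptr_t value, uintptr_t tags) {
    return (tags & ~kMask) | ((value << position) & kMask);
  }
  static uintptr_t decode(uintptr_t tags) { return (tags & kMask) >> position; }
};

using SizeTag = Tag<8, 8>;       // object size; 0 means "too large for tag"
using ClassIdTag = Tag<16, 16>;  // class id, e.g. kForwardingCorpse
using OldBit = Tag<0, 1>;

uintptr_t MakeCorpseTags(uintptr_t tags, uintptr_t size, uintptr_t corpse_cid,
                         bool is_old) {
  tags = SizeTag::update(size, tags);  // keep the size so GC can walk past it
  tags = ClassIdTag::update(corpse_cid, tags);
  tags = OldBit::update(is_old ? 1 : 0, tags);
  return tags;  // identity-hash bits outside these ranges are untouched
}

int main() {
  uintptr_t tags = 0;
  tags = MakeCorpseTags(tags, /*size=*/64, /*corpse_cid=*/1, /*is_old=*/true);
  return SizeTag::decode(tags) == 64 && OldBit::decode(tags) == 1 ? 0 : 1;
}
```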
diff --git a/runtime/vm/heap/become.h b/runtime/vm/heap/become.h
index 5bcc42c..9914056 100644
--- a/runtime/vm/heap/become.h
+++ b/runtime/vm/heap/become.h
@@ -27,7 +27,7 @@
   void set_target(ObjectPtr target) { target_ = target; }
 
   intptr_t HeapSize() {
-    intptr_t size = ObjectLayout::SizeTag::decode(tags_);
+    intptr_t size = UntaggedObject::SizeTag::decode(tags_);
     if (size != 0) return size;
     return *SizeAddress();
   }
diff --git a/runtime/vm/heap/become_test.cc b/runtime/vm/heap/become_test.cc
index 7834a2e..a375486 100644
--- a/runtime/vm/heap/become_test.cc
+++ b/runtime/vm/heap/become_test.cc
@@ -16,7 +16,7 @@
   const String& before_obj = String::Handle(String::New("old", before_space));
   const String& after_obj = String::Handle(String::New("new", after_space));
 
-  EXPECT(before_obj.raw() != after_obj.raw());
+  EXPECT(before_obj.ptr() != after_obj.ptr());
 
   // Allocate the arrays in old space to test the remembered set.
   const Array& before = Array::Handle(Array::New(1, Heap::kOld));
@@ -26,11 +26,11 @@
 
   Become::ElementsForwardIdentity(before, after);
 
-  EXPECT(before_obj.raw() == after_obj.raw());
+  EXPECT(before_obj.ptr() == after_obj.ptr());
 
   GCTestHelper::CollectAllGarbage();
 
-  EXPECT(before_obj.raw() == after_obj.raw());
+  EXPECT(before_obj.ptr() == after_obj.ptr());
 }
 
 ISOLATE_UNIT_TEST_CASE(BecomeFowardOldToOld) {
@@ -54,13 +54,13 @@
 
   const Array& before_obj = Array::Handle(Array::New(0, Heap::kOld));
   const Array& after_obj = Array::Handle(Array::New(0, Heap::kOld));
-  EXPECT(before_obj.raw() != after_obj.raw());
+  EXPECT(before_obj.ptr() != after_obj.ptr());
 
   void* peer = reinterpret_cast<void*>(42);
   void* no_peer = reinterpret_cast<void*>(0);
-  heap->SetPeer(before_obj.raw(), peer);
-  EXPECT_EQ(peer, heap->GetPeer(before_obj.raw()));
-  EXPECT_EQ(no_peer, heap->GetPeer(after_obj.raw()));
+  heap->SetPeer(before_obj.ptr(), peer);
+  EXPECT_EQ(peer, heap->GetPeer(before_obj.ptr()));
+  EXPECT_EQ(no_peer, heap->GetPeer(after_obj.ptr()));
 
   const Array& before = Array::Handle(Array::New(1, Heap::kOld));
   before.SetAt(0, before_obj);
@@ -68,9 +68,9 @@
   after.SetAt(0, after_obj);
   Become::ElementsForwardIdentity(before, after);
 
-  EXPECT(before_obj.raw() == after_obj.raw());
-  EXPECT_EQ(peer, heap->GetPeer(before_obj.raw()));
-  EXPECT_EQ(peer, heap->GetPeer(after_obj.raw()));
+  EXPECT(before_obj.ptr() == after_obj.ptr());
+  EXPECT_EQ(peer, heap->GetPeer(before_obj.ptr()));
+  EXPECT_EQ(peer, heap->GetPeer(after_obj.ptr()));
 }
 
 ISOLATE_UNIT_TEST_CASE(BecomeForwardRememberedObject) {
@@ -80,10 +80,10 @@
   const Array& after_obj = Array::Handle(Array::New(1, Heap::kOld));
   before_obj.SetAt(0, new_element);
   after_obj.SetAt(0, old_element);
-  EXPECT(before_obj.raw()->ptr()->IsRemembered());
-  EXPECT(!after_obj.raw()->ptr()->IsRemembered());
+  EXPECT(before_obj.ptr()->untag()->IsRemembered());
+  EXPECT(!after_obj.ptr()->untag()->IsRemembered());
 
-  EXPECT(before_obj.raw() != after_obj.raw());
+  EXPECT(before_obj.ptr() != after_obj.ptr());
 
   const Array& before = Array::Handle(Array::New(1, Heap::kOld));
   before.SetAt(0, before_obj);
@@ -92,20 +92,20 @@
 
   Become::ElementsForwardIdentity(before, after);
 
-  EXPECT(before_obj.raw() == after_obj.raw());
-  EXPECT(!after_obj.raw()->ptr()->IsRemembered());
+  EXPECT(before_obj.ptr() == after_obj.ptr());
+  EXPECT(!after_obj.ptr()->untag()->IsRemembered());
 
   GCTestHelper::CollectAllGarbage();
 
-  EXPECT(before_obj.raw() == after_obj.raw());
+  EXPECT(before_obj.ptr() == after_obj.ptr());
 }
 
 ISOLATE_UNIT_TEST_CASE(BecomeForwardRememberedCards) {
   const intptr_t length = Heap::kNewAllocatableSize / kWordSize;
   ASSERT(Array::UseCardMarkingForAllocation(length));
   const Array& card_remembered_array = Array::Handle(Array::New(length));
-  EXPECT(card_remembered_array.raw()->ptr()->IsCardRemembered());
-  EXPECT(!card_remembered_array.raw()->ptr()->IsRemembered());
+  EXPECT(card_remembered_array.ptr()->untag()->IsCardRemembered());
+  EXPECT(!card_remembered_array.ptr()->untag()->IsRemembered());
 
   const String& old_element = String::Handle(String::New("old", Heap::kOld));
   const String& new_element = String::Handle(String::New("new", Heap::kNew));
@@ -123,10 +123,10 @@
   after.SetAt(0, new_element);
   Become::ElementsForwardIdentity(before, after);
 
-  EXPECT(old_element.raw() == new_element.raw());
-  EXPECT(old_element.raw()->IsNewObject());
-  EXPECT(card_remembered_array.raw()->ptr()->IsCardRemembered());
-  EXPECT(!card_remembered_array.raw()->ptr()->IsRemembered());
+  EXPECT(old_element.ptr() == new_element.ptr());
+  EXPECT(old_element.ptr()->IsNewObject());
+  EXPECT(card_remembered_array.ptr()->untag()->IsCardRemembered());
+  EXPECT(!card_remembered_array.ptr()->untag()->IsRemembered());
 
   {
     HANDLESCOPE(thread);
@@ -136,9 +136,9 @@
 
   GCTestHelper::CollectAllGarbage();
 
-  EXPECT(old_element.raw() == new_element.raw());
-  EXPECT(card_remembered_array.raw()->ptr()->IsCardRemembered());
-  EXPECT(!card_remembered_array.raw()->ptr()->IsRemembered());
+  EXPECT(old_element.ptr() == new_element.ptr());
+  EXPECT(card_remembered_array.ptr()->untag()->IsCardRemembered());
+  EXPECT(!card_remembered_array.ptr()->untag()->IsRemembered());
 
   {
     HANDLESCOPE(thread);
diff --git a/runtime/vm/heap/compactor.cc b/runtime/vm/heap/compactor.cc
index e60eed9..5a432f7 100644
--- a/runtime/vm/heap/compactor.cc
+++ b/runtime/vm/heap/compactor.cc
@@ -274,13 +274,14 @@
     const intptr_t length = typed_data_views_.length();
     for (intptr_t i = 0; i < length; ++i) {
       auto raw_view = typed_data_views_[i];
-      const classid_t cid = raw_view->ptr()->typed_data_->GetClassIdMayBeSmi();
+      const classid_t cid =
+          raw_view->untag()->typed_data_->GetClassIdMayBeSmi();
 
       // If we have external typed data we can simply return, since the backing
       // store lives in C-heap and will not move. Otherwise we have to update
       // the inner pointer.
       if (IsTypedDataClassId(cid)) {
-        raw_view->ptr()->RecomputeDataFieldForInternalTypedData();
+        raw_view->untag()->RecomputeDataFieldForInternalTypedData();
       } else {
         ASSERT(IsExternalTypedDataClassId(cid));
       }
@@ -475,9 +476,9 @@
   intptr_t block_dead_size = 0;
   uword current = first_object;
   while (current < block_end) {
-    ObjectPtr obj = ObjectLayout::FromAddr(current);
-    intptr_t size = obj->ptr()->HeapSize();
-    if (obj->ptr()->IsMarked()) {
+    ObjectPtr obj = UntaggedObject::FromAddr(current);
+    intptr_t size = obj->untag()->HeapSize();
+    if (obj->untag()->IsMarked()) {
       forwarding_block->RecordLive(current, size);
       ASSERT(static_cast<intptr_t>(forwarding_block->Lookup(current)) ==
              block_live_size);
@@ -505,9 +506,9 @@
 
   uword old_addr = first_object;
   while (old_addr < block_end) {
-    ObjectPtr old_obj = ObjectLayout::FromAddr(old_addr);
-    intptr_t size = old_obj->ptr()->HeapSize();
-    if (old_obj->ptr()->IsMarked()) {
+    ObjectPtr old_obj = UntaggedObject::FromAddr(old_addr);
+    intptr_t size = old_obj->untag()->HeapSize();
+    if (old_obj->untag()->IsMarked()) {
       uword new_addr = forwarding_block->Lookup(old_addr);
       if (new_addr != free_current_) {
         // The only situation where these two don't match is if we are moving
@@ -526,7 +527,7 @@
         free_end_ = free_page_->object_end();
         ASSERT(free_current_ == new_addr);
       }
-      ObjectPtr new_obj = ObjectLayout::FromAddr(new_addr);
+      ObjectPtr new_obj = UntaggedObject::FromAddr(new_addr);
 
       // Fast path for no movement. There's often a large block of objects at
       // the beginning that don't move.
@@ -536,11 +537,11 @@
                 reinterpret_cast<void*>(old_addr), size);
 
         if (IsTypedDataClassId(new_obj->GetClassId())) {
-          static_cast<TypedDataPtr>(new_obj)->ptr()->RecomputeDataField();
+          static_cast<TypedDataPtr>(new_obj)->untag()->RecomputeDataField();
         }
       }
-      new_obj->ptr()->ClearMarkBit();
-      new_obj->ptr()->VisitPointers(compactor_);
+      new_obj->untag()->ClearMarkBit();
+      new_obj->untag()->VisitPointers(compactor_);
 
       ASSERT(free_current_ == new_addr);
       free_current_ += size;
@@ -602,7 +603,7 @@
     return;  // Not moved.
   }
 
-  uword old_addr = ObjectLayout::ToAddr(old_target);
+  uword old_addr = UntaggedObject::ToAddr(old_target);
   intptr_t lo = 0;
   intptr_t hi = image_page_hi_;
   while (lo <= hi) {
@@ -625,7 +626,7 @@
   }
 
   ObjectPtr new_target =
-      ObjectLayout::FromAddr(forwarding_page->Lookup(old_addr));
+      UntaggedObject::FromAddr(forwarding_page->Lookup(old_addr));
   ASSERT(!new_target->IsSmiOrNewObject());
   *ptr = new_target;
 }
@@ -634,9 +635,9 @@
                                              ObjectPtr* first,
                                              ObjectPtr* last) {
   // First we forward all fields of the typed data view.
-  ObjectPtr old_backing = view->ptr()->typed_data_;
+  ObjectPtr old_backing = view->untag()->typed_data_;
   VisitPointers(first, last);
-  ObjectPtr new_backing = view->ptr()->typed_data_;
+  ObjectPtr new_backing = view->untag()->typed_data_;
 
   const bool backing_moved = old_backing != new_backing;
   if (backing_moved) {
@@ -656,10 +657,10 @@
   } else {
     // The backing store didn't move, so we don't need to update the
     // inner pointer.
-    if (view->ptr()->data_ == 0) {
-      ASSERT(RawSmiValue(view->ptr()->offset_in_bytes_) == 0 &&
-             RawSmiValue(view->ptr()->length_) == 0 &&
-             view->ptr()->typed_data_ == Object::null());
+    if (view->untag()->data_ == 0) {
+      ASSERT(RawSmiValue(view->untag()->offset_in_bytes_) == 0 &&
+             RawSmiValue(view->untag()->length_) == 0 &&
+             view->untag()->typed_data_ == Object::null());
     }
   }
 }
@@ -675,7 +676,7 @@
 void GCCompactor::VisitHandle(uword addr) {
   FinalizablePersistentHandle* handle =
       reinterpret_cast<FinalizablePersistentHandle*>(addr);
-  ForwardPointer(handle->raw_addr());
+  ForwardPointer(handle->ptr_addr());
 }
 
 void GCCompactor::ForwardStackPointers() {
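
The sliding compactor works in two passes over each block: a planning pass records, per marked object, how many live bytes precede it in the block, and the slide pass then moves each object to the address the forwarding block reports for it. A toy forwarding table capturing that prefix-sum idea; the VM derives the same mapping from per-block mark bitmaps rather than a map:

```cpp
#include <cstdint>
#include <map>

class ToyForwardingBlock {
 public:
  explicit ToyForwardingBlock(uintptr_t dest_base) : dest_base_(dest_base) {}

  // Planning pass: called once per marked object, in address order.
  void RecordLive(uintptr_t old_addr, uintptr_t size) {
    new_addr_[old_addr] = dest_base_ + live_bytes_;
    live_bytes_ += size;
  }

  // Slide pass: where does this live object end up?
  uintptr_t Lookup(uintptr_t old_addr) const { return new_addr_.at(old_addr); }

 private:
  uintptr_t dest_base_;
  uintptr_t live_bytes_ = 0;
  std::map<uintptr_t, uintptr_t> new_addr_;
};

int main() {
  ToyForwardingBlock block(/*dest_base=*/0x1000);
  block.RecordLive(0x2000, 32);  // first live object slides to 0x1000
  block.RecordLive(0x2080, 16);  // the next one packs right behind it
  return block.Lookup(0x2080) == 0x1020 ? 0 : 1;
}
```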
diff --git a/runtime/vm/heap/freelist.cc b/runtime/vm/heap/freelist.cc
index e2fd5f8..edc9534 100644
--- a/runtime/vm/heap/freelist.cc
+++ b/runtime/vm/heap/freelist.cc
@@ -22,16 +22,16 @@
   FreeListElement* result = reinterpret_cast<FreeListElement*>(addr);
 
   uword tags = 0;
-  tags = ObjectLayout::SizeTag::update(size, tags);
-  tags = ObjectLayout::ClassIdTag::update(kFreeListElement, tags);
+  tags = UntaggedObject::SizeTag::update(size, tags);
+  tags = UntaggedObject::ClassIdTag::update(kFreeListElement, tags);
   ASSERT((addr & kNewObjectAlignmentOffset) == kOldObjectAlignmentOffset);
-  tags = ObjectLayout::OldBit::update(true, tags);
-  tags = ObjectLayout::OldAndNotMarkedBit::update(true, tags);
-  tags = ObjectLayout::OldAndNotRememberedBit::update(true, tags);
-  tags = ObjectLayout::NewBit::update(false, tags);
+  tags = UntaggedObject::OldBit::update(true, tags);
+  tags = UntaggedObject::OldAndNotMarkedBit::update(true, tags);
+  tags = UntaggedObject::OldAndNotRememberedBit::update(true, tags);
+  tags = UntaggedObject::NewBit::update(false, tags);
   result->tags_ = tags;
 
-  if (size > ObjectLayout::SizeTag::kMaxSizeTag) {
+  if (size > UntaggedObject::SizeTag::kMaxSizeTag) {
     *result->SizeAddress() = size;
   }
   result->set_next(NULL);
@@ -47,7 +47,7 @@
 
 intptr_t FreeListElement::HeaderSizeFor(intptr_t size) {
   if (size == 0) return 0;
-  return ((size > ObjectLayout::SizeTag::kMaxSizeTag) ? 3 : 2) * kWordSize;
+  return ((size > UntaggedObject::SizeTag::kMaxSizeTag) ? 3 : 2) * kWordSize;
 }
 
 FreeList::FreeList() : mutex_() {
diff --git a/runtime/vm/heap/freelist.h b/runtime/vm/heap/freelist.h
index b868c19..7a1e2d0 100644
--- a/runtime/vm/heap/freelist.h
+++ b/runtime/vm/heap/freelist.h
@@ -29,7 +29,7 @@
   void set_next(FreeListElement* next) { next_ = next; }
 
   intptr_t HeapSize() {
-    intptr_t size = ObjectLayout::SizeTag::decode(tags_);
+    intptr_t size = UntaggedObject::SizeTag::decode(tags_);
     if (size != 0) return size;
     return *SizeAddress();
   }
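
`FreeListElement` shares its size encoding with `ForwardingCorpse` above: sizes up to `SizeTag::kMaxSizeTag` live directly in the header tag, and a zero size field means the real size sits in a side word after the header, which is why `HeaderSizeFor` charges two words normally and three for large elements. A toy decode using an invented 8-bit size field:

```cpp
#include <cstdint>

constexpr uintptr_t kMaxSizeTag = 0xFF;  // toy limit, not the VM's

struct Element {
  uintptr_t tags;           // low 8 bits: encoded size; 0 means "look aside"
  uintptr_t next;           // free-list link
  uintptr_t size_if_large;  // only meaningful when the tag's size field is 0

  uintptr_t HeapSize() const {
    uintptr_t size = tags & kMaxSizeTag;
    if (size != 0) return size;  // small enough to fit the tag
    return size_if_large;        // large: read the side word
  }
};

int main() {
  Element small{/*tags=*/48, /*next=*/0, /*size_if_large=*/0};
  Element large{/*tags=*/0, /*next=*/0, /*size_if_large=*/4096};
  return small.HeapSize() == 48 && large.HeapSize() == 4096 ? 0 : 1;
}
```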
diff --git a/runtime/vm/heap/heap_test.cc b/runtime/vm/heap/heap_test.cc
index 364840d..fc85962 100644
--- a/runtime/vm/heap/heap_test.cc
+++ b/runtime/vm/heap/heap_test.cc
@@ -94,7 +94,7 @@
   const Class& cls = Class::Handle(
       lib.LookupClass(String::Handle(Symbols::New(Thread::Current(), name))));
   EXPECT(!cls.IsNull());  // No ambiguity error expected.
-  return cls.raw();
+  return cls.ptr();
 }
 
 TEST_CASE(ClassHeapStats) {
@@ -241,8 +241,8 @@
     {
       HeapIterationScope iteration(thread);
       NoSafepointScope no_safepoint;
-      FindOnly find_only(obj.raw());
-      EXPECT(obj.raw() == heap->FindObject(&find_only));
+      FindOnly find_only(obj.ptr());
+      EXPECT(obj.ptr() == heap->FindObject(&find_only));
     }
   }
   {
@@ -261,11 +261,11 @@
   GCTestHelper::WaitForGCTasks();
 
   Heap* heap = IsolateGroup::Current()->heap();
-  EXPECT(heap->Contains(ObjectLayout::ToAddr(obj.raw())));
+  EXPECT(heap->Contains(UntaggedObject::ToAddr(obj.ptr())));
   heap->WriteProtect(true);
-  EXPECT(heap->Contains(ObjectLayout::ToAddr(obj.raw())));
+  EXPECT(heap->Contains(UntaggedObject::ToAddr(obj.ptr())));
   heap->WriteProtect(false);
-  EXPECT(heap->Contains(ObjectLayout::ToAddr(obj.raw())));
+  EXPECT(heap->Contains(UntaggedObject::ToAddr(obj.ptr())));
 }
 
 ISOLATE_UNIT_TEST_CASE(CollectAllGarbage_DeadOldToNew) {
@@ -540,8 +540,8 @@
       PersistentHandle* handle = bequest->handle();
       // Object in the receiving isolate's heap.
       EXPECT(isolate()->group()->heap()->Contains(
-          ObjectLayout::ToAddr(handle->raw())));
-      response_obj = handle->raw();
+          UntaggedObject::ToAddr(handle->ptr())));
+      response_obj = handle->ptr();
       isolate()->group()->api_state()->FreePersistentHandle(handle);
     } else {
       Thread* thread = Thread::Current();
@@ -550,12 +550,12 @@
     }
     if (response_obj.IsString()) {
       String& response = String::Handle();
-      response ^= response_obj.raw();
+      response ^= response_obj.ptr();
       msg_.reset(Utils::StrDup(response.ToCString()));
     } else {
       ASSERT(response_obj.IsArray());
       Array& response_array = Array::Handle();
-      response_array ^= response_obj.raw();
+      response_array ^= response_obj.ptr();
       ASSERT(response_array.Length() == 1);
       ExternalTypedData& response = ExternalTypedData::Handle();
       response ^= response_array.At(0);
@@ -598,7 +598,7 @@
       String& string = String::Handle(String::New(TEST_MESSAGE));
       PersistentHandle* handle =
           Isolate::Current()->group()->api_state()->AllocatePersistentHandle();
-      handle->set_raw(string.raw());
+      handle->set_ptr(string.ptr());
 
       reinterpret_cast<Isolate*>(worker)->bequeath(
           std::unique_ptr<Bequest>(new Bequest(handle, port_id)));
@@ -633,7 +633,7 @@
 
     PersistentHandle* handle =
         Isolate::Current()->group()->api_state()->AllocatePersistentHandle();
-    handle->set_raw(string.raw());
+    handle->set_ptr(string.ptr());
 
     reinterpret_cast<Isolate*>(worker)->bequeath(
         std::unique_ptr<Bequest>(new Bequest(handle, port_id)));
diff --git a/runtime/vm/heap/marker.cc b/runtime/vm/heap/marker.cc
index bceb31c..5a6229b 100644
--- a/runtime/vm/heap/marker.cc
+++ b/runtime/vm/heap/marker.cc
@@ -50,18 +50,18 @@
     WeakPropertyPtr cur_weak = delayed_weak_properties_;
     delayed_weak_properties_ = WeakProperty::null();
     while (cur_weak != WeakProperty::null()) {
-      WeakPropertyPtr next_weak = cur_weak->ptr()->next_;
-      ObjectPtr raw_key = cur_weak->ptr()->key_;
+      WeakPropertyPtr next_weak = cur_weak->untag()->next_;
+      ObjectPtr raw_key = cur_weak->untag()->key_;
       // Reset the next pointer in the weak property.
-      cur_weak->ptr()->next_ = WeakProperty::null();
-      if (raw_key->ptr()->IsMarked()) {
-        ObjectPtr raw_val = cur_weak->ptr()->value_;
-        marked =
-            marked || (raw_val->IsHeapObject() && !raw_val->ptr()->IsMarked());
+      cur_weak->untag()->next_ = WeakProperty::null();
+      if (raw_key->untag()->IsMarked()) {
+        ObjectPtr raw_val = cur_weak->untag()->value_;
+        marked = marked ||
+                 (raw_val->IsHeapObject() && !raw_val->untag()->IsMarked());
 
         // The key is marked so we make sure to properly visit all pointers
         // originating from this weak property.
-        cur_weak->ptr()->VisitPointersNonvirtual(this);
+        cur_weak->untag()->VisitPointersNonvirtual(this);
       } else {
         // Requeue this weak property to be handled later.
         EnqueueWeakProperty(cur_weak);
@@ -89,7 +89,7 @@
 
         intptr_t size;
         if (class_id != kWeakPropertyCid) {
-          size = raw_obj->ptr()->VisitPointersNonvirtual(this);
+          size = raw_obj->untag()->VisitPointersNonvirtual(this);
         } else {
           WeakPropertyPtr raw_weak = static_cast<WeakPropertyPtr>(raw_obj);
           size = ProcessWeakProperty(raw_weak, /* did_mark */ true);
@@ -132,25 +132,25 @@
     ASSERT(raw_weak->IsHeapObject());
     ASSERT(raw_weak->IsOldObject());
     ASSERT(raw_weak->IsWeakProperty());
-    ASSERT(raw_weak->ptr()->IsMarked());
-    ASSERT(raw_weak->ptr()->next_ == WeakProperty::null());
-    raw_weak->ptr()->next_ = delayed_weak_properties_;
+    ASSERT(raw_weak->untag()->IsMarked());
+    ASSERT(raw_weak->untag()->next_ == WeakProperty::null());
+    raw_weak->untag()->next_ = delayed_weak_properties_;
     delayed_weak_properties_ = raw_weak;
   }
 
   intptr_t ProcessWeakProperty(WeakPropertyPtr raw_weak, bool did_mark) {
     // The fate of the weak property is determined by its key.
-    ObjectPtr raw_key = LoadPointerIgnoreRace(&raw_weak->ptr()->key_);
+    ObjectPtr raw_key = LoadPointerIgnoreRace(&raw_weak->untag()->key_);
     if (raw_key->IsHeapObject() && raw_key->IsOldObject() &&
-        !raw_key->ptr()->IsMarked()) {
+        !raw_key->untag()->IsMarked()) {
       // Key was white. Enqueue the weak property.
       if (did_mark) {
         EnqueueWeakProperty(raw_weak);
       }
-      return raw_weak->ptr()->HeapSize();
+      return raw_weak->untag()->HeapSize();
     }
     // Key is gray or black. Make the weak property black.
-    return raw_weak->ptr()->VisitPointersNonvirtual(this);
+    return raw_weak->untag()->VisitPointersNonvirtual(this);
   }
 
   void ProcessDeferredMarking() {
@@ -162,7 +162,7 @@
       const intptr_t class_id = raw_obj->GetClassId();
       intptr_t size;
       if (class_id != kWeakPropertyCid) {
-        size = raw_obj->ptr()->VisitPointersNonvirtual(this);
+        size = raw_obj->untag()->VisitPointersNonvirtual(this);
       } else {
         WeakPropertyPtr raw_weak = static_cast<WeakPropertyPtr>(raw_obj);
         size = ProcessWeakProperty(raw_weak, did_mark);
@@ -188,9 +188,9 @@
     delayed_weak_properties_ = WeakProperty::null();
     intptr_t weak_properties_cleared = 0;
     while (cur_weak != WeakProperty::null()) {
-      WeakPropertyPtr next_weak = cur_weak->ptr()->next_;
-      cur_weak->ptr()->next_ = WeakProperty::null();
-      RELEASE_ASSERT(!cur_weak->ptr()->key_->ptr()->IsMarked());
+      WeakPropertyPtr next_weak = cur_weak->untag()->next_;
+      cur_weak->untag()->next_ = WeakProperty::null();
+      RELEASE_ASSERT(!cur_weak->untag()->key_->untag()->IsMarked());
       WeakProperty::Clear(cur_weak);
       weak_properties_cleared++;
       // Advance to next weak property in the queue.
@@ -209,7 +209,7 @@
     ASSERT(raw_obj->IsOldObject());
 
     // Push the marked object on the marking stack.
-    ASSERT(raw_obj->ptr()->IsMarked());
+    ASSERT(raw_obj->untag()->IsMarked());
     work_list_.Push(raw_obj);
   }
 
@@ -219,10 +219,10 @@
       raw_obj = OldPage::ToWritable(raw_obj);
     }
     if (!sync) {
-      raw_obj->ptr()->SetMarkBitUnsynchronized();
+      raw_obj->untag()->SetMarkBitUnsynchronized();
       return true;
     } else {
-      return raw_obj->ptr()->TryAcquireMarkBit();
+      return raw_obj->untag()->TryAcquireMarkBit();
     }
   }
 
@@ -244,7 +244,7 @@
     // was allocated after the concurrent marker started. It can read either a
     // zero or the header of an object allocated black, both of which appear
     // marked.
-    if (raw_obj->ptr()->IsMarkedIgnoreRace()) {
+    if (raw_obj->untag()->IsMarkedIgnoreRace()) {
       return;
     }
 
@@ -290,7 +290,7 @@
   if (!raw_obj->IsOldObject()) {
     return false;
   }
-  return !raw_obj->ptr()->IsMarked();
+  return !raw_obj->untag()->IsMarked();
 }
 
 class MarkingWeakVisitor : public HandleVisitor {
@@ -302,7 +302,7 @@
   void VisitHandle(uword addr) {
     FinalizablePersistentHandle* handle =
         reinterpret_cast<FinalizablePersistentHandle*>(addr);
-    ObjectPtr raw_obj = handle->raw();
+    ObjectPtr raw_obj = handle->ptr();
     if (IsUnreachable(raw_obj)) {
       handle->UpdateUnreachable(thread()->isolate_group());
     }
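// [Editor's sketch] handle->ptr() above feeds IsUnreachable: after
// marking, any finalizable handle whose referent was left unmarked is
// dead and gets UpdateUnreachable (which runs its finalizer). The types
// below are hypothetical models of that sweep, not the VM's handle API.

#include <functional>
#include <vector>

struct HandleSketch {
  bool referent_marked;             // Did the marker reach the referent?
  std::function<void()> finalizer;  // Runs when the referent is dead.
};

inline void SweepHandlesSketch(std::vector<HandleSketch>* handles) {
  auto it = handles->begin();
  while (it != handles->end()) {
    if (!it->referent_marked) {
      it->finalizer();              // UpdateUnreachable analogue.
      it = handles->erase(it);      // The handle itself is freed.
    } else {
      ++it;
    }
  }
}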
@@ -426,7 +426,7 @@
     for (intptr_t i = 0; i < size; i++) {
       if (table->IsValidEntryAtExclusive(i)) {
         ObjectPtr raw_obj = table->ObjectAtExclusive(i);
-        if (raw_obj->IsHeapObject() && !raw_obj->ptr()->IsMarked()) {
+        if (raw_obj->IsHeapObject() && !raw_obj->untag()->IsMarked()) {
           table->InvalidateAtExclusive(i);
         }
       }
@@ -447,8 +447,8 @@
     while (!reading->IsEmpty()) {
       ObjectPtr raw_object = reading->Pop();
       ASSERT(!raw_object->IsForwardingCorpse());
-      ASSERT(raw_object->ptr()->IsRemembered());
-      if (raw_object->ptr()->IsMarked()) {
+      ASSERT(raw_object->untag()->IsRemembered());
+      if (raw_object->untag()->IsMarked()) {
         writing->Push(raw_object);
         if (writing->IsFull()) {
           store_buffer->PushBlock(writing, StoreBuffer::kIgnoreThreshold);
@@ -473,7 +473,7 @@
     for (ObjectPtr* current = first; current <= last; current++) {
       ObjectPtr raw_obj = *current;
       ASSERT(raw_obj->IsHeapObject());
-      if (raw_obj->IsOldObject() && !raw_obj->ptr()->IsMarked()) {
+      if (raw_obj->IsOldObject() && !raw_obj->untag()->IsMarked()) {
        // Object has become garbage. Replace it with null.
         *current = Object::null();
       }
diff --git a/runtime/vm/heap/pages.cc b/runtime/vm/heap/pages.cc
index 12d4165..43628c6 100644
--- a/runtime/vm/heap/pages.cc
+++ b/runtime/vm/heap/pages.cc
@@ -97,9 +97,9 @@
   uword obj_addr = object_start();
   uword end_addr = object_end();
   while (obj_addr < end_addr) {
-    ObjectPtr raw_obj = ObjectLayout::FromAddr(obj_addr);
+    ObjectPtr raw_obj = UntaggedObject::FromAddr(obj_addr);
     visitor->VisitObject(raw_obj);
-    obj_addr += raw_obj->ptr()->HeapSize();
+    obj_addr += raw_obj->untag()->HeapSize();
   }
   ASSERT(obj_addr == end_addr);
 }
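// [Editor's sketch] The ToAddr/FromAddr pair that this rename touches
// everywhere converts between a tagged ObjectPtr and the untagged heap
// address it points at. In the Dart VM heap pointers carry a small tag
// in the low bits, so the conversion is just a subtract/add; the tag
// value below is illustrative.

#include <cstdint>

constexpr std::uintptr_t kHeapObjectTagSketch = 1;

inline std::uintptr_t ToAddrSketch(std::uintptr_t tagged) {
  return tagged - kHeapObjectTagSketch;  // UntaggedObject::ToAddr analogue.
}
inline std::uintptr_t FromAddrSketch(std::uintptr_t addr) {
  return addr + kHeapObjectTagSketch;    // UntaggedObject::FromAddr analogue.
}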
@@ -111,8 +111,8 @@
   uword obj_addr = object_start();
   uword end_addr = object_end();
   while (obj_addr < end_addr) {
-    ObjectPtr raw_obj = ObjectLayout::FromAddr(obj_addr);
-    obj_addr += raw_obj->ptr()->VisitPointers(visitor);
+    ObjectPtr raw_obj = UntaggedObject::FromAddr(obj_addr);
+    obj_addr += raw_obj->untag()->VisitPointers(visitor);
   }
   ASSERT(obj_addr == end_addr);
 }
@@ -128,11 +128,12 @@
 
   bool table_is_empty = false;
 
-  ArrayPtr obj = static_cast<ArrayPtr>(ObjectLayout::FromAddr(object_start()));
+  ArrayPtr obj =
+      static_cast<ArrayPtr>(UntaggedObject::FromAddr(object_start()));
   ASSERT(obj->IsArray());
-  ASSERT(obj->ptr()->IsCardRemembered());
-  ObjectPtr* obj_from = obj->ptr()->from();
-  ObjectPtr* obj_to = obj->ptr()->to(Smi::Value(obj->ptr()->length_));
+  ASSERT(obj->untag()->IsCardRemembered());
+  ObjectPtr* obj_from = obj->untag()->from();
+  ObjectPtr* obj_to = obj->untag()->to(Smi::Value(obj->untag()->length_));
 
   const intptr_t size = card_table_size();
   for (intptr_t i = 0; i < size; i++) {
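// [Editor's sketch] VisitRememberedCards above implements card marking
// for large arrays: instead of one remembered bit for the whole object,
// the page keeps a bitmap with one bit per fixed-size "card", and the
// scavenger rescans only the element ranges whose card is dirty. Sizes
// here are invented for illustration.

#include <bitset>
#include <cstddef>

constexpr std::size_t kCardSizeSketch = 512;  // bytes per card (assumed)
constexpr std::size_t kMaxCardsSketch = 4096;

struct CardTableSketch {
  std::bitset<kMaxCardsSketch> dirty;
  // Write barrier for a store into the array at this byte offset.
  void MarkStore(std::size_t byte_offset) {
    dirty.set(byte_offset / kCardSizeSketch);
  }
  // The GC visits only cards whose bit is set, then clears them.
  bool TakeIfDirty(std::size_t card) {
    if (!dirty.test(card)) return false;
    dirty.reset(card);
    return true;
  }
};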
@@ -183,10 +184,10 @@
   uword end_addr = object_end();
   if (visitor->VisitRange(obj_addr, end_addr)) {
     while (obj_addr < end_addr) {
-      ObjectPtr raw_obj = ObjectLayout::FromAddr(obj_addr);
-      uword next_obj_addr = obj_addr + raw_obj->ptr()->HeapSize();
+      ObjectPtr raw_obj = UntaggedObject::FromAddr(obj_addr);
+      uword next_obj_addr = obj_addr + raw_obj->untag()->HeapSize();
       if (visitor->VisitRange(obj_addr, next_obj_addr) &&
-          raw_obj->ptr()->FindObject(visitor)) {
+          raw_obj->untag()->FindObject(visitor)) {
         return raw_obj;  // Found object, return it.
       }
       obj_addr = next_obj_addr;
@@ -887,7 +888,7 @@
  public:
   explicit HeapMapAsJSONVisitor(JSONArray* array) : array_(array) {}
   virtual void VisitObject(ObjectPtr obj) {
-    array_->AddValue(obj->ptr()->HeapSize() / kObjectAlignment);
+    array_->AddValue(obj->untag()->HeapSize() / kObjectAlignment);
     array_->AddValue(obj->GetClassId());
   }
 
@@ -1034,7 +1035,7 @@
   if (oom_reservation_ == nullptr) {
     return false;
   }
-  ObjectLayout* ptr = reinterpret_cast<ObjectLayout*>(oom_reservation_);
+  UntaggedObject* ptr = reinterpret_cast<UntaggedObject*>(oom_reservation_);
   if (!ptr->IsMarked()) {
     ptr->SetMarkBit();
   }
@@ -1056,10 +1057,10 @@
     // FreeListElements are generally held untagged, but ObjectPointerVisitors
     // expect tagged pointers.
     ObjectPtr ptr =
-        ObjectLayout::FromAddr(reinterpret_cast<uword>(oom_reservation_));
+        UntaggedObject::FromAddr(reinterpret_cast<uword>(oom_reservation_));
     visitor->VisitPointer(&ptr);
     oom_reservation_ =
-        reinterpret_cast<FreeListElement*>(ObjectLayout::ToAddr(ptr));
+        reinterpret_cast<FreeListElement*>(UntaggedObject::ToAddr(ptr));
   }
 }
 
@@ -1436,7 +1437,7 @@
 }
 
 bool PageSpace::IsObjectFromImagePages(dart::ObjectPtr object) {
-  uword object_addr = ObjectLayout::ToAddr(object);
+  uword object_addr = UntaggedObject::ToAddr(object);
   OldPage* image_page = image_pages_;
   while (image_page != nullptr) {
     if (image_page->Contains(object_addr)) {
diff --git a/runtime/vm/heap/pages.h b/runtime/vm/heap/pages.h
index 48b77ca..3004841 100644
--- a/runtime/vm/heap/pages.h
+++ b/runtime/vm/heap/pages.h
@@ -95,9 +95,9 @@
     if (alias_offset == 0) {
       return obj;  // Not aliased.
     }
-    uword addr = ObjectLayout::ToAddr(obj);
+    uword addr = UntaggedObject::ToAddr(obj);
     if (memory->Contains(addr)) {
-      return ObjectLayout::FromAddr(addr + alias_offset);
+      return UntaggedObject::FromAddr(addr + alias_offset);
     }
     // obj is executable.
     ASSERT(memory->ContainsAlias(addr));
@@ -112,9 +112,9 @@
     if (alias_offset == 0) {
       return obj;  // Not aliased.
     }
-    uword addr = ObjectLayout::ToAddr(obj);
+    uword addr = UntaggedObject::ToAddr(obj);
     if (memory->ContainsAlias(addr)) {
-      return ObjectLayout::FromAddr(addr - alias_offset);
+      return UntaggedObject::FromAddr(addr - alias_offset);
     }
     // obj is writable.
     ASSERT(memory->Contains(addr));
diff --git a/runtime/vm/heap/scavenger.cc b/runtime/vm/heap/scavenger.cc
index 2920df3..baba05a 100644
--- a/runtime/vm/heap/scavenger.cc
+++ b/runtime/vm/heap/scavenger.cc
@@ -43,7 +43,7 @@
 // object headers, and which doesn't intersect with the target address because
 // of object alignment.
 enum {
-  kForwardingMask = 1 << ObjectLayout::kCardRememberedBit,
+  kForwardingMask = 1 << UntaggedObject::kCardRememberedBit,
   kNotForwarded = 0,
   kForwarded = kForwardingMask,
 };
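// [Editor's sketch] The enum above relies on object alignment: every
// object address has the kCardRememberedBit position clear, so a
// from-space header can be overwritten with "target address | forwarded
// bit" and still be distinguished from a live header. A self-contained
// model with an invented bit position:

#include <cassert>
#include <cstdint>

constexpr std::uintptr_t kForwardingMaskSketch = 1;
constexpr std::uintptr_t kForwardedSketch = kForwardingMaskSketch;

inline bool IsForwardingSketch(std::uintptr_t header) {
  return (header & kForwardingMaskSketch) == kForwardedSketch;
}
inline std::uintptr_t ForwardingHeaderSketch(std::uintptr_t target_addr) {
  assert((target_addr & kForwardingMaskSketch) == 0);  // alignment invariant
  return target_addr | kForwardedSketch;
}
inline std::uintptr_t ForwardedAddrSketch(std::uintptr_t header) {
  return header & ~kForwardingMaskSketch;
}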
@@ -127,22 +127,23 @@
     // First we forward all fields of the typed data view.
     VisitPointers(first, last);
 
-    if (view->ptr()->data_ == nullptr) {
-      ASSERT(RawSmiValue(view->ptr()->offset_in_bytes_) == 0 &&
-             RawSmiValue(view->ptr()->length_) == 0);
+    if (view->untag()->data_ == nullptr) {
+      ASSERT(RawSmiValue(view->untag()->offset_in_bytes_) == 0 &&
+             RawSmiValue(view->untag()->length_) == 0);
       return;
     }
 
     // Validate 'this' is a typed data view.
     const uword view_header =
-        *reinterpret_cast<uword*>(ObjectLayout::ToAddr(view));
+        *reinterpret_cast<uword*>(UntaggedObject::ToAddr(view));
     ASSERT(!IsForwarding(view_header) || view->IsOldObject());
     ASSERT(IsTypedDataViewClassId(view->GetClassIdMayBeSmi()));
 
     // Validate that the backing store is not a forwarding word.
-    TypedDataBasePtr td = view->ptr()->typed_data_;
+    TypedDataBasePtr td = view->untag()->typed_data_;
     ASSERT(td->IsHeapObject());
-    const uword td_header = *reinterpret_cast<uword*>(ObjectLayout::ToAddr(td));
+    const uword td_header =
+        *reinterpret_cast<uword*>(UntaggedObject::ToAddr(td));
     ASSERT(!IsForwarding(td_header) || td->IsOldObject());
 
     // We can always obtain the class id from the forwarded backing store.
@@ -156,7 +157,7 @@
 
     // Now we update the inner pointer.
     ASSERT(IsTypedDataClassId(cid));
-    view->ptr()->RecomputeDataFieldForInternalTypedData();
+    view->untag()->RecomputeDataFieldForInternalTypedData();
   }
 
   virtual void VisitPointers(ObjectPtr* first, ObjectPtr* last) {
@@ -172,7 +173,7 @@
     visiting_old_object_ = obj;
     if (obj != nullptr) {
       // Card update happens in OldPage::VisitRememberedCards.
-      ASSERT(!obj->ptr()->IsCardRemembered());
+      ASSERT(!obj->untag()->IsCardRemembered());
     }
   }
 
@@ -255,10 +256,10 @@
   void UpdateStoreBuffer(ObjectPtr* p, ObjectPtr obj) {
     ASSERT(obj->IsHeapObject());
     // If the newly written object is not a new object, drop it immediately.
-    if (!obj->IsNewObject() || visiting_old_object_->ptr()->IsRemembered()) {
+    if (!obj->IsNewObject() || visiting_old_object_->untag()->IsRemembered()) {
       return;
     }
-    visiting_old_object_->ptr()->SetRememberedBit();
+    visiting_old_object_->untag()->SetRememberedBit();
     thread_->StoreBufferAddObjectGC(visiting_old_object_);
   }
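// [Editor's sketch] UpdateStoreBuffer above is the GC-time half of the
// generational write barrier: only an old object that has just received
// a pointer to a new object needs to be revisited at the next scavenge,
// and the remembered bit ensures it is enqueued at most once. Toy model
// with illustrative types:

#include <vector>

struct ObjSketch {
  bool is_new = false;
  bool remembered = false;
};

inline void RecordStoreSketch(ObjSketch* holder, ObjSketch* stored,
                              std::vector<ObjSketch*>* store_buffer) {
  if (!stored->is_new || holder->remembered) return;  // old->new only, once
  holder->remembered = true;
  store_buffer->push_back(holder);
}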
 
@@ -271,7 +272,7 @@
       return;
     }
 
-    uword raw_addr = ObjectLayout::ToAddr(raw_obj);
+    uword raw_addr = UntaggedObject::ToAddr(raw_obj);
     // The scavenger is only interested in objects located in the from space.
     ASSERT(from_->Contains(raw_addr));
     // Read the header word of the object and determine if the object has
@@ -283,7 +284,7 @@
       // Get the new location of the object.
       new_obj = ForwardedObj(header);
     } else {
-      intptr_t size = raw_obj->ptr()->HeapSize(header);
+      intptr_t size = raw_obj->untag()->HeapSize(header);
       uword new_addr = 0;
       // Check whether object should be promoted.
       if (!NewPage::Of(raw_obj)->IsSurvivor(raw_addr)) {
@@ -298,7 +299,7 @@
         if (LIKELY(new_addr != 0)) {
           // If promotion succeeded then we need to remember it so that it can
           // be traversed later.
-          promoted_list_.Push(ObjectLayout::FromAddr(new_addr));
+          promoted_list_.Push(UntaggedObject::FromAddr(new_addr));
           bytes_promoted_ += size;
         } else {
           // Promotion did not succeed. Copy into the to space instead.
@@ -316,26 +317,26 @@
       objcpy(reinterpret_cast<void*>(new_addr),
              reinterpret_cast<void*>(raw_addr), size);
 
-      new_obj = ObjectLayout::FromAddr(new_addr);
+      new_obj = UntaggedObject::FromAddr(new_addr);
       if (new_obj->IsOldObject()) {
         // Promoted: update age/barrier tags.
         uword tags = static_cast<uword>(header);
-        tags = ObjectLayout::OldBit::update(true, tags);
-        tags = ObjectLayout::OldAndNotRememberedBit::update(true, tags);
-        tags = ObjectLayout::NewBit::update(false, tags);
+        tags = UntaggedObject::OldBit::update(true, tags);
+        tags = UntaggedObject::OldAndNotRememberedBit::update(true, tags);
+        tags = UntaggedObject::NewBit::update(false, tags);
         // Setting the forwarding pointer below will make this tenured object
         // visible to the concurrent marker, but we haven't visited its slots
         // yet. We mark the object here to prevent the concurrent marker from
         // adding it to the mark stack and visiting its unprocessed slots. We
         // push it to the mark stack after forwarding its slots.
-        tags = ObjectLayout::OldAndNotMarkedBit::update(!thread_->is_marking(),
-                                                        tags);
-        new_obj->ptr()->tags_ = tags;
+        tags = UntaggedObject::OldAndNotMarkedBit::update(
+            !thread_->is_marking(), tags);
+        new_obj->untag()->tags_ = tags;
       }
 
-      intptr_t cid = ObjectLayout::ClassIdTag::decode(header);
+      intptr_t cid = UntaggedObject::ClassIdTag::decode(header);
       if (IsTypedDataClassId(cid)) {
-        static_cast<TypedDataPtr>(new_obj)->ptr()->RecomputeDataField();
+        static_cast<TypedDataPtr>(new_obj)->untag()->RecomputeDataField();
       }
 
       // Try to install forwarding address.
@@ -451,7 +452,7 @@
   void VisitHandle(uword addr) {
     FinalizablePersistentHandle* handle =
         reinterpret_cast<FinalizablePersistentHandle*>(addr);
-    ObjectPtr* p = handle->raw_addr();
+    ObjectPtr* p = handle->ptr_addr();
     if (scavenger_->IsUnreachable(p)) {
       handle->UpdateUnreachable(thread()->isolate_group());
     } else {
@@ -775,8 +776,8 @@
   void VisitPointers(ObjectPtr* from, ObjectPtr* to) {
     for (ObjectPtr* ptr = from; ptr <= to; ptr++) {
       ObjectPtr raw_obj = *ptr;
-      RELEASE_ASSERT(!raw_obj->ptr()->IsCardRemembered());
-      RELEASE_ASSERT(raw_obj->ptr()->IsRemembered());
+      RELEASE_ASSERT(!raw_obj->untag()->IsCardRemembered());
+      RELEASE_ASSERT(raw_obj->untag()->IsRemembered());
       RELEASE_ASSERT(raw_obj->IsOldObject());
       in_store_buffer_->Add(raw_obj);
     }
@@ -799,18 +800,18 @@
     if (raw_obj->IsPseudoObject()) return;
     RELEASE_ASSERT(raw_obj->IsOldObject());
 
-    if (raw_obj->ptr()->IsCardRemembered()) {
-      RELEASE_ASSERT(!raw_obj->ptr()->IsRemembered());
+    if (raw_obj->untag()->IsCardRemembered()) {
+      RELEASE_ASSERT(!raw_obj->untag()->IsRemembered());
       // TODO(rmacnak): Verify card tables.
       return;
     }
 
-    RELEASE_ASSERT(raw_obj->ptr()->IsRemembered() ==
+    RELEASE_ASSERT(raw_obj->untag()->IsRemembered() ==
                    in_store_buffer_->Contains(raw_obj));
 
     visiting_ = raw_obj;
-    is_remembered_ = raw_obj->ptr()->IsRemembered();
-    raw_obj->ptr()->VisitPointers(this);
+    is_remembered_ = raw_obj->untag()->IsRemembered();
+    raw_obj->untag()->VisitPointers(this);
   }
 
   void VisitPointers(ObjectPtr* from, ObjectPtr* to) {
@@ -825,7 +826,7 @@
               " reverse-continue to find the store with a missing barrier.\n",
               static_cast<uword>(visiting_), static_cast<uword>(raw_obj), ptr);
         }
-        RELEASE_ASSERT(to_->Contains(ObjectLayout::ToAddr(raw_obj)));
+        RELEASE_ASSERT(to_->Contains(UntaggedObject::ToAddr(raw_obj)));
       }
     }
   }
@@ -1023,12 +1024,12 @@
     while (!pending->IsEmpty()) {
       ObjectPtr raw_object = pending->Pop();
       ASSERT(!raw_object->IsForwardingCorpse());
-      ASSERT(raw_object->ptr()->IsRemembered());
-      raw_object->ptr()->ClearRememberedBit();
+      ASSERT(raw_object->untag()->IsRemembered());
+      raw_object->untag()->ClearRememberedBit();
       visitor->VisitingOldObject(raw_object);
       // Note that this treats old-space WeakProperties as strong. A dead key
       // won't be reclaimed until after the key is promoted.
-      raw_object->ptr()->VisitPointersNonvirtual(visitor);
+      raw_object->untag()->VisitPointersNonvirtual(visitor);
     }
     pending->Reset();
     // Return the emptied block for recycling (no need to check threshold).
@@ -1101,7 +1102,7 @@
   if (!raw_obj->IsNewObject()) {
     return false;
   }
-  uword raw_addr = ObjectLayout::ToAddr(raw_obj);
+  uword raw_addr = UntaggedObject::ToAddr(raw_obj);
   if (to_->Contains(raw_addr)) {
     return false;
   }
@@ -1125,7 +1126,7 @@
   while (scan_ != nullptr) {
     uword resolved_top = scan_->resolved_top_;
     while (resolved_top < scan_->top_) {
-      ObjectPtr raw_obj = ObjectLayout::FromAddr(resolved_top);
+      ObjectPtr raw_obj = UntaggedObject::FromAddr(resolved_top);
       resolved_top += ProcessCopied(raw_obj);
     }
     scan_->resolved_top_ = resolved_top;
@@ -1146,10 +1147,10 @@
     // Resolve or copy all objects referred to by the current object. This
     // can potentially push more objects on this stack as well as add more
     // objects to be resolved in the to space.
-    ASSERT(!raw_object->ptr()->IsRemembered());
+    ASSERT(!raw_object->untag()->IsRemembered());
     VisitingOldObject(raw_object);
-    raw_object->ptr()->VisitPointersNonvirtual(this);
-    if (raw_object->ptr()->IsMarked()) {
+    raw_object->untag()->VisitPointersNonvirtual(this);
+    if (raw_object->untag()->IsMarked()) {
       // Complete our promise from ScavengePointer. Note that marker cannot
       // visit this object until it pops a block from the mark stack, which
       // involves a memory fence from the mutex, so even on architectures
@@ -1171,23 +1172,23 @@
   WeakPropertyPtr cur_weak = delayed_weak_properties_;
   delayed_weak_properties_ = WeakProperty::null();
   while (cur_weak != WeakProperty::null()) {
-    WeakPropertyPtr next_weak = cur_weak->ptr()->next_;
+    WeakPropertyPtr next_weak = cur_weak->untag()->next_;
     // Promoted weak properties are not enqueued, so we can guarantee that
     // no store barriers are needed here.
     ASSERT(cur_weak->IsNewObject());
-    ObjectPtr raw_key = cur_weak->ptr()->key_;
+    ObjectPtr raw_key = cur_weak->untag()->key_;
     ASSERT(raw_key->IsHeapObject());
     // Key still points into from space even if the object has been
     // promoted to old space by now. The key will be updated accordingly
     // below when VisitPointers is run.
     ASSERT(raw_key->IsNewObject());
-    uword raw_addr = ObjectLayout::ToAddr(raw_key);
+    uword raw_addr = UntaggedObject::ToAddr(raw_key);
     ASSERT(from_->Contains(raw_addr));
     uword header = *reinterpret_cast<uword*>(raw_addr);
     // Reset the next pointer in the weak property.
-    cur_weak->ptr()->next_ = WeakProperty::null();
+    cur_weak->untag()->next_ = WeakProperty::null();
     if (IsForwarding(header)) {
-      cur_weak->ptr()->VisitPointersNonvirtual(this);
+      cur_weak->untag()->VisitPointersNonvirtual(this);
     } else {
       EnqueueWeakProperty(cur_weak);
     }
@@ -1228,12 +1229,12 @@
   ASSERT(raw_weak->IsNewObject());
   ASSERT(raw_weak->IsWeakProperty());
 #if defined(DEBUG)
-  uword raw_addr = ObjectLayout::ToAddr(raw_weak);
+  uword raw_addr = UntaggedObject::ToAddr(raw_weak);
   uword header = *reinterpret_cast<uword*>(raw_addr);
   ASSERT(!IsForwarding(header));
 #endif  // defined(DEBUG)
-  ASSERT(raw_weak->ptr()->next_ == WeakProperty::null());
-  raw_weak->ptr()->next_ = delayed_weak_properties_;
+  ASSERT(raw_weak->untag()->next_ == WeakProperty::null());
+  raw_weak->untag()->next_ = delayed_weak_properties_;
   delayed_weak_properties_ = raw_weak;
 }
 
@@ -1243,19 +1244,19 @@
   if (UNLIKELY(class_id == kWeakPropertyCid)) {
     WeakPropertyPtr raw_weak = static_cast<WeakPropertyPtr>(raw_obj);
     // The fate of the weak property is determined by its key.
-    ObjectPtr raw_key = raw_weak->ptr()->key_;
+    ObjectPtr raw_key = raw_weak->untag()->key_;
     if (raw_key->IsHeapObject() && raw_key->IsNewObject()) {
-      uword raw_addr = ObjectLayout::ToAddr(raw_key);
+      uword raw_addr = UntaggedObject::ToAddr(raw_key);
       uword header = *reinterpret_cast<uword*>(raw_addr);
       if (!IsForwarding(header)) {
         // Key is white.  Enqueue the weak property.
         EnqueueWeakProperty(raw_weak);
-        return raw_weak->ptr()->HeapSize();
+        return raw_weak->untag()->HeapSize();
       }
     }
     // Key is gray or black.  Make the weak property black.
   }
-  return raw_obj->ptr()->VisitPointersNonvirtual(this);
+  return raw_obj->untag()->VisitPointersNonvirtual(this);
 }
 
 void Scavenger::MournWeakTables() {
@@ -1268,7 +1269,7 @@
       if (table->IsValidEntryAtExclusive(i)) {
         ObjectPtr raw_obj = table->ObjectAtExclusive(i);
         ASSERT(raw_obj->IsHeapObject());
-        uword raw_addr = ObjectLayout::ToAddr(raw_obj);
+        uword raw_addr = UntaggedObject::ToAddr(raw_obj);
         uword header = *reinterpret_cast<uword*>(raw_addr);
         if (IsForwarding(header)) {
           // The object has survived.  Preserve its record.
@@ -1320,13 +1321,13 @@
   WeakPropertyPtr cur_weak = delayed_weak_properties_;
   delayed_weak_properties_ = WeakProperty::null();
   while (cur_weak != WeakProperty::null()) {
-    WeakPropertyPtr next_weak = cur_weak->ptr()->next_;
+    WeakPropertyPtr next_weak = cur_weak->untag()->next_;
     // Reset the next pointer in the weak property.
-    cur_weak->ptr()->next_ = WeakProperty::null();
+    cur_weak->untag()->next_ = WeakProperty::null();
 
 #if defined(DEBUG)
-    ObjectPtr raw_key = cur_weak->ptr()->key_;
-    uword raw_addr = ObjectLayout::ToAddr(raw_key);
+    ObjectPtr raw_key = cur_weak->untag()->key_;
+    uword raw_addr = UntaggedObject::ToAddr(raw_key);
     uword header = *reinterpret_cast<uword*>(raw_addr);
     ASSERT(!IsForwarding(header));
     ASSERT(raw_key->IsHeapObject());
@@ -1369,10 +1370,10 @@
     uword cur = page->object_start();
     if (!visitor->VisitRange(cur, page->object_end())) continue;
     while (cur < page->object_end()) {
-      ObjectPtr raw_obj = ObjectLayout::FromAddr(cur);
-      uword next = cur + raw_obj->ptr()->HeapSize();
+      ObjectPtr raw_obj = UntaggedObject::FromAddr(cur);
+      uword next = cur + raw_obj->untag()->HeapSize();
       if (visitor->VisitRange(cur, next) &&
-          raw_obj->ptr()->FindObject(visitor)) {
+          raw_obj->untag()->FindObject(visitor)) {
         return raw_obj;  // Found object, return it.
       }
       cur = next;
@@ -1591,11 +1592,11 @@
   class ReverseFromForwardingVisitor : public ObjectVisitor {
     uword ReadHeader(ObjectPtr raw_obj) {
       return reinterpret_cast<std::atomic<uword>*>(
-                 ObjectLayout::ToAddr(raw_obj))
+                 UntaggedObject::ToAddr(raw_obj))
           ->load(std::memory_order_relaxed);
     }
     void WriteHeader(ObjectPtr raw_obj, uword header) {
-      reinterpret_cast<std::atomic<uword>*>(ObjectLayout::ToAddr(raw_obj))
+      reinterpret_cast<std::atomic<uword>*>(UntaggedObject::ToAddr(raw_obj))
           ->store(header, std::memory_order_relaxed);
     }
     void VisitObject(ObjectPtr from_obj) {
@@ -1603,20 +1604,20 @@
       if (IsForwarding(from_header)) {
         ObjectPtr to_obj = ForwardedObj(from_header);
         uword to_header = ReadHeader(to_obj);
-        intptr_t size = to_obj->ptr()->HeapSize();
+        intptr_t size = to_obj->untag()->HeapSize();
 
         // Reset the age bits in case this was a promotion.
         uword from_header = static_cast<uword>(to_header);
-        from_header = ObjectLayout::OldBit::update(false, from_header);
+        from_header = UntaggedObject::OldBit::update(false, from_header);
         from_header =
-            ObjectLayout::OldAndNotRememberedBit::update(false, from_header);
-        from_header = ObjectLayout::NewBit::update(true, from_header);
+            UntaggedObject::OldAndNotRememberedBit::update(false, from_header);
+        from_header = UntaggedObject::NewBit::update(true, from_header);
         from_header =
-            ObjectLayout::OldAndNotMarkedBit::update(false, from_header);
+            UntaggedObject::OldAndNotMarkedBit::update(false, from_header);
 
         WriteHeader(from_obj, from_header);
 
-        ForwardingCorpse::AsForwarder(ObjectLayout::ToAddr(to_obj), size)
+        ForwardingCorpse::AsForwarder(UntaggedObject::ToAddr(to_obj), size)
             ->set_target(from_obj);
       }
     }
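// [Editor's sketch] The visitor above runs when a scavenge aborts: for
// each from-space object that was already forwarded, it reads the
// to-space copy's header, flips the age bits back from "old" to "new",
// writes that header into the original object (resurrecting it in
// place), and turns the copy into a forwarding corpse. Headers can be
// raced on by other GC threads, hence the relaxed atomics mirrored here.

#include <atomic>
#include <cstdint>

inline std::uintptr_t ReadHeaderSketch(std::uintptr_t addr) {
  return reinterpret_cast<std::atomic<std::uintptr_t>*>(addr)->load(
      std::memory_order_relaxed);
}
inline void WriteHeaderSketch(std::uintptr_t addr, std::uintptr_t header) {
  reinterpret_cast<std::atomic<std::uintptr_t>*>(addr)->store(
      header, std::memory_order_relaxed);
}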
diff --git a/runtime/vm/heap/scavenger.h b/runtime/vm/heap/scavenger.h
index 1c52430..ed36fe1 100644
--- a/runtime/vm/heap/scavenger.h
+++ b/runtime/vm/heap/scavenger.h
@@ -57,17 +57,17 @@
     uword addr = object_start();
     uword end = object_end();
     while (addr < end) {
-      ObjectPtr obj = ObjectLayout::FromAddr(addr);
+      ObjectPtr obj = UntaggedObject::FromAddr(addr);
       visitor->VisitObject(obj);
-      addr += obj->ptr()->HeapSize();
+      addr += obj->untag()->HeapSize();
     }
   }
   void VisitObjectPointers(ObjectPointerVisitor* visitor) const {
     uword addr = object_start();
     uword end = object_end();
     while (addr < end) {
-      ObjectPtr obj = ObjectLayout::FromAddr(addr);
-      intptr_t size = obj->ptr()->VisitPointers(visitor);
+      ObjectPtr obj = UntaggedObject::FromAddr(addr);
+      intptr_t size = obj->untag()->VisitPointers(visitor);
       addr += size;
     }
   }
diff --git a/runtime/vm/heap/sweeper.cc b/runtime/vm/heap/sweeper.cc
index 9013eba..d814521 100644
--- a/runtime/vm/heap/sweeper.cc
+++ b/runtime/vm/heap/sweeper.cc
@@ -27,28 +27,28 @@
   uword current = start;
 
   while (current < end) {
-    ObjectPtr raw_obj = ObjectLayout::FromAddr(current);
+    ObjectPtr raw_obj = UntaggedObject::FromAddr(current);
     ASSERT(OldPage::Of(raw_obj) == page);
     // These acquire operations balance release operations in array
     // truncation, ensuring the writes creating the filler object are ordered
     // before the writes inserting the filler object into the freelist.
-    uword tags = raw_obj->ptr()->tags_.load(std::memory_order_acquire);
-    intptr_t obj_size = raw_obj->ptr()->HeapSize(tags);
-    if (ObjectLayout::IsMarked(tags)) {
+    uword tags = raw_obj->untag()->tags_.load(std::memory_order_acquire);
+    intptr_t obj_size = raw_obj->untag()->HeapSize(tags);
+    if (UntaggedObject::IsMarked(tags)) {
       // Found marked object. Clear the mark bit and update swept bytes.
-      raw_obj->ptr()->ClearMarkBit();
+      raw_obj->untag()->ClearMarkBit();
       used_in_bytes += obj_size;
     } else {
       uword free_end = current + obj_size;
       while (free_end < end) {
-        ObjectPtr next_obj = ObjectLayout::FromAddr(free_end);
-        tags = next_obj->ptr()->tags_.load(std::memory_order_acquire);
-        if (ObjectLayout::IsMarked(tags)) {
+        ObjectPtr next_obj = UntaggedObject::FromAddr(free_end);
+        tags = next_obj->untag()->tags_.load(std::memory_order_acquire);
+        if (UntaggedObject::IsMarked(tags)) {
           // Reached the end of the free block.
           break;
         }
         // Expand the free block by the size of this object.
-        free_end += next_obj->ptr()->HeapSize(tags);
+        free_end += next_obj->untag()->HeapSize(tags);
       }
       obj_size = free_end - current;
       if (is_executable) {
@@ -84,21 +84,22 @@
   ASSERT(!page->is_image_page());
 
   intptr_t words_to_end = 0;
-  ObjectPtr raw_obj = ObjectLayout::FromAddr(page->object_start());
+  ObjectPtr raw_obj = UntaggedObject::FromAddr(page->object_start());
   ASSERT(OldPage::Of(raw_obj) == page);
-  if (raw_obj->ptr()->IsMarked()) {
-    raw_obj->ptr()->ClearMarkBit();
-    words_to_end = (raw_obj->ptr()->HeapSize() >> kWordSizeLog2);
+  if (raw_obj->untag()->IsMarked()) {
+    raw_obj->untag()->ClearMarkBit();
+    words_to_end = (raw_obj->untag()->HeapSize() >> kWordSizeLog2);
   }
 #ifdef DEBUG
   // Array::MakeFixedLength creates trailing filler objects,
   // but they are always unreachable. Verify that they are not marked.
-  uword current = ObjectLayout::ToAddr(raw_obj) + raw_obj->ptr()->HeapSize();
+  uword current =
+      UntaggedObject::ToAddr(raw_obj) + raw_obj->untag()->HeapSize();
   uword end = page->object_end();
   while (current < end) {
-    ObjectPtr cur_obj = ObjectLayout::FromAddr(current);
-    ASSERT(!cur_obj->ptr()->IsMarked());
-    intptr_t obj_size = cur_obj->ptr()->HeapSize();
+    ObjectPtr cur_obj = UntaggedObject::FromAddr(current);
+    ASSERT(!cur_obj->untag()->IsMarked());
+    intptr_t obj_size = cur_obj->untag()->HeapSize();
     memset(reinterpret_cast<void*>(current), Heap::kZapByte, obj_size);
     current += obj_size;
   }
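// [Editor's sketch] The sweep loop above in one toy model: survivors
// get their mark bit cleared for the next cycle, and each maximal run
// of unmarked objects is coalesced into a single free block. The heap
// is modeled as a vector of (size, marked) records instead of memory.

#include <cstddef>
#include <cstdint>
#include <vector>

struct SlotSketch {
  std::intptr_t size;
  bool marked;
};

inline std::intptr_t SweepSketch(std::vector<SlotSketch>* page,
                                 std::vector<std::intptr_t>* free_blocks) {
  std::intptr_t used = 0;
  for (std::size_t i = 0; i < page->size();) {
    if ((*page)[i].marked) {
      (*page)[i].marked = false;    // ClearMarkBit analogue.
      used += (*page)[i].size;
      ++i;
    } else {
      std::intptr_t free_size = 0;  // Coalesce until the next marked object.
      while (i < page->size() && !(*page)[i].marked) {
        free_size += (*page)[i].size;
        ++i;
      }
      free_blocks->push_back(free_size);
    }
  }
  return used;  // Bytes retained; mirrors used_in_bytes above.
}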
diff --git a/runtime/vm/heap/verifier.cc b/runtime/vm/heap/verifier.cc
index 4dc7da0..34a6593 100644
--- a/runtime/vm/heap/verifier.cc
+++ b/runtime/vm/heap/verifier.cc
@@ -18,22 +18,22 @@
 
 void VerifyObjectVisitor::VisitObject(ObjectPtr raw_obj) {
   if (raw_obj->IsHeapObject()) {
-    uword raw_addr = ObjectLayout::ToAddr(raw_obj);
+    uword raw_addr = UntaggedObject::ToAddr(raw_obj);
     if (raw_obj->IsFreeListElement() || raw_obj->IsForwardingCorpse()) {
-      if (raw_obj->IsOldObject() && raw_obj->ptr()->IsMarked()) {
+      if (raw_obj->IsOldObject() && raw_obj->untag()->IsMarked()) {
         FATAL1("Marked free list element encountered %#" Px "\n", raw_addr);
       }
     } else {
       switch (mark_expectation_) {
         case kForbidMarked:
-          if (raw_obj->IsOldObject() && raw_obj->ptr()->IsMarked()) {
+          if (raw_obj->IsOldObject() && raw_obj->untag()->IsMarked()) {
             FATAL1("Marked object encountered %#" Px "\n", raw_addr);
           }
           break;
         case kAllowMarked:
           break;
         case kRequireMarked:
-          if (raw_obj->IsOldObject() && !raw_obj->ptr()->IsMarked()) {
+          if (raw_obj->IsOldObject() && !raw_obj->untag()->IsMarked()) {
             FATAL1("Unmarked object encountered %#" Px "\n", raw_addr);
           }
           break;
@@ -53,8 +53,8 @@
             allocated_set_->Contains(OldPage::ToWritable(raw_obj))) {
           continue;
         }
-        FATAL2("Invalid object pointer encountered %#" Px ": %#" Px "\n",
-               reinterpret_cast<uword>(current), static_cast<uword>(raw_obj));
+        uword raw_addr = UntaggedObject::ToAddr(raw_obj);
+        FATAL1("Invalid object pointer encountered %#" Px "\n", raw_addr);
       }
     }
   }
@@ -63,7 +63,7 @@
 void VerifyWeakPointersVisitor::VisitHandle(uword addr) {
   FinalizablePersistentHandle* handle =
       reinterpret_cast<FinalizablePersistentHandle*>(addr);
-  ObjectPtr raw_obj = handle->raw();
+  ObjectPtr raw_obj = handle->ptr();
   visitor_->VisitPointer(&raw_obj);
 }
 
@@ -107,7 +107,7 @@
   if (!FLAG_enable_isolate_groups || FLAG_precompiled_mode) {
     if ((obj->GetClassId() >= kInstanceCid) &&
         (obj->GetClassId() != kTypeArgumentsCid)) {
-      if (obj->ptr()->IsCanonical()) {
+      if (obj->untag()->IsCanonical()) {
         instanceHandle_ ^= obj;
         const bool is_canonical = instanceHandle_.CheckIsCanonical(thread_);
         if (!is_canonical) {
diff --git a/runtime/vm/heap/weak_code.cc b/runtime/vm/heap/weak_code.cc
index 97f832a..a50936a 100644
--- a/runtime/vm/heap/weak_code.cc
+++ b/runtime/vm/heap/weak_code.cc
@@ -50,7 +50,7 @@
   WeakProperty& weak_property = WeakProperty::Handle();
   for (intptr_t i = 0; i < dependent_code.Length(); i++) {
     weak_property ^= dependent_code.At(i);
-    if (code.raw() == weak_property.key()) {
+    if (code.ptr() == weak_property.key()) {
       return true;
     }
   }
@@ -59,7 +59,7 @@
 
 void WeakCodeReferences::DisableCode() {
   Thread* thread = Thread::Current();
-  const Array& code_objects = Array::Handle(thread->zone(), array_.raw());
+  const Array& code_objects = Array::Handle(thread->zone(), array_.ptr());
 #if defined(DART_PRECOMPILED_RUNTIME)
   ASSERT(code_objects.IsNull());
   return;
@@ -101,10 +101,10 @@
     }
     owner = code.owner();
     if (owner.IsFunction()) {
-      function ^= owner.raw();
+      function ^= owner.ptr();
     } else if (owner.IsClass()) {
       Class& cls = Class::Handle();
-      cls ^= owner.raw();
+      cls ^= owner.ptr();
       cls.DisableAllocationStub();
       continue;
     } else if (owner.IsNull()) {
@@ -113,10 +113,10 @@
     }
 
     // If function uses dependent code switch it to unoptimized.
-    if (code.is_optimized() && (function.CurrentCode() == code.raw())) {
+    if (code.is_optimized() && (function.CurrentCode() == code.ptr())) {
       ReportSwitchingCode(code);
       function.SwitchToUnoptimizedCode();
-    } else if (function.unoptimized_code() == code.raw()) {
+    } else if (function.unoptimized_code() == code.ptr()) {
       ReportSwitchingCode(code);
       function.SetWasCompiled(false);
       function.ClearICDataArray();
diff --git a/runtime/vm/heap/weak_table_test.cc b/runtime/vm/heap/weak_table_test.cc
index edbca24..f7ceb89 100644
--- a/runtime/vm/heap/weak_table_test.cc
+++ b/runtime/vm/heap/weak_table_test.cc
@@ -20,43 +20,43 @@
   // Initially absent.
   Heap* heap = thread->heap();
   const intptr_t kNoValue = WeakTable::kNoValue;
-  EXPECT_EQ(kNoValue, heap->GetObjectId(old_obj.raw()));
-  EXPECT_EQ(kNoValue, heap->GetObjectId(new_obj.raw()));
-  EXPECT_EQ(kNoValue, heap->GetObjectId(imm_obj.raw()));
+  EXPECT_EQ(kNoValue, heap->GetObjectId(old_obj.ptr()));
+  EXPECT_EQ(kNoValue, heap->GetObjectId(new_obj.ptr()));
+  EXPECT_EQ(kNoValue, heap->GetObjectId(imm_obj.ptr()));
 
   // Found after insert.
-  heap->SetObjectId(old_obj.raw(), 100);
-  heap->SetObjectId(new_obj.raw(), 200);
-  heap->SetObjectId(imm_obj.raw(), 300);
-  EXPECT_EQ(100, heap->GetObjectId(old_obj.raw()));
-  EXPECT_EQ(200, heap->GetObjectId(new_obj.raw()));
-  EXPECT_EQ(300, heap->GetObjectId(imm_obj.raw()));
+  heap->SetObjectId(old_obj.ptr(), 100);
+  heap->SetObjectId(new_obj.ptr(), 200);
+  heap->SetObjectId(imm_obj.ptr(), 300);
+  EXPECT_EQ(100, heap->GetObjectId(old_obj.ptr()));
+  EXPECT_EQ(200, heap->GetObjectId(new_obj.ptr()));
+  EXPECT_EQ(300, heap->GetObjectId(imm_obj.ptr()));
 
   // Found after update.
-  heap->SetObjectId(old_obj.raw(), 400);
-  heap->SetObjectId(new_obj.raw(), 500);
-  heap->SetObjectId(imm_obj.raw(), 600);
-  EXPECT_EQ(400, heap->GetObjectId(old_obj.raw()));
-  EXPECT_EQ(500, heap->GetObjectId(new_obj.raw()));
-  EXPECT_EQ(600, heap->GetObjectId(imm_obj.raw()));
+  heap->SetObjectId(old_obj.ptr(), 400);
+  heap->SetObjectId(new_obj.ptr(), 500);
+  heap->SetObjectId(imm_obj.ptr(), 600);
+  EXPECT_EQ(400, heap->GetObjectId(old_obj.ptr()));
+  EXPECT_EQ(500, heap->GetObjectId(new_obj.ptr()));
+  EXPECT_EQ(600, heap->GetObjectId(imm_obj.ptr()));
 
   // Found after GC.
   GCTestHelper::CollectNewSpace();
-  EXPECT_EQ(400, heap->GetObjectId(old_obj.raw()));
-  EXPECT_EQ(500, heap->GetObjectId(new_obj.raw()));
-  EXPECT_EQ(600, heap->GetObjectId(imm_obj.raw()));
+  EXPECT_EQ(400, heap->GetObjectId(old_obj.ptr()));
+  EXPECT_EQ(500, heap->GetObjectId(new_obj.ptr()));
+  EXPECT_EQ(600, heap->GetObjectId(imm_obj.ptr()));
 
   // Found after GC.
   GCTestHelper::CollectOldSpace();
-  EXPECT_EQ(400, heap->GetObjectId(old_obj.raw()));
-  EXPECT_EQ(500, heap->GetObjectId(new_obj.raw()));
-  EXPECT_EQ(600, heap->GetObjectId(imm_obj.raw()));
+  EXPECT_EQ(400, heap->GetObjectId(old_obj.ptr()));
+  EXPECT_EQ(500, heap->GetObjectId(new_obj.ptr()));
+  EXPECT_EQ(600, heap->GetObjectId(imm_obj.ptr()));
 
   // Absent after reset.
   heap->ResetObjectIdTable();
-  EXPECT_EQ(kNoValue, heap->GetObjectId(old_obj.raw()));
-  EXPECT_EQ(kNoValue, heap->GetObjectId(new_obj.raw()));
-  EXPECT_EQ(kNoValue, heap->GetObjectId(imm_obj.raw()));
+  EXPECT_EQ(kNoValue, heap->GetObjectId(old_obj.ptr()));
+  EXPECT_EQ(kNoValue, heap->GetObjectId(new_obj.ptr()));
+  EXPECT_EQ(kNoValue, heap->GetObjectId(imm_obj.ptr()));
 }
 
 }  // namespace dart
diff --git a/runtime/vm/image_snapshot.cc b/runtime/vm/image_snapshot.cc
index 02c2c18..6147404 100644
--- a/runtime/vm/image_snapshot.cc
+++ b/runtime/vm/image_snapshot.cc
@@ -42,15 +42,15 @@
             "Print sizes of all instruction objects to the given file");
 #endif
 
-const InstructionsSectionLayout* Image::ExtraInfo(const uword raw_memory,
-                                                  const uword size) {
+const UntaggedInstructionsSection* Image::ExtraInfo(const uword raw_memory,
+                                                    const uword size) {
 #if defined(DART_PRECOMPILED_RUNTIME)
   auto const raw_value =
       FieldValue(raw_memory, HeaderField::InstructionsSectionOffset);
   if (raw_value != kNoInstructionsSection) {
     ASSERT(raw_value >= kHeaderSize);
     ASSERT(raw_value <= size - InstructionsSection::HeaderSize());
-    auto const layout = reinterpret_cast<const InstructionsSectionLayout*>(
+    auto const layout = reinterpret_cast<const UntaggedInstructionsSection*>(
         raw_memory + raw_value);
     // The instructions section is likely non-empty in bare instructions mode
     // (unless splitting into multiple outputs and there are no Code objects
@@ -146,8 +146,8 @@
   ObjectPtr obj = key;
   ASSERT(!obj->IsSmi());
 
-  uword body = ObjectLayout::ToAddr(obj) + sizeof(ObjectLayout);
-  uword end = ObjectLayout::ToAddr(obj) + obj->ptr()->HeapSize();
+  uword body = UntaggedObject::ToAddr(obj) + sizeof(UntaggedObject);
+  uword end = UntaggedObject::ToAddr(obj) + obj->untag()->HeapSize();
 
   uint32_t hash = obj->GetClassId();
   // Don't include the header. Objects in the image are pre-marked, but objects
@@ -169,16 +169,16 @@
     return false;
   }
 
-  intptr_t heap_size = a->ptr()->HeapSize();
-  if (b->ptr()->HeapSize() != heap_size) {
+  intptr_t heap_size = a->untag()->HeapSize();
+  if (b->untag()->HeapSize() != heap_size) {
     return false;
   }
 
   // Don't include the header. Objects in the image are pre-marked, but objects
   // in the current isolate are not.
-  uword body_a = ObjectLayout::ToAddr(a) + sizeof(ObjectLayout);
-  uword body_b = ObjectLayout::ToAddr(b) + sizeof(ObjectLayout);
-  uword body_size = heap_size - sizeof(ObjectLayout);
+  uword body_a = UntaggedObject::ToAddr(a) + sizeof(UntaggedObject);
+  uword body_b = UntaggedObject::ToAddr(b) + sizeof(UntaggedObject);
+  uword body_size = heap_size - sizeof(UntaggedObject);
   return 0 == memcmp(reinterpret_cast<const void*>(body_a),
                      reinterpret_cast<const void*>(body_b), body_size);
 }
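// [Editor's sketch] The dedup hash/equality above deliberately skips
// the header word: image objects are pre-marked while live objects are
// not, so identical objects would otherwise never compare equal. A
// standalone model ('header_size' stands in for sizeof(UntaggedObject)):

#include <cstddef>
#include <cstdint>
#include <cstring>

inline bool BodiesEqualSketch(const std::uint8_t* a, const std::uint8_t* b,
                              std::size_t heap_size, std::size_t header_size) {
  return std::memcmp(a + header_size, b + header_size,
                     heap_size - header_size) == 0;
}

inline std::uint32_t BodyHashSketch(const std::uint8_t* obj,
                                    std::size_t heap_size,
                                    std::size_t header_size,
                                    std::uint32_t class_id) {
  std::uint32_t hash = class_id;  // Seed with the class id, as above.
  for (std::size_t i = header_size; i < heap_size; ++i) {
    hash = hash * 31 + obj[i];    // Simple rolling hash for illustration.
  }
  return hash;
}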
@@ -259,12 +259,12 @@
     case kCodeSourceMapCid: {
       auto raw_map = CodeSourceMap::RawCast(raw_object);
       return compiler::target::CodeSourceMap::InstanceSize(
-          raw_map->ptr()->length_);
+          raw_map->untag()->length_);
     }
     case kPcDescriptorsCid: {
       auto raw_desc = PcDescriptors::RawCast(raw_object);
       return compiler::target::PcDescriptors::InstanceSize(
-          raw_desc->ptr()->length_);
+          raw_desc->untag()->length_);
     }
     case kInstructionsCid: {
       auto raw_insns = Instructions::RawCast(raw_object);
@@ -377,7 +377,7 @@
       js.PrintPropertyStr("l", url);
       js.PrintPropertyStr("c", name);
     } else if (owner.IsClass()) {
-      cls ^= owner.raw();
+      cls ^= owner.ptr();
       name = cls.ScrubbedName();
       lib = cls.library();
       url = lib.url();
@@ -388,7 +388,7 @@
                      data.code_->QualifiedName(
                          NameFormattingParams::DisambiguatedWithoutClassName(
                              Object::kInternalName)));
-    js.PrintProperty("s", SizeInSnapshot(data.insns_->raw()));
+    js.PrintProperty("s", SizeInSnapshot(data.insns_->ptr()));
     js.CloseObject();
   }
   if (trampolines_total_size != 0) {
@@ -451,7 +451,7 @@
 
     // Reset the object id, as an isolate snapshot created after a VM snapshot
     // will not use the VM snapshot's text image.
-    heap->SetObjectId(data.insns_->raw(), 0);
+    heap->SetObjectId(data.insns_->ptr(), 0);
   }
   for (intptr_t i = 0; i < objects_.length(); i++) {
     ObjectData& data = objects_[i];
@@ -514,10 +514,10 @@
     if (obj.IsCompressedStackMaps()) {
       const CompressedStackMaps& map = CompressedStackMaps::Cast(obj);
       const intptr_t payload_size = map.payload_size();
-      stream->WriteTargetWord(map.raw()->ptr()->flags_and_size_);
+      stream->WriteTargetWord(map.ptr()->untag()->flags_and_size_);
       ASSERT_EQUAL(stream->Position() - object_start,
                    compiler::target::CompressedStackMaps::HeaderSize());
-      stream->WriteBytes(map.raw()->ptr()->data(), payload_size);
+      stream->WriteBytes(map.ptr()->untag()->data(), payload_size);
     } else if (obj.IsCodeSourceMap()) {
       const CodeSourceMap& map = CodeSourceMap::Cast(obj);
       stream->WriteTargetWord(map.Length());
@@ -529,15 +529,15 @@
       stream->WriteTargetWord(desc.Length());
       ASSERT_EQUAL(stream->Position() - object_start,
                    compiler::target::PcDescriptors::HeaderSize());
-      stream->WriteBytes(desc.raw()->ptr()->data(), desc.Length());
+      stream->WriteBytes(desc.ptr()->untag()->data(), desc.Length());
     } else if (obj.IsString()) {
       const String& str = String::Cast(obj);
-      RELEASE_ASSERT(String::GetCachedHash(str.raw()) != 0);
+      RELEASE_ASSERT(String::GetCachedHash(str.ptr()) != 0);
       RELEASE_ASSERT(str.IsOneByteString() || str.IsTwoByteString());
 
-      stream->WriteTargetWord(static_cast<uword>(str.raw()->ptr()->length_));
+      stream->WriteTargetWord(static_cast<uword>(str.ptr()->untag()->length_));
 #if !defined(HASH_IN_OBJECT_HEADER)
-      stream->WriteTargetWord(static_cast<uword>(str.raw()->ptr()->hash_));
+      stream->WriteTargetWord(static_cast<uword>(str.ptr()->untag()->hash_));
 #endif
       ASSERT_EQUAL(stream->Position() - object_start,
                    compiler::target::String::InstanceSize());
@@ -558,15 +558,15 @@
 }
 
 static UNLESS_DEBUG(constexpr) const uword kReadOnlyGCBits =
-    ObjectLayout::OldBit::encode(true) |
-    ObjectLayout::OldAndNotMarkedBit::encode(false) |
-    ObjectLayout::OldAndNotRememberedBit::encode(true) |
-    ObjectLayout::NewBit::encode(false);
+    UntaggedObject::OldBit::encode(true) |
+    UntaggedObject::OldAndNotMarkedBit::encode(false) |
+    UntaggedObject::OldAndNotRememberedBit::encode(true) |
+    UntaggedObject::NewBit::encode(false);
 
 uword ImageWriter::GetMarkedTags(classid_t cid,
                                  intptr_t size,
                                  bool is_canonical /* = false */) {
-  // ObjectLayout::SizeTag expects a size divisible by kObjectAlignment and
+  // UntaggedObject::SizeTag expects a size divisible by kObjectAlignment and
   // checks this in debug mode, but the size on the target machine may not be
   // divisible by the host machine's object alignment if they differ.
   //
@@ -583,16 +583,17 @@
       size << (kObjectAlignmentLog2 -
                compiler::target::ObjectAlignment::kObjectAlignmentLog2);
 
-  return kReadOnlyGCBits | ObjectLayout::ClassIdTag::encode(cid) |
-         ObjectLayout::SizeTag::encode(adjusted_size) |
-         ObjectLayout::CanonicalBit::encode(is_canonical);
+  return kReadOnlyGCBits | UntaggedObject::ClassIdTag::encode(cid) |
+         UntaggedObject::SizeTag::encode(adjusted_size) |
+         UntaggedObject::CanonicalBit::encode(is_canonical);
 }
 
 uword ImageWriter::GetMarkedTags(const Object& obj) {
-  uword tags = GetMarkedTags(obj.raw()->GetClassId(), SizeInSnapshot(obj),
-                             obj.IsCanonical());
+  uword tags = GetMarkedTags(obj.ptr()->untag()->GetClassId(),
+                             SizeInSnapshot(obj), obj.IsCanonical());
 #if defined(HASH_IN_OBJECT_HEADER)
-  tags = ObjectLayout::HashTag::update(obj.raw()->ptr()->GetHeaderHash(), tags);
+  tags = UntaggedObject::HashTag::update(obj.ptr()->untag()->GetHeaderHash(),
+                                         tags);
 #endif
   return tags;
 }
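// [Editor's sketch] GetMarkedTags above assembles a read-only header:
// GC bits preset to "old, marked, not remembered", plus class id, size,
// and canonical bit. Field positions below are invented; the real
// layout lives in UntaggedObject's BitField definitions. AdjustSizeSketch
// is the host/target alignment fix-up the comment describes: pre-scale
// the size so SizeTag's divisibility check passes on a host with coarser
// object alignment.

#include <cstdint>

constexpr std::uint32_t kClassIdShiftSketch = 16;
constexpr std::uint32_t kSizeShiftSketch = 8;  // size in allocation units
constexpr std::uint32_t kMarkedBitSketch = 1u << 0;
constexpr std::uint32_t kCanonicalBitSketch = 1u << 1;

inline std::uint32_t MakeMarkedTagsSketch(std::uint32_t cid,
                                          std::uint32_t size_units,
                                          bool is_canonical) {
  return (cid << kClassIdShiftSketch) | (size_units << kSizeShiftSketch) |
         kMarkedBitSketch | (is_canonical ? kCanonicalBitSketch : 0u);
}

inline std::uint32_t AdjustSizeSketch(std::uint32_t size,
                                      std::uint32_t host_align_log2,
                                      std::uint32_t target_align_log2) {
  return size << (host_align_log2 - target_align_log2);
}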
@@ -740,7 +741,7 @@
       const V8SnapshotProfileWriter::ObjectId id(offset_space_, text_offset);
       auto const type = is_trampoline ? trampoline_type_ : instructions_type_;
       const intptr_t size = is_trampoline ? data.trampoline_length
-                                          : SizeInSnapshot(data.insns_->raw());
+                                          : SizeInSnapshot(data.insns_->ptr());
       profile_writer_->SetObjectTypeAndName(id, type, object_name);
       profile_writer_->AttributeBytesTo(id, size);
       const intptr_t element_offset = id.second - parent_id.second;
@@ -769,7 +770,7 @@
 
       // Write Instructions with the mark and read-only bits set.
       text_offset += WriteTargetWord(GetMarkedTags(insns));
-      text_offset += WriteFixed(insns.raw_ptr()->size_and_flags_);
+      text_offset += WriteFixed(insns.untag()->size_and_flags_);
       text_offset +=
           Align(compiler::target::Instructions::kNonBarePayloadAlignment,
                 text_offset);
@@ -798,7 +799,7 @@
       const uword payload_size = insns.Size();
       descriptors = code.pc_descriptors();
       PcDescriptors::Iterator iterator(
-          descriptors, /*kind_mask=*/PcDescriptorsLayout::kBSSRelocation);
+          descriptors, /*kind_mask=*/UntaggedPcDescriptors::kBSSRelocation);
 
       auto const payload_end = payload_start + payload_size;
       auto cursor = payload_start;
@@ -835,7 +836,7 @@
             : compiler::target::ObjectAlignment::kObjectAlignment;
     text_offset += AlignWithBreakInstructions(alignment, text_offset);
 
-    ASSERT_EQUAL(text_offset - instr_start, SizeInSnapshot(insns.raw()));
+    ASSERT_EQUAL(text_offset - instr_start, SizeInSnapshot(insns.ptr()));
   }
 
   // Should be a no-op unless writing bare instruction payloads, in which case
@@ -1556,10 +1557,10 @@
   ASSERT(!FLAG_precompiled_mode || !FLAG_use_bare_instructions);
   ASSERT(Utils::IsAligned(offset, kObjectAlignment));
 
-  ObjectPtr result = ObjectLayout::FromAddr(
+  ObjectPtr result = UntaggedObject::FromAddr(
       reinterpret_cast<uword>(instructions_image_) + offset);
   ASSERT(result->IsInstructions());
-  ASSERT(result->ptr()->IsMarked());
+  ASSERT(result->untag()->IsMarked());
 
   return Instructions::RawCast(result);
 }
@@ -1568,8 +1569,8 @@
   ASSERT(Utils::IsAligned(offset, kObjectAlignment));
 
   ObjectPtr result =
-      ObjectLayout::FromAddr(reinterpret_cast<uword>(data_image_) + offset);
-  ASSERT(result->ptr()->IsMarked());
+      UntaggedObject::FromAddr(reinterpret_cast<uword>(data_image_) + offset);
+  ASSERT(result->untag()->IsMarked());
 
   return result;
 }
diff --git a/runtime/vm/image_snapshot.h b/runtime/vm/image_snapshot.h
index 87b7a5a..a11356f 100644
--- a/runtime/vm/image_snapshot.h
+++ b/runtime/vm/image_snapshot.h
@@ -126,13 +126,13 @@
 
   // We don't use a handle or the tagged pointer because this object cannot be
   // moved in memory by the GC.
-  static const InstructionsSectionLayout* ExtraInfo(const uword raw_memory,
-                                                    const uword size);
+  static const UntaggedInstructionsSection* ExtraInfo(const uword raw_memory,
+                                                      const uword size);
 
   // Most internal uses would cast this to uword, so just store it as such.
   const uword raw_memory_;
   const intptr_t snapshot_size_;
-  const InstructionsSectionLayout* const extra_info_;
+  const UntaggedInstructionsSection* const extra_info_;
 
   // For access to private constants.
   friend class AssemblyImageWriter;
@@ -295,7 +295,7 @@
 
   static intptr_t SizeInSnapshot(ObjectPtr object);
   static intptr_t SizeInSnapshot(const Object& object) {
-    return SizeInSnapshot(object.raw());
+    return SizeInSnapshot(object.ptr());
   }
 
   // Returns nullptr if there is no profile writer.
@@ -430,7 +430,7 @@
   // instruction for the target architecture is used.
   intptr_t AlignWithBreakInstructions(intptr_t alignment, intptr_t offset);
 
-  Heap* heap_;  // Used for mapping RawInstructiosn to object ids.
+  Heap* heap_;  // Used for mapping InstructionsPtr to object ids.
   intptr_t next_data_offset_;
   intptr_t next_text_offset_;
   GrowableArray<ObjectData> objects_;
diff --git a/runtime/vm/instructions_arm.h b/runtime/vm/instructions_arm.h
index 1ddbc90..5fe9ab3 100644
--- a/runtime/vm/instructions_arm.h
+++ b/runtime/vm/instructions_arm.h
@@ -25,7 +25,7 @@
 class Code;
 class Object;
 class ObjectPool;
-class CodeLayout;
+class UntaggedCode;
 
 class InstructionPattern : public AllStatic {
  public:
diff --git a/runtime/vm/instructions_arm64_test.cc b/runtime/vm/instructions_arm64_test.cc
index 2076c45..6ede71a 100644
--- a/runtime/vm/instructions_arm64_test.cc
+++ b/runtime/vm/instructions_arm64_test.cc
@@ -30,7 +30,7 @@
   // before the end of the code buffer.
   uword end = test->payload_start() + test->code().Size();
   CallPattern call(end - Instr::kInstrSize, test->code());
-  EXPECT_EQ(StubCode::InvokeDartCode().raw(), call.TargetCode());
+  EXPECT_EQ(StubCode::InvokeDartCode().ptr(), call.TargetCode());
 }
 
 }  // namespace dart
diff --git a/runtime/vm/instructions_arm_test.cc b/runtime/vm/instructions_arm_test.cc
index c254353..90f439d 100644
--- a/runtime/vm/instructions_arm_test.cc
+++ b/runtime/vm/instructions_arm_test.cc
@@ -30,7 +30,7 @@
   // before the end of the code buffer.
   uword end = test->payload_start() + test->code().Size();
   CallPattern call(end - Instr::kInstrSize, test->code());
-  EXPECT_EQ(StubCode::InvokeDartCode().raw(), call.TargetCode());
+  EXPECT_EQ(StubCode::InvokeDartCode().ptr(), call.TargetCode());
 }
 
 }  // namespace dart
diff --git a/runtime/vm/isolate.cc b/runtime/vm/isolate.cc
index 60ca667..c9ec72a 100644
--- a/runtime/vm/isolate.cc
+++ b/runtime/vm/isolate.cc
@@ -125,8 +125,8 @@
 
 static std::unique_ptr<Message> SerializeMessage(Dart_Port dest_port,
                                                  const Instance& obj) {
-  if (ApiObjectConverter::CanConvert(obj.raw())) {
-    return Message::New(dest_port, obj.raw(), Message::kNormalPriority);
+  if (ApiObjectConverter::CanConvert(obj.ptr())) {
+    return Message::New(dest_port, obj.ptr(), Message::kNormalPriority);
   } else {
     MessageWriter writer(false);
     return writer.WriteMessage(obj, dest_port, Message::kNormalPriority);
@@ -191,7 +191,7 @@
     Array& new_array =
         Array::Handle(Array::Grow(loaded_blobs, length + 1, Heap::kOld));
     new_array.SetAt(length, weak_property);
-    loaded_blobs_ = new_array.raw();
+    loaded_blobs_ = new_array.ptr();
   }
   num_blob_loads_++;
 }
@@ -522,7 +522,7 @@
 }
 
 void IsolateGroup::set_saved_unlinked_calls(const Array& saved_unlinked_calls) {
-  saved_unlinked_calls_ = saved_unlinked_calls.raw();
+  saved_unlinked_calls_ = saved_unlinked_calls.ptr();
 }
 
 Thread* IsolateGroup::ScheduleThreadLocked(MonitorLocker* ml,
@@ -904,7 +904,7 @@
   const bool need_to_grow_backing_store =
       initial_field_table()->Register(field);
   const intptr_t field_id = field.field_id();
-  initial_field_table()->SetAt(field_id, initial_value.raw());
+  initial_field_table()->SetAt(field_id, initial_value.ptr());
 
   if (need_to_grow_backing_store) {
     // We have to stop other isolates from accessing their field state, since
@@ -914,7 +914,7 @@
       auto field_table = isolate->field_table();
       if (field_table->IsReadyToUse()) {
         field_table->Register(field, field_id);
-        field_table->SetAt(field_id, initial_value.raw());
+        field_table->SetAt(field_id, initial_value.ptr());
       }
     }
   } else {
@@ -922,7 +922,7 @@
       auto field_table = isolate->field_table();
       if (field_table->IsReadyToUse()) {
         field_table->Register(field, field_id);
-        field_table->SetAt(field_id, initial_value.raw());
+        field_table->SetAt(field_id, initial_value.ptr());
       }
     }
   }
@@ -1115,7 +1115,7 @@
             const UnwindError& error =
                 UnwindError::Handle(UnwindError::New(msg));
             error.set_is_user_initiated(true);
-            return error.raw();
+            return error.ptr();
           } else if (msg_type == Isolate::kInternalKillMsg) {
             const String& msg =
                 String::Handle(String::New("isolate terminated by vm"));
@@ -1302,12 +1302,12 @@
   if (message->IsRaw()) {
     msg_obj = message->raw_obj();
     // We should only be sending RawObjects that can be converted to CObjects.
-    ASSERT(ApiObjectConverter::CanConvert(msg_obj.raw()));
+    ASSERT(ApiObjectConverter::CanConvert(msg_obj.ptr()));
   } else if (message->IsBequest()) {
     Bequest* bequest = message->bequest();
     PersistentHandle* handle = bequest->handle();
-    const Object& obj = Object::Handle(zone, handle->raw());
-    msg_obj = obj.raw();
+    const Object& obj = Object::Handle(zone, handle->ptr());
+    msg_obj = obj.ptr();
   } else {
     MessageSnapshotReader reader(message.get(), thread);
     msg_obj = reader.ReadObject();
@@ -1325,7 +1325,7 @@
     UNREACHABLE();
   }
   Instance& msg = Instance::Handle(zone);
-  msg ^= msg_obj.raw();  // Can't use Instance::Cast because may be null.
+  msg ^= msg_obj.ptr();  // Can't use Instance::Cast because may be null.
 
   MessageStatus status = kOK;
   if (message->IsOOB()) {
@@ -1481,9 +1481,9 @@
     Zone* zone = T->zone();
     const UnhandledException& uhe = UnhandledException::Cast(result);
     const Instance& exception = Instance::Handle(zone, uhe.exception());
-    if (exception.raw() == IG->object_store()->out_of_memory()) {
+    if (exception.ptr() == IG->object_store()->out_of_memory()) {
       exception_cstr = "Out of Memory";  // Cf. OutOfMemoryError.toString().
-    } else if (exception.raw() == IG->object_store()->stack_overflow()) {
+    } else if (exception.ptr() == IG->object_store()->stack_overflow()) {
       exception_cstr = "Stack Overflow";  // Cf. StackOverflowError.toString().
     } else {
       const Object& exception_str =
@@ -1869,8 +1869,8 @@
                                   const Object& arg2) {
   Thread* thread = Thread::Current();
   Api::Scope api_scope(thread);
-  Dart_Handle api_arg1 = Api::NewHandle(thread, arg1.raw());
-  Dart_Handle api_arg2 = Api::NewHandle(thread, arg2.raw());
+  Dart_Handle api_arg1 = Api::NewHandle(thread, arg1.ptr());
+  Dart_Handle api_arg2 = Api::NewHandle(thread, arg2.ptr());
   Dart_Handle api_result;
   {
     TransitionVMToNative transition(thread);
@@ -1949,7 +1949,7 @@
   const Error& error = Error::Handle(debugger_->PausePostRequest());
   if (!error.IsNull()) {
     if (Thread::Current()->top_exit_frame_info() == 0) {
-      return error.raw();
+      return error.ptr();
     } else {
       Exceptions::PropagateError(error);
       UNREACHABLE();
@@ -2816,7 +2816,7 @@
   raw_class = group()->class_table()->At(cid);
 #endif  // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
   ASSERT(raw_class != nullptr);
-  ASSERT(remapping_cids() || raw_class->ptr()->id_ == cid);
+  ASSERT(remapping_cids() || raw_class->untag()->id_ == cid);
   return raw_class;
 }
 
@@ -3060,27 +3060,27 @@
 #endif
 
 void Isolate::set_tag_table(const GrowableObjectArray& value) {
-  tag_table_ = value.raw();
+  tag_table_ = value.ptr();
 }
 
 void Isolate::set_current_tag(const UserTag& tag) {
   uword user_tag = tag.tag();
   ASSERT(user_tag < kUwordMax);
   set_user_tag(user_tag);
-  current_tag_ = tag.raw();
+  current_tag_ = tag.ptr();
 }
 
 void Isolate::set_default_tag(const UserTag& tag) {
-  default_tag_ = tag.raw();
+  default_tag_ = tag.ptr();
 }
 
 void Isolate::set_ic_miss_code(const Code& code) {
-  ic_miss_code_ = code.raw();
+  ic_miss_code_ = code.ptr();
 }
 
 void Isolate::set_deoptimized_code_array(const GrowableObjectArray& value) {
   ASSERT(Thread::Current()->IsMutatorThread());
-  deoptimized_code_array_ = value.raw();
+  deoptimized_code_array_ = value.ptr();
 }
 
 void Isolate::TrackDeoptimizedCode(const Code& code) {
@@ -3106,12 +3106,12 @@
 #if !defined(PRODUCT)
 void Isolate::set_pending_service_extension_calls(
     const GrowableObjectArray& value) {
-  pending_service_extension_calls_ = value.raw();
+  pending_service_extension_calls_ = value.ptr();
 }
 
 void Isolate::set_registered_service_extension_handlers(
     const GrowableObjectArray& value) {
-  registered_service_extension_handlers_ = value.raw();
+  registered_service_extension_handlers_ = value.ptr();
 }
 #endif  // !defined(PRODUCT)
 
@@ -3204,14 +3204,14 @@
         Service::PostError(method_name, parameter_keys, parameter_values,
                            reply_port, id, Error::Cast(result));
       }
-      return Error::Cast(result).raw();
+      return Error::Cast(result).ptr();
     }
     // Drain the microtask queue.
     result = DartLibraryCalls::DrainMicrotaskQueue();
     // Propagate the error.
     if (result.IsError()) {
       // Remaining service extension calls are dropped.
-      return Error::Cast(result).raw();
+      return Error::Cast(result).ptr();
     }
   }
   return Error::null();
diff --git a/runtime/vm/isolate.h b/runtime/vm/isolate.h
index 1a84194..4a0bab0 100644
--- a/runtime/vm/isolate.h
+++ b/runtime/vm/isolate.h
@@ -712,7 +712,7 @@
   friend class Dart;  // For `object_store_ = ` in Dart::Init
   friend class Heap;
   friend class StackFrame;  // For `[isolates_].First()`.
   // For `object_store_shared_ptr()`, `class_table_shared_ptr()`
   friend class Isolate;
 
 #define ISOLATE_GROUP_FLAG_BITS(V)                                             \
diff --git a/runtime/vm/isolate_reload.cc b/runtime/vm/isolate_reload.cc
index 94ce65c..3d8f990 100644
--- a/runtime/vm/isolate_reload.cc
+++ b/runtime/vm/isolate_reload.cc
@@ -162,7 +162,7 @@
     }
 
     if (new_field) {
-      const Field& field = Field::Handle(to_field.raw());
+      const Field& field = Field::Handle(to_field.ptr());
       field.set_needs_load_guard(true);
       field.set_is_unboxing_candidate_unsafe(false);
       new_fields_offsets->Add(field.HostOffset());
@@ -224,8 +224,8 @@
     result.SetCanonical();
   }
 #if defined(HASH_IN_OBJECT_HEADER)
-  const uint32_t hash = Object::GetCachedHash(instance.raw());
-  Object::SetCachedHash(result.raw(), hash);
+  const uint32_t hash = Object::GetCachedHash(instance.ptr());
+  Object::SetCachedHash(result.ptr(), hash);
 #endif
 
   // Morph the context from instance to result using mapping_.
@@ -246,7 +246,7 @@
 
   // Convert the instance into a filler object.
   Become::MakeDummyObject(instance);
-  return result.raw();
+  return result.ptr();
 }
 
 void InstanceMorpher::CreateMorphedCopies() {
@@ -305,8 +305,8 @@
                                                    const Class& from,
                                                    const Class& to)
     : ReasonForCancelling(zone),
-      from_(Class::ZoneHandle(zone, from.raw())),
-      to_(Class::ZoneHandle(zone, to.raw())) {}
+      from_(Class::ZoneHandle(zone, from.ptr())),
+      to_(Class::ZoneHandle(zone, to.ptr())) {}
 
 void ClassReasonForCancelling::AppendTo(JSONArray* array) {
   JSONObject jsobj(array);
@@ -385,7 +385,7 @@
   static const char* Name() { return "BecomeMapTraits"; }
 
   static bool IsMatch(const Object& a, const Object& b) {
-    return a.raw() == b.raw();
+    return a.ptr() == b.ptr();
   }
 
   static uword Hash(const Object& obj) {
@@ -420,7 +420,7 @@
   const Library& b_lib = Library::Handle(b.library());
 
   if (a_lib.IsNull() || b_lib.IsNull()) {
-    return a_lib.raw() == b_lib.raw();
+    return a_lib.ptr() == b_lib.ptr();
   }
   return (a_lib.private_key() == b_lib.private_key());
 }
@@ -510,12 +510,12 @@
  public:
   Aborted(Zone* zone, const Error& error)
       : ReasonForCancelling(zone),
-        error_(Error::ZoneHandle(zone, error.raw())) {}
+        error_(Error::ZoneHandle(zone, error.ptr())) {}
 
  private:
   const Error& error_;
 
-  ErrorPtr ToError() { return error_.raw(); }
+  ErrorPtr ToError() { return error_.ptr(); }
   StringPtr ToString() {
     return String::NewFormatted("%s", error_.ToErrorCString());
   }
@@ -620,7 +620,7 @@
     }
 
     ExternalTypedData& external_typed_data =
-        ExternalTypedData::Handle(Z, kernel_program.get()->typed_data()->raw());
+        ExternalTypedData::Handle(Z, kernel_program.get()->typed_data()->ptr());
     IsolateGroupSource* source = Isolate::Current()->source();
     source->add_loaded_blob(Z, external_typed_data);
 
@@ -978,7 +978,7 @@
     while (entries.HasNext()) {
       entry = entries.GetNext();
       if (entry.IsLibraryPrefix()) {
-        prefix ^= entry.raw();
+        prefix ^= entry.ptr();
         ports = prefix.imports();
         for (intptr_t import_idx = 0; import_idx < ports.Length();
              import_idx++) {
@@ -1017,7 +1017,7 @@
   if (root_script_url != nullptr) {
     root_lib_url_ = String::New(root_script_url);
   } else {
-    root_lib_url_ = old_root_lib_url.raw();
+    root_lib_url_ = old_root_lib_url.ptr();
   }
 
   // Check to see if the base url of the loaded libraries has moved.
@@ -1096,13 +1096,13 @@
   if (setjmp(*jump.Set()) == 0) {
     const Object& tmp = kernel::KernelLoader::LoadEntireProgram(program);
     if (tmp.IsError()) {
-      return tmp.raw();
+      return tmp.ptr();
     }
 
    // If the main method disappeared or was not there to begin with, then
    // KernelLoader will return null. In this case, look up the library by
    // URL.
-    auto& lib = Library::Handle(Library::RawCast(tmp.raw()));
+    auto& lib = Library::Handle(Library::RawCast(tmp.ptr()));
     if (lib.IsNull()) {
       lib = Library::LookupLibrary(thread, root_lib_url);
     }
@@ -1154,7 +1154,7 @@
   }
   VTIR_Print("Registering class: %s\n", new_cls.ToCString());
   new_cls.set_id(old_cls.id());
-  IG->class_table()->SetAt(old_cls.id(), new_cls.raw());
+  IG->class_table()->SetAt(old_cls.id(), new_cls.ptr());
   if (!old_cls.is_enum_class()) {
     new_cls.CopyCanonicalConstants(old_cls);
   }
@@ -1343,7 +1343,7 @@
       ASSERT(!already_present);
     }
   }
-  old_classes_set_storage_ = old_classes_set.Release().raw();
+  old_classes_set_storage_ = old_classes_set.Release().ptr();
   TIR_Print("---- System had %" Pd " classes\n", saved_num_cids_);
 }
 
@@ -1445,12 +1445,12 @@
   TIR_Print("---- CHECKPOINTING LIBRARIES\n");
   // Save the root library in case we abort the reload.
   const Library& root_lib = Library::Handle(object_store()->root_library());
-  saved_root_library_ = root_lib.raw();
+  saved_root_library_ = root_lib.ptr();
 
   // Save the old libraries array in case we abort the reload.
   const GrowableObjectArray& libs =
       GrowableObjectArray::Handle(object_store()->libraries());
-  saved_libraries_ = libs.raw();
+  saved_libraries_ = libs.ptr();
 
   // Make a filtered copy of the old libraries array. Keep "clean" libraries
   // that we will use instead of reloading.
@@ -1481,7 +1481,7 @@
     bool already_present = old_libraries_set.Insert(lib);
     ASSERT(!already_present);
   }
-  old_libraries_set_storage_ = old_libraries_set.Release().raw();
+  old_libraries_set_storage_ = old_libraries_set.Release().ptr();
 
   // Reset the registered libraries to the filtered array.
   Library::RegisterLibraries(Thread::Current(), new_libs);
@@ -1611,7 +1611,7 @@
         const intptr_t entry = it.Current();
         new_cls = Class::RawCast(class_map.GetKey(entry));
         old_cls = Class::RawCast(class_map.GetPayload(entry, 0));
-        if (new_cls.raw() != old_cls.raw()) {
+        if (new_cls.ptr() != old_cls.ptr()) {
           ASSERT(new_cls.is_enum_class() == old_cls.is_enum_class());
           if (new_cls.is_enum_class() && new_cls.is_finalized()) {
             new_cls.ReplaceEnum(this, old_cls);
@@ -1836,7 +1836,7 @@
       const intptr_t entry = it.Current();
       new_lib = Library::RawCast(map.GetKey(entry));
       lib = Library::RawCast(map.GetPayload(entry, 0));
-      if (new_lib.raw() != lib.raw()) {
+      if (new_lib.ptr() != lib.ptr()) {
         lib.CheckReload(new_lib, this);
       }
     }
@@ -1854,7 +1854,7 @@
       const intptr_t entry = it.Current();
       new_cls = Class::RawCast(map.GetKey(entry));
       cls = Class::RawCast(map.GetPayload(entry, 0));
-      if (new_cls.raw() != cls.raw()) {
+      if (new_cls.ptr() != cls.ptr()) {
         cls.CheckReload(new_cls, this);
       }
     }
@@ -2162,7 +2162,7 @@
         continue;  // Already guarding.
       }
       value_ = field.StaticValue();
-      if (value_.raw() != Object::sentinel().raw()) {
+      if (value_.ptr() != Object::sentinel().ptr()) {
         CheckValueType(null_safety, value_, field);
       }
     }
@@ -2205,7 +2205,7 @@
       return;  // Already guarding.
     }
     value_ ^= instance.GetField(field);
-    if (value_.raw() == Object::sentinel().raw()) {
+    if (value_.ptr() == Object::sentinel().ptr()) {
       if (field.is_late()) {
         // Late fields already have lazy initialization logic.
         return;
@@ -2262,24 +2262,24 @@
     for (intptr_t i = 0; entries_.At(i) != Object::null();
          i += SubtypeTestCache::kTestEntryLength) {
       if ((entries_.At(i + SubtypeTestCache::kInstanceClassIdOrFunction) ==
-           instance_cid_or_function_.raw()) &&
+           instance_cid_or_function_.ptr()) &&
           (entries_.At(i + SubtypeTestCache::kDestinationType) ==
-           type_.raw()) &&
+           type_.ptr()) &&
           (entries_.At(i + SubtypeTestCache::kInstanceTypeArguments) ==
-           instance_type_arguments_.raw()) &&
+           instance_type_arguments_.ptr()) &&
           (entries_.At(i + SubtypeTestCache::kInstantiatorTypeArguments) ==
-           instantiator_type_arguments_.raw()) &&
+           instantiator_type_arguments_.ptr()) &&
           (entries_.At(i + SubtypeTestCache::kFunctionTypeArguments) ==
-           function_type_arguments_.raw()) &&
+           function_type_arguments_.ptr()) &&
           (entries_.At(
                i + SubtypeTestCache::kInstanceParentFunctionTypeArguments) ==
-           parent_function_type_arguments_.raw()) &&
+           parent_function_type_arguments_.ptr()) &&
           (entries_.At(
                i + SubtypeTestCache::kInstanceDelayedFunctionTypeArguments) ==
-           delayed_function_type_arguments_.raw())) {
+           delayed_function_type_arguments_.ptr())) {
         cache_hit = true;
         if (entries_.At(i + SubtypeTestCache::kTestResult) !=
-            Bool::True().raw()) {
+            Bool::True().ptr()) {
           ASSERT(!FLAG_identity_reload);
           field.set_needs_load_guard(true);
         }
@@ -2344,8 +2344,8 @@
   UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
   Class& cls = Class::Handle();
   cls ^= old_classes_set.GetOrNull(replacement_or_new);
-  old_classes_set_storage_ = old_classes_set.Release().raw();
-  return cls.raw();
+  old_classes_set_storage_ = old_classes_set.Release().ptr();
+  return cls.ptr();
 }
 
 StringPtr IsolateReloadContext::FindLibraryPrivateKey(
@@ -2375,7 +2375,7 @@
       (group_reload_context_->old_root_url_prefix_ != String::null())) {
     return OldLibraryOrNullBaseMoved(replacement_or_new);
   }
-  return lib.raw();
+  return lib.ptr();
 }
 
 // Attempt to find the pair to |replacement_or_new| with the knowledge that
@@ -2412,7 +2412,7 @@
     if (old_suffix.Equals(suffix)) {
       TIR_Print("`%s` is moving to `%s`\n", old_url.ToCString(),
                 new_url.ToCString());
-      return old.raw();
+      return old.ptr();
     }
   }
   return Library::null();
@@ -2486,7 +2486,7 @@
       const intptr_t entry = it_library.Current();
       new_library ^= library_map.GetKey(entry);
       old_library ^= library_map.GetPayload(entry, 0);
-      if (new_library.raw() != old_library.raw()) {
+      if (new_library.ptr() != old_library.ptr()) {
         mapped_old_library_set.InsertOrGet(old_library);
       }
     }
@@ -2518,7 +2518,7 @@
       }
     }
   }
-  removed_class_set_storage_ = removed_class_set.Release().raw();
+  removed_class_set_storage_ = removed_class_set.Release().ptr();
 
   old_classes_set.Release();
   mapped_old_classes_set.Release();
@@ -2533,7 +2533,7 @@
   ASSERT(!update);
  // The storage given to the map may have been reallocated; remember the new
  // address.
-  class_map_storage_ = map.Release().raw();
+  class_map_storage_ = map.Release().ptr();
 }
 
 void IsolateReloadContext::AddLibraryMapping(const Library& replacement_or_new,
@@ -2543,7 +2543,7 @@
   ASSERT(!update);
  // The storage given to the map may have been reallocated; remember the new
  // address.
-  library_map_storage_ = map.Release().raw();
+  library_map_storage_ = map.Release().ptr();
 }
 
 void IsolateReloadContext::AddStaticFieldMapping(const Field& old_field,
@@ -2560,7 +2560,7 @@
   UnorderedHashMap<BecomeMapTraits> become_map(become_map_storage_);
   bool update = become_map.UpdateOrInsert(old, neu);
   ASSERT(!update);
-  become_map_storage_ = become_map.Release().raw();
+  become_map_storage_ = become_map.Release().ptr();
 }
 
 void IsolateReloadContext::AddEnumBecomeMapping(const Object& old,
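
Throughout the isolate_reload.cc hunks above, object identity is tested by comparing tagged pointers, as in BecomeMapTraits::IsMatch: two distinct handles match exactly when they wrap the same tagged pointer. A small sketch under that assumption (MiniHandle is a toy stand-in, not a VM type):

    #include <cassert>
    #include <cstdint>

    class MiniHandle {  // hypothetical stand-in for a VM handle
     public:
      explicit MiniHandle(uintptr_t tagged) : tagged_(tagged) {}
      uintptr_t ptr() const { return tagged_; }  // tagged pointer, formerly raw()
     private:
      uintptr_t tagged_;
    };

    // Same shape as BecomeMapTraits::IsMatch: identity, not structural equality.
    static bool IsMatch(const MiniHandle& a, const MiniHandle& b) {
      return a.ptr() == b.ptr();
    }

    int main() {
      MiniHandle a(0x1001), b(0x1001), c(0x2001);
      assert(IsMatch(a, b));   // distinct handles, same heap object
      assert(!IsMatch(a, c));  // different heap objects
      return 0;
    }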
diff --git a/runtime/vm/isolate_reload.h b/runtime/vm/isolate_reload.h
index 6e6cd74..7718309 100644
--- a/runtime/vm/isolate_reload.h
+++ b/runtime/vm/isolate_reload.h
@@ -296,7 +296,7 @@
   friend class ReasonForCancelling;
   friend class IsolateReloadContext;
   friend class IsolateGroup;  // GetClassSizeForHeapWalkAt
-  friend class ObjectLayout;  // GetClassSizeForHeapWalkAt
+  friend class UntaggedObject;  // GetClassSizeForHeapWalkAt
 
   static Dart_FileModifiedCallback file_modified_callback_;
 };
diff --git a/runtime/vm/json_stream.cc b/runtime/vm/json_stream.cc
index 5b3c8fe..01b6c5f 100644
--- a/runtime/vm/json_stream.cc
+++ b/runtime/vm/json_stream.cc
@@ -54,12 +54,12 @@
                        const Array& param_values,
                        bool parameters_are_dart_objects) {
   set_reply_port(reply_port);
-  seq_ = &Instance::ZoneHandle(seq.raw());
+  seq_ = &Instance::ZoneHandle(seq.ptr());
   method_ = method.ToCString();
 
   if (parameters_are_dart_objects) {
-    parameter_keys_ = &Array::ZoneHandle(param_keys.raw());
-    parameter_values_ = &Array::ZoneHandle(param_values.raw());
+    parameter_keys_ = &Array::ZoneHandle(param_keys.ptr());
+    parameter_values_ = &Array::ZoneHandle(param_values.ptr());
     ASSERT(parameter_keys_->Length() == parameter_values_->Length());
   } else if (param_keys.Length() > 0) {
     String& string_iterator = String::Handle();
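
The json_stream.cc hunk above re-wraps the incoming handles as Instance::ZoneHandle/Array::ZoneHandle copies before storing their addresses; a zone-allocated handle outlives the caller's handle scope, so the stored pointers stay valid. A toy illustration of why the copy matters (ToyZone is a stand-in arena, not the VM's Zone):

    #include <cassert>
    #include <cstdint>
    #include <deque>

    struct ToyZone {
      std::deque<uintptr_t> slots;          // arena-owned storage
      uintptr_t* Alloc(uintptr_t tagged) {  // ZoneHandle analogue: copy into the zone
        slots.push_back(tagged);
        return &slots.back();               // deque growth never moves old slots
      }
    };

    struct ToyStream {
      uintptr_t* seq_ = nullptr;
      void Setup(ToyZone* zone, uintptr_t seq_tagged) {
        // Storing the caller's address would dangle once its scope unwinds;
        // copying into the zone keeps seq_ valid for the zone's lifetime.
        seq_ = zone->Alloc(seq_tagged);
      }
    };

    int main() {
      ToyZone zone;
      ToyStream stream;
      {
        uintptr_t local = 0x1001;  // dies at the end of this block
        stream.Setup(&zone, local);
      }
      assert(*stream.seq_ == 0x1001);  // the zone-owned copy is still valid
      return 0;
    }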
diff --git a/runtime/vm/kernel.cc b/runtime/vm/kernel.cc
index 59f66bf..442f8ce 100644
--- a/runtime/vm/kernel.cc
+++ b/runtime/vm/kernel.cc
@@ -193,7 +193,7 @@
 static ArrayPtr AsSortedDuplicateFreeArray(GrowableArray<intptr_t>* source) {
   intptr_t size = source->length();
   if (size == 0) {
-    return Object::empty_array().raw();
+    return Object::empty_array().ptr();
   }
 
   source->Sort(LowestFirst);
@@ -211,7 +211,7 @@
     smi_value = Smi::New(source->At(i));
     array_object.SetAt(i, smi_value);
   }
-  return array_object.raw();
+  return array_object.ptr();
 }
 
 static void CollectKernelDataTokenPositions(
@@ -264,7 +264,7 @@
       data = ExternalTypedData::null();
       if (entry.IsClass()) {
         const Class& klass = Class::Cast(entry);
-        if (klass.script() == interesting_script.raw()) {
+        if (klass.script() == interesting_script.ptr()) {
           token_positions.Add(klass.token_pos().Serialize());
           token_positions.Add(klass.end_token_pos().Serialize());
         }
@@ -277,7 +277,7 @@
               continue;
             }
             entry_script = temp_field.Script();
-            if (entry_script.raw() != interesting_script.raw()) {
+            if (entry_script.ptr() != interesting_script.ptr()) {
               continue;
             }
             data = temp_field.KernelData();
@@ -291,7 +291,7 @@
           for (intptr_t i = 0; i < temp_array.Length(); ++i) {
             temp_function ^= temp_array.At(i);
             entry_script = temp_function.script();
-            if (entry_script.raw() != interesting_script.raw()) {
+            if (entry_script.ptr() != interesting_script.ptr()) {
               continue;
             }
             data = temp_function.KernelData();
@@ -311,7 +311,7 @@
           const intptr_t class_offset = klass.kernel_offset();
 
           entry_script = klass.script();
-          if (entry_script.raw() != interesting_script.raw()) {
+          if (entry_script.ptr() != interesting_script.ptr()) {
             continue;
           }
           CollectKernelDataTokenPositions(
@@ -319,9 +319,9 @@
               library_kernel_offset, zone, &helper, &token_positions);
         }
       } else if (entry.IsFunction()) {
-        temp_function ^= entry.raw();
+        temp_function ^= entry.ptr();
         entry_script = temp_function.script();
-        if (entry_script.raw() != interesting_script.raw()) {
+        if (entry_script.ptr() != interesting_script.ptr()) {
           continue;
         }
         data = temp_function.KernelData();
@@ -336,7 +336,7 @@
           continue;
         }
         entry_script = field.Script();
-        if (entry_script.raw() != interesting_script.raw()) {
+        if (entry_script.ptr() != interesting_script.ptr()) {
           continue;
         }
         data = field.KernelData();
@@ -347,7 +347,7 @@
     }
   }
 
-  Script& script = Script::Handle(zone, interesting_script.raw());
+  Script& script = Script::Handle(zone, interesting_script.ptr());
   Array& array_object = Array::Handle(zone);
   array_object = AsSortedDuplicateFreeArray(&token_positions);
   script.set_debug_positions(array_object);
@@ -379,7 +379,7 @@
         zone, helper.LookupConstructorByKernelConstructor(klass, kernel_name));
     constructors.SetAt(i, target);
   }
-  return constructors.raw();
+  return constructors.ptr();
 }
 #endif  // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
 
@@ -575,7 +575,7 @@
                              Object::null_instance());
     }
   }
-  return param_descriptor.raw();
+  return param_descriptor.ptr();
 }
 
 ObjectPtr BuildParameterDescriptor(const Function& function) {
diff --git a/runtime/vm/kernel_binary.cc b/runtime/vm/kernel_binary.cc
index c34523e..7e6f741 100644
--- a/runtime/vm/kernel_binary.cc
+++ b/runtime/vm/kernel_binary.cc
@@ -75,7 +75,7 @@
     }
   }
 
-  return line_starts_data.raw();
+  return line_starts_data.ptr();
 }
 
 const char* kKernelInvalidFilesize =
@@ -208,12 +208,12 @@
       isolate->CallTagHandler(Dart_kKernelTag, Object::null_object(), uri));
   if (ret.IsExternalTypedData()) {
     const auto& typed_data = ExternalTypedData::Handle(
-        thread->zone(), ExternalTypedData::RawCast(ret.raw()));
+        thread->zone(), ExternalTypedData::RawCast(ret.ptr()));
     kernel_program = kernel::Program::ReadFromTypedData(typed_data);
     return kernel_program;
   } else if (error != nullptr) {
     Api::Scope api_scope(thread);
-    Dart_Handle retval = Api::NewHandle(thread, ret.raw());
+    Dart_Handle retval = Api::NewHandle(thread, ret.ptr());
     {
       TransitionVMToNative transition(thread);
       *error = Dart_GetError(retval);
diff --git a/runtime/vm/kernel_loader.cc b/runtime/vm/kernel_loader.cc
index 437cbd7..804e6e5 100644
--- a/runtime/vm/kernel_loader.cc
+++ b/runtime/vm/kernel_loader.cc
@@ -91,10 +91,10 @@
         *simple_value_ = H.Canonicalize(*simple_value_);
         return true;
       case kTrueLiteral:
-        simple_value_ = &Bool::Handle(Z, Bool::Get(true).raw());
+        simple_value_ = &Bool::Handle(Z, Bool::Get(true).ptr());
         return true;
       case kFalseLiteral:
-        simple_value_ = &Bool::Handle(Z, Bool::Get(false).raw());
+        simple_value_ = &Bool::Handle(Z, Bool::Get(false).ptr());
         return true;
       case kNullLiteral:
         simple_value_ = &Instance::ZoneHandle(Z, Instance::null());
@@ -122,7 +122,7 @@
   for (intptr_t i = 0; i < len; i++) {
     res.SetAt(i, *fields_[i]);
   }
-  return res.raw();
+  return res.ptr();
 }
 
 ArrayPtr KernelLoader::MakeFunctionsArray() {
@@ -131,7 +131,7 @@
   for (intptr_t i = 0; i < len; i++) {
     res.SetAt(i, *functions_[i]);
   }
-  return res.raw();
+  return res.ptr();
 }
 
 LibraryPtr BuildingTranslationHelper::LookupLibraryByKernelLibrary(
@@ -322,7 +322,7 @@
     if (load_result.IsError()) return load_result;
 
     if (load_result.IsLibrary()) {
-      library ^= load_result.raw();
+      library ^= load_result.ptr();
     }
   }
 
@@ -367,7 +367,7 @@
   for (intptr_t i = 0; i < source_table_size; ++i) {
     const String& source_uri = reader.SourceTableUriFor(i);
     if (source_uri.EndsWith(uri)) {
-      return reader.GetSourceFor(i).raw();
+      return reader.GetSourceFor(i).ptr();
     }
   }
   return String::null();
@@ -487,7 +487,7 @@
       expression_evaluation_library_(Library::Handle(Z)) {
   ASSERT(T.active_class_ == &active_class_);
   T.finalize_ = false;
-  library_kernel_data_ = kernel_data.raw();
+  library_kernel_data_ = kernel_data.ptr();
   H.InitFromKernelProgramInfo(kernel_program_info_);
 }
 
@@ -556,7 +556,7 @@
         if (constant_reader.IsInstanceConstant(constant_table_offset,
                                                external_name_class_)) {
           constant = constant_reader.ReadConstant(constant_table_offset);
-          ASSERT(constant.clazz() == external_name_class_.raw());
+          ASSERT(constant.clazz() == external_name_class_.ptr());
           // We found the annotation, let's flag the function as native and
           // set the native name!
           native_name ^= constant.GetField(external_name_field_);
@@ -604,7 +604,7 @@
   intptr_t list_length = helper_.ReadListLength();  // read list length.
   ASSERT(list_length == 0);
 
-  return result.raw();
+  return result.ptr();
 }
 
 bool KernelLoader::IsClassName(NameIndex name,
@@ -673,7 +673,7 @@
         if (constant_reader.IsInstanceConstant(constant_table_offset,
                                                external_name_class_)) {
           constant = constant_reader.ReadConstant(constant_table_offset);
-          ASSERT(constant.clazz() == external_name_class_.raw());
+          ASSERT(constant.clazz() == external_name_class_.ptr());
           uri_path ^= constant.GetField(external_name_field_);
         }
       } else if (tag == kConstructorInvocation ||
@@ -810,7 +810,7 @@
   H.SetExpressionEvaluationRealClass(real_class);
   const Object& result = Object::Handle(Z, LoadProgram(true));
   if (result.IsError()) {
-    return result.raw();
+    return result.ptr();
   }
   const Function& function = H.GetExpressionEvaluationFunction();
   ASSERT(!function.IsNull());
@@ -830,7 +830,7 @@
 
   function.set_owner(real_class);
 
-  return function.raw();
+  return function.ptr();
 }
 
 void KernelLoader::FindModifiedLibraries(Program* program,
@@ -1015,7 +1015,7 @@
   Library& library =
       Library::Handle(Z, LookupLibrary(library_helper.canonical_name_));
 
-  if (library.Loaded()) return library.raw();
+  if (library.Loaded()) return library.ptr();
 
   library.set_is_nnbd(library_helper.IsNonNullableByDefault());
   const NNBDCompiledMode mode =
@@ -1056,7 +1056,7 @@
   // The bootstrapper will take care of creating the native wrapper classes, but
   // we will add the synthetic constructors to them here.
   if (library.name() ==
-      Symbols::Symbol(Symbols::kDartNativeWrappersLibNameId).raw()) {
+      Symbols::Symbol(Symbols::kDartNativeWrappersLibNameId).ptr()) {
     ASSERT(library.LoadInProgress());
     loading_native_wrappers_library_ = true;
   } else {
@@ -1088,7 +1088,7 @@
   // The expression evaluation functions should be GC-able as soon as
   // they are not reachable anymore and we never look them up by name.
   const bool register_class =
-      library.raw() != expression_evaluation_library_.raw();
+      library.ptr() != expression_evaluation_library_.ptr();
 
   Class& toplevel_class =
       Class::Handle(Z, Class::New(library, Symbols::TopLevel(), script,
@@ -1143,7 +1143,7 @@
   }
   if (!library.Loaded()) library.SetLoaded();
 
-  return library.raw();
+  return library.ptr();
 }
 
 void KernelLoader::FinishTopLevelClassLoading(
@@ -1358,13 +1358,13 @@
     Library& target_library = Library::Handle(
         Z, LookupLibrary(dependency_helper.target_library_canonical_name_));
     if (!FLAG_enable_mirrors &&
-        target_library.url() == Symbols::DartMirrors().raw()) {
+        target_library.url() == Symbols::DartMirrors().ptr()) {
       H.ReportError(
           "import of dart:mirrors is not supported in the current Dart "
           "runtime");
     }
     if (!Api::IsFfiEnabled() &&
-        target_library.url() == Symbols::DartFfi().raw()) {
+        target_library.url() == Symbols::DartFfi().ptr()) {
       H.ReportError(
           "import of dart:ffi is not supported in the current Dart runtime");
     }
@@ -1521,7 +1521,7 @@
   // The expression evaluation functions should be GC-able as soon as
   // they are not reachable anymore and we never look them up by name.
   const bool register_class =
-      library.raw() != expression_evaluation_library_.raw();
+      library.ptr() != expression_evaluation_library_.ptr();
 
   if (loading_native_wrappers_library_ || !register_class) {
     FinishClassLoading(*out_class, library, toplevel_class, class_offset,
@@ -1655,8 +1655,8 @@
     // optimized. We immediately set the guarded_cid_ to kDynamicCid, which
     // is effectively the same as calling this method first with Pointer and
     // subsequently with TypedData with field guards.
-    if (klass.Name() == Symbols::Struct().raw() &&
-        Library::Handle(Z, klass.library()).url() == Symbols::DartFfi().raw()) {
+    if (klass.Name() == Symbols::Struct().ptr() &&
+        Library::Handle(Z, klass.library()).url() == Symbols::DartFfi().ptr()) {
       ASSERT(fields_.length() == 1);
       ASSERT(String::Handle(Z, fields_[0]->name())
                  .StartsWith(Symbols::_addressOf()));
@@ -1705,7 +1705,7 @@
 
     FunctionType& signature = FunctionType::Handle(Z, FunctionType::New());
     const Function& function = Function::ZoneHandle(
-        Z, Function::New(signature, name, FunctionLayout::kConstructor,
+        Z, Function::New(signature, name, UntaggedFunction::kConstructor,
                          false,  // is_static
                          constructor_helper.IsConst(),
                          false,  // is_abstract
@@ -1810,7 +1810,7 @@
   LibraryIndex library_index(library_kernel_data, info.kernel_binary_version());
 
   if (klass.IsTopLevel()) {
-    ASSERT(klass.raw() == toplevel_class.raw());
+    ASSERT(klass.ptr() == toplevel_class.ptr());
     kernel_loader.FinishTopLevelClassLoading(klass, library, library_index);
     return;
   }
@@ -1858,7 +1858,7 @@
       const intptr_t start = helper_.ReaderOffset();
       detected_name = DetectExternalNameCtor();
       if (!detected_name.IsNull()) {
-        *native_name = detected_name.raw();
+        *native_name = detected_name.ptr();
         continue;
       }
 
@@ -1933,7 +1933,7 @@
         if (constant_reader.IsInstanceConstant(constant_table_offset,
                                                external_name_class_)) {
           constant = constant_reader.ReadConstant(constant_table_offset);
-          ASSERT(constant.clazz() == external_name_class_.raw());
+          ASSERT(constant.clazz() == external_name_class_.ptr());
           *native_name ^= constant.GetField(external_name_field_);
         } else if (constant_reader.IsInstanceConstant(constant_table_offset,
                                                       pragma_class_)) {
@@ -1985,7 +1985,7 @@
   procedure_helper.SetJustRead(ProcedureHelper::kAnnotations);
   const Object& script_class =
       ClassForScriptAt(owner, procedure_helper.source_uri_index_);
-  FunctionLayout::Kind kind = GetFunctionType(procedure_helper.kind_);
+  UntaggedFunction::Kind kind = GetFunctionType(procedure_helper.kind_);
 
   // We do not register expression evaluation libraries with the VM:
   // The expression evaluation functions should be GC-able as soon as
@@ -2013,7 +2013,7 @@
   function.set_is_extension_member(is_extension_member);
   if ((library.is_dart_scheme() &&
        H.IsPrivate(procedure_helper.canonical_name_)) ||
-      (function.is_static() && (library.raw() == Library::InternalLibrary()))) {
+      (function.is_static() && (library.ptr() == Library::InternalLibrary()))) {
     function.set_is_reflectable(false);
   }
   if (procedure_helper.IsMemberSignature()) {
@@ -2032,19 +2032,19 @@
                              FunctionNodeHelper::kSync);
   switch (function_node_helper.dart_async_marker_) {
     case FunctionNodeHelper::kSyncStar:
-      function.set_modifier(FunctionLayout::kSyncGen);
+      function.set_modifier(UntaggedFunction::kSyncGen);
       function.set_is_visible(!FLAG_causal_async_stacks &&
                               !FLAG_lazy_async_stacks);
       break;
     case FunctionNodeHelper::kAsync:
-      function.set_modifier(FunctionLayout::kAsync);
+      function.set_modifier(UntaggedFunction::kAsync);
       function.set_is_inlinable(!FLAG_causal_async_stacks &&
                                 !FLAG_lazy_async_stacks);
       function.set_is_visible(!FLAG_causal_async_stacks &&
                               !FLAG_lazy_async_stacks);
       break;
     case FunctionNodeHelper::kAsyncStar:
-      function.set_modifier(FunctionLayout::kAsyncGen);
+      function.set_modifier(UntaggedFunction::kAsyncGen);
       function.set_is_inlinable(!FLAG_causal_async_stacks &&
                                 !FLAG_lazy_async_stacks);
       function.set_is_visible(!FLAG_causal_async_stacks &&
@@ -2097,7 +2097,7 @@
 const Object& KernelLoader::ClassForScriptAt(const Class& klass,
                                              intptr_t source_uri_index) {
   const Script& correct_script = Script::Handle(Z, ScriptAt(source_uri_index));
-  if (klass.script() != correct_script.raw()) {
+  if (klass.script() != correct_script.ptr()) {
     // Lazily create the [patch_classes_] array in case we need it.
     if (patch_classes_.IsNull()) {
       const Array& scripts = Array::Handle(Z, kernel_program_info_.scripts());
@@ -2108,7 +2108,7 @@
     // Use cache for patch classes. This works best for in-order usages.
     PatchClass& patch_class = PatchClass::ZoneHandle(Z);
     patch_class ^= patch_classes_.At(source_uri_index);
-    if (patch_class.IsNull() || patch_class.origin_class() != klass.raw()) {
+    if (patch_class.IsNull() || patch_class.origin_class() != klass.ptr()) {
       ASSERT(!library_kernel_data_.IsNull());
       patch_class = PatchClass::New(klass, correct_script);
       patch_class.set_library_kernel_data(library_kernel_data_);
@@ -2138,8 +2138,8 @@
     wrapper.uri = &uri_string;
     UriToSourceTableEntry* pair = uri_to_source_table->LookupValue(&wrapper);
     if (pair != nullptr) {
-      sources = pair->sources->raw();
-      line_starts = pair->line_starts->raw();
+      sources = pair->sources->ptr();
+      line_starts = pair->line_starts->ptr();
     }
   }
 
@@ -2147,7 +2147,7 @@
     const String& script_source = helper_.GetSourceFor(index);
     line_starts = helper_.GetLineStartsFor(index);
 
-    if (script_source.raw() == Symbols::Empty().raw() &&
+    if (script_source.ptr() == Symbols::Empty().ptr() &&
         line_starts.Length() == 0 && uri_string.Length() > 0) {
      // Entry included only to provide URI; the actual source should already exist
       // in the VM, so try to find it.
@@ -2165,7 +2165,7 @@
         }
       }
     } else {
-      sources = script_source.raw();
+      sources = script_source.ptr();
     }
   }
 
@@ -2178,7 +2178,7 @@
   script.set_constant_coverage(constant_coverage);
 #endif  // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
   script.set_debug_positions(Array::null_array());
-  return script.raw();
+  return script.ptr();
 }
 
 InstancePtr KernelLoader::GenerateFieldAccessors(const Class& klass,
@@ -2193,7 +2193,7 @@
         converter.IsSimple(helper_.ReaderOffset() + 1);  // ignore the tag.
     if (has_simple_initializer) {
       if (field_helper->IsStatic()) {
-        return converter.SimpleValue().raw();
+        return converter.SimpleValue().ptr();
       } else {
         // Note: optimizer relies on DoubleInitialized bit in its field-unboxing
         // heuristics. See JitCallSpecializer::VisitStoreInstanceField for more
@@ -2225,8 +2225,8 @@
       Z,
       Function::New(
           signature, getter_name,
-          field_helper->IsStatic() ? FunctionLayout::kImplicitStaticGetter
-                                   : FunctionLayout::kImplicitGetter,
+          field_helper->IsStatic() ? UntaggedFunction::kImplicitStaticGetter
+                                   : UntaggedFunction::kImplicitGetter,
           field_helper->IsStatic(),
           // The functions created by the parser have is_const for static fields
           // that are const (not just final) and they have is_const for
@@ -2257,7 +2257,7 @@
         FunctionType::Handle(Z, FunctionType::New());
     Function& setter = Function::ZoneHandle(
         Z,
-        Function::New(signature, setter_name, FunctionLayout::kImplicitSetter,
+        Function::New(signature, setter_name, UntaggedFunction::kImplicitSetter,
                       field_helper->IsStatic(),
                       false,  // is_const
                       false,  // is_abstract
@@ -2277,7 +2277,7 @@
   }
 
   // If static, we do need a getter that evaluates the initializer if necessary.
-  return field_helper->IsStatic() ? Instance::sentinel().raw()
+  return field_helper->IsStatic() ? Instance::sentinel().ptr()
                                   : Instance::null();
 }
 
@@ -2324,9 +2324,9 @@
   if (url.Equals(Symbols::EvalSourceUri())) {
     if (expression_evaluation_library_.IsNull()) {
       handle = Library::New(url);
-      expression_evaluation_library_ = handle.raw();
+      expression_evaluation_library_ = handle.ptr();
     }
-    return expression_evaluation_library_.raw();
+    return expression_evaluation_library_.ptr();
   }
   handle = Library::LookupLibrary(thread_, url);
   if (handle.IsNull()) {
@@ -2362,7 +2362,7 @@
     // We do not register expression evaluation classes with the VM:
     // The expression evaluation functions should be GC-able as soon as
     // they are not reachable anymore and we never look them up by name.
-    register_class = library.raw() != expression_evaluation_library_.raw();
+    register_class = library.ptr() != expression_evaluation_library_.ptr();
 
     handle = Class::New(library, name, Script::Handle(Z),
                         TokenPosition::kNoSource, register_class);
@@ -2375,21 +2375,21 @@
     name_index_handle_ = Smi::New(klass);
     kernel_program_info_.InsertClass(thread_, name_index_handle_, handle);
   }
-  return handle.raw();
+  return handle.ptr();
 }
 
-FunctionLayout::Kind KernelLoader::GetFunctionType(
+UntaggedFunction::Kind KernelLoader::GetFunctionType(
     ProcedureHelper::Kind procedure_kind) {
   intptr_t lookuptable[] = {
-      FunctionLayout::kRegularFunction,  // Procedure::kMethod
-      FunctionLayout::kGetterFunction,   // Procedure::kGetter
-      FunctionLayout::kSetterFunction,   // Procedure::kSetter
-      FunctionLayout::kRegularFunction,  // Procedure::kOperator
-      FunctionLayout::kConstructor,      // Procedure::kFactory
+      UntaggedFunction::kRegularFunction,  // Procedure::kMethod
+      UntaggedFunction::kGetterFunction,   // Procedure::kGetter
+      UntaggedFunction::kSetterFunction,   // Procedure::kSetter
+      UntaggedFunction::kRegularFunction,  // Procedure::kOperator
+      UntaggedFunction::kConstructor,      // Procedure::kFactory
   };
   intptr_t kind = static_cast<int>(procedure_kind);
   ASSERT(0 <= kind && kind <= ProcedureHelper::kFactory);
-  return static_cast<FunctionLayout::Kind>(lookuptable[kind]);
+  return static_cast<UntaggedFunction::Kind>(lookuptable[kind]);
 }
 
 FunctionPtr CreateFieldInitializerFunction(Thread* thread,
@@ -2419,7 +2419,7 @@
   FunctionType& signature = FunctionType::Handle(zone, FunctionType::New());
   const Function& initializer_fun = Function::Handle(
       zone,
-      Function::New(signature, init_name, FunctionLayout::kFieldInitializer,
+      Function::New(signature, init_name, UntaggedFunction::kFieldInitializer,
                     field.is_static(),  // is_static
                     false,              // is_const
                     false,              // is_abstract
@@ -2449,7 +2449,7 @@
   initializer_fun.set_signature(signature);
 
   field.SetInitializerFunction(initializer_fun);
-  return initializer_fun.raw();
+  return initializer_fun.ptr();
 }
 
 }  // namespace kernel
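
KernelLoader::GetFunctionType above maps procedure kinds to function kinds through a small lookup table guarded by an ASSERT on the index. The same table-driven shape, with illustrative enumerators rather than the VM's actual values:

    #include <cassert>

    enum class ProcedureKind { kMethod, kGetter, kSetter, kOperator, kFactory };
    enum class FunctionKind {
      kRegularFunction,
      kGetterFunction,
      kSetterFunction,
      kConstructor,
    };

    static FunctionKind GetFunctionKind(ProcedureKind procedure_kind) {
      static const FunctionKind table[] = {
          FunctionKind::kRegularFunction,  // kMethod
          FunctionKind::kGetterFunction,   // kGetter
          FunctionKind::kSetterFunction,   // kSetter
          FunctionKind::kRegularFunction,  // kOperator
          FunctionKind::kConstructor,      // kFactory
      };
      const int kind = static_cast<int>(procedure_kind);
      assert(0 <= kind && kind <= static_cast<int>(ProcedureKind::kFactory));
      return table[kind];
    }

    int main() {
      assert(GetFunctionKind(ProcedureKind::kFactory) == FunctionKind::kConstructor);
      assert(GetFunctionKind(ProcedureKind::kOperator) == FunctionKind::kRegularFunction);
      return 0;
    }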
diff --git a/runtime/vm/kernel_loader.h b/runtime/vm/kernel_loader.h
index 24e53af..00f6335 100644
--- a/runtime/vm/kernel_loader.h
+++ b/runtime/vm/kernel_loader.h
@@ -345,7 +345,7 @@
   LibraryPtr LookupLibraryFromClass(NameIndex klass);
   ClassPtr LookupClass(const Library& library, NameIndex klass);
 
-  FunctionLayout::Kind GetFunctionType(ProcedureHelper::Kind procedure_kind);
+  UntaggedFunction::Kind GetFunctionType(ProcedureHelper::Kind procedure_kind);
 
   void EnsureExternalClassIsLookedUp() {
     if (external_name_class_.IsNull()) {
diff --git a/runtime/vm/megamorphic_cache_table.cc b/runtime/vm/megamorphic_cache_table.cc
index 76c676f..d1607d4 100644
--- a/runtime/vm/megamorphic_cache_table.cc
+++ b/runtime/vm/megamorphic_cache_table.cc
@@ -32,16 +32,16 @@
   } else {
     for (intptr_t i = 0; i < table.Length(); i++) {
       cache ^= table.At(i);
-      if ((cache.target_name() == name.raw()) &&
-          (cache.arguments_descriptor() == descriptor.raw())) {
-        return cache.raw();
+      if ((cache.target_name() == name.ptr()) &&
+          (cache.arguments_descriptor() == descriptor.ptr())) {
+        return cache.ptr();
       }
     }
   }
 
   cache = MegamorphicCache::New(name, descriptor);
   table.Add(cache, Heap::kOld);
-  return cache.raw();
+  return cache.ptr();
 }
 
 void MegamorphicCacheTable::PrintSizes(Isolate* isolate) {
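
MegamorphicCacheTable::Lookup above is a get-or-create scan: reuse the cache whose target name and arguments descriptor match by pointer identity, otherwise allocate and register a new one. A compact sketch of that scheme with stand-in types:

    #include <cassert>
    #include <vector>

    struct ToyCache {
      const void* target_name;
      const void* arguments_descriptor;
    };

    static ToyCache* LookupOrAdd(std::vector<ToyCache>* table,
                                 const void* name,
                                 const void* descriptor) {
      for (ToyCache& cache : *table) {
        if (cache.target_name == name &&
            cache.arguments_descriptor == descriptor) {
          return &cache;  // hit: reuse the existing cache
        }
      }
      table->push_back(ToyCache{name, descriptor});  // miss: create and register
      return &table->back();
    }

    int main() {
      std::vector<ToyCache> table;
      int name, descriptor;
      ToyCache* first = LookupOrAdd(&table, &name, &descriptor);
      ToyCache* second = LookupOrAdd(&table, &name, &descriptor);
      assert(first == second && table.size() == 1);  // second lookup reuses it
      return 0;
    }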
diff --git a/runtime/vm/message.cc b/runtime/vm/message.cc
index b4d4c69..e533e32 100644
--- a/runtime/vm/message.cc
+++ b/runtime/vm/message.cc
@@ -44,7 +44,7 @@
       snapshot_length_(0),
       finalizable_data_(NULL),
       priority_(priority) {
-  ASSERT(!raw_obj->IsHeapObject() || raw_obj->ptr()->InVMIsolateHeap());
+  ASSERT(!raw_obj->IsHeapObject() || raw_obj->untag()->InVMIsolateHeap());
   ASSERT((priority == kNormalPriority) ||
          (delivery_failure_port == kIllegalPort));
   ASSERT(IsRaw());
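
The ASSERT in the Message constructor above encodes an invariant: a raw-object message must be an immediate (heap-object tag bit clear) or live in the shared VM isolate heap, since anything else could be moved by a GC in its owner's heap. A toy model of the check (the tagging scheme and heap bounds here are illustrative, not the VM's):

    #include <cassert>
    #include <cstdint>

    static constexpr uintptr_t kHeapObjectTagMask = 1;

    static bool IsHeapObject(uintptr_t tagged) {
      return (tagged & kHeapObjectTagMask) != 0;  // immediates have the bit clear
    }

    static bool InVMIsolateHeap(uintptr_t tagged,
                                uintptr_t heap_base,
                                uintptr_t heap_end) {
      const uintptr_t addr = tagged & ~kHeapObjectTagMask;
      return heap_base <= addr && addr < heap_end;
    }

    int main() {
      const uintptr_t kBase = 0x1000, kEnd = 0x2000;
      const uintptr_t smi = 0x10;       // immediate value: always safe to send
      const uintptr_t vm_obj = 0x1001;  // tagged pointer into the VM heap
      assert(!IsHeapObject(smi) || InVMIsolateHeap(smi, kBase, kEnd));
      assert(!IsHeapObject(vm_obj) || InVMIsolateHeap(vm_obj, kBase, kEnd));
      return 0;
    }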
diff --git a/runtime/vm/native_api_impl.cc b/runtime/vm/native_api_impl.cc
index 81f2f1e..9ed7641 100644
--- a/runtime/vm/native_api_impl.cc
+++ b/runtime/vm/native_api_impl.cc
@@ -201,7 +201,7 @@
   CHECK_CALLBACK_STATE(T);
   const Error& error = Error::Handle(T->zone(), Library::CompileAll());
   if (!error.IsNull()) {
-    return Api::NewHandle(T, error.raw());
+    return Api::NewHandle(T, error.ptr());
   }
   return Api::Success();
 #endif  // defined(DART_PRECOMPILED_RUNTIME)
@@ -221,7 +221,7 @@
   CHECK_CALLBACK_STATE(T);
   const Error& error = Error::Handle(T->zone(), Library::FinalizeAllClasses());
   if (!error.IsNull()) {
-    return Api::NewHandle(T, error.raw());
+    return Api::NewHandle(T, error.ptr());
   }
   return Api::Success();
 #endif  // defined(DART_PRECOMPILED_RUNTIME)
diff --git a/runtime/vm/native_arguments.h b/runtime/vm/native_arguments.h
index b942e80..5b206ed 100644
--- a/runtime/vm/native_arguments.h
+++ b/runtime/vm/native_arguments.h
@@ -96,7 +96,7 @@
     ASSERT((index >= 0) && (index < ArgCount()));
     ObjectPtr* arg_ptr =
         &(argv_[ReverseArgOrderBit::decode(argc_tag_) ? index : -index]);
-    *arg_ptr = value.raw();
+    *arg_ptr = value.ptr();
   }
 
   // Does not include hidden type arguments vector.
@@ -152,14 +152,14 @@
     TypeArguments& type_args = TypeArguments::Handle(NativeTypeArgs());
     if (type_args.IsNull()) {
      // A null vector represents an infinite list of dynamics.
-      return Type::dynamic_type().raw();
+      return Type::dynamic_type().ptr();
     }
     return type_args.TypeAt(index);
   }
 
   void SetReturn(const Object& value) const {
     ASSERT(thread_->execution_state() == Thread::kThreadInVM);
-    *retval_ = value.raw();
+    *retval_ = value.ptr();
   }
 
   ObjectPtr ReturnValue() const {
diff --git a/runtime/vm/native_entry.cc b/runtime/vm/native_entry.cc
index a299e5d..e37b70d 100644
--- a/runtime/vm/native_entry.cc
+++ b/runtime/vm/native_entry.cc
@@ -48,7 +48,7 @@
   {
     Thread* T = Thread::Current();
     Api::Scope api_scope(T);
-    Dart_Handle api_function_name = Api::NewHandle(T, function_name.raw());
+    Dart_Handle api_function_name = Api::NewHandle(T, function_name.ptr());
     {
       Dart_NativeEntryResolver resolver = library.native_entry_resolver();
       TransitionVMToNative transition(T);
@@ -139,7 +139,7 @@
     // been set.
     ObjectPtr return_value_unsafe = reinterpret_cast<BootstrapNativeFunction>(
         func)(thread, zone.GetZone(), arguments);
-    if (return_value_unsafe != Object::sentinel().raw()) {
+    if (return_value_unsafe != Object::sentinel().ptr()) {
       ASSERT(return_value_unsafe->IsDartInstance());
       arguments->SetReturnUnsafe(return_value_unsafe);
     }
@@ -295,18 +295,18 @@
     ASSERT(current_function ==
                reinterpret_cast<NativeFunction>(LinkNativeCall) ||
            current_function == target_function);
-    ASSERT(current_trampoline.raw() == StubCode::CallBootstrapNative().raw() ||
+    ASSERT(current_trampoline.ptr() == StubCode::CallBootstrapNative().ptr() ||
            current_function == target_function);
 #endif
 
     NativeFunction patch_target_function = target_function;
     Code& trampoline = Code::Handle(zone);
     if (is_bootstrap_native) {
-      trampoline = StubCode::CallBootstrapNative().raw();
+      trampoline = StubCode::CallBootstrapNative().ptr();
     } else if (is_auto_scope) {
-      trampoline = StubCode::CallAutoScopeNative().raw();
+      trampoline = StubCode::CallAutoScopeNative().ptr();
     } else {
-      trampoline = StubCode::CallNoScopeNative().raw();
+      trampoline = StubCode::CallNoScopeNative().ptr();
     }
     CodePatcher::PatchNativeCallAt(caller_frame->pc(), code,
                                    patch_target_function, trampoline);
@@ -338,15 +338,15 @@
 
 // Note: not GC safe. Use with care.
 NativeEntryData::Payload* NativeEntryData::FromTypedArray(TypedDataPtr data) {
-  return reinterpret_cast<Payload*>(data->ptr()->data());
+  return reinterpret_cast<Payload*>(data->untag()->data());
 }
 
 MethodRecognizer::Kind NativeEntryData::kind() const {
-  return FromTypedArray(data_.raw())->kind;
+  return FromTypedArray(data_.ptr())->kind;
 }
 
 void NativeEntryData::set_kind(MethodRecognizer::Kind value) const {
-  FromTypedArray(data_.raw())->kind = value;
+  FromTypedArray(data_.ptr())->kind = value;
 }
 
 MethodRecognizer::Kind NativeEntryData::GetKind(TypedDataPtr data) {
@@ -354,11 +354,11 @@
 }
 
 NativeFunctionWrapper NativeEntryData::trampoline() const {
-  return FromTypedArray(data_.raw())->trampoline;
+  return FromTypedArray(data_.ptr())->trampoline;
 }
 
 void NativeEntryData::set_trampoline(NativeFunctionWrapper value) const {
-  FromTypedArray(data_.raw())->trampoline = value;
+  FromTypedArray(data_.ptr())->trampoline = value;
 }
 
 NativeFunctionWrapper NativeEntryData::GetTrampoline(TypedDataPtr data) {
@@ -366,11 +366,11 @@
 }
 
 NativeFunction NativeEntryData::native_function() const {
-  return FromTypedArray(data_.raw())->native_function;
+  return FromTypedArray(data_.ptr())->native_function;
 }
 
 void NativeEntryData::set_native_function(NativeFunction value) const {
-  FromTypedArray(data_.raw())->native_function = value;
+  FromTypedArray(data_.ptr())->native_function = value;
 }
 
 NativeFunction NativeEntryData::GetNativeFunction(TypedDataPtr data) {
@@ -378,11 +378,11 @@
 }
 
 intptr_t NativeEntryData::argc_tag() const {
-  return FromTypedArray(data_.raw())->argc_tag;
+  return FromTypedArray(data_.ptr())->argc_tag;
 }
 
 void NativeEntryData::set_argc_tag(intptr_t value) const {
-  FromTypedArray(data_.raw())->argc_tag = value;
+  FromTypedArray(data_.ptr())->argc_tag = value;
 }
 
 intptr_t NativeEntryData::GetArgcTag(TypedDataPtr data) {
@@ -400,7 +400,7 @@
   native_entry.set_trampoline(trampoline);
   native_entry.set_native_function(native_function);
   native_entry.set_argc_tag(argc_tag);
-  return data.raw();
+  return data.ptr();
 }
 
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
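
NativeEntryData above treats a typed-data buffer as backing store for a plain-old-data Payload, reading fields back through a reinterpret_cast of the buffer's data pointer; as the source notes, this is not GC safe in the real VM. A self-contained sketch of the pattern with a hypothetical ToyPayload:

    #include <cassert>
    #include <cstdint>
    #include <new>
    #include <vector>

    struct ToyPayload {  // hypothetical mirror of NativeEntryData::Payload
      int kind;
      intptr_t argc_tag;
    };

    static ToyPayload* FromBytes(uint8_t* data) {
      // Mirrors FromTypedArray(data->untag()->data()); unsafe in the VM if a
      // moving collector relocates the backing store while the pointer lives.
      return reinterpret_cast<ToyPayload*>(data);
    }

    int main() {
      std::vector<uint8_t> buffer(sizeof(ToyPayload));
      new (buffer.data()) ToyPayload{7, 3};  // place the payload in the buffer
      ToyPayload* payload = FromBytes(buffer.data());
      assert(payload->kind == 7 && payload->argc_tag == 3);
      return 0;
    }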
diff --git a/runtime/vm/native_entry_test.cc b/runtime/vm/native_entry_test.cc
index 2eae7dd..b3e0b19 100644
--- a/runtime/vm/native_entry_test.cc
+++ b/runtime/vm/native_entry_test.cc
@@ -62,7 +62,7 @@
     Dart_Handle arg = Dart_GetNativeArgument(args, i);
     GET_NATIVE_ARGUMENT(Integer, argument, arguments->NativeArgAt(i));
     EXPECT(argument.IsInteger());                       // May be null.
-    EXPECT_EQ(Api::UnwrapHandle(arg), argument.raw());  // May be null.
+    EXPECT_EQ(Api::UnwrapHandle(arg), argument.ptr());  // May be null.
     int64_t arg_value = -1;
     if (argument.IsNull()) {
       EXPECT_ERROR(Dart_IntegerToInt64(arg, &arg_value),
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index ffad3f4..7ffbaf6 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -438,7 +438,7 @@
 
   if (start == 0) {
     // This unmangled_name is fine as it is.
-    return name.raw();
+    return name.ptr();
   }
 
   if (is_extension) {
@@ -463,9 +463,9 @@
     }
   }
 
-  return result.raw();
+  return result.ptr();
 #endif                // !defined(DART_PRECOMPILED_RUNTIME)
-  return name.raw();  // In AOT, return argument unchanged.
+  return name.ptr();  // In AOT, return argument unchanged.
 }
 
 template <typename type>
@@ -529,7 +529,7 @@
     null_ = static_cast<InstancePtr>(address + kHeapObjectTag);
     // The call below is using 'null_' to initialize itself.
     InitializeObject(address, kNullCid, Instance::InstanceSize());
-    null_->ptr()->SetCanonical();
+    null_->untag()->SetCanonical();
   }
 
   // Allocate and initialize the bool instances.
@@ -540,23 +540,23 @@
     // Allocate a dummy bool object to give true the desired alignment.
     uword address = heap->Allocate(Bool::InstanceSize(), Heap::kOld);
     InitializeObject(address, kBoolCid, Bool::InstanceSize());
-    static_cast<BoolPtr>(address + kHeapObjectTag)->ptr()->value_ = false;
+    static_cast<BoolPtr>(address + kHeapObjectTag)->untag()->value_ = false;
   }
   {
     // Allocate true.
     uword address = heap->Allocate(Bool::InstanceSize(), Heap::kOld);
     true_ = static_cast<BoolPtr>(address + kHeapObjectTag);
     InitializeObject(address, kBoolCid, Bool::InstanceSize());
-    true_->ptr()->value_ = true;
-    true_->ptr()->SetCanonical();
+    true_->untag()->value_ = true;
+    true_->untag()->SetCanonical();
   }
   {
     // Allocate false.
     uword address = heap->Allocate(Bool::InstanceSize(), Heap::kOld);
     false_ = static_cast<BoolPtr>(address + kHeapObjectTag);
     InitializeObject(address, kBoolCid, Bool::InstanceSize());
-    false_->ptr()->value_ = false;
-    false_->ptr()->SetCanonical();
+    false_->untag()->value_ = false;
+    false_->untag()->SetCanonical();
   }
 
   // Check that the objects have been allocated at appropriate addresses.
@@ -700,9 +700,9 @@
 
     Class fake;
     // Initialization from Class::New<Class>.
-    // Directly set raw_ to break a circular dependency: SetRaw will attempt
+    // Directly set ptr_ to break a circular dependency: SetRaw will attempt
    // to look up the class class in the class table where it is not registered yet.
-    cls.raw_ = class_class_;
+    cls.ptr_ = class_class_;
     ASSERT(builtin_vtables_[kClassCid] == fake.vtable());
     cls.set_instance_size(
         Class::InstanceSize(),
@@ -773,108 +773,108 @@
 
   // Allocate the remaining VM internal classes.
   cls = Class::New<TypeArguments, RTN::TypeArguments>(isolate_group);
-  type_arguments_class_ = cls.raw();
+  type_arguments_class_ = cls.ptr();
 
   cls = Class::New<PatchClass, RTN::PatchClass>(isolate_group);
-  patch_class_class_ = cls.raw();
+  patch_class_class_ = cls.ptr();
 
   cls = Class::New<Function, RTN::Function>(isolate_group);
-  function_class_ = cls.raw();
+  function_class_ = cls.ptr();
 
   cls = Class::New<ClosureData, RTN::ClosureData>(isolate_group);
-  closure_data_class_ = cls.raw();
+  closure_data_class_ = cls.ptr();
 
   cls = Class::New<FfiTrampolineData, RTN::FfiTrampolineData>(isolate_group);
-  ffi_trampoline_data_class_ = cls.raw();
+  ffi_trampoline_data_class_ = cls.ptr();
 
   cls = Class::New<Field, RTN::Field>(isolate_group);
-  field_class_ = cls.raw();
+  field_class_ = cls.ptr();
 
   cls = Class::New<Script, RTN::Script>(isolate_group);
-  script_class_ = cls.raw();
+  script_class_ = cls.ptr();
 
   cls = Class::New<Library, RTN::Library>(isolate_group);
-  library_class_ = cls.raw();
+  library_class_ = cls.ptr();
 
   cls = Class::New<Namespace, RTN::Namespace>(isolate_group);
-  namespace_class_ = cls.raw();
+  namespace_class_ = cls.ptr();
 
   cls = Class::New<KernelProgramInfo, RTN::KernelProgramInfo>(isolate_group);
-  kernel_program_info_class_ = cls.raw();
+  kernel_program_info_class_ = cls.ptr();
 
   cls = Class::New<Code, RTN::Code>(isolate_group);
-  code_class_ = cls.raw();
+  code_class_ = cls.ptr();
 
   cls = Class::New<Instructions, RTN::Instructions>(isolate_group);
-  instructions_class_ = cls.raw();
+  instructions_class_ = cls.ptr();
 
   cls =
       Class::New<InstructionsSection, RTN::InstructionsSection>(isolate_group);
-  instructions_section_class_ = cls.raw();
+  instructions_section_class_ = cls.ptr();
 
   cls = Class::New<ObjectPool, RTN::ObjectPool>(isolate_group);
-  object_pool_class_ = cls.raw();
+  object_pool_class_ = cls.ptr();
 
   cls = Class::New<PcDescriptors, RTN::PcDescriptors>(isolate_group);
-  pc_descriptors_class_ = cls.raw();
+  pc_descriptors_class_ = cls.ptr();
 
   cls = Class::New<CodeSourceMap, RTN::CodeSourceMap>(isolate_group);
-  code_source_map_class_ = cls.raw();
+  code_source_map_class_ = cls.ptr();
 
   cls =
       Class::New<CompressedStackMaps, RTN::CompressedStackMaps>(isolate_group);
-  compressed_stackmaps_class_ = cls.raw();
+  compressed_stackmaps_class_ = cls.ptr();
 
   cls =
       Class::New<LocalVarDescriptors, RTN::LocalVarDescriptors>(isolate_group);
-  var_descriptors_class_ = cls.raw();
+  var_descriptors_class_ = cls.ptr();
 
   cls = Class::New<ExceptionHandlers, RTN::ExceptionHandlers>(isolate_group);
-  exception_handlers_class_ = cls.raw();
+  exception_handlers_class_ = cls.ptr();
 
   cls = Class::New<Context, RTN::Context>(isolate_group);
-  context_class_ = cls.raw();
+  context_class_ = cls.ptr();
 
   cls = Class::New<ContextScope, RTN::ContextScope>(isolate_group);
-  context_scope_class_ = cls.raw();
+  context_scope_class_ = cls.ptr();
 
   cls = Class::New<SingleTargetCache, RTN::SingleTargetCache>(isolate_group);
-  singletargetcache_class_ = cls.raw();
+  singletargetcache_class_ = cls.ptr();
 
   cls = Class::New<UnlinkedCall, RTN::UnlinkedCall>(isolate_group);
-  unlinkedcall_class_ = cls.raw();
+  unlinkedcall_class_ = cls.ptr();
 
   cls = Class::New<MonomorphicSmiableCall, RTN::MonomorphicSmiableCall>(
       isolate_group);
-  monomorphicsmiablecall_class_ = cls.raw();
+  monomorphicsmiablecall_class_ = cls.ptr();
 
   cls = Class::New<ICData, RTN::ICData>(isolate_group);
-  icdata_class_ = cls.raw();
+  icdata_class_ = cls.ptr();
 
   cls = Class::New<MegamorphicCache, RTN::MegamorphicCache>(isolate_group);
-  megamorphic_cache_class_ = cls.raw();
+  megamorphic_cache_class_ = cls.ptr();
 
   cls = Class::New<SubtypeTestCache, RTN::SubtypeTestCache>(isolate_group);
-  subtypetestcache_class_ = cls.raw();
+  subtypetestcache_class_ = cls.ptr();
 
   cls = Class::New<LoadingUnit, RTN::LoadingUnit>(isolate_group);
-  loadingunit_class_ = cls.raw();
+  loadingunit_class_ = cls.ptr();
 
   cls = Class::New<ApiError, RTN::ApiError>(isolate_group);
-  api_error_class_ = cls.raw();
+  api_error_class_ = cls.ptr();
 
   cls = Class::New<LanguageError, RTN::LanguageError>(isolate_group);
-  language_error_class_ = cls.raw();
+  language_error_class_ = cls.ptr();
 
   cls = Class::New<UnhandledException, RTN::UnhandledException>(isolate_group);
-  unhandled_exception_class_ = cls.raw();
+  unhandled_exception_class_ = cls.ptr();
 
   cls = Class::New<UnwindError, RTN::UnwindError>(isolate_group);
-  unwind_error_class_ = cls.raw();
+  unwind_error_class_ = cls.ptr();
 
   cls = Class::New<WeakSerializationReference, RTN::WeakSerializationReference>(
       isolate_group);
-  weak_serialization_reference_class_ = cls.raw();
+  weak_serialization_reference_class_ = cls.ptr();
 
   ASSERT(class_class() != null_);
 
@@ -921,7 +921,7 @@
     InitializeObject(address, kImmutableArrayCid, Array::InstanceSize(0));
     Array::initializeHandle(empty_array_,
                             static_cast<ArrayPtr>(address + kHeapObjectTag));
-    empty_array_->raw_ptr()->set_length(Smi::New(0));
+    empty_array_->untag()->set_length(Smi::New(0));
     empty_array_->SetCanonical();
   }
 
@@ -932,7 +932,7 @@
     InitializeObject(address, kImmutableArrayCid, Array::InstanceSize(1));
     Array::initializeHandle(zero_array_,
                             static_cast<ArrayPtr>(address + kHeapObjectTag));
-    zero_array_->raw_ptr()->set_length(Smi::New(1));
+    zero_array_->untag()->set_length(Smi::New(1));
     smi = Smi::New(0);
     zero_array_->SetAt(0, smi);
     zero_array_->SetCanonical();
@@ -946,9 +946,9 @@
         empty_context_scope_,
         static_cast<ContextScopePtr>(address + kHeapObjectTag));
     empty_context_scope_->StoreNonPointer(
-        &empty_context_scope_->raw_ptr()->num_variables_, 0);
+        &empty_context_scope_->untag()->num_variables_, 0);
     empty_context_scope_->StoreNonPointer(
-        &empty_context_scope_->raw_ptr()->is_implicit_, true);
+        &empty_context_scope_->untag()->is_implicit_, true);
     empty_context_scope_->SetCanonical();
   }
 
@@ -959,7 +959,7 @@
     ObjectPool::initializeHandle(
         empty_object_pool_,
         static_cast<ObjectPoolPtr>(address + kHeapObjectTag));
-    empty_object_pool_->StoreNonPointer(&empty_object_pool_->raw_ptr()->length_,
+    empty_object_pool_->StoreNonPointer(&empty_object_pool_->untag()->length_,
                                         0);
     empty_object_pool_->SetCanonical();
   }
@@ -973,7 +973,7 @@
         empty_compressed_stackmaps_,
         static_cast<CompressedStackMapsPtr>(address + kHeapObjectTag));
     empty_compressed_stackmaps_->StoreNonPointer(
-        &empty_compressed_stackmaps_->raw_ptr()->flags_and_size_, 0);
+        &empty_compressed_stackmaps_->untag()->flags_and_size_, 0);
     empty_compressed_stackmaps_->SetCanonical();
   }
 
@@ -985,7 +985,7 @@
     PcDescriptors::initializeHandle(
         empty_descriptors_,
         static_cast<PcDescriptorsPtr>(address + kHeapObjectTag));
-    empty_descriptors_->StoreNonPointer(&empty_descriptors_->raw_ptr()->length_,
+    empty_descriptors_->StoreNonPointer(&empty_descriptors_->untag()->length_,
                                         0);
     empty_descriptors_->SetCanonical();
   }
@@ -1000,7 +1000,7 @@
         empty_var_descriptors_,
         static_cast<LocalVarDescriptorsPtr>(address + kHeapObjectTag));
     empty_var_descriptors_->StoreNonPointer(
-        &empty_var_descriptors_->raw_ptr()->num_entries_, 0);
+        &empty_var_descriptors_->untag()->num_entries_, 0);
     empty_var_descriptors_->SetCanonical();
   }
 
@@ -1016,7 +1016,7 @@
         empty_exception_handlers_,
         static_cast<ExceptionHandlersPtr>(address + kHeapObjectTag));
     empty_exception_handlers_->StoreNonPointer(
-        &empty_exception_handlers_->raw_ptr()->num_entries_, 0);
+        &empty_exception_handlers_->untag()->num_entries_, 0);
     empty_exception_handlers_->SetCanonical();
   }
 
@@ -1028,15 +1028,15 @@
     TypeArguments::initializeHandle(
         empty_type_arguments_,
         static_cast<TypeArgumentsPtr>(address + kHeapObjectTag));
-    empty_type_arguments_->raw_ptr()->set_length(Smi::New(0));
-    empty_type_arguments_->raw_ptr()->set_hash(Smi::New(0));
+    empty_type_arguments_->untag()->set_length(Smi::New(0));
+    empty_type_arguments_->untag()->set_hash(Smi::New(0));
     empty_type_arguments_->ComputeHash();
     empty_type_arguments_->SetCanonical();
   }
 
   // The VM isolate snapshot object table is initialized to an empty array
   // as we do not have any VM isolate snapshot at this time.
-  *vm_isolate_snapshot_object_table_ = Object::empty_array().raw();
+  *vm_isolate_snapshot_object_table_ = Object::empty_array().ptr();
 
   cls = Class::New<Instance, RTN::Instance>(kDynamicCid, isolate_group);
   cls.set_is_abstract();
@@ -1044,14 +1044,14 @@
   cls.set_is_allocate_finalized();
   cls.set_is_declaration_loaded();
   cls.set_is_type_finalized();
-  dynamic_class_ = cls.raw();
+  dynamic_class_ = cls.ptr();
 
   cls = Class::New<Instance, RTN::Instance>(kVoidCid, isolate_group);
   cls.set_num_type_arguments(0);
   cls.set_is_allocate_finalized();
   cls.set_is_declaration_loaded();
   cls.set_is_type_finalized();
-  void_class_ = cls.raw();
+  void_class_ = cls.ptr();
 
   cls = Class::New<Type, RTN::Type>(isolate_group);
   cls.set_is_allocate_finalized();
@@ -1265,11 +1265,11 @@
 
   void VisitObject(ObjectPtr obj) {
     // Free list elements should never be marked.
-    ASSERT(!obj->ptr()->IsMarked());
+    ASSERT(!obj->untag()->IsMarked());
     // No forwarding corpses in the VM isolate.
     ASSERT(!obj->IsForwardingCorpse());
     if (!obj->IsFreeListElement()) {
-      obj->ptr()->SetMarkBitUnsynchronized();
+      obj->untag()->SetMarkBitUnsynchronized();
       Object::FinalizeReadOnlyObject(obj);
 #if defined(HASH_IN_OBJECT_HEADER)
       // These objects end up in the read-only VM isolate which is shared
@@ -1386,9 +1386,9 @@
       String::SetCachedHash(str, hash);
     }
     intptr_t size = OneByteString::UnroundedSize(str);
-    ASSERT(size <= str->ptr()->HeapSize());
-    memset(reinterpret_cast<void*>(ObjectLayout::ToAddr(str) + size), 0,
-           str->ptr()->HeapSize() - size);
+    ASSERT(size <= str->untag()->HeapSize());
+    memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(str) + size), 0,
+           str->untag()->HeapSize() - size);
   } else if (cid == kTwoByteStringCid) {
     TwoByteStringPtr str = static_cast<TwoByteStringPtr>(object);
     if (String::GetCachedHash(str) == 0) {
@@ -1397,9 +1397,9 @@
     }
     ASSERT(String::GetCachedHash(str) != 0);
     intptr_t size = TwoByteString::UnroundedSize(str);
-    ASSERT(size <= str->ptr()->HeapSize());
-    memset(reinterpret_cast<void*>(ObjectLayout::ToAddr(str) + size), 0,
-           str->ptr()->HeapSize() - size);
+    ASSERT(size <= str->untag()->HeapSize());
+    memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(str) + size), 0,
+           str->untag()->HeapSize() - size);
   } else if (cid == kExternalOneByteStringCid) {
     ExternalOneByteStringPtr str =
         static_cast<ExternalOneByteStringPtr>(object);
@@ -1417,27 +1417,27 @@
   } else if (cid == kCodeSourceMapCid) {
     CodeSourceMapPtr map = CodeSourceMap::RawCast(object);
     intptr_t size = CodeSourceMap::UnroundedSize(map);
-    ASSERT(size <= map->ptr()->HeapSize());
-    memset(reinterpret_cast<void*>(ObjectLayout::ToAddr(map) + size), 0,
-           map->ptr()->HeapSize() - size);
+    ASSERT(size <= map->untag()->HeapSize());
+    memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(map) + size), 0,
+           map->untag()->HeapSize() - size);
   } else if (cid == kCompressedStackMapsCid) {
     CompressedStackMapsPtr maps = CompressedStackMaps::RawCast(object);
     intptr_t size = CompressedStackMaps::UnroundedSize(maps);
-    ASSERT(size <= maps->ptr()->HeapSize());
-    memset(reinterpret_cast<void*>(ObjectLayout::ToAddr(maps) + size), 0,
-           maps->ptr()->HeapSize() - size);
+    ASSERT(size <= maps->untag()->HeapSize());
+    memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(maps) + size), 0,
+           maps->untag()->HeapSize() - size);
   } else if (cid == kPcDescriptorsCid) {
     PcDescriptorsPtr desc = PcDescriptors::RawCast(object);
     intptr_t size = PcDescriptors::UnroundedSize(desc);
-    ASSERT(size <= desc->ptr()->HeapSize());
-    memset(reinterpret_cast<void*>(ObjectLayout::ToAddr(desc) + size), 0,
-           desc->ptr()->HeapSize() - size);
+    ASSERT(size <= desc->untag()->HeapSize());
+    memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(desc) + size), 0,
+           desc->untag()->HeapSize() - size);
   }
 }
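
// A minimal sketch of the UnroundedSize()/HeapSize() memsets above (plain
// byte buffer and an assumed 16-byte rounding, not the VM's real sizes):
// FinalizeReadOnlyObject clears the slack between an object's used size and
// its rounded allocation size, so the unused tail is deterministic rather
// than whatever bytes the allocator happened to leave there.
#include <cstddef>
#include <cstring>

constexpr std::size_t kAlignment = 16;  // Assumed rounding, not the VM's.

std::size_t RoundUp(std::size_t n) {
  return (n + kAlignment - 1) & ~(kAlignment - 1);
}

void ZeroPadding(unsigned char* object_start, std::size_t unrounded_size) {
  const std::size_t heap_size = RoundUp(unrounded_size);
  // Zero everything between the used payload and the allocation boundary.
  std::memset(object_start + unrounded_size, 0, heap_size - unrounded_size);
}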
 
 void Object::set_vm_isolate_snapshot_object_table(const Array& table) {
   ASSERT(Isolate::Current() == Dart::vm_isolate());
-  *vm_isolate_snapshot_object_table_ = table.raw();
+  *vm_isolate_snapshot_object_table_ = table.ptr();
 }
 
 // Make unused space in an object whose type has been transformed safe
@@ -1453,47 +1453,49 @@
   if (original_size > used_size) {
     intptr_t leftover_size = original_size - used_size;
 
-    uword addr = ObjectLayout::ToAddr(obj.raw()) + used_size;
+    uword addr = UntaggedObject::ToAddr(obj.ptr()) + used_size;
     if (leftover_size >= TypedData::InstanceSize(0)) {
       // Update the leftover space as a TypedDataInt8Array object.
       TypedDataPtr raw =
-          static_cast<TypedDataPtr>(ObjectLayout::FromAddr(addr));
+          static_cast<TypedDataPtr>(UntaggedObject::FromAddr(addr));
       uword new_tags =
-          ObjectLayout::ClassIdTag::update(kTypedDataInt8ArrayCid, 0);
-      new_tags = ObjectLayout::SizeTag::update(leftover_size, new_tags);
-      const bool is_old = obj.raw()->IsOldObject();
-      new_tags = ObjectLayout::OldBit::update(is_old, new_tags);
-      new_tags = ObjectLayout::OldAndNotMarkedBit::update(is_old, new_tags);
-      new_tags = ObjectLayout::OldAndNotRememberedBit::update(is_old, new_tags);
-      new_tags = ObjectLayout::NewBit::update(!is_old, new_tags);
+          UntaggedObject::ClassIdTag::update(kTypedDataInt8ArrayCid, 0);
+      new_tags = UntaggedObject::SizeTag::update(leftover_size, new_tags);
+      const bool is_old = obj.ptr()->IsOldObject();
+      new_tags = UntaggedObject::OldBit::update(is_old, new_tags);
+      new_tags = UntaggedObject::OldAndNotMarkedBit::update(is_old, new_tags);
+      new_tags =
+          UntaggedObject::OldAndNotRememberedBit::update(is_old, new_tags);
+      new_tags = UntaggedObject::NewBit::update(!is_old, new_tags);
       // On architectures with a relaxed memory model, the concurrent marker may
       // observe the write of the filler object's header before observing the
       // new array length, and so treat it as a pointer. Ensure it is a Smi so
       // the marker won't dereference it.
       ASSERT((new_tags & kSmiTagMask) == kSmiTag);
-      raw->ptr()->tags_ = new_tags;
+      raw->untag()->tags_ = new_tags;
 
       intptr_t leftover_len = (leftover_size - TypedData::InstanceSize(0));
       ASSERT(TypedData::InstanceSize(leftover_len) == leftover_size);
-      raw->ptr()->set_length(Smi::New(leftover_len));
-      raw->ptr()->RecomputeDataField();
+      raw->untag()->set_length(Smi::New(leftover_len));
+      raw->untag()->RecomputeDataField();
     } else {
       // Update the leftover space as a basic object.
       ASSERT(leftover_size == Object::InstanceSize());
-      ObjectPtr raw = static_cast<ObjectPtr>(ObjectLayout::FromAddr(addr));
-      uword new_tags = ObjectLayout::ClassIdTag::update(kInstanceCid, 0);
-      new_tags = ObjectLayout::SizeTag::update(leftover_size, new_tags);
-      const bool is_old = obj.raw()->IsOldObject();
-      new_tags = ObjectLayout::OldBit::update(is_old, new_tags);
-      new_tags = ObjectLayout::OldAndNotMarkedBit::update(is_old, new_tags);
-      new_tags = ObjectLayout::OldAndNotRememberedBit::update(is_old, new_tags);
-      new_tags = ObjectLayout::NewBit::update(!is_old, new_tags);
+      ObjectPtr raw = static_cast<ObjectPtr>(UntaggedObject::FromAddr(addr));
+      uword new_tags = UntaggedObject::ClassIdTag::update(kInstanceCid, 0);
+      new_tags = UntaggedObject::SizeTag::update(leftover_size, new_tags);
+      const bool is_old = obj.ptr()->IsOldObject();
+      new_tags = UntaggedObject::OldBit::update(is_old, new_tags);
+      new_tags = UntaggedObject::OldAndNotMarkedBit::update(is_old, new_tags);
+      new_tags =
+          UntaggedObject::OldAndNotRememberedBit::update(is_old, new_tags);
+      new_tags = UntaggedObject::NewBit::update(!is_old, new_tags);
       // On architectures with a relaxed memory model, the concurrent marker may
       // observe the write of the filler object's header before observing the
       // new array length, and so treat it as a pointer. Ensure it is a Smi so
       // the marker won't dereference it.
       ASSERT((new_tags & kSmiTagMask) == kSmiTag);
-      raw->ptr()->tags_ = new_tags;
+      raw->untag()->tags_ = new_tags;
     }
   }
 }
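
// A minimal sketch of the BitField-style tags composition above. The bit
// positions here are hypothetical, not the VM's real header layout; the
// point is that each update() rewrites one field of the word, and keeping
// bit 0 clear means the finished header reads as a Smi, so a concurrent
// marker that races on the filler's header never chases it as a pointer.
#include <cassert>
#include <cstdint>

template <typename T, int kPosition, int kSize>
struct BitField {
  static constexpr uintptr_t kMask = (uintptr_t{1} << kSize) - 1;
  static uintptr_t update(T value, uintptr_t original) {
    return (original & ~(kMask << kPosition)) |
           ((static_cast<uintptr_t>(value) & kMask) << kPosition);
  }
  static T decode(uintptr_t word) {
    return static_cast<T>((word >> kPosition) & kMask);
  }
};

int main() {
  using ClassIdTag = BitField<int, 1, 16>;  // Hypothetical: bit 0 = Smi tag.
  using SizeTag = BitField<int, 17, 8>;
  uintptr_t tags = 0;
  tags = ClassIdTag::update(42, tags);
  tags = SizeTag::update(3, tags);
  constexpr uintptr_t kSmiTagMask = 1, kSmiTag = 0;
  assert((tags & kSmiTagMask) == kSmiTag);  // Parses as a Smi, not a pointer.
  assert(ClassIdTag::decode(tags) == 42 && SizeTag::decode(tags) == 3);
  return 0;
}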
@@ -1673,7 +1675,7 @@
     // declared number of type parameters is still 0. It will become 1 after
     // patching. The array type allocated below represents the raw type _List
     // and not _List<E> as one might expect. Use with caution.
-    type = Type::New(Class::Handle(zone, cls.raw()),
+    type = Type::New(Class::Handle(zone, cls.ptr()),
                      TypeArguments::Handle(zone), Nullability::kNonNullable);
     type.SetIsFinalized();
     type ^= type.Canonicalize(thread, nullptr);
@@ -1727,7 +1729,7 @@
     }
     object_store->set_bootstrap_library(ObjectStore::kIsolate, isolate_lib);
     ASSERT(!isolate_lib.IsNull());
-    ASSERT(isolate_lib.raw() == Library::IsolateLibrary());
+    ASSERT(isolate_lib.ptr() == Library::IsolateLibrary());
 
     cls = Class::New<Capability, RTN::Capability>(isolate_group);
     RegisterPrivateClass(cls, Symbols::_CapabilityImpl(), isolate_lib);
@@ -1861,7 +1863,7 @@
     }
     object_store->set_bootstrap_library(ObjectStore::kMirrors, lib);
     ASSERT(!lib.IsNull());
-    ASSERT(lib.raw() == Library::MirrorsLibrary());
+    ASSERT(lib.ptr() == Library::MirrorsLibrary());
 
     cls = Class::New<MirrorReference, RTN::MirrorReference>(isolate_group);
     RegisterPrivateClass(cls, Symbols::_MirrorReference(), lib);
@@ -1877,7 +1879,7 @@
 
     object_store->set_bootstrap_library(ObjectStore::kCollection, lib);
     ASSERT(!lib.IsNull());
-    ASSERT(lib.raw() == Library::CollectionLibrary());
+    ASSERT(lib.ptr() == Library::CollectionLibrary());
     cls = Class::New<LinkedHashMap, RTN::LinkedHashMap>(isolate_group);
     object_store->set_linked_hash_map_class(cls);
     cls.set_type_arguments_field_offset(
@@ -1897,7 +1899,7 @@
     }
     object_store->set_bootstrap_library(ObjectStore::kAsync, lib);
     ASSERT(!lib.IsNull());
-    ASSERT(lib.raw() == Library::AsyncLibrary());
+    ASSERT(lib.ptr() == Library::AsyncLibrary());
     cls = Class::New<FutureOr, RTN::FutureOr>(isolate_group);
     cls.set_type_arguments_field_offset(FutureOr::type_arguments_offset(),
                                         RTN::FutureOr::type_arguments_offset());
@@ -1915,7 +1917,7 @@
     }
     object_store->set_bootstrap_library(ObjectStore::kDeveloper, lib);
     ASSERT(!lib.IsNull());
-    ASSERT(lib.raw() == Library::DeveloperLibrary());
+    ASSERT(lib.ptr() == Library::DeveloperLibrary());
     cls = Class::New<UserTag, RTN::UserTag>(isolate_group);
     RegisterPrivateClass(cls, Symbols::_UserTag(), lib);
     pending_classes.Add(cls);
@@ -1935,7 +1937,7 @@
     }
     object_store->set_bootstrap_library(ObjectStore::kTypedData, lib);
     ASSERT(!lib.IsNull());
-    ASSERT(lib.raw() == Library::TypedDataLibrary());
+    ASSERT(lib.ptr() == Library::TypedDataLibrary());
 #define REGISTER_TYPED_DATA_CLASS(clazz)                                       \
   cls = Class::NewTypedDataClass(kTypedData##clazz##ArrayCid, isolate_group);  \
   RegisterPrivateClass(cls, Symbols::_##clazz##List(), lib);
@@ -2087,7 +2089,7 @@
     type = type.ToNullability(Nullability::kNullable, Heap::kOld);
     object_store->set_nullable_double_type(type);
 
-    name = Symbols::_String().raw();
+    name = Symbols::_String().ptr();
     cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
                                               /*register_class=*/true,
                                               /*is_abstract=*/true);
@@ -2292,7 +2294,7 @@
     const Error& error = Error::Handle(
         zone, Bootstrap::DoBootstrapping(kernel_buffer, kernel_buffer_size));
     if (!error.IsNull()) {
-      return error.raw();
+      return error.ptr();
     }
 
     isolate_group->class_table()->CopySizesFromClassObjects();
@@ -2454,16 +2456,16 @@
 
 #if defined(DEBUG)
 bool Object::InVMIsolateHeap() const {
-  if (FLAG_verify_handles && raw()->ptr()->InVMIsolateHeap()) {
+  if (FLAG_verify_handles && ptr()->untag()->InVMIsolateHeap()) {
     Heap* vm_isolate_heap = Dart::vm_isolate_group()->heap();
-    uword addr = ObjectLayout::ToAddr(raw());
+    uword addr = UntaggedObject::ToAddr(ptr());
     if (!vm_isolate_heap->Contains(addr)) {
       ASSERT(FLAG_write_protect_code);
-      addr = ObjectLayout::ToAddr(OldPage::ToWritable(raw()));
+      addr = UntaggedObject::ToAddr(OldPage::ToWritable(ptr()));
       ASSERT(vm_isolate_heap->Contains(addr));
     }
   }
-  return raw()->ptr()->InVMIsolateHeap();
+  return ptr()->untag()->InVMIsolateHeap();
 }
 #endif  // DEBUG
 
@@ -2479,7 +2481,7 @@
   // Note: we skip the header word here to avoid a racy read in the concurrent
   // marker from observing the null object when it reads into a heap page
   // allocated after marking started.
-  uword cur = address + sizeof(ObjectLayout);
+  uword cur = address + sizeof(UntaggedObject);
   uword end = address + size;
   if (class_id == kInstructionsCid) {
     compiler::target::uword initial_value = kBreakInstructionFiller;
@@ -2519,33 +2521,33 @@
   }
   uword tags = 0;
   ASSERT(class_id != kIllegalCid);
-  tags = ObjectLayout::ClassIdTag::update(class_id, tags);
-  tags = ObjectLayout::SizeTag::update(size, tags);
+  tags = UntaggedObject::ClassIdTag::update(class_id, tags);
+  tags = UntaggedObject::SizeTag::update(size, tags);
   const bool is_old =
       (address & kNewObjectAlignmentOffset) == kOldObjectAlignmentOffset;
-  tags = ObjectLayout::OldBit::update(is_old, tags);
-  tags = ObjectLayout::OldAndNotMarkedBit::update(is_old, tags);
-  tags = ObjectLayout::OldAndNotRememberedBit::update(is_old, tags);
-  tags = ObjectLayout::NewBit::update(!is_old, tags);
-  reinterpret_cast<ObjectLayout*>(address)->tags_ = tags;
+  tags = UntaggedObject::OldBit::update(is_old, tags);
+  tags = UntaggedObject::OldAndNotMarkedBit::update(is_old, tags);
+  tags = UntaggedObject::OldAndNotRememberedBit::update(is_old, tags);
+  tags = UntaggedObject::NewBit::update(!is_old, tags);
+  reinterpret_cast<UntaggedObject*>(address)->tags_ = tags;
 }
 
 void Object::CheckHandle() const {
 #if defined(DEBUG)
-  if (raw_ != Object::null()) {
-    intptr_t cid = raw_->GetClassIdMayBeSmi();
+  if (ptr_ != Object::null()) {
+    intptr_t cid = ptr_->GetClassIdMayBeSmi();
     if (cid >= kNumPredefinedCids) {
       cid = kInstanceCid;
     }
     ASSERT(vtable() == builtin_vtables_[cid]);
-    if (FLAG_verify_handles && raw_->IsHeapObject()) {
+    if (FLAG_verify_handles && ptr_->IsHeapObject()) {
       Heap* isolate_heap = IsolateGroup::Current()->heap();
       if (!isolate_heap->new_space()->scavenging()) {
         Heap* vm_isolate_heap = Dart::vm_isolate_group()->heap();
-        uword addr = ObjectLayout::ToAddr(raw_);
+        uword addr = UntaggedObject::ToAddr(ptr_);
         if (!isolate_heap->Contains(addr) && !vm_isolate_heap->Contains(addr)) {
           ASSERT(FLAG_write_protect_code);
-          addr = ObjectLayout::ToAddr(OldPage::ToWritable(raw_));
+          addr = UntaggedObject::ToAddr(OldPage::ToWritable(ptr_));
           ASSERT(isolate_heap->Contains(addr) ||
                  vm_isolate_heap->Contains(addr));
         }
@@ -2592,13 +2594,13 @@
   NoSafepointScope no_safepoint;
   InitializeObject(address, cls_id, size);
   ObjectPtr raw_obj = static_cast<ObjectPtr>(address + kHeapObjectTag);
-  ASSERT(cls_id == ObjectLayout::ClassIdTag::decode(raw_obj->ptr()->tags_));
+  ASSERT(cls_id == UntaggedObject::ClassIdTag::decode(raw_obj->untag()->tags_));
   if (raw_obj->IsOldObject() && UNLIKELY(thread->is_marking())) {
     // Black allocation. Prevents a data race between the mutator and concurrent
     // marker on ARM and ARM64 (the marker may observe a publishing store of
     // this object before the stores that initialize its slots), and helps the
     // collection to finish sooner.
-    raw_obj->ptr()->SetMarkBitUnsynchronized();
+    raw_obj->untag()->SetMarkBitUnsynchronized();
     // Setting the mark bit must not be ordered after a publishing store of this
     // object. Adding a barrier here is cheaper than making every store into the
     // heap a store-release. Compare Scavenger::ScavengePointer.
@@ -2622,14 +2624,14 @@
       for (ObjectPtr* slot = from; slot <= to; ++slot) {
         ObjectPtr value = *slot;
         if (value->IsHeapObject()) {
-          old_obj_->ptr()->CheckArrayPointerStore(slot, value, thread_);
+          old_obj_->untag()->CheckArrayPointerStore(slot, value, thread_);
         }
       }
     } else {
       for (ObjectPtr* slot = from; slot <= to; ++slot) {
         ObjectPtr value = *slot;
         if (value->IsHeapObject()) {
-          old_obj_->ptr()->CheckHeapPointerStore(value, thread_);
+          old_obj_->untag()->CheckHeapPointerStore(value, thread_);
         }
       }
     }
@@ -2652,13 +2654,13 @@
 
 ObjectPtr Object::Clone(const Object& orig, Heap::Space space) {
   const Class& cls = Class::Handle(orig.clazz());
-  intptr_t size = orig.raw()->ptr()->HeapSize();
+  intptr_t size = orig.ptr()->untag()->HeapSize();
   ObjectPtr raw_clone = Object::Allocate(cls.id(), size, space);
   NoSafepointScope no_safepoint;
   // Copy the body of the original into the clone.
-  uword orig_addr = ObjectLayout::ToAddr(orig.raw());
-  uword clone_addr = ObjectLayout::ToAddr(raw_clone);
-  static const intptr_t kHeaderSizeInBytes = sizeof(ObjectLayout);
+  uword orig_addr = UntaggedObject::ToAddr(orig.ptr());
+  uword clone_addr = UntaggedObject::ToAddr(raw_clone);
+  static const intptr_t kHeaderSizeInBytes = sizeof(UntaggedObject);
   memmove(reinterpret_cast<uint8_t*>(clone_addr + kHeaderSizeInBytes),
           reinterpret_cast<uint8_t*>(orig_addr + kHeaderSizeInBytes),
           size - kHeaderSizeInBytes);
@@ -2669,12 +2671,12 @@
     return raw_clone;
   }
   WriteBarrierUpdateVisitor visitor(Thread::Current(), raw_clone);
-  raw_clone->ptr()->VisitPointers(&visitor);
+  raw_clone->untag()->VisitPointers(&visitor);
   return raw_clone;
 }
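
// A toy sketch of the body copy in Object::Clone, using a fixed-size object
// instead of VM heap types (this layout is assumed for illustration). Only
// the payload after the header is copied, so the clone keeps the tags that
// its own allocation wrote; copied pointer slots must then be revisited for
// write-barrier bookkeeping, which is what VisitPointers does above.
#include <cstddef>
#include <cstdint>
#include <cstring>

struct Header { uintptr_t tags; };
struct Obj {
  Header header;
  uintptr_t slots[3];
};

void CloneBody(const Obj& orig, Obj* clone) {
  static const std::size_t kHeaderSizeInBytes = sizeof(Header);
  std::memmove(reinterpret_cast<uint8_t*>(clone) + kHeaderSizeInBytes,
               reinterpret_cast<const uint8_t*>(&orig) + kHeaderSizeInBytes,
               sizeof(Obj) - kHeaderSizeInBytes);
}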
 
 StringPtr Class::Name() const {
-  return raw_ptr()->name();
+  return untag()->name();
 }
 
 StringPtr Class::ScrubbedName() const {
@@ -2687,8 +2689,8 @@
 
 StringPtr Class::UserVisibleName() const {
 #if !defined(PRODUCT)
-  ASSERT(raw_ptr()->user_name() != String::null());
-  return raw_ptr()->user_name();
+  ASSERT(untag()->user_name() != String::null());
+  return untag()->user_name();
 #endif  // !defined(PRODUCT)
   // No caching in PRODUCT, regenerate.
   return Symbols::New(Thread::Current(), GenerateUserVisibleName());
@@ -2696,8 +2698,8 @@
 
 const char* Class::UserVisibleNameCString() const {
 #if !defined(PRODUCT)
-  ASSERT(raw_ptr()->user_name() != String::null());
-  return String::Handle(raw_ptr()->user_name()).ToCString();
+  ASSERT(untag()->user_name() != String::null());
+  return String::Handle(untag()->user_name()).ToCString();
 #endif                               // !defined(PRODUCT)
   return GenerateUserVisibleName();  // No caching in PRODUCT, regenerate.
 }
@@ -2723,7 +2725,7 @@
         Type::Handle(Type::RawCast(interfaces.At(interfaces.Length() - 1)));
     return mixin_type.type_class();
   }
-  return raw();
+  return ptr();
 }
 
 NNBDMode Class::nnbd_mode() const {
@@ -2732,8 +2734,8 @@
 
 bool Class::IsInFullSnapshot() const {
   NoSafepointScope no_safepoint;
-  return LibraryLayout::InFullSnapshotBit::decode(
-      raw_ptr()->library()->ptr()->flags_);
+  return UntaggedLibrary::InFullSnapshotBit::decode(
+      untag()->library()->untag()->flags_);
 }
 
 AbstractTypePtr Class::RareType() const {
@@ -2790,7 +2792,7 @@
   if (register_class) {
     isolate_group->class_table()->Register(result);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 static void ReportTooManyTypeArguments(const Class& cls) {
@@ -2806,7 +2808,7 @@
   if (!Utils::IsInt(16, value)) {
     ReportTooManyTypeArguments(*this);
   }
-  StoreNonPointer(&raw_ptr()->num_type_arguments_, value);
+  StoreNonPointer(&untag()->num_type_arguments_, value);
 }
 
 void Class::set_has_pragma(bool value) const {
@@ -2815,23 +2817,23 @@
 
 // Initialize class fields of type Array with empty array.
 void Class::InitEmptyFields() {
-  if (Object::empty_array().raw() == Array::null()) {
+  if (Object::empty_array().ptr() == Array::null()) {
     // The empty array has not been initialized yet.
     return;
   }
-  raw_ptr()->set_interfaces(Object::empty_array().raw());
-  raw_ptr()->set_constants(Object::null_array().raw());
+  untag()->set_interfaces(Object::empty_array().ptr());
+  untag()->set_constants(Object::null_array().ptr());
   set_functions(Object::empty_array());
   set_fields(Object::empty_array());
   set_invocation_dispatcher_cache(Object::empty_array());
 }
 
 ArrayPtr Class::OffsetToFieldMap(bool original_classes) const {
-  if (raw_ptr()->offset_in_words_to_field() == Array::null()) {
+  if (untag()->offset_in_words_to_field() == Array::null()) {
     ASSERT(is_finalized());
-    const intptr_t length = raw_ptr()->host_instance_size_in_words_;
+    const intptr_t length = untag()->host_instance_size_in_words_;
     const Array& array = Array::Handle(Array::New(length, Heap::kOld));
-    Class& cls = Class::Handle(this->raw());
+    Class& cls = Class::Handle(this->ptr());
     Array& fields = Array::Handle();
     Field& f = Field::Handle();
     while (!cls.IsNull()) {
@@ -2844,9 +2846,9 @@
       }
       cls = cls.SuperClass(original_classes);
     }
-    raw_ptr()->set_offset_in_words_to_field(array.raw());
+    untag()->set_offset_in_words_to_field(array.ptr());
   }
-  return raw_ptr()->offset_in_words_to_field();
+  return untag()->offset_in_words_to_field();
 }
 
 bool Class::HasInstanceFields() const {
@@ -2867,7 +2869,7 @@
       : name_(name), tmp_string_(tmp_string) {}
   bool Matches(const Function& function) const {
     if (name_.IsSymbol()) {
-      return name_.raw() == function.name();
+      return name_.ptr() == function.name();
     } else {
       *tmp_string_ = function.name();
       return name_.Equals(*tmp_string_);
@@ -2890,7 +2892,7 @@
   static bool IsMatch(const Object& a, const Object& b) {
     ASSERT(a.IsFunction() && b.IsFunction());
     // Function objects are always canonical.
-    return a.raw() == b.raw();
+    return a.ptr() == b.ptr();
   }
   static bool IsMatch(const FunctionName& name, const Object& obj) {
     return name.Matches(Function::Cast(obj));
@@ -2926,12 +2928,12 @@
     for (intptr_t i = 0; i < len; ++i) {
       func ^= value.At(i);
       // Verify that all the functions in the array have this class as owner.
-      ASSERT(func.Owner() == raw());
+      ASSERT(func.Owner() == ptr());
       set.Insert(func);
     }
-    raw_ptr()->set_functions_hash_table(set.Release().raw());
+    untag()->set_functions_hash_table(set.Release().ptr());
   } else {
-    raw_ptr()->set_functions_hash_table(Array::null());
+    untag()->set_functions_hash_table(Array::null());
   }
 }
 
@@ -2954,9 +2956,9 @@
     // Transition to using hash table.
     SetFunctions(new_array);
   } else if (new_len > kFunctionLookupHashTreshold) {
-    ClassFunctionsSet set(raw_ptr()->functions_hash_table());
+    ClassFunctionsSet set(untag()->functions_hash_table());
     set.Insert(function);
-    raw_ptr()->set_functions_hash_table(set.Release().raw());
+    untag()->set_functions_hash_table(set.Release().ptr());
   }
 }
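
// The kFunctionLookupHashTreshold logic above (note the historical spelling
// of the constant) switches lookups from a linear scan of the functions
// array to a hash set once the list is long enough, since linear scans beat
// hashing for short lists. A sketch with standard containers and an assumed
// cutoff:
#include <cstddef>
#include <string>
#include <unordered_map>
#include <vector>

constexpr std::size_t kLookupHashThreshold = 16;  // Assumed, not the VM's.

struct FunctionTable {
  std::vector<std::string> names;
  std::unordered_map<std::string, std::size_t> index;  // Built lazily.

  void Add(const std::string& name) {
    names.push_back(name);
    if (names.size() == kLookupHashThreshold) {
      // Transition: index everything accumulated so far, once.
      for (std::size_t i = 0; i < names.size(); ++i) {
        index.emplace(names[i], i);
      }
    } else if (names.size() > kLookupHashThreshold) {
      index.emplace(name, names.size() - 1);
    }
  }

  bool Contains(const std::string& name) const {
    if (!index.empty()) return index.count(name) > 0;  // Hash path.
    for (const auto& n : names) {                      // Short-list path.
      if (n == name) return true;
    }
    return false;
  }
};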
 
@@ -2968,7 +2970,7 @@
   Function& func = Function::Handle();
   func ^= funcs.At(idx);
   ASSERT(!func.IsNull());
-  return func.raw();
+  return func.ptr();
 }
 
 FunctionPtr Class::ImplicitClosureFunctionFromIndex(intptr_t idx) const {
@@ -2985,7 +2987,7 @@
   const Function& closure_func =
       Function::Handle(func.ImplicitClosureFunction());
   ASSERT(!closure_func.IsNull());
-  return closure_func.raw();
+  return closure_func.ptr();
 }
 
 intptr_t Class::FindImplicitClosureFunctionIndex(const Function& needle) const {
@@ -3008,7 +3010,7 @@
       // Skip non-implicit closure functions.
       continue;
     }
-    if (needle.raw() == implicit_closure.raw()) {
+    if (needle.ptr() == implicit_closure.ptr()) {
       return i;
     }
   }
@@ -3034,7 +3036,7 @@
     // The invocation_dispatcher_cache is a table with some entries that
     // are functions.
     if (object.IsFunction()) {
-      if (Function::Cast(object).raw() == needle.raw()) {
+      if (Function::Cast(object).ptr() == needle.ptr()) {
         return i;
       }
     }
@@ -3054,41 +3056,41 @@
   if (!object.IsFunction()) {
     return Function::null();
   }
-  return Function::Cast(object).raw();
+  return Function::Cast(object).ptr();
 }
 
 void Class::set_state_bits(intptr_t bits) const {
   StoreNonPointer<uint32_t, uint32_t, std::memory_order_release>(
-      &raw_ptr()->state_bits_, static_cast<uint32_t>(bits));
+      &untag()->state_bits_, static_cast<uint32_t>(bits));
 }
 
 void Class::set_library(const Library& value) const {
-  raw_ptr()->set_library(value.raw());
+  untag()->set_library(value.ptr());
 }
 
 void Class::set_type_parameters(const TypeArguments& value) const {
   ASSERT((num_type_arguments() == kUnknownNumTypeArguments) ||
          is_prefinalized());
-  raw_ptr()->set_type_parameters(value.raw());
+  untag()->set_type_parameters(value.ptr());
 }
 
 void Class::set_functions(const Array& value) const {
   // Ensure all writes to the [Function]s are visible by the time the array
   // is visible.
-  raw_ptr()->set_functions<std::memory_order_release>(value.raw());
+  untag()->set_functions<std::memory_order_release>(value.ptr());
 }
 
 void Class::set_fields(const Array& value) const {
   // Ensure all writes to the [Field]s are visible by the time the array
   // is visible.
-  raw_ptr()->set_fields<std::memory_order_release>(value.raw());
+  untag()->set_fields<std::memory_order_release>(value.ptr());
 }
 
 void Class::set_invocation_dispatcher_cache(const Array& cache) const {
   // Ensure all writes to the cache are visible by the time the array
   // is visible.
-  raw_ptr()->set_invocation_dispatcher_cache<std::memory_order_release>(
-      cache.raw());
+  untag()->set_invocation_dispatcher_cache<std::memory_order_release>(
+      cache.ptr());
 }
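
// The release stores above pair with the acquire load in
// invocation_dispatcher_cache() below. A standalone sketch of that
// publication pattern with plain std::atomic (toy types, not VM handles):
// the writer finishes initializing before the release store, so a reader
// that acquires the pointer is guaranteed to see initialized contents.
#include <atomic>
#include <cassert>

struct Payload { int fields[4]; };

std::atomic<Payload*> g_published{nullptr};

void Publish(Payload* p) {
  for (int& f : p->fields) f = 7;                   // Initialize first...
  g_published.store(p, std::memory_order_release);  // ...then publish.
}

void Consume() {
  Payload* p = g_published.load(std::memory_order_acquire);
  if (p != nullptr) {
    assert(p->fields[0] == 7);  // Acquire makes the writer's stores visible.
  }
}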
 
 intptr_t Class::NumTypeParameters(Thread* thread) const {
@@ -3198,7 +3200,7 @@
     const TypeArguments& parameters) {
   ASSERT(thread != nullptr);
   if (parameters.IsNull()) {
-    return Object::empty_type_arguments().raw();
+    return Object::empty_type_arguments().ptr();
   }
   auto const zone = thread->zone();
   const auto& result = TypeArguments::Handle(
@@ -3243,7 +3245,7 @@
 
 void Class::set_super_type(const AbstractType& value) const {
   ASSERT(value.IsNull() || (value.IsType() && !value.IsDynamicType()));
-  raw_ptr()->set_super_type(value.raw());
+  untag()->set_super_type(value.ptr());
 }
 
 TypeParameterPtr Class::LookupTypeParameter(const String& type_name) const {
@@ -3263,7 +3265,7 @@
       type_param ^= type_params.TypeAt(i);
       type_param_name = type_param.name();
       if (type_param_name.Equals(type_name)) {
-        return type_param.raw();
+        return type_param.ptr();
       }
     }
   }
@@ -3339,17 +3341,17 @@
         intptr_t field_size;
         switch (field.guarded_cid()) {
           case kDoubleCid:
-            field_size = sizeof(DoubleLayout::value_);
+            field_size = sizeof(UntaggedDouble::value_);
             break;
           case kFloat32x4Cid:
-            field_size = sizeof(Float32x4Layout::value_);
+            field_size = sizeof(UntaggedFloat32x4::value_);
             break;
           case kFloat64x2Cid:
-            field_size = sizeof(Float64x2Layout::value_);
+            field_size = sizeof(UntaggedFloat64x2::value_);
             break;
           default:
             if (field.is_non_nullable_integer()) {
-              field_size = sizeof(MintLayout::value_);
+              field_size = sizeof(UntaggedMint::value_);
             } else {
               UNREACHABLE();
               field_size = 0;
@@ -3431,11 +3433,11 @@
 
 FunctionPtr Class::GetInvocationDispatcher(const String& target_name,
                                            const Array& args_desc,
-                                           FunctionLayout::Kind kind,
+                                           UntaggedFunction::Kind kind,
                                            bool create_if_absent) const {
-  ASSERT(kind == FunctionLayout::kNoSuchMethodDispatcher ||
-         kind == FunctionLayout::kInvokeFieldDispatcher ||
-         kind == FunctionLayout::kDynamicInvocationForwarder);
+  ASSERT(kind == UntaggedFunction::kNoSuchMethodDispatcher ||
+         kind == UntaggedFunction::kInvokeFieldDispatcher ||
+         kind == UntaggedFunction::kDynamicInvocationForwarder);
   auto thread = Thread::Current();
   auto Z = thread->zone();
   auto& function = Function::Handle(Z);
@@ -3454,10 +3456,10 @@
       if (name.IsNull()) break;  // Reached last entry.
       if (!name.Equals(target_name)) continue;
       desc = dispatcher.Get<Class::kInvocationDispatcherArgsDesc>();
-      if (desc.raw() != args_desc.raw()) continue;
+      if (desc.ptr() != args_desc.ptr()) continue;
       function = dispatcher.Get<Class::kInvocationDispatcherFunction>();
       if (function.kind() == kind) {
-        return function.raw();
+        return function.ptr();
       }
     }
     return Function::null();
@@ -3466,7 +3468,7 @@
   // First we'll try to find it without using locks.
   function = find_entry();
   if (!function.IsNull() || !create_if_absent) {
-    return function.raw();
+    return function.ptr();
   }
 
   // If we failed to find it and possibly need to create it, use a write lock.
@@ -3474,17 +3476,18 @@
 
   // Try to find it again & return if it was added in the meantime.
   function = find_entry();
-  if (!function.IsNull()) return function.raw();
+  if (!function.IsNull()) return function.ptr();
 
   // Otherwise create it & add it.
   function = CreateInvocationDispatcher(target_name, args_desc, kind);
   AddInvocationDispatcher(target_name, args_desc, function);
-  return function.raw();
+  return function.ptr();
 }
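
// GetInvocationDispatcher above uses an optimistic probe, then re-checks
// under the program write lock before creating, so two racing threads never
// create the same dispatcher twice. A generic sketch with standard-library
// types (the VM's first probe is lock-free and its "absent" value is
// Function::null(); this sketch uses a reader lock and -1 for simplicity):
#include <map>
#include <shared_mutex>
#include <string>

std::shared_mutex g_lock;
std::map<std::string, int> g_cache;

int GetOrCreate(const std::string& key, bool create_if_absent) {
  {
    std::shared_lock<std::shared_mutex> read_lock(g_lock);
    auto it = g_cache.find(key);
    if (it != g_cache.end()) return it->second;  // Fast path: already there.
  }
  if (!create_if_absent) return -1;
  std::unique_lock<std::shared_mutex> write_lock(g_lock);
  auto it = g_cache.find(key);  // Re-check: another thread may have won.
  if (it != g_cache.end()) return it->second;
  const int value = static_cast<int>(g_cache.size());  // "Create" the entry.
  g_cache.emplace(key, value);
  return value;
}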
 
-FunctionPtr Class::CreateInvocationDispatcher(const String& target_name,
-                                              const Array& args_desc,
-                                              FunctionLayout::Kind kind) const {
+FunctionPtr Class::CreateInvocationDispatcher(
+    const String& target_name,
+    const Array& args_desc,
+    UntaggedFunction::Kind kind) const {
   Thread* thread = Thread::Current();
   Zone* zone = thread->zone();
   FunctionType& signature = FunctionType::Handle(zone, FunctionType::New());
@@ -3539,7 +3542,7 @@
   signature ^= ClassFinalizer::FinalizeType(signature);
   invocation.set_signature(signature);
 
-  return invocation.raw();
+  return invocation.ptr();
 }
 
 // Method extractors are used to create implicit closures from methods.
@@ -3560,7 +3563,7 @@
       zone,
       Function::New(signature,
                     String::Handle(zone, Symbols::New(thread, getter_name)),
-                    FunctionLayout::kMethodExtractor,
+                    UntaggedFunction::kMethodExtractor,
                     false,  // Not static.
                     false,  // Not const.
                     is_abstract(),
@@ -3588,7 +3591,7 @@
 
   owner.AddFunction(extractor);
 
-  return extractor.raw();
+  return extractor.ptr();
 }
 
 FunctionPtr Function::GetMethodExtractor(const String& getter_name) const {
@@ -3610,8 +3613,8 @@
       result = CreateMethodExtractor(getter_name);
     }
   }
-  ASSERT(result.kind() == FunctionLayout::kMethodExtractor);
-  return result.raw();
+  ASSERT(result.kind() == UntaggedFunction::kMethodExtractor);
+  return result.ptr();
 }
 
 bool Library::FindPragma(Thread* T,
@@ -3665,9 +3668,9 @@
   auto& pragma = Object::Handle(Z);
   for (intptr_t i = 0; i < metadata.Length(); ++i) {
     pragma = metadata.At(i);
-    if (pragma.clazz() != pragma_class.raw() ||
+    if (pragma.clazz() != pragma_class.ptr() ||
         Instance::Cast(pragma).GetField(pragma_name_field) !=
-            pragma_name.raw()) {
+            pragma_name.ptr()) {
       continue;
     }
     if (options != nullptr) {
@@ -3680,11 +3683,11 @@
 }
 
 bool Function::IsDynamicInvocationForwarderName(const String& name) {
-  return IsDynamicInvocationForwarderName(name.raw());
+  return IsDynamicInvocationForwarderName(name.ptr());
 }
 
 bool Function::IsDynamicInvocationForwarderName(StringPtr name) {
-  return String::StartsWith(name, Symbols::DynamicPrefix().raw());
+  return String::StartsWith(name, Symbols::DynamicPrefix().ptr());
 }
 
 StringPtr Function::DemangleDynamicInvocationForwarderName(const String& name) {
@@ -3713,7 +3716,7 @@
   forwarder.set_is_native(false);
   // TODO(dartbug.com/37737): Currently, we intentionally keep the recognized
   // kind when creating the dynamic invocation forwarder.
-  forwarder.set_kind(FunctionLayout::kDynamicInvocationForwarder);
+  forwarder.set_kind(UntaggedFunction::kDynamicInvocationForwarder);
   forwarder.set_is_debuggable(false);
 
   // TODO(vegorov) for error reporting reasons it is better to make this
@@ -3736,7 +3739,7 @@
   checks.SetAt(0, *this);
   forwarder.SetForwardingChecks(checks);
 
-  return forwarder.raw();
+  return forwarder.ptr();
 }
 
 FunctionPtr Function::GetDynamicInvocationForwarder(
@@ -3749,34 +3752,34 @@
   Function& result = Function::Handle(zone);
 
   // First we'll try to find it without using locks.
-  result =
-      owner.GetInvocationDispatcher(mangled_name, Array::null_array(),
-                                    FunctionLayout::kDynamicInvocationForwarder,
-                                    /*create_if_absent=*/false);
-  if (!result.IsNull()) return result.raw();
+  result = owner.GetInvocationDispatcher(
+      mangled_name, Array::null_array(),
+      UntaggedFunction::kDynamicInvocationForwarder,
+      /*create_if_absent=*/false);
+  if (!result.IsNull()) return result.ptr();
 
   const bool needs_dyn_forwarder =
       kernel::NeedsDynamicInvocationForwarder(*this);
   if (!allow_add) {
-    return needs_dyn_forwarder ? Function::null() : raw();
+    return needs_dyn_forwarder ? Function::null() : ptr();
   }
 
   // If we failed to find it and possibly need to create it, use a write lock.
   SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
 
   // Try to find it again & return if it was added in the meantime.
-  result =
-      owner.GetInvocationDispatcher(mangled_name, Array::null_array(),
-                                    FunctionLayout::kDynamicInvocationForwarder,
-                                    /*create_if_absent=*/false);
-  if (!result.IsNull()) return result.raw();
+  result = owner.GetInvocationDispatcher(
+      mangled_name, Array::null_array(),
+      UntaggedFunction::kDynamicInvocationForwarder,
+      /*create_if_absent=*/false);
+  if (!result.IsNull()) return result.ptr();
 
   // Otherwise create it & add it.
   result = needs_dyn_forwarder ? CreateDynamicInvocationForwarder(mangled_name)
-                               : raw();
+                               : ptr();
   owner.AddInvocationDispatcher(mangled_name, Array::null_array(), result);
 
-  return result.raw();
+  return result.ptr();
 }
 
 #endif
@@ -3798,7 +3801,7 @@
 }
 
 ArrayPtr Class::invocation_dispatcher_cache() const {
-  return raw_ptr()->invocation_dispatcher_cache<std::memory_order_acquire>();
+  return untag()->invocation_dispatcher_cache<std::memory_order_acquire>();
 }
 
 void Class::Finalize() const {
@@ -3813,9 +3816,9 @@
     // Compute offsets of instance fields, instance size and bitmap for unboxed
     // fields.
     const auto host_bitmap = CalculateFieldOffsets();
-    if (raw() == isolate_group->class_table()->At(id())) {
+    if (ptr() == isolate_group->class_table()->At(id())) {
       // Sets the new size in the class table.
-      isolate_group->class_table()->SetAt(id(), raw());
+      isolate_group->class_table()->SetAt(id(), ptr());
       if (FLAG_precompiled_mode && !ClassTable::IsTopLevelCid(id())) {
         isolate_group->shared_class_table()->SetUnboxedFieldsMapAt(id(),
                                                                    host_bitmap);
@@ -3945,13 +3948,13 @@
 ArrayPtr Class::dependent_code() const {
   DEBUG_ASSERT(
       IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());
-  return raw_ptr()->dependent_code();
+  return untag()->dependent_code();
 }
 
 void Class::set_dependent_code(const Array& array) const {
   DEBUG_ASSERT(
       IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
-  raw_ptr()->set_dependent_code(array.raw());
+  untag()->set_dependent_code(array.ptr());
 }
 
 // Conventions:
@@ -4059,7 +4062,7 @@
       // Fall through case: Indicate that we didn't find any function or field
       // using a special null instance. This is different from a field being
       // null. Callers make sure that this null does not leak into Dartland.
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
 
     // Invoke the getter and return the result.
@@ -4135,7 +4138,7 @@
                           argument_name);
   }
   field.SetStaticValue(value);
-  return value.raw();
+  return value.ptr();
 }
 
 // Creates a new array of boxed arguments suitable for invoking the callable
@@ -4173,7 +4176,7 @@
   }
   // Set the receiver slot in the callable args.
   callable_args.SetAt(first_arg_index, receiver);
-  return callable_args.raw();
+  return callable_args.ptr();
 }
 
 ObjectPtr Class::Invoke(const String& function_name,
@@ -4205,7 +4208,7 @@
     const Object& getter_result = Object::Handle(
         zone, InvokeGetter(function_name, false, respect_reflectable,
                            check_is_entrypoint));
-    if (getter_result.raw() != Object::sentinel().raw()) {
+    if (getter_result.ptr() != Object::sentinel().ptr()) {
       if (check_is_entrypoint) {
         CHECK_ERROR(EntryPointFieldInvocationError(function_name));
       }
@@ -4303,7 +4306,7 @@
       UNREACHABLE();
     }
   }
-  return error.raw();
+  return error.ptr();
 }
 
 // Ensure that code outdated by finalized class is cleaned up, new instance of
@@ -4336,7 +4339,7 @@
     return Error::null();
   }
   error ^= ClassFinalizer::AllocateFinalizeClass(*this);
-  return error.raw();
+  return error.ptr();
 }
 
 void Class::SetFields(const Array& value) const {
@@ -4351,7 +4354,7 @@
   for (intptr_t i = 0; i < len; i++) {
     field ^= value.At(i);
     ASSERT(field.IsOriginal());
-    ASSERT(field.Owner() == raw());
+    ASSERT(field.Owner() == ptr());
   }
 #endif
   // The value of static fields is already initialized to null.
@@ -4390,7 +4393,7 @@
 
 bool Class::InjectCIDFields() const {
   if (library() != Library::InternalLibrary() ||
-      Name() != Symbols::ClassID().raw()) {
+      Name() != Symbols::ClassID().ptr()) {
     return false;
   }
 
@@ -4473,7 +4476,7 @@
   result.set_state_bits(0);
   NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
   result.InitEmptyFields();
-  return result.raw();
+  return result.ptr();
 }
 
 template <class FakeInstance, class TargetFakeInstance>
@@ -4489,7 +4492,7 @@
   if (register_class) {
     isolate_group->class_table()->Register(result);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 ClassPtr Class::New(const Library& lib,
@@ -4512,7 +4515,7 @@
   if (register_class) {
     Isolate::Current()->RegisterClass(result);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 ClassPtr Class::NewInstanceClass() {
@@ -4532,14 +4535,14 @@
     cls.set_super_type(Type::Handle(Type::ObjectType()));
     // Compute instance size. First word contains a pointer to a properly
     // sized typed array once the first native field has been set.
-    const intptr_t host_instance_size = sizeof(InstanceLayout) + kWordSize;
+    const intptr_t host_instance_size = sizeof(UntaggedInstance) + kWordSize;
 #if defined(DART_PRECOMPILER)
     const intptr_t target_instance_size =
         compiler::target::Instance::InstanceSize() +
         compiler::target::kWordSize;
 #else
     const intptr_t target_instance_size =
-        sizeof(InstanceLayout) + compiler::target::kWordSize;
+        sizeof(UntaggedInstance) + compiler::target::kWordSize;
 #endif
     cls.set_instance_size(
         RoundedAllocationSize(host_instance_size),
@@ -4554,7 +4557,7 @@
     cls.set_is_type_finalized();
     cls.set_is_synthesized_class();
     library.AddClass(cls);
-    return cls.raw();
+    return cls.ptr();
   } else {
     return Class::null();
   }
@@ -4590,7 +4593,7 @@
                                target_next_field_offset);
   result.set_is_prefinalized();
   isolate_group->class_table()->Register(result);
-  return result.raw();
+  return result.ptr();
 }
 
 ClassPtr Class::NewTypedDataClass(intptr_t class_id,
@@ -4609,7 +4612,7 @@
                                target_next_field_offset);
   result.set_is_prefinalized();
   isolate_group->class_table()->Register(result);
-  return result.raw();
+  return result.ptr();
 }
 
 ClassPtr Class::NewTypedDataViewClass(intptr_t class_id,
@@ -4629,7 +4632,7 @@
                                target_next_field_offset);
   result.set_is_prefinalized();
   isolate_group->class_table()->Register(result);
-  return result.raw();
+  return result.ptr();
 }
 
 ClassPtr Class::NewExternalTypedDataClass(intptr_t class_id,
@@ -4649,7 +4652,7 @@
                                target_next_field_offset);
   result.set_is_prefinalized();
   isolate_group->class_table()->Register(result);
-  return result.raw();
+  return result.ptr();
 }
 
 ClassPtr Class::NewPointerClass(intptr_t class_id,
@@ -4671,15 +4674,15 @@
                                target_next_field_offset);
   result.set_is_prefinalized();
   isolate_group->class_table()->Register(result);
-  return result.raw();
+  return result.ptr();
 }
 
 void Class::set_name(const String& value) const {
-  ASSERT(raw_ptr()->name() == String::null());
+  ASSERT(untag()->name() == String::null());
   ASSERT(value.IsSymbol());
-  raw_ptr()->set_name(value.raw());
+  untag()->set_name(value.ptr());
 #if !defined(PRODUCT)
-  if (raw_ptr()->user_name() == String::null()) {
+  if (untag()->user_name() == String::null()) {
     // TODO(johnmccutchan): Eagerly set user name for VM isolate classes,
     // lazily set user name for the other classes.
     // Generate and set user_name.
@@ -4692,7 +4695,7 @@
 
 #if !defined(PRODUCT)
 void Class::set_user_name(const String& value) const {
-  raw_ptr()->set_user_name(value.raw());
+  untag()->set_user_name(value.ptr());
 }
 #endif  // !defined(PRODUCT)
 
@@ -4845,7 +4848,7 @@
   }
   String& name = String::Handle(Name());
   name = Symbols::New(Thread::Current(), String::ScrubName(name));
-  if (name.raw() == Symbols::FutureImpl().raw() &&
+  if (name.ptr() == Symbols::FutureImpl().ptr() &&
       library() == Library::AsyncLibrary()) {
     return Symbols::Future().ToCString();
   }
@@ -4853,17 +4856,17 @@
 }
 
 void Class::set_script(const Script& value) const {
-  raw_ptr()->set_script(value.raw());
+  untag()->set_script(value.ptr());
 }
 
 void Class::set_token_pos(TokenPosition token_pos) const {
   ASSERT(!token_pos.IsClassifying());
-  StoreNonPointer(&raw_ptr()->token_pos_, token_pos);
+  StoreNonPointer(&untag()->token_pos_, token_pos);
 }
 
 void Class::set_end_token_pos(TokenPosition token_pos) const {
   ASSERT(!token_pos.IsClassifying());
-  StoreNonPointer(&raw_ptr()->end_token_pos_, token_pos);
+  StoreNonPointer(&untag()->end_token_pos_, token_pos);
 }
 
 int32_t Class::SourceFingerprint() const {
@@ -4885,15 +4888,15 @@
 
 void Class::set_is_declaration_loaded() const {
   ASSERT(!is_declaration_loaded());
-  set_state_bits(
-      ClassLoadingBits::update(ClassLayout::kDeclarationLoaded, state_bits()));
+  set_state_bits(ClassLoadingBits::update(UntaggedClass::kDeclarationLoaded,
+                                          state_bits()));
 }
 
 void Class::set_is_type_finalized() const {
   ASSERT(is_declaration_loaded());
   ASSERT(!is_type_finalized());
   set_state_bits(
-      ClassLoadingBits::update(ClassLayout::kTypeFinalized, state_bits()));
+      ClassLoadingBits::update(UntaggedClass::kTypeFinalized, state_bits()));
 }
 
 void Class::set_is_synthesized_class() const {
@@ -4927,24 +4930,24 @@
 void Class::set_is_finalized() const {
   ASSERT(!is_finalized());
   set_state_bits(
-      ClassFinalizedBits::update(ClassLayout::kFinalized, state_bits()));
+      ClassFinalizedBits::update(UntaggedClass::kFinalized, state_bits()));
 }
 
 void Class::set_is_allocate_finalized() const {
   ASSERT(!is_allocate_finalized());
-  set_state_bits(ClassFinalizedBits::update(ClassLayout::kAllocateFinalized,
+  set_state_bits(ClassFinalizedBits::update(UntaggedClass::kAllocateFinalized,
                                             state_bits()));
 }
 
 void Class::set_is_prefinalized() const {
   ASSERT(!is_finalized());
   set_state_bits(
-      ClassFinalizedBits::update(ClassLayout::kPreFinalized, state_bits()));
+      ClassFinalizedBits::update(UntaggedClass::kPreFinalized, state_bits()));
 }
 
 void Class::set_interfaces(const Array& value) const {
   ASSERT(!value.IsNull());
-  raw_ptr()->set_interfaces(value.raw());
+  untag()->set_interfaces(value.ptr());
 }
 
 void Class::AddDirectImplementor(const Class& implementor,
@@ -4953,10 +4956,10 @@
   ASSERT(is_implemented());
   ASSERT(!implementor.IsNull());
   GrowableObjectArray& direct_implementors =
-      GrowableObjectArray::Handle(raw_ptr()->direct_implementors());
+      GrowableObjectArray::Handle(untag()->direct_implementors());
   if (direct_implementors.IsNull()) {
     direct_implementors = GrowableObjectArray::New(4, Heap::kOld);
-    raw_ptr()->set_direct_implementors(direct_implementors.raw());
+    untag()->set_direct_implementors(direct_implementors.ptr());
   }
 #if defined(DEBUG)
   // Verify that the same class is not added twice.
@@ -4966,7 +4969,7 @@
   // This is rare and harmless.
   if (!is_mixin) {
     for (intptr_t i = 0; i < direct_implementors.Length(); i++) {
-      ASSERT(direct_implementors.At(i) != implementor.raw());
+      ASSERT(direct_implementors.At(i) != implementor.ptr());
     }
   }
 #endif
@@ -4975,25 +4978,25 @@
 
 void Class::ClearDirectImplementors() const {
   ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
-  raw_ptr()->set_direct_implementors(GrowableObjectArray::null());
+  untag()->set_direct_implementors(GrowableObjectArray::null());
 }
 
 void Class::AddDirectSubclass(const Class& subclass) const {
   ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
   ASSERT(!subclass.IsNull());
-  ASSERT(subclass.SuperClass() == raw());
+  ASSERT(subclass.SuperClass() == ptr());
   // Do not keep track of the direct subclasses of class Object.
   ASSERT(!IsObjectClass());
   GrowableObjectArray& direct_subclasses =
-      GrowableObjectArray::Handle(raw_ptr()->direct_subclasses());
+      GrowableObjectArray::Handle(untag()->direct_subclasses());
   if (direct_subclasses.IsNull()) {
     direct_subclasses = GrowableObjectArray::New(4, Heap::kOld);
-    raw_ptr()->set_direct_subclasses(direct_subclasses.raw());
+    untag()->set_direct_subclasses(direct_subclasses.ptr());
   }
 #if defined(DEBUG)
   // Verify that the same class is not added twice.
   for (intptr_t i = 0; i < direct_subclasses.Length(); i++) {
-    ASSERT(direct_subclasses.At(i) != subclass.raw());
+    ASSERT(direct_subclasses.At(i) != subclass.ptr());
   }
 #endif
   direct_subclasses.Add(subclass, Heap::kOld);
@@ -5001,28 +5004,28 @@
 
 void Class::ClearDirectSubclasses() const {
   ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
-  raw_ptr()->set_direct_subclasses(GrowableObjectArray::null());
+  untag()->set_direct_subclasses(GrowableObjectArray::null());
 }
 
 ArrayPtr Class::constants() const {
-  return raw_ptr()->constants();
+  return untag()->constants();
 }
 
 void Class::set_constants(const Array& value) const {
-  raw_ptr()->set_constants(value.raw());
+  untag()->set_constants(value.ptr());
 }
 
 void Class::set_declaration_type(const Type& value) const {
   ASSERT(id() != kDynamicCid && id() != kVoidCid);
   ASSERT(!value.IsNull() && value.IsCanonical() && value.IsOld());
   ASSERT((declaration_type() == Object::null()) ||
-         (declaration_type() == value.raw()));  // Set during own finalization.
+         (declaration_type() == value.ptr()));  // Set during own finalization.
   // Since DeclarationType is used as the runtime type of instances of a
   // non-generic class, its nullability must be kNonNullable.
   // The exception is DeclarationType of Null which is kNullable.
   ASSERT(value.type_class_id() != kNullCid || value.IsNullable());
   ASSERT(value.type_class_id() == kNullCid || value.IsNonNullable());
-  raw_ptr()->set_declaration_type<std::memory_order_release>(value.raw());
+  untag()->set_declaration_type<std::memory_order_release>(value.ptr());
 }
 
 TypePtr Class::DeclarationType() const {
@@ -5048,15 +5051,15 @@
                              Nullability::kNonNullable));
   type ^= ClassFinalizer::FinalizeType(type);
   set_declaration_type(type);
-  return type.raw();
+  return type.ptr();
 }
 
 void Class::set_allocation_stub(const Code& value) const {
   // Never clear the stub as it may still be a target, but will be GC-d if
   // not referenced.
   ASSERT(!value.IsNull());
-  ASSERT(raw_ptr()->allocation_stub() == Code::null());
-  raw_ptr()->set_allocation_stub(value.raw());
+  ASSERT(untag()->allocation_stub() == Code::null());
+  untag()->set_allocation_stub(value.ptr());
 }
 
 void Class::DisableAllocationStub() const {
@@ -5068,18 +5071,18 @@
   // Change the stub so that the next caller will regenerate the stub.
   existing_stub.DisableStubCode();
   // Disassociate the existing stub from class.
-  raw_ptr()->set_allocation_stub(Code::null());
+  untag()->set_allocation_stub(Code::null());
 }
 
 bool Class::IsDartFunctionClass() const {
-  return raw() == Type::Handle(Type::DartFunctionType()).type_class();
+  return ptr() == Type::Handle(Type::DartFunctionType()).type_class();
 }
 
 bool Class::IsFutureClass() const {
   // Looking up future_class in the object store would not work, because
   // this function is called during class finalization, before the object store
   // field would be initialized by InitKnownObjects().
-  return (Name() == Symbols::Future().raw()) &&
+  return (Name() == Symbols::Future().ptr()) &&
          (library() == Library::AsyncLibrary());
 }
 
@@ -5120,7 +5123,7 @@
       TypeArguments::Handle(zone, other.arguments());
   // Use the 'this_class' object as if it was the receiver of this method, but
   // instead of recursing, reset it to the super class and loop.
-  Class& this_class = Class::Handle(zone, cls.raw());
+  Class& this_class = Class::Handle(zone, cls.ptr());
   while (true) {
     // Apply additional subtyping rules if T0 or T1 are 'FutureOr'.
 
@@ -5187,7 +5190,7 @@
     }
 
     // Check for reflexivity.
-    if (this_class.raw() == other_class.raw()) {
+    if (this_class.ptr() == other_class.ptr()) {
       const intptr_t num_type_params = this_class.NumTypeParameters();
       if (num_type_params == 0) {
         return true;
@@ -5258,7 +5261,7 @@
 }
 
 bool Class::IsTopLevel() const {
-  return Name() == Symbols::TopLevel().raw();
+  return Name() == Symbols::TopLevel().ptr();
 }
 
 bool Class::IsPrivate() const {
@@ -5338,24 +5341,24 @@
 FunctionPtr Class::CheckFunctionType(const Function& func, MemberKind kind) {
   if ((kind == kInstance) || (kind == kInstanceAllowAbstract)) {
     if (func.IsDynamicFunction(kind == kInstanceAllowAbstract)) {
-      return func.raw();
+      return func.ptr();
     }
   } else if (kind == kStatic) {
     if (func.IsStaticFunction()) {
-      return func.raw();
+      return func.ptr();
     }
   } else if (kind == kConstructor) {
     if (func.IsGenerativeConstructor()) {
       ASSERT(!func.is_static());
-      return func.raw();
+      return func.ptr();
     }
   } else if (kind == kFactory) {
     if (func.IsFactory()) {
       ASSERT(func.is_static());
-      return func.raw();
+      return func.ptr();
     }
   } else if (kind == kAny) {
-    return func.raw();
+    return func.ptr();
   }
   return Function::null();
 }
@@ -5383,13 +5386,13 @@
     // If we want to increase resolver speed by avoiding the need for a read
     // lock, we could change this hash table to be lock-free for the reader.
     const Array& hash_table =
-        Array::Handle(thread->zone(), raw_ptr()->functions_hash_table());
+        Array::Handle(thread->zone(), untag()->functions_hash_table());
     if (!hash_table.IsNull()) {
-      ClassFunctionsSet set(hash_table.raw());
+      ClassFunctionsSet set(hash_table.ptr());
       REUSABLE_STRING_HANDLESCOPE(thread);
       function ^= set.GetOrNull(FunctionName(name, &(thread->StringHandle())));
       // No mutations.
-      ASSERT(set.Release().raw() == hash_table.raw());
+      ASSERT(set.Release().ptr() == hash_table.ptr());
       return function.IsNull() ? Function::null()
                                : CheckFunctionType(function, kind);
     }
@@ -5399,7 +5402,7 @@
     NoSafepointScope no_safepoint;
     for (intptr_t i = 0; i < len; i++) {
       function ^= funcs.At(i);
-      if (function.name() == name.raw()) {
+      if (function.name() == name.ptr()) {
         return CheckFunctionType(function, kind);
       }
     }
@@ -5472,7 +5475,7 @@
     function ^= funcs.At(i);
     function_name = function.name();
     if (MatchesAccessorName(function_name, prefix, prefix_length, name)) {
-      return function.raw();
+      return function.ptr();
     }
   }
 
@@ -5510,14 +5513,14 @@
     // Use fast raw pointer string compare for symbols.
     for (intptr_t i = 0; i < len; i++) {
       field ^= flds.At(i);
-      if (name.raw() == field.name()) {
+      if (name.ptr() == field.name()) {
         if (kind == kInstance) {
-          return field.is_static() ? Field::null() : field.raw();
+          return field.is_static() ? Field::null() : field.ptr();
         } else if (kind == kStatic) {
-          return field.is_static() ? field.raw() : Field::null();
+          return field.is_static() ? field.ptr() : Field::null();
         }
         ASSERT(kind == kAny);
-        return field.raw();
+        return field.ptr();
       }
     }
   } else {
@@ -5527,12 +5530,12 @@
       field_name = field.name();
       if (name.Equals(field_name)) {
         if (kind == kInstance) {
-          return field.is_static() ? Field::null() : field.raw();
+          return field.is_static() ? Field::null() : field.ptr();
         } else if (kind == kStatic) {
-          return field.is_static() ? field.raw() : Field::null();
+          return field.is_static() ? field.ptr() : Field::null();
         }
         ASSERT(kind == kAny);
-        return field.raw();
+        return field.ptr();
       }
     }
   }
@@ -5564,7 +5567,7 @@
       continue;
     }
     if (String::EqualsIgnoringPrivateKey(field_name, name)) {
-      return field.raw();
+      return field.ptr();
     }
   }
   return Field::null();
@@ -5573,7 +5576,7 @@
 FieldPtr Class::LookupInstanceFieldAllowPrivate(const String& name) const {
   Field& field = Field::Handle(LookupFieldAllowPrivate(name, true));
   if (!field.IsNull() && !field.is_static()) {
-    return field.raw();
+    return field.ptr();
   }
   return Field::null();
 }
@@ -5581,7 +5584,7 @@
 FieldPtr Class::LookupStaticFieldAllowPrivate(const String& name) const {
   Field& field = Field::Handle(LookupFieldAllowPrivate(name));
   if (!field.IsNull() && field.is_static()) {
-    return field.raw();
+    return field.ptr();
   }
   return Field::null();
 }
@@ -5655,7 +5658,7 @@
 
   // Called when growing the table.
   static bool IsMatch(const Object& a, const Object& b) {
-    return a.raw() == b.raw();
+    return a.ptr() == b.ptr();
   }
   static bool IsMatch(const KeyType& a, const Object& b) {
     return a.Matches(ObjectType::Cast(b));
@@ -5666,7 +5669,7 @@
   static uword Hash(const KeyType& key) { return key.Hash(); }
   static ObjectPtr NewKey(const KeyType& obj) {
     if (obj.key_ != NULL) {
-      return obj.key_->raw();
+      return obj.key_->ptr();
     } else {
       UNIMPLEMENTED();
       return NULL;
@@ -5680,7 +5683,7 @@
 
 // Returns an instance of Double or Double::null().
 DoublePtr Class::LookupCanonicalDouble(Zone* zone, double value) const {
-  ASSERT(this->raw() ==
+  ASSERT(this->ptr() ==
          IsolateGroup::Current()->object_store()->double_class());
   if (this->constants() == Array::null()) return Double::null();
 
@@ -5688,19 +5691,19 @@
   CanonicalDoubleSet constants(zone, this->constants());
   canonical_value ^= constants.GetOrNull(CanonicalDoubleKey(value));
   this->set_constants(constants.Release());
-  return canonical_value.raw();
+  return canonical_value.ptr();
 }
 
 // Returns an instance of Mint or Mint::null().
 MintPtr Class::LookupCanonicalMint(Zone* zone, int64_t value) const {
-  ASSERT(this->raw() == IsolateGroup::Current()->object_store()->mint_class());
+  ASSERT(this->ptr() == IsolateGroup::Current()->object_store()->mint_class());
   if (this->constants() == Array::null()) return Mint::null();
 
   Mint& canonical_value = Mint::Handle(zone);
   CanonicalMintSet constants(zone, this->constants());
   canonical_value ^= constants.GetOrNull(CanonicalMintKey(value));
   this->set_constants(constants.Release());
-  return canonical_value.raw();
+  return canonical_value.ptr();
 }
 
 class CanonicalInstanceKey {
@@ -5733,7 +5736,7 @@
   static bool IsMatch(const Object& a, const Object& b) {
     ASSERT(!(a.IsString() || a.IsInteger() || a.IsAbstractType()));
     ASSERT(!(b.IsString() || b.IsInteger() || b.IsAbstractType()));
-    return a.raw() == b.raw();
+    return a.ptr() == b.ptr();
   }
   static bool IsMatch(const CanonicalInstanceKey& a, const Object& b) {
     return a.Matches(Instance::Cast(b));
@@ -5745,14 +5748,14 @@
   }
   static uword Hash(const CanonicalInstanceKey& key) { return key.Hash(); }
   static ObjectPtr NewKey(const CanonicalInstanceKey& obj) {
-    return obj.key_.raw();
+    return obj.key_.ptr();
   }
 };
 typedef UnorderedHashSet<CanonicalInstanceTraits> CanonicalInstancesSet;
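 // The traits trio above (IsMatch / Hash / NewKey) parameterizes the VM's
 // UnorderedHashSet; as a hedged standalone sketch, std::unordered_set with
 // functor parameters plays the same role. All names here are illustrative.
 //
 //   #include <cstddef>
 //   #include <functional>
 //   #include <unordered_set>
 //
 //   struct Obj { const void* identity; };
 //
 //   struct ObjHash {
 //     std::size_t operator()(const Obj& o) const {
 //       return std::hash<const void*>{}(o.identity);
 //     }
 //   };
 //
 //   struct ObjMatch {
 //     // Identity comparison, like `a.ptr() == b.ptr()` in IsMatch above.
 //     bool operator()(const Obj& a, const Obj& b) const {
 //       return a.identity == b.identity;
 //     }
 //   };
 //
 //   using CanonicalSet = std::unordered_set<Obj, ObjHash, ObjMatch>;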
 
 InstancePtr Class::LookupCanonicalInstance(Zone* zone,
                                            const Instance& value) const {
-  ASSERT(this->raw() == value.clazz());
+  ASSERT(this->ptr() == value.clazz());
   ASSERT(is_finalized() || is_prefinalized());
   Instance& canonical_value = Instance::Handle(zone);
   if (this->constants() != Array::null()) {
@@ -5760,12 +5763,12 @@
     canonical_value ^= constants.GetOrNull(CanonicalInstanceKey(value));
     this->set_constants(constants.Release());
   }
-  return canonical_value.raw();
+  return canonical_value.ptr();
 }
 
 InstancePtr Class::InsertCanonicalConstant(Zone* zone,
                                            const Instance& constant) const {
-  ASSERT(this->raw() == constant.clazz());
+  ASSERT(this->ptr() == constant.clazz());
   Instance& canonical_value = Instance::Handle(zone);
   if (this->constants() == Array::null()) {
     CanonicalInstancesSet constants(
@@ -5778,7 +5781,7 @@
     canonical_value ^= constants.InsertNewOrGet(CanonicalInstanceKey(constant));
     this->set_constants(constants.Release());
   }
-  return canonical_value.raw();
+  return canonical_value.ptr();
 }
 
 void Class::InsertCanonicalDouble(Zone* zone, const Double& constant) const {
@@ -5814,7 +5817,7 @@
 
   set_constants(Object::null_array());
 
-  CanonicalInstancesSet set(zone, old_constants.raw());
+  CanonicalInstancesSet set(zone, old_constants.ptr());
   Instance& constant = Instance::Handle(zone);
   CanonicalInstancesSet::Iterator it(&set);
   while (it.MoveNext()) {
@@ -5885,7 +5888,7 @@
 }
 
 void TypeArguments::set_nullability(intptr_t value) const {
-  raw_ptr()->set_nullability(Smi::New(value));
+  untag()->set_nullability(Smi::New(value));
 }
 
 intptr_t TypeArguments::HashForRange(intptr_t from_index, intptr_t len) const {
@@ -5922,10 +5925,10 @@
                                         intptr_t total_length) const {
   if (other_length == 0) {
     ASSERT(IsCanonical());
-    return raw();
+    return ptr();
   } else if (other_length == total_length) {
     ASSERT(other.IsCanonical());
-    return other.raw();
+    return other.ptr();
   } else if (IsNull() && other.IsNull()) {
     return TypeArguments::null();
   }
@@ -5960,7 +5963,7 @@
     type = other.TypeAt(i);
     result.SetTypeAt(this_len + i, type);
   }
-  return result.raw();
+  return result.ptr();
 }
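 // The two fast paths above (reuse `this` when `other` contributes nothing,
 // reuse `other` when it supplies the whole vector) in a plain-vector
 // sketch; this is not the VM's representation:
 //
 //   #include <vector>
 //
 //   std::vector<int> Concat(const std::vector<int>& prefix,
 //                           const std::vector<int>& suffix) {
 //     if (suffix.empty()) return prefix;  // other_length == 0 above.
 //     if (prefix.empty()) return suffix;  // other_length == total_length.
 //     std::vector<int> out = prefix;
 //     out.insert(out.end(), suffix.begin(), suffix.end());
 //     return out;
 //   }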
 
 StringPtr TypeArguments::Name() const {
@@ -6006,7 +6009,7 @@
   if (IsNull()) {
     return buffer->AddString("null");
   }
-  buffer->Printf("(H%" Px ")", Smi::Value(raw_ptr()->hash()));
+  buffer->Printf("(H%" Px ")", Smi::Value(untag()->hash()));
   auto& type_at = AbstractType::Handle();
   for (intptr_t i = 0; i < Length(); i++) {
     type_at = TypeAt(i);
@@ -6019,7 +6022,7 @@
                                           intptr_t len,
                                           TypeEquality kind,
                                           TrailPtr trail) const {
-  if (this->raw() == other.raw()) {
+  if (this->ptr() == other.ptr()) {
     return true;
   }
   if (IsNull() || other.IsNull()) {
@@ -6128,7 +6131,7 @@
 ArrayPtr TypeArguments::instantiations() const {
   // We rely on the fact that any loads from the array are dependent loads and
   // avoid the load-acquire barrier here.
-  return raw_ptr()->instantiations();
+  return untag()->instantiations();
 }
 
 void TypeArguments::set_instantiations(const Array& value) const {
@@ -6136,7 +6139,7 @@
   // when releasing the pointer to the array pointer.
   // => We have to use store-release here.
   ASSERT(!value.IsNull());
-  raw_ptr()->set_instantiations<std::memory_order_release>(value.raw());
+  untag()->set_instantiations<std::memory_order_release>(value.ptr());
 }
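 // A hedged sketch of the release-store / dependent-load pairing the two
 // comments above rely on; `Table` is hypothetical, and
 // std::memory_order_consume stands in for the dependent-load reasoning.
 //
 //   #include <atomic>
 //
 //   struct Table {
 //     std::atomic<int*> slots{nullptr};
 //
 //     void Publish(int* fresh) {
 //       // Writer: fully initialize *fresh first, then release-store the
 //       // pointer so readers that see it also see its contents.
 //       slots.store(fresh, std::memory_order_release);
 //     }
 //
 //     int First() const {
 //       // Reader: the dereference depends on the loaded pointer, so a
 //       // consume (dependent) load suffices and no acquire fence is needed.
 //       int* p = slots.load(std::memory_order_consume);
 //       return p == nullptr ? -1 : p[0];
 //     }
 //   };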
 
 bool TypeArguments::HasCount(intptr_t count) const {
@@ -6150,26 +6153,26 @@
   if (IsNull()) {
     return 0;
   }
-  return Smi::Value(raw_ptr()->length());
+  return Smi::Value(untag()->length());
 }
 
 intptr_t TypeArguments::nullability() const {
   if (IsNull()) {
     return 0;
   }
-  return Smi::Value(raw_ptr()->nullability());
+  return Smi::Value(untag()->nullability());
 }
 
 AbstractTypePtr TypeArguments::TypeAt(intptr_t index) const {
   ASSERT(!IsNull());
   ASSERT((index >= 0) && (index < Length()));
-  return raw_ptr()->element(index);
+  return untag()->element(index);
 }
 
 AbstractTypePtr TypeArguments::TypeAtNullSafe(intptr_t index) const {
   if (IsNull()) {
    // A null vector represents an infinite list of dynamic types.
-    return Type::dynamic_type().raw();
+    return Type::dynamic_type().ptr();
   }
   ASSERT((index >= 0) && (index < Length()));
   return TypeAt(index);
@@ -6178,7 +6181,7 @@
 void TypeArguments::SetTypeAt(intptr_t index, const AbstractType& value) const {
   ASSERT(!IsCanonical());
   ASSERT((index >= 0) && (index < Length()));
-  return raw_ptr()->set_element(index, value.raw());
+  return untag()->set_element(index, value.ptr());
 }
 
 bool TypeArguments::IsSubvectorInstantiated(intptr_t from_index,
@@ -6389,7 +6392,7 @@
   if ((instantiator_type_arguments.IsNull() ||
        instantiator_type_arguments.Length() == Length()) &&
       IsUninstantiatedIdentity()) {
-    return instantiator_type_arguments.raw();
+    return instantiator_type_arguments.ptr();
   }
   const intptr_t num_types = Length();
   TypeArguments& instantiated_array =
@@ -6411,12 +6414,12 @@
       // A returned null type indicates a failed instantiation in dead code that
       // must be propagated up to the caller, the optimizing compiler.
       if (type.IsNull()) {
-        return Object::empty_type_arguments().raw();
+        return Object::empty_type_arguments().ptr();
       }
     }
     instantiated_array.SetTypeAt(i, type);
   }
-  return instantiated_array.raw();
+  return instantiated_array.ptr();
 }
 
 TypeArgumentsPtr TypeArguments::InstantiateAndCanonicalizeFrom(
@@ -6443,10 +6446,10 @@
     if ((prior_instantiations.At(
              index +
              TypeArguments::Instantiation::kInstantiatorTypeArgsIndex) ==
-         instantiator_type_arguments.raw()) &&
+         instantiator_type_arguments.ptr()) &&
         (prior_instantiations.At(
              index + TypeArguments::Instantiation::kFunctionTypeArgsIndex) ==
-         function_type_arguments.raw())) {
+         function_type_arguments.ptr())) {
       return TypeArguments::RawCast(prior_instantiations.At(
           index + TypeArguments::Instantiation::kInstantiatedTypeArgsIndex));
     }
@@ -6464,7 +6467,7 @@
   result = result.Canonicalize(thread, nullptr);
   // InstantiateAndCanonicalizeFrom is not reentrant. It cannot have been called
   // indirectly, so the prior_instantiations array cannot have grown.
-  ASSERT(prior_instantiations.raw() == instantiations());
+  ASSERT(prior_instantiations.ptr() == instantiations());
   // Add instantiator and function type args and result to instantiations array.
   intptr_t length = prior_instantiations.Length();
   if ((index + TypeArguments::Instantiation::kSizeInWords) >= length) {
@@ -6502,7 +6505,7 @@
   prior_instantiations.SetAtRelease(
       index + TypeArguments::Instantiation::kInstantiatorTypeArgsIndex,
       instantiator_type_arguments);
-  return result.raw();
+  return result.ptr();
 }
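 // The cache probed above, as a hedged standalone sketch: a flat array of
 // fixed-size records scanned linearly, keyed on the (instantiator,
 // function) type-argument pair. Constants and names are illustrative,
 // not the VM's.
 //
 //   #include <cstddef>
 //   #include <cstdint>
 //   #include <vector>
 //
 //   constexpr std::size_t kRecordSize = 3;  // instantiator, function, result.
 //
 //   // Returns the cached result for the key pair, or 0 when absent.
 //   uintptr_t LookupInstantiation(const std::vector<uintptr_t>& cache,
 //                                 uintptr_t instantiator_args,
 //                                 uintptr_t function_args) {
 //     for (std::size_t i = 0; i + kRecordSize <= cache.size();
 //          i += kRecordSize) {
 //       if (cache[i] == instantiator_args && cache[i + 1] == function_args) {
 //         return cache[i + 2];
 //       }
 //     }
 //     return 0;
 //   }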
 
 TypeArgumentsPtr TypeArguments::New(intptr_t len, Heap::Space space) {
@@ -6522,28 +6525,28 @@
     result.set_nullability(0);
   }
   // The zero array should have been initialized.
-  ASSERT(Object::zero_array().raw() != Array::null());
+  ASSERT(Object::zero_array().ptr() != Array::null());
   COMPILE_ASSERT(TypeArguments::kNoInstantiator == 0);
   result.set_instantiations(Object::zero_array());
-  return result.raw();
+  return result.ptr();
 }
 
 void TypeArguments::SetLength(intptr_t value) const {
   ASSERT(!IsCanonical());
   // This is only safe because we create a new Smi, which does not cause
   // heap allocation.
-  raw_ptr()->set_length(Smi::New(value));
+  untag()->set_length(Smi::New(value));
 }
 
 TypeArgumentsPtr TypeArguments::Canonicalize(Thread* thread,
                                              TrailPtr trail) const {
   if (IsNull() || IsCanonical()) {
     ASSERT(IsOld());
-    return this->raw();
+    return this->ptr();
   }
   const intptr_t num_types = Length();
   if (num_types == 0) {
-    return TypeArguments::empty_type_arguments().raw();
+    return TypeArguments::empty_type_arguments().ptr();
   } else if (IsRaw(0, num_types)) {
     return TypeArguments::null();
   }
@@ -6567,7 +6570,7 @@
       if (IsCanonical()) {
         // Canonicalizing this type_arg canonicalized this type.
         ASSERT(IsRecursive());
-        return this->raw();
+        return this->ptr();
       }
       SetTypeAt(i, type_arg);
     }
@@ -6588,7 +6591,7 @@
       if (this->IsNew()) {
         result ^= Object::Clone(*this, Heap::kOld);
       } else {
-        result = this->raw();
+        result = this->ptr();
       }
       ASSERT(result.IsOld());
       result.ComputeNullability();
@@ -6603,7 +6606,7 @@
   ASSERT(!result.IsNull());
   ASSERT(result.IsTypeArguments());
   ASSERT(result.IsCanonical());
-  return result.raw();
+  return result.ptr();
 }
 
 void TypeArguments::EnumerateURIs(URIs* uris) const {
@@ -6642,7 +6645,7 @@
   result.set_origin_class(origin_class);
   result.set_script(Script::Handle(origin_class.script()));
   result.set_library_kernel_offset(-1);
-  return result.raw();
+  return result.ptr();
 }
 
 PatchClassPtr PatchClass::New(const Class& patched_class,
@@ -6652,7 +6655,7 @@
   result.set_origin_class(patched_class);
   result.set_script(script);
   result.set_library_kernel_offset(-1);
-  return result.raw();
+  return result.ptr();
 }
 
 PatchClassPtr PatchClass::New() {
@@ -6663,19 +6666,19 @@
 }
 
 void PatchClass::set_patched_class(const Class& value) const {
-  raw_ptr()->set_patched_class(value.raw());
+  untag()->set_patched_class(value.ptr());
 }
 
 void PatchClass::set_origin_class(const Class& value) const {
-  raw_ptr()->set_origin_class(value.raw());
+  untag()->set_origin_class(value.ptr());
 }
 
 void PatchClass::set_script(const Script& value) const {
-  raw_ptr()->set_script(value.raw());
+  untag()->set_script(value.ptr());
 }
 
 void PatchClass::set_library_kernel_data(const ExternalTypedData& data) const {
-  raw_ptr()->set_library_kernel_data(data.raw());
+  untag()->set_library_kernel_data(data.ptr());
 }
 
 intptr_t Function::Hash() const {
@@ -6707,9 +6710,9 @@
 }
 
 void Function::SetInstructionsSafe(const Code& value) const {
-  raw_ptr()->set_code(value.raw());
-  StoreNonPointer(&raw_ptr()->entry_point_, value.EntryPoint());
-  StoreNonPointer(&raw_ptr()->unchecked_entry_point_,
+  untag()->set_code(value.ptr());
+  StoreNonPointer(&untag()->entry_point_, value.EntryPoint());
+  StoreNonPointer(&untag()->unchecked_entry_point_,
                   value.UncheckedEntryPoint());
 }
 
@@ -6719,19 +6722,19 @@
   value.set_owner(*this);
   SetInstructions(value);
   ASSERT(Function::Handle(value.function()).IsNull() ||
-         (value.function() == this->raw()));
+         (value.function() == this->ptr()));
 }
 
 bool Function::HasCode() const {
   NoSafepointScope no_safepoint;
-  ASSERT(raw_ptr()->code() != Code::null());
-  return raw_ptr()->code() != StubCode::LazyCompile().raw();
+  ASSERT(untag()->code() != Code::null());
+  return untag()->code() != StubCode::LazyCompile().ptr();
 }
 
 bool Function::HasCode(FunctionPtr function) {
   NoSafepointScope no_safepoint;
-  ASSERT(function->ptr()->code() != Code::null());
-  return function->ptr()->code() != StubCode::LazyCompile().raw();
+  ASSERT(function->untag()->code() != Code::null());
+  return function->untag()->code() != StubCode::LazyCompile().ptr();
 }
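 // The sentinel convention HasCode() tests, as a minimal hedged sketch: an
 // uncompiled function points at one shared stub rather than at null, so the
 // field is never null and a single pointer compare suffices. Names are
 // illustrative.
 //
 //   struct Code {};
 //   Code lazy_compile_stub;  // Shared "not yet compiled" sentinel.
 //
 //   bool HasRealCode(const Code* entry) {
 //     // `entry` is either real code or the sentinel, never nullptr.
 //     return entry != &lazy_compile_stub;
 //   }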
 
 void Function::ClearCode() const {
@@ -6740,7 +6743,7 @@
 #else
   ASSERT(Thread::Current()->IsMutatorThread());
 
-  raw_ptr()->set_unoptimized_code(Code::null());
+  untag()->set_unoptimized_code(Code::null());
 
   SetInstructions(StubCode::LazyCompile());
 #endif  // defined(DART_PRECOMPILED_RUNTIME)
@@ -6822,13 +6825,13 @@
 #else
   DEBUG_ASSERT(IsMutatorOrAtSafepoint());
   ASSERT(value.IsNull() || !value.is_optimized());
-  raw_ptr()->set_unoptimized_code(value.raw());
+  untag()->set_unoptimized_code(value.ptr());
 #endif
 }
 
 ContextScopePtr Function::context_scope() const {
   if (IsClosureFunction()) {
-    const Object& obj = Object::Handle(raw_ptr()->data());
+    const Object& obj = Object::Handle(untag()->data());
     ASSERT(!obj.IsNull());
     return ClosureData::Cast(obj).context_scope();
   }
@@ -6837,7 +6840,7 @@
 
 void Function::set_context_scope(const ContextScope& value) const {
   if (IsClosureFunction()) {
-    const Object& obj = Object::Handle(raw_ptr()->data());
+    const Object& obj = Object::Handle(untag()->data());
     ASSERT(!obj.IsNull());
     ClosureData::Cast(obj).set_context_scope(value);
     return;
@@ -6847,7 +6850,7 @@
 
 InstancePtr Function::implicit_static_closure() const {
   if (IsImplicitStaticClosureFunction()) {
-    const Object& obj = Object::Handle(raw_ptr()->data());
+    const Object& obj = Object::Handle(untag()->data());
     ASSERT(!obj.IsNull());
     return ClosureData::Cast(obj).implicit_static_closure();
   }
@@ -6856,7 +6859,7 @@
 
 void Function::set_implicit_static_closure(const Instance& closure) const {
   if (IsImplicitStaticClosureFunction()) {
-    const Object& obj = Object::Handle(raw_ptr()->data());
+    const Object& obj = Object::Handle(untag()->data());
     ASSERT(!obj.IsNull());
     ClosureData::Cast(obj).set_implicit_static_closure(closure);
     return;
@@ -6865,69 +6868,68 @@
 }
 
 ScriptPtr Function::eval_script() const {
-  const Object& obj = Object::Handle(raw_ptr()->data());
+  const Object& obj = Object::Handle(untag()->data());
   if (obj.IsScript()) {
-    return Script::Cast(obj).raw();
+    return Script::Cast(obj).ptr();
   }
   return Script::null();
 }
 
 void Function::set_eval_script(const Script& script) const {
   ASSERT(token_pos() == TokenPosition::kMinSource);
-  ASSERT(raw_ptr()->data() == Object::null());
+  ASSERT(untag()->data() == Object::null());
   set_data(script);
 }
 
 FunctionPtr Function::extracted_method_closure() const {
-  ASSERT(kind() == FunctionLayout::kMethodExtractor);
-  const Object& obj = Object::Handle(raw_ptr()->data());
+  ASSERT(kind() == UntaggedFunction::kMethodExtractor);
+  const Object& obj = Object::Handle(untag()->data());
   ASSERT(obj.IsFunction());
-  return Function::Cast(obj).raw();
+  return Function::Cast(obj).ptr();
 }
 
 void Function::set_extracted_method_closure(const Function& value) const {
-  ASSERT(kind() == FunctionLayout::kMethodExtractor);
-  ASSERT(raw_ptr()->data() == Object::null());
+  ASSERT(kind() == UntaggedFunction::kMethodExtractor);
+  ASSERT(untag()->data() == Object::null());
   set_data(value);
 }
 
 ArrayPtr Function::saved_args_desc() const {
-  ASSERT(kind() == FunctionLayout::kNoSuchMethodDispatcher ||
-         kind() == FunctionLayout::kInvokeFieldDispatcher);
-  const Object& obj = Object::Handle(raw_ptr()->data());
+  ASSERT(kind() == UntaggedFunction::kNoSuchMethodDispatcher ||
+         kind() == UntaggedFunction::kInvokeFieldDispatcher);
+  const Object& obj = Object::Handle(untag()->data());
   ASSERT(obj.IsArray());
-  return Array::Cast(obj).raw();
+  return Array::Cast(obj).ptr();
 }
 
 void Function::set_saved_args_desc(const Array& value) const {
-  ASSERT(kind() == FunctionLayout::kNoSuchMethodDispatcher ||
-         kind() == FunctionLayout::kInvokeFieldDispatcher);
-  ASSERT(raw_ptr()->data() == Object::null());
+  ASSERT(kind() == UntaggedFunction::kNoSuchMethodDispatcher ||
+         kind() == UntaggedFunction::kInvokeFieldDispatcher);
+  ASSERT(untag()->data() == Object::null());
   set_data(value);
 }
 
 FieldPtr Function::accessor_field() const {
-  ASSERT(kind() == FunctionLayout::kImplicitGetter ||
-         kind() == FunctionLayout::kImplicitSetter ||
-         kind() == FunctionLayout::kImplicitStaticGetter ||
-         kind() == FunctionLayout::kFieldInitializer);
-  return Field::RawCast(raw_ptr()->data());
+  ASSERT(kind() == UntaggedFunction::kImplicitGetter ||
+         kind() == UntaggedFunction::kImplicitSetter ||
+         kind() == UntaggedFunction::kImplicitStaticGetter ||
+         kind() == UntaggedFunction::kFieldInitializer);
+  return Field::RawCast(untag()->data());
 }
 
 void Function::set_accessor_field(const Field& value) const {
-  ASSERT(kind() == FunctionLayout::kImplicitGetter ||
-         kind() == FunctionLayout::kImplicitSetter ||
-         kind() == FunctionLayout::kImplicitStaticGetter ||
-         kind() == FunctionLayout::kFieldInitializer);
+  ASSERT(kind() == UntaggedFunction::kImplicitGetter ||
+         kind() == UntaggedFunction::kImplicitSetter ||
+         kind() == UntaggedFunction::kImplicitStaticGetter ||
+         kind() == UntaggedFunction::kFieldInitializer);
   // Top level classes may be finalized multiple times.
-  ASSERT(raw_ptr()->data() == Object::null() ||
-         raw_ptr()->data() == value.raw());
+  ASSERT(untag()->data() == Object::null() || untag()->data() == value.ptr());
   set_data(value);
 }
 
 FunctionPtr Function::parent_function() const {
   if (IsClosureFunction()) {
-    const Object& obj = Object::Handle(raw_ptr()->data());
+    const Object& obj = Object::Handle(untag()->data());
     ASSERT(!obj.IsNull());
     return ClosureData::Cast(obj).parent_function();
   }
@@ -6935,7 +6937,7 @@
 }
 
 void Function::set_parent_function(const Function& value) const {
-  const Object& obj = Object::Handle(raw_ptr()->data());
+  const Object& obj = Object::Handle(untag()->data());
   ASSERT(!obj.IsNull());
   ASSERT(IsClosureFunction());
   ClosureData::Cast(obj).set_parent_function(value);
@@ -6955,7 +6957,7 @@
     if (kind_out != nullptr) {
       *kind_out = DefaultTypeArgumentsKind::kIsInstantiated;
     }
-    return Object::empty_type_arguments().raw();
+    return Object::empty_type_arguments().ptr();
   }
   auto& result = TypeArguments::Handle(thread->zone(), type_parameters());
   result = InstantiateTypeArgumentsToBounds(thread, result);
@@ -6967,7 +6969,7 @@
                     ? DefaultTypeArgumentsKind::kIsInstantiated
                     : DefaultTypeArgumentsKind::kNeedsInstantiation;
   }
-  return result.raw();
+  return result.ptr();
 }
 
 void Function::UpdateCachedDefaultTypeArguments(Thread* thread) const {
@@ -7016,7 +7018,7 @@
     UNREACHABLE();
   }
   const auto& closure_data =
-      ClosureData::Handle(ClosureData::RawCast(raw_ptr()->data()));
+      ClosureData::Handle(ClosureData::RawCast(untag()->data()));
   ASSERT(!closure_data.IsNull());
   if (kind_out != nullptr) {
     *kind_out = DefaultTypeArgumentsKindField::decode(
@@ -7030,7 +7032,7 @@
     UNREACHABLE();
   }
   const auto& closure_data =
-      ClosureData::Handle(ClosureData::RawCast(raw_ptr()->data()));
+      ClosureData::Handle(ClosureData::RawCast(untag()->data()));
   ASSERT(!closure_data.IsNull());
   intptr_t updated_info = closure_data.default_type_arguments_info();
   auto kind = DefaultTypeArgumentsKindFor(value);
@@ -7063,14 +7065,14 @@
 FunctionPtr Function::GetOutermostFunction() const {
   FunctionPtr parent = parent_function();
   if (parent == Object::null()) {
-    return raw();
+    return ptr();
   }
   Function& function = Function::Handle();
   do {
     function = parent;
     parent = function.parent_function();
   } while (parent != Object::null());
-  return function.raw();
+  return function.ptr();
 }
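 // GetOutermostFunction() above is a plain parent-chain walk; as a generic
 // sketch (node type hypothetical):
 //
 //   struct Node { Node* parent; };
 //
 //   Node* Outermost(Node* n) {
 //     while (n->parent != nullptr) n = n->parent;  // Stop at the root.
 //     return n;
 //   }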
 
 bool Function::HasGenericParent() const {
@@ -7094,23 +7096,23 @@
       IsFieldInitializer() || IsFfiTrampoline()) {
     return Function::null();
   }
-  const Object& obj = Object::Handle(raw_ptr()->data());
+  const Object& obj = Object::Handle(untag()->data());
   ASSERT(obj.IsNull() || obj.IsScript() || obj.IsFunction() || obj.IsArray());
   if (obj.IsNull() || obj.IsScript()) {
     return Function::null();
   }
   if (obj.IsFunction()) {
-    return Function::Cast(obj).raw();
+    return Function::Cast(obj).ptr();
   }
   ASSERT(is_native());
   ASSERT(obj.IsArray());
   const Object& res = Object::Handle(Array::Cast(obj).At(1));
-  return res.IsNull() ? Function::null() : Function::Cast(res).raw();
+  return res.IsNull() ? Function::null() : Function::Cast(res).ptr();
 }
 
 void Function::set_implicit_closure_function(const Function& value) const {
   ASSERT(!IsClosureFunction());
-  const Object& old_data = Object::Handle(raw_ptr()->data());
+  const Object& old_data = Object::Handle(untag()->data());
   if (is_native()) {
     ASSERT(old_data.IsArray());
     ASSERT((Array::Cast(old_data).At(1) == Object::null()) || value.IsNull());
@@ -7129,14 +7131,14 @@
 
 void Function::SetFfiCSignature(const FunctionType& sig) const {
   ASSERT(IsFfiTrampoline());
-  const Object& obj = Object::Handle(raw_ptr()->data());
+  const Object& obj = Object::Handle(untag()->data());
   ASSERT(!obj.IsNull());
   FfiTrampolineData::Cast(obj).set_c_signature(sig);
 }
 
 FunctionTypePtr Function::FfiCSignature() const {
   ASSERT(IsFfiTrampoline());
-  const Object& obj = Object::Handle(raw_ptr()->data());
+  const Object& obj = Object::Handle(untag()->data());
   ASSERT(!obj.IsNull());
   return FfiTrampolineData::Cast(obj).c_signature();
 }
@@ -7167,59 +7169,59 @@
 
 int32_t Function::FfiCallbackId() const {
   ASSERT(IsFfiTrampoline());
-  const Object& obj = Object::Handle(raw_ptr()->data());
+  const Object& obj = Object::Handle(untag()->data());
   ASSERT(!obj.IsNull());
   return FfiTrampolineData::Cast(obj).callback_id();
 }
 
 void Function::SetFfiCallbackId(int32_t value) const {
   ASSERT(IsFfiTrampoline());
-  const Object& obj = Object::Handle(raw_ptr()->data());
+  const Object& obj = Object::Handle(untag()->data());
   ASSERT(!obj.IsNull());
   FfiTrampolineData::Cast(obj).set_callback_id(value);
 }
 
 FunctionPtr Function::FfiCallbackTarget() const {
   ASSERT(IsFfiTrampoline());
-  const Object& obj = Object::Handle(raw_ptr()->data());
+  const Object& obj = Object::Handle(untag()->data());
   ASSERT(!obj.IsNull());
   return FfiTrampolineData::Cast(obj).callback_target();
 }
 
 void Function::SetFfiCallbackTarget(const Function& target) const {
   ASSERT(IsFfiTrampoline());
-  const Object& obj = Object::Handle(raw_ptr()->data());
+  const Object& obj = Object::Handle(untag()->data());
   ASSERT(!obj.IsNull());
   FfiTrampolineData::Cast(obj).set_callback_target(target);
 }
 
 InstancePtr Function::FfiCallbackExceptionalReturn() const {
   ASSERT(IsFfiTrampoline());
-  const Object& obj = Object::Handle(raw_ptr()->data());
+  const Object& obj = Object::Handle(untag()->data());
   ASSERT(!obj.IsNull());
   return FfiTrampolineData::Cast(obj).callback_exceptional_return();
 }
 
 void Function::SetFfiCallbackExceptionalReturn(const Instance& value) const {
   ASSERT(IsFfiTrampoline());
-  const Object& obj = Object::Handle(raw_ptr()->data());
+  const Object& obj = Object::Handle(untag()->data());
   ASSERT(!obj.IsNull());
   FfiTrampolineData::Cast(obj).set_callback_exceptional_return(value);
 }
 
-const char* Function::KindToCString(FunctionLayout::Kind kind) {
-  return FunctionLayout::KindToCString(kind);
+const char* Function::KindToCString(UntaggedFunction::Kind kind) {
+  return UntaggedFunction::KindToCString(kind);
 }
 
 FunctionPtr Function::ForwardingTarget() const {
-  ASSERT(kind() == FunctionLayout::kDynamicInvocationForwarder);
+  ASSERT(kind() == UntaggedFunction::kDynamicInvocationForwarder);
   Array& checks = Array::Handle();
-  checks ^= raw_ptr()->data();
+  checks ^= untag()->data();
   return Function::RawCast(checks.At(0));
 }
 
 void Function::SetForwardingChecks(const Array& checks) const {
-  ASSERT(kind() == FunctionLayout::kDynamicInvocationForwarder);
+  ASSERT(kind() == UntaggedFunction::kDynamicInvocationForwarder);
   ASSERT(checks.Length() >= 1);
   ASSERT(Object::Handle(checks.At(0)).IsFunction());
   set_data(checks);
@@ -7246,7 +7248,7 @@
 //   dyn inv forwarder:       Array[0] = Function target
 //                            Array[1] = TypeArguments default type args
 void Function::set_data(const Object& value) const {
-  raw_ptr()->set_data(value.raw());
+  untag()->set_data(value.ptr());
 }
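 // The per-kind layout documented above, as a hedged sketch: one untyped
 // field whose interpretation is dictated by the function's kind. The enum
 // and struct here are illustrative only.
 //
 //   enum class Kind { kIrregexp, kNative, kDynInvForwarder };
 //
 //   struct Fn {
 //     Kind kind;
 //     void* data;  // Meaning depends on `kind`, as with data_ above.
 //   };
 //
 //   // Callers must check the kind before reinterpreting the payload,
 //   // mirroring the ASSERT(kind() == ...) guards in the accessors above.
 //   void* RegexpOf(const Fn& f) {
 //     return f.kind == Kind::kIrregexp ? f.data : nullptr;
 //   }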
 
 bool Function::IsInFactoryScope() const {
@@ -7262,42 +7264,43 @@
 
 void Function::set_name(const String& value) const {
   ASSERT(value.IsSymbol());
-  raw_ptr()->set_name(value.raw());
+  untag()->set_name(value.ptr());
 }
 
 void Function::set_owner(const Object& value) const {
   ASSERT(!value.IsNull());
-  raw_ptr()->set_owner(value.raw());
+  untag()->set_owner(value.ptr());
 }
 
 RegExpPtr Function::regexp() const {
-  ASSERT(kind() == FunctionLayout::kIrregexpFunction);
-  const Array& pair = Array::Cast(Object::Handle(raw_ptr()->data()));
+  ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
+  const Array& pair = Array::Cast(Object::Handle(untag()->data()));
   return RegExp::RawCast(pair.At(0));
 }
 
 class StickySpecialization : public BitField<intptr_t, bool, 0, 1> {};
 class StringSpecializationCid
-    : public BitField<intptr_t, intptr_t, 1, ObjectLayout::kClassIdTagSize> {};
+    : public BitField<intptr_t, intptr_t, 1, UntaggedObject::kClassIdTagSize> {
+};
 
 intptr_t Function::string_specialization_cid() const {
-  ASSERT(kind() == FunctionLayout::kIrregexpFunction);
-  const Array& pair = Array::Cast(Object::Handle(raw_ptr()->data()));
+  ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
+  const Array& pair = Array::Cast(Object::Handle(untag()->data()));
   return StringSpecializationCid::decode(Smi::Value(Smi::RawCast(pair.At(1))));
 }
 
 bool Function::is_sticky_specialization() const {
-  ASSERT(kind() == FunctionLayout::kIrregexpFunction);
-  const Array& pair = Array::Cast(Object::Handle(raw_ptr()->data()));
+  ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
+  const Array& pair = Array::Cast(Object::Handle(untag()->data()));
   return StickySpecialization::decode(Smi::Value(Smi::RawCast(pair.At(1))));
 }
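 // The Smi packing these two BitFields express, as a standalone sketch:
 // bit 0 holds the sticky flag and the higher bits hold the specialization
 // cid. Widths and helper names are illustrative, not the VM's.
 //
 //   #include <cstdint>
 //
 //   constexpr uintptr_t Encode(bool sticky, uintptr_t cid) {
 //     return (cid << 1) | (sticky ? 1u : 0u);
 //   }
 //   constexpr bool DecodeSticky(uintptr_t bits) { return (bits & 1u) != 0; }
 //   constexpr uintptr_t DecodeCid(uintptr_t bits) { return bits >> 1; }
 //
 //   static_assert(DecodeSticky(Encode(true, 42)), "sticky round-trips");
 //   static_assert(DecodeCid(Encode(true, 42)) == 42, "cid round-trips");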
 
 void Function::SetRegExpData(const RegExp& regexp,
                              intptr_t string_specialization_cid,
                              bool sticky) const {
-  ASSERT(kind() == FunctionLayout::kIrregexpFunction);
+  ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
   ASSERT(IsStringClassId(string_specialization_cid));
-  ASSERT(raw_ptr()->data() == Object::null());
+  ASSERT(untag()->data() == Object::null());
   const Array& pair = Array::Handle(Array::New(2, Heap::kOld));
   pair.SetAt(0, regexp);
   pair.SetAt(1, Smi::Handle(Smi::New(StickySpecialization::encode(sticky) |
@@ -7308,7 +7311,7 @@
 
 StringPtr Function::native_name() const {
   ASSERT(is_native());
-  const Object& obj = Object::Handle(raw_ptr()->data());
+  const Object& obj = Object::Handle(untag()->data());
   ASSERT(obj.IsArray());
   return String::RawCast(Array::Cast(obj).At(0));
 }
@@ -7325,10 +7328,10 @@
   // closure function.
   //
   // We therefore handle both cases.
-  const Object& old_data = Object::Handle(zone, raw_ptr()->data());
+  const Object& old_data = Object::Handle(zone, untag()->data());
   ASSERT(old_data.IsNull() ||
          (old_data.IsFunction() &&
-          Function::Handle(zone, Function::RawCast(old_data.raw()))
+          Function::Handle(zone, Function::RawCast(old_data.ptr()))
               .IsImplicitClosureFunction()));
 
   const Array& pair = Array::Handle(zone, Array::New(2, Heap::kOld));
@@ -7339,7 +7342,7 @@
 
 void Function::set_signature(const FunctionType& value) const {
  // The signature may be reset to null in AOT mode to save space.
-  raw_ptr()->set_signature(value.raw());
+  untag()->set_signature(value.ptr());
   if (!value.IsNull()) {
     ASSERT(NumImplicitParameters() == value.num_implicit_parameters());
     UpdateCachedDefaultTypeArguments(Thread::Current());
@@ -7348,66 +7351,66 @@
 
 void FunctionType::set_result_type(const AbstractType& value) const {
   ASSERT(!value.IsNull());
-  raw_ptr()->set_result_type(value.raw());
+  untag()->set_result_type(value.ptr());
 }
 
 AbstractTypePtr Function::ParameterTypeAt(intptr_t index) const {
   const Array& parameter_types =
-      Array::Handle(raw_ptr()->signature()->ptr()->parameter_types());
+      Array::Handle(untag()->signature()->untag()->parameter_types());
   return AbstractType::RawCast(parameter_types.At(index));
 }
 
 AbstractTypePtr FunctionType::ParameterTypeAt(intptr_t index) const {
-  const Array& parameter_types = Array::Handle(raw_ptr()->parameter_types());
+  const Array& parameter_types = Array::Handle(untag()->parameter_types());
   return AbstractType::RawCast(parameter_types.At(index));
 }
 
 void FunctionType::SetParameterTypeAt(intptr_t index,
                                       const AbstractType& value) const {
   ASSERT(!value.IsNull());
-  const Array& parameter_types = Array::Handle(raw_ptr()->parameter_types());
+  const Array& parameter_types = Array::Handle(untag()->parameter_types());
   parameter_types.SetAt(index, value);
 }
 
 void Function::set_parameter_types(const Array& value) const {
   ASSERT(value.IsNull() || value.Length() > 0);
-  raw_ptr()->signature()->ptr()->set_parameter_types(value.raw());
+  untag()->signature()->untag()->set_parameter_types(value.ptr());
 }
 
 void FunctionType::set_parameter_types(const Array& value) const {
   ASSERT(value.IsNull() || value.Length() > 0);
-  raw_ptr()->set_parameter_types(value.raw());
+  untag()->set_parameter_types(value.ptr());
 }
 
 StringPtr Function::ParameterNameAt(intptr_t index) const {
-  const Array& parameter_names = Array::Handle(raw_ptr()->parameter_names());
+  const Array& parameter_names = Array::Handle(untag()->parameter_names());
   return String::RawCast(parameter_names.At(index));
 }
 
 void Function::SetParameterNamesFrom(const FunctionType& signature) const {
-  raw_ptr()->set_parameter_names(signature.parameter_names());
+  untag()->set_parameter_names(signature.parameter_names());
 }
 
 StringPtr FunctionType::ParameterNameAt(intptr_t index) const {
-  const Array& parameter_names = Array::Handle(raw_ptr()->parameter_names());
+  const Array& parameter_names = Array::Handle(untag()->parameter_names());
   return String::RawCast(parameter_names.At(index));
 }
 
 void FunctionType::SetParameterNameAt(intptr_t index,
                                       const String& value) const {
   ASSERT(!value.IsNull() && value.IsSymbol());
-  const Array& parameter_names = Array::Handle(raw_ptr()->parameter_names());
+  const Array& parameter_names = Array::Handle(untag()->parameter_names());
   parameter_names.SetAt(index, value);
 }
 
 void Function::set_parameter_names(const Array& value) const {
   ASSERT(value.IsNull() || value.Length() > 0);
-  raw_ptr()->set_parameter_names(value.raw());
+  untag()->set_parameter_names(value.ptr());
 }
 
 void FunctionType::set_parameter_names(const Array& value) const {
   ASSERT(value.IsNull() || value.Length() > 0);
-  raw_ptr()->set_parameter_names(value.raw());
+  untag()->set_parameter_names(value.ptr());
 }
 
 void FunctionType::CreateNameArrayIncludingFlags(Heap::Space space) const {
@@ -7483,7 +7486,7 @@
   }
   intptr_t flag_mask;
   const intptr_t flag_index = GetRequiredFlagIndex(index, &flag_mask);
-  const Array& parameter_names = Array::Handle(raw_ptr()->parameter_names());
+  const Array& parameter_names = Array::Handle(untag()->parameter_names());
   if (flag_index >= parameter_names.Length()) {
     return false;
   }
@@ -7495,7 +7498,7 @@
 void FunctionType::SetIsRequiredAt(intptr_t index) const {
   intptr_t flag_mask;
   const intptr_t flag_index = GetRequiredFlagIndex(index, &flag_mask);
-  const Array& parameter_names = Array::Handle(raw_ptr()->parameter_names());
+  const Array& parameter_names = Array::Handle(untag()->parameter_names());
   ASSERT(flag_index < parameter_names.Length());
   const intptr_t flags =
       Smi::Value(Smi::RawCast(parameter_names.At(flag_index)));
@@ -7505,7 +7508,7 @@
 void FunctionType::TruncateUnusedParameterFlags() const {
   const intptr_t num_params = NumParameters();
   if (num_params == 0) return;
-  const Array& parameter_names = Array::Handle(raw_ptr()->parameter_names());
+  const Array& parameter_names = Array::Handle(untag()->parameter_names());
   if (parameter_names.Length() == num_params) {
     // No flag slots to truncate.
     return;
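 // A hedged sketch of the flag addressing used by GetRequiredFlagIndex and
 // the three functions above: packed flag words stored after the names, one
 // bit per parameter. The word width here is an assumption.
 //
 //   #include <cstddef>
 //   #include <cstdint>
 //   #include <vector>
 //
 //   constexpr std::size_t kFlagsPerWord = 30;  // Assumed Smi-safe payload.
 //
 //   bool IsRequiredBit(const std::vector<uint32_t>& names_then_flags,
 //                      std::size_t num_params, std::size_t index) {
 //     const std::size_t word = num_params + index / kFlagsPerWord;
 //     const uint32_t mask = uint32_t{1} << (index % kFlagsPerWord);
 //     if (word >= names_then_flags.size()) return false;  // Truncated.
 //     return (names_then_flags[word] & mask) != 0;
 //   }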
@@ -7526,15 +7529,15 @@
     function.SetParameterNamesFrom(*this);
     // Unless the function is a dispatcher, its number of type parameters
     // must match the number of type parameters in its signature.
-    ASSERT(function.kind() == FunctionLayout::kNoSuchMethodDispatcher ||
-           function.kind() == FunctionLayout::kInvokeFieldDispatcher ||
-           function.kind() == FunctionLayout::kDynamicInvocationForwarder ||
+    ASSERT(function.kind() == UntaggedFunction::kNoSuchMethodDispatcher ||
+           function.kind() == UntaggedFunction::kInvokeFieldDispatcher ||
+           function.kind() == UntaggedFunction::kDynamicInvocationForwarder ||
            function.NumTypeParameters() == NumTypeParameters());
   }
 }
 
 void FunctionType::set_type_parameters(const TypeArguments& value) const {
-  raw_ptr()->set_type_parameters(value.raw());
+  untag()->set_type_parameters(value.ptr());
 }
 
 static void ReportTooManyTypeParameters(const Function& function) {
@@ -7556,22 +7559,23 @@
 
 void FunctionType::SetNumParentTypeArguments(intptr_t value) const {
   ASSERT(value >= 0);
-  if (!Utils::IsUint(FunctionTypeLayout::kMaxParentTypeArgumentsBits, value)) {
+  if (!Utils::IsUint(UntaggedFunctionType::kMaxParentTypeArgumentsBits,
+                     value)) {
     ReportTooManyTypeParameters(*this);
   }
-  const uint32_t* original = &raw_ptr()->packed_fields_;
+  const uint32_t* original = &untag()->packed_fields_;
   StoreNonPointer(original,
-                  FunctionTypeLayout::PackedNumParentTypeArguments::update(
+                  UntaggedFunctionType::PackedNumParentTypeArguments::update(
                       value, *original));
 }
 
 void Function::SetNumTypeParameters(intptr_t value) const {
   ASSERT(value >= 0);
-  if (!Utils::IsUint(FunctionLayout::kMaxTypeParametersBits, value)) {
+  if (!Utils::IsUint(UntaggedFunction::kMaxTypeParametersBits, value)) {
     ReportTooManyTypeParameters(*this);
   }
-  const uint32_t* original = &raw_ptr()->packed_fields_;
-  StoreNonPointer(original, FunctionLayout::PackedNumTypeParameters::update(
+  const uint32_t* original = &untag()->packed_fields_;
+  StoreNonPointer(original, UntaggedFunction::PackedNumTypeParameters::update(
                                 value, *original));
 }
 
@@ -7614,7 +7618,7 @@
   String& type_param_name = thread->StringHandle();
   Function& function = thread->FunctionHandle();
 
-  function = this->raw();
+  function = this->ptr();
   while (!function.IsNull()) {
     if (function.signature() != FunctionType::null()) {
       type_params = function.type_parameters();
@@ -7624,7 +7628,7 @@
           type_param ^= type_params.TypeAt(i);
           type_param_name = type_param.name();
           if (type_param_name.Equals(type_name)) {
-            return type_param.raw();
+            return type_param.ptr();
           }
         }
       }
@@ -7642,18 +7646,18 @@
   return TypeParameter::null();
 }
 
-void Function::set_kind(FunctionLayout::Kind value) const {
-  set_kind_tag(KindBits::update(value, raw_ptr()->kind_tag_));
+void Function::set_kind(UntaggedFunction::Kind value) const {
+  set_kind_tag(KindBits::update(value, untag()->kind_tag_));
 }
 
-void Function::set_modifier(FunctionLayout::AsyncModifier value) const {
-  set_kind_tag(ModifierBits::update(value, raw_ptr()->kind_tag_));
+void Function::set_modifier(UntaggedFunction::AsyncModifier value) const {
+  set_kind_tag(ModifierBits::update(value, untag()->kind_tag_));
 }
 
 void Function::set_recognized_kind(MethodRecognizer::Kind value) const {
   // Prevent multiple settings of kind.
   ASSERT((value == MethodRecognizer::kUnknown) || !IsRecognized());
-  set_kind_tag(RecognizedBits::update(value, raw_ptr()->kind_tag_));
+  set_kind_tag(RecognizedBits::update(value, untag()->kind_tag_));
 }
 
 void Function::set_token_pos(TokenPosition token_pos) const {
@@ -7661,16 +7665,16 @@
   UNREACHABLE();
 #else
   ASSERT(!token_pos.IsClassifying() || IsMethodExtractor());
-  StoreNonPointer(&raw_ptr()->token_pos_, token_pos);
+  StoreNonPointer(&untag()->token_pos_, token_pos);
 #endif
 }
 
 void Function::set_kind_tag(uint32_t value) const {
-  StoreNonPointer(&raw_ptr()->kind_tag_, static_cast<uint32_t>(value));
+  StoreNonPointer(&untag()->kind_tag_, static_cast<uint32_t>(value));
 }
 
 void Function::set_packed_fields(uint32_t packed_fields) const {
-  StoreNonPointer(&raw_ptr()->packed_fields_, packed_fields);
+  StoreNonPointer(&untag()->packed_fields_, packed_fields);
 }
 
 bool Function::IsOptimizable() const {
@@ -7735,22 +7739,22 @@
 }
 
 intptr_t Function::NumImplicitParameters() const {
-  const FunctionLayout::Kind k = kind();
-  if (k == FunctionLayout::kConstructor) {
+  const UntaggedFunction::Kind k = kind();
+  if (k == UntaggedFunction::kConstructor) {
     // Type arguments for factory; instance for generative constructor.
     return 1;
   }
-  if ((k == FunctionLayout::kClosureFunction) ||
-      (k == FunctionLayout::kImplicitClosureFunction) ||
-      (k == FunctionLayout::kFfiTrampoline)) {
+  if ((k == UntaggedFunction::kClosureFunction) ||
+      (k == UntaggedFunction::kImplicitClosureFunction) ||
+      (k == UntaggedFunction::kFfiTrampoline)) {
     return 1;  // Closure object.
   }
   if (!is_static()) {
     // Closure functions defined inside instance (i.e. non-static) functions are
     // marked as non-static, but they do not have a receiver.
     // Closures are handled above.
-    ASSERT((k != FunctionLayout::kClosureFunction) &&
-           (k != FunctionLayout::kImplicitClosureFunction));
+    ASSERT((k != UntaggedFunction::kClosureFunction) &&
+           (k != UntaggedFunction::kImplicitClosureFunction));
     return 1;  // Receiver.
   }
   return 0;  // No implicit parameters.
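 // NumImplicitParameters() above reduces to a small kind-driven count; a
 // hedged sketch (enum values illustrative):
 //
 //   enum class FnKind { kConstructor, kClosure, kImplicitClosure, kStatic,
 //                       kInstance };
 //
 //   int ImplicitParams(FnKind k) {
 //     switch (k) {
 //       case FnKind::kConstructor:      return 1;  // Type args or receiver.
 //       case FnKind::kClosure:
 //       case FnKind::kImplicitClosure:  return 1;  // Closure object.
 //       case FnKind::kInstance:         return 1;  // Receiver.
 //       default:                        return 0;
 //     }
 //   }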
@@ -7933,7 +7937,7 @@
   const intptr_t kNumParentTypeArgs = function.NumParentTypeArguments();
   const intptr_t kNumTypeArgs = kNumCurrentTypeArgs + kNumParentTypeArgs;
   // Non-generic functions don't receive type arguments.
-  if (kNumTypeArgs == 0) return Object::empty_type_arguments().raw();
+  if (kNumTypeArgs == 0) return Object::empty_type_arguments().ptr();
  // Closure functions require that the receiver be provided (and be a closure).
   ASSERT(!function.IsClosureFunction() || receiver.IsClosure());
 
@@ -7947,7 +7951,7 @@
   // We don't try to instantiate the parent type parameters to their bounds
   // if not provided or check any closed-over type arguments against the parent
   // type parameter bounds (since they have been type checked already).
-  if (kNumCurrentTypeArgs == 0) return parent_type_args.raw();
+  if (kNumCurrentTypeArgs == 0) return parent_type_args.ptr();
 
   auto& function_type_args = TypeArguments::Handle(zone);
   // First check for delayed type arguments before using either provided or
@@ -7957,7 +7961,7 @@
     const auto& closure = Closure::Cast(receiver);
     function_type_args = closure.delayed_type_arguments();
     has_delayed_type_args =
-        function_type_args.raw() != Object::empty_type_arguments().raw();
+        function_type_args.ptr() != Object::empty_type_arguments().ptr();
   }
 
   if (args_desc.TypeArgsLen() > 0) {
@@ -7985,10 +7989,10 @@
             instantiator_type_args, parent_type_args);
         break;
       case Function::DefaultTypeArgumentsKind::kSharesInstantiatorTypeArguments:
-        function_type_args = instantiator_type_args.raw();
+        function_type_args = instantiator_type_args.ptr();
         break;
       case Function::DefaultTypeArgumentsKind::kSharesFunctionTypeArguments:
-        function_type_args = parent_type_args.raw();
+        function_type_args = parent_type_args.ptr();
         break;
     }
   }
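 // The dispatch above, restated as a hedged sketch: the cached kind says
 // whether the default function type arguments can reuse an existing vector
 // or must be instantiated. The enum mirrors the case labels; the string
 // results are purely illustrative.
 //
 //   enum class DefaultKind {
 //     kIsInstantiated,
 //     kNeedsInstantiation,
 //     kSharesInstantiatorTypeArguments,
 //     kSharesFunctionTypeArguments,
 //   };
 //
 //   const char* Source(DefaultKind k) {
 //     switch (k) {
 //       case DefaultKind::kSharesInstantiatorTypeArguments:
 //         return "reuse instantiator vector";
 //       case DefaultKind::kSharesFunctionTypeArguments:
 //         return "reuse parent function vector";
 //       case DefaultKind::kNeedsInstantiation:
 //         return "instantiate defaults against both vectors";
 //       case DefaultKind::kIsInstantiated:
 //         return "use defaults as-is";
 //     }
 //     return "";
 //   }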
@@ -8013,7 +8017,7 @@
       return receiver.GetTypeArguments();
     }
   }
-  return Object::empty_type_arguments().raw();
+  return Object::empty_type_arguments().ptr();
 }
 
 ObjectPtr Function::DoArgumentTypesMatch(
@@ -8092,7 +8096,7 @@
     auto& bound = AbstractType::Handle(zone);
     for (intptr_t i = 0; i < kNumLocalTypeArgs; i++) {
       parameter ^= params.TypeAt(i);
-      type = parameter.raw();
+      type = parameter.ptr();
       bound = parameter.bound();
       // Only perform non-covariant checks where the bound is not the top type.
       if (parameter.IsGenericCovariantImpl() || bound.IsTopTypeForSubtyping()) {
@@ -8407,7 +8411,7 @@
   }
 
   // Canonicalization is not part of instantiation.
-  return sig.raw();
+  return sig.ptr();
 }
 
 // Checks if the type of the specified parameter of this signature is a
@@ -8521,7 +8525,7 @@
     found_param_name = false;
     for (intptr_t j = num_fixed_params; j < num_params; j++) {
       ASSERT(String::Handle(zone, ParameterNameAt(j)).IsSymbol());
-      if (ParameterNameAt(j) == other_param_name.raw()) {
+      if (ParameterNameAt(j) == other_param_name.ptr()) {
         found_param_name = true;
         if (!IsContravariantParameter(j, other, i, space)) {
           return false;
@@ -8544,7 +8548,7 @@
         bool found = false;
         for (intptr_t i = other_num_fixed_params; i < other_num_params; i++) {
           ASSERT(String::Handle(zone, other.ParameterNameAt(i)).IsSymbol());
-          if (other.ParameterNameAt(i) == param_name.raw()) {
+          if (other.ParameterNameAt(i) == param_name.ptr()) {
             found = true;
             if (!other.IsRequiredAt(i)) {
               return false;
@@ -8569,9 +8573,9 @@
 
 bool Function::IsImplicitStaticClosureFunction(FunctionPtr func) {
   NoSafepointScope no_safepoint;
-  uint32_t kind_tag = func->ptr()->kind_tag_;
+  uint32_t kind_tag = func->untag()->kind_tag_;
   return (KindBits::decode(kind_tag) ==
-          FunctionLayout::kImplicitClosureFunction) &&
+          UntaggedFunction::kImplicitClosureFunction) &&
          StaticBit::decode(kind_tag);
 }
 
@@ -8584,7 +8588,7 @@
 
 FunctionPtr Function::New(const FunctionType& signature,
                           const String& name,
-                          FunctionLayout::Kind kind,
+                          UntaggedFunction::Kind kind,
                           bool is_static,
                           bool is_const,
                           bool is_abstract,
@@ -8601,7 +8605,7 @@
   result.set_kind_tag(0);  // Ensure determinism of uninitialized bits.
   result.set_kind(kind);
   result.set_recognized_kind(MethodRecognizer::kUnknown);
-  result.set_modifier(FunctionLayout::kNoModifier);
+  result.set_modifier(UntaggedFunction::kNoModifier);
   result.set_is_static(is_static);
   result.set_is_const(is_const);
   result.set_is_abstract(is_abstract);
@@ -8630,12 +8634,12 @@
   result.set_is_inlinable(true);
   result.reset_unboxed_parameters_and_return();
   result.SetInstructionsSafe(StubCode::LazyCompile());
-  if (kind == FunctionLayout::kClosureFunction ||
-      kind == FunctionLayout::kImplicitClosureFunction) {
+  if (kind == UntaggedFunction::kClosureFunction ||
+      kind == UntaggedFunction::kImplicitClosureFunction) {
     ASSERT(space == Heap::kOld);
     const ClosureData& data = ClosureData::Handle(ClosureData::New());
     result.set_data(data);
-  } else if (kind == FunctionLayout::kFfiTrampoline) {
+  } else if (kind == UntaggedFunction::kFfiTrampoline) {
     const FfiTrampolineData& data =
         FfiTrampolineData::Handle(FfiTrampolineData::New());
     result.set_data(data);
@@ -8659,23 +8663,23 @@
     signature.set_num_implicit_parameters(result.NumImplicitParameters());
     result.set_signature(signature);
   } else {
-    ASSERT(kind == FunctionLayout::kFfiTrampoline);
+    ASSERT(kind == UntaggedFunction::kFfiTrampoline);
   }
-  return result.raw();
+  return result.ptr();
 }
 
-FunctionPtr Function::NewClosureFunctionWithKind(FunctionLayout::Kind kind,
+FunctionPtr Function::NewClosureFunctionWithKind(UntaggedFunction::Kind kind,
                                                  const String& name,
                                                  const Function& parent,
                                                  TokenPosition token_pos,
                                                  const Object& owner) {
-  ASSERT((kind == FunctionLayout::kClosureFunction) ||
-         (kind == FunctionLayout::kImplicitClosureFunction));
+  ASSERT((kind == UntaggedFunction::kClosureFunction) ||
+         (kind == UntaggedFunction::kImplicitClosureFunction));
   ASSERT(!parent.IsNull());
   ASSERT(!owner.IsNull());
   const FunctionType& signature = FunctionType::Handle(FunctionType::New(
-      kind == FunctionLayout::kClosureFunction ? parent.NumTypeArguments()
-                                               : 0));
+      kind == UntaggedFunction::kClosureFunction ? parent.NumTypeArguments()
+                                                 : 0));
   const Function& result = Function::Handle(
       Function::New(signature, name, kind,
                     /* is_static = */ parent.is_static(),
@@ -8684,7 +8688,7 @@
                     /* is_external = */ false,
                     /* is_native = */ false, owner, token_pos));
   result.set_parent_function(parent);
-  return result.raw();
+  return result.ptr();
 }
 
 FunctionPtr Function::NewClosureFunction(const String& name,
@@ -8692,7 +8696,7 @@
                                          TokenPosition token_pos) {
   // Use the owner defining the parent function and not the class containing it.
   const Object& parent_owner = Object::Handle(parent.RawOwner());
-  return NewClosureFunctionWithKind(FunctionLayout::kClosureFunction, name,
+  return NewClosureFunctionWithKind(UntaggedFunction::kClosureFunction, name,
                                     parent, token_pos, parent_owner);
 }
 
@@ -8701,7 +8705,7 @@
                                                  TokenPosition token_pos) {
   // Use the owner defining the parent function and not the class containing it.
   const Object& parent_owner = Object::Handle(parent.RawOwner());
-  return NewClosureFunctionWithKind(FunctionLayout::kImplicitClosureFunction,
+  return NewClosureFunctionWithKind(UntaggedFunction::kImplicitClosureFunction,
                                     name, parent, token_pos, parent_owner);
 }
 
@@ -8838,7 +8842,7 @@
   closure_function.set_signature(closure_signature);
   set_implicit_closure_function(closure_function);
   ASSERT(closure_function.IsImplicitClosureFunction());
-  return closure_function.raw();
+  return closure_function.ptr();
 #endif  // defined(DART_PRECOMPILED_RUNTIME)
 }
 
@@ -8972,7 +8976,7 @@
   auto& target =
       Function::Handle(zone, Resolver::ResolveFunction(zone, owner, func_name));
 
-  if (!target.IsNull() && (target.raw() != parent.raw())) {
+  if (!target.IsNull() && (target.ptr() != parent.ptr())) {
     DEBUG_ASSERT(Isolate::Current()->HasAttemptedReload());
     if ((target.is_static() != parent.is_static()) ||
         (target.kind() != parent.kind())) {
@@ -8980,7 +8984,7 @@
     }
   }
 
-  return target.raw();
+  return target.ptr();
 }
 
 intptr_t Function::ComputeClosureHash() const {
@@ -9088,21 +9092,21 @@
 }
 
 ClassPtr Function::Owner() const {
-  ASSERT(raw_ptr()->owner() != Object::null());
-  if (raw_ptr()->owner()->IsClass()) {
-    return Class::RawCast(raw_ptr()->owner());
+  ASSERT(untag()->owner() != Object::null());
+  if (untag()->owner()->IsClass()) {
+    return Class::RawCast(untag()->owner());
   }
-  const Object& obj = Object::Handle(raw_ptr()->owner());
+  const Object& obj = Object::Handle(untag()->owner());
   ASSERT(obj.IsPatchClass());
   return PatchClass::Cast(obj).patched_class();
 }
 
 ClassPtr Function::origin() const {
-  ASSERT(raw_ptr()->owner() != Object::null());
-  if (raw_ptr()->owner()->IsClass()) {
-    return Class::RawCast(raw_ptr()->owner());
+  ASSERT(untag()->owner() != Object::null());
+  if (untag()->owner()->IsClass()) {
+    return Class::RawCast(untag()->owner());
   }
-  const Object& obj = Object::Handle(raw_ptr()->owner());
+  const Object& obj = Object::Handle(untag()->owner());
   ASSERT(obj.IsPatchClass());
   return PatchClass::Cast(obj).origin_class();
 }
@@ -9111,7 +9115,7 @@
 #if defined(DART_PRECOMPILED_RUNTIME)
   UNREACHABLE();
 #else
-  StoreNonPointer(&raw_ptr()->kernel_offset_, src.raw_ptr()->kernel_offset_);
+  StoreNonPointer(&untag()->kernel_offset_, src.untag()->kernel_offset_);
 #endif
 }
 
@@ -9136,7 +9140,7 @@
 ScriptPtr Function::script() const {
   // NOTE(turnidge): If you update this function, you probably want to
   // update Class::PatchFieldsAndFunctions() at the same time.
-  const Object& data = Object::Handle(raw_ptr()->data());
+  const Object& data = Object::Handle(untag()->data());
   if (IsDynamicInvocationForwarder()) {
     const auto& forwarding_target = Function::Handle(ForwardingTarget());
     return forwarding_target.script();
@@ -9148,7 +9152,7 @@
   if (data.IsArray()) {
     Object& script = Object::Handle(Array::Cast(data).At(0));
     if (script.IsScript()) {
-      return Script::Cast(script).raw();
+      return Script::Cast(script).ptr();
     }
   }
   if (token_pos() == TokenPosition::kMinSource) {
@@ -9156,10 +9160,10 @@
     // eval functions having token position 0.
     const Script& script = Script::Handle(eval_script());
     if (!script.IsNull()) {
-      return script.raw();
+      return script.ptr();
     }
   }
-  const Object& obj = Object::Handle(raw_ptr()->owner());
+  const Object& obj = Object::Handle(untag()->owner());
   if (obj.IsPatchClass()) {
     return PatchClass::Cast(obj).script();
   }
@@ -9171,7 +9175,7 @@
 }
 
 ExternalTypedDataPtr Function::KernelData() const {
-  Object& data = Object::Handle(raw_ptr()->data());
+  Object& data = Object::Handle(untag()->data());
   if (data.IsArray()) {
     Object& script = Object::Handle(Array::Cast(data).At(0));
     if (script.IsScript()) {
@@ -9184,7 +9188,7 @@
     return parent.KernelData();
   }
 
-  const Object& obj = Object::Handle(raw_ptr()->owner());
+  const Object& obj = Object::Handle(untag()->owner());
   if (obj.IsClass()) {
     Library& lib = Library::Handle(Class::Cast(obj).library());
     return lib.kernel_data();
@@ -9198,7 +9202,7 @@
       IsFfiTrampoline()) {
     return 0;
   }
-  Object& data = Object::Handle(raw_ptr()->data());
+  Object& data = Object::Handle(untag()->data());
   if (data.IsArray()) {
     Object& script = Object::Handle(Array::Cast(data).At(0));
     if (script.IsScript()) {
@@ -9211,7 +9215,7 @@
     return parent.KernelDataProgramOffset();
   }
 
-  const Object& obj = Object::Handle(raw_ptr()->owner());
+  const Object& obj = Object::Handle(untag()->owner());
   if (obj.IsClass()) {
     Library& lib = Library::Handle(Class::Cast(obj).library());
     return lib.kernel_offset();
@@ -9277,7 +9281,7 @@
                          BaseTextBuffer* printer) const {
   // If |this| is the generated asynchronous body closure, use the
   // name of the parent function.
-  Function& fun = Function::Handle(raw());
+  Function& fun = Function::Handle(ptr());
 
   if (params.disambiguate_names) {
     if (fun.IsInvokeFieldDispatcher()) {
@@ -9299,7 +9303,7 @@
         !fun.IsSyncGenerator()) {
       // Parent function is not the generator of an asynchronous body closure,
       // start at |this|.
-      fun = raw();
+      fun = ptr();
     }
   }
   if (IsClosureFunction()) {
@@ -9317,21 +9321,21 @@
         printer->AddString(".");
       }
       if (params.disambiguate_names &&
-          fun.name() == Symbols::AnonymousClosure().raw()) {
+          fun.name() == Symbols::AnonymousClosure().ptr()) {
         printer->Printf("<anonymous closure @%" Pd ">", fun.token_pos().Pos());
       } else {
         printer->AddString(fun.NameCString(params.name_visibility));
       }
      // If we skipped a rewritten async/async*/sync* body, append a suffix
      // to the end of the name.
-      if (fun.raw() != raw() && params.disambiguate_names) {
+      if (fun.ptr() != ptr() && params.disambiguate_names) {
         printer->AddString("{body}");
       }
       return;
     }
   }
 
-  if (fun.kind() == FunctionLayout::kConstructor) {
+  if (fun.kind() == UntaggedFunction::kConstructor) {
     printer->AddString("new ");
   } else if (params.include_class_name) {
     const Class& cls = Class::Handle(Owner());
@@ -9348,7 +9352,7 @@
 
  // If we skipped a rewritten async/async*/sync* body, append a suffix
  // to the end of the name.
-  if (fun.raw() != raw() && params.disambiguate_names) {
+  if (fun.ptr() != ptr() && params.disambiguate_names) {
     printer->AddString("{body}");
   }
 
@@ -9411,7 +9415,7 @@
     // token semicolon belongs to the assignment so we skip it.
     const String& src = String::Handle(func_script.Source());
     if (src.IsNull() || src.Length() == 0) {
-      return Symbols::OptimizedOut().raw();
+      return Symbols::OptimizedOut().ptr();
     }
     uint16_t end_char = src.CharAt(end_token_pos().Pos());
     if ((end_char == ',') ||  // Case 1.
@@ -9493,7 +9497,7 @@
       ICData& ic_data = ICData::ZoneHandle(zone);
       ic_data ^= saved_ic_data.At(i);
       if (clone_ic_data) {
-        const ICData& original_ic_data = ICData::Handle(zone, ic_data.raw());
+        const ICData& original_ic_data = ICData::Handle(zone, ic_data.ptr());
         ic_data = ICData::Clone(ic_data);
         ic_data.SetOriginal(original_ic_data);
       }
@@ -9507,11 +9511,11 @@
 }
 
 void Function::set_ic_data_array(const Array& value) const {
-  raw_ptr()->set_ic_data_array<std::memory_order_release>(value.raw());
+  untag()->set_ic_data_array<std::memory_order_release>(value.ptr());
 }
 
 ArrayPtr Function::ic_data_array() const {
-  return raw_ptr()->ic_data_array<std::memory_order_acquire>();
+  return untag()->ic_data_array<std::memory_order_acquire>();
 }
 
 void Function::ClearICDataArray() const {
@@ -9524,7 +9528,7 @@
   for (intptr_t i = 1; i < array.Length(); i++) {
     ic_data ^= array.At(i);
     if (ic_data.deopt_id() == deopt_id) {
-      return ic_data.raw();
+      return ic_data.ptr();
     }
   }
   return ICData::null();
@@ -9594,7 +9598,7 @@
   }
   // Compiling in unoptimized mode should never fail if there are no errors.
   ASSERT(HasCode());
-  ASSERT(ForceOptimize() || unoptimized_code() == result.raw());
+  ASSERT(ForceOptimize() || unoptimized_code() == result.ptr());
   return CurrentCode();
 }
 
@@ -9641,7 +9645,7 @@
   // Issue(dartbug.com/42719):
   // Right now the metadata of _Closure.call says there are no dynamic callers -
   // even though there can be. To be conservative we return true.
-  if ((name() == Symbols::GetCall().raw() || name() == Symbols::Call().raw()) &&
+  if ((name() == Symbols::GetCall().ptr() || name() == Symbols::Call().ptr()) &&
       Class::IsClosureClass(Owner())) {
     return true;
   }
@@ -9712,43 +9716,43 @@
     buffer.AddString(" abstract");
   }
   switch (kind()) {
-    case FunctionLayout::kRegularFunction:
-    case FunctionLayout::kClosureFunction:
-    case FunctionLayout::kImplicitClosureFunction:
-    case FunctionLayout::kGetterFunction:
-    case FunctionLayout::kSetterFunction:
+    case UntaggedFunction::kRegularFunction:
+    case UntaggedFunction::kClosureFunction:
+    case UntaggedFunction::kImplicitClosureFunction:
+    case UntaggedFunction::kGetterFunction:
+    case UntaggedFunction::kSetterFunction:
       break;
-    case FunctionLayout::kConstructor:
+    case UntaggedFunction::kConstructor:
       buffer.AddString(is_static() ? " factory" : " constructor");
       break;
-    case FunctionLayout::kImplicitGetter:
+    case UntaggedFunction::kImplicitGetter:
       buffer.AddString(" getter");
       break;
-    case FunctionLayout::kImplicitSetter:
+    case UntaggedFunction::kImplicitSetter:
       buffer.AddString(" setter");
       break;
-    case FunctionLayout::kImplicitStaticGetter:
+    case UntaggedFunction::kImplicitStaticGetter:
       buffer.AddString(" static-getter");
       break;
-    case FunctionLayout::kFieldInitializer:
+    case UntaggedFunction::kFieldInitializer:
       buffer.AddString(" field-initializer");
       break;
-    case FunctionLayout::kMethodExtractor:
+    case UntaggedFunction::kMethodExtractor:
       buffer.AddString(" method-extractor");
       break;
-    case FunctionLayout::kNoSuchMethodDispatcher:
+    case UntaggedFunction::kNoSuchMethodDispatcher:
       buffer.AddString(" no-such-method-dispatcher");
       break;
-    case FunctionLayout::kDynamicInvocationForwarder:
+    case UntaggedFunction::kDynamicInvocationForwarder:
       buffer.AddString(" dynamic-invocation-forwarder");
       break;
-    case FunctionLayout::kInvokeFieldDispatcher:
+    case UntaggedFunction::kInvokeFieldDispatcher:
       buffer.AddString(" invoke-field-dispatcher");
       break;
-    case FunctionLayout::kIrregexpFunction:
+    case UntaggedFunction::kIrregexpFunction:
       buffer.AddString(" irregexp-function");
       break;
-    case FunctionLayout::kFfiTrampoline:
+    case UntaggedFunction::kFfiTrampoline:
       buffer.AddString(" ffi-trampoline-function");
       break;
     default:
@@ -9769,7 +9773,7 @@
 }
 
 void FunctionType::set_packed_fields(uint32_t packed_fields) const {
-  StoreNonPointer(&raw_ptr()->packed_fields_, packed_fields);
+  StoreNonPointer(&untag()->packed_fields_, packed_fields);
 }
 
 intptr_t FunctionType::NumParameters() const {
@@ -9778,31 +9782,32 @@
 
 void FunctionType::set_num_implicit_parameters(intptr_t value) const {
   ASSERT(value >= 0);
-  ASSERT(Utils::IsUint(FunctionTypeLayout::kMaxImplicitParametersBits, value));
-  const uint32_t* original = &raw_ptr()->packed_fields_;
+  ASSERT(
+      Utils::IsUint(UntaggedFunctionType::kMaxImplicitParametersBits, value));
+  const uint32_t* original = &untag()->packed_fields_;
   StoreNonPointer(original,
-                  FunctionTypeLayout::PackedNumImplicitParameters::update(
+                  UntaggedFunctionType::PackedNumImplicitParameters::update(
                       value, *original));
 }
 
 void ClosureData::set_default_type_arguments(const TypeArguments& value) const {
-  raw_ptr()->set_default_type_arguments(value.raw());
+  untag()->set_default_type_arguments(value.ptr());
 }
 
 intptr_t ClosureData::default_type_arguments_info() const {
-  const SmiPtr value = raw_ptr()->default_type_arguments_info();
+  const SmiPtr value = untag()->default_type_arguments_info();
   if (value == Smi::null()) {
     static_assert(Function::DefaultTypeArgumentsKindField::decode(0) ==
                       Function::DefaultTypeArgumentsKind::kInvalid,
                   "Returning valid value for null Smi");
     return 0;
   }
-  return Smi::Value(raw_ptr()->default_type_arguments_info());
+  return Smi::Value(untag()->default_type_arguments_info());
 }
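
Aside on the static_assert above: the bit-field encoding is deliberately arranged so that the all-zero value decodes to the invalid kind, which is what lets a null Smi be returned as 0 without a separate presence flag. A minimal standalone sketch of that invariant, with hypothetical names (DefaultKind, KindField) standing in for the VM's DefaultTypeArgumentsKindField:

#include <cassert>
#include <cstdint>

enum class DefaultKind : uint8_t {
  kInvalid = 0,  // Deliberately 0: an unset/zero word decodes to kInvalid.
  kSharesTypeArguments = 1,
  kNeedsInstantiation = 2,
};

// Hypothetical layout: the low two bits hold the kind, the rest is payload.
struct KindField {
  static constexpr DefaultKind decode(uintptr_t packed) {
    return static_cast<DefaultKind>(packed & 0x3);
  }
  static constexpr uintptr_t encode(DefaultKind kind, uintptr_t payload) {
    return (payload << 2) | static_cast<uintptr_t>(kind);
  }
};

// The invariant asserted above: an all-zero word decodes to the invalid
// kind, so "nothing recorded yet" and "invalid" coincide for free.
static_assert(KindField::decode(0) == DefaultKind::kInvalid,
              "zero must decode to the invalid kind");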
 
 void ClosureData::set_default_type_arguments_info(intptr_t value) const {
   ASSERT(Smi::IsValid(value));
-  raw_ptr()->set_default_type_arguments_info(Smi::New(value));
+  untag()->set_default_type_arguments_info(Smi::New(value));
 }
 
 ClosureDataPtr ClosureData::New() {
@@ -9840,9 +9845,9 @@
 
 void Function::set_num_fixed_parameters(intptr_t value) const {
   ASSERT(value >= 0);
-  ASSERT(Utils::IsUint(FunctionLayout::kMaxFixedParametersBits, value));
-  const uint32_t* original = &raw_ptr()->packed_fields_;
-  StoreNonPointer(original, FunctionLayout::PackedNumFixedParameters::update(
+  ASSERT(Utils::IsUint(UntaggedFunction::kMaxFixedParametersBits, value));
+  const uint32_t* original = &untag()->packed_fields_;
+  StoreNonPointer(original, UntaggedFunction::PackedNumFixedParameters::update(
                                 value, *original));
   // Also store in signature.
   FunctionType::Handle(signature()).set_num_fixed_parameters(value);
@@ -9850,41 +9855,43 @@
 
 void FunctionType::set_num_fixed_parameters(intptr_t value) const {
   ASSERT(value >= 0);
-  ASSERT(Utils::IsUint(FunctionTypeLayout::kMaxFixedParametersBits, value));
-  const uint32_t* original = &raw_ptr()->packed_fields_;
+  ASSERT(Utils::IsUint(UntaggedFunctionType::kMaxFixedParametersBits, value));
+  const uint32_t* original = &untag()->packed_fields_;
   StoreNonPointer(
       original,
-      FunctionTypeLayout::PackedNumFixedParameters::update(value, *original));
+      UntaggedFunctionType::PackedNumFixedParameters::update(value, *original));
 }
 
 void Function::SetNumOptionalParameters(intptr_t value,
                                         bool are_optional_positional) const {
-  ASSERT(Utils::IsUint(FunctionLayout::kMaxOptionalParametersBits, value));
-  uint32_t packed_fields = raw_ptr()->packed_fields_;
-  packed_fields = FunctionLayout::PackedHasNamedOptionalParameters::update(
+  ASSERT(Utils::IsUint(UntaggedFunction::kMaxOptionalParametersBits, value));
+  uint32_t packed_fields = untag()->packed_fields_;
+  packed_fields = UntaggedFunction::PackedHasNamedOptionalParameters::update(
       (value > 0) && !are_optional_positional, packed_fields);
-  packed_fields =
-      FunctionLayout::PackedNumOptionalParameters::update(value, packed_fields);
-  StoreNonPointer(&raw_ptr()->packed_fields_, packed_fields);
+  packed_fields = UntaggedFunction::PackedNumOptionalParameters::update(
+      value, packed_fields);
+  StoreNonPointer(&untag()->packed_fields_, packed_fields);
   // Also store in signature.
   FunctionType::Handle(signature())
       .SetNumOptionalParameters(value, are_optional_positional);
 }
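
Aside: every Packed*::update(...) call in these setters is the same read-modify-write over a single packed_fields_ word. A self-contained sketch of the bit-field idiom they rely on (hypothetical field widths; the VM's real BitField template in runtime/vm/bitfield.h differs in detail):

#include <cassert>
#include <cstdint>

// Minimal BitField<position, size>: decode/update a slice of a 32-bit word.
template <int kPosition, int kSize>
struct BitField {
  static constexpr uint32_t kMask = (1u << kSize) - 1;
  static uint32_t decode(uint32_t word) {
    return (word >> kPosition) & kMask;
  }
  // Returns `word` with the slice replaced by `value`; other bits untouched.
  static uint32_t update(uint32_t value, uint32_t word) {
    assert(value <= kMask);
    return (word & ~(kMask << kPosition)) | (value << kPosition);
  }
};

// A layout mirroring the shape (not the exact widths) of packed_fields_:
using HasNamedOptionalParams = BitField<0, 1>;
using NumOptionalParams = BitField<1, 14>;
using NumFixedParams = BitField<15, 14>;

int main() {
  uint32_t packed = 0;
  packed = NumFixedParams::update(2, packed);
  packed = NumOptionalParams::update(3, packed);
  packed = HasNamedOptionalParams::update(1, packed);
  assert(NumFixedParams::decode(packed) == 2);
  assert(NumOptionalParams::decode(packed) == 3);
  assert(HasNamedOptionalParams::decode(packed) == 1);
  return 0;
}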
 
 void FfiTrampolineData::set_callback_target(const Function& value) const {
-  raw_ptr()->set_callback_target(value.raw());
+  untag()->set_callback_target(value.ptr());
 }
 
 void FunctionType::SetNumOptionalParameters(
     intptr_t value,
     bool are_optional_positional) const {
-  ASSERT(Utils::IsUint(FunctionTypeLayout::kMaxOptionalParametersBits, value));
-  uint32_t packed_fields = raw_ptr()->packed_fields_;
-  packed_fields = FunctionTypeLayout::PackedHasNamedOptionalParameters::update(
-      (value > 0) && !are_optional_positional, packed_fields);
-  packed_fields = FunctionTypeLayout::PackedNumOptionalParameters::update(
+  ASSERT(
+      Utils::IsUint(UntaggedFunctionType::kMaxOptionalParametersBits, value));
+  uint32_t packed_fields = untag()->packed_fields_;
+  packed_fields =
+      UntaggedFunctionType::PackedHasNamedOptionalParameters::update(
+          (value > 0) && !are_optional_positional, packed_fields);
+  packed_fields = UntaggedFunctionType::PackedNumOptionalParameters::update(
       value, packed_fields);
-  StoreNonPointer(&raw_ptr()->packed_fields_, packed_fields);
+  StoreNonPointer(&untag()->packed_fields_, packed_fields);
 }
 
 FunctionTypePtr FunctionType::New(Heap::Space space) {
@@ -9905,17 +9912,17 @@
   result.SetNumOptionalParameters(0, false);
   result.set_nullability(nullability);
   result.SetHash(0);
-  result.StoreNonPointer(&result.raw_ptr()->type_state_,
-                         TypeLayout::kAllocated);
+  result.StoreNonPointer(&result.untag()->type_state_,
+                         UntaggedType::kAllocated);
   result.SetTypeTestingStub(
       Code::Handle(Z, TypeTestingStubGenerator::DefaultCodeForType(result)));
-  return result.raw();
+  return result.ptr();
 }
 
 void FunctionType::set_type_state(uint8_t state) const {
-  ASSERT((state >= FunctionTypeLayout::kAllocated) &&
-         (state <= FunctionTypeLayout::kFinalizedUninstantiated));
-  StoreNonPointer(&raw_ptr()->type_state_, state);
+  ASSERT((state >= UntaggedFunctionType::kAllocated) &&
+         (state <= UntaggedFunctionType::kFinalizedUninstantiated));
+  StoreNonPointer(&untag()->type_state_, state);
 }
 
 const char* FunctionType::ToUserVisibleCString() const {
@@ -9951,30 +9958,30 @@
 }
 
 void ClosureData::set_context_scope(const ContextScope& value) const {
-  raw_ptr()->set_context_scope(value.raw());
+  untag()->set_context_scope(value.ptr());
 }
 
 void ClosureData::set_implicit_static_closure(const Instance& closure) const {
   ASSERT(!closure.IsNull());
-  ASSERT(raw_ptr()->closure() == Instance::null());
-  raw_ptr()->set_closure<std::memory_order_release>(closure.raw());
+  ASSERT(untag()->closure() == Instance::null());
+  untag()->set_closure<std::memory_order_release>(closure.ptr());
 }
 
 void ClosureData::set_parent_function(const Function& value) const {
-  raw_ptr()->set_parent_function(value.raw());
+  untag()->set_parent_function(value.ptr());
 }
 
 void FfiTrampolineData::set_c_signature(const FunctionType& value) const {
-  raw_ptr()->set_c_signature(value.raw());
+  untag()->set_c_signature(value.ptr());
 }
 
 void FfiTrampolineData::set_callback_id(int32_t callback_id) const {
-  StoreNonPointer(&raw_ptr()->callback_id_, callback_id);
+  StoreNonPointer(&untag()->callback_id_, callback_id);
 }
 
 void FfiTrampolineData::set_callback_exceptional_return(
     const Instance& value) const {
-  raw_ptr()->set_callback_exceptional_return(value.raw());
+  untag()->set_callback_exceptional_return(value.ptr());
 }
 
 FfiTrampolineDataPtr FfiTrampolineData::New() {
@@ -9983,7 +9990,7 @@
       Object::Allocate(FfiTrampolineData::kClassId,
                        FfiTrampolineData::InstanceSize(), Heap::kOld);
   FfiTrampolineDataPtr data = static_cast<FfiTrampolineDataPtr>(raw);
-  data->ptr()->callback_id_ = 0;
+  data->untag()->callback_id_ = 0;
   return data;
 }
 
@@ -10002,11 +10009,11 @@
   if (IsNull()) {
     return Field::null();
   }
-  Object& obj = Object::Handle(raw_ptr()->owner());
+  Object& obj = Object::Handle(untag()->owner());
   if (obj.IsField()) {
-    return Field::RawCast(obj.raw());
+    return Field::RawCast(obj.ptr());
   } else {
-    return this->raw();
+    return this->ptr();
   }
 }
 
@@ -10055,13 +10062,13 @@
          is_static());
 #endif
   return LoadNonPointer<ClassIdTagType, std::memory_order_relaxed>(
-      &raw_ptr()->guarded_cid_);
+      &untag()->guarded_cid_);
 }
 
 void Field::SetOriginal(const Field& value) const {
   ASSERT(value.IsOriginal());
   ASSERT(!value.IsNull());
-  raw_ptr()->set_owner(static_cast<ObjectPtr>(value.raw()));
+  untag()->set_owner(static_cast<ObjectPtr>(value.ptr()));
 }
 
 StringPtr Field::GetterName(const String& field_name) {
@@ -10118,26 +10125,26 @@
 void Field::set_name(const String& value) const {
   ASSERT(value.IsSymbol());
   ASSERT(IsOriginal());
-  raw_ptr()->set_name(value.raw());
+  untag()->set_name(value.ptr());
 }
 
 ObjectPtr Field::RawOwner() const {
   if (IsOriginal()) {
-    return raw_ptr()->owner();
+    return untag()->owner();
   } else {
     const Field& field = Field::Handle(Original());
     ASSERT(field.IsOriginal());
-    ASSERT(!Object::Handle(field.raw_ptr()->owner()).IsField());
-    return field.raw_ptr()->owner();
+    ASSERT(!Object::Handle(field.untag()->owner()).IsField());
+    return field.untag()->owner();
   }
 }
 
 ClassPtr Field::Owner() const {
   const Field& field = Field::Handle(Original());
   ASSERT(field.IsOriginal());
-  const Object& obj = Object::Handle(field.raw_ptr()->owner());
+  const Object& obj = Object::Handle(field.untag()->owner());
   if (obj.IsClass()) {
-    return Class::Cast(obj).raw();
+    return Class::Cast(obj).ptr();
   }
   ASSERT(obj.IsPatchClass());
   return PatchClass::Cast(obj).patched_class();
@@ -10146,9 +10153,9 @@
 ClassPtr Field::Origin() const {
   const Field& field = Field::Handle(Original());
   ASSERT(field.IsOriginal());
-  const Object& obj = Object::Handle(field.raw_ptr()->owner());
+  const Object& obj = Object::Handle(field.untag()->owner());
   if (obj.IsClass()) {
-    return Class::Cast(obj).raw();
+    return Class::Cast(obj).ptr();
   }
   ASSERT(obj.IsPatchClass());
   return PatchClass::Cast(obj).origin_class();
@@ -10159,7 +10166,7 @@
   // update Class::PatchFieldsAndFunctions() at the same time.
   const Field& field = Field::Handle(Original());
   ASSERT(field.IsOriginal());
-  const Object& obj = Object::Handle(field.raw_ptr()->owner());
+  const Object& obj = Object::Handle(field.untag()->owner());
   if (obj.IsClass()) {
     return Class::Cast(obj).script();
   }
@@ -10168,7 +10175,7 @@
 }
 
 ExternalTypedDataPtr Field::KernelData() const {
-  const Object& obj = Object::Handle(this->raw_ptr()->owner());
+  const Object& obj = Object::Handle(this->untag()->owner());
   // During background JIT compilation field objects are copied
   // and copy points to the original field via the owner field.
   if (obj.IsField()) {
@@ -10185,12 +10192,12 @@
 #if defined(DART_PRECOMPILED_RUNTIME)
   UNREACHABLE();
 #else
-  StoreNonPointer(&raw_ptr()->kernel_offset_, src.raw_ptr()->kernel_offset_);
+  StoreNonPointer(&untag()->kernel_offset_, src.untag()->kernel_offset_);
 #endif
 }
 
 intptr_t Field::KernelDataProgramOffset() const {
-  const Object& obj = Object::Handle(raw_ptr()->owner());
+  const Object& obj = Object::Handle(untag()->owner());
   // During background JIT compilation field objects are copied
   // and copy points to the original field via the owner field.
   if (obj.IsField()) {
@@ -10208,8 +10215,8 @@
   ASSERT(Thread::Current()->IsMutatorThread());
   ASSERT(IsOriginal());
   ASSERT(!value.IsNull());
-  if (value.raw() != type()) {
-    raw_ptr()->set_type(value.raw());
+  if (value.ptr() != type()) {
+    untag()->set_type(value.ptr());
   }
 }
 
@@ -10301,7 +10308,7 @@
   InitializeNew(result, name, is_static, is_final, is_const, is_reflectable,
                 is_late, owner, token_pos, end_token_pos);
   result.SetFieldType(type);
-  return result.raw();
+  return result.ptr();
 }
 
 FieldPtr Field::NewTopLevel(const String& name,
@@ -10316,7 +10323,7 @@
   InitializeNew(result, name, true,       /* is_static */
                 is_final, is_const, true, /* is_reflectable */
                 is_late, owner, token_pos, end_token_pos);
-  return result.raw();
+  return result.ptr();
 }
 
 FieldPtr Field::Clone(const Field& original) const {
@@ -10328,7 +10335,7 @@
   clone ^= Object::Clone(*this, Heap::kOld);
   clone.SetOriginal(original);
   clone.InheritKernelOffsetFrom(original);
-  return clone.raw();
+  return clone.ptr();
 }
 
 int32_t Field::SourceFingerprint() const {
@@ -10363,24 +10370,24 @@
 }
 
 intptr_t Field::guarded_list_length() const {
-  return Smi::Value(raw_ptr()->guarded_list_length());
+  return Smi::Value(untag()->guarded_list_length());
 }
 
 void Field::set_guarded_list_length_unsafe(intptr_t list_length) const {
   ASSERT(Thread::Current()->IsMutatorThread());
   ASSERT(IsOriginal());
-  raw_ptr()->set_guarded_list_length(Smi::New(list_length));
+  untag()->set_guarded_list_length(Smi::New(list_length));
 }
 
 intptr_t Field::guarded_list_length_in_object_offset() const {
-  return raw_ptr()->guarded_list_length_in_object_offset_ + kHeapObjectTag;
+  return untag()->guarded_list_length_in_object_offset_ + kHeapObjectTag;
 }
 
 void Field::set_guarded_list_length_in_object_offset_unsafe(
     intptr_t list_length_offset) const {
   ASSERT(Thread::Current()->IsMutatorThread());
   ASSERT(IsOriginal());
-  StoreNonPointer(&raw_ptr()->guarded_list_length_in_object_offset_,
+  StoreNonPointer(&untag()->guarded_list_length_in_object_offset_,
                   static_cast<int8_t>(list_length_offset - kHeapObjectTag));
   ASSERT(guarded_list_length_in_object_offset() == list_length_offset);
 }
@@ -10460,7 +10467,7 @@
         Instance::Handle(zone, closure_field.StaticValue());
     ASSERT(!closure.IsNull());
     ASSERT(closure.IsClosure());
-    return closure.raw();
+    return closure.ptr();
   }
 
   UNREACHABLE();
@@ -10478,14 +10485,14 @@
 ArrayPtr Field::dependent_code() const {
   DEBUG_ASSERT(
       IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());
-  return raw_ptr()->dependent_code();
+  return untag()->dependent_code();
 }
 
 void Field::set_dependent_code(const Array& array) const {
   ASSERT(IsOriginal());
   DEBUG_ASSERT(
       IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
-  raw_ptr()->set_dependent_code(array.raw());
+  untag()->set_dependent_code(array.ptr());
 }
 
 class FieldDependentArray : public WeakCodeReferences {
@@ -10540,10 +10547,10 @@
 }
 
 bool Field::IsConsistentWith(const Field& other) const {
-  return (raw_ptr()->guarded_cid_ == other.raw_ptr()->guarded_cid_) &&
-         (raw_ptr()->is_nullable_ == other.raw_ptr()->is_nullable_) &&
-         (raw_ptr()->guarded_list_length() ==
-          other.raw_ptr()->guarded_list_length()) &&
+  return (untag()->guarded_cid_ == other.untag()->guarded_cid_) &&
+         (untag()->is_nullable_ == other.untag()->is_nullable_) &&
+         (untag()->guarded_list_length() ==
+          other.untag()->guarded_list_length()) &&
          (is_unboxing_candidate() == other.is_unboxing_candidate()) &&
          (static_type_exactness_state().Encode() ==
           other.static_type_exactness_state().Encode());
@@ -10553,8 +10560,8 @@
   Thread* thread = Thread::Current();
   const FieldTable* field_table = thread->isolate()->field_table();
   const InstancePtr raw_value = field_table->At(field_id());
-  ASSERT(raw_value != Object::transition_sentinel().raw());
-  return raw_value == Object::sentinel().raw();
+  ASSERT(raw_value != Object::transition_sentinel().ptr());
+  return raw_value == Object::sentinel().ptr();
 }
 
 FunctionPtr Field::EnsureInitializerFunction() const {
@@ -10576,7 +10583,7 @@
     }
 #endif
   }
-  return initializer.raw();
+  return initializer.ptr();
 }
 
 void Field::SetInitializerFunction(const Function& initializer) const {
@@ -10590,19 +10597,19 @@
   // We have to ensure that all stores into the initializer function object
   // happen before releasing the pointer to the initializer as it may be
   // accessed without grabbing the lock.
-  raw_ptr()->set_initializer_function<std::memory_order_release>(
-      initializer.raw());
+  untag()->set_initializer_function<std::memory_order_release>(
+      initializer.ptr());
 #endif
 }
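
Aside on the comment above: this is release/acquire publication. The writer completes all plain stores into the initializer object, then publishes the pointer with a release store; any reader that observes the pointer via an acquire load is guaranteed to also observe those earlier stores, which is what makes the lock-free read path sound. A minimal standard-C++ sketch of the same protocol, with std::atomic standing in for the VM's templated field accessors:

#include <atomic>
#include <cassert>
#include <thread>

struct Initializer {
  int bytecode_size = 0;  // Stand-in for "all stores into the object".
};

std::atomic<Initializer*> g_initializer{nullptr};

void Publish(Initializer* init) {
  init->bytecode_size = 42;  // Plain stores happen-before the release below.
  g_initializer.store(init, std::memory_order_release);
}

void Consume() {
  // An acquire load that observes the pointer also observes the stores made
  // before the matching release store -- no lock needed on this path.
  Initializer* init = g_initializer.load(std::memory_order_acquire);
  if (init != nullptr) {
    assert(init->bytecode_size == 42);
  }
}

int main() {
  Initializer init;
  std::thread writer(Publish, &init);
  std::thread reader(Consume);
  writer.join();
  reader.join();
  return 0;
}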
 
 bool Field::HasInitializerFunction() const {
-  return raw_ptr()->initializer_function() != Function::null();
+  return untag()->initializer_function() != Function::null();
 }
 
 ErrorPtr Field::InitializeInstance(const Instance& instance) const {
   ASSERT(IsOriginal());
   ASSERT(is_instance());
-  ASSERT(instance.GetField(*this) == Object::sentinel().raw());
+  ASSERT(instance.GetField(*this) == Object::sentinel().ptr());
   Object& value = Object::Handle();
 
   if (has_nontrivial_initializer()) {
@@ -10611,7 +10618,7 @@
     args.SetAt(0, instance);
     value = DartEntry::InvokeFunction(initializer, args);
     if (!value.IsNull() && value.IsError()) {
-      return Error::Cast(value).raw();
+      return Error::Cast(value).ptr();
     }
   } else {
     if (is_late() && !has_initializer()) {
@@ -10628,7 +10635,7 @@
   }
   ASSERT(value.IsNull() || value.IsInstance());
   if (is_late() && is_final() &&
-      (instance.GetField(*this) != Object::sentinel().raw())) {
+      (instance.GetField(*this) != Object::sentinel().ptr())) {
     Exceptions::ThrowLateFieldAssignedDuringInitialization(
         String::Handle(name()));
     UNREACHABLE();
@@ -10640,7 +10647,7 @@
 ErrorPtr Field::InitializeStatic() const {
   ASSERT(IsOriginal());
   ASSERT(is_static());
-  if (StaticValue() == Object::sentinel().raw()) {
+  if (StaticValue() == Object::sentinel().ptr()) {
     auto& value = Object::Handle();
     if (is_late()) {
       if (!has_initializer()) {
@@ -10649,9 +10656,9 @@
       }
       value = EvaluateInitializer();
       if (value.IsError()) {
-        return Error::Cast(value).raw();
+        return Error::Cast(value).ptr();
       }
-      if (is_final() && (StaticValue() != Object::sentinel().raw())) {
+      if (is_final() && (StaticValue() != Object::sentinel().ptr())) {
         Exceptions::ThrowLateFieldAssignedDuringInitialization(
             String::Handle(name()));
         UNREACHABLE();
@@ -10661,14 +10668,14 @@
       value = EvaluateInitializer();
       if (value.IsError()) {
         SetStaticValue(Object::null_instance());
-        return Error::Cast(value).raw();
+        return Error::Cast(value).ptr();
       }
     }
     ASSERT(value.IsNull() || value.IsInstance());
     SetStaticValue(value.IsNull() ? Instance::null_instance()
                                   : Instance::Cast(value));
     return Error::null();
-  } else if (StaticValue() == Object::transition_sentinel().raw()) {
+  } else if (StaticValue() == Object::transition_sentinel().ptr()) {
     ASSERT(!is_late());
     const Array& ctor_args = Array::Handle(Array::New(1));
     const String& field_name = String::Handle(name());
@@ -10689,7 +10696,7 @@
   // We can safely cache the value of the static const field in the initial
   // field table.
   auto& value = Object::Handle(zone, initial_field_table->At(field_id()));
-  if (value.raw() == Object::sentinel().raw()) {
+  if (value.ptr() == Object::sentinel().ptr()) {
     ASSERT(has_initializer());
     value = EvaluateInitializer();
     if (!value.IsError()) {
@@ -10698,7 +10705,7 @@
                                               : Instance::Cast(value));
     }
   }
-  return value.raw();
+  return value.ptr();
 }
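
Aside: the sentinel comparisons here and in InitializeStatic exist because null is a legal field value, so "not yet computed" needs a marker that can never be confused with a real value. A small sketch of the idiom with a hypothetical cache (Object::sentinel() is a unique heap object playing the role of kUnset below):

#include <cassert>

struct Value {};

// A unique address used only as the "not yet computed" marker; it can never
// collide with a real Value* -- including nullptr, which is a valid result.
static Value g_unset_marker;
static Value* const kUnset = &g_unset_marker;

static Value* g_cached = kUnset;

Value* EvaluateInitializer() {
  return nullptr;  // null is a perfectly valid computed value here.
}

Value* CachedValue() {
  if (g_cached == kUnset) {  // Compare against the sentinel, not null.
    g_cached = EvaluateInitializer();
  }
  return g_cached;           // May legitimately be nullptr.
}

int main() {
  assert(CachedValue() == nullptr);  // Computed once...
  assert(g_cached != kUnset);        // ...and the sentinel is gone.
  return 0;
}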
 
 void Field::SetStaticConstFieldValue(const Instance& value,
@@ -10707,12 +10714,12 @@
   auto initial_field_table = thread->isolate_group()->initial_field_table();
 
   SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
-  ASSERT(initial_field_table->At(field_id()) == Object::sentinel().raw() ||
-         initial_field_table->At(field_id()) == value.raw() ||
+  ASSERT(initial_field_table->At(field_id()) == Object::sentinel().ptr() ||
+         initial_field_table->At(field_id()) == value.ptr() ||
          !assert_initializing_store);
   initial_field_table->SetAt(field_id(), value.IsNull()
-                                             ? Instance::null_instance().raw()
-                                             : Instance::Cast(value).raw());
+                                             ? Instance::null_instance().ptr()
+                                             : Instance::Cast(value).ptr());
 }
 
 ObjectPtr Field::EvaluateInitializer() const {
@@ -10883,7 +10890,7 @@
                                 const Class& cls,
                                 GrowableArray<const AbstractType*>* path,
                                 bool consider_only_super_classes) {
-  if (type.type_class() == cls.raw()) {
+  if (type.type_class() == cls.ptr()) {
     return true;  // Found instantiation.
   }
 
@@ -10925,7 +10932,7 @@
   ASSERT(id >= 0);
 
   SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
-  thread->isolate()->field_table()->SetAt(id, value.raw());
+  thread->isolate()->field_table()->SetAt(id, value.ptr());
 }
 
 static StaticTypeExactnessState TrivialTypeExactnessFor(const Class& cls) {
@@ -10941,7 +10948,7 @@
 }
 
 static const char* SafeTypeArgumentsToCString(const TypeArguments& args) {
-  return (args.raw() == TypeArguments::null()) ? "<null>" : args.ToCString();
+  return (args.ptr() == TypeArguments::null()) ? "<null>" : args.ToCString();
 }
 
 StaticTypeExactnessState StaticTypeExactnessState::Compute(
@@ -10949,8 +10956,8 @@
     const Instance& value,
     bool print_trace /* = false */) {
   ASSERT(!value.IsNull());  // Should be handled by the caller.
-  ASSERT(value.raw() != Object::sentinel().raw());
-  ASSERT(value.raw() != Object::transition_sentinel().raw());
+  ASSERT(value.ptr() != Object::sentinel().ptr());
+  ASSERT(value.ptr() != Object::transition_sentinel().ptr());
 
   const TypeArguments& static_type_args =
       TypeArguments::Handle(static_type.arguments());
@@ -10973,12 +10980,12 @@
   // Trivial case: field has type G<T0, ..., Tn> and value has type
   // G<U0, ..., Un>. Check if type arguments match.
   if (path.is_empty()) {
-    ASSERT(cls.raw() == static_type.type_class());
+    ASSERT(cls.ptr() == static_type.type_class());
     args = value.GetTypeArguments();
     // TODO(dartbug.com/34170) Evaluate if comparing relevant subvectors (that
     // disregards superclass own arguments) improves precision of the
     // tracking.
-    if (args.raw() == static_type_args.raw()) {
+    if (args.ptr() == static_type_args.ptr()) {
       return TrivialTypeExactnessFor(cls);
     }
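
Aside: comparing args.ptr() against static_type_args.ptr() works as an equality check here because type-argument vectors are canonicalized -- structurally equal vectors are interned to a single object. A sketch of the interning idea, with hypothetical names and std::string standing in for a type-argument vector:

#include <cassert>
#include <set>
#include <string>

// Hypothetical intern table: structurally equal keys map to one shared
// object, so equality downstream reduces to pointer identity.
class InternTable {
 public:
  const std::string* Canonicalize(const std::string& key) {
    return &*table_.insert(key).first;  // std::set keys have stable addresses.
  }

 private:
  std::set<std::string> table_;
};

int main() {
  InternTable table;
  const std::string* a = table.Canonicalize("List<int>");
  const std::string* b = table.Canonicalize(std::string("List<") + "int>");
  assert(a == b);  // Structural equality established once, at interning time.
  return 0;
}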
 
@@ -10998,7 +11005,7 @@
   // To compute C<X0, ..., Xn> at G we walk the chain backwards and
   // instantiate Si using type parameters of S{i-1} which gives us a type
   // depending on type parameters of S{i-2}.
-  AbstractType& type = AbstractType::Handle(path.Last()->raw());
+  AbstractType& type = AbstractType::Handle(path.Last()->ptr());
   for (intptr_t i = path.length() - 2; (i >= 0) && !type.IsInstantiated();
        i--) {
     args = path[i]->arguments();
@@ -11120,7 +11127,7 @@
   TypeArguments& args = TypeArguments::Handle();
   if (static_type_exactness_state().IsTriviallyExact()) {
     args = instance.GetTypeArguments();
-    if (args.raw() == field_type_args.raw()) {
+    if (args.ptr() == field_type_args.ptr()) {
       return false;
     }
 
@@ -11146,12 +11153,12 @@
   }
 
   // We should never try to record a sentinel.
-  ASSERT(value.raw() != Object::sentinel().raw());
+  ASSERT(value.ptr() != Object::sentinel().ptr());
 
   Thread* const thread = Thread::Current();
   SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
   if ((guarded_cid() == kDynamicCid) ||
-      (is_nullable() && value.raw() == Object::null())) {
+      (is_nullable() && value.ptr() == Object::null())) {
     // Nothing to do: the field is not guarded or we are storing null into
     // a nullable field.
     return;
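
Aside: this early return belongs to the field-guard scheme. Each field tracks a guarded class id plus a nullability bit; stores that already satisfy the guard cost nothing, and conflicting stores widen the guard until it reaches kDynamicCid and turns itself off. A rough sketch of that update rule under those assumptions, with hypothetical cid constants:

#include <cassert>
#include <cstdint>

constexpr intptr_t kIllegalCid = 0;  // No value seen yet.
constexpr intptr_t kNullCid = 1;     // The null value.
constexpr intptr_t kDynamicCid = 2;  // Guard abandoned: any class fits.

struct FieldGuard {
  intptr_t guarded_cid = kIllegalCid;
  bool is_nullable = false;

  // Called on each store; widens the guard just enough to admit `cid`.
  void Record(intptr_t cid) {
    if (guarded_cid == kDynamicCid || (is_nullable && cid == kNullCid)) {
      return;  // Nothing to do: unguarded, or null into a nullable field.
    }
    if (cid == kNullCid) {
      is_nullable = true;
    } else if (guarded_cid == kIllegalCid) {
      guarded_cid = cid;           // First value pins the class id.
    } else if (guarded_cid != cid) {
      guarded_cid = kDynamicCid;   // Conflicting classes: give up.
    }
  }
};

int main() {
  FieldGuard guard;
  guard.Record(42);
  assert(guard.guarded_cid == 42);
  guard.Record(kNullCid);
  assert(guard.is_nullable && guard.guarded_cid == 42);
  guard.Record(43);
  assert(guard.guarded_cid == kDynamicCid);
  return 0;
}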
@@ -11195,16 +11202,16 @@
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
 void Field::set_type_test_cache(const SubtypeTestCache& cache) const {
-  raw_ptr()->set_type_test_cache(cache.raw());
+  untag()->set_type_test_cache(cache.ptr());
 }
 #endif
 
 bool Script::HasSource() const {
-  return raw_ptr()->source() != String::null();
+  return untag()->source() != String::null();
 }
 
 StringPtr Script::Source() const {
-  return raw_ptr()->source();
+  return untag()->source();
 }
 
 bool Script::IsPartOfDartColonLibrary() const {
@@ -11224,15 +11231,15 @@
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
 
 void Script::set_compile_time_constants(const Array& value) const {
-  raw_ptr()->set_compile_time_constants(value.raw());
+  untag()->set_compile_time_constants(value.ptr());
 }
 
 void Script::set_kernel_program_info(const KernelProgramInfo& info) const {
-  raw_ptr()->set_kernel_program_info(info.raw());
+  untag()->set_kernel_program_info(info.ptr());
 }
 
 void Script::set_kernel_script_index(const intptr_t kernel_script_index) const {
-  StoreNonPointer(&raw_ptr()->kernel_script_index_, kernel_script_index);
+  StoreNonPointer(&untag()->kernel_script_index_, kernel_script_index);
 }
 
 TypedDataPtr Script::kernel_string_offsets() const {
@@ -11284,7 +11291,7 @@
     // Scripts in the AOT snapshot do not have a line starts array.
     // A well-formed line number array has a leading null.
     info.Add(line_separator);  // New line.
-    return info.raw();
+    return info.ptr();
   }
 #if !defined(DART_PRECOMPILED_RUNTIME)
   Smi& value = Smi::Handle(zone);
@@ -11325,15 +11332,15 @@
     previous_start = start;
   }
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
-  return info.raw();
+  return info.ptr();
 }
 
 TokenPosition Script::MaxPosition() const {
 #if !defined(DART_PRECOMPILED_RUNTIME)
   if (HasCachedMaxPosition()) {
     return TokenPosition::Deserialize(
-        ScriptLayout::CachedMaxPositionBitField::decode(
-            raw_ptr()->flags_and_max_position_));
+        UntaggedScript::CachedMaxPositionBitField::decode(
+            untag()->flags_and_max_position_));
   }
   auto const zone = Thread::Current()->zone();
   LookupSourceAndLineStarts(zone);
@@ -11350,90 +11357,90 @@
 }
 
 void Script::set_url(const String& value) const {
-  raw_ptr()->set_url(value.raw());
+  untag()->set_url(value.ptr());
 }
 
 void Script::set_resolved_url(const String& value) const {
-  raw_ptr()->set_resolved_url(value.raw());
+  untag()->set_resolved_url(value.ptr());
 }
 
 void Script::set_source(const String& value) const {
-  raw_ptr()->set_source(value.raw());
+  untag()->set_source(value.ptr());
 }
 
 void Script::set_line_starts(const TypedData& value) const {
-  raw_ptr()->set_line_starts(value.raw());
+  untag()->set_line_starts(value.ptr());
 }
 
 #if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
 void Script::set_constant_coverage(const ExternalTypedData& value) const {
-  raw_ptr()->set_constant_coverage(value.raw());
+  untag()->set_constant_coverage(value.ptr());
 }
 
 ExternalTypedDataPtr Script::constant_coverage() const {
-  return raw_ptr()->constant_coverage();
+  return untag()->constant_coverage();
 }
 #endif  // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
 
 void Script::set_debug_positions(const Array& value) const {
-  raw_ptr()->set_debug_positions(value.raw());
+  untag()->set_debug_positions(value.ptr());
 }
 
 TypedDataPtr Script::line_starts() const {
-  return raw_ptr()->line_starts();
+  return untag()->line_starts();
 }
 
 ArrayPtr Script::debug_positions() const {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-  Array& debug_positions_array = Array::Handle(raw_ptr()->debug_positions());
+  Array& debug_positions_array = Array::Handle(untag()->debug_positions());
   if (debug_positions_array.IsNull()) {
     // This is created lazily. Now we need it.
     kernel::CollectTokenPositionsFor(*this);
   }
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
-  return raw_ptr()->debug_positions();
+  return untag()->debug_positions();
 }
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
 void Script::SetLazyLookupSourceAndLineStarts(bool value) const {
-  StoreNonPointer(&raw_ptr()->flags_and_max_position_,
-                  ScriptLayout::LazyLookupSourceAndLineStartsBit::update(
-                      value, raw_ptr()->flags_and_max_position_));
+  StoreNonPointer(&untag()->flags_and_max_position_,
+                  UntaggedScript::LazyLookupSourceAndLineStartsBit::update(
+                      value, untag()->flags_and_max_position_));
 }
 
 bool Script::IsLazyLookupSourceAndLineStarts() const {
-  return ScriptLayout::LazyLookupSourceAndLineStartsBit::decode(
-      raw_ptr()->flags_and_max_position_);
+  return UntaggedScript::LazyLookupSourceAndLineStartsBit::decode(
+      untag()->flags_and_max_position_);
 }
 
 bool Script::HasCachedMaxPosition() const {
-  return ScriptLayout::HasCachedMaxPositionBit::decode(
-      raw_ptr()->flags_and_max_position_);
+  return UntaggedScript::HasCachedMaxPositionBit::decode(
+      untag()->flags_and_max_position_);
 }
 
 void Script::SetHasCachedMaxPosition(bool value) const {
-  StoreNonPointer(&raw_ptr()->flags_and_max_position_,
-                  ScriptLayout::HasCachedMaxPositionBit::update(
-                      value, raw_ptr()->flags_and_max_position_));
+  StoreNonPointer(&untag()->flags_and_max_position_,
+                  UntaggedScript::HasCachedMaxPositionBit::update(
+                      value, untag()->flags_and_max_position_));
 }
 
 void Script::SetCachedMaxPosition(intptr_t value) const {
-  StoreNonPointer(&raw_ptr()->flags_and_max_position_,
-                  ScriptLayout::CachedMaxPositionBitField::update(
-                      value, raw_ptr()->flags_and_max_position_));
+  StoreNonPointer(&untag()->flags_and_max_position_,
+                  UntaggedScript::CachedMaxPositionBitField::update(
+                      value, untag()->flags_and_max_position_));
 }
 #endif
 
 void Script::set_load_timestamp(int64_t value) const {
-  StoreNonPointer(&raw_ptr()->load_timestamp_, value);
+  StoreNonPointer(&untag()->load_timestamp_, value);
 }
 
 void Script::SetLocationOffset(intptr_t line_offset,
                                intptr_t col_offset) const {
   ASSERT(line_offset >= 0);
   ASSERT(col_offset >= 0);
-  StoreNonPointer(&raw_ptr()->line_offset_, line_offset);
-  StoreNonPointer(&raw_ptr()->col_offset_, col_offset);
+  StoreNonPointer(&untag()->line_offset_, line_offset);
+  StoreNonPointer(&untag()->col_offset_, col_offset);
 }
 
 bool Script::IsValidTokenPosition(TokenPosition token_pos) const {
@@ -11596,13 +11603,13 @@
 
 StringPtr Script::GetLine(intptr_t line_number, Heap::Space space) const {
   if (!HasSource()) {
-    return Symbols::OptimizedOut().raw();
+    return Symbols::OptimizedOut().ptr();
   }
   const String& src = String::Handle(Source());
   const intptr_t start =
       GetRelativeSourceIndex(src, line_number, line_offset());
   if (start < 0) {
-    return Symbols::Empty().raw();
+    return Symbols::Empty().ptr();
   }
   intptr_t end = start;
   for (; end < src.Length(); end++) {
@@ -11619,7 +11626,7 @@
                              intptr_t to_line,
                              intptr_t to_column) const {
   if (!HasSource()) {
-    return Symbols::OptimizedOut().raw();
+    return Symbols::OptimizedOut().ptr();
   }
   const String& src = String::Handle(Source());
   const intptr_t start = GetRelativeSourceIndex(src, from_line, line_offset(),
@@ -11661,7 +11668,7 @@
   result.set_kernel_script_index(0);
   result.set_load_timestamp(
       FLAG_remove_script_timestamps_for_test ? 0 : OS::GetCurrentTimeMillis());
-  return result.raw();
+  return result.ptr();
 }
 
 const char* Script::ToCString() const {
@@ -11681,8 +11688,8 @@
     lib ^= libs.At(i);
     scripts = lib.LoadedScripts();
     for (intptr_t j = 0; j < scripts.Length(); j++) {
-      if (scripts.At(j) == raw()) {
-        return lib.raw();
+      if (scripts.At(j) == ptr()) {
+        return lib.ptr();
       }
     }
   }
@@ -11729,12 +11736,12 @@
     int ix = next_ix_++;
     cls ^= array_.At(ix);
     MoveToNextClass();
-    return cls.raw();
+    return cls.ptr();
   }
   ASSERT(!toplevel_class_.IsNull());
-  cls = toplevel_class_.raw();
+  cls = toplevel_class_.ptr();
   toplevel_class_ = Class::null();
-  return cls.raw();
+  return cls.ptr();
 }
 
 void ClassDictionaryIterator::MoveToNextClass() {
@@ -11767,24 +11774,24 @@
   if (!Utils::IsUint(16, value)) {
     ReportTooManyImports(*this);
   }
-  StoreNonPointer(&raw_ptr()->num_imports_, value);
+  StoreNonPointer(&untag()->num_imports_, value);
 }
 
 void Library::set_name(const String& name) const {
   ASSERT(name.IsSymbol());
-  raw_ptr()->set_name(name.raw());
+  untag()->set_name(name.ptr());
 }
 
 void Library::set_url(const String& name) const {
-  raw_ptr()->set_url(name.raw());
+  untag()->set_url(name.ptr());
 }
 
 void Library::set_kernel_data(const ExternalTypedData& data) const {
-  raw_ptr()->set_kernel_data(data.raw());
+  untag()->set_kernel_data(data.ptr());
 }
 
 void Library::set_loading_unit(const LoadingUnit& value) const {
-  raw_ptr()->set_loading_unit(value.raw());
+  untag()->set_loading_unit(value.ptr());
 }
 
 void Library::SetName(const String& name) const {
@@ -11795,20 +11802,20 @@
 
 void Library::SetLoadInProgress() const {
   // Must not already be in the process of being loaded.
-  ASSERT(raw_ptr()->load_state_ <= LibraryLayout::kLoadRequested);
-  StoreNonPointer(&raw_ptr()->load_state_, LibraryLayout::kLoadInProgress);
+  ASSERT(untag()->load_state_ <= UntaggedLibrary::kLoadRequested);
+  StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoadInProgress);
 }
 
 void Library::SetLoadRequested() const {
   // Must not be already loaded.
-  ASSERT(raw_ptr()->load_state_ == LibraryLayout::kAllocated);
-  StoreNonPointer(&raw_ptr()->load_state_, LibraryLayout::kLoadRequested);
+  ASSERT(untag()->load_state_ == UntaggedLibrary::kAllocated);
+  StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoadRequested);
 }
 
 void Library::SetLoaded() const {
   // Should not be already loaded or just allocated.
   ASSERT(LoadInProgress() || LoadRequested());
-  StoreNonPointer(&raw_ptr()->load_state_, LibraryLayout::kLoaded);
+  StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoaded);
 }
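
Aside: taken together, the asserts in these three setters pin down a one-way load-state machine -- kAllocated -> kLoadRequested -> kLoadInProgress -> kLoaded, with SetLoadInProgress also admitting the direct kAllocated -> kLoadInProgress step. A compact sketch of the same discipline:

#include <cassert>

enum LoadState { kAllocated, kLoadRequested, kLoadInProgress, kLoaded };

struct LibraryState {
  LoadState state = kAllocated;

  void SetLoadRequested() {
    assert(state == kAllocated);      // Must not be already loaded.
    state = kLoadRequested;
  }
  void SetLoadInProgress() {
    assert(state <= kLoadRequested);  // Not already loading or loaded.
    state = kLoadInProgress;
  }
  void SetLoaded() {
    assert(state == kLoadRequested || state == kLoadInProgress);
    state = kLoaded;                  // Terminal state.
  }
};

int main() {
  LibraryState lib;
  lib.SetLoadRequested();
  lib.SetLoadInProgress();
  lib.SetLoaded();
  return 0;
}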
 
 void Library::AddMetadata(const Object& declaration,
@@ -11827,7 +11834,7 @@
 
 ObjectPtr Library::GetMetadata(const Object& declaration) const {
 #if defined(DART_PRECOMPILED_RUNTIME)
-  return Object::empty_array().raw();
+  return Object::empty_array().ptr();
 #else
   RELEASE_ASSERT(declaration.IsClass() || declaration.IsField() ||
                  declaration.IsFunction() || declaration.IsLibrary() ||
@@ -11853,12 +11860,12 @@
   }
   if (value.IsNull()) {
     // There is no metadata for this object.
-    return Object::empty_array().raw();
+    return Object::empty_array().ptr();
   }
   if (!value.IsSmi()) {
     // Metadata is already evaluated.
     ASSERT(value.IsArray());
-    return value.raw();
+    return value.ptr();
   }
   const auto& smi_value = Smi::Cast(value);
   intptr_t kernel_offset = smi_value.Value();
@@ -11869,17 +11876,17 @@
                 /* is_annotations_offset = */ declaration.IsLibrary() ||
                     declaration.IsNamespace()));
   if (evaluated_value.IsArray() || evaluated_value.IsNull()) {
-    ASSERT(evaluated_value.raw() != Object::empty_array().raw());
+    ASSERT(evaluated_value.ptr() != Object::empty_array().ptr());
     SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
     MetadataMap map(metadata());
-    if (map.GetOrNull(declaration) == smi_value.raw()) {
+    if (map.GetOrNull(declaration) == smi_value.ptr()) {
       map.UpdateOrInsert(declaration, evaluated_value);
     } else {
-      ASSERT(map.GetOrNull(declaration) == evaluated_value.raw());
+      ASSERT(map.GetOrNull(declaration) == evaluated_value.ptr());
     }
     set_metadata(map.Release());
   }
-  return evaluated_value.raw();
+  return evaluated_value.ptr();
 #endif  // defined(DART_PRECOMPILED_RUNTIME)
 }
 
@@ -11895,14 +11902,14 @@
 ObjectPtr Library::ResolveName(const String& name) const {
   Object& obj = Object::Handle();
   if (FLAG_use_lib_cache && LookupResolvedNamesCache(name, &obj)) {
-    return obj.raw();
+    return obj.ptr();
   }
   EnsureTopLevelClassIsFinalized();
   obj = LookupLocalObject(name);
   if (!obj.IsNull()) {
     // Names that are in this library's dictionary and are unmangled
     // are not cached. This reduces the size of the cache.
-    return obj.raw();
+    return obj.ptr();
   }
   String& accessor_name = String::Handle(Field::LookupGetterSymbol(name));
   if (!accessor_name.IsNull()) {
@@ -11918,7 +11925,7 @@
     }
   }
   AddToResolvedNamesCache(name, obj);
-  return obj.raw();
+  return obj.ptr();
 }
 
 class StringEqualsTraits {
@@ -11948,7 +11955,7 @@
 // ASSERT that 'resolved_names()' has not changed only in mutator.
 #if defined(DEBUG)
   if (Thread::Current()->IsMutatorThread()) {
-    ASSERT(cache.Release().raw() == resolved_names());
+    ASSERT(cache.Release().ptr() == resolved_names());
   } else {
     // Release must be called in debug mode.
     cache.Release();
@@ -11970,7 +11977,7 @@
   }
   ResolvedNamesMap cache(resolved_names());
   cache.UpdateOrInsert(name, obj);
-  raw_ptr()->set_resolved_names(cache.Release().raw());
+  untag()->set_resolved_names(cache.Release().ptr());
 }
 
 bool Library::LookupExportedNamesCache(const String& name, Object* obj) const {
@@ -11986,7 +11993,7 @@
 // do not ASSERT that 'exported_names()' has not changed.
 #if defined(DEBUG)
   if (Thread::Current()->IsMutatorThread()) {
-    ASSERT(cache.Release().raw() == exported_names());
+    ASSERT(cache.Release().ptr() == exported_names());
   } else {
     // Release must be called in debug mode.
     cache.Release();
@@ -12005,7 +12012,7 @@
   }
   ResolvedNamesMap cache(exported_names());
   cache.UpdateOrInsert(name, obj);
-  raw_ptr()->set_exported_names(cache.Release().raw());
+  untag()->set_exported_names(cache.Release().ptr());
 }
 
 void Library::InvalidateResolvedName(const String& name) const {
@@ -12078,7 +12085,7 @@
   new_entry = Smi::New(used);
   new_dict.SetAt(new_dict_size, new_entry);
   // Remember the new dictionary now.
-  raw_ptr()->set_dictionary(new_dict.raw());
+  untag()->set_dictionary(new_dict.ptr());
 }
 
 void Library::AddObject(const Object& obj, const String& name) const {
@@ -12114,7 +12121,7 @@
 
   // Invalidate the cache of loaded scripts.
   if (loaded_scripts() != Array::null()) {
-    raw_ptr()->set_loaded_scripts(Array::null());
+    untag()->set_loaded_scripts(Array::null());
   }
 }
 
@@ -12132,7 +12139,7 @@
   }
   Object& obj = Object::Handle();
   if (FLAG_use_exp_cache && LookupExportedNamesCache(name, &obj)) {
-    return obj.raw();
+    return obj.ptr();
   }
 
   const intptr_t lib_id = this->index();
@@ -12157,7 +12164,7 @@
   if (FLAG_use_exp_cache && !in_cycle && !Compiler::IsBackgroundCompilation()) {
     AddToExportedNamesCache(name, obj);
   }
-  return obj.raw();
+  return obj.ptr();
 }
 
 ObjectPtr Library::LookupEntry(const String& name, intptr_t* index) const {
@@ -12178,7 +12185,7 @@
     entry_name = entry.DictionaryName();
     ASSERT(!entry_name.IsNull());
     if (entry_name.Equals(name)) {
-      return entry.raw();
+      return entry.ptr();
     }
     *index = (*index + 1) % dict_size;
     entry = dict.At(*index);
@@ -12204,7 +12211,7 @@
 
   for (int i = 0; i < scripts.Length(); i++) {
     script_obj ^= scripts.At(i);
-    if (script_obj.raw() == candidate.raw()) {
+    if (script_obj.ptr() == candidate.ptr()) {
       // We already have a reference to this script.
       return;
     }
@@ -12251,7 +12258,7 @@
         owner_script = Class::Cast(entry).script();
       } else {
         ASSERT(entry.IsScript());
-        owner_script = Script::Cast(entry).raw();
+        owner_script = Script::Cast(entry).ptr();
       }
       AddScriptIfUnique(scripts, owner_script);
     }
@@ -12276,7 +12283,7 @@
 
     // Create the array of scripts and cache it in loaded_scripts_.
     const Array& scripts_array = Array::Handle(Array::MakeFixedLength(scripts));
-    raw_ptr()->set_loaded_scripts(scripts_array.raw());
+    untag()->set_loaded_scripts(scripts_array.ptr());
   }
   return loaded_scripts();
 }
@@ -12304,14 +12311,14 @@
     }
     const intptr_t start_idx = script_url.Length() - url_length;
     if ((start_idx == 0) && url.Equals(script_url)) {
-      return script.raw();
+      return script.ptr();
     } else if (start_idx > 0) {
       // If we do a suffix match, only match if the partial path
       // starts at or immediately after the path separator.
       if (((url.CharAt(0) == '/') ||
            (script_url.CharAt(start_idx - 1) == '/')) &&
           url.Equals(script_url, start_idx, url_length)) {
-        return script.raw();
+        return script.ptr();
       }
     }
   }
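
Aside: the suffix-match rule above is subtle enough that a worked illustration may help. A partial URL matches only when the match begins at a '/' boundary in the script URL, or the partial URL itself begins with '/'. A standalone sketch using std::string in place of the VM's String:

#include <cassert>
#include <string>

// True if `partial` is a suffix of `full` starting at, or immediately after,
// a path separator -- so "lib/a.dart" cannot match inside "xlib/a.dart".
bool SuffixMatchesAtSeparator(const std::string& full,
                              const std::string& partial) {
  if (partial.size() > full.size()) return false;
  const size_t start = full.size() - partial.size();
  if (full.compare(start, partial.size(), partial) != 0) return false;
  if (start == 0) return true;  // Exact match.
  return partial[0] == '/' || full[start - 1] == '/';
}

int main() {
  assert(SuffixMatchesAtSeparator("package:app/lib/a.dart", "lib/a.dart"));
  assert(SuffixMatchesAtSeparator("package:app/lib/a.dart", "/lib/a.dart"));
  assert(!SuffixMatchesAtSeparator("package:app/xlib/a.dart", "lib/a.dart"));
  return 0;
}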
@@ -12344,7 +12351,7 @@
   EnsureTopLevelClassIsFinalized();
   const Object& result = Object::Handle(LookupEntry(name, &index));
   if (!result.IsNull() && !result.IsLibraryPrefix()) {
-    return result.raw();
+    return result.ptr();
   }
   return LookupReExport(name);
 }
@@ -12353,7 +12360,7 @@
   EnsureTopLevelClassIsFinalized();
   Object& obj = Object::Handle(LookupObjectAllowPrivate(name));
   if (obj.IsField()) {
-    return Field::Cast(obj).raw();
+    return Field::Cast(obj).ptr();
   }
   return Field::null();
 }
@@ -12362,7 +12369,7 @@
   EnsureTopLevelClassIsFinalized();
   Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name));
   if (obj.IsField()) {
-    return Field::Cast(obj).raw();
+    return Field::Cast(obj).ptr();
   }
   return Field::null();
 }
@@ -12371,7 +12378,7 @@
   EnsureTopLevelClassIsFinalized();
   Object& obj = Object::Handle(LookupObjectAllowPrivate(name));
   if (obj.IsFunction()) {
-    return Function::Cast(obj).raw();
+    return Function::Cast(obj).ptr();
   }
   return Function::null();
 }
@@ -12380,7 +12387,7 @@
   EnsureTopLevelClassIsFinalized();
   Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name));
   if (obj.IsFunction()) {
-    return Function::Cast(obj).raw();
+    return Function::Cast(obj).ptr();
   }
   return Function::null();
 }
@@ -12394,14 +12401,14 @@
     String& private_name = String::Handle(zone, PrivateName(name));
     obj = LookupLocalObject(private_name);
   }
-  return obj.raw();
+  return obj.ptr();
 }
 
 ObjectPtr Library::LookupObjectAllowPrivate(const String& name) const {
   // First check if name is found in the local scope of the library.
   Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name));
   if (!obj.IsNull()) {
-    return obj.raw();
+    return obj.ptr();
   }
 
   // Do not look up private names in imported libraries.
@@ -12428,7 +12435,7 @@
     if (!obj.IsNull()) {
       import_lib = import.target();
       import_lib_url = import_lib.url();
-      if (found_obj.raw() != obj.raw()) {
+      if (found_obj.ptr() != obj.ptr()) {
         if (first_import_lib_url.IsNull() ||
             first_import_lib_url.StartsWith(Symbols::DartScheme())) {
           // This is the first object we found, or the
@@ -12436,7 +12443,7 @@
           // system library. The newly found object hides the one
           // from the Dart library.
           first_import_lib_url = import_lib.url();
-          found_obj = obj.raw();
+          found_obj = obj.ptr();
           found_obj_name = obj.DictionaryName();
         } else if (import_lib_url.StartsWith(Symbols::DartScheme())) {
           // The newly found object is exported from a Dart system
@@ -12448,7 +12455,7 @@
           // the first object we found is a setter. Replace the first
           // object with the one we just found.
           first_import_lib_url = import_lib.url();
-          found_obj = obj.raw();
+          found_obj = obj.ptr();
           found_obj_name = found_obj.DictionaryName();
         } else {
           // We found two different objects with the same name.
@@ -12471,7 +12478,7 @@
       }
     }
   }
-  return found_obj.raw();
+  return found_obj.ptr();
 }
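
Aside: the branching above encodes a precedence rule for name clashes across imports -- an object exported by a dart: system library is silently shadowed by one from any other library, while two same-rank candidates from different libraries are a genuine ambiguity. A distilled sketch of that decision, with hypothetical types and the getter/setter special case visible above deliberately left out:

#include <cassert>
#include <string>

struct Candidate {
  std::string object_name;
  std::string library_url;
  bool FromSystemLibrary() const {
    return library_url.rfind("dart:", 0) == 0;  // starts_with "dart:"
  }
};

enum class Resolution { kFirst, kSecond, kConflict };

// First match wins unless it came from a dart: library and the newcomer did
// not, in which case the newcomer shadows it; two same-rank candidates from
// different libraries are an ambiguity the caller must report.
Resolution Resolve(const Candidate& first, const Candidate& second) {
  if (first.FromSystemLibrary() && !second.FromSystemLibrary()) {
    return Resolution::kSecond;
  }
  if (!first.FromSystemLibrary() && second.FromSystemLibrary()) {
    return Resolution::kFirst;
  }
  return Resolution::kConflict;
}

int main() {
  Candidate core{"max", "dart:math"};
  Candidate user{"max", "package:app/util.dart"};
  Candidate other{"max", "package:app/other.dart"};
  assert(Resolve(core, user) == Resolution::kSecond);  // User lib shadows.
  assert(Resolve(user, core) == Resolution::kFirst);
  assert(Resolve(user, other) == Resolution::kConflict);
  return 0;
}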
 
 ClassPtr Library::LookupClass(const String& name) const {
@@ -12480,7 +12487,7 @@
     obj = LookupImportedObject(name);
   }
   if (obj.IsClass()) {
-    return Class::Cast(obj).raw();
+    return Class::Cast(obj).ptr();
   }
   return Class::null();
 }
@@ -12488,7 +12495,7 @@
 ClassPtr Library::LookupLocalClass(const String& name) const {
   Object& obj = Object::Handle(LookupLocalObject(name));
   if (obj.IsClass()) {
-    return Class::Cast(obj).raw();
+    return Class::Cast(obj).ptr();
   }
   return Class::null();
 }
@@ -12499,7 +12506,7 @@
   Zone* zone = Thread::Current()->zone();
   const Class& cls = Class::Handle(zone, LookupClass(name));
   if (!cls.IsNull()) {
-    return cls.raw();
+    return cls.ptr();
   }
 
   // Now try to lookup the class using its private name, but only in
@@ -12508,7 +12515,7 @@
     String& private_name = String::Handle(zone, PrivateName(name));
     const Object& obj = Object::Handle(LookupLocalObject(private_name));
     if (obj.IsClass()) {
-      return Class::Cast(obj).raw();
+      return Class::Cast(obj).ptr();
     }
   }
   return Class::null();
@@ -12526,7 +12533,7 @@
       cls_name = Class::Cast(entry).Name();
       // Warning: comparison is not symmetric.
       if (String::EqualsIgnoringPrivateKey(cls_name, name)) {
-        return Class::Cast(entry).raw();
+        return Class::Cast(entry).ptr();
       }
     }
   }
@@ -12536,25 +12543,25 @@
 LibraryPrefixPtr Library::LookupLocalLibraryPrefix(const String& name) const {
   const Object& obj = Object::Handle(LookupLocalObject(name));
   if (obj.IsLibraryPrefix()) {
-    return LibraryPrefix::Cast(obj).raw();
+    return LibraryPrefix::Cast(obj).ptr();
   }
   return LibraryPrefix::null();
 }
 
 void Library::set_toplevel_class(const Class& value) const {
-  ASSERT(raw_ptr()->toplevel_class() == Class::null());
-  raw_ptr()->set_toplevel_class(value.raw());
+  ASSERT(untag()->toplevel_class() == Class::null());
+  untag()->set_toplevel_class(value.ptr());
 }
 
 void Library::set_dependencies(const Array& deps) const {
-  raw_ptr()->set_dependencies(deps.raw());
+  untag()->set_dependencies(deps.ptr());
 }
 
 void Library::set_metadata(const Array& value) const {
-  if (raw_ptr()->metadata() != value.raw()) {
+  if (untag()->metadata() != value.ptr()) {
     DEBUG_ASSERT(
         IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
-    raw_ptr()->set_metadata(value.raw());
+    untag()->set_metadata(value.ptr());
   }
 }
 
@@ -12578,7 +12585,7 @@
   // We need to preserve the "dart-ext:" imports because they are used by
   // Loader::ReloadNativeExtensions().
   intptr_t native_import_count = 0;
-  Array& imports = Array::Handle(raw_ptr()->imports());
+  Array& imports = Array::Handle(untag()->imports());
   Namespace& ns = Namespace::Handle();
   Library& lib = Library::Handle();
   String& url = String::Handle();
@@ -12603,13 +12610,13 @@
     }
   }
 
-  raw_ptr()->set_imports(new_imports.raw());
-  raw_ptr()->set_exports(Object::empty_array().raw());
-  StoreNonPointer(&raw_ptr()->num_imports_, 0);
-  raw_ptr()->set_resolved_names(Array::null());
-  raw_ptr()->set_exported_names(Array::null());
-  raw_ptr()->set_loaded_scripts(Array::null());
-  raw_ptr()->set_dependencies(Array::null());
+  untag()->set_imports(new_imports.ptr());
+  untag()->set_exports(Object::empty_array().ptr());
+  StoreNonPointer(&untag()->num_imports_, 0);
+  untag()->set_resolved_names(Array::null());
+  untag()->set_exported_names(Array::null());
+  untag()->set_loaded_scripts(Array::null());
+  untag()->set_dependencies(Array::null());
 }
 
 void Library::AddImport(const Namespace& ns) const {
@@ -12618,7 +12625,7 @@
   if (num_imports() == capacity) {
     capacity = capacity + kImportsCapacityIncrement + (capacity >> 2);
     imports = Array::Grow(imports, capacity);
-    raw_ptr()->set_imports(imports.raw());
+    untag()->set_imports(imports.ptr());
   }
   intptr_t index = num_imports();
   imports.SetAt(index, ns);
@@ -12628,7 +12635,7 @@
 // Convenience function to determine whether the export list is
 // non-empty.
 bool Library::HasExports() const {
-  return exports() != Object::empty_array().raw();
+  return exports() != Object::empty_array().ptr();
 }
 
 // We add one namespace at a time to the exports array and don't
@@ -12638,7 +12645,7 @@
   Array& exports = Array::Handle(this->exports());
   intptr_t num_exports = exports.Length();
   exports = Array::Grow(exports, num_exports + 1);
-  raw_ptr()->set_exports(exports.raw());
+  untag()->set_exports(exports.ptr());
   exports.SetAt(num_exports, ns);
 }
 
@@ -12646,7 +12653,7 @@
   const Array& dict = Array::Handle(Array::New(initial_size + 1, Heap::kOld));
   // The last element of the dictionary specifies the number of in use slots.
   dict.SetAt(initial_size, Object::smi_zero());
-  return dict.raw();
+  return dict.ptr();
 }
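
Aside: as the comment notes, the dictionary array stores its own occupancy count in the final slot, so a single allocation carries both the buckets and the metadata. A tiny sketch of that layout over a plain std::vector:

#include <cassert>
#include <cstdint>
#include <vector>

// Layout: slots [0, n) are hash buckets, slot n is the in-use count.
std::vector<intptr_t> NewDictionary(size_t initial_size) {
  std::vector<intptr_t> dict(initial_size + 1, 0);  // All buckets empty.
  dict[initial_size] = 0;                           // Zero slots in use.
  return dict;
}

void NoteSlotUsed(std::vector<intptr_t>& dict) {
  ++dict.back();  // The trailing slot doubles as the occupancy counter.
}

int main() {
  auto dict = NewDictionary(16);
  assert(dict.size() == 17);
  NoteSlotUsed(dict);
  assert(dict.back() == 1);
  return 0;
}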
 
 void Library::InitResolvedNamesCache() const {
@@ -12655,12 +12662,12 @@
   REUSABLE_FUNCTION_HANDLESCOPE(thread);
   Array& cache = thread->ArrayHandle();
   cache = HashTables::New<ResolvedNamesMap>(64);
-  raw_ptr()->set_resolved_names(cache.raw());
+  untag()->set_resolved_names(cache.ptr());
 }
 
 void Library::ClearResolvedNamesCache() const {
   ASSERT(Thread::Current()->IsMutatorThread());
-  raw_ptr()->set_resolved_names(Array::null());
+  untag()->set_resolved_names(Array::null());
 }
 
 void Library::InitExportedNamesCache() const {
@@ -12669,11 +12676,11 @@
   REUSABLE_FUNCTION_HANDLESCOPE(thread);
   Array& cache = thread->ArrayHandle();
   cache = HashTables::New<ResolvedNamesMap>(16);
-  raw_ptr()->set_exported_names(cache.raw());
+  untag()->set_exported_names(cache.ptr());
 }
 
 void Library::ClearExportedNamesCache() const {
-  raw_ptr()->set_exported_names(Array::null());
+  untag()->set_exported_names(Array::null());
 }
 
 void Library::InitClassDictionary() const {
@@ -12684,14 +12691,14 @@
   // TODO(iposva): Find reasonable initial size.
   const int kInitialElementCount = 16;
   dictionary = NewDictionary(kInitialElementCount);
-  raw_ptr()->set_dictionary(dictionary.raw());
+  untag()->set_dictionary(dictionary.ptr());
 }
 
 void Library::InitImportList() const {
   const Array& imports =
       Array::Handle(Array::New(kInitialImportsCapacity, Heap::kOld));
-  raw_ptr()->set_imports(imports.raw());
-  StoreNonPointer(&raw_ptr()->num_imports_, 0);
+  untag()->set_imports(imports.ptr());
+  StoreNonPointer(&untag()->num_imports_, 0);
 }
 
 LibraryPtr Library::New() {
@@ -12709,21 +12716,21 @@
   url.Hash();
   const bool dart_scheme = url.StartsWith(Symbols::DartScheme());
   const Library& result = Library::Handle(zone, Library::New());
-  result.raw_ptr()->set_name(Symbols::Empty().raw());
-  result.raw_ptr()->set_url(url.raw());
-  result.raw_ptr()->set_resolved_names(Array::null());
-  result.raw_ptr()->set_exported_names(Array::null());
-  result.raw_ptr()->set_dictionary(Object::empty_array().raw());
+  result.untag()->set_name(Symbols::Empty().ptr());
+  result.untag()->set_url(url.ptr());
+  result.untag()->set_resolved_names(Array::null());
+  result.untag()->set_exported_names(Array::null());
+  result.untag()->set_dictionary(Object::empty_array().ptr());
   Array& array = Array::Handle(zone);
   array = HashTables::New<MetadataMap>(4, Heap::kOld);
-  result.raw_ptr()->set_metadata(array.raw());
-  result.raw_ptr()->set_toplevel_class(Class::null());
+  result.untag()->set_metadata(array.ptr());
+  result.untag()->set_toplevel_class(Class::null());
   GrowableObjectArray& list = GrowableObjectArray::Handle(zone);
   list = GrowableObjectArray::New(Object::empty_array(), Heap::kOld);
-  result.raw_ptr()->set_used_scripts(list.raw());
-  result.raw_ptr()->set_imports(Object::empty_array().raw());
-  result.raw_ptr()->set_exports(Object::empty_array().raw());
-  result.raw_ptr()->set_loaded_scripts(Array::null());
+  result.untag()->set_used_scripts(list.ptr());
+  result.untag()->set_imports(Object::empty_array().ptr());
+  result.untag()->set_exports(Object::empty_array().ptr());
+  result.untag()->set_loaded_scripts(Array::null());
   result.set_native_entry_resolver(NULL);
   result.set_native_entry_symbol_resolver(NULL);
   result.set_flags(0);
@@ -12739,9 +12746,9 @@
   }
   result.set_is_dart_scheme(dart_scheme);
   NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
-  result.StoreNonPointer(&result.raw_ptr()->load_state_,
-                         LibraryLayout::kAllocated);
-  result.StoreNonPointer(&result.raw_ptr()->index_, -1);
+  result.StoreNonPointer(&result.untag()->load_state_,
+                         UntaggedLibrary::kAllocated);
+  result.StoreNonPointer(&result.untag()->index_, -1);
   result.InitClassDictionary();
   result.InitImportList();
   result.AllocatePrivateKey();
@@ -12753,7 +12760,7 @@
                                                Object::null_array(), result));
     result.AddImport(ns);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 LibraryPtr Library::New(const String& url) {
@@ -12761,7 +12768,7 @@
 }
 
 void Library::set_flags(uint8_t flags) const {
-  StoreNonPointer(&raw_ptr()->flags_, flags);
+  StoreNonPointer(&untag()->flags_, flags);
 }
 
 void Library::InitCoreLibrary(IsolateGroup* isolate_group) {
@@ -12830,7 +12837,7 @@
         String::Handle(Field::GetterName(getter_name));
     obj = LookupLocalOrReExportObject(internal_getter_name);
     if (obj.IsFunction()) {
-      getter = Function::Cast(obj).raw();
+      getter = Function::Cast(obj).ptr();
       if (check_is_entrypoint) {
         CHECK_ERROR(getter.VerifyCallEntryPoint());
       }
@@ -12841,7 +12848,7 @@
       // exception of "main".
       if (obj.IsFunction() && check_is_entrypoint) {
         if (!getter_name.Equals(String::Handle(String::New("main"))) ||
-            raw() != IsolateGroup::Current()->object_store()->root_library()) {
+            ptr() != IsolateGroup::Current()->object_store()->root_library()) {
           CHECK_ERROR(Function::Cast(obj).VerifyClosurizedEntryPoint());
         }
       }
@@ -12865,7 +12872,7 @@
     // Fall through case: Indicate that we didn't find any function or field
     // using a special null instance. This is different from a field being null.
     // Callers make sure that this null does not leak into Dartland.
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
 
   // Invoke the getter and return the result.
@@ -12903,13 +12910,13 @@
           InvocationMirror::kTopLevel, InvocationMirror::kSetter);
     }
     field.SetStaticValue(value);
-    return value.raw();
+    return value.ptr();
   }
 
   Function& setter = Function::Handle();
   obj = LookupLocalOrReExportObject(internal_setter_name);
   if (obj.IsFunction()) {
-    setter ^= obj.raw();
+    setter ^= obj.ptr();
   }
 
   if (!setter.IsNull() && check_is_entrypoint) {
@@ -12956,7 +12963,7 @@
   auto& result =
       Object::Handle(zone, LookupLocalOrReExportObject(function_name));
   if (result.IsFunction()) {
-    function ^= result.raw();
+    function ^= result.ptr();
   }
 
   if (!function.IsNull() && check_is_entrypoint) {
@@ -12968,7 +12975,7 @@
     const Object& getter_result = Object::Handle(
         zone, InvokeGetter(function_name, false, respect_reflectable,
                            check_is_entrypoint));
-    if (getter_result.raw() != Object::sentinel().raw()) {
+    if (getter_result.ptr() != Object::sentinel().ptr()) {
       if (check_is_entrypoint) {
         CHECK_ERROR(EntryPointFieldInvocationError(function_name));
       }
@@ -13065,7 +13072,7 @@
 
   static uword Hash(const Object& key) { return String::Cast(key).Hash(); }
 
-  static ObjectPtr NewKey(const String& str) { return str.raw(); }
+  static ObjectPtr NewKey(const String& str) { return str.ptr(); }
 };
 typedef UnorderedHashMap<LibraryLookupTraits> LibraryLookupMap;
 
@@ -13098,9 +13105,9 @@
       zone, loader.LoadExpressionEvaluationFunction(library_url, klass));
   kernel_pgm.reset();
 
-  if (result.IsError()) return result.raw();
+  if (result.IsError()) return result.ptr();
 
-  const auto& callee = Function::CheckedHandle(zone, result.raw());
+  const auto& callee = Function::CheckedHandle(zone, result.ptr());
 
   // type_arguments is null if all type arguments are dynamic.
   if (type_definitions.Length() == 0 || type_arguments.IsNull()) {
@@ -13122,7 +13129,7 @@
     result = DartEntry::InvokeFunction(callee, real_arguments, args_desc);
   }
 
-  return result.raw();
+  return result.ptr();
 #endif
 }
 
@@ -13142,9 +13149,9 @@
   } else {
     LibraryLookupMap map(object_store->libraries_map());
     lib ^= map.GetOrNull(url);
-    ASSERT(map.Release().raw() == object_store->libraries_map());
+    ASSERT(map.Release().ptr() == object_store->libraries_map());
   }
-  return lib.raw();
+  return lib.ptr();
 }
 
 bool Library::IsPrivate(const String& name) {
@@ -13176,7 +13183,7 @@
     const String& original_key =
         String::Handle(reload_context->FindLibraryPrivateKey(*this));
     if (!original_key.IsNull()) {
-      raw_ptr()->set_private_key(original_key.raw());
+      untag()->set_private_key(original_key.ptr());
       return;
     }
   }
@@ -13198,7 +13205,7 @@
   const String& key =
       String::Handle(zone, String::New(private_key, Heap::kOld));
   key.Hash();  // This string may end up in the VM isolate.
-  raw_ptr()->set_private_key(key.raw());
+  untag()->set_private_key(key.ptr());
 }
 
 const String& Library::PrivateCoreLibName(const String& member) {
@@ -13220,7 +13227,7 @@
   Thread* thread = Thread::Current();
   Zone* zone = thread->zone();
   const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
-  String& name = String::Handle(zone, class_name.raw());
+  String& name = String::Handle(zone, class_name.ptr());
   if (class_name.CharAt(0) == kPrivateIdentifierStart) {
     // Private identifiers are mangled on a per library basis.
     name = Symbols::FromConcat(thread, name,
@@ -13237,10 +13244,10 @@
   ASSERT(IsPrivate(name));
   // ASSERT(strchr(name, '@') == NULL);
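  // Append this library's private key so the mangled symbol is unique to
  // this library.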
   String& str = String::Handle(zone);
-  str = name.raw();
+  str = name.ptr();
   str = Symbols::FromConcat(thread, str,
                             String::Handle(zone, this->private_key()));
-  return str.raw();
+  return str.ptr();
 }
 
 LibraryPtr Library::GetLibrary(intptr_t index) {
@@ -13253,7 +13260,7 @@
   if ((0 <= index) && (index < libs.Length())) {
     Library& lib = Library::Handle(zone);
     lib ^= libs.At(index);
-    return lib.raw();
+    return lib.ptr();
   }
   return Library::null();
 }
@@ -13414,31 +13421,31 @@
   result.set_name(name);
   result.set_num_imports(0);
   result.set_importer(importer);
-  result.StoreNonPointer(&result.raw_ptr()->is_deferred_load_, deferred_load);
-  result.StoreNonPointer(&result.raw_ptr()->is_loaded_, !deferred_load);
+  result.StoreNonPointer(&result.untag()->is_deferred_load_, deferred_load);
+  result.StoreNonPointer(&result.untag()->is_loaded_, !deferred_load);
   result.set_imports(Array::Handle(Array::New(kInitialSize)));
   result.AddImport(import);
-  return result.raw();
+  return result.ptr();
 }
 
 void LibraryPrefix::set_name(const String& value) const {
   ASSERT(value.IsSymbol());
-  raw_ptr()->set_name(value.raw());
+  untag()->set_name(value.ptr());
 }
 
 void LibraryPrefix::set_imports(const Array& value) const {
-  raw_ptr()->set_imports(value.raw());
+  untag()->set_imports(value.ptr());
 }
 
 void LibraryPrefix::set_num_imports(intptr_t value) const {
   if (!Utils::IsUint(16, value)) {
     ReportTooManyImports(Library::Handle(importer()));
   }
-  StoreNonPointer(&raw_ptr()->num_imports_, value);
+  StoreNonPointer(&untag()->num_imports_, value);
 }
 
 void LibraryPrefix::set_importer(const Library& value) const {
-  raw_ptr()->set_importer(value.raw());
+  untag()->set_importer(value.ptr());
 }
 
 const char* LibraryPrefix::ToCString() const {
@@ -13552,7 +13559,7 @@
   if (obj.IsNull() || HidesName(name) || obj.IsLibraryPrefix()) {
     return Object::null();
   }
-  return obj.raw();
+  return obj.ptr();
 }
 
 NamespacePtr Namespace::New() {
@@ -13569,11 +13576,11 @@
   ASSERT(show_names.IsNull() || (show_names.Length() > 0));
   ASSERT(hide_names.IsNull() || (hide_names.Length() > 0));
   const Namespace& result = Namespace::Handle(Namespace::New());
-  result.raw_ptr()->set_target(target.raw());
-  result.raw_ptr()->set_show_names(show_names.raw());
-  result.raw_ptr()->set_hide_names(hide_names.raw());
-  result.raw_ptr()->set_owner(owner.raw());
-  return result.raw();
+  result.untag()->set_target(target.ptr());
+  result.untag()->set_show_names(show_names.ptr());
+  result.untag()->set_hide_names(hide_names.ptr());
+  result.untag()->set_owner(owner.ptr());
+  return result.ptr();
 }
 
 KernelProgramInfoPtr KernelProgramInfo::New() {
@@ -13597,18 +13604,18 @@
     const uint32_t binary_version) {
   const KernelProgramInfo& info =
       KernelProgramInfo::Handle(KernelProgramInfo::New());
-  info.raw_ptr()->set_string_offsets(string_offsets.raw());
-  info.raw_ptr()->set_string_data(string_data.raw());
-  info.raw_ptr()->set_canonical_names(canonical_names.raw());
-  info.raw_ptr()->set_metadata_payloads(metadata_payloads.raw());
-  info.raw_ptr()->set_metadata_mappings(metadata_mappings.raw());
-  info.raw_ptr()->set_scripts(scripts.raw());
-  info.raw_ptr()->set_constants_table(constants_table.raw());
-  info.raw_ptr()->set_libraries_cache(libraries_cache.raw());
-  info.raw_ptr()->set_classes_cache(classes_cache.raw());
-  info.raw_ptr()->set_retained_kernel_blob(retained_kernel_blob.raw());
+  info.untag()->set_string_offsets(string_offsets.ptr());
+  info.untag()->set_string_data(string_data.ptr());
+  info.untag()->set_canonical_names(canonical_names.ptr());
+  info.untag()->set_metadata_payloads(metadata_payloads.ptr());
+  info.untag()->set_metadata_mappings(metadata_mappings.ptr());
+  info.untag()->set_scripts(scripts.ptr());
+  info.untag()->set_constants_table(constants_table.ptr());
+  info.untag()->set_libraries_cache(libraries_cache.ptr());
+  info.untag()->set_classes_cache(classes_cache.ptr());
+  info.untag()->set_retained_kernel_blob(retained_kernel_blob.ptr());
   info.set_kernel_binary_version(binary_version);
-  return info.raw();
+  return info.ptr();
 }
 
 const char* KernelProgramInfo::ToCString() const {
@@ -13622,34 +13629,34 @@
 }
 
 void KernelProgramInfo::set_scripts(const Array& scripts) const {
-  raw_ptr()->set_scripts(scripts.raw());
+  untag()->set_scripts(scripts.ptr());
 }
 
 void KernelProgramInfo::set_constants(const Array& constants) const {
-  raw_ptr()->set_constants(constants.raw());
+  untag()->set_constants(constants.ptr());
 }
 
 void KernelProgramInfo::set_kernel_binary_version(uint32_t version) const {
-  StoreNonPointer(&raw_ptr()->kernel_binary_version_, version);
+  StoreNonPointer(&untag()->kernel_binary_version_, version);
 }
 
 void KernelProgramInfo::set_constants_table(
     const ExternalTypedData& value) const {
-  raw_ptr()->set_constants_table(value.raw());
+  untag()->set_constants_table(value.ptr());
 }
 
 void KernelProgramInfo::set_potential_natives(
     const GrowableObjectArray& candidates) const {
-  raw_ptr()->set_potential_natives(candidates.raw());
+  untag()->set_potential_natives(candidates.ptr());
 }
 
 void KernelProgramInfo::set_potential_pragma_functions(
     const GrowableObjectArray& candidates) const {
-  raw_ptr()->set_potential_pragma_functions(candidates.raw());
+  untag()->set_potential_pragma_functions(candidates.ptr());
 }
 
 void KernelProgramInfo::set_libraries_cache(const Array& cache) const {
-  raw_ptr()->set_libraries_cache(cache.raw());
+  untag()->set_libraries_cache(cache.ptr());
 }
 
 typedef UnorderedHashMap<SmiTraits> IntHashMap;
@@ -13673,7 +13680,7 @@
     result ^= table.GetOrNull(name_index);
     table.Release();
   }
-  return result.raw();
+  return result.ptr();
 }
 
 LibraryPtr KernelProgramInfo::InsertLibrary(Thread* thread,
@@ -13696,11 +13703,11 @@
     result ^= table.InsertOrGetValue(name_index, lib);
     set_libraries_cache(table.Release());
   }
-  return result.raw();
+  return result.ptr();
 }
 
 void KernelProgramInfo::set_classes_cache(const Array& cache) const {
-  raw_ptr()->set_classes_cache(cache.raw());
+  untag()->set_classes_cache(cache.ptr());
 }
 
 ClassPtr KernelProgramInfo::LookupClass(Thread* thread,
@@ -13722,7 +13729,7 @@
     result ^= table.GetOrNull(name_index);
     table.Release();
   }
-  return result.raw();
+  return result.ptr();
 }
 
 ClassPtr KernelProgramInfo::InsertClass(Thread* thread,
@@ -13745,7 +13752,7 @@
     result ^= table.InsertOrGetValue(name_index, klass);
     set_classes_cache(table.Release());
   }
-  return result.raw();
+  return result.ptr();
 }
 
 ErrorPtr Library::CompileAll(bool ignore_error /* = false */) {
@@ -13764,12 +13771,12 @@
       error = cls.EnsureIsFinalized(thread);
       if (!error.IsNull()) {
         if (ignore_error) continue;
-        return error.raw();
+        return error.ptr();
       }
       error = Compiler::CompileAllFunctions(cls);
       if (!error.IsNull()) {
         if (ignore_error) continue;
-        return error.raw();
+        return error.ptr();
       }
     }
   }
@@ -13779,13 +13786,13 @@
     if (!func.HasCode()) {
       result = Compiler::CompileFunction(thread, func);
       if (result.IsError()) {
-        error = Error::Cast(result).raw();
+        error = Error::Cast(result).ptr();
         return false;  // Stop iteration.
       }
     }
     return true;  // Continue iteration.
   });
-  return error.raw();
+  return error.ptr();
 }
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
@@ -13815,7 +13822,7 @@
       cls = it.GetNextClass();
       error = cls.EnsureIsFinalized(thread);
       if (!error.IsNull()) {
-        return error.raw();
+        return error.ptr();
       }
     }
   }
@@ -13853,7 +13860,7 @@
       }
     }
     if (!func.IsNull()) {
-      return func.raw();
+      return func.ptr();
     }
   }
   return Function::null();
@@ -13867,7 +13874,7 @@
     // Check whether the function is reexported into the library.
     const Object& obj = Object::Handle(zone, LookupReExport(name));
     if (obj.IsFunction()) {
-      func ^= obj.raw();
+      func ^= obj.ptr();
     } else {
       // Check if there is a getter of 'name', in which case invoke it
       // and return the result.
@@ -13988,7 +13995,7 @@
     result.SetHasMonomorphicEntry(has_monomorphic_entry);
     result.set_stats(nullptr);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 const char* Instructions::ToCString() const {
@@ -13998,7 +14005,7 @@
 CodeStatistics* Instructions::stats() const {
 #if defined(DART_PRECOMPILER)
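  // CodeStatistics is attached through the heap's peer table rather than a
  // field on Instructions, so it is only tracked in precompiler builds.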
   return reinterpret_cast<CodeStatistics*>(
-      Thread::Current()->heap()->GetPeer(raw()));
+      Thread::Current()->heap()->GetPeer(ptr()));
 #else
   return nullptr;
 #endif
@@ -14006,7 +14013,7 @@
 
 void Instructions::set_stats(CodeStatistics* stats) const {
 #if defined(DART_PRECOMPILER)
-  Thread::Current()->heap()->SetPeer(raw(), stats);
+  Thread::Current()->heap()->SetPeer(ptr(), stats);
 #endif
 }
 
@@ -14033,7 +14040,7 @@
     }
   }
 
-  return result.raw();
+  return result.ptr();
 }
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
@@ -14041,7 +14048,7 @@
     const compiler::ObjectPoolBuilder& builder) {
   const intptr_t len = builder.CurrentLength();
   if (len == 0) {
-    return Object::empty_object_pool().raw();
+    return Object::empty_object_pool().ptr();
   }
   const ObjectPool& result = ObjectPool::Handle(ObjectPool::New(len));
   for (intptr_t i = 0; i < len; i++) {
@@ -14055,7 +14062,7 @@
       result.SetRawValueAt(i, entry.raw_value_);
     }
   }
-  return result.raw();
+  return result.ptr();
 }
 
 void ObjectPool::CopyInto(compiler::ObjectPoolBuilder* builder) const {
@@ -14118,16 +14125,16 @@
 }
 
 intptr_t PcDescriptors::Length() const {
-  return raw_ptr()->length_;
+  return untag()->length_;
 }
 
 void PcDescriptors::SetLength(intptr_t value) const {
-  StoreNonPointer(&raw_ptr()->length_, value);
+  StoreNonPointer(&untag()->length_, value);
 }
 
 void PcDescriptors::CopyData(const void* bytes, intptr_t size) {
   NoSafepointScope no_safepoint;
-  uint8_t* data = UnsafeMutableNonPointer(&raw_ptr()->data()[0]);
+  uint8_t* data = UnsafeMutableNonPointer(&untag()->data()[0]);
  // We're guaranteed these memory spaces do not overlap.
   memcpy(data, bytes, size);  // NOLINT
 }
@@ -14145,7 +14152,7 @@
     result.SetLength(size);
     result.CopyData(delta_encoded_data, size);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 PcDescriptorsPtr PcDescriptors::New(intptr_t length) {
@@ -14159,28 +14166,28 @@
     result ^= raw;
     result.SetLength(length);
   }
-  return result.raw();
+  return result.ptr();
 }
 
-const char* PcDescriptors::KindAsStr(PcDescriptorsLayout::Kind kind) {
+const char* PcDescriptors::KindAsStr(UntaggedPcDescriptors::Kind kind) {
   switch (kind) {
-    case PcDescriptorsLayout::kDeopt:
+    case UntaggedPcDescriptors::kDeopt:
       return "deopt        ";
-    case PcDescriptorsLayout::kIcCall:
+    case UntaggedPcDescriptors::kIcCall:
       return "ic-call      ";
-    case PcDescriptorsLayout::kUnoptStaticCall:
+    case UntaggedPcDescriptors::kUnoptStaticCall:
       return "unopt-call   ";
-    case PcDescriptorsLayout::kRuntimeCall:
+    case UntaggedPcDescriptors::kRuntimeCall:
       return "runtime-call ";
-    case PcDescriptorsLayout::kOsrEntry:
+    case UntaggedPcDescriptors::kOsrEntry:
       return "osr-entry    ";
-    case PcDescriptorsLayout::kRewind:
+    case UntaggedPcDescriptors::kRewind:
       return "rewind       ";
-    case PcDescriptorsLayout::kBSSRelocation:
+    case UntaggedPcDescriptors::kBSSRelocation:
       return "bss reloc    ";
-    case PcDescriptorsLayout::kOther:
+    case UntaggedPcDescriptors::kOther:
       return "other        ";
-    case PcDescriptorsLayout::kAnyKind:
+    case UntaggedPcDescriptors::kAnyKind:
       UNREACHABLE();
       break;
   }
@@ -14209,7 +14216,7 @@
   // First compute the buffer size required.
   intptr_t len = 1;  // Trailing '\0'.
   {
-    Iterator iter(*this, PcDescriptorsLayout::kAnyKind);
+    Iterator iter(*this, UntaggedPcDescriptors::kAnyKind);
     while (iter.MoveNext()) {
       len += Utils::SNPrint(NULL, 0, FORMAT, addr_width, iter.PcOffset(),
                             KindAsStr(iter.Kind()), iter.DeoptId(),
@@ -14221,7 +14228,7 @@
   char* buffer = Thread::Current()->zone()->Alloc<char>(len);
  // Lay out the fields in the buffer.
   intptr_t index = 0;
-  Iterator iter(*this, PcDescriptorsLayout::kAnyKind);
+  Iterator iter(*this, UntaggedPcDescriptors::kAnyKind);
   while (iter.MoveNext()) {
     index += Utils::SNPrint((buffer + index), (len - index), FORMAT, addr_width,
                             iter.PcOffset(), KindAsStr(iter.Kind()),
@@ -14244,8 +14251,8 @@
     return;
   }
   intptr_t max_deopt_id = 0;
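  // Kind values are single bits, so several kinds can be OR'ed together
  // into one filter mask for the iterator.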
-  Iterator max_iter(*this,
-                    PcDescriptorsLayout::kDeopt | PcDescriptorsLayout::kIcCall);
+  Iterator max_iter(
+      *this, UntaggedPcDescriptors::kDeopt | UntaggedPcDescriptors::kIcCall);
   while (max_iter.MoveNext()) {
     if (max_iter.DeoptId() > max_deopt_id) {
       max_deopt_id = max_iter.DeoptId();
@@ -14256,7 +14263,7 @@
   BitVector* deopt_ids = new (zone) BitVector(zone, max_deopt_id + 1);
   BitVector* iccall_ids = new (zone) BitVector(zone, max_deopt_id + 1);
   Iterator iter(*this,
-                PcDescriptorsLayout::kDeopt | PcDescriptorsLayout::kIcCall);
+                UntaggedPcDescriptors::kDeopt | UntaggedPcDescriptors::kIcCall);
   while (iter.MoveNext()) {
    // 'deopt_id' is set for kDeopt and kIcCall entries and must be unique
    // within each kind.
     if (DeoptId::IsDeoptAfter(iter.DeoptId())) {
@@ -14265,7 +14272,7 @@
       // lead to issues in the future. Fix that and enable verification.
       continue;
     }
-    if (iter.Kind() == PcDescriptorsLayout::kDeopt) {
+    if (iter.Kind() == UntaggedPcDescriptors::kDeopt) {
       ASSERT(!deopt_ids->Contains(iter.DeoptId()));
       deopt_ids->Add(iter.DeoptId());
     } else {
@@ -14277,7 +14284,7 @@
 }
 
 void CodeSourceMap::SetLength(intptr_t value) const {
-  StoreNonPointer(&raw_ptr()->length_, value);
+  StoreNonPointer(&untag()->length_, value);
 }
 
 CodeSourceMapPtr CodeSourceMap::New(intptr_t length) {
@@ -14291,7 +14298,7 @@
     result ^= raw;
     result.SetLength(length);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 const char* CodeSourceMap::ToCString() const {
@@ -14300,7 +14307,7 @@
 
 intptr_t CompressedStackMaps::Hashcode() const {
   NoSafepointScope scope;
-  uint8_t* data = UnsafeMutableNonPointer(&raw_ptr()->data()[0]);
+  uint8_t* data = UnsafeMutableNonPointer(&untag()->data()[0]);
   uint8_t* end = data + payload_size();
   uint32_t hash = payload_size();
   for (uint8_t* cursor = data; cursor < end; cursor++) {
@@ -14347,8 +14354,7 @@
   }
 
   NoSafepointScope scope;
-  ReadStream stream(maps_.raw_ptr()->data(), maps_.payload_size(),
-                    next_offset_);
+  ReadStream stream(maps_.untag()->data(), maps_.payload_size(), next_offset_);
 
   auto const pc_delta = stream.ReadLEB128();
   ASSERT(pc_delta <= (kMaxUint32 - current_pc_offset_));
@@ -14407,7 +14413,7 @@
   uint8_t byte_mask = 1U << bit_remainder;
   const intptr_t byte_offset = current_bits_offset_ + byte_index;
   NoSafepointScope scope;
-  return (bits_container_.raw_ptr()->data()[byte_offset] & byte_mask) != 0;
+  return (bits_container_.untag()->data()[byte_offset] & byte_mask) != 0;
 }
 
 void CompressedStackMaps::Iterator::LazyLoadGlobalTableEntry() const {
@@ -14416,7 +14422,7 @@
   ASSERT(current_global_table_offset_ < bits_container_.payload_size());
 
   NoSafepointScope scope;
-  ReadStream stream(bits_container_.raw_ptr()->data(),
+  ReadStream stream(bits_container_.untag()->data(),
                     bits_container_.payload_size(),
                     current_global_table_offset_);
 
@@ -14465,7 +14471,7 @@
   // The canonical empty instance should be used instead.
   ASSERT(size != 0);
 
-  if (!CompressedStackMapsLayout::SizeField::is_valid(size)) {
+  if (!UntaggedCompressedStackMaps::SizeField::is_valid(size)) {
     FATAL1(
         "Fatal error in CompressedStackMaps::New: "
         "invalid payload size %" Pu "\n",
@@ -14482,17 +14488,18 @@
     NoSafepointScope no_safepoint;
     result ^= raw;
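    // Pack the two table flags and the payload size into the single
    // flags_and_size_ word.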
     result.StoreNonPointer(
-        &result.raw_ptr()->flags_and_size_,
-        CompressedStackMapsLayout::GlobalTableBit::encode(is_global_table) |
-            CompressedStackMapsLayout::UsesTableBit::encode(uses_global_table) |
-            CompressedStackMapsLayout::SizeField::encode(size));
-    auto cursor = result.UnsafeMutableNonPointer(result.raw_ptr()->data());
+        &result.untag()->flags_and_size_,
+        UntaggedCompressedStackMaps::GlobalTableBit::encode(is_global_table) |
+            UntaggedCompressedStackMaps::UsesTableBit::encode(
+                uses_global_table) |
+            UntaggedCompressedStackMaps::SizeField::encode(size));
+    auto cursor = result.UnsafeMutableNonPointer(result.untag()->data());
     memcpy(cursor, payload, size);  // NOLINT
   }
 
   ASSERT(!result.IsGlobalTable() || !result.UsesGlobalTable());
 
-  return result.raw();
+  return result.ptr();
 }
 
 const char* CompressedStackMaps::ToCString() const {
@@ -14511,35 +14518,35 @@
 
 StringPtr LocalVarDescriptors::GetName(intptr_t var_index) const {
   ASSERT(var_index < Length());
-  ASSERT(Object::Handle(*raw()->ptr()->nameAddrAt(var_index)).IsString());
-  return *raw()->ptr()->nameAddrAt(var_index);
+  ASSERT(Object::Handle(*ptr()->untag()->nameAddrAt(var_index)).IsString());
+  return *ptr()->untag()->nameAddrAt(var_index);
 }
 
 void LocalVarDescriptors::SetVar(
     intptr_t var_index,
     const String& name,
-    LocalVarDescriptorsLayout::VarInfo* info) const {
+    UntaggedLocalVarDescriptors::VarInfo* info) const {
   ASSERT(var_index < Length());
   ASSERT(!name.IsNull());
-  StorePointer(raw()->ptr()->nameAddrAt(var_index), name.raw());
-  raw()->ptr()->data()[var_index] = *info;
+  StorePointer(ptr()->untag()->nameAddrAt(var_index), name.ptr());
+  ptr()->untag()->data()[var_index] = *info;
 }
 
 void LocalVarDescriptors::GetInfo(
     intptr_t var_index,
-    LocalVarDescriptorsLayout::VarInfo* info) const {
+    UntaggedLocalVarDescriptors::VarInfo* info) const {
   ASSERT(var_index < Length());
-  *info = raw()->ptr()->data()[var_index];
+  *info = ptr()->untag()->data()[var_index];
 }
 
 static int PrintVarInfo(char* buffer,
                         int len,
                         intptr_t i,
                         const String& var_name,
-                        const LocalVarDescriptorsLayout::VarInfo& info) {
-  const LocalVarDescriptorsLayout::VarInfoKind kind = info.kind();
+                        const UntaggedLocalVarDescriptors::VarInfo& info) {
+  const UntaggedLocalVarDescriptors::VarInfoKind kind = info.kind();
   const int32_t index = info.index();
-  if (kind == LocalVarDescriptorsLayout::kContextLevel) {
+  if (kind == UntaggedLocalVarDescriptors::kContextLevel) {
     return Utils::SNPrint(buffer, len,
                           "%2" Pd
                           " %-13s level=%-3d"
@@ -14547,7 +14554,7 @@
                           i, LocalVarDescriptors::KindToCString(kind), index,
                           static_cast<int>(info.begin_pos.Pos()),
                           static_cast<int>(info.end_pos.Pos()));
-  } else if (kind == LocalVarDescriptorsLayout::kContextVar) {
+  } else if (kind == UntaggedLocalVarDescriptors::kContextVar) {
     return Utils::SNPrint(
         buffer, len,
         "%2" Pd
@@ -14578,7 +14585,7 @@
   intptr_t len = 1;  // Trailing '\0'.
   String& var_name = String::Handle();
   for (intptr_t i = 0; i < Length(); i++) {
-    LocalVarDescriptorsLayout::VarInfo info;
+    UntaggedLocalVarDescriptors::VarInfo info;
     var_name = GetName(i);
     GetInfo(i, &info);
     len += PrintVarInfo(NULL, 0, i, var_name, info);
@@ -14587,7 +14594,7 @@
   buffer[0] = '\0';
   intptr_t num_chars = 0;
   for (intptr_t i = 0; i < Length(); i++) {
-    LocalVarDescriptorsLayout::VarInfo info;
+    UntaggedLocalVarDescriptors::VarInfo info;
     var_name = GetName(i);
     GetInfo(i, &info);
     num_chars += PrintVarInfo((buffer + num_chars), (len - num_chars), i,
@@ -14597,15 +14604,15 @@
 }
 
 const char* LocalVarDescriptors::KindToCString(
-    LocalVarDescriptorsLayout::VarInfoKind kind) {
+    UntaggedLocalVarDescriptors::VarInfoKind kind) {
   switch (kind) {
-    case LocalVarDescriptorsLayout::kStackVar:
+    case UntaggedLocalVarDescriptors::kStackVar:
       return "StackVar";
-    case LocalVarDescriptorsLayout::kContextVar:
+    case UntaggedLocalVarDescriptors::kContextVar:
       return "ContextVar";
-    case LocalVarDescriptorsLayout::kContextLevel:
+    case UntaggedLocalVarDescriptors::kContextLevel:
       return "ContextLevel";
-    case LocalVarDescriptorsLayout::kSavedCurrentContext:
+    case UntaggedLocalVarDescriptors::kSavedCurrentContext:
       return "CurrentCtx";
     default:
       UNIMPLEMENTED();
@@ -14620,7 +14627,7 @@
     FATAL2(
         "Fatal error in LocalVarDescriptors::New: "
         "invalid num_variables %" Pd ". Maximum is: %d\n",
-        num_variables, LocalVarDescriptorsLayout::kMaxIndex);
+        num_variables, UntaggedLocalVarDescriptors::kMaxIndex);
   }
   LocalVarDescriptors& result = LocalVarDescriptors::Handle();
   {
@@ -14629,17 +14636,17 @@
         Object::Allocate(LocalVarDescriptors::kClassId, size, Heap::kOld);
     NoSafepointScope no_safepoint;
     result ^= raw;
-    result.StoreNonPointer(&result.raw_ptr()->num_entries_, num_variables);
+    result.StoreNonPointer(&result.untag()->num_entries_, num_variables);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 intptr_t LocalVarDescriptors::Length() const {
-  return raw_ptr()->num_entries_;
+  return untag()->num_entries_;
 }
 
 intptr_t ExceptionHandlers::num_entries() const {
-  return raw_ptr()->num_entries_;
+  return untag()->num_entries_;
 }
 
 void ExceptionHandlers::SetHandlerInfo(intptr_t try_index,
@@ -14651,7 +14658,7 @@
   ASSERT((try_index >= 0) && (try_index < num_entries()));
   NoSafepointScope no_safepoint;
   ExceptionHandlerInfo* info =
-      UnsafeMutableNonPointer(&raw_ptr()->data()[try_index]);
+      UnsafeMutableNonPointer(&untag()->data()[try_index]);
   info->outer_try_index = outer_try_index;
  // Some C compilers warn that the comparison is always true when using <=
  // due to the limited range of the data type.
@@ -14667,32 +14674,32 @@
                                        ExceptionHandlerInfo* info) const {
   ASSERT((try_index >= 0) && (try_index < num_entries()));
   ASSERT(info != NULL);
-  *info = raw_ptr()->data()[try_index];
+  *info = untag()->data()[try_index];
 }
 
 uword ExceptionHandlers::HandlerPCOffset(intptr_t try_index) const {
   ASSERT((try_index >= 0) && (try_index < num_entries()));
-  return raw_ptr()->data()[try_index].handler_pc_offset;
+  return untag()->data()[try_index].handler_pc_offset;
 }
 
 intptr_t ExceptionHandlers::OuterTryIndex(intptr_t try_index) const {
   ASSERT((try_index >= 0) && (try_index < num_entries()));
-  return raw_ptr()->data()[try_index].outer_try_index;
+  return untag()->data()[try_index].outer_try_index;
 }
 
 bool ExceptionHandlers::NeedsStackTrace(intptr_t try_index) const {
   ASSERT((try_index >= 0) && (try_index < num_entries()));
-  return raw_ptr()->data()[try_index].needs_stacktrace != 0;
+  return untag()->data()[try_index].needs_stacktrace != 0;
 }
 
 bool ExceptionHandlers::IsGenerated(intptr_t try_index) const {
   ASSERT((try_index >= 0) && (try_index < num_entries()));
-  return raw_ptr()->data()[try_index].is_generated != 0;
+  return untag()->data()[try_index].is_generated != 0;
 }
 
 bool ExceptionHandlers::HasCatchAll(intptr_t try_index) const {
   ASSERT((try_index >= 0) && (try_index < num_entries()));
-  return raw_ptr()->data()[try_index].has_catch_all != 0;
+  return untag()->data()[try_index].has_catch_all != 0;
 }
 
 void ExceptionHandlers::SetHandledTypes(intptr_t try_index,
@@ -14700,19 +14707,19 @@
   ASSERT((try_index >= 0) && (try_index < num_entries()));
   ASSERT(!handled_types.IsNull());
   const Array& handled_types_data =
-      Array::Handle(raw_ptr()->handled_types_data());
+      Array::Handle(untag()->handled_types_data());
   handled_types_data.SetAt(try_index, handled_types);
 }
 
 ArrayPtr ExceptionHandlers::GetHandledTypes(intptr_t try_index) const {
   ASSERT((try_index >= 0) && (try_index < num_entries()));
-  Array& array = Array::Handle(raw_ptr()->handled_types_data());
+  Array& array = Array::Handle(untag()->handled_types_data());
   array ^= array.At(try_index);
-  return array.raw();
+  return array.ptr();
 }
 
 void ExceptionHandlers::set_handled_types_data(const Array& value) const {
-  raw_ptr()->set_handled_types_data(value.raw());
+  untag()->set_handled_types_data(value.ptr());
 }
 
 ExceptionHandlersPtr ExceptionHandlers::New(intptr_t num_handlers) {
@@ -14730,13 +14737,13 @@
         Object::Allocate(ExceptionHandlers::kClassId, size, Heap::kOld);
     NoSafepointScope no_safepoint;
     result ^= raw;
-    result.StoreNonPointer(&result.raw_ptr()->num_entries_, num_handlers);
+    result.StoreNonPointer(&result.untag()->num_entries_, num_handlers);
   }
   const Array& handled_types_data =
       (num_handlers == 0) ? Object::empty_array()
                           : Array::Handle(Array::New(num_handlers, Heap::kOld));
   result.set_handled_types_data(handled_types_data);
-  return result.raw();
+  return result.ptr();
 }
 
 ExceptionHandlersPtr ExceptionHandlers::New(const Array& handled_types_data) {
@@ -14755,10 +14762,10 @@
         Object::Allocate(ExceptionHandlers::kClassId, size, Heap::kOld);
     NoSafepointScope no_safepoint;
     result ^= raw;
-    result.StoreNonPointer(&result.raw_ptr()->num_entries_, num_handlers);
+    result.StoreNonPointer(&result.untag()->num_entries_, num_handlers);
   }
   result.set_handled_types_data(handled_types_data);
-  return result.raw();
+  return result.ptr();
 }
 
 const char* ExceptionHandlers::ToCString() const {
@@ -14814,7 +14821,7 @@
 }
 
 void SingleTargetCache::set_target(const Code& value) const {
-  raw_ptr()->set_target(value.raw());
+  untag()->set_target(value.ptr());
 }
 
 const char* SingleTargetCache::ToCString() const {
@@ -14835,11 +14842,11 @@
   result.set_entry_point(0);
   result.set_lower_limit(kIllegalCid);
   result.set_upper_limit(kIllegalCid);
-  return result.raw();
+  return result.ptr();
 }
 
 void UnlinkedCall::set_can_patch_to_monomorphic(bool value) const {
-  StoreNonPointer(&raw_ptr()->can_patch_to_monomorphic_, value);
+  StoreNonPointer(&untag()->can_patch_to_monomorphic_, value);
 }
 
 intptr_t UnlinkedCall::Hashcode() const {
@@ -14861,7 +14868,7 @@
   result ^= Object::Allocate(UnlinkedCall::kClassId,
                              UnlinkedCall::InstanceSize(), Heap::kOld);
   result.set_can_patch_to_monomorphic(!FLAG_precompiled_mode);
-  return result.raw();
+  return result.ptr();
 }
 
 MonomorphicSmiableCallPtr MonomorphicSmiableCall::New(classid_t expected_cid,
@@ -14870,10 +14877,10 @@
   result ^=
       Object::Allocate(MonomorphicSmiableCall::kClassId,
                        MonomorphicSmiableCall::InstanceSize(), Heap::kOld);
-  result.raw_ptr()->set_target(target.raw());
-  result.StoreNonPointer(&result.raw_ptr()->expected_cid_, expected_cid);
-  result.StoreNonPointer(&result.raw_ptr()->entrypoint_, target.EntryPoint());
-  return result.raw();
+  result.untag()->set_target(target.ptr());
+  result.StoreNonPointer(&result.untag()->expected_cid_, expected_cid);
+  result.StoreNonPointer(&result.untag()->entrypoint_, target.EntryPoint());
+  return result.ptr();
 }
 
 const char* MonomorphicSmiableCall::ToCString() const {
@@ -14888,17 +14895,17 @@
 
 void CallSiteData::set_target_name(const String& value) const {
   ASSERT(!value.IsNull());
-  raw_ptr()->set_target_name(value.raw());
+  untag()->set_target_name(value.ptr());
 }
 
 void CallSiteData::set_arguments_descriptor(const Array& value) const {
   ASSERT(!value.IsNull());
-  raw_ptr()->set_args_descriptor(value.raw());
+  untag()->set_args_descriptor(value.ptr());
 }
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
 void ICData::SetReceiversStaticType(const AbstractType& type) const {
-  raw_ptr()->set_receivers_static_type(type.raw());
+  untag()->set_receivers_static_type(type.ptr());
 
 #if defined(TARGET_ARCH_X64)
   if (!type.IsNull() && type.HasTypeClass() && (NumArgsTested() == 1) &&
@@ -14942,15 +14949,15 @@
 }
 
 FunctionPtr ICData::Owner() const {
-  Object& obj = Object::Handle(raw_ptr()->owner());
+  Object& obj = Object::Handle(untag()->owner());
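  // The owner slot holds either the owning Function or, for a clone, the
  // original ICData; recurse until the Function is reached.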
   if (obj.IsNull()) {
     ASSERT(Dart::vm_snapshot_kind() == Snapshot::kFullAOT);
     return Function::null();
   } else if (obj.IsFunction()) {
-    return Function::Cast(obj).raw();
+    return Function::Cast(obj).ptr();
   } else {
     ICData& original = ICData::Handle();
-    original ^= obj.raw();
+    original ^= obj.ptr();
     return original.Owner();
   }
 }
@@ -14959,22 +14966,22 @@
   if (IsNull()) {
     return ICData::null();
   }
-  Object& obj = Object::Handle(raw_ptr()->owner());
+  Object& obj = Object::Handle(untag()->owner());
   if (obj.IsFunction()) {
-    return this->raw();
+    return this->ptr();
   } else {
-    return ICData::RawCast(obj.raw());
+    return ICData::RawCast(obj.ptr());
   }
 }
 
 void ICData::SetOriginal(const ICData& value) const {
   ASSERT(value.IsOriginal());
   ASSERT(!value.IsNull());
-  raw_ptr()->set_owner(static_cast<ObjectPtr>(value.raw()));
+  untag()->set_owner(static_cast<ObjectPtr>(value.ptr()));
 }
 
 void ICData::set_owner(const Function& value) const {
-  raw_ptr()->set_owner(static_cast<ObjectPtr>(value.raw()));
+  untag()->set_owner(static_cast<ObjectPtr>(value.ptr()));
 }
 
 void ICData::set_deopt_id(intptr_t value) const {
@@ -14982,23 +14989,23 @@
   UNREACHABLE();
 #else
   ASSERT(value <= kMaxInt32);
-  StoreNonPointer(&raw_ptr()->deopt_id_, value);
+  StoreNonPointer(&untag()->deopt_id_, value);
 #endif
 }
 
 void ICData::set_entries(const Array& value) const {
   ASSERT(!value.IsNull());
-  raw_ptr()->set_entries<std::memory_order_release>(value.raw());
+  untag()->set_entries<std::memory_order_release>(value.ptr());
 }
 
 intptr_t ICData::NumArgsTested() const {
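  // state_bits_ packs several small fields (args tested, deopt reasons,
  // rebind rule, ...); each accessor decodes its BitField, and updates
  // re-encode the whole word.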
-  return NumArgsTestedBits::decode(raw_ptr()->state_bits_);
+  return NumArgsTestedBits::decode(untag()->state_bits_);
 }
 
 void ICData::SetNumArgsTested(intptr_t value) const {
   ASSERT(Utils::IsUint(2, value));
-  StoreNonPointer(&raw_ptr()->state_bits_,
-                  NumArgsTestedBits::update(value, raw_ptr()->state_bits_));
+  StoreNonPointer(&untag()->state_bits_,
+                  NumArgsTestedBits::update(value, untag()->state_bits_));
 }
 
 intptr_t CallSiteData::TypeArgsLen() const {
@@ -15027,12 +15034,12 @@
 }
 
 uint32_t ICData::DeoptReasons() const {
-  return DeoptReasonBits::decode(raw_ptr()->state_bits_);
+  return DeoptReasonBits::decode(untag()->state_bits_);
 }
 
 void ICData::SetDeoptReasons(uint32_t reasons) const {
-  StoreNonPointer(&raw_ptr()->state_bits_,
-                  DeoptReasonBits::update(reasons, raw_ptr()->state_bits_));
+  StoreNonPointer(&untag()->state_bits_,
+                  DeoptReasonBits::update(reasons, untag()->state_bits_));
 }
 
 bool ICData::HasDeoptReason(DeoptReasonId reason) const {
@@ -15070,12 +15077,12 @@
 }
 
 ICData::RebindRule ICData::rebind_rule() const {
-  return (ICData::RebindRule)RebindRuleBits::decode(raw_ptr()->state_bits_);
+  return (ICData::RebindRule)RebindRuleBits::decode(untag()->state_bits_);
 }
 
 void ICData::set_rebind_rule(uint32_t rebind_rule) const {
-  StoreNonPointer(&raw_ptr()->state_bits_,
-                  RebindRuleBits::update(rebind_rule, raw_ptr()->state_bits_));
+  StoreNonPointer(&untag()->state_bits_,
+                  RebindRuleBits::update(rebind_rule, untag()->state_bits_));
 }
 
 bool ICData::is_static_call() const {
@@ -15083,7 +15090,7 @@
 }
 
 void ICData::set_state_bits(uint32_t bits) const {
-  StoreNonPointer(&raw_ptr()->state_bits_, bits);
+  StoreNonPointer(&untag()->state_bits_, bits);
 }
 
 intptr_t ICData::TestEntryLengthFor(intptr_t num_args,
@@ -15097,7 +15104,7 @@
 }
 
 intptr_t ICData::Length() const {
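  // The backing array is a flat sequence of fixed-size test entries, so
  // the number of entries is the array length divided by the entry length.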
-  return (Smi::Value(entries()->ptr()->length()) / TestEntryLength());
+  return (Smi::Value(entries()->untag()->length()) / TestEntryLength());
 }
 
 intptr_t ICData::NumberOfChecks() const {
@@ -15260,11 +15267,11 @@
   }
 #endif
   ObjectStore* store = IsolateGroup::Current()->object_store();
-  ASSERT((target.raw() == store->simple_instance_of_true_function()) ||
-         (target.raw() == store->simple_instance_of_false_function()));
+  ASSERT((target.ptr() == store->simple_instance_of_true_function()) ||
+         (target.ptr() == store->simple_instance_of_false_function()));
   const String& instance_of_name = String::Handle(
-      Library::PrivateCoreLibName(Symbols::_simpleInstanceOf()).raw());
-  ASSERT(target_name() == instance_of_name.raw());
+      Library::PrivateCoreLibName(Symbols::_simpleInstanceOf()).ptr());
+  ASSERT(target_name() == instance_of_name.ptr());
   return true;
 }
 
@@ -15308,7 +15315,7 @@
         Smi::Value(Smi::RawCast(data.At(0))) == kObjectCid &&
         Smi::Value(Smi::RawCast(data.At(1))) == kObjectCid;
     if (has_dummy_entry) {
-      ASSERT(target.raw() == data.At(TargetIndexFor(num_args_tested)));
+      ASSERT(target.ptr() == data.At(TargetIndexFor(num_args_tested)));
       // Replace dummy entry.
       Smi& value = Smi::Handle();
       for (intptr_t i = 0; i < NumArgsTested(); i++) {
@@ -15350,7 +15357,7 @@
   const intptr_t new_len = data.Length() + TestEntryLength();
   data = Array::Grow(data, new_len, Heap::kOld);
   WriteSentinel(data, TestEntryLength());
-  return data.raw();
+  return data.ptr();
 }
 
 void ICData::DebugDump() const {
@@ -15478,7 +15485,7 @@
   const intptr_t entry_length = TestEntryLength();
   intptr_t data_pos = index * TestEntryLength();
   for (intptr_t i = 0; i < entry_length; i++) {
-    if (data.At(data_pos++) != smi_illegal_cid().raw()) {
+    if (data.At(data_pos++) != smi_illegal_cid().ptr()) {
       return false;
     }
   }
@@ -15539,7 +15546,7 @@
   const intptr_t data_pos = index * TestEntryLength();
   NoSafepointScope no_safepoint;
   ArrayPtr raw_data = entries();
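  // Inside the NoSafepointScope the GC cannot run, so the raw ArrayPtr can
  // be read directly without a handle.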
-  return Smi::Value(Smi::RawCast(raw_data->ptr()->data()[data_pos]));
+  return Smi::Value(Smi::RawCast(raw_data->untag()->data()[data_pos]));
 }
 
 FunctionPtr ICData::GetTargetAt(intptr_t index) const {
@@ -15553,7 +15560,7 @@
 
   NoSafepointScope no_safepoint;
   ArrayPtr raw_data = entries();
-  return static_cast<FunctionPtr>(raw_data->ptr()->data()[data_pos]);
+  return static_cast<FunctionPtr>(raw_data->untag()->data()[data_pos]);
 #endif
 }
 
@@ -15613,7 +15620,7 @@
   ASSERT(NumArgsTested() > arg_nr);
   if ((arg_nr == 0) && (NumArgsTested() == 1)) {
     // Frequent case.
-    return raw();
+    return ptr();
   }
   const intptr_t kNumArgsTested = 1;
   ICData& result = ICData::Handle(ICData::NewFrom(*this, kNumArgsTested));
@@ -15645,7 +15652,7 @@
     }
   }
 
-  return result.raw();
+  return result.ptr();
 }
 
 // (cid, count) tuple used to sort ICData by count.
@@ -15716,7 +15723,7 @@
   WriteSentinel(data, result.TestEntryLength());
   result.set_entries(data);
   ASSERT(result.NumberOfChecksIs(aggregate.length()));
-  return result.raw();
+  return result.ptr();
 }
 
 UnlinkedCallPtr ICData::AsUnlinkedCall() const {
@@ -15727,7 +15734,7 @@
   result.set_arguments_descriptor(Array::Handle(arguments_descriptor()));
   result.set_can_patch_to_monomorphic(!FLAG_precompiled_mode ||
                                       receiver_cannot_be_smi());
-  return result.raw();
+  return result.ptr();
 }
 
 bool ICData::HasReceiverClassId(intptr_t class_id) const {
@@ -15785,7 +15792,7 @@
   const Array& array = Array::Handle(Array::New(len, Heap::kOld));
   WriteSentinel(array, len);
   array.MakeImmutable();
-  return array.raw();
+  return array.ptr();
 }
 
 ArrayPtr ICData::CachedEmptyICDataArray(intptr_t num_args_tested,
@@ -15837,7 +15844,7 @@
   result.set_rebind_rule(rebind_rule);
   result.SetNumArgsTested(num_args_tested);
   NOT_IN_PRECOMPILED(result.SetReceiversStaticType(receivers_static_type));
-  return result.raw();
+  return result.ptr();
 }
 
 bool ICData::IsImmutable() const {
@@ -15855,7 +15862,7 @@
   }
   result.set_deopt_id(DeoptId::kNone);
   result.set_state_bits(0);
-  return result.raw();
+  return result.ptr();
 }
 
 ICDataPtr ICData::New(const Function& owner,
@@ -15873,7 +15880,7 @@
   result.set_entries(Array::Handle(
       zone,
       CachedEmptyICDataArray(num_args_tested, result.is_tracking_exactness())));
-  return result.raw();
+  return result.ptr();
 }
 
 ICDataPtr ICData::NewWithCheck(const Function& owner,
@@ -15914,7 +15921,7 @@
 
   result.set_entries(array);
 
-  return result.raw();
+  return result.ptr();
 }
 
 ICDataPtr ICData::NewForStaticCall(const Function& owner,
@@ -15952,7 +15959,7 @@
   // Copy deoptimization reasons.
   result.SetDeoptReasons(from.DeoptReasons());
   result.set_is_megamorphic(is_megamorphic);
-  return result.raw();
+  return result.ptr();
 }
 
 ICDataPtr ICData::Clone(const ICData& from) {
@@ -15990,7 +15997,7 @@
   RELEASE_ASSERT(!is_megamorphic ||
                  result.NumberOfChecks() >= FLAG_max_polymorphic_checks);
 
-  return result.raw();
+  return result.ptr();
 }
 #endif
 
@@ -16008,11 +16015,11 @@
   // snapshots), non-heap objects, and WSRs (as there is no point in deeply
   // nesting them). We also only wrap objects in the precompiler.
   return FLAG_precompiled_mode && !object.IsNull() &&
-         object.raw()->IsHeapObject() && !object.IsWeakSerializationReference();
+         object.ptr()->IsHeapObject() && !object.IsWeakSerializationReference();
 }
 
 ObjectPtr WeakSerializationReference::Wrap(Zone* zone, const Object& target) {
-  if (!CanWrap(target)) return target.raw();
+  if (!CanWrap(target)) return target.ptr();
   ASSERT(Object::weak_serialization_reference_class() != Class::null());
   WeakSerializationReference& result = WeakSerializationReference::Handle(zone);
   {
@@ -16022,9 +16029,9 @@
     NoSafepointScope no_safepoint;
 
     result ^= raw;
-    result.raw_ptr()->set_target(target.raw());
+    result.untag()->set_target(target.ptr());
   }
-  return result.raw();
+  return result.ptr();
 }
 #endif
 
@@ -16123,49 +16130,49 @@
   ASSERT(unwrapped_owner.IsFunction() || unwrapped_owner.IsClass() ||
          unwrapped_owner.IsAbstractType());
 #endif
-  raw_ptr()->set_owner(owner.raw());
+  untag()->set_owner(owner.ptr());
 }
 
 void Code::set_state_bits(intptr_t bits) const {
-  StoreNonPointer(&raw_ptr()->state_bits_, bits);
+  StoreNonPointer(&untag()->state_bits_, bits);
 }
 
 void Code::set_is_optimized(bool value) const {
-  set_state_bits(OptimizedBit::update(value, raw_ptr()->state_bits_));
+  set_state_bits(OptimizedBit::update(value, untag()->state_bits_));
 }
 
 void Code::set_is_force_optimized(bool value) const {
-  set_state_bits(ForceOptimizedBit::update(value, raw_ptr()->state_bits_));
+  set_state_bits(ForceOptimizedBit::update(value, untag()->state_bits_));
 }
 
 void Code::set_is_alive(bool value) const {
-  set_state_bits(AliveBit::update(value, raw_ptr()->state_bits_));
+  set_state_bits(AliveBit::update(value, untag()->state_bits_));
 }
 
 void Code::set_compressed_stackmaps(const CompressedStackMaps& maps) const {
   ASSERT(maps.IsOld());
-  raw_ptr()->set_compressed_stackmaps(maps.raw());
+  untag()->set_compressed_stackmaps(maps.ptr());
 }
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
 intptr_t Code::num_variables() const {
   ASSERT(!FLAG_precompiled_mode);
-  return Smi::Value(Smi::RawCast(raw_ptr()->catch_entry()));
+  return Smi::Value(Smi::RawCast(untag()->catch_entry()));
 }
 void Code::set_num_variables(intptr_t num_variables) const {
   ASSERT(!FLAG_precompiled_mode);
-  raw_ptr()->set_catch_entry(Smi::New(num_variables));
+  untag()->set_catch_entry(Smi::New(num_variables));
 }
 #endif
 
 #if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER)
 TypedDataPtr Code::catch_entry_moves_maps() const {
   ASSERT(FLAG_precompiled_mode);
-  return TypedData::RawCast(raw_ptr()->catch_entry());
+  return TypedData::RawCast(untag()->catch_entry());
 }
 void Code::set_catch_entry_moves_maps(const TypedData& maps) const {
   ASSERT(FLAG_precompiled_mode);
-  raw_ptr()->set_catch_entry(maps.raw());
+  untag()->set_catch_entry(maps.ptr());
 }
 #endif
 
@@ -16174,7 +16181,7 @@
   UNREACHABLE();
 #else
   ASSERT(array.IsOld());
-  raw_ptr()->set_deopt_info_array(array.raw());
+  untag()->set_deopt_info_array(array.ptr());
 #endif
 }
 
@@ -16182,7 +16189,7 @@
 #if defined(DART_PRECOMPILED_RUNTIME)
   UNREACHABLE();
 #else
-  raw_ptr()->set_static_calls_target_table(value.raw());
+  untag()->set_static_calls_target_table(value.ptr());
 #endif
 #if defined(DEBUG)
   // Check that the table is sorted by pc offsets.
@@ -16242,7 +16249,7 @@
       ASSERT(!info.IsNull());
       *deopt_reason = DeoptTable::ReasonField::decode(reason_and_flags.Value());
       *deopt_flags = DeoptTable::FlagsField::decode(reason_and_flags.Value());
-      return info.raw();
+      return info.ptr();
     }
   }
   *deopt_reason = ICData::kDeoptUnknown;
@@ -16255,7 +16262,7 @@
   UNREACHABLE();
 #else
   NoSafepointScope no_safepoint;
-  const Array& table = Array::Handle(raw_ptr()->static_calls_target_table());
+  const Array& table = Array::Handle(untag()->static_calls_target_table());
   StaticCallsTable entries(table);
   const intptr_t pc_offset = pc - PayloadStart();
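  // The table is sorted by PC offset (see the DEBUG check in
  // set_static_calls_target_table), so a binary search can find the entry.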
   intptr_t imin = 0;
@@ -16285,7 +16292,7 @@
   if (i < 0) {
     return Function::null();
   }
-  const Array& array = Array::Handle(raw_ptr()->static_calls_target_table());
+  const Array& array = Array::Handle(untag()->static_calls_target_table());
   StaticCallsTable entries(array);
   return entries[i].Get<kSCallTableFunctionTarget>();
 #endif
@@ -16297,7 +16304,7 @@
 #else
   const intptr_t i = BinarySearchInSCallTable(pc);
   ASSERT(i >= 0);
-  const Array& array = Array::Handle(raw_ptr()->static_calls_target_table());
+  const Array& array = Array::Handle(untag()->static_calls_target_table());
   StaticCallsTable entries(array);
   ASSERT(code.IsNull() ||
          (code.function() == entries[i].Get<kSCallTableFunctionTarget>()));
@@ -16311,7 +16318,7 @@
 #else
   const intptr_t i = BinarySearchInSCallTable(pc);
   ASSERT(i >= 0);
-  const Array& array = Array::Handle(raw_ptr()->static_calls_target_table());
+  const Array& array = Array::Handle(untag()->static_calls_target_table());
   StaticCallsTable entries(array);
 #if defined(DEBUG)
   if (entries[i].Get<kSCallTableFunctionTarget>() == Function::null()) {
@@ -16343,7 +16350,7 @@
 #if defined(PRODUCT)
   Comments* comments = new Code::Comments(Array::Handle());
 #else
-  Comments* comments = new Code::Comments(Array::Handle(raw_ptr()->comments()));
+  Comments* comments = new Code::Comments(Array::Handle(untag()->comments()));
 #endif
   return *comments;
 }
@@ -16353,7 +16360,7 @@
   UNREACHABLE();
 #else
   ASSERT(comments.comments_.IsOld());
-  raw_ptr()->set_comments(comments.comments_.raw());
+  untag()->set_comments(comments.comments_.ptr());
 #endif
 }
 
@@ -16362,7 +16369,7 @@
   UNREACHABLE();
 #else
   ASSERT(offset >= 0);
-  raw_ptr()->set_return_address_metadata(Smi::New(offset));
+  untag()->set_return_address_metadata(Smi::New(offset));
 #endif
 }
 
@@ -16371,7 +16378,7 @@
   UNREACHABLE();
   return -1;
 #else
-  const Object& object = Object::Handle(raw_ptr()->return_address_metadata());
+  const Object& object = Object::Handle(untag()->return_address_metadata());
   // In the future we may put something other than a smi in
   // |return_address_metadata_|.
   if (object.IsNull() || !object.IsSmi()) {
@@ -16382,12 +16389,12 @@
 }
 
 ArrayPtr Code::inlined_id_to_function() const {
-  return raw_ptr()->inlined_id_to_function();
+  return untag()->inlined_id_to_function();
 }
 
 void Code::set_inlined_id_to_function(const Array& value) const {
   ASSERT(value.IsOld());
-  raw_ptr()->set_inlined_id_to_function(value.raw());
+  untag()->set_inlined_id_to_function(value.ptr());
 }
 
 CodePtr Code::New(intptr_t pointer_offsets_length) {
@@ -16412,7 +16419,7 @@
     result.set_pc_descriptors(Object::empty_descriptors());
     result.set_compressed_stackmaps(Object::empty_compressed_stackmaps());
   }
-  return result.raw();
+  return result.ptr();
 }
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
@@ -16426,7 +16433,7 @@
   const auto& code = Code::Handle(
       FinalizeCode(compiler, assembler, pool_attachment, optimized, stats));
   NotifyCodeObservers(function, code, optimized);
-  return code.raw();
+  return code.ptr();
 }
 
 CodePtr Code::FinalizeCodeAndNotify(const char* name,
@@ -16439,7 +16446,7 @@
   const auto& code = Code::Handle(
       FinalizeCode(compiler, assembler, pool_attachment, optimized, stats));
   NotifyCodeObservers(name, code, optimized);
-  return code.raw();
+  return code.ptr();
 }
 
 #if defined(DART_PRECOMPILER)
@@ -16462,7 +16469,7 @@
       object_pool =
           ObjectPool::NewFromBuilder(assembler->object_pool_builder());
     } else {
-      object_pool = ObjectPool::empty_object_pool().raw();
+      object_pool = ObjectPool::empty_object_pool().ptr();
     }
   } else {
 #if defined(DART_PRECOMPILER)
@@ -16483,7 +16490,7 @@
   intptr_t pointer_offset_count = assembler->CountPointerOffsets();
   Code& code = Code::ZoneHandle(Code::New(pointer_offset_count));
 #ifdef TARGET_ARCH_IA32
-  assembler->GetSelfHandle() = code.raw();
+  assembler->GetSelfHandle() = code.ptr();
 #endif
   Instructions& instrs = Instructions::ZoneHandle(Instructions::New(
       assembler->CodeSize(), assembler->has_monomorphic_entry()));
@@ -16517,17 +16524,17 @@
       ASSERT(object->IsOld());
       // N.B. The pointer is embedded in the Instructions object, but visited
       // through the Code object.
-      code.raw()->ptr()->StorePointerUnaligned(
-          reinterpret_cast<ObjectPtr*>(addr), object->raw(), thread);
+      code.ptr()->untag()->StorePointerUnaligned(
+          reinterpret_cast<ObjectPtr*>(addr), object->ptr(), thread);
     }
 
    // Write-protect the instructions and, if supported by the OS, use dual
    // mapping for execution.
     if (FLAG_write_protect_code) {
-      uword address = ObjectLayout::ToAddr(instrs.raw());
+      uword address = UntaggedObject::ToAddr(instrs.ptr());
       // Check if a dual mapping exists.
-      instrs = Instructions::RawCast(OldPage::ToExecutable(instrs.raw()));
-      uword exec_address = ObjectLayout::ToAddr(instrs.raw());
+      instrs = Instructions::RawCast(OldPage::ToExecutable(instrs.ptr()));
+      uword exec_address = UntaggedObject::ToAddr(instrs.ptr());
       const bool use_dual_mapping = exec_address != address;
       ASSERT(use_dual_mapping == FLAG_dual_map_code);
 
@@ -16536,14 +16543,14 @@
       // Yet the writable mapping is still turned back from RW to R.
       if (use_dual_mapping) {
         VirtualMemory::Protect(reinterpret_cast<void*>(address),
-                               instrs.raw()->ptr()->HeapSize(),
+                               instrs.ptr()->untag()->HeapSize(),
                                VirtualMemory::kReadOnly);
         address = exec_address;
       } else {
      // If dual mapping is disabled and we write-protect, then we have to
      // change the single mapping from RW -> RX.
         VirtualMemory::Protect(reinterpret_cast<void*>(address),
-                               instrs.raw()->ptr()->HeapSize(),
+                               instrs.ptr()->untag()->HeapSize(),
                                VirtualMemory::kReadExecute);
       }
     }
@@ -16557,7 +16564,7 @@
 
    // Set the object pool on the Code object.
     if (!object_pool.IsNull()) {
-      code.set_object_pool(object_pool.raw());
+      code.set_object_pool(object_pool.ptr());
     }
 
 #if defined(DART_PRECOMPILER)
@@ -16581,7 +16588,7 @@
     code.SetPrologueOffset(assembler->CodeSize());
   }
 #endif
-  return code.raw();
+  return code.ptr();
 }
 
 void Code::NotifyCodeObservers(const Code& code, bool optimized) {
@@ -16633,7 +16640,7 @@
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
 
 bool Code::SlowFindRawCodeVisitor::FindObject(ObjectPtr raw_obj) const {
-  return CodeLayout::ContainsPC(raw_obj, pc_);
+  return UntaggedCode::ContainsPC(raw_obj, pc_);
 }
 
 CodePtr Code::LookupCodeInIsolateGroup(IsolateGroup* isolate_group, uword pc) {
@@ -16665,13 +16672,13 @@
   if (!code.IsNull() && (code.compile_timestamp() == timestamp) &&
       (code.PayloadStart() == pc)) {
     // Found code in isolate.
-    return code.raw();
+    return code.ptr();
   }
   code = Code::LookupCodeInVmIsolate(pc);
   if (!code.IsNull() && (code.compile_timestamp() == timestamp) &&
       (code.PayloadStart() == pc)) {
     // Found code in VM isolate.
-    return code.raw();
+    return code.ptr();
   }
   return Code::null();
 }
@@ -16679,7 +16686,7 @@
 TokenPosition Code::GetTokenIndexOfPC(uword pc) const {
   uword pc_offset = pc - PayloadStart();
   const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
-  PcDescriptors::Iterator iter(descriptors, PcDescriptorsLayout::kAnyKind);
+  PcDescriptors::Iterator iter(descriptors, UntaggedPcDescriptors::kAnyKind);
   while (iter.MoveNext()) {
     if (iter.PcOffset() == pc_offset) {
       return iter.TokenPos();
@@ -16689,7 +16696,7 @@
 }
 
 uword Code::GetPcForDeoptId(intptr_t deopt_id,
-                            PcDescriptorsLayout::Kind kind) const {
+                            UntaggedPcDescriptors::Kind kind) const {
   const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
   PcDescriptors::Iterator iter(descriptors, kind);
   while (iter.MoveNext()) {
@@ -16706,7 +16713,7 @@
 intptr_t Code::GetDeoptIdForOsr(uword pc) const {
   uword pc_offset = pc - PayloadStart();
   const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
-  PcDescriptors::Iterator iter(descriptors, PcDescriptorsLayout::kOsrEntry);
+  PcDescriptors::Iterator iter(descriptors, UntaggedPcDescriptors::kOsrEntry);
   while (iter.MoveNext()) {
     if (iter.PcOffset() == pc_offset) {
       return iter.DeoptId();
@@ -16813,10 +16820,10 @@
   const uword entry_point = Instructions::EntryPoint(instructions);
   const uword monomorphic_entry_point =
       Instructions::MonomorphicEntryPoint(instructions);
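  // Cache the entry points on the Code object so calls need not load the
  // Instructions object; the unchecked variants sit unchecked_offset bytes
  // past their checked counterparts.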
-  code->ptr()->entry_point_ = entry_point;
-  code->ptr()->monomorphic_entry_point_ = monomorphic_entry_point;
-  code->ptr()->unchecked_entry_point_ = entry_point + unchecked_offset;
-  code->ptr()->monomorphic_unchecked_entry_point_ =
+  code->untag()->entry_point_ = entry_point;
+  code->untag()->monomorphic_entry_point_ = monomorphic_entry_point;
+  code->untag()->unchecked_entry_point_ = entry_point + unchecked_offset;
+  code->untag()->monomorphic_unchecked_entry_point_ =
       monomorphic_entry_point + unchecked_offset;
 }
 
@@ -16828,8 +16835,8 @@
   DEBUG_ASSERT(IsMutatorOrAtSafepoint() || !is_alive());
   // RawInstructions are never allocated in New space and hence a
   // store buffer update is not needed here.
-  raw_ptr()->set_active_instructions(instructions.raw());
-  Code::InitializeCachedEntryPointsFrom(raw(), instructions.raw(),
+  untag()->set_active_instructions(instructions.ptr());
+  Code::InitializeCachedEntryPointsFrom(ptr(), instructions.ptr(),
                                         unchecked_offset);
 #endif
 }
@@ -16839,7 +16846,7 @@
   UNREACHABLE();
 #else
   SetActiveInstructions(Instructions::Handle(instructions()),
-                        raw_ptr()->unchecked_offset_);
+                        untag()->unchecked_offset_);
 #endif
 }
 
@@ -16922,7 +16929,7 @@
     result ^= raw;
     result.set_num_variables(num_variables);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 const char* Context::ToCString() const {
@@ -16992,7 +16999,7 @@
     result.set_num_variables(num_variables);
     result.set_is_implicit(is_implicit);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 TokenPosition ContextScope::TokenIndexAt(intptr_t scope_index) const {
@@ -17024,7 +17031,7 @@
 }
 
 void ContextScope::SetNameAt(intptr_t scope_index, const String& name) const {
-  StorePointer(&(VariableDescAddr(scope_index)->name), name.raw());
+  StorePointer(&(VariableDescAddr(scope_index)->name), name.ptr());
 }
 
 void ContextScope::ClearFlagsAt(intptr_t scope_index) const {
@@ -17044,27 +17051,29 @@
 }
 
 bool ContextScope::IsFinalAt(intptr_t scope_index) const {
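  // final/late/const are stored as per-variable flag bits, read and
  // written through GetFlagAt/SetFlagAt.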
-  return GetFlagAt(scope_index, ContextScopeLayout::VariableDesc::kIsFinal);
+  return GetFlagAt(scope_index, UntaggedContextScope::VariableDesc::kIsFinal);
 }
 
 void ContextScope::SetIsFinalAt(intptr_t scope_index, bool is_final) const {
-  SetFlagAt(scope_index, ContextScopeLayout::VariableDesc::kIsFinal, is_final);
+  SetFlagAt(scope_index, UntaggedContextScope::VariableDesc::kIsFinal,
+            is_final);
 }
 
 bool ContextScope::IsLateAt(intptr_t scope_index) const {
-  return GetFlagAt(scope_index, ContextScopeLayout::VariableDesc::kIsLate);
+  return GetFlagAt(scope_index, UntaggedContextScope::VariableDesc::kIsLate);
 }
 
 void ContextScope::SetIsLateAt(intptr_t scope_index, bool is_late) const {
-  SetFlagAt(scope_index, ContextScopeLayout::VariableDesc::kIsLate, is_late);
+  SetFlagAt(scope_index, UntaggedContextScope::VariableDesc::kIsLate, is_late);
 }
 
 bool ContextScope::IsConstAt(intptr_t scope_index) const {
-  return GetFlagAt(scope_index, ContextScopeLayout::VariableDesc::kIsConst);
+  return GetFlagAt(scope_index, UntaggedContextScope::VariableDesc::kIsConst);
 }
 
 void ContextScope::SetIsConstAt(intptr_t scope_index, bool is_const) const {
-  SetFlagAt(scope_index, ContextScopeLayout::VariableDesc::kIsConst, is_const);
+  SetFlagAt(scope_index, UntaggedContextScope::VariableDesc::kIsConst,
+            is_const);
 }
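
The Is*/SetIs* accessors above all funnel through GetFlagAt/SetFlagAt with a per-variable flag constant. A self-contained sketch of that bitmask pattern, assuming illustrative bit positions rather than the VM's actual VariableDesc encoding:

    #include <cstdint>
    #include <cassert>

    enum Flag : uint64_t {
      kIsFinal = 1 << 0,
      kIsLate = 1 << 1,
      kIsConst = 1 << 2,
    };

    struct VariableDesc {
      uint64_t flags = 0;
    };

    void SetFlagAt(VariableDesc* v, uint64_t flag, bool value) {
      if (value) {
        v->flags |= flag;
      } else {
        v->flags &= ~flag;
      }
    }

    bool GetFlagAt(const VariableDesc& v, uint64_t flag) {
      return (v.flags & flag) != 0;
    }

    int main() {
      VariableDesc v;
      SetFlagAt(&v, kIsFinal, true);
      SetFlagAt(&v, kIsLate, true);
      SetFlagAt(&v, kIsLate, false);
      assert(GetFlagAt(v, kIsFinal));
      assert(!GetFlagAt(v, kIsLate) && !GetFlagAt(v, kIsConst));
      return 0;
    }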
 
 intptr_t ContextScope::LateInitOffsetAt(intptr_t scope_index) const {
@@ -17084,7 +17093,7 @@
 
 void ContextScope::SetTypeAt(intptr_t scope_index,
                              const AbstractType& type) const {
-  StorePointer(&(VariableDescAddr(scope_index)->type), type.raw());
+  StorePointer(&(VariableDescAddr(scope_index)->type), type.ptr());
 }
 
 InstancePtr ContextScope::ConstValueAt(intptr_t scope_index) const {
@@ -17095,7 +17104,7 @@
 void ContextScope::SetConstValueAt(intptr_t scope_index,
                                    const Instance& value) const {
   ASSERT(IsConstAt(scope_index));
-  StorePointer(&(VariableDescAddr(scope_index)->value), value.raw());
+  StorePointer(&(VariableDescAddr(scope_index)->value), value.ptr());
 }
 
 intptr_t ContextScope::ContextIndexAt(intptr_t scope_index) const {
@@ -17137,30 +17146,30 @@
 }
 
 ArrayPtr MegamorphicCache::buckets() const {
-  return raw_ptr()->buckets();
+  return untag()->buckets();
 }
 
 void MegamorphicCache::set_buckets(const Array& buckets) const {
-  raw_ptr()->set_buckets(buckets.raw());
+  untag()->set_buckets(buckets.ptr());
 }
 
 // Class IDs in the table are smi-tagged, so we use a smi-tagged mask
 // and target class ID to avoid untagging (on each iteration of the
 // test loop) in generated code.
 intptr_t MegamorphicCache::mask() const {
-  return Smi::Value(raw_ptr()->mask());
+  return Smi::Value(untag()->mask());
 }
 
 void MegamorphicCache::set_mask(intptr_t mask) const {
-  raw_ptr()->set_mask(Smi::New(mask));
+  untag()->set_mask(Smi::New(mask));
 }
 
 intptr_t MegamorphicCache::filled_entry_count() const {
-  return raw_ptr()->filled_entry_count_;
+  return untag()->filled_entry_count_;
 }
 
 void MegamorphicCache::set_filled_entry_count(intptr_t count) const {
-  StoreNonPointer(&raw_ptr()->filled_entry_count_, count);
+  StoreNonPointer(&untag()->filled_entry_count_, count);
 }
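
To make the comment about smi-tagged masking concrete: ANDing a smi-tagged class id with a smi-tagged mask directly yields the smi-tagged bucket index, so the generated probe loop never untags. A standalone sketch under the assumption that smis use tag bit 0 == 0 (i.e. value << 1); nothing here is VM code:

    #include <cstdint>
    #include <cassert>

    using uword = uintptr_t;

    constexpr uword SmiTag(uword value) { return value << 1; }

    int main() {
      const uword capacity = 64;                // power of two
      const uword mask = SmiTag(capacity - 1);  // smi-tagged mask
      const uword class_id = SmiTag(1234);      // smi-tagged class id
      // ANDing two smi-tagged values yields the smi-tagged bucket index,
      // so the probe loop never untags and retags per iteration.
      const uword bucket = class_id & mask;
      assert(bucket == SmiTag(1234 & (capacity - 1)));
      return 0;
    }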
 
 MegamorphicCachePtr MegamorphicCache::New() {
@@ -17173,7 +17182,7 @@
     result ^= raw;
   }
   result.set_filled_entry_count(0);
-  return result.raw();
+  return result.ptr();
 }
 
 MegamorphicCachePtr MegamorphicCache::New(const String& target_name,
@@ -17198,7 +17207,7 @@
   result.set_target_name(target_name);
   result.set_arguments_descriptor(arguments_descriptor);
   result.set_filled_entry_count(0);
-  return result.raw();
+  return result.ptr();
 }
 
 void MegamorphicCache::EnsureContains(const Smi& class_id,
@@ -17215,10 +17224,10 @@
       const auto& function = Function::Cast(target);
       const auto& entry_point = Smi::Handle(
           Smi::FromAlignedAddress(Code::EntryPointOf(function.CurrentCode())));
-      ASSERT(LookupLocked(class_id) == entry_point.raw());
+      ASSERT(LookupLocked(class_id) == entry_point.ptr());
     }
   } else {
-    ASSERT(LookupLocked(class_id) == target.raw());
+    ASSERT(LookupLocked(class_id) == target.ptr());
   }
 #endif  // defined(DEBUG)
 }
@@ -17373,26 +17382,26 @@
     result ^= raw;
   }
   result.set_cache(Array::Handle(cached_array_));
-  return result.raw();
+  return result.ptr();
 }
 
 ArrayPtr SubtypeTestCache::cache() const {
   // We rely on the fact that any loads from the array are dependent loads and
   // avoid the load-acquire barrier here.
-  return raw_ptr()->cache<std::memory_order_relaxed>();
+  return untag()->cache<std::memory_order_relaxed>();
 }
 
 void SubtypeTestCache::set_cache(const Array& value) const {
   // We have to ensure that initializing stores to the array are available
   // when releasing the pointer to the array pointer.
   // => We have to use store-release here.
-  raw_ptr()->set_cache<std::memory_order_release>(value.raw());
+  untag()->set_cache<std::memory_order_release>(value.ptr());
 }
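
The acquire/release reasoning in cache()/set_cache above is the classic publication pattern. A portable sketch with std::atomic (illustrative types, not VM code); standard C++ has no reliable spelling for the VM's "dependent load, no acquire" trick, so the reader side below uses load-acquire:

    #include <atomic>
    #include <cassert>

    struct Array {
      int length;
      int data[8];
    };

    std::atomic<Array*> cache{nullptr};

    void Publish(Array* a) {
      a->length = 8;  // initializing stores to the array...
      // ...must be visible before the pointer is: store-release.
      cache.store(a, std::memory_order_release);
    }

    Array* Read() {
      // The VM loads relaxed and relies on the address dependency of the
      // subsequent array reads; portably, pair release with acquire.
      return cache.load(std::memory_order_acquire);
    }

    int main() {
      static Array backing{};
      Publish(&backing);
      assert(Read()->length == 8);
      return 0;
    }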
 
 intptr_t SubtypeTestCache::NumberOfChecks() const {
   NoSafepointScope no_safepoint;
   // Do not count the sentinel.
-  return (Smi::Value(cache()->ptr()->length()) / kTestEntryLength) - 1;
+  return (Smi::Value(cache()->untag()->length()) / kTestEntryLength) - 1;
 }
 
 void SubtypeTestCache::AddCheck(
@@ -17500,16 +17509,16 @@
   for (intptr_t i = 0; i < last_index; i++) {
     const auto entry = entries[i];
     if (entry.Get<kInstanceClassIdOrFunction>() ==
-            instance_class_id_or_function.raw() &&
-        entry.Get<kDestinationType>() == destination_type.raw() &&
-        entry.Get<kInstanceTypeArguments>() == instance_type_arguments.raw() &&
+            instance_class_id_or_function.ptr() &&
+        entry.Get<kDestinationType>() == destination_type.ptr() &&
+        entry.Get<kInstanceTypeArguments>() == instance_type_arguments.ptr() &&
         entry.Get<kInstantiatorTypeArguments>() ==
-            instantiator_type_arguments.raw() &&
-        entry.Get<kFunctionTypeArguments>() == function_type_arguments.raw() &&
+            instantiator_type_arguments.ptr() &&
+        entry.Get<kFunctionTypeArguments>() == function_type_arguments.ptr() &&
         entry.Get<kInstanceParentFunctionTypeArguments>() ==
-            instance_parent_function_type_arguments.raw() &&
+            instance_parent_function_type_arguments.ptr() &&
         entry.Get<kInstanceDelayedFunctionTypeArguments>() ==
-            instance_delayed_type_arguments.raw()) {
+            instance_delayed_type_arguments.ptr()) {
       if (index != nullptr) {
         *index = i;
       }
@@ -17557,14 +17566,14 @@
   buffer->Printf(
       "[ %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px
       ", %#" Px " ]",
-      static_cast<uword>(instance_class_id_or_function.raw()),
-      static_cast<uword>(destination_type.raw()),
-      static_cast<uword>(instance_type_arguments.raw()),
-      static_cast<uword>(instantiator_type_arguments.raw()),
-      static_cast<uword>(function_type_arguments.raw()),
-      static_cast<uword>(instance_parent_function_type_arguments.raw()),
-      static_cast<uword>(instance_delayed_type_arguments.raw()),
-      static_cast<uword>(result.raw()));
+      static_cast<uword>(instance_class_id_or_function.ptr()),
+      static_cast<uword>(destination_type.ptr()),
+      static_cast<uword>(instance_type_arguments.ptr()),
+      static_cast<uword>(instantiator_type_arguments.ptr()),
+      static_cast<uword>(function_type_arguments.ptr()),
+      static_cast<uword>(instance_parent_function_type_arguments.ptr()),
+      static_cast<uword>(instance_delayed_type_arguments.ptr()),
+      static_cast<uword>(result.ptr()));
   if (instance_class_id_or_function.IsSmi()) {
     buffer->Printf("%sclass id: %" Pd "", separator,
                    Smi::Cast(instance_class_id_or_function).Value());
@@ -17655,21 +17664,21 @@
   result.set_id(kIllegalId);
   result.set_loaded(false);
   result.set_load_outstanding(false);
-  return result.raw();
+  return result.ptr();
 }
 
 LoadingUnitPtr LoadingUnit::parent() const {
-  return raw_ptr()->parent();
+  return untag()->parent();
 }
 void LoadingUnit::set_parent(const LoadingUnit& value) const {
-  raw_ptr()->set_parent(value.raw());
+  untag()->set_parent(value.ptr());
 }
 
 ArrayPtr LoadingUnit::base_objects() const {
-  return raw_ptr()->base_objects();
+  return untag()->base_objects();
 }
 void LoadingUnit::set_base_objects(const Array& value) const {
-  raw_ptr()->set_base_objects(value.raw());
+  untag()->set_base_objects(value.ptr());
 }
 
 const char* LoadingUnit::ToCString() const {
@@ -17742,11 +17751,11 @@
     result ^= raw;
   }
   result.set_message(message);
-  return result.raw();
+  return result.ptr();
 }
 
 void ApiError::set_message(const String& message) const {
-  raw_ptr()->set_message(message.raw());
+  untag()->set_message(message.ptr());
 }
 
 const char* ApiError::ToErrorCString() const {
@@ -17788,7 +17797,7 @@
   result.set_kind(kind);
   result.set_message(
       String::Handle(String::NewFormattedV(format, args, space)));
-  return result.raw();
+  return result.ptr();
 }
 
 LanguageErrorPtr LanguageError::NewFormatted(const Error& prev_error,
@@ -17822,36 +17831,36 @@
   }
   result.set_formatted_message(formatted_message);
   result.set_kind(kind);
-  return result.raw();
+  return result.ptr();
 }
 
 void LanguageError::set_previous_error(const Error& value) const {
-  raw_ptr()->set_previous_error(value.raw());
+  untag()->set_previous_error(value.ptr());
 }
 
 void LanguageError::set_script(const Script& value) const {
-  raw_ptr()->set_script(value.raw());
+  untag()->set_script(value.ptr());
 }
 
 void LanguageError::set_token_pos(TokenPosition token_pos) const {
   ASSERT(!token_pos.IsClassifying());
-  StoreNonPointer(&raw_ptr()->token_pos_, token_pos);
+  StoreNonPointer(&untag()->token_pos_, token_pos);
 }
 
 void LanguageError::set_report_after_token(bool value) {
-  StoreNonPointer(&raw_ptr()->report_after_token_, value);
+  StoreNonPointer(&untag()->report_after_token_, value);
 }
 
 void LanguageError::set_kind(uint8_t value) const {
-  StoreNonPointer(&raw_ptr()->kind_, value);
+  StoreNonPointer(&untag()->kind_, value);
 }
 
 void LanguageError::set_message(const String& value) const {
-  raw_ptr()->set_message(value.raw());
+  untag()->set_message(value.ptr());
 }
 
 void LanguageError::set_formatted_message(const String& value) const {
-  raw_ptr()->set_formatted_message(value.raw());
+  untag()->set_formatted_message(value.ptr());
 }
 
 StringPtr LanguageError::FormatMessage() const {
@@ -17868,7 +17877,7 @@
         String::Handle(String::New(prev_error.ToErrorCString())), result);
   }
   set_formatted_message(result);
-  return result.raw();
+  return result.ptr();
 }
 
 const char* LanguageError::ToErrorCString() const {
@@ -17895,7 +17904,7 @@
   }
   result.set_exception(exception);
   result.set_stacktrace(stacktrace);
-  return result.raw();
+  return result.ptr();
 }
 
 UnhandledExceptionPtr UnhandledException::New(Heap::Space space) {
@@ -17909,15 +17918,15 @@
   }
   result.set_exception(Object::null_instance());
   result.set_stacktrace(StackTrace::Handle());
-  return result.raw();
+  return result.ptr();
 }
 
 void UnhandledException::set_exception(const Instance& exception) const {
-  raw_ptr()->set_exception(exception.raw());
+  untag()->set_exception(exception.ptr());
 }
 
 void UnhandledException::set_stacktrace(const Instance& stacktrace) const {
-  raw_ptr()->set_stacktrace(stacktrace.raw());
+  untag()->set_stacktrace(stacktrace.ptr());
 }
 
 const char* UnhandledException::ToErrorCString() const {
@@ -17967,15 +17976,15 @@
   }
   result.set_message(message);
   result.set_is_user_initiated(false);
-  return result.raw();
+  return result.ptr();
 }
 
 void UnwindError::set_message(const String& message) const {
-  raw_ptr()->set_message(message.raw());
+  untag()->set_message(message.ptr());
 }
 
 void UnwindError::set_is_user_initiated(bool value) const {
-  StoreNonPointer(&raw_ptr()->is_user_initiated_, value);
+  StoreNonPointer(&untag()->is_user_initiated_, value);
 }
 
 const char* UnwindError::ToErrorCString() const {
@@ -18009,7 +18018,7 @@
     // The getter must correspond to either an entry-point field or a getter
     // method explicitly marked.
     Field& field = Field::Handle(zone);
-    if (function.kind() == FunctionLayout::kImplicitGetter) {
+    if (function.kind() == UntaggedFunction::kImplicitGetter) {
       field = function.accessor_field();
     }
     if (!field.IsNull()) {
@@ -18070,7 +18079,7 @@
     // The setter must correspond to either an entry-point field or a setter
     // method explicitly marked.
     Field& field = Field::Handle(zone);
-    if (setter.kind() == FunctionLayout::kImplicitSetter) {
+    if (setter.kind() == UntaggedFunction::kImplicitSetter) {
       field = setter.accessor_field();
     }
     if (!field.IsNull()) {
@@ -18132,7 +18141,7 @@
       if (check_is_entrypoint) {
         CHECK_ERROR(EntryPointFieldInvocationError(function_name));
       }
-      ASSERT(function.kind() != FunctionLayout::kMethodExtractor);
+      ASSERT(function.kind() != UntaggedFunction::kMethodExtractor);
       // Invoke the getter.
       const int kNumArgs = 1;
       const Array& getter_args = Array::Handle(zone, Array::New(kNumArgs));
@@ -18145,7 +18154,7 @@
                                        getter_args, getter_args_descriptor,
                                        respect_reflectable, inst_type_args));
       if (getter_result.IsError()) {
-        return getter_result.raw();
+        return getter_result.ptr();
       }
       // Replace the closure as the receiver in the arguments list.
       args.SetAt(0, getter_result);
@@ -18192,7 +18201,7 @@
 }
 
 bool Instance::CanonicalizeEquals(const Instance& other) const {
-  if (this->raw() == other.raw()) {
+  if (this->ptr() == other.ptr()) {
     return true;  // "===".
   }
 
@@ -18210,8 +18219,8 @@
     if (instance_size != other_instance_size) {
       return false;
     }
-    uword this_addr = reinterpret_cast<uword>(this->raw_ptr());
-    uword other_addr = reinterpret_cast<uword>(other.raw_ptr());
+    uword this_addr = reinterpret_cast<uword>(this->untag());
+    uword other_addr = reinterpret_cast<uword>(other.untag());
     for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
          offset += kWordSize) {
       if ((*reinterpret_cast<ObjectPtr*>(this_addr + offset)) !=
@@ -18228,7 +18237,7 @@
     return 2011;  // Matches null_patch.dart.
   }
   Thread* thread = Thread::Current();
-  uint32_t hash = thread->heap()->GetCanonicalHash(raw());
+  uint32_t hash = thread->heap()->GetCanonicalHash(ptr());
   if (hash != 0) {
     return hash;
   }
@@ -18237,7 +18246,7 @@
   const intptr_t instance_size = SizeFromClass();
   ASSERT(instance_size != 0);
   hash = instance_size / kWordSize;
-  uword this_addr = reinterpret_cast<uword>(this->raw_ptr());
+  uword this_addr = reinterpret_cast<uword>(this->untag());
   Instance& member = Instance::Handle();
 
   const auto unboxed_fields_bitmap =
@@ -18262,7 +18271,7 @@
     }
   }
   hash = FinalizeHash(hash, String::kHashBits);
-  thread->heap()->SetCanonicalHash(raw(), hash);
+  thread->heap()->SetCanonicalHash(ptr(), hash);
   return hash;
 }
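
Both CanonicalizeEquals and CanonicalizeHash above walk the instance payload a word at a time, starting past the object header. A self-contained sketch of the word-wise comparison (header size and layout are stand-ins, not the VM's):

    #include <cstdint>
    #include <cstddef>
    #include <cassert>

    constexpr std::size_t kWordSize = sizeof(uintptr_t);

    // Compare two same-sized objects word by word, skipping the header.
    bool WordsEqual(const void* a, const void* b,
                    std::size_t first_field_offset, std::size_t size) {
      const auto pa = reinterpret_cast<uintptr_t>(a);
      const auto pb = reinterpret_cast<uintptr_t>(b);
      for (std::size_t off = first_field_offset; off < size; off += kWordSize) {
        if (*reinterpret_cast<const uintptr_t*>(pa + off) !=
            *reinterpret_cast<const uintptr_t*>(pb + off)) {
          return false;
        }
      }
      return true;
    }

    int main() {
      uintptr_t x[4] = {0xdead, 1, 2, 3};  // word 0 models the header
      uintptr_t y[4] = {0xbeef, 1, 2, 3};  // headers differ, fields agree
      assert(WordsEqual(x, y, kWordSize, sizeof(x)));
      return 0;
    }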
 
@@ -18312,7 +18321,7 @@
 #if defined(DEBUG)
     // Make sure that we are not missing any fields.
     CheckForPointers has_pointers(IsolateGroup::Current());
-    this->raw()->ptr()->VisitPointers(&has_pointers);
+    this->ptr()->untag()->VisitPointers(&has_pointers);
     ASSERT(!has_pointers.has_pointers());
 #endif  // DEBUG
   }
@@ -18330,7 +18339,7 @@
 
 InstancePtr Instance::CanonicalizeLocked(Thread* thread) const {
   if (this->IsCanonical()) {
-    return this->raw();
+    return this->ptr();
   }
   ASSERT(!IsNull());
   CanonicalizeFieldsLocked(thread);
@@ -18339,14 +18348,14 @@
   Instance& result =
       Instance::Handle(zone, cls.LookupCanonicalInstance(zone, *this));
   if (!result.IsNull()) {
-    return result.raw();
+    return result.ptr();
   }
   if (IsNew()) {
     ASSERT((thread->isolate() == Dart::vm_isolate()) || !InVMIsolateHeap());
     // Create a canonical object in old space.
     result ^= Object::Clone(*this, Heap::kOld);
   } else {
-    result = this->raw();
+    result = this->ptr();
   }
   ASSERT(result.IsOld());
   result.SetCanonical();
@@ -18361,7 +18370,7 @@
   SafepointMutexLocker ml(
       thread->isolate_group()->constant_canonicalization_mutex());
   result ^= cls.LookupCanonicalInstance(zone, *this);
-  return (result.raw() == this->raw());
+  return (result.ptr() == this->ptr());
 }
 #endif  // DEBUG
 
@@ -18416,7 +18425,7 @@
   } else {
     field.RecordStore(value);
     const Object* stored_value = field.CloneForUnboxed(value);
-    StorePointer(FieldAddr(field), stored_value->raw());
+    StorePointer(FieldAddr(field), stored_value->ptr());
   }
 }
 
@@ -18440,7 +18449,7 @@
       signature.SetIsFinalized();
     }
     signature ^= signature.Canonicalize(thread, nullptr);
-    return signature.raw();
+    return signature.ptr();
   }
   Type& type = Type::Handle(zone);
   if (!cls.IsGeneric()) {
@@ -18455,7 +18464,7 @@
     type.SetIsFinalized();
     type ^= type.Canonicalize(thread, nullptr);
   }
-  return type.raw();
+  return type.ptr();
 }
 
 TypeArgumentsPtr Instance::GetTypeArguments() const {
@@ -18465,7 +18474,7 @@
   ASSERT(field_offset != Class::kNoTypeArguments);
   TypeArguments& type_arguments = TypeArguments::Handle();
   type_arguments ^= *FieldAddrAtOffset(field_offset);
-  return type_arguments.raw();
+  return type_arguments.ptr();
 }
 
 void Instance::SetTypeArguments(const TypeArguments& value) const {
@@ -18596,7 +18605,7 @@
     const TypeArguments& other_function_type_arguments) const {
   ASSERT(other.IsFinalized());
   ASSERT(!other.IsTypeRef());  // Must be dereferenced at compile time.
-  ASSERT(raw() != Object::sentinel().raw());
+  ASSERT(ptr() != Object::sentinel().ptr());
   // Instance may not have runtimeType dynamic, void, or Never.
   if (other.IsTopTypeForSubtyping()) {
     return true;
@@ -18614,7 +18623,7 @@
         other.IsObjectType()) {
       return true;
     }
-    AbstractType& instantiated_other = AbstractType::Handle(zone, other.raw());
+    AbstractType& instantiated_other = AbstractType::Handle(zone, other.ptr());
     if (!other.IsInstantiated()) {
       instantiated_other = other.InstantiateFrom(
           other_instantiator_type_arguments, other_function_type_arguments,
@@ -18652,7 +18661,7 @@
     ASSERT(type_arguments.IsNull() ||
            (type_arguments.Length() >= cls.NumTypeArguments()));
   }
-  AbstractType& instantiated_other = AbstractType::Handle(zone, other.raw());
+  AbstractType& instantiated_other = AbstractType::Handle(zone, other.ptr());
   if (!other.IsInstantiated()) {
     instantiated_other = other.InstantiateFrom(
         other_instantiator_type_arguments, other_function_type_arguments,
@@ -18715,11 +18724,11 @@
 bool Instance::OperatorEquals(const Instance& other) const {
   // TODO(koda): Optimize for all builtin classes and all classes
   // that do not override operator==.
-  return DartLibraryCalls::Equals(*this, other) == Object::bool_true().raw();
+  return DartLibraryCalls::Equals(*this, other) == Object::bool_true().ptr();
 }
 
 bool Instance::IsIdenticalTo(const Instance& other) const {
-  if (raw() == other.raw()) return true;
+  if (ptr() == other.ptr()) return true;
   if (IsInteger() && other.IsInteger()) {
     return Integer::Cast(*this).Equals(other);
   }
@@ -18736,7 +18745,7 @@
   if (native_fields == TypedData::null()) {
     return NULL;
   }
-  return reinterpret_cast<intptr_t*>(native_fields->ptr()->data());
+  return reinterpret_cast<intptr_t*>(native_fields->untag()->data());
 }
 
 void Instance::SetNativeField(int index, intptr_t value) const {
@@ -18745,7 +18754,7 @@
   if (native_fields.IsNull()) {
     // Allocate backing storage for the native fields.
     native_fields = TypedData::New(kIntPtrCid, NumNativeFields());
-    StorePointer(NativeFieldsAddr(), native_fields.raw());
+    StorePointer(NativeFieldsAddr(), native_fields.ptr());
   }
   intptr_t byte_offset = index * sizeof(intptr_t);
   TypedData::Cast(native_fields).SetIntPtr(byte_offset, value);
@@ -18759,7 +18768,7 @@
   if (native_fields.IsNull()) {
     // Allocate backing storage for the native fields.
     native_fields = TypedData::New(kIntPtrCid, NumNativeFields());
-    StorePointer(NativeFieldsAddr(), native_fields.raw());
+    StorePointer(NativeFieldsAddr(), native_fields.ptr());
   }
   for (uint16_t i = 0; i < num_native_fields; i++) {
     intptr_t byte_offset = i * sizeof(intptr_t);
@@ -18784,7 +18793,7 @@
     return false;
   }
   if (function != nullptr) {
-    *function = call_function.raw();
+    *function = call_function.ptr();
   }
   return true;
 }
@@ -18869,13 +18878,13 @@
 const char* Instance::ToCString() const {
   if (IsNull()) {
     return "null";
-  } else if (raw() == Object::sentinel().raw()) {
+  } else if (ptr() == Object::sentinel().ptr()) {
     return "sentinel";
-  } else if (raw() == Object::transition_sentinel().raw()) {
+  } else if (ptr() == Object::transition_sentinel().ptr()) {
     return "transition_sentinel";
-  } else if (raw() == Object::unknown_constant().raw()) {
+  } else if (ptr() == Object::unknown_constant().ptr()) {
     return "unknown_constant";
-  } else if (raw() == Object::non_constant().raw()) {
+  } else if (ptr() == Object::non_constant().ptr()) {
     return "non_constant";
   } else if (Thread::Current()->no_safepoint_scope_depth() > 0) {
     // Can occur when running disassembler.
@@ -18967,10 +18976,10 @@
     result_nullability = Nullability::kLegacy;
   } else {
     // Keep arg nullability.
-    return raw();
+    return ptr();
   }
   if (arg_nullability == result_nullability) {
-    return raw();
+    return ptr();
   }
   if (IsType()) {
     return Type::Cast(*this).ToNullability(result_nullability, space);
@@ -18995,11 +19004,11 @@
         AbstractType::Handle(zone, UnwrapFutureOr());
     const classid_t cid = unwrapped_type.type_class_id();
     if (cid == kDynamicCid || cid == kVoidCid) {
-      return unwrapped_type.raw();
+      return unwrapped_type.ptr();
     }
     if (cid == kInstanceCid) {
       if (IsNonNullable()) {
-        return unwrapped_type.raw();
+        return unwrapped_type.ptr();
       }
       if (IsNullable() || unwrapped_type.IsNullable()) {
         return Type::Cast(unwrapped_type)
@@ -19024,7 +19033,7 @@
       return Type::Cast(*this).ToNullability(Nullability::kNonNullable, space);
     }
   }
-  return raw();
+  return ptr();
 }
 
 bool AbstractType::IsInstantiated(Genericity genericity,
@@ -19110,9 +19119,9 @@
   for (intptr_t i = 0; i < len; i += 2) {
     ASSERT(trail->At(i).IsZoneHandle());
     ASSERT(trail->At(i + 1).IsZoneHandle());
-    if (trail->At(i).raw() == this->raw()) {
+    if (trail->At(i).ptr() == this->ptr()) {
       ASSERT(!trail->At(i + 1).IsNull());
-      return trail->At(i + 1).raw();
+      return trail->At(i + 1).ptr();
     }
   }
   return AbstractType::null();
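
The trail probed above is a flat array of pairs: slot i holds a key and slot i+1 its buddy. A minimal model of that encoding, with illustrative stand-in types:

    #include <vector>
    #include <cstddef>
    #include <cassert>

    using TypeId = int;

    struct Trail {
      std::vector<TypeId> pairs;  // [key0, value0, key1, value1, ...]

      // Mirrors OnlyBuddyInTrail: the value paired with `key`, or -1.
      TypeId OnlyBuddy(TypeId key) const {
        for (std::size_t i = 0; i + 1 < pairs.size(); i += 2) {
          if (pairs[i] == key) return pairs[i + 1];
        }
        return -1;
      }

      void AddBuddy(TypeId key, TypeId value) {
        pairs.push_back(key);
        pairs.push_back(value);
      }
    };

    int main() {
      Trail trail;
      trail.AddBuddy(7, 42);
      assert(trail.OnlyBuddy(7) == 42);
      assert(trail.OnlyBuddy(9) == -1);
      return 0;
    }
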
@@ -19135,7 +19144,7 @@
   } else {
     const intptr_t len = (*trail)->length();
     for (intptr_t i = 0; i < len; i++) {
-      if ((*trail)->At(i).raw() == this->raw()) {
+      if ((*trail)->At(i).ptr() == this->ptr()) {
         return true;
       }
     }
@@ -19157,9 +19166,9 @@
     // one exception, when the class of the 'this' type implements the 'call'
     // method, thereby possibly creating a recursive type (see regress_29405).
     for (intptr_t i = 0; i < len; i += 2) {
-      if ((((*trail)->At(i).raw() == this->raw()) ||
+      if ((((*trail)->At(i).ptr() == this->ptr()) ||
            (buddy_is_typeref && (*trail)->At(i).Equals(*this))) &&
-          (((*trail)->At(i + 1).raw() == buddy.raw()) ||
+          (((*trail)->At(i + 1).ptr() == buddy.ptr()) ||
            (this_is_typeref && (*trail)->At(i + 1).Equals(buddy)))) {
         return true;
       }
@@ -19205,7 +19214,7 @@
   GrowableHandlePtrArray<const String> pieces(zone, 5 * (len / 3));
   for (intptr_t i = 0; i < len; i += 3) {
     // Only print URIs that have been marked.
-    if (uris->At(i + 2).raw() == Symbols::print().raw()) {
+    if (uris->At(i + 2).ptr() == Symbols::print().ptr()) {
       pieces.Add(Symbols::TwoSpaces());
       pieces.Add(uris->At(i));
       pieces.Add(Symbols::SpaceIsFromSpace());
@@ -19427,10 +19436,10 @@
 
 AbstractTypePtr AbstractType::UnwrapFutureOr() const {
   if (!IsFutureOrType()) {
-    return raw();
+    return ptr();
   }
   if (arguments() == TypeArguments::null()) {
-    return Type::dynamic_type().raw();
+    return Type::dynamic_type().ptr();
   }
   Thread* thread = Thread::Current();
   REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread);
@@ -19441,12 +19450,12 @@
   type_arg = type_args.TypeAt(0);
   while (type_arg.IsFutureOrType()) {
     if (type_arg.arguments() == TypeArguments::null()) {
-      return Type::dynamic_type().raw();
+      return Type::dynamic_type().ptr();
     }
     type_args = type_arg.arguments();
     type_arg = type_args.TypeAt(0);
   }
-  return type_arg.raw();
+  return type_arg.ptr();
 }
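
UnwrapFutureOr above peels nested FutureOr layers until it reaches a non-FutureOr type, reading missing type arguments as dynamic. A tiny standalone model of the loop, with stand-in types; e.g. FutureOr<FutureOr<int>> unwraps to int:

    #include <cassert>

    struct TypeNode {
      bool is_future_or;
      const TypeNode* type_arg;  // nullptr models missing type arguments
    };

    const TypeNode kDynamic{false, nullptr};

    const TypeNode* UnwrapFutureOr(const TypeNode* t) {
      while (t->is_future_or) {
        if (t->type_arg == nullptr) return &kDynamic;  // FutureOr -> dynamic
        t = t->type_arg;
      }
      return t;
    }

    int main() {
      const TypeNode int_type{false, nullptr};
      const TypeNode inner{true, &int_type};  // FutureOr<int>
      const TypeNode outer{true, &inner};     // FutureOr<FutureOr<int>>
      assert(UnwrapFutureOr(&outer) == &int_type);
      return 0;
    }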
 
 bool AbstractType::IsSubtypeOf(const AbstractType& other,
@@ -19455,7 +19464,7 @@
   ASSERT(IsFinalized());
   ASSERT(other.IsFinalized());
   // Reflexivity.
-  if (raw() == other.raw()) {
+  if (ptr() == other.ptr()) {
     return true;
   }
   // Right top type.
@@ -19602,15 +19611,15 @@
     // This only happens during bootstrapping when creating Type objects before
     // we have the instructions.
     ASSERT(type_class_id() == kDynamicCid || type_class_id() == kVoidCid);
-    StoreNonPointer(&raw_ptr()->type_test_stub_entry_point_, 0);
-    raw_ptr()->set_type_test_stub(stub.raw());
+    StoreNonPointer(&untag()->type_test_stub_entry_point_, 0);
+    untag()->set_type_test_stub(stub.ptr());
     return;
   }
 
   Thread* thread = Thread::Current();
   SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
-  StoreNonPointer(&raw_ptr()->type_test_stub_entry_point_, stub.EntryPoint());
-  raw_ptr()->set_type_test_stub(stub.raw());
+  StoreNonPointer(&untag()->type_test_stub_entry_point_, stub.EntryPoint());
+  untag()->set_type_test_stub(stub.ptr());
 }
 
 TypePtr Type::NullType() {
@@ -19618,11 +19627,11 @@
 }
 
 TypePtr Type::DynamicType() {
-  return Object::dynamic_type().raw();
+  return Object::dynamic_type().ptr();
 }
 
 TypePtr Type::VoidType() {
-  return Object::void_type().raw();
+  return Object::void_type().ptr();
 }
 
 TypePtr Type::NeverType() {
@@ -19708,54 +19717,54 @@
   // yet, so do not call DeclarationType().
   Type& type = Type::Handle(type_class.declaration_type());
   if (type.IsNull()) {
-    type = Type::New(Class::Handle(type_class.raw()),
+    type = Type::New(Class::Handle(type_class.ptr()),
                      Object::null_type_arguments(), Nullability::kNonNullable);
     type.SetIsFinalized();
     type ^= type.Canonicalize(Thread::Current(), nullptr);
     type_class.set_declaration_type(type);
   }
   ASSERT(type.IsFinalized());
-  return type.raw();
+  return type.ptr();
 }
 
 void Type::SetIsFinalized() const {
   ASSERT(!IsFinalized());
   if (IsInstantiated()) {
-    set_type_state(TypeLayout::kFinalizedInstantiated);
+    set_type_state(UntaggedType::kFinalizedInstantiated);
   } else {
-    set_type_state(TypeLayout::kFinalizedUninstantiated);
+    set_type_state(UntaggedType::kFinalizedUninstantiated);
   }
 }
 
 void FunctionType::SetIsFinalized() const {
   ASSERT(!IsFinalized());
   if (IsInstantiated()) {
-    set_type_state(FunctionTypeLayout::kFinalizedInstantiated);
+    set_type_state(UntaggedFunctionType::kFinalizedInstantiated);
   } else {
-    set_type_state(FunctionTypeLayout::kFinalizedUninstantiated);
+    set_type_state(UntaggedFunctionType::kFinalizedUninstantiated);
   }
 }
 
 void Type::SetIsBeingFinalized() const {
   ASSERT(!IsFinalized() && !IsBeingFinalized());
-  set_type_state(TypeLayout::kBeingFinalized);
+  set_type_state(UntaggedType::kBeingFinalized);
 }
 
 void FunctionType::SetIsBeingFinalized() const {
   ASSERT(!IsFinalized() && !IsBeingFinalized());
-  set_type_state(FunctionTypeLayout::kBeingFinalized);
+  set_type_state(UntaggedFunctionType::kBeingFinalized);
 }
 
 TypePtr Type::ToNullability(Nullability value, Heap::Space space) const {
   if (nullability() == value) {
-    return raw();
+    return ptr();
   }
   // Type parameter instantiation may request a nullability change, which should
   // be ignored for types dynamic and void. Type Null cannot be the result of
   // instantiating a non-nullable type parameter (TypeError thrown).
   const classid_t cid = type_class_id();
   if (cid == kDynamicCid || cid == kVoidCid || cid == kNullCid) {
-    return raw();
+    return ptr();
   }
   if (cid == kNeverCid && value == Nullability::kNullable) {
     // Normalize Never? to Null.
@@ -19775,13 +19784,13 @@
     ASSERT(!type.IsCanonical());
     type ^= type.Canonicalize(Thread::Current(), nullptr);
   }
-  return type.raw();
+  return type.ptr();
 }
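
Type::ToNullability above encodes a few normalization rules: nullability changes are ignored for dynamic, void, and Null, and Never? collapses to Null; anything else is cloned with the new nullability. A compact model of just those rules (the enums are illustrative stand-ins for the VM's class ids):

    #include <cassert>

    enum class Cid { kDynamic, kVoid, kNull, kNever, kOther };
    enum class Nullability { kNullable, kNonNullable, kLegacy };

    // The class id a type ends up with after a nullability change.
    Cid ToNullability(Cid cid, Nullability value) {
      if (cid == Cid::kDynamic || cid == Cid::kVoid || cid == Cid::kNull) {
        return cid;  // change ignored for dynamic, void, Null
      }
      if (cid == Cid::kNever && value == Nullability::kNullable) {
        return Cid::kNull;  // normalize Never? to Null
      }
      return cid;  // otherwise: clone with the new nullability
    }

    int main() {
      assert(ToNullability(Cid::kNever, Nullability::kNullable) == Cid::kNull);
      assert(ToNullability(Cid::kDynamic, Nullability::kNullable) ==
             Cid::kDynamic);
      return 0;
    }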
 
 FunctionTypePtr FunctionType::ToNullability(Nullability value,
                                             Heap::Space space) const {
   if (nullability() == value) {
-    return raw();
+    return ptr();
   }
   // Clone function type and set new nullability.
   FunctionType& type = FunctionType::Handle();
@@ -19797,11 +19806,11 @@
     ASSERT(!type.IsCanonical());
     type ^= type.Canonicalize(Thread::Current(), nullptr);
   }
-  return type.raw();
+  return type.ptr();
 }
 
 classid_t Type::type_class_id() const {
-  return Smi::Value(raw_ptr()->type_class_id());
+  return Smi::Value(untag()->type_class_id());
 }
 
 ClassPtr Type::type_class() const {
@@ -19811,11 +19820,11 @@
 bool Type::IsInstantiated(Genericity genericity,
                           intptr_t num_free_fun_type_params,
                           TrailPtr trail) const {
-  if (raw_ptr()->type_state_ == TypeLayout::kFinalizedInstantiated) {
+  if (untag()->type_state_ == UntaggedType::kFinalizedInstantiated) {
     return true;
   }
   if ((genericity == kAny) && (num_free_fun_type_params == kAllFree) &&
-      (raw_ptr()->type_state_ == TypeLayout::kFinalizedUninstantiated)) {
+      (untag()->type_state_ == UntaggedType::kFinalizedUninstantiated)) {
     return false;
   }
   if (arguments() == TypeArguments::null()) {
@@ -19859,7 +19868,7 @@
       num_free_fun_type_params, space, trail);
   // A returned empty_type_arguments indicates a failed instantiation in dead
   // code that must be propagated up to the caller, the optimizing compiler.
-  if (type_arguments.raw() == Object::empty_type_arguments().raw()) {
+  if (type_arguments.ptr() == Object::empty_type_arguments().ptr()) {
     return Type::null();
   }
   // This uninstantiated type is not modified, as it can be instantiated
@@ -19881,7 +19890,7 @@
                         TypeEquality kind,
                         TrailPtr trail) const {
   ASSERT(!IsNull());
-  if (raw() == other.raw()) {
+  if (ptr() == other.ptr()) {
     return true;
   }
   if (other.IsTypeRef()) {
@@ -19982,7 +19991,7 @@
                                 TypeEquality kind,
                                 TrailPtr trail) const {
   ASSERT(!IsNull());
-  if (raw() == other.raw()) {
+  if (ptr() == other.ptr()) {
     return true;
   }
   if (other.IsTypeRef()) {
@@ -20096,7 +20105,7 @@
 }
 
 bool Type::IsDeclarationTypeOf(const Class& cls) const {
-  ASSERT(type_class() == cls.raw());
+  ASSERT(type_class() == cls.ptr());
   if (cls.IsNullClass()) {
     return true;
   }
@@ -20115,18 +20124,18 @@
     ASSERT(type_args.IsCanonical());
     ASSERT(type_args.IsOld());
 #endif
-    return this->raw();
+    return this->ptr();
   }
   auto isolate_group = thread->isolate_group();
   const classid_t cid = type_class_id();
   if (cid == kDynamicCid) {
     ASSERT(Object::dynamic_type().IsCanonical());
-    return Object::dynamic_type().raw();
+    return Object::dynamic_type().ptr();
   }
 
   if (cid == kVoidCid) {
     ASSERT(Object::void_type().IsCanonical());
-    return Object::void_type().raw();
+    return Object::void_type().ptr();
   }
 
   const Class& cls = Class::Handle(zone, type_class());
@@ -20136,7 +20145,7 @@
     ASSERT(!cls.IsNullClass() || IsNullable());
     Type& type = Type::Handle(zone, cls.declaration_type());
     if (type.IsNull()) {
-      ASSERT(!cls.raw()->ptr()->InVMIsolateHeap() ||
+      ASSERT(!cls.ptr()->untag()->InVMIsolateHeap() ||
              (isolate_group == Dart::vm_isolate_group()));
       // Canonicalize the type arguments of the supertype, if any.
       TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
@@ -20144,7 +20153,7 @@
       if (IsCanonical()) {
         // Canonicalizing type_args canonicalized this type.
         ASSERT(IsRecursive());
-        return this->raw();
+        return this->ptr();
       }
       set_arguments(type_args);
       type = cls.declaration_type();
@@ -20157,13 +20166,13 @@
           if (this->IsNew()) {
             type ^= Object::Clone(*this, Heap::kOld);
           } else {
-            type = this->raw();
+            type = this->ptr();
           }
           ASSERT(type.IsOld());
           type.ComputeHash();
           type.SetCanonical();
           cls.set_declaration_type(type);
-          return type.raw();
+          return type.ptr();
         }
       }
     }
@@ -20171,7 +20180,7 @@
     // TODO(rmacnak): Revisit immediately returning type after changes to
     // recanonicalization on load for literal splitting.
     if (type.IsCanonical()) {
-      return type.raw();
+      return type.ptr();
     }
   }
 
@@ -20181,7 +20190,7 @@
     SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
     CanonicalTypeSet table(zone, object_store->canonical_types());
     type ^= table.GetOrNull(CanonicalTypeKey(*this));
-    ASSERT(object_store->canonical_types() == table.Release().raw());
+    ASSERT(object_store->canonical_types() == table.Release().ptr());
   }
   if (type.IsNull()) {
     // The type was not found in the table. It is not canonical yet.
@@ -20202,7 +20211,7 @@
           type_arg = type_args.TypeAt(i);
           new_type_args.SetTypeAt(i, type_arg);
         }
-        type_args = new_type_args.raw();
+        type_args = new_type_args.ptr();
         set_arguments(type_args);
         SetHash(0);  // Flush cached hash value.
       }
@@ -20212,7 +20221,7 @@
       // Canonicalizing type_args canonicalized this type as a side effect.
       ASSERT(IsRecursive());
       // A type can be recursive due to a cycle in its type arguments.
-      return this->raw();
+      return this->ptr();
     }
     set_arguments(type_args);
     ASSERT(type_args.IsNull() || type_args.IsOld());
@@ -20227,7 +20236,7 @@
       if (this->IsNew()) {
         type ^= Object::Clone(*this, Heap::kOld);
       } else {
-        type = this->raw();
+        type = this->ptr();
       }
       ASSERT(type.IsOld());
       type.SetCanonical();  // Mark object as being canonical.
@@ -20236,7 +20245,7 @@
     }
     object_store->set_canonical_types(table.Release());
   }
-  return type.raw();
+  return type.ptr();
 }
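
The canonicalization above is a lookup-or-insert against a shared table under a mutex, so structurally equal types end up sharing one representative. A sketch of that pattern, with std::unordered_set and std::mutex standing in for the VM's CanonicalTypeSet and safepoint locks:

    #include <unordered_set>
    #include <mutex>
    #include <string>
    #include <cassert>

    std::mutex canonical_mutex;
    std::unordered_set<std::string> canonical_types;

    // Returns the canonical representative, inserting on first sight.
    const std::string* Canonicalize(const std::string& type) {
      std::lock_guard<std::mutex> lock(canonical_mutex);
      auto it = canonical_types.find(type);
      if (it == canonical_types.end()) {
        it = canonical_types.insert(type).first;
      }
      return &*it;  // node-based set: element address is stable
    }

    int main() {
      const std::string* a = Canonicalize("Map<int, String>");
      const std::string* b = Canonicalize("Map<int, String>");
      assert(a == b);  // equal types share one representative
      return 0;
    }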
 
 #if defined(DEBUG)
@@ -20246,10 +20255,10 @@
   }
   const classid_t cid = type_class_id();
   if (cid == kDynamicCid) {
-    return (raw() == Object::dynamic_type().raw());
+    return (ptr() == Object::dynamic_type().ptr());
   }
   if (cid == kVoidCid) {
-    return (raw() == Object::void_type().raw());
+    return (ptr() == Object::void_type().ptr());
   }
   Zone* zone = thread->zone();
   auto isolate_group = thread->isolate_group();
@@ -20260,7 +20269,7 @@
   if (IsDeclarationTypeOf(cls)) {
     type = cls.declaration_type();
     ASSERT(type.IsCanonical());
-    return (raw() == type.raw());
+    return (ptr() == type.ptr());
   }
 
   ObjectStore* object_store = isolate_group->object_store();
@@ -20270,7 +20279,7 @@
     type ^= table.GetOrNull(CanonicalTypeKey(*this));
     object_store->set_canonical_types(table.Release());
   }
-  return (raw() == type.raw());
+  return (ptr() == type.ptr());
 }
 #endif  // DEBUG
 
@@ -20360,12 +20369,12 @@
 
 void Type::set_type_class(const Class& value) const {
   ASSERT(!value.IsNull());
-  raw_ptr()->set_type_class_id(Smi::New(value.id()));
+  untag()->set_type_class_id(Smi::New(value.id()));
 }
 
 void Type::set_arguments(const TypeArguments& value) const {
   ASSERT(!IsCanonical());
-  raw_ptr()->set_arguments(value.raw());
+  untag()->set_arguments(value.ptr());
 }
 
 TypePtr Type::New(Heap::Space space) {
@@ -20382,19 +20391,19 @@
   result.set_type_class(clazz);
   result.set_arguments(arguments);
   result.SetHash(0);
-  result.StoreNonPointer(&result.raw_ptr()->type_state_,
-                         TypeLayout::kAllocated);
+  result.StoreNonPointer(&result.untag()->type_state_,
+                         UntaggedType::kAllocated);
   result.set_nullability(nullability);
 
   result.SetTypeTestingStub(
       Code::Handle(Z, TypeTestingStubGenerator::DefaultCodeForType(result)));
-  return result.raw();
+  return result.ptr();
 }
 
 void Type::set_type_state(uint8_t state) const {
-  ASSERT((state >= TypeLayout::kAllocated) &&
-         (state <= TypeLayout::kFinalizedUninstantiated));
-  StoreNonPointer(&raw_ptr()->type_state_, state);
+  ASSERT((state >= UntaggedType::kAllocated) &&
+         (state <= UntaggedType::kFinalizedUninstantiated));
+  StoreNonPointer(&untag()->type_state_, state);
 }
 
 const char* Type::ToCString() const {
@@ -20521,7 +20530,7 @@
       ASSERT(type.IsCanonical());
     }
 #endif
-    return raw();
+    return ptr();
   }
   auto isolate_group = thread->isolate_group();
   ObjectStore* object_store = isolate_group->object_store();
@@ -20531,7 +20540,7 @@
     CanonicalFunctionTypeSet table(zone,
                                    object_store->canonical_function_types());
     sig ^= table.GetOrNull(CanonicalFunctionTypeKey(*this));
-    ASSERT(object_store->canonical_function_types() == table.Release().raw());
+    ASSERT(object_store->canonical_function_types() == table.Release().ptr());
   }
   if (sig.IsNull()) {
     // The function type was not found in the table. It is not canonical yet.
@@ -20573,7 +20582,7 @@
       // Canonicalizing signature types canonicalized this signature as a
       // side effect.
       ASSERT(IsRecursive());
-      return this->raw();
+      return this->ptr();
     }
     // Check to see if the function type got added to canonical table as part
     // of the canonicalization of its signature types.
@@ -20586,7 +20595,7 @@
       if (this->IsNew()) {
         sig ^= Object::Clone(*this, Heap::kOld);
       } else {
-        sig = this->raw();
+        sig = this->ptr();
       }
       ASSERT(sig.IsOld());
       sig.SetCanonical();  // Mark object as being canonical.
@@ -20595,7 +20604,7 @@
     }
     object_store->set_canonical_function_types(table.Release());
   }
-  return sig.raw();
+  return sig.ptr();
 }
 
 #if defined(DEBUG)
@@ -20611,7 +20620,7 @@
     type ^= table.GetOrNull(CanonicalFunctionTypeKey(*this));
     object_store->set_canonical_function_types(table.Release());
   }
-  return raw() == type.raw();
+  return ptr() == type.ptr();
 }
 #endif  // DEBUG
 
@@ -20653,7 +20662,7 @@
 bool TypeRef::IsEquivalent(const Instance& other,
                            TypeEquality kind,
                            TrailPtr trail) const {
-  if (raw() == other.raw()) {
+  if (ptr() == other.ptr()) {
     return true;
   }
   if (!other.IsAbstractType()) {
@@ -20675,7 +20684,7 @@
   TypeRef& instantiated_type_ref = TypeRef::Handle();
   instantiated_type_ref ^= OnlyBuddyInTrail(trail);
   if (!instantiated_type_ref.IsNull()) {
-    return instantiated_type_ref.raw();
+    return instantiated_type_ref.ptr();
   }
   instantiated_type_ref = TypeRef::New();
   AddOnlyBuddyToTrail(&trail, instantiated_type_ref);
@@ -20696,12 +20705,12 @@
 
   instantiated_type_ref.SetTypeTestingStub(Code::Handle(
       TypeTestingStubGenerator::DefaultCodeForType(instantiated_type_ref)));
-  return instantiated_type_ref.raw();
+  return instantiated_type_ref.ptr();
 }
 
 void TypeRef::set_type(const AbstractType& value) const {
   ASSERT(value.IsNull() || value.IsType() || value.IsFunctionType());
-  raw_ptr()->set_type(value.raw());
+  untag()->set_type(value.ptr());
 }
 
 // A TypeRef cannot be canonical by definition. Only its referenced type can be.
@@ -20710,7 +20719,7 @@
 // represented by a TypeRef pointing to itself.
 AbstractTypePtr TypeRef::Canonicalize(Thread* thread, TrailPtr trail) const {
   if (TestAndAddToTrail(&trail)) {
-    return raw();
+    return ptr();
   }
   // TODO(regis): Try to reduce the number of nodes required to represent the
   // referenced recursive type.
@@ -20718,7 +20727,7 @@
   ASSERT(!ref_type.IsNull());
   ref_type = ref_type.Canonicalize(thread, trail);
   set_type(ref_type);
-  return raw();
+  return ptr();
 }
 
 #if defined(DEBUG)
@@ -20775,7 +20784,7 @@
 
   result.SetTypeTestingStub(
       Code::Handle(Z, TypeTestingStubGenerator::DefaultCodeForType(result)));
-  return result.raw();
+  return result.ptr();
 }
 
 const char* TypeRef::ToCString() const {
@@ -20796,30 +20805,30 @@
 
 void TypeParameter::SetIsFinalized() const {
   ASSERT(!IsFinalized());
-  set_flags(TypeParameterLayout::FinalizedBit::update(
-      true, TypeParameterLayout::BeingFinalizedBit::update(false,
-                                                           raw_ptr()->flags_)));
+  set_flags(UntaggedTypeParameter::FinalizedBit::update(
+      true, UntaggedTypeParameter::BeingFinalizedBit::update(false,
+                                                             untag()->flags_)));
 }
 
 void TypeParameter::SetIsBeingFinalized() const {
   ASSERT(!IsFinalized());
   set_flags(
-      TypeParameterLayout::BeingFinalizedBit::update(true, raw_ptr()->flags_));
+      UntaggedTypeParameter::BeingFinalizedBit::update(true, untag()->flags_));
 }
 
 void TypeParameter::SetGenericCovariantImpl(bool value) const {
-  set_flags(TypeParameterLayout::GenericCovariantImplBit::update(
-      value, raw_ptr()->flags_));
+  set_flags(UntaggedTypeParameter::GenericCovariantImplBit::update(
+      value, untag()->flags_));
 }
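
The FinalizedBit/BeingFinalizedBit/GenericCovariantImplBit helpers above follow the VM's BitField::update(value, flags) shape: a pure function from the old flags word to the new one, which composes naturally (as in SetIsFinalized, which sets one bit and clears another in a single store). A standalone sketch with illustrative bit positions:

    #include <cstdint>
    #include <cassert>

    template <int kPosition>
    struct BoolBit {
      static uint8_t update(bool value, uint8_t flags) {
        const uint8_t mask = uint8_t{1} << kPosition;
        return value ? uint8_t(flags | mask) : uint8_t(flags & ~mask);
      }
      static bool decode(uint8_t flags) {
        return ((flags >> kPosition) & 1) != 0;
      }
    };

    using FinalizedBit = BoolBit<0>;
    using BeingFinalizedBit = BoolBit<1>;

    int main() {
      uint8_t flags = BeingFinalizedBit::update(true, 0);
      // SetIsFinalized: set Finalized, clear BeingFinalized, one word.
      flags =
          FinalizedBit::update(true, BeingFinalizedBit::update(false, flags));
      assert(FinalizedBit::decode(flags));
      assert(!BeingFinalizedBit::decode(flags));
      return 0;
    }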
 
 void TypeParameter::set_nullability(Nullability value) const {
-  StoreNonPointer(&raw_ptr()->nullability_, static_cast<uint8_t>(value));
+  StoreNonPointer(&untag()->nullability_, static_cast<uint8_t>(value));
 }
 
 TypeParameterPtr TypeParameter::ToNullability(Nullability value,
                                               Heap::Space space) const {
   if (nullability() == value) {
-    return raw();
+    return ptr();
   }
   // Clone type parameter and set new nullability.
   TypeParameter& type_parameter = TypeParameter::Handle();
@@ -20835,7 +20844,7 @@
     ASSERT(type_parameter.IsFinalized());
     type_parameter ^= type_parameter.Canonicalize(Thread::Current(), nullptr);
   }
-  return type_parameter.raw();
+  return type_parameter.ptr();
 }
 
 bool TypeParameter::IsInstantiated(Genericity genericity,
@@ -20867,7 +20876,7 @@
 bool TypeParameter::IsEquivalent(const Instance& other,
                                  TypeEquality kind,
                                  TrailPtr trail) const {
-  if (raw() == other.raw()) {
+  if (ptr() == other.ptr()) {
     return true;
   }
   if (other.IsTypeRef()) {
@@ -21020,11 +21029,11 @@
 }
 
 void TypeParameter::set_parameterized_class_id(classid_t value) const {
-  StoreNonPointer(&raw_ptr()->parameterized_class_id_, value);
+  StoreNonPointer(&untag()->parameterized_class_id_, value);
 }
 
 classid_t TypeParameter::parameterized_class_id() const {
-  return raw_ptr()->parameterized_class_id_;
+  return untag()->parameterized_class_id_;
 }
 
 ClassPtr TypeParameter::parameterized_class() const {
@@ -21039,28 +21048,28 @@
 void TypeParameter::set_base(intptr_t value) const {
   ASSERT(value >= 0);
   ASSERT(Utils::IsUint(16, value));
-  StoreNonPointer(&raw_ptr()->base_, value);
+  StoreNonPointer(&untag()->base_, value);
 }
 
 void TypeParameter::set_index(intptr_t value) const {
   ASSERT(value >= 0);
   ASSERT(Utils::IsUint(16, value));
-  StoreNonPointer(&raw_ptr()->index_, value);
+  StoreNonPointer(&untag()->index_, value);
 }
 
 void TypeParameter::set_name(const String& value) const {
   ASSERT(value.IsSymbol());
-  raw_ptr()->set_name(value.raw());
+  untag()->set_name(value.ptr());
 }
 
 void TypeParameter::set_bound(const AbstractType& value) const {
   ASSERT(!IsCanonical());
-  raw_ptr()->set_bound(value.raw());
+  untag()->set_bound(value.ptr());
 }
 
 void TypeParameter::set_default_argument(const AbstractType& value) const {
   ASSERT(!IsCanonical());
-  raw_ptr()->set_default_argument(value.raw());
+  untag()->set_default_argument(value.ptr());
 }
 
 AbstractTypePtr TypeParameter::GetFromTypeArguments(
@@ -21084,7 +21093,7 @@
     ASSERT(IsFinalized());
     if (index() >= num_free_fun_type_params) {
       // Do not instantiate the function type parameter, but possibly its bound.
-      result = raw();
+      result = ptr();
       AbstractType& upper_bound = AbstractType::Handle(bound());
       if (!upper_bound.IsInstantiated(kAny, num_free_fun_type_params,
                                       nullptr)) {
@@ -21093,15 +21102,15 @@
         if (TestAndAddBuddyToTrail(&trail, *this)) {
           // If the type parameter is already in the trail, it is returned
           // unchanged here and will be processed when returning from recursion.
-          return raw();
+          return ptr();
         }
         upper_bound = upper_bound.InstantiateFrom(
             instantiator_type_arguments, function_type_arguments,
             num_free_fun_type_params, space, trail);
-        if (upper_bound.raw() == Type::NeverType()) {
+        if (upper_bound.ptr() == Type::NeverType()) {
           // Normalize 'X extends Never' to 'Never'.
           result = Type::NeverType();
-        } else if (upper_bound.raw() != bound()) {
+        } else if (upper_bound.ptr() != bound()) {
           result ^= Object::Clone(result, space);
           TypeParameter::Cast(result).set_bound(upper_bound);
         }
@@ -21152,7 +21161,7 @@
     ASSERT(type.IsOld());
     ASSERT(type.IsCanonical());
 #endif
-    return this->raw();
+    return this->ptr();
   }
   auto isolate_group = thread->isolate_group();
   ObjectStore* object_store = isolate_group->object_store();
@@ -21162,7 +21171,7 @@
     CanonicalTypeParameterSet table(zone,
                                     object_store->canonical_type_parameters());
     type_parameter ^= table.GetOrNull(CanonicalTypeParameterKey(*this));
-    ASSERT(object_store->canonical_type_parameters() == table.Release().raw());
+    ASSERT(object_store->canonical_type_parameters() == table.Release().ptr());
   }
   if (type_parameter.IsNull()) {
     // The type parameter was not found in the table. It is not canonical yet.
@@ -21171,7 +21180,7 @@
     // of a cycle via its bound. Return it now and let the caller finish
     // canonicalizing it.
     if (TestAndAddToTrail(&trail)) {
-      return raw();
+      return ptr();
     }
     AbstractType& type = AbstractType::Handle(zone);
     type = bound();
@@ -21180,7 +21189,7 @@
       // Canonicalizing bound or default argument canonicalized this type
       // parameter as a side effect.
       ASSERT(IsRecursive());  // Self-referring bound or default argument.
-      return raw();
+      return ptr();
     }
     set_bound(type);
     type = default_argument();
@@ -21189,7 +21198,7 @@
       // Canonicalizing bound or default argument canonicalized this type
       // parameter as a side effect.
       ASSERT(IsRecursive());  // Self-referring bound or default argument.
-      return this->raw();
+      return this->ptr();
     }
     set_default_argument(type);
     // Check to see if the type parameter got added to canonical table as part
@@ -21203,7 +21212,7 @@
       if (this->IsNew()) {
         type_parameter ^= Object::Clone(*this, Heap::kOld);
       } else {
-        type_parameter = this->raw();
+        type_parameter = this->ptr();
       }
       ASSERT(type_parameter.IsOld());
       type_parameter.SetCanonical();  // Mark object as being canonical.
@@ -21212,7 +21221,7 @@
     }
     object_store->set_canonical_type_parameters(table.Release());
   }
-  return type_parameter.raw();
+  return type_parameter.ptr();
 }
 
 #if defined(DEBUG)
@@ -21229,7 +21238,7 @@
     type_parameter ^= table.GetOrNull(CanonicalTypeParameterKey(*this));
     object_store->set_canonical_type_parameters(table.Release());
   }
-  return (raw() == type_parameter.raw());
+  return (ptr() == type_parameter.ptr());
 }
 #endif  // DEBUG
 
@@ -21293,11 +21302,11 @@
 
   result.SetTypeTestingStub(
       Code::Handle(Z, TypeTestingStubGenerator::DefaultCodeForType(result)));
-  return result.raw();
+  return result.ptr();
 }
 
 void TypeParameter::set_flags(uint8_t flags) const {
-  StoreNonPointer(&raw_ptr()->flags_, flags);
+  StoreNonPointer(&untag()->flags_, flags);
 }
 
 const char* TypeParameter::ToCString() const {
@@ -21352,12 +21361,12 @@
     case kMintCid: {
       Mint& result = Mint::Handle(zone);
       result ^= cls.LookupCanonicalMint(zone, Mint::Cast(*this).value());
-      return (result.raw() == this->raw());
+      return (result.ptr() == this->ptr());
     }
     case kDoubleCid: {
       Double& dbl = Double::Handle(zone);
       dbl ^= cls.LookupCanonicalDouble(zone, Double::Cast(*this).value());
-      return (dbl.raw() == this->raw());
+      return (dbl.ptr() == this->ptr());
     }
     default:
       UNREACHABLE();
@@ -21477,17 +21486,17 @@
 }
 
 IntegerPtr Integer::AsValidInteger() const {
-  if (IsSmi()) return raw();
+  if (IsSmi()) return ptr();
   if (IsMint()) {
     Mint& mint = Mint::Handle();
-    mint ^= raw();
+    mint ^= ptr();
     if (Smi::IsValid(mint.value())) {
       return Smi::New(static_cast<intptr_t>(mint.value()));
     } else {
-      return raw();
+      return ptr();
     }
   }
-  return raw();
+  return ptr();
 }
 
 const char* Integer::ToHexCString(Zone* zone) const {
@@ -21508,8 +21517,8 @@
   // In 64-bit mode, the result of any operation between two Smis will fit in a
   // 64-bit signed result, except the product of two Smis (see below).
   if (IsSmi() && other.IsSmi()) {
-    const intptr_t left_value = Smi::Value(Smi::RawCast(raw()));
-    const intptr_t right_value = Smi::Value(Smi::RawCast(other.raw()));
+    const intptr_t left_value = Smi::Value(Smi::RawCast(ptr()));
+    const intptr_t right_value = Smi::Value(Smi::RawCast(other.ptr()));
     switch (operation) {
       case Token::kADD:
         return Integer::New(left_value + right_value, space);
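
The fast path above rests on the range argument in the comment: with 63-bit smi payloads on 64-bit targets, the sum or difference of two Smis always fits in int64_t, while the product may not. A compile-time check of that claim (the smi width is an assumption here, and the multiply check uses the GCC/Clang overflow intrinsic):

    #include <cstdint>
    #include <cassert>

    // assumption: on 64-bit targets a Smi payload is 63 bits (1 tag bit)
    constexpr int64_t kSmiMax = (INT64_C(1) << 62) - 1;
    constexpr int64_t kSmiMin = -(INT64_C(1) << 62);

    int main() {
      // The sum (and difference) of any two Smis fits in int64_t...
      static_assert(kSmiMax + kSmiMax < INT64_MAX, "sum of Smis fits");
      static_assert(kSmiMin + kSmiMin >= INT64_MIN, "sum of Smis fits");
      // ...but the product of two Smis can overflow 64 bits.
      int64_t product;
      assert(__builtin_mul_overflow(kSmiMax, kSmiMax, &product));
      return 0;
    }
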
@@ -21586,8 +21595,8 @@
                           const Integer& other,
                           Heap::Space space) const {
   if (IsSmi() && other.IsSmi()) {
-    intptr_t op1_value = Smi::Value(Smi::RawCast(raw()));
-    intptr_t op2_value = Smi::Value(Smi::RawCast(other.raw()));
+    intptr_t op1_value = Smi::Value(Smi::RawCast(ptr()));
+    intptr_t op2_value = Smi::Value(Smi::RawCast(other.ptr()));
     intptr_t result = 0;
     switch (kind) {
       case Token::kBIT_AND:
@@ -21688,7 +21697,7 @@
 }
 
 void Mint::set_value(int64_t value) const {
-  StoreNonPointer(&raw_ptr()->value_, value);
+  StoreNonPointer(&untag()->value_, value);
 }
 
 MintPtr Mint::New(int64_t val, Heap::Space space) {
@@ -21704,7 +21713,7 @@
     result ^= raw;
   }
   result.set_value(val);
-  return result.raw();
+  return result.ptr();
 }
 
 MintPtr Mint::NewCanonical(int64_t value) {
@@ -21724,18 +21733,18 @@
   Mint& canonical_value =
       Mint::Handle(zone, cls.LookupCanonicalMint(zone, value));
   if (!canonical_value.IsNull()) {
-    return canonical_value.raw();
+    return canonical_value.ptr();
   }
   canonical_value = Mint::New(value, Heap::kOld);
   canonical_value.SetCanonical();
   // The value needs to be added to the constants list. Grow the list if
   // it is full.
   cls.InsertCanonicalMint(zone, canonical_value);
-  return canonical_value.raw();
+  return canonical_value.ptr();
 }
 
 bool Mint::Equals(const Instance& other) const {
-  if (this->raw() == other.raw()) {
+  if (this->ptr() == other.ptr()) {
     // Both handles point to the same raw instance.
     return true;
   }
@@ -21780,13 +21789,13 @@
 }
 
 void Double::set_value(double value) const {
-  StoreNonPointer(&raw_ptr()->value_, value);
+  StoreNonPointer(&untag()->value_, value);
 }
 
 bool Double::BitwiseEqualsToDouble(double value) const {
   intptr_t value_offset = Double::value_offset();
   void* this_addr = reinterpret_cast<void*>(
-      reinterpret_cast<uword>(this->raw_ptr()) + value_offset);
+      reinterpret_cast<uword>(this->untag()) + value_offset);
   void* other_addr = reinterpret_cast<void*>(&value);
   return (memcmp(this_addr, other_addr, sizeof(value)) == 0);
 }
@@ -21802,7 +21811,7 @@
 }
 
 bool Double::CanonicalizeEquals(const Instance& other) const {
-  if (this->raw() == other.raw()) {
+  if (this->ptr() == other.ptr()) {
     return true;  // "===".
   }
   if (other.IsNull() || !other.IsDouble()) {
@@ -21826,13 +21835,13 @@
     result ^= raw;
   }
   result.set_value(d);
-  return result.raw();
+  return result.ptr();
 }
 
 DoublePtr Double::New(const String& str, Heap::Space space) {
   double double_value;
   if (!CStringToDouble(str.ToCString(), str.Length(), &double_value)) {
-    return Double::Handle().raw();
+    return Double::Handle().ptr();
   }
   return New(double_value, space);
 }
@@ -21854,19 +21863,19 @@
   Double& canonical_value =
       Double::Handle(zone, cls.LookupCanonicalDouble(zone, value));
   if (!canonical_value.IsNull()) {
-    return canonical_value.raw();
+    return canonical_value.ptr();
   }
   canonical_value = Double::New(value, Heap::kOld);
   canonical_value.SetCanonical();
   // The value needs to be added to the constants list.
   cls.InsertCanonicalDouble(zone, canonical_value);
-  return canonical_value.raw();
+  return canonical_value.ptr();
 }
 
 DoublePtr Double::NewCanonical(const String& str) {
   double double_value;
   if (!CStringToDouble(str.ToCString(), str.Length(), &double_value)) {
-    return Double::Handle().raw();
+    return Double::Handle().ptr();
   }
   return NewCanonical(double_value);
 }
@@ -21938,25 +21947,25 @@
 
 intptr_t String::Hash(StringPtr raw) {
   StringHasher hasher;
-  uword length = Smi::Value(raw->ptr()->length());
+  uword length = Smi::Value(raw->untag()->length());
   if (raw->IsOneByteString() || raw->IsExternalOneByteString()) {
     const uint8_t* data;
     if (raw->IsOneByteString()) {
-      data = static_cast<OneByteStringPtr>(raw)->ptr()->data();
+      data = static_cast<OneByteStringPtr>(raw)->untag()->data();
     } else {
       ASSERT(raw->IsExternalOneByteString());
       ExternalOneByteStringPtr str = static_cast<ExternalOneByteStringPtr>(raw);
-      data = str->ptr()->external_data_;
+      data = str->untag()->external_data_;
     }
     return String::Hash(data, length);
   } else {
     const uint16_t* data;
     if (raw->IsTwoByteString()) {
-      data = static_cast<TwoByteStringPtr>(raw)->ptr()->data();
+      data = static_cast<TwoByteStringPtr>(raw)->untag()->data();
     } else {
       ASSERT(raw->IsExternalTwoByteString());
       ExternalTwoByteStringPtr str = static_cast<ExternalTwoByteStringPtr>(raw);
-      data = str->ptr()->external_data_;
+      data = str->untag()->external_data_;
     }
     return String::Hash(data, length);
   }
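
String::Hash above dispatches on the representation only to locate the code-unit array; the hash itself is computed the same way for 8-bit and 16-bit data, so equal strings hash equally across representations. A sketch of that width-generic shape with a toy combinator (not the VM's hash function):

    #include <cstdint>
    #include <cstddef>
    #include <cassert>

    template <typename CodeUnit>
    uint32_t HashUnits(const CodeUnit* data, std::size_t length) {
      uint32_t hash = 0;
      for (std::size_t i = 0; i < length; i++) {
        hash = hash * 31 + static_cast<uint32_t>(data[i]);  // toy combinator
      }
      return hash;
    }

    int main() {
      const uint8_t one_byte[] = {'a', 'b', 'c'};
      const uint16_t two_byte[] = {'a', 'b', 'c'};
      // Equal code-unit sequences hash equally in either representation.
      assert(HashUnits(one_byte, 3) == HashUnits(two_byte, 3));
      return 0;
    }
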
@@ -21981,7 +21990,7 @@
 }
 
 intptr_t String::CharSize() const {
-  intptr_t class_id = raw()->GetClassId();
+  intptr_t class_id = ptr()->GetClassId();
   if (class_id == kOneByteStringCid || class_id == kExternalOneByteStringCid) {
     return kOneByteChar;
   }
@@ -21991,7 +22000,7 @@
 }
 
 void* String::GetPeer() const {
-  intptr_t class_id = raw()->GetClassId();
+  intptr_t class_id = ptr()->GetClassId();
   if (class_id == kExternalOneByteStringCid) {
     return ExternalOneByteString::GetPeer(*this);
   }
@@ -22000,7 +22009,7 @@
 }
 
 bool String::Equals(const Instance& other) const {
-  if (this->raw() == other.raw()) {
+  if (this->ptr() == other.ptr()) {
     // Both handles point to the same raw instance.
     return true;
   }
@@ -22161,7 +22170,7 @@
 
 InstancePtr String::CanonicalizeLocked(Thread* thread) const {
   if (IsCanonical()) {
-    return this->raw();
+    return this->ptr();
   }
   return Symbols::New(Thread::Current(), *this);
 }
@@ -22170,7 +22179,7 @@
 bool String::CheckIsCanonical(Thread* thread) const {
   Zone* zone = thread->zone();
   const String& str = String::Handle(zone, Symbols::Lookup(thread, *this));
-  return (str.raw() == this->raw());
+  return (str.ptr() == this->ptr());
 }
 #endif  // DEBUG
 
@@ -22196,7 +22205,7 @@
         return String::null();
       }
     }
-    return strobj.raw();
+    return strobj.ptr();
   }
   ASSERT((type == Utf8::kBMP) || (type == Utf8::kSupplementary));
   const String& strobj = String::Handle(TwoByteString::New(len, space));
@@ -22206,7 +22215,7 @@
     Utf8::ReportInvalidByte(utf8_array, array_len, len);
     return String::null();
   }
-  return strobj.raw();
+  return strobj.ptr();
 }
 
 StringPtr String::FromLatin1(const uint8_t* latin1_array,
@@ -22265,7 +22274,7 @@
     result = TwoByteString::New(len, space);
   }
   String::Copy(result, 0, str, 0, len);
-  return result.raw();
+  return result.ptr();
 }
 
 StringPtr String::NewExternal(const uint8_t* characters,
@@ -22618,7 +22627,7 @@
   ASSERT(begin_index >= 0);
   ASSERT(length >= 0);
   if (begin_index <= str.Length() && length == 0) {
-    return Symbols::Empty().raw();
+    return Symbols::Empty().ptr();
   }
   if (begin_index > str.Length()) {
     return String::null();
@@ -22641,7 +22650,7 @@
     result = TwoByteString::New(length, space);
   }
   String::Copy(result, 0, str, begin_index, length);
-  return result.raw();
+  return result.ptr();
 }
 
 const char* String::ToCString() const {
@@ -22695,7 +22704,7 @@
     dst_max = Utils::Maximum(dst_max, dst);
   }
   if (!has_mapping) {
-    return str.raw();
+    return str.ptr();
   }
   if (Utf::IsLatin1(dst_max)) {
     return OneByteString::Transform(mapping, str, space);
@@ -22815,12 +22824,12 @@
   UNREACHABLE();
 
 bool String::EqualsIgnoringPrivateKey(const String& str1, const String& str2) {
-  if (str1.raw() == str2.raw()) {
+  if (str1.ptr() == str2.ptr()) {
     return true;  // Both handles point to the same raw instance.
   }
   NoSafepointScope no_safepoint;
-  intptr_t str1_class_id = str1.raw()->GetClassId();
-  intptr_t str2_class_id = str2.raw()->GetClassId();
+  intptr_t str1_class_id = str1.ptr()->GetClassId();
+  intptr_t str2_class_id = str2.ptr()->GetClassId();
   switch (str1_class_id) {
     case kOneByteStringCid:
       EQUALS_IGNORING_PRIVATE_KEY(str2_class_id, OneByteString, str1, str2);
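
For context on what the macro above compares: Dart mangles library-private identifiers with an '@' followed by a per-library key, so equality has to skip the key runs. A simplified standalone sketch, assuming keys are the alphanumeric run after each '@' (the macro-generated VM version additionally dispatches over the four string representations):

#include <cctype>
#include <string>

// Sketch: true if a and b match once the characters following each '@'
// (Dart's private-name key) are skipped in both strings.
bool EqualsIgnoringPrivateKeySketch(const std::string& a,
                                    const std::string& b) {
  auto skip_key = [](const std::string& s, size_t k) {
    while (k < s.size() && std::isalnum(static_cast<unsigned char>(s[k]))) ++k;
    return k;
  };
  size_t i = 0, j = 0;
  while (i < a.size() && j < b.size()) {
    if (a[i] != b[j]) return false;
    if (a[i] == '@') {  // both sides at '@': jump over both keys
      i = skip_key(a, i + 1);
      j = skip_key(b, j + 1);
    } else {
      ++i;
      ++j;
    }
  }
  return i == a.size() && j == b.size();
}
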
@@ -22938,9 +22947,9 @@
                                      OneByteString::InstanceSize(len), space);
     NoSafepointScope no_safepoint;
     OneByteStringPtr result = static_cast<OneByteStringPtr>(raw);
-    result->ptr()->set_length(Smi::New(len));
+    result->untag()->set_length(Smi::New(len));
 #if !defined(HASH_IN_OBJECT_HEADER)
-    result->ptr()->set_hash(Smi::New(0));
+    result->untag()->set_hash(Smi::New(0));
 #endif
     return result;
   }
@@ -23093,8 +23102,8 @@
   OneByteStringPtr result = OneByteString::New(length, space);
   NoSafepointScope no_safepoint;
   if (length > 0) {
-    uint8_t* dest = &result->ptr()->data()[0];
-    const uint8_t* src = &raw_ptr(str)->data()[begin_index];
+    uint8_t* dest = &result->untag()->data()[0];
+    const uint8_t* src = &untag(str)->data()[begin_index];
     memmove(dest, src, length);
   }
   return result;
@@ -23354,7 +23363,7 @@
   }
   result.set_value(value);
   result.SetCanonical();
-  return result.raw();
+  return result.ptr();
 }
 
 const char* Bool::ToCString() const {
@@ -23362,7 +23371,7 @@
 }
 
 bool Array::CanonicalizeEquals(const Instance& other) const {
-  if (this->raw() == other.raw()) {
+  if (this->ptr() == other.ptr()) {
     // Both handles point to the same raw instance.
     return true;
   }
@@ -23405,7 +23414,7 @@
     return 1;
   }
   Thread* thread = Thread::Current();
-  uint32_t hash = thread->heap()->GetCanonicalHash(raw());
+  uint32_t hash = thread->heap()->GetCanonicalHash(ptr());
   if (hash != 0) {
     return hash;
   }
@@ -23417,7 +23426,7 @@
     hash = CombineHashes(hash, member.CanonicalizeHash());
   }
   hash = FinalizeHash(hash, kHashBits);
-  thread->heap()->SetCanonicalHash(raw(), hash);
+  thread->heap()->SetCanonicalHash(ptr(), hash);
   return hash;
 }
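
CombineHashes/FinalizeHash above follow the Jenkins one-at-a-time mixing scheme, with zero reserved so GetCanonicalHash can treat it as "not yet computed". A sketch under that assumption; the shift constants are illustrative, not copied from the VM's hash utilities:

#include <cstdint>

// Illustrative Jenkins-style combine/finalize pair.
inline uint32_t CombineHashesSketch(uint32_t hash, uint32_t other) {
  hash += other;
  hash += hash << 10;
  hash ^= hash >> 6;
  return hash;
}

inline uint32_t FinalizeHashSketch(uint32_t hash, int bits) {
  hash += hash << 3;
  hash ^= hash >> 11;
  hash += hash << 15;
  hash &= (1u << bits) - 1;     // truncate to the requested bit width
  return hash == 0 ? 1 : hash;  // keep 0 free as the "no hash" marker
}
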
 
@@ -23427,7 +23436,7 @@
   ArrayPtr result = New(kClassId, len, space);
   if (UseCardMarkingForAllocation(len)) {
     ASSERT(result->IsOldObject());
-    result->ptr()->SetCardRememberedBitUnsynchronized();
+    result->untag()->SetCardRememberedBitUnsynchronized();
   }
   return result;
 }
@@ -23442,7 +23451,7 @@
     type_args = type_args.Canonicalize(Thread::Current(), nullptr);
     result.SetTypeArguments(type_args);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 ArrayPtr Array::New(intptr_t class_id, intptr_t len, Heap::Space space) {
@@ -23454,7 +23463,7 @@
     ArrayPtr raw = static_cast<ArrayPtr>(
         Object::Allocate(class_id, Array::InstanceSize(len), space));
     NoSafepointScope no_safepoint;
-    raw->ptr()->set_length(Smi::New(len));
+    raw->untag()->set_length(Smi::New(len));
     return raw;
   }
 }
@@ -23472,13 +23481,13 @@
     dest.SetTypeArguments(TypeArguments::Handle(GetTypeArguments()));
   }
 
-  return dest.raw();
+  return dest.ptr();
 }
 
 void Array::MakeImmutable() const {
   if (IsImmutable()) return;
   ASSERT(!IsCanonical());
-  raw_ptr()->SetClassId(kImmutableArrayCid);
+  untag()->SetClassId(kImmutableArrayCid);
 }
 
 const char* Array::ToCString() const {
@@ -23509,7 +23518,7 @@
     obj = source.At(i);
     result.SetAt(i, obj);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 void Array::Truncate(intptr_t new_len) const {
@@ -23518,7 +23527,7 @@
   }
   Thread* thread = Thread::Current();
   Zone* zone = thread->zone();
-  const Array& array = Array::Handle(zone, this->raw());
+  const Array& array = Array::Handle(zone, this->ptr());
 
   intptr_t old_len = array.Length();
   ASSERT(new_len <= old_len);
@@ -23538,18 +23547,18 @@
   // Update the size in the header field and length of the array object.
   // These release operations are balanced by acquire operations in the
   // concurrent sweeper.
-  uword old_tags = array.raw_ptr()->tags_;
+  uword old_tags = array.untag()->tags_;
   uword new_tags;
-  ASSERT(kArrayCid == ObjectLayout::ClassIdTag::decode(old_tags));
+  ASSERT(kArrayCid == UntaggedObject::ClassIdTag::decode(old_tags));
   do {
-    new_tags = ObjectLayout::SizeTag::update(new_size, old_tags);
-  } while (!array.raw_ptr()->tags_.compare_exchange_weak(
+    new_tags = UntaggedObject::SizeTag::update(new_size, old_tags);
+  } while (!array.untag()->tags_.compare_exchange_weak(
       old_tags, new_tags, std::memory_order_release));
 
   // Between the CAS of the header above and the SetLength below, the array is
   // temporarily in an inconsistent state. The header is considered the
-  // overriding source of object size by ObjectLayout::Size, but the ASSERTs in
-  // ObjectLayout::HeapSizeFromClass must handle this special case.
+  // overriding source of object size by UntaggedObject::HeapSize, but the
+  // ASSERTs in UntaggedObject::HeapSizeFromClass must handle this special case.
   array.SetLengthRelease(new_len);
 }
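
Truncate shrinks the object in place, so the header's size bits must be republished with release ordering for the sweeper's acquire loads to pair with, and the weak CAS must tolerate concurrent GC bit flips. A standalone sketch of that retry loop, with hypothetical kSizeShift/kSizeMask standing in for UntaggedObject::SizeTag:

#include <atomic>
#include <cstdint>

constexpr uintptr_t kSizeShift = 8;  // hypothetical field position
constexpr uintptr_t kSizeMask = uintptr_t{0xff} << kSizeShift;

// Publish a smaller size into an object's header word. On CAS failure,
// compare_exchange_weak reloads old_tags, so updates raced by the GC retry.
void ShrinkInPlace(std::atomic<uintptr_t>& tags, uintptr_t new_size_words) {
  uintptr_t old_tags = tags.load(std::memory_order_relaxed);
  uintptr_t new_tags;
  do {
    new_tags = (old_tags & ~kSizeMask) | (new_size_words << kSizeShift);
  } while (!tags.compare_exchange_weak(old_tags, new_tags,
                                       std::memory_order_release));
}
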
 
@@ -23566,7 +23575,7 @@
     if (type_arguments.IsNull() && !unique) {
       // This is a raw List (as in no type arguments), so we can return the
       // simple empty array.
-      return Object::empty_array().raw();
+      return Object::empty_array().ptr();
     }
 
     // The backing array may be a shared instance, or may not have correct
@@ -23574,7 +23583,7 @@
     Heap::Space space = thread->IsMutatorThread() ? Heap::kNew : Heap::kOld;
     Array& array = Array::Handle(zone, Array::New(0, space));
     array.SetTypeArguments(type_arguments);
-    return array.raw();
+    return array.ptr();
   }
   const Array& array = Array::Handle(zone, growable_array.data());
   ASSERT(array.IsArray());
@@ -23586,7 +23595,7 @@
 
   // Truncate the old backing array and return it.
   array.Truncate(used_len);
-  return array.raw();
+  return array.ptr();
 }
 
 void Array::CanonicalizeFieldsLocked(Thread* thread) const {
@@ -23630,7 +23639,7 @@
   const Array& contents = Array::Handle(data());
   const Array& new_contents =
       Array::Handle(Array::Grow(contents, new_capacity, space));
-  raw_ptr()->set_data(new_contents.raw());
+  untag()->set_data(new_contents.ptr());
 }
 
 ObjectPtr GrowableObjectArray::RemoveLast() const {
@@ -23641,12 +23650,12 @@
   const PassiveObject& obj = PassiveObject::Handle(contents.At(index));
   contents.SetAt(index, Object::null_object());
   SetLength(index);
-  return obj.raw();
+  return obj.ptr();
 }
 
 GrowableObjectArrayPtr GrowableObjectArray::New(intptr_t capacity,
                                                 Heap::Space space) {
-  ArrayPtr raw_data = (capacity == 0) ? Object::empty_array().raw()
+  ArrayPtr raw_data = (capacity == 0) ? Object::empty_array().ptr()
                                       : Array::New(capacity, space);
   const Array& data = Array::Handle(raw_data);
   return New(data, space);
@@ -23667,7 +23676,7 @@
     result.SetLength(0);
     result.SetData(array);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 const char* GrowableObjectArray::ToCString() const {
@@ -23739,7 +23748,7 @@
   result.SetHashMask(hash_mask);
   result.SetUsedData(used_data);
   result.SetDeletedKeys(deleted_keys);
-  return result.raw();
+  return result.ptr();
 }
 
 LinkedHashMapPtr LinkedHashMap::NewUninitialized(Heap::Space space) {
@@ -23752,7 +23761,7 @@
     NoSafepointScope no_safepoint;
     result ^= raw;
   }
-  return result.raw();
+  return result.ptr();
 }
 
 const char* LinkedHashMap::ToCString() const {
@@ -23783,7 +23792,7 @@
   result.set_y(v1);
   result.set_z(v2);
   result.set_w(v3);
-  return result.raw();
+  return result.ptr();
 }
 
 Float32x4Ptr Float32x4::New(simd128_value_t value, Heap::Space space) {
@@ -23797,49 +23806,49 @@
     result ^= raw;
   }
   result.set_value(value);
-  return result.raw();
+  return result.ptr();
 }
 
 simd128_value_t Float32x4::value() const {
   return LoadUnaligned(
-      reinterpret_cast<const simd128_value_t*>(&raw_ptr()->value_));
+      reinterpret_cast<const simd128_value_t*>(&untag()->value_));
 }
 
 void Float32x4::set_value(simd128_value_t value) const {
-  StoreUnaligned(reinterpret_cast<simd128_value_t*>(&raw()->ptr()->value_),
+  StoreUnaligned(reinterpret_cast<simd128_value_t*>(&ptr()->untag()->value_),
                  value);
 }
 
 void Float32x4::set_x(float value) const {
-  StoreNonPointer(&raw_ptr()->value_[0], value);
+  StoreNonPointer(&untag()->value_[0], value);
 }
 
 void Float32x4::set_y(float value) const {
-  StoreNonPointer(&raw_ptr()->value_[1], value);
+  StoreNonPointer(&untag()->value_[1], value);
 }
 
 void Float32x4::set_z(float value) const {
-  StoreNonPointer(&raw_ptr()->value_[2], value);
+  StoreNonPointer(&untag()->value_[2], value);
 }
 
 void Float32x4::set_w(float value) const {
-  StoreNonPointer(&raw_ptr()->value_[3], value);
+  StoreNonPointer(&untag()->value_[3], value);
 }
 
 float Float32x4::x() const {
-  return raw_ptr()->value_[0];
+  return untag()->value_[0];
 }
 
 float Float32x4::y() const {
-  return raw_ptr()->value_[1];
+  return untag()->value_[1];
 }
 
 float Float32x4::z() const {
-  return raw_ptr()->value_[2];
+  return untag()->value_[2];
 }
 
 float Float32x4::w() const {
-  return raw_ptr()->value_[3];
+  return untag()->value_[3];
 }
 
 const char* Float32x4::ToCString() const {
@@ -23869,7 +23878,7 @@
   result.set_y(v1);
   result.set_z(v2);
   result.set_w(v3);
-  return result.raw();
+  return result.ptr();
 }
 
 Int32x4Ptr Int32x4::New(simd128_value_t value, Heap::Space space) {
@@ -23883,48 +23892,48 @@
     result ^= raw;
   }
   result.set_value(value);
-  return result.raw();
+  return result.ptr();
 }
 
 void Int32x4::set_x(int32_t value) const {
-  StoreNonPointer(&raw_ptr()->value_[0], value);
+  StoreNonPointer(&untag()->value_[0], value);
 }
 
 void Int32x4::set_y(int32_t value) const {
-  StoreNonPointer(&raw_ptr()->value_[1], value);
+  StoreNonPointer(&untag()->value_[1], value);
 }
 
 void Int32x4::set_z(int32_t value) const {
-  StoreNonPointer(&raw_ptr()->value_[2], value);
+  StoreNonPointer(&untag()->value_[2], value);
 }
 
 void Int32x4::set_w(int32_t value) const {
-  StoreNonPointer(&raw_ptr()->value_[3], value);
+  StoreNonPointer(&untag()->value_[3], value);
 }
 
 int32_t Int32x4::x() const {
-  return raw_ptr()->value_[0];
+  return untag()->value_[0];
 }
 
 int32_t Int32x4::y() const {
-  return raw_ptr()->value_[1];
+  return untag()->value_[1];
 }
 
 int32_t Int32x4::z() const {
-  return raw_ptr()->value_[2];
+  return untag()->value_[2];
 }
 
 int32_t Int32x4::w() const {
-  return raw_ptr()->value_[3];
+  return untag()->value_[3];
 }
 
 simd128_value_t Int32x4::value() const {
   return LoadUnaligned(
-      reinterpret_cast<const simd128_value_t*>(&raw_ptr()->value_));
+      reinterpret_cast<const simd128_value_t*>(&untag()->value_));
 }
 
 void Int32x4::set_value(simd128_value_t value) const {
-  StoreUnaligned(reinterpret_cast<simd128_value_t*>(&raw()->ptr()->value_),
+  StoreUnaligned(reinterpret_cast<simd128_value_t*>(&ptr()->untag()->value_),
                  value);
 }
 
@@ -23949,7 +23958,7 @@
   }
   result.set_x(value0);
   result.set_y(value1);
-  return result.raw();
+  return result.ptr();
 }
 
 Float64x2Ptr Float64x2::New(simd128_value_t value, Heap::Space space) {
@@ -23963,31 +23972,31 @@
     result ^= raw;
   }
   result.set_value(value);
-  return result.raw();
+  return result.ptr();
 }
 
 double Float64x2::x() const {
-  return raw_ptr()->value_[0];
+  return untag()->value_[0];
 }
 
 double Float64x2::y() const {
-  return raw_ptr()->value_[1];
+  return untag()->value_[1];
 }
 
 void Float64x2::set_x(double x) const {
-  StoreNonPointer(&raw_ptr()->value_[0], x);
+  StoreNonPointer(&untag()->value_[0], x);
 }
 
 void Float64x2::set_y(double y) const {
-  StoreNonPointer(&raw_ptr()->value_[1], y);
+  StoreNonPointer(&untag()->value_[1], y);
 }
 
 simd128_value_t Float64x2::value() const {
-  return simd128_value_t().readFrom(&raw_ptr()->value_[0]);
+  return simd128_value_t().readFrom(&untag()->value_[0]);
 }
 
 void Float64x2::set_value(simd128_value_t value) const {
-  StoreSimd128(&raw_ptr()->value_[0], value);
+  StoreSimd128(&untag()->value_[0], value);
 }
 
 const char* Float64x2::ToCString() const {
@@ -24015,7 +24024,7 @@
 };
 
 bool TypedData::CanonicalizeEquals(const Instance& other) const {
-  if (this->raw() == other.raw()) {
+  if (this->ptr() == other.ptr()) {
     // Both handles point to the same raw instance.
     return true;
   }
@@ -24067,7 +24076,7 @@
     result.SetLength(len);
     result.RecomputeDataField();
   }
-  return result.raw();
+  return result.ptr();
 }
 
 const char* TypedData::ToCString() const {
@@ -24113,7 +24122,7 @@
     result.SetLength(len);
     result.SetData(data);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 ExternalTypedDataPtr ExternalTypedData::NewFinalizeWithFree(uint8_t* data,
@@ -24122,7 +24131,7 @@
       kExternalTypedDataUint8ArrayCid, data, len, Heap::kOld));
   result.AddFinalizer(
       data, [](void* isolate_callback_data, void* data) { free(data); }, len);
-  return result.raw();
+  return result.ptr();
 }
 
 TypedDataViewPtr TypedDataView::New(intptr_t class_id, Heap::Space space) {
@@ -24134,7 +24143,7 @@
     result ^= raw;
     result.Clear();
   }
-  return result.raw();
+  return result.ptr();
 }
 
 TypedDataViewPtr TypedDataView::New(intptr_t class_id,
@@ -24144,7 +24153,7 @@
                                     Heap::Space space) {
   auto& result = TypedDataView::Handle(TypedDataView::New(class_id, space));
   result.InitializeWith(typed_data, offset_in_bytes, length);
-  return result.raw();
+  return result.ptr();
 }
 
 const char* TypedDataBase::ToCString() const {
@@ -24182,7 +24191,7 @@
   result.SetTypeArguments(type_args);
   result.SetNativeAddress(native_address);
 
-  return result.raw();
+  return result.ptr();
 }
 
 const char* Pointer::ToCString() const {
@@ -24198,11 +24207,11 @@
                              DynamicLibrary::InstanceSize(), space);
   NoSafepointScope no_safepoint;
   result.SetHandle(handle);
-  return result.raw();
+  return result.ptr();
 }
 
 bool Pointer::IsPointer(const Instance& obj) {
-  return IsFfiPointerClassId(obj.raw()->GetClassId());
+  return IsFfiPointerClassId(obj.ptr()->GetClassId());
 }
 
 bool Instance::IsPointer() const {
@@ -24221,9 +24230,9 @@
                                      Capability::InstanceSize(), space);
     NoSafepointScope no_safepoint;
     result ^= raw;
-    result.StoreNonPointer(&result.raw_ptr()->id_, id);
+    result.StoreNonPointer(&result.untag()->id_, id);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 const char* Capability::ToCString() const {
@@ -24250,10 +24259,10 @@
                                      ReceivePort::InstanceSize(), space);
     NoSafepointScope no_safepoint;
     result ^= raw;
-    result.raw_ptr()->set_send_port(send_port.raw());
+    result.untag()->set_send_port(send_port.ptr());
 #if !defined(PRODUCT)
-    result.raw_ptr()->set_debug_name(debug_name.raw());
-    result.raw_ptr()->set_allocation_location(allocation_location_.raw());
+    result.untag()->set_debug_name(debug_name.ptr());
+    result.untag()->set_allocation_location(allocation_location_.ptr());
 #endif  // !defined(PRODUCT)
   }
   if (is_control_port) {
@@ -24261,7 +24270,7 @@
   } else {
     PortMap::SetPortState(id, PortMap::kLivePort);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 const char* ReceivePort::ToCString() const {
@@ -24282,10 +24291,10 @@
         Object::Allocate(SendPort::kClassId, SendPort::InstanceSize(), space);
     NoSafepointScope no_safepoint;
     result ^= raw;
-    result.StoreNonPointer(&result.raw_ptr()->id_, id);
-    result.StoreNonPointer(&result.raw_ptr()->origin_id_, origin_id);
+    result.StoreNonPointer(&result.untag()->id_, id);
+    result.StoreNonPointer(&result.untag()->origin_id_, origin_id);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 const char* SendPort::ToCString() const {
@@ -24319,7 +24328,7 @@
                                        &TransferableTypedDataFinalizer, length,
                                        /*auto_delete=*/true));
 
-  return result.raw();
+  return result.ptr();
 }
 
 const char* TransferableTypedData::ToCString() const {
@@ -24362,7 +24371,7 @@
 intptr_t Closure::NumTypeParameters(Thread* thread) const {
   // Only check for empty here, as the null TAV is used to mean that the
   // closed-over delayed type parameters were all of dynamic type.
-  if (delayed_type_arguments() != Object::empty_type_arguments().raw()) {
+  if (delayed_type_arguments() != Object::empty_type_arguments().ptr()) {
     return 0;
   } else {
     const auto& closure_function = Function::Handle(thread->zone(), function());
@@ -24442,15 +24451,14 @@
         Object::Allocate(Closure::kClassId, Closure::InstanceSize(), space);
     NoSafepointScope no_safepoint;
     result ^= raw;
-    result.raw_ptr()->set_instantiator_type_arguments(
-        instantiator_type_arguments.raw());
-    result.raw_ptr()->set_function_type_arguments(
-        function_type_arguments.raw());
-    result.raw_ptr()->set_delayed_type_arguments(delayed_type_arguments.raw());
-    result.raw_ptr()->set_function(function.raw());
-    result.raw_ptr()->set_context(context.raw());
+    result.untag()->set_instantiator_type_arguments(
+        instantiator_type_arguments.ptr());
+    result.untag()->set_function_type_arguments(function_type_arguments.ptr());
+    result.untag()->set_delayed_type_arguments(delayed_type_arguments.ptr());
+    result.untag()->set_function(function.ptr());
+    result.untag()->set_context(context.ptr());
   }
-  return result.raw();
+  return result.ptr();
 }
 
 ClosurePtr Closure::New() {
@@ -24472,7 +24480,7 @@
   // We detect the case of a partial tearoff type application and substitute the
   // type arguments for the type parameters of the function.
   intptr_t num_free_params;
-  if (delayed_type_args.raw() != Object::empty_type_arguments().raw()) {
+  if (delayed_type_args.ptr() != Object::empty_type_arguments().ptr()) {
     num_free_params = kCurrentAndEnclosingFree;
     fn_type_args = delayed_type_args.Prepend(
         zone, fn_type_args, sig.NumParentTypeArguments(),
@@ -24485,62 +24493,62 @@
     sig ^= sig.InstantiateFrom(inst_type_args, fn_type_args, num_free_params,
                                Heap::kOld);
   }
-  return sig.raw();
+  return sig.ptr();
 }
 
 bool StackTrace::skip_sync_start_in_parent_stack() const {
-  return raw_ptr()->skip_sync_start_in_parent_stack;
+  return untag()->skip_sync_start_in_parent_stack;
 }
 
 void StackTrace::set_skip_sync_start_in_parent_stack(bool value) const {
-  StoreNonPointer(&raw_ptr()->skip_sync_start_in_parent_stack, value);
+  StoreNonPointer(&untag()->skip_sync_start_in_parent_stack, value);
 }
 
 intptr_t StackTrace::Length() const {
-  const Array& code_array = Array::Handle(raw_ptr()->code_array());
+  const Array& code_array = Array::Handle(untag()->code_array());
   return code_array.Length();
 }
 
 ObjectPtr StackTrace::CodeAtFrame(intptr_t frame_index) const {
-  const Array& code_array = Array::Handle(raw_ptr()->code_array());
+  const Array& code_array = Array::Handle(untag()->code_array());
   return code_array.At(frame_index);
 }
 
 void StackTrace::SetCodeAtFrame(intptr_t frame_index,
                                 const Object& code) const {
-  const Array& code_array = Array::Handle(raw_ptr()->code_array());
+  const Array& code_array = Array::Handle(untag()->code_array());
   code_array.SetAt(frame_index, code);
 }
 
 SmiPtr StackTrace::PcOffsetAtFrame(intptr_t frame_index) const {
-  const Array& pc_offset_array = Array::Handle(raw_ptr()->pc_offset_array());
+  const Array& pc_offset_array = Array::Handle(untag()->pc_offset_array());
   return static_cast<SmiPtr>(pc_offset_array.At(frame_index));
 }
 
 void StackTrace::SetPcOffsetAtFrame(intptr_t frame_index,
                                     const Smi& pc_offset) const {
-  const Array& pc_offset_array = Array::Handle(raw_ptr()->pc_offset_array());
+  const Array& pc_offset_array = Array::Handle(untag()->pc_offset_array());
   pc_offset_array.SetAt(frame_index, pc_offset);
 }
 
 void StackTrace::set_async_link(const StackTrace& async_link) const {
-  raw_ptr()->set_async_link(async_link.raw());
+  untag()->set_async_link(async_link.ptr());
 }
 
 void StackTrace::set_code_array(const Array& code_array) const {
-  raw_ptr()->set_code_array(code_array.raw());
+  untag()->set_code_array(code_array.ptr());
 }
 
 void StackTrace::set_pc_offset_array(const Array& pc_offset_array) const {
-  raw_ptr()->set_pc_offset_array(pc_offset_array.raw());
+  untag()->set_pc_offset_array(pc_offset_array.ptr());
 }
 
 void StackTrace::set_expand_inlined(bool value) const {
-  StoreNonPointer(&raw_ptr()->expand_inlined_, value);
+  StoreNonPointer(&untag()->expand_inlined_, value);
 }
 
 bool StackTrace::expand_inlined() const {
-  return raw_ptr()->expand_inlined_;
+  return untag()->expand_inlined_;
 }
 
 StackTracePtr StackTrace::New(const Array& code_array,
@@ -24557,7 +24565,7 @@
   result.set_pc_offset_array(pc_offset_array);
   result.set_expand_inlined(true);  // default.
   result.set_skip_sync_start_in_parent_stack(false);
-  return result.raw();
+  return result.ptr();
 }
 
 StackTracePtr StackTrace::New(const Array& code_array,
@@ -24577,7 +24585,7 @@
   result.set_pc_offset_array(pc_offset_array);
   result.set_expand_inlined(true);  // default.
   result.set_skip_sync_start_in_parent_stack(skip_sync_start_in_parent_stack);
-  return result.raw();
+  return result.ptr();
 }
 
 #if defined(DART_PRECOMPILED_RUNTIME)
@@ -24676,7 +24684,7 @@
 const char* StackTrace::ToCString() const {
   auto const T = Thread::Current();
   auto const zone = T->zone();
-  auto& stack_trace = StackTrace::Handle(zone, this->raw());
+  auto& stack_trace = StackTrace::Handle(zone, this->ptr());
   auto& function = Function::Handle(zone);
   auto& code_object = Object::Handle(zone);
   auto& code = Code::Handle(zone);
@@ -24754,7 +24762,7 @@
         continue;
       }
 
-      if (code_object.raw() == StubCode::AsynchronousGapMarker().raw()) {
+      if (code_object.ptr() == StubCode::AsynchronousGapMarker().ptr()) {
         if (!in_gap) {
           buffer.AddString("<asynchronous suspension>\n");
         }
@@ -24764,7 +24772,7 @@
 
       intptr_t pc_offset = Smi::Value(stack_trace.PcOffsetAtFrame(i));
       ASSERT(code_object.IsCode());
-      code ^= code_object.raw();
+      code ^= code_object.ptr();
       ASSERT(code.IsFunctionCode());
       function = code.function();
       const uword pc = code.PayloadStart() + pc_offset;
@@ -24862,13 +24870,13 @@
                     "symbolize stack traces in the precompiled runtime.");
 
 void RegExp::set_pattern(const String& pattern) const {
-  raw_ptr()->set_pattern(pattern.raw());
+  untag()->set_pattern(pattern.ptr());
 }
 
 void RegExp::set_function(intptr_t cid,
                           bool sticky,
                           const Function& value) const {
-  StorePointer(FunctionAddr(cid, sticky), value.raw());
+  StorePointer(FunctionAddr(cid, sticky), value.ptr());
 }
 
 void RegExp::set_bytecode(bool is_one_byte,
@@ -24876,25 +24884,25 @@
                           const TypedData& bytecode) const {
   if (sticky) {
     if (is_one_byte) {
-      raw_ptr()->set_one_byte_sticky(bytecode.raw());
+      untag()->set_one_byte_sticky(bytecode.ptr());
     } else {
-      raw_ptr()->set_two_byte_sticky(bytecode.raw());
+      untag()->set_two_byte_sticky(bytecode.ptr());
     }
   } else {
     if (is_one_byte) {
-      raw_ptr()->set_one_byte(bytecode.raw());
+      untag()->set_one_byte(bytecode.ptr());
     } else {
-      raw_ptr()->set_two_byte(bytecode.raw());
+      untag()->set_two_byte(bytecode.ptr());
     }
   }
 }
 
 void RegExp::set_num_bracket_expressions(intptr_t value) const {
-  raw_ptr()->set_num_bracket_expressions(Smi::New(value));
+  untag()->set_num_bracket_expressions(Smi::New(value));
 }
 
 void RegExp::set_capture_name_map(const Array& array) const {
-  raw_ptr()->set_capture_name_map(array.raw());
+  untag()->set_capture_name_map(array.ptr());
 }
 
 RegExpPtr RegExp::New(Heap::Space space) {
@@ -24909,7 +24917,7 @@
     result.set_num_registers(/*is_one_byte=*/false, -1);
     result.set_num_registers(/*is_one_byte=*/true, -1);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 const char* RegExpFlags::ToCString() const {
@@ -24951,7 +24959,7 @@
 }
 
 bool RegExp::CanonicalizeEquals(const Instance& other) const {
-  if (this->raw() == other.raw()) {
+  if (this->ptr() == other.ptr()) {
     return true;  // "===".
   }
   if (other.IsNull() || !other.IsRegExp()) {
@@ -24991,37 +24999,37 @@
 
 AbstractTypePtr MirrorReference::GetAbstractTypeReferent() const {
   ASSERT(Object::Handle(referent()).IsAbstractType());
-  return AbstractType::Cast(Object::Handle(referent())).raw();
+  return AbstractType::Cast(Object::Handle(referent())).ptr();
 }
 
 ClassPtr MirrorReference::GetClassReferent() const {
   ASSERT(Object::Handle(referent()).IsClass());
-  return Class::Cast(Object::Handle(referent())).raw();
+  return Class::Cast(Object::Handle(referent())).ptr();
 }
 
 FieldPtr MirrorReference::GetFieldReferent() const {
   ASSERT(Object::Handle(referent()).IsField());
-  return Field::Cast(Object::Handle(referent())).raw();
+  return Field::Cast(Object::Handle(referent())).ptr();
 }
 
 FunctionPtr MirrorReference::GetFunctionReferent() const {
   ASSERT(Object::Handle(referent()).IsFunction());
-  return Function::Cast(Object::Handle(referent())).raw();
+  return Function::Cast(Object::Handle(referent())).ptr();
 }
 
 FunctionTypePtr MirrorReference::GetFunctionTypeReferent() const {
   ASSERT(Object::Handle(referent()).IsFunctionType());
-  return FunctionType::Cast(Object::Handle(referent())).raw();
+  return FunctionType::Cast(Object::Handle(referent())).ptr();
 }
 
 LibraryPtr MirrorReference::GetLibraryReferent() const {
   ASSERT(Object::Handle(referent()).IsLibrary());
-  return Library::Cast(Object::Handle(referent())).raw();
+  return Library::Cast(Object::Handle(referent())).ptr();
 }
 
 TypeParameterPtr MirrorReference::GetTypeParameterReferent() const {
   ASSERT(Object::Handle(referent()).IsTypeParameter());
-  return TypeParameter::Cast(Object::Handle(referent())).raw();
+  return TypeParameter::Cast(Object::Handle(referent())).ptr();
 }
 
 MirrorReferencePtr MirrorReference::New(const Object& referent,
@@ -25034,7 +25042,7 @@
     result ^= raw;
   }
   result.set_referent(referent);
-  return result.raw();
+  return result.ptr();
 }
 
 const char* MirrorReference::ToCString() const {
@@ -25055,7 +25063,7 @@
   UserTag& result = UserTag::Handle(FindTagInIsolate(thread, label));
   if (!result.IsNull()) {
     // Tag already exists, return existing instance.
-    return result.raw();
+    return result.ptr();
   }
   if (TagTableIsFull(thread)) {
     const String& error = String::Handle(String::NewFormatted(
@@ -25073,7 +25081,7 @@
   }
   result.set_label(label);
   AddTagToIsolate(thread, result);
-  return result.raw();
+  return result.ptr();
 }
 
 UserTagPtr UserTag::DefaultTag() {
@@ -25090,7 +25098,7 @@
       UserTag::Handle(zone, UserTag::New(Symbols::Default()));
   ASSERT(result.tag() == UserTags::kDefaultUserTag);
   isolate->set_default_tag(result);
-  return result.raw();
+  return result.ptr();
 }
 
 UserTagPtr UserTag::FindTagInIsolate(Thread* thread, const String& label) {
@@ -25107,7 +25115,7 @@
     tag_label = other.label();
     ASSERT(!tag_label.IsNull());
     if (tag_label.Equals(label)) {
-      return other.raw();
+      return other.ptr();
     }
   }
   return UserTag::null();
@@ -25158,7 +25166,7 @@
   for (intptr_t i = 0; i < tag_table.Length(); i++) {
     tag ^= tag_table.At(i);
     if (tag.tag() == tag_id) {
-      return tag.raw();
+      return tag.ptr();
     }
   }
   return UserTag::null();
@@ -25211,22 +25219,22 @@
     }
     *reusable_field_handle = IG->object_store()->pragma_name();
     if (Instance::Cast(*pragma).GetField(*reusable_field_handle) !=
-        Symbols::vm_entry_point().raw()) {
+        Symbols::vm_entry_point().ptr()) {
       continue;
     }
     *reusable_field_handle = IG->object_store()->pragma_options();
     *pragma = Instance::Cast(*pragma).GetField(*reusable_field_handle);
-    if (pragma->raw() == Bool::null() || pragma->raw() == Bool::True().raw()) {
+    if (pragma->ptr() == Bool::null() || pragma->ptr() == Bool::True().ptr()) {
       return EntryPointPragma::kAlways;
       break;
     }
-    if (pragma->raw() == Symbols::Get().raw()) {
+    if (pragma->ptr() == Symbols::Get().ptr()) {
       return EntryPointPragma::kGetterOnly;
     }
-    if (pragma->raw() == Symbols::Set().raw()) {
+    if (pragma->ptr() == Symbols::Set().ptr()) {
       return EntryPointPragma::kSetterOnly;
     }
-    if (pragma->raw() == Symbols::Call().raw()) {
+    if (pragma->ptr() == Symbols::Call().ptr()) {
       return EntryPointPragma::kCallOnly;
     }
   }
@@ -25253,11 +25261,11 @@
     is_marked_entrypoint = false;
   }
 #else
-  Object& metadata = Object::Handle(Object::empty_array().raw());
+  Object& metadata = Object::Handle(Object::empty_array().ptr());
   if (!annotated.IsNull()) {
     metadata = lib.GetMetadata(annotated);
   }
-  if (metadata.IsError()) return Error::RawCast(metadata.raw());
+  if (metadata.IsError()) return Error::RawCast(metadata.ptr());
   ASSERT(!metadata.IsNull() && metadata.IsArray());
   EntryPointPragma pragma =
       FindEntryPointPragma(IsolateGroup::Current(), Array::Cast(metadata),
@@ -25333,25 +25341,25 @@
   const Class& cls = Class::Handle(Owner());
   const Library& lib = Library::Handle(cls.library());
   switch (kind()) {
-    case FunctionLayout::kRegularFunction:
-    case FunctionLayout::kSetterFunction:
-    case FunctionLayout::kConstructor:
+    case UntaggedFunction::kRegularFunction:
+    case UntaggedFunction::kSetterFunction:
+    case UntaggedFunction::kConstructor:
       return dart::VerifyEntryPoint(lib, *this, *this,
                                     {EntryPointPragma::kCallOnly});
       break;
-    case FunctionLayout::kGetterFunction:
+    case UntaggedFunction::kGetterFunction:
       return dart::VerifyEntryPoint(
           lib, *this, *this,
           {EntryPointPragma::kCallOnly, EntryPointPragma::kGetterOnly});
       break;
-    case FunctionLayout::kImplicitGetter:
+    case UntaggedFunction::kImplicitGetter:
       return dart::VerifyEntryPoint(lib, *this, Field::Handle(accessor_field()),
                                     {EntryPointPragma::kGetterOnly});
       break;
-    case FunctionLayout::kImplicitSetter:
+    case UntaggedFunction::kImplicitSetter:
       return dart::VerifyEntryPoint(lib, *this, Field::Handle(accessor_field()),
                                     {EntryPointPragma::kSetterOnly});
-    case FunctionLayout::kMethodExtractor:
+    case UntaggedFunction::kMethodExtractor:
       return Function::Handle(extracted_method_closure())
           .VerifyClosurizedEntryPoint();
       break;
@@ -25367,10 +25375,10 @@
   const Class& cls = Class::Handle(Owner());
   const Library& lib = Library::Handle(cls.library());
   switch (kind()) {
-    case FunctionLayout::kRegularFunction:
+    case UntaggedFunction::kRegularFunction:
       return dart::VerifyEntryPoint(lib, *this, *this,
                                     {EntryPointPragma::kGetterOnly});
-    case FunctionLayout::kImplicitClosureFunction: {
+    case UntaggedFunction::kImplicitClosureFunction: {
       const Function& parent = Function::Handle(parent_function());
       return dart::VerifyEntryPoint(lib, parent, parent,
                                     {EntryPointPragma::kGetterOnly});
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index 594bc24..97606d6 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -82,9 +82,9 @@
 
 #define BASE_OBJECT_IMPLEMENTATION(object, super)                              \
  public: /* NOLINT */                                                          \
-  using ObjectLayoutType = dart::object##Layout;                               \
+  using UntaggedObjectType = dart::Untagged##object;                           \
   using ObjectPtrType = dart::object##Ptr;                                     \
-  object##Ptr raw() const { return static_cast<object##Ptr>(raw_); }           \
+  object##Ptr ptr() const { return static_cast<object##Ptr>(ptr_); }           \
   bool Is##object() const { return true; }                                     \
   DART_NOINLINE static object& Handle() {                                      \
     return HandleImpl(Thread::Current()->zone(), object::null());              \
@@ -92,11 +92,11 @@
   DART_NOINLINE static object& Handle(Zone* zone) {                            \
     return HandleImpl(zone, object::null());                                   \
   }                                                                            \
-  DART_NOINLINE static object& Handle(object##Ptr raw_ptr) {                   \
-    return HandleImpl(Thread::Current()->zone(), raw_ptr);                     \
+  DART_NOINLINE static object& Handle(object##Ptr ptr) {                       \
+    return HandleImpl(Thread::Current()->zone(), ptr);                         \
   }                                                                            \
-  DART_NOINLINE static object& Handle(Zone* zone, object##Ptr raw_ptr) {       \
-    return HandleImpl(zone, raw_ptr);                                          \
+  DART_NOINLINE static object& Handle(Zone* zone, object##Ptr ptr) {           \
+    return HandleImpl(zone, ptr);                                              \
   }                                                                            \
   DART_NOINLINE static object& ZoneHandle() {                                  \
     return ZoneHandleImpl(Thread::Current()->zone(), object::null());          \
@@ -104,39 +104,38 @@
   DART_NOINLINE static object& ZoneHandle(Zone* zone) {                        \
     return ZoneHandleImpl(zone, object::null());                               \
   }                                                                            \
-  DART_NOINLINE static object& ZoneHandle(object##Ptr raw_ptr) {               \
-    return ZoneHandleImpl(Thread::Current()->zone(), raw_ptr);                 \
+  DART_NOINLINE static object& ZoneHandle(object##Ptr ptr) {                   \
+    return ZoneHandleImpl(Thread::Current()->zone(), ptr);                     \
   }                                                                            \
-  DART_NOINLINE static object& ZoneHandle(Zone* zone, object##Ptr raw_ptr) {   \
-    return ZoneHandleImpl(zone, raw_ptr);                                      \
+  DART_NOINLINE static object& ZoneHandle(Zone* zone, object##Ptr ptr) {       \
+    return ZoneHandleImpl(zone, ptr);                                          \
   }                                                                            \
   DART_NOINLINE static object* ReadOnlyHandle() {                              \
     object* obj = reinterpret_cast<object*>(Dart::AllocateReadOnlyHandle());   \
     initializeHandle(obj, object::null());                                     \
     return obj;                                                                \
   }                                                                            \
-  DART_NOINLINE static object& CheckedHandle(Zone* zone, ObjectPtr raw_ptr) {  \
+  DART_NOINLINE static object& CheckedHandle(Zone* zone, ObjectPtr ptr) {      \
     object* obj = reinterpret_cast<object*>(VMHandles::AllocateHandle(zone));  \
-    initializeHandle(obj, raw_ptr);                                            \
+    initializeHandle(obj, ptr);                                                \
     if (!obj->Is##object()) {                                                  \
       FATAL2("Handle check failed: saw %s expected %s", obj->ToCString(),      \
              #object);                                                         \
     }                                                                          \
     return *obj;                                                               \
   }                                                                            \
-  DART_NOINLINE static object& CheckedZoneHandle(Zone* zone,                   \
-                                                 ObjectPtr raw_ptr) {          \
+  DART_NOINLINE static object& CheckedZoneHandle(Zone* zone, ObjectPtr ptr) {  \
     object* obj =                                                              \
         reinterpret_cast<object*>(VMHandles::AllocateZoneHandle(zone));        \
-    initializeHandle(obj, raw_ptr);                                            \
+    initializeHandle(obj, ptr);                                                \
     if (!obj->Is##object()) {                                                  \
       FATAL2("Handle check failed: saw %s expected %s", obj->ToCString(),      \
              #object);                                                         \
     }                                                                          \
     return *obj;                                                               \
   }                                                                            \
-  DART_NOINLINE static object& CheckedZoneHandle(ObjectPtr raw_ptr) {          \
-    return CheckedZoneHandle(Thread::Current()->zone(), raw_ptr);              \
+  DART_NOINLINE static object& CheckedZoneHandle(ObjectPtr ptr) {              \
+    return CheckedZoneHandle(Thread::Current()->zone(), ptr);                  \
   }                                                                            \
   /* T::Cast cannot be applied to a null Object, because the object vtable */  \
  /* is not set up for type T, although some methods are supposed to work  */  \
@@ -156,23 +155,23 @@
   static const ClassId kClassId = k##object##Cid;                              \
                                                                                \
  private: /* NOLINT */                                                         \
-  static object& HandleImpl(Zone* zone, object##Ptr raw_ptr) {                 \
+  static object& HandleImpl(Zone* zone, object##Ptr ptr) {                     \
     object* obj = reinterpret_cast<object*>(VMHandles::AllocateHandle(zone));  \
-    initializeHandle(obj, raw_ptr);                                            \
+    initializeHandle(obj, ptr);                                                \
     return *obj;                                                               \
   }                                                                            \
-  static object& ZoneHandleImpl(Zone* zone, object##Ptr raw_ptr) {             \
+  static object& ZoneHandleImpl(Zone* zone, object##Ptr ptr) {                 \
     object* obj =                                                              \
         reinterpret_cast<object*>(VMHandles::AllocateZoneHandle(zone));        \
-    initializeHandle(obj, raw_ptr);                                            \
+    initializeHandle(obj, ptr);                                                \
     return *obj;                                                               \
   }                                                                            \
-  /* Initialize the handle based on the raw_ptr in the presence of null. */    \
-  static void initializeHandle(object* obj, ObjectPtr raw_ptr) {               \
-    if (raw_ptr != Object::null()) {                                           \
-      obj->SetRaw(raw_ptr);                                                    \
+  /* Initialize the handle based on the ptr in the presence of null. */        \
+  static void initializeHandle(object* obj, ObjectPtr ptr) {                   \
+    if (ptr != Object::null()) {                                               \
+      obj->SetPtr(ptr);                                                        \
     } else {                                                                   \
-      obj->raw_ = Object::null();                                              \
+      obj->ptr_ = Object::null();                                              \
       object fake_object;                                                      \
       obj->set_vtable(fake_object.vtable());                                   \
     }                                                                          \
@@ -229,9 +228,9 @@
 
 #define HEAP_OBJECT_IMPLEMENTATION(object, super)                              \
   OBJECT_IMPLEMENTATION(object, super);                                        \
-  object##Layout* raw_ptr() const {                                            \
-    ASSERT(raw() != null());                                                   \
-    return const_cast<object##Layout*>(raw()->ptr());                          \
+  Untagged##object* untag() const {                                            \
+    ASSERT(ptr() != null());                                                   \
+    return const_cast<Untagged##object*>(ptr()->untag());                      \
   }                                                                            \
   SNAPSHOT_READER_SUPPORT(object)                                              \
   friend class StackFrame;                                                     \
@@ -241,11 +240,11 @@
 #define FINAL_HEAP_OBJECT_IMPLEMENTATION_HELPER(object, rettype, super)        \
  public: /* NOLINT */                                                          \
   void operator=(object##Ptr value) {                                          \
-    raw_ = value;                                                              \
+    ptr_ = value;                                                              \
     CHECK_HANDLE();                                                            \
   }                                                                            \
   void operator^=(ObjectPtr value) {                                           \
-    raw_ = value;                                                              \
+    ptr_ = value;                                                              \
     CHECK_HANDLE();                                                            \
   }                                                                            \
                                                                                \
@@ -253,9 +252,9 @@
   object() : super() {}                                                        \
   BASE_OBJECT_IMPLEMENTATION(object, super)                                    \
   OBJECT_SERVICE_SUPPORT(object)                                               \
-  object##Layout* raw_ptr() const {                                            \
-    ASSERT(raw() != null());                                                   \
-    return const_cast<object##Layout*>(raw()->ptr());                          \
+  Untagged##object* untag() const {                                            \
+    ASSERT(ptr() != null());                                                   \
+    return const_cast<Untagged##object*>(ptr()->untag());                      \
   }                                                                            \
   static intptr_t NextFieldOffset() { return -kWordSize; }                     \
   SNAPSHOT_READER_SUPPORT(rettype)                                             \
@@ -280,25 +279,25 @@
 
 class Object {
  public:
-  using ObjectLayoutType = ObjectLayout;
+  using UntaggedObjectType = UntaggedObject;
   using ObjectPtrType = ObjectPtr;
 
   static ObjectPtr RawCast(ObjectPtr obj) { return obj; }
 
   virtual ~Object() {}
 
-  ObjectPtr raw() const { return raw_; }
+  ObjectPtr ptr() const { return ptr_; }
   void operator=(ObjectPtr value) { initializeHandle(this, value); }
 
-  bool IsCanonical() const { return raw()->ptr()->IsCanonical(); }
-  void SetCanonical() const { raw()->ptr()->SetCanonical(); }
-  void ClearCanonical() const { raw()->ptr()->ClearCanonical(); }
+  bool IsCanonical() const { return ptr()->untag()->IsCanonical(); }
+  void SetCanonical() const { ptr()->untag()->SetCanonical(); }
+  void ClearCanonical() const { ptr()->untag()->ClearCanonical(); }
   intptr_t GetClassId() const {
-    return !raw()->IsHeapObject() ? static_cast<intptr_t>(kSmiCid)
-                                  : raw()->ptr()->GetClassId();
+    return !ptr()->IsHeapObject() ? static_cast<intptr_t>(kSmiCid)
+                                  : ptr()->untag()->GetClassId();
   }
   inline ClassPtr clazz() const;
-  static intptr_t tags_offset() { return OFFSET_OF(ObjectLayout, tags_); }
+  static intptr_t tags_offset() { return OFFSET_OF(UntaggedObject, tags_); }
 
 // Class testers.
 #define DEFINE_CLASS_TESTER(clazz)                                             \
@@ -306,7 +305,7 @@
   CLASS_LIST_FOR_HANDLES(DEFINE_CLASS_TESTER);
 #undef DEFINE_CLASS_TESTER
 
-  bool IsNull() const { return raw_ == null_; }
+  bool IsNull() const { return ptr_ == null_; }
 
   // Matches Object.toString on instances (except String::ToCString, bug 20583).
   virtual const char* ToCString() const {
@@ -330,12 +329,12 @@
   // namespaces need to provide an implementation.
   virtual StringPtr DictionaryName() const;
 
-  bool IsNew() const { return raw()->IsNewObject(); }
-  bool IsOld() const { return raw()->IsOldObject(); }
+  bool IsNew() const { return ptr()->IsNewObject(); }
+  bool IsOld() const { return ptr()->IsOldObject(); }
 #if defined(DEBUG)
   bool InVMIsolateHeap() const;
 #else
-  bool InVMIsolateHeap() const { return raw()->ptr()->InVMIsolateHeap(); }
+  bool InVMIsolateHeap() const { return ptr()->untag()->InVMIsolateHeap(); }
 #endif  // DEBUG
 
   // Print the object on stdout for debugging.
@@ -349,9 +348,9 @@
 
   bool IsNotTemporaryScopedHandle() const;
 
-  static Object& Handle(Zone* zone, ObjectPtr raw_ptr) {
+  static Object& Handle(Zone* zone, ObjectPtr ptr) {
     Object* obj = reinterpret_cast<Object*>(VMHandles::AllocateHandle(zone));
-    initializeHandle(obj, raw_ptr);
+    initializeHandle(obj, ptr);
     return *obj;
   }
   static Object* ReadOnlyHandle() {
@@ -364,14 +363,14 @@
 
   static Object& Handle(Zone* zone) { return Handle(zone, null_); }
 
-  static Object& Handle(ObjectPtr raw_ptr) {
-    return Handle(Thread::Current()->zone(), raw_ptr);
+  static Object& Handle(ObjectPtr ptr) {
+    return Handle(Thread::Current()->zone(), ptr);
   }
 
-  static Object& ZoneHandle(Zone* zone, ObjectPtr raw_ptr) {
+  static Object& ZoneHandle(Zone* zone, ObjectPtr ptr) {
     Object* obj =
         reinterpret_cast<Object*>(VMHandles::AllocateZoneHandle(zone));
-    initializeHandle(obj, raw_ptr);
+    initializeHandle(obj, ptr);
     return *obj;
   }
 
@@ -381,19 +380,19 @@
     return ZoneHandle(Thread::Current()->zone(), null_);
   }
 
-  static Object& ZoneHandle(ObjectPtr raw_ptr) {
-    return ZoneHandle(Thread::Current()->zone(), raw_ptr);
+  static Object& ZoneHandle(ObjectPtr ptr) {
+    return ZoneHandle(Thread::Current()->zone(), ptr);
   }
 
   static ObjectPtr null() { return null_; }
 
 #if defined(HASH_IN_OBJECT_HEADER)
   static uint32_t GetCachedHash(const ObjectPtr obj) {
-    return obj->ptr()->GetHeaderHash();
+    return obj->untag()->GetHeaderHash();
   }
 
   static void SetCachedHash(ObjectPtr obj, uint32_t hash) {
-    obj->ptr()->SetHeaderHash(hash);
+    obj->untag()->SetHeaderHash(hash);
   }
 #endif
 
@@ -532,7 +531,7 @@
                                          intptr_t used_size);
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(ObjectLayout));
+    return RoundedAllocationSize(sizeof(UntaggedObject));
   }
 
   template <class FakeObject>
@@ -603,11 +602,11 @@
 
  protected:
   // Used for extracting the C++ vtable during bringup.
-  Object() : raw_(null_) {}
+  Object() : ptr_(null_) {}
 
-  uword raw_value() const { return static_cast<uword>(raw()); }
+  uword raw_value() const { return static_cast<uword>(ptr()); }
 
-  inline void SetRaw(ObjectPtr value);
+  inline void SetPtr(ObjectPtr value);
   void CheckHandle() const;
 
   cpp_vtable vtable() const { return bit_copy<cpp_vtable>(*this); }
@@ -619,7 +618,7 @@
     return Utils::RoundUp(size, kObjectAlignment);
   }
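
RoundedAllocationSize delegates to Utils::RoundUp, the standard power-of-two round-up. A one-function sketch, assuming the alignment is a power of two (kObjectAlignment is, e.g., 16 on 64-bit targets):

#include <cstdint>

// Round size up to the next multiple of a power-of-two alignment.
constexpr intptr_t RoundUpSketch(intptr_t size, intptr_t alignment) {
  return (size + alignment - 1) & ~(alignment - 1);
}

static_assert(RoundUpSketch(1, 16) == 16, "rounds up");
static_assert(RoundUpSketch(32, 16) == 32, "already aligned");
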
 
-  bool Contains(uword addr) const { return raw()->ptr()->Contains(addr); }
+  bool Contains(uword addr) const { return ptr()->untag()->Contains(addr); }
 
   // Start of field mutator guards.
   //
@@ -629,18 +628,18 @@
 
   template <typename type, std::memory_order order = std::memory_order_relaxed>
   type LoadPointer(type const* addr) const {
-    return raw()->ptr()->LoadPointer<type, order>(addr);
+    return ptr()->untag()->LoadPointer<type, order>(addr);
   }
 
   template <typename type, std::memory_order order = std::memory_order_relaxed>
   void StorePointer(type const* addr, type value) const {
-    raw()->ptr()->StorePointer<type, order>(addr, value);
+    ptr()->untag()->StorePointer<type, order>(addr, value);
   }
 
   // Use for storing into an explicitly Smi-typed field of an object
   // (i.e., both the previous and new value are Smis).
   void StoreSmi(SmiPtr const* addr, SmiPtr value) const {
-    raw()->ptr()->StoreSmi(addr, value);
+    ptr()->untag()->StoreSmi(addr, value);
   }
 
   template <typename FieldType>
@@ -665,14 +664,14 @@
   template <typename FieldType, typename ValueType>
   void StoreNonPointer(const FieldType* addr, ValueType value) const {
     // Can't use Contains, as it uses tags_, which is set through this method.
-    ASSERT(reinterpret_cast<uword>(addr) >= ObjectLayout::ToAddr(raw()));
+    ASSERT(reinterpret_cast<uword>(addr) >= UntaggedObject::ToAddr(ptr()));
     *const_cast<FieldType*>(addr) = value;
   }
 
   template <typename FieldType, typename ValueType, std::memory_order order>
   void StoreNonPointer(const FieldType* addr, ValueType value) const {
     // Can't use Contains, as it uses tags_, which is set through this method.
-    ASSERT(reinterpret_cast<uword>(addr) >= ObjectLayout::ToAddr(raw()));
+    ASSERT(reinterpret_cast<uword>(addr) >= UntaggedObject::ToAddr(ptr()));
     reinterpret_cast<std::atomic<FieldType>*>(const_cast<FieldType*>(addr))
         ->store(value, order);
   }
@@ -712,7 +711,7 @@
 
   // End of field mutator guards.
 
-  ObjectPtr raw_;  // The raw object reference.
+  ObjectPtr ptr_;  // The raw object reference.
 
  protected:
   void AddCommonObjectProperties(JSONObject* jsobj,
@@ -734,12 +733,12 @@
                                    const String& name,
                                    const Library& lib);
 
-  /* Initialize the handle based on the raw_ptr in the presence of null. */
-  static void initializeHandle(Object* obj, ObjectPtr raw_ptr) {
-    if (raw_ptr != Object::null()) {
-      obj->SetRaw(raw_ptr);
+  /* Initialize the handle based on the ptr in the presence of null. */
+  static void initializeHandle(Object* obj, ObjectPtr ptr) {
+    if (ptr != Object::null()) {
+      obj->SetPtr(ptr);
     } else {
-      obj->raw_ = Object::null();
+      obj->ptr_ = Object::null();
       Object fake_object;
       obj->set_vtable(fake_object.vtable());
     }
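
The null branch above is worth spelling out: a default-constructed stack object donates its vtable so that even a null handle virtual-dispatches as plain Object. The bit_copy-based vtable() accessor earlier in the class reads that first word directly; a generic sketch of the idiom, illustrative only and deliberately outside what standard C++ guarantees:

#include <cstdint>
#include <cstring>

using cpp_vtable = uintptr_t;

// Read/write the vtable pointer in the first word of a polymorphic object
// (true on the common Itanium/MSVC ABIs; not portable C++).
template <typename T>
cpp_vtable ReadVTableSketch(const T& obj) {
  cpp_vtable vt;
  std::memcpy(&vt, &obj, sizeof(vt));
  return vt;
}

template <typename T>
void WriteVTableSketch(T* obj, cpp_vtable vt) {
  std::memcpy(obj, &vt, sizeof(vt));
}
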
@@ -806,7 +805,7 @@
 #undef DECLARE_SHARED_READONLY_HANDLE
 
   friend void ClassTable::Register(const Class& cls);
-  friend void ObjectLayout::Validate(IsolateGroup* isolate_group) const;
+  friend void UntaggedObject::Validate(IsolateGroup* isolate_group) const;
   friend class Closure;
   friend class SnapshotReader;
   friend class InstanceDeserializationCluster;
@@ -827,18 +826,18 @@
 
 class PassiveObject : public Object {
  public:
-  void operator=(ObjectPtr value) { raw_ = value; }
-  void operator^=(ObjectPtr value) { raw_ = value; }
+  void operator=(ObjectPtr value) { ptr_ = value; }
+  void operator^=(ObjectPtr value) { ptr_ = value; }
 
-  static PassiveObject& Handle(Zone* zone, ObjectPtr raw_ptr) {
+  static PassiveObject& Handle(Zone* zone, ObjectPtr ptr) {
     PassiveObject* obj =
         reinterpret_cast<PassiveObject*>(VMHandles::AllocateHandle(zone));
-    obj->raw_ = raw_ptr;
+    obj->ptr_ = ptr;
     obj->set_vtable(0);
     return *obj;
   }
-  static PassiveObject& Handle(ObjectPtr raw_ptr) {
-    return Handle(Thread::Current()->zone(), raw_ptr);
+  static PassiveObject& Handle(ObjectPtr ptr) {
+    return Handle(Thread::Current()->zone(), ptr);
   }
   static PassiveObject& Handle() {
     return Handle(Thread::Current()->zone(), Object::null());
@@ -846,15 +845,15 @@
   static PassiveObject& Handle(Zone* zone) {
     return Handle(zone, Object::null());
   }
-  static PassiveObject& ZoneHandle(Zone* zone, ObjectPtr raw_ptr) {
+  static PassiveObject& ZoneHandle(Zone* zone, ObjectPtr ptr) {
     PassiveObject* obj =
         reinterpret_cast<PassiveObject*>(VMHandles::AllocateZoneHandle(zone));
-    obj->raw_ = raw_ptr;
+    obj->ptr_ = ptr;
     obj->set_vtable(0);
     return *obj;
   }
-  static PassiveObject& ZoneHandle(ObjectPtr raw_ptr) {
-    return ZoneHandle(Thread::Current()->zone(), raw_ptr);
+  static PassiveObject& ZoneHandle(ObjectPtr ptr) {
+    return ZoneHandle(Thread::Current()->zone(), ptr);
   }
   static PassiveObject& ZoneHandle() {
     return ZoneHandle(Thread::Current()->zone(), Object::null());
@@ -920,23 +919,23 @@
 
   intptr_t host_instance_size() const {
     ASSERT(is_finalized() || is_prefinalized());
-    return (raw_ptr()->host_instance_size_in_words_ * kWordSize);
+    return (untag()->host_instance_size_in_words_ * kWordSize);
   }
   intptr_t target_instance_size() const {
     ASSERT(is_finalized() || is_prefinalized());
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    return (raw_ptr()->target_instance_size_in_words_ *
+    return (untag()->target_instance_size_in_words_ *
             compiler::target::kWordSize);
 #else
     return host_instance_size();
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
   }
   static intptr_t host_instance_size(ClassPtr clazz) {
-    return (clazz->ptr()->host_instance_size_in_words_ * kWordSize);
+    return (clazz->untag()->host_instance_size_in_words_ * kWordSize);
   }
   static intptr_t target_instance_size(ClassPtr clazz) {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    return (clazz->ptr()->target_instance_size_in_words_ *
+    return (clazz->untag()->target_instance_size_in_words_ *
             compiler::target::kWordSize);
 #else
     return host_instance_size(clazz);
@@ -952,22 +951,22 @@
   void set_instance_size_in_words(intptr_t host_value,
                                   intptr_t target_value) const {
     ASSERT(Utils::IsAligned((host_value * kWordSize), kObjectAlignment));
-    StoreNonPointer(&raw_ptr()->host_instance_size_in_words_, host_value);
+    StoreNonPointer(&untag()->host_instance_size_in_words_, host_value);
 #if !defined(DART_PRECOMPILED_RUNTIME)
     ASSERT(Utils::IsAligned((target_value * compiler::target::kWordSize),
                             compiler::target::kObjectAlignment));
-    StoreNonPointer(&raw_ptr()->target_instance_size_in_words_, target_value);
+    StoreNonPointer(&untag()->target_instance_size_in_words_, target_value);
 #else
     ASSERT(host_value == target_value);
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
   }
 
   intptr_t host_next_field_offset() const {
-    return raw_ptr()->host_next_field_offset_in_words_ * kWordSize;
+    return untag()->host_next_field_offset_in_words_ * kWordSize;
   }
   intptr_t target_next_field_offset() const {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    return raw_ptr()->target_next_field_offset_in_words_ *
+    return untag()->target_next_field_offset_in_words_ *
            compiler::target::kWordSize;
 #else
     return host_next_field_offset();
@@ -983,36 +982,35 @@
                                       intptr_t target_value) const {
     ASSERT((host_value == -1) ||
            (Utils::IsAligned((host_value * kWordSize), kObjectAlignment) &&
-            (host_value == raw_ptr()->host_instance_size_in_words_)) ||
+            (host_value == untag()->host_instance_size_in_words_)) ||
            (!Utils::IsAligned((host_value * kWordSize), kObjectAlignment) &&
-            ((host_value + 1) == raw_ptr()->host_instance_size_in_words_)));
-    StoreNonPointer(&raw_ptr()->host_next_field_offset_in_words_, host_value);
+            ((host_value + 1) == untag()->host_instance_size_in_words_)));
+    StoreNonPointer(&untag()->host_next_field_offset_in_words_, host_value);
 #if !defined(DART_PRECOMPILED_RUNTIME)
     ASSERT((target_value == -1) ||
            (Utils::IsAligned((target_value * compiler::target::kWordSize),
                              compiler::target::kObjectAlignment) &&
-            (target_value == raw_ptr()->target_instance_size_in_words_)) ||
+            (target_value == untag()->target_instance_size_in_words_)) ||
            (!Utils::IsAligned((target_value * compiler::target::kWordSize),
                               compiler::target::kObjectAlignment) &&
-            ((target_value + 1) == raw_ptr()->target_instance_size_in_words_)));
-    StoreNonPointer(&raw_ptr()->target_next_field_offset_in_words_,
-                    target_value);
+            ((target_value + 1) == untag()->target_instance_size_in_words_)));
+    StoreNonPointer(&untag()->target_next_field_offset_in_words_, target_value);
 #else
     ASSERT(host_value == target_value);
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
   }
 
   static bool is_valid_id(intptr_t value) {
-    return ObjectLayout::ClassIdTag::is_valid(value);
+    return UntaggedObject::ClassIdTag::is_valid(value);
   }
-  intptr_t id() const { return raw_ptr()->id_; }
+  intptr_t id() const { return untag()->id_; }
   void set_id(intptr_t value) const {
     ASSERT(value >= 0 && value < std::numeric_limits<classid_t>::max());
-    StoreNonPointer(&raw_ptr()->id_, value);
+    StoreNonPointer(&untag()->id_, value);
   }
-  static intptr_t id_offset() { return OFFSET_OF(ClassLayout, id_); }
+  static intptr_t id_offset() { return OFFSET_OF(UntaggedClass, id_); }
   static intptr_t num_type_arguments_offset() {
-    return OFFSET_OF(ClassLayout, num_type_arguments_);
+    return OFFSET_OF(UntaggedClass, num_type_arguments_);
   }
 
   StringPtr Name() const;
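
The id_offset()/num_type_arguments_offset() changes in this hunk follow the same rule as the rest of the diff: OFFSET_OF is now taken against the Untagged* layout struct, so generated code can address fields relative to an untagged object pointer. A small sketch with a hypothetical layout struct:

    #include <cstddef>
    #include <cstdint>

    // Hypothetical untagged layout: a header tag word followed by the id.
    struct UntaggedThing {
      uint32_t tags_;
      int32_t id_;
    };

    // Offset helpers like the ones in the diff return byte offsets into the
    // layout struct (the VM uses its own OFFSET_OF macro; offsetof suffices
    // for this sketch).
    inline size_t id_offset() { return offsetof(UntaggedThing, id_); }

    static_assert(offsetof(UntaggedThing, id_) == sizeof(uint32_t),
                  "id_ sits right after the 32-bit tag word");
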
@@ -1034,12 +1032,12 @@
 
   virtual StringPtr DictionaryName() const { return Name(); }
 
-  ScriptPtr script() const { return raw_ptr()->script(); }
+  ScriptPtr script() const { return untag()->script(); }
   void set_script(const Script& value) const;
 
-  TokenPosition token_pos() const { return raw_ptr()->token_pos_; }
+  TokenPosition token_pos() const { return untag()->token_pos_; }
   void set_token_pos(TokenPosition value) const;
-  TokenPosition end_token_pos() const { return raw_ptr()->end_token_pos_; }
+  TokenPosition end_token_pos() const { return untag()->end_token_pos_; }
   void set_end_token_pos(TokenPosition value) const;
 
   int32_t SourceFingerprint() const;
@@ -1060,17 +1058,17 @@
   TypePtr DeclarationType() const;
 
   static intptr_t declaration_type_offset() {
-    return OFFSET_OF(ClassLayout, declaration_type_);
+    return OFFSET_OF(UntaggedClass, declaration_type_);
   }
 
-  LibraryPtr library() const { return raw_ptr()->library(); }
+  LibraryPtr library() const { return untag()->library(); }
   void set_library(const Library& value) const;
 
   // The type parameters (and their bounds) are specified as an array of
   // TypeParameter.
   TypeArgumentsPtr type_parameters() const {
     ASSERT(is_declaration_loaded());
-    return raw_ptr()->type_parameters();
+    return untag()->type_parameters();
   }
   void set_type_parameters(const TypeArguments& value) const;
   intptr_t NumTypeParameters(Thread* thread) const;
@@ -1097,20 +1095,20 @@
   static const intptr_t kNoTypeArguments = -1;
   intptr_t host_type_arguments_field_offset() const {
     ASSERT(is_type_finalized() || is_prefinalized());
-    if (raw_ptr()->host_type_arguments_field_offset_in_words_ ==
+    if (untag()->host_type_arguments_field_offset_in_words_ ==
         kNoTypeArguments) {
       return kNoTypeArguments;
     }
-    return raw_ptr()->host_type_arguments_field_offset_in_words_ * kWordSize;
+    return untag()->host_type_arguments_field_offset_in_words_ * kWordSize;
   }
   intptr_t target_type_arguments_field_offset() const {
 #if !defined(DART_PRECOMPILED_RUNTIME)
     ASSERT(is_type_finalized() || is_prefinalized());
-    if (raw_ptr()->target_type_arguments_field_offset_in_words_ ==
+    if (untag()->target_type_arguments_field_offset_in_words_ ==
         compiler::target::Class::kNoTypeArguments) {
       return compiler::target::Class::kNoTypeArguments;
     }
-    return raw_ptr()->target_type_arguments_field_offset_in_words_ *
+    return untag()->target_type_arguments_field_offset_in_words_ *
            compiler::target::kWordSize;
 #else
     return host_type_arguments_field_offset();
@@ -1134,22 +1132,23 @@
   }
   void set_type_arguments_field_offset_in_words(intptr_t host_value,
                                                 intptr_t target_value) const {
-    StoreNonPointer(&raw_ptr()->host_type_arguments_field_offset_in_words_,
+    StoreNonPointer(&untag()->host_type_arguments_field_offset_in_words_,
                     host_value);
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    StoreNonPointer(&raw_ptr()->target_type_arguments_field_offset_in_words_,
+    StoreNonPointer(&untag()->target_type_arguments_field_offset_in_words_,
                     target_value);
 #else
     ASSERT(host_value == target_value);
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
   }
   static intptr_t host_type_arguments_field_offset_in_words_offset() {
-    return OFFSET_OF(ClassLayout, host_type_arguments_field_offset_in_words_);
+    return OFFSET_OF(UntaggedClass, host_type_arguments_field_offset_in_words_);
   }
 
   static intptr_t target_type_arguments_field_offset_in_words_offset() {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    return OFFSET_OF(ClassLayout, target_type_arguments_field_offset_in_words_);
+    return OFFSET_OF(UntaggedClass,
+                     target_type_arguments_field_offset_in_words_);
 #else
     return host_type_arguments_field_offset_in_words_offset();
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
@@ -1158,11 +1157,11 @@
   // The super type of this class, Object type if not explicitly specified.
   AbstractTypePtr super_type() const {
     ASSERT(is_declaration_loaded());
-    return raw_ptr()->super_type();
+    return untag()->super_type();
   }
   void set_super_type(const AbstractType& value) const;
   static intptr_t super_type_offset() {
-    return OFFSET_OF(ClassLayout, super_type_);
+    return OFFSET_OF(UntaggedClass, super_type_);
   }
 
   // Asserts that the class of the super type has been resolved.
@@ -1174,7 +1173,7 @@
   // Interfaces is an array of Types.
   ArrayPtr interfaces() const {
     ASSERT(is_declaration_loaded());
-    return raw_ptr()->interfaces();
+    return untag()->interfaces();
   }
   void set_interfaces(const Array& value) const;
 
@@ -1182,7 +1181,7 @@
   GrowableObjectArrayPtr direct_implementors() const {
     DEBUG_ASSERT(
         IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());
-    return raw_ptr()->direct_implementors();
+    return untag()->direct_implementors();
   }
   void AddDirectImplementor(const Class& subclass, bool is_mixin) const;
   void ClearDirectImplementors() const;
@@ -1194,7 +1193,7 @@
     return direct_subclasses_unsafe();
   }
   GrowableObjectArrayPtr direct_subclasses_unsafe() const {
-    return raw_ptr()->direct_subclasses();
+    return untag()->direct_subclasses();
   }
   void AddDirectSubclass(const Class& subclass) const;
   void ClearDirectSubclasses() const;
@@ -1227,13 +1226,13 @@
   bool IsClosureClass() const { return id() == kClosureCid; }
   static bool IsClosureClass(ClassPtr cls) {
     NoSafepointScope no_safepoint;
-    return cls->ptr()->id_ == kClosureCid;
+    return cls->untag()->id_ == kClosureCid;
   }
 
   static bool IsInFullSnapshot(ClassPtr cls) {
     NoSafepointScope no_safepoint;
-    return LibraryLayout::InFullSnapshotBit::decode(
-        cls->ptr()->library()->ptr()->flags_);
+    return UntaggedLibrary::InFullSnapshotBit::decode(
+        cls->untag()->library()->untag()->flags_);
   }
 
   // Returns true if the type specified by cls, type_arguments, and nullability
@@ -1257,7 +1256,7 @@
   ArrayPtr fields() const {
     // We rely on the fact that any loads from the array are dependent loads
     // and avoid the load-acquire barrier here.
-    return raw_ptr()->fields();
+    return untag()->fields();
   }
   void SetFields(const Array& value) const;
   void AddField(const Field& field) const;
@@ -1281,7 +1280,7 @@
   ArrayPtr current_functions() const {
     // We rely on the fact that any loads from the array are dependent loads
     // and avoid the load-acquire barrier here.
-    return raw_ptr()->functions();
+    return untag()->functions();
   }
   ArrayPtr functions() const {
     DEBUG_ASSERT(
@@ -1331,7 +1330,7 @@
   bool RequireCanonicalTypeErasureOfConstants(Zone* zone) const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(ClassLayout));
+    return RoundedAllocationSize(sizeof(UntaggedClass));
   }
 
   bool is_implemented() const { return ImplementedBit::decode(state_bits()); }
@@ -1340,17 +1339,17 @@
   bool is_abstract() const { return AbstractBit::decode(state_bits()); }
   void set_is_abstract() const;
 
-  ClassLayout::ClassLoadingState class_loading_state() const {
+  UntaggedClass::ClassLoadingState class_loading_state() const {
     return ClassLoadingBits::decode(state_bits());
   }
 
   bool is_declaration_loaded() const {
-    return class_loading_state() >= ClassLayout::kDeclarationLoaded;
+    return class_loading_state() >= UntaggedClass::kDeclarationLoaded;
   }
   void set_is_declaration_loaded() const;
 
   bool is_type_finalized() const {
-    return class_loading_state() >= ClassLayout::kTypeFinalized;
+    return class_loading_state() >= UntaggedClass::kTypeFinalized;
   }
   void set_is_type_finalized() const;
 
@@ -1364,21 +1363,21 @@
 
   bool is_finalized() const {
     return ClassFinalizedBits::decode(state_bits()) ==
-               ClassLayout::kFinalized ||
+               UntaggedClass::kFinalized ||
            ClassFinalizedBits::decode(state_bits()) ==
-               ClassLayout::kAllocateFinalized;
+               UntaggedClass::kAllocateFinalized;
   }
   void set_is_finalized() const;
 
   bool is_allocate_finalized() const {
     return ClassFinalizedBits::decode(state_bits()) ==
-           ClassLayout::kAllocateFinalized;
+           UntaggedClass::kAllocateFinalized;
   }
   void set_is_allocate_finalized() const;
 
   bool is_prefinalized() const {
     return ClassFinalizedBits::decode(state_bits()) ==
-           ClassLayout::kPreFinalized;
+           UntaggedClass::kPreFinalized;
   }
 
   void set_is_prefinalized() const;
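
The finalization predicates above read an ordered state machine out of the class's state bits: loading advances through declaration-loaded and type-finalized, and finalization through pre-finalized, finalized, and allocate-finalized. A sketch with assumed enumerator values (the real ones live in UntaggedClass):

    // States form an ordered progression, so "is at least X" is a comparison.
    enum ClassLoadingState { kNameOnly = 0, kDeclarationLoaded, kTypeFinalized };
    enum ClassFinalizedState {
      kNotFinalized = 0,
      kPreFinalized,
      kFinalized,
      kAllocateFinalized
    };

    struct ClassStateSketch {
      ClassLoadingState loading;
      ClassFinalizedState finalized;

      bool is_declaration_loaded() const { return loading >= kDeclarationLoaded; }
      bool is_type_finalized() const { return loading >= kTypeFinalized; }
      bool is_finalized() const {
        return finalized == kFinalized || finalized == kAllocateFinalized;
      }
      bool is_prefinalized() const { return finalized == kPreFinalized; }
    };
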
@@ -1408,19 +1407,19 @@
   bool is_loaded() const { return IsLoadedBit::decode(state_bits()); }
   void set_is_loaded(bool value) const;
 
-  uint16_t num_native_fields() const { return raw_ptr()->num_native_fields_; }
+  uint16_t num_native_fields() const { return untag()->num_native_fields_; }
   void set_num_native_fields(uint16_t value) const {
-    StoreNonPointer(&raw_ptr()->num_native_fields_, value);
+    StoreNonPointer(&untag()->num_native_fields_, value);
   }
 
-  CodePtr allocation_stub() const { return raw_ptr()->allocation_stub(); }
+  CodePtr allocation_stub() const { return untag()->allocation_stub(); }
   void set_allocation_stub(const Code& value) const;
 
   intptr_t kernel_offset() const {
 #if defined(DART_PRECOMPILED_RUNTIME)
     return 0;
 #else
-    return raw_ptr()->kernel_offset_;
+    return untag()->kernel_offset_;
 #endif
   }
 
@@ -1429,7 +1428,7 @@
     UNREACHABLE();
 #else
     ASSERT(value >= 0);
-    StoreNonPointer(&raw_ptr()->kernel_offset_, value);
+    StoreNonPointer(&untag()->kernel_offset_, value);
 #endif
   }
 
@@ -1443,7 +1442,7 @@
 
   FunctionPtr GetInvocationDispatcher(const String& target_name,
                                       const Array& args_desc,
-                                      FunctionLayout::Kind kind,
+                                      UntaggedFunction::Kind kind,
                                       bool create_if_absent) const;
 
   void Finalize() const;
@@ -1551,37 +1550,37 @@
                                const Function& dispatcher) const;
 
   static int32_t host_instance_size_in_words(const ClassPtr cls) {
-    return cls->ptr()->host_instance_size_in_words_;
+    return cls->untag()->host_instance_size_in_words_;
   }
 
   static int32_t target_instance_size_in_words(const ClassPtr cls) {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    return cls->ptr()->target_instance_size_in_words_;
+    return cls->untag()->target_instance_size_in_words_;
 #else
     return host_instance_size_in_words(cls);
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
   }
 
   static int32_t host_next_field_offset_in_words(const ClassPtr cls) {
-    return cls->ptr()->host_next_field_offset_in_words_;
+    return cls->untag()->host_next_field_offset_in_words_;
   }
 
   static int32_t target_next_field_offset_in_words(const ClassPtr cls) {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    return cls->ptr()->target_next_field_offset_in_words_;
+    return cls->untag()->target_next_field_offset_in_words_;
 #else
     return host_next_field_offset_in_words(cls);
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
   }
 
   static int32_t host_type_arguments_field_offset_in_words(const ClassPtr cls) {
-    return cls->ptr()->host_type_arguments_field_offset_in_words_;
+    return cls->untag()->host_type_arguments_field_offset_in_words_;
   }
 
   static int32_t target_type_arguments_field_offset_in_words(
       const ClassPtr cls) {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    return cls->ptr()->target_type_arguments_field_offset_in_words_;
+    return cls->untag()->target_type_arguments_field_offset_in_words_;
 #else
     return host_type_arguments_field_offset_in_words(cls);
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
@@ -1589,7 +1588,7 @@
 
  private:
   TypePtr declaration_type() const {
-    return raw_ptr()->declaration_type<std::memory_order_acquire>();
+    return untag()->declaration_type<std::memory_order_acquire>();
   }
 
   // Caches the declaration type of this class.
@@ -1635,11 +1634,11 @@
   class ConstBit : public BitField<uint32_t, bool, kConstBit, 1> {};
   class ImplementedBit : public BitField<uint32_t, bool, kImplementedBit, 1> {};
   class ClassFinalizedBits : public BitField<uint32_t,
-                                             ClassLayout::ClassFinalizedState,
+                                             UntaggedClass::ClassFinalizedState,
                                              kClassFinalizedPos,
                                              kClassFinalizedSize> {};
   class ClassLoadingBits : public BitField<uint32_t,
-                                           ClassLayout::ClassLoadingState,
+                                           UntaggedClass::ClassLoadingState,
                                            kClassLoadingPos,
                                            kClassLoadingSize> {};
   class AbstractBit : public BitField<uint32_t, bool, kAbstractBit, 1> {};
@@ -1661,7 +1660,7 @@
 
   FunctionPtr CreateInvocationDispatcher(const String& target_name,
                                          const Array& args_desc,
-                                         FunctionLayout::Kind kind) const;
+                                         UntaggedFunction::Kind kind) const;
 
   // Returns the bitmap of unboxed fields
   UnboxedFieldBitmap CalculateFieldOffsets() const;
@@ -1672,13 +1671,13 @@
   // Initial value for the cached number of type arguments.
   static const intptr_t kUnknownNumTypeArguments = -1;
 
-  int16_t num_type_arguments() const { return raw_ptr()->num_type_arguments_; }
+  int16_t num_type_arguments() const { return untag()->num_type_arguments_; }
 
   uint32_t state_bits() const {
     // Ensure any following load instructions do not get performed before this
     // one.
     return LoadNonPointer<uint32_t, std::memory_order_acquire>(
-        &raw_ptr()->state_bits_);
+        &untag()->state_bits_);
   }
 
  public:
@@ -1742,32 +1741,32 @@
 
 class PatchClass : public Object {
  public:
-  ClassPtr patched_class() const { return raw_ptr()->patched_class(); }
-  ClassPtr origin_class() const { return raw_ptr()->origin_class(); }
-  ScriptPtr script() const { return raw_ptr()->script(); }
+  ClassPtr patched_class() const { return untag()->patched_class(); }
+  ClassPtr origin_class() const { return untag()->origin_class(); }
+  ScriptPtr script() const { return untag()->script(); }
   ExternalTypedDataPtr library_kernel_data() const {
-    return raw_ptr()->library_kernel_data();
+    return untag()->library_kernel_data();
   }
   void set_library_kernel_data(const ExternalTypedData& data) const;
 
   intptr_t library_kernel_offset() const {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    return raw_ptr()->library_kernel_offset_;
+    return untag()->library_kernel_offset_;
 #else
     return -1;
 #endif
   }
   void set_library_kernel_offset(intptr_t offset) const {
     NOT_IN_PRECOMPILED(
-        StoreNonPointer(&raw_ptr()->library_kernel_offset_, offset));
+        StoreNonPointer(&untag()->library_kernel_offset_, offset));
   }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(PatchClassLayout));
+    return RoundedAllocationSize(sizeof(UntaggedPatchClass));
   }
   static bool IsInFullSnapshot(PatchClassPtr cls) {
     NoSafepointScope no_safepoint;
-    return Class::IsInFullSnapshot(cls->ptr()->patched_class());
+    return Class::IsInFullSnapshot(cls->untag()->patched_class());
   }
 
   static PatchClassPtr New(const Class& patched_class,
@@ -1788,19 +1787,19 @@
 
 class SingleTargetCache : public Object {
  public:
-  CodePtr target() const { return raw_ptr()->target(); }
+  CodePtr target() const { return untag()->target(); }
   void set_target(const Code& target) const;
   static intptr_t target_offset() {
-    return OFFSET_OF(SingleTargetCacheLayout, target_);
+    return OFFSET_OF(UntaggedSingleTargetCache, target_);
   }
 
 #define DEFINE_NON_POINTER_FIELD_ACCESSORS(type, name)                         \
-  type name() const { return raw_ptr()->name##_; }                             \
+  type name() const { return untag()->name##_; }                               \
   void set_##name(type value) const {                                          \
-    StoreNonPointer(&raw_ptr()->name##_, value);                               \
+    StoreNonPointer(&untag()->name##_, value);                                 \
   }                                                                            \
   static intptr_t name##_offset() {                                            \
-    return OFFSET_OF(SingleTargetCacheLayout, name##_);                        \
+    return OFFSET_OF(UntaggedSingleTargetCache, name##_);                      \
   }
 
   DEFINE_NON_POINTER_FIELD_ACCESSORS(uword, entry_point);
@@ -1809,7 +1808,7 @@
 #undef DEFINE_NON_POINTER_FIELD_ACCESSORS
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(SingleTargetCacheLayout));
+    return RoundedAllocationSize(sizeof(UntaggedSingleTargetCache));
   }
 
   static SingleTargetCachePtr New();
@@ -1821,26 +1820,26 @@
 
 class MonomorphicSmiableCall : public Object {
  public:
-  CodePtr target() const { return raw_ptr()->target(); }
-  classid_t expected_cid() const { return raw_ptr()->expected_cid_; }
+  CodePtr target() const { return untag()->target(); }
+  classid_t expected_cid() const { return untag()->expected_cid_; }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(MonomorphicSmiableCallLayout));
+    return RoundedAllocationSize(sizeof(UntaggedMonomorphicSmiableCall));
   }
 
   static MonomorphicSmiableCallPtr New(classid_t expected_cid,
                                        const Code& target);
 
   static intptr_t expected_cid_offset() {
-    return OFFSET_OF(MonomorphicSmiableCallLayout, expected_cid_);
+    return OFFSET_OF(UntaggedMonomorphicSmiableCall, expected_cid_);
   }
 
   static intptr_t target_offset() {
-    return OFFSET_OF(MonomorphicSmiableCallLayout, target_);
+    return OFFSET_OF(UntaggedMonomorphicSmiableCall, target_);
   }
 
   static intptr_t entrypoint_offset() {
-    return OFFSET_OF(MonomorphicSmiableCallLayout, entrypoint_);
+    return OFFSET_OF(UntaggedMonomorphicSmiableCall, entrypoint_);
   }
 
  private:
@@ -1850,8 +1849,8 @@
 
 class CallSiteData : public Object {
  public:
-  StringPtr target_name() const { return raw_ptr()->target_name(); }
-  ArrayPtr arguments_descriptor() const { return raw_ptr()->args_descriptor(); }
+  StringPtr target_name() const { return untag()->target_name(); }
+  ArrayPtr arguments_descriptor() const { return untag()->args_descriptor(); }
 
   intptr_t TypeArgsLen() const;
 
@@ -1864,11 +1863,11 @@
   intptr_t SizeWithTypeArgs() const;
 
   static intptr_t target_name_offset() {
-    return OFFSET_OF(CallSiteDataLayout, target_name_);
+    return OFFSET_OF(UntaggedCallSiteData, target_name_);
   }
 
   static intptr_t arguments_descriptor_offset() {
-    return OFFSET_OF(CallSiteDataLayout, args_descriptor_);
+    return OFFSET_OF(UntaggedCallSiteData, args_descriptor_);
   }
 
  private:
@@ -1884,11 +1883,11 @@
 class UnlinkedCall : public CallSiteData {
  public:
   bool can_patch_to_monomorphic() const {
-    return raw_ptr()->can_patch_to_monomorphic_;
+    return untag()->can_patch_to_monomorphic_;
   }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(UnlinkedCallLayout));
+    return RoundedAllocationSize(sizeof(UntaggedUnlinkedCall));
   }
 
   intptr_t Hashcode() const;
@@ -1918,7 +1917,7 @@
 
   void SetOriginal(const ICData& value) const;
 
-  bool IsOriginal() const { return Original() == this->raw(); }
+  bool IsOriginal() const { return Original() == this->ptr(); }
 
   intptr_t NumArgsTested() const;
 
@@ -1927,7 +1926,7 @@
     UNREACHABLE();
     return -1;
 #else
-    return raw_ptr()->deopt_id_;
+    return untag()->deopt_id_;
 #endif
   }
 
@@ -1935,10 +1934,10 @@
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
   AbstractTypePtr receivers_static_type() const {
-    return raw_ptr()->receivers_static_type();
+    return untag()->receivers_static_type();
   }
   bool is_tracking_exactness() const {
-    return TrackingExactnessBit::decode(raw_ptr()->state_bits_);
+    return TrackingExactnessBit::decode(untag()->state_bits_);
   }
 #else
   bool is_tracking_exactness() const { return false; }
@@ -2012,12 +2011,12 @@
   void set_is_megamorphic(bool value) const {
     // We don't have concurrent RW access to [state_bits_].
     const uint32_t updated_bits =
-        MegamorphicBit::update(value, raw_ptr()->state_bits_);
+        MegamorphicBit::update(value, untag()->state_bits_);
 
     // This release store ensures that once the state bits are updated, all
     // previous writes to the IC are visible as well.
     StoreNonPointer<uint32_t, uint32_t, std::memory_order_release>(
-        &raw_ptr()->state_bits_, updated_bits);
+        &untag()->state_bits_, updated_bits);
   }
 
   // The length of the array. This includes all sentinel entries including
@@ -2035,11 +2034,11 @@
   bool NumberOfChecksIs(intptr_t n) const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(ICDataLayout));
+    return RoundedAllocationSize(sizeof(UntaggedICData));
   }
 
   static intptr_t state_bits_offset() {
-    return OFFSET_OF(ICDataLayout, state_bits_);
+    return OFFSET_OF(UntaggedICData, state_bits_);
   }
 
   static intptr_t NumArgsTestedShift() { return kNumArgsTestedPos; }
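
The NumArgsTested shift/mask helpers here are the manual form of the BitField encode/decode pattern used throughout this file: each logical field owns a [pos, pos + size) slice of one uint32_t. A self-contained sketch of that pattern (field widths here are assumptions):

    #include <cstdint>

    template <typename T, int kPos, int kSize>
    struct BitFieldSketch {
      static constexpr uint32_t mask() { return ((1u << kSize) - 1) << kPos; }
      static constexpr uint32_t encode(T value) {
        return (static_cast<uint32_t>(value) << kPos) & mask();
      }
      static constexpr T decode(uint32_t bits) {
        return static_cast<T>((bits & mask()) >> kPos);
      }
      // Rewrite just this field, preserving every other bit in the word.
      static constexpr uint32_t update(T value, uint32_t bits) {
        return (bits & ~mask()) | encode(value);
      }
    };

    // e.g. a two-bit "number of args tested" field starting at bit 0:
    using NumArgsTestedSketch = BitFieldSketch<int, 0, 2>;
    static_assert(NumArgsTestedSketch::decode(
                      NumArgsTestedSketch::update(3, 0)) == 3,
                  "round-trips through the packed word");
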
@@ -2048,13 +2047,15 @@
     return ((1 << kNumArgsTestedSize) - 1) << kNumArgsTestedPos;
   }
 
-  static intptr_t entries_offset() { return OFFSET_OF(ICDataLayout, entries_); }
+  static intptr_t entries_offset() {
+    return OFFSET_OF(UntaggedICData, entries_);
+  }
 
-  static intptr_t owner_offset() { return OFFSET_OF(ICDataLayout, owner_); }
+  static intptr_t owner_offset() { return OFFSET_OF(UntaggedICData, owner_); }
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
   static intptr_t receivers_static_type_offset() {
-    return OFFSET_OF(ICDataLayout, receivers_static_type_);
+    return OFFSET_OF(UntaggedICData, receivers_static_type_);
   }
 #endif
 
@@ -2139,7 +2140,7 @@
   intptr_t GetCountAt(intptr_t index) const;
   intptr_t AggregateCount() const;
 
-  // Returns this->raw() if num_args_tested == 1 and arg_nr == 1, otherwise
+  // Returns this->ptr() if num_args_tested == 1 and arg_nr == 1, otherwise
   // returns a new ICData object containing only unique arg_nr checks.
   // Returns only used entries.
   ICDataPtr AsUnaryClassChecksForArgNr(intptr_t arg_nr) const;
@@ -2230,17 +2231,17 @@
   intptr_t FindCheck(const GrowableArray<intptr_t>& cids) const;
 
   ArrayPtr entries() const {
-    return raw_ptr()->entries<std::memory_order_acquire>();
+    return untag()->entries<std::memory_order_acquire>();
   }
 
   bool receiver_cannot_be_smi() const {
     return ReceiverCannotBeSmiBit::decode(
-        LoadNonPointer(&raw_ptr()->state_bits_));
+        LoadNonPointer(&untag()->state_bits_));
   }
 
   void set_receiver_cannot_be_smi(bool value) const {
     set_state_bits(ReceiverCannotBeSmiBit::encode(value) |
-                   LoadNonPointer(&raw_ptr()->state_bits_));
+                   LoadNonPointer(&untag()->state_bits_));
   }
 
  private:
@@ -2258,9 +2259,8 @@
   void set_rebind_rule(uint32_t rebind_rule) const;
   void set_state_bits(uint32_t bits) const;
   void set_tracking_exactness(bool value) const {
-    StoreNonPointer(
-        &raw_ptr()->state_bits_,
-        TrackingExactnessBit::update(value, raw_ptr()->state_bits_));
+    StoreNonPointer(&untag()->state_bits_,
+                    TrackingExactnessBit::update(value, untag()->state_bits_));
   }
 
   // Does entry |index| contain the sentinel value?
@@ -2290,7 +2290,7 @@
     // Ensure any following load instructions do not get performed before this
     // one.
     const uint32_t bits = LoadNonPointer<uint32_t, std::memory_order_acquire>(
-        &raw_ptr()->state_bits_);
+        &untag()->state_bits_);
     return MegamorphicBit::decode(bits);
   }
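
The acquire load here pairs with the release store in set_is_megamorphic() above: a reader that observes the megamorphic bit is guaranteed to also see every IC write that preceded the store. A minimal sketch of that discipline using std::atomic (the VM uses its own Load/StoreNonPointer wrappers):

    #include <atomic>
    #include <cstdint>

    struct ICStateSketch {
      std::atomic<uint32_t> state_bits{0};
      static constexpr uint32_t kMegamorphicBit = 1u << 0;

      void set_is_megamorphic() {
        // Release: all earlier writes to the IC become visible to any
        // thread that later observes this bit.
        state_bits.fetch_or(kMegamorphicBit, std::memory_order_release);
      }
      bool is_megamorphic() const {
        // Acquire: pairs with the release store above.
        return (state_bits.load(std::memory_order_acquire) &
                kMegamorphicBit) != 0;
      }
    };
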
 
@@ -2312,7 +2312,7 @@
   };
 
   COMPILE_ASSERT(kReceiverCannotBeSmiPos + kReceiverCannotBeSmiSize <=
-                 sizeof(ICDataLayout::state_bits_) * kBitsPerWord);
+                 sizeof(UntaggedICData::state_bits_) * kBitsPerWord);
   COMPILE_ASSERT(kNumRebindRules <= (1 << kRebindRuleSize));
 
   class NumArgsTestedBits : public BitField<uint32_t,
@@ -2435,7 +2435,7 @@
 
 class Function : public Object {
  public:
-  StringPtr name() const { return raw_ptr()->name(); }
+  StringPtr name() const { return untag()->name(); }
   StringPtr UserVisibleName() const;  // Same as scrubbed name.
   const char* UserVisibleNameCString() const;
 
@@ -2484,10 +2484,10 @@
   void SetFfiCallbackExceptionalReturn(const Instance& value) const;
 
   // Return the signature of this function.
-  FunctionTypePtr signature() const { return raw_ptr()->signature(); }
+  FunctionTypePtr signature() const { return untag()->signature(); }
   void set_signature(const FunctionType& value) const;
   static intptr_t signature_offset() {
-    return OFFSET_OF(FunctionLayout, signature_);
+    return OFFSET_OF(UntaggedFunction, signature_);
   }
 
   // Build a string of the form '<T>(T, {B b, C c}) => R' representing the
@@ -2516,7 +2516,7 @@
   void set_owner(const Object& value) const;
   ClassPtr origin() const;
   ScriptPtr script() const;
-  ObjectPtr RawOwner() const { return raw_ptr()->owner(); }
+  ObjectPtr RawOwner() const { return untag()->owner(); }
 
   // The NNBD mode of the library declaring this function.
   // TODO(alexmarkov): nnbd_mode() doesn't work for mixins.
@@ -2534,7 +2534,7 @@
   void set_native_name(const String& name) const;
 
   AbstractTypePtr result_type() const {
-    return raw_ptr()->signature()->ptr()->result_type();
+    return untag()->signature()->untag()->result_type();
   }
 
   // The parameters, starting with NumImplicitParameters() parameters which are
@@ -2542,7 +2542,7 @@
   // Note that type checks exclude implicit parameters.
   AbstractTypePtr ParameterTypeAt(intptr_t index) const;
   ArrayPtr parameter_types() const {
-    return raw_ptr()->signature()->ptr()->parameter_types();
+    return untag()->signature()->untag()->parameter_types();
   }
 
   // Parameter names are valid for all valid parameter indices, and are not
@@ -2551,10 +2551,10 @@
   // array isn't necessarily NumParameters(), but the first NumParameters()
   // elements are the names.
   StringPtr ParameterNameAt(intptr_t index) const;
-  ArrayPtr parameter_names() const { return raw_ptr()->parameter_names(); }
+  ArrayPtr parameter_names() const { return untag()->parameter_names(); }
   void SetParameterNamesFrom(const FunctionType& signature) const;
   static intptr_t parameter_names_offset() {
-    return OFFSET_OF(FunctionLayout, parameter_names_);
+    return OFFSET_OF(UntaggedFunction, parameter_names_);
   }
 
   // The required flags are stored at the end of the parameter_names. The flags
@@ -2564,12 +2564,12 @@
   // The type parameters (and their bounds) are specified as an array of
   // TypeParameter stored in the signature. They are part of the function type.
   TypeArgumentsPtr type_parameters() const {
-    return raw_ptr()->signature()->ptr()->type_parameters();
+    return untag()->signature()->untag()->type_parameters();
   }
 
   intptr_t NumTypeParameters() const {
-    return FunctionLayout::PackedNumTypeParameters::decode(
-        raw_ptr()->packed_fields_);
+    return UntaggedFunction::PackedNumTypeParameters::decode(
+        untag()->packed_fields_);
   }
   void SetNumTypeParameters(intptr_t value) const;
 
@@ -2620,41 +2620,41 @@
 
   // Return the most recently compiled and installed code for this function.
   // It is not the only Code object that points to this function.
-  CodePtr CurrentCode() const { return CurrentCodeOf(raw()); }
+  CodePtr CurrentCode() const { return CurrentCodeOf(ptr()); }
 
   bool SafeToClosurize() const;
 
   static CodePtr CurrentCodeOf(const FunctionPtr function) {
-    return function->ptr()->code();
+    return function->untag()->code();
   }
 
   CodePtr unoptimized_code() const {
 #if defined(DART_PRECOMPILED_RUNTIME)
     return static_cast<CodePtr>(Object::null());
 #else
-    return raw_ptr()->unoptimized_code();
+    return untag()->unoptimized_code();
 #endif
   }
   void set_unoptimized_code(const Code& value) const;
   bool HasCode() const;
   static bool HasCode(FunctionPtr function);
 
-  static intptr_t code_offset() { return OFFSET_OF(FunctionLayout, code_); }
+  static intptr_t code_offset() { return OFFSET_OF(UntaggedFunction, code_); }
 
   static intptr_t entry_point_offset(
       CodeEntryKind entry_kind = CodeEntryKind::kNormal) {
     switch (entry_kind) {
       case CodeEntryKind::kNormal:
-        return OFFSET_OF(FunctionLayout, entry_point_);
+        return OFFSET_OF(UntaggedFunction, entry_point_);
       case CodeEntryKind::kUnchecked:
-        return OFFSET_OF(FunctionLayout, unchecked_entry_point_);
+        return OFFSET_OF(UntaggedFunction, unchecked_entry_point_);
       default:
         UNREACHABLE();
     }
   }
 
   static intptr_t unchecked_entry_point_offset() {
-    return OFFSET_OF(FunctionLayout, unchecked_entry_point_);
+    return OFFSET_OF(UntaggedFunction, unchecked_entry_point_);
   }
 
   virtual intptr_t Hash() const;
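
entry_point_offset() in the hunk above lets compiled code pick between a function's normal entry (which checks arguments) and its unchecked entry (which skips checks the caller has already proven). A sketch with an assumed two-field layout:

    #include <cstddef>
    #include <cstdint>

    enum class CodeEntryKind { kNormal, kUnchecked };

    // Hypothetical layout: the function caches both entry addresses.
    struct UntaggedFunctionSketch {
      uintptr_t entry_point_;            // validates receiver/arguments
      uintptr_t unchecked_entry_point_;  // assumes the caller validated them
    };

    inline size_t entry_point_offset(CodeEntryKind kind) {
      switch (kind) {
        case CodeEntryKind::kNormal:
          return offsetof(UntaggedFunctionSketch, entry_point_);
        case CodeEntryKind::kUnchecked:
          return offsetof(UntaggedFunctionSketch, unchecked_entry_point_);
      }
      return 0;  // unreachable for valid kinds
    }
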
@@ -2740,19 +2740,19 @@
   FieldPtr accessor_field() const;
 
   bool IsRegularFunction() const {
-    return kind() == FunctionLayout::kRegularFunction;
+    return kind() == UntaggedFunction::kRegularFunction;
   }
 
   bool IsMethodExtractor() const {
-    return kind() == FunctionLayout::kMethodExtractor;
+    return kind() == UntaggedFunction::kMethodExtractor;
   }
 
   bool IsNoSuchMethodDispatcher() const {
-    return kind() == FunctionLayout::kNoSuchMethodDispatcher;
+    return kind() == UntaggedFunction::kNoSuchMethodDispatcher;
   }
 
   bool IsInvokeFieldDispatcher() const {
-    return kind() == FunctionLayout::kInvokeFieldDispatcher;
+    return kind() == UntaggedFunction::kInvokeFieldDispatcher;
   }
 
   bool IsDynamicInvokeFieldDispatcher() const {
@@ -2770,13 +2770,13 @@
   bool IsDynamicClosureCallDispatcher(Thread* thread) const;
 
   bool IsDynamicInvocationForwarder() const {
-    return kind() == FunctionLayout::kDynamicInvocationForwarder;
+    return kind() == UntaggedFunction::kDynamicInvocationForwarder;
   }
 
   bool IsImplicitGetterOrSetter() const {
-    return kind() == FunctionLayout::kImplicitGetter ||
-           kind() == FunctionLayout::kImplicitSetter ||
-           kind() == FunctionLayout::kImplicitStaticGetter;
+    return kind() == UntaggedFunction::kImplicitGetter ||
+           kind() == UntaggedFunction::kImplicitSetter ||
+           kind() == UntaggedFunction::kImplicitStaticGetter;
   }
 
   // Returns true iff an implicit closure function has been created
@@ -2806,25 +2806,25 @@
   FunctionPtr ForwardingTarget() const;
   void SetForwardingChecks(const Array& checks) const;
 
-  FunctionLayout::Kind kind() const {
-    return KindBits::decode(raw_ptr()->kind_tag_);
+  UntaggedFunction::Kind kind() const {
+    return KindBits::decode(untag()->kind_tag_);
   }
-  static FunctionLayout::Kind kind(FunctionPtr function) {
-    return KindBits::decode(function->ptr()->kind_tag_);
+  static UntaggedFunction::Kind kind(FunctionPtr function) {
+    return KindBits::decode(function->untag()->kind_tag_);
   }
 
-  FunctionLayout::AsyncModifier modifier() const {
-    return ModifierBits::decode(raw_ptr()->kind_tag_);
+  UntaggedFunction::AsyncModifier modifier() const {
+    return ModifierBits::decode(untag()->kind_tag_);
   }
 
-  static const char* KindToCString(FunctionLayout::Kind kind);
+  static const char* KindToCString(UntaggedFunction::Kind kind);
 
   bool IsGenerativeConstructor() const {
-    return (kind() == FunctionLayout::kConstructor) && !is_static();
+    return (kind() == UntaggedFunction::kConstructor) && !is_static();
   }
   bool IsImplicitConstructor() const;
   bool IsFactory() const {
-    return (kind() == FunctionLayout::kConstructor) && is_static();
+    return (kind() == UntaggedFunction::kConstructor) && is_static();
   }
 
   bool HasThisParameter() const {
@@ -2837,22 +2837,22 @@
       return false;
     }
     switch (kind()) {
-      case FunctionLayout::kRegularFunction:
-      case FunctionLayout::kGetterFunction:
-      case FunctionLayout::kSetterFunction:
-      case FunctionLayout::kImplicitGetter:
-      case FunctionLayout::kImplicitSetter:
-      case FunctionLayout::kMethodExtractor:
-      case FunctionLayout::kNoSuchMethodDispatcher:
-      case FunctionLayout::kInvokeFieldDispatcher:
-      case FunctionLayout::kDynamicInvocationForwarder:
+      case UntaggedFunction::kRegularFunction:
+      case UntaggedFunction::kGetterFunction:
+      case UntaggedFunction::kSetterFunction:
+      case UntaggedFunction::kImplicitGetter:
+      case UntaggedFunction::kImplicitSetter:
+      case UntaggedFunction::kMethodExtractor:
+      case UntaggedFunction::kNoSuchMethodDispatcher:
+      case UntaggedFunction::kInvokeFieldDispatcher:
+      case UntaggedFunction::kDynamicInvocationForwarder:
         return true;
-      case FunctionLayout::kClosureFunction:
-      case FunctionLayout::kImplicitClosureFunction:
-      case FunctionLayout::kConstructor:
-      case FunctionLayout::kImplicitStaticGetter:
-      case FunctionLayout::kFieldInitializer:
-      case FunctionLayout::kIrregexpFunction:
+      case UntaggedFunction::kClosureFunction:
+      case UntaggedFunction::kImplicitClosureFunction:
+      case UntaggedFunction::kConstructor:
+      case UntaggedFunction::kImplicitStaticGetter:
+      case UntaggedFunction::kFieldInitializer:
+      case UntaggedFunction::kIrregexpFunction:
         return false;
       default:
         UNREACHABLE();
@@ -2864,22 +2864,22 @@
       return false;
     }
     switch (kind()) {
-      case FunctionLayout::kRegularFunction:
-      case FunctionLayout::kGetterFunction:
-      case FunctionLayout::kSetterFunction:
-      case FunctionLayout::kImplicitGetter:
-      case FunctionLayout::kImplicitSetter:
-      case FunctionLayout::kImplicitStaticGetter:
-      case FunctionLayout::kFieldInitializer:
-      case FunctionLayout::kIrregexpFunction:
+      case UntaggedFunction::kRegularFunction:
+      case UntaggedFunction::kGetterFunction:
+      case UntaggedFunction::kSetterFunction:
+      case UntaggedFunction::kImplicitGetter:
+      case UntaggedFunction::kImplicitSetter:
+      case UntaggedFunction::kImplicitStaticGetter:
+      case UntaggedFunction::kFieldInitializer:
+      case UntaggedFunction::kIrregexpFunction:
         return true;
-      case FunctionLayout::kClosureFunction:
-      case FunctionLayout::kImplicitClosureFunction:
-      case FunctionLayout::kConstructor:
-      case FunctionLayout::kMethodExtractor:
-      case FunctionLayout::kNoSuchMethodDispatcher:
-      case FunctionLayout::kInvokeFieldDispatcher:
-      case FunctionLayout::kDynamicInvocationForwarder:
+      case UntaggedFunction::kClosureFunction:
+      case UntaggedFunction::kImplicitClosureFunction:
+      case UntaggedFunction::kConstructor:
+      case UntaggedFunction::kMethodExtractor:
+      case UntaggedFunction::kNoSuchMethodDispatcher:
+      case UntaggedFunction::kInvokeFieldDispatcher:
+      case UntaggedFunction::kDynamicInvocationForwarder:
         return false;
       default:
         UNREACHABLE();
@@ -2889,11 +2889,11 @@
   bool IsInFactoryScope() const;
 
   bool NeedsTypeArgumentTypeChecks() const {
-    return !(is_static() || (kind() == FunctionLayout::kConstructor));
+    return !(is_static() || (kind() == UntaggedFunction::kConstructor));
   }
 
   bool NeedsArgumentTypeChecks() const {
-    return !(is_static() || (kind() == FunctionLayout::kConstructor));
+    return !(is_static() || (kind() == UntaggedFunction::kConstructor));
   }
 
   bool NeedsMonomorphicCheckedEntry(Zone* zone) const;
@@ -2906,7 +2906,7 @@
 #if defined(DART_PRECOMPILED_RUNTIME)
     return TokenPosition::kNoSource;
 #else
-    return raw_ptr()->token_pos_;
+    return untag()->token_pos_;
 #endif
   }
   void set_token_pos(TokenPosition value) const;
@@ -2915,14 +2915,14 @@
 #if defined(DART_PRECOMPILED_RUNTIME)
     return TokenPosition::kNoSource;
 #else
-    return raw_ptr()->end_token_pos_;
+    return untag()->end_token_pos_;
 #endif
   }
   void set_end_token_pos(TokenPosition value) const {
 #if defined(DART_PRECOMPILED_RUNTIME)
     UNREACHABLE();
 #else
-    StoreNonPointer(&raw_ptr()->end_token_pos_, value);
+    StoreNonPointer(&untag()->end_token_pos_, value);
 #endif
   }
 
@@ -2931,39 +2931,39 @@
 
   // Reexported so they can be used by the flow graph builders.
   using PackedHasNamedOptionalParameters =
-      FunctionLayout::PackedHasNamedOptionalParameters;
-  using PackedNumFixedParameters = FunctionLayout::PackedNumFixedParameters;
+      UntaggedFunction::PackedHasNamedOptionalParameters;
+  using PackedNumFixedParameters = UntaggedFunction::PackedNumFixedParameters;
   using PackedNumOptionalParameters =
-      FunctionLayout::PackedNumOptionalParameters;
+      UntaggedFunction::PackedNumOptionalParameters;
 
-  uint32_t packed_fields() const { return raw_ptr()->packed_fields_; }
+  uint32_t packed_fields() const { return untag()->packed_fields_; }
   void set_packed_fields(uint32_t packed_fields) const;
   static intptr_t packed_fields_offset() {
-    return OFFSET_OF(FunctionLayout, packed_fields_);
+    return OFFSET_OF(UntaggedFunction, packed_fields_);
   }
 
   intptr_t num_fixed_parameters() const {
-    return FunctionLayout::PackedNumFixedParameters::decode(
-        raw_ptr()->packed_fields_);
+    return UntaggedFunction::PackedNumFixedParameters::decode(
+        untag()->packed_fields_);
   }
   void set_num_fixed_parameters(intptr_t value) const;
 
   bool HasOptionalParameters() const {
-    return FunctionLayout::PackedNumOptionalParameters::decode(
-               raw_ptr()->packed_fields_) > 0;
+    return UntaggedFunction::PackedNumOptionalParameters::decode(
+               untag()->packed_fields_) > 0;
   }
   bool HasOptionalNamedParameters() const {
     return HasOptionalParameters() &&
-           FunctionLayout::PackedHasNamedOptionalParameters::decode(
-               raw_ptr()->packed_fields_);
+           UntaggedFunction::PackedHasNamedOptionalParameters::decode(
+               untag()->packed_fields_);
   }
   bool HasRequiredNamedParameters() const;
   bool HasOptionalPositionalParameters() const {
     return HasOptionalParameters() && !HasOptionalNamedParameters();
   }
   intptr_t NumOptionalParameters() const {
-    return FunctionLayout::PackedNumOptionalParameters::decode(
-        raw_ptr()->packed_fields_);
+    return UntaggedFunction::PackedNumOptionalParameters::decode(
+        untag()->packed_fields_);
   }
   intptr_t NumOptionalPositionalParameters() const {
     return HasOptionalPositionalParameters() ? NumOptionalParameters() : 0;
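
The accessors above all decode packed_fields_, one 32-bit word that holds the fixed-parameter count, the optional-parameter count, and a has-named flag. A sketch with assumed field widths (the real widths come from the Packed* bit fields in UntaggedFunction):

    #include <cstdint>

    struct PackedParamsSketch {
      uint32_t packed_fields = 0;

      // bits 0..13: fixed parameter count (width is an assumption)
      int num_fixed() const { return packed_fields & 0x3FFF; }
      // bits 14..27: optional parameter count
      int num_optional() const { return (packed_fields >> 14) & 0x3FFF; }
      // bit 28: optionals are named rather than positional
      bool has_named_optionals() const { return (packed_fields >> 28) & 1; }

      bool has_optional_positional() const {
        return num_optional() > 0 && !has_named_optionals();
      }
      int num_optional_positional() const {
        return has_optional_positional() ? num_optional() : 0;
      }
    };
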
@@ -2991,12 +2991,12 @@
 #else
 #define DEFINE_GETTERS_AND_SETTERS(return_type, type, name)                    \
   static intptr_t name##_offset() {                                            \
-    return OFFSET_OF(FunctionLayout, name##_);                                 \
+    return OFFSET_OF(UntaggedFunction, name##_);                               \
   }                                                                            \
-  return_type name() const { return raw_ptr()->name##_; }                      \
+  return_type name() const { return untag()->name##_; }                        \
                                                                                \
   void set_##name(type value) const {                                          \
-    StoreNonPointer(&raw_ptr()->name##_, value);                               \
+    StoreNonPointer(&untag()->name##_, value);                                 \
   }
 #endif
 
@@ -3008,7 +3008,7 @@
 #if defined(DART_PRECOMPILED_RUNTIME)
     return 0;
 #else
-    return raw_ptr()->kernel_offset_;
+    return untag()->kernel_offset_;
 #endif
   }
 
@@ -3017,7 +3017,7 @@
     UNREACHABLE();
 #else
     ASSERT(value >= 0);
-    StoreNonPointer(&raw_ptr()->kernel_offset_, value);
+    StoreNonPointer(&untag()->kernel_offset_, value);
 #endif
   }
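
kernel_offset() above shows the DART_PRECOMPILED_RUNTIME pattern used across this file: an AOT runtime drops kernel metadata, so the getter degrades to a constant and the setter becomes unreachable. A sketch (the macro name is real; the surrounding class is a stand-in):

    #include <cassert>
    #include <cstdint>

    struct KernelInfoSketch {
    #if !defined(DART_PRECOMPILED_RUNTIME)
      intptr_t kernel_offset_ = 0;  // the field only exists in JIT builds
    #endif

      intptr_t kernel_offset() const {
    #if defined(DART_PRECOMPILED_RUNTIME)
        return 0;  // no kernel metadata is retained in AOT mode
    #else
        return kernel_offset_;
    #endif
      }

      void set_kernel_offset(intptr_t value) {
    #if defined(DART_PRECOMPILED_RUNTIME)
        assert(false && "unreachable in a precompiled runtime");
    #else
        assert(value >= 0);
        kernel_offset_ = value;
    #endif
      }
    };
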
 
@@ -3061,7 +3061,7 @@
   bool CanBeInlined() const;
 
   MethodRecognizer::Kind recognized_kind() const {
-    return RecognizedBits::decode(raw_ptr()->kind_tag_);
+    return RecognizedBits::decode(untag()->kind_tag_);
   }
   void set_recognized_kind(MethodRecognizer::Kind value) const;
 
@@ -3145,13 +3145,13 @@
 
   static constexpr intptr_t maximum_unboxed_parameter_count() {
     // Subtract one slot, which is reserved for the return value.
-    return FunctionLayout::UnboxedParameterBitmap::kCapacity - 1;
+    return UntaggedFunction::UnboxedParameterBitmap::kCapacity - 1;
   }
 
   void reset_unboxed_parameters_and_return() const {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    StoreNonPointer(&raw_ptr()->unboxed_parameters_info_,
-                    FunctionLayout::UnboxedParameterBitmap());
+    StoreNonPointer(&untag()->unboxed_parameters_info_,
+                    UntaggedFunction::UnboxedParameterBitmap());
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
   }
 
@@ -3159,8 +3159,8 @@
 #if !defined(DART_PRECOMPILED_RUNTIME)
     ASSERT(index >= 0 && index < maximum_unboxed_parameter_count());
     index++;  // position 0 is reserved for the return value
-    const_cast<FunctionLayout::UnboxedParameterBitmap*>(
-        &raw_ptr()->unboxed_parameters_info_)
+    const_cast<UntaggedFunction::UnboxedParameterBitmap*>(
+        &untag()->unboxed_parameters_info_)
         ->SetUnboxedInteger(index);
 #else
     UNREACHABLE();
@@ -3171,8 +3171,8 @@
 #if !defined(DART_PRECOMPILED_RUNTIME)
     ASSERT(index >= 0 && index < maximum_unboxed_parameter_count());
     index++;  // position 0 is reserved for the return value
-    const_cast<FunctionLayout::UnboxedParameterBitmap*>(
-        &raw_ptr()->unboxed_parameters_info_)
+    const_cast<UntaggedFunction::UnboxedParameterBitmap*>(
+        &untag()->unboxed_parameters_info_)
         ->SetUnboxedDouble(index);
 
 #else
@@ -3182,8 +3182,8 @@
 
   void set_unboxed_integer_return() const {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    const_cast<FunctionLayout::UnboxedParameterBitmap*>(
-        &raw_ptr()->unboxed_parameters_info_)
+    const_cast<UntaggedFunction::UnboxedParameterBitmap*>(
+        &untag()->unboxed_parameters_info_)
         ->SetUnboxedInteger(0);
 #else
     UNREACHABLE();
@@ -3192,8 +3192,8 @@
 
   void set_unboxed_double_return() const {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    const_cast<FunctionLayout::UnboxedParameterBitmap*>(
-        &raw_ptr()->unboxed_parameters_info_)
+    const_cast<UntaggedFunction::UnboxedParameterBitmap*>(
+        &untag()->unboxed_parameters_info_)
         ->SetUnboxedDouble(0);
 
 #else
@@ -3205,7 +3205,7 @@
 #if !defined(DART_PRECOMPILED_RUNTIME)
     ASSERT(index >= 0);
     index++;  // position 0 is reserved for the return value
-    return raw_ptr()->unboxed_parameters_info_.IsUnboxed(index);
+    return untag()->unboxed_parameters_info_.IsUnboxed(index);
 #else
     return false;
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
@@ -3215,7 +3215,7 @@
 #if !defined(DART_PRECOMPILED_RUNTIME)
     ASSERT(index >= 0);
     index++;  // position 0 is reserved for the return value
-    return raw_ptr()->unboxed_parameters_info_.IsUnboxedInteger(index);
+    return untag()->unboxed_parameters_info_.IsUnboxedInteger(index);
 #else
     return false;
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
@@ -3225,7 +3225,7 @@
 #if !defined(DART_PRECOMPILED_RUNTIME)
     ASSERT(index >= 0);
     index++;  // position 0 is reserved for the return value
-    return raw_ptr()->unboxed_parameters_info_.IsUnboxedDouble(index);
+    return untag()->unboxed_parameters_info_.IsUnboxedDouble(index);
 #else
     return false;
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
@@ -3233,7 +3233,7 @@
 
   bool has_unboxed_return() const {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    return raw_ptr()->unboxed_parameters_info_.IsUnboxed(0);
+    return untag()->unboxed_parameters_info_.IsUnboxed(0);
 #else
     return false;
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
@@ -3241,7 +3241,7 @@
 
   bool has_unboxed_integer_return() const {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    return raw_ptr()->unboxed_parameters_info_.IsUnboxedInteger(0);
+    return untag()->unboxed_parameters_info_.IsUnboxedInteger(0);
 #else
     return false;
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
@@ -3249,7 +3249,7 @@
 
   bool has_unboxed_double_return() const {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-    return raw_ptr()->unboxed_parameters_info_.IsUnboxedDouble(0);
+    return untag()->unboxed_parameters_info_.IsUnboxedDouble(0);
 #else
     return false;
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
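
The unboxed_parameters_info_ accessors above share one convention: slot 0 of the bitmap describes the return value, so parameter indices are shifted up by one before the lookup. A sketch of such a bitmap, assuming two bits per slot (the real UnboxedParameterBitmap encoding may differ):

    #include <cstdint>

    class UnboxedBitmapSketch {
     public:
      void SetUnboxedInteger(int index) { Set(index, 1); }
      void SetUnboxedDouble(int index) { Set(index, 2); }

      bool IsUnboxed(int index) const { return At(index) != 0; }
      bool IsUnboxedInteger(int index) const { return At(index) == 1; }
      bool IsUnboxedDouble(int index) const { return At(index) == 2; }

      // Slot 0 is reserved for the return value; callers pass parameter
      // index + 1 for everything else.
      bool HasUnboxedReturnValue() const { return IsUnboxed(0); }

     private:
      void Set(int index, uint64_t kind) {
        bits_ = (bits_ & ~(3ull << (2 * index))) | (kind << (2 * index));
      }
      uint64_t At(int index) const { return (bits_ >> (2 * index)) & 3; }
      uint64_t bits_ = 0;
    };
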
@@ -3257,21 +3257,21 @@
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
   bool HasUnboxedParameters() const {
-    return raw_ptr()->unboxed_parameters_info_.HasUnboxedParameters();
+    return untag()->unboxed_parameters_info_.HasUnboxedParameters();
   }
   bool HasUnboxedReturnValue() const {
-    return raw_ptr()->unboxed_parameters_info_.HasUnboxedReturnValue();
+    return untag()->unboxed_parameters_info_.HasUnboxedReturnValue();
   }
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
 
   bool IsDispatcherOrImplicitAccessor() const {
     switch (kind()) {
-      case FunctionLayout::kImplicitGetter:
-      case FunctionLayout::kImplicitSetter:
-      case FunctionLayout::kImplicitStaticGetter:
-      case FunctionLayout::kNoSuchMethodDispatcher:
-      case FunctionLayout::kInvokeFieldDispatcher:
-      case FunctionLayout::kDynamicInvocationForwarder:
+      case UntaggedFunction::kImplicitGetter:
+      case UntaggedFunction::kImplicitSetter:
+      case UntaggedFunction::kImplicitStaticGetter:
+      case UntaggedFunction::kNoSuchMethodDispatcher:
+      case UntaggedFunction::kInvokeFieldDispatcher:
+      case UntaggedFunction::kDynamicInvocationForwarder:
         return true;
       default:
         return false;
@@ -3280,53 +3280,53 @@
 
   // Returns true if this function represents an explicit getter function.
   bool IsGetterFunction() const {
-    return kind() == FunctionLayout::kGetterFunction;
+    return kind() == UntaggedFunction::kGetterFunction;
   }
 
   // Returns true if this function represents an implicit getter function.
   bool IsImplicitGetterFunction() const {
-    return kind() == FunctionLayout::kImplicitGetter;
+    return kind() == UntaggedFunction::kImplicitGetter;
   }
 
   // Returns true if this function represents an implicit static getter
   // function.
   bool IsImplicitStaticGetterFunction() const {
-    return kind() == FunctionLayout::kImplicitStaticGetter;
+    return kind() == UntaggedFunction::kImplicitStaticGetter;
   }
 
   // Returns true if this function represents an explicit setter function.
   bool IsSetterFunction() const {
-    return kind() == FunctionLayout::kSetterFunction;
+    return kind() == UntaggedFunction::kSetterFunction;
   }
 
   // Returns true if this function represents an implicit setter function.
   bool IsImplicitSetterFunction() const {
-    return kind() == FunctionLayout::kImplicitSetter;
+    return kind() == UntaggedFunction::kImplicitSetter;
   }
 
   // Returns true if this function represents an initializer for a static or
   // instance field. The function returns the initial value and the caller is
   // responsible for setting the field.
   bool IsFieldInitializer() const {
-    return kind() == FunctionLayout::kFieldInitializer;
+    return kind() == UntaggedFunction::kFieldInitializer;
   }
 
   // Returns true if this function represents a (possibly implicit) closure
   // function.
   bool IsClosureFunction() const {
-    FunctionLayout::Kind k = kind();
-    return (k == FunctionLayout::kClosureFunction) ||
-           (k == FunctionLayout::kImplicitClosureFunction);
+    UntaggedFunction::Kind k = kind();
+    return (k == UntaggedFunction::kClosureFunction) ||
+           (k == UntaggedFunction::kImplicitClosureFunction);
   }
 
   // Returns true if this function represents a generated irregexp function.
   bool IsIrregexpFunction() const {
-    return kind() == FunctionLayout::kIrregexpFunction;
+    return kind() == UntaggedFunction::kIrregexpFunction;
   }
 
   // Returns true if this function represents an implicit closure function.
   bool IsImplicitClosureFunction() const {
-    return kind() == FunctionLayout::kImplicitClosureFunction;
+    return kind() == UntaggedFunction::kImplicitClosureFunction;
   }
 
   // Returns true if this function represents a non-implicit closure function.
@@ -3352,12 +3352,12 @@
 
   // Returns true if this function represents an ffi trampoline.
   bool IsFfiTrampoline() const {
-    return kind() == FunctionLayout::kFfiTrampoline;
+    return kind() == UntaggedFunction::kFfiTrampoline;
   }
   static bool IsFfiTrampoline(FunctionPtr function) {
     NoSafepointScope no_safepoint;
-    return KindBits::decode(function->ptr()->kind_tag_) ==
-           FunctionLayout::kFfiTrampoline;
+    return KindBits::decode(function->untag()->kind_tag_) ==
+           UntaggedFunction::kFfiTrampoline;
   }
 
   bool IsFfiLoad() const {
@@ -3391,7 +3391,9 @@
   //   user_func async {
   //     // ...
   //   }
-  bool IsAsyncFunction() const { return modifier() == FunctionLayout::kAsync; }
+  bool IsAsyncFunction() const {
+    return modifier() == UntaggedFunction::kAsync;
+  }
 
   // Recognise synthetic sync-yielding functions like the inner-most:
   //   user_func /* was async */ {
@@ -3409,7 +3411,7 @@
   //     // ...
   //   }
   bool IsSyncGenerator() const {
-    return modifier() == FunctionLayout::kSyncGen;
+    return modifier() == UntaggedFunction::kSyncGen;
   }
 
   // Recognise synthetic :sync_op_gen()s like:
@@ -3428,7 +3430,7 @@
   //     // ...
   //   }
   bool IsAsyncGenerator() const {
-    return modifier() == FunctionLayout::kAsyncGen;
+    return modifier() == UntaggedFunction::kAsyncGen;
   }
 
   // Recognise synthetic sync-yielding functions like the inner-most:
@@ -3443,7 +3445,7 @@
   }
 
   bool IsAsyncOrGenerator() const {
-    return modifier() != FunctionLayout::kNoModifier;
+    return modifier() != UntaggedFunction::kNoModifier;
   }
 
   // Recognise synthetic sync-yielding functions like the inner-most:
@@ -3460,7 +3462,7 @@
   }
 
   bool IsTypedDataViewFactory() const {
-    if (is_native() && kind() == FunctionLayout::kConstructor) {
+    if (is_native() && kind() == UntaggedFunction::kConstructor) {
       // This is a native factory constructor.
       const Class& klass = Class::Handle(Owner());
       return IsTypedDataViewClassId(klass.id());
@@ -3475,12 +3477,12 @@
   ErrorPtr VerifyClosurizedEntryPoint() const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(FunctionLayout));
+    return RoundedAllocationSize(sizeof(UntaggedFunction));
   }
 
   static FunctionPtr New(const FunctionType& signature,
                          const String& name,
-                         FunctionLayout::Kind kind,
+                         UntaggedFunction::Kind kind,
                          bool is_static,
                          bool is_const,
                          bool is_abstract,
@@ -3492,7 +3494,7 @@
 
   // Allocates a new Function object representing a closure function
   // with given kind - kClosureFunction or kImplicitClosureFunction.
-  static FunctionPtr NewClosureFunctionWithKind(FunctionLayout::Kind kind,
+  static FunctionPtr NewClosureFunctionWithKind(UntaggedFunction::Kind kind,
                                                 const String& name,
                                                 const Function& parent,
                                                 TokenPosition token_pos,
@@ -3549,7 +3551,7 @@
   // Sets deopt reason in all ICData-s with given deopt_id.
   void SetDeoptReasonForAll(intptr_t deopt_id, ICData::DeoptReasonId reason);
 
-  void set_modifier(FunctionLayout::AsyncModifier value) const;
+  void set_modifier(UntaggedFunction::AsyncModifier value) const;
 
 // 'WasCompiled' is true if the function was compiled once in this
 // VM instantiation. It is independent from presence of type feedback
@@ -3594,10 +3596,10 @@
 
   void SetWasExecuted(bool value) const { SetWasExecutedBit(value); }
 
-  static intptr_t data_offset() { return OFFSET_OF(FunctionLayout, data_); }
+  static intptr_t data_offset() { return OFFSET_OF(UntaggedFunction, data_); }
 
   static intptr_t kind_tag_offset() {
-    return OFFSET_OF(FunctionLayout, kind_tag_);
+    return OFFSET_OF(UntaggedFunction, kind_tag_);
   }
 
   // static: Considered during class-side or top-level resolution rather than
@@ -3643,9 +3645,9 @@
 
 #define DEFINE_ACCESSORS(name, accessor_name)                                  \
   void set_##accessor_name(bool value) const {                                 \
-    set_kind_tag(name##Bit::update(value, raw_ptr()->kind_tag_));              \
+    set_kind_tag(name##Bit::update(value, untag()->kind_tag_));                \
   }                                                                            \
-  bool accessor_name() const { return name##Bit::decode(raw_ptr()->kind_tag_); }
+  bool accessor_name() const { return name##Bit::decode(untag()->kind_tag_); }
   FOR_EACH_FUNCTION_KIND_BIT(DEFINE_ACCESSORS)
 #undef DEFINE_ACCESSORS
 
@@ -3653,23 +3655,23 @@
   //              some functions known to execute infrequently and functions
   //              which have been de-optimized too many times.
   bool is_optimizable() const {
-    return FunctionLayout::PackedOptimizable::decode(raw_ptr()->packed_fields_);
+    return UntaggedFunction::PackedOptimizable::decode(untag()->packed_fields_);
   }
   void set_is_optimizable(bool value) const {
-    set_packed_fields(FunctionLayout::PackedOptimizable::update(
-        value, raw_ptr()->packed_fields_));
+    set_packed_fields(UntaggedFunction::PackedOptimizable::update(
+        value, untag()->packed_fields_));
   }
 
   // Indicates whether this function can be optimized on the background compiler
   // thread.
   bool is_background_optimizable() const {
-    return FunctionLayout::PackedBackgroundOptimizable::decode(
-        raw_ptr()->packed_fields_);
+    return UntaggedFunction::PackedBackgroundOptimizable::decode(
+        untag()->packed_fields_);
   }
 
   void set_is_background_optimizable(bool value) const {
-    set_packed_fields(FunctionLayout::PackedBackgroundOptimizable::update(
-        value, raw_ptr()->packed_fields_));
+    set_packed_fields(UntaggedFunction::PackedBackgroundOptimizable::update(
+        value, untag()->packed_fields_));
   }
 
   enum KindTagBits {
@@ -3690,10 +3692,11 @@
   COMPILE_ASSERT(MethodRecognizer::kNumRecognizedMethods <
                  (1 << kRecognizedTagSize));
   COMPILE_ASSERT(kNumTagBits <=
-                 (kBitsPerByte * sizeof(decltype(FunctionLayout::kind_tag_))));
+                 (kBitsPerByte *
+                  sizeof(decltype(UntaggedFunction::kind_tag_))));
 
   class KindBits : public BitField<uint32_t,
-                                   FunctionLayout::Kind,
+                                   UntaggedFunction::Kind,
                                    kKindTagPos,
                                    kKindTagSize> {};
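Note: the COMPILE_ASSERTs in this hunk guard the bit budget: every packed field must fit its slice of kind_tag_. The same check expressed with standard static_assert, with placeholder positions and sizes:

#include <cstdint>

// Sketch: compile-time check that packed tag fields fit their word.
// The enum values and bit sizes here are placeholders, not the VM's.
enum Kind : uint8_t { kRegularFunction, kClosureFunction, kNumKinds };

constexpr int kKindTagSize = 5;        // bits reserved for Kind
constexpr int kRecognizedTagSize = 9;  // bits reserved for recognized ids

static_assert(kNumKinds <= (1 << kKindTagSize),
              "Kind no longer fits in its tag bits");
static_assert(kKindTagSize + kRecognizedTagSize <= 32,
              "packed fields exceed the 32-bit kind_tag_ word");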
 
@@ -3702,7 +3705,7 @@
                                          kRecognizedTagPos,
                                          kRecognizedTagSize> {};
   class ModifierBits : public BitField<uint32_t,
-                                       FunctionLayout::AsyncModifier,
+                                       UntaggedFunction::AsyncModifier,
                                        kModifierPos,
                                        kModifierSize> {};
 
@@ -3722,7 +3725,7 @@
   void set_ic_data_array(const Array& value) const;
   void SetInstructionsSafe(const Code& value) const;
   void set_name(const String& value) const;
-  void set_kind(FunctionLayout::Kind value) const;
+  void set_kind(UntaggedFunction::Kind value) const;
   void set_parent_function(const Function& value) const;
   FunctionPtr implicit_closure_function() const;
   void set_implicit_closure_function(const Function& value) const;
@@ -3740,9 +3743,9 @@
   friend class Class;
   friend class SnapshotWriter;
   friend class Parser;  // For set_eval_script.
-  // FunctionLayout::VisitFunctionPointers accesses the private constructor of
+  // UntaggedFunction::VisitFunctionPointers accesses the private constructor of
   // Function.
-  friend class FunctionLayout;
+  friend class UntaggedFunction;
   friend class ClassFinalizer;  // To reset parent_function.
   friend class Type;            // To adjust parent_function.
   friend class ProgramVisitor;  // For set_parameter_types/names.
@@ -3751,31 +3754,31 @@
 class ClosureData : public Object {
  public:
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(ClosureDataLayout));
+    return RoundedAllocationSize(sizeof(UntaggedClosureData));
   }
 
   static intptr_t default_type_arguments_offset() {
-    return OFFSET_OF(ClosureDataLayout, default_type_arguments_);
+    return OFFSET_OF(UntaggedClosureData, default_type_arguments_);
   }
   static intptr_t default_type_arguments_info_offset() {
-    return OFFSET_OF(ClosureDataLayout, default_type_arguments_info_);
+    return OFFSET_OF(UntaggedClosureData, default_type_arguments_info_);
   }
 
  private:
-  ContextScopePtr context_scope() const { return raw_ptr()->context_scope_; }
+  ContextScopePtr context_scope() const { return untag()->context_scope_; }
   void set_context_scope(const ContextScope& value) const;
 
   // Enclosing function of this local function.
-  FunctionPtr parent_function() const { return raw_ptr()->parent_function_; }
+  FunctionPtr parent_function() const { return untag()->parent_function_; }
   void set_parent_function(const Function& value) const;
 
   InstancePtr implicit_static_closure() const {
-    return raw_ptr()->closure<std::memory_order_acquire>();
+    return untag()->closure<std::memory_order_acquire>();
   }
   void set_implicit_static_closure(const Instance& closure) const;
 
   TypeArgumentsPtr default_type_arguments() const {
-    return raw_ptr()->default_type_arguments_;
+    return untag()->default_type_arguments_;
   }
   void set_default_type_arguments(const TypeArguments& value) const;
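Note: the *_offset() getters in this hunk exist so generated code can address fields directly instead of calling C++ accessors. A sketch of the pattern with standard offsetof; MyBox and payload_ are hypothetical names:

#include <cstddef>
#include <cstdint>

// Sketch: publishing a raw field offset lets a compiler emit
// "load [object + offset]" without any C++ accessor call.
struct MyBox {
  uint32_t header_;
  int64_t payload_;
};

constexpr size_t payload_offset() { return offsetof(MyBox, payload_); }

int64_t LoadPayload(const char* object_base) {
  // What the generated load effectively does with the offset.
  return *reinterpret_cast<const int64_t*>(object_base + payload_offset());
}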
 
@@ -3801,22 +3804,22 @@
 class FfiTrampolineData : public Object {
  public:
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(FfiTrampolineDataLayout));
+    return RoundedAllocationSize(sizeof(UntaggedFfiTrampolineData));
   }
 
  private:
-  FunctionTypePtr c_signature() const { return raw_ptr()->c_signature(); }
+  FunctionTypePtr c_signature() const { return untag()->c_signature(); }
   void set_c_signature(const FunctionType& value) const;
 
-  FunctionPtr callback_target() const { return raw_ptr()->callback_target(); }
+  FunctionPtr callback_target() const { return untag()->callback_target(); }
   void set_callback_target(const Function& value) const;
 
   InstancePtr callback_exceptional_return() const {
-    return raw_ptr()->callback_exceptional_return();
+    return untag()->callback_exceptional_return();
   }
   void set_callback_exceptional_return(const Instance& value) const;
 
-  int32_t callback_id() const { return raw_ptr()->callback_id_; }
+  int32_t callback_id() const { return untag()->callback_id_; }
   void set_callback_id(int32_t value) const;
 
   static FfiTrampolineDataPtr New();
@@ -3843,7 +3846,7 @@
       return true;
     }
     NoSafepointScope no_safepoint;
-    return !raw_ptr()->owner()->IsField();
+    return !untag()->owner()->IsField();
   }
 
   // Mark previously unboxed field boxed. Only operates on clones, updates
@@ -3853,14 +3856,14 @@
   // original field of result.
   FieldPtr CloneFromOriginal() const;
 
-  StringPtr name() const { return raw_ptr()->name(); }
+  StringPtr name() const { return untag()->name(); }
   StringPtr UserVisibleName() const;  // Same as scrubbed name.
   const char* UserVisibleNameCString() const;
   virtual StringPtr DictionaryName() const { return name(); }
 
   uint16_t kind_bits() const {
     return LoadNonPointer<uint16_t, std::memory_order_acquire>(
-        &raw_ptr()->kind_bits_);
+        &untag()->kind_bits_);
   }
 
   bool is_static() const { return StaticBit::decode(kind_bits()); }
@@ -3878,7 +3881,7 @@
   void set_is_reflectable(bool value) const {
     ASSERT(IsOriginal());
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
-    set_kind_bits(ReflectableBit::update(value, raw_ptr()->kind_bits_));
+    set_kind_bits(ReflectableBit::update(value, untag()->kind_bits_));
   }
   bool is_double_initialized() const {
     return DoubleInitializedBit::decode(kind_bits());
@@ -3889,7 +3892,7 @@
     ASSERT(Thread::Current()->IsMutatorThread());
     ASSERT(IsOriginal());
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
-    set_kind_bits(DoubleInitializedBit::update(value, raw_ptr()->kind_bits_));
+    set_kind_bits(DoubleInitializedBit::update(value, untag()->kind_bits_));
   }
 
   bool initializer_changed_after_initialization() const {
@@ -3898,19 +3901,19 @@
   void set_initializer_changed_after_initialization(bool value) const {
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
     set_kind_bits(InitializerChangedAfterInitializatonBit::update(
-        value, raw_ptr()->kind_bits_));
+        value, untag()->kind_bits_));
   }
 
   bool has_pragma() const { return HasPragmaBit::decode(kind_bits()); }
   void set_has_pragma(bool value) const {
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
-    set_kind_bits(HasPragmaBit::update(value, raw_ptr()->kind_bits_));
+    set_kind_bits(HasPragmaBit::update(value, untag()->kind_bits_));
   }
 
   bool is_covariant() const { return CovariantBit::decode(kind_bits()); }
   void set_is_covariant(bool value) const {
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
-    set_kind_bits(CovariantBit::update(value, raw_ptr()->kind_bits_));
+    set_kind_bits(CovariantBit::update(value, untag()->kind_bits_));
   }
 
   bool is_generic_covariant_impl() const {
@@ -3918,15 +3921,14 @@
   }
   void set_is_generic_covariant_impl(bool value) const {
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
-    set_kind_bits(
-        GenericCovariantImplBit::update(value, raw_ptr()->kind_bits_));
+    set_kind_bits(GenericCovariantImplBit::update(value, untag()->kind_bits_));
   }
 
   intptr_t kernel_offset() const {
 #if defined(DART_PRECOMPILED_RUNTIME)
     return 0;
 #else
-    return raw_ptr()->kernel_offset_;
+    return untag()->kernel_offset_;
 #endif
   }
 
@@ -3935,7 +3937,7 @@
     UNREACHABLE();
 #else
     ASSERT(value >= 0);
-    StoreNonPointer(&raw_ptr()->kernel_offset_, value);
+    StoreNonPointer(&untag()->kernel_offset_, value);
 #endif
   }
 
@@ -3951,7 +3953,7 @@
 
   inline intptr_t HostOffset() const;
   static intptr_t host_offset_or_field_id_offset() {
-    return OFFSET_OF(FieldLayout, host_offset_or_field_id_);
+    return OFFSET_OF(UntaggedField, host_offset_or_field_id_);
   }
 
   inline intptr_t TargetOffset() const;
@@ -3972,7 +3974,7 @@
   ScriptPtr Script() const;
   ObjectPtr RawOwner() const;
 
-  AbstractTypePtr type() const { return raw_ptr()->type(); }
+  AbstractTypePtr type() const { return untag()->type(); }
   // Used by class finalizer, otherwise initialized in constructor.
   void SetFieldType(const AbstractType& value) const;
 
@@ -3980,7 +3982,7 @@
   ErrorPtr VerifyEntryPoint(EntryPointPragma kind) const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(FieldLayout));
+    return RoundedAllocationSize(sizeof(UntaggedField));
   }
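Note: every InstanceSize() in this file wraps sizeof in RoundedAllocationSize so heap objects occupy a whole number of allocation units. The rounding is the usual align-up mask; a sketch assuming a hypothetical 16-byte granularity:

#include <cstddef>

// Sketch: round a raw size up to the heap's allocation granularity.
// kObjectAlignment = 16 is an assumption for illustration only.
constexpr size_t kObjectAlignment = 16;

constexpr size_t RoundedAllocationSize(size_t size) {
  return (size + kObjectAlignment - 1) & ~(kObjectAlignment - 1);
}

static_assert(RoundedAllocationSize(1) == 16, "rounds up");
static_assert(RoundedAllocationSize(32) == 32, "already aligned");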
 
   static FieldPtr New(const String& name,
@@ -4007,11 +4009,11 @@
   FieldPtr Clone(const Field& original) const;
 
   static intptr_t kind_bits_offset() {
-    return OFFSET_OF(FieldLayout, kind_bits_);
+    return OFFSET_OF(UntaggedField, kind_bits_);
   }
 
-  TokenPosition token_pos() const { return raw_ptr()->token_pos_; }
-  TokenPosition end_token_pos() const { return raw_ptr()->end_token_pos_; }
+  TokenPosition token_pos() const { return untag()->token_pos_; }
+  TokenPosition end_token_pos() const { return untag()->end_token_pos_; }
 
   int32_t SourceFingerprint() const;
 
@@ -4026,7 +4028,7 @@
     ASSERT(Thread::Current()->IsMutatorThread());
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
     set_kind_bits(HasNontrivialInitializerBit::update(
-        has_nontrivial_initializer, raw_ptr()->kind_bits_));
+        has_nontrivial_initializer, untag()->kind_bits_));
   }
 
   bool has_initializer() const {
@@ -4038,7 +4040,7 @@
     ASSERT(Thread::Current()->IsMutatorThread());
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
     set_kind_bits(
-        HasInitializerBit::update(has_initializer, raw_ptr()->kind_bits_));
+        HasInitializerBit::update(has_initializer, untag()->kind_bits_));
   }
 
   bool has_trivial_initializer() const {
@@ -4053,20 +4055,20 @@
     ASSERT(Thread::Current()->IsMutatorThread());
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
     set_kind_bits(IsNonNullableIntBit::update(is_non_nullable_integer,
-                                              raw_ptr()->kind_bits_));
+                                              untag()->kind_bits_));
   }
 
   StaticTypeExactnessState static_type_exactness_state() const {
     return StaticTypeExactnessState::Decode(
-        raw_ptr()->static_type_exactness_state_);
+        untag()->static_type_exactness_state_);
   }
 
   void set_static_type_exactness_state(StaticTypeExactnessState state) const {
-    StoreNonPointer(&raw_ptr()->static_type_exactness_state_, state.Encode());
+    StoreNonPointer(&untag()->static_type_exactness_state_, state.Encode());
   }
 
   static intptr_t static_type_exactness_state_offset() {
-    return OFFSET_OF(FieldLayout, static_type_exactness_state_);
+    return OFFSET_OF(UntaggedField, static_type_exactness_state_);
   }
 
   // Return class id that any non-null value read from this field is guaranteed
@@ -4080,10 +4082,10 @@
     set_guarded_cid_unsafe(cid);
   }
   void set_guarded_cid_unsafe(intptr_t cid) const {
-    StoreNonPointer(&raw_ptr()->guarded_cid_, cid);
+    StoreNonPointer(&untag()->guarded_cid_, cid);
   }
   static intptr_t guarded_cid_offset() {
-    return OFFSET_OF(FieldLayout, guarded_cid_);
+    return OFFSET_OF(UntaggedField, guarded_cid_);
   }
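Note: guarded_cid records the one class id every value stored into the field has had, which is what lets optimized code skip type checks on loads. A sketch of how such a guard widens, with placeholder sentinel values:

#include <cstdint>

// Sketch of a field type guard widening as stores are observed.
// kIllegalCid/kDynamicCid are illustrative sentinels.
constexpr intptr_t kIllegalCid = 0;  // no store seen yet
constexpr intptr_t kDynamicCid = 1;  // gave up: any class possible

void UpdateGuardedCid(intptr_t* guarded_cid, intptr_t value_cid) {
  if (*guarded_cid == kIllegalCid) {
    *guarded_cid = value_cid;    // first store: guard is monomorphic
  } else if (*guarded_cid != value_cid) {
    *guarded_cid = kDynamicCid;  // second class seen: invalidate users
  }
}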
   // Return the list length that any list stored in this field is guaranteed
   // to have. If length is kUnknownFixedLength the length has not
@@ -4097,7 +4099,7 @@
     set_guarded_list_length_unsafe(list_length);
   }
   static intptr_t guarded_list_length_offset() {
-    return OFFSET_OF(FieldLayout, guarded_list_length_);
+    return OFFSET_OF(UntaggedField, guarded_list_length_);
   }
   intptr_t guarded_list_length_in_object_offset() const;
   void set_guarded_list_length_in_object_offset_unsafe(intptr_t offset) const;
@@ -4107,7 +4109,7 @@
     set_guarded_list_length_in_object_offset_unsafe(offset);
   }
   static intptr_t guarded_list_length_in_object_offset_offset() {
-    return OFFSET_OF(FieldLayout, guarded_list_length_in_object_offset_);
+    return OFFSET_OF(UntaggedField, guarded_list_length_in_object_offset_);
   }
 
   bool needs_length_check() const {
@@ -4134,7 +4136,7 @@
   // Default 'true', set to false once optimizing compiler determines it should
   // be boxed.
   void set_is_unboxing_candidate_unsafe(bool b) const {
-    set_kind_bits(UnboxingCandidateBit::update(b, raw_ptr()->kind_bits_));
+    set_kind_bits(UnboxingCandidateBit::update(b, untag()->kind_bits_));
   }
 
   void set_is_unboxing_candidate(bool b) const {
@@ -4150,15 +4152,15 @@
   };
   void set_is_late(bool value) const {
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
-    set_kind_bits(IsLateBit::update(value, raw_ptr()->kind_bits_));
+    set_kind_bits(IsLateBit::update(value, untag()->kind_bits_));
   }
   void set_is_extension_member(bool value) const {
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
-    set_kind_bits(IsExtensionMemberBit::update(value, raw_ptr()->kind_bits_));
+    set_kind_bits(IsExtensionMemberBit::update(value, untag()->kind_bits_));
   }
   void set_needs_load_guard(bool value) const {
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
-    set_kind_bits(NeedsLoadGuardBit::update(value, raw_ptr()->kind_bits_));
+    set_kind_bits(NeedsLoadGuardBit::update(value, untag()->kind_bits_));
   }
   // Returns false if any value read from this field is guaranteed to be
   // not null.
@@ -4175,7 +4177,7 @@
              thread->IsAtSafepoint());
     }
 #endif
-    return raw_ptr()->is_nullable_ == kNullCid;
+    return untag()->is_nullable_ == kNullCid;
   }
   void set_is_nullable(bool val) const {
     DEBUG_ASSERT(
@@ -4184,10 +4186,10 @@
   }
   void set_is_nullable_unsafe(bool val) const {
     ASSERT(Thread::Current()->IsMutatorThread());
-    StoreNonPointer(&raw_ptr()->is_nullable_, val ? kNullCid : kIllegalCid);
+    StoreNonPointer(&untag()->is_nullable_, val ? kNullCid : kIllegalCid);
   }
   static intptr_t is_nullable_offset() {
-    return OFFSET_OF(FieldLayout, is_nullable_);
+    return OFFSET_OF(UntaggedField, is_nullable_);
   }
 
   // Record store of the given value into this field. May trigger
@@ -4225,12 +4227,12 @@
 
   FunctionPtr EnsureInitializerFunction() const;
   FunctionPtr InitializerFunction() const {
-    return raw_ptr()->initializer_function<std::memory_order_acquire>();
+    return untag()->initializer_function<std::memory_order_acquire>();
   }
   void SetInitializerFunction(const Function& initializer) const;
   bool HasInitializerFunction() const;
   static intptr_t initializer_function_offset() {
-    return OFFSET_OF(FieldLayout, initializer_function_);
+    return OFFSET_OF(UntaggedField, initializer_function_);
   }
 
   // For static fields only. Constructs a closure that gets/sets the
@@ -4257,7 +4259,7 @@
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
   SubtypeTestCachePtr type_test_cache() const {
-    return raw_ptr()->type_test_cache();
+    return untag()->type_test_cache();
   }
   void set_type_test_cache(const SubtypeTestCache& cache) const;
 #endif
@@ -4341,28 +4343,26 @@
   void set_name(const String& value) const;
   void set_is_static(bool is_static) const {
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
-    set_kind_bits(StaticBit::update(is_static, raw_ptr()->kind_bits_));
+    set_kind_bits(StaticBit::update(is_static, untag()->kind_bits_));
   }
   void set_is_final(bool is_final) const {
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
-    set_kind_bits(FinalBit::update(is_final, raw_ptr()->kind_bits_));
+    set_kind_bits(FinalBit::update(is_final, untag()->kind_bits_));
   }
   void set_is_const(bool value) const {
     // TODO(36097): Once concurrent access is possible ensure updates are safe.
-    set_kind_bits(ConstBit::update(value, raw_ptr()->kind_bits_));
+    set_kind_bits(ConstBit::update(value, untag()->kind_bits_));
   }
-  void set_owner(const Object& value) const {
-    raw_ptr()->set_owner(value.raw());
-  }
+  void set_owner(const Object& value) const { untag()->set_owner(value.ptr()); }
   void set_token_pos(TokenPosition token_pos) const {
-    StoreNonPointer(&raw_ptr()->token_pos_, token_pos);
+    StoreNonPointer(&untag()->token_pos_, token_pos);
   }
   void set_end_token_pos(TokenPosition token_pos) const {
-    StoreNonPointer(&raw_ptr()->end_token_pos_, token_pos);
+    StoreNonPointer(&untag()->end_token_pos_, token_pos);
   }
   void set_kind_bits(uint16_t value) const {
     StoreNonPointer<uint16_t, uint16_t, std::memory_order_release>(
-        &raw_ptr()->kind_bits_, value);
+        &untag()->kind_bits_, value);
   }
 
   static FieldPtr New();
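Note: set_kind_bits here stores with memory_order_release, pairing with the memory_order_acquire load in kind_bits() earlier in the class. The idiom in plain std::atomic form, as a sketch:

#include <atomic>
#include <cstdint>

// Sketch of the release-store / acquire-load pairing on kind_bits_:
// a reader that observes the new bits also observes every write the
// publisher made before the store.
std::atomic<uint16_t> kind_bits{0};

void PublishBits(uint16_t new_bits) {
  kind_bits.store(new_bits, std::memory_order_release);
}

uint16_t ReadBits() {
  return kind_bits.load(std::memory_order_acquire);
}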
@@ -4370,18 +4370,18 @@
   FINAL_HEAP_OBJECT_IMPLEMENTATION(Field, Object);
   friend class Class;
   friend class HeapProfiler;
-  friend class FieldLayout;
+  friend class UntaggedField;
   friend class FieldSerializationCluster;
   friend class FieldDeserializationCluster;
 };
 
 class Script : public Object {
  public:
-  StringPtr url() const { return raw_ptr()->url(); }
+  StringPtr url() const { return untag()->url(); }
   void set_url(const String& value) const;
 
   // The actual url which was loaded from disk, if provided by the embedder.
-  StringPtr resolved_url() const { return raw_ptr()->resolved_url(); }
+  StringPtr resolved_url() const { return untag()->resolved_url(); }
   bool HasSource() const;
   StringPtr Source() const;
   bool IsPartOfDartColonLibrary() const;
@@ -4389,28 +4389,26 @@
   void LookupSourceAndLineStarts(Zone* zone) const;
   GrowableObjectArrayPtr GenerateLineNumberArray() const;
 
-  intptr_t line_offset() const { return raw_ptr()->line_offset_; }
-  intptr_t col_offset() const { return raw_ptr()->col_offset_; }
+  intptr_t line_offset() const { return untag()->line_offset_; }
+  intptr_t col_offset() const { return untag()->col_offset_; }
   // Returns the max real token position for this script, or kNoSource
   // if there is no line starts information.
   TokenPosition MaxPosition() const;
 
   // The load time in milliseconds since epoch.
-  int64_t load_timestamp() const { return raw_ptr()->load_timestamp_; }
+  int64_t load_timestamp() const { return untag()->load_timestamp_; }
 
   ArrayPtr compile_time_constants() const {
-    return raw_ptr()->compile_time_constants();
+    return untag()->compile_time_constants();
   }
   void set_compile_time_constants(const Array& value) const;
 
   KernelProgramInfoPtr kernel_program_info() const {
-    return raw_ptr()->kernel_program_info();
+    return untag()->kernel_program_info();
   }
   void set_kernel_program_info(const KernelProgramInfo& info) const;
 
-  intptr_t kernel_script_index() const {
-    return raw_ptr()->kernel_script_index_;
-  }
+  intptr_t kernel_script_index() const { return untag()->kernel_script_index_; }
   void set_kernel_script_index(const intptr_t kernel_script_index) const;
 
   TypedDataPtr kernel_string_offsets() const;
@@ -4459,7 +4457,7 @@
                         TokenPosition* last_token_index) const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(ScriptLayout));
+    return RoundedAllocationSize(sizeof(UntaggedScript));
   }
 
   static ScriptPtr New(const String& url, const String& source);
@@ -4544,32 +4542,32 @@
 
 class Library : public Object {
  public:
-  StringPtr name() const { return raw_ptr()->name(); }
+  StringPtr name() const { return untag()->name(); }
   void SetName(const String& name) const;
 
-  StringPtr url() const { return raw_ptr()->url(); }
-  StringPtr private_key() const { return raw_ptr()->private_key(); }
+  StringPtr url() const { return untag()->url(); }
+  StringPtr private_key() const { return untag()->private_key(); }
   bool LoadNotStarted() const {
-    return raw_ptr()->load_state_ == LibraryLayout::kAllocated;
+    return untag()->load_state_ == UntaggedLibrary::kAllocated;
   }
   bool LoadRequested() const {
-    return raw_ptr()->load_state_ == LibraryLayout::kLoadRequested;
+    return untag()->load_state_ == UntaggedLibrary::kLoadRequested;
   }
   bool LoadInProgress() const {
-    return raw_ptr()->load_state_ == LibraryLayout::kLoadInProgress;
+    return untag()->load_state_ == UntaggedLibrary::kLoadInProgress;
   }
   void SetLoadRequested() const;
   void SetLoadInProgress() const;
   bool Loaded() const {
-    return raw_ptr()->load_state_ == LibraryLayout::kLoaded;
+    return untag()->load_state_ == UntaggedLibrary::kLoaded;
   }
   void SetLoaded() const;
 
-  LoadingUnitPtr loading_unit() const { return raw_ptr()->loading_unit(); }
+  LoadingUnitPtr loading_unit() const { return untag()->loading_unit(); }
   void set_loading_unit(const LoadingUnit& value) const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(LibraryLayout));
+    return RoundedAllocationSize(sizeof(UntaggedLibrary));
   }
 
   static LibraryPtr New(const String& url);
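Note: the LoadNotStarted/LoadRequested/LoadInProgress/Loaded predicates above compare load_state_ against a monotonic enum. A sketch of that one-way state machine; the real setters also assert the expected predecessor state:

// Sketch of the library load-state machine the predicates test.
enum LoadState { kAllocated, kLoadRequested, kLoadInProgress, kLoaded };

struct LibraryState {
  LoadState state = kAllocated;
  // Transitions only move forward; repeated calls are ignored here,
  // whereas the VM asserts on them.
  void RequestLoad() { if (state == kAllocated) state = kLoadRequested; }
  void StartLoad()   { if (state == kLoadRequested) state = kLoadInProgress; }
  void FinishLoad()  { if (state == kLoadInProgress) state = kLoaded; }
  bool Loaded() const { return state == kLoaded; }
};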
@@ -4662,22 +4660,22 @@
                          const String& pragma_name,
                          Object* options = nullptr);
 
-  ClassPtr toplevel_class() const { return raw_ptr()->toplevel_class(); }
+  ClassPtr toplevel_class() const { return untag()->toplevel_class(); }
   void set_toplevel_class(const Class& value) const;
 
   GrowableObjectArrayPtr used_scripts() const {
-    return raw_ptr()->used_scripts();
+    return untag()->used_scripts();
   }
 
   // Library imports.
-  ArrayPtr imports() const { return raw_ptr()->imports(); }
-  ArrayPtr exports() const { return raw_ptr()->exports(); }
+  ArrayPtr imports() const { return untag()->imports(); }
+  ArrayPtr exports() const { return untag()->exports(); }
   void AddImport(const Namespace& ns) const;
-  intptr_t num_imports() const { return raw_ptr()->num_imports_; }
+  intptr_t num_imports() const { return untag()->num_imports_; }
   NamespacePtr ImportAt(intptr_t index) const;
   LibraryPtr ImportLibraryAt(intptr_t index) const;
 
-  ArrayPtr dependencies() const { return raw_ptr()->dependencies(); }
+  ArrayPtr dependencies() const { return untag()->dependencies(); }
   void set_dependencies(const Array& deps) const;
 
   void DropDependenciesAndCaches() const;
@@ -4685,37 +4683,37 @@
   // Resolving native methods for script loaded in the library.
   Dart_NativeEntryResolver native_entry_resolver() const {
     return LoadNonPointer<Dart_NativeEntryResolver, std::memory_order_relaxed>(
-        &raw_ptr()->native_entry_resolver_);
+        &untag()->native_entry_resolver_);
   }
   void set_native_entry_resolver(Dart_NativeEntryResolver value) const {
     StoreNonPointer<Dart_NativeEntryResolver, Dart_NativeEntryResolver,
-                    std::memory_order_relaxed>(
-        &raw_ptr()->native_entry_resolver_, value);
+                    std::memory_order_relaxed>(&untag()->native_entry_resolver_,
+                                               value);
   }
   Dart_NativeEntrySymbol native_entry_symbol_resolver() const {
     return LoadNonPointer<Dart_NativeEntrySymbol, std::memory_order_relaxed>(
-        &raw_ptr()->native_entry_symbol_resolver_);
+        &untag()->native_entry_symbol_resolver_);
   }
   void set_native_entry_symbol_resolver(
       Dart_NativeEntrySymbol native_symbol_resolver) const {
     StoreNonPointer<Dart_NativeEntrySymbol, Dart_NativeEntrySymbol,
                     std::memory_order_relaxed>(
-        &raw_ptr()->native_entry_symbol_resolver_, native_symbol_resolver);
+        &untag()->native_entry_symbol_resolver_, native_symbol_resolver);
   }
 
   bool is_in_fullsnapshot() const {
-    return LibraryLayout::InFullSnapshotBit::decode(raw_ptr()->flags_);
+    return UntaggedLibrary::InFullSnapshotBit::decode(untag()->flags_);
   }
   void set_is_in_fullsnapshot(bool value) const {
     set_flags(
-        LibraryLayout::InFullSnapshotBit::update(value, raw_ptr()->flags_));
+        UntaggedLibrary::InFullSnapshotBit::update(value, untag()->flags_));
   }
 
   bool is_nnbd() const {
-    return LibraryLayout::NnbdBit::decode(raw_ptr()->flags_);
+    return UntaggedLibrary::NnbdBit::decode(untag()->flags_);
   }
   void set_is_nnbd(bool value) const {
-    set_flags(LibraryLayout::NnbdBit::update(value, raw_ptr()->flags_));
+    set_flags(UntaggedLibrary::NnbdBit::update(value, untag()->flags_));
   }
 
   NNBDMode nnbd_mode() const {
@@ -4724,20 +4722,20 @@
 
   NNBDCompiledMode nnbd_compiled_mode() const {
     return static_cast<NNBDCompiledMode>(
-        LibraryLayout::NnbdCompiledModeBits::decode(raw_ptr()->flags_));
+        UntaggedLibrary::NnbdCompiledModeBits::decode(untag()->flags_));
   }
   void set_nnbd_compiled_mode(NNBDCompiledMode value) const {
-    set_flags(LibraryLayout::NnbdCompiledModeBits::update(
-        static_cast<uint8_t>(value), raw_ptr()->flags_));
+    set_flags(UntaggedLibrary::NnbdCompiledModeBits::update(
+        static_cast<uint8_t>(value), untag()->flags_));
   }
 
   StringPtr PrivateName(const String& name) const;
 
-  intptr_t index() const { return raw_ptr()->index_; }
+  intptr_t index() const { return untag()->index_; }
   void set_index(intptr_t value) const {
     ASSERT((value == -1) ||
            ((value >= 0) && (value < std::numeric_limits<classid_t>::max())));
-    StoreNonPointer(&raw_ptr()->index_, value);
+    StoreNonPointer(&untag()->index_, value);
   }
 
   void Register(Thread* thread) const;
@@ -4745,17 +4743,17 @@
                                 const GrowableObjectArray& libs);
 
   bool IsDebuggable() const {
-    return LibraryLayout::DebuggableBit::decode(raw_ptr()->flags_);
+    return UntaggedLibrary::DebuggableBit::decode(untag()->flags_);
   }
   void set_debuggable(bool value) const {
-    set_flags(LibraryLayout::DebuggableBit::update(value, raw_ptr()->flags_));
+    set_flags(UntaggedLibrary::DebuggableBit::update(value, untag()->flags_));
   }
 
   bool is_dart_scheme() const {
-    return LibraryLayout::DartSchemeBit::decode(raw_ptr()->flags_);
+    return UntaggedLibrary::DartSchemeBit::decode(untag()->flags_);
   }
   void set_is_dart_scheme(bool value) const {
-    set_flags(LibraryLayout::DartSchemeBit::update(value, raw_ptr()->flags_));
+    set_flags(UntaggedLibrary::DartSchemeBit::update(value, untag()->flags_));
   }
 
   // Includes 'dart:async', 'dart:typed_data', etc.
@@ -4763,14 +4761,14 @@
 
   inline intptr_t UrlHash() const;
 
-  ExternalTypedDataPtr kernel_data() const { return raw_ptr()->kernel_data(); }
+  ExternalTypedDataPtr kernel_data() const { return untag()->kernel_data(); }
   void set_kernel_data(const ExternalTypedData& data) const;
 
   intptr_t kernel_offset() const {
 #if defined(DART_PRECOMPILED_RUNTIME)
     return 0;
 #else
-    return raw_ptr()->kernel_offset_;
+    return untag()->kernel_offset_;
 #endif
   }
 
@@ -4779,7 +4777,7 @@
     UNREACHABLE();
 #else
     ASSERT(value >= 0);
-    StoreNonPointer(&raw_ptr()->kernel_offset_, value);
+    StoreNonPointer(&untag()->kernel_offset_, value);
 #endif
   }
 
@@ -4869,17 +4867,17 @@
   void set_num_imports(intptr_t value) const;
   void set_flags(uint8_t flags) const;
   bool HasExports() const;
-  ArrayPtr loaded_scripts() const { return raw_ptr()->loaded_scripts(); }
+  ArrayPtr loaded_scripts() const { return untag()->loaded_scripts(); }
   ArrayPtr metadata() const {
     DEBUG_ASSERT(
         IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());
-    return raw_ptr()->metadata();
+    return untag()->metadata();
   }
   void set_metadata(const Array& value) const;
-  ArrayPtr dictionary() const { return raw_ptr()->dictionary(); }
+  ArrayPtr dictionary() const { return untag()->dictionary(); }
   void InitClassDictionary() const;
 
-  ArrayPtr resolved_names() const { return raw_ptr()->resolved_names(); }
+  ArrayPtr resolved_names() const { return untag()->resolved_names(); }
   bool LookupResolvedNamesCache(const String& name, Object* obj) const;
   void AddToResolvedNamesCache(const String& name, const Object& obj) const;
   void InitResolvedNamesCache() const;
@@ -4887,7 +4885,7 @@
   void InvalidateResolvedName(const String& name) const;
   void InvalidateResolvedNamesCache() const;
 
-  ArrayPtr exported_names() const { return raw_ptr()->exported_names(); }
+  ArrayPtr exported_names() const { return untag()->exported_names(); }
   bool LookupExportedNamesCache(const String& name, Object* obj) const;
   void AddToExportedNamesCache(const String& name, const Object& obj) const;
   void InitExportedNamesCache() const;
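Note: the resolved/exported name caches follow a lookup/add/invalidate protocol. A sketch of that contract, with a plain map standing in for the VM's Array-backed table:

#include <string>
#include <unordered_map>

// Sketch of the Lookup/Add/Invalidate cache protocol above.
struct NamesCache {
  std::unordered_map<std::string, void*> entries;

  bool Lookup(const std::string& name, void** out) const {
    auto it = entries.find(name);
    if (it == entries.end()) return false;
    *out = it->second;
    return true;
  }
  void Add(const std::string& name, void* obj) { entries[name] = obj; }
  // Any mutation of the underlying dictionary must drop stale entries.
  void Invalidate(const std::string& name) { entries.erase(name); }
  void InvalidateAll() { entries.clear(); }
};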
@@ -4920,13 +4918,13 @@
 // the show/hide combinators.
 class Namespace : public Object {
  public:
-  LibraryPtr target() const { return raw_ptr()->target(); }
-  ArrayPtr show_names() const { return raw_ptr()->show_names(); }
-  ArrayPtr hide_names() const { return raw_ptr()->hide_names(); }
-  LibraryPtr owner() const { return raw_ptr()->owner(); }
+  LibraryPtr target() const { return untag()->target(); }
+  ArrayPtr show_names() const { return untag()->show_names(); }
+  ArrayPtr hide_names() const { return untag()->hide_names(); }
+  LibraryPtr owner() const { return untag()->owner(); }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(NamespaceLayout));
+    return RoundedAllocationSize(sizeof(UntaggedNamespace));
   }
 
   bool HidesName(const String& name) const;
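Note: HidesName implements import combinator semantics: a hide list always wins, and a non-empty show list admits only the names it mentions. A standalone sketch of that rule:

#include <algorithm>
#include <string>
#include <vector>

// Sketch of show/hide combinator filtering for an import namespace.
bool HidesName(const std::vector<std::string>& show_names,
               const std::vector<std::string>& hide_names,
               const std::string& name) {
  auto contains = [&](const std::vector<std::string>& names) {
    return std::find(names.begin(), names.end(), name) != names.end();
  };
  if (contains(hide_names)) return true;  // hide always wins
  if (!show_names.empty() && !contains(show_names)) return true;
  return false;                           // name is visible
}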
@@ -4961,37 +4959,37 @@
                                   const uint32_t binary_version);
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(KernelProgramInfoLayout));
+    return RoundedAllocationSize(sizeof(UntaggedKernelProgramInfo));
   }
 
-  TypedDataPtr string_offsets() const { return raw_ptr()->string_offsets(); }
+  TypedDataPtr string_offsets() const { return untag()->string_offsets(); }
 
-  ExternalTypedDataPtr string_data() const { return raw_ptr()->string_data(); }
+  ExternalTypedDataPtr string_data() const { return untag()->string_data(); }
 
-  TypedDataPtr canonical_names() const { return raw_ptr()->canonical_names(); }
+  TypedDataPtr canonical_names() const { return untag()->canonical_names(); }
 
   ExternalTypedDataPtr metadata_payloads() const {
-    return raw_ptr()->metadata_payloads();
+    return untag()->metadata_payloads();
   }
 
   ExternalTypedDataPtr metadata_mappings() const {
-    return raw_ptr()->metadata_mappings();
+    return untag()->metadata_mappings();
   }
 
   ExternalTypedDataPtr constants_table() const {
-    return raw_ptr()->constants_table();
+    return untag()->constants_table();
   }
 
   void set_constants_table(const ExternalTypedData& value) const;
 
-  ArrayPtr scripts() const { return raw_ptr()->scripts(); }
+  ArrayPtr scripts() const { return untag()->scripts(); }
   void set_scripts(const Array& scripts) const;
 
-  ArrayPtr constants() const { return raw_ptr()->constants(); }
+  ArrayPtr constants() const { return untag()->constants(); }
   void set_constants(const Array& constants) const;
 
   uint32_t kernel_binary_version() const {
-    return raw_ptr()->kernel_binary_version_;
+    return untag()->kernel_binary_version_;
   }
   void set_kernel_binary_version(uint32_t version) const;
 
@@ -5001,26 +4999,26 @@
   //
   // This array will hold the functions which might need their native name set.
   GrowableObjectArrayPtr potential_natives() const {
-    return raw_ptr()->potential_natives();
+    return untag()->potential_natives();
   }
   void set_potential_natives(const GrowableObjectArray& candidates) const;
 
   GrowableObjectArrayPtr potential_pragma_functions() const {
-    return raw_ptr()->potential_pragma_functions();
+    return untag()->potential_pragma_functions();
   }
   void set_potential_pragma_functions(
       const GrowableObjectArray& candidates) const;
 
   ScriptPtr ScriptAt(intptr_t index) const;
 
-  ArrayPtr libraries_cache() const { return raw_ptr()->libraries_cache(); }
+  ArrayPtr libraries_cache() const { return untag()->libraries_cache(); }
   void set_libraries_cache(const Array& cache) const;
   LibraryPtr LookupLibrary(Thread* thread, const Smi& name_index) const;
   LibraryPtr InsertLibrary(Thread* thread,
                            const Smi& name_index,
                            const Library& lib) const;
 
-  ArrayPtr classes_cache() const { return raw_ptr()->classes_cache(); }
+  ArrayPtr classes_cache() const { return untag()->classes_cache(); }
   void set_classes_cache(const Array& cache) const;
   ClassPtr LookupClass(Thread* thread, const Smi& name_index) const;
   ClassPtr InsertClass(Thread* thread,
@@ -5056,20 +5054,20 @@
     EntryType type_;
   };
 
-  intptr_t Length() const { return raw_ptr()->length_; }
+  intptr_t Length() const { return untag()->length_; }
   void SetLength(intptr_t value) const {
-    StoreNonPointer(&raw_ptr()->length_, value);
+    StoreNonPointer(&untag()->length_, value);
   }
 
   static intptr_t length_offset() {
-    return OFFSET_OF(ObjectPoolLayout, length_);
+    return OFFSET_OF(UntaggedObjectPool, length_);
   }
   static intptr_t data_offset() {
-    return OFFSET_OF_RETURNED_VALUE(ObjectPoolLayout, data);
+    return OFFSET_OF_RETURNED_VALUE(UntaggedObjectPool, data);
   }
   static intptr_t element_offset(intptr_t index) {
-    return OFFSET_OF_RETURNED_VALUE(ObjectPoolLayout, data) +
-           sizeof(ObjectPoolLayout::Entry) * index;
+    return OFFSET_OF_RETURNED_VALUE(UntaggedObjectPool, data) +
+           sizeof(UntaggedObjectPool::Entry) * index;
   }
 
   struct ArrayTraits {
@@ -5077,24 +5075,24 @@
       return ObjectPool::data_offset();
     }
 
-    static constexpr intptr_t kElementSize = sizeof(ObjectPoolLayout::Entry);
+    static constexpr intptr_t kElementSize = sizeof(UntaggedObjectPool::Entry);
   };
 
   EntryType TypeAt(intptr_t index) const {
     ASSERT((index >= 0) && (index <= Length()));
-    return TypeBits::decode(raw_ptr()->entry_bits()[index]);
+    return TypeBits::decode(untag()->entry_bits()[index]);
   }
 
   Patchability PatchableAt(intptr_t index) const {
     ASSERT((index >= 0) && (index <= Length()));
-    return PatchableBit::decode(raw_ptr()->entry_bits()[index]);
+    return PatchableBit::decode(untag()->entry_bits()[index]);
   }
 
   void SetTypeAt(intptr_t index, EntryType type, Patchability patchable) const {
     ASSERT(index >= 0 && index <= Length());
     const uint8_t bits =
         PatchableBit::encode(patchable) | TypeBits::encode(type);
-    StoreNonPointer(&raw_ptr()->entry_bits()[index], bits);
+    StoreNonPointer(&untag()->entry_bits()[index], bits);
   }
 
   template <std::memory_order order = std::memory_order_relaxed>
@@ -5107,7 +5105,7 @@
   void SetObjectAt(intptr_t index, const Object& obj) const {
     ASSERT((TypeAt(index) == EntryType::kTaggedObject) ||
            (TypeAt(index) == EntryType::kImmediate && obj.IsSmi()));
-    StorePointer<ObjectPtr, order>(&EntryAddr(index)->raw_obj_, obj.raw());
+    StorePointer<ObjectPtr, order>(&EntryAddr(index)->raw_obj_, obj.ptr());
   }
 
   uword RawValueAt(intptr_t index) const {
@@ -5120,21 +5118,21 @@
   }
 
   static intptr_t InstanceSize() {
-    ASSERT(sizeof(ObjectPoolLayout) ==
-           OFFSET_OF_RETURNED_VALUE(ObjectPoolLayout, data));
+    ASSERT(sizeof(UntaggedObjectPool) ==
+           OFFSET_OF_RETURNED_VALUE(UntaggedObjectPool, data));
     return 0;
   }
 
   static const intptr_t kBytesPerElement =
-      sizeof(ObjectPoolLayout::Entry) + sizeof(uint8_t);
+      sizeof(UntaggedObjectPool::Entry) + sizeof(uint8_t);
   static const intptr_t kMaxElements = kSmiMax / kBytesPerElement;
 
   static intptr_t InstanceSize(intptr_t len) {
     // Ensure that variable length data does not add to the object length.
-    ASSERT(sizeof(ObjectPoolLayout) ==
-           (sizeof(ObjectLayout) + (1 * kWordSize)));
+    ASSERT(sizeof(UntaggedObjectPool) ==
+           (sizeof(UntaggedObject) + (1 * kWordSize)));
     ASSERT(0 <= len && len <= kMaxElements);
-    return RoundedAllocationSize(sizeof(ObjectPoolLayout) +
+    return RoundedAllocationSize(sizeof(UntaggedObjectPool) +
                                  (len * kBytesPerElement));
   }
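Note: ObjectPool is a variable-length object: InstanceSize(len) adds len * kBytesPerElement to the header, kMaxElements is derived by dividing the byte budget, and element_offset scales an index by the entry size. The arithmetic, sketched with illustrative constants:

#include <cstddef>
#include <cstdint>

// Sketch of variable-length object sizing; all sizes are illustrative.
constexpr size_t kHeaderSize = 16;
constexpr size_t kEntrySize = sizeof(uint64_t);
constexpr size_t kBytesPerElement = kEntrySize + sizeof(uint8_t);

// Deriving the cap from the byte budget keeps len * kBytesPerElement
// from overflowing in the computation below.
constexpr size_t kMaxBytes = SIZE_MAX / 2;
constexpr size_t kMaxElements = kMaxBytes / kBytesPerElement;

constexpr size_t InstanceSize(size_t len) {
  return kHeaderSize + len * kBytesPerElement;  // then round up
}

constexpr size_t ElementOffset(size_t index) {
  return kHeaderSize + kEntrySize * index;  // entry bits stored separately
}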
 
@@ -5165,15 +5163,15 @@
   void DebugPrint() const;
 
  private:
-  ObjectPoolLayout::Entry const* EntryAddr(intptr_t index) const {
+  UntaggedObjectPool::Entry const* EntryAddr(intptr_t index) const {
     ASSERT((index >= 0) && (index < Length()));
-    return &raw_ptr()->data()[index];
+    return &untag()->data()[index];
   }
 
   FINAL_HEAP_OBJECT_IMPLEMENTATION(ObjectPool, Object);
   friend class Class;
   friend class Object;
-  friend class ObjectPoolLayout;
+  friend class UntaggedObjectPool;
 };
 
 class Instructions : public Object {
@@ -5189,23 +5187,23 @@
   class FlagsBits : public BitField<uint32_t, bool, kFlagsPos, kFlagsSize> {};
 
   // Excludes HeaderSize().
-  intptr_t Size() const { return SizeBits::decode(raw_ptr()->size_and_flags_); }
+  intptr_t Size() const { return SizeBits::decode(untag()->size_and_flags_); }
   static intptr_t Size(const InstructionsPtr instr) {
-    return SizeBits::decode(instr->ptr()->size_and_flags_);
+    return SizeBits::decode(instr->untag()->size_and_flags_);
   }
 
   bool HasMonomorphicEntry() const {
-    return FlagsBits::decode(raw_ptr()->size_and_flags_);
+    return FlagsBits::decode(untag()->size_and_flags_);
   }
   static bool HasMonomorphicEntry(const InstructionsPtr instr) {
-    return FlagsBits::decode(instr->ptr()->size_and_flags_);
+    return FlagsBits::decode(instr->untag()->size_and_flags_);
   }
 
-  uword PayloadStart() const { return PayloadStart(raw()); }
-  uword MonomorphicEntryPoint() const { return MonomorphicEntryPoint(raw()); }
-  uword EntryPoint() const { return EntryPoint(raw()); }
+  uword PayloadStart() const { return PayloadStart(ptr()); }
+  uword MonomorphicEntryPoint() const { return MonomorphicEntryPoint(ptr()); }
+  uword EntryPoint() const { return EntryPoint(ptr()); }
   static uword PayloadStart(const InstructionsPtr instr) {
-    return reinterpret_cast<uword>(instr->ptr()) + HeaderSize();
+    return reinterpret_cast<uword>(instr->untag()) + HeaderSize();
   }
 
 // Note: We keep the checked entrypoint offsets even (emitting NOPs if
@@ -5253,7 +5251,7 @@
   }
 
   static const intptr_t kMaxElements =
-      (kMaxInt32 - (sizeof(InstructionsLayout) + sizeof(ObjectLayout) +
+      (kMaxInt32 - (sizeof(UntaggedInstructions) + sizeof(UntaggedObject) +
                     (2 * kMaxObjectAlignment)));
 
   // Currently, we align bare instruction payloads on 4 byte boundaries.
@@ -5276,12 +5274,13 @@
       UNREACHABLE();
     }
 #endif
-    return Utils::RoundUp(sizeof(InstructionsLayout), kNonBarePayloadAlignment);
+    return Utils::RoundUp(sizeof(UntaggedInstructions),
+                          kNonBarePayloadAlignment);
   }
 
   static intptr_t InstanceSize() {
-    ASSERT_EQUAL(sizeof(InstructionsLayout),
-                 OFFSET_OF_RETURNED_VALUE(InstructionsLayout, data));
+    ASSERT_EQUAL(sizeof(UntaggedInstructions),
+                 OFFSET_OF_RETURNED_VALUE(UntaggedInstructions, data));
     return 0;
   }
 
@@ -5305,13 +5304,13 @@
   }
 
   bool Equals(const Instructions& other) const {
-    return Equals(raw(), other.raw());
+    return Equals(ptr(), other.ptr());
   }
 
   static bool Equals(InstructionsPtr a, InstructionsPtr b) {
     if (Size(a) != Size(b)) return false;
     NoSafepointScope no_safepoint;
-    return memcmp(a->ptr(), b->ptr(), InstanceSize(Size(a))) == 0;
+    return memcmp(a->untag(), b->untag(), InstanceSize(Size(a))) == 0;
   }
 
   uint32_t Hash() const {
@@ -5324,13 +5323,13 @@
  private:
   void SetSize(intptr_t value) const {
     ASSERT(value >= 0);
-    StoreNonPointer(&raw_ptr()->size_and_flags_,
-                    SizeBits::update(value, raw_ptr()->size_and_flags_));
+    StoreNonPointer(&untag()->size_and_flags_,
+                    SizeBits::update(value, untag()->size_and_flags_));
   }
 
   void SetHasMonomorphicEntry(bool value) const {
-    StoreNonPointer(&raw_ptr()->size_and_flags_,
-                    FlagsBits::update(value, raw_ptr()->size_and_flags_));
+    StoreNonPointer(&untag()->size_and_flags_,
+                    FlagsBits::update(value, untag()->size_and_flags_));
   }
 
   // New is a private method as RawInstruction and RawCode objects should
@@ -5361,11 +5360,11 @@
  public:
   // Excludes HeaderSize().
   static intptr_t Size(const InstructionsSectionPtr instr) {
-    return instr->ptr()->payload_length_;
+    return instr->untag()->payload_length_;
   }
   static intptr_t InstanceSize() {
-    ASSERT(sizeof(InstructionsSectionLayout) ==
-           OFFSET_OF_RETURNED_VALUE(InstructionsSectionLayout, data));
+    ASSERT(sizeof(UntaggedInstructionsSection) ==
+           OFFSET_OF_RETURNED_VALUE(UntaggedInstructionsSection, data));
     return 0;
   }
 
@@ -5374,7 +5373,7 @@
   }
 
   static intptr_t HeaderSize() {
-    return Utils::RoundUp(sizeof(InstructionsSectionLayout),
+    return Utils::RoundUp(sizeof(UntaggedInstructionsSection),
                           Instructions::kBarePayloadAlignment);
   }
 
@@ -5383,7 +5382,7 @@
 
  private:
   // Note there are no New() methods for InstructionsSection. Instead, the
-  // serializer writes the InstructionsSectionLayout object manually at the
+  // serializer writes the UntaggedInstructionsSection object manually at the
   // start of instructions Images in precompiled snapshots.
 
   FINAL_HEAP_OBJECT_IMPLEMENTATION(InstructionsSection, Object);
@@ -5398,31 +5397,32 @@
 
   void SetVar(intptr_t var_index,
               const String& name,
-              LocalVarDescriptorsLayout::VarInfo* info) const;
+              UntaggedLocalVarDescriptors::VarInfo* info) const;
 
   void GetInfo(intptr_t var_index,
-               LocalVarDescriptorsLayout::VarInfo* info) const;
+               UntaggedLocalVarDescriptors::VarInfo* info) const;
 
   static const intptr_t kBytesPerElement =
-      sizeof(LocalVarDescriptorsLayout::VarInfo);
-  static const intptr_t kMaxElements = LocalVarDescriptorsLayout::kMaxIndex;
+      sizeof(UntaggedLocalVarDescriptors::VarInfo);
+  static const intptr_t kMaxElements = UntaggedLocalVarDescriptors::kMaxIndex;
 
   static intptr_t InstanceSize() {
-    ASSERT(sizeof(LocalVarDescriptorsLayout) ==
-           OFFSET_OF_RETURNED_VALUE(LocalVarDescriptorsLayout, names));
+    ASSERT(sizeof(UntaggedLocalVarDescriptors) ==
+           OFFSET_OF_RETURNED_VALUE(UntaggedLocalVarDescriptors, names));
     return 0;
   }
   static intptr_t InstanceSize(intptr_t len) {
     ASSERT(0 <= len && len <= kMaxElements);
     return RoundedAllocationSize(
-        sizeof(LocalVarDescriptorsLayout) +
+        sizeof(UntaggedLocalVarDescriptors) +
         (len * kWordSize)  // RawStrings for names.
-        + (len * sizeof(LocalVarDescriptorsLayout::VarInfo)));
+        + (len * sizeof(UntaggedLocalVarDescriptors::VarInfo)));
   }
 
   static LocalVarDescriptorsPtr New(intptr_t num_variables);
 
-  static const char* KindToCString(LocalVarDescriptorsLayout::VarInfoKind kind);
+  static const char* KindToCString(
+      UntaggedLocalVarDescriptors::VarInfoKind kind);
 
  private:
   FINAL_HEAP_OBJECT_IMPLEMENTATION(LocalVarDescriptors, Object);
@@ -5435,14 +5435,14 @@
   static const intptr_t kBytesPerElement = 1;
   static const intptr_t kMaxElements = kMaxInt32 / kBytesPerElement;
 
-  static intptr_t HeaderSize() { return sizeof(PcDescriptorsLayout); }
+  static intptr_t HeaderSize() { return sizeof(UntaggedPcDescriptors); }
   static intptr_t UnroundedSize(PcDescriptorsPtr desc) {
-    return UnroundedSize(desc->ptr()->length_);
+    return UnroundedSize(desc->untag()->length_);
   }
   static intptr_t UnroundedSize(intptr_t len) { return HeaderSize() + len; }
   static intptr_t InstanceSize() {
-    ASSERT_EQUAL(sizeof(PcDescriptorsLayout),
-                 OFFSET_OF_RETURNED_VALUE(PcDescriptorsLayout, data));
+    ASSERT_EQUAL(sizeof(UntaggedPcDescriptors),
+                 OFFSET_OF_RETURNED_VALUE(UntaggedPcDescriptors, data));
     return 0;
   }
   static intptr_t InstanceSize(intptr_t len) {
@@ -5461,7 +5461,7 @@
 
   // We would have a VisitPointers function here to traverse the
   // pc descriptors table to visit objects if any in the table.
-  // Note: never return a reference to a PcDescriptorsLayout::PcDescriptorRec
+  // Note: never return a reference to an UntaggedPcDescriptors::PcDescriptorRec
   // as the object can move.
   class Iterator : ValueObject {
    public:
@@ -5474,21 +5474,21 @@
           cur_deopt_id_(0),
           cur_token_pos_(0),
           cur_try_index_(0),
-          cur_yield_index_(PcDescriptorsLayout::kInvalidYieldIndex) {}
+          cur_yield_index_(UntaggedPcDescriptors::kInvalidYieldIndex) {}
 
     bool MoveNext() {
       NoSafepointScope scope;
-      ReadStream stream(descriptors_.raw_ptr()->data(), descriptors_.Length(),
+      ReadStream stream(descriptors_.untag()->data(), descriptors_.Length(),
                         byte_index_);
       // Moves to record that matches kind_mask_.
       while (byte_index_ < descriptors_.Length()) {
         const int32_t kind_and_metadata = stream.ReadSLEB128<int32_t>();
-        cur_kind_ =
-            PcDescriptorsLayout::KindAndMetadata::DecodeKind(kind_and_metadata);
-        cur_try_index_ = PcDescriptorsLayout::KindAndMetadata::DecodeTryIndex(
+        cur_kind_ = UntaggedPcDescriptors::KindAndMetadata::DecodeKind(
+            kind_and_metadata);
+        cur_try_index_ = UntaggedPcDescriptors::KindAndMetadata::DecodeTryIndex(
             kind_and_metadata);
         cur_yield_index_ =
-            PcDescriptorsLayout::KindAndMetadata::DecodeYieldIndex(
+            UntaggedPcDescriptors::KindAndMetadata::DecodeYieldIndex(
                 kind_and_metadata);
 
         cur_pc_offset_ += stream.ReadSLEB128();
@@ -5514,8 +5514,8 @@
     }
     intptr_t TryIndex() const { return cur_try_index_; }
     intptr_t YieldIndex() const { return cur_yield_index_; }
-    PcDescriptorsLayout::Kind Kind() const {
-      return static_cast<PcDescriptorsLayout::Kind>(cur_kind_);
+    UntaggedPcDescriptors::Kind Kind() const {
+      return static_cast<UntaggedPcDescriptors::Kind>(cur_kind_);
     }
 
    private:
@@ -5552,11 +5552,11 @@
       return false;
     }
     NoSafepointScope no_safepoint;
-    return memcmp(raw_ptr(), other.raw_ptr(), InstanceSize(Length())) == 0;
+    return memcmp(untag(), other.untag(), InstanceSize(Length())) == 0;
   }
 
  private:
-  static const char* KindAsStr(PcDescriptorsLayout::Kind kind);
+  static const char* KindAsStr(UntaggedPcDescriptors::Kind kind);
 
   static PcDescriptorsPtr New(intptr_t length);
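Note: MoveNext above decodes each record as SLEB128 deltas read from a ReadStream. A sketch of the decoding itself: seven payload bits per byte, the high bit marks continuation, and the final byte's 0x40 bit triggers sign extension:

#include <cstdint>

// Sketch of 32-bit SLEB128 decoding as used by the iterator's stream.
// Assumes a well-formed encoding of at most five bytes.
int32_t ReadSLEB128(const uint8_t* data, intptr_t* index) {
  uint32_t result = 0;
  int shift = 0;
  uint8_t byte;
  do {
    byte = data[(*index)++];
    result |= static_cast<uint32_t>(byte & 0x7f) << shift;
    shift += 7;
  } while ((byte & 0x80) != 0);
  if (shift < 32 && (byte & 0x40) != 0) {
    result |= ~uint32_t{0} << shift;  // sign-extend the final byte
  }
  return static_cast<int32_t>(result);
}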
 
@@ -5573,14 +5573,14 @@
   static const intptr_t kBytesPerElement = 1;
   static const intptr_t kMaxElements = kMaxInt32 / kBytesPerElement;
 
-  static intptr_t HeaderSize() { return sizeof(CodeSourceMapLayout); }
+  static intptr_t HeaderSize() { return sizeof(UntaggedCodeSourceMap); }
   static intptr_t UnroundedSize(CodeSourceMapPtr map) {
-    return UnroundedSize(map->ptr()->length_);
+    return UnroundedSize(map->untag()->length_);
   }
   static intptr_t UnroundedSize(intptr_t len) { return HeaderSize() + len; }
   static intptr_t InstanceSize() {
-    ASSERT_EQUAL(sizeof(CodeSourceMapLayout),
-                 OFFSET_OF_RETURNED_VALUE(CodeSourceMapLayout, data));
+    ASSERT_EQUAL(sizeof(UntaggedCodeSourceMap),
+                 OFFSET_OF_RETURNED_VALUE(UntaggedCodeSourceMap, data));
     return 0;
   }
   static intptr_t InstanceSize(intptr_t len) {
@@ -5590,17 +5590,15 @@
 
   static CodeSourceMapPtr New(intptr_t length);
 
-  intptr_t Length() const { return raw_ptr()->length_; }
-  uint8_t* Data() const {
-    return UnsafeMutableNonPointer(&raw_ptr()->data()[0]);
-  }
+  intptr_t Length() const { return untag()->length_; }
+  uint8_t* Data() const { return UnsafeMutableNonPointer(&untag()->data()[0]); }
 
   bool Equals(const CodeSourceMap& other) const {
     if (Length() != other.Length()) {
       return false;
     }
     NoSafepointScope no_safepoint;
-    return memcmp(raw_ptr(), other.raw_ptr(), InstanceSize(Length())) == 0;
+    return memcmp(untag(), other.untag(), InstanceSize(Length())) == 0;
   }
 
   void PrintToJSONObject(JSONObject* jsobj, bool ref) const;
@@ -5617,27 +5615,26 @@
  public:
   static const intptr_t kHashBits = 30;
 
-  uintptr_t payload_size() const { return PayloadSizeOf(raw()); }
+  uintptr_t payload_size() const { return PayloadSizeOf(ptr()); }
   static uintptr_t PayloadSizeOf(const CompressedStackMapsPtr raw) {
-    return CompressedStackMapsLayout::SizeField::decode(
-        raw->ptr()->flags_and_size_);
+    return UntaggedCompressedStackMaps::SizeField::decode(
+        raw->untag()->flags_and_size_);
   }
 
   bool Equals(const CompressedStackMaps& other) const {
     // All of the table flags and payload size must match.
-    if (raw_ptr()->flags_and_size_ != other.raw_ptr()->flags_and_size_) {
+    if (untag()->flags_and_size_ != other.untag()->flags_and_size_) {
       return false;
     }
     NoSafepointScope no_safepoint;
-    return memcmp(raw_ptr(), other.raw_ptr(), InstanceSize(payload_size())) ==
-           0;
+    return memcmp(untag(), other.untag(), InstanceSize(payload_size())) == 0;
   }
 
   // Methods to allow use with PointerKeyValueTrait to create sets of CSMs.
   bool Equals(const CompressedStackMaps* other) const { return Equals(*other); }
   intptr_t Hashcode() const;
 
-  static intptr_t HeaderSize() { return sizeof(CompressedStackMapsLayout); }
+  static intptr_t HeaderSize() { return sizeof(UntaggedCompressedStackMaps); }
   static intptr_t UnroundedSize(CompressedStackMapsPtr maps) {
     return UnroundedSize(CompressedStackMaps::PayloadSizeOf(maps));
   }
@@ -5645,24 +5642,24 @@
     return HeaderSize() + length;
   }
   static intptr_t InstanceSize() {
-    ASSERT_EQUAL(sizeof(CompressedStackMapsLayout),
-                 OFFSET_OF_RETURNED_VALUE(CompressedStackMapsLayout, data));
+    ASSERT_EQUAL(sizeof(UntaggedCompressedStackMaps),
+                 OFFSET_OF_RETURNED_VALUE(UntaggedCompressedStackMaps, data));
     return 0;
   }
   static intptr_t InstanceSize(intptr_t length) {
     return RoundedAllocationSize(UnroundedSize(length));
   }
 
-  bool UsesGlobalTable() const { return UsesGlobalTable(raw()); }
+  bool UsesGlobalTable() const { return UsesGlobalTable(ptr()); }
   static bool UsesGlobalTable(const CompressedStackMapsPtr raw) {
-    return CompressedStackMapsLayout::UsesTableBit::decode(
-        raw->ptr()->flags_and_size_);
+    return UntaggedCompressedStackMaps::UsesTableBit::decode(
+        raw->untag()->flags_and_size_);
   }
 
-  bool IsGlobalTable() const { return IsGlobalTable(raw()); }
+  bool IsGlobalTable() const { return IsGlobalTable(ptr()); }
   static bool IsGlobalTable(const CompressedStackMapsPtr raw) {
-    return CompressedStackMapsLayout::GlobalTableBit::decode(
-        raw->ptr()->flags_and_size_);
+    return UntaggedCompressedStackMaps::GlobalTableBit::decode(
+        raw->untag()->flags_and_size_);
   }
 
   static CompressedStackMapsPtr NewInlined(const void* payload, intptr_t size) {
@@ -5791,12 +5788,12 @@
   bool HasCatchAll(intptr_t try_index) const;
 
   static intptr_t InstanceSize() {
-    ASSERT(sizeof(ExceptionHandlersLayout) ==
-           OFFSET_OF_RETURNED_VALUE(ExceptionHandlersLayout, data));
+    ASSERT(sizeof(UntaggedExceptionHandlers) ==
+           OFFSET_OF_RETURNED_VALUE(UntaggedExceptionHandlers, data));
     return 0;
   }
   static intptr_t InstanceSize(intptr_t len) {
-    return RoundedAllocationSize(sizeof(ExceptionHandlersLayout) +
+    return RoundedAllocationSize(sizeof(UntaggedExceptionHandlers) +
                                  (len * sizeof(ExceptionHandlerInfo)));
   }
 
@@ -5845,7 +5842,7 @@
 //  * Code::owner_
 class WeakSerializationReference : public Object {
  public:
-  ObjectPtr target() const { return TargetOf(raw()); }
+  ObjectPtr target() const { return TargetOf(ptr()); }
   static ObjectPtr TargetOf(const WeakSerializationReferencePtr raw) {
 #if defined(DART_PRECOMPILED_RUNTIME)
     // WSRs in the precompiled runtime only contain some remaining info about
@@ -5853,23 +5850,23 @@
     return Object::null();
 #else
     // Outside the precompiled runtime, they should always have a target.
-    ASSERT(raw->ptr()->target() != Object::null());
-    return raw->ptr()->target();
+    ASSERT(raw->untag()->target() != Object::null());
+    return raw->untag()->target();
 #endif
   }
 
-  classid_t TargetClassId() const { return TargetClassIdOf(raw()); }
+  classid_t TargetClassId() const { return TargetClassIdOf(ptr()); }
   static classid_t TargetClassIdOf(const WeakSerializationReferencePtr raw) {
 #if defined(DART_PRECOMPILED_RUNTIME)
     // No new instances of WSRs are created in the precompiled runtime, so
     // this instance came from deserialization and thus must be the empty WSR.
-    return raw->ptr()->cid_;
+    return raw->untag()->cid_;
 #else
     return TargetOf(raw)->GetClassId();
 #endif
   }
 
-  static ObjectPtr Unwrap(const Object& obj) { return Unwrap(obj.raw()); }
+  static ObjectPtr Unwrap(const Object& obj) { return Unwrap(obj.ptr()); }
   // Gets the underlying object from a WSR, or the original object if it is
   // not one. Notably, Unwrap(Wrap(r)) == r for all raw objects r, whether
   // CanWrap(r) or not. However, this will not hold if a serialization and
@@ -5883,7 +5880,7 @@
   // WSR is returned. Useful for cases where we want to call Object methods
   // like ToCString() on whatever non-null object we can get.
   static ObjectPtr UnwrapIfTarget(const Object& obj) {
-    return UnwrapIfTarget(obj.raw());
+    return UnwrapIfTarget(obj.ptr());
   }
   static ObjectPtr UnwrapIfTarget(ObjectPtr raw) {
 #if defined(DART_PRECOMPILED_RUNTIME)
@@ -5898,7 +5895,7 @@
   }
 
   static classid_t UnwrappedClassIdOf(const Object& obj) {
-    return UnwrappedClassIdOf(obj.raw());
+    return UnwrappedClassIdOf(obj.ptr());
   }
   // Gets the class ID of the underlying object from a WSR, or the class ID of
   // the object if it is not one.
@@ -5913,7 +5910,7 @@
   }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(WeakSerializationReferenceLayout));
+    return RoundedAllocationSize(sizeof(UntaggedWeakSerializationReference));
   }
 
 #if defined(DART_PRECOMPILER)
@@ -5921,7 +5918,7 @@
   static bool CanWrap(const Object& object);
 
   // This returns ObjectPtr, not WeakSerializationReferencePtr, because
-  // target.raw() is returned when CanWrap(target) is false.
+  // target.ptr() is returned when CanWrap(target) is false.
   static ObjectPtr Wrap(Zone* zone, const Object& target);
 #endif
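Note: Wrap only wraps when CanWrap holds, so Unwrap must accept both wrapped and plain objects, and the identity Unwrap(Wrap(r)) == r falls out. A toy sketch of that contract; Obj is a hypothetical stand-in:

// Toy sketch of the Wrap/Unwrap contract.
struct Obj {
  bool is_wsr = false;
  Obj* target = nullptr;
};

Obj* Unwrap(Obj* obj) {
  return (obj != nullptr && obj->is_wsr) ? obj->target : obj;
}

// Returns the target itself when wrapping is not possible, so callers
// treat the result uniformly.
Obj* Wrap(Obj* wsr_storage, Obj* target, bool can_wrap) {
  if (!can_wrap) return target;
  wsr_storage->is_wsr = true;
  wsr_storage->target = target;
  return wsr_storage;
}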
 
@@ -5938,18 +5935,18 @@
     UNREACHABLE();
     return NULL;
 #else
-    return raw_ptr()->active_instructions();
+    return untag()->active_instructions();
 #endif
   }
 
   // When dual mapping, these return the executable view.
-  InstructionsPtr instructions() const { return raw_ptr()->instructions(); }
+  InstructionsPtr instructions() const { return untag()->instructions(); }
   static InstructionsPtr InstructionsOf(const CodePtr code) {
-    return code->ptr()->instructions();
+    return code->untag()->instructions();
   }
 
   static intptr_t saved_instructions_offset() {
-    return OFFSET_OF(CodeLayout, instructions_);
+    return OFFSET_OF(UntaggedCode, instructions_);
   }
 
   using EntryKind = CodeEntryKind;
@@ -5960,54 +5957,55 @@
   static intptr_t entry_point_offset(EntryKind kind = EntryKind::kNormal) {
     switch (kind) {
       case EntryKind::kNormal:
-        return OFFSET_OF(CodeLayout, entry_point_);
+        return OFFSET_OF(UntaggedCode, entry_point_);
       case EntryKind::kUnchecked:
-        return OFFSET_OF(CodeLayout, unchecked_entry_point_);
+        return OFFSET_OF(UntaggedCode, unchecked_entry_point_);
       case EntryKind::kMonomorphic:
-        return OFFSET_OF(CodeLayout, monomorphic_entry_point_);
+        return OFFSET_OF(UntaggedCode, monomorphic_entry_point_);
       case EntryKind::kMonomorphicUnchecked:
-        return OFFSET_OF(CodeLayout, monomorphic_unchecked_entry_point_);
+        return OFFSET_OF(UntaggedCode, monomorphic_unchecked_entry_point_);
       default:
         UNREACHABLE();
     }
   }
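 
   // Illustrative use: a caller selecting the unchecked entry, for example,
   // would write
   //
   //   const intptr_t offset =
   //       Code::entry_point_offset(Code::EntryKind::kUnchecked);
   //
   // which, per the switch above, resolves to
   // OFFSET_OF(UntaggedCode, unchecked_entry_point_).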
 
-  ObjectPoolPtr object_pool() const { return raw_ptr()->object_pool(); }
+  ObjectPoolPtr object_pool() const { return untag()->object_pool(); }
   static intptr_t object_pool_offset() {
-    return OFFSET_OF(CodeLayout, object_pool_);
+    return OFFSET_OF(UntaggedCode, object_pool_);
   }
 
   intptr_t pointer_offsets_length() const {
-    return PtrOffBits::decode(raw_ptr()->state_bits_);
+    return PtrOffBits::decode(untag()->state_bits_);
   }
 
   bool is_optimized() const {
-    return OptimizedBit::decode(raw_ptr()->state_bits_);
+    return OptimizedBit::decode(untag()->state_bits_);
   }
   void set_is_optimized(bool value) const;
   static bool IsOptimized(CodePtr code) {
-    return Code::OptimizedBit::decode(code->ptr()->state_bits_);
+    return Code::OptimizedBit::decode(code->untag()->state_bits_);
   }
 
   bool is_force_optimized() const {
-    return ForceOptimizedBit::decode(raw_ptr()->state_bits_);
+    return ForceOptimizedBit::decode(untag()->state_bits_);
   }
   void set_is_force_optimized(bool value) const;
 
-  bool is_alive() const { return AliveBit::decode(raw_ptr()->state_bits_); }
+  bool is_alive() const { return AliveBit::decode(untag()->state_bits_); }
   void set_is_alive(bool value) const;
 
-  bool HasMonomorphicEntry() const { return HasMonomorphicEntry(raw()); }
+  bool HasMonomorphicEntry() const { return HasMonomorphicEntry(ptr()); }
   static bool HasMonomorphicEntry(const CodePtr code) {
 #if defined(DART_PRECOMPILED_RUNTIME)
-    return code->ptr()->entry_point_ != code->ptr()->monomorphic_entry_point_;
+    return code->untag()->entry_point_ !=
+           code->untag()->monomorphic_entry_point_;
 #else
     return Instructions::HasMonomorphicEntry(InstructionsOf(code));
 #endif
   }
 
   // Returns the payload start of [instructions()].
-  uword PayloadStart() const { return PayloadStartOf(raw()); }
+  uword PayloadStart() const { return PayloadStartOf(ptr()); }
   static uword PayloadStartOf(const CodePtr code) {
 #if defined(DART_PRECOMPILED_RUNTIME)
     const uword entry_offset = HasMonomorphicEntry(code)
@@ -6020,10 +6018,10 @@
   }
 
   // Returns the entry point of [instructions()].
-  uword EntryPoint() const { return EntryPointOf(raw()); }
+  uword EntryPoint() const { return EntryPointOf(ptr()); }
   static uword EntryPointOf(const CodePtr code) {
 #if defined(DART_PRECOMPILED_RUNTIME)
-    return code->ptr()->entry_point_;
+    return code->untag()->entry_point_;
 #else
     return Instructions::EntryPoint(InstructionsOf(code));
 #endif
@@ -6032,15 +6030,15 @@
   // Returns the unchecked entry point of [instructions()].
   uword UncheckedEntryPoint() const {
 #if defined(DART_PRECOMPILED_RUNTIME)
-    return raw_ptr()->unchecked_entry_point_;
+    return untag()->unchecked_entry_point_;
 #else
-    return EntryPoint() + raw_ptr()->unchecked_offset_;
+    return EntryPoint() + untag()->unchecked_offset_;
 #endif
   }
   // Returns the monomorphic entry point of [instructions()].
   uword MonomorphicEntryPoint() const {
 #if defined(DART_PRECOMPILED_RUNTIME)
-    return raw_ptr()->monomorphic_entry_point_;
+    return untag()->monomorphic_entry_point_;
 #else
     return Instructions::MonomorphicEntryPoint(instructions());
 #endif
@@ -6048,17 +6046,17 @@
   // Returns the unchecked monomorphic entry point of [instructions()].
   uword MonomorphicUncheckedEntryPoint() const {
 #if defined(DART_PRECOMPILED_RUNTIME)
-    return raw_ptr()->monomorphic_unchecked_entry_point_;
+    return untag()->monomorphic_unchecked_entry_point_;
 #else
-    return MonomorphicEntryPoint() + raw_ptr()->unchecked_offset_;
+    return MonomorphicEntryPoint() + untag()->unchecked_offset_;
 #endif
   }
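 
   // Note: in the JIT (non-precompiled) configuration, the getters above
   // relate the four entry points through one stored offset:
   //
   //   UncheckedEntryPoint()            == EntryPoint() + unchecked_offset_
   //   MonomorphicUncheckedEntryPoint() ==
   //       MonomorphicEntryPoint() + unchecked_offset_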
 
   // Returns the size of [instructions()].
-  intptr_t Size() const { return PayloadSizeOf(raw()); }
+  intptr_t Size() const { return PayloadSizeOf(ptr()); }
   static intptr_t PayloadSizeOf(const CodePtr code) {
 #if defined(DART_PRECOMPILED_RUNTIME)
-    return code->ptr()->instructions_length_;
+    return code->untag()->instructions_length_;
 #else
     return Instructions::Size(InstructionsOf(code));
 #endif
@@ -6067,32 +6065,30 @@
   ObjectPoolPtr GetObjectPool() const;
   // Returns whether the given PC address is in [instructions()].
   bool ContainsInstructionAt(uword addr) const {
-    return ContainsInstructionAt(raw(), addr);
+    return ContainsInstructionAt(ptr(), addr);
   }
 
   // Returns whether the given PC address is in [InstructionsOf(code)].
   static bool ContainsInstructionAt(const CodePtr code, uword pc) {
-    return CodeLayout::ContainsPC(code, pc);
+    return UntaggedCode::ContainsPC(code, pc);
   }
 
   // Returns true if there is a debugger breakpoint set in this code object.
   bool HasBreakpoint() const;
 
-  PcDescriptorsPtr pc_descriptors() const {
-    return raw_ptr()->pc_descriptors();
-  }
+  PcDescriptorsPtr pc_descriptors() const { return untag()->pc_descriptors(); }
   void set_pc_descriptors(const PcDescriptors& descriptors) const {
     ASSERT(descriptors.IsOld());
-    raw_ptr()->set_pc_descriptors(descriptors.raw());
+    untag()->set_pc_descriptors(descriptors.ptr());
   }
 
   CodeSourceMapPtr code_source_map() const {
-    return raw_ptr()->code_source_map();
+    return untag()->code_source_map();
   }
 
   void set_code_source_map(const CodeSourceMap& code_source_map) const {
     ASSERT(code_source_map.IsOld());
-    raw_ptr()->set_code_source_map(code_source_map.raw());
+    untag()->set_code_source_map(code_source_map.ptr());
   }
 
   // Array of DeoptInfo objects.
@@ -6101,7 +6097,7 @@
     UNREACHABLE();
     return NULL;
 #else
-    return raw_ptr()->deopt_info_array();
+    return untag()->deopt_info_array();
 #endif
   }
   void set_deopt_info_array(const Array& array) const;
@@ -6117,7 +6113,7 @@
 #endif
 
   CompressedStackMapsPtr compressed_stackmaps() const {
-    return raw_ptr()->compressed_stackmaps();
+    return untag()->compressed_stackmaps();
   }
   void set_compressed_stackmaps(const CompressedStackMaps& maps) const;
 
@@ -6157,7 +6153,7 @@
     UNREACHABLE();
     return NULL;
 #else
-    return raw_ptr()->static_calls_target_table();
+    return untag()->static_calls_target_table();
 #endif
   }
 
@@ -6210,7 +6206,7 @@
     UNREACHABLE();
     return NULL;
 #else
-    return raw_ptr()->return_address_metadata();
+    return untag()->return_address_metadata();
 #endif
   }
   // Sets |return_address_metadata|.
@@ -6251,7 +6247,7 @@
     UNREACHABLE();
     return NULL;
 #else
-    return raw_ptr()->var_descriptors();
+    return untag()->var_descriptors();
 #endif
   }
   void set_var_descriptors(const LocalVarDescriptors& value) const {
@@ -6259,7 +6255,7 @@
     UNREACHABLE();
 #else
     ASSERT(value.IsOld());
-    raw_ptr()->set_var_descriptors(value.raw());
+    untag()->set_var_descriptors(value.ptr());
 #endif
   }
 
@@ -6267,11 +6263,11 @@
   LocalVarDescriptorsPtr GetLocalVarDescriptors() const;
 
   ExceptionHandlersPtr exception_handlers() const {
-    return raw_ptr()->exception_handlers();
+    return untag()->exception_handlers();
   }
   void set_exception_handlers(const ExceptionHandlers& handlers) const {
     ASSERT(handlers.IsOld());
-    raw_ptr()->set_exception_handlers(handlers.raw());
+    untag()->set_exception_handlers(handlers.ptr());
   }
 
   // WARNING: function() returns the owner which is not guaranteed to be
@@ -6283,33 +6279,36 @@
   FunctionPtr function() const {
     ASSERT(IsFunctionCode());
     return Function::RawCast(
-        WeakSerializationReference::Unwrap(raw_ptr()->owner()));
+        WeakSerializationReference::Unwrap(untag()->owner()));
   }
 
-  ObjectPtr owner() const { return raw_ptr()->owner(); }
+  ObjectPtr owner() const { return untag()->owner(); }
   void set_owner(const Object& owner) const;
 
-  classid_t OwnerClassId() const { return OwnerClassIdOf(raw()); }
+  classid_t OwnerClassId() const { return OwnerClassIdOf(ptr()); }
   static classid_t OwnerClassIdOf(CodePtr raw) {
-    return WeakSerializationReference::UnwrappedClassIdOf(raw->ptr()->owner());
+    return WeakSerializationReference::UnwrappedClassIdOf(
+        raw->untag()->owner());
   }
 
-  static intptr_t owner_offset() { return OFFSET_OF(CodeLayout, owner_); }
+  static intptr_t owner_offset() { return OFFSET_OF(UntaggedCode, owner_); }
 
   // We would have a VisitPointers function here to traverse all the
   // embedded objects in the instructions using pointer_offsets.
 
   static const intptr_t kBytesPerElement =
-      sizeof(reinterpret_cast<CodeLayout*>(0)->data()[0]);
+      sizeof(reinterpret_cast<UntaggedCode*>(0)->data()[0]);
   static const intptr_t kMaxElements = kSmiMax / kBytesPerElement;
 
   static intptr_t InstanceSize() {
-    ASSERT(sizeof(CodeLayout) == OFFSET_OF_RETURNED_VALUE(CodeLayout, data));
+    ASSERT(sizeof(UntaggedCode) ==
+           OFFSET_OF_RETURNED_VALUE(UntaggedCode, data));
     return 0;
   }
   static intptr_t InstanceSize(intptr_t len) {
     ASSERT(0 <= len && len <= kMaxElements);
-    return RoundedAllocationSize(sizeof(CodeLayout) + (len * kBytesPerElement));
+    return RoundedAllocationSize(sizeof(UntaggedCode) +
+                                 (len * kBytesPerElement));
   }
 #if !defined(DART_PRECOMPILED_RUNTIME)
   // Finalizes the generated code, by generating various kinds of metadata (e.g.
@@ -6362,7 +6361,7 @@
 
   // Finds the pc for the given deopt id and kind; returns 0 if not found.
   uword GetPcForDeoptId(intptr_t deopt_id,
-                        PcDescriptorsLayout::Kind kind) const;
+                        UntaggedPcDescriptors::Kind kind) const;
   intptr_t GetDeoptIdForOsr(uword pc) const;
 
   const char* Name() const;
@@ -6372,7 +6371,7 @@
 #if defined(PRODUCT)
     return 0;
 #else
-    return raw_ptr()->compile_timestamp_;
+    return untag()->compile_timestamp_;
 #endif
   }
 
@@ -6391,25 +6390,26 @@
     ResetActiveInstructions();
   }
 
-  bool IsDisabled() const { return IsDisabled(raw()); }
+  bool IsDisabled() const { return IsDisabled(ptr()); }
   static bool IsDisabled(CodePtr code) {
 #if defined(DART_PRECOMPILED_RUNTIME)
     UNREACHABLE();
     return false;
 #else
-    return code->ptr()->instructions() != code->ptr()->active_instructions();
+    return code->untag()->instructions() !=
+           code->untag()->active_instructions();
 #endif
   }
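 
   // Note: outside the precompiled runtime, a Code object is "disabled"
   // exactly while active_instructions() has been redirected away from
   // instructions(); ResetActiveInstructions() (called above) restores the
   // two to match, making IsDisabled() false again.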
 
   void set_object_pool(ObjectPoolPtr object_pool) const {
-    raw_ptr()->set_object_pool(object_pool);
+    untag()->set_object_pool(object_pool);
   }
 
  private:
   void set_state_bits(intptr_t bits) const;
 
-  friend class ObjectLayout;  // For ObjectLayout::SizeFromClass().
-  friend class CodeLayout;
+  friend class UntaggedObject;  // For UntaggedObject::SizeFromClass().
+  friend class UntaggedCode;
   enum {
     kOptimizedBit = 0,
     kForceOptimizedBit = 1,
@@ -6449,7 +6449,7 @@
 #if defined(PRODUCT)
     UNREACHABLE();
 #else
-    StoreNonPointer(&raw_ptr()->compile_timestamp_, timestamp);
+    StoreNonPointer(&untag()->compile_timestamp_, timestamp);
 #endif
   }
 
@@ -6470,36 +6470,36 @@
 
   void set_instructions(const Instructions& instructions) const {
     ASSERT(Thread::Current()->IsMutatorThread() || !is_alive());
-    raw_ptr()->set_instructions(instructions.raw());
+    untag()->set_instructions(instructions.ptr());
   }
 #if !defined(DART_PRECOMPILED_RUNTIME)
   void set_unchecked_offset(uword offset) const {
-    StoreNonPointer(&raw_ptr()->unchecked_offset_, offset);
+    StoreNonPointer(&untag()->unchecked_offset_, offset);
   }
 #endif
 
   // Returns the unchecked entry point offset for [instructions_].
   uint32_t UncheckedEntryPointOffset() const {
-    return UncheckedEntryPointOffsetOf(raw());
+    return UncheckedEntryPointOffsetOf(ptr());
   }
   static uint32_t UncheckedEntryPointOffsetOf(CodePtr code) {
 #if defined(DART_PRECOMPILED_RUNTIME)
     UNREACHABLE();
 #else
-    return code->ptr()->unchecked_offset_;
+    return code->untag()->unchecked_offset_;
 #endif
   }
 
   void set_pointer_offsets_length(intptr_t value) {
     // The number of fixups is limited to 2^30 (about one billion).
     ASSERT(Utils::IsUint(30, value));
-    set_state_bits(PtrOffBits::update(value, raw_ptr()->state_bits_));
+    set_state_bits(PtrOffBits::update(value, untag()->state_bits_));
   }
   int32_t* PointerOffsetAddrAt(int index) const {
     ASSERT(index >= 0);
     ASSERT(index < pointer_offsets_length());
     // TODO(iposva): Unit test is missing for this functionality.
-    return &UnsafeMutableNonPointer(raw_ptr()->data())[index];
+    return &UnsafeMutableNonPointer(untag()->data())[index];
   }
   void SetPointerOffsetAt(int index, int32_t offset_in_instructions) {
     NoSafepointScope no_safepoint;
@@ -6530,31 +6530,33 @@
   friend class MegamorphicCacheTable;  // for set_object_pool
   friend class CodePatcher;            // for set_instructions
   friend class ProgramVisitor;         // for set_instructions
-  // So that the FunctionLayout pointer visitor can determine whether code the
+  // So that the UntaggedFunction pointer visitor can determine whether code the
   // function points to is optimized.
-  friend class FunctionLayout;
+  friend class UntaggedFunction;
   friend class CallSiteResetter;
   friend class CodeKeyValueTrait;  // for UncheckedEntryPointOffset
 };
 
 class Context : public Object {
  public:
-  ContextPtr parent() const { return raw_ptr()->parent(); }
+  ContextPtr parent() const { return untag()->parent(); }
   void set_parent(const Context& parent) const {
-    raw_ptr()->set_parent(parent.raw());
+    untag()->set_parent(parent.ptr());
   }
-  static intptr_t parent_offset() { return OFFSET_OF(ContextLayout, parent_); }
+  static intptr_t parent_offset() {
+    return OFFSET_OF(UntaggedContext, parent_);
+  }
 
-  intptr_t num_variables() const { return raw_ptr()->num_variables_; }
+  intptr_t num_variables() const { return untag()->num_variables_; }
   static intptr_t num_variables_offset() {
-    return OFFSET_OF(ContextLayout, num_variables_);
+    return OFFSET_OF(UntaggedContext, num_variables_);
   }
   static intptr_t NumVariables(const ContextPtr context) {
-    return context->ptr()->num_variables_;
+    return context->untag()->num_variables_;
   }
 
   ObjectPtr At(intptr_t context_index) const {
-    return raw_ptr()->element(context_index);
+    return untag()->element(context_index);
   }
   inline void SetAt(intptr_t context_index, const Object& value) const;
 
@@ -6575,7 +6577,7 @@
   static const intptr_t kIsSyncIndex = 2;
 
   static intptr_t variable_offset(intptr_t context_index) {
-    return OFFSET_OF_RETURNED_VALUE(ContextLayout, data) +
+    return OFFSET_OF_RETURNED_VALUE(UntaggedContext, data) +
            (kWordSize * context_index);
   }
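 
   // Example, assuming a 64-bit target (kWordSize == 8): variable_offset(0)
   // is the offset of data() itself, and variable_offset(3) lies 24 bytes
   // past it, i.e. the fourth context slot.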
 
@@ -6584,14 +6586,14 @@
   }
 
   static intptr_t InstanceSize() {
-    ASSERT(sizeof(ContextLayout) ==
-           OFFSET_OF_RETURNED_VALUE(ContextLayout, data));
+    ASSERT(sizeof(UntaggedContext) ==
+           OFFSET_OF_RETURNED_VALUE(UntaggedContext, data));
     return 0;
   }
 
   static intptr_t InstanceSize(intptr_t len) {
     ASSERT(IsValidLength(len));
-    return RoundedAllocationSize(sizeof(ContextLayout) +
+    return RoundedAllocationSize(sizeof(UntaggedContext) +
                                  (len * kBytesPerElement));
   }
 
@@ -6599,7 +6601,7 @@
 
  private:
   void set_num_variables(intptr_t num_variables) const {
-    StoreNonPointer(&raw_ptr()->num_variables_, num_variables);
+    StoreNonPointer(&untag()->num_variables_, num_variables);
   }
 
   FINAL_HEAP_OBJECT_IMPLEMENTATION(Context, Object);
@@ -6619,7 +6621,7 @@
 // which is true if the ContextScope was created for an implicit closure.
 class ContextScope : public Object {
  public:
-  intptr_t num_variables() const { return raw_ptr()->num_variables_; }
+  intptr_t num_variables() const { return untag()->num_variables_; }
 
   TokenPosition TokenIndexAt(intptr_t scope_index) const;
   void SetTokenIndexAt(intptr_t scope_index, TokenPosition token_pos) const;
@@ -6659,18 +6661,18 @@
   void SetContextLevelAt(intptr_t scope_index, intptr_t context_level) const;
 
   static const intptr_t kBytesPerElement =
-      sizeof(ContextScopeLayout::VariableDesc);
+      sizeof(UntaggedContextScope::VariableDesc);
   static const intptr_t kMaxElements = kSmiMax / kBytesPerElement;
 
   static intptr_t InstanceSize() {
-    ASSERT(sizeof(ContextScopeLayout) ==
-           OFFSET_OF_RETURNED_VALUE(ContextScopeLayout, data));
+    ASSERT(sizeof(UntaggedContextScope) ==
+           OFFSET_OF_RETURNED_VALUE(UntaggedContextScope, data));
     return 0;
   }
 
   static intptr_t InstanceSize(intptr_t len) {
     ASSERT(0 <= len && len <= kMaxElements);
-    return RoundedAllocationSize(sizeof(ContextScopeLayout) +
+    return RoundedAllocationSize(sizeof(UntaggedContextScope) +
                                  (len * kBytesPerElement));
   }
 
@@ -6678,17 +6680,17 @@
 
  private:
   void set_num_variables(intptr_t num_variables) const {
-    StoreNonPointer(&raw_ptr()->num_variables_, num_variables);
+    StoreNonPointer(&untag()->num_variables_, num_variables);
   }
 
   void set_is_implicit(bool is_implicit) const {
-    StoreNonPointer(&raw_ptr()->is_implicit_, is_implicit);
+    StoreNonPointer(&untag()->is_implicit_, is_implicit);
   }
 
-  const ContextScopeLayout::VariableDesc* VariableDescAddr(
+  const UntaggedContextScope::VariableDesc* VariableDescAddr(
       intptr_t index) const {
     ASSERT((index >= 0) && (index < num_variables()));
-    return raw_ptr()->VariableDescAddr(index);
+    return untag()->VariableDescAddr(index);
   }
 
   bool GetFlagAt(intptr_t scope_index, intptr_t mask) const;
@@ -6721,13 +6723,13 @@
   void set_filled_entry_count(intptr_t num) const;
 
   static intptr_t buckets_offset() {
-    return OFFSET_OF(MegamorphicCacheLayout, buckets_);
+    return OFFSET_OF(UntaggedMegamorphicCache, buckets_);
   }
   static intptr_t mask_offset() {
-    return OFFSET_OF(MegamorphicCacheLayout, mask_);
+    return OFFSET_OF(UntaggedMegamorphicCache, mask_);
   }
   static intptr_t arguments_descriptor_offset() {
-    return OFFSET_OF(MegamorphicCacheLayout, args_descriptor_);
+    return OFFSET_OF(UntaggedMegamorphicCache, args_descriptor_);
   }
 
   static MegamorphicCachePtr New(const String& target_name,
@@ -6739,7 +6741,7 @@
   void SwitchToBareInstructions();
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(MegamorphicCacheLayout));
+    return RoundedAllocationSize(sizeof(UntaggedMegamorphicCache));
   }
 
  private:
@@ -6848,11 +6850,11 @@
   static SubtypeTestCachePtr New();
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(SubtypeTestCacheLayout));
+    return RoundedAllocationSize(sizeof(UntaggedSubtypeTestCache));
   }
 
   static intptr_t cache_offset() {
-    return OFFSET_OF(SubtypeTestCacheLayout, cache_);
+    return OFFSET_OF(UntaggedSubtypeTestCache, cache_);
   }
 
   static void Init();
@@ -6881,7 +6883,7 @@
   static LoadingUnitPtr New();
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(LoadingUnitLayout));
+    return RoundedAllocationSize(sizeof(UntaggedLoadingUnit));
   }
 
   LoadingUnitPtr parent() const;
@@ -6890,20 +6892,20 @@
   ArrayPtr base_objects() const;
   void set_base_objects(const Array& value) const;
 
-  intptr_t id() const { return raw_ptr()->id_; }
-  void set_id(intptr_t id) const { StoreNonPointer(&raw_ptr()->id_, id); }
+  intptr_t id() const { return untag()->id_; }
+  void set_id(intptr_t id) const { StoreNonPointer(&untag()->id_, id); }
 
   // True once the VM deserializes this unit's snapshot.
-  bool loaded() const { return raw_ptr()->loaded_; }
+  bool loaded() const { return untag()->loaded_; }
   void set_loaded(bool value) const {
-    StoreNonPointer(&raw_ptr()->loaded_, value);
+    StoreNonPointer(&untag()->loaded_, value);
   }
 
   // True from the time the VM invokes the embedder's deferred load callback
   // until the embedder calls Dart_DeferredLoadComplete[Error].
-  bool load_outstanding() const { return raw_ptr()->load_outstanding_; }
+  bool load_outstanding() const { return untag()->load_outstanding_; }
   void set_load_outstanding(bool value) const {
-    StoreNonPointer(&raw_ptr()->load_outstanding_, value);
+    StoreNonPointer(&untag()->load_outstanding_, value);
   }
 
   ObjectPtr IssueLoad() const;
@@ -6925,10 +6927,10 @@
 
 class ApiError : public Error {
  public:
-  StringPtr message() const { return raw_ptr()->message(); }
+  StringPtr message() const { return untag()->message(); }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(ApiErrorLayout));
+    return RoundedAllocationSize(sizeof(UntaggedApiError));
   }
 
   static ApiErrorPtr New(const String& message, Heap::Space space = Heap::kNew);
@@ -6947,14 +6949,14 @@
 class LanguageError : public Error {
  public:
   Report::Kind kind() const {
-    return static_cast<Report::Kind>(raw_ptr()->kind_);
+    return static_cast<Report::Kind>(untag()->kind_);
   }
 
   // Build, cache, and return formatted message.
   StringPtr FormatMessage() const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(LanguageErrorLayout));
+    return RoundedAllocationSize(sizeof(UntaggedLanguageError));
   }
 
   // A null script means no source and a negative token_pos means no position.
@@ -6982,26 +6984,26 @@
 
   virtual const char* ToErrorCString() const;
 
-  TokenPosition token_pos() const { return raw_ptr()->token_pos_; }
+  TokenPosition token_pos() const { return untag()->token_pos_; }
 
  private:
-  ErrorPtr previous_error() const { return raw_ptr()->previous_error(); }
+  ErrorPtr previous_error() const { return untag()->previous_error(); }
   void set_previous_error(const Error& value) const;
 
-  ScriptPtr script() const { return raw_ptr()->script(); }
+  ScriptPtr script() const { return untag()->script(); }
   void set_script(const Script& value) const;
 
   void set_token_pos(TokenPosition value) const;
 
-  bool report_after_token() const { return raw_ptr()->report_after_token_; }
+  bool report_after_token() const { return untag()->report_after_token_; }
   void set_report_after_token(bool value);
 
   void set_kind(uint8_t value) const;
 
-  StringPtr message() const { return raw_ptr()->message(); }
+  StringPtr message() const { return untag()->message(); }
   void set_message(const String& value) const;
 
-  StringPtr formatted_message() const { return raw_ptr()->formatted_message(); }
+  StringPtr formatted_message() const { return untag()->formatted_message(); }
   void set_formatted_message(const String& value) const;
 
   static LanguageErrorPtr New();
@@ -7012,18 +7014,18 @@
 
 class UnhandledException : public Error {
  public:
-  InstancePtr exception() const { return raw_ptr()->exception(); }
+  InstancePtr exception() const { return untag()->exception(); }
   static intptr_t exception_offset() {
-    return OFFSET_OF(UnhandledExceptionLayout, exception_);
+    return OFFSET_OF(UntaggedUnhandledException, exception_);
   }
 
-  InstancePtr stacktrace() const { return raw_ptr()->stacktrace(); }
+  InstancePtr stacktrace() const { return untag()->stacktrace(); }
   static intptr_t stacktrace_offset() {
-    return OFFSET_OF(UnhandledExceptionLayout, stacktrace_);
+    return OFFSET_OF(UntaggedUnhandledException, stacktrace_);
   }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(UnhandledExceptionLayout));
+    return RoundedAllocationSize(sizeof(UntaggedUnhandledException));
   }
 
   static UnhandledExceptionPtr New(const Instance& exception,
@@ -7045,13 +7047,13 @@
 
 class UnwindError : public Error {
  public:
-  bool is_user_initiated() const { return raw_ptr()->is_user_initiated_; }
+  bool is_user_initiated() const { return untag()->is_user_initiated_; }
   void set_is_user_initiated(bool value) const;
 
-  StringPtr message() const { return raw_ptr()->message(); }
+  StringPtr message() const { return untag()->message(); }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(UnwindErrorLayout));
+    return RoundedAllocationSize(sizeof(UntaggedUnwindError));
   }
 
   static UnwindErrorPtr New(const String& message,
@@ -7087,7 +7089,7 @@
     const Class& cls = Class::Handle(clazz());
     ASSERT(cls.is_finalized() || cls.is_prefinalized());
 #endif
-    return (clazz()->ptr()->host_instance_size_in_words_ * kWordSize);
+    return (clazz()->untag()->host_instance_size_in_words_ * kWordSize);
   }
 
   InstancePtr Canonicalize(Thread* thread) const;
@@ -7132,7 +7134,7 @@
   static bool NullIsAssignableTo(const AbstractType& other);
 
   bool IsValidNativeIndex(int index) const {
-    return ((index >= 0) && (index < clazz()->ptr()->num_native_fields_));
+    return ((index >= 0) && (index < clazz()->untag()->num_native_fields_));
   }
 
   intptr_t* NativeFieldsDataAddr() const;
@@ -7142,7 +7144,7 @@
   void SetNativeFields(uint16_t num_fields, const intptr_t* field_values) const;
 
   uint16_t NumNativeFields() const {
-    return clazz()->ptr()->num_native_fields_;
+    return clazz()->untag()->num_native_fields_;
   }
 
   void SetNativeField(int index, intptr_t value) const;
@@ -7184,7 +7186,7 @@
   ObjectPtr IdentityHashCode() const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(InstanceLayout));
+    return RoundedAllocationSize(sizeof(UntaggedInstance));
   }
 
   static InstancePtr New(const Class& cls, Heap::Space space = Heap::kNew);
@@ -7199,7 +7201,7 @@
   // Pointer handles.
   virtual bool IsPointer() const;
 
-  static intptr_t NextFieldOffset() { return sizeof(InstanceLayout); }
+  static intptr_t NextFieldOffset() { return sizeof(UntaggedInstance); }
 
  protected:
 #ifndef PRODUCT
@@ -7233,10 +7235,10 @@
     return FieldAddrAtOffset(field.HostOffset());
   }
   ObjectPtr* NativeFieldsAddr() const {
-    return FieldAddrAtOffset(sizeof(ObjectLayout));
+    return FieldAddrAtOffset(sizeof(UntaggedObject));
   }
   void SetFieldAtOffset(intptr_t offset, const Object& value) const {
-    StorePointer(FieldAddrAtOffset(offset), value.raw());
+    StorePointer(FieldAddrAtOffset(offset), value.ptr());
   }
   bool IsValidFieldOffset(intptr_t offset) const;
 
@@ -7249,7 +7251,7 @@
     return *RawFieldAddrAtOffset(offset);
   }
   void RawSetFieldAtOffset(intptr_t offset, const Object& value) const {
-    StorePointer(RawFieldAddrAtOffset(offset), value.raw());
+    StorePointer(RawFieldAddrAtOffset(offset), value.ptr());
   }
 
   static InstancePtr NewFromCidAndSize(SharedClassTable* shared_class_table,
@@ -7276,24 +7278,24 @@
 
 class LibraryPrefix : public Instance {
  public:
-  StringPtr name() const { return raw_ptr()->name(); }
+  StringPtr name() const { return untag()->name(); }
   virtual StringPtr DictionaryName() const { return name(); }
 
-  ArrayPtr imports() const { return raw_ptr()->imports(); }
-  intptr_t num_imports() const { return raw_ptr()->num_imports_; }
-  LibraryPtr importer() const { return raw_ptr()->importer(); }
+  ArrayPtr imports() const { return untag()->imports(); }
+  intptr_t num_imports() const { return untag()->num_imports_; }
+  LibraryPtr importer() const { return untag()->importer(); }
 
   LibraryPtr GetLibrary(int index) const;
   void AddImport(const Namespace& import) const;
 
-  bool is_deferred_load() const { return raw_ptr()->is_deferred_load_; }
-  bool is_loaded() const { return raw_ptr()->is_loaded_; }
+  bool is_deferred_load() const { return untag()->is_deferred_load_; }
+  bool is_loaded() const { return untag()->is_loaded_; }
   void set_is_loaded(bool value) const {
-    return StoreNonPointer(&raw_ptr()->is_loaded_, value);
+    return StoreNonPointer(&untag()->is_loaded_, value);
   }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(LibraryPrefixLayout));
+    return RoundedAllocationSize(sizeof(UntaggedLibraryPrefix));
   }
 
   static LibraryPrefixPtr New(const String& name,
@@ -7331,13 +7333,13 @@
   // expects a vector of length [count]. Always true for the null vector.
   bool HasCount(intptr_t count) const;
   static intptr_t length_offset() {
-    return OFFSET_OF(TypeArgumentsLayout, length_);
+    return OFFSET_OF(UntaggedTypeArguments, length_);
   }
   intptr_t Length() const;
   AbstractTypePtr TypeAt(intptr_t index) const;
   AbstractTypePtr TypeAtNullSafe(intptr_t index) const;
   static intptr_t types_offset() {
-    return OFFSET_OF_RETURNED_VALUE(TypeArgumentsLayout, types);
+    return OFFSET_OF_RETURNED_VALUE(UntaggedTypeArguments, types);
   }
   static intptr_t type_at_offset(intptr_t index) {
     return types_offset() + index * kWordSize;
@@ -7374,7 +7376,7 @@
   static const intptr_t kLegacyBits = 2;
   intptr_t nullability() const;
   static intptr_t nullability_offset() {
-    return OFFSET_OF(TypeArgumentsLayout, nullability_);
+    return OFFSET_OF(UntaggedTypeArguments, nullability_);
   }
 
   // The name of this type argument vector, e.g. "<T, dynamic, List<T>, Smi>".
@@ -7527,25 +7529,25 @@
   intptr_t NumInstantiations() const;
 
   static intptr_t instantiations_offset() {
-    return OFFSET_OF(TypeArgumentsLayout, instantiations_);
+    return OFFSET_OF(UntaggedTypeArguments, instantiations_);
   }
 
   static const intptr_t kBytesPerElement = kWordSize;
   static const intptr_t kMaxElements = kSmiMax / kBytesPerElement;
 
   static intptr_t InstanceSize() {
-    ASSERT(sizeof(TypeArgumentsLayout) ==
-           OFFSET_OF_RETURNED_VALUE(TypeArgumentsLayout, types));
+    ASSERT(sizeof(UntaggedTypeArguments) ==
+           OFFSET_OF_RETURNED_VALUE(UntaggedTypeArguments, types));
     return 0;
   }
 
   static intptr_t InstanceSize(intptr_t len) {
     // Ensure that types() does not add to the object size, which already
     // includes 4 fields: instantiations_, length_, hash_, and nullability_.
-    ASSERT(sizeof(TypeArgumentsLayout) ==
-           (sizeof(ObjectLayout) + (kNumFields * kWordSize)));
+    ASSERT(sizeof(UntaggedTypeArguments) ==
+           (sizeof(UntaggedObject) + (kNumFields * kWordSize)));
     ASSERT(0 <= len && len <= kMaxElements);
-    return RoundedAllocationSize(sizeof(TypeArgumentsLayout) +
+    return RoundedAllocationSize(sizeof(UntaggedTypeArguments) +
                                  (len * kBytesPerElement));
   }
 
@@ -7823,20 +7825,20 @@
       const TypeArguments& function_type_args);
 
   static intptr_t type_test_stub_entry_point_offset() {
-    return OFFSET_OF(AbstractTypeLayout, type_test_stub_entry_point_);
+    return OFFSET_OF(UntaggedAbstractType, type_test_stub_entry_point_);
   }
 
   uword type_test_stub_entry_point() const {
-    return raw_ptr()->type_test_stub_entry_point_;
+    return untag()->type_test_stub_entry_point_;
   }
-  CodePtr type_test_stub() const { return raw_ptr()->type_test_stub(); }
+  CodePtr type_test_stub() const { return untag()->type_test_stub(); }
 
   void SetTypeTestingStub(const Code& stub) const;
 
   // No instances of type AbstractType are allocated, but InstanceSize() and
   // NextFieldOffset() are required to register class _AbstractType.
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(AbstractTypeLayout));
+    return RoundedAllocationSize(sizeof(UntaggedAbstractType));
   }
 
   static intptr_t NextFieldOffset() { return -kWordSize; }
@@ -7861,25 +7863,25 @@
 class Type : public AbstractType {
  public:
   static intptr_t type_class_id_offset() {
-    return OFFSET_OF(TypeLayout, type_class_id_);
+    return OFFSET_OF(UntaggedType, type_class_id_);
   }
   static intptr_t arguments_offset() {
-    return OFFSET_OF(TypeLayout, arguments_);
+    return OFFSET_OF(UntaggedType, arguments_);
   }
   static intptr_t type_state_offset() {
-    return OFFSET_OF(TypeLayout, type_state_);
+    return OFFSET_OF(UntaggedType, type_state_);
   }
-  static intptr_t hash_offset() { return OFFSET_OF(TypeLayout, hash_); }
+  static intptr_t hash_offset() { return OFFSET_OF(UntaggedType, hash_); }
   static intptr_t nullability_offset() {
-    return OFFSET_OF(TypeLayout, nullability_);
+    return OFFSET_OF(UntaggedType, nullability_);
   }
   virtual bool IsFinalized() const {
-    return (raw_ptr()->type_state_ == TypeLayout::kFinalizedInstantiated) ||
-           (raw_ptr()->type_state_ == TypeLayout::kFinalizedUninstantiated);
+    return (untag()->type_state_ == UntaggedType::kFinalizedInstantiated) ||
+           (untag()->type_state_ == UntaggedType::kFinalizedUninstantiated);
   }
   virtual void SetIsFinalized() const;
   virtual bool IsBeingFinalized() const {
-    return raw_ptr()->type_state_ == TypeLayout::kBeingFinalized;
+    return untag()->type_state_ == UntaggedType::kBeingFinalized;
   }
   virtual void SetIsBeingFinalized() const;
   virtual bool HasTypeClass() const {
@@ -7887,13 +7889,13 @@
     return true;
   }
   virtual Nullability nullability() const {
-    return static_cast<Nullability>(raw_ptr()->nullability_);
+    return static_cast<Nullability>(untag()->nullability_);
   }
   TypePtr ToNullability(Nullability value, Heap::Space space) const;
   virtual classid_t type_class_id() const;
   virtual ClassPtr type_class() const;
   void set_type_class(const Class& value) const;
-  virtual TypeArgumentsPtr arguments() const { return raw_ptr()->arguments(); }
+  virtual TypeArgumentsPtr arguments() const { return untag()->arguments(); }
   virtual void set_arguments(const TypeArguments& value) const;
   virtual bool IsInstantiated(Genericity genericity = kAny,
                               intptr_t num_free_fun_type_params = kAllFree,
@@ -7926,7 +7928,7 @@
   intptr_t ComputeHash() const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(TypeLayout));
+    return RoundedAllocationSize(sizeof(UntaggedType));
   }
 
   // The type of the literal 'null'.
@@ -8003,7 +8005,7 @@
   void set_type_state(uint8_t state) const;
   void set_nullability(Nullability value) const {
     ASSERT(!IsCanonical());
-    StoreNonPointer(&raw_ptr()->nullability_, static_cast<uint8_t>(value));
+    StoreNonPointer(&untag()->nullability_, static_cast<uint8_t>(value));
   }
 
   static TypePtr New(Heap::Space space = Heap::kOld);
@@ -8020,24 +8022,26 @@
 class FunctionType : public AbstractType {
  public:
   static intptr_t type_state_offset() {
-    return OFFSET_OF(FunctionTypeLayout, type_state_);
+    return OFFSET_OF(UntaggedFunctionType, type_state_);
   }
-  static intptr_t hash_offset() { return OFFSET_OF(FunctionTypeLayout, hash_); }
+  static intptr_t hash_offset() {
+    return OFFSET_OF(UntaggedFunctionType, hash_);
+  }
   static intptr_t nullability_offset() {
-    return OFFSET_OF(FunctionTypeLayout, nullability_);
+    return OFFSET_OF(UntaggedFunctionType, nullability_);
   }
   virtual bool IsFinalized() const {
-    return (raw_ptr()->type_state_ == TypeLayout::kFinalizedInstantiated) ||
-           (raw_ptr()->type_state_ == TypeLayout::kFinalizedUninstantiated);
+    return (untag()->type_state_ == UntaggedType::kFinalizedInstantiated) ||
+           (untag()->type_state_ == UntaggedType::kFinalizedUninstantiated);
   }
   virtual void SetIsFinalized() const;
   virtual bool IsBeingFinalized() const {
-    return raw_ptr()->type_state_ == TypeLayout::kBeingFinalized;
+    return untag()->type_state_ == UntaggedType::kBeingFinalized;
   }
   virtual void SetIsBeingFinalized() const;
   virtual bool HasTypeClass() const { return false; }
   virtual Nullability nullability() const {
-    return static_cast<Nullability>(raw_ptr()->nullability_);
+    return static_cast<Nullability>(untag()->nullability_);
   }
   FunctionTypePtr ToNullability(Nullability value, Heap::Space space) const;
   virtual classid_t type_class_id() const { return kIllegalCid; }
@@ -8073,8 +8077,8 @@
 
   // Return the number of type arguments in enclosing signature.
   intptr_t NumParentTypeArguments() const {
-    return FunctionTypeLayout::PackedNumParentTypeArguments::decode(
-        raw_ptr()->packed_fields_);
+    return UntaggedFunctionType::PackedNumParentTypeArguments::decode(
+        untag()->packed_fields_);
   }
   void SetNumParentTypeArguments(intptr_t value) const;
 
@@ -8083,31 +8087,31 @@
   }
 
   intptr_t num_implicit_parameters() const {
-    return FunctionTypeLayout::PackedNumImplicitParameters::decode(
-        raw_ptr()->packed_fields_);
+    return UntaggedFunctionType::PackedNumImplicitParameters::decode(
+        untag()->packed_fields_);
   }
   void set_num_implicit_parameters(intptr_t value) const;
   intptr_t num_fixed_parameters() const {
-    return FunctionTypeLayout::PackedNumFixedParameters::decode(
-        raw_ptr()->packed_fields_);
+    return UntaggedFunctionType::PackedNumFixedParameters::decode(
+        untag()->packed_fields_);
   }
   void set_num_fixed_parameters(intptr_t value) const;
 
   bool HasOptionalParameters() const {
-    return FunctionTypeLayout::PackedNumOptionalParameters::decode(
-               raw_ptr()->packed_fields_) > 0;
+    return UntaggedFunctionType::PackedNumOptionalParameters::decode(
+               untag()->packed_fields_) > 0;
   }
   bool HasOptionalNamedParameters() const {
     return HasOptionalParameters() &&
-           FunctionTypeLayout::PackedHasNamedOptionalParameters::decode(
-               raw_ptr()->packed_fields_);
+           UntaggedFunctionType::PackedHasNamedOptionalParameters::decode(
+               untag()->packed_fields_);
   }
   bool HasOptionalPositionalParameters() const {
     return HasOptionalParameters() && !HasOptionalNamedParameters();
   }
   intptr_t NumOptionalParameters() const {
-    return FunctionTypeLayout::PackedNumOptionalParameters::decode(
-        raw_ptr()->packed_fields_);
+    return UntaggedFunctionType::PackedNumOptionalParameters::decode(
+        untag()->packed_fields_);
   }
   void SetNumOptionalParameters(intptr_t num_optional_parameters,
                                 bool are_optional_positional) const;
@@ -8119,13 +8123,13 @@
   intptr_t NumOptionalNamedParameters() const {
     return HasOptionalNamedParameters() ? NumOptionalParameters() : 0;
   }
-  uint32_t packed_fields() const { return raw_ptr()->packed_fields_; }
+  uint32_t packed_fields() const { return untag()->packed_fields_; }
   void set_packed_fields(uint32_t packed_fields) const;
   static intptr_t packed_fields_offset() {
-    return OFFSET_OF(FunctionTypeLayout, packed_fields_);
+    return OFFSET_OF(UntaggedFunctionType, packed_fields_);
   }
 
-  AbstractTypePtr result_type() const { return raw_ptr()->result_type(); }
+  AbstractTypePtr result_type() const { return untag()->result_type(); }
   void set_result_type(const AbstractType& value) const;
 
   // The parameters, starting with NumImplicitParameters() parameters which are
@@ -8133,10 +8137,10 @@
   // Note that type checks exclude implicit parameters.
   AbstractTypePtr ParameterTypeAt(intptr_t index) const;
   void SetParameterTypeAt(intptr_t index, const AbstractType& value) const;
-  ArrayPtr parameter_types() const { return raw_ptr()->parameter_types(); }
+  ArrayPtr parameter_types() const { return untag()->parameter_types(); }
   void set_parameter_types(const Array& value) const;
   static intptr_t parameter_types_offset() {
-    return OFFSET_OF(FunctionTypeLayout, parameter_types_);
+    return OFFSET_OF(UntaggedFunctionType, parameter_types_);
   }
   // Parameter names are valid for all valid parameter indices, and are not
   // limited to named optional parameters. However, they are meaningless after
@@ -8147,7 +8151,7 @@
   // but the first NumParameters() elements are the names.
   StringPtr ParameterNameAt(intptr_t index) const;
   void SetParameterNameAt(intptr_t index, const String& value) const;
-  ArrayPtr parameter_names() const { return raw_ptr()->parameter_names(); }
+  ArrayPtr parameter_names() const { return untag()->parameter_names(); }
   void set_parameter_names(const Array& value) const;
 
   // The required flags are stored at the end of the parameter_names. The flags
@@ -8180,11 +8184,11 @@
   // The type parameters (and their bounds) are specified as an array of
   // TypeParameter.
   TypeArgumentsPtr type_parameters() const {
-    return raw_ptr()->type_parameters();
+    return untag()->type_parameters();
   }
   void set_type_parameters(const TypeArguments& value) const;
   static intptr_t type_parameters_offset() {
-    return OFFSET_OF(FunctionTypeLayout, type_parameters_);
+    return OFFSET_OF(UntaggedFunctionType, type_parameters_);
   }
   intptr_t NumTypeParameters(Thread* thread) const;
   intptr_t NumTypeParameters() const {
@@ -8226,7 +8230,7 @@
   const char* ToUserVisibleCString() const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(FunctionTypeLayout));
+    return RoundedAllocationSize(sizeof(UntaggedFunctionType));
   }
 
   static FunctionTypePtr New(intptr_t num_parent_type_arguments = 0,
@@ -8239,7 +8243,7 @@
   void set_type_state(uint8_t state) const;
   void set_nullability(Nullability value) const {
     ASSERT(!IsCanonical());
-    StoreNonPointer(&raw_ptr()->nullability_, static_cast<uint8_t>(value));
+    StoreNonPointer(&untag()->nullability_, static_cast<uint8_t>(value));
   }
 
   static FunctionTypePtr New(Heap::Space space);
@@ -8256,7 +8260,7 @@
 // Note that the cycle always involves type arguments.
 class TypeRef : public AbstractType {
  public:
-  static intptr_t type_offset() { return OFFSET_OF(TypeRefLayout, type_); }
+  static intptr_t type_offset() { return OFFSET_OF(UntaggedTypeRef, type_); }
 
   virtual bool IsFinalized() const {
     const AbstractType& ref_type = AbstractType::Handle(type());
@@ -8275,7 +8279,7 @@
     return (type() != AbstractType::null()) &&
            AbstractType::Handle(type()).HasTypeClass();
   }
-  AbstractTypePtr type() const { return raw_ptr()->type(); }
+  AbstractTypePtr type() const { return untag()->type(); }
   void set_type(const AbstractType& value) const;
   virtual classid_t type_class_id() const {
     return AbstractType::Handle(type()).type_class_id();
@@ -8311,7 +8315,7 @@
   virtual intptr_t Hash() const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(TypeRefLayout));
+    return RoundedAllocationSize(sizeof(UntaggedTypeRef));
   }
 
   static TypeRefPtr New(const AbstractType& type);
@@ -8336,26 +8340,26 @@
 class TypeParameter : public AbstractType {
  public:
   virtual bool IsFinalized() const {
-    return TypeParameterLayout::FinalizedBit::decode(raw_ptr()->flags_);
+    return UntaggedTypeParameter::FinalizedBit::decode(untag()->flags_);
   }
   virtual void SetIsFinalized() const;
   virtual bool IsBeingFinalized() const {
-    return TypeParameterLayout::BeingFinalizedBit::decode(raw_ptr()->flags_);
+    return UntaggedTypeParameter::BeingFinalizedBit::decode(untag()->flags_);
   }
   virtual void SetIsBeingFinalized() const;
   bool IsGenericCovariantImpl() const {
-    return TypeParameterLayout::GenericCovariantImplBit::decode(
-        raw_ptr()->flags_);
+    return UntaggedTypeParameter::GenericCovariantImplBit::decode(
+        untag()->flags_);
   }
   void SetGenericCovariantImpl(bool value) const;
   static intptr_t flags_offset() {
-    return OFFSET_OF(TypeParameterLayout, flags_);
+    return OFFSET_OF(UntaggedTypeParameter, flags_);
   }
   static intptr_t nullability_offset() {
-    return OFFSET_OF(TypeParameterLayout, nullability_);
+    return OFFSET_OF(UntaggedTypeParameter, nullability_);
   }
   virtual Nullability nullability() const {
-    return static_cast<Nullability>(raw_ptr()->nullability_);
+    return static_cast<Nullability>(untag()->nullability_);
   }
   TypeParameterPtr ToNullability(Nullability value, Heap::Space space) const;
   virtual bool HasTypeClass() const { return false; }
@@ -8371,29 +8375,29 @@
   }
 
   static intptr_t parameterized_class_id_offset() {
-    return OFFSET_OF(TypeParameterLayout, parameterized_class_id_);
+    return OFFSET_OF(UntaggedTypeParameter, parameterized_class_id_);
   }
 
-  intptr_t base() const { return raw_ptr()->base_; }
+  intptr_t base() const { return untag()->base_; }
   void set_base(intptr_t value) const;
-  intptr_t index() const { return raw_ptr()->index_; }
+  intptr_t index() const { return untag()->index_; }
   void set_index(intptr_t value) const;
   static intptr_t index_offset() {
-    return OFFSET_OF(TypeParameterLayout, index_);
+    return OFFSET_OF(UntaggedTypeParameter, index_);
   }
 
-  StringPtr name() const { return raw_ptr()->name(); }
+  StringPtr name() const { return untag()->name(); }
   static intptr_t name_offset() {
-    return OFFSET_OF(TypeParameterLayout, name_);
+    return OFFSET_OF(UntaggedTypeParameter, name_);
   }
-  AbstractTypePtr bound() const { return raw_ptr()->bound(); }
+  AbstractTypePtr bound() const { return untag()->bound(); }
   void set_bound(const AbstractType& value) const;
   static intptr_t bound_offset() {
-    return OFFSET_OF(TypeParameterLayout, bound_);
+    return OFFSET_OF(UntaggedTypeParameter, bound_);
   }
 
   AbstractTypePtr default_argument() const {
-    return raw_ptr()->default_argument();
+    return untag()->default_argument();
   }
   void set_default_argument(const AbstractType& value) const;
 
@@ -8433,7 +8437,7 @@
       const TypeArguments& function_type_arguments) const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(TypeParameterLayout));
+    return RoundedAllocationSize(sizeof(UntaggedTypeParameter));
   }
 
   // 'parameterized_class' is null for a function type parameter.
@@ -8510,7 +8514,7 @@
   virtual uint32_t CanonicalizeHash() const { return AsTruncatedUint32Value(); }
   virtual bool Equals(const Instance& other) const;
 
-  virtual ObjectPtr HashCode() const { return raw(); }
+  virtual ObjectPtr HashCode() const { return ptr(); }
 
   virtual bool IsZero() const;
   virtual bool IsNegative() const;
@@ -8548,7 +8552,7 @@
       return (raw_value >> kSmiTagShift);
     } else {
       ASSERT(obj->IsMint());
-      return static_cast<const MintPtr>(obj)->ptr()->value_;
+      return static_cast<const MintPtr>(obj)->untag()->value_;
     }
   }
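 
   // Note: a Smi stores its value shifted left by kSmiTagShift over a zero
   // tag bit, so e.g. the Smi 3 is the raw word (3 << kSmiTagShift); the
   // arithmetic shift above recovers the value.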
 
@@ -8563,7 +8567,7 @@
   static const intptr_t kMaxValue = kSmiMax;
   static const intptr_t kMinValue = kSmiMin;
 
-  intptr_t Value() const { return RawSmiValue(raw()); }
+  intptr_t Value() const { return RawSmiValue(ptr()); }
 
   virtual bool Equals(const Instance& other) const;
   virtual bool IsZero() const { return Value() == 0; }
@@ -8602,11 +8606,11 @@
   static bool IsValid(int64_t value) { return compiler::target::IsSmi(value); }
 
   void operator=(SmiPtr value) {
-    raw_ = value;
+    ptr_ = value;
     CHECK_HANDLE();
   }
   void operator^=(ObjectPtr value) {
-    raw_ = value;
+    ptr_ = value;
     CHECK_HANDLE();
   }
 
@@ -8646,8 +8650,8 @@
   static const int64_t kMinValue =
       static_cast<int64_t>(DART_2PART_UINT64_C(0x80000000, 00000000));
 
-  int64_t value() const { return raw_ptr()->value_; }
-  static intptr_t value_offset() { return OFFSET_OF(MintLayout, value_); }
+  int64_t value() const { return untag()->value_; }
+  static intptr_t value_offset() { return OFFSET_OF(UntaggedMint, value_); }
 
   virtual bool IsZero() const { return value() == 0; }
   virtual bool IsNegative() const { return value() < 0; }
@@ -8663,7 +8667,7 @@
   virtual int CompareWith(const Integer& other) const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(MintLayout));
+    return RoundedAllocationSize(sizeof(UntaggedMint));
   }
 
  protected:
@@ -8687,7 +8691,7 @@
 // abstract class double in corelib.
 class Double : public Number {
  public:
-  double value() const { return raw_ptr()->value_; }
+  double value() const { return untag()->value_; }
 
   bool BitwiseEqualsToDouble(double value) const;
   virtual bool OperatorEquals(const Instance& other) const;
@@ -8708,10 +8712,10 @@
   static DoublePtr NewCanonical(const String& str);
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(DoubleLayout));
+    return RoundedAllocationSize(sizeof(UntaggedDouble));
   }
 
-  static intptr_t value_offset() { return OFFSET_OF(DoubleLayout, value_); }
+  static intptr_t value_offset() { return OFFSET_OF(UntaggedDouble, value_); }
 
  private:
   void set_value(double value) const;
@@ -8736,10 +8740,10 @@
 // simple.  We choose a value that will prevent integer overflow for
 // 2 byte strings, since it is the worst case.
 #if defined(HASH_IN_OBJECT_HEADER)
-  static const intptr_t kSizeofRawString = sizeof(InstanceLayout) + kWordSize;
+  static const intptr_t kSizeofRawString = sizeof(UntaggedInstance) + kWordSize;
 #else
   static const intptr_t kSizeofRawString =
-      sizeof(InstanceLayout) + 2 * kWordSize;
+      sizeof(UntaggedInstance) + 2 * kWordSize;
 #endif
   static const intptr_t kMaxElements = kSmiMax / kTwoByteChar;
 
@@ -8774,19 +8778,19 @@
     DISALLOW_IMPLICIT_CONSTRUCTORS(CodePointIterator);
   };
 
-  intptr_t Length() const { return LengthOf(raw()); }
+  intptr_t Length() const { return LengthOf(ptr()); }
   static intptr_t LengthOf(StringPtr obj) {
-    return Smi::Value(obj->ptr()->length());
+    return Smi::Value(obj->untag()->length());
   }
-  static intptr_t length_offset() { return OFFSET_OF(StringLayout, length_); }
+  static intptr_t length_offset() { return OFFSET_OF(UntaggedString, length_); }
 
   intptr_t Hash() const {
-    intptr_t result = GetCachedHash(raw());
+    intptr_t result = GetCachedHash(ptr());
     if (result != 0) {
       return result;
     }
     result = String::Hash(*this, 0, this->Length());
-    SetCachedHash(raw(), result);
+    SetCachedHash(ptr(), result);
     return result;
   }
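 
   // Note: the hash is cached lazily, with 0 meaning "not yet computed"
   // (HasHash() below relies on Smi::New(0) == nullptr), so the underlying
   // hash function is expected never to yield 0 for a real string.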
 
@@ -8794,16 +8798,16 @@
 
   bool HasHash() const {
     ASSERT(Smi::New(0) == nullptr);
-    return GetCachedHash(raw()) != 0;
+    return GetCachedHash(ptr()) != 0;
   }
 
   static intptr_t hash_offset() {
 #if defined(HASH_IN_OBJECT_HEADER)
-    COMPILE_ASSERT(ObjectLayout::kHashTagPos % kBitsPerByte == 0);
-    return OFFSET_OF(ObjectLayout, tags_) +
-           ObjectLayout::kHashTagPos / kBitsPerByte;
+    COMPILE_ASSERT(UntaggedObject::kHashTagPos % kBitsPerByte == 0);
+    return OFFSET_OF(UntaggedObject, tags_) +
+           UntaggedObject::kHashTagPos / kBitsPerByte;
 #else
-    return OFFSET_OF(StringLayout, hash_);
+    return OFFSET_OF(UntaggedString, hash_);
 #endif
   }
   static intptr_t Hash(const String& str, intptr_t begin_index, intptr_t len);
@@ -8811,7 +8815,7 @@
   static intptr_t Hash(const uint16_t* characters, intptr_t len);
   static intptr_t Hash(const int32_t* characters, intptr_t len);
   static intptr_t HashRawSymbol(const StringPtr symbol) {
-    ASSERT(symbol->ptr()->IsCanonical());
+    ASSERT(symbol->untag()->IsCanonical());
     intptr_t result = GetCachedHash(symbol);
     ASSERT(result != 0);
     return result;
@@ -8822,7 +8826,7 @@
 
   virtual ObjectPtr HashCode() const { return Integer::New(Hash()); }
 
-  uint16_t CharAt(intptr_t index) const { return CharAt(raw(), index); }
+  uint16_t CharAt(intptr_t index) const { return CharAt(ptr(), index); }
   static uint16_t CharAt(StringPtr str, intptr_t index);
 
   intptr_t CharSize() const;
@@ -8863,7 +8867,7 @@
 
   bool StartsWith(const String& other) const {
     NoSafepointScope no_safepoint;
-    return StartsWith(raw(), other.raw());
+    return StartsWith(ptr(), other.ptr());
   }
   static bool StartsWith(StringPtr str, StringPtr prefix);
   bool EndsWith(const String& other) const;
@@ -8877,26 +8881,26 @@
   virtual bool CheckIsCanonical(Thread* thread) const;
 #endif  // DEBUG
 
-  bool IsSymbol() const { return raw()->ptr()->IsCanonical(); }
+  bool IsSymbol() const { return ptr()->untag()->IsCanonical(); }
 
   bool IsOneByteString() const {
-    return raw()->GetClassId() == kOneByteStringCid;
+    return ptr()->GetClassId() == kOneByteStringCid;
   }
 
   bool IsTwoByteString() const {
-    return raw()->GetClassId() == kTwoByteStringCid;
+    return ptr()->GetClassId() == kTwoByteStringCid;
   }
 
   bool IsExternalOneByteString() const {
-    return raw()->GetClassId() == kExternalOneByteStringCid;
+    return ptr()->GetClassId() == kExternalOneByteStringCid;
   }
 
   bool IsExternalTwoByteString() const {
-    return raw()->GetClassId() == kExternalTwoByteStringCid;
+    return ptr()->GetClassId() == kExternalTwoByteStringCid;
   }
 
   bool IsExternal() const {
-    return IsExternalStringClassId(raw()->GetClassId());
+    return IsExternalStringClassId(ptr()->GetClassId());
   }
 
   void* GetPeer() const;
@@ -9027,11 +9031,11 @@
 
 #if !defined(HASH_IN_OBJECT_HEADER)
   static uint32_t GetCachedHash(const StringPtr obj) {
-    return Smi::Value(obj->ptr()->hash_);
+    return Smi::Value(obj->untag()->hash_);
   }
 
   static void SetCachedHash(StringPtr obj, uint32_t hash) {
-    obj->ptr()->hash_ = Smi::New(hash);
+    obj->untag()->hash_ = Smi::New(hash);
   }
 #endif
 
@@ -9045,10 +9049,10 @@
   void SetLength(intptr_t value) const {
     // This is only safe because we create a new Smi, which does not cause
     // heap allocation.
-    raw_ptr()->set_length(Smi::New(value));
+    untag()->set_length(Smi::New(value));
   }
 
-  void SetHash(intptr_t value) const { SetCachedHash(raw(), value); }
+  void SetHash(intptr_t value) const { SetCachedHash(ptr(), value); }
 
   template <typename HandleType, typename ElementType, typename CallbackType>
   static void ReadFromImpl(SnapshotReader* reader,
@@ -9070,7 +9074,7 @@
   friend class TwoByteString;
   friend class ExternalOneByteString;
   friend class ExternalTwoByteString;
-  friend class OneByteStringLayout;
+  friend class UntaggedOneByteString;
   friend class RODataSerializationCluster;  // SetHash
   friend class Pass2Visitor;                // Stack "handle"
 };
@@ -9106,13 +9110,13 @@
   static uint16_t CharAt(const String& str, intptr_t index) {
     ASSERT(str.IsOneByteString());
     NoSafepointScope no_safepoint;
-    return OneByteString::CharAt(static_cast<OneByteStringPtr>(str.raw()),
+    return OneByteString::CharAt(static_cast<OneByteStringPtr>(str.ptr()),
                                  index);
   }
 
   static uint16_t CharAt(OneByteStringPtr str, intptr_t index) {
     ASSERT(index >= 0 && index < String::LengthOf(str));
-    return str->ptr()->data()[index];
+    return str->untag()->data()[index];
   }
 
   static void SetCharAt(const String& str, intptr_t index, uint8_t code_unit) {
@@ -9125,22 +9129,22 @@
   static const intptr_t kMaxElements = String::kMaxElements;
 
   static intptr_t data_offset() {
-    return OFFSET_OF_RETURNED_VALUE(OneByteStringLayout, data);
+    return OFFSET_OF_RETURNED_VALUE(UntaggedOneByteString, data);
   }
 
   static intptr_t UnroundedSize(OneByteStringPtr str) {
-    return UnroundedSize(Smi::Value(str->ptr()->length()));
+    return UnroundedSize(Smi::Value(str->untag()->length()));
   }
   static intptr_t UnroundedSize(intptr_t len) {
-    return sizeof(OneByteStringLayout) + (len * kBytesPerElement);
+    return sizeof(UntaggedOneByteString) + (len * kBytesPerElement);
   }
   static intptr_t InstanceSize() {
-    ASSERT(sizeof(OneByteStringLayout) ==
-           OFFSET_OF_RETURNED_VALUE(OneByteStringLayout, data));
+    ASSERT(sizeof(UntaggedOneByteString) ==
+           OFFSET_OF_RETURNED_VALUE(UntaggedOneByteString, data));
     return 0;
   }
   static intptr_t InstanceSize(intptr_t len) {
-    ASSERT(sizeof(OneByteStringLayout) == String::kSizeofRawString);
+    ASSERT(sizeof(UntaggedOneByteString) == String::kSizeofRawString);
     ASSERT(0 <= len && len <= kMaxElements);
     return String::RoundedAllocationSize(UnroundedSize(len));
   }
@@ -9205,22 +9209,22 @@
 
  private:
   static OneByteStringPtr raw(const String& str) {
-    return static_cast<OneByteStringPtr>(str.raw());
+    return static_cast<OneByteStringPtr>(str.ptr());
   }
 
-  static const OneByteStringLayout* raw_ptr(const String& str) {
-    return reinterpret_cast<const OneByteStringLayout*>(str.raw_ptr());
+  static const UntaggedOneByteString* untag(const String& str) {
+    return reinterpret_cast<const UntaggedOneByteString*>(str.untag());
   }
 
   static uint8_t* CharAddr(const String& str, intptr_t index) {
     ASSERT((index >= 0) && (index < str.Length()));
     ASSERT(str.IsOneByteString());
-    return &str.UnsafeMutableNonPointer(raw_ptr(str)->data())[index];
+    return &str.UnsafeMutableNonPointer(untag(str)->data())[index];
   }
 
   static uint8_t* DataStart(const String& str) {
     ASSERT(str.IsOneByteString());
-    return &str.UnsafeMutableNonPointer(raw_ptr(str)->data())[0];
+    return &str.UnsafeMutableNonPointer(untag(str)->data())[0];
   }
 
   static OneByteStringPtr ReadFrom(SnapshotReader* reader,
@@ -9244,13 +9248,13 @@
   static uint16_t CharAt(const String& str, intptr_t index) {
     ASSERT(str.IsTwoByteString());
     NoSafepointScope no_safepoint;
-    return TwoByteString::CharAt(static_cast<TwoByteStringPtr>(str.raw()),
+    return TwoByteString::CharAt(static_cast<TwoByteStringPtr>(str.ptr()),
                                  index);
   }
 
   static uint16_t CharAt(TwoByteStringPtr str, intptr_t index) {
     ASSERT(index >= 0 && index < String::LengthOf(str));
-    return str->ptr()->data()[index];
+    return str->untag()->data()[index];
   }
 
   static void SetCharAt(const String& str, intptr_t index, uint16_t ch) {
@@ -9265,21 +9269,21 @@
   static const intptr_t kMaxElements = String::kMaxElements;
 
   static intptr_t data_offset() {
-    return OFFSET_OF_RETURNED_VALUE(TwoByteStringLayout, data);
+    return OFFSET_OF_RETURNED_VALUE(UntaggedTwoByteString, data);
   }
   static intptr_t UnroundedSize(TwoByteStringPtr str) {
-    return UnroundedSize(Smi::Value(str->ptr()->length()));
+    return UnroundedSize(Smi::Value(str->untag()->length()));
   }
   static intptr_t UnroundedSize(intptr_t len) {
-    return sizeof(TwoByteStringLayout) + (len * kBytesPerElement);
+    return sizeof(UntaggedTwoByteString) + (len * kBytesPerElement);
   }
   static intptr_t InstanceSize() {
-    ASSERT(sizeof(TwoByteStringLayout) ==
-           OFFSET_OF_RETURNED_VALUE(TwoByteStringLayout, data));
+    ASSERT(sizeof(UntaggedTwoByteString) ==
+           OFFSET_OF_RETURNED_VALUE(UntaggedTwoByteString, data));
     return 0;
   }
   static intptr_t InstanceSize(intptr_t len) {
-    ASSERT(sizeof(TwoByteStringLayout) == String::kSizeofRawString);
+    ASSERT(sizeof(UntaggedTwoByteString) == String::kSizeofRawString);
     ASSERT(0 <= len && len <= kMaxElements);
     return String::RoundedAllocationSize(UnroundedSize(len));
   }
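 
   // Worked example of the size computation above (illustrative numbers:
   // assume a 16-byte UntaggedTwoByteString header and 16-byte object
   // alignment):
   //
   //   UnroundedSize(5) == 16 + 5 * 2 == 26   // header + payload
   //   InstanceSize(5)  == RoundedAllocationSize(26) == 32
   //
   // The zero-argument InstanceSize() returning 0 is how variable-length
   // classes identify themselves to the allocator.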
@@ -9325,24 +9329,24 @@
 
  private:
   static TwoByteStringPtr raw(const String& str) {
-    return static_cast<TwoByteStringPtr>(str.raw());
+    return static_cast<TwoByteStringPtr>(str.ptr());
   }
 
-  static const TwoByteStringLayout* raw_ptr(const String& str) {
-    return reinterpret_cast<const TwoByteStringLayout*>(str.raw_ptr());
+  static const UntaggedTwoByteString* untag(const String& str) {
+    return reinterpret_cast<const UntaggedTwoByteString*>(str.untag());
   }
 
   static uint16_t* CharAddr(const String& str, intptr_t index) {
     ASSERT((index >= 0) && (index < str.Length()));
     ASSERT(str.IsTwoByteString());
-    return &str.UnsafeMutableNonPointer(raw_ptr(str)->data())[index];
+    return &str.UnsafeMutableNonPointer(untag(str)->data())[index];
   }
 
   // Use this instead of CharAddr(0).  It will not assert that the index is <
   // length.
   static uint16_t* DataStart(const String& str) {
     ASSERT(str.IsTwoByteString());
-    return &str.UnsafeMutableNonPointer(raw_ptr(str)->data())[0];
+    return &str.UnsafeMutableNonPointer(untag(str)->data())[0];
   }
 
   static TwoByteStringPtr ReadFrom(SnapshotReader* reader,
@@ -9365,18 +9369,18 @@
     ASSERT(str.IsExternalOneByteString());
     NoSafepointScope no_safepoint;
     return ExternalOneByteString::CharAt(
-        static_cast<ExternalOneByteStringPtr>(str.raw()), index);
+        static_cast<ExternalOneByteStringPtr>(str.ptr()), index);
   }
 
   static uint16_t CharAt(ExternalOneByteStringPtr str, intptr_t index) {
     ASSERT(index >= 0 && index < String::LengthOf(str));
-    return str->ptr()->external_data_[index];
+    return str->untag()->external_data_[index];
   }
 
-  static void* GetPeer(const String& str) { return raw_ptr(str)->peer_; }
+  static void* GetPeer(const String& str) { return untag(str)->peer_; }
 
   static intptr_t external_data_offset() {
-    return OFFSET_OF(ExternalOneByteStringLayout, external_data_);
+    return OFFSET_OF(UntaggedExternalOneByteString, external_data_);
   }
 
   // We use the same maximum elements for all strings.
@@ -9384,7 +9388,7 @@
   static const intptr_t kMaxElements = String::kMaxElements;
 
   static intptr_t InstanceSize() {
-    return String::RoundedAllocationSize(sizeof(ExternalOneByteStringLayout));
+    return String::RoundedAllocationSize(sizeof(UntaggedExternalOneByteString));
   }
 
   static ExternalOneByteStringPtr New(const uint8_t* characters,
@@ -9406,22 +9410,22 @@
 
  private:
   static ExternalOneByteStringPtr raw(const String& str) {
-    return static_cast<ExternalOneByteStringPtr>(str.raw());
+    return static_cast<ExternalOneByteStringPtr>(str.ptr());
   }
 
-  static const ExternalOneByteStringLayout* raw_ptr(const String& str) {
-    return reinterpret_cast<const ExternalOneByteStringLayout*>(str.raw_ptr());
+  static const UntaggedExternalOneByteString* untag(const String& str) {
+    return reinterpret_cast<const UntaggedExternalOneByteString*>(str.untag());
   }
 
   static const uint8_t* CharAddr(const String& str, intptr_t index) {
     ASSERT((index >= 0) && (index < str.Length()));
     ASSERT(str.IsExternalOneByteString());
-    return &(raw_ptr(str)->external_data_[index]);
+    return &(untag(str)->external_data_[index]);
   }
 
   static const uint8_t* DataStart(const String& str) {
     ASSERT(str.IsExternalOneByteString());
-    return raw_ptr(str)->external_data_;
+    return untag(str)->external_data_;
   }
 
   static void SetExternalData(const String& str,
@@ -9430,8 +9434,8 @@
     ASSERT(str.IsExternalOneByteString());
     ASSERT(!IsolateGroup::Current()->heap()->Contains(
         reinterpret_cast<uword>(data)));
-    str.StoreNonPointer(&raw_ptr(str)->external_data_, data);
-    str.StoreNonPointer(&raw_ptr(str)->peer_, peer);
+    str.StoreNonPointer(&untag(str)->external_data_, data);
+    str.StoreNonPointer(&untag(str)->peer_, peer);
   }
 
   static void Finalize(void* isolate_callback_data,
@@ -9463,18 +9467,18 @@
     ASSERT(str.IsExternalTwoByteString());
     NoSafepointScope no_safepoint;
     return ExternalTwoByteString::CharAt(
-        static_cast<ExternalTwoByteStringPtr>(str.raw()), index);
+        static_cast<ExternalTwoByteStringPtr>(str.ptr()), index);
   }
 
   static uint16_t CharAt(ExternalTwoByteStringPtr str, intptr_t index) {
     ASSERT(index >= 0 && index < String::LengthOf(str));
-    return str->ptr()->external_data_[index];
+    return str->untag()->external_data_[index];
   }
 
-  static void* GetPeer(const String& str) { return raw_ptr(str)->peer_; }
+  static void* GetPeer(const String& str) { return untag(str)->peer_; }
 
   static intptr_t external_data_offset() {
-    return OFFSET_OF(ExternalTwoByteStringLayout, external_data_);
+    return OFFSET_OF(UntaggedExternalTwoByteString, external_data_);
   }
 
   // We use the same maximum elements for all strings.
@@ -9482,7 +9486,7 @@
   static const intptr_t kMaxElements = String::kMaxElements;
 
   static intptr_t InstanceSize() {
-    return String::RoundedAllocationSize(sizeof(ExternalTwoByteStringLayout));
+    return String::RoundedAllocationSize(sizeof(UntaggedExternalTwoByteString));
   }
 
   static ExternalTwoByteStringPtr New(const uint16_t* characters,
@@ -9500,22 +9504,22 @@
 
  private:
   static ExternalTwoByteStringPtr raw(const String& str) {
-    return static_cast<ExternalTwoByteStringPtr>(str.raw());
+    return static_cast<ExternalTwoByteStringPtr>(str.ptr());
   }
 
-  static const ExternalTwoByteStringLayout* raw_ptr(const String& str) {
-    return reinterpret_cast<const ExternalTwoByteStringLayout*>(str.raw_ptr());
+  static const UntaggedExternalTwoByteString* untag(const String& str) {
+    return reinterpret_cast<const UntaggedExternalTwoByteString*>(str.untag());
   }
 
   static const uint16_t* CharAddr(const String& str, intptr_t index) {
     ASSERT((index >= 0) && (index < str.Length()));
     ASSERT(str.IsExternalTwoByteString());
-    return &(raw_ptr(str)->external_data_[index]);
+    return &(untag(str)->external_data_[index]);
   }
 
   static const uint16_t* DataStart(const String& str) {
     ASSERT(str.IsExternalTwoByteString());
-    return raw_ptr(str)->external_data_;
+    return untag(str)->external_data_;
   }
 
   static void SetExternalData(const String& str,
@@ -9524,8 +9528,8 @@
     ASSERT(str.IsExternalTwoByteString());
     ASSERT(!IsolateGroup::Current()->heap()->Contains(
         reinterpret_cast<uword>(data)));
-    str.StoreNonPointer(&raw_ptr(str)->external_data_, data);
-    str.StoreNonPointer(&raw_ptr(str)->peer_, peer);
+    str.StoreNonPointer(&untag(str)->external_data_, data);
+    str.StoreNonPointer(&untag(str)->peer_, peer);
   }
 
   static void Finalize(void* isolate_callback_data,
@@ -9553,10 +9557,10 @@
 // Class Bool implements Dart core class bool.
 class Bool : public Instance {
  public:
-  bool value() const { return raw_ptr()->value_; }
+  bool value() const { return untag()->value_; }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(BoolLayout));
+    return RoundedAllocationSize(sizeof(UntaggedBool));
   }
 
   static const Bool& True() { return Object::bool_true(); }
@@ -9568,13 +9572,11 @@
   }
 
   virtual uint32_t CanonicalizeHash() const {
-    return raw() == True().raw() ? 1231 : 1237;
+    return ptr() == True().ptr() ? 1231 : 1237;
   }
 
  private:
-  void set_value(bool value) const {
-    StoreNonPointer(&raw_ptr()->value_, value);
-  }
+  void set_value(bool value) const { StoreNonPointer(&untag()->value_, value); }
 
   // New should only be called to initialize the two legal bool values.
   static BoolPtr New(bool value);
@@ -9596,17 +9598,17 @@
     return Array::InstanceSize(array_length) > Heap::kNewAllocatableSize;
   }
 
-  intptr_t Length() const { return LengthOf(raw()); }
+  intptr_t Length() const { return LengthOf(ptr()); }
   static intptr_t LengthOf(const ArrayPtr array) {
-    return Smi::Value(array->ptr()->length());
+    return Smi::Value(array->untag()->length());
   }
 
-  static intptr_t length_offset() { return OFFSET_OF(ArrayLayout, length_); }
+  static intptr_t length_offset() { return OFFSET_OF(UntaggedArray, length_); }
   static intptr_t data_offset() {
-    return OFFSET_OF_RETURNED_VALUE(ArrayLayout, data);
+    return OFFSET_OF_RETURNED_VALUE(UntaggedArray, data);
   }
   static intptr_t element_offset(intptr_t index) {
-    return OFFSET_OF_RETURNED_VALUE(ArrayLayout, data) + kWordSize * index;
+    return OFFSET_OF_RETURNED_VALUE(UntaggedArray, data) + kWordSize * index;
   }
   static intptr_t index_at_offset(intptr_t offset_in_bytes) {
     intptr_t index = (offset_in_bytes - data_offset()) / kWordSize;
@@ -9623,39 +9625,41 @@
   static bool Equals(ArrayPtr a, ArrayPtr b) {
     if (a == b) return true;
     if (a->IsRawNull() || b->IsRawNull()) return false;
-    if (a->ptr()->length() != b->ptr()->length()) return false;
-    if (a->ptr()->type_arguments() != b->ptr()->type_arguments()) return false;
+    if (a->untag()->length() != b->untag()->length()) return false;
+    if (a->untag()->type_arguments() != b->untag()->type_arguments())
+      return false;
     const intptr_t length = LengthOf(a);
-    return memcmp(a->ptr()->data(), b->ptr()->data(), kWordSize * length) == 0;
+    return memcmp(a->untag()->data(), b->untag()->data(), kWordSize * length) ==
+           0;
   }
 
-  static ObjectPtr* DataOf(ArrayPtr array) { return array->ptr()->data(); }
+  static ObjectPtr* DataOf(ArrayPtr array) { return array->untag()->data(); }
 
   template <std::memory_order order = std::memory_order_relaxed>
   ObjectPtr At(intptr_t index) const {
-    return raw_ptr()->element(index);
+    return untag()->element(index);
   }
   template <std::memory_order order = std::memory_order_relaxed>
   void SetAt(intptr_t index, const Object& value) const {
     // TODO(iposva): Add storing NoSafepointScope.
-    raw_ptr()->set_element(index, value.raw());
+    untag()->set_element(index, value.ptr());
   }
 
   // Access to the array with acquire release semantics.
   ObjectPtr AtAcquire(intptr_t index) const {
-    return raw_ptr()->element<std::memory_order_acquire>(index);
+    return untag()->element<std::memory_order_acquire>(index);
   }
   void SetAtRelease(intptr_t index, const Object& value) const {
-    raw_ptr()->set_element<std::memory_order_release>(index, value.raw());
+    untag()->set_element<std::memory_order_release>(index, value.ptr());
   }
 
-  bool IsImmutable() const { return raw()->GetClassId() == kImmutableArrayCid; }
+  bool IsImmutable() const { return ptr()->GetClassId() == kImmutableArrayCid; }
 
   // Position of element type in type arguments.
   static const intptr_t kElementTypeTypeArgPos = 0;
 
   virtual TypeArgumentsPtr GetTypeArguments() const {
-    return raw_ptr()->type_arguments();
+    return untag()->type_arguments();
   }
   virtual void SetTypeArguments(const TypeArguments& value) const {
     // An Array is raw or takes one type argument. However, its type argument
@@ -9666,7 +9670,7 @@
             value.IsInstantiated() /*&& value.IsCanonical()*/));
     // TODO(asiva): Values read from a message snapshot are not properly marked
     // as canonical. See for example tests/isolate/mandel_isolate_test.dart.
-    StoreArrayPointer(&raw_ptr()->type_arguments_, value.raw());
+    StoreArrayPointer(&untag()->type_arguments_, value.ptr());
   }
 
   virtual bool CanonicalizeEquals(const Instance& other) const;
@@ -9675,10 +9679,10 @@
   static const intptr_t kBytesPerElement = kWordSize;
   static const intptr_t kMaxElements = kSmiMax / kBytesPerElement;
   static const intptr_t kMaxNewSpaceElements =
-      (Heap::kNewAllocatableSize - sizeof(ArrayLayout)) / kBytesPerElement;
+      (Heap::kNewAllocatableSize - sizeof(UntaggedArray)) / kBytesPerElement;
 
   static intptr_t type_arguments_offset() {
-    return OFFSET_OF(ArrayLayout, type_arguments_);
+    return OFFSET_OF(UntaggedArray, type_arguments_);
   }
 
   static bool IsValidLength(intptr_t len) {
@@ -9686,15 +9690,17 @@
   }
 
   static intptr_t InstanceSize() {
-    ASSERT(sizeof(ArrayLayout) == OFFSET_OF_RETURNED_VALUE(ArrayLayout, data));
+    ASSERT(sizeof(UntaggedArray) ==
+           OFFSET_OF_RETURNED_VALUE(UntaggedArray, data));
     return 0;
   }
 
   static intptr_t InstanceSize(intptr_t len) {
    // Ensure that variable-length data does not add to the object length.
-    ASSERT(sizeof(ArrayLayout) == (sizeof(InstanceLayout) + (2 * kWordSize)));
+    ASSERT(sizeof(UntaggedArray) ==
+           (sizeof(UntaggedInstance) + (2 * kWordSize)));
     ASSERT(IsValidLength(len));
-    return RoundedAllocationSize(sizeof(ArrayLayout) +
+    return RoundedAllocationSize(sizeof(UntaggedArray) +
                                  (len * kBytesPerElement));
   }
 
@@ -9744,19 +9750,17 @@
   ObjectPtr const* ObjectAddr(intptr_t index) const {
     // TODO(iposva): Determine if we should throw an exception here.
     ASSERT((index >= 0) && (index < Length()));
-    return &raw_ptr()->data()[index];
+    return &untag()->data()[index];
   }
 
-  void SetLength(intptr_t value) const {
-    raw_ptr()->set_length(Smi::New(value));
-  }
+  void SetLength(intptr_t value) const { untag()->set_length(Smi::New(value)); }
   void SetLengthRelease(intptr_t value) const {
-    raw_ptr()->set_length<std::memory_order_release>(Smi::New(value));
+    untag()->set_length<std::memory_order_release>(Smi::New(value));
   }
 
   template <typename type, std::memory_order order = std::memory_order_relaxed>
   void StoreArrayPointer(type const* addr, type value) const {
-    raw()->ptr()->StoreArrayPointer<type, order>(addr, value);
+    ptr()->untag()->StoreArrayPointer<type, order>(addr, value);
   }
 
   // Store a range of pointers [from, from + count) into [to, to + count).
@@ -9765,7 +9769,7 @@
                           ObjectPtr const* from,
                           intptr_t count) {
     ASSERT(Contains(reinterpret_cast<uword>(to)));
-    if (raw()->IsNewObject()) {
+    if (ptr()->IsNewObject()) {
       memmove(const_cast<ObjectPtr*>(to), from, count * kWordSize);
     } else {
       for (intptr_t i = 0; i < count; ++i) {
@@ -9806,7 +9810,7 @@
   }
 
   static ImmutableArrayPtr raw(const Array& array) {
-    return static_cast<ImmutableArrayPtr>(array.raw());
+    return static_cast<ImmutableArrayPtr>(array.ptr());
   }
 
   friend class Class;
@@ -9821,29 +9825,29 @@
   }
   intptr_t Length() const {
     ASSERT(!IsNull());
-    return Smi::Value(raw_ptr()->length());
+    return Smi::Value(untag()->length());
   }
   void SetLength(intptr_t value) const {
     // This is only safe because we create a new Smi, which does not cause
     // heap allocation.
-    raw_ptr()->set_length(Smi::New(value));
+    untag()->set_length(Smi::New(value));
   }
 
-  ArrayPtr data() const { return raw_ptr()->data(); }
-  void SetData(const Array& value) const { raw_ptr()->set_data(value.raw()); }
+  ArrayPtr data() const { return untag()->data(); }
+  void SetData(const Array& value) const { untag()->set_data(value.ptr()); }
 
   ObjectPtr At(intptr_t index) const {
     NoSafepointScope no_safepoint;
     ASSERT(!IsNull());
     ASSERT(index < Length());
-    return data()->ptr()->element(index);
+    return data()->untag()->element(index);
   }
   void SetAt(intptr_t index, const Object& value) const {
     ASSERT(!IsNull());
     ASSERT(index < Length());
 
     // TODO(iposva): Add storing NoSafepointScope.
-    data()->ptr()->set_element(index, value.raw());
+    data()->untag()->set_element(index, value.ptr());
   }
 
   void Add(const Object& value, Heap::Space space = Heap::kNew) const;
@@ -9852,7 +9856,7 @@
   ObjectPtr RemoveLast() const;
 
   virtual TypeArgumentsPtr GetTypeArguments() const {
-    return raw_ptr()->type_arguments();
+    return untag()->type_arguments();
   }
   virtual void SetTypeArguments(const TypeArguments& value) const {
     // A GrowableObjectArray is raw or takes one type argument. However, its
@@ -9860,7 +9864,8 @@
     // reusing the type argument vector of the instantiator.
     ASSERT(value.IsNull() || ((value.Length() >= 1) && value.IsInstantiated() &&
                               value.IsCanonical()));
-    raw_ptr()->set_type_arguments(value.raw());
+
+    untag()->set_type_arguments(value.ptr());
   }
 
   // We don't expect a growable object array to be canonicalized.
@@ -9876,18 +9881,18 @@
   }
 
   static intptr_t type_arguments_offset() {
-    return OFFSET_OF(GrowableObjectArrayLayout, type_arguments_);
+    return OFFSET_OF(UntaggedGrowableObjectArray, type_arguments_);
   }
 
   static intptr_t length_offset() {
-    return OFFSET_OF(GrowableObjectArrayLayout, length_);
+    return OFFSET_OF(UntaggedGrowableObjectArray, length_);
   }
   static intptr_t data_offset() {
-    return OFFSET_OF(GrowableObjectArrayLayout, data_);
+    return OFFSET_OF(UntaggedGrowableObjectArray, data_);
   }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(GrowableObjectArrayLayout));
+    return RoundedAllocationSize(sizeof(UntaggedGrowableObjectArray));
   }
 
   static GrowableObjectArrayPtr New(Heap::Space space = Heap::kNew) {
@@ -9899,15 +9904,15 @@
                                     Heap::Space space = Heap::kNew);
 
   static SmiPtr NoSafepointLength(const GrowableObjectArrayPtr array) {
-    return array->ptr()->length();
+    return array->untag()->length();
   }
 
   static ArrayPtr NoSafepointData(const GrowableObjectArrayPtr array) {
-    return array->ptr()->data();
+    return array->untag()->data();
   }
 
  private:
-  ArrayLayout* DataArray() const { return data()->ptr(); }
+  UntaggedArray* DataArray() const { return data()->untag(); }
 
   static const int kDefaultInitialCapacity = 0;
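 
   // Sketch of the growth invariant implied above: the object is a
   // (length, backing Array) pair, At(i) forwards to
   // data()->untag()->element(i), and the backing store is swapped out when
   // it fills (Capacity() is assumed here as an accessor for data().Length()):
   //
   //   bool NeedsGrowth(const GrowableObjectArray& g) {
   //     return g.Length() == g.Capacity();
   //   }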
 
@@ -9940,10 +9945,12 @@
   void set_value(simd128_value_t value) const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(Float32x4Layout));
+    return RoundedAllocationSize(sizeof(UntaggedFloat32x4));
   }
 
-  static intptr_t value_offset() { return OFFSET_OF(Float32x4Layout, value_); }
+  static intptr_t value_offset() {
+    return OFFSET_OF(UntaggedFloat32x4, value_);
+  }
 
  private:
   FINAL_HEAP_OBJECT_IMPLEMENTATION(Float32x4, Instance);
@@ -9973,10 +9980,10 @@
   void set_value(simd128_value_t value) const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(Int32x4Layout));
+    return RoundedAllocationSize(sizeof(UntaggedInt32x4));
   }
 
-  static intptr_t value_offset() { return OFFSET_OF(Int32x4Layout, value_); }
+  static intptr_t value_offset() { return OFFSET_OF(UntaggedInt32x4, value_); }
 
  private:
   FINAL_HEAP_OBJECT_IMPLEMENTATION(Int32x4, Instance);
@@ -10001,10 +10008,12 @@
   void set_value(simd128_value_t value) const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(Float64x2Layout));
+    return RoundedAllocationSize(sizeof(UntaggedFloat64x2));
   }
 
-  static intptr_t value_offset() { return OFFSET_OF(Float64x2Layout, value_); }
+  static intptr_t value_offset() {
+    return OFFSET_OF(UntaggedFloat64x2, value_);
+  }
 
  private:
   FINAL_HEAP_OBJECT_IMPLEMENTATION(Float64x2, Instance);
@@ -10014,33 +10023,33 @@
 class PointerBase : public Instance {
  public:
   static intptr_t data_field_offset() {
-    return OFFSET_OF(PointerBaseLayout, data_);
+    return OFFSET_OF(UntaggedPointerBase, data_);
   }
 };
 
 class TypedDataBase : public PointerBase {
  public:
   static intptr_t length_offset() {
-    return OFFSET_OF(TypedDataBaseLayout, length_);
+    return OFFSET_OF(UntaggedTypedDataBase, length_);
   }
 
-  SmiPtr length() const { return raw_ptr()->length(); }
+  SmiPtr length() const { return untag()->length(); }
 
   intptr_t Length() const {
     ASSERT(!IsNull());
-    return Smi::Value(raw_ptr()->length());
+    return Smi::Value(untag()->length());
   }
 
   intptr_t LengthInBytes() const {
-    return ElementSizeInBytes(raw()->GetClassId()) * Length();
+    return ElementSizeInBytes(ptr()->GetClassId()) * Length();
   }
 
   TypedDataElementType ElementType() const {
-    return ElementType(raw()->GetClassId());
+    return ElementType(ptr()->GetClassId());
   }
 
   intptr_t ElementSizeInBytes() const {
-    return element_size(ElementType(raw()->GetClassId()));
+    return element_size(ElementType(ptr()->GetClassId()));
   }
 
   static intptr_t ElementSizeInBytes(classid_t cid) {
@@ -10069,13 +10078,13 @@
   void* DataAddr(intptr_t byte_offset) const {
     ASSERT((byte_offset == 0) ||
            ((byte_offset > 0) && (byte_offset < LengthInBytes())));
-    return reinterpret_cast<void*>(Validate(raw_ptr()->data_) + byte_offset);
+    return reinterpret_cast<void*>(Validate(untag()->data_) + byte_offset);
   }
 
  protected:
   void SetLength(intptr_t value) const {
     ASSERT(value <= Smi::kMaxValue);
-    raw_ptr()->set_length(Smi::New(value));
+    untag()->set_length(Smi::New(value));
   }
 
   virtual uint8_t* Validate(uint8_t* data) const {
@@ -10136,17 +10145,17 @@
 
 #undef TYPED_GETTER_SETTER
 
-  static intptr_t data_offset() { return TypedDataLayout::payload_offset(); }
+  static intptr_t data_offset() { return UntaggedTypedData::payload_offset(); }
 
   static intptr_t InstanceSize() {
-    ASSERT(sizeof(TypedDataLayout) ==
-           OFFSET_OF_RETURNED_VALUE(TypedDataLayout, internal_data));
+    ASSERT(sizeof(UntaggedTypedData) ==
+           OFFSET_OF_RETURNED_VALUE(UntaggedTypedData, internal_data));
     return 0;
   }
 
   static intptr_t InstanceSize(intptr_t lengthInBytes) {
     ASSERT(0 <= lengthInBytes && lengthInBytes <= kSmiMax);
-    return RoundedAllocationSize(sizeof(TypedDataLayout) + lengthInBytes);
+    return RoundedAllocationSize(sizeof(UntaggedTypedData) + lengthInBytes);
   }
 
   static intptr_t MaxElements(intptr_t class_id) {
@@ -10156,7 +10165,7 @@
 
   static intptr_t MaxNewSpaceElements(intptr_t class_id) {
     ASSERT(IsTypedDataClassId(class_id));
-    return (Heap::kNewAllocatableSize - sizeof(TypedDataLayout)) /
+    return (Heap::kNewAllocatableSize - sizeof(UntaggedTypedData)) /
            ElementSizeInBytes(class_id);
   }
 
@@ -10213,12 +10222,12 @@
 
   static bool IsTypedData(const Instance& obj) {
     ASSERT(!obj.IsNull());
-    intptr_t cid = obj.raw()->GetClassId();
+    intptr_t cid = obj.ptr()->GetClassId();
     return IsTypedDataClassId(cid);
   }
 
  protected:
-  void RecomputeDataField() { raw()->ptr()->RecomputeDataField(); }
+  void RecomputeDataField() { ptr()->untag()->RecomputeDataField(); }
 
  private:
   // Provides const access to non-pointer, non-aligned data within the object.
@@ -10229,8 +10238,7 @@
   // ensure the returned pointer does not escape.
   template <typename FieldType>
   const FieldType* ReadOnlyDataAddr(intptr_t byte_offset) const {
-    return reinterpret_cast<const FieldType*>((raw_ptr()->data()) +
-                                              byte_offset);
+    return reinterpret_cast<const FieldType*>((untag()->data()) + byte_offset);
   }
 
   FINAL_HEAP_OBJECT_IMPLEMENTATION(TypedData, TypedDataBase);
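 
   // DataAddr() above hands out a raw interior pointer (untag()->data_ plus a
   // byte offset), so uses must be fenced from GC moves; a minimal sketch:
   //
   //   uint32_t ReadUint32(const TypedData& td, intptr_t byte_offset) {
   //     NoSafepointScope no_safepoint;  // pointer must not outlive this
   //     return *reinterpret_cast<uint32_t*>(td.DataAddr(byte_offset));
   //   }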
@@ -10273,11 +10281,11 @@
                                             intptr_t external_size) const;
 
   static intptr_t data_offset() {
-    return OFFSET_OF(ExternalTypedDataLayout, data_);
+    return OFFSET_OF(UntaggedExternalTypedData, data_);
   }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(ExternalTypedDataLayout));
+    return RoundedAllocationSize(sizeof(UntaggedExternalTypedData));
   }
 
   static intptr_t MaxElements(intptr_t class_id) {
@@ -10296,7 +10304,7 @@
 
   static bool IsExternalTypedData(const Instance& obj) {
     ASSERT(!obj.IsNull());
-    intptr_t cid = obj.raw()->GetClassId();
+    intptr_t cid = obj.ptr()->GetClassId();
     return IsExternalTypedDataClassId(cid);
   }
 
@@ -10305,13 +10313,13 @@
 
   void SetLength(intptr_t value) const {
     ASSERT(value <= Smi::kMaxValue);
-    raw_ptr()->set_length(Smi::New(value));
+    untag()->set_length(Smi::New(value));
   }
 
   void SetData(uint8_t* data) const {
     ASSERT(!IsolateGroup::Current()->heap()->Contains(
         reinterpret_cast<uword>(data)));
-    StoreNonPointer(&raw_ptr()->data_, data);
+    StoreNonPointer(&untag()->data_, data);
   }
 
  private:
@@ -10330,7 +10338,7 @@
                               Heap::Space space = Heap::kNew);
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(TypedDataViewLayout));
+    return RoundedAllocationSize(sizeof(UntaggedTypedDataView));
   }
 
   static InstancePtr Data(const TypedDataView& view) {
@@ -10343,47 +10351,47 @@
 
   static bool IsExternalTypedDataView(const TypedDataView& view_obj) {
     const auto& data = Instance::Handle(Data(view_obj));
-    intptr_t cid = data.raw()->GetClassId();
+    intptr_t cid = data.ptr()->GetClassId();
     ASSERT(IsTypedDataClassId(cid) || IsExternalTypedDataClassId(cid));
     return IsExternalTypedDataClassId(cid);
   }
 
   static intptr_t data_offset() {
-    return OFFSET_OF(TypedDataViewLayout, typed_data_);
+    return OFFSET_OF(UntaggedTypedDataView, typed_data_);
   }
 
   static intptr_t offset_in_bytes_offset() {
-    return OFFSET_OF(TypedDataViewLayout, offset_in_bytes_);
+    return OFFSET_OF(UntaggedTypedDataView, offset_in_bytes_);
   }
 
-  InstancePtr typed_data() const { return raw_ptr()->typed_data(); }
+  InstancePtr typed_data() const { return untag()->typed_data(); }
 
   void InitializeWith(const TypedDataBase& typed_data,
                       intptr_t offset_in_bytes,
                       intptr_t length) {
     const classid_t cid = typed_data.GetClassId();
     ASSERT(IsTypedDataClassId(cid) || IsExternalTypedDataClassId(cid));
-    raw_ptr()->set_typed_data(typed_data.raw());
-    raw_ptr()->set_length(Smi::New(length));
-    raw_ptr()->set_offset_in_bytes(Smi::New(offset_in_bytes));
+    untag()->set_typed_data(typed_data.ptr());
+    untag()->set_length(Smi::New(length));
+    untag()->set_offset_in_bytes(Smi::New(offset_in_bytes));
 
     // Update the inner pointer.
     RecomputeDataField();
   }
 
-  SmiPtr offset_in_bytes() const { return raw_ptr()->offset_in_bytes(); }
+  SmiPtr offset_in_bytes() const { return untag()->offset_in_bytes(); }
 
  protected:
   virtual uint8_t* Validate(uint8_t* data) const { return data; }
 
  private:
-  void RecomputeDataField() { raw()->ptr()->RecomputeDataField(); }
+  void RecomputeDataField() { ptr()->untag()->RecomputeDataField(); }
 
   void Clear() {
-    raw_ptr()->set_length(Smi::New(0));
-    raw_ptr()->set_offset_in_bytes(Smi::New(0));
-    StoreNonPointer(&raw_ptr()->data_, nullptr);
-    raw_ptr()->set_typed_data(TypedDataBase::RawCast(Object::null()));
+    untag()->set_length(Smi::New(0));
+    untag()->set_offset_in_bytes(Smi::New(0));
+    StoreNonPointer(&untag()->data_, nullptr);
+    untag()->set_typed_data(TypedDataBase::RawCast(Object::null()));
   }
 
   FINAL_HEAP_OBJECT_IMPLEMENTATION(TypedDataView, TypedDataBase);
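 
   // The view invariant maintained by InitializeWith()/Clear() above: after
   // the three stores, RecomputeDataField() derives the inner pointer so that
   // element access is a plain indexed load, i.e. (informally)
   //
   //   data_ == start of backing data + offset_in_bytes   // null once cleared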
@@ -10396,7 +10404,7 @@
  public:
   static InstancePtr Data(const Instance& view_obj) {
     ASSERT(!view_obj.IsNull());
-    return *reinterpret_cast<InstancePtr const*>(view_obj.raw_ptr() +
+    return *reinterpret_cast<InstancePtr const*>(view_obj.untag() +
                                                  kDataOffset);
   }
 
@@ -10417,25 +10425,25 @@
                         Heap::Space space = Heap::kNew);
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(PointerLayout));
+    return RoundedAllocationSize(sizeof(UntaggedPointer));
   }
 
   static bool IsPointer(const Instance& obj);
 
   size_t NativeAddress() const {
-    return reinterpret_cast<size_t>(raw_ptr()->data_);
+    return reinterpret_cast<size_t>(untag()->data_);
   }
 
   void SetNativeAddress(size_t address) const {
     uint8_t* value = reinterpret_cast<uint8_t*>(address);
-    StoreNonPointer(&raw_ptr()->data_, value);
+    StoreNonPointer(&untag()->data_, value);
   }
 
   static intptr_t type_arguments_offset() {
-    return OFFSET_OF(PointerLayout, type_arguments_);
+    return OFFSET_OF(UntaggedPointer, type_arguments_);
   }
 
-  static intptr_t NextFieldOffset() { return sizeof(PointerLayout); }
+  static intptr_t NextFieldOffset() { return sizeof(UntaggedPointer); }
 
   static const intptr_t kNativeTypeArgPos = 0;
 
@@ -10456,22 +10464,22 @@
   static DynamicLibraryPtr New(void* handle, Heap::Space space = Heap::kNew);
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(DynamicLibraryLayout));
+    return RoundedAllocationSize(sizeof(UntaggedDynamicLibrary));
   }
 
   static bool IsDynamicLibrary(const Instance& obj) {
     ASSERT(!obj.IsNull());
-    intptr_t cid = obj.raw()->GetClassId();
+    intptr_t cid = obj.ptr()->GetClassId();
     return IsFfiDynamicLibraryClassId(cid);
   }
 
   void* GetHandle() const {
     ASSERT(!IsNull());
-    return raw_ptr()->handle_;
+    return untag()->handle_;
   }
 
   void SetHandle(void* value) const {
-    StoreNonPointer(&raw_ptr()->handle_, value);
+    StoreNonPointer(&untag()->handle_, value);
   }
 
  private:
@@ -10487,7 +10495,7 @@
 class LinkedHashMap : public Instance {
  public:
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(LinkedHashMapLayout));
+    return RoundedAllocationSize(sizeof(UntaggedLinkedHashMap));
   }
 
   // Allocates a map with some default capacity, just like "new Map()".
@@ -10500,7 +10508,7 @@
                               Heap::Space space = Heap::kNew);
 
   virtual TypeArgumentsPtr GetTypeArguments() const {
-    return raw_ptr()->type_arguments();
+    return untag()->type_arguments();
   }
   virtual void SetTypeArguments(const TypeArguments& value) const {
     ASSERT(value.IsNull() ||
@@ -10508,58 +10516,58 @@
             value.IsInstantiated() /*&& value.IsCanonical()*/));
     // TODO(asiva): Values read from a message snapshot are not properly marked
     // as canonical. See for example tests/isolate/message3_test.dart.
-    raw_ptr()->set_type_arguments(value.raw());
+    untag()->set_type_arguments(value.ptr());
   }
   static intptr_t type_arguments_offset() {
-    return OFFSET_OF(LinkedHashMapLayout, type_arguments_);
+    return OFFSET_OF(UntaggedLinkedHashMap, type_arguments_);
   }
 
-  TypedDataPtr index() const { return raw_ptr()->index(); }
+  TypedDataPtr index() const { return untag()->index(); }
   void SetIndex(const TypedData& value) const {
     ASSERT(!value.IsNull());
-    raw_ptr()->set_index(value.raw());
+    untag()->set_index(value.ptr());
   }
   static intptr_t index_offset() {
-    return OFFSET_OF(LinkedHashMapLayout, index_);
+    return OFFSET_OF(UntaggedLinkedHashMap, index_);
   }
 
-  ArrayPtr data() const { return raw_ptr()->data(); }
-  void SetData(const Array& value) const { raw_ptr()->set_data(value.raw()); }
+  ArrayPtr data() const { return untag()->data(); }
+  void SetData(const Array& value) const { untag()->set_data(value.ptr()); }
   static intptr_t data_offset() {
-    return OFFSET_OF(LinkedHashMapLayout, data_);
+    return OFFSET_OF(UntaggedLinkedHashMap, data_);
   }
 
-  SmiPtr hash_mask() const { return raw_ptr()->hash_mask(); }
+  SmiPtr hash_mask() const { return untag()->hash_mask(); }
   void SetHashMask(intptr_t value) const {
-    raw_ptr()->set_hash_mask(Smi::New(value));
+    untag()->set_hash_mask(Smi::New(value));
   }
   static intptr_t hash_mask_offset() {
-    return OFFSET_OF(LinkedHashMapLayout, hash_mask_);
+    return OFFSET_OF(UntaggedLinkedHashMap, hash_mask_);
   }
 
-  SmiPtr used_data() const { return raw_ptr()->used_data(); }
+  SmiPtr used_data() const { return untag()->used_data(); }
   void SetUsedData(intptr_t value) const {
-    raw_ptr()->set_used_data(Smi::New(value));
+    untag()->set_used_data(Smi::New(value));
   }
   static intptr_t used_data_offset() {
-    return OFFSET_OF(LinkedHashMapLayout, used_data_);
+    return OFFSET_OF(UntaggedLinkedHashMap, used_data_);
   }
 
-  SmiPtr deleted_keys() const { return raw_ptr()->deleted_keys(); }
+  SmiPtr deleted_keys() const { return untag()->deleted_keys(); }
   void SetDeletedKeys(intptr_t value) const {
-    raw_ptr()->set_deleted_keys(Smi::New(value));
+    untag()->set_deleted_keys(Smi::New(value));
   }
   static intptr_t deleted_keys_offset() {
-    return OFFSET_OF(LinkedHashMapLayout, deleted_keys_);
+    return OFFSET_OF(UntaggedLinkedHashMap, deleted_keys_);
   }
 
   intptr_t Length() const {
     // The map may be uninitialized.
-    if (raw_ptr()->used_data() == Object::null()) return 0;
-    if (raw_ptr()->deleted_keys() == Object::null()) return 0;
+    if (untag()->used_data() == Object::null()) return 0;
+    if (untag()->deleted_keys() == Object::null()) return 0;
 
-    intptr_t used = Smi::Value(raw_ptr()->used_data());
-    intptr_t deleted = Smi::Value(raw_ptr()->deleted_keys());
+    intptr_t used = Smi::Value(untag()->used_data());
+    intptr_t deleted = Smi::Value(untag()->deleted_keys());
     return (used >> 1) - deleted;
   }
 
@@ -10583,7 +10591,7 @@
           return false;
         }
         scratch_ = data_.At(offset_);
-        if (scratch_.raw() != data_.raw()) {
+        if (scratch_.ptr() != data_.ptr()) {
           // Slot is not deleted (self-reference indicates deletion).
           return true;
         }
@@ -10619,43 +10627,43 @@
 class Closure : public Instance {
  public:
   TypeArgumentsPtr instantiator_type_arguments() const {
-    return raw_ptr()->instantiator_type_arguments();
+    return untag()->instantiator_type_arguments();
   }
   void set_instantiator_type_arguments(const TypeArguments& args) const {
-    raw_ptr()->set_instantiator_type_arguments(args.raw());
+    untag()->set_instantiator_type_arguments(args.ptr());
   }
   static intptr_t instantiator_type_arguments_offset() {
-    return OFFSET_OF(ClosureLayout, instantiator_type_arguments_);
+    return OFFSET_OF(UntaggedClosure, instantiator_type_arguments_);
   }
 
   TypeArgumentsPtr function_type_arguments() const {
-    return raw_ptr()->function_type_arguments();
+    return untag()->function_type_arguments();
   }
   void set_function_type_arguments(const TypeArguments& args) const {
-    raw_ptr()->set_function_type_arguments(args.raw());
+    untag()->set_function_type_arguments(args.ptr());
   }
   static intptr_t function_type_arguments_offset() {
-    return OFFSET_OF(ClosureLayout, function_type_arguments_);
+    return OFFSET_OF(UntaggedClosure, function_type_arguments_);
   }
 
   TypeArgumentsPtr delayed_type_arguments() const {
-    return raw_ptr()->delayed_type_arguments();
+    return untag()->delayed_type_arguments();
   }
   void set_delayed_type_arguments(const TypeArguments& args) const {
-    raw_ptr()->set_delayed_type_arguments(args.raw());
+    untag()->set_delayed_type_arguments(args.ptr());
   }
   static intptr_t delayed_type_arguments_offset() {
-    return OFFSET_OF(ClosureLayout, delayed_type_arguments_);
+    return OFFSET_OF(UntaggedClosure, delayed_type_arguments_);
   }
 
-  FunctionPtr function() const { return raw_ptr()->function(); }
+  FunctionPtr function() const { return untag()->function(); }
   static intptr_t function_offset() {
-    return OFFSET_OF(ClosureLayout, function_);
+    return OFFSET_OF(UntaggedClosure, function_);
   }
 
-  ContextPtr context() const { return raw_ptr()->context(); }
+  ContextPtr context() const { return untag()->context(); }
   static intptr_t context_offset() {
-    return OFFSET_OF(ClosureLayout, context_);
+    return OFFSET_OF(UntaggedClosure, context_);
   }
 
   bool IsGeneric(Thread* thread) const { return NumTypeParameters(thread) > 0; }
@@ -10663,11 +10671,11 @@
   // No need for num_parent_type_arguments, as a closure is always closed
   // over its parent's type parameters (i.e., function_type_parameters() above).
 
-  SmiPtr hash() const { return raw_ptr()->hash(); }
-  static intptr_t hash_offset() { return OFFSET_OF(ClosureLayout, hash_); }
+  SmiPtr hash() const { return untag()->hash(); }
+  static intptr_t hash_offset() { return OFFSET_OF(UntaggedClosure, hash_); }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(ClosureLayout));
+    return RoundedAllocationSize(sizeof(UntaggedClosure));
   }
 
   virtual void CanonicalizeFieldsLocked(Thread* thread) const;
@@ -10701,10 +10709,10 @@
 
 class Capability : public Instance {
  public:
-  uint64_t Id() const { return raw_ptr()->id_; }
+  uint64_t Id() const { return untag()->id_; }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(CapabilityLayout));
+    return RoundedAllocationSize(sizeof(UntaggedCapability));
   }
   static CapabilityPtr New(uint64_t id, Heap::Space space = Heap::kNew);
 
@@ -10715,22 +10723,22 @@
 
 class ReceivePort : public Instance {
  public:
-  SendPortPtr send_port() const { return raw_ptr()->send_port(); }
-  Dart_Port Id() const { return send_port()->ptr()->id_; }
+  SendPortPtr send_port() const { return untag()->send_port(); }
+  Dart_Port Id() const { return send_port()->untag()->id_; }
 
-  InstancePtr handler() const { return raw_ptr()->handler(); }
+  InstancePtr handler() const { return untag()->handler(); }
   void set_handler(const Instance& value) const;
 
 #if !defined(PRODUCT)
   StackTracePtr allocation_location() const {
-    return raw_ptr()->allocation_location();
+    return untag()->allocation_location();
   }
 
-  StringPtr debug_name() const { return raw_ptr()->debug_name(); }
+  StringPtr debug_name() const { return untag()->debug_name(); }
 #endif
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(ReceivePortLayout));
+    return RoundedAllocationSize(sizeof(UntaggedReceivePort));
   }
   static ReceivePortPtr New(Dart_Port id,
                             const String& debug_name,
@@ -10744,16 +10752,16 @@
 
 class SendPort : public Instance {
  public:
-  Dart_Port Id() const { return raw_ptr()->id_; }
+  Dart_Port Id() const { return untag()->id_; }
 
-  Dart_Port origin_id() const { return raw_ptr()->origin_id_; }
+  Dart_Port origin_id() const { return untag()->origin_id_; }
   void set_origin_id(Dart_Port id) const {
     ASSERT(origin_id() == 0);
-    StoreNonPointer(&(raw_ptr()->origin_id_), id);
+    StoreNonPointer(&(untag()->origin_id_), id);
   }
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(SendPortLayout));
+    return RoundedAllocationSize(sizeof(UntaggedSendPort));
   }
   static SendPortPtr New(Dart_Port id, Heap::Space space = Heap::kNew);
   static SendPortPtr New(Dart_Port id,
@@ -10801,7 +10809,7 @@
                                       Heap::Space space = Heap::kNew);
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(TransferableTypedDataLayout));
+    return RoundedAllocationSize(sizeof(UntaggedTransferableTypedData));
   }
 
  private:
@@ -10816,15 +10824,15 @@
 
   intptr_t Length() const;
 
-  StackTracePtr async_link() const { return raw_ptr()->async_link(); }
+  StackTracePtr async_link() const { return untag()->async_link(); }
   void set_async_link(const StackTrace& async_link) const;
   void set_expand_inlined(bool value) const;
 
-  ArrayPtr code_array() const { return raw_ptr()->code_array(); }
+  ArrayPtr code_array() const { return untag()->code_array(); }
   ObjectPtr CodeAtFrame(intptr_t frame_index) const;
   void SetCodeAtFrame(intptr_t frame_index, const Object& code) const;
 
-  ArrayPtr pc_offset_array() const { return raw_ptr()->pc_offset_array(); }
+  ArrayPtr pc_offset_array() const { return untag()->pc_offset_array(); }
   SmiPtr PcOffsetAtFrame(intptr_t frame_index) const;
   void SetPcOffsetAtFrame(intptr_t frame_index, const Smi& pc_offset) const;
 
@@ -10846,7 +10854,7 @@
   static constexpr intptr_t kSyncAsyncCroppedFrames = 2;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(StackTraceLayout));
+    return RoundedAllocationSize(sizeof(UntaggedStackTrace));
   }
   static StackTracePtr New(const Array& code_array,
                            const Array& pc_offset_array,
@@ -10945,23 +10953,23 @@
   bool is_complex() const { return (type() == kComplex); }
 
   intptr_t num_registers(bool is_one_byte) const {
-    return is_one_byte ? raw_ptr()->num_one_byte_registers_
-                       : raw_ptr()->num_two_byte_registers_;
+    return is_one_byte ? untag()->num_one_byte_registers_
+                       : untag()->num_two_byte_registers_;
   }
 
-  StringPtr pattern() const { return raw_ptr()->pattern(); }
+  StringPtr pattern() const { return untag()->pattern(); }
   SmiPtr num_bracket_expressions() const {
-    return raw_ptr()->num_bracket_expressions();
+    return untag()->num_bracket_expressions();
   }
-  ArrayPtr capture_name_map() const { return raw_ptr()->capture_name_map(); }
+  ArrayPtr capture_name_map() const { return untag()->capture_name_map(); }
 
   TypedDataPtr bytecode(bool is_one_byte, bool sticky) const {
     if (sticky) {
-      return TypedData::RawCast(is_one_byte ? raw_ptr()->one_byte_sticky_
-                                            : raw_ptr()->two_byte_sticky_);
+      return TypedData::RawCast(is_one_byte ? untag()->one_byte_sticky_
+                                            : untag()->two_byte_sticky_);
     } else {
-      return TypedData::RawCast(is_one_byte ? raw_ptr()->one_byte_
-                                            : raw_ptr()->two_byte_);
+      return TypedData::RawCast(is_one_byte ? untag()->one_byte_
+                                            : untag()->two_byte_);
     }
   }
 
@@ -10969,24 +10977,24 @@
     if (sticky) {
       switch (cid) {
         case kOneByteStringCid:
-          return OFFSET_OF(RegExpLayout, one_byte_sticky_);
+          return OFFSET_OF(UntaggedRegExp, one_byte_sticky_);
         case kTwoByteStringCid:
-          return OFFSET_OF(RegExpLayout, two_byte_sticky_);
+          return OFFSET_OF(UntaggedRegExp, two_byte_sticky_);
         case kExternalOneByteStringCid:
-          return OFFSET_OF(RegExpLayout, external_one_byte_sticky_);
+          return OFFSET_OF(UntaggedRegExp, external_one_byte_sticky_);
         case kExternalTwoByteStringCid:
-          return OFFSET_OF(RegExpLayout, external_two_byte_sticky_);
+          return OFFSET_OF(UntaggedRegExp, external_two_byte_sticky_);
       }
     } else {
       switch (cid) {
         case kOneByteStringCid:
-          return OFFSET_OF(RegExpLayout, one_byte_);
+          return OFFSET_OF(UntaggedRegExp, one_byte_);
         case kTwoByteStringCid:
-          return OFFSET_OF(RegExpLayout, two_byte_);
+          return OFFSET_OF(UntaggedRegExp, two_byte_);
         case kExternalOneByteStringCid:
-          return OFFSET_OF(RegExpLayout, external_one_byte_);
+          return OFFSET_OF(UntaggedRegExp, external_one_byte_);
         case kExternalTwoByteStringCid:
-          return OFFSET_OF(RegExpLayout, external_two_byte_);
+          return OFFSET_OF(UntaggedRegExp, external_two_byte_);
       }
     }
 
@@ -11040,36 +11048,36 @@
   void set_is_complex() const { set_type(kComplex); }
   void set_num_registers(bool is_one_byte, intptr_t value) const {
     if (is_one_byte) {
-      StoreNonPointer(&raw_ptr()->num_one_byte_registers_, value);
+      StoreNonPointer(&untag()->num_one_byte_registers_, value);
     } else {
-      StoreNonPointer(&raw_ptr()->num_two_byte_registers_, value);
+      StoreNonPointer(&untag()->num_two_byte_registers_, value);
     }
   }
 
   RegExpFlags flags() const {
-    return RegExpFlags(FlagsBits::decode(raw_ptr()->type_flags_));
+    return RegExpFlags(FlagsBits::decode(untag()->type_flags_));
   }
   void set_flags(RegExpFlags flags) const {
-    StoreNonPointer(&raw_ptr()->type_flags_,
-                    FlagsBits::update(flags.value(), raw_ptr()->type_flags_));
+    StoreNonPointer(&untag()->type_flags_,
+                    FlagsBits::update(flags.value(), untag()->type_flags_));
   }
   const char* Flags() const;
 
   virtual bool CanonicalizeEquals(const Instance& other) const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(RegExpLayout));
+    return RoundedAllocationSize(sizeof(UntaggedRegExp));
   }
 
   static RegExpPtr New(Heap::Space space = Heap::kNew);
 
  private:
   void set_type(RegExType type) const {
-    StoreNonPointer(&raw_ptr()->type_flags_,
-                    TypeBits::update(type, raw_ptr()->type_flags_));
+    StoreNonPointer(&untag()->type_flags_,
+                    TypeBits::update(type, untag()->type_flags_));
   }
 
-  RegExType type() const { return TypeBits::decode(raw_ptr()->type_flags_); }
+  RegExType type() const { return TypeBits::decode(untag()->type_flags_); }
 
   FINAL_HEAP_OBJECT_IMPLEMENTATION(RegExp, Instance);
   friend class Class;
@@ -11077,29 +11085,27 @@
 
 class WeakProperty : public Instance {
  public:
-  ObjectPtr key() const { return raw_ptr()->key(); }
-  void set_key(const Object& key) const { raw_ptr()->set_key(key.raw()); }
-  static intptr_t key_offset() { return OFFSET_OF(WeakPropertyLayout, key_); }
+  ObjectPtr key() const { return untag()->key(); }
+  void set_key(const Object& key) const { untag()->set_key(key.ptr()); }
+  static intptr_t key_offset() { return OFFSET_OF(UntaggedWeakProperty, key_); }
 
-  ObjectPtr value() const { return raw_ptr()->value(); }
-  void set_value(const Object& value) const {
-    raw_ptr()->set_value(value.raw());
-  }
+  ObjectPtr value() const { return untag()->value(); }
+  void set_value(const Object& value) const { untag()->set_value(value.ptr()); }
   static intptr_t value_offset() {
-    return OFFSET_OF(WeakPropertyLayout, value_);
+    return OFFSET_OF(UntaggedWeakProperty, value_);
   }
 
   static WeakPropertyPtr New(Heap::Space space = Heap::kNew);
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(WeakPropertyLayout));
+    return RoundedAllocationSize(sizeof(UntaggedWeakProperty));
   }
 
   static void Clear(WeakPropertyPtr raw_weak) {
-    ASSERT(raw_weak->ptr()->next_ == WeakProperty::null());
+    ASSERT(raw_weak->untag()->next_ == WeakProperty::null());
     // This action is performed by the GC. No barrier.
-    raw_weak->ptr()->key_ = Object::null();
-    raw_weak->ptr()->value_ = Object::null();
+    raw_weak->untag()->key_ = Object::null();
+    raw_weak->untag()->value_ = Object::null();
   }
 
  private:
@@ -11109,10 +11115,10 @@
 
 class MirrorReference : public Instance {
  public:
-  ObjectPtr referent() const { return raw_ptr()->referent(); }
+  ObjectPtr referent() const { return untag()->referent(); }
 
   void set_referent(const Object& referent) const {
-    raw_ptr()->set_referent(referent.raw());
+    untag()->set_referent(referent.ptr());
   }
 
   AbstractTypePtr GetAbstractTypeReferent() const;
@@ -11133,7 +11139,7 @@
                                 Heap::Space space = Heap::kNew);
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(MirrorReferenceLayout));
+    return RoundedAllocationSize(sizeof(UntaggedMirrorReference));
   }
 
  private:
@@ -11143,20 +11149,20 @@
 
 class UserTag : public Instance {
  public:
-  uword tag() const { return raw_ptr()->tag(); }
+  uword tag() const { return untag()->tag(); }
   void set_tag(uword t) const {
     ASSERT(t >= UserTags::kUserTagIdOffset);
     ASSERT(t < UserTags::kUserTagIdOffset + UserTags::kMaxUserTags);
-    StoreNonPointer(&raw_ptr()->tag_, t);
+    StoreNonPointer(&untag()->tag_, t);
   }
-  static intptr_t tag_offset() { return OFFSET_OF(UserTagLayout, tag_); }
+  static intptr_t tag_offset() { return OFFSET_OF(UntaggedUserTag, tag_); }
 
-  StringPtr label() const { return raw_ptr()->label(); }
+  StringPtr label() const { return untag()->label(); }
 
   void MakeActive() const;
 
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(UserTagLayout));
+    return RoundedAllocationSize(sizeof(UntaggedUserTag));
   }
 
   static UserTagPtr New(const String& label, Heap::Space space = Heap::kOld);
@@ -11170,7 +11176,7 @@
   static void AddTagToIsolate(Thread* thread, const UserTag& tag);
 
   void set_label(const String& tag_label) const {
-    raw_ptr()->set_label(tag_label.raw());
+    untag()->set_label(tag_label.ptr());
   }
 
   FINAL_HEAP_OBJECT_IMPLEMENTATION(UserTag, Instance);
@@ -11181,14 +11187,14 @@
 class FutureOr : public Instance {
  public:
   static intptr_t InstanceSize() {
-    return RoundedAllocationSize(sizeof(FutureOrLayout));
+    return RoundedAllocationSize(sizeof(UntaggedFutureOr));
   }
 
   virtual TypeArgumentsPtr GetTypeArguments() const {
-    return raw_ptr()->type_arguments();
+    return untag()->type_arguments();
   }
   static intptr_t type_arguments_offset() {
-    return OFFSET_OF(FutureOrLayout, type_arguments_);
+    return OFFSET_OF(UntaggedFutureOr, type_arguments_);
   }
 
  private:
@@ -11199,18 +11205,18 @@
 
 // Breaking cycles and loops.
 ClassPtr Object::clazz() const {
-  uword raw_value = static_cast<uword>(raw_);
+  uword raw_value = static_cast<uword>(ptr_);
   if ((raw_value & kSmiTagMask) == kSmiTag) {
     return Smi::Class();
   }
   ASSERT(!IsolateGroup::Current()->compaction_in_progress());
-  return IsolateGroup::Current()->class_table()->At(raw()->GetClassId());
+  return IsolateGroup::Current()->class_table()->At(ptr()->GetClassId());
 }
 
 DART_FORCE_INLINE
-void Object::SetRaw(ObjectPtr value) {
+void Object::SetPtr(ObjectPtr value) {
   NoSafepointScope no_safepoint_scope;
-  raw_ = value;
+  ptr_ = value;
   intptr_t cid = value->GetClassIdMayBeSmi();
   // Free-list elements cannot be wrapped in a handle.
   ASSERT(cid != kFreeListElement);
@@ -11220,16 +11226,16 @@
   }
   set_vtable(builtin_vtables_[cid]);
 #if defined(DEBUG)
-  if (FLAG_verify_handles && raw_->IsHeapObject()) {
+  if (FLAG_verify_handles && ptr_->IsHeapObject()) {
     Heap* isolate_heap = IsolateGroup::Current()->heap();
     // TODO(rmacnak): Remove after rewriting StackFrame::VisitObjectPointers
     // to not use handles.
     if (!isolate_heap->new_space()->scavenging()) {
       Heap* vm_isolate_heap = Dart::vm_isolate_group()->heap();
-      uword addr = ObjectLayout::ToAddr(raw_);
+      uword addr = UntaggedObject::ToAddr(ptr_);
       if (!isolate_heap->Contains(addr) && !vm_isolate_heap->Contains(addr)) {
         ASSERT(FLAG_write_protect_code);
-        addr = ObjectLayout::ToAddr(OldPage::ToWritable(raw_));
+        addr = UntaggedObject::ToAddr(OldPage::ToWritable(ptr_));
         ASSERT(isolate_heap->Contains(addr) || vm_isolate_heap->Contains(addr));
       }
     }
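 
 // SetPtr() (renamed from SetRaw()) is the one place a handle rebinds: it
 // stores the tagged pointer and then installs the vtable registered for the
 // new class id, so virtual dispatch on the handle matches the heap object it
 // now refers to. Sketch of the effect (some_array/some_string hypothetical):
 //
 //   Object& h = Object::Handle(some_array);   // vtable for Array
 //   h = some_string;                          // SetPtr(): vtable for String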
@@ -11239,13 +11245,13 @@
 
 intptr_t Field::HostOffset() const {
   ASSERT(is_instance());  // Valid only for dart instance fields.
-  return (Smi::Value(raw_ptr()->host_offset_or_field_id()) * kWordSize);
+  return (Smi::Value(untag()->host_offset_or_field_id()) * kWordSize);
 }
 
 intptr_t Field::TargetOffset() const {
   ASSERT(is_instance());  // Valid only for dart instance fields.
 #if !defined(DART_PRECOMPILED_RUNTIME)
-  return (raw_ptr()->target_offset_ * compiler::target::kWordSize);
+  return (untag()->target_offset_ * compiler::target::kWordSize);
 #else
   return HostOffset();
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
@@ -11253,9 +11259,9 @@
 
 inline intptr_t Field::TargetOffsetOf(const FieldPtr field) {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-  return field->ptr()->target_offset_;
+  return field->untag()->target_offset_;
 #else
-  return Smi::Value(field->ptr()->host_offset_or_field_id_);
+  return Smi::Value(field->untag()->host_offset_or_field_id_);
 #endif  //  !defined(DART_PRECOMPILED_RUNTIME)
 }
 
@@ -11263,11 +11269,11 @@
                       intptr_t target_offset_in_bytes) const {
   ASSERT(is_instance());  // Valid only for dart instance fields.
   ASSERT(kWordSize != 0);
-  raw_ptr()->set_host_offset_or_field_id(
+  untag()->set_host_offset_or_field_id(
       Smi::New(host_offset_in_bytes / kWordSize));
 #if !defined(DART_PRECOMPILED_RUNTIME)
   ASSERT(compiler::target::kWordSize != 0);
-  StoreNonPointer(&raw_ptr()->target_offset_,
+  StoreNonPointer(&untag()->target_offset_,
                   target_offset_in_bytes / compiler::target::kWordSize);
 #else
   ASSERT(host_offset_in_bytes == target_offset_in_bytes);
@@ -11280,17 +11286,17 @@
 }
 
 inline intptr_t Field::field_id() const {
-  return Smi::Value(raw_ptr()->host_offset_or_field_id());
+  return Smi::Value(untag()->host_offset_or_field_id());
 }
 
 void Field::set_field_id(intptr_t field_id) const {
   ASSERT(is_static());
   ASSERT(Thread::Current()->IsMutatorThread());
-  raw_ptr()->set_host_offset_or_field_id(Smi::New(field_id));
+  untag()->set_host_offset_or_field_id(Smi::New(field_id));
 }
 
 void Context::SetAt(intptr_t index, const Object& value) const {
-  raw_ptr()->set_element(index, value.raw());
+  untag()->set_element(index, value.ptr());
 }
 
 intptr_t Instance::GetNativeField(int index) const {
@@ -11300,7 +11306,7 @@
   if (native_fields == TypedData::null()) {
     return 0;
   }
-  return reinterpret_cast<intptr_t*>(native_fields->ptr()->data())[index];
+  return reinterpret_cast<intptr_t*>(native_fields->untag()->data())[index];
 }
 
 void Instance::GetNativeFields(uint16_t num_fields,
@@ -11314,14 +11320,15 @@
       field_values[i] = 0;
     }
   }
-  intptr_t* fields = reinterpret_cast<intptr_t*>(native_fields->ptr()->data());
+  intptr_t* fields =
+      reinterpret_cast<intptr_t*>(native_fields->untag()->data());
   for (intptr_t i = 0; i < num_fields; i++) {
     field_values[i] = fields[i];
   }
 }
 
 bool String::Equals(const String& str) const {
-  if (raw() == str.raw()) {
+  if (ptr() == str.ptr()) {
     return true;  // Both handles point to the same raw instance.
   }
   if (str.IsNull()) {
@@ -11373,7 +11380,7 @@
 
 inline intptr_t Type::Hash() const {
   ASSERT(IsFinalized());
-  intptr_t result = Smi::Value(raw_ptr()->hash());
+  intptr_t result = Smi::Value(untag()->hash());
   if (result != 0) {
     return result;
   }
@@ -11383,12 +11390,12 @@
 inline void Type::SetHash(intptr_t value) const {
   // This is only safe because we create a new Smi, which does not cause
   // heap allocation.
-  raw_ptr()->set_hash(Smi::New(value));
+  untag()->set_hash(Smi::New(value));
 }
 
 inline intptr_t FunctionType::Hash() const {
   ASSERT(IsFinalized());
-  intptr_t result = Smi::Value(raw_ptr()->hash_);
+  intptr_t result = Smi::Value(untag()->hash_);
   if (result != 0) {
     return result;
   }
@@ -11398,12 +11405,12 @@
 inline void FunctionType::SetHash(intptr_t value) const {
   // This is only safe because we create a new Smi, which does not cause
   // heap allocation.
-  StoreSmi(&raw_ptr()->hash_, Smi::New(value));
+  StoreSmi(&untag()->hash_, Smi::New(value));
 }
 
 inline intptr_t TypeParameter::Hash() const {
   ASSERT(IsFinalized() || IsBeingFinalized());  // Bound may not be finalized.
-  intptr_t result = Smi::Value(raw_ptr()->hash());
+  intptr_t result = Smi::Value(untag()->hash());
   if (result != 0) {
     return result;
   }
@@ -11413,12 +11420,12 @@
 inline void TypeParameter::SetHash(intptr_t value) const {
   // This is only safe because we create a new Smi, which does not cause
   // heap allocation.
-  raw_ptr()->set_hash(Smi::New(value));
+  untag()->set_hash(Smi::New(value));
 }
 
 inline intptr_t TypeArguments::Hash() const {
   if (IsNull()) return kAllDynamicHash;
-  intptr_t result = Smi::Value(raw_ptr()->hash());
+  intptr_t result = Smi::Value(untag()->hash());
   if (result != 0) {
     return result;
   }
@@ -11428,7 +11435,7 @@
 inline void TypeArguments::SetHash(intptr_t value) const {
   // This is only safe because we create a new Smi, which does not cause
   // heap allocation.
-  raw_ptr()->set_hash(Smi::New(value));
+  untag()->set_hash(Smi::New(value));
 }
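
// The "does not cause heap allocation" comments above rest on Smis being
// immediates: the integer lives in the tagged pointer bits themselves, so
// Smi::New never touches the heap and is safe where GC must not run. A
// minimal sketch of such a one-bit tagging scheme (constants illustrative):
#include <cstdint>
inline intptr_t TagAsSmi(intptr_t value) {
  // Low bit 0 marks an immediate integer; the payload sits in the upper bits.
  return static_cast<intptr_t>(static_cast<uintptr_t>(value) << 1);
}
inline intptr_t UntagSmi(intptr_t tagged) {
  return tagged >> 1;  // arithmetic shift restores the signed payload
}
inline bool IsSmi(intptr_t tagged) {
  return (tagged & 1) == 0;
}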
 
 inline uint16_t String::CharAt(StringPtr str, intptr_t index) {
diff --git a/runtime/vm/object_arm64_test.cc b/runtime/vm/object_arm64_test.cc
index 640c91b..681b8e6 100644
--- a/runtime/vm/object_arm64_test.cc
+++ b/runtime/vm/object_arm64_test.cc
@@ -46,7 +46,7 @@
 // This is used to test Embedded Smi objects in the instructions.
 void GenerateEmbedSmiInCode(compiler::Assembler* assembler, intptr_t value) {
   const Smi& smi_object = Smi::ZoneHandle(Smi::New(value));
-  const int64_t val = static_cast<int64_t>(smi_object.raw());
+  const int64_t val = static_cast<int64_t>(smi_object.ptr());
   __ LoadImmediate(R0, val);
   __ ret();
 }
diff --git a/runtime/vm/object_graph.cc b/runtime/vm/object_graph.cc
index 9f5785a..489b5e1 100644
--- a/runtime/vm/object_graph.cc
+++ b/runtime/vm/object_graph.cc
@@ -55,7 +55,8 @@
   // won't be in use while handling a service request (ObjectGraph's only use).
   virtual void VisitPointers(ObjectPtr* first, ObjectPtr* last) {
     for (ObjectPtr* current = first; current <= last; ++current) {
-      if ((*current)->IsHeapObject() && !(*current)->ptr()->InVMIsolateHeap() &&
+      if ((*current)->IsHeapObject() &&
+          !(*current)->untag()->InVMIsolateHeap() &&
           object_ids_->GetValueExclusive(*current) == 0) {  // not visited yet
         if (!include_vm_objects_ && !IsUserClass((*current)->GetClassId())) {
           continue;
@@ -93,7 +94,7 @@
       }
       if (direction == ObjectGraph::Visitor::kProceed) {
         set_gc_root_type(node.gc_root_type);
-        obj->ptr()->VisitPointers(this);
+        obj->untag()->VisitPointers(this);
         clear_gc_root_type();
       }
     }
@@ -162,17 +163,17 @@
     return -1;
   }
   Stack::Node parent = stack_->data_[parent_index];
-  uword parent_start = ObjectLayout::ToAddr(parent.obj);
+  uword parent_start = UntaggedObject::ToAddr(parent.obj);
   Stack::Node child = stack_->data_[index_];
   ASSERT(child.obj == *child.ptr);
   uword child_ptr_addr = reinterpret_cast<uword>(child.ptr);
   intptr_t offset = child_ptr_addr - parent_start;
-  if (offset > 0 && offset < parent.obj->ptr()->HeapSize()) {
+  if (offset > 0 && offset < parent.obj->untag()->HeapSize()) {
     ASSERT(Utils::IsAligned(offset, kWordSize));
     return offset >> kWordSizeLog2;
   } else {
     // Some internal VM objects visit pointers not contained within the parent.
-    // For instance, CodeLayout::VisitCodePointers visits pointers in
+    // For instance, UntaggedCode::VisitCodePointers visits pointers in
     // instructions.
     ASSERT(!parent.obj->IsDartInstance());
     return -1;
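
// The offset arithmetic above identifies which slot of the parent refers to
// the child: the visited slot's address minus the parent's start, in words,
// is the field index, valid only when the slot actually lies inside the
// parent object. Standalone restatement (hypothetical helper):
#include <cstdint>
inline intptr_t SlotIndexOrSentinel(uintptr_t parent_start,
                                    uintptr_t slot_addr,
                                    intptr_t parent_size_in_bytes,
                                    intptr_t word_size) {
  const intptr_t offset = static_cast<intptr_t>(slot_addr - parent_start);
  if (offset > 0 && offset < parent_size_in_bytes) {
    return offset / word_size;  // word index of the referencing slot
  }
  return -1;  // slot lives outside the parent, e.g. inside instructions
}
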
@@ -198,16 +199,16 @@
     while (entries.HasNext()) {
       entry = entries.GetNext();
       if (entry.IsClass()) {
-        cls ^= entry.raw();
+        cls ^= entry.ptr();
         fields = cls.fields();
         for (intptr_t j = 0; j < fields.Length(); j++) {
           field ^= fields.At(j);
-          ObjectPtr ptr = field.raw();
+          ObjectPtr ptr = field.ptr();
           visitor->VisitPointer(&ptr);
         }
       } else if (entry.IsField()) {
-        field ^= entry.raw();
-        ObjectPtr ptr = field.raw();
+        field ^= entry.ptr();
+        ObjectPtr ptr = field.ptr();
         visitor->VisitPointer(&ptr);
       }
     }
@@ -246,7 +247,7 @@
   Stack stack(isolate_group());
   stack.set_visit_weak_persistent_handles(
       visitor->visit_weak_persistent_handles());
-  ObjectPtr root_raw = root.raw();
+  ObjectPtr root_raw = root.ptr();
   stack.VisitPointer(&root_raw);
   stack.TraverseGraph(visitor);
 }
@@ -291,7 +292,7 @@
     if (ShouldSkip(obj)) {
       return kBacktrack;
     }
-    size_ += obj->ptr()->HeapSize();
+    size_ += obj->untag()->HeapSize();
     return kProceed;
   }
 
@@ -302,7 +303,7 @@
 class SizeExcludingObjectVisitor : public SizeVisitor {
  public:
   explicit SizeExcludingObjectVisitor(const Object& skip) : skip_(skip) {}
-  virtual bool ShouldSkip(ObjectPtr obj) const { return obj == skip_.raw(); }
+  virtual bool ShouldSkip(ObjectPtr obj) const { return obj == skip_.ptr(); }
 
  private:
   const Object& skip_;
@@ -446,7 +447,7 @@
   HeapIterationScope iteration_scope(Thread::Current(), true);
   // To break the trivial path, the handle 'obj' is temporarily cleared during
   // the search, but restored before returning.
-  ObjectPtr raw = obj->raw();
+  ObjectPtr raw = obj->ptr();
   *obj = Object::null();
   RetainingPathVisitor visitor(raw, path);
   IterateUserObjects(&visitor);
@@ -480,7 +481,7 @@
 
   virtual void VisitObject(ObjectPtr raw_obj) {
     source_ = raw_obj;
-    raw_obj->ptr()->VisitPointers(this);
+    raw_obj->untag()->VisitPointers(this);
   }
 
   virtual void VisitPointers(ObjectPtr* first, ObjectPtr* last) {
@@ -494,15 +495,15 @@
           references_.SetAt(obj_index, *scratch_);
 
           *scratch_ = Smi::New(0);
-          uword source_start = ObjectLayout::ToAddr(source_);
+          uword source_start = UntaggedObject::ToAddr(source_);
           uword current_ptr_addr = reinterpret_cast<uword>(current_ptr);
           intptr_t offset = current_ptr_addr - source_start;
-          if (offset > 0 && offset < source_->ptr()->HeapSize()) {
+          if (offset > 0 && offset < source_->untag()->HeapSize()) {
             ASSERT(Utils::IsAligned(offset, kWordSize));
             *scratch_ = Smi::New(offset >> kWordSizeLog2);
           } else {
             // Some internal VM objects visit pointers not contained within the
-            // parent. For instance, CodeLayout::VisitCodePointers visits
+            // parent. For instance, UntaggedCode::VisitCodePointers visits
             // pointers in instructions.
             ASSERT(!source_->IsDartInstance());
             *scratch_ = Smi::New(-1);
@@ -526,7 +527,7 @@
   Object& scratch = Object::Handle();
   HeapIterationScope iteration(Thread::Current());
   NoSafepointScope no_safepoint;
-  InboundReferencesVisitor visitor(isolate(), obj->raw(), references, &scratch);
+  InboundReferencesVisitor visitor(isolate(), obj->ptr(), references, &scratch);
   iteration.IterateObjects(&visitor);
   return visitor.length();
 }
@@ -692,7 +693,7 @@
 }
 
 bool HeapSnapshotWriter::OnImagePage(ObjectPtr obj) const {
-  const uword addr = ObjectLayout::ToAddr(obj);
+  const uword addr = UntaggedObject::ToAddr(obj);
   for (intptr_t i = 0; i < kMaxImagePages; i++) {
     if ((addr - image_page_ranges_[i].base) < image_page_ranges_[i].size) {
       return true;
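
// OnImagePage's test above folds a two-sided bounds check into one
// comparison: with unsigned uword arithmetic, addr - base wraps to a huge
// value whenever addr < base, so (addr - base) < size is equivalent to
// base <= addr && addr < base + size. Illustrative form:
#include <cstdint>
inline bool InRange(uintptr_t addr, uintptr_t base, uintptr_t size) {
  return (addr - base) < size;  // relies on well-defined unsigned wrap
}
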
@@ -718,7 +719,7 @@
   CountingPage* counting_page = FindCountingPage(obj);
   if (counting_page != nullptr) {
     // Likely: object on an ordinary page.
-    counting_page->Record(ObjectLayout::ToAddr(obj), ++object_count_);
+    counting_page->Record(UntaggedObject::ToAddr(obj), ++object_count_);
   } else {
     // Unlikely: new space object, or object on a large or image page.
     thread()->heap()->SetObjectId(obj, ++object_count_);
@@ -739,7 +740,7 @@
   intptr_t id;
   if (counting_page != nullptr) {
     // Likely: object on an ordinary page.
-    id = counting_page->Lookup(ObjectLayout::ToAddr(obj));
+    id = counting_page->Lookup(UntaggedObject::ToAddr(obj));
   } else {
     // Unlikely: new space object, or object on a large or image page.
     id = thread()->heap()->GetObjectId(obj);
@@ -776,7 +777,7 @@
     if (obj->IsPseudoObject()) return;
 
     writer_->AssignObjectId(obj);
-    obj->ptr()->VisitPointers(this);
+    obj->untag()->VisitPointers(this);
   }
 
   void VisitPointers(ObjectPtr* from, ObjectPtr* to) {
@@ -788,7 +789,7 @@
   void VisitHandle(uword addr) {
     FinalizablePersistentHandle* weak_persistent_handle =
         reinterpret_cast<FinalizablePersistentHandle*>(addr);
-    if (!weak_persistent_handle->raw()->IsHeapObject()) {
+    if (!weak_persistent_handle->ptr()->IsHeapObject()) {
       return;  // Free handle.
     }
 
@@ -833,112 +834,112 @@
 
     intptr_t cid = obj->GetClassId();
     writer_->WriteUnsigned(cid);
-    writer_->WriteUnsigned(discount_sizes_ ? 0 : obj->ptr()->HeapSize());
+    writer_->WriteUnsigned(discount_sizes_ ? 0 : obj->untag()->HeapSize());
 
     if (cid == kNullCid) {
       writer_->WriteUnsigned(kNullData);
     } else if (cid == kBoolCid) {
       writer_->WriteUnsigned(kBoolData);
       writer_->WriteUnsigned(
-          static_cast<uintptr_t>(static_cast<BoolPtr>(obj)->ptr()->value_));
+          static_cast<uintptr_t>(static_cast<BoolPtr>(obj)->untag()->value_));
     } else if (cid == kSmiCid) {
       UNREACHABLE();
     } else if (cid == kMintCid) {
       writer_->WriteUnsigned(kIntData);
-      writer_->WriteSigned(static_cast<MintPtr>(obj)->ptr()->value_);
+      writer_->WriteSigned(static_cast<MintPtr>(obj)->untag()->value_);
     } else if (cid == kDoubleCid) {
       writer_->WriteUnsigned(kDoubleData);
-      writer_->WriteBytes(&(static_cast<DoublePtr>(obj)->ptr()->value_),
+      writer_->WriteBytes(&(static_cast<DoublePtr>(obj)->untag()->value_),
                           sizeof(double));
     } else if (cid == kOneByteStringCid) {
       OneByteStringPtr str = static_cast<OneByteStringPtr>(obj);
-      intptr_t len = Smi::Value(str->ptr()->length_);
+      intptr_t len = Smi::Value(str->untag()->length_);
       intptr_t trunc_len = Utils::Minimum(len, kMaxStringElements);
       writer_->WriteUnsigned(kLatin1Data);
       writer_->WriteUnsigned(len);
       writer_->WriteUnsigned(trunc_len);
-      writer_->WriteBytes(&str->ptr()->data()[0], trunc_len);
+      writer_->WriteBytes(&str->untag()->data()[0], trunc_len);
     } else if (cid == kExternalOneByteStringCid) {
       ExternalOneByteStringPtr str = static_cast<ExternalOneByteStringPtr>(obj);
-      intptr_t len = Smi::Value(str->ptr()->length_);
+      intptr_t len = Smi::Value(str->untag()->length_);
       intptr_t trunc_len = Utils::Minimum(len, kMaxStringElements);
       writer_->WriteUnsigned(kLatin1Data);
       writer_->WriteUnsigned(len);
       writer_->WriteUnsigned(trunc_len);
-      writer_->WriteBytes(&str->ptr()->external_data_[0], trunc_len);
+      writer_->WriteBytes(&str->untag()->external_data_[0], trunc_len);
     } else if (cid == kTwoByteStringCid) {
       TwoByteStringPtr str = static_cast<TwoByteStringPtr>(obj);
-      intptr_t len = Smi::Value(str->ptr()->length_);
+      intptr_t len = Smi::Value(str->untag()->length_);
       intptr_t trunc_len = Utils::Minimum(len, kMaxStringElements);
       writer_->WriteUnsigned(kUTF16Data);
       writer_->WriteUnsigned(len);
       writer_->WriteUnsigned(trunc_len);
-      writer_->WriteBytes(&str->ptr()->data()[0], trunc_len * 2);
+      writer_->WriteBytes(&str->untag()->data()[0], trunc_len * 2);
     } else if (cid == kExternalTwoByteStringCid) {
       ExternalTwoByteStringPtr str = static_cast<ExternalTwoByteStringPtr>(obj);
-      intptr_t len = Smi::Value(str->ptr()->length_);
+      intptr_t len = Smi::Value(str->untag()->length_);
       intptr_t trunc_len = Utils::Minimum(len, kMaxStringElements);
       writer_->WriteUnsigned(kUTF16Data);
       writer_->WriteUnsigned(len);
       writer_->WriteUnsigned(trunc_len);
-      writer_->WriteBytes(&str->ptr()->external_data_[0], trunc_len * 2);
+      writer_->WriteBytes(&str->untag()->external_data_[0], trunc_len * 2);
     } else if (cid == kArrayCid || cid == kImmutableArrayCid) {
       writer_->WriteUnsigned(kLengthData);
       writer_->WriteUnsigned(
-          Smi::Value(static_cast<ArrayPtr>(obj)->ptr()->length_));
+          Smi::Value(static_cast<ArrayPtr>(obj)->untag()->length_));
     } else if (cid == kGrowableObjectArrayCid) {
       writer_->WriteUnsigned(kLengthData);
-      writer_->WriteUnsigned(
-          Smi::Value(static_cast<GrowableObjectArrayPtr>(obj)->ptr()->length_));
+      writer_->WriteUnsigned(Smi::Value(
+          static_cast<GrowableObjectArrayPtr>(obj)->untag()->length_));
     } else if (cid == kLinkedHashMapCid) {
       writer_->WriteUnsigned(kLengthData);
       writer_->WriteUnsigned(
-          Smi::Value(static_cast<LinkedHashMapPtr>(obj)->ptr()->used_data_));
+          Smi::Value(static_cast<LinkedHashMapPtr>(obj)->untag()->used_data_));
     } else if (cid == kObjectPoolCid) {
       writer_->WriteUnsigned(kLengthData);
-      writer_->WriteUnsigned(static_cast<ObjectPoolPtr>(obj)->ptr()->length_);
+      writer_->WriteUnsigned(static_cast<ObjectPoolPtr>(obj)->untag()->length_);
     } else if (IsTypedDataClassId(cid)) {
       writer_->WriteUnsigned(kLengthData);
       writer_->WriteUnsigned(
-          Smi::Value(static_cast<TypedDataPtr>(obj)->ptr()->length_));
+          Smi::Value(static_cast<TypedDataPtr>(obj)->untag()->length_));
     } else if (IsExternalTypedDataClassId(cid)) {
       writer_->WriteUnsigned(kLengthData);
       writer_->WriteUnsigned(
-          Smi::Value(static_cast<ExternalTypedDataPtr>(obj)->ptr()->length_));
+          Smi::Value(static_cast<ExternalTypedDataPtr>(obj)->untag()->length_));
     } else if (cid == kFunctionCid) {
       writer_->WriteUnsigned(kNameData);
-      ScrubAndWriteUtf8(static_cast<FunctionPtr>(obj)->ptr()->name_);
+      ScrubAndWriteUtf8(static_cast<FunctionPtr>(obj)->untag()->name_);
     } else if (cid == kCodeCid) {
-      ObjectPtr owner = static_cast<CodePtr>(obj)->ptr()->owner_;
+      ObjectPtr owner = static_cast<CodePtr>(obj)->untag()->owner_;
       if (owner->IsFunction()) {
         writer_->WriteUnsigned(kNameData);
-        ScrubAndWriteUtf8(static_cast<FunctionPtr>(owner)->ptr()->name_);
+        ScrubAndWriteUtf8(static_cast<FunctionPtr>(owner)->untag()->name_);
       } else if (owner->IsClass()) {
         writer_->WriteUnsigned(kNameData);
-        ScrubAndWriteUtf8(static_cast<ClassPtr>(owner)->ptr()->name_);
+        ScrubAndWriteUtf8(static_cast<ClassPtr>(owner)->untag()->name_);
       } else {
         writer_->WriteUnsigned(kNoData);
       }
     } else if (cid == kFieldCid) {
       writer_->WriteUnsigned(kNameData);
-      ScrubAndWriteUtf8(static_cast<FieldPtr>(obj)->ptr()->name_);
+      ScrubAndWriteUtf8(static_cast<FieldPtr>(obj)->untag()->name_);
     } else if (cid == kClassCid) {
       writer_->WriteUnsigned(kNameData);
-      ScrubAndWriteUtf8(static_cast<ClassPtr>(obj)->ptr()->name_);
+      ScrubAndWriteUtf8(static_cast<ClassPtr>(obj)->untag()->name_);
     } else if (cid == kLibraryCid) {
       writer_->WriteUnsigned(kNameData);
-      ScrubAndWriteUtf8(static_cast<LibraryPtr>(obj)->ptr()->url_);
+      ScrubAndWriteUtf8(static_cast<LibraryPtr>(obj)->untag()->url_);
     } else if (cid == kScriptCid) {
       writer_->WriteUnsigned(kNameData);
-      ScrubAndWriteUtf8(static_cast<ScriptPtr>(obj)->ptr()->url_);
+      ScrubAndWriteUtf8(static_cast<ScriptPtr>(obj)->untag()->url_);
     } else {
       writer_->WriteUnsigned(kNoData);
     }
 
     DoCount();
-    obj->ptr()->VisitPointersPrecise(isolate_, this);
+    obj->untag()->VisitPointersPrecise(isolate_, this);
     DoWrite();
-    obj->ptr()->VisitPointersPrecise(isolate_, this);
+    obj->untag()->VisitPointersPrecise(isolate_, this);
   }
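
// The DoCount()/DoWrite() bracketing above is a two-pass protocol: the same
// VisitPointersPrecise traversal runs twice, first only tallying outgoing
// references so their count can be serialized before the references
// themselves, then actually emitting them. A much simplified sketch of that
// shape (hypothetical types):
#include <cstddef>
#include <vector>
struct TwoPassEdgeWriter {
  bool counting = true;  // flipped between the two passes
  size_t edge_count = 0;
  std::vector<size_t> edges;
  void Visit(size_t target_id) {
    if (counting) {
      ++edge_count;  // pass 1: count only
    } else {
      edges.push_back(target_id);  // pass 2: emit, count already written
    }
  }
};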
 
   void ScrubAndWriteUtf8(StringPtr str) {
@@ -983,11 +984,11 @@
   void VisitHandle(uword addr) {
     FinalizablePersistentHandle* weak_persistent_handle =
         reinterpret_cast<FinalizablePersistentHandle*>(addr);
-    if (!weak_persistent_handle->raw()->IsHeapObject()) {
+    if (!weak_persistent_handle->ptr()->IsHeapObject()) {
       return;  // Free handle.
     }
 
-    writer_->WriteUnsigned(writer_->GetObjectId(weak_persistent_handle->raw()));
+    writer_->WriteUnsigned(writer_->GetObjectId(weak_persistent_handle->ptr()));
     writer_->WriteUnsigned(weak_persistent_handle->external_size());
     // Attempt to include a native symbol name.
     auto const name = NativeSymbolResolver::LookupSymbolName(
@@ -1203,7 +1204,7 @@
 
 void CountObjectsVisitor::VisitObject(ObjectPtr obj) {
   intptr_t cid = obj->GetClassId();
-  intptr_t size = obj->ptr()->HeapSize();
+  intptr_t size = obj->untag()->HeapSize();
   if (obj->IsNewObject()) {
     new_count_[cid] += 1;
     new_size_[cid] += size;
@@ -1216,7 +1217,7 @@
 void CountObjectsVisitor::VisitHandle(uword addr) {
   FinalizablePersistentHandle* handle =
       reinterpret_cast<FinalizablePersistentHandle*>(addr);
-  ObjectPtr obj = handle->raw();
+  ObjectPtr obj = handle->ptr();
   if (!obj->IsHeapObject()) {
     return;
   }
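
// CountObjectsVisitor above accumulates per-class-id histograms, split by
// generation (new vs. old space). Equivalent standalone shape, with
// illustrative names:
#include <cstddef>
#include <cstdint>
#include <vector>
struct HeapHistogram {
  std::vector<int64_t> count, size;
  explicit HeapHistogram(size_t num_cids)
      : count(num_cids, 0), size(num_cids, 0) {}
  void Record(size_t cid, int64_t object_size) {
    count[cid] += 1;
    size[cid] += object_size;
  }
};
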
diff --git a/runtime/vm/object_graph_test.cc b/runtime/vm/object_graph_test.cc
index 1fbe1d4..6e968dd 100644
--- a/runtime/vm/object_graph_test.cc
+++ b/runtime/vm/object_graph_test.cc
@@ -25,7 +25,7 @@
       return kBacktrack;
     }
     ++count_;
-    size_ += obj->ptr()->HeapSize();
+    size_ += obj->untag()->HeapSize();
     return kProceed;
   }
 
@@ -55,14 +55,14 @@
   b.SetAt(0, c);
   b.SetAt(1, d);
   a.SetAt(11, d);
-  intptr_t a_size = a.raw()->ptr()->HeapSize();
-  intptr_t b_size = b.raw()->ptr()->HeapSize();
-  intptr_t c_size = c.raw()->ptr()->HeapSize();
-  intptr_t d_size = d.raw()->ptr()->HeapSize();
+  intptr_t a_size = a.ptr()->untag()->HeapSize();
+  intptr_t b_size = b.ptr()->untag()->HeapSize();
+  intptr_t c_size = c.ptr()->untag()->HeapSize();
+  intptr_t d_size = d.ptr()->untag()->HeapSize();
   {
     // No more allocation; raw pointers ahead.
     SafepointOperationScope safepoint(thread);
-    ObjectPtr b_raw = b.raw();
+    ObjectPtr b_raw = b.ptr();
     // Clear handles to cut unintended retained paths.
     b = Array::null();
     c = Array::null();
@@ -74,7 +74,7 @@
         // Compare count and size when 'b' is/isn't skipped.
         CounterVisitor with(Object::null(), Object::null());
         graph.IterateObjectsFrom(a, &with);
-        CounterVisitor without(b_raw, a.raw());
+        CounterVisitor without(b_raw, a.ptr());
         graph.IterateObjectsFrom(a, &without);
         // Only 'b' and 'c' were cut off.
         EXPECT_EQ(2, with.count() - without.count());
@@ -85,7 +85,7 @@
         // are thus larger, but the difference should still be just 'b' and 'c'.
         CounterVisitor with(Object::null(), Object::null());
         graph.IterateObjects(&with);
-        CounterVisitor without(b_raw, a.raw());
+        CounterVisitor without(b_raw, a.ptr());
         graph.IterateObjects(&without);
         EXPECT_EQ(2, with.count() - without.count());
         EXPECT_EQ(b_size + c_size, with.size() - without.size());
@@ -136,9 +136,9 @@
                 offset_from_parent.Value() * kWordSize);
       Array& expected_a = Array::Handle();
       expected_a ^= path.At(4);
-      EXPECT(expected_c.raw() == c.raw());
-      EXPECT(expected_b.raw() == a.At(10));
-      EXPECT(expected_a.raw() == a.raw());
+      EXPECT(expected_c.ptr() == c.ptr());
+      EXPECT(expected_b.ptr() == a.At(10));
+      EXPECT(expected_a.ptr() == a.ptr());
     }
   }
 }
@@ -150,7 +150,7 @@
   Dart_WeakPersistentHandle weak_persistent_handle;
   Array& path = Array::Handle(Array::New(1, Heap::kNew));
   ObjectGraph graph(thread);
-  Dart_Handle handle = Api::NewHandle(thread, path.raw());
+  Dart_Handle handle = Api::NewHandle(thread, path.ptr());
 
   // GC root should be a local handle
   auto result = graph.RetainingPath(&path, path);
diff --git a/runtime/vm/object_id_ring_test.cc b/runtime/vm/object_id_ring_test.cc
index d0d62c9..e1e6cf0 100644
--- a/runtime/vm/object_id_ring_test.cc
+++ b/runtime/vm/object_id_ring_test.cc
@@ -163,8 +163,10 @@
     EXPECT_EQ(ObjectIdRing::kValid, kind);
     EXPECT_NE(Object::null(), raw_obj1);
     EXPECT_NE(Object::null(), raw_obj2);
-    EXPECT_EQ(ObjectLayout::ToAddr(raw_obj), ObjectLayout::ToAddr(raw_obj1));
-    EXPECT_EQ(ObjectLayout::ToAddr(raw_obj), ObjectLayout::ToAddr(raw_obj2));
+    EXPECT_EQ(UntaggedObject::ToAddr(raw_obj),
+              UntaggedObject::ToAddr(raw_obj1));
+    EXPECT_EQ(UntaggedObject::ToAddr(raw_obj),
+              UntaggedObject::ToAddr(raw_obj2));
     // Force a scavenge.
     GCTestHelper::CollectNewSpace();
     ObjectPtr raw_object_moved1 = ring->GetObjectForId(raw_obj_id1, &kind);
@@ -173,13 +175,13 @@
     EXPECT_EQ(ObjectIdRing::kValid, kind);
     EXPECT_NE(Object::null(), raw_object_moved1);
     EXPECT_NE(Object::null(), raw_object_moved2);
-    EXPECT_EQ(ObjectLayout::ToAddr(raw_object_moved1),
-              ObjectLayout::ToAddr(raw_object_moved2));
+    EXPECT_EQ(UntaggedObject::ToAddr(raw_object_moved1),
+              UntaggedObject::ToAddr(raw_object_moved2));
     // Test that objects have moved.
-    EXPECT_NE(ObjectLayout::ToAddr(raw_obj1),
-              ObjectLayout::ToAddr(raw_object_moved1));
-    EXPECT_NE(ObjectLayout::ToAddr(raw_obj2),
-              ObjectLayout::ToAddr(raw_object_moved2));
+    EXPECT_NE(UntaggedObject::ToAddr(raw_obj1),
+              UntaggedObject::ToAddr(raw_object_moved1));
+    EXPECT_NE(UntaggedObject::ToAddr(raw_obj2),
+              UntaggedObject::ToAddr(raw_object_moved2));
     // Test that we still point at the same list.
     moved_handle = Api::NewHandle(thread, raw_object_moved1);
     // Test id reuse.
@@ -207,7 +209,7 @@
     EXPECT(!str.IsNull());
     EXPECT_EQ(3, str.Length());
 
-    ObjectPtr raw_obj = Object::RawCast(str.raw());
+    ObjectPtr raw_obj = Object::RawCast(str.ptr());
     // Verify that it is located in the old heap.
     EXPECT(raw_obj->IsOldObject());
     EXPECT_NE(Object::null(), raw_obj);
@@ -221,8 +223,10 @@
     EXPECT_EQ(ObjectIdRing::kValid, kind);
     EXPECT_NE(Object::null(), raw_obj1);
     EXPECT_NE(Object::null(), raw_obj2);
-    EXPECT_EQ(ObjectLayout::ToAddr(raw_obj), ObjectLayout::ToAddr(raw_obj1));
-    EXPECT_EQ(ObjectLayout::ToAddr(raw_obj), ObjectLayout::ToAddr(raw_obj2));
+    EXPECT_EQ(UntaggedObject::ToAddr(raw_obj),
+              UntaggedObject::ToAddr(raw_obj1));
+    EXPECT_EQ(UntaggedObject::ToAddr(raw_obj),
+              UntaggedObject::ToAddr(raw_obj2));
     // Exit scope. Freeing String handle.
   }
   // Force a GC. No references exist to the old string anymore. It should be
@@ -245,27 +249,27 @@
 
   // Insert an object and check we can look it up.
   String& obj = String::Handle(String::New("I will expire"));
-  intptr_t obj_id = ring->GetIdForObject(obj.raw());
+  intptr_t obj_id = ring->GetIdForObject(obj.ptr());
   ObjectIdRing::LookupResult kind = ObjectIdRing::kInvalid;
   ObjectPtr obj_lookup = ring->GetObjectForId(obj_id, &kind);
   EXPECT_EQ(ObjectIdRing::kValid, kind);
-  EXPECT_EQ(obj.raw(), obj_lookup);
+  EXPECT_EQ(obj.ptr(), obj_lookup);
 
   // Insert as many new objects as the ring size to bump out our first entry.
   Object& new_obj = Object::Handle();
   for (intptr_t i = 0; i < ObjectIdRing::kDefaultCapacity; i++) {
     new_obj = String::New("Bump");
-    intptr_t new_obj_id = ring->GetIdForObject(new_obj.raw());
+    intptr_t new_obj_id = ring->GetIdForObject(new_obj.ptr());
     ObjectIdRing::LookupResult kind = ObjectIdRing::kInvalid;
     ObjectPtr new_obj_lookup = ring->GetObjectForId(new_obj_id, &kind);
     EXPECT_EQ(ObjectIdRing::kValid, kind);
-    EXPECT_EQ(new_obj.raw(), new_obj_lookup);
+    EXPECT_EQ(new_obj.ptr(), new_obj_lookup);
   }
 
   // Check our first entry reports it has expired.
   obj_lookup = ring->GetObjectForId(obj_id, &kind);
   EXPECT_EQ(ObjectIdRing::kExpired, kind);
-  EXPECT_NE(obj.raw(), obj_lookup);
+  EXPECT_NE(obj.ptr(), obj_lookup);
   EXPECT_EQ(Object::null(), obj_lookup);
 }
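
// The expiry behaviour exercised above falls out of a fixed-capacity ring:
// after kDefaultCapacity newer ids are issued, the oldest slot is
// overwritten and lookups of its id must report expiry. Minimal sketch of
// such an id ring (illustrative types, not the VM's):
#include <cstdint>
#include <vector>
class IdRing {
 public:
  explicit IdRing(size_t capacity) : slots_(capacity, 0) {}
  int64_t Add(uintptr_t obj) {
    slots_[next_id_ % slots_.size()] = obj;
    return next_id_++;
  }
  bool Lookup(int64_t id, uintptr_t* out) const {
    if (id < 0 || id >= next_id_) return false;  // never issued
    if (next_id_ - id > static_cast<int64_t>(slots_.size())) {
      return false;  // expired: slot reused by a newer entry
    }
    *out = slots_[id % slots_.size()];
    return true;
  }
 private:
  std::vector<uintptr_t> slots_;
  int64_t next_id_ = 0;
};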
 
diff --git a/runtime/vm/object_reload.cc b/runtime/vm/object_reload.cc
index d2b8a33..374658c 100644
--- a/runtime/vm/object_reload.cc
+++ b/runtime/vm/object_reload.cc
@@ -71,7 +71,7 @@
   ASSERT(!instrs_.IsNull());
   uword base_address = instrs_.PayloadStart();
   intptr_t offsets_length = code.pointer_offsets_length();
-  const int32_t* offsets = code.raw_ptr()->data();
+  const int32_t* offsets = code.untag()->data();
   for (intptr_t i = 0; i < offsets_length; i++) {
     int32_t offset = offsets[i];
     ObjectPtr* object_ptr = reinterpret_cast<ObjectPtr*>(base_address + offset);
@@ -127,7 +127,7 @@
   }
   const Function& function = Function::Cast(object_);
 
-  if (function.kind() == FunctionLayout::kIrregexpFunction) {
+  if (function.kind() == UntaggedFunction::kIrregexpFunction) {
     // Regex matchers do not support breakpoints or stepping, and they only call
     // core library functions that cannot change due to reload. As a performance
     // optimization, avoid this matching of ICData to PCs for these functions'
@@ -143,7 +143,7 @@
     // calls.
 #if defined(DEBUG)
     descriptors_ = code.pc_descriptors();
-    PcDescriptors::Iterator iter(descriptors_, PcDescriptorsLayout::kIcCall);
+    PcDescriptors::Iterator iter(descriptors_, UntaggedPcDescriptors::kIcCall);
     while (iter.MoveNext()) {
       FATAL1("%s has IC calls but no ic_data_array\n", object_.ToCString());
     }
@@ -152,7 +152,7 @@
   }
 
   descriptors_ = code.pc_descriptors();
-  PcDescriptors::Iterator iter(descriptors_, PcDescriptorsLayout::kIcCall);
+  PcDescriptors::Iterator iter(descriptors_, UntaggedPcDescriptors::kIcCall);
   while (iter.MoveNext()) {
     uword pc = code.PayloadStart() + iter.PcOffset();
     CodePatcher::GetInstanceCallAt(pc, code, &object_);
@@ -278,7 +278,7 @@
   static const char* Name() { return "EnumMapTraits"; }
 
   static bool IsMatch(const Object& a, const Object& b) {
-    return a.raw() == b.raw();
+    return a.ptr() == b.ptr();
   }
 
   static uword Hash(const Object& obj) {
@@ -334,7 +334,7 @@
   TIR_Print("Replacing enum `%s`\n", String::Handle(Name()).ToCString());
 
   {
-    UnorderedHashMap<EnumMapTraits> enum_map(enum_map_storage.raw());
+    UnorderedHashMap<EnumMapTraits> enum_map(enum_map_storage.ptr());
     // Build a map of all enum name -> old enum instance.
     enum_fields = old_enum.fields();
     for (intptr_t i = 0; i < enum_fields.Length(); i++) {
@@ -366,12 +366,12 @@
     }
     // The storage given to the map may have been reallocated; remember the new
     // address.
-    enum_map_storage = enum_map.Release().raw();
+    enum_map_storage = enum_map.Release().ptr();
   }
 
   bool enums_deleted = false;
   {
-    UnorderedHashMap<EnumMapTraits> enum_map(enum_map_storage.raw());
+    UnorderedHashMap<EnumMapTraits> enum_map(enum_map_storage.ptr());
     // Add a become mapping from the old instances to the new instances.
     enum_fields = fields();
     for (intptr_t i = 0; i < enum_fields.Length(); i++) {
@@ -411,7 +411,7 @@
     enums_deleted = enum_map.NumOccupied() > 0;
     // The storage given to the map may have been reallocated; remember the new
     // address.
-    enum_map_storage = enum_map.Release().raw();
+    enum_map_storage = enum_map.Release().ptr();
   }
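
// The "may have been reallocated" comments above reflect how these hash
// maps borrow growable storage: an insert can replace the backing buffer,
// so callers must re-capture it from Release() instead of holding on to
// the pre-insert value. Shape of the contract as a hedged sketch (error
// handling elided; names are illustrative):
#include <cstddef>
#include <cstdlib>
struct BorrowedStore {
  int* data = nullptr;
  size_t len = 0, cap = 0;
  void Insert(int v) {
    if (len == cap) {
      cap = (cap == 0) ? 4 : cap * 2;
      data = static_cast<int*>(std::realloc(data, cap * sizeof(int)));
    }
    data[len++] = v;  // 'data' may now differ from what the caller cached
  }
  int* Release() { return data; }  // always re-capture after inserts
};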
 
   // Map the old E.values array to the new E.values array.
@@ -432,7 +432,7 @@
         "The following enum values were deleted from %s and will become the "
         "deleted enum sentinel:\n",
         old_enum.ToCString());
-    UnorderedHashMap<EnumMapTraits> enum_map(enum_map_storage.raw());
+    UnorderedHashMap<EnumMapTraits> enum_map(enum_map_storage.ptr());
     UnorderedHashMap<EnumMapTraits>::Iterator it(&enum_map);
     while (it.MoveNext()) {
       const intptr_t entry = it.Current();
@@ -475,7 +475,7 @@
     owner = func.RawOwner();
     ASSERT(!owner.IsNull());
     if (!owner.IsPatchClass()) {
-      ASSERT(owner.raw() == this->raw());
+      ASSERT(owner.ptr() == this->ptr());
       func.set_owner(patch);
     }
   }
@@ -489,7 +489,7 @@
     owner = field.RawOwner();
     ASSERT(!owner.IsNull());
     if (!owner.IsPatchClass()) {
-      ASSERT(owner.raw() == this->raw());
+      ASSERT(owner.ptr() == this->ptr());
       field.set_owner(patch);
     }
     field.ForceDynamicGuardedCidAndLength();
@@ -549,7 +549,7 @@
  private:
   const Error& error_;
 
-  ErrorPtr ToError() { return error_.raw(); }
+  ErrorPtr ToError() { return error_.ptr(); }
 
   StringPtr ToString() { return String::New(error_.ToErrorCString()); }
 };
@@ -860,7 +860,7 @@
   while (it.HasNext()) {
     object = it.GetNext();
     if (!object.IsLibraryPrefix()) continue;
-    prefix ^= object.raw();
+    prefix ^= object.ptr();
     if (prefix.is_deferred_load()) {
       const String& prefix_name = String::Handle(prefix.name());
       context->group_reload_context()->AddReasonForCancelling(
@@ -890,7 +890,7 @@
       GrowableArray<intptr_t> class_ids(2);
       Function& target = Function::Handle(zone_);
       ic.GetCheckAt(0, &class_ids, &target);
-      if ((target.raw() == smi_op_target.raw()) && (class_ids[0] == kSmiCid) &&
+      if ((target.ptr() == smi_op_target.ptr()) && (class_ids[0] == kSmiCid) &&
           (class_ids[1] == kSmiCid)) {
         // The smi fast path case, preserve the initial entry but reset the
         // count.
@@ -919,7 +919,7 @@
 
     if (rule == ICData::kStatic) {
       ASSERT(old_target_.is_static() ||
-             old_target_.kind() == FunctionLayout::kConstructor);
+             old_target_.kind() == UntaggedFunction::kConstructor);
       // This can be incorrect if the call site was an unqualified invocation.
       new_cls_ = old_target_.Owner();
       new_target_ = Resolver::ResolveFunction(zone_, new_cls_, name_);
diff --git a/runtime/vm/object_service.cc b/runtime/vm/object_service.cc
index ac079da..89e394b 100644
--- a/runtime/vm/object_service.cc
+++ b/runtime/vm/object_service.cc
@@ -44,8 +44,8 @@
     jsobj->AddProperty("class", cls);
   }
   if (!ref) {
-    if (raw()->IsHeapObject()) {
-      jsobj->AddProperty("size", raw()->ptr()->HeapSize());
+    if (ptr()->IsHeapObject()) {
+      jsobj->AddProperty("size", ptr()->untag()->HeapSize());
     } else {
       jsobj->AddProperty("size", (intptr_t)0);
     }
@@ -76,7 +76,7 @@
 void Class::PrintJSONImpl(JSONStream* stream, bool ref) const {
   Isolate* isolate = Isolate::Current();
   JSONObject jsobj(stream);
-  if ((raw() == Class::null()) || (id() == kFreeListElement)) {
+  if ((ptr() == Class::null()) || (id() == kFreeListElement)) {
     // TODO(turnidge): This is weird and needs to be changed.
     jsobj.AddProperty("type", "null");
     return;
@@ -258,7 +258,7 @@
   // Regular functions known to their owner use their name (percent-encoded).
   String& name = String::Handle(f.name());
   Thread* thread = Thread::Current();
-  if (Resolver::ResolveFunction(thread->zone(), cls, name) == f.raw()) {
+  if (Resolver::ResolveFunction(thread->zone(), cls, name) == f.ptr()) {
     const char* encoded_name = String::EncodeIRI(name);
     if (cls.IsTopLevel()) {
       const auto& library = Library::Handle(cls.library());
@@ -329,10 +329,10 @@
   jsobj.AddProperty("_optimizedCallSiteCount", optimized_call_site_count());
   jsobj.AddProperty("_deoptimizations",
                     static_cast<intptr_t>(deoptimization_counter()));
-  if ((kind() == FunctionLayout::kImplicitGetter) ||
-      (kind() == FunctionLayout::kImplicitSetter) ||
-      (kind() == FunctionLayout::kImplicitStaticGetter) ||
-      (kind() == FunctionLayout::kFieldInitializer)) {
+  if ((kind() == UntaggedFunction::kImplicitGetter) ||
+      (kind() == UntaggedFunction::kImplicitSetter) ||
+      (kind() == UntaggedFunction::kImplicitStaticGetter) ||
+      (kind() == UntaggedFunction::kFieldInitializer)) {
     const Field& field = Field::Handle(accessor_field());
     if (!field.IsNull()) {
       jsobj.AddProperty("_field", field);
@@ -538,7 +538,7 @@
     while (entries.HasNext()) {
       entry = entries.GetNext();
       if (entry.IsLibraryPrefix()) {
-        prefix ^= entry.raw();
+        prefix ^= entry.ptr();
         imports = prefix.imports();
         if (!imports.IsNull()) {
           for (intptr_t i = 0; i < imports.Length(); i++) {
@@ -578,9 +578,9 @@
       entry = entries.GetNext();
       if (entry.IsFunction()) {
         const Function& func = Function::Cast(entry);
-        if (func.kind() == FunctionLayout::kRegularFunction ||
-            func.kind() == FunctionLayout::kGetterFunction ||
-            func.kind() == FunctionLayout::kSetterFunction) {
+        if (func.kind() == UntaggedFunction::kRegularFunction ||
+            func.kind() == UntaggedFunction::kGetterFunction ||
+            func.kind() == UntaggedFunction::kSetterFunction) {
           jsarr.AddValue(func);
         }
       }
@@ -685,7 +685,7 @@
     return;
   }
   JSONArray members(jsobj, "members");
-  Iterator iter(*this, PcDescriptorsLayout::kAnyKind);
+  Iterator iter(*this, UntaggedPcDescriptors::kAnyKind);
   while (iter.MoveNext()) {
     JSONObject descriptor(&members);
     descriptor.AddPropertyF("pcOffset", "%" Px "", iter.PcOffset());
@@ -722,7 +722,7 @@
   JSONArray members(&jsobj, "members");
   String& var_name = String::Handle();
   for (intptr_t i = 0; i < Length(); i++) {
-    LocalVarDescriptorsLayout::VarInfo info;
+    UntaggedLocalVarDescriptors::VarInfo info;
     var_name = GetName(i);
     GetInfo(i, &info);
     JSONObject var(&members);
@@ -1010,7 +1010,7 @@
     cls = cls.SuperClass();
   }
   do {
-    classes.Add(&Class::Handle(cls.raw()));
+    classes.Add(&Class::Handle(cls.ptr()));
     cls = cls.SuperClass();
   } while (!cls.IsNull());
 
@@ -1051,12 +1051,12 @@
   JSONObject jsobj(stream);
 
   // Handle certain special instance values.
-  if (raw() == Object::sentinel().raw()) {
+  if (ptr() == Object::sentinel().ptr()) {
     jsobj.AddProperty("type", "Sentinel");
     jsobj.AddProperty("kind", "NotInitialized");
     jsobj.AddProperty("valueAsString", "<not initialized>");
     return;
-  } else if (raw() == Object::transition_sentinel().raw()) {
+  } else if (ptr() == Object::transition_sentinel().ptr()) {
     jsobj.AddProperty("type", "Sentinel");
     jsobj.AddProperty("kind", "BeingInitialized");
     jsobj.AddProperty("valueAsString", "<being initialized>");
@@ -1100,7 +1100,7 @@
   PrintSharedInstanceJSON(&jsobj, ref);
   jsobj.AddProperty("kind", "Type");
   const Class& type_cls = Class::Handle(type_class());
-  if (type_cls.DeclarationType() == raw()) {
+  if (type_cls.DeclarationType() == ptr()) {
     intptr_t cid = type_cls.id();
     jsobj.AddFixedServiceId("classes/%" Pd "/types/%d", cid, 0);
   } else {
@@ -1193,7 +1193,7 @@
 
 void String::PrintJSONImpl(JSONStream* stream, bool ref) const {
   JSONObject jsobj(stream);
-  if (raw() == Symbols::OptimizedOut().raw()) {
+  if (ptr() == Symbols::OptimizedOut().ptr()) {
     // TODO(turnidge): This is a hack.  The user could have this
     // special string in their program.  Fixing this involves updating
     // the debugging api a bit.
diff --git a/runtime/vm/object_set.h b/runtime/vm/object_set.h
index 6d8e1ff..36a3101 100644
--- a/runtime/vm/object_set.h
+++ b/runtime/vm/object_set.h
@@ -64,7 +64,7 @@
   }
 
   bool Contains(ObjectPtr raw_obj) const {
-    uword raw_addr = ObjectLayout::ToAddr(raw_obj);
+    uword raw_addr = UntaggedObject::ToAddr(raw_obj);
     ObjectSetRegion* region;
     if (FindRegion(raw_addr, &region)) {
       return region->ContainsObject(raw_addr);
@@ -73,7 +73,7 @@
   }
 
   void Add(ObjectPtr raw_obj) {
-    uword raw_addr = ObjectLayout::ToAddr(raw_obj);
+    uword raw_addr = UntaggedObject::ToAddr(raw_obj);
     ObjectSetRegion* region;
     if (FindRegion(raw_addr, &region)) {
       return region->AddObject(raw_addr);
diff --git a/runtime/vm/object_store.cc b/runtime/vm/object_store.cc
index 8b2995a..76c15da 100644
--- a/runtime/vm/object_store.cc
+++ b/runtime/vm/object_store.cc
@@ -65,7 +65,7 @@
   // Expansion of inlined functions requires additional memory at run time,
   // avoid it.
   stack_trace.set_expand_inlined(false);
-  return stack_trace.raw();
+  return stack_trace.ptr();
 }
 
 ErrorPtr IsolateObjectStore::PreallocateObjects() {
@@ -162,13 +162,13 @@
 
   result = AllocateObjectByClassName(library, Symbols::StackOverflowError());
   if (result.IsError()) {
-    return Error::Cast(result).raw();
+    return Error::Cast(result).ptr();
   }
   set_stack_overflow(Instance::Cast(result));
 
   result = AllocateObjectByClassName(library, Symbols::OutOfMemoryError());
   if (result.IsError()) {
-    return Error::Cast(result).raw();
+    return Error::Cast(result).ptr();
   }
   set_out_of_memory(Instance::Cast(result));
 
@@ -185,7 +185,7 @@
   const Function& result = Function::Handle(
       Resolver::ResolveDynamicFunction(thread->zone(), cls, mangled));
   ASSERT(!result.IsNull());
-  return result.raw();
+  return result.ptr();
 }
 
 void ObjectStore::InitKnownObjects() {
diff --git a/runtime/vm/object_store.h b/runtime/vm/object_store.h
index 0cac485..3642e50 100644
--- a/runtime/vm/object_store.h
+++ b/runtime/vm/object_store.h
@@ -323,7 +323,7 @@
 
 #define DECLARE_GETTER_AND_SETTER(Type, name)                                  \
   DECLARE_GETTER(Type, name)                                                   \
-  void set_##name(const Type& value) { name##_ = value.raw(); }
+  void set_##name(const Type& value) { name##_ = value.ptr(); }
   ISOLATE_OBJECT_STORE_FIELD_LIST(DECLARE_GETTER, DECLARE_GETTER_AND_SETTER)
 #undef DECLARE_GETTER
 #undef DECLARE_GETTER_AND_SETTER
@@ -393,7 +393,7 @@
   static intptr_t name##_offset() { return OFFSET_OF(ObjectStore, name##_); }
 #define DECLARE_GETTER_AND_SETTER(Type, name)                                  \
   DECLARE_GETTER(Type, name)                                                   \
-  void set_##name(const Type& value) { name##_ = value.raw(); }
+  void set_##name(const Type& value) { name##_ = value.ptr(); }
 #define DECLARE_LAZY_INIT_GETTER(Type, name, init)                             \
   Type##Ptr name() {                                                           \
     if (name##_ == Type::null()) {                                             \
@@ -404,10 +404,10 @@
   static intptr_t name##_offset() { return OFFSET_OF(ObjectStore, name##_); }
 #define DECLARE_LAZY_INIT_CORE_GETTER_AND_SETTER(Type, name)                   \
   DECLARE_LAZY_INIT_GETTER(Type, name, LazyInitCoreTypes)                      \
-  void set_##name(const Type& value) { name##_ = value.raw(); }
+  void set_##name(const Type& value) { name##_ = value.ptr(); }
 #define DECLARE_LAZY_INIT_FUTURE_GETTER_AND_SETTER(Type, name)                 \
   DECLARE_LAZY_INIT_GETTER(Type, name, LazyInitFutureTypes)                    \
-  void set_##name(const Type& value) { name##_ = value.raw(); }
+  void set_##name(const Type& value) { name##_ = value.ptr(); }
   OBJECT_STORE_FIELD_LIST(DECLARE_GETTER,
                           DECLARE_GETTER_AND_SETTER,
                           DECLARE_LAZY_INIT_CORE_GETTER_AND_SETTER,
@@ -437,7 +437,7 @@
     switch (index) {
 #define MAKE_CASE(CamelName, name)                                             \
   case k##CamelName:                                                           \
-    name##_library_ = value.raw();                                             \
+    name##_library_ = value.ptr();                                             \
     break;
 
       FOR_EACH_BOOTSTRAP_LIBRARY(MAKE_CASE)
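
// The DECLARE_* and MAKE_CASE macros above are an X-macro pattern: one
// field list is expanded repeatedly with different per-entry macros so
// getters, setters, offsets, and switch cases stay in lockstep. Minimal
// self-contained sketch with a hypothetical field list:
#define DEMO_FIELDS(V)                                                         \
  V(int, counter)                                                              \
  V(double, ratio)

#define DEMO_DECLARE_FIELD(type, name)                                         \
  type name##_{};                                                              \
  type name() const { return name##_; }                                        \
  void set_##name(type value) { name##_ = value; }

struct DemoStore {
  DEMO_FIELDS(DEMO_DECLARE_FIELD)
};

#undef DEMO_DECLARE_FIELD
#undef DEMO_FIELDS
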
diff --git a/runtime/vm/object_test.cc b/runtime/vm/object_test.cc
index 87513b2..1d0c396 100644
--- a/runtime/vm/object_test.cc
+++ b/runtime/vm/object_test.cc
@@ -42,7 +42,7 @@
       Library::Handle(), class_name, script, TokenPosition::kNoSource));
   cls.set_is_synthesized_class();  // Dummy class for testing.
   cls.set_is_declaration_loaded();
-  return cls.raw();
+  return cls.ptr();
 }
 
 ISOLATE_UNIT_TEST_CASE(Class) {
@@ -84,13 +84,13 @@
   function_name = Symbols::New(thread, "foo");
   signature = FunctionType::New();
   function = Function::New(signature, function_name,
-                           FunctionLayout::kRegularFunction, false, false,
+                           UntaggedFunction::kRegularFunction, false, false,
                            false, false, false, cls, TokenPosition::kMinSource);
   functions.SetAt(0, function);
   function_name = Symbols::New(thread, "bar");
   signature = FunctionType::New();
   function = Function::New(signature, function_name,
-                           FunctionLayout::kRegularFunction, false, false,
+                           UntaggedFunction::kRegularFunction, false, false,
                            false, false, false, cls, TokenPosition::kMinSource);
 
   const int kNumFixedParameters = 2;
@@ -104,28 +104,28 @@
   function_name = Symbols::New(thread, "baz");
   signature = FunctionType::New();
   function = Function::New(signature, function_name,
-                           FunctionLayout::kRegularFunction, false, false,
+                           UntaggedFunction::kRegularFunction, false, false,
                            false, false, false, cls, TokenPosition::kMinSource);
   functions.SetAt(2, function);
 
   function_name = Symbols::New(thread, "Foo");
   signature = FunctionType::New();
   function = Function::New(signature, function_name,
-                           FunctionLayout::kRegularFunction, true, false, false,
-                           false, false, cls, TokenPosition::kMinSource);
+                           UntaggedFunction::kRegularFunction, true, false,
+                           false, false, false, cls, TokenPosition::kMinSource);
 
   functions.SetAt(3, function);
   function_name = Symbols::New(thread, "Bar");
   signature = FunctionType::New();
   function = Function::New(signature, function_name,
-                           FunctionLayout::kRegularFunction, true, false, false,
-                           false, false, cls, TokenPosition::kMinSource);
+                           UntaggedFunction::kRegularFunction, true, false,
+                           false, false, false, cls, TokenPosition::kMinSource);
   functions.SetAt(4, function);
   function_name = Symbols::New(thread, "BaZ");
   signature = FunctionType::New();
   function = Function::New(signature, function_name,
-                           FunctionLayout::kRegularFunction, true, false, false,
-                           false, false, cls, TokenPosition::kMinSource);
+                           UntaggedFunction::kRegularFunction, true, false,
+                           false, false, false, cls, TokenPosition::kMinSource);
   functions.SetAt(5, function);
 
   // Setup the functions in the class.
@@ -143,13 +143,13 @@
   function = cls.LookupStaticFunction(function_name);
   EXPECT(!function.IsNull());
   EXPECT(function_name.Equals(String::Handle(function.name())));
-  EXPECT_EQ(cls.raw(), function.Owner());
+  EXPECT_EQ(cls.ptr(), function.Owner());
   EXPECT(function.is_static());
   function_name = String::New("baz");
   function = Resolver::ResolveDynamicFunction(Z, cls, function_name);
   EXPECT(!function.IsNull());
   EXPECT(function_name.Equals(String::Handle(function.name())));
-  EXPECT_EQ(cls.raw(), function.Owner());
+  EXPECT_EQ(cls.ptr(), function.Owner());
   EXPECT(!function.is_static());
   function = cls.LookupStaticFunction(function_name);
   EXPECT(function.IsNull());
@@ -264,14 +264,14 @@
       TypeArguments::Handle(TypeArguments::New(2));
   type_arguments2.SetTypeAt(0, type1);
   type_arguments2.SetTypeAt(1, type2);
-  EXPECT_NE(type_arguments1.raw(), type_arguments2.raw());
+  EXPECT_NE(type_arguments1.ptr(), type_arguments2.ptr());
   OS::PrintErr("1: %s\n", type_arguments1.ToCString());
   OS::PrintErr("2: %s\n", type_arguments2.ToCString());
   EXPECT(type_arguments1.Equals(type_arguments2));
   TypeArguments& type_arguments3 = TypeArguments::Handle();
   type_arguments1.Canonicalize(thread, nullptr);
   type_arguments3 ^= type_arguments2.Canonicalize(thread, nullptr);
-  EXPECT_EQ(type_arguments1.raw(), type_arguments3.raw());
+  EXPECT_EQ(type_arguments1.ptr(), type_arguments3.ptr());
 }
 
 TEST_CASE(Class_EndTokenPos) {
@@ -321,7 +321,7 @@
 
   EXPECT_EQ(kObjectAlignment, empty_class.host_instance_size());
   Instance& instance = Instance::Handle(Instance::New(empty_class));
-  EXPECT_EQ(empty_class.raw(), instance.clazz());
+  EXPECT_EQ(empty_class.ptr(), instance.clazz());
 
   class_name = Symbols::New(thread, "OneFieldClass");
   const Class& one_field_class =
@@ -345,7 +345,7 @@
     one_field_class.SetFields(one_fields);
   }
   one_field_class.Finalize();
-  intptr_t header_size = sizeof(ObjectLayout);
+  intptr_t header_size = sizeof(UntaggedObject);
   EXPECT_EQ(Utils::RoundUp((header_size + (1 * kWordSize)), kObjectAlignment),
             one_field_class.host_instance_size());
   EXPECT_EQ(header_size, field.HostOffset());
@@ -356,7 +356,7 @@
 
 ISOLATE_UNIT_TEST_CASE(Smi) {
   const Smi& smi = Smi::Handle(Smi::New(5));
-  Object& smi_object = Object::Handle(smi.raw());
+  Object& smi_object = Object::Handle(smi.ptr());
   EXPECT(smi.IsSmi());
   EXPECT(smi_object.IsSmi());
   EXPECT_EQ(5, smi.Value());
@@ -560,7 +560,7 @@
   mint2 ^= Integer::NewCanonical(mint_string);
   EXPECT_EQ(mint1.value(), mint_value);
   EXPECT_EQ(mint2.value(), mint_value);
-  EXPECT_EQ(mint1.raw(), mint2.raw());
+  EXPECT_EQ(mint1.ptr(), mint2.ptr());
 #endif
 }
 
@@ -568,7 +568,7 @@
   {
     const double dbl_const = 5.0;
     const Double& dbl = Double::Handle(Double::New(dbl_const));
-    Object& dbl_object = Object::Handle(dbl.raw());
+    Object& dbl_object = Object::Handle(dbl.ptr());
     EXPECT(dbl.IsDouble());
     EXPECT(dbl_object.IsDouble());
     EXPECT_EQ(dbl_const, dbl.value());
@@ -577,7 +577,7 @@
   {
     const double dbl_const = -5.0;
     const Double& dbl = Double::Handle(Double::New(dbl_const));
-    Object& dbl_object = Object::Handle(dbl.raw());
+    Object& dbl_object = Object::Handle(dbl.ptr());
     EXPECT(dbl.IsDouble());
     EXPECT(dbl_object.IsDouble());
     EXPECT_EQ(dbl_const, dbl.value());
@@ -586,7 +586,7 @@
   {
     const double dbl_const = 0.0;
     const Double& dbl = Double::Handle(Double::New(dbl_const));
-    Object& dbl_object = Object::Handle(dbl.raw());
+    Object& dbl_object = Object::Handle(dbl.ptr());
     EXPECT(dbl.IsDouble());
     EXPECT(dbl_object.IsDouble());
     EXPECT_EQ(dbl_const, dbl.value());
@@ -601,8 +601,8 @@
     EXPECT_EQ(dbl_const, dbl1.value());
     EXPECT_EQ(dbl_const, dbl2.value());
     EXPECT_EQ(dbl_const, dbl3.value());
-    EXPECT_EQ(dbl1.raw(), dbl2.raw());
-    EXPECT_EQ(dbl1.raw(), dbl3.raw());
+    EXPECT_EQ(dbl1.ptr(), dbl2.ptr());
+    EXPECT_EQ(dbl1.ptr(), dbl3.ptr());
   }
 
   {
@@ -1795,18 +1795,18 @@
   const String& nine = String::Handle(Symbols::New(thread, "Neun"));
   const String& ten = String::Handle(Symbols::New(thread, "Zehn"));
   String& eins = String::Handle(Symbols::New(thread, "Eins"));
-  EXPECT_EQ(one.raw(), eins.raw());
-  EXPECT(one.raw() != two.raw());
+  EXPECT_EQ(one.ptr(), eins.ptr());
+  EXPECT(one.ptr() != two.ptr());
   EXPECT(two.Equals(String::Handle(String::New("Zwei"))));
-  EXPECT_EQ(two.raw(), Symbols::New(thread, "Zwei"));
-  EXPECT_EQ(three.raw(), Symbols::New(thread, "Drei"));
-  EXPECT_EQ(four.raw(), Symbols::New(thread, "Vier"));
-  EXPECT_EQ(five.raw(), Symbols::New(thread, "Fuenf"));
-  EXPECT_EQ(six.raw(), Symbols::New(thread, "Sechs"));
-  EXPECT_EQ(seven.raw(), Symbols::New(thread, "Sieben"));
-  EXPECT_EQ(eight.raw(), Symbols::New(thread, "Acht"));
-  EXPECT_EQ(nine.raw(), Symbols::New(thread, "Neun"));
-  EXPECT_EQ(ten.raw(), Symbols::New(thread, "Zehn"));
+  EXPECT_EQ(two.ptr(), Symbols::New(thread, "Zwei"));
+  EXPECT_EQ(three.ptr(), Symbols::New(thread, "Drei"));
+  EXPECT_EQ(four.ptr(), Symbols::New(thread, "Vier"));
+  EXPECT_EQ(five.ptr(), Symbols::New(thread, "Fuenf"));
+  EXPECT_EQ(six.ptr(), Symbols::New(thread, "Sechs"));
+  EXPECT_EQ(seven.ptr(), Symbols::New(thread, "Sieben"));
+  EXPECT_EQ(eight.ptr(), Symbols::New(thread, "Acht"));
+  EXPECT_EQ(nine.ptr(), Symbols::New(thread, "Neun"));
+  EXPECT_EQ(ten.ptr(), Symbols::New(thread, "Zehn"));
 
   // Make sure to cause symbol table overflow.
   for (int i = 0; i < 1024; i++) {
@@ -1815,23 +1815,23 @@
     Symbols::New(thread, buf);
   }
   eins = Symbols::New(thread, "Eins");
-  EXPECT_EQ(one.raw(), eins.raw());
-  EXPECT_EQ(two.raw(), Symbols::New(thread, "Zwei"));
-  EXPECT_EQ(three.raw(), Symbols::New(thread, "Drei"));
-  EXPECT_EQ(four.raw(), Symbols::New(thread, "Vier"));
-  EXPECT_EQ(five.raw(), Symbols::New(thread, "Fuenf"));
-  EXPECT_EQ(six.raw(), Symbols::New(thread, "Sechs"));
-  EXPECT_EQ(seven.raw(), Symbols::New(thread, "Sieben"));
-  EXPECT_EQ(eight.raw(), Symbols::New(thread, "Acht"));
-  EXPECT_EQ(nine.raw(), Symbols::New(thread, "Neun"));
-  EXPECT_EQ(ten.raw(), Symbols::New(thread, "Zehn"));
+  EXPECT_EQ(one.ptr(), eins.ptr());
+  EXPECT_EQ(two.ptr(), Symbols::New(thread, "Zwei"));
+  EXPECT_EQ(three.ptr(), Symbols::New(thread, "Drei"));
+  EXPECT_EQ(four.ptr(), Symbols::New(thread, "Vier"));
+  EXPECT_EQ(five.ptr(), Symbols::New(thread, "Fuenf"));
+  EXPECT_EQ(six.ptr(), Symbols::New(thread, "Sechs"));
+  EXPECT_EQ(seven.ptr(), Symbols::New(thread, "Sieben"));
+  EXPECT_EQ(eight.ptr(), Symbols::New(thread, "Acht"));
+  EXPECT_EQ(nine.ptr(), Symbols::New(thread, "Neun"));
+  EXPECT_EQ(ten.ptr(), Symbols::New(thread, "Zehn"));
 
   // Symbols from Strings.
   eins = String::New("Eins");
   EXPECT(!eins.IsSymbol());
   String& ein_symbol = String::Handle(Symbols::New(thread, eins));
-  EXPECT_EQ(one.raw(), ein_symbol.raw());
-  EXPECT(one.raw() != eins.raw());
+  EXPECT_EQ(one.ptr(), ein_symbol.ptr());
+  EXPECT(one.ptr() != eins.ptr());
 
   uint16_t char16[] = {'E', 'l', 'f'};
   String& elf1 = String::Handle(Symbols::FromUTF16(thread, char16, 3));
@@ -1840,8 +1840,8 @@
       Symbols::New(thread, String::Handle(String::FromUTF32(char32, 3))));
   EXPECT(elf1.IsSymbol());
   EXPECT(elf2.IsSymbol());
-  EXPECT_EQ(elf1.raw(), Symbols::New(thread, "Elf"));
-  EXPECT_EQ(elf2.raw(), Symbols::New(thread, "Elf"));
+  EXPECT_EQ(elf1.ptr(), Symbols::New(thread, "Elf"));
+  EXPECT_EQ(elf2.ptr(), Symbols::New(thread, "Elf"));
 }
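
// The pointer-identity checks above are the symbol table's interning
// guarantee: Symbols::New first looks the text up in a canonical set and
// allocates only on a miss, so equal text yields the same object. Sketch
// with std::unordered_set standing in for the canonical store (element
// pointers are stable across rehashing):
#include <string>
#include <unordered_set>
inline const std::string* Intern(std::unordered_set<std::string>* table,
                                 const std::string& text) {
  return &*table->insert(text).first;  // a hit returns the existing element
}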
 
 ISOLATE_UNIT_TEST_CASE(SymbolUnicode) {
@@ -1849,12 +1849,12 @@
   String& monkey = String::Handle(Symbols::FromUTF16(thread, monkey_utf16, 2));
   EXPECT(monkey.IsSymbol());
   const char monkey_utf8[] = {'\xf0', '\x9f', '\x90', '\xb5', 0};
-  EXPECT_EQ(monkey.raw(), Symbols::New(thread, monkey_utf8));
+  EXPECT_EQ(monkey.ptr(), Symbols::New(thread, monkey_utf8));
 
   int32_t kMonkeyFace = 0x1f435;
   String& monkey2 = String::Handle(
       Symbols::New(thread, String::Handle(String::FromUTF32(&kMonkeyFace, 1))));
-  EXPECT_EQ(monkey.raw(), monkey2.raw());
+  EXPECT_EQ(monkey.ptr(), monkey2.ptr());
 
   // Unicode cat face with tears of joy.
   int32_t kCatFaceWithTearsOfJoy = 0x1f639;
@@ -1864,7 +1864,7 @@
   uint16_t cat_utf16[] = {0xd83d, 0xde39};
   String& cat2 = String::Handle(Symbols::FromUTF16(thread, cat_utf16, 2));
   EXPECT(cat2.IsSymbol());
-  EXPECT_EQ(cat2.raw(), cat.raw());
+  EXPECT_EQ(cat2.ptr(), cat.ptr());
 }
 
 ISOLATE_UNIT_TEST_CASE(Bool) {
@@ -1883,11 +1883,11 @@
   array.SetAt(0, array);
   array.SetAt(2, array);
   element = array.At(0);
-  EXPECT_EQ(array.raw(), element.raw());
+  EXPECT_EQ(array.ptr(), element.ptr());
   element = array.At(1);
   EXPECT(element.IsNull());
   element = array.At(2);
-  EXPECT_EQ(array.raw(), element.raw());
+  EXPECT_EQ(array.ptr(), element.ptr());
 
   Array& other_array = Array::Handle(Array::New(kArrayLen));
   other_array.SetAt(0, array);
@@ -1911,7 +1911,7 @@
   EXPECT(Smi::Cast(element).IsZero());
 
   array.MakeImmutable();
-  Object& obj = Object::Handle(array.raw());
+  Object& obj = Object::Handle(array.ptr());
   EXPECT(obj.IsArray());
 }
 
@@ -2165,15 +2165,15 @@
   }
   used_size = Array::InstanceSize(array.Length());
   new_array = Array::MakeFixedLength(array);
-  addr = ObjectLayout::ToAddr(new_array.raw());
-  obj = ObjectLayout::FromAddr(addr);
+  addr = UntaggedObject::ToAddr(new_array.ptr());
+  obj = UntaggedObject::FromAddr(addr);
   EXPECT(obj.IsArray());
-  new_array ^= obj.raw();
+  new_array ^= obj.ptr();
   EXPECT_EQ(2, new_array.Length());
   addr += used_size;
-  obj = ObjectLayout::FromAddr(addr);
+  obj = UntaggedObject::FromAddr(addr);
   EXPECT(obj.IsTypedData());
-  left_over_array ^= obj.raw();
+  left_over_array ^= obj.ptr();
   EXPECT_EQ(4 * kWordSize - TypedData::InstanceSize(0),
             left_over_array.Length());
 
@@ -2188,16 +2188,16 @@
   }
   used_size = Array::InstanceSize(array.Length());
   new_array = Array::MakeFixedLength(array);
-  addr = ObjectLayout::ToAddr(new_array.raw());
-  obj = ObjectLayout::FromAddr(addr);
+  addr = UntaggedObject::ToAddr(new_array.ptr());
+  obj = UntaggedObject::FromAddr(addr);
   EXPECT(obj.IsArray());
-  new_array ^= obj.raw();
+  new_array ^= obj.ptr();
   EXPECT_EQ(3, new_array.Length());
   addr += used_size;
-  obj = ObjectLayout::FromAddr(addr);
+  obj = UntaggedObject::FromAddr(addr);
   if (TypedData::InstanceSize(0) <= 2 * kWordSize) {
     EXPECT(obj.IsTypedData());
-    left_over_array ^= obj.raw();
+    left_over_array ^= obj.ptr();
     EXPECT_EQ(2 * kWordSize - TypedData::InstanceSize(0),
               left_over_array.Length());
   } else {
@@ -2214,15 +2214,15 @@
   }
   used_size = Array::InstanceSize(array.Length());
   new_array = Array::MakeFixedLength(array);
-  addr = ObjectLayout::ToAddr(new_array.raw());
-  obj = ObjectLayout::FromAddr(addr);
+  addr = UntaggedObject::ToAddr(new_array.ptr());
+  obj = UntaggedObject::FromAddr(addr);
   EXPECT(obj.IsArray());
-  new_array ^= obj.raw();
+  new_array ^= obj.ptr();
   EXPECT_EQ(1, new_array.Length());
   addr += used_size;
-  obj = ObjectLayout::FromAddr(addr);
+  obj = UntaggedObject::FromAddr(addr);
   EXPECT(obj.IsTypedData());
-  left_over_array ^= obj.raw();
+  left_over_array ^= obj.ptr();
   EXPECT_EQ(8 * kWordSize - TypedData::InstanceSize(0),
             left_over_array.Length());
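
// What this test leans on: truncating an array in place frees its tail,
// and the heap stays walkable because a filler object (here a degenerate
// TypedData) is installed over the freed bytes, so addr + used_size lands
// on a valid object header. Hedged bookkeeping sketch:
#include <cstdint>
struct TruncationPlan {
  uintptr_t filler_start;  // old start + new instance size
  intptr_t filler_bytes;   // bytes the filler object must cover
};
inline TruncationPlan PlanTruncation(uintptr_t start,
                                     intptr_t old_size,
                                     intptr_t new_size) {
  // The filler must itself parse as an object so heap iteration can skip it.
  return TruncationPlan{start + static_cast<uintptr_t>(new_size),
                        old_size - new_size};
}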
 
@@ -2504,7 +2504,7 @@
   const Context& context = Context::Handle(Context::New(kNumVariables));
   context.set_parent(parent_context);
   EXPECT_EQ(kNumVariables, context.num_variables());
-  EXPECT(Context::Handle(context.parent()).raw() == parent_context.raw());
+  EXPECT(Context::Handle(context.parent()).ptr() == parent_context.ptr());
   EXPECT_EQ(0, Context::Handle(context.parent()).num_variables());
   EXPECT(Context::Handle(Context::Handle(context.parent()).parent()).IsNull());
   Object& variable = Object::Handle(context.At(0));
@@ -2640,8 +2640,8 @@
   const String& parent_name = String::Handle(Symbols::New(thread, "foo_papa"));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   parent = Function::New(signature, parent_name,
-                         FunctionLayout::kRegularFunction, false, false, false,
-                         false, false, cls, TokenPosition::kMinSource);
+                         UntaggedFunction::kRegularFunction, false, false,
+                         false, false, false, cls, TokenPosition::kMinSource);
   functions.SetAt(0, parent);
   {
     SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
@@ -2658,9 +2658,9 @@
   const Class& closure_class = Class::Handle(closure.clazz());
   EXPECT_EQ(closure_class.id(), kClosureCid);
   const Function& closure_function = Function::Handle(closure.function());
-  EXPECT_EQ(closure_function.raw(), function.raw());
+  EXPECT_EQ(closure_function.ptr(), function.ptr());
   const Context& closure_context = Context::Handle(closure.context());
-  EXPECT_EQ(closure_context.raw(), context.raw());
+  EXPECT_EQ(closure_context.ptr(), context.ptr());
 }
 
 ISOLATE_UNIT_TEST_CASE(ObjectPrinting) {
@@ -2718,7 +2718,7 @@
   const String& function_name = String::ZoneHandle(Symbols::New(thread, name));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   return Function::New(signature, function_name,
-                       FunctionLayout::kRegularFunction, true, false, false,
+                       UntaggedFunction::kRegularFunction, true, false, false,
                        false, false, owner_class, TokenPosition::kMinSource);
 }
 
@@ -2734,7 +2734,7 @@
   function.AttachCode(code);
   const Instructions& instructions = Instructions::Handle(code.instructions());
   uword payload_start = instructions.PayloadStart();
-  EXPECT_EQ(instructions.raw(), Instructions::FromPayloadStart(payload_start));
+  EXPECT_EQ(instructions.ptr(), Instructions::FromPayloadStart(payload_start));
   const Object& result =
       Object::Handle(DartEntry::InvokeFunction(function, Array::empty_array()));
   EXPECT_EQ(1, Smi::Cast(result).Value());
@@ -2756,7 +2756,7 @@
   function.AttachCode(code);
   Instructions& instructions = Instructions::Handle(code.instructions());
   uword payload_start = instructions.PayloadStart();
-  EXPECT_EQ(instructions.raw(), Instructions::FromPayloadStart(payload_start));
+  EXPECT_EQ(instructions.ptr(), Instructions::FromPayloadStart(payload_start));
   // Try writing into the generated code, expected to crash.
   *(reinterpret_cast<char*>(payload_start) + 1) = 1;
   if (!FLAG_write_protect_code) {
@@ -2795,15 +2795,15 @@
   Instructions& instructions = Instructions::Handle(code.instructions());
   uword payload_start = code.PayloadStart();
   const uword unchecked_offset = code.UncheckedEntryPoint() - code.EntryPoint();
-  EXPECT_EQ(instructions.raw(), Instructions::FromPayloadStart(payload_start));
+  EXPECT_EQ(instructions.ptr(), Instructions::FromPayloadStart(payload_start));
   // Execute the executable view of the instructions (default).
   Object& result =
       Object::Handle(DartEntry::InvokeFunction(function, Array::empty_array()));
   EXPECT_EQ(1, Smi::Cast(result).Value());
   // Switch to the writeable but non-executable view of the instructions.
-  instructions ^= OldPage::ToWritable(instructions.raw());
+  instructions ^= OldPage::ToWritable(instructions.ptr());
   payload_start = instructions.PayloadStart();
-  EXPECT_EQ(instructions.raw(), Instructions::FromPayloadStart(payload_start));
+  EXPECT_EQ(instructions.ptr(), Instructions::FromPayloadStart(payload_start));
   // Hook up Code and Instructions objects.
   CodeTestHelper::SetInstructions(code, instructions, unchecked_offset);
   function.AttachCode(code);
@@ -2835,9 +2835,9 @@
   function.AttachCode(code);
   const Object& result =
       Object::Handle(DartEntry::InvokeFunction(function, Array::empty_array()));
-  EXPECT(result.raw()->IsHeapObject());
+  EXPECT(result.ptr()->IsHeapObject());
   String& string_object = String::Handle();
-  string_object ^= result.raw();
+  string_object ^= result.ptr();
   EXPECT(string_object.Length() == expected_length);
   for (int i = 0; i < expected_length; i++) {
     EXPECT(string_object.CharAt(i) == kHello[i]);
@@ -2925,17 +2925,17 @@
   DescriptorList* builder = new DescriptorList(thread->zone());
 
   // kind, pc_offset, deopt_id, token_pos, try_index, yield_index
-  builder->AddDescriptor(PcDescriptorsLayout::kOther, 10, 1,
+  builder->AddDescriptor(UntaggedPcDescriptors::kOther, 10, 1,
                          TokenPosition::Deserialize(20), 1, 1);
-  builder->AddDescriptor(PcDescriptorsLayout::kDeopt, 20, 2,
+  builder->AddDescriptor(UntaggedPcDescriptors::kDeopt, 20, 2,
                          TokenPosition::Deserialize(30), 0, -1);
-  builder->AddDescriptor(PcDescriptorsLayout::kOther, 30, 3,
+  builder->AddDescriptor(UntaggedPcDescriptors::kOther, 30, 3,
                          TokenPosition::Deserialize(40), 1, 10);
-  builder->AddDescriptor(PcDescriptorsLayout::kOther, 10, 4,
+  builder->AddDescriptor(UntaggedPcDescriptors::kOther, 10, 4,
                          TokenPosition::Deserialize(40), 2, 20);
-  builder->AddDescriptor(PcDescriptorsLayout::kOther, 10, 5,
+  builder->AddDescriptor(UntaggedPcDescriptors::kOther, 10, 5,
                          TokenPosition::Deserialize(80), 3, 30);
-  builder->AddDescriptor(PcDescriptorsLayout::kOther, 80, 6,
+  builder->AddDescriptor(UntaggedPcDescriptors::kOther, 80, 6,
                          TokenPosition::Deserialize(150), 3, 30);
 
   PcDescriptors& descriptors = PcDescriptors::Handle();
@@ -2952,7 +2952,7 @@
 
   // Verify the PcDescriptor entries by accessing them.
   const PcDescriptors& pc_descs = PcDescriptors::Handle(code.pc_descriptors());
-  PcDescriptors::Iterator iter(pc_descs, PcDescriptorsLayout::kAnyKind);
+  PcDescriptors::Iterator iter(pc_descs, UntaggedPcDescriptors::kAnyKind);
 
   EXPECT_EQ(true, iter.MoveNext());
   EXPECT_EQ(1, iter.YieldIndex());
@@ -2960,12 +2960,12 @@
   EXPECT_EQ(1, iter.TryIndex());
   EXPECT_EQ(static_cast<uword>(10), iter.PcOffset());
   EXPECT_EQ(1, iter.DeoptId());
-  EXPECT_EQ(PcDescriptorsLayout::kOther, iter.Kind());
+  EXPECT_EQ(UntaggedPcDescriptors::kOther, iter.Kind());
 
   EXPECT_EQ(true, iter.MoveNext());
   EXPECT_EQ(-1, iter.YieldIndex());
   EXPECT_EQ(30, iter.TokenPos().Pos());
-  EXPECT_EQ(PcDescriptorsLayout::kDeopt, iter.Kind());
+  EXPECT_EQ(UntaggedPcDescriptors::kDeopt, iter.Kind());
 
   EXPECT_EQ(true, iter.MoveNext());
   EXPECT_EQ(10, iter.YieldIndex());
@@ -2986,7 +2986,7 @@
   EXPECT_EQ(3, iter.TryIndex());
   EXPECT_EQ(static_cast<uword>(80), iter.PcOffset());
   EXPECT_EQ(150, iter.TokenPos().Pos());
-  EXPECT_EQ(PcDescriptorsLayout::kOther, iter.Kind());
+  EXPECT_EQ(UntaggedPcDescriptors::kOther, iter.Kind());
 
   EXPECT_EQ(false, iter.MoveNext());
 }
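
The expectations above follow from how the iterator filters entries by kind. A minimal model of that filtered iteration (illustrative types; the real PcDescriptors::Iterator decodes a compressed byte stream, and kAnyKind matches every kind):

```cpp
#include <cassert>
#include <vector>

// Descriptor kinds as bit values so a mask can select several at once.
enum Kind { kOther = 1, kDeopt = 2, kAnyKind = kOther | kDeopt };

struct Entry {
  Kind kind;
  unsigned pc_offset;
};

class KindIterator {
 public:
  KindIterator(const std::vector<Entry>& entries, int mask)
      : entries_(entries), mask_(mask) {}
  // Advance to the next entry whose kind is in the mask.
  bool MoveNext() {
    while (++index_ < static_cast<int>(entries_.size())) {
      if ((entries_[index_].kind & mask_) != 0) return true;
    }
    return false;
  }
  const Entry& Current() const { return entries_[index_]; }

 private:
  const std::vector<Entry>& entries_;
  int mask_;
  int index_ = -1;
};

int main() {
  std::vector<Entry> entries = {{kOther, 10}, {kDeopt, 20}, {kOther, 30}};
  KindIterator it(entries, kDeopt);  // filter to deopt entries only
  assert(it.MoveNext() && it.Current().pc_offset == 20);
  assert(!it.MoveNext());
  return 0;
}
```
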
@@ -2995,17 +2995,17 @@
   DescriptorList* builder = new DescriptorList(thread->zone());
 
   // kind, pc_offset, deopt_id, token_pos, try_index, yield_index
-  builder->AddDescriptor(PcDescriptorsLayout::kOther, 100, 1,
+  builder->AddDescriptor(UntaggedPcDescriptors::kOther, 100, 1,
                          TokenPosition::Deserialize(200), 1, 10);
-  builder->AddDescriptor(PcDescriptorsLayout::kDeopt, 200, 2,
+  builder->AddDescriptor(UntaggedPcDescriptors::kDeopt, 200, 2,
                          TokenPosition::Deserialize(300), 0, -1);
-  builder->AddDescriptor(PcDescriptorsLayout::kOther, 300, 3,
+  builder->AddDescriptor(UntaggedPcDescriptors::kOther, 300, 3,
                          TokenPosition::Deserialize(400), 1, 10);
-  builder->AddDescriptor(PcDescriptorsLayout::kOther, 100, 4,
+  builder->AddDescriptor(UntaggedPcDescriptors::kOther, 100, 4,
                          TokenPosition::Deserialize(0), 2, 20);
-  builder->AddDescriptor(PcDescriptorsLayout::kOther, 100, 5,
+  builder->AddDescriptor(UntaggedPcDescriptors::kOther, 100, 5,
                          TokenPosition::Deserialize(800), 3, 30);
-  builder->AddDescriptor(PcDescriptorsLayout::kOther, 800, 6,
+  builder->AddDescriptor(UntaggedPcDescriptors::kOther, 800, 6,
                          TokenPosition::Deserialize(150), 3, 30);
 
   PcDescriptors& descriptors = PcDescriptors::Handle();
@@ -3022,7 +3022,7 @@
 
   // Verify the PcDescriptor entries by accessing them.
   const PcDescriptors& pc_descs = PcDescriptors::Handle(code.pc_descriptors());
-  PcDescriptors::Iterator iter(pc_descs, PcDescriptorsLayout::kAnyKind);
+  PcDescriptors::Iterator iter(pc_descs, UntaggedPcDescriptors::kAnyKind);
 
   EXPECT_EQ(true, iter.MoveNext());
   EXPECT_EQ(10, iter.YieldIndex());
@@ -3030,12 +3030,12 @@
   EXPECT_EQ(1, iter.TryIndex());
   EXPECT_EQ(static_cast<uword>(100), iter.PcOffset());
   EXPECT_EQ(1, iter.DeoptId());
-  EXPECT_EQ(PcDescriptorsLayout::kOther, iter.Kind());
+  EXPECT_EQ(UntaggedPcDescriptors::kOther, iter.Kind());
 
   EXPECT_EQ(true, iter.MoveNext());
   EXPECT_EQ(-1, iter.YieldIndex());
   EXPECT_EQ(300, iter.TokenPos().Pos());
-  EXPECT_EQ(PcDescriptorsLayout::kDeopt, iter.Kind());
+  EXPECT_EQ(UntaggedPcDescriptors::kDeopt, iter.Kind());
 
   EXPECT_EQ(true, iter.MoveNext());
   EXPECT_EQ(10, iter.YieldIndex());
@@ -3056,7 +3056,7 @@
   EXPECT_EQ(3, iter.TryIndex());
   EXPECT_EQ(static_cast<uword>(800), iter.PcOffset());
   EXPECT_EQ(150, iter.TokenPos().Pos());
-  EXPECT_EQ(PcDescriptorsLayout::kOther, iter.Kind());
+  EXPECT_EQ(UntaggedPcDescriptors::kOther, iter.Kind());
 
   EXPECT_EQ(false, iter.MoveNext());
 }
@@ -3066,7 +3066,7 @@
       String::Handle(Symbols::New(Thread::Current(), name));
   const Class& cls =
       Class::Handle(CreateDummyClass(class_name, Script::Handle()));
-  return cls.raw();
+  return cls.ptr();
 }
 
 static FieldPtr CreateTestField(const char* name) {
@@ -3081,7 +3081,7 @@
                                   thread->isolate_group()->program_lock());
     thread->isolate_group()->RegisterStaticField(field, Instance::sentinel());
   }
-  return field.raw();
+  return field.ptr();
 }
 
 ISOLATE_UNIT_TEST_CASE(ClassDictionaryIterator) {
@@ -3100,7 +3100,7 @@
   Class& cls = Class::Handle();
   while (iterator.HasNext()) {
     cls = iterator.GetNextClass();
-    EXPECT((cls.raw() == ae66.raw()) || (cls.raw() == re44.raw()));
+    EXPECT((cls.ptr() == ae66.ptr()) || (cls.ptr() == re44.ptr()));
     count++;
   }
   EXPECT(count == 2);
@@ -3118,7 +3118,7 @@
   const bool is_native = false;
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   return Function::New(signature, function_name,
-                       FunctionLayout::kRegularFunction, is_static, is_const,
+                       UntaggedFunction::kRegularFunction, is_static, is_const,
                        is_abstract, is_external, is_native, cls,
                        TokenPosition::kMinSource);
 }
@@ -3137,10 +3137,10 @@
                    ICData::kInstance);
   EXPECT_EQ(1, o1.NumArgsTested());
   EXPECT_EQ(id, o1.deopt_id());
-  EXPECT_EQ(function.raw(), o1.Owner());
+  EXPECT_EQ(function.ptr(), o1.Owner());
   EXPECT_EQ(0, o1.NumberOfChecks());
-  EXPECT_EQ(target_name.raw(), o1.target_name());
-  EXPECT_EQ(args_descriptor.raw(), o1.arguments_descriptor());
+  EXPECT_EQ(target_name.ptr(), o1.target_name());
+  EXPECT_EQ(args_descriptor.ptr(), o1.arguments_descriptor());
 
   const Function& target1 = Function::Handle(GetDummyTarget("Thun"));
   o1.AddReceiverCheck(kSmiCid, target1);
@@ -3150,13 +3150,13 @@
   Function& test_target = Function::Handle();
   o1.GetOneClassCheckAt(0, &test_class_id, &test_target);
   EXPECT_EQ(kSmiCid, test_class_id);
-  EXPECT_EQ(target1.raw(), test_target.raw());
+  EXPECT_EQ(target1.ptr(), test_target.ptr());
   EXPECT_EQ(kSmiCid, o1.GetCidAt(0));
   GrowableArray<intptr_t> test_class_ids;
   o1.GetCheckAt(0, &test_class_ids, &test_target);
   EXPECT_EQ(1, test_class_ids.length());
   EXPECT_EQ(kSmiCid, test_class_ids[0]);
-  EXPECT_EQ(target1.raw(), test_target.raw());
+  EXPECT_EQ(target1.ptr(), test_target.ptr());
 
   const Function& target2 = Function::Handle(GetDummyTarget("Thun"));
   o1.AddReceiverCheck(kDoubleCid, target2);
@@ -3164,7 +3164,7 @@
   EXPECT_EQ(2, o1.NumberOfUsedChecks());
   o1.GetOneClassCheckAt(1, &test_class_id, &test_target);
   EXPECT_EQ(kDoubleCid, test_class_id);
-  EXPECT_EQ(target2.raw(), test_target.raw());
+  EXPECT_EQ(target2.ptr(), test_target.ptr());
   EXPECT_EQ(kDoubleCid, o1.GetCidAt(1));
 
   o1.AddReceiverCheck(kMintCid, target2);
@@ -3177,7 +3177,7 @@
                    ICData::kInstance);
   EXPECT_EQ(2, o2.NumArgsTested());
   EXPECT_EQ(57, o2.deopt_id());
-  EXPECT_EQ(function.raw(), o2.Owner());
+  EXPECT_EQ(function.ptr(), o2.Owner());
   EXPECT_EQ(0, o2.NumberOfChecks());
   GrowableArray<intptr_t> classes;
   classes.Add(kSmiCid);
@@ -3188,14 +3188,14 @@
   EXPECT_EQ(2, test_class_ids.length());
   EXPECT_EQ(kSmiCid, test_class_ids[0]);
   EXPECT_EQ(kSmiCid, test_class_ids[1]);
-  EXPECT_EQ(target1.raw(), test_target.raw());
+  EXPECT_EQ(target1.ptr(), test_target.ptr());
 
   // Check ICData for unoptimized static calls.
   const intptr_t kNumArgsChecked = 0;
   const ICData& scall_icdata = ICData::Handle(
       ICData::NewForStaticCall(function, target1, args_descriptor, 57,
                                kNumArgsChecked, ICData::kInstance));
-  EXPECT_EQ(target1.raw(), scall_icdata.GetTargetAt(0));
+  EXPECT_EQ(target1.ptr(), scall_icdata.GetTargetAt(0));
 }
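
What the ICData assertions above exercise is, in essence, a growing table of (receiver class id, target) checks at a call site. A toy model of that behavior (names and types are illustrative, not the VM's ICData, which also tracks counts and argument descriptors):

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

struct Target {
  const char* name;
};

// One inline cache: each receiver class id seen at the call site adds a check.
class InlineCache {
 public:
  void AddReceiverCheck(int64_t cid, const Target* target) {
    checks_.push_back({cid, target});
  }
  int64_t GetCidAt(size_t i) const { return checks_[i].cid; }
  const Target* GetTargetAt(size_t i) const { return checks_[i].target; }
  size_t NumberOfChecks() const { return checks_.size(); }

 private:
  struct Check {
    int64_t cid;
    const Target* target;
  };
  std::vector<Check> checks_;
};

int main() {
  const int64_t kSmiCid = 1, kDoubleCid = 2;  // stand-ins for real class ids
  Target thun{"Thun"};
  InlineCache ic;
  ic.AddReceiverCheck(kSmiCid, &thun);
  ic.AddReceiverCheck(kDoubleCid, &thun);
  assert(ic.NumberOfChecks() == 2);
  assert(ic.GetCidAt(0) == kSmiCid && ic.GetTargetAt(1) == &thun);
  return 0;
}
```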
 
 ISOLATE_UNIT_TEST_CASE(SubtypeTestCache) {
@@ -3233,14 +3233,14 @@
   cache.GetCheck(0, &test_class_id_or_fun, &test_dest_type, &test_targ_0,
                  &test_targ_1, &test_targ_2, &test_targ_3, &test_targ_4,
                  &test_result);
-  EXPECT_EQ(class_id_or_fun.raw(), test_class_id_or_fun.raw());
-  EXPECT_EQ(dest_type.raw(), test_dest_type.raw());
-  EXPECT_EQ(targ_0.raw(), test_targ_0.raw());
-  EXPECT_EQ(targ_1.raw(), test_targ_1.raw());
-  EXPECT_EQ(targ_2.raw(), test_targ_2.raw());
-  EXPECT_EQ(targ_3.raw(), test_targ_3.raw());
-  EXPECT_EQ(targ_4.raw(), test_targ_4.raw());
-  EXPECT_EQ(Bool::True().raw(), test_result.raw());
+  EXPECT_EQ(class_id_or_fun.ptr(), test_class_id_or_fun.ptr());
+  EXPECT_EQ(dest_type.ptr(), test_dest_type.ptr());
+  EXPECT_EQ(targ_0.ptr(), test_targ_0.ptr());
+  EXPECT_EQ(targ_1.ptr(), test_targ_1.ptr());
+  EXPECT_EQ(targ_2.ptr(), test_targ_2.ptr());
+  EXPECT_EQ(targ_3.ptr(), test_targ_3.ptr());
+  EXPECT_EQ(targ_4.ptr(), test_targ_4.ptr());
+  EXPECT_EQ(Bool::True().ptr(), test_result.ptr());
 }
 
 ISOLATE_UNIT_TEST_CASE(MegamorphicCache) {
@@ -3261,11 +3261,11 @@
 
     EXPECT(cache.Lookup(cidA) == Object::null());
     cache.EnsureContains(cidA, valueA);
-    EXPECT(cache.Lookup(cidA) == valueA.raw());
+    EXPECT(cache.Lookup(cidA) == valueA.ptr());
 
     EXPECT(cache.Lookup(cidB) == Object::null());
     cache.EnsureContains(cidB, valueB);
-    EXPECT(cache.Lookup(cidB) == valueB.raw());
+    EXPECT(cache.Lookup(cidB) == valueB.ptr());
   }
 
   // Try to insert many keys to hit collisions & growth.
@@ -3904,16 +3904,16 @@
   reference.set_referent(library);
   const Object& returned_referent = Object::Handle(reference.referent());
   EXPECT(returned_referent.IsLibrary());
-  EXPECT_EQ(returned_referent.raw(), library.raw());
+  EXPECT_EQ(returned_referent.ptr(), library.ptr());
 
   const MirrorReference& other_reference =
       MirrorReference::Handle(MirrorReference::New(Object::Handle()));
-  EXPECT_NE(reference.raw(), other_reference.raw());
+  EXPECT_NE(reference.ptr(), other_reference.ptr());
   other_reference.set_referent(library);
-  EXPECT_NE(reference.raw(), other_reference.raw());
+  EXPECT_NE(reference.ptr(), other_reference.ptr());
   EXPECT_EQ(reference.referent(), other_reference.referent());
 
-  Object& obj = Object::Handle(reference.raw());
+  Object& obj = Object::Handle(reference.ptr());
   EXPECT(obj.IsMirrorReference());
 }
 
@@ -3924,7 +3924,7 @@
   const Function& result = Function::Handle(Resolver::ResolveDynamicFunction(
       Z, cls, String::Handle(String::New(name))));
   EXPECT(!result.IsNull());
-  return result.raw();
+  return result.ptr();
 }
 
 static FunctionPtr GetStaticFunction(const Class& cls, const char* name) {
@@ -3933,21 +3933,21 @@
   const Function& result = Function::Handle(
       cls.LookupStaticFunction(String::Handle(String::New(name))));
   EXPECT(!result.IsNull());
-  return result.raw();
+  return result.ptr();
 }
 
 static FieldPtr GetField(const Class& cls, const char* name) {
   const Field& field =
       Field::Handle(cls.LookupField(String::Handle(String::New(name))));
   EXPECT(!field.IsNull());
-  return field.raw();
+  return field.ptr();
 }
 
 static ClassPtr GetClass(const Library& lib, const char* name) {
   const Class& cls = Class::Handle(
       lib.LookupClass(String::Handle(Symbols::New(Thread::Current(), name))));
   EXPECT(!cls.IsNull());  // No ambiguity error expected.
-  return cls.raw();
+  return cls.ptr();
 }
 
 ISOLATE_UNIT_TEST_CASE(FindClosureIndex) {
@@ -3961,8 +3961,8 @@
   const String& parent_name = String::Handle(Symbols::New(thread, "foo_papa"));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   parent = Function::New(signature, parent_name,
-                         FunctionLayout::kRegularFunction, false, false, false,
-                         false, false, cls, TokenPosition::kMinSource);
+                         UntaggedFunction::kRegularFunction, false, false,
+                         false, false, false, cls, TokenPosition::kMinSource);
   functions.SetAt(0, parent);
   {
     SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
@@ -3992,7 +3992,7 @@
   func_from_index ^=
       ClosureFunctionsCache::ClosureFunctionFromIndex(good_closure_index);
   // Same closure function.
-  EXPECT_EQ(func_from_index.raw(), function.raw());
+  EXPECT_EQ(func_from_index.ptr(), function.ptr());
 }
 
 ISOLATE_UNIT_TEST_CASE(FindInvocationDispatcherFunctionIndex) {
@@ -4006,8 +4006,8 @@
   const String& parent_name = String::Handle(Symbols::New(thread, "foo_papa"));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   parent = Function::New(signature, parent_name,
-                         FunctionLayout::kRegularFunction, false, false, false,
-                         false, false, cls, TokenPosition::kMinSource);
+                         UntaggedFunction::kRegularFunction, false, false,
+                         false, false, false, cls, TokenPosition::kMinSource);
   functions.SetAt(0, parent);
   {
     SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
@@ -4022,7 +4022,7 @@
   Function& invocation_dispatcher = Function::Handle();
   invocation_dispatcher ^= cls.GetInvocationDispatcher(
       invocation_dispatcher_name, args_desc,
-      FunctionLayout::kNoSuchMethodDispatcher, true /* create_if_absent */);
+      UntaggedFunction::kNoSuchMethodDispatcher, true /* create_if_absent */);
   EXPECT(!invocation_dispatcher.IsNull());
   // Get index to function.
   intptr_t invocation_dispatcher_index =
@@ -4034,8 +4034,8 @@
   invocation_dispatcher_from_index ^=
       cls.InvocationDispatcherFunctionFromIndex(invocation_dispatcher_index);
   // Same function.
-  EXPECT_EQ(invocation_dispatcher.raw(),
-            invocation_dispatcher_from_index.raw());
+  EXPECT_EQ(invocation_dispatcher.ptr(),
+            invocation_dispatcher_from_index.ptr());
   // Test function not found case.
   const Function& bad_function = Function::Handle(Function::null());
   intptr_t bad_invocation_dispatcher_index =
@@ -4657,7 +4657,7 @@
   EXPECT(!clazz.IsNull());
   const Instance& a0 = Instance::Handle(Instance::New(clazz));
   const Instance& a1 = Instance::Handle(Instance::New(clazz));
-  EXPECT(a0.raw() != a1.raw());
+  EXPECT(a0.ptr() != a1.ptr());
   EXPECT(a0.OperatorEquals(a0));
   EXPECT(a0.OperatorEquals(a1));
   EXPECT(a0.IsIdenticalTo(a0));
diff --git a/runtime/vm/object_x64_test.cc b/runtime/vm/object_x64_test.cc
index e8913ce..30e9f84 100644
--- a/runtime/vm/object_x64_test.cc
+++ b/runtime/vm/object_x64_test.cc
@@ -43,7 +43,7 @@
 // This is used to test Embedded Smi objects in the instructions.
 void GenerateEmbedSmiInCode(compiler::Assembler* assembler, intptr_t value) {
   const Smi& smi_object = Smi::ZoneHandle(Smi::New(value));
-  __ movq(RAX, compiler::Immediate(static_cast<int64_t>(smi_object.raw())));
+  __ movq(RAX, compiler::Immediate(static_cast<int64_t>(smi_object.ptr())));
   __ ret();
 }
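
The reason a Smi can be embedded as a plain movq immediate with no relocation entry: its tagged representation is the integer value shifted left by one, leaving the low bit clear, so it never looks like a heap pointer the GC would need to update. A sketch of that encoding (assuming the usual one-bit Smi tag; the helper names are illustrative):

```cpp
#include <cassert>
#include <cstdint>

constexpr int kSmiTagSize = 1;

int64_t SmiEncode(int64_t value) { return value << kSmiTagSize; }
int64_t SmiDecode(int64_t tagged) { return tagged >> kSmiTagSize; }

int main() {
  // What static_cast<int64_t>(smi_object.ptr()) yields for Smi::New(42).
  int64_t tagged = SmiEncode(42);
  assert((tagged & 1) == 0);  // clear low bit marks a Smi, not a heap object
  assert(SmiDecode(tagged) == 42);
  return 0;
}
```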
 
diff --git a/runtime/vm/parser.cc b/runtime/vm/parser.cc
index 52faef4..866d0aa 100644
--- a/runtime/vm/parser.cc
+++ b/runtime/vm/parser.cc
@@ -116,7 +116,7 @@
   // inlining.
   ASSERT(field->IsOriginal() ==
          !CompilerState::Current().should_clone_fields());
-  guarded_fields_->Add(&Field::ZoneHandle(Z, field->raw()));
+  guarded_fields_->Add(&Field::ZoneHandle(Z, field->ptr()));
 }
 
 void ParsedFunction::Bailout(const char* origin, const char* reason) const {
diff --git a/runtime/vm/profiler.cc b/runtime/vm/profiler.cc
index d12a763..0c7cad8 100644
--- a/runtime/vm/profiler.cc
+++ b/runtime/vm/profiler.cc
@@ -317,7 +317,7 @@
   ReturnAddressLocator(Sample* sample, const Code& code)
       : stack_buffer_(sample->GetStackBuffer()),
         pc_(sample->pc()),
-        code_(Code::ZoneHandle(code.raw())) {
+        code_(Code::ZoneHandle(code.ptr())) {
     ASSERT(!code_.IsNull());
     ASSERT(code_.ContainsInstructionAt(pc()));
   }
@@ -325,7 +325,7 @@
   ReturnAddressLocator(uword pc, uword* stack_buffer, const Code& code)
       : stack_buffer_(stack_buffer),
         pc_(pc),
-        code_(Code::ZoneHandle(code.raw())) {
+        code_(Code::ZoneHandle(code.ptr())) {
     ASSERT(!code_.IsNull());
     ASSERT(code_.ContainsInstructionAt(pc_));
   }
@@ -1460,7 +1460,7 @@
 void CodeLookupTable::Add(const Object& code) {
   ASSERT(!code.IsNull());
   ASSERT(code.IsCode());
-  CodeDescriptor* cd = new CodeDescriptor(AbstractCode(code.raw()));
+  CodeDescriptor* cd = new CodeDescriptor(AbstractCode(code.ptr()));
   code_objects_.Add(cd);
 }
 
@@ -1636,7 +1636,7 @@
                                                uword pc_marker,
                                                uword* stack_buffer) {
   ASSERT(cd != NULL);
-  const Code& code = Code::Handle(Code::RawCast(cd->code().raw()));
+  const Code& code = Code::Handle(Code::RawCast(cd->code().ptr()));
   ASSERT(!code.IsNull());
   // Some stubs (and intrinsics) do not push a frame onto the stack, leaving
   // the frame pointer in the caller.
diff --git a/runtime/vm/profiler.h b/runtime/vm/profiler.h
index 4f92d72..5f24c2a 100644
--- a/runtime/vm/profiler.h
+++ b/runtime/vm/profiler.h
@@ -460,7 +460,7 @@
     ASSERT(code_.IsNull() || code_.IsCode());
   }
 
-  ObjectPtr raw() const { return code_.raw(); }
+  ObjectPtr ptr() const { return code_.ptr(); }
   const Object* handle() const { return &code_; }
 
   uword PayloadStart() const {
diff --git a/runtime/vm/profiler_service.cc b/runtime/vm/profiler_service.cc
index 8df86d6..e1a3c38 100644
--- a/runtime/vm/profiler_service.cc
+++ b/runtime/vm/profiler_service.cc
@@ -65,7 +65,7 @@
     }
     NoSafepointScope no_safepoint_scope;
     for (intptr_t i = 0; i < array.Length(); i++) {
-      if (code.raw() == array.At(i)) {
+      if (code.ptr() == array.At(i)) {
         return true;
       }
     }
@@ -110,7 +110,7 @@
                                  const intptr_t table_index)
     : kind_(kind),
       name_(name),
-      function_(Function::ZoneHandle(function.raw())),
+      function_(Function::ZoneHandle(function.ptr())),
       table_index_(table_index),
       profile_codes_(0),
       source_position_ticks_(0),
@@ -607,7 +607,7 @@
     static inline intptr_t Hashcode(Key key) { return key->Hash(); }
 
     static inline bool IsKeyEqual(Pair kv, Key key) {
-      return kv->function()->raw() == key->raw();
+      return kv->function()->ptr() == key->ptr();
     }
   };
 
@@ -1207,7 +1207,7 @@
     TokenPosition token_position = TokenPosition::kNoSource;
     Code& code = Code::ZoneHandle();
     if (profile_code->code().IsCode()) {
-      code ^= profile_code->code().raw();
+      code ^= profile_code->code().ptr();
       inlined_functions_cache_->Get(pc, code, sample, frame_index,
                                     &inlined_functions,
                                     &inlined_token_positions, &token_position);
@@ -1662,7 +1662,7 @@
   Code& code = Code::ZoneHandle();
 
   if (profile_code->code().IsCode()) {
-    code ^= profile_code->code().raw();
+    code ^= profile_code->code().ptr();
     cache_->Get(pc, code, sample, frame_index, &inlined_functions,
                 &inlined_token_positions, &token_position);
     if (FLAG_trace_profiler_verbose && (inlined_functions != NULL)) {
@@ -1869,7 +1869,7 @@
                      thread_task_mask,
                      time_origin_micros,
                      time_extent_micros),
-        cls_(Class::Handle(cls.raw())) {
+        cls_(Class::Handle(cls.ptr())) {
     ASSERT(!cls_.IsNull());
   }
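
The Class::Handle(cls.ptr()) pattern recurring in these hunks re-wraps an existing object in a fresh handle: each handle is one GC-visible slot holding a tagged pointer, and ptr() reads the slot back. A model of that indirection (purely illustrative; the VM's handles live in zones and handle scopes rather than on the C++ stack):

```cpp
#include <cassert>

template <typename T>
class Handle {
 public:
  explicit Handle(T* raw) : raw_(raw) {}
  T* ptr() const { return raw_; }       // tagged pointer, like handle.ptr()
  void set_ptr(T* raw) { raw_ = raw; }  // a moving GC may rewrite this slot

 private:
  T* raw_;  // the one word a collector visits and updates
};

struct Klass {};

int main() {
  Klass object;
  Handle<Klass> cls(&object);
  Handle<Klass> copy(cls.ptr());    // Class::Handle(cls.ptr()) analogue
  assert(copy.ptr() == cls.ptr());  // identity compares the raw pointers
  return 0;
}
```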
 
diff --git a/runtime/vm/profiler_test.cc b/runtime/vm/profiler_test.cc
index 49ecdd3..d4e62d2 100644
--- a/runtime/vm/profiler_test.cc
+++ b/runtime/vm/profiler_test.cc
@@ -164,7 +164,7 @@
   }
   Library& lib = Library::Handle();
   lib ^= Api::UnwrapHandle(api_lib);
-  return lib.raw();
+  return lib.ptr();
 }
 
 static ClassPtr GetClass(const Library& lib, const char* name) {
@@ -172,7 +172,7 @@
   const Class& cls = Class::Handle(
       lib.LookupClassAllowPrivate(String::Handle(Symbols::New(thread, name))));
   EXPECT(!cls.IsNull());  // No ambiguity error expected.
-  return cls.raw();
+  return cls.ptr();
 }
 
 static FunctionPtr GetFunction(const Library& lib, const char* name) {
@@ -180,7 +180,7 @@
   const Function& func = Function::Handle(lib.LookupFunctionAllowPrivate(
       String::Handle(Symbols::New(thread, name))));
   EXPECT(!func.IsNull());  // No ambiguity error expected.
-  return func.raw();
+  return func.ptr();
 }
 
 static void Invoke(const Library& lib,
@@ -188,7 +188,7 @@
                    intptr_t argc = 0,
                    Dart_Handle* argv = NULL) {
   Thread* thread = Thread::Current();
-  Dart_Handle api_lib = Api::NewHandle(thread, lib.raw());
+  Dart_Handle api_lib = Api::NewHandle(thread, lib.ptr());
   TransitionVMToNative transition(thread);
   Dart_Handle result = Dart_Invoke(api_lib, NewString(name), argc, argv);
   EXPECT_VALID(result);
@@ -365,7 +365,7 @@
     TokenPosition token_position = TokenPosition::kNoSource;
     Code& code = Code::ZoneHandle();
     if (profile_code->code().IsCode()) {
-      code ^= profile_code->code().raw();
+      code ^= profile_code->code().ptr();
       inlined_functions_cache_.Get(pc, code, sample_, index_,
                                    &inlined_functions_,
                                    &inlined_token_positions_, &token_position);
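
The "code ^= ..." lines in these profiler hunks use the handles' checked assignment: an untyped value is stored into a typed handle after a dynamic type check. A minimal analogue (illustrative; the VM reads the class id out of the object's header tags):

```cpp
#include <cassert>

struct Obj {
  int cid;  // class id of the object
};

class CodeHandle {
 public:
  static constexpr int kCodeCid = 7;  // stand-in class id

  // The ^= analogue: assert the dynamic type, then store the pointer.
  void CheckedAssign(Obj* raw) {
    assert(raw == nullptr || raw->cid == kCodeCid);
    raw_ = raw;
  }
  Obj* ptr() const { return raw_; }

 private:
  Obj* raw_ = nullptr;
};

int main() {
  Obj code_obj{CodeHandle::kCodeCid};
  CodeHandle code;
  code.CheckedAssign(&code_obj);  // like: code ^= profile_code->code().ptr()
  assert(code.ptr() == &code_obj);
  return 0;
}
```
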
diff --git a/runtime/vm/program_visitor.cc b/runtime/vm/program_visitor.cc
index 6e81fb3..66c6a58 100644
--- a/runtime/vm/program_visitor.cc
+++ b/runtime/vm/program_visitor.cc
@@ -17,9 +17,9 @@
 class WorklistElement : public ZoneAllocated {
  public:
   WorklistElement(Zone* zone, const Object& object)
-      : object_(Object::Handle(zone, object.raw())), next_(nullptr) {}
+      : object_(Object::Handle(zone, object.ptr())), next_(nullptr) {}
 
-  ObjectPtr value() const { return object_.raw(); }
+  ObjectPtr value() const { return object_.ptr(); }
 
   void set_next(WorklistElement* elem) { next_ = elem; }
   WorklistElement* next() const { return next_; }
@@ -99,8 +99,8 @@
     // We don't visit null, non-heap objects, or objects in the VM heap.
     if (object.IsNull() || object.IsSmi() || object.InVMIsolateHeap()) return;
     // Check and set visited, even if we don't end up adding this to the list.
-    if (heap_->GetObjectId(object.raw()) != 0) return;
-    heap_->SetObjectId(object.raw(), 1);
+    if (heap_->GetObjectId(object.ptr()) != 0) return;
+    heap_->SetObjectId(object.ptr(), 1);
     if (object.IsClass() ||
         (object.IsFunction() && visitor_->IsFunctionVisitor()) ||
         (object.IsCode() && visitor_->IsCodeVisitor())) {
@@ -318,7 +318,7 @@
   void AddCanonical(const T& obj) {
     if (!ShouldAdd(obj)) return;
     ASSERT(!canonical_objects_.HasKey(&obj));
-    canonical_objects_.Insert(&T::ZoneHandle(zone_, obj.raw()));
+    canonical_objects_.Insert(&T::ZoneHandle(zone_, obj.ptr()));
   }
 
   void AddVMBaseObjects() {
@@ -334,11 +334,11 @@
   typename T::ObjectPtrType Dedup(const T& obj) {
     if (ShouldAdd(obj)) {
       if (auto const canonical = canonical_objects_.LookupValue(&obj)) {
-        return canonical->raw();
+        return canonical->ptr();
       }
       AddCanonical(obj);
     }
-    return obj.raw();
+    return obj.ptr();
   }
 
   Zone* const zone_;
@@ -399,7 +399,7 @@
         const auto& fun = Function::Cast(target_);
         ASSERT(!FLAG_precompiled_mode || fun.HasCode());
         target_code_ = fun.HasCode() ? fun.CurrentCode()
-                                     : StubCode::CallStaticFunction().raw();
+                                     : StubCode::CallStaticFunction().ptr();
         CodePatcher::PatchStaticCallAt(pc, code, target_code_);
       }
 
@@ -452,9 +452,9 @@
 class StackMapEntry : public ZoneAllocated {
  public:
   StackMapEntry(Zone* zone, const CompressedStackMaps::Iterator& it)
-      : maps_(CompressedStackMaps::Handle(zone, it.maps_.raw())),
+      : maps_(CompressedStackMaps::Handle(zone, it.maps_.ptr())),
         bits_container_(
-            CompressedStackMaps::Handle(zone, it.bits_container_.raw())),
+            CompressedStackMaps::Handle(zone, it.bits_container_.ptr())),
         // If the map uses the global table, this accessor call ensures the
         // entry is fully loaded before we retrieve [it.current_bits_offset_].
         spill_slot_bit_count_(it.SpillSlotBitCount()),
@@ -522,7 +522,7 @@
   }
   const uint8_t* PayloadData() const {
     ASSERT(!Thread::Current()->IsAtSafepoint());
-    return bits_container_.raw()->ptr()->data() + bits_offset_;
+    return bits_container_.ptr()->untag()->data() + bits_offset_;
   }
 
   const CompressedStackMaps& maps_;
@@ -623,7 +623,7 @@
       const auto& data = CompressedStackMaps::Handle(
           zone_, CompressedStackMaps::NewGlobalTable(stream.buffer(),
                                                      stream.bytes_written()));
-      return data.raw();
+      return data.ptr();
     }
 
    private:
@@ -688,7 +688,7 @@
       // First check is to make sure [maps] hasn't already been normalized,
       // since any normalized map already has a canonical entry in the set.
       if (auto const canonical = canonical_objects_.LookupValue(&maps_)) {
-        maps_ = canonical->raw();
+        maps_ = canonical->ptr();
       } else {
         maps_ = NormalizeEntries(maps_);
         maps_ = Dedup(maps_);
@@ -701,7 +701,7 @@
     CompressedStackMapsPtr NormalizeEntries(const CompressedStackMaps& maps) {
       if (maps.payload_size() == 0) {
         // No entries, so use the canonical empty map.
-        return Object::empty_compressed_stackmaps().raw();
+        return Object::empty_compressed_stackmaps().ptr();
       }
       MallocWriteStream new_payload(maps.payload_size());
       CompressedStackMaps::Iterator it(maps, old_global_table_);
@@ -1059,7 +1059,7 @@
       // their names if any named parameter is required (signature needed).
       if (FLAG_precompiled_mode && !function.InVMIsolateHeap() &&
           !function.IsClosureFunction() && !function.IsFfiTrampoline() &&
-          function.name() != Symbols::Call().raw() && !function.is_native() &&
+          function.name() != Symbols::Call().ptr() && !function.is_native() &&
           !function.HasRequiredNamedParameters() &&
           !MayBeEntryPoint(function)) {
         // Function type not needed for function type tests or resolution.
@@ -1082,7 +1082,7 @@
           list_.SetAt(i, Symbols::OptimizedOut());
         }
       }
-      return list_.raw();
+      return list_.ptr();
     }
 
     bool MayBeEntryPoint(const Function& function) {
@@ -1090,10 +1090,10 @@
       // Use presence of pragma as conservative approximation.
       if (function.has_pragma()) return true;
       auto kind = function.kind();
-      if ((kind == FunctionLayout::kImplicitGetter) ||
-          (kind == FunctionLayout::kImplicitSetter) ||
-          (kind == FunctionLayout::kImplicitStaticGetter) ||
-          (kind == FunctionLayout::kFieldInitializer)) {
+      if ((kind == UntaggedFunction::kImplicitGetter) ||
+          (kind == UntaggedFunction::kImplicitSetter) ||
+          (kind == UntaggedFunction::kImplicitStaticGetter) ||
+          (kind == UntaggedFunction::kFieldInitializer)) {
         field_ = function.accessor_field();
         if (!field_.IsNull() && field_.has_pragma()) return true;
       }
@@ -1163,7 +1163,7 @@
     // In AOT, disabled code objects should not be considered for deduplication.
     ASSERT(!pair->IsDisabled() && !key->IsDisabled());
 
-    if (pair->raw() == key->raw()) return true;
+    if (pair->ptr() == key->ptr()) return true;
 
     // Notice we assume that these entries have already been de-duped, so we
     // can use pointer equality.
@@ -1364,8 +1364,8 @@
       UNREACHABLE();
     }
 
-    ASSERT(heap_->GetLoadingUnit(code.raw()) == WeakTable::kNoValue);
-    heap_->SetLoadingUnit(code.raw(), id);
+    ASSERT(heap_->GetLoadingUnit(code.ptr()) == WeakTable::kNoValue);
+    heap_->SetLoadingUnit(code.ptr(), id);
 
     obj_ = code.code_source_map();
     MergeAssignment(obj_, id);
@@ -1380,15 +1380,15 @@
   void MergeAssignment(const Object& obj, intptr_t id) {
     if (obj.IsNull()) return;
 
-    intptr_t old_id = heap_->GetLoadingUnit(obj_.raw());
+    intptr_t old_id = heap_->GetLoadingUnit(obj_.ptr());
     if (old_id == WeakTable::kNoValue) {
-      heap_->SetLoadingUnit(obj_.raw(), id);
+      heap_->SetLoadingUnit(obj_.ptr(), id);
     } else if (old_id == id) {
       // Shared with another code in the same loading unit.
     } else {
       // Shared with another code in a different loading unit.
       // Could assign to dominating loading unit.
-      heap_->SetLoadingUnit(obj_.raw(), LoadingUnit::kRootId);
+      heap_->SetLoadingUnit(obj_.ptr(), LoadingUnit::kRootId);
     }
   }
 
@@ -1411,7 +1411,7 @@
   Code& code = Code::Handle(zone);
   for (intptr_t i = 0; i < StubCode::NumEntries(); i++) {
     inst = StubCode::EntryAt(i).instructions();
-    thread->heap()->SetLoadingUnit(inst.raw(), LoadingUnit::kRootId);
+    thread->heap()->SetLoadingUnit(inst.ptr(), LoadingUnit::kRootId);
   }
 
   // Isolate stubs.
@@ -1422,7 +1422,7 @@
     if ((*p)->IsCode()) {
       code ^= *p;
       inst = code.instructions();
-      thread->heap()->SetLoadingUnit(inst.raw(), LoadingUnit::kRootId);
+      thread->heap()->SetLoadingUnit(inst.ptr(), LoadingUnit::kRootId);
     }
   }
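
The MergeAssignment logic above reduces to a three-way rule: take the unit id on first assignment, keep it when the same unit sees the object again, and escalate to the root unit when a second unit shares it. As a standalone function:

```cpp
#include <cassert>

constexpr int kNoValue = -1;  // stand-in for WeakTable::kNoValue
constexpr int kRootId = 0;    // stand-in for LoadingUnit::kRootId

int MergeLoadingUnit(int old_id, int id) {
  if (old_id == kNoValue) return id;  // first assignment wins
  if (old_id == id) return old_id;    // shared within one unit: keep it
  return kRootId;                     // shared across units: escalate to root
}

int main() {
  assert(MergeLoadingUnit(kNoValue, 3) == 3);
  assert(MergeLoadingUnit(3, 3) == 3);
  assert(MergeLoadingUnit(3, 5) == kRootId);  // could instead pick a dominator
  return 0;
}
```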
 
diff --git a/runtime/vm/raw_object.cc b/runtime/vm/raw_object.cc
index 41a97f5..a142516 100644
--- a/runtime/vm/raw_object.cc
+++ b/runtime/vm/raw_object.cc
@@ -16,7 +16,7 @@
 
 namespace dart {
 
-bool ObjectLayout::InVMIsolateHeap() const {
+bool UntaggedObject::InVMIsolateHeap() const {
   // All "vm-isolate" objects are pre-marked and in old space
   // (see [Object::FinalizeVMIsolate]).
   if (!IsOldObject() || !IsMarked()) return false;
@@ -35,10 +35,10 @@
   if (tagged_pointer_ == kHeapObjectTag) {
     FATAL("RAW_NULL encountered");
   }
-  ptr()->Validate(isolate_group);
+  untag()->Validate(isolate_group);
 }
 
-void ObjectLayout::Validate(IsolateGroup* isolate_group) const {
+void UntaggedObject::Validate(IsolateGroup* isolate_group) const {
   if (static_cast<uword>(Object::void_class_) == kHeapObjectTag) {
     // Validation relies on properly initialized class classes. Skip if the
     // VM is still being initialized.
@@ -90,14 +90,14 @@
 // compaction when the class objects are moving. Can use the class
 // id in the header and the sizes in the Class Table.
 // Cannot dereference ptr()->tags_. May dereference other parts of the object.
-intptr_t ObjectLayout::HeapSizeFromClass(uword tags) const {
+intptr_t UntaggedObject::HeapSizeFromClass(uword tags) const {
   intptr_t class_id = ClassIdTag::decode(tags);
   intptr_t instance_size = 0;
   switch (class_id) {
     case kCodeCid: {
       const CodePtr raw_code = static_cast<const CodePtr>(this);
       intptr_t pointer_offsets_length =
-          Code::PtrOffBits::decode(raw_code->ptr()->state_bits_);
+          Code::PtrOffBits::decode(raw_code->untag()->state_bits_);
       instance_size = Code::InstanceSize(pointer_offsets_length);
       break;
     }
@@ -117,28 +117,28 @@
     }
     case kContextCid: {
       const ContextPtr raw_context = static_cast<const ContextPtr>(this);
-      intptr_t num_variables = raw_context->ptr()->num_variables_;
+      intptr_t num_variables = raw_context->untag()->num_variables_;
       instance_size = Context::InstanceSize(num_variables);
       break;
     }
     case kContextScopeCid: {
       const ContextScopePtr raw_context_scope =
           static_cast<const ContextScopePtr>(this);
-      intptr_t num_variables = raw_context_scope->ptr()->num_variables_;
+      intptr_t num_variables = raw_context_scope->untag()->num_variables_;
       instance_size = ContextScope::InstanceSize(num_variables);
       break;
     }
     case kOneByteStringCid: {
       const OneByteStringPtr raw_string =
           static_cast<const OneByteStringPtr>(this);
-      intptr_t string_length = Smi::Value(raw_string->ptr()->length_);
+      intptr_t string_length = Smi::Value(raw_string->untag()->length_);
       instance_size = OneByteString::InstanceSize(string_length);
       break;
     }
     case kTwoByteStringCid: {
       const TwoByteStringPtr raw_string =
           static_cast<const TwoByteStringPtr>(this);
-      intptr_t string_length = Smi::Value(raw_string->ptr()->length_);
+      intptr_t string_length = Smi::Value(raw_string->untag()->length_);
       instance_size = TwoByteString::InstanceSize(string_length);
       break;
     }
@@ -146,21 +146,21 @@
     case kImmutableArrayCid: {
       const ArrayPtr raw_array = static_cast<const ArrayPtr>(this);
       intptr_t array_length =
-          Smi::Value(raw_array->ptr()->length<std::memory_order_acquire>());
+          Smi::Value(raw_array->untag()->length<std::memory_order_acquire>());
       instance_size = Array::InstanceSize(array_length);
       break;
     }
     case kObjectPoolCid: {
       const ObjectPoolPtr raw_object_pool =
           static_cast<const ObjectPoolPtr>(this);
-      intptr_t len = raw_object_pool->ptr()->length_;
+      intptr_t len = raw_object_pool->untag()->length_;
       instance_size = ObjectPool::InstanceSize(len);
       break;
     }
 #define SIZE_FROM_CLASS(clazz) case kTypedData##clazz##Cid:
       CLASS_LIST_TYPED_DATA(SIZE_FROM_CLASS) {
         const TypedDataPtr raw_obj = static_cast<const TypedDataPtr>(this);
-        intptr_t array_len = Smi::Value(raw_obj->ptr()->length_);
+        intptr_t array_len = Smi::Value(raw_obj->untag()->length_);
         intptr_t lengthInBytes =
             array_len * TypedData::ElementSizeInBytes(class_id);
         instance_size = TypedData::InstanceSize(lengthInBytes);
@@ -173,21 +173,21 @@
     case kTypeArgumentsCid: {
       const TypeArgumentsPtr raw_array =
           static_cast<const TypeArgumentsPtr>(this);
-      intptr_t array_length = Smi::Value(raw_array->ptr()->length_);
+      intptr_t array_length = Smi::Value(raw_array->untag()->length_);
       instance_size = TypeArguments::InstanceSize(array_length);
       break;
     }
     case kPcDescriptorsCid: {
       const PcDescriptorsPtr raw_descriptors =
           static_cast<const PcDescriptorsPtr>(this);
-      intptr_t length = raw_descriptors->ptr()->length_;
+      intptr_t length = raw_descriptors->untag()->length_;
       instance_size = PcDescriptors::InstanceSize(length);
       break;
     }
     case kCodeSourceMapCid: {
       const CodeSourceMapPtr raw_code_source_map =
           static_cast<const CodeSourceMapPtr>(this);
-      intptr_t length = raw_code_source_map->ptr()->length_;
+      intptr_t length = raw_code_source_map->untag()->length_;
       instance_size = CodeSourceMap::InstanceSize(length);
       break;
     }
@@ -201,25 +201,25 @@
     case kLocalVarDescriptorsCid: {
       const LocalVarDescriptorsPtr raw_descriptors =
           static_cast<const LocalVarDescriptorsPtr>(this);
-      intptr_t num_descriptors = raw_descriptors->ptr()->num_entries_;
+      intptr_t num_descriptors = raw_descriptors->untag()->num_entries_;
       instance_size = LocalVarDescriptors::InstanceSize(num_descriptors);
       break;
     }
     case kExceptionHandlersCid: {
       const ExceptionHandlersPtr raw_handlers =
           static_cast<const ExceptionHandlersPtr>(this);
-      intptr_t num_handlers = raw_handlers->ptr()->num_entries_;
+      intptr_t num_handlers = raw_handlers->untag()->num_entries_;
       instance_size = ExceptionHandlers::InstanceSize(num_handlers);
       break;
     }
     case kFreeListElement: {
-      uword addr = ObjectLayout::ToAddr(this);
+      uword addr = UntaggedObject::ToAddr(this);
       FreeListElement* element = reinterpret_cast<FreeListElement*>(addr);
       instance_size = element->HeapSize();
       break;
     }
     case kForwardingCorpse: {
-      uword addr = ObjectLayout::ToAddr(this);
+      uword addr = UntaggedObject::ToAddr(this);
       ForwardingCorpse* element = reinterpret_cast<ForwardingCorpse*>(addr);
       instance_size = element->HeapSize();
       break;
@@ -264,7 +264,7 @@
     do {
       OS::Sleep(1);
       const ArrayPtr raw_array = static_cast<const ArrayPtr>(this);
-      intptr_t array_length = Smi::Value(raw_array->ptr()->length_);
+      intptr_t array_length = Smi::Value(raw_array->untag()->length_);
       instance_size = Array::InstanceSize(array_length);
     } while ((instance_size > tags_size) && (--retries_remaining > 0));
   }
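
HeapSizeFromClass and the visitors below share one size-lookup convention: small instance sizes are encoded directly in the header tag word, and a zero there means the size overflowed the field and must be recomputed (from the class table or a length field, as the switch above does). A sketch of that decode-or-fallback step, with assumed field positions (the real SizeTag encoding differs):

```cpp
#include <cassert>
#include <cstdint>

constexpr uint64_t kSizeTagShift = 8;    // assumed position of the size bits
constexpr uint64_t kSizeTagMask = 0xff;  // assumed width of the size bits

uint64_t DecodeSizeTag(uint64_t tags) {
  return (tags >> kSizeTagShift) & kSizeTagMask;
}

uint64_t InstanceSizeFromTags(uint64_t tags, uint64_t size_from_class) {
  uint64_t size = DecodeSizeTag(tags);
  return size != 0 ? size : size_from_class;  // 0 is the overflow sentinel
}

int main() {
  uint64_t small = 48ull << kSizeTagShift;  // size fits in the tag bits
  assert(InstanceSizeFromTags(small, 0) == 48);
  uint64_t huge = 0;  // size did not fit: tag field is zero
  assert(InstanceSizeFromTags(huge, 4096) == 4096);  // recompute from class
  return 0;
}
```
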
@@ -276,8 +276,8 @@
   return instance_size;
 }
 
-intptr_t ObjectLayout::VisitPointersPredefined(ObjectPointerVisitor* visitor,
-                                               intptr_t class_id) {
+intptr_t UntaggedObject::VisitPointersPredefined(ObjectPointerVisitor* visitor,
+                                                 intptr_t class_id) {
   ASSERT(class_id < kNumPredefinedCids);
 
   intptr_t size = 0;
@@ -286,7 +286,7 @@
 #define RAW_VISITPOINTERS(clazz)                                               \
   case k##clazz##Cid: {                                                        \
     clazz##Ptr raw_obj = static_cast<clazz##Ptr>(this);                        \
-    size = clazz##Layout::Visit##clazz##Pointers(raw_obj, visitor);            \
+    size = Untagged##clazz::Visit##clazz##Pointers(raw_obj, visitor);          \
     break;                                                                     \
   }
     CLASS_LIST_NO_OBJECT(RAW_VISITPOINTERS)
@@ -294,15 +294,15 @@
 #define RAW_VISITPOINTERS(clazz) case kTypedData##clazz##Cid:
     CLASS_LIST_TYPED_DATA(RAW_VISITPOINTERS) {
       TypedDataPtr raw_obj = static_cast<TypedDataPtr>(this);
-      size = TypedDataLayout::VisitTypedDataPointers(raw_obj, visitor);
+      size = UntaggedTypedData::VisitTypedDataPointers(raw_obj, visitor);
       break;
     }
 #undef RAW_VISITPOINTERS
 #define RAW_VISITPOINTERS(clazz) case kExternalTypedData##clazz##Cid:
     CLASS_LIST_TYPED_DATA(RAW_VISITPOINTERS) {
       auto raw_obj = static_cast<ExternalTypedDataPtr>(this);
-      size = ExternalTypedDataLayout::VisitExternalTypedDataPointers(raw_obj,
-                                                                     visitor);
+      size = UntaggedExternalTypedData::VisitExternalTypedDataPointers(raw_obj,
+                                                                       visitor);
       break;
     }
 #undef RAW_VISITPOINTERS
@@ -311,24 +311,24 @@
       CLASS_LIST_TYPED_DATA(RAW_VISITPOINTERS) {
         auto raw_obj = static_cast<TypedDataViewPtr>(this);
         size =
-            TypedDataViewLayout::VisitTypedDataViewPointers(raw_obj, visitor);
+            UntaggedTypedDataView::VisitTypedDataViewPointers(raw_obj, visitor);
         break;
       }
 #undef RAW_VISITPOINTERS
     case kByteBufferCid: {
       InstancePtr raw_obj = static_cast<InstancePtr>(this);
-      size = InstanceLayout::VisitInstancePointers(raw_obj, visitor);
+      size = UntaggedInstance::VisitInstancePointers(raw_obj, visitor);
       break;
     }
     case kFfiPointerCid: {
       PointerPtr raw_obj = static_cast<PointerPtr>(this);
-      size = PointerLayout::VisitPointerPointers(raw_obj, visitor);
+      size = UntaggedPointer::VisitPointerPointers(raw_obj, visitor);
       break;
     }
     case kFfiDynamicLibraryCid: {
       DynamicLibraryPtr raw_obj = static_cast<DynamicLibraryPtr>(this);
       size =
-          DynamicLibraryLayout::VisitDynamicLibraryPointers(raw_obj, visitor);
+          UntaggedDynamicLibrary::VisitDynamicLibraryPointers(raw_obj, visitor);
       break;
     }
 #define RAW_VISITPOINTERS(clazz) case kFfi##clazz##Cid:
@@ -339,13 +339,13 @@
       }
 #undef RAW_VISITPOINTERS
     case kFreeListElement: {
-      uword addr = ObjectLayout::ToAddr(this);
+      uword addr = UntaggedObject::ToAddr(this);
       FreeListElement* element = reinterpret_cast<FreeListElement*>(addr);
       size = element->HeapSize();
       break;
     }
     case kForwardingCorpse: {
-      uword addr = ObjectLayout::ToAddr(this);
+      uword addr = UntaggedObject::ToAddr(this);
       ForwardingCorpse* forwarder = reinterpret_cast<ForwardingCorpse*>(addr);
       size = forwarder->HeapSize();
       break;
@@ -377,8 +377,8 @@
 #endif
 }
 
-void ObjectLayout::VisitPointersPrecise(Isolate* isolate,
-                                        ObjectPointerVisitor* visitor) {
+void UntaggedObject::VisitPointersPrecise(Isolate* isolate,
+                                          ObjectPointerVisitor* visitor) {
   intptr_t class_id = GetClassId();
   if (class_id < kNumPredefinedCids) {
     VisitPointersPredefined(visitor, class_id);
@@ -387,12 +387,12 @@
 
   // N.B.: Not using the heap size!
   uword next_field_offset = isolate->GetClassForHeapWalkAt(class_id)
-                                ->ptr()
+                                ->untag()
                                 ->host_next_field_offset_in_words_
                             << kWordSizeLog2;
   ASSERT(next_field_offset > 0);
-  uword obj_addr = ObjectLayout::ToAddr(this);
-  uword from = obj_addr + sizeof(ObjectLayout);
+  uword obj_addr = UntaggedObject::ToAddr(this);
+  uword from = obj_addr + sizeof(UntaggedObject);
   uword to = obj_addr + next_field_offset - kWordSize;
   const auto first = reinterpret_cast<ObjectPtr*>(from);
   const auto last = reinterpret_cast<ObjectPtr*>(to);
@@ -402,7 +402,7 @@
       visitor->shared_class_table()->GetUnboxedFieldsMapAt(class_id);
 
   if (!unboxed_fields_bitmap.IsEmpty()) {
-    intptr_t bit = sizeof(ObjectLayout) / kWordSize;
+    intptr_t bit = sizeof(UntaggedObject) / kWordSize;
     for (ObjectPtr* current = first; current <= last; current++) {
       if (!unboxed_fields_bitmap.Get(bit++)) {
         visitor->VisitPointer(current);
@@ -416,7 +416,7 @@
 #endif  // defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
 }
 
-bool ObjectLayout::FindObject(FindObjectVisitor* visitor) {
+bool UntaggedObject::FindObject(FindObjectVisitor* visitor) {
   ASSERT(visitor != NULL);
   return visitor->FindObject(static_cast<ObjectPtr>(this));
 }
@@ -425,12 +425,12 @@
 // methods on the raw object to get the first and last cells that need
 // visiting.
 #define REGULAR_VISITOR(Type)                                                  \
-  intptr_t Type##Layout::Visit##Type##Pointers(                                \
+  intptr_t Untagged##Type::Visit##Type##Pointers(                              \
       Type##Ptr raw_obj, ObjectPointerVisitor* visitor) {                      \
     /* Make sure that we got here with the tagged pointer as this. */          \
     ASSERT(raw_obj->IsHeapObject());                                           \
     ASSERT_UNCOMPRESSED(Type);                                                 \
-    visitor->VisitPointers(raw_obj->ptr()->from(), raw_obj->ptr()->to());      \
+    visitor->VisitPointers(raw_obj->untag()->from(), raw_obj->untag()->to());  \
     return Type::InstanceSize();                                               \
   }
 
@@ -440,26 +440,26 @@
 // Similar to [REGULAR_VISITOR], except that this visitor calls the
 // specialized VisitTypedDataViewPointers.
 #define TYPED_DATA_VIEW_VISITOR(Type)                                          \
-  intptr_t Type##Layout::Visit##Type##Pointers(                                \
+  intptr_t Untagged##Type::Visit##Type##Pointers(                              \
       Type##Ptr raw_obj, ObjectPointerVisitor* visitor) {                      \
     /* Make sure that we got here with the tagged pointer as this. */          \
     ASSERT(raw_obj->IsHeapObject());                                           \
     ASSERT_UNCOMPRESSED(Type);                                                 \
-    visitor->VisitTypedDataViewPointers(raw_obj, raw_obj->ptr()->from(),       \
-                                        raw_obj->ptr()->to());                 \
+    visitor->VisitTypedDataViewPointers(raw_obj, raw_obj->untag()->from(),     \
+                                        raw_obj->untag()->to());               \
     return Type::InstanceSize();                                               \
   }
 
 // For variable-length objects: get_length is a code snippet that gets the
 // length of the object, which is passed to InstanceSize and the to() method.
 #define VARIABLE_VISITOR(Type, get_length)                                     \
-  intptr_t Type##Layout::Visit##Type##Pointers(                                \
+  intptr_t Untagged##Type::Visit##Type##Pointers(                              \
       Type##Ptr raw_obj, ObjectPointerVisitor* visitor) {                      \
     /* Make sure that we got here with the tagged pointer as this. */          \
     ASSERT(raw_obj->IsHeapObject());                                           \
     intptr_t length = get_length;                                              \
-    visitor->VisitPointers(raw_obj->ptr()->from(),                             \
-                           raw_obj->ptr()->to(length));                        \
+    visitor->VisitPointers(raw_obj->untag()->from(),                           \
+                           raw_obj->untag()->to(length));                      \
     return Type::InstanceSize(length);                                         \
   }
 
@@ -470,7 +470,7 @@
 
 // For fixed-length objects that don't have any pointers that need visiting.
 #define NULL_VISITOR(Type)                                                     \
-  intptr_t Type##Layout::Visit##Type##Pointers(                                \
+  intptr_t Untagged##Type::Visit##Type##Pointers(                              \
       Type##Ptr raw_obj, ObjectPointerVisitor* visitor) {                      \
     /* Make sure that we got here with the tagged pointer as this. */          \
     ASSERT(raw_obj->IsHeapObject());                                           \
@@ -481,7 +481,7 @@
 // For objects that don't have any pointers that need visiting, but have a
 // variable length.
 #define VARIABLE_NULL_VISITOR(Type, get_length)                                \
-  intptr_t Type##Layout::Visit##Type##Pointers(                                \
+  intptr_t Untagged##Type::Visit##Type##Pointers(                              \
       Type##Ptr raw_obj, ObjectPointerVisitor* visitor) {                      \
     /* Make sure that we got here with the tagged pointer as this. */          \
     ASSERT(raw_obj->IsHeapObject());                                           \
@@ -492,7 +492,7 @@
 
 // For objects that are never instantiated on the heap.
 #define UNREACHABLE_VISITOR(Type)                                              \
-  intptr_t Type##Layout::Visit##Type##Pointers(                                \
+  intptr_t Untagged##Type::Visit##Type##Pointers(                              \
       Type##Ptr raw_obj, ObjectPointerVisitor* visitor) {                      \
     UNREACHABLE();                                                             \
     return 0;                                                                  \
@@ -536,16 +536,16 @@
 REGULAR_VISITOR(SubtypeTestCache)
 REGULAR_VISITOR(LoadingUnit)
 REGULAR_VISITOR(KernelProgramInfo)
-VARIABLE_VISITOR(TypeArguments, Smi::Value(raw_obj->ptr()->length_))
-VARIABLE_VISITOR(LocalVarDescriptors, raw_obj->ptr()->num_entries_)
-VARIABLE_VISITOR(ExceptionHandlers, raw_obj->ptr()->num_entries_)
-VARIABLE_VISITOR(Context, raw_obj->ptr()->num_variables_)
-VARIABLE_COMPRESSED_VISITOR(Array, Smi::Value(raw_obj->ptr()->length()))
+VARIABLE_VISITOR(TypeArguments, Smi::Value(raw_obj->untag()->length_))
+VARIABLE_VISITOR(LocalVarDescriptors, raw_obj->untag()->num_entries_)
+VARIABLE_VISITOR(ExceptionHandlers, raw_obj->untag()->num_entries_)
+VARIABLE_VISITOR(Context, raw_obj->untag()->num_variables_)
+VARIABLE_COMPRESSED_VISITOR(Array, Smi::Value(raw_obj->untag()->length()))
 VARIABLE_COMPRESSED_VISITOR(
     TypedData,
     TypedData::ElementSizeInBytes(raw_obj->GetClassId()) *
-        Smi::Value(raw_obj->ptr()->length_))
-VARIABLE_VISITOR(ContextScope, raw_obj->ptr()->num_variables_)
+        Smi::Value(raw_obj->untag()->length_))
+VARIABLE_VISITOR(ContextScope, raw_obj->untag()->num_variables_)
 NULL_VISITOR(Mint)
 NULL_VISITOR(Double)
 NULL_VISITOR(Float32x4)
@@ -559,12 +559,12 @@
 NULL_VISITOR(DynamicLibrary)
 VARIABLE_NULL_VISITOR(Instructions, Instructions::Size(raw_obj))
 VARIABLE_NULL_VISITOR(InstructionsSection, InstructionsSection::Size(raw_obj))
-VARIABLE_NULL_VISITOR(PcDescriptors, raw_obj->ptr()->length_)
-VARIABLE_NULL_VISITOR(CodeSourceMap, raw_obj->ptr()->length_)
+VARIABLE_NULL_VISITOR(PcDescriptors, raw_obj->untag()->length_)
+VARIABLE_NULL_VISITOR(CodeSourceMap, raw_obj->untag()->length_)
 VARIABLE_NULL_VISITOR(CompressedStackMaps,
                       CompressedStackMaps::PayloadSizeOf(raw_obj))
-VARIABLE_NULL_VISITOR(OneByteString, Smi::Value(raw_obj->ptr()->length_))
-VARIABLE_NULL_VISITOR(TwoByteString, Smi::Value(raw_obj->ptr()->length_))
+VARIABLE_NULL_VISITOR(OneByteString, Smi::Value(raw_obj->untag()->length_))
+VARIABLE_NULL_VISITOR(TwoByteString, Smi::Value(raw_obj->untag()->length_))
 // Abstract types don't have their visitor called.
 UNREACHABLE_VISITOR(AbstractType)
 UNREACHABLE_VISITOR(CallSiteData)
@@ -582,18 +582,18 @@
 REGULAR_VISITOR(WeakSerializationReference)
 #endif
 
-intptr_t FieldLayout::VisitFieldPointers(FieldPtr raw_obj,
-                                         ObjectPointerVisitor* visitor) {
+intptr_t UntaggedField::VisitFieldPointers(FieldPtr raw_obj,
+                                           ObjectPointerVisitor* visitor) {
   ASSERT(raw_obj->IsHeapObject());
   ASSERT_UNCOMPRESSED(Field);
-  visitor->VisitPointers(raw_obj->ptr()->from(), raw_obj->ptr()->to());
+  visitor->VisitPointers(raw_obj->untag()->from(), raw_obj->untag()->to());
 
   if (visitor->trace_values_through_fields()) {
-    if (Field::StaticBit::decode(raw_obj->ptr()->kind_bits_)) {
+    if (Field::StaticBit::decode(raw_obj->untag()->kind_bits_)) {
       visitor->isolate_group()->ForEachIsolate(
           [&](Isolate* isolate) {
             intptr_t index =
-                Smi::Value(raw_obj->ptr()->host_offset_or_field_id_);
+                Smi::Value(raw_obj->untag()->host_offset_or_field_id_);
             visitor->VisitPointer(&isolate->field_table()->table()[index]);
           },
           /*at_safepoint=*/true);
@@ -602,7 +602,7 @@
   return Field::InstanceSize();
 }
 
-bool CodeLayout::ContainsPC(const ObjectPtr raw_obj, uword pc) {
+bool UntaggedCode::ContainsPC(const ObjectPtr raw_obj, uword pc) {
   if (!raw_obj->IsCode()) return false;
   auto const raw_code = static_cast<const CodePtr>(raw_obj);
   const uword start = Code::PayloadStartOf(raw_code);
@@ -610,11 +610,11 @@
   return (pc - start) <= size;  // pc may point just past last instruction.
 }
 
-intptr_t CodeLayout::VisitCodePointers(CodePtr raw_obj,
-                                       ObjectPointerVisitor* visitor) {
-  visitor->VisitPointers(raw_obj->ptr()->from(), raw_obj->ptr()->to());
+intptr_t UntaggedCode::VisitCodePointers(CodePtr raw_obj,
+                                         ObjectPointerVisitor* visitor) {
+  visitor->VisitPointers(raw_obj->untag()->from(), raw_obj->untag()->to());
 
-  CodeLayout* obj = raw_obj->ptr();
+  UntaggedCode* obj = raw_obj->untag();
   intptr_t length = Code::PtrOffBits::decode(obj->state_bits_);
 #if defined(TARGET_ARCH_IA32)
   // On IA32 only we embed pointers to objects directly in the generated
@@ -636,12 +636,12 @@
 #endif
 }
 
-intptr_t ObjectPoolLayout::VisitObjectPoolPointers(
+intptr_t UntaggedObjectPool::VisitObjectPoolPointers(
     ObjectPoolPtr raw_obj,
     ObjectPointerVisitor* visitor) {
-  const intptr_t length = raw_obj->ptr()->length_;
-  ObjectPoolLayout::Entry* entries = raw_obj->ptr()->data();
-  uint8_t* entry_bits = raw_obj->ptr()->entry_bits();
+  const intptr_t length = raw_obj->untag()->length_;
+  UntaggedObjectPool::Entry* entries = raw_obj->untag()->data();
+  uint8_t* entry_bits = raw_obj->untag()->entry_bits();
   for (intptr_t i = 0; i < length; ++i) {
     ObjectPool::EntryType entry_type =
         ObjectPool::TypeBits::decode(entry_bits[i]);
@@ -652,7 +652,8 @@
   return ObjectPool::InstanceSize(length);
 }
 
-bool InstructionsLayout::ContainsPC(const InstructionsPtr raw_instr, uword pc) {
+bool UntaggedInstructions::ContainsPC(const InstructionsPtr raw_instr,
+                                      uword pc) {
   const uword start = Instructions::PayloadStart(raw_instr);
   const uword size = Instructions::Size(raw_instr);
   // We use <= instead of < here because the saved-pc can be outside the
@@ -661,11 +662,12 @@
   return (pc - start) <= size;
 }
 
-intptr_t InstanceLayout::VisitInstancePointers(InstancePtr raw_obj,
-                                               ObjectPointerVisitor* visitor) {
+intptr_t UntaggedInstance::VisitInstancePointers(
+    InstancePtr raw_obj,
+    ObjectPointerVisitor* visitor) {
   // Make sure that we got here with the tagged pointer as this.
   ASSERT(raw_obj->IsHeapObject());
-  uword tags = raw_obj->ptr()->tags_;
+  uword tags = raw_obj->untag()->tags_;
   intptr_t instance_size = SizeTag::decode(tags);
   if (instance_size == 0) {
     instance_size = visitor->isolate_group()->GetClassSizeForHeapWalkAt(
@@ -673,21 +675,21 @@
   }
 
   // Calculate the first and last raw object pointer fields.
-  uword obj_addr = ObjectLayout::ToAddr(raw_obj);
-  uword from = obj_addr + sizeof(ObjectLayout);
+  uword obj_addr = UntaggedObject::ToAddr(raw_obj);
+  uword from = obj_addr + sizeof(UntaggedObject);
   uword to = obj_addr + instance_size - kWordSize;
   visitor->VisitPointers(reinterpret_cast<ObjectPtr*>(from),
                          reinterpret_cast<ObjectPtr*>(to));
   return instance_size;
 }
 
-intptr_t ImmutableArrayLayout::VisitImmutableArrayPointers(
+intptr_t UntaggedImmutableArray::VisitImmutableArrayPointers(
     ImmutableArrayPtr raw_obj,
     ObjectPointerVisitor* visitor) {
-  return ArrayLayout::VisitArrayPointers(raw_obj, visitor);
+  return UntaggedArray::VisitArrayPointers(raw_obj, visitor);
 }
 
-void ObjectLayout::RememberCard(ObjectPtr const* slot) {
+void UntaggedObject::RememberCard(ObjectPtr const* slot) {
   OldPage::Of(static_cast<ObjectPtr>(this))->RememberCard(slot);
 }
 
@@ -698,12 +700,12 @@
                           ObjectPtr* slot) {
   ObjectPtr object = static_cast<ObjectPtr>(object_in);
   ASSERT(object->IsOldObject());
-  ASSERT(object->ptr()->IsCardRemembered());
+  ASSERT(object->untag()->IsCardRemembered());
   OldPage::Of(object)->RememberCard(slot);
 }
 END_LEAF_RUNTIME_ENTRY
 
-const char* PcDescriptorsLayout::KindToCString(Kind k) {
+const char* UntaggedPcDescriptors::KindToCString(Kind k) {
   switch (k) {
 #define ENUM_CASE(name, init)                                                  \
   case Kind::k##name:                                                          \
@@ -715,7 +717,7 @@
   }
 }
 
-bool PcDescriptorsLayout::ParseKind(const char* cstr, Kind* out) {
+bool UntaggedPcDescriptors::ParseKind(const char* cstr, Kind* out) {
   ASSERT(cstr != nullptr && out != nullptr);
 #define ENUM_CASE(name, init)                                                  \
   if (strcmp(#name, cstr) == 0) {                                              \
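
The ptr() to untag() rename above makes the tag-stripping step explicit at
every access to a raw heap object. A minimal sketch of that step, with
simplified stand-in types: only the addr + kHeapObjectTag arithmetic mirrors
the UntaggedObject::ToAddr conversions in this patch, and the class names and
field layout below are assumptions for illustration.

#include <cstdint>

typedef uintptr_t uword;
constexpr uword kHeapObjectTag = 1;  // low bit set on heap-object pointers

// Hypothetical stand-in for the untagged header type (UntaggedObject).
struct UntaggedObjectSketch {
  uword tags_;  // object header word, as in UntaggedObject::tags_
};

// Hypothetical stand-in for ObjectPtr: a heap address plus the tag bit.
class ObjectPtrSketch {
 public:
  explicit ObjectPtrSketch(uword tagged) : tagged_(tagged) {}

  // untag(): subtract the tag bit to reach the header -- the operation
  // this patch renames from ptr() at each call site.
  UntaggedObjectSketch* untag() const {
    return reinterpret_cast<UntaggedObjectSketch*>(tagged_ - kHeapObjectTag);
  }

 private:
  uword tagged_;  // tagged address, e.g. heap_addr + kHeapObjectTag
};
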
diff --git a/runtime/vm/raw_object.h b/runtime/vm/raw_object.h
index dac1451..8a02e59 100644
--- a/runtime/vm/raw_object.h
+++ b/runtime/vm/raw_object.h
@@ -28,7 +28,7 @@
 //  * Target architecture
 //  * DART_PRECOMPILED_RUNTIME (i.e., AOT vs. JIT)
 //
-// That is, fields in ObjectLayout and its subclasses should only be included or
+// That is, fields in UntaggedObject and its subclasses should only be included or
 // excluded conditionally based on these factors. Otherwise, the generated
 // offsets can be wrong (which should be caught by offset checking in dart.cc).
 //
@@ -42,7 +42,7 @@
 // Forward declarations.
 class Isolate;
 class IsolateGroup;
-#define DEFINE_FORWARD_DECLARATION(clazz) class clazz##Layout;
+#define DEFINE_FORWARD_DECLARATION(clazz) class Untagged##clazz;
 CLASS_LIST(DEFINE_FORWARD_DECLARATION)
 #undef DEFINE_FORWARD_DECLARATION
 class CodeStatistics;
@@ -59,15 +59,15 @@
 #define VISIT_NOTHING() int NothingToVisit();
 
 #define ASSERT_UNCOMPRESSED(Type)                                              \
-  ASSERT(SIZE_OF_DEREFERENCED_RETURNED_VALUE(Type##Layout, from) == kWordSize)
+  ASSERT(SIZE_OF_DEREFERENCED_RETURNED_VALUE(Untagged##Type, from) == kWordSize)
 
 // For now there are no compressed pointers, so this assert is the same as
 // the above.
 #define ASSERT_COMPRESSED(Type)                                                \
-  ASSERT(SIZE_OF_DEREFERENCED_RETURNED_VALUE(Type##Layout, from) == kWordSize)
+  ASSERT(SIZE_OF_DEREFERENCED_RETURNED_VALUE(Untagged##Type, from) == kWordSize)
 
 #define ASSERT_NOTHING_TO_VISIT(Type)                                          \
-  ASSERT(SIZE_OF_RETURNED_VALUE(Type##Layout, NothingToVisit) == sizeof(int))
+  ASSERT(SIZE_OF_RETURNED_VALUE(Untagged##Type, NothingToVisit) == sizeof(int))
 
 enum TypedDataElementType {
 #define V(name) k##name##Element,
@@ -90,13 +90,13 @@
  private: /* NOLINT */                                                         \
   VISITOR_SUPPORT(object)                                                      \
   friend class object;                                                         \
-  friend class ObjectLayout;                                                   \
+  friend class UntaggedObject;                                                 \
   friend class Heap;                                                           \
   friend class Simulator;                                                      \
   friend class SimulatorHelpers;                                               \
   friend class OffsetsTable;                                                   \
   DISALLOW_ALLOCATION();                                                       \
-  DISALLOW_IMPLICIT_CONSTRUCTORS(object##Layout)
+  DISALLOW_IMPLICIT_CONSTRUCTORS(Untagged##object)
 
 #define RAW_HEAP_OBJECT_IMPLEMENTATION(object)                                 \
  private:                                                                      \
@@ -112,7 +112,7 @@
 // RawObject is the base class of all raw objects; even though it carries the
 // tags_ field, not all raw objects are allocated in the heap, and thus some
 // cannot be dereferenced (e.g. RawSmi).
-class ObjectLayout {
+class UntaggedObject {
  public:
   // The tags field which is a part of the object header uses the following
   // bit fields for storing tags.
@@ -151,7 +151,7 @@
     typedef intptr_t Type;
 
     static constexpr intptr_t kMaxSizeTagInUnitsOfAlignment =
-        ((1 << ObjectLayout::kSizeTagSize) - 1);
+        ((1 << UntaggedObject::kSizeTagSize) - 1);
     static constexpr intptr_t kMaxSizeTag =
         kMaxSizeTagInUnitsOfAlignment * kObjectAlignment;
 
@@ -414,7 +414,7 @@
 
   bool Contains(uword addr) const {
     intptr_t this_size = HeapSize();
-    uword this_addr = ObjectLayout::ToAddr(this);
+    uword this_addr = UntaggedObject::ToAddr(this);
     return (addr >= this_addr) && (addr < (this_addr + this_size));
   }
 
@@ -434,7 +434,7 @@
     // Calculate the first and last raw object pointer fields.
     intptr_t instance_size = HeapSize();
     uword obj_addr = ToAddr(this);
-    uword from = obj_addr + sizeof(ObjectLayout);
+    uword from = obj_addr + sizeof(UntaggedObject);
     uword to = obj_addr + instance_size - kWordSize;
     const auto first = reinterpret_cast<ObjectPtr*>(from);
     const auto last = reinterpret_cast<ObjectPtr*>(to);
@@ -444,7 +444,7 @@
         visitor->shared_class_table()->GetUnboxedFieldsMapAt(class_id);
 
     if (!unboxed_fields_bitmap.IsEmpty()) {
-      intptr_t bit = sizeof(ObjectLayout) / kWordSize;
+      intptr_t bit = sizeof(UntaggedObject) / kWordSize;
       for (ObjectPtr* current = first; current <= last; current++) {
         if (!unboxed_fields_bitmap.Get(bit++)) {
           visitor->VisitPointer(current);
@@ -472,7 +472,7 @@
     // Calculate the first and last raw object pointer fields.
     intptr_t instance_size = HeapSize();
     uword obj_addr = ToAddr(this);
-    uword from = obj_addr + sizeof(ObjectLayout);
+    uword from = obj_addr + sizeof(UntaggedObject);
     uword to = obj_addr + instance_size - kWordSize;
     const auto first = reinterpret_cast<ObjectPtr*>(from);
     const auto last = reinterpret_cast<ObjectPtr*>(to);
@@ -482,7 +482,7 @@
         visitor->shared_class_table()->GetUnboxedFieldsMapAt(class_id);
 
     if (!unboxed_fields_bitmap.IsEmpty()) {
-      intptr_t bit = sizeof(ObjectLayout) / kWordSize;
+      intptr_t bit = sizeof(UntaggedObject) / kWordSize;
       for (ObjectPtr* current = first; current <= last; current++) {
         if (!unboxed_fields_bitmap.Get(bit++)) {
           visitor->V::VisitPointers(current, current);
@@ -509,7 +509,7 @@
     return static_cast<ObjectPtr>(addr + kHeapObjectTag);
   }
 
-  static uword ToAddr(const ObjectLayout* raw_obj) {
+  static uword ToAddr(const UntaggedObject* raw_obj) {
     return reinterpret_cast<uword>(raw_obj);
   }
   static uword ToAddr(const ObjectPtr raw_obj) {
@@ -595,7 +595,7 @@
   template <std::memory_order order = std::memory_order_relaxed>
   void StoreSmi(SmiPtr const* addr, SmiPtr value) {
     // Can't use Contains, as array length is initialized through this method.
-    ASSERT(reinterpret_cast<uword>(addr) >= ObjectLayout::ToAddr(this));
+    ASSERT(reinterpret_cast<uword>(addr) >= UntaggedObject::ToAddr(this));
     reinterpret_cast<std::atomic<SmiPtr>*>(const_cast<SmiPtr*>(addr))
         ->store(value, order);
   }
@@ -604,7 +604,7 @@
   DART_FORCE_INLINE
   void CheckHeapPointerStore(ObjectPtr value, Thread* thread) {
     uword source_tags = this->tags_;
-    uword target_tags = value->ptr()->tags_;
+    uword target_tags = value->untag()->tags_;
     if (((source_tags >> kBarrierOverlapShift) & target_tags &
          thread->write_barrier_mask()) != 0) {
       if (value->IsNewObject()) {
@@ -622,7 +622,7 @@
           return;
         }
 #endif
-        if (value->ptr()->TryAcquireMarkBit()) {
+        if (value->untag()->TryAcquireMarkBit()) {
           thread->MarkingStackAddObject(value);
         }
       }
@@ -634,7 +634,7 @@
                                                 ObjectPtr value,
                                                 Thread* thread) {
     uword source_tags = this->tags_;
-    uword target_tags = value->ptr()->tags_;
+    uword target_tags = value->untag()->tags_;
     if (((source_tags >> kBarrierOverlapShift) & target_tags &
          thread->write_barrier_mask()) != 0) {
       if (value->IsNewObject()) {
@@ -658,7 +658,7 @@
           return;
         }
 #endif
-        if (value->ptr()->TryAcquireMarkBit()) {
+        if (value->untag()->TryAcquireMarkBit()) {
           thread->MarkingStackAddObject(value);
         }
       }
@@ -691,7 +691,7 @@
   friend class Mint;
   friend class Object;
   friend class OneByteString;  // StoreSmi
-  friend class InstanceLayout;
+  friend class UntaggedInstance;
   friend class Scavenger;
   template <bool>
   friend class ScavengerVisitorBase;
@@ -715,11 +715,11 @@
   friend class Object;
 
   DISALLOW_ALLOCATION();
-  DISALLOW_IMPLICIT_CONSTRUCTORS(ObjectLayout);
+  DISALLOW_IMPLICIT_CONSTRUCTORS(UntaggedObject);
 };
 
 inline intptr_t ObjectPtr::GetClassId() const {
-  return ptr()->GetClassId();
+  return untag()->GetClassId();
 }
 
 #define POINTER_FIELD(type, name)                                              \
@@ -782,7 +782,7 @@
  protected:                                                                    \
   type name##_;
 
-class ClassLayout : public ObjectLayout {
+class UntaggedClass : public UntaggedObject {
  public:
   enum ClassFinalizedState {
     kAllocated = 0,  // Initial state.
@@ -884,16 +884,16 @@
   friend class Instance;
   friend class Isolate;
   friend class Object;
-  friend class InstanceLayout;
-  friend class InstructionsLayout;
-  friend class TypeArgumentsLayout;
+  friend class UntaggedInstance;
+  friend class UntaggedInstructions;
+  friend class UntaggedTypeArguments;
   friend class SnapshotReader;
   friend class InstanceSerializationCluster;
   friend class CidRewriteVisitor;
   friend class Api;
 };
 
-class PatchClassLayout : public ObjectLayout {
+class UntaggedPatchClass : public UntaggedObject {
  private:
   RAW_HEAP_OBJECT_IMPLEMENTATION(PatchClass);
 
@@ -926,7 +926,7 @@
   friend class Function;
 };
 
-class FunctionLayout : public ObjectLayout {
+class UntaggedFunction : public UntaggedObject {
  public:
   // When you add a new kind, please also update the observatory to account
   // for the new string returned by KindToCString().
@@ -1128,7 +1128,7 @@
 
   // TODO(regis): Split packed_fields_ in 2 uint32_t if max values are too low.
 
-  // Keep in sync with corresponding constants in FunctionTypeLayout.
+  // Keep in sync with corresponding constants in UntaggedFunctionType.
   static constexpr intptr_t kMaxOptimizableBits = 1;
   static constexpr intptr_t kMaxBackgroundOptimizableBits = 1;
   static constexpr intptr_t kMaxTypeParametersBits = 7;
@@ -1164,7 +1164,7 @@
       PackedNumOptionalParameters;
   static_assert(PackedNumOptionalParameters::kNextBit <=
                     kBitsPerByte * sizeof(decltype(packed_fields_)),
-                "FunctionLayout::packed_fields_ bitfields don't fit.");
+                "UntaggedFunction::packed_fields_ bitfields don't fit.");
   static_assert(PackedNumOptionalParameters::kNextBit <=
                     compiler::target::kSmiBits,
                 "In-place mask for number of optional parameters cannot fit in "
@@ -1188,7 +1188,7 @@
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
 };
 
-class ClosureDataLayout : public ObjectLayout {
+class UntaggedClosureData : public UntaggedObject {
  private:
   RAW_HEAP_OBJECT_IMPLEMENTATION(ClosureData);
 
@@ -1207,7 +1207,7 @@
   friend class Function;
 };
 
-class FfiTrampolineDataLayout : public ObjectLayout {
+class UntaggedFfiTrampolineData : public UntaggedObject {
  private:
   RAW_HEAP_OBJECT_IMPLEMENTATION(FfiTrampolineData);
 
@@ -1236,7 +1236,7 @@
   uint32_t callback_id_;
 };
 
-class FieldLayout : public ObjectLayout {
+class UntaggedField : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Field);
 
   VISIT_FROM(ObjectPtr, name)
@@ -1307,7 +1307,7 @@
   friend class StoreInstanceFieldInstr;  // For sizeof(guarded_cid_/...)
 };
 
-class alignas(8) ScriptLayout : public ObjectLayout {
+class alignas(8) UntaggedScript : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Script);
 
   VISIT_FROM(ObjectPtr, url)
@@ -1368,7 +1368,7 @@
 #endif
 };
 
-class LibraryLayout : public ObjectLayout {
+class UntaggedLibrary : public UntaggedObject {
   enum LibraryState {
     kAllocated,       // Initial state.
     kLoadRequested,   // Compiler or script requested load of library.
@@ -1452,7 +1452,7 @@
   friend class Isolate;
 };
 
-class NamespaceLayout : public ObjectLayout {
+class UntaggedNamespace : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Namespace);
 
   VISIT_FROM(ObjectPtr, target)
@@ -1464,7 +1464,7 @@
   ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
 };
 
-class KernelProgramInfoLayout : public ObjectLayout {
+class UntaggedKernelProgramInfo : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(KernelProgramInfo);
 
   VISIT_FROM(ObjectPtr, string_offsets)
@@ -1490,7 +1490,7 @@
   }
 };
 
-class WeakSerializationReferenceLayout : public ObjectLayout {
+class UntaggedWeakSerializationReference : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(WeakSerializationReference);
 
 #if defined(DART_PRECOMPILED_RUNTIME)
@@ -1503,7 +1503,7 @@
 #endif
 };
 
-class CodeLayout : public ObjectLayout {
+class UntaggedCode : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Code);
 
   // When in the precompiled runtime, there is no disabling of Code objects
@@ -1619,7 +1619,7 @@
   friend class CallSiteResetter;
 };
 
-class ObjectPoolLayout : public ObjectLayout {
+class UntaggedObjectPool : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(ObjectPool);
 
   intptr_t length_;
@@ -1646,7 +1646,7 @@
   friend class UnitDeserializationRoots;
 };
 
-class InstructionsLayout : public ObjectLayout {
+class UntaggedInstructions : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Instructions);
   VISIT_NOTHING();
 
@@ -1662,8 +1662,8 @@
   // is not allowed to create handles.
   static bool ContainsPC(const InstructionsPtr raw_instr, uword pc);
 
-  friend class CodeLayout;
-  friend class FunctionLayout;
+  friend class UntaggedCode;
+  friend class UntaggedFunction;
   friend class Code;
   friend class StackFrame;
   template <bool>
@@ -1679,7 +1679,7 @@
 // interface, to provide memory accounting for the bare instruction payloads
 // we serialize, since they are no longer part of RawInstructions objects,
 // and to avoid special casing bare instructions payload Images in the GC.
-class InstructionsSectionLayout : public ObjectLayout {
+class UntaggedInstructionsSection : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(InstructionsSection);
   VISIT_NOTHING();
 
@@ -1700,7 +1700,7 @@
   friend class Image;
 };
 
-class PcDescriptorsLayout : public ObjectLayout {
+class UntaggedPcDescriptors : public UntaggedObject {
  public:
 // The macro argument V is passed two arguments, the raw name of the enum value
 // and the initialization expression used within the enum definition.  The uses
@@ -1798,7 +1798,7 @@
 
 // CodeSourceMap encodes a mapping from code PC ranges to source token
 // positions and the stack of inlined functions.
-class CodeSourceMapLayout : public ObjectLayout {
+class UntaggedCodeSourceMap : public UntaggedObject {
  private:
   RAW_HEAP_OBJECT_IMPLEMENTATION(CodeSourceMap);
   VISIT_NOTHING();
@@ -1818,7 +1818,7 @@
 // RawCompressedStackMaps is a compressed representation of the stack maps
 // for certain PC offsets into a set of instructions, where a stack map is a bit
 // map that marks each live object index starting from the base of the frame.
-class CompressedStackMapsLayout : public ObjectLayout {
+class UntaggedCompressedStackMaps : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(CompressedStackMaps);
   VISIT_NOTHING();
 
@@ -1891,7 +1891,7 @@
   friend class StackMapEntry;
 };
 
-class LocalVarDescriptorsLayout : public ObjectLayout {
+class UntaggedLocalVarDescriptors : public UntaggedObject {
  public:
   enum VarInfoKind {
     kStackVar = 1,
@@ -1961,7 +1961,7 @@
   friend class Object;
 };
 
-class ExceptionHandlersLayout : public ObjectLayout {
+class UntaggedExceptionHandlers : public UntaggedObject {
  private:
   RAW_HEAP_OBJECT_IMPLEMENTATION(ExceptionHandlers);
 
@@ -1985,7 +1985,7 @@
   friend class Object;
 };
 
-class ContextLayout : public ObjectLayout {
+class UntaggedContext : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Context);
 
   int32_t num_variables_;
@@ -2000,7 +2000,7 @@
   friend class SnapshotReader;
 };
 
-class ContextScopeLayout : public ObjectLayout {
+class UntaggedContextScope : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(ContextScope);
 
   // TODO(iposva): Switch to conventional enum offset based structure to avoid
@@ -2046,11 +2046,11 @@
   }
 
   friend class Object;
-  friend class ClosureDataLayout;
+  friend class UntaggedClosureData;
   friend class SnapshotReader;
 };
 
-class SingleTargetCacheLayout : public ObjectLayout {
+class UntaggedSingleTargetCache : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(SingleTargetCache);
   VISIT_FROM(ObjectPtr, target)
   POINTER_FIELD(CodePtr, target)
@@ -2060,7 +2060,7 @@
   ClassIdTagType upper_limit_;
 };
 
-class MonomorphicSmiableCallLayout : public ObjectLayout {
+class UntaggedMonomorphicSmiableCall : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(MonomorphicSmiableCall);
   VISIT_FROM(ObjectPtr, target)
   POINTER_FIELD(CodePtr,
@@ -2072,7 +2072,7 @@
 };
 
 // Abstract base class for RawICData/RawMegamorphicCache
-class CallSiteDataLayout : public ObjectLayout {
+class UntaggedCallSiteData : public UntaggedObject {
  protected:
   POINTER_FIELD(StringPtr, target_name);  // Name of target function.
   // arg_descriptor in RawICData and in RawMegamorphicCache should be
@@ -2082,7 +2082,7 @@
   RAW_HEAP_OBJECT_IMPLEMENTATION(CallSiteData)
 };
 
-class UnlinkedCallLayout : public CallSiteDataLayout {
+class UntaggedUnlinkedCall : public UntaggedCallSiteData {
   RAW_HEAP_OBJECT_IMPLEMENTATION(UnlinkedCall);
   VISIT_FROM(ObjectPtr, target_name)
   VISIT_TO(ObjectPtr, args_descriptor)
@@ -2091,7 +2091,7 @@
   bool can_patch_to_monomorphic_;
 };
 
-class ICDataLayout : public CallSiteDataLayout {
+class UntaggedICData : public UntaggedCallSiteData {
   RAW_HEAP_OBJECT_IMPLEMENTATION(ICData);
   VISIT_FROM(ObjectPtr, target_name)
   POINTER_FIELD(ArrayPtr, entries)  // Contains class-ids, target and count.
@@ -2120,7 +2120,7 @@
   uint32_t state_bits_;  // Number of arguments tested in IC, deopt reasons.
 };
 
-class MegamorphicCacheLayout : public CallSiteDataLayout {
+class UntaggedMegamorphicCache : public UntaggedCallSiteData {
   RAW_HEAP_OBJECT_IMPLEMENTATION(MegamorphicCache);
 
   VISIT_FROM(ObjectPtr, target_name)
@@ -2132,7 +2132,7 @@
   int32_t filled_entry_count_;
 };
 
-class SubtypeTestCacheLayout : public ObjectLayout {
+class UntaggedSubtypeTestCache : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(SubtypeTestCache);
 
   VISIT_FROM(ObjectPtr, cache)
@@ -2140,7 +2140,7 @@
   VISIT_TO(ObjectPtr, cache)
 };
 
-class LoadingUnitLayout : public ObjectLayout {
+class UntaggedLoadingUnit : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(LoadingUnit);
 
   VISIT_FROM(ObjectPtr, parent)
@@ -2152,11 +2152,11 @@
   bool loaded_;
 };
 
-class ErrorLayout : public ObjectLayout {
+class UntaggedError : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Error);
 };
 
-class ApiErrorLayout : public ErrorLayout {
+class UntaggedApiError : public UntaggedError {
   RAW_HEAP_OBJECT_IMPLEMENTATION(ApiError);
 
   VISIT_FROM(ObjectPtr, message)
@@ -2164,7 +2164,7 @@
   VISIT_TO(ObjectPtr, message)
 };
 
-class LanguageErrorLayout : public ErrorLayout {
+class UntaggedLanguageError : public UntaggedError {
   RAW_HEAP_OBJECT_IMPLEMENTATION(LanguageError);
 
   VISIT_FROM(ObjectPtr, previous_error)
@@ -2181,7 +2181,7 @@
   ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
 };
 
-class UnhandledExceptionLayout : public ErrorLayout {
+class UntaggedUnhandledException : public UntaggedError {
   RAW_HEAP_OBJECT_IMPLEMENTATION(UnhandledException);
 
   VISIT_FROM(ObjectPtr, exception)
@@ -2191,7 +2191,7 @@
   ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
 };
 
-class UnwindErrorLayout : public ErrorLayout {
+class UntaggedUnwindError : public UntaggedError {
   RAW_HEAP_OBJECT_IMPLEMENTATION(UnwindError);
 
   VISIT_FROM(ObjectPtr, message)
@@ -2200,11 +2200,11 @@
   bool is_user_initiated_;
 };
 
-class InstanceLayout : public ObjectLayout {
+class UntaggedInstance : public UntaggedObject {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Instance);
 };
 
-class LibraryPrefixLayout : public InstanceLayout {
+class UntaggedLibraryPrefix : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(LibraryPrefix);
 
   VISIT_FROM(ObjectPtr, name)
@@ -2233,7 +2233,7 @@
   bool is_loaded_;
 };
 
-class TypeArgumentsLayout : public InstanceLayout {
+class UntaggedTypeArguments : public UntaggedInstance {
  private:
   RAW_HEAP_OBJECT_IMPLEMENTATION(TypeArguments);
 
@@ -2254,7 +2254,7 @@
   friend class SnapshotReader;
 };
 
-class AbstractTypeLayout : public InstanceLayout {
+class UntaggedAbstractType : public UntaggedInstance {
  public:
   enum TypeState {
     kAllocated,                // Initial state.
@@ -2280,7 +2280,7 @@
   friend class StubCode;
 };
 
-class TypeLayout : public AbstractTypeLayout {
+class UntaggedType : public UntaggedAbstractType {
  private:
   RAW_HEAP_OBJECT_IMPLEMENTATION(Type);
 
@@ -2295,10 +2295,10 @@
   ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
 
   friend class CidRewriteVisitor;
-  friend class TypeArgumentsLayout;
+  friend class UntaggedTypeArguments;
 };
 
-class FunctionTypeLayout : public AbstractTypeLayout {
+class UntaggedFunctionType : public UntaggedAbstractType {
  private:
   RAW_HEAP_OBJECT_IMPLEMENTATION(FunctionType);
 
@@ -2313,7 +2313,7 @@
   uint8_t type_state_;
   uint8_t nullability_;
 
-  // Keep in sync with corresponding constants in FunctionLayout.
+  // Keep in sync with corresponding constants in UntaggedFunction.
   static constexpr intptr_t kMaxParentTypeArgumentsBits = 8;
   static constexpr intptr_t kMaxHasNamedOptionalParametersBits = 1;
   static constexpr intptr_t kMaxImplicitParametersBits = 1;
@@ -2344,14 +2344,14 @@
       PackedNumOptionalParameters;
   static_assert(PackedNumOptionalParameters::kNextBit <=
                     kBitsPerByte * sizeof(decltype(packed_fields_)),
-                "FunctionTypeLayout::packed_fields_ bitfields don't fit.");
+                "UntaggedFunctionType::packed_fields_ bitfields don't fit.");
 
   ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
 
   friend class Function;
 };
 
-class TypeRefLayout : public AbstractTypeLayout {
+class UntaggedTypeRef : public UntaggedAbstractType {
  private:
   RAW_HEAP_OBJECT_IMPLEMENTATION(TypeRef);
 
@@ -2361,7 +2361,7 @@
   ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
 };
 
-class TypeParameterLayout : public AbstractTypeLayout {
+class UntaggedTypeParameter : public UntaggedAbstractType {
  private:
   RAW_HEAP_OBJECT_IMPLEMENTATION(TypeParameter);
 
@@ -2398,7 +2398,7 @@
   friend class CidRewriteVisitor;
 };
 
-class ClosureLayout : public InstanceLayout {
+class UntaggedClosure : public UntaggedInstance {
  private:
   RAW_HEAP_OBJECT_IMPLEMENTATION(Closure);
 
@@ -2443,19 +2443,19 @@
   // any type arguments passed directly (or NSM will be invoked instead).
 };
 
-class NumberLayout : public InstanceLayout {
+class UntaggedNumber : public UntaggedInstance {
   RAW_OBJECT_IMPLEMENTATION(Number);
 };
 
-class IntegerLayout : public NumberLayout {
+class UntaggedInteger : public UntaggedNumber {
   RAW_OBJECT_IMPLEMENTATION(Integer);
 };
 
-class SmiLayout : public IntegerLayout {
+class UntaggedSmi : public UntaggedInteger {
   RAW_OBJECT_IMPLEMENTATION(Smi);
 };
 
-class MintLayout : public IntegerLayout {
+class UntaggedMint : public UntaggedInteger {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Mint);
   VISIT_NOTHING();
 
@@ -2466,9 +2466,9 @@
   friend class Integer;
   friend class SnapshotReader;
 };
-COMPILE_ASSERT(sizeof(MintLayout) == 16);
+COMPILE_ASSERT(sizeof(UntaggedMint) == 16);
 
-class DoubleLayout : public NumberLayout {
+class UntaggedDouble : public UntaggedNumber {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Double);
   VISIT_NOTHING();
 
@@ -2478,9 +2478,9 @@
   friend class SnapshotReader;
   friend class Class;
 };
-COMPILE_ASSERT(sizeof(DoubleLayout) == 16);
+COMPILE_ASSERT(sizeof(UntaggedDouble) == 16);
 
-class StringLayout : public InstanceLayout {
+class UntaggedString : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(String);
 
  protected:
@@ -2503,7 +2503,7 @@
   friend class ImageWriter;
 };
 
-class OneByteStringLayout : public StringLayout {
+class UntaggedOneByteString : public UntaggedString {
   RAW_HEAP_OBJECT_IMPLEMENTATION(OneByteString);
   VISIT_NOTHING();
 
@@ -2517,7 +2517,7 @@
   friend class String;
 };
 
-class TwoByteStringLayout : public StringLayout {
+class UntaggedTwoByteString : public UntaggedString {
   RAW_HEAP_OBJECT_IMPLEMENTATION(TwoByteString);
   VISIT_NOTHING();
 
@@ -2535,7 +2535,7 @@
 //
 // TypedData extends this with a length field, while Pointer extends this
 // with a TypeArguments field.
-class PointerBaseLayout : public InstanceLayout {
+class UntaggedPointerBase : public UntaggedInstance {
  protected:
   // The contents of [data_] depend on what concrete subclass is used:
   //
@@ -2555,23 +2555,23 @@
 };
 
 // Abstract base class for RawTypedData/RawExternalTypedData/RawTypedDataView.
-class TypedDataBaseLayout : public PointerBaseLayout {
+class UntaggedTypedDataBase : public UntaggedPointerBase {
  protected:
   // The length of the view in element sizes (obtainable via
   // [TypedDataBase::ElementSizeInBytes]).
   SMI_FIELD(SmiPtr, length);
 
  private:
-  friend class TypedDataViewLayout;
+  friend class UntaggedTypedDataView;
   RAW_HEAP_OBJECT_IMPLEMENTATION(TypedDataBase);
 };
 
-class TypedDataLayout : public TypedDataBaseLayout {
+class UntaggedTypedData : public UntaggedTypedDataBase {
   RAW_HEAP_OBJECT_IMPLEMENTATION(TypedData);
 
  public:
   static intptr_t payload_offset() {
-    return OFFSET_OF_RETURNED_VALUE(TypedDataLayout, internal_data);
+    return OFFSET_OF_RETURNED_VALUE(UntaggedTypedData, internal_data);
   }
 
   // Recompute [data_] pointer to internal data.
@@ -2602,19 +2602,19 @@
   friend class ObjectPool;
   friend class ObjectPoolDeserializationCluster;
   friend class ObjectPoolSerializationCluster;
-  friend class ObjectPoolLayout;
+  friend class UntaggedObjectPool;
   friend class SnapshotReader;
 };
 
 // All _*ArrayView/_ByteDataView classes share the same layout.
-class TypedDataViewLayout : public TypedDataBaseLayout {
+class UntaggedTypedDataView : public UntaggedTypedDataBase {
   RAW_HEAP_OBJECT_IMPLEMENTATION(TypedDataView);
 
  public:
   // Recompute [data_] based on internal/external [typed_data_].
   void RecomputeDataField() {
     const intptr_t offset_in_bytes = RawSmiValue(offset_in_bytes_);
-    uint8_t* payload = typed_data()->ptr()->data_;
+    uint8_t* payload = typed_data()->untag()->data_;
     data_ = payload + offset_in_bytes;
   }
 
@@ -2626,13 +2626,14 @@
   // fields don't need to be valid - only its address.
   void RecomputeDataFieldForInternalTypedData() {
     const intptr_t offset_in_bytes = RawSmiValue(offset_in_bytes_);
-    uint8_t* payload = reinterpret_cast<uint8_t*>(
-        ObjectLayout::ToAddr(typed_data()) + TypedDataLayout::payload_offset());
+    uint8_t* payload =
+        reinterpret_cast<uint8_t*>(UntaggedObject::ToAddr(typed_data()) +
+                                   UntaggedTypedData::payload_offset());
     data_ = payload + offset_in_bytes;
   }
 
   void ValidateInnerPointer() {
-    if (typed_data()->ptr()->GetClassId() == kNullCid) {
+    if (typed_data()->untag()->GetClassId() == kNullCid) {
       // The view object must have just been initialized.
       if (data_ != nullptr || RawSmiValue(offset_in_bytes_) != 0 ||
           RawSmiValue(length_) != 0) {
@@ -2640,7 +2641,7 @@
       }
     } else {
       const intptr_t offset_in_bytes = RawSmiValue(offset_in_bytes_);
-      uint8_t* payload = typed_data()->ptr()->data_;
+      uint8_t* payload = typed_data()->untag()->data_;
       if ((payload + offset_in_bytes) != data_) {
         FATAL("RawTypedDataView has invalid inner pointer.");
       }
@@ -2658,14 +2659,14 @@
   friend class Object;
   friend class ObjectPoolDeserializationCluster;
   friend class ObjectPoolSerializationCluster;
-  friend class ObjectPoolLayout;
+  friend class UntaggedObjectPool;
   friend class GCCompactor;
   template <bool>
   friend class ScavengerVisitorBase;
   friend class SnapshotReader;
 };
 
-class ExternalOneByteStringLayout : public StringLayout {
+class UntaggedExternalOneByteString : public UntaggedString {
   RAW_HEAP_OBJECT_IMPLEMENTATION(ExternalOneByteString);
 
   const uint8_t* external_data_;
@@ -2674,7 +2675,7 @@
   friend class String;
 };
 
-class ExternalTwoByteStringLayout : public StringLayout {
+class UntaggedExternalTwoByteString : public UntaggedString {
   RAW_HEAP_OBJECT_IMPLEMENTATION(ExternalTwoByteString);
 
   const uint16_t* external_data_;
@@ -2683,7 +2684,7 @@
   friend class String;
 };
 
-class BoolLayout : public InstanceLayout {
+class UntaggedBool : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Bool);
   VISIT_NOTHING();
 
@@ -2692,7 +2693,7 @@
   friend class Object;
 };
 
-class ArrayLayout : public InstanceLayout {
+class UntaggedArray : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Array);
 
   VISIT_FROM(RawCompressed, type_arguments)
@@ -2707,12 +2708,12 @@
   friend class CodeSerializationCluster;
   friend class CodeDeserializationCluster;
   friend class Deserializer;
-  friend class CodeLayout;
-  friend class ImmutableArrayLayout;
+  friend class UntaggedCode;
+  friend class UntaggedImmutableArray;
   friend class SnapshotReader;
   friend class GrowableObjectArray;
   friend class LinkedHashMap;
-  friend class LinkedHashMapLayout;
+  friend class UntaggedLinkedHashMap;
   friend class Object;
   friend class ICData;            // For high performance access.
   friend class SubtypeTestCache;  // For high performance access.
@@ -2721,13 +2722,13 @@
   friend class OldPage;
 };
 
-class ImmutableArrayLayout : public ArrayLayout {
+class UntaggedImmutableArray : public UntaggedArray {
   RAW_HEAP_OBJECT_IMPLEMENTATION(ImmutableArray);
 
   friend class SnapshotReader;
 };
 
-class GrowableObjectArrayLayout : public InstanceLayout {
+class UntaggedGrowableObjectArray : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(GrowableObjectArray);
 
   VISIT_FROM(RawCompressed, type_arguments)
@@ -2741,7 +2742,7 @@
   friend class ReversePc;
 };
 
-class LinkedHashMapLayout : public InstanceLayout {
+class UntaggedLinkedHashMap : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(LinkedHashMap);
 
   VISIT_FROM(RawCompressed, type_arguments)
@@ -2756,7 +2757,7 @@
   friend class SnapshotReader;
 };
 
-class Float32x4Layout : public InstanceLayout {
+class UntaggedFloat32x4 : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Float32x4);
   VISIT_NOTHING();
 
@@ -2771,9 +2772,9 @@
   float z() const { return value_[2]; }
   float w() const { return value_[3]; }
 };
-COMPILE_ASSERT(sizeof(Float32x4Layout) == 24);
+COMPILE_ASSERT(sizeof(UntaggedFloat32x4) == 24);
 
-class Int32x4Layout : public InstanceLayout {
+class UntaggedInt32x4 : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Int32x4);
   VISIT_NOTHING();
 
@@ -2787,9 +2788,9 @@
   int32_t z() const { return value_[2]; }
   int32_t w() const { return value_[3]; }
 };
-COMPILE_ASSERT(sizeof(Int32x4Layout) == 24);
+COMPILE_ASSERT(sizeof(UntaggedInt32x4) == 24);
 
-class Float64x2Layout : public InstanceLayout {
+class UntaggedFloat64x2 : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Float64x2);
   VISIT_NOTHING();
 
@@ -2802,7 +2803,7 @@
   double x() const { return value_[0]; }
   double y() const { return value_[1]; }
 };
-COMPILE_ASSERT(sizeof(Float64x2Layout) == 24);
+COMPILE_ASSERT(sizeof(UntaggedFloat64x2) == 24);
 
 // Define an alias for intptr_t.
 #if defined(ARCH_IS_32_BIT)
@@ -2815,7 +2816,7 @@
 #error Architecture is not 32-bit or 64-bit.
 #endif  // ARCH_IS_32_BIT
 
-class ExternalTypedDataLayout : public TypedDataBaseLayout {
+class UntaggedExternalTypedData : public UntaggedTypedDataBase {
   RAW_HEAP_OBJECT_IMPLEMENTATION(ExternalTypedData);
 
  protected:
@@ -2823,7 +2824,7 @@
   VISIT_TO(RawCompressed, length)
 };
 
-class PointerLayout : public PointerBaseLayout {
+class UntaggedPointer : public UntaggedPointerBase {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Pointer);
 
   VISIT_FROM(RawCompressed, type_arguments)
@@ -2833,7 +2834,7 @@
   friend class Pointer;
 };
 
-class DynamicLibraryLayout : public InstanceLayout {
+class UntaggedDynamicLibrary : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(DynamicLibrary);
   VISIT_NOTHING();
   void* handle_;
@@ -2842,13 +2843,13 @@
 };
 
 // VM implementations of the basic types in the isolate.
-class alignas(8) CapabilityLayout : public InstanceLayout {
+class alignas(8) UntaggedCapability : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Capability);
   VISIT_NOTHING();
   uint64_t id_;
 };
 
-class alignas(8) SendPortLayout : public InstanceLayout {
+class alignas(8) UntaggedSendPort : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(SendPort);
   VISIT_NOTHING();
   Dart_Port id_;
@@ -2857,7 +2858,7 @@
   friend class ReceivePort;
 };
 
-class ReceivePortLayout : public InstanceLayout {
+class UntaggedReceivePort : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(ReceivePort);
 
   VISIT_FROM(ObjectPtr, send_port)
@@ -2872,7 +2873,7 @@
 #endif  // !defined(PRODUCT)
 };
 
-class TransferableTypedDataLayout : public InstanceLayout {
+class UntaggedTransferableTypedData : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(TransferableTypedData);
   VISIT_NOTHING();
 };
@@ -2881,7 +2882,7 @@
 // Currently we don't have any interface that this object is supposed
 // to implement, so we just support the 'toString' method, which
 // converts the stack trace into a string.
-class StackTraceLayout : public InstanceLayout {
+class UntaggedStackTrace : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(StackTrace);
 
   VISIT_FROM(ObjectPtr, async_link)
@@ -2902,7 +2903,7 @@
 };
 
 // VM type for capturing JS regular expressions.
-class RegExpLayout : public InstanceLayout {
+class UntaggedRegExp : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(RegExp);
 
   VISIT_FROM(ObjectPtr, num_bracket_expressions)
@@ -2934,7 +2935,7 @@
   int8_t type_flags_;
 };
 
-class WeakPropertyLayout : public InstanceLayout {
+class UntaggedWeakProperty : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(WeakProperty);
 
   VISIT_FROM(ObjectPtr, key)
@@ -2957,7 +2958,7 @@
 
 // MirrorReferences are used by mirrors to hold reflectees that are VM
 // internal objects, such as libraries, classes, functions or types.
-class MirrorReferenceLayout : public InstanceLayout {
+class UntaggedMirrorReference : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(MirrorReference);
 
   VISIT_FROM(ObjectPtr, referent)
@@ -2966,7 +2967,7 @@
 };
 
 // UserTags are used by the profiler to track Dart script state.
-class UserTagLayout : public InstanceLayout {
+class UntaggedUserTag : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(UserTag);
 
   VISIT_FROM(ObjectPtr, label)
@@ -2983,7 +2984,7 @@
   uword tag() const { return tag_; }
 };
 
-class FutureOrLayout : public InstanceLayout {
+class UntaggedFutureOr : public UntaggedInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(FutureOr);
 
   VISIT_FROM(RawCompressed, type_arguments)
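
The forward declarations in this header are generated by an X-macro, so the
rename is a single token-pasting change (class Untagged##clazz; above). A
small self-contained sketch of the expansion, using a hypothetical three-entry
list in place of the VM's full CLASS_LIST:

// Stand-in for CLASS_LIST; the real list enumerates every VM class.
#define CLASS_LIST_SKETCH(V)                                                   \
  V(Class)                                                                     \
  V(Code)                                                                      \
  V(Instance)

#define DEFINE_FORWARD_DECLARATION(clazz) class Untagged##clazz;
CLASS_LIST_SKETCH(DEFINE_FORWARD_DECLARATION)
#undef DEFINE_FORWARD_DECLARATION

// The preprocessor now emits:
//   class UntaggedClass;
//   class UntaggedCode;
//   class UntaggedInstance;
// where the old macro emitted ClassLayout, CodeLayout, InstanceLayout.
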
diff --git a/runtime/vm/raw_object_fields.cc b/runtime/vm/raw_object_fields.cc
index 35917b6..16b25bd 100644
--- a/runtime/vm/raw_object_fields.cc
+++ b/runtime/vm/raw_object_fields.cc
@@ -223,7 +223,7 @@
 
 #define DEFINE_OFFSETS_TABLE_ENTRY(class_name, field_name)                     \
   {class_name::kClassId, #field_name,                                          \
-   OFFSET_OF(class_name##Layout, field_name)},
+   OFFSET_OF(Untagged##class_name, field_name)},
 
 // clang-format off
 OffsetsTable::OffsetsTableEntry OffsetsTable::offsets_table[] = {
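
DEFINE_OFFSETS_TABLE_ENTRY now pastes Untagged in front of the class name
rather than appending Layout. A sketch of the entry shape it produces,
assuming a hypothetical class id and field name, with plain offsetof standing
in for the VM's OFFSET_OF macro:

#include <cstddef>

// Hypothetical stand-in; the real UntaggedClass has many more fields.
struct UntaggedClassSketch {
  unsigned long tags_;
  void* library_;  // assumed field name, for illustration only
};

struct OffsetsTableEntrySketch {
  int class_id;
  const char* field_name;
  std::size_t offset;
};

// Roughly what DEFINE_OFFSETS_TABLE_ENTRY(Class, library_) expands to after
// the rename: {Class::kClassId, "library_", OFFSET_OF(UntaggedClass, library_)}.
const OffsetsTableEntrySketch kSketchEntry = {
    /*class_id=*/1,  // stands in for Class::kClassId
    "library_",
    offsetof(UntaggedClassSketch, library_),
};
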
diff --git a/runtime/vm/raw_object_snapshot.cc b/runtime/vm/raw_object_snapshot.cc
index 7f7f766..230bde3 100644
--- a/runtime/vm/raw_object_snapshot.cc
+++ b/runtime/vm/raw_object_snapshot.cc
@@ -24,7 +24,7 @@
     " port after it exceeds certain size in bytes.");
 
 #define OFFSET_OF_FROM(obj)                                                    \
-  obj.raw()->from() - reinterpret_cast<ObjectPtr*>(obj.raw()->ptr())
+  obj.ptr()->from() - reinterpret_cast<ObjectPtr*>(obj.ptr()->untag())
 
 // TODO(18854): Need to assert No GC can happen here, even though
 // allocations may happen.
@@ -32,7 +32,7 @@
   intptr_t num_flds = (to) - (from);                                           \
   for (intptr_t i = 0; i <= num_flds; i++) {                                   \
     (*reader->PassiveObjectHandle()) = reader->ReadObjectImpl(as_reference);   \
-    object.StorePointer(((from) + i), reader->PassiveObjectHandle()->raw());   \
+    object.StorePointer(((from) + i), reader->PassiveObjectHandle()->ptr());   \
   }
 
 ClassPtr Class::ReadFrom(SnapshotReader* reader,
@@ -44,13 +44,13 @@
 
   Class& cls = Class::ZoneHandle(reader->zone(), Class::null());
   cls = reader->ReadClassId(object_id);
-  return cls.raw();
+  return cls.ptr();
 }
 
-void ClassLayout::WriteTo(SnapshotWriter* writer,
-                          intptr_t object_id,
-                          Snapshot::Kind kind,
-                          bool as_reference) {
+void UntaggedClass::WriteTo(SnapshotWriter* writer,
+                            intptr_t object_id,
+                            Snapshot::Kind kind,
+                            bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -83,7 +83,7 @@
 
   // Allocate type object.
   Type& type = Type::ZoneHandle(reader->zone(), Type::New());
-  bool is_canonical = ObjectLayout::IsCanonical(tags);
+  bool is_canonical = UntaggedObject::IsCanonical(tags);
   reader->AddBackRef(object_id, &type, kIsDeserialized);
 
   // Set all non object fields.
@@ -95,8 +95,8 @@
   reader->EnqueueTypePostprocessing(type);
 
   // Set all the object fields.
-  READ_OBJECT_FIELDS(type, type.raw()->ptr()->from(), type.raw()->ptr()->to(),
-                     as_reference);
+  READ_OBJECT_FIELDS(type, type.ptr()->untag()->from(),
+                     type.ptr()->untag()->to(), as_reference);
 
   // Read in the type class.
   (*reader->ClassHandle()) =
@@ -112,18 +112,18 @@
     type ^= type.Canonicalize(Thread::Current(), nullptr);
   }
 
-  return type.raw();
+  return type.ptr();
 }
 
-void TypeLayout::WriteTo(SnapshotWriter* writer,
-                         intptr_t object_id,
-                         Snapshot::Kind kind,
-                         bool as_reference) {
+void UntaggedType::WriteTo(SnapshotWriter* writer,
+                           intptr_t object_id,
+                           Snapshot::Kind kind,
+                           bool as_reference) {
   ASSERT(writer != NULL);
 
   // Only resolved and finalized types should be written to a snapshot.
-  ASSERT((type_state_ == TypeLayout::kFinalizedInstantiated) ||
-         (type_state_ == TypeLayout::kFinalizedUninstantiated));
+  ASSERT((type_state_ == UntaggedType::kFinalizedInstantiated) ||
+         (type_state_ == UntaggedType::kFinalizedUninstantiated));
   ASSERT(type_class_id() != Object::null());
 
   // Write out the serialization header value for this object.
@@ -181,21 +181,21 @@
   reader->EnqueueTypePostprocessing(type_ref);
 
   // Set all the object fields.
-  READ_OBJECT_FIELDS(type_ref, type_ref.raw()->ptr()->from(),
-                     type_ref.raw()->ptr()->to(), kAsReference);
+  READ_OBJECT_FIELDS(type_ref, type_ref.ptr()->untag()->from(),
+                     type_ref.ptr()->untag()->to(), kAsReference);
 
   // Fill in the type testing stub.
   Code& code = *reader->CodeHandle();
   code = TypeTestingStubGenerator::DefaultCodeForType(type_ref);
   type_ref.SetTypeTestingStub(code);
 
-  return type_ref.raw();
+  return type_ref.ptr();
 }
 
-void TypeRefLayout::WriteTo(SnapshotWriter* writer,
-                            intptr_t object_id,
-                            Snapshot::Kind kind,
-                            bool as_reference) {
+void UntaggedTypeRef::WriteTo(SnapshotWriter* writer,
+                              intptr_t object_id,
+                              Snapshot::Kind kind,
+                              bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -220,7 +220,7 @@
   // Allocate type parameter object.
   TypeParameter& type_parameter =
       TypeParameter::ZoneHandle(reader->zone(), TypeParameter::New());
-  bool is_canonical = ObjectLayout::IsCanonical(tags);
+  bool is_canonical = UntaggedObject::IsCanonical(tags);
   reader->AddBackRef(object_id, &type_parameter, kIsDeserialized);
 
   // Set all non object fields.
@@ -232,8 +232,8 @@
   reader->EnqueueTypePostprocessing(type_parameter);
 
   // Set all the object fields.
-  READ_OBJECT_FIELDS(type_parameter, type_parameter.raw()->ptr()->from(),
-                     type_parameter.raw()->ptr()->to(), kAsReference);
+  READ_OBJECT_FIELDS(type_parameter, type_parameter.ptr()->untag()->from(),
+                     type_parameter.ptr()->untag()->to(), kAsReference);
 
   // Read in the parameterized class.
   (*reader->ClassHandle()) =
@@ -252,13 +252,13 @@
     type_parameter ^= type_parameter.Canonicalize(Thread::Current(), nullptr);
   }
 
-  return type_parameter.raw();
+  return type_parameter.ptr();
 }
 
-void TypeParameterLayout::WriteTo(SnapshotWriter* writer,
-                                  intptr_t object_id,
-                                  Snapshot::Kind kind,
-                                  bool as_reference) {
+void UntaggedTypeParameter::WriteTo(SnapshotWriter* writer,
+                                    intptr_t object_id,
+                                    Snapshot::Kind kind,
+                                    bool as_reference) {
   ASSERT(writer != NULL);
 
   // Only finalized type parameters should be written to a snapshot.
@@ -301,7 +301,7 @@
 
   TypeArguments& type_arguments =
       TypeArguments::ZoneHandle(reader->zone(), TypeArguments::New(len));
-  bool is_canonical = ObjectLayout::IsCanonical(tags);
+  bool is_canonical = UntaggedObject::IsCanonical(tags);
   reader->AddBackRef(object_id, &type_arguments, kIsDeserialized);
 
   // Set the instantiations field, which is only read from a full snapshot.
@@ -318,13 +318,13 @@
     type_arguments = type_arguments.Canonicalize(Thread::Current(), nullptr);
   }
 
-  return type_arguments.raw();
+  return type_arguments.ptr();
 }
 
-void TypeArgumentsLayout::WriteTo(SnapshotWriter* writer,
-                                  intptr_t object_id,
-                                  Snapshot::Kind kind,
-                                  bool as_reference) {
+void UntaggedTypeArguments::WriteTo(SnapshotWriter* writer,
+                                    intptr_t object_id,
+                                    Snapshot::Kind kind,
+                                    bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -348,10 +348,11 @@
     if (!writer->can_send_any_object()) {
       // Lookup the type class.
       TypePtr raw_type = Type::RawCast(types()[i]);
-      SmiPtr raw_type_class_id = Smi::RawCast(raw_type->ptr()->type_class_id());
+      SmiPtr raw_type_class_id =
+          Smi::RawCast(raw_type->untag()->type_class_id());
       ClassPtr type_class = writer->isolate_group()->class_table()->At(
           Smi::Value(raw_type_class_id));
-      if (!writer->AllowObjectsInDartLibrary(type_class->ptr()->library())) {
+      if (!writer->AllowObjectsInDartLibrary(type_class->untag()->library())) {
         writer->WriteVMIsolateObject(kDynamicType);
       } else {
         writer->WriteObjectImpl(types()[i], as_reference);
@@ -371,10 +372,10 @@
   return Closure::null();
 }
 
-void ClosureLayout::WriteTo(SnapshotWriter* writer,
-                            intptr_t object_id,
-                            Snapshot::Kind kind,
-                            bool as_reference) {
+void UntaggedClosure::WriteTo(SnapshotWriter* writer,
+                              intptr_t object_id,
+                              Snapshot::Kind kind,
+                              bool as_reference) {
   ASSERT(writer != NULL);
   ASSERT(kind == Snapshot::kMessage);
 
@@ -407,20 +408,20 @@
     // TODO(5411462): Need to assert No GC can happen here, even though
     // allocations may happen.
     intptr_t num_flds =
-        (context.raw()->ptr()->to(num_vars) - context.raw()->ptr()->from());
+        (context.ptr()->untag()->to(num_vars) - context.ptr()->untag()->from());
     for (intptr_t i = 0; i <= num_flds; i++) {
       (*reader->PassiveObjectHandle()) = reader->ReadObjectImpl(kAsReference);
-      context.StorePointer((context.raw()->ptr()->from() + i),
-                           reader->PassiveObjectHandle()->raw());
+      context.StorePointer((context.ptr()->untag()->from() + i),
+                           reader->PassiveObjectHandle()->ptr());
     }
   }
-  return context.raw();
+  return context.ptr();
 }
 
-void ContextLayout::WriteTo(SnapshotWriter* writer,
-                            intptr_t object_id,
-                            Snapshot::Kind kind,
-                            bool as_reference) {
+void UntaggedContext::WriteTo(SnapshotWriter* writer,
+                              intptr_t object_id,
+                              Snapshot::Kind kind,
+                              bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -465,16 +466,16 @@
     context_scope.SetTypeAt(0, *reader->TypeHandle());
     context_scope.SetContextIndexAt(0, 0);
     context_scope.SetContextLevelAt(0, 0);
-    return context_scope.raw();
+    return context_scope.ptr();
   }
   UNREACHABLE();
   return NULL;
 }
 
-void ContextScopeLayout::WriteTo(SnapshotWriter* writer,
-                                 intptr_t object_id,
-                                 Snapshot::Kind kind,
-                                 bool as_reference) {
+void UntaggedContextScope::WriteTo(SnapshotWriter* writer,
+                                   intptr_t object_id,
+                                   Snapshot::Kind kind,
+                                   bool as_reference) {
   ASSERT(writer != NULL);
 
   if (is_implicit_) {
@@ -506,8 +507,8 @@
     UNREACHABLE();                                                             \
     return type::null();                                                       \
   }                                                                            \
-  void type##Layout::WriteTo(SnapshotWriter* writer, intptr_t object_id,       \
-                             Snapshot::Kind kind, bool as_reference) {         \
+  void Untagged##type::WriteTo(SnapshotWriter* writer, intptr_t object_id,     \
+                               Snapshot::Kind kind, bool as_reference) {       \
     UNREACHABLE();                                                             \
   }
 
@@ -518,8 +519,8 @@
     UNREACHABLE();                                                             \
     return type::null();                                                       \
   }                                                                            \
-  void type##Layout::WriteTo(SnapshotWriter* writer, intptr_t object_id,       \
-                             Snapshot::Kind kind, bool as_reference) {         \
+  void Untagged##type::WriteTo(SnapshotWriter* writer, intptr_t object_id,     \
+                               Snapshot::Kind kind, bool as_reference) {       \
     writer->SetWriteException(Exceptions::kArgument,                           \
                               "Illegal argument in isolate message"            \
                               " : (object is a " #type ")");                   \
@@ -581,16 +582,16 @@
   reader->AddBackRef(object_id, &api_error, kIsDeserialized);
 
   // Set all the object fields.
-  READ_OBJECT_FIELDS(api_error, api_error.raw()->ptr()->from(),
-                     api_error.raw()->ptr()->to(), kAsReference);
+  READ_OBJECT_FIELDS(api_error, api_error.ptr()->untag()->from(),
+                     api_error.ptr()->untag()->to(), kAsReference);
 
-  return api_error.raw();
+  return api_error.ptr();
 }
 
-void ApiErrorLayout::WriteTo(SnapshotWriter* writer,
-                             intptr_t object_id,
-                             Snapshot::Kind kind,
-                             bool as_reference) {
+void UntaggedApiError::WriteTo(SnapshotWriter* writer,
+                               intptr_t object_id,
+                               Snapshot::Kind kind,
+                               bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -624,16 +625,16 @@
   language_error.set_kind(reader->Read<uint8_t>());
 
   // Set all the object fields.
-  READ_OBJECT_FIELDS(language_error, language_error.raw()->ptr()->from(),
-                     language_error.raw()->ptr()->to(), kAsReference);
+  READ_OBJECT_FIELDS(language_error, language_error.ptr()->untag()->from(),
+                     language_error.ptr()->untag()->to(), kAsReference);
 
-  return language_error.raw();
+  return language_error.ptr();
 }
 
-void LanguageErrorLayout::WriteTo(SnapshotWriter* writer,
-                                  intptr_t object_id,
-                                  Snapshot::Kind kind,
-                                  bool as_reference) {
+void UntaggedLanguageError::WriteTo(SnapshotWriter* writer,
+                                    intptr_t object_id,
+                                    Snapshot::Kind kind,
+                                    bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -663,16 +664,16 @@
   reader->AddBackRef(object_id, &result, kIsDeserialized);
 
   // Set all the object fields.
-  READ_OBJECT_FIELDS(result, result.raw()->ptr()->from(),
-                     result.raw()->ptr()->to(), kAsReference);
+  READ_OBJECT_FIELDS(result, result.ptr()->untag()->from(),
+                     result.ptr()->untag()->to(), kAsReference);
 
-  return result.raw();
+  return result.ptr();
 }
 
-void UnhandledExceptionLayout::WriteTo(SnapshotWriter* writer,
-                                       intptr_t object_id,
-                                       Snapshot::Kind kind,
-                                       bool as_reference) {
+void UntaggedUnhandledException::WriteTo(SnapshotWriter* writer,
+                                         intptr_t object_id,
+                                         Snapshot::Kind kind,
+                                         bool as_reference) {
   // Write out the serialization header value for this object.
   writer->WriteInlinedObjectHeader(object_id);
 
@@ -695,18 +696,18 @@
   // constant.
   Instance& obj = Instance::ZoneHandle(reader->zone(), Instance::null());
   obj ^= Object::Allocate(kInstanceCid, Instance::InstanceSize(), Heap::kNew);
-  if (ObjectLayout::IsCanonical(tags)) {
+  if (UntaggedObject::IsCanonical(tags)) {
     obj = obj.Canonicalize(reader->thread());
   }
   reader->AddBackRef(object_id, &obj, kIsDeserialized);
 
-  return obj.raw();
+  return obj.ptr();
 }
 
-void InstanceLayout::WriteTo(SnapshotWriter* writer,
-                             intptr_t object_id,
-                             Snapshot::Kind kind,
-                             bool as_reference) {
+void UntaggedInstance::WriteTo(SnapshotWriter* writer,
+                               intptr_t object_id,
+                               Snapshot::Kind kind,
+                               bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -733,7 +734,7 @@
     Smi& smi =
         Smi::ZoneHandle(reader->zone(), Smi::New(static_cast<intptr_t>(value)));
     reader->AddBackRef(object_id, &smi, kIsDeserialized);
-    return smi.raw();
+    return smi.ptr();
   }
 
   // Create a Mint object or get canonical one if it is a canonical constant.
@@ -743,20 +744,20 @@
   // full snapshot). Objects that are only in the script need not be
   // canonicalized as they are already canonical.
   // When reading a message snapshot we always have to canonicalize.
-  if (ObjectLayout::IsCanonical(tags)) {
+  if (UntaggedObject::IsCanonical(tags)) {
     mint = Mint::NewCanonical(value);
     ASSERT(mint.IsCanonical());
   } else {
     mint = Mint::New(value);
   }
   reader->AddBackRef(object_id, &mint, kIsDeserialized);
-  return mint.raw();
+  return mint.ptr();
 }
 
-void MintLayout::WriteTo(SnapshotWriter* writer,
-                         intptr_t object_id,
-                         Snapshot::Kind kind,
-                         bool as_reference) {
+void UntaggedMint::WriteTo(SnapshotWriter* writer,
+                           intptr_t object_id,
+                           Snapshot::Kind kind,
+                           bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -786,20 +787,20 @@
   // references that are objects from the core library (loaded from a
   // full snapshot). Objects that are only in the script need not be
   // canonicalized as they are already canonical.
-  if (ObjectLayout::IsCanonical(tags)) {
+  if (UntaggedObject::IsCanonical(tags)) {
     dbl = Double::NewCanonical(value);
     ASSERT(dbl.IsCanonical());
   } else {
     dbl = Double::New(value);
   }
   reader->AddBackRef(object_id, &dbl, kIsDeserialized);
-  return dbl.raw();
+  return dbl.ptr();
 }
 
-void DoubleLayout::WriteTo(SnapshotWriter* writer,
-                           intptr_t object_id,
-                           Snapshot::Kind kind,
-                           bool as_reference) {
+void UntaggedDouble::WriteTo(SnapshotWriter* writer,
+                             intptr_t object_id,
+                             Snapshot::Kind kind,
+                             bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -821,7 +822,7 @@
                           CallbackType new_symbol,
                           Snapshot::Kind kind) {
   ASSERT(reader != NULL);
-  if (ObjectLayout::IsCanonical(tags)) {
+  if (UntaggedObject::IsCanonical(tags)) {
     // Set up canonical string object.
     ASSERT(reader != NULL);
     CharacterType* ptr = reader->zone()->Alloc<CharacterType>(len);
@@ -910,18 +911,18 @@
   }
 }
 
-void OneByteStringLayout::WriteTo(SnapshotWriter* writer,
-                                  intptr_t object_id,
-                                  Snapshot::Kind kind,
-                                  bool as_reference) {
+void UntaggedOneByteString::WriteTo(SnapshotWriter* writer,
+                                    intptr_t object_id,
+                                    Snapshot::Kind kind,
+                                    bool as_reference) {
   StringWriteTo(writer, object_id, kind, kOneByteStringCid,
                 writer->GetObjectTags(this), length(), data());
 }
 
-void TwoByteStringLayout::WriteTo(SnapshotWriter* writer,
-                                  intptr_t object_id,
-                                  Snapshot::Kind kind,
-                                  bool as_reference) {
+void UntaggedTwoByteString::WriteTo(SnapshotWriter* writer,
+                                    intptr_t object_id,
+                                    Snapshot::Kind kind,
+                                    bool as_reference) {
   StringWriteTo(writer, object_id, kind, kTwoByteStringCid,
                 writer->GetObjectTags(this), length(), data());
 }
@@ -944,19 +945,19 @@
   return ExternalTwoByteString::null();
 }
 
-void ExternalOneByteStringLayout::WriteTo(SnapshotWriter* writer,
-                                          intptr_t object_id,
-                                          Snapshot::Kind kind,
-                                          bool as_reference) {
+void UntaggedExternalOneByteString::WriteTo(SnapshotWriter* writer,
+                                            intptr_t object_id,
+                                            Snapshot::Kind kind,
+                                            bool as_reference) {
   // Serialize as a non-external one byte string.
   StringWriteTo(writer, object_id, kind, kOneByteStringCid,
                 writer->GetObjectTags(this), length(), external_data_);
 }
 
-void ExternalTwoByteStringLayout::WriteTo(SnapshotWriter* writer,
-                                          intptr_t object_id,
-                                          Snapshot::Kind kind,
-                                          bool as_reference) {
+void UntaggedExternalTwoByteString::WriteTo(SnapshotWriter* writer,
+                                            intptr_t object_id,
+                                            Snapshot::Kind kind,
+                                            bool as_reference) {
   // Serialize as a non-external two byte string.
   StringWriteTo(writer, object_id, kind, kTwoByteStringCid,
                 writer->GetObjectTags(this), length(), external_data_);
@@ -985,10 +986,10 @@
   }
   if (!as_reference) {
     // Read all the individual elements for inlined objects.
-    ASSERT(!ObjectLayout::IsCanonical(tags));
+    ASSERT(!UntaggedObject::IsCanonical(tags));
     reader->ArrayReadFrom(object_id, *array, len, tags);
   }
-  return array->raw();
+  return array->ptr();
 }
 
 ImmutableArrayPtr ImmutableArray::ReadFrom(SnapshotReader* reader,
@@ -1015,26 +1016,26 @@
   if (!as_reference) {
     // Read all the individual elements for inlined objects.
     reader->ArrayReadFrom(object_id, *array, len, tags);
-    if (ObjectLayout::IsCanonical(tags)) {
+    if (UntaggedObject::IsCanonical(tags)) {
       *array ^= array->Canonicalize(reader->thread());
     }
   }
   return raw(*array);
 }
 
-void ArrayLayout::WriteTo(SnapshotWriter* writer,
-                          intptr_t object_id,
-                          Snapshot::Kind kind,
-                          bool as_reference) {
+void UntaggedArray::WriteTo(SnapshotWriter* writer,
+                            intptr_t object_id,
+                            Snapshot::Kind kind,
+                            bool as_reference) {
   ASSERT(!this->IsCanonical());
   writer->ArrayWriteTo(object_id, kArrayCid, writer->GetObjectTags(this),
                        length(), type_arguments(), data(), as_reference);
 }
 
-void ImmutableArrayLayout::WriteTo(SnapshotWriter* writer,
-                                   intptr_t object_id,
-                                   Snapshot::Kind kind,
-                                   bool as_reference) {
+void UntaggedImmutableArray::WriteTo(SnapshotWriter* writer,
+                                     intptr_t object_id,
+                                     Snapshot::Kind kind,
+                                     bool as_reference) {
   writer->ArrayWriteTo(object_id, kImmutableArrayCid,
                        writer->GetObjectTags(this), length_, type_arguments_,
                        data(), as_reference);
@@ -1055,8 +1056,8 @@
 
   // Read type arguments of growable array object.
   *reader->TypeArgumentsHandle() ^= reader->ReadObjectImpl(kAsInlinedObject);
-  array.StorePointer(&array.raw_ptr()->type_arguments_,
-                     reader->TypeArgumentsHandle()->raw());
+  array.StorePointer(&array.untag()->type_arguments_,
+                     reader->TypeArgumentsHandle()->ptr());
 
   // Read length of growable array object.
   array.SetLength(reader->ReadSmiValue());
@@ -1065,13 +1066,13 @@
   *(reader->ArrayHandle()) ^= reader->ReadObjectImpl(kAsReference);
   array.SetData(*(reader->ArrayHandle()));
 
-  return array.raw();
+  return array.ptr();
 }
 
-void GrowableObjectArrayLayout::WriteTo(SnapshotWriter* writer,
-                                        intptr_t object_id,
-                                        Snapshot::Kind kind,
-                                        bool as_reference) {
+void UntaggedGrowableObjectArray::WriteTo(SnapshotWriter* writer,
+                                          intptr_t object_id,
+                                          Snapshot::Kind kind,
+                                          bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -1131,18 +1132,18 @@
   reader->EnqueueRehashingOfMap(map);
 
   // Read the keys and values.
-  bool read_as_reference = ObjectLayout::IsCanonical(tags) ? false : true;
+  bool read_as_reference = UntaggedObject::IsCanonical(tags) ? false : true;
   for (intptr_t i = 0; i < used_data; i++) {
     *reader->PassiveObjectHandle() = reader->ReadObjectImpl(read_as_reference);
     data.SetAt(i, *reader->PassiveObjectHandle());
   }
-  return map.raw();
+  return map.ptr();
 }
 
-void LinkedHashMapLayout::WriteTo(SnapshotWriter* writer,
-                                  intptr_t object_id,
-                                  Snapshot::Kind kind,
-                                  bool as_reference) {
+void UntaggedLinkedHashMap::WriteTo(SnapshotWriter* writer,
+                                    intptr_t object_id,
+                                    Snapshot::Kind kind,
+                                    bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -1165,8 +1166,8 @@
   // Write out the keys and values.
   const bool write_as_reference = this->IsCanonical() ? false : true;
   ArrayPtr data_array = data_;
-  ObjectPtr* data_elements = data_array->ptr()->data();
-  ASSERT(used_data <= Smi::Value(data_array->ptr()->length_));
+  ObjectPtr* data_elements = data_array->untag()->data();
+  ASSERT(used_data <= Smi::Value(data_array->untag()->length_));
 #if defined(DEBUG)
   intptr_t deleted_keys_found = 0;
 #endif  // DEBUG
@@ -1201,13 +1202,13 @@
   Float32x4& simd = Float32x4::ZoneHandle(reader->zone(), Float32x4::null());
   simd = Float32x4::New(value0, value1, value2, value3);
   reader->AddBackRef(object_id, &simd, kIsDeserialized);
-  return simd.raw();
+  return simd.ptr();
 }
 
-void Float32x4Layout::WriteTo(SnapshotWriter* writer,
-                              intptr_t object_id,
-                              Snapshot::Kind kind,
-                              bool as_reference) {
+void UntaggedFloat32x4::WriteTo(SnapshotWriter* writer,
+                                intptr_t object_id,
+                                Snapshot::Kind kind,
+                                bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -1240,13 +1241,13 @@
   Int32x4& simd = Int32x4::ZoneHandle(reader->zone(), Int32x4::null());
   simd = Int32x4::New(value0, value1, value2, value3);
   reader->AddBackRef(object_id, &simd, kIsDeserialized);
-  return simd.raw();
+  return simd.ptr();
 }
 
-void Int32x4Layout::WriteTo(SnapshotWriter* writer,
-                            intptr_t object_id,
-                            Snapshot::Kind kind,
-                            bool as_reference) {
+void UntaggedInt32x4::WriteTo(SnapshotWriter* writer,
+                              intptr_t object_id,
+                              Snapshot::Kind kind,
+                              bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -1277,13 +1278,13 @@
   Float64x2& simd = Float64x2::ZoneHandle(reader->zone(), Float64x2::null());
   simd = Float64x2::New(value0, value1);
   reader->AddBackRef(object_id, &simd, kIsDeserialized);
-  return simd.raw();
+  return simd.ptr();
 }
 
-void Float64x2Layout::WriteTo(SnapshotWriter* writer,
-                              intptr_t object_id,
-                              Snapshot::Kind kind,
-                              bool as_reference) {
+void UntaggedFloat64x2::WriteTo(SnapshotWriter* writer,
+                                intptr_t object_id,
+                                Snapshot::Kind kind,
+                                bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -1305,7 +1306,7 @@
                                  bool as_reference) {
   ASSERT(reader != NULL);
 
-  intptr_t cid = ObjectLayout::ClassIdTag::decode(tags);
+  intptr_t cid = UntaggedObject::ClassIdTag::decode(tags);
   intptr_t len = reader->ReadSmiValue();
   TypedData& result =
       TypedData::ZoneHandle(reader->zone(), TypedData::New(cid, len));
@@ -1324,12 +1325,12 @@
   // as it would already be a canonical object.
   // When reading a script snapshot or a message snapshot we always have
   // to canonicalize the object.
-  if (ObjectLayout::IsCanonical(tags)) {
+  if (UntaggedObject::IsCanonical(tags)) {
     result ^= result.Canonicalize(reader->thread());
     ASSERT(!result.IsNull());
     ASSERT(result.IsCanonical());
   }
-  return result.raw();
+  return result.ptr();
 }
 
 ExternalTypedDataPtr ExternalTypedData::ReadFrom(SnapshotReader* reader,
@@ -1338,7 +1339,7 @@
                                                  Snapshot::Kind kind,
                                                  bool as_reference) {
   ASSERT(!Snapshot::IsFull(kind));
-  intptr_t cid = ObjectLayout::ClassIdTag::decode(tags);
+  intptr_t cid = UntaggedObject::ClassIdTag::decode(tags);
   intptr_t length = reader->ReadSmiValue();
 
   FinalizableData finalizable_data =
@@ -1350,7 +1351,7 @@
   intptr_t external_size = obj.LengthInBytes();
   obj.AddFinalizer(finalizable_data.peer, finalizable_data.callback,
                    external_size);
-  return obj.raw();
+  return obj.ptr();
 }
 
 // This function's name can appear in Observatory.
@@ -1359,10 +1360,10 @@
   free(buffer);
 }
 
-void TypedDataLayout::WriteTo(SnapshotWriter* writer,
-                              intptr_t object_id,
-                              Snapshot::Kind kind,
-                              bool as_reference) {
+void UntaggedTypedData::WriteTo(SnapshotWriter* writer,
+                                intptr_t object_id,
+                                Snapshot::Kind kind,
+                                bool as_reference) {
   ASSERT(writer != NULL);
   intptr_t cid = this->GetClassId();
   intptr_t length = Smi::Value(length_);  // In elements.
@@ -1459,10 +1460,10 @@
   }
 }
 
-void ExternalTypedDataLayout::WriteTo(SnapshotWriter* writer,
-                                      intptr_t object_id,
-                                      Snapshot::Kind kind,
-                                      bool as_reference) {
+void UntaggedExternalTypedData::WriteTo(SnapshotWriter* writer,
+                                        intptr_t object_id,
+                                        Snapshot::Kind kind,
+                                        bool as_reference) {
   ASSERT(writer != NULL);
   intptr_t cid = this->GetClassId();
   intptr_t length = Smi::Value(length_);  // In elements.
@@ -1532,10 +1533,10 @@
       IsolateMessageTypedDataFinalizer);
 }
 
-void TypedDataViewLayout::WriteTo(SnapshotWriter* writer,
-                                  intptr_t object_id,
-                                  Snapshot::Kind kind,
-                                  bool as_reference) {
+void UntaggedTypedDataView::WriteTo(SnapshotWriter* writer,
+                                    intptr_t object_id,
+                                    Snapshot::Kind kind,
+                                    bool as_reference) {
   // Views always have a backing store.
   ASSERT(typed_data_ != Object::null());
 
@@ -1558,7 +1559,7 @@
                                          Snapshot::Kind kind,
                                          bool as_reference) {
   auto& typed_data = *reader->TypedDataBaseHandle();
-  const classid_t cid = ObjectLayout::ClassIdTag::decode(tags);
+  const classid_t cid = UntaggedObject::ClassIdTag::decode(tags);
 
   auto& view = *reader->TypedDataViewHandle();
   view = TypedDataView::New(cid);
@@ -1569,7 +1570,7 @@
   typed_data ^= reader->ReadObjectImpl(as_reference);
   view.InitializeWith(typed_data, offset_in_bytes, length);
 
-  return view.raw();
+  return view.ptr();
 }
 
 CapabilityPtr Capability::ReadFrom(SnapshotReader* reader,
@@ -1582,13 +1583,13 @@
   Capability& result =
       Capability::ZoneHandle(reader->zone(), Capability::New(id));
   reader->AddBackRef(object_id, &result, kIsDeserialized);
-  return result.raw();
+  return result.ptr();
 }
 
-void CapabilityLayout::WriteTo(SnapshotWriter* writer,
-                               intptr_t object_id,
-                               Snapshot::Kind kind,
-                               bool as_reference) {
+void UntaggedCapability::WriteTo(SnapshotWriter* writer,
+                                 intptr_t object_id,
+                                 Snapshot::Kind kind,
+                                 bool as_reference) {
   // Write out the serialization header value for this object.
   writer->WriteInlinedObjectHeader(object_id);
 
@@ -1612,13 +1613,13 @@
   SendPort& result =
       SendPort::ZoneHandle(reader->zone(), SendPort::New(id, origin_id));
   reader->AddBackRef(object_id, &result, kIsDeserialized);
-  return result.raw();
+  return result.ptr();
 }
 
-void SendPortLayout::WriteTo(SnapshotWriter* writer,
-                             intptr_t object_id,
-                             Snapshot::Kind kind,
-                             bool as_reference) {
+void UntaggedSendPort::WriteTo(SnapshotWriter* writer,
+                               intptr_t object_id,
+                               Snapshot::Kind kind,
+                               bool as_reference) {
   // Write out the serialization header value for this object.
   writer->WriteInlinedObjectHeader(object_id);
 
@@ -1646,13 +1647,13 @@
   auto& transferableTypedData = TransferableTypedData::ZoneHandle(
       reader->zone(), TransferableTypedData::New(data, length));
   reader->AddBackRef(object_id, &transferableTypedData, kIsDeserialized);
-  return transferableTypedData.raw();
+  return transferableTypedData.ptr();
 }
 
-void TransferableTypedDataLayout::WriteTo(SnapshotWriter* writer,
-                                          intptr_t object_id,
-                                          Snapshot::Kind kind,
-                                          bool as_reference) {
+void UntaggedTransferableTypedData::WriteTo(SnapshotWriter* writer,
+                                            intptr_t object_id,
+                                            Snapshot::Kind kind,
+                                            bool as_reference) {
   ASSERT(writer != nullptr);
   ASSERT(GetClassId() == kTransferableTypedDataCid);
   void* peer = writer->thread()->heap()->GetPeer(ObjectPtr(this));
@@ -1703,19 +1704,18 @@
   reader->AddBackRef(object_id, &regex, kIsDeserialized);
 
   // Read and Set all the other fields.
-  regex.StoreSmi(&regex.raw_ptr()->num_bracket_expressions_,
-                 reader->ReadAsSmi());
+  regex.StoreSmi(&regex.untag()->num_bracket_expressions_, reader->ReadAsSmi());
 
   *reader->ArrayHandle() ^= reader->ReadObjectImpl(kAsInlinedObject);
   regex.set_capture_name_map(*reader->ArrayHandle());
   *reader->StringHandle() ^= reader->ReadObjectImpl(kAsInlinedObject);
   regex.set_pattern(*reader->StringHandle());
 
-  regex.StoreNonPointer(&regex.raw_ptr()->num_one_byte_registers_,
+  regex.StoreNonPointer(&regex.untag()->num_one_byte_registers_,
                         reader->Read<int32_t>());
-  regex.StoreNonPointer(&regex.raw_ptr()->num_two_byte_registers_,
+  regex.StoreNonPointer(&regex.untag()->num_two_byte_registers_,
                         reader->Read<int32_t>());
-  regex.StoreNonPointer(&regex.raw_ptr()->type_flags_, reader->Read<int8_t>());
+  regex.StoreNonPointer(&regex.untag()->type_flags_, reader->Read<int8_t>());
 
   const Function& no_function = Function::Handle(reader->zone());
   for (intptr_t cid = kOneByteStringCid; cid <= kExternalTwoByteStringCid;
@@ -1724,13 +1724,13 @@
     regex.set_function(cid, /*sticky=*/true, no_function);
   }
 
-  return regex.raw();
+  return regex.ptr();
 }
 
-void RegExpLayout::WriteTo(SnapshotWriter* writer,
-                           intptr_t object_id,
-                           Snapshot::Kind kind,
-                           bool as_reference) {
+void UntaggedRegExp::WriteTo(SnapshotWriter* writer,
+                             intptr_t object_id,
+                             Snapshot::Kind kind,
+                             bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
@@ -1761,16 +1761,16 @@
   reader->AddBackRef(object_id, &weak_property, kIsDeserialized);
 
   // Set all the object fields.
-  READ_OBJECT_FIELDS(weak_property, weak_property.raw()->ptr()->from(),
-                     weak_property.raw()->ptr()->to(), kAsReference);
+  READ_OBJECT_FIELDS(weak_property, weak_property.ptr()->untag()->from(),
+                     weak_property.ptr()->untag()->to(), kAsReference);
 
-  return weak_property.raw();
+  return weak_property.ptr();
 }
 
-void WeakPropertyLayout::WriteTo(SnapshotWriter* writer,
-                                 intptr_t object_id,
-                                 Snapshot::Kind kind,
-                                 bool as_reference) {
+void UntaggedWeakProperty::WriteTo(SnapshotWriter* writer,
+                                   intptr_t object_id,
+                                   Snapshot::Kind kind,
+                                   bool as_reference) {
   ASSERT(writer != NULL);
 
   // Write out the serialization header value for this object.
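
The hunks above are one mechanical rename applied across the snapshot code: handle classes expose ptr() where they used to expose raw(), the *Layout structs become Untagged*, and reaching an object's in-memory layout from its tagged pointer is now spelled untag() rather than ptr(). Below is a minimal stand-alone sketch of the two-level scheme the new names describe; the types are hypothetical, not the VM's actual classes, and the VM's operator plumbing for chains like result.ptr()->untag()->from() differs in detail.

  #include <cassert>
  #include <cstdint>

  // Hypothetical stand-ins: the low bit of a tagged pointer marks a heap
  // object; untag() strips it to reach the raw layout struct.
  struct UntaggedThing {
    int64_t field;
  };

  struct ThingPtr {
    uintptr_t value;  // tagged pointer
    UntaggedThing* untag() const {
      return reinterpret_cast<UntaggedThing*>(value & ~uintptr_t{1});
    }
  };

  class ThingHandle {
   public:
    explicit ThingHandle(ThingPtr p) : ptr_(p) {}
    ThingPtr ptr() const { return ptr_; }  // spelled raw() before the rename
   private:
    ThingPtr ptr_;
  };

  int main() {
    UntaggedThing storage{42};
    ThingPtr tagged{reinterpret_cast<uintptr_t>(&storage) | uintptr_t{1}};
    ThingHandle handle(tagged);
    // handle -> tagged pointer -> raw layout, mirroring ptr()->untag()->...
    assert(handle.ptr().untag()->field == 42);
    return 0;
  }

The net effect of the rename is that ptr() consistently means "tagged pointer" and untag() consistently means "strip the tag", where the old raw()/ptr() pair used ptr() for both roles.
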
diff --git a/runtime/vm/regexp.cc b/runtime/vm/regexp.cc
index 9de6262..9b880c8 100644
--- a/runtime/vm/regexp.cc
+++ b/runtime/vm/regexp.cc
@@ -5517,7 +5517,7 @@
       FunctionType::Handle(zone, FunctionType::New());
   Function& fn =
       Function::Handle(zone, Function::New(signature, Symbols::ColonMatcher(),
-                                           FunctionLayout::kIrregexpFunction,
+                                           UntaggedFunction::kIrregexpFunction,
                                            true,   // Static.
                                            false,  // Not const.
                                            false,  // Not abstract.
@@ -5583,7 +5583,7 @@
     }
   }
 
-  return regexp.raw();
+  return regexp.ptr();
 }
 
 }  // namespace dart
diff --git a/runtime/vm/regexp_assembler.cc b/runtime/vm/regexp_assembler.cc
index 3c2e119..c36aa38 100644
--- a/runtime/vm/regexp_assembler.cc
+++ b/runtime/vm/regexp_assembler.cc
@@ -44,12 +44,12 @@
         int32_t s2[1] = {c2};
         canonicalize.get(c2, '\0', s2);
         if (s1[0] != s2[0]) {
-          return static_cast<uword>(Bool::False().raw());
+          return static_cast<uword>(Bool::False().ptr());
         }
       }
     }
   }
-  return static_cast<uword>(Bool::True().raw());
+  return static_cast<uword>(Bool::True().ptr());
 }
 
 uword /*BoolPtr*/ CaseInsensitiveCompareUTF16(uword /*StringPtr*/ str_raw,
@@ -68,7 +68,7 @@
       // Non-BMP characters do not have case-equivalents in the BMP.
       // Both have to be non-BMP for them to be able to match.
       if (!Utf16::IsLeadSurrogate(c2))
-        return static_cast<uword>(Bool::False().raw());
+        return static_cast<uword>(Bool::False().ptr());
       if (i + 1 < length.Value()) {
         uint16_t c1t = str.CharAt(lhs_index.Value() + i + 1);
         uint16_t c2t = str.CharAt(rhs_index.Value() + i + 1);
@@ -81,9 +81,9 @@
     }
     c1 = u_foldCase(c1, U_FOLD_CASE_DEFAULT);
     c2 = u_foldCase(c2, U_FOLD_CASE_DEFAULT);
-    if (c1 != c2) return static_cast<uword>(Bool::False().raw());
+    if (c1 != c2) return static_cast<uword>(Bool::False().ptr());
   }
-  return static_cast<uword>(Bool::True().raw());
+  return static_cast<uword>(Bool::True().ptr());
 }
 
 DEFINE_RAW_LEAF_RUNTIME_ENTRY(
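
The two compare helpers above are raw leaf runtime entries, so object pointers cross the C boundary as plain uword values; the /*BoolPtr*/ and /*StringPtr*/ comments record which bits actually encode tagged pointers, cast on either side of the call. A toy model of that encode/decode convention, with made-up names rather than the VM's:

  #include <cstdint>
  #include <cstdio>

  using uword = uintptr_t;

  // Pretend tagged pointer to a canonical boolean object.
  struct FakeBoolPtr {
    uword bits;
  };

  static FakeBoolPtr True() { return FakeBoolPtr{0x11}; }
  static FakeBoolPtr False() { return FakeBoolPtr{0x21}; }

  // A leaf entry keeps a C-style signature, so the result travels as a
  // uword and the comment documents the type those bits encode.
  uword /*FakeBoolPtr*/ CaseInsensitiveEqual(uword lhs, uword rhs) {
    return lhs == rhs ? True().bits : False().bits;
  }

  int main() {
    FakeBoolPtr result{CaseInsensitiveEqual(7, 7)};
    std::printf("%s\n", result.bits == True().bits ? "true" : "false");
    return 0;
  }
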
diff --git a/runtime/vm/regexp_assembler_bytecode.cc b/runtime/vm/regexp_assembler_bytecode.cc
index db8a554..4d8841f 100644
--- a/runtime/vm/regexp_assembler_bytecode.cc
+++ b/runtime/vm/regexp_assembler_bytecode.cc
@@ -401,7 +401,7 @@
   NoSafepointScope no_safepoint;
   memmove(bytecode.DataAddr(0), buffer_->data(), len);
 
-  return bytecode.raw();
+  return bytecode.ptr();
 }
 
 intptr_t BytecodeRegExpMacroAssembler::length() {
@@ -554,7 +554,7 @@
               capture_register_count * sizeof(int32_t));
     }
 
-    return result.raw();
+    return result.ptr();
   }
   if (result == IrregexpInterpreter::RE_EXCEPTION) {
     UNREACHABLE();
diff --git a/runtime/vm/regexp_assembler_ir.cc b/runtime/vm/regexp_assembler_ir.cc
index f01b655..552fc72 100644
--- a/runtime/vm/regexp_assembler_ir.cc
+++ b/runtime/vm/regexp_assembler_ir.cc
@@ -323,7 +323,7 @@
   }
 
   ASSERT(retval.IsArray());
-  return Array::Cast(retval).raw();
+  return Array::Cast(retval).ptr();
 }
 
 LocalVariable* IRRegExpMacroAssembler::Parameter(const String& name,
@@ -383,7 +383,7 @@
     Report::LongJump(Error::Cast(value));
   }
   return new (Z)
-      ConstantInstr(Instance::ZoneHandle(Z, Instance::RawCast(value.raw())));
+      ConstantInstr(Instance::ZoneHandle(Z, Instance::RawCast(value.ptr())));
 }
 
 ComparisonInstr* IRRegExpMacroAssembler::Comparison(ComparisonKind kind,
diff --git a/runtime/vm/regexp_interpreter.cc b/runtime/vm/regexp_interpreter.cc
index 5706ca9..eb0a243 100644
--- a/runtime/vm/regexp_interpreter.cc
+++ b/runtime/vm/regexp_interpreter.cc
@@ -40,12 +40,12 @@
   Bool& ret = Bool::Handle();
   if (unicode) {
     ret = static_cast<BoolPtr>(CaseInsensitiveCompareUTF16(
-        static_cast<uword>(subject.raw()), static_cast<uword>(Smi::New(from)),
+        static_cast<uword>(subject.ptr()), static_cast<uword>(Smi::New(from)),
         static_cast<uword>(Smi::New(current)),
         static_cast<uword>(Smi::New(len))));
   } else {
     ret = static_cast<BoolPtr>(CaseInsensitiveCompareUCS2(
-        static_cast<uword>(subject.raw()), static_cast<uword>(Smi::New(from)),
+        static_cast<uword>(subject.ptr()), static_cast<uword>(Smi::New(from)),
         static_cast<uword>(Smi::New(current)),
         static_cast<uword>(Smi::New(len))));
   }
diff --git a/runtime/vm/regexp_parser.cc b/runtime/vm/regexp_parser.cc
index edad22b..4b4d12b 100644
--- a/runtime/vm/regexp_parser.cc
+++ b/runtime/vm/regexp_parser.cc
@@ -1262,7 +1262,7 @@
     array.SetAt(i * 2 + 1, smi);
   }
 
-  return array.raw();
+  return array.ptr();
 }
 
 bool RegExpParser::HasNamedCaptures() {
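
The loop ending above stores a Smi at slot i * 2 + 1, which suggests the named-capture result is a flat array of (name, index) pairs. A sketch of that layout, under that assumption, with standard containers standing in for VM arrays:

  #include <cassert>
  #include <string>
  #include <utility>
  #include <variant>
  #include <vector>

  // Names at even slots, capture indices at the following odd slots; the
  // pair layout here is an assumption drawn from the loop above.
  using Slot = std::variant<std::string, int>;

  std::vector<Slot> BuildNamedCaptures(
      const std::vector<std::pair<std::string, int>>& captures) {
    std::vector<Slot> array(captures.size() * 2);
    for (size_t i = 0; i < captures.size(); i++) {
      array[i * 2] = captures[i].first;       // name
      array[i * 2 + 1] = captures[i].second;  // index, a Smi in the VM
    }
    return array;
  }

  int main() {
    auto array = BuildNamedCaptures({{"year", 1}, {"month", 2}});
    assert(std::get<std::string>(array[2]) == "month");
    assert(std::get<int>(array[3]) == 2);
    return 0;
  }
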
diff --git a/runtime/vm/report.cc b/runtime/vm/report.cc
index 069b3be..eee2f42 100644
--- a/runtime/vm/report.cc
+++ b/runtime/vm/report.cc
@@ -80,7 +80,7 @@
     result = String::NewFormatted(Heap::kOld, "%s: ", message_header);
     result = String::Concat(result, message, Heap::kOld);
   }
-  return result.raw();
+  return result.ptr();
 }
 
 void Report::LongJump(const Error& error) {
diff --git a/runtime/vm/resolver.cc b/runtime/vm/resolver.cc
index a361c5a5..6593f3e 100644
--- a/runtime/vm/resolver.cc
+++ b/runtime/vm/resolver.cc
@@ -36,7 +36,7 @@
     const String& function_name,
     bool allow_add,
     std::function<FunctionPtr(Class&, const String&)> lookup) {
-  Class& cls = Class::Handle(zone, receiver_class.raw());
+  Class& cls = Class::Handle(zone, receiver_class.ptr());
   if (FLAG_trace_resolving) {
     THR_Print("ResolveDynamic '%s' for class %s\n", function_name.ToCString(),
               String::Handle(zone, cls.Name()).ToCString());
@@ -47,7 +47,7 @@
       zone,
       Function::IsDynamicInvocationForwarderName(function_name)
           ? Function::DemangleDynamicInvocationForwarderName(function_name)
-          : function_name.raw());
+          : function_name.ptr());
 
   const bool is_getter = Field::IsGetterName(demangled);
   String& demangled_getter_name = String::Handle();
@@ -55,7 +55,7 @@
     demangled_getter_name = Field::NameFromGetter(demangled);
   }
 
-  const bool is_dyn_call = demangled.raw() != function_name.raw();
+  const bool is_dyn_call = demangled.ptr() != function_name.ptr();
 
   Thread* thread = Thread::Current();
   bool need_to_create_method_extractor = false;
@@ -64,10 +64,10 @@
       // Try to find a dyn:* forwarder & return it.
       function = cls.GetInvocationDispatcher(
           function_name, Array::null_array(),
-          FunctionLayout::kDynamicInvocationForwarder,
+          UntaggedFunction::kDynamicInvocationForwarder,
           /*create_if_absent=*/false);
     }
-    if (!function.IsNull()) return function.raw();
+    if (!function.IsNull()) return function.ptr();
 
     ASSERT(cls.is_finalized());
     {
@@ -81,7 +81,7 @@
           function.GetDynamicInvocationForwarder(function_name, allow_add);
     }
 #endif
-    if (!function.IsNull()) return function.raw();
+    if (!function.IsNull()) return function.ptr();
 
     // Getter invocation might actually be a method extraction.
     if (is_getter) {
@@ -105,7 +105,7 @@
     // nobody created method extractor since we last checked under ReadRwLocker.
     function = function.GetMethodExtractor(demangled);
   }
-  return function.raw();
+  return function.ptr();
 }
 
 static FunctionPtr ResolveDynamicForReceiverClassWithCustomLookup(
@@ -136,7 +136,7 @@
     }
     return Function::null();
   }
-  return function.raw();
+  return function.ptr();
 }
 
 FunctionPtr Resolver::ResolveDynamicForReceiverClass(
@@ -208,7 +208,7 @@
     // Check if we are referring to a top level function.
     const Object& object = Object::Handle(library.ResolveName(function_name));
     if (!object.IsNull() && object.IsFunction()) {
-      function ^= object.raw();
+      function ^= object.ptr();
       if (!function.AreValidArguments(type_args_len, num_arguments,
                                       argument_names, NULL)) {
         if (FLAG_trace_resolving) {
@@ -241,7 +241,7 @@
                 class_name.ToCString(), function_name.ToCString());
     }
   }
-  return function.raw();
+  return function.ptr();
 }
 
 FunctionPtr Resolver::ResolveStatic(const Class& cls,
@@ -272,7 +272,7 @@
     }
     return Function::null();
   }
-  return function.raw();
+  return function.ptr();
 }
 
 }  // namespace dart
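
Throughout resolver.cc, names are compared by pointer identity (demangled.ptr() != function_name.ptr()). That is sound only because these strings are canonical symbols: equal contents intern to a single heap object. A small interning sketch, illustrative only, of why identity then doubles as content equality:

  #include <cassert>
  #include <string>
  #include <unordered_set>

  // Toy symbol table: interning maps equal strings to one stored node, so
  // pointer comparison becomes equivalent to content comparison.
  class SymbolTable {
   public:
    const std::string* Intern(const std::string& s) {
      return &*symbols_.insert(s).first;  // node pointers survive rehashing
    }
   private:
    std::unordered_set<std::string> symbols_;
  };

  int main() {
    SymbolTable table;
    const std::string* a = table.Intern("foo");
    const std::string* b = table.Intern("foo");
    const std::string* c = table.Intern("dyn:foo");
    assert(a == b);  // same contents, same symbol, same pointer
    assert(a != c);  // a different name is a different symbol
    return 0;
  }
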
diff --git a/runtime/vm/reusable_handles.h b/runtime/vm/reusable_handles.h
index 7c452bc..5b05ecc 100644
--- a/runtime/vm/reusable_handles.h
+++ b/runtime/vm/reusable_handles.h
@@ -45,7 +45,7 @@
     ~Reusable##name##HandleScope() {                                           \
       ASSERT(thread_->reusable_##name##_handle_scope_active());                \
       thread_->set_reusable_##name##_handle_scope_active(false);               \
-      Handle().raw_ = name::null();                                            \
+      Handle().ptr_ = name::null();                                            \
     }                                                                          \
     name& Handle() const {                                                     \
       ASSERT(thread_->name##_handle_ != NULL);                                 \
@@ -64,7 +64,7 @@
         : handle_(thread->name##_handle_) {}                                   \
     Reusable##name##HandleScope()                                              \
         : handle_(Thread::Current()->name##_handle_) {}                        \
-    ~Reusable##name##HandleScope() { handle_->raw_ = name::null(); }           \
+    ~Reusable##name##HandleScope() { handle_->ptr_ = name::null(); }           \
     name& Handle() const {                                                     \
       ASSERT(handle_ != NULL);                                                 \
       return *handle_;                                                         \
diff --git a/runtime/vm/reverse_pc_lookup_cache.cc b/runtime/vm/reverse_pc_lookup_cache.cc
index ef62406..dad8849 100644
--- a/runtime/vm/reverse_pc_lookup_cache.cc
+++ b/runtime/vm/reverse_pc_lookup_cache.cc
@@ -25,22 +25,22 @@
   // this changes, would could sort the table list during deserialization and
   // binary search for the table.
   GrowableObjectArrayPtr tables = group->object_store()->code_order_tables();
-  intptr_t tables_length = Smi::Value(tables->ptr()->length_);
+  intptr_t tables_length = Smi::Value(tables->untag()->length_);
   for (intptr_t i = 0; i < tables_length; i++) {
     ArrayPtr table =
-        static_cast<ArrayPtr>(tables->ptr()->data_->ptr()->data()[i]);
+        static_cast<ArrayPtr>(tables->untag()->data_->untag()->data()[i]);
     intptr_t lo = 0;
-    intptr_t hi = Smi::Value(table->ptr()->length_) - 1;
+    intptr_t hi = Smi::Value(table->untag()->length_) - 1;
 
     // Fast check if pc belongs to this table.
     if (lo > hi) {
       continue;
     }
-    CodePtr first = static_cast<CodePtr>(table->ptr()->data()[lo]);
+    CodePtr first = static_cast<CodePtr>(table->untag()->data()[lo]);
     if (pc < Code::PayloadStartOf(first)) {
       continue;
     }
-    CodePtr last = static_cast<CodePtr>(table->ptr()->data()[hi]);
+    CodePtr last = static_cast<CodePtr>(table->untag()->data()[hi]);
     if (pc >= (Code::PayloadStartOf(last) + Code::PayloadSizeOf(last))) {
       continue;
     }
@@ -50,7 +50,7 @@
       intptr_t mid = (hi - lo + 1) / 2 + lo;
       ASSERT(mid >= lo);
       ASSERT(mid <= hi);
-      CodePtr code = static_cast<CodePtr>(table->ptr()->data()[mid]);
+      CodePtr code = static_cast<CodePtr>(table->untag()->data()[mid]);
       uword code_start = Code::PayloadStartOf(code);
       uword code_end = code_start + Code::PayloadSizeOf(code);
       if (pc < code_start) {
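
The lookup above first fast-rejects any table whose first and last code payloads do not bracket pc, then binary searches for the entry whose payload range covers it. The same strategy in stand-alone form, over plain (start, size) ranges instead of Code objects:

  #include <cassert>
  #include <cstdint>
  #include <vector>

  // Entries sorted by payload start address, as the tables above are.
  struct CodeRange {
    uintptr_t start;
    uintptr_t size;
  };

  const CodeRange* Lookup(const std::vector<CodeRange>& table, uintptr_t pc) {
    if (table.empty()) return nullptr;
    // Fast range check against the first and last entries.
    if (pc < table.front().start) return nullptr;
    if (pc >= table.back().start + table.back().size) return nullptr;
    intptr_t lo = 0;
    intptr_t hi = static_cast<intptr_t>(table.size()) - 1;
    while (lo <= hi) {
      intptr_t mid = lo + (hi - lo) / 2;
      uintptr_t begin = table[mid].start;
      uintptr_t end = begin + table[mid].size;
      if (pc < begin) {
        hi = mid - 1;
      } else if (pc >= end) {
        lo = mid + 1;
      } else {
        return &table[mid];  // pc falls inside [begin, end)
      }
    }
    return nullptr;  // pc lies in a gap between entries
  }

  int main() {
    std::vector<CodeRange> table = {
        {0x1000, 0x100}, {0x2000, 0x80}, {0x3000, 0x40}};
    assert(Lookup(table, 0x2040) == &table[1]);
    assert(Lookup(table, 0x2800) == nullptr);
    return 0;
  }
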
diff --git a/runtime/vm/runtime_entry.cc b/runtime/vm/runtime_entry.cc
index a50f2ce..20125f3 100644
--- a/runtime/vm/runtime_entry.cc
+++ b/runtime/vm/runtime_entry.cc
@@ -196,7 +196,7 @@
     const ObjectPool& pool = ObjectPool::Handle(zone, code.GetObjectPool());
     member_name ^= pool.ObjectAt(name_index);
   } else {
-    member_name = Symbols::OptimizedOut().raw();
+    member_name = Symbols::OptimizedOut().ptr();
   }
 
   NullErrorHelper(zone, member_name);
@@ -371,7 +371,7 @@
   // outermost runtime code (to which the generated Dart code might not return
   // in a long time).
   bool add_to_remembered_set = true;
-  if (object->ptr()->IsRemembered()) {
+  if (object->untag()->IsRemembered()) {
     // Objects must not be added to the remembered set twice because the
     // scavenger's visitor is not idempotent.
     // Might already be remembered because of type argument store in
@@ -390,7 +390,7 @@
   }
 
   if (add_to_remembered_set) {
-    object->ptr()->AddToRememberedSet(thread);
+    object->untag()->AddToRememberedSet(thread);
   }
 
   // For incremental write barrier elimination, we need to ensure that the
@@ -543,7 +543,7 @@
     OS::PrintErr("%s: '%s' %d %s '%s' %d (pc: %#" Px ").\n", message,
                  String::Handle(instance_type.Name()).ToCString(),
                  instance_type.type_class_id(),
-                 (result.raw() == Bool::True().raw()) ? "is" : "is !",
+                 (result.ptr() == Bool::True().ptr()) ? "is" : "is !",
                  String::Handle(type.Name()).ToCString(), type.type_class_id(),
                  caller_frame->pc());
   } else {
@@ -553,7 +553,7 @@
                              function_type_arguments, kAllFree, Heap::kOld));
     OS::PrintErr("%s: '%s' %s '%s' instantiated from '%s' (pc: %#" Px ").\n",
                  message, String::Handle(instance_type.Name()).ToCString(),
-                 (result.raw() == Bool::True().raw()) ? "is" : "is !",
+                 (result.ptr() == Bool::True().ptr()) ? "is" : "is !",
                  String::Handle(instantiated_type.Name()).ToCString(),
                  String::Handle(type.Name()).ToCString(), caller_frame->pc());
   }
@@ -611,7 +611,7 @@
   if (instance_class.IsClosureClass()) {
     const auto& closure = Closure::Cast(instance);
     const auto& closure_function = Function::Handle(zone, closure.function());
-    instance_class_id_or_function = closure_function.raw();
+    instance_class_id_or_function = closure_function.ptr();
     instance_type_arguments = closure.instantiator_type_arguments();
     instance_parent_function_type_arguments = closure.function_type_arguments();
     instance_delayed_type_arguments = closure.delayed_type_arguments();
@@ -626,7 +626,7 @@
         String::Handle(zone, instance_class.Name());
     TextBuffer buffer(256);
     buffer.Printf("  Updating test cache %#" Px " with result %s for:\n",
-                  static_cast<uword>(new_cache.raw()), result.ToCString());
+                  static_cast<uword>(new_cache.ptr()), result.ToCString());
     if (instance.IsString()) {
       buffer.Printf("    instance: '%s'\n", instance.ToCString());
     } else {
@@ -637,14 +637,14 @@
     buffer.Printf(
         "    raw entry: [ %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px
         ", %#" Px ", %#" Px ", %#" Px " ]\n",
-        static_cast<uword>(instance_class_id_or_function.raw()),
-        static_cast<uword>(destination_type.raw()),
-        static_cast<uword>(instance_type_arguments.raw()),
-        static_cast<uword>(instantiator_type_arguments.raw()),
-        static_cast<uword>(function_type_arguments.raw()),
-        static_cast<uword>(instance_parent_function_type_arguments.raw()),
-        static_cast<uword>(instance_delayed_type_arguments.raw()),
-        static_cast<uword>(result.raw()));
+        static_cast<uword>(instance_class_id_or_function.ptr()),
+        static_cast<uword>(destination_type.ptr()),
+        static_cast<uword>(instance_type_arguments.ptr()),
+        static_cast<uword>(instantiator_type_arguments.ptr()),
+        static_cast<uword>(function_type_arguments.ptr()),
+        static_cast<uword>(instance_parent_function_type_arguments.ptr()),
+        static_cast<uword>(instance_delayed_type_arguments.ptr()),
+        static_cast<uword>(result.ptr()));
     OS::PrintErr("%s", buffer.buffer());
   }
   {
@@ -680,7 +680,7 @@
       if (FLAG_trace_type_checks) {
         TextBuffer buffer(256);
         buffer.Printf("  Collision for test cache %#" Px " at index %" Pd ":\n",
-                      static_cast<uword>(new_cache.raw()), colliding_index);
+                      static_cast<uword>(new_cache.ptr()), colliding_index);
         buffer.Printf("    entry: ");
         new_cache.WriteEntryToBuffer(zone, &buffer, colliding_index, "      ");
         OS::PrintErr("%s\n", buffer.buffer());
@@ -688,7 +688,7 @@
       if (!FLAG_enable_isolate_groups) {
         FATAL("Duplicate subtype test cache entry");
       }
-      if (old_result.raw() != result.raw()) {
+      if (old_result.ptr() != result.ptr()) {
         FATAL("Existing subtype test cache entry has result %s, not %s",
               old_result.ToCString(), result.ToCString());
       }
@@ -705,7 +705,7 @@
       TextBuffer buffer(256);
       buffer.Printf("  Added new entry to test cache %#" Px " at index %" Pd
                     ":\n",
-                    static_cast<uword>(new_cache.raw()), len);
+                    static_cast<uword>(new_cache.ptr()), len);
       buffer.Printf("    new entry: ");
       new_cache.WriteEntryToBuffer(zone, &buffer, len, "      ");
       OS::PrintErr("%s\n", buffer.buffer());
@@ -818,8 +818,8 @@
 #endif
     }
 
-    if (dst_name.raw() ==
-        Symbols::dynamic_assert_assignable_stc_check().raw()) {
+    if (dst_name.ptr() ==
+        Symbols::dynamic_assert_assignable_stc_check().ptr()) {
 #if !defined(TARGET_ARCH_IA32)
       // Can only come here from type testing stub via dynamic AssertAssignable.
       ASSERT(mode != kTypeCheckFromInline);
@@ -897,8 +897,8 @@
     would_update_cache_if_not_lazy =
         (!src_instance.IsNull() &&
          dst_type.type_test_stub() ==
-             StubCode::DefaultNullableTypeTest().raw()) ||
-        dst_type.type_test_stub() == StubCode::DefaultTypeTest().raw();
+             StubCode::DefaultNullableTypeTest().ptr()) ||
+        dst_type.type_test_stub() == StubCode::DefaultTypeTest().ptr();
     should_update_cache = would_update_cache_if_not_lazy && cache.IsNull();
   }
 
@@ -919,7 +919,7 @@
 #endif
     TypeTestingStubGenerator::SpecializeStubFor(thread, dst_type);
 #if defined(DEBUG)
-    ASSERT(old_code.raw() != dst_type.type_test_stub());
+    ASSERT(old_code.ptr() != dst_type.type_test_stub());
 #endif
     // Only create the cache when we come from a normal stub.
     should_update_cache = false;
@@ -1033,7 +1033,7 @@
   const Code& target_code = Code::Handle(zone, target_function.EnsureHasCode());
   // Before patching verify that we are not repeatedly patching to the same
   // target.
-  ASSERT(target_code.raw() !=
+  ASSERT(target_code.ptr() !=
          CodePatcher::GetStaticCallTargetAt(caller_frame->pc(), caller_code));
   CodePatcher::PatchStaticCallAt(caller_frame->pc(), caller_code, target_code);
   caller_code.SetStaticCallTargetCodeAt(caller_frame->pc(), target_code);
@@ -1109,7 +1109,7 @@
   const Function& target_function =
       Function::Handle(receiver_class.GetInvocationDispatcher(
           target_name, arguments_descriptor,
-          FunctionLayout::kInvokeFieldDispatcher, FLAG_lazy_dispatchers));
+          UntaggedFunction::kInvokeFieldDispatcher, FLAG_lazy_dispatchers));
   ASSERT(!target_function.IsNull() || !FLAG_lazy_dispatchers);
   if (FLAG_trace_ic) {
     OS::PrintErr(
@@ -1117,7 +1117,7 @@
         receiver_class.ToCString(), receiver_class.id(),
         target_function.IsNull() ? "null" : target_function.ToCString());
   }
-  *result = target_function.raw();
+  *result = target_function.ptr();
   return true;
 }
 
@@ -1142,19 +1142,19 @@
     const Function& target_function =
         Function::Handle(receiver_class.GetInvocationDispatcher(
             *demangled, args_descriptor,
-            FunctionLayout::kNoSuchMethodDispatcher, FLAG_lazy_dispatchers));
+            UntaggedFunction::kNoSuchMethodDispatcher, FLAG_lazy_dispatchers));
     if (FLAG_trace_ic) {
       OS::PrintErr(
           "NoSuchMethod IC miss: adding <%s> id:%" Pd " -> <%s>\n",
           receiver_class.ToCString(), receiver_class.id(),
           target_function.IsNull() ? "null" : target_function.ToCString());
     }
-    result = target_function.raw();
+    result = target_function.ptr();
   }
   // May be null if --no-lazy-dispatchers, in which case dispatch will be
   // handled by NoSuchMethodFromCallStub.
   ASSERT(!result.IsNull() || !FLAG_lazy_dispatchers);
-  return result.raw();
+  return result.ptr();
 }
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
@@ -1187,7 +1187,7 @@
   // the ic_data_array belongs to the function instead of the code. This should
   // only happen because of reload, but it sometimes happens with KBC mixed mode
   // probably through a race between foreground and background compilation.
-  if (caller_function.unoptimized_code() != caller_code.raw()) {
+  if (caller_function.unoptimized_code() != caller_code.ptr()) {
     return;
   }
 #if !defined(PRODUCT)
@@ -1251,7 +1251,7 @@
       Function::Handle(result ? store->simple_instance_of_true_function()
                               : store->simple_instance_of_false_function());
   ASSERT(!target.IsNull());
-  return target.raw();
+  return target.ptr();
 }
 
 static FunctionPtr Resolve(
@@ -1270,7 +1270,7 @@
                                                                name, args_desc);
   }
   if (caller_arguments.length() == 2 &&
-      target_function.raw() == thread->isolate_group()
+      target_function.ptr() == thread->isolate_group()
                                    ->object_store()
                                    ->simple_instance_of_function()) {
     // Replace the target function with a constant function.
@@ -1286,7 +1286,7 @@
     ASSERT(!FLAG_lazy_dispatchers);
   }
 
-  return target_function.raw();
+  return target_function.ptr();
 }
 
 // Handles a static call in unoptimized code that has one argument type not
@@ -1360,7 +1360,7 @@
     if (!cls.is_allocated()) continue;
     other_target = Resolver::ResolveDynamicAnyArgs(zone, cls, name,
                                                    /*allow_add=*/false);
-    if (other_target.raw() != target.raw()) {
+    if (other_target.ptr() != target.ptr()) {
       return false;
     }
   }
@@ -1404,7 +1404,7 @@
   const auto& new_or_old_value = UnlinkedCall::Handle(
       zone, UnlinkedCall::RawCast(
                 unlinked_call_map.InsertOrGetValue(pc, unlinked_call)));
-  RELEASE_ASSERT(new_or_old_value.raw() == unlinked_call.raw());
+  RELEASE_ASSERT(new_or_old_value.ptr() == unlinked_call.ptr());
   isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release());
 }
 
@@ -1422,7 +1422,7 @@
   const auto& unlinked_call = UnlinkedCall::Cast(
       Object::Handle(zone, unlinked_call_map.GetOrDie(pc_integer)));
   isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release());
-  return unlinked_call.raw();
+  return unlinked_call.ptr();
 }
 
 // NOTE: Right now we never delete [UnlinkedCall] objects. They are needed while
@@ -1578,8 +1578,8 @@
           ? NewICData()
           : NewICDataWithTarget(receiver().GetClassId(), target_function));
 
-  Object& object = Object::Handle(zone_, ic_data.raw());
-  Code& code = Code::Handle(zone_, StubCode::ICCallThroughCode().raw());
+  Object& object = Object::Handle(zone_, ic_data.ptr());
+  Code& code = Code::Handle(zone_, StubCode::ICCallThroughCode().ptr());
   // If the target function has optional parameters or is generic, its
   // prologue requires ARGS_DESC_REG to be populated. Yet the switchable calls
   // do not populate that on the call site, which is why we don't transition
@@ -1598,12 +1598,12 @@
         Smi::Handle(zone_, Smi::New(receiver().GetClassId()));
 
     if (unlinked.can_patch_to_monomorphic()) {
-      object = expected_cid.raw();
-      code = target_code.raw();
+      object = expected_cid.ptr();
+      code = target_code.ptr();
       ASSERT(code.HasMonomorphicEntry());
     } else {
       object = MonomorphicSmiableCall::New(expected_cid.Value(), target_code);
-      code = StubCode::MonomorphicSmiableCheck().raw();
+      code = StubCode::MonomorphicSmiableCheck().ptr();
     }
   }
   CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, object,
@@ -1620,7 +1620,7 @@
     const Function& target_function,
     intptr_t* lower,
     intptr_t* upper) {
-  if (old_target.raw() != target_function.raw()) {
+  if (old_target.ptr() != target_function.ptr()) {
     return false;
   }
   intptr_t unchecked_lower, unchecked_upper;
@@ -1646,7 +1646,7 @@
   uword pc_offset = pc - code.PayloadStart();
   const PcDescriptors& descriptors =
       PcDescriptors::Handle(zone, code.pc_descriptors());
-  PcDescriptors::Iterator iter(descriptors, PcDescriptorsLayout::kIcCall);
+  PcDescriptors::Iterator iter(descriptors, UntaggedPcDescriptors::kIcCall);
   intptr_t deopt_id = -1;
   while (iter.MoveNext()) {
     if (iter.PcOffset() == pc_offset) {
@@ -1851,15 +1851,15 @@
   ASSERT(ic_data.NumArgsTested() == caller_arguments_.length());
 
   if (ic_data.NumArgsTested() == 1) {
-    ASSERT(old_code.raw() == StubCode::OneArgCheckInlineCache().raw() ||
-           old_code.raw() ==
-               StubCode::OneArgCheckInlineCacheWithExactnessCheck().raw() ||
-           old_code.raw() ==
-               StubCode::OneArgOptimizedCheckInlineCache().raw() ||
-           old_code.raw() ==
+    ASSERT(old_code.ptr() == StubCode::OneArgCheckInlineCache().ptr() ||
+           old_code.ptr() ==
+               StubCode::OneArgCheckInlineCacheWithExactnessCheck().ptr() ||
+           old_code.ptr() ==
+               StubCode::OneArgOptimizedCheckInlineCache().ptr() ||
+           old_code.ptr() ==
                StubCode::OneArgOptimizedCheckInlineCacheWithExactnessCheck()
-                   .raw() ||
-           old_code.raw() == StubCode::ICCallBreakpoint().raw() ||
+                   .ptr() ||
+           old_code.ptr() == StubCode::ICCallBreakpoint().ptr() ||
            (old_code.IsNull() && !should_consider_patching()));
     UpdateICDataWithTarget(ic_data, target_function);
     if (should_consider_patching()) {
@@ -1868,17 +1868,17 @@
     }
     const Code& stub = Code::Handle(
         zone_, ic_data.is_tracking_exactness()
-                   ? StubCode::OneArgCheckInlineCacheWithExactnessCheck().raw()
-                   : StubCode::OneArgCheckInlineCache().raw());
+                   ? StubCode::OneArgCheckInlineCacheWithExactnessCheck().ptr()
+                   : StubCode::OneArgCheckInlineCache().ptr());
     ReturnJIT(stub, ic_data, target_function);
   } else {
-    ASSERT(old_code.raw() == StubCode::TwoArgsCheckInlineCache().raw() ||
-           old_code.raw() == StubCode::SmiAddInlineCache().raw() ||
-           old_code.raw() == StubCode::SmiLessInlineCache().raw() ||
-           old_code.raw() == StubCode::SmiEqualInlineCache().raw() ||
-           old_code.raw() ==
-               StubCode::TwoArgsOptimizedCheckInlineCache().raw() ||
-           old_code.raw() == StubCode::ICCallBreakpoint().raw() ||
+    ASSERT(old_code.ptr() == StubCode::TwoArgsCheckInlineCache().ptr() ||
+           old_code.ptr() == StubCode::SmiAddInlineCache().ptr() ||
+           old_code.ptr() == StubCode::SmiLessInlineCache().ptr() ||
+           old_code.ptr() == StubCode::SmiEqualInlineCache().ptr() ||
+           old_code.ptr() ==
+               StubCode::TwoArgsOptimizedCheckInlineCache().ptr() ||
+           old_code.ptr() == StubCode::ICCallBreakpoint().ptr() ||
            (old_code.IsNull() && !should_consider_patching()));
     UpdateICDataWithTarget(ic_data, target_function);
     ReturnJIT(StubCode::TwoArgsCheckInlineCache(), ic_data, target_function);
@@ -2084,7 +2084,7 @@
                                            &data);
   } else {
     ASSERT(old_data.IsICData() || old_data.IsMegamorphicCache());
-    data = old_data.raw();
+    data = old_data.ptr();
   }
 #endif
   HandleMiss(data, code, target_function);
@@ -2096,21 +2096,21 @@
   switch (old_data.GetClassId()) {
 #if defined(DART_PRECOMPILED_RUNTIME)
     case kUnlinkedCallCid:
-      ASSERT(old_code.raw() == StubCode::SwitchableCallMiss().raw());
+      ASSERT(old_code.ptr() == StubCode::SwitchableCallMiss().ptr());
       DoUnlinkedCallAOT(UnlinkedCall::Cast(old_data), target_function);
       break;
     case kMonomorphicSmiableCallCid:
-      ASSERT(old_code.raw() == StubCode::MonomorphicSmiableCheck().raw());
+      ASSERT(old_code.ptr() == StubCode::MonomorphicSmiableCheck().ptr());
       FALL_THROUGH;
     case kSmiCid:
       DoMonomorphicMissAOT(old_data, target_function);
       break;
     case kSingleTargetCacheCid:
-      ASSERT(old_code.raw() == StubCode::SingleTargetCall().raw());
+      ASSERT(old_code.ptr() == StubCode::SingleTargetCall().ptr());
       DoSingleTargetMissAOT(SingleTargetCache::Cast(old_data), target_function);
       break;
     case kICDataCid:
-      ASSERT(old_code.raw() == StubCode::ICCallThroughCode().raw());
+      ASSERT(old_code.ptr() == StubCode::ICCallThroughCode().ptr());
       DoICDataMissAOT(ICData::Cast(old_data), target_function);
       break;
 #else
@@ -2124,7 +2124,7 @@
       break;
 #endif  // defined(DART_PRECOMPILED_RUNTIME)
     case kMegamorphicCacheCid:
-      ASSERT(old_code.raw() == StubCode::MegamorphicCall().raw() ||
+      ASSERT(old_code.ptr() == StubCode::MegamorphicCall().ptr() ||
              (old_code.IsNull() && !should_consider_patching()));
       DoMegamorphicMiss(MegamorphicCache::Cast(old_data), target_function);
       break;
@@ -2239,7 +2239,7 @@
   ASSERT(!FLAG_lazy_dispatchers);
   const bool is_dynamic_call =
       Function::IsDynamicInvocationForwarderName(target_name);
-  String& demangled_target_name = String::Handle(zone, target_name.raw());
+  String& demangled_target_name = String::Handle(zone, target_name.ptr());
   if (is_dynamic_call) {
     demangled_target_name =
         Function::DemangleDynamicInvocationForwarderName(target_name);
@@ -2276,7 +2276,7 @@
             Function::Handle(zone, function.ImplicitClosureFunction());
         const Object& result = Object::Handle(
             zone, closure_function.ImplicitInstanceClosure(receiver));
-        return result.raw();
+        return result.ptr();
       }
       cls = cls.SuperClass();
     }
@@ -2287,7 +2287,7 @@
     // o.foo(...) failed, invoke noSuchMethod if foo exists but has the wrong
     // number of arguments, or try (o.foo).call(...)
 
-    if ((target_name.raw() == Symbols::Call().raw()) && receiver.IsClosure()) {
+    if ((target_name.ptr() == Symbols::Call().ptr()) && receiver.IsClosure()) {
       // Special case: closures are implemented with a call getter instead of a
       // call method and with lazy dispatchers the field-invocation-dispatcher
       // would perform the closure call.
@@ -2302,7 +2302,7 @@
     const auto& dyn_getter_name = String::Handle(
         zone, is_dynamic_call
                   ? Function::CreateDynamicInvocationForwarderName(getter_name)
-                  : getter_name.raw());
+                  : getter_name.ptr());
     ArgumentsDescriptor args_desc(orig_arguments_desc);
     while (!cls.IsNull()) {
       // If there is a function with the target name but mismatched arguments
@@ -2336,7 +2336,7 @@
         const Object& getter_result = Object::Handle(
             zone, DartEntry::InvokeFunction(function, getter_arguments));
         if (getter_result.IsError()) {
-          return getter_result.raw();
+          return getter_result.ptr();
         }
         ASSERT(getter_result.IsNull() || getter_result.IsInstance());
 
@@ -2352,7 +2352,7 @@
       zone,
       DartEntry::InvokeNoSuchMethod(thread, receiver, demangled_target_name,
                                     orig_arguments, orig_arguments_desc));
-  return result.raw();
+  return result.ptr();
 }
 
 // Invoke appropriate noSuchMethod or closure from getter.
@@ -2396,8 +2396,8 @@
   const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));
 
   String& orig_function_name = String::Handle(zone);
-  if ((function.kind() == FunctionLayout::kClosureFunction) ||
-      (function.kind() == FunctionLayout::kImplicitClosureFunction)) {
+  if ((function.kind() == UntaggedFunction::kClosureFunction) ||
+      (function.kind() == UntaggedFunction::kImplicitClosureFunction)) {
     // For closures, the function name is always 'call'. Replace it with the
     // name of the closurized function so that the exception contains more
     // relevant information.
@@ -2583,7 +2583,7 @@
 
   // If the code of the frame does not match the function's unoptimized code,
   // we bail out since the code was reset by an isolate reload.
-  if (code.raw() != function.unoptimized_code()) {
+  if (code.ptr() != function.unoptimized_code()) {
     return;
   }
 
@@ -2619,7 +2619,7 @@
     const Code& code = Code::Cast(result);
     uword optimized_entry = code.EntryPoint();
     frame->set_pc(optimized_entry);
-    frame->set_pc_marker(code.raw());
+    frame->set_pc_marker(code.ptr());
   }
 }
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
@@ -2710,7 +2710,7 @@
   ASSERT(frame != NULL);
   OS::PrintErr(
       "IC call @%#" Px ": ICData: %#" Px " cnt:%" Pd " nchecks: %" Pd " %s\n",
-      frame->pc(), static_cast<uword>(ic_data.raw()), function.usage_counter(),
+      frame->pc(), static_cast<uword>(ic_data.ptr()), function.usage_counter(),
       ic_data.NumberOfChecks(), function.ToFullyQualifiedCString());
 }
 
@@ -3132,7 +3132,7 @@
     ASSERT(!function.IsNull());
 
     // The code will be the same as before.
-    ASSERT(code.raw() == optimized_code.raw());
+    ASSERT(code.ptr() == optimized_code.ptr());
 
     // Some sanity checking of the optimized code.
     ASSERT(!optimized_code.IsNull() && optimized_code.is_optimized());
@@ -3219,8 +3219,8 @@
   Object& result = Object::Handle(zone, field.InitializeInstance(instance));
   ThrowIfError(result);
   result = instance.GetField(field);
-  ASSERT((result.raw() != Object::sentinel().raw()) &&
-         (result.raw() != Object::transition_sentinel().raw()));
+  ASSERT((result.ptr() != Object::sentinel().ptr()) &&
+         (result.ptr() != Object::transition_sentinel().ptr()));
   arguments.SetReturn(result);
 }
 
@@ -3229,8 +3229,8 @@
   Object& result = Object::Handle(zone, field.InitializeStatic());
   ThrowIfError(result);
   result = field.StaticValue();
-  ASSERT((result.raw() != Object::sentinel().raw()) &&
-         (result.raw() != Object::transition_sentinel().raw()));
+  ASSERT((result.ptr() != Object::sentinel().ptr()) &&
+         (result.ptr() != Object::transition_sentinel().ptr()));
   arguments.SetReturn(result);
 }
 
diff --git a/runtime/vm/scopes.cc b/runtime/vm/scopes.cc
index 155ac8e..3b08708 100644
--- a/runtime/vm/scopes.cc
+++ b/runtime/vm/scopes.cc
@@ -110,7 +110,7 @@
   ASSERT(name.IsSymbol());
   intptr_t num_references = referenced_.length();
   for (intptr_t i = 0; i < num_references; i++) {
-    if (name.raw() == referenced_[i]->name().raw()) {
+    if (name.ptr() == referenced_[i]->name().ptr()) {
       return referenced_[i];
     }
   }
@@ -325,27 +325,27 @@
 // The parser creates internal variables that start with ":"
 static bool IsFilteredIdentifier(const String& str) {
   ASSERT(str.Length() > 0);
-  if (str.raw() == Symbols::AsyncOperation().raw()) {
+  if (str.ptr() == Symbols::AsyncOperation().ptr()) {
     // Keep :async_op for asynchronous debugging.
     return false;
   }
-  if (str.raw() == Symbols::AsyncFuture().raw()) {
+  if (str.ptr() == Symbols::AsyncFuture().ptr()) {
     // Keep :async_future for asynchronous debugging.
     return false;
   }
-  if (str.raw() == Symbols::ControllerStream().raw()) {
+  if (str.ptr() == Symbols::ControllerStream().ptr()) {
     // Keep :controller_stream for asynchronous debugging.
     return false;
   }
-  if (str.raw() == Symbols::AwaitJumpVar().raw()) {
+  if (str.ptr() == Symbols::AwaitJumpVar().ptr()) {
     // Keep :await_jump_var for asynchronous debugging.
     return false;
   }
-  if (str.raw() == Symbols::is_sync().raw()) {
+  if (str.ptr() == Symbols::is_sync().ptr()) {
     // Keep :is_sync for asynchronous debugging.
     return false;
   }
-  if (str.raw() == Symbols::FunctionTypeArgumentsVar().raw()) {
+  if (str.ptr() == Symbols::FunctionTypeArgumentsVar().ptr()) {
     // Keep :function_type_arguments for accessing type variables in debugging.
     return false;
   }
@@ -365,9 +365,9 @@
     ASSERT(func.IsLocalFunction());
     for (int i = 0; i < context_scope.num_variables(); i++) {
       String& name = String::Handle(context_scope.NameAt(i));
-      LocalVarDescriptorsLayout::VarInfoKind kind;
+      UntaggedLocalVarDescriptors::VarInfoKind kind;
       if (!IsFilteredIdentifier(name)) {
-        kind = LocalVarDescriptorsLayout::kContextVar;
+        kind = UntaggedLocalVarDescriptors::kContextVar;
       } else {
         continue;
       }
@@ -399,12 +399,12 @@
   for (int i = 0; i < this->variables_.length(); i++) {
     LocalVariable* var = variables_[i];
     if ((var->owner() == this) && !var->is_invisible()) {
-      if (var->name().raw() == Symbols::CurrentContextVar().raw()) {
+      if (var->name().ptr() == Symbols::CurrentContextVar().ptr()) {
         // This is the local variable in which the function saves its
         // own context before calling a closure function.
         LocalVarDescriptorsBuilder::VarDesc desc;
         desc.name = &var->name();
-        desc.info.set_kind(LocalVarDescriptorsLayout::kSavedCurrentContext);
+        desc.info.set_kind(UntaggedLocalVarDescriptors::kSavedCurrentContext);
         desc.info.scope_id = 0;
         desc.info.declaration_pos = TokenPosition::kMinSource;
         desc.info.begin_pos = TokenPosition::kMinSource;
@@ -416,12 +416,12 @@
         LocalVarDescriptorsBuilder::VarDesc desc;
         desc.name = &var->name();
         if (var->is_captured()) {
-          desc.info.set_kind(LocalVarDescriptorsLayout::kContextVar);
+          desc.info.set_kind(UntaggedLocalVarDescriptors::kContextVar);
           ASSERT(var->owner() != NULL);
           ASSERT(var->owner()->context_level() >= 0);
           desc.info.scope_id = var->owner()->context_level();
         } else {
-          desc.info.set_kind(LocalVarDescriptorsLayout::kStackVar);
+          desc.info.set_kind(UntaggedLocalVarDescriptors::kStackVar);
           desc.info.scope_id = *scope_id;
         }
         desc.info.set_index(var->index().value());
@@ -443,7 +443,7 @@
   ASSERT(name.IsSymbol());
   for (intptr_t i = 0; i < labels_.length(); i++) {
     SourceLabel* label = labels_[i];
-    if (label->name().raw() == name.raw()) {
+    if (label->name().ptr() == name.ptr()) {
       return label;
     }
   }
@@ -455,7 +455,7 @@
   for (intptr_t i = 0; i < variables_.length(); i++) {
     LocalVariable* var = variables_[i];
     ASSERT(var->name().IsSymbol());
-    if (var->name().raw() == name.raw()) {
+    if (var->name().ptr() == name.ptr()) {
       return var;
     }
   }
@@ -641,7 +641,7 @@
     }
   }
   ASSERT(context_scope.num_variables() == captured_idx);  // Verify count.
-  return context_scope.raw();
+  return context_scope.ptr();
 }
 
 LocalScope* LocalScope::RestoreOuterScope(const ContextScope& context_scope) {
@@ -693,11 +693,11 @@
     for (intptr_t i = 0; i < scope->num_variables(); i++) {
       LocalVariable* variable = scope->VariableAt(i);
       if (variable->is_forced_stack() ||
-          (variable->name().raw() == Symbols::ExceptionVar().raw()) ||
-          (variable->name().raw() == Symbols::SavedTryContextVar().raw()) ||
-          (variable->name().raw() == Symbols::ArgDescVar().raw()) ||
-          (variable->name().raw() ==
-           Symbols::FunctionTypeArgumentsVar().raw())) {
+          (variable->name().ptr() == Symbols::ExceptionVar().ptr()) ||
+          (variable->name().ptr() == Symbols::SavedTryContextVar().ptr()) ||
+          (variable->name().ptr() == Symbols::ArgDescVar().ptr()) ||
+          (variable->name().ptr() ==
+           Symbols::FunctionTypeArgumentsVar().ptr())) {
         // Don't capture those variables because the VM expects them to be on
         // the stack.
         continue;
@@ -727,7 +727,7 @@
   context_scope.SetContextIndexAt(0, 0);
   context_scope.SetContextLevelAt(0, 0);
   ASSERT(context_scope.num_variables() == kNumCapturedVars);  // Verify count.
-  return context_scope.raw();
+  return context_scope.ptr();
 }
 
 bool LocalVariable::Equals(const LocalVariable& other) const {
@@ -777,7 +777,7 @@
 
     VarDesc desc;
     desc.name = &Symbols::Empty();  // No name.
-    desc.info.set_kind(LocalVarDescriptorsLayout::kContextLevel);
+    desc.info.set_kind(UntaggedLocalVarDescriptors::kContextLevel);
     desc.info.scope_id = 0;
     // We repurpose the token position fields to store deopt IDs in this case.
     desc.info.begin_pos = TokenPosition::Deserialize(start_deopt_id);
@@ -791,14 +791,14 @@
 
 LocalVarDescriptorsPtr LocalVarDescriptorsBuilder::Done() {
   if (vars_.is_empty()) {
-    return Object::empty_var_descriptors().raw();
+    return Object::empty_var_descriptors().ptr();
   }
   const LocalVarDescriptors& var_desc =
       LocalVarDescriptors::Handle(LocalVarDescriptors::New(vars_.length()));
   for (int i = 0; i < vars_.length(); i++) {
     var_desc.SetVar(i, *(vars_[i].name), &vars_[i].info);
   }
-  return var_desc.raw();
+  return var_desc.ptr();
 }
 
 }  // namespace dart
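
The name comparisons above (label->name().ptr() == name.ptr(), str.ptr() == Symbols::AsyncOperation().ptr(), and so on) rely on pointer identity instead of character comparison. That is valid only because the names are canonicalized symbols, as the ASSERT(name.IsSymbol()) guards document: equal contents imply the same heap object. A rough model of the invariant, with a toy intern table in place of the VM's symbol machinery:

#include <cassert>
#include <string>
#include <unordered_map>

// Toy intern table standing in for the VM's Symbols namespace.
class SymbolTable {
 public:
  const std::string* Intern(const std::string& s) {
    auto it = table_.emplace(s, s).first;
    return &it->second;  // one canonical object per distinct content
  }
 private:
  std::unordered_map<std::string, std::string> table_;
};

int main() {
  SymbolTable symbols;
  const std::string* a = symbols.Intern(":async_op");
  const std::string* b = symbols.Intern(":async_op");
  // Pointer identity substitutes for content comparison, as in
  // str.ptr() == Symbols::AsyncOperation().ptr().
  assert(a == b);
  return 0;
}
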
diff --git a/runtime/vm/scopes.h b/runtime/vm/scopes.h
index b003324..593e186 100644
--- a/runtime/vm/scopes.h
+++ b/runtime/vm/scopes.h
@@ -259,7 +259,7 @@
  public:
   struct VarDesc {
     const String* name;
-    LocalVarDescriptorsLayout::VarInfo info;
+    UntaggedLocalVarDescriptors::VarInfo info;
   };
 
   LocalVarDescriptorsBuilder() : vars_(8) {}
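
LocalVarDescriptorsBuilder follows an accumulate-then-materialize pattern: callers Add() one VarDesc per visible variable, and Done() either returns the shared empty descriptor table or copies the entries into a freshly allocated LocalVarDescriptors. A simplified sketch of that shape, using hypothetical plain-C++ types in place of the VM's handles:

#include <cassert>
#include <string>
#include <vector>

// Hypothetical mirror of UntaggedLocalVarDescriptors::VarInfoKind.
enum class VarInfoKind { kStackVar, kContextVar, kSavedCurrentContext };

struct VarDesc {  // parallels LocalVarDescriptorsBuilder::VarDesc
  std::string name;
  VarInfoKind kind;
  int scope_id;
  int index;
};

class VarDescBuilder {
 public:
  void Add(VarDesc desc) { vars_.push_back(std::move(desc)); }
  // Done(): hand back the shared empty table when nothing was added
  // (as the real Done() returns Object::empty_var_descriptors().ptr()),
  // otherwise materialize the accumulated entries.
  std::vector<VarDesc> Done() const {
    if (vars_.empty()) return {};
    return vars_;
  }
 private:
  std::vector<VarDesc> vars_;
};

int main() {
  VarDescBuilder builder;
  builder.Add({":current_context_var", VarInfoKind::kSavedCurrentContext, 0, 0});
  assert(builder.Done().size() == 1);
  return 0;
}
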
diff --git a/runtime/vm/service.cc b/runtime/vm/service.cc
index 8a87782..a4a8c0b 100644
--- a/runtime/vm/service.cc
+++ b/runtime/vm/service.cc
@@ -101,7 +101,7 @@
   Thread* thread = Thread::Current();
   Zone* zone = thread->zone();
   ASSERT(zone != NULL);
-  const intptr_t id = ring_->GetIdForObject(obj.raw(), policy_);
+  const intptr_t id = ring_->GetIdForObject(obj.ptr(), policy_);
   return zone->PrintToString("objects/%" Pd "", id);
 }
 
@@ -213,7 +213,7 @@
     Exceptions::PropagateError(error);
     return Object::null();
   }
-  return object.raw();
+  return object.ptr();
 }
 
 static void PrintMissingParamError(JSONStream* js, const char* param) {
@@ -1702,11 +1702,11 @@
     }
     const Integer& obj =
         Integer::Handle(thread->zone(), Smi::New(static_cast<intptr_t>(value)));
-    return obj.raw();
+    return obj.ptr();
   } else if (strcmp(arg, "bool-true") == 0) {
-    return Bool::True().raw();
+    return Bool::True().ptr();
   } else if (strcmp(arg, "bool-false") == 0) {
-    return Bool::False().raw();
+    return Bool::False().ptr();
   } else if (strcmp(arg, "null") == 0) {
     return Object::null();
   }
@@ -1727,23 +1727,23 @@
   auto zone = thread->zone();
 
   if (num_parts != 4) {
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
 
   const char* encoded_id = parts[3];
   auto& id = String::Handle(String::New(encoded_id));
   id = String::DecodeIRI(id);
   if (id.IsNull()) {
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
 
   if (strcmp(parts[2], "fields") == 0) {
     // Field ids look like: "classes/17/fields/name"
     const auto& field = Field::Handle(klass.LookupField(id));
     if (field.IsNull()) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
-    return field.raw();
+    return field.ptr();
   }
   if (strcmp(parts[2], "functions") == 0) {
     // Function ids look like: "classes/17/functions/name"
@@ -1751,52 +1751,52 @@
     const auto& function =
         Function::Handle(Resolver::ResolveFunction(zone, klass, id));
     if (function.IsNull()) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
-    return function.raw();
+    return function.ptr();
   }
   if (strcmp(parts[2], "implicit_closures") == 0) {
     // Function ids look like: "classes/17/implicit_closures/11"
     intptr_t id;
     if (!GetIntegerId(parts[3], &id)) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
     const auto& func =
         Function::Handle(zone, klass.ImplicitClosureFunctionFromIndex(id));
     if (func.IsNull()) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
-    return func.raw();
+    return func.ptr();
   }
   if (strcmp(parts[2], "dispatchers") == 0) {
     // Dispatcher Function ids look like: "classes/17/dispatchers/11"
     intptr_t id;
     if (!GetIntegerId(parts[3], &id)) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
     const auto& func =
         Function::Handle(zone, klass.InvocationDispatcherFunctionFromIndex(id));
     if (func.IsNull()) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
-    return func.raw();
+    return func.ptr();
   }
   if (strcmp(parts[2], "closures") == 0) {
     // Closure ids look like: "classes/17/closures/11"
     intptr_t id;
     if (!GetIntegerId(parts[3], &id)) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
     Function& func = Function::Handle(zone);
     func = ClosureFunctionsCache::ClosureFunctionFromIndex(id);
     if (func.IsNull()) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
-    return func.raw();
+    return func.ptr();
   }
 
   UNREACHABLE();
-  return Object::sentinel().raw();
+  return Object::sentinel().ptr();
 }
 
 static ObjectPtr LookupHeapObjectLibraries(IsolateGroup* isolate_group,
@@ -1804,7 +1804,7 @@
                                            int num_parts) {
   // Library ids look like "libraries/35"
   if (num_parts < 2) {
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
   const auto& libs =
       GrowableObjectArray::Handle(isolate_group->object_store()->libraries());
@@ -1824,14 +1824,14 @@
     }
   }
   if (!lib_found) {
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
 
   const auto& klass = Class::Handle(lib.toplevel_class());
   ASSERT(!klass.IsNull());
 
   if (num_parts == 2) {
-    return lib.raw();
+    return lib.ptr();
   }
   if (strcmp(parts[2], "fields") == 0) {
     // Library field ids look like: "libraries/17/fields/name"
@@ -1853,7 +1853,7 @@
   if (strcmp(parts[2], "scripts") == 0) {
     // Script ids look like "libraries/35/scripts/library%2Furl.dart/12345"
     if (num_parts != 5) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
     const String& id = String::Handle(String::New(parts[3]));
     ASSERT(!id.IsNull());
@@ -1863,7 +1863,7 @@
     // Each script id is tagged with a load time.
     int64_t timestamp;
     if (!GetInteger64Id(parts[4], &timestamp, 16) || (timestamp < 0)) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
 
     Script& script = Script::Handle();
@@ -1877,13 +1877,13 @@
       script_url = script.url();
       if (script_url.Equals(requested_url) &&
           (timestamp == script.load_timestamp())) {
-        return script.raw();
+        return script.ptr();
       }
     }
   }
 
   // Not found.
-  return Object::sentinel().raw();
+  return Object::sentinel().ptr();
 }
 
 static ObjectPtr LookupHeapObjectClasses(Thread* thread,
@@ -1891,17 +1891,17 @@
                                          int num_parts) {
   // Class ids look like: "classes/17"
   if (num_parts < 2) {
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
   Zone* zone = thread->zone();
   auto table = thread->isolate_group()->class_table();
   intptr_t id;
   if (!GetIntegerId(parts[1], &id) || !table->IsValidIndex(id)) {
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
   Class& cls = Class::Handle(zone, table->At(id));
   if (num_parts == 2) {
-    return cls.raw();
+    return cls.ptr();
   }
   if (strcmp(parts[2], "closures") == 0) {
     // Closure ids look like: "classes/17/closures/11"
@@ -1921,23 +1921,23 @@
   } else if (strcmp(parts[2], "types") == 0) {
     // Type ids look like: "classes/17/types/11"
     if (num_parts != 4) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
     intptr_t id;
     if (!GetIntegerId(parts[3], &id)) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
     if (id != 0) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
     const Type& type = Type::Handle(zone, cls.DeclarationType());
     if (!type.IsNull()) {
-      return type.raw();
+      return type.ptr();
     }
   }
 
   // Not found.
-  return Object::sentinel().raw();
+  return Object::sentinel().ptr();
 }
 
 static ObjectPtr LookupHeapObjectTypeArguments(Thread* thread,
@@ -1945,11 +1945,11 @@
                                                int num_parts) {
   // TypeArguments ids look like: "typearguments/17"
   if (num_parts < 2) {
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
   intptr_t id;
   if (!GetIntegerId(parts[1], &id)) {
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
   ObjectStore* object_store = thread->isolate_group()->object_store();
   const Array& table =
@@ -1957,14 +1957,14 @@
   ASSERT(table.Length() > 0);
   const intptr_t table_size = table.Length() - 1;
   if ((id < 0) || (id >= table_size) || (table.At(id) == Object::null())) {
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
   return table.At(id);
 }
 
 static ObjectPtr LookupHeapObjectCode(char** parts, int num_parts) {
   if (num_parts != 2) {
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
   uword pc;
   static const char* const kCollectedPrefix = "collected-";
@@ -1976,53 +1976,53 @@
   const char* id = parts[1];
   if (strncmp(kCollectedPrefix, id, kCollectedPrefixLen) == 0) {
     if (!GetUnsignedIntegerId(&id[kCollectedPrefixLen], &pc, 16)) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
     // TODO(turnidge): Return "collected" instead.
     return Object::null();
   }
   if (strncmp(kNativePrefix, id, kNativePrefixLen) == 0) {
     if (!GetUnsignedIntegerId(&id[kNativePrefixLen], &pc, 16)) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
     // TODO(johnmccutchan): Support native Code.
     return Object::null();
   }
   if (strncmp(kReusedPrefix, id, kReusedPrefixLen) == 0) {
     if (!GetUnsignedIntegerId(&id[kReusedPrefixLen], &pc, 16)) {
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
     // TODO(turnidge): Return "expired" instead.
     return Object::null();
   }
   int64_t timestamp = 0;
   if (!GetCodeId(id, &timestamp, &pc) || (timestamp < 0)) {
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
   Code& code = Code::Handle(Code::FindCode(pc, timestamp));
   if (!code.IsNull()) {
-    return code.raw();
+    return code.ptr();
   }
 
   // Not found.
-  return Object::sentinel().raw();
+  return Object::sentinel().ptr();
 }
 
 static ObjectPtr LookupHeapObjectMessage(Thread* thread,
                                          char** parts,
                                          int num_parts) {
   if (num_parts != 2) {
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
   uword message_id = 0;
   if (!GetUnsignedIntegerId(parts[1], &message_id, 16)) {
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
   MessageHandler::AcquiredQueues aq(thread->isolate()->message_handler());
   Message* message = aq.queue()->FindMessageById(message_id);
   if (message == NULL) {
     // The user may try to load an expired message.
-    return Object::sentinel().raw();
+    return Object::sentinel().ptr();
   }
   if (message->IsRaw()) {
     return message->raw_obj();
@@ -2073,9 +2073,9 @@
       if (result != NULL) {
         *result = lookup_result;
       }
-      return Object::sentinel().raw();
+      return Object::sentinel().ptr();
     }
-    return obj.raw();
+    return obj.ptr();
 
   } else if (strcmp(parts[0], "libraries") == 0) {
     return LookupHeapObjectLibraries(isolate->group(), parts, num_parts);
@@ -2090,7 +2090,7 @@
   }
 
   // Not found.
-  return Object::sentinel().raw();
+  return Object::sentinel().ptr();
 }
 
 static Breakpoint* LookupBreakpoint(Isolate* isolate,
@@ -2200,7 +2200,7 @@
     HANDLESCOPE(thread);
     obj = LookupHeapObject(thread, target_id, &lookup_result);
   }
-  if (obj.raw() == Object::sentinel().raw()) {
+  if (obj.ptr() == Object::sentinel().ptr()) {
     if (lookup_result == ObjectIdRing::kCollected) {
       PrintSentinel(js, kCollectedSentinel);
     } else if (lookup_result == ObjectIdRing::kExpired) {
@@ -2263,7 +2263,7 @@
           }
         }
       } else if (element.IsWeakProperty()) {
-        wp ^= static_cast<WeakPropertyPtr>(element.raw());
+        wp ^= static_cast<WeakPropertyPtr>(element.ptr());
         element = wp.key();
         jselement.AddProperty("parentMapKey", element);
       } else if (element.IsInstance()) {
@@ -2325,7 +2325,7 @@
     HANDLESCOPE(thread);
     obj = LookupHeapObject(thread, target_id, &lookup_result);
   }
-  if (obj.raw() == Object::sentinel().raw()) {
+  if (obj.ptr() == Object::sentinel().ptr()) {
     if (lookup_result == ObjectIdRing::kCollected) {
       PrintSentinel(js, kCollectedSentinel);
     } else if (lookup_result == ObjectIdRing::kExpired) {
@@ -2350,7 +2350,7 @@
   ObjectIdRing::LookupResult lookup_result;
   Object& obj =
       Object::Handle(LookupHeapObject(thread, target_id, &lookup_result));
-  if (obj.raw() == Object::sentinel().raw()) {
+  if (obj.ptr() == Object::sentinel().ptr()) {
     if (lookup_result == ObjectIdRing::kCollected) {
       PrintSentinel(js, kCollectedSentinel);
     } else if (lookup_result == ObjectIdRing::kExpired) {
@@ -2390,7 +2390,7 @@
   ObjectIdRing::LookupResult lookup_result;
   Object& obj =
       Object::Handle(LookupHeapObject(thread, target_id, &lookup_result));
-  if (obj.raw() == Object::sentinel().raw()) {
+  if (obj.ptr() == Object::sentinel().ptr()) {
     if (lookup_result == ObjectIdRing::kCollected) {
       PrintSentinel(js, kCollectedSentinel);
     } else if (lookup_result == ObjectIdRing::kExpired) {
@@ -2451,7 +2451,7 @@
   ObjectIdRing::LookupResult lookup_result;
   Object& receiver = Object::Handle(
       zone, LookupHeapObject(thread, receiver_id, &lookup_result));
-  if (receiver.raw() == Object::sentinel().raw()) {
+  if (receiver.ptr() == Object::sentinel().ptr()) {
     if (lookup_result == ObjectIdRing::kCollected) {
       PrintSentinel(js, kCollectedSentinel);
     } else if (lookup_result == ObjectIdRing::kExpired) {
@@ -2501,7 +2501,7 @@
         PrintInvalidParamError(js, "argumentIds");
         return true;
       }
-      if (argument.raw() == Object::sentinel().raw()) {
+      if (argument.ptr() == Object::sentinel().ptr()) {
         if (lookup_result == ObjectIdRing::kCollected) {
           PrintSentinel(js, kCollectedSentinel);
         } else if (lookup_result == ObjectIdRing::kExpired) {
@@ -2539,7 +2539,7 @@
   if (is_instance) {
     // We don't use Instance::Cast here because it doesn't allow null.
     Instance& instance = Instance::Handle(zone);
-    instance ^= receiver.raw();
+    instance ^= receiver.ptr();
     const Object& result =
         Object::Handle(zone, instance.Invoke(selector, args, arg_names));
     result.PrintJSON(js, true);
@@ -2642,7 +2642,7 @@
     for (intptr_t i = 0; i < cids.length(); i++) {
       ObjectIdRing::LookupResult lookup_result;
       obj = LookupHeapObject(thread, cids[i], &lookup_result);
-      if (obj.raw() == Object::sentinel().raw()) {
+      if (obj.ptr() == Object::sentinel().ptr()) {
         if (lookup_result == ObjectIdRing::kCollected) {
           PrintSentinel(js, kCollectedSentinel);
         } else if (lookup_result == ObjectIdRing::kExpired) {
@@ -2749,7 +2749,7 @@
     ObjectIdRing::LookupResult lookup_result;
     Object& obj = Object::Handle(
         zone, LookupHeapObject(thread, target_id, &lookup_result));
-    if (obj.raw() == Object::sentinel().raw()) {
+    if (obj.ptr() == Object::sentinel().ptr()) {
       PrintInvalidParamError(js, "targetId");
       return true;
     }
@@ -2761,11 +2761,11 @@
                                  !ContainsNonInstance(obj))) {
       Class& cls = Class::Handle(zone);
       if (obj.IsClass()) {
-        cls ^= obj.raw();
+        cls ^= obj.ptr();
         isStatic = true;
       } else {
         Instance& instance = Instance::Handle(zone);
-        instance ^= obj.raw();
+        instance ^= obj.ptr();
         cls = instance.clazz();
         isStatic = false;
       }
@@ -3004,7 +3004,7 @@
     ObjectIdRing::LookupResult lookup_result;
     Object& obj = Object::Handle(
         zone, LookupHeapObject(thread, target_id, &lookup_result));
-    if (obj.raw() == Object::sentinel().raw()) {
+    if (obj.ptr() == Object::sentinel().ptr()) {
       if (lookup_result == ObjectIdRing::kCollected) {
         PrintSentinel(js, kCollectedSentinel);
       } else if (lookup_result == ObjectIdRing::kExpired) {
@@ -3042,7 +3042,7 @@
     if ((obj.IsInstance() || obj.IsNull()) && !ContainsNonInstance(obj)) {
       // We don't use Instance::Cast here because it doesn't allow null.
       Instance& instance = Instance::Handle(zone);
-      instance ^= obj.raw();
+      instance ^= obj.ptr();
       const Class& receiver_cls = Class::Handle(zone, instance.clazz());
       const Object& result = Object::Handle(
           zone,
@@ -3136,7 +3136,7 @@
   }
 
   const Object& obj = Object::Handle(LookupHeapObject(thread, object_id, NULL));
-  if (obj.raw() == Object::sentinel().raw() || !obj.IsClass()) {
+  if (obj.ptr() == Object::sentinel().ptr() || !obj.IsClass()) {
     PrintInvalidParamError(js, "objectId");
     return true;
   }
@@ -3256,11 +3256,11 @@
     const char* script_id_param = js->LookupParam("scriptId");
     const Object& obj =
         Object::Handle(LookupHeapObject(thread, script_id_param, NULL));
-    if (obj.raw() == Object::sentinel().raw() || !obj.IsScript()) {
+    if (obj.ptr() == Object::sentinel().ptr() || !obj.IsScript()) {
       PrintInvalidParamError(js, "scriptId");
       return true;
     }
-    script ^= obj.raw();
+    script ^= obj.ptr();
   } else {
     if (js->HasParam("tokenPos")) {
       js->PrintError(
@@ -3358,7 +3358,7 @@
       return isolate->PausePostRequest();
     }
   }
-  return error.raw();
+  return error.ptr();
 }
 
 static bool AddBreakpointCommon(Thread* thread,
@@ -3409,7 +3409,7 @@
 
   const char* script_id_param = js->LookupParam("scriptId");
   Object& obj = Object::Handle(LookupHeapObject(thread, script_id_param, NULL));
-  if (obj.raw() == Object::sentinel().raw() || !obj.IsScript()) {
+  if (obj.ptr() == Object::sentinel().ptr() || !obj.IsScript()) {
     PrintInvalidParamError(js, "scriptId");
     return true;
   }
@@ -3450,7 +3450,7 @@
 
   const char* function_id = js->LookupParam("functionId");
   Object& obj = Object::Handle(LookupHeapObject(thread, function_id, NULL));
-  if (obj.raw() == Object::sentinel().raw() || !obj.IsFunction()) {
+  if (obj.ptr() == Object::sentinel().ptr() || !obj.IsFunction()) {
     PrintInvalidParamError(js, "functionId");
     return true;
   }
@@ -3480,7 +3480,7 @@
 
   const char* object_id = js->LookupParam("objectId");
   Object& obj = Object::Handle(LookupHeapObject(thread, object_id, NULL));
-  if (obj.raw() == Object::sentinel().raw() || !obj.IsInstance()) {
+  if (obj.ptr() == Object::sentinel().ptr() || !obj.IsInstance()) {
     PrintInvalidParamError(js, "objectId");
     return true;
   }
@@ -3534,7 +3534,7 @@
   const Class& metrics_cls =
       Class::Handle(zone, prof_lib.LookupClass(metrics_cls_name));
   ASSERT(!metrics_cls.IsNull());
-  return metrics_cls.raw();
+  return metrics_cls.ptr();
 }
 
 static bool HandleNativeMetricsList(Thread* thread, JSONStream* js) {
@@ -4367,18 +4367,18 @@
   void Append(PersistentHandle* persistent_handle) {
     JSONObject obj(handles_);
     obj.AddProperty("type", "_PersistentHandle");
-    const Object& object = Object::Handle(persistent_handle->raw());
+    const Object& object = Object::Handle(persistent_handle->ptr());
     obj.AddProperty("object", object);
   }
 
   void Append(FinalizablePersistentHandle* weak_persistent_handle) {
-    if (!weak_persistent_handle->raw()->IsHeapObject()) {
+    if (!weak_persistent_handle->ptr()->IsHeapObject()) {
       return;  // Free handle.
     }
 
     JSONObject obj(handles_);
     obj.AddProperty("type", "_WeakPersistentHandle");
-    const Object& object = Object::Handle(weak_persistent_handle->raw());
+    const Object& object = Object::Handle(weak_persistent_handle->ptr());
     obj.AddProperty("object", object);
     obj.AddPropertyF(
         "peer", "0x%" Px "",
@@ -4503,7 +4503,7 @@
   // Handle heap objects.
   ObjectIdRing::LookupResult lookup_result;
   Object& obj = Object::Handle(LookupHeapObject(thread, id, &lookup_result));
-  if (obj.raw() != Object::sentinel().raw()) {
+  if (obj.ptr() != Object::sentinel().ptr()) {
 #if !defined(DART_PRECOMPILED_RUNTIME)
     // If obj is a script from dart:* and doesn't have source loaded, try and
     // load the source before sending the response.
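
The service.cc helpers above all share one convention: a string ID such as "classes/17/fields/name" is split into parts, resolved step by step, and every failure path returns Object::sentinel().ptr(), keeping null free to mean a legitimately null result. A stripped-down sketch of that parse-and-dispatch shape; the function and its -1 sentinel are invented for illustration:

#include <cstdio>
#include <cstdlib>
#include <cstring>

// Hypothetical miniature of the "classes/<id>" scheme resolved by
// LookupHeapObjectClasses. -1 plays the role of Object::sentinel().ptr():
// a distinguished "not found" value, leaving 0 and up as valid results.
static long LookupClassId(char** parts, int num_parts) {
  if (num_parts < 2 || std::strcmp(parts[0], "classes") != 0) return -1;
  char* end = nullptr;
  long id = std::strtol(parts[1], &end, 10);
  if (end == parts[1] || *end != '\0' || id < 0) return -1;  // malformed id
  return id;
}

int main() {
  char p0[] = "classes", p1[] = "17";
  char* parts[] = {p0, p1};
  std::printf("%ld\n", LookupClassId(parts, 2));  // prints 17
  return 0;
}
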
diff --git a/runtime/vm/service_isolate.cc b/runtime/vm/service_isolate.cc
index 6258f55..e01ccd4 100644
--- a/runtime/vm/service_isolate.cc
+++ b/runtime/vm/service_isolate.cc
@@ -60,7 +60,7 @@
   list.SetAt(1, port_int);
   list.SetAt(2, send_port);
   list.SetAt(3, name);
-  return list.raw();
+  return list.ptr();
 }
 
 static ArrayPtr MakeServerControlMessage(const SendPort& sp,
@@ -73,7 +73,7 @@
   list.SetAt(1, sp);
   list.SetAt(2, Bool::Get(enable));
   list.SetAt(3, silenceOutput);
-  return list.raw();
+  return list.ptr();
 }
 
 const char* ServiceIsolate::kName = DART_VM_SERVICE_ISOLATE_NAME;
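
Both MakeServiceControlMessage and MakeServerControlMessage pack their payload into a fixed four-slot array, and the service isolate's Dart side unpacks each slot by position. A tiny model of that positional-tuple convention follows; the types and the 42 message code are placeholders, since the real code sends a VM Array through a port:

#include <array>
#include <cassert>
#include <string>
#include <variant>

using Slot = std::variant<long, bool, std::string>;
using ControlMessage = std::array<Slot, 4>;  // fixed positions, like SetAt(0..3)

static ControlMessage MakeServerControlMessage(long sp, bool enable,
                                               bool silence_output) {
  // Slot 0 carries a message-type code in the real protocol; 42 is made up.
  return {Slot{42L}, Slot{sp}, Slot{enable}, Slot{silence_output}};
}

int main() {
  ControlMessage m = MakeServerControlMessage(7, true, false);
  assert(std::get<bool>(m[2]) == true);  // the reader unpacks by index
  return 0;
}
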
diff --git a/runtime/vm/service_test.cc b/runtime/vm/service_test.cc
index 515a2b2..c078398 100644
--- a/runtime/vm/service_test.cc
+++ b/runtime/vm/service_test.cc
@@ -55,12 +55,12 @@
     }
     if (response_obj.IsString()) {
       String& response = String::Handle();
-      response ^= response_obj.raw();
+      response ^= response_obj.ptr();
       _msg = Utils::StrDup(response.ToCString());
     } else {
       ASSERT(response_obj.IsArray());
       Array& response_array = Array::Handle();
-      response_array ^= response_obj.raw();
+      response_array ^= response_obj.ptr();
       ASSERT(response_array.Length() == 1);
       ExternalTypedData& response = ExternalTypedData::Handle();
       response ^= response_array.At(0);
@@ -101,7 +101,7 @@
   growable.Add(dummy_isolate_id);
   array = Array::MakeFixedLength(growable);
   result.SetAt(5, array);
-  return result.raw();
+  return result.ptr();
 }
 
 static ArrayPtr EvalF(Dart_Handle lib, const char* fmt, ...) {
@@ -123,14 +123,14 @@
   const Function& result = Function::Handle(Resolver::ResolveDynamicFunction(
       Thread::Current()->zone(), cls, String::Handle(String::New(name))));
   EXPECT(!result.IsNull());
-  return result.raw();
+  return result.ptr();
 }
 
 static ClassPtr GetClass(const Library& lib, const char* name) {
   const Class& cls = Class::Handle(
       lib.LookupClass(String::Handle(Symbols::New(Thread::Current(), name))));
   EXPECT(!cls.IsNull());  // No ambiguity error expected.
-  return cls.raw();
+  return cls.ptr();
 }
 
 static void HandleIsolateMessage(Isolate* isolate, const Array& msg) {
@@ -386,7 +386,7 @@
       PcDescriptors::Handle(code_c.pc_descriptors());
   EXPECT(!descriptors.IsNull());
   ObjectIdRing* ring = isolate->EnsureObjectIdRing();
-  intptr_t id = ring->GetIdForObject(descriptors.raw());
+  intptr_t id = ring->GetIdForObject(descriptors.ptr());
 
   // Build a mock message handler and wrap it in a dart port.
   ServiceTestMessageHandler handler;
@@ -457,7 +457,7 @@
       LocalVarDescriptors::Handle(code_c.GetLocalVarDescriptors());
   // Generate an ID for this object.
   ObjectIdRing* ring = isolate->EnsureObjectIdRing();
-  intptr_t id = ring->GetIdForObject(descriptors.raw());
+  intptr_t id = ring->GetIdForObject(descriptors.ptr());
 
   // Build a mock message handler and wrap it in a dart port.
   ServiceTestMessageHandler handler;
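
The test code above uses the VM handle idiom response ^= response_obj.ptr(): operator^= on a typed handle is a checked down-cast from a generic object pointer, asserting the dynamic type before storing it. A rough model, with dynamic_cast standing in for the VM's class-id check:

#include <cassert>

struct Obj { virtual ~Obj() = default; };
struct Str : Obj {};

// Hypothetical typed handle whose operator^= mimics the VM's checked
// assignment: verify the dynamic type, then store the pointer.
class StrHandle {
 public:
  void operator^=(Obj* o) {
    Str* s = dynamic_cast<Str*>(o);  // the VM checks the class id instead
    assert(s != nullptr);
    ptr_ = s;
  }
  Str* ptr() const { return ptr_; }
 private:
  Str* ptr_ = nullptr;
};

int main() {
  Str str;
  Obj* generic = &str;
  StrHandle response;
  response ^= generic;  // as in: response ^= response_obj.ptr()
  assert(response.ptr() == &str);
  return 0;
}
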
diff --git a/runtime/vm/simulator_arm.cc b/runtime/vm/simulator_arm.cc
index 7eccbd8..fe33fb5 100644
--- a/runtime/vm/simulator_arm.cc
+++ b/runtime/vm/simulator_arm.cc
@@ -238,7 +238,7 @@
   uword pc_offset = pc - code.PayloadStart();
   const PcDescriptors& descriptors =
       PcDescriptors::Handle(code.pc_descriptors());
-  PcDescriptors::Iterator iter(descriptors, PcDescriptorsLayout::kAnyKind);
+  PcDescriptors::Iterator iter(descriptors, UntaggedPcDescriptors::kAnyKind);
   while (iter.MoveNext()) {
     if (iter.PcOffset() == pc_offset) {
       return iter.TokenPos();
diff --git a/runtime/vm/simulator_arm64.cc b/runtime/vm/simulator_arm64.cc
index a055948..1c78f20 100644
--- a/runtime/vm/simulator_arm64.cc
+++ b/runtime/vm/simulator_arm64.cc
@@ -266,7 +266,7 @@
   uword pc_offset = pc - code.PayloadStart();
   const PcDescriptors& descriptors =
       PcDescriptors::Handle(code.pc_descriptors());
-  PcDescriptors::Iterator iter(descriptors, PcDescriptorsLayout::kAnyKind);
+  PcDescriptors::Iterator iter(descriptors, UntaggedPcDescriptors::kAnyKind);
   while (iter.MoveNext()) {
     if (iter.PcOffset() == pc_offset) {
       return iter.TokenPos();
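
Both simulator ports resolve a pc back to a token position the same way: iterate the code's PcDescriptors with a kind filter (UntaggedPcDescriptors::kAnyKind after this rename) and match on the pc offset. A simplified table scan with assumed field names; the real table is bit-packed rather than a flat vector:

#include <cstdint>
#include <vector>

// Hypothetical flattened descriptor record.
struct PcDescriptor {
  uint32_t pc_offset;
  int32_t token_pos;
  uint32_t kind_bits;
};

constexpr uint32_t kAnyKind = ~0u;  // like UntaggedPcDescriptors::kAnyKind

// Mirrors the loop above: iterate, filter by kind, match on pc offset.
static int32_t TokenPosAt(const std::vector<PcDescriptor>& table,
                          uint32_t pc_offset, uint32_t kind_mask) {
  for (const PcDescriptor& d : table) {
    if ((d.kind_bits & kind_mask) != 0 && d.pc_offset == pc_offset) {
      return d.token_pos;
    }
  }
  return -1;  // no-source-position fallback
}

int main() {
  std::vector<PcDescriptor> table = {{0x10, 7, 1u}, {0x24, 9, 2u}};
  return TokenPosAt(table, 0x24, kAnyKind) == 9 ? 0 : 1;
}
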
diff --git a/runtime/vm/snapshot.cc b/runtime/vm/snapshot.cc
index d174d7c..5202079 100644
--- a/runtime/vm/snapshot.cc
+++ b/runtime/vm/snapshot.cc
@@ -327,17 +327,17 @@
     }
     Object& result = Object::Handle(zone_);
     if (backward_references_->length() > 0) {
-      result = (*backward_references_)[0].reference()->raw();
+      result = (*backward_references_)[0].reference()->ptr();
     } else {
-      result = obj.raw();
+      result = obj.ptr();
     }
     RunDelayedTypePostprocessing();
     const Object& ok = Object::Handle(zone_, RunDelayedRehashingOfMaps());
     objects_to_rehash_ = GrowableObjectArray::null();
     if (!ok.IsNull()) {
-      return ok.raw();
+      return ok.ptr();
     }
-    return result.raw();
+    return result.ptr();
   } else {
     // An error occurred while reading; return the error object.
     return Thread::Current()->StealStickyError();
@@ -408,7 +408,7 @@
         "Invalid object found in message: library is not found or loaded.");
   }
   str_ ^= ReadObjectImpl(kAsInlinedObject);
-  if (str_.raw() == Symbols::TopLevel().raw()) {
+  if (str_.ptr() == Symbols::TopLevel().ptr()) {
     cls = library_.toplevel_class();
   } else {
     str_ = String::New(String::ScrubName(str_));
@@ -418,7 +418,7 @@
     SetReadException("Invalid object found in message: class not found");
   }
   cls.EnsureIsFinalized(thread());
-  return cls.raw();
+  return cls.ptr();
 }
 
 ObjectPtr SnapshotReader::ReadStaticImplicitClosure(intptr_t object_id,
@@ -481,7 +481,7 @@
   } else {
     obj = func.ImplicitStaticClosure();
   }
-  return obj.raw();
+  return obj.ptr();
 }
 
 intptr_t SnapshotReader::NextAvailableObjectId() const {
@@ -537,7 +537,7 @@
   // Read the class header information.
   intptr_t class_header = Read<int32_t>();
   intptr_t tags = ReadTags();
-  bool read_as_reference = as_reference && !ObjectLayout::IsCanonical(tags);
+  bool read_as_reference = as_reference && !UntaggedObject::IsCanonical(tags);
   intptr_t header_id = SerializedHeaderData::decode(class_header);
   if (header_id == kInstanceObjectId) {
     return ReadInstance(object_id, tags, read_as_reference);
@@ -561,7 +561,7 @@
 #define SNAPSHOT_READ(clazz) case kTypedData##clazz##Cid:
 
     CLASS_LIST_TYPED_DATA(SNAPSHOT_READ) {
-      tags = ObjectLayout::ClassIdTag::update(class_id, tags);
+      tags = UntaggedObject::ClassIdTag::update(class_id, tags);
       pobj_ =
           TypedData::ReadFrom(this, object_id, tags, kind_, read_as_reference);
       break;
@@ -570,7 +570,7 @@
 #define SNAPSHOT_READ(clazz) case kExternalTypedData##clazz##Cid:
 
     CLASS_LIST_TYPED_DATA(SNAPSHOT_READ) {
-      tags = ObjectLayout::ClassIdTag::update(class_id, tags);
+      tags = UntaggedObject::ClassIdTag::update(class_id, tags);
       pobj_ = ExternalTypedData::ReadFrom(this, object_id, tags, kind_, true);
       break;
     }
@@ -579,7 +579,7 @@
 
     case kByteDataViewCid:
       CLASS_LIST_TYPED_DATA(SNAPSHOT_READ) {
-        tags = ObjectLayout::ClassIdTag::update(class_id, tags);
+        tags = UntaggedObject::ClassIdTag::update(class_id, tags);
         pobj_ = TypedDataView::ReadFrom(this, object_id, tags, kind_, true);
         break;
       }
@@ -592,7 +592,7 @@
       UNREACHABLE();
       break;
   }
-  return pobj_.raw();
+  return pobj_.ptr();
 }
 
 void SnapshotReader::EnqueueRehashingOfSet(const Object& set) {
@@ -643,7 +643,7 @@
     ASSERT(next_field_offset > 0);
     // Instance::NextFieldOffset() returns the offset of the first field in
     // a Dart object.
-    bool read_as_reference = ObjectLayout::IsCanonical(tags) ? false : true;
+    bool read_as_reference = UntaggedObject::IsCanonical(tags) ? false : true;
     intptr_t offset = Instance::NextFieldOffset();
     intptr_t result_cid = result->GetClassId();
 
@@ -663,7 +663,7 @@
         if ((offset != type_argument_field_offset) &&
             (kind_ == Snapshot::kMessage) &&
             isolate_group()->use_field_guards() &&
-            (pobj_.raw() != Object::sentinel().raw())) {
+            (pobj_.ptr() != Object::sentinel().ptr())) {
           // TODO(fschneider): Consider hoisting these lookups out of the loop.
           // This would involve creating a handle, since cls_ can't be reused
           // across the call to ReadObjectImpl.
@@ -672,7 +672,7 @@
           field_ ^= array_.At(offset >> kWordSizeLog2);
           ASSERT(!field_.IsNull());
           ASSERT(field_.HostOffset() == offset);
-          obj_ = pobj_.raw();
+          obj_ = pobj_.ptr();
           field_.RecordStore(obj_);
         }
         // TODO(fschneider): Verify the guarded cid and length for other kinds
@@ -680,12 +680,12 @@
       }
       offset += kWordSize;
     }
-    if (ObjectLayout::IsCanonical(tags)) {
+    if (UntaggedObject::IsCanonical(tags)) {
       *result = result->Canonicalize(thread());
       ASSERT(!result->IsNull());
     }
   }
-  return result->raw();
+  return result->ptr();
 }
 
 void SnapshotReader::AddBackRef(intptr_t id,
@@ -809,29 +809,29 @@
 
   // First check if it is one of the singleton objects.
   READ_VM_SINGLETON_OBJ(kNullObject, Object::null());
-  READ_VM_SINGLETON_OBJ(kSentinelObject, Object::sentinel().raw());
+  READ_VM_SINGLETON_OBJ(kSentinelObject, Object::sentinel().ptr());
   READ_VM_SINGLETON_OBJ(kTransitionSentinelObject,
-                        Object::transition_sentinel().raw());
-  READ_VM_SINGLETON_OBJ(kEmptyArrayObject, Object::empty_array().raw());
-  READ_VM_SINGLETON_OBJ(kZeroArrayObject, Object::zero_array().raw());
-  READ_VM_SINGLETON_OBJ(kDynamicType, Object::dynamic_type().raw());
-  READ_VM_SINGLETON_OBJ(kVoidType, Object::void_type().raw());
+                        Object::transition_sentinel().ptr());
+  READ_VM_SINGLETON_OBJ(kEmptyArrayObject, Object::empty_array().ptr());
+  READ_VM_SINGLETON_OBJ(kZeroArrayObject, Object::zero_array().ptr());
+  READ_VM_SINGLETON_OBJ(kDynamicType, Object::dynamic_type().ptr());
+  READ_VM_SINGLETON_OBJ(kVoidType, Object::void_type().ptr());
   READ_VM_SINGLETON_OBJ(kEmptyTypeArguments,
-                        Object::empty_type_arguments().raw());
-  READ_VM_SINGLETON_OBJ(kTrueValue, Bool::True().raw());
-  READ_VM_SINGLETON_OBJ(kFalseValue, Bool::False().raw());
+                        Object::empty_type_arguments().ptr());
+  READ_VM_SINGLETON_OBJ(kTrueValue, Bool::True().ptr());
+  READ_VM_SINGLETON_OBJ(kFalseValue, Bool::False().ptr());
   READ_VM_SINGLETON_OBJ(kExtractorParameterTypes,
-                        Object::extractor_parameter_types().raw());
+                        Object::extractor_parameter_types().ptr());
   READ_VM_SINGLETON_OBJ(kExtractorParameterNames,
-                        Object::extractor_parameter_names().raw());
+                        Object::extractor_parameter_names().ptr());
   READ_VM_SINGLETON_OBJ(kEmptyContextScopeObject,
-                        Object::empty_context_scope().raw());
-  READ_VM_SINGLETON_OBJ(kEmptyObjectPool, Object::empty_object_pool().raw());
-  READ_VM_SINGLETON_OBJ(kEmptyDescriptors, Object::empty_descriptors().raw());
+                        Object::empty_context_scope().ptr());
+  READ_VM_SINGLETON_OBJ(kEmptyObjectPool, Object::empty_object_pool().ptr());
+  READ_VM_SINGLETON_OBJ(kEmptyDescriptors, Object::empty_descriptors().ptr());
   READ_VM_SINGLETON_OBJ(kEmptyVarDescriptors,
-                        Object::empty_var_descriptors().raw());
+                        Object::empty_var_descriptors().ptr());
   READ_VM_SINGLETON_OBJ(kEmptyExceptionHandlers,
-                        Object::empty_exception_handlers().raw());
+                        Object::empty_exception_handlers().ptr());
 
   // Check if it is a double.
   if (object_id == kDoubleObject) {
@@ -878,7 +878,7 @@
   if (index < max_vm_isolate_object_id_) {
     return VmIsolateSnapshotObject(index);
   }
-  return GetBackRef(object_id)->raw();
+  return GetBackRef(object_id)->ptr();
 }
 
 void SnapshotReader::ArrayReadFrom(intptr_t object_id,
@@ -889,7 +889,7 @@
   *TypeArgumentsHandle() ^= ReadObjectImpl(kAsInlinedObject);
   result.SetTypeArguments(*TypeArgumentsHandle());
 
-  bool as_reference = ObjectLayout::IsCanonical(tags) ? false : true;
+  bool as_reference = UntaggedObject::IsCanonical(tags) ? false : true;
   for (intptr_t i = 0; i < len; i++) {
     *PassiveObjectHandle() = ReadObjectImpl(as_reference);
     result.SetAt(i, *PassiveObjectHandle());
@@ -931,25 +931,25 @@
 }
 
 uint32_t SnapshotWriter::GetObjectTags(ObjectPtr raw) {
-  uword tags = raw->ptr()->tags_;
+  uword tags = raw->untag()->tags_;
 #if defined(HASH_IN_OBJECT_HEADER)
   // Clear hash to make the narrowing cast safe / appease UBSAN.
-  tags = ObjectLayout::HashTag::update(0, tags);
+  tags = UntaggedObject::HashTag::update(0, tags);
 #endif
   return tags;
 }
 
-uint32_t SnapshotWriter::GetObjectTags(ObjectLayout* raw) {
+uint32_t SnapshotWriter::GetObjectTags(UntaggedObject* raw) {
   uword tags = raw->tags_;
 #if defined(HASH_IN_OBJECT_HEADER)
   // Clear hash to make the narrowing cast safe / appease UBSAN.
-  tags = ObjectLayout::HashTag::update(0, tags);
+  tags = UntaggedObject::HashTag::update(0, tags);
 #endif
   return tags;
 }
 
 uword SnapshotWriter::GetObjectTagsAndHash(ObjectPtr raw) {
-  return raw->ptr()->tags_;
+  return raw->untag()->tags_;
 }
 
 #define VM_OBJECT_CLASS_LIST(V)                                                \
@@ -963,7 +963,7 @@
   case clazz::kClassId: {                                                      \
     object_id = forward_list_->AddObject(zone(), rawobj, kIsSerialized);       \
     clazz##Ptr raw_obj = static_cast<clazz##Ptr>(rawobj);                      \
-    raw_obj->ptr()->WriteTo(this, object_id, kind(), false);                   \
+    raw_obj->untag()->WriteTo(this, object_id, kind(), false);                 \
     return true;                                                               \
   }
 
@@ -976,28 +976,28 @@
 bool SnapshotWriter::HandleVMIsolateObject(ObjectPtr rawobj) {
   // Check if it is one of the singleton VM objects.
   WRITE_VM_SINGLETON_OBJ(Object::null(), kNullObject);
-  WRITE_VM_SINGLETON_OBJ(Object::sentinel().raw(), kSentinelObject);
-  WRITE_VM_SINGLETON_OBJ(Object::transition_sentinel().raw(),
+  WRITE_VM_SINGLETON_OBJ(Object::sentinel().ptr(), kSentinelObject);
+  WRITE_VM_SINGLETON_OBJ(Object::transition_sentinel().ptr(),
                          kTransitionSentinelObject);
-  WRITE_VM_SINGLETON_OBJ(Object::empty_array().raw(), kEmptyArrayObject);
-  WRITE_VM_SINGLETON_OBJ(Object::zero_array().raw(), kZeroArrayObject);
-  WRITE_VM_SINGLETON_OBJ(Object::dynamic_type().raw(), kDynamicType);
-  WRITE_VM_SINGLETON_OBJ(Object::void_type().raw(), kVoidType);
-  WRITE_VM_SINGLETON_OBJ(Object::empty_type_arguments().raw(),
+  WRITE_VM_SINGLETON_OBJ(Object::empty_array().ptr(), kEmptyArrayObject);
+  WRITE_VM_SINGLETON_OBJ(Object::zero_array().ptr(), kZeroArrayObject);
+  WRITE_VM_SINGLETON_OBJ(Object::dynamic_type().ptr(), kDynamicType);
+  WRITE_VM_SINGLETON_OBJ(Object::void_type().ptr(), kVoidType);
+  WRITE_VM_SINGLETON_OBJ(Object::empty_type_arguments().ptr(),
                          kEmptyTypeArguments);
-  WRITE_VM_SINGLETON_OBJ(Bool::True().raw(), kTrueValue);
-  WRITE_VM_SINGLETON_OBJ(Bool::False().raw(), kFalseValue);
-  WRITE_VM_SINGLETON_OBJ(Object::extractor_parameter_types().raw(),
+  WRITE_VM_SINGLETON_OBJ(Bool::True().ptr(), kTrueValue);
+  WRITE_VM_SINGLETON_OBJ(Bool::False().ptr(), kFalseValue);
+  WRITE_VM_SINGLETON_OBJ(Object::extractor_parameter_types().ptr(),
                          kExtractorParameterTypes);
-  WRITE_VM_SINGLETON_OBJ(Object::extractor_parameter_names().raw(),
+  WRITE_VM_SINGLETON_OBJ(Object::extractor_parameter_names().ptr(),
                          kExtractorParameterNames);
-  WRITE_VM_SINGLETON_OBJ(Object::empty_context_scope().raw(),
+  WRITE_VM_SINGLETON_OBJ(Object::empty_context_scope().ptr(),
                          kEmptyContextScopeObject);
-  WRITE_VM_SINGLETON_OBJ(Object::empty_object_pool().raw(), kEmptyObjectPool);
-  WRITE_VM_SINGLETON_OBJ(Object::empty_descriptors().raw(), kEmptyDescriptors);
-  WRITE_VM_SINGLETON_OBJ(Object::empty_var_descriptors().raw(),
+  WRITE_VM_SINGLETON_OBJ(Object::empty_object_pool().ptr(), kEmptyObjectPool);
+  WRITE_VM_SINGLETON_OBJ(Object::empty_descriptors().ptr(), kEmptyDescriptors);
+  WRITE_VM_SINGLETON_OBJ(Object::empty_var_descriptors().ptr(),
                          kEmptyVarDescriptors);
-  WRITE_VM_SINGLETON_OBJ(Object::empty_exception_handlers().raw(),
+  WRITE_VM_SINGLETON_OBJ(Object::empty_exception_handlers().ptr(),
                          kEmptyExceptionHandlers);
 
   // Check if it is a singleton class object which is shared by
@@ -1005,7 +1005,7 @@
   intptr_t id = rawobj->GetClassId();
   if (id == kClassCid) {
     ClassPtr raw_class = static_cast<ClassPtr>(rawobj);
-    intptr_t class_id = raw_class->ptr()->id_;
+    intptr_t class_id = raw_class->untag()->id_;
     if (IsSingletonClassId(class_id)) {
       intptr_t object_id = ObjectIdFromClassId(class_id);
       WriteVMIsolateObject(object_id);
@@ -1038,7 +1038,7 @@
     return true;
   } else {
     // We do this check down here, because it's quite expensive.
-    if (!rawobj->ptr()->InVMIsolateHeap()) {
+    if (!rawobj->untag()->InVMIsolateHeap()) {
       return false;
     }
 
@@ -1047,7 +1047,7 @@
       case kTypedDataUint32ArrayCid: {
         object_id = forward_list_->AddObject(zone(), rawobj, kIsSerialized);
         TypedDataPtr raw_obj = static_cast<TypedDataPtr>(rawobj);
-        raw_obj->ptr()->WriteTo(this, object_id, kind(), false);
+        raw_obj->untag()->WriteTo(this, object_id, kind(), false);
         return true;
       }
       default:
@@ -1096,7 +1096,7 @@
 intptr_t ForwardList::FindObject(ObjectPtr raw) {
   NoSafepointScope no_safepoint;
   intptr_t id = GetObjectId(raw);
-  ASSERT(id == 0 || NodeForObjectId(id)->obj()->raw() == raw);
+  ASSERT(id == 0 || NodeForObjectId(id)->obj()->ptr() == raw);
   return (id == 0) ? static_cast<intptr_t>(kInvalidIndex) : id;
 }
 
@@ -1135,7 +1135,7 @@
   if ((kind_ == Snapshot::kMessage) && (cid == kDoubleCid)) {
     WriteVMIsolateObject(kDoubleObject);
     DoublePtr rd = static_cast<DoublePtr>(rawobj);
-    WriteDouble(rd->ptr()->value_);
+    WriteDouble(rd->untag()->value_);
     return true;
   }
 
@@ -1165,7 +1165,7 @@
   // Check if it is an internal VM class which is in the object store.
   if (cid == kClassCid) {
     ClassPtr raw_class = static_cast<ClassPtr>(rawobj);
-    intptr_t class_id = raw_class->ptr()->id_;
+    intptr_t class_id = raw_class->untag()->id_;
     if (IsBootstrapedClassId(class_id)) {
       intptr_t object_id = ObjectIdFromClassId(class_id);
       WriteIndexedObject(object_id);
@@ -1191,7 +1191,7 @@
 
   // When we know that we are dealing with leaf or shallow objects, we write
   // these objects inline even when 'as_reference' is true.
-  const bool write_as_reference = as_reference && !raw->ptr()->IsCanonical();
+  const bool write_as_reference = as_reference && !raw->untag()->IsCanonical();
   uintptr_t tags = GetObjectTagsAndHash(raw);
 
   // Add object to the forward ref list and mark it so that future references
@@ -1216,9 +1216,9 @@
                                            intptr_t object_id,
                                            bool as_reference) {
   NoSafepointScope no_safepoint;
-  ClassPtr cls = class_table_->At(ObjectLayout::ClassIdTag::decode(tags));
-  intptr_t class_id = cls->ptr()->id_;
-  ASSERT(class_id == ObjectLayout::ClassIdTag::decode(tags));
+  ClassPtr cls = class_table_->At(UntaggedObject::ClassIdTag::decode(tags));
+  intptr_t class_id = cls->untag()->id_;
+  ASSERT(class_id == UntaggedObject::ClassIdTag::decode(tags));
   if (class_id >= kNumPredefinedCids || IsImplicitFieldClassId(class_id)) {
     WriteInstance(raw, cls, tags, object_id, as_reference);
     return;
@@ -1227,7 +1227,7 @@
 #define SNAPSHOT_WRITE(clazz)                                                  \
   case clazz::kClassId: {                                                      \
     clazz##Ptr raw_obj = static_cast<clazz##Ptr>(raw);                         \
-    raw_obj->ptr()->WriteTo(this, object_id, kind_, as_reference);             \
+    raw_obj->untag()->WriteTo(this, object_id, kind_, as_reference);           \
     return;                                                                    \
   }
 
@@ -1237,7 +1237,7 @@
 
     CLASS_LIST_TYPED_DATA(SNAPSHOT_WRITE) {
       TypedDataPtr raw_obj = static_cast<TypedDataPtr>(raw);
-      raw_obj->ptr()->WriteTo(this, object_id, kind_, as_reference);
+      raw_obj->untag()->WriteTo(this, object_id, kind_, as_reference);
       return;
     }
 #undef SNAPSHOT_WRITE
@@ -1245,7 +1245,7 @@
 
     CLASS_LIST_TYPED_DATA(SNAPSHOT_WRITE) {
       ExternalTypedDataPtr raw_obj = static_cast<ExternalTypedDataPtr>(raw);
-      raw_obj->ptr()->WriteTo(this, object_id, kind_, as_reference);
+      raw_obj->untag()->WriteTo(this, object_id, kind_, as_reference);
       return;
     }
 #undef SNAPSHOT_WRITE
@@ -1254,7 +1254,7 @@
     case kByteDataViewCid:
       CLASS_LIST_TYPED_DATA(SNAPSHOT_WRITE) {
         auto raw_obj = static_cast<TypedDataViewPtr>(raw);
-        raw_obj->ptr()->WriteTo(this, object_id, kind_, as_reference);
+        raw_obj->untag()->WriteTo(this, object_id, kind_, as_reference);
         return;
       }
 #undef SNAPSHOT_WRITE
@@ -1312,7 +1312,7 @@
        ++id) {
     if (!NodeForObjectId(id)->is_serialized()) {
       // Write the object out in the stream.
-      ObjectPtr raw = NodeForObjectId(id)->obj()->raw();
+      ObjectPtr raw = NodeForObjectId(id)->obj()->ptr();
       writer->VisitObject(raw);
 
       // Mark object as serialized.
@@ -1322,7 +1322,7 @@
   first_unprocessed_object_id_ = next_object_id();
 }
 
-void SnapshotWriter::WriteClassId(ClassLayout* cls) {
+void SnapshotWriter::WriteClassId(UntaggedClass* cls) {
   ASSERT(!Snapshot::IsFull(kind_));
   int class_id = cls->id_;
   ASSERT(!IsSingletonClassId(class_id) && !IsBootstrapedClassId(class_id));
@@ -1330,7 +1330,7 @@
   // Write out the library url and class name.
   LibraryPtr library = cls->library();
   ASSERT(library != Library::null());
-  WriteObjectImpl(library->ptr()->url_, kAsInlinedObject);
+  WriteObjectImpl(library->untag()->url_, kAsInlinedObject);
   WriteObjectImpl(cls->name(), kAsInlinedObject);
 }
 
@@ -1351,11 +1351,11 @@
   // Write out the library url, class name and signature function name.
   ClassPtr cls = GetFunctionOwner(func);
   ASSERT(cls != Class::null());
-  LibraryPtr library = cls->ptr()->library();
+  LibraryPtr library = cls->untag()->library();
   ASSERT(library != Library::null());
-  WriteObjectImpl(library->ptr()->url(), kAsInlinedObject);
-  WriteObjectImpl(cls->ptr()->name(), kAsInlinedObject);
-  WriteObjectImpl(func->ptr()->name(), kAsInlinedObject);
+  WriteObjectImpl(library->untag()->url(), kAsInlinedObject);
+  WriteObjectImpl(cls->untag()->name(), kAsInlinedObject);
+  WriteObjectImpl(func->untag()->name(), kAsInlinedObject);
   WriteObjectImpl(delayed_type_arguments, kAsInlinedObject);
 }
 
@@ -1393,7 +1393,7 @@
     WriteObjectImpl(type_arguments, kAsInlinedObject);
 
     // Write out the individual object ids.
-    bool write_as_reference = ObjectLayout::IsCanonical(tags) ? false : true;
+    bool write_as_reference = UntaggedObject::IsCanonical(tags) ? false : true;
     for (intptr_t i = 0; i < len; i++) {
       WriteObjectImpl(data[i], write_as_reference);
     }
@@ -1403,7 +1403,7 @@
 FunctionPtr SnapshotWriter::IsSerializableClosure(ClosurePtr closure) {
   // Extract the function object to check if this closure
   // can be sent in an isolate message.
-  FunctionPtr func = closure->ptr()->function();
+  FunctionPtr func = closure->untag()->function();
   // We only allow closures of top-level methods or static functions in a
   // class to be sent in isolate messages.
   if (can_send_any_object() &&
@@ -1427,18 +1427,18 @@
 }
 
 ClassPtr SnapshotWriter::GetFunctionOwner(FunctionPtr func) {
-  ObjectPtr owner = func->ptr()->owner();
+  ObjectPtr owner = func->untag()->owner();
   uword tags = GetObjectTags(owner);
-  intptr_t class_id = ObjectLayout::ClassIdTag::decode(tags);
+  intptr_t class_id = UntaggedObject::ClassIdTag::decode(tags);
   if (class_id == kClassCid) {
     return static_cast<ClassPtr>(owner);
   }
   ASSERT(class_id == kPatchClassCid);
-  return static_cast<PatchClassPtr>(owner)->ptr()->patched_class_;
+  return static_cast<PatchClassPtr>(owner)->untag()->patched_class_;
 }
 
 void SnapshotWriter::CheckForNativeFields(ClassPtr cls) {
-  if (cls->ptr()->num_native_fields_ != 0) {
+  if (cls->untag()->num_native_fields_ != 0) {
     // We do not allow objects with native fields in an isolate message.
     HANDLESCOPE(thread());
     const Class& clazz = Class::Handle(zone(), cls);
@@ -1463,7 +1463,7 @@
                                    intptr_t tags,
                                    intptr_t object_id,
                                    bool as_reference) {
-  // Closure instances are handled by ClosureLayout::WriteTo().
+  // Closure instances are handled by UntaggedClosure::WriteTo().
   ASSERT(!Class::IsClosureClass(cls));
 
   // Check if the instance has native fields and throw an exception if it does.
@@ -1499,23 +1499,23 @@
 
     const auto unboxed_fields =
         isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(
-            cls->ptr()->id_);
+            cls->untag()->id_);
 
     // Write out all the fields for the object.
     // Instance::NextFieldOffset() returns the offset of the first field in
     // a Dart object.
-    bool write_as_reference = ObjectLayout::IsCanonical(tags) ? false : true;
+    bool write_as_reference = UntaggedObject::IsCanonical(tags) ? false : true;
 
     intptr_t offset = Instance::NextFieldOffset();
     while (offset < next_field_offset) {
       if (unboxed_fields.Get(offset / kWordSize)) {
         // Writes 32 bits of the unboxed value at a time
         const uword value = *reinterpret_cast<uword*>(
-            reinterpret_cast<uword>(raw->ptr()) + offset);
+            reinterpret_cast<uword>(raw->untag()) + offset);
         WriteWordWith32BitWrites(value);
       } else {
         ObjectPtr raw_obj = *reinterpret_cast<ObjectPtr*>(
-            reinterpret_cast<uword>(raw->ptr()) + offset);
+            reinterpret_cast<uword>(raw->untag()) + offset);
         WriteObjectImpl(raw_obj, write_as_reference);
       }
       offset += kWordSize;
@@ -1545,7 +1545,7 @@
   {
     NoSafepointScope no_safepoint;
     ErrorPtr error = thread()->StealStickyError();
-    ASSERT(error == Object::snapshot_writer_error().raw());
+    ASSERT(error == Object::snapshot_writer_error().ptr());
   }
 
   if (msg != NULL) {
@@ -1610,7 +1610,7 @@
     LongJumpScope jump;
     if (setjmp(*jump.Set()) == 0) {
       NoSafepointScope no_safepoint;
-      WriteObject(obj.raw());
+      WriteObject(obj.ptr());
     } else {
       FreeBuffer();
       has_exception = true;
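
snapshot.cc manipulates object headers through UntaggedObject's tag bitfields: ClassIdTag::decode pulls the class id out of the tag word, ClassIdTag::update splices a new one in, and HashTag::update(0, tags) clears the hash bits before the narrowing cast. A self-contained helper in the same spirit; the field positions below are made up:

#include <cassert>
#include <cstdint>

// Generic bitfield over a tag word, shaped like the VM's BitField usage.
template <typename T, int kShift, int kBits>
struct BitField {
  static constexpr uintptr_t kMask = ((uintptr_t{1} << kBits) - 1) << kShift;
  static T decode(uintptr_t word) {
    return static_cast<T>((word & kMask) >> kShift);
  }
  static uintptr_t update(T value, uintptr_t word) {
    return (word & ~kMask) | ((static_cast<uintptr_t>(value) << kShift) & kMask);
  }
};

// Assumed layout: class id in bits 0..15, hash in bits 16..31.
using ClassIdTag = BitField<uint32_t, 0, 16>;
using HashTag = BitField<uint32_t, 16, 16>;

int main() {
  uintptr_t tags = 0;
  tags = ClassIdTag::update(42, tags);
  tags = HashTag::update(0x1234, tags);
  assert(ClassIdTag::decode(tags) == 42);
  tags = HashTag::update(0, tags);  // clear hash, as GetObjectTags does
  assert(HashTag::decode(tags) == 0);
  return 0;
}
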
diff --git a/runtime/vm/snapshot.h b/runtime/vm/snapshot.h
index 2a829f3..d526847 100644
--- a/runtime/vm/snapshot.h
+++ b/runtime/vm/snapshot.h
@@ -598,7 +598,7 @@
   void WriteObject(ObjectPtr raw);
 
   static uint32_t GetObjectTags(ObjectPtr raw);
-  static uint32_t GetObjectTags(ObjectLayout* raw);
+  static uint32_t GetObjectTags(UntaggedObject* raw);
   static uword GetObjectTagsAndHash(ObjectPtr raw);
 
   Exceptions::ExceptionType exception_type() const { return exception_type_; }
@@ -624,7 +624,7 @@
   bool CheckAndWritePredefinedObject(ObjectPtr raw);
   bool HandleVMIsolateObject(ObjectPtr raw);
 
-  void WriteClassId(ClassLayout* cls);
+  void WriteClassId(UntaggedClass* cls);
   void WriteObjectImpl(ObjectPtr raw, bool as_reference);
   void WriteMarkedObjectImpl(ObjectPtr raw,
                              intptr_t tags,
@@ -661,37 +661,37 @@
   const char* exception_msg_;  // Message associated with exception.
   bool can_send_any_object_;   // True if any Dart instance can be sent.
 
-  friend class ArrayLayout;
-  friend class ClassLayout;
-  friend class CodeLayout;
-  friend class ContextScopeLayout;
-  friend class DynamicLibraryLayout;
-  friend class ExceptionHandlersLayout;
-  friend class FieldLayout;
-  friend class FunctionLayout;
-  friend class GrowableObjectArrayLayout;
-  friend class ImmutableArrayLayout;
-  friend class InstructionsLayout;
-  friend class LibraryLayout;
-  friend class LinkedHashMapLayout;
-  friend class LocalVarDescriptorsLayout;
-  friend class MirrorReferenceLayout;
-  friend class ObjectPoolLayout;
-  friend class PointerLayout;
-  friend class ReceivePortLayout;
-  friend class RegExpLayout;
-  friend class ScriptLayout;
-  friend class StackTraceLayout;
-  friend class SubtypeTestCacheLayout;
-  friend class TransferableTypedDataLayout;
-  friend class TypeLayout;
-  friend class FunctionTypeLayout;
-  friend class TypeArgumentsLayout;
-  friend class TypeParameterLayout;
-  friend class TypeRefLayout;
-  friend class TypedDataViewLayout;
-  friend class UserTagLayout;
-  friend class WeakSerializationReferenceLayout;
+  friend class UntaggedArray;
+  friend class UntaggedClass;
+  friend class UntaggedCode;
+  friend class UntaggedContextScope;
+  friend class UntaggedDynamicLibrary;
+  friend class UntaggedExceptionHandlers;
+  friend class UntaggedField;
+  friend class UntaggedFunction;
+  friend class UntaggedFunctionType;
+  friend class UntaggedGrowableObjectArray;
+  friend class UntaggedImmutableArray;
+  friend class UntaggedInstructions;
+  friend class UntaggedLibrary;
+  friend class UntaggedLinkedHashMap;
+  friend class UntaggedLocalVarDescriptors;
+  friend class UntaggedMirrorReference;
+  friend class UntaggedObjectPool;
+  friend class UntaggedPointer;
+  friend class UntaggedReceivePort;
+  friend class UntaggedRegExp;
+  friend class UntaggedScript;
+  friend class UntaggedStackTrace;
+  friend class UntaggedSubtypeTestCache;
+  friend class UntaggedTransferableTypedData;
+  friend class UntaggedType;
+  friend class UntaggedTypeArguments;
+  friend class UntaggedTypeParameter;
+  friend class UntaggedTypeRef;
+  friend class UntaggedTypedDataView;
+  friend class UntaggedUserTag;
+  friend class UntaggedWeakSerializationReference;
   friend class SnapshotWriterVisitor;
   friend class WriteInlinedObjectVisitor;
   DISALLOW_COPY_AND_ASSIGN(SnapshotWriter);
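
The friend list above is renamed wholesale because each Untagged* class carries its own WriteTo and must reach into SnapshotWriter's private write primitives, while the writer stays closed to everyone else. The arrangement reduced to two mock classes:

class Writer;

// Each untagged layout class serializes itself via the writer's
// private primitives; friendship grants that access selectively.
class UntaggedThing {
 public:
  void WriteTo(Writer* writer) const;
 private:
  int field_ = 7;
};

class Writer {
 public:
  int bytes_written() const { return bytes_; }
 private:
  void WriteInt(int v) { bytes_ += static_cast<int>(sizeof(v)); (void)v; }
  int bytes_ = 0;
  friend class UntaggedThing;  // like the Untagged* friends in snapshot.h
};

void UntaggedThing::WriteTo(Writer* writer) const {
  writer->WriteInt(field_);  // allowed only because of the friend declaration
}

int main() {
  Writer w;
  UntaggedThing t;
  t.WriteTo(&w);
  return w.bytes_written() == static_cast<int>(sizeof(int)) ? 0 : 1;
}
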
diff --git a/runtime/vm/snapshot_test.cc b/runtime/vm/snapshot_test.cc
index a660f5c..16bce51 100644
--- a/runtime/vm/snapshot_test.cc
+++ b/runtime/vm/snapshot_test.cc
@@ -29,7 +29,7 @@
   }
   if (expected.IsSmi()) {
     if (actual.IsSmi()) {
-      return expected.raw() == actual.raw();
+      return expected.ptr() == actual.ptr();
     }
     return false;
   }
@@ -37,15 +37,15 @@
     if (actual.IsDouble()) {
       Double& dbl1 = Double::Handle();
       Double& dbl2 = Double::Handle();
-      dbl1 ^= expected.raw();
-      dbl2 ^= actual.raw();
+      dbl1 ^= expected.ptr();
+      dbl2 ^= actual.ptr();
       return dbl1.value() == dbl2.value();
     }
     return false;
   }
   if (expected.IsBool()) {
     if (actual.IsBool()) {
-      return expected.raw() == actual.raw();
+      return expected.ptr() == actual.ptr();
     }
     return false;
   }
diff --git a/runtime/vm/source_report.cc b/runtime/vm/source_report.cc
index aeba553..03eaf12 100644
--- a/runtime/vm/source_report.cc
+++ b/runtime/vm/source_report.cc
@@ -76,7 +76,7 @@
   }
 
   if (script_ != NULL && !script_->IsNull()) {
-    if (func.script() != script_->raw()) {
+    if (func.script() != script_->ptr()) {
       // The function is from the wrong script.
       return true;
     }
@@ -90,14 +90,14 @@
   if (func.ForceOptimize()) return true;
 
   switch (func.kind()) {
-    case FunctionLayout::kRegularFunction:
-    case FunctionLayout::kClosureFunction:
-    case FunctionLayout::kImplicitClosureFunction:
-    case FunctionLayout::kImplicitStaticGetter:
-    case FunctionLayout::kFieldInitializer:
-    case FunctionLayout::kGetterFunction:
-    case FunctionLayout::kSetterFunction:
-    case FunctionLayout::kConstructor:
+    case UntaggedFunction::kRegularFunction:
+    case UntaggedFunction::kClosureFunction:
+    case UntaggedFunction::kImplicitClosureFunction:
+    case UntaggedFunction::kImplicitStaticGetter:
+    case UntaggedFunction::kFieldInitializer:
+    case UntaggedFunction::kGetterFunction:
+    case UntaggedFunction::kSetterFunction:
+    case UntaggedFunction::kConstructor:
       break;
     default:
       return true;
@@ -123,7 +123,7 @@
   }
 
   if (script_ != NULL && !script_->IsNull()) {
-    if (field.Script() != script_->raw()) {
+    if (field.Script() != script_->ptr()) {
       // The field is from the wrong script.
       return true;
     }
@@ -140,7 +140,7 @@
   ScriptTableEntry wrapper;
   const String& url = String::Handle(zone(), script.url());
   wrapper.key = &url;
-  wrapper.script = &Script::Handle(zone(), script.raw());
+  wrapper.script = &Script::Handle(zone(), script.ptr());
   ScriptTableEntry* pair = script_table_.LookupValue(&wrapper);
   if (pair != NULL) {
     return pair->index;
@@ -169,7 +169,7 @@
     ASSERT(url2.Equals(*url));
     ScriptTableEntry wrapper;
     wrapper.key = &url2;
-    wrapper.script = &Script::Handle(zone(), script->raw());
+    wrapper.script = &Script::Handle(zone(), script->ptr());
     ScriptTableEntry* pair = script_table_.LookupValue(&wrapper);
     ASSERT(i == pair->index);
   }
@@ -180,7 +180,7 @@
                                            const Library& lib) {
   const Array& scripts = Array::Handle(zone(), lib.LoadedScripts());
   for (intptr_t j = 0; j < scripts.Length(); j++) {
-    if (scripts.At(j) == script.raw()) {
+    if (scripts.At(j) == script.ptr()) {
       return true;
     }
   }
@@ -203,7 +203,7 @@
 
   PcDescriptors::Iterator iter(
       descriptors,
-      PcDescriptorsLayout::kIcCall | PcDescriptorsLayout::kUnoptStaticCall);
+      UntaggedPcDescriptors::kIcCall | UntaggedPcDescriptors::kUnoptStaticCall);
   while (iter.MoveNext()) {
     HANDLESCOPE(thread());
     ASSERT(iter.DeoptId() < ic_data_array->length());
@@ -251,7 +251,7 @@
 
   PcDescriptors::Iterator iter(
       descriptors,
-      PcDescriptorsLayout::kIcCall | PcDescriptorsLayout::kUnoptStaticCall);
+      UntaggedPcDescriptors::kIcCall | UntaggedPcDescriptors::kUnoptStaticCall);
   while (iter.MoveNext()) {
     HANDLESCOPE(thread());
     ASSERT(iter.DeoptId() < ic_data_array->length());
@@ -306,9 +306,9 @@
 
   ASSERT(!code.IsNull());
 
-  const uint8_t kSafepointKind =
-      (PcDescriptorsLayout::kIcCall | PcDescriptorsLayout::kUnoptStaticCall |
-       PcDescriptorsLayout::kRuntimeCall);
+  const uint8_t kSafepointKind = (UntaggedPcDescriptors::kIcCall |
+                                  UntaggedPcDescriptors::kUnoptStaticCall |
+                                  UntaggedPcDescriptors::kRuntimeCall);
 
   const PcDescriptors& descriptors =
       PcDescriptors::Handle(zone(), code.pc_descriptors());
@@ -505,7 +505,7 @@
     for (int i = 0; i < functions.Length(); i++) {
       func ^= functions.At(i);
       // Skip getter functions of static const field.
-      if (func.kind() == FunctionLayout::kImplicitStaticGetter) {
+      if (func.kind() == UntaggedFunction::kImplicitStaticGetter) {
         field ^= func.accessor_field();
         if (field.is_const() && field.is_static()) {
           continue;
@@ -589,7 +589,7 @@
       scriptRef ^= scripts.At(j);
       const String& url = String::Handle(zone(), scriptRef.url());
       wrapper.key = &url;
-      wrapper.script = &Script::Handle(zone(), scriptRef.raw());
+      wrapper.script = &Script::Handle(zone(), scriptRef.ptr());
       ScriptTableEntry* pair = local_script_table->LookupValue(&wrapper);
       if (pair != NULL) {
         // Existing one.
diff --git a/runtime/vm/source_report.h b/runtime/vm/source_report.h
index 246f64a..08f0441 100644
--- a/runtime/vm/source_report.h
+++ b/runtime/vm/source_report.h
@@ -108,7 +108,7 @@
     static inline intptr_t Hashcode(Key key) { return key->key->Hash(); }
 
     static inline bool IsKeyEqual(Pair kv, Key key) {
-      return kv->script->raw() == key->script->raw();
+      return kv->script->ptr() == key->script->ptr();
     }
   };
 
diff --git a/runtime/vm/stack_frame.cc b/runtime/vm/stack_frame.cc
index 0f3c3ed..5a6c5a9 100644
--- a/runtime/vm/stack_frame.cc
+++ b/runtime/vm/stack_frame.cc
@@ -26,7 +26,7 @@
 
 namespace dart {
 
-const FrameLayout invalid_frame_layout = {
+const UntaggedFrame invalid_frame_layout = {
     /*.first_object_from_fp = */ -1,
     /*.last_fixed_object_from_fp = */ -1,
     /*.param_end_from_fp = */ -1,
@@ -38,7 +38,7 @@
     /*.exit_link_slot_from_entry_fp = */ -1,
 };
 
-const FrameLayout default_frame_layout = {
+const UntaggedFrame default_frame_layout = {
     /*.first_object_from_fp = */ kFirstObjectSlotFromFp,
     /*.last_fixed_object_from_fp = */ kLastFixedObjectSlotFromFp,
     /*.param_end_from_fp = */ kParamEndSlotFromFp,
@@ -49,7 +49,7 @@
     /*.code_from_fp = */ kPcMarkerSlotFromFp,
     /*.exit_link_slot_from_entry_fp = */ kExitLinkSlotFromEntryFp,
 };
-const FrameLayout bare_instructions_frame_layout = {
+const UntaggedFrame bare_instructions_frame_layout = {
     /*.first_object_from_fp =*/kFirstObjectSlotFromFp,  // No saved PP slot.
     /*.last_fixed_object_from_fp = */ kLastFixedObjectSlotFromFp +
         2,  // No saved CODE, PP slots
@@ -67,19 +67,19 @@
 namespace compiler {
 
 namespace target {
-FrameLayout frame_layout = invalid_frame_layout;
+UntaggedFrame frame_layout = invalid_frame_layout;
 }
 
 }  // namespace compiler
 
-FrameLayout runtime_frame_layout = invalid_frame_layout;
+UntaggedFrame runtime_frame_layout = invalid_frame_layout;
 
-int FrameLayout::FrameSlotForVariable(const LocalVariable* variable) const {
+int UntaggedFrame::FrameSlotForVariable(const LocalVariable* variable) const {
   ASSERT(!variable->is_captured());
   return this->FrameSlotForVariableIndex(variable->index().value());
 }
 
-int FrameLayout::FrameSlotForVariableIndex(int variable_index) const {
+int UntaggedFrame::FrameSlotForVariableIndex(int variable_index) const {
   // Variable indices are:
   //    [1, 2, ..., M] for the M parameters.
   //    [0, -1, -2, ... -(N-1)] for the N [LocalVariable]s
@@ -88,7 +88,7 @@
                              : (variable_index + param_end_from_fp);
 }
 
-void FrameLayout::Init() {
+void UntaggedFrame::Init() {
   // By default we use frames with CODE_REG/PP in the frame.
   compiler::target::frame_layout = default_frame_layout;
   runtime_frame_layout = default_frame_layout;
@@ -401,7 +401,7 @@
 
   intptr_t try_index = -1;
   uword pc_offset = pc() - code.PayloadStart();
-  PcDescriptors::Iterator iter(descriptors, PcDescriptorsLayout::kAnyKind);
+  PcDescriptors::Iterator iter(descriptors, UntaggedPcDescriptors::kAnyKind);
   while (iter.MoveNext()) {
     const intptr_t current_try_index = iter.TryIndex();
     if ((iter.PcOffset() == pc_offset) && (current_try_index != -1)) {
@@ -430,7 +430,7 @@
   const PcDescriptors& descriptors =
       PcDescriptors::Handle(code.pc_descriptors());
   ASSERT(!descriptors.IsNull());
-  PcDescriptors::Iterator iter(descriptors, PcDescriptorsLayout::kAnyKind);
+  PcDescriptors::Iterator iter(descriptors, UntaggedPcDescriptors::kAnyKind);
   while (iter.MoveNext()) {
     if (iter.PcOffset() == pc_offset) {
       return TokenPosition(iter.TokenPos());
@@ -646,7 +646,7 @@
     : index_(0),
       num_materializations_(0),
       dest_frame_size_(0),
-      code_(Code::Handle(code.raw())),
+      code_(Code::Handle(code.ptr())),
       deopt_info_(TypedData::Handle()),
       function_(Function::Handle()),
       pc_(pc),
diff --git a/runtime/vm/stack_frame.h b/runtime/vm/stack_frame.h
index dff0279..92f864f 100644
--- a/runtime/vm/stack_frame.h
+++ b/runtime/vm/stack_frame.h
@@ -28,7 +28,7 @@
 class ObjectPointerVisitor;
 class LocalVariable;
 
-extern FrameLayout runtime_frame_layout;
+extern UntaggedFrame runtime_frame_layout;
 
 // Generic stack frame.
 class StackFrame : public ValueObject {
@@ -365,7 +365,7 @@
 
   FunctionPtr function() const {
     ASSERT(!Done());
-    return function_.raw();
+    return function_.ptr();
   }
 
   uword pc() const {
@@ -375,7 +375,7 @@
 
   CodePtr code() const {
     ASSERT(!Done());
-    return code_.raw();
+    return code_.ptr();
   }
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
diff --git a/runtime/vm/stack_trace.cc b/runtime/vm/stack_trace.cc
index fb01137..ae8849a 100644
--- a/runtime/vm/stack_trace.cc
+++ b/runtime/vm/stack_trace.cc
@@ -22,7 +22,7 @@
 intptr_t GetYieldIndex(const Closure& receiver_closure) {
   const auto& function = Function::Handle(receiver_closure.function());
   if (!function.IsAsyncClosure() && !function.IsAsyncGenClosure()) {
-    return PcDescriptorsLayout::kInvalidYieldIndex;
+    return UntaggedPcDescriptors::kInvalidYieldIndex;
   }
   const auto& await_jump_var =
       Object::Handle(Context::Handle(receiver_closure.context())
@@ -32,10 +32,10 @@
 }
 
 intptr_t FindPcOffset(const PcDescriptors& pc_descs, intptr_t yield_index) {
-  if (yield_index == PcDescriptorsLayout::kInvalidYieldIndex) {
+  if (yield_index == UntaggedPcDescriptors::kInvalidYieldIndex) {
     return 0;
   }
-  PcDescriptors::Iterator iter(pc_descs, PcDescriptorsLayout::kAnyKind);
+  PcDescriptors::Iterator iter(pc_descs, UntaggedPcDescriptors::kAnyKind);
   while (iter.MoveNext()) {
     if (iter.YieldIndex() == yield_index) {
       return iter.PcOffset();
@@ -161,7 +161,7 @@
   }
   ASSERT(callback_.IsClosure());
 
-  return Closure::Cast(callback_).raw();
+  return Closure::Cast(callback_).ptr();
 }
 
 ClosurePtr CallerClosureFinder::FindCallerInAsyncClosure(
@@ -200,8 +200,8 @@
   // callback we found.
   receiver_function_ = Closure::Cast(callback_).function();
   if (!receiver_function_.IsImplicitInstanceClosureFunction() ||
-      receiver_function_.Owner() != stream_iterator_class.raw()) {
-    return Closure::Cast(callback_).raw();
+      receiver_function_.Owner() != stream_iterator_class.ptr()) {
+    return Closure::Cast(callback_).ptr();
   }
 
   // All implicit closure functions (tear-offs) have the "this" receiver
@@ -292,8 +292,8 @@
     ObjectPtr arg = last_object_in_caller[i];
     if (arg->IsHeapObject() && arg->GetClassId() == kClosureCid) {
       closure = Closure::RawCast(arg);
-      if (closure.function() == function.raw()) {
-        return closure.raw();
+      if (closure.function() == function.ptr()) {
+        return closure.ptr();
       }
     }
   }
@@ -339,7 +339,7 @@
 
     // Add the current synchronous frame.
     code = frame->LookupDartCode();
-    ASSERT(function.raw() == code.function());
+    ASSERT(function.ptr() == code.function());
     code_array.Add(code);
     const intptr_t pc_offset = frame->pc() - code.PayloadStart();
     ASSERT(pc_offset > 0 && pc_offset <= code.Size());
@@ -446,7 +446,7 @@
 
     if (!async_function_is_null && !function_is_null &&
         function.parent_function() != Function::null()) {
-      if (async_function.raw() == function.parent_function()) {
+      if (async_function.ptr() == function.parent_function()) {
         if (function.IsAsyncClosure() || function.IsAsyncGenClosure()) {
           ObjectPtr* last_caller_obj =
               reinterpret_cast<ObjectPtr*>(frame->GetCallerSp());
diff --git a/runtime/vm/stub_code.cc b/runtime/vm/stub_code.cc
index ebc7583..7f09adc0 100644
--- a/runtime/vm/stub_code.cc
+++ b/runtime/vm/stub_code.cc
@@ -58,7 +58,7 @@
       ObjectPool::Handle(ObjectPool::NewFromBuilder(object_pool_builder));
 
   for (size_t i = 0; i < ARRAY_SIZE(entries_); i++) {
-    entries_[i].code->set_object_pool(object_pool.raw());
+    entries_[i].code->set_object_pool(object_pool.ptr());
   }
 }
 
@@ -79,7 +79,7 @@
     Disassembler::DisassembleStub(name, code);
   }
 #endif  // !PRODUCT
-  return code.raw();
+  return code.ptr();
 }
 #endif  // defined(DART_PRECOMPILED_RUNTIME)
 
@@ -133,7 +133,7 @@
     view.Set<Code::kSCallTableKindAndOffset>(kind_type_and_offset);
     view.Set<Code::kSCallTableCodeOrTypeTarget>(unresolved_call->target());
   }
-  return static_calls_table.raw();
+  return static_calls_table.ptr();
 }
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
 
@@ -238,7 +238,7 @@
 #endif  // !PRODUCT
   }
 #endif  // !defined(DART_PRECOMPILED_RUNTIME)
-  return stub.raw();
+  return stub.ptr();
 }
 
 CodePtr StubCode::GetAllocationStubForTypedData(classid_t class_id) {
@@ -310,7 +310,7 @@
     Disassembler::DisassembleStub(name, stub);
   }
 #endif  // !PRODUCT
-  return stub.raw();
+  return stub.ptr();
 #else   // !defined(DART_PRECOMPILED_RUNTIME)
   UNIMPLEMENTED();
   return nullptr;
diff --git a/runtime/vm/stub_code_arm64_test.cc b/runtime/vm/stub_code_arm64_test.cc
index d6fb518..ee35305 100644
--- a/runtime/vm/stub_code_arm64_test.cc
+++ b/runtime/vm/stub_code_arm64_test.cc
@@ -30,7 +30,7 @@
       String::ZoneHandle(Symbols::New(Thread::Current(), name));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   Function& function = Function::ZoneHandle(Function::New(
-      signature, function_name, FunctionLayout::kRegularFunction, true, false,
+      signature, function_name, UntaggedFunction::kRegularFunction, true, false,
       false, false, false, owner_class, TokenPosition::kNoSource));
   return &function;
 }
@@ -108,7 +108,7 @@
   const Function& function = RegisterFakeFunction(kName, code);
   Instance& result = Instance::Handle();
   result ^= DartEntry::InvokeFunction(function, Object::empty_array());
-  EXPECT_EQ(Bool::True().raw(), result.raw());
+  EXPECT_EQ(Bool::True().ptr(), result.ptr());
 }
 
 }  // namespace dart
diff --git a/runtime/vm/stub_code_arm_test.cc b/runtime/vm/stub_code_arm_test.cc
index 767e7ba..f5cabff 100644
--- a/runtime/vm/stub_code_arm_test.cc
+++ b/runtime/vm/stub_code_arm_test.cc
@@ -30,7 +30,7 @@
       String::ZoneHandle(Symbols::New(Thread::Current(), name));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   Function& function = Function::ZoneHandle(Function::New(
-      signature, function_name, FunctionLayout::kRegularFunction, true, false,
+      signature, function_name, UntaggedFunction::kRegularFunction, true, false,
       false, false, false, owner_class, TokenPosition::kNoSource));
   return &function;
 }
@@ -106,7 +106,7 @@
   const Function& function = RegisterFakeFunction(kName, code);
   Instance& result = Instance::Handle();
   result ^= DartEntry::InvokeFunction(function, Object::empty_array());
-  EXPECT_EQ(Bool::True().raw(), result.raw());
+  EXPECT_EQ(Bool::True().ptr(), result.ptr());
 }
 
 }  // namespace dart
diff --git a/runtime/vm/stub_code_ia32_test.cc b/runtime/vm/stub_code_ia32_test.cc
index 202b487..f86f895 100644
--- a/runtime/vm/stub_code_ia32_test.cc
+++ b/runtime/vm/stub_code_ia32_test.cc
@@ -30,7 +30,7 @@
       String::ZoneHandle(Symbols::New(Thread::Current(), name));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   Function& function = Function::ZoneHandle(Function::New(
-      signature, function_name, FunctionLayout::kRegularFunction, true, false,
+      signature, function_name, UntaggedFunction::kRegularFunction, true, false,
       false, false, false, owner_class, TokenPosition::kNoSource));
   return &function;
 }
@@ -110,7 +110,7 @@
   const Function& function = RegisterFakeFunction(kName, code);
   Instance& result = Instance::Handle();
   result ^= DartEntry::InvokeFunction(function, Object::empty_array());
-  EXPECT_EQ(Bool::True().raw(), result.raw());
+  EXPECT_EQ(Bool::True().ptr(), result.ptr());
 }
 
 }  // namespace dart
diff --git a/runtime/vm/stub_code_test.cc b/runtime/vm/stub_code_test.cc
index 37ca980..25d1d46 100644
--- a/runtime/vm/stub_code_test.cc
+++ b/runtime/vm/stub_code_test.cc
@@ -20,7 +20,7 @@
   const String& function_name = String::ZoneHandle(Symbols::New(thread, name));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   Function& function = Function::ZoneHandle(Function::New(
-      signature, function_name, FunctionLayout::kRegularFunction, true, false,
+      signature, function_name, UntaggedFunction::kRegularFunction, true, false,
       false, false, false, owner_class, TokenPosition::kNoSource));
   const Array& functions = Array::Handle(Array::New(1));
   functions.SetAt(0, function);
diff --git a/runtime/vm/stub_code_x64_test.cc b/runtime/vm/stub_code_x64_test.cc
index 290e106..909d124 100644
--- a/runtime/vm/stub_code_x64_test.cc
+++ b/runtime/vm/stub_code_x64_test.cc
@@ -30,7 +30,7 @@
       String::ZoneHandle(Symbols::New(Thread::Current(), name));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   Function& function = Function::ZoneHandle(Function::New(
-      signature, function_name, FunctionLayout::kRegularFunction, true, false,
+      signature, function_name, UntaggedFunction::kRegularFunction, true, false,
       false, false, false, owner_class, TokenPosition::kNoSource));
   return &function;
 }
@@ -108,7 +108,7 @@
   const Function& function = RegisterFakeFunction(kName, code);
   Instance& result = Instance::Handle();
   result ^= DartEntry::InvokeFunction(function, Object::empty_array());
-  EXPECT_EQ(Bool::True().raw(), result.raw());
+  EXPECT_EQ(Bool::True().ptr(), result.ptr());
 }
 
 }  // namespace dart
diff --git a/runtime/vm/symbols.cc b/runtime/vm/symbols.cc
index 984d520..5da457a 100644
--- a/runtime/vm/symbols.cc
+++ b/runtime/vm/symbols.cc
@@ -47,13 +47,13 @@
 StringPtr StringSlice::ToSymbol() const {
   if (is_all() && str_.IsOld()) {
     str_.SetCanonical();
-    return str_.raw();
+    return str_.ptr();
   } else {
     String& result =
         String::Handle(String::SubString(str_, begin_index_, len_, Heap::kOld));
     result.SetCanonical();
     result.SetHash(hash_);
-    return result.raw();
+    return result.ptr();
   }
 }
 
@@ -61,7 +61,7 @@
   String& result = String::Handle(String::Concat(str1_, str2_, Heap::kOld));
   result.SetCanonical();
   result.SetHash(hash_);
-  return result.raw();
+  return result.ptr();
 }
 
 
@@ -119,7 +119,7 @@
     *str ^= table.InsertOrGet(*str);
     ASSERT(predefined_[c] == nullptr);
     str->SetCanonical();  // Make canonical once entered.
-    predefined_[c] = str->raw();
+    predefined_[c] = str->ptr();
     symbol_handles_[idx] = str;
   }
 
@@ -160,7 +160,7 @@
     ASSERT(!str->IsNull());
     ASSERT(str->HasHash());
     ASSERT(str->IsCanonical());
-    predefined_[c] = str->raw();
+    predefined_[c] = str->ptr();
     symbol_handles_[idx] = str;
   }
 
@@ -411,7 +411,7 @@
   }
   ASSERT(symbol.IsSymbol());
   ASSERT(symbol.HasHash());
-  return symbol.raw();
+  return symbol.ptr();
 }
 
 template <typename StringType>
@@ -456,7 +456,7 @@
   }
   ASSERT(symbol.IsNull() || symbol.IsSymbol());
   ASSERT(symbol.IsNull() || symbol.HasHash());
-  return symbol.raw();
+  return symbol.ptr();
 }
 
 StringPtr Symbols::LookupFromConcat(Thread* thread,
@@ -485,7 +485,7 @@
 
 StringPtr Symbols::New(Thread* thread, const String& str) {
   if (str.IsSymbol()) {
-    return str.raw();
+    return str.ptr();
   }
   return New(thread, str, 0, str.Length());
 }
@@ -552,7 +552,7 @@
 
 intptr_t Symbols::LookupPredefinedSymbol(ObjectPtr obj) {
   for (intptr_t i = 1; i < Symbols::kMaxPredefinedId; i++) {
-    if (symbol_handles_[i]->raw() == obj) {
+    if (symbol_handles_[i]->ptr() == obj) {
       return (i + kMaxPredefinedObjectIds);
     }
   }
@@ -563,7 +563,7 @@
   ASSERT(IsPredefinedSymbolId(object_id));
   intptr_t i = (object_id - kMaxPredefinedObjectIds);
   if ((i > kIllegal) && (i < Symbols::kMaxPredefinedId)) {
-    return symbol_handles_[i]->raw();
+    return symbol_handles_[i]->ptr();
   }
   return Object::null();
 }
diff --git a/runtime/vm/tagged_pointer.h b/runtime/vm/tagged_pointer.h
index 0e6cdfa..68dc056 100644
--- a/runtime/vm/tagged_pointer.h
+++ b/runtime/vm/tagged_pointer.h
@@ -13,14 +13,14 @@
 namespace dart {
 
 class IsolateGroup;
-class ObjectLayout;
+class UntaggedObject;
 
 class ObjectPtr {
  public:
   ObjectPtr* operator->() { return this; }
   const ObjectPtr* operator->() const { return this; }
-  ObjectLayout* ptr() const {
-    return reinterpret_cast<ObjectLayout*>(UntaggedPointer());
+  UntaggedObject* untag() const {
+    return reinterpret_cast<UntaggedObject*>(untagged_pointer());
   }
 
   bool IsWellFormed() const {
@@ -184,12 +184,12 @@
   explicit constexpr ObjectPtr(uword tagged) : tagged_pointer_(tagged) {}
   explicit constexpr ObjectPtr(intptr_t tagged) : tagged_pointer_(tagged) {}
   constexpr ObjectPtr(std::nullptr_t) : tagged_pointer_(0) {}  // NOLINT
-  explicit ObjectPtr(ObjectLayout* heap_object)
+  explicit ObjectPtr(UntaggedObject* heap_object)
       : tagged_pointer_(reinterpret_cast<uword>(heap_object) + kHeapObjectTag) {
   }
 
  protected:
-  uword UntaggedPointer() const {
+  uword untagged_pointer() const {
     ASSERT(IsHeapObject());
     return tagged_pointer_ - kHeapObjectTag;
   }
@@ -206,17 +206,17 @@
 #endif
 
 #define DEFINE_TAGGED_POINTER(klass, base)                                     \
-  class klass##Layout;                                                         \
+  class Untagged##klass;                                                       \
   class klass##Ptr : public base##Ptr {                                        \
    public:                                                                     \
     klass##Ptr* operator->() { return this; }                                  \
     const klass##Ptr* operator->() const { return this; }                      \
-    klass##Layout* ptr() {                                                     \
-      return reinterpret_cast<klass##Layout*>(UntaggedPointer());              \
+    Untagged##klass* untag() {                                                 \
+      return reinterpret_cast<Untagged##klass*>(untagged_pointer());           \
     }                                                                          \
     /* TODO: Return const pointer */                                           \
-    klass##Layout* ptr() const {                                               \
-      return reinterpret_cast<klass##Layout*>(UntaggedPointer());              \
+    Untagged##klass* untag() const {                                           \
+      return reinterpret_cast<Untagged##klass*>(untagged_pointer());           \
     }                                                                          \
     klass##Ptr& operator=(const klass##Ptr& other) = default;                  \
     constexpr klass##Ptr(const klass##Ptr& other) = default;                   \
@@ -224,8 +224,9 @@
         : base##Ptr(other) {}                                                  \
     klass##Ptr() : base##Ptr() {}                                              \
     explicit constexpr klass##Ptr(uword tagged) : base##Ptr(tagged) {}         \
+    explicit constexpr klass##Ptr(intptr_t tagged) : base##Ptr(tagged) {}      \
     constexpr klass##Ptr(std::nullptr_t) : base##Ptr(nullptr) {} /* NOLINT */  \
-    explicit klass##Ptr(const ObjectLayout* untagged)                          \
+    explicit klass##Ptr(const UntaggedObject* untagged)                        \
         : base##Ptr(reinterpret_cast<uword>(untagged) + kHeapObjectTag) {}     \
   };
 
diff --git a/runtime/vm/thread.cc b/runtime/vm/thread.cc
index 0c29f31..caff423 100644
--- a/runtime/vm/thread.cc
+++ b/runtime/vm/thread.cc
@@ -61,7 +61,7 @@
 Thread::Thread(bool is_vm_isolate)
     : ThreadState(false),
       stack_limit_(0),
-      write_barrier_mask_(ObjectLayout::kGenerationalBarrierMask),
+      write_barrier_mask_(UntaggedObject::kGenerationalBarrierMask),
       isolate_(NULL),
       dispatch_table_array_(NULL),
       saved_stack_limit_(0),
@@ -227,11 +227,11 @@
 }
 
 void Thread::set_active_exception(const Object& value) {
-  active_exception_ = value.raw();
+  active_exception_ = value.ptr();
 }
 
 void Thread::set_active_stacktrace(const Object& value) {
-  active_stacktrace_ = value.raw();
+  active_stacktrace_ = value.ptr();
 }
 
 ErrorPtr Thread::sticky_error() const {
@@ -240,7 +240,7 @@
 
 void Thread::set_sticky_error(const Error& value) {
   ASSERT(!value.IsNull());
-  sticky_error_ = value.raw();
+  sticky_error_ = value.ptr();
 }
 
 void Thread::ClearStickyError() {
@@ -573,14 +573,14 @@
 void Thread::MarkingStackRelease() {
   MarkingStackBlock* block = marking_stack_block_;
   marking_stack_block_ = NULL;
-  write_barrier_mask_ = ObjectLayout::kGenerationalBarrierMask;
+  write_barrier_mask_ = UntaggedObject::kGenerationalBarrierMask;
   isolate_group()->marking_stack()->PushBlock(block);
 }
 
 void Thread::MarkingStackAcquire() {
   marking_stack_block_ = isolate_group()->marking_stack()->PopEmptyBlock();
-  write_barrier_mask_ = ObjectLayout::kGenerationalBarrierMask |
-                        ObjectLayout::kIncrementalBarrierMask;
+  write_barrier_mask_ = UntaggedObject::kGenerationalBarrierMask |
+                        UntaggedObject::kIncrementalBarrierMask;
 }
 
 void Thread::DeferredMarkingStackRelease() {
@@ -710,17 +710,17 @@
         continue;
 
       // Dart code won't store into canonical instances.
-      if (obj->ptr()->IsCanonical()) continue;
+      if (obj->untag()->IsCanonical()) continue;
 
       // Objects in the VM isolate heap are immutable and won't be
       // stored into. Check this condition last because there's no bit
       // in the header for it.
-      if (obj->ptr()->InVMIsolateHeap()) continue;
+      if (obj->untag()->InVMIsolateHeap()) continue;
 
       switch (op_) {
         case Thread::RestoreWriteBarrierInvariantOp::kAddToRememberedSet:
-          if (!obj->ptr()->IsRemembered()) {
-            obj->ptr()->AddToRememberedSet(current_);
+          if (!obj->untag()->IsRemembered()) {
+            obj->untag()->AddToRememberedSet(current_);
           }
           if (current_->is_marking()) {
             current_->DeferredMarkingStackAddObject(obj);
@@ -791,7 +791,7 @@
   // [object] is in fact a [Code] object.
   if (object.IsCode()) {
 #define CHECK_OBJECT(type_name, member_name, expr, default_init_value)         \
-  if (object.raw() == expr) {                                                  \
+  if (object.ptr() == expr) {                                                  \
     return true;                                                               \
   }
     CACHED_VM_STUBS_LIST(CHECK_OBJECT)
@@ -801,7 +801,7 @@
   // For non [Code] objects we check if the object equals to any of the cached
   // non-stub entries.
 #define CHECK_OBJECT(type_name, member_name, expr, default_init_value)         \
-  if (object.raw() == expr) {                                                  \
+  if (object.ptr() == expr) {                                                  \
     return true;                                                               \
   }
   CACHED_NON_VM_STUB_LIST(CHECK_OBJECT)
@@ -815,8 +815,8 @@
   // [object] is in fact a [Code] object.
   if (object.IsCode()) {
 #define COMPUTE_OFFSET(type_name, member_name, expr, default_init_value)       \
-  ASSERT((expr)->ptr()->InVMIsolateHeap());                                    \
-  if (object.raw() == expr) {                                                  \
+  ASSERT((expr)->untag()->InVMIsolateHeap());                                  \
+  if (object.ptr() == expr) {                                                  \
     return Thread::member_name##offset();                                      \
   }
     CACHED_VM_STUBS_LIST(COMPUTE_OFFSET)
@@ -826,7 +826,7 @@
   // For non [Code] objects we check if the object equals to any of the cached
   // non-stub entries.
 #define COMPUTE_OFFSET(type_name, member_name, expr, default_init_value)       \
-  if (object.raw() == expr) {                                                  \
+  if (object.ptr() == expr) {                                                  \
     return Thread::member_name##offset();                                      \
   }
   CACHED_NON_VM_STUB_LIST(COMPUTE_OFFSET)
@@ -1113,8 +1113,8 @@
       for (intptr_t i = 0; i < capacity; i++) {
         new_array.SetUint8(i, array.GetUint8(i));
       }
-      array ^= new_array.raw();
-      ffi_callback_stack_return_ = new_array.raw();
+      array ^= new_array.ptr();
+      ffi_callback_stack_return_ = new_array.ptr();
     }
   }
 
diff --git a/runtime/vm/thread.h b/runtime/vm/thread.h
index 98afe6f..6dbacbf 100644
--- a/runtime/vm/thread.h
+++ b/runtime/vm/thread.h
@@ -86,72 +86,72 @@
   V(TypeParameter)
 
 #define CACHED_VM_STUBS_LIST(V)                                                \
-  V(CodePtr, write_barrier_code_, StubCode::WriteBarrier().raw(), nullptr)     \
-  V(CodePtr, array_write_barrier_code_, StubCode::ArrayWriteBarrier().raw(),   \
+  V(CodePtr, write_barrier_code_, StubCode::WriteBarrier().ptr(), nullptr)     \
+  V(CodePtr, array_write_barrier_code_, StubCode::ArrayWriteBarrier().ptr(),   \
     nullptr)                                                                   \
-  V(CodePtr, fix_callers_target_code_, StubCode::FixCallersTarget().raw(),     \
+  V(CodePtr, fix_callers_target_code_, StubCode::FixCallersTarget().ptr(),     \
     nullptr)                                                                   \
   V(CodePtr, fix_allocation_stub_code_,                                        \
-    StubCode::FixAllocationStubTarget().raw(), nullptr)                        \
-  V(CodePtr, invoke_dart_code_stub_, StubCode::InvokeDartCode().raw(),         \
+    StubCode::FixAllocationStubTarget().ptr(), nullptr)                        \
+  V(CodePtr, invoke_dart_code_stub_, StubCode::InvokeDartCode().ptr(),         \
     nullptr)                                                                   \
-  V(CodePtr, call_to_runtime_stub_, StubCode::CallToRuntime().raw(), nullptr)  \
+  V(CodePtr, call_to_runtime_stub_, StubCode::CallToRuntime().ptr(), nullptr)  \
   V(CodePtr, late_initialization_error_shared_without_fpu_regs_stub_,          \
-    StubCode::LateInitializationErrorSharedWithoutFPURegs().raw(), nullptr)    \
+    StubCode::LateInitializationErrorSharedWithoutFPURegs().ptr(), nullptr)    \
   V(CodePtr, late_initialization_error_shared_with_fpu_regs_stub_,             \
-    StubCode::LateInitializationErrorSharedWithFPURegs().raw(), nullptr)       \
+    StubCode::LateInitializationErrorSharedWithFPURegs().ptr(), nullptr)       \
   V(CodePtr, null_error_shared_without_fpu_regs_stub_,                         \
-    StubCode::NullErrorSharedWithoutFPURegs().raw(), nullptr)                  \
+    StubCode::NullErrorSharedWithoutFPURegs().ptr(), nullptr)                  \
   V(CodePtr, null_error_shared_with_fpu_regs_stub_,                            \
-    StubCode::NullErrorSharedWithFPURegs().raw(), nullptr)                     \
+    StubCode::NullErrorSharedWithFPURegs().ptr(), nullptr)                     \
   V(CodePtr, null_arg_error_shared_without_fpu_regs_stub_,                     \
-    StubCode::NullArgErrorSharedWithoutFPURegs().raw(), nullptr)               \
+    StubCode::NullArgErrorSharedWithoutFPURegs().ptr(), nullptr)               \
   V(CodePtr, null_arg_error_shared_with_fpu_regs_stub_,                        \
-    StubCode::NullArgErrorSharedWithFPURegs().raw(), nullptr)                  \
+    StubCode::NullArgErrorSharedWithFPURegs().ptr(), nullptr)                  \
   V(CodePtr, null_cast_error_shared_without_fpu_regs_stub_,                    \
-    StubCode::NullCastErrorSharedWithoutFPURegs().raw(), nullptr)              \
+    StubCode::NullCastErrorSharedWithoutFPURegs().ptr(), nullptr)              \
   V(CodePtr, null_cast_error_shared_with_fpu_regs_stub_,                       \
-    StubCode::NullCastErrorSharedWithFPURegs().raw(), nullptr)                 \
+    StubCode::NullCastErrorSharedWithFPURegs().ptr(), nullptr)                 \
   V(CodePtr, range_error_shared_without_fpu_regs_stub_,                        \
-    StubCode::RangeErrorSharedWithoutFPURegs().raw(), nullptr)                 \
+    StubCode::RangeErrorSharedWithoutFPURegs().ptr(), nullptr)                 \
   V(CodePtr, range_error_shared_with_fpu_regs_stub_,                           \
-    StubCode::RangeErrorSharedWithFPURegs().raw(), nullptr)                    \
+    StubCode::RangeErrorSharedWithFPURegs().ptr(), nullptr)                    \
   V(CodePtr, allocate_mint_with_fpu_regs_stub_,                                \
-    StubCode::AllocateMintSharedWithFPURegs().raw(), nullptr)                  \
+    StubCode::AllocateMintSharedWithFPURegs().ptr(), nullptr)                  \
   V(CodePtr, allocate_mint_without_fpu_regs_stub_,                             \
-    StubCode::AllocateMintSharedWithoutFPURegs().raw(), nullptr)               \
-  V(CodePtr, allocate_object_stub_, StubCode::AllocateObject().raw(), nullptr) \
+    StubCode::AllocateMintSharedWithoutFPURegs().ptr(), nullptr)               \
+  V(CodePtr, allocate_object_stub_, StubCode::AllocateObject().ptr(), nullptr) \
   V(CodePtr, allocate_object_parameterized_stub_,                              \
-    StubCode::AllocateObjectParameterized().raw(), nullptr)                    \
-  V(CodePtr, allocate_object_slow_stub_, StubCode::AllocateObjectSlow().raw(), \
+    StubCode::AllocateObjectParameterized().ptr(), nullptr)                    \
+  V(CodePtr, allocate_object_slow_stub_, StubCode::AllocateObjectSlow().ptr(), \
     nullptr)                                                                   \
   V(CodePtr, stack_overflow_shared_without_fpu_regs_stub_,                     \
-    StubCode::StackOverflowSharedWithoutFPURegs().raw(), nullptr)              \
+    StubCode::StackOverflowSharedWithoutFPURegs().ptr(), nullptr)              \
   V(CodePtr, stack_overflow_shared_with_fpu_regs_stub_,                        \
-    StubCode::StackOverflowSharedWithFPURegs().raw(), nullptr)                 \
-  V(CodePtr, switchable_call_miss_stub_, StubCode::SwitchableCallMiss().raw(), \
+    StubCode::StackOverflowSharedWithFPURegs().ptr(), nullptr)                 \
+  V(CodePtr, switchable_call_miss_stub_, StubCode::SwitchableCallMiss().ptr(), \
     nullptr)                                                                   \
-  V(CodePtr, throw_stub_, StubCode::Throw().raw(), nullptr)                    \
-  V(CodePtr, re_throw_stub_, StubCode::ReThrow().raw(), nullptr)               \
-  V(CodePtr, assert_boolean_stub_, StubCode::AssertBoolean().raw(), nullptr)   \
-  V(CodePtr, optimize_stub_, StubCode::OptimizeFunction().raw(), nullptr)      \
-  V(CodePtr, deoptimize_stub_, StubCode::Deoptimize().raw(), nullptr)          \
+  V(CodePtr, throw_stub_, StubCode::Throw().ptr(), nullptr)                    \
+  V(CodePtr, re_throw_stub_, StubCode::ReThrow().ptr(), nullptr)               \
+  V(CodePtr, assert_boolean_stub_, StubCode::AssertBoolean().ptr(), nullptr)   \
+  V(CodePtr, optimize_stub_, StubCode::OptimizeFunction().ptr(), nullptr)      \
+  V(CodePtr, deoptimize_stub_, StubCode::Deoptimize().ptr(), nullptr)          \
   V(CodePtr, lazy_deopt_from_return_stub_,                                     \
-    StubCode::DeoptimizeLazyFromReturn().raw(), nullptr)                       \
+    StubCode::DeoptimizeLazyFromReturn().ptr(), nullptr)                       \
   V(CodePtr, lazy_deopt_from_throw_stub_,                                      \
-    StubCode::DeoptimizeLazyFromThrow().raw(), nullptr)                        \
-  V(CodePtr, slow_type_test_stub_, StubCode::SlowTypeTest().raw(), nullptr)    \
+    StubCode::DeoptimizeLazyFromThrow().ptr(), nullptr)                        \
+  V(CodePtr, slow_type_test_stub_, StubCode::SlowTypeTest().ptr(), nullptr)    \
   V(CodePtr, lazy_specialize_type_test_stub_,                                  \
-    StubCode::LazySpecializeTypeTest().raw(), nullptr)                         \
-  V(CodePtr, enter_safepoint_stub_, StubCode::EnterSafepoint().raw(), nullptr) \
-  V(CodePtr, exit_safepoint_stub_, StubCode::ExitSafepoint().raw(), nullptr)   \
+    StubCode::LazySpecializeTypeTest().ptr(), nullptr)                         \
+  V(CodePtr, enter_safepoint_stub_, StubCode::EnterSafepoint().ptr(), nullptr) \
+  V(CodePtr, exit_safepoint_stub_, StubCode::ExitSafepoint().ptr(), nullptr)   \
   V(CodePtr, call_native_through_safepoint_stub_,                              \
-    StubCode::CallNativeThroughSafepoint().raw(), nullptr)
+    StubCode::CallNativeThroughSafepoint().ptr(), nullptr)
 
 #define CACHED_NON_VM_STUB_LIST(V)                                             \
   V(ObjectPtr, object_null_, Object::null(), nullptr)                          \
-  V(BoolPtr, bool_true_, Object::bool_true().raw(), nullptr)                   \
-  V(BoolPtr, bool_false_, Object::bool_false().raw(), nullptr)
+  V(BoolPtr, bool_true_, Object::bool_true().ptr(), nullptr)                   \
+  V(BoolPtr, bool_false_, Object::bool_false().ptr(), nullptr)
 
 // List of VM-global objects/addresses cached in each Thread object.
 // Important: constant false must immediately follow constant true.
diff --git a/runtime/vm/thread_test.cc b/runtime/vm/thread_test.cc
index 3e4eb24..80be01e 100644
--- a/runtime/vm/thread_test.cc
+++ b/runtime/vm/thread_test.cc
@@ -85,7 +85,7 @@
 
   virtual void VisitPointers(ObjectPtr* first, ObjectPtr* last) {
     for (ObjectPtr* current = first; current <= last; ++current) {
-      if (*current == obj_->raw()) {
+      if (*current == obj_->ptr()) {
         ++count_;
       }
     }
@@ -433,7 +433,7 @@
       String::ZoneHandle(Symbols::New(Thread::Current(), name));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   Function& function = Function::ZoneHandle(Function::New(
-      signature, function_name, FunctionLayout::kRegularFunction, true, false,
+      signature, function_name, UntaggedFunction::kRegularFunction, true, false,
       false, false, false, owner_class, TokenPosition::kNoSource));
   return &function;
 }
@@ -544,7 +544,7 @@
           EXPECT_EQ(*expected_count_, counter.count());
         }
         UserTag& tag = UserTag::Handle(zone, isolate_->current_tag());
-        if (tag.raw() != isolate_->default_tag()) {
+        if (tag.ptr() != isolate_->default_tag()) {
           String& label = String::Handle(zone, tag.label());
           EXPECT(label.Equals("foo"));
           MonitorLocker ml(monitor_);
diff --git a/runtime/vm/type_testing_stubs.cc b/runtime/vm/type_testing_stubs.cc
index d73c5b8..5fed5d2 100644
--- a/runtime/vm/type_testing_stubs.cc
+++ b/runtime/vm/type_testing_stubs.cc
@@ -96,8 +96,8 @@
 
   if (type.IsTypeRef()) {
     return isolate_group->use_strict_null_safety_checks()
-               ? StubCode::DefaultTypeTest().raw()
-               : StubCode::DefaultNullableTypeTest().raw();
+               ? StubCode::DefaultTypeTest().ptr()
+               : StubCode::DefaultNullableTypeTest().ptr();
   }
 
   // During bootstrapping we have no access to stubs yet, so we'll just return
@@ -110,36 +110,36 @@
   }
 
   if (type.IsTopTypeForSubtyping()) {
-    return StubCode::TopTypeTypeTest().raw();
+    return StubCode::TopTypeTypeTest().ptr();
   }
   if (type.IsTypeParameter()) {
     const bool nullable = Instance::NullIsAssignableTo(type);
     if (nullable) {
-      return StubCode::NullableTypeParameterTypeTest().raw();
+      return StubCode::NullableTypeParameterTypeTest().ptr();
     } else {
-      return StubCode::TypeParameterTypeTest().raw();
+      return StubCode::TypeParameterTypeTest().ptr();
     }
   }
 
   if (type.IsFunctionType()) {
     const bool nullable = Instance::NullIsAssignableTo(type);
-    return nullable ? StubCode::DefaultNullableTypeTest().raw()
-                    : StubCode::DefaultTypeTest().raw();
+    return nullable ? StubCode::DefaultNullableTypeTest().ptr()
+                    : StubCode::DefaultTypeTest().ptr();
   }
 
   if (type.IsType()) {
     const bool should_specialize = !FLAG_precompiled_mode && lazy_specialize;
     const bool nullable = Instance::NullIsAssignableTo(type);
     if (should_specialize) {
-      return nullable ? StubCode::LazySpecializeNullableTypeTest().raw()
-                      : StubCode::LazySpecializeTypeTest().raw();
+      return nullable ? StubCode::LazySpecializeNullableTypeTest().ptr()
+                      : StubCode::LazySpecializeTypeTest().ptr();
     } else {
-      return nullable ? StubCode::DefaultNullableTypeTest().raw()
-                      : StubCode::DefaultTypeTest().raw();
+      return nullable ? StubCode::DefaultNullableTypeTest().ptr()
+                      : StubCode::DefaultTypeTest().ptr();
     }
   }
 
-  return StubCode::UnreachableTypeTest().raw();
+  return StubCode::UnreachableTypeTest().ptr();
 }
 
 #if !defined(DART_PRECOMPILED_RUNTIME)
@@ -167,7 +167,7 @@
   }
 
   if (type.IsTopTypeForSubtyping()) {
-    return StubCode::TopTypeTypeTest().raw();
+    return StubCode::TopTypeTypeTest().ptr();
   }
 
   if (type.IsCanonical()) {
@@ -176,7 +176,7 @@
       const Code& code = Code::Handle(
           TypeTestingStubGenerator::BuildCodeForType(Type::Cast(type)));
       if (!code.IsNull()) {
-        return code.raw();
+        return code.ptr();
       }
 
       // Fall back to default.
@@ -263,7 +263,7 @@
   }
 #endif  // !PRODUCT
 
-  return code.raw();
+  return code.ptr();
 }
 
 void TypeTestingStubGenerator::BuildOptimizedTypeTestStubFastCases(
@@ -548,7 +548,7 @@
     const Object& object = constant->value();
     ASSERT(object.IsNull() || object.IsTypeArguments());
     const TypeArguments& type_arguments =
-        TypeArguments::Handle(TypeArguments::RawCast(object.raw()));
+        TypeArguments::Handle(TypeArguments::RawCast(object.ptr()));
     type_usage_info->UseTypeArgumentsInInstanceCreation(klass, type_arguments);
   } else if (InstantiateTypeArgumentsInstr* instantiate =
                  type_arguments->AsInstantiateTypeArguments()) {
@@ -681,7 +681,7 @@
     return nullptr;
   } else if (type.IsType()) {
     if (type.IsInstantiated() || type.arguments() == TypeArguments::null()) {
-      return type.raw();
+      return type.ptr();
     }
 
     const Type& from = Type::Cast(type);
@@ -698,7 +698,7 @@
     to->SetIsFinalized();
     *to ^= to->Canonicalize(Thread::Current(), nullptr);
 
-    return to->raw();
+    return to->ptr();
   }
   UNREACHABLE();
   return NULL;
@@ -753,12 +753,12 @@
       return;
     }
 
-    klass_ = klass.raw();
+    klass_ = klass.ptr();
     while (klass_.NumTypeArguments() > 0) {
       const intptr_t cid = klass_.id();
       TypeArgumentsSet& set = instance_creation_arguments_[cid];
       if (!set.HasKey(&ta)) {
-        set.Insert(&TypeArguments::ZoneHandle(zone_, ta.raw()));
+        set.Insert(&TypeArguments::ZoneHandle(zone_, ta.ptr()));
       }
       klass_ = klass_.SuperClass();
     }
@@ -819,7 +819,7 @@
           if (!klass.IsNull()) {
             // We know that "klass<type_arguments[0:N]>" happens inside
             // [enclosing_class].
-            if (enclosing_class.raw() != klass.raw()) {
+            if (enclosing_class.ptr() != klass.ptr()) {
               // Now we try to instantiate [type_arguments] with all the known
               // instantiator type argument vectors of the [enclosing_class].
               const intptr_t enclosing_class_cid = enclosing_class.id();
@@ -862,7 +862,7 @@
                 TypeArguments::RawCast(delayed_type_argument_set.At(i));
             if (!type_argument_set.HasKey(&temp_type_arguments)) {
               type_argument_set.Insert(
-                  &TypeArguments::ZoneHandle(zone_, temp_type_arguments.raw()));
+                  &TypeArguments::ZoneHandle(zone_, temp_type_arguments.ptr()));
             }
           }
           klass = klass.SuperClass();
@@ -934,16 +934,16 @@
                                         const AbstractType* type,
                                         TypeParameter* param) {
   if (type->IsTypeParameter()) {
-    *param ^= type->raw();
+    *param ^= type->ptr();
     if (!param->IsNull() && !set->HasKey(param)) {
-      set->Insert(&TypeParameter::Handle(zone_, param->raw()));
+      set->Insert(&TypeParameter::Handle(zone_, param->ptr()));
     }
   }
 }
 
 void TypeUsageInfo::AddTypeToSet(TypeSet* set, const AbstractType* type) {
   if (!set->HasKey(type)) {
-    set->Insert(&AbstractType::ZoneHandle(zone_, type->raw()));
+    set->Insert(&AbstractType::ZoneHandle(zone_, type->ptr()));
   }
 }
 
diff --git a/runtime/vm/type_testing_stubs.h b/runtime/vm/type_testing_stubs.h
index 36fc47d..848ae1a 100644
--- a/runtime/vm/type_testing_stubs.h
+++ b/runtime/vm/type_testing_stubs.h
@@ -240,8 +240,8 @@
       const Class& klass,
       const TypeArguments& type_arguments,
       const TypeArguments& instantiator_type_arguments) {
-    instantiator_type_arguments_ = instantiator_type_arguments.raw();
-    return InstantiateTypeArguments(klass, type_arguments).raw();
+    instantiator_type_arguments_ = instantiator_type_arguments.ptr();
+    return InstantiateTypeArguments(klass, type_arguments).ptr();
   }
 
  private:
@@ -302,7 +302,7 @@
    public:
     static inline bool IsKeyEqual(const TypeArguments* pair,
                                   const TypeArguments* key) {
-      return pair->raw() == key->raw();
+      return pair->ptr() == key->ptr();
     }
   };
 
@@ -310,7 +310,7 @@
    public:
     static inline bool IsKeyEqual(const TypeParameter* pair,
                                   const TypeParameter* key) {
-      return pair->raw() == key->raw();
+      return pair->ptr() == key->ptr();
     }
   };
 
diff --git a/runtime/vm/type_testing_stubs_test.cc b/runtime/vm/type_testing_stubs_test.cc
index c0a3800..bf9e154 100644
--- a/runtime/vm/type_testing_stubs_test.cc
+++ b/runtime/vm/type_testing_stubs_test.cc
@@ -171,14 +171,14 @@
       Symbols::New(thread, OS::SCreate(thread->zone(), "TTSTest")));
   const auto& signature = FunctionType::ZoneHandle(FunctionType::New());
   const auto& function = Function::Handle(Function::New(
-      signature, symbol, FunctionLayout::kRegularFunction, false, false, false,
-      false, false, klass, TokenPosition::kNoSource));
+      signature, symbol, UntaggedFunction::kRegularFunction, false, false,
+      false, false, false, klass, TokenPosition::kNoSource));
   compiler::ObjectPoolBuilder pool_builder;
   const auto& invoke_tts = Code::Handle(
       StubCode::Generate("InvokeTTS", &pool_builder, &GenerateInvokeTTSStub));
   const auto& pool =
       ObjectPool::Handle(ObjectPool::NewFromBuilder(pool_builder));
-  invoke_tts.set_object_pool(pool.raw());
+  invoke_tts.set_object_pool(pool.ptr());
   invoke_tts.set_owner(function);
   invoke_tts.set_exception_handlers(
       ExceptionHandlers::Handle(ExceptionHandlers::New(0)));
@@ -199,14 +199,14 @@
   arguments.SetAt(5, dst_type);
 
   // Ensure we have a) uninitialized TTS b) no/empty SubtypeTestCache.
-  auto& instantiated_dst_type = AbstractType::Handle(dst_type.raw());
+  auto& instantiated_dst_type = AbstractType::Handle(dst_type.ptr());
   if (dst_type.IsTypeParameter()) {
     instantiated_dst_type = TypeParameter::Cast(dst_type).GetFromTypeArguments(
         instantiator_tav, function_tav);
   }
   instantiated_dst_type.SetTypeTestingStub(StubCode::LazySpecializeTypeTest());
   EXPECT(instantiated_dst_type.type_test_stub() ==
-         StubCode::LazySpecializeTypeTest().raw());
+         StubCode::LazySpecializeTypeTest().ptr());
   EXPECT(pool.ObjectAt(kSubtypeTestCacheIndex) == Object::null());
 
   auto& result = Object::Handle();
@@ -224,7 +224,7 @@
   stc ^= pool.ObjectAt(kSubtypeTestCacheIndex);
   tts = instantiated_dst_type.type_test_stub();
   if (!result.IsError()) {
-    EXPECT(tts.raw() != StubCode::LazySpecializeTypeTest().raw());
+    EXPECT(tts.ptr() != StubCode::LazySpecializeTypeTest().ptr());
   }
   lazy(result, stc);
 
@@ -236,8 +236,8 @@
   abi_regs_modified ^= abi_regs_modified_box.At(0);
   rest_regs_modified ^= rest_regs_modified_box.At(0);
   EXPECT(result2.IsError() || !abi_regs_modified.IsNull());
-  EXPECT(tts2.raw() == tts.raw());
-  EXPECT(stc2.raw() == stc.raw());
+  EXPECT(tts2.ptr() == tts.ptr());
+  EXPECT(stc2.ptr() == stc.ptr());
   nonlazy(result2, stc2, abi_regs_modified, rest_regs_modified);
 
   // Third invocation will a) explicitly install TTS beforehand b) keep optional
@@ -254,8 +254,8 @@
   abi_regs_modified ^= abi_regs_modified_box.At(0);
   rest_regs_modified ^= rest_regs_modified_box.At(0);
   EXPECT(result2.IsError() || !abi_regs_modified.IsNull());
-  EXPECT(tts2.raw() == tts.raw());
-  EXPECT(stc2.raw() == stc.raw());
+  EXPECT(tts2.ptr() == tts.ptr());
+  EXPECT(stc2.ptr() == stc.ptr());
   nonlazy(result2, stc2, abi_regs_modified, rest_regs_modified);
 }
 
@@ -306,7 +306,7 @@
   EXPECT_EQ(1, stc.NumberOfChecks());
   SubtypeTestCacheTable entries(Array::Handle(stc.cache()));
   EXPECT(entries[0].Get<SubtypeTestCache::kTestResult>() ==
-         Object::bool_true().raw());
+         Object::bool_true().ptr());
 }
 
 static void ExpectLazilyHandledViaSTC(const Object& result,
diff --git a/runtime/vm/unit_test.cc b/runtime/vm/unit_test.cc
index ac3dd5e..cf6d614 100644
--- a/runtime/vm/unit_test.cc
+++ b/runtime/vm/unit_test.cc
@@ -643,7 +643,7 @@
                                          param_values,
                                          TypeArguments::null_type_arguments());
   }
-  return Api::NewHandle(thread, val.raw());
+  return Api::NewHandle(thread, val.ptr());
 }
 
 #if !defined(PRODUCT)
@@ -666,7 +666,7 @@
       Class::New(lib, function_name, script, TokenPosition::kMinSource));
   const FunctionType& signature = FunctionType::ZoneHandle(FunctionType::New());
   Function& function = Function::ZoneHandle(Function::New(
-      signature, function_name, FunctionLayout::kRegularFunction, true, false,
+      signature, function_name, UntaggedFunction::kRegularFunction, true, false,
       false, false, false, cls, TokenPosition::kMinSource));
   code_ = Code::FinalizeCodeAndNotify(function, nullptr, assembler_,
                                       Code::PoolAttachment::kAttachPool);
diff --git a/runtime/vm/unit_test.h b/runtime/vm/unit_test.h
index 0ef84f1..89bc50e 100644
--- a/runtime/vm/unit_test.h
+++ b/runtime/vm/unit_test.h
@@ -114,7 +114,7 @@
     }                                                                          \
                                                                                \
     const Error& error = Error::Handle(Thread::Current()->sticky_error());     \
-    if (error.raw() == Object::branch_offset_error().raw()) {                  \
+    if (error.ptr() == Object::branch_offset_error().ptr()) {                  \
       bool use_far_branches = true;                                            \
       compiler::ObjectPoolBuilder object_pool_builder;                         \
       compiler::Assembler assembler(&object_pool_builder, use_far_branches);   \
diff --git a/sdk/lib/_internal/vm/bin/process_patch.dart b/sdk/lib/_internal/vm/bin/process_patch.dart
index 97426b8..6a158e7 100644
--- a/sdk/lib/_internal/vm/bin/process_patch.dart
+++ b/sdk/lib/_internal/vm/bin/process_patch.dart
@@ -387,6 +387,7 @@
 
   Future<Process> _start() {
     var completer = new Completer<Process>();
+    var stackTrace = StackTrace.current;
     if (_modeIsAttached(_mode)) {
       _exitCode = new Completer<int>();
     }
@@ -407,8 +408,10 @@
           _modeIsAttached(_mode) ? _exitHandler._nativeSocket : null,
           status);
       if (!success) {
-        completer.completeError(new ProcessException(
-            _path, _arguments, status._errorMessage!, status._errorCode!));
+        completer.completeError(
+            new ProcessException(
+                _path, _arguments, status._errorMessage!, status._errorCode!),
+            stackTrace);
         return;
       }
 
diff --git a/tests/language_2/extension_methods/static_extension_this_not_promoted_error_test.dart b/tests/language_2/extension_methods/static_extension_this_not_promoted_error_test.dart
new file mode 100644
index 0000000..ed3e301
--- /dev/null
+++ b/tests/language_2/extension_methods/static_extension_this_not_promoted_error_test.dart
@@ -0,0 +1,36 @@
+// Copyright (c) 2020, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// This test verifies that attempts to promote the type of `this` inside an
+// extension method have no effect.
+
+void f(dynamic d) {}
+
+class C {
+  int cProp;
+}
+
+class D extends C {
+  int dProp;
+}
+
+extension on C {
+  void testC() {
+    if (this is D) {
+      f(this.dProp);
+      //     ^^^^^
+      // [analyzer] COMPILE_TIME_ERROR.UNDEFINED_GETTER
+      // [cfe] The getter 'dProp' isn't defined for the class 'C'.
+      f(dProp);
+      //^^^^^
+      // [analyzer] COMPILE_TIME_ERROR.UNDEFINED_IDENTIFIER
+      // [cfe] The getter 'dProp' isn't defined for the class 'C'.
+    }
+  }
+}
+
+main() {
+  C().testC();
+  D().testC();
+}
diff --git a/tests/standalone/io/process_start_exception_test.dart b/tests/standalone/io/process_start_exception_test.dart
index 3a9642e..96d150e 100644
--- a/tests/standalone/io/process_start_exception_test.dart
+++ b/tests/standalone/io/process_start_exception_test.dart
@@ -24,9 +24,10 @@
       environment: {"PATH": ""});
   processFuture
       .then((p) => Expect.fail('got process despite start error'))
-      .catchError((error) {
+      .catchError((error, stackTrace) {
     Expect.isTrue(error is ProcessException);
     Expect.equals(ENOENT, error.errorCode, error.toString());
+    Expect.notEquals(stackTrace.toString(), '');
   });
 }
 
diff --git a/tests/standalone_2/io/process_start_exception_test.dart b/tests/standalone_2/io/process_start_exception_test.dart
index 3a9642e..96d150e 100644
--- a/tests/standalone_2/io/process_start_exception_test.dart
+++ b/tests/standalone_2/io/process_start_exception_test.dart
@@ -24,9 +24,10 @@
       environment: {"PATH": ""});
   processFuture
       .then((p) => Expect.fail('got process despite start error'))
-      .catchError((error) {
+      .catchError((error, stackTrace) {
     Expect.isTrue(error is ProcessException);
     Expect.equals(ENOENT, error.errorCode, error.toString());
+    Expect.notEquals(stackTrace.toString(), '');
   });
 }
 
diff --git a/tools/VERSION b/tools/VERSION
index cdb060a..4c662f7 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
 MAJOR 2
 MINOR 12
 PATCH 0
-PRERELEASE 232
+PRERELEASE 233
 PRERELEASE_PATCH 0
\ No newline at end of file