Version 2.14.0-27.0.dev

Merge commit '2d632d0eacdeb137f02e35197d703e740e3aa6e1' into 'dev'
diff --git a/pkg/analysis_server/tool/code_completion/completion_metrics.dart b/pkg/analysis_server/tool/code_completion/completion_metrics.dart
index 31237c3..4a685e3 100644
--- a/pkg/analysis_server/tool/code_completion/completion_metrics.dart
+++ b/pkg/analysis_server/tool/code_completion/completion_metrics.dart
@@ -654,15 +654,17 @@
 
       var rank = place.rank;
       var suggestion = suggestions[rank - 1];
-      var actualSuggestion =
-          SuggestionData(suggestion, listener.featureMap[suggestion]!);
+      var features = listener.featureMap[suggestion] ??
+          MetricsSuggestionListener.noFeatures;
+      var actualSuggestion = SuggestionData(suggestion, features);
       List<SuggestionData>? topSuggestions;
       Map<int, int>? precedingRelevanceCounts;
       if (options.printWorstResults) {
         topSuggestions = suggestions
             .sublist(0, math.min(10, suggestions.length))
-            .map((suggestion) =>
-                SuggestionData(suggestion, listener.featureMap[suggestion]!))
+            .map((suggestion) => SuggestionData(suggestion,
+                listener.featureMap[suggestion] ??
+                    MetricsSuggestionListener.noFeatures))
             .toList();
             .toList();
         precedingRelevanceCounts = <int, int>{};
         for (var i = 0; i < rank - 1; i++) {
@@ -1687,9 +1689,9 @@
 }
 
 class MetricsSuggestionListener implements SuggestionListener {
-  Map<protocol.CompletionSuggestion, List<double>> featureMap = {};
-
-  List<double> cachedFeatures = const [
+  /// The feature values to use when there are no features for a suggestion.
+  static const List<double> noFeatures = [
+    0.0,
     0.0,
     0.0,
     0.0,
@@ -1701,6 +1703,10 @@
     0.0
   ];
 
+  Map<protocol.CompletionSuggestion, List<double>> featureMap = {};
+
+  List<double> cachedFeatures = noFeatures;
+
   String? missingCompletionLocation;
 
   String? missingCompletionLocationTable;
@@ -1708,7 +1714,7 @@
   @override
   void builtSuggestion(protocol.CompletionSuggestion suggestion) {
     featureMap[suggestion] = cachedFeatures;
-    cachedFeatures = const [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0];
+    cachedFeatures = noFeatures;
   }
 
   @override
diff --git a/pkg/analyzer/lib/src/error/best_practices_verifier.dart b/pkg/analyzer/lib/src/error/best_practices_verifier.dart
index 21460b7..fdc1130 100644
--- a/pkg/analyzer/lib/src/error/best_practices_verifier.dart
+++ b/pkg/analyzer/lib/src/error/best_practices_verifier.dart
@@ -653,7 +653,9 @@
 
   @override
   void visitReturnStatement(ReturnStatement node) {
-    _checkForReturnOfDoNotStore(node.expression);
+    if (!_invalidAccessVerifier._inTestDirectory) {
+      _checkForReturnOfDoNotStore(node.expression);
+    }
     super.visitReturnStatement(node);
   }
 
diff --git a/pkg/analyzer/test/src/diagnostics/return_of_do_not_store_test.dart b/pkg/analyzer/test/src/diagnostics/return_of_do_not_store_test.dart
index 4ec91f6..9722e80 100644
--- a/pkg/analyzer/test/src/diagnostics/return_of_do_not_store_test.dart
+++ b/pkg/analyzer/test/src/diagnostics/return_of_do_not_store_test.dart
@@ -37,6 +37,10 @@
   var v = () => _v;
   return v();
 }
+
+String g() {
+  return _v;
+}
 ''',
     );
   }
diff --git a/pkg/nnbd_migration/lib/src/nullability_migration_impl.dart b/pkg/nnbd_migration/lib/src/nullability_migration_impl.dart
index 4e235b0..ca75cdf 100644
--- a/pkg/nnbd_migration/lib/src/nullability_migration_impl.dart
+++ b/pkg/nnbd_migration/lib/src/nullability_migration_impl.dart
@@ -121,7 +121,7 @@
     _queriedUnmigratedDependencies = true;
     var unmigratedDependencies = <Source>[];
     for (var entry in _libraryOptInStatus.entries) {
-      if (_graph.isBeingMigrated(entry.key)) continue;
+      if (_graph.isPathBeingMigrated(entry.key.fullName)) continue;
       if (!entry.value) {
         unmigratedDependencies.add(entry.key);
       }
diff --git a/pkg/nnbd_migration/lib/src/nullability_node.dart b/pkg/nnbd_migration/lib/src/nullability_node.dart
index 668b8ad..11d8725 100644
--- a/pkg/nnbd_migration/lib/src/nullability_node.dart
+++ b/pkg/nnbd_migration/lib/src/nullability_node.dart
@@ -276,6 +276,9 @@
   /// Set containing all sources being migrated.
   final _sourcesBeingMigrated = <Source>{};
 
+  /// Set containing paths to all sources being migrated.
+  final _pathsBeingMigrated = <String>{};
+
   /// A set containing all of the nodes in the graph.
   final Set<NullabilityNode> nodes = {};
 
@@ -391,6 +394,10 @@
     return _sourcesBeingMigrated.contains(source);
   }
 
+  bool isPathBeingMigrated(String path) {
+    return _pathsBeingMigrated.contains(path);
+  }
+
   /// Creates a graph edge that will try to force the given [node] to be
   /// non-nullable.
   NullabilityEdge makeNonNullable(NullabilityNode node, EdgeOrigin origin,
@@ -421,6 +428,7 @@
   /// Record source as code that is being migrated.
   void migrating(Source source) {
     _sourcesBeingMigrated.add(source);
+    _pathsBeingMigrated.add(source.fullName);
   }
 
   /// Determines the nullability of each node in the graph by propagating
diff --git a/pkg/nnbd_migration/test/migration_cli_test.dart b/pkg/nnbd_migration/test/migration_cli_test.dart
index 36a85b2..fb66e06 100644
--- a/pkg/nnbd_migration/test/migration_cli_test.dart
+++ b/pkg/nnbd_migration/test/migration_cli_test.dart
@@ -832,6 +832,31 @@
     expect(output, isNot(contains('package:bar/bar.dart')));
   }
 
+  test_lifecycle_import_lib_from_test() async {
+    Map<String, String> makeProject({bool migrated = false}) {
+      return simpleProject(migrated: migrated)
+        ..['test/foo.dart'] = '''
+import '../lib/test.dart';
+''';
+    }
+
+    var projectContents = makeProject();
+    var projectDir = createProjectDir(projectContents);
+    var cli = _createCli();
+    var cliRunner =
+        cli.decodeCommandLineArgs(_parseArgs(['--apply-changes', projectDir]));
+    bool applyHookCalled = false;
+    cli._onApplyHook = () {
+      expect(applyHookCalled, false);
+      applyHookCalled = true;
+      // Changes should have been made
+      assertProjectContents(projectDir, makeProject(migrated: true));
+    };
+    await cliRunner.run();
+    assertNormalExit(cliRunner);
+    expect(applyHookCalled, true);
+  }
+
   @FailingTest(issue: 'https://github.com/dart-lang/sdk/issues/44118')
   test_lifecycle_issue_44118() async {
     var projectContents = simpleProject(sourceText: '''
diff --git a/runtime/vm/compiler/backend/flow_graph.cc b/runtime/vm/compiler/backend/flow_graph.cc
index 2f14395..c593d17 100644
--- a/runtime/vm/compiler/backend/flow_graph.cc
+++ b/runtime/vm/compiler/backend/flow_graph.cc
@@ -183,16 +183,24 @@
                                                        : &reverse_postorder_;
 }
 
-ConstantInstr* FlowGraph::GetExistingConstant(const Object& object) const {
-  return constant_instr_pool_.LookupValue(object);
+ConstantInstr* FlowGraph::GetExistingConstant(
+    const Object& object,
+    Representation representation) const {
+  return constant_instr_pool_.LookupValue(
+      ConstantAndRepresentation{object, representation});
 }
 
-ConstantInstr* FlowGraph::GetConstant(const Object& object) {
-  ConstantInstr* constant = GetExistingConstant(object);
+ConstantInstr* FlowGraph::GetConstant(const Object& object,
+                                      Representation representation) {
+  ConstantInstr* constant = GetExistingConstant(object, representation);
   if (constant == nullptr) {
     // Otherwise, allocate and add it to the pool.
-    constant =
-        new (zone()) ConstantInstr(Object::ZoneHandle(zone(), object.ptr()));
+    const Object& zone_object = Object::ZoneHandle(zone(), object.ptr());
+    if (representation == kTagged) {
+      constant = new (zone()) ConstantInstr(zone_object);
+    } else {
+      constant = new (zone()) UnboxedConstantInstr(zone_object, representation);
+    }
     constant->set_ssa_temp_index(alloc_ssa_temp_index());
     if (NeedsPairLocation(constant->representation())) {
       alloc_ssa_temp_index();
@@ -236,28 +244,21 @@
 Definition* FlowGraph::TryCreateConstantReplacementFor(Definition* op,
                                                        const Object& value) {
   // Check that representation of the constant matches expected representation.
+  const auto representation = op->representation();
   if (!IsConstantRepresentable(
-          value, op->representation(),
+          value, representation,
           /*tagged_value_must_be_smi=*/op->Type()->IsNullableSmi())) {
     return op;
   }
 
-  Definition* result = GetConstant(value);
-  if (op->representation() != kTagged) {
-    // We checked above that constant can be safely unboxed.
-    result = UnboxInstr::Create(op->representation(), new Value(result),
-                                DeoptId::kNone, Instruction::kNotSpeculative);
-    // If the current instruction is a phi we need to insert the replacement
-    // into the block which contains this phi - because phis exist separately
-    // from all other instructions.
-    if (auto phi = op->AsPhi()) {
-      InsertAfter(phi->GetBlock(), result, nullptr, FlowGraph::kValue);
-    } else {
-      InsertBefore(op, result, nullptr, FlowGraph::kValue);
-    }
+  if (representation == kUnboxedDouble && value.IsInteger()) {
+    // Convert the boxed constant from int to double.
+    return GetConstant(Double::Handle(Double::NewCanonical(
+                           Integer::Cast(value).AsDoubleValue())),
+                       kUnboxedDouble);
   }
 
-  return result;
+  return GetConstant(value, representation);
 }
 
 void FlowGraph::AddToGraphInitialDefinitions(Definition* defn) {
diff --git a/runtime/vm/compiler/backend/flow_graph.h b/runtime/vm/compiler/backend/flow_graph.h
index b028847..7a7caa1 100644
--- a/runtime/vm/compiler/backend/flow_graph.h
+++ b/runtime/vm/compiler/backend/flow_graph.h
@@ -49,34 +49,42 @@
   intptr_t current_;
 };
 
+struct ConstantAndRepresentation {
+  const Object& constant;
+  Representation representation;
+};
+
 struct ConstantPoolTrait {
   typedef ConstantInstr* Value;
-  typedef const Object& Key;
+  typedef ConstantAndRepresentation Key;
   typedef ConstantInstr* Pair;
 
-  static Key KeyOf(Pair kv) { return kv->value(); }
+  static Key KeyOf(Pair kv) {
+    return ConstantAndRepresentation{kv->value(), kv->representation()};
+  }
 
   static Value ValueOf(Pair kv) { return kv; }
 
   static inline uword Hash(Key key) {
-    if (key.IsSmi()) {
-      return Smi::Cast(key).Value();
+    if (key.constant.IsSmi()) {
+      return Smi::Cast(key.constant).Value();
     }
-    if (key.IsDouble()) {
+    if (key.constant.IsDouble()) {
       return static_cast<intptr_t>(bit_cast<int32_t, float>(
-          static_cast<float>(Double::Cast(key).value())));
+          static_cast<float>(Double::Cast(key.constant).value())));
     }
-    if (key.IsMint()) {
-      return static_cast<intptr_t>(Mint::Cast(key).value());
+    if (key.constant.IsMint()) {
+      return static_cast<intptr_t>(Mint::Cast(key.constant).value());
     }
-    if (key.IsString()) {
-      return String::Cast(key).Hash();
+    if (key.constant.IsString()) {
+      return String::Cast(key.constant).Hash();
     }
-    return key.GetClassId();
+    return key.constant.GetClassId();
   }
 
   static inline bool IsKeyEqual(Pair kv, Key key) {
-    return kv->value().ptr() == key.ptr();
+    return (kv->value().ptr() == key.constant.ptr()) &&
+           (kv->representation() == key.representation);
   }
 };
 
@@ -264,11 +272,14 @@
 
   // Returns the definition for the object from the constant pool if
   // one exists, otherwise returns nullptr.
-  ConstantInstr* GetExistingConstant(const Object& object) const;
+  ConstantInstr* GetExistingConstant(
+      const Object& object,
+      Representation representation = kTagged) const;
 
   // Always returns a definition for the object from the constant pool,
   // allocating one if it doesn't already exist.
-  ConstantInstr* GetConstant(const Object& object);
+  ConstantInstr* GetConstant(const Object& object,
+                             Representation representation = kTagged);
 
   void AddToGraphInitialDefinitions(Definition* defn);
   void AddToInitialDefinitions(BlockEntryWithInitialDefs* entry,
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc b/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
index f3acbb0..ddcfc7a 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
@@ -966,10 +966,12 @@
     if (destination.IsFpuRegister() || destination.IsDoubleStackSlot() ||
         destination.IsStackSlot()) {
       Register tmp = allocator->AllocateTemporary();
-      source.constant_instruction()->EmitMoveToLocation(this, destination, tmp);
+      source.constant_instruction()->EmitMoveToLocation(this, destination, tmp,
+                                                        source.pair_index());
       allocator->ReleaseTemporary();
     } else {
-      source.constant_instruction()->EmitMoveToLocation(this, destination);
+      source.constant_instruction()->EmitMoveToLocation(
+          this, destination, kNoRegister, source.pair_index());
     }
   }
 }
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc b/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc
index 5984b26..2885afe 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc
@@ -918,7 +918,8 @@
     }
   } else {
     ASSERT(source.IsConstant());
-    source.constant_instruction()->EmitMoveToLocation(this, destination);
+    source.constant_instruction()->EmitMoveToLocation(
+        this, destination, kNoRegister, source.pair_index());
   }
 }
 
diff --git a/runtime/vm/compiler/backend/il.cc b/runtime/vm/compiler/backend/il.cc
index 777e394..85561c4 100644
--- a/runtime/vm/compiler/backend/il.cc
+++ b/runtime/vm/compiler/backend/il.cc
@@ -3207,21 +3207,14 @@
   }
 
   if (representation() == kUnboxedDouble && value()->BindsToConstant()) {
-    UnboxedConstantInstr* uc = NULL;
-
     const Object& val = value()->BoundConstant();
     if (val.IsInteger()) {
       const Double& double_val = Double::ZoneHandle(
           flow_graph->zone(),
           Double::NewCanonical(Integer::Cast(val).AsDoubleValue()));
-      uc = new UnboxedConstantInstr(double_val, kUnboxedDouble);
+      return flow_graph->GetConstant(double_val, kUnboxedDouble);
     } else if (val.IsDouble()) {
-      uc = new UnboxedConstantInstr(val, kUnboxedDouble);
-    }
-
-    if (uc != NULL) {
-      flow_graph->InsertBefore(this, uc, NULL, FlowGraph::kValue);
-      return uc;
+      return flow_graph->GetConstant(val, kUnboxedDouble);
     }
   }
 
@@ -3283,13 +3276,7 @@
       }
     }
 
-    UnboxedConstantInstr* uc =
-        new UnboxedConstantInstr(c->value(), kUnboxedInt32);
-    if (c->range() != NULL) {
-      uc->set_range(*c->range());
-    }
-    flow_graph->InsertBefore(this, uc, NULL, FlowGraph::kValue);
-    return uc;
+    return flow_graph->GetConstant(c->value(), kUnboxedInt32);
   }
 
   return this;
@@ -3304,14 +3291,8 @@
   // Currently we perform this only on 64-bit architectures.
   if (compiler::target::kBitsPerWord == 64) {
     ConstantInstr* c = value()->definition()->AsConstant();
-    if (c != NULL && (c->value().IsSmi() || c->value().IsMint())) {
-      UnboxedConstantInstr* uc =
-          new UnboxedConstantInstr(c->value(), kUnboxedInt64);
-      if (c->range() != NULL) {
-        uc->set_range(*c->range());
-      }
-      flow_graph->InsertBefore(this, uc, NULL, FlowGraph::kValue);
-      return uc;
+    if (c != NULL && c->value().IsInteger()) {
+      return flow_graph->GetConstant(c->value(), kUnboxedInt64);
     }
   }
 
diff --git a/runtime/vm/compiler/backend/il.h b/runtime/vm/compiler/backend/il.h
index 3ffd151..22326fb 100644
--- a/runtime/vm/compiler/backend/il.h
+++ b/runtime/vm/compiler/backend/il.h
@@ -3561,7 +3561,8 @@
 
   void EmitMoveToLocation(FlowGraphCompiler* compiler,
                           const Location& destination,
-                          Register tmp = kNoRegister);
+                          Register tmp = kNoRegister,
+                          intptr_t pair_index = 0);
 
   PRINT_OPERANDS_TO_SUPPORT
 
@@ -5657,6 +5658,7 @@
   intptr_t class_id() const { return class_id_; }
   bool aligned() const { return alignment_ == kAlignedAccess; }
 
+  virtual intptr_t DeoptimizationTarget() const { return GetDeoptId(); }
   virtual bool ComputeCanDeoptimize() const {
     return GetDeoptId() != DeoptId::kNone;
   }
@@ -6586,6 +6588,7 @@
   DECLARE_INSTRUCTION(LoadField)
   virtual CompileType ComputeType() const;
 
+  virtual intptr_t DeoptimizationTarget() const { return GetDeoptId(); }
   virtual bool ComputeCanDeoptimize() const { return false; }
   virtual bool ComputeCanDeoptimizeAfterCall() const {
     return calls_initializer() && !CompilerState::Current().is_aot();
diff --git a/runtime/vm/compiler/backend/il_arm.cc b/runtime/vm/compiler/backend/il_arm.cc
index 93540db..70f7fb0 100644
--- a/runtime/vm/compiler/backend/il_arm.cc
+++ b/runtime/vm/compiler/backend/il_arm.cc
@@ -664,7 +664,8 @@
 
 void ConstantInstr::EmitMoveToLocation(FlowGraphCompiler* compiler,
                                        const Location& destination,
-                                       Register tmp) {
+                                       Register tmp,
+                                       intptr_t pair_index) {
   if (destination.IsRegister()) {
     if (RepresentationUtils::IsUnboxedInteger(representation())) {
       int64_t v;
@@ -675,7 +676,9 @@
         // Smi untagging, which means the resulting value may be unexpected.
         ASSERT(v >= 0);
       }
-      __ LoadImmediate(destination.reg(), v);
+      __ LoadImmediate(destination.reg(), pair_index == 0
+                                              ? Utils::Low32Bits(v)
+                                              : Utils::High32Bits(v));
     } else {
       ASSERT(representation() == kTagged);
       __ LoadObject(destination.reg(), value_);
@@ -708,7 +711,8 @@
       int64_t v;
       const bool ok = compiler::HasIntegerValue(value_, &v);
       RELEASE_ASSERT(ok);
-      __ LoadImmediate(tmp, v);
+      __ LoadImmediate(
+          tmp, pair_index == 0 ? Utils::Low32Bits(v) : Utils::High32Bits(v));
     } else {
       __ LoadObject(tmp, value_);
     }
diff --git a/runtime/vm/compiler/backend/il_arm64.cc b/runtime/vm/compiler/backend/il_arm64.cc
index 1935dac..d375c59 100644
--- a/runtime/vm/compiler/backend/il_arm64.cc
+++ b/runtime/vm/compiler/backend/il_arm64.cc
@@ -580,7 +580,9 @@
 
 void ConstantInstr::EmitMoveToLocation(FlowGraphCompiler* compiler,
                                        const Location& destination,
-                                       Register tmp) {
+                                       Register tmp,
+                                       intptr_t pair_index) {
+  ASSERT(pair_index == 0);  // No pair representation needed on 64-bit.
   if (destination.IsRegister()) {
     if (representation() == kUnboxedInt32 ||
         representation() == kUnboxedUint32 ||
diff --git a/runtime/vm/compiler/backend/il_ia32.cc b/runtime/vm/compiler/backend/il_ia32.cc
index a308aaa..4cd399c 100644
--- a/runtime/vm/compiler/backend/il_ia32.cc
+++ b/runtime/vm/compiler/backend/il_ia32.cc
@@ -398,7 +398,8 @@
 
 void ConstantInstr::EmitMoveToLocation(FlowGraphCompiler* compiler,
                                        const Location& destination,
-                                       Register tmp) {
+                                       Register tmp,
+                                       intptr_t pair_index) {
   if (destination.IsRegister()) {
     if (RepresentationUtils::IsUnboxedInteger(representation())) {
       int64_t v;
@@ -409,7 +410,9 @@
         // Smi untagging, which means the resulting value may be unexpected.
         ASSERT(v >= 0);
       }
-      __ movl(destination.reg(), compiler::Immediate(v));
+      __ movl(destination.reg(),
+              compiler::Immediate(pair_index == 0 ? Utils::Low32Bits(v)
+                                                  : Utils::High32Bits(v)));
     } else {
       ASSERT(representation() == kTagged);
       __ LoadObjectSafely(destination.reg(), value_);
@@ -444,10 +447,13 @@
     __ movsd(LocationToStackSlotAddress(destination), FpuTMP);
   } else {
     ASSERT(destination.IsStackSlot());
-    if (value_.IsSmi() &&
-        RepresentationUtils::IsUnboxedInteger(representation())) {
+    if (RepresentationUtils::IsUnboxedInteger(representation())) {
+      int64_t v;
+      const bool ok = compiler::HasIntegerValue(value_, &v);
+      RELEASE_ASSERT(ok);
       __ movl(LocationToStackSlotAddress(destination),
-              compiler::Immediate(Smi::Cast(value_).Value()));
+              compiler::Immediate(pair_index == 0 ? Utils::Low32Bits(v)
+                                                  : Utils::High32Bits(v)));
     } else {
       if (compiler::Assembler::IsSafeSmi(value_) || value_.IsNull()) {
         __ movl(LocationToStackSlotAddress(destination),
diff --git a/runtime/vm/compiler/backend/il_x64.cc b/runtime/vm/compiler/backend/il_x64.cc
index e24c972..03d9556 100644
--- a/runtime/vm/compiler/backend/il_x64.cc
+++ b/runtime/vm/compiler/backend/il_x64.cc
@@ -529,7 +529,9 @@
 
 void ConstantInstr::EmitMoveToLocation(FlowGraphCompiler* compiler,
                                        const Location& destination,
-                                       Register tmp) {
+                                       Register tmp,
+                                       intptr_t pair_index) {
+  ASSERT(pair_index == 0);  // No pair representation needed on 64-bit.
   if (destination.IsRegister()) {
     if (RepresentationUtils::IsUnboxedInteger(representation())) {
       const int64_t value = Integer::Cast(value_).AsInt64Value();
diff --git a/runtime/vm/compiler/backend/inliner_test.cc b/runtime/vm/compiler/backend/inliner_test.cc
index 26eb9e5..cf772e4 100644
--- a/runtime/vm/compiler/backend/inliner_test.cc
+++ b/runtime/vm/compiler/backend/inliner_test.cc
@@ -216,15 +216,15 @@
   ILMatcher cursor(flow_graph, entry, /*trace=*/true,
                    ParallelMovesHandling::kSkip);
 
-  Instruction* unbox1 = nullptr;
-  Instruction* unbox2 = nullptr;
-
   RELEASE_ASSERT(cursor.TryMatch({
       kMoveGlob,
       kMatchAndMoveCreateArray,
-      {kMoveAny, &unbox1},
       kMatchAndMoveUnboxInt64,
-      {kMoveAny, &unbox2},
+#if defined(TARGET_ARCH_IS_32_BIT)
+      // TODO(rmacnak): Implement missing ops to allow 32-bit architectures in
+      // UnboxInt64Instr::Canonicalize.
+      kMatchAndMoveUnboxInt64,
+#endif
       kMatchAndMoveGoto,
 
       // Loop header
@@ -249,9 +249,6 @@
       kMatchAndMoveTargetEntry,
       kMatchReturn,
   }));
-
-  EXPECT(unbox1->IsUnboxedConstant() || unbox1->IsUnboxInt64());
-  EXPECT(unbox2->IsUnboxedConstant() || unbox2->IsUnboxInt64());
 }
 
 #endif  // defined(DART_PRECOMPILER)
diff --git a/runtime/vm/compiler/backend/linearscan.cc b/runtime/vm/compiler/backend/linearscan.cc
index a7fe16d..219cc0d 100644
--- a/runtime/vm/compiler/backend/linearscan.cc
+++ b/runtime/vm/compiler/backend/linearscan.cc
@@ -642,7 +642,14 @@
   GraphEntryInstr* graph_entry = flow_graph_.graph_entry();
   for (intptr_t i = 0; i < graph_entry->initial_definitions()->length(); i++) {
     Definition* defn = (*graph_entry->initial_definitions())[i];
-    ASSERT(!defn->HasPairRepresentation());
+    if (defn->HasPairRepresentation()) {
+      // The lower bits are pushed after the higher bits
+      LiveRange* range = GetLiveRange(ToSecondPairVreg(defn->ssa_temp_index()));
+      range->AddUseInterval(graph_entry->start_pos(), graph_entry->end_pos());
+      range->DefineAt(graph_entry->start_pos());
+      ProcessInitialDefinition(defn, range, graph_entry,
+                               /*second_location_for_definition=*/true);
+    }
     LiveRange* range = GetLiveRange(defn->ssa_temp_index());
     range->AddUseInterval(graph_entry->start_pos(), graph_entry->end_pos());
     range->DefineAt(graph_entry->start_pos());
@@ -724,8 +731,9 @@
   } else {
     ConstantInstr* constant = defn->AsConstant();
     ASSERT(constant != NULL);
-    range->set_assigned_location(Location::Constant(constant));
-    range->set_spill_slot(Location::Constant(constant));
+    const intptr_t pair_index = second_location_for_definition ? 1 : 0;
+    range->set_assigned_location(Location::Constant(constant, pair_index));
+    range->set_spill_slot(Location::Constant(constant, pair_index));
   }
   AssignSafepoints(defn, range);
   range->finger()->Initialize(range);
@@ -823,7 +831,11 @@
 
     ConstantInstr* constant = val->definition()->AsConstant();
     if (constant != NULL) {
-      move->set_src(Location::Constant(constant));
+      move->set_src(Location::Constant(constant, /*pair_index*/ 0));
+      if (val->definition()->HasPairRepresentation()) {
+        move = parallel_move->MoveOperandsAt(move_index++);
+        move->set_src(Location::Constant(constant, /*pair_index*/ 1));
+      }
       continue;
     }
 
@@ -2926,7 +2938,10 @@
     Definition* def = (*initial_definitions)[i];
     value_representations_[def->ssa_temp_index()] =
         RepresentationForRange(def->representation());
-    ASSERT(!def->HasPairRepresentation());
+    if (def->HasPairRepresentation()) {
+      value_representations_[ToSecondPairVreg(def->ssa_temp_index())] =
+          RepresentationForRange(def->representation());
+    }
   }
 
   for (BlockIterator it = flow_graph_.reverse_postorder_iterator(); !it.Done();
diff --git a/runtime/vm/compiler/backend/locations.h b/runtime/vm/compiler/backend/locations.h
index ea708e4..c758233 100644
--- a/runtime/vm/compiler/backend/locations.h
+++ b/runtime/vm/compiler/backend/locations.h
@@ -145,21 +145,21 @@
     // allocated by a register allocator.  Each unallocated location has
     // a policy that specifies what kind of location is suitable. Payload
     // contains register allocation policy.
-    kUnallocated = 3,
+    kUnallocated = 1 << 2,
 
     // Spill slots allocated by the register allocator.  Payload contains
     // a spill index.
-    kStackSlot = 4,        // Word size slot.
-    kDoubleStackSlot = 7,  // 64bit stack slot.
-    kQuadStackSlot = 11,   // 128bit stack slot.
+    kStackSlot = 2 << 2,        // Word size slot.
+    kDoubleStackSlot = 3 << 2,  // 64bit stack slot.
+    kQuadStackSlot = 4 << 2,    // 128bit stack slot.
 
     // Register location represents a fixed register.  Payload contains
     // register code.
-    kRegister = 8,
+    kRegister = 5 << 2,
 
     // FpuRegister location represents a fixed fpu register.  Payload contains
     // its code.
-    kFpuRegister = 12,
+    kFpuRegister = 6 << 2,
   };
 
   Location() : value_(kInvalidLocation) {
@@ -204,14 +204,21 @@
   bool IsInvalid() const { return value_ == kInvalidLocation; }
 
   // Constants.
-  bool IsConstant() const {
-    return (value_ & kLocationTagMask) == kConstantTag;
+  bool IsConstant() const { return (value_ & kConstantTag) == kConstantTag; }
+
+  static Location Constant(const ConstantInstr* obj, int pair_index = 0) {
+    ASSERT((pair_index == 0) || (pair_index == 1));
+    Location loc(reinterpret_cast<uword>(obj) |
+                 (pair_index != 0 ? kPairLocationTag : 0) |
+                 kConstantTag);
+    ASSERT(obj == loc.constant_instruction());
+    ASSERT(loc.pair_index() == pair_index);
+    return loc;
   }
 
-  static Location Constant(const ConstantInstr* obj) {
-    Location loc(reinterpret_cast<uword>(obj) | kConstantTag);
-    ASSERT(obj == loc.constant_instruction());
-    return loc;
+  intptr_t pair_index() const {
+    ASSERT(IsConstant());
+    return (value_ & kPairLocationTag) != 0 ? 1 : 0;
   }
 
   ConstantInstr* constant_instruction() const {
diff --git a/runtime/vm/compiler/backend/redundancy_elimination_test.cc b/runtime/vm/compiler/backend/redundancy_elimination_test.cc
index 2556df7..2e88fcc 100644
--- a/runtime/vm/compiler/backend/redundancy_elimination_test.cc
+++ b/runtime/vm/compiler/backend/redundancy_elimination_test.cc
@@ -1150,47 +1150,48 @@
   /* Flow graph to match:
 
   4:     CheckStackOverflow:8(stack=0, loop=0)
-  6:     v590 <- UnboxedConstant(#1.0 double) T{_Double}
-  8:     v592 <- UnboxedConstant(#2.0 double) T{_Double}
-  9:     ParallelMove r0 <- S+2
- 10:     CheckClass:14(v2 Cids[1: _Double@0150898 etc.  cid 52])
- 12:     v526 <- Unbox:14(v2 T{_Double}) T{_Double}
- 14:     v352 <- BinaryDoubleOp:22(+, v590, v526) T{_Double}
- 15:     ParallelMove DS-9 <- q3
- 16:     v363 <- BinaryDoubleOp:34(+, v592, v526) T{_Double}
- 17:     ParallelMove DS-7 <- q0
- 18:     v21 <- BinaryDoubleOp:28(+, v352, v363) T{_Double}
- 19:     ParallelMove r1 <- C, r2 <- C, DS-5 <- q1
- 20:     v24 <- CreateArray:30(v0, v23) T{_List}
- 21:     ParallelMove r2 <- r0
- 22:     ParallelMove S-3 <- r2
- 22:     StoreIndexed(v24, v6, v26, NoStoreBarrier)
- 24:     StoreIndexed(v24, v7, v7, NoStoreBarrier)
- 26:     StoreIndexed(v24, v3, v29, NoStoreBarrier)
- 28:     StoreIndexed(v24, v30, v8, NoStoreBarrier)
- 30:     StoreIndexed(v24, v33, v34, NoStoreBarrier)
- 32:     StoreIndexed(v24, v35, v9, NoStoreBarrier)
- 34:     StoreIndexed(v24, v38, v29, NoStoreBarrier)
- 36:     StoreIndexed(v24, v39, v10, NoStoreBarrier)
- 38:     StoreIndexed(v24, v42, v43, NoStoreBarrier)
- 39:     ParallelMove q0 <- DS-9
- 40:     v586 <- Box(v352) T{_Double}
- 41:     ParallelMove r1 <- r2, r0 <- r0
- 42:     StoreIndexed(v24, v44, v586)
- 44:     StoreIndexed(v24, v47, v29, NoStoreBarrier)
- 45:     ParallelMove q0 <- DS-7
- 46:     v588 <- Box(v363) T{_Double}
- 47:     ParallelMove r1 <- r2, r0 <- r0
- 48:     StoreIndexed(v24, v48, v588)
- 50:     StoreIndexed(v24, v51, v52, NoStoreBarrier)
- 51:     ParallelMove q0 <- DS-5
- 52:     v580 <- Box(v21) T{_Double}
- 53:     ParallelMove r1 <- r2, r0 <- r0
- 54:     StoreIndexed(v24, v53, v580)
- 55:     ParallelMove r0 <- r2
- 56:     v54 <- StringInterpolate:44(v24) T{String}
- 57:     ParallelMove r0 <- r0
- 58:     Return:48(v54)
+  5:     ParallelMove rax <- S+2
+  6:     CheckClass:14(v2 Cids[1: _Double@0150898 etc.  cid 52])
+  8:     v526 <- Unbox:14(v2 T{_Double}) T{_Double}
+ 10:     ParallelMove xmm1 <- C
+ 10:     v352 <- BinaryDoubleOp:22(+, v590, v526) T{_Double}
+ 11:     ParallelMove DS-6 <- xmm1
+ 12:     ParallelMove xmm2 <- C
+ 12:     v363 <- BinaryDoubleOp:34(+, v591, v526) T{_Double}
+ 13:     ParallelMove DS-5 <- xmm2
+ 14:     ParallelMove xmm0 <- xmm1
+ 14:     v21 <- BinaryDoubleOp:28(+, v352, v363) T{_Double}
+ 15:     ParallelMove rbx <- C, r10 <- C, DS-4 <- xmm0
+ 16:     v24 <- CreateArray:30(v0, v23) T{_List}
+ 17:     ParallelMove rcx <- rax
+ 18:     ParallelMove S-3 <- rcx
+ 18:     StoreIndexed(v24, v6, v26, NoStoreBarrier)
+ 20:     StoreIndexed(v24, v7, v7, NoStoreBarrier)
+ 22:     StoreIndexed(v24, v3, v29, NoStoreBarrier)
+ 24:     StoreIndexed(v24, v30, v8, NoStoreBarrier)
+ 26:     StoreIndexed(v24, v33, v34, NoStoreBarrier)
+ 28:     StoreIndexed(v24, v35, v9, NoStoreBarrier)
+ 30:     StoreIndexed(v24, v38, v29, NoStoreBarrier)
+ 32:     StoreIndexed(v24, v39, v10, NoStoreBarrier)
+ 34:     StoreIndexed(v24, v42, v43, NoStoreBarrier)
+ 35:     ParallelMove xmm0 <- DS-6
+ 36:     v586 <- Box(v352) T{_Double}
+ 37:     ParallelMove rdx <- rcx, rax <- rax
+ 38:     StoreIndexed(v24, v44, v586)
+ 40:     StoreIndexed(v24, v47, v29, NoStoreBarrier)
+ 41:     ParallelMove xmm0 <- DS-5
+ 42:     v588 <- Box(v363) T{_Double}
+ 43:     ParallelMove rdx <- rcx, rax <- rax
+ 44:     StoreIndexed(v24, v48, v588)
+ 46:     StoreIndexed(v24, v51, v52, NoStoreBarrier)
+ 47:     ParallelMove xmm0 <- DS-4
+ 48:     v580 <- Box(v21) T{_Double}
+ 49:     ParallelMove rdx <- rcx, rax <- rax
+ 50:     StoreIndexed(v24, v53, v580)
+ 51:     ParallelMove rax <- rcx
+ 52:     v54 <- StringInterpolate:44(v24) T{String}
+ 53:     ParallelMove rax <- rax
+ 54:     Return:48(v54)
 */
 
   CreateArrayInstr* create_array = nullptr;
@@ -1201,8 +1202,6 @@
   RELEASE_ASSERT(cursor.TryMatch({
       kMatchAndMoveFunctionEntry,
       kMatchAndMoveCheckStackOverflow,
-      kMatchAndMoveUnboxedConstant,
-      kMatchAndMoveUnboxedConstant,
       kMatchAndMoveCheckClass,
       kMatchAndMoveUnbox,
       kMatchAndMoveBinaryDoubleOp,
diff --git a/tools/VERSION b/tools/VERSION
index cf03173..76da197 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
 MAJOR 2
 MINOR 14
 PATCH 0
-PRERELEASE 26
+PRERELEASE 27
 PRERELEASE_PATCH 0
\ No newline at end of file