[VM interpreter] Support inline type check of implicit setter value.

Change-Id: I2c357a638308f15d83d284a85623b550395a6f4e
Reviewed-on: https://dart-review.googlesource.com/64825
Commit-Queue: Régis Crelier <regis@google.com>
Reviewed-by: Alexander Markov <alexmarkov@google.com>
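
For context, the implicit setter fast path in the interpreter now decides whether
the assigned value needs a type check before the store: the check is skipped when
the static field type is dynamic, Object, or void, or when the value is null;
otherwise AssertAssignable runs against the subtype test cache and falls back to
the DRT_TypeCheck runtime entry. Below is a minimal standalone sketch of just that
decision; FieldTypeKind and NeedsInlineTypeCheck are hypothetical simplifications,
while the real code works on raw class ids (kDynamicCid, kInstanceCid, kVoidCid)
as shown in the interpreter.cc hunk further down.

  // Simplified illustration only; not VM code.
  #include <cstdio>

  enum class FieldTypeKind { kDynamic, kObject, kVoid, kOther };

  // Returns true if the assigned value must be type checked before the store.
  bool NeedsInlineTypeCheck(FieldTypeKind field_type, bool value_is_null) {
    // The interpreter skips the check for null values (see the setter hunk).
    if (value_is_null) return false;
    switch (field_type) {
      case FieldTypeKind::kDynamic:
      case FieldTypeKind::kObject:
      case FieldTypeKind::kVoid:
        return false;  // Top types: no check needed.
      default:
        return true;   // Otherwise run AssertAssignable.
    }
  }

  int main() {
    std::printf("%d\n", NeedsInlineTypeCheck(FieldTypeKind::kOther, false));    // 1
    std::printf("%d\n", NeedsInlineTypeCheck(FieldTypeKind::kDynamic, false));  // 0
    std::printf("%d\n", NeedsInlineTypeCheck(FieldTypeKind::kOther, true));     // 0
    return 0;
  }
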
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler.cc b/runtime/vm/compiler/backend/flow_graph_compiler.cc
index d180a24..73dc5fb 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler.cc
@@ -1065,9 +1065,6 @@
 // Returns 'true' if regular code generation should be skipped.
 bool FlowGraphCompiler::TryIntrinsify() {
   if (FLAG_intrinsify) {
-    const Class& owner = Class::Handle(parsed_function().function().Owner());
-    String& name = String::Handle(parsed_function().function().name());
-
     // Intrinsification skips arguments checks, therefore disable if in checked
     // mode or strong mode.
     //
@@ -1075,9 +1072,7 @@
     // there are no checks necessary in any case and we can therefore intrinsify
     // them even in checked mode and strong mode.
     if (parsed_function().function().kind() == RawFunction::kImplicitGetter) {
-      // TODO(27590) Store Field object inside RawFunction::data_ if possible.
-      name = Field::NameFromGetter(name);
-      const Field& field = Field::Handle(owner.LookupFieldAllowPrivate(name));
+      const Field& field = Field::Handle(function().accessor_field());
       ASSERT(!field.IsNull());
 
       // Only intrinsify getter if the field cannot contain a mutable double.
@@ -1093,9 +1088,7 @@
     } else if (parsed_function().function().kind() ==
                RawFunction::kImplicitSetter) {
       if (!isolate()->argument_type_checks()) {
-        // TODO(27590) Store Field object inside RawFunction::data_ if possible.
-        name = Field::NameFromSetter(name);
-        const Field& field = Field::Handle(owner.LookupFieldAllowPrivate(name));
+        const Field& field = Field::Handle(function().accessor_field());
         ASSERT(!field.IsNull());
 
         if (field.is_instance() &&
diff --git a/runtime/vm/constants_dbc.h b/runtime/vm/constants_dbc.h
index 2c94483..c211898 100644
--- a/runtime/vm/constants_dbc.h
+++ b/runtime/vm/constants_dbc.h
@@ -596,9 +596,11 @@
 //    arguments SP[-2] using SubtypeTestCache PP[D].
 //    If A is 1, then the instance may be a Smi.
 //
+//    Instance remains on stack. Other arguments are consumed.
+//
 //  - AssertSubtype
 //
-//    Assers that one type is a subtype of another.  Throws a TypeError
+//    Assert that one type is a subtype of another.  Throws a TypeError
 //    otherwise.  The stack has the following arguments on it:
 //
 //        SP[-4]  instantiator type args
diff --git a/runtime/vm/constants_kbc.h b/runtime/vm/constants_kbc.h
index e6250b5..6874731 100644
--- a/runtime/vm/constants_kbc.h
+++ b/runtime/vm/constants_kbc.h
@@ -624,9 +624,11 @@
 //    arguments SP[-2] using SubtypeTestCache PP[D].
 //    If A is 1, then the instance may be a Smi.
 //
+//    Instance remains on stack. Other arguments are consumed.
+//
 //  - AssertSubtype
 //
-//    Assers that one type is a subtype of another.  Throws a TypeError
+//    Assert that one type is a subtype of another.  Throws a TypeError
 //    otherwise.  The stack has the following arguments on it:
 //
 //        SP[-4]  instantiator type args
diff --git a/runtime/vm/interpreter.cc b/runtime/vm/interpreter.cc
index 4101d0f..11fc088 100644
--- a/runtime/vm/interpreter.cc
+++ b/runtime/vm/interpreter.cc
@@ -963,16 +963,46 @@
     }
     case RawFunction::kImplicitSetter: {
       // Field object is cached in function's data_.
-      // TODO(regis): We currently ignore field.guarded_cid() and the type
-      // test of the setter value. Either execute these tests here or fall
-      // back to compiling the setter when required.
+      // TODO(regis): We currently ignore field.guarded_cid().
       RawInstance* instance = reinterpret_cast<RawInstance*>(*call_base);
       RawField* field = reinterpret_cast<RawField*>(function->ptr()->data_);
       intptr_t offset_in_words = Smi::Value(field->ptr()->value_.offset_);
-      reinterpret_cast<RawObject**>(instance->ptr())[offset_in_words] =
-          *(call_base + 1);
+      RawAbstractType* field_type = field->ptr()->type_;
+      const classid_t cid =
+          field_type->GetClassId() == kTypeCid
+              ? Smi::Value(reinterpret_cast<RawSmi*>(
+                    Type::RawCast(field_type)->ptr()->type_class_id_))
+              : kIllegalCid;  // Not really illegal; just do not skip the check.
+      // Perform the type test of the value unless the field type is dynamic,
+      // Object, or void, or the value is null.
+      RawObject* null_value = Object::null();
+      RawObject* value = *(call_base + 1);
+      if (cid != kDynamicCid && cid != kInstanceCid && cid != kVoidCid &&
+          value != null_value) {
+        // Push arguments of type test.
+        // Type checked value is at call_base + 1.
+        // Provide type arguments of instance as instantiator.
+        RawClass* instance_class = thread->isolate()->class_table()->At(
+            InterpreterHelpers::GetClassId(instance));
+        call_base[2] =
+            instance_class->ptr()->num_type_arguments_ > 0
+                ? reinterpret_cast<RawObject**>(
+                      instance
+                          ->ptr())[instance_class->ptr()
+                                       ->type_arguments_field_offset_in_words_]
+                : null_value;
+        call_base[3] = null_value;  // Implicit setters cannot be generic.
+        call_base[4] = field_type;
+        call_base[5] = field->ptr()->name_;
+        if (!AssertAssignable(thread, *pc, *FP, call_base + 5, call_base + 1,
+                              SubtypeTestCache::RawCast(null_value))) {
+          *invoked = true;
+          return false;
+        }
+      }
+      reinterpret_cast<RawObject**>(instance->ptr())[offset_in_words] = value;
       *SP = call_base;
-      **SP = Object::null();
+      **SP = null_value;
       *invoked = true;
       return true;
     }
@@ -1460,6 +1490,81 @@
   return true;
 }
 
+bool Interpreter::AssertAssignable(Thread* thread,
+                                   uint32_t* pc,
+                                   RawObject** FP,
+                                   RawObject** call_top,
+                                   RawObject** args,
+                                   RawSubtypeTestCache* cache) {
+  RawObject* null_value = Object::null();
+  if (cache != null_value) {
+    RawInstance* instance = static_cast<RawInstance*>(args[0]);
+    RawTypeArguments* instantiator_type_arguments =
+        static_cast<RawTypeArguments*>(args[1]);
+    RawTypeArguments* function_type_arguments =
+        static_cast<RawTypeArguments*>(args[2]);
+
+    const intptr_t cid = InterpreterHelpers::GetClassId(instance);
+
+    RawTypeArguments* instance_type_arguments =
+        static_cast<RawTypeArguments*>(null_value);
+    RawObject* instance_cid_or_function;
+    if (cid == kClosureCid) {
+      RawClosure* closure = static_cast<RawClosure*>(instance);
+      if (closure->ptr()->function_type_arguments_ != TypeArguments::null()) {
+        // Cache cannot be used for generic closures.
+        goto AssertAssignableCallRuntime;
+      }
+      instance_type_arguments = closure->ptr()->instantiator_type_arguments_;
+      instance_cid_or_function = closure->ptr()->function_;
+    } else {
+      instance_cid_or_function = Smi::New(cid);
+
+      RawClass* instance_class = thread->isolate()->class_table()->At(cid);
+      if (instance_class->ptr()->num_type_arguments_ < 0) {
+        goto AssertAssignableCallRuntime;
+      } else if (instance_class->ptr()->num_type_arguments_ > 0) {
+        instance_type_arguments = reinterpret_cast<RawTypeArguments**>(
+            instance->ptr())[instance_class->ptr()
+                                 ->type_arguments_field_offset_in_words_];
+      }
+    }
+
+    for (RawObject** entries = cache->ptr()->cache_->ptr()->data();
+         entries[0] != null_value;
+         entries += SubtypeTestCache::kTestEntryLength) {
+      if ((entries[SubtypeTestCache::kInstanceClassIdOrFunction] ==
+           instance_cid_or_function) &&
+          (entries[SubtypeTestCache::kInstanceTypeArguments] ==
+           instance_type_arguments) &&
+          (entries[SubtypeTestCache::kInstantiatorTypeArguments] ==
+           instantiator_type_arguments) &&
+          (entries[SubtypeTestCache::kFunctionTypeArguments] ==
+           function_type_arguments)) {
+        if (Bool::True().raw() == entries[SubtypeTestCache::kTestResult]) {
+          return true;
+        } else {
+          break;
+        }
+      }
+    }
+  }
+
+AssertAssignableCallRuntime:
+  // TODO(regis): Modify AssertAssignable bytecode to expect arguments in the
+  // same order as the TypeCheck runtime call, so this copying can be avoided.
+  call_top[1] = args[0];  // instance
+  call_top[2] = args[3];  // type
+  call_top[3] = args[1];  // instantiator type args
+  call_top[4] = args[2];  // function type args
+  call_top[5] = args[4];  // name
+  call_top[6] = cache;
+  call_top[7] = Smi::New(kTypeCheckFromInline);
+  Exit(thread, FP, call_top + 8, pc);
+  NativeArguments native_args(thread, 7, call_top + 1, call_top + 1);
+  return InvokeRuntime(thread, this, DRT_TypeCheck, native_args);
+}
+
 RawObject* Interpreter::Call(const Function& function,
                              const Array& arguments_descriptor,
                              const Array& arguments,
@@ -3583,76 +3688,13 @@
     if (!smi_ok && (args[0] != null_value)) {
       RawSubtypeTestCache* cache =
           static_cast<RawSubtypeTestCache*>(LOAD_CONSTANT(rD));
-      if (cache != null_value) {
-        RawInstance* instance = static_cast<RawInstance*>(args[0]);
-        RawTypeArguments* instantiator_type_arguments =
-            static_cast<RawTypeArguments*>(args[1]);
-        RawTypeArguments* function_type_arguments =
-            static_cast<RawTypeArguments*>(args[2]);
 
-        const intptr_t cid = InterpreterHelpers::GetClassId(instance);
-
-        RawTypeArguments* instance_type_arguments =
-            static_cast<RawTypeArguments*>(null_value);
-        RawObject* instance_cid_or_function;
-        if (cid == kClosureCid) {
-          RawClosure* closure = static_cast<RawClosure*>(instance);
-          if (closure->ptr()->function_type_arguments_ !=
-              TypeArguments::null()) {
-            // Cache cannot be used for generic closures.
-            goto AssertAssignableCallRuntime;
-          }
-          instance_type_arguments =
-              closure->ptr()->instantiator_type_arguments_;
-          instance_cid_or_function = closure->ptr()->function_;
-        } else {
-          instance_cid_or_function = Smi::New(cid);
-
-          RawClass* instance_class = thread->isolate()->class_table()->At(cid);
-          if (instance_class->ptr()->num_type_arguments_ < 0) {
-            goto AssertAssignableCallRuntime;
-          } else if (instance_class->ptr()->num_type_arguments_ > 0) {
-            instance_type_arguments = reinterpret_cast<RawTypeArguments**>(
-                instance->ptr())[instance_class->ptr()
-                                     ->type_arguments_field_offset_in_words_];
-          }
-        }
-
-        for (RawObject** entries = cache->ptr()->cache_->ptr()->data();
-             entries[0] != null_value;
-             entries += SubtypeTestCache::kTestEntryLength) {
-          if ((entries[SubtypeTestCache::kInstanceClassIdOrFunction] ==
-               instance_cid_or_function) &&
-              (entries[SubtypeTestCache::kInstanceTypeArguments] ==
-               instance_type_arguments) &&
-              (entries[SubtypeTestCache::kInstantiatorTypeArguments] ==
-               instantiator_type_arguments) &&
-              (entries[SubtypeTestCache::kFunctionTypeArguments] ==
-               function_type_arguments)) {
-            if (true_value == entries[SubtypeTestCache::kTestResult]) {
-              goto AssertAssignableOk;
-            } else {
-              break;
-            }
-          }
-        }
-      }
-
-    AssertAssignableCallRuntime:
-      SP[1] = args[0];  // instance
-      SP[2] = args[3];  // type
-      SP[3] = args[1];  // instantiator type args
-      SP[4] = args[2];  // function type args
-      SP[5] = args[4];  // name
-      SP[6] = cache;
-      SP[7] = Smi::New(kTypeCheckFromInline);
-      Exit(thread, FP, SP + 8, pc);
-      NativeArguments native_args(thread, 7, SP + 1, SP - 4);
-      INVOKE_RUNTIME(DRT_TypeCheck, native_args);
+      if (!AssertAssignable(thread, pc, FP, SP, args, cache)) {
+        HANDLE_EXCEPTION;
+      }
     }
 
-  AssertAssignableOk:
-    SP -= 4;
+    SP -= 4;  // Instance remains on stack.
     DISPATCH();
   }
 
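The new Interpreter::AssertAssignable helper above probes the SubtypeTestCache
with a plain linear scan before calling the runtime. A compilable sketch of that
probe under simplified assumptions (a flat array of fixed-shape entries and plain
pointers instead of raw VM objects; Entry, CacheProbe, and Probe are hypothetical
names, with the field order following the SubtypeTestCache indices in the diff):

  #include <cstddef>

  // Simplified stand-in for one SubtypeTestCache entry.
  struct Entry {
    const void* instance_cid_or_function;
    const void* instance_type_arguments;
    const void* instantiator_type_arguments;
    const void* function_type_arguments;
    bool test_result;
    bool occupied;  // The real cache terminates on a null first slot instead.
  };

  enum class CacheProbe { kHitTrue, kHitFalse, kMiss };

  // Linear scan mirroring the loop in the helper: stop at the first empty
  // entry; on a matching entry, return the cached result.
  CacheProbe Probe(const Entry* entries, size_t length,
                   const void* cid_or_function, const void* instance_tav,
                   const void* instantiator_tav, const void* function_tav) {
    for (size_t i = 0; i < length && entries[i].occupied; i++) {
      if (entries[i].instance_cid_or_function == cid_or_function &&
          entries[i].instance_type_arguments == instance_tav &&
          entries[i].instantiator_type_arguments == instantiator_tav &&
          entries[i].function_type_arguments == function_tav) {
        return entries[i].test_result ? CacheProbe::kHitTrue
                                      : CacheProbe::kHitFalse;
      }
    }
    return CacheProbe::kMiss;
  }

Both a miss and a cached false result end up in the DRT_TypeCheck runtime call,
which either updates the cache or throws the TypeError.
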
diff --git a/runtime/vm/interpreter.h b/runtime/vm/interpreter.h
index 1ca0809..80621a4 100644
--- a/runtime/vm/interpreter.h
+++ b/runtime/vm/interpreter.h
@@ -24,6 +24,7 @@
 class RawArray;
 class RawObjectPool;
 class RawFunction;
+class RawSubtypeTestCache;
 class ObjectPointerVisitor;
 
 // Interpreter intrinsic handler. It is invoked on entry to the intrinsified
@@ -195,6 +196,13 @@
                           RawObject*** SP,
                           uint32_t** pc);
 
+  bool AssertAssignable(Thread* thread,
+                        uint32_t* pc,
+                        RawObject** FP,
+                        RawObject** call_top,
+                        RawObject** args,
+                        RawSubtypeTestCache* cache);
+
 #if defined(DEBUG)
   // Returns true if tracing of executed instructions is enabled.
   bool IsTracingExecution() const;
diff --git a/runtime/vm/runtime_entry.cc b/runtime/vm/runtime_entry.cc
index 856a6ca..0ebea7e 100644
--- a/runtime/vm/runtime_entry.cc
+++ b/runtime/vm/runtime_entry.cc
@@ -794,6 +794,15 @@
 
   if (should_update_cache) {
     if (cache.IsNull()) {
+#if defined(DART_USE_INTERPRETER)
+      // TODO(regis): Remove this workaround once the interpreter can provide a
+      // non-null cache for the type test in an implicit setter.
+      if (mode == kTypeCheckFromInline) {
+        arguments.SetReturn(src_instance);
+        return;
+      }
+#endif  // defined(DART_USE_INTERPRETER)
+
 #if !defined(TARGET_ARCH_DBC) && !defined(TARGET_ARCH_IA32)
       ASSERT(mode == kTypeCheckFromSlowStub);
       // We lazily create [SubtypeTestCache] for those call sites which actually