Keep a trail while checking upper bounds in the VM in order to properly handle
cycles via bounds (fixes #25568).
Add a regression test.
Improve handling of recursive types with trails.
Make sure we do not modify already canonicalized types.
R=rmacnak@google.com
Review URL: https://codereview.chromium.org/1674383002 .
diff --git a/runtime/lib/mirrors.cc b/runtime/lib/mirrors.cc
index 24254db..b7d95b1 100644
--- a/runtime/lib/mirrors.cc
+++ b/runtime/lib/mirrors.cc
@@ -783,7 +783,7 @@
TypeArguments::Handle(instantiator.arguments());
Error& bound_error = Error::Handle();
AbstractType& result = AbstractType::Handle(
- type.InstantiateFrom(type_args, &bound_error, NULL, Heap::kOld));
+ type.InstantiateFrom(type_args, &bound_error, NULL, NULL, Heap::kOld));
if (!bound_error.IsNull()) {
Exceptions::PropagateError(bound_error);
UNREACHABLE();
@@ -1675,7 +1675,7 @@
// type arguments of the type reflected by the class mirror.
Error& bound_error = Error::Handle();
redirect_type ^= redirect_type.InstantiateFrom(
- type_arguments, &bound_error, NULL, Heap::kOld);
+ type_arguments, &bound_error, NULL, NULL, Heap::kOld);
if (!bound_error.IsNull()) {
Exceptions::PropagateError(bound_error);
UNREACHABLE();
diff --git a/runtime/vm/class_finalizer.cc b/runtime/vm/class_finalizer.cc
index e2fd349..f2c7b12 100644
--- a/runtime/vm/class_finalizer.cc
+++ b/runtime/vm/class_finalizer.cc
@@ -440,7 +440,7 @@
const TypeArguments& type_args = TypeArguments::Handle(type.arguments());
Error& bound_error = Error::Handle();
target_type ^= target_type.InstantiateFrom(
- type_args, &bound_error, NULL, Heap::kOld);
+ type_args, &bound_error, NULL, NULL, Heap::kOld);
if (bound_error.IsNull()) {
target_type ^= FinalizeType(cls, target_type, kCanonicalize);
} else {
@@ -624,13 +624,14 @@
TypeArguments& pending_arguments = TypeArguments::Handle(zone);
const intptr_t num_pending_types = pending_types->length();
for (intptr_t i = num_pending_types - 1; i >= 0; i--) {
- const Type& pending_type = Type::Cast(pending_types->At(i));
+ const AbstractType& pending_type = pending_types->At(i);
if (FLAG_trace_type_finalization) {
THR_Print(" Comparing with pending type '%s': %s\n",
String::Handle(pending_type.Name()).ToCString(),
pending_type.ToCString());
}
if ((pending_type.raw() != type.raw()) &&
+ pending_type.IsType() &&
(pending_type.type_class() == type_cls.raw())) {
pending_arguments = pending_type.arguments();
if (!pending_arguments.IsSubvectorEquivalent(arguments,
@@ -741,10 +742,10 @@
}
}
if (offset > 0) {
- TrailPtr trail = new Trail(Z, 4);
- Error& bound_error = Error::Handle();
+ TrailPtr instantiation_trail = new Trail(Z, 4);
+ Error& bound_error = Error::Handle(Z);
FinalizeTypeArguments(type_class, full_arguments, offset,
- &bound_error, pending_types, trail);
+ &bound_error, pending_types, instantiation_trail);
}
if (full_arguments.IsRaw(0, num_type_arguments)) {
// The parameterized_type is raw. Set its argument vector to null, which
@@ -761,6 +762,11 @@
if (!type.IsFinalized()) {
ASSERT(full_arguments.IsNull() ||
!full_arguments.IsRaw(0, num_type_arguments));
+ if (FLAG_trace_type_finalization) {
+ THR_Print("Marking type '%s' as finalized for class '%s'\n",
+ String::Handle(Z, type.Name()).ToCString(),
+ String::Handle(Z, cls.Name()).ToCString());
+ }
// Mark the type as finalized.
type.SetIsFinalized();
// Do not yet remove the type from the pending_types array.
@@ -808,7 +814,7 @@
intptr_t num_uninitialized_arguments,
Error* bound_error,
PendingTypes* pending_types,
- TrailPtr trail) {
+ TrailPtr instantiation_trail) {
ASSERT(arguments.Length() >= cls.NumTypeArguments());
if (!cls.is_type_finalized()) {
FinalizeTypeParameters(cls, pending_types);
@@ -868,7 +874,7 @@
}
Error& error = Error::Handle();
super_type_arg = super_type_arg.InstantiateFrom(
- arguments, &error, trail, Heap::kOld);
+ arguments, &error, instantiation_trail, NULL, Heap::kOld);
if (!error.IsNull()) {
// InstantiateFrom does not report an error if the type is still
// uninstantiated. Instead, it will return a new BoundedType so
@@ -895,7 +901,7 @@
cls.NumTypeArguments() - cls.NumTypeParameters(),
bound_error,
pending_types,
- trail);
+ instantiation_trail);
Type::Cast(super_type_arg).SetIsFinalized();
}
}
@@ -903,7 +909,7 @@
arguments.SetTypeAt(i, super_type_arg);
}
FinalizeTypeArguments(super_class, arguments, super_offset,
- bound_error, pending_types, trail);
+ bound_error, pending_types, instantiation_trail);
}
}
@@ -918,7 +924,7 @@
Error* bound_error) {
if (!cls.is_type_finalized()) {
FinalizeTypeParameters(cls);
- FinalizeUpperBounds(cls);
+ FinalizeUpperBounds(cls, kFinalize); // No canonicalization yet.
}
// Note that when finalizing a type, we need to verify the bounds in both
// production mode and checked mode, because the finalized type may be written
@@ -970,8 +976,8 @@
if (declared_bound.IsInstantiated()) {
instantiated_bound = declared_bound.raw();
} else {
- instantiated_bound =
- declared_bound.InstantiateFrom(arguments, &error, NULL, Heap::kOld);
+ instantiated_bound = declared_bound.InstantiateFrom(
+ arguments, &error, NULL, NULL, Heap::kOld);
}
if (!instantiated_bound.IsFinalized()) {
// The bound refers to type parameters, creating a cycle; postpone
@@ -1001,7 +1007,8 @@
// This may be called only if type needs to be finalized, therefore
// seems OK to allocate finalized types in old space.
if (!type_param.CheckBound(type_arg, instantiated_bound,
- &error, Heap::kOld) && error.IsNull()) {
+ &error, NULL, Heap::kOld) &&
+ error.IsNull()) {
// The bound cannot be checked at compile time; postpone to run time.
type_arg = BoundedType::New(type_arg, instantiated_bound, type_param);
arguments.SetTypeAt(offset + i, type_arg);
@@ -1022,32 +1029,47 @@
void ClassFinalizer::CheckTypeBounds(const Class& cls,
const AbstractType& type) {
+ Zone* Z = Thread::Current()->zone();
ASSERT(type.IsType() || type.IsFunctionType());
ASSERT(type.IsFinalized());
- TypeArguments& arguments = TypeArguments::Handle(type.arguments());
+ ASSERT(!type.IsCanonical());
+ TypeArguments& arguments = TypeArguments::Handle(Z, type.arguments());
if (arguments.IsNull()) {
return;
}
- const Class& type_class = Class::Handle(type.type_class());
- Error& bound_error = Error::Handle();
+ if (FLAG_trace_type_finalization) {
+ THR_Print("Checking bounds of type '%s' for class '%s'\n",
+ String::Handle(Z, type.Name()).ToCString(),
+ String::Handle(Z, cls.Name()).ToCString());
+ }
+ const Class& type_class = Class::Handle(Z, type.type_class());
+ Error& bound_error = Error::Handle(Z);
CheckTypeArgumentBounds(type_class, arguments, &bound_error);
- type.set_arguments(arguments);
- // If a bound error occurred, mark the type as malbounded.
- // The bound error will be ignored in production mode.
- if (!bound_error.IsNull()) {
- // No compile-time error during finalization.
- const String& type_name = String::Handle(type.UserVisibleName());
- FinalizeMalboundedType(bound_error,
- Script::Handle(cls.script()),
- type,
- "type '%s' has an out of bound type argument",
- type_name.ToCString());
- if (FLAG_trace_type_finalization) {
- THR_Print("Marking type '%s' as malbounded: %s\n",
- String::Handle(type.Name()).ToCString(),
- bound_error.ToCString());
+ // CheckTypeArgumentBounds may have indirectly canonicalized this type.
+ if (!type.IsCanonical()) {
+ type.set_arguments(arguments);
+ // If a bound error occurred, mark the type as malbounded.
+ // The bound error will be ignored in production mode.
+ if (!bound_error.IsNull()) {
+ // No compile-time error during finalization.
+ const String& type_name = String::Handle(Z, type.UserVisibleName());
+ FinalizeMalboundedType(bound_error,
+ Script::Handle(Z, cls.script()),
+ type,
+ "type '%s' has an out of bound type argument",
+ type_name.ToCString());
+ if (FLAG_trace_type_finalization) {
+ THR_Print("Marking type '%s' as malbounded: %s\n",
+ String::Handle(Z, type.Name()).ToCString(),
+ bound_error.ToCString());
+ }
}
}
+ if (FLAG_trace_type_finalization) {
+ THR_Print("Done checking bounds of type '%s': %s\n",
+ String::Handle(Z, type.Name()).ToCString(),
+ type.ToCString());
+ }
}
@@ -1062,7 +1084,11 @@
if (type.IsFinalized()) {
// Ensure type is canonical if canonicalization is requested, unless type is
// malformed.
- if ((finalization >= kCanonicalize) && !type.IsMalformed()) {
+ if ((finalization >= kCanonicalize) &&
+ !type.IsMalformed() &&
+ !type.IsCanonical() &&
+ (type.IsType() || type.IsFunctionType())) {
+ CheckTypeBounds(cls, type);
return type.Canonicalize();
}
return type.raw();
@@ -1091,7 +1117,7 @@
if (FLAG_trace_type_finalization) {
THR_Print("Finalizing type '%s' for class '%s'\n",
String::Handle(Z, resolved_type.Name()).ToCString(),
- cls.ToCString());
+ String::Handle(Z, cls.Name()).ToCString());
}
if (resolved_type.IsTypeParameter()) {
@@ -1142,11 +1168,9 @@
// bounds.
if (is_root_type) {
for (intptr_t i = pending_types->length() - 1; i >= 0; i--) {
- CheckTypeBounds(cls, pending_types->At(i));
- if (FLAG_trace_type_finalization && resolved_type.IsRecursive()) {
- THR_Print("Done finalizing recursive type '%s': %s\n",
- String::Handle(Z, resolved_type.Name()).ToCString(),
- resolved_type.ToCString());
+ const AbstractType& type = pending_types->At(i);
+ if (!type.IsMalformed() && !type.IsCanonical()) {
+ CheckTypeBounds(cls, type);
}
}
}
@@ -1178,13 +1202,14 @@
}
if (finalization >= kCanonicalize) {
- if (FLAG_trace_type_finalization && resolved_type.IsRecursive()) {
- AbstractType& recursive_type =
+ if (FLAG_trace_type_finalization) {
+ THR_Print("Canonicalizing type '%s'\n",
+ String::Handle(Z, resolved_type.Name()).ToCString());
+ AbstractType& canonical_type =
AbstractType::Handle(Z, resolved_type.Canonicalize());
- THR_Print("Done canonicalizing recursive type '%s': %s\n",
- String::Handle(Z, recursive_type.Name()).ToCString(),
- recursive_type.ToCString());
- return recursive_type.raw();
+ THR_Print("Done canonicalizing type '%s'\n",
+ String::Handle(Z, canonical_type.Name()).ToCString());
+ return canonical_type.raw();
}
return resolved_type.Canonicalize();
} else {
@@ -1308,7 +1333,8 @@
// Finalize the upper bounds of the type parameters of class cls.
-void ClassFinalizer::FinalizeUpperBounds(const Class& cls) {
+void ClassFinalizer::FinalizeUpperBounds(const Class& cls,
+ FinalizationKind finalization) {
const intptr_t num_type_params = cls.NumTypeParameters();
TypeParameter& type_param = TypeParameter::Handle();
AbstractType& bound = AbstractType::Handle();
@@ -1324,7 +1350,7 @@
// A bound involved in F-bounded quantification may form a cycle.
continue;
}
- bound = FinalizeType(cls, bound, kCanonicalize);
+ bound = FinalizeType(cls, bound, finalization);
type_param.set_bound(bound);
}
}
@@ -1766,7 +1792,7 @@
param.set_bound(param_bound); // In case part of recursive type.
}
param_bound = param_bound.InstantiateFrom(
- instantiator, &bound_error, NULL, Heap::kOld);
+ instantiator, &bound_error, NULL, NULL, Heap::kOld);
// The instantiator contains only TypeParameter objects and no
// BoundedType objects, so no bound error may occur.
ASSERT(!param_bound.IsBoundedType());
@@ -2000,20 +2026,20 @@
if (type.IsBoundedType()) {
bounded_type = BoundedType::Cast(type).type();
bounded_type = bounded_type.InstantiateFrom(
- instantiator, &bound_error, NULL, Heap::kOld);
+ instantiator, &bound_error, NULL, NULL, Heap::kOld);
// The instantiator contains only TypeParameter objects and no
// BoundedType objects, so no bound error may occur.
ASSERT(bound_error.IsNull());
upper_bound = BoundedType::Cast(type).bound();
upper_bound = upper_bound.InstantiateFrom(
- instantiator, &bound_error, NULL, Heap::kOld);
+ instantiator, &bound_error, NULL, NULL, Heap::kOld);
ASSERT(bound_error.IsNull());
type_parameter = BoundedType::Cast(type).type_parameter();
// The type parameter that declared the bound does not change.
type = BoundedType::New(bounded_type, upper_bound, type_parameter);
} else {
type = type.InstantiateFrom(
- instantiator, &bound_error, NULL, Heap::kOld);
+ instantiator, &bound_error, NULL, NULL, Heap::kOld);
ASSERT(bound_error.IsNull());
}
}
@@ -2312,7 +2338,7 @@
// unfinalized bounds. It is more efficient to finalize them early.
// Finalize bounds even if running in production mode, so that a snapshot
// contains them.
- FinalizeUpperBounds(cls);
+ FinalizeUpperBounds(cls, kCanonicalizeWellFormed);
// Finalize super type.
AbstractType& super_type = AbstractType::Handle(cls.super_type());
if (!super_type.IsNull()) {
diff --git a/runtime/vm/class_finalizer.h b/runtime/vm/class_finalizer.h
index 92a79ab..e630df0 100644
--- a/runtime/vm/class_finalizer.h
+++ b/runtime/vm/class_finalizer.h
@@ -151,7 +151,8 @@
const TypeArguments& arguments,
Error* bound_error);
static void ResolveUpperBounds(const Class& cls);
- static void FinalizeUpperBounds(const Class& cls);
+ static void FinalizeUpperBounds(const Class& cls,
+ FinalizationKind finalization);
static void ResolveSignature(const Class& cls, const Function& function);
static void FinalizeSignature(const Class& cls, const Function& function);
static void ResolveAndFinalizeMemberTypes(const Class& cls);
diff --git a/runtime/vm/code_generator.cc b/runtime/vm/code_generator.cc
index c027722..bf7508a 100644
--- a/runtime/vm/code_generator.cc
+++ b/runtime/vm/code_generator.cc
@@ -206,7 +206,8 @@
ASSERT(!type.IsNull() && !type.IsInstantiated());
ASSERT(instantiator.IsNull() || instantiator.IsInstantiated());
Error& bound_error = Error::Handle();
- type = type.InstantiateFrom(instantiator, &bound_error, NULL, Heap::kOld);
+ type =
+ type.InstantiateFrom(instantiator, &bound_error, NULL, NULL, Heap::kOld);
if (!bound_error.IsNull()) {
// Throw a dynamic type error.
const TokenPosition location = GetCallerLocation();
diff --git a/runtime/vm/flow_graph_builder.cc b/runtime/vm/flow_graph_builder.cc
index b752659..63bdb24 100644
--- a/runtime/vm/flow_graph_builder.cc
+++ b/runtime/vm/flow_graph_builder.cc
@@ -1626,7 +1626,7 @@
// All objects are instances of type T if Object type is a subtype of type T.
const Type& object_type = Type::Handle(Z, Type::ObjectType());
if (type.IsInstantiated() &&
- object_type.IsSubtypeOf(type, NULL, Heap::kOld)) {
+ object_type.IsSubtypeOf(type, NULL, NULL, Heap::kOld)) {
// Must evaluate left side.
EffectGraphVisitor for_left_value(owner());
node->left()->Visit(&for_left_value);
diff --git a/runtime/vm/flow_graph_compiler_arm.cc b/runtime/vm/flow_graph_compiler_arm.cc
index e3780cb..4eb7f3f 100644
--- a/runtime/vm/flow_graph_compiler_arm.cc
+++ b/runtime/vm/flow_graph_compiler_arm.cc
@@ -278,7 +278,8 @@
const Register kInstanceReg = R0;
Error& bound_error = Error::Handle(zone());
const Type& int_type = Type::Handle(zone(), Type::IntType());
- const bool smi_is_ok = int_type.IsSubtypeOf(type, &bound_error, Heap::kOld);
+ const bool smi_is_ok =
+ int_type.IsSubtypeOf(type, &bound_error, NULL, Heap::kOld);
// Malformed type should have been handled at graph construction time.
ASSERT(smi_is_ok || bound_error.IsNull());
__ tst(kInstanceReg, Operand(kSmiTagMask));
@@ -318,7 +319,7 @@
ASSERT(tp_argument.HasResolvedTypeClass());
// Check if type argument is dynamic or Object.
const Type& object_type = Type::Handle(zone(), Type::ObjectType());
- if (object_type.IsSubtypeOf(tp_argument, NULL, Heap::kOld)) {
+ if (object_type.IsSubtypeOf(tp_argument, NULL, NULL, Heap::kOld)) {
// Instance class test only necessary.
return GenerateSubtype1TestCacheLookup(
token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
@@ -378,6 +379,7 @@
type_class,
TypeArguments::Handle(zone()),
NULL,
+ NULL,
Heap::kOld)) {
__ b(is_instance_lbl, EQ);
} else {
@@ -405,7 +407,7 @@
// Custom checking for numbers (Smi, Mint, Bigint and Double).
// Note that instance is not Smi (checked above).
if (type.IsSubtypeOf(
- Type::Handle(zone(), Type::Number()), NULL, Heap::kOld)) {
+ Type::Handle(zone(), Type::Number()), NULL, NULL, Heap::kOld)) {
GenerateNumberTypeCheck(
kClassIdReg, type, is_instance_lbl, is_not_instance_lbl);
return false;
diff --git a/runtime/vm/flow_graph_compiler_arm64.cc b/runtime/vm/flow_graph_compiler_arm64.cc
index b86c581..2a237d9 100644
--- a/runtime/vm/flow_graph_compiler_arm64.cc
+++ b/runtime/vm/flow_graph_compiler_arm64.cc
@@ -270,7 +270,8 @@
const Register kInstanceReg = R0;
Error& bound_error = Error::Handle(zone());
const Type& int_type = Type::Handle(zone(), Type::IntType());
- const bool smi_is_ok = int_type.IsSubtypeOf(type, &bound_error, Heap::kOld);
+ const bool smi_is_ok =
+ int_type.IsSubtypeOf(type, &bound_error, NULL, Heap::kOld);
// Malformed type should have been handled at graph construction time.
ASSERT(smi_is_ok || bound_error.IsNull());
__ tsti(kInstanceReg, Immediate(kSmiTagMask));
@@ -310,7 +311,7 @@
ASSERT(tp_argument.HasResolvedTypeClass());
// Check if type argument is dynamic or Object.
const Type& object_type = Type::Handle(zone(), Type::ObjectType());
- if (object_type.IsSubtypeOf(tp_argument, NULL, Heap::kOld)) {
+ if (object_type.IsSubtypeOf(tp_argument, NULL, NULL, Heap::kOld)) {
// Instance class test only necessary.
return GenerateSubtype1TestCacheLookup(
token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
@@ -370,6 +371,7 @@
type_class,
TypeArguments::Handle(zone()),
NULL,
+ NULL,
Heap::kOld)) {
__ b(is_instance_lbl, EQ);
} else {
@@ -397,7 +399,7 @@
// Custom checking for numbers (Smi, Mint, Bigint and Double).
// Note that instance is not Smi (checked above).
if (type.IsSubtypeOf(
- Type::Handle(zone(), Type::Number()), NULL, Heap::kOld)) {
+ Type::Handle(zone(), Type::Number()), NULL, NULL, Heap::kOld)) {
GenerateNumberTypeCheck(
kClassIdReg, type, is_instance_lbl, is_not_instance_lbl);
return false;
diff --git a/runtime/vm/flow_graph_compiler_ia32.cc b/runtime/vm/flow_graph_compiler_ia32.cc
index e142041..0476344 100644
--- a/runtime/vm/flow_graph_compiler_ia32.cc
+++ b/runtime/vm/flow_graph_compiler_ia32.cc
@@ -282,7 +282,8 @@
const Register kInstanceReg = EAX;
Error& bound_error = Error::Handle(zone());
const Type& int_type = Type::Handle(zone(), Type::IntType());
- const bool smi_is_ok = int_type.IsSubtypeOf(type, &bound_error, Heap::kOld);
+ const bool smi_is_ok =
+ int_type.IsSubtypeOf(type, &bound_error, NULL, Heap::kOld);
// Malformed type should have been handled at graph construction time.
ASSERT(smi_is_ok || bound_error.IsNull());
__ testl(kInstanceReg, Immediate(kSmiTagMask));
@@ -322,7 +323,7 @@
ASSERT(tp_argument.HasResolvedTypeClass());
// Check if type argument is dynamic or Object.
const Type& object_type = Type::Handle(zone(), Type::ObjectType());
- if (object_type.IsSubtypeOf(tp_argument, NULL, Heap::kOld)) {
+ if (object_type.IsSubtypeOf(tp_argument, NULL, NULL, Heap::kOld)) {
// Instance class test only necessary.
return GenerateSubtype1TestCacheLookup(
token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
@@ -381,6 +382,7 @@
type_class,
TypeArguments::Handle(zone()),
NULL,
+ NULL,
Heap::kOld)) {
__ j(ZERO, is_instance_lbl);
} else {
@@ -408,7 +410,7 @@
// Custom checking for numbers (Smi, Mint, Bigint and Double).
// Note that instance is not Smi (checked above).
if (type.IsSubtypeOf(
- Type::Handle(zone(), Type::Number()), NULL, Heap::kOld)) {
+ Type::Handle(zone(), Type::Number()), NULL, NULL, Heap::kOld)) {
GenerateNumberTypeCheck(
kClassIdReg, type, is_instance_lbl, is_not_instance_lbl);
return false;
diff --git a/runtime/vm/flow_graph_compiler_mips.cc b/runtime/vm/flow_graph_compiler_mips.cc
index 654b755..f9777c0 100644
--- a/runtime/vm/flow_graph_compiler_mips.cc
+++ b/runtime/vm/flow_graph_compiler_mips.cc
@@ -270,7 +270,8 @@
const Register kInstanceReg = A0;
Error& bound_error = Error::Handle(zone());
const Type& int_type = Type::Handle(zone(), Type::IntType());
- const bool smi_is_ok = int_type.IsSubtypeOf(type, &bound_error, Heap::kOld);
+ const bool smi_is_ok =
+ int_type.IsSubtypeOf(type, &bound_error, NULL, Heap::kOld);
// Malformed type should have been handled at graph construction time.
ASSERT(smi_is_ok || bound_error.IsNull());
__ andi(CMPRES1, kInstanceReg, Immediate(kSmiTagMask));
@@ -309,7 +310,7 @@
ASSERT(tp_argument.HasResolvedTypeClass());
// Check if type argument is dynamic or Object.
const Type& object_type = Type::Handle(zone(), Type::ObjectType());
- if (object_type.IsSubtypeOf(tp_argument, NULL, Heap::kOld)) {
+ if (object_type.IsSubtypeOf(tp_argument, NULL, NULL, Heap::kOld)) {
// Instance class test only necessary.
return GenerateSubtype1TestCacheLookup(
token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
@@ -369,6 +370,7 @@
type_class,
TypeArguments::Handle(zone()),
NULL,
+ NULL,
Heap::kOld)) {
__ beq(T0, ZR, is_instance_lbl);
} else {
@@ -394,7 +396,7 @@
// Custom checking for numbers (Smi, Mint, Bigint and Double).
// Note that instance is not Smi (checked above).
if (type.IsSubtypeOf(
- Type::Handle(zone(), Type::Number()), NULL, Heap::kOld)) {
+ Type::Handle(zone(), Type::Number()), NULL, NULL, Heap::kOld)) {
GenerateNumberTypeCheck(
kClassIdReg, type, is_instance_lbl, is_not_instance_lbl);
return false;
diff --git a/runtime/vm/flow_graph_compiler_x64.cc b/runtime/vm/flow_graph_compiler_x64.cc
index e5c09b0..b922bb8 100644
--- a/runtime/vm/flow_graph_compiler_x64.cc
+++ b/runtime/vm/flow_graph_compiler_x64.cc
@@ -278,7 +278,8 @@
const Register kInstanceReg = RAX;
Error& bound_error = Error::Handle(zone());
const Type& int_type = Type::Handle(zone(), Type::IntType());
- const bool smi_is_ok = int_type.IsSubtypeOf(type, &bound_error, Heap::kOld);
+ const bool smi_is_ok =
+ int_type.IsSubtypeOf(type, &bound_error, NULL, Heap::kOld);
// Malformed type should have been handled at graph construction time.
ASSERT(smi_is_ok || bound_error.IsNull());
__ testq(kInstanceReg, Immediate(kSmiTagMask));
@@ -318,7 +319,7 @@
ASSERT(tp_argument.HasResolvedTypeClass());
// Check if type argument is dynamic or Object.
const Type& object_type = Type::Handle(zone(), Type::ObjectType());
- if (object_type.IsSubtypeOf(tp_argument, NULL, Heap::kOld)) {
+ if (object_type.IsSubtypeOf(tp_argument, NULL, NULL, Heap::kOld)) {
// Instance class test only necessary.
return GenerateSubtype1TestCacheLookup(
token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
@@ -377,6 +378,7 @@
type_class,
TypeArguments::Handle(zone()),
NULL,
+ NULL,
Heap::kOld)) {
__ j(ZERO, is_instance_lbl);
} else {
@@ -404,7 +406,7 @@
// Custom checking for numbers (Smi, Mint, Bigint and Double).
// Note that instance is not Smi (checked above).
if (type.IsSubtypeOf(
- Type::Handle(zone(), Type::Number()), NULL, Heap::kOld)) {
+ Type::Handle(zone(), Type::Number()), NULL, NULL, Heap::kOld)) {
GenerateNumberTypeCheck(
kClassIdReg, type, is_instance_lbl, is_not_instance_lbl);
return false;
diff --git a/runtime/vm/flow_graph_optimizer.cc b/runtime/vm/flow_graph_optimizer.cc
index 429cff6..f10954a 100644
--- a/runtime/vm/flow_graph_optimizer.cc
+++ b/runtime/vm/flow_graph_optimizer.cc
@@ -3887,6 +3887,7 @@
type_class,
TypeArguments::Handle(Z),
NULL,
+ NULL,
Heap::kOld);
results->Add(cls.id());
results->Add(is_subtype);
@@ -3981,6 +3982,7 @@
type_class,
TypeArguments::Handle(),
NULL,
+ NULL,
Heap::kOld);
results->Add((*results)[results->length() - 2]);
results->Add((*results)[results->length() - 2]);
diff --git a/runtime/vm/flow_graph_type_propagator.cc b/runtime/vm/flow_graph_type_propagator.cc
index 4cb9840..93e06b7 100644
--- a/runtime/vm/flow_graph_type_propagator.cc
+++ b/runtime/vm/flow_graph_type_propagator.cc
@@ -414,10 +414,11 @@
const AbstractType* compile_type = ToAbstractType();
const AbstractType* other_compile_type = other->ToAbstractType();
- if (compile_type->IsMoreSpecificThan(*other_compile_type, NULL, Heap::kOld)) {
+ if (compile_type->IsMoreSpecificThan(
+ *other_compile_type, NULL, NULL, Heap::kOld)) {
type_ = other_compile_type;
} else if (other_compile_type->
- IsMoreSpecificThan(*compile_type, NULL, Heap::kOld)) {
+ IsMoreSpecificThan(*compile_type, NULL, NULL, Heap::kOld)) {
// Nothing to do.
} else {
// Can't unify.
@@ -620,7 +621,7 @@
return false;
}
- *is_instance = compile_type.IsMoreSpecificThan(type, NULL, Heap::kOld);
+ *is_instance = compile_type.IsMoreSpecificThan(type, NULL, NULL, Heap::kOld);
return *is_instance;
}
@@ -635,7 +636,7 @@
return IsNull();
}
- return ToAbstractType()->IsMoreSpecificThan(other, NULL, Heap::kOld);
+ return ToAbstractType()->IsMoreSpecificThan(other, NULL, NULL, Heap::kOld);
}
diff --git a/runtime/vm/intermediate_language.cc b/runtime/vm/intermediate_language.cc
index bcc0a91..be7171a 100644
--- a/runtime/vm/intermediate_language.cc
+++ b/runtime/vm/intermediate_language.cc
@@ -2057,7 +2057,7 @@
Error& bound_error = Error::Handle();
const AbstractType& new_dst_type = AbstractType::Handle(
dst_type().InstantiateFrom(
- instantiator_type_args, &bound_error, NULL, Heap::kOld));
+ instantiator_type_args, &bound_error, NULL, NULL, Heap::kOld));
// If dst_type is instantiated to dynamic or Object, skip the test.
if (!new_dst_type.IsMalformedOrMalbounded() && bound_error.IsNull() &&
(new_dst_type.IsDynamicType() || new_dst_type.IsObjectType())) {
@@ -2344,7 +2344,7 @@
static bool MaybeNumber(CompileType* type) {
ASSERT(Type::Handle(Type::Number()).IsMoreSpecificThan(
- Type::Handle(Type::Number()), NULL, Heap::kOld));
+ Type::Handle(Type::Number()), NULL, NULL, Heap::kOld));
return type->ToAbstractType()->IsDynamicType()
|| type->ToAbstractType()->IsObjectType()
|| type->ToAbstractType()->IsTypeParameter()
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index 481cbed..febf565 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -3626,6 +3626,7 @@
const Class& other,
const TypeArguments& other_type_arguments,
Error* bound_error,
+ TrailPtr bound_trail,
Heap::Space space) {
// Use the this object as if it was the receiver of this method, but instead
// of recursing, reset it to the super class and loop.
@@ -3684,6 +3685,7 @@
from_index,
num_type_params,
bound_error,
+ bound_trail,
space);
}
if (other.IsFunctionClass()) {
@@ -3744,7 +3746,11 @@
// The index of the type parameters is adjusted upon finalization.
error = Error::null();
interface_args =
- interface_args.InstantiateFrom(type_arguments, &error, NULL, space);
+ interface_args.InstantiateFrom(type_arguments,
+ &error,
+ NULL,
+ bound_trail,
+ space);
if (!error.IsNull()) {
// Return the first bound error to the caller if it requests it.
if ((bound_error != NULL) && bound_error->IsNull()) {
@@ -3758,6 +3764,7 @@
other,
other_type_arguments,
bound_error,
+ bound_trail,
space)) {
return true;
}
@@ -3784,6 +3791,7 @@
const Class& other,
const TypeArguments& other_type_arguments,
Error* bound_error,
+ TrailPtr bound_trail,
Heap::Space space) const {
return TypeTestNonRecursive(*this,
test_kind,
@@ -3791,6 +3799,7 @@
other,
other_type_arguments,
bound_error,
+ bound_trail,
space);
}
@@ -4300,6 +4309,7 @@
intptr_t from_index,
intptr_t len,
Error* bound_error,
+ TrailPtr bound_trail,
Heap::Space space) const {
ASSERT(Length() >= (from_index + len));
ASSERT(!other.IsNull());
@@ -4311,7 +4321,11 @@
ASSERT(!type.IsNull());
other_type = other.TypeAt(from_index + i);
ASSERT(!other_type.IsNull());
- if (!type.TypeTest(test_kind, other_type, bound_error, space)) {
+ if (!type.TypeTest(test_kind,
+ other_type,
+ bound_error,
+ bound_trail,
+ space)) {
return false;
}
}
@@ -4361,6 +4375,7 @@
void TypeArguments::SetTypeAt(intptr_t index,
const AbstractType& value) const {
+ ASSERT(!IsCanonical());
StorePointer(TypeAddr(index), value.raw());
}
@@ -4538,7 +4553,8 @@
RawTypeArguments* TypeArguments::InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error,
- TrailPtr trail,
+ TrailPtr instantiation_trail,
+ TrailPtr bound_trail,
Heap::Space space) const {
ASSERT(!IsInstantiated());
if (!instantiator_type_arguments.IsNull() &&
@@ -4561,7 +4577,8 @@
if (!type.IsNull() && !type.IsInstantiated()) {
type = type.InstantiateFrom(instantiator_type_arguments,
bound_error,
- trail,
+ instantiation_trail,
+ bound_trail,
space);
}
instantiated_array.SetTypeAt(i, type);
@@ -4595,7 +4612,7 @@
// Cache lookup failed. Instantiate the type arguments.
TypeArguments& result = TypeArguments::Handle();
result = InstantiateFrom(
- instantiator_type_arguments, bound_error, NULL, Heap::kOld);
+ instantiator_type_arguments, bound_error, NULL, NULL, Heap::kOld);
if ((bound_error != NULL) && !bound_error->IsNull()) {
return result.raw();
}
@@ -4826,6 +4843,11 @@
for (intptr_t i = 0; i < num_types; i++) {
type_arg = TypeAt(i);
type_arg = type_arg.Canonicalize(trail);
+ if (IsCanonical()) {
+ // Canonicalizing this type_arg canonicalized this type.
+ ASSERT(IsRecursive());
+ return this->raw();
+ }
SetTypeAt(i, type_arg);
}
// Canonicalization of a recursive type may change its hash.
@@ -5938,10 +5960,12 @@
AbstractType& other_param_type =
AbstractType::Handle(other.ParameterTypeAt(other_parameter_position));
if (!other_param_type.IsInstantiated()) {
- other_param_type = other_param_type.InstantiateFrom(other_type_arguments,
- bound_error,
- NULL, // trail
- space);
+ other_param_type =
+ other_param_type.InstantiateFrom(other_type_arguments,
+ bound_error,
+ NULL, // instantiation_trail
+ NULL, // bound_trail
+ space);
ASSERT((bound_error == NULL) || bound_error->IsNull());
}
if (other_param_type.IsDynamicType()) {
@@ -5950,21 +5974,25 @@
AbstractType& param_type =
AbstractType::Handle(ParameterTypeAt(parameter_position));
if (!param_type.IsInstantiated()) {
- param_type = param_type.InstantiateFrom(
- type_arguments, bound_error, NULL /*trail*/, space);
+ param_type = param_type.InstantiateFrom(type_arguments,
+ bound_error,
+ NULL, // instantiation_trail
+ NULL, // bound_trail
+ space);
ASSERT((bound_error == NULL) || bound_error->IsNull());
}
if (param_type.IsDynamicType()) {
return test_kind == kIsSubtypeOf;
}
if (test_kind == kIsSubtypeOf) {
- if (!param_type.IsSubtypeOf(other_param_type, bound_error, space) &&
- !other_param_type.IsSubtypeOf(param_type, bound_error, space)) {
+ if (!param_type.IsSubtypeOf(other_param_type, bound_error, NULL, space) &&
+ !other_param_type.IsSubtypeOf(param_type, bound_error, NULL, space)) {
return false;
}
} else {
ASSERT(test_kind == kIsMoreSpecificThan);
- if (!param_type.IsMoreSpecificThan(other_param_type, bound_error, space)) {
+ if (!param_type.IsMoreSpecificThan(
+ other_param_type, bound_error, NULL, space)) {
return false;
}
}
@@ -6387,7 +6415,9 @@
while (i < num_fixed_params) {
param_type = ParameterTypeAt(i);
ASSERT(!param_type.IsNull());
- if (instantiate && !param_type.IsInstantiated()) {
+ if (instantiate &&
+ param_type.IsFinalized() &&
+ !param_type.IsInstantiated()) {
param_type = param_type.InstantiateFrom(instantiator, NULL);
}
name = param_type.BuildName(name_visibility);
@@ -6412,7 +6442,9 @@
pieces->Add(Symbols::ColonSpace());
}
param_type = ParameterTypeAt(i);
- if (instantiate && !param_type.IsInstantiated()) {
+ if (instantiate &&
+ param_type.IsFinalized() &&
+ !param_type.IsInstantiated()) {
param_type = param_type.InstantiateFrom(instantiator, NULL);
}
ASSERT(!param_type.IsNull());
@@ -6510,7 +6542,7 @@
&pieces);
pieces.Add(Symbols::RParenArrow());
AbstractType& res_type = AbstractType::Handle(zone, result_type());
- if (instantiate && !res_type.IsInstantiated()) {
+ if (instantiate && res_type.IsFinalized() && !res_type.IsInstantiated()) {
res_type = res_type.InstantiateFrom(instantiator, NULL);
}
name = res_type.BuildName(name_visibility);
@@ -14287,7 +14319,7 @@
}
other_class = instantiated_other.type_class();
return cls.IsSubtypeOf(type_arguments, other_class, other_type_arguments,
- bound_error, Heap::kOld);
+ bound_error, NULL, Heap::kOld);
}
@@ -14614,7 +14646,8 @@
RawAbstractType* AbstractType::InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error,
- TrailPtr trail,
+ TrailPtr instantiation_trail,
+ TrailPtr bound_trail,
Heap::Space space) const {
// AbstractType is an abstract class.
UNREACHABLE();
@@ -14674,6 +14707,47 @@
}
+bool AbstractType::TestAndAddToTrail(TrailPtr* trail) const {
+ if (*trail == NULL) {
+ *trail = new Trail(Thread::Current()->zone(), 4);
+ } else {
+ const intptr_t len = (*trail)->length();
+ for (intptr_t i = 0; i < len; i++) {
+ if ((*trail)->At(i).raw() == this->raw()) {
+ return true;
+ }
+ }
+ }
+ (*trail)->Add(*this);
+ return false;
+}
+
+
+bool AbstractType::TestAndAddBuddyToTrail(TrailPtr* trail,
+ const AbstractType& buddy) const {
+ if (*trail == NULL) {
+ *trail = new Trail(Thread::Current()->zone(), 4);
+ } else {
+ const intptr_t len = (*trail)->length();
+ ASSERT((len % 2) == 0);
+ const bool this_is_typeref = IsTypeRef();
+ const bool buddy_is_typeref = buddy.IsTypeRef();
+ ASSERT(this_is_typeref || buddy_is_typeref);
+ for (intptr_t i = 0; i < len; i += 2) {
+ if ((((*trail)->At(i).raw() == this->raw()) ||
+ (buddy_is_typeref && (*trail)->At(i).Equals(*this))) &&
+ (((*trail)->At(i + 1).raw() == buddy.raw()) ||
+ (this_is_typeref && (*trail)->At(i + 1).Equals(buddy)))) {
+ return true;
+ }
+ }
+ }
+ (*trail)->Add(*this);
+ (*trail)->Add(buddy);
+ return false;
+}
+
+
RawString* AbstractType::BuildName(NameVisibility name_visibility) const {
Zone* zone = Thread::Current()->zone();
if (IsBoundedType()) {
@@ -14880,6 +14954,7 @@
bool AbstractType::TypeTest(TypeTestKind test_kind,
const AbstractType& other,
Error* bound_error,
+ TrailPtr bound_trail,
Heap::Space space) const {
ASSERT(IsFinalized());
ASSERT(other.IsFinalized());
@@ -14950,7 +15025,9 @@
if (!bound.IsFinalized()) {
return false; // TODO(regis): Return "maybe after instantiation".
}
- if (bound.IsMoreSpecificThan(other, bound_error)) {
+ // The current bound_trail cannot be used, because operands are swapped and
+ // the test is different anyway (more specific vs. subtype).
+ if (bound.IsMoreSpecificThan(other, bound_error, NULL)) {
return true;
}
return false; // TODO(regis): We should return "maybe after instantiation".
@@ -15010,6 +15087,7 @@
Class::Handle(zone, other.type_class()),
TypeArguments::Handle(zone, other.arguments()),
bound_error,
+ bound_trail,
space);
}
@@ -15254,7 +15332,8 @@
RawAbstractType* Type::InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error,
- TrailPtr trail,
+ TrailPtr instantiation_trail,
+ TrailPtr bound_trail,
Heap::Space space) const {
Zone* zone = Thread::Current()->zone();
ASSERT(IsFinalized() || IsBeingFinalized());
@@ -15268,32 +15347,21 @@
if (arguments() == instantiator_type_arguments.raw()) {
return raw();
}
- // If this type is recursive, we may already be instantiating it.
- Type& instantiated_type = Type::Handle(zone);
- instantiated_type ^= OnlyBuddyInTrail(trail);
- if (!instantiated_type.IsNull()) {
- ASSERT(IsRecursive());
- return instantiated_type.raw();
- }
// Note that the type class has to be resolved at this time, but not
// necessarily finalized yet. We may be checking bounds at compile time or
// finalizing the type argument vector of a recursive type.
const Class& cls = Class::Handle(zone, type_class());
-
- // This uninstantiated type is not modified, as it can be instantiated
- // with different instantiators. Allocate a new instantiated version of it.
- instantiated_type =
- Type::New(cls, TypeArguments::Handle(zone), token_pos(), space);
TypeArguments& type_arguments = TypeArguments::Handle(zone, arguments());
ASSERT(type_arguments.Length() == cls.NumTypeArguments());
- if (type_arguments.IsRecursive()) {
- AddOnlyBuddyToTrail(&trail, instantiated_type);
- }
type_arguments = type_arguments.InstantiateFrom(instantiator_type_arguments,
bound_error,
- trail,
+ instantiation_trail,
+ bound_trail,
space);
- instantiated_type.set_arguments(type_arguments);
+ // This uninstantiated type is not modified, as it can be instantiated
+ // with different instantiators. Allocate a new instantiated version of it.
+ const Type& instantiated_type =
+ Type::Handle(zone, Type::New(cls, type_arguments, token_pos(), space));
if (IsFinalized()) {
instantiated_type.SetIsFinalized();
} else {
@@ -15457,6 +15525,11 @@
// Canonicalize the type arguments of the supertype, if any.
TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
type_args = type_args.Canonicalize(trail);
+ if (IsCanonical()) {
+ // Canonicalizing type_args canonicalized this type.
+ ASSERT(IsRecursive());
+ return this->raw();
+ }
set_arguments(type_args);
type = cls.CanonicalType(); // May be set while canonicalizing type args.
if (type.IsNull()) {
@@ -15501,6 +15574,11 @@
// vector may be longer than necessary. This is not an issue.
ASSERT(type_args.IsNull() || (type_args.Length() >= cls.NumTypeArguments()));
type_args = type_args.Canonicalize(trail);
+ if (IsCanonical()) {
+ // Canonicalizing type_args canonicalized this type as a side effect.
+ ASSERT(IsRecursive());
+ return this->raw();
+ }
set_arguments(type_args);
// Canonicalizing the type arguments may have changed the index, may have
@@ -15559,6 +15637,7 @@
void Type::set_arguments(const TypeArguments& value) const {
+ ASSERT(!IsCanonical());
StorePointer(&raw_ptr()->arguments_, value.raw());
}
@@ -15723,7 +15802,8 @@
RawAbstractType* FunctionType::InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error,
- TrailPtr trail,
+ TrailPtr instantiation_trail,
+ TrailPtr bound_trail,
Heap::Space space) const {
Zone* zone = Thread::Current()->zone();
ASSERT(IsFinalized() || IsBeingFinalized());
@@ -15734,36 +15814,25 @@
if (arguments() == instantiator_type_arguments.raw()) {
return raw();
}
- // If this type is recursive, we may already be instantiating it.
- FunctionType& instantiated_type = FunctionType::Handle(zone);
- instantiated_type ^= OnlyBuddyInTrail(trail);
- if (!instantiated_type.IsNull()) {
- ASSERT(IsRecursive());
- return instantiated_type.raw();
- }
// Note that the scope class has to be resolved at this time, but not
// necessarily finalized yet. We may be checking bounds at compile time or
// finalizing the type argument vector of a recursive type.
const Class& cls = Class::Handle(zone, scope_class());
-
- // This uninstantiated type is not modified, as it can be instantiated
- // with different instantiators. Allocate a new instantiated version of it.
- instantiated_type =
- FunctionType::New(cls,
- TypeArguments::Handle(zone),
- Function::Handle(zone, signature()),
- token_pos(),
- space);
TypeArguments& type_arguments = TypeArguments::Handle(zone, arguments());
ASSERT(type_arguments.Length() == cls.NumTypeArguments());
- if (type_arguments.IsRecursive()) {
- AddOnlyBuddyToTrail(&trail, instantiated_type);
- }
type_arguments = type_arguments.InstantiateFrom(instantiator_type_arguments,
bound_error,
- trail,
+ instantiation_trail,
+ bound_trail,
space);
- instantiated_type.set_arguments(type_arguments);
+ // This uninstantiated type is not modified, as it can be instantiated
+ // with different instantiators. Allocate a new instantiated version of it.
+ const FunctionType& instantiated_type = FunctionType::Handle(zone,
+ FunctionType::New(cls,
+ type_arguments,
+ Function::Handle(zone, signature()),
+ token_pos(),
+ space));
if (IsFinalized()) {
instantiated_type.SetIsFinalized();
} else {
@@ -15971,6 +16040,13 @@
ASSERT(type_args.IsNull() ||
(type_args.Length() >= scope_cls.NumTypeArguments()));
type_args = type_args.Canonicalize(trail);
+ if (IsCanonical()) {
+ // Canonicalizing type_args canonicalized this type as a side effect.
+ ASSERT(IsRecursive());
+ // Cycles via typedefs are detected and disallowed, but a function type can
+ // be recursive due to a cycle in its type arguments.
+ return this->raw();
+ }
set_arguments(type_args);
// Replace the actual function by a signature function.
@@ -16068,6 +16144,7 @@
void FunctionType::set_arguments(const TypeArguments& value) const {
+ ASSERT(!IsCanonical());
StorePointer(&raw_ptr()->arguments_, value.raw());
}
@@ -16168,21 +16245,28 @@
RawTypeRef* TypeRef::InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error,
- TrailPtr trail,
+ TrailPtr instantiation_trail,
+ TrailPtr bound_trail,
Heap::Space space) const {
TypeRef& instantiated_type_ref = TypeRef::Handle();
- instantiated_type_ref ^= OnlyBuddyInTrail(trail);
+ instantiated_type_ref ^= OnlyBuddyInTrail(instantiation_trail);
if (!instantiated_type_ref.IsNull()) {
return instantiated_type_ref.raw();
}
+ instantiated_type_ref = TypeRef::New();
+ AddOnlyBuddyToTrail(&instantiation_trail, instantiated_type_ref);
+
AbstractType& ref_type = AbstractType::Handle(type());
ASSERT(!ref_type.IsTypeRef());
AbstractType& instantiated_ref_type = AbstractType::Handle();
instantiated_ref_type = ref_type.InstantiateFrom(
- instantiator_type_arguments, bound_error, trail, space);
+ instantiator_type_arguments,
+ bound_error,
+ instantiation_trail,
+ bound_trail,
+ space);
ASSERT(!instantiated_ref_type.IsTypeRef());
- instantiated_type_ref = TypeRef::New(instantiated_ref_type);
- AddOnlyBuddyToTrail(&trail, instantiated_type_ref);
+ instantiated_type_ref.set_type(instantiated_ref_type);
return instantiated_type_ref.raw();
}
@@ -16194,13 +16278,14 @@
if (!cloned_type_ref.IsNull()) {
return cloned_type_ref.raw();
}
+ cloned_type_ref = TypeRef::New();
+ AddOnlyBuddyToTrail(&trail, cloned_type_ref);
AbstractType& ref_type = AbstractType::Handle(type());
ASSERT(!ref_type.IsTypeRef());
AbstractType& cloned_ref_type = AbstractType::Handle();
cloned_ref_type = ref_type.CloneUninstantiated(new_owner, trail);
ASSERT(!cloned_ref_type.IsTypeRef());
- cloned_type_ref = TypeRef::New(cloned_ref_type);
- AddOnlyBuddyToTrail(&trail, cloned_type_ref);
+ cloned_type_ref.set_type(cloned_ref_type);
return cloned_type_ref.raw();
}
@@ -16237,42 +16322,6 @@
}
-bool TypeRef::TestAndAddToTrail(TrailPtr* trail) const {
- if (*trail == NULL) {
- *trail = new Trail(Thread::Current()->zone(), 4);
- } else {
- const intptr_t len = (*trail)->length();
- for (intptr_t i = 0; i < len; i++) {
- if ((*trail)->At(i).raw() == this->raw()) {
- return true;
- }
- }
- }
- (*trail)->Add(*this);
- return false;
-}
-
-
-bool TypeRef::TestAndAddBuddyToTrail(TrailPtr* trail,
- const AbstractType& buddy) const {
- if (*trail == NULL) {
- *trail = new Trail(Thread::Current()->zone(), 4);
- } else {
- const intptr_t len = (*trail)->length();
- ASSERT((len % 2) == 0);
- for (intptr_t i = 0; i < len; i += 2) {
- if (((*trail)->At(i).raw() == this->raw()) &&
- ((*trail)->At(i + 1).raw() == buddy.raw())) {
- return true;
- }
- }
- }
- (*trail)->Add(*this);
- (*trail)->Add(buddy);
- return false;
-}
-
-
RawTypeRef* TypeRef::New() {
RawObject* raw = Object::Allocate(TypeRef::kClassId,
TypeRef::InstanceSize(),
@@ -16361,7 +16410,8 @@
RawAbstractType* TypeParameter::InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error,
- TrailPtr trail,
+ TrailPtr instantiation_trail,
+ TrailPtr bound_trail,
Heap::Space space) const {
ASSERT(IsFinalized());
if (instantiator_type_arguments.IsNull()) {
@@ -16373,6 +16423,12 @@
// type arguments are canonicalized at type finalization time. It would be too
// early to canonicalize the returned type argument here, since instantiation
// not only happens at run time, but also during type finalization.
+
+ // If the instantiated type parameter type_arg is a BoundedType, it means that
+ // it is still uninstantiated and that we are instantiating at finalization
+ // time (i.e. compile time).
+ // Indeed, the instantiator (type arguments of an instance) is always
+ // instantiated at run time and any bounds were checked during allocation.
return type_arg.raw();
}
@@ -16380,12 +16436,21 @@
bool TypeParameter::CheckBound(const AbstractType& bounded_type,
const AbstractType& upper_bound,
Error* bound_error,
+ TrailPtr bound_trail,
Heap::Space space) const {
ASSERT((bound_error != NULL) && bound_error->IsNull());
ASSERT(bounded_type.IsFinalized());
ASSERT(upper_bound.IsFinalized());
ASSERT(!bounded_type.IsMalformed());
- if (bounded_type.IsSubtypeOf(upper_bound, bound_error, space)) {
+ if (bounded_type.IsTypeRef() || upper_bound.IsTypeRef()) {
+ // Shortcut the bound check if the pair <bounded_type, upper_bound> is
+ // already in the trail.
+ if (bounded_type.TestAndAddBuddyToTrail(&bound_trail, upper_bound)) {
+ return true;
+ }
+ }
+
+ if (bounded_type.IsSubtypeOf(upper_bound, bound_error, bound_trail, space)) {
return true;
}
// Set bound_error if the caller is interested and if this is the first error.
@@ -16442,18 +16507,24 @@
RawAbstractType* TypeParameter::CloneUninstantiated(
const Class& new_owner, TrailPtr trail) const {
ASSERT(IsFinalized());
- AbstractType& upper_bound = AbstractType::Handle(bound());
- upper_bound = upper_bound.CloneUninstantiated(new_owner, trail);
+ TypeParameter& clone = TypeParameter::Handle();
+ clone ^= OnlyBuddyInTrail(trail);
+ if (!clone.IsNull()) {
+ return clone.raw();
+ }
const Class& old_owner = Class::Handle(parameterized_class());
const intptr_t new_index = index() +
new_owner.NumTypeArguments() - old_owner.NumTypeArguments();
- const TypeParameter& clone = TypeParameter::Handle(
- TypeParameter::New(new_owner,
- new_index,
- String::Handle(name()),
- upper_bound,
- token_pos()));
+ AbstractType& upper_bound = AbstractType::Handle(bound());
+ clone = TypeParameter::New(new_owner,
+ new_index,
+ String::Handle(name()),
+ upper_bound, // Not cloned yet.
+ token_pos());
clone.SetIsFinalized();
+ AddOnlyBuddyToTrail(&trail, clone);
+ upper_bound = upper_bound.CloneUninstantiated(new_owner, trail);
+ clone.set_bound(upper_bound);
return clone.raw();
}
@@ -16608,18 +16679,24 @@
RawAbstractType* BoundedType::InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error,
- TrailPtr trail,
+ TrailPtr instantiation_trail,
+ TrailPtr bound_trail,
Heap::Space space) const {
ASSERT(IsFinalized());
AbstractType& bounded_type = AbstractType::Handle(type());
ASSERT(bounded_type.IsFinalized());
+ AbstractType& instantiated_bounded_type =
+ AbstractType::Handle(bounded_type.raw());
if (!bounded_type.IsInstantiated()) {
- bounded_type = bounded_type.InstantiateFrom(instantiator_type_arguments,
- bound_error,
- trail,
- space);
- // In case types of instantiator_type_arguments are not finalized, then
- // the instantiated bounded_type is not finalized either.
+ instantiated_bounded_type =
+ bounded_type.InstantiateFrom(instantiator_type_arguments,
+ bound_error,
+ instantiation_trail,
+ bound_trail,
+ space);
+ // In case types of instantiator_type_arguments are not finalized
+ // (or instantiated), then the instantiated_bounded_type is not finalized
+ // (or instantiated) either.
// Note that instantiator_type_arguments must have the final length, though.
}
if ((Isolate::Current()->flags().type_checks()) &&
@@ -16627,34 +16704,49 @@
AbstractType& upper_bound = AbstractType::Handle(bound());
ASSERT(upper_bound.IsFinalized());
ASSERT(!upper_bound.IsObjectType() && !upper_bound.IsDynamicType());
- const TypeParameter& type_param = TypeParameter::Handle(type_parameter());
+ AbstractType& instantiated_upper_bound =
+ AbstractType::Handle(upper_bound.raw());
if (!upper_bound.IsInstantiated()) {
- upper_bound = upper_bound.InstantiateFrom(instantiator_type_arguments,
- bound_error,
- trail,
- space);
- // Instantiated upper_bound may not be finalized. See comment above.
+ instantiated_upper_bound =
+ upper_bound.InstantiateFrom(instantiator_type_arguments,
+ bound_error,
+ instantiation_trail,
+ bound_trail,
+ space);
+ // The instantiated_upper_bound may not be finalized or instantiated.
+ // See comment above.
}
if (bound_error->IsNull()) {
- if (bounded_type.IsBeingFinalized() ||
- upper_bound.IsBeingFinalized() ||
- (!type_param.CheckBound(bounded_type, upper_bound, bound_error) &&
+ // Shortcut the F-bounded case where we have reached a fixpoint.
+ if (instantiated_bounded_type.Equals(bounded_type) &&
+ instantiated_upper_bound.Equals(upper_bound)) {
+ return bounded_type.raw();
+ }
+ const TypeParameter& type_param = TypeParameter::Handle(type_parameter());
+ if (instantiated_bounded_type.IsBeingFinalized() ||
+ instantiated_upper_bound.IsBeingFinalized() ||
+ (!type_param.CheckBound(instantiated_bounded_type,
+ instantiated_upper_bound,
+ bound_error,
+ bound_trail) &&
bound_error->IsNull())) {
// We cannot determine yet whether the bounded_type is below the
// upper_bound, because one or both of them is still being finalized or
// uninstantiated.
- ASSERT(bounded_type.IsBeingFinalized() ||
- upper_bound.IsBeingFinalized() ||
- !bounded_type.IsInstantiated() ||
- !upper_bound.IsInstantiated());
+ ASSERT(instantiated_bounded_type.IsBeingFinalized() ||
+ instantiated_upper_bound.IsBeingFinalized() ||
+ !instantiated_bounded_type.IsInstantiated() ||
+ !instantiated_upper_bound.IsInstantiated());
// Postpone bound check by returning a new BoundedType with unfinalized
// or partially instantiated bounded_type and upper_bound, but keeping
// type_param.
- bounded_type = BoundedType::New(bounded_type, upper_bound, type_param);
+ instantiated_bounded_type = BoundedType::New(instantiated_bounded_type,
+ instantiated_upper_bound,
+ type_param);
}
}
}
- return bounded_type.raw();
+ return instantiated_bounded_type.raw();
}
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index 26e68ce..34440a1 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -907,6 +907,10 @@
};
+typedef ZoneGrowableHandlePtrArray<const AbstractType> Trail;
+typedef ZoneGrowableHandlePtrArray<const AbstractType>* TrailPtr;
+
+
class Class : public Object {
public:
intptr_t instance_size() const {
@@ -1128,12 +1132,14 @@
const Class& other,
const TypeArguments& other_type_arguments,
Error* bound_error,
+ TrailPtr bound_trail = NULL,
Heap::Space space = Heap::kNew) const {
return TypeTest(kIsSubtypeOf,
type_arguments,
other,
other_type_arguments,
bound_error,
+ bound_trail,
space);
}
@@ -1142,12 +1148,14 @@
const Class& other,
const TypeArguments& other_type_arguments,
Error* bound_error,
+ TrailPtr bound_trail = NULL,
Heap::Space space = Heap::kNew) const {
return TypeTest(kIsMoreSpecificThan,
type_arguments,
other,
other_type_arguments,
bound_error,
+ bound_trail,
space);
}
@@ -1470,6 +1478,7 @@
const Class& other,
const TypeArguments& other_type_arguments,
Error* bound_error,
+ TrailPtr bound_trail,
Heap::Space space) const;
static bool TypeTestNonRecursive(
@@ -1479,6 +1488,7 @@
const Class& other,
const TypeArguments& other_type_arguments,
Error* bound_error,
+ TrailPtr bound_trail,
Heap::Space space);
FINAL_HEAP_OBJECT_IMPLEMENTATION(Class, Object);
@@ -1524,9 +1534,6 @@
};
-typedef ZoneGrowableHandlePtrArray<const AbstractType> Trail;
-typedef ZoneGrowableHandlePtrArray<const AbstractType>* TrailPtr;
-
// A TypeArguments is an array of AbstractType.
class TypeArguments : public Object {
public:
@@ -1575,8 +1582,10 @@
intptr_t from_index,
intptr_t len,
Error* bound_error,
+ TrailPtr bound_trail = NULL,
Heap::Space space = Heap::kNew) const {
- return TypeTest(kIsSubtypeOf, other, from_index, len, bound_error, space);
+ return TypeTest(kIsSubtypeOf, other, from_index, len,
+ bound_error, bound_trail, space);
}
// Check the 'more specific' relationship, considering only a subvector of
@@ -1585,9 +1594,10 @@
intptr_t from_index,
intptr_t len,
Error* bound_error,
+ TrailPtr bound_trail = NULL,
Heap::Space space = Heap::kNew) const {
- return TypeTest(kIsMoreSpecificThan,
- other, from_index, len, bound_error, space);
+ return TypeTest(kIsMoreSpecificThan, other, from_index, len,
+ bound_error, bound_trail, space);
}
// Check if the vectors are equal (they may be null).
@@ -1643,7 +1653,8 @@
RawTypeArguments* InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error,
- TrailPtr trail = NULL,
+ TrailPtr instantiation_trail = NULL,
+ TrailPtr bound_trail = NULL,
Heap::Space space = Heap::kNew) const;
// Runtime instantiation with canonicalization. Not to be used during type
@@ -1700,6 +1711,7 @@
intptr_t from_index,
intptr_t len,
Error* bound_error,
+ TrailPtr bound_trail,
Heap::Space space) const;
// Return the internal or public name of a subvector of this type argument
@@ -2504,7 +2516,7 @@
const Function& other,
const TypeArguments& other_type_arguments,
Error* bound_error,
- Heap::Space space = Heap::kNew) const {
+ Heap::Space space = Heap::kNew) const {
return TypeTest(kIsMoreSpecificThan,
type_arguments,
other,
@@ -5271,7 +5283,8 @@
virtual RawAbstractType* InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error,
- TrailPtr trail = NULL,
+ TrailPtr instantiation_trail = NULL,
+ TrailPtr bound_trail = NULL,
Heap::Space space = Heap::kNew) const;
// Return a clone of this unfinalized type or the type itself if it is
@@ -5302,6 +5315,17 @@
// the trail. The receiver may only be added once with its only buddy.
void AddOnlyBuddyToTrail(TrailPtr* trail, const AbstractType& buddy) const;
+ // Return true if the receiver is contained in the trail.
+ // Otherwise, if the trail is null, allocate a trail, then add the receiver to
+ // the trail and return false.
+ bool TestAndAddToTrail(TrailPtr* trail) const;
+
+ // Return true if the pair <receiver, buddy> is contained in the trail.
+ // Otherwise, if the trail is null, allocate a trail, add the pair <receiver,
+ // buddy> to the trail and return false.
+ // The receiver may be added several times, each time with a different buddy.
+ bool TestAndAddBuddyToTrail(TrailPtr* trail, const AbstractType& buddy) const;
+
// The name of this type, including the names of its type arguments, if any.
virtual RawString* Name() const {
return BuildName(kInternalName);
@@ -5374,15 +5398,18 @@
// Check the subtype relationship.
bool IsSubtypeOf(const AbstractType& other,
Error* bound_error,
+ TrailPtr bound_trail = NULL,
Heap::Space space = Heap::kNew) const {
- return TypeTest(kIsSubtypeOf, other, bound_error, space);
+ return TypeTest(kIsSubtypeOf, other, bound_error, bound_trail, space);
}
// Check the 'more specific' relationship.
bool IsMoreSpecificThan(const AbstractType& other,
Error* bound_error,
+ TrailPtr bound_trail = NULL,
Heap::Space space = Heap::kNew) const {
- return TypeTest(kIsMoreSpecificThan, other, bound_error, space);
+ return TypeTest(kIsMoreSpecificThan, other,
+ bound_error, bound_trail, space);
}
private:
@@ -5390,6 +5417,7 @@
bool TypeTest(TypeTestKind test_kind,
const AbstractType& other,
Error* bound_error,
+ TrailPtr bound_trail,
Heap::Space space) const;
// Return the internal or public name of this type, including the names of its
@@ -5448,7 +5476,8 @@
virtual RawAbstractType* InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error,
- TrailPtr trail = NULL,
+ TrailPtr instantiation_trail = NULL,
+ TrailPtr bound_trail = NULL,
Heap::Space space = Heap::kNew) const;
virtual RawAbstractType* CloneUnfinalized() const;
virtual RawAbstractType* CloneUninstantiated(
@@ -5595,7 +5624,8 @@
virtual RawAbstractType* InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* malformed_error,
- TrailPtr trail = NULL,
+ TrailPtr instantiation_trail = NULL,
+ TrailPtr bound_trail = NULL,
Heap::Space space = Heap::kNew) const;
virtual RawAbstractType* CloneUnfinalized() const;
virtual RawAbstractType* CloneUninstantiated(
@@ -5671,7 +5701,8 @@
virtual RawTypeRef* InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error,
- TrailPtr trail = NULL,
+ TrailPtr instantiation_trail = NULL,
+ TrailPtr bound_trail = NULL,
Heap::Space space = Heap::kNew) const;
virtual RawTypeRef* CloneUninstantiated(
const Class& new_owner,
@@ -5680,17 +5711,6 @@
virtual intptr_t Hash() const;
- // Return true if the receiver is contained in the trail.
- // Otherwise, if the trail is null, allocate a trail, then add the receiver to
- // the trail and return false.
- bool TestAndAddToTrail(TrailPtr* trail) const;
-
- // Return true if the pair <receiver, buddy> is contained in the trail.
- // Otherwise, if the trail is null, allocate a trail, add the pair <receiver,
- // buddy> to the trail and return false.
- // The receiver may be added several times, each time with a different buddy.
- bool TestAndAddBuddyToTrail(TrailPtr* trail, const AbstractType& buddy) const;
-
static intptr_t InstanceSize() {
return RoundedAllocationSize(sizeof(RawTypeRef));
}
@@ -5743,6 +5763,7 @@
bool CheckBound(const AbstractType& bounded_type,
const AbstractType& upper_bound,
Error* bound_error,
+ TrailPtr bound_trail = NULL,
Heap::Space space = Heap::kNew) const;
virtual TokenPosition token_pos() const { return raw_ptr()->token_pos_; }
virtual bool IsInstantiated(TrailPtr trail = NULL) const {
@@ -5753,7 +5774,8 @@
virtual RawAbstractType* InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error,
- TrailPtr trail = NULL,
+ TrailPtr instantiation_trail = NULL,
+ TrailPtr bound_trail = NULL,
Heap::Space space = Heap::kNew) const;
virtual RawAbstractType* CloneUnfinalized() const;
virtual RawAbstractType* CloneUninstantiated(
@@ -5838,7 +5860,8 @@
virtual RawAbstractType* InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
Error* bound_error,
- TrailPtr trail = NULL,
+ TrailPtr instantiation_trail = NULL,
+ TrailPtr bound_trail = NULL,
Heap::Space space = Heap::kNew) const;
virtual RawAbstractType* CloneUnfinalized() const;
virtual RawAbstractType* CloneUninstantiated(
diff --git a/runtime/vm/parser.cc b/runtime/vm/parser.cc
index de7b405..2449f9d 100644
--- a/runtime/vm/parser.cc
+++ b/runtime/vm/parser.cc
@@ -13229,7 +13229,11 @@
ASSERT(!type.IsMalformedOrMalbounded());
if (!type.IsInstantiated()) {
Error& error = Error::Handle(Z);
- type ^= type.InstantiateFrom(*type_arguments, &error, NULL, Heap::kOld);
+ type ^= type.InstantiateFrom(*type_arguments,
+ &error,
+ NULL, // instantiation_trail
+ NULL, // bound_trail
+ Heap::kOld);
ASSERT(error.IsNull());
}
*type_arguments = type.arguments();
@@ -13393,7 +13397,8 @@
redirect_type ^= redirect_type.InstantiateFrom(
type_arguments,
&error,
- NULL, // trail
+ NULL, // instantiation_trail
+ NULL, // bound_trail
Heap::kOld);
if (!error.IsNull()) {
redirect_type = ClassFinalizer::NewFinalizedMalformedType(
@@ -13433,7 +13438,7 @@
return ThrowTypeError(redirect_type.token_pos(), redirect_type);
}
if (I->flags().type_checks() &&
- !redirect_type.IsSubtypeOf(type, NULL, Heap::kOld)) {
+ !redirect_type.IsSubtypeOf(type, NULL, NULL, Heap::kOld)) {
// Additional type checking of the result is necessary.
type_bound = type.raw();
}
diff --git a/tests/language/regress_25568_test.dart b/tests/language/regress_25568_test.dart
new file mode 100644
index 0000000..6e18a60
--- /dev/null
+++ b/tests/language/regress_25568_test.dart
@@ -0,0 +1,22 @@
+// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+main() {}
+
+class BacklogListEditorState
+ extends AbstractListEditorState<BacklogsState, BacklogListEditorState> {}
+
+class BacklogsState extends MutableEntityState<BacklogsState> {}
+
+abstract class AbstractListEditorState<ES extends ComponentState<ES>,
+ S extends AbstractListEditorState<ES, S>> extends ComponentState<S> {}
+
+abstract class ComponentState<S extends ComponentState<S>> {}
+
+abstract class EntityState<ES extends EntityState<ES>>
+ extends ComponentState<ES> {}
+
+abstract class MutableEntityState<S extends MutableEntityState<S>>
+ extends EntityState<S> implements ComponentState<S> {}
+